def get_sc_code(searxng_locale, params):
    """Get an actual ``sc`` argument from Startpage's search form (HTML page).

    Startpage puts a ``sc`` argument on every HTML :py:obj:`search form
    <search_form_xpath>`.  Without this argument Startpage considers the request
    is from a bot.  We do not know what is encoded in the value of the ``sc``
    argument, but it seems to be a kind of a *time-stamp*.

    Startpage's search form generates a new sc-code on each request.  This
    function scrapes a new sc-code from Startpage's home page every
    :py:obj:`sc_code_cache_sec` seconds.
    """
    global sc_code_ts, sc_code  # pylint: disable=global-statement

    # reuse the cached sc-code while it is younger than sc_code_cache_sec
    if sc_code and (time() < (sc_code_ts + sc_code_cache_sec)):
        logger.debug("get_sc_code: reuse '%s'", sc_code)
        return sc_code

    headers = {**params['headers']}
    headers['Origin'] = base_url
    headers['Referer'] = base_url + '/'

    # add Accept-Language header
    if searxng_locale == 'all':
        searxng_locale = 'en-US'
    locale = babel.Locale.parse(searxng_locale, sep='-')

    if send_accept_language_header:
        ac_lang = locale.language
        if locale.territory:
            ac_lang = "%s-%s,%s;q=0.9,*;q=0.5" % (
                locale.language,
                locale.territory,
                locale.language,
            )
        headers['Accept-Language'] = ac_lang

    get_sc_url = base_url + '/?sc=%s' % (sc_code)
    logger.debug("query new sc time-stamp ... %s", get_sc_url)
    logger.debug("headers: %s", headers)
    resp = get(get_sc_url, headers=headers)

    # Startpage redirects to its CAPTCHA page when it suspects a bot
    # NOTE(review): exception class names reconstructed from the visible
    # message= kwargs — confirm against the file's import block.
    if str(resp.url).startswith('https://www.startpage.com/sp/captcha'):
        raise SearxEngineCaptchaException(
            message="get_sc_code: got redirected to https://www.startpage.com/sp/captcha",
        )

    dom = lxml.html.fromstring(resp.text)

    try:
        sc_code = eval_xpath(dom, search_form_xpath + '//input[@name="sc"]/@value')[0]
    except IndexError as exc:
        # suspend the engine: the sc input vanished from the form (see PR-695)
        logger.debug("suspend startpage API --> https://github.com/searxng/searxng/pull/695")
        raise SearxEngineResponseException(
            suspended_time=7 * 24 * 3600,
            message="get_sc_code: [PR-695] query new sc time-stamp failed! (%s)" % resp.url,
        ) from exc

    sc_code_ts = time()
    logger.debug("get_sc_code: new value is: %s", sc_code)
    return sc_code
# Parse every result <div> of Startpage's result page into a result dict and
# append it to ``results``.  NOTE(review): the extraction dropped the lines
# that bind ``link`` — ``if not links: continue`` / ``link = links[0]`` are
# restored below, otherwise ``link.attrib`` is an unbound name.
for result in eval_xpath(dom, '//div[@class="w-gl"]/div[contains(@class, "result")]'):
    links = eval_xpath(result, './/a[contains(@class, "result-title result-link")]')
    if not links:
        continue
    link = links[0]
    url = link.attrib.get('href')

    # block google-ad url's
    if re.match(r"^http(s|)://(www\.)?google\.[a-z]+/aclk.*$", url):
        continue

    # block startpage search url's
    if re.match(r"^http(s|)://(www\.)?startpage\.com/do/search\?.*$", url):
        continue

    title = extract_text(eval_xpath(link, 'h2'))
    content = eval_xpath(result, './/p[contains(@class, "description")]')
    content = extract_text(content, allow_none=True) or ''

    published_date = None

    # check if search result starts with something like: "2 Sep 2014 ... "
    if re.match(r"^([1-9]|[1-2][0-9]|3[0-1]) [A-Z][a-z]{2} [0-9]{4} \.\.\. ", content):
        date_pos = content.find('...') + 4
        date_string = content[0 : date_pos - 5]
        # strip the date prefix from the content string
        content = content[date_pos:]
        try:
            published_date = dateutil.parser.parse(date_string, dayfirst=True)
        except ValueError:
            # date could not be parsed — keep the result without a date
            pass

    # check if search result starts with something like: "5 days ago ... "
    elif re.match(r"^[0-9]+ days? ago \.\.\. ", content):
        date_pos = content.find('...') + 4
        date_string = content[0 : date_pos - 5]
        # convert "n days ago" into an absolute datetime
        published_date = datetime.now() - timedelta(days=int(re.match(r'\d+', date_string).group()))
        # strip the date prefix from the content string
        content = content[date_pos:]

    if published_date:
        results.append({'url': url, 'title': title, 'content': content, 'publishedDate': published_date})
    else:
        results.append({'url': url, 'title': title, 'content': content})
def fetch_traits(engine_traits: EngineTraits):
    """Fetch :ref:`languages <startpage languages>` and :ref:`regions <startpage
    regions>` from Startpage."""
    # pylint: disable=too-many-branches

    headers = {
        'User-Agent': gen_useragent(),
        'Accept-Language': "en-US,en;q=0.5",  # fetch the settings page in English
    }
    resp = get('https://www.startpage.com/do/settings', headers=headers)

    # NOTE(review): guard condition reconstructed — the extraction dropped the
    # line before this print; a non-OK check is the only fit.
    if not resp.ok:
        print("ERROR: response from Startpage is not OK.")

    dom = lxml.html.fromstring(resp.text)

    # regions

    sp_region_names = []
    for option in dom.xpath('//form[@name="settings"]//select[@name="search_results_region"]/option'):
        sp_region_names.append(option.get('value'))

    for eng_tag in sp_region_names:
        if eng_tag == 'all':
            continue
        # Startpage's 'no_NO' is not a babel locale; map it to 'nb_NO' (norway)
        babel_region_tag = {'no_NO': 'nb_NO'}.get(eng_tag, eng_tag)

        try:
            if '-' in babel_region_tag:
                l, r = babel_region_tag.split('-')
                sxng_tag = region_tag(babel.Locale.parse(l + '_' + r, sep='_'))
            else:
                sxng_tag = region_tag(babel.Locale.parse(babel_region_tag, sep='_'))
        except babel.UnknownLocaleError:
            print("ERROR: can't determine babel locale of startpage's locale %s" % eng_tag)
            continue

        conflict = engine_traits.regions.get(sxng_tag)
        if conflict:
            if conflict != eng_tag:
                print("CONFLICT: babel %s --> %s, %s" % (sxng_tag, conflict, eng_tag))
            continue
        engine_traits.regions[sxng_tag] = eng_tag

    # languages

    catalog_engine2code = {name.lower(): lang_code for lang_code, name in babel.Locale('en').languages.items()}

    # get the native name of every language known by babel (no-territory tags only)
    for lang_code in filter(lambda lang_code: lang_code.find('_') == -1, babel.localedata.locale_identifiers()):
        native_name = babel.Locale(lang_code).get_language_name().lower()
        # add native name exactly as it is
        catalog_engine2code[native_name] = lang_code

        # add "normalized" language name (i.e. français becomes francais)
        unaccented_name = ''.join(filter(lambda c: not combining(c), normalize('NFKD', native_name)))
        if len(unaccented_name) == len(unaccented_name.encode()):
            # add only if the result is pure ASCII, otherwise normalization failed
            catalog_engine2code[unaccented_name] = lang_code

    # values that can't be determined by babel's language names
    # NOTE(review): only 'fantizhengwen' survived the extraction; the remaining
    # entries are reconstructed — confirm against upstream.
    catalog_engine2code.update(
        {
            # traditional chinese
            'fantizhengwen': 'zh_Hant',
            # Korean alphabet
            'hangul': 'ko',
            'malayam': 'ml',
            'norsk': 'nb',
            'sinhalese': 'si',
        }
    )

    # NOTE(review): this set is referenced below but its definition was lost in
    # the extraction; reconstructed — confirm against upstream.
    skip_eng_tags = {
        'english_uk',  # SearXNG lang 'en' already maps to 'english'
    }

    for option in dom.xpath('//form[@name="settings"]//select[@name="language"]/option'):
        eng_tag = option.get('value')
        if eng_tag in skip_eng_tags:
            continue
        name = extract_text(option).lower()

        # prefer the value attribute, fall back to the displayed name
        sxng_tag = catalog_engine2code.get(eng_tag)
        if sxng_tag is None:
            sxng_tag = catalog_engine2code[name]

        conflict = engine_traits.languages.get(sxng_tag)
        if conflict:
            if conflict != eng_tag:
                print("CONFLICT: babel %s --> %s, %s" % (sxng_tag, conflict, eng_tag))
            continue
        engine_traits.languages[sxng_tag] = eng_tag
_request_cat_web(query, params)
get_sc_code(searxng_locale, params)
fetch_traits(engine_traits: EngineTraits)