74 """Composing various (language) properties for the google engines (:ref:`google
77 This function is called by the various google engines (:ref:`google web
78 engine`, :ref:`google images engine`, :ref:`google news engine` and
79 :ref:`google videos engine`).
81 :param dict params: Request parameters of the engine. At least
82 a ``searxng_locale`` key should be in the dictionary.
84 :param eng_traits: Engine's traits fetched from google preferences
85 (:py:obj:`searx.enginelib.traits.EngineTraits`)
89 Py-Dictionary with the key/value pairs:
92 The language code that is used by google (e.g. ``lang_en`` or
96 The country code that is used by google (e.g. ``US`` or ``TW``)
99 An instance of :py:obj:`babel.core.Locale` built from the
100 ``searxng_locale`` value.
103 Google subdomain :py:obj:`google_domains` that fits the country
107 Py-Dictionary with additional request arguments (can be passed to
108 :py:func:`urllib.parse.urlencode`).
110 - ``hl`` parameter: specifies the language of the user interface.
111 - ``lr`` parameter: restricts search results to documents written in
112 a particular language.
113 - ``cr`` parameter: restricts search results to documents
114 originating in a particular country.
115 - ``ie`` parameter: sets the character encoding scheme that should
116 be used to interpret the query string ('utf8').
117 - ``oe`` parameter: sets the character encoding scheme that should
118 be used to decode the XML result ('utf8').
121 Py-Dictionary with additional HTTP headers (can be passed to
138 sxng_locale = params.get(
'searxng_locale',
'all')
140 locale = babel.Locale.parse(sxng_locale, sep=
'-')
141 except babel.core.UnknownLocaleError:
144 eng_lang = eng_traits.get_language(sxng_locale,
'lang_en')
145 lang_code = eng_lang.split(
'_')[-1]
146 country = eng_traits.get_region(sxng_locale, eng_traits.all_locale)
156 ret_val[
'language'] = eng_lang
157 ret_val[
'country'] = country
158 ret_val[
'locale'] = locale
159 ret_val[
'subdomain'] = eng_traits.custom[
'supported_domains'].get(country.upper(),
'www.google.com')
171 ret_val[
'params'][
'hl'] = f
'{lang_code}-{country}'
187 ret_val[
'params'][
'lr'] = eng_lang
188 if sxng_locale ==
'all':
189 ret_val[
'params'][
'lr'] =
''
198 ret_val[
'params'][
'cr'] =
''
199 if len(sxng_locale.split(
'-')) > 1:
200 ret_val[
'params'][
'cr'] =
'country' + country
221 ret_val[
'params'][
'ie'] =
'utf8'
228 ret_val[
'params'][
'oe'] =
'utf8'
241 ret_val[
'headers'][
'Accept'] =
'*/*'
247 ret_val[
'cookies'][
'CONSENT'] =
"YES+"
309 for img_id, data_image
in RE_DATA_IMAGE.findall(dom.text_content()):
310 end_pos = data_image.rfind(
'=')
312 data_image = data_image[: end_pos + 1]
313 data_image_map[img_id] = data_image
314 logger.debug(
'data:image objects --> %s', list(data_image_map.keys()))
315 return data_image_map
319 """Get response from google's search request"""
321 detect_google_sorry(resp)
326 dom = html.fromstring(resp.text)
330 answer_list = eval_xpath(dom,
'//div[contains(@class, "LGOjhe")]')
331 for item
in answer_list:
332 for bubble
in eval_xpath(item,
'.//div[@class="nnFGuf"]'):
336 'answer': extract_text(item),
337 'url': (eval_xpath(item,
'../..//a/@href') + [
None])[0],
343 for result
in eval_xpath_list(dom,
'.//div[contains(@jscontroller, "SC7lYd")]'):
347 title_tag = eval_xpath_getindex(result,
'.//a/h3[1]', 0, default=
None)
348 if title_tag
is None:
350 logger.debug(
'ignoring item from the result_xpath list: missing title')
352 title = extract_text(title_tag)
354 url = eval_xpath_getindex(result,
'.//a[h3]/@href', 0,
None)
356 logger.debug(
'ignoring item from the result_xpath list: missing url of title "%s"', title)
359 content_nodes = eval_xpath(result,
'.//div[contains(@data-sncf, "1")]')
360 for item
in content_nodes:
361 for script
in item.xpath(
".//script"):
362 script.getparent().remove(script)
364 content = extract_text(content_nodes)
367 logger.debug(
'ignoring item from the result_xpath list: missing content of title "%s"', title)
370 thumbnail = content_nodes[0].xpath(
'.//img/@src')
372 thumbnail = thumbnail[0]
373 if thumbnail.startswith(
'data:image'):
374 img_id = content_nodes[0].xpath(
'.//img/@id')
376 thumbnail = data_image_map.get(img_id[0])
380 results.append({
'url': url,
'title': title,
'content': content,
'thumbnail': thumbnail})
382 except Exception
as e:
383 logger.error(e, exc_info=
True)
387 for suggestion
in eval_xpath_list(dom, suggestion_xpath):
389 results.append({
'suggestion': extract_text(suggestion)})
424def fetch_traits(engine_traits: EngineTraits, add_domains: bool =
True):
425 """Fetch languages from Google."""
# NOTE(review): this chunk is garbled by extraction -- the embedded original
# line numbers (424, 425, 428, ...) jump, so statements between them (the
# ``if not resp.ok:`` guards in front of the two ``raise`` statements below,
# ``try:``/``continue`` lines, the ``add_domains`` guard, and the definition
# of ``skip_countries``) are elided from this view.  The comments below
# describe only the logic that is visible here.

# Start from an empty country-code -> google-subdomain map; it is filled at
# the bottom of this function.
428 engine_traits.custom[
'supported_domains'] = {}
# Scrape Google's preferences page; it lists the supported UI languages
# (the ``hl`` select) and countries (the ``gl`` select).
430 resp = get(
'https://www.google.com/preferences')
# presumably guarded by an elided ``if not resp.ok:`` -- confirm upstream
432 raise RuntimeError(
"Response from Google's preferences is not OK.")
# Strip the XML prolog so lxml parses the payload as a plain HTML document.
434 dom = html.fromstring(resp.text.replace(
'<?xml version="1.0" encoding="UTF-8"?>',
''))
# Google reports 'no' where babel expects 'nb' (presumably Norwegian
# Bokmal -- TODO confirm).
438 lang_map = {
'no':
'nb'}
# --- languages: one <option> per Google UI language -----------------------
439 for x
in eval_xpath_list(dom,
"//select[@name='hl']/option"):
440 eng_lang = x.get(
"value")
# Validate the code with babel; an elided ``try:`` presumably wraps this
# call (the matching ``except`` follows).
442 locale = babel.Locale.parse(lang_map.get(eng_lang, eng_lang), sep=
'-')
443 except babel.UnknownLocaleError:
444 print(
"INFO: google UI language %s (%s) is unknown by babel" % (eng_lang, x.text.split(
"(")[0].strip()))
# Normalize the babel locale to a SearXNG language tag.
446 sxng_lang = language_tag(locale)
# Report when a previously registered mapping disagrees with this one.
448 conflict = engine_traits.languages.get(sxng_lang)
450 if conflict != eng_lang:
451 print(
"CONFLICT: babel %s --> %s, %s" % (sxng_lang, conflict, eng_lang))
# Google's language values are stored with a ``lang_`` prefix.
453 engine_traits.languages[sxng_lang] =
'lang_' + eng_lang
# Special case: plain 'zh' is pinned to simplified Chinese.
456 engine_traits.languages[
'zh'] =
'lang_zh-CN'
# --- regions: one <option> per Google country -----------------------------
460 for x
in eval_xpath_list(dom,
"//select[@name='gl']/option"):
461 eng_country = x.get(
"value")
# ``skip_countries`` is defined outside this view -- TODO confirm contents.
463 if eng_country
in skip_countries:
# 'ZZ' looks like Google's "no particular region" value; it is stored as
# the engine's all-locale -- confirm against upstream.
465 if eng_country ==
'ZZ':
466 engine_traits.all_locale =
'ZZ'
# Ask babel for the official locales of this country, restricted to the
# languages collected in the loop above.
469 sxng_locales = get_official_locales(eng_country, engine_traits.languages.keys(), regional=
True)
# presumably guarded by an elided emptiness check on ``sxng_locales``
472 print(
"ERROR: can't map from google country %s (%s) to a babel region." % (x.get(
'data-name'), eng_country))
475 for sxng_locale
in sxng_locales:
476 engine_traits.regions[region_tag(sxng_locale)] = eng_country
# Special case: zh-CN requests are routed via the Hong Kong country code.
479 engine_traits.regions[
'zh-CN'] =
'HK'
# --- google subdomains (the ``add_domains`` guard is elided) --------------
484 resp = get(
'https://www.google.com/supported_domains')
# presumably guarded by an elided ``if not resp.ok:`` -- confirm upstream
486 raise RuntimeError(
"Response from https://www.google.com/supported_domains is not OK.")
488 for domain
in resp.text.split():
489 domain = domain.strip()
# Skip empty tokens and domains in an elided literal list (contents not
# visible in this view).
490 if not domain
or domain
in [
# The ccTLD (last dot-separated label) identifies the country.
494 region = domain.split(
'.')[-1].upper()
495 engine_traits.custom[
'supported_domains'][region] =
'www' + domain
# NOTE(review): the condition guarding this 'CN' entry is elided; as shown
# it reuses the loop's final ``domain`` -- confirm against upstream.
498 engine_traits.custom[
'supported_domains'][
'CN'] =
'www' + domain