79 """Composing various (language) properties for the google engines (:ref:`google
82 This function is called by the various google engines (:ref:`google web
83 engine`, :ref:`google images engine`, :ref:`google news engine` and
84 :ref:`google videos engine`).
86 :param dict params: Request parameters of the engine. At least
87 a ``searxng_locale`` key should be in the dictionary.
89 :param eng_traits: Engine's traits fetched from google preferences
90 (:py:obj:`searx.enginelib.traits.EngineTraits`)
94 Py-Dictionary with the key/value pairs:
97 The language code that is used by google (e.g. ``lang_en`` or
101 The country code that is used by google (e.g. ``US`` or ``TW``)
104 An instance of :py:obj:`babel.core.Locale` built from the
105 ``searxng_locale`` value.
108 Google subdomain :py:obj:`google_domains` that fits to the country
112 Py-Dictionary with additional request arguments (can be passed to
113 :py:func:`urllib.parse.urlencode`).
115 - ``hl`` parameter: specifies the language of the user interface.
116 - ``lr`` parameter: restricts search results to documents written in
117 a particular language.
118 - ``cr`` parameter: restricts search results to documents
119 originating in a particular country.
120 - ``ie`` parameter: sets the character encoding scheme that should
121 be used to interpret the query string ('utf8').
122 - ``oe`` parameter: sets the character encoding scheme that should
123 be used to decode the XML result ('utf8').
126 Py-Dictionary with additional HTTP headers (can be passed to
143 sxng_locale = params.get(
'searxng_locale',
'all')
145 locale = babel.Locale.parse(sxng_locale, sep=
'-')
146 except babel.core.UnknownLocaleError:
149 eng_lang = eng_traits.get_language(sxng_locale,
'lang_en')
150 lang_code = eng_lang.split(
'_')[-1]
151 country = eng_traits.get_region(sxng_locale, eng_traits.all_locale)
161 ret_val[
'language'] = eng_lang
162 ret_val[
'country'] = country
163 ret_val[
'locale'] = locale
164 ret_val[
'subdomain'] = eng_traits.custom[
'supported_domains'].get(country.upper(),
'www.google.com')
176 ret_val[
'params'][
'hl'] = f
'{lang_code}-{country}'
192 ret_val[
'params'][
'lr'] = eng_lang
193 if sxng_locale ==
'all':
194 ret_val[
'params'][
'lr'] =
''
203 ret_val[
'params'][
'cr'] =
''
204 if len(sxng_locale.split(
'-')) > 1:
205 ret_val[
'params'][
'cr'] =
'country' + country
226 ret_val[
'params'][
'ie'] =
'utf8'
233 ret_val[
'params'][
'oe'] =
'utf8'
246 ret_val[
'headers'][
'Accept'] =
'*/*'
252 ret_val[
'cookies'][
'CONSENT'] =
"YES+"
314 for img_id, data_image
in RE_DATA_IMAGE.findall(dom.text_content()):
315 end_pos = data_image.rfind(
'=')
317 data_image = data_image[: end_pos + 1]
318 data_image_map[img_id] = data_image
319 logger.debug(
'data:image objects --> %s', list(data_image_map.keys()))
320 return data_image_map
324 """Get response from google's search request"""
326 detect_google_sorry(resp)
331 dom = html.fromstring(resp.text)
335 answer_list = eval_xpath(dom,
'//div[contains(@class, "LGOjhe")]')
336 for item
in answer_list:
337 for bubble
in eval_xpath(item,
'.//div[@class="nnFGuf"]'):
341 'answer': extract_text(item),
342 'url': (eval_xpath(item,
'../..//a/@href') + [
None])[0],
348 for result
in eval_xpath_list(dom, results_xpath):
351 title_tag = eval_xpath_getindex(result, title_xpath, 0, default=
None)
352 if title_tag
is None:
354 logger.debug(
'ignoring item from the result_xpath list: missing title')
356 title = extract_text(title_tag)
358 url = eval_xpath_getindex(result, href_xpath, 0,
None)
360 logger.debug(
'ignoring item from the result_xpath list: missing url of title "%s"', title)
363 content_nodes = eval_xpath(result, content_xpath)
364 content = extract_text(content_nodes)
367 logger.debug(
'ignoring item from the result_xpath list: missing content of title "%s"', title)
370 thumbnail = content_nodes[0].xpath(
'.//img/@src')
372 thumbnail = thumbnail[0]
373 if thumbnail.startswith(
'data:image'):
374 img_id = content_nodes[0].xpath(
'.//img/@id')
376 thumbnail = data_image_map.get(img_id[0])
380 results.append({
'url': url,
'title': title,
'content': content,
'thumbnail': thumbnail})
382 except Exception
as e:
383 logger.error(e, exc_info=
True)
387 for suggestion
in eval_xpath_list(dom, suggestion_xpath):
389 results.append({
'suggestion': extract_text(suggestion)})
def fetch_traits(engine_traits: EngineTraits, add_domains: bool = True):
    """Fetch languages and regions from Google's preferences page.

    Fills in place:

    - ``engine_traits.languages``: SearXNG language tag --> google
      ``lang_*`` code (e.g. ``en`` --> ``lang_en``),
    - ``engine_traits.regions``: SearXNG region tag --> google country
      code (e.g. ``en-US`` --> ``US``),
    - ``engine_traits.custom['supported_domains']`` (only when
      *add_domains* is true): country code --> google subdomain
      (e.g. ``TW`` --> ``www.google.com.tw``).

    :param engine_traits: traits object to populate in place.
    :param add_domains: additionally fetch
        https://www.google.com/supported_domains and build the
        country-to-subdomain map.
    :raises RuntimeError: when one of the google endpoints does not
        answer with an OK status.
    """
    engine_traits.custom['supported_domains'] = {}

    resp = get('https://www.google.com/preferences')
    if not resp.ok:
        raise RuntimeError("Response from Google's preferences is not OK.")

    # strip the XML declaration: lxml refuses to parse a str that carries one
    dom = html.fromstring(resp.text.replace('<?xml version="1.0" encoding="UTF-8"?>', ''))

    # supported language codes

    # google uses 'no' where babel expects Norwegian Bokmål 'nb'
    lang_map = {'no': 'nb'}
    for x in eval_xpath_list(dom, "//select[@name='hl']/option"):
        eng_lang = x.get("value")
        try:
            locale = babel.Locale.parse(lang_map.get(eng_lang, eng_lang), sep='-')
        except babel.UnknownLocaleError:
            print("INFO: google UI language %s (%s) is unknown by babel" % (eng_lang, x.text.split("(")[0].strip()))
            continue  # without this, a stale 'locale' from a prior iteration would be reused
        sxng_lang = language_tag(locale)

        conflict = engine_traits.languages.get(sxng_lang)
        if conflict:
            # keep the first mapping; only report genuinely different ones
            if conflict != eng_lang:
                print("CONFLICT: babel %s --> %s, %s" % (sxng_lang, conflict, eng_lang))
            continue
        engine_traits.languages[sxng_lang] = 'lang_' + eng_lang

    # alias languages
    engine_traits.languages['zh'] = 'lang_zh-CN'

    # supported region codes

    for x in eval_xpath_list(dom, "//select[@name='gl']/option"):
        eng_country = x.get("value")

        if eng_country in skip_countries:
            continue
        if eng_country == 'ZZ':
            # 'ZZ' is google's "all regions" placeholder
            engine_traits.all_locale = 'ZZ'
            continue

        sxng_locales = get_official_locales(eng_country, engine_traits.languages.keys(), regional=True)

        if not sxng_locales:
            print("ERROR: can't map from google country %s (%s) to a babel region." % (x.get('data-name'), eng_country))
            continue

        for sxng_locale in sxng_locales:
            engine_traits.regions[region_tag(sxng_locale)] = eng_country

    # alias regions
    engine_traits.regions['zh-CN'] = 'HK'

    # supported subdomains

    if add_domains:
        resp = get('https://www.google.com/supported_domains')
        if not resp.ok:
            raise RuntimeError("Response from https://www.google.com/supported_domains is not OK.")

        for domain in resp.text.split():
            domain = domain.strip()
            if not domain or domain in ['.google.com']:
                continue
            # domain looks like '.google.xy' / '.google.com.xy' --> region 'XY'
            region = domain.split('.')[-1].upper()
            engine_traits.custom['supported_domains'][region] = 'www' + domain
            if region == 'HK':
                # NOTE(review): there is no google.cn; google.com.hk is used
                # for zh-CN -- guard reconstructed, confirm against upstream
                engine_traits.custom['supported_domains']['CN'] = 'www' + domain