75 """Composing various (language) properties for the google engines (:ref:`google
78 This function is called by the various google engines (:ref:`google web
79 engine`, :ref:`google images engine`, :ref:`google news engine` and
80 :ref:`google videos engine`).
82 :param dict param: Request parameters of the engine. At least
83 a ``searxng_locale`` key should be in the dictionary.
85 :param eng_traits: Engine's traits fetched from google preferences
86 (:py:obj:`searx.enginelib.traits.EngineTraits`)
90 Py-Dictionary with the key/value pairs:
93 The language code that is used by google (e.g. ``lang_en`` or
97 The country code that is used by google (e.g. ``US`` or ``TW``)
100 A instance of :py:obj:`babel.core.Locale` build from the
101 ``searxng_locale`` value.
104 Google subdomain :py:obj:`google_domains` that fits to the country
108 Py-Dictionary with additional request arguments (can be passed to
109 :py:func:`urllib.parse.urlencode`).
111 - ``hl`` parameter: specifies the interface language of user interface.
112 - ``lr`` parameter: restricts search results to documents written in
113 a particular language.
114 - ``cr`` parameter: restricts search results to documents
115 originating in a particular country.
116 - ``ie`` parameter: sets the character encoding scheme that should
117 be used to interpret the query string ('utf8').
118 - ``oe`` parameter: sets the character encoding scheme that should
119 be used to decode the XML result ('utf8').
122 Py-Dictionary with additional HTTP headers (can be passed to
    ret_val = {
        'language': None, 'country': None, 'subdomain': None, 'locale': None,
        'params': {}, 'headers': {}, 'cookies': {},
    }

    sxng_locale = params.get('searxng_locale', 'all')
    try:
        locale = babel.Locale.parse(sxng_locale, sep='-')
    except babel.core.UnknownLocaleError:
        locale = None

    eng_lang = eng_traits.get_language(sxng_locale, 'lang_en')
    lang_code = eng_lang.split('_')[-1]  # lang_zh-TW --> zh-TW / lang_en --> en
    country = eng_traits.get_region(sxng_locale, eng_traits.all_locale)
    ret_val['language'] = eng_lang
    ret_val['country'] = country
    ret_val['locale'] = locale
    ret_val['subdomain'] = eng_traits.custom['supported_domains'].get(country.upper(), 'www.google.com')
    # hl parameter: the interface language of the google user interface
    ret_val['params']['hl'] = f'{lang_code}-{country}'

    # lr parameter: restricts search results to documents written in a
    # particular language
    ret_val['params']['lr'] = eng_lang
    if sxng_locale == 'all':
        ret_val['params']['lr'] = ''

    # cr parameter: restricts search results to documents originating in a
    # particular country
    ret_val['params']['cr'] = ''
    if len(sxng_locale.split('-')) > 1:
        ret_val['params']['cr'] = 'country' + country

    # ie / oe parameter: encoding of the query string and of the result
    ret_val['params']['ie'] = 'utf8'
    ret_val['params']['oe'] = 'utf8'
    # additional HTTP headers and the consent cookie
    ret_val['headers']['Accept'] = '*/*'
    ret_val['cookies']['CONSENT'] = "YES+"

    return ret_val


def parse_data_images(dom):
    """Map image ids to the ``data:image`` URIs embedded in the result page."""
    data_image_map = {}
    for img_id, data_image in RE_DATA_IMAGE.findall(dom.text_content()):
        end_pos = data_image.rfind('=')
        if end_pos > 0:
            data_image = data_image[: end_pos + 1]
        data_image_map[img_id] = data_image
    logger.debug('data:image objects --> %s', list(data_image_map.keys()))
    return data_image_map
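

# Hedged illustration (made-up values, not real Google markup): RE_DATA_IMAGE is
# expected to yield (img_id, data_image) pairs, so the helper above builds a map
# from <img> element ids to their inline thumbnails.  response() below uses this
# map to resolve thumbnails whose ``src`` is only a data:image placeholder.  The
# '=' clipping keeps a captured value only up to its last '=' (typically the
# base64 padding), dropping trailing script debris.
def _example_data_image_map() -> dict:
    captured = 'data:image/jpeg;base64,/9j/4AAQSkZJRg==\\x3dXYZ'  # assumed raw capture
    return {'dimg_42': captured[: captured.rfind('=') + 1]}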
320 """Get response from google's search request"""
327 dom = html.fromstring(resp.text)
    # results --> answer
    answer_list = eval_xpath(dom, '//div[contains(@class, "LGOjhe")]')
    for item in answer_list:
        for bubble in eval_xpath(item, './/div[@class="nnFGuf"]'):
            bubble.drop_tree()
        results.add(
            results.types.Answer(
                answer=extract_text(item),
                url=(eval_xpath(item, '../..//a/@href') + [None])[0],
            )
        )
    # parse results
    for result in eval_xpath_list(dom, './/div[contains(@jscontroller, "SC7lYd")]'):
        try:
            title_tag = eval_xpath_getindex(result, './/a/h3[1]', 0, default=None)
            if title_tag is None:
                # this is not one of the common google result *sections*
                logger.debug('ignoring item from the result_xpath list: missing title')
                continue
            title = extract_text(title_tag)

            url = eval_xpath_getindex(result, './/a[h3]/@href', 0, None)
            if url is None:
                logger.debug('ignoring item from the result_xpath list: missing url of title "%s"', title)
                continue

            content_nodes = eval_xpath(result, './/div[contains(@data-sncf, "1")]')
            for item in content_nodes:
                for script in item.xpath(".//script"):
                    script.getparent().remove(script)

            content = extract_text(content_nodes)
            if not content:
                logger.debug('ignoring item from the result_xpath list: missing content of title "%s"', title)
                continue

            thumbnail = content_nodes[0].xpath('.//img/@src')
            if thumbnail:
                thumbnail = thumbnail[0]
                if thumbnail.startswith('data:image'):
                    img_id = content_nodes[0].xpath('.//img/@id')
                    if img_id:
                        thumbnail = data_image_map.get(img_id[0])
            else:
                thumbnail = None

            results.append({'url': url, 'title': title, 'content': content, 'thumbnail': thumbnail})

        except Exception as e:  # pylint: disable=broad-except
            logger.error(e, exc_info=True)
            continue
    # parse suggestions
    for suggestion in eval_xpath_list(dom, suggestion_xpath):
        results.append({'suggestion': extract_text(suggestion)})

    return results
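

# Hedged sketch (not a test from this repository) of exercising response()
# offline: saved result-page HTML is wrapped in a minimal stand-in object that
# only provides the ``text`` attribute read by the parsing code above.  The
# names _FakeResponse and _example_parse_saved_page are assumptions made for
# this example.
class _FakeResponse:
    def __init__(self, text: str):
        self.text = text


def _example_parse_saved_page(path: str):
    with open(path, encoding='utf-8') as fh:
        return response(_FakeResponse(fh.read()))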


def fetch_traits(engine_traits: EngineTraits, add_domains: bool = True):
    """Fetch languages, regions and supported domains from Google."""

    engine_traits.custom['supported_domains'] = {}

    resp = get('https://www.google.com/preferences')
    if not resp.ok:
        raise RuntimeError("Response from Google's preferences is not OK.")

    dom = html.fromstring(resp.text.replace('<?xml version="1.0" encoding="UTF-8"?>', ''))
    # supported language codes

    lang_map = {'no': 'nb'}
    for x in eval_xpath_list(dom, "//select[@name='hl']/option"):
        eng_lang = x.get("value")
        try:
            locale = babel.Locale.parse(lang_map.get(eng_lang, eng_lang), sep='-')
        except babel.UnknownLocaleError:
            print("INFO: google UI language %s (%s) is unknown by babel" % (eng_lang, x.text.split("(")[0].strip()))
            continue
        sxng_lang = language_tag(locale)

        conflict = engine_traits.languages.get(sxng_lang)
        if conflict:
            if conflict != eng_lang:
                print("CONFLICT: babel %s --> %s, %s" % (sxng_lang, conflict, eng_lang))
            continue
        engine_traits.languages[sxng_lang] = 'lang_' + eng_lang

    # alias languages
    engine_traits.languages['zh'] = 'lang_zh-CN'
    # supported region codes

    for x in eval_xpath_list(dom, "//select[@name='gl']/option"):
        eng_country = x.get("value")

        if eng_country in skip_countries:
            continue
        if eng_country == 'ZZ':
            engine_traits.all_locale = 'ZZ'
            continue

        sxng_locales = get_official_locales(eng_country, engine_traits.languages.keys(), regional=True)
        if not sxng_locales:
            print("ERROR: can't map from google country %s (%s) to a babel region." % (x.get('data-name'), eng_country))
            continue

        for sxng_locale in sxng_locales:
            engine_traits.regions[region_tag(sxng_locale)] = eng_country

    # alias regions
    engine_traits.regions['zh-CN'] = 'HK'
    # supported domains

    if add_domains:
        resp = get('https://www.google.com/supported_domains')
        if not resp.ok:
            raise RuntimeError("Response from https://www.google.com/supported_domains is not OK.")

        for domain in resp.text.split():
            domain = domain.strip()
            if not domain or domain in [
                '.google.com',
            ]:
                continue
            region = domain.split('.')[-1].upper()
            engine_traits.custom['supported_domains'][region] = 'www' + domain
            if region == 'HK':
                # there is no google.cn, use google.com.hk for zh-CN
                engine_traits.custom['supported_domains']['CN'] = 'www' + domain
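

# Illustrative, hand-written excerpt (not fetched from Google) of the traits
# that fetch_traits() fills in -- useful when reasoning about how
# get_google_info() above resolves language, region and subdomain.  The concrete
# values come from Google's preferences page and may change over time; the
# helper name _example_traits_excerpt is an assumption made for this example.
def _example_traits_excerpt() -> dict:
    return {
        # SearXNG language tag --> google's lr value (google 'no' maps to babel 'nb')
        'languages': {'en': 'lang_en', 'nb': 'lang_no', 'zh': 'lang_zh-CN'},
        # SearXNG region tag --> google country code (incl. the zh-CN --> HK alias)
        'regions': {'de-DE': 'DE', 'zh-TW': 'TW', 'zh-CN': 'HK'},
        # country code --> google subdomain, looked up by get_google_info()
        'custom': {'supported_domains': {'DE': 'www.google.de', 'TW': 'www.google.com.tw'}},
    }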