def get_google_info(params, eng_traits):
    """Composing various (language) properties for the google engines.

    This function is called by the various google engines (:ref:`google web
    engine`, :ref:`google images engine`, :ref:`google news engine` and
    :ref:`google videos engine`).

    :param dict param: Request parameters of the engine.  At least
        a ``searxng_locale`` key should be in the dictionary.

    :param eng_traits: Engine's traits fetched from google preferences
        (:py:obj:`searx.enginelib.traits.EngineTraits`)

    :rtype: dict
    :returns:
        Py-Dictionary with the key/value pairs:

        language:
            The language code that is used by google (e.g. ``lang_en`` or
            ``lang_zh-TW``)

        country:
            The country code that is used by google (e.g. ``US`` or ``TW``)

        locale:
            An instance of :py:obj:`babel.core.Locale` built from the
            ``searxng_locale`` value.

        subdomain:
            Google subdomain :py:obj:`google_domains` that fits to the country
            code.

        params:
            Py-Dictionary with additional request arguments (can be passed to
            :py:func:`urllib.parse.urlencode`).

            - ``hl`` parameter: specifies the interface language of the user
              interface.
            - ``lr`` parameter: restricts search results to documents written in
              a particular language.
            - ``cr`` parameter: restricts search results to documents
              originating in a particular country.
            - ``ie`` parameter: sets the character encoding scheme that should
              be used to interpret the query string ('utf8').
            - ``oe`` parameter: sets the character encoding scheme that should
              be used to decode the XML result ('utf8').

        headers:
            Py-Dictionary with additional HTTP headers (can be passed to
            request's headers)
    """

    # every key documented above is assigned below; 'cookies' additionally
    # carries the consent cookie
    ret_val = {
        'language': None,
        'country': None,
        'subdomain': None,
        'params': {},
        'headers': {},
        'cookies': {},
        'locale': None,
    }

    sxng_locale = params.get('searxng_locale', 'all')
    try:
        locale = babel.Locale.parse(sxng_locale, sep='-')
    except babel.core.UnknownLocaleError:
        # 'all' (and any other unparsable tag) yields no babel locale
        locale = None

    eng_lang = eng_traits.get_language(sxng_locale, 'lang_en')
    lang_code = eng_lang.split('_')[-1]  # lang_zh-TW --> zh-TW / lang_en --> en
    country = eng_traits.get_region(sxng_locale, eng_traits.all_locale)

    ret_val['language'] = eng_lang
    ret_val['country'] = country
    ret_val['locale'] = locale
    ret_val['subdomain'] = eng_traits.custom['supported_domains'].get(country.upper(), 'www.google.com')

    # hl parameter: interface (host) language of the Google UI
    ret_val['params']['hl'] = f'{lang_code}-{country}'

    # lr parameter: restrict results to documents written in a particular
    # language; an empty value lifts the restriction ('all' languages)
    ret_val['params']['lr'] = eng_lang
    if sxng_locale == 'all':
        ret_val['params']['lr'] = ''

    # cr parameter: restrict results to documents originating in a particular
    # country; only set when the SearXNG locale carries a region part
    ret_val['params']['cr'] = ''
    if len(sxng_locale.split('-')) > 1:
        ret_val['params']['cr'] = 'country' + country

    # ie parameter: encoding used to interpret the query string
    ret_val['params']['ie'] = 'utf8'

    # oe parameter: encoding used to decode the result
    ret_val['params']['oe'] = 'utf8'

    # additional HTTP headers
    ret_val['headers']['Accept'] = '*/*'

    # cookie to skip Google's consent interstitial
    ret_val['cookies']['CONSENT'] = "YES+"

    return ret_val
def parse_data_images(dom):
    """Build a map from image element id to its inlined ``data:image`` URI.

    ``RE_DATA_IMAGE`` extracts ``(id, data)`` pairs from the document's text
    content; the resulting dict is used to resolve thumbnail placeholders in
    the result list.
    """
    data_image_map = {}
    for img_id, data_image in RE_DATA_IMAGE.findall(dom.text_content()):
        end_pos = data_image.rfind('=')
        # NOTE(review): the guard below fills a line missing from the mangled
        # source — the slice keeps everything up to the last '=' (base64
        # padding); confirm the exact condition against upstream.
        if end_pos > 0:
            data_image = data_image[: end_pos + 1]
        data_image_map[img_id] = data_image
    logger.debug('data:image objects --> %s', list(data_image_map.keys()))
    return data_image_map
def response(resp):
    """Get response from google's search request"""
    # pylint: disable=too-many-branches

    detect_google_sorry(resp)

    results = []

    # convert the text to dom
    dom = html.fromstring(resp.text)

    # id --> data:image URI map, consumed while parsing the result items below
    data_image_map = parse_data_images(dom)

    # results --> answer
    answer_list = eval_xpath(dom, '//div[contains(@class, "LGOjhe")]')
    for item in answer_list:
        results.append(
            {
                'answer': item.xpath("normalize-space()"),
                'url': (eval_xpath(item, '../..//a/@href') + [None])[0],
            }
        )

    # parse results
    for result in eval_xpath_list(dom, results_xpath):
        try:
            title_tag = eval_xpath_getindex(result, title_xpath, 0, default=None)
            if title_tag is None:
                # this is not one of the common google result items
                logger.debug('ignoring item from the result_xpath list: missing title')
                continue
            title = extract_text(title_tag)

            url = eval_xpath_getindex(result, href_xpath, 0, None)
            if url is None:
                logger.debug('ignoring item from the result_xpath list: missing url of title "%s"', title)
                continue

            content_nodes = eval_xpath(result, content_xpath)
            content = extract_text(content_nodes)
            if not content:
                logger.debug('ignoring item from the result_xpath list: missing content of title "%s"', title)
                continue

            img_src = content_nodes[0].xpath('.//img/@src')
            if img_src:
                img_src = img_src[0]
                if img_src.startswith('data:image'):
                    # placeholder src --> resolve via the data:image map
                    img_id = content_nodes[0].xpath('.//img/@id')
                    if img_id:
                        img_src = data_image_map.get(img_id[0])
            else:
                img_src = None

            results.append({'url': url, 'title': title, 'content': content, 'img_src': img_src})

        except Exception as e:  # pylint: disable=broad-except
            logger.error(e, exc_info=True)
            continue

    # parse suggestion
    for suggestion in eval_xpath_list(dom, suggestion_xpath):
        # append suggestion
        results.append({'suggestion': extract_text(suggestion)})

    return results
def fetch_traits(engine_traits: EngineTraits, add_domains: bool = True):
    """Fetch languages from Google."""
    # pylint: disable=too-many-branches

    engine_traits.custom['supported_domains'] = {}

    resp = get('https://www.google.com/preferences')
    if not resp.ok:
        raise RuntimeError("Response from Google's preferences is not OK.")

    dom = html.fromstring(resp.text.replace('<?xml version="1.0" encoding="UTF-8"?>', ''))

    # supported language codes

    lang_map = {'no': 'nb'}  # Google uses 'no', babel knows it as 'nb'
    for x in eval_xpath_list(dom, "//select[@name='hl']/option"):
        eng_lang = x.get("value")
        try:
            locale = babel.Locale.parse(lang_map.get(eng_lang, eng_lang), sep='-')
        except babel.UnknownLocaleError:
            print("ERROR: %s -> %s is unknown by babel" % (x.get("data-name"), eng_lang))
            continue
        sxng_lang = language_tag(locale)

        conflict = engine_traits.languages.get(sxng_lang)
        if conflict:
            if conflict != eng_lang:
                print("CONFLICT: babel %s --> %s, %s" % (sxng_lang, conflict, eng_lang))
            continue
        engine_traits.languages[sxng_lang] = 'lang_' + eng_lang

    # alias languages
    engine_traits.languages['zh'] = 'lang_zh-CN'

    # supported region codes

    for x in eval_xpath_list(dom, "//select[@name='gl']/option"):
        eng_country = x.get("value")

        if eng_country in skip_countries:
            continue
        if eng_country == 'ZZ':
            engine_traits.all_locale = 'ZZ'
            continue

        sxng_locales = get_official_locales(eng_country, engine_traits.languages.keys(), regional=True)

        if not sxng_locales:
            print("ERROR: can't map from google country %s (%s) to a babel region." % (x.get('data-name'), eng_country))
            continue

        for sxng_locale in sxng_locales:
            engine_traits.regions[region_tag(sxng_locale)] = eng_country

    # alias regions
    engine_traits.regions['zh-CN'] = 'HK'

    # supported domains

    if add_domains:
        resp = get('https://www.google.com/supported_domains')
        if not resp.ok:
            raise RuntimeError("Response from https://www.google.com/supported_domains is not OK.")

        for domain in resp.text.split():
            domain = domain.strip()
            if not domain or domain in [
                '.google.com',
            ]:
                continue
            region = domain.split('.')[-1].upper()
            engine_traits.custom['supported_domains'][region] = 'www' + domain  # type: ignore
            if region == 'HK':
                # There is no google.cn, we use .com.hk for zh-CN
                engine_traits.custom['supported_domains']['CN'] = 'www' + domain  # type: ignore