# SPDX-License-Identifier: AGPL-3.0-or-later
"""This is the implementation of the Google WEB engine.  Some of these
implementations (mainly :py:obj:`get_google_info`) are shared by other
engines:

- :ref:`google images engine`
- :ref:`google news engine`
- :ref:`google videos engine`
- :ref:`google scholar engine`
- :ref:`google autocomplete`

"""

import typing as t

import re
import random
import string
import time
from urllib.parse import urlencode
from lxml import html
import babel
import babel.core
import babel.languages

from searx.utils import extract_text, eval_xpath, eval_xpath_list, eval_xpath_getindex
from searx.locales import language_tag, region_tag, get_official_locales
from searx.network import get  # see https://github.com/searxng/searxng/issues/762
from searx.exceptions import SearxEngineCaptchaException
from searx.enginelib.traits import EngineTraits
from searx.result_types import EngineResults

if t.TYPE_CHECKING:
    from searx.extended_types import SXNG_Response
    from searx.search.processors import OnlineParams

about = {
    "website": 'https://www.google.com',
    "wikidata_id": 'Q9366',
    "official_api_documentation": 'https://developers.google.com/custom-search/',
    "use_official_api": False,
    "require_api_key": False,
    "results": 'HTML',
}

# engine dependent config
categories = ['general', 'web']
paging = True
max_page = 50
"""`Google max 50 pages`_

.. _Google max 50 pages: https://github.com/searxng/searxng/issues/2982
"""
time_range_support = True
safesearch = True

time_range_dict = {'day': 'd', 'week': 'w', 'month': 'm', 'year': 'y'}

# Filter results. 0: None, 1: Moderate, 2: Strict
filter_mapping = {0: 'off', 1: 'medium', 2: 'high'}
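# For example, request() below turns these mappings into URL arguments:
# time_range 'day' --> 'tbs=qdr:d', safesearch level 2 --> 'safe=high'.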

# specific xpath variables
# ------------------------

# Suggestions are links placed in a *card-section*; we extract only the text
# of the links, not the links themselves.
suggestion_xpath = '//div[contains(@class, "EIaa9b")]//a'


_arcid_range = string.ascii_letters + string.digits + "_-"
_arcid_random: tuple[str, int] | None = None


def ui_async(start: int) -> str:
    """Format of the response from UI's async request.

    - ``arc_id:<...>,use_ac:true,_fmt:prog``

    The arc_id is randomly generated every hour.
    """
    global _arcid_random  # pylint: disable=global-statement

    use_ac = "use_ac:true"
    # _fmt:html returns an HTTP 500 when a user searches for celebrities like
    # '!google natasha allegri' or '!google chris evans'
    _fmt = "_fmt:prog"

    # create a new random arc_id every hour
    if not _arcid_random or (int(time.time()) - _arcid_random[1]) > 3600:
        _arcid_random = (''.join(random.choices(_arcid_range, k=23)), int(time.time()))
    arc_id = f"arc_id:srp_{_arcid_random[0]}_1{start:02}"

    return ",".join([arc_id, use_ac, _fmt])
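
# A minimal sketch of what ui_async() returns (the 23-char arc_id part below
# is a hypothetical stand-in for the random value):
#
#   ui_async(10)
#   # -> 'arc_id:srp_AbCdEfGhIjKlMnOpQrStUvW_110,use_ac:true,_fmt:prog'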


def get_google_info(params: "OnlineParams", eng_traits: EngineTraits) -> dict[str, t.Any]:
    """Composing various (language) properties for the google engines (:ref:`google
    API`).

    This function is called by the various google engines (:ref:`google web
    engine`, :ref:`google images engine`, :ref:`google news engine` and
    :ref:`google videos engine`).

    :param params: Request parameters of the engine.  At least
        a ``searxng_locale`` key should be in the dictionary.

    :param eng_traits: Engine's traits fetched from google preferences
        (:py:obj:`searx.enginelib.traits.EngineTraits`)

    :rtype: dict
    :returns:
        Py-Dictionary with the key/value pairs:

        language:
            The language code that is used by google (e.g. ``lang_en`` or
            ``lang_zh-TW``)

        country:
            The country code that is used by google (e.g. ``US`` or ``TW``)

        locale:
            An instance of :py:obj:`babel.core.Locale` built from the
            ``searxng_locale`` value.

        subdomain:
            Google subdomain :py:obj:`google_domains` that fits the country
            code.

        params:
            Py-Dictionary with additional request arguments (can be passed to
            :py:func:`urllib.parse.urlencode`).

            - ``hl`` parameter: specifies the interface language of the user
              interface.
            - ``lr`` parameter: restricts search results to documents written
              in a particular language.
            - ``cr`` parameter: restricts search results to documents
              originating in a particular country.
            - ``ie`` parameter: sets the character encoding scheme that should
              be used to interpret the query string ('utf8').
            - ``oe`` parameter: sets the character encoding scheme that should
              be used to decode the XML result ('utf8').

        headers:
            Py-Dictionary with additional HTTP headers (can be passed to
            request's headers)

            - ``Accept: '*/*'``

    """

    ret_val: dict[str, t.Any] = {
        'language': None,
        'country': None,
        'subdomain': None,
        'params': {},
        'headers': {},
        'cookies': {},
        'locale': None,
    }

    sxng_locale = params.get('searxng_locale', 'all')
    try:
        locale = babel.Locale.parse(sxng_locale, sep='-')
    except babel.core.UnknownLocaleError:
        locale = None

    eng_lang = eng_traits.get_language(sxng_locale, 'lang_en')
    lang_code = eng_lang.split('_')[-1]  # lang_zh-TW --> zh-TW / lang_en --> en
    country = eng_traits.get_region(sxng_locale, eng_traits.all_locale)

    # Test zh_hans & zh_hant --> in the topmost links of the result list for
    # TW and HK you should find a wiktionary.org zh_hant link.  In the result
    # list for zh-CN there should be no zh_hant link; instead you should find
    # zh.m.wikipedia.org/zh somewhere near the top.

    # '!go 日 :zh-TW' --> https://zh.m.wiktionary.org/zh-hant/%E6%97%A5
    # '!go 日 :zh-CN' --> https://zh.m.wikipedia.org/zh/%E6%97%A5

    ret_val['language'] = eng_lang
    ret_val['country'] = country
    ret_val['locale'] = locale
    ret_val['subdomain'] = eng_traits.custom['supported_domains'].get(country.upper(), 'www.google.com')

    # hl parameter:
    # The hl parameter specifies the interface language (host language) of
    # your user interface.  To improve the performance and the quality of your
    # search results, you are strongly encouraged to set this parameter
    # explicitly.
    # https://developers.google.com/custom-search/docs/xml_results#hlsp
    # The Interface Language:
    # https://developers.google.com/custom-search/docs/xml_results_appendices#interfaceLanguages

    # https://github.com/searxng/searxng/issues/2515#issuecomment-1607150817
    ret_val['params']['hl'] = f'{lang_code}-{country}'

    # lr parameter:
    # The lr (language restrict) parameter restricts search results to
    # documents written in a particular language.
    # https://developers.google.com/custom-search/docs/xml_results#lrsp
    # Language Collection Values:
    # https://developers.google.com/custom-search/docs/xml_results_appendices#languageCollections
    #
    # To select 'all' languages an empty 'lr' value is used.
    #
    # Unlike other google services, Google Scholar supports selecting more
    # than one language.  The languages are separated by a pipe '|' (logical
    # OR).  For example: &lr=lang_zh-TW%7Clang_de selects articles written in
    # traditional chinese OR german.

    ret_val['params']['lr'] = eng_lang
    if sxng_locale == 'all':
        ret_val['params']['lr'] = ''

    # cr parameter:
    # The cr parameter restricts search results to documents originating in a
    # particular country.
    # https://developers.google.com/custom-search/docs/xml_results#crsp

    # specify a region (country) only if a region is given in the selected
    # locale --> https://github.com/searxng/searxng/issues/2672
    ret_val['params']['cr'] = ''
    if len(sxng_locale.split('-')) > 1:
        ret_val['params']['cr'] = 'country' + country

    # gl parameter: (mandatory by Google News)
    # The gl parameter value is a two-letter country code. For WebSearch
    # results, the gl parameter boosts search results whose country of origin
    # matches the parameter value. See the Country Codes section for a list of
    # valid values.
    # Specifying a gl parameter value in WebSearch requests should improve the
    # relevance of results. This is particularly true for international
    # customers and, even more specifically, for customers in English-speaking
    # countries other than the United States.
    # https://developers.google.com/custom-search/docs/xml_results#glsp

    # https://github.com/searxng/searxng/issues/2515#issuecomment-1606294635
    # ret_val['params']['gl'] = country

    # ie parameter:
    # The ie parameter sets the character encoding scheme that should be used
    # to interpret the query string. The default ie value is latin1.
    # https://developers.google.com/custom-search/docs/xml_results#iesp

    ret_val['params']['ie'] = 'utf8'

    # oe parameter:
    # The oe parameter sets the character encoding scheme that should be used
    # to decode the XML result. The default oe value is latin1.
    # https://developers.google.com/custom-search/docs/xml_results#oesp

    ret_val['params']['oe'] = 'utf8'

    # num parameter:
    # The num parameter identifies the number of search results to return.
    # The default num value is 10, and the maximum value is 20. If you request
    # more than 20 results, only 20 results will be returned.
    # https://developers.google.com/custom-search/docs/xml_results#numsp

    # HINT: seems to have no effect (tested in google WEB & Images)
    # ret_val['params']['num'] = 20

    # HTTP headers

    ret_val['headers']['Accept'] = '*/*'

    # Cookies

    # - https://github.com/searxng/searxng/pull/1679#issuecomment-1235432746
    # - https://github.com/searxng/searxng/issues/1555
    ret_val['cookies']['CONSENT'] = "YES+"

    return ret_val

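# A minimal usage sketch, assuming params['searxng_locale'] == 'de-DE' and
# traits fetched from Google (the values shown are what one would expect, not
# guaranteed):
#
#   google_info = get_google_info(params, traits)
#   # google_info['subdomain'] -> 'www.google.de'
#   # google_info['params']    -> {'hl': 'de-DE', 'lr': 'lang_de',
#   #                              'cr': 'countryDE', 'ie': 'utf8', 'oe': 'utf8'}
#
# request() below merges these into the query string, headers and cookies.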

def detect_google_sorry(resp: "SXNG_Response") -> None:
    if resp.url.host == 'sorry.google.com' or resp.url.path.startswith('/sorry'):
        raise SearxEngineCaptchaException()


def request(query: str, params: "OnlineParams") -> None:
    """Google search request"""
    # pylint: disable=line-too-long
    start = (params['pageno'] - 1) * 10
    str_async = ui_async(start)
    google_info = get_google_info(params, traits)
    logger.debug("ARC_ID: %s", str_async)

    # https://www.google.de/search?q=corona&hl=de&lr=lang_de&start=0&tbs=qdr%3Ad&safe=medium
    query_url = (
        'https://'
        + google_info['subdomain']
        + '/search'
        + "?"
        + urlencode(
            {
                'q': query,
                **google_info['params'],
                'filter': '0',
                'start': start,
                # 'vet': '12ahUKEwik3ZbIzfn7AhXMX_EDHbUDBh0QxK8CegQIARAC..i',
                # 'ved': '2ahUKEwik3ZbIzfn7AhXMX_EDHbUDBh0Q_skCegQIARAG',
                # 'cs' : 1,
                # 'sa': 'N',
                # 'yv': 3,
                # 'prmd': 'vin',
                # 'ei': 'GASaY6TxOcy_xc8PtYeY6AE',
                # 'sa': 'N',
                # 'sstk': 'AcOHfVkD7sWCSAheZi-0tx_09XDO55gTWY0JNq3_V26cNN-c8lfD45aZYPI8s_Bqp8s57AHz5pxchDtAGCA_cikAWSjy9kw3kgg'
                # formerly known as use_mobile_ui
                'asearch': 'arc',
                'async': str_async,
            }
        )
    )

    if params['time_range'] in time_range_dict:
        query_url += '&' + urlencode({'tbs': 'qdr:' + time_range_dict[params['time_range']]})
    if params['safesearch']:
        query_url += '&' + urlencode({'safe': filter_mapping[params['safesearch']]})
    params['url'] = query_url

    params['cookies'] = google_info['cookies']
    params['headers'].update(google_info['headers'])

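
# A sketch of the URL request() builds for query 'corona' with
# searxng_locale 'de-DE', time range 'day' and safesearch 'medium'
# (shortened and wrapped; the async arc_id part is random):
#
#   https://www.google.de/search?q=corona&hl=de-DE&lr=lang_de&cr=countryDE
#       &ie=utf8&oe=utf8&filter=0&start=0&asearch=arc
#       &async=arc_id%3Asrp_..._100%2Cuse_ac%3Atrue%2C_fmt%3Aprog
#       &tbs=qdr%3Ad&safe=medium
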
# =26;[3,"dimg_ZNMiZPCqE4apxc8P3a2tuAQ_137"]a87;data:image/jpeg;base64,/9j/4AAQSkZJRgABA
# ...6T+9Nl4cnD+gr9OK8I56/tX3l86nWYw//2Q==26;
RE_DATA_IMAGE = re.compile(r'"(dimg_[^"]*)"[^;]*;(data:image[^;]*;[^;]*);')
RE_DATA_IMAGE_end = re.compile(r'"(dimg_[^"]*)"[^;]*;(data:image[^;]*;[^;]*)$')


def parse_data_images(text: str):
    data_image_map = {}

    for img_id, data_image in RE_DATA_IMAGE.findall(text):
        end_pos = data_image.rfind('=')
        if end_pos > 0:
            data_image = data_image[: end_pos + 1]
        data_image_map[img_id] = data_image
    last = RE_DATA_IMAGE_end.search(text)
    if last:
        data_image_map[last.group(1)] = last.group(2)
    logger.debug('data:image objects --> %s', list(data_image_map.keys()))
    return data_image_map


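# A minimal sketch of parse_data_images() on synthetic input (the base64
# payload is hypothetical):
#
#   parse_data_images('=26;[3,"dimg_abc"]a87;data:image/jpeg;base64,/9j/4AAQ==26;')
#   # -> {'dimg_abc': 'data:image/jpeg;base64,/9j/4AAQ=='}
#
# The trailing garbage after the last '=' is stripped; response() resolves the
# img @id attributes found in the DOM against this map.

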
def response(resp: "SXNG_Response"):
    """Get response from google's search request"""
    # pylint: disable=too-many-branches, too-many-statements
    detect_google_sorry(resp)
    data_image_map = parse_data_images(resp.text)

    results = EngineResults()

    # convert the text to dom
    dom = html.fromstring(resp.text)

    # results --> answer
    answer_list = eval_xpath(dom, '//div[contains(@class, "LGOjhe")]')
    for item in answer_list:
        for bubble in eval_xpath(item, './/div[@class="nnFGuf"]'):
            bubble.drop_tree()
        results.add(
            results.types.Answer(
                answer=extract_text(item),
                url=(eval_xpath(item, '../..//a/@href') + [None])[0],
            )
        )

    # parse results

    for result in eval_xpath_list(dom, './/div[contains(@jscontroller, "SC7lYd")]'):
        # pylint: disable=too-many-nested-blocks

        try:
            title_tag = eval_xpath_getindex(result, './/a/h3[1]', 0, default=None)
            if title_tag is None:
                # this is not one of the common google results *sections*
                logger.debug('ignoring item from the result_xpath list: missing title')
                continue
            title = extract_text(title_tag)

            url = eval_xpath_getindex(result, './/a[h3]/@href', 0, None)
            if url is None:
                logger.debug('ignoring item from the result_xpath list: missing url of title "%s"', title)
                continue

            content_nodes = eval_xpath(result, './/div[contains(@data-sncf, "1")]')
            for item in content_nodes:
                for script in item.xpath(".//script"):
                    script.getparent().remove(script)

            content = extract_text(content_nodes)

            if not content:
                logger.debug('ignoring item from the result_xpath list: missing content of title "%s"', title)
                continue

            thumbnail = content_nodes[0].xpath('.//img/@src')
            if thumbnail:
                thumbnail = thumbnail[0]
                if thumbnail.startswith('data:image'):
                    img_id = content_nodes[0].xpath('.//img/@id')
                    if img_id:
                        thumbnail = data_image_map.get(img_id[0])
            else:
                thumbnail = None

            results.append({'url': url, 'title': title, 'content': content, 'thumbnail': thumbnail})

        except Exception as e:  # pylint: disable=broad-except
            logger.error(e, exc_info=True)
            continue

    # parse suggestion
    for suggestion in eval_xpath_list(dom, suggestion_xpath):
        # append suggestion
        results.append({'suggestion': extract_text(suggestion)})

    # return results
    return results


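# A sketch of one web result appended in response() (values hypothetical):
#
#   {'url': 'https://example.org/', 'title': 'Example Domain',
#    'content': 'This domain is for use in examples ...', 'thumbnail': None}
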
# get supported languages from their site


skip_countries = [
    # official language of google-country not in google-languages
    'AL',  # Albania (sq)
    'AZ',  # Azerbaijan (az)
    'BD',  # Bangladesh (bn)
    'BN',  # Brunei Darussalam (ms)
    'BT',  # Bhutan (dz)
    'ET',  # Ethiopia (am)
    'GE',  # Georgia (ka, os)
    'GL',  # Greenland (kl)
    'KH',  # Cambodia (km)
    'LA',  # Laos (lo)
    'LK',  # Sri Lanka (si, ta)
    'ME',  # Montenegro (sr)
    'MK',  # North Macedonia (mk, sq)
    'MM',  # Myanmar (my)
    'MN',  # Mongolia (mn)
    'MV',  # Maldives (dv) // dv_MV is unknown by babel
    'MY',  # Malaysia (ms)
    'NP',  # Nepal (ne)
    'TJ',  # Tajikistan (tg)
    'TM',  # Turkmenistan (tk)
    'UZ',  # Uzbekistan (uz)
]


def fetch_traits(engine_traits: EngineTraits, add_domains: bool = True):
    """Fetch languages from Google."""
    # pylint: disable=import-outside-toplevel, too-many-branches

    engine_traits.custom['supported_domains'] = {}

    resp = get('https://www.google.com/preferences')
    if not resp.ok:  # type: ignore
        raise RuntimeError("Response from Google's preferences is not OK.")

    dom = html.fromstring(resp.text.replace('<?xml version="1.0" encoding="UTF-8"?>', ''))

    # supported language codes

    lang_map = {'no': 'nb'}
    for x in eval_xpath_list(dom, "//select[@name='hl']/option"):
        eng_lang = x.get("value")
        try:
            locale = babel.Locale.parse(lang_map.get(eng_lang, eng_lang), sep='-')
        except babel.UnknownLocaleError:
            print("INFO: google UI language %s (%s) is unknown by babel" % (eng_lang, x.text.split("(")[0].strip()))
            continue
        sxng_lang = language_tag(locale)

        conflict = engine_traits.languages.get(sxng_lang)
        if conflict:
            if conflict != eng_lang:
                print("CONFLICT: babel %s --> %s, %s" % (sxng_lang, conflict, eng_lang))
            continue
        engine_traits.languages[sxng_lang] = 'lang_' + eng_lang

    # alias languages
    engine_traits.languages['zh'] = 'lang_zh-CN'

    # supported region codes

    for x in eval_xpath_list(dom, "//select[@name='gl']/option"):
        eng_country = x.get("value")

        if eng_country in skip_countries:
            continue
        if eng_country == 'ZZ':
            engine_traits.all_locale = 'ZZ'
            continue

        sxng_locales = get_official_locales(eng_country, engine_traits.languages.keys(), regional=True)

        if not sxng_locales:
            print("ERROR: can't map from google country %s (%s) to a babel region." % (x.get('data-name'), eng_country))
            continue

        for sxng_locale in sxng_locales:
            engine_traits.regions[region_tag(sxng_locale)] = eng_country

    # alias regions
    engine_traits.regions['zh-CN'] = 'HK'

    # supported domains

    if add_domains:
        resp = get('https://www.google.com/supported_domains')
        if not resp.ok:  # type: ignore
            raise RuntimeError("Response from https://www.google.com/supported_domains is not OK.")

        for domain in resp.text.split():  # type: ignore
            domain = domain.strip()
            if not domain or domain in [
                '.google.com',
            ]:
                continue
            region = domain.split('.')[-1].upper()
            engine_traits.custom['supported_domains'][region] = 'www' + domain  # type: ignore
            if region == 'HK':
                # There is no google.cn, we use .com.hk for zh-CN
                engine_traits.custom['supported_domains']['CN'] = 'www' + domain  # type: ignore
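

# After a successful fetch_traits() run one can expect entries like the
# following (hypothetical excerpt; actual values depend on Google's pages):
#
#   engine_traits.languages['de']                    -> 'lang_de'
#   engine_traits.regions['de-DE']                   -> 'DE'
#   engine_traits.custom['supported_domains']['DE']  -> 'www.google.de'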