.oO SearXNG Developer Documentation Oo.
Loading...
Searching...
No Matches
bing.py
Go to the documentation of this file.
1# SPDX-License-Identifier: AGPL-3.0-or-later
2"""This is the implementation of the Bing-WEB engine. Some of this
3implementations are shared by other engines:
4
5- :ref:`bing images engine`
6- :ref:`bing news engine`
7- :ref:`bing videos engine`
8
9On the `preference page`_ Bing offers a lot of languages and regions (see section
10LANGUAGE and COUNTRY/REGION). The Language is the language of the UI, we need
11in SearXNG to get the translations of data such as *"published last week"*.
12
13There is a description of the official search-APIs_, unfortunately this is not
14the API we can use or that bing itself would use. You can look up some things
15in the API to get a better picture of bing, but the value specifications like
16the market codes are usually outdated or at least no longer used by bing itself.
17
18The market codes have been harmonized and are identical for web, video and
19images. The news area has also been harmonized with the other categories. Only
20political adjustments still seem to be made -- for example, there is no news
21category for the Chinese market.
22
23.. _preference page: https://www.bing.com/account/general
24.. _search-APIs: https://learn.microsoft.com/en-us/bing/search-apis/
25
26"""
27# pylint: disable=too-many-branches, invalid-name
28
29from typing import TYPE_CHECKING
30import base64
31import re
32import time
33from urllib.parse import parse_qs, urlencode, urlparse
34from lxml import html
35import babel
36import babel.languages
37
38from searx.utils import eval_xpath, extract_text, eval_xpath_list, eval_xpath_getindex
39from searx.locales import language_tag, region_tag
40from searx.enginelib.traits import EngineTraits
41
42if TYPE_CHECKING:
43 import logging
44
45 logger = logging.getLogger()
46
47traits: EngineTraits
48
49about = {
50 "website": 'https://www.bing.com',
51 "wikidata_id": 'Q182496',
52 "official_api_documentation": 'https://www.microsoft.com/en-us/bing/apis/bing-web-search-api',
53 "use_official_api": False,
54 "require_api_key": False,
55 "results": 'HTML',
56}
57
58# engine dependent config
59categories = ['general', 'web']
60paging = True
61max_page = 200
62"""200 pages maximum (``&first=1991``)"""
63
64time_range_support = True
65safesearch = True
66"""Bing results are always SFW. To get NSFW links from bing some age
67verification by a cookie is needed / thats not possible in SearXNG.
68"""
69
70base_url = 'https://www.bing.com/search'
71"""Bing (Web) search URL"""
72
73
74def _page_offset(pageno):
75 return (int(pageno) - 1) * 10 + 1
76
77
78def set_bing_cookies(params, engine_language, engine_region):
79 params['cookies']['_EDGE_CD'] = f'm={engine_region}&u={engine_language}'
80 params['cookies']['_EDGE_S'] = f'mkt={engine_region}&ui={engine_language}'
81 logger.debug("bing cookies: %s", params['cookies'])
82
83
84def request(query, params):
85 """Assemble a Bing-Web request."""
86
87 engine_region = traits.get_region(params['searxng_locale'], traits.all_locale) # type: ignore
88 engine_language = traits.get_language(params['searxng_locale'], 'en') # type: ignore
89 set_bing_cookies(params, engine_language, engine_region)
90
91 page = params.get('pageno', 1)
92 query_params = {
93 'q': query,
94 # if arg 'pq' is missed, somtimes on page 4 we get results from page 1,
95 # don't ask why it is only sometimes / its M$ and they have never been
96 # deterministic ;)
97 'pq': query,
98 }
99
100 # To get correct page, arg first and this arg FORM is needed, the value PERE
101 # is on page 2, on page 3 its PERE1 and on page 4 its PERE2 .. and so forth.
102 # The 'first' arg should never send on page 1.
103
104 if page > 1:
105 query_params['first'] = _page_offset(page) # see also arg FORM
106 if page == 2:
107 query_params['FORM'] = 'PERE'
108 elif page > 2:
109 query_params['FORM'] = 'PERE%s' % (page - 2)
110
111 params['url'] = f'{base_url}?{urlencode(query_params)}'
112
113 if params.get('time_range'):
114 unix_day = int(time.time() / 86400)
115 time_ranges = {'day': '1', 'week': '2', 'month': '3', 'year': f'5_{unix_day-365}_{unix_day}'}
116 params['url'] += f'&filters=ex1:"ez{time_ranges[params["time_range"]]}"'
117
118 return params
119
120
121def response(resp):
122 # pylint: disable=too-many-locals
123
124 results = []
125 result_len = 0
126
127 dom = html.fromstring(resp.text)
128
129 # parse results again if nothing is found yet
130
131 for result in eval_xpath_list(dom, '//ol[@id="b_results"]/li[contains(@class, "b_algo")]'):
132
133 link = eval_xpath_getindex(result, './/h2/a', 0, None)
134 if link is None:
135 continue
136 url = link.attrib.get('href')
137 title = extract_text(link)
138
139 content = eval_xpath(result, './/p')
140 for p in content:
141 # Make sure that the element is free of:
142 # <span class="algoSlug_icon" # data-priority="2">Web</span>
143 for e in p.xpath('.//span[@class="algoSlug_icon"]'):
144 e.getparent().remove(e)
145 content = extract_text(content)
146
147 # get the real URL
148 if url.startswith('https://www.bing.com/ck/a?'):
149 # get the first value of u parameter
150 url_query = urlparse(url).query
151 parsed_url_query = parse_qs(url_query)
152 param_u = parsed_url_query["u"][0]
153 # remove "a1" in front
154 encoded_url = param_u[2:]
155 # add padding
156 encoded_url = encoded_url + '=' * (-len(encoded_url) % 4)
157 # decode base64 encoded URL
158 url = base64.urlsafe_b64decode(encoded_url).decode()
159
160 # append result
161 results.append({'url': url, 'title': title, 'content': content})
162
163 # get number_of_results
164 try:
165 result_len_container = "".join(eval_xpath(dom, '//span[@class="sb_count"]//text()'))
166 if "-" in result_len_container:
167
168 # Remove the part "from-to" for paginated request ...
169 result_len_container = result_len_container[result_len_container.find("-") * 2 + 2 :]
170
171 result_len_container = re.sub('[^0-9]', '', result_len_container)
172
173 if len(result_len_container) > 0:
174 result_len = int(result_len_container)
175
176 except Exception as e: # pylint: disable=broad-except
177 logger.debug('result error :\n%s', e)
178
179 if result_len and _page_offset(resp.search_params.get("pageno", 0)) > result_len:
180 # Avoid reading more results than avalaible.
181 # For example, if there is 100 results from some search and we try to get results from 120 to 130,
182 # Bing will send back the results from 0 to 10 and no error.
183 # If we compare results count with the first parameter of the request we can avoid this "invalid" results.
184 return []
185
186 results.append({'number_of_results': result_len})
187 return results
188
189
190def fetch_traits(engine_traits: EngineTraits):
191 """Fetch languages and regions from Bing-Web."""
192 # pylint: disable=import-outside-toplevel
193
194 from searx.network import get # see https://github.com/searxng/searxng/issues/762
195
196 resp = get("https://www.bing.com/account/general")
197 if not resp.ok: # type: ignore
198 print("ERROR: response from bing is not OK.")
199
200 dom = html.fromstring(resp.text) # type: ignore
201
202 # languages
203
204 engine_traits.languages['zh'] = 'zh-hans'
205
206 map_lang = {'prs': 'fa-AF', 'en': 'en-us'}
207 bing_ui_lang_map = {
208 # HINT: this list probably needs to be supplemented
209 'en': 'us', # en --> en-us
210 'da': 'dk', # da --> da-dk
211 }
212
213 for href in eval_xpath(dom, '//div[@id="language-section"]//li/a/@href'):
214 eng_lang = parse_qs(urlparse(href).query)['setlang'][0]
215 babel_lang = map_lang.get(eng_lang, eng_lang)
216 try:
217 sxng_tag = language_tag(babel.Locale.parse(babel_lang.replace('-', '_')))
218 except babel.UnknownLocaleError:
219 print("ERROR: language (%s) is unknown by babel" % (babel_lang))
220 continue
221 # Language (e.g. 'en' or 'de') from https://www.bing.com/account/general
222 # is converted by bing to 'en-us' or 'de-de'. But only if there is not
223 # already a '-' delemitter in the language. For instance 'pt-PT' -->
224 # 'pt-pt' and 'pt-br' --> 'pt-br'
225 bing_ui_lang = eng_lang.lower()
226 if '-' not in bing_ui_lang:
227 bing_ui_lang = bing_ui_lang + '-' + bing_ui_lang_map.get(bing_ui_lang, bing_ui_lang)
228
229 conflict = engine_traits.languages.get(sxng_tag)
230 if conflict:
231 if conflict != bing_ui_lang:
232 print(f"CONFLICT: babel {sxng_tag} --> {conflict}, {bing_ui_lang}")
233 continue
234 engine_traits.languages[sxng_tag] = bing_ui_lang
235
236 # regions (aka "market codes")
237
238 engine_traits.regions['zh-CN'] = 'zh-cn'
239
240 map_market_codes = {
241 'zh-hk': 'en-hk', # not sure why, but at M$ this is the market code for Hongkong
242 }
243 for href in eval_xpath(dom, '//div[@id="region-section"]//li/a/@href'):
244 cc_tag = parse_qs(urlparse(href).query)['cc'][0]
245 if cc_tag == 'clear':
246 engine_traits.all_locale = cc_tag
247 continue
248
249 # add market codes from official languages of the country ..
250 for lang_tag in babel.languages.get_official_languages(cc_tag, de_facto=True):
251 if lang_tag not in engine_traits.languages.keys():
252 # print("ignore lang: %s <-- %s" % (cc_tag, lang_tag))
253 continue
254 lang_tag = lang_tag.split('_')[0] # zh_Hant --> zh
255 market_code = f"{lang_tag}-{cc_tag}" # zh-tw
256
257 market_code = map_market_codes.get(market_code, market_code)
258 sxng_tag = region_tag(babel.Locale.parse('%s_%s' % (lang_tag, cc_tag.upper())))
259 conflict = engine_traits.regions.get(sxng_tag)
260 if conflict:
261 if conflict != market_code:
262 print("CONFLICT: babel %s --> %s, %s" % (sxng_tag, conflict, market_code))
263 continue
264 engine_traits.regions[sxng_tag] = market_code
fetch_traits(EngineTraits engine_traits)
Definition bing.py:190
_page_offset(pageno)
Definition bing.py:74
request(query, params)
Definition bing.py:84
set_bing_cookies(params, engine_language, engine_region)
Definition bing.py:78