2"""This module implements functions needed for the autocompleter.
9from urllib.parse
import urlencode, quote_plus
13from httpx
import HTTPError
16from searx
import settings

def update_kwargs(**kwargs):
    if 'timeout' not in kwargs:
        kwargs['timeout'] = settings['outgoing']['request_timeout']
    kwargs['raise_for_httperror'] = True
    return kwargs


def get(*args, **kwargs) -> SXNG_Response:
    kwargs = update_kwargs(**kwargs)
    return http_get(*args, **kwargs)


def post(*args, **kwargs) -> SXNG_Response:
    kwargs = update_kwargs(**kwargs)
    return http_post(*args, **kwargs)
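
# Every backend below sends its requests through the get()/post() wrappers
# above, so each outgoing call gets settings['outgoing']['request_timeout']
# and raises on HTTP error status; those errors are caught centrally in
# search_autocomplete() at the end of this module.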

def baidu(query, _lang):
    # baidu search autocompleter
    base_url = "https://www.baidu.com/sugrec?"
    response = get(base_url + urlencode({'ie': 'utf-8', 'json': 1, 'prod': 'pc', 'wd': query}))

    results = []
    if response.ok:
        data = response.json()
        if 'g' in data:
            for item in data['g']:
                results.append(item['q'])
    return results

def brave(query, _lang):
    # brave search autocompleter
    url = 'https://search.brave.com/api/suggest?'
    url += urlencode({'q': query})
    country = 'all'
    kwargs = {'cookies': {'country': country}}
    resp = get(url, **kwargs)

    results = []
    if resp.ok:
        data = resp.json()
        for item in data[1]:
            results.append(item)
    return results

def dbpedia(query, _lang):
    # dbpedia autocompleter
    autocomplete_url = 'https://lookup.dbpedia.org/api/search.asmx/KeywordSearch?'
    response = get(autocomplete_url + urlencode(dict(QueryString=query)))

    results = []
    if response.ok:
        dom = lxml.etree.fromstring(response.content)
        results = dom.xpath('//Result/Label//text()')
    return results
92 """Autocomplete from DuckDuckGo. Supports DuckDuckGo's languages"""
94 traits = engines[
'duckduckgo'].traits
97 'kl': traits.get_region(sxng_locale, traits.all_locale),
100 url =
'https://duckduckgo.com/ac/?type=list&' + urlencode(args)
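
# Note (explanatory, not from the upstream module): the ``type=list`` endpoint
# answers in OpenSearch suggestion format, roughly ``["<query>", ["s1", "s2", ...]]``,
# which is why only the second element of the JSON reply is returned above.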
112 """Autocomplete from Google. Supports Google's languages and subdomains
113 (:py:obj:`searx.engines.google.get_google_info`) by using the async REST
116 https://{subdomain}/complete/search?{args}
120 google_info = google.get_google_info({
'searxng_locale': sxng_locale}, engines[
'google'].traits)
122 url =
'https://{subdomain}/complete/search?{args}'
127 'hl': google_info[
'params'][
'hl'],
131 resp =
get(url.format(subdomain=google_info[
'subdomain'], args=args))
133 json_txt = resp.text[resp.text.find(
'[') : resp.text.find(
']', -3) + 1]
134 data = json.loads(json_txt)
136 results.append(lxml.html.fromstring(item[0]).text_content())
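
# Note (explanatory, an assumption about the current response format): the
# complete/search endpoint wraps its JSON payload in a JavaScript callback,
# so the slice from the first '[' to the trailing ']' above strips that
# wrapper before json.loads().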
141 """Autocomplete from Mwmbl_."""
144 url =
'https://api.mwmbl.org/search/complete?{query}'
146 results =
get(url.format(query=urlencode({
'q': query}))).
json()[1]
149 return [result
for result
in results
if not result.startswith(
"go: ")
and not result.startswith(
"search: ")]

def naver(query, _lang):
    # Naver search autocompleter
    url = f"https://ac.search.naver.com/nx/ac?{urlencode({'q': query, 'r_format': 'json', 'st': 0})}"
    response = get(url)

    results = []
    if response.ok:
        data = response.json()
        if data.get('items'):
            for item in data['items'][0]:
                results.append(item[0])
    return results

def qihu360search(query, _lang):
    # 360Search autocompleter
    url = f"https://sug.so.360.cn/suggest?{urlencode({'format': 'json', 'word': query})}"
    response = get(url)

    results = []
    if response.ok:
        data = response.json()
        if 'result' in data:
            for item in data['result']:
                results.append(item['word'])
    return results

def quark(query, _lang):
    # Quark (Shenma) search autocompleter
    url = f"https://sugs.m.sm.cn/web?{urlencode({'q': query})}"
    response = get(url)

    results = []
    if response.ok:
        data = response.json()
        for item in data.get('r', []):
            results.append(item['w'])
    return results

def seznam(query, _lang):
    # seznam search autocompleter
    url = 'https://suggest.seznam.cz/fulltext/cs?{query}'

    resp = get(
        url.format(
            query=urlencode(
                {
                    'phrase': query,
                    'cursorPosition': len(query),
                    'format': 'json-2',
                    'highlight': '1',
                    'count': '6',
                }
            )
        )
    )
    if not resp.ok:
        return []

    data = resp.json()
    return [
        ''.join([part.get('text', '') for part in item.get('text', [])])
        for item in data.get('result', [])
        if item.get('itemType', None) == 'ItemType.TEXT'
    ]

def sogou(query, _lang):
    # Sogou search autocompleter
    base_url = "https://sor.html5.qq.com/api/getsug?"
    response = get(base_url + urlencode({'m': 'searxng', 'key': query}))
    if not response.ok:
        return []

    raw_json = extr(response.text, "[", "]", default="")
    try:
        data = json.loads(f"[{raw_json}]]")
        if len(data) > 1:
            return data[1]
    except json.JSONDecodeError:
        pass
    return []
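
# Note (explanatory, an assumption about the current Sogou payload): the
# endpoint wraps its JSON in a JavaScript callback, and extr(..., "[", "]")
# stops at the first closing bracket, which also cuts off the brackets that
# close the nested suggestion array -- f"[{raw_json}]]" puts them back before
# json.loads().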

def stract(query, sxng_locale):
    # stract autocompleter (beta)
    url = f"https://stract.com/beta/api/autosuggest?q={quote_plus(query)}"

    resp = post(url)
    if not resp.ok:
        return []
    return [html.unescape(suggestion['raw']) for suggestion in resp.json()]

def swisscows(query, _lang):
    # swisscows autocompleter
    url = 'https://swisscows.ch/api/suggest?{query}&itemsCount=5'

    resp = json.loads(get(url.format(query=urlencode({'query': query}))).text)
    return resp
257 """Autocomplete from Qwant. Supports Qwant's regions."""
260 locale = engines[
'qwant'].traits.get_region(sxng_locale,
'en_US')
261 url =
'https://api.qwant.com/v3/suggest?{query}'
262 resp =
get(url.format(query=urlencode({
'q': query,
'locale': locale,
'version':
'2'})))
266 if data[
'status'] ==
'success':
267 for item
in data[
'data'][
'items']:
268 results.append(item[
'value'])
274 """Autocomplete from Wikipedia. Supports Wikipedia's languages (aka netloc)."""
276 eng_traits = engines[
'wikipedia'].traits
277 wiki_lang = eng_traits.get_language(sxng_locale,
'en')
278 wiki_netloc = eng_traits.custom[
'wiki_netloc'].
get(wiki_lang,
'en.wikipedia.org')
280 url =
'https://{wiki_netloc}/w/api.php?{args}'
283 'action':
'opensearch',
285 'formatversion':
'2',
291 resp =
get(url.format(args=args, wiki_netloc=wiki_netloc))
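
# Note (explanatory, not from the upstream module): MediaWiki's
# ``action=opensearch`` replies with ``[query, [titles], [descriptions], [urls]]``;
# the completion terms returned above are the titles in the second element.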

def yandex(query, _lang):
    # yandex autocompleter
    url = "https://suggest.yandex.com/suggest-ff.cgi?{0}"

    resp = json.loads(get(url.format(urlencode(dict(part=query)))).text)
    return resp[1] if len(resp) > 1 else []

backends = {
    '360search': qihu360search,
    'baidu': baidu,
    'brave': brave,
    'dbpedia': dbpedia,
    'duckduckgo': duckduckgo,
    'google': google_complete,
    'mwmbl': mwmbl,
    'naver': naver,
    'quark': quark,
    'qwant': qwant,
    'seznam': seznam,
    'sogou': sogou,
    'stract': stract,
    'swisscows': swisscows,
    'wikipedia': wikipedia,
    'yandex': yandex,
}

def search_autocomplete(backend_name, query, sxng_locale):
    backend = backends.get(backend_name)
    if backend is None:
        return []
    try:
        return backend(query, sxng_locale)
    except (HTTPError, SearxEngineResponseException):
        return []
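
# Usage sketch (not part of the upstream module): a caller selects one of the
# registered backends by name and passes the query plus a SearXNG locale.
#
#   suggestions = search_autocomplete('duckduckgo', 'searxng', 'en-US')
#   # -> list of suggestion strings, or [] if the backend is unknown or the
#   #    request failed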