.oO SearXNG Developer Documentation Oo.
Loading...
Searching...
No Matches
ahmia.py
Go to the documentation of this file.
1# SPDX-License-Identifier: AGPL-3.0-or-later
2"""
3 Ahmia (Onions)
4"""
5
6from urllib.parse import urlencode, urlparse, parse_qs
7from lxml.html import fromstring
8from searx.engines.xpath import extract_url, extract_text, eval_xpath_list, eval_xpath
9
# about — engine metadata shown by SearXNG; results are scraped from HTML
about = {
    "website": 'http://juhanurmihxlp77nkq76byazcldy2hlmovfu2epvl5ankdibsot4csyd.onion',
    "wikidata_id": 'Q18693938',
    "official_api_documentation": None,
    "use_official_api": False,
    "require_api_key": False,
    "results": 'HTML',
}

# engine config
categories = ['onions']
paging = True
# Ahmia returns all matches on one page; response() slices them into
# windows of this many results per SearXNG page
page_size = 10

# search url (Ahmia onion service); {query} is filled by request()
search_url = 'http://juhanurmihxlp77nkq76byazcldy2hlmovfu2epvl5ankdibsot4csyd.onion/search/?{query}'
time_range_support = True
# maps SearXNG time-range names to the day span Ahmia's 'd' parameter expects
time_range_dict = {'day': 1, 'week': 7, 'month': 30}

# xpaths used by response() to pick results, corrections and the total count
results_xpath = '//li[@class="result"]'
url_xpath = './h4/a/@href'
title_xpath = './h4/a[1]'
content_xpath = './/p[1]'
correction_xpath = '//*[@id="didYouMean"]//a'
number_of_results_xpath = '//*[@id="totalResults"]'
37
38
def request(query, params):
    """Assemble the Ahmia search URL for *query* and store it in params.

    Appends the 'd' (day span) parameter when the requested time range is
    one Ahmia supports; returns the updated params dict.
    """
    url = search_url.format(query=urlencode({'q': query}))

    time_range = params['time_range']
    if time_range in time_range_dict:
        url = url + '&' + urlencode({'d': time_range_dict[time_range]})

    params['url'] = url
    return params
46
47
def response(resp):
    """Parse an Ahmia results page into SearXNG result dicts.

    Returns a list containing result entries (url/title/content, flagged
    ``is_onion``), any spelling-correction entries, and — when present —
    a ``number_of_results`` entry.
    """
    results = []
    dom = fromstring(resp.text)

    # Ahmia serves every match on a single page; slice out just the window
    # for the requested SearXNG page so there's not way too many at once
    first_result_index = page_size * (resp.search_params.get('pageno', 1) - 1)
    all_results = eval_xpath_list(dom, results_xpath)
    trimmed_results = all_results[first_result_index : first_result_index + page_size]

    # get results
    for result in trimmed_results:
        # Ahmia links point at its own redirect endpoint; the actual onion
        # URL for the result is carried in the 'redirect_url' query parameter
        raw_url = extract_url(eval_xpath_list(result, url_xpath, min_len=1), search_url)
        cleaned_url = parse_qs(urlparse(raw_url).query).get('redirect_url', [''])[0]

        title = extract_text(eval_xpath(result, title_xpath))
        content = extract_text(eval_xpath(result, content_xpath))

        results.append({'url': cleaned_url, 'title': title, 'content': content, 'is_onion': True})

    # get spelling corrections ("did you mean" links)
    for correction in eval_xpath_list(dom, correction_xpath):
        results.append({'correction': extract_text(correction)})

    # get number of results; the element's text may be missing (TypeError
    # from int(None)) or non-numeric (ValueError) — catch exactly those
    # instead of a bare except, which would also swallow e.g. SystemExit
    number_of_results = eval_xpath(dom, number_of_results_xpath)
    if number_of_results:
        try:
            results.append({'number_of_results': int(extract_text(number_of_results))})
        except (TypeError, ValueError):
            pass

    return results
request(query, params)
Definition ahmia.py:39