.oO SearXNG Developer Documentation Oo.
Loading...
Searching...
No Matches
xpath.py
Go to the documentation of this file.
1# SPDX-License-Identifier: AGPL-3.0-or-later
2"""The XPath engine is a *generic* engine with which it is possible to configure
3engines in the settings.
4
5.. _XPath selector: https://quickref.me/xpath.html#xpath-selectors
6
7Configuration
8=============
9
10Request:
11
12- :py:obj:`search_url`
13- :py:obj:`lang_all`
14- :py:obj:`soft_max_redirects`
15- :py:obj:`cookies`
16- :py:obj:`headers`
17
18Paging:
19
20- :py:obj:`paging`
21- :py:obj:`page_size`
22- :py:obj:`first_page_num`
23
24Time Range:
25
26- :py:obj:`time_range_support`
27- :py:obj:`time_range_url`
28- :py:obj:`time_range_map`
29
30Safe-Search:
31
32- :py:obj:`safe_search_support`
33- :py:obj:`safe_search_map`
34
35Response:
36
37- :py:obj:`no_result_for_http_status`
38
39`XPath selector`_:
40
41- :py:obj:`results_xpath`
42- :py:obj:`url_xpath`
43- :py:obj:`title_xpath`
44- :py:obj:`content_xpath`
45- :py:obj:`thumbnail_xpath`
46- :py:obj:`suggestion_xpath`
47
48
49Example
50=======
51
52Here is a simple example of a XPath engine configured in the :ref:`settings
53engine` section, further read :ref:`engines-dev`.
54
55.. code:: yaml
56
57 - name : bitbucket
58 engine : xpath
59 paging : True
60 search_url : https://bitbucket.org/repo/all/{pageno}?name={query}
61 url_xpath : //article[@class="repo-summary"]//a[@class="repo-link"]/@href
62 title_xpath : //article[@class="repo-summary"]//a[@class="repo-link"]
63 content_xpath : //article[@class="repo-summary"]/p
64
65Implementations
66===============
67
68"""
69
70from urllib.parse import urlencode
71
72from lxml import html
73from searx.utils import extract_text, extract_url, eval_xpath, eval_xpath_list
74from searx.network import raise_for_httperror
75
search_url = None
"""
Search URL of the engine.  Example::

    https://example.org/?search={query}&page={pageno}{time_range}{safe_search}

Replacements are:

``{query}``:
  Search terms from user.

``{pageno}``:
  Page number if engine supports paging :py:obj:`paging`

``{lang}``:
  ISO 639-1 language code (en, de, fr ..)

``{time_range}``:
  :py:obj:`URL parameter <time_range_url>` if engine :py:obj:`supports time
  range <time_range_support>`.  The value for the parameter is taken from
  :py:obj:`time_range_map`.

``{safe_search}``:
  Safe-search :py:obj:`URL parameter <safe_search_map>` if engine
  :py:obj:`supports safe-search <safe_search_support>`.  The ``{safe_search}``
  replacement is taken from the :py:obj:`safe_search_map`.  Filter results::

    0: none, 1: moderate, 2:strict

  If not supported, the URL parameter is an empty string.

"""

lang_all = 'en'
'''Replacement ``{lang}`` in :py:obj:`search_url` if language ``all`` is
selected.
'''

no_result_for_http_status = []
'''Return empty result for these HTTP status codes instead of throwing an error.

.. code:: yaml

    no_result_for_http_status: []
'''

soft_max_redirects = 0
'''Maximum redirects, soft limit. Record an error but don't stop the engine'''

results_xpath = ''
'''`XPath selector`_ for the list of result items'''

url_xpath = None
'''`XPath selector`_ of result's ``url``.'''

content_xpath = None
'''`XPath selector`_ of result's ``content``.'''

title_xpath = None
'''`XPath selector`_ of result's ``title``.'''

thumbnail_xpath = False
'''`XPath selector`_ of result's ``thumbnail``.'''

suggestion_xpath = ''
'''`XPath selector`_ of result's ``suggestion``.'''

# Optional "cached" link support: `XPath selector`_ of the cached-URL text and
# a prefix that is prepended to the extracted value to build ``cached_url``.
cached_xpath = ''
cached_url = ''

cookies = {}
'''Some engines might offer different result based on cookies.
Possible use-case: To set safesearch cookie.'''

headers = {}
'''Some engines might offer different result based headers.  Possible use-case:
To set header to moderate.'''

paging = False
'''Engine supports paging [True or False].'''

page_size = 1
'''Number of results on each page.  Only needed if the site requires not a page
number, but an offset.'''

first_page_num = 1
'''Number of the first page (usually 0 or 1).'''

time_range_support = False
'''Engine supports search time range.'''

time_range_url = '&hours={time_range_val}'
'''Time range URL parameter in :py:obj:`search_url`.  If no time range is
requested by the user, the URL parameter is an empty string.  The
``{time_range_val}`` replacement is taken from the :py:obj:`time_range_map`.

.. code:: yaml

    time_range_url : '&days={time_range_val}'
'''

time_range_map = {
    'day': 24,
    'week': 24 * 7,
    'month': 24 * 30,
    'year': 24 * 365,
}
'''Maps time range value from user to ``{time_range_val}`` in
:py:obj:`time_range_url`.

.. code:: yaml

    time_range_map:
      day: 1
      week: 7
      month: 30
      year: 365
'''

safe_search_support = False
'''Engine supports safe-search.'''

safe_search_map = {0: '&filter=none', 1: '&filter=moderate', 2: '&filter=strict'}
'''Maps safe-search value to ``{safe_search}`` in :py:obj:`search_url`.

.. code:: yaml

    safesearch: true
    safe_search_map:
      0: '&filter=none'
      1: '&filter=moderate'
      2: '&filter=strict'

'''
211
def request(query, params):
    '''Build request parameters (see :ref:`engine request`).

    Expands the replacements of :py:obj:`search_url` into ``params['url']``
    and applies :py:obj:`cookies`, :py:obj:`headers` and
    :py:obj:`soft_max_redirects`.  Returns the mutated ``params``.
    '''
    # {lang}: ISO 639-1 code of the selected language, lang_all for 'all'
    lang = lang_all
    if params['language'] != 'all':
        lang = params['language'][:2]

    # {time_range}: empty string unless a time range was requested AND it is
    # present in time_range_map -- guard against a mis-configured map, which
    # would otherwise render a literal '...=None' into the URL.
    time_range = ''
    time_range_key = params.get('time_range')
    if time_range_key:
        time_range_val = time_range_map.get(time_range_key)
        if time_range_val is not None:
            time_range = time_range_url.format(time_range_val=time_range_val)

    # {safe_search}: empty string for value 0 (filtering off)
    safe_search = ''
    if params['safesearch']:
        safe_search = safe_search_map[params['safesearch']]

    fargs = {
        # urlencode returns 'q=<encoded query>'; strip the leading 'q=' to
        # keep only the URL-encoded search terms
        'query': urlencode({'q': query})[2:],
        'lang': lang,
        # map searx's 1-based page number onto the site's numbering/offset
        'pageno': (params['pageno'] - 1) * page_size + first_page_num,
        'time_range': time_range,
        'safe_search': safe_search,
    }

    params['cookies'].update(cookies)
    params['headers'].update(headers)

    params['url'] = search_url.format(**fargs)
    params['soft_max_redirects'] = soft_max_redirects

    # HTTP errors are evaluated in response() so that
    # no_result_for_http_status can suppress selected status codes
    params['raise_for_httperror'] = False

    return params
244
245
def response(resp):  # pylint: disable=too-many-branches
    '''Scrap *results* from the response (see :ref:`engine results`).'''
    # configured status codes yield an empty result list instead of an error
    if no_result_for_http_status and resp.status_code in no_result_for_http_status:
        return []

    raise_for_httperror(resp)

    results = []
    dom = html.fromstring(resp.text)
    is_onion = 'onions' in categories

    if results_xpath:
        # one container node per result: extract url/title/content relative
        # to the container
        for node in eval_xpath_list(dom, results_xpath):

            item = {
                'url': extract_url(eval_xpath_list(node, url_xpath, min_len=1), search_url),
                'title': extract_text(eval_xpath_list(node, title_xpath, min_len=1)),
                'content': extract_text(eval_xpath_list(node, content_xpath)),
            }

            # add thumbnail if available
            if thumbnail_xpath:
                thumbnail_nodes = eval_xpath_list(node, thumbnail_xpath)
                if thumbnail_nodes:
                    item['thumbnail'] = extract_url(thumbnail_nodes, search_url)

            # add alternative cached url if available
            if cached_xpath:
                item['cached_url'] = cached_url + extract_text(eval_xpath_list(node, cached_xpath, min_len=1))

            if is_onion:
                item['is_onion'] = True

            results.append(item)

    else:
        # flat mode: the selectors address the whole document; fields are
        # paired up positionally (lazy generators, zip stops at the shortest)
        url_iter = (extract_url(x, search_url) for x in eval_xpath_list(dom, url_xpath))
        title_iter = (extract_text(x) for x in eval_xpath_list(dom, title_xpath))
        content_iter = (extract_text(x) for x in eval_xpath_list(dom, content_xpath))

        if cached_xpath:
            cached_iter = (extract_text(x) for x in eval_xpath_list(dom, cached_xpath))
            for url, title, content, cached in zip(url_iter, title_iter, content_iter, cached_iter):
                results.append(
                    {
                        'url': url,
                        'title': title,
                        'content': content,
                        'cached_url': cached_url + cached,
                        'is_onion': is_onion,
                    }
                )
        else:
            for url, title, content in zip(url_iter, title_iter, content_iter):
                results.append({'url': url, 'title': title, 'content': content, 'is_onion': is_onion})

    if suggestion_xpath:
        results.extend({'suggestion': extract_text(s)} for s in eval_xpath(dom, suggestion_xpath))

    logger.debug("found %s results", len(results))
    return results
request(query, params)
Definition xpath.py:212