.oO SearXNG Developer Documentation Oo.
Loading...
Searching...
No Matches
network.py
Go to the documentation of this file.
1# SPDX-License-Identifier: AGPL-3.0-or-later
2# pylint: disable=global-statement
3# pylint: disable=missing-module-docstring, missing-class-docstring
4
5import atexit
6import asyncio
7import ipaddress
8from itertools import cycle
9from typing import Dict
10
11import httpx
12
13from searx import logger, searx_debug
14from .client import new_client, get_loop, AsyncHTTPTransportNoHttp
15from .raise_for_httperror import raise_for_httperror
16
17
# child logger for everything network related (fused line-number prefix from
# the extracted dump removed; token content unchanged)
logger = logger.getChild('network')
# Name under which the fallback network is registered in NETWORKS.
DEFAULT_NAME = '__DEFAULT__'

# Global registry: network name -> Network instance.
# Populated by initialize() and by the bootstrap at the bottom of the module.
NETWORKS: Dict[str, 'Network'] = {}

# requests compatibility when reading proxy settings from settings.yml:
# requests-style scheme keys (with or without a trailing ':') are mapped to
# the httpx mount patterns ('scheme://').
PROXY_PATTERN_MAPPING = {
    'http': 'http://',
    'https': 'https://',
    'socks4': 'socks4://',
    'socks5': 'socks5://',
    'socks5h': 'socks5h://',
    'http:': 'http://',
    'https:': 'https://',
    'socks4:': 'socks4://',
    'socks5:': 'socks5://',
    'socks5h:': 'socks5h://',
}

# wildcard local source addresses per IP family (used by the 'ipv4'/'ipv6'
# networks configured in initialize()).
ADDRESS_MAPPING = {'ipv4': '0.0.0.0', 'ipv6': '::'}
36
37
class Network:
    """A named pool of ``httpx.AsyncClient`` instances sharing one configuration.

    A client is created lazily per distinct (verify, max_redirects,
    local_address, proxies) combination; source addresses and proxy URLs are
    rotated round-robin via generators.

    NOTE(review): this block was reconstructed from a line-numbered extraction
    that dropped several lines (the ``def`` headers of ``__init__``,
    ``check_parameters``, ``iter_ipaddresses``, ``get_ipaddress_cycle``,
    ``get_proxy_cycles``, ``extract_kwargs_clients`` and
    ``extract_do_raise_for_httperror``, the two cycle assignments in
    ``__init__`` and one ``new_client`` argument). The missing pieces were
    rebuilt from ``__slots__``, the call sites visible in this file and the
    signature listing at the end of the page — confirm against upstream.
    """

    __slots__ = (
        'enable_http',
        'verify',
        'enable_http2',
        'max_connections',
        'max_keepalive_connections',
        'keepalive_expiry',
        'local_addresses',
        'proxies',
        'using_tor_proxy',
        'max_redirects',
        'retries',
        'retry_on_http_error',
        '_local_addresses_cycle',
        '_proxies_cycle',
        '_clients',
        '_logger',
    )

    # class-level cache: proxies key -> bool, so the Tor exit check is done
    # at most once per proxy configuration across all Network instances.
    _TOR_CHECK_RESULT = {}

    def __init__(
        # pylint: disable=too-many-arguments
        self,
        enable_http=True,
        verify=True,
        enable_http2=False,
        max_connections=None,
        max_keepalive_connections=None,
        keepalive_expiry=None,
        proxies=None,
        using_tor_proxy=False,
        local_addresses=None,
        retries=0,
        retry_on_http_error=None,
        max_redirects=30,
        logger_name=None,
    ):
        self.enable_http = enable_http
        self.verify = verify
        self.enable_http2 = enable_http2
        self.max_connections = max_connections
        self.max_keepalive_connections = max_keepalive_connections
        self.keepalive_expiry = keepalive_expiry
        self.proxies = proxies
        self.using_tor_proxy = using_tor_proxy
        self.local_addresses = local_addresses
        self.retries = retries
        self.retry_on_http_error = retry_on_http_error
        self.max_redirects = max_redirects
        # reconstructed (rendered lines 91-92 missing from the dump):
        # infinite round-robin generators consumed by get_client()
        self._local_addresses_cycle = self.get_ipaddress_cycle()
        self._proxies_cycle = self.get_proxy_cycles()
        self._clients = {}
        self._logger = logger.getChild(logger_name) if logger_name else logger
        self.check_parameters()

    def check_parameters(self):
        """Validate local addresses and the proxies setting; raise ValueError on error."""
        for address in self.iter_ipaddresses():
            # ip_network/ip_address raise ValueError for malformed input;
            # strict=False accepts networks with host bits set
            if '/' in address:
                ipaddress.ip_network(address, False)
            else:
                ipaddress.ip_address(address)

        if self.proxies is not None and not isinstance(self.proxies, (str, dict)):
            raise ValueError('proxies type has to be str, dict or None')

    def iter_ipaddresses(self):
        """Yield each configured local source address (str is treated as a one-element list)."""
        local_addresses = self.local_addresses
        if not local_addresses:
            return
        if isinstance(local_addresses, str):
            local_addresses = [local_addresses]
        yield from local_addresses

    def get_ipaddress_cycle(self):
        """Endless generator cycling over the configured source IPs.

        CIDR entries are expanded to their host addresses; with no configured
        addresses it yields ``None`` (let the OS choose the source address).
        """
        while True:
            count = 0
            for address in self.iter_ipaddresses():
                if '/' in address:
                    for a in ipaddress.ip_network(address, False).hosts():
                        yield str(a)
                        count += 1
                else:
                    a = ipaddress.ip_address(address)
                    yield str(a)
                    count += 1
            if count == 0:
                yield None

    def iter_proxies(self):
        """Yield (mount pattern, [proxy URLs]) pairs from the proxies setting."""
        if not self.proxies:
            return
        # https://www.python-httpx.org/compatibility/#proxy-keys
        if isinstance(self.proxies, str):
            yield 'all://', [self.proxies]
        else:
            for pattern, proxy_url in self.proxies.items():
                pattern = PROXY_PATTERN_MAPPING.get(pattern, pattern)
                if isinstance(proxy_url, str):
                    proxy_url = [proxy_url]
                yield pattern, proxy_url

    def get_proxy_cycles(self):
        """Endless generator yielding one proxy assignment per step.

        Each yielded value is a hashable tuple of (pattern, proxy URL) pairs,
        advancing every pattern's URL list round-robin, so it can be part of
        the client cache key in get_client().
        """
        proxy_settings = {}
        for pattern, proxy_urls in self.iter_proxies():
            proxy_settings[pattern] = cycle(proxy_urls)
        while True:
            # pylint: disable=stop-iteration-return
            yield tuple((pattern, next(proxy_url_cycle)) for pattern, proxy_url_cycle in proxy_settings.items())

    async def log_response(self, response: httpx.Response):
        """Debug-log one finished request/response pair (httpx event hook)."""
        request = response.request
        status = f"{response.status_code} {response.reason_phrase}"
        response_line = f"{response.http_version} {status}"
        content_type = response.headers.get("Content-Type")
        content_type = f' ({content_type})' if content_type else ''
        self._logger.debug(f'HTTP Request: {request.method} {request.url} "{response_line}"{content_type}')

    @staticmethod
    async def check_tor_proxy(client: httpx.AsyncClient, proxies) -> bool:
        """Return True when *client* actually egresses through Tor.

        Requires every mounted transport to resolve DNS remotely (rdns) and
        check.torproject.org to confirm the exit IP. Results are cached per
        proxies key in Network._TOR_CHECK_RESULT.
        """
        if proxies in Network._TOR_CHECK_RESULT:
            return Network._TOR_CHECK_RESULT[proxies]

        result = True
        # ignore client._transport because it is not used with all://
        for transport in client._mounts.values():  # pylint: disable=protected-access
            if isinstance(transport, AsyncHTTPTransportNoHttp):
                continue
            if getattr(transport, "_pool") and getattr(
                transport._pool, "_rdns", False  # pylint: disable=protected-access
            ):
                continue
            # a transport without remote DNS would leak hostnames -> not Tor
            return False
        response = await client.get("https://check.torproject.org/api/ip", timeout=60)
        if not response.json()["IsTor"]:
            result = False
        Network._TOR_CHECK_RESULT[proxies] = result
        return result

    async def get_client(self, verify=None, max_redirects=None):
        """Return a cached (or newly created) AsyncClient for the next address/proxy rotation.

        Raises httpx.ProxyError when using_tor_proxy is set but the proxy is
        not actually Tor.
        """
        verify = self.verify if verify is None else verify
        max_redirects = self.max_redirects if max_redirects is None else max_redirects
        local_address = next(self._local_addresses_cycle)
        proxies = next(self._proxies_cycle)  # is a tuple so it can be part of the key
        key = (verify, max_redirects, local_address, proxies)
        hook_log_response = self.log_response if searx_debug else None
        if key not in self._clients or self._clients[key].is_closed:
            client = new_client(
                self.enable_http,
                verify,
                self.enable_http2,
                self.max_connections,
                # reconstructed: rendered line 193 was dropped by the dump
                self.max_keepalive_connections,
                self.keepalive_expiry,
                dict(proxies),
                local_address,
                0,
                max_redirects,
                hook_log_response,
            )
            if self.using_tor_proxy and not await self.check_tor_proxy(client, proxies):
                await client.aclose()
                raise httpx.ProxyError('Network configuration problem: not using Tor')
            self._clients[key] = client
        return self._clients[key]

    async def aclose(self):
        """Close every cached client, ignoring HTTP errors raised while closing."""
        async def close_client(client):
            try:
                await client.aclose()
            except httpx.HTTPError:
                pass

        await asyncio.gather(*[close_client(client) for client in self._clients.values()], return_exceptions=False)

    @staticmethod
    def extract_kwargs_clients(kwargs):
        """Pop client-level options out of request kwargs; return them as a dict.

        Also translates requests' ``allow_redirects`` to httpx's
        ``follow_redirects`` in place.
        """
        kwargs_clients = {}
        if 'verify' in kwargs:
            kwargs_clients['verify'] = kwargs.pop('verify')
        if 'max_redirects' in kwargs:
            kwargs_clients['max_redirects'] = kwargs.pop('max_redirects')
        if 'allow_redirects' in kwargs:
            # see https://github.com/encode/httpx/pull/1808
            kwargs['follow_redirects'] = kwargs.pop('allow_redirects')
        return kwargs_clients

    @staticmethod
    def extract_do_raise_for_httperror(kwargs):
        """Pop ``raise_for_httperror`` from kwargs; defaults to True."""
        do_raise_for_httperror = True
        if 'raise_for_httperror' in kwargs:
            do_raise_for_httperror = kwargs['raise_for_httperror']
            del kwargs['raise_for_httperror']
        return do_raise_for_httperror

    def patch_response(self, response, do_raise_for_httperror):
        """Add requests-compatible ``.ok`` and optionally raise for HTTP errors."""
        if isinstance(response, httpx.Response):
            # requests compatibility (response is not streamed)
            # see also https://www.python-httpx.org/compatibility/#checking-for-4xx5xx-responses
            response.ok = not response.is_error

            # raise an exception
            if do_raise_for_httperror:
                try:
                    raise_for_httperror(response)
                except:  # bare except kept on purpose: log, then re-raise unchanged
                    self._logger.warning(f"HTTP Request failed: {response.request.method} {response.request.url}")
                    raise
        return response

    def is_valid_response(self, response):
        """Return False when the status code matches the retry_on_http_error setting."""
        # pylint: disable=too-many-boolean-expressions
        if (
            (self.retry_on_http_error is True and 400 <= response.status_code <= 599)
            or (isinstance(self.retry_on_http_error, list) and response.status_code in self.retry_on_http_error)
            or (isinstance(self.retry_on_http_error, int) and response.status_code == self.retry_on_http_error)
        ):
            return False
        return True

    async def call_client(self, stream, method, url, **kwargs):
        """Perform the request with retries; shared backend of request() and stream().

        A server disconnect (httpx.RemoteProtocolError) is retried once with a
        fresh client without consuming a retry.
        """
        retries = self.retries
        was_disconnected = False
        do_raise_for_httperror = Network.extract_do_raise_for_httperror(kwargs)
        kwargs_clients = Network.extract_kwargs_clients(kwargs)
        while retries >= 0:  # pragma: no cover
            client = await self.get_client(**kwargs_clients)
            try:
                if stream:
                    # stream() returns a context manager; it is not awaited here
                    response = client.stream(method, url, **kwargs)
                else:
                    response = await client.request(method, url, **kwargs)
                if self.is_valid_response(response) or retries <= 0:
                    return self.patch_response(response, do_raise_for_httperror)
            except httpx.RemoteProtocolError as e:
                if not was_disconnected:
                    # the server has closed the connection:
                    # try again without decreasing the retries variable & with a new HTTP client
                    was_disconnected = True
                    await client.aclose()
                    self._logger.warning('httpx.RemoteProtocolError: the server has disconnected, retrying')
                    continue
                if retries <= 0:
                    raise e
            except (httpx.RequestError, httpx.HTTPStatusError) as e:
                if retries <= 0:
                    raise e
            retries -= 1

    async def request(self, method, url, **kwargs):
        """Send a non-streaming request through this network."""
        return await self.call_client(False, method, url, **kwargs)

    async def stream(self, method, url, **kwargs):
        """Open a streaming request through this network."""
        return await self.call_client(True, method, url, **kwargs)

    @classmethod
    async def aclose_all(cls):
        """Close the clients of every registered network."""
        await asyncio.gather(*[network.aclose() for network in NETWORKS.values()], return_exceptions=False)
299
300
def get_network(name=None):
    """Return the Network registered under *name* (default network when name is falsy)."""
    return NETWORKS.get(name or DEFAULT_NAME)
303
304
def check_network_configuration():
    # NOTE(review): the ``def`` line of this function (rendered line 305) was
    # missing from the extracted dump; the name is reconstructed from the
    # upstream module — confirm before relying on it.
    """Instantiate a client for every Tor-using network; raise RuntimeError on failure."""
    async def check():
        exception_count = 0
        for network in NETWORKS.values():
            if network.using_tor_proxy:
                try:
                    await network.get_client()
                except Exception:  # pylint: disable=broad-except
                    network._logger.exception('Error')  # pylint: disable=protected-access
                    exception_count += 1
        return exception_count

    # run the async check on the shared network loop and wait for the result
    future = asyncio.run_coroutine_threadsafe(check(), get_loop())
    exception_count = future.result()
    if exception_count > 0:
        raise RuntimeError("Invalid network configuration")
321
322
def initialize(settings_engines=None, settings_outgoing=None):
    """(Re)build the NETWORKS registry from engine and outgoing settings.

    Creates the default/ipv4/ipv6 networks, one network per entry in
    ``outgoing.networks``, one per engine (inline dict or by-name reference),
    and a dedicated HTTP/1-only network for the /image_proxy endpoint.
    """
    # pylint: disable=import-outside-toplevel
    from searx.engines import engines
    from searx import settings

    # pylint: enable=import-outside-toplevel

    settings_engines = settings_engines or settings['engines']
    settings_outgoing = settings_outgoing or settings['outgoing']

    # default parameters for AsyncHTTPTransport
    # see https://github.com/encode/httpx/blob/e05a5372eb6172287458b37447c30f650047e1b8/httpx/_transports/default.py#L108-L121 # pylint: disable=line-too-long
    default_params = {
        'enable_http': False,
        'verify': settings_outgoing['verify'],
        'enable_http2': settings_outgoing['enable_http2'],
        'max_connections': settings_outgoing['pool_connections'],
        'max_keepalive_connections': settings_outgoing['pool_maxsize'],
        'keepalive_expiry': settings_outgoing['keepalive_expiry'],
        'local_addresses': settings_outgoing['source_ips'],
        'using_tor_proxy': settings_outgoing['using_tor_proxy'],
        'proxies': settings_outgoing['proxies'],
        'max_redirects': settings_outgoing['max_redirects'],
        'retries': settings_outgoing['retries'],
        'retry_on_http_error': None,
    }

    def new_network(params, logger_name=None):
        # merge engine/network specific params over the defaults
        nonlocal default_params
        result = {}
        result.update(default_params)
        result.update(params)
        if logger_name:
            result['logger_name'] = logger_name
        return Network(**result)

    def iter_networks():
        # yield (name, engine module, its 'network' attribute) for loaded engines
        nonlocal settings_engines
        for engine_spec in settings_engines:
            engine_name = engine_spec['name']
            engine = engines.get(engine_name)
            if engine is None:
                continue
            network = getattr(engine, 'network', None)
            yield engine_name, engine, network

    if NETWORKS:
        done()
    NETWORKS.clear()
    NETWORKS[DEFAULT_NAME] = new_network({}, logger_name='default')
    NETWORKS['ipv4'] = new_network({'local_addresses': '0.0.0.0'}, logger_name='ipv4')
    NETWORKS['ipv6'] = new_network({'local_addresses': '::'}, logger_name='ipv6')

    # define networks from outgoing.networks
    for network_name, network in settings_outgoing['networks'].items():
        NETWORKS[network_name] = new_network(network, logger_name=network_name)

    # define networks from engines.[i].network (except references)
    for engine_name, engine, network in iter_networks():
        if network is None:
            # no explicit network: build one from the engine's own attributes
            network = {}
            for attribute_name, attribute_value in default_params.items():
                if hasattr(engine, attribute_name):
                    network[attribute_name] = getattr(engine, attribute_name)
                else:
                    network[attribute_name] = attribute_value
            NETWORKS[engine_name] = new_network(network, logger_name=engine_name)
        elif isinstance(network, dict):
            NETWORKS[engine_name] = new_network(network, logger_name=engine_name)

    # define networks from engines.[i].network (references)
    for engine_name, engine, network in iter_networks():
        if isinstance(network, str):
            NETWORKS[engine_name] = NETWORKS[network]

    # the /image_proxy endpoint has a dedicated network.
    # same parameters than the default network, but HTTP/2 is disabled.
    # It decreases the CPU load average, and the total time is more or less the same
    if 'image_proxy' not in NETWORKS:
        image_proxy_params = default_params.copy()
        image_proxy_params['enable_http2'] = False
        NETWORKS['image_proxy'] = new_network(image_proxy_params, logger_name='image_proxy')
405
406
@atexit.register
def done():
    """Close all HTTP clients.

    Avoid a warning at exit
    See https://github.com/encode/httpx/pull/2026

    Note: since Network.aclose has to be async, it is not possible to call this method on Network.__del__
    So Network.aclose is called here using atexit.register
    """
    try:
        loop = get_loop()
        if loop:
            future = asyncio.run_coroutine_threadsafe(Network.aclose_all(), loop)
            # wait 3 seconds to close the HTTP clients
            future.result(3)
    finally:
        # always drop the registry, even if closing timed out
        NETWORKS.clear()
425
426
# module import creates a default network so get_network() always has a fallback
NETWORKS[DEFAULT_NAME] = Network()
log_response(self, httpx.Response response)
Definition network.py:151
patch_response(self, response, do_raise_for_httperror)
Definition network.py:236
stream(self, method, url, **kwargs)
Definition network.py:293
call_client(self, stream, method, url, **kwargs)
Definition network.py:261
request(self, method, url, **kwargs)
Definition network.py:290
bool check_tor_proxy(httpx.AsyncClient client, proxies)
Definition network.py:160
is_valid_response(self, response)
Definition network.py:251
__init__(self, enable_http=True, verify=True, enable_http2=False, max_connections=None, max_keepalive_connections=None, keepalive_expiry=None, proxies=None, using_tor_proxy=False, local_addresses=None, retries=0, retry_on_http_error=None, max_redirects=30, logger_name=None)
Definition network.py:77
get_client(self, verify=None, max_redirects=None)
Definition network.py:180
::1337x
Definition 1337x.py:1
initialize(settings_engines=None, settings_outgoing=None)
Definition network.py:323
get_network(name=None)
Definition network.py:301