.oO SearXNG Developer Documentation Oo.
Loading...
Searching...
No Matches
network.py
Go to the documentation of this file.
1# SPDX-License-Identifier: AGPL-3.0-or-later
2# pylint: disable=global-statement
3# pylint: disable=missing-module-docstring, missing-class-docstring
4import typing as t
5from collections.abc import Generator, AsyncIterator
6
7import atexit
8import asyncio
9import ipaddress
10from itertools import cycle
11
12import httpx
13
14from searx import logger, sxng_debug
15from searx.extended_types import SXNG_Response
16from .client import new_client, get_loop, AsyncHTTPTransportNoHttp
17from .raise_for_httperror import raise_for_httperror
18
19
logger = logger.getChild('network')
# name of the fallback network used when no explicit network name is given
DEFAULT_NAME = '__DEFAULT__'
# registry of all configured networks (filled by initialize(), keyed by name)
NETWORKS: dict[str, "Network"] = {}
# requests compatibility when reading proxy settings from settings.yml
# maps requests-style scheme keys (with or without trailing ':') to the
# httpx "scheme://" mount-pattern prefixes
PROXY_PATTERN_MAPPING = {
    'http': 'http://',
    'https': 'https://',
    'socks4': 'socks4://',
    'socks5': 'socks5://',
    'socks5h': 'socks5h://',
    'http:': 'http://',
    'https:': 'https://',
    'socks4:': 'socks4://',
    'socks5:': 'socks5://',
    'socks5h:': 'socks5h://',
}

# address-family name -> wildcard address for that family
ADDRESS_MAPPING = {'ipv4': '0.0.0.0', 'ipv6': '::'}
38
39
@t.final
class Network:
    """One outgoing-HTTP configuration: rotates over local source addresses
    and proxy URLs and caches one ``httpx.AsyncClient`` per combination."""

    # fixed attribute set; saves per-instance memory (many engine networks)
    __slots__ = (
        'enable_http',
        'verify',
        'enable_http2',
        'max_connections',
        'max_keepalive_connections',
        'keepalive_expiry',
        'local_addresses',
        'proxies',
        'using_tor_proxy',
        'max_redirects',
        'retries',
        'retry_on_http_error',
        '_local_addresses_cycle',
        '_proxies_cycle',
        '_clients',
        '_logger',
    )

    # class-level cache shared by all instances: proxies-key -> bool verdict
    # of check_tor_proxy() (never invalidated for the process lifetime)
    _TOR_CHECK_RESULT = {}
63
        # pylint: disable=too-many-arguments
        # NOTE(review): the enclosing ``def __init__(`` line (doc line 64) is
        # missing from this listing — extraction gap, not a code change.
        self,
        enable_http: bool = True,
        verify: bool = True,
        enable_http2: bool = False,
        max_connections: int = None, # pyright: ignore[reportArgumentType]
        max_keepalive_connections: int = None, # pyright: ignore[reportArgumentType]
        keepalive_expiry: float = None, # pyright: ignore[reportArgumentType]
        proxies: str | dict[str, str] | None = None,
        using_tor_proxy: bool = False,
        local_addresses: str | list[str] | None = None,
        retries: int = 0,
        retry_on_http_error: None = None,
        max_redirects: int = 30,
        logger_name: str = None, # pyright: ignore[reportArgumentType]
    ):

        self.enable_http = enable_http
        self.verify = verify
        self.enable_http2 = enable_http2
        self.max_connections = max_connections
        self.max_keepalive_connections = max_keepalive_connections
        self.keepalive_expiry = keepalive_expiry
        self.proxies = proxies
        self.using_tor_proxy = using_tor_proxy
        self.local_addresses = local_addresses
        self.retries = retries
        self.retry_on_http_error = retry_on_http_error
        self.max_redirects = max_redirects
        # NOTE(review): doc lines 94-95 are missing here; the cross references
        # show ``_proxies_cycle`` defined at doc line 95, so the rotation
        # attributes (``_local_addresses_cycle``, ``_proxies_cycle``) are
        # presumably initialized in the gap — confirm against the repository.
        self._clients = {}
        self._logger = logger.getChild(logger_name) if logger_name else logger
        self.check_parameters()

        # NOTE(review): the ``def check_parameters(self):`` line (doc line 100)
        # is missing from this listing; the statements below are its body.
        # Validate each configured local address; ip_network/ip_address raise
        # ValueError on malformed input.
        for address in self.iter_ipaddresses():
            if '/' in address:
                ipaddress.ip_network(address, False)
            else:
                ipaddress.ip_address(address)

        if self.proxies is not None and not isinstance(self.proxies, (str, dict)):
            raise ValueError('proxies type has to be str, dict or None')
109
110 def iter_ipaddresses(self) -> Generator[str]:
111 local_addresses = self.local_addresses
112 if not local_addresses:
113 return
114 if isinstance(local_addresses, str):
115 local_addresses = [local_addresses]
116 yield from local_addresses
117
    # NOTE(review): the ``def ...(self)`` line for this generator (doc line
    # 118) is missing from this listing — extraction gap.  Judging by the
    # cross references it produces the values consumed through
    # ``self._local_addresses_cycle`` — confirm against the repository.
        # endless round-robin over all configured local addresses; CIDR
        # entries are expanded to every host address in the network
        while True:
            count = 0
            for address in self.iter_ipaddresses():
                if '/' in address:
                    for a in ipaddress.ip_network(address, False).hosts():
                        yield str(a)
                        count += 1
                else:
                    a = ipaddress.ip_address(address)
                    yield str(a)
                    count += 1
            # nothing configured: yield None forever (no source-address bind)
            if count == 0:
                yield None
132
133 def iter_proxies(self) -> Generator[tuple[str, list[str]]]:
134 if not self.proxies:
135 return
136 # https://www.python-httpx.org/compatibility/#proxy-keys
137 if isinstance(self.proxies, str):
138 yield 'all://', [self.proxies]
139 else:
140 for pattern, proxy_url in self.proxies.items():
141 pattern: str = PROXY_PATTERN_MAPPING.get(pattern, pattern)
142 if isinstance(proxy_url, str):
143 proxy_url = [proxy_url]
144 yield pattern, proxy_url
145
146 def get_proxy_cycles(self) -> Generator[tuple[tuple[str, str], ...], str, str]: # not sure type is correct
147 proxy_settings: dict[str, t.Any] = {}
148 for pattern, proxy_urls in self.iter_proxies():
149 proxy_settings[pattern] = cycle(proxy_urls)
150 while True:
151 # pylint: disable=stop-iteration-return
152 yield tuple((pattern, next(proxy_url_cycle)) for pattern, proxy_url_cycle in proxy_settings.items())
153
154 async def log_response(self, response: httpx.Response):
155 request = response.request
156 status = f"{response.status_code} {response.reason_phrase}"
157 response_line = f"{response.http_version} {status}"
158 content_type = response.headers.get("Content-Type")
159 content_type = f' ({content_type})' if content_type else ''
160 self._logger.debug(f'HTTP Request: {request.method} {request.url} "{response_line}"{content_type}')
161
    @staticmethod
    async def check_tor_proxy(client: httpx.AsyncClient, proxies) -> bool:
        """Return True when *client* appears to route through Tor.

        First checks every mounted transport for remote DNS resolution
        (``_rdns``, i.e. a socks5h-style proxy), then asks
        ``check.torproject.org`` whether the exit IP is a Tor exit.  The
        verdict is cached per *proxies* key for the process lifetime.
        """
        if proxies in Network._TOR_CHECK_RESULT:
            return Network._TOR_CHECK_RESULT[proxies]

        result = True
        # ignore client._transport because it is not used with all://
        for transport in client._mounts.values():  # pylint: disable=protected-access
            if isinstance(transport, AsyncHTTPTransportNoHttp):
                # plain-http blocker transport — nothing to verify here
                continue
            if getattr(transport, "_pool") and getattr(
                # pylint: disable=protected-access
                transport._pool,  # type: ignore
                "_rdns",
                False,
            ):
                # proxy resolves DNS remotely — no DNS leak, keep checking
                continue
            # a transport without remote DNS would leak DNS queries: not Tor
            return False
        response = await client.get("https://check.torproject.org/api/ip", timeout=60)
        if not response.json()["IsTor"]:
            result = False
        Network._TOR_CHECK_RESULT[proxies] = result
        return result
185
    async def get_client(self, verify: bool | None = None, max_redirects: int | None = None) -> httpx.AsyncClient:
        """Return a cached ``httpx.AsyncClient`` for the next rotation slot.

        A new client is created (and, when ``using_tor_proxy`` is set,
        Tor-verified) whenever the ``(verify, max_redirects, local address,
        proxies)`` combination has no open client yet.

        :raises httpx.ProxyError: when the Tor check fails.
        """
        verify = self.verify if verify is None else verify
        max_redirects = self.max_redirects if max_redirects is None else max_redirects
        # advance the round-robin rotations for source address and proxies
        local_address = next(self._local_addresses_cycle)
        proxies = next(self._proxies_cycle)  # is a tuple so it can be part of the key
        key = (verify, max_redirects, local_address, proxies)
        hook_log_response = self.log_response if sxng_debug else None
        if key not in self._clients or self._clients[key].is_closed:
            client = new_client(
                self.enable_http,
                verify,
                self.enable_http2,
                self.max_connections,
                # NOTE(review): a positional argument (doc line 199, presumably
                # ``self.max_keepalive_connections``) is missing from this
                # listing — extraction gap; verify against the repository.
                self.keepalive_expiry,
                dict(proxies),
                local_address,
                0,
                max_redirects,
                hook_log_response,
            )
            if self.using_tor_proxy and not await self.check_tor_proxy(client, proxies):
                await client.aclose()
                raise httpx.ProxyError('Network configuration problem: not using Tor')
            self._clients[key] = client
        return self._clients[key]
212
213 async def aclose(self):
214 async def close_client(client):
215 try:
216 await client.aclose()
217 except httpx.HTTPError:
218 pass
219
220 await asyncio.gather(*[close_client(client) for client in self._clients.values()], return_exceptions=False)
221
222 @staticmethod
223 def extract_kwargs_clients(kwargs: dict[str, t.Any]) -> dict[str, t.Any]:
224 kwargs_clients: dict[str, t.Any] = {}
225 if 'verify' in kwargs:
226 kwargs_clients['verify'] = kwargs.pop('verify')
227 if 'max_redirects' in kwargs:
228 kwargs_clients['max_redirects'] = kwargs.pop('max_redirects')
229 if 'allow_redirects' in kwargs:
230 # see https://github.com/encode/httpx/pull/1808
231 kwargs['follow_redirects'] = kwargs.pop('allow_redirects')
232 return kwargs_clients
233
    @staticmethod
    # NOTE(review): the ``def extract_do_raise_for_httperror(kwargs...)`` line
    # (doc line 235) is missing from this listing — extraction gap.
        # searx extension to the request kwargs: pop 'raise_for_httperror' so
        # httpx never sees it; raising is the default behavior
        do_raise_for_httperror = True
        if 'raise_for_httperror' in kwargs:
            do_raise_for_httperror = kwargs['raise_for_httperror']
            del kwargs['raise_for_httperror']
        return do_raise_for_httperror
241
242 def patch_response(self, response: httpx.Response | SXNG_Response, do_raise_for_httperror: bool) -> SXNG_Response:
243 if isinstance(response, httpx.Response):
244 response = t.cast(SXNG_Response, response)
245 # requests compatibility (response is not streamed)
246 # see also https://www.python-httpx.org/compatibility/#checking-for-4xx5xx-responses
247 response.ok = not response.is_error
248
249 # raise an exception
250 if do_raise_for_httperror:
251 try:
252 raise_for_httperror(response)
253 except:
254 self._logger.warning(f"HTTP Request failed: {response.request.method} {response.request.url}")
255 raise
256 return response
257
258 def is_valid_response(self, response: SXNG_Response):
259 # pylint: disable=too-many-boolean-expressions
260 if (
261 (self.retry_on_http_error is True and 400 <= response.status_code <= 599)
262 or (isinstance(self.retry_on_http_error, list) and response.status_code in self.retry_on_http_error)
263 or (isinstance(self.retry_on_http_error, int) and response.status_code == self.retry_on_http_error)
264 ):
265 return False
266 return True
267
    async def call_client(
        self, stream: bool, method: str, url: str, **kwargs: t.Any
    ) -> AsyncIterator[SXNG_Response] | None:
        """Send one HTTP request with retry handling.

        When *stream* is true, the not-yet-entered ``client.stream(...)``
        context manager is returned; otherwise the awaited response.  Up to
        ``self.retries`` extra attempts are made on request/status errors or
        on responses rejected by :py:meth:`is_valid_response`.  A first server
        disconnect (``RemoteProtocolError``) is retried once for free with a
        fresh client.
        """
        retries = self.retries
        was_disconnected = False
        # split searx/requests-specific options out of the httpx kwargs
        do_raise_for_httperror = Network.extract_do_raise_for_httperror(kwargs)
        kwargs_clients = Network.extract_kwargs_clients(kwargs)
        while retries >= 0:  # pragma: no cover
            client = await self.get_client(**kwargs_clients)
            # cookies are installed on the client rather than passed per call
            cookies = kwargs.pop("cookies", None)
            client.cookies = httpx.Cookies(cookies)
            try:
                if stream:
                    # note: not awaited — this is an async context manager
                    response = client.stream(method, url, **kwargs)  # pyright: ignore[reportAny]
                else:
                    response = await client.request(method, url, **kwargs)  # pyright: ignore[reportAny]
                # on the last attempt the response is returned even if invalid
                if self.is_valid_response(response) or retries <= 0:
                    return self.patch_response(response, do_raise_for_httperror)
            except httpx.RemoteProtocolError as e:
                if not was_disconnected:
                    # the server has closed the connection:
                    # try again without decreasing the retries variable & with a new HTTP client
                    was_disconnected = True
                    await client.aclose()
                    self._logger.warning('httpx.RemoteProtocolError: the server has disconnected, retrying')
                    continue
                if retries <= 0:
                    raise e
            except (httpx.RequestError, httpx.HTTPStatusError) as e:
                if retries <= 0:
                    raise e
            retries -= 1
300
301 async def request(self, method: str, url: str, **kwargs):
302 return await self.call_client(False, method, url, **kwargs)
303
304 async def stream(self, method: str, url: str, **kwargs):
305 return await self.call_client(True, method, url, **kwargs)
306
307 @classmethod
308 async def aclose_all(cls):
309 await asyncio.gather(*[network.aclose() for network in NETWORKS.values()], return_exceptions=False)
310
311
def get_network(name: str | None = None) -> "Network":
    """Look up a network by name; ``None`` or '' falls back to the default."""
    key = name or DEFAULT_NAME
    return NETWORKS.get(key)  # pyright: ignore[reportReturnType]
314
315
    # NOTE(review): the enclosing ``def check_network_configuration():`` line
    # (doc line 316) is missing from this listing — extraction gap.
    async def check():
        """Count tor-enabled networks whose client cannot be created."""
        exception_count = 0
        for network in NETWORKS.values():
            if network.using_tor_proxy:
                try:
                    # get_client() performs the Tor check and may raise
                    await network.get_client()
                except Exception:  # pylint: disable=broad-except
                    network._logger.exception('Error')  # pylint: disable=protected-access
                    exception_count += 1
        return exception_count

    # run the check on the shared networking event loop and wait for it
    future = asyncio.run_coroutine_threadsafe(check(), get_loop())
    exception_count = future.result()
    if exception_count > 0:
        raise RuntimeError("Invalid network configuration")
332
333
    # NOTE(review): the ``def initialize(`` line (doc line 334) is missing
    # from this listing — extraction gap.
    settings_engines: list[dict[str, t.Any]] = None, # pyright: ignore[reportArgumentType]
    settings_outgoing: dict[str, t.Any] = None, # pyright: ignore[reportArgumentType]
) -> None:
    # pylint: disable=import-outside-toplevel)
    from searx.engines import engines
    from searx import settings

    # pylint: enable=import-outside-toplevel)

    # fall back to the global settings when no explicit values were passed
    settings_engines = settings_engines or settings['engines']
    settings_outgoing = settings_outgoing or settings['outgoing']

    # default parameters for AsyncHTTPTransport
    # see https://github.com/encode/httpx/blob/e05a5372eb6172287458b37447c30f650047e1b8/httpx/_transports/default.py#L108-L121 # pylint: disable=line-too-long
    default_params: dict[str, t.Any] = {
        'enable_http': False,
        'verify': settings_outgoing['verify'],
        'enable_http2': settings_outgoing['enable_http2'],
        'max_connections': settings_outgoing['pool_connections'],
        'max_keepalive_connections': settings_outgoing['pool_maxsize'],
        'keepalive_expiry': settings_outgoing['keepalive_expiry'],
        'local_addresses': settings_outgoing['source_ips'],
        'using_tor_proxy': settings_outgoing['using_tor_proxy'],
        'proxies': settings_outgoing['proxies'],
        'max_redirects': settings_outgoing['max_redirects'],
        'retries': settings_outgoing['retries'],
        'retry_on_http_error': None,
    }

    def new_network(params: dict[str, t.Any], logger_name: str | None = None):
        """Build a Network from the defaults overridden by *params*."""
        nonlocal default_params
        result = {}
        result.update(default_params)  # pyright: ignore[reportUnknownMemberType]
        result.update(params)  # pyright: ignore[reportUnknownMemberType]
        if logger_name:
            result['logger_name'] = logger_name
        return Network(**result)  # type: ignore

    def iter_networks():
        """Yield (name, engine, engine.network) for each configured engine."""
        nonlocal settings_engines
        for engine_spec in settings_engines:
            engine_name = engine_spec['name']
            engine = engines.get(engine_name)
            if engine is None:
                continue
            network = getattr(engine, 'network', None)
            yield engine_name, engine, network

    # re-initialization: close the previous clients before rebuilding
    if NETWORKS:
        done()
    NETWORKS.clear()
    NETWORKS[DEFAULT_NAME] = new_network({}, logger_name='default')
    NETWORKS['ipv4'] = new_network({'local_addresses': '0.0.0.0'}, logger_name='ipv4')
    NETWORKS['ipv6'] = new_network({'local_addresses': '::'}, logger_name='ipv6')

    # define networks from outgoing.networks
    for network_name, network in settings_outgoing['networks'].items():
        NETWORKS[network_name] = new_network(network, logger_name=network_name)

    # define networks from engines.[i].network (except references)
    for engine_name, engine, network in iter_networks():
        if network is None:
            # no explicit network: collect per-engine overrides of the defaults
            network = {}
            for attribute_name, attribute_value in default_params.items():
                if hasattr(engine, attribute_name):
                    network[attribute_name] = getattr(engine, attribute_name)
                else:
                    network[attribute_name] = attribute_value
            NETWORKS[engine_name] = new_network(network, logger_name=engine_name)
        elif isinstance(network, dict):
            NETWORKS[engine_name] = new_network(network, logger_name=engine_name)

    # define networks from engines.[i].network (references)
    for engine_name, engine, network in iter_networks():
        if isinstance(network, str):
            NETWORKS[engine_name] = NETWORKS[network]

    # the /image_proxy endpoint has a dedicated network.
    # same parameters than the default network, but HTTP/2 is disabled.
    # It decreases the CPU load average, and the total time is more or less the same
    if 'image_proxy' not in NETWORKS:
        image_proxy_params = default_params.copy()
        image_proxy_params['enable_http2'] = False
        NETWORKS['image_proxy'] = new_network(image_proxy_params, logger_name='image_proxy')
419
420
@atexit.register
def done():
    """Close all HTTP client

    Avoid a warning at exit
    See https://github.com/encode/httpx/pull/2026

    Note: since Network.aclose has to be async, it is not possible to call this method on Network.__del__
    So Network.aclose is called here using atexit.register
    """
    try:
        loop = get_loop()
        if loop:
            # wait up to 3 seconds for the HTTP clients to close
            shutdown_future = asyncio.run_coroutine_threadsafe(Network.aclose_all(), loop)
            shutdown_future.result(3)
    finally:
        NETWORKS.clear()
439
440
# ensure a default network exists even before initialize() has been called
NETWORKS[DEFAULT_NAME] = Network()
SXNG_Response patch_response(self, httpx.Response|SXNG_Response response, bool do_raise_for_httperror)
Definition network.py:242
is_valid_response(self, SXNG_Response response)
Definition network.py:258
Generator[tuple[tuple[str, str],...], str, str] get_proxy_cycles(self)
Definition network.py:146
httpx.AsyncClient get_client(self, bool|None verify=None, int|None max_redirects=None)
Definition network.py:186
request(self, str method, str url, **kwargs)
Definition network.py:301
log_response(self, httpx.Response response)
Definition network.py:154
stream(self, str method, str url, **kwargs)
Definition network.py:304
Generator[str] iter_ipaddresses(self)
Definition network.py:110
__init__(self, bool enable_http=True, bool verify=True, bool enable_http2=False, int max_connections=None, int max_keepalive_connections=None, float keepalive_expiry=None, str|dict[str, str]|None proxies=None, bool using_tor_proxy=False, str|list[str]|None local_addresses=None, int retries=0, None retry_on_http_error=None, int max_redirects=30, str logger_name=None)
Definition network.py:80
dict[str, t.Any] extract_kwargs_clients(dict[str, t.Any] kwargs)
Definition network.py:223
AsyncIterator[SXNG_Response]|None call_client(self, bool stream, str method, str url, **t.Any kwargs)
Definition network.py:270
bool check_tor_proxy(httpx.AsyncClient client, proxies)
Definition network.py:163
Generator[tuple[str, list[str]]] iter_proxies(self)
Definition network.py:133
Generator[tuple[tuple[str, str],...], str, str] _proxies_cycle
Definition network.py:95
::1337x
Definition 1337x.py:1
None initialize(list[dict[str, t.Any]] settings_engines=None, dict[str, t.Any] settings_outgoing=None)
Definition network.py:337
"Network" get_network(str|None name=None)
Definition network.py:312