.oO SearXNG Developer Documentation Oo.
Loading...
Searching...
No Matches
__init__.py
Go to the documentation of this file.
1# SPDX-License-Identifier: AGPL-3.0-or-later
2# pylint: disable=missing-module-docstring, too-few-public-methods
3
4# the public namespace has not yet been finally defined ..
5# __all__ = [..., ]
6
7import typing as t
8
9import threading
10from timeit import default_timer
11from uuid import uuid4
12
13from flask import copy_current_request_context
14
15from searx import logger
16from searx import settings
17import searx.answerers
18import searx.plugins
19from searx.engines import load_engines
20from searx.external_bang import get_bang_url
21from searx.metrics import initialize as initialize_metrics, counter_inc
22from searx.network import initialize as initialize_network, check_network_configuration
23from searx.results import ResultContainer
24from searx.search.checker import initialize as initialize_checker
25from searx.search.processors import PROCESSORS, initialize as initialize_processors
26
27
28if t.TYPE_CHECKING:
29 from .models import SearchQuery
30 from searx.extended_types import SXNG_Request
31
32logger = logger.getChild('search')
33
34
def initialize(
    settings_engines: list[dict[str, t.Any]] | None = None,
    enable_checker: bool = False,
    check_network: bool = False,
    enable_metrics: bool = True,
):
    """Initialize the search subsystem.

    Loads the engines, sets up the outgoing network, the per-engine metrics,
    the engine processors and - optionally - the engine checker.

    :param settings_engines: engine settings list; falls back to
        ``settings['engines']`` when ``None`` (or empty).
    :param enable_checker: additionally start the engine checker.
    :param check_network: validate the network configuration after setup.
    :param enable_metrics: record per-engine metrics counters.
    """
    # fall back to the globally configured engines
    settings_engines = settings_engines or settings['engines']
    load_engines(settings_engines)
    initialize_network(settings_engines, settings['outgoing'])
    if check_network:
        check_network_configuration()
    initialize_metrics([engine['name'] for engine in settings_engines], enable_metrics)
    initialize_processors(settings_engines)
    if enable_checker:
        initialize_checker()
50
51
class Search:
    """Search information container.

    Holds one parsed query, the container collecting all engine results and
    the timing data of a single search run.
    """

    # one Search object is created per incoming query; __slots__ keeps the
    # instances small and catches typo'd attribute names
    __slots__ = "search_query", "result_container", "start_time", "actual_timeout" # type: ignore

    def __init__(self, search_query: "SearchQuery"):
        """Initialize the Search"""
        # init vars
        super().__init__()
        # the parsed query (terms, selected engines, categories, timeout, ...)
        self.search_query: "SearchQuery" = search_query
        # collects results, answers and unresponsive-engine errors
        self.result_container: ResultContainer = ResultContainer()
        # wall-clock start of the run, set in search()
        self.start_time: float | None = None
        # effective timeout of this run, computed in _get_requests()
        self.actual_timeout: float | None = None

    def search_external_bang(self) -> bool:
        """Check if there is a external bang. If yes, update
        self.result_container and return True."""
        if self.search_query.external_bang:
            self.result_container.redirect_url = get_bang_url(self.search_query)

            # This means there was a valid bang and the rest of the search does
            # not need to be continued
            if isinstance(self.result_container.redirect_url, str):
                return True
        return False

    def search_answerers(self):
        """Ask the answerers.  If at least one answered, extend the result
        container and return True (the engine search is then skipped by the
        caller, see :py:obj:`Search.search`)."""
        results = searx.answerers.STORAGE.ask(self.search_query.query)
        self.result_container.extend(None, results) # pyright: ignore[reportArgumentType]
        return bool(results)

    # do search-request
    def _get_requests(self) -> tuple[list[tuple[str, str, dict[str, t.Any]]], int]:
        """Build the per-engine request list and compute the effective timeout.

        Returns ``(requests, actual_timeout)`` where each request is a tuple
        ``(engine_name, query, request_params)``.
        """
        # init vars
        requests: list[tuple[str, str, dict[str, t.Any]]] = []

        # max of all selected engine timeout
        default_timeout = 0

        # start search-request for all selected engines
        for engineref in self.search_query.engineref_list:
            processor = PROCESSORS[engineref.name]

            # stop the request now if the engine is suspend
            if processor.extend_container_if_suspended(self.result_container):
                continue

            # set default request parameters
            request_params = processor.get_params(self.search_query, engineref.category)
            if request_params is None:
                # engine opted out of this query
                continue

            counter_inc('engine', engineref.name, 'search', 'count', 'sent')

            # append request to list
            requests.append((engineref.name, self.search_query.query, request_params))

            # update default_timeout
            default_timeout = max(default_timeout, processor.engine.timeout)

        # adjust timeout: clamp the engines' default by the server-side
        # maximum and/or the per-query user limit
        max_request_timeout = settings['outgoing']['max_request_timeout']
        actual_timeout = default_timeout
        query_timeout = self.search_query.timeout_limit

        if max_request_timeout is None and query_timeout is None:
            # No max, no user query: default_timeout
            pass
        elif max_request_timeout is None and query_timeout is not None:
            # No max, but user query: From user query except if above default
            actual_timeout = min(default_timeout, query_timeout)
        elif max_request_timeout is not None and query_timeout is None:
            # Max, no user query: Default except if above max
            actual_timeout = min(default_timeout, max_request_timeout)
        elif max_request_timeout is not None and query_timeout is not None:
            # Max & user query: From user query except if above max
            actual_timeout = min(query_timeout, max_request_timeout)

        logger.debug(
            "actual_timeout={0} (default_timeout={1}, ?timeout_limit={2}, max_request_timeout={3})".format(
                actual_timeout, default_timeout, query_timeout, max_request_timeout
            )
        )

        return requests, actual_timeout

    def search_multiple_requests(self, requests: list[tuple[str, str, dict[str, t.Any]]]):
        """Fan the engine requests out to one thread each, then wait until
        every thread finished or ``self.actual_timeout`` is exceeded."""
        # pylint: disable=protected-access
        # all worker threads share this UUID as their thread name so they can
        # be found again via threading.enumerate() below
        search_id = str(uuid4())

        for engine_name, query, request_params in requests:
            # bind the current Flask request context to the worker thread
            _search = copy_current_request_context(PROCESSORS[engine_name].search)
            th = threading.Thread( # pylint: disable=invalid-name
                target=_search,
                args=(query, request_params, self.result_container, self.start_time, self.actual_timeout),
                name=search_id,
            )
            # private attributes used below for timeout bookkeeping
            th._timeout = False
            th._engine_name = engine_name
            th.start()

        for th in threading.enumerate(): # pylint: disable=invalid-name
            if th.name == search_id:
                # time left of the shared deadline for this thread's join
                remaining_time = max(0.0, self.actual_timeout - (default_timer() - self.start_time))
                th.join(remaining_time)
                if th.is_alive():
                    # deadline exceeded: flag the thread and record the engine
                    # as unresponsive (the thread itself is not killed)
                    th._timeout = True
                    self.result_container.add_unresponsive_engine(th._engine_name, 'timeout')
                    PROCESSORS[th._engine_name].logger.error('engine timeout')

    def search_standard(self):
        """
        Update self.result_container, self.actual_timeout
        """
        requests, self.actual_timeout = self._get_requests()

        # send all search-request
        if requests:
            self.search_multiple_requests(requests)

        # return results, suggestions, answers and infoboxes
        return True

    # do search-request
    def search(self) -> ResultContainer:
        """Run the search: an external bang short-circuits everything, an
        answerer hit skips the engines, otherwise the engines are queried.
        Returns the filled result container."""
        self.start_time = default_timer()
        if not self.search_external_bang():
            if not self.search_answerers():
                self.search_standard()
        return self.result_container
183
184
class SearchWithPlugins(Search):
    """Inherit from the Search class, add calls to the plugins."""

    __slots__ = 'user_plugins', 'request'

    def __init__(self, search_query: "SearchQuery", request: "SXNG_Request", user_plugins: list[str]):
        """Set up the base search and hook the plugin on-result callback into
        the result container."""
        super().__init__(search_query)
        self.user_plugins = user_plugins
        # every incoming result is routed through the plugins first
        self.result_container.on_result = self._on_result
        # pylint: disable=line-too-long
        # get the "real" request to use it outside the Flask context.
        # see
        # * https://github.com/pallets/flask/blob/d01d26e5210e3ee4cbbdef12f05c886e08e92852/src/flask/globals.py#L55
        # * https://github.com/pallets/werkzeug/blob/3c5d3c9bd0d9ce64590f0af8997a38f3823b368d/src/werkzeug/local.py#L548-L559
        # * https://werkzeug.palletsprojects.com/en/2.0.x/local/#werkzeug.local.LocalProxy._get_current_object
        # pylint: enable=line-too-long
        self.request = request._get_current_object()

    def _on_result(self, result):
        # let the plugins inspect (and possibly reject/modify) each result
        return searx.plugins.STORAGE.on_result(self.request, self, result)

    def search(self) -> ResultContainer:
        """Run the plugin pre-search hooks, the actual search (unless a
        plugin vetoed it), then the post-search hooks; close and return the
        result container."""
        storage = searx.plugins.STORAGE

        # a falsy pre_search() result skips the engine search entirely
        run_search = storage.pre_search(self.request, self)
        if run_search:
            super().search()

        storage.post_search(self.request, self)
        self.result_container.close()

        return self.result_container
::1337x
Definition 1337x.py:1
initialize(list[dict[str, t.Any]] settings_engines=None, bool enable_checker=False, bool check_network=False, bool enable_metrics=True)
Definition __init__.py:40