import typing as t

from collections import defaultdict
from threading import RLock

import searx.engines
from searx import logger as log
def calculate_score(
    result: MainResult | LegacyResult,
    priority: MainResult.PriorityType,
) -> float:
    weight = 1.0

    for result_engine in result['engines']:
        if hasattr(searx.engines.engines.get(result_engine), 'weight'):
            weight *= float(searx.engines.engines[result_engine].weight)

    weight *= len(result['positions'])
    score = 0

    for position in result['positions']:
        if priority == 'low':
            continue
        if priority == 'high':
            score += weight
        else:
            score += weight / position

    return score
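# Worked example (added illustration, values invented): two engines without a custom
# weight report the same result at positions 1 and 3.
#
#   weight = 1.0            # no engine defines a 'weight' attribute
#   weight *= 2             # len(result['positions']) == 2
#   score  = 2/1 + 2/3      # default priority: earlier positions contribute more (~2.67)
#
# With priority == 'high' every position adds the full weight (score == 4.0); with
# priority == 'low' the positions are skipped and the score stays 0.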
54 """In the result container, the results are collected, sorted and duplicates
    main_results_map: dict[int, MainResult | LegacyResult]
    infoboxes: list[LegacyResult]
        self.engine_data: dict[str, dict[str, str]] = defaultdict(dict)
        self.on_result: t.Callable[[Result | LegacyResult], bool] = lambda _: True

    def extend(self, engine_name: str | None, results: list[Result | LegacyResult]):
        if self._closed:
            log.debug("container is closed, ignoring results: %s", results)
            return
        main_count = 0

        for result in list(results):

            if isinstance(result, Result):
                result.engine = result.engine or engine_name
                result.normalize_result_fields()

                if isinstance(result, BaseAnswer):
                    self.answers.add(result)
                elif isinstance(result, MainResult):
                    main_count += 1
                    self._merge_main_result(result, main_count)
                else:
                    raise NotImplementedError(f"no handler implemented to process the result of type {result}")

            else:
                # legacy result item (plain dict)
                result["engine"] = result.get("engine") or engine_name or ""
                result = LegacyResult(result)
                result.normalize_result_fields()
113 if "suggestion" in result:
118 if "answer" in result:
121 f
"answer results from engine {result.engine}"
122 " are without typification / migrate to Answer class.",
128 if "correction" in result:
133 if "infobox" in result:
138 if "number_of_results" in result:
143 if "engine_data" in result:
146 self.
engine_data[result.engine][result[
"key"]] = result[
"engine_data"]
        if engine_name in searx.engines.engines:
            eng = searx.engines.engines[engine_name]
            histogram_observe(main_count, "engine", eng.name, "result", "count")
            if not self.paging and eng.paging:
                self.paging = True
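    # Hedged illustration (engine name and values invented): what an engine module
    # typically hands to extend() -- a mix of main results and "meta" items such as
    # suggestions or engine_data, which extend() routes into dedicated collections
    # instead of the main result list:
    #
    #   container.extend("example engine", [
    #       {"url": "https://example.org/", "title": "Example", "content": "..."},
    #       {"suggestion": "example query"},
    #       {"engine_data": "page-2-token", "key": "next_page"},
    #   ])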

    def _merge_infobox(self, new_infobox: LegacyResult):
        add_infobox = True

        new_id = getattr(new_infobox, "id", None)
        if new_id is not None:
            for existing_infobox in self.infoboxes:
                if new_id == getattr(existing_infobox, "id", None):
                    merge_two_infoboxes(existing_infobox, new_infobox)
                    add_infobox = False

        if add_infobox:
            self.infoboxes.append(new_infobox)

    def _merge_main_result(self, result: MainResult | LegacyResult, position: int):
        result_hash = hash(result)
        merged = self.main_results_map.get(result_hash)
        if not merged:
            # first time this result is seen: register it under its hash
            result.positions = [position]
            self.main_results_map[result_hash] = result
            return
        # duplicate: merge into the existing entry and remember the extra position
        merge_two_main_results(merged, result)
        merged.positions.append(position)

    def close(self):
        self._closed = True

        for result in self.main_results_map.values():
            result.score = calculate_score(result, result.priority)
            for eng_name in result.engines:
                counter_add(result.score, 'engine', eng_name, 'score')
198 """Returns a sorted list of results to be displayed in the main result
199 area (:ref:`result types`)."""
208 results = sorted(self.
main_results_map.values(), key=
lambda x: x.score, reverse=
True)
        gresults: list[MainResult | LegacyResult] = []
        categoryPositions: dict[str, t.Any] = {}
        # max_count / max_distance (defined in lines elided here) bound how many
        # results a group may take and how far behind a group may still be extended

        for res in results:
            engine = searx.engines.engines.get(res.engine or "")
            if engine:
                res.category = engine.categories[0] if len(engine.categories) > 0 else ""

            # results are grouped by category, template and image-likeness
            category = f"{res.category}:{res.template}:{'img_src' if (res.thumbnail or res.img_src) else ''}"
            grp = categoryPositions.get(category)
            if (grp is not None) and (grp["count"] > 0) and (len(gresults) - grp["index"] < max_distance):
                # pull the result into the existing group of the same kind
                index = grp["index"]
                gresults.insert(index, res)

                # shift every group index at or after the insertion point
                for item in categoryPositions.values():
                    v = item["index"]
                    if v >= index:
                        item["index"] = v + 1

                # the group accepts one result less from now on
                grp["count"] -= 1

            else:
                # start a new group at the end of the list
                gresults.append(res)
                categoryPositions[category] = {"index": len(gresults), "count": max_count}

        return gresults
257 """Returns the average of results number, returns zero if the average
258 result number is smaller than the actual result count."""
261 log.error(
"call to ResultContainer.number_of_results before ResultContainer.close")

    def add_unresponsive_engine(self, engine_name: str, error_type: str, suspended: bool = False):
        if self._closed:
            log.error("call to ResultContainer.add_unresponsive_engine after ResultContainer.close")
            return
        if searx.engines.engines[engine_name].display_error_messages:
            self.unresponsive_engines.add(UnresponsiveEngine(engine_name, error_type, suspended))

    def add_timing(self, engine_name: str, engine_time: float, page_load_time: float):
        if self._closed:
            log.error("call to ResultContainer.add_timing after ResultContainer.close")
            return
        self.timings.append(Timing(engine_name, total=engine_time, load=page_load_time))

    def get_timings(self) -> list[Timing]:
        if not self._closed:
            log.error("call to ResultContainer.get_timings before ResultContainer.close")
            return []
        return self.timings
298 """Merges the values from ``other`` into ``origin``."""
300 weight1 = getattr(searx.engines.engines[origin.engine],
"weight", 1)
301 weight2 = getattr(searx.engines.engines[other.engine],
"weight", 1)
303 if weight2 > weight1:
304 origin.engine = other.engine
306 origin.engines |= other.engines
309 url_items = origin.get(
"urls", [])
311 for url2
in other.urls:
313 entity_url2 = url2.get(
"entity")
315 for url1
in origin.get(
"urls", []):
316 if (entity_url2
is not None and entity_url2 == url1.get(
"entity"))
or (
317 url1.get(
"url") == url2.get(
"url")
322 url_items.append(url2)
324 origin.urls = url_items
    if other.img_src:
        if not origin.img_src:
            origin.img_src = other.img_src
        elif weight2 > weight1:
            origin.img_src = other.img_src

    if other.attributes:
        if not origin.attributes:
            origin.attributes = other.attributes
        else:
            attr_names_1: set[str] = set()
            for attr in origin.attributes:
                label = attr.get("label")
                if label:
                    attr_names_1.add(label)
                entity = attr.get("entity")
                if entity:
                    attr_names_1.add(entity)

            for attr in other.attributes:
                if attr.get("label") not in attr_names_1 and attr.get("entity") not in attr_names_1:
                    origin.attributes.append(attr)

    if other.content:
        if not origin.content:
            origin.content = other.content
        elif len(other.content) > len(origin.content):
            origin.content = other.content
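# Hedged illustration of the attribute merge above (attribute values invented): if
# origin already holds {"label": "Born", ...} and other contributes
# [{"label": "Born", ...}, {"label": "Occupation", ...}], only the "Occupation"
# attribute is appended -- duplicates are detected via their "label" or "entity".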
358 """Merges the values from ``other`` into ``origin``."""
360 if len(other.content) > len(origin.content):
362 origin.content = other.content
365 if len(other.title) > len(origin.title):
366 origin.title = other.title
369 if isinstance(other, MainResult)
and isinstance(origin, MainResult):
370 origin.defaults_from(other)
371 elif isinstance(other, LegacyResult)
and isinstance(origin, LegacyResult):
372 origin.defaults_from(other)
375 origin.engines.add(other.engine
or "")
378 if origin.parsed_url
and not origin.parsed_url.scheme.endswith(
"s"):
379 if other.parsed_url
and other.parsed_url.scheme.endswith(
"s"):
380 origin.parsed_url = origin.parsed_url._replace(scheme=other.parsed_url.scheme)
381 origin.url = origin.parsed_url.geturl()

# [Class diagram residue -- ResultContainer members: engine_data, on_result,
#  unresponsive_engines, _main_results_sorted, extend(), add_unresponsive_engine(),
#  add_timing(), get_timings(), number_of_results(), get_ordered_results(),
#  _merge_infobox(), _merge_main_result(); module-level helpers: calculate_score(),
#  merge_two_infoboxes(), merge_two_main_results()]
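# Minimal usage sketch (added illustration; the surrounding search flow normally
# drives the container, engine names and result lists below are invented):
#
#   container = ResultContainer()
#   container.extend("engine a", results_a)      # once per responding engine
#   container.extend("engine b", results_b)
#   container.close()                            # scores, merges and sorts; no extend() afterwards
#   for res in container.get_ordered_results():  # grouped, de-duplicated main results
#       ...
#   total = container.number_of_results()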