.oO SearXNG Developer Documentation Oo.
Loading...
Searching...
No Matches
annas_archive.py
Go to the documentation of this file.
1# SPDX-License-Identifier: AGPL-3.0-or-later
2"""`Anna's Archive`_ is a free non-profit online shadow library metasearch
3engine providing access to a variety of book resources (also via IPFS), created
4by a team of anonymous archivists (AnnaArchivist_).
5
6.. _Anna's Archive: https://annas-archive.org/
7.. _AnnaArchivist: https://annas-software.org/AnnaArchivist/annas-archive
8
9Configuration
10=============
11
12The engine has the following additional settings:
13
14- :py:obj:`aa_content`
15- :py:obj:`aa_ext`
16- :py:obj:`aa_sort`
17
18With these options a SearXNG maintainer is able to configure **additional**
19engines for specific searches in Anna's Archive. For example an engine to search
20for *newest* articles and journals (PDF) / by shortcut ``!aaa <search-term>``.
21
22.. code:: yaml
23
24 - name: annas articles
25 engine: annas_archive
26 shortcut: aaa
27 aa_content: 'magazine'
28 aa_ext: 'pdf'
29 aa_sort: 'newest'
30
31Implementations
32===============
33
34"""
35import typing as t
36
37from urllib.parse import urlencode
38from lxml import html
39from lxml.etree import ElementBase
40
41from searx.utils import extract_text, eval_xpath, eval_xpath_getindex, eval_xpath_list
42from searx.enginelib.traits import EngineTraits
43from searx.data import ENGINE_TRAITS
44from searx.exceptions import SearxEngineXPathException
45
46from searx.result_types import EngineResults
47
48if t.TYPE_CHECKING:
49 from searx.extended_types import SXNG_Response
50
# about
about: dict[str, t.Any] = {
    "website": "https://annas-archive.org/",
    "wikidata_id": "Q115288326",
    "official_api_documentation": None,
    "use_official_api": False,
    "require_api_key": False,
    "results": "HTML",
}

# engine dependent config
categories: list[str] = ["files"]
paging: bool = True

# search-url
base_url: str = "https://annas-archive.org"

aa_content: str = ""
"""Anna's search form field **Content** / possible values::

    book_fiction, book_unknown, book_nonfiction,
    book_comic, magazine, standards_document

To not filter use an empty string (default).
"""

aa_sort: str = ''
"""Sort Anna's results, possible values::

    newest, oldest, largest, smallest

To sort by *most relevant* use an empty string (default)."""

aa_ext: str = ''
"""Filter Anna's results by a file ending.  Common filters for example are
``pdf`` and ``epub``.

.. note::

   Anna's Archive is a beta release: Filter results by file extension does not
   really work on Anna's Archive.

"""
93
def init(engine_settings: dict[str, t.Any]) -> None:  # pylint: disable=unused-argument
    """Validate the module's ``aa_*`` settings against the values Anna's
    Archive actually supports (taken from the persisted engine traits).

    Raises a :py:obj:`ValueError` for any setting outside the supported set.
    """
    traits = EngineTraits(**ENGINE_TRAITS['annas archive'])

    # (trait key, configured value) pairs, checked in the original order
    for name, value in (('content', aa_content), ('sort', aa_sort), ('ext', aa_ext)):
        if value and value not in traits.custom[name]:
            raise ValueError(f'invalid setting {name}: {value}')
106
107
def request(query: str, params: dict[str, t.Any]) -> None:
    """Assemble the search URL for Anna's Archive and store it in
    ``params['url']``.

    The request language is mapped to one of Anna's supported languages via
    the engine traits; unset filter options are left out of the query string.
    """
    lang = traits.get_language(params["language"], traits.all_locale)
    query_args: dict[str, t.Any] = {
        'lang': lang,
        'content': aa_content,
        'ext': aa_ext,
        'sort': aa_sort,
        'q': query,
        'page': params['pageno'],
    }
    # drop None and empty values so they don't show up as empty URL arguments
    query_args = {key: val for key, val in query_args.items() if val}
    params["url"] = f"{base_url}/search?{urlencode(query_args)}"
121
122
def response(resp: "SXNG_Response") -> EngineResults:
    """Parse Anna's Archive result page into an :py:obj:`EngineResults`
    container, silently skipping items whose markup can't be extracted."""
    results = EngineResults()
    dom = html.fromstring(resp.text)

    # The rendering of the WEB page is strange; positions of Anna's result page
    # are enclosed in SGML comments. These comments are *uncommented* by some
    # JS code, see query of class '.js-scroll-hidden' in Anna's HTML template:
    # https://annas-software.org/AnnaArchivist/annas-archive/-/blob/main/allthethings/templates/macros/md5_list.html

    for item in eval_xpath_list(dom, '//main//div[contains(@class, "js-aarecord-list-outer")]/div'):
        try:
            fields: dict[str, t.Any] = _get_result(item)
        except SearxEngineXPathException:
            # malformed / partially rendered item -- skip it
            continue
        results.add(results.types.LegacyResult(**fields))

    return results
139
140
def _get_result(item: ElementBase) -> dict[str, t.Any]:
    """Extract one result item's fields (paper.html template) from a result
    ``div`` of Anna's HTML page."""
    url = base_url + eval_xpath_getindex(item, './a/@href', 0)
    title = extract_text(eval_xpath(item, './div//a[starts-with(@href, "/md5")]'))
    authors = [extract_text(eval_xpath_getindex(item, './/a[starts-with(@href, "/search")]', 0))]
    # the second "/search" link (if any) is the publisher
    publisher = extract_text(
        eval_xpath_getindex(item, './/a[starts-with(@href, "/search")]', 1, default=None), allow_none=True
    )
    content = extract_text(eval_xpath(item, './/div[contains(@class, "relative")]'))
    thumbnail = extract_text(eval_xpath_getindex(item, './/img/@src', 0, default=None), allow_none=True)

    return {
        'template': 'paper.html',
        'url': url,
        'title': title,
        'authors': authors,
        'publisher': publisher,
        'content': content,
        'thumbnail': thumbnail,
    }
153
154
def fetch_traits(engine_traits: EngineTraits):
    """Fetch languages and other search arguments from Anna's search form."""
    # pylint: disable=import-outside-toplevel

    import babel
    from searx.network import get  # see https://github.com/searxng/searxng/issues/762
    from searx.locales import language_tag

    engine_traits.all_locale = ''
    for trait_key in ('content', 'ext', 'sort'):
        engine_traits.custom[trait_key] = []

    resp = get(base_url + '/search')
    if not resp.ok:
        raise RuntimeError("Response from Anna's search page is not OK.")
    dom = html.fromstring(resp.text)

    # supported language codes

    lang_map = {}
    for lang_input in eval_xpath_list(dom, "//form//input[@name='lang']"):
        eng_lang = lang_input.get("value")
        # skip placeholders, negated filters and codes babel can't handle
        if eng_lang in ('', '_empty', 'nl-BE', 'und') or eng_lang.startswith('anti__'):
            continue
        try:
            locale = babel.Locale.parse(lang_map.get(eng_lang, eng_lang), sep='-')
        except babel.UnknownLocaleError:
            # silently ignore unknown languages
            # print("ERROR: %s -> %s is unknown by babel" % (x.get("data-name"), eng_lang))
            continue
        sxng_lang = language_tag(locale)
        conflict = engine_traits.languages.get(sxng_lang)
        if conflict:
            if conflict != eng_lang:
                print("CONFLICT: babel %s --> %s, %s" % (sxng_lang, conflict, eng_lang))
            continue
        engine_traits.languages[sxng_lang] = eng_lang

    # 'content' and 'ext' are checkbox filters; ignore the "anti__" negations
    for field_name in ('content', 'ext'):
        for field_input in eval_xpath_list(dom, f"//form//input[@name='{field_name}']"):
            value = field_input.get("value")
            if not value.startswith("anti__"):
                engine_traits.custom[field_name].append(value)

    for sort_option in eval_xpath_list(dom, "//form//select[@name='sort']//option"):
        engine_traits.custom['sort'].append(sort_option.get("value"))

    # for better diff; sort the persistence of these traits
    for trait_key in ('content', 'ext', 'sort'):
        engine_traits.custom[trait_key].sort()
None init(dict[str, t.Any] engine_settings)
EngineResults response("SXNG_Response" resp)
fetch_traits(EngineTraits engine_traits)
None request(str query, dict[str, t.Any] params)
dict[str, t.Any] _get_result(ElementBase item)