annas_archive.py
# SPDX-License-Identifier: AGPL-3.0-or-later
"""`Anna's Archive`_ is a free non-profit online shadow library metasearch
engine providing access to a variety of book resources (also via IPFS), created
by a team of anonymous archivists (AnnaArchivist_).

.. _Anna's Archive: https://annas-archive.org/
.. _AnnaArchivist: https://annas-software.org/AnnaArchivist/annas-archive

Configuration
=============

The engine has the following additional settings:

- :py:obj:`aa_content`
- :py:obj:`aa_ext`
- :py:obj:`aa_sort`

With these options a SearXNG maintainer is able to configure **additional**
engines for specific searches in Anna's Archive.  For example, an engine to
search for the *newest* articles and journals (PDF) via the shortcut
``!aaa <search-term>``:

.. code:: yaml

  - name: annas articles
    engine: annas_archive
    shortcut: aaa
    aa_content: 'magazine'
    aa_ext: 'pdf'
    aa_sort: 'newest'

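The values accepted by these settings are documented at :py:obj:`aa_content`,
:py:obj:`aa_ext` and :py:obj:`aa_sort` below.  As a further, purely
illustrative example (engine name and shortcut are made up), a maintainer
could define an engine restricted to fiction e-books in EPUB format, sorted by
file size:

.. code:: yaml

  - name: annas fiction        # illustrative name
    engine: annas_archive
    shortcut: aaf              # illustrative shortcut
    aa_content: 'book_fiction'
    aa_ext: 'epub'
    aa_sort: 'largest'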

Implementations
===============

"""

from typing import List, Dict, Any, Optional
from urllib.parse import urlencode
from lxml import html

from searx.utils import extract_text, eval_xpath, eval_xpath_getindex, eval_xpath_list
from searx.enginelib.traits import EngineTraits
from searx.data import ENGINE_TRAITS

# about
about: Dict[str, Any] = {
    "website": "https://annas-archive.org/",
    "wikidata_id": "Q115288326",
    "official_api_documentation": None,
    "use_official_api": False,
    "require_api_key": False,
    "results": "HTML",
}

# engine dependent config
categories: List[str] = ["files"]
paging: bool = True

# search-url
base_url: str = "https://annas-archive.org"
aa_content: str = ""
"""Anna's search form field **Content** / possible values::

    book_fiction, book_unknown, book_nonfiction,
    book_comic, magazine, standards_document

To not filter, use an empty string (default).
"""
aa_sort: str = ''
"""Sort Anna's results, possible values::

    newest, oldest, largest, smallest

To sort by *most relevant* use an empty string (default)."""

aa_ext: str = ''
"""Filter Anna's results by a file extension.  Common filters are, for example,
``pdf`` and ``epub``.

.. note::

   Anna's Archive is a beta release: filtering results by file extension does
   not really work on Anna's Archive.

"""


def init(engine_settings=None):  # pylint: disable=unused-argument
    """Check the engine's settings."""
    traits = EngineTraits(**ENGINE_TRAITS['annas archive'])

    if aa_content and aa_content not in traits.custom['content']:
        raise ValueError(f'invalid setting content: {aa_content}')

    if aa_sort and aa_sort not in traits.custom['sort']:
        raise ValueError(f'invalid setting sort: {aa_sort}')

    if aa_ext and aa_ext not in traits.custom['ext']:
        raise ValueError(f'invalid setting ext: {aa_ext}')

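# Illustration (not part of the engine, values are made up): a misconfigured
# engine fails fast when SearXNG starts.  With e.g. ``aa_sort: 'random'`` in
# settings.yml, init() above raises
#
#   ValueError: invalid setting sort: random
#
# because 'random' is not among the values collected by fetch_traits() below.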

def request(query, params: Dict[str, Any]) -> Dict[str, Any]:
    # ``traits`` is set on this engine module by SearXNG's engine loader
    # (see searx.enginelib.traits), hence the ``type: ignore``.
    lang = traits.get_language(params["language"], traits.all_locale)  # type: ignore
    args = {
        'lang': lang,
        'content': aa_content,
        'ext': aa_ext,
        'sort': aa_sort,
        'q': query,
        'page': params['pageno'],
    }
    # filter out None and empty values
    filtered_args = dict((k, v) for k, v in args.items() if v)
    params["url"] = f"{base_url}/search?{urlencode(filtered_args)}"
    return params

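# Illustration (query and language are made up): with the example settings from
# the module docstring (aa_content='magazine', aa_ext='pdf', aa_sort='newest')
# and the query "linux", request() builds a URL roughly like
#
#   https://annas-archive.org/search?lang=en&content=magazine&ext=pdf&sort=newest&q=linux&page=1
#
# The ``lang`` argument depends on the selected UI language; empty settings are
# dropped from the query string.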

def response(resp) -> List[Dict[str, Optional[str]]]:
    results: List[Dict[str, Optional[str]]] = []
    dom = html.fromstring(resp.text)

    for item in eval_xpath_list(dom, '//main//div[contains(@class, "h-[125]")]/a'):
        results.append(_get_result(item))

    # The rendering of the web page is quite strange: except for the first
    # position, all other positions on Anna's result page are enclosed in SGML
    # comments.  These comments are *uncommented* by some JS code, see the
    # query of class '.js-scroll-hidden' in Anna's HTML template:
    # https://annas-software.org/AnnaArchivist/annas-archive/-/blob/main/allthethings/templates/macros/md5_list.html
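    # Illustration (assumption about Anna's markup, heavily simplified): a
    # hidden result position looks roughly like
    #
    #   <div class="js-scroll-hidden"><!-- <a href="/md5/..."> ... </a> --></div>
    #
    # so the comment node's text is parsed as HTML again and handed to
    # _get_result() just like the first, uncommented result.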

    for item in eval_xpath_list(dom, '//main//div[contains(@class, "js-scroll-hidden")]'):
        item = html.fromstring(item.xpath('./comment()')[0].text)
        results.append(_get_result(item))

    return results


def _get_result(item):
    return {
        'template': 'paper.html',
        'url': base_url + extract_text(eval_xpath_getindex(item, './@href', 0)),
        'title': extract_text(eval_xpath(item, './/h3/text()[1]')),
        'publisher': extract_text(eval_xpath(item, './/div[contains(@class, "text-sm")]')),
        'authors': [extract_text(eval_xpath(item, './/div[contains(@class, "italic")]'))],
        'content': extract_text(eval_xpath(item, './/div[contains(@class, "text-xs")]')),
        'thumbnail': extract_text(eval_xpath_getindex(item, './/img/@src', 0, default=None), allow_none=True),
    }


def fetch_traits(engine_traits: EngineTraits):
    """Fetch languages and other search arguments from Anna's search form."""
    # pylint: disable=import-outside-toplevel

    import babel
    from searx.network import get  # see https://github.com/searxng/searxng/issues/762
    from searx.locales import language_tag

    engine_traits.all_locale = ''
    engine_traits.custom['content'] = []
    engine_traits.custom['ext'] = []
    engine_traits.custom['sort'] = []

    resp = get(base_url + '/search')
    if not resp.ok:  # type: ignore
        raise RuntimeError("Response from Anna's search page is not OK.")
    dom = html.fromstring(resp.text)  # type: ignore

    # supported language codes

    lang_map = {}
    for x in eval_xpath_list(dom, "//form//input[@name='lang']"):
        eng_lang = x.get("value")
        if eng_lang in ('', '_empty', 'nl-BE', 'und'):
            continue
        try:
            locale = babel.Locale.parse(lang_map.get(eng_lang, eng_lang), sep='-')
        except babel.UnknownLocaleError:
            # silently ignore unknown languages
            # print("ERROR: %s -> %s is unknown by babel" % (x.get("data-name"), eng_lang))
            continue
        sxng_lang = language_tag(locale)
        conflict = engine_traits.languages.get(sxng_lang)
        if conflict:
            if conflict != eng_lang:
                print("CONFLICT: babel %s --> %s, %s" % (sxng_lang, conflict, eng_lang))
            continue
        engine_traits.languages[sxng_lang] = eng_lang

    for x in eval_xpath_list(dom, "//form//input[@name='content']"):
        engine_traits.custom['content'].append(x.get("value"))

    for x in eval_xpath_list(dom, "//form//input[@name='ext']"):
        engine_traits.custom['ext'].append(x.get("value"))

    for x in eval_xpath_list(dom, "//form//select[@name='sort']//option"):
        engine_traits.custom['sort'].append(x.get("value"))

    # sort the persisted traits for a reproducible (better) diff
    engine_traits.custom['content'].sort()
    engine_traits.custom['ext'].sort()
    engine_traits.custom['sort'].sort()
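# Usage note (sketch of the surrounding tooling, not executed at search time):
# fetch_traits() is run by SearXNG's engine-traits update tooling, e.g. the
# ``searxng_extra/update/update_engine_traits.py`` maintenance script (the
# exact path may differ between versions).  Its result is persisted into the
# data exposed as ``searx.data.ENGINE_TRAITS``, which init() above checks the
# ``aa_*`` settings against.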