.oO SearXNG Developer Documentation Oo.
hackernews.py
# SPDX-License-Identifier: AGPL-3.0-or-later
"""Hackernews

"""

from datetime import datetime
from urllib.parse import urlencode
from dateutil.relativedelta import relativedelta

from flask_babel import gettext

# Engine metadata
about = {
    "website": "https://news.ycombinator.com/",
    "wikidata_id": "Q686797",
    "official_api_documentation": "https://hn.algolia.com/api",
    "use_official_api": True,
    "require_api_key": False,
    "results": "JSON",
}

# Engine configuration
paging = True
time_range_support = True
categories = ["it"]
results_per_page = 30

# Search URL
base_url = "https://hn.algolia.com/api/v1"


def request(query, params):
    search_type = 'search'
    if not query:
        # if search query is empty show results from HN's front page
        search_type = 'search_by_date'
        query_params = {
            "tags": "front_page",
            "page": (params["pageno"] - 1),
        }
    else:
        query_params = {
            "query": query,
            "page": (params["pageno"] - 1),
            "hitsPerPage": results_per_page,
            "minWordSizefor1Typo": 4,
            "minWordSizefor2Typos": 8,
            "advancedSyntax": "true",
            "ignorePlurals": "false",
            "minProximity": 7,
            "numericFilters": '[]',
            "tagFilters": '["story",[]]',
            "typoTolerance": "true",
            "queryType": "prefixLast",
            "restrictSearchableAttributes": '["title","comment_text","url","story_text","author"]',
            "getRankingInfo": "true",
        }

    if params['time_range']:
        search_type = 'search_by_date'
        timestamp = (
            # pylint: disable=unexpected-keyword-arg
            datetime.now()
            - relativedelta(**{f"{params['time_range']}s": 1})  # type: ignore
        ).timestamp()
        query_params["numericFilters"] = f"created_at_i>{timestamp}"

    params["url"] = f"{base_url}/{search_type}?{urlencode(query_params)}"
    return params


def response(resp):
    results = []
    data = resp.json()

    for hit in data["hits"]:
        object_id = hit["objectID"]
        points = hit.get("points") or 0
        num_comments = hit.get("num_comments") or 0

        metadata = ""
        if points != 0 or num_comments != 0:
            metadata = f"{gettext('points')}: {points}" f" | {gettext('comments')}: {num_comments}"
        results.append(
            {
                "title": hit.get("title") or f"{gettext('author')}: {hit['author']}",
                "url": f"https://news.ycombinator.com/item?id={object_id}",
                "content": hit.get("url") or hit.get("comment_text") or hit.get("story_text") or "",
                "metadata": metadata,
                "author": hit["author"],
                "publishedDate": datetime.utcfromtimestamp(hit["created_at_i"]),
            }
        )

    return results
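For reference, a minimal sketch of how the request()/response() pair above could be exercised in isolation. The use of the third-party requests library and the hand-built params dict are assumptions for illustration only; inside SearXNG the search framework supplies params and performs the HTTP call itself.

# Illustrative sketch (not part of the original file); assumes the engine
# module's request() and response() are in scope.
import requests  # assumption: stands in for SearXNG's own network layer

# params carries only the keys this engine actually reads.
params = request("rust async runtime", {"pageno": 1, "time_range": None})

# Fetch the Algolia URL built by request() and map hits to result dicts.
resp = requests.get(params["url"], timeout=10)
for result in response(resp):
    print(result["title"], "->", result["url"])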