tracker_patterns.py
# SPDX-License-Identifier: AGPL-3.0-or-later
"""Simple implementation to store TrackerPatterns data in a SQL database."""

from __future__ import annotations
import typing

__all__ = ["TrackerPatternsDB"]

import re
from collections.abc import Iterator
from urllib.parse import urlparse, urlunparse, parse_qsl, urlencode

from httpx import HTTPError

from searx.data.core import get_cache, log
from searx.network import get as http_get

RuleType = tuple[str, list[str], list[str]]
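
# For illustration only (hypothetical values): a rule handled by this module
# is a 3-tuple of (url_regexp, url_ignore patterns, del_args patterns), e.g.
#
#   rule: RuleType = (
#       r"^https?://(?:www\.)?example\.com",          # url_regexp
#       [r"^https?://(?:www\.)?example\.com/keep"],   # url_ignore
#       [r"^utm_.*"],                                 # del_args
#   )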


class TrackerPatternsDB:
    # pylint: disable=missing-class-docstring

    ctx_name = "data_tracker_patterns"

    CLEAR_LIST_URL = [
        # ClearURL rule lists; the first one that responds with HTTP 200 is used
        "https://rules1.clearurls.xyz/data.minify.json",
        "https://rules2.clearurls.xyz/data.minify.json",
        "https://raw.githubusercontent.com/ClearURLs/Rules/refs/heads/master/data.min.json",
    ]
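
    # The ClearURLs lists above are JSON documents; the fields read by
    # iter_clear_list() below have roughly this shape (abridged, the values
    # are illustrative examples, not taken from the real lists):
    #
    #   {
    #     "providers": {
    #       "exampleprovider": {
    #         "urlPattern": "^https?:\\/\\/(?:[a-z0-9-]+\\.)*?example\\.com",
    #         "rules": ["utm_source", "utm_medium"],
    #         "exceptions": ["^https?:\\/\\/example\\.com\\/keep"]
    #       }
    #     }
    #   }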

    class Fields:
        # pylint: disable=too-few-public-methods, invalid-name
        url_regexp: typing.Final = 0  # URL (regular expression) match condition of the link
        url_ignore: typing.Final = 1  # URL (regular expression) to ignore
        del_args: typing.Final = 2  # list of URL arguments (regular expression) to delete

    def __init__(self):
        self.cache = get_cache()

    def init(self):
        if self.cache.properties("tracker_patterns loaded") != "OK":
            # To avoid parallel initializations, the property is set first
            self.cache.properties.set("tracker_patterns loaded", "OK")
            self.load()
        # FIXME: do we need maintenance here?  Remember: the database is
        # stored in /tmp and will be rebuilt on reboot anyway.

    def load(self):
        log.debug("init searx.data.TRACKER_PATTERNS")
        for rule in self.iter_clear_list():
            self.add(rule)

    def add(self, rule: RuleType):
        self.cache.set(
            key=rule[self.Fields.url_regexp],
            value=(
                rule[self.Fields.url_ignore],
                rule[self.Fields.del_args],
            ),
            ctx=self.ctx_name,
            expire=None,
        )
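
        # Cache layout sketch (derived from the call above): within the
        # "data_tracker_patterns" context each entry maps
        #   urlPattern regexp  ->  ([exception regexps], [tracker arg regexps])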

    def rules(self) -> Iterator[RuleType]:
        self.init()
        for key, value in self.cache.pairs(ctx=self.ctx_name):
            yield key, value[0], value[1]

    def iter_clear_list(self) -> Iterator[RuleType]:
        resp = None
        for url in self.CLEAR_LIST_URL:
            log.debug("TRACKER_PATTERNS: Trying to fetch %s...", url)
            try:
                resp = http_get(url, timeout=3)

            except HTTPError as exc:
                log.warning("TRACKER_PATTERNS: HTTPError (%s) occurred while fetching %s", exc, url)
                continue

            if resp.status_code != 200:
                log.warning(f"TRACKER_PATTERNS: ClearURL ignore HTTP {resp.status_code} {url}")
                continue

            break

        if resp is None:
            log.error("TRACKER_PATTERNS: failed fetching ClearURL rule lists")
            return

        for rule in resp.json()["providers"].values():
            yield (
                rule["urlPattern"].replace("\\\\", "\\"),  # fix JavaScript regex syntax
                [exc.replace("\\\\", "\\") for exc in rule.get("exceptions", [])],
                rule.get("rules", []),
            )

    def clean_url(self, url: str) -> bool | str:
        """The URL arguments are normalized and cleaned of tracker parameters.

        Returns ``True`` if the URL can be used unchanged and ``False`` if the
        URL should be ignored.  If the URL was modified, the cleaned URL is
        returned as a string.
        """

        new_url = url
        parsed_new_url = urlparse(url=new_url)

        for rule in self.rules():

            if not re.match(rule[self.Fields.url_regexp], new_url):
                # no match / ignore pattern
                continue

            do_ignore = False
            for pattern in rule[self.Fields.url_ignore]:
                if re.match(pattern, new_url):
                    do_ignore = True
                    break

            if do_ignore:
                # pattern is in the list of exceptions / ignore pattern
                # HINT:
                #   we can't break the outer pattern loop since we have
                #   overlapping urlPattern like ".*"
                continue

            # remove tracker arguments from the url-query part
            query_args: list[tuple[str, str]] = list(parse_qsl(parsed_new_url.query))

            for name, val in query_args.copy():
                # remove URL arguments
                for pattern in rule[self.Fields.del_args]:
                    if re.match(pattern, name):
                        log.debug("TRACKER_PATTERNS: %s remove tracker arg: %s='%s'", parsed_new_url.netloc, name, val)
                        query_args.remove((name, val))
                        break  # avoid a second remove() if another pattern also matches this argument

            parsed_new_url = parsed_new_url._replace(query=urlencode(query_args))
            new_url = urlunparse(parsed_new_url)

        if new_url != url:
            return new_url

        return True


if __name__ == "__main__":
    db = TrackerPatternsDB()
    for r in db.rules():
        print(r)
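
# Usage sketch for clean_url() (illustrative, not part of the module: it
# assumes a configured SearXNG environment, since get_cache() and
# searx.network need an initialized application; the URL and its arguments
# are made up):
#
#   db = TrackerPatternsDB()
#   result = db.clean_url("https://example.org/page?id=42&utm_source=newsletter")
#   # result == "https://example.org/page?id=42"  if a rule removed utm_source
#   # result is True   if no rule changed the URL
#   # result is False  if, per the docstring, the URL should be ignored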