.oO SearXNG Developer Documentation Oo.
Loading...
Searching...
No Matches
google_videos.py
Go to the documentation of this file.
1# SPDX-License-Identifier: AGPL-3.0-or-later
2"""This is the implementation of the Google Videos engine.
3
4.. admonition:: Content-Security-Policy (CSP)
5
6 This engine needs to allow images from the `data URLs`_ (prefixed with the
7 ``data:`` scheme)::
8
9 Header set Content-Security-Policy "img-src 'self' data: ;"
10
11.. _data URLs:
12 https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/Data_URIs
13
14"""
15
16from typing import TYPE_CHECKING
17
18from urllib.parse import urlencode
19from lxml import html
20
21from searx.utils import (
22 eval_xpath,
23 eval_xpath_list,
24 eval_xpath_getindex,
25 extract_text,
26)
27
28from searx.engines.google import fetch_traits # pylint: disable=unused-import
29from searx.engines.google import (
30 get_google_info,
31 time_range_dict,
32 filter_mapping,
33 suggestion_xpath,
34 detect_google_sorry,
35)
36from searx.enginelib.traits import EngineTraits
37
38if TYPE_CHECKING:
39 import logging
40
41 logger: logging.Logger
42
43traits: EngineTraits
44
# about — engine metadata shown in SearXNG's preferences / about pages
about = {
    "website": 'https://www.google.com',
    "wikidata_id": 'Q219885',
    "official_api_documentation": 'https://developers.google.com/custom-search',
    "use_official_api": False,  # results are scraped from the HTML UI, not the API
    "require_api_key": False,
    "results": 'HTML',
}

# engine dependent config

categories = ['videos', 'web']
paging = True  # pagination is driven by the 'start' URL parameter in request()
max_page = 50
language_support = True
time_range_support = True  # mapped onto Google's 'tbs=qdr:*' parameter
safesearch = True  # mapped onto Google's 'safe=*' parameter
64
def request(query, params):
    """Build the Google-Videos search request.

    Assembles the query URL (including paging, time-range and safe-search
    parameters) and stores URL, cookies and headers in *params*.
    """

    google_info = get_google_info(params, traits)

    # query arguments in the order Google's async/arc HTML endpoint expects
    args = {
        'q': query,
        'tbm': "vid",
        'start': 10 * params['pageno'],
        **google_info['params'],
        'asearch': 'arc',
        'async': 'use_ac:true,_fmt:html',
    }
    query_url = f"https://{google_info['subdomain']}/search?{urlencode(args)}"

    time_range = params['time_range']
    if time_range in time_range_dict:
        query_url += '&' + urlencode({'tbs': 'qdr:' + time_range_dict[time_range]})

    if 'safesearch' in params:
        query_url += '&' + urlencode({'safe': filter_mapping[params['safesearch']]})

    params['url'] = query_url
    params['cookies'] = google_info['cookies']
    params['headers'].update(google_info['headers'])
    return params
96
97
def response(resp):
    """Parse Google's video search response into SearXNG result dicts."""
    results = []

    detect_google_sorry(resp)

    # build a DOM tree from the HTML payload
    dom = html.fromstring(resp.text)

    # one result per container div whose class list contains "g "
    for item in eval_xpath_list(dom, '//div[contains(@class, "g ")]'):

        thumbnail = eval_xpath_getindex(item, './/img/@src', 0, None)
        if thumbnail is None:
            # entries without a preview image are skipped
            continue

        title = extract_text(eval_xpath_getindex(item, './/a/h3[1]', 0))
        url = eval_xpath_getindex(item, './/a/h3[1]/../@href', 0)

        content = extract_text(eval_xpath_getindex(item, './/div[@class="ITZIwc"]', 0))
        pub_info = extract_text(eval_xpath(item, './/div[@class="gqF9jc"]'))

        results.append({
            'url': url,
            'title': title,
            'content': content,
            'author': pub_info,
            'thumbnail': thumbnail,
            'template': 'videos.html',
        })

    # collect query suggestions offered by Google
    results.extend(
        {'suggestion': extract_text(node)}
        for node in eval_xpath_list(dom, suggestion_xpath)
    )

    return results