def request(query, params):
    '''Build request parameters (see :ref:`engine request`).

    Fills in ``params['url']``, ``params['method']``, cookies, headers and
    (optionally) ``params['data']`` from the engine's module-level settings.

    :param query: the raw search query string.
    :param params: the engine request parameter dict; mutated in place and
        returned.
    '''
    # NOTE(review): the ``def`` line and several structural lines were missing
    # from the extracted source; signature and defaults reconstructed from the
    # standard engine interface — confirm against the original file.
    lang = params['language']
    if lang != 'all':
        # keep only the two-letter language code (e.g. 'en' from 'en-US')
        lang = lang[:2]

    time_range = ''
    if params.get('time_range'):
        time_range_val = time_range_map.get(params.get('time_range'))
        time_range = time_range_url.format(time_range_val=time_range_val)

    safe_search = ''
    if params['safesearch']:
        safe_search = safe_search_map[params['safesearch']]

    fargs = {
        # urlencode({'q': query}) yields 'q=...'; [2:] strips the 'q=' prefix
        # so only the encoded query value is substituted into the URL template.
        'query': urlencode({'q': query})[2:],
        'lang': lang,  # presumably used by search_url templates — confirm
        'pageno': (params['pageno'] - 1) * page_size + first_page_num,
        'time_range': time_range,
        'safe_search': safe_search,
    }

    params['cookies'].update(cookies)
    params['headers'].update(headers)

    params['url'] = search_url.format(**fargs)
    params['method'] = method

    if request_body:
        # a query sent in the request body is not url-encoded
        fargs['query'] = query
        params['data'] = request_body.format(**fargs)

    params['soft_max_redirects'] = soft_max_redirects
    # HTTP errors are inspected in response() (no_result_for_http_status),
    # so don't raise on them automatically.
    params['raise_for_httperror'] = False
    return params
def response(resp):
    """Scrap *results* from the response (see :ref:`result types`).

    :param resp: the HTTP response object; its body is parsed as HTML and the
        engine's configured XPath expressions are applied to it.
    :returns: list of result dicts (and optional suggestion dicts).
    """
    # NOTE(review): the ``def`` line, ``return`` statements and the branch
    # guards (``if results_xpath:`` / ``elif cached_xpath:`` / ``else:``) were
    # missing from the extracted source and were reconstructed — confirm
    # against the original file.
    if no_result_for_http_status and resp.status_code in no_result_for_http_status:
        # a listed status code means "no results", not an error
        return []

    raise_for_httperror(resp)

    results = []
    dom = html.fromstring(resp.text)
    is_onion = 'onions' in categories

    if results_xpath:
        # results_xpath selects one container node per result; the field
        # expressions are evaluated relative to each container.
        for result in eval_xpath_list(dom, results_xpath):
            url = extract_url(eval_xpath_list(result, url_xpath, min_len=1), search_url)
            title = extract_text(eval_xpath_list(result, title_xpath, min_len=1))
            content = extract_text(eval_xpath_list(result, content_xpath))
            tmp_result = {'url': url, 'title': title, 'content': content}

            # add thumbnail if available
            if thumbnail_xpath:
                thumbnail_xpath_result = eval_xpath_list(result, thumbnail_xpath)
                if len(thumbnail_xpath_result) > 0:
                    tmp_result['thumbnail'] = extract_url(thumbnail_xpath_result, search_url)

            # add alternative cached url if available
            if cached_xpath:
                tmp_result['cached_url'] = cached_url + extract_text(
                    eval_xpath_list(result, cached_xpath, min_len=1)
                )

            if is_onion:
                tmp_result['is_onion'] = True

            results.append(tmp_result)

    elif cached_xpath:
        # no container XPath: evaluate each field expression over the whole
        # document and pair the resulting lists positionally
        for url, title, content, cached in zip(
            (extract_url(x, search_url) for x in eval_xpath_list(dom, url_xpath)),
            map(extract_text, eval_xpath_list(dom, title_xpath)),
            map(extract_text, eval_xpath_list(dom, content_xpath)),
            map(extract_text, eval_xpath_list(dom, cached_xpath)),
        ):
            results.append(
                {
                    'url': url,
                    'title': title,
                    'content': content,
                    'cached_url': cached_url + cached,
                    'is_onion': is_onion,
                }
            )
    else:
        for url, title, content in zip(
            (extract_url(x, search_url) for x in eval_xpath_list(dom, url_xpath)),
            map(extract_text, eval_xpath_list(dom, title_xpath)),
            map(extract_text, eval_xpath_list(dom, content_xpath)),
        ):
            results.append({'url': url, 'title': title, 'content': content, 'is_onion': is_onion})

    if suggestion_xpath:
        for suggestion in eval_xpath(dom, suggestion_xpath):
            results.append({'suggestion': extract_text(suggestion)})

    logger.debug("found %s results", len(results))
    return results