def request(query, params):
    '''Build request parameters (see :ref:`engine request`).'''
    if params['language'] != 'all':
        lang = params['language'][:2]
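    # note: only the two-letter prefix of the selected language is kept
    # (e.g. 'en' from 'en-US')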

    time_range = ''
    if params.get('time_range'):
        time_range_val = time_range_map.get(params.get('time_range'))
        time_range = time_range_url.format(time_range_val=time_range_val)
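
    # time_range_map / time_range_url come from the engine configuration; a
    # purely illustrative (hypothetical) setup could look like:
    #   time_range_map = {'day': 1, 'week': 7, 'month': 30, 'year': 365}
    #   time_range_url = '&days={time_range_val}'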

    safe_search = ''
    if params['safesearch']:
        safe_search = safe_search_map[params['safesearch']]
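
    # safe_search_map maps the safesearch levels 0, 1 and 2 to engine specific
    # URL fragments; an illustrative (hypothetical) example:
    #   safe_search_map = {0: '&ff=', 1: '&ff=on', 2: '&ff=on'}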

    fargs = {
        'query': urlencode({'q': query})[2:],
        'pageno': (params['pageno'] - 1) * page_size + first_page_num,
        'time_range': time_range,
        'safe_search': safe_search,
    }

    params['cookies'].update(cookies)
    params['headers'].update(headers)

    params['url'] = search_url.format(**fargs)
    params['method'] = method
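
    # with a hypothetical template such as
    #   search_url = 'https://example.org/search?q={query}&p={pageno}{time_range}{safe_search}'
    # str.format(**fargs) fills the placeholders; keys without a matching
    # placeholder are simply ignored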

    if request_body:
        # send a POST request with the query in the body
        fargs['query'] = query
        params['data'] = request_body.format(**fargs)
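
    # request_body is only set for engines queried via POST; an illustrative
    # (hypothetical) template:
    #   request_body = 'q={query}&page={pageno}'
    # note that inside this branch the raw query replaces the urlencoded one
    # in fargs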

    params['soft_max_redirects'] = soft_max_redirects
    params['raise_for_httperror'] = False

    return params
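

# raise_for_httperror is disabled in request() above because response() first
# checks no_result_for_http_status itself and only then raises for real HTTP
# errors.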
def response(resp):
    '''Scrape *results* from the response (see :ref:`engine results`).'''
    if no_result_for_http_status and resp.status_code in no_result_for_http_status:
        return []

    raise_for_httperror(resp)
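
    # the check above turns status codes listed in no_result_for_http_status
    # (e.g. a hypothetical no_result_for_http_status = [404]) into an empty
    # result list instead of an error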

    results = []

    dom = html.fromstring(resp.text)
    is_onion = 'onions' in categories

    if results_xpath:
        for result in eval_xpath_list(dom, results_xpath):

            url = extract_url(eval_xpath_list(result, url_xpath, min_len=1), search_url)
            title = extract_text(eval_xpath_list(result, title_xpath, min_len=1))
            content = extract_text(eval_xpath_list(result, content_xpath))
            tmp_result = {'url': url, 'title': title, 'content': content}

            # add thumbnail if available
            if thumbnail_xpath:
                thumbnail_xpath_result = eval_xpath_list(result, thumbnail_xpath)
                if len(thumbnail_xpath_result) > 0:
                    tmp_result['thumbnail'] = extract_url(thumbnail_xpath_result, search_url)

            # add alternative cached url if available
            if cached_xpath:
                tmp_result['cached_url'] = cached_url + extract_text(eval_xpath_list(result, cached_xpath, min_len=1))

            if is_onion:
                tmp_result['is_onion'] = True

            results.append(tmp_result)
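
        # illustrative (hypothetical) selectors for this per-result mode:
        #   results_xpath = '//div[@class="result"]'
        #   url_xpath     = './/a/@href'      # evaluated relative to each result
        #   title_xpath   = './/a/h3'
        #   content_xpath = './/p[@class="snippet"]'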

    else:
        if cached_xpath:
            for url, title, content, cached in zip(
                (extract_url(x, search_url) for x in eval_xpath_list(dom, url_xpath)),
                map(extract_text, eval_xpath_list(dom, title_xpath)),
                map(extract_text, eval_xpath_list(dom, content_xpath)),
                map(extract_text, eval_xpath_list(dom, cached_xpath)),
            ):
                results.append(
                    {
                        'url': url,
                        'title': title,
                        'content': content,
                        'cached_url': cached_url + cached,
                        'is_onion': is_onion,
                    }
                )
        else:
            for url, title, content in zip(
                (extract_url(x, search_url) for x in eval_xpath_list(dom, url_xpath)),
                map(extract_text, eval_xpath_list(dom, title_xpath)),
                map(extract_text, eval_xpath_list(dom, content_xpath)),
            ):
                results.append({'url': url, 'title': title, 'content': content, 'is_onion': is_onion})
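
        # both fallback branches evaluate each xpath once over the whole page
        # and zip() the n-th url, title and content together, which assumes the
        # individual node lists are aligned (zip stops at the shortest one)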

    if suggestion_xpath:
        for suggestion in eval_xpath(dom, suggestion_xpath):
            results.append({'suggestion': extract_text(suggestion)})

    logger.debug("found %s results", len(results))
    return results