import re
import urllib.parse
from functools import partial

from flask_babel import gettext

from searx.data import OSM_KEYS_TAGS, CURRENCIES
from searx.external_urls import get_external_url
from searx.engines.wikidata import send_wikidata_query, sparql_string_escape, get_thumbnail
from searx.result_types import EngineResults
20 "website":
'https://www.openstreetmap.org/',
21 "wikidata_id":
'Q936',
22 "official_api_documentation":
'http://wiki.openstreetmap.org/wiki/Nominatim',
23 "use_official_api":
True,
24 "require_api_key":
False,
language_support = True
send_accept_language_header = True
base_url = 'https://nominatim.openstreetmap.org/'
search_string = 'search?{query}&polygon_geojson=1&format=jsonv2&addressdetails=1&extratags=1&dedupe=1'
result_id_url = 'https://openstreetmap.org/{osm_type}/{osm_id}'
result_lat_lon_url = 'https://www.openstreetmap.org/?mlat={lat}&mlon={lon}&zoom={zoom}&layers=M'

route_url = 'https://graphhopper.com/maps'
wikidata_image_sparql = """
select ?item ?itemLabel ?image ?sign ?symbol ?website ?wikipediaName
where {
  hint:Query hint:optimizer "None".
  values ?item { %WIKIDATA_IDS% }
  OPTIONAL { ?item wdt:P18|wdt:P8517|wdt:P4291|wdt:P5252|wdt:P3451|wdt:P4640|wdt:P5775|wdt:P2716|wdt:P1801|wdt:P4896 ?image }
  OPTIONAL { ?item wdt:P1766|wdt:P8505|wdt:P8667 ?sign }
  OPTIONAL { ?item wdt:P41|wdt:P94|wdt:P154|wdt:P158|wdt:P2910|wdt:P4004|wdt:P5962|wdt:P8972 ?symbol }
  OPTIONAL { ?item wdt:P856 ?website }
  SERVICE wikibase:label {
    bd:serviceParam wikibase:language "%LANGUAGE%,en".
    ?item rdfs:label ?itemLabel .
  }
  OPTIONAL {
    ?wikipediaUrl schema:about ?item;
                  schema:isPartOf/wikibase:wikiGroup "wikipedia";
                  schema:name ?wikipediaName;
                  schema:inLanguage "%LANGUAGE%" .
  }
}
"""
def value_to_https_link(value):
    http = 'http://'
    if value.startswith(http):
        value = 'https://' + value[len(http) :]
    return (value, value)


def value_to_website_link(value):
    value = value.split(';')[0]
    return (value, value)


def value_wikipedia_link(value):
    value = value.split(':', 1)
    return ('https://{0}.wikipedia.org/wiki/{1}'.format(*value), '{1} ({0})'.format(*value))


def value_with_prefix(prefix, value):
    return (prefix + value, value)
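# Illustrative examples (input values assumed, not taken from the engine itself); each
# helper returns an (url, label) pair that get_links() turns into a displayed link:
#   value_to_https_link('http://example.org')  ->  ('https://example.org', 'https://example.org')
#   value_wikipedia_link('en:Paris')           ->  ('https://en.wikipedia.org/wiki/Paris', 'Paris (en)')
#   value_with_prefix('tel:', '+123456789')    ->  ('tel:+123456789', '+123456789')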
VALUE_TO_LINK = {
    'website': value_to_website_link,
    'contact:website': value_to_website_link,
    'email': partial(value_with_prefix, 'mailto:'),
    'contact:email': partial(value_with_prefix, 'mailto:'),
    'contact:phone': partial(value_with_prefix, 'tel:'),
    'phone': partial(value_with_prefix, 'tel:'),
    'fax': partial(value_with_prefix, 'fax:'),
    'contact:fax': partial(value_with_prefix, 'fax:'),
    'contact:mastodon': value_to_https_link,
    'facebook': value_to_https_link,
    'contact:facebook': value_to_https_link,
    'contact:foursquare': value_to_https_link,
    'contact:instagram': value_to_https_link,
    'contact:linkedin': value_to_https_link,
    'contact:pinterest': value_to_https_link,
    'contact:telegram': value_to_https_link,
    'contact:tripadvisor': value_to_https_link,
    'contact:twitter': value_to_https_link,
    'contact:yelp': value_to_https_link,
    'contact:youtube': value_to_https_link,
    'contact:webcam': value_to_website_link,
    'wikipedia': value_wikipedia_link,
    'wikidata': partial(value_with_prefix, 'https://wikidata.org/wiki/'),
    'brand:wikidata': partial(value_with_prefix, 'https://wikidata.org/wiki/'),
}
KEY_ORDER = [
    # ...
    'opening_hours:covid19',
    # ...
    'internet_access:ssid',
]

KEY_RANKS = {k: i for i, k in enumerate(KEY_ORDER)}
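# KEY_RANKS maps each KEY_ORDER entry to its position in the list (lower rank = shown
# earlier); get_key_rank() below uses it to sort the attributes displayed in the result.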
def request(query, params):
    params['url'] = base_url + search_string.format(query=urllib.parse.urlencode({'q': query}))
    return params
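# Illustrative example (query value assumed): for query = 'Paris' the request URL becomes
# 'https://nominatim.openstreetmap.org/search?q=Paris&polygon_geojson=1&format=jsonv2'
# '&addressdetails=1&extratags=1&dedupe=1'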
def response(resp) -> EngineResults:
    results = EngineResults()
    nominatim_json = resp.json()
    user_language = resp.search_params['language']

    l = re.findall(r"from\s+(.*)\s+to\s+(.+)", resp.search_params["query"])
    if not l:
        l = re.findall(r"\s*(.*)\s+to\s+(.+)", resp.search_params["query"])
    if l:
        point1, point2 = [urllib.parse.quote_plus(p) for p in l[0]]
        results.add(
            results.types.Answer(
                answer=gettext('Show route in map ..'),
                url=f"{route_url}/?point={point1}&point={point2}",
            )
        )
    for result in nominatim_json:
        if not isinstance(result.get('extratags'), dict):
            result["extratags"] = {}
    fetch_wikidata(nominatim_json, user_language)

    for result in nominatim_json:
        title, address = get_title_address(result)
        url, osm, geojson = get_url_osm_geojson(result)
        thumbnail = get_thumbnail(get_img_src(result))
        links, link_keys = get_links(result, user_language)
        data = get_data(result, user_language, link_keys)
        results.append(
            {
                'template': 'map.html',
                'title': title,
                'address': address,
                'url': url,
                'osm': osm,
                'geojson': geojson,
                'thumbnail': thumbnail,
                'links': links,
                'data': data,
                'type': get_tag_label(result.get('category'), result.get('type', ''), user_language),
                'type_icon': result.get('icon'),
                'longitude': result['lon'],
                'latitude': result['lat'],
                'boundingbox': result['boundingbox'],
            }
        )

    return results
def get_wikipedia_image(raw_value):
    if not raw_value:
        return None
    return get_external_url('wikimedia_image', raw_value)
215 """Update nominatim_json using the result of an unique to wikidata
217 For result in nominatim_json:
218 If result['extratags']['wikidata'] or r['extratags']['wikidata link']:
219 Set result['wikidata'] to { 'image': ..., 'image_sign':..., 'image_symbal':... }
220 Set result['extratags']['wikipedia'] if not defined
221 Set result['extratags']['contact:website'] if not defined
    wikidata_ids = []
    wd_to_results = {}
    for result in nominatim_json:
        extratags = result['extratags']
        # the Wikidata item ID is in either the 'wikidata' or the 'wikidata link' tag
        wd_id = extratags.get('wikidata', extratags.get('wikidata link'))
        if wd_id and wd_id not in wikidata_ids:
            wikidata_ids.append('wd:' + wd_id)
            wd_to_results.setdefault(wd_id, []).append(result)
    user_language = 'en' if user_language == 'all' else user_language.split('-')[0]
    wikidata_ids_str = " ".join(wikidata_ids)
    query = wikidata_image_sparql.replace('%WIKIDATA_IDS%', sparql_string_escape(wikidata_ids_str)).replace(
        '%LANGUAGE%', sparql_string_escape(user_language)
    )
    wikidata_json = send_wikidata_query(query)
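    # Illustrative example (ids and language assumed): with wikidata_ids == ['wd:Q243', 'wd:Q90']
    # and user_language == 'fr', the %WIKIDATA_IDS% placeholder becomes "wd:Q243 wd:Q90" and
    # %LANGUAGE% becomes "fr", so the SPARQL query above returns labels, images and Wikipedia
    # article names for exactly those items.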
    for wd_result in wikidata_json.get('results', {}).get('bindings', {}):
        wd_id = wd_result['item']['value'].replace('http://www.wikidata.org/entity/', '')
        for result in wd_to_results.get(wd_id, []):
            result['wikidata'] = {
                'itemLabel': wd_result['itemLabel']['value'],
                'image': get_wikipedia_image(wd_result.get('image', {}).get('value')),
                'image_sign': get_wikipedia_image(wd_result.get('sign', {}).get('value')),
                'image_symbol': get_wikipedia_image(wd_result.get('symbol', {}).get('value')),
            }
            # overwrite the wikipedia link
            wikipedia_name = wd_result.get('wikipediaName', {}).get('value')
            if wikipedia_name:
                result['extratags']['wikipedia'] = user_language + ':' + wikipedia_name
            # set the website if not already defined
            website = wd_result.get('website', {}).get('value')
            if (
                website
                and not result['extratags'].get('contact:website')
                and not result['extratags'].get('website')
            ):
                result['extratags']['contact:website'] = website
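# Illustrative example (values assumed): after fetch_wikidata() a result tagged with
# wikidata == 'Q243' may carry
#   result['wikidata'] == {'itemLabel': 'Eiffel Tower', 'image': '...', 'image_sign': None, 'image_symbol': None}
# and, when they were missing, result['extratags']['wikipedia'] and
# result['extratags']['contact:website'] are filled in from the Wikidata answer.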
264 """Return title and address
268 address_raw = result.get(
'address')
274 result[
'category'] ==
'amenity'
275 or result[
'category'] ==
'shop'
276 or result[
'category'] ==
'tourism'
277 or result[
'category'] ==
'leisure'
279 if address_raw.get(
'address29'):
281 address_name = address_raw.get(
'address29')
283 address_name = address_raw.get(result[
'category'])
284 elif result[
'type']
in address_raw:
285 address_name = address_raw.get(result[
'type'])
292 'name': address_name,
293 'house_number': address_raw.get(
'house_number'),
294 'road': address_raw.get(
'road'),
295 'locality': address_raw.get(
296 'city', address_raw.get(
'town', address_raw.get(
'village'))
298 'postcode': address_raw.get(
'postcode'),
299 'country': address_raw.get(
'country'),
300 'country_code': address_raw.get(
'country_code'),
304 title = result.get(
'display_name')
306 return title, address
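# Illustrative example (field values assumed): for a cafe the function may return
#   ('Cafe X, 1 Some Road, Sometown', {'name': 'Cafe X', 'house_number': '1', 'road': 'Some Road',
#    'locality': 'Sometown', 'postcode': '12345', 'country': 'Somewhere', 'country_code': 'xx'})
# where the title is Nominatim's display_name and the address dict feeds the map.html template.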
310 """Get url, osm and geojson"""
311 osm_type = result.get(
'osm_type', result.get(
'type'))
312 if 'osm_id' not in result:
315 url = result_lat_lon_url.format(lat=result[
'lat'], lon=result[
'lon'], zoom=12)
318 url = result_id_url.format(osm_type=osm_type, osm_id=result[
'osm_id'])
319 osm = {
'type': osm_type,
'id': result[
'osm_id']}
321 geojson = result.get(
'geojson')
323 if not geojson
and osm_type ==
'node':
324 geojson = {
'type':
'Point',
'coordinates': [result[
'lon'], result[
'lat']]}
326 return url, osm, geojson
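# Illustrative example (ids assumed): for a node result with osm_id == 123 this returns
#   ('https://openstreetmap.org/node/123', {'type': 'node', 'id': 123}, <geojson>)
# and, when Nominatim sends no geojson for a node, a Point geometry is built from lon/lat.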
330 """Get image URL from either wikidata or r['extratags']"""
333 if 'wikidata' in result:
334 img_src = result[
'wikidata'][
'image']
336 img_src = result[
'wikidata'][
'image_symbol']
338 img_src = result[
'wikidata'][
'image_sign']
341 extratags = result[
'extratags']
342 if not img_src
and extratags.get(
'image'):
343 img_src = extratags[
'image']
344 del extratags[
'image']
345 if not img_src
and extratags.get(
'wikimedia_commons'):
346 img_src = get_external_url(
'wikimedia_image', extratags[
'wikimedia_commons'])
347 del extratags[
'wikimedia_commons']
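# Note on the lookup order above: the Wikidata image is preferred, then the Wikidata
# symbol and sign images, then the OSM 'image' tag and finally 'wikimedia_commons'
# (resolved through get_external_url); the used extratags entries are removed so they
# are not repeated in the attribute list built by get_data().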
353 """Return links from result['extratags']"""
356 extratags = result[
'extratags']
359 return links, link_keys
360 for k, mapping_function
in VALUE_TO_LINK.items():
361 raw_value = extratags.get(k)
364 url, url_label = mapping_function(raw_value)
365 if url.startswith(
'https://wikidata.org'):
366 url_label = result.get(
'wikidata', {}).get(
'itemLabel')
or url_label
371 'url_label': url_label,
375 return links, link_keys
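# Illustrative example (tag values assumed): extratags such as
#   {'phone': '+33 1 40 20 53 17', 'wikidata': 'Q19675'}
# yield links like
#   {'label': ..., 'url': 'tel:+33 1 40 20 53 17', 'url_label': '+33 1 40 20 53 17'}
# and a https://wikidata.org/wiki/Q19675 link whose label is the item label fetched by
# fetch_wikidata(), when available; link_keys records which extratags were consumed.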
379 """Return key, value of result['extratags']
381 Must be call after get_links
383 Note: the values are not translated
386 for k, v
in result[
'extratags'].items():
400 data.sort(key=
lambda entry: (
get_key_rank(entry[
'key']), entry[
'label']))
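# Illustrative example (tags assumed): with 'internet_access:ssid' ranked in KEY_ORDER, an
# extratags entry {'internet_access:ssid': 'MyCafe'} that was not consumed by get_links()
# ends up as {'label': ..., 'key': 'internet_access:ssid', 'value': 'MyCafe'}, sorted by rank.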
def get_key_rank(k):
    """The rank defines in which order the keys are displayed in the HTML result"""
    key_rank = KEY_RANKS.get(k)
    if key_rank is None:
        # e.g. a key "foo:bar" falls back to the rank of a "foo:*" entry
        key_rank = KEY_RANKS.get(k.split(':')[0] + ':*')
    return key_rank
417 """Get label from labels in OSM_KEYS_TAGS
419 in OSM_KEYS_TAGS, labels have key == '*'
421 tag_label = labels.get(lang.lower())
422 if tag_label
is None:
424 tag_label = labels.get(lang.split(
'-')[0])
425 if tag_label
is None and lang !=
'en':
427 tag_label = labels.get(
'en')
428 if tag_label
is None and len(labels.values()) > 0:
430 tag_label = labels.values()[0]
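# Illustrative example (labels assumed): with labels == {'en': 'Museum', 'de': 'Museum'} and
# lang == 'fr-CH', the lookups try 'fr-ch', then 'fr', then 'en' and return 'Museum'.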
435 """Get tag label from OSM_KEYS_TAGS"""
436 tag_name =
'' if tag_name
is None else tag_name
437 tag_labels = OSM_KEYS_TAGS[
'tags'].get(tag_category, {}).get(tag_name, {})
442 """Get key label from OSM_KEYS_TAGS"""
443 if key_name.startswith(
'currency:'):
449 currency = key_name.split(
':')
450 if len(currency) > 1:
451 o = CURRENCIES[
'iso4217'].get(currency[1])
456 labels = OSM_KEYS_TAGS[
'keys']
457 for k
in key_name.split(
':') + [
'*']:
458 labels = labels.get(k)
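# Illustrative example (key assumed): get_key_label('currency:EUR', 'en') looks up 'EUR' in
# CURRENCIES['iso4217'] and is expected to return the currency name, falling back to the raw
# code 'EUR' when it is unknown; other keys are resolved through OSM_KEYS_TAGS and get_label().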