path: root/searx/engines/openstreetmap.py
author     Alexandre Flament <alex@al-f.net>  2023-08-22 17:52:12 +0000
committer  Alexandre Flament <alex@al-f.net>  2023-08-27 11:49:16 +0200
commit     e16c007c220b405484dccc8e285b67bcd7c2b3bf (patch)
tree       b3e568c2f002e9a6fc9fd52f29769b71730f1ae7 /searx/engines/openstreetmap.py
parent     0647f83a3ea3ce2bd8c5f5fc59bb676be2a790e2 (diff)
[fix] openstreetmap engine
It seems there is an API change: extratags can be either a dictionary or None. This commit avoids a crash when extratags is None. Test query: "!osm gare du nord"
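
A minimal standalone sketch of the normalization this commit adds in response(), assuming illustrative placeholder results rather than real Nominatim output:

# Sketch only: the sample results are placeholders, not real Nominatim output.
nominatim_json = [
    {"display_name": "Gare du Nord", "extratags": {"wikidata": "Q123"}},  # placeholder wikidata ID
    {"display_name": "Some other place", "extratags": None},              # this shape used to crash
]

# Make sure extratags is always a dictionary, so downstream code can call
# .get() and delete keys without checking for None first.
for result in nominatim_json:
    if not isinstance(result.get("extratags"), dict):
        result["extratags"] = {}

for result in nominatim_json:
    # Safe even for the result whose extratags was None.
    print(result["display_name"], result["extratags"].get("wikidata"))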
Diffstat (limited to 'searx/engines/openstreetmap.py')
-rw-r--r--  searx/engines/openstreetmap.py  64
1 file changed, 38 insertions, 26 deletions
diff --git a/searx/engines/openstreetmap.py b/searx/engines/openstreetmap.py
index 4f799fce7..9ef535f6c 100644
--- a/searx/engines/openstreetmap.py
+++ b/searx/engines/openstreetmap.py
@@ -162,8 +162,15 @@ def response(resp):
}
)
+ # simplify the code below: make sure extratags is a dictionary
+ for result in nominatim_json:
+ if not isinstance(result.get('extratags'), dict):
+ result["extratags"] = {}
+
+ # fetch data from wikidata
fetch_wikidata(nominatim_json, user_language)
+ # create results
for result in nominatim_json:
title, address = get_title_address(result)
@@ -218,13 +225,12 @@ def fetch_wikidata(nominatim_json, user_language):
wikidata_ids = []
wd_to_results = {}
for result in nominatim_json:
- e = result.get("extratags")
- if e:
- # ignore brand:wikidata
- wd_id = e.get("wikidata", e.get("wikidata link"))
- if wd_id and wd_id not in wikidata_ids:
- wikidata_ids.append("wd:" + wd_id)
- wd_to_results.setdefault(wd_id, []).append(result)
+ extratags = result['extratags']
+ # ignore brand:wikidata
+ wd_id = extratags.get('wikidata', extratags.get('wikidata link'))
+ if wd_id and wd_id not in wikidata_ids:
+ wikidata_ids.append('wd:' + wd_id)
+ wd_to_results.setdefault(wd_id, []).append(result)
if wikidata_ids:
user_language = 'en' if user_language == 'all' else user_language.split('-')[0]
@@ -334,12 +340,13 @@ def get_img_src(result):
img_src = result['wikidata']['image_sign']
# img_src
- if not img_src and result.get('extratags', {}).get('image'):
- img_src = result['extratags']['image']
- del result['extratags']['image']
- if not img_src and result.get('extratags', {}).get('wikimedia_commons'):
- img_src = get_external_url('wikimedia_image', result['extratags']['wikimedia_commons'])
- del result['extratags']['wikimedia_commons']
+ extratags = result['extratags']
+ if not img_src and extratags.get('image'):
+ img_src = extratags['image']
+ del extratags['image']
+ if not img_src and extratags.get('wikimedia_commons'):
+ img_src = get_external_url('wikimedia_image', extratags['wikimedia_commons'])
+ del extratags['wikimedia_commons']
return img_src
@@ -348,20 +355,25 @@ def get_links(result, user_language):
"""Return links from result['extratags']"""
links = []
link_keys = set()
+ extratags = result['extratags']
+ if not extratags:
+ # minor optimization: no need to check VALUE_TO_LINK if extratags is empty
+ return links, link_keys
for k, mapping_function in VALUE_TO_LINK.items():
- raw_value = result['extratags'].get(k)
- if raw_value:
- url, url_label = mapping_function(raw_value)
- if url.startswith('https://wikidata.org'):
- url_label = result.get('wikidata', {}).get('itemLabel') or url_label
- links.append(
- {
- 'label': get_key_label(k, user_language),
- 'url': url,
- 'url_label': url_label,
- }
- )
- link_keys.add(k)
+ raw_value = extratags.get(k)
+ if not raw_value:
+ continue
+ url, url_label = mapping_function(raw_value)
+ if url.startswith('https://wikidata.org'):
+ url_label = result.get('wikidata', {}).get('itemLabel') or url_label
+ links.append(
+ {
+ 'label': get_key_label(k, user_language),
+ 'url': url,
+ 'url_label': url_label,
+ }
+ )
+ link_keys.add(k)
return links, link_keys
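
For illustration, a hedged sketch of the early-return shape that get_links() now follows: once extratags is guaranteed to be a dict, an empty dict skips the VALUE_TO_LINK loop entirely. The VALUE_TO_LINK table and helper name below are stand-ins, not the engine's actual definitions.

# Stand-in for the engine's VALUE_TO_LINK mapping (illustrative only).
VALUE_TO_LINK = {
    "website": lambda value: (value, value),
}

def get_links_sketch(extratags):
    """Mirror the early-return structure of get_links(); not the engine's actual code."""
    links = []
    link_keys = set()
    if not extratags:
        # minor optimization: no need to walk VALUE_TO_LINK when extratags is empty
        return links, link_keys
    for key, mapping_function in VALUE_TO_LINK.items():
        raw_value = extratags.get(key)
        if not raw_value:
            continue
        url, url_label = mapping_function(raw_value)
        links.append({"label": key, "url": url, "url_label": url_label})
        link_keys.add(key)
    return links, link_keys

print(get_links_sketch({}))                                  # ([], set())
print(get_links_sketch({"website": "https://example.org"}))  # one link entry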