| field | value |
|---|---|
| author | Marc Abonce Seguin <marc-abonce@mailbox.org>, 2018-11-25 23:32:48 -0600 |
| committer | Marc Abonce Seguin <marc-abonce@mailbox.org>, 2019-01-06 20:31:57 -0600 |
| commit | 5568f24d6ca9ae9a7eca27c107982b2689e40105 (patch) |
| tree | c67749e9dfeb4a27cef9a550fe366b291ead3373 /searx/engines |
| parent | 3c95d64ff8f5f42a69b05d721265c6e484edc451 (diff) |
| download | searxng-5568f24d6ca9ae9a7eca27c107982b2689e40105.tar.gz, searxng-5568f24d6ca9ae9a7eca27c107982b2689e40105.zip |
[fix] check language aliases when setting search language
Diffstat (limited to 'searx/engines')
| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | searx/engines/bing_images.py | 2 |
| -rw-r--r-- | searx/engines/bing_videos.py | 2 |
| -rw-r--r-- | searx/engines/google.py | 2 |
| -rw-r--r-- | searx/engines/google_news.py | 2 |
| -rw-r--r-- | searx/engines/qwant.py | 2 |
| -rw-r--r-- | searx/engines/swisscows.py | 2 |
| -rw-r--r-- | searx/engines/wikidata.py | 2 |
| -rw-r--r-- | searx/engines/wikipedia.py | 2 |
8 files changed, 8 insertions, 8 deletions
diff --git a/searx/engines/bing_images.py b/searx/engines/bing_images.py
index 876011f1d..e2495200c 100644
--- a/searx/engines/bing_images.py
+++ b/searx/engines/bing_images.py
@@ -55,7 +55,7 @@ def request(query, params):
         query=urlencode({'q': query}),
         offset=offset)
 
-    language = match_language(params['language'], supported_languages).lower()
+    language = match_language(params['language'], supported_languages, language_aliases).lower()
 
     params['cookies']['SRCHHPGUSR'] = \
         'ADLT=' + safesearch_types.get(params['safesearch'], 'DEMOTE')
diff --git a/searx/engines/bing_videos.py b/searx/engines/bing_videos.py
index 7002ac861..bf17f9168 100644
--- a/searx/engines/bing_videos.py
+++ b/searx/engines/bing_videos.py
@@ -48,7 +48,7 @@ def request(query, params):
         'ADLT=' + safesearch_types.get(params['safesearch'], 'DEMOTE')
 
     # language cookie
-    language = match_language(params['language'], supported_languages).lower()
+    language = match_language(params['language'], supported_languages, language_aliases).lower()
     params['cookies']['_EDGE_S'] = 'mkt=' + language + '&F=1'
 
     # query and paging
diff --git a/searx/engines/google.py b/searx/engines/google.py
index 62e7d1170..49d7f6499 100644
--- a/searx/engines/google.py
+++ b/searx/engines/google.py
@@ -166,7 +166,7 @@ def extract_text_from_dom(result, xpath):
 def request(query, params):
     offset = (params['pageno'] - 1) * 10
 
-    language = match_language(params['language'], supported_languages)
+    language = match_language(params['language'], supported_languages, language_aliases)
     language_array = language.split('-')
     if params['language'].find('-') > 0:
         country = params['language'].split('-')[1]
diff --git a/searx/engines/google_news.py b/searx/engines/google_news.py
index aadcb76df..bf5995c88 100644
--- a/searx/engines/google_news.py
+++ b/searx/engines/google_news.py
@@ -51,7 +51,7 @@ def request(query, params):
     params['url'] = search_url.format(query=urlencode({'q': query}),
                                       search_options=urlencode(search_options))
 
-    language = match_language(params['language'], supported_languages).split('-')[0]
+    language = match_language(params['language'], supported_languages, language_aliases).split('-')[0]
     if language:
         params['url'] += '&lr=lang_' + language
 
diff --git a/searx/engines/qwant.py b/searx/engines/qwant.py
index 4b0f1c87c..1bef07cc7 100644
--- a/searx/engines/qwant.py
+++ b/searx/engines/qwant.py
@@ -46,7 +46,7 @@ def request(query, params):
         offset=offset)
 
     # add language tag
-    language = match_language(params['language'], supported_languages)
+    language = match_language(params['language'], supported_languages, language_aliases)
     params['url'] += '&locale=' + language.replace('-', '_').lower()
 
     return params
diff --git a/searx/engines/swisscows.py b/searx/engines/swisscows.py
index ff4df24b7..0001d56bf 100644
--- a/searx/engines/swisscows.py
+++ b/searx/engines/swisscows.py
@@ -36,7 +36,7 @@ regex_img_url_remove_start = re.compile(b'^https?://i\.swisscows\.ch/\?link=')
 
 # do search-request
 def request(query, params):
-    region = match_language(params['language'], supported_languages)
+    region = match_language(params['language'], supported_languages, language_aliases)
     ui_language = region.split('-')[0]
 
     search_path = search_string.format(
diff --git a/searx/engines/wikidata.py b/searx/engines/wikidata.py
index c315b30da..2485b6528 100644
--- a/searx/engines/wikidata.py
+++ b/searx/engines/wikidata.py
@@ -68,7 +68,7 @@ def response(resp):
     html = fromstring(resp.text)
     search_results = html.xpath(wikidata_ids_xpath)
 
-    language = match_language(resp.search_params['language'], supported_languages).split('-')[0]
+    language = match_language(resp.search_params['language'], supported_languages, language_aliases).split('-')[0]
 
     # TODO: make requests asynchronous to avoid timeout when result_count > 1
     for search_result in search_results[:result_count]:
diff --git a/searx/engines/wikipedia.py b/searx/engines/wikipedia.py
index 6cd17e378..23f23db4d 100644
--- a/searx/engines/wikipedia.py
+++ b/searx/engines/wikipedia.py
@@ -31,7 +31,7 @@ supported_languages_url = 'https://meta.wikimedia.org/wiki/List_of_Wikipedias'
 
 # set language in base_url
 def url_lang(lang):
-    return match_language(lang, supported_languages).split('-')[0]
+    return match_language(lang, supported_languages, language_aliases).split('-')[0]
 
 
 # do search-request
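Every hunk makes the same one-line change: the engine's language_aliases mapping is now passed as the third argument to match_language, so locales that an engine spells in its own way can still be matched against its supported-language list. The sketch below only illustrates the general idea of alias-aware matching; the function body, the example aliases, and the supported-language list are illustrative assumptions, not the actual helper shipped with searx.

```python
# Minimal sketch of alias-aware language matching (assumed behaviour, not the
# real searx helper): engine-specific aliases are consulted before falling
# back to plain language-tag matching.
def match_language(locale, supported_languages, language_aliases=None, fallback='en-US'):
    aliases = language_aliases or {}
    if locale in aliases:               # exact alias, e.g. 'zh-CN' -> 'zh-CHS'
        return aliases[locale]
    if locale in supported_languages:   # exact match against the engine's list
        return locale
    base = locale.split('-')[0]         # drop the region: 'pt-BR' -> 'pt'
    if base in aliases:
        return aliases[base]
    if base in supported_languages:
        return base
    return fallback


# Hypothetical engine that advertises 'zh-CHS' instead of the standard 'zh-CN'
supported = ['en-US', 'zh-CHS', 'pt']
print(match_language('zh-CN', supported, {'zh-CN': 'zh-CHS'}))  # -> zh-CHS
print(match_language('pt-BR', supported))                       # -> pt
print(match_language('fr-FR', supported))                        # -> en-US (fallback)
```

Because the aliases argument defaults to an empty mapping, engines without aliases behave as before, which is why the only modification each file needs is adding language_aliases to the call.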