author     Alexandre Flament <alex@al-f.net>          2021-02-12 10:56:40 +0100
committer  GitHub <noreply@github.com>                2021-02-12 10:56:40 +0100
commit     7e83818879a48fef84a518092d833e3785c64ff2 (patch)
tree       2720ac7268671fe0d40243468e3071422077375b /searx/engines
parent     63d6ccfbc2f1a61a2b0b9040d89858a540ede475 (diff)
parent     d2dac11392c89084e8d6143f09c27c5fefabdef9 (diff)
Merge pull request #2560 from dalf/fix-duckduckgo
Fix duckduckgo
Diffstat (limited to 'searx/engines')
-rw-r--r--  searx/engines/duckduckgo.py  18
1 file changed, 10 insertions(+), 8 deletions(-)
diff --git a/searx/engines/duckduckgo.py b/searx/engines/duckduckgo.py
index 7f1378264..638f1211b 100644
--- a/searx/engines/duckduckgo.py
+++ b/searx/engines/duckduckgo.py
@@ -5,7 +5,8 @@
 from lxml.html import fromstring
 from json import loads
-from searx.utils import extract_text, match_language, eval_xpath
+from searx.utils import extract_text, match_language, eval_xpath, dict_subset
+from searx.poolrequests import get
 # about
 about = {
@@ -35,6 +36,7 @@ language_aliases = {
 # search-url
 url = 'https://html.duckduckgo.com/html'
+url_ping = 'https://duckduckgo.com/t/sl_h'
 time_range_dict = {'day': 'd',
                    'week': 'w',
                    'month': 'm'}
@@ -65,27 +67,27 @@ def request(query, params):
     params['url'] = url
     params['method'] = 'POST'
-    params['data']['b'] = ''
     params['data']['q'] = query
-    params['data']['df'] = ''
+    params['data']['b'] = ''
     region_code = get_region_code(params['language'], supported_languages)
     if region_code:
         params['data']['kl'] = region_code
         params['cookies']['kl'] = region_code
-    if params['time_range'] in time_range_dict:
-        params['data']['df'] = time_range_dict[params['time_range']]
+    params['data']['df'] = time_range_dict.get(params['time_range'], '')
     return params
 # get response from search-request
 def response(resp):
-    results = []
+    # ping
+    headers_ping = dict_subset(resp.request.headers, ['User-Agent', 'Accept-Encoding', 'Accept', 'Cookie'])
+    get(url_ping, headers=headers_ping)
+    # parse the response
+    results = []
     doc = fromstring(resp.text)
-
-    # parse results
     for i, r in enumerate(eval_xpath(doc, result_xpath)):
         if i >= 30:
             break
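
Below is a minimal standalone sketch of the two behaviours this patch introduces: the single .get() fallback for the time-range parameter and the header-subset ping issued from response(). It is an illustration only, not the engine module; the requests library stands in for searx.poolrequests.get, dict_subset here is a simplified stand-in for searx.utils.dict_subset, and the helper names build_time_range and ping are hypothetical.

# Minimal sketch of the patched behaviour; not the searx engine module itself.
import requests  # stand-in for searx.poolrequests.get

time_range_dict = {'day': 'd', 'week': 'w', 'month': 'm'}
url_ping = 'https://duckduckgo.com/t/sl_h'


def dict_subset(dictionary, keys):
    # simplified stand-in for searx.utils.dict_subset:
    # keep only the listed keys that are actually present
    return {key: dictionary[key] for key in keys if key in dictionary}


def build_time_range(time_range):
    # after the patch a single .get() replaces the guarded assignment;
    # an unknown or unset time range falls back to the empty string
    return time_range_dict.get(time_range, '')


def ping(resp):
    # forward a subset of the original request headers so the ping to
    # url_ping carries the same User-Agent, Accept-* and Cookie values
    headers_ping = dict_subset(
        resp.request.headers,
        ['User-Agent', 'Accept-Encoding', 'Accept', 'Cookie'])
    requests.get(url_ping, headers=headers_ping)


if __name__ == '__main__':
    assert build_time_range('week') == 'w'
    assert build_time_range(None) == ''  # unset time range sends df=''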