author    Alexandre Flament <alex@al-f.net>    2021-07-16 17:25:37 +0200
committer GitHub <noreply@github.com>          2021-07-16 17:25:37 +0200
commit    f523fd3ea77ed3c8269c68f71f28045dbeffc3e0 (patch)
tree      37cf1878fa8358c76d5d05e810fb51588967414b /searx/engines
parent    5f4d05db335bba9265a101f923b7b1824e572f8f (diff)
parent    1b05ea6a6b1d265007c684063b603b9113943ae2 (diff)
Merge pull request #211 from MarcAbonce/onions_v3_fix_searxng
Update onion engines to v3
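
Background: Tor deprecated v2 onion services in 2021, so the old 16-character .onion hostnames stopped working; v3 addresses use 56 base32 characters. As a hedged aside (not part of this commit), hostname length alone is enough to tell the two formats apart:

# Minimal sketch, not from this commit: classify an onion URL as v2 or v3
# by the length of its base32 label (v2 = 16 chars, v3 = 56 chars).
from urllib.parse import urlparse

def onion_version(url):
    host = urlparse(url).hostname or ''
    if not host.endswith('.onion'):
        return None
    label = host[:-len('.onion')].rsplit('.', 1)[-1]  # drop any subdomains
    return {16: 2, 56: 3}.get(len(label))

# the old and new ahmia addresses from this diff:
assert onion_version('http://msydqstlz2kzerdg.onion') == 2
assert onion_version('http://juhanurmihxlp77nkq76byazcldy2hlmovfu2epvl5ankdibsot4csyd.onion') == 3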
Diffstat (limited to 'searx/engines')
-rw-r--r--  searx/engines/ahmia.py    |  4 ++--
-rw-r--r--  searx/engines/not_evil.py | 67 -----------------
2 files changed, 2 insertions(+), 69 deletions(-)
diff --git a/searx/engines/ahmia.py b/searx/engines/ahmia.py
index 6c502bb40..b9a0086bd 100644
--- a/searx/engines/ahmia.py
+++ b/searx/engines/ahmia.py
@@ -9,7 +9,7 @@ from searx.engines.xpath import extract_url, extract_text, eval_xpath_list, eval
 
 # about
 about = {
-    "website": 'http://msydqstlz2kzerdg.onion',
+    "website": 'http://juhanurmihxlp77nkq76byazcldy2hlmovfu2epvl5ankdibsot4csyd.onion',
     "wikidata_id": 'Q18693938',
     "official_api_documentation": None,
     "use_official_api": False,
@@ -23,7 +23,7 @@ paging = True
 page_size = 10
 
 # search url
-search_url = 'http://msydqstlz2kzerdg.onion/search/?{query}'
+search_url = 'http://juhanurmihxlp77nkq76byazcldy2hlmovfu2epvl5ankdibsot4csyd.onion/search/?{query}'
 time_range_support = True
 time_range_dict = {'day': 1,
                    'week': 7,
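
The request() body that consumes search_url is outside this diff, but searx's XPath-style engines conventionally fill the {query} placeholder with a urlencoded parameter. A hedged sketch of that expansion (the query value 'privacy' is illustrative, and the format pattern is assumed, not shown in this diff):

# Sketch only, assuming the usual searx pattern of
# search_url.format(query=urlencode({'q': query})):
from urllib.parse import urlencode

search_url = ('http://juhanurmihxlp77nkq76byazcldy2hlmovfu2epvl5ankdibsot4csyd.onion'
              '/search/?{query}')
print(search_url.format(query=urlencode({'q': 'privacy'})))
# -> http://juhanurmihxlp77nkq76byazcldy2hlmovfu2epvl5ankdibsot4csyd.onion/search/?q=privacy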
diff --git a/searx/engines/not_evil.py b/searx/engines/not_evil.py
deleted file mode 100644
index df41c0941..000000000
--- a/searx/engines/not_evil.py
+++ /dev/null
@@ -1,67 +0,0 @@
-# SPDX-License-Identifier: AGPL-3.0-or-later
-"""
- not Evil (Onions)
-"""
-
-from urllib.parse import urlencode
-from lxml import html
-from searx.engines.xpath import extract_text
-
-# about
-about = {
-    "website": 'http://hss3uro2hsxfogfq.onion',
-    "wikidata_id": None,
-    "official_api_documentation": 'http://hss3uro2hsxfogfq.onion/api.htm',
-    "use_official_api": False,
-    "require_api_key": False,
-    "results": 'HTML',
-}
-
-# engine dependent config
-categories = ['onions']
-paging = True
-page_size = 20
-
-# search-url
-base_url = 'http://hss3uro2hsxfogfq.onion/'
-search_url = 'index.php?{query}&hostLimit=20&start={pageno}&numRows={page_size}'
-
-# specific xpath variables
-results_xpath = '//*[@id="content"]/div/p'
-url_xpath = './span[1]'
-title_xpath = './a[1]'
-content_xpath = './text()'
-
-
-# do search-request
-def request(query, params):
-    offset = (params['pageno'] - 1) * page_size
-
-    params['url'] = base_url + search_url.format(pageno=offset,
-                                                 query=urlencode({'q': query}),
-                                                 page_size=page_size)
-
-    return params
-
-
-# get response from search-request
-def response(resp):
-    results = []
-
-    # needed because otherwise requests guesses wrong encoding
-    resp.encoding = 'utf8'
-    dom = html.fromstring(resp.text)
-
-    # parse results
-    for result in dom.xpath(results_xpath):
-        url = extract_text(result.xpath(url_xpath)[0])
-        title = extract_text(result.xpath(title_xpath)[0])
-        content = extract_text(result.xpath(content_xpath))
-
-        # append result
-        results.append({'url': url,
-                        'title': title,
-                        'content': content,
-                        'is_onion': True})
-
-    return results
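
For reference, the removed request() built its paginated URL as below; note that the {pageno} placeholder actually received a row offset, not a page number. This is reconstructed from the deleted code above, against a host (a v2 address) that no longer resolves:

# Reconstruction from the deleted engine, runnable offline:
from urllib.parse import urlencode

base_url = 'http://hss3uro2hsxfogfq.onion/'
search_url = 'index.php?{query}&hostLimit=20&start={pageno}&numRows={page_size}'
page_size = 20

pageno = 2                              # second results page
offset = (pageno - 1) * page_size       # -> 20 rows to skip
url = base_url + search_url.format(pageno=offset,
                                   query=urlencode({'q': 'test'}),
                                   page_size=page_size)
print(url)
# -> http://hss3uro2hsxfogfq.onion/index.php?q=test&hostLimit=20&start=20&numRows=20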