Diffstat (limited to 'searx')
-rw-r--r--  searx/plugins/doai_rewrite.py          8
-rw-r--r--  searx/plugins/https_rewrite.py         3
-rw-r--r--  searx/plugins/self_info.py            16
-rw-r--r--  searx/plugins/tracker_url_remover.py  10
-rw-r--r--  searx/search.py                       32
5 files changed, 28 insertions, 41 deletions
diff --git a/searx/plugins/doai_rewrite.py b/searx/plugins/doai_rewrite.py
index fc5998b14..0142af672 100644
--- a/searx/plugins/doai_rewrite.py
+++ b/searx/plugins/doai_rewrite.py
@@ -20,12 +20,12 @@ def extract_doi(url):
     return None
-def on_result(request, ctx):
-    doi = extract_doi(ctx['result']['parsed_url'])
+def on_result(request, search, result):
+    doi = extract_doi(result['parsed_url'])
     if doi and len(doi) < 50:
         for suffix in ('/', '.pdf', '/full', '/meta', '/abstract'):
             if doi.endswith(suffix):
                 doi = doi[:-len(suffix)]
-        ctx['result']['url'] = 'http://doai.io/' + doi
-        ctx['result']['parsed_url'] = urlparse(ctx['result']['url'])
+        result['url'] = 'http://doai.io/' + doi
+        result['parsed_url'] = urlparse(result['url'])
     return True
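
The hunk above shows the plugin API change this commit applies throughout: on_result hooks no longer receive a ctx dict built from locals(), they get the search object and the result dict directly. A minimal sketch of a plugin written against the new signature follows; the module name, trigger host and rewrite rule are hypothetical, only the hook signature and the module-level attributes mirror the bundled plugins.

# Hypothetical plugin sketch, not part of this commit; it only illustrates
# the new on_result(request, search, result) contract shown above.
try:
    from urlparse import urlparse          # Python 2, as searx used at the time
except ImportError:
    from urllib.parse import urlparse      # Python 3 fallback

name = 'Example HTTPS upgrade'             # the real plugins wrap these in gettext()
description = 'Illustrative only'
default_on = False


def on_result(request, search, result):
    # `result` is the result dict itself, no ctx['result'] indirection
    parsed = result['parsed_url']
    if parsed.hostname == 'example.org' and parsed.scheme == 'http':
        result['url'] = 'https://' + result['url'][len('http://'):]
        result['parsed_url'] = urlparse(result['url'])
    # the bundled hooks all return True to keep the result
    return True
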
diff --git a/searx/plugins/https_rewrite.py b/searx/plugins/https_rewrite.py
index 8a9fcd4ad..8b4c9784e 100644
--- a/searx/plugins/https_rewrite.py
+++ b/searx/plugins/https_rewrite.py
@@ -220,8 +220,7 @@ def https_url_rewrite(result):
     return result
-def on_result(request, ctx):
-    result = ctx['result']
+def on_result(request, search, result):
     if result['parsed_url'].scheme == 'http':
         https_url_rewrite(result)
     return True
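
Because every bundled plugin is ported by hand in this commit, third-party plugins written against the old ctx dict need the same treatment. Below is a hedged sketch of a compatibility wrapper, assuming the old hook only read the ctx keys the bundled plugins used (result, search, result_container); the wrapper itself is hypothetical and not part of searx.

# Hypothetical adapter: expose an old-style on_result(request, ctx) hook
# under the new three-argument signature.
def adapt_old_on_result(old_hook):
    def on_result(request, search, result):
        # rebuild the subset of the old locals()-based ctx that the
        # bundled plugins actually touched
        ctx = {
            'search': search.search_query,
            'result_container': search.result_container,
            'result': result,
        }
        return old_hook(request, ctx)
    return on_result
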
diff --git a/searx/plugins/self_info.py b/searx/plugins/self_info.py
index 2f19ad9c7..a2aeda98e 100644
--- a/searx/plugins/self_info.py
+++ b/searx/plugins/self_info.py
@@ -28,19 +28,19 @@ p = re.compile('.*user[ -]agent.*', re.IGNORECASE)
 # attach callback to the post search hook
 # request: flask request object
 # ctx: the whole local context of the pre search hook
-def post_search(request, ctx):
-    if ctx['search'].pageno > 1:
+def post_search(request, search):
+    if search.search_query.pageno > 1:
         return True
-    if ctx['search'].query == 'ip':
+    if search.search_query.query == 'ip':
         x_forwarded_for = request.headers.getlist("X-Forwarded-For")
         if x_forwarded_for:
             ip = x_forwarded_for[0]
         else:
             ip = request.remote_addr
-        ctx['result_container'].answers.clear()
-        ctx['result_container'].answers.add(ip)
-    elif p.match(ctx['search'].query):
+        search.result_container.answers.clear()
+        search.result_container.answers.add(ip)
+    elif p.match(search.search_query.query):
         ua = request.user_agent
-        ctx['result_container'].answers.clear()
-        ctx['result_container'].answers.add(ua)
+        search.result_container.answers.clear()
+        search.result_container.answers.add(ua)
     return True
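
The same mapping applies to post_search hooks: what used to be ctx['search'] and ctx['result_container'] is now reached through search.search_query and search.result_container on the object passed in. A minimal sketch of a custom answer plugin on the new API; the 'hello' trigger and the answer text are made up.

# Hypothetical post_search plugin against the new signature.
def post_search(request, search):
    if search.search_query.pageno > 1:   # answer only on the first page
        return True
    if search.search_query.query == 'hello':
        search.result_container.answers.clear()
        search.result_container.answers.add('Hello from a plugin')
    return True
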
diff --git a/searx/plugins/tracker_url_remover.py b/searx/plugins/tracker_url_remover.py
index b909e3fae..68a004e33 100644
--- a/searx/plugins/tracker_url_remover.py
+++ b/searx/plugins/tracker_url_remover.py
@@ -28,8 +28,8 @@ description = gettext('Remove trackers arguments from the returned URL')
 default_on = True
-def on_result(request, ctx):
-    query = ctx['result']['parsed_url'].query
+def on_result(request, search, result):
+    query = result['parsed_url'].query
     if query == "":
         return True
@@ -37,8 +37,8 @@ def on_result(request, ctx):
     for reg in regexes:
         query = reg.sub('', query)
-    if query != ctx['result']['parsed_url'].query:
-        ctx['result']['parsed_url'] = ctx['result']['parsed_url']._replace(query=query)
-        ctx['result']['url'] = urlunparse(ctx['result']['parsed_url'])
+    if query != result['parsed_url'].query:
+        result['parsed_url'] = result['parsed_url']._replace(query=query)
+        result['url'] = urlunparse(result['parsed_url'])
     return True
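
Outside the plugin machinery, the clean-up itself is plain regex substitution on the query string followed by a _replace()/urlunparse() round trip. A standalone sketch with an illustrative pattern list in the spirit of the module's regexes; the real set is defined at the top of the file and is not shown in this hunk.

import re
try:
    from urlparse import urlparse, urlunparse      # Python 2, as searx used
except ImportError:
    from urllib.parse import urlparse, urlunparse  # Python 3 fallback

# illustrative tracker patterns; the plugin defines its own set
regexes = [re.compile(r'utm_[^&]+&?'),
           re.compile(r'&$')]

parsed_url = urlparse('http://example.org/page?utm_source=feed&id=42')
query = parsed_url.query
for reg in regexes:
    query = reg.sub('', query)
if query != parsed_url.query:
    parsed_url = parsed_url._replace(query=query)
print(urlunparse(parsed_url))   # http://example.org/page?id=42
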
diff --git a/searx/search.py b/searx/search.py
index a41c4b09a..a8de143b0 100644
--- a/searx/search.py
+++ b/searx/search.py
@@ -357,35 +357,23 @@ class Search(object):
         return self.result_container
-def search_with_plugins(do_search, search_query, request, request_data, result_container):
-    """Search using the do_search function and with plugins filtering.
-    Standalone function to have a well define locals().
-    result_container contains the results after the function call.
-    """
-    search = search_query
-
-    if plugins.call('pre_search', request, locals()):
-        do_search()
-
-    plugins.call('post_search', request, locals())
-
-    results = result_container.get_ordered_results()
-
-    for result in results:
-        plugins.call('on_result', request, locals())
-
-
 class SearchWithPlugins(Search):
+    """Similar to the Search class but call the plugins."""
+
     def __init__(self, search_query, request):
         super(SearchWithPlugins, self).__init__(search_query)
         self.request = request
-        self.request_data = request.request_data
     def search(self):
-
-        def do_search():
+        if plugins.call('pre_search', self.request, self):
             super(SearchWithPlugins, self).search()
-        search_with_plugins(do_search, self.search_query, self.request, self.request_data, self.result_container)
+        plugins.call('post_search', self.request, self)
+
+        results = self.result_container.get_ordered_results()
+
+        for result in results:
+            plugins.call('on_result', self.request, self, result)
+
         return self.result_container
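
The net effect of the search.py change is that the hook order is now explicit in SearchWithPlugins.search() itself: pre_search gates the engine search, post_search always runs afterwards, and on_result runs once per ordered result with the result passed positionally. A self-contained mock of that control flow; none of these Fake* names exist in searx, they only mirror the ordering above.

# Hypothetical stand-ins that only mirror the call order established above.
class FakePluginStore(object):
    def call(self, hook, request, *args):
        print('%s called with %d extra arg(s)' % (hook, len(args)))
        return True              # False from pre_search would skip the engine search


class FakeSearch(object):
    def search(self):
        print('engine requests run here')


class FakeSearchWithPlugins(FakeSearch):
    def __init__(self, request, plugins, ordered_results):
        self.request = request
        self.plugins = plugins
        self.ordered_results = ordered_results

    def search(self):
        # same ordering as SearchWithPlugins.search() in the hunk above
        if self.plugins.call('pre_search', self.request, self):
            FakeSearch.search(self)
        self.plugins.call('post_search', self.request, self)
        for result in self.ordered_results:
            self.plugins.call('on_result', self.request, self, result)


FakeSearchWithPlugins('req', FakePluginStore(), [{'url': 'a'}]).search()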