summaryrefslogtreecommitdiff
path: root/searx
diff options
context:
space:
mode:
authorAlexandre Flament <alex@al-f.net>2020-12-15 14:50:17 +0100
committerAlexandre Flament <alex@al-f.net>2020-12-17 11:39:36 +0100
commitc0cc01e936593ff3df828fa3bb834507c45cd7ac (patch)
treefaaa9efb565ae9519d7504ecfc0e171785eae8e7 /searx
parent3b87efb3db25f2811923599fcad57f0ff40fe46a (diff)
downloadsearxng-c0cc01e936593ff3df828fa3bb834507c45cd7ac.tar.gz
searxng-c0cc01e936593ff3df828fa3bb834507c45cd7ac.zip
[mod] searx.search: search_multiple_requests is a method of Search class
Diffstat (limited to 'searx')
-rw-r--r--searx/search/__init__.py47
1 file changed, 23 insertions, 24 deletions
diff --git a/searx/search/__init__.py b/searx/search/__init__.py
index 220950803..77835567e 100644
--- a/searx/search/__init__.py
+++ b/searx/search/__init__.py
@@ -321,29 +321,6 @@ def search_one_request_safe(engine_name, query, request_params, result_container
return search_one_http_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit)
-def search_multiple_requests(requests, result_container, start_time, timeout_limit):
- search_id = uuid4().__str__()
-
- for engine_name, query, request_params in requests:
- th = threading.Thread(
- target=search_one_request_safe,
- args=(engine_name, query, request_params, result_container, start_time, timeout_limit),
- name=search_id,
- )
- th._timeout = False
- th._engine_name = engine_name
- th.start()
-
- for th in threading.enumerate():
- if th.name == search_id:
- remaining_time = max(0.0, timeout_limit - (time() - start_time))
- th.join(remaining_time)
- if th.is_alive():
- th._timeout = True
- result_container.add_unresponsive_engine(th._engine_name, 'timeout')
- logger.warning('engine timeout: {0}'.format(th._engine_name))
-
-
# get default request parameter
def default_request_params():
return {
@@ -492,6 +469,28 @@ class Search:
return requests, actual_timeout
+ def search_multiple_requests(self, requests):
+ search_id = uuid4().__str__()
+
+ for engine_name, query, request_params in requests:
+ th = threading.Thread(
+ target=search_one_request_safe,
+ args=(engine_name, query, request_params, self.result_container, self.start_time, self.actual_timeout),
+ name=search_id,
+ )
+ th._timeout = False
+ th._engine_name = engine_name
+ th.start()
+
+ for th in threading.enumerate():
+ if th.name == search_id:
+ remaining_time = max(0.0, self.actual_timeout - (time() - self.start_time))
+ th.join(remaining_time)
+ if th.is_alive():
+ th._timeout = True
+ self.result_container.add_unresponsive_engine(th._engine_name, 'timeout')
+ logger.warning('engine timeout: {0}'.format(th._engine_name))
+
def search_standard(self):
"""
Update self.result_container, self.actual_timeout
@@ -500,7 +499,7 @@ class Search:
# send all search-request
if requests:
- search_multiple_requests(requests, self.result_container, self.start_time, self.actual_timeout)
+ self.search_multiple_requests(requests)
start_new_thread(gc.collect, tuple())
# return results, suggestions, answers and infoboxes