author    Alexandre Flament <alex@al-f.net>  2019-07-17 10:38:45 +0200
committer GitHub <noreply@github.com>  2019-07-17 10:38:45 +0200
commit    554a21e1d07f3b434b5097b4e3d49e1403be7527 (patch)
tree      e4917091b8e32690256fabf64addfc1ea187ba67 /searx/search.py
parent    cfcbc3a5c344037fb5423c14223e72578170a234 (diff)
[enh] Add Server-Timing header (#1637)
Server-Timing specification: https://www.w3.org/TR/server-timing/

In the browser Dev Tools, focus on the main request: the per-engine response times are shown in the Timing tab.
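The diff below records two timings per engine (total engine time and HTTP page-load time) through the new result_container.add_timing() call; these values are what feed the Server-Timing header. As a rough sketch of how such a header value could be assembled (the get_timings() accessor, the dict layout, and the metric names here are assumptions for illustration, not code from this commit):

def server_timing_header(timings):
    """Serialize per-engine timings into a Server-Timing header value.

    Assumes `timings` is a list of dicts such as
    {'engine': 'wikipedia', 'total': 0.254, 'load': 0.198},
    i.e. the data collected via result_container.add_timing().
    Per the spec, dur is expressed in milliseconds.
    """
    parts = []
    for timing in timings:
        engine = timing['engine']
        # one metric for the total engine time, one for the HTTP load time
        parts.append('total_{0};dur={1}'.format(engine, round(timing['total'] * 1000, 3)))
        parts.append('load_{0};dur={1}'.format(engine, round(timing['load'] * 1000, 3)))
    return ', '.join(parts)

A hypothetical Flask-level usage would be
response.headers['Server-Timing'] = server_timing_header(result_container.get_timings()),
producing a value such as: total_wikipedia;dur=254.0, load_wikipedia;dur=198.0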
Diffstat (limited to 'searx/search.py')
-rw-r--r--  searx/search.py  39
1 file changed, 24 insertions(+), 15 deletions(-)
diff --git a/searx/search.py b/searx/search.py
index a51d111a2..465b5ce64 100644
--- a/searx/search.py
+++ b/searx/search.py
@@ -74,10 +74,10 @@ def search_one_request(engine, query, request_params):
 
     # ignoring empty urls
     if request_params['url'] is None:
-        return []
+        return None
 
     if not request_params['url']:
-        return []
+        return None
 
     # send request
     response = send_http_request(engine, request_params)
@@ -103,20 +103,29 @@ def search_one_request_safe(engine_name, query, request_params, result_container
         # send requests and parse the results
         search_results = search_one_request(engine, query, request_params)
 
-        # add results
-        result_container.extend(engine_name, search_results)
-
-        # update engine time when there is no exception
-        with threading.RLock():
-            engine.stats['engine_time'] += time() - start_time
-            engine.stats['engine_time_count'] += 1
-            # update stats with the total HTTP time
-            engine.stats['page_load_time'] += requests_lib.get_time_for_thread()
-            engine.stats['page_load_count'] += 1
+        # check if the engine accepted the request
+        if search_results is not None:
+            # yes, so add results
+            result_container.extend(engine_name, search_results)
+
+            # update engine time when there is no exception
+            engine_time = time() - start_time
+            page_load_time = requests_lib.get_time_for_thread()
+            result_container.add_timing(engine_name, engine_time, page_load_time)
+            with threading.RLock():
+                engine.stats['engine_time'] += engine_time
+                engine.stats['engine_time_count'] += 1
+                # update stats with the total HTTP time
+                engine.stats['page_load_time'] += page_load_time
+                engine.stats['page_load_count'] += 1
 
     except Exception as e:
-        search_duration = time() - start_time
+        # Timing
+        engine_time = time() - start_time
+        page_load_time = requests_lib.get_time_for_thread()
+        result_container.add_timing(engine_name, engine_time, page_load_time)
 
+        # Record the errors
         with threading.RLock():
             engine.stats['errors'] += 1
 
@@ -125,14 +134,14 @@ def search_one_request_safe(engine_name, query, request_params, result_container
             # requests timeout (connect or read)
             logger.error("engine {0} : HTTP requests timeout"
                          "(search duration : {1} s, timeout: {2} s) : {3}"
-                         .format(engine_name, search_duration, timeout_limit, e.__class__.__name__))
+                         .format(engine_name, engine_time, timeout_limit, e.__class__.__name__))
             requests_exception = True
         elif (issubclass(e.__class__, requests.exceptions.RequestException)):
             result_container.add_unresponsive_engine((engine_name, gettext('request exception')))
             # other requests exception
             logger.exception("engine {0} : requests exception"
                              "(search duration : {1} s, timeout: {2} s) : {3}"
-                             .format(engine_name, search_duration, timeout_limit, e))
+                             .format(engine_name, engine_time, timeout_limit, e))
             requests_exception = True
         else:
             result_container.add_unresponsive_engine((