author    Alexandre Flament <alex@al-f.net>  2021-08-14 20:12:11 +0200
committer Alexandre Flament <alex@al-f.net>  2021-08-24 14:51:20 +0200
commit    3b0f70ed0f981f76d90a8fab2a2bb73e5b1b97bb (patch)
tree      fbdb0734a49fcd32ba97e816a1ceab86d5e5c462 /searx
parent    43fcaa642a63d75096b33d44ce7f7c0de1bce614 (diff)
[mod] /image_proxy: use HTTP/1 instead of HTTP/2
httpx: HTTP/2 is slow when a lot of data is downloaded (see https://github.com/dalf/pyhttp-benchmark). Using HTTP/1 also decreases the load average.
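
For context, a minimal sketch of the httpx behaviour this commit relies on; the client name and URL are illustrative, not SearXNG code. Passing http2=False keeps the client on HTTP/1.1, which avoids HTTP/2 framing overhead when streaming large bodies such as proxied images (http2=True would additionally require the httpx[http2] extra):

    import httpx

    # HTTP/1.1-only client: cheaper per byte for large downloads than HTTP/2.
    client = httpx.Client(http2=False)

    # Stream the body in chunks instead of loading it into memory,
    # similar to what the /image_proxy endpoint does with the response.
    with client.stream("GET", "https://example.org/large-image.jpg") as resp:
        for chunk in resp.iter_bytes():
            pass  # forward each chunk to the downstream client
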
Diffstat (limited to 'searx')
-rw-r--r--  searx/network/network.py  8
-rwxr-xr-x  searx/webapp.py           8
2 files changed, 11 insertions, 5 deletions
diff --git a/searx/network/network.py b/searx/network/network.py
index e7dc5b56e..94e91593d 100644
--- a/searx/network/network.py
+++ b/searx/network/network.py
@@ -289,6 +289,14 @@ def initialize(settings_engines=None, settings_outgoing=None):
if isinstance(network, str):
NETWORKS[engine_name] = NETWORKS[network]
+ # the /image_proxy endpoint has a dedicated network.
+ # same parameters as the default network, but HTTP/2 is disabled.
+ # It decreases the CPU load average, and the total time is more or less the same
+ if 'image_proxy' not in NETWORKS:
+ image_proxy_params = default_params.copy()
+ image_proxy_params['enable_http2'] = False
+ NETWORKS['image_proxy'] = new_network(image_proxy_params)
+
@atexit.register
def done():
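
A self-contained toy version of the pattern added in this hunk, to make the intent concrete; it is not SearXNG internals, and it uses httpx's own http2 flag directly rather than SearXNG's enable_http2 parameter:

    import httpx

    # Registry of named clients: the default keeps HTTP/2 enabled, while the
    # image-proxy entry copies the defaults but falls back to HTTP/1.1.
    default_params = {'http2': True, 'timeout': 10.0}  # http2=True needs the httpx[http2] extra
    NETWORKS = {'default': httpx.Client(**default_params)}

    if 'image_proxy' not in NETWORKS:
        image_proxy_params = default_params.copy()
        image_proxy_params['http2'] = False             # large image bodies are cheaper over HTTP/1.1
        NETWORKS['image_proxy'] = httpx.Client(**image_proxy_params)
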
diff --git a/searx/webapp.py b/searx/webapp.py
index 0b60969a8..3b7e0f972 100755
--- a/searx/webapp.py
+++ b/searx/webapp.py
@@ -108,7 +108,7 @@ from searx.autocomplete import search_autocomplete, backends as autocomplete_bac
from searx.languages import language_codes as languages
from searx.locales import LOCALE_NAMES, UI_LOCALE_CODES, RTL_LOCALES
from searx.search import SearchWithPlugins, initialize as search_initialize
-from searx.network import stream as http_stream
+from searx.network import stream as http_stream, set_context_network_name
from searx.search.checker import get_result as checker_get_result
from searx.settings_loader import get_default_settings_path
@@ -1086,13 +1086,11 @@ def image_proxy():
'Sec-GPC': '1',
'DNT': '1',
}
+ set_context_network_name('image_proxy')
stream = http_stream(
method = 'GET',
url = url,
- headers = request_headers,
- timeout = settings['outgoing']['request_timeout'],
- allow_redirects = True,
- max_redirects = 20
+ headers = request_headers
)
resp = next(stream)
content_length = resp.headers.get('Content-Length')
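
Putting the two hunks together, a hedged sketch of how the webapp side now selects the dedicated network before streaming; http_stream and set_context_network_name are the helpers imported above, while the function name and its url/request_headers arguments are only illustrative:

    from searx.network import stream as http_stream, set_context_network_name

    def proxy_image(url, request_headers):
        # Route this request through the HTTP/1-only 'image_proxy' network.
        set_context_network_name('image_proxy')
        stream = http_stream(method='GET', url=url, headers=request_headers)
        resp = next(stream)   # first item yielded is the upstream response
        return resp, stream   # caller consumes the rest of the generator

The timeout and redirect options dropped from the http_stream() call are presumably covered by the network's own defaults now, rather than being passed per request.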