diff options
author | Alexandre Flament <alex@al-f.net> | 2021-04-05 10:43:33 +0200 |
---|---|---|
committer | Alexandre Flament <alex@al-f.net> | 2021-04-12 17:25:56 +0200 |
commit | d14994dc73ba5c95382812581dac146d9eceaafa (patch) | |
tree | 2f7720dbae8f1064fe479f986f0b198aff2beb99 /searx_extra | |
parent | eaa694fb7d0e47b943bc6d6edb6cb6a40ab2d85e (diff) | |
download | searxng-d14994dc73ba5c95382812581dac146d9eceaafa.tar.gz searxng-d14994dc73ba5c95382812581dac146d9eceaafa.zip |
[httpx] replace searx.poolrequests by searx.network
settings.yml:
* outgoing.networks:
* can contain network definitions
* properties: enable_http, verify, http2, max_connections, max_keepalive_connections,
keepalive_expiry, local_addresses, support_ipv4, support_ipv6, proxies, max_redirects, retries
* retries: 0 by default, number of times searx retries to send the HTTP request (using different IP & proxy each time)
* local_addresses can be "192.168.0.1/24" (it supports IPv6)
* support_ipv4 & support_ipv6: both True by default
see https://github.com/searx/searx/pull/1034
* each engine can define a "network" section:
* either a full network description
* or a reference to an existing network
* all HTTP requests of engine use the same HTTP configuration (it was not the case before, see proxy configuration in master)
Diffstat (limited to 'searx_extra')
-rwxr-xr-x | searx_extra/update/update_engine_descriptions.py | 6 |
1 files changed, 3 insertions, 3 deletions
diff --git a/searx_extra/update/update_engine_descriptions.py b/searx_extra/update/update_engine_descriptions.py index 109fdbfa0..cf9007da3 100755 --- a/searx_extra/update/update_engine_descriptions.py +++ b/searx_extra/update/update_engine_descriptions.py @@ -10,7 +10,7 @@ from searx.engines.wikidata import send_wikidata_query from searx.utils import extract_text import searx import searx.search -import searx.poolrequests +import searx.network SPARQL_WIKIPEDIA_ARTICLE = """ SELECT DISTINCT ?item ?name @@ -59,7 +59,7 @@ def get_wikipedia_summary(language, pageid): search_url = 'https://{language}.wikipedia.org/api/rest_v1/page/summary/{title}' url = search_url.format(title=quote(pageid), language=language) try: - response = searx.poolrequests.get(url) + response = searx.network.get(url) response.raise_for_status() api_result = json.loads(response.text) return api_result.get('extract') @@ -89,7 +89,7 @@ def get_website_description(url, lang1, lang2=None): lang_list.append(lang2) headers['Accept-Language'] = f'{",".join(lang_list)};q=0.8' try: - response = searx.poolrequests.get(url, headers=headers, timeout=10) + response = searx.network.get(url, headers=headers, timeout=10) response.raise_for_status() except Exception: return (None, None) |