author     Dalf <alex@al-f.net>               2020-08-11 16:25:03 +0200
committer  Alexandre Flament <alex@al-f.net>  2020-09-10 10:49:42 +0200
commit     c225db45c8a4ab466bff049216f7e0189dc1b067 (patch)
tree       16be3299a6faf15538fc1f2cf181aadc422039fd
parent     78883777438fc07833d983c50d9b131eb6feb9eb (diff)
Drop Python 2 (4/n): SearchQuery.query is a str instead of bytes
-rw-r--r--  searx/answerers/__init__.py              |  4
-rw-r--r--  searx/answerers/random/answerer.py       | 12
-rw-r--r--  searx/answerers/statistics/answerer.py   | 10
-rw-r--r--  searx/engines/archlinux.py               |  2
-rw-r--r--  searx/engines/arxiv.py                   |  2
-rwxr-xr-x  searx/engines/base.py                    |  2
-rw-r--r--  searx/engines/bing.py                    |  2
-rw-r--r--  searx/engines/currency_convert.py        |  4
-rw-r--r--  searx/engines/dictzone.py                |  4
-rw-r--r--  searx/engines/gentoo.py                  |  2
-rw-r--r--  searx/engines/openstreetmap.py           |  4
-rw-r--r--  searx/engines/scanr_structures.py        |  2
-rw-r--r--  searx/engines/translated.py              |  6
-rw-r--r--  searx/external_bang.py                   |  2
-rw-r--r--  searx/plugins/self_info.py               |  4
-rw-r--r--  searx/query.py                           |  7
-rw-r--r--  searx/utils.py                           |  1
-rwxr-xr-x  searx/webapp.py                          |  6
-rw-r--r--  tests/unit/test_plugins.py               | 16
-rw-r--r--  tests/unit/test_utils.py                 |  4
20 files changed, 48 insertions, 48 deletions
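
The pattern across all 20 files is the one summarised in the commit title: SearchQuery.query (and the raw query handed on to engines, answerers and plugins) is now a str, so the .decode()/.encode() calls at each consumer disappear. A minimal sketch of the before/after invariant, using simplified stand-in classes rather than the real searx.query classes:

    class SearchQueryOld:
        """Pre-commit behaviour: the query was stored encoded as bytes."""
        def __init__(self, query: str):
            self.query = query.encode()     # bytes internally

    class SearchQueryNew:
        """Post-commit behaviour: the query stays a plain str."""
        def __init__(self, query: str):
            self.query = query              # str end to end

    old = SearchQueryOld("time zone berlin")
    new = SearchQueryNew("time zone berlin")
    print(type(old.query), old.query)   # <class 'bytes'> b'time zone berlin'
    print(type(new.query), new.query)   # <class 'str'> time zone berlin
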
diff --git a/searx/answerers/__init__.py b/searx/answerers/__init__.py
index 47cc33ed3..97e7e5854 100644
--- a/searx/answerers/__init__.py
+++ b/searx/answerers/__init__.py
@@ -32,10 +32,10 @@ def ask(query):
results = []
query_parts = list(filter(None, query.query.split()))
- if query_parts[0].decode() not in answerers_by_keywords:
+ if query_parts[0] not in answerers_by_keywords:
return results
- for answerer in answerers_by_keywords[query_parts[0].decode()]:
+ for answerer in answerers_by_keywords[query_parts[0]]:
result = answerer(query)
if result:
results.append(result)
diff --git a/searx/answerers/random/answerer.py b/searx/answerers/random/answerer.py
index aaf9e1cf6..d5223e517 100644
--- a/searx/answerers/random/answerer.py
+++ b/searx/answerers/random/answerer.py
@@ -39,11 +39,11 @@ def random_uuid():
return str(uuid.uuid4())
-random_types = {b'string': random_string,
- b'int': random_int,
- b'float': random_float,
- b'sha256': random_sha256,
- b'uuid': random_uuid}
+random_types = {'string': random_string,
+ 'int': random_int,
+ 'float': random_float,
+ 'sha256': random_sha256,
+ 'uuid': random_uuid}
# required answerer function
@@ -64,4 +64,4 @@ def answer(query):
def self_info():
return {'name': gettext('Random value generator'),
'description': gettext('Generate different random values'),
- 'examples': ['random {}'.format(x.decode()) for x in random_types]}
+ 'examples': ['random {}'.format(x) for x in random_types]}
diff --git a/searx/answerers/statistics/answerer.py b/searx/answerers/statistics/answerer.py
index cfd1b3e23..abd4be7f5 100644
--- a/searx/answerers/statistics/answerer.py
+++ b/searx/answerers/statistics/answerer.py
@@ -27,15 +27,15 @@ def answer(query):
func = parts[0]
answer = None
- if func == b'min':
+ if func == 'min':
answer = min(args)
- elif func == b'max':
+ elif func == 'max':
answer = max(args)
- elif func == b'avg':
+ elif func == 'avg':
answer = sum(args) / len(args)
- elif func == b'sum':
+ elif func == 'sum':
answer = sum(args)
- elif func == b'prod':
+ elif func == 'prod':
answer = reduce(mul, args, 1)
if answer is None:
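
The three answerer hunks above (__init__.py, random, statistics) all rest on the same fact: splitting a str query yields str fragments, so answerer keywords can be plain str dict keys and str comparisons. A simplified sketch of the dispatch, with a made-up registry standing in for the one built in searx/answerers/__init__.py:

    answerers_by_keywords = {
        'random': [lambda q: 'a random value'],   # illustrative entries only
        'min':    [lambda q: 'a minimum'],
    }

    def ask(query: str):
        results = []
        parts = list(filter(None, query.split()))   # str.split() -> list of str
        if not parts or parts[0] not in answerers_by_keywords:
            return results
        for answerer in answerers_by_keywords[parts[0]]:
            result = answerer(query)
            if result:
                results.append(result)
        return results

    print(ask('random uuid'))   # ['a random value'] -- no .decode() needed
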
diff --git a/searx/engines/archlinux.py b/searx/engines/archlinux.py
index 9e13dc42e..e2f44b0f5 100644
--- a/searx/engines/archlinux.py
+++ b/searx/engines/archlinux.py
@@ -105,7 +105,7 @@ def request(query, params):
# if our language is hosted on the main site, we need to add its name
# to the query in order to narrow the results to that language
if language in main_langs:
- query += b' (' + main_langs[language] + b')'
+ query += ' (' + main_langs[language] + ')'
# prepare the request parameters
query = urlencode({'search': query})
diff --git a/searx/engines/arxiv.py b/searx/engines/arxiv.py
index 851f30bfc..77ddc572e 100644
--- a/searx/engines/arxiv.py
+++ b/searx/engines/arxiv.py
@@ -30,7 +30,7 @@ def request(query, params):
# basic search
offset = (params['pageno'] - 1) * number_of_results
- string_args = dict(query=query.decode(),
+ string_args = dict(query=query,
offset=offset,
number_of_results=number_of_results)
diff --git a/searx/engines/base.py b/searx/engines/base.py
index 34b735b3c..0114f9798 100755
--- a/searx/engines/base.py
+++ b/searx/engines/base.py
@@ -55,7 +55,7 @@ shorcut_dict = {
def request(query, params):
# replace shortcuts with API advanced search keywords
for key in shorcut_dict.keys():
- query = re.sub(key, shorcut_dict[key], str(query))
+ query = re.sub(key, shorcut_dict[key], query)
# basic search
offset = (params['pageno'] - 1) * number_of_results
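
The old base.py line wrapped the query in str(); on a bytes object that returns the repr, including the b'...' wrapper, rather than the decoded text, which is why passing a genuine str through is the cleaner fix. A small illustration, with a hypothetical one-entry mapping standing in for the real shorcut_dict:

    import re

    query_bytes = b'doi:10.1000/182'
    print(str(query_bytes))       # "b'doi:10.1000/182'" -- repr, not decoded text
    print(query_bytes.decode())   # 'doi:10.1000/182'

    # With the query already a str, re.sub applies directly, as in the new code.
    shorcut_dict = {'doi:': 'dcdoi:'}   # hypothetical entry, not the real mapping
    query = 'doi:10.1000/182'
    for key in shorcut_dict:
        query = re.sub(key, shorcut_dict[key], query)
    print(query)                  # 'dcdoi:10.1000/182'
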
diff --git a/searx/engines/bing.py b/searx/engines/bing.py
index eda3d42a3..c7b619369 100644
--- a/searx/engines/bing.py
+++ b/searx/engines/bing.py
@@ -47,7 +47,7 @@ def request(query, params):
else:
lang = match_language(params['language'], supported_languages, language_aliases)
- query = 'language:{} {}'.format(lang.split('-')[0].upper(), query.decode()).encode()
+ query = 'language:{} {}'.format(lang.split('-')[0].upper(), query)
search_path = search_string.format(
query=urlencode({'q': query}),
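
With the query already a str, the bing request no longer needs the decode-then-re-encode round trip around str.format(); urlencode() takes the formatted str directly. A quick check of the new shape (values are illustrative):

    from urllib.parse import urlencode

    lang = 'en-US'
    query = 'privacy search'

    prefixed = 'language:{} {}'.format(lang.split('-')[0].upper(), query)
    print(urlencode({'q': prefixed}))   # q=language%3AEN+privacy+search
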
diff --git a/searx/engines/currency_convert.py b/searx/engines/currency_convert.py
index 7281b7175..c6067c4a8 100644
--- a/searx/engines/currency_convert.py
+++ b/searx/engines/currency_convert.py
@@ -11,13 +11,13 @@ categories = []
url = 'https://duckduckgo.com/js/spice/currency/1/{0}/{1}'
weight = 100
-parser_re = re.compile(b'.*?(\\d+(?:\\.\\d+)?) ([^.0-9]+) (?:in|to) ([^.0-9]+)', re.I)
+parser_re = re.compile('.*?(\\d+(?:\\.\\d+)?) ([^.0-9]+) (?:in|to) ([^.0-9]+)', re.I)
db = 1
def normalize_name(name):
- name = name.decode().lower().replace('-', ' ').rstrip('s')
+ name = name.lower().replace('-', ' ').rstrip('s')
name = re.sub(' +', ' ', name)
return unicodedata.normalize('NFKD', name).lower()
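
The regex changes here (and the matching ones in dictzone.py, translated.py and self_info.py below) are forced by the type switch: a bytes pattern only matches bytes and a str pattern only matches str; mixing the two raises TypeError. A demonstration with a trimmed-down currency pattern, not the exact one above:

    import re

    parser_re_bytes = re.compile(rb'(\d+(?:\.\d+)?) (\w+) (?:in|to) (\w+)', re.I)
    parser_re_str = re.compile(r'(\d+(?:\.\d+)?) (\w+) (?:in|to) (\w+)', re.I)

    query = '10 usd in eur'

    try:
        parser_re_bytes.match(query)
    except TypeError as exc:
        print('bytes pattern vs str query:', exc)

    m = parser_re_str.match(query)
    print(m.groups())   # ('10', 'usd', 'eur')
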
diff --git a/searx/engines/dictzone.py b/searx/engines/dictzone.py
index 1d8470c17..5a1fea3cf 100644
--- a/searx/engines/dictzone.py
+++ b/searx/engines/dictzone.py
@@ -18,7 +18,7 @@ categories = ['general']
url = 'https://dictzone.com/{from_lang}-{to_lang}-dictionary/{query}'
weight = 100
-parser_re = re.compile(b'.*?([a-z]+)-([a-z]+) ([^ ]+)$', re.I)
+parser_re = re.compile('.*?([a-z]+)-([a-z]+) ([^ ]+)$', re.I)
results_xpath = './/table[@id="r"]/tr'
@@ -37,7 +37,7 @@ def request(query, params):
params['url'] = url.format(from_lang=from_lang[2],
to_lang=to_lang[2],
- query=query.decode())
+ query=query)
return params
diff --git a/searx/engines/gentoo.py b/searx/engines/gentoo.py
index b4b02e6b4..b6bc99fab 100644
--- a/searx/engines/gentoo.py
+++ b/searx/engines/gentoo.py
@@ -90,7 +90,7 @@ def request(query, params):
# if our language is hosted on the main site, we need to add its name
# to the query in order to narrow the results to that language
if language in main_langs:
- query += b' (' + (main_langs[language]).encode() + b')'
+ query += ' (' + main_langs[language] + ')'
# prepare the request parameters
query = urlencode({'search': query})
diff --git a/searx/engines/openstreetmap.py b/searx/engines/openstreetmap.py
index 48a2a14b7..5475c7a6d 100644
--- a/searx/engines/openstreetmap.py
+++ b/searx/engines/openstreetmap.py
@@ -30,8 +30,8 @@ route_re = re.compile('(?:from )?(.+) to (.+)')
# do search-request
def request(query, params):
- params['url'] = base_url + search_string.format(query=query.decode())
- params['route'] = route_re.match(query.decode())
+ params['url'] = base_url + search_string.format(query=query)
+ params['route'] = route_re.match(query)
return params
diff --git a/searx/engines/scanr_structures.py b/searx/engines/scanr_structures.py
index 3ed6c6fd5..6dbbf4fd9 100644
--- a/searx/engines/scanr_structures.py
+++ b/searx/engines/scanr_structures.py
@@ -29,7 +29,7 @@ def request(query, params):
params['url'] = search_url
params['method'] = 'POST'
params['headers']['Content-type'] = "application/json"
- params['data'] = dumps({"query": query.decode(),
+ params['data'] = dumps({"query": query,
"searchField": "ALL",
"sortDirection": "ASC",
"sortOrder": "RELEVANCY",
diff --git a/searx/engines/translated.py b/searx/engines/translated.py
index 079eebe3c..a50e7c830 100644
--- a/searx/engines/translated.py
+++ b/searx/engines/translated.py
@@ -16,7 +16,7 @@ url = 'https://api.mymemory.translated.net/get?q={query}&langpair={from_lang}|{t
web_url = 'https://mymemory.translated.net/en/{from_lang}/{to_lang}/{query}'
weight = 100
-parser_re = re.compile(b'.*?([a-z]+)-([a-z]+) (.{2,})$', re.I)
+parser_re = re.compile('.*?([a-z]+)-([a-z]+) (.{2,})$', re.I)
api_key = ''
@@ -39,9 +39,9 @@ def request(query, params):
key_form = ''
params['url'] = url.format(from_lang=from_lang[1],
to_lang=to_lang[1],
- query=query.decode(),
+ query=query,
key=key_form)
- params['query'] = query.decode()
+ params['query'] = query
params['from_lang'] = from_lang
params['to_lang'] = to_lang
diff --git a/searx/external_bang.py b/searx/external_bang.py
index 3a25cc493..92b6e6a09 100644
--- a/searx/external_bang.py
+++ b/searx/external_bang.py
@@ -23,7 +23,7 @@ def get_bang_url(search_query):
"""
if search_query.external_bang:
- query = search_query.query.decode(errors='ignore')
+ query = search_query.query
bang = _get_bang(search_query.external_bang)
if bang and query:
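
One side effect worth noting for external_bang.py: the old errors='ignore' decode silently dropped any byte that was not valid UTF-8, whereas a str query involves no lossy decode step at all. A small illustration of what errors='ignore' hides (the example bytes are made up):

    raw = b'!g caf\xe9'                  # latin-1 encoded "!g café", not valid UTF-8
    print(raw.decode(errors='ignore'))   # '!g caf' -- the é is silently dropped
    print(b'!g caf\xc3\xa9'.decode())    # '!g café' -- proper UTF-8, nothing lost
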
diff --git a/searx/plugins/self_info.py b/searx/plugins/self_info.py
index cdd3e9a6e..4fdfb4288 100644
--- a/searx/plugins/self_info.py
+++ b/searx/plugins/self_info.py
@@ -22,7 +22,7 @@ default_on = True
# Self User Agent regex
-p = re.compile(b'.*user[ -]agent.*', re.IGNORECASE)
+p = re.compile('.*user[ -]agent.*', re.IGNORECASE)
# attach callback to the post search hook
@@ -31,7 +31,7 @@ p = re.compile(b'.*user[ -]agent.*', re.IGNORECASE)
def post_search(request, search):
if search.search_query.pageno > 1:
return True
- if search.search_query.query == b'ip':
+ if search.search_query.query == 'ip':
x_forwarded_for = request.headers.getlist("X-Forwarded-For")
if x_forwarded_for:
ip = x_forwarded_for[0]
diff --git a/searx/query.py b/searx/query.py
index 614e05c6b..ef323af7a 100644
--- a/searx/query.py
+++ b/searx/query.py
@@ -32,6 +32,7 @@ class RawTextQuery:
"""parse raw text query (the value from the html input)"""
def __init__(self, query, disabled_engines):
+ assert isinstance(query, str)
self.query = query
self.disabled_engines = []
@@ -51,7 +52,7 @@ class RawTextQuery:
self.query_parts = []
# split query, including whitespaces
- raw_query_parts = re.split(r'(\s+)' if isinstance(self.query, str) else b'(\s+)', self.query)
+ raw_query_parts = re.split(r'(\s+)', self.query)
parse_next = True
@@ -183,7 +184,7 @@ class SearchQuery:
def __init__(self, query, engines, categories, lang, safesearch, pageno, time_range,
timeout_limit=None, preferences=None, external_bang=None):
- self.query = query.encode()
+ self.query = query
self.engines = engines
self.categories = categories
self.lang = lang
@@ -195,4 +196,4 @@ class SearchQuery:
self.external_bang = external_bang
def __str__(self):
- return str(self.query) + ";" + str(self.engines)
+ return self.query + ";" + str(self.engines)
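
The new assert isinstance(query, str) in RawTextQuery turns any leftover bytes caller into an immediate failure at construction time instead of an obscure error further down the pipeline. A minimal sketch of that guard (a stand-in class, not the real one):

    class RawTextQuerySketch:
        def __init__(self, query, disabled_engines=None):
            assert isinstance(query, str)
            self.query = query
            self.disabled_engines = disabled_engines or []

    RawTextQuerySketch('time zone berlin')        # fine
    try:
        RawTextQuerySketch(b'time zone berlin')   # a leftover bytes caller
    except AssertionError:
        print('bytes query rejected at construction time')
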
diff --git a/searx/utils.py b/searx/utils.py
index f87ea177a..de5b68ca3 100644
--- a/searx/utils.py
+++ b/searx/utils.py
@@ -59,7 +59,6 @@ def highlight_content(content, query):
if content.find('<') != -1:
return content
- query = query.decode()
if content.lower().find(query.lower()) > -1:
query_regex = '({0})'.format(re.escape(query))
content = re.sub(query_regex, '<span class="highlight">\\1</span>',
diff --git a/searx/webapp.py b/searx/webapp.py
index 25f43662a..a1b0413aa 100755
--- a/searx/webapp.py
+++ b/searx/webapp.py
@@ -623,7 +623,7 @@ def index():
result['publishedDate'] = format_date(result['publishedDate'])
if output_format == 'json':
- return Response(json.dumps({'query': search_query.query.decode(),
+ return Response(json.dumps({'query': search_query.query,
'number_of_results': number_of_results,
'results': results,
'answers': list(result_container.answers),
@@ -652,7 +652,7 @@ def index():
csv.writerow([row.get(key, '') for key in keys])
csv.stream.seek(0)
response = Response(csv.stream.read(), mimetype='application/csv')
- cont_disp = 'attachment;Filename=searx_-_{0}.csv'.format(search_query.query.decode())
+ cont_disp = 'attachment;Filename=searx_-_{0}.csv'.format(search_query.query)
response.headers.add('Content-Disposition', cont_disp)
return response
@@ -736,7 +736,7 @@ def autocompleter():
disabled_engines = request.preferences.engines.get_disabled()
# parse query
- raw_text_query = RawTextQuery(request.form.get('q', b''), disabled_engines)
+ raw_text_query = RawTextQuery(str(request.form.get('q', b'')), disabled_engines)
raw_text_query.parse_query()
# check if search query is set
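
The webapp hunks drop the last decodes before serialisation; json.dumps() refuses bytes outright, which is what the old search_query.query.decode() in the JSON output path was working around. A short demonstration:

    import json

    try:
        json.dumps({'query': b'searx'})
    except TypeError as exc:
        print(exc)   # Object of type bytes is not JSON serializable

    print(json.dumps({'query': 'searx', 'number_of_results': 0}))
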
diff --git a/tests/unit/test_plugins.py b/tests/unit/test_plugins.py
index 10de8475a..838c1d574 100644
--- a/tests/unit/test_plugins.py
+++ b/tests/unit/test_plugins.py
@@ -48,11 +48,11 @@ class SelfIPTest(SearxTestCase):
# IP test
request = Mock(remote_addr='127.0.0.1')
request.headers.getlist.return_value = []
- search = get_search_mock(query=b'ip', pageno=1)
+ search = get_search_mock(query='ip', pageno=1)
store.call(store.plugins, 'post_search', request, search)
self.assertTrue('127.0.0.1' in search.result_container.answers["ip"]["answer"])
- search = get_search_mock(query=b'ip', pageno=2)
+ search = get_search_mock(query='ip', pageno=2)
store.call(store.plugins, 'post_search', request, search)
self.assertFalse('ip' in search.result_container.answers)
@@ -60,26 +60,26 @@ class SelfIPTest(SearxTestCase):
request = Mock(user_agent='Mock')
request.headers.getlist.return_value = []
- search = get_search_mock(query=b'user-agent', pageno=1)
+ search = get_search_mock(query='user-agent', pageno=1)
store.call(store.plugins, 'post_search', request, search)
self.assertTrue('Mock' in search.result_container.answers["user-agent"]["answer"])
- search = get_search_mock(query=b'user-agent', pageno=2)
+ search = get_search_mock(query='user-agent', pageno=2)
store.call(store.plugins, 'post_search', request, search)
self.assertFalse('user-agent' in search.result_container.answers)
- search = get_search_mock(query=b'user-agent', pageno=1)
+ search = get_search_mock(query='user-agent', pageno=1)
store.call(store.plugins, 'post_search', request, search)
self.assertTrue('Mock' in search.result_container.answers["user-agent"]["answer"])
- search = get_search_mock(query=b'user-agent', pageno=2)
+ search = get_search_mock(query='user-agent', pageno=2)
store.call(store.plugins, 'post_search', request, search)
self.assertFalse('user-agent' in search.result_container.answers)
- search = get_search_mock(query=b'What is my User-Agent?', pageno=1)
+ search = get_search_mock(query='What is my User-Agent?', pageno=1)
store.call(store.plugins, 'post_search', request, search)
self.assertTrue('Mock' in search.result_container.answers["user-agent"]["answer"])
- search = get_search_mock(query=b'What is my User-Agent?', pageno=2)
+ search = get_search_mock(query='What is my User-Agent?', pageno=2)
store.call(store.plugins, 'post_search', request, search)
self.assertFalse('user-agent' in search.result_container.answers)
diff --git a/tests/unit/test_utils.py b/tests/unit/test_utils.py
index f7fc7c0e4..5f98511c3 100644
--- a/tests/unit/test_utils.py
+++ b/tests/unit/test_utils.py
@@ -30,9 +30,9 @@ class TestUtils(SearxTestCase):
self.assertEqual(utils.highlight_content(content, None), content)
content = 'a'
- query = b'test'
+ query = 'test'
self.assertEqual(utils.highlight_content(content, query), content)
- query = b'a test'
+ query = 'a test'
self.assertEqual(utils.highlight_content(content, query), content)
def test_html_to_text(self):