author    Markus Heiser <markus.heiser@darmarit.de>  2020-01-03 14:30:53 +0100
committer Markus Heiser <markus.heiser@darmarit.de>  2020-01-03 14:30:53 +0100
commit    51b9295b84079776e3ea08971ccb3ac159e37477
tree      862d88f07cd975a8be0f05ea76b85191f49ed1ae
parent    f602cb8e4dde214d51216fb547eb4dfdee32e191
parent    17b6faa4c3c1cf14a327f4a3538fc70dce08b756
Merge branch 'master' of https://github.com/asciimoo/searx into add-docs
 docs/admin/installation.rst         | 22
 searx/engines/bing.py               |  5
 searx/engines/flickr_noapi.py       | 26
 searx/engines/ina.py                |  9
 searx/engines/microsoft_academic.py |  2
 searx/engines/scanr_structures.py   |  2
 searx/settings.yml                  | 16
 7 files changed, 52 insertions(+), 30 deletions(-)
diff --git a/docs/admin/installation.rst b/docs/admin/installation.rst
index 239ce0704..15800fc01 100644
--- a/docs/admin/installation.rst
+++ b/docs/admin/installation.rst
@@ -114,6 +114,9 @@ content:
# Module to import
module = searx.webapp
+ # Support running the module from a webserver subdirectory.
+ route-run = fixpathinfo:
+
# Virtualenv and python path
virtualenv = /usr/local/searx/searx-ve/
pythonpath = /usr/local/searx/
@@ -151,7 +154,10 @@ content:
server {
listen 80;
server_name searx.example.com;
- root /usr/local/searx;
+ root /usr/local/searx/searx;
+
+ location /static {
+ }
location / {
include uwsgi_params;
@@ -180,14 +186,13 @@ Add this configuration in the server config file
.. code:: nginx
- location = /searx { rewrite ^ /searx/; }
- location /searx {
- try_files $uri @searx;
+ location /searx/static {
+ alias /usr/local/searx/searx/static;
}
- location @searx {
+
+ location /searx {
uwsgi_param SCRIPT_NAME /searx;
include uwsgi_params;
- uwsgi_modifier1 30;
uwsgi_pass unix:/run/uwsgi/app/searx/socket;
}
@@ -197,6 +202,10 @@ in case of single-user or low-traffic instances.)
.. code:: nginx
+ location /searx/static {
+ alias /usr/local/searx/searx/static;
+ }
+
location /searx {
proxy_pass http://127.0.0.1:8888;
proxy_set_header Host $host;
@@ -338,4 +347,3 @@ References
* How to: `Setup searx in a couple of hours with a free SSL certificate
<https://www.reddit.com/r/privacytoolsIO/comments/366kvn/how_to_setup_your_own_privacy_respecting_search/>`__
-
diff --git a/searx/engines/bing.py b/searx/engines/bing.py
index ed0b87dbd..b193f7c60 100644
--- a/searx/engines/bing.py
+++ b/searx/engines/bing.py
@@ -89,8 +89,7 @@ def response(resp):
'content': content})
try:
- result_len_container = "".join(eval_xpath(dom, '//span[@class="sb_count"]/text()'))
- result_len_container = utils.to_string(result_len_container)
+ result_len_container = "".join(eval_xpath(dom, '//span[@class="sb_count"]//text()'))
if "-" in result_len_container:
# Remove the part "from-to" for paginated request ...
result_len_container = result_len_container[result_len_container.find("-") * 2 + 2:]
@@ -102,7 +101,7 @@ def response(resp):
logger.debug('result error :\n%s', e)
pass
- if _get_offset_from_pageno(resp.search_params.get("pageno", 0)) > result_len:
+ if result_len and _get_offset_from_pageno(resp.search_params.get("pageno", 0)) > result_len:
return []
results.append({'number_of_results': result_len})
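
Note on the XPath change above: `/text()` returns only text nodes that are direct children of the matched `span`, while `//text()` also collects text nested inside child elements, so the result count survives when part of it is wrapped in an inner tag; the separate `utils.to_string()` call becomes unnecessary because `"".join()` already yields a string. A minimal sketch of the difference — the markup below is illustrative, not actual Bing output:

.. code:: python

    from lxml import html

    # Illustrative markup only: the count partially wrapped in a nested span.
    dom = html.fromstring(
        '<html><body>'
        '<span class="sb_count"><span>1-10 of </span>117,000 results</span>'
        '</body></html>')

    print(dom.xpath('//span[@class="sb_count"]/text()'))
    # ['117,000 results']  -- the nested '1-10 of ' is not a direct child
    print(dom.xpath('//span[@class="sb_count"]//text()'))
    # ['1-10 of ', '117,000 results']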
diff --git a/searx/engines/flickr_noapi.py b/searx/engines/flickr_noapi.py
index 198ac2cff..e1abb378f 100644
--- a/searx/engines/flickr_noapi.py
+++ b/searx/engines/flickr_noapi.py
@@ -109,14 +109,22 @@ def response(resp):
else:
url = build_flickr_url(photo['ownerNsid'], photo['id'])
- results.append({'url': url,
- 'title': title,
- 'img_src': img_src,
- 'thumbnail_src': thumbnail_src,
- 'content': content,
- 'author': author,
- 'source': source,
- 'img_format': img_format,
- 'template': 'images.html'})
+ result = {
+ 'url': url,
+ 'img_src': img_src,
+ 'thumbnail_src': thumbnail_src,
+ 'source': source,
+ 'img_format': img_format,
+ 'template': 'images.html'
+ }
+ try:
+ result['author'] = author.encode('utf-8')
+ result['title'] = title.encode('utf-8')
+ result['content'] = content.encode('utf-8')
+ except:
+ result['author'] = ''
+ result['title'] = ''
+ result['content'] = ''
+ results.append(result)
return results
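
The flickr_noapi change builds the result dict first and blanks author, title, and content if encoding them fails, rather than dropping the whole response. A hypothetical helper showing the same guard in isolation — `safe_utf8` is not part of searx, and the fallback behaviour shown is an assumption drawn from the hunk above:

.. code:: python

    def safe_utf8(value):
        """Return the value UTF-8 encoded, or '' when encoding is impossible."""
        try:
            return value.encode('utf-8')
        except (AttributeError, UnicodeError):
            return ''

    print(safe_utf8(u'caf\xe9'))   # b'caf\xc3\xa9'
    print(safe_utf8(None))         # '' -- the value has no .encode() at all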
diff --git a/searx/engines/ina.py b/searx/engines/ina.py
index 37a05f099..ea509649f 100644
--- a/searx/engines/ina.py
+++ b/searx/engines/ina.py
@@ -32,7 +32,7 @@ base_url = 'https://www.ina.fr'
search_url = base_url + '/layout/set/ajax/recherche/result?autopromote=&hf={ps}&b={start}&type=Video&r=&{query}'
# specific xpath variables
-results_xpath = '//div[contains(@class,"search-results--list")]/div[@class="media"]'
+results_xpath = '//div[contains(@class,"search-results--list")]//div[@class="media-body"]'
url_xpath = './/a/@href'
title_xpath = './/h3[@class="h3--title media-heading"]'
thumbnail_xpath = './/img/@src'
@@ -65,8 +65,11 @@ def response(resp):
videoid = result.xpath(url_xpath)[0]
url = base_url + videoid
title = p.unescape(extract_text(result.xpath(title_xpath)))
- thumbnail = extract_text(result.xpath(thumbnail_xpath)[0])
- if thumbnail[0] == '/':
+ try:
+ thumbnail = extract_text(result.xpath(thumbnail_xpath)[0])
+ except:
+ thumbnail = ''
+ if thumbnail and thumbnail[0] == '/':
thumbnail = base_url + thumbnail
d = extract_text(result.xpath(publishedDate_xpath)[0])
d = d.split('/')
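
The ina.py hunk tolerates media entries without a thumbnail: `result.xpath(...)` returns a list, so indexing `[0]` raises an exception when no `<img>` is present. A minimal sketch of that failure mode, using illustrative markup and the `base_url` from the engine:

.. code:: python

    from lxml import html

    # Illustrative markup: a media block that has no <img> element.
    media = html.fromstring(
        '<html><body><div class="media-body"><h3>some title</h3></div></body></html>')

    try:
        thumbnail = media.xpath('//div[@class="media-body"]//img/@src')[0]
    except IndexError:
        thumbnail = ''
    if thumbnail and thumbnail[0] == '/':
        thumbnail = 'https://www.ina.fr' + thumbnail
    print(repr(thumbnail))  # '' -- the entry is kept, just without a thumbnail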
diff --git a/searx/engines/microsoft_academic.py b/searx/engines/microsoft_academic.py
index 9387b08d0..9bac0069c 100644
--- a/searx/engines/microsoft_academic.py
+++ b/searx/engines/microsoft_academic.py
@@ -45,6 +45,8 @@ def request(query, params):
def response(resp):
results = []
response_data = loads(resp.text)
+ if not response_data:
+ return results
for result in response_data['results']:
url = _get_url(result)
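
The added guard covers responses whose JSON body parses to something empty, which would otherwise crash on `response_data['results']`. A small illustration, assuming empty payloads such as `null` or `{}` can occur:

.. code:: python

    from json import loads

    for body in ('null', '{}', '{"results": [{"paper": 1}]}'):
        response_data = loads(body)
        if not response_data:
            print(body, '-> skipped, nothing to iterate')
            continue
        print(body, '->', response_data['results'])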
diff --git a/searx/engines/scanr_structures.py b/searx/engines/scanr_structures.py
index 72fd2b3c9..7208dcb70 100644
--- a/searx/engines/scanr_structures.py
+++ b/searx/engines/scanr_structures.py
@@ -29,7 +29,7 @@ def request(query, params):
params['url'] = search_url
params['method'] = 'POST'
params['headers']['Content-type'] = "application/json"
- params['data'] = dumps({"query": query,
+ params['data'] = dumps({"query": query.decode('utf-8'),
"searchField": "ALL",
"sortDirection": "ASC",
"sortOrder": "RELEVANCY",
diff --git a/searx/settings.yml b/searx/settings.yml
index 2a2d2bf87..2777f9caa 100644
--- a/searx/settings.yml
+++ b/searx/settings.yml
@@ -79,9 +79,10 @@ engines:
categories : science
timeout : 4.0
- - name : base
- engine : base
- shortcut : bs
+# tmp suspended: dh key too small
+# - name : base
+# engine : base
+# shortcut : bs
- name : wikipedia
engine : wikipedia
@@ -552,10 +553,11 @@ engines:
timeout : 10.0
disabled : True
- - name : scanr structures
- shortcut: scs
- engine : scanr_structures
- disabled : True
+# tmp suspended: bad certificate
+# - name : scanr structures
+# shortcut: scs
+# engine : scanr_structures
+# disabled : True
- name : soundcloud
engine : soundcloud