Diffstat (limited to 'searx/webapp.py')
-rw-r--r--  searx/webapp.py  138
1 file changed, 108 insertions(+), 30 deletions(-)
diff --git a/searx/webapp.py b/searx/webapp.py
index 13c965e0d..3ef5a72c8 100644
--- a/searx/webapp.py
+++ b/searx/webapp.py
@@ -26,9 +26,23 @@ import json
import cStringIO
import os
import hashlib
+import requests
+
+from searx import logger
+logger = logger.getChild('webapp')
+
+try:
+    from pygments import highlight
+    from pygments.lexers import get_lexer_by_name
+    from pygments.formatters import HtmlFormatter
+except:
+    logger.critical("cannot import dependency: pygments")
+    from sys import exit
+    exit(1)
from datetime import datetime, timedelta
from urllib import urlencode
+from urlparse import urlparse
from werkzeug.contrib.fixers import ProxyFix
from flask import (
Flask, request, render_template, url_for, Response, make_response,
@@ -36,7 +50,6 @@ from flask import (
)
from flask.ext.babel import Babel, gettext, format_date
from searx import settings, searx_dir
-from searx.poolrequests import get as http_get
from searx.engines import (
categories, engines, get_engines_stats, engine_shortcuts
)
@@ -47,22 +60,21 @@ from searx.utils import (
)
from searx.version import VERSION_STRING
from searx.languages import language_codes
-from searx.https_rewrite import https_url_rewrite
from searx.search import Search
from searx.query import Query
from searx.autocomplete import searx_bang, backends as autocomplete_backends
-from searx import logger
-try:
-    from pygments import highlight
-    from pygments.lexers import get_lexer_by_name
-    from pygments.formatters import HtmlFormatter
-except:
-    logger.critical("cannot import dependency: pygments")
-    from sys import exit
-    exit(1)
+from searx.plugins import plugins
+# check if the pyopenssl, ndg-httpsclient, pyasn1 packages are installed.
+# They are needed for SSL connections without trouble, see #298
+try:
+    import OpenSSL.SSL  # NOQA
+    import ndg.httpsclient  # NOQA
+    import pyasn1  # NOQA
+except ImportError:
+    logger.critical("The pyopenssl, ndg-httpsclient, pyasn1 packages have to be installed.\n"
+                    "Some HTTPS connections will fail")
-logger = logger.getChild('webapp')
static_path, templates_path, themes =\
get_themes(settings['themes_path']
@@ -109,6 +121,8 @@ _category_names = (gettext('files'),
gettext('news'),
gettext('map'))
+outgoing_proxies = settings.get('outgoing_proxies', None)
+
@babel.localeselector
def get_locale():
@@ -180,6 +194,12 @@ def code_highlighter(codelines, language=None):
return html_code
+# Extract domain from url
+@app.template_filter('extract_domain')
+def extract_domain(url):
+    return urlparse(url)[1]
+
+
def get_base_url():
if settings['server']['base_url']:
hostname = settings['server']['base_url']
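The new extract_domain filter added above is a thin wrapper around urlparse: index 1 of the parse result is the network-location part of the URL. A quick illustration, using an arbitrary URL that is not part of the diff:

from urlparse import urlparse  # Python 2, matching webapp.py's imports

url = 'https://example.com:8080/some/path?q=searx'
print(urlparse(url)[1])      # example.com:8080
print(urlparse(url).netloc)  # same value, accessed by attribute name

Note that netloc keeps any port or userinfo present in the URL, so the template filter will display those as well.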
@@ -299,10 +319,37 @@ def render(template_name, override_theme=None, **kwargs):
    kwargs['cookies'] = request.cookies
+    kwargs['scripts'] = set()
+    for plugin in request.user_plugins:
+        for script in plugin.js_dependencies:
+            kwargs['scripts'].add(script)
+
+    kwargs['styles'] = set()
+    for plugin in request.user_plugins:
+        for css in plugin.css_dependencies:
+            kwargs['styles'].add(css)
+
    return render_template(
        '{}/{}'.format(kwargs['theme'], template_name), **kwargs)
+@app.before_request
+def pre_request():
+    # merge GET, POST vars
+    request.form = dict(request.form.items())
+    for k, v in request.args.items():
+        if k not in request.form:
+            request.form[k] = v
+
+    request.user_plugins = []
+    allowed_plugins = request.cookies.get('allowed_plugins', '').split(',')
+    disabled_plugins = request.cookies.get('disabled_plugins', '').split(',')
+    for plugin in plugins:
+        if ((plugin.default_on and plugin.id not in disabled_plugins)
+                or plugin.id in allowed_plugins):
+            request.user_plugins.append(plugin)
+
+
@app.route('/search', methods=['GET', 'POST'])
@app.route('/', methods=['GET', 'POST'])
def index():
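pre_request() above is what turns the two preference cookies into request.user_plugins on every request: a default-on plugin stays active unless it is listed in disabled_plugins, and an opt-in plugin only runs when it is listed in allowed_plugins. A minimal, self-contained sketch of that rule; the plugin ids and cookie values below are invented for the example:

from collections import namedtuple

# Stand-in for searx.plugins entries; ids and defaults are made up.
Plugin = namedtuple('Plugin', ['id', 'default_on'])
plugins = [Plugin('self_info', True), Plugin('open_in_new_tab', False)]

# Stand-in for request.cookies as written by the preferences page.
cookies = {'disabled_plugins': 'self_info', 'allowed_plugins': 'open_in_new_tab'}

allowed_plugins = cookies.get('allowed_plugins', '').split(',')
disabled_plugins = cookies.get('disabled_plugins', '').split(',')

user_plugins = [plugin for plugin in plugins
                if ((plugin.default_on and plugin.id not in disabled_plugins)
                    or plugin.id in allowed_plugins)]
print([plugin.id for plugin in user_plugins])  # ['open_in_new_tab']

Because an absent cookie splits to [''], a plugin id never matches it, which is exactly how webapp.py falls back to the default_on flags.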
@@ -323,20 +370,17 @@ def index():
'index.html',
)
-    search.results, search.suggestions,\
-        search.answers, search.infoboxes = search.search(request)
+    if plugins.call('pre_search', request, locals()):
+        search.search(request)
+
+    plugins.call('post_search', request, locals())
    for result in search.results:
+        plugins.call('on_result', request, locals())
        if not search.paging and engines[result['engine']].paging:
            search.paging = True
-        # check if HTTPS rewrite is required
-        if settings['server']['https_rewrite']\
-                and result['parsed_url'].scheme == 'http':
-
-            result = https_url_rewrite(result)
-
        if search.request_data.get('format', 'html') == 'html':
            if 'content' in result:
                result['content'] = highlight_content(result['content'],
@@ -344,11 +388,10 @@ def index():
            result['title'] = highlight_content(result['title'],
                                                search.query.encode('utf-8'))
        else:
-            if 'content' in result:
+            if result.get('content'):
                result['content'] = html_to_text(result['content']).strip()
            # removing html content and whitespace duplications
-            result['title'] = ' '.join(html_to_text(result['title'])
-                                       .strip().split())
+            result['title'] = ' '.join(html_to_text(result['title']).strip().split())
        result['pretty_url'] = prettify_url(result['url'])
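The index() changes above route every query through the new plugin hooks: 'pre_search' can veto the whole search, 'post_search' runs once per query, and 'on_result' fires inside the loop for each individual result. The searx.plugins module itself is not part of this diff, so the sketch below only illustrates the hook surface this code appears to rely on; the class layout, attribute defaults and the call() helper are assumptions, not the actual implementation:

class ExamplePlugin(object):
    # attributes referenced elsewhere in this diff
    id = 'example'            # matched against the allowed/disabled cookies
    default_on = False        # opt-in unless the user enables it
    js_dependencies = ()      # merged into kwargs['scripts'] by render()
    css_dependencies = ()     # merged into kwargs['styles'] by render()

    def pre_search(self, request, ctx):
        # returning something falsy would make index() skip search.search()
        return True

    def post_search(self, request, ctx):
        return True

    def on_result(self, request, ctx):
        # called once per result; ctx is the caller's locals() dict
        return True


def call(user_plugins, hook_name, request, ctx):
    """Rough stand-in for what plugins.call() presumably does."""
    ret = True
    for plugin in user_plugins:
        if hasattr(plugin, hook_name):
            ret = getattr(plugin, hook_name)(request, ctx)
            if not ret:
                break
    return ret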
@@ -487,11 +530,11 @@ def preferences():
        blocked_engines = get_blocked_engines(engines, request.cookies)
    else:  # on save
        selected_categories = []
+        post_disabled_plugins = []
        locale = None
        autocomplete = ''
        method = 'POST'
        safesearch = '1'
-
        for pd_name, pd in request.form.items():
            if pd_name.startswith('category_'):
                category = pd_name[9:]
@@ -514,14 +557,34 @@ def preferences():
                safesearch = pd
            elif pd_name.startswith('engine_'):
                if pd_name.find('__') > -1:
-                    engine_name, category = pd_name.replace('engine_', '', 1).split('__', 1)
+                    # TODO fix underscore vs space
+                    engine_name, category = [x.replace('_', ' ') for x in
+                                             pd_name.replace('engine_', '', 1).split('__', 1)]
                    if engine_name in engines and category in engines[engine_name].categories:
                        blocked_engines.append((engine_name, category))
            elif pd_name == 'theme':
                theme = pd if pd in themes else default_theme
+            elif pd_name.startswith('plugin_'):
+                plugin_id = pd_name.replace('plugin_', '', 1)
+                if not any(plugin.id == plugin_id for plugin in plugins):
+                    continue
+                post_disabled_plugins.append(plugin_id)
            else:
                resp.set_cookie(pd_name, pd, max_age=cookie_max_age)
+        disabled_plugins = []
+        allowed_plugins = []
+        for plugin in plugins:
+            if plugin.default_on:
+                if plugin.id in post_disabled_plugins:
+                    disabled_plugins.append(plugin.id)
+            elif plugin.id not in post_disabled_plugins:
+                allowed_plugins.append(plugin.id)
+
+        resp.set_cookie('disabled_plugins', ','.join(disabled_plugins), max_age=cookie_max_age)
+
+        resp.set_cookie('allowed_plugins', ','.join(allowed_plugins), max_age=cookie_max_age)
+
resp.set_cookie(
'blocked_engines', ','.join('__'.join(e) for e in blocked_engines),
max_age=cookie_max_age
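Form field names cannot carry spaces, so engine names containing spaces are posted with underscores and mapped back in the handler above; the TODO notes that an engine name containing a real underscore would be mangled by this. A worked example of the round trip, with a made-up engine name:

# Hypothetical engine whose name contains a space.
engine_name, category = 'wikimedia commons', 'images'

# The preferences form encodes it with underscores...
pd_name = 'engine_{}__{}'.format(engine_name.replace(' ', '_'),
                                 category.replace(' ', '_'))
print(pd_name)  # engine_wikimedia_commons__images

# ...and the handler maps the underscores back to spaces:
decoded = [x.replace('_', ' ') for x in
           pd_name.replace('engine_', '', 1).split('__', 1)]
print(decoded)  # ['wikimedia commons', 'images']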
@@ -566,11 +629,13 @@ def preferences():
current_language=lang or 'all',
image_proxy=image_proxy,
language_codes=language_codes,
- categs=categories.items(),
+ engines_by_category=categories,
blocked_engines=blocked_engines,
autocomplete_backends=autocomplete_backends,
shortcuts={y: x for x, y in engine_shortcuts.items()},
themes=themes,
+ plugins=plugins,
+ allowed_plugins=[plugin.id for plugin in request.user_plugins],
theme=get_current_theme_name())
@@ -589,10 +654,11 @@ def image_proxy():
headers = dict_subset(request.headers, {'If-Modified-Since', 'If-None-Match'})
headers['User-Agent'] = gen_useragent()
-    resp = http_get(url,
-                    stream=True,
-                    timeout=settings['server'].get('request_timeout', 2),
-                    headers=headers)
+    resp = requests.get(url,
+                        stream=True,
+                        timeout=settings['server'].get('request_timeout', 2),
+                        headers=headers,
+                        proxies=outgoing_proxies)
    if resp.status_code == 304:
        return '', resp.status_code
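image_proxy now issues the request directly through requests and hands it the instance-wide outgoing_proxies value, so that setting is expected to be the scheme-to-proxy-URL mapping requests takes as its proxies argument. A small sketch; the proxy addresses and image URL are placeholders, not searx defaults:

import requests

# Example outgoing_proxies value routing outgoing requests through a local
# HTTP proxy; the addresses are placeholders.
outgoing_proxies = {
    'http': 'http://127.0.0.1:3128',
    'https': 'http://127.0.0.1:3128',
}

resp = requests.get('https://example.com/image.png',
                    stream=True,
                    timeout=2,
                    proxies=outgoing_proxies)
print(resp.status_code)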
@@ -644,6 +710,10 @@ Disallow: /preferences
@app.route('/opensearch.xml', methods=['GET'])
def opensearch():
    method = 'post'
+
+    if request.cookies.get('method', 'POST') == 'GET':
+        method = 'get'
+
    # chrome/chromium only supports HTTP GET....
    if request.headers.get('User-Agent', '').lower().find('webkit') >= 0:
        method = 'get'
@@ -668,6 +738,14 @@ def favicon():
mimetype='image/vnd.microsoft.icon')
+@app.route('/clear_cookies')
+def clear_cookies():
+    resp = make_response(redirect(url_for('index')))
+    for cookie_name in request.cookies:
+        resp.delete_cookie(cookie_name)
+    return resp
+
+
def run():
app.run(
debug=settings['server']['debug'],
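Finally, the new /clear_cookies endpoint simply redirects to the index page while expiring every cookie the request carried. A quick way to poke at it against a locally running instance; the base URL assumes the default 127.0.0.1:8888 bind and is only an example:

import requests

# Assumed local instance; adjust to wherever searx actually runs.
resp = requests.get('http://127.0.0.1:8888/clear_cookies',
                    cookies={'disabled_plugins': 'example'},
                    allow_redirects=False)
print(resp.status_code)                # 302 redirect back to the index page
print(resp.headers.get('Set-Cookie'))  # expired 'disabled_plugins' cookie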