author    Markus Heiser <markus.heiser@darmarIT.de>  2019-12-24 17:45:13 +0100
committer GitHub <noreply@github.com>                2019-12-24 17:45:13 +0100
commit    ecb054a7a058a1f62a536e5cac88eed8926b107d (patch)
tree      925594876f18580732d2c8a438ff8f3bea8d9092
parent    cc8d4b958e274eb9e154db5c319d2e50da561d61 (diff)
parent    5a0a66e9bc34af2b6404231efc7cf02f389bdfcb (diff)
download  searxng-ecb054a7a058a1f62a536e5cac88eed8926b107d.tar.gz
          searxng-ecb054a7a058a1f62a536e5cac88eed8926b107d.zip
Merge branch 'master' into patch-1
-rw-r--r--  .dir-locals.el | 133
-rw-r--r--  .gitignore | 6
-rw-r--r--  .pylintrc | 444
-rw-r--r--  AUTHORS.rst | 4
-rw-r--r--  Dockerfile | 34
-rw-r--r--  Makefile | 85
-rwxr-xr-x  dockerfiles/docker-entrypoint.sh | 4
-rw-r--r--  docs/_themes/searx/static/searx.css | 30
-rw-r--r--  docs/_themes/searx/theme.conf | 6
-rw-r--r--  docs/admin/api.rst | 96
-rw-r--r--  docs/admin/filtron.rst | 148
-rw-r--r--  docs/admin/index.rst | 11
-rw-r--r--  docs/admin/installation.rst | 341
-rw-r--r--  docs/admin/morty.rst | 26
-rw-r--r--  docs/blog/admin.rst | 43
-rw-r--r--  docs/blog/index.rst | 10
-rw-r--r--  docs/blog/intro-offline.rst | 77
-rw-r--r--  docs/blog/python3.rst | 68
-rw-r--r--  docs/blog/searx-admin-engines.png | bin 0 -> 50840 bytes
-rw-r--r--  docs/blog/searxpy3.png | bin 0 -> 30947 bytes
-rw-r--r--  docs/conf.py | 83
-rw-r--r--  docs/dev/contribution_guide.rst | 147
-rw-r--r--  docs/dev/engine_overview.rst | 265
-rw-r--r--  docs/dev/index.rst | 13
-rw-r--r--  docs/dev/plugins.rst | 48
-rw-r--r--  docs/dev/quickstart.rst | 110
-rw-r--r--  docs/dev/search_api.rst | 112
-rw-r--r--  docs/dev/translation.rst | 71
-rw-r--r--  docs/index.rst | 32
-rw-r--r--  docs/static/img/searx_logo_small.png | bin 0 -> 6472 bytes
-rw-r--r--  docs/user/index.rst | 9
-rw-r--r--  docs/user/own-instance.rst | 77
-rw-r--r--  docs/user/search_syntax.rst | 42
-rwxr-xr-x  manage.sh | 10
-rw-r--r--  requirements-dev.txt | 3
-rw-r--r--  searx/data/useragents.json | 15
-rw-r--r--  searx/engines/__init__.py | 27
-rw-r--r--  searx/engines/arxiv.py | 1
-rw-r--r--  searx/engines/bing.py | 20
-rw-r--r--  searx/engines/deviantart.py | 47
-rw-r--r--  searx/engines/dictzone.py | 8
-rw-r--r--  searx/engines/digg.py | 36
-rw-r--r--  searx/engines/doku.py | 15
-rw-r--r--  searx/engines/duckduckgo.py | 43
-rw-r--r--  searx/engines/duckduckgo_definitions.py | 19
-rw-r--r--  searx/engines/duden.py | 15
-rw-r--r--  searx/engines/framalibre.py | 5
-rw-r--r--  searx/engines/gigablast.py | 43
-rw-r--r--  searx/engines/google.py | 52
-rw-r--r--  searx/engines/google_images.py | 16
-rw-r--r--  searx/engines/openstreetmap.py | 2
-rw-r--r--  searx/engines/qwant.py | 1
-rw-r--r--  searx/engines/seedpeer.py | 78
-rw-r--r--  searx/engines/soundcloud.py | 6
-rw-r--r--  searx/engines/startpage.py | 35
-rw-r--r--  searx/engines/wikidata.py | 18
-rw-r--r--  searx/engines/wikipedia.py | 9
-rw-r--r--  searx/engines/wolframalpha_noapi.py | 2
-rw-r--r--  searx/engines/www1x.py | 35
-rw-r--r--  searx/engines/xpath.py | 20
-rw-r--r--  searx/engines/yahoo.py | 18
-rw-r--r--  searx/exceptions.py | 1
-rw-r--r--  searx/plugins/https_rewrite.py | 3
-rw-r--r--  searx/plugins/oa_doi_rewrite.py | 3
-rw-r--r--  searx/plugins/tracker_url_remover.py | 27
-rw-r--r--  searx/query.py | 2
-rw-r--r--  searx/results.py | 82
-rw-r--r--  searx/search.py | 74
-rw-r--r--  searx/settings.yml | 34
-rw-r--r--  searx/settings_robot.yml | 4
-rw-r--r--  searx/static/plugins/js/vim_hotkeys.js | 12
-rw-r--r--  searx/static/themes/courgette/css/style.css | 2
-rw-r--r--  searx/static/themes/courgette/less/style.less | 4
-rw-r--r--  searx/static/themes/legacy/css/style.css | 2
-rw-r--r--  searx/static/themes/legacy/less/autocompleter.less | 122
-rw-r--r--  searx/static/themes/legacy/less/style.less | 4
-rw-r--r--  searx/static/themes/oscar/gruntfile.js | 6
-rw-r--r--  searx/static/themes/oscar/js/searx.js | 652
-rw-r--r--  searx/static/themes/oscar/js/searx_src/00_requirejs_config.js | 46
-rw-r--r--  searx/static/themes/oscar/js/searx_src/autocompleter.js | 74
-rw-r--r--  searx/static/themes/oscar/js/searx_src/element_modifiers.js | 198
-rw-r--r--  searx/static/themes/oscar/js/searx_src/leaflet_map.js | 334
-rw-r--r--  searx/static/themes/oscar/less/logicodev-dark/oscar.less | 4
-rw-r--r--  searx/static/themes/oscar/less/logicodev/code.less | 4
-rw-r--r--  searx/static/themes/oscar/less/logicodev/infobox.less | 2
-rw-r--r--  searx/static/themes/oscar/less/logicodev/navbar.less | 1
-rw-r--r--  searx/static/themes/oscar/less/pointhi/code.less | 2
-rw-r--r--  searx/static/themes/oscar/less/pointhi/infobox.less | 2
-rw-r--r--  searx/static/themes/simple/leaflet/leaflet.css | 1272
-rw-r--r--  searx/templates/courgette/result_templates/key-value.html | 13
-rw-r--r--  searx/templates/courgette/result_templates/torrent.html | 2
-rw-r--r--  searx/templates/legacy/result_templates/key-value.html | 13
-rw-r--r--  searx/templates/legacy/result_templates/torrent.html | 2
-rw-r--r--  searx/templates/oscar/advanced.html | 9
-rw-r--r--  searx/templates/oscar/base.html | 21
-rw-r--r--  searx/templates/oscar/categories.html | 18
-rw-r--r--  searx/templates/oscar/infobox.html | 33
-rw-r--r--  searx/templates/oscar/languages.html | 18
-rw-r--r--  searx/templates/oscar/macros.html | 40
-rw-r--r--  searx/templates/oscar/navbar.html | 16
-rw-r--r--  searx/templates/oscar/preferences.html | 66
-rw-r--r--  searx/templates/oscar/result_templates/code.html | 36
-rw-r--r--  searx/templates/oscar/result_templates/default.html | 62
-rw-r--r--  searx/templates/oscar/result_templates/images.html | 85
-rw-r--r--  searx/templates/oscar/result_templates/key-value.html | 19
-rw-r--r--  searx/templates/oscar/result_templates/map.html | 144
-rw-r--r--  searx/templates/oscar/result_templates/torrent.html | 2
-rw-r--r--  searx/templates/oscar/result_templates/videos.html | 54
-rw-r--r--  searx/templates/oscar/results.html | 312
-rw-r--r--  searx/templates/oscar/search.html | 48
-rw-r--r--  searx/templates/oscar/search_full.html | 36
-rw-r--r--  searx/templates/oscar/time-range.html | 22
-rw-r--r--  searx/templates/simple/result_templates/key-value.html | 11
-rw-r--r--  searx/templates/simple/result_templates/torrent.html | 2
-rw-r--r--  searx/templates/simple/results.html | 4
-rw-r--r--  searx/utils.py | 35
-rw-r--r--  searx/webapp.py | 46
-rw-r--r--  setup.py | 12
-rw-r--r--  tests/unit/engines/__init__.py | 0
-rw-r--r--  tests/unit/engines/pubmed.py | 37
-rw-r--r--  tests/unit/engines/seedpeer_fixture.html | 110
-rw-r--r--  tests/unit/engines/test_acgsou.py | 78
-rw-r--r--  tests/unit/engines/test_archlinux.py | 111
-rw-r--r--  tests/unit/engines/test_arxiv.py | 58
-rw-r--r--  tests/unit/engines/test_base.py | 91
-rw-r--r--  tests/unit/engines/test_bing.py | 178
-rw-r--r--  tests/unit/engines/test_bing_images.py | 132
-rw-r--r--  tests/unit/engines/test_bing_news.py | 147
-rw-r--r--  tests/unit/engines/test_bing_videos.py | 72
-rw-r--r--  tests/unit/engines/test_btdigg.py | 112
-rw-r--r--  tests/unit/engines/test_currency_convert.py | 56
-rw-r--r--  tests/unit/engines/test_dailymotion.py | 112
-rw-r--r--  tests/unit/engines/test_deezer.py | 57
-rw-r--r--  tests/unit/engines/test_deviantart.py | 95
-rw-r--r--  tests/unit/engines/test_digbt.py | 61
-rw-r--r--  tests/unit/engines/test_digg.py | 101
-rw-r--r--  tests/unit/engines/test_doku.py | 79
-rw-r--r--  tests/unit/engines/test_duckduckgo.py | 106
-rw-r--r--  tests/unit/engines/test_duckduckgo_definitions.py | 255
-rw-r--r--  tests/unit/engines/test_duckduckgo_images.py | 75
-rw-r--r--  tests/unit/engines/test_duden.py | 47
-rw-r--r--  tests/unit/engines/test_dummy.py | 26
-rw-r--r--  tests/unit/engines/test_faroo.py | 113
-rw-r--r--  tests/unit/engines/test_fdroid.py | 60
-rw-r--r--  tests/unit/engines/test_flickr.py | 142
-rw-r--r--  tests/unit/engines/test_flickr_noapi.py | 357
-rw-r--r--  tests/unit/engines/test_framalibre.py | 103
-rw-r--r--  tests/unit/engines/test_frinkiac.py | 50
-rw-r--r--  tests/unit/engines/test_genius.py | 231
-rw-r--r--  tests/unit/engines/test_gigablast.py | 119
-rw-r--r--  tests/unit/engines/test_github.py | 61
-rw-r--r--  tests/unit/engines/test_google.py | 237
-rw-r--r--  tests/unit/engines/test_google_images.py | 27
-rw-r--r--  tests/unit/engines/test_google_news.py | 102
-rw-r--r--  tests/unit/engines/test_google_videos.py | 79
-rw-r--r--  tests/unit/engines/test_ina.py | 64
-rw-r--r--  tests/unit/engines/test_kickass.py | 397
-rw-r--r--  tests/unit/engines/test_mediawiki.py | 130
-rw-r--r--  tests/unit/engines/test_mixcloud.py | 67
-rw-r--r--  tests/unit/engines/test_nyaa.py | 124
-rw-r--r--  tests/unit/engines/test_openstreetmap.py | 199
-rw-r--r--  tests/unit/engines/test_pdbe.py | 109
-rw-r--r--  tests/unit/engines/test_photon.py | 166
-rw-r--r--  tests/unit/engines/test_piratebay.py | 166
-rw-r--r--  tests/unit/engines/test_qwant.py | 339
-rw-r--r--  tests/unit/engines/test_reddit.py | 71
-rw-r--r--  tests/unit/engines/test_scanr_structures.py | 175
-rw-r--r--  tests/unit/engines/test_searchcode_code.py | 75
-rw-r--r--  tests/unit/engines/test_searchcode_doc.py | 70
-rw-r--r--  tests/unit/engines/test_soundcloud.py | 192
-rw-r--r--  tests/unit/engines/test_spotify.py | 124
-rw-r--r--  tests/unit/engines/test_stackoverflow.py | 106
-rw-r--r--  tests/unit/engines/test_startpage.py | 140
-rw-r--r--  tests/unit/engines/test_tokyotoshokan.py | 110
-rw-r--r--  tests/unit/engines/test_torrentz.py | 87
-rw-r--r--  tests/unit/engines/test_twitter.py | 502
-rw-r--r--  tests/unit/engines/test_unsplash.py | 38
-rw-r--r--  tests/unit/engines/test_vimeo.py | 36
-rw-r--r--  tests/unit/engines/test_wikidata.py | 514
-rw-r--r--  tests/unit/engines/test_wikipedia.py | 263
-rw-r--r--  tests/unit/engines/test_wolframalpha_api.py | 166
-rw-r--r--  tests/unit/engines/test_wolframalpha_noapi.py | 224
-rw-r--r--  tests/unit/engines/test_www1x.py | 57
-rw-r--r--  tests/unit/engines/test_yacy.py | 96
-rw-r--r--  tests/unit/engines/test_yahoo.py | 190
-rw-r--r--  tests/unit/engines/test_yahoo_news.py | 150
-rw-r--r--  tests/unit/engines/test_youtube_api.py | 111
-rw-r--r--  tests/unit/engines/test_youtube_noapi.py | 124
-rw-r--r--  tests/unit/engines/unsplash_fixture.json | 241
-rw-r--r--  utils/makefile.include | 128
-rw-r--r--  utils/makefile.python | 290
-rw-r--r--  utils/makefile.sphinx | 215
192 files changed, 5720 insertions, 11741 deletions
diff --git a/.dir-locals.el b/.dir-locals.el
new file mode 100644
index 000000000..d7ec87921
--- /dev/null
+++ b/.dir-locals.el
@@ -0,0 +1,133 @@
+;;; .dir-locals.el
+;;
+;; If you get ``*** EPC Error ***`` (even after a jedi:install-server) in your
+;; emacs session, most likely you have jedi-mode enabled but the python
+;; environment is missing. The python environment has to be next to the
+;; ``<repo>/.dir-locals.el`` in::
+;;
+;; ./local/py3
+;;
+;; In Emacs, some buffer locals are referencing the project environment:
+;;
+;; - prj-root --> <repo>/
+;; - python-environment-directory --> <repo>/local
+;; - python-environment-default-root-name --> py3
+;; - python-shell-virtualenv-root --> <repo>/local/py3
+;; When this variable is set with the path of the virtualenv to use,
+;; `process-environment' and `exec-path' get proper values in order to run
+;; shells inside the specified virtualenv, example::
+;; (setq python-shell-virtualenv-root "/path/to/env/")
+;;
+;; To set up such an environment, build the target 'pyenv' or 'pyenvinstall'::
+;;
+;; $ make pyenvinstall
+;;
+;; Alternatively create the virtualenv, source it and install jedi + epc
+;; (required by `emacs-jedi <https://tkf.github.io/emacs-jedi>`_)::
+;;
+;; $ virtualenv --python=python3 "--no-site-packages" ./local/py3
+;; ...
+;; $ source ./local/py3/bin/activate
+;; (py3)$ # now install into the activated 'py3' environment ..
+;; (py3)$ pip install jedi epc
+;; ...
+;;
+;; Here is what I also found useful to add to my .emacs::
+;;
+;; (global-set-key [f6] 'flycheck-mode)
+;; (add-hook 'python-mode-hook 'my:python-mode-hook)
+;;
+;; (defun my:python-mode-hook ()
+;; (add-to-list 'company-backends 'company-jedi)
+;; (require 'jedi-core)
+;; (jedi:setup)
+;; (define-key python-mode-map (kbd "C-c C-d") 'jedi:show-doc)
+;; (define-key python-mode-map (kbd "M-.") 'jedi:goto-definition)
+;; (define-key python-mode-map (kbd "M-,") 'jedi:goto-definition-pop-marker)
+;; )
+;;
+
+((nil
+ . ((fill-column . 80)
+ ))
+ (python-mode
+ . ((indent-tabs-mode . nil)
+
+ ;; project root folder is where the `.dir-locals.el' is located
+ (eval . (setq-local
+ prj-root (locate-dominating-file default-directory ".dir-locals.el")))
+
+ (eval . (setq-local
+ python-environment-directory (expand-file-name "./local" prj-root)))
+
+ ;; use 'py3' environment as default
+ (eval . (setq-local
+ python-environment-default-root-name "py3"))
+
+ (eval . (setq-local
+ python-shell-virtualenv-root
+ (concat python-environment-directory
+ "/"
+ python-environment-default-root-name)))
+
+ ;; python-shell-virtualenv-path is obsolete, use python-shell-virtualenv-root!
+ ;; (eval . (setq-local
+ ;; python-shell-virtualenv-path python-shell-virtualenv-root))
+
+ (eval . (setq-local
+ python-shell-interpreter
+ (expand-file-name "bin/python" python-shell-virtualenv-root)))
+
+ (eval . (setq-local
+ python-environment-virtualenv
+ (list (expand-file-name "bin/virtualenv" python-shell-virtualenv-root)
+ ;;"--system-site-packages"
+ "--quiet")))
+
+ (eval . (setq-local
+ pylint-command
+ (expand-file-name "bin/pylint" python-shell-virtualenv-root)))
+
+ ;; pylint will find the '.pylintrc' file next to the CWD
+ ;; https://pylint.readthedocs.io/en/latest/user_guide/run.html#command-line-options
+ (eval . (setq-local
+ flycheck-pylintrc ".pylintrc"))
+
+ ;; flycheck & other python stuff should use the local py3 environment
+ (eval . (setq-local
+ flycheck-python-pylint-executable python-shell-interpreter))
+
+ ;; use 'M-x jedi:show-setup-info' and 'M-x epc:controller' to inspect jedi server
+
+ ;; https://tkf.github.io/emacs-jedi/latest/#jedi:environment-root -- You
+ ;; can specify a full path instead of a name (relative path). In that case,
+ ;; python-environment-directory is ignored and Python virtual environment
+ ;; is created at the specified path.
+ (eval . (setq-local jedi:environment-root python-shell-virtualenv-root))
+
+ ;; https://tkf.github.io/emacs-jedi/latest/#jedi:server-command
+ (eval .(setq-local
+ jedi:server-command
+ (list python-shell-interpreter
+ jedi:server-script)
+ ))
+
+ ;; jedi:environment-virtualenv --> see above 'python-environment-virtualenv'
+ ;; is set buffer local! No need to setup jedi:environment-virtualenv:
+ ;;
+ ;; Virtualenv command to use. A list of string. If it is nil,
+ ;; python-environment-virtualenv is used instead. You must set non-nil
+ ;; value to jedi:environment-root in order to make this setting work.
+ ;;
+ ;; https://tkf.github.io/emacs-jedi/latest/#jedi:environment-virtualenv
+ ;;
+ ;; (eval . (setq-local
+ ;; jedi:environment-virtualenv
+ ;; (list (expand-file-name "bin/virtualenv" python-shell-virtualenv-root)
+ ;; ;;"--python"
+ ;; ;;"/usr/bin/python3.4"
+ ;; )))
+
+ ;; jedi:server-args
+
+ )))
diff --git a/.gitignore b/.gitignore
index db20da83e..069dfd35b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -18,3 +18,9 @@ setup.cfg
node_modules/
.tx/
+
+build/
+dist/
+local/
+gh-pages/
+searx.egg-info/
diff --git a/.pylintrc b/.pylintrc
new file mode 100644
index 000000000..3b4adb2ca
--- /dev/null
+++ b/.pylintrc
@@ -0,0 +1,444 @@
+# -*- coding: utf-8; mode: conf -*-
+# lint Python modules using external checkers.
+#
+# This is the main checker controlling the other ones and the reports
+# generation. It is itself both a raw checker and an astng checker in order
+# to:
+# * handle message activation / deactivation at the module level
+# * handle some basic but necessary stats'data (number of classes, methods...)
+#
+[MASTER]
+
+# A comma-separated list of package or module names from where C extensions may
+# be loaded. Extensions are loading into the active Python interpreter and may
+# run arbitrary code
+extension-pkg-whitelist=
+
+# Add files or directories to the blacklist. They should be base names, not
+# paths.
+ignore=CVS, .git, .svn
+
+# Add files or directories matching the regex patterns to the blacklist. The
+# regex matches against base names, not paths.
+ignore-patterns=
+
+# Python code to execute, usually for sys.path manipulation such as
+# pygtk.require().
+#init-hook=
+
+# Use multiple processes to speed up Pylint.
+jobs=1
+
+# List of plugins (as comma separated values of python modules names) to load,
+# usually to register additional checkers.
+load-plugins=
+
+# Pickle collected data for later comparisons.
+persistent=yes
+
+# Specify a configuration file.
+#rcfile=
+
+# Allow loading of arbitrary C extensions. Extensions are imported into the
+# active Python interpreter and may run arbitrary code.
+unsafe-load-any-extension=no
+
+
+[MESSAGES CONTROL]
+
+# Only show warnings with the listed confidence levels. Leave empty to show
+# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED
+confidence=
+
+# Disable the message, report, category or checker with the given id(s). You
+# can either give multiple identifiers separated by comma (,) or put this
+# option multiple times (only on the command line, not in the configuration
+# file where it should appear only once).You can also use "--disable=all" to
+# disable everything first and then reenable specific checks. For example, if
+# you want to run only the similarities checker, you can use "--disable=all
+# --enable=similarities". If you want to run only the classes checker, but have
+# no Warning level messages displayed, use"--disable=all --enable=classes
+# --disable=W"
+disable=bad-whitespace, duplicate-code
+
+# Enable the message, report, category or checker with the given id(s). You can
+# either give multiple identifier separated by comma (,) or put this option
+# multiple time (only on the command line, not in the configuration file where
+# it should appear only once). See also the "--disable" option for examples.
+enable=
+
+
+[REPORTS]
+
+# Python expression which should return a note less than 10 (10 is the highest
+# note). You have access to the variables errors warning, statement which
+# respectively contain the number of errors / warnings messages and the total
+# number of statements analyzed. This is used by the global evaluation report
+# (RP0004).
+evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
+
+# Template used to display messages. This is a python new-style format string
+# used to format the message information. See doc for all details
+
+# HINT: do not set this here, use argument --msg-template=...
+#msg-template={path}:{line}: [{msg_id}({symbol}),{obj}] {msg}
+
+# Set the output format. Available formats are text, parseable, colorized, json
+# and msvs (visual studio).You can also give a reporter class, eg
+# mypackage.mymodule.MyReporterClass.
+
+# HINT: do not set this here, use argument --output-format=...
+#output-format=text
+
+# Tells whether to display a full report or only the messages
+reports=no
+
+# Activate the evaluation score.
+score=yes
+
+
+[REFACTORING]
+
+# Maximum number of nested blocks for function / method body
+max-nested-blocks=5
+
+
+[BASIC]
+
+# List of builtins function names that should not be used, separated by a comma
+bad-functions=map,filter,apply,input
+
+# Naming hint for argument names
+argument-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
+
+# Regular expression matching correct argument names
+argument-rgx=(([a-z][a-zA-Z0-9_]{2,30})|(_[a-z0-9_]*))$
+
+# Naming hint for attribute names
+attr-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
+
+# Regular expression matching correct attribute names
+attr-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*)|([A-Z0-9_]*))$
+
+# Bad variable names which should always be refused, separated by a comma
+bad-names=foo,bar,baz,toto,tutu,tata
+
+# Naming hint for class attribute names
+class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
+
+# Regular expression matching correct class attribute names
+class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
+
+# Naming hint for class names
+class-name-hint=[A-Z_][a-zA-Z0-9]+$
+
+# Regular expression matching correct class names
+class-rgx=[A-Z_][a-zA-Z0-9]+$
+
+# Naming hint for constant names
+const-name-hint=(([A-Z_][A-Z0-9_]*)|(__.*__))$
+
+# Regular expression matching correct constant names
+const-rgx=(([a-zA-Z_][a-zA-Z0-9_]*)|(__.*__))$
+#const-rgx=[f]?[A-Z_][a-zA-Z0-9_]{2,30}$
+
+# Minimum line length for functions/classes that require docstrings, shorter
+# ones are exempt.
+docstring-min-length=-1
+
+# Naming hint for function names
+function-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
+
+# Regular expression matching correct function names
+function-rgx=(([a-z][a-zA-Z0-9_]{2,30})|(_[a-z0-9_]*))$
+
+# Good variable names which should always be accepted, separated by a comma
+good-names=i,j,k,ex,Run,_,log,cfg,id
+
+# Include a hint for the correct naming format with invalid-name
+include-naming-hint=no
+
+# Naming hint for inline iteration names
+inlinevar-name-hint=[A-Za-z_][A-Za-z0-9_]*$
+
+# Regular expression matching correct inline iteration names
+inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
+
+# Naming hint for method names
+method-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
+
+# Regular expression matching correct method names
+method-rgx=(([a-z][a-zA-Z0-9_]{2,30})|(_[a-z0-9_]*))$
+
+# Naming hint for module names
+module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
+
+# Regular expression matching correct module names
+#module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
+module-rgx=([a-z_][a-z0-9_]*)$
+
+# Colon-delimited sets of names that determine each other's naming style when
+# the name regexes allow several styles.
+name-group=
+
+# Regular expression which should only match function or class names that do
+# not require a docstring.
+no-docstring-rgx=^_
+
+# List of decorators that produce properties, such as abc.abstractproperty. Add
+# to this list to register other decorators that produce valid properties.
+property-classes=abc.abstractproperty
+
+# Naming hint for variable names
+variable-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
+
+# Regular expression matching correct variable names
+variable-rgx=(([a-z][a-zA-Z0-9_]{2,30})|(_[a-z0-9_]*)|([a-z]))$
+
+
+[FORMAT]
+
+# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
+expected-line-ending-format=
+
+# Regexp for a line that is allowed to be longer than the limit.
+ignore-long-lines=^\s*(# )?<?https?://\S+>?$
+
+# Number of spaces of indent required inside a hanging or continued line.
+indent-after-paren=4
+
+# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
+# tab).
+indent-string=' '
+
+# Maximum number of characters on a single line.
+max-line-length=120
+
+# Maximum number of lines in a module
+max-module-lines=2000
+
+# List of optional constructs for which whitespace checking is disabled. `dict-
+# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}.
+# `trailing-comma` allows a space between comma and closing bracket: (a, ).
+# `empty-line` allows space-only lines.
+no-space-check=trailing-comma,dict-separator
+
+# Allow the body of a class to be on the same line as the declaration if body
+# contains single statement.
+single-line-class-stmt=no
+
+# Allow the body of an if to be on the same line as the test if there is no
+# else.
+single-line-if-stmt=no
+
+
+[LOGGING]
+
+# Logging modules to check that the string format arguments are in logging
+# function parameter format
+logging-modules=logging
+
+
+[MISCELLANEOUS]
+
+# List of note tags to take in consideration, separated by a comma.
+notes=FIXME,XXX,TODO
+
+
+[SIMILARITIES]
+
+# Ignore comments when computing similarities.
+ignore-comments=yes
+
+# Ignore docstrings when computing similarities.
+ignore-docstrings=yes
+
+# Ignore imports when computing similarities.
+ignore-imports=no
+
+# Minimum lines number of a similarity.
+min-similarity-lines=4
+
+
+[SPELLING]
+
+# Spelling dictionary name. Available dictionaries: none. To make it working
+# install python-enchant package.
+spelling-dict=
+
+# List of comma separated words that should not be checked.
+spelling-ignore-words=
+
+# A path to a file that contains private dictionary; one word per line.
+spelling-private-dict-file=
+
+# Tells whether to store unknown words to indicated private dictionary in
+# --spelling-private-dict-file option instead of raising a message.
+spelling-store-unknown-words=no
+
+
+[TYPECHECK]
+
+# List of decorators that produce context managers, such as
+# contextlib.contextmanager. Add to this list to register other decorators that
+# produce valid context managers.
+contextmanager-decorators=contextlib.contextmanager
+
+# List of members which are set dynamically and missed by pylint inference
+# system, and so shouldn't trigger E1101 when accessed. Python regular
+# expressions are accepted.
+generated-members=
+
+# Tells whether missing members accessed in mixin class should be ignored. A
+# mixin class is detected if its name ends with "mixin" (case insensitive).
+ignore-mixin-members=yes
+
+# This flag controls whether pylint should warn about no-member and similar
+# checks whenever an opaque object is returned when inferring. The inference
+# can return multiple potential results while evaluating a Python object, but
+# some branches might not be evaluated, which results in partial inference. In
+# that case, it might be useful to still emit no-member and other checks for
+# the rest of the inferred objects.
+ignore-on-opaque-inference=yes
+
+# List of class names for which member attributes should not be checked (useful
+# for classes with dynamically set attributes). This supports the use of
+# qualified names.
+ignored-classes=optparse.Values,thread._local,_thread._local
+
+# List of module names for which member attributes should not be checked
+# (useful for modules/projects where namespaces are manipulated during runtime
+# and thus existing member attributes cannot be deduced by static analysis. It
+# supports qualified module names, as well as Unix pattern matching.
+ignored-modules=
+
+# Show a hint with possible names when a member name was not found. The aspect
+# of finding the hint is based on edit distance.
+missing-member-hint=yes
+
+# The minimum edit distance a name should have in order to be considered a
+# similar match for a missing member name.
+missing-member-hint-distance=1
+
+# The total number of similar names that should be taken in consideration when
+# showing a hint for a missing member.
+missing-member-max-choices=1
+
+
+[VARIABLES]
+
+# List of additional names supposed to be defined in builtins. Remember that
+# you should avoid to define new builtins when possible.
+additional-builtins=
+
+# Tells whether unused global variables should be treated as a violation.
+allow-global-unused-variables=yes
+
+# List of strings which can identify a callback function by name. A callback
+# name must start or end with one of those strings.
+callbacks=cb_,_cb
+
+# A regular expression matching the name of dummy variables (i.e. expectedly
+# not used).
+dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
+
+# Argument names that match this expression will be ignored. Default to name
+# with leading underscore
+ignored-argument-names=_.*|^ignored_|^unused_
+
+# Tells whether we should check for unused import in __init__ files.
+init-import=no
+
+# List of qualified module names which can have objects that can redefine
+# builtins.
+redefining-builtins-modules=six.moves,future.builtins
+
+
+[CLASSES]
+
+# List of method names used to declare (i.e. assign) instance attributes.
+defining-attr-methods=__init__,__new__,setUp
+
+# List of member names, which should be excluded from the protected access
+# warning.
+exclude-protected=_asdict,_fields,_replace,_source,_make
+
+# List of valid names for the first argument in a class method.
+valid-classmethod-first-arg=cls
+
+# List of valid names for the first argument in a metaclass class method.
+valid-metaclass-classmethod-first-arg=mcs
+
+
+[DESIGN]
+
+# Maximum number of arguments for function / method
+max-args=8
+
+# Maximum number of attributes for a class (see R0902).
+max-attributes=20
+
+# Maximum number of boolean expressions in a if statement
+max-bool-expr=5
+
+# Maximum number of branch for function / method body
+max-branches=12
+
+# Maximum number of locals for function / method body
+max-locals=20
+
+# Maximum number of parents for a class (see R0901).
+max-parents=7
+
+# Maximum number of public methods for a class (see R0904).
+max-public-methods=20
+
+# Maximum number of return / yield for function / method body
+max-returns=6
+
+# Maximum number of statements in function / method body
+max-statements=50
+
+# Minimum number of public methods for a class (see R0903).
+min-public-methods=2
+
+
+[IMPORTS]
+
+# Allow wildcard imports from modules that define __all__.
+allow-wildcard-with-all=no
+
+# Analyse import fallback blocks. This can be used to support both Python 2 and
+# 3 compatible code, which means that the block might have code that exists
+# only in one or another interpreter, leading to false positives when analysed.
+analyse-fallback-blocks=no
+
+# Deprecated modules which should not be used, separated by a comma
+deprecated-modules=optparse,tkinter.tix
+
+# Create a graph of external dependencies in the given file (report RP0402 must
+# not be disabled)
+ext-import-graph=
+
+# Create a graph of every (i.e. internal and external) dependencies in the
+# given file (report RP0402 must not be disabled)
+import-graph=
+
+# Create a graph of internal dependencies in the given file (report RP0402 must
+# not be disabled)
+int-import-graph=
+
+# Force import order to recognize a module as part of the standard
+# compatibility libraries.
+known-standard-library=
+
+# Force import order to recognize a module as part of a third party library.
+known-third-party=enchant
+
+
+[EXCEPTIONS]
+
+# Exceptions that will emit a warning when being caught. Defaults to
+# "Exception"
+overgeneral-exceptions=Exception
diff --git a/AUTHORS.rst b/AUTHORS.rst
index 674bfd758..2a2f19219 100644
--- a/AUTHORS.rst
+++ b/AUTHORS.rst
@@ -1,4 +1,4 @@
-Searx was created by Adam Tauber and is maintained by Adam Tauber, Alexandre Flament and Noémi Ványi.
+Searx was created by Adam Tauber and is maintained by Adam Tauber, Alexandre Flament, Noémi Ványi, @pofilo and Markus Heiser.
Major contributing authors:
@@ -9,6 +9,8 @@ Major contributing authors:
- @Cqoicebordel
- Noémi Ványi
- Marc Abonce Seguin @a01200356
+- @pofilo
+- Markus Heiser @return42
People who have submitted patches/translates, reported bugs, consulted features or
generally made searx better:
diff --git a/Dockerfile b/Dockerfile
index fb4f2cb67..b0b5a609d 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,32 +1,36 @@
FROM alpine:3.10
+ENTRYPOINT ["/sbin/tini","--","/usr/local/searx/dockerfiles/docker-entrypoint.sh"]
+EXPOSE 8080
+VOLUME /etc/searx
+VOLUME /var/log/uwsgi
-ARG VERSION_GITCOMMIT=unknow
-ARG SEARX_GIT_VERSION=unknow
+ARG VERSION_GITCOMMIT=unknown
+ARG SEARX_GIT_VERSION=unknown
-ARG SEARX_GID=1000
-ARG SEARX_UID=1000
+ARG SEARX_GID=977
+ARG SEARX_UID=977
+
+RUN addgroup -g ${SEARX_GID} searx && \
+ adduser -u ${SEARX_UID} -D -h /usr/local/searx -s /bin/sh -G searx searx
ARG TIMESTAMP_SETTINGS=0
ARG TIMESTAMP_UWSGI=0
ARG LABEL_VCS_REF=
ARG LABEL_VCS_URL=
-ENV BASE_URL= \
+ENV INSTANCE_NAME=searx \
+ AUTOCOMPLETE= \
+ BASE_URL= \
MORTY_KEY= \
MORTY_URL=
-EXPOSE 8080
-VOLUME /etc/searx
-VOLUME /var/log/uwsgi
WORKDIR /usr/local/searx
-RUN addgroup -g ${SEARX_GID} searx && \
- adduser -u ${SEARX_UID} -D -h /usr/local/searx -s /bin/sh -G searx searx
COPY requirements.txt ./requirements.txt
-RUN apk -U upgrade \
- && apk add -t build-dependencies \
+RUN apk upgrade --no-cache \
+ && apk add --no-cache -t build-dependencies \
build-base \
py3-setuptools \
python3-dev \
@@ -36,7 +40,7 @@ RUN apk -U upgrade \
openssl-dev \
tar \
git \
- && apk add \
+ && apk add --no-cache \
ca-certificates \
su-exec \
python3 \
@@ -48,8 +52,7 @@ RUN apk -U upgrade \
uwsgi-python3 \
&& pip3 install --upgrade pip \
&& pip3 install --no-cache -r requirements.txt \
- && apk del build-dependencies \
- && rm -f /var/cache/apk/*
+ && apk del build-dependencies
COPY --chown=searx:searx . .
@@ -60,7 +63,6 @@ RUN su searx -c "/usr/bin/python3 -m compileall -q searx"; \
echo "VERSION_STRING = VERSION_STRING + \"-$VERSION_GITCOMMIT\"" >> /usr/local/searx/searx/version.py; \
fi
-ENTRYPOINT ["/sbin/tini","--","/usr/local/searx/dockerfiles/docker-entrypoint.sh"]
# Keep this argument at the end since it change each time
ARG LABEL_DATE=
diff --git a/Makefile b/Makefile
new file mode 100644
index 000000000..b69202ba2
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,85 @@
+# -*- coding: utf-8; mode: makefile-gmake -*-
+
+export GIT_URL=https://github.com/asciimoo/searx
+export SEARX_URL=https://searx.me
+export DOCS_URL=https://asciimoo.github.io/searx
+
+PYOBJECTS = searx
+DOC = docs
+PY_SETUP_EXTRAS ?= \[test\]
+
+include utils/makefile.include
+include utils/makefile.python
+include utils/makefile.sphinx
+
+all: clean install
+
+PHONY += help
+help:
+ @echo ' test - run developer tests'
+ @echo ' docs - build documentation'
+ @echo ' docs-live - autobuild HTML documentation while editing'
+ @echo ' run - run developer instance'
+ @echo ' install - developer install (./local)'
+ @echo ' uninstall - uninstall (./local)'
+ @echo ' gh-pages - build docs & deploy on gh-pages branch'
+ @echo ''
+ @$(MAKE) -s -f utils/makefile.include make-help
+ @echo ''
+ @$(MAKE) -s -f utils/makefile.python python-help
+
+PHONY += install
+install: pyenvinstall
+
+PHONY += uninstall
+uninstall: pyenvuninstall
+
+PHONY += clean
+clean: pyclean
+ $(call cmd,common_clean)
+
+PHONY += run
+run: pyenvinstall
+ $(Q) ( \
+ sed -i -e "s/debug : False/debug : True/g" ./searx/settings.yml ; \
+ sleep 2 ; \
+ xdg-open http://127.0.0.1:8888/ ; \
+ sleep 3 ; \
+ sed -i -e "s/debug : True/debug : False/g" ./searx/settings.yml ; \
+ ) &
+ $(PY_ENV)/bin/python ./searx/webapp.py
+
+# docs
+# ----
+
+PHONY += docs
+docs: pyenvinstall sphinx-doc
+ $(call cmd,sphinx,html,docs,docs)
+
+PHONY += docs-live
+docs-live: pyenvinstall sphinx-live
+ $(call cmd,sphinx_autobuild,html,docs,docs)
+
+$(GH_PAGES)::
+ @echo "doc available at --> $(DOCS_URL)"
+
+# test
+# ----
+
+PHONY += test test.pylint test.pep8 test.unit test.robot
+
+# TODO: balance linting with pylint
+test: test.pep8 test.unit test.robot
+ - make pylint
+
+test.pep8: pyenvinstall
+ $(PY_ENV_ACT); ./manage.sh pep8_check
+
+test.unit: pyenvinstall
+ $(PY_ENV_ACT); ./manage.sh unit_tests
+
+test.robot: pyenvinstall
+ $(PY_ENV_ACT); ./manage.sh install_geckodriver
+ $(PY_ENV_ACT); ./manage.sh robot_tests
+
+.PHONY: $(PHONY)
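+
+# A typical developer session with the targets above might look like the
+# following sketch (target names taken from the help text; the URL is the
+# default of the 'run' target):
+#
+#   make install    # create ./local and do a developer install of searx
+#   make run        # start a developer instance on http://127.0.0.1:8888/
+#   make docs       # build the HTML documentation
+#   make test       # run pep8, unit and robot tests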
diff --git a/dockerfiles/docker-entrypoint.sh b/dockerfiles/docker-entrypoint.sh
index c731e0a89..8b4c34860 100755
--- a/dockerfiles/docker-entrypoint.sh
+++ b/dockerfiles/docker-entrypoint.sh
@@ -29,6 +29,8 @@ do
printf " -f Always update on the configuration files (existing files are renamed with the .old suffix)\n"
printf " Without this option, new configuration files are copied with the .new suffix\n"
printf "\nEnvironment variables:\n\n"
+ printf " INSTANCE_NAME settings.yml : general.instance_name\n"
+ printf " AUTOCOMPLETE settings.yml : search.autocomplete\n"
printf " BASE_URL settings.yml : server.base_url\n"
printf " MORTY_URL settings.yml : result_proxy.url\n"
printf " MORTY_KEY settings.yml : result_proxy.key\n"
@@ -53,6 +55,8 @@ patch_searx_settings() {
# update settings.yml
sed -i -e "s|base_url : False|base_url : ${BASE_URL}|g" \
+ -e "s/instance_name : \"searx\"/instance_name : \"${INSTANCE_NAME}\"/g" \
+ -e "s/autocomplete : \"\"/autocomplete : \"${AUTOCOMPLETE}\"/g" \
-e "s/ultrasecretkey/$(openssl rand -hex 32)/g" \
"${CONF}"
diff --git a/docs/_themes/searx/static/searx.css b/docs/_themes/searx/static/searx.css
new file mode 100644
index 000000000..10f5b4eda
--- /dev/null
+++ b/docs/_themes/searx/static/searx.css
@@ -0,0 +1,30 @@
+@import url("pocoo.css");
+
+a, a.reference, a.footnote-reference {
+ color: #004b6b;
+ border-color: #004b6b;
+}
+
+a:hover {
+ color: #6d4100;
+ border-color: #6d4100;
+}
+
+p.version-warning {
+ background-color: #004b6b;
+}
+
+div.sidebar {
+ background-color: whitesmoke;
+ border-color: lightsteelblue;
+ border-radius: 3pt;
+}
+
+p.sidebar-title, .sidebar p {
+ margin: 6pt;
+}
+
+.sidebar li {
+ list-style-type: disclosure-closed;
+}
+
diff --git a/docs/_themes/searx/theme.conf b/docs/_themes/searx/theme.conf
new file mode 100644
index 000000000..2d5f72e7d
--- /dev/null
+++ b/docs/_themes/searx/theme.conf
@@ -0,0 +1,6 @@
+[theme]
+inherit = pocoo
+stylesheet = searx.css
+
+[options]
+touch_icon =
diff --git a/docs/admin/api.rst b/docs/admin/api.rst
new file mode 100644
index 000000000..7804a8664
--- /dev/null
+++ b/docs/admin/api.rst
@@ -0,0 +1,96 @@
+.. _adminapi:
+
+==================
+Administration API
+==================
+
+Get configuration data
+======================
+
+.. code:: http
+
+ GET /config HTTP/1.1
+
+Sample response
+---------------
+
+.. code:: json
+
+ {
+ "autocomplete": "",
+ "categories": [
+ "map",
+ "it",
+ "images",
+ ],
+ "default_locale": "",
+ "default_theme": "oscar",
+ "engines": [
+ {
+ "categories": [
+ "map"
+ ],
+ "enabled": true,
+ "name": "openstreetmap",
+ "shortcut": "osm"
+ },
+ {
+ "categories": [
+ "it"
+ ],
+ "enabled": true,
+ "name": "arch linux wiki",
+ "shortcut": "al"
+ },
+ {
+ "categories": [
+ "images"
+ ],
+ "enabled": true,
+ "name": "google images",
+ "shortcut": "goi"
+ },
+ {
+ "categories": [
+ "it"
+ ],
+ "enabled": false,
+ "name": "bitbucket",
+ "shortcut": "bb"
+ },
+ ],
+ "instance_name": "searx",
+ "locales": {
+ "de": "Deutsch (German)",
+ "en": "English",
+ "eo": "Esperanto (Esperanto)",
+ },
+ "plugins": [
+ {
+ "enabled": true,
+ "name": "HTTPS rewrite"
+ },
+ {
+ "enabled": false,
+ "name": "Vim-like hotkeys"
+ }
+ ],
+ "safe_search": 0
+ }
+
+
+Embed search bar
+================
+
+The search bar can be embedded into websites. Just paste the example into the
+HTML of the site. The URL of the searx instance and the values are
+customizable.
+
+.. code:: html
+
+ <form method="post" action="https://searx.me/">
+ <!-- search --> <input type="text" name="q" />
+ <!-- categories --> <input type="hidden" name="categories" value="general,social media" />
+ <!-- language --> <input type="hidden" name="lang" value="all" />
+ <!-- locale --> <input type="hidden" name="locale" value="en" />
+ <!-- date filter --> <input type="hidden" name="time_range" value="month" />
+ </form>
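+
+As a quick sanity check, the configuration endpoint described above can also be
+queried from the command line; the sketch below assumes a locally running
+instance on port 8888 and uses ``jq`` only for pretty-printing:
+
+.. code:: sh
+
+   $ curl -s 'http://127.0.0.1:8888/config' | jq .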
diff --git a/docs/admin/filtron.rst b/docs/admin/filtron.rst
new file mode 100644
index 000000000..07dcb9bc5
--- /dev/null
+++ b/docs/admin/filtron.rst
@@ -0,0 +1,148 @@
+==========================
+How to protect an instance
+==========================
+
+Searx depends on external search services. To avoid abuse of these services,
+it is advised to limit the number of requests processed by searx.
+
+The application firewall ``filtron`` solves exactly this problem. Information
+on how to install it can be found at the `project page of filtron
+<https://github.com/asciimoo/filtron>`__.
+
+
+Sample configuration of filtron
+===============================
+
+An example configuration can be found below. This configuration limits the
+access of:
+
+- scripts or applications (roboagent limit)
+- webcrawlers (botlimit)
+- IPs which send too many requests (IP limit)
+- too many json, csv, etc. requests (rss/json limit)
+- the same UserAgent sending too many requests (useragent limit)
+
+.. code:: json
+
+ [{
+ "name":"search request",
+ "filters":[
+ "Param:q",
+ "Path=^(/|/search)$"
+ ],
+ "interval":"<time-interval-in-sec (int)>",
+ "limit":"<max-request-number-in-interval (int)>",
+ "subrules":[
+ {
+ "name":"roboagent limit",
+ "interval":"<time-interval-in-sec (int)>",
+ "limit":"<max-request-number-in-interval (int)>",
+ "filters":[
+ "Header:User-Agent=(curl|cURL|Wget|python-requests|Scrapy|FeedFetcher|Go-http-client)"
+ ],
+ "actions":[
+ {
+ "name":"block",
+ "params":{
+ "message":"Rate limit exceeded"
+ }
+ }
+ ]
+ },
+ {
+ "name":"botlimit",
+ "limit":0,
+ "stop":true,
+ "filters":[
+ "Header:User-Agent=(Googlebot|bingbot|Baiduspider|yacybot|YandexMobileBot|YandexBot|Yahoo! Slurp|MJ12bot|AhrefsBot|archive.org_bot|msnbot|MJ12bot|SeznamBot|linkdexbot|Netvibes|SMTBot|zgrab|James BOT)"
+ ],
+ "actions":[
+ {
+ "name":"block",
+ "params":{
+ "message":"Rate limit exceeded"
+ }
+ }
+ ]
+ },
+ {
+ "name":"IP limit",
+ "interval":"<time-interval-in-sec (int)>",
+ "limit":"<max-request-number-in-interval (int)>",
+ "stop":true,
+ "aggregations":[
+ "Header:X-Forwarded-For"
+ ],
+ "actions":[
+ {
+ "name":"block",
+ "params":{
+ "message":"Rate limit exceeded"
+ }
+ }
+ ]
+ },
+ {
+ "name":"rss/json limit",
+ "interval":"<time-interval-in-sec (int)>",
+ "limit":"<max-request-number-in-interval (int)>",
+ "stop":true,
+ "filters":[
+ "Param:format=(csv|json|rss)"
+ ],
+ "actions":[
+ {
+ "name":"block",
+ "params":{
+ "message":"Rate limit exceeded"
+ }
+ }
+ ]
+ },
+ {
+ "name":"useragent limit",
+ "interval":"<time-interval-in-sec (int)>",
+ "limit":"<max-request-number-in-interval (int)>",
+ "aggregations":[
+ "Header:User-Agent"
+ ],
+ "actions":[
+ {
+ "name":"block",
+ "params":{
+ "message":"Rate limit exceeded"
+ }
+ }
+ ]
+ }
+ ]
+ }]
+
+
+
+Route request through filtron
+=============================
+
+Filtron can be started using the following command:
+
+.. code:: sh
+
+ $ filtron -rules rules.json
+
+It listens on ``127.0.0.1:4004`` and forwards filtered requests to
+``127.0.0.1:8888`` by default.
+
+Use it together with ``nginx``, as in the following example configuration.
+
+.. code:: nginx
+
+ location / {
+ proxy_set_header Host $http_host;
+ proxy_set_header X-Real-IP $remote_addr;
+ proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+ proxy_set_header X-Scheme $scheme;
+ proxy_pass http://127.0.0.1:4004/;
+ }
+
+Requests come in on port 4004, pass through filtron and are then forwarded to
+port 8888, where searx is running.
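+
+A minimal sketch that spells out these defaults explicitly, assuming filtron's
+``-listen`` and ``-target`` command line flags (check ``filtron --help`` for
+the options of your version):
+
+.. code:: sh
+
+   $ filtron -rules rules.json -listen 127.0.0.1:4004 -target 127.0.0.1:8888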
diff --git a/docs/admin/index.rst b/docs/admin/index.rst
new file mode 100644
index 000000000..f3a995769
--- /dev/null
+++ b/docs/admin/index.rst
@@ -0,0 +1,11 @@
+===========================
+Administrator documentation
+===========================
+
+.. toctree::
+ :maxdepth: 1
+
+ installation
+ api
+ filtron
+ morty
diff --git a/docs/admin/installation.rst b/docs/admin/installation.rst
new file mode 100644
index 000000000..239ce0704
--- /dev/null
+++ b/docs/admin/installation.rst
@@ -0,0 +1,341 @@
+.. _installation:
+
+============
+Installation
+============
+
+.. contents::
+ :depth: 3
+
+Basic installation
+==================
+
+Step by step installation for Debian/Ubuntu with virtualenv. For Ubuntu, be sure
+to have enabled the universe repository.
+
+Install packages:
+
+.. code:: sh
+
+ $ sudo -H apt-get install \
+ git build-essential libxslt-dev \
+ python-dev python-virtualenv python-babel \
+ zlib1g-dev libffi-dev libssl-dev
+
+Install searx:
+
+.. code:: sh
+
+ cd /usr/local
+ sudo -H git clone https://github.com/asciimoo/searx.git
+ sudo -H useradd searx -d /usr/local/searx
+ sudo -H chown searx:searx -R /usr/local/searx
+
+Install dependencies in a virtualenv:
+
+.. code:: sh
+
+ cd /usr/local/searx
+ sudo -H -u searx -i
+
+.. code:: sh
+
+ (searx)$ virtualenv searx-ve
+ (searx)$ . ./searx-ve/bin/activate
+ (searx)$ ./manage.sh update_packages
+
+Configuration
+==============
+
+.. code:: sh
+
+ sed -i -e "s/ultrasecretkey/`openssl rand -hex 16`/g" searx/settings.yml
+
+Edit searx/settings.yml if necessary.
+
+Check
+=====
+
+Start searx:
+
+.. code:: sh
+
+ python searx/webapp.py
+
+Go to http://localhost:8888
+
+If everything works fine, disable the debug option in settings.yml:
+
+.. code:: sh
+
+ sed -i -e "s/debug : True/debug : False/g" searx/settings.yml
+
+At this point searx is not daemonized; uwsgi allows this.
+
+You can exit the virtualenv and the searx user's shell (enter the ``exit``
+command twice).
+
+uwsgi
+=====
+
+Install packages:
+
+.. code:: sh
+
+ sudo -H apt-get install \
+ uwsgi uwsgi-plugin-python
+
+Create the configuration file ``/etc/uwsgi/apps-available/searx.ini`` with this
+content:
+
+.. code:: ini
+
+ [uwsgi]
+ # Who will run the code
+ uid = searx
+ gid = searx
+
+ # disable logging for privacy
+ disable-logging = true
+
+ # Number of workers (usually CPU count)
+ workers = 4
+
+ # The right granted on the created socket
+ chmod-socket = 666
+
+ # Plugin to use and interpreter config
+ single-interpreter = true
+ master = true
+ plugin = python
+ lazy-apps = true
+ enable-threads = true
+
+ # Module to import
+ module = searx.webapp
+
+ # Virtualenv and python path
+ virtualenv = /usr/local/searx/searx-ve/
+ pythonpath = /usr/local/searx/
+ chdir = /usr/local/searx/searx/
+
+Activate the uwsgi application and restart:
+
+.. code:: sh
+
+ cd /etc/uwsgi/apps-enabled
+ ln -s ../apps-available/searx.ini
+ /etc/init.d/uwsgi restart
+
+Web server
+==========
+
+with nginx
+----------
+
+If nginx is not installed yet (note that uwsgi will not work with the package
+nginx-light), install it:
+
+.. code:: sh
+
+ sudo -H apt-get install nginx
+
+Hosted at /
+~~~~~~~~~~~
+
+Create the configuration file ``/etc/nginx/sites-available/searx`` with this
+content:
+
+.. code:: nginx
+
+ server {
+ listen 80;
+ server_name searx.example.com;
+ root /usr/local/searx;
+
+ location / {
+ include uwsgi_params;
+ uwsgi_pass unix:/run/uwsgi/app/searx/socket;
+ }
+ }
+
+Create a symlink to sites-enabled:
+
+.. code:: sh
+
+ sudo -H ln -s /etc/nginx/sites-available/searx /etc/nginx/sites-enabled/searx
+
+Restart service:
+
+.. code:: sh
+
+ sudo -H service nginx restart
+ sudo -H service uwsgi restart
+
+from subdirectory URL (/searx)
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Add this configuration in the server config file
+``/etc/nginx/sites-enabled/default``:
+
+.. code:: nginx
+
+ location = /searx { rewrite ^ /searx/; }
+ location /searx {
+ try_files $uri @searx;
+ }
+ location @searx {
+ uwsgi_param SCRIPT_NAME /searx;
+ include uwsgi_params;
+ uwsgi_modifier1 30;
+ uwsgi_pass unix:/run/uwsgi/app/searx/socket;
+ }
+
+
+**OR** use a reverse proxy (please note that a reverse proxy is only advised
+for single-user or low-traffic instances):
+
+.. code:: nginx
+
+ location /searx {
+ proxy_pass http://127.0.0.1:8888;
+ proxy_set_header Host $host;
+ proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+ proxy_set_header X-Scheme $scheme;
+ proxy_set_header X-Script-Name /searx;
+ proxy_buffering off;
+ }
+
+
+Enable ``base_url`` in ``searx/settings.yml``
+
+.. code:: yaml
+
+ base_url : http://your.domain.tld/searx/
+
+Restart service:
+
+.. code:: sh
+
+ sudo -H service nginx restart
+ sudo -H service uwsgi restart
+
+disable logs
+^^^^^^^^^^^^
+
+For better privacy you can disable the nginx logs for searx.
+
+To do so, add the following below ``uwsgi_pass`` in
+``/etc/nginx/sites-available/default``:
+
+.. code:: nginx
+
+ access_log /dev/null;
+ error_log /dev/null;
+
+Restart service:
+
+.. code:: sh
+
+ sudo -H service nginx restart
+
+with apache
+-----------
+
+Add wsgi mod:
+
+.. code:: sh
+
+ sudo -H apt-get install libapache2-mod-uwsgi
+ sudo -H a2enmod uwsgi
+
+Add this configuration in the file ``/etc/apache2/apache2.conf``:
+
+.. code:: apache
+
+ <Location />
+ Options FollowSymLinks Indexes
+ SetHandler uwsgi-handler
+ uWSGISocket /run/uwsgi/app/searx/socket
+ </Location>
+
+Note that if your instance of searx is not at the root, you should replace
+``<Location />`` with the location of your instance, like ``<Location /searx>``.
+
+Restart Apache:
+
+.. code:: sh
+
+ sudo -H /etc/init.d/apache2 restart
+
+disable logs
+~~~~~~~~~~~~
+
+For better privacy you can disable Apache logs.
+
+.. warning::
+
+ You can only disable logs for the whole (virtual) server not for a specific
+ path.
+
+Go back to ``/etc/apache2/apache2.conf`` and above ``<Location />`` add:
+
+.. code:: apache
+
+ CustomLog /dev/null combined
+
+Restart Apache:
+
+.. code:: sh
+
+ sudo -H /etc/init.d/apache2 restart
+
+How to update
+=============
+
+.. code:: sh
+
+ cd /usr/local/searx
+ sudo -H -u searx -i
+
+.. code:: sh
+
+ (searx)$ . ./searx-ve/bin/activate
+ (searx)$ git stash
+ (searx)$ git pull origin master
+ (searx)$ git stash apply
+ (searx)$ ./manage.sh update_packages
+
+.. code:: sh
+
+ sudo -H service uwsgi restart
+
+Docker
+======
+
+Make sure you have installed Docker. For instance, you can deploy searx like this:
+
+.. code:: sh
+
+ docker pull wonderfall/searx
+ docker run -d --name searx -p $PORT:8888 wonderfall/searx
+
+Go to ``http://localhost:$PORT``.
+
+See https://hub.docker.com/r/wonderfall/searx/ for more information. It's also
+possible to build searx from the embedded Dockerfile.
+
+.. code:: sh
+
+ git clone https://github.com/asciimoo/searx.git
+ cd searx
+ docker build -t whatever/searx .
+
+References
+==========
+
+* https://about.okhin.fr/posts/Searx/ with some additions
+
+* How to: `Setup searx in a couple of hours with a free SSL certificate
+ <https://www.reddit.com/r/privacytoolsIO/comments/366kvn/how_to_setup_your_own_privacy_respecting_search/>`__
+
diff --git a/docs/admin/morty.rst b/docs/admin/morty.rst
new file mode 100644
index 000000000..7d7b34492
--- /dev/null
+++ b/docs/admin/morty.rst
@@ -0,0 +1,26 @@
+=========================
+How to setup result proxy
+=========================
+
+.. _morty: https://github.com/asciimoo/morty
+.. _morty's README: https://github.com/asciimoo/morty
+
+By default searx can only act as an image proxy for result images, but it is
+possible to proxify all the result URLs with an external service, morty_.
+
+To use this feature, morty has to be installed and activated in searx's
+``settings.yml``.
+
+Add the following snippet to your ``settings.yml`` and restart searx:
+
+.. code:: yaml
+
+    result_proxy:
+        url : http://127.0.0.1:3000/
+        key : your_morty_proxy_key
+
+``url``
+ Is the address of the running morty service.
+
+``key``
+ Is an optional argument, see `morty's README`_ for more information.
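+
+A minimal sketch of starting the service itself, assuming morty's ``-listen``
+and ``-key`` command line flags (see `morty's README`_ for the authoritative
+list of options):
+
+.. code:: sh
+
+   $ morty -listen 127.0.0.1:3000 -key your_morty_proxy_key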
diff --git a/docs/blog/admin.rst b/docs/blog/admin.rst
new file mode 100644
index 000000000..e95316192
--- /dev/null
+++ b/docs/blog/admin.rst
@@ -0,0 +1,43 @@
+=============================================================
+Searx admin interface
+=============================================================
+
+.. _searx-admin: https://github.com/kvch/searx-admin#searx-admin
+.. _NLnet Foundation: https://nlnet.nl/
+
+ manage your instance from your browser
+
+.. sidebar:: Installation
+
+ Installation guide can be found in the repository of searx-admin_.
+
+One of the biggest advantages of searx is that it is extremely customizable,
+but at first this can be daunting to newcomers. One barrier to taking advantage
+of this feature is our ugly settings file, which is sometimes hard to
+understand and edit.
+
+To make self-hosting searx more accessible a new tool is introduced, called
+``searx-admin``. It is a web application which is capable of managing your
+instance and manipulating its settings via a web UI. It aims to replace editing
+of ``settings.yml`` for less experienced administrators or people who prefer
+graphical admin interfaces.
+
+.. figure:: searx-admin-engines.png
+ :alt: Screenshot of engine list
+
+ Configuration page of engines
+
+Since ``searx-admin`` acts as a supervisor for searx, we have decided to
+implement it as a standalone tool instead of part of searx. Another reason for
+making it a standalone tool is that the codebase and dependencies of searx
+should not grow because of a fully optional feature, which does not affect
+existing instances.
+
+
+Acknowledgements
+================
+
+This development was sponsored by `NLnet Foundation`_.
+
+| Happy hacking.
+| kvch // 2017.08.22 21:25
diff --git a/docs/blog/index.rst b/docs/blog/index.rst
new file mode 100644
index 000000000..52fa3f126
--- /dev/null
+++ b/docs/blog/index.rst
@@ -0,0 +1,10 @@
+====
+Blog
+====
+
+.. toctree::
+ :maxdepth: 1
+
+ python3
+ admin
+ intro-offline
diff --git a/docs/blog/intro-offline.rst b/docs/blog/intro-offline.rst
new file mode 100644
index 000000000..f6e90de3a
--- /dev/null
+++ b/docs/blog/intro-offline.rst
@@ -0,0 +1,77 @@
+===============================
+Preparation for offline engines
+===============================
+
+Offline engines
+===============
+
+To extend the functionality of searx, offline engines are going to be
+introduced. An offline engine is an engine which does not need an Internet
+connection to perform a search and does not use HTTP to communicate.
+
+Offline engines are configured like online engines, by adding them to the
+`engines` list of :origin:`settings.yml <searx/settings.yml>`. Searx then
+finds the engine file and imports it.
+
+Example skeleton for the new engines:
+
+.. code:: python
+
+    from subprocess import PIPE, Popen
+
+    categories = ['general']
+    offline = True
+
+    def init(settings):
+        pass
+
+    def search(query, params):
+        process = Popen(['ls', query], stdout=PIPE)
+        return_code = process.wait()
+        if return_code != 0:
+            raise RuntimeError('non-zero return code', return_code)
+
+        results = []
+        line = process.stdout.readline()
+        while line:
+            result = parse_line(line)
+            results.append(result)
+
+            line = process.stdout.readline()
+
+        return results
+
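+The ``parse_line`` helper above is left to the engine author; a hypothetical
+implementation could turn every output line into a result dict that is rendered
+by the new key-value template (the ``template`` key below is an assumption
+about how such a result would be declared):
+
+.. code:: python
+
+    def parse_line(line):
+        # decode the raw bytes read from the subprocess and strip the newline
+        name = line.decode('utf-8').strip()
+        return {
+            'template': 'key-value.html',
+            'name': name,
+        }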
+
+Development progress
+====================
+
+First, a proposal has been created as a Github issue. Then it was moved to the
+wiki as a design document. You can read it here: :wiki:`Offline-engines`.
+
+In this development step, the searx core was prepared to accept and perform
+offline searches. Offline search requests are scheduled together with regular
+online requests.
+
+As offline searches can return arbitrary results depending on the engine, the
+current result templates were insufficient to present such results. Thus, a new
+template is introduced which is capable of presenting arbitrary key-value pairs
+as a table. For more details you can check out the pull request :pull:`1700`.
+
+Next steps
+==========
+
+Today, it is possible to create and run an offline engine. However, it is
+accessible by everyone who knows the searx instance. So the next step is to
+introduce token based access for engines. This way administrators are able to
+limit the access to private engines.
+
+Acknowledgement
+===============
+
+This development was sponsored by the `Search and Discovery Fund`_ of the
+`NLnet Foundation`_.
+
+.. _Search and Discovery Fund: https://nlnet.nl/discovery
+.. _NLnet Foundation: https://nlnet.nl/
+
+
+| Happy hacking.
+| kvch // 2019.10.21 17:03
+
diff --git a/docs/blog/python3.rst b/docs/blog/python3.rst
new file mode 100644
index 000000000..5bb7f1c80
--- /dev/null
+++ b/docs/blog/python3.rst
@@ -0,0 +1,68 @@
+============================
+Introducing Python 3 support
+============================
+
+.. _Python 2.7 clock: https://pythonclock.org/
+
+.. sidebar:: Python 2.7 to 3 upgrade
+
+   This chapter exists for historical reasons. Python 2.7's support ends
+   (`Python 2.7 clock`_) after eleven years in which Python 3 has existed.
+
+Most operating systems now come with Python 3 installed by default, so it is
+time for searx to support Python 3. But don't worry, support for Python 2.7
+won't be dropped.
+
+.. image:: searxpy3.png
+ :scale: 50 %
+ :alt: hurray
+ :align: center
+
+
+How to run searx using Python 3
+===============================
+
+Please make sure that you run at least Python 3.5.
+
+To run searx, first a Python3 virtualenv should be created. After entering the
+virtualenv, dependencies must be installed. Then run searx with python3 instead
+of the usual python command.
+
+.. code:: sh
+
+ virtualenv -p python3 venv3
+ source venv3/bin/activate
+ pip3 install -r requirements.txt
+ python3 searx/webapp.py
+
+
+If you want to run searx using Python 2.7, you don't have to do anything
+differently than before.
+
+Fun facts
+=========
+
+- 115 files were changed when implementing the support for both Python versions.
+
+- All of the dependencies were compatible except for robotframework, which was
+  used for browser tests. Thus, these tests were migrated to splinter. From now
+  on both versions are being tested on Travis and can be tested locally.
+
+If you found bugs
+=================
+
+Please open an issue on `GitHub`_. Make sure that you mention your Python
+version in your issue, so we can investigate it properly.
+
+.. _GitHub: https://github.com/asciimoo/searx/issues
+
+Acknowledgment
+==============
+
+This development was sponsored by `NLnet Foundation`_.
+
+.. _NLnet Foundation: https://nlnet.nl/
+
+
+| Happy hacking.
+| kvch // 2017.05.13 22:57
diff --git a/docs/blog/searx-admin-engines.png b/docs/blog/searx-admin-engines.png
new file mode 100644
index 000000000..610bacdf7
--- /dev/null
+++ b/docs/blog/searx-admin-engines.png
Binary files differ
diff --git a/docs/blog/searxpy3.png b/docs/blog/searxpy3.png
new file mode 100644
index 000000000..8eeaeec55
--- /dev/null
+++ b/docs/blog/searxpy3.png
Binary files differ
diff --git a/docs/conf.py b/docs/conf.py
new file mode 100644
index 000000000..c0bd246ac
--- /dev/null
+++ b/docs/conf.py
@@ -0,0 +1,83 @@
+# -*- coding: utf-8 -*-
+
+import sys, os
+from searx.version import VERSION_STRING
+from pallets_sphinx_themes import ProjectLink
+
+GIT_URL = os.environ.get("GIT_URL", "https://github.com/asciimoo/searx")
+SEARX_URL = os.environ.get("SEARX_URL", "https://searx.me")
+DOCS_URL = os.environ.get("DOCS_URL", "https://asciimoo.github.io/searx/")
+
+# Project --------------------------------------------------------------
+
+project = u'searx'
+copyright = u'2015-2019, Adam Tauber, Noémi Ványi'
+author = u'Adam Tauber'
+release, version = VERSION_STRING, VERSION_STRING
+
+# General --------------------------------------------------------------
+
+master_doc = "index"
+source_suffix = '.rst'
+
+# usage:: lorem :patch:`f373169` ipsum
+extlinks = {}
+
+# upstream links
+extlinks['wiki'] = ('https://github.com/asciimoo/searx/wiki/%s', ' ')
+extlinks['pull'] = ('https://github.com/asciimoo/searx/pull/%s', 'PR ')
+
+# links to custom brand
+extlinks['origin'] = (GIT_URL + '/blob/master/%s', 'git://')
+extlinks['patch'] = (GIT_URL + '/commit/%s', '#')
+extlinks['search'] = (SEARX_URL + '/%s', '#')
+extlinks['docs'] = (DOCS_URL + '/%s', 'docs: ')
+
+extensions = [
+ 'sphinx.ext.extlinks',
+ 'sphinx.ext.viewcode',
+ "sphinx.ext.autodoc",
+ "sphinx.ext.intersphinx",
+ "pallets_sphinx_themes",
+ "sphinx_issues", # https://github.com/sloria/sphinx-issues/blob/master/README.rst
+]
+
+intersphinx_mapping = {
+ "python": ("https://docs.python.org/3/", None),
+ # "flask": ("https://flask.palletsprojects.com/", None),
+ # "werkzeug": ("https://werkzeug.palletsprojects.com/", None),
+ # "jinja": ("https://jinja.palletsprojects.com/", None),
+}
+
+issues_github_path = "asciimoo/searx"
+
+# HTML -----------------------------------------------------------------
+
+sys.path.append(os.path.abspath('_themes'))
+
+html_theme_path = ['_themes']
+html_theme = "searx"
+
+html_theme_options = {"index_sidebar_logo": True}
+html_context = {
+ "project_links": [
+ ProjectLink("Source", GIT_URL),
+ ProjectLink("Wiki", "https://github.com/asciimoo/searx/wiki"),
+ ProjectLink("Public instances", "https://github.com/asciimoo/searx/wiki/Searx-instances"),
+ ProjectLink("Twitter", "https://twitter.com/Searx_engine"),
+ ]
+}
+html_sidebars = {
+ "**": ["project.html", "relations.html", "searchbox.html"],
+}
+singlehtml_sidebars = {"index": ["project.html", "localtoc.html"]}
+html_static_path = ["static"]
+html_logo = "static/img/searx_logo_small.png"
+html_title = "Searx Documentation ({})".format("Searx-{}.tex".format(VERSION_STRING))
+html_show_sourcelink = False
+
+# LaTeX ----------------------------------------------------------------
+
+latex_documents = [
+ (master_doc, "searx-{}.tex".format(VERSION_STRING), html_title, author, "manual")
+]
diff --git a/docs/dev/contribution_guide.rst b/docs/dev/contribution_guide.rst
new file mode 100644
index 000000000..064f28e63
--- /dev/null
+++ b/docs/dev/contribution_guide.rst
@@ -0,0 +1,147 @@
+=================
+How to contribute
+=================
+
+Prime directives: Privacy, Hackability
+======================================
+
+Searx has two prime directives, **privacy-by-design and hackability**. The
+hackability comes in three levels:
+
+- support of search engines
+- plugins to alter search behaviour
+- hacking searx itself
+
+Note the lack of "world domination" among the directives. Searx has no
+intention of wide mass-adoption, rounded corners, etc. The prime directive
+"privacy" deserves a separate chapter, as it's quite uncommon unfortunately.
+
+Privacy-by-design
+-----------------
+
+Searx was born out of the need for a **privacy-respecting** search tool which
+can be extended easily to maximize both, its search and its privacy protecting
+capabilities.
+
+A few widely used features work differently, are turned off by default, or are
+not implemented at all **as a consequence of privacy-by-design**.
+
+If a feature reduces the privacy preserving aspects of searx, it should be
+switched off by default or should not be implemented at all. There are plenty of
+search engines already providing such features. If a feature reduces the
+protection of searx, users must be informed about the effect of choosing to
+enable it. Features that protect privacy but differ from the expectations of
+the user should also be explained.
+
+Also, if you think that something behaves strangely with searx, it might be
+because the tool you use is designed in a way that interferes with privacy.
+Submitting a bug report to the vendor of the misbehaving tool might be good
+feedback for it to reconsider the disrespect to its customers (e.g. ``GET`` vs
+``POST`` requests in various browsers).
+
+Remember the other prime directive of searx is to be hackable, so if the above
+privacy concerns do not suit you, simply fork it.
+
+ *Happy hacking.*
+
+Code
+====
+
+.. _PEP8: https://www.python.org/dev/peps/pep-0008/
+
+
+In order to submit a patch, please follow the steps below:
+
+- Follow coding conventions.
+
+ - PEP8_ standards apply, except the convention of line length
+ - Maximum line length is 120 characters
+
+- Check if your code breaks existing tests. If so, update the tests or fix your
+ code.
+
+- If your code can be unit-tested, add unit tests.
+
+- Add yourself to the :origin:`AUTHORS.rst` file.
+
+- Create a pull request.
+
+For more help on getting started with searx development, see :ref:`devquickstart`.
+
+
+Translation
+===========
+
+Translation currently takes place on :ref:`transifex <translation>`.
+
+.. caution::
+
+ Please, do not update translation files in the repo.
+
+
+Documentation
+=============
+
+.. _Sphinx: http://www.sphinx-doc.org
+.. _reST: http://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html
+
+.. sidebar:: The reST sources
+
+ have been moved from ``gh-branch`` into ``master`` (:origin:`docs`).
+
+The documentation is built using Sphinx_. So in order to be able to generate
+the required files, you have to install it on your system. It is much easier to
+use our Makefile targets.
+
+Here is an example which makes a complete rebuild:
+
+.. code:: sh
+
+ $ make docs-clean docs
+ ...
+ The HTML pages are in dist/docs.
+
+
+live build
+----------
+
+.. sidebar:: docs-clean
+
+ It is recommended to do a complete rebuild before deploying (use
+ ``docs-clean``).
+
+Live build is like WYSIWYG; if you want to edit the documentation, it is the
+recommended way to work. The Makefile target ``docs-live`` builds the docs, opens
+the URL in your favorite browser and rebuilds every time a reST file is changed.
+
+.. code:: sh
+
+ $ make docs-live
+ ...
+ The HTML pages are in dist/docs.
+ ... Serving on http://0.0.0.0:8080
+ ... Start watching changes
+
+
+
+deploy on github.io
+-------------------
+
+To deploy documentation at :docs:`github.io <.>` use the Makefile target
+``gh-pages``, which builds the documentation, clones searx into a subfolder
+``gh-pages``, cleans it, copies the doc build into it and runs all the needed
+git add, commit and push commands:
+
+.. code:: sh
+
+ $ make docs-clean gh-pages
+ ...
+ SPHINX docs --> file://<...>/dist/docs
+ The HTML pages are in dist/docs.
+ ...
+ Cloning into 'gh-pages' ...
+ ...
+ cd gh-pages; git checkout gh-pages >/dev/null
+ Switched to a new branch 'gh-pages'
+ ...
+ doc available at --> https://asciimoo.github.io/searx
diff --git a/docs/dev/engine_overview.rst b/docs/dev/engine_overview.rst
new file mode 100644
index 000000000..92405dc64
--- /dev/null
+++ b/docs/dev/engine_overview.rst
@@ -0,0 +1,265 @@
+
+.. _engines-dev:
+
+===============
+Engine overview
+===============
+
+.. _metasearch-engine: https://en.wikipedia.org/wiki/Metasearch_engine
+
+searx is a metasearch-engine_, so it uses different search engines to provide
+better results.
+
+Because there is no general search API which could be used for every search
+engine, an adapter has to be built between searx and the external search
+engines. Adapters are stored under the folder :origin:`searx/engines`.
+
+.. contents::
+ :depth: 3
+ :backlinks: entry
+
+general engine configuration
+============================
+
+It is required to tell searx the type of results the engine provides. The
+arguments can be set in the engine file or in the settings file
+(normally ``settings.yml``). The arguments in the settings file override
+the ones in the engine file.
+
+It does not matter if an option is stored in the engine file or in the
+settings. However, the standard way is the following:
+
+
+engine file
+-----------
+
+======================= =========== ===========================================
+argument                type        information
+======================= =========== ===========================================
+categories              list        pages in which the engine is working
+paging                  boolean     support multiple pages
+language_support        boolean     support language selection
+time_range_support      boolean     support search time range
+offline                 boolean     engine runs offline
+======================= =========== ===========================================
+
+
+settings.yml
+------------
+
+======================= =========== ===========================================
+argument type information
+======================= =========== ===========================================
+name string name of search-engine
+engine string name of searx-engine
+ (filename without ``.py``)
+shortcut string shortcut of search-engine
+timeout string specific timeout for search-engine
+======================= =========== ===========================================
+
+
+overrides
+---------
+
+A few of the options have default values in the engine, but are often
+overwritten by the settings. If ``None`` is assigned to an option in the engine
+file, it has to be redefined in the settings, otherwise searx will not start
+with that engine.
+
+The naming of overrides is arbitrary. But the recommended overrides are the
+following:
+
+======================= =========== ===========================================
+argument type information
+======================= =========== ===========================================
+base_url string base-url, can be overwritten to use same
+ engine on other URL
+number_of_results int maximum number of results per request
+language string ISO code of language and country like en_US
+api_key string api-key if required by engine
+======================= =========== ===========================================
+
+example code
+------------
+
+.. code:: python
+
+ # engine dependent config
+ categories = ['general']
+ paging = True
+ language_support = True
+
+
+making a request
+================
+
+To perform a search, a URL has to be specified. In addition to specifying a
+URL, arguments can be passed to the query.
+
+passed arguments
+----------------
+
+These arguments can be used to construct the search query. Furthermore,
+parameters with default values can be redefined for special purposes.
+
+====================== ============ ========================================================================
+argument type default-value, information
+====================== ============ ========================================================================
+url string ``''``
+method string ``'GET'``
+headers set ``{}``
+data set ``{}``
+cookies set ``{}``
+verify boolean ``True``
+headers.User-Agent string a random User-Agent
+category string current category, like ``'general'``
+started datetime current date-time
+pageno int current pagenumber
+language string specific language code like ``'en_US'``, or ``'all'`` if unspecified
+====================== ============ ========================================================================
+
+parsed arguments
+----------------
+
+The function ``def request(query, params):`` always returns the ``params``
+variable. Inside searx, the following parameters can be used to specify a search
+request:
+
+============ =========== =========================================================
+argument type information
+============ =========== =========================================================
+url string requested url
+method string HTTP request method
+headers set HTTP header information
+data set HTTP data information (parsed if ``method != 'GET'``)
+cookies set HTTP cookies
+verify boolean Performing SSL-Validity check
+============ =========== =========================================================
+
+
+example code
+------------
+
+.. code:: python
+
+ # search-url
+ base_url = 'https://example.com/'
+ search_string = 'search?{query}&page={page}'
+
+ # do search-request
+ def request(query, params):
+ search_path = search_string.format(
+ query=urlencode({'q': query}),
+ page=params['pageno'])
+
+ params['url'] = base_url + search_path
+
+ return params
+
+
+returned results
+================
+
+Searx is able to return results of different media-types. Currently the
+following media-types are supported:
+
+- default_
+- images_
+- videos_
+- torrent_
+- map_
+
+To use a template other than the default one, the parameter ``template`` must
+be set to the desired type.
+
+default
+-------
+
+========================= =====================================================
+result-parameter information
+========================= =====================================================
+url string, url of the result
+title string, title of the result
+content string, general result-text
+publishedDate :py:class:`datetime.datetime`, time of publish
+========================= =====================================================
+
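+As an illustration, a minimal ``response`` function returning results for the
+default template could look like the sketch below (the xpath expressions and
+the parsed page layout are made up for this example):
+
+.. code:: python
+
+    from lxml import html
+    from searx.engines.xpath import extract_text
+
+    # get response from search-request
+    def response(resp):
+        results = []
+        dom = html.fromstring(resp.text)
+
+        # parse results (the xpath below is illustrative only)
+        for result in dom.xpath('//div[@class="result"]'):
+            link = result.xpath('.//a')[0]
+
+            # append result using the default template
+            results.append({'url': link.attrib.get('href'),
+                            'title': extract_text(link),
+                            'content': extract_text(result.xpath('.//p'))})
+
+        return results
+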
+images
+------
+
+To use this template, the following parameters are available:
+
+========================= =====================================================
+result-parameter information
+========================= =====================================================
+template is set to ``images.html``
+url string, url to the result site
+title string, title of the result *(partly implemented)*
+content *(partly implemented)*
+publishedDate :py:class:`datetime.datetime`,
+ time of publish *(partly implemented)*
+img\_src string, url to the result image
+thumbnail\_src string, url to a small-preview image
+========================= =====================================================
+
+videos
+------
+
+========================= =====================================================
+result-parameter information
+========================= =====================================================
+template is set to ``videos.html``
+url string, url of the result
+title string, title of the result
+content *(not implemented yet)*
+publishedDate :py:class:`datetime.datetime`, time of publish
+thumbnail string, url to a small-preview image
+========================= =====================================================
+
+torrent
+-------
+
+.. _magnetlink: https://en.wikipedia.org/wiki/Magnet_URI_scheme
+
+========================= =====================================================
+result-parameter          information
+========================= =====================================================
+template                  is set to ``torrent.html``
+url                       string, url of the result
+title                     string, title of the result
+content                   string, general result-text
+publishedDate             :py:class:`datetime.datetime`,
+                          time of publish *(not implemented yet)*
+seed                      int, number of seeders
+leech                     int, number of leechers
+filesize                  int, size of file in bytes
+files                     int, number of files
+magnetlink                string, magnetlink_ of the result
+torrentfile               string, torrentfile of the result
+========================= =====================================================
+
+
+map
+---
+
+========================= =====================================================
+result-parameter          information
+========================= =====================================================
+url                       string, url of the result
+title                     string, title of the result
+content                   string, general result-text
+publishedDate             :py:class:`datetime.datetime`, time of publish
+latitude                  latitude of result (in decimal format)
+longitude                 longitude of result (in decimal format)
+boundingbox               boundingbox of result (array of 4 values
+                          ``[lat-min, lat-max, lon-min, lon-max]``)
+geojson                   geojson of result (http://geojson.org)
+osm.type                  type of osm-object (if OSM-Result)
+osm.id                    id of osm-object (if OSM-Result)
+address.name              name of object
+address.road              street name of object
+address.house_number      house number of object
+address.locality          city, place of object
+address.postcode          postcode of object
+address.country           country of object
+========================= =====================================================
diff --git a/docs/dev/index.rst b/docs/dev/index.rst
new file mode 100644
index 000000000..8e18066ca
--- /dev/null
+++ b/docs/dev/index.rst
@@ -0,0 +1,13 @@
+=======================
+Developer documentation
+=======================
+
+.. toctree::
+ :maxdepth: 1
+
+ quickstart
+ contribution_guide
+ engine_overview
+ search_api
+ plugins
+ translation
diff --git a/docs/dev/plugins.rst b/docs/dev/plugins.rst
new file mode 100644
index 000000000..e97bbeb4a
--- /dev/null
+++ b/docs/dev/plugins.rst
@@ -0,0 +1,48 @@
+=======
+Plugins
+=======
+
+Plugins can extend or replace functionality of various components of searx.
+
+Example plugin
+==============
+
+.. code:: python
+
+ name = 'Example plugin'
+ description = 'This plugin extends the suggestions with the word "example"'
+ default_on = False # disabled by default
+
+ js_dependencies = tuple() # optional, list of static js files
+ css_dependencies = tuple() # optional, list of static css files
+
+
+ # attach callback to the post search hook
+ # request: flask request object
+ # ctx: the whole local context of the post search hook
+ def post_search(request, ctx):
+ ctx['search'].suggestions.add('example')
+ return True
+
+Plugin entry points
+===================
+
+Entry points (hooks) define when a plugin runs. Right now only three hooks are
+implemented, so feel free to implement a new hook if it fits the behaviour of
+your plugin.
+
+Pre search hook
+---------------
+
+Runs BEFORE the search request. Function to implement: ``pre_search``
+
+Post search hook
+----------------
+
+Runs AFTER the search request. Function to implement: ``post_search``
+
+Result hook
+-----------
+
+Runs when a new result is added to the result list. Function to implement:
+``on_result``
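+
+As an illustration, a result hook could follow the same convention as the
+example plugin above. This is only a sketch: the exact arguments passed to
+``on_result`` depend on the searx version, so treat the signature, the
+``ctx`` keys and the meaning of the return value below as assumptions.
+
+.. code:: python
+
+    # sketch of a result hook; the signature is assumed to mirror the
+    # (request, ctx) convention of the example plugin above
+    def on_result(request, ctx):
+        result = ctx['result']  # assumption: the new result is available here
+        # keep only results that have a title; returning False is assumed
+        # to discard the result
+        return bool(result.get('title'))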
diff --git a/docs/dev/quickstart.rst b/docs/dev/quickstart.rst
new file mode 100644
index 000000000..a4a37a266
--- /dev/null
+++ b/docs/dev/quickstart.rst
@@ -0,0 +1,110 @@
+.. _devquickstart:
+
+======================
+Development Quickstart
+======================
+
+This quickstart guide gets your environment set up with searx. Furthermore, it
+gives a short introduction to the ``manage.sh`` script.
+
+How to setup your development environment
+=========================================
+
+First, clone the source code of searx to the desired folder. In this case the
+source is cloned to ``~/myprojects/searx``. Then create and activate the
+searx-ve virtualenv and install the required packages using manage.sh.
+
+.. code:: sh
+
+ cd ~/myprojects
+ git clone https://github.com/asciimoo/searx.git
+ cd searx
+ virtualenv searx-ve
+ . ./searx-ve/bin/activate
+ ./manage.sh update_dev_packages
+
+
+How to run tests
+================
+
+Tests can be run using the ``manage.sh`` script. The following tests and checks
+are available:
+
+- Unit tests
+- Selenium tests
+- PEP8 validation
+- Unit test coverage check
+
+For example, unit tests are run with the command below:
+
+.. code:: sh
+
+ ./manage.sh unit_tests
+
+For further test options, please consult the help of the ``manage.sh`` script.
+
+
+How to compile styles and javascript
+====================================
+
+.. _less: http://lesscss.org/
+.. _NodeJS: https://nodejs.org
+
+How to build styles
+-------------------
+
+Less_ is required to build the styles of searx. Less_ can be installed using
+either NodeJS_ or Apt.
+
+.. code:: sh
+
+ sudo -H apt-get install nodejs
+ sudo -H npm install -g less
+
+OR
+
+.. code:: sh
+
+ sudo -H apt-get install node-less
+
+After satisfying the requirements, styles can be built using ``manage.sh``:
+
+.. code:: sh
+
+ ./manage.sh styles
+
+
+How to build the source of the oscar theme
+==========================================
+
+.. _grunt: https://gruntjs.com/
+
+Grunt_ must be installed in order to build the javascript sources. It depends on
+NodeJS, so first Node has to be installed.
+
+.. code:: sh
+
+ sudo -H apt-get install nodejs
+ sudo -H npm install -g grunt-cli
+
+After installing grunt, the files can be built using the following command:
+
+.. code:: sh
+
+ ./manage.sh grunt_build
+
+
+Tips for debugging/development
+==============================
+
+Turn on debug logging
+ Whether you are working on a new engine or trying to eliminate a bug, it is
+ always a good idea to turn on debug logging. When debug logging is enabled a
+ stack trace appears, instead of the cryptic ``Internal Server Error``
+ message. It can be turned on by changing ``debug: False`` to ``debug: True`` in
+ :origin:`settings.yml <searx/settings.yml>`.
+
+Run ``./manage.sh tests`` before creating a PR.
+ A failing build on Travis is commonly caused by PEP8 checks, so a new commit
+ must be created containing these format fixes. This phase can be skipped if
+ ``./manage.sh tests`` is run locally before creating a PR.
diff --git a/docs/dev/search_api.rst b/docs/dev/search_api.rst
new file mode 100644
index 000000000..158cab7c5
--- /dev/null
+++ b/docs/dev/search_api.rst
@@ -0,0 +1,112 @@
+==========
+Search API
+==========
+
+The search supports both ``GET`` and ``POST``.
+
+Furthermore, two endpoints ``/`` and ``/search`` are available for querying.
+
+
+``GET /``
+
+``GET /search``
+
+Parameters
+==========
+
+``q`` : required
+ The search query. This string is passed to external search services. Thus,
+ searx supports the syntax of each search service. For example,
+ ``site:github.com searx`` is a valid query for Google. However, if the query
+ above is simply passed to any search engine which does not filter its results
+ based on this syntax, you might not get the results you wanted.
+
+ See more at :ref:`search-syntax`
+
+``categories`` : optional
+ Comma separated list, specifies the active search categories
+
+``engines``: optional
+ Comma separated list, specifies the active search engines.
+
+``lang``: default ``all``
+ Code of the language.
+
+``pageno``: default ``1``
+ Search page number.
+
+``time_range``: optional
+ [ ``day``, ``month``, ``year`` ]
+
+ Time range of search for engines which support it. See if an engine supports
+ time range search in the preferences page of an instance.
+
+``format``: optional
+ [ ``json``, ``csv``, ``rss`` ]
+
+ Output format of results.
+
+``results_on_new_tab``: default ``0``
+ [ ``0``, ``1`` ]
+
+ Open search results on new tab.
+
+``image_proxy``: default ``False``
+ [ ``True``, ``False`` ]
+
+ Proxy image results through searx.
+
+``autocomplete``: default *empty*
+ [ ``google``, ``dbpedia``, ``duckduckgo``, ``startpage``, ``wikipedia`` ]
+
+ Service which completes words as you type.
+
+``safesearch``: default ``None``
+ [ ``0``, ``1``, ``None`` ]
+
+ Filter search results of engines which support safe search. See if an engine
+ supports safe search in the preferences page of an instance.
+
+``theme``: default ``oscar``
+ [ ``oscar``, ``simple``, ``legacy``, ``pix-art``, ``courgette`` ]
+
+ Theme of instance.
+
+ Please note, available themes depend on an instance. It is possible that an
+ instance administrator deleted, created or renamed themes on his/her instance.
+ See the available options in the preferences page of the instance.
+
+``oscar-style``: default ``logicodev``
+ [ ``pointhi``, ``logicodev`` ]
+
+ Style of Oscar theme. It is only parsed if the theme of an instance is
+ ``oscar``.
+
+ Please note, available styles depend on an instance. It is possible that an
+ instance administrator deleted, created or renamed styles on his/her
+ instance. See the available options in the preferences page of the instance.
+
+``enabled_plugins``: optional
+ List of enabled plugins.
+
+ :default: ``HTTPS_rewrite``, ``Self_Informations``,
+ ``Search_on_category_select``, ``Tracker_URL_remover``
+
+ :values: [ ``DOAI_rewrite``, ``HTTPS_rewrite``, ``Infinite_scroll``,
+ ``Vim-like_hotkeys``, ``Self_Informations``, ``Tracker_URL_remover``,
+ ``Search_on_category_select`` ]
+
+``disabled_plugins``: optional
+ List of disabled plugins.
+
+ :default: ``DOAI_rewrite``, ``Infinite_scroll``, ``Vim-like_hotkeys``
+ :values: ``DOAI_rewrite``, ``HTTPS_rewrite``, ``Infinite_scroll``,
+ ``Vim-like_hotkeys``, ``Self_Informations``, ``Tracker_URL_remover``,
+ ``Search_on_category_select``
+
+``enabled_engines``: optional : *all* :origin:`engines <searx/engines>`
+ List of enabled engines.
+
+``disabled_engines``: optional : *all* :origin:`engines <searx/engines>`
+ List of disabled engines.
+
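+For illustration, a minimal client could query the API like the sketch below.
+The instance URL is an example, and JSON output (``format=json``) has to be
+available on the instance you query.
+
+.. code:: python
+
+    import requests
+
+    # query a searx instance for JSON results (instance URL is illustrative)
+    resp = requests.get('https://searx.example.org/search',
+                        params={'q': 'searx', 'format': 'json', 'lang': 'en'})
+
+    for result in resp.json().get('results', []):
+        print(result['title'], '-', result['url'])
+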
diff --git a/docs/dev/translation.rst b/docs/dev/translation.rst
new file mode 100644
index 000000000..86c4c843b
--- /dev/null
+++ b/docs/dev/translation.rst
@@ -0,0 +1,71 @@
+.. _translation:
+
+===========
+Translation
+===========
+
+.. _searx@transifex: https://www.transifex.com/asciimoo/searx/
+
+Translation currently takes place on `searx@transifex`_
+
+Requirements
+============
+
+* Transifex account
+* Installed CLI tool of Transifex
+
+Init Transifex project
+======================
+
+After installing ``transifex`` using pip, run the following command to
+initialize the project.
+
+.. code:: sh
+
+ tx init # Transifex instance: https://www.transifex.com/asciimoo/searx/
+
+
+After ``$HOME/.transifexrc`` is created, get a Transifex API key and insert it
+into the configuration file.
+
+Create a configuration file for ``tx`` named ``$HOME/.tx/config``.
+
+.. code:: ini
+
+ [main]
+ host = https://www.transifex.com
+ [searx.messagespo]
+ file_filter = searx/translations/<lang>/LC_MESSAGES/messages.po
+ source_file = messages.pot
+ source_lang = en
+ type = PO
+
+
+Then run ``tx set``:
+
+.. code:: shell
+
+ tx set --auto-local -r searx.messagespo 'searx/translations/<lang>/LC_MESSAGES/messages.po' \
+ --source-lang en --type PO --source-file messages.pot --execute
+
+
+Update translations
+===================
+
+To retrieve the latest translations, pull them from Transifex.
+
+.. code:: sh
+
+ tx pull -a
+
+Then check the new languages. If only a few strings are translated, delete those
+folders, because they should not be compiled. Call the command below to compile
+the ``.po`` files.
+
+.. code:: shell
+
+ pybabel compile -d searx/translations
+
+
+After the compilation is finished, commit the ``.po`` and ``.mo`` files and
+create a PR.
diff --git a/docs/index.rst b/docs/index.rst
new file mode 100644
index 000000000..d9503fef6
--- /dev/null
+++ b/docs/index.rst
@@ -0,0 +1,32 @@
+================
+Welcome to searx
+================
+
+Search without being tracked.
+
+.. sidebar:: Features
+
+ - Self hosted
+ - No user tracking
+ - No user profiling
+ - About 70 supported search engines
+ - Easy integration with any search engine
+ - Cookies are not used by default
+ - Secure, encrypted connections (HTTPS/SSL)
+ - Hosted by organizations, such as *La Quadrature du Net*, which promote
+ digital rights
+
+Searx is a free internet metasearch engine which aggregates results from more
+than 70 search services. Users are neither tracked nor profiled. Additionally,
+searx can be used over Tor for online anonymity.
+
+Get started with searx by using one of the :wiki:`Searx-instances`. If you
+don't trust anyone, you can set up your own, see :ref:`installation`.
+
+.. toctree::
+ :maxdepth: 2
+
+ user/index
+ admin/index
+ dev/index
+ blog/index
diff --git a/docs/static/img/searx_logo_small.png b/docs/static/img/searx_logo_small.png
new file mode 100644
index 000000000..083932686
--- /dev/null
+++ b/docs/static/img/searx_logo_small.png
Binary files differ
diff --git a/docs/user/index.rst b/docs/user/index.rst
new file mode 100644
index 000000000..b13aca216
--- /dev/null
+++ b/docs/user/index.rst
@@ -0,0 +1,9 @@
+==================
+User documentation
+==================
+
+.. toctree::
+ :maxdepth: 1
+
+ search_syntax
+ own-instance
diff --git a/docs/user/own-instance.rst b/docs/user/own-instance.rst
new file mode 100644
index 000000000..a2f736562
--- /dev/null
+++ b/docs/user/own-instance.rst
@@ -0,0 +1,77 @@
+===========================
+Why use a private instance?
+===========================
+
+"Is it worth to run my own instance?" is a common question among searx users.
+Before answering this question, see what options a searx user has.
+
+Public instances are open to everyone who has access to their URL. Usually, they
+are operated by unknown parties (from the users' point of view). Private
+instances can be used by a select group of people, for example a searx instance
+for a group of friends or a company which can be accessed through a VPN. It can
+also be a single-user instance running on the user's laptop.
+
+To gain more insight on how these instances work, let's dive into how searx
+protects its users.
+
+How does searx protect privacy?
+===============================
+
+Searx protects the privacy of its users in multiple ways regardless of the type
+of the instance (private, public). Removal of private data from search requests
+comes in three forms:
+
+ 1. removal of private data from requests going to search services
+ 2. not forwarding anything from third party services through search services
+    (e.g. advertisements)
+ 3. removal of private data from requests going to the result pages
+
+Removing private data means not sending cookies to external search engines and
+generating a random browser profile for every request. Thus, it does not matter
+if a public or private instance handles the request, because it is anonymized in
+both cases. The IP address used will be the IP of the instance, but searx can be
+configured to use a proxy or Tor. `Result proxy
+<https://github.com/asciimoo/morty>`__ is supported, too.
+
+Unlike most search services, searx does not serve ads or tracking content, so
+private data is not forwarded to third parties who might monetize it. Besides
+protecting users from search services, both the referring page and the search
+query are hidden from visited result pages.
+
+
+What are the consequences of using public instances?
+----------------------------------------------------
+
+If someone uses a public instance, he/she has to trust the administrator of that
+instance. This means that the user of the public instance does not know whether
+his/her requests are logged, aggregated and sent or sold to a third party.
+
+Also, public instances without proper protection are more vulnerable to abuse of
+the search service. In this case the external service in turn returns CAPTCHAs
+or bans the IP of the instance. Thus, search requests return fewer results.
+
+I see. What about private instances?
+------------------------------------
+
+If users run their own instances, everything is in their control: the source
+code, logging settings and private data. Unknown instance administrators do not
+have to be trusted.
+
+Furthermore, as the default settings of their instance are editable, there is no
+need to use cookies to tailor searx to their needs, so preferences will not be
+reset to defaults when clearing browser cookies. As settings are stored on their
+own computer, they will not be accessible to others as long as the computer is
+not compromised.
+
+Conclusion
+==========
+
+Always use an instance which is operated by people you trust. The privacy
+features of searx are available to users no matter what kind of instance they
+use.
+
+If someone is on the go or just wants to try searx for the first time, public
+instances are the best choice. Additionally, public instances make the world a
+better place, because those who cannot or do not want to run an instance still
+have access to a privacy respecting search service.
diff --git a/docs/user/search_syntax.rst b/docs/user/search_syntax.rst
new file mode 100644
index 000000000..b738c7274
--- /dev/null
+++ b/docs/user/search_syntax.rst
@@ -0,0 +1,42 @@
+
+.. _search-syntax:
+
+=============
+Search syntax
+=============
+
+Searx allows you to modify the default categories, engines and search language
+via the search query.
+
+Prefix: ``!``
+ to set Category/engine
+
+Prefix: ``:``
+ to set language
+
+Prefix: ``?``
+ to add engines and categories to the currently selected categories
+
+Abbreviations of the engines and languages are also accepted. Engine/category
+modifiers are chainable and inclusive (e.g. with :search:`!it !ddg !wp qwer
+<?q=%21it%20%21ddg%20%21wp%20qwer>` search in IT category **and** duckduckgo
+**and** wikipedia for ``qwer``).
+
+See the :search:`/preferences page <preferences>` for the list of engines,
+categories and languages.
+
+Examples
+========
+
+Search in wikipedia for ``qwer``:
+
+- :search:`!wp qwer <?q=%21wp%20qwer>` or
+- :search:`!wikipedia qwer <?q=%21wikipedia%20qwer>`
+
+Image search:
+
+- :search:`!images Cthulhu <?q=%21images%20Cthulhu>`
+
+Custom language in wikipedia:
+
+- :search:`:hu !wp hackerspace <?q=%3Ahu%20%21wp%20hackerspace>`
diff --git a/manage.sh b/manage.sh
index dbe48d31d..496a522ba 100755
--- a/manage.sh
+++ b/manage.sh
@@ -18,12 +18,12 @@ ACTION="$1"
update_packages() {
pip install --upgrade pip
pip install --upgrade setuptools
- pip install -r "$BASE_DIR/requirements.txt"
+ pip install -Ur "$BASE_DIR/requirements.txt"
}
update_dev_packages() {
update_packages
- pip install -r "$BASE_DIR/requirements-dev.txt"
+ pip install -Ur "$BASE_DIR/requirements-dev.txt"
}
install_geckodriver() {
@@ -70,6 +70,11 @@ locales() {
pybabel compile -d "$SEARX_DIR/translations"
}
+update_useragents() {
+ echo '[!] Updating user agent versions'
+ python utils/fetch_firefox_version.py
+}
+
pep8_check() {
echo '[!] Running pep8 check'
# ignored rules:
@@ -246,6 +251,7 @@ Commands
update_dev_packages - Check & update development and production dependency changes
install_geckodriver - Download & install geckodriver if not already installed (required for robot_tests)
npm_packages - Download & install npm dependencies
+ update_useragents - Update useragents.json with the most recent versions of Firefox
Build
-----
diff --git a/requirements-dev.txt b/requirements-dev.txt
index 5e015a88a..0bdf20469 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -1,3 +1,6 @@
+pallets-sphinx-themes
+Sphinx
+sphinx-issues
mock==2.0.0
nose2[coverage_plugin]
cov-core==1.15.0
diff --git a/searx/data/useragents.json b/searx/data/useragents.json
index 850bc418a..abb81000c 100644
--- a/searx/data/useragents.json
+++ b/searx/data/useragents.json
@@ -1,14 +1,15 @@
{
- "ua": "Mozilla/5.0 ({os}; rv:{version}) Gecko/20100101 Firefox/{version}",
"versions": [
- "61.0.1",
- "61.0",
- "60.0.2",
- "60.0.1",
- "60.0"
+ "70.0.1",
+ "70.0",
+ "69.0.3",
+ "69.0.2",
+ "69.0.1",
+ "69.0"
],
"os": [
"Windows NT 10; WOW64",
"X11; Linux x86_64"
- ]
+ ],
+ "ua": "Mozilla/5.0 ({os}; rv:{version}) Gecko/20100101 Firefox/{version}"
} \ No newline at end of file
diff --git a/searx/engines/__init__.py b/searx/engines/__init__.py
index a10b1ccd9..2393f52b6 100644
--- a/searx/engines/__init__.py
+++ b/searx/engines/__init__.py
@@ -27,7 +27,7 @@ from json import loads
from requests import get
from searx import settings
from searx import logger
-from searx.utils import load_module, match_language
+from searx.utils import load_module, match_language, get_engine_from_settings
logger = logger.getChild('engines')
@@ -53,7 +53,8 @@ engine_default_args = {'paging': False,
'disabled': False,
'suspend_end_time': 0,
'continuous_errors': 0,
- 'time_range_support': False}
+ 'time_range_support': False,
+ 'offline': False}
def load_engine(engine_data):
@@ -128,14 +129,16 @@ def load_engine(engine_data):
engine.stats = {
'result_count': 0,
'search_count': 0,
- 'page_load_time': 0,
- 'page_load_count': 0,
'engine_time': 0,
'engine_time_count': 0,
'score_count': 0,
'errors': 0
}
+ if not engine.offline:
+ engine.stats['page_load_time'] = 0
+ engine.stats['page_load_count'] = 0
+
for category_name in engine.categories:
categories.setdefault(category_name, []).append(engine)
@@ -173,11 +176,6 @@ def get_engines_stats():
results_num = \
engine.stats['result_count'] / float(engine.stats['search_count'])
- if engine.stats['page_load_count'] != 0:
- load_times = engine.stats['page_load_time'] / float(engine.stats['page_load_count']) # noqa
- else:
- load_times = 0
-
if engine.stats['engine_time_count'] != 0:
this_engine_time = engine.stats['engine_time'] / float(engine.stats['engine_time_count']) # noqa
else:
@@ -189,14 +187,19 @@ def get_engines_stats():
else:
score = score_per_result = 0.0
- max_pageload = max(load_times, max_pageload)
+ if not engine.offline:
+ load_times = 0
+ if engine.stats['page_load_count'] != 0:
+ load_times = engine.stats['page_load_time'] / float(engine.stats['page_load_count']) # noqa
+ max_pageload = max(load_times, max_pageload)
+ pageloads.append({'avg': load_times, 'name': engine.name})
+
max_engine_times = max(this_engine_time, max_engine_times)
max_results = max(results_num, max_results)
max_score = max(score, max_score)
max_score_per_result = max(score_per_result, max_score_per_result)
max_errors = max(max_errors, engine.stats['errors'])
- pageloads.append({'avg': load_times, 'name': engine.name})
engine_times.append({'avg': this_engine_time, 'name': engine.name})
results.append({'avg': results_num, 'name': engine.name})
scores.append({'avg': score, 'name': engine.name})
@@ -255,7 +258,7 @@ def initialize_engines(engine_list):
load_engines(engine_list)
def engine_init(engine_name, init_fn):
- init_fn()
+ init_fn(get_engine_from_settings(engine_name))
logger.debug('%s engine: Initialized', engine_name)
for engine_name, engine in engines.items():
diff --git a/searx/engines/arxiv.py b/searx/engines/arxiv.py
index 182861892..e3c871d17 100644
--- a/searx/engines/arxiv.py
+++ b/searx/engines/arxiv.py
@@ -17,6 +17,7 @@ from searx.url_utils import urlencode
categories = ['science']
+paging = True
base_url = 'http://export.arxiv.org/api/query?search_query=all:'\
+ '{query}&start={offset}&max_results={number_of_results}'
diff --git a/searx/engines/bing.py b/searx/engines/bing.py
index 1e614867b..ed0b87dbd 100644
--- a/searx/engines/bing.py
+++ b/searx/engines/bing.py
@@ -18,7 +18,7 @@ from lxml import html
from searx import logger, utils
from searx.engines.xpath import extract_text
from searx.url_utils import urlencode
-from searx.utils import match_language, gen_useragent
+from searx.utils import match_language, gen_useragent, eval_xpath
logger = logger.getChild('bing engine')
@@ -65,11 +65,11 @@ def response(resp):
dom = html.fromstring(resp.text)
# parse results
- for result in dom.xpath('//div[@class="sa_cc"]'):
- link = result.xpath('.//h3/a')[0]
+ for result in eval_xpath(dom, '//div[@class="sa_cc"]'):
+ link = eval_xpath(result, './/h3/a')[0]
url = link.attrib.get('href')
title = extract_text(link)
- content = extract_text(result.xpath('.//p'))
+ content = extract_text(eval_xpath(result, './/p'))
# append result
results.append({'url': url,
@@ -77,11 +77,11 @@ def response(resp):
'content': content})
# parse results again if nothing is found yet
- for result in dom.xpath('//li[@class="b_algo"]'):
- link = result.xpath('.//h2/a')[0]
+ for result in eval_xpath(dom, '//li[@class="b_algo"]'):
+ link = eval_xpath(result, './/h2/a')[0]
url = link.attrib.get('href')
title = extract_text(link)
- content = extract_text(result.xpath('.//p'))
+ content = extract_text(eval_xpath(result, './/p'))
# append result
results.append({'url': url,
@@ -89,7 +89,7 @@ def response(resp):
'content': content})
try:
- result_len_container = "".join(dom.xpath('//span[@class="sb_count"]/text()'))
+ result_len_container = "".join(eval_xpath(dom, '//span[@class="sb_count"]/text()'))
result_len_container = utils.to_string(result_len_container)
if "-" in result_len_container:
# Remove the part "from-to" for paginated request ...
@@ -113,9 +113,9 @@ def response(resp):
def _fetch_supported_languages(resp):
supported_languages = []
dom = html.fromstring(resp.text)
- options = dom.xpath('//div[@id="limit-languages"]//input')
+ options = eval_xpath(dom, '//div[@id="limit-languages"]//input')
for option in options:
- code = option.xpath('./@id')[0].replace('_', '-')
+ code = eval_xpath(option, './@id')[0].replace('_', '-')
if code == 'nb':
code = 'no'
supported_languages.append(code)
diff --git a/searx/engines/deviantart.py b/searx/engines/deviantart.py
index bb85c6dc5..a0e27e622 100644
--- a/searx/engines/deviantart.py
+++ b/searx/engines/deviantart.py
@@ -24,7 +24,7 @@ time_range_support = True
# search-url
base_url = 'https://www.deviantart.com/'
-search_url = base_url + 'browse/all/?offset={offset}&{query}'
+search_url = base_url + 'search?page={page}&{query}'
time_range_url = '&order={range}'
time_range_dict = {'day': 11,
@@ -37,9 +37,7 @@ def request(query, params):
if params['time_range'] and params['time_range'] not in time_range_dict:
return params
- offset = (params['pageno'] - 1) * 24
-
- params['url'] = search_url.format(offset=offset,
+ params['url'] = search_url.format(page=params['pageno'],
query=urlencode({'q': query}))
if params['time_range'] in time_range_dict:
params['url'] += time_range_url.format(range=time_range_dict[params['time_range']])
@@ -57,28 +55,27 @@ def response(resp):
dom = html.fromstring(resp.text)
- regex = re.compile(r'\/200H\/')
-
# parse results
- for result in dom.xpath('.//span[@class="thumb wide"]'):
- link = result.xpath('.//a[@class="torpedo-thumb-link"]')[0]
- url = link.attrib.get('href')
- title = extract_text(result.xpath('.//span[@class="title"]'))
- thumbnail_src = link.xpath('.//img')[0].attrib.get('src')
- img_src = regex.sub('/', thumbnail_src)
-
- # http to https, remove domain sharding
- thumbnail_src = re.sub(r"https?://(th|fc)\d+.", "https://th01.", thumbnail_src)
- thumbnail_src = re.sub(r"http://", "https://", thumbnail_src)
-
- url = re.sub(r"http://(.*)\.deviantart\.com/", "https://\\1.deviantart.com/", url)
-
- # append result
- results.append({'url': url,
- 'title': title,
- 'img_src': img_src,
- 'thumbnail_src': thumbnail_src,
- 'template': 'images.html'})
+ for row in dom.xpath('//div[contains(@data-hook, "content_row")]'):
+ for result in row.xpath('./div'):
+ link = result.xpath('.//a[@data-hook="deviation_link"]')[0]
+ url = link.attrib.get('href')
+ title = link.attrib.get('title')
+ thumbnail_src = result.xpath('.//img')[0].attrib.get('src')
+ img_src = thumbnail_src
+
+ # http to https, remove domain sharding
+ thumbnail_src = re.sub(r"https?://(th|fc)\d+.", "https://th01.", thumbnail_src)
+ thumbnail_src = re.sub(r"http://", "https://", thumbnail_src)
+
+ url = re.sub(r"http://(.*)\.deviantart\.com/", "https://\\1.deviantart.com/", url)
+
+ # append result
+ results.append({'url': url,
+ 'title': title,
+ 'img_src': img_src,
+ 'thumbnail_src': thumbnail_src,
+ 'template': 'images.html'})
# return results
return results
diff --git a/searx/engines/dictzone.py b/searx/engines/dictzone.py
index 09db048cc..423af0971 100644
--- a/searx/engines/dictzone.py
+++ b/searx/engines/dictzone.py
@@ -11,7 +11,7 @@
import re
from lxml import html
-from searx.utils import is_valid_lang
+from searx.utils import is_valid_lang, eval_xpath
from searx.url_utils import urljoin
categories = ['general']
@@ -47,14 +47,14 @@ def response(resp):
dom = html.fromstring(resp.text)
- for k, result in enumerate(dom.xpath(results_xpath)[1:]):
+ for k, result in enumerate(eval_xpath(dom, results_xpath)[1:]):
try:
- from_result, to_results_raw = result.xpath('./td')
+ from_result, to_results_raw = eval_xpath(result, './td')
except:
continue
to_results = []
- for to_result in to_results_raw.xpath('./p/a'):
+ for to_result in eval_xpath(to_results_raw, './p/a'):
t = to_result.text_content()
if t.strip():
to_results.append(to_result.text_content())
diff --git a/searx/engines/digg.py b/searx/engines/digg.py
index 4369ccb84..073410eb0 100644
--- a/searx/engines/digg.py
+++ b/searx/engines/digg.py
@@ -15,7 +15,8 @@ import string
from dateutil import parser
from json import loads
from lxml import html
-from searx.url_utils import quote_plus
+from searx.url_utils import urlencode
+from datetime import datetime
# engine dependent config
categories = ['news', 'social media']
@@ -23,7 +24,7 @@ paging = True
# search-url
base_url = 'https://digg.com/'
-search_url = base_url + 'api/search/{query}.json?position={position}&format=html'
+search_url = base_url + 'api/search/?{query}&from={position}&size=20&format=html'
# specific xpath variables
results_xpath = '//article'
@@ -38,9 +39,9 @@ digg_cookie_chars = string.ascii_uppercase + string.ascii_lowercase +\
# do search-request
def request(query, params):
- offset = (params['pageno'] - 1) * 10
+ offset = (params['pageno'] - 1) * 20
params['url'] = search_url.format(position=offset,
- query=quote_plus(query))
+ query=urlencode({'q': query}))
params['cookies']['frontend.auid'] = ''.join(random.choice(
digg_cookie_chars) for _ in range(22))
return params
@@ -52,30 +53,17 @@ def response(resp):
search_result = loads(resp.text)
- if 'html' not in search_result or search_result['html'] == '':
- return results
-
- dom = html.fromstring(search_result['html'])
-
# parse results
- for result in dom.xpath(results_xpath):
- url = result.attrib.get('data-contenturl')
- thumbnail = result.xpath('.//img')[0].attrib.get('src')
- title = ''.join(result.xpath(title_xpath))
- content = ''.join(result.xpath(content_xpath))
- pubdate = result.xpath(pubdate_xpath)[0].attrib.get('datetime')
- publishedDate = parser.parse(pubdate)
-
- # http to https
- thumbnail = thumbnail.replace("http://static.digg.com", "https://static.digg.com")
+ for result in search_result['mapped']:
+ published = datetime.strptime(result['created']['ISO'], "%Y-%m-%d %H:%M:%S")
# append result
- results.append({'url': url,
- 'title': title,
- 'content': content,
+ results.append({'url': result['url'],
+ 'title': result['title'],
+ 'content': result['excerpt'],
'template': 'videos.html',
- 'publishedDate': publishedDate,
- 'thumbnail': thumbnail})
+ 'publishedDate': published,
+ 'thumbnail': result['images']['thumbImage']})
# return results
return results
diff --git a/searx/engines/doku.py b/searx/engines/doku.py
index a391be444..d20e66026 100644
--- a/searx/engines/doku.py
+++ b/searx/engines/doku.py
@@ -11,6 +11,7 @@
from lxml.html import fromstring
from searx.engines.xpath import extract_text
+from searx.utils import eval_xpath
from searx.url_utils import urlencode
# engine dependent config
@@ -45,16 +46,16 @@ def response(resp):
# parse results
# Quickhits
- for r in doc.xpath('//div[@class="search_quickresult"]/ul/li'):
+ for r in eval_xpath(doc, '//div[@class="search_quickresult"]/ul/li'):
try:
- res_url = r.xpath('.//a[@class="wikilink1"]/@href')[-1]
+ res_url = eval_xpath(r, './/a[@class="wikilink1"]/@href')[-1]
except:
continue
if not res_url:
continue
- title = extract_text(r.xpath('.//a[@class="wikilink1"]/@title'))
+ title = extract_text(eval_xpath(r, './/a[@class="wikilink1"]/@title'))
# append result
results.append({'title': title,
@@ -62,13 +63,13 @@ def response(resp):
'url': base_url + res_url})
# Search results
- for r in doc.xpath('//dl[@class="search_results"]/*'):
+ for r in eval_xpath(doc, '//dl[@class="search_results"]/*'):
try:
if r.tag == "dt":
- res_url = r.xpath('.//a[@class="wikilink1"]/@href')[-1]
- title = extract_text(r.xpath('.//a[@class="wikilink1"]/@title'))
+ res_url = eval_xpath(r, './/a[@class="wikilink1"]/@href')[-1]
+ title = extract_text(eval_xpath(r, './/a[@class="wikilink1"]/@title'))
elif r.tag == "dd":
- content = extract_text(r.xpath('.'))
+ content = extract_text(eval_xpath(r, '.'))
# append result
results.append({'title': title,
diff --git a/searx/engines/duckduckgo.py b/searx/engines/duckduckgo.py
index fb8f523ac..0d2c0af2d 100644
--- a/searx/engines/duckduckgo.py
+++ b/searx/engines/duckduckgo.py
@@ -18,7 +18,7 @@ from json import loads
from searx.engines.xpath import extract_text
from searx.poolrequests import get
from searx.url_utils import urlencode
-from searx.utils import match_language
+from searx.utils import match_language, eval_xpath
# engine dependent config
categories = ['general']
@@ -65,21 +65,36 @@ def get_region_code(lang, lang_list=[]):
def request(query, params):
- if params['time_range'] and params['time_range'] not in time_range_dict:
+ if params['time_range'] not in (None, 'None', '') and params['time_range'] not in time_range_dict:
return params
offset = (params['pageno'] - 1) * 30
region_code = get_region_code(params['language'], supported_languages)
- if region_code:
- params['url'] = url.format(
- query=urlencode({'q': query, 'kl': region_code}), offset=offset, dc_param=offset)
+ params['url'] = 'https://duckduckgo.com/html/'
+ if params['pageno'] > 1:
+ params['method'] = 'POST'
+ params['data']['q'] = query
+ params['data']['s'] = offset
+ params['data']['dc'] = 30
+ params['data']['nextParams'] = ''
+ params['data']['v'] = 'l'
+ params['data']['o'] = 'json'
+ params['data']['api'] = '/d.js'
+ if params['time_range'] in time_range_dict:
+ params['data']['df'] = time_range_dict[params['time_range']]
+ if region_code:
+ params['data']['kl'] = region_code
else:
- params['url'] = url.format(
- query=urlencode({'q': query}), offset=offset, dc_param=offset)
+ if region_code:
+ params['url'] = url.format(
+ query=urlencode({'q': query, 'kl': region_code}), offset=offset, dc_param=offset)
+ else:
+ params['url'] = url.format(
+ query=urlencode({'q': query}), offset=offset, dc_param=offset)
- if params['time_range'] in time_range_dict:
- params['url'] += time_range_url.format(range=time_range_dict[params['time_range']])
+ if params['time_range'] in time_range_dict:
+ params['url'] += time_range_url.format(range=time_range_dict[params['time_range']])
return params
@@ -91,17 +106,19 @@ def response(resp):
doc = fromstring(resp.text)
# parse results
- for r in doc.xpath(result_xpath):
+ for i, r in enumerate(eval_xpath(doc, result_xpath)):
+ if i >= 30:
+ break
try:
- res_url = r.xpath(url_xpath)[-1]
+ res_url = eval_xpath(r, url_xpath)[-1]
except:
continue
if not res_url:
continue
- title = extract_text(r.xpath(title_xpath))
- content = extract_text(r.xpath(content_xpath))
+ title = extract_text(eval_xpath(r, title_xpath))
+ content = extract_text(eval_xpath(r, content_xpath))
# append result
results.append({'title': title,
diff --git a/searx/engines/duckduckgo_definitions.py b/searx/engines/duckduckgo_definitions.py
index 957a13ea6..79d10c303 100644
--- a/searx/engines/duckduckgo_definitions.py
+++ b/searx/engines/duckduckgo_definitions.py
@@ -1,3 +1,14 @@
+"""
+DuckDuckGo (definitions)
+
+- `Instant Answer API`_
+- `DuckDuckGo query`_
+
+.. _Instant Answer API: https://duckduckgo.com/api
+.. _DuckDuckGo query: https://api.duckduckgo.com/?q=DuckDuckGo&format=json&pretty=1
+
+"""
+
import json
from lxml import html
from re import compile
@@ -25,7 +36,8 @@ def result_to_text(url, text, htmlResult):
def request(query, params):
params['url'] = url.format(query=urlencode({'q': query}))
language = match_language(params['language'], supported_languages, language_aliases)
- params['headers']['Accept-Language'] = language.split('-')[0]
+ language = language.split('-')[0]
+ params['headers']['Accept-Language'] = language
return params
@@ -43,8 +55,9 @@ def response(resp):
# add answer if there is one
answer = search_res.get('Answer', '')
- if answer != '':
- results.append({'answer': html_to_text(answer)})
+ if answer:
+ if search_res.get('AnswerType', '') not in ['calc']:
+ results.append({'answer': html_to_text(answer)})
# add infobox
if 'Definition' in search_res:
diff --git a/searx/engines/duden.py b/searx/engines/duden.py
index 444f18c1f..cf2f1a278 100644
--- a/searx/engines/duden.py
+++ b/searx/engines/duden.py
@@ -11,6 +11,7 @@
from lxml import html, etree
import re
from searx.engines.xpath import extract_text
+from searx.utils import eval_xpath
from searx.url_utils import quote, urljoin
from searx import logger
@@ -52,9 +53,9 @@ def response(resp):
dom = html.fromstring(resp.text)
try:
- number_of_results_string = re.sub('[^0-9]', '', dom.xpath(
- '//a[@class="active" and contains(@href,"/suchen/dudenonline")]/span/text()')[0]
- )
+ number_of_results_string =\
+ re.sub('[^0-9]', '',
+ eval_xpath(dom, '//a[@class="active" and contains(@href,"/suchen/dudenonline")]/span/text()')[0])
results.append({'number_of_results': int(number_of_results_string)})
@@ -62,12 +63,12 @@ def response(resp):
logger.debug("Couldn't read number of results.")
pass
- for result in dom.xpath('//section[not(contains(@class, "essay"))]'):
+ for result in eval_xpath(dom, '//section[not(contains(@class, "essay"))]'):
try:
- url = result.xpath('.//h2/a')[0].get('href')
+ url = eval_xpath(result, './/h2/a')[0].get('href')
url = urljoin(base_url, url)
- title = result.xpath('string(.//h2/a)').strip()
- content = extract_text(result.xpath('.//p'))
+ title = eval_xpath(result, 'string(.//h2/a)').strip()
+ content = extract_text(eval_xpath(result, './/p'))
# append result
results.append({'url': url,
'title': title,
diff --git a/searx/engines/framalibre.py b/searx/engines/framalibre.py
index 146cdaeec..f3441fa5f 100644
--- a/searx/engines/framalibre.py
+++ b/searx/engines/framalibre.py
@@ -10,7 +10,10 @@
@parse url, title, content, thumbnail, img_src
"""
-from cgi import escape
+try:
+ from cgi import escape
+except:
+ from html import escape
from lxml import html
from searx.engines.xpath import extract_text
from searx.url_utils import urljoin, urlencode
diff --git a/searx/engines/gigablast.py b/searx/engines/gigablast.py
index a6aa5d718..2bb29a9fe 100644
--- a/searx/engines/gigablast.py
+++ b/searx/engines/gigablast.py
@@ -14,7 +14,9 @@ import random
from json import loads
from time import time
from lxml.html import fromstring
+from searx.poolrequests import get
from searx.url_utils import urlencode
+from searx.utils import eval_xpath
# engine dependent config
categories = ['general']
@@ -30,13 +32,9 @@ search_string = 'search?{query}'\
'&c=main'\
'&s={offset}'\
'&format=json'\
- '&qh=0'\
- '&qlang={lang}'\
+ '&langcountry={lang}'\
'&ff={safesearch}'\
- '&rxiec={rxieu}'\
- '&ulse={ulse}'\
- '&rand={rxikd}' # current unix timestamp
-
+ '&rand={rxikd}'
# specific xpath variables
results_xpath = '//response//result'
url_xpath = './/url'
@@ -45,9 +43,26 @@ content_xpath = './/sum'
supported_languages_url = 'https://gigablast.com/search?&rxikd=1'
+extra_param = '' # gigablast requires a random extra parameter
+# which can be extracted from the source code of the search page
+
+
+def parse_extra_param(text):
+ global extra_param
+ param_lines = [x for x in text.splitlines() if x.startswith('var url=') or x.startswith('url=url+')]
+ extra_param = ''
+ for l in param_lines:
+ extra_param += l.split("'")[1]
+ extra_param = extra_param.split('&')[-1]
+
+
+def init(engine_settings=None):
+ parse_extra_param(get('http://gigablast.com/search?c=main&qlangcountry=en-us&q=south&s=10').text)
+
# do search-request
def request(query, params):
+ print("EXTRAPARAM:", extra_param)
offset = (params['pageno'] - 1) * number_of_results
if params['language'] == 'all':
@@ -66,13 +81,11 @@ def request(query, params):
search_path = search_string.format(query=urlencode({'q': query}),
offset=offset,
number_of_results=number_of_results,
- rxikd=int(time() * 1000),
- rxieu=random.randint(1000000000, 9999999999),
- ulse=random.randint(100000000, 999999999),
lang=language,
+ rxikd=int(time() * 1000),
safesearch=safesearch)
- params['url'] = base_url + search_path
+ params['url'] = base_url + search_path + '&' + extra_param
return params
@@ -82,7 +95,11 @@ def response(resp):
results = []
# parse results
- response_json = loads(resp.text)
+ try:
+ response_json = loads(resp.text)
+ except:
+ parse_extra_param(resp.text)
+ raise Exception('extra param expired, please reload')
for result in response_json['results']:
# append result
@@ -98,9 +115,9 @@ def response(resp):
def _fetch_supported_languages(resp):
supported_languages = []
dom = fromstring(resp.text)
- links = dom.xpath('//span[@id="menu2"]/a')
+ links = eval_xpath(dom, '//span[@id="menu2"]/a')
for link in links:
- href = link.xpath('./@href')[0].split('lang%3A')
+ href = eval_xpath(link, './@href')[0].split('lang%3A')
if len(href) == 2:
code = href[1].split('_')
if len(code) == 2:
diff --git a/searx/engines/google.py b/searx/engines/google.py
index 03f0523e7..eed3a044e 100644
--- a/searx/engines/google.py
+++ b/searx/engines/google.py
@@ -14,7 +14,7 @@ from lxml import html, etree
from searx.engines.xpath import extract_text, extract_url
from searx import logger
from searx.url_utils import urlencode, urlparse, parse_qsl
-from searx.utils import match_language
+from searx.utils import match_language, eval_xpath
logger = logger.getChild('google engine')
@@ -107,13 +107,12 @@ images_path = '/images'
supported_languages_url = 'https://www.google.com/preferences?#languages'
# specific xpath variables
-results_xpath = '//div[@class="g"]'
-url_xpath = './/h3/a/@href'
-title_xpath = './/h3'
-content_xpath = './/span[@class="st"]'
-content_misc_xpath = './/div[@class="f slp"]'
-suggestion_xpath = '//p[@class="_Bmc"]'
-spelling_suggestion_xpath = '//a[@class="spell"]'
+results_xpath = '//div[contains(@class, "ZINbbc")]'
+url_xpath = './/div[@class="kCrYT"][1]/a/@href'
+title_xpath = './/div[@class="kCrYT"][1]/a/div[1]'
+content_xpath = './/div[@class="kCrYT"][2]//div[contains(@class, "BNeawe")]//div[contains(@class, "BNeawe")]'
+suggestion_xpath = '//div[contains(@class, "ZINbbc")][last()]//div[@class="rVLSBd"]/a//div[contains(@class, "BNeawe")]'
+spelling_suggestion_xpath = '//div[@id="scc"]//a'
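+# note: these selectors appear to correspond to the basic, script-less result
+# markup Google serves once the old MSIE user-agent override is dropped (see below)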
# map : detail location
map_address_xpath = './/div[@class="s"]//table//td[2]/span/text()'
@@ -156,7 +155,7 @@ def parse_url(url_string, google_hostname):
# returns extract_text on the first result selected by the xpath or None
def extract_text_from_dom(result, xpath):
- r = result.xpath(xpath)
+ r = eval_xpath(result, xpath)
if len(r) > 0:
return extract_text(r[0])
return None
@@ -199,9 +198,6 @@ def request(query, params):
params['headers']['Accept-Language'] = language + ',' + language + '-' + country
params['headers']['Accept'] = 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8'
- # Force Internet Explorer 12 user agent to avoid loading the new UI that Searx can't parse
- params['headers']['User-Agent'] = "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0)"
-
params['google_hostname'] = google_hostname
return params
@@ -226,21 +222,21 @@ def response(resp):
# convert the text to dom
dom = html.fromstring(resp.text)
- instant_answer = dom.xpath('//div[@id="_vBb"]//text()')
+ instant_answer = eval_xpath(dom, '//div[@id="_vBb"]//text()')
if instant_answer:
results.append({'answer': u' '.join(instant_answer)})
try:
- results_num = int(dom.xpath('//div[@id="resultStats"]//text()')[0]
+ results_num = int(eval_xpath(dom, '//div[@id="resultStats"]//text()')[0]
.split()[1].replace(',', ''))
results.append({'number_of_results': results_num})
except:
pass
# parse results
- for result in dom.xpath(results_xpath):
+ for result in eval_xpath(dom, results_xpath):
try:
- title = extract_text(result.xpath(title_xpath)[0])
- url = parse_url(extract_url(result.xpath(url_xpath), google_url), google_hostname)
+ title = extract_text(eval_xpath(result, title_xpath)[0])
+ url = parse_url(extract_url(eval_xpath(result, url_xpath), google_url), google_hostname)
parsed_url = urlparse(url, google_hostname)
# map result
@@ -249,7 +245,7 @@ def response(resp):
continue
# if parsed_url.path.startswith(maps_path) or parsed_url.netloc.startswith(map_hostname_start):
# print "yooooo"*30
- # x = result.xpath(map_near)
+ # x = eval_xpath(result, map_near)
# if len(x) > 0:
# # map : near the location
# results = results + parse_map_near(parsed_url, x, google_hostname)
@@ -273,9 +269,7 @@ def response(resp):
content = extract_text_from_dom(result, content_xpath)
if content is None:
continue
- content_misc = extract_text_from_dom(result, content_misc_xpath)
- if content_misc is not None:
- content = content_misc + "<br />" + content
+
# append result
results.append({'url': url,
'title': title,
@@ -286,11 +280,11 @@ def response(resp):
continue
# parse suggestion
- for suggestion in dom.xpath(suggestion_xpath):
+ for suggestion in eval_xpath(dom, suggestion_xpath):
# append suggestion
results.append({'suggestion': extract_text(suggestion)})
- for correction in dom.xpath(spelling_suggestion_xpath):
+ for correction in eval_xpath(dom, spelling_suggestion_xpath):
results.append({'correction': extract_text(correction)})
# return results
@@ -299,9 +293,9 @@ def response(resp):
def parse_images(result, google_hostname):
results = []
- for image in result.xpath(images_xpath):
- url = parse_url(extract_text(image.xpath(image_url_xpath)[0]), google_hostname)
- img_src = extract_text(image.xpath(image_img_src_xpath)[0])
+ for image in eval_xpath(result, images_xpath):
+ url = parse_url(extract_text(eval_xpath(image, image_url_xpath)[0]), google_hostname)
+ img_src = extract_text(eval_xpath(image, image_img_src_xpath)[0])
# append result
results.append({'url': url,
@@ -388,10 +382,10 @@ def attributes_to_html(attributes):
def _fetch_supported_languages(resp):
supported_languages = {}
dom = html.fromstring(resp.text)
- options = dom.xpath('//*[@id="langSec"]//input[@name="lr"]')
+ options = eval_xpath(dom, '//*[@id="langSec"]//input[@name="lr"]')
for option in options:
- code = option.xpath('./@value')[0].split('_')[-1]
- name = option.xpath('./@data-name')[0].title()
+ code = eval_xpath(option, './@value')[0].split('_')[-1]
+ name = eval_xpath(option, './@data-name')[0].title()
supported_languages[code] = {"name": name}
return supported_languages
diff --git a/searx/engines/google_images.py b/searx/engines/google_images.py
index d9a49e9cc..636913114 100644
--- a/searx/engines/google_images.py
+++ b/searx/engines/google_images.py
@@ -70,11 +70,21 @@ def response(resp):
try:
metadata = loads(result)
- img_format = "{0} {1}x{2}".format(metadata['ity'], str(metadata['ow']), str(metadata['oh']))
- source = "{0} ({1})".format(metadata['st'], metadata['isu'])
+
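+            # some results are missing metadata fields, so fall back to empty
+            # strings via .get() instead of indexing (which raised KeyError)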
+ img_format = metadata.get('ity', '')
+ img_width = metadata.get('ow', '')
+ img_height = metadata.get('oh', '')
+ if img_width and img_height:
+ img_format += " {0}x{1}".format(img_width, img_height)
+
+ source = metadata.get('st', '')
+ source_url = metadata.get('isu', '')
+ if source_url:
+ source += " ({0})".format(source_url)
+
results.append({'url': metadata['ru'],
'title': metadata['pt'],
- 'content': metadata['s'],
+ 'content': metadata.get('s', ''),
'source': source,
'img_format': img_format,
'thumbnail_src': metadata['tu'],
diff --git a/searx/engines/openstreetmap.py b/searx/engines/openstreetmap.py
index 733ba6203..cec10a3c7 100644
--- a/searx/engines/openstreetmap.py
+++ b/searx/engines/openstreetmap.py
@@ -24,7 +24,7 @@ result_base_url = 'https://openstreetmap.org/{osm_type}/{osm_id}'
# do search-request
def request(query, params):
- params['url'] = base_url + search_string.format(query=query)
+ params['url'] = base_url + search_string.format(query=query.decode('utf-8'))
return params
diff --git a/searx/engines/qwant.py b/searx/engines/qwant.py
index de12955c6..54e9dafad 100644
--- a/searx/engines/qwant.py
+++ b/searx/engines/qwant.py
@@ -50,6 +50,7 @@ def request(query, params):
language = match_language(params['language'], supported_languages, language_aliases)
params['url'] += '&locale=' + language.replace('-', '_').lower()
+ params['headers']['User-Agent'] = 'Mozilla/5.0 (X11; Linux x86_64; rv:69.0) Gecko/20100101 Firefox/69.0'
return params
diff --git a/searx/engines/seedpeer.py b/searx/engines/seedpeer.py
new file mode 100644
index 000000000..f9b1f99c8
--- /dev/null
+++ b/searx/engines/seedpeer.py
@@ -0,0 +1,78 @@
+# Seedpeer (Videos, Music, Files)
+#
+# @website https://seedpeer.me
+# @provide-api no (nothing found)
+#
+# @using-api no
+# @results HTML (using search portal)
+# @stable yes (HTML can change)
+# @parse url, title, content, seed, leech, magnetlink
+
+from lxml import html
+from json import loads
+from operator import itemgetter
+from searx.url_utils import quote, urljoin
+from searx.engines.xpath import extract_text
+
+
+url = 'https://seedpeer.me/'
+search_url = url + 'search/{search_term}?page={page_no}'
+torrent_file_url = url + 'torrent/{torrent_hash}'
+
+# specific xpath variables
+script_xpath = '//script[@type="text/javascript"][not(@src)]'
+torrent_xpath = '(//table)[2]/tbody/tr'
+link_xpath = '(./td)[1]/a/@href'
+age_xpath = '(./td)[2]'
+size_xpath = '(./td)[3]'
+
+
+# do search-request
+def request(query, params):
+ params['url'] = search_url.format(search_term=quote(query),
+ page_no=params['pageno'])
+ return params
+
+
+# get response from search-request
+def response(resp):
+ results = []
+ dom = html.fromstring(resp.text)
+ result_rows = dom.xpath(torrent_xpath)
+
+ try:
+ script_element = dom.xpath(script_xpath)[0]
+ json_string = script_element.text[script_element.text.find('{'):]
+ torrents_json = loads(json_string)
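+        # the result page embeds the torrent list as JSON inside the first
+        # matching inline <script>; everything from the first '{' onward is
+        # parsed as the JSON payload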
+ except:
+ return []
+
+ # parse results
+ for torrent_row, torrent_json in zip(result_rows, torrents_json['data']['list']):
+ title = torrent_json['name']
+ seed = int(torrent_json['seeds'])
+ leech = int(torrent_json['peers'])
+ size = int(torrent_json['size'])
+ torrent_hash = torrent_json['hash']
+
+ torrentfile = torrent_file_url.format(torrent_hash=torrent_hash)
+ magnetlink = 'magnet:?xt=urn:btih:{}'.format(torrent_hash)
+
+ age = extract_text(torrent_row.xpath(age_xpath))
+ link = torrent_row.xpath(link_xpath)[0]
+
+ href = urljoin(url, link)
+
+ # append result
+ results.append({'url': href,
+ 'title': title,
+ 'content': age,
+ 'seed': seed,
+ 'leech': leech,
+ 'filesize': size,
+ 'torrentfile': torrentfile,
+ 'magnetlink': magnetlink,
+ 'template': 'torrent.html'})
+
+ # return results sorted by seeder
+ return sorted(results, key=itemgetter('seed'), reverse=True)
diff --git a/searx/engines/soundcloud.py b/searx/engines/soundcloud.py
index 3ba9a7f39..284689bf6 100644
--- a/searx/engines/soundcloud.py
+++ b/searx/engines/soundcloud.py
@@ -51,7 +51,9 @@ def get_client_id():
if response.ok:
tree = html.fromstring(response.content)
- script_tags = tree.xpath("//script[contains(@src, '/assets/app')]")
+        # the app scripts have moved from the /assets/app/ path to /assets/;
+        # the client_id can be found in e.g. https://a-v2.sndcdn.com/assets/49-a0c01933-3.js
+ script_tags = tree.xpath("//script[contains(@src, '/assets/')]")
app_js_urls = [script_tag.get('src') for script_tag in script_tags if script_tag is not None]
# extracts valid app_js urls from soundcloud.com content
@@ -66,7 +68,7 @@ def get_client_id():
return ""
-def init():
+def init(engine_settings=None):
global guest_client_id
# api-key
guest_client_id = get_client_id()
diff --git a/searx/engines/startpage.py b/searx/engines/startpage.py
index 6638f3d83..76567396f 100644
--- a/searx/engines/startpage.py
+++ b/searx/engines/startpage.py
@@ -15,6 +15,8 @@ from dateutil import parser
from datetime import datetime, timedelta
import re
from searx.engines.xpath import extract_text
+from searx.languages import language_codes
+from searx.utils import eval_xpath
# engine dependent config
categories = ['general']
@@ -22,7 +24,7 @@ categories = ['general']
# (probably the parameter qid), require
# storing of qids between multiple search-calls
-# paging = False
+paging = True
language_support = True
# search-url
@@ -32,23 +34,32 @@ search_url = base_url + 'do/search'
# specific xpath variables
# ads xpath //div[@id="results"]/div[@id="sponsored"]//div[@class="result"]
# not ads: div[@class="result"] are the direct children of div[@id="results"]
-results_xpath = '//li[contains(@class, "search-result") and contains(@class, "search-item")]'
-link_xpath = './/h3/a'
-content_xpath = './p[@class="search-item__body"]'
+results_xpath = '//div[@class="w-gl__result"]'
+link_xpath = './/a[@class="w-gl__result-title"]'
+content_xpath = './/p[@class="w-gl__description"]'
# do search-request
def request(query, params):
- offset = (params['pageno'] - 1) * 10
params['url'] = search_url
params['method'] = 'POST'
- params['data'] = {'query': query,
- 'startat': offset}
+ params['data'] = {
+ 'query': query,
+ 'page': params['pageno'],
+ 'cat': 'web',
+ 'cmd': 'process_search',
+ 'engine0': 'v1all',
+ }
# set language if specified
if params['language'] != 'all':
- params['data']['with_language'] = ('lang_' + params['language'].split('-')[0])
+ language = 'english'
+ for lc, _, _, lang in language_codes:
+ if lc == params['language']:
+ language = lang
+ params['data']['language'] = language
+ params['data']['lui'] = language
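+        # note: Startpage's form takes an English language name (default
+        # 'english') rather than a locale code, hence the lookup above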
return params
@@ -60,8 +71,8 @@ def response(resp):
dom = html.fromstring(resp.text)
# parse results
- for result in dom.xpath(results_xpath):
- links = result.xpath(link_xpath)
+ for result in eval_xpath(dom, results_xpath):
+ links = eval_xpath(result, link_xpath)
if not links:
continue
link = links[0]
@@ -77,8 +88,8 @@ def response(resp):
title = extract_text(link)
- if result.xpath(content_xpath):
- content = extract_text(result.xpath(content_xpath))
+ if eval_xpath(result, content_xpath):
+ content = extract_text(eval_xpath(result, content_xpath))
else:
content = ''
diff --git a/searx/engines/wikidata.py b/searx/engines/wikidata.py
index 5ea2b9958..e913b3915 100644
--- a/searx/engines/wikidata.py
+++ b/searx/engines/wikidata.py
@@ -16,7 +16,7 @@ from searx.poolrequests import get
from searx.engines.xpath import extract_text
from searx.engines.wikipedia import _fetch_supported_languages, supported_languages_url
from searx.url_utils import urlencode
-from searx.utils import match_language
+from searx.utils import match_language, eval_xpath
from json import loads
from lxml.html import fromstring
@@ -57,22 +57,6 @@ language_fallback_xpath = '//sup[contains(@class,"wb-language-fallback-indicator
calendar_name_xpath = './/sup[contains(@class,"wb-calendar-name")]'
media_xpath = value_xpath + '//div[contains(@class,"commons-media-caption")]//a'
-# xpath_cache
-xpath_cache = {}
-
-
-def get_xpath(xpath_str):
- result = xpath_cache.get(xpath_str, None)
- if not result:
- result = etree.XPath(xpath_str)
- xpath_cache[xpath_str] = result
- return result
-
-
-def eval_xpath(element, xpath_str):
- xpath = get_xpath(xpath_str)
- return xpath(element)
-
def get_id_cache(result):
id_cache = {}
diff --git a/searx/engines/wikipedia.py b/searx/engines/wikipedia.py
index 4dae735d1..a216ba886 100644
--- a/searx/engines/wikipedia.py
+++ b/searx/engines/wikipedia.py
@@ -21,7 +21,8 @@ search_url = base_url + u'w/api.php?'\
'action=query'\
'&format=json'\
'&{query}'\
- '&prop=extracts|pageimages'\
+ '&prop=extracts|pageimages|pageprops'\
+ '&ppprop=disambiguation'\
'&exintro'\
'&explaintext'\
'&pithumbsize=300'\
@@ -79,12 +80,15 @@ def response(resp):
# wikipedia article's unique id
# first valid id is assumed to be the requested article
+ if 'pages' not in search_result['query']:
+ return results
+
for article_id in search_result['query']['pages']:
page = search_result['query']['pages'][article_id]
if int(article_id) > 0:
break
- if int(article_id) < 0:
+ if int(article_id) < 0 or 'disambiguation' in page.get('pageprops', {}):
return []
title = page.get('title')
@@ -96,6 +100,7 @@ def response(resp):
extract = page.get('extract')
summary = extract_first_paragraph(extract, title, image)
+ summary = summary.replace('() ', '')
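+    # the plain-text extract can leave an empty "() " behind (e.g. when a
+    # lead-sentence parenthetical only contained stripped markup), so drop it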
# link to wikipedia article
wikipedia_link = base_url.format(language=url_lang(resp.search_params['language'])) \
diff --git a/searx/engines/wolframalpha_noapi.py b/searx/engines/wolframalpha_noapi.py
index 2cbbc5adc..387c9fa17 100644
--- a/searx/engines/wolframalpha_noapi.py
+++ b/searx/engines/wolframalpha_noapi.py
@@ -55,7 +55,7 @@ def obtain_token():
return token
-def init():
+def init(engine_settings=None):
obtain_token()
diff --git a/searx/engines/www1x.py b/searx/engines/www1x.py
index 508803240..f1154b16d 100644
--- a/searx/engines/www1x.py
+++ b/searx/engines/www1x.py
@@ -11,8 +11,8 @@
"""
from lxml import html
-import re
from searx.url_utils import urlencode, urljoin
+from searx.engines.xpath import extract_text
# engine dependent config
categories = ['images']
@@ -34,41 +34,18 @@ def request(query, params):
def response(resp):
results = []
- # get links from result-text
- regex = re.compile('(</a>|<a)')
- results_parts = re.split(regex, resp.text)
-
- cur_element = ''
-
- # iterate over link parts
- for result_part in results_parts:
+ dom = html.fromstring(resp.text)
+ for res in dom.xpath('//div[@class="List-item MainListing"]'):
# processed start and end of link
- if result_part == '<a':
- cur_element = result_part
- continue
- elif result_part != '</a>':
- cur_element += result_part
- continue
-
- cur_element += result_part
-
- # fix xml-error
- cur_element = cur_element.replace('"></a>', '"/></a>')
-
- dom = html.fromstring(cur_element)
- link = dom.xpath('//a')[0]
+ link = res.xpath('//a')[0]
url = urljoin(base_url, link.attrib.get('href'))
- title = link.attrib.get('title', '')
+ title = extract_text(link)
- thumbnail_src = urljoin(base_url, link.xpath('.//img')[0].attrib['src'])
+ thumbnail_src = urljoin(base_url, res.xpath('.//img')[0].attrib['src'])
# TODO: get image with higher resolution
img_src = thumbnail_src
- # check if url is showing to a photo
- if '/photo/' not in url:
- continue
-
# append result
results.append({'url': url,
'title': title,
diff --git a/searx/engines/xpath.py b/searx/engines/xpath.py
index 61494ce4e..b75896cc7 100644
--- a/searx/engines/xpath.py
+++ b/searx/engines/xpath.py
@@ -1,6 +1,6 @@
from lxml import html
from lxml.etree import _ElementStringResult, _ElementUnicodeResult
-from searx.utils import html_to_text
+from searx.utils import html_to_text, eval_xpath
from searx.url_utils import unquote, urlencode, urljoin, urlparse
search_url = None
@@ -104,15 +104,15 @@ def response(resp):
results = []
dom = html.fromstring(resp.text)
if results_xpath:
- for result in dom.xpath(results_xpath):
- url = extract_url(result.xpath(url_xpath), search_url)
- title = extract_text(result.xpath(title_xpath))
- content = extract_text(result.xpath(content_xpath))
+ for result in eval_xpath(dom, results_xpath):
+ url = extract_url(eval_xpath(result, url_xpath), search_url)
+ title = extract_text(eval_xpath(result, title_xpath))
+ content = extract_text(eval_xpath(result, content_xpath))
tmp_result = {'url': url, 'title': title, 'content': content}
# add thumbnail if available
if thumbnail_xpath:
- thumbnail_xpath_result = result.xpath(thumbnail_xpath)
+ thumbnail_xpath_result = eval_xpath(result, thumbnail_xpath)
if len(thumbnail_xpath_result) > 0:
tmp_result['img_src'] = extract_url(thumbnail_xpath_result, search_url)
@@ -120,14 +120,14 @@ def response(resp):
else:
for url, title, content in zip(
(extract_url(x, search_url) for
- x in dom.xpath(url_xpath)),
- map(extract_text, dom.xpath(title_xpath)),
- map(extract_text, dom.xpath(content_xpath))
+ x in eval_xpath(dom, url_xpath)),
+ map(extract_text, eval_xpath(dom, title_xpath)),
+ map(extract_text, eval_xpath(dom, content_xpath))
):
results.append({'url': url, 'title': title, 'content': content})
if not suggestion_xpath:
return results
- for suggestion in dom.xpath(suggestion_xpath):
+ for suggestion in eval_xpath(dom, suggestion_xpath):
results.append({'suggestion': extract_text(suggestion)})
return results
diff --git a/searx/engines/yahoo.py b/searx/engines/yahoo.py
index 73b78bcf7..36c1a11f8 100644
--- a/searx/engines/yahoo.py
+++ b/searx/engines/yahoo.py
@@ -14,7 +14,7 @@
from lxml import html
from searx.engines.xpath import extract_text, extract_url
from searx.url_utils import unquote, urlencode
-from searx.utils import match_language
+from searx.utils import match_language, eval_xpath
# engine dependent config
categories = ['general']
@@ -109,21 +109,21 @@ def response(resp):
dom = html.fromstring(resp.text)
try:
- results_num = int(dom.xpath('//div[@class="compPagination"]/span[last()]/text()')[0]
+ results_num = int(eval_xpath(dom, '//div[@class="compPagination"]/span[last()]/text()')[0]
.split()[0].replace(',', ''))
results.append({'number_of_results': results_num})
except:
pass
# parse results
- for result in dom.xpath(results_xpath):
+ for result in eval_xpath(dom, results_xpath):
try:
- url = parse_url(extract_url(result.xpath(url_xpath), search_url))
- title = extract_text(result.xpath(title_xpath)[0])
+ url = parse_url(extract_url(eval_xpath(result, url_xpath), search_url))
+ title = extract_text(eval_xpath(result, title_xpath)[0])
except:
continue
- content = extract_text(result.xpath(content_xpath)[0])
+ content = extract_text(eval_xpath(result, content_xpath)[0])
# append result
results.append({'url': url,
@@ -131,7 +131,7 @@ def response(resp):
'content': content})
# if no suggestion found, return results
- suggestions = dom.xpath(suggestion_xpath)
+ suggestions = eval_xpath(dom, suggestion_xpath)
if not suggestions:
return results
@@ -148,9 +148,9 @@ def response(resp):
def _fetch_supported_languages(resp):
supported_languages = []
dom = html.fromstring(resp.text)
- options = dom.xpath('//div[@id="yschlang"]/span/label/input')
+ options = eval_xpath(dom, '//div[@id="yschlang"]/span/label/input')
for option in options:
- code_parts = option.xpath('./@value')[0][5:].split('_')
+ code_parts = eval_xpath(option, './@value')[0][5:].split('_')
if len(code_parts) == 2:
code = code_parts[0] + '-' + code_parts[1].upper()
else:
diff --git a/searx/exceptions.py b/searx/exceptions.py
index c605ddcab..0175acfa3 100644
--- a/searx/exceptions.py
+++ b/searx/exceptions.py
@@ -28,5 +28,6 @@ class SearxParameterException(SearxException):
else:
message = 'Invalid value "' + value + '" for parameter ' + name
super(SearxParameterException, self).__init__(message)
+ self.message = message
self.parameter_name = name
self.parameter_value = value
diff --git a/searx/plugins/https_rewrite.py b/searx/plugins/https_rewrite.py
index 3d986770e..82556017e 100644
--- a/searx/plugins/https_rewrite.py
+++ b/searx/plugins/https_rewrite.py
@@ -225,6 +225,9 @@ def https_url_rewrite(result):
def on_result(request, search, result):
+ if 'parsed_url' not in result:
+ return True
+
if result['parsed_url'].scheme == 'http':
https_url_rewrite(result)
return True
diff --git a/searx/plugins/oa_doi_rewrite.py b/searx/plugins/oa_doi_rewrite.py
index d4942498f..be80beb26 100644
--- a/searx/plugins/oa_doi_rewrite.py
+++ b/searx/plugins/oa_doi_rewrite.py
@@ -35,6 +35,9 @@ def get_doi_resolver(args, preference_doi_resolver):
def on_result(request, search, result):
+ if 'parsed_url' not in result:
+ return True
+
doi = extract_doi(result['parsed_url'])
if doi and len(doi) < 50:
for suffix in ('/', '.pdf', '/full', '/meta', '/abstract'):
diff --git a/searx/plugins/tracker_url_remover.py b/searx/plugins/tracker_url_remover.py
index 630c8a638..33dd621e1 100644
--- a/searx/plugins/tracker_url_remover.py
+++ b/searx/plugins/tracker_url_remover.py
@@ -17,10 +17,10 @@ along with searx. If not, see < http://www.gnu.org/licenses/ >.
from flask_babel import gettext
import re
-from searx.url_utils import urlunparse
+from searx.url_utils import urlunparse, parse_qsl, urlencode
-regexes = {re.compile(r'utm_[^&]+&?'),
- re.compile(r'(wkey|wemail)[^&]+&?'),
+regexes = {re.compile(r'utm_[^&]+'),
+ re.compile(r'(wkey|wemail)[^&]*'),
re.compile(r'&$')}
name = gettext('Tracker URL remover')
@@ -30,16 +30,23 @@ preference_section = 'privacy'
def on_result(request, search, result):
+ if 'parsed_url' not in result:
+ return True
+
query = result['parsed_url'].query
if query == "":
return True
-
- for reg in regexes:
- query = reg.sub('', query)
-
- if query != result['parsed_url'].query:
- result['parsed_url'] = result['parsed_url']._replace(query=query)
- result['url'] = urlunparse(result['parsed_url'])
+ parsed_query = parse_qsl(query)
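+    # work on decoded (name, value) pairs instead of the raw query string, so
+    # only whole parameters whose names match a tracker regex are dropped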
+
+ changes = 0
+ for i, (param_name, _) in enumerate(list(parsed_query)):
+ for reg in regexes:
+ if reg.match(param_name):
+ parsed_query.pop(i - changes)
+ changes += 1
+ result['parsed_url'] = result['parsed_url']._replace(query=urlencode(parsed_query))
+ result['url'] = urlunparse(result['parsed_url'])
+ break
return True
diff --git a/searx/query.py b/searx/query.py
index 382aed871..c4002bd31 100644
--- a/searx/query.py
+++ b/searx/query.py
@@ -184,7 +184,7 @@ class SearchQuery(object):
self.lang = lang
self.safesearch = safesearch
self.pageno = pageno
- self.time_range = time_range
+ self.time_range = None if time_range in ('', 'None', None) else time_range
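+        # normalize "no time range": '', the literal string 'None' and None all map to None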
self.timeout_limit = timeout_limit
def __str__(self):
diff --git a/searx/results.py b/searx/results.py
index be74a836b..3b1e4bd62 100644
--- a/searx/results.py
+++ b/searx/results.py
@@ -67,8 +67,9 @@ def merge_two_infoboxes(infobox1, infobox2):
for url2 in infobox2.get('urls', []):
unique_url = True
- for url1 in infobox1.get('urls', []):
- if compare_urls(urlparse(url1.get('url', '')), urlparse(url2.get('url', ''))):
+ parsed_url2 = urlparse(url2.get('url', ''))
+ for url1 in urls1:
+ if compare_urls(urlparse(url1.get('url', '')), parsed_url2):
unique_url = False
break
if unique_url:
@@ -188,8 +189,9 @@ class ResultContainer(object):
add_infobox = True
infobox_id = infobox.get('id', None)
if infobox_id is not None:
+ parsed_url_infobox_id = urlparse(infobox_id)
for existingIndex in self.infoboxes:
- if compare_urls(urlparse(existingIndex.get('id', '')), urlparse(infobox_id)):
+ if compare_urls(urlparse(existingIndex.get('id', '')), parsed_url_infobox_id):
merge_two_infoboxes(existingIndex, infobox)
add_infobox = False
@@ -197,6 +199,13 @@ class ResultContainer(object):
self.infoboxes.append(infobox)
def _merge_result(self, result, position):
+ if 'url' in result:
+ self.__merge_url_result(result, position)
+ return
+
+ self.__merge_result_no_url(result, position)
+
+ def __merge_url_result(self, result, position):
result['parsed_url'] = urlparse(result['url'])
# if the result has no scheme, use http as default
@@ -210,51 +219,60 @@ class ResultContainer(object):
if result.get('content'):
result['content'] = WHITESPACE_REGEX.sub(' ', result['content'])
- # check for duplicates
- duplicated = False
+ duplicated = self.__find_duplicated_http_result(result)
+ if duplicated:
+ self.__merge_duplicated_http_result(duplicated, result, position)
+ return
+
+ # if there is no duplicate found, append result
+ result['positions'] = [position]
+ with RLock():
+ self._merged_results.append(result)
+
+ def __find_duplicated_http_result(self, result):
result_template = result.get('template')
for merged_result in self._merged_results:
+ if 'parsed_url' not in merged_result:
+ continue
if compare_urls(result['parsed_url'], merged_result['parsed_url'])\
and result_template == merged_result.get('template'):
if result_template != 'images.html':
# not an image, same template, same url : it's a duplicate
- duplicated = merged_result
- break
+ return merged_result
else:
# it's an image
# it's a duplicate if the parsed_url, template and img_src are the same
if result.get('img_src', '') == merged_result.get('img_src', ''):
- duplicated = merged_result
- break
+ return merged_result
+ return None
- # merge duplicates together
- if duplicated:
- # using content with more text
- if result_content_len(result.get('content', '')) >\
- result_content_len(duplicated.get('content', '')):
- duplicated['content'] = result['content']
+ def __merge_duplicated_http_result(self, duplicated, result, position):
+ # using content with more text
+ if result_content_len(result.get('content', '')) >\
+ result_content_len(duplicated.get('content', '')):
+ duplicated['content'] = result['content']
- # merge all result's parameters not found in duplicate
- for key in result.keys():
- if not duplicated.get(key):
- duplicated[key] = result.get(key)
+ # merge all result's parameters not found in duplicate
+ for key in result.keys():
+ if not duplicated.get(key):
+ duplicated[key] = result.get(key)
- # add the new position
- duplicated['positions'].append(position)
+ # add the new position
+ duplicated['positions'].append(position)
- # add engine to list of result-engines
- duplicated['engines'].add(result['engine'])
+ # add engine to list of result-engines
+ duplicated['engines'].add(result['engine'])
- # using https if possible
- if duplicated['parsed_url'].scheme != 'https' and result['parsed_url'].scheme == 'https':
- duplicated['url'] = result['parsed_url'].geturl()
- duplicated['parsed_url'] = result['parsed_url']
+ # using https if possible
+ if duplicated['parsed_url'].scheme != 'https' and result['parsed_url'].scheme == 'https':
+ duplicated['url'] = result['parsed_url'].geturl()
+ duplicated['parsed_url'] = result['parsed_url']
- # if there is no duplicate found, append result
- else:
- result['positions'] = [position]
- with RLock():
- self._merged_results.append(result)
+ def __merge_result_no_url(self, result, position):
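+        # results without a 'url' key are stored as-is; URL-based
+        # de-duplication only applies to results that have one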
+ result['engines'] = set([result['engine']])
+ result['positions'] = [position]
+ with RLock():
+ self._merged_results.append(result)
def order_results(self):
for result in self._merged_results:
diff --git a/searx/search.py b/searx/search.py
index 9c7142c74..5c268cc5d 100644
--- a/searx/search.py
+++ b/searx/search.py
@@ -77,7 +77,7 @@ def send_http_request(engine, request_params):
return req(request_params['url'], **request_args)
-def search_one_request(engine, query, request_params):
+def search_one_http_request(engine, query, request_params):
# update request parameters dependent on
# search-engine (contained in engines folder)
engine.request(query, request_params)
@@ -97,7 +97,53 @@ def search_one_request(engine, query, request_params):
return engine.response(response)
+def search_one_offline_request(engine, query, request_params):
+ return engine.search(query, request_params)
+
+
def search_one_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit):
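+    # offline engines bypass the HTTP machinery and are handled by
+    # search_one_offline_request_safe; HTTP engines keep the previous flow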
+ if engines[engine_name].offline:
+ return search_one_offline_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit) # noqa
+ return search_one_http_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit)
+
+
+def search_one_offline_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit):
+ engine = engines[engine_name]
+
+ try:
+ search_results = search_one_offline_request(engine, query, request_params)
+
+ if search_results:
+ result_container.extend(engine_name, search_results)
+
+ engine_time = time() - start_time
+ result_container.add_timing(engine_name, engine_time, engine_time)
+ with threading.RLock():
+ engine.stats['engine_time'] += engine_time
+ engine.stats['engine_time_count'] += 1
+
+ except ValueError as e:
+ record_offline_engine_stats_on_error(engine, result_container, start_time)
+ logger.exception('engine {0} : invalid input : {1}'.format(engine_name, e))
+ except Exception as e:
+ record_offline_engine_stats_on_error(engine, result_container, start_time)
+
+ result_container.add_unresponsive_engine((
+ engine_name,
+ u'{0}: {1}'.format(gettext('unexpected crash'), e),
+ ))
+ logger.exception('engine {0} : exception : {1}'.format(engine_name, e))
+
+
+def record_offline_engine_stats_on_error(engine, result_container, start_time):
+ engine_time = time() - start_time
+ result_container.add_timing(engine.name, engine_time, engine_time)
+
+ with threading.RLock():
+ engine.stats['errors'] += 1
+
+
+def search_one_http_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit):
# set timeout for all HTTP requests
requests_lib.set_timeout_for_thread(timeout_limit, start_time=start_time)
# reset the HTTP total time
@@ -111,7 +157,7 @@ def search_one_request_safe(engine_name, query, request_params, result_container
try:
# send requests and parse the results
- search_results = search_one_request(engine, query, request_params)
+ search_results = search_one_http_request(engine, query, request_params)
# check if the engine accepted the request
if search_results is not None:
@@ -427,19 +473,21 @@ class Search(object):
continue
# set default request parameters
- request_params = default_request_params()
- request_params['headers']['User-Agent'] = user_agent
- request_params['category'] = selected_engine['category']
- request_params['pageno'] = search_query.pageno
+ request_params = {}
+ if not engine.offline:
+ request_params = default_request_params()
+ request_params['headers']['User-Agent'] = user_agent
- if hasattr(engine, 'language') and engine.language:
- request_params['language'] = engine.language
- else:
- request_params['language'] = search_query.lang
+ if hasattr(engine, 'language') and engine.language:
+ request_params['language'] = engine.language
+ else:
+ request_params['language'] = search_query.lang
- # 0 = None, 1 = Moderate, 2 = Strict
- request_params['safesearch'] = search_query.safesearch
- request_params['time_range'] = search_query.time_range
+ request_params['safesearch'] = search_query.safesearch
+ request_params['time_range'] = search_query.time_range
+
+ request_params['category'] = selected_engine['category']
+ request_params['pageno'] = search_query.pageno
# append request to list
requests.append((selected_engine['name'], search_query.query, request_params))
diff --git a/searx/settings.yml b/searx/settings.yml
index cf2b13e08..e41b84c13 100644
--- a/searx/settings.yml
+++ b/searx/settings.yml
@@ -161,11 +161,12 @@ engines:
weight : 2
disabled : True
- - name : digbt
- engine : digbt
- shortcut : dbt
- timeout : 6.0
- disabled : True
+# cloudflare protected
+# - name : digbt
+# engine : digbt
+# shortcut : dbt
+# timeout : 6.0
+# disabled : True
- name : digg
engine : digg
@@ -407,7 +408,7 @@ engines:
- name : library genesis
engine : xpath
- search_url : http://libgen.io/search.php?req={query}
+ search_url : https://libgen.is/search.php?req={query}
url_xpath : //a[contains(@href,"bookfi.net")]/@href
title_xpath : //a[contains(@href,"book/")]/text()[1]
content_xpath : //td/a[1][contains(@href,"=author")]/text()
@@ -463,7 +464,7 @@ engines:
- name : openairedatasets
engine : json_engine
paging : True
- search_url : http://api.openaire.eu/search/datasets?format=json&page={pageno}&size=10&title={query}
+ search_url : https://api.openaire.eu/search/datasets?format=json&page={pageno}&size=10&title={query}
results_query : response/results/result
url_query : metadata/oaf:entity/oaf:result/children/instance/webresource/url/$
title_query : metadata/oaf:entity/oaf:result/title/$
@@ -475,7 +476,7 @@ engines:
- name : openairepublications
engine : json_engine
paging : True
- search_url : http://api.openaire.eu/search/publications?format=json&page={pageno}&size=10&title={query}
+ search_url : https://api.openaire.eu/search/publications?format=json&page={pageno}&size=10&title={query}
results_query : response/results/result
url_query : metadata/oaf:entity/oaf:result/children/instance/webresource/url/$
title_query : metadata/oaf:entity/oaf:result/title/$
@@ -703,9 +704,9 @@ engines:
shortcut: vo
categories: social media
search_url : https://searchvoat.co/?t={query}
- url_xpath : //div[@class="entry"]/p/a[contains(@class, "title")]/@href
- title_xpath : //div[@class="entry"]/p/a[contains(@class, "title")]
- content_xpath : //div[@class="entry"]/p/span[@class="domain"]/a/text()
+ url_xpath : //div[@class="entry"]//p[@class="title"]/a/@href
+ title_xpath : //div[@class="entry"]//p[@class="title"]/a/text()
+ content_xpath : //div[@class="entry"]//span[@class="domain"]/a/text()
timeout : 10.0
disabled : True
@@ -743,10 +744,15 @@ engines:
title_xpath : ./h2
content_xpath : ./p[@class="s"]
suggestion_xpath : /html/body//div[@class="top-info"]/p[@class="top-info spell"]/a
- first_page_num : 1
+ first_page_num : 0
page_size : 10
disabled : True
+ - name : seedpeer
+ shortcut : speu
+ engine : seedpeer
+ categories: files, music, videos
+
# - name : yacy
# engine : yacy
# shortcut : ya
@@ -806,7 +812,7 @@ locales:
doi_resolvers :
oadoi.org : 'https://oadoi.org/'
doi.org : 'https://doi.org/'
- doai.io : 'http://doai.io/'
- sci-hub.tw : 'http://sci-hub.tw/'
+ doai.io : 'https://doai.io/'
+ sci-hub.tw : 'https://sci-hub.tw/'
default_doi_resolver : 'oadoi.org'
diff --git a/searx/settings_robot.yml b/searx/settings_robot.yml
index 635809041..25f229e56 100644
--- a/searx/settings_robot.yml
+++ b/searx/settings_robot.yml
@@ -43,7 +43,7 @@ locales:
doi_resolvers :
oadoi.org : 'https://oadoi.org/'
doi.org : 'https://doi.org/'
- doai.io : 'http://doai.io/'
- sci-hub.tw : 'http://sci-hub.tw/'
+ doai.io : 'https://doai.io/'
+ sci-hub.tw : 'https://sci-hub.tw/'
default_doi_resolver : 'oadoi.org'
diff --git a/searx/static/plugins/js/vim_hotkeys.js b/searx/static/plugins/js/vim_hotkeys.js
index 13bd070e0..b0f265cb5 100644
--- a/searx/static/plugins/js/vim_hotkeys.js
+++ b/searx/static/plugins/js/vim_hotkeys.js
@@ -125,6 +125,14 @@ $(document).ready(function() {
}
});
+ function nextResult(current, direction) {
+ var next = current[direction]();
+ while (!next.is('.result') && next.length !== 0) {
+ next = next[direction]();
+ }
+        return next;
+ }
+
function highlightResult(which) {
return function() {
var current = $('.result[data-vim-selected]');
@@ -157,13 +165,13 @@ $(document).ready(function() {
}
break;
case 'down':
- next = current.next('.result');
+ next = nextResult(current, 'next');
if (next.length === 0) {
next = $('.result:first');
}
break;
case 'up':
- next = current.prev('.result');
+ next = nextResult(current, 'prev');
if (next.length === 0) {
next = $('.result:last');
}
diff --git a/searx/static/themes/courgette/css/style.css b/searx/static/themes/courgette/css/style.css
index ad5d233ff..508c4b605 100644
--- a/searx/static/themes/courgette/css/style.css
+++ b/searx/static/themes/courgette/css/style.css
@@ -1 +1 @@
-a,h2{color:#666}.center,html{position:relative}#categories_container>div,.top_margin a{display:inline-block}#categories,.center{text-align:center}#categories .hidden,.cache_link,.highlight .c,.highlight .cm,.highlight .ge,.highlight .sd{font-style:italic}*{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}input[type=search]{-webkit-appearance:textfield}h2{text-transform:uppercase}body{font-family:sans-serif;line-height:1.5;margin:0;background:#EEE}html{min-height:100%}.title h1{font-size:7em;color:#3498DB;margin:-20px auto 0;line-height:100px;padding-bottom:20px}.center{max-width:70em;background:rgba(255,255,255,.6);padding:2em;margin:7% auto 0}.center.search{position:static;width:auto;background:0 0;margin:auto;padding-top:1.8em}@media screen and (min-width:1001px){.center:after{content:"";z-index:-1;background:url(../img/bg-body-index.jpg) no-repeat;background-size:cover;width:100%;height:100%;top:0;left:0;position:fixed}.center.search:after{content:none}}.autocompleter-choices{position:absolute;margin:0;padding:0;background:#FFF}.autocompleter-choices li{padding:.5em 1em}.autocompleter-choices li:hover{background:#3498DB;color:#FFF;cursor:pointer}.top_margin{position:absolute;bottom:-3.5em;width:100%;left:0}.top_margin a{margin-right:1em;color:#FFF;text-decoration:none}.top_margin a:focus,.top_margin a:hover{text-decoration:underline}@media screen and (max-width:1000px){.center{background:0 0}.top_margin a{color:#333}}.checkbox_container{margin-top:1.5em}.checkbox_container label{padding:.5em 1em;color:#333;cursor:pointer;font-size:.9em}.checkbox_container input[type=checkbox]:checked+label,.checkbox_container label:hover{background:#3498DB;color:#FFF}.checkbox_container input[type=checkbox]{position:absolute;top:-9999px}#categories .hidden{display:none;position:absolute;bottom:1em;left:0;text-align:center;width:100%;font-size:.9em;color:#333}#categories:hover .hidden,.right a{display:block}@media screen and (max-width:900px){#categories_container{letter-spacing:-5px}#categories_container>div{letter-spacing:normal;margin-top:1em}.checkbox_container{margin:0}.checkbox_container label{display:block;background:#CCC;padding:1em;border:1px solid #FFF}.top_margin{position:static}#categories .hidden{position:static;display:block}}@media screen and (max-width:900px) and (min-width:501px){#categories_container>div{width:31%;margin-left:2.333%}#categories_container>div:nth-child(3n+1){margin-left:0}}@media screen and (max-width:500px){#categories_container>div{width:48%;margin-left:2%;font-size:.9em}#categories_container>div:nth-child(2n+1){margin-left:0}.title h1{background:url(../img/searx-mobile.png) no-repeat;width:200px;height:39px}}#search_wrapper{position:relative}.q{padding:.5em 3em .5em 1em;width:100%;font-size:1.5em;border:0;color:#666}.cache_link,.result p{font-size:.9em}#search_submit{position:absolute;top:0;right:0;border:0;background:url(../img/search-icon.png) center center/65% auto no-repeat #3498DB;text-indent:-9999px;width:5em;height:100%;cursor:pointer}#sidebar,.right{position:fixed;width:15em;right:0;text-align:right}#search_submit:focus,#search_submit:hover{background-color:#0665A2}#sidebar{background:#3498DB;top:0;height:100%;padding:1.5em}.right{bottom:1.5em;z-index:1;padding:0 1.5em}.right a{color:#FFF;text-decoration:none}#sidebar form,#suggestions form,.row fieldset{display:inline-block}.right a:focus,.right a:hover{text-decoration:underline}#preferences{background:url(../img/preference-icon.png) right center/12% auto 
no-repeat;padding-right:1.8em}#search_url input{border:0;padding:.5em}#sidebar>div{margin-bottom:1em;color:#FFF}#sidebar input[type=submit]{background:#CCC;border:0;padding:.5em 1em;cursor:pointer;margin-top:.5em}#sidebar input[type=submit]:focus,#sidebar input[type=submit]:hover{color:#FFF;background-color:#0665A2}#results{padding:0 17em 0 2em}.result .content{margin:0;color:#666}.result .url{margin-top:0;color:#FF6530}.result .favicon{float:left;position:relative;top:.5em;margin-right:.5em}.definition_result{background:#CCC;padding:1em}.definition_result .result_title,.definition_result p{margin:0}.result_title{margin-bottom:0;font-weight:400}.result_title a{color:#3498DB;text-decoration:none}#answers,#suggestions span{color:#666}.result_title a:focus,.result_title a:hover{text-decoration:underline}.cache_link{color:#666}.search.center{padding-right:17em}#answers{border:2px solid #3498DB;padding:20px;text-align:center;max-width:70em;margin:0 auto 20px}#suggestions{margin-bottom:1em}#suggestions form{vertical-align:top;margin-bottom:.5em}#suggestions input[type=submit]{color:#333;padding:.5em 1em;border:0;background:#CCC;cursor:pointer}#suggestions input[type=submit]:focus,#suggestions input[type=submit]:hover{background:#3498DB;color:#FFF}#pagination{margin:1.5em 0 2em}#pagination form+form{float:right;margin-top:-2em}input[type=submit]{display:inline-block;background:#3498DB;color:#FFF;border:0;padding:.6em 1em;cursor:pointer}input[type=submit]:focus,input[type=submit]:hover{background:#0665A2}.row{max-width:60em;margin:auto}.row a{color:#3498DB}.row form{letter-spacing:-5px}.row form>*{letter-spacing:normal}.row p{margin:0}.row fieldset{width:48%;vertical-align:top}.row fieldset:last-of-type{display:block;width:auto;background:0 0;padding:0}fieldset,table tr:nth-child(odd){background:#CCC}.row fieldset:nth-child(odd){margin-right:2%}.row fieldset:nth-child(2){min-height:10.5em}@media screen and (max-width:900px){.row{margin:0 1em}.row fieldset{width:49%}.row fieldset,.row fieldset:nth-child(odd){margin-right:0}.row fieldset:first-child{width:100%;margin-right:0}.row fieldset:nth-child(even){margin-right:2%}}@media screen and (max-width:800px){.row fieldset,select{width:100%}table{font-size:.8em}#sidebar,.right{display:none}#results{padding:0 2em}.search.center{padding-right:2em}}@media screen and (max-width:400px){.row #categories_container>div{width:100%;margin-left:0}}fieldset{border:0;margin:1em 0;padding:1.5em}table{width:100%;text-align:left;border:1px solid #CCC;border-collapse:collapse}table th{background:#999;color:#FFF}table td,table th{padding:.5em 1em;border:1px solid #FFF}.engine_checkbox label{padding:.5em;background:#3498DB;color:#FFF;cursor:pointer}.engine_checkbox .deny{background:#3498DB}.engine_checkbox .allow{display:none;background:#666}.engine_checkbox input{display:none}.engine_checkbox input:checked+.allow{display:inline}.engine_checkbox input:checked+.allow+.deny{display:none}.row input[type=submit]{font-size:1em;margin:1em 0 2em}.row .right{position:static;display:inline-block}.row .right a{color:#333;width:auto;text-align:left;padding:0}.small_font{font-size:.8em}table th{padding:1em}legend{background:#EEE;padding:0 1em;position:relative}select{border:1px solid #DDD;padding:.5em .8em;font-size:1em}.highlight .hll{background-color:#ffc}.highlight{font-weight:700;background:#f8f8f8}.highlight .c{color:#408080}.highlight .err{border:1px solid red}.highlight .k{color:green;font-weight:700}.highlight .o{color:#666}.highlight .cm{color:#408080}.highlight 
.cp{color:#BC7A00}.highlight .c1,.highlight .cs{color:#408080;font-style:italic}.highlight .gd{color:#A00000}.highlight .gr{color:red}.highlight .gh{color:navy;font-weight:700}.highlight .gi{color:#00A000}.highlight .go{color:#888}.highlight .gp{color:navy;font-weight:700}.highlight .gs{font-weight:700}.highlight .gu{color:purple;font-weight:700}.highlight .gt{color:#04D}.highlight .kc,.highlight .kd,.highlight .kn{color:green;font-weight:700}.highlight .kp{color:green}.highlight .kr{color:green;font-weight:700}.highlight .kt{color:#B00040}.highlight .m{color:#666}.highlight .s{color:#BA2121}.highlight .na{color:#7D9029}.highlight .nb{color:green}.highlight .nc{color:#00F;font-weight:700}.highlight .no{color:#800}.highlight .nd{color:#A2F}.highlight .ni{color:#999;font-weight:700}.highlight .ne{color:#D2413A;font-weight:700}.highlight .nf{color:#00F}.highlight .nl{color:#A0A000}.highlight .nn{color:#00F;font-weight:700}.highlight .nt{color:green;font-weight:700}.highlight .nv{color:#19177C}.highlight .ow{color:#A2F;font-weight:700}.highlight .w{color:#bbb}.highlight .mf,.highlight .mh,.highlight .mi,.highlight .mo{color:#666}.highlight .s2,.highlight .sb,.highlight .sc{color:#BA2121}.highlight .sd{color:#BA2121}.highlight .se{color:#B62;font-weight:700}.highlight .sh{color:#BA2121}.highlight .si{color:#B68;font-weight:700}.highlight .sx{color:green}.highlight .sr{color:#B68}.highlight .s1{color:#BA2121}.highlight .ss{color:#19177C}.highlight .bp{color:green}.highlight .vc,.highlight .vg,.highlight .vi{color:#19177C}.highlight .il{color:#666}.highlight pre{overflow:auto}.highlight .lineno{-webkit-touch-callout:none;-webkit-user-select:none;-khtml-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none;cursor:default}.highlight .lineno::selection{background:0 0}.highlight .lineno::-moz-selection{background:0 0} \ No newline at end of file
+a,h2{color:#666}.center,html{position:relative}#categories_container>div,.top_margin a{display:inline-block}#categories,.center{text-align:center}#categories .hidden,.cache_link,.highlight .c,.highlight .cm,.highlight .ge,.highlight .sd{font-style:italic}*{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}input[type=search]{-webkit-appearance:textfield}h2{text-transform:uppercase}body{font-family:sans-serif;line-height:1.5;margin:0;background:#EEE}html{min-height:100%}.title h1{font-size:7em;color:#3498DB;margin:-20px auto 0;line-height:100px;padding-bottom:20px}.center{max-width:70em;background:rgba(255,255,255,.6);padding:2em;margin:7% auto 0}.center.search{position:static;width:auto;background:0 0;margin:auto;padding-top:1.8em}@media screen and (min-width:1001px){.center:after{content:"";z-index:-1;background:url(../img/bg-body-index.jpg) no-repeat;background-size:cover;width:100%;height:100%;top:0;left:0;position:fixed}.center.search:after{content:none}}.autocompleter-choices{position:absolute;margin:0;padding:0;background:#FFF}.autocompleter-choices li{padding:.5em 1em}.autocompleter-choices li:hover{background:#3498DB;color:#FFF;cursor:pointer}.top_margin{position:absolute;bottom:-3.5em;width:100%;left:0}.top_margin a{margin-right:1em;color:#FFF;text-decoration:none}.top_margin a:focus,.top_margin a:hover{text-decoration:underline}@media screen and (max-width:1000px){.center{background:0 0}.top_margin a{color:#333}}.checkbox_container{margin-top:1.5em}.checkbox_container label{padding:.5em 1em;color:#333;cursor:pointer;font-size:.9em}.checkbox_container input[type=checkbox]:checked+label,.checkbox_container label:hover{background:#3498DB;color:#FFF}.checkbox_container input[type=checkbox]{position:absolute;top:-9999px}#categories .hidden{display:none;position:absolute;bottom:1em;left:0;text-align:center;width:100%;font-size:.9em;color:#333}#categories:hover .hidden,.right a{display:block}@media screen and (max-width:900px){#categories_container{letter-spacing:-5px}#categories_container>div{letter-spacing:normal;margin-top:1em}.checkbox_container{margin:0}.checkbox_container label{display:block;background:#CCC;padding:1em;border:1px solid #FFF}.top_margin{position:static}#categories .hidden{position:static;display:block}}@media screen and (max-width:900px) and (min-width:501px){#categories_container>div{width:31%;margin-left:2.333%}#categories_container>div:nth-child(3n+1){margin-left:0}}@media screen and (max-width:500px){#categories_container>div{width:48%;margin-left:2%;font-size:.9em}#categories_container>div:nth-child(2n+1){margin-left:0}.title h1{background:url(../img/searx-mobile.png) no-repeat;width:200px;height:39px}}#search_wrapper{position:relative}.q{padding:.5em 3em .5em 1em;width:100%;font-size:1.5em;border:0;color:#666}.cache_link,.result p{font-size:.9em}#search_submit{position:absolute;top:0;right:0;border:0;background:url(../img/search-icon.png) center center/65% auto no-repeat #3498DB;text-indent:-9999px;width:5em;height:100%;cursor:pointer}#sidebar,.right{position:fixed;width:15em;right:0;text-align:right}#search_submit:focus,#search_submit:hover{background-color:#0665A2}#sidebar{background:#3498DB;top:0;height:100%;padding:1.5em}.right{bottom:1.5em;z-index:1;padding:0 1.5em}.right a{color:#FFF;text-decoration:none}#sidebar form,#suggestions form,.row fieldset{display:inline-block}.right a:focus,.right a:hover{text-decoration:underline}#preferences{background:url(../img/preference-icon.png) right center/12% auto 
no-repeat;padding-right:1.8em}#search_url input{border:0;padding:.5em}#sidebar>div{margin-bottom:1em;color:#FFF}#sidebar input[type=submit]{background:#CCC;border:0;padding:.5em 1em;cursor:pointer;margin-top:.5em}#sidebar input[type=submit]:focus,#sidebar input[type=submit]:hover{color:#FFF;background-color:#0665A2}#results{padding:0 17em 0 2em}.result .engines{text-align:right}.result .content{margin:0;color:#666}.result .url{margin-top:0;color:#FF6530}.result .favicon{float:left;position:relative;top:.5em;margin-right:.5em}.definition_result{background:#CCC;padding:1em}.definition_result .result_title,.definition_result p{margin:0}.result_title{margin-bottom:0;font-weight:400}.result_title a{color:#3498DB;text-decoration:none}#answers,#suggestions span{color:#666}.result_title a:focus,.result_title a:hover{text-decoration:underline}.cache_link{color:#666}.search.center{padding-right:17em}#answers{border:2px solid #3498DB;padding:20px;text-align:center;max-width:70em;margin:0 auto 20px}#suggestions{margin-bottom:1em}#suggestions form{vertical-align:top;margin-bottom:.5em}#suggestions input[type=submit]{color:#333;padding:.5em 1em;border:0;background:#CCC;cursor:pointer}#suggestions input[type=submit]:focus,#suggestions input[type=submit]:hover{background:#3498DB;color:#FFF}#pagination{margin:1.5em 0 2em}#pagination form+form{float:right;margin-top:-2em}input[type=submit]{display:inline-block;background:#3498DB;color:#FFF;border:0;padding:.6em 1em;cursor:pointer}input[type=submit]:focus,input[type=submit]:hover{background:#0665A2}.row{max-width:60em;margin:auto}.row a{color:#3498DB}.row form{letter-spacing:-5px}.row form>*{letter-spacing:normal}.row p{margin:0}.row fieldset{width:48%;vertical-align:top}.row fieldset:last-of-type{display:block;width:auto;background:0 0;padding:0}fieldset,table tr:nth-child(odd){background:#CCC}.row fieldset:nth-child(odd){margin-right:2%}.row fieldset:nth-child(2){min-height:10.5em}@media screen and (max-width:900px){.row{margin:0 1em}.row fieldset{width:49%}.row fieldset,.row fieldset:nth-child(odd){margin-right:0}.row fieldset:first-child{width:100%;margin-right:0}.row fieldset:nth-child(even){margin-right:2%}}@media screen and (max-width:800px){.row fieldset,select{width:100%}table{font-size:.8em}#sidebar,.right{display:none}#results{padding:0 2em}.search.center{padding-right:2em}}@media screen and (max-width:400px){.row #categories_container>div{width:100%;margin-left:0}}fieldset{border:0;margin:1em 0;padding:1.5em}table{width:100%;text-align:left;border:1px solid #CCC;border-collapse:collapse}table th{background:#999;color:#FFF}table td,table th{padding:.5em 1em;border:1px solid #FFF}.engine_checkbox label{padding:.5em;background:#3498DB;color:#FFF;cursor:pointer}.engine_checkbox .deny{background:#3498DB}.engine_checkbox .allow{display:none;background:#666}.engine_checkbox input{display:none}.engine_checkbox input:checked+.allow{display:inline}.engine_checkbox input:checked+.allow+.deny{display:none}.row input[type=submit]{font-size:1em;margin:1em 0 2em}.row .right{position:static;display:inline-block}.row .right a{color:#333;width:auto;text-align:left;padding:0}.small_font{font-size:.8em}table th{padding:1em}legend{background:#EEE;padding:0 1em;position:relative}select{border:1px solid #DDD;padding:.5em .8em;font-size:1em}.highlight .hll{background-color:#ffc}.highlight{font-weight:700;background:#f8f8f8}.highlight .c{color:#408080}.highlight .err{border:1px solid red}.highlight .k{color:green;font-weight:700}.highlight .o{color:#666}.highlight 
.cm{color:#408080}.highlight .cp{color:#BC7A00}.highlight .c1,.highlight .cs{color:#408080;font-style:italic}.highlight .gd{color:#A00000}.highlight .gr{color:red}.highlight .gh{color:navy;font-weight:700}.highlight .gi{color:#00A000}.highlight .go{color:#888}.highlight .gp{color:navy;font-weight:700}.highlight .gs{font-weight:700}.highlight .gu{color:purple;font-weight:700}.highlight .gt{color:#04D}.highlight .kc,.highlight .kd,.highlight .kn{color:green;font-weight:700}.highlight .kp{color:green}.highlight .kr{color:green;font-weight:700}.highlight .kt{color:#B00040}.highlight .m{color:#666}.highlight .s{color:#BA2121}.highlight .na{color:#7D9029}.highlight .nb{color:green}.highlight .nc{color:#00F;font-weight:700}.highlight .no{color:#800}.highlight .nd{color:#A2F}.highlight .ni{color:#999;font-weight:700}.highlight .ne{color:#D2413A;font-weight:700}.highlight .nf{color:#00F}.highlight .nl{color:#A0A000}.highlight .nn{color:#00F;font-weight:700}.highlight .nt{color:green;font-weight:700}.highlight .nv{color:#19177C}.highlight .ow{color:#A2F;font-weight:700}.highlight .w{color:#bbb}.highlight .mf,.highlight .mh,.highlight .mi,.highlight .mo{color:#666}.highlight .s2,.highlight .sb,.highlight .sc{color:#BA2121}.highlight .sd{color:#BA2121}.highlight .se{color:#B62;font-weight:700}.highlight .sh{color:#BA2121}.highlight .si{color:#B68;font-weight:700}.highlight .sx{color:green}.highlight .sr{color:#B68}.highlight .s1{color:#BA2121}.highlight .ss{color:#19177C}.highlight .bp{color:green}.highlight .vc,.highlight .vg,.highlight .vi{color:#19177C}.highlight .il{color:#666}.highlight pre{overflow:auto}.highlight .lineno{-webkit-touch-callout:none;-webkit-user-select:none;-khtml-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none;cursor:default}.highlight .lineno::selection{background:0 0}.highlight .lineno::-moz-selection{background:0 0} \ No newline at end of file
diff --git a/searx/static/themes/courgette/less/style.less b/searx/static/themes/courgette/less/style.less
index 0387af5c0..26da72812 100644
--- a/searx/static/themes/courgette/less/style.less
+++ b/searx/static/themes/courgette/less/style.less
@@ -325,6 +325,10 @@ a {
font-size: 0.9em;
}
+.result .engines {
+ text-align: right;
+}
+
.result .content {
margin: 0;
color: #666;
diff --git a/searx/static/themes/legacy/css/style.css b/searx/static/themes/legacy/css/style.css
index f434148bd..ca746a369 100644
--- a/searx/static/themes/legacy/css/style.css
+++ b/searx/static/themes/legacy/css/style.css
@@ -1 +1 @@
-.highlight .c,.highlight .cm,.highlight .ge,.highlight .sd{font-style:italic}#categories,.highlight .lineno{-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none}#container,.search,body,html{padding:0;margin:0}div.title h1,input[type=checkbox]{visibility:hidden}#categories,.checkbox_container label,.engine_checkbox label,.highlight .lineno{-webkit-touch-callout:none;-khtml-user-select:none}#answers input[type=submit],#infoboxes input[type=submit],#sidebar input[type=submit],#suggestions input[type=submit],.result_title a:hover,.torrent_result a:hover{text-decoration:underline}#infoboxes,.result .content,.result .url,.result h3{word-wrap:break-word}#apis,#infoboxes .infobox br,#pagination,#pagination br,.result,.result .content br.last{clear:both}.highlight .hll{background-color:#ffc}.highlight{background:#f8f8f8}.highlight .c{color:#408080}.highlight .err{border:1px solid red}.highlight .k{color:green;font-weight:700}.highlight .o{color:#666}.highlight .cm{color:#408080}.highlight .cp{color:#BC7A00}.highlight .c1,.highlight .cs{color:#408080;font-style:italic}.highlight .gd{color:#A00000}.highlight .gr{color:red}.highlight .gh{color:navy;font-weight:700}.highlight .gi{color:#00A000}.highlight .go{color:#888}.highlight .gp{color:navy;font-weight:700}.highlight .gs{font-weight:700}.highlight .gu{color:purple;font-weight:700}.highlight .gt{color:#04D}.highlight .kc,.highlight .kd,.highlight .kn{color:green;font-weight:700}.highlight .kp{color:green}.highlight .kr{color:green;font-weight:700}.highlight .kt{color:#B00040}.highlight .m{color:#666}.highlight .s{color:#BA2121}.highlight .na{color:#7D9029}.highlight .nb{color:green}.highlight .nc{color:#00F;font-weight:700}.highlight .no{color:#800}.highlight .nd{color:#A2F}.highlight .ni{color:#999;font-weight:700}.highlight .ne{color:#D2413A;font-weight:700}.highlight .nf{color:#00F}.highlight .nl{color:#A0A000}.highlight .nn{color:#00F;font-weight:700}.highlight .nt{color:green;font-weight:700}.highlight .nv{color:#19177C}.highlight .ow{color:#A2F;font-weight:700}.highlight .w{color:#bbb}.highlight .mf,.highlight .mh,.highlight .mi,.highlight .mo{color:#666}.highlight .s2,.highlight .sb,.highlight .sc{color:#BA2121}.highlight .sd{color:#BA2121}.highlight .se{color:#B62;font-weight:700}.highlight .sh{color:#BA2121}.highlight .si{color:#B68;font-weight:700}.highlight .sx{color:green}.highlight .sr{color:#B68}.highlight .s1{color:#BA2121}.highlight .ss{color:#19177C}.highlight .bp{color:green}.highlight .vc,.highlight .vg,.highlight .vi{color:#19177C}.highlight .il{color:#666}.highlight pre{overflow:auto}.highlight .lineno{user-select:none;cursor:default}.highlight .lineno::selection{background:0 0}.highlight .lineno::-moz-selection{background:0 0}html{font-family:sans-serif;font-size:.9em;-webkit-text-size-adjust:100%;-ms-text-size-adjust:100%;-moz-text-size-adjust:100%;text-size-adjust:100%;color:#444}#container{width:100%;position:absolute;top:0}.search .checkbox_container label{font-size:.9em;border-bottom:2px solid #E8E7E6}.search .checkbox_container label:hover{border-bottom:2px solid #3498DB}.search .checkbox_container input[type=checkbox]:checked+label{border-bottom:2px solid #2980B9}#search_wrapper{position:relative;width:50em;padding:10px}.center #search_wrapper{margin-left:auto;margin-right:auto}.q,ul.autocompleter-choices{margin:0;border:1px solid #3498DB}.q{background:#FFF;color:#222;font-size:16px;height:28px;outline:0;padding:2px 2px 2px 
8px;padding-right:0!important;width:100%;z-index:2}#search_submit{position:absolute;top:13px;right:1px;padding:0;border:0;background:url(../img/search-icon.png) no-repeat;background-size:24px 24px;opacity:.8;width:24px;height:30px;font-size:0}@media screen and (max-width:50em){#search_wrapper{width:90%;clear:both;overflow:hidden}}ul.autocompleter-choices{position:absolute;padding:0;list-style:none;border-left-color:#3498DB;border-right-color:#3498DB;border-bottom-color:#3498DB;text-align:left;font-family:Verdana,Geneva,Arial,Helvetica,sans-serif;z-index:50;background-color:#FFF;color:#444}ul.autocompleter-choices li{position:relative;margin:-2px 0 0;padding:.2em 1.5em .2em 1em;display:block;float:none!important;cursor:pointer;font-weight:400;white-space:nowrap;font-size:1em;line-height:1.5em}ul.autocompleter-choices li.autocompleter-selected{background-color:#444;color:#FFF}ul.autocompleter-choices li.autocompleter-selected span.autocompleter-queried{color:#9FCFFF}ul.autocompleter-choices span.autocompleter-queried{display:inline;float:none;font-weight:700;margin:0;padding:0}.row{max-width:800px;margin:20px auto;text-align:justify}.row h1{font-size:3em;margin-top:50px}.row p{padding:0 10px;max-width:700px}.row h3,.row ul{margin:4px 8px}.hmarg{margin:0 20px;border:1px solid #3498DB;padding:4px 10px}a:active.hmarg,a:hover.hmarg,a:link.hmarg,a:visited.hmarg{color:#3498DB}.top_margin{margin-top:60px}.center{text-align:center}h1{font-size:5em}div.title{background:url(../img/searx.png) center no-repeat;width:100%;min-height:80px}input[type=submit]{padding:2px 6px;margin:2px 4px;display:inline-block;background:#3498DB;color:#FFF;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;border:0;cursor:pointer}fieldset{margin:8px;border:1px solid #3498DB}#categories{margin:0 10px;user-select:none}.checkbox_container{display:inline-block;position:relative;margin:0 3px;padding:0}.checkbox_container input{display:none}.checkbox_container label,.engine_checkbox label{cursor:pointer;padding:4px 10px;margin:0;display:block;text-transform:capitalize;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none}.checkbox_container input[type=checkbox]:checked+label{background:#3498DB;color:#FFF}.engine_checkbox{padding:4px}label.allow{background:#E74C3C;padding:4px 8px;color:#FFF;display:none}label.deny{background:#2ECC71;padding:4px 8px;color:#444;display:inline}.engine_checkbox input[type=checkbox]:checked+label:nth-child(2)+label{display:none}.engine_checkbox input[type=checkbox]:checked+label.allow{display:inline}a{text-decoration:none;color:#1a11be}a:visited{color:#8E44AD}.result{margin:19px 0 18px;padding:0}.result_title{margin-bottom:0}.result_title a{color:#2980B9;font-weight:400;font-size:1.1em}.result_title a:visited{color:#8E44AD}.cache_link{font-size:10px!important}.result h3{font-size:1em;margin:5px 0 1px;padding:0}.result .content,.result .url,.small_font{font-size:.8em}.result .content{margin:0;padding:0;max-width:54em;line-height:1.24}.result .content img{float:left;margin-right:5px;max-width:200px;max-height:100px}.result .url{margin:0 0 3px;padding:0;max-width:54em;color:#C0392B}.result .published_date{font-size:.8em;color:#888;Margin:5px 20px}.result .thumbnail{width:400px}.engines{color:#888}.small p{margin:2px 0}.right{float:right}.invisible{display:none}.left{float:left}.highlight{color:#094089}.content .highlight{color:#000}.image_result{display:inline-block;margin:10px;position:relative;max-height:160px}.image_result 
img{border:0;max-height:160px}.image_result p{margin:0;padding:0}.image_result p span a{display:none;color:#FFF}.image_result p:hover span a{display:block;position:absolute;bottom:0;right:0;padding:4px;background-color:rgba(0,0,0,.6);font-size:.7em}#categories_container,.percentage{position:relative}.torrent_result{border-left:10px solid #d3d3d3;padding-left:3px}.torrent_result p{margin:3px;font-size:.8em}.torrent_result a{color:#2980B9}.torrent_result a:visited{color:#8E44AD}.definition_result{border-left:10px solid gray;padding-left:3px}#infoboxes,#sidebar{margin:0 2px 5px 5px;padding:0 2px 2px}.percentage{width:300px}.percentage div{background:#444}table{width:100%}td{padding:0 4px}tr:hover{background:#DDD}#results{margin:auto auto 20px;padding:0;width:50em}#sidebar{position:fixed;bottom:10px;left:10px;width:14em}#answers input,#infoboxes input,#sidebar input,#suggestions input{padding:0;margin:3px;font-size:.8em;display:inline-block;background:0 0;color:#444;cursor:pointer}#suggestions form{display:inline}#answers,#suggestions{margin-top:20px;max-width:45em}#suggestions-title{color:#888}#answers{border:2px solid #2980B9;padding:20px}#answers form,#infoboxes form{min-width:210px}#infoboxes{position:absolute;top:100px;right:20px;max-width:21em}#infoboxes .infobox{margin:10px 0;border:1px solid #ddd;padding:5px;font-size:.8em}#infoboxes .infobox img{max-width:90%;max-heigt:12em;display:block;margin:5px;padding:5px}#infoboxes .infobox h2{margin:0}#apis,#search_url{margin-top:8px}#infoboxes .infobox table{table-layout:fixed}#infoboxes .infobox table td{vertical-align:top}#infoboxes .infobox input{font-size:1em}#search_url input{border:1px solid #888;padding:4px;color:#444;width:14em;display:block;margin:4px;font-size:.8em}#preferences{top:10px;padding:0;border:0;background:url(../img/preference-icon.png) no-repeat;background-size:28px 28px;opacity:.8;width:28px;height:30px;display:block}#preferences *{display:none}@media screen and (max-width:50em){#results{margin:auto;padding:0;width:90%}.github{display:none}.checkbox_container{display:block;width:90%}.checkbox_container label{border-bottom:0}.preferences_container{display:none;postion:fixed!important;top:100px;right:0}}@media screen and (max-width:75em){div.title h1{font-size:1em}html.touch #categories{width:95%;height:30px;text-align:left;overflow-x:scroll;overflow-y:hidden;-webkit-overflow-scrolling:touch}html.touch #categories #categories_container{width:1000px;width:-moz-max-content;width:-webkit-max-content;width:max-content}html.touch #categories #categories_container .checkbox_container{display:inline-block;width:auto}#answers,#suggestions{margin-top:5px}#infoboxes{position:inherit;max-width:inherit}#infoboxes .infobox{clear:both}#infoboxes .infobox img{float:left;max-width:10em}#categories{font-size:90%;clear:both}#categories .checkbox_container{margin:auto}#sidebar{position:static;max-width:50em;margin:0 0 2px;padding:0;float:none;border:none;width:auto}#sidebar input{border:0}#apis,#search_url{display:none}.result{border-top:1px solid #E8E7E6;margin:8px 0}.image_result,.image_result img,.result .thumbnail{max-width:98%}}.favicon{float:left;margin-right:4px;margin-top:2px}.preferences_back{background:#3498DB;border:0;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;cursor:pointer;display:inline-block;margin:2px 4px;padding:4px 6px}.preferences_back a{color:#FFF}.hidden{opacity:0;overflow:hidden;font-size:.8em;position:absolute;bottom:-20px;width:100%;text-position:center;background:#fff;transition:opacity 1s 
ease}#categories_container:hover .hidden{transition:opacity 1s ease;opacity:.8} \ No newline at end of file
+.highlight .c,.highlight .cm,.highlight .ge,.highlight .sd{font-style:italic}#categories,.highlight .lineno{-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none}#container,.search,body,html{padding:0;margin:0}div.title h1,input[type=checkbox]{visibility:hidden}#categories,.checkbox_container label,.engine_checkbox label,.highlight .lineno{-webkit-touch-callout:none;-khtml-user-select:none}#answers input[type=submit],#infoboxes input[type=submit],#sidebar input[type=submit],#suggestions input[type=submit],.result_title a:hover,.torrent_result a:hover{text-decoration:underline}#infoboxes,.result .content,.result .url,.result h3{word-wrap:break-word}#apis,#infoboxes .infobox br,#pagination,#pagination br,.result,.result .content br.last{clear:both}.highlight .hll{background-color:#ffc}.highlight{background:#f8f8f8}.highlight .c{color:#408080}.highlight .err{border:1px solid red}.highlight .k{color:green;font-weight:700}.highlight .o{color:#666}.highlight .cm{color:#408080}.highlight .cp{color:#BC7A00}.highlight .c1,.highlight .cs{color:#408080;font-style:italic}.highlight .gd{color:#A00000}.highlight .gr{color:red}.highlight .gh{color:navy;font-weight:700}.highlight .gi{color:#00A000}.highlight .go{color:#888}.highlight .gp{color:navy;font-weight:700}.highlight .gs{font-weight:700}.highlight .gu{color:purple;font-weight:700}.highlight .gt{color:#04D}.highlight .kc,.highlight .kd,.highlight .kn{color:green;font-weight:700}.highlight .kp{color:green}.highlight .kr{color:green;font-weight:700}.highlight .kt{color:#B00040}.highlight .m{color:#666}.highlight .s{color:#BA2121}.highlight .na{color:#7D9029}.highlight .nb{color:green}.highlight .nc{color:#00F;font-weight:700}.highlight .no{color:#800}.highlight .nd{color:#A2F}.highlight .ni{color:#999;font-weight:700}.highlight .ne{color:#D2413A;font-weight:700}.highlight .nf{color:#00F}.highlight .nl{color:#A0A000}.highlight .nn{color:#00F;font-weight:700}.highlight .nt{color:green;font-weight:700}.highlight .nv{color:#19177C}.highlight .ow{color:#A2F;font-weight:700}.highlight .w{color:#bbb}.highlight .mf,.highlight .mh,.highlight .mi,.highlight .mo{color:#666}.highlight .s2,.highlight .sb,.highlight .sc{color:#BA2121}.highlight .sd{color:#BA2121}.highlight .se{color:#B62;font-weight:700}.highlight .sh{color:#BA2121}.highlight .si{color:#B68;font-weight:700}.highlight .sx{color:green}.highlight .sr{color:#B68}.highlight .s1{color:#BA2121}.highlight .ss{color:#19177C}.highlight .bp{color:green}.highlight .vc,.highlight .vg,.highlight .vi{color:#19177C}.highlight .il{color:#666}.highlight pre{overflow:auto}.highlight .lineno{user-select:none;cursor:default}.highlight .lineno::selection{background:0 0}.highlight .lineno::-moz-selection{background:0 0}html{font-family:sans-serif;font-size:.9em;-webkit-text-size-adjust:100%;-ms-text-size-adjust:100%;-moz-text-size-adjust:100%;text-size-adjust:100%;color:#444}#container{width:100%;position:absolute;top:0}.search .checkbox_container label{font-size:.9em;border-bottom:2px solid #E8E7E6}.search .checkbox_container label:hover{border-bottom:2px solid #3498DB}.search .checkbox_container input[type=checkbox]:checked+label{border-bottom:2px solid #2980B9}#search_wrapper{position:relative;width:50em;padding:10px}.center #search_wrapper{margin-left:auto;margin-right:auto}.q,ul.autocompleter-choices{margin:0;border:1px solid #3498DB}.q{background:#FFF;color:#222;font-size:16px;height:28px;outline:0;padding:2px 2px 2px 
8px;padding-right:0!important;width:100%;z-index:2}#search_submit{position:absolute;top:13px;right:1px;padding:0;border:0;background:url(../img/search-icon.png) no-repeat;background-size:24px 24px;opacity:.8;width:24px;height:30px;font-size:0}@media screen and (max-width:50em){#search_wrapper{width:90%;clear:both;overflow:hidden}}ul.autocompleter-choices{position:absolute;padding:0;list-style:none;border-left-color:#3498DB;border-right-color:#3498DB;border-bottom-color:#3498DB;text-align:left;font-family:Verdana,Geneva,Arial,Helvetica,sans-serif;z-index:50;background-color:#FFF;color:#444}ul.autocompleter-choices li{position:relative;margin:-2px 0 0;padding:.2em 1.5em .2em 1em;display:block;float:none!important;cursor:pointer;font-weight:400;white-space:nowrap;font-size:1em;line-height:1.5em}ul.autocompleter-choices li.autocompleter-selected{background-color:#444;color:#FFF}ul.autocompleter-choices li.autocompleter-selected span.autocompleter-queried{color:#9FCFFF}ul.autocompleter-choices span.autocompleter-queried{display:inline;float:none;font-weight:700;margin:0;padding:0}.row{max-width:800px;margin:20px auto;text-align:justify}.row h1{font-size:3em;margin-top:50px}.row p{padding:0 10px;max-width:700px}.row h3,.row ul{margin:4px 8px}.hmarg{margin:0 20px;border:1px solid #3498DB;padding:4px 10px}a:active.hmarg,a:hover.hmarg,a:link.hmarg,a:visited.hmarg{color:#3498DB}.top_margin{margin-top:60px}.center{text-align:center}h1{font-size:5em}div.title{background:url(../img/searx.png) center no-repeat;width:100%;min-height:80px}input[type=submit]{padding:2px 6px;margin:2px 4px;display:inline-block;background:#3498DB;color:#FFF;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;border:0;cursor:pointer}fieldset{margin:8px;border:1px solid #3498DB}#categories{margin:0 10px;user-select:none}.checkbox_container{display:inline-block;position:relative;margin:0 3px;padding:0}.checkbox_container input{display:none}.checkbox_container label,.engine_checkbox label{cursor:pointer;padding:4px 10px;margin:0;display:block;text-transform:capitalize;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none}.checkbox_container input[type=checkbox]:checked+label{background:#3498DB;color:#FFF}.engine_checkbox{padding:4px}label.allow{background:#E74C3C;padding:4px 8px;color:#FFF;display:none}label.deny{background:#2ECC71;padding:4px 8px;color:#444;display:inline}.engine_checkbox input[type=checkbox]:checked+label:nth-child(2)+label{display:none}.engine_checkbox input[type=checkbox]:checked+label.allow{display:inline}a{text-decoration:none;color:#1a11be}a:visited{color:#8E44AD}.result{margin:19px 0 18px;padding:0}.result_title{margin-bottom:0}.result_title a{color:#2980B9;font-weight:400;font-size:1.1em}.result_title a:visited{color:#8E44AD}.cache_link{font-size:10px!important}.result h3{font-size:1em;margin:5px 0 1px;padding:0}.result .content,.result .url,.small_font{font-size:.8em}.result .content{margin:0;padding:0;max-width:54em;line-height:1.24}.result .content img{float:left;margin-right:5px;max-width:200px;max-height:100px}.result .url{margin:0 0 3px;padding:0;max-width:54em;color:#C0392B}.result .published_date{font-size:.8em;color:#888;Margin:5px 20px}.result .thumbnail{width:400px}.engines{color:#888}.small p{margin:2px 0}.right{float:right}.invisible{display:none}.left{float:left}.highlight{color:#094089}.content .highlight{color:#000}.image_result{display:inline-block;margin:10px;position:relative;max-height:160px}.image_result 
img{border:0;max-height:160px}.image_result p{margin:0;padding:0}.image_result p span a{display:none;color:#FFF}.image_result p:hover span a{display:block;position:absolute;bottom:0;right:0;padding:4px;background-color:rgba(0,0,0,.6);font-size:.7em}#categories_container,.percentage{position:relative}.torrent_result{border-left:10px solid #d3d3d3;padding-left:3px}.torrent_result p{margin:3px;font-size:.8em}.torrent_result a{color:#2980B9}.torrent_result a:visited{color:#8E44AD}.definition_result{border-left:10px solid gray;padding-left:3px}.percentage{width:300px}.percentage div{background:#444}table{width:100%}.result-table{margin-bottom:10px}#infoboxes,#sidebar{margin:0 2px 5px 5px;padding:0 2px 2px}td{padding:0 4px}tr:hover{background:#DDD}#results{margin:auto auto 20px;padding:0;width:50em}#sidebar{position:fixed;bottom:10px;left:10px;width:14em}#answers input,#infoboxes input,#sidebar input,#suggestions input{padding:0;margin:3px;font-size:.8em;display:inline-block;background:0 0;color:#444;cursor:pointer}#suggestions form{display:inline}#answers,#suggestions{margin-top:20px;max-width:45em}#suggestions-title{color:#888}#answers{border:2px solid #2980B9;padding:20px}#answers form,#infoboxes form{min-width:210px}#infoboxes{position:absolute;top:100px;right:20px;max-width:21em}#infoboxes .infobox{margin:10px 0;border:1px solid #ddd;padding:5px;font-size:.8em}#infoboxes .infobox img{max-width:90%;max-heigt:12em;display:block;margin:5px;padding:5px}#infoboxes .infobox h2{margin:0}#apis,#search_url{margin-top:8px}#infoboxes .infobox table{table-layout:fixed}#infoboxes .infobox table td{vertical-align:top}#infoboxes .infobox input{font-size:1em}#search_url input{border:1px solid #888;padding:4px;color:#444;width:14em;display:block;margin:4px;font-size:.8em}#preferences{top:10px;padding:0;border:0;background:url(../img/preference-icon.png) no-repeat;background-size:28px 28px;opacity:.8;width:28px;height:30px;display:block}#preferences *{display:none}@media screen and (max-width:50em){#results{margin:auto;padding:0;width:90%}.github{display:none}.checkbox_container{display:block;width:90%}.checkbox_container label{border-bottom:0}.preferences_container{display:none;postion:fixed!important;top:100px;right:0}}@media screen and (max-width:75em){div.title h1{font-size:1em}html.touch #categories{width:95%;height:30px;text-align:left;overflow-x:scroll;overflow-y:hidden;-webkit-overflow-scrolling:touch}html.touch #categories #categories_container{width:1000px;width:-moz-max-content;width:-webkit-max-content;width:max-content}html.touch #categories #categories_container .checkbox_container{display:inline-block;width:auto}#answers,#suggestions{margin-top:5px}#infoboxes{position:inherit;max-width:inherit}#infoboxes .infobox{clear:both}#infoboxes .infobox img{float:left;max-width:10em}#categories{font-size:90%;clear:both}#categories .checkbox_container{margin:auto}#sidebar{position:static;max-width:50em;margin:0 0 2px;padding:0;float:none;border:none;width:auto}#sidebar input{border:0}#apis,#search_url{display:none}.result{border-top:1px solid #E8E7E6;margin:8px 0}.image_result,.image_result img,.result .thumbnail{max-width:98%}}.favicon{float:left;margin-right:4px;margin-top:2px}.preferences_back{background:#3498DB;border:0;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;cursor:pointer;display:inline-block;margin:2px 4px;padding:4px 6px}.preferences_back 
a{color:#FFF}.hidden{opacity:0;overflow:hidden;font-size:.8em;position:absolute;bottom:-20px;width:100%;text-position:center;background:#fff;transition:opacity 1s ease}#categories_container:hover .hidden{transition:opacity 1s ease;opacity:.8} \ No newline at end of file
diff --git a/searx/static/themes/legacy/less/autocompleter.less b/searx/static/themes/legacy/less/autocompleter.less
index db9601aeb..4ab2508f8 100644
--- a/searx/static/themes/legacy/less/autocompleter.less
+++ b/searx/static/themes/legacy/less/autocompleter.less
@@ -1,61 +1,61 @@
-/*
- * searx, A privacy-respecting, hackable metasearch engine
- */
-
-ul {
- &.autocompleter-choices {
- position: absolute;
- margin: 0;
- padding: 0;
- list-style: none;
- border: 1px solid @color-autocompleter-choices-border;
- border-left-color: @color-autocompleter-choices-border-left-right;
- border-right-color: @color-autocompleter-choices-border-left-right;
- border-bottom-color: @color-autocompleter-choices-border-bottom;
- text-align: left;
- font-family: Verdana, Geneva, Arial, Helvetica, sans-serif;
- z-index: 50;
- background-color: @color-autocompleter-choices-background;
- color: @color-autocompleter-choices-font;
-
- li {
- position: relative;
- margin: -2px 0 0 0;
- padding: 0.2em 1.5em 0.2em 1em;
- display: block;
- float: none !important;
- cursor: pointer;
- font-weight: normal;
- white-space: nowrap;
- font-size: 1em;
- line-height: 1.5em;
-
- &.autocompleter-selected {
- background-color: @color-autocompleter-selected-background;
- color: @color-autocompleter-selected-font;
-
- span.autocompleter-queried {
- color: @color-autocompleter-selected-queried-font;
- }
- }
- }
-
- span.autocompleter-queried {
- display: inline;
- float: none;
- font-weight: bold;
- margin: 0;
- padding: 0;
- }
- }
-}
-
-/*.autocompleter-loading {
- //background-image: url(images/spinner.gif);
- background-repeat: no-repeat;
- background-position: right 50%;
-}*/
-
-/*textarea.autocompleter-loading {
- background-position: right bottom;
-}*/
+/*
+ * searx, A privacy-respecting, hackable metasearch engine
+ */
+
+ul {
+ &.autocompleter-choices {
+ position: absolute;
+ margin: 0;
+ padding: 0;
+ list-style: none;
+ border: 1px solid @color-autocompleter-choices-border;
+ border-left-color: @color-autocompleter-choices-border-left-right;
+ border-right-color: @color-autocompleter-choices-border-left-right;
+ border-bottom-color: @color-autocompleter-choices-border-bottom;
+ text-align: left;
+ font-family: Verdana, Geneva, Arial, Helvetica, sans-serif;
+ z-index: 50;
+ background-color: @color-autocompleter-choices-background;
+ color: @color-autocompleter-choices-font;
+
+ li {
+ position: relative;
+ margin: -2px 0 0 0;
+ padding: 0.2em 1.5em 0.2em 1em;
+ display: block;
+ float: none !important;
+ cursor: pointer;
+ font-weight: normal;
+ white-space: nowrap;
+ font-size: 1em;
+ line-height: 1.5em;
+
+ &.autocompleter-selected {
+ background-color: @color-autocompleter-selected-background;
+ color: @color-autocompleter-selected-font;
+
+ span.autocompleter-queried {
+ color: @color-autocompleter-selected-queried-font;
+ }
+ }
+ }
+
+ span.autocompleter-queried {
+ display: inline;
+ float: none;
+ font-weight: bold;
+ margin: 0;
+ padding: 0;
+ }
+ }
+}
+
+/*.autocompleter-loading {
+ //background-image: url(images/spinner.gif);
+ background-repeat: no-repeat;
+ background-position: right 50%;
+}*/
+
+/*textarea.autocompleter-loading {
+ background-position: right bottom;
+}*/
diff --git a/searx/static/themes/legacy/less/style.less b/searx/static/themes/legacy/less/style.less
index 4374f7d68..bbeaf105e 100644
--- a/searx/static/themes/legacy/less/style.less
+++ b/searx/static/themes/legacy/less/style.less
@@ -376,6 +376,10 @@ table {
width: 100%;
}
+.result-table {
+ margin-bottom: 10px;
+}
+
td {
padding: 0 4px;
}
diff --git a/searx/static/themes/oscar/gruntfile.js b/searx/static/themes/oscar/gruntfile.js
index 591399449..def035dba 100644
--- a/searx/static/themes/oscar/gruntfile.js
+++ b/searx/static/themes/oscar/gruntfile.js
@@ -24,7 +24,7 @@ module.exports = function(grunt) {
jshint: {
files: ['gruntfile.js', 'js/searx_src/*.js'],
options: {
- reporterOutput: "",
+ reporterOutput: "",
// options here to override JSHint defaults
globals: {
jQuery: true,
@@ -55,7 +55,7 @@ module.exports = function(grunt) {
"css/logicodev-dark.min.css": "less/logicodev-dark/oscar.less"}
},
/*
- // built with ./manage.sh styles
+ // built with ./manage.sh styles
bootstrap: {
options: {
paths: ["less/bootstrap"],
@@ -90,7 +90,7 @@ module.exports = function(grunt) {
grunt.registerTask('test', ['jshint']);
grunt.registerTask('default', ['jshint', 'concat', 'uglify', 'less']);
-
+
grunt.registerTask('styles', ['less']);
};
diff --git a/searx/static/themes/oscar/js/searx.js b/searx/static/themes/oscar/js/searx.js
index 58b38f019..927aeb422 100644
--- a/searx/static/themes/oscar/js/searx.js
+++ b/searx/static/themes/oscar/js/searx.js
@@ -1,26 +1,26 @@
-/**
- * searx is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Affero General Public License as published by
- * the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * searx is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Affero General Public License for more details.
- *
- * You should have received a copy of the GNU Affero General Public License
- * along with searx. If not, see < http://www.gnu.org/licenses/ >.
- *
- * (C) 2014 by Thomas Pointhuber, <thomas.pointhuber@gmx.at>
- */
-
-requirejs.config({
- baseUrl: './static/themes/oscar/js',
- paths: {
- app: '../app'
- }
-});
+/**
+ * searx is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * searx is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with searx. If not, see < http://www.gnu.org/licenses/ >.
+ *
+ * (C) 2014 by Thomas Pointhuber, <thomas.pointhuber@gmx.at>
+ */
+
+requirejs.config({
+ baseUrl: './static/themes/oscar/js',
+ paths: {
+ app: '../app'
+ }
+});
;/**
* searx is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
@@ -51,306 +51,306 @@ window.searx = (function(d) {
method: script.getAttribute('data-method')
};
})(document);
-;/**
- * searx is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Affero General Public License as published by
- * the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * searx is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Affero General Public License for more details.
- *
- * You should have received a copy of the GNU Affero General Public License
- * along with searx. If not, see < http://www.gnu.org/licenses/ >.
- *
- * (C) 2014 by Thomas Pointhuber, <thomas.pointhuber@gmx.at>
- */
-
-if(searx.autocompleter) {
- searx.searchResults = new Bloodhound({
- datumTokenizer: Bloodhound.tokenizers.obj.whitespace('value'),
- queryTokenizer: Bloodhound.tokenizers.whitespace,
- remote: './autocompleter?q=%QUERY'
- });
- searx.searchResults.initialize();
-}
-
-$(document).ready(function(){
- if(searx.autocompleter) {
- $('#q').typeahead(null, {
- name: 'search-results',
- displayKey: function(result) {
- return result;
- },
- source: searx.searchResults.ttAdapter()
- });
- }
-});
-;/**
- * searx is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Affero General Public License as published by
- * the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * searx is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Affero General Public License for more details.
- *
- * You should have received a copy of the GNU Affero General Public License
- * along with searx. If not, see < http://www.gnu.org/licenses/ >.
- *
- * (C) 2014 by Thomas Pointhuber, <thomas.pointhuber@gmx.at>
- */
-
-$(document).ready(function(){
- /**
- * focus element if class="autofocus" and id="q"
- */
- $('#q.autofocus').focus();
-
- /**
- * select full content on click if class="select-all-on-click"
- */
- $(".select-all-on-click").click(function () {
- $(this).select();
- });
-
- /**
- * change text during btn-collapse click if possible
- */
- $('.btn-collapse').click(function() {
- var btnTextCollapsed = $(this).data('btn-text-collapsed');
- var btnTextNotCollapsed = $(this).data('btn-text-not-collapsed');
-
- if(btnTextCollapsed !== '' && btnTextNotCollapsed !== '') {
- if($(this).hasClass('collapsed')) {
- new_html = $(this).html().replace(btnTextCollapsed, btnTextNotCollapsed);
- } else {
- new_html = $(this).html().replace(btnTextNotCollapsed, btnTextCollapsed);
- }
- $(this).html(new_html);
- }
- });
-
- /**
- * change text during btn-toggle click if possible
- */
- $('.btn-toggle .btn').click(function() {
- var btnClass = 'btn-' + $(this).data('btn-class');
- var btnLabelDefault = $(this).data('btn-label-default');
- var btnLabelToggled = $(this).data('btn-label-toggled');
- if(btnLabelToggled !== '') {
- if($(this).hasClass('btn-default')) {
- new_html = $(this).html().replace(btnLabelDefault, btnLabelToggled);
- } else {
- new_html = $(this).html().replace(btnLabelToggled, btnLabelDefault);
- }
- $(this).html(new_html);
- }
- $(this).toggleClass(btnClass);
- $(this).toggleClass('btn-default');
- });
-
- /**
- * change text during btn-toggle click if possible
- */
- $('.media-loader').click(function() {
- var target = $(this).data('target');
- var iframe_load = $(target + ' > iframe');
- var srctest = iframe_load.attr('src');
- if(srctest === undefined || srctest === false){
- iframe_load.attr('src', iframe_load.data('src'));
- }
- });
-
- /**
- * Select or deselect every categories on double clic
- */
- $(".btn-sm").dblclick(function() {
- var btnClass = 'btn-' + $(this).data('btn-class'); // primary
- if($(this).hasClass('btn-default')) {
- $(".btn-sm > input").attr('checked', 'checked');
- $(".btn-sm > input").prop("checked", true);
- $(".btn-sm").addClass(btnClass);
- $(".btn-sm").addClass('active');
- $(".btn-sm").removeClass('btn-default');
- } else {
- $(".btn-sm > input").attr('checked', '');
- $(".btn-sm > input").removeAttr('checked');
- $(".btn-sm > input").checked = false;
- $(".btn-sm").removeClass(btnClass);
- $(".btn-sm").removeClass('active');
- $(".btn-sm").addClass('btn-default');
- }
- });
-});
-;/**
- * searx is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Affero General Public License as published by
- * the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * searx is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Affero General Public License for more details.
- *
- * You should have received a copy of the GNU Affero General Public License
- * along with searx. If not, see < http://www.gnu.org/licenses/ >.
- *
- * (C) 2014 by Thomas Pointhuber, <thomas.pointhuber@gmx.at>
- */
-
-$(document).ready(function(){
- $(".searx_overpass_request").on( "click", function( event ) {
- var overpass_url = "https://overpass-api.de/api/interpreter?data=";
- var query_start = overpass_url + "[out:json][timeout:25];(";
- var query_end = ");out meta;";
-
- var osm_id = $(this).data('osm-id');
- var osm_type = $(this).data('osm-type');
- var result_table = $(this).data('result-table');
- var result_table_loadicon = "#" + $(this).data('result-table-loadicon');
-
- // tags which can be ignored
- var osm_ignore_tags = [ "addr:city", "addr:country", "addr:housenumber", "addr:postcode", "addr:street" ];
-
- if(osm_id && osm_type && result_table) {
- result_table = "#" + result_table;
- var query = null;
- switch(osm_type) {
- case 'node':
- query = query_start + "node(" + osm_id + ");" + query_end;
- break;
- case 'way':
- query = query_start + "way(" + osm_id + ");" + query_end;
- break;
- case 'relation':
- query = query_start + "relation(" + osm_id + ");" + query_end;
- break;
- default:
- break;
- }
- if(query) {
- //alert(query);
- var ajaxRequest = $.ajax( query )
- .done(function( html) {
- if(html && html.elements && html.elements[0]) {
- var element = html.elements[0];
- var newHtml = $(result_table).html();
- for (var row in element.tags) {
- if(element.tags.name === null || osm_ignore_tags.indexOf(row) == -1) {
- newHtml += "<tr><td>" + row + "</td><td>";
- switch(row) {
- case "phone":
- case "fax":
- newHtml += "<a href=\"tel:" + element.tags[row].replace(/ /g,'') + "\">" + element.tags[row] + "</a>";
- break;
- case "email":
- newHtml += "<a href=\"mailto:" + element.tags[row] + "\">" + element.tags[row] + "</a>";
- break;
- case "website":
- case "url":
- newHtml += "<a href=\"" + element.tags[row] + "\">" + element.tags[row] + "</a>";
- break;
- case "wikidata":
- newHtml += "<a href=\"https://www.wikidata.org/wiki/" + element.tags[row] + "\">" + element.tags[row] + "</a>";
- break;
- case "wikipedia":
- if(element.tags[row].indexOf(":") != -1) {
- newHtml += "<a href=\"https://" + element.tags[row].substring(0,element.tags[row].indexOf(":")) + ".wikipedia.org/wiki/" + element.tags[row].substring(element.tags[row].indexOf(":")+1) + "\">" + element.tags[row] + "</a>";
- break;
- }
- /* jshint ignore:start */
- default:
- /* jshint ignore:end */
- newHtml += element.tags[row];
- break;
- }
- newHtml += "</td></tr>";
- }
- }
- $(result_table).html(newHtml);
- $(result_table).removeClass('hidden');
- $(result_table_loadicon).addClass('hidden');
- }
- })
- .fail(function() {
- $(result_table_loadicon).html($(result_table_loadicon).html() + "<p class=\"text-muted\">could not load data!</p>");
- });
- }
- }
-
- // this event occour only once per element
- $( this ).off( event );
- });
-
- $(".searx_init_map").on( "click", function( event ) {
- var leaflet_target = $(this).data('leaflet-target');
- var map_lon = $(this).data('map-lon');
- var map_lat = $(this).data('map-lat');
- var map_zoom = $(this).data('map-zoom');
- var map_boundingbox = $(this).data('map-boundingbox');
- var map_geojson = $(this).data('map-geojson');
-
- require(['leaflet-0.7.3.min'], function(leaflet) {
- if(map_boundingbox) {
- southWest = L.latLng(map_boundingbox[0], map_boundingbox[2]);
- northEast = L.latLng(map_boundingbox[1], map_boundingbox[3]);
- map_bounds = L.latLngBounds(southWest, northEast);
- }
-
- // TODO hack
- // change default imagePath
- L.Icon.Default.imagePath = "./static/themes/oscar/img/map";
-
- // init map
- var map = L.map(leaflet_target);
-
- // create the tile layer with correct attribution
- var osmMapnikUrl='https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png';
- var osmMapnikAttrib='Map data © <a href="https://openstreetmap.org">OpenStreetMap</a> contributors';
- var osmMapnik = new L.TileLayer(osmMapnikUrl, {minZoom: 1, maxZoom: 19, attribution: osmMapnikAttrib});
-
- var osmWikimediaUrl='https://maps.wikimedia.org/osm-intl/{z}/{x}/{y}.png';
- var osmWikimediaAttrib = 'Wikimedia maps beta | Maps data © <a href="https://openstreetmap.org">OpenStreetMap</a> contributors';
- var osmWikimedia = new L.TileLayer(osmWikimediaUrl, {minZoom: 1, maxZoom: 19, attribution: osmWikimediaAttrib});
-
- // init map view
- if(map_bounds) {
- // TODO hack: https://github.com/Leaflet/Leaflet/issues/2021
- setTimeout(function () {
- map.fitBounds(map_bounds, {
- maxZoom:17
- });
- }, 0);
- } else if (map_lon && map_lat) {
- if(map_zoom)
- map.setView(new L.LatLng(map_lat, map_lon),map_zoom);
- else
- map.setView(new L.LatLng(map_lat, map_lon),8);
- }
-
- map.addLayer(osmMapnik);
-
- var baseLayers = {
- "OSM Mapnik": osmMapnik/*,
- "OSM Wikimedia": osmWikimedia*/
- };
-
- L.control.layers(baseLayers).addTo(map);
-
-
- if(map_geojson)
- L.geoJson(map_geojson).addTo(map);
- /*else if(map_bounds)
- L.rectangle(map_bounds, {color: "#ff7800", weight: 3, fill:false}).addTo(map);*/
- });
-
- // this event occour only once per element
- $( this ).off( event );
- });
-});
+;/**
+ * searx is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * searx is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with searx. If not, see < http://www.gnu.org/licenses/ >.
+ *
+ * (C) 2014 by Thomas Pointhuber, <thomas.pointhuber@gmx.at>
+ */
+
+if(searx.autocompleter) {
+ searx.searchResults = new Bloodhound({
+ datumTokenizer: Bloodhound.tokenizers.obj.whitespace('value'),
+ queryTokenizer: Bloodhound.tokenizers.whitespace,
+ remote: './autocompleter?q=%QUERY'
+ });
+ searx.searchResults.initialize();
+}
+
+$(document).ready(function(){
+ if(searx.autocompleter) {
+ $('#q').typeahead(null, {
+ name: 'search-results',
+ displayKey: function(result) {
+ return result;
+ },
+ source: searx.searchResults.ttAdapter()
+ });
+ }
+});
+;/**
+ * searx is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * searx is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with searx. If not, see < http://www.gnu.org/licenses/ >.
+ *
+ * (C) 2014 by Thomas Pointhuber, <thomas.pointhuber@gmx.at>
+ */
+
+$(document).ready(function(){
+ /**
+ * focus element if class="autofocus" and id="q"
+ */
+ $('#q.autofocus').focus();
+
+ /**
+ * select full content on click if class="select-all-on-click"
+ */
+ $(".select-all-on-click").click(function () {
+ $(this).select();
+ });
+
+ /**
+ * change text during btn-collapse click if possible
+ */
+ $('.btn-collapse').click(function() {
+ var btnTextCollapsed = $(this).data('btn-text-collapsed');
+ var btnTextNotCollapsed = $(this).data('btn-text-not-collapsed');
+
+ if(btnTextCollapsed !== '' && btnTextNotCollapsed !== '') {
+ if($(this).hasClass('collapsed')) {
+ new_html = $(this).html().replace(btnTextCollapsed, btnTextNotCollapsed);
+ } else {
+ new_html = $(this).html().replace(btnTextNotCollapsed, btnTextCollapsed);
+ }
+ $(this).html(new_html);
+ }
+ });
+
+ /**
+ * change text during btn-toggle click if possible
+ */
+ $('.btn-toggle .btn').click(function() {
+ var btnClass = 'btn-' + $(this).data('btn-class');
+ var btnLabelDefault = $(this).data('btn-label-default');
+ var btnLabelToggled = $(this).data('btn-label-toggled');
+ if(btnLabelToggled !== '') {
+ if($(this).hasClass('btn-default')) {
+ new_html = $(this).html().replace(btnLabelDefault, btnLabelToggled);
+ } else {
+ new_html = $(this).html().replace(btnLabelToggled, btnLabelDefault);
+ }
+ $(this).html(new_html);
+ }
+ $(this).toggleClass(btnClass);
+ $(this).toggleClass('btn-default');
+ });
+
+ /**
+ * change text during btn-toggle click if possible
+ */
+ $('.media-loader').click(function() {
+ var target = $(this).data('target');
+ var iframe_load = $(target + ' > iframe');
+ var srctest = iframe_load.attr('src');
+ if(srctest === undefined || srctest === false){
+ iframe_load.attr('src', iframe_load.data('src'));
+ }
+ });
+
+ /**
+ * Select or deselect every categories on double clic
+ */
+ $(".btn-sm").dblclick(function() {
+ var btnClass = 'btn-' + $(this).data('btn-class'); // primary
+ if($(this).hasClass('btn-default')) {
+ $(".btn-sm > input").attr('checked', 'checked');
+ $(".btn-sm > input").prop("checked", true);
+ $(".btn-sm").addClass(btnClass);
+ $(".btn-sm").addClass('active');
+ $(".btn-sm").removeClass('btn-default');
+ } else {
+ $(".btn-sm > input").attr('checked', '');
+ $(".btn-sm > input").removeAttr('checked');
+ $(".btn-sm > input").checked = false;
+ $(".btn-sm").removeClass(btnClass);
+ $(".btn-sm").removeClass('active');
+ $(".btn-sm").addClass('btn-default');
+ }
+ });
+});
+;/**
+ * searx is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * searx is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with searx. If not, see < http://www.gnu.org/licenses/ >.
+ *
+ * (C) 2014 by Thomas Pointhuber, <thomas.pointhuber@gmx.at>
+ */
+
+$(document).ready(function(){
+ $(".searx_overpass_request").on( "click", function( event ) {
+ var overpass_url = "https://overpass-api.de/api/interpreter?data=";
+ var query_start = overpass_url + "[out:json][timeout:25];(";
+ var query_end = ");out meta;";
+
+ var osm_id = $(this).data('osm-id');
+ var osm_type = $(this).data('osm-type');
+ var result_table = $(this).data('result-table');
+ var result_table_loadicon = "#" + $(this).data('result-table-loadicon');
+
+ // tags which can be ignored
+ var osm_ignore_tags = [ "addr:city", "addr:country", "addr:housenumber", "addr:postcode", "addr:street" ];
+
+ if(osm_id && osm_type && result_table) {
+ result_table = "#" + result_table;
+ var query = null;
+ switch(osm_type) {
+ case 'node':
+ query = query_start + "node(" + osm_id + ");" + query_end;
+ break;
+ case 'way':
+ query = query_start + "way(" + osm_id + ");" + query_end;
+ break;
+ case 'relation':
+ query = query_start + "relation(" + osm_id + ");" + query_end;
+ break;
+ default:
+ break;
+ }
+ if(query) {
+ //alert(query);
+ var ajaxRequest = $.ajax( query )
+ .done(function( html) {
+ if(html && html.elements && html.elements[0]) {
+ var element = html.elements[0];
+ var newHtml = $(result_table).html();
+ for (var row in element.tags) {
+ if(element.tags.name === null || osm_ignore_tags.indexOf(row) == -1) {
+ newHtml += "<tr><td>" + row + "</td><td>";
+ switch(row) {
+ case "phone":
+ case "fax":
+ newHtml += "<a href=\"tel:" + element.tags[row].replace(/ /g,'') + "\">" + element.tags[row] + "</a>";
+ break;
+ case "email":
+ newHtml += "<a href=\"mailto:" + element.tags[row] + "\">" + element.tags[row] + "</a>";
+ break;
+ case "website":
+ case "url":
+ newHtml += "<a href=\"" + element.tags[row] + "\">" + element.tags[row] + "</a>";
+ break;
+ case "wikidata":
+ newHtml += "<a href=\"https://www.wikidata.org/wiki/" + element.tags[row] + "\">" + element.tags[row] + "</a>";
+ break;
+ case "wikipedia":
+ if(element.tags[row].indexOf(":") != -1) {
+ newHtml += "<a href=\"https://" + element.tags[row].substring(0,element.tags[row].indexOf(":")) + ".wikipedia.org/wiki/" + element.tags[row].substring(element.tags[row].indexOf(":")+1) + "\">" + element.tags[row] + "</a>";
+ break;
+ }
+ /* jshint ignore:start */
+ default:
+ /* jshint ignore:end */
+ newHtml += element.tags[row];
+ break;
+ }
+ newHtml += "</td></tr>";
+ }
+ }
+ $(result_table).html(newHtml);
+ $(result_table).removeClass('hidden');
+ $(result_table_loadicon).addClass('hidden');
+ }
+ })
+ .fail(function() {
+ $(result_table_loadicon).html($(result_table_loadicon).html() + "<p class=\"text-muted\">could not load data!</p>");
+ });
+ }
+ }
+
+ // this event occour only once per element
+ $( this ).off( event );
+ });
+
+ $(".searx_init_map").on( "click", function( event ) {
+ var leaflet_target = $(this).data('leaflet-target');
+ var map_lon = $(this).data('map-lon');
+ var map_lat = $(this).data('map-lat');
+ var map_zoom = $(this).data('map-zoom');
+ var map_boundingbox = $(this).data('map-boundingbox');
+ var map_geojson = $(this).data('map-geojson');
+
+ require(['leaflet-0.7.3.min'], function(leaflet) {
+ if(map_boundingbox) {
+ southWest = L.latLng(map_boundingbox[0], map_boundingbox[2]);
+ northEast = L.latLng(map_boundingbox[1], map_boundingbox[3]);
+ map_bounds = L.latLngBounds(southWest, northEast);
+ }
+
+ // TODO hack
+ // change default imagePath
+ L.Icon.Default.imagePath = "./static/themes/oscar/img/map";
+
+ // init map
+ var map = L.map(leaflet_target);
+
+ // create the tile layer with correct attribution
+ var osmMapnikUrl='https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png';
+ var osmMapnikAttrib='Map data © <a href="https://openstreetmap.org">OpenStreetMap</a> contributors';
+ var osmMapnik = new L.TileLayer(osmMapnikUrl, {minZoom: 1, maxZoom: 19, attribution: osmMapnikAttrib});
+
+ var osmWikimediaUrl='https://maps.wikimedia.org/osm-intl/{z}/{x}/{y}.png';
+ var osmWikimediaAttrib = 'Wikimedia maps beta | Maps data © <a href="https://openstreetmap.org">OpenStreetMap</a> contributors';
+ var osmWikimedia = new L.TileLayer(osmWikimediaUrl, {minZoom: 1, maxZoom: 19, attribution: osmWikimediaAttrib});
+
+ // init map view
+ if(map_bounds) {
+ // TODO hack: https://github.com/Leaflet/Leaflet/issues/2021
+ setTimeout(function () {
+ map.fitBounds(map_bounds, {
+ maxZoom:17
+ });
+ }, 0);
+ } else if (map_lon && map_lat) {
+ if(map_zoom)
+ map.setView(new L.LatLng(map_lat, map_lon),map_zoom);
+ else
+ map.setView(new L.LatLng(map_lat, map_lon),8);
+ }
+
+ map.addLayer(osmMapnik);
+
+ var baseLayers = {
+ "OSM Mapnik": osmMapnik/*,
+ "OSM Wikimedia": osmWikimedia*/
+ };
+
+ L.control.layers(baseLayers).addTo(map);
+
+
+ if(map_geojson)
+ L.geoJson(map_geojson).addTo(map);
+ /*else if(map_bounds)
+ L.rectangle(map_bounds, {color: "#ff7800", weight: 3, fill:false}).addTo(map);*/
+ });
+
+ // this event occour only once per element
+ $( this ).off( event );
+ });
+});
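The file above, js/searx.js, is the built bundle: the individual modules under js/searx_src/ (whose diffs follow below) are concatenated into it, which is why the same autocompleter, element-modifier and Leaflet hunks appear twice in this change set. Below is a minimal sketch of how such a bundle could be produced with grunt-contrib-concat; the source glob and target path are assumptions rather than the project's actual gruntfile, but the ';' separator matches the ";/**" module boundaries visible in the bundle above.

// hypothetical gruntfile sketch (grunt-contrib-concat), not the project's real config
module.exports = function (grunt) {
  grunt.initConfig({
    concat: {
      options: { separator: ';' },      // produces the ";/**" module boundaries seen above
      dist: {
        src: ['js/searx_src/*.js'],     // assumed source glob
        dest: 'js/searx.js'             // assumed bundle target
      }
    }
  });
  grunt.loadNpmTasks('grunt-contrib-concat');
  grunt.registerTask('build', ['concat']);
};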
diff --git a/searx/static/themes/oscar/js/searx_src/00_requirejs_config.js b/searx/static/themes/oscar/js/searx_src/00_requirejs_config.js
index 1aa434902..e7c2abdac 100644
--- a/searx/static/themes/oscar/js/searx_src/00_requirejs_config.js
+++ b/searx/static/themes/oscar/js/searx_src/00_requirejs_config.js
@@ -1,23 +1,23 @@
-/**
- * searx is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Affero General Public License as published by
- * the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * searx is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Affero General Public License for more details.
- *
- * You should have received a copy of the GNU Affero General Public License
- * along with searx. If not, see < http://www.gnu.org/licenses/ >.
- *
- * (C) 2014 by Thomas Pointhuber, <thomas.pointhuber@gmx.at>
- */
-
-requirejs.config({
- baseUrl: './static/themes/oscar/js',
- paths: {
- app: '../app'
- }
-});
+/**
+ * searx is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * searx is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with searx. If not, see < http://www.gnu.org/licenses/ >.
+ *
+ * (C) 2014 by Thomas Pointhuber, <thomas.pointhuber@gmx.at>
+ */
+
+requirejs.config({
+ baseUrl: './static/themes/oscar/js',
+ paths: {
+ app: '../app'
+ }
+});
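00_requirejs_config.js is the first module in the bundle and only sets up RequireJS: baseUrl makes module ids resolve against ./static/themes/oscar/js (relative to the page URL), and the 'app' entry aliases a path one level up. A minimal usage sketch under that configuration, mirroring the require() call used by the Leaflet code further down:

// with the config above loaded, the module id resolves to
// ./static/themes/oscar/js/leaflet-0.7.3.min.js (".js" is appended by RequireJS)
require(['leaflet-0.7.3.min'], function (leaflet) {
  // the library is available once the script has been fetched and executed
});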
diff --git a/searx/static/themes/oscar/js/searx_src/autocompleter.js b/searx/static/themes/oscar/js/searx_src/autocompleter.js
index 70c66d2fc..0907f8e34 100644
--- a/searx/static/themes/oscar/js/searx_src/autocompleter.js
+++ b/searx/static/themes/oscar/js/searx_src/autocompleter.js
@@ -1,37 +1,37 @@
-/**
- * searx is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Affero General Public License as published by
- * the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * searx is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Affero General Public License for more details.
- *
- * You should have received a copy of the GNU Affero General Public License
- * along with searx. If not, see < http://www.gnu.org/licenses/ >.
- *
- * (C) 2014 by Thomas Pointhuber, <thomas.pointhuber@gmx.at>
- */
-
-if(searx.autocompleter) {
- searx.searchResults = new Bloodhound({
- datumTokenizer: Bloodhound.tokenizers.obj.whitespace('value'),
- queryTokenizer: Bloodhound.tokenizers.whitespace,
- remote: './autocompleter?q=%QUERY'
- });
- searx.searchResults.initialize();
-}
-
-$(document).ready(function(){
- if(searx.autocompleter) {
- $('#q').typeahead(null, {
- name: 'search-results',
- displayKey: function(result) {
- return result;
- },
- source: searx.searchResults.ttAdapter()
- });
- }
-});
+/**
+ * searx is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * searx is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with searx. If not, see < http://www.gnu.org/licenses/ >.
+ *
+ * (C) 2014 by Thomas Pointhuber, <thomas.pointhuber@gmx.at>
+ */
+
+if(searx.autocompleter) {
+ searx.searchResults = new Bloodhound({
+ datumTokenizer: Bloodhound.tokenizers.obj.whitespace('value'),
+ queryTokenizer: Bloodhound.tokenizers.whitespace,
+ remote: './autocompleter?q=%QUERY'
+ });
+ searx.searchResults.initialize();
+}
+
+$(document).ready(function(){
+ if(searx.autocompleter) {
+ $('#q').typeahead(null, {
+ name: 'search-results',
+ displayKey: function(result) {
+ return result;
+ },
+ source: searx.searchResults.ttAdapter()
+ });
+ }
+});
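autocompleter.js wires the search box to the suggestion backend: when searx.autocompleter is enabled, a Bloodhound suggestion engine fetches ./autocompleter?q=%QUERY (with %QUERY replaced by the current input) and feeds the results to typeahead.js on the #q field. A minimal sketch of what one such request amounts to, assuming the endpoint returns a JSON array of suggestion strings (the endpoint name is taken from the 'remote' URL above, the response format is an assumption):

// hypothetical standalone request to the same endpoint the Bloodhound engine polls
$.getJSON('./autocompleter', { q: 'sear' }, function (suggestions) {
  // e.g. ["search", "searx", ...] -- exact response format is an assumption
  suggestions.forEach(function (s) { console.log(s); });
});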
diff --git a/searx/static/themes/oscar/js/searx_src/element_modifiers.js b/searx/static/themes/oscar/js/searx_src/element_modifiers.js
index 8e4280548..4264d4c0d 100644
--- a/searx/static/themes/oscar/js/searx_src/element_modifiers.js
+++ b/searx/static/themes/oscar/js/searx_src/element_modifiers.js
@@ -1,99 +1,99 @@
-/**
- * searx is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Affero General Public License as published by
- * the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * searx is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Affero General Public License for more details.
- *
- * You should have received a copy of the GNU Affero General Public License
- * along with searx. If not, see < http://www.gnu.org/licenses/ >.
- *
- * (C) 2014 by Thomas Pointhuber, <thomas.pointhuber@gmx.at>
- */
-
-$(document).ready(function(){
- /**
- * focus element if class="autofocus" and id="q"
- */
- $('#q.autofocus').focus();
-
- /**
- * select full content on click if class="select-all-on-click"
- */
- $(".select-all-on-click").click(function () {
- $(this).select();
- });
-
- /**
- * change text during btn-collapse click if possible
- */
- $('.btn-collapse').click(function() {
- var btnTextCollapsed = $(this).data('btn-text-collapsed');
- var btnTextNotCollapsed = $(this).data('btn-text-not-collapsed');
-
- if(btnTextCollapsed !== '' && btnTextNotCollapsed !== '') {
- if($(this).hasClass('collapsed')) {
- new_html = $(this).html().replace(btnTextCollapsed, btnTextNotCollapsed);
- } else {
- new_html = $(this).html().replace(btnTextNotCollapsed, btnTextCollapsed);
- }
- $(this).html(new_html);
- }
- });
-
- /**
- * change text during btn-toggle click if possible
- */
- $('.btn-toggle .btn').click(function() {
- var btnClass = 'btn-' + $(this).data('btn-class');
- var btnLabelDefault = $(this).data('btn-label-default');
- var btnLabelToggled = $(this).data('btn-label-toggled');
- if(btnLabelToggled !== '') {
- if($(this).hasClass('btn-default')) {
- new_html = $(this).html().replace(btnLabelDefault, btnLabelToggled);
- } else {
- new_html = $(this).html().replace(btnLabelToggled, btnLabelDefault);
- }
- $(this).html(new_html);
- }
- $(this).toggleClass(btnClass);
- $(this).toggleClass('btn-default');
- });
-
- /**
- * change text during btn-toggle click if possible
- */
- $('.media-loader').click(function() {
- var target = $(this).data('target');
- var iframe_load = $(target + ' > iframe');
- var srctest = iframe_load.attr('src');
- if(srctest === undefined || srctest === false){
- iframe_load.attr('src', iframe_load.data('src'));
- }
- });
-
- /**
- * Select or deselect every categories on double clic
- */
- $(".btn-sm").dblclick(function() {
- var btnClass = 'btn-' + $(this).data('btn-class'); // primary
- if($(this).hasClass('btn-default')) {
- $(".btn-sm > input").attr('checked', 'checked');
- $(".btn-sm > input").prop("checked", true);
- $(".btn-sm").addClass(btnClass);
- $(".btn-sm").addClass('active');
- $(".btn-sm").removeClass('btn-default');
- } else {
- $(".btn-sm > input").attr('checked', '');
- $(".btn-sm > input").removeAttr('checked');
- $(".btn-sm > input").checked = false;
- $(".btn-sm").removeClass(btnClass);
- $(".btn-sm").removeClass('active');
- $(".btn-sm").addClass('btn-default');
- }
- });
-});
+/**
+ * searx is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * searx is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with searx. If not, see < http://www.gnu.org/licenses/ >.
+ *
+ * (C) 2014 by Thomas Pointhuber, <thomas.pointhuber@gmx.at>
+ */
+
+$(document).ready(function(){
+ /**
+ * focus element if class="autofocus" and id="q"
+ */
+ $('#q.autofocus').focus();
+
+ /**
+ * select full content on click if class="select-all-on-click"
+ */
+ $(".select-all-on-click").click(function () {
+ $(this).select();
+ });
+
+ /**
+ * change text during btn-collapse click if possible
+ */
+ $('.btn-collapse').click(function() {
+ var btnTextCollapsed = $(this).data('btn-text-collapsed');
+ var btnTextNotCollapsed = $(this).data('btn-text-not-collapsed');
+
+ if(btnTextCollapsed !== '' && btnTextNotCollapsed !== '') {
+ if($(this).hasClass('collapsed')) {
+ new_html = $(this).html().replace(btnTextCollapsed, btnTextNotCollapsed);
+ } else {
+ new_html = $(this).html().replace(btnTextNotCollapsed, btnTextCollapsed);
+ }
+ $(this).html(new_html);
+ }
+ });
+
+ /**
+ * change text during btn-toggle click if possible
+ */
+ $('.btn-toggle .btn').click(function() {
+ var btnClass = 'btn-' + $(this).data('btn-class');
+ var btnLabelDefault = $(this).data('btn-label-default');
+ var btnLabelToggled = $(this).data('btn-label-toggled');
+ if(btnLabelToggled !== '') {
+ if($(this).hasClass('btn-default')) {
+ new_html = $(this).html().replace(btnLabelDefault, btnLabelToggled);
+ } else {
+ new_html = $(this).html().replace(btnLabelToggled, btnLabelDefault);
+ }
+ $(this).html(new_html);
+ }
+ $(this).toggleClass(btnClass);
+ $(this).toggleClass('btn-default');
+ });
+
+ /**
+ * change text during btn-toggle click if possible
+ */
+ $('.media-loader').click(function() {
+ var target = $(this).data('target');
+ var iframe_load = $(target + ' > iframe');
+ var srctest = iframe_load.attr('src');
+ if(srctest === undefined || srctest === false){
+ iframe_load.attr('src', iframe_load.data('src'));
+ }
+ });
+
+ /**
+ * Select or deselect every categories on double clic
+ */
+ $(".btn-sm").dblclick(function() {
+ var btnClass = 'btn-' + $(this).data('btn-class'); // primary
+ if($(this).hasClass('btn-default')) {
+ $(".btn-sm > input").attr('checked', 'checked');
+ $(".btn-sm > input").prop("checked", true);
+ $(".btn-sm").addClass(btnClass);
+ $(".btn-sm").addClass('active');
+ $(".btn-sm").removeClass('btn-default');
+ } else {
+ $(".btn-sm > input").attr('checked', '');
+ $(".btn-sm > input").removeAttr('checked');
+ $(".btn-sm > input").checked = false;
+ $(".btn-sm").removeClass(btnClass);
+ $(".btn-sm").removeClass('active');
+ $(".btn-sm").addClass('btn-default');
+ }
+ });
+});
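element_modifiers.js is a collection of small, data-attribute driven UI helpers: autofocus for #q, select-all-on-click inputs, label swapping for .btn-collapse and .btn-toggle buttons, lazy loading of media iframes, and select/deselect-all on double click for the category buttons. A minimal sketch of markup the .btn-collapse handler above could drive; the class and data-* names come from the handler itself, while the button text and the mechanism that toggles the 'collapsed' class (normally Bootstrap's collapse plugin) are assumptions:

// hypothetical button built with jQuery; clicking it swaps the two labels
var $btn = $('<button>', {
  'class': 'btn btn-collapse collapsed',
  'data-btn-text-collapsed': 'show media',
  'data-btn-text-not-collapsed': 'hide media',
  text: 'show media'
}).appendTo('body');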
diff --git a/searx/static/themes/oscar/js/searx_src/leaflet_map.js b/searx/static/themes/oscar/js/searx_src/leaflet_map.js
index 4be46acb5..3c8c616b1 100644
--- a/searx/static/themes/oscar/js/searx_src/leaflet_map.js
+++ b/searx/static/themes/oscar/js/searx_src/leaflet_map.js
@@ -1,167 +1,167 @@
-/**
- * searx is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Affero General Public License as published by
- * the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * searx is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Affero General Public License for more details.
- *
- * You should have received a copy of the GNU Affero General Public License
- * along with searx. If not, see < http://www.gnu.org/licenses/ >.
- *
- * (C) 2014 by Thomas Pointhuber, <thomas.pointhuber@gmx.at>
- */
-
-$(document).ready(function(){
- $(".searx_overpass_request").on( "click", function( event ) {
- var overpass_url = "https://overpass-api.de/api/interpreter?data=";
- var query_start = overpass_url + "[out:json][timeout:25];(";
- var query_end = ");out meta;";
-
- var osm_id = $(this).data('osm-id');
- var osm_type = $(this).data('osm-type');
- var result_table = $(this).data('result-table');
- var result_table_loadicon = "#" + $(this).data('result-table-loadicon');
-
- // tags which can be ignored
- var osm_ignore_tags = [ "addr:city", "addr:country", "addr:housenumber", "addr:postcode", "addr:street" ];
-
- if(osm_id && osm_type && result_table) {
- result_table = "#" + result_table;
- var query = null;
- switch(osm_type) {
- case 'node':
- query = query_start + "node(" + osm_id + ");" + query_end;
- break;
- case 'way':
- query = query_start + "way(" + osm_id + ");" + query_end;
- break;
- case 'relation':
- query = query_start + "relation(" + osm_id + ");" + query_end;
- break;
- default:
- break;
- }
- if(query) {
- //alert(query);
- var ajaxRequest = $.ajax( query )
- .done(function( html) {
- if(html && html.elements && html.elements[0]) {
- var element = html.elements[0];
- var newHtml = $(result_table).html();
- for (var row in element.tags) {
- if(element.tags.name === null || osm_ignore_tags.indexOf(row) == -1) {
- newHtml += "<tr><td>" + row + "</td><td>";
- switch(row) {
- case "phone":
- case "fax":
- newHtml += "<a href=\"tel:" + element.tags[row].replace(/ /g,'') + "\">" + element.tags[row] + "</a>";
- break;
- case "email":
- newHtml += "<a href=\"mailto:" + element.tags[row] + "\">" + element.tags[row] + "</a>";
- break;
- case "website":
- case "url":
- newHtml += "<a href=\"" + element.tags[row] + "\">" + element.tags[row] + "</a>";
- break;
- case "wikidata":
- newHtml += "<a href=\"https://www.wikidata.org/wiki/" + element.tags[row] + "\">" + element.tags[row] + "</a>";
- break;
- case "wikipedia":
- if(element.tags[row].indexOf(":") != -1) {
- newHtml += "<a href=\"https://" + element.tags[row].substring(0,element.tags[row].indexOf(":")) + ".wikipedia.org/wiki/" + element.tags[row].substring(element.tags[row].indexOf(":")+1) + "\">" + element.tags[row] + "</a>";
- break;
- }
- /* jshint ignore:start */
- default:
- /* jshint ignore:end */
- newHtml += element.tags[row];
- break;
- }
- newHtml += "</td></tr>";
- }
- }
- $(result_table).html(newHtml);
- $(result_table).removeClass('hidden');
- $(result_table_loadicon).addClass('hidden');
- }
- })
- .fail(function() {
- $(result_table_loadicon).html($(result_table_loadicon).html() + "<p class=\"text-muted\">could not load data!</p>");
- });
- }
- }
-
- // this event occour only once per element
- $( this ).off( event );
- });
-
- $(".searx_init_map").on( "click", function( event ) {
- var leaflet_target = $(this).data('leaflet-target');
- var map_lon = $(this).data('map-lon');
- var map_lat = $(this).data('map-lat');
- var map_zoom = $(this).data('map-zoom');
- var map_boundingbox = $(this).data('map-boundingbox');
- var map_geojson = $(this).data('map-geojson');
-
- require(['leaflet-0.7.3.min'], function(leaflet) {
- if(map_boundingbox) {
- southWest = L.latLng(map_boundingbox[0], map_boundingbox[2]);
- northEast = L.latLng(map_boundingbox[1], map_boundingbox[3]);
- map_bounds = L.latLngBounds(southWest, northEast);
- }
-
- // TODO hack
- // change default imagePath
- L.Icon.Default.imagePath = "./static/themes/oscar/img/map";
-
- // init map
- var map = L.map(leaflet_target);
-
- // create the tile layer with correct attribution
- var osmMapnikUrl='https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png';
- var osmMapnikAttrib='Map data © <a href="https://openstreetmap.org">OpenStreetMap</a> contributors';
- var osmMapnik = new L.TileLayer(osmMapnikUrl, {minZoom: 1, maxZoom: 19, attribution: osmMapnikAttrib});
-
- var osmWikimediaUrl='https://maps.wikimedia.org/osm-intl/{z}/{x}/{y}.png';
- var osmWikimediaAttrib = 'Wikimedia maps beta | Maps data © <a href="https://openstreetmap.org">OpenStreetMap</a> contributors';
- var osmWikimedia = new L.TileLayer(osmWikimediaUrl, {minZoom: 1, maxZoom: 19, attribution: osmWikimediaAttrib});
-
- // init map view
- if(map_bounds) {
- // TODO hack: https://github.com/Leaflet/Leaflet/issues/2021
- setTimeout(function () {
- map.fitBounds(map_bounds, {
- maxZoom:17
- });
- }, 0);
- } else if (map_lon && map_lat) {
- if(map_zoom)
- map.setView(new L.LatLng(map_lat, map_lon),map_zoom);
- else
- map.setView(new L.LatLng(map_lat, map_lon),8);
- }
-
- map.addLayer(osmMapnik);
-
- var baseLayers = {
- "OSM Mapnik": osmMapnik/*,
- "OSM Wikimedia": osmWikimedia*/
- };
-
- L.control.layers(baseLayers).addTo(map);
-
-
- if(map_geojson)
- L.geoJson(map_geojson).addTo(map);
- /*else if(map_bounds)
- L.rectangle(map_bounds, {color: "#ff7800", weight: 3, fill:false}).addTo(map);*/
- });
-
- // this event occour only once per element
- $( this ).off( event );
- });
-});
+/**
+ * searx is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * searx is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with searx. If not, see < http://www.gnu.org/licenses/ >.
+ *
+ * (C) 2014 by Thomas Pointhuber, <thomas.pointhuber@gmx.at>
+ */
+
+$(document).ready(function(){
+ $(".searx_overpass_request").on( "click", function( event ) {
+ var overpass_url = "https://overpass-api.de/api/interpreter?data=";
+ var query_start = overpass_url + "[out:json][timeout:25];(";
+ var query_end = ");out meta;";
+
+ var osm_id = $(this).data('osm-id');
+ var osm_type = $(this).data('osm-type');
+ var result_table = $(this).data('result-table');
+ var result_table_loadicon = "#" + $(this).data('result-table-loadicon');
+
+ // tags which can be ignored
+ var osm_ignore_tags = [ "addr:city", "addr:country", "addr:housenumber", "addr:postcode", "addr:street" ];
+
+ if(osm_id && osm_type && result_table) {
+ result_table = "#" + result_table;
+ var query = null;
+ switch(osm_type) {
+ case 'node':
+ query = query_start + "node(" + osm_id + ");" + query_end;
+ break;
+ case 'way':
+ query = query_start + "way(" + osm_id + ");" + query_end;
+ break;
+ case 'relation':
+ query = query_start + "relation(" + osm_id + ");" + query_end;
+ break;
+ default:
+ break;
+ }
+ if(query) {
+ //alert(query);
+ var ajaxRequest = $.ajax( query )
+ .done(function( html) {
+ if(html && html.elements && html.elements[0]) {
+ var element = html.elements[0];
+ var newHtml = $(result_table).html();
+ for (var row in element.tags) {
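+              // skip the generic address tags unless the element has no name of its own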
+ if(element.tags.name === null || osm_ignore_tags.indexOf(row) == -1) {
+ newHtml += "<tr><td>" + row + "</td><td>";
+ switch(row) {
+ case "phone":
+ case "fax":
+ newHtml += "<a href=\"tel:" + element.tags[row].replace(/ /g,'') + "\">" + element.tags[row] + "</a>";
+ break;
+ case "email":
+ newHtml += "<a href=\"mailto:" + element.tags[row] + "\">" + element.tags[row] + "</a>";
+ break;
+ case "website":
+ case "url":
+ newHtml += "<a href=\"" + element.tags[row] + "\">" + element.tags[row] + "</a>";
+ break;
+ case "wikidata":
+ newHtml += "<a href=\"https://www.wikidata.org/wiki/" + element.tags[row] + "\">" + element.tags[row] + "</a>";
+ break;
+ case "wikipedia":
+ if(element.tags[row].indexOf(":") != -1) {
+ newHtml += "<a href=\"https://" + element.tags[row].substring(0,element.tags[row].indexOf(":")) + ".wikipedia.org/wiki/" + element.tags[row].substring(element.tags[row].indexOf(":")+1) + "\">" + element.tags[row] + "</a>";
+ break;
+ }
+ /* jshint ignore:start */
+ default:
+ /* jshint ignore:end */
+ newHtml += element.tags[row];
+ break;
+ }
+ newHtml += "</td></tr>";
+ }
+ }
+ $(result_table).html(newHtml);
+ $(result_table).removeClass('hidden');
+ $(result_table_loadicon).addClass('hidden');
+ }
+ })
+ .fail(function() {
+ $(result_table_loadicon).html($(result_table_loadicon).html() + "<p class=\"text-muted\">could not load data!</p>");
+ });
+ }
+ }
+
+    // this event occurs only once per element
+ $( this ).off( event );
+ });
+
+ $(".searx_init_map").on( "click", function( event ) {
+ var leaflet_target = $(this).data('leaflet-target');
+ var map_lon = $(this).data('map-lon');
+ var map_lat = $(this).data('map-lat');
+ var map_zoom = $(this).data('map-zoom');
+ var map_boundingbox = $(this).data('map-boundingbox');
+ var map_geojson = $(this).data('map-geojson');
+
+ require(['leaflet-0.7.3.min'], function(leaflet) {
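+      // build Leaflet bounds from the bounding box (south-west / north-east corners), if one was given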
+ if(map_boundingbox) {
+        var southWest = L.latLng(map_boundingbox[0], map_boundingbox[2]);
+        var northEast = L.latLng(map_boundingbox[1], map_boundingbox[3]);
+        var map_bounds = L.latLngBounds(southWest, northEast);
+ }
+
+ // TODO hack
+ // change default imagePath
+ L.Icon.Default.imagePath = "./static/themes/oscar/img/map";
+
+ // init map
+ var map = L.map(leaflet_target);
+
+ // create the tile layer with correct attribution
+ var osmMapnikUrl='https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png';
+ var osmMapnikAttrib='Map data © <a href="https://openstreetmap.org">OpenStreetMap</a> contributors';
+ var osmMapnik = new L.TileLayer(osmMapnikUrl, {minZoom: 1, maxZoom: 19, attribution: osmMapnikAttrib});
+
+ var osmWikimediaUrl='https://maps.wikimedia.org/osm-intl/{z}/{x}/{y}.png';
+ var osmWikimediaAttrib = 'Wikimedia maps beta | Maps data © <a href="https://openstreetmap.org">OpenStreetMap</a> contributors';
+ var osmWikimedia = new L.TileLayer(osmWikimediaUrl, {minZoom: 1, maxZoom: 19, attribution: osmWikimediaAttrib});
+
+ // init map view
+ if(map_bounds) {
+ // TODO hack: https://github.com/Leaflet/Leaflet/issues/2021
+ setTimeout(function () {
+ map.fitBounds(map_bounds, {
+ maxZoom:17
+ });
+ }, 0);
+ } else if (map_lon && map_lat) {
+ if(map_zoom)
+ map.setView(new L.LatLng(map_lat, map_lon),map_zoom);
+ else
+ map.setView(new L.LatLng(map_lat, map_lon),8);
+ }
+
+ map.addLayer(osmMapnik);
+
+ var baseLayers = {
+ "OSM Mapnik": osmMapnik/*,
+ "OSM Wikimedia": osmWikimedia*/
+ };
+
+ L.control.layers(baseLayers).addTo(map);
+
+
+ if(map_geojson)
+ L.geoJson(map_geojson).addTo(map);
+ /*else if(map_bounds)
+ L.rectangle(map_bounds, {color: "#ff7800", weight: 3, fill:false}).addTo(map);*/
+ });
+
+    // this event occurs only once per element
+ $( this ).off( event );
+ });
+});
diff --git a/searx/static/themes/oscar/less/logicodev-dark/oscar.less b/searx/static/themes/oscar/less/logicodev-dark/oscar.less
index 9a4272331..e788b8cba 100644
--- a/searx/static/themes/oscar/less/logicodev-dark/oscar.less
+++ b/searx/static/themes/oscar/less/logicodev-dark/oscar.less
@@ -109,7 +109,7 @@ ul.nav li a {
.btn:hover {
color:#444 !important;
- background-color: #BBB !important;
+ background-color: #BBB !important;
}
.btn-primary.active {
@@ -221,7 +221,7 @@ p.btn.btn-default{
}
.table-hover > tbody > tr:hover > td, .table-hover > tbody > tr:hover > th {
- background: rgb(102, 105, 110) !important;
+ background: rgb(102, 105, 110) !important;
}
.btn-success {
diff --git a/searx/static/themes/oscar/less/logicodev/code.less b/searx/static/themes/oscar/less/logicodev/code.less
index 96486f5aa..491b30e5a 100644
--- a/searx/static/themes/oscar/less/logicodev/code.less
+++ b/searx/static/themes/oscar/less/logicodev/code.less
@@ -78,7 +78,7 @@ pre, code{
user-select: none;
cursor: default;
color: #556366;
-
+
&::selection {
background: transparent; /* WebKit/Blink Browsers */
}
@@ -99,5 +99,3 @@ pre, code{
.highlight {
font-weight: 700;
}
-
-
diff --git a/searx/static/themes/oscar/less/logicodev/infobox.less b/searx/static/themes/oscar/less/logicodev/infobox.less
index 0d488d744..954f4507a 100644
--- a/searx/static/themes/oscar/less/logicodev/infobox.less
+++ b/searx/static/themes/oscar/less/logicodev/infobox.less
@@ -30,7 +30,7 @@
table-layout: fixed;
}
-
+
.infobox_part:last-child {
margin-bottom: 0;
}
diff --git a/searx/static/themes/oscar/less/logicodev/navbar.less b/searx/static/themes/oscar/less/logicodev/navbar.less
index 5da7115d9..6e4f9ee10 100644
--- a/searx/static/themes/oscar/less/logicodev/navbar.less
+++ b/searx/static/themes/oscar/less/logicodev/navbar.less
@@ -28,4 +28,3 @@
width: 80%;
}
}
-
diff --git a/searx/static/themes/oscar/less/pointhi/code.less b/searx/static/themes/oscar/less/pointhi/code.less
index 90a2cd60c..70a2a5d49 100644
--- a/searx/static/themes/oscar/less/pointhi/code.less
+++ b/searx/static/themes/oscar/less/pointhi/code.less
@@ -69,7 +69,7 @@
-ms-user-select: none;
user-select: none;
cursor: default;
-
+
&::selection {
background: transparent; /* WebKit/Blink Browsers */
}
diff --git a/searx/static/themes/oscar/less/pointhi/infobox.less b/searx/static/themes/oscar/less/pointhi/infobox.less
index 41375f277..df51b002e 100644
--- a/searx/static/themes/oscar/less/pointhi/infobox.less
+++ b/searx/static/themes/oscar/less/pointhi/infobox.less
@@ -4,7 +4,7 @@
word-wrap: break-word;
table-layout: fixed;
}
-
+
.infobox_part:last-child {
margin-bottom: 0;
}
diff --git a/searx/static/themes/simple/leaflet/leaflet.css b/searx/static/themes/simple/leaflet/leaflet.css
index 230e5bad1..d1b47a125 100644
--- a/searx/static/themes/simple/leaflet/leaflet.css
+++ b/searx/static/themes/simple/leaflet/leaflet.css
@@ -1,636 +1,636 @@
-/* required styles */
-
-.leaflet-pane,
-.leaflet-tile,
-.leaflet-marker-icon,
-.leaflet-marker-shadow,
-.leaflet-tile-container,
-.leaflet-pane > svg,
-.leaflet-pane > canvas,
-.leaflet-zoom-box,
-.leaflet-image-layer,
-.leaflet-layer {
- position: absolute;
- left: 0;
- top: 0;
- }
-.leaflet-container {
- overflow: hidden;
- }
-.leaflet-tile,
-.leaflet-marker-icon,
-.leaflet-marker-shadow {
- -webkit-user-select: none;
- -moz-user-select: none;
- user-select: none;
- -webkit-user-drag: none;
- }
-/* Safari renders non-retina tile on retina better with this, but Chrome is worse */
-.leaflet-safari .leaflet-tile {
- image-rendering: -webkit-optimize-contrast;
- }
-/* hack that prevents hw layers "stretching" when loading new tiles */
-.leaflet-safari .leaflet-tile-container {
- width: 1600px;
- height: 1600px;
- -webkit-transform-origin: 0 0;
- }
-.leaflet-marker-icon,
-.leaflet-marker-shadow {
- display: block;
- }
-/* .leaflet-container svg: reset svg max-width decleration shipped in Joomla! (joomla.org) 3.x */
-/* .leaflet-container img: map is broken in FF if you have max-width: 100% on tiles */
-.leaflet-container .leaflet-overlay-pane svg,
-.leaflet-container .leaflet-marker-pane img,
-.leaflet-container .leaflet-shadow-pane img,
-.leaflet-container .leaflet-tile-pane img,
-.leaflet-container img.leaflet-image-layer {
- max-width: none !important;
- max-height: none !important;
- }
-
-.leaflet-container.leaflet-touch-zoom {
- -ms-touch-action: pan-x pan-y;
- touch-action: pan-x pan-y;
- }
-.leaflet-container.leaflet-touch-drag {
- -ms-touch-action: pinch-zoom;
- /* Fallback for FF which doesn't support pinch-zoom */
- touch-action: none;
- touch-action: pinch-zoom;
-}
-.leaflet-container.leaflet-touch-drag.leaflet-touch-zoom {
- -ms-touch-action: none;
- touch-action: none;
-}
-.leaflet-container {
- -webkit-tap-highlight-color: transparent;
-}
-.leaflet-container a {
- -webkit-tap-highlight-color: rgba(51, 181, 229, 0.4);
-}
-.leaflet-tile {
- filter: inherit;
- visibility: hidden;
- }
-.leaflet-tile-loaded {
- visibility: inherit;
- }
-.leaflet-zoom-box {
- width: 0;
- height: 0;
- -moz-box-sizing: border-box;
- box-sizing: border-box;
- z-index: 800;
- }
-/* workaround for https://bugzilla.mozilla.org/show_bug.cgi?id=888319 */
-.leaflet-overlay-pane svg {
- -moz-user-select: none;
- }
-
-.leaflet-pane { z-index: 400; }
-
-.leaflet-tile-pane { z-index: 200; }
-.leaflet-overlay-pane { z-index: 400; }
-.leaflet-shadow-pane { z-index: 500; }
-.leaflet-marker-pane { z-index: 600; }
-.leaflet-tooltip-pane { z-index: 650; }
-.leaflet-popup-pane { z-index: 700; }
-
-.leaflet-map-pane canvas { z-index: 100; }
-.leaflet-map-pane svg { z-index: 200; }
-
-.leaflet-vml-shape {
- width: 1px;
- height: 1px;
- }
-.lvml {
- behavior: url(#default#VML);
- display: inline-block;
- position: absolute;
- }
-
-
-/* control positioning */
-
-.leaflet-control {
- position: relative;
- z-index: 800;
- pointer-events: visiblePainted; /* IE 9-10 doesn't have auto */
- pointer-events: auto;
- }
-.leaflet-top,
-.leaflet-bottom {
- position: absolute;
- z-index: 1000;
- pointer-events: none;
- }
-.leaflet-top {
- top: 0;
- }
-.leaflet-right {
- right: 0;
- }
-.leaflet-bottom {
- bottom: 0;
- }
-.leaflet-left {
- left: 0;
- }
-.leaflet-control {
- float: left;
- clear: both;
- }
-.leaflet-right .leaflet-control {
- float: right;
- }
-.leaflet-top .leaflet-control {
- margin-top: 10px;
- }
-.leaflet-bottom .leaflet-control {
- margin-bottom: 10px;
- }
-.leaflet-left .leaflet-control {
- margin-left: 10px;
- }
-.leaflet-right .leaflet-control {
- margin-right: 10px;
- }
-
-
-/* zoom and fade animations */
-
-.leaflet-fade-anim .leaflet-tile {
- will-change: opacity;
- }
-.leaflet-fade-anim .leaflet-popup {
- opacity: 0;
- -webkit-transition: opacity 0.2s linear;
- -moz-transition: opacity 0.2s linear;
- -o-transition: opacity 0.2s linear;
- transition: opacity 0.2s linear;
- }
-.leaflet-fade-anim .leaflet-map-pane .leaflet-popup {
- opacity: 1;
- }
-.leaflet-zoom-animated {
- -webkit-transform-origin: 0 0;
- -ms-transform-origin: 0 0;
- transform-origin: 0 0;
- }
-.leaflet-zoom-anim .leaflet-zoom-animated {
- will-change: transform;
- }
-.leaflet-zoom-anim .leaflet-zoom-animated {
- -webkit-transition: -webkit-transform 0.25s cubic-bezier(0,0,0.25,1);
- -moz-transition: -moz-transform 0.25s cubic-bezier(0,0,0.25,1);
- -o-transition: -o-transform 0.25s cubic-bezier(0,0,0.25,1);
- transition: transform 0.25s cubic-bezier(0,0,0.25,1);
- }
-.leaflet-zoom-anim .leaflet-tile,
-.leaflet-pan-anim .leaflet-tile {
- -webkit-transition: none;
- -moz-transition: none;
- -o-transition: none;
- transition: none;
- }
-
-.leaflet-zoom-anim .leaflet-zoom-hide {
- visibility: hidden;
- }
-
-
-/* cursors */
-
-.leaflet-interactive {
- cursor: pointer;
- }
-.leaflet-grab {
- cursor: -webkit-grab;
- cursor: -moz-grab;
- }
-.leaflet-crosshair,
-.leaflet-crosshair .leaflet-interactive {
- cursor: crosshair;
- }
-.leaflet-popup-pane,
-.leaflet-control {
- cursor: auto;
- }
-.leaflet-dragging .leaflet-grab,
-.leaflet-dragging .leaflet-grab .leaflet-interactive,
-.leaflet-dragging .leaflet-marker-draggable {
- cursor: move;
- cursor: -webkit-grabbing;
- cursor: -moz-grabbing;
- }
-
-/* marker & overlays interactivity */
-.leaflet-marker-icon,
-.leaflet-marker-shadow,
-.leaflet-image-layer,
-.leaflet-pane > svg path,
-.leaflet-tile-container {
- pointer-events: none;
- }
-
-.leaflet-marker-icon.leaflet-interactive,
-.leaflet-image-layer.leaflet-interactive,
-.leaflet-pane > svg path.leaflet-interactive {
- pointer-events: visiblePainted; /* IE 9-10 doesn't have auto */
- pointer-events: auto;
- }
-
-/* visual tweaks */
-
-.leaflet-container {
- background: #ddd;
- outline: 0;
- }
-.leaflet-container a {
- color: #0078A8;
- }
-.leaflet-container a.leaflet-active {
- outline: 2px solid orange;
- }
-.leaflet-zoom-box {
- border: 2px dotted #38f;
- background: rgba(255,255,255,0.5);
- }
-
-
-/* general typography */
-.leaflet-container {
- font: 12px/1.5 "Helvetica Neue", Arial, Helvetica, sans-serif;
- }
-
-
-/* general toolbar styles */
-
-.leaflet-bar {
- box-shadow: 0 1px 5px rgba(0,0,0,0.65);
- border-radius: 4px;
- }
-.leaflet-bar a,
-.leaflet-bar a:hover {
- background-color: #fff;
- border-bottom: 1px solid #ccc;
- width: 26px;
- height: 26px;
- line-height: 26px;
- display: block;
- text-align: center;
- text-decoration: none;
- color: black;
- }
-.leaflet-bar a,
-.leaflet-control-layers-toggle {
- background-position: 50% 50%;
- background-repeat: no-repeat;
- display: block;
- }
-.leaflet-bar a:hover {
- background-color: #f4f4f4;
- }
-.leaflet-bar a:first-child {
- border-top-left-radius: 4px;
- border-top-right-radius: 4px;
- }
-.leaflet-bar a:last-child {
- border-bottom-left-radius: 4px;
- border-bottom-right-radius: 4px;
- border-bottom: none;
- }
-.leaflet-bar a.leaflet-disabled {
- cursor: default;
- background-color: #f4f4f4;
- color: #bbb;
- }
-
-.leaflet-touch .leaflet-bar a {
- width: 30px;
- height: 30px;
- line-height: 30px;
- }
-.leaflet-touch .leaflet-bar a:first-child {
- border-top-left-radius: 2px;
- border-top-right-radius: 2px;
- }
-.leaflet-touch .leaflet-bar a:last-child {
- border-bottom-left-radius: 2px;
- border-bottom-right-radius: 2px;
- }
-
-/* zoom control */
-
-.leaflet-control-zoom-in,
-.leaflet-control-zoom-out {
- font: bold 18px 'Lucida Console', Monaco, monospace;
- text-indent: 1px;
- }
-
-.leaflet-touch .leaflet-control-zoom-in, .leaflet-touch .leaflet-control-zoom-out {
- font-size: 22px;
- }
-
-
-/* layers control */
-
-.leaflet-control-layers {
- box-shadow: 0 1px 5px rgba(0,0,0,0.4);
- background: #fff;
- border-radius: 5px;
- }
-.leaflet-control-layers-toggle {
- background-image: url(images/layers.png);
- width: 36px;
- height: 36px;
- }
-.leaflet-retina .leaflet-control-layers-toggle {
- background-image: url(images/layers-2x.png);
- background-size: 26px 26px;
- }
-.leaflet-touch .leaflet-control-layers-toggle {
- width: 44px;
- height: 44px;
- }
-.leaflet-control-layers .leaflet-control-layers-list,
-.leaflet-control-layers-expanded .leaflet-control-layers-toggle {
- display: none;
- }
-.leaflet-control-layers-expanded .leaflet-control-layers-list {
- display: block;
- position: relative;
- }
-.leaflet-control-layers-expanded {
- padding: 6px 10px 6px 6px;
- color: #333;
- background: #fff;
- }
-.leaflet-control-layers-scrollbar {
- overflow-y: scroll;
- overflow-x: hidden;
- padding-right: 5px;
- }
-.leaflet-control-layers-selector {
- margin-top: 2px;
- position: relative;
- top: 1px;
- }
-.leaflet-control-layers label {
- display: block;
- }
-.leaflet-control-layers-separator {
- height: 0;
- border-top: 1px solid #ddd;
- margin: 5px -10px 5px -6px;
- }
-
-/* Default icon URLs */
-.leaflet-default-icon-path {
- background-image: url(images/marker-icon.png);
- }
-
-
-/* attribution and scale controls */
-
-.leaflet-container .leaflet-control-attribution {
- background: #fff;
- background: rgba(255, 255, 255, 0.7);
- margin: 0;
- }
-.leaflet-control-attribution,
-.leaflet-control-scale-line {
- padding: 0 5px;
- color: #333;
- }
-.leaflet-control-attribution a {
- text-decoration: none;
- }
-.leaflet-control-attribution a:hover {
- text-decoration: underline;
- }
-.leaflet-container .leaflet-control-attribution,
-.leaflet-container .leaflet-control-scale {
- font-size: 11px;
- }
-.leaflet-left .leaflet-control-scale {
- margin-left: 5px;
- }
-.leaflet-bottom .leaflet-control-scale {
- margin-bottom: 5px;
- }
-.leaflet-control-scale-line {
- border: 2px solid #777;
- border-top: none;
- line-height: 1.1;
- padding: 2px 5px 1px;
- font-size: 11px;
- white-space: nowrap;
- overflow: hidden;
- -moz-box-sizing: border-box;
- box-sizing: border-box;
-
- background: #fff;
- background: rgba(255, 255, 255, 0.5);
- }
-.leaflet-control-scale-line:not(:first-child) {
- border-top: 2px solid #777;
- border-bottom: none;
- margin-top: -2px;
- }
-.leaflet-control-scale-line:not(:first-child):not(:last-child) {
- border-bottom: 2px solid #777;
- }
-
-.leaflet-touch .leaflet-control-attribution,
-.leaflet-touch .leaflet-control-layers,
-.leaflet-touch .leaflet-bar {
- box-shadow: none;
- }
-.leaflet-touch .leaflet-control-layers,
-.leaflet-touch .leaflet-bar {
- border: 2px solid rgba(0,0,0,0.2);
- background-clip: padding-box;
- }
-
-
-/* popup */
-
-.leaflet-popup {
- position: absolute;
- text-align: center;
- margin-bottom: 20px;
- }
-.leaflet-popup-content-wrapper {
- padding: 1px;
- text-align: left;
- border-radius: 12px;
- }
-.leaflet-popup-content {
- margin: 13px 19px;
- line-height: 1.4;
- }
-.leaflet-popup-content p {
- margin: 18px 0;
- }
-.leaflet-popup-tip-container {
- width: 40px;
- height: 20px;
- position: absolute;
- left: 50%;
- margin-left: -20px;
- overflow: hidden;
- pointer-events: none;
- }
-.leaflet-popup-tip {
- width: 17px;
- height: 17px;
- padding: 1px;
-
- margin: -10px auto 0;
-
- -webkit-transform: rotate(45deg);
- -moz-transform: rotate(45deg);
- -ms-transform: rotate(45deg);
- -o-transform: rotate(45deg);
- transform: rotate(45deg);
- }
-.leaflet-popup-content-wrapper,
-.leaflet-popup-tip {
- background: white;
- color: #333;
- box-shadow: 0 3px 14px rgba(0,0,0,0.4);
- }
-.leaflet-container a.leaflet-popup-close-button {
- position: absolute;
- top: 0;
- right: 0;
- padding: 4px 4px 0 0;
- border: none;
- text-align: center;
- width: 18px;
- height: 14px;
- font: 16px/14px Tahoma, Verdana, sans-serif;
- color: #c3c3c3;
- text-decoration: none;
- font-weight: bold;
- background: transparent;
- }
-.leaflet-container a.leaflet-popup-close-button:hover {
- color: #999;
- }
-.leaflet-popup-scrolled {
- overflow: auto;
- border-bottom: 1px solid #ddd;
- border-top: 1px solid #ddd;
- }
-
-.leaflet-oldie .leaflet-popup-content-wrapper {
- zoom: 1;
- }
-.leaflet-oldie .leaflet-popup-tip {
- width: 24px;
- margin: 0 auto;
-
- -ms-filter: "progid:DXImageTransform.Microsoft.Matrix(M11=0.70710678, M12=0.70710678, M21=-0.70710678, M22=0.70710678)";
- filter: progid:DXImageTransform.Microsoft.Matrix(M11=0.70710678, M12=0.70710678, M21=-0.70710678, M22=0.70710678);
- }
-.leaflet-oldie .leaflet-popup-tip-container {
- margin-top: -1px;
- }
-
-.leaflet-oldie .leaflet-control-zoom,
-.leaflet-oldie .leaflet-control-layers,
-.leaflet-oldie .leaflet-popup-content-wrapper,
-.leaflet-oldie .leaflet-popup-tip {
- border: 1px solid #999;
- }
-
-
-/* div icon */
-
-.leaflet-div-icon {
- background: #fff;
- border: 1px solid #666;
- }
-
-
-/* Tooltip */
-/* Base styles for the element that has a tooltip */
-.leaflet-tooltip {
- position: absolute;
- padding: 6px;
- background-color: #fff;
- border: 1px solid #fff;
- border-radius: 3px;
- color: #222;
- white-space: nowrap;
- -webkit-user-select: none;
- -moz-user-select: none;
- -ms-user-select: none;
- user-select: none;
- pointer-events: none;
- box-shadow: 0 1px 3px rgba(0,0,0,0.4);
- }
-.leaflet-tooltip.leaflet-clickable {
- cursor: pointer;
- pointer-events: auto;
- }
-.leaflet-tooltip-top:before,
-.leaflet-tooltip-bottom:before,
-.leaflet-tooltip-left:before,
-.leaflet-tooltip-right:before {
- position: absolute;
- pointer-events: none;
- border: 6px solid transparent;
- background: transparent;
- content: "";
- }
-
-/* Directions */
-
-.leaflet-tooltip-bottom {
- margin-top: 6px;
-}
-.leaflet-tooltip-top {
- margin-top: -6px;
-}
-.leaflet-tooltip-bottom:before,
-.leaflet-tooltip-top:before {
- left: 50%;
- margin-left: -6px;
- }
-.leaflet-tooltip-top:before {
- bottom: 0;
- margin-bottom: -12px;
- border-top-color: #fff;
- }
-.leaflet-tooltip-bottom:before {
- top: 0;
- margin-top: -12px;
- margin-left: -6px;
- border-bottom-color: #fff;
- }
-.leaflet-tooltip-left {
- margin-left: -6px;
-}
-.leaflet-tooltip-right {
- margin-left: 6px;
-}
-.leaflet-tooltip-left:before,
-.leaflet-tooltip-right:before {
- top: 50%;
- margin-top: -6px;
- }
-.leaflet-tooltip-left:before {
- right: 0;
- margin-right: -12px;
- border-left-color: #fff;
- }
-.leaflet-tooltip-right:before {
- left: 0;
- margin-left: -12px;
- border-right-color: #fff;
- }
+/* required styles */
+
+.leaflet-pane,
+.leaflet-tile,
+.leaflet-marker-icon,
+.leaflet-marker-shadow,
+.leaflet-tile-container,
+.leaflet-pane > svg,
+.leaflet-pane > canvas,
+.leaflet-zoom-box,
+.leaflet-image-layer,
+.leaflet-layer {
+ position: absolute;
+ left: 0;
+ top: 0;
+ }
+.leaflet-container {
+ overflow: hidden;
+ }
+.leaflet-tile,
+.leaflet-marker-icon,
+.leaflet-marker-shadow {
+ -webkit-user-select: none;
+ -moz-user-select: none;
+ user-select: none;
+ -webkit-user-drag: none;
+ }
+/* Safari renders non-retina tile on retina better with this, but Chrome is worse */
+.leaflet-safari .leaflet-tile {
+ image-rendering: -webkit-optimize-contrast;
+ }
+/* hack that prevents hw layers "stretching" when loading new tiles */
+.leaflet-safari .leaflet-tile-container {
+ width: 1600px;
+ height: 1600px;
+ -webkit-transform-origin: 0 0;
+ }
+.leaflet-marker-icon,
+.leaflet-marker-shadow {
+ display: block;
+ }
+/* .leaflet-container svg: reset svg max-width declaration shipped in Joomla! (joomla.org) 3.x */
+/* .leaflet-container img: map is broken in FF if you have max-width: 100% on tiles */
+.leaflet-container .leaflet-overlay-pane svg,
+.leaflet-container .leaflet-marker-pane img,
+.leaflet-container .leaflet-shadow-pane img,
+.leaflet-container .leaflet-tile-pane img,
+.leaflet-container img.leaflet-image-layer {
+ max-width: none !important;
+ max-height: none !important;
+ }
+
+.leaflet-container.leaflet-touch-zoom {
+ -ms-touch-action: pan-x pan-y;
+ touch-action: pan-x pan-y;
+ }
+.leaflet-container.leaflet-touch-drag {
+ -ms-touch-action: pinch-zoom;
+ /* Fallback for FF which doesn't support pinch-zoom */
+ touch-action: none;
+ touch-action: pinch-zoom;
+}
+.leaflet-container.leaflet-touch-drag.leaflet-touch-zoom {
+ -ms-touch-action: none;
+ touch-action: none;
+}
+.leaflet-container {
+ -webkit-tap-highlight-color: transparent;
+}
+.leaflet-container a {
+ -webkit-tap-highlight-color: rgba(51, 181, 229, 0.4);
+}
+.leaflet-tile {
+ filter: inherit;
+ visibility: hidden;
+ }
+.leaflet-tile-loaded {
+ visibility: inherit;
+ }
+.leaflet-zoom-box {
+ width: 0;
+ height: 0;
+ -moz-box-sizing: border-box;
+ box-sizing: border-box;
+ z-index: 800;
+ }
+/* workaround for https://bugzilla.mozilla.org/show_bug.cgi?id=888319 */
+.leaflet-overlay-pane svg {
+ -moz-user-select: none;
+ }
+
+.leaflet-pane { z-index: 400; }
+
+.leaflet-tile-pane { z-index: 200; }
+.leaflet-overlay-pane { z-index: 400; }
+.leaflet-shadow-pane { z-index: 500; }
+.leaflet-marker-pane { z-index: 600; }
+.leaflet-tooltip-pane { z-index: 650; }
+.leaflet-popup-pane { z-index: 700; }
+
+.leaflet-map-pane canvas { z-index: 100; }
+.leaflet-map-pane svg { z-index: 200; }
+
+.leaflet-vml-shape {
+ width: 1px;
+ height: 1px;
+ }
+.lvml {
+ behavior: url(#default#VML);
+ display: inline-block;
+ position: absolute;
+ }
+
+
+/* control positioning */
+
+.leaflet-control {
+ position: relative;
+ z-index: 800;
+ pointer-events: visiblePainted; /* IE 9-10 doesn't have auto */
+ pointer-events: auto;
+ }
+.leaflet-top,
+.leaflet-bottom {
+ position: absolute;
+ z-index: 1000;
+ pointer-events: none;
+ }
+.leaflet-top {
+ top: 0;
+ }
+.leaflet-right {
+ right: 0;
+ }
+.leaflet-bottom {
+ bottom: 0;
+ }
+.leaflet-left {
+ left: 0;
+ }
+.leaflet-control {
+ float: left;
+ clear: both;
+ }
+.leaflet-right .leaflet-control {
+ float: right;
+ }
+.leaflet-top .leaflet-control {
+ margin-top: 10px;
+ }
+.leaflet-bottom .leaflet-control {
+ margin-bottom: 10px;
+ }
+.leaflet-left .leaflet-control {
+ margin-left: 10px;
+ }
+.leaflet-right .leaflet-control {
+ margin-right: 10px;
+ }
+
+
+/* zoom and fade animations */
+
+.leaflet-fade-anim .leaflet-tile {
+ will-change: opacity;
+ }
+.leaflet-fade-anim .leaflet-popup {
+ opacity: 0;
+ -webkit-transition: opacity 0.2s linear;
+ -moz-transition: opacity 0.2s linear;
+ -o-transition: opacity 0.2s linear;
+ transition: opacity 0.2s linear;
+ }
+.leaflet-fade-anim .leaflet-map-pane .leaflet-popup {
+ opacity: 1;
+ }
+.leaflet-zoom-animated {
+ -webkit-transform-origin: 0 0;
+ -ms-transform-origin: 0 0;
+ transform-origin: 0 0;
+ }
+.leaflet-zoom-anim .leaflet-zoom-animated {
+ will-change: transform;
+ }
+.leaflet-zoom-anim .leaflet-zoom-animated {
+ -webkit-transition: -webkit-transform 0.25s cubic-bezier(0,0,0.25,1);
+ -moz-transition: -moz-transform 0.25s cubic-bezier(0,0,0.25,1);
+ -o-transition: -o-transform 0.25s cubic-bezier(0,0,0.25,1);
+ transition: transform 0.25s cubic-bezier(0,0,0.25,1);
+ }
+.leaflet-zoom-anim .leaflet-tile,
+.leaflet-pan-anim .leaflet-tile {
+ -webkit-transition: none;
+ -moz-transition: none;
+ -o-transition: none;
+ transition: none;
+ }
+
+.leaflet-zoom-anim .leaflet-zoom-hide {
+ visibility: hidden;
+ }
+
+
+/* cursors */
+
+.leaflet-interactive {
+ cursor: pointer;
+ }
+.leaflet-grab {
+ cursor: -webkit-grab;
+ cursor: -moz-grab;
+ }
+.leaflet-crosshair,
+.leaflet-crosshair .leaflet-interactive {
+ cursor: crosshair;
+ }
+.leaflet-popup-pane,
+.leaflet-control {
+ cursor: auto;
+ }
+.leaflet-dragging .leaflet-grab,
+.leaflet-dragging .leaflet-grab .leaflet-interactive,
+.leaflet-dragging .leaflet-marker-draggable {
+ cursor: move;
+ cursor: -webkit-grabbing;
+ cursor: -moz-grabbing;
+ }
+
+/* marker & overlays interactivity */
+.leaflet-marker-icon,
+.leaflet-marker-shadow,
+.leaflet-image-layer,
+.leaflet-pane > svg path,
+.leaflet-tile-container {
+ pointer-events: none;
+ }
+
+.leaflet-marker-icon.leaflet-interactive,
+.leaflet-image-layer.leaflet-interactive,
+.leaflet-pane > svg path.leaflet-interactive {
+ pointer-events: visiblePainted; /* IE 9-10 doesn't have auto */
+ pointer-events: auto;
+ }
+
+/* visual tweaks */
+
+.leaflet-container {
+ background: #ddd;
+ outline: 0;
+ }
+.leaflet-container a {
+ color: #0078A8;
+ }
+.leaflet-container a.leaflet-active {
+ outline: 2px solid orange;
+ }
+.leaflet-zoom-box {
+ border: 2px dotted #38f;
+ background: rgba(255,255,255,0.5);
+ }
+
+
+/* general typography */
+.leaflet-container {
+ font: 12px/1.5 "Helvetica Neue", Arial, Helvetica, sans-serif;
+ }
+
+
+/* general toolbar styles */
+
+.leaflet-bar {
+ box-shadow: 0 1px 5px rgba(0,0,0,0.65);
+ border-radius: 4px;
+ }
+.leaflet-bar a,
+.leaflet-bar a:hover {
+ background-color: #fff;
+ border-bottom: 1px solid #ccc;
+ width: 26px;
+ height: 26px;
+ line-height: 26px;
+ display: block;
+ text-align: center;
+ text-decoration: none;
+ color: black;
+ }
+.leaflet-bar a,
+.leaflet-control-layers-toggle {
+ background-position: 50% 50%;
+ background-repeat: no-repeat;
+ display: block;
+ }
+.leaflet-bar a:hover {
+ background-color: #f4f4f4;
+ }
+.leaflet-bar a:first-child {
+ border-top-left-radius: 4px;
+ border-top-right-radius: 4px;
+ }
+.leaflet-bar a:last-child {
+ border-bottom-left-radius: 4px;
+ border-bottom-right-radius: 4px;
+ border-bottom: none;
+ }
+.leaflet-bar a.leaflet-disabled {
+ cursor: default;
+ background-color: #f4f4f4;
+ color: #bbb;
+ }
+
+.leaflet-touch .leaflet-bar a {
+ width: 30px;
+ height: 30px;
+ line-height: 30px;
+ }
+.leaflet-touch .leaflet-bar a:first-child {
+ border-top-left-radius: 2px;
+ border-top-right-radius: 2px;
+ }
+.leaflet-touch .leaflet-bar a:last-child {
+ border-bottom-left-radius: 2px;
+ border-bottom-right-radius: 2px;
+ }
+
+/* zoom control */
+
+.leaflet-control-zoom-in,
+.leaflet-control-zoom-out {
+ font: bold 18px 'Lucida Console', Monaco, monospace;
+ text-indent: 1px;
+ }
+
+.leaflet-touch .leaflet-control-zoom-in, .leaflet-touch .leaflet-control-zoom-out {
+ font-size: 22px;
+ }
+
+
+/* layers control */
+
+.leaflet-control-layers {
+ box-shadow: 0 1px 5px rgba(0,0,0,0.4);
+ background: #fff;
+ border-radius: 5px;
+ }
+.leaflet-control-layers-toggle {
+ background-image: url(images/layers.png);
+ width: 36px;
+ height: 36px;
+ }
+.leaflet-retina .leaflet-control-layers-toggle {
+ background-image: url(images/layers-2x.png);
+ background-size: 26px 26px;
+ }
+.leaflet-touch .leaflet-control-layers-toggle {
+ width: 44px;
+ height: 44px;
+ }
+.leaflet-control-layers .leaflet-control-layers-list,
+.leaflet-control-layers-expanded .leaflet-control-layers-toggle {
+ display: none;
+ }
+.leaflet-control-layers-expanded .leaflet-control-layers-list {
+ display: block;
+ position: relative;
+ }
+.leaflet-control-layers-expanded {
+ padding: 6px 10px 6px 6px;
+ color: #333;
+ background: #fff;
+ }
+.leaflet-control-layers-scrollbar {
+ overflow-y: scroll;
+ overflow-x: hidden;
+ padding-right: 5px;
+ }
+.leaflet-control-layers-selector {
+ margin-top: 2px;
+ position: relative;
+ top: 1px;
+ }
+.leaflet-control-layers label {
+ display: block;
+ }
+.leaflet-control-layers-separator {
+ height: 0;
+ border-top: 1px solid #ddd;
+ margin: 5px -10px 5px -6px;
+ }
+
+/* Default icon URLs */
+.leaflet-default-icon-path {
+ background-image: url(images/marker-icon.png);
+ }
+
+
+/* attribution and scale controls */
+
+.leaflet-container .leaflet-control-attribution {
+ background: #fff;
+ background: rgba(255, 255, 255, 0.7);
+ margin: 0;
+ }
+.leaflet-control-attribution,
+.leaflet-control-scale-line {
+ padding: 0 5px;
+ color: #333;
+ }
+.leaflet-control-attribution a {
+ text-decoration: none;
+ }
+.leaflet-control-attribution a:hover {
+ text-decoration: underline;
+ }
+.leaflet-container .leaflet-control-attribution,
+.leaflet-container .leaflet-control-scale {
+ font-size: 11px;
+ }
+.leaflet-left .leaflet-control-scale {
+ margin-left: 5px;
+ }
+.leaflet-bottom .leaflet-control-scale {
+ margin-bottom: 5px;
+ }
+.leaflet-control-scale-line {
+ border: 2px solid #777;
+ border-top: none;
+ line-height: 1.1;
+ padding: 2px 5px 1px;
+ font-size: 11px;
+ white-space: nowrap;
+ overflow: hidden;
+ -moz-box-sizing: border-box;
+ box-sizing: border-box;
+
+ background: #fff;
+ background: rgba(255, 255, 255, 0.5);
+ }
+.leaflet-control-scale-line:not(:first-child) {
+ border-top: 2px solid #777;
+ border-bottom: none;
+ margin-top: -2px;
+ }
+.leaflet-control-scale-line:not(:first-child):not(:last-child) {
+ border-bottom: 2px solid #777;
+ }
+
+.leaflet-touch .leaflet-control-attribution,
+.leaflet-touch .leaflet-control-layers,
+.leaflet-touch .leaflet-bar {
+ box-shadow: none;
+ }
+.leaflet-touch .leaflet-control-layers,
+.leaflet-touch .leaflet-bar {
+ border: 2px solid rgba(0,0,0,0.2);
+ background-clip: padding-box;
+ }
+
+
+/* popup */
+
+.leaflet-popup {
+ position: absolute;
+ text-align: center;
+ margin-bottom: 20px;
+ }
+.leaflet-popup-content-wrapper {
+ padding: 1px;
+ text-align: left;
+ border-radius: 12px;
+ }
+.leaflet-popup-content {
+ margin: 13px 19px;
+ line-height: 1.4;
+ }
+.leaflet-popup-content p {
+ margin: 18px 0;
+ }
+.leaflet-popup-tip-container {
+ width: 40px;
+ height: 20px;
+ position: absolute;
+ left: 50%;
+ margin-left: -20px;
+ overflow: hidden;
+ pointer-events: none;
+ }
+.leaflet-popup-tip {
+ width: 17px;
+ height: 17px;
+ padding: 1px;
+
+ margin: -10px auto 0;
+
+ -webkit-transform: rotate(45deg);
+ -moz-transform: rotate(45deg);
+ -ms-transform: rotate(45deg);
+ -o-transform: rotate(45deg);
+ transform: rotate(45deg);
+ }
+.leaflet-popup-content-wrapper,
+.leaflet-popup-tip {
+ background: white;
+ color: #333;
+ box-shadow: 0 3px 14px rgba(0,0,0,0.4);
+ }
+.leaflet-container a.leaflet-popup-close-button {
+ position: absolute;
+ top: 0;
+ right: 0;
+ padding: 4px 4px 0 0;
+ border: none;
+ text-align: center;
+ width: 18px;
+ height: 14px;
+ font: 16px/14px Tahoma, Verdana, sans-serif;
+ color: #c3c3c3;
+ text-decoration: none;
+ font-weight: bold;
+ background: transparent;
+ }
+.leaflet-container a.leaflet-popup-close-button:hover {
+ color: #999;
+ }
+.leaflet-popup-scrolled {
+ overflow: auto;
+ border-bottom: 1px solid #ddd;
+ border-top: 1px solid #ddd;
+ }
+
+.leaflet-oldie .leaflet-popup-content-wrapper {
+ zoom: 1;
+ }
+.leaflet-oldie .leaflet-popup-tip {
+ width: 24px;
+ margin: 0 auto;
+
+ -ms-filter: "progid:DXImageTransform.Microsoft.Matrix(M11=0.70710678, M12=0.70710678, M21=-0.70710678, M22=0.70710678)";
+ filter: progid:DXImageTransform.Microsoft.Matrix(M11=0.70710678, M12=0.70710678, M21=-0.70710678, M22=0.70710678);
+ }
+.leaflet-oldie .leaflet-popup-tip-container {
+ margin-top: -1px;
+ }
+
+.leaflet-oldie .leaflet-control-zoom,
+.leaflet-oldie .leaflet-control-layers,
+.leaflet-oldie .leaflet-popup-content-wrapper,
+.leaflet-oldie .leaflet-popup-tip {
+ border: 1px solid #999;
+ }
+
+
+/* div icon */
+
+.leaflet-div-icon {
+ background: #fff;
+ border: 1px solid #666;
+ }
+
+
+/* Tooltip */
+/* Base styles for the element that has a tooltip */
+.leaflet-tooltip {
+ position: absolute;
+ padding: 6px;
+ background-color: #fff;
+ border: 1px solid #fff;
+ border-radius: 3px;
+ color: #222;
+ white-space: nowrap;
+ -webkit-user-select: none;
+ -moz-user-select: none;
+ -ms-user-select: none;
+ user-select: none;
+ pointer-events: none;
+ box-shadow: 0 1px 3px rgba(0,0,0,0.4);
+ }
+.leaflet-tooltip.leaflet-clickable {
+ cursor: pointer;
+ pointer-events: auto;
+ }
+.leaflet-tooltip-top:before,
+.leaflet-tooltip-bottom:before,
+.leaflet-tooltip-left:before,
+.leaflet-tooltip-right:before {
+ position: absolute;
+ pointer-events: none;
+ border: 6px solid transparent;
+ background: transparent;
+ content: "";
+ }
+
+/* Directions */
+
+.leaflet-tooltip-bottom {
+ margin-top: 6px;
+}
+.leaflet-tooltip-top {
+ margin-top: -6px;
+}
+.leaflet-tooltip-bottom:before,
+.leaflet-tooltip-top:before {
+ left: 50%;
+ margin-left: -6px;
+ }
+.leaflet-tooltip-top:before {
+ bottom: 0;
+ margin-bottom: -12px;
+ border-top-color: #fff;
+ }
+.leaflet-tooltip-bottom:before {
+ top: 0;
+ margin-top: -12px;
+ margin-left: -6px;
+ border-bottom-color: #fff;
+ }
+.leaflet-tooltip-left {
+ margin-left: -6px;
+}
+.leaflet-tooltip-right {
+ margin-left: 6px;
+}
+.leaflet-tooltip-left:before,
+.leaflet-tooltip-right:before {
+ top: 50%;
+ margin-top: -6px;
+ }
+.leaflet-tooltip-left:before {
+ right: 0;
+ margin-right: -12px;
+ border-left-color: #fff;
+ }
+.leaflet-tooltip-right:before {
+ left: 0;
+ margin-left: -12px;
+ border-right-color: #fff;
+ }
diff --git a/searx/templates/courgette/result_templates/key-value.html b/searx/templates/courgette/result_templates/key-value.html
new file mode 100644
index 000000000..789e8de92
--- /dev/null
+++ b/searx/templates/courgette/result_templates/key-value.html
@@ -0,0 +1,13 @@
+<div class="result">
+<table>
+ {% for key, value in result.items() %}
+ {% if key in ['engine', 'engines', 'template', 'score', 'category', 'positions'] %}
+ {% continue %}
+ {% endif %}
+ <tr>
+ <td><b>{{ key|upper }}</b>: {{ value|safe }}</td>
+ </tr>
+ {% endfor %}
+</table>
+<p class="engines">{{ result.engines|join(', ') }}</p>
+</div>
diff --git a/searx/templates/courgette/result_templates/torrent.html b/searx/templates/courgette/result_templates/torrent.html
index d659064d9..7f94a221e 100644
--- a/searx/templates/courgette/result_templates/torrent.html
+++ b/searx/templates/courgette/result_templates/torrent.html
@@ -4,7 +4,7 @@
{% endif %}
<h3 class="result_title"><a href="{{ result.url }}" {% if results_on_new_tab %}target="_blank" rel="noopener noreferrer"{% else %}rel="noreferrer"{% endif %}>{{ result.title|safe }}</a></h3>
{% if result.content %}<span class="content">{{ result.content|safe }}</span><br />{% endif %}
- {% if result.seed %}<span class="stats">{{ _('Seeder') }} : {{ result.seed }}, {{ _('Leecher') }} : {{ result.leech }}</span><br />{% endif %}
+ {% if result.seed is defined %}<span class="stats">{{ _('Seeder') }} : {{ result.seed }}, {{ _('Leecher') }} : {{ result.leech }}</span><br />{% endif %}
<span>
{% if result.magnetlink %}<a href="{{ result.magnetlink }}" class="magnetlink">{{ _('magnet link') }}</a>{% endif %}
{% if result.torrentfile %}<a href="{{ result.torrentfile }}" class="torrentfile" {% if results_on_new_tab %}target="_blank" rel="noopener noreferrer"{% else %}rel="noreferrer"{% endif %}>{{ _('torrent file') }}</a>{% endif %}
diff --git a/searx/templates/legacy/result_templates/key-value.html b/searx/templates/legacy/result_templates/key-value.html
new file mode 100644
index 000000000..a5bb509d9
--- /dev/null
+++ b/searx/templates/legacy/result_templates/key-value.html
@@ -0,0 +1,13 @@
+<table class="result-table">
+ {% for key, value in result.items() %}
+ {% if key in ['engine', 'engines', 'template', 'score', 'category', 'positions'] %}
+ {% continue %}
+ {% endif %}
+ <tr>
+ <td><b>{{ key|upper }}</b>: {{ value|safe }}</td>
+ </tr>
+ {% endfor %}
+ <tr>
+ <td><b>ENGINES</b>: {{ result.engines|join(', ') }}</td>
+ </tr>
+</table>
diff --git a/searx/templates/legacy/result_templates/torrent.html b/searx/templates/legacy/result_templates/torrent.html
index 7a8ac33de..068e05373 100644
--- a/searx/templates/legacy/result_templates/torrent.html
+++ b/searx/templates/legacy/result_templates/torrent.html
@@ -8,6 +8,6 @@
<p>
{% if result.magnetlink %}<a href="{{ result.magnetlink }}" class="magnetlink">{{ _('magnet link') }}</a>{% endif %}
{% if result.torrentfile %}<a href="{{ result.torrentfile }}" {% if results_on_new_tab %}target="_blank" rel="noopener noreferrer"{% else %}rel="noreferrer"{% endif %} class="torrentfile">{{ _('torrent file') }}</a>{% endif %} -
- {% if result.seed %}<span class="stats">{{ _('Seeder') }} : {{ result.seed }}, {{ _('Leecher') }} : {{ result.leech }}</span>{% endif %}
+ {% if result.seed is defined %}<span class="stats">{{ _('Seeder') }} : {{ result.seed }}, {{ _('Leecher') }} : {{ result.leech }}</span>{% endif %}
</p>
</div>
diff --git a/searx/templates/oscar/advanced.html b/searx/templates/oscar/advanced.html
index 95d99ba6a..bf5f86324 100644
--- a/searx/templates/oscar/advanced.html
+++ b/searx/templates/oscar/advanced.html
@@ -1,16 +1,17 @@
<input type="checkbox" name="advanced_search" id="check-advanced" {% if advanced_search %} checked="checked"{% endif %}>
-<label for="check-advanced">
+<label for="check-advanced">{{- "" -}}
<span class="glyphicon glyphicon-cog"></span>
- {{ _('Advanced settings') }}
+ {{- _('Advanced settings') -}}
</label>
<div id="advanced-search-container">
{% include 'oscar/categories.html' %}
+
<div class="row">
<div class="col-xs-6">
- {% include 'oscar/time-range.html' %}
+ {%- include 'oscar/time-range.html' -%}
</div>
<div class="col-xs-6">
- {% include 'oscar/languages.html' %}
+ {%- include 'oscar/languages.html' -%}
</div>
</div>
</div>
diff --git a/searx/templates/oscar/base.html b/searx/templates/oscar/base.html
index 321784ebb..66a9e6029 100644
--- a/searx/templates/oscar/base.html
+++ b/searx/templates/oscar/base.html
@@ -10,16 +10,17 @@
<meta name="referrer" content="no-referrer">
<meta name="viewport" content="width=device-width, initial-scale=1 , maximum-scale=1.0, user-scalable=1" />
{% block meta %}{% endblock %}
- <title>{% block title %}{% endblock %}{{ instance_name }}</title>
+ <title>{% block title %}{% endblock %}{{ instance_name }}</title>
<link rel="stylesheet" href="{{ url_for('static', filename='css/bootstrap.min.css') }}" type="text/css" />
- {% if preferences.get_value('oscar-style') %}
- <link rel="stylesheet" href="{{ url_for('static', filename='css/'+preferences.get_value('oscar-style')+'.min.css') }}" type="text/css" />
- {% else %}
- <link rel="stylesheet" href="{{ url_for('static', filename='css/logicodev.min.css') }}" type="text/css" />
- {% endif %}
+ {% if preferences.get_value('oscar-style') -%}
+ {{' '}}<link rel="stylesheet" href="{{ url_for('static', filename='css/'+preferences.get_value('oscar-style')+'.min.css') }}" type="text/css" />
+ {%- else -%}
+ {{' '}}<link rel="stylesheet" href="{{ url_for('static', filename='css/logicodev.min.css') }}" type="text/css" />
+ {%- endif %}
+
<link rel="stylesheet" href="{{ url_for('static', filename='css/leaflet.min.css') }}" type="text/css" />
- {% for css in styles %}
+ {%- for css in styles %}
<link rel="stylesheet" href="{{ url_for('static', filename=css) }}" type="text/css" />
{% endfor %}
@@ -48,6 +49,7 @@
</head>
<body>
{% include 'oscar/navbar.html' %}
+
<div class="container">
{% if errors %}
<div class="alert alert-danger fade in" role="alert">
@@ -93,13 +95,14 @@
</div>
<script src="{{ url_for('static', filename='js/jquery-1.11.1.min.js') }}"></script>
<script src="{{ url_for('static', filename='js/bootstrap.min.js') }}"></script>
- {% if autocomplete %}<script src="{{ url_for('static', filename='js/typeahead.bundle.min.js') }}"></script>{% endif %}
+ {% if autocomplete %} <script src="{{ url_for('static', filename='js/typeahead.bundle.min.js') }}"></script>{% endif %}
+
<script src="{{ url_for('static', filename='js/require-2.1.15.min.js') }}"></script>
<script src="{{ url_for('static', filename='js/searx.min.js') }}"
data-method="{{ method or 'POST' }}"
data-autocompleter="{% if autocomplete %}true{% else %}false{% endif %}"></script>
{% for script in scripts %}
- <script src="{{ url_for('static', filename=script) }}"></script>
+ {{""}}<script src="{{ url_for('static', filename=script) }}"></script>
{% endfor %}
<noscript>
<style>
diff --git a/searx/templates/oscar/categories.html b/searx/templates/oscar/categories.html
index 1ace10f16..a5c5f61c7 100644
--- a/searx/templates/oscar/categories.html
+++ b/searx/templates/oscar/categories.html
@@ -1,13 +1,13 @@
<div id="categories">
-{% if rtl %}
- {% for category in categories | reverse %}
- <input class="hidden" type="checkbox" id="checkbox_{{ category|replace(' ', '_') }}" name="category_{{ category }}" {% if category in selected_categories %}checked="checked"{% endif %} />
+{%- if rtl -%}
+ {% for category in categories | reverse -%}
+ <input class="hidden" type="checkbox" id="checkbox_{{ category|replace(' ', '_') }}" name="category_{{ category }}" {% if category in selected_categories %}checked="checked"{% endif %} />{{- '' -}}
<label for="checkbox_{{ category|replace(' ', '_') }}">{{ _(category) }}</label>
- {% endfor %}
-{% else %}
- {% for category in categories %}
- <input class="hidden" type="checkbox" id="checkbox_{{ category|replace(' ', '_') }}" name="category_{{ category }}" {% if category in selected_categories %}checked="checked"{% endif %} />
+ {%- endfor %}
+{%- else -%}
+ {% for category in categories -%}
+ <input class="hidden" type="checkbox" id="checkbox_{{ category|replace(' ', '_') }}" name="category_{{ category }}" {% if category in selected_categories %}checked="checked"{% endif %} />{{- '' -}}
<label for="checkbox_{{ category|replace(' ', '_') }}">{{ _(category) }}</label>
- {% endfor %}
-{% endif %}
+ {%- endfor %}
+{%- endif -%}
</div>
diff --git a/searx/templates/oscar/infobox.html b/searx/templates/oscar/infobox.html
index c98fb0e63..9802f11e2 100644
--- a/searx/templates/oscar/infobox.html
+++ b/searx/templates/oscar/infobox.html
@@ -1,34 +1,35 @@
{% from 'oscar/macros.html' import result_link with context %}
<div class="panel panel-default infobox">
- <div class="panel-heading">
- <h4 class="panel-title infobox_part"><bdi>{{ infobox.infobox }}</bdi></h4>
+ <div class="panel-heading">{{- "" -}}
+ <h4 class="panel-title infobox_part"><bdi>{{ infobox.infobox }}</bdi></h4>{{- "" -}}
</div>
<div class="panel-body">
{% if infobox.img_src %}<img class="img-responsive center-block infobox_part" src="{{ image_proxify(infobox.img_src) }}" alt="{{ infobox.infobox }}" />{% endif %}
- {% if infobox.content %}<bdi><p class="infobox_part">{{ infobox.content }}</bdi></p>{% endif %}
- {% if infobox.attributes %}
+ {% if infobox.content %}<bdi><p class="infobox_part">{{ infobox.content | safe }}</p></bdi>{% endif %}
+
+ {% if infobox.attributes -%}
<table class="table table-striped infobox_part">
- {% for attribute in infobox.attributes %}
- <tr>
+ {% for attribute in infobox.attributes -%}
+ <tr>{{- "" -}}
<td><bdi>{{ attribute.label }}</bdi></td>
- {% if attribute.image %}
+ {%- if attribute.image -%}
<td><img class="img-responsive" src="{{ image_proxify(attribute.image.src) }}" alt="{{ attribute.image.alt }}" /></td>
- {% else %}
+ {%- else -%}
<td><bdi>{{ attribute.value }}</bdi></td>
- {% endif %}
+ {%- endif -%}
</tr>
- {% endfor %}
+ {% endfor -%}
</table>
{% endif %}
- {% if infobox.urls %}
- <div class="infobox_part">
+ {% if infobox.urls -%}
+ <div class="infobox_part">{{- "\n" -}}
<bdi>
- {% for url in infobox.urls %}
- <p class="btn btn-default btn-xs">{{ result_link(url.url, url.title) }}</a></p>
- {% endfor %}
- </bdi>
+ {%- for url in infobox.urls -%}
+ <p class="btn btn-default btn-xs">{{ result_link(url.url, url.title) }}</p>
+ {% endfor -%}
+ </bdi>{{- "" -}}
</div>
{% endif %}
</div>
diff --git a/searx/templates/oscar/languages.html b/searx/templates/oscar/languages.html
index 53ade43b2..5aff9f918 100644
--- a/searx/templates/oscar/languages.html
+++ b/searx/templates/oscar/languages.html
@@ -1,12 +1,8 @@
-{% if preferences %}
-<select class="custom-select form-control" name='language'>
-{% else %}
-<select class="time_range custom-select form-control" id='language' name='language'>
-{% endif %}
- <option value="all" {% if current_language == 'all' %}selected="selected"{% endif %}>{{ _('Default language') }}</option>
- {% for lang_id,lang_name,country_name,english_name in language_codes | sort(attribute=1) %}
- <option value="{{ lang_id }}" {% if lang_id == current_language %}selected="selected"{% endif %}>
- {{ lang_name }} {% if country_name %}({{ country_name }}) {% endif %}- {{ lang_id }}
- </option>
- {% endfor %}
+<select class="language custom-select form-control" id="language" name="language" accesskey="l">
+ <option value="all" {% if current_language == 'all' %}selected="selected"{% endif %}>{{ _('Default language') }}</option>
+{%- for lang_id,lang_name,country_name,english_name in language_codes | sort(attribute=1) -%}
+ <option value="{{ lang_id }}" {% if lang_id == current_language %}selected="selected"{% endif %}>
+ {{- lang_name }} {% if country_name %}({{ country_name }}) {% endif %}- {{ lang_id -}}
+ </option>
+{%- endfor -%}
</select>
diff --git a/searx/templates/oscar/macros.html b/searx/templates/oscar/macros.html
index 0ff957521..d2d1dc643 100644
--- a/searx/templates/oscar/macros.html
+++ b/searx/templates/oscar/macros.html
@@ -14,7 +14,7 @@
<!-- Draw result header -->
{% macro result_header(result, favicons) -%}
-<h4 class="result_header">{% if result.engine~".png" in favicons %}{{ draw_favicon(result.engine) }} {% endif %}{{ result_link(result.url, result.title|safe) }}</h4>
+<h4 class="result_header">{% if result.engine~".png" in favicons %}{{ draw_favicon(result.engine) }} {% endif %}{% if result.url %}{{ result_link(result.url, result.title|safe) }}{% else %}{{ result.title|safe}}{% endif %}</h4>
{%- endmacro %}
<!-- Draw result sub header -->
@@ -26,30 +26,38 @@
<!-- Draw result footer -->
{% macro result_footer(result) -%}
- <div class="clearfix"></div>
+ <div class="clearfix"></div>{{- "" -}}
<div class="pull-right">
- {% for engine in result.engines %}
- <span class="label label-default">{{ engine }}</span>
- {% endfor %}
- <small>{{ result_link("https://web.archive.org/web/" + result.url, icon('link') + _('cached'), "text-info") }}</small>
- {% if proxify %}
- <small>{{ result_link(proxify(result.url), icon('sort') + _('proxied'), "text-info") }}</small>
- {% endif %}
-</div>
-<div class="external-link">{{ result.pretty_url }}</div>
+ {%- for engine in result.engines -%}
+ <span class="label label-default">{{ engine }}</span>
+ {%- endfor -%}
+ {%- if result.url -%}
+ <small>{{ result_link("https://web.archive.org/web/" + result.url, icon('link') + _('cached'), "text-info") }}</small>
+ {%- endif -%}
+ {%- if proxify -%}
+ <small>{{ result_link(proxify(result.url), icon('sort') + _('proxied'), "text-info") }}</small>
+ {%- endif -%}
+ </div>
+ {%- if result.pretty_url -%}
+ <div class="external-link">{{ result.pretty_url }}</div>
+ {%- endif -%}
{%- endmacro %}
<!-- Draw result footer -->
{% macro result_footer_rtl(result) -%}
- <div class="clearfix"></div>
- {% for engine in result.engines %}
+ <div class="clearfix"></div>{{- "" -}}
+ {% for engine in result.engines -%}
<span class="label label-default">{{ engine }}</span>
- {% endfor %}
+ {%- endfor %}
+ {%- if result.url -%}
<small>{{ result_link("https://web.archive.org/web/" + result.url, icon('link') + _('cached'), "text-info") }}</small>
- {% if proxify %}
+ {%- endif -%}
+ {% if proxify -%}
<small>{{ result_link(proxify(result.url), icon('sort') + _('proxied'), "text-info") }}</small>
- {% endif %}
+ {%- endif %}
+ {%- if result.pretty_url -%}
<div class="external-link">{{ result.pretty_url }}</div>
+ {%- endif %}
{%- endmacro %}
{% macro preferences_item_header(info, label, rtl) -%}
diff --git a/searx/templates/oscar/navbar.html b/searx/templates/oscar/navbar.html
index 12bf14ffa..077fb9f15 100644
--- a/searx/templates/oscar/navbar.html
+++ b/searx/templates/oscar/navbar.html
@@ -1,9 +1,9 @@
-<div class="searx-navbar">
- <span class="instance {% if rtl %}pull-right{% else %}pull-left{% endif%}">
- <a href="{{ url_for('index') }}">{{ instance_name }}</a>
- </span>
- <span class="{% if rtl %}pull-left{% else %}pull-right{% endif %}">
- <a href="{{ url_for('about') }}">{{ _('about') }}</a>
- <a href="{{ url_for('preferences') }}">{{ _('preferences') }}</a>
- </span>
+<div class="searx-navbar">{{- "" -}}
+ <span class="instance {% if rtl %}pull-right{% else %}pull-left{% endif%}">{{- "" -}}
+ <a href="{{ url_for('index') }}">{{ instance_name }}</a>{{- "" -}}
+ </span>{{- "" -}}
+ <span class="{% if rtl %}pull-left{% else %}pull-right{% endif %}">{{- "" -}}
+ <a href="{{ url_for('about') }}">{{ _('about') }}</a>{{- "" -}}
+ <a href="{{ url_for('preferences') }}">{{ _('preferences') }}</a>{{- "" -}}
+ </span>{{- "" -}}
</div>
diff --git a/searx/templates/oscar/preferences.html b/searx/templates/oscar/preferences.html
index b64d72ddf..1a484dd4b 100644
--- a/searx/templates/oscar/preferences.html
+++ b/searx/templates/oscar/preferences.html
@@ -41,7 +41,7 @@
{% set language_label = _('Search language') %}
{% set language_info = _('What language do you prefer for search?') %}
{{ preferences_item_header(language_info, language_label, rtl) }}
- {% include 'oscar/languages.html' %}
+ {% include 'oscar/languages.html' %}
{{ preferences_item_footer(language_info, language_label, rtl) }}
{% set locale_label = _('Interface language') %}
@@ -156,26 +156,26 @@
<div class="container-fluid">
<fieldset>
<div class="table-responsive">
- <table class="table table-hover table-condensed table-striped">
- <tr>
+ <table class="table table-hover table-condensed table-striped">
+ <tr>
{% if not rtl %}
- <th>{{ _("Allow") }}</th>
- <th>{{ _("Engine name") }}</th>
- <th>{{ _("Shortcut") }}</th>
- <th>{{ _("Selected language") }}</th>
- <th>{{ _("SafeSearch") }}</th>
- <th>{{ _("Time range") }}</th>
- <th>{{ _("Avg. time") }}</th>
- <th>{{ _("Max time") }}</th>
+ <th>{{ _("Allow") }}</th>
+ <th>{{ _("Engine name") }}</th>
+ <th>{{ _("Shortcut") }}</th>
+ <th>{{ _("Selected language") }}</th>
+ <th>{{ _("SafeSearch") }}</th>
+ <th>{{ _("Time range") }}</th>
+ <th>{{ _("Avg. time") }}</th>
+ <th>{{ _("Max time") }}</th>
{% else %}
- <th>{{ _("Max time") }}</th>
- <th>{{ _("Avg. time") }}</th>
- <th>{{ _("Time range") }}</th>
- <th>{{ _("SafeSearch") }}</th>
- <th>{{ _("Selected language") }}</th>
- <th>{{ _("Shortcut") }}</th>
- <th>{{ _("Engine name") }}</th>
- <th>{{ _("Allow") }}</th>
+ <th>{{ _("Max time") }}</th>
+ <th>{{ _("Avg. time") }}</th>
+ <th>{{ _("Time range") }}</th>
+ <th>{{ _("SafeSearch") }}</th>
+ <th>{{ _("Selected language") }}</th>
+ <th>{{ _("Shortcut") }}</th>
+ <th>{{ _("Engine name") }}</th>
+ <th>{{ _("Allow") }}</th>
{% endif %}
</tr>
{% for search_engine in engines_by_category[categ] %}
@@ -186,19 +186,19 @@
{{ checkbox_toggle('engine_' + search_engine.name|replace(' ', '_') + '__' + categ|replace(' ', '_'), (search_engine.name, categ) in disabled_engines) }}
</td>
<th>{{ search_engine.name }}</th>
- <td class="name">{{ shortcuts[search_engine.name] }}</td>
- <td>{{ support_toggle(stats[search_engine.name].supports_selected_language) }}</td>
- <td>{{ support_toggle(search_engine.safesearch==True) }}</td>
- <td>{{ support_toggle(search_engine.time_range_support==True) }}</td>
- <td class="{{ 'danger' if stats[search_engine.name]['warn_time'] else '' }}">{{ 'N/A' if stats[search_engine.name].time==None else stats[search_engine.name].time }}</td>
- <td class="{{ 'danger' if stats[search_engine.name]['warn_timeout'] else '' }}">{{ search_engine.timeout }}</td>
- {% else %}
- <td class="{{ 'danger' if stats[search_engine.name]['warn_timeout'] else '' }}">{{ search_engine.timeout }}</td>
- <td class="{{ 'danger' if stats[search_engine.name]['warn_time'] else '' }}">{{ 'N/A' if stats[search_engine.name].time==None else stats[search_engine.name].time }}</td>
- <td>{{ support_toggle(search_engine.time_range_support==True) }}</td>
- <td>{{ support_toggle(search_engine.safesearch==True) }}</td>
- <td>{{ support_toggle(stats[search_engine.name].supports_selected_language) }}</td>
- <td>{{ shortcuts[search_engine.name] }}</td>
+ <td class="name">{{ shortcuts[search_engine.name] }}</td>
+ <td>{{ support_toggle(stats[search_engine.name].supports_selected_language) }}</td>
+ <td>{{ support_toggle(search_engine.safesearch==True) }}</td>
+ <td>{{ support_toggle(search_engine.time_range_support==True) }}</td>
+ <td class="{{ 'danger' if stats[search_engine.name]['warn_time'] else '' }}">{{ 'N/A' if stats[search_engine.name].time==None else stats[search_engine.name].time }}</td>
+ <td class="{{ 'danger' if stats[search_engine.name]['warn_timeout'] else '' }}">{{ search_engine.timeout }}</td>
+ {% else %}
+ <td class="{{ 'danger' if stats[search_engine.name]['warn_timeout'] else '' }}">{{ search_engine.timeout }}</td>
+ <td class="{{ 'danger' if stats[search_engine.name]['warn_time'] else '' }}">{{ 'N/A' if stats[search_engine.name].time==None else stats[search_engine.name].time }}</td>
+ <td>{{ support_toggle(search_engine.time_range_support==True) }}</td>
+ <td>{{ support_toggle(search_engine.safesearch==True) }}</td>
+ <td>{{ support_toggle(stats[search_engine.name].supports_selected_language) }}</td>
+ <td>{{ shortcuts[search_engine.name] }}</td>
<th>{{ search_engine.name }}</th>
<td class="onoff-checkbox">
{{ checkbox_toggle('engine_' + search_engine.name|replace(' ', '_') + '__' + categ|replace(' ', '_'), (search_engine.name, categ) in disabled_engines) }}
@@ -207,7 +207,7 @@
</tr>
{% endif %}
{% endfor %}
- </table>
+ </table>
</div>
</fieldset>
</div>
diff --git a/searx/templates/oscar/result_templates/code.html b/searx/templates/oscar/result_templates/code.html
index ba74d0333..a1c18a6b7 100644
--- a/searx/templates/oscar/result_templates/code.html
+++ b/searx/templates/oscar/result_templates/code.html
@@ -1,18 +1,18 @@
-{% from 'oscar/macros.html' import result_header, result_sub_header, result_footer, result_footer_rtl, icon %}
-
-{{ result_header(result, favicons) }}
-{{ result_sub_header(result) }}
-
-{% if result.content %}<p class="result-content">{{ result.content|safe }}</p>{% endif %}
-
-{% if result.repository %}<p class="result-content">{{ icon('file') }} <a href="{{ result.repository }}" {% if results_on_new_tab %}target="_blank" rel="noopener noreferrer"{% else %}rel="noreferrer"{% endif %}>{{ result.repository }}</a></p>{% endif %}
-
-<div dir="ltr">
-{{ result.codelines|code_highlighter(result.code_language)|safe }}
-</div>
-
-{% if rtl %}
-{{ result_footer_rtl(result) }}
-{% else %}
-{{ result_footer(result) }}
-{% endif %}
+{% from 'oscar/macros.html' import result_header, result_sub_header, result_footer, result_footer_rtl, icon %}
+
+{{ result_header(result, favicons) }}
+{{ result_sub_header(result) }}
+
+{% if result.content %}<p class="result-content">{{ result.content|safe }}</p>{% endif %}
+
+{% if result.repository %}<p class="result-content">{{ icon('file') }} <a href="{{ result.repository }}" {% if results_on_new_tab %}target="_blank" rel="noopener noreferrer"{% else %}rel="noreferrer"{% endif %}>{{ result.repository }}</a></p>{% endif %}
+
+<div dir="ltr">
+{{ result.codelines|code_highlighter(result.code_language)|safe }}
+</div>
+
+{% if rtl %}
+{{ result_footer_rtl(result) }}
+{% else %}
+{{ result_footer(result) }}
+{% endif %}
diff --git a/searx/templates/oscar/result_templates/default.html b/searx/templates/oscar/result_templates/default.html
index 3ed0f3122..885cbbfa8 100644
--- a/searx/templates/oscar/result_templates/default.html
+++ b/searx/templates/oscar/result_templates/default.html
@@ -1,31 +1,31 @@
-{% from 'oscar/macros.html' import result_header, result_sub_header, result_footer, result_footer_rtl, icon with context %}
-
-{{ result_header(result, favicons) }}
-{{ result_sub_header(result) }}
-
-{% if result.embedded %}
- <small> &bull; <a class="text-info btn-collapse collapsed cursor-pointer media-loader disabled_if_nojs" data-toggle="collapse" data-target="#result-media-{{ index }}" data-btn-text-collapsed="{{ _('show media') }}" data-btn-text-not-collapsed="{{ _('hide media') }}">{{ icon('music') }} {{ _('show media') }}</a></small>
-{% endif %}
-
-{% if result.embedded %}
-<div id="result-media-{{ index }}" class="collapse">
- {{ result.embedded|safe }}
-</div>
-{% endif %}
-
-{% if result.img_src %}
-<div class="container-fluid">
- <div class="row">
-<img src="{{ image_proxify(result.img_src) }}" alt="{{ result.title|striptags }}" title="{{ result.title|striptags }}" style="width: auto; max-height: 60px; min-height: 60px;" class="col-xs-2 col-sm-4 col-md-4 result-content">
-{% if result.content %}<p class="result-content col-xs-8 col-sm-8 col-md-8">{{ result.content|safe }}</p>{% endif %}
- </div>
-</div>
-{% else %}
-{% if result.content %}<p class="result-content">{{ result.content|safe }}</p>{% endif %}
-{% endif %}
-
-{% if rtl %}
-{{ result_footer_rtl(result) }}
-{% else %}
-{{ result_footer(result) }}
-{% endif %}
+{% from 'oscar/macros.html' import result_header, result_sub_header, result_footer, result_footer_rtl, icon with context %}
+
+{{- result_header(result, favicons) -}}
+{{- result_sub_header(result) -}}
+
+{%- if result.embedded -%}
+ <small> &bull; <a class="text-info btn-collapse collapsed cursor-pointer media-loader disabled_if_nojs" data-toggle="collapse" data-target="#result-media-{{ index }}" data-btn-text-collapsed="{{ _('show media') }}" data-btn-text-not-collapsed="{{ _('hide media') }}">{{ icon('music') }} {{ _('show media') }}</a></small>
+{%- endif -%}
+
+{%- if result.embedded -%}
+<div id="result-media-{{ index }}" class="collapse">
+ {{ result.embedded|safe }}
+</div>
+{%- endif -%}
+
+{%- if result.img_src -%}
+<div class="container-fluid">
+ <div class="row">
+<img src="{{ image_proxify(result.img_src) }}" alt="{{ result.title|striptags }}" title="{{ result.title|striptags }}" style="width: auto; max-height: 60px; min-height: 60px;" class="col-xs-2 col-sm-4 col-md-4 result-content">
+{% if result.content %}<p class="result-content col-xs-8 col-sm-8 col-md-8">{{ result.content|safe }}</p>{% endif -%}
+ </div>
+</div>
+{%- else -%}
+{%- if result.content %}<p class="result-content">{{ result.content|safe }}</p>{% endif -%}
+{%- endif -%}
+
+{%- if rtl -%}
+{{ result_footer_rtl(result) }}
+{%- else -%}
+{{ result_footer(result) }}
+{%- endif -%}
diff --git a/searx/templates/oscar/result_templates/images.html b/searx/templates/oscar/result_templates/images.html
index b3292f448..d0a3b7b83 100644
--- a/searx/templates/oscar/result_templates/images.html
+++ b/searx/templates/oscar/result_templates/images.html
@@ -1,49 +1,36 @@
-{% from 'oscar/macros.html' import draw_favicon %}
-
-<a href="{{ result.img_src }}" {% if results_on_new_tab %}target="_blank" rel="noopener noreferrer"{% else %}rel="noreferrer"{% endif %} data-toggle="modal" data-target="#modal-{{ index }}-{{pageno}}">
- <img src="{% if result.thumbnail_src %}{{ image_proxify(result.thumbnail_src) }}{% else %}{{ image_proxify(result.img_src) }}{% endif %}" alt="{{ result.title|striptags }}" title="{{ result.title|striptags }}" class="img-thumbnail">
-</a>
-
-<div class="modal fade" id="modal-{{ index }}-{{ pageno }}" tabindex="-1" role="dialog" aria-hidden="true">
- <div class="modal-dialog">
- <div class="modal-wrapper">
- <div class="modal-header">
- <button type="button" class="close" data-dismiss="modal"><span aria-hidden="true">&times;</span><span class="sr-only">Close</span></button>
- <h4 class="modal-title">{% if result.engine~".png" in favicons %}{{ draw_favicon(result.engine) }} {% endif %}{{ result.title|striptags }}</h4>
- </div>
- <div class="modal-body">
- <img class="img-responsive center-block" src="{% if result.thumbnail_src %}{{ image_proxify(result.thumbnail_src) }}{% else %}{{ image_proxify(result.img_src) }}{% endif %}" alt="{{ result.title|striptags }}">
- {% if result.author %}<span class="photo-author">{{ result.author }}</span><br />{% endif %}
- {% if result.content %}
- <p class="result-content">
- {{ result.content|striptags }}
- </p>
- {% endif %}
- {% if result.img_format %}
- <p class="result-format">
- {{ result.img_format }}
- </p>
- {% endif %}
- {% if result.source %}
- <p class="result-source">
- {{ result.source }}
- </p>
- {% endif %}
- </div>
- <div class="modal-footer">
- <div class="clearfix"></div>
- <span class="label label-default pull-right">{{ result.engine }}</span>
- <p class="text-muted pull-left">{{ result.pretty_url }}</p>
- <div class="clearfix"></div>
- <div class="row">
- <div class="col-md-6">
- <a href="{{ result.img_src }}" {% if results_on_new_tab %}target="_blank" rel="noopener noreferrer"{% else %}rel="noreferrer"{% endif %} class="btn btn-default">{{ _('Get image') }}</a>
- </div>
- <div class="col-md-6">
- <a href="{{ result.url }}" {% if results_on_new_tab %}target="_blank" rel="noopener noreferrer"{% else %}rel="noreferrer"{% endif %} class="btn btn-default">{{ _('View source') }}</a>
- </div>
- </div>
- </div>
- </div>
- </div>
-</div>
+{%- from 'oscar/macros.html' import draw_favicon -%}
+
+<a href="{{ result.img_src }}" {% if results_on_new_tab %}target="_blank" rel="noopener noreferrer"{% else %}rel="noreferrer"{% endif %} data-toggle="modal" data-target="#modal-{{ index }}-{{pageno}}">{{- "" -}}
+ <img src="{% if result.thumbnail_src %}{{ image_proxify(result.thumbnail_src) }}{% else %}{{ image_proxify(result.img_src) }}{% endif %}" alt="{{ result.title|striptags }}" title="{{ result.title|striptags }}" class="img-thumbnail">{{- "" -}}
+</a>
+<div class="modal fade" id="modal-{{ index }}-{{ pageno }}" tabindex="-1" role="dialog" aria-hidden="true">{{- "" -}}
+ <div class="modal-dialog">{{- "" -}}
+ <div class="modal-wrapper">{{- "" -}}
+ <div class="modal-header">{{- "" -}}
+ <button type="button" class="close" data-dismiss="modal"><span aria-hidden="true">&times;</span><span class="sr-only">Close</span></button>{{- "" -}}
+ <h4 class="modal-title">{% if result.engine~".png" in favicons %}{{ draw_favicon(result.engine) }} {% endif %}{{ result.title|striptags }}</h4>{{- "" -}}
+ </div>{{- "" -}}
+ <div class="modal-body">{{- "" -}}
+ <img class="img-responsive center-block" src="{% if result.thumbnail_src %}{{ image_proxify(result.thumbnail_src) }}{% else %}{{ image_proxify(result.img_src) }}{% endif %}" alt="{{ result.title|striptags }}">
+ {%- if result.author %}<span class="photo-author">{{ result.author }}</span><br />{% endif -%}
+ {%- if result.content %}<p class="result-content">{{ result.content|striptags }}</p>{% endif -%}
+ {%- if result.img_format %}<p class="result-format">{{ result.img_format }}</p>{% endif -%}
+ {%- if result.source %}<p class="result-source">{{ result.source }}</p>{% endif -%}
+ </div>{{- "" -}}
+ <div class="modal-footer">{{- "" -}}
+ <div class="clearfix"></div>{{- "" -}}
+ <span class="label label-default pull-right">{{ result.engine }}</span>{{- "" -}}
+ <p class="text-muted pull-left">{{ result.pretty_url }}</p>{{- "" -}}
+ <div class="clearfix"></div>{{- "" -}}
+ <div class="row">{{- "" -}}
+ <div class="col-md-6">{{- "" -}}
+ <a href="{{ result.img_src }}" {% if results_on_new_tab %}target="_blank" rel="noopener noreferrer"{% else %}rel="noreferrer"{% endif %} class="btn btn-default">{{ _('Get image') }}</a>{{- "" -}}
+ </div>{{- "" -}}
+ <div class="col-md-6">{{- "" -}}
+ <a href="{{ result.url }}" {% if results_on_new_tab %}target="_blank" rel="noopener noreferrer"{% else %}rel="noreferrer"{% endif %} class="btn btn-default">{{ _('View source') }}</a>{{- "" -}}
+ </div>{{- "" -}}
+ </div>{{- "" -}}
+ </div>{{- "" -}}
+ </div>{{- "" -}}
+ </div>{{- "" -}}
+</div>{{- "" -}}
diff --git a/searx/templates/oscar/result_templates/key-value.html b/searx/templates/oscar/result_templates/key-value.html
new file mode 100644
index 000000000..67c748e7f
--- /dev/null
+++ b/searx/templates/oscar/result_templates/key-value.html
@@ -0,0 +1,19 @@
+{% from 'oscar/macros.html' import result_footer, result_footer_rtl with context %}
+<div class="panel panel-default">
+<table class="table table-responsive table-bordered table-condensed">
+ {% for key, value in result.items() %}
+ {% if key in ['engine', 'engines', 'template', 'score', 'category', 'positions'] %}
+ {% continue %}
+ {% endif %}
+ <tr>
+ <td><b>{{ key|upper }}</b>: {{ value }}</td>
+ </tr>
+ {% endfor %}
+</table>
+
+{% if rtl %}
+{{ result_footer_rtl(result) }}
+{% else %}
+{{ result_footer(result) }}
+{% endif %}
+</div>
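Note: the {% continue %} used in this new template is not part of core Jinja2; it only works because the same patch enables the jinja2.ext.loopcontrols extension in searx/webapp.py. A rough, self-contained sketch of the pattern (the result keys and values below are invented for illustration):

    from jinja2 import Environment

    env = Environment(extensions=['jinja2.ext.loopcontrols'])
    tmpl = env.from_string(
        "{% for key, value in result.items() %}"
        "{% if key in ['engine', 'template'] %}{% continue %}{% endif %}"
        "{{ key }}={{ value }} "
        "{% endfor %}"
    )
    # bookkeeping keys are skipped, everything else is emitted as key/value pairs
    print(tmpl.render(result={'engine': 'example', 'template': 'key-value.html',
                              'uptime': '42 days', 'load': '0.17'}))
    # -> 'uptime=42 days load=0.17 '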
diff --git a/searx/templates/oscar/result_templates/map.html b/searx/templates/oscar/result_templates/map.html
index 822c7cdea..712375d7f 100644
--- a/searx/templates/oscar/result_templates/map.html
+++ b/searx/templates/oscar/result_templates/map.html
@@ -1,72 +1,72 @@
-{% from 'oscar/macros.html' import result_header, result_sub_header, result_footer, result_footer_rtl, icon %}
-
-{{ result_header(result, favicons) }}
-{{ result_sub_header(result) }}
-
-{% if (result.latitude and result.longitude) or result.boundingbox %}
- <small> &bull; <a class="text-info btn-collapse collapsed searx_init_map cursor-pointer disabled_if_nojs" data-toggle="collapse" data-target="#result-map-{{ index }}" data-leaflet-target="osm-map-{{ index }}" data-map-lon="{{ result.longitude }}" data-map-lat="{{ result.latitude }}" {% if result.boundingbox %}data-map-boundingbox='{{ result.boundingbox|tojson|safe }}'{% endif %} {% if result.geojson %}data-map-geojson='{{ result.geojson|tojson|safe }}'{% endif %} data-btn-text-collapsed="{{ _('show map') }}" data-btn-text-not-collapsed="{{ _('hide map') }}">{{ icon('globe') }} {{ _('show map') }}</a></small>
-{% endif %}
-
-{% if result.osm and (result.osm.type and result.osm.id) %}
- <small> &bull; <a class="text-info btn-collapse collapsed cursor-pointer searx_overpass_request disabled_if_nojs" data-toggle="collapse" data-target="#result-overpass-{{ index }}" data-osm-type="{{ result.osm.type }}" data-osm-id="{{ result.osm.id }}" data-result-table="result-overpass-table-{{ index }}" data-result-table-loadicon="result-overpass-table-loading-{{ index }}" data-btn-text-collapsed="{{ _('show details') }}" data-btn-text-not-collapsed="{{ _('hide details') }}">{{ icon('map-marker') }} {{ _('show details') }}</a></small>
-{% endif %}
-
-{# {% if (result.latitude and result.longitude) %}
- <small> &bull; <a class="text-info btn-collapse collapsed cursor-pointer disabled_if_nojs" data-toggle="collapse" data-target="#result-geodata-{{ index }}" data-btn-text-collapsed="{{ _('show geodata') }}" data-btn-text-not-collapsed="{{ _('hide geodata') }}">{{ icon('map-marker') }} {{ _('show geodata') }}</a></small>
-{% endif %} #}
-
-<div class="container-fluid">
-
-{% if result.address %}
-<p class="row result-content result-adress col-xs-12 col-sm-5 col-md-4" itemscope itemtype="http://schema.org/PostalAddress">
- {% if result.address.name %}
- <strong itemprop="name">{{ result.address.name }}</strong><br/>
- {% endif %}
- {% if result.address.road %}
- <span itemprop="streetAddress">
- {% if result.address.house_number %}{{ result.address.house_number }}, {% endif %}
- {{ result.address.road }}
- </span><br/>
- {% endif %}
- {% if result.address.locality %}
- <span itemprop="addressLocality">{{ result.address.locality }}</span>
- {% if result.address.postcode %}, <span itemprop="postalCode">{{ result.address.postcode }}</span>{% endif %}
- <br/>
- {% endif %}
- {% if result.address.country %}
- <span itemprop="addressCountry">{{ result.address.country }}</span>
- {% endif %}
-</p>
-{% endif %}
-
-{% if result.osm and (result.osm.type and result.osm.id) %}
- <div class="row result-content collapse col-xs-12 col-sm-7 col-md-8" id="result-overpass-{{ index }}"{% if rtl %} dir="ltr"{% endif %}>
- <div class="text-center" id="result-overpass-table-loading-{{ index }}"><img src="{{ url_for('static', filename='img/loader.gif') }}" alt="Loading ..."/></div>
- <table class="table table-striped table-condensed hidden" id="result-overpass-table-{{ index }}">
- <tr><th>key</th><th>value</th></tr>
- </table>
- </div>
-{% endif %}
-
-{# {% if (result.latitude and result.longitude) %}
- <div class="row collapse col-xs-12 col-sm-5 col-md-4" id="result-geodata-{{ index }}">
- <strong>Longitude:</strong> {{ result.longitude }} <br/>
- <strong>Latitude:</strong> {{ result.latitude }}
- </div>
-{% endif %} #}
-
-{% if result.content %}<p class="row result-content col-xs-12 col-sm-12 col-md-12">{{ result.content|safe }}</p>{% endif %}
-
-</div>
-
-{% if (result.latitude and result.longitude) or result.boundingbox %}
- <div class="collapse" id="result-map-{{ index }}">
- <div style="height:300px; width:100%; margin: 10px 0;" id="osm-map-{{ index }}"></div>
- </div>
-{% endif %}
-
-{% if rtl %}
-{{ result_footer_rtl(result) }}
-{% else %}
-{{ result_footer(result) }}
-{% endif %}
+{% from 'oscar/macros.html' import result_header, result_sub_header, result_footer, result_footer_rtl, icon %}
+
+{{ result_header(result, favicons) }}
+{{ result_sub_header(result) }}
+
+{% if (result.latitude and result.longitude) or result.boundingbox %}
+ <small> &bull; <a class="text-info btn-collapse collapsed searx_init_map cursor-pointer disabled_if_nojs" data-toggle="collapse" data-target="#result-map-{{ index }}" data-leaflet-target="osm-map-{{ index }}" data-map-lon="{{ result.longitude }}" data-map-lat="{{ result.latitude }}" {% if result.boundingbox %}data-map-boundingbox='{{ result.boundingbox|tojson|safe }}'{% endif %} {% if result.geojson %}data-map-geojson='{{ result.geojson|tojson|safe }}'{% endif %} data-btn-text-collapsed="{{ _('show map') }}" data-btn-text-not-collapsed="{{ _('hide map') }}">{{ icon('globe') }} {{ _('show map') }}</a></small>
+{% endif %}
+
+{% if result.osm and (result.osm.type and result.osm.id) %}
+ <small> &bull; <a class="text-info btn-collapse collapsed cursor-pointer searx_overpass_request disabled_if_nojs" data-toggle="collapse" data-target="#result-overpass-{{ index }}" data-osm-type="{{ result.osm.type }}" data-osm-id="{{ result.osm.id }}" data-result-table="result-overpass-table-{{ index }}" data-result-table-loadicon="result-overpass-table-loading-{{ index }}" data-btn-text-collapsed="{{ _('show details') }}" data-btn-text-not-collapsed="{{ _('hide details') }}">{{ icon('map-marker') }} {{ _('show details') }}</a></small>
+{% endif %}
+
+{# {% if (result.latitude and result.longitude) %}
+ <small> &bull; <a class="text-info btn-collapse collapsed cursor-pointer disabled_if_nojs" data-toggle="collapse" data-target="#result-geodata-{{ index }}" data-btn-text-collapsed="{{ _('show geodata') }}" data-btn-text-not-collapsed="{{ _('hide geodata') }}">{{ icon('map-marker') }} {{ _('show geodata') }}</a></small>
+{% endif %} #}
+
+<div class="container-fluid">
+
+{% if result.address %}
+<p class="row result-content result-adress col-xs-12 col-sm-5 col-md-4" itemscope itemtype="http://schema.org/PostalAddress">
+ {% if result.address.name %}
+ <strong itemprop="name">{{ result.address.name }}</strong><br/>
+ {% endif %}
+ {% if result.address.road %}
+ <span itemprop="streetAddress">
+ {% if result.address.house_number %}{{ result.address.house_number }}, {% endif %}
+ {{ result.address.road }}
+ </span><br/>
+ {% endif %}
+ {% if result.address.locality %}
+ <span itemprop="addressLocality">{{ result.address.locality }}</span>
+ {% if result.address.postcode %}, <span itemprop="postalCode">{{ result.address.postcode }}</span>{% endif %}
+ <br/>
+ {% endif %}
+ {% if result.address.country %}
+ <span itemprop="addressCountry">{{ result.address.country }}</span>
+ {% endif %}
+</p>
+{% endif %}
+
+{% if result.osm and (result.osm.type and result.osm.id) %}
+ <div class="row result-content collapse col-xs-12 col-sm-7 col-md-8" id="result-overpass-{{ index }}"{% if rtl %} dir="ltr"{% endif %}>
+ <div class="text-center" id="result-overpass-table-loading-{{ index }}"><img src="{{ url_for('static', filename='img/loader.gif') }}" alt="Loading ..."/></div>
+ <table class="table table-striped table-condensed hidden" id="result-overpass-table-{{ index }}">
+ <tr><th>key</th><th>value</th></tr>
+ </table>
+ </div>
+{% endif %}
+
+{# {% if (result.latitude and result.longitude) %}
+ <div class="row collapse col-xs-12 col-sm-5 col-md-4" id="result-geodata-{{ index }}">
+ <strong>Longitude:</strong> {{ result.longitude }} <br/>
+ <strong>Latitude:</strong> {{ result.latitude }}
+ </div>
+{% endif %} #}
+
+{% if result.content %}<p class="row result-content col-xs-12 col-sm-12 col-md-12">{{ result.content|safe }}</p>{% endif %}
+
+</div>
+
+{% if (result.latitude and result.longitude) or result.boundingbox %}
+ <div class="collapse" id="result-map-{{ index }}">
+ <div style="height:300px; width:100%; margin: 10px 0;" id="osm-map-{{ index }}"></div>
+ </div>
+{% endif %}
+
+{% if rtl %}
+{{ result_footer_rtl(result) }}
+{% else %}
+{{ result_footer(result) }}
+{% endif %}
diff --git a/searx/templates/oscar/result_templates/torrent.html b/searx/templates/oscar/result_templates/torrent.html
index f5ea415e2..089367e36 100644
--- a/searx/templates/oscar/result_templates/torrent.html
+++ b/searx/templates/oscar/result_templates/torrent.html
@@ -3,7 +3,7 @@
{{ result_header(result, favicons) }}
{{ result_sub_header(result) }}
-{% if result.seed %}<p class="result-content">{{ icon('transfer') }} {{ _('Seeder') }} <span class="badge">{{ result.seed }}</span> &bull; {{ _('Leecher') }} <span class="badge">{{ result.leech }}</span>{% endif %}
+{% if result.seed is defined %}<p class="result-content">{{ icon('transfer') }} {{ _('Seeder') }} <span class="badge">{{ result.seed }}</span> &bull; {{ _('Leecher') }} <span class="badge">{{ result.leech }}</span>{% endif %}
{% if result.filesize %}<br />{{ icon('floppy-disk') }} {{ _('Filesize') }}
<span class="badge">
{% if result.filesize < 1024 %}{{ result.filesize }} {{ _('Bytes') }}
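Note: replacing {% if result.seed %} with {% if result.seed is defined %} (here and in the simple theme's torrent template further down) is more than cosmetic, because 0 is falsy in Jinja2: a torrent with zero seeders previously lost its whole seeder/leecher line. A small sketch of the difference with plain jinja2:

    from jinja2 import Template

    truthy  = Template("{% if seed %}Seeder {{ seed }}{% endif %}")
    defined = Template("{% if seed is defined %}Seeder {{ seed }}{% endif %}")

    print(repr(truthy.render(seed=0)))   # -> ''          (the stat line vanished)
    print(repr(defined.render(seed=0)))  # -> 'Seeder 0'  (zero seeders still reported)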
diff --git a/searx/templates/oscar/result_templates/videos.html b/searx/templates/oscar/result_templates/videos.html
index 36fb26240..3c1913d9d 100644
--- a/searx/templates/oscar/result_templates/videos.html
+++ b/searx/templates/oscar/result_templates/videos.html
@@ -1,27 +1,27 @@
-{% from 'oscar/macros.html' import result_header, result_sub_header, result_footer, result_footer_rtl, icon %}
-
-{{ result_header(result, favicons) }}
-{{ result_sub_header(result) }}
-
-{% if result.embedded %}
- <small> &bull; <a class="text-info btn-collapse collapsed cursor-pointer media-loader disabled_if_nojs" data-toggle="collapse" data-target="#result-video-{{ index }}" data-btn-text-collapsed="{{ _('show video') }}" data-btn-text-not-collapsed="{{ _('hide video') }}">{{ icon('film') }} {{ _('show video') }}</a></small>
-{% endif %}
-
-{% if result.embedded %}
-<div id="result-video-{{ index }}" class="collapse">
- {{ result.embedded|safe }}
-</div>
-{% endif %}
-
-<div class="container-fluid">
- <div class="row">
- <a href="{{ result.url }}" {% if results_on_new_tab %}target="_blank" rel="noopener noreferrer"{% else %}rel="noreferrer"{% endif %}><img class="thumbnail col-xs-6 col-sm-4 col-md-4 result-content" src="{{ image_proxify(result.thumbnail) }}" alt="{{ result.title|striptags }} {{ result.engine }}" /></a>
- {% if result.content %}<p class="col-xs-12 col-sm-8 col-md-8 result-content">{{ result.content|safe }}</p>{% endif %}
- </div>
-</div>
-
-{% if rtl %}
-{{ result_footer_rtl(result) }}
-{% else %}
-{{ result_footer(result) }}
-{% endif %}
+{% from 'oscar/macros.html' import result_header, result_sub_header, result_footer, result_footer_rtl, icon %}
+
+{{ result_header(result, favicons) }}
+{{ result_sub_header(result) }}
+
+{% if result.embedded %}
+ <small> &bull; <a class="text-info btn-collapse collapsed cursor-pointer media-loader disabled_if_nojs" data-toggle="collapse" data-target="#result-video-{{ index }}" data-btn-text-collapsed="{{ _('show video') }}" data-btn-text-not-collapsed="{{ _('hide video') }}">{{ icon('film') }} {{ _('show video') }}</a></small>
+{% endif %}
+
+{% if result.embedded %}
+<div id="result-video-{{ index }}" class="collapse">
+ {{ result.embedded|safe }}
+</div>
+{% endif %}
+
+<div class="container-fluid">
+ <div class="row">
+ <a href="{{ result.url }}" {% if results_on_new_tab %}target="_blank" rel="noopener noreferrer"{% else %}rel="noreferrer"{% endif %}><img class="thumbnail col-xs-6 col-sm-4 col-md-4 result-content" src="{{ image_proxify(result.thumbnail) }}" alt="{{ result.title|striptags }} {{ result.engine }}" /></a>
+ {% if result.content %}<p class="col-xs-12 col-sm-8 col-md-8 result-content">{{ result.content|safe }}</p>{% endif %}
+ </div>
+</div>
+
+{% if rtl %}
+{{ result_footer_rtl(result) }}
+{% else %}
+{{ result_footer(result) }}
+{% endif %}
diff --git a/searx/templates/oscar/results.html b/searx/templates/oscar/results.html
index 9a95265b9..9cf942695 100644
--- a/searx/templates/oscar/results.html
+++ b/searx/templates/oscar/results.html
@@ -1,156 +1,156 @@
-{% extends "oscar/base.html" %}
-{% macro search_form_attrs(pageno) -%}
- {% for category in selected_categories %}<input type="hidden" name="category_{{ category }}" value="1"/>{% endfor %}
- <input type="hidden" name="q" value="{{ q|e }}" />
- <input type="hidden" name="pageno" value="{{ pageno }}" />
- <input type="hidden" name="time_range" value="{{ time_range }}" />
- <input type="hidden" name="language" value="{{ current_language }}" />
- {% if timeout_limit %}<input type="hidden" name="timeout_limit" value="{{ timeout_limit|e }}" />{% endif %}
-{%- endmacro %}
-{%- macro search_url() %}{{ base_url }}?q={{ q|urlencode }}{% if selected_categories %}&amp;categories={{ selected_categories|join(",") | replace(' ','+') }}{% endif %}{% if pageno > 1 %}&amp;pageno={{ pageno }}{% endif %}{% if time_range %}&amp;time_range={{ time_range }}{% endif %}{% if current_language != 'all' %}&amp;language={{ current_language }}{% endif %}{% endmacro -%}
-
-{% block title %}{{ q|e }} - {% endblock %}
-{% block meta %}<link rel="alternate" type="application/rss+xml" title="Searx search: {{ q|e }}" href="{{ search_url() }}&amp;format=rss">{% endblock %}
-{% block content %}
- {% include 'oscar/search.html' %}
- <div class="row">
- <div class="col-sm-8" id="main_results">
- <h1 class="sr-only">{{ _('Search results') }}</h1>
-
- {% if corrections %}
- <div class="result">
- <span class="result_header text-muted form-inline pull-left suggestion_item">{{ _('Try searching for:') }}</span>
- {% for correction in corrections %}
- <form method="{{ method or 'POST' }}" action="{{ url_for('index') }}" role="navigation" class="form-inline pull-left suggestion_item">
- <input type="hidden" name="q" value="{{ query_prefix + correction }}">
- <button type="submit" class="btn btn-default btn-xs">{{ correction }}</button>
- </form>
- {% endfor %}
- </div>
- {% endif %}
-
- {% if answers %}
- {% for answer in answers %}
- <div class="result well">
- <span>{{ answer }}</span>
- </div>
- {% endfor %}
- {% endif %}
-
- {% for result in results %}
- <div class="result {% if result['template'] %}result-{{ result.template|replace('.html', '') }}{% else %}result-default{% endif %}">
- {% set index = loop.index %}
- {% if result.template %}
- {% include get_result_template('oscar', result['template']) %}
- {% else %}
- {% include 'oscar/result_templates/default.html' %}
- {% endif %}
- </div>
- {% endfor %}
-
- {% if not results and not answers %}
- {% include 'oscar/messages/no_results.html' %}
- {% endif %}
-
- <div class="clearfix"></div>
-
- {% if paging %}
- {% if rtl %}
- <div id="pagination">
- <div class="pull-left">
- <form method="{{ method or 'POST' }}" action="{{ url_for('index') }}" class="pull-left">
- {{ search_form_attrs(pageno+1) }}
- <button type="submit" class="btn btn-default"><span class="glyphicon glyphicon-backward"></span> {{ _('next page') }}</button>
- </form>
- </div>
- <div class="pull-right">
- <form method="{{ method or 'POST' }}" action="{{ url_for('index') }}" class="pull-left">
- {{ search_form_attrs(pageno-1) }}
- <button type="submit" class="btn btn-default" {% if pageno == 1 %}disabled{% endif %}><span class="glyphicon glyphicon-forward"></span> {{ _('previous page') }}</button>
- </form>
- </div>
- </div><!-- /#pagination -->
- <div class="clearfix"></div>
- {% else %}
- <div id="pagination">
- <div class="pull-left">
- <form method="{{ method or 'POST' }}" action="{{ url_for('index') }}" class="pull-left">
- {{ search_form_attrs(pageno-1) }}
- <button type="submit" class="btn btn-default" {% if pageno == 1 %}disabled{% endif %}><span class="glyphicon glyphicon-backward"></span> {{ _('previous page') }}</button>
- </form>
- </div>
- <div class="pull-right">
- <form method="{{ method or 'POST' }}" action="{{ url_for('index') }}" class="pull-left">
- {{ search_form_attrs(pageno+1) }}
- <button type="submit" class="btn btn-default"><span class="glyphicon glyphicon-forward"></span> {{ _('next page') }}</button>
- </form>
- </div>
- </div><!-- /#pagination -->
- <div class="clearfix"></div>
- {% endif %}
- {% endif %}
- </div><!-- /#main_results -->
-
- <div class="col-sm-4" id="sidebar_results">
- {% if number_of_results != '0' %}
- <p><small>{{ _('Number of results') }}: {{ number_of_results }}</small></p>
- {% endif %}
-
- {% if unresponsive_engines and results|length >= 1 %}
- <div class="alert alert-danger fade in" role="alert">
- <p>{{ _('Engines cannot retrieve results') }}:</p>
- {% for engine_name, error_type in unresponsive_engines %}
- {{ engine_name }} ({{ error_type }}){% if not loop.last %}, {% endif %}
- {% endfor %}
- </div>
- {% endif %}
-
- {% if infoboxes %}
- {% for infobox in infoboxes %}
- {% include 'oscar/infobox.html' %}
- {% endfor %}
- {% endif %}
-
- {% if suggestions %}
- <div class="panel panel-default">
- <div class="panel-heading">
- <h4 class="panel-title">{{ _('Suggestions') }}</h4>
- </div>
- <div class="panel-body">
- {% for suggestion in suggestions %}
- <form method="{{ method or 'POST' }}" action="{{ url_for('index') }}" role="navigation" class="form-inline pull-{% if rtl %}right{% else %}left{% endif %} suggestion_item">
- <input type="hidden" name="q" value="{{ suggestion.url }}">
- <button type="submit" class="btn btn-default btn-xs">{{ suggestion.title }}</button>
- </form>
- {% endfor %}
- </div>
- </div>
- {% endif %}
-
- <div class="panel panel-default">
- <div class="panel-heading">
- <h4 class="panel-title">{{ _('Links') }}</h4>
- </div>
- <div class="panel-body">
- <form role="form">
- <div class="form-group">
- <label for="search_url">{{ _('Search URL') }}</label>
- <input id="search_url" type="url" class="form-control select-all-on-click cursor-text" name="search_url" value="{{ search_url() }}" readonly>
- </div>
- </form>
-
- <label>{{ _('Download results') }}</label>
- <div class="clearfix"></div>
- {% for output_type in ('csv', 'json', 'rss') %}
- <form method="{{ method or 'POST' }}" action="{{ url_for('index') }}" class="form-inline pull-{% if rtl %}right{% else %}left{% endif %} result_download">
- {{ search_form_attrs(pageno) }}
- <input type="hidden" name="format" value="{{ output_type }}">
- <button type="submit" class="btn btn-default">{{ output_type }}</button>
- </form>
- {% endfor %}
- <div class="clearfix"></div>
- </div>
- </div>
- </div><!-- /#sidebar_results -->
- </div>
-{% endblock %}
+{% extends "oscar/base.html" %}
+{% macro search_form_attrs(pageno) -%}
+ {%- for category in selected_categories -%}<input type="hidden" name="category_{{ category }}" value="1"/>{%- endfor -%}
+ <input type="hidden" name="q" value="{{ q|e }}" />{{- "" -}}
+ <input type="hidden" name="pageno" value="{{ pageno }}" />{{- "" -}}
+ <input type="hidden" name="time_range" value="{{ time_range }}" />{{- "" -}}
+ <input type="hidden" name="language" value="{{ current_language }}" />{{- "" -}}
+ {% if timeout_limit %}<input type="hidden" name="timeout_limit" value="{{ timeout_limit|e }}" />{% endif -%}
+{%- endmacro %}
+{%- macro search_url() %}{{ base_url }}?q={{ q|urlencode }}{% if selected_categories %}&amp;categories={{ selected_categories|join(",") | replace(' ','+') }}{% endif %}{% if pageno > 1 %}&amp;pageno={{ pageno }}{% endif %}{% if time_range %}&amp;time_range={{ time_range }}{% endif %}{% if current_language != 'all' %}&amp;language={{ current_language }}{% endif %}{% endmacro -%}
+
+{% block title %}{{ q|e }} - {% endblock %}
+{% block meta %}{{" "}}<link rel="alternate" type="application/rss+xml" title="Searx search: {{ q|e }}" href="{{ search_url() }}&amp;format=rss">{% endblock %}
+{% block content %}
+ {% include 'oscar/search.html' %}
+
+ <div class="row">
+ <div class="col-sm-8" id="main_results">
+ <h1 class="sr-only">{{ _('Search results') }}</h1>
+
+ {% if corrections -%}
+ <div class="result">
+ <span class="result_header text-muted form-inline pull-left suggestion_item">{{ _('Try searching for:') }}</span>
+ {% for correction in corrections -%}
+ <form method="{{ method or 'POST' }}" action="{{ url_for('index') }}" role="navigation" class="form-inline pull-left suggestion_item">{{- "" -}}
+ <input type="hidden" name="q" value="{{ correction.url }}">{{- "" -}}
+ <button type="submit" class="btn btn-default btn-xs">{{ correction.title }}</button>{{- "" -}}
+ </form>
+ {% endfor %}
+ </div>
+ {%- endif %}
+
+ {% if answers -%}
+ {%- for answer in answers %}
+ <div class="result well">
+ <span>{{ answer }}</span>
+ </div>
+ {%- endfor %}
+ {%- endif %}
+
+ {% for result in results -%}
+ <div class="result {% if result['template'] %}result-{{ result.template|replace('.html', '') }}{% else %}result-default{% endif %}">
+ {%- set index = loop.index -%}
+ {%- if result.template -%}
+ {% include get_result_template('oscar', result['template']) %}
+ {%- else -%}
+ {% include 'oscar/result_templates/default.html' %}
+ {%- endif -%}
+ </div>
+ {% endfor %}
+
+ {% if not results and not answers -%}
+ {% include 'oscar/messages/no_results.html' %}
+ {% endif %}
+
+ <div class="clearfix"></div>
+
+ {% if paging -%}
+ {% if rtl %}
+ <div id="pagination">
+ <div class="pull-left">{{- "" -}}
+ <form method="{{ method or 'POST' }}" action="{{ url_for('index') }}" class="pull-left">
+ {{- search_form_attrs(pageno+1) -}}
+ <button type="submit" class="btn btn-default"><span class="glyphicon glyphicon-backward"></span> {{ _('next page') }}</button>{{- "" -}}
+ </form>{{- "" -}}
+ </div>
+ <div class="pull-right">{{- "" -}}
+ <form method="{{ method or 'POST' }}" action="{{ url_for('index') }}" class="pull-left">
+ {{- search_form_attrs(pageno-1) -}}
+ <button type="submit" class="btn btn-default" {% if pageno == 1 %}disabled{% endif %}><span class="glyphicon glyphicon-forward"></span> {{ _('previous page') }}</button>{{- "" -}}
+ </form>{{- "" -}}
+ </div>
+ </div><!-- /#pagination -->
+ <div class="clearfix"></div>
+ {% else %}
+ <div id="pagination">
+ <div class="pull-left">{{- "" -}}
+ <form method="{{ method or 'POST' }}" action="{{ url_for('index') }}" class="pull-left">
+ {{- search_form_attrs(pageno-1) -}}
+ <button type="submit" class="btn btn-default" {% if pageno == 1 %}disabled{% endif %}><span class="glyphicon glyphicon-backward"></span> {{ _('previous page') }}</button>{{- "" -}}
+ </form>{{- "" -}}
+ </div>
+ <div class="pull-right">{{- "" -}}
+ <form method="{{ method or 'POST' }}" action="{{ url_for('index') }}" class="pull-left">
+ {{- search_form_attrs(pageno+1) -}}
+ <button type="submit" class="btn btn-default"><span class="glyphicon glyphicon-forward"></span> {{ _('next page') }}</button>{{- "" -}}
+ </form>{{- "" -}}
+ </div>
+ </div><!-- /#pagination -->
+ <div class="clearfix"></div>
+ {% endif %}
+ {% endif %}
+ </div><!-- /#main_results -->
+
+ <div class="col-sm-4" id="sidebar_results">
+ {% if number_of_results != '0' -%}
+ <p><small>{{ _('Number of results') }}: {{ number_of_results }}</small></p>
+ {%- endif %}
+
+ {% if unresponsive_engines and results|length >= 1 -%}
+ <div class="alert alert-danger fade in" role="alert">
+ <p>{{ _('Engines cannot retrieve results') }}:</p>
+ {%- for engine_name, error_type in unresponsive_engines -%}
+ {{- engine_name }} ({{ error_type }}){% if not loop.last %}, {% endif %}{{- "" -}}
+ {%- endfor -%}
+ </div>
+ {%- endif %}
+
+ {% if infoboxes -%}
+ {% for infobox in infoboxes %}
+ {% include 'oscar/infobox.html' %}{{- "\n\n" -}}
+ {% endfor %}
+ {%- endif %}
+
+ {% if suggestions %}
+ <div class="panel panel-default">
+ <div class="panel-heading">
+ <h4 class="panel-title">{{ _('Suggestions') }}</h4>
+ </div>
+ <div class="panel-body">
+ {% for suggestion in suggestions %}
+ <form method="{{ method or 'POST' }}" action="{{ url_for('index') }}" role="navigation" class="form-inline pull-{% if rtl %}right{% else %}left{% endif %} suggestion_item">
+ <input type="hidden" name="q" value="{{ suggestion.url }}">
+ <button type="submit" class="btn btn-default btn-xs">{{ suggestion.title }}</button>
+ </form>
+ {% endfor %}
+ </div>
+ </div>
+ {%- endif %}
+
+ <div class="panel panel-default">
+ <div class="panel-heading">{{- "" -}}
+ <h4 class="panel-title">{{ _('Links') }}</h4>{{- "" -}}
+ </div>
+ <div class="panel-body">
+ <form role="form">{{- "" -}}
+ <div class="form-group">{{- "" -}}
+ <label for="search_url">{{ _('Search URL') }}</label>{{- "" -}}
+ <input id="search_url" type="url" class="form-control select-all-on-click cursor-text" name="search_url" value="{{ search_url() }}" readonly>{{- "" -}}
+ </div>{{- "" -}}
+ </form>
+ <label>{{ _('Download results') }}</label>
+ <div class="clearfix"></div>
+ {% for output_type in ('csv', 'json', 'rss') %}
+ <form method="{{ method or 'POST' }}" action="{{ url_for('index') }}" class="form-inline pull-{% if rtl %}right{% else %}left{% endif %} result_download">
+ {{- search_form_attrs(pageno) -}}
+ <input type="hidden" name="format" value="{{ output_type }}">{{- "" -}}
+ <button type="submit" class="btn btn-default">{{ output_type }}</button>{{- "" -}}
+ </form>
+ {% endfor %}
+ <div class="clearfix"></div>
+ </div>
+ </div>
+ </div><!-- /#sidebar_results -->
+ </div>
+{% endblock %}
diff --git a/searx/templates/oscar/search.html b/searx/templates/oscar/search.html
index 59ee4688d..cad9eca89 100644
--- a/searx/templates/oscar/search.html
+++ b/searx/templates/oscar/search.html
@@ -1,24 +1,24 @@
-{% from 'oscar/macros.html' import icon %}
-<form method="{{ method or 'POST' }}" action="{{ url_for('index') }}" id="search_form" role="search">
- <div class="row">
- <div class="col-xs-12 col-md-8">
- <div class="input-group search-margin">
- <input type="search" name="q" class="form-control" id="q" placeholder="{{ _('Search for...') }}" autocomplete="off" value="{{ q }}">
- <span class="input-group-btn">
- <button type="submit" class="btn btn-default"><span class="hide_if_nojs">{{ icon('search') }}</span><span class="hidden active_if_nojs">{{ _('Start search') }}</span></button>
- </span>
- </div>
- </div>
- <div class="col-xs-6 col-md-2 search-margin">
- {% include 'oscar/time-range.html' %}
- </div>
- <div class="col-xs-6 col-md-2 search-margin">
- {% include 'oscar/languages.html' %}
- </div>
- </div>
- <div class="row">
- <div class="col-sm-12">
- {% include 'oscar/categories.html' %}
- </div>
- </div>
-</form><!-- / #search_form_full -->
+{% from 'oscar/macros.html' import icon %}
+<form method="{{ method or 'POST' }}" action="{{ url_for('index') }}" id="search_form" role="search">
+ <div class="row">
+ <div class="col-xs-12 col-md-8">
+ <div class="input-group search-margin">
+ <input type="search" name="q" class="form-control" id="q" placeholder="{{ _('Search for...') }}" aria-label="{{ _('Search for...') }}" autocomplete="off" value="{{ q }}" accesskey="s">
+ <span class="input-group-btn">
+ <button type="submit" class="btn btn-default" aria-label="{{ _('Start search') }}"><span class="hide_if_nojs">{{ icon('search') }}</span><span class="hidden active_if_nojs">{{ _('Start search') }}</span></button>
+ </span>
+ </div>
+ </div>
+ <div class="col-xs-6 col-md-2 search-margin">
+ {%- include 'oscar/time-range.html' -%}
+ </div>
+ <div class="col-xs-6 col-md-2 search-margin">
+ {%- include 'oscar/languages.html' -%}
+ </div>
+ </div>
+ <div class="row">
+ <div class="col-sm-12">
+ {%- include 'oscar/categories.html' -%}
+ </div>
+ </div>
+</form><!-- / #search_form_full -->
diff --git a/searx/templates/oscar/search_full.html b/searx/templates/oscar/search_full.html
index 6fdae4028..656463178 100644
--- a/searx/templates/oscar/search_full.html
+++ b/searx/templates/oscar/search_full.html
@@ -1,18 +1,18 @@
-{% from 'oscar/macros.html' import icon %}
-
-<form method="{{ method or 'POST' }}" action="{{ url_for('index') }}" id="search_form" role="search">
- {% if rtl %}
- <div class="input-group">
- {% else %}
- <div class="input-group col-md-8 col-md-offset-2">
- {% endif %}
- <input type="search" name="q" class="form-control input-lg autofocus" id="q" placeholder="{{ _('Search for...') }}" autocomplete="off" value="{{ q }}">
- <span class="input-group-btn">
- <button type="submit" class="btn btn-default input-lg"><span class="hide_if_nojs">{{ icon('search') }}</span><span class="hidden active_if_nojs">{{ _('Start search') }}</span></button>
- </span>
- </div>
- <div class="col-md-8 col-md-offset-2 advanced">
- {% include 'oscar/advanced.html' %}
- </div>
-
-</form><!-- / #search_form_full -->
+{% from 'oscar/macros.html' import icon %}
+
+<form method="{{ method or 'POST' }}" action="{{ url_for('index') }}" id="search_form" role="search">
+ {% if rtl %}
+ <div class="input-group">
+ {% else %}
+ <div class="input-group col-md-8 col-md-offset-2">
+ {% endif %}
+ <input type="search" name="q" class="form-control input-lg autofocus" id="q" placeholder="{{ _('Search for...') }}" aria-label="{{ _('Search for...') }}" autocomplete="off" value="{{ q }}" accesskey="s">
+ <span class="input-group-btn">
+ <button type="submit" class="btn btn-default input-lg" aria-label="{{ _('Start search') }}"><span class="hide_if_nojs">{{ icon('search') }}</span><span class="hidden active_if_nojs">{{ _('Start search') }}</span></button>
+ </span>
+ </div>
+ <div class="col-md-8 col-md-offset-2 advanced">
+ {% include 'oscar/advanced.html' %}
+ </div>
+
+</form><!-- / #search_form_full -->
diff --git a/searx/templates/oscar/time-range.html b/searx/templates/oscar/time-range.html
index d5efe9182..fb1c0754b 100644
--- a/searx/templates/oscar/time-range.html
+++ b/searx/templates/oscar/time-range.html
@@ -1,17 +1,17 @@
-<select name="time_range" id="time-range" class="custom-select form-control">
+<select name="time_range" id="time-range" class="custom-select form-control" accesskey="t">{{- "" -}}
<option id="time-range-anytime" value="" {{ "selected" if time_range=="" or not time_range else ""}}>
- {{ _('Anytime') }}
- </option>
+ {{- _('Anytime') -}}
+ </option>{{- "" -}}
<option id="time-range-day" value="day" {{ "selected" if time_range=="day" else ""}}>
- {{ _('Last day') }}
- </option>
+ {{- _('Last day') -}}
+ </option>{{- "" -}}
<option id="time-range-week" value="week" {{ "selected" if time_range=="week" else ""}}>
- {{ _('Last week') }}
- </option>
+ {{- _('Last week') -}}
+ </option>{{- "" -}}
<option id="time-range-month" value="month" {{ "selected" if time_range=="month" else ""}}>
- {{ _('Last month') }}
- </option>
+ {{- _('Last month') -}}
+ </option>{{- "" -}}
<option id="time-range-year" value="year" {{ "selected" if time_range=="year" else ""}}>
- {{ _('Last year') }}
- </option>
+ {{- _('Last year') -}}
+ </option>{{- "" -}}
</select>
diff --git a/searx/templates/simple/result_templates/key-value.html b/searx/templates/simple/result_templates/key-value.html
new file mode 100644
index 000000000..eebaa2c85
--- /dev/null
+++ b/searx/templates/simple/result_templates/key-value.html
@@ -0,0 +1,11 @@
+<table>
+ {% for key, value in result.items() %}
+ {% if key in ['engine', 'engines', 'template', 'score', 'category', 'positions'] %}
+ {% continue %}
+ {% endif %}
+ <tr>
+ <td><b>{{ key|upper }}</b>: {{ value }}</td>
+ </tr>
+ {% endfor %}
+</table>
+<div class="engines">{% for engine in result.engines %}<span>{{ engine }}</span>{% endfor %}</div>{{- '' -}}
diff --git a/searx/templates/simple/result_templates/torrent.html b/searx/templates/simple/result_templates/torrent.html
index 3c7fd15e8..71c775bc9 100644
--- a/searx/templates/simple/result_templates/torrent.html
+++ b/searx/templates/simple/result_templates/torrent.html
@@ -6,7 +6,7 @@
{% if result.magnetlink %}<p class="altlink"> &bull; {{ result_link(result.magnetlink, icon('magnet') + _('magnet link'), "magnetlink") }}</p>{% endif %}
{% if result.torrentfile %}<p class="altlink"> &bull; {{ result_link(result.torrentfile, icon('download-alt') + _('torrent file'), "torrentfile") }}</p>{% endif %}
-{% if result.seed %}<p class="stat"> &bull; {{ icon('arrow-swap') }} {{ _('Seeder') }} <span class="badge">{{ result.seed }}</span> &bull; {{ _('Leecher') }} <span class="badge">{{ result.leech }}</span></p>{% endif %}
+{% if result.seed is defined %}<p class="stat"> &bull; {{ icon('arrow-swap') }} {{ _('Seeder') }} <span class="badge">{{ result.seed }}</span> &bull; {{ _('Leecher') }} <span class="badge">{{ result.leech }}</span></p>{% endif %}
{%- if result.filesize %}<p class="stat">{{ icon('floppy-disk') }} {{ _('Filesize') }}<span class="badge">
{%- if result.filesize < 1024 %}{{ result.filesize }} {{ _('Bytes') }}
diff --git a/searx/templates/simple/results.html b/searx/templates/simple/results.html
index 770eebe81..8885abc30 100644
--- a/searx/templates/simple/results.html
+++ b/searx/templates/simple/results.html
@@ -95,13 +95,13 @@
{% for correction in corrections %}
<div class="left">
<form method="{{ method or 'POST' }}" action="{{ url_for('index') }}" role="navigation">
- <input type="hidden" name="q" value="{{ correction }}">
+ <input type="hidden" name="q" value="{{ correction.url }}">
<input type="hidden" name="time_range" value="{{ time_range }}">
<input type="hidden" name="language" value="{{ current_language }}">
<input type="hidden" name="safesearch" value="{{ safesearch }}">
<input type="hidden" name="theme" value="{{ theme }}">
{% if timeout_limit %}<input type="hidden" name="timeout_limit" value="{{ timeout_limit }}" >{% endif %}
- <input type="submit" value="{{ correction }}">
+ <input type="submit" value="{{ correction.title }}">
</form>
</div>
{% endfor %}
diff --git a/searx/utils.py b/searx/utils.py
index d88bc9897..5ea9dc89c 100644
--- a/searx/utils.py
+++ b/searx/utils.py
@@ -13,6 +13,7 @@ from numbers import Number
from os.path import splitext, join
from io import open
from random import choice
+from lxml.etree import XPath
import sys
import json
@@ -51,6 +52,7 @@ ecma_unescape2_re = re.compile(r'%([0-9a-fA-F]{2})', re.UNICODE)
useragents = json.loads(open(os.path.dirname(os.path.realpath(__file__))
+ "/data/useragents.json", 'r', encoding='utf-8').read())
+xpath_cache = dict()
lang_to_lc_cache = dict()
@@ -308,14 +310,15 @@ def int_or_zero(num):
def is_valid_lang(lang):
is_abbr = (len(lang) == 2)
+ lang = lang.lower().decode('utf-8')
if is_abbr:
for l in language_codes:
- if l[0][:2] == lang.lower():
+ if l[0][:2] == lang:
return (True, l[0][:2], l[3].lower())
return False
else:
for l in language_codes:
- if l[1].lower() == lang.lower():
+ if l[1].lower() == lang or l[3].lower() == lang:
return (True, l[0][:2], l[3].lower())
return False
@@ -434,3 +437,31 @@ def ecma_unescape(s):
# "%20" becomes " ", "%F3" becomes "ó"
s = ecma_unescape2_re.sub(lambda e: unichr(int(e.group(1), 16)), s)
return s
+
+
+def get_engine_from_settings(name):
+ """Return engine configuration from settings.yml of a given engine name"""
+
+ if 'engines' not in settings:
+ return {}
+
+ for engine in settings['engines']:
+ if 'name' not in engine:
+ continue
+ if name == engine['name']:
+ return engine
+
+ return {}
+
+
+def get_xpath(xpath_str):
+ result = xpath_cache.get(xpath_str, None)
+ if result is None:
+ result = XPath(xpath_str)
+ xpath_cache[xpath_str] = result
+ return result
+
+
+def eval_xpath(element, xpath_str):
+ xpath = get_xpath(xpath_str)
+ return xpath(element)
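Note: get_xpath()/eval_xpath() memoize compiled lxml XPath objects in xpath_cache, so an engine that evaluates the same expression for every result only pays the compilation cost once. A usage sketch, assuming only lxml and the helpers added above (the HTML snippet and the expression are made up):

    from lxml import html
    from searx.utils import eval_xpath

    doc = html.fromstring('<div><a class="r" href="https://example.org/">hit</a></div>')

    # the first call compiles '//a[@class="r"]/@href' and caches the XPath object;
    # any later call with the same expression string reuses the compiled form
    links = eval_xpath(doc, '//a[@class="r"]/@href')
    print(links)  # -> ['https://example.org/']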
diff --git a/searx/webapp.py b/searx/webapp.py
index ffe9b4da9..212c874c9 100644
--- a/searx/webapp.py
+++ b/searx/webapp.py
@@ -41,7 +41,10 @@ except:
logger.critical("cannot import dependency: pygments")
from sys import exit
exit(1)
-from cgi import escape
+try:
+ from cgi import escape
+except:
+ from html import escape
from datetime import datetime, timedelta
from time import time
from werkzeug.contrib.fixers import ProxyFix
@@ -124,6 +127,7 @@ app = Flask(
app.jinja_env.trim_blocks = True
app.jinja_env.lstrip_blocks = True
+app.jinja_env.add_extension('jinja2.ext.loopcontrols')
app.secret_key = settings['server']['secret_key']
if not searx_debug \
@@ -153,20 +157,18 @@ outgoing_proxies = settings['outgoing'].get('proxies') or None
@babel.localeselector
def get_locale():
- locale = request.accept_languages.best_match(settings['locales'].keys())
-
- if request.preferences.get_value('locale') != '':
- locale = request.preferences.get_value('locale')
+ if 'locale' in request.form\
+ and request.form['locale'] in settings['locales']:
+ return request.form['locale']
if 'locale' in request.args\
and request.args['locale'] in settings['locales']:
- locale = request.args['locale']
+ return request.args['locale']
- if 'locale' in request.form\
- and request.form['locale'] in settings['locales']:
- locale = request.form['locale']
+ if request.preferences.get_value('locale') != '':
+ return request.preferences.get_value('locale')
- return locale
+ return request.accept_languages.best_match(settings['locales'].keys())
# code-highlighter
@@ -538,14 +540,16 @@ def index():
if output_format == 'html':
if 'content' in result and result['content']:
result['content'] = highlight_content(escape(result['content'][:1024]), search_query.query)
- result['title'] = highlight_content(escape(result['title'] or u''), search_query.query)
+ if 'title' in result and result['title']:
+ result['title'] = highlight_content(escape(result['title'] or u''), search_query.query)
else:
if result.get('content'):
result['content'] = html_to_text(result['content']).strip()
# removing html content and whitespace duplications
result['title'] = ' '.join(html_to_text(result['title']).strip().split())
- result['pretty_url'] = prettify_url(result['url'])
+ if 'url' in result:
+ result['pretty_url'] = prettify_url(result['url'])
# TODO, check if timezone is calculated right
if 'publishedDate' in result:
@@ -602,11 +606,17 @@ def index():
# HTML output format
# suggestions: use RawTextQuery to get the suggestion URLs with the same bang
- suggestion_urls = map(lambda suggestion: {
- 'url': raw_text_query.changeSearchQuery(suggestion).getFullQuery(),
- 'title': suggestion
- },
- result_container.suggestions)
+ suggestion_urls = list(map(lambda suggestion: {
+ 'url': raw_text_query.changeSearchQuery(suggestion).getFullQuery(),
+ 'title': suggestion
+ },
+ result_container.suggestions))
+
+ correction_urls = list(map(lambda correction: {
+ 'url': raw_text_query.changeSearchQuery(correction).getFullQuery(),
+ 'title': correction
+ },
+ result_container.corrections))
#
return render(
'results.html',
@@ -619,7 +629,7 @@ def index():
advanced_search=advanced_search,
suggestions=suggestion_urls,
answers=result_container.answers,
- corrections=result_container.corrections,
+ corrections=correction_urls,
infoboxes=result_container.infoboxes,
paging=result_container.paging,
unresponsive_engines=result_container.unresponsive_engines,
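Note on the webapp.py changes above: get_locale() keeps the same precedence as before (form parameter, then URL parameter, then saved preference, then Accept-Language) but now returns early, so the Accept-Language matching only runs as a last resort; and corrections are handed to the templates in the same {'url', 'title'} dict shape as suggestions, which is why the oscar and simple results templates switch to correction.url / correction.title. A self-contained sketch of that shape (full_query() is a stand-in for RawTextQuery.changeSearchQuery(...).getFullQuery(), and the !ddg bang is hypothetical):

    def full_query(correction):
        # stand-in: the real helper rebuilds the query string with its original bang
        return '!ddg ' + correction

    corrections = ['narcos season 2', 'narcos']
    correction_urls = [{'url': full_query(c), 'title': c} for c in corrections]
    print(correction_urls[0]['title'])  # label shown on the correction button
    print(correction_urls[0]['url'])    # value submitted as the new query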
diff --git a/setup.py b/setup.py
index 7333551fe..bd3dd5d1c 100644
--- a/setup.py
+++ b/setup.py
@@ -11,14 +11,14 @@ import sys
sys.path.insert(0, './searx')
from version import VERSION_STRING
+with open('README.rst') as f:
+ long_description = f.read()
-def read(*rnames):
- return open(os.path.join(os.path.dirname(__file__), *rnames)).read()
+with open('requirements.txt') as f:
+ requirements = [ l.strip() for l in f.readlines()]
-
-long_description = read('README.rst')
-requirements = map(str.strip, open('requirements.txt').readlines())
-dev_requirements = map(str.strip, open('requirements-dev.txt').readlines())
+with open('requirements-dev.txt') as f:
+ dev_requirements = [ l.strip() for l in f.readlines()]
setup(
name='searx',
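Note: the setup.py rewrite (like the list(map(...)) wrappers in webapp.py) replaces bare map() calls, which behave differently under Python 3: map() returns a lazy, one-shot iterator that is always truthy and is exhausted after a single pass. A short demonstration of that behaviour:

    requirements = map(str.strip, [' flask \n', ' lxml \n'])
    print(bool(requirements))    # -> True (a map object is truthy regardless of its contents)
    print(list(requirements))    # -> ['flask', 'lxml']
    print(list(requirements))    # -> []  (the iterator has already been consumed)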
diff --git a/tests/unit/engines/__init__.py b/tests/unit/engines/__init__.py
deleted file mode 100644
index e69de29bb..000000000
--- a/tests/unit/engines/__init__.py
+++ /dev/null
diff --git a/tests/unit/engines/pubmed.py b/tests/unit/engines/pubmed.py
deleted file mode 100644
index 370efe067..000000000
--- a/tests/unit/engines/pubmed.py
+++ /dev/null
@@ -1,37 +0,0 @@
-# -*- coding: utf-8 -*-
-from collections import defaultdict
-import mock
-from searx.engines import pubmed
-from searx.testing import SearxTestCase
-
-
-class TestPubmedEngine(SearxTestCase):
-
- def test_request(self):
- query = 'test_query'
- dicto = defaultdict(dict)
- dicto['pageno'] = 1
- params = pubmed.request(query, dicto)
- self.assertIn('url', params)
- self.assertIn('eutils.ncbi.nlm.nih.gov/', params['url'])
- self.assertIn('term', params['url'])
-
- def test_response(self):
- self.assertRaises(AttributeError, pubmed.response, None)
- self.assertRaises(AttributeError, pubmed.response, [])
- self.assertRaises(AttributeError, pubmed.response, '')
- self.assertRaises(AttributeError, pubmed.response, '[]')
-
- response = mock.Mock(text='<PubmedArticleSet></PubmedArticleSet>')
- self.assertEqual(pubmed.response(response), [])
-
- xml_mock = """<eSearchResult><Count>1</Count><RetMax>1</RetMax><RetStart>0</RetStart><IdList>
-<Id>1</Id>
-</IdList></eSearchResult>
-"""
-
- response = mock.Mock(text=xml_mock.encode('utf-8'))
- results = pubmed.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0]['content'], 'No abstract is available for this publication.')
diff --git a/tests/unit/engines/seedpeer_fixture.html b/tests/unit/engines/seedpeer_fixture.html
deleted file mode 100644
index 28207bfad..000000000
--- a/tests/unit/engines/seedpeer_fixture.html
+++ /dev/null
@@ -1,110 +0,0 @@
-<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
- <html xmlns="http://www.w3.org/1999/xhtml">
- <head>
- </head>
- <body>
- <div id="header">
- <div id="whoIsYou">
- <a href="/lang.php"><small>SeedPeer in your own language?</small></a>&nbsp;&nbsp;&nbsp;<a href="http://www.seedpeer.eu"><img src="/images/flags/uk.gif" width="16px" alt="Torrents EN" /></a> <a href="http://spanish.seedpeer.eu"><img src="/images/flags/es.gif" width="16px" alt="Torrents ES" /></a> <a href="http://german.seedpeer.eu"><img src="/images/flags/de.gif" width="16px" alt="Torrents DE" /></a> <a href="http://french.seedpeer.eu"><img src="/images/flags/fr.gif" width="16px" alt="Torrents FR" /></a> <a href="http://portuguese.seedpeer.eu"><img src="/images/flags/pt.gif" width="16px" alt="Torrents Portuguese" /></a> <a href="http://swedish.seedpeer.eu"><img src="/images/flags/se.gif" width="16px" alt="Torrents Sweden" /></a>
- </div>
-
- <script type="text/javascript">
- whoIsYou();
- </script>
- <div id="search">
- <form action="/search.php" method="get">
- <input id="topsearchbar" name="search" value="narcos season 2" />
- <input type="submit" class="searchbutton" value="Torrents" />
- <input style="color:#000" type="submit" class="searchbutton" name="usenet" value="Usenet Binaries" />
- </form>
- <div id="suggestion"></div>
- </div>
- <div id="logo"><a href="/"><img src="/images/logo2.gif" alt="Seedpeer homepage" width="415" height="143" /></a></div>
- <div id="subtext"><a href="/">Home</a> &gt; <a href="/search.html">Torrent search</a> &gt; Narcos season 2 | page 1</div>
- </div>
- <div id="nav">
- <ul>
- <!--
- <li><font style="color:red;font-size:9px;font-weight:bold;">NEW</font><a title="Download TOP Games for FREE" rel="nofollow" href="http://www.bigrebelads.com/affiliate/index?ref=9301" target="_blank">FREE Games</a></li>
-
- -->
- <li style="border-left:none" id="categories"><a title="Browse Torrent Categories" href="/browse.html">Categories</a>
- <ul>
- <li><a title="Browse Anime Torrents" href="/browse.html#6">Anime</a></li>
- <li><a title="Browse Game Torrents" href="/browse.html#4">Games</a></li>
- <li><a title="Browse Movie Torrents" href="/browse.html#1">Movies</a></li>
- <li><a title="Browse Music Torrents" href="/browse.html#3">Music</a></li>
- <li><a title="Browse Software Torrents" href="/browse.html#5">Software</a></li>
- <li><a title="Browse TV Torrents" href="/browse.html#2">TV Shows</a></li>
- <li><a title="Browse Other Torrents" href="/browse.html#7">Others</a></li>
- </ul>
- </li>
- <li><a title="Upload A Torrents" href="/upload.html">Upload torrent</a></li>
- <li id="verified"><a title="Verified Torrents" href="/verified.html">Verified</a></li>
- <li id="searchoptions"><a title="Search Torrents" href="/search.html">Torrent search</a></li>
- <li id="newsgroups"><a style="color:#212b3e" title="News Groups" href="/usenet.html">Usenet Binaries</a></li>
- <li id="about" style="border-right:none"><a rel="nofollow" href="/faq.html">About Us</a>
- <ul>
- <li><a title="SeedPeer Statistics" href="/stats.html">Statistics</a></li>
- <li><a title="Contact Us" href="/contact.html">Contact</a></li>
- <li><a title="Frequently Asked Questions" href="/faq.html">FAQ</a></li>
- <li><a title="SeedPeer API" href="http://api.seedpeer.eu">Our API</a></li>
- <li><a title="SeedPeer Blog" href="/blog">Blog</a></li>
- </ul>
- </li>
- <!--<li><a href="/toolbar.php">Our Toolbar</a></li>-->
- </ul>
- <div class="clear"></div>
- </div>
- <div id="body"><div id="pageTop"></div>
- <div id="headerbox"><h1>Verified <font class="colored">Narcos season 2</font> torrents</h1></div><table width="100%"><tr><th>
- <span style="float:right">
- <a href="/search/narcos-season-2/8/1.html"><img style="vertical-align:middle" src="/images/comments.gif" alt="comments" /></a> |
- <a href="/search/narcos-season-2/7/1.html"><img style="vertical-align:middle" src="/images/ver.gif" alt="verified" /></a>
- </span>
- <a href="/search/narcos-season-2/1/1.html">Torrent name</a></th><th class="right"><a href="/search/narcos-season-2/2/1.html">Age</a></th><th class="right"><a href="/search/narcos-season-2/3/1.html">Size</a></th><th class="right"><a href="/search/narcos-season-2/4/1.html">Seeds</a></th><th class="right"><a href="/search/narcos-season-2/5/1.html">Peers</a></th><th class="center"><a href="/search/narcos-season-2/6/1.html">Health</a></th><td class="tableAd" rowspan="6"><iframe src="http://creative.wwwpromoter.com/13689?d=300x250" width="300" height="250" style="border: none;" frameborder="0" scrolling="no"></iframe></td></tr><tr class=""><td><a class="pblink" id="pblink_table_item_1" href="" data-tad="431726" data-last-search="narcos+season+2" target="_blank" rel="nofollow"><strong class='colored'>Narcos season 2</strong> Full Version</a></td><td class="right">20 hours</td><td class="right">681.3 MB</td><td class="right"><font color="green">28</font> </td><td class="right"><font color="navy">654</font> </td><td class="center"><img src="/images/h5.gif" alt="Health" /></td></tr><tr class="tdark"><td><a class="pblink" id="pblink_table_item_2" href="" data-tad="431727" data-url="narcos+season+2" target="_blank" rel="nofollow"><strong class='colored'>Narcos season 2</strong> Trusted Source</a></td><td class="right">12 hours</td><td class="right">787.1 MB</td><td class="right"><font color="green">64</font> </td><td class="right"><font color="navy">220</font> </td><td class="center"><img src="/images/h5.gif" alt="Health" /></td></tr><tr class=""><td><a class="pblink" id="pblink_table_item_3" href="" data-tad="431729" data-last-search="narcos+season+2" target="_blank" rel="nofollow"><strong class='colored'>Full Narcos season 2 Download</strong></a> <small><a class="pblink" id="pblink_table_item_4" href="" data-tad="431729" data-last-search="narcos+season+2" target="_blank" rel="nofollow">Usenet</a></small></td><td class="right">24 hours</td><td class="right">775.5 MB</td><td class="right"><font color="green">60</font> </td><td class="right"><font color="navy">236</font> </td><td class="center"><img src="/images/h5.gif" alt="Health" /></td></tr><tr class="tdark"><td><a class="pblink" id="pblink_table_item_5" href="" data-tad="431730" data-last-search="narcos+season+2" target="_blank" rel="nofollow"><strong class='colored'>Narcos season 2</strong> 2014 - DIRECT STREAMING</a> <small><a class="pblink" id="pblink_table_item_6" href="" data-tad="431729" data-last-search="narcos+season+2" target="_blank" rel="nofollow">Movies</a></small></td><td class="right">17 hours</td><td class="right">654.1 MB</td><td class="right"><font color="green">2</font> </td><td class="right"><font color="navy">391</font> </td><td class="center"><img src="/images/h5.gif" alt="Health" /></td></tr><tr class=""><td><a class="pblink" id="pblink_table_item_7" href="" data-tad="431731" data-last-search="narcos+season+2" target="_blank" rel="nofollow"><strong class='colored'>Narcos season 2</strong> 2014</a> <small><a class="pblink" id="pblink_table_item_8" href="" data-tad="431729" data-last-search="narcos+season+2" target="_blank" rel="nofollow">Movies</a></small></td><td class="right">20 hours</td><td class="right">754.5 MB</td><td class="right"><font color="green">21</font> </td><td class="right"><font color="navy">919</font> </td><td class="center"><img src="/images/h5.gif" alt="Health" /></td></tr></table><br /><br /><center><iframe src='http://creative.wwwpromoter.com/13689?d=728x90' width='728' height='90' style='border: 
none;' frameborder='0' scrolling='no'></iframe><center><span style="float:right;margin:1em .2em 0 0"><a title="Download at the speed of your connection" href="/usenet.php?search=narcos+season+2"><img src="/images/dlf.gif" alt="Search Binaries" /></a></span><div style="margin-bottom:1em;margin-right:290px" id="headerbox"><h1><a href="/searchfeed/narcos+season+2.xml" target="_blank" title="SeedPeer RSS Torrent Search Feed fornarcos season 2"><img src="/images/feedIcon.png" border="0" /></a>&nbsp;2 <font class="colored">Narcos season 2</font> Torrents were found</h1></div><table width="100%"><tr><th>
- <span style="float:right">
- <a href="/search/narcos-season-2/8/1.html"><img style="vertical-align:middle" src="/images/comments.gif" alt="comments" /></a> |
- <a href="/search/narcos-season-2/7/1.html"><img style="vertical-align:middle" src="/images/ver.gif" alt="verified" /></a>
- </span>
- <a href="/search/narcos-season-2/1/1.html">Torrent name</a></th><th class="right"><a href="/search/narcos-season-2/2/1.html">Age</a></th><th class="right"><a href="/search/narcos-season-2/3/1.html">Size</a></th><th class="right"><a href="/search/narcos-season-2/4/1.html">Seeds</a></th><th class="right"><a href="/search/narcos-season-2/5/1.html">Peers</a></th><th class="center"><a href="/search/narcos-season-2/6/1.html">Health</a></th></tr><tr class=""><td><small class="comments"><a href="http://www.facebook.com/sharer.php?t=Download%20<strong class='colored'>Narcos</strong> <strong class='colored'>Season</strong> <strong class='colored'>2</strong> Complete 7<strong class='colored'>2</strong>0p WebRip EN-SUB x<strong class='colored'>2</strong>64-[MULVAcoded] S0<strong class='colored'>2</strong>%20 torrent&u=http://seedpeer.seedpeer.eu/details/11686840/Narcos-Season-2-Complete-720p-WebRip-EN-SUB-x264-[MULVAcoded]-S02.html"><img src="/images/facebook.png" alt="Add to Facebook" width="14" height="14" /></a></small><a href="/details/11686840/Narcos-Season-2-Complete-720p-WebRip-EN-SUB-x264-[MULVAcoded]-S02.html"><strong class='colored'>Narcos</strong> <strong class='colored'>Season</strong> <strong class='colored'>2</strong> Complete 7<strong class='colored'>2</strong>0p WebRip EN-SUB x<strong class='colored'>2</strong>64-[MULVAcoded] S0<strong class='colored'>2</strong> <small><a href="/browse.html#11686840"></a></small></a></td><td class="right">19 hours</td><td class="right">4.39 GB</td><td class="right"><font color="green">715</font> </td><td class="right"><font color="navy">183</font> </td><td class="center"><img src="/images/h5.gif" alt="Health" width="40" height="11" /></td></tr><tr class="tdark"><td><small class="comments"><a href="http://www.facebook.com/sharer.php?t=Download%20<strong class='colored'>Narcos</strong> - <strong class='colored'>Season</strong> <strong class='colored'>2</strong> - 7<strong class='colored'>2</strong>0p WEBRiP - x<strong class='colored'>2</strong>65 HEVC - ShAaNiG%20 torrent&u=http://seedpeer.seedpeer.eu/details/11685972/Narcos---Season-2---720p-WEBRiP---x265-HEVC---ShAaNiG.html"><img src="/images/facebook.png" alt="Add to Facebook" width="14" height="14" /></a></small><a href="/details/11685972/Narcos---Season-2---720p-WEBRiP---x265-HEVC---ShAaNiG.html"><strong class='colored'>Narcos</strong> - <strong class='colored'>Season</strong> <strong class='colored'>2</strong> - 7<strong class='colored'>2</strong>0p WEBRiP - x<strong class='colored'>2</strong>65 HEVC - ShAaNiG <small><a href="/browse.html#11685972"></a></small></a></td><td class="right">1 day</td><td class="right">2.48 GB</td><td class="right"><font color="green">861</font> </td><td class="right"><font color="navy">332</font> </td><td class="center"><img src="/images/h5.gif" alt="Health" width="40" height="11" /></td></tr></table><div id="headerbox"><h1>Related searches for: <font class="colored">Narcos season 2</font></h1></div><div id="search_suggestions"><br />Other suggested searches: </div><br /><a href="http://torrentz2.eu/search?f=narcos-season-2">Search for "narcos-season-2" on Torrentz2.eu</a><br /><a href="http://torrent-finder.info/show.php?q=narcos-season-2">Search for "narcos-season-2" on Torrent-Finder</a><br /><center><iframe src='http://creative.wwwpromoter.com/13689?d=300x250' width='300' height='250' style='border: none;' frameborder='0' scrolling='no'></iframe>&nbsp;<iframe src='http://creative.wwwpromoter.com/13689?d=300x250' width='300' height='250' style='border: none;' 
frameborder='0' scrolling='no'></iframe>&nbsp;<iframe src='http://creative.wwwpromoter.com/13689?d=300x250' width='300' height='250' style='border: none;' frameborder='0' scrolling='no'></iframe></center><div id="footer">
- <table width="100%">
- <tr>
- <td width="30%">
- <h2>Torrents Download</h2>
- <a href="/">Torrent search</a><br />
- <a href="/browse.html">Browse categories</a><br />
- <a href="/verified.html">Verified Torrents</a><br />
- <a href="/order-date.html">Today's torrents</a><br />
- <a href="/yesterday.html">Yesterday's torrents</a><br />
- <a href="/stats.html">Statistics</a><br />
- <br />
- <a href="/faq.html#copyright"><strong>Copyright & Removal</strong></a>
- </td>
- <td width="30%"><h2>Cool Stuff</h2>
- <a href="/promotional.php">Promotional</a><br />
- <a href="/contact.html">Advertising Information</a><br />
- <strong><a href="/plugins.php" title="Add a search plugin to Firefox or Internet Explorer">Search Plugin <span style="color:red">*</span></a></strong><br />
- <a href="http://www.utorrent.com">&micro;Torrent Client</a><br />
- <a href="/blog">Seedpeer Blog</a><br />
- </td>
- <td width="30%"><h2>Links</h2>
- <a href="http://www.sumotorrent.com" target="_blank"><strong>SumoTorrent</strong></a><br />
- <a href="http://www.torrent-finder.info" target="_blank"><strong>Torrent Finder</strong></a><br />
- <a href="http://www.torrentpond.com" target="_blank"><strong>TorrentPond</strong></a><br />
- <a href="https://www.limetorrents.cc" target="_blank">LimeTorrents.cc</a><br />
- <a href="http://www.torrents.to/" target="_blank">Torrents.to</a><br />
- <a href="http://www.torrentfunk.com" target="_blank">TorrentFunk</a><br />
- <a href="https://monova.org" target="_blank">Monova</a><br />
- <a href="http://www.torrentroom.com" target="_blank">TorrentRoom</a><br />
- <a href="http://www.katcr.co/" target="_blank">Kickass Torrents Community</a><br />
- </td>
- <td width="10%"><div id="bottomlogo"></div></td>
- </tr>
- </table>
- <br />
- <br />
- </div>
- </div>
- </body>
- </html>
\ No newline at end of file
diff --git a/tests/unit/engines/test_acgsou.py b/tests/unit/engines/test_acgsou.py
deleted file mode 100644
index c01acf5de..000000000
--- a/tests/unit/engines/test_acgsou.py
+++ /dev/null
@@ -1,78 +0,0 @@
-# coding=utf-8
-from collections import defaultdict
-import mock
-from searx.engines import acgsou
-from searx.testing import SearxTestCase
-
-
-class TestAcgsouEngine(SearxTestCase):
-
- def test_request(self):
- query = 'test_query'
- dic = defaultdict(dict)
- dic['pageno'] = 1
- params = acgsou.request(query, dic)
- self.assertTrue('url' in params)
- self.assertTrue(query in params['url'])
- self.assertTrue('acgsou.com' in params['url'])
-
- def test_response(self):
- resp = mock.Mock(text='<html></html>')
- self.assertEqual(acgsou.response(resp), [])
-
- html = u"""
- <html>
-<table id="listTable" class="list_style table_fixed">
- <thead class="tcat">
- <tr>
- <th axis="string" class="l1 tableHeaderOver">test</th>
- <th axis="string" class="l2 tableHeaderOver">test</th>
- <th axis="string" class="l3 tableHeaderOver">test</th>
- <th axis="size" class="l4 tableHeaderOver">test</th>
- <th axis="number" class="l5 tableHeaderOver">test</th>
- <th axis="number" class="l6 tableHeaderOver">test</th>
- <th axis="number" class="l7 tableHeaderOver">test</th>
- <th axis="string" class="l8 tableHeaderOver">test</th>
- </tr>
- </thead>
- <tbody class="tbody" id="data_list">
- <tr class="alt1 ">
- <td nowrap="nowrap">date</td>
- <td><a href="category.html">testcategory テスト</a></td>
- <td style="text-align:left;">
- <a href="show-torrentid.html" target="_blank">torrentname テスト</a>
- </td>
- <td>1MB</td>
- <td nowrap="nowrap">
- <span class="bts_1">
- 29
- </span>
- </td>
- <td nowrap="nowrap">
- <span class="btl_1">
- 211
- </span>
- </td>
- <td nowrap="nowrap">
- <span class="btc_">
- 168
- </span>
- </td>
- <td><a href="random.html">user</a></td>
- </tr>
- </tbody>
-</table>
-</html>
- """
-
- resp = mock.Mock(text=html)
- results = acgsou.response(resp)
-
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 1)
-
- r = results[0]
- self.assertEqual(r['url'], 'http://www.acgsou.com/show-torrentid.html')
- self.assertEqual(r['content'], u'Category: "testcategory テスト".')
- self.assertEqual(r['title'], u'torrentname テスト')
- self.assertEqual(r['filesize'], 1048576)
diff --git a/tests/unit/engines/test_archlinux.py b/tests/unit/engines/test_archlinux.py
deleted file mode 100644
index 062f023bd..000000000
--- a/tests/unit/engines/test_archlinux.py
+++ /dev/null
@@ -1,111 +0,0 @@
-from collections import defaultdict
-import mock
-from searx.engines import archlinux
-from searx.testing import SearxTestCase
-
-domains = {
- 'all': 'https://wiki.archlinux.org',
- 'de': 'https://wiki.archlinux.de',
- 'fr': 'https://wiki.archlinux.fr',
- 'ja': 'https://wiki.archlinuxjp.org',
- 'ro': 'http://wiki.archlinux.ro',
- 'tr': 'http://archtr.org/wiki'
-}
-
-
-class TestArchLinuxEngine(SearxTestCase):
-
- def test_request(self):
- query = 'test_query'
- dic = defaultdict(dict)
- dic['pageno'] = 1
- dic['language'] = 'en-US'
- params = archlinux.request(query, dic)
- self.assertTrue('url' in params)
- self.assertTrue(query in params['url'])
- self.assertTrue('wiki.archlinux.org' in params['url'])
-
- for lang, name in archlinux.main_langs:
- dic['language'] = lang
- params = archlinux.request(query, dic)
- self.assertTrue(name in params['url'])
-
- for lang, domain in domains.items():
- dic['language'] = lang
- params = archlinux.request(query, dic)
- self.assertTrue(domain in params['url'])
-
- def test_response(self):
- response = mock.Mock(text='<html></html>',
- search_params={'language': 'en_US'})
- self.assertEqual(archlinux.response(response), [])
-
- html = """
- <ul class="mw-search-results">
- <li>
- <div class="mw-search-result-heading">
- <a href="/index.php/ATI" title="ATI">ATI</a>
- </div>
- <div class="searchresult">
- Lorem ipsum dolor sit amet
- </div>
- <div class="mw-search-result-data">
- 30 KB (4,630 words) - 19:04, 17 March 2016</div>
- </li>
- <li>
- <div class="mw-search-result-heading">
- <a href="/index.php/Frequently_asked_questions" title="Frequently asked questions">
- Frequently asked questions
- </a>
- </div>
- <div class="searchresult">
- CPUs with AMDs instruction set "AMD64"
- </div>
- <div class="mw-search-result-data">
- 17 KB (2,722 words) - 20:13, 21 March 2016
- </div>
- </li>
- <li>
- <div class="mw-search-result-heading">
- <a href="/index.php/CPU_frequency_scaling" title="CPU frequency scaling">CPU frequency scaling</a>
- </div>
- <div class="searchresult">
- ondemand for AMD and older Intel CPU
- </div>
- <div class="mw-search-result-data">
- 15 KB (2,319 words) - 23:46, 16 March 2016
- </div>
- </li>
- </ul>
- """
-
- expected = [
- {
- 'title': 'ATI',
- 'url': 'https://wiki.archlinux.org/index.php/ATI'
- },
- {
- 'title': 'Frequently asked questions',
- 'url': 'https://wiki.archlinux.org/index.php/Frequently_asked_questions'
- },
- {
- 'title': 'CPU frequency scaling',
- 'url': 'https://wiki.archlinux.org/index.php/CPU_frequency_scaling'
- }
- ]
-
- response = mock.Mock(text=html)
- response.search_params = {
- 'language': 'en_US'
- }
- results = archlinux.response(response)
-
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), len(expected))
-
- i = 0
- for exp in expected:
- res = results[i]
- i += 1
- for key, value in exp.items():
- self.assertEqual(res[key], value)
diff --git a/tests/unit/engines/test_arxiv.py b/tests/unit/engines/test_arxiv.py
deleted file mode 100644
index 83c4f8595..000000000
--- a/tests/unit/engines/test_arxiv.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# -*- coding: utf-8 -*-
-from collections import defaultdict
-import mock
-from searx.engines import arxiv
-from searx.testing import SearxTestCase
-
-
-class TestBaseEngine(SearxTestCase):
-
- def test_request(self):
- query = 'test_query'.encode('utf-8')
- dicto = defaultdict(dict)
- dicto['pageno'] = 1
- params = arxiv.request(query, dicto)
- self.assertIn('url', params)
- self.assertIn('export.arxiv.org/api/', params['url'])
-
- def test_response(self):
- self.assertRaises(AttributeError, arxiv.response, None)
- self.assertRaises(AttributeError, arxiv.response, [])
- self.assertRaises(AttributeError, arxiv.response, '')
- self.assertRaises(AttributeError, arxiv.response, '[]')
-
- response = mock.Mock(content=b'''<?xml version="1.0" encoding="UTF-8"?>
-<feed xmlns="http://www.w3.org/2005/Atom"></feed>''')
- self.assertEqual(arxiv.response(response), [])
-
- xml_mock = b'''<?xml version="1.0" encoding="UTF-8"?>
-<feed xmlns="http://www.w3.org/2005/Atom">
- <title type="html">ArXiv Query: search_query=all:test_query&amp;id_list=&amp;start=0&amp;max_results=1</title>
- <id>http://arxiv.org/api/1</id>
- <updated>2000-01-21T00:00:00-01:00</updated>
- <opensearch:totalResults xmlns:opensearch="http://a9.com/-/spec/opensearch/1.1/">1</opensearch:totalResults>
- <opensearch:startIndex xmlns:opensearch="http://a9.com/-/spec/opensearch/1.1/">0</opensearch:startIndex>
- <opensearch:itemsPerPage xmlns:opensearch="http://a9.com/-/spec/opensearch/1.1/">1</opensearch:itemsPerPage>
- <entry>
- <id>http://arxiv.org/1</id>
- <updated>2000-01-01T00:00:01Z</updated>
- <published>2000-01-01T00:00:01Z</published>
- <title>Mathematical proof.</title>
- <summary>Mathematical formula.</summary>
- <author>
- <name>A. B.</name>
- </author>
- <link href="http://arxiv.org/1" rel="alternate" type="text/html"/>
- <link title="pdf" href="http://arxiv.org/1" rel="related" type="application/pdf"/>
- <category term="math.QA" scheme="http://arxiv.org/schemas/atom"/>
- <category term="1" scheme="http://arxiv.org/schemas/atom"/>
- </entry>
-</feed>
-'''
-
- response = mock.Mock(content=xml_mock)
- results = arxiv.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0]['title'], 'Mathematical proof.')
- self.assertEqual(results[0]['content'], 'Mathematical formula.')
diff --git a/tests/unit/engines/test_base.py b/tests/unit/engines/test_base.py
deleted file mode 100644
index b5da5bde7..000000000
--- a/tests/unit/engines/test_base.py
+++ /dev/null
@@ -1,91 +0,0 @@
-# -*- coding: utf-8 -*-
-from collections import defaultdict
-import mock
-from searx.engines import base
-from searx.testing import SearxTestCase
-
-
-class TestBaseEngine(SearxTestCase):
-
- def test_request(self):
- query = 'test_query'
- dicto = defaultdict(dict)
- dicto['pageno'] = 1
- params = base.request(query, dicto)
- self.assertIn('url', params)
- self.assertIn('base-search.net', params['url'])
-
- def test_response(self):
- self.assertRaises(AttributeError, base.response, None)
- self.assertRaises(AttributeError, base.response, [])
- self.assertRaises(AttributeError, base.response, '')
- self.assertRaises(AttributeError, base.response, '[]')
-
- response = mock.Mock(content=b'<response></response>')
- self.assertEqual(base.response(response), [])
-
- xml_mock = b"""<?xml version="1.0"?>
-<response>
- <lst name="responseHeader">
- <int name="status">0</int>
- <int name="QTime">1</int>
- </lst>
- <result name="response" numFound="1" start="0">
- <doc>
- <date name="dchdate">2000-01-01T01:01:01Z</date>
- <str name="dcdocid">1</str>
- <str name="dccontinent">cna</str>
- <str name="dccountry">us</str>
- <str name="dccollection">ftciteseerx</str>
- <str name="dcprovider">CiteSeerX</str>
- <str name="dctitle">Science and more</str>
- <arr name="dccreator">
- <str>Someone</str>
- </arr>
- <arr name="dcperson">
- <str>Someone</str>
- </arr>
- <arr name="dcsubject">
- <str>Science and more</str>
- </arr>
- <str name="dcdescription">Science, and even more.</str>
- <arr name="dccontributor">
- <str>The neighbour</str>
- </arr>
- <str name="dcdate">2001</str>
- <int name="dcyear">2001</int>
- <arr name="dctype">
- <str>text</str>
- </arr>
- <arr name="dctypenorm">
- <str>1</str>
- </arr>
- <arr name="dcformat">
- <str>application/pdf</str>
- </arr>
- <arr name="dccontenttype">
- <str>application/pdf</str>
- </arr>
- <arr name="dcidentifier">
- <str>http://example.org/</str>
- </arr>
- <str name="dclink">http://example.org</str>
- <str name="dcsource">http://example.org</str>
- <arr name="dclanguage">
- <str>en</str>
- </arr>
- <str name="dcrights">Under the example.org licence</str>
- <int name="dcoa">1</int>
- <arr name="dclang">
- <str>eng</str>
- </arr>
- </doc>
- </result>
-</response>"""
-
- response = mock.Mock(content=xml_mock)
- results = base.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0]['title'], 'Science and more')
- self.assertEqual(results[0]['content'], 'Science, and even more.')
diff --git a/tests/unit/engines/test_bing.py b/tests/unit/engines/test_bing.py
deleted file mode 100644
index 387034735..000000000
--- a/tests/unit/engines/test_bing.py
+++ /dev/null
@@ -1,178 +0,0 @@
-# -*- coding: utf-8 -*-
-from collections import defaultdict
-import mock
-from searx.engines import bing
-from searx.testing import SearxTestCase
-
-
-class TestBingEngine(SearxTestCase):
-
- def test_request(self):
- bing.supported_languages = ['en', 'fr', 'zh-CHS', 'zh-CHT', 'pt-PT', 'pt-BR']
- query = u'test_query'
- dicto = defaultdict(dict)
- dicto['pageno'] = 1
- dicto['language'] = 'fr-FR'
- params = bing.request(query.encode('utf-8'), dicto)
- self.assertTrue('url' in params)
- self.assertTrue(query in params['url'])
- self.assertTrue('language%3AFR' in params['url'])
- self.assertTrue('bing.com' in params['url'])
-
- dicto['language'] = 'all'
- params = bing.request(query.encode('utf-8'), dicto)
- self.assertTrue('language' in params['url'])
-
- def test_response(self):
- dicto = defaultdict(dict)
- dicto['pageno'] = 1
- dicto['language'] = 'fr-FR'
- self.assertRaises(AttributeError, bing.response, None)
- self.assertRaises(AttributeError, bing.response, [])
- self.assertRaises(AttributeError, bing.response, '')
- self.assertRaises(AttributeError, bing.response, '[]')
-
- response = mock.Mock(text='<html></html>')
- response.search_params = dicto
- self.assertEqual(bing.response(response), [])
-
- response = mock.Mock(text='<html></html>')
- response.search_params = dicto
- self.assertEqual(bing.response(response), [])
-
- html = """
- <div>
- <div id="b_tween">
- <span class="sb_count" data-bm="4">23 900 000 résultats</span>
- </div>
- <ol id="b_results" role="main">
- <div class="sa_cc" u="0|5109|4755453613245655|UAGjXgIrPH5yh-o5oNHRx_3Zta87f_QO">
- <div Class="sa_mc">
- <div class="sb_tlst">
- <h3>
- <a href="http://this.should.be.the.link/" h="ID=SERP,5124.1">
- <strong>This</strong> should be the title</a>
- </h3>
- </div>
- <div class="sb_meta"><cite><strong>this</strong>.meta.com</cite>
- <span class="c_tlbxTrg">
- <span class="c_tlbxH" H="BASE:CACHEDPAGEDEFAULT" K="SERP,5125.1">
- </span>
- </span>
- </div>
- <p><strong>This</strong> should be the content.</p>
- </div>
- </div>
- </ol>
- </div>
- """
- response = mock.Mock(text=html)
- response.search_params = dicto
- results = bing.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 2)
- self.assertEqual(results[0]['title'], 'This should be the title')
- self.assertEqual(results[0]['url'], 'http://this.should.be.the.link/')
- self.assertEqual(results[0]['content'], 'This should be the content.')
- self.assertEqual(results[-1]['number_of_results'], 23900000)
-
- html = """
- <div>
- <div id="b_tween">
- <span class="sb_count" data-bm="4">9-18 résultats sur 23 900 000</span>
- </div>
- <ol id="b_results" role="main">
- <li class="b_algo" u="0|5109|4755453613245655|UAGjXgIrPH5yh-o5oNHRx_3Zta87f_QO">
- <div Class="sa_mc">
- <div class="sb_tlst">
- <h2>
- <a href="http://this.should.be.the.link/" h="ID=SERP,5124.1">
- <strong>This</strong> should be the title</a>
- </h2>
- </div>
- <div class="sb_meta"><cite><strong>this</strong>.meta.com</cite>
- <span class="c_tlbxTrg">
- <span class="c_tlbxH" H="BASE:CACHEDPAGEDEFAULT" K="SERP,5125.1">
- </span>
- </span>
- </div>
- <p><strong>This</strong> should be the content.</p>
- </div>
- </li>
- </ol>
- </div>
- """
- dicto['pageno'] = 2
- response = mock.Mock(text=html)
- response.search_params = dicto
- results = bing.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 2)
- self.assertEqual(results[0]['title'], 'This should be the title')
- self.assertEqual(results[0]['url'], 'http://this.should.be.the.link/')
- self.assertEqual(results[0]['content'], 'This should be the content.')
- self.assertEqual(results[-1]['number_of_results'], 23900000)
-
- html = """
- <div>
- <div id="b_tween">
- <span class="sb_count" data-bm="4">23 900 000 résultats</span>
- </div>
- <ol id="b_results" role="main">
- <li class="b_algo" u="0|5109|4755453613245655|UAGjXgIrPH5yh-o5oNHRx_3Zta87f_QO">
- <div Class="sa_mc">
- <div class="sb_tlst">
- <h2>
- <a href="http://this.should.be.the.link/" h="ID=SERP,5124.1">
- <strong>This</strong> should be the title</a>
- </h2>
- </div>
- <div class="sb_meta"><cite><strong>this</strong>.meta.com</cite>
- <span class="c_tlbxTrg">
- <span class="c_tlbxH" H="BASE:CACHEDPAGEDEFAULT" K="SERP,5125.1">
- </span>
- </span>
- </div>
- <p><strong>This</strong> should be the content.</p>
- </div>
- </li>
- </ol>
- </div>
- """
- dicto['pageno'] = 33900000
- response = mock.Mock(text=html)
- response.search_params = dicto
- results = bing.response(response)
- self.assertEqual(bing.response(response), [])
-
- def test_fetch_supported_languages(self):
- html = """<html></html>"""
- response = mock.Mock(text=html)
- results = bing._fetch_supported_languages(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 0)
-
- html = """
- <html>
- <body>
- <form>
- <div id="limit-languages">
- <div>
- <div><input id="es" value="es"></input></div>
- </div>
- <div>
- <div><input id="pt_BR" value="pt_BR"></input></div>
- <div><input id="pt_PT" value="pt_PT"></input></div>
- </div>
- </div>
- </form>
- </body>
- </html>
- """
- response = mock.Mock(text=html)
- languages = bing._fetch_supported_languages(response)
- self.assertEqual(type(languages), list)
- self.assertEqual(len(languages), 3)
- self.assertIn('es', languages)
- self.assertIn('pt-BR', languages)
- self.assertIn('pt-PT', languages)
diff --git a/tests/unit/engines/test_bing_images.py b/tests/unit/engines/test_bing_images.py
deleted file mode 100644
index a4efcab58..000000000
--- a/tests/unit/engines/test_bing_images.py
+++ /dev/null
@@ -1,132 +0,0 @@
-# -*- coding: utf-8 -*-
-from collections import defaultdict
-import mock
-from searx.engines import bing_images
-from searx.testing import SearxTestCase
-
-
-class TestBingImagesEngine(SearxTestCase):
-
- def test_request(self):
- bing_images.supported_languages = ['fr-FR', 'en-US']
- bing_images.language_aliases = {}
- query = 'test_query'
- dicto = defaultdict(dict)
- dicto['pageno'] = 1
- dicto['language'] = 'fr-FR'
- dicto['safesearch'] = 1
- dicto['time_range'] = ''
- params = bing_images.request(query, dicto)
- self.assertTrue('url' in params)
- self.assertTrue(query in params['url'])
- self.assertTrue('bing.com' in params['url'])
- self.assertTrue('SRCHHPGUSR' in params['cookies'])
- self.assertTrue('DEMOTE' in params['cookies']['SRCHHPGUSR'])
- self.assertTrue('_EDGE_S' in params['cookies'])
- self.assertTrue('fr-fr' in params['cookies']['_EDGE_S'])
-
- dicto['language'] = 'fr'
- params = bing_images.request(query, dicto)
- self.assertTrue('_EDGE_S' in params['cookies'])
- self.assertTrue('fr-fr' in params['cookies']['_EDGE_S'])
-
- dicto['language'] = 'all'
- params = bing_images.request(query, dicto)
- self.assertTrue('_EDGE_S' in params['cookies'])
- self.assertTrue('en-us' in params['cookies']['_EDGE_S'])
-
- def test_response(self):
- self.assertRaises(AttributeError, bing_images.response, None)
- self.assertRaises(AttributeError, bing_images.response, [])
- self.assertRaises(AttributeError, bing_images.response, '')
- self.assertRaises(AttributeError, bing_images.response, '[]')
-
- response = mock.Mock(text='<html></html>')
- self.assertEqual(bing_images.response(response), [])
-
- response = mock.Mock(text='<html></html>')
- self.assertEqual(bing_images.response(response), [])
-
- html = """
- <div id="mmComponent_images_1">
- <ul>
- <li>
- <div>
- <div class="imgpt">
- <a m='{"purl":"page_url","murl":"img_url","turl":"thumb_url","t":"Page 1 title"}'>
- <img src="" alt="alt text" />
- </a>
- <div class="img_info">
- <span>1 x 1 - jpeg</span>
- <a>1.example.org</a>
- </div>
- </div>
- <div></div>
- </div>
- <div>
- <div class="imgpt">
- <a m='{"purl":"page_url2","murl":"img_url2","turl":"thumb_url2","t":"Page 2 title"}'>
- <img src="" alt="alt text 2" />
- </a>
- <div class="img_info">
- <span>2 x 2 - jpeg</span>
- <a>2.example.org</a>
- </div>
- </div>
- </div>
- </li>
- </ul>
- <ul>
- <li>
- <div>
- <div class="imgpt">
- <a m='{"purl":"page_url3","murl":"img_url3","turl":"thumb_url3","t":"Page 3 title"}'>
- <img src="" alt="alt text 3" />
- </a>
- <div class="img_info">
- <span>3 x 3 - jpeg</span>
- <a>3.example.org</a>
- </div>
- </div>
- </div>
- </li>
- </ul>
- </div>
- """
- html = html.replace('\r\n', '').replace('\n', '').replace('\r', '')
- response = mock.Mock(text=html)
- results = bing_images.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 3)
- self.assertEqual(results[0]['title'], 'Page 1 title')
- self.assertEqual(results[0]['url'], 'page_url')
- self.assertEqual(results[0]['content'], '')
- self.assertEqual(results[0]['thumbnail_src'], 'thumb_url')
- self.assertEqual(results[0]['img_src'], 'img_url')
- self.assertEqual(results[0]['img_format'], '1 x 1 - jpeg')
- self.assertEqual(results[0]['source'], '1.example.org')
-
- def test_fetch_supported_languages(self):
- html = """
- <div>
- <div id="region-section-content">
- <ul class="b_vList">
- <li>
- <a href="https://bing...&setmkt=de-DE&s...">Germany</a>
- <a href="https://bing...&setmkt=nb-NO&s...">Norway</a>
- </li>
- </ul>
- <ul class="b_vList">
- <li>
- <a href="https://bing...&setmkt=es-AR&s...">Argentina</a>
- </li>
- </ul>
- </div>
- </div>
- """
- response = mock.Mock(text=html)
- languages = list(bing_images._fetch_supported_languages(response))
- self.assertEqual(len(languages), 3)
- self.assertIn('de-DE', languages)
- self.assertIn('no-NO', languages)
- self.assertIn('es-AR', languages)
diff --git a/tests/unit/engines/test_bing_news.py b/tests/unit/engines/test_bing_news.py
deleted file mode 100644
index 1155e79c4..000000000
--- a/tests/unit/engines/test_bing_news.py
+++ /dev/null
@@ -1,147 +0,0 @@
-from collections import defaultdict
-import mock
-from searx.engines import bing_news
-from searx.testing import SearxTestCase
-import lxml
-
-
-class TestBingNewsEngine(SearxTestCase):
-
- def test_request(self):
- bing_news.supported_languages = ['en', 'fr']
- query = 'test_query'
- dicto = defaultdict(dict)
- dicto['pageno'] = 1
- dicto['language'] = 'fr-FR'
- dicto['time_range'] = ''
- params = bing_news.request(query, dicto)
- self.assertIn('url', params)
- self.assertIn(query, params['url'])
- self.assertIn('bing.com', params['url'])
- self.assertIn('fr', params['url'])
-
- dicto['language'] = 'all'
- params = bing_news.request(query, dicto)
- self.assertIn('en', params['url'])
-
- def test_no_url_in_request_year_time_range(self):
- dicto = defaultdict(dict)
- query = 'test_query'
- dicto['time_range'] = 'year'
- params = bing_news.request(query, dicto)
- self.assertEqual({}, params['url'])
-
- def test_response(self):
- self.assertRaises(AttributeError, bing_news.response, None)
- self.assertRaises(AttributeError, bing_news.response, [])
- self.assertRaises(AttributeError, bing_news.response, '')
- self.assertRaises(AttributeError, bing_news.response, '[]')
-
- response = mock.Mock(content='<html></html>')
- self.assertEqual(bing_news.response(response), [])
-
- response = mock.Mock(content='<html></html>')
- self.assertEqual(bing_news.response(response), [])
-
- html = """<?xml version="1.0" encoding="utf-8" ?>
-<rss version="2.0" xmlns:News="https://www.bing.com:443/news/search?q=python&amp;setmkt=en-US&amp;first=1&amp;format=RSS">
- <channel>
- <title>python - Bing News</title>
- <link>https://www.bing.com:443/news/search?q=python&amp;setmkt=en-US&amp;first=1&amp;format=RSS</link>
- <description>Search results</description>
- <image>
- <url>http://10.53.64.9/rsslogo.gif</url>
- <title>test</title>
- <link>https://www.bing.com:443/news/search?q=test&amp;setmkt=en-US&amp;first=1&amp;format=RSS</link>
- </image>
- <copyright>Copyright</copyright>
- <item>
- <title>Title</title>
- <link>https://www.bing.com/news/apiclick.aspx?ref=FexRss&amp;aid=&amp;tid=c237eccc50bd4758b106a5e3c94fce09&amp;url=http%3a%2f%2furl.of.article%2f&amp;c=xxxxxxxxx&amp;mkt=en-us</link>
- <description>Article Content</description>
- <pubDate>Tue, 02 Jun 2015 13:37:00 GMT</pubDate>
- <News:Source>Infoworld</News:Source>
- <News:Image>http://a1.bing4.com/th?id=ON.13371337133713371337133713371337&amp;pid=News</News:Image>
- <News:ImageSize>w={0}&amp;h={1}&amp;c=7</News:ImageSize>
- <News:ImageKeepOriginalRatio></News:ImageKeepOriginalRatio>
- <News:ImageMaxWidth>620</News:ImageMaxWidth>
- <News:ImageMaxHeight>413</News:ImageMaxHeight>
- </item>
- <item>
- <title>Another Title</title>
- <link>https://www.bing.com/news/apiclick.aspx?ref=FexRss&amp;aid=&amp;tid=c237eccc50bd4758b106a5e3c94fce09&amp;url=http%3a%2f%2fanother.url.of.article%2f&amp;c=xxxxxxxxx&amp;mkt=en-us</link>
- <description>Another Article Content</description>
- <pubDate>Tue, 02 Jun 2015 13:37:00 GMT</pubDate>
- </item>
- </channel>
-</rss>""" # noqa
- response = mock.Mock(content=html.encode('utf-8'))
- results = bing_news.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 2)
- self.assertEqual(results[0]['title'], 'Title')
- self.assertEqual(results[0]['url'], 'http://url.of.article/')
- self.assertEqual(results[0]['content'], 'Article Content')
- self.assertEqual(results[0]['img_src'], 'https://www.bing.com/th?id=ON.13371337133713371337133713371337')
- self.assertEqual(results[1]['title'], 'Another Title')
- self.assertEqual(results[1]['url'], 'http://another.url.of.article/')
- self.assertEqual(results[1]['content'], 'Another Article Content')
- self.assertNotIn('img_src', results[1])
-
- html = """<?xml version="1.0" encoding="utf-8" ?>
-<rss version="2.0" xmlns:News="https://www.bing.com:443/news/search?q=python&amp;setmkt=en-US&amp;first=1&amp;format=RSS">
- <channel>
- <title>python - Bing News</title>
- <link>https://www.bing.com:443/news/search?q=python&amp;setmkt=en-US&amp;first=1&amp;format=RSS</link>
- <description>Search results</description>
- <image>
- <url>http://10.53.64.9/rsslogo.gif</url>
- <title>test</title>
- <link>https://www.bing.com:443/news/search?q=test&amp;setmkt=en-US&amp;first=1&amp;format=RSS</link>
- </image>
- <copyright>Copyright</copyright>
- <item>
- <title>Title</title>
- <link>http://another.url.of.article/</link>
- <description>Article Content</description>
- <pubDate>garbage</pubDate>
- <News:Source>Infoworld</News:Source>
- <News:Image>http://another.bing.com/image</News:Image>
- <News:ImageSize>w={0}&amp;h={1}&amp;c=7</News:ImageSize>
- <News:ImageKeepOriginalRatio></News:ImageKeepOriginalRatio>
- <News:ImageMaxWidth>620</News:ImageMaxWidth>
- <News:ImageMaxHeight>413</News:ImageMaxHeight>
- </item>
- </channel>
-</rss>""" # noqa
- response = mock.Mock(content=html.encode('utf-8'))
- results = bing_news.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0]['title'], 'Title')
- self.assertEqual(results[0]['url'], 'http://another.url.of.article/')
- self.assertEqual(results[0]['content'], 'Article Content')
- self.assertEqual(results[0]['img_src'], 'http://another.bing.com/image')
-
- html = """<?xml version="1.0" encoding="utf-8" ?>
-<rss version="2.0" xmlns:News="https://www.bing.com:443/news/search?q=python&amp;setmkt=en-US&amp;first=1&amp;format=RSS">
- <channel>
- <title>python - Bing News</title>
- <link>https://www.bing.com:443/news/search?q=python&amp;setmkt=en-US&amp;first=1&amp;format=RSS</link>
- <description>Search results</description>
- <image>
- <url>http://10.53.64.9/rsslogo.gif</url>
- <title>test</title>
- <link>https://www.bing.com:443/news/search?q=test&amp;setmkt=en-US&amp;first=1&amp;format=RSS</link>
- </image>
- </channel>
-</rss>""" # noqa
-
- response = mock.Mock(content=html.encode('utf-8'))
- results = bing_news.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 0)
-
- html = """<?xml version="1.0" encoding="utf-8" ?>gabarge"""
- response = mock.Mock(content=html.encode('utf-8'))
- self.assertRaises(lxml.etree.XMLSyntaxError, bing_news.response, response)
diff --git a/tests/unit/engines/test_bing_videos.py b/tests/unit/engines/test_bing_videos.py
deleted file mode 100644
index 5e171eb53..000000000
--- a/tests/unit/engines/test_bing_videos.py
+++ /dev/null
@@ -1,72 +0,0 @@
-# -*- coding: utf-8 -*-
-from collections import defaultdict
-import mock
-from searx.engines import bing_videos
-from searx.testing import SearxTestCase
-
-
-class TestBingVideosEngine(SearxTestCase):
-
- def test_request(self):
- bing_videos.supported_languages = ['fr-FR', 'en-US']
- bing_videos.language_aliases = {}
- query = 'test_query'
- dicto = defaultdict(dict)
- dicto['pageno'] = 1
- dicto['language'] = 'fr-FR'
- dicto['safesearch'] = 0
- dicto['time_range'] = ''
- params = bing_videos.request(query, dicto)
- self.assertTrue('url' in params)
- self.assertTrue(query in params['url'])
- self.assertTrue('bing.com' in params['url'])
- self.assertTrue('SRCHHPGUSR' in params['cookies'])
- self.assertTrue('OFF' in params['cookies']['SRCHHPGUSR'])
- self.assertTrue('_EDGE_S' in params['cookies'])
- self.assertTrue('fr-fr' in params['cookies']['_EDGE_S'])
-
- dicto['pageno'] = 2
- dicto['time_range'] = 'day'
- dicto['safesearch'] = 2
- params = bing_videos.request(query, dicto)
- self.assertTrue('first=29' in params['url'])
- self.assertTrue('1440' in params['url'])
- self.assertIn('SRCHHPGUSR', params['cookies'])
- self.assertTrue('STRICT' in params['cookies']['SRCHHPGUSR'])
-
- def test_response(self):
- self.assertRaises(AttributeError, bing_videos.response, None)
- self.assertRaises(AttributeError, bing_videos.response, [])
- self.assertRaises(AttributeError, bing_videos.response, '')
- self.assertRaises(AttributeError, bing_videos.response, '[]')
-
- response = mock.Mock(text='<html></html>')
- self.assertEqual(bing_videos.response(response), [])
-
- response = mock.Mock(text='<html></html>')
- self.assertEqual(bing_videos.response(response), [])
-
- html = """
- <div class="dg_u">
- <div>
- <a>
- <div>
- <div>
- <div class="mc_vtvc_meta_block">
- <div><span>100 views</span><span>1 year ago</span></div><div><span>ExampleTube</span><span>Channel 1<span></div> #noqa
- </div>
- </div>
- <div class="vrhdata" vrhm='{"du":"01:11","murl":"https://www.example.com/watch?v=DEADBEEF","thid":"OVP.BINGTHUMB1","vt":"Title 1"}'></div> # noqa
- </div>
- </a>
- </div>
- </div>
- """
- response = mock.Mock(text=html)
- results = bing_videos.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0]['title'], 'Title 1')
- self.assertEqual(results[0]['url'], 'https://www.example.com/watch?v=DEADBEEF')
- self.assertEqual(results[0]['content'], '01:11 - 100 views - 1 year ago - ExampleTube - Channel 1')
- self.assertEqual(results[0]['thumbnail'], 'https://www.bing.com/th?id=OVP.BINGTHUMB1')
diff --git a/tests/unit/engines/test_btdigg.py b/tests/unit/engines/test_btdigg.py
deleted file mode 100644
index 45ddaa6e3..000000000
--- a/tests/unit/engines/test_btdigg.py
+++ /dev/null
@@ -1,112 +0,0 @@
-# -*- coding: utf-8 -*-
-from collections import defaultdict
-import mock
-from searx.engines import btdigg
-from searx.testing import SearxTestCase
-
-
-class TestBtdiggEngine(SearxTestCase):
-
- def test_request(self):
- query = 'test_query'
- dicto = defaultdict(dict)
- dicto['pageno'] = 0
- params = btdigg.request(query, dicto)
- self.assertIn('url', params)
- self.assertIn(query, params['url'])
- self.assertIn('btdig.com', params['url'])
-
- def test_response(self):
- self.assertRaises(AttributeError, btdigg.response, None)
- self.assertRaises(AttributeError, btdigg.response, [])
- self.assertRaises(AttributeError, btdigg.response, '')
- self.assertRaises(AttributeError, btdigg.response, '[]')
-
- response = mock.Mock(text='<html></html>')
- self.assertEqual(btdigg.response(response), [])
-
- html = u"""
- <div class="one_result" style="display:table-row;background-color:#e8e8e8">
- <div style="display:table-cell;color:rgb(0, 0, 0)">
- <div style="display:table">
- <div style="display:table-row">
- <div class="torrent_name" style="display:table-cell">
- <a style="color:rgb(0, 0, 204);text-decoration:underline;font-size:150%"
- href="http://btdig.com/a72f35b7ee3a10928f02bb799e40ae5db701ed1c/pdf?q=pdf&amp;p=1&amp;order=0"
- >3.9GBdeLibrosByHuasoFromHell(3de4)</a>
- </div>
- </div>
- </div>
- <div style="display:table">
- <div style="display:table-row">
- <div style="display:table-cell">
- <span class="torrent_files" style="color:#666;padding-left:10px">4217</span> files <span
- class="torrent_size" style="color:#666;padding-left:10px">1 GB</span><span
- class="torrent_age" style="color:rgb(0, 102, 0);padding-left:10px;margin: 0px 4px"
- >found 3 years ago</span>
- </div>
- </div>
- </div>
- <div style="display:table;width:100%;padding:10px">
- <div style="display:table-row">
- <div class="torrent_magnet" style="display:table-cell">
- <div class="fa fa-magnet" style="color:#cc0000">
- <a href="magnet:?xt=urn:btih:a72f35b7ee3a10928f02bb799e40ae5db701ed1c&amp;dn=3.9GBdeLibrosBy..."
- title="Download via magnet-link"> magnet:?xt=urn:btih:a72f35b7ee...</a>
- </div>
- </div>
- <div style="display:table-cell;color:rgb(0, 0, 0);text-align:right">
- <span style="color:rgb(136, 136, 136);margin: 0px 0px 0px 4px"></span><span
- style="color:rgb(0, 102, 0);margin: 0px 4px">found 3 years ago</span>
- </div>
- </div>
- </div>
- <div class="torrent_excerpt" style="display:table;padding:10px;white-space:nowrap">
- <div class="fa fa-folder-open" style="padding-left:0em"> 3.9GBdeLibrosByHuasoFromHell(3de4)</div><br/>
- <div class="fa fa-folder-open" style="padding-left:1em"> Libros H-Z</div><br/>
- <div class="fa fa-folder-open" style="padding-left:2em"> H</div><br/><div class="fa fa-file-archive-o"
- style="padding-left:3em"> H.H. Hollis - El truco de la espada-<b
- style="color:red; background-color:yellow">pdf</b>.zip</div><span
- style="color:#666;padding-left:10px">17 KB</span><br/>
- <div class="fa fa-file-archive-o" style="padding-left:3em"> Hagakure - El Libro del Samurai-<b
- style="color:red; background-color:yellow">pdf</b>.zip</div><span
- style="color:#666;padding-left:10px">95 KB</span><br/>
- <div class="fa fa-folder-open" style="padding-left:3em"> Hamsun, Knut (1859-1952)</div><br/>
- <div class="fa fa-file-archive-o" style="padding-left:4em"> Hamsun, Knut - Hambre-<b
- style="color:red; background-color:yellow">pdf</b>.zip</div><span
- style="color:#666;padding-left:10px">786 KB</span><br/>
- <div class="fa fa-plus-circle"><a
- href="http://btdig.com/a72f35b7ee3a10928f02bb799e40ae5db701ed1c/pdf?q=pdf&amp;p=1&amp;order=0"
- > 4214 hidden files<span style="color:#666;padding-left:10px">1 GB</span></a></div>
- </div>
- </div>
- </div>
- """
- response = mock.Mock(text=html.encode('utf-8'))
- results = btdigg.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0]['title'], '3.9GBdeLibrosByHuasoFromHell(3de4)')
- self.assertEqual(results[0]['url'],
- 'http://btdig.com/a72f35b7ee3a10928f02bb799e40ae5db701ed1c/pdf?q=pdf&p=1&order=0')
- self.assertEqual(results[0]['content'],
- '3.9GBdeLibrosByHuasoFromHell(3de4) | ' +
- 'Libros H-Z | ' +
- 'H H.H. Hollis - El truco de la espada-pdf.zip17 KB | ' +
- 'Hagakure - El Libro del Samurai-pdf.zip95 KB | ' +
- 'Hamsun, Knut (1859-1952) | Hamsun, Knut - Hambre-pdf.zip786 KB | ' +
- '4214 hidden files1 GB')
- self.assertEqual(results[0]['filesize'], 1 * 1024 * 1024 * 1024)
- self.assertEqual(results[0]['files'], 4217)
- self.assertEqual(results[0]['magnetlink'],
- 'magnet:?xt=urn:btih:a72f35b7ee3a10928f02bb799e40ae5db701ed1c&dn=3.9GBdeLibrosBy...')
-
- html = """
- <div style="display:table-row;background-color:#e8e8e8">
-
- </div>
- """
- response = mock.Mock(text=html.encode('utf-8'))
- results = btdigg.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 0)
diff --git a/tests/unit/engines/test_currency_convert.py b/tests/unit/engines/test_currency_convert.py
deleted file mode 100644
index fec194103..000000000
--- a/tests/unit/engines/test_currency_convert.py
+++ /dev/null
@@ -1,56 +0,0 @@
-from collections import defaultdict
-from datetime import datetime
-import mock
-from searx.engines import currency_convert
-from searx.testing import SearxTestCase
-
-
-class TestCurrencyConvertEngine(SearxTestCase):
-
- def test_request(self):
- query = b'test_query'
- dicto = defaultdict(dict)
- dicto['pageno'] = 1
- params = currency_convert.request(query, dicto)
- self.assertNotIn('url', params)
-
- query = b'convert 10 Pound Sterlings to United States Dollars'
- params = currency_convert.request(query, dicto)
- self.assertIn('url', params)
- self.assertIn('duckduckgo.com', params['url'])
- self.assertIn('GBP', params['url'])
- self.assertIn('USD', params['url'])
-
- def test_response(self):
- dicto = defaultdict(dict)
- dicto['amount'] = float(10)
- dicto['from'] = "GBP"
- dicto['to'] = "USD"
- dicto['from_name'] = "pound sterling"
- dicto['to_name'] = "United States dollar"
- response = mock.Mock(text='a,b,c,d', search_params=dicto)
- self.assertEqual(currency_convert.response(response), [])
- body = """ddg_spice_currency(
- {
- "conversion":{
- "converted-amount": "0.5"
- },
- "topConversions":[
- {
- },
- {
- }
- ]
- }
- );
- """
- response = mock.Mock(text=body, search_params=dicto)
- results = currency_convert.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0]['answer'], '10.0 GBP = 5.0 USD, 1 GBP (pound sterling)' +
- ' = 0.5 USD (United States dollar)')
-
- target_url = 'https://duckduckgo.com/js/spice/currency/1/{}/{}'.format(
- dicto['from'], dicto['to'])
- self.assertEqual(results[0]['url'], target_url)
diff --git a/tests/unit/engines/test_dailymotion.py b/tests/unit/engines/test_dailymotion.py
deleted file mode 100644
index ad7f3d283..000000000
--- a/tests/unit/engines/test_dailymotion.py
+++ /dev/null
@@ -1,112 +0,0 @@
-# -*- coding: utf-8 -*-
-from collections import defaultdict
-import mock
-from searx.engines import dailymotion
-from searx.testing import SearxTestCase
-
-
-class TestDailymotionEngine(SearxTestCase):
-
- def test_request(self):
- dailymotion.supported_languages = ['en', 'fr']
- query = 'test_query'
- dicto = defaultdict(dict)
- dicto['pageno'] = 0
- dicto['language'] = 'fr-FR'
- params = dailymotion.request(query, dicto)
- self.assertTrue('url' in params)
- self.assertTrue(query in params['url'])
- self.assertTrue('dailymotion.com' in params['url'])
- self.assertTrue('fr' in params['url'])
-
- dicto['language'] = 'all'
- params = dailymotion.request(query, dicto)
- self.assertTrue('en' in params['url'])
-
- def test_response(self):
- self.assertRaises(AttributeError, dailymotion.response, None)
- self.assertRaises(AttributeError, dailymotion.response, [])
- self.assertRaises(AttributeError, dailymotion.response, '')
- self.assertRaises(AttributeError, dailymotion.response, '[]')
-
- response = mock.Mock(text='{}')
- self.assertEqual(dailymotion.response(response), [])
-
- response = mock.Mock(text='{"data": []}')
- self.assertEqual(dailymotion.response(response), [])
-
- json = """
- {
- "page": 1,
- "limit": 5,
- "explicit": false,
- "total": 289487,
- "has_more": true,
- "list": [
- {
- "created_time": 1422173451,
- "title": "Title",
- "description": "Description",
- "duration": 81,
- "url": "http://www.url",
- "thumbnail_360_url": "http://thumbnail",
- "id": "x2fit7q"
- }
- ]
- }
- """
- response = mock.Mock(text=json)
- results = dailymotion.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0]['title'], 'Title')
- self.assertEqual(results[0]['url'], 'http://www.url')
- self.assertEqual(results[0]['content'], 'Description')
- self.assertIn('x2fit7q', results[0]['embedded'])
-
- json = r"""
- {"toto":[
- {"id":200,"name":"Artist Name",
- "link":"http:\/\/www.dailymotion.com\/artist\/1217","type":"artist"}
- ]}
- """
- response = mock.Mock(text=json)
- results = dailymotion.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 0)
-
- def test_fetch_supported_languages(self):
- json = r"""
- {"list":[{"code":"af","name":"Afrikaans","native_name":"Afrikaans",
- "localized_name":"Afrikaans","display_name":"Afrikaans"},
- {"code":"ar","name":"Arabic","native_name":"\u0627\u0644\u0639\u0631\u0628\u064a\u0629",
- "localized_name":"Arabic","display_name":"Arabic"},
- {"code":"la","name":"Latin","native_name":null,
- "localized_name":"Latin","display_name":"Latin"}
- ]}
- """
- response = mock.Mock(text=json)
- languages = dailymotion._fetch_supported_languages(response)
- self.assertEqual(type(languages), dict)
- self.assertEqual(len(languages), 3)
- self.assertIn('af', languages)
- self.assertIn('ar', languages)
- self.assertIn('la', languages)
-
- self.assertEqual(type(languages['af']), dict)
- self.assertEqual(type(languages['ar']), dict)
- self.assertEqual(type(languages['la']), dict)
-
- self.assertIn('name', languages['af'])
- self.assertIn('name', languages['ar'])
- self.assertNotIn('name', languages['la'])
-
- self.assertIn('english_name', languages['af'])
- self.assertIn('english_name', languages['ar'])
- self.assertIn('english_name', languages['la'])
-
- self.assertEqual(languages['af']['name'], 'Afrikaans')
- self.assertEqual(languages['af']['english_name'], 'Afrikaans')
- self.assertEqual(languages['ar']['name'], u'العربية')
- self.assertEqual(languages['ar']['english_name'], 'Arabic')
- self.assertEqual(languages['la']['english_name'], 'Latin')
diff --git a/tests/unit/engines/test_deezer.py b/tests/unit/engines/test_deezer.py
deleted file mode 100644
index 5b9f55c33..000000000
--- a/tests/unit/engines/test_deezer.py
+++ /dev/null
@@ -1,57 +0,0 @@
-from collections import defaultdict
-import mock
-from searx.engines import deezer
-from searx.testing import SearxTestCase
-
-
-class TestDeezerEngine(SearxTestCase):
-
- def test_request(self):
- query = 'test_query'
- dicto = defaultdict(dict)
- dicto['pageno'] = 0
- params = deezer.request(query, dicto)
- self.assertTrue('url' in params)
- self.assertTrue(query in params['url'])
- self.assertTrue('deezer.com' in params['url'])
-
- def test_response(self):
- self.assertRaises(AttributeError, deezer.response, None)
- self.assertRaises(AttributeError, deezer.response, [])
- self.assertRaises(AttributeError, deezer.response, '')
- self.assertRaises(AttributeError, deezer.response, '[]')
-
- response = mock.Mock(text='{}')
- self.assertEqual(deezer.response(response), [])
-
- response = mock.Mock(text='{"data": []}')
- self.assertEqual(deezer.response(response), [])
-
- json = r"""
- {"data":[
- {"id":100, "title":"Title of track",
- "link":"https:\/\/www.deezer.com\/track\/1094042","duration":232,
- "artist":{"id":200,"name":"Artist Name",
- "link":"https:\/\/www.deezer.com\/artist\/1217","type":"artist"},
- "album":{"id":118106,"title":"Album Title","type":"album"},"type":"track"}
- ]}
- """
- response = mock.Mock(text=json)
- results = deezer.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0]['title'], 'Title of track')
- self.assertEqual(results[0]['url'], 'https://www.deezer.com/track/1094042')
- self.assertEqual(results[0]['content'], 'Artist Name - Album Title - Title of track')
- self.assertTrue('100' in results[0]['embedded'])
-
- json = r"""
- {"data":[
- {"id":200,"name":"Artist Name",
- "link":"https:\/\/www.deezer.com\/artist\/1217","type":"artist"}
- ]}
- """
- response = mock.Mock(text=json)
- results = deezer.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 0)
diff --git a/tests/unit/engines/test_deviantart.py b/tests/unit/engines/test_deviantart.py
deleted file mode 100644
index bd2cf182f..000000000
--- a/tests/unit/engines/test_deviantart.py
+++ /dev/null
@@ -1,95 +0,0 @@
-from collections import defaultdict
-import mock
-from searx.engines import deviantart
-from searx.testing import SearxTestCase
-
-
-class TestDeviantartEngine(SearxTestCase):
-
- def test_request(self):
- dicto = defaultdict(dict)
- query = 'test_query'
- dicto['pageno'] = 0
- dicto['time_range'] = ''
- params = deviantart.request(query, dicto)
- self.assertTrue('url' in params)
- self.assertTrue(query in params['url'])
- self.assertTrue('deviantart.com' in params['url'])
-
- def test_no_url_in_request_year_time_range(self):
- dicto = defaultdict(dict)
- query = 'test_query'
- dicto['time_range'] = 'year'
- params = deviantart.request(query, dicto)
- self.assertEqual({}, params['url'])
-
- def test_response(self):
- self.assertRaises(AttributeError, deviantart.response, None)
- self.assertRaises(AttributeError, deviantart.response, [])
- self.assertRaises(AttributeError, deviantart.response, '')
- self.assertRaises(AttributeError, deviantart.response, '[]')
-
- response = mock.Mock(text='<html></html>')
- self.assertEqual(deviantart.response(response), [])
-
- response = mock.Mock(status_code=302)
- self.assertEqual(deviantart.response(response), [])
-
- html = """
- <div id="page-1-results" class="page-results results-page-thumb torpedo-container">
- <span class="thumb wide" href="http://amai911.deviantart.com/art/Horse-195212845"
- data-super-full-width="900" data-super-full-height="600">
- <a class="torpedo-thumb-link" href="https://url.of.image">
- <img data-sigil="torpedo-img" src="https://url.of.thumbnail" />
- </a>
- <span class="info"><span class="title-wrap"><span class="title">Title of image</span></span>
- </div>
- """
- response = mock.Mock(text=html)
- results = deviantart.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0]['title'], 'Title of image')
- self.assertEqual(results[0]['url'], 'https://url.of.image')
- self.assertNotIn('content', results[0])
- self.assertEqual(results[0]['thumbnail_src'], 'https://url.of.thumbnail')
-
- html = """
- <span class="tt-fh-tc" style="width: 202px;">
- <span class="tt-bb" style="width: 202px;">
- </span>
- <span class="shadow">
- <a class="thumb" href="http://url.of.result/2nd.part.of.url"
- title="Behoimi BE Animation Test by test-0, Jan 4,
- 2010 in Digital Art &gt; Animation"> <i></i>
- <img width="200" height="200" alt="Test"
- src="http://url.of.thumbnail" data-src="http://th08.deviantart.net/test.jpg">
- </a>
- </span>
- <!-- ^TTT -->
- </span>
- <span class="details">
- <a href="http://test-0.deviantart.com/art/Test" class="t"
- title="Behoimi BE Animation Test by test-0, Jan 4, 2010">
- <span class="tt-fh-oe">Title of image</span> </a>
- <small>
- <span class="category">
- <span class="age">
- 5 years ago
- </span>
- in <a title="Behoimi BE Animation Test by test-0, Jan 4, 2010"
- href="http://www.deviantart.com/browse/all/digitalart/animation/">Animation</a>
- </span>
- <div class="commentcount">
- <a href="http://test-0.deviantart.com/art/Test#comments">
- <span class="iconcommentsstats"></span>9 Comments</a>
- </div>
- <a class="mlt-link" href="http://www.deviantart.com/morelikethis/149167425">
- <span class="mlt-icon"></span> <span class="mlt-text">More Like This</span> </a>
- </span>
- </small> <!-- TTT$ -->
- """
- response = mock.Mock(text=html)
- results = deviantart.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 0)
diff --git a/tests/unit/engines/test_digbt.py b/tests/unit/engines/test_digbt.py
deleted file mode 100644
index 31c2ecabb..000000000
--- a/tests/unit/engines/test_digbt.py
+++ /dev/null
@@ -1,61 +0,0 @@
-from collections import defaultdict
-import mock
-from searx.engines import digbt
-from searx.testing import SearxTestCase
-
-
-class TestDigBTEngine(SearxTestCase):
-
- def test_request(self):
- query = 'test_query'
- dicto = defaultdict(dict)
- dicto['pageno'] = 0
- params = digbt.request(query, dicto)
- self.assertIn('url', params)
- self.assertIn(query, params['url'])
- self.assertIn('digbt.org', params['url'])
-
- def test_response(self):
- self.assertRaises(AttributeError, digbt.response, None)
- self.assertRaises(AttributeError, digbt.response, [])
- self.assertRaises(AttributeError, digbt.response, '')
- self.assertRaises(AttributeError, digbt.response, '[]')
-
- response = mock.Mock(text='<html></html>')
- self.assertEqual(digbt.response(response), [])
-
- html = """
- <table class="table">
- <tr><td class="x-item">
- <div>
- <a title="The Big Bang Theory" class="title" href="/The-Big-Bang-Theory-d2.html">
- The Big <span class="highlight">Bang</span> Theory
- </a>
- <span class="ctime"><span style="color:red;">4 hours ago</span></span>
- </div>
- <div class="files">
- <ul>
- <li>The Big Bang Theory 2.9 GB</li>
- <li>....</li>
- </ul>
- </div>
- <div class="tail">
- Files: 1 Size: 2.9 GB Downloads: 1 Updated: <span style="color:red;">4 hours ago</span>
- &nbsp; &nbsp;
- <a class="title" href="magnet:?xt=urn:btih:a&amp;dn=The+Big+Bang+Theory">
- <span class="glyphicon glyphicon-magnet"></span> magnet-link
- </a>
- &nbsp; &nbsp;
- </div>
- </td></tr>
- </table>
- """
- response = mock.Mock(text=html.encode('utf-8'))
- results = digbt.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0]['title'], 'The Big Bang Theory')
- self.assertEqual(results[0]['url'], 'https://digbt.org/The-Big-Bang-Theory-d2.html')
- self.assertEqual(results[0]['content'], 'The Big Bang Theory 2.9 GB ....')
- self.assertEqual(results[0]['filesize'], 3113851289)
- self.assertEqual(results[0]['magnetlink'], 'magnet:?xt=urn:btih:a&dn=The+Big+Bang+Theory')
diff --git a/tests/unit/engines/test_digg.py b/tests/unit/engines/test_digg.py
deleted file mode 100644
index 6e7c9cc99..000000000
--- a/tests/unit/engines/test_digg.py
+++ /dev/null
@@ -1,101 +0,0 @@
-from collections import defaultdict
-import mock
-from searx.engines import digg
-from searx.testing import SearxTestCase
-
-
-class TestDiggEngine(SearxTestCase):
-
- def test_request(self):
- query = 'test_query'
- dicto = defaultdict(dict)
- dicto['pageno'] = 1
- params = digg.request(query, dicto)
- self.assertIn('url', params)
- self.assertIn(query, params['url'])
- self.assertIn('digg.com', params['url'])
-
- def test_response(self):
- self.assertRaises(AttributeError, digg.response, None)
- self.assertRaises(AttributeError, digg.response, [])
- self.assertRaises(AttributeError, digg.response, '')
- self.assertRaises(AttributeError, digg.response, '[]')
-
- response = mock.Mock(text='{}')
- self.assertEqual(digg.response(response), [])
-
- response = mock.Mock(text='{"data": []}')
- self.assertEqual(digg.response(response), [])
-
- json = """
- {
- "status": "ok",
- "num": 10,
- "next_position": 20,
- "html": "<article itemscope itemtype=\\"http://schema.org/Article\\"
- class=\\"story-container digg-story-el hentry entry story-1sRANah col-1\\"
- data-content-id=\\"1sRANah\\" data-contenturl=\\"http://url.of.link\\"
- data-position=\\"0\\" data-diggs=\\"24\\" data-tweets=\\"69\\"
- data-digg-score=\\"1190\\"> <div class=\\"story-image story-image-thumb\\">
- <a data-position=\\"0\\" data-content-id=\\"1sRANah\\"
- class=\\"story-link\\" href=\\"http://www.thedailybeast.com/\\"
- target=\\"_blank\\"><img class=\\"story-image-img\\"
- src=\\"http://url.of.image.jpeg\\" width=\\"312\\" height=\\"170\\"
- alt=\\"\\" /> </a> </div> <div class=\\"story-content\\"><header
- class=\\"story-header\\"> <div itemprop=\\"alternativeHeadline\\"
- class=\\"story-kicker\\" >Kicker</div> <h2 itemprop=\\"headline\\"
- class=\\"story-title entry-title\\"><a class=\\"story-title-link story-link\\"
- rel=\\"bookmark\\" itemprop=\\"url\\" href=\\"http://www.thedailybeast.com/\\"
- target=\\"_blank\\">Title of article</h2> <div class=\\"story-meta\\">
- <div class=\\"story-score \\">
- <div class=\\"story-score-diggscore diggscore-1sRANah\\">1190</div>
- <div class=\\"story-score-details\\"> <div class=\\"arrow\\"></div>
- <ul class=\\"story-score-details-list\\"> <li
- class=\\"story-score-detail story-score-diggs\\"><span
- class=\\"label\\">Diggs:</span> <span class=\\"count diggs-1sRANah\\">24</span>
- </li> <li class=\\"story-score-detail story-score-twitter\\"><span
- class=\\"label\\">Tweets:</span> <span class=\\"count tweets-1sRANah\\">69</span>
- </li> <li class=\\"story-score-detail story-score-facebook\\"><span
- class=\\"label\\">Facebook Shares:</span> <span
- class=\\"count fb_shares-1sRANah\\">1097</span></li> </ul> </div> </div>
- <span class=\\"story-meta-item story-source\\"> <a
- itemprop=\\"publisher copyrightHolder sourceOrganization provider\\"
- class=\\"story-meta-item-link story-source-link\\"
- href=\\"/source/thedailybeast.com\\">The Daily Beast </a> </span>
- <span class=\\"story-meta-item story-tag first-tag\\"> <a
- itemprop=\\"keywords\\" rel=\\"tag\\"
- class=\\"story-meta-item-link story-tag-link\\" href=\\"/tag/news\\">News</a>
- </span> <abbr class=\\"published story-meta-item story-timestamp\\"
- title=\\"2014-10-18 14:53:45\\"> <time datetime=\\"2014-10-18 14:53:45\\">18 Oct 2014</time>
- </abbr> </div> </header> </div> <ul class=\\"story-actions\\"> <li
- class=\\"story-action story-action-digg btn-story-action-container\\">
- <a class=\\"target digg-1sRANah\\" href=\\"#\\">Digg</a></li> <li
- class=\\"story-action story-action-save btn-story-action-container\\">
- <a class=\\"target save-1sRANah\\" href=\\"#\\">Save</a></li> <li
- class=\\"story-action story-action-share\\"><a
- class=\\"target share-facebook\\" href=\\"https://www.facebook.com/\\">Facebook</a></li>
- <li class=\\"story-action story-action-share\\"><a class=\\"target share-twitter\\"
- href=\\"https://twitter.com/\\">Twitter</a></li> </ul> </article>"
- }
- """
- json = json.replace('\r\n', '').replace('\n', '').replace('\r', '')
- response = mock.Mock(text=json)
- results = digg.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0]['title'], 'Title of article')
- self.assertEqual(results[0]['url'], 'http://url.of.link')
- self.assertEqual(results[0]['thumbnail'], 'http://url.of.image.jpeg')
- self.assertEqual(results[0]['content'], '')
-
- json = """
- {
- "status": "error",
- "num": 10,
- "next_position": 20
- }
- """
- response = mock.Mock(text=json)
- results = digg.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 0)
diff --git a/tests/unit/engines/test_doku.py b/tests/unit/engines/test_doku.py
deleted file mode 100644
index 22ddb7a7f..000000000
--- a/tests/unit/engines/test_doku.py
+++ /dev/null
@@ -1,79 +0,0 @@
-# -*- coding: utf-8 -*-
-from collections import defaultdict
-import mock
-from searx.engines import doku
-from searx.testing import SearxTestCase
-
-
-class TestDokuEngine(SearxTestCase):
-
- def test_request(self):
- query = 'test_query'
- dicto = defaultdict(dict)
- params = doku.request(query, dicto)
- self.assertIn('url', params)
- self.assertIn(query, params['url'])
-
- def test_response(self):
- self.assertRaises(AttributeError, doku.response, None)
- self.assertRaises(AttributeError, doku.response, [])
- self.assertRaises(AttributeError, doku.response, '')
- self.assertRaises(AttributeError, doku.response, '[]')
-
- response = mock.Mock(text='<html></html>')
- self.assertEqual(doku.response(response), [])
-
- html = u"""
- <div class="search_quickresult">
- <h3>Pages trouvées :</h3>
- <ul class="search_quickhits">
- <li> <a href="/xfconf-query" class="wikilink1" title="xfconf-query">xfconf-query</a></li>
- </ul>
- <div class="clearer"></div>
- </div>
- """
- response = mock.Mock(text=html)
- results = doku.response(response)
- expected = [{'content': '', 'title': 'xfconf-query', 'url': 'http://localhost:8090/xfconf-query'}]
- self.assertEqual(doku.response(response), expected)
-
- html = u"""
- <dl class="search_results">
- <dt><a href="/xvnc?s[]=query" class="wikilink1" title="xvnc">xvnc</a>: 40 Occurrences trouvées</dt>
- <dd>er = /usr/bin/Xvnc
- server_args = -inetd -<strong class="search_hit">query</strong> localhost -geometry 640x480 ... er = /usr/bin/Xvnc
- server_args = -inetd -<strong class="search_hit">query</strong> localhost -geometry 800x600 ... er = /usr/bin/Xvnc
- server_args = -inetd -<strong class="search_hit">query</strong> localhost -geometry 1024x768 ... er = /usr/bin/Xvnc
- server_args = -inetd -<strong class="search_hit">query</strong> localhost -geometry 1280x1024 -depth 8 -Sec</dd>
- <dt><a href="/postfix_mysql_tls_sasl_1404?s[]=query"
- class="wikilink1"
- title="postfix_mysql_tls_sasl_1404">postfix_mysql_tls_sasl_1404</a>: 14 Occurrences trouvées</dt>
- <dd>tdepasse
- hosts = 127.0.0.1
- dbname = postfix
- <strong class="search_hit">query</strong> = SELECT goto FROM alias WHERE address='%s' AND a... tdepasse
- hosts = 127.0.0.1
- dbname = postfix
- <strong class="search_hit">query</strong> = SELECT domain FROM domain WHERE domain='%s'
- #optional <strong class="search_hit">query</strong> to use when relaying for backup MX
- #<strong class="search_hit">query</strong> = SELECT domain FROM domain WHERE domain='%s' and backupmx =</dd>
- <dt><a href="/bind9?s[]=query" class="wikilink1" title="bind9">bind9</a>: 12 Occurrences trouvées</dt>
- <dd> printcmd
-;; Got answer:
-;; -&gt;&gt;HEADER&lt;&lt;- opcode: <strong class="search_hit">QUERY</strong>, status: NOERROR, id: 13427
-;; flags: qr aa rd ra; <strong class="search_hit">QUERY</strong>: 1, ANSWER: 1, AUTHORITY: 1, ADDITIONAL: 1
-
-[...]
-
-;; <strong class="search_hit">Query</strong> time: 1 msec
-;; SERVER: 127.0.0.1#53(127.0.0.1)
-;... par la requête (<strong class="search_hit">Query</strong> time) , entre la première et la deuxième requête.</dd>
- </dl>
- """
- response = mock.Mock(text=html)
- results = doku.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 3)
- self.assertEqual(results[0]['title'], 'xvnc')
-# FIXME self.assertEqual(results[0]['url'], u'http://this.should.be.the.link/ű')
-# FIXME self.assertEqual(results[0]['content'], 'This should be the content.')
diff --git a/tests/unit/engines/test_duckduckgo.py b/tests/unit/engines/test_duckduckgo.py
deleted file mode 100644
index eb316a404..000000000
--- a/tests/unit/engines/test_duckduckgo.py
+++ /dev/null
@@ -1,106 +0,0 @@
-# -*- coding: utf-8 -*-
-from collections import defaultdict
-import mock
-from searx.engines import load_engine, duckduckgo
-from searx.testing import SearxTestCase
-
-
-class TestDuckduckgoEngine(SearxTestCase):
-
- def test_request(self):
- duckduckgo = load_engine({'engine': 'duckduckgo', 'name': 'duckduckgo'})
-
- query = 'test_query'
- dicto = defaultdict(dict)
- dicto['pageno'] = 1
- dicto['time_range'] = ''
-
- dicto['language'] = 'de-CH'
- params = duckduckgo.request(query, dicto)
- self.assertIn('url', params)
- self.assertIn(query, params['url'])
- self.assertIn('duckduckgo.com', params['url'])
- self.assertIn('ch-de', params['url'])
- self.assertIn('s=0', params['url'])
-
- # when ddg uses non standard codes
- dicto['language'] = 'zh-HK'
- params = duckduckgo.request(query, dicto)
- self.assertIn('hk-tzh', params['url'])
-
- dicto['language'] = 'en-GB'
- params = duckduckgo.request(query, dicto)
- self.assertIn('uk-en', params['url'])
-
- # no country given
- dicto['language'] = 'en'
- params = duckduckgo.request(query, dicto)
- self.assertIn('us-en', params['url'])
-
- def test_no_url_in_request_year_time_range(self):
- dicto = defaultdict(dict)
- query = 'test_query'
- dicto['time_range'] = 'year'
- params = duckduckgo.request(query, dicto)
- self.assertEqual({}, params['url'])
-
- def test_response(self):
- self.assertRaises(AttributeError, duckduckgo.response, None)
- self.assertRaises(AttributeError, duckduckgo.response, [])
- self.assertRaises(AttributeError, duckduckgo.response, '')
- self.assertRaises(AttributeError, duckduckgo.response, '[]')
-
- response = mock.Mock(text='<html></html>')
- self.assertEqual(duckduckgo.response(response), [])
-
- html = u"""
- <div class="result results_links results_links_deep web-result result--no-result">
- <div class="links_main links_deep result__body">
- <h2 class="result__title">
- </h2>
- <div class="no-results">No results</div>
- <div class="result__extras">
- </div>
- </div>
- </div>
- """
- response = mock.Mock(text=html)
- results = duckduckgo.response(response)
- self.assertEqual(duckduckgo.response(response), [])
-
- html = u"""
- <div class="result results_links results_links_deep web-result ">
- <div class="links_main links_deep result__body">
- <h2 class="result__title">
- <a rel="nofollow" class="result__a" href="http://this.should.be.the.link/ű">
- This <b>is</b> <b>the</b> title
- </a>
- </h2>
- <a class="result__snippet" href="http://this.should.be.the.link/ű">
- <b>This</b> should be the content.
- </a>
- <div class="result__extras">
- </div>
- </div>
- </div>
- """
- response = mock.Mock(text=html)
- results = duckduckgo.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0]['title'], 'This is the title')
- self.assertEqual(results[0]['url'], u'http://this.should.be.the.link/ű')
- self.assertEqual(results[0]['content'], 'This should be the content.')
-
- def test_fetch_supported_languages(self):
- js = """some code...regions:{
- "wt-wt":"All Results","ar-es":"Argentina","au-en":"Australia","at-de":"Austria","be-fr":"Belgium (fr)"
- }some more code..."""
- response = mock.Mock(text=js)
- languages = list(duckduckgo._fetch_supported_languages(response))
- self.assertEqual(len(languages), 5)
- self.assertIn('wt-WT', languages)
- self.assertIn('es-AR', languages)
- self.assertIn('en-AU', languages)
- self.assertIn('de-AT', languages)
- self.assertIn('fr-BE', languages)
diff --git a/tests/unit/engines/test_duckduckgo_definitions.py b/tests/unit/engines/test_duckduckgo_definitions.py
deleted file mode 100644
index 37587ed8d..000000000
--- a/tests/unit/engines/test_duckduckgo_definitions.py
+++ /dev/null
@@ -1,255 +0,0 @@
-from collections import defaultdict
-import mock
-from searx.engines import duckduckgo_definitions
-from searx.testing import SearxTestCase
-
-
-class TestDDGDefinitionsEngine(SearxTestCase):
-
- def test_result_to_text(self):
- url = ''
- text = 'Text'
- html_result = 'Html'
- result = duckduckgo_definitions.result_to_text(url, text, html_result)
- self.assertEqual(result, text)
-
- html_result = '<a href="url">Text in link</a>'
- result = duckduckgo_definitions.result_to_text(url, text, html_result)
- self.assertEqual(result, 'Text in link')
-
- def test_request(self):
- duckduckgo_definitions.supported_languages = ['en-US', 'es-ES']
- query = 'test_query'
- dicto = defaultdict(dict)
- dicto['pageno'] = 1
- dicto['language'] = 'es'
- params = duckduckgo_definitions.request(query, dicto)
- self.assertIn('url', params)
- self.assertIn(query, params['url'])
- self.assertIn('duckduckgo.com', params['url'])
- self.assertIn('headers', params)
- self.assertIn('Accept-Language', params['headers'])
- self.assertIn('es', params['headers']['Accept-Language'])
-
- def test_response(self):
- self.assertRaises(AttributeError, duckduckgo_definitions.response, None)
- self.assertRaises(AttributeError, duckduckgo_definitions.response, [])
- self.assertRaises(AttributeError, duckduckgo_definitions.response, '')
- self.assertRaises(AttributeError, duckduckgo_definitions.response, '[]')
-
- response = mock.Mock(text='{}')
- self.assertEqual(duckduckgo_definitions.response(response), [])
-
- response = mock.Mock(text='{"data": []}')
- self.assertEqual(duckduckgo_definitions.response(response), [])
-
- json = """
- {
- "DefinitionSource": "definition source",
- "Heading": "heading",
- "ImageWidth": 0,
- "RelatedTopics": [
- {
- "Result": "Top-level domains",
- "Icon": {
- "URL": "",
- "Height": "",
- "Width": ""
- },
- "FirstURL": "https://first.url",
- "Text": "text"
- },
- {
- "Topics": [
- {
- "Result": "result topic",
- "Icon": {
- "URL": "",
- "Height": "",
- "Width": ""
- },
- "FirstURL": "https://duckduckgo.com/?q=2%2F2",
- "Text": "result topic text"
- }
- ],
- "Name": "name"
- }
- ],
- "Entity": "Entity",
- "Type": "A",
- "Redirect": "",
- "DefinitionURL": "http://definition.url",
- "AbstractURL": "https://abstract.url",
- "Definition": "this is the definition",
- "AbstractSource": "abstract source",
- "Infobox": {
- "content": [
- {
- "data_type": "string",
- "value": "1999",
- "label": "Introduced",
- "wiki_order": 0
- }
- ],
- "meta": [
- {
- "data_type": "string",
- "value": ".test",
- "label": "article_title"
- }
- ]
- },
- "Image": "image.png",
- "ImageIsLogo": 0,
- "Abstract": "abstract",
- "AbstractText": "abstract text",
- "AnswerType": "",
- "ImageHeight": 0,
- "Results": [{
- "Result" : "result title",
- "Icon" : {
- "URL" : "result url",
- "Height" : 16,
- "Width" : 16
- },
- "FirstURL" : "result first url",
- "Text" : "result text"
- }
- ],
- "Answer": "answer"
- }
- """
- response = mock.Mock(text=json)
- results = duckduckgo_definitions.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 4)
- self.assertEqual(results[0]['answer'], 'answer')
- self.assertEqual(results[1]['title'], 'heading')
- self.assertEqual(results[1]['url'], 'result first url')
- self.assertEqual(results[2]['suggestion'], 'text')
- self.assertEqual(results[3]['infobox'], 'heading')
- self.assertEqual(results[3]['id'], 'https://definition.url')
- self.assertEqual(results[3]['entity'], 'Entity')
- self.assertIn('abstract', results[3]['content'])
- self.assertIn('this is the definition', results[3]['content'])
- self.assertEqual(results[3]['img_src'], 'image.png')
- self.assertIn('Introduced', results[3]['attributes'][0]['label'])
- self.assertIn('1999', results[3]['attributes'][0]['value'])
- self.assertIn({'url': 'https://abstract.url', 'title': 'abstract source'}, results[3]['urls'])
- self.assertIn({'url': 'http://definition.url', 'title': 'definition source'}, results[3]['urls'])
- self.assertIn({'name': 'name', 'suggestions': ['result topic text']}, results[3]['relatedTopics'])
-
- json = """
- {
- "DefinitionSource": "definition source",
- "Heading": "heading",
- "ImageWidth": 0,
- "RelatedTopics": [],
- "Entity": "Entity",
- "Type": "A",
- "Redirect": "",
- "DefinitionURL": "",
- "AbstractURL": "https://abstract.url",
- "Definition": "",
- "AbstractSource": "abstract source",
- "Image": "",
- "ImageIsLogo": 0,
- "Abstract": "",
- "AbstractText": "abstract text",
- "AnswerType": "",
- "ImageHeight": 0,
- "Results": [],
- "Answer": ""
- }
- """
- response = mock.Mock(text=json)
- results = duckduckgo_definitions.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0]['url'], 'https://abstract.url')
- self.assertEqual(results[0]['title'], 'heading')
- self.assertEqual(results[0]['content'], '')
-
- json = """
- {
- "DefinitionSource": "definition source",
- "Heading": "heading",
- "ImageWidth": 0,
- "RelatedTopics": [
- {
- "Result": "Top-level domains",
- "Icon": {
- "URL": "",
- "Height": "",
- "Width": ""
- },
- "FirstURL": "https://first.url",
- "Text": "heading"
- },
- {
- "Name": "name"
- },
- {
- "Topics": [
- {
- "Result": "result topic",
- "Icon": {
- "URL": "",
- "Height": "",
- "Width": ""
- },
- "FirstURL": "https://duckduckgo.com/?q=2%2F2",
- "Text": "heading"
- }
- ],
- "Name": "name"
- }
- ],
- "Entity": "Entity",
- "Type": "A",
- "Redirect": "",
- "DefinitionURL": "http://definition.url",
- "AbstractURL": "https://abstract.url",
- "Definition": "this is the definition",
- "AbstractSource": "abstract source",
- "Infobox": {
- "meta": [
- {
- "data_type": "string",
- "value": ".test",
- "label": "article_title"
- }
- ]
- },
- "Image": "image.png",
- "ImageIsLogo": 0,
- "Abstract": "abstract",
- "AbstractText": "abstract text",
- "AnswerType": "",
- "ImageHeight": 0,
- "Results": [{
- "Result" : "result title",
- "Icon" : {
- "URL" : "result url",
- "Height" : 16,
- "Width" : 16
- },
- "Text" : "result text"
- }
- ],
- "Answer": ""
- }
- """
- response = mock.Mock(text=json)
- results = duckduckgo_definitions.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0]['infobox'], 'heading')
- self.assertEqual(results[0]['id'], 'https://definition.url')
- self.assertEqual(results[0]['entity'], 'Entity')
- self.assertIn('abstract', results[0]['content'])
- self.assertIn('this is the definition', results[0]['content'])
- self.assertEqual(results[0]['img_src'], 'image.png')
- self.assertIn({'url': 'https://abstract.url', 'title': 'abstract source'}, results[0]['urls'])
- self.assertIn({'url': 'http://definition.url', 'title': 'definition source'}, results[0]['urls'])
- self.assertIn({'name': 'name', 'suggestions': []}, results[0]['relatedTopics'])
diff --git a/tests/unit/engines/test_duckduckgo_images.py b/tests/unit/engines/test_duckduckgo_images.py
deleted file mode 100644
index 0d152bec1..000000000
--- a/tests/unit/engines/test_duckduckgo_images.py
+++ /dev/null
@@ -1,75 +0,0 @@
-# -*- coding: utf-8 -*-
-from collections import defaultdict
-import mock
-from searx.engines import duckduckgo_images
-from searx.testing import SearxTestCase
-
-
-class TestDuckduckgoImagesEngine(SearxTestCase):
-
- def test_request(self):
- duckduckgo_images.supported_languages = ['de-CH', 'en-US']
- query = 'test_query'
- dicto = defaultdict(dict)
- dicto['is_test'] = True
- dicto['pageno'] = 1
- dicto['safesearch'] = 0
- dicto['language'] = 'all'
- params = duckduckgo_images.request(query, dicto)
- self.assertIn('url', params)
- self.assertIn(query, params['url'])
- self.assertIn('duckduckgo.com', params['url'])
- self.assertIn('s=0', params['url'])
- self.assertIn('p=-1', params['url'])
- self.assertIn('vqd=12345', params['url'])
-
- # test paging, safe search and language
- dicto['pageno'] = 2
- dicto['safesearch'] = 2
- dicto['language'] = 'de'
- params = duckduckgo_images.request(query, dicto)
- self.assertIn('url', params)
- self.assertIn(query, params['url'])
- self.assertIn('s=50', params['url'])
- self.assertIn('p=1', params['url'])
- self.assertIn('ch-de', params['url'])
-
- def test_response(self):
- self.assertRaises(AttributeError, duckduckgo_images.response, None)
- self.assertRaises(AttributeError, duckduckgo_images.response, [])
- self.assertRaises(AttributeError, duckduckgo_images.response, '')
- self.assertRaises(AttributeError, duckduckgo_images.response, '[]')
-
- response = mock.Mock(text='If this error persists, please let us know: ops@duckduckgo.com')
- self.assertRaises(Exception, duckduckgo_images.response, response)
-
- json = u"""
- {
- "query": "test_query",
- "results": [
- {
- "title": "Result 1",
- "url": "https://site1.url",
- "thumbnail": "https://thumb1.nail",
- "image": "https://image1"
- },
- {
- "title": "Result 2",
- "url": "https://site2.url",
- "thumbnail": "https://thumb2.nail",
- "image": "https://image2"
- }
- ]
- }
- """
- response = mock.Mock(text=json)
- results = duckduckgo_images.response(response)
- self.assertEqual(len(results), 2)
- self.assertEqual(results[0]['title'], 'Result 1')
- self.assertEqual(results[0]['url'], 'https://site1.url')
- self.assertEqual(results[0]['thumbnail_src'], 'https://thumb1.nail')
- self.assertEqual(results[0]['img_src'], 'https://image1')
- self.assertEqual(results[1]['title'], 'Result 2')
- self.assertEqual(results[1]['url'], 'https://site2.url')
- self.assertEqual(results[1]['thumbnail_src'], 'https://thumb2.nail')
- self.assertEqual(results[1]['img_src'], 'https://image2')
diff --git a/tests/unit/engines/test_duden.py b/tests/unit/engines/test_duden.py
deleted file mode 100644
index 52fc513d0..000000000
--- a/tests/unit/engines/test_duden.py
+++ /dev/null
@@ -1,47 +0,0 @@
-from collections import defaultdict
-import mock
-from searx.engines import duden
-from searx.testing import SearxTestCase
-from datetime import datetime
-
-
-class TestDudenEngine(SearxTestCase):
-
- def test_request(self):
- query = 'Haus'
- dic = defaultdict(dict)
- data = [
- [1, 'https://www.duden.de/suchen/dudenonline/Haus'],
- [2, 'https://www.duden.de/suchen/dudenonline/Haus?search_api_fulltext=&page=1']
- ]
- for page_no, exp_res in data:
- dic['pageno'] = page_no
- params = duden.request(query, dic)
- self.assertTrue('url' in params)
- self.assertTrue(query in params['url'])
- self.assertTrue('duden.de' in params['url'])
- self.assertEqual(params['url'], exp_res)
-
- def test_response(self):
- resp = mock.Mock(text='<html></html>')
- self.assertEqual(duden.response(resp), [])
-
- html = """
- <section class="vignette">
- <h2"> <a href="/rechtschreibung/Haus">
- <strong>This is the title also here</strong>
- </a> </h2>
- <p>This is the content</p>
- </section>
- """
- resp = mock.Mock(text=html)
- results = duden.response(resp)
-
- self.assertEqual(len(results), 1)
- self.assertEqual(type(results), list)
-
- # testing result (dictionary entry)
- r = results[0]
- self.assertEqual(r['url'], 'https://www.duden.de/rechtschreibung/Haus')
- self.assertEqual(r['title'], 'This is the title also here')
- self.assertEqual(r['content'], 'This is the content')
diff --git a/tests/unit/engines/test_dummy.py b/tests/unit/engines/test_dummy.py
deleted file mode 100644
index 9399beaaf..000000000
--- a/tests/unit/engines/test_dummy.py
+++ /dev/null
@@ -1,26 +0,0 @@
-from searx.engines import dummy
-from searx.testing import SearxTestCase
-
-
-class TestDummyEngine(SearxTestCase):
-
- def test_request(self):
- test_params = [
- [1, 2, 3],
- ['a'],
- [],
- 1
- ]
- for params in test_params:
- self.assertEqual(dummy.request(None, params), params)
-
- def test_response(self):
- responses = [
- None,
- [],
- True,
- dict(),
- tuple()
- ]
- for response in responses:
- self.assertEqual(dummy.response(response), [])
diff --git a/tests/unit/engines/test_faroo.py b/tests/unit/engines/test_faroo.py
deleted file mode 100644
index 1bd9f51c3..000000000
--- a/tests/unit/engines/test_faroo.py
+++ /dev/null
@@ -1,113 +0,0 @@
-# -*- coding: utf-8 -*-
-from collections import defaultdict
-import mock
-from searx.engines import faroo
-from searx.testing import SearxTestCase
-
-
-class TestFarooEngine(SearxTestCase):
-
- def test_request(self):
- query = 'test_query'
- dicto = defaultdict(dict)
- dicto['pageno'] = 1
- dicto['language'] = 'fr-FR'
- dicto['category'] = 'general'
- params = faroo.request(query, dicto)
- self.assertIn('url', params)
- self.assertIn(query, params['url'])
- self.assertIn('faroo.com', params['url'])
- self.assertIn('en', params['url'])
- self.assertIn('web', params['url'])
-
- dicto['language'] = 'all'
- params = faroo.request(query, dicto)
- self.assertIn('en', params['url'])
-
- dicto['language'] = 'de-DE'
- params = faroo.request(query, dicto)
- self.assertIn('de', params['url'])
-
- def test_response(self):
- self.assertRaises(AttributeError, faroo.response, None)
- self.assertRaises(AttributeError, faroo.response, [])
- self.assertRaises(AttributeError, faroo.response, '')
- self.assertRaises(AttributeError, faroo.response, '[]')
-
- response = mock.Mock(text='{}')
- self.assertEqual(faroo.response(response), [])
-
- response = mock.Mock(text='{"data": []}')
- self.assertEqual(faroo.response(response), [])
-
- response = mock.Mock(text='{"data": []}', status_code=429)
- self.assertRaises(Exception, faroo.response, response)
-
- json = """
- {
- "results": [
- {
- "title": "This is the title",
- "kwic": "This is the content",
- "content": "",
- "url": "http://this.is.the.url/",
- "iurl": "",
- "domain": "css3test.com",
- "author": "Jim Dalrymple",
- "news": true,
- "votes": "10",
- "date": 1360622563000,
- "related": []
- },
- {
- "title": "This is the title2",
- "kwic": "This is the content2",
- "content": "",
- "url": "http://this.is.the.url2/",
- "iurl": "",
- "domain": "css3test.com",
- "author": "Jim Dalrymple",
- "news": false,
- "votes": "10",
- "related": []
- },
- {
- "title": "This is the title3",
- "kwic": "This is the content3",
- "content": "",
- "url": "http://this.is.the.url3/",
- "iurl": "http://upload.wikimedia.org/optimized.jpg",
- "domain": "css3test.com",
- "author": "Jim Dalrymple",
- "news": false,
- "votes": "10",
- "related": []
- }
- ],
- "query": "test",
- "suggestions": [],
- "count": 100,
- "start": 1,
- "length": 10,
- "time": "15"
- }
- """
- response = mock.Mock(text=json)
- results = faroo.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 3)
- self.assertEqual(results[0]['title'], 'This is the title')
- self.assertEqual(results[0]['url'], 'http://this.is.the.url/')
- self.assertEqual(results[0]['content'], 'This is the content')
- self.assertEqual(results[1]['title'], 'This is the title2')
- self.assertEqual(results[1]['url'], 'http://this.is.the.url2/')
- self.assertEqual(results[1]['content'], 'This is the content2')
- self.assertEqual(results[2]['thumbnail'], 'http://upload.wikimedia.org/optimized.jpg')
-
- json = """
- {}
- """
- response = mock.Mock(text=json)
- results = faroo.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 0)
diff --git a/tests/unit/engines/test_fdroid.py b/tests/unit/engines/test_fdroid.py
deleted file mode 100644
index 42a0a7148..000000000
--- a/tests/unit/engines/test_fdroid.py
+++ /dev/null
@@ -1,60 +0,0 @@
-import mock
-from collections import defaultdict
-from searx.engines import fdroid
-from searx.testing import SearxTestCase
-
-
-class TestFdroidEngine(SearxTestCase):
-
- def test_request(self):
- query = 'test_query'
- dic = defaultdict(dict)
- dic['pageno'] = 1
- params = fdroid.request(query, dic)
- self.assertTrue('url' in params)
- self.assertTrue(query in params['url'])
- self.assertTrue('search.f-droid.org' in params['url'])
-
- def test_response_empty(self):
- resp = mock.Mock(text='<html></html>')
- self.assertEqual(fdroid.response(resp), [])
-
- def test_response_oneresult(self):
- html = """
-<!DOCTYPE html>
-<html>
-<head>
- <title>test</title>
-</head>
-<body>
- <div class="site-wrapper">
- <div class="main-content">
- <a class="package-header" href="https://example.com/app.url">
- <img class="package-icon" src="https://example.com/appexample.logo.png" />
-
- <div class="package-info">
- <h4 class="package-name">
- App Example 1
- </h4>
-
- <div class="package-desc">
- <span class="package-summary">Description App Example 1</span>
- <span class="package-license">GPL-3.0-only</span>
- </div>
- </div>
- </a>
- </div>
- </div>
-</body>
-</html>
- """
-
- resp = mock.Mock(text=html)
- results = fdroid.response(resp)
-
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0]['url'], 'https://example.com/app.url')
- self.assertEqual(results[0]['title'], 'App Example 1')
- self.assertEqual(results[0]['content'], 'Description App Example 1 - GPL-3.0-only')
- self.assertEqual(results[0]['img_src'], 'https://example.com/appexample.logo.png')
diff --git a/tests/unit/engines/test_flickr.py b/tests/unit/engines/test_flickr.py
deleted file mode 100644
index be97647ce..000000000
--- a/tests/unit/engines/test_flickr.py
+++ /dev/null
@@ -1,142 +0,0 @@
-from collections import defaultdict
-import mock
-from searx.engines import flickr
-from searx.testing import SearxTestCase
-
-
-class TestFlickrEngine(SearxTestCase):
-
- def test_request(self):
- query = 'test_query'
- dicto = defaultdict(dict)
- dicto['pageno'] = 0
- params = flickr.request(query, dicto)
- self.assertTrue('url' in params)
- self.assertTrue(query in params['url'])
- self.assertTrue('flickr.com' in params['url'])
-
- def test_response(self):
- self.assertRaises(AttributeError, flickr.response, None)
- self.assertRaises(AttributeError, flickr.response, [])
- self.assertRaises(AttributeError, flickr.response, '')
- self.assertRaises(AttributeError, flickr.response, '[]')
-
- response = mock.Mock(text='{}')
- self.assertEqual(flickr.response(response), [])
-
- response = mock.Mock(text='{"data": []}')
- self.assertEqual(flickr.response(response), [])
-
- json = r"""
- { "photos": { "page": 1, "pages": "41001", "perpage": 100, "total": "4100032",
- "photo": [
- { "id": "15751017054", "owner": "66847915@N08",
- "secret": "69c22afc40", "server": "7285", "farm": 8,
- "title": "Photo title", "ispublic": 1,
- "isfriend": 0, "isfamily": 0,
- "description": { "_content": "Description" },
- "ownername": "Owner",
- "url_o": "https:\/\/farm8.staticflickr.com\/7285\/15751017054_9178e0f963_o.jpg",
- "height_o": "2100", "width_o": "2653",
- "url_n": "https:\/\/farm8.staticflickr.com\/7285\/15751017054_69c22afc40_n.jpg",
- "height_n": "253", "width_n": "320",
- "url_z": "https:\/\/farm8.staticflickr.com\/7285\/15751017054_69c22afc40_z.jpg",
- "height_z": "507", "width_z": "640" }
- ] }, "stat": "ok" }
- """
- response = mock.Mock(text=json)
- results = flickr.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0]['title'], 'Photo title')
- self.assertEqual(results[0]['url'], 'https://www.flickr.com/photos/66847915@N08/15751017054')
- self.assertTrue('o.jpg' in results[0]['img_src'])
- self.assertTrue('n.jpg' in results[0]['thumbnail_src'])
- self.assertTrue('Owner' in results[0]['author'])
- self.assertTrue('Description' in results[0]['content'])
-
- json = r"""
- { "photos": { "page": 1, "pages": "41001", "perpage": 100, "total": "4100032",
- "photo": [
- { "id": "15751017054", "owner": "66847915@N08",
- "secret": "69c22afc40", "server": "7285", "farm": 8,
- "title": "Photo title", "ispublic": 1,
- "isfriend": 0, "isfamily": 0,
- "description": { "_content": "Description" },
- "ownername": "Owner",
- "url_z": "https:\/\/farm8.staticflickr.com\/7285\/15751017054_69c22afc40_z.jpg",
- "height_z": "507", "width_z": "640" }
- ] }, "stat": "ok" }
- """
- response = mock.Mock(text=json)
- results = flickr.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0]['title'], 'Photo title')
- self.assertEqual(results[0]['url'], 'https://www.flickr.com/photos/66847915@N08/15751017054')
- self.assertTrue('z.jpg' in results[0]['img_src'])
- self.assertTrue('z.jpg' in results[0]['thumbnail_src'])
- self.assertTrue('Owner' in results[0]['author'])
- self.assertTrue('Description' in results[0]['content'])
-
- json = r"""
- { "photos": { "page": 1, "pages": "41001", "perpage": 100, "total": "4100032",
- "photo": [
- { "id": "15751017054", "owner": "66847915@N08",
- "secret": "69c22afc40", "server": "7285", "farm": 8,
- "title": "Photo title", "ispublic": 1,
- "isfriend": 0, "isfamily": 0,
- "description": { "_content": "Description" },
- "ownername": "Owner",
- "url_o": "https:\/\/farm8.staticflickr.com\/7285\/15751017054_9178e0f963_o.jpg",
- "height_o": "2100", "width_o": "2653" }
- ] }, "stat": "ok" }
- """
- response = mock.Mock(text=json)
- results = flickr.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0]['title'], 'Photo title')
- self.assertEqual(results[0]['url'], 'https://www.flickr.com/photos/66847915@N08/15751017054')
- self.assertTrue('o.jpg' in results[0]['img_src'])
- self.assertTrue('o.jpg' in results[0]['thumbnail_src'])
- self.assertTrue('Owner' in results[0]['author'])
- self.assertTrue('Description' in results[0]['content'])
-
- json = r"""
- { "photos": { "page": 1, "pages": "41001", "perpage": 100, "total": "4100032",
- "photo": [
- { "id": "15751017054", "owner": "66847915@N08",
- "secret": "69c22afc40", "server": "7285", "farm": 8,
- "title": "Photo title", "ispublic": 1,
- "isfriend": 0, "isfamily": 0,
- "description": { "_content": "Description" },
- "ownername": "Owner",
- "url_n": "https:\/\/farm8.staticflickr.com\/7285\/15751017054_69c22afc40_n.jpg",
- "height_n": "253", "width_n": "320" }
- ] }, "stat": "ok" }
- """
- response = mock.Mock(text=json)
- results = flickr.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 0)
-
- json = """
- { "photos": { "page": 1, "pages": "41001", "perpage": 100, "total": "4100032",
- "toto": [] }, "stat": "ok" }
- """
- response = mock.Mock(text=json)
- results = flickr.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 0)
-
- json = r"""
- {"toto":[
- {"id":200,"name":"Artist Name",
- "link":"http:\/\/www.flickr.com\/artist\/1217","type":"artist"}
- ]}
- """
- response = mock.Mock(text=json)
- results = flickr.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 0)
diff --git a/tests/unit/engines/test_flickr_noapi.py b/tests/unit/engines/test_flickr_noapi.py
deleted file mode 100644
index 67699f2f0..000000000
--- a/tests/unit/engines/test_flickr_noapi.py
+++ /dev/null
@@ -1,357 +0,0 @@
-from collections import defaultdict
-import mock
-from searx.engines import flickr_noapi
-from searx.testing import SearxTestCase
-
-
-class TestFlickrNoapiEngine(SearxTestCase):
-
- def test_build_flickr_url(self):
- url = flickr_noapi.build_flickr_url("uid", "pid")
- self.assertIn("uid", url)
- self.assertIn("pid", url)
-
- def test_request(self):
- query = 'test_query'
- dicto = defaultdict(dict)
- dicto['pageno'] = 1
- dicto['time_range'] = ''
- params = flickr_noapi.request(query, dicto)
- self.assertIn('url', params)
- self.assertIn(query, params['url'])
- self.assertIn('flickr.com', params['url'])
-
- def test_response(self):
- self.assertRaises(AttributeError, flickr_noapi.response, None)
- self.assertRaises(AttributeError, flickr_noapi.response, [])
- self.assertRaises(AttributeError, flickr_noapi.response, '')
- self.assertRaises(AttributeError, flickr_noapi.response, '[]')
-
- response = mock.Mock(text='"modelExport:{"legend":[],"main":{"search-photos-lite-models":[{"photos":{}}]}}')
- self.assertEqual(flickr_noapi.response(response), [])
-
- response = \
- mock.Mock(text='"modelExport:{"legend":[],"main":{"search-photos-lite-models":[{"photos":{"_data":[]}}]}}')
- self.assertEqual(flickr_noapi.response(response), [])
-
- # everthing is ok test
- json = """
- modelExport: {
- "legend": [
- [
- "search-photos-lite-models",
- "0",
- "photos",
- "_data",
- "0"
- ]
- ],
- "main": {
- "search-photos-lite-models": [
- {
- "photos": {
- "_data": [
- {
- "_flickrModelRegistry": "photo-lite-models",
- "title": "This%20is%20the%20title",
- "username": "Owner",
- "pathAlias": "klink692",
- "realname": "Owner",
- "license": 0,
- "ownerNsid": "59729010@N00",
- "canComment": false,
- "commentCount": 14,
- "faveCount": 21,
- "id": "14001294434",
- "sizes": {
- "c": {
- "displayUrl": "//farm8.staticflickr.com/7246/14001294434_410f653777_c.jpg",
- "width": 541,
- "height": 800,
- "url": "//c4.staticflickr.com/8/7246/14001294434_410f653777_c.jpg",
- "key": "c"
- },
- "h": {
- "displayUrl": "//farm8.staticflickr.com/7246/14001294434_761d32237a_h.jpg",
- "width": 1081,
- "height": 1600,
- "url": "//c4.staticflickr.com/8/7246/14001294434_761d32237a_h.jpg",
- "key": "h"
- },
- "k": {
- "displayUrl": "//farm8.staticflickr.com/7246/14001294434_f145a2c11a_k.jpg",
- "width": 1383,
- "height": 2048,
- "url": "//c4.staticflickr.com/8/7246/14001294434_f145a2c11a_k.jpg",
- "key": "k"
- },
- "l": {
- "displayUrl": "//farm8.staticflickr.com/7246/14001294434_410f653777_b.jpg",
- "width": 692,
- "height": 1024,
- "url": "//c4.staticflickr.com/8/7246/14001294434_410f653777_b.jpg",
- "key": "l"
- },
- "m": {
- "displayUrl": "//farm8.staticflickr.com/7246/14001294434_410f653777.jpg",
- "width": 338,
- "height": 500,
- "url": "//c4.staticflickr.com/8/7246/14001294434_410f653777.jpg",
- "key": "m"
- },
- "n": {
- "displayUrl": "//farm8.staticflickr.com/7246/14001294434_410f653777_n.jpg",
- "width": 216,
- "height": 320,
- "url": "//c4.staticflickr.com/8/7246/14001294434_410f653777_n.jpg",
- "key": "n"
- },
- "q": {
- "displayUrl": "//farm8.staticflickr.com/7246/14001294434_410f653777_q.jpg",
- "width": 150,
- "height": 150,
- "url": "//c4.staticflickr.com/8/7246/14001294434_410f653777_q.jpg",
- "key": "q"
- },
- "s": {
- "displayUrl": "//farm8.staticflickr.com/7246/14001294434_410f653777_m.jpg",
- "width": 162,
- "height": 240,
- "url": "//c4.staticflickr.com/8/7246/14001294434_410f653777_m.jpg",
- "key": "s"
- },
- "sq": {
- "displayUrl": "//farm8.staticflickr.com/7246/14001294434_410f653777_s.jpg",
- "width": 75,
- "height": 75,
- "url": "//c4.staticflickr.com/8/7246/14001294434_410f653777_s.jpg",
- "key": "sq"
- },
- "t": {
- "displayUrl": "//farm8.staticflickr.com/7246/14001294434_410f653777_t.jpg",
- "width": 68,
- "height": 100,
- "url": "//c4.staticflickr.com/8/7246/14001294434_410f653777_t.jpg",
- "key": "t"
- },
- "z": {
- "displayUrl": "//farm8.staticflickr.com/7246/14001294434_410f653777_z.jpg",
- "width": 433,
- "height": 640,
- "url": "//c4.staticflickr.com/8/7246/14001294434_410f653777_z.jpg",
- "key": "z"
- }
- }
- }
- ]
- }
- }
- ]
- }
- }
- """
- # Flickr serves search results in a json block named 'modelExport' buried inside a script tag,
- # this json is served as a single line terminating with a comma.
- json = ''.join(json.split()) + ',\n'
- response = mock.Mock(text=json)
- results = flickr_noapi.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0]['title'], 'This is the title')
- self.assertEqual(results[0]['url'], 'https://www.flickr.com/photos/59729010@N00/14001294434')
- self.assertIn('k.jpg', results[0]['img_src'])
- self.assertIn('n.jpg', results[0]['thumbnail_src'])
- self.assertIn('Owner', results[0]['author'])
-
- # no n size, only the z size
- json = """
- modelExport: {
- "legend": [
- [
- "search-photos-lite-models",
- "0",
- "photos",
- "_data",
- "0"
- ]
- ],
- "main": {
- "search-photos-lite-models": [
- {
- "photos": {
- "_data": [
- {
- "_flickrModelRegistry": "photo-lite-models",
- "title": "This%20is%20the%20title",
- "username": "Owner",
- "pathAlias": "klink692",
- "realname": "Owner",
- "license": 0,
- "ownerNsid": "59729010@N00",
- "canComment": false,
- "commentCount": 14,
- "faveCount": 21,
- "id": "14001294434",
- "sizes": {
- "z": {
- "displayUrl": "//farm8.staticflickr.com/7246/14001294434_410f653777_z.jpg",
- "width": 433,
- "height": 640,
- "url": "//c4.staticflickr.com/8/7246/14001294434_410f653777_z.jpg",
- "key": "z"
- }
- }
- }
- ]
- }
- }
- ]
- }
- }
- """
- json = ''.join(json.split()) + ',\n'
- response = mock.Mock(text=json)
- results = flickr_noapi.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0]['title'], 'This is the title')
- self.assertEqual(results[0]['url'], 'https://www.flickr.com/photos/59729010@N00/14001294434')
- self.assertIn('z.jpg', results[0]['img_src'])
- self.assertIn('z.jpg', results[0]['thumbnail_src'])
- self.assertIn('Owner', results[0]['author'])
-
- # no z or n size
- json = """
- modelExport: {
- "legend": [
- [
- "search-photos-lite-models",
- "0",
- "photos",
- "_data",
- "0"
- ]
- ],
- "main": {
- "search-photos-lite-models": [
- {
- "photos": {
- "_data": [
- {
- "_flickrModelRegistry": "photo-lite-models",
- "title": "This%20is%20the%20title",
- "username": "Owner",
- "pathAlias": "klink692",
- "realname": "Owner",
- "license": 0,
- "ownerNsid": "59729010@N00",
- "canComment": false,
- "commentCount": 14,
- "faveCount": 21,
- "id": "14001294434",
- "sizes": {
- "o": {
- "displayUrl": "//farm8.staticflickr.com/7246/14001294434_410f653777_o.jpg",
- "width": 433,
- "height": 640,
- "url": "//c4.staticflickr.com/8/7246/14001294434_410f653777_o.jpg",
- "key": "o"
- }
- }
- }
- ]
- }
- }
- ]
- }
- }
- """
- json = ''.join(json.split()) + ',\n'
- response = mock.Mock(text=json)
- results = flickr_noapi.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0]['title'], 'This is the title')
- self.assertEqual(results[0]['url'], 'https://www.flickr.com/photos/59729010@N00/14001294434')
- self.assertIn('o.jpg', results[0]['img_src'])
- self.assertIn('o.jpg', results[0]['thumbnail_src'])
- self.assertIn('Owner', results[0]['author'])
-
- # no image test
- json = """
- modelExport: {
- "legend": [
- [
- "search-photos-lite-models",
- "0",
- "photos",
- "_data",
- "0"
- ]
- ],
- "main": {
- "search-photos-lite-models": [
- {
- "photos": {
- "_data": [
- {
- "_flickrModelRegistry": "photo-lite-models",
- "title": "This is the title",
- "username": "Owner",
- "pathAlias": "klink692",
- "realname": "Owner",
- "license": 0,
- "ownerNsid": "59729010@N00",
- "canComment": false,
- "commentCount": 14,
- "faveCount": 21,
- "id": "14001294434",
- "sizes": {
- }
- }
- ]
- }
- }
- ]
- }
- }
- """
- json = ''.join(json.split()) + ',\n'
- response = mock.Mock(text=json)
- results = flickr_noapi.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 0)
-
- # null test
- json = """
- modelExport: {
- "legend": [null],
- "main": {
- "search-photos-lite-models": [
- {
- "photos": {
- "_data": [null]
- }
- }
- ]
- }
- }
- """
- json = ''.join(json.split()) + ',\n'
- response = mock.Mock(text=json)
- results = flickr_noapi.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 0)
-
- # garbage test
- json = r"""
- {"toto":[
- {"id":200,"name":"Artist Name",
- "link":"http:\/\/www.flickr.com\/artist\/1217","type":"artist"}
- ]}
- """
- json = ''.join(json.split()) + ',\n'
- response = mock.Mock(text=json)
- results = flickr_noapi.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 0)
diff --git a/tests/unit/engines/test_framalibre.py b/tests/unit/engines/test_framalibre.py
deleted file mode 100644
index 850996372..000000000
--- a/tests/unit/engines/test_framalibre.py
+++ /dev/null
@@ -1,103 +0,0 @@
-# -*- coding: utf-8 -*-
-from collections import defaultdict
-import mock
-from searx.engines import framalibre
-from searx.testing import SearxTestCase
-
-
-class TestFramalibreEngine(SearxTestCase):
-
- def test_request(self):
- query = 'test_query'
- dicto = defaultdict(dict)
- dicto['pageno'] = 0
- params = framalibre.request(query, dicto)
- self.assertTrue('url' in params)
- self.assertTrue(query in params['url'])
- self.assertTrue('framalibre.org' in params['url'])
-
- def test_response(self):
- self.assertRaises(AttributeError, framalibre.response, None)
- self.assertRaises(AttributeError, framalibre.response, [])
- self.assertRaises(AttributeError, framalibre.response, '')
- self.assertRaises(AttributeError, framalibre.response, '[]')
-
- response = mock.Mock(text='{}')
- self.assertEqual(framalibre.response(response), [])
-
- response = mock.Mock(text='{"data": []}')
- self.assertEqual(framalibre.response(response), [])
-
- html = u"""
- <div class="nodes-list-row">
- <div id="node-431"
- class="node node-logiciel-annuaires node-promoted node-teaser node-teaser node-sheet clearfix nodes-list"
- about="/content/gogs" typeof="sioc:Item foaf:Document">
- <header class="media">
- <div class="media-left">
- <div class="field field-name-field-logo field-type-image field-label-hidden">
- <div class="field-items">
- <div class="field-item even">
- <a href="/content/gogs">
- <img class="media-object img-responsive" typeof="foaf:Image"
- src="https://framalibre.org/sites/default/files/styles/teaser_logo/public/leslogos/gogs-lg.png?itok=rrCxKKBy"
- width="70" height="70" alt="" />
- </a>
- </div>
- </div>
- </div>
- </div>
- <div class="media-body">
- <h3 class="node-title"><a href="/content/gogs">Gogs</a></h3>
- <span property="dc:title" content="Gogs" class="rdf-meta element-hidden"></span>
- <div class="field field-name-field-annuaires field-type-taxonomy-term-reference field-label-hidden">
- <div class="field-items">
- <div class="field-item even">
- <a href="/annuaires/cloudwebapps"
- typeof="skos:Concept" property="rdfs:label skos:prefLabel"
- datatype="" class="label label-primary">Cloud/webApps</a>
- </div>
- </div>
- </div>
- </div>
- </header>
- <div class="content">
- <div class="field field-name-field-votre-appr-ciation field-type-fivestar field-label-hidden">
- <div class="field-items">
- <div class="field-item even">
- </div>
- </div>
- </div>
- <div class="field field-name-body field-type-text-with-summary field-label-hidden">
- <div class="field-items">
- <div class="field-item even" property="content:encoded">
- <p>Gogs est une interface web basée sur git et une bonne alternative à GitHub.</p>
- </div>
- </div>
- </div>
- </div>
- <footer>
- <a href="/content/gogs" class="read-more btn btn-default btn-sm">Voir la notice</a>
- <div class="field field-name-field-lien-officiel field-type-link-field field-label-hidden">
- <div class="field-items">
- <div class="field-item even">
- <a href="https://gogs.io/" target="_blank" title="Voir le site officiel">
- <span class="glyphicon glyphicon-globe"></span>
- <span class="sr-only">Lien officiel</span>
- </a>
- </div>
- </div>
- </div>
- </footer>
- </div>
- </div>
- """
- response = mock.Mock(text=html)
- results = framalibre.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0]['title'], 'Gogs')
- self.assertEqual(results[0]['url'],
- 'https://framalibre.org/content/gogs')
- self.assertEqual(results[0]['content'],
- u"Gogs est une interface web basée sur git et une bonne alternative à GitHub.")
diff --git a/tests/unit/engines/test_frinkiac.py b/tests/unit/engines/test_frinkiac.py
deleted file mode 100644
index 5ea220cd3..000000000
--- a/tests/unit/engines/test_frinkiac.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# -*- coding: utf-8 -*-
-from collections import defaultdict
-import mock
-from searx.engines import frinkiac
-from searx.testing import SearxTestCase
-
-
-class TestFrinkiacEngine(SearxTestCase):
-
- def test_request(self):
- query = 'test_query'
- request_dict = defaultdict(dict)
- params = frinkiac.request(query, request_dict)
- self.assertTrue('url' in params)
-
- def test_response(self):
- self.assertRaises(AttributeError, frinkiac.response, None)
- self.assertRaises(AttributeError, frinkiac.response, [])
- self.assertRaises(AttributeError, frinkiac.response, '')
- self.assertRaises(AttributeError, frinkiac.response, '[]')
-
- text = """
-[{"Id":770931,
- "Episode":"S06E18",
- "Timestamp":534616,
- "Filename":""},
- {"Id":1657080,
- "Episode":"S12E14",
- "Timestamp":910868,
- "Filename":""},
- {"Id":1943753,
- "Episode":"S14E21",
- "Timestamp":773439,
- "Filename":""},
- {"Id":107835,
- "Episode":"S02E03",
- "Timestamp":531709,
- "Filename":""}]
- """
-
- response = mock.Mock(text=text)
- results = frinkiac.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 4)
- self.assertEqual(results[0]['title'], u'S06E18')
- self.assertIn('p=caption', results[0]['url'])
- self.assertIn('e=S06E18', results[0]['url'])
- self.assertIn('t=534616', results[0]['url'])
- self.assertEqual(results[0]['thumbnail_src'], 'https://frinkiac.com/img/S06E18/534616/medium.jpg')
- self.assertEqual(results[0]['img_src'], 'https://frinkiac.com/img/S06E18/534616.jpg')
diff --git a/tests/unit/engines/test_genius.py b/tests/unit/engines/test_genius.py
deleted file mode 100644
index ea721943a..000000000
--- a/tests/unit/engines/test_genius.py
+++ /dev/null
@@ -1,231 +0,0 @@
-from collections import defaultdict
-import mock
-from datetime import datetime
-from searx.engines import genius
-from searx.testing import SearxTestCase
-
-
-class TestGeniusEngine(SearxTestCase):
-
- def test_request(self):
- query = 'test_query'
- dicto = defaultdict(dict)
- dicto['pageno'] = 1
- params = genius.request(query, dicto)
- self.assertTrue('url' in params)
- self.assertTrue(query in params['url'])
- self.assertTrue('genius.com' in params['url'])
-
- def test_response(self):
-
- json_empty = """
- {
- "meta": {
- "status": 200
- },
- "response": {
- "sections": [
- {
- "type": "top_hit",
- "hits": []
- },
- {
- "type": "song",
- "hits": []
- },
- {
- "type": "lyric",
- "hits": []
- },
- {
- "type": "artist",
- "hits": []
- },
- {
- "type": "album",
- "hits": []
- },
- {
- "type": "tag",
- "hits": []
- },
- {
- "type": "video",
- "hits": []
- },
- {
- "type": "article",
- "hits": []
- },
- {
- "type": "user",
- "hits": []
- }
- ]
- }
- }
- """
-
- resp = mock.Mock(text=json_empty)
- self.assertEqual(genius.response(resp), [])
-
- json = """
- {
- "meta": {
- "status": 200
- },
- "response": {
- "sections": [
- {
- "type": "lyric",
- "hits": [
- {
- "highlights": [
- {
- "property": "lyrics",
- "value": "Sample lyrics",
- "snippet": true,
- "ranges": []
- }
- ],
- "index": "lyric",
- "type": "song",
- "result": {
- "_type": "song",
- "annotation_count": 45,
- "api_path": "/songs/52916",
- "full_title": "J't'emmerde by MC Jean Gab'1",
- "header_image_thumbnail_url": "https://images.genius.com/xxx.300x300x1.jpg",
- "header_image_url": "https://images.genius.com/ef9f736a86df3c3b1772f3fb7fbdb21c.1000x1000x1.jpg",
- "id": 52916,
- "instrumental": false,
- "lyrics_owner_id": 15586,
- "lyrics_state": "complete",
- "lyrics_updated_at": 1498744545,
- "path": "/Mc-jean-gab1-jtemmerde-lyrics",
- "pyongs_count": 4,
- "song_art_image_thumbnail_url": "https://images.genius.com/xxx.300x300x1.jpg",
- "stats": {
- "hot": false,
- "unreviewed_annotations": 0,
- "pageviews": 62490
- },
- "title": "J't'emmerde",
- "title_with_featured": "J't'emmerde",
- "updated_by_human_at": 1498744546,
- "url": "https://genius.com/Mc-jean-gab1-jtemmerde-lyrics",
- "primary_artist": {
- "_type": "artist",
- "api_path": "/artists/12691",
- "header_image_url": "https://images.genius.com/c7847662a58f8c2b0f02a6e217d60907.960x657x1.jpg",
- "id": 12691,
- "image_url": "https://s3.amazonaws.com/rapgenius/Mc-jean-gab1.jpg",
- "index_character": "m",
- "is_meme_verified": false,
- "is_verified": false,
- "name": "MC Jean Gab'1",
- "slug": "Mc-jean-gab1",
- "url": "https://genius.com/artists/Mc-jean-gab1"
- }
- }
- }
- ]
- },
- {
- "type": "artist",
- "hits": [
- {
- "highlights": [],
- "index": "artist",
- "type": "artist",
- "result": {
- "_type": "artist",
- "api_path": "/artists/191580",
- "header_image_url": "https://assets.genius.com/images/default_avatar_300.png?1503090542",
- "id": 191580,
- "image_url": "https://assets.genius.com/images/default_avatar_300.png?1503090542",
- "index_character": "a",
- "is_meme_verified": false,
- "is_verified": false,
- "name": "ASDF Guy",
- "slug": "Asdf-guy",
- "url": "https://genius.com/artists/Asdf-guy"
- }
- }
- ]
- },
- {
- "type": "album",
- "hits": [
- {
- "highlights": [],
- "index": "album",
- "type": "album",
- "result": {
- "_type": "album",
- "api_path": "/albums/132332",
- "cover_art_thumbnail_url": "https://images.genius.com/xxx.300x300x1.jpg",
- "cover_art_url": "https://images.genius.com/xxx.600x600x1.jpg",
- "full_title": "ASD by A Skylit Drive",
- "id": 132332,
- "name": "ASD",
- "name_with_artist": "ASD (artist: A Skylit Drive)",
- "release_date_components": {
- "year": 2015,
- "month": null,
- "day": null
- },
- "url": "https://genius.com/albums/A-skylit-drive/Asd",
- "artist": {
- "_type": "artist",
- "api_path": "/artists/48712",
- "header_image_url": "https://images.genius.com/814c1551293172c56306d0e310c6aa89.620x400x1.jpg",
- "id": 48712,
- "image_url": "https://images.genius.com/814c1551293172c56306d0e310c6aa89.620x400x1.jpg",
- "index_character": "s",
- "is_meme_verified": false,
- "is_verified": false,
- "name": "A Skylit Drive",
- "slug": "A-skylit-drive",
- "url": "https://genius.com/artists/A-skylit-drive"
- }
- }
- }
- ]
- }
- ]
- }
- }
- """
-
- resp = mock.Mock(text=json)
- results = genius.response(resp)
-
- self.assertEqual(len(results), 3)
- self.assertEqual(type(results), list)
-
- # check lyric parsing
- r = results[0]
- self.assertEqual(r['url'], 'https://genius.com/Mc-jean-gab1-jtemmerde-lyrics')
- self.assertEqual(r['title'], "J't'emmerde by MC Jean Gab'1")
- self.assertEqual(r['content'], "Sample lyrics")
- self.assertEqual(r['template'], 'videos.html')
- self.assertEqual(r['thumbnail'], 'https://images.genius.com/xxx.300x300x1.jpg')
- created = datetime.fromtimestamp(1498744545)
- self.assertEqual(r['publishedDate'], created)
-
- # check artist parsing
- r = results[1]
- self.assertEqual(r['url'], 'https://genius.com/artists/Asdf-guy')
- self.assertEqual(r['title'], "ASDF Guy")
- self.assertEqual(r['content'], None)
- self.assertEqual(r['template'], 'videos.html')
- self.assertEqual(r['thumbnail'], 'https://assets.genius.com/images/default_avatar_300.png?1503090542')
-
- # check album parsing
- r = results[2]
- self.assertEqual(r['url'], 'https://genius.com/albums/A-skylit-drive/Asd')
- self.assertEqual(r['title'], "ASD by A Skylit Drive")
- self.assertEqual(r['content'], "Released: 2015")
- self.assertEqual(r['template'], 'videos.html')
- self.assertEqual(r['thumbnail'], 'https://images.genius.com/xxx.600x600x1.jpg')
diff --git a/tests/unit/engines/test_gigablast.py b/tests/unit/engines/test_gigablast.py
deleted file mode 100644
index 6b2d26458..000000000
--- a/tests/unit/engines/test_gigablast.py
+++ /dev/null
@@ -1,119 +0,0 @@
-from collections import defaultdict
-import mock
-from searx.engines import gigablast
-from searx.testing import SearxTestCase
-
-
-class TestGigablastEngine(SearxTestCase):
-
- def test_request(self):
- query = 'test_query'
- dicto = defaultdict(dict)
- dicto['pageno'] = 0
- dicto['safesearch'] = 0
- dicto['language'] = 'all'
- params = gigablast.request(query, dicto)
- self.assertTrue('url' in params)
- self.assertTrue(query in params['url'])
- self.assertTrue('gigablast.com' in params['url'])
- self.assertTrue('xx' in params['url'])
-
- dicto['language'] = 'en-US'
- params = gigablast.request(query, dicto)
- self.assertTrue('en' in params['url'])
- self.assertFalse('en-US' in params['url'])
-
- def test_response(self):
- self.assertRaises(AttributeError, gigablast.response, None)
- self.assertRaises(AttributeError, gigablast.response, [])
- self.assertRaises(AttributeError, gigablast.response, '')
- self.assertRaises(AttributeError, gigablast.response, '[]')
-
- response = mock.Mock(text='{"results": []}')
- self.assertEqual(gigablast.response(response), [])
-
- json = """{"results": [
- {
- "title":"South by Southwest 2016",
- "dmozEntry":{
- "dmozCatId":1041152,
- "directCatId":1,
- "dmozCatStr":"Top: Regional: North America: United States",
- "dmozTitle":"South by Southwest (SXSW)",
- "dmozSum":"Annual music, film, and interactive conference.",
- "dmozAnchor":""
- },
- "dmozEntry":{
- "dmozCatId":763945,
- "directCatId":1,
- "dmozCatStr":"Top: Regional: North America: United States",
- "dmozTitle":"South by Southwest (SXSW)",
- "dmozSum":"",
- "dmozAnchor":"www.sxsw.com"
- },
- "dmozEntry":{
- "dmozCatId":761446,
- "directCatId":1,
- "dmozCatStr":"Top: Regional: North America: United States",
- "dmozTitle":"South by Southwest (SXSW)",
- "dmozSum":"Music, film, and interactive conference and festival.",
- "dmozAnchor":""
- },
- "indirectDmozCatId":1041152,
- "indirectDmozCatId":763945,
- "indirectDmozCatId":761446,
- "contentType":"html",
- "sum":"This should be the content.",
- "url":"www.sxsw.com",
- "hopCount":0,
- "size":" 102k",
- "sizeInBytes":104306,
- "bytesUsedToComputeSummary":70000,
- "docId":269411794364,
- "docScore":586571136.000000,
- "summaryGenTimeMS":12,
- "summaryTagdbLookupTimeMS":0,
- "summaryTitleRecLoadTimeMS":1,
- "site":"www.sxsw.com",
- "spidered":1452203608,
- "firstIndexedDateUTC":1444167123,
- "contentHash32":2170650347,
- "language":"English",
- "langAbbr":"en"
- }
-]}
- """
- response = mock.Mock(text=json)
- results = gigablast.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0]['title'], 'South by Southwest 2016')
- self.assertEqual(results[0]['url'], 'www.sxsw.com')
- self.assertEqual(results[0]['content'], 'This should be the content.')
-
- def test_fetch_supported_languages(self):
- html = """<html></html>"""
- response = mock.Mock(text=html)
- results = gigablast._fetch_supported_languages(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 0)
-
- html = """
- <html>
- <body>
- <span id="menu2">
- <a href="/search?&rxikd=1&qlang=xx"></a>
- <a href="/search?&rxikd=1&qlang=en"></a>
- <a href="/search?&rxikd=1&prepend=gblang%3Aen"></a>
- <a href="/search?&rxikd=1&qlang=zh_"></a>
- <a href="/search?&rxikd=1&prepend=gblang%3Azh_tw"></a>
- </span>
- </body>
- </html>
- """
- response = mock.Mock(text=html)
- languages = gigablast._fetch_supported_languages(response)
- self.assertEqual(type(languages), list)
- self.assertEqual(len(languages), 2)
- self.assertIn('en', languages)
- self.assertIn('zh-TW', languages)
diff --git a/tests/unit/engines/test_github.py b/tests/unit/engines/test_github.py
deleted file mode 100644
index 460be8c3d..000000000
--- a/tests/unit/engines/test_github.py
+++ /dev/null
@@ -1,61 +0,0 @@
-from collections import defaultdict
-import mock
-from searx.engines import github
-from searx.testing import SearxTestCase
-
-
-class TestGitHubEngine(SearxTestCase):
-
- def test_request(self):
- query = 'test_query'
- params = github.request(query, defaultdict(dict))
- self.assertTrue('url' in params)
- self.assertTrue(query in params['url'])
- self.assertTrue('github.com' in params['url'])
- self.assertEqual(params['headers']['Accept'], github.accept_header)
-
- def test_response(self):
- self.assertRaises(AttributeError, github.response, None)
- self.assertRaises(AttributeError, github.response, [])
- self.assertRaises(AttributeError, github.response, '')
- self.assertRaises(AttributeError, github.response, '[]')
-
- response = mock.Mock(text='{}')
- self.assertEqual(github.response(response), [])
-
- response = mock.Mock(text='{"items": []}')
- self.assertEqual(github.response(response), [])
-
- json = """
- {
- "items": [
- {
- "name": "title",
- "html_url": "url",
- "description": ""
- }
- ]
- }
- """
- response = mock.Mock(text=json)
- results = github.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0]['title'], 'title')
- self.assertEqual(results[0]['url'], 'url')
- self.assertEqual(results[0]['content'], '')
-
- json = """
- {
- "items": [
- {
- "name": "title",
- "html_url": "url",
- "description": "desc"
- }
- ]
- }
- """
- response = mock.Mock(text=json)
- results = github.response(response)
- self.assertEqual(results[0]['content'], "desc")
diff --git a/tests/unit/engines/test_google.py b/tests/unit/engines/test_google.py
deleted file mode 100644
index a73e9d2be..000000000
--- a/tests/unit/engines/test_google.py
+++ /dev/null
@@ -1,237 +0,0 @@
-# -*- coding: utf-8 -*-
-from collections import defaultdict
-import mock
-import lxml
-from searx.engines import google
-from searx.testing import SearxTestCase
-
-
-class TestGoogleEngine(SearxTestCase):
-
- def mock_response(self, text):
- response = mock.Mock(text=text, url='https://www.google.com/search?q=test&start=0&gbv=1&gws_rd=cr')
- response.search_params = mock.Mock()
- response.search_params.get = mock.Mock(return_value='www.google.com')
- return response
-
- def test_request(self):
- google.supported_languages = ['en', 'fr', 'zh-CN', 'iw']
- google.language_aliases = {'he': 'iw'}
-
- query = 'test_query'
- dicto = defaultdict(dict)
- dicto['pageno'] = 1
- dicto['language'] = 'fr-FR'
- dicto['time_range'] = ''
- params = google.request(query, dicto)
- self.assertIn('url', params)
- self.assertIn(query, params['url'])
- self.assertIn('google.fr', params['url'])
- self.assertIn('fr', params['url'])
- self.assertIn('fr', params['headers']['Accept-Language'])
-
- dicto['language'] = 'en-US'
- params = google.request(query, dicto)
- self.assertIn('google.com', params['url'])
- self.assertIn('en', params['url'])
- self.assertIn('en', params['headers']['Accept-Language'])
-
- dicto['language'] = 'zh'
- params = google.request(query, dicto)
- self.assertIn('google.com', params['url'])
- self.assertIn('zh-CN', params['url'])
- self.assertIn('zh-CN', params['headers']['Accept-Language'])
-
- dicto['language'] = 'he'
- params = google.request(query, dicto)
- self.assertIn('google.com', params['url'])
- self.assertIn('iw', params['url'])
- self.assertIn('iw', params['headers']['Accept-Language'])
-
- def test_response(self):
- self.assertRaises(AttributeError, google.response, None)
- self.assertRaises(AttributeError, google.response, [])
- self.assertRaises(AttributeError, google.response, '')
- self.assertRaises(AttributeError, google.response, '[]')
-
- response = self.mock_response('<html></html>')
- self.assertEqual(google.response(response), [])
-
- html = """
- <div class="g">
- <h3 class="r">
- <a href="http://this.should.be.the.link/">
- <b>This</b> is <b>the</b> title
- </a>
- </h3>
- <div class="s">
- <div class="kv" style="margin-bottom:2px">
- <cite>
- <b>test</b>.psychologies.com/
- </cite>
- <div class="_nBb">‎
- <div style="display:inline" onclick="google.sham(this);" aria-expanded="false"
- aria-haspopup="true" tabindex="0" data-ved="0CBUQ7B0wAA">
- <span class="_O0">
- </span>
- </div>
- <div style="display:none" class="am-dropdown-menu" role="menu" tabindex="-1">
- <ul>
- <li class="_Ykb">
- <a class="_Zkb" href="http://www.google.fr/url?url=http://webcache.googleusercontent
- .com/search%3Fcache:R1Z_4pGXjuIJ:http://test.psychologies.com/">
- En cache
- </a>
- </li>
- <li class="_Ykb">
- <a class="_Zkb" href="/search?safe=off&amp;q=related:test.psy.com/">
- Pages similaires
- </a>
- </li>
- </ul>
- </div>
- </div>
- </div>
- <span class="st">
- This should be the content.
- </span>
- <br>
- <div class="osl">‎
- <a href="http://www.google.fr/url?url=http://test.psychologies.com/tests/">
- Test Personnalité
- </a> - ‎
- <a href="http://www.google.fr/url?url=http://test.psychologies.com/test/">
- Tests - Moi
- </a> - ‎
- <a href="http://www.google.fr/url?url=http://test.psychologies.com/test/tests-couple">
- Test Couple
- </a>
- - ‎
- <a href="http://www.google.fr/url?url=http://test.psychologies.com/tests/tests-amour">
- Test Amour
- </a>
- </div>
- </div>
- </div>
- <div class="g">
- <h3 class="r">
- <a href="http://www.google.com/images?q=toto">
- <b>This</b>
- </a>
- </h3>
- </div>
- <div class="g">
- <h3 class="r">
- <a href="http://www.google.com/search?q=toto">
- <b>This</b> is
- </a>
- </h3>
- </div>
- <div class="g">
- <h3 class="r">
- <a href="€">
- <b>This</b> is <b>the</b>
- </a>
- </h3>
- </div>
- <div class="g">
- <h3 class="r">
- <a href="/url?q=url">
- <b>This</b> is <b>the</b>
- </a>
- </h3>
- </div>
- <p class="_Bmc" style="margin:3px 8px">
- <a href="/search?num=20&amp;safe=off&amp;q=t&amp;revid=1754833769&amp;sa=X&amp;ei=-&amp;ved=">
- suggestion <b>title</b>
- </a>
- </p>
- """
- response = self.mock_response(html)
- results = google.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 2)
- self.assertEqual(results[0]['title'], 'This is the title')
- self.assertEqual(results[0]['url'], 'http://this.should.be.the.link/')
- self.assertEqual(results[0]['content'], 'This should be the content.')
- self.assertEqual(results[1]['suggestion'], 'suggestion title')
-
- html = """
- <li class="b_algo" u="0|5109|4755453613245655|UAGjXgIrPH5yh-o5oNHRx_3Zta87f_QO">
- </li>
- """
- response = self.mock_response(html)
- results = google.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 0)
-
- response = mock.Mock(text='<html></html>', url='https://sorry.google.com')
- response.search_params = mock.Mock()
- response.search_params.get = mock.Mock(return_value='www.google.com')
- self.assertRaises(RuntimeWarning, google.response, response)
-
- response = mock.Mock(text='<html></html>', url='https://www.google.com/sorry/IndexRedirect')
- response.search_params = mock.Mock()
- response.search_params.get = mock.Mock(return_value='www.google.com')
- self.assertRaises(RuntimeWarning, google.response, response)
-
- def test_parse_images(self):
- html = """
- <li>
- <div>
- <a href="http://www.google.com/url?q=http://this.is.the.url/">
- <img style="margin:3px 0;margin-right:6px;padding:0" height="90"
- src="https://this.is.the.image/image.jpg" width="60" align="middle" alt="" border="0">
- </a>
- </div>
- </li>
- """
- dom = lxml.html.fromstring(html)
- results = google.parse_images(dom, 'www.google.com')
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0]['url'], 'http://this.is.the.url/')
- self.assertEqual(results[0]['title'], '')
- self.assertEqual(results[0]['content'], '')
- self.assertEqual(results[0]['img_src'], 'https://this.is.the.image/image.jpg')
-
- def test_fetch_supported_languages(self):
- html = """<html></html>"""
- response = mock.Mock(text=html)
- languages = google._fetch_supported_languages(response)
- self.assertEqual(type(languages), dict)
- self.assertEqual(len(languages), 0)
-
- html = u"""
- <html>
- <body>
- <div id="langSec">
- <div>
- <input name="lr" data-name="english" value="lang_en" />
- <input name="lr" data-name="中文 (简体)" value="lang_zh-CN" />
- <input name="lr" data-name="中文 (繁體)" value="lang_zh-TW" />
- </div>
- </div>
- </body>
- </html>
- """
- response = mock.Mock(text=html)
- languages = google._fetch_supported_languages(response)
- self.assertEqual(type(languages), dict)
- self.assertEqual(len(languages), 3)
-
- self.assertIn('en', languages)
- self.assertIn('zh-CN', languages)
- self.assertIn('zh-TW', languages)
-
- self.assertEquals(type(languages['en']), dict)
- self.assertEquals(type(languages['zh-CN']), dict)
- self.assertEquals(type(languages['zh-TW']), dict)
-
- self.assertIn('name', languages['en'])
- self.assertIn('name', languages['zh-CN'])
- self.assertIn('name', languages['zh-TW'])
-
- self.assertEquals(languages['en']['name'], 'English')
- self.assertEquals(languages['zh-CN']['name'], u'中文 (简体)')
- self.assertEquals(languages['zh-TW']['name'], u'中文 (繁體)')
diff --git a/tests/unit/engines/test_google_images.py b/tests/unit/engines/test_google_images.py
deleted file mode 100644
index 8366e1b08..000000000
--- a/tests/unit/engines/test_google_images.py
+++ /dev/null
@@ -1,27 +0,0 @@
-from collections import defaultdict
-import mock
-from searx.engines import google_images
-from searx.testing import SearxTestCase
-
-
-class TestGoogleImagesEngine(SearxTestCase):
-
- def test_request(self):
- query = 'test_query'
- dicto = defaultdict(dict)
- dicto['pageno'] = 1
- dicto['safesearch'] = 1
- dicto['time_range'] = ''
- params = google_images.request(query, dicto)
- self.assertIn('url', params)
- self.assertIn(query, params['url'])
-
- dicto['safesearch'] = 0
- params = google_images.request(query, dicto)
- self.assertNotIn('safe', params['url'])
-
- def test_response(self):
- self.assertRaises(AttributeError, google_images.response, None)
- self.assertRaises(AttributeError, google_images.response, [])
- self.assertRaises(AttributeError, google_images.response, '')
- self.assertRaises(AttributeError, google_images.response, '[]')
diff --git a/tests/unit/engines/test_google_news.py b/tests/unit/engines/test_google_news.py
deleted file mode 100644
index 0a122ca6d..000000000
--- a/tests/unit/engines/test_google_news.py
+++ /dev/null
@@ -1,102 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from collections import defaultdict
-import mock
-from searx.engines import google_news
-from searx.testing import SearxTestCase
-
-
-class TestGoogleNewsEngine(SearxTestCase):
-
- def test_request(self):
- google_news.supported_languages = ['en-US', 'fr-FR']
- google_news.language_aliases = {}
- query = 'test_query'
- dicto = defaultdict(dict)
- dicto['pageno'] = 1
- dicto['language'] = 'fr-FR'
- dicto['time_range'] = 'w'
- params = google_news.request(query, dicto)
- self.assertIn('url', params)
- self.assertIn(query, params['url'])
- self.assertIn('fr', params['url'])
-
- dicto['language'] = 'all'
- params = google_news.request(query, dicto)
- self.assertIn('url', params)
- self.assertNotIn('fr', params['url'])
-
- def test_response(self):
- self.assertRaises(AttributeError, google_news.response, None)
- self.assertRaises(AttributeError, google_news.response, [])
- self.assertRaises(AttributeError, google_news.response, '')
- self.assertRaises(AttributeError, google_news.response, '[]')
-
- response = mock.Mock(text='{}')
- self.assertEqual(google_news.response(response), [])
-
- response = mock.Mock(text='{"data": []}')
- self.assertEqual(google_news.response(response), [])
-
- html = u"""
-<h2 class="hd">Search Results</h2>
-<div data-async-context="query:searx" id="ires">
- <div eid="oC2oWcGXCafR6ASkwoCwDA" id="rso">
- <div class="_NId">
- <!--m-->
- <div class="g _cy">
- <div class="ts _JGs _JHs _tJs _KGs _jHs">
- <div class="_hJs">
- <h3 class="r _gJs">
- <a class="l lLrAF" href="https://example.com/" onmousedown="return rwt(this,'','','','11','AFQjCNEyehpzD5cJK1KUfXBx9RmsbqqG9g','','0ahUKEwjB58OR54HWAhWnKJoKHSQhAMY4ChCpAggiKAAwAA','','',event)">Example title</a>
- </h3>
- <div class="slp">
- <span class="_OHs _PHs">
- Mac &amp; i</span>
- <span class="_QGs">
- -</span>
- <span class="f nsa _QHs">
- Mar 21, 2016</span>
- </div>
- <div class="st">Example description</div>
- </div>
- </div>
- </div>
- <div class="g _cy">
- <div class="ts _JGs _JHs _oGs _KGs _jHs">
- <a class="top _xGs _SHs" href="https://example2.com/" onmousedown="return rwt(this,'','','','12','AFQjCNHObfH7sYmLWI1SC-YhWXKZFRzRjw','','0ahUKEwjB58OR54HWAhWnKJoKHSQhAMY4ChC8iAEIJDAB','','',event)">
- <img class="th _RGs" src="https://example2.com/image.jpg" alt="Story image for searx from Golem.de" onload="typeof google==='object'&&google.aft&&google.aft(this)">
- </a>
- <div class="_hJs">
- <h3 class="r _gJs">
- <a class="l lLrAF" href="https://example2.com/" onmousedown="return rwt(this,'','','','12','AFQjCNHObfH7sYmLWI1SC-YhWXKZFRzRjw','','0ahUKEwjB58OR54HWAhWnKJoKHSQhAMY4ChCpAgglKAAwAQ','','',event)">Example title 2</a>
- </h3>
- <div class="slp">
- <span class="_OHs _PHs">
- Golem.de</span>
- <span class="_QGs">
- -</span>
- <span class="f nsa _QHs">
- Oct 4, 2016</span>
- </div>
- <div class="st">Example description 2</div>
- </div>
- </div>
- </div>
- </div>
- </div>
-</div>
-
-
- """ # noqa
- response = mock.Mock(text=html)
- results = google_news.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 2)
- self.assertEqual(results[0]['title'], u'Example title')
- self.assertEqual(results[0]['url'], 'https://example.com/')
- self.assertEqual(results[0]['content'], 'Example description')
- self.assertEqual(results[1]['title'], u'Example title 2')
- self.assertEqual(results[1]['url'], 'https://example2.com/')
- self.assertEqual(results[1]['content'], 'Example description 2')
- self.assertEqual(results[1]['img_src'], 'https://example2.com/image.jpg')
diff --git a/tests/unit/engines/test_google_videos.py b/tests/unit/engines/test_google_videos.py
deleted file mode 100644
index 3b7edf373..000000000
--- a/tests/unit/engines/test_google_videos.py
+++ /dev/null
@@ -1,79 +0,0 @@
-from collections import defaultdict
-import mock
-from searx.engines import google_videos
-from searx.testing import SearxTestCase
-
-
-class TestGoogleVideosEngine(SearxTestCase):
-
- def test_request(self):
- query = 'test_query'
- dicto = defaultdict(dict)
- dicto['pageno'] = 1
- dicto['safesearch'] = 1
- dicto['time_range'] = ''
- params = google_videos.request(query, dicto)
- self.assertIn('url', params)
- self.assertIn(query, params['url'])
-
- dicto['safesearch'] = 0
- params = google_videos.request(query, dicto)
- self.assertNotIn('safe', params['url'])
-
- def test_response(self):
- self.assertRaises(AttributeError, google_videos.response, None)
- self.assertRaises(AttributeError, google_videos.response, [])
- self.assertRaises(AttributeError, google_videos.response, '')
- self.assertRaises(AttributeError, google_videos.response, '[]')
-
- html = r"""
- <div>
- <div>
- <div class="g">
- <div class="r">
- <a href="url_1"><h3>Title 1</h3></a>
- </div>
- <div class="s">
- <div>
- <a>
- <g-img>
- <img id="vidthumb1">
- </g-img>
- </a>
- </div>
- </div>
- <div>
- <span class="st">Content 1</span>
- </div>
- </div>
- <div class="g">
- <div class="r">
- <a href="url_2"><h3>Title 2</h3></a>
- </div>
- <div class="s">
- <div>
- <a>
- <g-img>
- <img id="vidthumb2">
- </g-img>
- </a>
- </div>
- </div>
- <div>
- <span class="st">Content 2</span>
- </div>
- </div>
- </div>
- </div>
- <script>function _setImagesSrc(c,d,e){}</script>
- """
- response = mock.Mock(text=html)
- results = google_videos.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 2)
- self.assertEqual(results[0]['url'], u'url_1')
- self.assertEqual(results[0]['title'], u'Title 1')
- self.assertEqual(results[0]['content'], u'Content 1')
- self.assertEqual(results[1]['url'], u'url_2')
- self.assertEqual(results[1]['title'], u'Title 2')
- self.assertEqual(results[1]['content'], u'Content 2')
diff --git a/tests/unit/engines/test_ina.py b/tests/unit/engines/test_ina.py
deleted file mode 100644
index 109a9592d..000000000
--- a/tests/unit/engines/test_ina.py
+++ /dev/null
@@ -1,64 +0,0 @@
-from collections import defaultdict
-import mock
-from searx.engines import ina
-from searx.testing import SearxTestCase
-
-
-class TestInaEngine(SearxTestCase):
-
- def test_request(self):
- query = 'test_query'
- dicto = defaultdict(dict)
- dicto['pageno'] = 0
- params = ina.request(query, dicto)
- self.assertTrue('url' in params)
- self.assertTrue(query in params['url'])
- self.assertTrue('ina.fr' in params['url'])
-
- def test_response(self):
- self.assertRaises(AttributeError, ina.response, None)
- self.assertRaises(AttributeError, ina.response, [])
- self.assertRaises(AttributeError, ina.response, '')
- self.assertRaises(AttributeError, ina.response, '[]')
-
- response = mock.Mock(text='{}')
- self.assertEqual(ina.response(response), [])
-
- response = mock.Mock(text='{"data": []}')
- self.assertEqual(ina.response(response), [])
-
- json = """
- {"content":"\\t<div class=\\"container\\">\\n\\t\\n\
- <!-- DEBUT CONTENU PRINCIPAL -->\\n<div class=\\"row\\">\\n\
- <div class=\\"search-results--list\\"><div class=\\"media\\">\\n\
- \\t\\t\\t\\t<a class=\\"media-left media-video premium xiti_click_action\\" \
- data-xiti-params=\\"recherche_v4::resultats_conference_de_presse_du_general_de_gaulle::N\\" \
- href=\\"\\/video\\/CAF89035682\\/conference-de-presse-du-general-de-gaulle-video.html\\">\\n\
- <img src=\\"https:\\/\\/www.ina.fr\\/images_v2\\/140x105\\/CAF89035682.jpeg\\" \
- alt=\\"Conf\\u00e9rence de presse du G\\u00e9n\\u00e9ral de Gaulle \\">\\n\
- \\t\\t\\t\\t\\t<\\/a>\\n\
- \\t\\t\\t\\t\\t<div class=\\"media-body\\">\\n\\t\\t\\t\\t\\t\\t<h3 class=\\"h3--title media-heading\\">\\n\
- \\t\\t\\t\\t\\t\\t\\t<a class=\\"xiti_click_action\\" \
- data-xiti-params=\\"recherche_v4::resultats_conference_de_presse_du_general_de_gaulle::N\\" \
- href=\\"\\/video\\/CAF89035682\\/conference-de-presse-du-general-de-gaulle-video.html\\">\
- Conf\\u00e9rence de presse du G\\u00e9n\\u00e9ral de Gaulle <\\/a>\\n\
- <\\/h3>\\n\
- <div class=\\"media-body__info\\">\\n<span class=\\"broadcast\\">27\\/11\\/1967<\\/span>\\n\
- <span class=\\"views\\">29321 vues<\\/span>\\n\
- <span class=\\"duration\\">01h 33m 07s<\\/span>\\n\
- <\\/div>\\n\
- <p class=\\"media-body__summary\\">VERSION INTEGRALE DE LA CONFERENCE DE PRESSE DU GENERAL DE GAULLE . \
- - PA le Pr\\u00e9sident DE GAULLE : il ouvre les bras et s'assied. DP journalis...<\\/p>\\n\
- <\\/div>\\n<\\/div><!-- \\/.media -->\\n"
- }
- """
- response = mock.Mock(text=json)
- results = ina.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0]['title'], u'Conf\xe9rence de presse du G\xe9n\xe9ral de Gaulle')
- self.assertEqual(results[0]['url'],
- 'https://www.ina.fr/video/CAF89035682/conference-de-presse-du-general-de-gaulle-video.html')
- self.assertEqual(results[0]['content'],
- u"VERSION INTEGRALE DE LA CONFERENCE DE PRESSE DU GENERAL DE GAULLE ."
- u" - PA le Pr\u00e9sident DE GAULLE : il ouvre les bras et s'assied. DP journalis...")
diff --git a/tests/unit/engines/test_kickass.py b/tests/unit/engines/test_kickass.py
deleted file mode 100644
index 3a75c6697..000000000
--- a/tests/unit/engines/test_kickass.py
+++ /dev/null
@@ -1,397 +0,0 @@
-# -*- coding: utf-8 -*-
-from collections import defaultdict
-import mock
-from searx.engines import kickass
-from searx.testing import SearxTestCase
-
-
-class TestKickassEngine(SearxTestCase):
-
- def test_request(self):
- query = 'test_query'
- dicto = defaultdict(dict)
- dicto['pageno'] = 1
- params = kickass.request(query, dicto)
- self.assertIn('url', params)
- self.assertIn(query, params['url'])
- self.assertIn('kickass.cd', params['url'])
- self.assertFalse(params['verify'])
-
- def test_response(self):
- self.assertRaises(AttributeError, kickass.response, None)
- self.assertRaises(AttributeError, kickass.response, [])
- self.assertRaises(AttributeError, kickass.response, '')
- self.assertRaises(AttributeError, kickass.response, '[]')
-
- response = mock.Mock(text='<html></html>')
- self.assertEqual(kickass.response(response), [])
-
- html = """
- <table cellpadding="0" cellspacing="0" class="data" style="width: 100%">
- <tr class="firstr">
- <th class="width100perc nopad">torrent name</th>
- <th class="center">
- <a href="/search/test/?field=size&sorder=desc" rel="nofollow">size</a>
- </th>
- <th class="center"><span class="files">
- <a href="/search/test/?field=files_count&sorder=desc" rel="nofollow">files</a></span>
- </th>
- <th class="center"><span>
- <a href="/search/test/?field=time_add&sorder=desc" rel="nofollow">age</a></span>
- </th>
- <th class="center"><span class="seed">
- <a href="/search/test/?field=seeders&sorder=desc" rel="nofollow">seed</a></span>
- </th>
- <th class="lasttd nobr center">
- <a href="/search/test/?field=leechers&sorder=desc" rel="nofollow">leech</a>
- </th>
- </tr>
- <tr class="even" id="torrent_test6478745">
- <td>
- <div class="iaconbox center floatright">
- <a rel="6478745,0" class="icommentjs icon16" href="/test-t6478745.html#comment">
- <em style="font-size: 12px; margin: 0 4px 0 4px;" class="iconvalue">3</em>
- <i class="ka ka-comment"></i>
- </a>
- <a class="iverify icon16" href="/test-t6478745.html" title="Verified Torrent">
- <i class="ka ka16 ka-verify ka-green"></i>
- </a>
- <a href="#" onclick="_scq.push([]); return false;" class="partner1Button idownload icon16">
- <i class="ka ka16 ka-arrow-down partner1Button"></i>
- </a>
- <a title="Torrent magnet link"
- href="magnet:?xt=urn:btih:MAGNETURL&dn=test" class="imagnet icon16">
- <i class="ka ka16 ka-magnet"></i>
- </a>
- <a title="Download torrent file"
- href="http://torcache.net/torrent/53917.torrent?title=test" class="idownload icon16">
- <i class="ka ka16 ka-arrow-down"></i>
- </a>
- </div>
- <div class="torrentname">
- <a href="/test-t6478745.html" class="torType txtType"></a>
- <a href="/test-t6478745.html" class="normalgrey font12px plain bold"></a>
- <div class="markeredBlock torType txtType">
- <a href="/url.html" class="cellMainLink">
- <strong class="red">This should be the title</strong>
- </a>
- <span class="font11px lightgrey block">
- Posted by <i class="ka ka-verify" style="font-size: 16px;color:orange;"></i>
- <a class="plain" href="/user/riri/">riri</a> in
- <span id="cat_6478745">
- <strong><a href="/other/">Other</a> > <a href="/unsorted/">Unsorted</a></strong>
- </span>
- </span>
- </div>
- </td>
- <td class="nobr center">449 bytes</td>
- <td class="center">4</td>
- <td class="center">2&nbsp;years</td>
- <td class="green center">10</td>
- <td class="red lasttd center">1</td>
- </tr>
- </table>
- """
- response = mock.Mock(text=html)
- results = kickass.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0]['title'], 'This should be the title')
- self.assertEqual(results[0]['url'], 'https://kickass.cd/url.html')
- self.assertEqual(results[0]['content'], 'Posted by riri in Other > Unsorted')
- self.assertEqual(results[0]['seed'], 10)
- self.assertEqual(results[0]['leech'], 1)
- self.assertEqual(results[0]['filesize'], 449)
- self.assertEqual(results[0]['files'], 4)
- self.assertEqual(results[0]['magnetlink'], 'magnet:?xt=urn:btih:MAGNETURL&dn=test')
- self.assertEqual(results[0]['torrentfile'], 'http://torcache.net/torrent/53917.torrent?title=test')
-
- html = """
- <table cellpadding="0" cellspacing="0" class="data" style="width: 100%">
- <tr class="firstr">
- <th class="width100perc nopad">torrent name</th>
- <th class="center">
- <a href="/search/test/?field=size&sorder=desc" rel="nofollow">size</a>
- </th>
- <th class="center"><span class="files">
- <a href="/search/test/?field=files_count&sorder=desc" rel="nofollow">files</a></span>
- </th>
- <th class="center"><span>
- <a href="/search/test/?field=time_add&sorder=desc" rel="nofollow">age</a></span>
- </th>
- <th class="center"><span class="seed">
- <a href="/search/test/?field=seeders&sorder=desc" rel="nofollow">seed</a></span>
- </th>
- <th class="lasttd nobr center">
- <a href="/search/test/?field=leechers&sorder=desc" rel="nofollow">leech</a>
- </th>
- </tr>
- </table>
- """
- response = mock.Mock(text=html)
- results = kickass.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 0)
-
- html = """
- <table cellpadding="0" cellspacing="0" class="data" style="width: 100%">
- <tr class="firstr">
- <th class="width100perc nopad">torrent name</th>
- <th class="center">
- <a href="/search/test/?field=size&sorder=desc" rel="nofollow">size</a>
- </th>
- <th class="center"><span class="files">
- <a href="/search/test/?field=files_count&sorder=desc" rel="nofollow">files</a></span>
- </th>
- <th class="center"><span>
- <a href="/search/test/?field=time_add&sorder=desc" rel="nofollow">age</a></span>
- </th>
- <th class="center"><span class="seed">
- <a href="/search/test/?field=seeders&sorder=desc" rel="nofollow">seed</a></span>
- </th>
- <th class="lasttd nobr center">
- <a href="/search/test/?field=leechers&sorder=desc" rel="nofollow">leech</a>
- </th>
- </tr>
- <tr class="even" id="torrent_test6478745">
- <td>
- <div class="iaconbox center floatright">
- <a rel="6478745,0" class="icommentjs icon16" href="/test-t6478745.html#comment">
- <em style="font-size: 12px; margin: 0 4px 0 4px;" class="iconvalue">3</em>
- <i class="ka ka-comment"></i>
- </a>
- <a class="iverify icon16" href="/test-t6478745.html" title="Verified Torrent">
- <i class="ka ka16 ka-verify ka-green"></i>
- </a>
- <a href="#" onclick="_scq.push([]); return false;" class="partner1Button idownload icon16">
- <i class="ka ka16 ka-arrow-down partner1Button"></i>
- </a>
- <a title="Torrent magnet link"
- href="magnet:?xt=urn:btih:MAGNETURL&dn=test" class="imagnet icon16">
- <i class="ka ka16 ka-magnet"></i>
- </a>
- <a title="Download torrent file"
- href="http://torcache.net/torrent/53917.torrent?title=test" class="idownload icon16">
- <i class="ka ka16 ka-arrow-down"></i>
- </a>
- </div>
- <div class="torrentname">
- <a href="/test-t6478745.html" class="torType txtType"></a>
- <a href="/test-t6478745.html" class="normalgrey font12px plain bold"></a>
- <div class="markeredBlock torType txtType">
- <a href="/url.html" class="cellMainLink">
- <strong class="red">This should be the title</strong>
- </a>
- <span class="font11px lightgrey block">
- Posted by <i class="ka ka-verify" style="font-size: 16px;color:orange;"></i>
- <a class="plain" href="/user/riri/">riri</a> in
- <span id="cat_6478745">
- <strong><a href="/other/">Other</a> > <a href="/unsorted/">Unsorted</a></strong>
- </span>
- </span>
- </div>
- </td>
- <td class="nobr center">1 KiB</td>
- <td class="center">4</td>
- <td class="center">2&nbsp;years</td>
- <td class="green center">10</td>
- <td class="red lasttd center">1</td>
- </tr>
- <tr class="even" id="torrent_test6478745">
- <td>
- <div class="iaconbox center floatright">
- <a rel="6478745,0" class="icommentjs icon16" href="/test-t6478745.html#comment">
- <em style="font-size: 12px; margin: 0 4px 0 4px;" class="iconvalue">3</em>
- <i class="ka ka-comment"></i>
- </a>
- <a class="iverify icon16" href="/test-t6478745.html" title="Verified Torrent">
- <i class="ka ka16 ka-verify ka-green"></i>
- </a>
- <a href="#" onclick="_scq.push([]); return false;" class="partner1Button idownload icon16">
- <i class="ka ka16 ka-arrow-down partner1Button"></i>
- </a>
- <a title="Torrent magnet link"
- href="magnet:?xt=urn:btih:MAGNETURL&dn=test" class="imagnet icon16">
- <i class="ka ka16 ka-magnet"></i>
- </a>
- <a title="Download torrent file"
- href="http://torcache.net/torrent/53917.torrent?title=test" class="idownload icon16">
- <i class="ka ka16 ka-arrow-down"></i>
- </a>
- </div>
- <div class="torrentname">
- <a href="/test-t6478745.html" class="torType txtType"></a>
- <a href="/test-t6478745.html" class="normalgrey font12px plain bold"></a>
- <div class="markeredBlock torType txtType">
- <a href="/url.html" class="cellMainLink">
- <strong class="red">This should be the title</strong>
- </a>
- <span class="font11px lightgrey block">
- Posted by <i class="ka ka-verify" style="font-size: 16px;color:orange;"></i>
- <a class="plain" href="/user/riri/">riri</a> in
- <span id="cat_6478745">
- <strong><a href="/other/">Other</a> > <a href="/unsorted/">Unsorted</a></strong>
- </span>
- </span>
- </div>
- </td>
- <td class="nobr center">1 MiB</td>
- <td class="center">4</td>
- <td class="center">2&nbsp;years</td>
- <td class="green center">9</td>
- <td class="red lasttd center">1</td>
- </tr>
- <tr class="even" id="torrent_test6478745">
- <td>
- <div class="iaconbox center floatright">
- <a rel="6478745,0" class="icommentjs icon16" href="/test-t6478745.html#comment">
- <em style="font-size: 12px; margin: 0 4px 0 4px;" class="iconvalue">3</em>
- <i class="ka ka-comment"></i>
- </a>
- <a class="iverify icon16" href="/test-t6478745.html" title="Verified Torrent">
- <i class="ka ka16 ka-verify ka-green"></i>
- </a>
- <a href="#" onclick="_scq.push([]); return false;" class="partner1Button idownload icon16">
- <i class="ka ka16 ka-arrow-down partner1Button"></i>
- </a>
- <a title="Torrent magnet link"
- href="magnet:?xt=urn:btih:MAGNETURL&dn=test" class="imagnet icon16">
- <i class="ka ka16 ka-magnet"></i>
- </a>
- <a title="Download torrent file"
- href="http://torcache.net/torrent/53917.torrent?title=test" class="idownload icon16">
- <i class="ka ka16 ka-arrow-down"></i>
- </a>
- </div>
- <div class="torrentname">
- <a href="/test-t6478745.html" class="torType txtType"></a>
- <a href="/test-t6478745.html" class="normalgrey font12px plain bold"></a>
- <div class="markeredBlock torType txtType">
- <a href="/url.html" class="cellMainLink">
- <strong class="red">This should be the title</strong>
- </a>
- <span class="font11px lightgrey block">
- Posted by <i class="ka ka-verify" style="font-size: 16px;color:orange;"></i>
- <a class="plain" href="/user/riri/">riri</a> in
- <span id="cat_6478745">
- <strong><a href="/other/">Other</a> > <a href="/unsorted/">Unsorted</a></strong>
- </span>
- </span>
- </div>
- </td>
- <td class="nobr center">1 GiB</td>
- <td class="center">4</td>
- <td class="center">2&nbsp;years</td>
- <td class="green center">8</td>
- <td class="red lasttd center">1</td>
- </tr>
- <tr class="even" id="torrent_test6478745">
- <td>
- <div class="iaconbox center floatright">
- <a rel="6478745,0" class="icommentjs icon16" href="/test-t6478745.html#comment">
- <em style="font-size: 12px; margin: 0 4px 0 4px;" class="iconvalue">3</em>
- <i class="ka ka-comment"></i>
- </a>
- <a class="iverify icon16" href="/test-t6478745.html" title="Verified Torrent">
- <i class="ka ka16 ka-verify ka-green"></i>
- </a>
- <a href="#" onclick="_scq.push([]); return false;" class="partner1Button idownload icon16">
- <i class="ka ka16 ka-arrow-down partner1Button"></i>
- </a>
- <a title="Torrent magnet link"
- href="magnet:?xt=urn:btih:MAGNETURL&dn=test" class="imagnet icon16">
- <i class="ka ka16 ka-magnet"></i>
- </a>
- <a title="Download torrent file"
- href="http://torcache.net/torrent/53917.torrent?title=test" class="idownload icon16">
- <i class="ka ka16 ka-arrow-down"></i>
- </a>
- </div>
- <div class="torrentname">
- <a href="/test-t6478745.html" class="torType txtType"></a>
- <a href="/test-t6478745.html" class="normalgrey font12px plain bold"></a>
- <div class="markeredBlock torType txtType">
- <a href="/url.html" class="cellMainLink">
- <strong class="red">This should be the title</strong>
- </a>
- <span class="font11px lightgrey block">
- Posted by <i class="ka ka-verify" style="font-size: 16px;color:orange;"></i>
- <a class="plain" href="/user/riri/">riri</a> in
- <span id="cat_6478745">
- <strong><a href="/other/">Other</a> > <a href="/unsorted/">Unsorted</a></strong>
- </span>
- </span>
- </div>
- </td>
- <td class="nobr center">1 TiB</td>
- <td class="center">4</td>
- <td class="center">2&nbsp;years</td>
- <td class="green center">7</td>
- <td class="red lasttd center">1</td>
- </tr>
- <tr class="even" id="torrent_test6478745">
- <td>
- <div class="iaconbox center floatright">
- <a rel="6478745,0" class="icommentjs icon16" href="/test-t6478745.html#comment">
- <em style="font-size: 12px; margin: 0 4px 0 4px;" class="iconvalue">3</em>
- <i class="ka ka-comment"></i>
- </a>
- <a class="iverify icon16" href="/test-t6478745.html" title="Verified Torrent">
- <i class="ka ka16 ka-verify ka-green"></i>
- </a>
- <a href="#" onclick="_scq.push([]); return false;" class="partner1Button idownload icon16">
- <i class="ka ka16 ka-arrow-down partner1Button"></i>
- </a>
- <a title="Torrent magnet link"
- href="magnet:?xt=urn:btih:MAGNETURL&dn=test" class="imagnet icon16">
- <i class="ka ka16 ka-magnet"></i>
- </a>
- <a title="Download torrent file"
- href="http://torcache.net/torrent/53917.torrent?title=test" class="idownload icon16">
- <i class="ka ka16 ka-arrow-down"></i>
- </a>
- </div>
- <div class="torrentname">
- <a href="/test-t6478745.html" class="torType txtType"></a>
- <a href="/test-t6478745.html" class="normalgrey font12px plain bold"></a>
- <div class="markeredBlock torType txtType">
- <a href="/url.html" class="cellMainLink">
- <strong class="red">This should be the title</strong>
- </a>
- <span class="font11px lightgrey block">
- Posted by <i class="ka ka-verify" style="font-size: 16px;color:orange;"></i>
- <a class="plain" href="/user/riri/">riri</a> in
- <span id="cat_6478745">
- <strong><a href="/other/">Other</a> > <a href="/unsorted/">Unsorted</a></strong>
- </span>
- </span>
- </div>
- </td>
- <td class="nobr center">z bytes</td>
- <td class="center">r</td>
- <td class="center">2&nbsp;years</td>
- <td class="green center">a</td>
- <td class="red lasttd center">t</td>
- </tr>
- </table>
- """
- response = mock.Mock(text=html)
- results = kickass.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 5)
- self.assertEqual(results[0]['title'], 'This should be the title')
- self.assertEqual(results[0]['url'], 'https://kickass.cd/url.html')
- self.assertEqual(results[0]['content'], 'Posted by riri in Other > Unsorted')
- self.assertEqual(results[0]['seed'], 10)
- self.assertEqual(results[0]['leech'], 1)
- self.assertEqual(results[0]['files'], 4)
- self.assertEqual(results[0]['magnetlink'], 'magnet:?xt=urn:btih:MAGNETURL&dn=test')
- self.assertEqual(results[0]['torrentfile'], 'http://torcache.net/torrent/53917.torrent?title=test')
- self.assertEqual(results[0]['filesize'], 1000)
- self.assertEqual(results[1]['filesize'], 1000000)
- self.assertEqual(results[2]['filesize'], 1000000000)
- self.assertEqual(results[3]['filesize'], 1000000000000)
- self.assertEqual(results[4]['seed'], 0)
- self.assertEqual(results[4]['leech'], 0)
- self.assertEqual(results[4]['files'], None)
- self.assertEqual(results[4]['filesize'], None)
diff --git a/tests/unit/engines/test_mediawiki.py b/tests/unit/engines/test_mediawiki.py
deleted file mode 100644
index b86372700..000000000
--- a/tests/unit/engines/test_mediawiki.py
+++ /dev/null
@@ -1,130 +0,0 @@
-# -*- coding: utf-8 -*-
-from collections import defaultdict
-import mock
-from searx.engines import mediawiki
-from searx.testing import SearxTestCase
-
-
-class TestMediawikiEngine(SearxTestCase):
-
- def test_request(self):
- query = 'test_query'
- dicto = defaultdict(dict)
- dicto['pageno'] = 1
- dicto['language'] = 'fr_FR'
- params = mediawiki.request(query, dicto)
- self.assertIn('url', params)
- self.assertIn(query, params['url'])
- self.assertIn('wikipedia.org', params['url'])
- self.assertIn('fr', params['url'])
-
- dicto['language'] = 'all'
- params = mediawiki.request(query, dicto)
- self.assertIn('en', params['url'])
-
- mediawiki.base_url = "http://test.url/"
- mediawiki.search_url = mediawiki.base_url +\
- 'w/api.php?action=query'\
- '&list=search'\
- '&{query}'\
- '&srprop=timestamp'\
- '&format=json'\
- '&sroffset={offset}'\
- '&srlimit={limit}' # noqa
- params = mediawiki.request(query, dicto)
- self.assertIn('test.url', params['url'])
-
- def test_response(self):
- dicto = defaultdict(dict)
- dicto['language'] = 'fr'
- mediawiki.base_url = "https://{language}.wikipedia.org/"
-
- self.assertRaises(AttributeError, mediawiki.response, None)
- self.assertRaises(AttributeError, mediawiki.response, [])
- self.assertRaises(AttributeError, mediawiki.response, '')
- self.assertRaises(AttributeError, mediawiki.response, '[]')
-
- response = mock.Mock(text='{}', search_params=dicto)
- self.assertEqual(mediawiki.response(response), [])
-
- response = mock.Mock(text='{"data": []}', search_params=dicto)
- self.assertEqual(mediawiki.response(response), [])
-
- json = """
- {
- "query-continue": {
- "search": {
- "sroffset": 1
- }
- },
- "query": {
- "searchinfo": {
- "totalhits": 29721
- },
- "search": [
- {
- "ns": 0,
- "title": "This is the title étude",
- "timestamp": "2014-12-19T17:42:52Z"
- }
- ]
- }
- }
- """
- response = mock.Mock(text=json, search_params=dicto)
- results = mediawiki.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0]['title'], u'This is the title étude')
- self.assertIn('fr.wikipedia.org', results[0]['url'])
- self.assertIn('This_is_the_title', results[0]['url'])
- self.assertIn('%C3%A9tude', results[0]['url'])
- self.assertEqual(results[0]['content'], '')
-
- json = """
- {
- "query-continue": {
- "search": {
- "sroffset": 1
- }
- },
- "query": {
- "searchinfo": {
- "totalhits": 29721
- },
- "search": [
- ]
- }
- }
- """
- response = mock.Mock(text=json, search_params=dicto)
- results = mediawiki.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 0)
-
- json = """
- {
- "query-continue": {
- "search": {
- "sroffset": 1
- }
- },
- "query": {
- }
- }
- """
- response = mock.Mock(text=json, search_params=dicto)
- results = mediawiki.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 0)
-
- json = r"""
- {"toto":[
- {"id":200,"name":"Artist Name",
- "link":"http:\/\/www.mediawiki.com\/artist\/1217","type":"artist"}
- ]}
- """
- response = mock.Mock(text=json, search_params=dicto)
- results = mediawiki.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 0)
diff --git a/tests/unit/engines/test_mixcloud.py b/tests/unit/engines/test_mixcloud.py
deleted file mode 100644
index 9c79a478e..000000000
--- a/tests/unit/engines/test_mixcloud.py
+++ /dev/null
@@ -1,67 +0,0 @@
-from collections import defaultdict
-import mock
-from searx.engines import mixcloud
-from searx.testing import SearxTestCase
-
-
-class TestMixcloudEngine(SearxTestCase):
-
- def test_request(self):
- query = 'test_query'
- dicto = defaultdict(dict)
- dicto['pageno'] = 0
- params = mixcloud.request(query, dicto)
- self.assertTrue('url' in params)
- self.assertTrue(query in params['url'])
- self.assertTrue('mixcloud.com' in params['url'])
-
- def test_response(self):
- self.assertRaises(AttributeError, mixcloud.response, None)
- self.assertRaises(AttributeError, mixcloud.response, [])
- self.assertRaises(AttributeError, mixcloud.response, '')
- self.assertRaises(AttributeError, mixcloud.response, '[]')
-
- response = mock.Mock(text='{}')
- self.assertEqual(mixcloud.response(response), [])
-
- response = mock.Mock(text='{"data": []}')
- self.assertEqual(mixcloud.response(response), [])
-
- json = """
- {"data":[
- {
- "user": {
- "url": "http://www.mixcloud.com/user/",
- "username": "user",
- "name": "User",
- "key": "/user/"
- },
- "key": "/user/this-is-the-url/",
- "created_time": "2014-11-14T13:30:02Z",
- "audio_length": 3728,
- "slug": "this-is-the-url",
- "name": "Title of track",
- "url": "http://www.mixcloud.com/user/this-is-the-url/",
- "updated_time": "2014-11-14T13:14:10Z"
- }
- ]}
- """
- response = mock.Mock(text=json)
- results = mixcloud.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0]['title'], 'Title of track')
- self.assertEqual(results[0]['url'], 'http://www.mixcloud.com/user/this-is-the-url/')
- self.assertEqual(results[0]['content'], 'User')
- self.assertTrue('http://www.mixcloud.com/user/this-is-the-url/' in results[0]['embedded'])
-
- json = r"""
- {"toto":[
- {"id":200,"name":"Artist Name",
- "link":"http:\/\/www.mixcloud.com\/artist\/1217","type":"artist"}
- ]}
- """
- response = mock.Mock(text=json)
- results = mixcloud.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 0)
diff --git a/tests/unit/engines/test_nyaa.py b/tests/unit/engines/test_nyaa.py
deleted file mode 100644
index 6dcafc6b7..000000000
--- a/tests/unit/engines/test_nyaa.py
+++ /dev/null
@@ -1,124 +0,0 @@
-from collections import defaultdict
-import mock
-from searx.engines import nyaa
-from searx.testing import SearxTestCase
-
-
-class TestNyaaEngine(SearxTestCase):
-
- def test_request(self):
- query = 'test_query'
- dic = defaultdict(dict)
- dic['pageno'] = 1
- params = nyaa.request(query, dic)
- self.assertTrue('url' in params)
- self.assertTrue(query in params['url'])
- self.assertTrue('nyaa.si' in params['url'])
-
- def test_response(self):
- resp = mock.Mock(text='<html></html>')
- self.assertEqual(nyaa.response(resp), [])
-
- html = """
- <table class="table table-bordered table-hover table-striped torrent-list">
- <thead>
- <tr>
- <th class="hdr-category text-center" style="width:80px;">
- <div>Category</div>
- </th>
- <th class="hdr-name" style="width:auto;">
- <div>Name</div>
- </th>
- <th class="hdr-comments sorting text-center" title="Comments" style="width:50px;">
- <a href="/?f=0&amp;c=0_0&amp;q=Death+Parade&amp;s=comments&amp;o=desc"></a>
- <i class="fa fa-comments-o"></i>
- </th>
- <th class="hdr-link text-center" style="width:70px;">
- <div>Link</div>
- </th>
- <th class="hdr-size sorting text-center" style="width:100px;">
- <a href="/?f=0&amp;c=0_0&amp;q=Death+Parade&amp;s=size&amp;o=desc"></a>
- <div>Size</div>
- </th>
- <th class="hdr-date sorting_desc text-center" title="In local time" style="width:140px;">
- <a href="/?f=0&amp;c=0_0&amp;q=Death+Parade&amp;s=id&amp;o=asc"></a>
- <div>Date</div>
- </th>
- <th class="hdr-seeders sorting text-center" title="Seeders" style="width:50px;">
- <a href="/?f=0&amp;c=0_0&amp;q=Death+Parade&amp;s=seeders&amp;o=desc"></a>
- <i class="fa fa-arrow-up" aria-hidden="true"></i>
- </th>
- <th class="hdr-leechers sorting text-center" title="Leechers" style="width:50px;">
- <a href="/?f=0&amp;c=0_0&amp;q=Death+Parade&amp;s=leechers&amp;o=desc"></a>
- <i class="fa fa-arrow-down" aria-hidden="true"></i>
- </th>
- <th class="hdr-downloads sorting text-center" title="Completed downloads" style="width:50px;">
- <a href="/?f=0&amp;c=0_0&amp;q=Death+Parade&amp;s=downloads&amp;o=desc"></a>
- <i class="fa fa-check" aria-hidden="true"></i>
- </th>
- </tr>
- </thead>
- <tbody>
- <tr class="default">
- <td style="padding:0 4px;">
- <a href="/?c=1_2" title="Anime - English-translated">
- <img src="/static/img/icons/nyaa/1_2.png" alt="Anime - English-translated">
- </a>
- </td>
- <td colspan="2">
- <a href="/view/1" title="Sample title 1">Sample title 1</a>
- </td>
- <td class="text-center" style="white-space: nowrap;">
- <a href="/download/1.torrent"><i class="fa fa-fw fa-download"></i></a>
- <a href="magnet:?xt=urn:btih:2"><i class="fa fa-fw fa-magnet"></i></a>
- </td>
- <td class="text-center">723.7 MiB</td>
- <td class="text-center" data-timestamp="1503307456" title="1 week 3
- days 9 hours 44 minutes 39 seconds ago">2017-08-21 11:24</td>
- <td class="text-center" style="color: green;">1</td>
- <td class="text-center" style="color: red;">3</td>
- <td class="text-center">12</td>
- </tr>
- <tr class="default">
- <td style="padding:0 4px;">
- <a href="/?c=1_2" title="Anime - English-translated">
- <img src="/static/img/icons/nyaa/1_2.png" alt="Anime - English-translated">
- </a>
- </td>
- <td colspan="2">
- <a href="/view/2" title="Sample title 2">Sample title 2</a>
- </td>
- <td class="text-center" style="white-space: nowrap;">
- <a href="magnet:?xt=urn:btih:2"><i class="fa fa-fw fa-magnet"></i></a>
- </td>
- <td class="text-center">8.2 GiB</td>
- <td class="text-center" data-timestamp="1491608400" title="4 months 3
- weeks 4 days 19 hours 28 minutes 55 seconds ago">2017-04-08 01:40</td>
- <td class="text-center" style="color: green;">10</td>
- <td class="text-center" style="color: red;">1</td>
- <td class="text-center">206</td>
- </tr>
- </tbody>
- </table>
- """
-
- resp = mock.Mock(text=html)
- results = nyaa.response(resp)
-
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 2)
-
- r = results[0]
- self.assertTrue(r['url'].find('1') >= 0)
- self.assertTrue(r['torrentfile'].find('1.torrent') >= 0)
- self.assertTrue(r['content'].find('Anime - English-translated') >= 0)
- self.assertTrue(r['content'].find('Downloaded 12 times.') >= 0)
-
- self.assertEqual(r['title'], 'Sample title 1')
- self.assertEqual(r['seed'], 1)
- self.assertEqual(r['leech'], 3)
- self.assertEqual(r['filesize'], 723700000)
-
- r = results[1]
- self.assertTrue(r['url'].find('2') >= 0)
- self.assertTrue(r['magnetlink'].find('magnet:') >= 0)
diff --git a/tests/unit/engines/test_openstreetmap.py b/tests/unit/engines/test_openstreetmap.py
deleted file mode 100644
index 7b7783f04..000000000
--- a/tests/unit/engines/test_openstreetmap.py
+++ /dev/null
@@ -1,199 +0,0 @@
-# -*- coding: utf-8 -*-
-from collections import defaultdict
-import mock
-from searx.engines import openstreetmap
-from searx.testing import SearxTestCase
-
-
-class TestOpenstreetmapEngine(SearxTestCase):
-
- def test_request(self):
- query = 'test_query'
- dicto = defaultdict(dict)
- dicto['pageno'] = 1
- params = openstreetmap.request(query, dicto)
- self.assertIn('url', params)
- self.assertIn(query, params['url'])
- self.assertIn('openstreetmap.org', params['url'])
-
- def test_response(self):
- self.assertRaises(AttributeError, openstreetmap.response, None)
- self.assertRaises(AttributeError, openstreetmap.response, [])
- self.assertRaises(AttributeError, openstreetmap.response, '')
- self.assertRaises(AttributeError, openstreetmap.response, '[]')
-
- response = mock.Mock(text='{}')
- self.assertEqual(openstreetmap.response(response), [])
-
- response = mock.Mock(text='{"data": []}')
- self.assertEqual(openstreetmap.response(response), [])
-
- json = """
- [
- {
- "place_id": "127732055",
- "licence": "Data © OpenStreetMap contributors, ODbL 1.0. http://www.openstreetmap.org/copyright",
- "osm_type": "relation",
- "osm_id": "7444",
- "boundingbox": [
- "48.8155755",
- "48.902156",
- "2.224122",
- "2.4697602"
- ],
- "lat": "48.8565056",
- "lon": "2.3521334",
- "display_name": "This is the title",
- "class": "place",
- "type": "city",
- "importance": 0.96893459932191,
- "icon": "https://nominatim.openstreetmap.org/images/mapicons/poi_place_city.p.20.png",
- "address": {
- "city": "Paris",
- "county": "Paris",
- "state": "Île-de-France",
- "country": "France",
- "country_code": "fr"
- },
- "geojson": {
- "type": "Polygon",
- "coordinates": [
- [
- [
- 2.224122,
- 48.854199
- ]
- ]
- ]
- }
- }
- ]
- """
- response = mock.Mock(text=json)
- results = openstreetmap.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0]['title'], 'This is the title')
- self.assertEqual(results[0]['url'], 'https://openstreetmap.org/relation/7444')
- self.assertIn('coordinates', results[0]['geojson'])
- self.assertEqual(results[0]['geojson']['coordinates'][0][0][0], 2.224122)
- self.assertEqual(results[0]['geojson']['coordinates'][0][0][1], 48.854199)
- self.assertEqual(results[0]['address'], None)
- self.assertIn('48.8155755', results[0]['boundingbox'])
- self.assertIn('48.902156', results[0]['boundingbox'])
- self.assertIn('2.224122', results[0]['boundingbox'])
- self.assertIn('2.4697602', results[0]['boundingbox'])
-
- json = """
- [
- {
- "place_id": "127732055",
- "licence": "Data © OpenStreetMap contributors, ODbL 1.0. http://www.openstreetmap.org/copyright",
- "osm_type": "relation",
- "osm_id": "7444",
- "boundingbox": [
- "48.8155755",
- "48.902156",
- "2.224122",
- "2.4697602"
- ],
- "lat": "48.8565056",
- "lon": "2.3521334",
- "display_name": "This is the title",
- "class": "tourism",
- "type": "city",
- "importance": 0.96893459932191,
- "icon": "https://nominatim.openstreetmap.org/images/mapicons/poi_place_city.p.20.png",
- "address": {
- "city": "Paris",
- "county": "Paris",
- "state": "Île-de-France",
- "country": "France",
- "country_code": "fr",
- "address29": "Address"
- },
- "geojson": {
- "type": "Polygon",
- "coordinates": [
- [
- [
- 2.224122,
- 48.854199
- ]
- ]
- ]
- }
- },
- {
- "place_id": "127732055",
- "licence": "Data © OpenStreetMap contributors, ODbL 1.0. http://www.openstreetmap.org/copyright",
- "osm_type": "relation",
- "osm_id": "7444",
- "boundingbox": [
- "48.8155755",
- "48.902156",
- "2.224122",
- "2.4697602"
- ],
- "lat": "48.8565056",
- "lon": "2.3521334",
- "display_name": "This is the title",
- "class": "tourism",
- "type": "city",
- "importance": 0.96893459932191,
- "icon": "https://nominatim.openstreetmap.org/images/mapicons/poi_place_city.p.20.png",
- "address": {
- "city": "Paris",
- "county": "Paris",
- "state": "Île-de-France",
- "country": "France",
- "postcode": 75000,
- "country_code": "fr"
- },
- "geojson": {
- "type": "Polygon",
- "coordinates": [
- [
- [
- 2.224122,
- 48.854199
- ]
- ]
- ]
- }
- },
- {
- "place_id": "127732055",
- "licence": "Data © OpenStreetMap contributors, ODbL 1.0. http://www.openstreetmap.org/copyright",
- "osm_type": "node",
- "osm_id": "7444",
- "boundingbox": [
- "48.8155755",
- "48.902156",
- "2.224122",
- "2.4697602"
- ],
- "lat": "48.8565056",
- "lon": "2.3521334",
- "display_name": "This is the title",
- "class": "tourism",
- "type": "city",
- "importance": 0.96893459932191,
- "icon": "https://nominatim.openstreetmap.org/images/mapicons/poi_place_city.p.20.png",
- "address": {
- "city": "Paris",
- "county": "Paris",
- "state": "Île-de-France",
- "country": "France",
- "country_code": "fr",
- "address29": "Address"
- }
- }
- ]
- """
- response = mock.Mock(text=json)
- results = openstreetmap.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 3)
- self.assertIn('48.8565056', results[2]['geojson']['coordinates'])
- self.assertIn('2.3521334', results[2]['geojson']['coordinates'])
diff --git a/tests/unit/engines/test_pdbe.py b/tests/unit/engines/test_pdbe.py
deleted file mode 100644
index ea5adf9dc..000000000
--- a/tests/unit/engines/test_pdbe.py
+++ /dev/null
@@ -1,109 +0,0 @@
-import mock
-from collections import defaultdict
-from searx.engines import pdbe
-from searx.testing import SearxTestCase
-
-
-class TestPdbeEngine(SearxTestCase):
- def test_request(self):
- query = 'test_query'
- dicto = defaultdict(dict)
- params = pdbe.request(query, dicto)
- self.assertTrue('url' in params)
- self.assertTrue('ebi.ac.uk' in params['url'])
- self.assertTrue('data' in params)
- self.assertTrue('q' in params['data'])
- self.assertTrue(query in params['data']['q'])
- self.assertTrue('wt' in params['data'])
- self.assertTrue('json' in params['data']['wt'])
- self.assertTrue('method' in params)
- self.assertTrue(params['method'] == 'POST')
-
- def test_response(self):
- self.assertRaises(AttributeError, pdbe.response, None)
- self.assertRaises(AttributeError, pdbe.response, [])
- self.assertRaises(AttributeError, pdbe.response, '')
- self.assertRaises(AttributeError, pdbe.response, '[]')
-
- json = """
-{
- "response": {
- "docs": [
- {
- "citation_title": "X-ray crystal structure of ferric Aplysia limacina myoglobin in different liganded states.",
- "citation_year": 1993,
- "entry_author_list": [
- "Conti E, Moser C, Rizzi M, Mattevi A, Lionetti C, Coda A, Ascenzi P, Brunori M, Bolognesi M"
- ],
- "journal": "J. Mol. Biol.",
- "journal_page": "498-508",
- "journal_volume": "233",
- "pdb_id": "2fal",
- "status": "REL",
- "title": "X-RAY CRYSTAL STRUCTURE OF FERRIC APLYSIA LIMACINA MYOGLOBIN IN DIFFERENT LIGANDED STATES"
- }
- ],
- "numFound": 1,
- "start": 0
- },
- "responseHeader": {
- "QTime": 0,
- "params": {
- "q": "2fal",
- "wt": "json"
- },
- "status": 0
- }
-}
-"""
-
- response = mock.Mock(text=json)
- results = pdbe.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0]['title'],
- 'X-RAY CRYSTAL STRUCTURE OF FERRIC APLYSIA LIMACINA MYOGLOBIN IN DIFFERENT LIGANDED STATES')
- self.assertEqual(results[0]['url'], pdbe.pdbe_entry_url.format(pdb_id='2fal'))
- self.assertEqual(results[0]['img_src'], pdbe.pdbe_preview_url.format(pdb_id='2fal'))
- self.assertTrue('Conti E' in results[0]['content'])
- self.assertTrue('X-ray crystal structure of ferric Aplysia limacina myoglobin in different liganded states.' in
- results[0]['content'])
- self.assertTrue('1993' in results[0]['content'])
-
- # Testing proper handling of PDB entries marked as obsolete
- json = """
-{
- "response": {
- "docs": [
- {
- "citation_title": "Obsolete entry test",
- "citation_year": 2016,
- "entry_author_list": ["Doe J"],
- "journal": "J. Obs.",
- "journal_page": "1-2",
- "journal_volume": "1",
- "pdb_id": "xxxx",
- "status": "OBS",
- "title": "OBSOLETE ENTRY TEST",
- "superseded_by": "yyyy"
- }
- ],
- "numFound": 1,
- "start": 0
- },
- "responseHeader": {
- "QTime": 0,
- "params": {
- "q": "xxxx",
- "wt": "json"
- },
- "status": 0
- }
-}
-"""
- response = mock.Mock(text=json)
- results = pdbe.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0]['title'], 'OBSOLETE ENTRY TEST (OBSOLETE)')
- self.assertTrue(results[0]['content'].startswith('This entry has been superseded by'))
diff --git a/tests/unit/engines/test_photon.py b/tests/unit/engines/test_photon.py
deleted file mode 100644
index 734497884..000000000
--- a/tests/unit/engines/test_photon.py
+++ /dev/null
@@ -1,166 +0,0 @@
-# -*- coding: utf-8 -*-
-from collections import defaultdict
-import mock
-from searx.engines import photon
-from searx.testing import SearxTestCase
-
-
-class TestPhotonEngine(SearxTestCase):
-
- def test_request(self):
- query = 'test_query'
- dicto = defaultdict(dict)
- dicto['pageno'] = 1
- dicto['language'] = 'all'
- params = photon.request(query, dicto)
- self.assertIn('url', params)
- self.assertIn(query, params['url'])
- self.assertIn('photon.komoot.de', params['url'])
-
- dicto['language'] = 'all'
- params = photon.request(query, dicto)
- self.assertNotIn('lang', params['url'])
-
- dicto['language'] = 'al'
- params = photon.request(query, dicto)
- self.assertNotIn('lang', params['url'])
-
- dicto['language'] = 'fr'
- params = photon.request(query, dicto)
- self.assertIn('fr', params['url'])
-
- def test_response(self):
- self.assertRaises(AttributeError, photon.response, None)
- self.assertRaises(AttributeError, photon.response, [])
- self.assertRaises(AttributeError, photon.response, '')
- self.assertRaises(AttributeError, photon.response, '[]')
-
- response = mock.Mock(text='{}')
- self.assertEqual(photon.response(response), [])
-
- response = mock.Mock(text='{"data": []}')
- self.assertEqual(photon.response(response), [])
-
- json = """
- {
- "features": [
- {
- "properties": {
- "osm_key": "waterway",
- "extent": [
- -1.4508446,
- 51.1614997,
- -1.4408036,
- 51.1525635
- ],
- "name": "This is the title",
- "state": "England",
- "osm_id": 114823817,
- "osm_type": "W",
- "osm_value": "river",
- "city": "Test Valley",
- "country": "United Kingdom"
- },
- "type": "Feature",
- "geometry": {
- "type": "Point",
- "coordinates": [
- -1.4458571,
- 51.1576661
- ]
- }
- },
- {
- "properties": {
- "osm_key": "place",
- "street": "Rue",
- "state": "Ile-de-France",
- "osm_id": 129211377,
- "osm_type": "R",
- "housenumber": "10",
- "postcode": "75011",
- "osm_value": "house",
- "city": "Paris",
- "country": "France"
- },
- "type": "Feature",
- "geometry": {
- "type": "Point",
- "coordinates": [
- 2.3725025,
- 48.8654481
- ]
- }
- },
- {
- "properties": {
- "osm_key": "amenity",
- "street": "Allée",
- "name": "Bibliothèque",
- "state": "Ile-de-France",
- "osm_id": 1028573132,
- "osm_type": "N",
- "postcode": "75001",
- "osm_value": "library",
- "city": "Paris",
- "country": "France"
- },
- "type": "Feature",
- "geometry": {
- "type": "Point",
- "coordinates": [
- 2.3445634,
- 48.862494
- ]
- }
- },
- {
- "properties": {
- "osm_key": "amenity",
- "osm_id": 1028573132,
- "osm_type": "Y",
- "postcode": "75001",
- "osm_value": "library",
- "city": "Paris",
- "country": "France"
- },
- "type": "Feature",
- "geometry": {
- "type": "Point",
- "coordinates": [
- 2.3445634,
- 48.862494
- ]
- }
- },
- {
- }
- ],
- "type": "FeatureCollection"
- }
- """
- response = mock.Mock(text=json)
- results = photon.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 3)
- self.assertEqual(results[0]['title'], 'This is the title')
- self.assertEqual(results[0]['content'], '')
- self.assertEqual(results[0]['longitude'], -1.4458571)
- self.assertEqual(results[0]['latitude'], 51.1576661)
- self.assertIn(-1.4508446, results[0]['boundingbox'])
- self.assertIn(51.1614997, results[0]['boundingbox'])
- self.assertIn(-1.4408036, results[0]['boundingbox'])
- self.assertIn(51.1525635, results[0]['boundingbox'])
- self.assertIn('type', results[0]['geojson'])
- self.assertEqual(results[0]['geojson']['type'], 'Point')
- self.assertEqual(results[0]['address'], None)
- self.assertEqual(results[0]['osm']['type'], 'way')
- self.assertEqual(results[0]['osm']['id'], 114823817)
- self.assertEqual(results[0]['url'], 'https://openstreetmap.org/way/114823817')
- self.assertEqual(results[1]['osm']['type'], 'relation')
- self.assertEqual(results[2]['address']['name'], u'Bibliothèque')
- self.assertEqual(results[2]['address']['house_number'], None)
- self.assertEqual(results[2]['address']['locality'], 'Paris')
- self.assertEqual(results[2]['address']['postcode'], '75001')
- self.assertEqual(results[2]['address']['country'], 'France')
- self.assertEqual(results[2]['osm']['type'], 'node')
diff --git a/tests/unit/engines/test_piratebay.py b/tests/unit/engines/test_piratebay.py
deleted file mode 100644
index 89a78e796..000000000
--- a/tests/unit/engines/test_piratebay.py
+++ /dev/null
@@ -1,166 +0,0 @@
-# -*- coding: utf-8 -*-
-from collections import defaultdict
-import mock
-from searx.engines import piratebay
-from searx.testing import SearxTestCase
-
-
-class TestPiratebayEngine(SearxTestCase):
-
- def test_request(self):
- query = 'test_query'
- dicto = defaultdict(dict)
- dicto['pageno'] = 1
- dicto['category'] = 'Toto'
- params = piratebay.request(query, dicto)
- self.assertIn('url', params)
- self.assertIn(query, params['url'])
- self.assertIn('piratebay.org', params['url'])
- self.assertIn('0', params['url'])
-
- dicto['category'] = 'music'
- params = piratebay.request(query, dicto)
- self.assertIn('100', params['url'])
-
- def test_response(self):
- self.assertRaises(AttributeError, piratebay.response, None)
- self.assertRaises(AttributeError, piratebay.response, [])
- self.assertRaises(AttributeError, piratebay.response, '')
- self.assertRaises(AttributeError, piratebay.response, '[]')
-
- response = mock.Mock(text='<html></html>')
- self.assertEqual(piratebay.response(response), [])
-
- html = """
- <table id="searchResult">
- <tr>
- </tr>
- <tr>
- <td class="vertTh">
- <center>
- <a href="#" title="More from this category">Anime</a><br/>
- (<a href="#" title="More from this category">Anime</a>)
- </center>
- </td>
- <td>
- <div class="detName">
- <a href="/this.is.the.link" class="detLink" title="Title">
- This is the title
- </a>
- </div>
- <a href="magnet:?xt=urn:btih:MAGNETLINK" title="Download this torrent using magnet">
- <img src="/static/img/icon-magnet.gif" alt="Magnet link"/>
- </a>
- <a href="http://torcache.net/torrent/TORRENTFILE.torrent" title="Download this torrent">
- <img src="/static/img/dl.gif" class="dl" alt="Download"/>
- </a>
- <a href="/user/HorribleSubs">
- <img src="/static/img/vip.gif" alt="VIP" title="VIP" style="width:11px;" border='0'/>
- </a>
- <img src="/static/img/11x11p.png"/>
- <font class="detDesc">
- This is the content <span>and should be</span> OK
- </font>
- </td>
- <td align="right">13</td>
- <td align="right">334</td>
- </tr>
- <tr>
- <td class="vertTh">
- <center>
- <a href="#" title="More from this category">Anime</a><br/>
- (<a href="#" title="More from this category">Anime</a>)
- </center>
- </td>
- <td>
- <div class="detName">
- <a href="/this.is.the.link" class="detLink" title="Title">
- This is the title
- </a>
- </div>
- <a href="magnet:?xt=urn:btih:MAGNETLINK" title="Download this torrent using magnet">
- <img src="/static/img/icon-magnet.gif" alt="Magnet link"/>
- </a>
- <a href="/user/HorribleSubs">
- <img src="/static/img/vip.gif" alt="VIP" title="VIP" style="width:11px;" border='0'/>
- </a>
- <img src="/static/img/11x11p.png"/>
- <font class="detDesc">
- This is the content <span>and should be</span> OK
- </font>
- </td>
- <td align="right">13</td>
- <td align="right">334</td>
- </tr>
- </table>
- """
- response = mock.Mock(text=html)
- results = piratebay.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 2)
- self.assertEqual(results[0]['title'], 'This is the title')
- self.assertEqual(results[0]['url'], 'https://thepiratebay.org/this.is.the.link')
- self.assertEqual(results[0]['content'], 'This is the content and should be OK')
- self.assertEqual(results[0]['seed'], 13)
- self.assertEqual(results[0]['leech'], 334)
- self.assertEqual(results[0]['magnetlink'], 'magnet:?xt=urn:btih:MAGNETLINK')
- self.assertEqual(results[0]['torrentfile'], 'http://torcache.net/torrent/TORRENTFILE.torrent')
-
- self.assertEqual(results[1]['torrentfile'], None)
-
- html = """
- <table id="searchResult">
- <tr>
- </tr>
- <tr>
- <td class="vertTh">
- <center>
- <a href="#" title="More from this category">Anime</a><br/>
- (<a href="#" title="More from this category">Anime</a>)
- </center>
- </td>
- <td>
- <div class="detName">
- <a href="/this.is.the.link" class="detLink" title="Title">
- This is the title
- </a>
- </div>
- <a href="magnet:?xt=urn:btih:MAGNETLINK" title="Download this torrent using magnet">
- <img src="/static/img/icon-magnet.gif" alt="Magnet link"/>
- </a>
- <a href="http://torcache.net/torrent/TORRENTFILE.torrent" title="Download this torrent">
- <img src="/static/img/dl.gif" class="dl" alt="Download"/>
- </a>
- <a href="/user/HorribleSubs">
- <img src="/static/img/vip.gif" alt="VIP" title="VIP" style="width:11px;" border='0'/>
- </a>
- <img src="/static/img/11x11p.png"/>
- <font class="detDesc">
- This is the content <span>and should be</span> OK
- </font>
- </td>
- <td align="right">s</td>
- <td align="right">d</td>
- </tr>
- </table>
- """
- response = mock.Mock(text=html)
- results = piratebay.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0]['title'], 'This is the title')
- self.assertEqual(results[0]['url'], 'https://thepiratebay.org/this.is.the.link')
- self.assertEqual(results[0]['content'], 'This is the content and should be OK')
- self.assertEqual(results[0]['seed'], 0)
- self.assertEqual(results[0]['leech'], 0)
- self.assertEqual(results[0]['magnetlink'], 'magnet:?xt=urn:btih:MAGNETLINK')
- self.assertEqual(results[0]['torrentfile'], 'http://torcache.net/torrent/TORRENTFILE.torrent')
-
- html = """
- <table id="searchResult">
- </table>
- """
- response = mock.Mock(text=html)
- results = piratebay.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 0)
diff --git a/tests/unit/engines/test_qwant.py b/tests/unit/engines/test_qwant.py
deleted file mode 100644
index 6611264f8..000000000
--- a/tests/unit/engines/test_qwant.py
+++ /dev/null
@@ -1,339 +0,0 @@
-from collections import defaultdict
-import mock
-from searx.engines import qwant
-from searx.testing import SearxTestCase
-
-
-class TestQwantEngine(SearxTestCase):
-
- def test_request(self):
- qwant.supported_languages = ['en-US', 'fr-CA', 'fr-FR']
- qwant.language_aliases = {}
- query = 'test_query'
- dicto = defaultdict(dict)
- dicto['pageno'] = 0
- dicto['language'] = 'fr-FR'
- qwant.categories = ['']
- params = qwant.request(query, dicto)
- self.assertIn('url', params)
- self.assertIn(query, params['url'])
- self.assertIn('web', params['url'])
- self.assertIn('qwant.com', params['url'])
- self.assertIn('fr_fr', params['url'])
-
- dicto['language'] = 'all'
- qwant.categories = ['news']
- params = qwant.request(query, dicto)
- self.assertFalse('fr' in params['url'])
- self.assertIn('news', params['url'])
-
- dicto['language'] = 'fr'
- params = qwant.request(query, dicto)
- self.assertIn('fr_fr', params['url'])
-
- def test_response(self):
- self.assertRaises(AttributeError, qwant.response, None)
- self.assertRaises(AttributeError, qwant.response, [])
- self.assertRaises(AttributeError, qwant.response, '')
- self.assertRaises(AttributeError, qwant.response, '[]')
-
- response = mock.Mock(text='{}')
- self.assertEqual(qwant.response(response), [])
-
- response = mock.Mock(text='{"data": {}}')
- self.assertEqual(qwant.response(response), [])
-
- json = """
- {
- "status": "success",
- "data": {
- "query": {
- "locale": "en_us",
- "query": "Test",
- "offset": 10
- },
- "result": {
- "items": [
- {
- "title": "Title",
- "score": 9999,
- "url": "http://www.url.xyz",
- "source": "...",
- "desc": "Description",
- "date": "",
- "_id": "db0aadd62c2a8565567ffc382f5c61fa",
- "favicon": "https://s.qwant.com/fav.ico"
- }
- ],
- "filters": []
- },
- "cache": {
- "key": "e66aa864c00147a0e3a16ff7a5efafde",
- "created": 1433092754,
- "expiration": 259200,
- "status": "miss",
- "age": 0
- }
- }
- }
- """
- response = mock.Mock(text=json)
- qwant.categories = ['general']
- results = qwant.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0]['title'], 'Title')
- self.assertEqual(results[0]['url'], 'http://www.url.xyz')
- self.assertEqual(results[0]['content'], 'Description')
-
- json = """
- {
- "status": "success",
- "data": {
- "query": {
- "locale": "en_us",
- "query": "Test",
- "offset": 10
- },
- "result": {
- "items": [
- {
- "title": "Title",
- "score": 9999,
- "url": "http://www.url.xyz",
- "source": "...",
- "media": "http://image.jpg",
- "desc": "",
- "thumbnail": "http://thumbnail.jpg",
- "date": "",
- "_id": "db0aadd62c2a8565567ffc382f5c61fa",
- "favicon": "https://s.qwant.com/fav.ico"
- }
- ],
- "filters": []
- },
- "cache": {
- "key": "e66aa864c00147a0e3a16ff7a5efafde",
- "created": 1433092754,
- "expiration": 259200,
- "status": "miss",
- "age": 0
- }
- }
- }
- """
- response = mock.Mock(text=json)
- qwant.categories = ['images']
- results = qwant.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0]['title'], 'Title')
- self.assertEqual(results[0]['url'], 'http://www.url.xyz')
- self.assertEqual(results[0]['content'], '')
- self.assertEqual(results[0]['thumbnail_src'], 'http://thumbnail.jpg')
- self.assertEqual(results[0]['img_src'], 'http://image.jpg')
-
- json = """
- {
- "status": "success",
- "data": {
- "query": {
- "locale": "en_us",
- "query": "Test",
- "offset": 10
- },
- "result": {
- "items": [
- {
- "title": "Title",
- "score": 9999,
- "url": "http://www.url.xyz",
- "source": "...",
- "desc": "Description",
- "date": 1433260920,
- "_id": "db0aadd62c2a8565567ffc382f5c61fa",
- "favicon": "https://s.qwant.com/fav.ico"
- }
- ],
- "filters": []
- },
- "cache": {
- "key": "e66aa864c00147a0e3a16ff7a5efafde",
- "created": 1433092754,
- "expiration": 259200,
- "status": "miss",
- "age": 0
- }
- }
- }
- """
- response = mock.Mock(text=json)
- qwant.categories = ['news']
- results = qwant.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0]['title'], 'Title')
- self.assertEqual(results[0]['url'], 'http://www.url.xyz')
- self.assertEqual(results[0]['content'], 'Description')
- self.assertIn('publishedDate', results[0])
-
- json = """
- {
- "status": "success",
- "data": {
- "query": {
- "locale": "en_us",
- "query": "Test",
- "offset": 10
- },
- "result": {
- "items": [
- {
- "title": "Title",
- "score": 9999,
- "url": "http://www.url.xyz",
- "source": "...",
- "desc": "Description",
- "date": 1433260920,
- "_id": "db0aadd62c2a8565567ffc382f5c61fa",
- "favicon": "https://s.qwant.com/fav.ico"
- }
- ],
- "filters": []
- },
- "cache": {
- "key": "e66aa864c00147a0e3a16ff7a5efafde",
- "created": 1433092754,
- "expiration": 259200,
- "status": "miss",
- "age": 0
- }
- }
- }
- """
- response = mock.Mock(text=json)
- qwant.categories = ['social media']
- results = qwant.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0]['title'], 'Title')
- self.assertEqual(results[0]['url'], 'http://www.url.xyz')
- self.assertEqual(results[0]['content'], 'Description')
- self.assertIn('publishedDate', results[0])
-
- json = """
- {
- "status": "success",
- "data": {
- "query": {
- "locale": "en_us",
- "query": "Test",
- "offset": 10
- },
- "result": {
- "items": [
- {
- "title": "Title",
- "score": 9999,
- "url": "http://www.url.xyz",
- "source": "...",
- "desc": "Description",
- "date": 1433260920,
- "_id": "db0aadd62c2a8565567ffc382f5c61fa",
- "favicon": "https://s.qwant.com/fav.ico"
- }
- ],
- "filters": []
- },
- "cache": {
- "key": "e66aa864c00147a0e3a16ff7a5efafde",
- "created": 1433092754,
- "expiration": 259200,
- "status": "miss",
- "age": 0
- }
- }
- }
- """
- response = mock.Mock(text=json)
- qwant.categories = ['']
- results = qwant.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 0)
-
- json = """
- {
- "status": "success",
- "data": {
- "query": {
- "locale": "en_us",
- "query": "Test",
- "offset": 10
- },
- "result": {
- "filters": []
- },
- "cache": {
- "key": "e66aa864c00147a0e3a16ff7a5efafde",
- "created": 1433092754,
- "expiration": 259200,
- "status": "miss",
- "age": 0
- }
- }
- }
- """
- response = mock.Mock(text=json)
- results = qwant.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 0)
-
- json = """
- {
- "status": "success",
- "data": {
- "query": {
- "locale": "en_us",
- "query": "Test",
- "offset": 10
- },
- "cache": {
- "key": "e66aa864c00147a0e3a16ff7a5efafde",
- "created": 1433092754,
- "expiration": 259200,
- "status": "miss",
- "age": 0
- }
- }
- }
- """
- response = mock.Mock(text=json)
- results = qwant.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 0)
-
- json = """
- {
- "status": "success"
- }
- """
- response = mock.Mock(text=json)
- results = qwant.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 0)
-
- def test_fetch_supported_languages(self):
- page = """some code...
- config_set('project.regionalisation', {"continents":{},"languages":
- {"de":{"code":"de","name":"Deutsch","countries":["DE","CH","AT"]},
- "it":{"code":"it","name":"Italiano","countries":["IT","CH"]}}});
- some more code..."""
- response = mock.Mock(text=page)
- languages = qwant._fetch_supported_languages(response)
- self.assertEqual(type(languages), list)
- self.assertEqual(len(languages), 5)
- self.assertIn('de-DE', languages)
- self.assertIn('de-CH', languages)
- self.assertIn('de-AT', languages)
- self.assertIn('it-IT', languages)
- self.assertIn('it-CH', languages)
diff --git a/tests/unit/engines/test_reddit.py b/tests/unit/engines/test_reddit.py
deleted file mode 100644
index 9c94f4e2b..000000000
--- a/tests/unit/engines/test_reddit.py
+++ /dev/null
@@ -1,71 +0,0 @@
-from collections import defaultdict
-import mock
-from searx.engines import reddit
-from searx.testing import SearxTestCase
-from datetime import datetime
-
-
-class TestRedditEngine(SearxTestCase):
-
- def test_request(self):
- query = 'test_query'
- dic = defaultdict(dict)
- params = reddit.request(query, dic)
- self.assertTrue('url' in params)
- self.assertTrue(query in params['url'])
- self.assertTrue('reddit.com' in params['url'])
-
- def test_response(self):
- resp = mock.Mock(text='{}')
- self.assertEqual(reddit.response(resp), [])
-
- json = """
- {
- "kind": "Listing",
- "data": {
- "children": [{
- "data": {
- "url": "http://google2.com/",
- "permalink": "http://google.com/",
- "title": "Title number one",
- "selftext": "Sample",
- "created_utc": 1401219957.0,
- "thumbnail": "http://image.com/picture.jpg"
- }
- }, {
- "data": {
- "url": "https://reddit2.com/",
- "permalink": "https://reddit.com/",
- "title": "Title number two",
- "selftext": "Dominus vobiscum",
- "created_utc": 1438792533.0,
- "thumbnail": "self"
- }
- }]
- }
- }
- """
-
- resp = mock.Mock(text=json)
- results = reddit.response(resp)
-
- self.assertEqual(len(results), 2)
- self.assertEqual(type(results), list)
-
- # testing first result (picture)
- r = results[0]
- self.assertEqual(r['url'], 'http://google.com/')
- self.assertEqual(r['title'], 'Title number one')
- self.assertEqual(r['template'], 'images.html')
- self.assertEqual(r['img_src'], 'http://google2.com/')
- self.assertEqual(r['thumbnail_src'], 'http://image.com/picture.jpg')
-
- # testing second result (self-post)
- r = results[1]
- self.assertEqual(r['url'], 'https://reddit.com/')
- self.assertEqual(r['title'], 'Title number two')
- self.assertEqual(r['content'], 'Dominus vobiscum')
- created = datetime.fromtimestamp(1438792533.0)
- self.assertEqual(r['publishedDate'], created)
- self.assertTrue('thumbnail_src' not in r)
- self.assertTrue('img_src' not in r)
diff --git a/tests/unit/engines/test_scanr_structures.py b/tests/unit/engines/test_scanr_structures.py
deleted file mode 100644
index a7b9e9185..000000000
--- a/tests/unit/engines/test_scanr_structures.py
+++ /dev/null
@@ -1,175 +0,0 @@
-from collections import defaultdict
-import mock
-from searx.engines import scanr_structures
-from searx.testing import SearxTestCase
-
-
-class TestScanrStructuresEngine(SearxTestCase):
-
- def test_request(self):
- query = 'test_query'
- dicto = defaultdict(dict)
- dicto['pageno'] = 1
- params = scanr_structures.request(query, dicto)
- self.assertIn('url', params)
- self.assertIn(query, params['data'])
- self.assertIn('scanr.enseignementsup-recherche.gouv.fr', params['url'])
-
- def test_response(self):
- self.assertRaises(AttributeError, scanr_structures.response, None)
- self.assertRaises(AttributeError, scanr_structures.response, [])
- self.assertRaises(AttributeError, scanr_structures.response, '')
- self.assertRaises(AttributeError, scanr_structures.response, '[]')
-
- response = mock.Mock(text='{}')
- self.assertEqual(scanr_structures.response(response), [])
-
- response = mock.Mock(text='{"data": []}')
- self.assertEqual(scanr_structures.response(response), [])
-
- json = u"""
- {
- "request":
- {
- "query":"test_query",
- "page":1,
- "pageSize":20,
- "sortOrder":"RELEVANCY",
- "sortDirection":"ASC",
- "searchField":"ALL",
- "from":0
- },
- "total":2471,
- "results":[
- {
- "id":"200711886U",
- "label":"Laboratoire d'Informatique de Grenoble",
- "kind":"RNSR",
- "publicEntity":true,
- "address":{"city":"Grenoble","departement":"38"},
- "logo":"/static/logos/200711886U.png",
- "acronym":"LIG",
- "type":{"code":"UR","label":"Unit\xe9 de recherche"},
- "level":2,
- "institutions":[
- {
- "id":"193819125",
- "label":"Grenoble INP",
- "acronym":"IPG",
- "code":"UMR 5217"
- },
- {
- "id":"130021397",
- "label":"Universit\xe9 de Grenoble Alpes",
- "acronym":"UGA",
- "code":"UMR 5217"
- },
- {
- "id":"180089013",
- "label":"Centre national de la recherche scientifique",
- "acronym":"CNRS",
- "code":"UMR 5217"
- },
- {
- "id":"180089047",
- "label":"Institut national de recherche en informatique et en automatique",
- "acronym":"Inria",
- "code":"UMR 5217"
- }
- ],
- "highlights":[
- {
- "type":"projects",
- "value":"linguicielles d\xe9velopp\xe9s jusqu'ici par le GETALP\
- du <strong>LIG</strong> en tant que prototypes op\xe9rationnels.\
-\\r\\nDans le contexte"
- },
- {
- "type":"acronym",
- "value":"<strong>LIG</strong>"
- },
- {
- "type":"websiteContents",
- "value":"S\xe9lection\\nListe structures\\nD\xe9tail\\n\
- Accueil\\n200711886U : <strong>LIG</strong>\
- Laboratoire d'Informatique de Grenoble Unit\xe9 de recherche"},
- {
- "type":"publications",
- "value":"de noms. Nous avons d'abord d\xe9velopp\xe9 LOOV \
- (pour <strong>Lig</strong> Overlaid OCR in Vid\xe9o), \
- un outil d'extraction des"
- }
- ]
- },
- {
- "id":"199511665F",
- "label":"Laboratoire Bordelais de Recherche en Informatique",
- "kind":"RNSR",
- "publicEntity":true,
- "address":{"city":"Talence","departement":"33"},
- "logo":"/static/logos/199511665F.png",
- "acronym":"LaBRI",
- "type":{"code":"UR","label":"Unit\xe9 de recherche"},
- "level":2,
- "institutions":[
- {
- "id":"130006356",
- "label":"Institut polytechnique de Bordeaux",
- "acronym":"IPB",
- "code":"UMR 5800"
- },
- {
- "id":"130018351",
- "label":"Universit\xe9 de Bordeaux",
- "acronym":null,
- "code":"UMR 5800"
- },
- {
- "id":"180089013",
- "label":"Centre national de la recherche scientifique",
- "acronym":"CNRS",
- "code":"UMR 5800"
- },
- {
- "id":"180089047",
- "label":"Institut national de recherche en informatique et en automatique",
- "acronym":"Inria",
- "code":"UMR 5800"
- }
- ],
- "highlights":[
- {
- "type":"websiteContents",
- "value":"Samia Kerdjoudj\\n2016-07-05\\nDouble-exponential\
- and <strong>triple</strong>-exponential bounds for\
- choosability problems parameterized"
- },
- {
- "type":"publications",
- "value":"de cam\xe9ras install\xe9es dans les lieux publiques \
- a <strong>tripl\xe9</strong> en 2009, passant de 20 000 \
- \xe0 60 000. Malgr\xe9 le"
- }
- ]
- }
- ]
- }
- """
- response = mock.Mock(text=json)
- results = scanr_structures.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 2)
- self.assertEqual(results[0]['title'], u"Laboratoire d'Informatique de Grenoble")
- self.assertEqual(results[0]['url'], 'https://scanr.enseignementsup-recherche.gouv.fr/structure/200711886U')
- self.assertEqual(results[0]['content'],
- u"linguicielles d\xe9velopp\xe9s jusqu'ici par le GETALP "
- u"du LIG en tant que prototypes "
- u"op\xe9rationnels. Dans le contexte")
- self.assertEqual(results[1]['img_src'],
- 'https://scanr.enseignementsup-recherche.gouv.fr//static/logos/199511665F.png')
- self.assertEqual(results[1]['content'],
- "Samia Kerdjoudj 2016-07-05 Double-exponential and"
- " triple-exponential bounds for "
- "choosability problems parameterized")
- self.assertEqual(results[1]['url'], 'https://scanr.enseignementsup-recherche.gouv.fr/structure/199511665F')
- self.assertEqual(results[1]['title'], u"Laboratoire Bordelais de Recherche en Informatique")
diff --git a/tests/unit/engines/test_searchcode_code.py b/tests/unit/engines/test_searchcode_code.py
deleted file mode 100644
index 955aea111..000000000
--- a/tests/unit/engines/test_searchcode_code.py
+++ /dev/null
@@ -1,75 +0,0 @@
-from collections import defaultdict
-import mock
-from searx.engines import searchcode_code
-from searx.testing import SearxTestCase
-
-
-class TestSearchcodeCodeEngine(SearxTestCase):
-
- def test_request(self):
- query = 'test_query'
- dicto = defaultdict(dict)
- dicto['pageno'] = 0
- params = searchcode_code.request(query, dicto)
- self.assertIn('url', params)
- self.assertIn(query, params['url'])
- self.assertIn('searchcode.com', params['url'])
-
- def test_response(self):
- self.assertRaises(AttributeError, searchcode_code.response, None)
- self.assertRaises(AttributeError, searchcode_code.response, [])
- self.assertRaises(AttributeError, searchcode_code.response, '')
- self.assertRaises(AttributeError, searchcode_code.response, '[]')
-
- response = mock.Mock(text='{}')
- self.assertEqual(searchcode_code.response(response), [])
-
- response = mock.Mock(text='{"data": []}')
- self.assertEqual(searchcode_code.response(response), [])
-
- json = """
- {
- "matchterm": "test",
- "previouspage": null,
- "searchterm": "test",
- "query": "test",
- "total": 1000,
- "page": 0,
- "nextpage": 1,
- "results": [
- {
- "repo": "https://repo",
- "linescount": 1044,
- "location": "/tests",
- "name": "Name",
- "url": "https://url",
- "md5hash": "ecac6e479edd2b9406c9e08603cec655",
- "lines": {
- "1": "// Test 011",
- "2": "// Source: "
- },
- "id": 51223527,
- "filename": "File.CPP"
- }
- ]
- }
- """
- response = mock.Mock(text=json)
- results = searchcode_code.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0]['title'], 'Name - File.CPP')
- self.assertEqual(results[0]['url'], 'https://url')
- self.assertEqual(results[0]['repository'], 'https://repo')
- self.assertEqual(results[0]['code_language'], 'cpp')
-
- json = r"""
- {"toto":[
- {"id":200,"name":"Artist Name",
- "link":"http:\/\/www.searchcode_code.com\/artist\/1217","type":"artist"}
- ]}
- """
- response = mock.Mock(text=json)
- results = searchcode_code.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 0)
diff --git a/tests/unit/engines/test_searchcode_doc.py b/tests/unit/engines/test_searchcode_doc.py
deleted file mode 100644
index d02bb7a44..000000000
--- a/tests/unit/engines/test_searchcode_doc.py
+++ /dev/null
@@ -1,70 +0,0 @@
-from collections import defaultdict
-import mock
-from searx.engines import searchcode_doc
-from searx.testing import SearxTestCase
-
-
-class TestSearchcodeDocEngine(SearxTestCase):
-
- def test_request(self):
- query = 'test_query'
- dicto = defaultdict(dict)
- dicto['pageno'] = 0
- params = searchcode_doc.request(query, dicto)
- self.assertIn('url', params)
- self.assertIn(query, params['url'])
- self.assertIn('searchcode.com', params['url'])
-
- def test_response(self):
- self.assertRaises(AttributeError, searchcode_doc.response, None)
- self.assertRaises(AttributeError, searchcode_doc.response, [])
- self.assertRaises(AttributeError, searchcode_doc.response, '')
- self.assertRaises(AttributeError, searchcode_doc.response, '[]')
-
- response = mock.Mock(text='{}')
- self.assertEqual(searchcode_doc.response(response), [])
-
- response = mock.Mock(text='{"data": []}')
- self.assertEqual(searchcode_doc.response(response), [])
-
- json = """
- {
- "matchterm": "test",
- "previouspage": null,
- "searchterm": "test",
- "query": "test",
- "total": 60,
- "page": 0,
- "nextpage": 1,
- "results": [
- {
- "synopsis": "Synopsis",
- "displayname": null,
- "name": "test",
- "url": "http://url",
- "type": "Type",
- "icon": null,
- "namespace": "Namespace",
- "description": "Description"
- }
- ]
- }
- """
- response = mock.Mock(text=json)
- results = searchcode_doc.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0]['title'], '[Type] Namespace test')
- self.assertEqual(results[0]['url'], 'http://url')
- self.assertIn('Description', results[0]['content'])
-
- json = r"""
- {"toto":[
- {"id":200,"name":"Artist Name",
- "link":"http:\/\/www.searchcode_doc.com\/artist\/1217","type":"artist"}
- ]}
- """
- response = mock.Mock(text=json)
- results = searchcode_doc.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 0)
diff --git a/tests/unit/engines/test_soundcloud.py b/tests/unit/engines/test_soundcloud.py
deleted file mode 100644
index 3077d3b4b..000000000
--- a/tests/unit/engines/test_soundcloud.py
+++ /dev/null
@@ -1,192 +0,0 @@
-from collections import defaultdict
-import mock
-from searx.engines import soundcloud
-from searx.testing import SearxTestCase
-from searx.url_utils import quote_plus
-
-
-class TestSoundcloudEngine(SearxTestCase):
-
- def test_request(self):
- query = 'test_query'
- dicto = defaultdict(dict)
- dicto['pageno'] = 1
- params = soundcloud.request(query, dicto)
- self.assertIn('url', params)
- self.assertIn(query, params['url'])
- self.assertIn('soundcloud.com', params['url'])
-
- def test_response(self):
- self.assertRaises(AttributeError, soundcloud.response, None)
- self.assertRaises(AttributeError, soundcloud.response, [])
- self.assertRaises(AttributeError, soundcloud.response, '')
- self.assertRaises(AttributeError, soundcloud.response, '[]')
-
- response = mock.Mock(text='{}')
- self.assertEqual(soundcloud.response(response), [])
-
- response = mock.Mock(text='{"data": []}')
- self.assertEqual(soundcloud.response(response), [])
-
- json = """
- {
- "collection": [
- {
- "kind": "track",
- "id": 159723640,
- "created_at": "2014/07/22 00:51:21 +0000",
- "user_id": 2976616,
- "duration": 303780,
- "commentable": true,
- "state": "finished",
- "original_content_size": 13236349,
- "last_modified": "2015/01/31 15:14:50 +0000",
- "sharing": "public",
- "tag_list": "seekae flume",
- "permalink": "seekae-test-recognise-flume-re-work",
- "streamable": true,
- "embeddable_by": "all",
- "downloadable": true,
- "purchase_url": "http://www.facebook.com/seekaemusic",
- "label_id": null,
- "purchase_title": "Seekae",
- "genre": "freedownload",
- "title": "This is the title",
- "description": "This is the content",
- "label_name": "Future Classic",
- "release": "",
- "track_type": "remix",
- "key_signature": "",
- "isrc": "",
- "video_url": null,
- "bpm": null,
- "release_year": 2014,
- "release_month": 7,
- "release_day": 22,
- "original_format": "mp3",
- "license": "all-rights-reserved",
- "uri": "https://api.soundcloud.com/tracks/159723640",
- "user": {
- "id": 2976616,
- "kind": "user",
- "permalink": "flume",
- "username": "Flume",
- "last_modified": "2014/11/24 19:21:29 +0000",
- "uri": "https://api.soundcloud.com/users/2976616",
- "permalink_url": "http://soundcloud.com/flume",
- "avatar_url": "https://i1.sndcdn.com/avatars-000044475439-4zi7ii-large.jpg"
- },
- "permalink_url": "http://soundcloud.com/this.is.the.url",
- "artwork_url": "https://i1.sndcdn.com/artworks-000085857162-xdxy5c-large.jpg",
- "waveform_url": "https://w1.sndcdn.com/DWrL1lAN8BkP_m.png",
- "stream_url": "https://api.soundcloud.com/tracks/159723640/stream",
- "download_url": "https://api.soundcloud.com/tracks/159723640/download",
- "playback_count": 2190687,
- "download_count": 54856,
- "favoritings_count": 49061,
- "comment_count": 826,
- "likes_count": 49061,
- "reposts_count": 15910,
- "attachments_uri": "https://api.soundcloud.com/tracks/159723640/attachments",
- "policy": "ALLOW"
- }
- ],
- "total_results": 375750,
- "next_href": "https://api.soundcloud.com/search?&q=test",
- "tx_id": ""
- }
- """
- response = mock.Mock(text=json)
- results = soundcloud.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0]['title'], 'This is the title')
- self.assertEqual(results[0]['url'], 'http://soundcloud.com/this.is.the.url')
- self.assertEqual(results[0]['content'], 'This is the content')
- self.assertIn(quote_plus('https://api.soundcloud.com/tracks/159723640'), results[0]['embedded'])
-
- json = """
- {
- "collection": [
- {
- "kind": "user",
- "id": 159723640,
- "created_at": "2014/07/22 00:51:21 +0000",
- "user_id": 2976616,
- "duration": 303780,
- "commentable": true,
- "state": "finished",
- "original_content_size": 13236349,
- "last_modified": "2015/01/31 15:14:50 +0000",
- "sharing": "public",
- "tag_list": "seekae flume",
- "permalink": "seekae-test-recognise-flume-re-work",
- "streamable": true,
- "embeddable_by": "all",
- "downloadable": true,
- "purchase_url": "http://www.facebook.com/seekaemusic",
- "label_id": null,
- "purchase_title": "Seekae",
- "genre": "freedownload",
- "title": "This is the title",
- "description": "This is the content",
- "label_name": "Future Classic",
- "release": "",
- "track_type": "remix",
- "key_signature": "",
- "isrc": "",
- "video_url": null,
- "bpm": null,
- "release_year": 2014,
- "release_month": 7,
- "release_day": 22,
- "original_format": "mp3",
- "license": "all-rights-reserved",
- "uri": "https://api.soundcloud.com/tracks/159723640",
- "user": {
- "id": 2976616,
- "kind": "user",
- "permalink": "flume",
- "username": "Flume",
- "last_modified": "2014/11/24 19:21:29 +0000",
- "uri": "https://api.soundcloud.com/users/2976616",
- "permalink_url": "http://soundcloud.com/flume",
- "avatar_url": "https://i1.sndcdn.com/avatars-000044475439-4zi7ii-large.jpg"
- },
- "permalink_url": "http://soundcloud.com/this.is.the.url",
- "artwork_url": "https://i1.sndcdn.com/artworks-000085857162-xdxy5c-large.jpg",
- "waveform_url": "https://w1.sndcdn.com/DWrL1lAN8BkP_m.png",
- "stream_url": "https://api.soundcloud.com/tracks/159723640/stream",
- "download_url": "https://api.soundcloud.com/tracks/159723640/download",
- "playback_count": 2190687,
- "download_count": 54856,
- "favoritings_count": 49061,
- "comment_count": 826,
- "likes_count": 49061,
- "reposts_count": 15910,
- "attachments_uri": "https://api.soundcloud.com/tracks/159723640/attachments",
- "policy": "ALLOW"
- }
- ],
- "total_results": 375750,
- "next_href": "https://api.soundcloud.com/search?&q=test",
- "tx_id": ""
- }
- """
- response = mock.Mock(text=json)
- results = soundcloud.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 0)
-
- json = """
- {
- "collection": [],
- "total_results": 375750,
- "next_href": "https://api.soundcloud.com/search?&q=test",
- "tx_id": ""
- }
- """
- response = mock.Mock(text=json)
- results = soundcloud.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 0)
diff --git a/tests/unit/engines/test_spotify.py b/tests/unit/engines/test_spotify.py
deleted file mode 100644
index e37c344d2..000000000
--- a/tests/unit/engines/test_spotify.py
+++ /dev/null
@@ -1,124 +0,0 @@
-from collections import defaultdict
-import mock
-from searx.engines import spotify
-from searx.testing import SearxTestCase
-
-
-class TestSpotifyEngine(SearxTestCase):
-
- def test_request(self):
- query = 'test_query'
- dicto = defaultdict(dict)
- dicto['pageno'] = 0
- params = spotify.request(query, dicto)
- self.assertIn('url', params)
- self.assertIn(query, params['url'])
- self.assertIn('spotify.com', params['url'])
-
- def test_response(self):
- self.assertRaises(AttributeError, spotify.response, None)
- self.assertRaises(AttributeError, spotify.response, [])
- self.assertRaises(AttributeError, spotify.response, '')
- self.assertRaises(AttributeError, spotify.response, '[]')
-
- response = mock.Mock(text='{}')
- self.assertEqual(spotify.response(response), [])
-
- response = mock.Mock(text='{"data": []}')
- self.assertEqual(spotify.response(response), [])
-
- json = """
- {
- "tracks": {
- "href": "https://api.spotify.com/v1/search?query=nosfell&offset=0&limit=20&type=track",
- "items": [
- {
- "album": {
- "album_type": "album",
- "external_urls": {
- "spotify": "https://open.spotify.com/album/5c9ap1PBkSGLxT3J73toxA"
- },
- "href": "https://api.spotify.com/v1/albums/5c9ap1PBkSGLxT3J73toxA",
- "id": "5c9ap1PBkSGLxT3J73toxA",
- "name": "Album Title",
- "type": "album",
- "uri": "spotify:album:5c9ap1PBkSGLxT3J73toxA"
- },
- "artists": [
- {
- "external_urls": {
- "spotify": "https://open.spotify.com/artist/0bMc6b75FfZEpQHG1jifKu"
- },
- "href": "https://api.spotify.com/v1/artists/0bMc6b75FfZEpQHG1jifKu",
- "id": "0bMc6b75FfZEpQHG1jifKu",
- "name": "Artist Name",
- "type": "artist",
- "uri": "spotify:artist:0bMc6b75FfZEpQHG1jifKu"
- }
- ],
- "disc_number": 1,
- "duration_ms": 202386,
- "explicit": false,
- "external_ids": {
- "isrc": "FRV640600067"
- },
- "external_urls": {
- "spotify": "https://open.spotify.com/track/2GzvFiedqW8hgqUpWcASZa"
- },
- "href": "https://api.spotify.com/v1/tracks/2GzvFiedqW8hgqUpWcASZa",
- "id": "1000",
- "is_playable": true,
- "name": "Title of track",
- "popularity": 6,
- "preview_url": "https://p.scdn.co/mp3-preview/7b8ecda580965a066b768c2647f877e43f7b1a0a",
- "track_number": 3,
- "type": "track",
- "uri": "spotify:track:2GzvFiedqW8hgqUpWcASZa"
- }
- ],
- "limit": 20,
- "next": "https://api.spotify.com/v1/search?query=nosfell&offset=20&limit=20&type=track",
- "offset": 0,
- "previous": null,
- "total": 107
- }
- }
- """
- response = mock.Mock(text=json)
- results = spotify.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0]['title'], 'Title of track')
- self.assertEqual(results[0]['url'], 'https://open.spotify.com/track/2GzvFiedqW8hgqUpWcASZa')
- self.assertEqual(results[0]['content'], 'Artist Name - Album Title - Title of track')
- self.assertIn('1000', results[0]['embedded'])
-
- json = """
- {
- "tracks": {
- "href": "https://api.spotify.com/v1/search?query=nosfell&offset=0&limit=20&type=track",
- "items": [
- {
- "href": "https://api.spotify.com/v1/tracks/2GzvFiedqW8hgqUpWcASZa",
- "id": "1000",
- "is_playable": true,
- "name": "Title of track",
- "popularity": 6,
- "preview_url": "https://p.scdn.co/mp3-preview/7b8ecda580965a066b768c2647f877e43f7b1a0a",
- "track_number": 3,
- "type": "album",
- "uri": "spotify:track:2GzvFiedqW8hgqUpWcASZa"
- }
- ],
- "limit": 20,
- "next": "https://api.spotify.com/v1/search?query=nosfell&offset=20&limit=20&type=track",
- "offset": 0,
- "previous": null,
- "total": 107
- }
- }
- """
- response = mock.Mock(text=json)
- results = spotify.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 0)
diff --git a/tests/unit/engines/test_stackoverflow.py b/tests/unit/engines/test_stackoverflow.py
deleted file mode 100644
index 18a1ff4bd..000000000
--- a/tests/unit/engines/test_stackoverflow.py
+++ /dev/null
@@ -1,106 +0,0 @@
-from collections import defaultdict
-import mock
-from searx.engines import stackoverflow
-from searx.testing import SearxTestCase
-
-
-class TestStackoverflowEngine(SearxTestCase):
-
- def test_request(self):
- query = 'test_query'
- dicto = defaultdict(dict)
- dicto['pageno'] = 0
- params = stackoverflow.request(query, dicto)
- self.assertTrue('url' in params)
- self.assertTrue(query in params['url'])
- self.assertTrue('stackoverflow.com' in params['url'])
-
- def test_response(self):
- self.assertRaises(AttributeError, stackoverflow.response, None)
- self.assertRaises(AttributeError, stackoverflow.response, [])
- self.assertRaises(AttributeError, stackoverflow.response, '')
- self.assertRaises(AttributeError, stackoverflow.response, '[]')
-
- response = mock.Mock(text='<html></html>')
- self.assertEqual(stackoverflow.response(response), [])
-
- html = """
- <div class="question-summary search-result" id="answer-id-1783426">
- <div class="statscontainer">
- <div class="statsarrow"></div>
- <div class="stats">
- <div class="vote">
- <div class="votes answered">
- <span class="vote-count-post "><strong>2583</strong></span>
- <div class="viewcount">votes</div>
- </div>
- </div>
- </div>
- </div>
- <div class="summary">
- <div class="result-link">
- <span>
- <a href="/questions/this.is.the.url"
- data-searchsession="/questions"
- title="Checkout remote Git branch">
- This is the title
- </a>
- </span>
- </div>
- <div class="excerpt">
- This is the content
- </div>
- <div class="tags user-tags t-git t-git-checkout t-remote-branch">
- </div>
- <div class="started fr">
- answered <span title="2009-11-23 14:26:08Z" class="relativetime">nov 23 '09</span> by
- <a href="/users/214090/hallski">hallski</a>
- </div>
- </div>
- </div>
- """
- response = mock.Mock(text=html)
- results = stackoverflow.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0]['title'], 'This is the title')
- self.assertEqual(results[0]['url'], 'https://stackoverflow.com/questions/this.is.the.url')
- self.assertEqual(results[0]['content'], 'This is the content')
-
- html = """
- <div class="statscontainer">
- <div class="statsarrow"></div>
- <div class="stats">
- <div class="vote">
- <div class="votes answered">
- <span class="vote-count-post "><strong>2583</strong></span>
- <div class="viewcount">votes</div>
- </div>
- </div>
- </div>
- </div>
- <div class="summary">
- <div class="result-link">
- <span>
- <a href="/questions/this.is.the.url"
- data-searchsession="/questions"
- title="Checkout remote Git branch">
- This is the title
- </a>
- </span>
- </div>
- <div class="excerpt">
- This is the content
- </div>
- <div class="tags user-tags t-git t-git-checkout t-remote-branch">
- </div>
- <div class="started fr">
- answered <span title="2009-11-23 14:26:08Z" class="relativetime">nov 23 '09</span> by
- <a href="/users/214090/hallski">hallski</a>
- </div>
- </div>
- """
- response = mock.Mock(text=html)
- results = stackoverflow.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 0)
diff --git a/tests/unit/engines/test_startpage.py b/tests/unit/engines/test_startpage.py
deleted file mode 100644
index a4704ce22..000000000
--- a/tests/unit/engines/test_startpage.py
+++ /dev/null
@@ -1,140 +0,0 @@
-# -*- coding: utf-8 -*-
-from collections import defaultdict
-import mock
-from searx.engines import startpage
-from searx.testing import SearxTestCase
-
-
-class TestStartpageEngine(SearxTestCase):
-
- def test_request(self):
- query = 'test_query'
- dicto = defaultdict(dict)
- dicto['pageno'] = 1
- dicto['language'] = 'fr_FR'
- params = startpage.request(query, dicto)
- self.assertIn('url', params)
- self.assertIn('startpage.com', params['url'])
- self.assertIn('data', params)
- self.assertIn('query', params['data'])
- self.assertIn(query, params['data']['query'])
- self.assertIn('with_language', params['data'])
- self.assertIn('lang_fr', params['data']['with_language'])
-
- dicto['language'] = 'all'
- params = startpage.request(query, dicto)
- self.assertNotIn('with_language', params['data'])
-
- def test_response(self):
- self.assertRaises(AttributeError, startpage.response, None)
- self.assertRaises(AttributeError, startpage.response, [])
- self.assertRaises(AttributeError, startpage.response, '')
- self.assertRaises(AttributeError, startpage.response, '[]')
-
- response = mock.Mock(text='<html></html>')
- self.assertEqual(startpage.response(response), [])
-
- html = """
- <li class="search-result search-item">
- <h3>
- <a href='http://this.should.be.the.link/' id='title_2' name='title_2' >
- This should be the title
- </a>
- <span id='title_stars_2' name='title_stars_2'> </span>
- </h3>
- <p class="search-item__body">
- This should be the content.
- </p>
- <p>
- <span class='url'>www.speed<b>test</b>.net/fr/
- </span>
- -
- <A class="proxy" id="proxy_link" HREF="https://ixquick-proxy.com/do/spg/proxy?ep=&edata=&ek=&ekdata="
- class='proxy'>
- Navigation avec Ixquick Proxy
- </A>
- -
- <A HREF="https://ixquick-proxy.com/do/spg/highlight.pl?l=francais&c=hf&cat=web&q=test&rl=NONE&rid=
- &hlq=https://startpage.com/do/search&mtabp=-1&mtcmd=process_search&mtlanguage=francais&mtengine0=
- &mtcat=web&u=http:%2F%2Fwww.speedtest.net%2Ffr%2F" class='proxy'>
- Mis en surbrillance
- </A>
- </p>
- </li>
- """
- response = mock.Mock(text=html.encode('utf-8'))
- results = startpage.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0]['title'], 'This should be the title')
- self.assertEqual(results[0]['url'], 'http://this.should.be.the.link/')
- self.assertEqual(results[0]['content'], 'This should be the content.')
-
- html = """
- <li class="search-result search-item">
- <h3>
- <a href='http://www.google.com/aclk?sa=l&ai=C' id='title_2' name='title_2' >
- This should be the title
- </a>
- <span id='title_stars_2' name='title_stars_2'> </span>
- </h3>
- <p class="search-item__body">
- This should be the content.
- </p>
- <p>
- <span class='url'>www.speed<b>test</b>.net/fr/
- </span>
- -
- <A class="proxy" id="proxy_link" HREF="https://ixquick-proxy.com/do/spg/proxy?ep=&edata=&ek=&ekdata="
- class='proxy'>
- Navigation avec Ixquick Proxy
- </A>
- -
- <A HREF="https://ixquick-proxy.com/do/spg/highlight.pl?l=francais&c=hf&cat=web&q=test&rl=NONE&rid=
- &hlq=https://startpage.com/do/search&mtabp=-1&mtcmd=process_search&mtlanguage=francais&mtengine0=
- &mtcat=web&u=http:%2F%2Fwww.speedtest.net%2Ffr%2F" class='proxy'>
- Mis en surbrillance
- </A>
- </p>
- </li>
- <li class="search-result search-item">
- <h3>
- <span id='title_stars_2' name='title_stars_2'> </span>
- </h3>
- <p class="search-item__body">
- This should be the content.
- </p>
- <p>
- <span class='url'>www.speed<b>test</b>.net/fr/
- </span>
- </p>
- </li>
- <li class="search-result search-item">
- <h3>
- <a href='http://this.should.be.the.link/' id='title_2' name='title_2' >
- This should be the title
- </a>
- <span id='title_stars_2' name='title_stars_2'> </span>
- </h3>
- <p>
- <span class='url'>www.speed<b>test</b>.net/fr/
- </span>
- -
- <A class="proxy" id="proxy_link" HREF="https://ixquick-proxy.com/do/spg/proxy?ep=&edata=&ek=&ekdata="
- class='proxy'>
- Navigation avec Ixquick Proxy
- </A>
- -
- <A HREF="https://ixquick-proxy.com/do/spg/highlight.pl?l=francais&c=hf&cat=web&q=test&rl=NONE&rid=
- &hlq=https://startpage.com/do/search&mtabp=-1&mtcmd=process_search&mtlanguage=francais&mtengine0=
- &mtcat=web&u=http:%2F%2Fwww.speedtest.net%2Ffr%2F" class='proxy'>
- Mis en surbrillance
- </A>
- </p>
- </li>
- """
- response = mock.Mock(text=html.encode('utf-8'))
- results = startpage.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0]['content'], '')
diff --git a/tests/unit/engines/test_tokyotoshokan.py b/tests/unit/engines/test_tokyotoshokan.py
deleted file mode 100644
index b5c6fad17..000000000
--- a/tests/unit/engines/test_tokyotoshokan.py
+++ /dev/null
@@ -1,110 +0,0 @@
-import mock
-from collections import defaultdict
-from searx.engines import tokyotoshokan
-from searx.testing import SearxTestCase
-from datetime import datetime
-
-
-class TestTokyotoshokanEngine(SearxTestCase):
-
- def test_request(self):
- query = 'test_query'
- dic = defaultdict(dict)
- dic['pageno'] = 1
- params = tokyotoshokan.request(query, dic)
- self.assertTrue('url' in params)
- self.assertTrue(query in params['url'])
- self.assertTrue('tokyotosho.info' in params['url'])
-
- def test_response(self):
- resp = mock.Mock(text='<html></html>')
- self.assertEqual(tokyotoshokan.response(resp), [])
-
- html = """
- <table class="listing">
- <tbody>
- <tr class="shade category_0">
- <td rowspan="2">
- <a href="/?cat=7"><span class="sprite_cat-raw"></span></a>
- </td>
- <td class="desc-top">
- <a href="magnet:?xt=urn:btih:4c19eb46b5113685fbd2288ed2531b0b">
- <span class="sprite_magnet"></span>
- </a>
- <a rel="nofollow" type="application/x-bittorrent" href="http://www.nyaa.se/f">
- Koyomimonogatari
- </a>
- </td>
- <td class="web"><a rel="nofollow" href="details.php?id=975700">Details</a></td>
- </tr>
- <tr class="shade category_0">
- <td class="desc-bot">
- Authorized: <span class="auth_ok">Yes</span>
- Submitter: <a href="?username=Ohys">Ohys</a> |
- Size: 10.5MB |
- Date: 2016-03-26 16:41 UTC |
- Comment: sample comment
- </td>
- <td style="color: #BBB; font-family: monospace" class="stats" align="right">
- S: <span style="color: red">53</span>
- L: <span style="color: red">18</span>
- C: <span style="color: red">0</span>
- ID: 975700
- </td>
- </tr>
-
- <tr class="category_0">
- <td rowspan="2">
- <a href="/?cat=7"><span class="sprite_cat-raw"></span></a>
- </td>
- <td class="desc-top">
- <a rel="nofollow" type="application/x-bittorrent" href="http://google.com/q">
- Owarimonogatari
- </a>
- </td>
- <td class="web"><a rel="nofollow" href="details.php?id=975700">Details</a></td>
- </tr>
- <tr class="category_0">
- <td class="desc-bot">
- Submitter: <a href="?username=Ohys">Ohys</a> |
- Size: 932.84EB |
- Date: QWERTY-03-26 16:41 UTC
- </td>
- <td style="color: #BBB; font-family: monospace" class="stats" align="right">
- S: <span style="color: red">0</span>
- </td>
- </tr>
- </tbody>
- </table>
- """
-
- resp = mock.Mock(text=html)
- results = tokyotoshokan.response(resp)
-
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 2)
-
- # testing the first result, which has correct format
- # and should have all information fields filled
- r = results[0]
- self.assertEqual(r['url'], 'http://www.nyaa.se/f')
- self.assertEqual(r['title'], 'Koyomimonogatari')
- self.assertEqual(r['magnetlink'], 'magnet:?xt=urn:btih:4c19eb46b5113685fbd2288ed2531b0b')
- self.assertEqual(r['filesize'], int(1024 * 1024 * 10.5))
- self.assertEqual(r['publishedDate'], datetime(2016, 3, 26, 16, 41))
- self.assertEqual(r['content'], 'Comment: sample comment')
- self.assertEqual(r['seed'], 53)
- self.assertEqual(r['leech'], 18)
-
- # testing the second result, which does not include magnet link,
- # seed & leech info, and has incorrect size & creation date
- r = results[1]
- self.assertEqual(r['url'], 'http://google.com/q')
- self.assertEqual(r['title'], 'Owarimonogatari')
-
- self.assertFalse('magnetlink' in r)
- self.assertFalse('filesize' in r)
- self.assertFalse('content' in r)
- self.assertFalse('publishedDate' in r)
- self.assertFalse('seed' in r)
- self.assertFalse('leech' in r)
diff --git a/tests/unit/engines/test_torrentz.py b/tests/unit/engines/test_torrentz.py
deleted file mode 100644
index f483bf68c..000000000
--- a/tests/unit/engines/test_torrentz.py
+++ /dev/null
@@ -1,87 +0,0 @@
-import mock
-from collections import defaultdict
-from searx.engines import torrentz
-from searx.testing import SearxTestCase
-from datetime import datetime
-
-
-class TestTorrentzEngine(SearxTestCase):
-
- def test_request(self):
- query = 'test_query'
- dic = defaultdict(dict)
- dic['pageno'] = 1
- params = torrentz.request(query, dic)
- self.assertTrue('url' in params)
- self.assertTrue(query in params['url'])
- self.assertTrue('torrentz2.eu' in params['url'])
-
- def test_response(self):
- resp = mock.Mock(text='<html></html>')
- self.assertEqual(torrentz.response(resp), [])
-
- html = """
- <div class="results">
- <dl>
- <dt>
- <a href="/4362e08b1d80e1820fb2550b752f9f3126fe76d6">
- Completely valid info
- </a>
- books ebooks
- </dt>
- <dd>
- <span>1</span>
- <span title="1503595924">5 hours</span>
- <span>30 MB</span>
- <span>14</span>
- <span>1</span>
- </dd>
- </dl>
-
- <dl>
- <dt>
- <a href="/poaskdpokaspod">
- Invalid hash and date and filesize
- </a>
- books ebooks
- </dt>
- <dd>
- <span>1</span>
- <span title="1503595924 aaa">5 hours</span>
- <span>30MB</span>
- <span>5,555</span>
- <span>1,234,567</span>
- </dd>
- </dl>
- </div>
- """
-
- resp = mock.Mock(text=html)
- results = torrentz.response(resp)
-
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 2)
-
- # testing against the first result
- r = results[0]
- self.assertEqual(r['url'], 'https://torrentz2.eu/4362e08b1d80e1820fb2550b752f9f3126fe76d6')
- self.assertEqual(r['title'], 'Completely valid info books ebooks')
-        # timestamp 1503595924 corresponds to 24 Aug 2017 17:32:04 UTC
- self.assertEqual(r['publishedDate'], datetime.fromtimestamp(1503595924))
- self.assertEqual(r['seed'], 14)
- self.assertEqual(r['leech'], 1)
- self.assertEqual(r['filesize'], 30 * 1024 * 1024)
- self.assertEqual(r['magnetlink'], 'magnet:?xt=urn:btih:4362e08b1d80e1820fb2550b752f9f3126fe76d6')
-
- # testing against the second result
- r = results[1]
- self.assertEqual(r['url'], 'https://torrentz2.eu/poaskdpokaspod')
- self.assertEqual(r['title'], 'Invalid hash and date and filesize books ebooks')
- self.assertEqual(r['seed'], 5555)
- self.assertEqual(r['leech'], 1234567)
-
-        # the second result has an invalid hash, creation date and torrent size,
-        # so these fields should be absent from the parsed result
- self.assertFalse('magnetlink' in r)
- self.assertFalse('filesize' in r)
- self.assertFalse('publishedDate' in r)
diff --git a/tests/unit/engines/test_twitter.py b/tests/unit/engines/test_twitter.py
deleted file mode 100644
index b444b48ee..000000000
--- a/tests/unit/engines/test_twitter.py
+++ /dev/null
@@ -1,502 +0,0 @@
-# -*- coding: utf-8 -*-
-from collections import defaultdict
-import mock
-from searx.engines import twitter
-from searx.testing import SearxTestCase
-
-
-class TestTwitterEngine(SearxTestCase):
-
- def test_request(self):
- query = 'test_query'
- dicto = defaultdict(dict)
- dicto['pageno'] = 0
- dicto['language'] = 'fr_FR'
- params = twitter.request(query, dicto)
- self.assertIn('url', params)
- self.assertIn(query, params['url'])
- self.assertIn('twitter.com', params['url'])
- self.assertIn('cookies', params)
- self.assertIn('lang', params['cookies'])
- self.assertIn('fr', params['cookies']['lang'])
-
- dicto['language'] = 'all'
- params = twitter.request(query, dicto)
- self.assertIn('cookies', params)
- self.assertIn('lang', params['cookies'])
- self.assertIn('en', params['cookies']['lang'])
-
- def test_response(self):
- self.assertRaises(AttributeError, twitter.response, None)
- self.assertRaises(AttributeError, twitter.response, [])
- self.assertRaises(AttributeError, twitter.response, '')
- self.assertRaises(AttributeError, twitter.response, '[]')
-
- response = mock.Mock(text='<html></html>')
- self.assertEqual(twitter.response(response), [])
-
- html = """
- <li class="js-stream-item stream-item stream-item expanding-stream-item" data-item-id="563005573290287105"
- id="stream-item-tweet-563005573290287105" data-item-type="tweet">
- <div class="tweet original-tweet js-stream-tweet js-actionable-tweet js-profile-popup-actionable
- js-original-tweet has-cards has-native-media" data-tweet-id="563005573290287105" data-disclosure-type=""
- data-item-id="563005573290287105" data-screen-name="Jalopnik" data-name="Jalopnik"
- data-user-id="3060631" data-has-native-media="true" data-has-cards="true" data-card-type="photo"
- data-expanded-footer="&lt;div class=&quot;js-tweet-details-fixer
- tweet-details-fixer&quot;&gt;&#10;&#10;&#10;
- &lt;div class=&quot;cards-media-container js-media-container&quot;&gt;&lt;div
- data-card-url=&quot;//twitter.com/Jalopnik/status/563005573290287105/photo/1&quot; data-card-type=&quot;
- photo&quot; class=&quot;cards-base cards-multimedia&quot; data-element-context=&quot;platform_photo_card
- &quot;&gt;&#10;&#10;&#10; &lt;a class=&quot;media media-thumbnail twitter-timeline-link is-preview
- &quot; data-url=&quot;https://pbs.twimg.com/media/B9Aylf5IMAAuziP.jpg:large&quot;
- data-resolved-url-large=&quot;https://pbs.twimg.com/media/B9Aylf5IMAAuziP.jpg:large&quot;
- href=&quot;//twitter.com/Jalopnik/status/563005573290287105/photo/1&quot;&gt;&#10;
- &lt;div class=&quot;&quot;&gt;&#10; &lt;img src=&quot;
- https://pbs.twimg.com/media/B9Aylf5IMAAuziP.jpg&quot;
- alt=&quot;Embedded image permalink&quot; width=&quot;636&quot; height=&quot;309&quot;&gt;&#10;
- &lt;/div&gt;&#10;&#10; &lt;/a&gt;&#10;&#10; &lt;div class=&quot;cards-content&quot;&gt;&#10;
- &lt;div class=&quot;byline&quot;&gt;&#10; &#10; &lt;/div&gt;&#10; &#10; &lt;/div&gt;&#10;
- &#10;&lt;/div&gt;&#10;&#10;&#10;&#10;&#10;&lt;/div&gt;&#10;&#10;&#10;&#10; &lt;div
- class=&quot;js-machine-translated-tweet-container&quot;&gt;&lt;/div&gt;&#10; &lt;div
- class=&quot;js-tweet-stats-container tweet-stats-container &quot;&gt;&#10; &lt;/div&gt;&#10;&#10;
- &lt;div class=&quot;client-and-actions&quot;&gt;&#10; &lt;span class=&quot;metadata&quot;&gt;&#10;
- &lt;span&gt;5:06 PM - 4 Feb 2015&lt;/span&gt;&#10;&#10; &amp;middot; &lt;a
- class=&quot;permalink-link js-permalink js-nav&quot; href=&quot;/Jalopnik/status/563005573290287105
- &quot;tabindex=&quot;-1&quot;&gt;Details&lt;/a&gt;&#10; &#10;&#10; &#10; &#10;
- &#10;&#10; &lt;/span&gt;&#10;&lt;/div&gt;&#10;&#10;&#10;&lt;/div&gt;&#10;" data-you-follow="false"
- data-you-block="false">
- <div class="context">
- </div>
- <div class="content">
- <div class="stream-item-header">
- <a class="account-group js-account-group js-action-profile js-user-profile-link js-nav"
- href="/Jalopnik" data-user-id="3060631">
- <img class="avatar js-action-profile-avatar"
- src="https://pbs.twimg.com/profile_images/2976430168/5cd4a59_bigger.jpeg" alt="">
- <strong class="fullname js-action-profile-name show-popup-with-id" data-aria-label-part>
- Jalopnik
- </strong>
- <span>&rlm;</span>
- <span class="username js-action-profile-name" data-aria-label-part>
- <s>@</s><b>TitleName</b>
- </span>
- </a>
- <small class="time">
- <a href="/this.is.the.url"
- class="tweet-timestamp js-permalink js-nav js-tooltip" title="5:06 PM - 4 Feb 2015" >
- <span class="u-hiddenVisually" data-aria-label-part="last">17 minutes ago</span>
- </a>
- </small>
- </div>
- <p class="js-tweet-text tweet-text" lang="en" data-aria-label-part="0">
- This is the content étude à€
- <a href="http://t.co/nRWsqQAwBL" rel="nofollow" dir="ltr"
- data-expanded-url="http://jalo.ps/ReMENu4" class="twitter-timeline-link"
- target="_blank" title="http://jalo.ps/ReMENu4" >
- <span class="tco-ellipsis">
- </span>
- <span class="invisible">http://</span><span class="js-display-url">link.in.tweet</span>
- <span class="invisible"></span>
- <span class="tco-ellipsis">
- <span class="invisible">&nbsp;</span>
- </span>
- </a>
- <a href="http://t.co/rbFsfeE0l3" class="twitter-timeline-link u-hidden"
- data-pre-embedded="true" dir="ltr">
- pic.twitter.com/rbFsfeE0l3
- </a>
- </p>
- <div class="expanded-content js-tweet-details-dropdown">
- </div>
- <div class="stream-item-footer">
- <a class="details with-icn js-details" href="/Jalopnik/status/563005573290287105">
- <span class="Icon Icon--photo">
- </span>
- <b>
- <span class="expand-stream-item js-view-details">
- View photo
- </span>
- <span class="collapse-stream-item js-hide-details">
- Hide photo
- </span>
- </b>
- </a>
- <span class="ProfileTweet-action--reply u-hiddenVisually">
- <span class="ProfileTweet-actionCount" aria-hidden="true" data-tweet-stat-count="0">
- <span class="ProfileTweet-actionCountForAria" >0 replies</span>
- </span>
- </span>
- <span class="ProfileTweet-action--retweet u-hiddenVisually">
- <span class="ProfileTweet-actionCount" data-tweet-stat-count="8">
- <span class="ProfileTweet-actionCountForAria" data-aria-label-part>8 retweets</span>
- </span>
- </span>
- <span class="ProfileTweet-action--favorite u-hiddenVisually">
- <span class="ProfileTweet-actionCount" data-tweet-stat-count="14">
- <span class="ProfileTweet-actionCountForAria" data-aria-label-part>14 favorites</span>
- </span>
- </span>
- <div role="group" aria-label="Tweet actions" class="ProfileTweet-actionList u-cf js-actions">
- <div class="ProfileTweet-action ProfileTweet-action--reply">
- <button class="ProfileTweet-actionButton u-textUserColorHover js-actionButton
- js-actionReply" data-modal="ProfileTweet-reply" type="button" title="Reply">
- <span class="Icon Icon--reply">
- </span>
- <span class="u-hiddenVisually">Reply</span>
- <span class="ProfileTweet-actionCount u-textUserColorHover
- ProfileTweet-actionCount--isZero">
- <span class="ProfileTweet-actionCountForPresentation" aria-hidden="true">
- </span>
- </span>
- </button>
- </div>
- <div class="ProfileTweet-action ProfileTweet-action--retweet js-toggleState js-toggleRt">
- <button class="ProfileTweet-actionButton js-actionButton js-actionRetweet js-tooltip"
- title="Retweet" data-modal="ProfileTweet-retweet" type="button">
- <span class="Icon Icon--retweet">
- </span>
- <span class="u-hiddenVisually">Retweet</span>
- <span class="ProfileTweet-actionCount">
- <span class="ProfileTweet-actionCountForPresentation">8</span>
- </span>
- </button>
- <button class="ProfileTweet-actionButtonUndo js-actionButton js-actionRetweet"
- data-modal="ProfileTweet-retweet" title="Undo retweet" type="button">
- <span class="Icon Icon--retweet">
- </span>
- <span class="u-hiddenVisually">Retweeted</span>
- <span class="ProfileTweet-actionCount">
- <span class="ProfileTweet-actionCountForPresentation">8</span>
- </span>
- </button>
- </div>
- <div class="ProfileTweet-action ProfileTweet-action--favorite js-toggleState">
- <button class="ProfileTweet-actionButton js-actionButton js-actionFavorite js-tooltip"
- title="Favorite" type="button">
- <span class="Icon Icon--favorite">
- </span>
- <span class="u-hiddenVisually">Favorite</span>
- <span class="ProfileTweet-actionCount">
- <span class="ProfileTweet-actionCountForPresentation">14</span>
- </span>
- </button>
- <button class="ProfileTweet-actionButtonUndo u-linkClean js-actionButton
- js-actionFavorite" title="Undo favorite" type="button">
- <span class="Icon Icon--favorite">
- </span>
- <span class="u-hiddenVisually">Favorited</span>
- <span class="ProfileTweet-actionCount">
- <span class="ProfileTweet-actionCountForPresentation">
- 14
- </span>
- </span>
- </button>
- </div>
- <div class="ProfileTweet-action ProfileTweet-action--more js-more-ProfileTweet-actions">
- <div class="dropdown">
- <button class="ProfileTweet-actionButton u-textUserColorHover dropdown-toggle
- js-tooltip js-dropdown-toggle" type="button" title="More">
- <span class="Icon Icon--dots">
- </span>
- <span class="u-hiddenVisually">More</span>
- </button>
- <div class="dropdown-menu">
- <div class="dropdown-caret">
- <div class="caret-outer">
- </div>
- <div class="caret-inner">
- </div>
- </div>
- <ul>
- <li class="share-via-dm js-actionShareViaDM" data-nav="share_tweet_dm">
- <button type="button" class="dropdown-link">
- Share via Direct Message
- </button>
- </li>
- <li class="embed-link js-actionEmbedTweet" data-nav="embed_tweet">
- <button type="button" class="dropdown-link">
- Embed Tweet
- </button>
- </li>
- <li class="mute-user-item pretty-link">
- <button type="button" class="dropdown-link">
- Mute
- </button>
- </li>
- <li class="unmute-user-item pretty-link">
- <button type="button" class="dropdown-link">
- Unmute
- </button>
- </li>
- <li class="block-or-report-link js-actionBlockOrReport"
- data-nav="block_or_report">
- <button type="button" class="dropdown-link">
- Block or report
- </button>
- </li>
- </ul>
- </div>
- </div>
- </div>
- </div>
- </div>
- </div>
- </div>
- </li>
- """
- response = mock.Mock(text=html)
- results = twitter.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0]['title'], '@TitleName')
- self.assertEqual(results[0]['url'], 'https://twitter.com/this.is.the.url')
- self.assertIn(u'This is the content', results[0]['content'])
- # self.assertIn(u'This is the content étude à€', results[0]['content'])
-
- html = """
- <li class="js-stream-item stream-item stream-item expanding-stream-item" data-item-id="563005573290287105"
- id="stream-item-tweet-563005573290287105" data-item-type="tweet">
- <div class="tweet original-tweet js-stream-tweet js-actionable-tweet js-profile-popup-actionable
- js-original-tweet has-cards has-native-media" data-tweet-id="563005573290287105" data-disclosure-type=""
- data-item-id="563005573290287105" data-screen-name="Jalopnik" data-name="Jalopnik"
- data-user-id="3060631" data-has-native-media="true" data-has-cards="true" data-card-type="photo"
- data-expanded-footer="&lt;div class=&quot;js-tweet-details-fixer
- tweet-details-fixer&quot;&gt;&#10;&#10;&#10;
- &lt;div class=&quot;cards-media-container js-media-container&quot;&gt;&lt;div
- data-card-url=&quot;//twitter.com/Jalopnik/status/563005573290287105/photo/1&quot; data-card-type=&quot;
- photo&quot; class=&quot;cards-base cards-multimedia&quot; data-element-context=&quot;platform_photo_card
- &quot;&gt;&#10;&#10;&#10; &lt;a class=&quot;media media-thumbnail twitter-timeline-link is-preview
- &quot; data-url=&quot;https://pbs.twimg.com/media/B9Aylf5IMAAuziP.jpg:large&quot;
- data-resolved-url-large=&quot;https://pbs.twimg.com/media/B9Aylf5IMAAuziP.jpg:large&quot;
- href=&quot;//twitter.com/Jalopnik/status/563005573290287105/photo/1&quot;&gt;&#10;
- &lt;div class=&quot;&quot;&gt;&#10; &lt;img src=&quot;
- https://pbs.twimg.com/media/B9Aylf5IMAAuziP.jpg&quot;
- alt=&quot;Embedded image permalink&quot; width=&quot;636&quot; height=&quot;309&quot;&gt;&#10;
- &lt;/div&gt;&#10;&#10; &lt;/a&gt;&#10;&#10; &lt;div class=&quot;cards-content&quot;&gt;&#10;
- &lt;div class=&quot;byline&quot;&gt;&#10; &#10; &lt;/div&gt;&#10; &#10; &lt;/div&gt;&#10;
- &#10;&lt;/div&gt;&#10;&#10;&#10;&#10;&#10;&lt;/div&gt;&#10;&#10;&#10;&#10; &lt;div
- class=&quot;js-machine-translated-tweet-container&quot;&gt;&lt;/div&gt;&#10; &lt;div
- class=&quot;js-tweet-stats-container tweet-stats-container &quot;&gt;&#10; &lt;/div&gt;&#10;&#10;
- &lt;div class=&quot;client-and-actions&quot;&gt;&#10; &lt;span class=&quot;metadata&quot;&gt;&#10;
- &lt;span&gt;5:06 PM - 4 Feb 2015&lt;/span&gt;&#10;&#10; &amp;middot; &lt;a
- class=&quot;permalink-link js-permalink js-nav&quot; href=&quot;/Jalopnik/status/563005573290287105
- &quot;tabindex=&quot;-1&quot;&gt;Details&lt;/a&gt;&#10; &#10;&#10; &#10; &#10;
- &#10;&#10; &lt;/span&gt;&#10;&lt;/div&gt;&#10;&#10;&#10;&lt;/div&gt;&#10;" data-you-follow="false"
- data-you-block="false">
- <div class="context">
- </div>
- <div class="content">
- <div class="stream-item-header">
- <a class="account-group js-account-group js-action-profile js-user-profile-link js-nav"
- href="/Jalopnik" data-user-id="3060631">
- <img class="avatar js-action-profile-avatar"
- src="https://pbs.twimg.com/profile_images/2976430168/5cd4a59_bigger.jpeg" alt="">
- <strong class="fullname js-action-profile-name show-popup-with-id" data-aria-label-part>
- Jalopnik
- </strong>
- <span>&rlm;</span>
- <span class="username js-action-profile-name" data-aria-label-part>
- <s>@</s><b>TitleName</b>
- </span>
- </a>
- <small class="time">
- <a href="/this.is.the.url"
- class="tweet-timestamp js-permalink js-nav js-tooltip" title="5:06 PM - 4 Feb 2015" >
- <span class="_timestamp js-short-timestamp js-relative-timestamp" data-time="1423065963"
- data-time-ms="1423065963000" data-long-form="true" aria-hidden="true">
- 17m
- </span>
- <span class="u-hiddenVisually" data-aria-label-part="last">17 minutes ago</span>
- </a>
- </small>
- </div>
- <p class="js-tweet-text tweet-text" lang="en" data-aria-label-part="0">
- This is the content étude à€
- <a href="http://t.co/nRWsqQAwBL" rel="nofollow" dir="ltr"
- data-expanded-url="http://jalo.ps/ReMENu4" class="twitter-timeline-link"
- target="_blank" title="http://jalo.ps/ReMENu4" >
- <span class="tco-ellipsis">
- </span>
- <span class="invisible">http://</span><span class="js-display-url">link.in.tweet</span>
- <span class="invisible"></span>
- <span class="tco-ellipsis">
- <span class="invisible">&nbsp;</span>
- </span>
- </a>
- <a href="http://t.co/rbFsfeE0l3" class="twitter-timeline-link u-hidden"
- data-pre-embedded="true" dir="ltr">
- pic.twitter.com/rbFsfeE0l3
- </a>
- </p>
- <div class="expanded-content js-tweet-details-dropdown">
- </div>
- <div class="stream-item-footer">
- <a class="details with-icn js-details" href="/Jalopnik/status/563005573290287105">
- <span class="Icon Icon--photo">
- </span>
- <b>
- <span class="expand-stream-item js-view-details">
- View photo
- </span>
- <span class="collapse-stream-item js-hide-details">
- Hide photo
- </span>
- </b>
- </a>
- <span class="ProfileTweet-action--reply u-hiddenVisually">
- <span class="ProfileTweet-actionCount" aria-hidden="true" data-tweet-stat-count="0">
- <span class="ProfileTweet-actionCountForAria" >0 replies</span>
- </span>
- </span>
- <span class="ProfileTweet-action--retweet u-hiddenVisually">
- <span class="ProfileTweet-actionCount" data-tweet-stat-count="8">
- <span class="ProfileTweet-actionCountForAria" data-aria-label-part>8 retweets</span>
- </span>
- </span>
- <span class="ProfileTweet-action--favorite u-hiddenVisually">
- <span class="ProfileTweet-actionCount" data-tweet-stat-count="14">
- <span class="ProfileTweet-actionCountForAria" data-aria-label-part>14 favorites</span>
- </span>
- </span>
- <div role="group" aria-label="Tweet actions" class="ProfileTweet-actionList u-cf js-actions">
- <div class="ProfileTweet-action ProfileTweet-action--reply">
- <button class="ProfileTweet-actionButton u-textUserColorHover js-actionButton
- js-actionReply" data-modal="ProfileTweet-reply" type="button" title="Reply">
- <span class="Icon Icon--reply">
- </span>
- <span class="u-hiddenVisually">Reply</span>
- <span class="ProfileTweet-actionCount u-textUserColorHover
- ProfileTweet-actionCount--isZero">
- <span class="ProfileTweet-actionCountForPresentation" aria-hidden="true">
- </span>
- </span>
- </button>
- </div>
- <div class="ProfileTweet-action ProfileTweet-action--retweet js-toggleState js-toggleRt">
- <button class="ProfileTweet-actionButton js-actionButton js-actionRetweet js-tooltip"
- title="Retweet" data-modal="ProfileTweet-retweet" type="button">
- <span class="Icon Icon--retweet">
- </span>
- <span class="u-hiddenVisually">Retweet</span>
- <span class="ProfileTweet-actionCount">
- <span class="ProfileTweet-actionCountForPresentation">8</span>
- </span>
- </button>
- <button class="ProfileTweet-actionButtonUndo js-actionButton js-actionRetweet"
- data-modal="ProfileTweet-retweet" title="Undo retweet" type="button">
- <span class="Icon Icon--retweet">
- </span>
- <span class="u-hiddenVisually">Retweeted</span>
- <span class="ProfileTweet-actionCount">
- <span class="ProfileTweet-actionCountForPresentation">8</span>
- </span>
- </button>
- </div>
- <div class="ProfileTweet-action ProfileTweet-action--favorite js-toggleState">
- <button class="ProfileTweet-actionButton js-actionButton js-actionFavorite js-tooltip"
- title="Favorite" type="button">
- <span class="Icon Icon--favorite">
- </span>
- <span class="u-hiddenVisually">Favorite</span>
- <span class="ProfileTweet-actionCount">
- <span class="ProfileTweet-actionCountForPresentation">14</span>
- </span>
- </button>
- <button class="ProfileTweet-actionButtonUndo u-linkClean js-actionButton
- js-actionFavorite" title="Undo favorite" type="button">
- <span class="Icon Icon--favorite">
- </span>
- <span class="u-hiddenVisually">Favorited</span>
- <span class="ProfileTweet-actionCount">
- <span class="ProfileTweet-actionCountForPresentation">
- 14
- </span>
- </span>
- </button>
- </div>
- <div class="ProfileTweet-action ProfileTweet-action--more js-more-ProfileTweet-actions">
- <div class="dropdown">
- <button class="ProfileTweet-actionButton u-textUserColorHover dropdown-toggle
- js-tooltip js-dropdown-toggle" type="button" title="More">
- <span class="Icon Icon--dots">
- </span>
- <span class="u-hiddenVisually">More</span>
- </button>
- <div class="dropdown-menu">
- <div class="dropdown-caret">
- <div class="caret-outer">
- </div>
- <div class="caret-inner">
- </div>
- </div>
- <ul>
- <li class="share-via-dm js-actionShareViaDM" data-nav="share_tweet_dm">
- <button type="button" class="dropdown-link">
- Share via Direct Message
- </button>
- </li>
- <li class="embed-link js-actionEmbedTweet" data-nav="embed_tweet">
- <button type="button" class="dropdown-link">
- Embed Tweet
- </button>
- </li>
- <li class="mute-user-item pretty-link">
- <button type="button" class="dropdown-link">
- Mute
- </button>
- </li>
- <li class="unmute-user-item pretty-link">
- <button type="button" class="dropdown-link">
- Unmute
- </button>
- </li>
- <li class="block-or-report-link js-actionBlockOrReport"
- data-nav="block_or_report">
- <button type="button" class="dropdown-link">
- Block or report
- </button>
- </li>
- </ul>
- </div>
- </div>
- </div>
- </div>
- </div>
- </div>
- </div>
- </li>
- """
- response = mock.Mock(text=html)
- results = twitter.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0]['title'], '@TitleName')
- self.assertEqual(results[0]['url'], 'https://twitter.com/this.is.the.url')
- self.assertIn(u'This is the content', results[0]['content'])
-
- html = """
- <li class="b_algo" u="0|5109|4755453613245655|UAGjXgIrPH5yh-o5oNHRx_3Zta87f_QO">
- <div Class="sa_mc">
- <div class="sb_tlst">
- <h2>
- <a href="http://this.should.be.the.link/" h="ID=SERP,5124.1">
- <strong>This</strong> should be the title</a>
- </h2>
- </div>
- <div class="sb_meta">
- <cite>
- <strong>this</strong>.meta.com</cite>
- <span class="c_tlbxTrg">
- <span class="c_tlbxH" H="BASE:CACHEDPAGEDEFAULT" K="SERP,5125.1">
- </span>
- </span>
- </div>
- <p>
- <strong>This</strong> should be the content.</p>
- </div>
- </li>
- """
- response = mock.Mock(text=html)
- results = twitter.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 0)
diff --git a/tests/unit/engines/test_unsplash.py b/tests/unit/engines/test_unsplash.py
deleted file mode 100644
index 4501de906..000000000
--- a/tests/unit/engines/test_unsplash.py
+++ /dev/null
@@ -1,38 +0,0 @@
-from collections import defaultdict
-import mock
-from searx.testing import SearxTestCase
-from searx.engines import unsplash
-
-
-class TestUnsplashEngine(SearxTestCase):
- def test_request(self):
- query = 'penguin'
- _dict = defaultdict(dict)
- _dict['pageno'] = 1
- params = unsplash.request(query, _dict)
-
- self.assertTrue('url' in params)
- self.assertTrue(query in params['url'])
-
- def test_response(self):
- resp = mock.Mock(text='{}')
- result = unsplash.response(resp)
- self.assertEqual([], result)
-
- resp.text = '{"results": []}'
- result = unsplash.response(resp)
- self.assertEqual([], result)
-
- # Sourced from https://unsplash.com/napi/search/photos?query=penguin&xp=&per_page=20&page=2
- with open('./tests/unit/engines/unsplash_fixture.json') as fixture:
- resp.text = fixture.read()
-
- result = unsplash.response(resp)
- self.assertEqual(len(result), 2)
- self.assertEqual(result[0]['title'], 'low angle photography of swimming penguin')
- self.assertEqual(result[0]['url'], 'https://unsplash.com/photos/FY8d721UO_4')
- self.assertEqual(result[0]['thumbnail_src'], 'https://images.unsplash.com/photo-1523557148507-1b77641c7e7c?ixlib=rb-0.3.5&q=80\
-&fm=jpg&crop=entropy&cs=tinysrgb&w=200&fit=max')
- self.assertEqual(result[0]['img_src'], 'https://images.unsplash.com/photo-1523557148507-1b77641c7e7c\
-?ixlib=rb-0.3.5')
- self.assertEqual(result[0]['content'], '')
diff --git a/tests/unit/engines/test_vimeo.py b/tests/unit/engines/test_vimeo.py
deleted file mode 100644
index c86b50a14..000000000
--- a/tests/unit/engines/test_vimeo.py
+++ /dev/null
@@ -1,36 +0,0 @@
-# -*- coding: utf-8 -*-
-from collections import defaultdict
-import mock
-from searx.engines import vimeo
-from searx.testing import SearxTestCase
-
-
-class TestVimeoEngine(SearxTestCase):
-
- def test_request(self):
- query = 'test_query'
- dicto = defaultdict(dict)
- dicto['pageno'] = 0
- params = vimeo.request(query, dicto)
- self.assertTrue('url' in params)
- self.assertTrue(query in params['url'])
- self.assertTrue('vimeo.com' in params['url'])
-
- def test_response(self):
- self.assertRaises(AttributeError, vimeo.response, None)
- self.assertRaises(AttributeError, vimeo.response, [])
- self.assertRaises(AttributeError, vimeo.response, '')
- self.assertRaises(AttributeError, vimeo.response, '[]')
-
- json = u"""
-{"filtered":{"total":274641,"page":1,"per_page":18,"paging":{"next":"?sizes=590x332&page=2","previous":null,"first":"?sizes=590x332&page=1","last":"?sizes=590x332&page=15258"},"data":[{"is_staffpick":false,"is_featured":true,"type":"clip","clip":{"uri":"\\/videos\\/106557563","name":"Hot Rod Revue: The South","link":"https:\\/\\/vimeo.com\\/106557563","duration":4069,"created_time":"2014-09-19T03:38:04+00:00","privacy":{"view":"ptv"},"pictures":{"sizes":[{"width":"590","height":"332","link":"https:\\/\\/i.vimeocdn.com\\/video\\/489717884_590x332.jpg?r=pad","link_with_play_button":"https:\\/\\/i.vimeocdn.com\\/filter\\/overlay?src0=https%3A%2F%2Fi.vimeocdn.com%2Fvideo%2F489717884_590x332.jpg&src1=http%3A%2F%2Ff.vimeocdn.com%2Fp%2Fimages%2Fcrawler_play.png"}]},"stats":{"plays":null},"metadata":{"connections":{"comments":{"total":0},"likes":{"total":5}},"interactions":[]},"user":{"name":"Cal Thorley","link":"https:\\/\\/vimeo.com\\/calthorley","pictures":{"sizes":[{"width":30,"height":30,"link":"https:\\/\\/i.vimeocdn.com\\/portrait\\/2545308_30x30?r=pad"},{"width":75,"height":75,"link":"https:\\/\\/i.vimeocdn.com\\/portrait\\/2545308_75x75?r=pad"},{"width":100,"height":100,"link":"https:\\/\\/i.vimeocdn.com\\/portrait\\/2545308_100x100?r=pad"},{"width":300,"height":300,"link":"https:\\/\\/i.vimeocdn.com\\/portrait\\/2545308_300x300?r=pad"}]}}}}]}};
-
-""" # noqa
- response = mock.Mock(text=json)
- results = vimeo.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0]['title'], u'Hot Rod Revue: The South')
- self.assertEqual(results[0]['url'], 'https://vimeo.com/106557563')
- self.assertEqual(results[0]['content'], '')
- self.assertEqual(results[0]['thumbnail'], 'https://i.vimeocdn.com/video/489717884_590x332.jpg?r=pad')
diff --git a/tests/unit/engines/test_wikidata.py b/tests/unit/engines/test_wikidata.py
deleted file mode 100644
index 48be17bb4..000000000
--- a/tests/unit/engines/test_wikidata.py
+++ /dev/null
@@ -1,514 +0,0 @@
-# -*- coding: utf-8 -*-
-from lxml.html import fromstring
-from lxml import etree
-from collections import defaultdict
-import mock
-from searx.engines import wikidata
-from searx.testing import SearxTestCase
-
-
-class TestWikidataEngine(SearxTestCase):
-
- def test_request(self):
- query = 'test_query'
- dicto = defaultdict(dict)
- dicto['language'] = 'all'
- params = wikidata.request(query, dicto)
- self.assertIn('url', params)
- self.assertIn(query, params['url'])
- self.assertIn('wikidata.org', params['url'])
-
- dicto['language'] = 'es_ES'
- params = wikidata.request(query, dicto)
- self.assertIn(query, params['url'])
-
- # successful cases are not tested here to avoid sending additional requests
- def test_response(self):
- self.assertRaises(AttributeError, wikidata.response, None)
- self.assertRaises(AttributeError, wikidata.response, [])
- self.assertRaises(AttributeError, wikidata.response, '')
- self.assertRaises(AttributeError, wikidata.response, '[]')
-
- wikidata.supported_languages = ['en', 'es']
- wikidata.language_aliases = {}
- response = mock.Mock(content='<html></html>'.encode("utf-8"), search_params={"language": "en"})
- self.assertEqual(wikidata.response(response), [])
-
- def test_getDetail(self):
- response = {}
- results = wikidata.getDetail(response, "Q123", "en", "en-US", etree.HTMLParser())
- self.assertEqual(results, [])
-
- title_html = '<div><div class="wikibase-title-label">Test</div></div>'
- html = """
- <div>
- <div class="wikibase-entitytermsview-heading-description">
- </div>
- <div>
- <ul class="wikibase-sitelinklistview-listview">
- <li data-wb-siteid="enwiki"><a href="http://en.wikipedia.org/wiki/Test">Test</a></li>
- </ul>
- </div>
- </div>
- """
- response = {"parse": {"displaytitle": title_html, "text": html}}
-
- results = wikidata.getDetail(response, "Q123", "en", "en-US", etree.HTMLParser())
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0]['url'], 'https://en.wikipedia.org/wiki/Test')
-
- title_html = """
- <div>
- <div class="wikibase-title-label">
- <span lang="en">Test</span>
- <sup class="wb-language-fallback-indicator">English</sup>
- </div>
- </div>
- """
- html = """
- <div>
- <div class="wikibase-entitytermsview-heading-description">
- <span lang="en">Description</span>
- <sup class="wb-language-fallback-indicator">English</sup>
- </div>
- <div id="P856">
- <div class="wikibase-statementgroupview-property-label">
- <a href="/wiki/Property:P856">
- <span lang="en">official website</span>
- <sup class="wb-language-fallback-indicator">English</sup>
- </a>
- </div>
- <div class="wikibase-statementview-mainsnak">
- <a class="external free" href="https://officialsite.com">
- https://officialsite.com
- </a>
- </div>
- </div>
- <div>
- <ul class="wikibase-sitelinklistview-listview">
- <li data-wb-siteid="enwiki"><a href="http://en.wikipedia.org/wiki/Test">Test</a></li>
- </ul>
- </div>
- </div>
- """
- response = {"parse": {"displaytitle": title_html, "text": html}}
-
- results = wikidata.getDetail(response, "Q123", "yua", "yua_MX", etree.HTMLParser())
- self.assertEqual(len(results), 2)
- self.assertEqual(results[0]['title'], 'Official website')
- self.assertEqual(results[0]['url'], 'https://officialsite.com')
-
- self.assertEqual(results[1]['infobox'], 'Test')
- self.assertEqual(results[1]['id'], None)
- self.assertEqual(results[1]['content'], 'Description')
- self.assertEqual(results[1]['attributes'], [])
- self.assertEqual(results[1]['urls'][0]['title'], 'Official website')
- self.assertEqual(results[1]['urls'][0]['url'], 'https://officialsite.com')
- self.assertEqual(results[1]['urls'][1]['title'], 'Wikipedia (en)')
- self.assertEqual(results[1]['urls'][1]['url'], 'https://en.wikipedia.org/wiki/Test')
-
- def test_add_image(self):
- image_src = wikidata.add_image(fromstring("<div></div>"))
- self.assertEqual(image_src, None)
-
- html = u"""
- <div>
- <div id="P18">
- <div class="wikibase-statementgroupview-property-label">
- <a href="/wiki/Property:P18">
- image
- </a>
- </div>
- <div class="wikibase-statementlistview">
- <div class="wikibase-statementview listview-item">
- <div class="wikibase-statementview-rankselector">
- <span class="wikibase-rankselector-normal"></span>
- </div>
- <div class="wikibase-statementview-mainsnak">
- <div>
- <div class="wikibase-snakview-value">
- <div class="commons-media-caption">
- <a href="https://commons.wikimedia.org/wiki/File:image.png">image.png</a>
- <br/>2,687 &#215; 3,356; 1.22 MB
- </div>
- </div>
- </div>
- </div>
- </div>
- </div>
- </div>
- </div>
- """
- html_etree = fromstring(html)
- id_cache = wikidata.get_id_cache(html_etree)
- image_src = wikidata.add_image(id_cache)
- self.assertEqual(image_src,
- "https://commons.wikimedia.org/wiki/Special:FilePath/image.png?width=500&height=400")
-
- html = u"""
- <div>
- <div id="P2910">
- <div class="wikibase-statementgroupview-property-label">
- <a href="/wiki/Property:P2910">
- icon
- </a>
- </div>
- <div class="wikibase-statementlistview">
- <div class="wikibase-statementview listview-item">
- <div class="wikibase-statementview-rankselector">
- <span class="wikibase-rankselector-normal"></span>
- </div>
- <div class="wikibase-statementview-mainsnak">
- <div>
- <div class="wikibase-snakview-value">
- <div class="commons-media-caption">
- <a href="https://commons.wikimedia.org/wiki/File:icon.png">icon.png</a>
- <br/>671 &#215; 671; 18 KB</div>
- </div>
- </div>
- </div>
- </div>
- </div>
- </div>
- </div>
- <div id="P154">
- <div class="wikibase-statementgroupview-property-label">
- <a href="/wiki/Property:P154">
- logo
- </a>
- </div>
- <div class="wikibase-statementlistview">
- <div class="wikibase-statementview listview-item">
- <div class="wikibase-statementview-rankselector">
- <span class="wikibase-rankselector-normal"></span>
- </div>
- <div class="wikibase-statementview-mainsnak">
- <div>
- <div class="wikibase-snakview-value">
- <div class="commons-media-caption">
- <a href="https://commons.wikimedia.org/wiki/File:logo.png">logo.png</a>
- <br/>170 &#215; 170; 1 KB
- </div>
- </div>
- </div>
- </div>
- </div>
- </div>
- </div>
- </div>
- """
- html_etree = fromstring(html)
- id_cache = wikidata.get_id_cache(html_etree)
-
- image_src = wikidata.add_image(id_cache)
- self.assertEqual(image_src,
- "https://commons.wikimedia.org/wiki/Special:FilePath/logo.png?width=500&height=400")
-
- def test_add_attribute(self):
- html = u"""
- <div>
- <div id="P27">
- <div class="wikibase-statementgroupview-property-label">
- <a href="/wiki/Property:P27">
- country of citizenship
- </a>
- </div>
- <div class="wikibase-statementlistview">
- <div class="wikibase-statementview listview-item">
- <div class="wikibase-statementview-rankselector">
- <span class="wikibase-rankselector-normal"></span>
- </div>
- <div class="wikibase-statementview-mainsnak">
- <div>
- <div class="wikibase-snakview-value">
- <a href="/wiki/Q145">
- United Kingdom
- </a>
- </div>
- </div>
- </div>
- </div>
- </div>
- </div>
- </div>
- """
- attributes = []
- html_etree = fromstring(html)
- id_cache = wikidata.get_id_cache(html_etree)
-
- wikidata.add_attribute(attributes, id_cache, "Fail")
- self.assertEqual(attributes, [])
-
- wikidata.add_attribute(attributes, id_cache, "P27")
- self.assertEqual(len(attributes), 1)
- self.assertEqual(attributes[0]["label"], "Country of citizenship")
- self.assertEqual(attributes[0]["value"], "United Kingdom")
-
- html = u"""
- <div>
- <div id="P569">
- <div class="wikibase-statementgroupview-property-label">
- <a href="/wiki/Property:P569">
- date of birth
- </a>
- </div>
- <div class="wikibase-statementlistview">
- <div class="wikibase-statementview listview-item">
- <div class="wikibase-statementview-rankselector">
- <span class="wikibase-rankselector-normal"></span>
- </div>
- <div class="wikibase-statementview-mainsnak">
- <div>
- <div class="wikibase-snakview-value">
- 27 January 1832
- <sup class="wb-calendar-name">
- Gregorian
- </sup>
- </div>
- </div>
- </div>
- </div>
- </div>
- </div>
- </div>
- """
- attributes = []
- html_etree = fromstring(html)
- id_cache = wikidata.get_id_cache(html_etree)
- wikidata.add_attribute(attributes, id_cache, "P569", date=True)
- self.assertEqual(len(attributes), 1)
- self.assertEqual(attributes[0]["label"], "Date of birth")
- self.assertEqual(attributes[0]["value"], "27 January 1832")
-
- html = u"""
- <div>
- <div id="P6">
- <div class="wikibase-statementgroupview-property-label">
- <a href="/wiki/Property:P27">
- head of government
- </a>
- </div>
- <div class="wikibase-statementlistview">
- <div class="wikibase-statementview listview-item">
- <div class="wikibase-statementview-rankselector">
- <span class="wikibase-rankselector-normal"></span>
- </div>
- <div class="wikibase-statementview-mainsnak">
- <div>
- <div class="wikibase-snakview-value">
- <a href="/wiki/Q206">
- Old Prime Minister
- </a>
- </div>
- </div>
- </div>
- </div>
- <div class="wikibase-statementview listview-item">
- <div class="wikibase-statementview-rankselector">
- <span class="wikibase-rankselector-preferred"></span>
- </div>
- <div class="wikibase-statementview-mainsnak">
- <div>
- <div class="wikibase-snakview-value">
- <a href="/wiki/Q3099714">
- Actual Prime Minister
- </a>
- </div>
- </div>
- </div>
- </div>
- </div>
- </div>
- </div>
- """
- attributes = []
- html_etree = fromstring(html)
- id_cache = wikidata.get_id_cache(html_etree)
- wikidata.add_attribute(attributes, id_cache, "P6")
- self.assertEqual(len(attributes), 1)
- self.assertEqual(attributes[0]["label"], "Head of government")
- self.assertEqual(attributes[0]["value"], "Old Prime Minister, Actual Prime Minister")
-
- attributes = []
- html_etree = fromstring(html)
- id_cache = wikidata.get_id_cache(html_etree)
- wikidata.add_attribute(attributes, id_cache, "P6", trim=True)
- self.assertEqual(len(attributes), 1)
- self.assertEqual(attributes[0]["value"], "Actual Prime Minister")
-
- def test_add_url(self):
- html = u"""
- <div>
- <div id="P856">
- <div class="wikibase-statementgroupview-property-label">
- <a href="/wiki/Property:P856">
- official website
- </a>
- </div>
- <div class="wikibase-statementlistview">
- <div class="wikibase-statementview listview-item">
- <div class="wikibase-statementview-mainsnak">
- <div>
- <div class="wikibase-snakview-value">
- <a class="external free" href="https://searx.me">
- https://searx.me/
- </a>
- </div>
- </div>
- </div>
- </div>
- </div>
- </div>
- </div>
- """
- urls = []
- html_etree = fromstring(html)
- id_cache = wikidata.get_id_cache(html_etree)
- wikidata.add_url(urls, html_etree, id_cache, 'P856')
- self.assertEquals(len(urls), 1)
- self.assertIn({'title': 'Official website', 'url': 'https://searx.me/'}, urls)
- urls = []
- results = []
- wikidata.add_url(urls, html_etree, id_cache, 'P856', 'custom label', results=results)
- self.assertEquals(len(urls), 1)
- self.assertEquals(len(results), 1)
- self.assertIn({'title': 'custom label', 'url': 'https://searx.me/'}, urls)
- self.assertIn({'title': 'custom label', 'url': 'https://searx.me/'}, results)
-
- html = u"""
- <div>
- <div id="P856">
- <div class="wikibase-statementgroupview-property-label">
- <a href="/wiki/Property:P856">
- official website
- </a>
- </div>
- <div class="wikibase-statementlistview">
- <div class="wikibase-statementview listview-item">
- <div class="wikibase-statementview-mainsnak">
- <div>
- <div class="wikibase-snakview-value">
- <a class="external free" href="http://www.worldofwarcraft.com">
- http://www.worldofwarcraft.com
- </a>
- </div>
- </div>
- </div>
- </div>
- <div class="wikibase-statementview listview-item">
- <div class="wikibase-statementview-mainsnak">
- <div>
- <div class="wikibase-snakview-value">
- <a class="external free" href="http://eu.battle.net/wow/en/">
- http://eu.battle.net/wow/en/
- </a>
- </div>
- </div>
- </div>
- </div>
- </div>
- </div>
- </div>
- """
- urls = []
- html_etree = fromstring(html)
- id_cache = wikidata.get_id_cache(html_etree)
- wikidata.add_url(urls, html_etree, id_cache, 'P856')
- self.assertEquals(len(urls), 2)
- self.assertIn({'title': 'Official website', 'url': 'http://www.worldofwarcraft.com'}, urls)
- self.assertIn({'title': 'Official website', 'url': 'http://eu.battle.net/wow/en/'}, urls)
-
- def test_get_imdblink(self):
- html = u"""
- <div>
- <div class="wikibase-statementview-mainsnak">
- <div>
- <div class="wikibase-snakview-value">
- <a class="wb-external-id" href="http://www.imdb.com/tt0433664">
- tt0433664
- </a>
- </div>
- </div>
- </div>
- </div>
- """
- html_etree = fromstring(html)
- imdblink = wikidata.get_imdblink(html_etree, 'https://www.imdb.com/')
-
- html = u"""
- <div>
- <div class="wikibase-statementview-mainsnak">
- <div>
- <div class="wikibase-snakview-value">
- <a class="wb-external-id"
- href="href="http://tools.wmflabs.org/...http://www.imdb.com/&id=nm4915994"">
- nm4915994
- </a>
- </div>
- </div>
- </div>
- </div>
- """
- html_etree = fromstring(html)
- imdblink = wikidata.get_imdblink(html_etree, 'https://www.imdb.com/')
- self.assertIn('https://www.imdb.com/name/nm4915994', imdblink)
-
- def test_get_geolink(self):
- html = u"""
- <div>
- <div class="wikibase-statementview-mainsnak">
- <div>
- <div class="wikibase-snakview-value">
- 60°N, 40°E
- </div>
- </div>
- </div>
- </div>
- """
- html_etree = fromstring(html)
- geolink = wikidata.get_geolink(html_etree)
- self.assertIn('https://www.openstreetmap.org/', geolink)
- self.assertIn('lat=60&lon=40', geolink)
-
- html = u"""
- <div>
- <div class="wikibase-statementview-mainsnak">
- <div>
- <div class="wikibase-snakview-value">
- 34°35'59"S, 58°22'55"W
- </div>
- </div>
- </div>
- </div>
- """
- html_etree = fromstring(html)
- geolink = wikidata.get_geolink(html_etree)
- self.assertIn('https://www.openstreetmap.org/', geolink)
- self.assertIn('lat=-34.59', geolink)
- self.assertIn('lon=-58.38', geolink)
-
- def test_get_wikilink(self):
- html = """
- <div>
- <div>
- <ul class="wikibase-sitelinklistview-listview">
- <li data-wb-siteid="arwiki"><a href="http://ar.wikipedia.org/wiki/Test">Test</a></li>
- <li data-wb-siteid="enwiki"><a href="http://en.wikipedia.org/wiki/Test">Test</a></li>
- </ul>
- </div>
- <div>
- <ul class="wikibase-sitelinklistview-listview">
- <li data-wb-siteid="enwikiquote"><a href="https://en.wikiquote.org/wiki/Test">Test</a></li>
- </ul>
- </div>
- </div>
- """
- html_etree = fromstring(html)
- wikilink = wikidata.get_wikilink(html_etree, 'nowiki')
- self.assertEqual(wikilink, None)
- wikilink = wikidata.get_wikilink(html_etree, 'enwiki')
- self.assertEqual(wikilink, 'https://en.wikipedia.org/wiki/Test')
- wikilink = wikidata.get_wikilink(html_etree, 'arwiki')
- self.assertEqual(wikilink, 'https://ar.wikipedia.org/wiki/Test')
- wikilink = wikidata.get_wikilink(html_etree, 'enwikiquote')
- self.assertEqual(wikilink, 'https://en.wikiquote.org/wiki/Test')
diff --git a/tests/unit/engines/test_wikipedia.py b/tests/unit/engines/test_wikipedia.py
deleted file mode 100644
index 316b12bc5..000000000
--- a/tests/unit/engines/test_wikipedia.py
+++ /dev/null
@@ -1,263 +0,0 @@
-# -*- coding: utf-8 -*-
-from collections import defaultdict
-import mock
-from searx.engines import wikipedia
-from searx.testing import SearxTestCase
-
-
-class TestWikipediaEngine(SearxTestCase):
-
- def test_request(self):
- wikipedia.supported_languages = ['fr', 'en', 'no']
- wikipedia.language_aliases = {'nb': 'no'}
-
- query = 'test_query'
- dicto = defaultdict(dict)
- dicto['language'] = 'fr-FR'
- params = wikipedia.request(query.encode('utf-8'), dicto)
- self.assertIn('url', params)
- self.assertIn(query, params['url'])
- self.assertIn('test_query', params['url'])
- self.assertIn('Test_Query', params['url'])
- self.assertIn('fr.wikipedia.org', params['url'])
-
- query = u'Test_Query'
- params = wikipedia.request(query.encode('utf-8'), dicto)
- self.assertIn('Test_Query', params['url'])
- self.assertNotIn('test_query', params['url'])
-
- dicto['language'] = 'nb'
- params = wikipedia.request(query, dicto)
- self.assertIn('no.wikipedia.org', params['url'])
- dicto['language'] = 'all'
- params = wikipedia.request(query, dicto)
- self.assertIn('en', params['url'])
-
- dicto['language'] = 'xx'
- params = wikipedia.request(query, dicto)
- self.assertIn('en.wikipedia.org', params['url'])
-
- def test_response(self):
- dicto = defaultdict(dict)
- dicto['language'] = 'fr'
-
- self.assertRaises(AttributeError, wikipedia.response, None)
- self.assertRaises(AttributeError, wikipedia.response, [])
- self.assertRaises(AttributeError, wikipedia.response, '')
- self.assertRaises(AttributeError, wikipedia.response, '[]')
-
- # page not found
- json = """
- {
- "batchcomplete": "",
- "query": {
- "normalized": [],
- "pages": {
- "-1": {
- "ns": 0,
- "title": "",
- "missing": ""
- }
- }
- }
- }"""
- response = mock.Mock(text=json, search_params=dicto)
- self.assertEqual(wikipedia.response(response), [])
-
- # normal case
- json = """
- {
- "batchcomplete": "",
- "query": {
- "normalized": [],
- "pages": {
- "12345": {
- "pageid": 12345,
- "ns": 0,
- "title": "The Title",
- "extract": "The Title is...",
- "thumbnail": {
- "source": "img_src.jpg"
- },
- "pageimage": "img_name.jpg"
- }
- }
- }
- }"""
- response = mock.Mock(text=json, search_params=dicto)
- results = wikipedia.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 2)
- self.assertEqual(results[0]['title'], u'The Title')
- self.assertIn('fr.wikipedia.org/wiki/The_Title', results[0]['url'])
- self.assertEqual(results[1]['infobox'], u'The Title')
- self.assertIn('fr.wikipedia.org/wiki/The_Title', results[1]['id'])
- self.assertIn('The Title is...', results[1]['content'])
- self.assertEqual(results[1]['img_src'], 'img_src.jpg')
-
- # disambiguation page
- json = """
- {
- "batchcomplete": "",
- "query": {
- "normalized": [],
- "pages": {
- "12345": {
- "pageid": 12345,
- "ns": 0,
- "title": "The Title",
- "extract": "The Title can be:\\nThe Title 1\\nThe Title 2\\nThe Title 3\\nThe Title 4......................................................................................................................................." """ # noqa
- json += """
- }
- }
- }
- }"""
- response = mock.Mock(text=json, search_params=dicto)
- results = wikipedia.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 2)
-
- # no image
- json = """
- {
- "batchcomplete": "",
- "query": {
- "normalized": [],
- "pages": {
- "12345": {
- "pageid": 12345,
- "ns": 0,
- "title": "The Title",
- "extract": "The Title is......................................................................................................................................................................................." """ # noqa
- json += """
- }
- }
- }
- }"""
- response = mock.Mock(text=json, search_params=dicto)
- results = wikipedia.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 2)
- self.assertIn('The Title is...', results[1]['content'])
- self.assertEqual(results[1]['img_src'], None)
-
- # title not in first paragraph
- json = u"""
- {
- "batchcomplete": "",
- "query": {
- "normalized": [],
- "pages": {
- "12345": {
- "pageid": 12345,
- "ns": 0,
- "title": "披頭四樂隊",
- "extract": "披头士乐队....................................................................................................................................................................................................\\n披頭四樂隊...", """ # noqa
- json += """
- "thumbnail": {
- "source": "img_src.jpg"
- },
- "pageimage": "img_name.jpg"
- }
- }
- }
- }"""
- response = mock.Mock(text=json, search_params=dicto)
- results = wikipedia.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 2)
- self.assertEqual(results[1]['infobox'], u'披頭四樂隊')
- self.assertIn(u'披头士乐队...', results[1]['content'])
-
- def test_fetch_supported_languages(self):
- html = u"""<html></html>"""
- response = mock.Mock(text=html)
- languages = wikipedia._fetch_supported_languages(response)
- self.assertEqual(type(languages), dict)
- self.assertEqual(len(languages), 0)
-
- html = u"""
- <html>
- <body>
- <div>
- <div>
- <h3>Table header</h3>
- <table class="sortable jquery-tablesorter">
- <thead>
- <tr>
- <th>N</th>
- <th>Language</th>
- <th>Language (local)</th>
- <th>Wiki</th>
- <th>Articles</th>
- </tr>
- </thead>
- <tbody>
- <tr>
- <td>2</td>
- <td><a>Swedish</a></td>
- <td><a>Svenska</a></td>
- <td><a>sv</a></td>
- <td><a><b>3000000</b></a></td>
- </tr>
- <tr>
- <td>3</td>
- <td><a>Cebuano</a></td>
- <td><a>Sinugboanong Binisaya</a></td>
- <td><a>ceb</a></td>
- <td><a><b>3000000</b></a></td>
- </tr>
- </tbody>
- </table>
- <h3>Table header</h3>
- <table class="sortable jquery-tablesorter">
- <thead>
- <tr>
- <th>N</th>
- <th>Language</th>
- <th>Language (local)</th>
- <th>Wiki</th>
- <th>Articles</th>
- </tr>
- </thead>
- <tbody>
- <tr>
- <td>2</td>
- <td><a>Norwegian (Bokmål)</a></td>
- <td><a>Norsk (Bokmål)</a></td>
- <td><a>no</a></td>
- <td><a><b>100000</b></a></td>
- </tr>
- </tbody>
- </table>
- </div>
- </div>
- </body>
- </html>
- """
- response = mock.Mock(text=html)
- languages = wikipedia._fetch_supported_languages(response)
- self.assertEqual(type(languages), dict)
- self.assertEqual(len(languages), 3)
-
- self.assertIn('sv', languages)
- self.assertIn('ceb', languages)
- self.assertIn('no', languages)
-
- self.assertEqual(type(languages['sv']), dict)
- self.assertEqual(type(languages['ceb']), dict)
- self.assertEqual(type(languages['no']), dict)
-
- self.assertIn('name', languages['sv'])
- self.assertIn('english_name', languages['sv'])
- self.assertIn('articles', languages['sv'])
-
- self.assertEqual(languages['sv']['name'], 'Svenska')
- self.assertEqual(languages['sv']['english_name'], 'Swedish')
- self.assertEqual(languages['sv']['articles'], 3000000)
- self.assertEqual(languages['ceb']['name'], 'Sinugboanong Binisaya')
- self.assertEqual(languages['ceb']['english_name'], 'Cebuano')
- self.assertEqual(languages['ceb']['articles'], 3000000)
- self.assertEqual(languages['no']['name'], u'Norsk (Bokmål)')
- self.assertEqual(languages['no']['english_name'], u'Norwegian (Bokmål)')
- self.assertEqual(languages['no']['articles'], 100000)
diff --git a/tests/unit/engines/test_wolframalpha_api.py b/tests/unit/engines/test_wolframalpha_api.py
deleted file mode 100644
index 0433b34aa..000000000
--- a/tests/unit/engines/test_wolframalpha_api.py
+++ /dev/null
@@ -1,166 +0,0 @@
-# -*- coding: utf-8 -*-
-from collections import defaultdict
-import mock
-from requests import Request
-from searx.engines import wolframalpha_api
-from searx.testing import SearxTestCase
-
-
-class TestWolframAlphaAPIEngine(SearxTestCase):
-
- def test_request(self):
- query = 'test_query'
- dicto = defaultdict(dict)
- params = wolframalpha_api.request(query, dicto)
-
- # TODO: test api_key
- self.assertIn('url', params)
- self.assertIn('https://api.wolframalpha.com/v2/query?', params['url'])
- self.assertIn(query, params['url'])
- self.assertEqual('https://www.wolframalpha.com/input/?i=test_query', params['headers']['Referer'])
-
- def test_replace_pua_chars(self):
- self.assertEqual('i', wolframalpha_api.replace_pua_chars(u'\uf74e'))
-
- def test_response(self):
- self.assertRaises(AttributeError, wolframalpha_api.response, None)
- self.assertRaises(AttributeError, wolframalpha_api.response, [])
- self.assertRaises(AttributeError, wolframalpha_api.response, '')
- self.assertRaises(AttributeError, wolframalpha_api.response, '[]')
-
- referer_url = 'referer_url'
- request = Request(headers={'Referer': referer_url})
-
- # test failure
- xml = '''<?xml version='1.0' encoding='UTF-8'?>
- <queryresult success='false' error='false' />
- '''
- response = mock.Mock(content=xml.encode('utf-8'))
- self.assertEqual(wolframalpha_api.response(response), [])
-
- # test basic case
- xml = b"""<?xml version='1.0' encoding='UTF-8'?>
- <queryresult success='true'
- error='false'
- numpods='3'
- datatypes='Math'
- id='queryresult_id'
- host='http://www4c.wolframalpha.com'
- related='related_url'
- version='2.6'>
- <pod title='Input'
- scanner='Identity'
- id='Input'
- numsubpods='1'>
- <subpod title=''>
- <img src='input_img_src.gif'
- alt='input_img_alt'
- title='input_img_title' />
- <plaintext>input_plaintext</plaintext>
- </subpod>
- </pod>
- <pod title='Result'
- scanner='Simplification'
- id='Result'
- numsubpods='1'
- primary='true'>
- <subpod title=''>
- <img src='result_img_src.gif'
- alt='result_img_alt'
- title='result_img_title' />
- <plaintext>result_plaintext</plaintext>
- </subpod>
- </pod>
- <pod title='Manipulatives illustration'
- scanner='Arithmetic'
- id='Illustration'
- numsubpods='1'>
- <subpod title=''>
- <img src='illustration_img_src.gif'
- alt='illustration_img_alt' />
- <plaintext>illustration_plaintext</plaintext>
- </subpod>
- </pod>
- </queryresult>
- """
- response = mock.Mock(content=xml, request=request)
- results = wolframalpha_api.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 2)
- self.assertEqual('input_plaintext', results[0]['infobox'])
-
- self.assertEqual(len(results[0]['attributes']), 3)
- self.assertEqual('Input', results[0]['attributes'][0]['label'])
- self.assertEqual('input_plaintext', results[0]['attributes'][0]['value'])
- self.assertEqual('Result', results[0]['attributes'][1]['label'])
- self.assertEqual('result_plaintext', results[0]['attributes'][1]['value'])
- self.assertEqual('Manipulatives illustration', results[0]['attributes'][2]['label'])
- self.assertEqual('illustration_img_src.gif', results[0]['attributes'][2]['image']['src'])
- self.assertEqual('illustration_img_alt', results[0]['attributes'][2]['image']['alt'])
-
- self.assertEqual(len(results[0]['urls']), 1)
-
- self.assertEqual(referer_url, results[0]['urls'][0]['url'])
- self.assertEqual('Wolfram|Alpha', results[0]['urls'][0]['title'])
- self.assertEqual(referer_url, results[1]['url'])
- self.assertEqual('Wolfram|Alpha (input_plaintext)', results[1]['title'])
- self.assertIn('result_plaintext', results[1]['content'])
-
- # test calc
- xml = b"""<?xml version='1.0' encoding='UTF-8'?>
- <queryresult success='true'
- error='false'
- numpods='2'
- datatypes=''
- parsetimedout='false'
- id='queryresult_id'
- host='http://www5b.wolframalpha.com'
- related='related_url'
- version='2.6' >
- <pod title='Indefinite integral'
- scanner='Integral'
- id='IndefiniteIntegral'
- error='false'
- numsubpods='1'
- primary='true'>
- <subpod title=''>
- <img src='integral_image.gif'
- alt='integral_img_alt'
- title='integral_img_title' />
- <plaintext>integral_plaintext</plaintext>
- </subpod>
- </pod>
- <pod title='Plot of the integral'
- scanner='Integral'
- id='Plot'
- error='false'
- numsubpods='1'>
- <subpod title=''>
- <img src='plot.gif'
- alt='plot_alt'
- title='' />
- <plaintext></plaintext>
- </subpod>
- </pod>
- </queryresult>
- """
- response = mock.Mock(content=xml, request=request)
- results = wolframalpha_api.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 2)
- self.assertEqual('integral_plaintext', results[0]['infobox'])
-
- self.assertEqual(len(results[0]['attributes']), 2)
- self.assertEqual('Indefinite integral', results[0]['attributes'][0]['label'])
- self.assertEqual('integral_plaintext', results[0]['attributes'][0]['value'])
- self.assertEqual('Plot of the integral', results[0]['attributes'][1]['label'])
- self.assertEqual('plot.gif', results[0]['attributes'][1]['image']['src'])
- self.assertEqual('plot_alt', results[0]['attributes'][1]['image']['alt'])
-
- self.assertEqual(len(results[0]['urls']), 1)
-
- self.assertEqual(referer_url, results[0]['urls'][0]['url'])
- self.assertEqual('Wolfram|Alpha', results[0]['urls'][0]['title'])
- self.assertEqual(referer_url, results[1]['url'])
- self.assertEqual('Wolfram|Alpha (integral_plaintext)', results[1]['title'])
- self.assertIn('integral_plaintext', results[1]['content'])
diff --git a/tests/unit/engines/test_wolframalpha_noapi.py b/tests/unit/engines/test_wolframalpha_noapi.py
deleted file mode 100644
index 982edd9f2..000000000
--- a/tests/unit/engines/test_wolframalpha_noapi.py
+++ /dev/null
@@ -1,224 +0,0 @@
-# -*- coding: utf-8 -*-
-from collections import defaultdict
-import mock
-from requests import Request
-from searx.engines import wolframalpha_noapi
-from searx.testing import SearxTestCase
-
-
-class TestWolframAlphaNoAPIEngine(SearxTestCase):
-
- def test_request(self):
- query = 'test_query'
- dicto = defaultdict(dict)
- params = wolframalpha_noapi.request(query, dicto)
-
- self.assertIn('url', params)
- self.assertIn('https://www.wolframalpha.com/input/json.jsp', params['url'])
- self.assertIn(query, params['url'])
- self.assertEqual('https://www.wolframalpha.com/input/?i=test_query', params['headers']['Referer'])
-
- def test_response(self):
- self.assertRaises(AttributeError, wolframalpha_noapi.response, None)
- self.assertRaises(AttributeError, wolframalpha_noapi.response, [])
- self.assertRaises(AttributeError, wolframalpha_noapi.response, '')
- self.assertRaises(AttributeError, wolframalpha_noapi.response, '[]')
-
- referer_url = 'referer_url'
- request = Request(headers={'Referer': referer_url})
-
- # test failure
- json = r'''
- {"queryresult" : {
- "success" : false,
- "error" : false,
- "numpods" : 0,
- "id" : "",
- "host" : "https:\/\/www5a.wolframalpha.com",
- "didyoumeans" : {}
- }}
- '''
- response = mock.Mock(text=json, request=request)
- self.assertEqual(wolframalpha_noapi.response(response), [])
-
- # test basic case
- json = r'''
- {"queryresult" : {
- "success" : true,
- "error" : false,
- "numpods" : 6,
- "datatypes" : "Math",
- "id" : "queryresult_id",
- "host" : "https:\/\/www5b.wolframalpha.com",
- "related" : "related_url",
- "version" : "2.6",
- "pods" : [
- {
- "title" : "Input",
- "scanners" : [
- "Identity"
- ],
- "id" : "Input",
- "error" : false,
- "numsubpods" : 1,
- "subpods" : [
- {
- "title" : "",
- "img" : {
- "src" : "input_img_src.gif",
- "alt" : "input_img_alt",
- "title" : "input_img_title"
- },
- "plaintext" : "input_plaintext",
- "minput" : "input_minput"
- }
- ]
- },
- {
- "title" : "Result",
- "scanners" : [
- "Simplification"
- ],
- "id" : "Result",
- "error" : false,
- "numsubpods" : 1,
- "primary" : true,
- "subpods" : [
- {
- "title" : "",
- "img" : {
- "src" : "result_img_src.gif",
- "alt" : "result_img_alt",
- "title" : "result_img_title"
- },
- "plaintext" : "result_plaintext",
- "moutput" : "result_moutput"
- }
- ]
- },
- {
- "title" : "Manipulatives illustration",
- "scanners" : [
- "Arithmetic"
- ],
- "id" : "Illustration",
- "error" : false,
- "numsubpods" : 1,
- "subpods" : [
- {
- "title" : "",
- "CDFcontent" : "Resizeable",
- "img" : {
- "src" : "illustration_img_src.gif",
- "alt" : "illustration_img_alt",
- "title" : "illustration_img_title"
- },
- "plaintext" : "illustration_img_plaintext"
- }
- ]
- }
- ]
- }}
- '''
- response = mock.Mock(text=json, request=request)
- results = wolframalpha_noapi.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 2)
- self.assertEqual('input_plaintext', results[0]['infobox'])
-
- self.assertEqual(len(results[0]['attributes']), 3)
- self.assertEqual('Input', results[0]['attributes'][0]['label'])
- self.assertEqual('input_plaintext', results[0]['attributes'][0]['value'])
- self.assertEqual('Result', results[0]['attributes'][1]['label'])
- self.assertEqual('result_plaintext', results[0]['attributes'][1]['value'])
- self.assertEqual('Manipulatives illustration', results[0]['attributes'][2]['label'])
- self.assertEqual('illustration_img_src.gif', results[0]['attributes'][2]['image']['src'])
- self.assertEqual('illustration_img_alt', results[0]['attributes'][2]['image']['alt'])
-
- self.assertEqual(len(results[0]['urls']), 1)
-
- self.assertEqual(referer_url, results[0]['urls'][0]['url'])
- self.assertEqual('Wolfram|Alpha', results[0]['urls'][0]['title'])
- self.assertEqual(referer_url, results[1]['url'])
- self.assertEqual('Wolfram|Alpha (input_plaintext)', results[1]['title'])
- self.assertIn('result_plaintext', results[1]['content'])
-
- # test calc
- json = r"""
- {"queryresult" : {
- "success" : true,
- "error" : false,
- "numpods" : 2,
- "datatypes" : "",
- "id" : "queryresult_id",
- "host" : "https:\/\/www4b.wolframalpha.com",
- "related" : "related_url",
- "version" : "2.6",
- "pods" : [
- {
- "title" : "Indefinite integral",
- "scanners" : [
- "Integral"
- ],
- "id" : "IndefiniteIntegral",
- "error" : false,
- "numsubpods" : 1,
- "primary" : true,
- "subpods" : [
- {
- "title" : "",
- "img" : {
- "src" : "integral_img_src.gif",
- "alt" : "integral_img_alt",
- "title" : "integral_img_title"
- },
- "plaintext" : "integral_plaintext",
- "minput" : "integral_minput",
- "moutput" : "integral_moutput"
- }
- ]
- },
- {
- "title" : "Plot of the integral",
- "scanners" : [
- "Integral"
- ],
- "id" : "Plot",
- "error" : false,
- "numsubpods" : 1,
- "subpods" : [
- {
- "title" : "",
- "img" : {
- "src" : "plot.gif",
- "alt" : "plot_alt",
- "title" : "plot_title"
- },
- "plaintext" : "",
- "minput" : "plot_minput"
- }
- ]
- }
- ]
- }}
- """
- response = mock.Mock(text=json, request=request)
- results = wolframalpha_noapi.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 2)
- self.assertEqual('integral_plaintext', results[0]['infobox'])
-
- self.assertEqual(len(results[0]['attributes']), 2)
- self.assertEqual('Indefinite integral', results[0]['attributes'][0]['label'])
- self.assertEqual('integral_plaintext', results[0]['attributes'][0]['value'])
- self.assertEqual('Plot of the integral', results[0]['attributes'][1]['label'])
- self.assertEqual('plot.gif', results[0]['attributes'][1]['image']['src'])
- self.assertEqual('plot_alt', results[0]['attributes'][1]['image']['alt'])
-
- self.assertEqual(len(results[0]['urls']), 1)
-
- self.assertEqual(referer_url, results[0]['urls'][0]['url'])
- self.assertEqual('Wolfram|Alpha', results[0]['urls'][0]['title'])
- self.assertEqual(referer_url, results[1]['url'])
- self.assertEqual('Wolfram|Alpha (integral_plaintext)', results[1]['title'])
- self.assertIn('integral_plaintext', results[1]['content'])
diff --git a/tests/unit/engines/test_www1x.py b/tests/unit/engines/test_www1x.py
deleted file mode 100644
index 9df8de6bf..000000000
--- a/tests/unit/engines/test_www1x.py
+++ /dev/null
@@ -1,57 +0,0 @@
-from collections import defaultdict
-import mock
-from searx.engines import www1x
-from searx.testing import SearxTestCase
-
-
-class TestWww1xEngine(SearxTestCase):
-
- def test_request(self):
- query = 'test_query'
- params = www1x.request(query, defaultdict(dict))
- self.assertTrue('url' in params)
- self.assertTrue(query in params['url'])
- self.assertTrue('1x.com' in params['url'])
-
- def test_response(self):
- self.assertRaises(AttributeError, www1x.response, None)
- self.assertRaises(AttributeError, www1x.response, [])
- self.assertRaises(AttributeError, www1x.response, '')
- self.assertRaises(AttributeError, www1x.response, '[]')
-
- response = mock.Mock(text='<html></html>')
- self.assertEqual(www1x.response(response), [])
- html = """
- <?xml version="1.0" encoding="UTF-8"?><!DOCTYPE characters
- [
- <!ELEMENT characters (character*) >
- <!ELEMENT character (#PCDATA ) >
-
- <!ENTITY iexcl "&#161;" >
- <!ENTITY cent "&#162;" >
- <!ENTITY pound "&#163;" >
- ]
- ><root><searchresult><![CDATA[<table border="0" cellpadding="0" cellspacing="0" width="100%">
- <tr>
- <td style="min-width: 220px;" valign="top">
- <div style="font-size: 30px; margin: 0px 0px 20px 0px;">Photos</div>
- <div>
- <a href="/photo/123456" class="dynamiclink">
-<img border="0" class="searchresult" src="/images/user/testimage-123456.jpg" style="width: 125px; height: 120px;">
- </a>
- <a title="sjoerd lammers street photography" href="/member/sjoerdlammers" class="dynamiclink">
-<img border="0" class="searchresult" src="/images/profile/60c48b394c677d2fa4d9e7d263aabf44-square.jpg">
- </a>
- </div>
- </td>
- </table>
- ]]></searchresult></root>
- """
- response = mock.Mock(text=html)
- results = www1x.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0]['url'], 'https://1x.com/photo/123456')
- self.assertEqual(results[0]['thumbnail_src'], 'https://1x.com/images/user/testimage-123456.jpg')
- self.assertEqual(results[0]['content'], '')
- self.assertEqual(results[0]['template'], 'images.html')
diff --git a/tests/unit/engines/test_yacy.py b/tests/unit/engines/test_yacy.py
deleted file mode 100644
index f49532cf4..000000000
--- a/tests/unit/engines/test_yacy.py
+++ /dev/null
@@ -1,96 +0,0 @@
-from collections import defaultdict
-import mock
-from searx.engines import yacy
-from searx.testing import SearxTestCase
-
-
-class TestYacyEngine(SearxTestCase):
-
- def test_request(self):
- query = 'test_query'
- dicto = defaultdict(dict)
- dicto['pageno'] = 1
- dicto['language'] = 'fr_FR'
- params = yacy.request(query, dicto)
- self.assertIn('url', params)
- self.assertIn(query, params['url'])
- self.assertIn('localhost', params['url'])
- self.assertIn('fr', params['url'])
-
- dicto['language'] = 'all'
- params = yacy.request(query, dicto)
- self.assertIn('url', params)
- self.assertNotIn('lr=lang_', params['url'])
-
- def test_response(self):
- self.assertRaises(AttributeError, yacy.response, None)
- self.assertRaises(AttributeError, yacy.response, [])
- self.assertRaises(AttributeError, yacy.response, '')
- self.assertRaises(AttributeError, yacy.response, '[]')
-
- response = mock.Mock(text='{}')
- self.assertEqual(yacy.response(response), [])
-
- response = mock.Mock(text='{"data": []}')
- self.assertEqual(yacy.response(response), [])
-
- json = """
- {
- "channels": [
- {
- "title": "YaCy P2P-Search for test",
- "description": "Search for test",
- "link": "http://search.yacy.de:7001/yacysearch.html?query=test&amp;resource=global&amp;contentdom=0",
- "image": {
- "url": "http://search.yacy.de:7001/env/grafics/yacy.png",
- "title": "Search for test",
- "link": "http://search.yacy.de:7001/yacysearch.html?query=test&amp;resource=global&amp;contentdom=0"
- },
- "totalResults": "249",
- "startIndex": "0",
- "itemsPerPage": "5",
- "searchTerms": "test",
- "items": [
- {
- "title": "This is the title",
- "link": "http://this.is.the.url",
- "code": "",
- "description": "This should be the content",
- "pubDate": "Sat, 08 Jun 2013 02:00:00 +0200",
- "size": "44213",
- "sizename": "43 kbyte",
- "guid": "lzh_1T_5FP-A",
- "faviconCode": "XTS4uQ_5FP-A",
- "host": "www.gamestar.de",
- "path": "/spiele/city-of-heroes-freedom/47019.html",
- "file": "47019.html",
- "urlhash": "lzh_1T_5FP-A",
- "ranking": "0.20106804"
- },
- {
- "title": "This is the title2",
- "icon": "/ViewImage.png?maxwidth=96&amp;maxheight=96&amp;code=7EbAbW6BpPOA",
- "image": "http://image.url/image.png",
- "cache": "/ViewImage.png?quadratic=&amp;url=http://golem.ivwbox.de/cgi-bin/ivw/CP/G_INET?d=14071378",
- "url": "http://this.is.the.url",
- "urlhash": "7EbAbW6BpPOA",
- "host": "www.golem.de",
- "width": "-1",
- "height": "-1"
- }
- ]
- }
- ]
- }
- """
- response = mock.Mock(text=json)
- results = yacy.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 2)
- self.assertEqual(results[0]['title'], 'This is the title')
- self.assertEqual(results[0]['url'], 'http://this.is.the.url')
- self.assertEqual(results[0]['content'], 'This should be the content')
- self.assertEqual(results[1]['img_src'], 'http://image.url/image.png')
- self.assertEqual(results[1]['content'], '')
- self.assertEqual(results[1]['url'], 'http://this.is.the.url')
- self.assertEqual(results[1]['title'], 'This is the title2')
diff --git a/tests/unit/engines/test_yahoo.py b/tests/unit/engines/test_yahoo.py
deleted file mode 100644
index e52c1109e..000000000
--- a/tests/unit/engines/test_yahoo.py
+++ /dev/null
@@ -1,190 +0,0 @@
-# -*- coding: utf-8 -*-
-from collections import defaultdict
-import mock
-from searx.engines import yahoo
-from searx.testing import SearxTestCase
-
-
-class TestYahooEngine(SearxTestCase):
-
- def test_parse_url(self):
- test_url = 'http://r.search.yahoo.com/_ylt=A0LEb9JUSKcAEGRXNyoA;_ylu=X3oDMTEzZm1qazYwBHNlYwNzcgRwb3MDMQRjb' +\
- '2xvA2Jm2dGlkA1NNRTcwM18x/RV=2/RE=1423106085/RO=10/RU=https%3a%2f%2fthis.is.the.url%2f/RK=0/RS=' +\
- 'dtcJsfP4mEeBOjnVfUQ-'
- url = yahoo.parse_url(test_url)
- self.assertEqual('https://this.is.the.url/', url)
-
- test_url = 'http://r.search.yahoo.com/_ylt=A0LElb9JUSKcAEGRXNyoA;_ylu=X3oDMTEzZm1qazYwBHNlYwNzcgRwb3MDMQRjb' +\
- '2xvA2Jm2dGlkA1NNRTcwM18x/RV=2/RE=1423106085/RO=10/RU=https%3a%2f%2fthis.is.the.url%2f/RS=' +\
- 'dtcJsfP4mEeBOjnVfUQ-'
- url = yahoo.parse_url(test_url)
- self.assertEqual('https://this.is.the.url/', url)
-
- test_url = 'https://this.is.the.url/'
- url = yahoo.parse_url(test_url)
- self.assertEqual('https://this.is.the.url/', url)
-
- def test_request(self):
- yahoo.supported_languages = ['en', 'fr', 'zh-CHT', 'zh-CHS']
- query = 'test_query'
- dicto = defaultdict(dict)
- dicto['pageno'] = 1
- dicto['time_range'] = ''
- dicto['language'] = 'fr-FR'
- params = yahoo.request(query, dicto)
- self.assertIn('url', params)
- self.assertIn(query, params['url'])
- self.assertIn('search.yahoo.com', params['url'])
- self.assertIn('fr', params['url'])
- self.assertIn('cookies', params)
- self.assertIn('sB', params['cookies'])
- self.assertIn('fr', params['cookies']['sB'])
-
- dicto['language'] = 'zh'
- params = yahoo.request(query, dicto)
- self.assertIn('zh_chs', params['url'])
- self.assertIn('zh_chs', params['cookies']['sB'])
-
- dicto['language'] = 'zh-TW'
- params = yahoo.request(query, dicto)
- self.assertIn('zh_cht', params['url'])
- self.assertIn('zh_cht', params['cookies']['sB'])
-
- dicto['language'] = 'all'
- params = yahoo.request(query, dicto)
- self.assertIn('cookies', params)
- self.assertIn('sB', params['cookies'])
- self.assertIn('en', params['cookies']['sB'])
- self.assertIn('en', params['url'])
-
- def test_no_url_in_request_year_time_range(self):
- dicto = defaultdict(dict)
- query = 'test_query'
- dicto['time_range'] = 'year'
- params = yahoo.request(query, dicto)
- self.assertEqual({}, params['url'])
-
- def test_response(self):
- self.assertRaises(AttributeError, yahoo.response, None)
- self.assertRaises(AttributeError, yahoo.response, [])
- self.assertRaises(AttributeError, yahoo.response, '')
- self.assertRaises(AttributeError, yahoo.response, '[]')
-
- response = mock.Mock(text='<html></html>')
- self.assertEqual(yahoo.response(response), [])
-
- html = """
-<ol class="reg mb-15 searchCenterMiddle">
- <li class="first">
- <div class="dd algo fst Sr">
- <div class="compTitle">
- <h3 class="title"><a class=" td-u" href="http://r.search.yahoo.com/_ylt=A0LEb9JUSKcAEGRXNyoA;
- _ylu=X3oDMTEzZm1qazYwBHNlYwNzcgRwb3MDMQRjb2xvA2Jm2dGlkA1NNRTcwM18x/RV=2/RE=1423106085/RO=10
- /RU=https%3a%2f%2fthis.is.the.url%2f/RK=0/RS=dtcJsfP4mEeBOjnVfUQ-"
- target="_blank" data-bid="54e712e13671c">
- <b><b>This is the title</b></b></a>
- </h3>
- </div>
- <div class="compText aAbs">
- <p class="lh-18"><b><b>This is the </b>content</b>
- </p>
- </div>
- </div>
- </li>
- <li>
- <div class="dd algo lst Sr">
- <div class="compTitle">
- </div>
- <div class="compText aAbs">
- <p class="lh-18">This is the second content</p>
- </div>
- </div>
- </li>
-</ol>
-<div class="dd assist fst lst AlsoTry" data-bid="54e712e138d04">
- <div class="compTitle mb-4 h-17">
- <h3 class="title">Also Try</h3> </div>
- <table class="compTable m-0 ac-1st td-u fz-ms">
- <tbody>
- <tr>
- <td class="w-50p pr-28"><a href="https://search.yahoo.com/"><B>This is the </B>suggestion<B></B></a>
- </td>
- </tr>
- </table>
-</div>
- """
- response = mock.Mock(text=html)
- results = yahoo.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 2)
- self.assertEqual(results[0]['title'], 'This is the title')
- self.assertEqual(results[0]['url'], 'https://this.is.the.url/')
- self.assertEqual(results[0]['content'], 'This is the content')
- self.assertEqual(results[1]['suggestion'], 'This is the suggestion')
-
- html = """
-<ol class="reg mb-15 searchCenterMiddle">
- <li class="first">
- <div class="dd algo fst Sr">
- <div class="compTitle">
- <h3 class="title"><a class=" td-u" href="http://r.search.yahoo.com/_ylt=A0LEb9JUSKcAEGRXNyoA;
- _ylu=X3oDMTEzZm1qazYwBHNlYwNzcgRwb3MDMQRjb2xvA2Jm2dGlkA1NNRTcwM18x/RV=2/RE=1423106085/RO=10
- /RU=https%3a%2f%2fthis.is.the.url%2f/RK=0/RS=dtcJsfP4mEeBOjnVfUQ-"
- target="_blank" data-bid="54e712e13671c">
- <b><b>This is the title</b></b></a>
- </h3>
- </div>
- <div class="compText aAbs">
- <p class="lh-18"><b><b>This is the </b>content</b>
- </p>
- </div>
- </div>
- </li>
-</ol>
- """
- response = mock.Mock(text=html)
- results = yahoo.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0]['title'], 'This is the title')
- self.assertEqual(results[0]['url'], 'https://this.is.the.url/')
- self.assertEqual(results[0]['content'], 'This is the content')
-
- html = """
- <li class="b_algo" u="0|5109|4755453613245655|UAGjXgIrPH5yh-o5oNHRx_3Zta87f_QO">
- </li>
- """
- response = mock.Mock(text=html)
- results = yahoo.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 0)
-
- def test_fetch_supported_languages(self):
- html = """<html></html>"""
- response = mock.Mock(text=html)
- results = yahoo._fetch_supported_languages(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 0)
-
- html = """
- <html>
- <div>
- <div id="yschlang">
- <span>
- <label><input value="lang_ar"></input></label>
- </span>
- <span>
- <label><input value="lang_zh_chs"></input></label>
- <label><input value="lang_zh_cht"></input></label>
- </span>
- </div>
- </div>
- </html>
- """
- response = mock.Mock(text=html)
- languages = yahoo._fetch_supported_languages(response)
- self.assertEqual(type(languages), list)
- self.assertEqual(len(languages), 3)
- self.assertIn('ar', languages)
- self.assertIn('zh-CHS', languages)
- self.assertIn('zh-CHT', languages)
diff --git a/tests/unit/engines/test_yahoo_news.py b/tests/unit/engines/test_yahoo_news.py
deleted file mode 100644
index ae27df2a5..000000000
--- a/tests/unit/engines/test_yahoo_news.py
+++ /dev/null
@@ -1,150 +0,0 @@
-# -*- coding: utf-8 -*-
-from collections import defaultdict
-from datetime import datetime
-import mock
-from searx.engines import yahoo_news
-from searx.testing import SearxTestCase
-
-
-class TestYahooNewsEngine(SearxTestCase):
-
- def test_request(self):
- yahoo_news.supported_languages = ['en', 'fr']
- query = 'test_query'
- dicto = defaultdict(dict)
- dicto['pageno'] = 1
- dicto['language'] = 'fr-FR'
- params = yahoo_news.request(query, dicto)
- self.assertIn('url', params)
- self.assertIn(query, params['url'])
- self.assertIn('news.search.yahoo.com', params['url'])
- self.assertIn('fr', params['url'])
- self.assertIn('cookies', params)
- self.assertIn('sB', params['cookies'])
- self.assertIn('fr', params['cookies']['sB'])
-
- dicto['language'] = 'all'
- params = yahoo_news.request(query, dicto)
- self.assertIn('cookies', params)
- self.assertIn('sB', params['cookies'])
- self.assertIn('en', params['cookies']['sB'])
- self.assertIn('en', params['url'])
-
- def test_sanitize_url(self):
- url = "test.url"
- self.assertEqual(url, yahoo_news.sanitize_url(url))
-
- url = "www.yahoo.com/;_ylt=test"
- self.assertEqual("www.yahoo.com/", yahoo_news.sanitize_url(url))
-
- def test_response(self):
- self.assertRaises(AttributeError, yahoo_news.response, None)
- self.assertRaises(AttributeError, yahoo_news.response, [])
- self.assertRaises(AttributeError, yahoo_news.response, '')
- self.assertRaises(AttributeError, yahoo_news.response, '[]')
-
- response = mock.Mock(text='<html></html>')
- self.assertEqual(yahoo_news.response(response), [])
-
- html = """
- <ol class=" reg searchCenterMiddle">
- <li class="first">
- <div class="compTitle">
- <h3>
- <a class="yschttl spt" href="http://this.is.the.url" target="_blank">
- This is
- the <b>title</b>...
- </a>
- </h3>
- </div>
- <div>
- <span class="cite">Business via Yahoo!</span>
- <span class="tri fc-2nd ml-10">May 01 10:00 AM</span>
- </div>
- <div class="compText">
- This is the content
- </div>
- </li>
- <li class="first">
- <div class="compTitle">
- <h3>
- <a class="yschttl spt" target="_blank">
- </a>
- </h3>
- </div>
- <div class="compText">
- </div>
- </li>
- </ol>
- """
- response = mock.Mock(text=html)
- results = yahoo_news.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0]['title'], 'This is the title...')
- self.assertEqual(results[0]['url'], 'http://this.is.the.url/')
- self.assertEqual(results[0]['content'], 'This is the content')
-
- html = """
- <ol class=" reg searchCenterMiddle">
- <li class="first">
- <div class="compTitle">
- <h3>
- <a class="yschttl spt" href="http://this.is.the.url" target="_blank">
- This is
- the <b>title</b>...
- </a>
- </h3>
- </div>
- <div>
- <span class="cite">Business via Yahoo!</span>
- <span class="tri fc-2nd ml-10">2 hours, 22 minutes ago</span>
- </div>
- <div class="compText">
- This is the content
- </div>
- </li>
- <li>
- <div class="compTitle">
- <h3>
- <a class="yschttl spt" href="http://this.is.the.url" target="_blank">
- This is
- the <b>title</b>...
- </a>
- </h3>
- </div>
- <div>
- <span class="cite">Business via Yahoo!</span>
- <span class="tri fc-2nd ml-10">22 minutes ago</span>
- </div>
- <div class="compText">
- This is the content
- </div>
- </li>
- <li>
- <div class="compTitle">
- <h3>
- <a class="yschttl spt" href="http://this.is.the.url" target="_blank">
- This is
- the <b>title</b>...
- </a>
- </h3>
- </div>
- <div>
- <span class="cite">Business via Yahoo!</span>
- <span class="tri fc-2nd ml-10">Feb 03 09:45AM 1900</span>
- </div>
- <div class="compText">
- This is the content
- </div>
- </li>
- </ol>
- """
- response = mock.Mock(text=html)
- results = yahoo_news.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 3)
- self.assertEqual(results[0]['title'], 'This is the title...')
- self.assertEqual(results[0]['url'], 'http://this.is.the.url/')
- self.assertEqual(results[0]['content'], 'This is the content')
- self.assertEqual(results[2]['publishedDate'].year, datetime.now().year)
diff --git a/tests/unit/engines/test_youtube_api.py b/tests/unit/engines/test_youtube_api.py
deleted file mode 100644
index 0d4d478c3..000000000
--- a/tests/unit/engines/test_youtube_api.py
+++ /dev/null
@@ -1,111 +0,0 @@
-from collections import defaultdict
-import mock
-from searx.engines import youtube_api
-from searx.testing import SearxTestCase
-
-
-class TestYoutubeAPIEngine(SearxTestCase):
-
- def test_request(self):
- query = 'test_query'
- dicto = defaultdict(dict)
- dicto['pageno'] = 0
- dicto['language'] = 'fr_FR'
- params = youtube_api.request(query, dicto)
- self.assertTrue('url' in params)
- self.assertTrue(query in params['url'])
- self.assertIn('googleapis.com', params['url'])
- self.assertIn('youtube', params['url'])
- self.assertIn('fr', params['url'])
-
- dicto['language'] = 'all'
- params = youtube_api.request(query, dicto)
- self.assertFalse('fr' in params['url'])
-
- def test_response(self):
- self.assertRaises(AttributeError, youtube_api.response, None)
- self.assertRaises(AttributeError, youtube_api.response, [])
- self.assertRaises(AttributeError, youtube_api.response, '')
- self.assertRaises(AttributeError, youtube_api.response, '[]')
-
- response = mock.Mock(text='{}')
- self.assertEqual(youtube_api.response(response), [])
-
- response = mock.Mock(text='{"data": []}')
- self.assertEqual(youtube_api.response(response), [])
-
- json = """
- {
- "kind": "youtube#searchListResponse",
- "etag": "xmg9xJZuZD438sF4hb-VcBBREXc/YJQDcTBCDcaBvl-sRZJoXdvy1ME",
- "nextPageToken": "CAUQAA",
- "pageInfo": {
- "totalResults": 1000000,
- "resultsPerPage": 20
- },
- "items": [
- {
- "kind": "youtube#searchResult",
- "etag": "xmg9xJZuZD438sF4hb-VcBBREXc/IbLO64BMhbHIgWLwLw7MDYe7Hs4",
- "id": {
- "kind": "youtube#video",
- "videoId": "DIVZCPfAOeM"
- },
- "snippet": {
- "publishedAt": "2015-05-29T22:41:04.000Z",
- "channelId": "UCNodmx1ERIjKqvcJLtdzH5Q",
- "title": "Title",
- "description": "Description",
- "thumbnails": {
- "default": {
- "url": "https://i.ytimg.com/vi/DIVZCPfAOeM/default.jpg"
- },
- "medium": {
- "url": "https://i.ytimg.com/vi/DIVZCPfAOeM/mqdefault.jpg"
- },
- "high": {
- "url": "https://i.ytimg.com/vi/DIVZCPfAOeM/hqdefault.jpg"
- }
- },
- "channelTitle": "MinecraftUniverse",
- "liveBroadcastContent": "none"
- }
- }
- ]
- }
- """
- response = mock.Mock(text=json)
- results = youtube_api.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 1)
- self.assertEqual(results[0]['title'], 'Title')
- self.assertEqual(results[0]['url'], 'https://www.youtube.com/watch?v=DIVZCPfAOeM')
- self.assertEqual(results[0]['content'], 'Description')
- self.assertEqual(results[0]['thumbnail'], 'https://i.ytimg.com/vi/DIVZCPfAOeM/hqdefault.jpg')
- self.assertTrue('DIVZCPfAOeM' in results[0]['embedded'])
-
- json = """
- {
- "kind": "youtube#searchListResponse",
- "etag": "xmg9xJZuZD438sF4hb-VcBBREXc/YJQDcTBCDcaBvl-sRZJoXdvy1ME",
- "nextPageToken": "CAUQAA",
- "pageInfo": {
- "totalResults": 1000000,
- "resultsPerPage": 20
- }
- }
- """
- response = mock.Mock(text=json)
- results = youtube_api.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 0)
-
- json = """
- {"toto":{"entry":[]
- }
- }
- """
- response = mock.Mock(text=json)
- results = youtube_api.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 0)
diff --git a/tests/unit/engines/test_youtube_noapi.py b/tests/unit/engines/test_youtube_noapi.py
deleted file mode 100644
index cbf7b9bcd..000000000
--- a/tests/unit/engines/test_youtube_noapi.py
+++ /dev/null
@@ -1,124 +0,0 @@
-# -*- coding: utf-8 -*-
-from collections import defaultdict
-import mock
-from searx.engines import youtube_noapi
-from searx.testing import SearxTestCase
-
-
-class TestYoutubeNoAPIEngine(SearxTestCase):
-
- def test_request(self):
- query = 'test_query'
- dicto = defaultdict(dict)
- dicto['pageno'] = 0
- dicto['time_range'] = ''
- params = youtube_noapi.request(query, dicto)
- self.assertIn('url', params)
- self.assertIn(query, params['url'])
- self.assertIn('youtube.com', params['url'])
-
- def test_time_range_search(self):
- dicto = defaultdict(dict)
- query = 'test_query'
- dicto['time_range'] = 'year'
- params = youtube_noapi.request(query, dicto)
- self.assertIn('&sp=EgIIBQ%253D%253D', params['url'])
-
- dicto['time_range'] = 'month'
- params = youtube_noapi.request(query, dicto)
- self.assertIn('&sp=EgIIBA%253D%253D', params['url'])
-
- dicto['time_range'] = 'week'
- params = youtube_noapi.request(query, dicto)
- self.assertIn('&sp=EgIIAw%253D%253D', params['url'])
-
- dicto['time_range'] = 'day'
- params = youtube_noapi.request(query, dicto)
- self.assertIn('&sp=EgIIAg%253D%253D', params['url'])
-
- def test_response(self):
- self.assertRaises(AttributeError, youtube_noapi.response, None)
- self.assertRaises(AttributeError, youtube_noapi.response, [])
- self.assertRaises(AttributeError, youtube_noapi.response, '')
- self.assertRaises(AttributeError, youtube_noapi.response, '[]')
-
- response = mock.Mock(text='<html></html>')
- self.assertEqual(youtube_noapi.response(response), [])
-
- html = """
- <div></div>
- <script>
- window["ytInitialData"] = {
- "contents": {
- "twoColumnSearchResultsRenderer": {
- "primaryContents": {
- "sectionListRenderer": {
- "contents": [
- {
- "itemSectionRenderer": {
- "contents": [
- {
- "videoRenderer": {
- "videoId": "DIVZCPfAOeM",
- "title": {
- "simpleText": "Title"
- },
- "descriptionSnippet": {
- "runs": [
- {
- "text": "Des"
- },
- {
- "text": "cription"
- }
- ]
- }
- }
- },
- {
- "videoRenderer": {
- "videoId": "9C_HReR_McQ",
- "title": {
- "simpleText": "Title"
- },
- "descriptionSnippet": {
- "simpleText": "Description"
- }
- }
- }
- ]
- }
- }
- ]
- }
- }
- }
- }
- };
- </script>
- """
- response = mock.Mock(text=html)
- results = youtube_noapi.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 2)
- self.assertEqual(results[0]['title'], 'Title')
- self.assertEqual(results[0]['url'], 'https://www.youtube.com/watch?v=DIVZCPfAOeM')
- self.assertEqual(results[0]['content'], 'Description')
- self.assertEqual(results[0]['thumbnail'], 'https://i.ytimg.com/vi/DIVZCPfAOeM/hqdefault.jpg')
- self.assertTrue('DIVZCPfAOeM' in results[0]['embedded'])
- self.assertEqual(results[1]['title'], 'Title')
- self.assertEqual(results[1]['url'], 'https://www.youtube.com/watch?v=9C_HReR_McQ')
- self.assertEqual(results[1]['content'], 'Description')
- self.assertEqual(results[1]['thumbnail'], 'https://i.ytimg.com/vi/9C_HReR_McQ/hqdefault.jpg')
- self.assertTrue('9C_HReR_McQ' in results[1]['embedded'])
-
- html = """
- <ol id="item-section-063864" class="item-section">
- <li>
- </li>
- </ol>
- """
- response = mock.Mock(text=html)
- results = youtube_noapi.response(response)
- self.assertEqual(type(results), list)
- self.assertEqual(len(results), 0)
diff --git a/tests/unit/engines/unsplash_fixture.json b/tests/unit/engines/unsplash_fixture.json
deleted file mode 100644
index 4c8db2a2c..000000000
--- a/tests/unit/engines/unsplash_fixture.json
+++ /dev/null
@@ -1,241 +0,0 @@
-{
- "total": 2,
- "total_pages": 1,
- "results": [
- {
- "id": "FY8d721UO_4",
- "created_at": "2018-04-12T14:20:35-04:00",
- "updated_at": "2018-08-28T20:58:33-04:00",
- "width": 3891,
- "height": 5829,
- "color": "#152C33",
- "description": "low angle photography of swimming penguin",
- "urls": {
- "raw": "https://images.unsplash.com/photo-1523557148507-1b77641c7e7c?ixlib=rb-0.3.5&ixid=eyJhcHBfaWQiOjEyMDd9&s=095c5fc319c5a77c705f49ad63e0f195",
- "full": "https://images.unsplash.com/photo-1523557148507-1b77641c7e7c?ixlib=rb-0.3.5&q=85&fm=jpg&crop=entropy&cs=srgb&ixid=eyJhcHBfaWQiOjEyMDd9&s=74be977849c173d6929636d491a760c3",
- "regular": "https://images.unsplash.com/photo-1523557148507-1b77641c7e7c?ixlib=rb-0.3.5&q=80&fm=jpg&crop=entropy&cs=tinysrgb&w=1080&fit=max&ixid=eyJhcHBfaWQiOjEyMDd9&s=ad65df26970bd010085f0ca25434de33",
- "small": "https://images.unsplash.com/photo-1523557148507-1b77641c7e7c?ixlib=rb-0.3.5&q=80&fm=jpg&crop=entropy&cs=tinysrgb&w=400&fit=max&ixid=eyJhcHBfaWQiOjEyMDd9&s=5d2edfd073c31eb8ee7b305222bdc5a2",
- "thumb": "https://images.unsplash.com/photo-1523557148507-1b77641c7e7c?ixlib=rb-0.3.5&q=80&fm=jpg&crop=entropy&cs=tinysrgb&w=200&fit=max&ixid=eyJhcHBfaWQiOjEyMDd9&s=a9b9e56e63efc6f4611a87ce7e9a48f8"
- },
- "links": {
- "self": "https://api.unsplash.com/photos/FY8d721UO_4",
- "html": "https://unsplash.com/photos/FY8d721UO_4",
- "download": "https://unsplash.com/photos/FY8d721UO_4/download",
- "download_location": "https://api.unsplash.com/photos/FY8d721UO_4/download"
- },
- "categories": [],
- "sponsored": false,
- "likes": 31,
- "liked_by_user": false,
- "current_user_collections": [],
- "slug": null,
- "user": {
- "id": "N4gE4mrG8lE",
- "updated_at": "2018-10-03T02:51:19-04:00",
- "username": "gaspanik",
- "name": "Masaaki Komori",
- "first_name": "Masaaki",
- "last_name": "Komori",
- "twitter_username": "cipher",
- "portfolio_url": "https://www.instagram.com/cipher/",
- "bio": null,
- "location": "Tokyo, JAPAN",
- "links": {
- "self": "https://api.unsplash.com/users/gaspanik",
- "html": "https://unsplash.com/@gaspanik",
- "photos": "https://api.unsplash.com/users/gaspanik/photos",
- "likes": "https://api.unsplash.com/users/gaspanik/likes",
- "portfolio": "https://api.unsplash.com/users/gaspanik/portfolio",
- "following": "https://api.unsplash.com/users/gaspanik/following",
- "followers": "https://api.unsplash.com/users/gaspanik/followers"
- },
- "profile_image": {
- "small": "https://images.unsplash.com/profile-fb-1502270358-e7c86c1011ce.jpg?ixlib=rb-0.3.5&q=80&fm=jpg&crop=faces&cs=tinysrgb&fit=crop&h=32&w=32&s=9fe12f6d177bd6fdbd56d233a80c01a3",
- "medium": "https://images.unsplash.com/profile-fb-1502270358-e7c86c1011ce.jpg?ixlib=rb-0.3.5&q=80&fm=jpg&crop=faces&cs=tinysrgb&fit=crop&h=64&w=64&s=6ad7d156b62e438ae9dc794cba712fff",
- "large": "https://images.unsplash.com/profile-fb-1502270358-e7c86c1011ce.jpg?ixlib=rb-0.3.5&q=80&fm=jpg&crop=faces&cs=tinysrgb&fit=crop&h=128&w=128&s=13a08a2e72e7d11632410e92bd3a9406"
- },
- "instagram_username": "cipher",
- "total_collections": 0,
- "total_likes": 406,
- "total_photos": 196
- },
- "tags": [
- {
- "title": "animal"
- },
- {
- "title": "water"
- },
- {
- "title": "swim"
- },
- {
- "title": "aquarium"
- },
- {
- "title": "wallpaper"
- },
- {
- "title": "blue"
- },
- {
- "title": "sealife"
- },
- {
- "title": "wildlife"
- },
- {
- "title": "bird"
- },
- {
- "title": "deep sea"
- },
- {
- "title": "fish"
- },
- {
- "title": "water life"
- }
- ],
- "photo_tags": [
- {
- "title": "animal"
- },
- {
- "title": "water"
- },
- {
- "title": "swim"
- },
- {
- "title": "aquarium"
- },
- {
- "title": "wallpaper"
- }
- ]
- },
- {
- "id": "ayKyc01xLWA",
- "created_at": "2018-02-16T23:14:31-05:00",
- "updated_at": "2018-08-28T20:48:27-04:00",
- "width": 4928,
- "height": 3264,
- "color": "#161618",
- "description": "black and white penguins on ice field",
- "urls": {
- "raw": "https://images.unsplash.com/photo-1518840801558-9770b4a34eeb?ixlib=rb-0.3.5&ixid=eyJhcHBfaWQiOjEyMDd9&s=4e107a2bc49ab561ba6272eea2ec725d",
- "full": "https://images.unsplash.com/photo-1518840801558-9770b4a34eeb?ixlib=rb-0.3.5&q=85&fm=jpg&crop=entropy&cs=srgb&ixid=eyJhcHBfaWQiOjEyMDd9&s=f9b1e4d4572ab44efb2cf3d601d2b4d9",
- "regular": "https://images.unsplash.com/photo-1518840801558-9770b4a34eeb?ixlib=rb-0.3.5&q=80&fm=jpg&crop=entropy&cs=tinysrgb&w=1080&fit=max&ixid=eyJhcHBfaWQiOjEyMDd9&s=4430cedb63841f1fe055d5005316cc96",
- "small": "https://images.unsplash.com/photo-1518840801558-9770b4a34eeb?ixlib=rb-0.3.5&q=80&fm=jpg&crop=entropy&cs=tinysrgb&w=400&fit=max&ixid=eyJhcHBfaWQiOjEyMDd9&s=ee73c7af22ce445d408e240821ce07af",
- "thumb": "https://images.unsplash.com/photo-1518840801558-9770b4a34eeb?ixlib=rb-0.3.5&q=80&fm=jpg&crop=entropy&cs=tinysrgb&w=200&fit=max&ixid=eyJhcHBfaWQiOjEyMDd9&s=934302390d383cad8c571905e3a80bac"
- },
- "links": {
- "self": "https://api.unsplash.com/photos/ayKyc01xLWA",
- "html": "https://unsplash.com/photos/ayKyc01xLWA",
- "download": "https://unsplash.com/photos/ayKyc01xLWA/download",
- "download_location": "https://api.unsplash.com/photos/ayKyc01xLWA/download"
- },
- "categories": [],
- "sponsored": false,
- "likes": 37,
- "liked_by_user": false,
- "current_user_collections": [],
- "slug": null,
- "user": {
- "id": "tRb_KGw60Xk",
- "updated_at": "2018-09-20T11:51:54-04:00",
- "username": "ghost_cat",
- "name": "Danielle Barnes",
- "first_name": "Danielle",
- "last_name": "Barnes",
- "twitter_username": null,
- "portfolio_url": null,
- "bio": null,
- "location": null,
- "links": {
- "self": "https://api.unsplash.com/users/ghost_cat",
- "html": "https://unsplash.com/@ghost_cat",
- "photos": "https://api.unsplash.com/users/ghost_cat/photos",
- "likes": "https://api.unsplash.com/users/ghost_cat/likes",
- "portfolio": "https://api.unsplash.com/users/ghost_cat/portfolio",
- "following": "https://api.unsplash.com/users/ghost_cat/following",
- "followers": "https://api.unsplash.com/users/ghost_cat/followers"
- },
- "profile_image": {
- "small": "https://images.unsplash.com/profile-fb-1508491082-ae77f53e9ac3.jpg?ixlib=rb-0.3.5&q=80&fm=jpg&crop=faces&cs=tinysrgb&fit=crop&h=32&w=32&s=751bf6a557763648d52ffd7e60e79436",
- "medium": "https://images.unsplash.com/profile-fb-1508491082-ae77f53e9ac3.jpg?ixlib=rb-0.3.5&q=80&fm=jpg&crop=faces&cs=tinysrgb&fit=crop&h=64&w=64&s=e46cd1c8713035f045130e1b093b981e",
- "large": "https://images.unsplash.com/profile-fb-1508491082-ae77f53e9ac3.jpg?ixlib=rb-0.3.5&q=80&fm=jpg&crop=faces&cs=tinysrgb&fit=crop&h=128&w=128&s=352eabcf107c3ce95fe51a18485f116b"
- },
- "instagram_username": null,
- "total_collections": 0,
- "total_likes": 0,
- "total_photos": 21
- },
- "tags": [
- {
- "title": "ice"
- },
- {
- "title": "bird"
- },
- {
- "title": "ice field"
- },
- {
- "title": "iceberg"
- },
- {
- "title": "snow"
- },
- {
- "title": "frozen"
- },
- {
- "title": "animal"
- },
- {
- "title": "wildlife"
- },
- {
- "title": "wild"
- },
- {
- "title": "antarctica"
- },
- {
- "title": "sunshine"
- },
- {
- "title": "daylight"
- },
- {
- "title": "wilderness"
- },
- {
- "title": "south pole"
- },
- {
- "title": "flock"
- }
- ],
- "photo_tags": [
- {
- "title": "ice"
- },
- {
- "title": "bird"
- },
- {
- "title": "ice field"
- },
- {
- "title": "iceberg"
- },
- {
- "title": "snow"
- }
- ]
- }
- ]
-} \ No newline at end of file
diff --git a/utils/makefile.include b/utils/makefile.include
new file mode 100644
index 000000000..716889c02
--- /dev/null
+++ b/utils/makefile.include
@@ -0,0 +1,128 @@
+# -*- coding: utf-8; mode: makefile-gmake -*-
+
+make-help:
+ @echo ' make V=0|1 [targets] 0 => quiet build (default), 1 => verbose build'
+ @echo ' make V=2 [targets] 2 => give reason for rebuild of target'
+
+quiet_cmd_common_clean = CLEAN $@
+ cmd_common_clean = \
+ rm -rf tests/build ;\
+ find . -name '*.orig' -exec rm -f {} + ;\
+ find . -name '*.rej' -exec rm -f {} + ;\
+ find . -name '*~' -exec rm -f {} + ;\
+ find . -name '*.bak' -exec rm -f {} + ;\
+
+FMT = cat
+ifeq ($(shell which fmt >/dev/null 2>&1; echo $$?), 0)
+FMT = fmt
+endif
+
+# MS-Windows
+#
+# For a minimal *make-environment*, I'm using the GNU tools from:
+#
+# - GNU MCU Eclipse Windows Build Tools, which brings 'make', 'rm' etc.
+# https://github.com/gnu-mcu-eclipse/windows-build-tools/releases
+#
+# - git for Windows, which brings 'find', 'grep' etc.
+# https://git-scm.com/download/win
+
+
+# normpath
+#
+# System-dependent normalization of the path name
+#
+# usage: $(call normpath,/path/to/file)
+
+normpath = $1
+ifeq ($(OS),Windows_NT)
+ normpath = $(subst /,\,$1)
+endif
+
+
+# stolen from linux/Makefile
+#
+
+ifeq ("$(origin V)", "command line")
+ KBUILD_VERBOSE = $(V)
+endif
+ifndef KBUILD_VERBOSE
+ KBUILD_VERBOSE = 0
+endif
+
+ifeq ($(KBUILD_VERBOSE),1)
+ quiet =
+ Q =
+else
+ quiet=quiet_
+ Q = @
+endif
+
+# stolen from linux/scripts/Kbuild.include
+#
+
+# Convenient variables
+comma := ,
+quote := "
+#" this comment is only for emacs highlighting
+squote := '
+#' this comment is only for emacs highlighting
+empty :=
+space := $(empty) $(empty)
+space_escape := _-_SPACE_-_
+
+# Find any prerequisites that are newer than the target or that do not exist.
+# PHONY targets are skipped in both cases.
+any-prereq = $(filter-out $(PHONY),$?) $(filter-out $(PHONY) $(wildcard $^),$^)
+#
+###
+# why - tell why a target got built
+# enabled by make V=2
+# Output (listed in the order they are checked):
+# (1) - due to target is PHONY
+# (2) - due to target missing
+# (3) - due to: file1.h file2.h
+# (4) - due to command line change
+# (5) - due to missing .cmd file
+# (6) - due to target not in $(targets)
+# (1) PHONY targets are always built
+# (2) No target, so we better build it
+# (3) Prerequisite is newer than target
+# (4) The command line stored in the file named dir/.target.cmd
+# differs from the actual command line. This happens when compiler
+# options change
+# (5) No dir/.target.cmd file (used to store command line)
+# (6) No dir/.target.cmd file and target not listed in $(targets)
+# This is a good hint that there is a bug in the kbuild file
+ifeq ($(KBUILD_VERBOSE),2)
+why = \
+ $(if $(filter $@, $(PHONY)),- due to target is PHONY, \
+ $(if $(wildcard $@), \
+ $(if $(strip $(any-prereq)),- due to: $(any-prereq), \
+ $(if $(arg-check), \
+ $(if $(cmd_$@),- due to command line change, \
+ $(if $(filter $@, $(targets)), \
+ - due to missing .cmd file, \
+ - due to $(notdir $@) not in $$(targets) \
+ ) \
+ ) \
+ ) \
+ ), \
+ - due to target missing \
+ ) \
+ )
+
+echo-why = $(call escsq, $(strip $(why)))
+endif
+#
+###
+# Escape single quote for use in echo statements
+escsq = $(subst $(squote),'\$(squote)',$1)
+#
+# echo command.
+# Short version is used, if $(quiet) equals `quiet_', otherwise full one.
+echo-cmd = $(if $($(quiet)cmd_$(1)),echo '$(call escsq,$($(quiet)cmd_$(1)))$(echo-why)';)
+#
+# printing commands
+cmd = @$(echo-cmd) $(cmd_$(1))
+
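+#
+# Illustrative sketch (not part of the original file): how a target in a
+# top-level Makefile would typically hook into the quiet_cmd_*/cmd_* pairs
+# above. The 'example' name and the stamp file are hypothetical.
+#
+#   quiet_cmd_example = EXAMPLE $@
+#         cmd_example = touch $@
+#
+#   example.stamp:
+#       $(call cmd,example)
+#
+# With V=0 (default) only the short "EXAMPLE example.stamp" line is printed,
+# with V=1 the full command line is echoed, and V=2 additionally reports why
+# the target was rebuilt (see echo-why above).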
diff --git a/utils/makefile.python b/utils/makefile.python
new file mode 100644
index 000000000..4aa9d6b49
--- /dev/null
+++ b/utils/makefile.python
@@ -0,0 +1,290 @@
+# -*- coding: utf-8; mode: makefile-gmake -*-
+
+# list of python packages (folders) or modules (files) of this build
+PYOBJECTS ?=
+
+SITE_PYTHON ?=$(dir $(abspath $(lastword $(MAKEFILE_LIST))))site-python
+export PYTHONPATH := $(SITE_PYTHON):$$PYTHONPATH
+
+# folder where the python distribution takes place
+PYDIST ?= ./py_dist
+# folder where the python intermediate build files take place
+PYBUILD ?= ./py_build
+# python version to use
+PY ?=3
+PYTHON ?= python$(PY)
+PIP ?= pip$(PY)
+PIP_INST ?= --user
+
+# https://www.python.org/dev/peps/pep-0508/#extras
+#PY_SETUP_EXTRAS ?= \[develop,test\]
+PY_SETUP_EXTRAS ?=
+
+PYDEBUG ?= --pdb
+PYLINT_RC ?= .pylintrc
+
+TEST_FOLDER ?= ./tests
+TEST ?= .
+
+VTENV_OPTS = "--no-site-packages"
+PY_ENV = ./local/py$(PY)
+PY_ENV_BIN = $(PY_ENV)/bin
+PY_ENV_ACT = . $(PY_ENV_BIN)/activate
+
+ifeq ($(OS),Windows_NT)
+ PYTHON = python
+ PY_ENV_BIN = $(PY_ENV)/Scripts
+ PY_ENV_ACT = $(PY_ENV_BIN)/activate
+endif
+
+ifeq ($(PYTHON),python)
+ VIRTUALENV = virtualenv
+else
+ VIRTUALENV = virtualenv --python=$(PYTHON)
+endif
+
+ifeq ($(KBUILD_VERBOSE),1)
+ PIP_VERBOSE =
+ VIRTUALENV_VERBOSE =
+else
+ PIP_VERBOSE = "-q"
+ VIRTUALENV_VERBOSE = "-q"
+endif
+
+python-help::
+ @echo 'makefile.python:'
+ @echo ' pyenv | pyenv[un]install'
+ @echo ' build $(PY_ENV) & [un]install python objects'
+ @echo ' targets using pyenv $(PY_ENV):'
+ @echo ' pylint - run pylint *linting*'
+ @echo ' pytest - run *tox* test on python objects'
+ @echo ' pydebug - run tests within a PDB debug session'
+ @echo ' pybuild - build python packages'
+ @echo ' pyclean - clean intermediate python objects'
+ @echo ' targets using system users environment:'
+ @echo ' py[un]install - [un]install python objects in editable mode'
+ @echo ' upload-pypi - upload $(PYDIST)/* files to PyPi'
+ @echo 'options:'
+ @echo ' make PY=2 [targets] => to eval targets with python 2 ($(PY))'
+ @echo ' make PIP_INST= => to set/unset pip install options ($(PIP_INST))'
+ @echo ' make TEST=. => choose test from $(TEST_FOLDER) (default "." runs all)'
+ @echo ' make DEBUG= => target "pydebug": do not invoke PDB on errors'
+ @echo ' make PY_SETUP_EXTRAS => also install extras_require from setup.py \[develop,test\]'
+ @echo ' when using target "pydebug", set breakpoints within py-source by adding::'
+ @echo ' DEBUG()'
+
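+# Illustrative workflow (sketch only, not part of the original file) using the
+# targets defined below; it assumes a setup.py and a requirements.txt in the
+# project root:
+#
+#   make pyenvinstall   # create ./local/py3 and install the package editable
+#   make pylint pytest  # lint $(PYOBJECTS) and run the tox suite in that env
+#   make pybuild        # build sdist + wheel into $(PYDIST)
+#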
+# ------------------------------------------------------------------------------
+# OS requirements
+# ------------------------------------------------------------------------------
+
+PHONY += msg-python-exe python-exe
+msg-python-exe:
+ @echo "\n $(PYTHON) is required\n\n\
+ Make sure you have $(PYTHON) installed, grab it from\n\
+ https://www.python.org or install it from your package\n\
+ manager. On debian based OS these requirements are\n\
+ installed by::\n\n\
+ sudo -H apt-get install $(PYTHON)\n" | $(FMT)
+
+ifeq ($(shell which $(PYTHON) >/dev/null 2>&1; echo $$?), 1)
+python-exe: msg-python-exe
+ $(error The '$(PYTHON)' command was not found)
+else
+python-exe:
+ @:
+endif
+
+msg-pip-exe:
+ @echo "\n $(PIP) is required\n\n\
+ Make sure you have updated pip installed, grab it from\n\
+ https://pip.pypa.io or install it from your package\n\
+ manager. On debian based OS these requirements are\n\
+ installed by::\n\n\
+ sudo -H apt-get install python$(PY)-pip\n" | $(FMT)
+
+ifeq ($(shell which $(PIP) >/dev/null 2>&1; echo $$?), 1)
+pip-exe: msg-pip-exe
+ $(error The '$(PIP)' command was not found)
+else
+pip-exe:
+ @:
+endif
+
+PHONY += msg-virtualenv-exe virtualenv-exe
+msg-virtualenv-exe:
+ @echo "\n virtualenv is required\n\n\
+ Make sure you have an updated virtualenv installed, grab it from\n\
+ https://virtualenv.pypa.io/en/stable/installation/ or install it\n\
+ via pip by::\n\n\
+ pip install --user https://github.com/pypa/virtualenv/tarball/master\n" | $(FMT)
+
+ifeq ($(shell which virtualenv >/dev/null 2>&1; echo $$?), 1)
+virtualenv-exe: msg-virtualenv-exe
+ $(error The 'virtualenv' command was not found)
+else
+virtualenv-exe:
+ @:
+endif
+
+# ------------------------------------------------------------------------------
+# commands
+# ------------------------------------------------------------------------------
+
+# $2 path to folder with setup.py, this uses pip from the OS
+quiet_cmd_pyinstall = INSTALL $2
+ cmd_pyinstall = $(PIP) $(PIP_VERBOSE) install $(PIP_INST) -e $2$(PY_SETUP_EXTRAS)
+
+# $2 path to folder with setup.py, this uses pip from pyenv (not OS!)
+quiet_cmd_pyenvinstall = PYENV install $2
+ cmd_pyenvinstall = $(PY_ENV_BIN)/pip $(PIP_VERBOSE) install -e $2$(PY_SETUP_EXTRAS)
+
+# Uninstall the package. Since pip does not uninstall the no longer needed
+# dependencies (something like autoremove), those dependencies remain.
+
+# $2 package name to uninstall, this uses pip from the OS.
+quiet_cmd_pyuninstall = UNINSTALL $2
+ cmd_pyuninstall = $(PIP) $(PIP_VERBOSE) uninstall --yes $2
+
+# $2 path to folder with setup.py, this uses pip from pyenv (not OS!)
+quiet_cmd_pyenvuninstall = PYENV uninstall $2
+ cmd_pyenvuninstall = $(PY_ENV_BIN)/pip $(PIP_VERBOSE) uninstall --yes $2
+
+# $2 path to folder where virtualenv take place
+quiet_cmd_virtualenv = PYENV usage: $ source ./$@/bin/activate
+ cmd_virtualenv = \
+ if [ ! -d "./$(PY_ENV)" ];then \
+ $(VIRTUALENV) $(VIRTUALENV_VERBOSE) $(VTENV_OPTS) $2; \
+ else \
+ echo "PYENV using virtualenv from $2"; \
+ fi
+
+# $2 path to lint
+quiet_cmd_pylint = LINT $@
+ cmd_pylint = $(PY_ENV_BIN)/pylint --rcfile $(PYLINT_RC) $2
+
+quiet_cmd_pytest = TEST $@
+ cmd_pytest = $(PY_ENV_BIN)/tox -vv
+
+# setuptools, pip, easy_install: it's a mess full of cracks, a documentation hell
+# and broken by design ... it all sucks, I really, really hate all this ... aaargh!
+#
+# About python packaging see `Python Packaging Authority`_. Most of the names
+# here are mapped to ``setup(<name1>=..., <name2>=...)`` arguments in
+# ``setup.py``. See `Packaging and distributing projects`_ about ``setup(...)``
+# arguments. If this is all new for you, start with `PyPI Quick and Dirty`_.
+#
+# Further read:
+#
+# - pythonwheels_
+# - setuptools_
+# - packaging_
+# - sdist_
+# - installing_
+#
+# .. _`Python Packaging Authority`: https://www.pypa.io
+# .. _`Packaging and distributing projects`: https://packaging.python.org/guides/distributing-packages-using-setuptools/
+# .. _`PyPI Quick and Dirty`: https://hynek.me/articles/sharing-your-labor-of-love-pypi-quick-and-dirty/
+# .. _pythonwheels: https://pythonwheels.com/
+# .. _setuptools: https://setuptools.readthedocs.io/en/latest/setuptools.html
+# .. _packaging: https://packaging.python.org/guides/distributing-packages-using-setuptools/#packaging-and-distributing-projects
+# .. _sdist: https://packaging.python.org/guides/distributing-packages-using-setuptools/#source-distributions
+# .. _bdist_wheel: https://packaging.python.org/guides/distributing-packages-using-setuptools/#pure-python-wheels
+# .. _installing: https://packaging.python.org/tutorials/installing-packages/
+#
+quiet_cmd_pybuild = BUILD $@
+ cmd_pybuild = $(PY_ENV_BIN)/$(PYTHON) setup.py \
+ sdist -d $(PYDIST) \
+ bdist_wheel --bdist-dir $(PYBUILD) -d $(PYDIST)
+
+quiet_cmd_pyclean = CLEAN $@
+# remove 'build' folder since bdist_wheel does not honor the --bdist-dir option
+ cmd_pyclean = \
+ rm -rf $(PYDIST) $(PYBUILD) ./local ./.tox *.egg-info ;\
+ find . -name '*.pyc' -exec rm -f {} + ;\
+ find . -name '*.pyo' -exec rm -f {} + ;\
+ find . -name __pycache__ -exec rm -rf {} +
+
+# ------------------------------------------------------------------------------
+# targets
+# ------------------------------------------------------------------------------
+
+# for installation use the pip from the OS!
+PHONY += pyinstall
+pyinstall: pip-exe
+ $(call cmd,pyinstall,.)
+
+PHONY += pyuninstall
+pyuninstall: pip-exe
+ $(call cmd,pyuninstall,$(PYOBJECTS))
+
+# for installation use the pip from PY_ENV (not the OS)!
+PHONY += pyenvinstall
+pyenvinstall: $(PY_ENV)
+ $(call cmd,pyenvinstall,.)
+
+PHONY += pyenvuninstall
+pyenvuninstall: $(PY_ENV)
+ $(call cmd,pyenvuninstall,$(PYOBJECTS))
+
+PHONY += pyclean
+pyclean:
+ $(call cmd,pyclean)
+
+# to build *local* environment, python and virtualenv from the OS is needed!
+pyenv: $(PY_ENV)
+$(PY_ENV): virtualenv-exe python-exe
+ $(call cmd,virtualenv,$(PY_ENV))
+ @$(PY_ENV_BIN)/pip install $(PIP_VERBOSE) -r requirements.txt
+
+PHONY += pylint-exe
+pylint-exe: $(PY_ENV)
+ @$(PY_ENV_BIN)/pip $(PIP_VERBOSE) install pylint
+
+PHONY += pylint
+pylint: pylint-exe
+ $(call cmd,pylint,$(PYOBJECTS))
+
+PHONY += pybuild
+pybuild: $(PY_ENV)
+ $(call cmd,pybuild)
+
+PHONY += pytest
+pytest: $(PY_ENV)
+ $(call cmd,pytest)
+
+PHONY += pydebug
+# set breakpoint with:
+# DEBUG()
+# e.g. to run tests in debug mode in emacs use:
+# 'M-x pdb' ... 'make pydebug'
+pydebug: $(PY_ENV)
+ DEBUG=$(DEBUG) $(PY_ENV_BIN)/pytest $(DEBUG) -v $(TEST_FOLDER)/$(TEST)
+
+# install / uninstall python objects into virtualenv (PYENV)
+pyenv-install: $(PY_ENV)
+ @$(PY_ENV_BIN)/pip $(PIP_VERBOSE) install -e .
+ @echo "ACTIVATE $(call normpath,$(PY_ENV_ACT)) "
+
+pyenv-uninstall: $(PY_ENV)
+ @$(PY_ENV_BIN)/pip $(PIP_VERBOSE) uninstall --yes .
+
+# runs python interpreter from ./local/py<N>/bin/python
+pyenv-python: pyenv-install
+ cd ./local; ../$(PY_ENV_BIN)/python -i
+
+# With 'dependency_links=' setuptools supports dependencies on packages hosted
+# on repositories other than PyPI, see "Packages Not On PyPI" [1]. The big
+# drawback is that, for security reasons (I don't know where the security gate on
+# PyPI is), this feature is not supported by pip [2]. That's why an upload to
+# PyPI is required, and since uploading via setuptools is not recommended, we have
+# to install / use twine ... it's really a mess.
+#
+# [1] http://python-packaging.readthedocs.io/en/latest/dependencies.html#packages-not-on-pypi
+# [2] https://github.com/pypa/pip/pull/1519
+
+# https://github.com/pypa/twine
+PHONY += upload-pypi
+upload-pypi: pyclean pybuild
+ @$(PY_ENV_BIN)/twine upload $(PYDIST)/*
+
+.PHONY: $(PHONY)
diff --git a/utils/makefile.sphinx b/utils/makefile.sphinx
new file mode 100644
index 000000000..5cbc5ebdd
--- /dev/null
+++ b/utils/makefile.sphinx
@@ -0,0 +1,215 @@
+# -*- coding: utf-8; mode: makefile-gmake -*-
+
+# You can set these variables from the command line.
+SPHINXOPTS ?=
+SPHINXBUILD ?= $(PY_ENV_BIN)/sphinx-build
+SPHINX_CONF ?= conf.py
+
+DOCS_FOLDER ?= docs
+DOCS_BUILD ?= build/docs
+DOCS_DIST ?= dist/docs
+GH_PAGES ?= gh-pages
+
+BOOKS_FOLDER ?= docs
+BOOKS_DIST ?= dist/books
+
+ifeq ($(KBUILD_VERBOSE),1)
+ SPHINX_VERBOSE = "-v"
+else
+ SPHINX_VERBOSE =
+endif
+
+## SPHINXVERS variable
+## ===================
+##
+## .. _requirement-specifiers: https://pip.pypa.io/en/stable/reference/pip_install/#requirement-specifiers
+##
+## Sphinx version to use when building documentation. Set this when calling the
+## build target. The default value is empty (install latest); to select a
+## specific version use a requirement-specifiers_. E.g. to build your target
+## 'doc' with sphinx-doc_ version 1.7.9::
+##
+## make SPHINXVERS='==1.7.9' docs
+##
+## To build with latest 1.7::
+##
+## make SPHINXVERS='>=1.7,<1.8' docs
+##
+SPHINXVERS ?=
+
+docs-help:
+ @echo 'makefile.sphinx:'
+ @echo ' docs-clean - clean intermediate doc objects'
+ @echo ' $(GH_PAGES) - create & upload github pages'
+ @echo ' sphinx-pdf - run sphinx latex & pdf targets'
+ @echo ''
+ @echo ' books/{name}.html : build only the HTML of document {name}'
+ @echo ' valid values for books/{name}.html are:'
+ @echo ' $(BOOKS_HTML)' | $(FMT)
+ @echo ' books/{name}.pdf : build only the PDF of document {name}'
+ @echo ' valid values for books/{name}.pdf are:'
+ @echo ' $(BOOKS_PDF) ' | $(FMT)
+
+# ------------------------------------------------------------------------------
+# requirements
+# ------------------------------------------------------------------------------
+
+sphinx-doc: $(PY_ENV)
+ @echo "PYENV installing Sphinx$(SPHINXVERS)"
+ $(Q)$(PY_ENV_BIN)/pip install $(PIP_VERBOSE) 'Sphinx$(SPHINXVERS)'
+
+sphinx-live: $(PY_ENV)
+ @echo "PYENV installing Sphinx$(SPHINXVERS)"
+ $(Q)$(PY_ENV_BIN)/pip install $(PIP_VERBOSE) 'Sphinx$(SPHINXVERS)' sphinx-autobuild
+
+
+PHONY += msg-texlive texlive
+
+ifeq ($(shell which xelatex >/dev/null 2>&1; echo $$?), 1)
+texlive: msg-texlive
+ $(error The 'xelatex' command was not found)
+else
+texlive:
+ @:
+endif
+
+msg-texlive:
+ $(Q)echo "\n\
+The TeX/PDF output and the *math* extension require TeXLive and latexmk:\n\n\
+ Make sure you have an updated TeXLive with the XeTeX engine installed, grab\n\
+ it from https://www.tug.org/texlive or install it from your package manager.\n\n\
+ Install latexmk from your package manager or visit https://ctan.org/pkg/latexmk\n\n\
+ Sphinx-doc produces (Xe)LaTeX files which might use additional TeX packages\n\
+ and fonts. To process these LaTeX files, a TeXLive installation with the\n\
+ additional packages is required. On debian based OS these requirements\n\
+ are installed by::\n\n\
+ sudo -H apt-get install\n\
+ latexmk\n\
+ texlive-base texlive-xetex texlive-latex-recommended\n\
+ texlive-extra-utils dvipng ttf-dejavu\n"
+
+# ------------------------------------------------------------------------------
+# commands
+# ------------------------------------------------------------------------------
+
+# $2 sphinx builder e.g. "html"
+# $3 path where configuration file (conf.py) is located
+# $4 sourcedir
+# $5 dest subfolder e.g. "man" for man pages at $(DOCS_DIST)/man
+
+quiet_cmd_sphinx = SPHINX $@ --> file://$(abspath $(DOCS_DIST)/$5)
+ cmd_sphinx = SPHINX_CONF=$(abspath $4/$(SPHINX_CONF))\
+ $(SPHINXBUILD) $(SPHINX_VERBOSE) $(SPHINXOPTS)\
+ -b $2 -c $3 -d $(DOCS_BUILD)/.doctrees $4 $(DOCS_DIST)/$5
+
+quiet_cmd_sphinx_autobuild = SPHINX $@ --> file://$(abspath $(DOCS_DIST)/$5)
+ cmd_sphinx_autobuild = PATH="$(PY_ENV_BIN):$(PATH)" $(PY_ENV_BIN)/sphinx-autobuild $(SPHINX_VERBOSE) --poll -B --host 0.0.0.0 --port 8080 $(SPHINXOPTS)\
+ -b $2 -c $3 -d $(DOCS_BUILD)/.doctrees $4 $(DOCS_DIST)/$5
+
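+# Illustrative call site (sketch only, not part of this file): a top-level
+# Makefile "docs" target could invoke the macro above like this, building the
+# HTML manual from $(DOCS_FOLDER) into $(DOCS_DIST)/html:
+#
+#   docs: sphinx-doc
+#       $(call cmd,sphinx,html,$(DOCS_FOLDER),$(DOCS_FOLDER),html)
+#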
+quiet_cmd_sphinx_clean = CLEAN $@
+ cmd_sphinx_clean = rm -rf $(DOCS_BUILD) $(DOCS_DIST) $(GH_PAGES)/* $(GH_PAGES)/.buildinfo
+
+# ------------------------------------------------------------------------------
+# targets
+# ------------------------------------------------------------------------------
+
+# build PDF of whole documentation in: $(DOCS_DIST)/pdf
+
+PHONY += sphinx-pdf
+sphinx-pdf: sphinx-latex
+ $(Q)cd $(DOCS_BUILD)/latex/; make all-pdf
+ $(Q)mkdir -p $(DOCS_DIST)/pdf
+ $(Q)cp $(DOCS_BUILD)/latex/*.pdf $(DOCS_DIST)/pdf
+ @echo "SPHINX *.pdf --> file://$(abspath $(DOCS_DIST)/pdf)"
+
+PHONY += sphinx-latex
+sphinx-latex: texlive sphinx-doc
+ $(SPHINXBUILD) $(SPHINX_VERBOSE) $(SPHINXOPTS)\
+ -b latex \
+ -c $(DOCS_FOLDER) \
+ -d $(DOCS_BUILD)/.doctrees \
+ $(DOCS_FOLDER) \
+ $(DOCS_BUILD)/latex
+
+# Sphinx projects, we call them *books* (which is the more common term). Books are
+# folders under $(BOOKS_FOLDER) containing a conf.py file. The HTML output goes
+# to folder $(BOOKS_DIST)/<name> while PDF is placed (BOOKS_DIST)/<name>/pdf
+
+BOOKS=$(patsubst $(BOOKS_FOLDER)/%/conf.py,books/%,$(wildcard $(BOOKS_FOLDER)/*/conf.py))
+
+# fine grained targets
+BOOKS_HTML = $(patsubst %,%.html,$(BOOKS))
+BOOKS_CLEAN = $(patsubst %,%.clean,$(BOOKS))
+BOOKS_LATEX = $(patsubst %,%.latex,$(BOOKS))
+BOOKS_PDF = $(patsubst %,%.pdf,$(BOOKS))
+BOOKS_LIVE = $(patsubst %,%.live,$(BOOKS))
+
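+# Example (illustrative only): assuming a hypothetical book at
+# $(BOOKS_FOLDER)/example/conf.py, the target lists above and the rules below
+# provide books/example.html, books/example.pdf, books/example.latex,
+# books/example.live and books/example.clean, e.g.::
+#
+#   make books/example.html
+#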
+$(BOOKS_DIST):
+ mkdir -p $(BOOKS_DIST)
+
+PHONY += $(BOOKS_HTML)
+$(BOOKS_HTML): sphinx-doc | $(BOOKS_DIST)
+ SPHINX_CONF=$(patsubst books/%.html,%,$@)/conf.py \
+ $(SPHINXBUILD) $(SPHINX_VERBOSE) $(SPHINXOPTS)\
+ -b html \
+ -c $(DOCS_FOLDER) \
+ -d $(DOCS_BUILD)/books/$(patsubst books/%.html,%,$@)/.doctrees \
+ $(patsubst books/%.html,%,$@) \
+ $(BOOKS_DIST)/$(patsubst books/%.html,%,$@)
+ @echo "SPHINX $@ --> file://$(abspath $(BOOKS_DIST)/$(patsubst books/%.html,%,$@))"
+
+PHONY += $(BOOKS_HTML)
+$(BOOKS_LIVE): sphinx-live | $(BOOKS_DIST)
+ PATH="$(PY_ENV_BIN):$(PATH)" \
+ SPHINX_CONF=$(patsubst books/%.live,%,$@)/conf.py \
+ $(PY_ENV_BIN)/sphinx-autobuild --poll -B --host 0.0.0.0 --port 8080 $(SPHINX_VERBOSE) $(SPHINXOPTS)\
+ -b html \
+ -c $(DOCS_FOLDER) \
+ -d $(DOCS_BUILD)/books/$(patsubst books/%.live,%,$@)/.doctrees \
+ $(patsubst books/%.live,%,$@) \
+ $(BOOKS_DIST)/$(patsubst books/%.live,%,$@)
+
+$(BOOKS_PDF): %.pdf : %.latex
+ $(Q)cd $(DOCS_BUILD)/latex/$(patsubst books/%.pdf,%,$@); make all-pdf
+ $(Q)mkdir -p $(BOOKS_DIST)/$(patsubst books/%.pdf,%,$@)/pdf
+ $(Q)cp -v $(DOCS_BUILD)/latex/$(patsubst books/%.pdf,%,$@)/*.pdf $(BOOKS_DIST)/$(patsubst books/%.pdf,%,$@)/pdf
+ @echo "SPHINX $@ --> file://$(abspath $(BOOKS_DIST)/$(patsubst books/%.pdf,%,$@))/pdf"
+
+PHONY += $(BOOKS_LATEX)
+$(BOOKS_LATEX): sphinx-doc | $(BOOKS_DIST)
+ SPHINX_CONF=$(patsubst books/%.latex,%,$@)/conf.py \
+ $(SPHINXBUILD) $(SPHINX_VERBOSE) $(SPHINXOPTS)\
+ -b latex \
+ -c $(DOCS_FOLDER) \
+ -d $(DOCS_BUILD)/books/$(patsubst books/%.latex,%,$@)/.doctrees \
+ $(patsubst books/%.latex,%,$@) \
+ $(DOCS_BUILD)/latex/$(patsubst books/%.latex,%,$@)
+ @echo "SPHINX $@ --> file://$(abspath $(DOCS_BUILD)/latex/$(patsubst books/%.latex,%,$@))"
+
+$(BOOKS_CLEAN):
+ $(Q)rm -rf $(BOOKS_DIST)/$(patsubst books/%.clean,%,$@) \
+ $(DOCS_BUILD)/books/$(patsubst books/%.clean,%,$@) \
+ $(DOCS_BUILD)/latex/$(patsubst books/%.clean,%,$@)
+
+# github pages
+
+PHONY += $(GH_PAGES)
+$(GH_PAGES)::
+ $(MAKE) docs
+ [ -d "gh-pages/.git" ] || git clone $(GIT_URL) gh-pages
+ -cd $(GH_PAGES); git checkout gh-pages >/dev/null
+ -cd $(GH_PAGES); ls -A | grep -v '.git$$' | xargs rm -rf
+ cp -r $(DOCS_DIST)/* $(GH_PAGES)/
+ touch $(GH_PAGES)/.nojekyll
+ echo "<html><head><META http-equiv='refresh' content='0;URL=index.html'></head></html>" > $(GH_PAGES)/404.html
+ cd $(GH_PAGES);\
+ git add --all . ;\
+ git commit -m "gh-pages: updated" ;\
+ git push origin gh-pages
+
+
+PHONY += docs-clean
+docs-clean: $(BOOKS_CLEAN)
+ $(call cmd,sphinx_clean)
+
+.PHONY: $(PHONY)