summary refs log tree commit diff
path: root/tests
diff options
context:
space:
mode:
author: Adam Tauber <asciimoo@gmail.com> 2020-10-26 14:20:58 +0100
committer: GitHub <noreply@github.com> 2020-10-26 14:20:58 +0100
commit: db703a0283ee169381aeea97c678e666ae508348 (patch)
tree: 6b992653be4ab7905f2b7bf27d98d64cb15570fe /tests
parent: 2aef38c3b9d1fe93e9d665a49b10151d63d92392 (diff)
parent: 32957cdf49c306a5f50ca78bb50c0978ffe5c072 (diff)
download: searxng-db703a0283ee169381aeea97c678e666ae508348.tar.gz
          searxng-db703a0283ee169381aeea97c678e666ae508348.zip
Merge pull request #565 from MarcAbonce/onions
New category: Onions
Diffstat (limited to 'tests')
-rw-r--r--  tests/unit/engines/test_xpath.py  | 121
-rw-r--r--  tests/unit/test_engines_init.py   |  44
2 files changed, 165 insertions, 0 deletions
diff --git a/tests/unit/engines/test_xpath.py b/tests/unit/engines/test_xpath.py
new file mode 100644
index 000000000..963a44a25
--- /dev/null
+++ b/tests/unit/engines/test_xpath.py
@@ -0,0 +1,121 @@
+# -*- coding: utf-8 -*-
+from collections import defaultdict
+import mock
+from searx.engines import xpath
+from searx.testing import SearxTestCase
+
+
+class TestXpathEngine(SearxTestCase):
+
+ def test_request(self):
+ xpath.search_url = 'https://url.com/{query}'
+ xpath.categories = []
+ xpath.paging = False
+ query = 'test_query'
+ dicto = defaultdict(dict)
+ params = xpath.request(query, dicto)
+ self.assertIn('url', params)
+ self.assertEquals('https://url.com/test_query', params['url'])
+
+ xpath.search_url = 'https://url.com/q={query}&p={pageno}'
+ xpath.paging = True
+ query = 'test_query'
+ dicto = defaultdict(dict)
+ dicto['pageno'] = 1
+ params = xpath.request(query, dicto)
+ self.assertIn('url', params)
+ self.assertEquals('https://url.com/q=test_query&p=1', params['url'])
+
+ def test_response(self):
+ # without results_xpath
+ xpath.url_xpath = '//div[@class="search_result"]//a[@class="result"]/@href'
+ xpath.title_xpath = '//div[@class="search_result"]//a[@class="result"]'
+ xpath.content_xpath = '//div[@class="search_result"]//p[@class="content"]'
+
+ self.assertRaises(AttributeError, xpath.response, None)
+ self.assertRaises(AttributeError, xpath.response, [])
+ self.assertRaises(AttributeError, xpath.response, '')
+ self.assertRaises(AttributeError, xpath.response, '[]')
+
+ response = mock.Mock(text='<html></html>')
+ self.assertEqual(xpath.response(response), [])
+
+ html = u"""
+ <div>
+ <div class="search_result">
+ <a class="result" href="https://result1.com">Result 1</a>
+ <p class="content">Content 1</p>
+ <a class="cached" href="https://cachedresult1.com">Cache</a>
+ </div>
+ <div class="search_result">
+ <a class="result" href="https://result2.com">Result 2</a>
+ <p class="content">Content 2</p>
+ <a class="cached" href="https://cachedresult2.com">Cache</a>
+ </div>
+ </div>
+ """
+ response = mock.Mock(text=html)
+ results = xpath.response(response)
+ self.assertEqual(type(results), list)
+ self.assertEqual(len(results), 2)
+ self.assertEqual(results[0]['title'], 'Result 1')
+ self.assertEqual(results[0]['url'], 'https://result1.com/')
+ self.assertEqual(results[0]['content'], 'Content 1')
+ self.assertEqual(results[1]['title'], 'Result 2')
+ self.assertEqual(results[1]['url'], 'https://result2.com/')
+ self.assertEqual(results[1]['content'], 'Content 2')
+
+ # with cached urls, without results_xpath
+ xpath.cached_xpath = '//div[@class="search_result"]//a[@class="cached"]/@href'
+ results = xpath.response(response)
+ self.assertEqual(type(results), list)
+ self.assertEqual(len(results), 2)
+ self.assertEqual(results[0]['cached_url'], 'https://cachedresult1.com')
+ self.assertEqual(results[1]['cached_url'], 'https://cachedresult2.com')
+ self.assertFalse(results[0].get('is_onion', False))
+
+ # results are onion urls (no results_xpath)
+ xpath.categories = ['onions']
+ results = xpath.response(response)
+ self.assertTrue(results[0]['is_onion'])
+
+ # with results_xpath
+ xpath.results_xpath = '//div[@class="search_result"]'
+ xpath.url_xpath = './/a[@class="result"]/@href'
+ xpath.title_xpath = './/a[@class="result"]'
+ xpath.content_xpath = './/p[@class="content"]'
+ xpath.cached_xpath = None
+ xpath.categories = []
+
+ self.assertRaises(AttributeError, xpath.response, None)
+ self.assertRaises(AttributeError, xpath.response, [])
+ self.assertRaises(AttributeError, xpath.response, '')
+ self.assertRaises(AttributeError, xpath.response, '[]')
+
+ response = mock.Mock(text='<html></html>')
+ self.assertEqual(xpath.response(response), [])
+
+ response = mock.Mock(text=html)
+ results = xpath.response(response)
+ self.assertEqual(type(results), list)
+ self.assertEqual(len(results), 2)
+ self.assertEqual(results[0]['title'], 'Result 1')
+ self.assertEqual(results[0]['url'], 'https://result1.com/')
+ self.assertEqual(results[0]['content'], 'Content 1')
+ self.assertEqual(results[1]['title'], 'Result 2')
+ self.assertEqual(results[1]['url'], 'https://result2.com/')
+ self.assertEqual(results[1]['content'], 'Content 2')
+
+ # with cached urls, with results_xpath
+ xpath.cached_xpath = './/a[@class="cached"]/@href'
+ results = xpath.response(response)
+ self.assertEqual(type(results), list)
+ self.assertEqual(len(results), 2)
+ self.assertEqual(results[0]['cached_url'], 'https://cachedresult1.com')
+ self.assertEqual(results[1]['cached_url'], 'https://cachedresult2.com')
+ self.assertFalse(results[0].get('is_onion', False))
+
+ # results are onion urls (with results_xpath)
+ xpath.categories = ['onions']
+ results = xpath.response(response)
+ self.assertTrue(results[0]['is_onion'])
diff --git a/tests/unit/test_engines_init.py b/tests/unit/test_engines_init.py
new file mode 100644
index 000000000..cf4d50309
--- /dev/null
+++ b/tests/unit/test_engines_init.py
@@ -0,0 +1,44 @@
+from searx.testing import SearxTestCase
+from searx import settings, engines
+
+
+class TestEnginesInit(SearxTestCase):
+    """Tests for searx.engines.initialize_engines: engines in the
+    'onions' category are only registered when a Tor proxy is
+    configured in the outgoing settings."""
+
+    @classmethod
+    def tearDownClass(cls):
+        # restore the global proxy settings mutated by the tests below
+        settings['outgoing']['using_tor_proxy'] = False
+        settings['outgoing']['extra_proxy_timeout'] = 0
+
+    def test_initialize_engines_default(self):
+        """Two plain engine definitions register under their names."""
+        engine_list = [{'engine': 'dummy', 'name': 'engine1', 'shortcut': 'e1'},
+                       {'engine': 'dummy', 'name': 'engine2', 'shortcut': 'e2'}]
+
+        engines.initialize_engines(engine_list)
+        self.assertEqual(len(engines.engines), 2)
+        self.assertIn('engine1', engines.engines)
+        self.assertIn('engine2', engines.engines)
+
+    def test_initialize_engines_exclude_onions(self):
+        """Without a Tor proxy, 'onions'-category engines are skipped
+        and the category itself is not registered."""
+        settings['outgoing']['using_tor_proxy'] = False
+        engine_list = [{'engine': 'dummy', 'name': 'engine1', 'shortcut': 'e1', 'categories': 'general'},
+                       {'engine': 'dummy', 'name': 'engine2', 'shortcut': 'e2', 'categories': 'onions'}]
+
+        engines.initialize_engines(engine_list)
+        self.assertEqual(len(engines.engines), 1)
+        self.assertIn('engine1', engines.engines)
+        self.assertNotIn('onions', engines.categories)
+
+    def test_initialize_engines_include_onions(self):
+        """With a Tor proxy: onion engines load, an engine's onion_url is
+        merged into its search_url, and extra_proxy_timeout (100.0) is
+        added to the engine timeout (20.0 -> 120.0)."""
+        settings['outgoing']['using_tor_proxy'] = True
+        settings['outgoing']['extra_proxy_timeout'] = 100.0
+        engine_list = [{'engine': 'dummy', 'name': 'engine1', 'shortcut': 'e1', 'categories': 'general',
+                        'timeout': 20.0, 'onion_url': 'http://engine1.onion'},
+                       {'engine': 'dummy', 'name': 'engine2', 'shortcut': 'e2', 'categories': 'onions'}]
+
+        engines.initialize_engines(engine_list)
+        self.assertEqual(len(engines.engines), 2)
+        self.assertIn('engine1', engines.engines)
+        self.assertIn('engine2', engines.engines)
+        self.assertIn('onions', engines.categories)
+        self.assertIn('http://engine1.onion', engines.engines['engine1'].search_url)
+        self.assertEqual(engines.engines['engine1'].timeout, 120.0)