path: root/searx/plugins/hostnames.py
# SPDX-License-Identifier: AGPL-3.0-or-later
# pylint: disable=missing-module-docstring

import re
from urllib.parse import urlunparse, urlparse

from flask_babel import gettext

from searx import settings
from searx.plugins import logger

name = gettext('Hostnames plugin')
description = gettext('Rewrite hostnames, remove results or prioritize them based on the hostname')
default_on = False
preference_section = 'general'

plugin_id = 'hostnames'

# Compile the configured patterns once at import time. The 'replace' table
# falls back to the older top-level 'hostname_replace' setting; every lookup
# uses settings.get(plugin_id, {}) so a missing 'hostnames' section does not
# raise a KeyError.
replacements = {
    re.compile(p): r
    for (p, r) in settings.get(plugin_id, {}).get('replace', settings.get('hostname_replace', {})).items()
}
removables = {re.compile(p) for p in settings.get(plugin_id, {}).get('remove', [])}
high_priority = {re.compile(p) for p in settings.get(plugin_id, {}).get('high_priority', [])}
low_priority = {re.compile(p) for p in settings.get(plugin_id, {}).get('low_priority', [])}
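
# The lookups above expect a 'hostnames' section in settings.yml along these
# lines (an illustrative sketch -- the regular expressions and target
# hostnames are placeholders, not recommendations):
#
#   hostnames:
#     replace:
#       '(.*\.)?youtube\.com$': 'invidious.example.org'
#     remove:
#       - '(.*\.)?facebook\.com$'
#     low_priority:
#       - '(.*\.)?google\.com$'
#     high_priority:
#       - '(.*\.)?wikipedia\.org$'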

logger = logger.getChild(plugin_id)
parsed = 'parsed_url'
# result fields (besides 'url') that may also carry URLs to rewrite or drop
_url_fields = ['iframe_src', 'audio_src']


def _matches_parsed_url(result, pattern):
    return parsed in result and pattern.search(result[parsed].netloc)


def on_result(_request, _search, result):
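    """Filter a single result.

    In order: rewrite matching hostnames in ``parsed_url``/``url`` and the
    extra URL fields, drop results whose hostname matches a ``remove``
    pattern, then tag low/high priority.  Returning ``False`` removes the
    result entirely; ``True`` keeps it.
    """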
    for pattern, replacement in replacements.items():
        if _matches_parsed_url(result, pattern):
            logger.debug(result['url'])
            result[parsed] = result[parsed]._replace(netloc=pattern.sub(replacement, result[parsed].netloc))
            result['url'] = urlunparse(result[parsed])
            logger.debug(result['url'])

        for url_field in _url_fields:
            if not result.get(url_field):
                continue

            url_src = urlparse(result[url_field])
            if pattern.search(url_src.netloc):
                url_src = url_src._replace(netloc=pattern.sub(replacement, url_src.netloc))
                result[url_field] = urlunparse(url_src)

    for pattern in removables:
        if _matches_parsed_url(result, pattern):
            return False

        for url_field in _url_fields:
            if not result.get(url_field):
                continue

            url_src = urlparse(result[url_field])
            if pattern.search(url_src.netloc):
                del result[url_field]

    for pattern in low_priority:
        if _matches_parsed_url(result, pattern):
            result['priority'] = 'low'

    for pattern in high_priority:
        if _matches_parsed_url(result, pattern):
            result['priority'] = 'high'

    return True
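
# To activate this plugin, list its name under 'enabled_plugins' in
# settings.yml (a sketch -- the plugin is off by default, see 'default_on'
# above):
#
#   enabled_plugins:
#     - 'Hostnames plugin'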