Diffstat (limited to 'searx/engines')
-rw-r--r--   searx/engines/archlinux.py       |  4
-rw-r--r--   searx/engines/bing_news.py       |  2
-rw-r--r--   searx/engines/brave.py           | 10
-rw-r--r--   searx/engines/bt4g.py            |  4
-rw-r--r--   searx/engines/duckduckgo.py      |  4
-rw-r--r--   searx/engines/google.py          | 10
-rw-r--r--   searx/engines/google_news.py     |  4
-rw-r--r--   searx/engines/odysee.py          |  2
-rw-r--r--   searx/engines/openstreetmap.py   |  4
-rw-r--r--   searx/engines/piped.py           |  2
-rw-r--r--   searx/engines/qwant.py           |  2
-rw-r--r--   searx/engines/sqlite.py          |  2
-rw-r--r--   searx/engines/torznab.py         |  2
-rw-r--r--   searx/engines/wikidata.py        |  2
-rw-r--r--   searx/engines/xpath.py           |  2
15 files changed, 28 insertions, 28 deletions
diff --git a/searx/engines/archlinux.py b/searx/engines/archlinux.py
index 17bb1b6c5..9d5009ed7 100644
--- a/searx/engines/archlinux.py
+++ b/searx/engines/archlinux.py
@@ -92,7 +92,7 @@ def response(resp):
 def fetch_traits(engine_traits: EngineTraits):
-    """Fetch languages from Archlinix-Wiki. The location of the Wiki address of a
+    """Fetch languages from Archlinux-Wiki. The location of the Wiki address of a
     language is mapped in a :py:obj:`custom field
     <searx.enginelib.traits.EngineTraits.custom>` (``wiki_netloc``). Depending
     on the location, the ``title`` argument in the request is translated.
@@ -128,7 +128,7 @@ def fetch_traits(engine_traits: EngineTraits):
     resp = get('https://wiki.archlinux.org/')
     if not resp.ok:  # type: ignore
-        print("ERROR: response from wiki.archlinix.org is not OK.")
+        print("ERROR: response from wiki.archlinux.org is not OK.")
     dom = lxml.html.fromstring(resp.text)  # type: ignore
     for a in eval_xpath_list(dom, "//a[@class='interlanguage-link-target']"):
diff --git a/searx/engines/bing_news.py b/searx/engines/bing_news.py
index d8c63857a..18992e2d1 100644
--- a/searx/engines/bing_news.py
+++ b/searx/engines/bing_news.py
@@ -138,7 +138,7 @@ def fetch_traits(engine_traits: EngineTraits):
     The :py:obj:`description <searx.engines.bing_news.bing_traits_url>` of the
     first table says *"query parameter when calling the Video Search API."*
-    .. thats why I use the 4. table "News Category API markets" for the
+    .. that's why I use the 4. table "News Category API markets" for the
     ``xpath_market_codes``.
     """
diff --git a/searx/engines/brave.py b/searx/engines/brave.py
index 7770a2c90..13e262fa3 100644
--- a/searx/engines/brave.py
+++ b/searx/engines/brave.py
@@ -37,7 +37,7 @@ Brave regions
 =============
 Brave uses two-digit tags for the regions like ``ca`` while SearXNG deals with
-locales. To get a mapping, all *officatl de-facto* languages of the Brave
+locales. To get a mapping, all *officiat de-facto* languages of the Brave
 region are mapped to regions in SearXNG (see :py:obj:`babel
 <babel.languages.get_official_languages>`):
@@ -63,10 +63,10 @@ region are mapped to regions in SearXNG (see :py:obj:`babel
 Brave languages
 ===============
-Brave's language support is limited to the UI (menues, area local notations,
+Brave's language support is limited to the UI (menus, area local notations,
 etc). Brave's index only seems to support a locale, but it does not seem to
 support any languages in its index. The choice of available languages is very
-small (and its not clear to me where the differencee in UI is when switching
+small (and its not clear to me where the difference in UI is when switching
 from en-us to en-ca or en-gb).
 In the :py:obj:`EngineTraits object <searx.enginelib.traits.EngineTraits>` the
@@ -264,7 +264,7 @@ def _parse_search(resp):
         )
         if video_tag is not None:
-            # In my tests a video tag in the WEB search was mostoften not a
+            # In my tests a video tag in the WEB search was most often not a
             # video, except the ones from youtube ..
             iframe_src = _get_iframe_src(url)
@@ -405,7 +405,7 @@ def fetch_traits(engine_traits: EngineTraits):
         # country_name = extract_text(flag.xpath('./following-sibling::*')[0])
         country_tag = re.search(r'flag-([^\s]*)\s', flag.xpath('./@class')[0]).group(1)  # type: ignore
-        # add offical languages of the country ..
+        # add official languages of the country ..
         for lang_tag in babel.languages.get_official_languages(country_tag, de_facto=True):
             lang_tag = lang_map.get(lang_tag, lang_tag)
             sxng_tag = region_tag(babel.Locale.parse('%s_%s' % (lang_tag, country_tag.upper())))
diff --git a/searx/engines/bt4g.py b/searx/engines/bt4g.py
index 34717aeaf..786aa1920 100644
--- a/searx/engines/bt4g.py
+++ b/searx/engines/bt4g.py
@@ -67,11 +67,11 @@ or ``time``.
 .. hint::
-   When *time_range* is activate, the results always orderd by ``time``.
+   When *time_range* is activate, the results always ordered by ``time``.
 """
 bt4g_category = 'all'
-"""BT$G offers categoies: ``all`` (default), ``audio``, ``movie``, ``doc``,
+"""BT$G offers categories: ``all`` (default), ``audio``, ``movie``, ``doc``,
 ``app`` and `` other``.
 """
diff --git a/searx/engines/duckduckgo.py b/searx/engines/duckduckgo.py
index edd586f78..ebb4745b9 100644
--- a/searx/engines/duckduckgo.py
+++ b/searx/engines/duckduckgo.py
@@ -105,7 +105,7 @@ def get_vqd(query):
 def get_ddg_lang(eng_traits: EngineTraits, sxng_locale, default='en_US'):
     """Get DuckDuckGo's language identifier from SearXNG's locale.
-    DuckDuckGo defines its lanaguages by region codes (see
+    DuckDuckGo defines its languages by region codes (see
     :py:obj:`fetch_traits`). To get region and language of a DDG service use:
@@ -338,7 +338,7 @@ def fetch_traits(engine_traits: EngineTraits):
     ``Accept-Language`` HTTP header. The value in ``engine_traits.all_locale``
     is ``wt-wt`` (the region).
-    Beside regions DuckDuckGo also defines its lanaguages by region codes. By
+    Beside regions DuckDuckGo also defines its languages by region codes. By
     example these are the english languages in DuckDuckGo:
     - en_US
diff --git a/searx/engines/google.py b/searx/engines/google.py
index 377c8db39..51c6acbf2 100644
--- a/searx/engines/google.py
+++ b/searx/engines/google.py
@@ -22,7 +22,7 @@ import babel.core
 import babel.languages
 from searx.utils import extract_text, eval_xpath, eval_xpath_list, eval_xpath_getindex
-from searx.locales import language_tag, region_tag, get_offical_locales
+from searx.locales import language_tag, region_tag, get_official_locales
 from searx.network import get  # see https://github.com/searxng/searxng/issues/762
 from searx.exceptions import SearxEngineCaptchaException
 from searx.enginelib.traits import EngineTraits
@@ -184,8 +184,8 @@ def get_google_info(params, eng_traits):
     #
     # To select 'all' languages an empty 'lr' value is used.
     #
-    # Different to other google services, Google Schloar supports to select more
-    # than one language. The languages are seperated by a pipe '|' (logical OR).
+    # Different to other google services, Google Scholar supports to select more
+    # than one language. The languages are separated by a pipe '|' (logical OR).
     # By example: &lr=lang_zh-TW%7Clang_de selects articles written in
     # traditional chinese OR german language.
@@ -204,7 +204,7 @@ def get_google_info(params, eng_traits):
     if len(sxng_locale.split('-')) > 1:
         ret_val['params']['cr'] = 'country' + country
-    # gl parameter: (mandatory by Geeogle News)
+    # gl parameter: (mandatory by Google News)
     # The gl parameter value is a two-letter country code. For WebSearch
     # results, the gl parameter boosts search results whose country of origin
     # matches the parameter value. See the Country Codes section for a list of
@@ -465,7 +465,7 @@ def fetch_traits(engine_traits: EngineTraits, add_domains: bool = True):
             engine_traits.all_locale = 'ZZ'
             continue
-        sxng_locales = get_offical_locales(eng_country, engine_traits.languages.keys(), regional=True)
+        sxng_locales = get_official_locales(eng_country, engine_traits.languages.keys(), regional=True)
         if not sxng_locales:
             print("ERROR: can't map from google country %s (%s) to a babel region." % (x.get('data-name'), eng_country))
diff --git a/searx/engines/google_news.py b/searx/engines/google_news.py
index 4b1bffa30..a5ed63bbe 100644
--- a/searx/engines/google_news.py
+++ b/searx/engines/google_news.py
@@ -72,7 +72,7 @@ time_range_support = False
 # Google-News results are always *SafeSearch*. Option 'safesearch' is set to
 # False here, otherwise checker will report safesearch-errors::
 #
-#  safesearch : results are identitical for safesearch=0 and safesearch=2
+#  safesearch : results are identical for safesearch=0 and safesearch=2
 safesearch = True
 # send_accept_language_header = True
@@ -155,7 +155,7 @@ def response(resp):
         title = extract_text(eval_xpath(result, './article/h3[1]'))
-        # The pub_date is mostly a string like 'yesertday', not a real
+        # The pub_date is mostly a string like 'yesterday', not a real
         # timezone date or time. Therefore we can't use publishedDate.
         pub_date = extract_text(eval_xpath(result, './article//time'))
         pub_origin = extract_text(eval_xpath(result, './article//a[@data-n-tid]'))
diff --git a/searx/engines/odysee.py b/searx/engines/odysee.py
index 89d11d093..3189ab4c6 100644
--- a/searx/engines/odysee.py
+++ b/searx/engines/odysee.py
@@ -1,6 +1,6 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
 # lint: pylint
-"""Odysee_ is a decentralised video hosting platform.
+"""Odysee_ is a decentralized video hosting platform.
 .. _Odysee: https://github.com/OdyseeTeam/odysee-frontend
 """
diff --git a/searx/engines/openstreetmap.py b/searx/engines/openstreetmap.py
index 650bfedd1..68ec0549e 100644
--- a/searx/engines/openstreetmap.py
+++ b/searx/engines/openstreetmap.py
@@ -162,7 +162,7 @@ def response(resp):
             }
         )
-    # simplify the code below: make sure extratags is a dictionnary
+    # simplify the code below: make sure extratags is a dictionary
     for result in nominatim_json:
         if not isinstance(result.get('extratags'), dict):
             result["extratags"] = {}
@@ -445,7 +445,7 @@ def get_key_label(key_name, lang):
     if key_name.startswith('currency:'):
         # currency:EUR --> get the name from the CURRENCIES variable
         # see https://wiki.openstreetmap.org/wiki/Key%3Acurrency
-        # and for exampe https://taginfo.openstreetmap.org/keys/currency:EUR#values
+        # and for example https://taginfo.openstreetmap.org/keys/currency:EUR#values
         # but there is also currency=EUR (currently not handled)
         # https://taginfo.openstreetmap.org/keys/currency#values
         currency = key_name.split(':')
diff --git a/searx/engines/piped.py b/searx/engines/piped.py
index 34a5a0a13..a943f7e3a 100644
--- a/searx/engines/piped.py
+++ b/searx/engines/piped.py
@@ -72,7 +72,7 @@ paging = True
 backend_url: list | str = "https://pipedapi.kavin.rocks"
 """Piped-Backend_: The core component behind Piped. The value is an URL or a
 list of URLs. In the latter case instance will be selected randomly. For a
-complete list of offical instances see Piped-Instances (`JSON
+complete list of official instances see Piped-Instances (`JSON
 <https://piped-instances.kavin.rocks/>`__)
 .. _Piped-Instances: https://github.com/TeamPiped/Piped/wiki/Instances
diff --git a/searx/engines/qwant.py b/searx/engines/qwant.py
index e54df5af6..654a76337 100644
--- a/searx/engines/qwant.py
+++ b/searx/engines/qwant.py
@@ -76,7 +76,7 @@ about = {
 categories = []
 paging = True
 qwant_categ = None
-"""One of ``web``, ``news``, ``images`` or ``videos``"""
+"""One of ``web-lite`` (or ``web``), ``news``, ``images`` or ``videos``"""
 safesearch = True
 # safe_search_map = {0: '&safesearch=0', 1: '&safesearch=1', 2: '&safesearch=2'}
diff --git a/searx/engines/sqlite.py b/searx/engines/sqlite.py
index c86df5867..76e83af8a 100644
--- a/searx/engines/sqlite.py
+++ b/searx/engines/sqlite.py
@@ -10,7 +10,7 @@ Example
 To demonstrate the power of database engines, here is a more complex example
 which reads from a MediathekView_ (DE) movie database. For this example of the
-SQlite engine download the database:
+SQLite engine download the database:
 - https://liste.mediathekview.de/filmliste-v2.db.bz2
diff --git a/searx/engines/torznab.py b/searx/engines/torznab.py
index 0692d4a7a..101cab9e3 100644
--- a/searx/engines/torznab.py
+++ b/searx/engines/torznab.py
@@ -23,7 +23,7 @@ The engine has the following settings:
   Prowlarr-categories_ or Jackett-categories_ for more information.
 ``show_torrent_files``:
-  Whether to show the torrent file in the search results. Be carful as using
+  Whether to show the torrent file in the search results. Be careful as using
   this with Prowlarr_ or Jackett_ leaks the API key. This should be used only
   if you are querying a Torznab endpoint without authentication or if the
   instance is private. Be aware that private trackers may ban you if you share
diff --git a/searx/engines/wikidata.py b/searx/engines/wikidata.py
index 34d408158..5779daa0b 100644
--- a/searx/engines/wikidata.py
+++ b/searx/engines/wikidata.py
@@ -287,7 +287,7 @@ def get_results(attribute_result, attributes, language):
         elif attribute_type == WDGeoAttribute:
             # geocoordinate link
             # use the area to get the OSM zoom
-            # Note: ignre the unit (must be km² otherwise the calculation is wrong)
+            # Note: ignore the unit (must be km² otherwise the calculation is wrong)
             # Should use normalized value p:P2046/psn:P2046/wikibase:quantityAmount
             area = attribute_result.get('P2046')
             osm_zoom = area_to_osm_zoom(area) if area else 19
diff --git a/searx/engines/xpath.py b/searx/engines/xpath.py
index 51ddcda78..442e90595 100644
--- a/searx/engines/xpath.py
+++ b/searx/engines/xpath.py
@@ -86,7 +86,7 @@ Replacements are:
   Search terms from user.
 ``{pageno}``:
-  Page number if engine supports pagging :py:obj:`paging`
+  Page number if engine supports paging :py:obj:`paging`
 ``{lang}``:
   ISO 639-1 language code (en, de, fr ..)
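
Reading aid (not part of the diff above): the brave.py and google.py hunks both touch the country-to-locale mapping done with babel, including the rename from get_offical_locales to get_official_locales and the babel.languages.get_official_languages() call in brave.py's fetch_traits(). The following minimal, self-contained sketch shows the general idea of expanding a two-letter country tag into region locale tags; it uses only the public babel API, and the helper name official_region_tags is made up for illustration, it is not SearXNG code.

# Sketch only: expand a country tag such as 'ca' into region tags like
# 'en-CA' / 'fr-CA' via the country's official (de-facto) languages.
from babel import Locale
from babel.languages import get_official_languages

def official_region_tags(country_tag: str) -> list[str]:
    # hypothetical helper, for illustration only
    tags = []
    for lang_tag in get_official_languages(country_tag, de_facto=True):
        locale = Locale.parse('%s_%s' % (lang_tag, country_tag.upper()))
        tags.append('%s-%s' % (locale.language, locale.territory))
    return tags

print(official_region_tags('ca'))  # expected output along the lines of ['en-CA', 'fr-CA']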
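
Reading aid (not part of the diff above): the xpath.py hunk edits the docstring that lists the {query}, {pageno} and {lang} placeholders of the generic XPath engine's search_url. A tiny illustration of that template mechanism follows; the URL below is invented, and the real engine supports more replacements and configurable quoting than this sketch shows.

# Sketch only: fill an xpath-engine style search_url template.
from urllib.parse import quote_plus

search_url = 'https://example.org/search?q={query}&p={pageno}&hl={lang}'  # hypothetical endpoint

def build_url(query: str, pageno: int = 1, lang: str = 'en') -> str:
    # substitute the placeholders, URL-quoting the search terms
    return search_url.format(query=quote_plus(query), pageno=pageno, lang=lang)

print(build_url('free software', pageno=2, lang='de'))
# -> https://example.org/search?q=free+software&p=2&hl=de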