author     Kyle Anthony Williams <kyle.anthony.williams2@gmail.com>  2021-02-07 13:34:29 -0500
committer  Markus Heiser <markus.heiser@darmarit.de>  2021-05-30 15:18:36 +0200
commit     d6a2d4f969369544cb4325a7870db00498878716 (patch)
tree       3977a936bb8a793491e69c637fecbce8e996e44d /searx/engines/docker_hub.py
parent     737f5f05d2bfdd2344578ba81e8832b9b752ee78 (diff)
download   searxng-d6a2d4f969369544cb4325a7870db00498878716.tar.gz
           searxng-d6a2d4f969369544cb4325a7870db00498878716.zip
[enh] add engine - Docker Hub
Slightly modified merge of commit [1cb1d3ac] from searx [PR 2543]:

    This adds Docker Hub .. as a search engine .. the engine's favicon was
    downloaded from the Docker Hub website with wget and converted to a PNG
    with ImageMagick .. It supports the parsing of URLs, titles, content,
    published dates, and thumbnails of Docker images.

[1cb1d3ac] https://github.com/searx/searx/pull/2543/commits/1cb1d3ac
[PR 2543]  https://github.com/searx/searx/pull/2543

Signed-off-by: Markus Heiser <markus.heiser@darmarit.de>
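For illustration only (not part of the commit): a made-up entry from the "summaries" array of the Docker Hub search API, and the result mapping that the engine in the diff below applies to it. The sample values are invented; the field names mirror the code in the diff.

    # Hypothetical API item; values are made up for illustration.
    from dateutil import parser

    item = {
        "name": "nginx",
        "slug": "nginx",
        "filter_type": "official",          # "store" / "official" -> library image
        "short_description": "Official build of Nginx.",
        "updated_at": "2021-02-01T10:00:00Z",
        "created_at": "2014-06-05T19:14:45Z",
        "logo_url": {"large": "https://example.org/nginx.png", "small": ""},
    }

    base_url = "https://hub.docker.com/"
    prefix = "_/" if item["filter_type"] in ["store", "official"] else "r/"
    result = {
        "url": base_url + prefix + item["slug"],   # https://hub.docker.com/_/nginx
        "title": item["name"],
        "content": item["short_description"],
        "publishedDate": parser.parse(item["updated_at"] or item["created_at"]),
        "thumbnail": item["logo_url"]["large"] or item["logo_url"]["small"],
    }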
Diffstat (limited to 'searx/engines/docker_hub.py')
-rw-r--r--  searx/engines/docker_hub.py  65

1 file changed, 65 insertions(+), 0 deletions(-)
diff --git a/searx/engines/docker_hub.py b/searx/engines/docker_hub.py
new file mode 100644
index 000000000..d9d0f745b
--- /dev/null
+++ b/searx/engines/docker_hub.py
@@ -0,0 +1,65 @@
+# SPDX-License-Identifier: AGPL-3.0-or-later
+# lint: pylint
+# pylint: disable=missing-function-docstring
+"""Docker Hub (IT)
+
+"""
+
+from json import loads
+from urllib.parse import urlencode
+from dateutil import parser
+
+about = {
+ "website": 'https://hub.docker.com',
+ "wikidata_id": 'Q100769064',
+ "official_api_documentation": 'https://docs.docker.com/registry/spec/api/',
+ "use_official_api": True,
+ "require_api_key": False,
+ "results": 'JSON',
+ }
+
+categories = ['it'] # optional
+paging = True
+
+base_url = "https://hub.docker.com/"
+search_url = base_url + "api/content/v1/products/search?{query}&type=image&page_size=25"
+
+def request(query, params):
+
+ params['url'] = search_url.format(query=urlencode(dict(q=query, page=params["pageno"])))
+ params["headers"]["Search-Version"] = "v3"
+
+ return params
+
+def response(resp):
+ '''post-response callback
+ resp: requests response object
+ '''
+ results = []
+ body = loads(resp.text)
+
+ # Make sure `summaries` isn't `null`
+ search_res = body.get("summaries")
+ if search_res:
+ for item in search_res:
+ result = {}
+
+ # Make sure correct URL is set
+ filter_type = item.get("filter_type")
+ is_official = filter_type in ["store", "official"]
+
+ if is_official:
+ result["url"] = base_url + "_/" + item.get('slug', "")
+ else:
+ result["url"] = base_url + "r/" + item.get('slug', "")
+ result["title"] = item.get("name")
+ result["content"] = item.get("short_description")
+ result["publishedDate"] = parser.parse(
+ item.get("updated_at") or item.get("created_at")
+ )
+ result["thumbnail"] = (
+ item["logo_url"].get("large") or item["logo_url"].get("small")
+ )
+ results.append(result)
+
+ return results
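
A rough usage sketch (not part of the commit) of how searx drives the request() hook above; the params dict here is a minimal stand-in for the one searx actually passes in, assuming the module can be imported on its own:

    from searx.engines import docker_hub

    params = {"headers": {}, "pageno": 1}
    params = docker_hub.request("alpine", params)
    print(params["url"])
    # https://hub.docker.com/api/content/v1/products/search?q=alpine&page=1&type=image&page_size=25
    print(params["headers"]["Search-Version"])  # v3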