author | Markus Heiser <markus.heiser@darmarit.de> | 2021-05-25 16:45:32 +0200
---|---|---
committer | Markus Heiser <markus.heiser@darmarit.de> | 2021-05-25 16:45:32 +0200
commit | a88e3e4fea0e030bc70f79755df6f49ebc42be5b (patch) |
tree | a73e561f0db7259a8f444b7e0ef58385de10187e /searx/engines/unsplash.py |
parent | 8283ce3d79b67bad0820dad7e02ab8140ec44898 (diff) |
[pylint] searx/engines/unsplash.py, add logger & norm indentation
- fix messages from pylint
- add logger and log request URL (see the sketch after this message)
- normalize various indentation
Signed-off-by: Markus Heiser <markus.heiser@darmarit.de>
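
For readers unfamiliar with the pattern, `logger.getChild()` is the stdlib `logging` idiom for deriving a per-engine child logger from a shared parent. A minimal, self-contained sketch, using `logging.getLogger('searx')` as a stand-in for the project's `from searx import logger` import:

```python
import logging

# Stand-in for `from searx import logger` -- assumed here to be an ordinary
# stdlib Logger that searx configures at startup.
logger = logging.getLogger('searx').getChild('unsplash engine')

logging.basicConfig(level=logging.DEBUG)

# %-style lazy formatting: the URL is only interpolated when DEBUG is enabled,
# matching the logger.debug() call added in the diff below.
logger.debug("query_url --> %s",
             "https://unsplash.com/napi/search/photos?query=example")
```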
Diffstat (limited to 'searx/engines/unsplash.py')
-rw-r--r-- | searx/engines/unsplash.py | 49
1 file changed, 31 insertions, 18 deletions
```diff
diff --git a/searx/engines/unsplash.py b/searx/engines/unsplash.py
index 3bbdf630d..95a80a47c 100644
--- a/searx/engines/unsplash.py
+++ b/searx/engines/unsplash.py
@@ -1,11 +1,16 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-"""
-    Unsplash
+# lint: pylint
+# pylint: disable=missing-function-docstring
+"""Unsplash
+
 """
 
 from urllib.parse import urlencode, urlparse, urlunparse, parse_qsl
 from json import loads
+from searx import logger
+
+logger = logger.getChild('unsplash engine')
 
 # about
 about = {
     "website": 'https://unsplash.com',
@@ -16,8 +21,8 @@ about = {
     "results": 'JSON',
 }
 
-url = 'https://unsplash.com/'
-search_url = url + 'napi/search/photos?'
+base_url = 'https://unsplash.com/'
+search_url = base_url + 'napi/search/photos?'
 categories = ['images']
 page_size = 20
 paging = True
@@ -25,18 +30,24 @@ paging = True
 
 def clean_url(url):
     parsed = urlparse(url)
-    query = [(k, v) for (k, v) in parse_qsl(parsed.query) if k not in ['ixid', 's']]
+    query = [(k, v) for (k, v)
+             in parse_qsl(parsed.query) if k not in ['ixid', 's']]
 
-    return urlunparse((parsed.scheme,
-                       parsed.netloc,
-                       parsed.path,
-                       parsed.params,
-                       urlencode(query),
-                       parsed.fragment))
+    return urlunparse((
+        parsed.scheme,
+        parsed.netloc,
+        parsed.path,
+        parsed.params,
+        urlencode(query),
+        parsed.fragment
+    ))
 
 
 def request(query, params):
-    params['url'] = search_url + urlencode({'query': query, 'page': params['pageno'], 'per_page': page_size})
+    params['url'] = search_url + urlencode({
+        'query': query, 'page': params['pageno'], 'per_page': page_size
+    })
+    logger.debug("query_url --> %s", params['url'])
 
     return params
 
@@ -46,10 +57,12 @@ def response(resp):
     if 'results' in json_data:
         for result in json_data['results']:
-            results.append({'template': 'images.html',
-                            'url': clean_url(result['links']['html']),
-                            'thumbnail_src': clean_url(result['urls']['thumb']),
-                            'img_src': clean_url(result['urls']['raw']),
-                            'title': result['description'],
-                            'content': ''})
+            results.append({
+                'template': 'images.html',
+                'url': clean_url(result['links']['html']),
+                'thumbnail_src': clean_url(result['urls']['thumb']),
+                'img_src': clean_url(result['urls']['raw']),
+                'title': result['description'],
+                'content': ''
+            })
 
     return results
```
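
Since `clean_url()` uses only the stdlib, the reindented helper can be exercised in isolation. A quick sketch with a made-up Unsplash photo URL (the `ixid` and `s` values below are hypothetical):

```python
from urllib.parse import urlencode, urlparse, urlunparse, parse_qsl

def clean_url(url):
    # Drop Unsplash's 'ixid' and 's' tracking parameters, keep everything else.
    parsed = urlparse(url)
    query = [(k, v) for (k, v)
             in parse_qsl(parsed.query) if k not in ['ixid', 's']]
    return urlunparse((
        parsed.scheme,
        parsed.netloc,
        parsed.path,
        parsed.params,
        urlencode(query),
        parsed.fragment
    ))

print(clean_url('https://images.unsplash.com/photo-1?ixid=MXwxMjA3&s=abc123&w=400&fit=max'))
# -> https://images.unsplash.com/photo-1?w=400&fit=max
```

Filtering out those two parameters keeps result links stable across searches, presumably so that per-search tracking tokens are not passed on to users.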