| author | Thomas Pointhuber <thomas.pointhuber@gmx.at> | 2014-09-03 11:40:29 +0200 |
|---|---|---|
| committer | Thomas Pointhuber <thomas.pointhuber@gmx.at> | 2014-09-03 11:40:29 +0200 |
| commit | 52ad49ccba389346e6216dc708891cbea2b4941d (patch) | |
| tree | 84c5a729d5aa554e2c943a50fe931f4dfe25d267 /searx/engines/wikipedia.py | |
| parent | 629a05e149eaaab05a724dd3915ed363c364c796 (diff) | |
using general mediawiki-engine
* writing general mediawiki-engine
* using this engine for wikipedia
* using this engine for uncyclopedia
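The deleted Wikipedia-specific module below hard-codes its search URL. As a rough sketch of the idea behind this change, a general MediaWiki engine can expose the base URL as an engine setting so that Wikipedia and Uncyclopedia share the same request logic; the names `base_url` and `search_url` here are illustrative assumptions, not necessarily those used by the mediawiki engine this commit introduces.

```python
# Sketch only: one MediaWiki engine module whose base URL is a setting,
# so Wikipedia, Uncyclopedia, or any other MediaWiki site can reuse it.
# `base_url` is an assumed setting name for illustration.
from urllib import urlencode  # Python 2, matching the removed module

number_of_results = 1
base_url = 'https://{language}.wikipedia.org/'  # overridden per engine instance
search_url = base_url + 'w/api.php?action=query&list=search&{query}&srprop=timestamp&format=json&sroffset={offset}&srlimit={limit}'  # noqa


def request(query, params):
    # same logic as the deleted wikipedia.py, but built on a configurable base_url
    offset = (params['pageno'] - 1) * number_of_results
    language = 'en' if params['language'] == 'all' else params['language'].split('_')[0]
    params['language'] = language
    params['url'] = search_url.format(query=urlencode({'srsearch': query}),
                                      offset=offset,
                                      limit=number_of_results,
                                      language=language)
    return params

# A Wikipedia engine would then only set e.g.
#     base_url = 'https://{language}.wikipedia.org/'
# while an Uncyclopedia engine would point base_url at its own wiki.
```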
Diffstat (limited to 'searx/engines/wikipedia.py')
-rw-r--r-- | searx/engines/wikipedia.py | 67 |
1 file changed, 0 insertions, 67 deletions
diff --git a/searx/engines/wikipedia.py b/searx/engines/wikipedia.py
deleted file mode 100644
index ce9429776..000000000
--- a/searx/engines/wikipedia.py
+++ /dev/null
@@ -1,67 +0,0 @@
-## Wikipedia (Web)
-#
-# @website http://www.wikipedia.org
-# @provide-api yes (http://www.mediawiki.org/wiki/API:Search)
-#
-# @using-api yes
-# @results JSON
-# @stable yes
-# @parse url, title
-#
-# @todo content
-
-from json import loads
-from urllib import urlencode, quote
-
-# engine dependent config
-categories = ['general']
-language_support = True
-paging = True
-number_of_results = 1
-
-# search-url
-url = 'https://{language}.wikipedia.org/'
-search_url = url + 'w/api.php?action=query&list=search&{query}&srprop=timestamp&format=json&sroffset={offset}&srlimit={limit}'  # noqa
-
-
-# do search-request
-def request(query, params):
-    offset = (params['pageno'] - 1) * number_of_results
-
-    if params['language'] == 'all':
-        language = 'en'
-    else:
-        language = params['language'].split('_')[0]
-
-    # write search-language back to params, required in response
-    params['language'] = language
-
-    params['url'] = search_url.format(query=urlencode({'srsearch': query}),
-                                      offset=offset,
-                                      limit=number_of_results,
-                                      language=language)
-
-    return params
-
-
-# get response from search-request
-def response(resp):
-    results = []
-
-    search_results = loads(resp.text)
-
-    # return empty array if there are no results
-    if not search_results.get('query', {}).get('search'):
-        return []
-
-    # parse results
-    for result in search_results['query']['search']:
-        res_url = url.format(language=resp.search_params['language']) + 'wiki/' + quote(result['title'].replace(' ', '_').encode('utf-8'))
-
-        # append result
-        results.append({'url': res_url,
-                        'title': result['title'],
-                        'content': ''})
-
-    # return results
-    return results
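For reference, plugging a sample query into the format strings of the deleted module reproduces the request URL it used to build (Python 2, matching the removed imports):

```python
from urllib import urlencode

url = 'https://{language}.wikipedia.org/'
search_url = url + 'w/api.php?action=query&list=search&{query}&srprop=timestamp&format=json&sroffset={offset}&srlimit={limit}'

# query='test', pageno=1, language='en_US' -> language 'en', offset 0, limit 1
print(search_url.format(query=urlencode({'srsearch': 'test'}),
                        offset=0,
                        limit=1,
                        language='en'))
# https://en.wikipedia.org/w/api.php?action=query&list=search&srsearch=test&srprop=timestamp&format=json&sroffset=0&srlimit=1
```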