author     dalf <alex@al-f.net>    2014-12-07 16:37:56 +0100
committer  dalf <alex@al-f.net>    2014-12-07 16:37:56 +0100
commit     7c13d630e4531630ce3c392a7d60752715742291 (patch)
tree       332019feae5a215d2d54528308792560794d7aa5 /searx/engines/bing_news.py
parent     ffcec383b7355c6ca8b60da8579a43019d7d7e6b (diff)
[fix] pep8 : engines (errors E121, E127, E128 and E501 still exist)
Diffstat (limited to 'searx/engines/bing_news.py')
-rw-r--r--   searx/engines/bing_news.py   19
1 file changed, 10 insertions(+), 9 deletions(-)
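
The hunks that follow mainly replace `!= None` comparisons with `is not None` (reported by the pep8 tool, later renamed pycodestyle, as E711) and strip trailing whitespace, while the codes named in the commit message remain: E121/E127/E128 cover continuation-line indentation and E501 covers over-long lines. A minimal sketch of the comparison change, using an illustrative variable name rather than the engine's actual code:

```python
# Minimal sketch of the None-comparison fix applied in the diff below.
# `content_xpath` is an illustrative stand-in for result.xpath(...),
# which returns a (possibly empty) list.
content_xpath = ['example', 'snippet']

# Before (flagged by pep8/pycodestyle as E711):
#     if content_xpath != None:
# After (identity comparison, as PEP 8 recommends):
if content_xpath is not None:
    content = ' '.join(content_xpath)
```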
diff --git a/searx/engines/bing_news.py b/searx/engines/bing_news.py
index 279f0d698..5dce4a2b2 100644
--- a/searx/engines/bing_news.py
+++ b/searx/engines/bing_news.py
@@ -1,8 +1,9 @@
 ## Bing (News)
-#
+#
 # @website     https://www.bing.com/news
-# @provide-api yes (http://datamarket.azure.com/dataset/bing/search), max. 5000 query/month
-#
+# @provide-api yes (http://datamarket.azure.com/dataset/bing/search),
+#              max. 5000 query/month
+#
 # @using-api   no (because of query limit)
 # @results     HTML (using search portal)
 # @stable      no (HTML can change)
@@ -57,12 +58,12 @@ def response(resp):
         url = link.attrib.get('href')
         title = ' '.join(link.xpath('.//text()'))
         contentXPath = result.xpath('.//div[@class="sn_txt"]/div//span[@class="sn_snip"]//text()')
-        if contentXPath != None:
+        if contentXPath is not None:
             content = escape(' '.join(contentXPath))
-
+
         # parse publishedDate
         publishedDateXPath = result.xpath('.//div[@class="sn_txt"]/div//span[contains(@class,"sn_ST")]//span[contains(@class,"sn_tm")]//text()')
-        if publishedDateXPath != None:
+        if publishedDateXPath is not None:
             publishedDate = escape(' '.join(publishedDateXPath))
 
         if re.match("^[0-9]+ minute(s|) ago$", publishedDate):
@@ -89,10 +90,10 @@ def response(resp):
             except TypeError:
                 # FIXME
                 publishedDate = datetime.now()
-
+
         # append result
-        results.append({'url': url,
-                        'title': title,
+        results.append({'url': url,
+                        'title': title,
                         'publishedDate': publishedDate,
                         'content': content})
 
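
To double-check which of the listed codes still trigger on this file, one option is to run the checker programmatically. This is a sketch assuming the pycodestyle package (the current name of the pep8 tool in use at the time) is installed and the script runs from a local searx checkout:

```python
# Sketch: count only the codes the commit message says still exist.
# Assumes pycodestyle is installed and the path points at a local checkout.
import pycodestyle

style = pycodestyle.StyleGuide(select=['E121', 'E127', 'E128', 'E501'])
report = style.check_files(['searx/engines/bing_news.py'])
print(report.total_errors, 'remaining indentation/line-length warnings')
```

The same selection works from the command line with `pycodestyle --select=E121,E127,E128,E501 searx/engines/bing_news.py`.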