author    Emilien Devos <contact@emiliendevos.be>    2022-07-25 12:53:56 +0200
committer Markus Heiser <markus.heiser@darmarit.de>  2022-07-25 13:27:06 +0200
commit    5fb2071cb2248c0f0ada7affb0c47f841ddbf102 (patch)
tree      601eb8e7109230929d0a13210cf77a69576dcc9b /searx
parent    07aa2bfd98fec41427a77532e6890a7af35c9383 (diff)
download  searxng-5fb2071cb2248c0f0ada7affb0c47f841ddbf102.tar.gz
          searxng-5fb2071cb2248c0f0ada7affb0c47f841ddbf102.zip
[fix] google & youtube - set EU consent cookie
This changes the previous bypass method for the Google consent dialog, from
``ucbcb=1`` (6face215b8) to accepting the consent with ``CONSENT=YES+``.
The youtube_noapi and google engines have a similar API, at least for the
consent [1].

Get the CONSENT cookie from a google request::

    curl -i "https://www.google.com/search?q=time&tbm=isch" \
         -A "Mozilla/5.0 (X11; Linux i686; rv:102.0) Gecko/20100101 Firefox/102.0" \
         | grep -i consent
    ...
    location: https://consent.google.com/m?continue=https://www.google.com/search?q%3Dtime%26tbm%3Disch&gl=DE&m=0&pc=irp&uxe=eomtm&hl=en-US&src=1
    set-cookie: CONSENT=PENDING+936; expires=Wed, 24-Jul-2024 11:26:20 GMT; path=/; domain=.google.com; Secure
    ...

PENDING & YES [2]:

    Google changed the way consent to the YouTube cookies agreement is asked
    for in EU countries.  Instead of showing a popup on the website, YouTube
    redirects the user to a new webpage at the consent.youtube.com domain ...
    The fix for this is to send a ``CONSENT`` cookie with the value ``YES+``
    with every YouTube request.

[1] https://github.com/iv-org/invidious/pull/2207
[2] https://github.com/TeamNewPipe/NewPipeExtractor/issues/592

Closes: https://github.com/searxng/searxng/issues/1432
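The effect of the cookie can also be checked outside of SearXNG; a minimal
sketch with Python's requests, mirroring the curl command above (the query,
User-Agent and URL are only illustrative, not part of this patch)::

    import requests

    UA = "Mozilla/5.0 (X11; Linux i686; rv:102.0) Gecko/20100101 Firefox/102.0"
    url = "https://www.google.com/search"
    query = {"q": "time", "tbm": "isch"}

    # Without a consent cookie Google answers with a redirect to
    # consent.google.com and sets CONSENT=PENDING+... (see above).
    resp = requests.get(url, params=query, headers={"User-Agent": UA},
                        allow_redirects=False)
    print(resp.status_code, resp.headers.get("location"))

    # With CONSENT=YES+ the consent interstitial should be skipped and the
    # result page returned directly.
    resp = requests.get(url, params=query, headers={"User-Agent": UA},
                        cookies={"CONSENT": "YES+"}, allow_redirects=False)
    print(resp.status_code)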
Diffstat (limited to 'searx')
-rw-r--r--  searx/engines/google.py           | 2
-rw-r--r--  searx/engines/google_images.py    | 5
-rw-r--r--  searx/engines/google_news.py      | 5
-rw-r--r--  searx/engines/google_play_apps.py | 3
-rw-r--r--  searx/engines/google_scholar.py   | 3
-rw-r--r--  searx/engines/google_videos.py    | 3
-rw-r--r--  searx/engines/youtube_noapi.py    | 3
7 files changed, 13 insertions, 11 deletions
diff --git a/searx/engines/google.py b/searx/engines/google.py
index 4468558ac..1c38ba34a 100644
--- a/searx/engines/google.py
+++ b/searx/engines/google.py
@@ -287,7 +287,6 @@ def request(query, params):
'oe': "utf8",
'start': offset,
'filter': '0',
- 'ucbcb': 1,
**additional_parameters,
}
)
@@ -299,6 +298,7 @@ def request(query, params):
query_url += '&' + urlencode({'safe': filter_mapping[params['safesearch']]})
params['url'] = query_url
+ params['cookies']['CONSENT'] = "YES+"
params['headers'].update(lang_info['headers'])
if use_mobile_ui:
params['headers']['Accept'] = '*/*'
diff --git a/searx/engines/google_images.py b/searx/engines/google_images.py
index b5c7b5302..a65c0ce37 100644
--- a/searx/engines/google_images.py
+++ b/searx/engines/google_images.py
@@ -132,9 +132,7 @@ def request(query, params):
+ lang_info['subdomain']
+ '/search'
+ "?"
- + urlencode(
- {'q': query, 'tbm': "isch", **lang_info['params'], 'ie': "utf8", 'oe': "utf8", 'num': 30, 'ucbcb': 1}
- )
+ + urlencode({'q': query, 'tbm': "isch", **lang_info['params'], 'ie': "utf8", 'oe': "utf8", 'num': 30})
)
if params['time_range'] in time_range_dict:
@@ -143,6 +141,7 @@ def request(query, params):
query_url += '&' + urlencode({'safe': filter_mapping[params['safesearch']]})
params['url'] = query_url
+ params['cookies']['CONSENT'] = "YES+"
params['headers'].update(lang_info['headers'])
params['headers']['Accept'] = 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8'
return params
diff --git a/searx/engines/google_news.py b/searx/engines/google_news.py
index 2aef517f6..0f97f9289 100644
--- a/searx/engines/google_news.py
+++ b/searx/engines/google_news.py
@@ -97,13 +97,12 @@ def request(query, params):
+ lang_info['subdomain']
+ '/search'
+ "?"
- + urlencode(
- {'q': query, **lang_info['params'], 'ie': "utf8", 'oe': "utf8", 'gl': lang_info['country'], 'ucbcb': 1}
- )
+ + urlencode({'q': query, **lang_info['params'], 'ie': "utf8", 'oe': "utf8", 'gl': lang_info['country']})
+ ('&ceid=%s' % ceid)
) # ceid includes a ':' character which must not be urlencoded
params['url'] = query_url
+ params['cookies']['CONSENT'] = "YES+"
params['headers'].update(lang_info['headers'])
params['headers']['Accept'] = 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8'
diff --git a/searx/engines/google_play_apps.py b/searx/engines/google_play_apps.py
index c80a1f270..226e48dab 100644
--- a/searx/engines/google_play_apps.py
+++ b/searx/engines/google_play_apps.py
@@ -22,11 +22,12 @@ about = {
}
categories = ["files", "apps"]
-search_url = "https://play.google.com/store/search?{query}&c=apps&ucbcb=1"
+search_url = "https://play.google.com/store/search?{query}&c=apps"
def request(query, params):
params["url"] = search_url.format(query=urlencode({"q": query}))
+ params['cookies']['CONSENT'] = "YES+"
return params
diff --git a/searx/engines/google_scholar.py b/searx/engines/google_scholar.py
index e4c2d7273..f9c73097d 100644
--- a/searx/engines/google_scholar.py
+++ b/searx/engines/google_scholar.py
@@ -85,12 +85,13 @@ def request(query, params):
+ lang_info['subdomain']
+ '/scholar'
+ "?"
- + urlencode({'q': query, **lang_info['params'], 'ie': "utf8", 'oe': "utf8", 'start': offset, 'ucbcb': 1})
+ + urlencode({'q': query, **lang_info['params'], 'ie': "utf8", 'oe': "utf8", 'start': offset})
)
query_url += time_range_url(params)
params['url'] = query_url
+ params['cookies']['CONSENT'] = "YES+"
params['headers'].update(lang_info['headers'])
params['headers']['Accept'] = 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8'
diff --git a/searx/engines/google_videos.py b/searx/engines/google_videos.py
index 3d3f6d93c..6eb051e0a 100644
--- a/searx/engines/google_videos.py
+++ b/searx/engines/google_videos.py
@@ -118,7 +118,7 @@ def request(query, params):
+ lang_info['subdomain']
+ '/search'
+ "?"
- + urlencode({'q': query, 'tbm': "vid", **lang_info['params'], 'ie': "utf8", 'oe': "utf8", 'ucbcb': 1})
+ + urlencode({'q': query, 'tbm': "vid", **lang_info['params'], 'ie': "utf8", 'oe': "utf8"})
)
if params['time_range'] in time_range_dict:
@@ -127,6 +127,7 @@ def request(query, params):
query_url += '&' + urlencode({'safe': filter_mapping[params['safesearch']]})
params['url'] = query_url
+ params['cookies']['CONSENT'] = "YES+"
params['headers'].update(lang_info['headers'])
params['headers']['Accept'] = 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8'
return params
diff --git a/searx/engines/youtube_noapi.py b/searx/engines/youtube_noapi.py
index 30f89d72f..7992adf82 100644
--- a/searx/engines/youtube_noapi.py
+++ b/searx/engines/youtube_noapi.py
@@ -25,7 +25,7 @@ time_range_support = True
# search-url
base_url = 'https://www.youtube.com/results'
-search_url = base_url + '?search_query={query}&page={page}&ucbcb=1'
+search_url = base_url + '?search_query={query}&page={page}'
time_range_url = '&sp=EgII{time_range}%253D%253D'
# the key seems to be constant
next_page_url = 'https://www.youtube.com/youtubei/v1/search?key=AIzaSyAO_FJ2SlqU8Q4STEHLGCilw_Y9_11qcW8'
@@ -36,6 +36,7 @@ base_youtube_url = 'https://www.youtube.com/watch?v='
# do search-request
def request(query, params):
+ params['cookies']['CONSENT'] = "YES+"
if not params['engine_data'].get('next_page_token'):
params['url'] = search_url.format(query=quote_plus(query), page=params['pageno'])
if params['time_range'] in time_range_dict:
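For context, a rough sketch of the engine contract these hunks rely on
(illustrative only, not code from this commit): request() fills the params
dict and the SearXNG network layer sends params['cookies'] with the outgoing
HTTP request, so setting CONSENT once per request replaces the former
``ucbcb=1`` URL parameter::

    from urllib.parse import quote_plus

    def request(query, params):
        # build the request URL as the engines above do (simplified)
        params['url'] = ('https://www.youtube.com/results?search_query='
                         + quote_plus(query))
        # accept the EU consent dialog for this request
        params['cookies']['CONSENT'] = "YES+"
        return params

    # the core prepares params with empty 'headers' and 'cookies' dicts;
    # downstream, something equivalent to this is performed (simplified):
    #   response = http.get(params['url'], headers=params['headers'],
    #                       cookies=params['cookies'])
    params = {'url': None, 'headers': {}, 'cookies': {}}
    request('time', params)
    print(params['url'], params['cookies'])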