mirror of https://github.com/searxng/searxng.git
Drop Python 2 (4/n): SearchQuery.query is a str instead of bytes
This commit is contained in:
parent 7888377743
commit c225db45c8
@@ -32,10 +32,10 @@ def ask(query):
     results = []
     query_parts = list(filter(None, query.query.split()))

-    if query_parts[0].decode() not in answerers_by_keywords:
+    if query_parts[0] not in answerers_by_keywords:
         return results

-    for answerer in answerers_by_keywords[query_parts[0].decode()]:
+    for answerer in answerers_by_keywords[query_parts[0]]:
         result = answerer(query)
         if result:
             results.append(result)
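Note (illustrative sketch, not part of the commit): once SearchQuery.query is a str, splitting it yields str tokens, so they can index the keyword registry directly and the .decode() calls become redundant. The registry below is a stand-in for the real one built by searx.answerers.

# Hypothetical standalone illustration of the keyword dispatch on a str query.
answerers_by_keywords = {'random': [], 'min': [], 'max': []}

query = 'random uuid'                               # str, no longer bytes
query_parts = list(filter(None, query.split()))
assert query_parts[0] == 'random'                   # a str token
assert query_parts[0] in answerers_by_keywords      # lookup works without .decode()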
@@ -39,11 +39,11 @@ def random_uuid():
     return str(uuid.uuid4())


-random_types = {b'string': random_string,
-                b'int': random_int,
-                b'float': random_float,
-                b'sha256': random_sha256,
-                b'uuid': random_uuid}
+random_types = {'string': random_string,
+                'int': random_int,
+                'float': random_float,
+                'sha256': random_sha256,
+                'uuid': random_uuid}


 # required answerer function
@@ -64,4 +64,4 @@ def answer(query):
 def self_info():
     return {'name': gettext('Random value generator'),
             'description': gettext('Generate different random values'),
-            'examples': ['random {}'.format(x.decode()) for x in random_types]}
+            'examples': ['random {}'.format(x) for x in random_types]}
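Note (illustrative sketch, not part of the commit): with str keys, formatting the dictionary keys into the 'examples' list no longer leaks the bytes repr into the UI text.

# Hypothetical comparison of formatting bytes keys vs. str keys.
bytes_types = {b'uuid': None}
str_types = {'uuid': None}

print(['random {}'.format(x) for x in bytes_types])   # ["random b'uuid'"]  (leaked repr)
print(['random {}'.format(x) for x in str_types])     # ['random uuid']     (intended)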
@@ -27,15 +27,15 @@ def answer(query):
     func = parts[0]
     answer = None

-    if func == b'min':
+    if func == 'min':
         answer = min(args)
-    elif func == b'max':
+    elif func == 'max':
         answer = max(args)
-    elif func == b'avg':
+    elif func == 'avg':
         answer = sum(args) / len(args)
-    elif func == b'sum':
+    elif func == 'sum':
         answer = sum(args)
-    elif func == b'prod':
+    elif func == 'prod':
         answer = reduce(mul, args, 1)

     if answer is None:
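Note (illustrative sketch, not part of the commit): the statistics answerer now compares its first token against plain str keywords; the 'prod' branch uses functools.reduce with operator.mul. A minimal, self-contained recreation of that dispatch:

from functools import reduce
from operator import mul

# Hypothetical stand-alone version of the statistics dispatch, for illustration only.
def compute(func, args):
    if func == 'min':
        return min(args)
    if func == 'max':
        return max(args)
    if func == 'avg':
        return sum(args) / len(args)
    if func == 'sum':
        return sum(args)
    if func == 'prod':
        return reduce(mul, args, 1)   # 1 is the neutral start value
    return None

assert compute('prod', [2, 3, 4]) == 24
assert compute('avg', [2, 4]) == 3.0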
@@ -105,7 +105,7 @@ def request(query, params):
     # if our language is hosted on the main site, we need to add its name
     # to the query in order to narrow the results to that language
     if language in main_langs:
-        query += b' (' + main_langs[language] + b')'
+        query += ' (' + main_langs[language] + ')'

     # prepare the request parameters
     query = urlencode({'search': query})
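Note (illustrative sketch, not part of the commit): appending the language name now stays entirely in str, and urlencode() percent-encodes the final str. The main_langs entry below is a made-up value for illustration.

from urllib.parse import urlencode

# Hypothetical values; in the engine, main_langs maps language codes to wiki names.
main_langs = {'de': 'Deutsch'}
query, language = 'firefox', 'de'

if language in main_langs:
    query += ' (' + main_langs[language] + ')'      # plain str concatenation

print(urlencode({'search': query}))                 # search=firefox+%28Deutsch%29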
@@ -30,7 +30,7 @@ def request(query, params):
     # basic search
     offset = (params['pageno'] - 1) * number_of_results

-    string_args = dict(query=query.decode(),
+    string_args = dict(query=query,
                        offset=offset,
                        number_of_results=number_of_results)

@@ -55,7 +55,7 @@ shorcut_dict = {
 def request(query, params):
     # replace shortcuts with API advanced search keywords
     for key in shorcut_dict.keys():
-        query = re.sub(key, shorcut_dict[key], str(query))
+        query = re.sub(key, shorcut_dict[key], query)

     # basic search
     offset = (params['pageno'] - 1) * number_of_results
@@ -47,7 +47,7 @@ def request(query, params):
     else:
         lang = match_language(params['language'], supported_languages, language_aliases)

-    query = 'language:{} {}'.format(lang.split('-')[0].upper(), query.decode()).encode()
+    query = 'language:{} {}'.format(lang.split('-')[0].upper(), query)

     search_path = search_string.format(
         query=urlencode({'q': query}),
@@ -11,13 +11,13 @@ categories = []
 url = 'https://duckduckgo.com/js/spice/currency/1/{0}/{1}'
 weight = 100

-parser_re = re.compile(b'.*?(\\d+(?:\\.\\d+)?) ([^.0-9]+) (?:in|to) ([^.0-9]+)', re.I)
+parser_re = re.compile('.*?(\\d+(?:\\.\\d+)?) ([^.0-9]+) (?:in|to) ([^.0-9]+)', re.I)

 db = 1


 def normalize_name(name):
-    name = name.decode().lower().replace('-', ' ').rstrip('s')
+    name = name.lower().replace('-', ' ').rstrip('s')
     name = re.sub(' +', ' ', name)
     return unicodedata.normalize('NFKD', name).lower()

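Note (illustrative sketch, not part of the commit): the re module refuses to match a bytes pattern against a str, so once the query is a str the pattern must be a plain string as well. The same pattern, exercised on a sample conversion query:

import re

# Same pattern as the engine's parser_re, compiled from a str instead of bytes.
parser_re = re.compile('.*?(\\d+(?:\\.\\d+)?) ([^.0-9]+) (?:in|to) ([^.0-9]+)', re.I)

m = parser_re.match('15.5 usd in eur')
assert m is not None
amount, from_cur, to_cur = m.groups()
print(amount, from_cur, to_cur)     # 15.5 usd eur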
@@ -18,7 +18,7 @@ categories = ['general']
 url = 'https://dictzone.com/{from_lang}-{to_lang}-dictionary/{query}'
 weight = 100

-parser_re = re.compile(b'.*?([a-z]+)-([a-z]+) ([^ ]+)$', re.I)
+parser_re = re.compile('.*?([a-z]+)-([a-z]+) ([^ ]+)$', re.I)
 results_xpath = './/table[@id="r"]/tr'


@@ -37,7 +37,7 @@ def request(query, params):

     params['url'] = url.format(from_lang=from_lang[2],
                                to_lang=to_lang[2],
-                               query=query.decode())
+                               query=query)

     return params

@@ -90,7 +90,7 @@ def request(query, params):
     # if our language is hosted on the main site, we need to add its name
     # to the query in order to narrow the results to that language
     if language in main_langs:
-        query += b' (' + (main_langs[language]).encode() + b')'
+        query += ' (' + main_langs[language] + ')'

     # prepare the request parameters
     query = urlencode({'search': query})
@@ -30,8 +30,8 @@ route_re = re.compile('(?:from )?(.+) to (.+)')
 # do search-request
 def request(query, params):

-    params['url'] = base_url + search_string.format(query=query.decode())
-    params['route'] = route_re.match(query.decode())
+    params['url'] = base_url + search_string.format(query=query)
+    params['route'] = route_re.match(query)

     return params

@@ -29,7 +29,7 @@ def request(query, params):
     params['url'] = search_url
     params['method'] = 'POST'
     params['headers']['Content-type'] = "application/json"
-    params['data'] = dumps({"query": query.decode(),
+    params['data'] = dumps({"query": query,
                             "searchField": "ALL",
                             "sortDirection": "ASC",
                             "sortOrder": "RELEVANCY",
@@ -16,7 +16,7 @@ url = 'https://api.mymemory.translated.net/get?q={query}&langpair={from_lang}|{t
 web_url = 'https://mymemory.translated.net/en/{from_lang}/{to_lang}/{query}'
 weight = 100

-parser_re = re.compile(b'.*?([a-z]+)-([a-z]+) (.{2,})$', re.I)
+parser_re = re.compile('.*?([a-z]+)-([a-z]+) (.{2,})$', re.I)
 api_key = ''


@@ -39,9 +39,9 @@ def request(query, params):
         key_form = ''
     params['url'] = url.format(from_lang=from_lang[1],
                                to_lang=to_lang[1],
-                               query=query.decode(),
+                               query=query,
                                key=key_form)
-    params['query'] = query.decode()
+    params['query'] = query
     params['from_lang'] = from_lang
     params['to_lang'] = to_lang

@@ -23,7 +23,7 @@ def get_bang_url(search_query):
     """

     if search_query.external_bang:
-        query = search_query.query.decode(errors='ignore')
+        query = search_query.query
         bang = _get_bang(search_query.external_bang)

         if bang and query:
@@ -22,7 +22,7 @@ default_on = True


 # Self User Agent regex
-p = re.compile(b'.*user[ -]agent.*', re.IGNORECASE)
+p = re.compile('.*user[ -]agent.*', re.IGNORECASE)


 # attach callback to the post search hook
@@ -31,7 +31,7 @@ p = re.compile(b'.*user[ -]agent.*', re.IGNORECASE)
 def post_search(request, search):
     if search.search_query.pageno > 1:
         return True
-    if search.search_query.query == b'ip':
+    if search.search_query.query == 'ip':
         x_forwarded_for = request.headers.getlist("X-Forwarded-For")
         if x_forwarded_for:
             ip = x_forwarded_for[0]
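Note (illustrative sketch, not part of the commit): the plugin compares the query against the str literal 'ip' and runs its user-agent regex on it; in Python 3 a bytes query would never compare equal to a str literal, and a str-compiled regex would not match bytes input.

import re

p = re.compile('.*user[ -]agent.*', re.IGNORECASE)

query = 'What is my User-Agent?'       # str query, as produced after this commit
assert p.match(query) is not None      # the plugin's regex matches str input
assert b'ip' != 'ip'                   # a bytes query never equals the str literal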
@@ -32,6 +32,7 @@ class RawTextQuery:
     """parse raw text query (the value from the html input)"""

     def __init__(self, query, disabled_engines):
+        assert isinstance(query, str)
         self.query = query
         self.disabled_engines = []

@@ -51,7 +52,7 @@ class RawTextQuery:
         self.query_parts = []

         # split query, including whitespaces
-        raw_query_parts = re.split(r'(\s+)' if isinstance(self.query, str) else b'(\s+)', self.query)
+        raw_query_parts = re.split(r'(\s+)', self.query)

         parse_next = True

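Note (illustrative sketch, not part of the commit): re.split with a capturing group keeps the whitespace separators in the result, which is what RawTextQuery relies on to rebuild the query later; with a str-only query the single raw-string pattern is enough. The sample query is arbitrary.

import re

query = '!ddg  time in  berlin'
raw_query_parts = re.split(r'(\s+)', query)
print(raw_query_parts)     # ['!ddg', '  ', 'time', ' ', 'in', '  ', 'berlin']
assert ''.join(raw_query_parts) == query   # whitespace is preserved between tokens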
@@ -183,7 +184,7 @@ class SearchQuery:

     def __init__(self, query, engines, categories, lang, safesearch, pageno, time_range,
                  timeout_limit=None, preferences=None, external_bang=None):
-        self.query = query.encode()
+        self.query = query
         self.engines = engines
         self.categories = categories
         self.lang = lang
@@ -195,4 +196,4 @@ class SearchQuery:
         self.external_bang = external_bang

     def __str__(self):
-        return str(self.query) + ";" + str(self.engines)
+        return self.query + ";" + str(self.engines)
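Note (illustrative sketch, not part of the commit): with self.query already a str, __str__ can concatenate it directly; wrapping a bytes value in str() would have embedded the b'...' repr. The class below is a hypothetical bare-bones stand-in for SearchQuery, showing only this behaviour.

# Hypothetical minimal stand-in for SearchQuery's __str__.
class MiniSearchQuery:
    def __init__(self, query, engines):
        self.query = query          # str after this commit
        self.engines = engines

    def __str__(self):
        return self.query + ";" + str(self.engines)

print(str(MiniSearchQuery('test', ['ddg'])))   # test;['ddg']
print(str(b'test'))                            # b'test'  -- the old repr leak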
@@ -59,7 +59,6 @@ def highlight_content(content, query):
     if content.find('<') != -1:
         return content

-    query = query.decode()
     if content.lower().find(query.lower()) > -1:
         query_regex = '({0})'.format(re.escape(query))
         content = re.sub(query_regex, '<span class="highlight">\\1</span>',
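Note (illustrative sketch, not part of the commit): with the query passed in as a str, highlight_content can escape and substitute it directly. A simplified version of the highlighting step, not the exact utils implementation:

import re

def highlight(content, query):
    # simplified form of the substitution used in searx.utils.highlight_content
    query_regex = '({0})'.format(re.escape(query))
    return re.sub(query_regex, '<span class="highlight">\\1</span>', content, flags=re.I)

print(highlight('SearXNG is a metasearch engine', 'metasearch'))
# SearXNG is a <span class="highlight">metasearch</span> engine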
@@ -623,7 +623,7 @@ def index():
             result['publishedDate'] = format_date(result['publishedDate'])

     if output_format == 'json':
-        return Response(json.dumps({'query': search_query.query.decode(),
+        return Response(json.dumps({'query': search_query.query,
                                     'number_of_results': number_of_results,
                                     'results': results,
                                     'answers': list(result_container.answers),
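Note (illustrative sketch, not part of the commit): json.dumps cannot serialize bytes at all, so the JSON output path only works once search_query.query is a str.

import json

print(json.dumps({'query': 'test'}))        # {"query": "test"}

try:
    json.dumps({'query': b'test'})          # the pre-commit bytes value
except TypeError as exc:
    print('bytes are not JSON serializable:', exc)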
@@ -652,7 +652,7 @@ def index():
             csv.writerow([row.get(key, '') for key in keys])
         csv.stream.seek(0)
         response = Response(csv.stream.read(), mimetype='application/csv')
-        cont_disp = 'attachment;Filename=searx_-_{0}.csv'.format(search_query.query.decode())
+        cont_disp = 'attachment;Filename=searx_-_{0}.csv'.format(search_query.query)
         response.headers.add('Content-Disposition', cont_disp)
         return response

@@ -736,7 +736,7 @@ def autocompleter():
     disabled_engines = request.preferences.engines.get_disabled()

     # parse query
-    raw_text_query = RawTextQuery(request.form.get('q', b''), disabled_engines)
+    raw_text_query = RawTextQuery(str(request.form.get('q', b'')), disabled_engines)
     raw_text_query.parse_query()

     # check if search query is set
@@ -48,11 +48,11 @@ class SelfIPTest(SearxTestCase):
         # IP test
         request = Mock(remote_addr='127.0.0.1')
         request.headers.getlist.return_value = []
-        search = get_search_mock(query=b'ip', pageno=1)
+        search = get_search_mock(query='ip', pageno=1)
         store.call(store.plugins, 'post_search', request, search)
         self.assertTrue('127.0.0.1' in search.result_container.answers["ip"]["answer"])

-        search = get_search_mock(query=b'ip', pageno=2)
+        search = get_search_mock(query='ip', pageno=2)
         store.call(store.plugins, 'post_search', request, search)
         self.assertFalse('ip' in search.result_container.answers)

@@ -60,26 +60,26 @@ class SelfIPTest(SearxTestCase):
         request = Mock(user_agent='Mock')
         request.headers.getlist.return_value = []

-        search = get_search_mock(query=b'user-agent', pageno=1)
+        search = get_search_mock(query='user-agent', pageno=1)
         store.call(store.plugins, 'post_search', request, search)
         self.assertTrue('Mock' in search.result_container.answers["user-agent"]["answer"])

-        search = get_search_mock(query=b'user-agent', pageno=2)
+        search = get_search_mock(query='user-agent', pageno=2)
         store.call(store.plugins, 'post_search', request, search)
         self.assertFalse('user-agent' in search.result_container.answers)

-        search = get_search_mock(query=b'user-agent', pageno=1)
+        search = get_search_mock(query='user-agent', pageno=1)
         store.call(store.plugins, 'post_search', request, search)
         self.assertTrue('Mock' in search.result_container.answers["user-agent"]["answer"])

-        search = get_search_mock(query=b'user-agent', pageno=2)
+        search = get_search_mock(query='user-agent', pageno=2)
         store.call(store.plugins, 'post_search', request, search)
         self.assertFalse('user-agent' in search.result_container.answers)

-        search = get_search_mock(query=b'What is my User-Agent?', pageno=1)
+        search = get_search_mock(query='What is my User-Agent?', pageno=1)
         store.call(store.plugins, 'post_search', request, search)
         self.assertTrue('Mock' in search.result_container.answers["user-agent"]["answer"])

-        search = get_search_mock(query=b'What is my User-Agent?', pageno=2)
+        search = get_search_mock(query='What is my User-Agent?', pageno=2)
         store.call(store.plugins, 'post_search', request, search)
         self.assertFalse('user-agent' in search.result_container.answers)
@@ -30,9 +30,9 @@ class TestUtils(SearxTestCase):
         self.assertEqual(utils.highlight_content(content, None), content)

         content = 'a'
-        query = b'test'
+        query = 'test'
         self.assertEqual(utils.highlight_content(content, query), content)
-        query = b'a test'
+        query = 'a test'
         self.assertEqual(utils.highlight_content(content, query), content)

     def test_html_to_text(self):