2014-09-13 16:47:28 +00:00
|
|
|
'''
|
|
|
|
searx is free software: you can redistribute it and/or modify
|
|
|
|
it under the terms of the GNU Affero General Public License as published by
|
|
|
|
the Free Software Foundation, either version 3 of the License, or
|
|
|
|
(at your option) any later version.
|
|
|
|
|
|
|
|
searx is distributed in the hope that it will be useful,
|
|
|
|
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
GNU Affero General Public License for more details.
|
|
|
|
|
|
|
|
You should have received a copy of the GNU Affero General Public License
|
|
|
|
along with searx. If not, see <http://www.gnu.org/licenses/>.
|
|
|
|
|
|
|
|
(C) 2013- by Adam Tauber, <asciimoo@gmail.com>
|
|
|
|
'''
|
|
|
|
|
|
|
|
|
2014-03-29 15:30:49 +00:00
|
|
|
from lxml import etree
|
|
|
|
from json import loads
|
2020-08-06 15:42:46 +00:00
|
|
|
from urllib.parse import urlencode
|
|
|
|
|
2021-02-22 17:13:50 +00:00
|
|
|
from requests import RequestException
|
|
|
|
|
2015-04-09 22:59:25 +00:00
|
|
|
from searx import settings
|
|
|
|
from searx.poolrequests import get as http_get
|
2021-02-22 17:13:50 +00:00
|
|
|
from searx.exceptions import SearxEngineResponseException
|
2018-01-19 02:51:27 +00:00
|
|
|
|
2015-04-09 22:59:25 +00:00
|
|
|
|
|
|
|
def get(*args, **kwargs):
    """HTTP GET through searx's request pool with sane defaults.

    Ensures a request timeout (taken from the ``outgoing`` settings when the
    caller did not supply one) and forces HTTP errors to raise, then delegates
    to ``searx.poolrequests.get``.
    """
    kwargs.setdefault('timeout', settings['outgoing']['request_timeout'])
    kwargs['raise_for_httperror'] = True
    return http_get(*args, **kwargs)
|
2015-01-10 15:42:57 +00:00
|
|
|
|
|
|
|
|
2016-03-30 00:53:31 +00:00
|
|
|
def dbpedia(query, lang):
    """DBpedia autocompleter: keyword search over the lookup API (XML reply)."""
    base_url = 'https://lookup.dbpedia.org/api/search.asmx/KeywordSearch?'

    response = get(base_url + urlencode({'QueryString': query}))

    # Anything but a 2xx answer yields no suggestions.
    if not response.ok:
        return []

    dom = etree.fromstring(response.content)
    return dom.xpath('//Result/Label//text()')
|
|
|
|
|
|
|
|
|
2016-03-30 00:53:31 +00:00
|
|
|
def duckduckgo(query, lang):
    """DuckDuckGo autocompleter using the JSON ``type=list`` endpoint."""
    url = 'https://ac.duckduckgo.com/ac/?{0}&type=list'

    payload = loads(get(url.format(urlencode({'q': query}))).text)
    # The suggestion list sits at index 1 of the decoded payload.
    return payload[1] if len(payload) > 1 else []
|
|
|
|
|
|
|
|
|
2016-03-30 00:53:31 +00:00
|
|
|
def google(query, lang):
    """Google autocompleter via the toolbar suggest endpoint (XML reply)."""
    base_url = 'https://suggestqueries.google.com/complete/search?client=toolbar&'

    response = get(base_url + urlencode({'hl': lang, 'q': query}))

    # Non-2xx replies produce an empty suggestion list.
    if not response.ok:
        return []

    dom = etree.fromstring(response.text)
    return dom.xpath('//suggestion/@data')
|
|
|
|
|
|
|
|
|
2016-03-30 00:53:31 +00:00
|
|
|
def startpage(query, lang):
    """Startpage autocompleter; the reply is plain newline-separated text."""
    url = 'https://startpage.com/do/suggest?{query}'

    lines = get(url.format(query=urlencode({'query': query}))).text.split('\n')
    # NOTE(review): the whole split list is returned (no slicing) whenever it
    # holds more than one line — confirm this matches the reply format.
    return lines if len(lines) > 1 else []
|
|
|
|
|
|
|
|
|
2020-02-14 18:19:24 +00:00
|
|
|
def swisscows(query, lang):
    """Swisscows autocompleter: the JSON reply is already the suggestion list."""
    url = 'https://swisscows.ch/api/suggest?{query}&itemsCount=5'

    return loads(get(url.format(query=urlencode({'query': query}))).text)
|
|
|
|
|
|
|
|
|
2016-03-30 00:53:31 +00:00
|
|
|
def qwant(query, lang):
    """Qwant autocompleter (the API also accepts lang=en_en&count=xxx)."""
    url = 'https://api.qwant.com/api/suggest?{query}'

    response = get(url.format(query=urlencode({'q': query, 'lang': lang})))

    if not response.ok:
        return []

    payload = loads(response.text)
    if payload['status'] != 'success':
        return []

    # Suggestions are the 'value' fields of the items in the data payload.
    return [item['value'] for item in payload['data']['items']]
|
|
|
|
|
|
|
|
|
2016-03-30 00:53:31 +00:00
|
|
|
def wikipedia(query, lang):
    """Wikipedia autocompleter using the language-specific opensearch API."""
    endpoint = ('https://{lang}.wikipedia.org/w/api.php'
                '?action=opensearch&{args}&limit=10&namespace=0&format=json')

    payload = loads(get(endpoint.format(lang=lang,
                                        args=urlencode({'search': query}))).text)
    # The title suggestions sit at index 1 of the opensearch reply.
    return payload[1] if len(payload) > 1 else []
|
2014-03-29 15:30:49 +00:00
|
|
|
|
|
|
|
|
|
|
|
# Registry mapping the configurable backend name to its autocompleter function.
backends = {
    'dbpedia': dbpedia,
    'duckduckgo': duckduckgo,
    'google': google,
    'startpage': startpage,
    'swisscows': swisscows,
    'qwant': qwant,
    'wikipedia': wikipedia,
}
|
2021-02-22 17:13:50 +00:00
|
|
|
|
|
|
|
|
|
|
|
def search_autocomplete(backend_name, query, lang):
    """Run the named autocomplete backend for *query*.

    Returns the backend's suggestion list, or [] when the backend name is
    unknown or the request fails (network error / engine response error).
    """
    try:
        backend = backends[backend_name]
    except KeyError:
        return []

    try:
        return backend(query, lang)
    except (RequestException, SearxEngineResponseException):
        return []
|