'''
searx is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

searx is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.

You should have received a copy of the GNU Affero General Public License
along with searx. If not, see < http://www.gnu.org/licenses/ >.

(C) 2013- by Adam Tauber, <asciimoo@gmail.com>
'''

import gc
import threading
from thread import start_new_thread
from time import time
from uuid import uuid4

import searx.poolrequests as requests_lib
from searx.engines import (
    categories, engines
)
from searx.utils import gen_useragent
from searx.query import RawTextQuery, SearchQuery
from searx.results import ResultContainer
from searx import logger
from searx.plugins import plugins

logger = logger.getChild('search')

number_of_searches = 0

# module-level lock protecting the engines' shared stats; an RLock() created
# inline per "with" block (as this code previously did) would be a fresh lock
# each time and provide no mutual exclusion, so the shared stats_lock
# (a name introduced by this fix) is used instead
stats_lock = threading.RLock()


def search_request_wrapper(fn, url, engine_name, **kwargs):
    ret = None
    engine = engines[engine_name]
    try:
        ret = fn(url, **kwargs)
        with stats_lock:
            # the request succeeded: reset the error counter
            # and lift any suspension
            engine.continuous_errors = 0
            engine.suspend_end_time = 0
    except Exception:
        # update error stats
        with stats_lock:
            engine.stats['errors'] += 1
            engine.continuous_errors += 1
            # suspend the engine for one second per consecutive error,
            # capped at 60 seconds
            engine.suspend_end_time = time() + min(60, engine.continuous_errors)

        # log the engine name and the specific error message
        logger.exception('engine crash: {0}'.format(engine_name))
    return ret
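
# Back-off schedule implemented above: after the first consecutive error an
# engine is suspended for 1 second, after the second for 2 seconds, and so
# on, capped at 60 seconds; a single successful response resets the counter
# and lifts the suspension.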


def threaded_requests(requests):
    timeout_limit = max(r[2]['timeout'] for r in requests)
    search_start = time()
    search_id = str(uuid4())
    for fn, url, request_args, engine_name in requests:
        request_args['timeout'] = timeout_limit
        th = threading.Thread(
            target=search_request_wrapper,
            args=(fn, url, engine_name),
            kwargs=request_args,
            name=search_id,
        )
        th._engine_name = engine_name
        th.start()

    # wait for every thread of this search (identified by its name),
    # but never longer than the remaining timeout budget
    for th in threading.enumerate():
        if th.name == search_id:
            remaining_time = max(0.0, timeout_limit - (time() - search_start))
            th.join(remaining_time)
            if th.isAlive():
                logger.warning('engine timeout: {0}'.format(th._engine_name))
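
# A minimal usage sketch (hypothetical values): each entry handed to
# threaded_requests() is a (fn, url, request_args, engine_name) tuple whose
# request_args must contain a 'timeout' key, e.g.
#
#   threaded_requests([
#       (requests_lib.get,
#        'https://example.com/search?q=test',
#        {'timeout': 3.0, 'headers': {}, 'cookies': {}, 'verify': True},
#        'example engine'),
#   ])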


# get default request parameters
def default_request_params():
    return {
        'method': 'GET',
        'headers': {},
        'data': {},
        'url': '',
        'cookies': {},
        'verify': True
    }
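
# For illustration only: after default_request_params() and an engine's
# request() hook have run, the dict typically looks like
#
#   {'method': 'GET', 'url': 'https://example.com/?q=test',
#    'headers': {'User-Agent': '...'}, 'data': {}, 'cookies': {},
#    'verify': True, 'category': 'general', 'started': 1477000000.0,
#    'pageno': 1, 'language': 'all', 'safesearch': 0, 'time_range': None}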


# create a callback wrapper for the search engine results
def make_callback(engine_name, callback, params, result_container):

    def process_callback(response, **kwargs):
        # compare is_redirect with the True value explicitly, because in the
        # test suite the response can be a Mock object, where any attribute
        # access returns something truthy
        if response.is_redirect is True:
            logger.debug('{0} redirect on: {1}'.format(engine_name, response))
            return

        response.search_params = params

        search_duration = time() - params['started']
        # update stats with the current page load time
        with stats_lock:
            engines[engine_name].stats['page_load_time'] += search_duration

        # a response may arrive slightly after the engine timeout; anything
        # slower than timeout + overhead is counted as an error and discarded
        timeout_overhead = 0.2  # seconds
        timeout_limit = engines[engine_name].timeout + timeout_overhead

        if search_duration > timeout_limit:
            with stats_lock:
                engines[engine_name].stats['errors'] += 1
            return

        # parse the response with the engine-specific callback
        search_results = callback(response)

        # add results
        for result in search_results:
            result['engine'] = engine_name

        result_container.extend(engine_name, search_results)

    return process_callback
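
# A minimal usage sketch (hypothetical engine name): the closure returned by
# make_callback() is meant to be registered as a requests response hook, e.g.
#
#   callback = make_callback('example engine', engine.response,
#                            request_params, result_container)
#   requests_lib.get(url, hooks=dict(response=callback), timeout=2.0)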


def get_search_query_from_webapp(preferences, request_data):
    query = None
    query_engines = []
    query_categories = []
    query_paging = False
    query_pageno = 1
    query_lang = 'all'
    query_time_range = None

    # get the engines disabled in the preferences
    disabled_engines = preferences.engines.get_disabled()

    # get the language from the preferences
    query_lang = preferences.get_value('language')

    # safesearch
    query_safesearch = preferences.get_value('safesearch')

    # TODO better exceptions
    if not request_data.get('q'):
        raise Exception('noquery')

    # set page number
    pageno_param = request_data.get('pageno', '1')
    if not pageno_param.isdigit() or int(pageno_param) < 1:
        pageno_param = 1

    query_pageno = int(pageno_param)

    # parse the query for special tags that change
    # the search engines or the search language
    raw_text_query = RawTextQuery(request_data['q'], disabled_engines)
    raw_text_query.parse_query()

    # set query
    query = raw_text_query.getSearchQuery()

    # get the last language selected in the query, if any
    # TODO support searching with multiple languages
    if raw_text_query.languages:
        query_lang = raw_text_query.languages[-1]

    query_time_range = request_data.get('time_range')

    query_engines = raw_text_query.engines

    # if the engines were selected in the query itself,
    # derive the categories from those engines
    if query_engines and raw_text_query.specific:
        query_categories = list(set(engine['category']
                                    for engine in query_engines))

    # otherwise, use the selected categories to
    # determine which engines should be used
    else:
        # set categories/engines
        load_default_categories = True
        for pd_name, pd in request_data.items():
            if pd_name == 'categories':
                query_categories.extend(categ
                                        for categ in map(unicode.strip, pd.split(','))
                                        if categ in categories)
            elif pd_name == 'engines':
                pd_engines = [{'category': engines[engine].categories[0],
                               'name': engine}
                              for engine in map(unicode.strip, pd.split(','))
                              if engine in engines]
                if pd_engines:
                    query_engines.extend(pd_engines)
                    load_default_categories = False
            elif pd_name.startswith('category_'):
                category = pd_name[9:]

                # if the category is unknown, skip it
                if category not in categories:
                    continue

                if pd != 'off':
                    # add category to list
                    query_categories.append(category)
                elif category in query_categories:
                    # remove category from list if the parameter is set to 'off'
                    query_categories.remove(category)

        if not load_default_categories:
            if not query_categories:
                query_categories = list(set(engine['category']
                                            for engine in query_engines))
        else:
            # if no category is specified for this search, use
            # the user-defined default configuration
            # (stored in a cookie)
            if not query_categories:
                cookie_categories = preferences.get_value('categories')
                for ccateg in cookie_categories:
                    if ccateg in categories:
                        query_categories.append(ccateg)

            # if still no category is specified, fall back to
            # 'general' as the default category
            if not query_categories:
                query_categories = ['general']

            # use all engines declared under the selected
            # categories that are not disabled
            for categ in query_categories:
                query_engines.extend({'category': categ,
                                      'name': engine.name}
                                     for engine in categories[categ]
                                     if (engine.name, categ) not in disabled_engines)

    return SearchQuery(query, query_engines, query_categories,
                       query_lang, query_safesearch, query_pageno, query_time_range)
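
# A minimal usage sketch (hypothetical form values; request_data mirrors the
# web form fields):
#
#   search_query = get_search_query_from_webapp(
#       preferences, {'q': u'test', 'pageno': '1', 'categories': u'general'})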


class Search(object):

    """Search information container"""

    def __init__(self, search_query):
        # init vars
        super(Search, self).__init__()
        self.search_query = search_query
        self.result_container = ResultContainer()

    # do the search request
    def search(self):
        global number_of_searches

        # init vars
        requests = []

        # increase number of searches
        number_of_searches += 1

        # set default user agent
        # user_agent = request.headers.get('User-Agent', '')
        user_agent = gen_useragent()

        search_query = self.search_query

        # start a search request for every selected engine
        for selected_engine in search_query.engines:
            if selected_engine['name'] not in engines:
                continue

            engine = engines[selected_engine['name']]

            # skip engines that are still suspended after previous errors
            if engine.suspend_end_time and engine.suspend_end_time > time():
                continue

            # if paging is not supported, skip
            if search_query.pageno > 1 and not engine.paging:
                continue

            # if a search language is set and the engine does not
            # support languages, skip
            if search_query.lang != 'all' and not engine.language_support:
                continue

            # if time_range is not supported, skip
            if search_query.time_range and not engine.time_range_support:
                continue

            # set default request parameters
            request_params = default_request_params()
            request_params['headers']['User-Agent'] = user_agent
            request_params['category'] = selected_engine['category']
            request_params['started'] = time()
            request_params['pageno'] = search_query.pageno

            if hasattr(engine, 'language') and engine.language:
                request_params['language'] = engine.language
            else:
                request_params['language'] = search_query.lang

            # 0 = None, 1 = Moderate, 2 = Strict
            request_params['safesearch'] = search_query.safesearch
            request_params['time_range'] = search_query.time_range

            # update request parameters depending on the
            # search engine (implemented in the engines folder)
            engine.request(search_query.query.encode('utf-8'), request_params)

            if request_params['url'] is None:
                # TODO add support of offline engines
                pass

            # create a callback wrapper for the search engine results
            callback = make_callback(
                selected_engine['name'],
                engine.response,
                request_params,
                self.result_container)

            # create a dictionary which contains all
            # information about the request
            request_args = dict(
                headers=request_params['headers'],
                hooks=dict(response=callback),
                cookies=request_params['cookies'],
                timeout=engine.timeout,
                verify=request_params['verify']
            )

            # choose the specific type of request (GET or POST)
            if request_params['method'] == 'GET':
                req = requests_lib.get
            else:
                req = requests_lib.post
                request_args['data'] = request_params['data']

            # ignore empty urls
            if not request_params['url']:
                continue

            # append the request to the list
            requests.append((req, request_params['url'],
                             request_args,
                             selected_engine['name']))

        if not requests:
            return self.result_container
        # send all search requests
        threaded_requests(requests)
        start_new_thread(gc.collect, tuple())

        # return results, suggestions, answers and infoboxes
        return self.result_container
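
# Typical flow, combining the helpers above (names are from this module):
#
#   search_query = get_search_query_from_webapp(preferences, request_data)
#   result_container = Search(search_query).search()
#   results = result_container.get_ordered_results()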


class SearchWithPlugins(Search):

    """Similar to the Search class but calls the plugins."""

    def __init__(self, search_query, request):
        super(SearchWithPlugins, self).__init__(search_query)
        self.request = request

    def search(self):
        # a pre_search plugin can cancel the search by returning False
        if plugins.call('pre_search', self.request, self):
            super(SearchWithPlugins, self).search()

        plugins.call('post_search', self.request, self)

        results = self.result_container.get_ordered_results()

        for result in results:
            plugins.call('on_result', self.request, self, result)

        return self.result_container
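
# A minimal plugin sketch (hypothetical module): plugins.call(name, ...)
# invokes the function of that name on every registered plugin that defines
# it, so a plugin only needs to expose matching callables, e.g.
#
#   def pre_search(request, search):
#       # returning False cancels the engine requests
#       return True
#
#   def on_result(request, search, result):
#       # may modify the result dict in place
#       return True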