mirror of https://github.com/searxng/searxng.git
[mod] timeouts in the log are readable (the timeouts are compared to the start_time of the request).
This commit is contained in:
parent
3a8ab5880a
commit
28d51fd063
|
@ -37,7 +37,7 @@ logger = logger.getChild('search')
|
||||||
number_of_searches = 0
|
number_of_searches = 0
|
||||||
|
|
||||||
|
|
||||||
def send_http_request(engine, request_params, timeout_limit):
|
def send_http_request(engine, request_params, start_time, timeout_limit):
|
||||||
# for page_load_time stats
|
# for page_load_time stats
|
||||||
time_before_request = time()
|
time_before_request = time()
|
||||||
|
|
||||||
|
@ -62,7 +62,8 @@ def send_http_request(engine, request_params, timeout_limit):
|
||||||
|
|
||||||
# is there a timeout (no parsing in this case)
|
# is there a timeout (no parsing in this case)
|
||||||
timeout_overhead = 0.2 # seconds
|
timeout_overhead = 0.2 # seconds
|
||||||
search_duration = time() - request_params['started']
|
time_after_request = time()
|
||||||
|
search_duration = time_after_request - start_time
|
||||||
if search_duration > timeout_limit + timeout_overhead:
|
if search_duration > timeout_limit + timeout_overhead:
|
||||||
raise Timeout(response=response)
|
raise Timeout(response=response)
|
||||||
|
|
||||||
|
@ -72,14 +73,14 @@ def send_http_request(engine, request_params, timeout_limit):
|
||||||
engine.suspend_end_time = 0
|
engine.suspend_end_time = 0
|
||||||
# update stats with current page-load-time
|
# update stats with current page-load-time
|
||||||
# only the HTTP request
|
# only the HTTP request
|
||||||
engine.stats['page_load_time'] += time() - time_before_request
|
engine.stats['page_load_time'] += time_after_request - time_before_request
|
||||||
engine.stats['page_load_count'] += 1
|
engine.stats['page_load_count'] += 1
|
||||||
|
|
||||||
# everything is ok : return the response
|
# everything is ok : return the response
|
||||||
return response
|
return response
|
||||||
|
|
||||||
|
|
||||||
def search_one_request(engine, query, request_params, timeout_limit):
|
def search_one_request(engine, query, request_params, start_time, timeout_limit):
|
||||||
# update request parameters dependent on
|
# update request parameters dependent on
|
||||||
# search-engine (contained in engines folder)
|
# search-engine (contained in engines folder)
|
||||||
engine.request(query, request_params)
|
engine.request(query, request_params)
|
||||||
|
@ -92,20 +93,19 @@ def search_one_request(engine, query, request_params, timeout_limit):
|
||||||
return []
|
return []
|
||||||
|
|
||||||
# send request
|
# send request
|
||||||
response = send_http_request(engine, request_params, timeout_limit)
|
response = send_http_request(engine, request_params, start_time, timeout_limit)
|
||||||
|
|
||||||
# parse the response
|
# parse the response
|
||||||
response.search_params = request_params
|
response.search_params = request_params
|
||||||
return engine.response(response)
|
return engine.response(response)
|
||||||
|
|
||||||
|
|
||||||
def search_one_request_safe(engine_name, query, request_params, result_container, timeout_limit):
|
def search_one_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit):
|
||||||
start_time = time()
|
|
||||||
engine = engines[engine_name]
|
engine = engines[engine_name]
|
||||||
|
|
||||||
try:
|
try:
|
||||||
# send requests and parse the results
|
# send requests and parse the results
|
||||||
search_results = search_one_request(engine, query, request_params, timeout_limit)
|
search_results = search_one_request(engine, query, request_params, start_time, timeout_limit)
|
||||||
|
|
||||||
# add results
|
# add results
|
||||||
result_container.extend(engine_name, search_results)
|
result_container.extend(engine_name, search_results)
|
||||||
|
@ -149,14 +149,13 @@ def search_one_request_safe(engine_name, query, request_params, result_container
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
|
||||||
def search_multiple_requests(requests, result_container, timeout_limit):
|
def search_multiple_requests(requests, result_container, start_time, timeout_limit):
|
||||||
start_time = time()
|
|
||||||
search_id = uuid4().__str__()
|
search_id = uuid4().__str__()
|
||||||
|
|
||||||
for engine_name, query, request_params in requests:
|
for engine_name, query, request_params in requests:
|
||||||
th = threading.Thread(
|
th = threading.Thread(
|
||||||
target=search_one_request_safe,
|
target=search_one_request_safe,
|
||||||
args=(engine_name, query, request_params, result_container, timeout_limit),
|
args=(engine_name, query, request_params, result_container, start_time, timeout_limit),
|
||||||
name=search_id,
|
name=search_id,
|
||||||
)
|
)
|
||||||
th._engine_name = engine_name
|
th._engine_name = engine_name
|
||||||
|
@ -366,7 +365,6 @@ class Search(object):
|
||||||
request_params = default_request_params()
|
request_params = default_request_params()
|
||||||
request_params['headers']['User-Agent'] = user_agent
|
request_params['headers']['User-Agent'] = user_agent
|
||||||
request_params['category'] = selected_engine['category']
|
request_params['category'] = selected_engine['category']
|
||||||
request_params['started'] = start_time
|
|
||||||
request_params['pageno'] = search_query.pageno
|
request_params['pageno'] = search_query.pageno
|
||||||
|
|
||||||
if hasattr(engine, 'language') and engine.language:
|
if hasattr(engine, 'language') and engine.language:
|
||||||
|
@ -386,7 +384,7 @@ class Search(object):
|
||||||
|
|
||||||
if requests:
|
if requests:
|
||||||
# send all search-request
|
# send all search-request
|
||||||
search_multiple_requests(requests, self.result_container, timeout_limit - (time() - start_time))
|
search_multiple_requests(requests, self.result_container, start_time, timeout_limit)
|
||||||
start_new_thread(gc.collect, tuple())
|
start_new_thread(gc.collect, tuple())
|
||||||
|
|
||||||
# return results, suggestions, answers and infoboxes
|
# return results, suggestions, answers and infoboxes
|
||||||
|
|
Loading…
Reference in New Issue