[httpx] replace searx.poolrequests by searx.network
settings.yml:

* outgoing.networks:
  * can contain network definitions
  * properties: enable_http, verify, http2, max_connections, max_keepalive_connections,
    keepalive_expiry, local_addresses, support_ipv4, support_ipv6, proxies, max_redirects, retries
  * retries: 0 by default; the number of times searx retries to send the HTTP request
    (using a different IP & proxy each time)
  * local_addresses can be "192.168.0.1/24" (IPv6 addresses are supported too)
  * support_ipv4 & support_ipv6: both True by default,
    see https://github.com/searx/searx/pull/1034
* each engine can define a "network" section (sketched below):
  * either a full network definition
  * or a reference to an existing network
* all HTTP requests of an engine use the same HTTP configuration (this was not the case
  before; see the proxy configuration in master)
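A minimal sketch of such a settings.yml, assuming only the properties listed above; the
network name, engine names, and all values are illustrative examples, not shipped defaults:

    outgoing:
      networks:
        # "slow_tor" is an illustrative name; every value below is an example
        slow_tor:
          enable_http: false
          verify: true
          http2: false
          max_connections: 100
          max_keepalive_connections: 10
          keepalive_expiry: 5.0
          local_addresses: "192.168.0.1/24"   # source addresses; IPv6 works too
          support_ipv4: true                  # both default to True
          support_ipv6: true
          proxies: socks5://127.0.0.1:9050
          max_redirects: 30
          retries: 1                          # default is 0

    engines:
      - name: example engine
        # either reference an existing network by name ...
        network: slow_tor

      - name: another example engine
        # ... or define a full network inline
        network:
          http2: true
          retries: 2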
@@ -13,7 +13,7 @@ from langdetect import detect_langs
 from langdetect.lang_detect_exception import LangDetectException
 import httpx
 
-from searx import poolrequests, logger
+from searx import network, logger
 from searx.results import ResultContainer
 from searx.search.models import SearchQuery, EngineRef
 from searx.search.processors import EngineProcessor
@@ -75,8 +75,8 @@ def _is_url_image(image_url):
     while retry > 0:
         a = time()
         try:
-            poolrequests.set_timeout_for_thread(10.0, time())
-            r = poolrequests.get(image_url, timeout=10.0, allow_redirects=True, headers={
+            network.set_timeout_for_thread(10.0, time())
+            r = network.get(image_url, timeout=10.0, allow_redirects=True, headers={
                 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:84.0) Gecko/20100101 Firefox/84.0',
                 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
                 'Accept-Language': 'en-US;q=0.5,en;q=0.3',
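For orientation: searx.network keeps the requests-style module-level helpers that
poolrequests exposed, so call sites only swap the import. A minimal sketch of the
calling pattern above, assuming only the signatures visible in this diff (URL and
headers shortened):

    from time import time
    from searx import network

    # give the current thread a deadline: 10 seconds counted from "now"
    network.set_timeout_for_thread(10.0, time())

    # requests-like helper; redirects are followed, the per-call timeout applies
    r = network.get('https://example.org/image.png',
                    timeout=10.0,
                    allow_redirects=True,
                    headers={'User-Agent': 'Mozilla/5.0 ...'})
    print(r.status_code)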
@@ -6,7 +6,7 @@ import asyncio
 
 import httpx
 
-import searx.poolrequests as poolrequests
+import searx.network
 from searx.engines import settings
 from searx import logger
 from searx.utils import gen_useragent
@@ -64,10 +64,6 @@ class OnlineProcessor(EngineProcessor):
             auth=params['auth']
         )
 
-        # setting engine based proxies
-        if hasattr(self.engine, 'proxies'):
-            request_args['proxies'] = poolrequests.get_proxies(self.engine.proxies)
-
         # max_redirects
         max_redirects = params.get('max_redirects')
         if max_redirects:
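The four deleted lines were the old per-request proxy plumbing: poolrequests.get_proxies()
turned an engine's proxies attribute into request arguments on every call. With networks,
proxy settings belong to the engine's network configuration instead, so the request path
no longer handles them. As an assumed example (engine name and proxy URL illustrative):

    engines:
      - name: example engine
        network:
          proxies: socks5://127.0.0.1:9050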
@@ -85,9 +81,9 @@ class OnlineProcessor(EngineProcessor):
 
         # specific type of request (GET or POST)
         if params['method'] == 'GET':
-            req = poolrequests.get
+            req = searx.network.get
         else:
-            req = poolrequests.post
+            req = searx.network.post
 
         request_args['data'] = params['data']
 
@@ -128,11 +124,11 @@ class OnlineProcessor(EngineProcessor):
 
     def search(self, query, params, result_container, start_time, timeout_limit):
         # set timeout for all HTTP requests
-        poolrequests.set_timeout_for_thread(timeout_limit, start_time=start_time)
+        searx.network.set_timeout_for_thread(timeout_limit, start_time=start_time)
         # reset the HTTP total time
-        poolrequests.reset_time_for_thread()
-        # enable HTTP only if explicitly enabled
-        poolrequests.set_enable_http_protocol(self.engine.enable_http)
+        searx.network.reset_time_for_thread()
+        # set the network
+        searx.network.set_context_network_name(self.engine_name)
 
         # suppose everything will be alright
         http_exception = False
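This hunk shows the per-thread lifecycle the network module expects: set the deadline,
reset the HTTP time accumulator, then bind the thread to the engine's named network. A
condensed sketch, assuming only the functions visible in this diff (the wrapper function
itself is hypothetical):

    import searx.network

    def run_engine_search(engine_name, url, timeout_limit, start_time):
        # every HTTP call on this thread shares one deadline
        searx.network.set_timeout_for_thread(timeout_limit, start_time=start_time)
        # zero the per-thread accumulator of time spent in HTTP calls
        searx.network.reset_time_for_thread()
        # route this thread's requests through the engine's named network
        searx.network.set_context_network_name(engine_name)

        response = searx.network.get(url)

        # the accumulator is what gets reported as page_load_time below
        return response, searx.network.get_time_for_thread()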
@@ -149,7 +145,7 @@ class OnlineProcessor(EngineProcessor):
 
                 # update engine time when there is no exception
                 engine_time = time() - start_time
-                page_load_time = poolrequests.get_time_for_thread()
+                page_load_time = searx.network.get_time_for_thread()
                 result_container.add_timing(self.engine_name, engine_time, page_load_time)
                 with threading.RLock():
                     self.engine.stats['engine_time'] += engine_time
@@ -162,7 +158,7 @@ class OnlineProcessor(EngineProcessor):
 
             # Timing
             engine_time = time() - start_time
-            page_load_time = poolrequests.get_time_for_thread()
+            page_load_time = searx.network.get_time_for_thread()
             result_container.add_timing(self.engine_name, engine_time, page_load_time)
 
             # Record the errors