3 Commits

Author SHA1 Message Date
Markus Heiser
2313b972a3 [fix] engines: base URL can be a list or a string, but it's not None!
The code injection and monkey patching examine the names in the module of the
engine; if a variable there does not start with an underscore and has the value
None, then this variable needs to be configured. This outdated concept does not
fit engines that may have multiple URLs — at least not as long as the value of
the base URL (list) is None.

The default is now an empty list instead of None

Signed-off-by: Markus Heiser <markus.heiser@darmarit.de>
2025-11-25 06:25:45 +01:00
Markus Heiser
989b49335c [fix] engines initialization - if engine load fails, set to inactive
- if engine load fails, set the engine to inactive
- don't load an engine when the config says it's inactive

Signed-off-by: Markus Heiser <markus.heiser@darmarit.de>
2025-11-25 06:25:45 +01:00
Markus Heiser
3f30831640 [fix] don't raise fatal exception when engine isn't available
When wikidata or any other engine with an init method (that is active!) raises
an exception in its init method, the engine is never registered.

[1] https://github.com/searxng/searxng/issues/5456#issuecomment-3567790287

Closes: https://github.com/searxng/searxng/issues/5456
Signed-off-by: Markus Heiser <markus.heiser@darmarit.de>
2025-11-25 06:25:45 +01:00
8 changed files with 22 additions and 12 deletions

View File

@@ -270,7 +270,14 @@ def load_engines(engine_list: list[dict[str, t.Any]]):
categories.clear()
categories['general'] = []
for engine_data in engine_list:
if engine_data.get("inactive") is True:
continue
engine = load_engine(engine_data)
if engine:
register_engine(engine)
else:
# if an engine can't be loaded (if for example the engine is missing
# tor or some other requirements) its set to inactive!
logger.error("loading engine %s failed: set engine to inactive!", engine_data.get("name", "???"))
engine_data["inactive"] = True
return engines

View File

@@ -31,7 +31,7 @@ paging = True
time_range_support = True
# base_url can be overwritten by a list of URLs in the settings.yml
base_url: list | str = []
base_url: list[str] | str = []
def init(_):

View File

@@ -72,7 +72,7 @@ categories = []
paging = True
# search-url
backend_url: list[str] | str | None = None
backend_url: list[str] | str = []
"""Piped-Backend_: The core component behind Piped. The value is an URL or a
list of URLs. In the latter case instance will be selected randomly. For a
complete list of official instances see Piped-Instances (`JSON

View File

@@ -20,7 +20,7 @@ categories = ['images']
# Search URL
base_url = "https://www.pixiv.net/ajax/search/illustrations"
pixiv_image_proxies: list = []
pixiv_image_proxies: list[str] = []
def request(query, params):

View File

@@ -96,7 +96,7 @@ search_type = 'text'
``video`` are not yet implemented (Pull-Requests are welcome).
"""
base_url: list[str] | str | None = None
base_url: list[str] | str = []
"""The value is an URL or a list of URLs. In the latter case instance will be
selected randomly.
"""

View File

@@ -22,7 +22,7 @@ from searx.network import initialize as initialize_network, check_network_config
from searx.results import ResultContainer
from searx.search.checker import initialize as initialize_checker
from searx.search.processors import PROCESSORS
from searx.search.processors.abstract import RequestParams
if t.TYPE_CHECKING:
from .models import SearchQuery
@@ -79,16 +79,20 @@ class Search:
return bool(results)
# do search-request
def _get_requests(self) -> tuple[list[tuple[str, str, dict[str, t.Any]]], int]:
def _get_requests(self) -> tuple[list[tuple[str, str, RequestParams]], float]:
# init vars
requests: list[tuple[str, str, dict[str, t.Any]]] = []
requests: list[tuple[str, str, RequestParams]] = []
# max of all selected engine timeout
default_timeout = 0
# start search-request for all selected engines
for engineref in self.search_query.engineref_list:
processor = PROCESSORS[engineref.name]
processor = PROCESSORS.get(engineref.name)
if not processor:
# engine does not exists; not yet or the 'init' method of the
# engine has been failed and the engine has not been registered.
continue
# stop the request now if the engine is suspend
if processor.extend_container_if_suspended(self.result_container):
@@ -133,7 +137,7 @@ class Search:
return requests, actual_timeout
def search_multiple_requests(self, requests: list[tuple[str, str, dict[str, t.Any]]]):
def search_multiple_requests(self, requests: list[tuple[str, str, RequestParams]]):
# pylint: disable=protected-access
search_id = str(uuid4())

View File

@@ -51,7 +51,6 @@ class ProcessorMap(dict[str, EngineProcessor]):
eng_name: str = eng_settings["name"]
if eng_settings.get("inactive", False) is True:
logger.info("Engine of name '%s' is inactive.", eng_name)
continue
eng_obj = engines.engines.get(eng_name)

View File

@@ -61,7 +61,7 @@ class TestEnginesInit(SearxTestCase):
with self.assertLogs('searx.engines', level='ERROR') as cm: # pylint: disable=invalid-name
engines.load_engines(engine_list)
self.assertEqual(len(engines.engines), 0)
self.assertEqual(cm.output, ['ERROR:searx.engines:An engine does not have a "name" field'])
self.assertEqual(cm.output[0], 'ERROR:searx.engines:An engine does not have a "name" field')
def test_missing_engine_field(self):
settings['outgoing']['using_tor_proxy'] = False
@@ -72,5 +72,5 @@ class TestEnginesInit(SearxTestCase):
engines.load_engines(engine_list)
self.assertEqual(len(engines.engines), 0)
self.assertEqual(
cm.output, ['ERROR:searx.engines:The "engine" field is missing for the engine named "engine2"']
cm.output[0], 'ERROR:searx.engines:The "engine" field is missing for the engine named "engine2"'
)