mirror of https://github.com/searxng/searxng.git
[refactor] unit tests to utilize parameterized and break down monolithic tests
- For tests which perform the same arrange/act/assert pattern but with different data, the data portion has been moved into ``parameterized.expand`` fields.
- Monolithic tests which performed multiple arrange/act/asserts have been broken up into separate unit tests.
- Where possible, generic assert statements have been changed to more concise asserts (e.g. ``assertIsNone``).

This work is ultimately focused on creating smaller and more concise tests. While parameterized may make it easier to add new configurations to existing tests, that is just a beneficial side effect. The main benefit is that smaller tests are easier to reason about, which means they are easier to debug when they start failing. This improves the developer experience when working out what went wrong while refactoring the project.

The total number of tests went from 192 to 259; that is, the larger tests were broken apart into 69 more concise ones.
This commit is contained in:
parent 042c7190e6
commit 44a06190bb
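As a minimal sketch of the pattern described in the commit message (the test class and data below are illustrative examples only, not code from the SearXNG test suite):

import unittest

from parameterized import parameterized


class ExampleSquareTest(unittest.TestCase):
    # Before: one monolithic test looping over every input; a single failure
    # aborts the loop and the report does not say which input broke.
    #
    #     def test_square(self):
    #         for value, expected in ((2, 4), (3, 9), (-4, 16)):
    #             self.assertEqual(value * value, expected)

    # After: the data moves into parameterized.expand and each tuple becomes
    # its own generated test case, reported and debugged independently.
    @parameterized.expand(
        [
            (2, 4),
            (3, 9),
            (-4, 16),
        ]
    )
    def test_square(self, value, expected):
        self.assertEqual(value * value, expected)


if __name__ == '__main__':
    unittest.main()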
@@ -1,6 +1,7 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
 # pylint: disable=invalid-name, missing-module-docstring, missing-class-docstring
 
+from __future__ import annotations
 from abc import abstractmethod, ABC
 import re
 
@@ -258,7 +259,7 @@ class RawTextQuery:
         FeelingLuckyParser,  # redirect to the first link in the results list
     ]
 
-    def __init__(self, query, disabled_engines):
+    def __init__(self, query: str, disabled_engines: list):
         assert isinstance(query, str)
         # input parameters
         self.query = query
@@ -2,15 +2,16 @@
 # pylint: disable=missing-module-docstring
 
 from mock import Mock
+from parameterized import parameterized
 
 from searx.answerers import answerers
 from tests import SearxTestCase
 
 
 class AnswererTest(SearxTestCase):  # pylint: disable=missing-class-docstring
-    def test_unicode_input(self):
+    @parameterized.expand(answerers)
+    def test_unicode_input(self, answerer):
         query = Mock()
         unicode_payload = 'árvíztűrő tükörfúrógép'
-        for answerer in answerers:
-            query.query = '{} {}'.format(answerer.keywords[0], unicode_payload)
-            self.assertTrue(isinstance(answerer.answer(query), list))
+        query.query = '{} {}'.format(answerer.keywords[0], unicode_payload)
+        self.assertIsInstance(answerer.answer(query), list)
@@ -1,42 +1,36 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
 # pylint: disable=missing-module-docstring
 
+from parameterized import parameterized
 from tests import SearxTestCase
 import searx.exceptions
 from searx import get_setting
 
 
 class TestExceptions(SearxTestCase):  # pylint: disable=missing-class-docstring
-    def test_default_suspend_time(self):
-        with self.assertRaises(searx.exceptions.SearxEngineAccessDeniedException) as e:
-            raise searx.exceptions.SearxEngineAccessDeniedException()
+    @parameterized.expand(
+        [
+            searx.exceptions.SearxEngineAccessDeniedException,
+            searx.exceptions.SearxEngineCaptchaException,
+            searx.exceptions.SearxEngineTooManyRequestsException,
+        ]
+    )
+    def test_default_suspend_time(self, exception):
+        with self.assertRaises(exception) as e:
+            raise exception()
         self.assertEqual(
             e.exception.suspended_time,
-            get_setting(searx.exceptions.SearxEngineAccessDeniedException.SUSPEND_TIME_SETTING),
+            get_setting(exception.SUSPEND_TIME_SETTING),
         )
 
-        with self.assertRaises(searx.exceptions.SearxEngineCaptchaException) as e:
-            raise searx.exceptions.SearxEngineCaptchaException()
-        self.assertEqual(
-            e.exception.suspended_time, get_setting(searx.exceptions.SearxEngineCaptchaException.SUSPEND_TIME_SETTING)
-        )
-
-        with self.assertRaises(searx.exceptions.SearxEngineTooManyRequestsException) as e:
-            raise searx.exceptions.SearxEngineTooManyRequestsException()
-        self.assertEqual(
-            e.exception.suspended_time,
-            get_setting(searx.exceptions.SearxEngineTooManyRequestsException.SUSPEND_TIME_SETTING),
-        )
-
-    def test_custom_suspend_time(self):
-        with self.assertRaises(searx.exceptions.SearxEngineAccessDeniedException) as e:
-            raise searx.exceptions.SearxEngineAccessDeniedException(suspended_time=1337)
+    @parameterized.expand(
+        [
+            searx.exceptions.SearxEngineAccessDeniedException,
+            searx.exceptions.SearxEngineCaptchaException,
+            searx.exceptions.SearxEngineTooManyRequestsException,
+        ]
+    )
+    def test_custom_suspend_time(self, exception):
+        with self.assertRaises(exception) as e:
+            raise exception(suspended_time=1337)
         self.assertEqual(e.exception.suspended_time, 1337)
-
-        with self.assertRaises(searx.exceptions.SearxEngineCaptchaException) as e:
-            raise searx.exceptions.SearxEngineCaptchaException(suspended_time=1409)
-        self.assertEqual(e.exception.suspended_time, 1409)
-
-        with self.assertRaises(searx.exceptions.SearxEngineTooManyRequestsException) as e:
-            raise searx.exceptions.SearxEngineTooManyRequestsException(suspended_time=1543)
-        self.assertEqual(e.exception.suspended_time, 1543)
@@ -90,7 +90,7 @@ class TestGetBangDefinitionAndAutocomplete(SearxTestCase):  # pylint:disable=mis
 
     def test_partial(self):
         bang_definition, new_autocomplete = get_bang_definition_and_autocomplete('examp', external_bangs_db=TEST_DB)
-        self.assertEqual(bang_definition, None)
+        self.assertIsNone(bang_definition)
         self.assertEqual(new_autocomplete, ['example'])
 
     def test_partial2(self):
@@ -100,7 +100,7 @@ class TestGetBangDefinitionAndAutocomplete(SearxTestCase):  # pylint:disable=mis
 
     def test_error(self):
         bang_definition, new_autocomplete = get_bang_definition_and_autocomplete('error', external_bangs_db=TEST_DB)
-        self.assertEqual(bang_definition, None)
+        self.assertIsNone(bang_definition)
         self.assertEqual(new_autocomplete, [])
 
     def test_actual_data(self):
@@ -112,7 +112,7 @@ class TestGetBangDefinitionAndAutocomplete(SearxTestCase):  # pylint:disable=mis
 class TestExternalBangJson(SearxTestCase):  # pylint:disable=missing-class-docstring
     def test_no_external_bang_query(self):
         result = get_bang_url(SearchQuery('test', engineref_list=[EngineRef('wikipedia', 'general')]))
-        self.assertEqual(result, None)
+        self.assertIsNone(result)
 
     def test_get_bang_url(self):
         url = get_bang_url(SearchQuery('test', engineref_list=[], external_bang='example'), external_bangs_db=TEST_DB)
@@ -2,6 +2,8 @@
 # pylint: disable=missing-module-docstring
 """Test some code from module :py:obj:`searx.locales`"""
 
+from __future__ import annotations
+from parameterized import parameterized
 from searx import locales
 from searx.sxng_locales import sxng_locales
 from tests import SearxTestCase
@@ -13,98 +15,104 @@ class TestLocales(SearxTestCase):
     - :py:obj:`searx.locales.match_locale`
     """
 
-    def test_match_locale(self):
-        locale_tag_list = [x[0] for x in sxng_locales]
+    @classmethod
+    def setUpClass(cls):
+        cls.locale_tag_list = [x[0] for x in sxng_locales]
 
+    @parameterized.expand(
+        [
+            'de',
+            'fr',
+            'zh',
+        ]
+    )
+    def test_locale_languages(self, locale: str):
         # Test SearXNG search languages
+        self.assertEqual(locales.match_locale(locale, self.locale_tag_list), locale)
 
-        self.assertEqual(locales.match_locale('de', locale_tag_list), 'de')
-        self.assertEqual(locales.match_locale('fr', locale_tag_list), 'fr')
-        self.assertEqual(locales.match_locale('zh', locale_tag_list), 'zh')
+    @parameterized.expand(
+        [
+            ('ca-es', 'ca-ES'),
+            ('de-at', 'de-AT'),
+            ('de-de', 'de-DE'),
+            ('en-UK', 'en-GB'),
+            ('fr-be', 'fr-BE'),
+            ('fr-be', 'fr-BE'),
+            ('fr-ca', 'fr-CA'),
+            ('fr-ch', 'fr-CH'),
+            ('zh-cn', 'zh-CN'),
+            ('zh-tw', 'zh-TW'),
+            ('zh-hk', 'zh-HK'),
+        ]
+    )
+    def test_match_region(self, locale: str, expected_locale: str):
         # Test SearXNG search regions
+        self.assertEqual(locales.match_locale(locale, self.locale_tag_list), expected_locale)
 
-        self.assertEqual(locales.match_locale('ca-es', locale_tag_list), 'ca-ES')
-        self.assertEqual(locales.match_locale('de-at', locale_tag_list), 'de-AT')
-        self.assertEqual(locales.match_locale('de-de', locale_tag_list), 'de-DE')
-        self.assertEqual(locales.match_locale('en-UK', locale_tag_list), 'en-GB')
-        self.assertEqual(locales.match_locale('fr-be', locale_tag_list), 'fr-BE')
-        self.assertEqual(locales.match_locale('fr-be', locale_tag_list), 'fr-BE')
-        self.assertEqual(locales.match_locale('fr-ca', locale_tag_list), 'fr-CA')
-        self.assertEqual(locales.match_locale('fr-ch', locale_tag_list), 'fr-CH')
-        self.assertEqual(locales.match_locale('zh-cn', locale_tag_list), 'zh-CN')
-        self.assertEqual(locales.match_locale('zh-tw', locale_tag_list), 'zh-TW')
-        self.assertEqual(locales.match_locale('zh-hk', locale_tag_list), 'zh-HK')
+    @parameterized.expand(
+        [
+            ('zh-hans', 'zh-CN'),
+            ('zh-hans-cn', 'zh-CN'),
+            ('zh-hant', 'zh-TW'),
+            ('zh-hant-tw', 'zh-TW'),
+        ]
+    )
+    def test_match_lang_script_code(self, locale: str, expected_locale: str):
 
         # Test language script code
+        self.assertEqual(locales.match_locale(locale, self.locale_tag_list), expected_locale)
 
-        self.assertEqual(locales.match_locale('zh-hans', locale_tag_list), 'zh-CN')
-        self.assertEqual(locales.match_locale('zh-hans-cn', locale_tag_list), 'zh-CN')
-        self.assertEqual(locales.match_locale('zh-hant', locale_tag_list), 'zh-TW')
-        self.assertEqual(locales.match_locale('zh-hant-tw', locale_tag_list), 'zh-TW')
+    def test_locale_de(self):
+        self.assertEqual(locales.match_locale('de', ['de-CH', 'de-DE']), 'de-DE')
+        self.assertEqual(locales.match_locale('de', ['de-CH', 'de-DE']), 'de-DE')
 
-        # Test individual locale lists
-
+    def test_locale_es(self):
         self.assertEqual(locales.match_locale('es', [], fallback='fallback'), 'fallback')
-        self.assertEqual(locales.match_locale('de', ['de-CH', 'de-DE']), 'de-DE')
-        self.assertEqual(locales.match_locale('de', ['de-CH', 'de-DE']), 'de-DE')
         self.assertEqual(locales.match_locale('es', ['ES']), 'ES')
         self.assertEqual(locales.match_locale('es', ['es-AR', 'es-ES', 'es-MX']), 'es-ES')
         self.assertEqual(locales.match_locale('es-AR', ['es-AR', 'es-ES', 'es-MX']), 'es-AR')
         self.assertEqual(locales.match_locale('es-CO', ['es-AR', 'es-ES']), 'es-ES')
         self.assertEqual(locales.match_locale('es-CO', ['es-AR']), 'es-AR')
 
-        # Tests from the commit message of 9ae409a05a
-
-        # Assumption:
-        # A. When a user selects a language the results should be optimized according to
-        # the selected language.
-        #
-        # B. When user selects a language and a territory the results should be
-        # optimized with first priority on territory and second on language.
-
-        # Assume we have an engine that supports the following locales:
-        locale_tag_list = ['zh-CN', 'zh-HK', 'nl-BE', 'fr-CA']
-
-        # Examples (Assumption A.)
-        # ------------------------
-
-        # A user selects region 'zh-TW' which should end in zh_HK.
-        # hint: CN is 'Hans' and HK ('Hant') fits better to TW ('Hant')
-        self.assertEqual(locales.match_locale('zh-TW', locale_tag_list), 'zh-HK')
-
-        # A user selects only the language 'zh' which should end in CN
-        self.assertEqual(locales.match_locale('zh', locale_tag_list), 'zh-CN')
-
-        # A user selects only the language 'fr' which should end in fr_CA
-        self.assertEqual(locales.match_locale('fr', locale_tag_list), 'fr-CA')
-
-        # The difference in priority on the territory is best shown with a
-        # engine that supports the following locales:
-        locale_tag_list = ['fr-FR', 'fr-CA', 'en-GB', 'nl-BE']
-
-        # A user selects only a language
-        self.assertEqual(locales.match_locale('en', locale_tag_list), 'en-GB')
-
-        # hint: the engine supports fr_FR and fr_CA since no territory is given,
-        # fr_FR takes priority ..
-        self.assertEqual(locales.match_locale('fr', locale_tag_list), 'fr-FR')
-
-        # Examples (Assumption B.)
-        # ------------------------
-
-        # A user selects region 'fr-BE' which should end in nl-BE
-        self.assertEqual(locales.match_locale('fr-BE', locale_tag_list), 'nl-BE')
-
-        # If the user selects a language and there are two locales like the
-        # following:
-
-        locale_tag_list = ['fr-BE', 'fr-CH']
-
-        # The get_engine_locale selects the locale by looking at the "population
-        # percent" and this percentage has an higher amount in BE (68.%)
-        # compared to CH (21%)
-
-        self.assertEqual(locales.match_locale('fr', locale_tag_list), 'fr-BE')
+    @parameterized.expand(
+        [
+            ('zh-TW', ['zh-HK'], 'zh-HK'),  # A user selects region 'zh-TW' which should end in zh_HK.
+            # hint: CN is 'Hans' and HK ('Hant') fits better to TW ('Hant')
+            ('zh', ['zh-CN'], 'zh-CN'),  # A user selects only the language 'zh' which should end in CN
+            ('fr', ['fr-CA'], 'fr-CA'),  # A user selects only the language 'fr' which should end in fr_CA
+            ('nl', ['nl-BE'], 'nl-BE'),  # A user selects only the language 'fr' which should end in fr_CA
+            # Territory tests
+            ('en', ['en-GB'], 'en-GB'),  # A user selects only a language
+            (
+                'fr',
+                ['fr-FR', 'fr-CA'],
+                'fr-FR',
+            ),  # the engine supports fr_FR and fr_CA since no territory is given, fr_FR takes priority
+        ]
+    )
+    def test_locale_optimized_selected(self, locale: str, locale_list: list[str], expected_locale: str):
+        """
+        Tests from the commit message of 9ae409a05a
+
+        Assumption:
+        A. When a user selects a language the results should be optimized according to
+        the selected language.
+        """
+        self.assertEqual(locales.match_locale(locale, locale_list), expected_locale)
+
+    @parameterized.expand(
+        [
+            ('fr-BE', ['fr-FR', 'fr-CA', 'nl-BE'], 'nl-BE'),  # A user selects region 'fr-BE' which should end in nl-BE
+            ('fr', ['fr-BE', 'fr-CH'], 'fr-BE'),  # A user selects fr with 2 locales,
+            # the get_engine_locale selects the locale by looking at the "population
+            # percent" and this percentage has an higher amount in BE (68.%)
+            # compared to CH (21%)
+        ]
+    )
+    def test_locale_optimized_territory(self, locale: str, locale_list: list[str], expected_locale: str):
+        """
+        Tests from the commit message of 9ae409a05a
+
+        B. When user selects a language and a territory the results should be
+        optimized with first priority on territory and second on language.
+        """
+        self.assertEqual(locales.match_locale(locale, locale_list), expected_locale)
@@ -2,6 +2,7 @@
 # pylint: disable=missing-module-docstring
 
 from mock import Mock
+from parameterized.parameterized import parameterized
 
 from searx import (
     plugins,
@@ -23,143 +24,125 @@ class PluginMock:  # pylint: disable=missing-class-docstring, too-few-public-met
 
 
 class PluginStoreTest(SearxTestCase):  # pylint: disable=missing-class-docstring
-    def test_PluginStore_init(self):
-        store = plugins.PluginStore()
-        self.assertTrue(isinstance(store.plugins, list) and len(store.plugins) == 0)
+    def setUp(self):
+        self.store = plugins.PluginStore()
 
-    def test_PluginStore_register(self):
-        store = plugins.PluginStore()
+    def test_init(self):
+        self.assertEqual(0, len(self.store.plugins))
+        self.assertIsInstance(self.store.plugins, list)
 
+    def test_register(self):
         testplugin = PluginMock()
-        store.register(testplugin)
+        self.store.register(testplugin)
+        self.assertEqual(1, len(self.store.plugins))
 
-        self.assertTrue(len(store.plugins) == 1)
+    def test_call_empty(self):
+        testplugin = PluginMock()
+        self.store.register(testplugin)
+        setattr(testplugin, 'asdf', Mock())
+        request = Mock()
+        self.store.call([], 'asdf', request, Mock())
+        self.assertFalse(getattr(testplugin, 'asdf').called)  # pylint: disable=E1101
 
-    def test_PluginStore_call(self):
+    def test_call_with_plugin(self):
         store = plugins.PluginStore()
         testplugin = PluginMock()
         store.register(testplugin)
         setattr(testplugin, 'asdf', Mock())
         request = Mock()
-        store.call([], 'asdf', request, Mock())
-
-        self.assertFalse(testplugin.asdf.called)  # pylint: disable=E1101
-
         store.call([testplugin], 'asdf', request, Mock())
-        self.assertTrue(testplugin.asdf.called)  # pylint: disable=E1101
+        self.assertTrue(getattr(testplugin, 'asdf').called)  # pylint: disable=E1101
 
 
-class SelfIPTest(SearxTestCase):  # pylint: disable=missing-class-docstring
-    def test_PluginStore_init(self):
+class PluginIPSelfInfo(SearxTestCase):  # pylint: disable=missing-class-docstring
+    def setUp(self):
         plugin = plugins.load_and_initialize_plugin('searx.plugins.self_info', False, (None, {}))
-        store = plugins.PluginStore()
-        store.register(plugin)
+        self.store = plugins.PluginStore()
+        self.store.register(plugin)
         cfg = limiter.get_cfg()
         botdetection.init(cfg, None)
 
-        self.assertTrue(len(store.plugins) == 1)
+    def test_plugin_store_init(self):
+        self.assertEqual(1, len(self.store.plugins))
 
-        # IP test
+    def test_ip_in_answer(self):
         request = Mock()
         request.remote_addr = '127.0.0.1'
         request.headers = {'X-Forwarded-For': '1.2.3.4, 127.0.0.1', 'X-Real-IP': '127.0.0.1'}
-        search = get_search_mock(
-            query='ip',
-            pageno=1,
-        )
-        store.call(store.plugins, 'post_search', request, search)
-        self.assertTrue('127.0.0.1' in search.result_container.answers["ip"]["answer"])
+        search = get_search_mock(query='ip', pageno=1)
+        self.store.call(self.store.plugins, 'post_search', request, search)
+        self.assertIn('127.0.0.1', search.result_container.answers["ip"]["answer"])
 
+    def test_ip_not_in_answer(self):
+        request = Mock()
+        request.remote_addr = '127.0.0.1'
+        request.headers = {'X-Forwarded-For': '1.2.3.4, 127.0.0.1', 'X-Real-IP': '127.0.0.1'}
         search = get_search_mock(query='ip', pageno=2)
-        store.call(store.plugins, 'post_search', request, search)
-        self.assertFalse('ip' in search.result_container.answers)
+        self.store.call(self.store.plugins, 'post_search', request, search)
+        self.assertNotIn('ip', search.result_container.answers)
 
-        # User agent test
+    @parameterized.expand(
+        [
+            'user-agent',
+            'What is my User-Agent?',
+        ]
+    )
+    def test_user_agent_in_answer(self, query: str):
         request = Mock(user_agent=Mock(string='Mock'))
+        search = get_search_mock(query=query, pageno=1)
+        self.store.call(self.store.plugins, 'post_search', request, search)
+        self.assertIn('Mock', search.result_container.answers["user-agent"]["answer"])
 
-        search = get_search_mock(query='user-agent', pageno=1)
-        store.call(store.plugins, 'post_search', request, search)
-        self.assertTrue('Mock' in search.result_container.answers["user-agent"]["answer"])
-
-        search = get_search_mock(query='user-agent', pageno=2)
-        store.call(store.plugins, 'post_search', request, search)
-        self.assertFalse('user-agent' in search.result_container.answers)
-
-        search = get_search_mock(query='user-agent', pageno=1)
-        store.call(store.plugins, 'post_search', request, search)
-        self.assertTrue('Mock' in search.result_container.answers["user-agent"]["answer"])
-
-        search = get_search_mock(query='user-agent', pageno=2)
-        store.call(store.plugins, 'post_search', request, search)
-        self.assertFalse('user-agent' in search.result_container.answers)
-
-        search = get_search_mock(query='What is my User-Agent?', pageno=1)
-        store.call(store.plugins, 'post_search', request, search)
-        self.assertTrue('Mock' in search.result_container.answers["user-agent"]["answer"])
-
-        search = get_search_mock(query='What is my User-Agent?', pageno=2)
-        store.call(store.plugins, 'post_search', request, search)
-        self.assertFalse('user-agent' in search.result_container.answers)
+    @parameterized.expand(
+        [
+            'user-agent',
+            'What is my User-Agent?',
+        ]
+    )
+    def test_user_agent_not_in_answer(self, query: str):
+        request = Mock(user_agent=Mock(string='Mock'))
+        search = get_search_mock(query=query, pageno=2)
+        self.store.call(self.store.plugins, 'post_search', request, search)
+        self.assertNotIn('user-agent', search.result_container.answers)
 
 
-class HashPluginTest(SearxTestCase):  # pylint: disable=missing-class-docstring
-    def test_PluginStore_init(self):
-        store = plugins.PluginStore()
+class PluginHashTest(SearxTestCase):  # pylint: disable=missing-class-docstring
+    def setUp(self):
+        self.store = plugins.PluginStore()
         plugin = plugins.load_and_initialize_plugin('searx.plugins.hash_plugin', False, (None, {}))
-        store.register(plugin)
+        self.store.register(plugin)
 
-        self.assertTrue(len(store.plugins) == 1)
+    def test_plugin_store_init(self):
+        self.assertEqual(1, len(self.store.plugins))
 
+    @parameterized.expand(
+        [
+            ('md5 test', 'md5 hash digest: 098f6bcd4621d373cade4e832627b4f6'),
+            ('sha1 test', 'sha1 hash digest: a94a8fe5ccb19ba61c4c0873d391e987982fbbd3'),
+            ('sha224 test', 'sha224 hash digest: 90a3ed9e32b2aaf4c61c410eb925426119e1a9dc53d4286ade99a809'),
+            ('sha256 test', 'sha256 hash digest: 9f86d081884c7d659a2feaa0c55ad015a3bf4f1b2b0b822cd15d6c15b0f00a08'),
+            (
+                'sha384 test',
+                'sha384 hash digest: 768412320f7b0aa5812fce428dc4706b3c'
+                'ae50e02a64caa16a782249bfe8efc4b7ef1ccb126255d196047dfedf1'
+                '7a0a9',
+            ),
+            (
+                'sha512 test',
+                'sha512 hash digest: ee26b0dd4af7e749aa1a8ee3c10ae9923f6'
+                '18980772e473f8819a5d4940e0db27ac185f8a0e1d5f84f88bc887fd67b143732c304cc5'
+                'fa9ad8e6f57f50028a8ff',
+            ),
+        ]
+    )
+    def test_hash_digest_new(self, query: str, hash_str: str):
         request = Mock(remote_addr='127.0.0.1')
+        search = get_search_mock(query=query, pageno=1)
+        self.store.call(self.store.plugins, 'post_search', request, search)
+        self.assertIn(hash_str, search.result_container.answers['hash']['answer'])
 
-        # MD5
-        search = get_search_mock(query='md5 test', pageno=1)
-        store.call(store.plugins, 'post_search', request, search)
-        self.assertTrue(
-            'md5 hash digest: 098f6bcd4621d373cade4e832627b4f6' in search.result_container.answers['hash']['answer']
-        )
+    def test_md5_bytes_no_answer(self):
+        request = Mock(remote_addr='127.0.0.1')
 
         search = get_search_mock(query=b'md5 test', pageno=2)
-        store.call(store.plugins, 'post_search', request, search)
-        self.assertFalse('hash' in search.result_container.answers)
-
-        # SHA1
-        search = get_search_mock(query='sha1 test', pageno=1)
-        store.call(store.plugins, 'post_search', request, search)
-        self.assertTrue(
-            'sha1 hash digest: a94a8fe5ccb19ba61c4c0873d391e9879'
-            '82fbbd3' in search.result_container.answers['hash']['answer']
-        )
-
-        # SHA224
-        search = get_search_mock(query='sha224 test', pageno=1)
-        store.call(store.plugins, 'post_search', request, search)
-        self.assertTrue(
-            'sha224 hash digest: 90a3ed9e32b2aaf4c61c410eb9254261'
-            '19e1a9dc53d4286ade99a809' in search.result_container.answers['hash']['answer']
-        )
-
-        # SHA256
-        search = get_search_mock(query='sha256 test', pageno=1)
-        store.call(store.plugins, 'post_search', request, search)
-        self.assertTrue(
-            'sha256 hash digest: 9f86d081884c7d659a2feaa0c55ad015a'
-            '3bf4f1b2b0b822cd15d6c15b0f00a08' in search.result_container.answers['hash']['answer']
-        )
-
-        # SHA384
-        search = get_search_mock(query='sha384 test', pageno=1)
-        store.call(store.plugins, 'post_search', request, search)
-        self.assertTrue(
-            'sha384 hash digest: 768412320f7b0aa5812fce428dc4706b3c'
-            'ae50e02a64caa16a782249bfe8efc4b7ef1ccb126255d196047dfedf1'
-            '7a0a9' in search.result_container.answers['hash']['answer']
-        )
-
-        # SHA512
-        search = get_search_mock(query='sha512 test', pageno=1)
-        store.call(store.plugins, 'post_search', request, search)
-        self.assertTrue(
-            'sha512 hash digest: ee26b0dd4af7e749aa1a8ee3c10ae9923f6'
-            '18980772e473f8819a5d4940e0db27ac185f8a0e1d5f84f88bc887fd67b143732c304cc5'
-            'fa9ad8e6f57f50028a8ff' in search.result_container.answers['hash']['answer']
-        )
+        self.store.call(self.store.plugins, 'post_search', request, search)
+        self.assertNotIn('hash', search.result_container.answers)
@@ -1,6 +1,7 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
 # pylint: disable=missing-module-docstring, invalid-name
 
+from tests import SearxTestCase
 from searx.locales import locales_initialize
 from searx.preferences import (
     EnumStringSetting,
@@ -10,12 +11,12 @@ from searx.preferences import (
     PluginsSetting,
     ValidationException,
 )
-from tests import SearxTestCase
+from searx.plugins import Plugin
 
 locales_initialize()
 
 
-class PluginStub:  # pylint: disable=missing-class-docstring, too-few-public-methods
+class PluginStub(Plugin):  # pylint: disable=missing-class-docstring, too-few-public-methods
     def __init__(self, plugin_id, default_on):
         self.id = plugin_id
         self.default_on = default_on
@@ -47,22 +48,22 @@ class TestSettings(SearxTestCase):  # pylint: disable=missing-class-docstring
 
     def test_enum_setting_invalid_default_value(self):
         with self.assertRaises(ValidationException):
-            EnumStringSetting(3, choices=[0, 1, 2])
+            EnumStringSetting('3', choices=['0', '1', '2'])
 
     def test_enum_setting_invalid_choice(self):
-        setting = EnumStringSetting(0, choices=[0, 1, 2])
+        setting = EnumStringSetting('0', choices=['0', '1', '2'])
         with self.assertRaises(ValidationException):
-            setting.parse(3)
+            setting.parse('3')
 
     def test_enum_setting_valid_default(self):
-        setting = EnumStringSetting(3, choices=[1, 2, 3])
-        self.assertEqual(setting.get_value(), 3)
+        setting = EnumStringSetting('3', choices=['1', '2', '3'])
+        self.assertEqual(setting.get_value(), '3')
 
     def test_enum_setting_valid_choice(self):
-        setting = EnumStringSetting(3, choices=[1, 2, 3])
-        self.assertEqual(setting.get_value(), 3)
-        setting.parse(2)
-        self.assertEqual(setting.get_value(), 2)
+        setting = EnumStringSetting('3', choices=['1', '2', '3'])
+        self.assertEqual(setting.get_value(), '3')
+        setting.parse('2')
+        self.assertEqual(setting.get_value(), '2')
 
     # multiple choice settings
@@ -1,6 +1,7 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
 # pylint: disable=missing-module-docstring
 
+from parameterized.parameterized import parameterized
 from searx.engines import load_engines
 from searx.query import RawTextQuery
 from tests import SearxTestCase
@@ -129,49 +130,32 @@ class TestLanguageParser(SearxTestCase):  # pylint:disable=missing-class-docstri
         query = RawTextQuery(query_text, [])
         self.assertEqual(query.autocomplete_list, [":en", ":en_us", ":english", ":united_kingdom"])
 
-    def test_autocomplete(self):
-        query = RawTextQuery(':englis', [])
-        self.assertEqual(query.autocomplete_list, [":english"])
-
-        query = RawTextQuery(':deutschla', [])
-        self.assertEqual(query.autocomplete_list, [":deutschland"])
-
-        query = RawTextQuery(':new_zea', [])
-        self.assertEqual(query.autocomplete_list, [":new_zealand"])
-
-        query = RawTextQuery(':hu-H', [])
-        self.assertEqual(query.autocomplete_list, [":hu-hu"])
-
-        query = RawTextQuery(':zh-', [])
-        self.assertEqual(query.autocomplete_list, [':zh-cn', ':zh-hk', ':zh-tw'])
+    @parameterized.expand(
+        [
+            (':englis', [":english"]),
+            (':deutschla', [":deutschland"]),
+            (':new_zea', [":new_zealand"]),
+            (':zh-', [':zh-cn', ':zh-hk', ':zh-tw']),
+        ]
+    )
+    def test_autocomplete(self, query: str, autocomplete_list: list):
+        query = RawTextQuery(query, [])
+        self.assertEqual(query.autocomplete_list, autocomplete_list)
 
 
 class TestTimeoutParser(SearxTestCase):  # pylint:disable=missing-class-docstring
-    def test_timeout_below100(self):
-        query_text = '<3 the query'
+    @parameterized.expand(
+        [
+            ('<3 the query', 3),
+            ('<350 the query', 0.35),
+            ('<3500 the query', 3.5),
+        ]
+    )
+    def test_timeout_limit(self, query_text: str, timeout_limit: float):
         query = RawTextQuery(query_text, [])
 
         self.assertEqual(query.getFullQuery(), query_text)
         self.assertEqual(len(query.query_parts), 1)
-        self.assertEqual(query.timeout_limit, 3)
-        self.assertFalse(query.specific)
-
-    def test_timeout_above100(self):
-        query_text = '<350 the query'
-        query = RawTextQuery(query_text, [])
-
-        self.assertEqual(query.getFullQuery(), query_text)
-        self.assertEqual(len(query.query_parts), 1)
-        self.assertEqual(query.timeout_limit, 0.35)
-        self.assertFalse(query.specific)
-
-    def test_timeout_above1000(self):
-        query_text = '<3500 the query'
-        query = RawTextQuery(query_text, [])
-
-        self.assertEqual(query.getFullQuery(), query_text)
-        self.assertEqual(len(query.query_parts), 1)
-        self.assertEqual(query.timeout_limit, 3.5)
+        self.assertEqual(query.timeout_limit, timeout_limit)
         self.assertFalse(query.specific)
 
     def test_timeout_invalid(self):
@@ -182,7 +166,7 @@ class TestTimeoutParser(SearxTestCase):  # pylint:disable=missing-class-docstrin
         self.assertEqual(query.getFullQuery(), query_text)
         self.assertEqual(len(query.query_parts), 0)
         self.assertEqual(query.getQuery(), query_text)
-        self.assertEqual(query.timeout_limit, None)
+        self.assertIsNone(query.timeout_limit)
         self.assertFalse(query.specific)
 
     def test_timeout_autocomplete(self):
@@ -193,7 +177,7 @@ class TestTimeoutParser(SearxTestCase):  # pylint:disable=missing-class-docstrin
         self.assertEqual(query.getFullQuery(), query_text)
         self.assertEqual(len(query.query_parts), 0)
         self.assertEqual(query.getQuery(), query_text)
-        self.assertEqual(query.timeout_limit, None)
+        self.assertIsNone(query.timeout_limit)
         self.assertFalse(query.specific)
         self.assertEqual(query.autocomplete_list, ['<3', '<850'])
 
@@ -212,7 +196,7 @@ class TestExternalBangParser(SearxTestCase):  # pylint:disable=missing-class-doc
         query = RawTextQuery(query_text, [])
 
         self.assertEqual(query.getFullQuery(), query_text)
-        self.assertEqual(query.external_bang, None)
+        self.assertIsNone(query.external_bang)
         self.assertFalse(query.specific)
 
     def test_external_bang_autocomplete(self):
@@ -239,23 +223,22 @@ class TestBang(SearxTestCase):  # pylint:disable=missing-class-docstring
     def tearDown(self):
         load_engines([])
 
-    def test_bang(self):
+    @parameterized.expand(SPECIFIC_BANGS)
+    def test_bang(self, bang: str):
+        with self.subTest(msg="Check bang", bang=bang):
+            query_text = TestBang.THE_QUERY + ' ' + bang
+            query = RawTextQuery(query_text, [])
 
-        for bang in TestBang.SPECIFIC_BANGS:
-            with self.subTest(msg="Check bang", bang=bang):
-                query_text = TestBang.THE_QUERY + ' ' + bang
-                query = RawTextQuery(query_text, [])
+            self.assertEqual(query.getFullQuery(), bang + ' ' + TestBang.THE_QUERY)
+            self.assertEqual(query.query_parts, [bang])
+            self.assertEqual(query.user_query_parts, TestBang.THE_QUERY.split(' '))
 
-                self.assertEqual(query.getFullQuery(), bang + ' ' + TestBang.THE_QUERY)
-                self.assertEqual(query.query_parts, [bang])
-                self.assertEqual(query.user_query_parts, TestBang.THE_QUERY.split(' '))
-
-    def test_specific(self):
-        for bang in TestBang.SPECIFIC_BANGS:
-            with self.subTest(msg="Check bang is specific", bang=bang):
-                query_text = TestBang.THE_QUERY + ' ' + bang
-                query = RawTextQuery(query_text, [])
-                self.assertTrue(query.specific)
+    @parameterized.expand(SPECIFIC_BANGS)
+    def test_specific(self, bang: str):
+        with self.subTest(msg="Check bang is specific", bang=bang):
+            query_text = TestBang.THE_QUERY + ' ' + bang
+            query = RawTextQuery(query_text, [])
+            self.assertTrue(query.specific)
 
     def test_bang_not_found(self):
         query = RawTextQuery('the query !bang_not_found', [])
@@ -110,7 +110,7 @@ class SearchTestCase(SearxTestCase):  # pylint: disable=missing-class-docstring
         search.search()
         self.assertEqual(search.actual_timeout, 10.0)
 
-    def test_external_bang(self):
+    def test_external_bang_valid(self):
         search_query = SearchQuery(
             'yes yes',
             [EngineRef(PUBLIC_ENGINE_NAME, 'general')],
@@ -124,8 +124,9 @@ class SearchTestCase(SearxTestCase):  # pylint: disable=missing-class-docstring
         search = searx.search.Search(search_query)
         results = search.search()
         # For checking if the user redirected with the youtube external bang
-        self.assertTrue(results.redirect_url is not None)
+        self.assertIsNotNone(results.redirect_url)
 
+    def test_external_bang_none(self):
         search_query = SearchQuery(
             'youtube never gonna give you up',
             [EngineRef(PUBLIC_ENGINE_NAME, 'general')],
@@ -140,4 +141,4 @@ class SearchTestCase(SearxTestCase):  # pylint: disable=missing-class-docstring
         with self.app.test_request_context('/search'):
             results = search.search()
         # This should not redirect
-        self.assertTrue(results.redirect_url is None)
+        self.assertIsNone(results.redirect_url)
@@ -6,6 +6,8 @@ from pathlib import Path
 import os
 from unittest.mock import patch
 
+from parameterized import parameterized
+
 from searx.exceptions import SearxSettingsException
 from searx import settings_loader
 from tests import SearxTestCase
@@ -31,13 +33,13 @@ class TestDefaultSettings(SearxTestCase):  # pylint: disable=missing-class-docst
         settings, msg = settings_loader.load_settings(load_user_settings=False)
         self.assertTrue(msg.startswith('load the default settings from'))
         self.assertFalse(settings['general']['debug'])
-        self.assertTrue(isinstance(settings['general']['instance_name'], str))
+        self.assertIsInstance(settings['general']['instance_name'], str)
         self.assertEqual(settings['server']['secret_key'], "ultrasecretkey")
-        self.assertTrue(isinstance(settings['server']['port'], int))
-        self.assertTrue(isinstance(settings['server']['bind_address'], str))
-        self.assertTrue(isinstance(settings['engines'], list))
-        self.assertTrue(isinstance(settings['doi_resolvers'], dict))
-        self.assertTrue(isinstance(settings['default_doi_resolver'], str))
+        self.assertIsInstance(settings['server']['port'], int)
+        self.assertIsInstance(settings['server']['bind_address'], str)
+        self.assertIsInstance(settings['engines'], list)
+        self.assertIsInstance(settings['doi_resolvers'], dict)
+        self.assertIsInstance(settings['default_doi_resolver'], str)
 
 
 class TestUserSettings(SearxTestCase):  # pylint: disable=missing-class-docstring
@@ -50,11 +52,14 @@ class TestUserSettings(SearxTestCase):  # pylint: disable=missing-class-docstrin
         with self.assertRaises(ValueError):
             self.assertFalse(settings_loader.is_use_default_settings({'use_default_settings': 0}))
 
-    def test_user_settings_not_found(self):
-        with patch.dict(os.environ, {'SEARXNG_SETTINGS_PATH': _settings("not_exists.yml")}):
-            with self.assertRaises(EnvironmentError):
-                _s, _m = settings_loader.load_settings()
-        with patch.dict(os.environ, {'SEARXNG_SETTINGS_PATH': "/folder/not/exists"}):
+    @parameterized.expand(
+        [
+            _settings("not_exists.yml"),
+            "/folder/not/exists",
+        ]
+    )
+    def test_user_settings_not_found(self, path: str):
+        with patch.dict(os.environ, {'SEARXNG_SETTINGS_PATH': path}):
             with self.assertRaises(EnvironmentError):
                 _s, _m = settings_loader.load_settings()
@@ -3,6 +3,7 @@
 
 import lxml.etree
 from lxml import html
+from parameterized.parameterized import parameterized
 
 from searx.exceptions import SearxXPathSyntaxException, SearxEngineXPathException
 from searx import utils
@@ -66,9 +67,15 @@ class TestUtils(SearxTestCase):  # pylint: disable=missing-class-docstring
         self.assertEqual(utils.extract_text(dom.xpath('boolean(//span)')), 'True')
         self.assertEqual(utils.extract_text(dom.xpath('//img/@src')), 'test.jpg')
         self.assertEqual(utils.extract_text(dom.xpath('//unexistingtag')), '')
 
+    def test_extract_text_allow_none(self):
         self.assertEqual(utils.extract_text(None, allow_none=True), None)
 
+    def test_extract_text_error_none(self):
         with self.assertRaises(ValueError):
             utils.extract_text(None)
 
+    def test_extract_text_error_empty(self):
         with self.assertRaises(ValueError):
             utils.extract_text({})
 
@@ -103,14 +110,16 @@ class TestHTMLTextExtractor(SearxTestCase):  # pylint: disable=missing-class-doc
     def test__init__(self):
        self.assertEqual(self.html_text_extractor.result, [])
 
-    def test_handle_charref(self):
-        self.html_text_extractor.handle_charref('xF')
-        self.assertIn('\x0f', self.html_text_extractor.result)
-        self.html_text_extractor.handle_charref('XF')
-        self.assertIn('\x0f', self.html_text_extractor.result)
-
-        self.html_text_extractor.handle_charref('97')
-        self.assertIn('a', self.html_text_extractor.result)
+    @parameterized.expand(
+        [
+            ('xF', '\x0f'),
+            ('XF', '\x0f'),
+            ('97', 'a'),
+        ]
+    )
+    def test_handle_charref(self, charref: str, expected: str):
+        self.html_text_extractor.handle_charref(charref)
+        self.assertIn(expected, self.html_text_extractor.result)
 
     def test_handle_entityref(self):
         entity = 'test'
@@ -191,7 +200,7 @@ class TestXPathUtils(SearxTestCase):  # pylint: disable=missing-class-docstring
         self.assertEqual(utils.eval_xpath_getindex(doc, '//i/text()', 1, default='something'), 'something')
 
         # default is None
-        self.assertEqual(utils.eval_xpath_getindex(doc, '//i/text()', 1, default=None), None)
+        self.assertIsNone(utils.eval_xpath_getindex(doc, '//i/text()', 1, default=None))
 
         # index not found
         with self.assertRaises(SearxEngineXPathException) as context:
@@ -2,52 +2,59 @@
 # pylint: disable=missing-module-docstring
 
 import mock
+from parameterized.parameterized import parameterized
 from searx import webutils
 from tests import SearxTestCase
 
 
 class TestWebUtils(SearxTestCase):  # pylint: disable=missing-class-docstring
-    def test_prettify_url(self):
-        data = (
+    @parameterized.expand(
+        [
             ('https://searx.me/', 'https://searx.me/'),
             ('https://searx.me/ű', 'https://searx.me/ű'),
             ('https://searx.me/' + (100 * 'a'), 'https://searx.me/[...]aaaaaaaaaaaaaaaaa'),
             ('https://searx.me/' + (100 * 'ű'), 'https://searx.me/[...]űűűűűűűűűűűűűűűűű'),
-        )
+        ]
+    )
+    def test_prettify_url(self, test_url: str, expected: str):
+        self.assertEqual(webutils.prettify_url(test_url, max_length=32), expected)
 
-        for test_url, expected in data:
-            self.assertEqual(webutils.prettify_url(test_url, max_length=32), expected)
+    @parameterized.expand(
+        [
+            (0, None, None),
+            (None, None, None),
+            ('', None, None),
+            (False, None, None),
+        ]
+    )
+    def test_highlight_content_none(self, content, query, expected):
+        self.assertEqual(webutils.highlight_content(content, query), expected)
 
-    def test_highlight_content(self):
-        self.assertEqual(webutils.highlight_content(0, None), None)
-        self.assertEqual(webutils.highlight_content(None, None), None)
-        self.assertEqual(webutils.highlight_content('', None), None)
-        self.assertEqual(webutils.highlight_content(False, None), None)
+    def test_highlight_content_same(self):
+        content = '<html></html>not<'
+        self.assertEqual(webutils.highlight_content(content, None), content)
 
-        contents = ['<html></html>not<']
-        for content in contents:
-            self.assertEqual(webutils.highlight_content(content, None), content)
-
-        content = 'a'
-        query = 'test'
-        self.assertEqual(webutils.highlight_content(content, query), 'a')
-        query = 'a test'
-        self.assertEqual(webutils.highlight_content(content, query), '<span class="highlight">a</span>')
-
-        # pylint: disable=line-too-long
-        data = (
+    @parameterized.expand(
+        [
+            ('test', 'a', 'a'),
+            ('a test', 'a', '<span class="highlight">a</span>'),
             ('" test "', 'a test string', 'a <span class="highlight">test</span> string'),
             ('"a"', 'this is a test string', 'this is <span class="highlight">a</span> test string'),
             (
                 'a test',
                 'this is a test string that matches entire query',
-                'this is <span class="highlight">a</span> <span class="highlight">test</span> string that matches entire query',
+                'this is <span class="highlight">a</span>'
+                ' <span class="highlight">test</span>'
+                ' string that matches entire query',
             ),
             (
                 'this a test',
                 'this is a string to test.',
                 (
-                    '<span class="highlight">this</span> is <span class="highlight">a</span> string to <span class="highlight">test</span>.'
+                    '<span class="highlight">this</span>'
+                    ' is <span class="highlight">a</span>'
+                    ' string to <span class="highlight">test</span>.'
                 ),
             ),
             (
@@ -65,9 +72,10 @@ class TestWebUtils(SearxTestCase):  # pylint: disable=missing-class-docstring
                 'a string with class.',
                 '<span class="highlight">a</span> string with <span class="highlight">class</span>.',
             ),
-        )
-        for query, content, expected in data:
-            self.assertEqual(webutils.highlight_content(content, query), expected)
+        ]
+    )
+    def test_highlight_content_equal(self, query: str, content: str, expected: str):
+        self.assertEqual(webutils.highlight_content(content, query), expected)
 
 
 class TestUnicodeWriter(SearxTestCase):  # pylint: disable=missing-class-docstring
@@ -76,7 +84,7 @@ class TestUnicodeWriter(SearxTestCase):  # pylint: disable=missing-class-docstri
 
     def test_write_row(self):
         row = [1, 2, 3]
-        self.assertEqual(self.unicode_writer.writerow(row), None)
+        self.assertIsNone(self.unicode_writer.writerow(row))
 
     def test_write_rows(self):
         self.unicode_writer.writerow = mock.MagicMock()
@@ -86,13 +94,18 @@ class TestUnicodeWriter(SearxTestCase):  # pylint: disable=missing-class-docstri
 
 
 class TestNewHmac(SearxTestCase):  # pylint: disable=missing-class-docstring
-    def test_bytes(self):
+    @parameterized.expand(
+        [
+            b'secret',
+            1,
+        ]
+    )
+    def test_attribute_error(self, secret_key):
         data = b'http://example.com'
         with self.assertRaises(AttributeError):
-            webutils.new_hmac(b'secret', data)
-
-        with self.assertRaises(AttributeError):
-            webutils.new_hmac(1, data)
+            webutils.new_hmac(secret_key, data)
 
+    def test_bytes(self):
+        data = b'http://example.com'
         res = webutils.new_hmac('secret', data)
         self.assertEqual(res, '23e2baa2404012a5cc8e4a18b4aabf0dde4cb9b56f679ddc0fd6d7c24339d819')