[mod] pylint all files with one profile / drop PYLINT_SEARXNG_DISABLE_OPTION

In the past, some files were linted with the standard profile, others with a
profile in which most of the messages were switched off, and some files were
not checked at all.

- ``PYLINT_SEARXNG_DISABLE_OPTION`` has been dropped
- the ``# lint: pylint`` marker comment is no longer needed
- the pylint tasks have been reduced from three to two

  1. ./searx/engines -> lint engines with additional builtins
  2. ./searx ./searxng_extra ./tests -> lint all other python files
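
A rough sketch of the two invocations (the exact options live in the
``manage`` script; the ``--additional-builtins`` names below are illustrative
assumptions, not taken from this commit)::

  # 1. engine modules get a few names injected at load time, hence the
  #    additional builtins (the names listed here are only examples)
  pylint --rcfile=.pylintrc \
         --additional-builtins="logger,traits,supported_languages" \
         searx/engines

  # 2. every other Python source is linted with the plain profile
  pylint --rcfile=.pylintrc searx searxng_extra tests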

Signed-off-by: Markus Heiser <markus.heiser@darmarit.de>
Markus Heiser, 2024-03-11 14:06:26 +01:00 (committed by Markus Heiser)
parent 8205f170ff
commit 542f7d0d7b
118 changed files with 261 additions and 369 deletions


@ -27,7 +27,7 @@ ignore-patterns=
#init-hook=
# Use multiple processes to speed up Pylint.
jobs=1
jobs=0
# List of plugins (as comma separated values of python modules names) to load,
# usually to register additional checkers.
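
For context: pylint's ``jobs`` option controls how many processes are used,
and ``0`` lets pylint auto-detect the number of available processors. It is
the config-file equivalent of passing ``-j 0`` on the command line::

  # same effect as jobs=0 in .pylintrc: auto-detect the CPU count
  pylint -j 0 --rcfile=.pylintrc searx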


@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: AGPL-3.0-or-later
import sys, os


@ -1,25 +0,0 @@
categories = ['general'] # optional
def request(query, params):
'''pre-request callback
params<dict>:
method : POST/GET
headers : {}
data : {} # if method == POST
url : ''
category: 'search category'
pageno : 1 # number of the requested page
'''
params['url'] = 'https://host/%s' % query
return params
def response(resp):
'''post-response callback
resp: requests response object
'''
return [{'url': '', 'title': '', 'content': ''}]

manage

@ -52,23 +52,6 @@ if [ -S "${_dev_redis_sock}" ] && [ -z "${SEARXNG_REDIS_URL}" ]; then
export SEARXNG_REDIS_URL="unix://${_dev_redis_sock}?db=0"
fi
pylint.FILES() {
# List files tagged by comment:
#
# # lint: pylint
#
# These py files are linted by test.pylint()
grep -l -r --include \*.py '^#[[:blank:]]*lint:[[:blank:]]*pylint' searx searxng_extra tests
find . -name searxng.msg
}
PYLINT_FILES=()
while IFS= read -r line; do
PYLINT_FILES+=("$line")
done <<< "$(pylint.FILES)"
YAMLLINT_FILES=()
while IFS= read -r line; do
YAMLLINT_FILES+=("$line")
@ -78,10 +61,6 @@ RST_FILES=(
'README.rst'
)
PYLINT_SEARXNG_DISABLE_OPTION="\
I,C,R,\
W0105,W0212,W0511,W0603,W0613,W0621,W0702,W0703,W1401,\
E1136"
help() {
nvm.help
cat <<EOF
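
For comparison: the removed ``PYLINT_SEARXNG_DISABLE_OPTION`` was (roughly)
handed to pylint as a ``--disable`` list for the files tagged with
``# lint: pylint``; with the single profile such exceptions now live in
``.pylintrc`` or in inline ``# pylint: disable=...`` pragmas. A sketch of what
the old, permissive run amounted to (reconstructed, not the literal call)::

  pylint --disable="I,C,R,W0105,W0212,W0511,W0603,W0613,W0621,W0702,W0703,W1401,E1136" \
         $(grep -l -r --include '*.py' '^#[[:blank:]]*lint:[[:blank:]]*pylint' searx searxng_extra tests)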


@ -1,6 +1,5 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
# pylint: disable=missing-module-docstring
# pylint: disable=missing-module-docstring, cyclic-import
import sys
import os


@ -1,25 +1,30 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# pylint: disable=missing-module-docstring
import sys
from os import listdir
from os.path import realpath, dirname, join, isdir
from searx.utils import load_module
from collections import defaultdict
from searx.utils import load_module
answerers_dir = dirname(realpath(__file__))
def load_answerers():
answerers = []
answerers = [] # pylint: disable=redefined-outer-name
for filename in listdir(answerers_dir):
if not isdir(join(answerers_dir, filename)) or filename.startswith('_'):
continue
module = load_module('answerer.py', join(answerers_dir, filename))
if not hasattr(module, 'keywords') or not isinstance(module.keywords, tuple) or not len(module.keywords):
exit(2)
if not hasattr(module, 'keywords') or not isinstance(module.keywords, tuple) or not module.keywords:
sys.exit(2)
answerers.append(module)
return answerers
def get_answerers_by_keywords(answerers):
def get_answerers_by_keywords(answerers): # pylint:disable=redefined-outer-name
by_keyword = defaultdict(list)
for answerer in answerers:
for keyword in answerer.keywords:


@ -1,3 +1,5 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
import hashlib
import random
import string


@ -1,3 +1,4 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
from functools import reduce
from operator import mul


@ -1,5 +1,4 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
"""This module implements functions needed for the autocompleter.
"""


@ -1,5 +1,4 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
"""This module implements the :origin:`searxng_msg <babel.cfg>` extractor to
extract messages from:


@ -1,5 +1,4 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
""".. _botdetection src:
Implementations used for bot detection.


@ -1,5 +1,4 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
# pylint: disable=missing-module-docstring, invalid-name
from __future__ import annotations


@ -1,5 +1,4 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
"""Configuration class :py:class:`Config` with deep-update, schema validation
and deprecated names.


@ -1,5 +1,4 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
"""
Method ``http_accept``
----------------------


@ -1,5 +1,4 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
"""
Method ``http_accept_encoding``
-------------------------------


@ -1,5 +1,4 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
"""
Method ``http_accept_language``
-------------------------------


@ -1,5 +1,4 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
"""
Method ``http_connection``
--------------------------


@ -1,5 +1,4 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
"""
Method ``http_user_agent``
--------------------------


@ -1,5 +1,4 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
""".. _botdetection.ip_limit:
Method ``ip_limit``


@ -1,5 +1,4 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
""".. _botdetection.ip_lists:
Method ``ip_lists``


@ -1,5 +1,4 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
"""
Method ``link_token``
---------------------


@ -1,5 +1,4 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
"""This module holds the *data* created by::
make data.all


@ -1,5 +1,4 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
"""Implementations of the framework for the SearXNG engines.
.. hint::


@ -1,5 +1,4 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
"""Engine's traits are fetched from the origin engines and stored in a JSON file
in the *data folder*. Most often traits are languages and region codes and
their mapping from SearXNG's representation to the representation in the origin
@ -167,7 +166,7 @@ class EngineTraits:
# - name: google italian
# engine: google
# language: it
# region: it-IT
# region: it-IT # type: ignore
traits = self.copy()


@ -37,16 +37,6 @@ iframe_src = "https://bandcamp.com/EmbeddedPlayer/{type}={result_id}/size=large/
def request(query, params):
'''pre-request callback
params<dict>:
method : POST/GET
headers : {}
data : {} # if method == POST
url : ''
category: 'search category'
pageno : 1 # number of the requested page
'''
search_path = search_string.format(query=urlencode({'q': query}), page=params['pageno'])
params['url'] = base_url + search_path
@ -54,10 +44,7 @@ def request(query, params):
def response(resp):
'''post-response callback
resp: requests response object
'''
results = []
dom = html.fromstring(resp.text)


@ -1,6 +1,6 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
"""
Duden
"""Duden
"""
import re
@ -29,15 +29,6 @@ search_url = base_url + 'suchen/dudenonline/{query}?search_api_fulltext=&page={o
def request(query, params):
'''pre-request callback
params<dict>:
method : POST/GET
headers : {}
data : {} # if method == POST
url : ''
category: 'search category'
pageno : 1 # number of the requested page
'''
offset = params['pageno'] - 1
if offset == 0:
@ -53,9 +44,6 @@ def request(query, params):
def response(resp):
'''post-response callback
resp: requests response object
'''
results = []
if resp.status_code == 404:


@ -120,8 +120,7 @@ def _get_request_id(query, params):
l = locales.get_locale(params['searxng_locale'])
# Presearch narrows down the search by region. In SearXNG when the user
# does not set a region (e.g. 'en-CA' / canada) we cannot hand over a
# region.
# does not set a region (e.g. 'en-CA' / canada) we cannot hand over a region.
# We could possibly use searx.locales.get_official_locales to determine
# in which regions this language is an official one, but then we still


@ -1,6 +1,4 @@
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
"""Exception types raised by SearXNG modules.
"""


@ -1,4 +1,5 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# pylint: disable=missing-module-docstring
from urllib.parse import quote_plus, urlparse
from searx.data import EXTERNAL_BANGS
@ -53,7 +54,7 @@ def resolve_bang_definition(bang_definition, query):
return (url, rank)
def get_bang_definition_and_autocomplete(bang, external_bangs_db=None):
def get_bang_definition_and_autocomplete(bang, external_bangs_db=None): # pylint: disable=invalid-name
if external_bangs_db is None:
external_bangs_db = EXTERNAL_BANGS


@ -1,3 +1,6 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# pylint: disable=missing-module-docstring
import math
from searx.data import EXTERNAL_URLS
@ -46,8 +49,7 @@ def get_external_url(url_id, item_id, alternative="default"):
if url_template is not None:
if item_id is not None:
return url_template.replace('$1', item_id)
else:
return url_template
return url_template
return None


@ -1,5 +1,4 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
# pylint: disable=missing-module-docstring
from urllib.parse import urlparse


@ -1,6 +1,4 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
# pyright: basic
"""Render SearXNG instance documentation.
Usage in a Flask app route:


@ -1,5 +1,4 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
"""Bot protection / IP rate limitation. The intention of rate limitation is to
limit suspicious requests from an IP. The motivation behind this is the fact
that SearXNG passes through requests from bots and is thus classified as a bot


@ -1,6 +1,4 @@
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
"""
SearXNGs locale data
=====================


@ -1,5 +1,4 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
# pylint: disable=missing-module-docstring
import typing


@ -1,3 +1,6 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# pylint: disable=missing-module-docstring, invalid-name
import typing
import inspect
from json import JSONDecodeError
@ -16,7 +19,7 @@ from searx.engines import engines
errors_per_engines = {}
class ErrorContext:
class ErrorContext: # pylint: disable=missing-class-docstring
__slots__ = (
'filename',
@ -29,7 +32,9 @@ class ErrorContext:
'secondary',
)
def __init__(self, filename, function, line_no, code, exception_classname, log_message, log_parameters, secondary):
def __init__( # pylint: disable=too-many-arguments
self, filename, function, line_no, code, exception_classname, log_message, log_parameters, secondary
):
self.filename = filename
self.function = function
self.line_no = line_no
@ -39,7 +44,7 @@ class ErrorContext:
self.log_parameters = log_parameters
self.secondary = secondary
def __eq__(self, o) -> bool:
def __eq__(self, o) -> bool: # pylint: disable=invalid-name
if not isinstance(o, ErrorContext):
return False
return (
@ -109,7 +114,7 @@ def get_request_exception_messages(
status_code = None
reason = None
hostname = None
if hasattr(exc, '_request') and exc._request is not None:
if hasattr(exc, '_request') and exc._request is not None: # pylint: disable=protected-access
# exc.request is property that raise an RuntimeException
# if exc._request is not defined.
url = exc.request.url
@ -123,7 +128,7 @@ def get_request_exception_messages(
return (status_code, reason, hostname)
def get_messages(exc, filename) -> typing.Tuple:
def get_messages(exc, filename) -> typing.Tuple: # pylint: disable=too-many-return-statements
if isinstance(exc, JSONDecodeError):
return (exc.msg,)
if isinstance(exc, TypeError):


@ -1,4 +1,5 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# pylint: disable=missing-module-docstring
import decimal
import threading
@ -11,7 +12,7 @@ __all__ = ["Histogram", "HistogramStorage", "CounterStorage"]
logger = logger.getChild('searx.metrics')
class Histogram:
class Histogram: # pylint: disable=missing-class-docstring
_slots__ = '_lock', '_size', '_sum', '_quartiles', '_count', '_width'
@ -25,11 +26,11 @@ class Histogram:
def observe(self, value):
q = int(value / self._width)
if q < 0:
"""Value below zero is ignored"""
if q < 0: # pylint: disable=consider-using-max-builtin
# Value below zero is ignored
q = 0
if q >= self._size:
"""Value above the maximum is replaced by the maximum"""
# Value above the maximum is replaced by the maximum
q = self._size - 1
with self._lock:
self._quartiles[q] += 1
@ -53,8 +54,7 @@ class Histogram:
with self._lock:
if self._count != 0:
return self._sum / self._count
else:
return 0
return 0
@property
def quartile_percentage(self):
@ -62,8 +62,7 @@ class Histogram:
with self._lock:
if self._count > 0:
return [int(q * 100 / self._count) for q in self._quartiles]
else:
return self._quartiles
return self._quartiles
@property
def quartile_percentage_map(self):
@ -75,7 +74,7 @@ class Histogram:
with self._lock:
if self._count > 0:
for y in self._quartiles:
yp = int(y * 100 / self._count)
yp = int(y * 100 / self._count) # pylint: disable=invalid-name
if yp != 0:
result[round(float(x), width_exponent)] = yp
x += width
@ -100,7 +99,7 @@ class Histogram:
return "Histogram<avg: " + str(self.average) + ", count: " + str(self._count) + ">"
class HistogramStorage:
class HistogramStorage: # pylint: disable=missing-class-docstring
__slots__ = 'measures', 'histogram_class'
@ -121,12 +120,12 @@ class HistogramStorage:
def dump(self):
logger.debug("Histograms:")
ks = sorted(self.measures.keys(), key='/'.join)
ks = sorted(self.measures.keys(), key='/'.join) # pylint: disable=invalid-name
for k in ks:
logger.debug("- %-60s %s", '|'.join(k), self.measures[k])
class CounterStorage:
class CounterStorage: # pylint: disable=missing-class-docstring
__slots__ = 'counters', 'lock'
@ -151,17 +150,17 @@ class CounterStorage:
def dump(self):
with self.lock:
ks = sorted(self.counters.keys(), key='/'.join)
ks = sorted(self.counters.keys(), key='/'.join) # pylint: disable=invalid-name
logger.debug("Counters:")
for k in ks:
logger.debug("- %-60s %s", '|'.join(k), self.counters[k])
class VoidHistogram(Histogram):
class VoidHistogram(Histogram): # pylint: disable=missing-class-docstring
def observe(self, value):
pass
class VoidCounterStorage(CounterStorage):
class VoidCounterStorage(CounterStorage): # pylint: disable=missing-class-docstring
def add(self, value, *args):
pass


@ -1,5 +1,4 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
# pylint: disable=missing-module-docstring, global-statement
import asyncio


@ -1,5 +1,4 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
# pylint: disable=missing-module-docstring, global-statement
import asyncio


@ -1,5 +1,4 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
# pylint: disable=global-statement
# pylint: disable=missing-module-docstring, missing-class-docstring


@ -1,5 +1,4 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
"""Raise exception for an HTTP response is an error.
"""


@ -1,5 +1,4 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
# pylint: disable=missing-module-docstring, missing-class-docstring
import sys


@ -1,6 +1,5 @@
'''
SPDX-License-Identifier: AGPL-3.0-or-later
'''
# SPDX-License-Identifier: AGPL-3.0-or-later
# pylint: disable=missing-module-docstring
from hashlib import md5
from searx.data import ahmia_blacklist_loader
@ -13,14 +12,14 @@ preference_section = 'onions'
ahmia_blacklist = None
def on_result(request, search, result):
def on_result(_request, _search, result):
if not result.get('is_onion') or not result.get('parsed_url'):
return True
result_hash = md5(result['parsed_url'].hostname.encode()).hexdigest()
return result_hash not in ahmia_blacklist
def init(app, settings):
def init(_app, settings):
global ahmia_blacklist # pylint: disable=global-statement
if not settings['outgoing']['using_tor_proxy']:
# disable the plugin


@ -1,25 +1,11 @@
'''
searx is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
# SPDX-License-Identifier: AGPL-3.0-or-later
# pylint: disable=missing-module-docstring
searx is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with searx. If not, see < http://www.gnu.org/licenses/ >.
(C) 2015 by Adam Tauber, <asciimoo@gmail.com>
(C) 2018, 2020 by Vaclav Zouzalik
'''
from flask_babel import gettext
import hashlib
import re
from flask_babel import gettext
name = "Hash plugin"
description = gettext("Converts strings to different hash digests.")
default_on = True
@ -30,7 +16,7 @@ query_examples = 'sha512 The quick brown fox jumps over the lazy dog'
parser_re = re.compile('(md5|sha1|sha224|sha256|sha384|sha512) (.*)', re.I)
def post_search(request, search):
def post_search(_request, search):
# process only on first page
if search.search_query.pageno > 1:
return True
@ -40,7 +26,7 @@ def post_search(request, search):
return True
function, string = m.groups()
if string.strip().__len__() == 0:
if not string.strip():
# end if the string is empty
return True


@ -1,10 +1,13 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# pylint: disable=missing-module-docstring
import re
from urllib.parse import urlunparse, urlparse
from flask_babel import gettext
from searx import settings
from searx.plugins import logger
from flask_babel import gettext
name = gettext('Hostname replace')
description = gettext('Rewrite result hostnames or remove results based on the hostname')
@ -20,7 +23,7 @@ parsed = 'parsed_url'
_url_fields = ['iframe_src', 'audio_src']
def on_result(request, search, result):
def on_result(_request, _search, result):
for pattern, replacement in replacements.items():


@ -1,8 +1,11 @@
from urllib.parse import urlparse, parse_qsl
from flask_babel import gettext
import re
from searx import settings
# SPDX-License-Identifier: AGPL-3.0-or-later
# pylint: disable=missing-module-docstring
import re
from urllib.parse import urlparse, parse_qsl
from flask_babel import gettext
from searx import settings
regex = re.compile(r'10\.\d{4,9}/[^\s]+')
@ -31,7 +34,7 @@ def get_doi_resolver(preferences):
return doi_resolvers[selected_resolver]
def on_result(request, search, result):
def on_result(request, _search, result):
if 'parsed_url' not in result:
return True


@ -1,5 +1,4 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
# pylint: disable=missing-module-docstring,invalid-name
import re


@ -1,5 +1,4 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
"""A plugin to check if the ip address of the request is a Tor exit-node if the
user searches for ``tor-check``. It fetches the tor exit node list from
https://check.torproject.org/exit-addresses and parses all the IPs into a list,


@ -1,24 +1,11 @@
'''
searx is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
# SPDX-License-Identifier: AGPL-3.0-or-later
# pylint: disable=missing-module-docstring
searx is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with searx. If not, see < http://www.gnu.org/licenses/ >.
(C) 2015 by Adam Tauber, <asciimoo@gmail.com>
'''
from flask_babel import gettext
import re
from urllib.parse import urlunparse, parse_qsl, urlencode
from flask_babel import gettext
regexes = {
re.compile(r'utm_[^&]+'),
re.compile(r'(wkey|wemail)[^&]*'),
@ -32,7 +19,7 @@ default_on = True
preference_section = 'privacy'
def on_result(request, search, result):
def on_result(_request, _search, result):
if 'parsed_url' not in result:
return True


@ -1,5 +1,4 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
"""Searx preferences implementation.
"""


@ -1,4 +1,5 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# pylint: disable=invalid-name, missing-module-docstring, missing-class-docstring
from abc import abstractmethod, ABC
import re
@ -191,7 +192,7 @@ class BangParser(QueryPartParser):
def _parse(self, value):
# check if prefix is equal with engine shortcut
if value in engine_shortcuts:
if value in engine_shortcuts: # pylint: disable=consider-using-get
value = engine_shortcuts[value]
# check if prefix is equal with engine name


@ -1,5 +1,4 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
"""Implementation of the redis client (redis-py_).
.. _redis-py: https://github.com/redis/redis-py


@ -1,5 +1,4 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
"""A collection of convenient functions and redis/lua scripts.
This code was partial inspired by the `Bullet-Proofing Lua Scripts in RedisPy`_


@ -1,3 +1,6 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# pylint: disable=missing-module-docstring
import re
from collections import defaultdict
from operator import itemgetter
@ -19,8 +22,7 @@ WHITESPACE_REGEX = re.compile('( |\t|\n)+', re.M | re.U)
def result_content_len(content):
if isinstance(content, str):
return len(CONTENT_LEN_IGNORED_CHARS_REGEX.sub('', content))
else:
return 0
return 0
def compare_urls(url_a, url_b):
@ -56,7 +58,7 @@ def compare_urls(url_a, url_b):
return unquote(path_a) == unquote(path_b)
def merge_two_infoboxes(infobox1, infobox2):
def merge_two_infoboxes(infobox1, infobox2): # pylint: disable=too-many-branches, too-many-statements
# get engines weights
if hasattr(engines[infobox1['engine']], 'weight'):
weight1 = engines[infobox1['engine']].weight
@ -140,13 +142,13 @@ def result_score(result):
return sum((occurrences * weight) / position for position in result['positions'])
class Timing(NamedTuple):
class Timing(NamedTuple): # pylint: disable=missing-class-docstring
engine: str
total: float
load: float
class UnresponsiveEngine(NamedTuple):
class UnresponsiveEngine(NamedTuple): # pylint: disable=missing-class-docstring
engine: str
error_type: str
suspended: bool
@ -189,7 +191,7 @@ class ResultContainer:
self.on_result = lambda _: True
self._lock = RLock()
def extend(self, engine_name, results):
def extend(self, engine_name, results): # pylint: disable=too-many-branches
if self._closed:
return
@ -314,11 +316,11 @@ class ResultContainer:
if result_template != 'images.html':
# not an image, same template, same url : it's a duplicate
return merged_result
else:
# it's an image
# it's a duplicate if the parsed_url, template and img_src are different
if result.get('img_src', '') == merged_result.get('img_src', ''):
return merged_result
# it's an image
# it's a duplicate if the parsed_url, template and img_src are different
if result.get('img_src', '') == merged_result.get('img_src', ''):
return merged_result
return None
def __merge_duplicated_http_result(self, duplicated, result, position):
@ -371,11 +373,11 @@ class ResultContainer:
categoryPositions = {}
for res in results:
# FIXME : handle more than one category per engine
# do we need to handle more than one category per engine?
engine = engines[res['engine']]
res['category'] = engine.categories[0] if len(engine.categories) > 0 else ''
# FIXME : handle more than one category per engine
# do we need to handle more than one category per engine?
category = (
res['category']
+ ':'
@ -397,7 +399,7 @@ class ResultContainer:
# update every index after the current one
# (including the current one)
for k in categoryPositions:
for k in categoryPositions: # pylint: disable=consider-using-dict-items
v = categoryPositions[k]['index']
if v >= index:
categoryPositions[k]['index'] = v + 1


@ -1,5 +1,4 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
# pylint: disable=missing-module-docstring, too-few-public-methods
import threading


@ -1,4 +1,5 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# pylint: disable=missing-module-docstring
from .impl import Checker
from .background import initialize, get_result


@ -1,5 +1,4 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
# pylint: disable=missing-module-docstring
import sys


@ -1,7 +1,5 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
# pylint: disable=missing-module-docstring
# pyright: basic
# pylint: disable=missing-module-docstring, cyclic-import
import json
import time


@ -1,4 +1,5 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# pylint: disable=missing-module-docstring, invalid-name
import gc
import typing
@ -149,7 +150,7 @@ def _search_query_diff(
return (common, diff)
class TestResults:
class TestResults: # pylint: disable=missing-class-docstring
__slots__ = 'errors', 'logs', 'languages'
@ -181,7 +182,7 @@ class TestResults:
yield (test_name, error)
class ResultContainerTests:
class ResultContainerTests: # pylint: disable=missing-class-docstring
__slots__ = 'test_name', 'search_query', 'result_container', 'languages', 'stop_test', 'test_results'
@ -210,7 +211,6 @@ class ResultContainerTests:
if langStr:
self.languages.add(langStr)
self.test_results.add_language(langStr)
return None
def _check_result(self, result):
if not _check_no_html(result.get('title', '')):
@ -319,7 +319,7 @@ class ResultContainerTests:
self._record_error(('{!r} not found in the title'.format(title)))
class CheckerTests:
class CheckerTests: # pylint: disable=missing-class-docstring, too-few-public-methods
__slots__ = 'test_results', 'test_name', 'result_container_tests_list'
@ -351,7 +351,7 @@ class CheckerTests:
)
class Checker:
class Checker: # pylint: disable=missing-class-docstring
__slots__ = 'processor', 'tests', 'test_results'
@ -377,7 +377,7 @@ class Checker:
p.append(l)
for kwargs in itertools.product(*p):
kwargs = {k: v for k, v in kwargs}
kwargs = dict(kwargs)
query = kwargs['query']
params = dict(kwargs)
del params['query']


@ -1,5 +1,4 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
# pylint: disable=missing-module-docstring
"""Lame scheduler which use Redis as a source of truth:
* the Redis key SearXNG_checker_next_call_ts contains the next time the embedded checker should run.


@ -1,4 +1,5 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# pylint: disable=missing-module-docstring
import typing
import babel
@ -52,7 +53,7 @@ class SearchQuery:
external_bang: typing.Optional[str] = None,
engine_data: typing.Optional[typing.Dict[str, str]] = None,
redirect_to_first_result: typing.Optional[bool] = None,
):
): # pylint:disable=too-many-arguments
self.query = query
self.engineref_list = engineref_list
self.lang = lang


@ -1,5 +1,4 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
"""Implement request processors used by engine-types.


@ -1,6 +1,4 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
"""Abstract base classes for engine request processors.
"""


@ -1,6 +1,4 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
"""Processors for engine-type: ``offline``
"""


@ -1,6 +1,4 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
"""Processors for engine-type: ``online``
"""


@ -1,5 +1,4 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
"""Processors for engine-type: ``online_currency``
"""


@ -1,5 +1,4 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
"""Processors for engine-type: ``online_dictionary``
"""


@ -1,5 +1,4 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
"""Processors for engine-type: ``online_url_search``
"""


@ -1,5 +1,4 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
"""Implementation of the default settings.
"""


@ -1,4 +1,5 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# pylint: disable=missing-module-docstring, too-many-branches
from typing import Optional
from os import environ


@ -1,4 +1,4 @@
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: AGPL-3.0-or-later
'''List of SearXNG's locale codes.
.. hint::


@ -1,5 +1,4 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
"""
if setproctitle is installed.
set Unix thread name with the Python thread name


@ -1,6 +1,4 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
# pyright: basic
"""Utility functions for the engines
"""
@ -56,7 +54,7 @@ _STORAGE_UNIT_VALUE: Dict[str, int] = {
_XPATH_CACHE: Dict[str, XPath] = {}
_LANG_TO_LC_CACHE: Dict[str, Dict[str, str]] = {}
_FASTTEXT_MODEL: Optional["fasttext.FastText._FastText"] = None
_FASTTEXT_MODEL: Optional["fasttext.FastText._FastText"] = None # type: ignore
"""fasttext model to predict laguage of a search term"""
SEARCH_LANGUAGE_CODES = frozenset([searxng_locale[0].split('-')[0] for searxng_locale in sxng_locales])
@ -595,7 +593,7 @@ def eval_xpath_getindex(elements: ElementBase, xpath_spec: XPathSpecType, index:
return default
def _get_fasttext_model() -> "fasttext.FastText._FastText":
def _get_fasttext_model() -> "fasttext.FastText._FastText": # type: ignore
global _FASTTEXT_MODEL # pylint: disable=global-statement
if _FASTTEXT_MODEL is None:
import fasttext # pylint: disable=import-outside-toplevel


@ -1,5 +1,4 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
# pylint: disable=,missing-module-docstring,missing-class-docstring
import os
@ -108,6 +107,7 @@ if __name__ == "__main__":
if len(sys.argv) >= 2 and sys.argv[1] == "freeze":
# freeze the version (to create an archive outside a git repository)
python_code = f"""# SPDX-License-Identifier: AGPL-3.0-or-later
# pylint: disable=missing-module-docstring
# this file is generated automatically by searx/version.py
VERSION_STRING = "{VERSION_STRING}"


@ -1,3 +1,6 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# pylint: disable=missing-module-docstring
from collections import defaultdict
from typing import Dict, List, Optional, Tuple
from searx.exceptions import SearxParameterException
@ -10,7 +13,7 @@ from searx.utils import detect_language
# remove duplicate queries.
# FIXME: does not fix "!music !soundcloud", because the categories are 'none' and 'music'
# HINT: does not fix "!music !soundcloud", because the categories are 'none' and 'music'
def deduplicate_engineref_list(engineref_list: List[EngineRef]) -> List[EngineRef]:
engineref_dict = {q.category + '|' + q.name: q for q in engineref_list}
return list(engineref_dict.values())
@ -55,7 +58,7 @@ def parse_lang(preferences: Preferences, form: Dict[str, str], raw_text_query: R
return preferences.get_value('language')
# get language
# set specific language if set on request, query or preferences
# TODO support search with multiple languages
# search with multiple languages is not supported (by most engines)
if len(raw_text_query.languages):
query_lang = raw_text_query.languages[-1]
elif 'language' in form:
@ -153,7 +156,10 @@ def get_selected_categories(preferences: Preferences, form: Optional[Dict[str, s
return selected_categories
def get_engineref_from_category_list(category_list: List[str], disabled_engines: List[str]) -> List[EngineRef]:
def get_engineref_from_category_list( # pylint: disable=invalid-name
category_list: List[str],
disabled_engines: List[str],
) -> List[EngineRef]:
result = []
for categ in category_list:
result.extend(
@ -172,7 +178,7 @@ def parse_generic(preferences: Preferences, form: Dict[str, str], disabled_engin
explicit_engine_list = False
if not is_locked('categories'):
# parse the form only if the categories are not locked
for pd_name, pd in form.items():
for pd_name, pd in form.items(): # pylint: disable=invalid-name
if pd_name == 'engines':
pd_engines = [
EngineRef(engine_name, engines[engine_name].categories[0])


@ -1,7 +1,5 @@
#!/usr/bin/env python
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
# pyright: basic
"""WebbApp
"""


@ -1,4 +1,6 @@
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: AGPL-3.0-or-later
# pylint: disable=missing-module-docstring, invalid-name
from __future__ import annotations
import os
@ -108,7 +110,7 @@ class CSVWriter:
self.writerow(row)
def write_csv_response(csv: CSVWriter, rc: ResultContainer) -> None:
def write_csv_response(csv: CSVWriter, rc: ResultContainer) -> None: # pylint: disable=redefined-outer-name
"""Write rows of the results to a query (``application/csv``) into a CSV
table (:py:obj:`CSVWriter`). First line in the table contain the column
names. The column "type" specifies the type, the following types are
@ -143,7 +145,7 @@ def write_csv_response(csv: CSVWriter, rc: ResultContainer) -> None:
csv.writerow([row.get(key, '') for key in keys])
class JSONEncoder(json.JSONEncoder):
class JSONEncoder(json.JSONEncoder): # pylint: disable=missing-class-docstring
def default(self, o):
if isinstance(o, datetime):
return o.isoformat()
@ -226,8 +228,7 @@ def prettify_url(url, max_length=74):
if len(url) > max_length:
chunk_len = int(max_length / 2 + 1)
return '{0}[...]{1}'.format(url[:chunk_len], url[-chunk_len:])
else:
return url
return url
def contains_cjko(s: str) -> bool:
@ -269,8 +270,7 @@ def regex_highlight_cjk(word: str) -> str:
rword = re.escape(word)
if contains_cjko(rword):
return fr'({rword})'
else:
return fr'\b({rword})(?!\w)'
return fr'\b({rword})(?!\w)'
def highlight_content(content, query):
@ -279,7 +279,6 @@ def highlight_content(content, query):
return None
# ignoring html contents
# TODO better html content detection
if content.find('<') != -1:
return content
@ -353,8 +352,8 @@ def group_engines_in_tab(engines: Iterable[Engine]) -> List[Tuple[str, Iterable[
sorted_groups = sorted(((name, list(engines)) for name, engines in subgroups), key=group_sort_key)
ret_val = []
for groupname, engines in sorted_groups:
for groupname, _engines in sorted_groups:
group_bang = '!' + groupname.replace(' ', '_') if groupname != NO_SUBGROUPING else ''
ret_val.append((groupname, group_bang, sorted(engines, key=engine_sort_key)))
ret_val.append((groupname, group_bang, sorted(_engines, key=engine_sort_key)))
return ret_val


@ -0,0 +1,2 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# pylint: disable=missing-module-docstring


@ -1,7 +1,5 @@
#!/usr/bin/env python
# lint: pylint
# SPDX-License-Identifier: AGPL-3.0-or-later
"""Script that implements some prebuild tasks needed by target docs.prebuild
"""
@ -9,10 +7,10 @@ import sys
import os.path
import time
from contextlib import contextmanager
from searx import settings, get_setting, locales
from searx.infopage import InfoPageSet, InfoPage
_doc_user = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'docs', 'user'))
@ -27,13 +25,13 @@ def main():
with infopageset_ctx as infopageset:
for _, _, page in infopageset.iter_pages('en'):
fname = os.path.join(_doc_user, os.path.basename(page.fname))
with open(fname, 'w') as f:
with open(fname, 'w', encoding='utf-8') as f:
f.write(page.content)
class OfflinePage(InfoPage):
class OfflinePage(InfoPage): # pylint: disable=missing-class-docstring
def get_ctx(self): # pylint: disable=no-self-use
def get_ctx(self):
"""Jinja context to render :py:obj:`DocPage.content` for offline purpose (no
links to SearXNG instance)"""
@ -55,7 +53,7 @@ def _instance_infosetset_ctx(base_url):
# registered in the Flask app.
settings['server']['secret_key'] = ''
from searx.webapp import app
from searx.webapp import app # pylint: disable=import-outside-toplevel
# Specify base_url so that url_for() works for base_urls. If base_url is
# specified, then these values from are given preference over any Flask's


@ -1,8 +1,5 @@
#!/usr/bin/env python
# lint: pylint
# SPDX-License-Identifier: AGPL-3.0-or-later
# (C) Copyright Contributors to the SearXNG project.
"""Script to run SearXNG from terminal.
DON'T USE THIS SCRIPT!!


@ -0,0 +1,2 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# pylint: disable=missing-module-docstring


@ -1,5 +1,4 @@
#!/usr/bin/env python
# lint: pylint
# SPDX-License-Identifier: AGPL-3.0-or-later
"""This script saves `Ahmia's blacklist`_ for onion sites.


@ -1,7 +1,5 @@
#!/usr/bin/env python
# lint: pylint
# SPDX-License-Identifier: AGPL-3.0-or-later
"""Fetch currencies from :origin:`searx/engines/wikidata.py` engine.
Output file: :origin:`searx/data/currencies.json` (:origin:`CI Update data ...


@ -1,7 +1,5 @@
#!/usr/bin/env python
# lint: pylint
# SPDX-License-Identifier: AGPL-3.0-or-later
"""Fetch website description from websites and from
:origin:`searx/engines/wikidata.py` engine.


@ -1,5 +1,4 @@
#!/usr/bin/env python
# lint: pylint
# SPDX-License-Identifier: AGPL-3.0-or-later
"""Update :py:obj:`searx.enginelib.traits.EngineTraitsMap` and :origin:`searx/languages.py`
@ -28,7 +27,7 @@ from searx.enginelib.traits import EngineTraitsMap
# Output files.
languages_file = Path(searx_dir) / 'sxng_locales.py'
languages_file_header = """\
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: AGPL-3.0-or-later
'''List of SearXNG's locale codes.
.. hint::


@ -1,5 +1,4 @@
#!/usr/bin/env python
# lint: pylint
# SPDX-License-Identifier: AGPL-3.0-or-later
"""Update :origin:`searx/data/external_bangs.json` using the duckduckgo bangs
from :py:obj:`BANGS_URL`.


@ -1,5 +1,4 @@
#!/usr/bin/env python
# lint: pylint
# SPDX-License-Identifier: AGPL-3.0-or-later
"""Fetch firefox useragent signatures


@ -1,5 +1,4 @@
#!/usr/bin/env python
# lint: pylint
# SPDX-License-Identifier: AGPL-3.0-or-later
"""Update locale names in :origin:`searx/data/locales.json` used by
:ref:`searx.locales`


@ -1,5 +1,4 @@
#!/usr/bin/env python
# lint: pylint
# SPDX-License-Identifier: AGPL-3.0-or-later
"""Fetch OSM keys and tags.


@ -1,5 +1,4 @@
#!/usr/bin/env python
# lint: pylint
# SPDX-License-Identifier: AGPL-3.0-or-later
"""Update pygments style


@ -1,8 +1,5 @@
#!/usr/bin/env python
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
# pylint: disable=missing-module-docstring
"""Fetch units from :origin:`searx/engines/wikidata.py` engine.
Output file: :origin:`searx/data/wikidata_units.json` (:origin:`CI Update data


@ -1,4 +1,4 @@
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: AGPL-3.0-or-later
"""Installer for SearXNG package."""
from setuptools import setup, find_packages


@ -1,5 +1,7 @@
import os
# SPDX-License-Identifier: AGPL-3.0-or-later
# pylint: disable=missing-module-docstring
import os
import aiounittest
os.environ.pop('SEARX_DEBUG', None)


@ -0,0 +1,2 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# pylint: disable=missing-module-docstring, cyclic-import


@ -1,5 +1,4 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
"""Shared testing code."""
# pylint: disable=missing-function-docstring


@ -1,5 +1,4 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
# pylint: disable=missing-module-docstring,missing-function-docstring
from time import sleep


@ -1,3 +1,6 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# pylint: disable=missing-module-docstring
import os
from os.path import dirname, sep, abspath


@ -1,3 +1,6 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# pylint: disable=missing-module-docstring
'''
searx is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by


@ -1,4 +1,6 @@
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: AGPL-3.0-or-later
# pylint: disable=missing-module-docstring
from collections import defaultdict
import mock
from searx.engines import xpath


@ -0,0 +1,2 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# pylint: disable=missing-module-docstring

Some files were not shown because too many files have changed in this diff.