mirror of https://github.com/searxng/searxng.git

Compare commits: 9d7f292c05 ... 06b4637332 (3 commits)

| Author | SHA1 | Date |
|---|---|---|
| Markus Heiser | 06b4637332 | |
| Bnyro | b07c0ae39f | |
| Markus Heiser | 1c9b28968d | |
Botdetection documentation, section "Probe HTTP headers": the new Sec-Fetch probe is added to the module docs.

```diff
@@ -53,6 +53,9 @@ Probe HTTP headers
 .. automodule:: searx.botdetection.http_user_agent
    :members:
 
+.. automodule:: searx.botdetection.http_sec_fetch
+   :members:
+
 .. _botdetection config:
 
 Config
```
`dump_request()`: the Sec-Fetch request headers are added to the debug dump.

```diff
@@ -31,6 +31,9 @@ def dump_request(request: flask.Request):
         + " || Content-Length: %s" % request.headers.get('Content-Length')
         + " || Connection: %s" % request.headers.get('Connection')
         + " || User-Agent: %s" % request.headers.get('User-Agent')
+        + " || Sec-Fetch-Site: %s" % request.headers.get('Sec-Fetch-Site')
+        + " || Sec-Fetch-Mode: %s" % request.headers.get('Sec-Fetch-Mode')
+        + " || Sec-Fetch-Dest: %s" % request.headers.get('Sec-Fetch-Dest')
     )
 
 
```
New module `searx.botdetection.http_sec_fetch` (shown in the compare as a new file, 59 added lines):

```python
# SPDX-License-Identifier: AGPL-3.0-or-later
"""
Method ``http_sec_fetch``
-------------------------

The ``http_sec_fetch`` method protects resources from web attacks with `Fetch
Metadata`_. A request is filtered out in case of:

- http header Sec-Fetch-Mode_ is invalid
- http header Sec-Fetch-Dest_ is invalid

.. _Fetch Metadata:
   https://developer.mozilla.org/en-US/docs/Glossary/Fetch_metadata_request_header

.. _Sec-Fetch-Dest:
   https://developer.mozilla.org/en-US/docs/Web/API/Request/destination

.. _Sec-Fetch-Mode:
   https://developer.mozilla.org/en-US/docs/Web/API/Request/mode


"""
# pylint: disable=unused-argument

from __future__ import annotations
from ipaddress import (
    IPv4Network,
    IPv6Network,
)

import flask
import werkzeug

from . import config
from ._helpers import logger


def filter_request(
    network: IPv4Network | IPv6Network,
    request: flask.Request,
    cfg: config.Config,
) -> werkzeug.Response | None:

    val = request.headers.get("Sec-Fetch-Mode", "")
    if val != "navigate":
        logger.debug("invalid Sec-Fetch-Mode '%s'", val)
        return flask.redirect(flask.url_for('index'), code=302)

    val = request.headers.get("Sec-Fetch-Site", "")
    if val not in ('same-origin', 'same-site', 'none'):
        logger.debug("invalid Sec-Fetch-Site '%s'", val)
        flask.redirect(flask.url_for('index'), code=302)

    val = request.headers.get("Sec-Fetch-Dest", "")
    if val != "document":
        logger.debug("invalid Sec-Fetch-Dest '%s'", val)
        flask.redirect(flask.url_for('index'), code=302)

    return None
```
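For context: on a regular top-level navigation (address bar, bookmark, or a link on the instance itself) browsers send `Sec-Fetch-Mode: navigate`, `Sec-Fetch-Dest: document` and a `Sec-Fetch-Site` of `none`, `same-origin` or `same-site`, which is exactly the combination the new filter accepts. The following minimal sketch restates those checks against a plain header mapping; `sec_fetch_ok` and the sample header dicts are illustrative stand-ins, not part of the searx module above.

```python
from typing import Mapping


def sec_fetch_ok(headers: Mapping[str, str]) -> bool:
    """Return True when the Sec-Fetch-* headers look like a regular
    top-level page navigation (the same conditions the filter checks)."""
    if headers.get("Sec-Fetch-Mode", "") != "navigate":
        return False
    if headers.get("Sec-Fetch-Site", "") not in ("same-origin", "same-site", "none"):
        return False
    if headers.get("Sec-Fetch-Dest", "") != "document":
        return False
    return True


if __name__ == "__main__":
    # Address-bar / bookmark navigation: Site=none, Mode=navigate, Dest=document.
    navigation = {
        "Sec-Fetch-Site": "none",
        "Sec-Fetch-Mode": "navigate",
        "Sec-Fetch-Dest": "document",
    }
    # A cross-site fetch()/XHR: Site=cross-site, Mode=cors, Dest=empty.
    cross_site_fetch = {
        "Sec-Fetch-Site": "cross-site",
        "Sec-Fetch-Mode": "cors",
        "Sec-Fetch-Dest": "empty",
    }
    print(sec_fetch_ok(navigation))        # True
    print(sec_fetch_ok(cross_site_fetch))  # False
```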
Anna's Archive engine (base_url `https://annas-archive.org`): `urlencode` replaces the manual query quoting, `eval_xpath_getindex` is imported for the response parser, and paging is enabled.

```diff
@@ -34,10 +34,10 @@ Implementations
 """
 
 from typing import List, Dict, Any, Optional
-from urllib.parse import quote
+from urllib.parse import urlencode
 from lxml import html
 
-from searx.utils import extract_text, eval_xpath, eval_xpath_list
+from searx.utils import extract_text, eval_xpath, eval_xpath_getindex, eval_xpath_list
 from searx.enginelib.traits import EngineTraits
 from searx.data import ENGINE_TRAITS
 
@@ -53,7 +53,7 @@ about: Dict[str, Any] = {
 
 # engine dependent config
 categories: List[str] = ["files"]
-paging: bool = False
+paging: bool = True
 
 # search-url
 base_url: str = "https://annas-archive.org"
```
`request()` now collects the parameters in a dict, drops empty values, and builds the query string with `urlencode`, passing the page number along:

```diff
@@ -99,9 +99,18 @@ def init(engine_settings=None): # pylint: disable=unused-argument
 
 
 def request(query, params: Dict[str, Any]) -> Dict[str, Any]:
-    q = quote(query)
     lang = traits.get_language(params["language"], traits.all_locale)  # type: ignore
-    params["url"] = base_url + f"/search?lang={lang or ''}&content={aa_content}&ext={aa_ext}&sort={aa_sort}&q={q}"
+    args = {
+        'lang': lang,
+        'content': aa_content,
+        'ext': aa_ext,
+        'sort': aa_sort,
+        'q': query,
+        'page': params['pageno'],
+    }
+    # filter out None and empty values
+    filtered_args = dict((k, v) for k, v in args.items() if v)
+    params["url"] = f"{base_url}/search?{urlencode(filtered_args)}"
     return params
 
 
```
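A quick standalone check of what the new URL construction produces; `build_url` and the empty `aa_*` values below are made-up placeholders for illustration, not the engine's actual settings:

```python
from __future__ import annotations
from urllib.parse import urlencode

base_url = "https://annas-archive.org"

# Hypothetical settings values, only to show how empty filters drop out.
aa_content = ""
aa_ext = ""
aa_sort = ""


def build_url(query: str, pageno: int, lang: str | None = None) -> str:
    args = {
        'lang': lang,
        'content': aa_content,
        'ext': aa_ext,
        'sort': aa_sort,
        'q': query,
        'page': pageno,
    }
    # None and empty strings are filtered out, so only set parameters reach the URL
    filtered_args = dict((k, v) for k, v in args.items() if v)
    return f"{base_url}/search?{urlencode(filtered_args)}"


print(build_url("hello world", 2))
# -> https://annas-archive.org/search?q=hello+world&page=2
```

Compared with the old hand-built f-string, empty parameters no longer show up as `lang=&content=...` noise, and the page number from `params['pageno']` is carried along.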
The result parser `_get_result()` in `response()` switches to the searx XPath helpers:

```diff
@@ -128,12 +137,12 @@ def response(resp) -> List[Dict[str, Optional[str]]]:
 def _get_result(item):
     return {
         'template': 'paper.html',
-        'url': base_url + item.xpath('./@href')[0],
+        'url': base_url + extract_text(eval_xpath_getindex(item, './@href', 0)),
         'title': extract_text(eval_xpath(item, './/h3/text()[1]')),
         'publisher': extract_text(eval_xpath(item, './/div[contains(@class, "text-sm")]')),
         'authors': [extract_text(eval_xpath(item, './/div[contains(@class, "italic")]'))],
         'content': extract_text(eval_xpath(item, './/div[contains(@class, "text-xs")]')),
-        'thumbnail': item.xpath('.//img/@src')[0],
+        'thumbnail': extract_text(eval_xpath_getindex(item, './/img/@src', 0, default=None), allow_none=True),
     }
 
 
```
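The thumbnail line is the behavioural change here: with the old raw indexing, a result row without an `<img>` raised `IndexError` inside `_get_result()`, while `eval_xpath_getindex(..., default=None)` plus `extract_text(..., allow_none=True)` turns the missing value into `None`. A minimal sketch of that difference with plain `lxml`; `get_index` below is only a stand-in for the searx helper, not its real implementation:

```python
from lxml import html

# A result row that has a title but no thumbnail image.
item = html.fromstring('<div><a href="/md5/abc"><h3>Some title</h3></a></div>')


def get_index(element, xpath, index, default=...):
    """Stand-in for searx.utils.eval_xpath_getindex: return the index-th
    XPath match, or *default* when there are not enough matches."""
    results = element.xpath(xpath)
    if index < len(results):
        return results[index]
    if default is ...:
        raise IndexError(f"no match at index {index} for {xpath!r}")
    return default


print(get_index(item, './/h3/text()', 0))               # 'Some title'
print(get_index(item, './/img/@src', 0, default=None))  # None

try:
    item.xpath('.//img/@src')[0]   # the old code path
except IndexError:
    print("old indexing raises IndexError and the whole result is lost")
```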
Limiter `filter_request()`: the new `http_sec_fetch` probe is imported and added to the chain applied on `/search` requests, and a request rejected by any probe is now logged together with its `dump_request()` output:

```diff
@@ -111,6 +111,7 @@ from searx.botdetection import (
     http_accept_encoding,
     http_accept_language,
     http_user_agent,
+    http_sec_fetch,
     ip_limit,
     ip_lists,
     get_network,

@@ -178,16 +179,17 @@ def filter_request(request: flask.Request) -> werkzeug.Response | None:
         logger.error("BLOCK %s: matched BLOCKLIST - %s", network.compressed, msg)
         return flask.make_response(('IP is on BLOCKLIST - %s' % msg, 429))
 
-    # methods applied on /
+    # methods applied on all requests
 
     for func in [
         http_user_agent,
     ]:
         val = func.filter_request(network, request, cfg)
         if val is not None:
+            logger.debug(f"NOT OK ({func.__name__}): {network}: %s", dump_request(flask.request))
             return val
 
-    # methods applied on /search
+    # methods applied on /search requests
 
     if request.path == '/search':
 

@@ -196,12 +198,15 @@ def filter_request(request: flask.Request) -> werkzeug.Response | None:
             http_accept_encoding,
             http_accept_language,
             http_user_agent,
+            http_sec_fetch,
             ip_limit,
         ]:
             val = func.filter_request(network, request, cfg)
             if val is not None:
+                logger.debug(f"NOT OK ({func.__name__}): {network}: %s", dump_request(flask.request))
                 return val
-    logger.debug(f"OK {network}: %s", dump_request(flask.request))
+    logger.debug(f"OK: {network}: %s", dump_request(flask.request))
 
     return None
```
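Each botdetection probe in the chain exposes a `filter_request(network, request, cfg)` callable that either returns a response (the request is answered right there) or `None` (the next probe gets its turn); the limiter just walks the list, as the loops above show. A minimal standalone sketch of that chain pattern with made-up probes; the `Probe`/`Request`/`Response` aliases and the `probe_*` functions are illustrative stand-ins, not the searx modules:

```python
from __future__ import annotations
from typing import Callable, Optional

# Stand-ins for the real types (flask.Request / werkzeug.Response in searx).
Request = dict          # header name -> value
Response = str          # the "block" answer a probe would return

Probe = Callable[[Request], Optional[Response]]


def probe_user_agent(request: Request) -> Optional[Response]:
    # Made-up rule, for illustration only.
    if "bot" in request.get("User-Agent", "").lower():
        return "429 suspicious User-Agent"
    return None


def probe_sec_fetch(request: Request) -> Optional[Response]:
    # Same idea as the Sec-Fetch checks above: reject non-navigation requests.
    if request.get("Sec-Fetch-Mode", "") != "navigate":
        return "302 redirect to index"
    return None


def filter_request(request: Request, chain: list[Probe]) -> Optional[Response]:
    # The first probe that returns a response wins; None means "request is OK".
    for probe in chain:
        val = probe(request)
        if val is not None:
            return val
    return None


print(filter_request({"User-Agent": "SomeBot/1.0"},
                     [probe_user_agent, probe_sec_fetch]))   # 429 suspicious User-Agent
print(filter_request({"User-Agent": "Firefox", "Sec-Fetch-Mode": "navigate"},
                     [probe_user_agent, probe_sec_fetch]))   # None
```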