Mirror of https://github.com/searxng/searxng.git, synced 2025-12-22 19:50:00 +00:00
[fix] cleanup: rename searx leftovers to SearXNG (#5049)
Signed-off-by: Markus Heiser <markus.heiser@darmarit.de>
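
The rename is mechanical: every leftover searx_useragent() call in the engine modules becomes searxng_useragent(), so outgoing requests identify the instance as SearXNG. As a rough illustration of the engine-side pattern the hunks below touch, here is a minimal sketch assuming the renamed helper from searx.utils; the endpoint URL and query parameter are placeholders, not part of this commit:

    # Minimal sketch, not part of the commit: an engine request() attaching
    # the SearXNG User-Agent header via the renamed helper.
    from urllib.parse import urlencode

    from searx.utils import searxng_useragent  # formerly searx_useragent

    base_url = 'https://example.org/search?{query}'  # hypothetical endpoint


    def request(query, params):
        params['url'] = base_url.format(query=urlencode({'q': query}))
        # identify the instance as SearXNG instead of the old "searx" UA string
        params['headers']['User-Agent'] = searxng_useragent()
        return params
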
@@ -7,7 +7,7 @@ import re
 
 from urllib.parse import urlencode
 from lxml import etree
-from searx.utils import searx_useragent
+from searx.utils import searxng_useragent
 
 # about
 about = {
@@ -69,7 +69,7 @@ def request(query, params):
 
     params['url'] = base_url.format(**string_args)
 
-    params['headers']['User-Agent'] = searx_useragent()
+    params['headers']['User-Agent'] = searxng_useragent()
     return params
 
 
@@ -101,7 +101,7 @@ def request(query, params):
 def _match_query(query):
     """
     The standard for full text queries.
-    searx format: "key:value" e.g. city:berlin
+    SearXNG format: "key:value" e.g. city:berlin
     REF: https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-match-query.html
     """
 
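
The docstring above only fixes the product name; the helper it documents turns a "key:value" query into an Elasticsearch match query. Since the body of _match_query is not part of this diff, the following is an assumption-based sketch of such a parser, following the docstring and the linked match-query reference:

    # Hypothetical sketch of a "key:value" parser; not copied from the engine.
    def _match_query(query):
        try:
            field, value = query.split(':', 1)  # "city:berlin" -> ("city", "berlin")
        except ValueError as exc:
            raise ValueError('query must use the "key:value" format') from exc
        # Elasticsearch match query body, per the REF link above
        return {'query': {'match': {field: {'query': value}}}}
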
@@ -5,7 +5,7 @@
 
 from json import loads
 from urllib.parse import urlencode
-from searx.utils import searx_useragent
+from searx.utils import searxng_useragent
 
 # about
 about = {
@@ -40,8 +40,8 @@ def request(query, params):
     if language in supported_languages:
         params['url'] = params['url'] + "&lang=" + language
 
-    # using searx User-Agent
-    params['headers']['User-Agent'] = searx_useragent()
+    # using SearXNG User-Agent
+    params['headers']['User-Agent'] = searxng_useragent()
 
     return params
 
@@ -6,7 +6,7 @@ ends.
 """
 
 from json import dumps
-from searx.utils import searx_useragent
+from searx.utils import searxng_useragent
 from searx.enginelib.traits import EngineTraits
 
 about = {
@@ -31,7 +31,7 @@ def request(query, params):
     params['headers'] = {
         'Accept': 'application/json',
         'Content-Type': 'application/json',
-        'User-Agent': searx_useragent(),
+        'User-Agent': searxng_useragent(),
     }
     region = traits.get_region(params["searxng_locale"], default=traits.all_locale)
     params['data'] = dumps(
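
The hunk above pairs the SearXNG User-Agent with a JSON request body (Content-Type: application/json, params['data'] = dumps(...)). A hedged sketch of that POST pattern follows; the payload fields are placeholders, since the real body built from the engine's traits and locale is not shown in this diff:

    # Hedged sketch of the JSON POST pattern; payload fields are placeholders.
    from json import dumps

    from searx.utils import searxng_useragent


    def request(query, params):
        params['headers'] = {
            'Accept': 'application/json',
            'Content-Type': 'application/json',
            'User-Agent': searxng_useragent(),
        }
        params['data'] = dumps({'q': query, 'limit': 10})  # hypothetical body
        params['method'] = 'POST'
        return params
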
@@ -149,7 +149,7 @@ def build_result(item: etree.Element) -> Dict[str, Any]:
     leechers = get_torznab_attribute(item, 'leechers')
     peers = get_torznab_attribute(item, 'peers')
 
-    # map attributes to searx result
+    # map attributes to SearXNG result
     result: Dict[str, Any] = {
         'template': 'torrent.html',
         'title': get_attribute(item, 'title'),
@@ -15,7 +15,7 @@ from babel.dates import format_datetime, format_date, format_time, get_datetime_
 
 from searx.data import WIKIDATA_UNITS
 from searx.network import post, get
-from searx.utils import searx_useragent, get_string_replaces_function
+from searx.utils import searxng_useragent, get_string_replaces_function
 from searx.external_urls import get_external_url, get_earth_coordinates_url, area_to_osm_zoom
 from searx.engines.wikipedia import (
     fetch_wikimedia_traits,
@@ -142,7 +142,7 @@ replace_http_by_https = get_string_replaces_function({'http:': 'https:'})
 
 def get_headers():
     # user agent: https://www.mediawiki.org/wiki/Wikidata_Query_Service/User_Manual#Query_limits
-    return {'Accept': 'application/sparql-results+json', 'User-Agent': searx_useragent()}
+    return {'Accept': 'application/sparql-results+json', 'User-Agent': searxng_useragent()}
 
 
 def get_label_for_entity(entity_id, language):
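
For completeness, the module patched in the last hunk also imports post and get from searx.network, and get_headers() is what attaches the SearXNG User-Agent to Wikidata Query Service requests. A hedged usage sketch follows; the endpoint constant and the helper name are assumptions, not part of this diff:

    # Hypothetical usage of headers like those built in the hunk above;
    # the endpoint URL and send_sparql_query name are assumptions.
    from urllib.parse import urlencode

    from searx.network import get
    from searx.utils import searxng_useragent

    SPARQL_ENDPOINT_URL = 'https://query.wikidata.org/sparql'  # assumed endpoint


    def get_headers():
        # same shape as the patched get_headers() above
        return {'Accept': 'application/sparql-results+json', 'User-Agent': searxng_useragent()}


    def send_sparql_query(sparql):
        # GET keeps the query in the URL; headers carry the SearXNG User-Agent
        resp = get(SPARQL_ENDPOINT_URL + '?' + urlencode({'query': sparql}), headers=get_headers())
        return resp.json()
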