Compare commits


6 Commits

Author SHA1 Message Date
Markus Heiser b6a4e577d4
Merge faf13fa675 into e4961142e3 2024-11-24 12:42:21 +01:00
Bnyro e4961142e3 [build] /static 2024-11-24 12:41:57 +01:00
Bnyro f31a3a2053 [chore] *: fix typos detected by typos-cli 2024-11-24 12:41:57 +01:00
Markus Heiser faf13fa675 [mod] add support for Python 3.13
Python 3.13 has been released [1]

- fasttext-predict supports py3.13 from version 0.9.2.3 [2]

[1] https://www.python.org/downloads/release/python-3130/
[2] https://github.com/searxng/fasttext-predict/commit/f2da9cd173

Signed-off-by: Markus Heiser <markus.heiser@darmarit.de>
2024-11-24 12:36:02 +01:00
Markus Heiser 0253c10b52 [feat] engine: add adobe stock video and audio engines
The engine has been revised; there is now an ``adobe_content_types`` option
with which it is possible to configure engines for video and audio from
Adobe Stock.  This patch also adds documentation for the engine.

To test all three engines in one search, use a search term like::

    !asi !asv !asa sound

Signed-off-by: Markus Heiser <markus.heiser@darmarit.de>
2024-11-24 11:56:12 +01:00
Bnyro f20a7632f1 [feat] engine: add adobe stock photos 2024-11-24 11:56:12 +01:00
50 changed files with 353 additions and 81 deletions

View File

@ -93,7 +93,7 @@ Online Currency
- :py:obj:`processors.online_currency <searx.search.processors.online_currency>`
*no engine of this type is documented yet / comming soon*
*no engine of this type is documented yet / coming soon*
.. _online dictionary:
@ -104,4 +104,4 @@ Online Dictionary
- :py:obj:`processors.online_dictionary <searx.search.processors.online_dictionary>`
*no engine of this type is documented yet / comming soon*
*no engine of this type is documented yet / coming soon*

View File

@ -0,0 +1,13 @@
.. _adobe stock engine:

===========
Adobe Stock
===========

.. contents:: Contents
   :depth: 2
   :local:
   :backlinks: entry

.. automodule:: searx.engines.adobe_stock
  :members:

View File

@ -14,8 +14,8 @@ httpx-socks[asyncio]==0.7.7
setproctitle==1.3.4
redis==5.0.8
markdown-it-py==3.0.0
fasttext-predict==0.9.2.3
fasttext-predict==0.9.2.4
tomli==2.0.2; python_version < '3.11'
msgspec==0.18.6
eval_type_backport; python_version < '3.9'
typer-slim==0.13.1
isodate==0.7.2
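
A quick way to sanity-check the bumped ``fasttext-predict`` pin under the new
interpreter; running it with ``python3.13`` in a virtualenv that has these
requirements installed is an assumption, not part of the commit::

    import sys

    import fasttext  # the module shipped by the fasttext-predict wheel

    print(sys.version)          # expect a 3.13.x interpreter
    print(fasttext.load_model)  # the entry point searx.utils relies on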

View File

@ -22,7 +22,7 @@ log = logging.getLogger(__name__)
class FALSE:
"""Class of ``False`` singelton"""
"""Class of ``False`` singleton"""
# pylint: disable=multiple-statements
def __init__(self, msg):
@ -81,7 +81,7 @@ class Config:
return cfg
def __init__(self, cfg_schema: typing.Dict, deprecated: typing.Dict[str, str]):
"""Construtor of class Config.
"""Constructor of class Config.
:param cfg_schema: Schema of the configuration
:param deprecated: dictionary that maps deprecated configuration names to a messages
@ -159,7 +159,7 @@ class Config:
return pathlib.Path(str(val))
def pyobj(self, name, default=UNSET):
"""Get python object refered by full qualiffied name (FQN) in the config
"""Get python object referred by full qualiffied name (FQN) in the config
string."""
fqn = self.get(name, default)

View File

@ -55,10 +55,10 @@ from ._helpers import (
)
TOKEN_LIVE_TIME = 600
"""Livetime (sec) of limiter's CSS token."""
"""Lifetime (sec) of limiter's CSS token."""
PING_LIVE_TIME = 3600
"""Livetime (sec) of the ping-key from a client (request)"""
"""Lifetime (sec) of the ping-key from a client (request)"""
PING_KEY = 'SearXNG_limiter.ping'
"""Prefix of all ping-keys generated by :py:obj:`get_ping_key`"""

View File

@ -0,0 +1,229 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
"""`Adobe Stock`_ is a service that gives access to millions of royalty-free
assets. Asset types include photos, vectors, illustrations, templates, 3D
assets, videos, motion graphics templates and audio tracks.

.. _Adobe Stock: https://stock.adobe.com/

Configuration
=============

The engine has the following mandatory settings:

- SearXNG's :ref:`engine categories`
- Adobe-Stock's :py:obj:`adobe_order`
- Adobe-Stock's :py:obj:`adobe_content_types`

.. code:: yaml

  - name: adobe stock
    engine: adobe_stock
    shortcut: asi
    categories: [images]
    adobe_order: relevance
    adobe_content_types: ["photo", "illustration", "zip_vector", "template", "3d", "image"]

  - name: adobe stock video
    engine: adobe_stock
    network: adobe stock
    shortcut: asi
    categories: [videos]
    adobe_order: relevance
    adobe_content_types: ["video"]

Implementation
==============

"""
from __future__ import annotations

from typing import TYPE_CHECKING
from datetime import datetime, timedelta
from urllib.parse import urlencode

import isodate

if TYPE_CHECKING:
    import logging

    logger: logging.Logger

about = {
    "website": "https://stock.adobe.com/",
    "wikidata_id": "Q5977430",
    "official_api_documentation": None,
    "use_official_api": False,
    "require_api_key": False,
    "results": "JSON",
}

categories = []
paging = True
send_accept_language_header = True
results_per_page = 10

base_url = "https://stock.adobe.com"

adobe_order: str = ""
"""Sort order, can be one of:

- ``relevance`` or
- ``featured`` or
- ``creation`` (most recent) or
- ``nb_downloads`` (number of downloads)
"""

ADOBE_VALID_TYPES = ["photo", "illustration", "zip_vector", "video", "template", "3d", "audio", "image"]

adobe_content_types: list = []
"""A list of content types. The following content types are offered:

- Images: ``image``
- Videos: ``video``
- Templates: ``template``
- 3D: ``3d``
- Audio: ``audio``

Additional subcategories:

- Photos: ``photo``
- Illustrations: ``illustration``
- Vectors: ``zip_vector``
"""

# Do we need support for "free_collection" and "include_stock_enterprise"?


def init(_):
    if not categories:
        raise ValueError("adobe_stock engine: categories is unset")

    # adobe_order
    if not adobe_order:
        raise ValueError("adobe_stock engine: adobe_order is unset")
    if adobe_order not in ["relevance", "featured", "creation", "nb_downloads"]:
        raise ValueError(f"unsupported adobe_order: {adobe_order}")

    # adobe_content_types
    if not adobe_content_types:
        raise ValueError("adobe_stock engine: adobe_content_types is unset")
    if isinstance(adobe_content_types, list):
        for t in adobe_content_types:
            if t not in ADOBE_VALID_TYPES:
                raise ValueError("adobe_stock engine: adobe_content_types: '%s' is invalid" % t)
    else:
        raise ValueError(
            "adobe_stock engine: adobe_content_types must be a list of strings not %s" % type(adobe_content_types)
        )


def request(query, params):
    args = {
        "k": query,
        "limit": results_per_page,
        "order": adobe_order,
        "search_page": params["pageno"],
        "search_type": "pagination",
    }
    for content_type in ADOBE_VALID_TYPES:
        args[f"filters[content_type:{content_type}]"] = 1 if content_type in adobe_content_types else 0

    params["url"] = f"{base_url}/de/Ajax/Search?{urlencode(args)}"

    # headers required to bypass bot-detection
    if params["searxng_locale"] == "all":
        params["headers"]["Accept-Language"] = "en-US,en;q=0.5"

    return params


def parse_image_item(item):
    return {
        "template": "images.html",
        "url": item["content_url"],
        "title": item["title"],
        "content": item["asset_type"],
        "img_src": item["content_thumb_extra_large_url"],
        "thumbnail_src": item["thumbnail_url"],
        "resolution": f"{item['content_original_width']}x{item['content_original_height']}",
        "img_format": item["format"],
        "author": item["author"],
    }


def parse_video_item(item):
    # in video items, the title is more or less a "content description", we try
    # to reduce the length of the title ..
    title = item["title"]
    content = ""
    if "." in title.strip()[:-1]:
        content = title
        title = title.split(".", 1)[0]
    elif "," in title:
        content = title
        title = title.split(",", 1)[0]
    elif len(title) > 50:
        content = title
        title = ""
        for w in content.split(" "):
            title += f" {w}"
            if len(title) > 50:
                title = title.strip() + "\u2026"
                break

    return {
        "template": "videos.html",
        "url": item["content_url"],
        "title": title,
        "content": content,
        # https://en.wikipedia.org/wiki/ISO_8601#Durations
        "length": isodate.parse_duration(item["time_duration"]),
        "publishedDate": datetime.strptime(item["creation_date"], "%Y-%m-%d"),
        "thumbnail": item["thumbnail_url"],
        "iframe_src": item["video_small_preview_url"],
        "metadata": item["asset_type"],
    }


def parse_audio_item(item):
    audio_data = item["audio_data"]
    content = audio_data.get("description") or ""
    if audio_data.get("album"):
        content = audio_data["album"] + " - " + content

    return {
        "url": item["content_url"],
        "title": item["title"],
        "content": content,
        # "thumbnail": base_url + item["thumbnail_url"],
        "iframe_src": audio_data["preview"]["url"],
        "publishedDate": datetime.fromisoformat(audio_data["release_date"]) if audio_data["release_date"] else None,
        "length": timedelta(seconds=round(audio_data["duration"] / 1000)) if audio_data["duration"] else None,
        "author": item.get("artist_name"),
    }


def response(resp):
    results = []
    json_resp = resp.json()

    if isinstance(json_resp["items"], list):
        return None

    for item in json_resp["items"].values():
        if item["asset_type"].lower() in ["image", "premium-image", "illustration", "vector"]:
            result = parse_image_item(item)
        elif item["asset_type"].lower() == "video":
            result = parse_video_item(item)
        elif item["asset_type"].lower() == "audio":
            result = parse_audio_item(item)
        else:
            logger.error("no handle for %s --> %s", item["asset_type"], item)
            continue
        results.append(result)
    return results
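
Outside of a running SearXNG instance, the module above only needs the engine
loader to inject the ``settings.yml`` options as module attributes. A minimal
sketch of how the ``request()`` hook could be exercised in isolation (the
manual attribute assignments and the ``params`` keys are assumptions taken
from the code above, not part of this commit)::

    # rough sketch, assuming a SearXNG development checkout on the PYTHONPATH
    from searx.engines import adobe_stock

    # normally the engine loader copies these values from settings.yml
    adobe_stock.categories = ["images"]
    adobe_stock.adobe_order = "relevance"
    adobe_stock.adobe_content_types = ["photo", "illustration", "image"]
    adobe_stock.init(None)

    params = {"pageno": 1, "searxng_locale": "all", "headers": {}}
    print(adobe_stock.request("sunset", params)["url"])
    # e.g. https://stock.adobe.com/de/Ajax/Search?k=sunset&limit=10&order=relevance&...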

View File

@ -31,7 +31,7 @@ paging = True
number_of_results = 10
# shortcuts for advanced search
shorcut_dict = {
shortcut_dict = {
# user-friendly keywords
'format:': 'dcformat:',
'author:': 'dccreator:',
@ -55,7 +55,7 @@ shorcut_dict = {
def request(query, params):
# replace shortcuts with API advanced search keywords
for key, val in shorcut_dict.items():
for key, val in shortcut_dict.items():
query = re.sub(key, val, query)
# basic search

View File

@ -10,7 +10,7 @@ On the `preference page`_ Bing offers a lot of languages an regions (see section
LANGUAGE and COUNTRY/REGION). The Language is the language of the UI, we need
in SearXNG to get the translations of data such as *"published last week"*.
There is a description of the offical search-APIs_, unfortunately this is not
There is a description of the official search-APIs_, unfortunately this is not
the API we can use or that bing itself would use. You can look up some things
in the API to get a better picture of bing, but the value specifications like
the market codes are usually outdated or at least no longer used by bing itself.
@ -91,7 +91,7 @@ def request(query, params):
page = params.get('pageno', 1)
query_params = {
'q': query,
# if arg 'pq' is missed, somtimes on page 4 we get results from page 1,
# if arg 'pq' is missed, sometimes on page 4 we get results from page 1,
# don't ask why it is only sometimes / its M$ and they have never been
# deterministic ;)
'pq': query,
@ -177,7 +177,7 @@ def response(resp):
logger.debug('result error :\n%s', e)
if result_len and _page_offset(resp.search_params.get("pageno", 0)) > result_len:
# Avoid reading more results than avalaible.
# Avoid reading more results than available.
# For example, if there is 100 results from some search and we try to get results from 120 to 130,
# Bing will send back the results from 0 to 10 and no error.
# If we compare results count with the first parameter of the request we can avoid this "invalid" results.

View File

@ -42,7 +42,7 @@ about = {
}
send_accept_language_header = True
"""DuckDuckGo-Lite tries to guess user's prefered language from the HTTP
"""DuckDuckGo-Lite tries to guess user's preferred language from the HTTP
``Accept-Language``. Optional the user can select a region filter (but not a
language).
"""
@ -402,7 +402,7 @@ def fetch_traits(engine_traits: EngineTraits):
"""Fetch languages & regions from DuckDuckGo.
SearXNG's ``all`` locale maps DuckDuckGo's "Alle regions" (``wt-wt``).
DuckDuckGo's language "Browsers prefered language" (``wt_WT``) makes no
DuckDuckGo's language "Browsers preferred language" (``wt_WT``) makes no
sense in a SearXNG request since SearXNG's ``all`` will not add a
``Accept-Language`` HTTP header. The value in ``engine_traits.all_locale``
is ``wt-wt`` (the region).

View File

@ -142,7 +142,7 @@ search_url = base_url + '/sp/search'
# specific xpath variables
# ads xpath //div[@id="results"]/div[@id="sponsored"]//div[@class="result"]
# not ads: div[@class="result"] are the direct childs of div[@id="results"]
# not ads: div[@class="result"] are the direct children of div[@id="results"]
search_form_xpath = '//form[@id="search"]'
"""XPath of Startpage's origin search form

View File

@ -208,7 +208,7 @@ def response(resp):
'title': backlink['image_name'],
'img_src': backlink['url'],
'format': tineye_match['image_format'],
'widht': tineye_match['width'],
'width': tineye_match['width'],
'height': tineye_match['height'],
'publishedDate': backlink['crawl_date'],
}

View File

@ -32,7 +32,7 @@ void_arch = 'x86_64'
"""Default architecture to search for. For valid values see :py:obj:`ARCH_RE`"""
ARCH_RE = re.compile('aarch64-musl|armv6l-musl|armv7l-musl|x86_64-musl|aarch64|armv6l|armv7l|i686|x86_64')
"""Regular expresion that match a architecture in the query string."""
"""Regular expression that match a architecture in the query string."""
def request(query, params):

View File

@ -12,7 +12,7 @@ ipv6_prefix = 48
[botdetection.ip_limit]
# To get unlimited access in a local network, by default link-lokal addresses
# To get unlimited access in a local network, by default link-local addresses
# (networks) are not monitored by the ip_limit
filter_link_local = false

View File

@ -120,7 +120,7 @@ _TR_LOCALES: list[str] = []
def get_translation_locales() -> list[str]:
"""Returns the list of transaltion locales (*underscore*). The list is
"""Returns the list of translation locales (*underscore*). The list is
generated from the translation folders in :origin:`searx/translations`"""
global _TR_LOCALES # pylint:disable=global-statement

View File

@ -234,7 +234,7 @@ def _parse_text_and_convert(search, from_query, to_query):
value = target_from_si(float(value))
if measured.group('E'):
# when incomming notation is scientific, outgoing notation is scientific
# when incoming notation is scientific, outgoing notation is scientific
result = babel.numbers.format_scientific(value, locale=_locale)
else:
result = babel.numbers.format_decimal(value, locale=_locale, format='#,##0.##########;-#')

View File

@ -325,7 +325,7 @@ class ClientPref:
# hint: searx.webapp.get_client_settings should be moved into this class
locale: babel.Locale
"""Locale prefered by the client."""
"""Locale preferred by the client."""
def __init__(self, locale: Optional[babel.Locale] = None):
self.locale = locale

View File

@ -325,6 +325,36 @@ engines:
    shortcut: 9g
    disabled: true

  - name: adobe stock
    engine: adobe_stock
    shortcut: asi
    categories: ["images"]
    # https://docs.searxng.org/dev/engines/online/adobe_stock.html
    adobe_order: relevance
    adobe_content_types: ["photo", "illustration", "zip_vector", "template", "3d", "image"]
    timeout: 6
    disabled: true

  - name: adobe stock video
    engine: adobe_stock
    shortcut: asv
    network: adobe stock
    categories: ["videos"]
    adobe_order: relevance
    adobe_content_types: ["video"]
    timeout: 6
    disabled: true

  - name: adobe stock audio
    engine: adobe_stock
    shortcut: asa
    network: adobe stock
    categories: ["music"]
    adobe_order: relevance
    adobe_content_types: ["audio"]
    timeout: 6
    disabled: true

  - name: alpine linux packages
    engine: alpinelinux
    disabled: true
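
All three entries ship with ``disabled: true``, so on a default instance they
are selected per query via the bang shortcuts from the commit message. A rough
smoke test against a local instance; the URL, the port and the ``json`` entry
in ``search.formats`` are assumptions, not part of this commit::

    import json
    from urllib.request import urlopen

    # !asi !asv !asa selects the three adobe stock engines for this query only
    url = "http://127.0.0.1:8888/search?q=%21asi%20%21asv%20%21asa%20sound&format=json"
    with urlopen(url) as resp:
        data = json.load(resp)
    print(len(data["results"]), "results")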

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -1 +1 @@
{"version":3,"file":"searxng.head.min.js","sources":["../src/js/head/00_init.js"],"sourcesContent":["/* SPDX-License-Identifier: AGPL-3.0-or-later */\n(function (w, d) {\n 'use strict';\n\n // add data- properties\n var script = d.currentScript || (function () {\n var scripts = d.getElementsByTagName('script');\n return scripts[scripts.length - 1];\n })();\n\n w.searxng = {\n settings: JSON.parse(atob(script.getAttribute('client_settings')))\n };\n\n // update the css\n var hmtlElement = d.getElementsByTagName(\"html\")[0];\n hmtlElement.classList.remove('no-js');\n hmtlElement.classList.add('js');\n\n})(window, document);\n"],"names":["w","d","script","currentScript","scripts","getElementsByTagName","length","searxng","settings","JSON","parse","atob","getAttribute","hmtlElement","classList","remove","add","window","document"],"mappings":"CACA,SAAWA,EAAGC,GACZ,aAGA,IAAIC,EAASD,EAAEE,eAAkB,WAC/B,IAAIC,EAAUH,EAAEI,qBAAqB,QAAQ,EAC7C,OAAOD,EAAQA,EAAQE,OAAS,EACjC,EAAE,EAEHN,EAAEO,QAAU,CACVC,SAAUC,KAAKC,MAAMC,KAAKT,EAAOU,aAAa,iBAAiB,CAAC,CAAC,CACnE,EAGA,IAAIC,EAAcZ,EAAEI,qBAAqB,MAAM,EAAE,GACjDQ,EAAYC,UAAUC,OAAO,OAAO,EACpCF,EAAYC,UAAUE,IAAI,IAAI,CAE/B,GAAEC,OAAQC,QAAQ"}
{"version":3,"file":"searxng.head.min.js","sources":["../src/js/head/00_init.js"],"sourcesContent":["/* SPDX-License-Identifier: AGPL-3.0-or-later */\n(function (w, d) {\n 'use strict';\n\n // add data- properties\n var script = d.currentScript || (function () {\n var scripts = d.getElementsByTagName('script');\n return scripts[scripts.length - 1];\n })();\n\n w.searxng = {\n settings: JSON.parse(atob(script.getAttribute('client_settings')))\n };\n\n // update the css\n var htmlElement = d.getElementsByTagName(\"html\")[0];\n htmlElement.classList.remove('no-js');\n htmlElement.classList.add('js');\n\n})(window, document);\n"],"names":["w","d","script","currentScript","scripts","getElementsByTagName","length","searxng","settings","JSON","parse","atob","getAttribute","htmlElement","classList","remove","add","window","document"],"mappings":"CACA,SAAWA,EAAGC,GACZ,aAGA,IAAIC,EAASD,EAAEE,eAAkB,WAC/B,IAAIC,EAAUH,EAAEI,qBAAqB,QAAQ,EAC7C,OAAOD,EAAQA,EAAQE,OAAS,EACjC,EAAE,EAEHN,EAAEO,QAAU,CACVC,SAAUC,KAAKC,MAAMC,KAAKT,EAAOU,aAAa,iBAAiB,CAAC,CAAC,CACnE,EAGA,IAAIC,EAAcZ,EAAEI,qBAAqB,MAAM,EAAE,GACjDQ,EAAYC,UAAUC,OAAO,OAAO,EACpCF,EAAYC,UAAUE,IAAI,IAAI,CAE/B,GAAEC,OAAQC,QAAQ"}

File diff suppressed because one or more lines are too long

View File

@ -13,8 +13,8 @@
};
// update the css
var hmtlElement = d.getElementsByTagName("html")[0];
hmtlElement.classList.remove('no-js');
hmtlElement.classList.add('js');
var htmlElement = d.getElementsByTagName("html")[0];
htmlElement.classList.remove('no-js');
htmlElement.classList.add('js');
})(window, document);

View File

@ -8,7 +8,7 @@ window.searxng = (function (w, d) {
'use strict';
// not invented here tookit with bugs fixed elsewhere
// not invented here toolkit with bugs fixed elsewhere
// purposes : be just good enough and as small as possible
// from https://plainjs.com/javascript/events/live-binding-event-handlers-14/

View File

@ -441,7 +441,7 @@ searxng.ready(function () {
var body = document.getElementsByTagName('body')[0];
body.appendChild(helpPanel);
} else {
// togggle hidden
// toggle hidden
helpPanel.classList.toggle('invisible');
return;
}

View File

@ -67,7 +67,7 @@
} */
});
// this event occour only once per element
// this event occur only once per element
event.preventDefault();
});
});

View File

@ -13,8 +13,8 @@ iframe[src^="https://www.mixcloud.com"] {
// issue with an image URL that is blocked since it is an a Cross-Origin
// request. The alternative text (<img alt='Mixcloud Logo'> then cause an
// scrollbar in the inner of the iframe we can't avoid. Another quirk comes
// when pressing the play button, somtimes the shown player has an height of
// 200px, somtimes 250px.
// when pressing the play button, sometimes the shown player has an height of
// 200px, sometimes 250px.
height: 250px;
}

View File

@ -4,31 +4,31 @@ see the CSS rules for #results in style.less ( grid-template-columns and gap).
In this file, the --center-page-width values comes from the Oscar theme (Bootstrap 3).
All rules starts with ".center-aligment-yes #main_results" to be enabled only
All rules starts with ".center-alignment-yes #main_results" to be enabled only
on the /search URL and when the "center alignment" preference is enabled.
*/
@media screen and (min-width: @phone) {
.center-aligment-yes #main_results {
.center-alignment-yes #main_results {
--center-page-width: 48rem;
}
}
@media screen and (min-width: 62rem) {
.center-aligment-yes #main_results {
.center-alignment-yes #main_results {
--center-page-width: 60rem;
}
}
@media screen and (min-width: @tablet) {
.center-aligment-yes #main_results {
.center-alignment-yes #main_results {
--center-page-width: 73rem;
}
}
@media screen and (min-width: @phone) and (max-width: @tablet) {
// any change must be reset in @media screen and (min-width: @tablet) { ... }
.center-aligment-yes #main_results {
.center-alignment-yes #main_results {
#results {
grid-template-columns: 60% calc(40% - @results-gap);
margin-left: 0;
@ -50,7 +50,7 @@ on the /search URL and when the "center alignment" preference is enabled.
}
@media screen and (min-width: @tablet) {
.center-aligment-yes #main_results {
.center-alignment-yes #main_results {
display: flex;
flex-direction: column;
align-items: center;

View File

@ -297,7 +297,7 @@ article[data-vim-selected].category-social {
padding-top: 0.6rem;
.ltr-padding-right(1rem);
width: 7rem;
height: unset; // remove heigth value that was needed for lazy loading
height: unset; // remove height value that was needed for lazy loading
}
.break {
@ -399,7 +399,7 @@ article[data-vim-selected].category-social {
padding-top: 0.6rem;
.ltr-padding-right(1rem);
width: 20rem;
height: unset; // remove heigth value that was needed for lazy loading
height: unset; // remove height value that was needed for lazy loading
}
}
@ -639,12 +639,12 @@ summary.title {
padding-top: 1em;
}
.sidebar-collapsable {
.sidebar-collapsible {
border-top: 1px solid var(--color-sidebar-border);
padding-bottom: 0.5em;
}
#sidebar-end-collapsable {
#sidebar-end-collapsible {
border-bottom: 1px solid var(--color-sidebar-border);
width: 100%;
}
@ -1007,10 +1007,10 @@ summary.title {
}
@media screen and (min-width: @phone) and (max-width: @tablet) {
// when .center-aligment-yes, see style-center.less
// when .center-alignment-yes, see style-center.less
// the media query includes "min-width: @phone"
// because the phone layout includes the tablet layout unconditionally.
.center-aligment-no {
.center-alignment-no {
.results-tablet();
}
}

View File

@ -1,5 +1,5 @@
<!DOCTYPE html>
<html class="no-js theme-{{ preferences.get_value('simple_style') or 'auto' }} center-aligment-{{ preferences.get_value('center_alignment') and 'yes' or 'no' }}" lang="{{ locale_rfc5646 }}" {% if rtl %} dir="rtl"{% endif %}>
<html class="no-js theme-{{ preferences.get_value('simple_style') or 'auto' }} center-alignment-{{ preferences.get_value('center_alignment') and 'yes' or 'no' }}" lang="{{ locale_rfc5646 }}" {% if rtl %} dir="rtl"{% endif %}>
<head>
<meta charset="UTF-8">
<meta name="description" content="SearXNG — a privacy-respecting, open metasearch engine">

View File

@ -1,5 +1,5 @@
<div id="apis" role="complementary" aria-labelledby="apis-title">
<details class="sidebar-collapsable">
<details class="sidebar-collapsible">
<summary class="title" id="apis-title">{{ _('Download results') }}</summary>
<div class="wrapper">
{%- for output_type in search_formats -%}

View File

@ -1,9 +1,9 @@
<div id="engines_msg">
{% if (not results and not answers) or not max_response_time %}
<details class="sidebar-collapsable" open>
<details class="sidebar-collapsible" open>
<summary class="title" id="engines_msg-title">{{ _('Messages from the search engines') }}</summary>
{% else %}
<details class="sidebar-collapsable">
<details class="sidebar-collapsible">
<summary class="title" id="engines_msg-title">{{ _('Response time') }}: {{ max_response_time | round(1) }} {{ _('seconds') }}</summary>
{% endif %}
<table class="engine-stats" id="engines_msg-table">

View File

@ -1,5 +1,5 @@
<div id="search_url" role="complementary" aria-labelledby="search_url-title">
<details class="sidebar-collapsable">
<details class="sidebar-collapsible">
<summary class="title" id="search_url-title">{{ _('Search URL') }}</summary>
<button id="copy_url" type="submit" data-copied-text="{{ _('Copied') }}">{{ _('Copy') }}</button>
<div class="selectable_url">

View File

@ -1,5 +1,5 @@
<div id="suggestions" role="complementary" aria-labelledby="suggestions-title">
<details class="sidebar-collapsable">
<details class="sidebar-collapsible">
<summary class="title" id="suggestions-title">{{ _('Suggestions') }}</summary>
<div class="wrapper">
{%- for suggestion in suggestions -%}

View File

@ -61,7 +61,7 @@ or manually by executing the searx/webapp.py file? -->
{%- endfor -%}
</textarea>
<input type="checkbox" id="step1">
<label for="step1">{{ _('Start submiting a new issue on GitHub') }}</label>
<label for="step1">{{ _('Start submitting a new issue on GitHub') }}</label>
<div class="step1 step_content">
<p><a href="{{ get_setting('brand.issue_url') }}?q=is%3Aissue+Bug:%20{{ engine_name }} {{ technical_report }}" target="_blank" rel="noreferrer noreferrer">{{ _('Please check for existing bugs about this engine on GitHub') }}</a></p>
</div>

View File

@ -44,7 +44,7 @@
{%- if infoboxes -%}
<div id="infoboxes">
<details open class="sidebar-collapsable">
<details open class="sidebar-collapsible">
<summary class="title">{{ _('Info') }}</summary>
{%- for infobox in infoboxes -%}
{%- include 'simple/elements/infobox.html' -%}
@ -67,7 +67,7 @@
{%- include 'simple/elements/apis.html' -%}
{%- endif -%}
<div id="sidebar-end-collapsable"></div>
<div id="sidebar-end-collapsible"></div>
</div>
{% if corrections %}

View File

@ -677,7 +677,7 @@ msgid "proxied"
msgstr ""
#: searx/templates/simple/new_issue.html:64
msgid "Start submiting a new issue on GitHub"
msgid "Start submitting a new issue on GitHub"
msgstr ""
#: searx/templates/simple/new_issue.html:66

View File

@ -48,7 +48,7 @@ _XPATH_CACHE: Dict[str, XPath] = {}
_LANG_TO_LC_CACHE: Dict[str, Dict[str, str]] = {}
_FASTTEXT_MODEL: Optional["fasttext.FastText._FastText"] = None # type: ignore
"""fasttext model to predict laguage of a search term"""
"""fasttext model to predict language of a search term"""
SEARCH_LANGUAGE_CODES = frozenset([searxng_locale[0].split('-')[0] for searxng_locale in sxng_locales])
"""Languages supported by most searxng engines (:py:obj:`searx.sxng_locales.sxng_locales`)."""

View File

@ -225,7 +225,7 @@ def get_search_query_from_webapp(
"""Assemble data from preferences and request.form (from the HTML form) needed
in a search query.
The returned tuple consits of:
The returned tuple consists of:
1. instance of :py:obj:`searx.search.SearchQuery`
2. instance of :py:obj:`searx.query.RawTextQuery`

View File

@ -72,7 +72,7 @@ def _instance_infosetset_ctx(base_url):
# from searx.network import network
# network.done()
# waiting some seconds before ending the comand line was the only solution I
# waiting some seconds before ending the command line was the only solution I
# found ..
time.sleep(3)

View File

@ -85,7 +85,7 @@ def main():
def fetch_traits_map():
"""Fetchs supported languages for each engine and writes json file with those."""
"""Fetches supported languages for each engine and writes json file with those."""
network.set_timeout_for_thread(10.0)
def log(msg):

View File

@ -25,7 +25,7 @@ from searx.locales import (
)
LOCALE_DATA_FILE = Path(searx_dir) / 'data' / 'locales.json'
TRANSLATOINS_FOLDER = Path(searx_dir) / 'translations'
TRANSLATIONS_FOLDER = Path(searx_dir) / 'translations'
def main():

View File

@ -65,7 +65,7 @@ def main():
test_layer.setUp()
run_robot_tests([getattr(test_webapp, x) for x in dir(test_webapp) if x.startswith('test_')])
except Exception: # pylint: disable=broad-except
print('Error occured: {0}'.format(traceback.format_exc()))
print('Error occurred: {0}'.format(traceback.format_exc()))
sys.exit(1)
finally:
test_layer.tearDown()

View File

@ -772,7 +772,7 @@ docs.clean() {
docs.prebuild() {
# Dummy function to run some actions before sphinx-doc build gets started.
# This finction needs to be overwritten by the application script.
# This function needs to be overwritten by the application script.
true
dump_return $?
}
@ -1065,7 +1065,7 @@ nginx_remove_app() {
# usage: nginx_remove_app <myapp.conf>
info_msg "remove nginx app: $1"
nginx_dissable_app "$1"
nginx_disable_app "$1"
rm -f "${NGINX_APPS_AVAILABLE}/$1"
}
@ -1082,7 +1082,7 @@ nginx_enable_app() {
nginx_reload
}
nginx_dissable_app() {
nginx_disable_app() {
# usage: nginx_disable_app <myapp.conf>
@ -1192,7 +1192,7 @@ apache_remove_site() {
# usage: apache_remove_site <mysite.conf>
info_msg "remove apache site: $1"
apache_dissable_site "$1"
apache_disable_site "$1"
rm -f "${APACHE_SITES_AVAILABLE}/$1"
}
@ -1222,7 +1222,7 @@ apache_enable_site() {
apache_reload
}
apache_dissable_site() {
apache_disable_site() {
# usage: apache_disable_site <mysite.conf>

View File

@ -56,7 +56,7 @@ EOF
go.ver_info(){
# print informations about a golang distribution. To print filename
# print information about a golang distribution. To print filename
# sha256 and size of the archive that fits to your OS and host:
#
# go.ver_info "${GO_VERSION}" archive "$(go.os)" "$(go.arch)" filename sha256 size

View File

@ -36,7 +36,7 @@ nvm.is_installed() {
}
if [[ -z "${NVM_DIR}" ]]; then
# nvm is not pre-intalled in $HOME. Prepare for using nvm from <repo-root>
# nvm is not pre-installed in $HOME. Prepare for using nvm from <repo-root>
NVM_DIR="$(git rev-parse --show-toplevel)/${NVM_LOCAL_FOLDER}"
fi
export NVM_DIR
@ -93,7 +93,7 @@ nvm.help() {
nvm.: use nvm (without dot) to execute nvm commands directly
install : install NVM locally at $(git rev-parse --show-toplevel)/${NVM_LOCAL_FOLDER}
clean : remove NVM installation
status : prompt some status informations about nvm & node
status : prompt some status information about nvm & node
nodejs : install Node.js latest LTS
cmd ... : run command ... in NVM environment
bash : start bash interpreter with NVM environment sourced
@ -108,7 +108,7 @@ nvm.install() {
pushd "${NVM_DIR}" &> /dev/null
git fetch --all | prefix_stdout " ${_Yellow}||${_creset} "
else
# delete any leftovers from previos installations
# delete any leftovers from previous installations
if nvm.is_local; then
rm -rf "${NVM_DIR}"
fi

View File

@ -96,7 +96,7 @@ static.build.commit() {
return 1
fi
# drop existing commit from previos build
# drop existing commit from previous build
static.build.drop &>/dev/null
( set -e

View File

@ -159,7 +159,7 @@ main() {
;;
copy)
case $2 in
''|images) lxc_copy_images_localy;;
''|images) lxc_copy_images_locally;;
*) usage "$_usage"; exit 42;;
esac
;;
@ -167,7 +167,7 @@ main() {
sudo_or_exit
case $2 in
''|--|containers) remove_containers ;;
images) lxc_delete_images_localy ;;
images) lxc_delete_images_locally ;;
${LXC_HOST_PREFIX}-*)
! lxc_exists "$2" && warn_msg "container not yet exists: $2" && exit 0
if ask_yn "Do you really want to delete container $2"; then
@ -291,7 +291,7 @@ build_all_containers() {
rst_title "Build all LXC containers of suite"
echo
usage_containers
lxc_copy_images_localy
lxc_copy_images_locally
lxc_init_all_containers
lxc_config_all_containers
lxc_boilerplate_all_containers
@ -361,7 +361,7 @@ remove_containers() {
# images
# ------
lxc_copy_images_localy() {
lxc_copy_images_locally() {
rst_title "copy images" section
for ((i=0; i<${#LXC_SUITE[@]}; i+=2)); do
lxc_image_copy "${LXC_SUITE[i]}" "${LXC_SUITE[i+1]}"
@ -369,7 +369,7 @@ lxc_copy_images_localy() {
# lxc image list local: && wait_key
}
lxc_delete_images_localy() {
lxc_delete_images_locally() {
rst_title "Delete LXC images"
rst_para "local existing images"
echo

View File

@ -33,7 +33,7 @@ ui:
enabled_plugins:
- 'Hash plugin'
- 'Self Informations'
- 'Self Information'
- 'Tracker URL remover'
- 'Ahmia blacklist'
# - 'Hostnames plugin' # see 'hostnames' configuration below