mirror of https://github.com/searxng/searxng.git

Compare commits: bd976c64c9 ... cb67194ef8 (1 commit)

Author | SHA1 | Date
---|---|---
DiamondDemon669 | cb67194ef8 |
@@ -1,5 +1,5 @@
 name: "Checker"
-on: # yamllint disable-line rule:truthy
+on:
   schedule:
     - cron: "0 4 * * 5"
   workflow_dispatch:
@@ -1,5 +1,5 @@
 name: "Update searx.data"
-on: # yamllint disable-line rule:truthy
+on:
   schedule:
     - cron: "59 23 28 * *"
   workflow_dispatch:
@@ -1,6 +1,6 @@
 name: Integration

-on: # yamllint disable-line rule:truthy
+on:
   push:
     branches: ["master"]
   pull_request:

@@ -16,7 +16,7 @@ jobs:
     strategy:
       matrix:
         os: [ubuntu-20.04]
-        python-version: ["3.9", "3.10", "3.11", "3.12"]
+        python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
     steps:
       - name: Checkout
         uses: actions/checkout@v4

@@ -25,7 +25,7 @@ jobs:
           sudo ./utils/searxng.sh install packages
           sudo apt install firefox
       - name: Set up Python
-        uses: actions/setup-python@v5
+        uses: actions/setup-python@v4
         with:
           python-version: ${{ matrix.python-version }}
           architecture: 'x64'

@@ -45,6 +45,14 @@ jobs:
           make V=1 gecko.driver
       - name: Run tests
         run: make V=1 ci.test
+      - name: Test coverage
+        run: make V=1 test.coverage
+      - name: Store coverage result
+        uses: actions/upload-artifact@v3
+        with:
+          name: coverage-${{ matrix.python-version }}
+          path: coverage/
+          retention-days: 60

   themes:
     name: Themes

@@ -55,9 +63,9 @@ jobs:
       - name: Install Ubuntu packages
         run: sudo ./utils/searxng.sh install buildhost
       - name: Set up Python
-        uses: actions/setup-python@v5
+        uses: actions/setup-python@v4
         with:
-          python-version: '3.12'
+          python-version: '3.9'
          architecture: 'x64'
       - name: Cache Python dependencies
         id: cache-python

@@ -67,7 +75,7 @@ jobs:
             ./local
             ./.nvm
             ./node_modules
-          key: python-ubuntu-20.04-3.12-${{ hashFiles('requirements*.txt', 'setup.py','.nvmrc', 'package.json') }}
+          key: python-ubuntu-20.04-3.9-${{ hashFiles('requirements*.txt', 'setup.py','.nvmrc', 'package.json') }}
       - name: Install node dependencies
         run: make V=1 node.env
       - name: Build themes

@@ -87,9 +95,9 @@ jobs:
       - name: Install Ubuntu packages
         run: sudo ./utils/searxng.sh install buildhost
       - name: Set up Python
-        uses: actions/setup-python@v5
+        uses: actions/setup-python@v4
         with:
-          python-version: '3.12'
+          python-version: '3.9'
           architecture: 'x64'
       - name: Cache Python dependencies
         id: cache-python

@@ -99,7 +107,7 @@ jobs:
             ./local
             ./.nvm
             ./node_modules
-          key: python-ubuntu-20.04-3.12-${{ hashFiles('requirements*.txt', 'setup.py','.nvmrc', 'package.json') }}
+          key: python-ubuntu-20.04-3.9-${{ hashFiles('requirements*.txt', 'setup.py','.nvmrc', 'package.json') }}
       - name: Build documentation
         run: |
           make V=1 docs.clean docs.html

@@ -111,7 +119,7 @@ jobs:
           BRANCH: gh-pages
           FOLDER: dist/docs
           CLEAN: true # Automatically remove deleted files from the deploy branch
-          SINGLE_COMMIT: true
+          SINGLE_COMMIT: True
           COMMIT_MESSAGE: '[doc] build from commit ${{ github.sha }}'

   babel:

@@ -131,9 +139,9 @@ jobs:
           fetch-depth: '0'
           token: ${{ secrets.WEBLATE_GITHUB_TOKEN }}
       - name: Set up Python
-        uses: actions/setup-python@v5
+        uses: actions/setup-python@v4
         with:
-          python-version: '3.12'
+          python-version: '3.9'
           architecture: 'x64'
       - name: Cache Python dependencies
         id: cache-python

@@ -143,7 +151,7 @@ jobs:
             ./local
             ./.nvm
             ./node_modules
-          key: python-ubuntu-20.04-3.12-${{ hashFiles('requirements*.txt', 'setup.py','.nvmrc', 'package.json') }}
+          key: python-ubuntu-20.04-3.9-${{ hashFiles('requirements*.txt', 'setup.py','.nvmrc', 'package.json') }}
       - name: weblate & git setup
         env:
           WEBLATE_CONFIG: ${{ secrets.WEBLATE_CONFIG }}

@@ -175,9 +183,9 @@ jobs:
           # make sure "make docker.push" can get the git history
           fetch-depth: '0'
       - name: Set up Python
-        uses: actions/setup-python@v5
+        uses: actions/setup-python@v4
         with:
-          python-version: '3.12'
+          python-version: '3.9'
           architecture: 'x64'
       - name: Cache Python dependencies
         id: cache-python

@@ -187,7 +195,7 @@ jobs:
             ./local
             ./.nvm
             ./node_modules
-          key: python-ubuntu-20.04-3.12-${{ hashFiles('requirements*.txt', 'setup.py','.nvmrc', 'package.json') }}
+          key: python-ubuntu-20.04-3.9-${{ hashFiles('requirements*.txt', 'setup.py','.nvmrc', 'package.json') }}
       - name: Set up QEMU
         if: env.DOCKERHUB_USERNAME != null
         uses: docker/setup-qemu-action@v1
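The cache steps in the hunks above key the build environment on a hash of the dependency manifests (`hashFiles('requirements*.txt', 'setup.py', '.nvmrc', 'package.json')`). A rough sketch of that idea in Python — an illustration of how such a content-addressed key behaves, not how GitHub's `hashFiles()` is implemented:

```python
# Sketch: build a cache key from the same file patterns the workflow hashes.
# The "python-ubuntu-20.04-3.9-" prefix is taken from the diff above; the rest
# is illustrative only.
import glob
import hashlib


def cache_key(prefix, patterns):
    digest = hashlib.sha256()
    for pattern in patterns:
        for path in sorted(glob.glob(pattern)):
            with open(path, "rb") as f:
                digest.update(f.read())
    return prefix + digest.hexdigest()


print(cache_key("python-ubuntu-20.04-3.9-",
                ["requirements*.txt", "setup.py", ".nvmrc", "package.json"]))
```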
@@ -1,5 +1,5 @@
 name: "Security checks"
-on: # yamllint disable-line rule:truthy
+on:
   schedule:
     - cron: "42 05 * * *"
   workflow_dispatch:
@@ -1,5 +1,5 @@
 name: "Update translations"
-on: # yamllint disable-line rule:truthy
+on:
   schedule:
     - cron: "05 07 * * 5"
   workflow_dispatch:

@@ -16,9 +16,9 @@ jobs:
           fetch-depth: '0'
           token: ${{ secrets.WEBLATE_GITHUB_TOKEN }}
       - name: Set up Python
-        uses: actions/setup-python@v5
+        uses: actions/setup-python@v4
         with:
-          python-version: '3.12'
+          python-version: '3.9'
           architecture: 'x64'
       - name: Cache Python dependencies
         id: cache-python

@@ -28,7 +28,7 @@ jobs:
             ./local
             ./.nvm
             ./node_modules
-          key: python-ubuntu-20.04-3.12-${{ hashFiles('requirements*.txt', 'setup.py','.nvmrc', 'package.json') }}
+          key: python-ubuntu-20.04-3.9-${{ hashFiles('requirements*.txt', 'setup.py','.nvmrc', 'package.json') }}
       - name: weblate & git setup
         env:
           WEBLATE_CONFIG: ${{ secrets.WEBLATE_CONFIG }}
@@ -338,7 +338,6 @@ valid-metaclass-classmethod-first-arg=mcs

 # Maximum number of arguments for function / method
 max-args=8
-max-positional-arguments=14

 # Maximum number of attributes for a class (see R0902).
 max-attributes=20
@@ -66,7 +66,7 @@ A user_, admin_ and developer_ handbook is available on the homepage_.
 Contact
 =======

-Ask questions or chat with the SearXNG community (this not a chatbot) on
+Ask questions or just chat about SearXNG on

 IRC
   `#searxng on libera.chat <https://web.libera.chat/?channel=#searxng>`_
@@ -84,9 +84,9 @@ HTML of the site. URL of the SearXNG instance and values are customizable.
 .. code:: html

   <form method="post" action="https://example.org/">
-    <!-- search --> <input type="text" name="q">
-    <!-- categories --> <input type="hidden" name="categories" value="general,social media">
-    <!-- language --> <input type="hidden" name="lang" value="all">
-    <!-- locale --> <input type="hidden" name="locale" value="en">
-    <!-- date filter --> <input type="hidden" name="time_range" value="month">
+    <!-- search --> <input type="text" name="q" />
+    <!-- categories --> <input type="hidden" name="categories" value="general,social media" />
+    <!-- language --> <input type="hidden" name="lang" value="all" />
+    <!-- locale --> <input type="hidden" name="locale" value="en" />
+    <!-- date filter --> <input type="hidden" name="time_range" value="month" />
   </form>
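The form in this hunk documents the parameters a SearXNG instance accepts in a plain POST request. As a quick illustration only (``https://example.org/`` is the README's placeholder URL, and the ``requests`` dependency is an assumption of this sketch), the same query could be sent from Python:

```python
# Sketch: send the same POST request as the HTML form above.
# Parameter names and values are copied from the form; the URL is a placeholder.
import requests

response = requests.post(
    "https://example.org/",
    data={
        "q": "searxng",                        # search terms
        "categories": "general,social media",  # categories
        "lang": "all",                         # language
        "locale": "en",                        # locale
        "time_range": "month",                 # date filter
    },
    timeout=10,
)
print(response.status_code)
```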
@@ -15,7 +15,6 @@ Administrator documentation
   installation-apache
   update-searxng
   answer-captcha
-  searx.favicons
   searx.limiter
   api
   architecture
@@ -1,251 +0,0 @@
-.. _favicons:
-
-========
-Favicons
-========
-
-.. sidebar:: warning
-
-   Don't activate the favicons before reading the documentation.
-
-.. contents::
-   :depth: 2
-   :local:
-   :backlinks: entry
-
-Activating the favicons in SearXNG is very easy, but this **generates a
-significantly higher load** in the client/server communication and increases
-resources needed on the server.
-
-To mitigate these disadvantages, various methods have been implemented,
-including a *cache*. The cache must be parameterized according to your own
-requirements and maintained regularly.
-
-To activate favicons in SearXNG's result list, set a default
-``favicon_resolver`` in the :ref:`search <settings search>` settings:
-
-.. code:: yaml
-
-   search:
-     favicon_resolver: "duckduckgo"
-
-By default and without any extensions, SearXNG serves these resolvers:
-
-- ``duckduckgo``
-- ``allesedv``
-- ``google``
-- ``yandex``
-
-With the above setting favicons are displayed, the user has the option to
-deactivate this feature in his settings. If the user is to have the option of
-selecting from several *resolvers*, a further setting is required / but this
-setting will be discussed :ref:`later <register resolvers>` in this article,
-first we have to setup the favicons cache.
-
-Infrastructure
-==============
-
-The infrastructure for providing the favicons essentially consists of three
-parts:
-
-- :py:obj:`Favicons-Proxy <.favicons.proxy>` (aka *proxy*)
-- :py:obj:`Favicons-Resolvers <.favicons.resolvers>` (aka *resolver*)
-- :py:obj:`Favicons-Cache <.favicons.cache>` (aka *cache*)
-
-To protect the privacy of users, the favicons are provided via a *proxy*. This
-*proxy* is automatically activated with the above activation of a *resolver*.
-Additional requests are required to provide the favicons: firstly, the *proxy*
-must process the incoming requests and secondly, the *resolver* must make
-outgoing requests to obtain the favicons from external sources.
-
-A *cache* has been developed to massively reduce both, incoming and outgoing
-requests. This *cache* is also activated automatically with the above
-activation of a *resolver*. In its defaults, however, the *cache* is minimal
-and not well suitable for a production environment!
-
-.. _favicon cache setup:
-
-Setting up the cache
-====================
-
-To parameterize the *cache* and more settings of the favicons infrastructure, a
-TOML_ configuration is created in the file ``/etc/searxng/favicons.toml``.
-
-.. code:: toml
-
-   [favicons]
-
-   cfg_schema = 1 # config's schema version no.
-
-   [favicons.cache]
-
-   db_url = "/var/cache/searxng/faviconcache.db" # default: "/tmp/faviconcache.db"
-   LIMIT_TOTAL_BYTES = 2147483648 # 2 GB / default: 50 MB
-   # HOLD_TIME = 5184000 # 60 days / default: 30 days
-   # BLOB_MAX_BYTES = 40960 # 40 KB / default 20 KB
-   # MAINTENANCE_MODE = "off" # default: "auto"
-   # MAINTENANCE_PERIOD = 600 # 10min / default: 1h
-
-:py:obj:`cfg_schema <.FaviconConfig.cfg_schema>`:
-  Is required to trigger any processes required for future upgrades / don't
-  change it.
-
-:py:obj:`cache.db_url <.FaviconCacheConfig.db_url>`:
-  The path to the (SQLite_) database file. The default path is in the `/tmp`_
-  folder, which is deleted on every reboot and is therefore unsuitable for a
-  production environment. The FHS_ provides the folder for the
-  application cache
-
-  The FHS_ provides the folder `/var/cache`_ for the cache of applications, so a
-  suitable storage location of SearXNG's caches is folder ``/var/cache/searxng``.
-  In container systems, a volume should be mounted for this folder and in a
-  standard installation (compare :ref:`create searxng user`), the folder must be
-  created and the user under which the SearXNG process is running must be given
-  write permission to this folder.
-
-  .. code:: bash
-
-     $ sudo mkdir /var/cache/searxng
-     $ sudo chown root:searxng /var/cache/searxng/
-     $ sudo chmod g+w /var/cache/searxng/
-
-:py:obj:`cache.LIMIT_TOTAL_BYTES <.FaviconCacheConfig.LIMIT_TOTAL_BYTES>`:
-  Maximum of bytes stored in the cache of all blobs. The limit is only reached
-  at each maintenance interval after which the oldest BLOBs are deleted; the
-  limit is exceeded during the maintenance period.
-
-  .. attention::
-
-     If the maintenance period is too long or maintenance is switched
-     off completely, the cache grows uncontrollably.
-
-SearXNG hosters can change other parameters of the cache as required:
-
-- :py:obj:`cache.HOLD_TIME <.FaviconCacheConfig.HOLD_TIME>`
-- :py:obj:`cache.BLOB_MAX_BYTES <.FaviconCacheConfig.BLOB_MAX_BYTES>`
-
-
-Maintenance of the cache
-------------------------
-
-Regular maintenance of the cache is required! By default, regular maintenance
-is triggered automatically as part of the client requests:
-
-- :py:obj:`cache.MAINTENANCE_MODE <.FaviconCacheConfig.MAINTENANCE_MODE>` (default ``auto``)
-- :py:obj:`cache.MAINTENANCE_PERIOD <.FaviconCacheConfig.MAINTENANCE_PERIOD>` (default ``6000`` / 1h)
-
-As an alternative to maintenance as part of the client request process, it is
-also possible to carry out maintenance using an external process. For example,
-by creating a :man:`crontab` entry for maintenance:
-
-.. code:: bash
-
-   $ python -m searx.favicons cache maintenance
-
-The following command can be used to display the state of the cache:
-
-.. code:: bash
-
-   $ python -m searx.favicons cache state
-
-
-.. _favicon proxy setup:
-
-Proxy configuration
-===================
-
-Most of the options of the :py:obj:`Favicons-Proxy <.favicons.proxy>` are
-already set sensibly with settings from the :ref:`settings.yml <searxng
-settings.yml>` and should not normally be adjusted.
-
-.. code:: toml
-
-   [favicons.proxy]
-
-   max_age = 5184000 # 60 days / default: 7 days (604800 sec)
-
-
-:py:obj:`max_age <.FaviconProxyConfig.max_age>`:
-  The `HTTP Cache-Control max-age`_ response directive indicates that the
-  response remains fresh until N seconds after the response is generated. This
-  setting therefore determines how long a favicon remains in the client's cache.
-  As a rule, in the favicons infrastructure of SearXNG's this setting only
-  affects favicons whose byte size exceeds :ref:`BLOB_MAX_BYTES <favicon cache
-  setup>` (the other favicons that are already in the cache are embedded as
-  `data URL`_ in the :py:obj:`generated HTML <.favicons.proxy.favicon_url>`,
-  which can greatly reduce the number of additional requests).
-
-.. _register resolvers:
-
-Register resolvers
-------------------
-
-A :py:obj:`resolver <.favicon.resolvers>` is a function that obtains the favicon
-from an external source. The resolver functions available to the user are
-registered with their fully qualified name (FQN_) in a ``resolver_map``.
-
-If no ``resolver_map`` is defined in the ``favicon.toml``, the favicon
-infrastructure of SearXNG generates this ``resolver_map`` automatically
-depending on the ``settings.yml``. SearXNG would automatically generate the
-following TOML configuration from the following YAML configuration:
-
-.. code:: yaml
-
-   search:
-     favicon_resolver: "duckduckgo"
-
-.. code:: toml
-
-   [favicons.proxy.resolver_map]
-
-   "duckduckgo" = "searx.favicons.resolvers.duckduckgo"
-
-If this automatism is not desired, then (and only then) a separate
-``resolver_map`` must be created. For example, to give the user two resolvers to
-choose from, the following configuration could be used:
-
-.. code:: toml
-
-   [favicons.proxy.resolver_map]
-
-   "duckduckgo" = "searx.favicons.resolvers.duckduckgo"
-   "allesedv" = "searx.favicons.resolvers.allesedv"
-   # "google" = "searx.favicons.resolvers.google"
-   # "yandex" = "searx.favicons.resolvers.yandex"
-
-.. note::
-
-   With each resolver, the resource requirement increases significantly.
-
-   The number of resolvers increases:
-
-   - the number of incoming/outgoing requests and
-   - the number of favicons to be stored in the cache.
-
-In the following we list the resolvers available in the core of SearXNG, but via
-the FQN_ it is also possible to implement your own resolvers and integrate them
-into the *proxy*:
-
-- :py:obj:`searx.favicons.resolvers.duckduckgo`
-- :py:obj:`searx.favicons.resolvers.allesedv`
-- :py:obj:`searx.favicons.resolvers.google`
-- :py:obj:`searx.favicons.resolvers.yandex`
-
-
-.. _SQLite:
-   https://www.sqlite.org/
-.. _FHS:
-   https://refspecs.linuxfoundation.org/FHS_3.0/fhs/index.html
-.. _`/var/cache`:
-   https://refspecs.linuxfoundation.org/FHS_3.0/fhs/ch05s05.html
-.. _`/tmp`:
-   https://refspecs.linuxfoundation.org/FHS_3.0/fhs/ch03s18.html
-.. _TOML:
-   https://toml.io/en/
-.. _HTTP Cache-Control max-age:
-   https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Cache-Control#response_directives
-.. _data URL:
-   https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/Data_URLs
-.. _FQN: https://en.wikipedia.org/wiki/Fully_qualified_name
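The removed page above registers favicon resolvers in a ``resolver_map`` under their fully qualified names (FQN). A generic sketch of how such dotted names can be turned back into callables — illustrative only, and independent of the actual signature SearXNG's resolvers use:

```python
# Sketch: resolve the FQN strings of a resolver_map to callables.
# The map entries mirror the TOML example in the removed documentation; the
# helper itself is illustrative and needs the named packages to be importable.
import importlib

resolver_map = {
    "duckduckgo": "searx.favicons.resolvers.duckduckgo",
    "allesedv": "searx.favicons.resolvers.allesedv",
}


def load_resolver(name):
    """Import the module part of the FQN and return the named attribute."""
    fqn = resolver_map[name]
    module_name, _, attr = fqn.rpartition(".")
    return getattr(importlib.import_module(module_name), attr)
```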
@@ -9,7 +9,6 @@
   search:
     safe_search: 0
     autocomplete: ""
-    favicon_resolver: ""
     default_lang: ""
     ban_time_on_fail: 5
     max_ban_time_on_fail: 120

@@ -42,11 +41,6 @@
 - ``qwant``
 - ``wikipedia``

-``favicon_resolver``:
-  To activate favicons in SearXNG's result list select a default
-  favicon-resolver, leave blank to turn off the feature. Don't activate the
-  favicons before reading the :ref:`Favicons documentation <favicons>`.
-
 ``default_lang``:
   Default search language - leave blank to detect from browser information or
   use codes from :origin:`searx/languages.py`.
@@ -58,7 +58,7 @@
   Name of the theme you want to use by default on your SearXNG instance.

 ``theme_args.simple_style``:
-  Style of simple theme: ``auto``, ``light``, ``dark``, ``black``
+  Style of simple theme: ``auto``, ``light``, ``dark``

 ``results_on_new_tab``:
   Open result links in a new tab by default.
@@ -113,7 +113,7 @@ ${fedora_build}

   (${SERVICE_USER})$ command -v python && python --version
   $SEARXNG_PYENV/bin/python
-  Python 3.11.10
+  Python 3.8.1

   # update pip's boilerplate ..
   pip install -U pip
@@ -127,7 +127,6 @@ extensions = [
     "sphinx_tabs.tabs", # https://github.com/djungelorm/sphinx-tabs
     'myst_parser', # https://www.sphinx-doc.org/en/master/usage/markdown.html
     'notfound.extension', # https://github.com/readthedocs/sphinx-notfound-page
-    'sphinxcontrib.autodoc_pydantic', # https://github.com/mansenfranzen/autodoc_pydantic
 ]

 autodoc_default_options = {
@@ -25,7 +25,7 @@ Relational Database Management System (RDBMS) are supported:

 - :ref:`engine sqlite`
 - :ref:`engine postgresql`
-- :ref:`engine mysql_server` & :ref:`engine mariadb_server`
+- :ref:`engine mysql_server`

 All of the engines above are just commented out in the :origin:`settings.yml
 <searx/settings.yml>`, as you have to set the required attributes for the

@@ -119,16 +119,3 @@ MySQL
 .. automodule:: searx.engines.mysql_server
   :members:

-.. _engine mariadb_server:
-
-MariaDB
---------
-
-.. sidebar:: info
-
-  - :origin:`mariadb_server.py <searx/engines/mariadb_server.py>`
-  - ``pip install`` :pypi:`mariadb <mariadb>`
-
-
-.. automodule:: searx.engines.mariadb_server
-  :members:
@@ -1,8 +0,0 @@
-.. _gitlab engine:
-
-======
-GitLab
-======
-
-.. automodule:: searx.engines.gitlab
-  :members:
@@ -4,27 +4,22 @@ Welcome to SearXNG

   *Search without being tracked.*

-.. jinja:: searx
-
-   SearXNG is a free internet metasearch engine which aggregates results from up
-   to {{engines | length}} :ref:`search services <configured engines>`. Users
-   are neither tracked nor profiled. Additionally, SearXNG can be used over Tor
-   for online anonymity.
+SearXNG is a free internet metasearch engine which aggregates results from more
+than 70 search services. Users are neither tracked nor profiled. Additionally,
+SearXNG can be used over Tor for online anonymity.

 Get started with SearXNG by using one of the instances listed at searx.space_.
 If you don't trust anyone, you can set up your own, see :ref:`installation`.

-.. jinja:: searx
-
-   .. sidebar:: features
+.. sidebar:: features

   - :ref:`self hosted <installation>`
   - :ref:`no user tracking / no profiling <SearXNG protect privacy>`
   - script & cookies are optional
   - secure, encrypted connections
-  - :ref:`{{engines | length}} search engines <configured engines>`
-  - `58 translations <https://translate.codeberg.org/projects/searxng/searxng/>`_
-  - about 70 `well maintained <https://uptime.searxng.org/>`__ instances on searx.space_
+  - :ref:`about 200 search engines <configured engines>`
+  - `about 60 translations <https://translate.codeberg.org/projects/searxng/searxng/>`_
+  - about 100 `well maintained <https://uptime.searxng.org/>`__ instances on searx.space_
   - :ref:`easy integration of search engines <demo online engine>`
   - professional development: `CI <https://github.com/searxng/searxng/actions>`_,
     `quality assurance <https://dev.searxng.org/>`_ &
@@ -2,9 +2,9 @@
 Why use a private instance?
 ===========================

-.. sidebar:: Is running my own instance worth it?
+.. sidebar:: Is it worth to run my own instance?

-  \.\.\.is a common question among SearXNG users. Before answering this
+  \.\. is a common question among SearXNG users. Before answering this
   question, see what options a SearXNG user has.

 .. contents::

@@ -12,13 +12,13 @@ Why use a private instance?
   :local:
   :backlinks: entry

-Public instances are open to everyone who has access to their URL. Usually, they
+Public instances are open to everyone who has access to its URL. Usually, these
 are operated by unknown parties (from the users' point of view). Private
-instances can be used by a select group of people, such as a SearXNG instance for a
-group of friends, or a company which can be accessed through a VPN. Instances can also be
-single-user instances, which run locally on the user's machine.
+instances can be used by a select group of people. It is for example a SearXNG of
+group of friends or a company which can be accessed through VPN. Also it can be
+single user one which runs on the user's laptop.

-To gain more insight on how these instances work, let's dive into how SearXNG
+To gain more insight on how these instances work let's dive into how SearXNG
 protects its users.

 .. _SearXNG protect privacy:

@@ -26,26 +26,26 @@ protects its users.
 How does SearXNG protect privacy?
 =================================

-SearXNG protects the privacy of its users in multiple ways, regardless of the type
-of the instance (private or public). Removal of private data from search requests
+SearXNG protects the privacy of its users in multiple ways regardless of the type
+of the instance (private, public). Removal of private data from search requests
 comes in three forms:

-1. Removing private data from requests going to search services
-2. Not forwarding anything from third party services through search services
+1. removal of private data from requests going to search services
+2. not forwarding anything from a third party services through search services
    (e.g. advertisement)
-3. Removing private data from requests going to the results pages
+3. removal of private data from requests going to the result pages

 Removing private data means not sending cookies to external search engines and
 generating a random browser profile for every request. Thus, it does not matter
 if a public or private instance handles the request, because it is anonymized in
-both cases. The IP address used will be the IP of the instance, but SearXNG can also be
+both cases. IP addresses will be the IP of the instance. But SearXNG can be
 configured to use proxy or Tor. `Result proxy
 <https://github.com/asciimoo/morty>`__ is supported, too.

-SearXNG does not serve ads or tracking content, unlike most search services. Therefore,
+SearXNG does not serve ads or tracking content unlike most search services. So
 private data is not forwarded to third parties who might monetize it. Besides
-protecting users from search services, both the referring page and search query are
-hidden from the results pages being visited.
+protecting users from search services, both referring page and search query are
+hidden from visited result pages.


 What are the consequences of using public instances?

@@ -53,11 +53,11 @@ What are the consequences of using public instances?

 If someone uses a public instance, they have to trust the administrator of that
 instance. This means that the user of the public instance does not know whether
-their requests are logged, aggregated, and sent or sold to a third party.
+their requests are logged, aggregated and sent or sold to a third party.

-Also, public instances without proper protection are more vulnerable to abuse of
-the search service, which may cause the external service to enforce
-CAPTCHAs or to ban the IP address of the instance. Thus, search requests would return less
+Also, public instances without proper protection are more vulnerable to abusing
+the search service, In this case the external service in exchange returns
+CAPTCHAs or bans the IP of the instance. Thus, search requests return less
 results.

 I see. What about private instances?

@@ -67,10 +67,10 @@ If users run their :ref:`own instances <installation>`, everything is in their
 control: the source code, logging settings and private data. Unknown instance
 administrators do not have to be trusted.

-Furthermore, as the default settings of their instance are editable, there is no
-need to use cookies to tailor SearXNG to their needs and preferences will not
+Furthermore, as the default settings of their instance is editable, there is no
+need to use cookies to tailor SearXNG to their needs. So preferences will not be
 reset to defaults when clearing browser cookies. As settings are stored on
-the user's computer, they will not be accessible to others as long as their computer is
+their computer, it will not be accessible to others as long as their computer is
 not compromised.

 Conclusion

@@ -80,7 +80,7 @@ Always use an instance which is operated by people you trust. The privacy
 features of SearXNG are available to users no matter what kind of instance they
 use.

-For those on the go, or just wanting to try SearXNG for the first time, public
-instances are the best choice. Public instances are also making the
-world a better place by giving those who cannot, or do not want to, run an
-instance access to a privacy-respecting search service.
+If someone is on the go or just wants to try SearXNG for the first time public
+instances are the best choices. Additionally, public instance are making a
+world a better place, because those who cannot or do not want to run an
+instance, have access to a privacy respecting search service.
@@ -1,48 +0,0 @@
-.. _favicons source:
-
-=================
-Favicons (source)
-=================
-
-.. contents::
-   :depth: 2
-   :local:
-   :backlinks: entry
-
-.. automodule:: searx.favicons
-  :members:
-
-.. _favicons.config:
-
-Favicons Config
-===============
-
-.. automodule:: searx.favicons.config
-  :members:
-
-.. _favicons.proxy:
-
-Favicons Proxy
-==============
-
-.. automodule:: searx.favicons.proxy
-  :members:
-
-.. _favicons.resolver:
-
-Favicons Resolver
-=================
-
-.. automodule:: searx.favicons.resolvers
-  :members:
-
-.. _favicons.cache:
-
-Favicons Cache
-==============
-
-.. automodule:: searx.favicons.cache
-  :members:
-
@@ -1,8 +0,0 @@
-.. _sqlite db:
-
-=========
-SQLite DB
-=========
-
-.. automodule:: searx.sqlitedb
-  :members:
manage (2 changes)

@@ -57,7 +57,7 @@ while IFS= read -r line; do
     if [ "$line" != "tests/unit/settings/syntaxerror_settings.yml" ]; then
         YAMLLINT_FILES+=("$line")
     fi
-done <<< "$(git ls-files './tests/*.yml' './searx/*.yml' './utils/templates/etc/searxng/*.yml' '.github/*.yml' '.github/*/*.yml')"
+done <<< "$(git ls-files './tests/*.yml' './searx/*.yml' './utils/templates/etc/searxng/*.yml')"

 RST_FILES=(
     'README.rst'
@@ -2,23 +2,24 @@ mock==5.1.0
 nose2[coverage_plugin]==0.15.1
 cov-core==1.15.0
 black==24.3.0
-pylint==3.3.1
+pylint==3.2.7
 splinter==0.21.0
-selenium==4.26.1
-Pallets-Sphinx-Themes==2.3.0
-Sphinx==7.4.7
-sphinx-issues==5.0.0
+selenium==4.24.0
+Pallets-Sphinx-Themes==2.1.3
+Sphinx<=7.1.2; python_version == '3.8'
+Sphinx==7.4.7; python_version > '3.8'
+sphinx-issues==4.1.0
 sphinx-jinja==2.0.2
-sphinx-tabs==3.4.7
+sphinx-tabs==3.4.5
 sphinxcontrib-programoutput==0.17
-sphinx-autobuild==2024.10.3
+sphinx-autobuild==2021.3.14
 sphinx-notfound-page==1.0.4
 myst-parser==3.0.1
-linuxdoc==20240924
+linuxdoc==20240509
 aiounittest==1.4.2
 yamllint==1.35.1
 wlc==1.15
 coloredlogs==15.0.1
-docutils>=0.21.2
-parameterized==0.9.0
-autodoc_pydantic==2.2.0
+docutils<=0.21; python_version == '3.8'
+docutils>=0.21.2; python_version > '3.8'
@@ -1,7 +1,7 @@
 certifi==2024.8.30
 babel==2.16.0
 flask-babel==4.0.0
-flask==3.1.0
+flask==3.0.3
 jinja2==3.1.4
 lxml==5.3.0
 pygments==2.18.0

@@ -9,13 +9,10 @@ python-dateutil==2.9.0.post0
 pyyaml==6.0.2
 httpx[http2]==0.24.1
 Brotli==1.1.0
-uvloop==0.21.0
+uvloop==0.20.0
 httpx-socks[asyncio]==0.7.7
-setproctitle==1.3.4
+setproctitle==1.3.3
 redis==5.0.8
 markdown-it-py==3.0.0
 fasttext-predict==0.9.2.2
-tomli==2.0.2; python_version < '3.11'
-msgspec==0.18.6
-eval_type_backport; python_version < '3.9'
-typer-slim==0.13.1
+pytomlpp==1.0.13; python_version < '3.11'
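Several of the pins above carry environment markers such as ``; python_version < '3.11'``, so a package is only installed on matching interpreters. A small sketch of how such markers evaluate, using the ``packaging`` library (an assumption of this example; pip implements the same PEP 508 rules):

```python
# Sketch: evaluate the environment markers that appear in the requirements above
# against the interpreter running this script.
from packaging.markers import Marker

pins = [
    ("pytomlpp==1.0.13", "python_version < '3.11'"),
    ("docutils<=0.21", "python_version == '3.8'"),
    ("docutils>=0.21.2", "python_version > '3.8'"),
]
for requirement, marker in pins:
    applies = Marker(marker).evaluate()
    print(f"{requirement:20s}  [{marker}]  install: {applies}")
```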
@@ -14,7 +14,17 @@ import typing
 import logging
 import pathlib

-from ..compat import tomllib
+try:
+    import tomllib
+
+    pytomlpp = None
+    USE_TOMLLIB = True
+except ImportError:
+    import pytomlpp
+
+    tomllib = None
+    USE_TOMLLIB = False
+

 __all__ = ['Config', 'UNSET', 'SchemaIssue']

@@ -173,6 +183,8 @@ class Config:


 def toml_load(file_name):
+    if USE_TOMLLIB:
+        # Python >= 3.11
     try:
         with open(file_name, "rb") as f:
             return tomllib.load(f)

@@ -180,6 +192,13 @@ def toml_load(file_name):
             msg = str(exc).replace('\t', '').replace('\n', ' ')
             log.error("%s: %s", file_name, msg)
             raise
+    # fallback to pytomlpp for Python < 3.11
+    try:
+        return pytomlpp.load(file_name)
+    except pytomlpp.DecodeError as exc:
+        msg = str(exc).replace('\t', '').replace('\n', ' ')
+        log.error("%s: %s", file_name, msg)
+        raise


 # working with dictionaries
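The hunks above drop the ``searx.compat`` import in favour of an inline ``tomllib``/``pytomlpp`` fallback. A self-contained sketch of the same pattern (the config path at the end is hypothetical):

```python
# Sketch of the fallback reintroduced above: use the standard-library tomllib on
# Python >= 3.11 and fall back to pytomlpp on older interpreters.
try:
    import tomllib  # Python >= 3.11

    pytomlpp = None
    USE_TOMLLIB = True
except ImportError:
    import pytomlpp  # pip install pytomlpp

    tomllib = None
    USE_TOMLLIB = False


def toml_load(file_name):
    """Load a TOML file with whichever backend is available."""
    if USE_TOMLLIB:
        with open(file_name, "rb") as f:
            return tomllib.load(f)
    return pytomlpp.load(file_name)


# Example (hypothetical path):
# cfg = toml_load("/etc/searxng/limiter.toml")
```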
@@ -28,7 +28,7 @@ And in the HTML template from flask a stylesheet link is needed (the value of

   <link rel="stylesheet"
         href="{{ url_for('client_token', token=link_token) }}"
-        type="text/css" >
+        type="text/css" />

 .. _X-Forwarded-For:
    https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/X-Forwarded-For
@@ -1,18 +0,0 @@
-# SPDX-License-Identifier: AGPL-3.0-or-later
-"""Compatibility with older versions"""
-
-# pylint: disable=unused-import
-
-__all__ = [
-    "tomllib",
-]
-
-import sys
-
-# TOML (lib) compatibility
-# ------------------------
-
-if sys.version_info >= (3, 11):
-    import tomllib
-else:
-    import tomli as tomllib
File diff suppressed because it is too large
@@ -7,7 +7,6 @@
 "bn": "সংযুক্ত আরব আমিরাতের দিরহাম",
 "ca": "dírham dels Emirats Àrabs Units",
 "cs": "dirham Spojených arabských emirátů",
-"cy": "dirham yr Emiradau Arabaidd Unedig",
 "da": "Emiratisk dirham",
 "de": "VAE-Dirham",
 "en": "United Arab Emirates dirham",

@@ -24,7 +23,7 @@
 "ja": "UAEディルハム",
 "ko": "아랍에미리트 디르함",
 "lt": "Jungtinių Arabų Emyratų dirhamas",
-"ml": "AE92 0530 0000 1514 1185 002",
+"ml": "യുണൈറ്റഡ് അറബ് എമിരേറ്റ്സ് ദിർഹം",
 "ms": "Dirham Emiriah Arab Bersatu",
 "nl": "VAE-Dirham",
 "oc": "Diram emirati",

@@ -82,7 +81,7 @@
 "af": "Albanese lek",
 "ar": "ليك ألباني",
 "bg": "Албански лек",
-"ca": "Lek (moneda)",
+"ca": "lek",
 "cs": "Albánský lek",
 "cy": "Lek",
 "da": "Lek",

@@ -278,7 +277,6 @@
 "bg": "Австралийски долар",
 "ca": "dòlar australià",
 "cs": "australský dolar",
-"cy": "Doler Awstralia",
 "da": "australsk dollar",
 "de": "Australischer Dollar",
 "en": "Australian dollar",

@@ -355,7 +353,6 @@
 "af": "Azerbeidjanse manat",
 "ar": "مانات أذربيجاني",
 "bg": "Азербайджански манат",
-"bn": "আজারবাইজানি মানাত",
 "ca": "manat azerbaidjanès",
 "cs": "Ázerbájdžánský manat",
 "cy": "Manat Aserbaijan",

@@ -383,7 +380,6 @@
 "nl": "Azerbeidzjaanse manat",
 "oc": "Manat",
 "pa": "ਅਜ਼ਰਬਾਈਜਾਨੀ ਮਨਾਤ",
-"pap": "Manat Azerbaijano",
 "pl": "Manat azerski",
 "pt": "Manat azeri",
 "ro": "Manat azer",

@@ -393,7 +389,6 @@
 "sr": "азербејџански манат",
 "sv": "Azerbajdzjansk manat",
 "ta": "அசர்பைச்சானிய மனாத்து",
-"th": "มานัตอาเซอร์ไบจาน",
 "tr": "Azerbaycan manatı",
 "uk": "Азербайджанський манат",
 "vi": "Manat Azerbaijan"

@@ -607,7 +602,6 @@
 "pt": "Franco do Burúndi",
 "ro": "franc burundez",
 "ru": "бурундийский франк",
-"sk": "Burundský frank",
 "sl": "burundijski frank",
 "sr": "бурундски франак",
 "sv": "Burundisk franc",

@@ -1329,7 +1323,6 @@
 "pl": "escudo Zielonego Przylądka",
 "pt": "escudo cabo-verdiano",
 "ru": "Эскудо Кабо-Верде",
-"sk": "Kapverdské escudo",
 "sl": "zelenortski eskudo",
 "sr": "зеленортски ескудо",
 "sv": "Kapverdisk escudo",

@@ -1408,7 +1401,6 @@
 "pl": "frank Dżibuti",
 "pt": "franco do Jibuti",
 "ru": "Франк Джибути",
-"sk": "Džibutský frank",
 "sr": "џибутски франак",
 "sv": "Djiboutisk franc",
 "tr": "Cibuti frangı",

@@ -1487,7 +1479,6 @@
 "pt": "peso dominicano",
 "ro": "peso dominican",
 "ru": "доминиканское песо",
-"sl": "dominikanski peso",
 "sr": "доминикански пезос",
 "sv": "Dominikansk peso",
 "tr": "Dominik pesosu",

@@ -1522,7 +1513,6 @@
 "pt": "dinar argelino",
 "ro": "Dinar algerian",
 "ru": "алжирский динар",
-"sk": "Alžírský dinár",
 "sl": "alžirski dinar",
 "sr": "алжирски динар",
 "sv": "Algerisk dinar",

@@ -1974,7 +1964,6 @@
 "pl": "frank gwinejski",
 "pt": "Franco da Guiné",
 "ru": "Гвинейский франк",
-"sk": "Guinejský frank",
 "sl": "gvinejski frank",
 "sr": "гвинејски франак",
 "sv": "Guinesisk franc",

@@ -2695,7 +2684,6 @@
 "pt": "Franco comoriano",
 "ro": "Franc comorian",
 "ru": "Франк Комор",
-"sk": "Komorský frank",
 "sr": "коморски франак",
 "sv": "Komoransk franc",
 "tr": "Komor frangı",
@@ -2993,7 +2981,6 @@
 "pt": "rúpia do Sri Lanka",
 "ru": "ланкийская рупия",
 "si": "ශ්රී ලංකා රුපියල",
-"sk": "Srílanská rupia",
 "sl": "šrilanška rupija",
 "sr": "шриланчанска рупија",
 "sv": "Lankesisk rupie",

@@ -3067,7 +3054,7 @@
 "uk": "Лоті"
 },
 "LYD": {
-"ar": "دينار ذهبي",
+"ar": "دينار ليبي",
 "bg": "Либийски динар",
 "ca": "dinar libi",
 "cs": "Libyjský dinár",

@@ -3129,7 +3116,6 @@
 "pt": "Dirham marroquino",
 "ro": "Dirham marocan",
 "ru": "марокканский дирхам",
-"sk": "Marocký dirham",
 "sl": "maroški dirham",
 "sr": "марокански дирхам",
 "sv": "Marockansk dirham",

@@ -3141,7 +3127,6 @@
 "bg": "Молдовска лея",
 "ca": "leu moldau",
 "cs": "moldavský lei",
-"cy": "leu Moldofa",
 "de": "moldauischer Leu",
 "en": "Moldovan leu",
 "eo": "moldava leo",

@@ -3149,7 +3134,6 @@
 "et": "Moldova leu",
 "fi": "Moldovan leu",
 "fr": "leu moldave",
-"gl": "leu moldovo",
 "he": "לאו מולדובני",
 "hr": "moldavski lej",
 "hu": "moldován lej",

@@ -3358,7 +3342,6 @@
 "bg": "Мавританска угия",
 "ca": "ouguiya",
 "cs": "Mauritánská ukíjá",
-"cy": "ouguiya Mawritania",
 "da": "Ouguiya",
 "de": "Ouguiya",
 "en": "Mauritanian ouguiya",

@@ -3381,7 +3364,6 @@
 "pl": "Ugija",
 "pt": "Uguia",
 "ru": "Мавританская угия",
-"sk": "Mauritánska ukíjá",
 "sr": "мауританска огија",
 "sv": "Mauretansk ouguiya",
 "tr": "Ugiya",

@@ -3537,7 +3519,6 @@
 "bg": "Малайзийски рингит",
 "ca": "ringgit",
 "cs": "Malajsijský ringgit",
-"cy": "ringgit Maleisia",
 "de": "Ringgit",
 "en": "Malaysian ringgit",
 "eo": "malajzia ringito",

@@ -3755,7 +3736,6 @@
 "bn": "নেপালি রুপি",
 "ca": "rupia nepalesa",
 "cs": "Nepálská rupie",
-"cy": "Rupee Nepal",
 "da": "Nepalesiske rupee",
 "de": "Nepalesische Rupie",
 "en": "Nepalese rupee",

@@ -3827,7 +3807,6 @@
 "sl": "novozelandski dolar",
 "sr": "новозеландски долар",
 "sv": "Nyzeeländsk dollar",
-"th": "ดอลลาร์นิวซีแลนด์",
 "tr": "Yeni Zelanda doları",
 "uk": "новозеландський долар",
 "vi": "Đô la New Zealand"

@@ -3870,7 +3849,6 @@
 "bg": "Панамска балбоа",
 "ca": "balboa",
 "cs": "Panamská balboa",
-"cy": "Balboa Panama",
 "de": "Panamaischer Balboa",
 "en": "Panamanian balboa",
 "eo": "panama balboo",

@@ -4011,7 +3989,6 @@
 "bn": "পাকিস্তানি রুপি",
 "ca": "rupia pakistanesa",
 "cs": "Pákistánská rupie",
-"cy": "Rupee Pacistan",
 "da": "Pakistanske rupee",
 "de": "pakistanische Rupie",
 "dv": "ޕާކިސްތާނީ ރުޕީ",

@@ -4137,7 +4114,6 @@
 "ar": "غواراني باراغواي",
 "ca": "guaraní",
 "cs": "paraguayský guaraní",
-"cy": "Gwarani Paragwâi",
 "de": "Paraguayischer Guaraní",
 "en": "Paraguayan guaraní",
 "eo": "paragvaja gvaranio",

@@ -4187,7 +4163,6 @@
 "lt": "Kataro rialas",
 "ms": "Riyal Qatar",
 "nl": "Qatarese rial",
-"oc": "Riyal qatarita",
 "pa": "ਕਤਰੀ ਰਿਆਲ",
 "pl": "Rial Kataru",
 "pt": "Rial catarense",
@ -4335,7 +4310,6 @@
|
||||||
"bg": "Руандийски франк",
|
"bg": "Руандийски франк",
|
||||||
"ca": "franc ruandès",
|
"ca": "franc ruandès",
|
||||||
"cs": "Rwandský frank",
|
"cs": "Rwandský frank",
|
||||||
"cy": "Ffranc Rwanda",
|
|
||||||
"da": "Rwandisk franc",
|
"da": "Rwandisk franc",
|
||||||
"de": "Ruanda-Franc",
|
"de": "Ruanda-Franc",
|
||||||
"en": "Rwandan franc",
|
"en": "Rwandan franc",
|
||||||
|
@ -4681,7 +4655,6 @@
|
||||||
"bg": "Сомалийски шилинг",
|
"bg": "Сомалийски шилинг",
|
||||||
"ca": "xíling somali",
|
"ca": "xíling somali",
|
||||||
"cs": "Somálský šilink",
|
"cs": "Somálský šilink",
|
||||||
"cy": "Swllt Somali",
|
|
||||||
"da": "Somalisk shilling",
|
"da": "Somalisk shilling",
|
||||||
"de": "Somalia-Schilling",
|
"de": "Somalia-Schilling",
|
||||||
"en": "Somali shilling",
|
"en": "Somali shilling",
|
||||||
|
@ -4744,7 +4717,6 @@
|
||||||
"bg": "Южносудански паунд",
|
"bg": "Южносудански паунд",
|
||||||
"ca": "lliura sud-sudanesa",
|
"ca": "lliura sud-sudanesa",
|
||||||
"cs": "jihosúdánská libra",
|
"cs": "jihosúdánská libra",
|
||||||
"cy": "Punt De Swdan",
|
|
||||||
"da": "Sydsudanesiske pund",
|
"da": "Sydsudanesiske pund",
|
||||||
"de": "südsudanesisches Pfund",
|
"de": "südsudanesisches Pfund",
|
||||||
"en": "South Sudanese pound",
|
"en": "South Sudanese pound",
|
||||||
|
@ -4781,7 +4753,6 @@
|
||||||
"ar": "دوبرا ساو تومي وبرينسيب",
|
"ar": "دوبرا ساو تومي وبرينسيب",
|
||||||
"ca": "dobra",
|
"ca": "dobra",
|
||||||
"cs": "Svatotomášská dobra",
|
"cs": "Svatotomášská dobra",
|
||||||
"cy": "Dobra São Tomé a Príncipe",
|
|
||||||
"da": "Dobra",
|
"da": "Dobra",
|
||||||
"de": "São-toméischer Dobra",
|
"de": "São-toméischer Dobra",
|
||||||
"en": "São Tomé and Príncipe dobra",
|
"en": "São Tomé and Príncipe dobra",
|
||||||
|
@ -4817,7 +4788,6 @@
|
||||||
"bn": "সিরীয় পাউন্ড",
|
"bn": "সিরীয় পাউন্ড",
|
||||||
"ca": "lliura siriana",
|
"ca": "lliura siriana",
|
||||||
"cs": "syrská libra",
|
"cs": "syrská libra",
|
||||||
"cy": "Punt Syria",
|
|
||||||
"de": "syrische Lira",
|
"de": "syrische Lira",
|
||||||
"en": "Syrian pound",
|
"en": "Syrian pound",
|
||||||
"eo": "siria pundo",
|
"eo": "siria pundo",
|
||||||
|
@ -4911,7 +4881,6 @@
|
||||||
"pt": "baht",
|
"pt": "baht",
|
||||||
"ru": "тайский бат",
|
"ru": "тайский бат",
|
||||||
"sk": "Thajský baht",
|
"sk": "Thajský baht",
|
||||||
"sl": "tajski baht",
|
|
||||||
"sr": "тајландски бат",
|
"sr": "тајландски бат",
|
||||||
"sv": "Baht",
|
"sv": "Baht",
|
||||||
"ta": "தாய்லாந்தின் பாட்",
|
"ta": "தாய்லாந்தின் பாட்",
|
||||||
|
@ -4962,7 +4931,6 @@
|
||||||
"bg": "Туркменски манат",
|
"bg": "Туркменски манат",
|
||||||
"ca": "manat turcman",
|
"ca": "manat turcman",
|
||||||
"cs": "Turkmenský manat",
|
"cs": "Turkmenský manat",
|
||||||
"cy": "Manat newydd Tyrcmenestan",
|
|
||||||
"de": "Turkmenistan-Manat",
|
"de": "Turkmenistan-Manat",
|
||||||
"en": "Turkmenistan new manat",
|
"en": "Turkmenistan new manat",
|
||||||
"eo": "turkmena manato",
|
"eo": "turkmena manato",
|
||||||
|
@ -5017,7 +4985,6 @@
|
||||||
"lt": "Tuniso dinaras",
|
"lt": "Tuniso dinaras",
|
||||||
"ms": "Dinar Tunisia",
|
"ms": "Dinar Tunisia",
|
||||||
"nl": "tunesische dinar",
|
"nl": "tunesische dinar",
|
||||||
"oc": "dinar tunisian",
|
|
||||||
"pl": "Dinar tunezyjski",
|
"pl": "Dinar tunezyjski",
|
||||||
"pt": "dinar tunisiano",
|
"pt": "dinar tunisiano",
|
||||||
"ru": "тунисский динар",
|
"ru": "тунисский динар",
|
||||||
|
@ -5110,7 +5077,6 @@
|
||||||
"bg": "Тринидадски и тобагски долар",
|
"bg": "Тринидадски и тобагски долар",
|
||||||
"ca": "dòlar de Trinitat i Tobago",
|
"ca": "dòlar de Trinitat i Tobago",
|
||||||
"cs": "Dolar Trinidadu a Tobaga",
|
"cs": "Dolar Trinidadu a Tobaga",
|
||||||
"cy": "doler Trinidad a Thobago",
|
|
||||||
"de": "Trinidad-und-Tobago-Dollar",
|
"de": "Trinidad-und-Tobago-Dollar",
|
||||||
"en": "Trinidad and Tobago dollar",
|
"en": "Trinidad and Tobago dollar",
|
||||||
"eo": "trinidada dolaro",
|
"eo": "trinidada dolaro",
|
||||||
|
@@ -5216,41 +5182,41 @@
-        "en": "hryvnia",
+        "en": "Hryvnia",
-        "gl": "hrivna",
+        "gl": "Hrivna",
-        "hr": "grivnja",
+        "hr": "Grivnja",
-        "id": "hryvnia Ukraina",
+        "id": "Hryvnia Ukraina",
-        "ms": "hryvnia",
+        "ms": "Hryvnia",
-        "oc": "hryvnia",
+        "oc": "Hryvnia",
-        "ro": "grivnă",
+        "ro": "Grivnă",
-        "sk": "ukrajinská hrivna",
+        "sk": "Ukrajinská hrivna",
-        "tr": "grivna",
+        "tr": "Grivna",
-        "vi": "hryvnia Ukraina"
+        "vi": "Hryvnia Ukraina"
@@ -5258,7 +5224,6 @@
-        "cy": "Swllt Wganda",

@@ -5398,14 +5363,12 @@
-        "lv": "Uzbekistānas soms",
-        "sk": "Uzbecký som",

@@ -5419,7 +5382,6 @@
-        "cy": "sofren bolifar",

@@ -5435,7 +5397,6 @@
-        "cy": "đồng Fietnam",

@@ -5544,7 +5505,6 @@
-        "gl": "franco CFA de África Central",

@@ -5556,7 +5516,6 @@
-        "pl": "środkowoafrykański frank CFA",

@@ -5659,7 +5618,7 @@
-        "fi": "erityisnosto-oikeus",
+        "fi": "Erityisnosto-oikeus",

@@ -5669,10 +5628,9 @@
-        "ms": "hak pengeluaran khas",
-        "pl": "specjalne prawa ciągnienia",
+        "pl": "Specjalne prawa ciągnienia",

@@ -5690,7 +5648,7 @@
-        "cy": "franc CFA Gorllein Affrica",
+        "cy": "franc CFA Gorllein ffrica",

@@ -5792,13 +5750,12 @@
-        "ar": "ريال يمني",
+        "ar": "ريال عربي",
-        "cy": "Rial Iemen",
-        "en": "Yemeni Rial",
+        "en": "rial",

@@ -5852,7 +5809,7 @@
-        "ms": "Rand Afrika Selatan",
+        "ms": "Rand",

@@ -5870,7 +5827,6 @@
-        "cy": "Kwacha Sambia",

@@ -5910,12 +5866,10 @@
-        "it": "Zimbabwe Gold",
-        "pt": "Ouro do Zimbábue",
@@ -6115,13 +6069,11 @@
-        "XDR": "XDR",
-        "ae92 0530 0000 1514 1185 002": "AED",

@@ -7135,7 +7087,6 @@
-        "dinar tunisian": "TND",

@@ -7214,7 +7165,6 @@
-        "dirham yr emiradau arabaidd unedig": "AED",

@@ -7240,7 +7190,6 @@
-        "dobra são tomé a príncipe": "STN",

@@ -7424,12 +7373,10 @@
-        "doler awstralia": "AUD",
-        "doler trinidad a thobago": "TTD",

@@ -7833,7 +7780,6 @@
-        "erityisnosto oikeudet": "XDR",

@@ -7933,7 +7879,6 @@
-        "ffranc rwanda": "RWF",

@@ -8053,7 +7998,7 @@
-        "franc cfa gorllein affrica": "XOF",
+        "franc cfa gorllein ffrica": "XOF",

@@ -8373,7 +8318,6 @@
-        "gwarani paragwâi": "PYG",

@@ -8389,8 +8333,6 @@
-        "hak pengeluaran khas": "XDR",
-        "hak pengeluaran khusus": "XDR",

@@ -8975,7 +8917,6 @@
-        "kwacha sambia": "ZMW",

@@ -9134,15 +9075,12 @@
-        "leu de moldova": "MDL",
-        "leu moldofa": "MDL",
-        "leu moldovo": "MDL",

@@ -9458,7 +9396,6 @@
-        "manat azerbaitjanés": "AZN",

@@ -9475,7 +9412,6 @@
-        "manat newydd tyrcmenestan": "TMT",

@@ -9542,7 +9478,6 @@
-        "mauritánska ukíjá": "MRU",

@@ -10007,8 +9942,6 @@
-        "ouguiya mawritania": "MRU",
-        "ouro do zimbábue": "ZWG",

@@ -10371,12 +10304,10 @@
-        "punt de swdan": "SSP",
-        "punt syria": "SYP",

@@ -10480,7 +10411,6 @@
-        "rial iemen": "YER",

@@ -10530,7 +10460,6 @@
-        "ringgit maleisia": "MYR",

@@ -10550,7 +10479,6 @@
-        "riyal qatarita": "QAR",

@@ -10669,7 +10597,6 @@
-        "rupee pacistan": "PKR",

@@ -10820,7 +10747,6 @@
-        "salomoninsaarten dollari": "SBD",

@@ -11066,7 +10992,6 @@
-        "sofren bolifar": "VES",

@@ -11177,7 +11102,6 @@
-        "srílanská rupia": "LKR",

@@ -11290,10 +11214,8 @@
-        "swllt somali": "SOS",
-        "swllt wganda": "UGX",

@@ -11327,7 +11249,6 @@
-        "środkowoafrykański frank cfa": "XAF",

@@ -11376,7 +11297,6 @@
-        "tajski baht": "THB",

@@ -11441,7 +11361,6 @@
-        "tetri": "GEL",

@@ -11566,10 +11485,10 @@
-        "turkmen manat": "TMT",
+        "turkmenistani manat": "TMT",

@@ -11735,7 +11654,6 @@
-        "uzbekistānas soms": "UZS",

@@ -11889,7 +11807,6 @@
-        "yuan rmb": "CNY",

@@ -12115,7 +12032,6 @@
-        "đồng fietnam": "VND",

@@ -13763,7 +13679,6 @@
-        "دينار ذهبي": "LYD",

@@ -13804,6 +13719,7 @@
+        "ريال عربي": "YER",

@@ -13973,7 +13889,6 @@
-        "আজারবাইজানি মানাত": "AZN",

@@ -14021,7 +13936,6 @@
-        "ਆਜ਼ਰਬਾਈਜਾਨੀ ਮਨਾਤ": "AZN",

@@ -14313,6 +14227,7 @@
+        "യു.എ.ഇ. ദിർഹം": "AED",

@@ -14356,7 +14271,6 @@
-        "ดอลลาร์นิวซีแลนด์": "NZD",

@@ -14380,7 +14294,6 @@
-        "มานัตอาเซอร์ไบจาน": "AZN",

@@ -15029,7 +14942,6 @@
-        "ポンド・スターリング": "GBP",

@@ -15095,6 +15007,7 @@
+        "人民币": "CNY",
File diff suppressed because one or more lines are too long
File diff suppressed because it is too large
@@ -5,7 +5,7 @@
     ],
     "ua": "Mozilla/5.0 ({os}; rv:{version}) Gecko/20100101 Firefox/{version}",
     "versions": [
-        "132.0",
-        "131.0"
+        "129.0",
+        "128.0"
     ]
 }
@@ -832,7 +832,7 @@
     "Q104907390": {
         "si_name": "Q182429",
         "symbol": "nmi/h",
-        "to_si_factor": 0.5144444444444445
+        "to_si_factor": 0.514444
     },
     "Q104907398": {
         "si_name": "Q215571",

@@ -1336,7 +1336,7 @@
     },
     "Q106636307": {
         "si_name": "Q80842107",
-        "symbol": "μS/cm-1",
+        "symbol": "μS/cm",
         "to_si_factor": 0.0001
     },
     "Q106639711": {

@@ -3594,6 +3594,11 @@
         "symbol": "mm²",
         "to_si_factor": 1e-06
     },
+    "Q190951": {
+        "si_name": null,
+        "symbol": "S$",
+        "to_si_factor": null
+    },
     "Q191118": {
         "si_name": "Q11570",
         "symbol": "t",

@@ -4142,7 +4147,7 @@
     "Q23931103": {
         "si_name": "Q25343",
         "symbol": "nmi²",
-        "to_si_factor": 3429904.0
+        "to_si_factor": 3434290.0120544
     },
     "Q239830": {
         "si_name": "Q3395194",

@@ -6511,7 +6516,7 @@
     },
     "Q68343206": {
         "si_name": "Q68343206",
-        "symbol": "C²/m",
+        "symbol": "C/m²",
         "to_si_factor": 1.0
     },
     "Q685662": {
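Reading the entries above, `to_si_factor` looks like a plain multiplier from the unit into its SI base unit (for example, `mm²` carries `1e-06` for m²), and the `nmi/h` symbol on Q104907390 suggests the knot. A small, hedged sketch of what rounding that factor changes in practice; the speed value is made up:

```python
# Assumption: multiplying a value in the unit by "to_si_factor" yields the
# value in the SI base unit (m/s for a speed given in nmi/h, i.e. knots).
KNOT_TO_SI_EXACT = 1852 / 3600    # 0.5144444444444445, the factor on one side of the hunk
KNOT_TO_SI_ROUNDED = 0.514444     # the rounded factor on the other side

speed_knots = 20.0                               # illustrative value
print(speed_knots * KNOT_TO_SI_EXACT)            # 10.28888888888889 m/s
print(speed_knots * KNOT_TO_SI_ROUNDED)          # 10.28888 m/s
```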
@@ -34,10 +34,10 @@ Implementations
 """

 from typing import List, Dict, Any, Optional
-from urllib.parse import urlencode
+from urllib.parse import quote
 from lxml import html

-from searx.utils import extract_text, eval_xpath, eval_xpath_getindex, eval_xpath_list
+from searx.utils import extract_text, eval_xpath, eval_xpath_list
 from searx.enginelib.traits import EngineTraits
 from searx.data import ENGINE_TRAITS

@@ -53,7 +53,7 @@ about: Dict[str, Any] = {

 # engine dependent config
 categories: List[str] = ["files"]
-paging: bool = True
+paging: bool = False

 # search-url
 base_url: str = "https://annas-archive.org"

@@ -99,18 +99,9 @@ def init(engine_settings=None):  # pylint: disable=unused-argument


 def request(query, params: Dict[str, Any]) -> Dict[str, Any]:
+    q = quote(query)
     lang = traits.get_language(params["language"], traits.all_locale)  # type: ignore
-    args = {
-        'lang': lang,
-        'content': aa_content,
-        'ext': aa_ext,
-        'sort': aa_sort,
-        'q': query,
-        'page': params['pageno'],
-    }
-    # filter out None and empty values
-    filtered_args = dict((k, v) for k, v in args.items() if v)
-    params["url"] = f"{base_url}/search?{urlencode(filtered_args)}"
+    params["url"] = base_url + f"/search?lang={lang or ''}&content={aa_content}&ext={aa_ext}&sort={aa_sort}&q={q}"
     return params

@@ -137,12 +128,12 @@ def response(resp) -> List[Dict[str, Optional[str]]]:
 def _get_result(item):
     return {
         'template': 'paper.html',
-        'url': base_url + extract_text(eval_xpath_getindex(item, './@href', 0)),
+        'url': base_url + item.xpath('./@href')[0],
         'title': extract_text(eval_xpath(item, './/h3/text()[1]')),
         'publisher': extract_text(eval_xpath(item, './/div[contains(@class, "text-sm")]')),
         'authors': [extract_text(eval_xpath(item, './/div[contains(@class, "italic")]'))],
         'content': extract_text(eval_xpath(item, './/div[contains(@class, "text-xs")]')),
-        'thumbnail': extract_text(eval_xpath_getindex(item, './/img/@src', 0, default=None), allow_none=True),
+        'thumbnail': item.xpath('.//img/@src')[0],
     }

@@ -193,8 +184,3 @@ def fetch_traits(engine_traits: EngineTraits):

     for x in eval_xpath_list(dom, "//form//select[@name='sort']//option"):
         engine_traits.custom['sort'].append(x.get("value"))
-
-    # for better diff; sort the persistence of these traits
-    engine_traits.custom['content'].sort()
-    engine_traits.custom['ext'].sort()
-    engine_traits.custom['sort'].sort()
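The two `request()` bodies in the hunk above build the same search URL in different ways: one collects the parameters in a dict, drops empty values and lets `urlencode()` escape everything; the other percent-encodes only the query with `quote()` and interpolates the rest into an f-string. A standalone sketch of the difference, with made-up query and filter values:

```python
from urllib.parse import quote, urlencode

query = "linear algebra & optimization"          # illustrative query
args = {'lang': 'en', 'content': '', 'ext': 'pdf', 'sort': '', 'q': query, 'page': 2}

# dict + urlencode(): empty values are filtered out, every value is escaped
filtered_args = {k: v for k, v in args.items() if v}
print("https://annas-archive.org/search?" + urlencode(filtered_args))
# .../search?lang=en&ext=pdf&q=linear+algebra+%26+optimization&page=2

# f-string + quote(): only the query is escaped, empty parameters stay in the URL
print(f"https://annas-archive.org/search?lang=en&content=&ext=pdf&sort=&q={quote(query)}")
# .../search?lang=en&content=&ext=pdf&sort=&q=linear%20algebra%20%26%20optimization
```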
@@ -99,7 +99,7 @@ def response(resp):
                 'url': metadata['purl'],
                 'thumbnail_src': metadata['turl'],
                 'img_src': metadata['murl'],
-                'content': metadata.get('desc'),
+                'content': metadata['desc'],
                 'title': title,
                 'source': source,
                 'resolution': img_format[0],
@@ -123,6 +123,7 @@ from typing import Any, TYPE_CHECKING
 from urllib.parse import (
     urlencode,
     urlparse,
+    parse_qs,
 )

 from dateutil import parser

@@ -136,7 +137,6 @@ from searx.utils import (
     eval_xpath_list,
     eval_xpath_getindex,
     js_variable_to_python,
-    get_embeded_stream_url,
 )
 from searx.enginelib.traits import EngineTraits

@@ -311,7 +311,7 @@ def _parse_search(resp):
             # In my tests a video tag in the WEB search was most often not a
             # video, except the ones from youtube ..

-            iframe_src = get_embeded_stream_url(url)
+            iframe_src = _get_iframe_src(url)
             if iframe_src:
                 item['iframe_src'] = iframe_src
                 item['template'] = 'videos.html'

@@ -328,6 +328,15 @@ def _parse_search(resp):
     return result_list


+def _get_iframe_src(url):
+    parsed_url = urlparse(url)
+    if parsed_url.path == '/watch' and parsed_url.query:
+        video_id = parse_qs(parsed_url.query).get('v', [])  # type: ignore
+        if video_id:
+            return 'https://www.youtube-nocookie.com/embed/' + video_id[0]  # type: ignore
+    return None
+
+
 def _parse_news(json_resp):
     result_list = []

@@ -383,7 +392,7 @@ def _parse_videos(json_resp):
         if result['thumbnail'] is not None:
             item['thumbnail'] = result['thumbnail']['src']

-        iframe_src = get_embeded_stream_url(url)
+        iframe_src = _get_iframe_src(url)
         if iframe_src:
             item['iframe_src'] = iframe_src

@@ -421,8 +430,7 @@ def fetch_traits(engine_traits: EngineTraits):

         ui_lang = option.get('value')
         try:
-            l = babel.Locale.parse(ui_lang, sep='-')
-            if l.territory:
+            if '-' in ui_lang and not ui_lang.startswith("zh-"):
                 sxng_tag = region_tag(babel.Locale.parse(ui_lang, sep='-'))
             else:
                 sxng_tag = language_tag(babel.Locale.parse(ui_lang, sep='-'))

@@ -445,7 +453,7 @@ def fetch_traits(engine_traits: EngineTraits):
     if not resp.ok:  # type: ignore
         print("ERROR: response from Brave is not OK.")

-    country_js = resp.text[resp.text.index("options:{all") + len('options:') :]  # type: ignore
+    country_js = resp.text[resp.text.index("options:{all") + len('options:') :]
     country_js = country_js[: country_js.index("},k={default")]
     country_tags = js_variable_to_python(country_js)
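For reference, the `_get_iframe_src()` helper introduced above can be exercised on its own; it only rewrites YouTube `/watch` URLs into their `youtube-nocookie` embed form and returns `None` for everything else. The URLs below are just examples:

```python
from urllib.parse import urlparse, parse_qs

def _get_iframe_src(url):
    # same logic as the helper added in the hunk above
    parsed_url = urlparse(url)
    if parsed_url.path == '/watch' and parsed_url.query:
        video_id = parse_qs(parsed_url.query).get('v', [])
        if video_id:
            return 'https://www.youtube-nocookie.com/embed/' + video_id[0]
    return None

print(_get_iframe_src('https://www.youtube.com/watch?v=dQw4w9WgXcQ'))
# https://www.youtube-nocookie.com/embed/dQw4w9WgXcQ
print(_get_iframe_src('https://example.com/video/123'))
# None
```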
@@ -54,6 +54,7 @@ def response(resp):

         excerpt = result.xpath('.//div[@class="torrent_excerpt"]')[0]
         content = html.tostring(excerpt, encoding='unicode', method='text', with_tail=False)
+        # it is better to emit <br/> instead of |, but html tags are verboten
         content = content.strip().replace('\n', ' | ')
         content = ' '.join(content.split())
@@ -1,68 +0,0 @@
-# SPDX-License-Identifier: AGPL-3.0-or-later
-"""Cloudflare AI engine"""
-
-from json import loads, dumps
-from searx.exceptions import SearxEngineAPIException
-
-about = {
-    "website": 'https://ai.cloudflare.com',
-    "wikidata_id": None,
-    "official_api_documentation": 'https://developers.cloudflare.com/workers-ai',
-    "use_official_api": True,
-    "require_api_key": True,
-    "results": 'JSON',
-}
-
-cf_account_id = ''
-cf_ai_api = ''
-cf_ai_gateway = ''
-
-cf_ai_model = ''
-cf_ai_model_display_name = 'Cloudflare AI'
-
-# Assistant messages hint to the AI about the desired output format. Not all models support this role.
-cf_ai_model_assistant = 'Keep your answers as short and effective as possible.'
-# System messages define the AI's personality. You can use them to set rules and how you expect the AI to behave.
-cf_ai_model_system = 'You are a self-aware language model who is honest and direct about any question from the user.'
-
-
-def request(query, params):
-
-    params['query'] = query
-
-    params['url'] = f'https://gateway.ai.cloudflare.com/v1/{cf_account_id}/{cf_ai_gateway}/workers-ai/{cf_ai_model}'
-
-    params['method'] = 'POST'
-
-    params['headers']['Authorization'] = f'Bearer {cf_ai_api}'
-    params['headers']['Content-Type'] = 'application/json'
-
-    params['data'] = dumps(
-        {
-            'messages': [
-                {'role': 'assistant', 'content': cf_ai_model_assistant},
-                {'role': 'system', 'content': cf_ai_model_system},
-                {'role': 'user', 'content': params['query']},
-            ]
-        }
-    ).encode('utf-8')
-
-    return params
-
-
-def response(resp):
-    results = []
-    json = loads(resp.text)
-
-    if 'error' in json:
-        raise SearxEngineAPIException('Cloudflare AI error: ' + json['error'])
-
-    if 'result' in json:
-        results.append(
-            {
-                'content': json['result']['response'],
-                'infobox': cf_ai_model_display_name,
-            }
-        )
-
-    return results
@@ -18,13 +18,13 @@ from searx import (
 )
 from searx.utils import (
     eval_xpath,
+    eval_xpath_getindex,
     extract_text,
 )
 from searx.network import get  # see https://github.com/searxng/searxng/issues/762
 from searx import redisdb
 from searx.enginelib.traits import EngineTraits
 from searx.utils import extr
-from searx.exceptions import SearxEngineCaptchaException

 if TYPE_CHECKING:
     import logging

@@ -53,33 +53,31 @@ paging = True
 time_range_support = True
 safesearch = True  # user can't select but the results are filtered

-url = "https://html.duckduckgo.com/html"
+url = 'https://lite.duckduckgo.com/lite/'
+# url_ping = 'https://duckduckgo.com/t/sl_l'

 time_range_dict = {'day': 'd', 'week': 'w', 'month': 'm', 'year': 'y'}
 form_data = {'v': 'l', 'api': 'd.js', 'o': 'json'}
-__CACHE = []
-
-
-def _cache_key(data: dict):
-    return 'SearXNG_ddg_web_vqd' + redislib.secret_hash(f"{data['q']}//{data['kl']}")


-def cache_vqd(data: dict, value):
+def cache_vqd(query, value):
     """Caches a ``vqd`` value from a query."""
     c = redisdb.client()
     if c:
         logger.debug("cache vqd value: %s", value)
-        c.set(_cache_key(data), value, ex=600)
-    else:
-        logger.debug("MEM cache vqd value: %s", value)
-        if len(__CACHE) > 100:  # cache vqd from last 100 queries
-            __CACHE.pop(0)
-        __CACHE.append((_cache_key(data), value))
+        key = 'SearXNG_ddg_web_vqd' + redislib.secret_hash(query)
+        c.set(key, value, ex=600)


-def get_vqd(data):
-    """Returns the ``vqd`` that fits to the *query* (``data`` from HTTP POST).
+def get_vqd(query):
+    """Returns the ``vqd`` that fits to the *query*. If there is no ``vqd`` cached
+    (:py:obj:`cache_vqd`) the query is sent to DDG to get a vqd value from the
+    response.
+
+    .. hint::
+
+       If an empty string is returned there are no results for the ``query`` and
+       therefore no ``vqd`` value.

     DDG's bot detection is sensitive to the ``vqd`` value. For some search terms
     (such as extremely long search terms that are often sent by bots), no ``vqd``

@@ -107,23 +105,28 @@ def get_vqd(data):
     - DuckDuckGo News: ``https://duckduckgo.com/news.js??q=...&vqd=...``

     """

-    key = _cache_key(data)
     value = None
     c = redisdb.client()
     if c:
+        key = 'SearXNG_ddg_web_vqd' + redislib.secret_hash(query)
         value = c.get(key)
         if value or value == b'':
             value = value.decode('utf-8')
-            logger.debug("re-use CACHED vqd value: %s", value)
+            logger.debug("re-use cached vqd value: %s", value)
             return value

-    else:
-        for k, value in __CACHE:
-            if k == key:
-                logger.debug("MEM re-use CACHED vqd value: %s", value)
-                return value
-    return None
+    query_url = 'https://duckduckgo.com/?' + urlencode({'q': query})
+    res = get(query_url)
+    doc = lxml.html.fromstring(res.text)
+    for script in doc.xpath("//script[@type='text/javascript']"):
+        script = script.text
+        if 'vqd="' in script:
+            value = extr(script, 'vqd="', '"')
+            break
+    logger.debug("new vqd value: '%s'", value)
+    if value is not None:
+        cache_vqd(query, value)
+    return value


 def get_ddg_lang(eng_traits: EngineTraits, sxng_locale, default='en_US'):

@@ -151,10 +154,9 @@ def get_ddg_lang(eng_traits: EngineTraits, sxng_locale, default='en_US'):

     .. hint::

-       `DDG-lite <https://lite.duckduckgo.com/lite>`__ and the *no Javascript*
-       page https://html.duckduckgo.com/html do not offer a language selection
-       to the user, only a region can be selected by the user (``eng_region``
-       from the example above). DDG-lite and *no Javascript* store the selected
+       `DDG-lite <https://lite.duckduckgo.com/lite>`__ does not offer a language
+       selection to the user, only a region can be selected by the user
+       (``eng_region`` from the example above). DDG-lite stores the selected
        region in a cookie::

          params['cookies']['kl'] = eng_region  # 'ar-es'

@@ -238,27 +240,10 @@ def request(query, params):

     query = quote_ddg_bangs(query)

-    if len(query) >= 500:
-        # DDG does not accept queries with more than 499 chars
-        params["url"] = None
-        return
-
-    # Advanced search syntax ends in CAPTCHA
-    # https://duckduckgo.com/duckduckgo-help-pages/results/syntax/
-    query = " ".join(
-        [
-            x.removeprefix("site:").removeprefix("intitle:").removeprefix("inurl:").removeprefix("filetype:")
-            for x in query.split()
-        ]
-    )
+    # request needs a vqd argument
+    vqd = get_vqd(query)
+
     eng_region = traits.get_region(params['searxng_locale'], traits.all_locale)
-    if eng_region == "wt-wt":
-        # https://html.duckduckgo.com/html sets an empty value for "all".
-        eng_region = ""
-
-    params['data']['kl'] = eng_region
-    params['cookies']['kl'] = eng_region

     # eng_lang = get_ddg_lang(traits, params['searxng_locale'])

     params['url'] = url

@@ -266,82 +251,45 @@ def request(query, params):
     params['data']['q'] = query

     # The API is not documented, so we do some reverse engineering and emulate
-    # what https://html.duckduckgo.com/html does when you press "next Page" link
-    # again and again ..
+    # what https://lite.duckduckgo.com/lite/ does when you press "next Page"
+    # link again and again ..

     params['headers']['Content-Type'] = 'application/x-www-form-urlencoded'
+    params['data']['vqd'] = vqd

-    params['headers']['Sec-Fetch-Dest'] = "document"
-    params['headers']['Sec-Fetch-Mode'] = "navigate"  # at least this one is used by ddg's bot detection
-    params['headers']['Sec-Fetch-Site'] = "same-origin"
-    params['headers']['Sec-Fetch-User'] = "?1"
-
-    # Form of the initial search page does have empty values in the form
-    if params['pageno'] == 1:
-        params['data']['b'] = ""
-
-    params['data']['df'] = ''
-    if params['time_range'] in time_range_dict:
-        params['data']['df'] = time_range_dict[params['time_range']]
-        params['cookies']['df'] = time_range_dict[params['time_range']]
-
+    # initial page does not have an offset
     if params['pageno'] == 2:
         # second page does have an offset of 20
         offset = (params['pageno'] - 1) * 20
         params['data']['s'] = offset
         params['data']['dc'] = offset + 1
-
     elif params['pageno'] > 2:
         # third and following pages do have an offset of 20 + n*50
         offset = 20 + (params['pageno'] - 2) * 50
         params['data']['s'] = offset
         params['data']['dc'] = offset + 1

+    # initial page does not have additional data in the input form
     if params['pageno'] > 1:
-
-        # initial page does not have these additional data in the input form
         params['data']['o'] = form_data.get('o', 'json')
         params['data']['api'] = form_data.get('api', 'd.js')
         params['data']['nextParams'] = form_data.get('nextParams', '')
         params['data']['v'] = form_data.get('v', 'l')
-        params['headers']['Referer'] = url
+        params['headers']['Referer'] = 'https://lite.duckduckgo.com/'

-    # from here on no more params['data'] shuld be set, since this dict is
-    # needed to get a vqd value from the cache ..
-
-    vqd = get_vqd(params['data'])
-
-    # Certain conditions must be met in order to call up one of the
-    # following pages ...
-
-    if vqd:
-        params['data']['vqd'] = vqd  # follow up pages / requests needs a vqd argument
-    else:
-        # Don't try to call follow up pages without a vqd value. DDG
-        # recognizes this as a request from a bot. This lowers the
-        # reputation of the SearXNG IP and DDG starts to activate CAPTCHAs.
-        params["url"] = None
-        return
-
-    if params['searxng_locale'].startswith("zh"):
-        # Some locales (at least China) do not have a "next page" button and ddg
-        # will return a HTTP/2 403 Forbidden for a request of such a page.
-        params["url"] = None
-        return
+    params['data']['kl'] = eng_region
+    params['cookies']['kl'] = eng_region
+
+    params['data']['df'] = ''
+    if params['time_range'] in time_range_dict:
+        params['data']['df'] = time_range_dict[params['time_range']]
+        params['cookies']['df'] = time_range_dict[params['time_range']]

     logger.debug("param data: %s", params['data'])
     logger.debug("param cookies: %s", params['cookies'])
-
-
-def is_ddg_captcha(dom):
-    """In case of CAPTCHA ddg response its own *not a Robot* dialog and is not
-    redirected to a CAPTCHA page."""
-
-    return bool(eval_xpath(dom, "//form[@id='challenge-form']"))
+    return params


 def response(resp):

@@ -352,40 +300,38 @@ def response(resp):
     results = []
     doc = lxml.html.fromstring(resp.text)

-    if is_ddg_captcha(doc):
-        # set suspend time to zero is OK --> ddg does not block the IP
-        raise SearxEngineCaptchaException(suspended_time=0, message=f"CAPTCHA ({resp.search_params['data'].get('kl')})")
-
-    form = eval_xpath(doc, '//input[@name="vqd"]/..')
+    result_table = eval_xpath(doc, '//html/body/form/div[@class="filters"]/table')
+
+    if len(result_table) == 2:
+        # some locales (at least China) does not have a "next page" button and
+        # the layout of the HTML tables is different.
+        result_table = result_table[1]
+    elif not len(result_table) >= 3:
+        # no more results
+        return []
+    else:
+        result_table = result_table[2]
+    # update form data from response
+    form = eval_xpath(doc, '//html/body/form/div[@class="filters"]/table//input/..')
     if len(form):
-        # some locales (at least China) does not have a "next page" button
         form = form[0]
-        form_vqd = eval_xpath(form, '//input[@name="vqd"]/@value')[0]
+        form_data['v'] = eval_xpath(form, '//input[@name="v"]/@value')[0]
+        form_data['api'] = eval_xpath(form, '//input[@name="api"]/@value')[0]
+        form_data['o'] = eval_xpath(form, '//input[@name="o"]/@value')[0]
+        logger.debug('form_data: %s', form_data)

-        cache_vqd(resp.search_params["data"], form_vqd)
+    tr_rows = eval_xpath(result_table, './/tr')
+    # In the last <tr> is the form of the 'previous/next page' links
+    tr_rows = tr_rows[:-1]

-    # just select "web-result" and ignore results of class "result--ad result--ad--small"
-    for div_result in eval_xpath(doc, '//div[@id="links"]/div[contains(@class, "web-result")]'):
-
-        item = {}
-        title = eval_xpath(div_result, './/h2/a')
-        if not title:
-            # this is the "No results." item in the result list
-            continue
-        item["title"] = extract_text(title)
-        item["url"] = eval_xpath(div_result, './/h2/a/@href')[0]
-        item["content"] = extract_text(eval_xpath(div_result, './/a[contains(@class, "result__snippet")]')[0])
-
-        results.append(item)
-
-    zero_click_info_xpath = '//div[@id="zero_click_abstract"]'
+    len_tr_rows = len(tr_rows)
+    offset = 0
+
+    zero_click_info_xpath = '//html/body/form/div/table[2]/tr[2]/td/text()'
     zero_click = extract_text(eval_xpath(doc, zero_click_info_xpath)).strip()

-    if zero_click and (
-        "Your IP address is" not in zero_click
+    if zero_click and "Your IP address is" not in zero_click and "Your user agent:" not in zero_click:
|
|
||||||
and "Your user agent:" not in zero_click
|
|
||||||
and "URL Decoded:" not in zero_click
|
|
||||||
):
|
|
||||||
current_query = resp.search_params["data"].get("q")
|
current_query = resp.search_params["data"].get("q")
|
||||||
|
|
||||||
results.append(
|
results.append(
|
||||||
|
@ -395,6 +341,33 @@ def response(resp):
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
|
while len_tr_rows >= offset + 4:
|
||||||
|
|
||||||
|
# assemble table rows we need to scrap
|
||||||
|
tr_title = tr_rows[offset]
|
||||||
|
tr_content = tr_rows[offset + 1]
|
||||||
|
offset += 4
|
||||||
|
|
||||||
|
# ignore sponsored Adds <tr class="result-sponsored">
|
||||||
|
if tr_content.get('class') == 'result-sponsored':
|
||||||
|
continue
|
||||||
|
|
||||||
|
a_tag = eval_xpath_getindex(tr_title, './/td//a[@class="result-link"]', 0, None)
|
||||||
|
if a_tag is None:
|
||||||
|
continue
|
||||||
|
|
||||||
|
td_content = eval_xpath_getindex(tr_content, './/td[@class="result-snippet"]', 0, None)
|
||||||
|
if td_content is None:
|
||||||
|
continue
|
||||||
|
|
||||||
|
results.append(
|
||||||
|
{
|
||||||
|
'title': a_tag.text_content(),
|
||||||
|
'content': extract_text(td_content),
|
||||||
|
'url': a_tag.get('href'),
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
return results
|
return results
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -7,7 +7,6 @@ DuckDuckGo Extra (images, videos, news)
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from typing import TYPE_CHECKING
|
from typing import TYPE_CHECKING
|
||||||
from urllib.parse import urlencode
|
from urllib.parse import urlencode
|
||||||
from searx.utils import get_embeded_stream_url
|
|
||||||
|
|
||||||
from searx.engines.duckduckgo import fetch_traits # pylint: disable=unused-import
|
from searx.engines.duckduckgo import fetch_traits # pylint: disable=unused-import
|
||||||
from searx.engines.duckduckgo import (
|
from searx.engines.duckduckgo import (
|
||||||
|
@ -109,7 +108,7 @@ def _video_result(result):
|
||||||
'title': result['title'],
|
'title': result['title'],
|
||||||
'content': result['description'],
|
'content': result['description'],
|
||||||
'thumbnail': result['images'].get('small') or result['images'].get('medium'),
|
'thumbnail': result['images'].get('small') or result['images'].get('medium'),
|
||||||
'iframe_src': get_embeded_stream_url(result['content']),
|
'iframe_src': result['embed_url'],
|
||||||
'source': result['provider'],
|
'source': result['provider'],
|
||||||
'length': result['duration'],
|
'length': result['duration'],
|
||||||
'metadata': result.get('uploader'),
|
'metadata': result.get('uploader'),
|
||||||
|
|
|
@ -1,8 +1,7 @@
|
||||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||||
"""Engine to search in collaborative software platforms based on Gitea_ or Forgejo_.
|
"""Engine to search in collaborative software platforms based on Gitea_.
|
||||||
|
|
||||||
.. _Gitea: https://about.gitea.com/
|
.. _Gitea: https://about.gitea.com/
|
||||||
.. _Forgejo: https://forgejo.org/
|
|
||||||
|
|
||||||
Configuration
|
Configuration
|
||||||
=============
|
=============
|
||||||
|
@ -24,11 +23,6 @@ Optional settings are:
|
||||||
base_url: https://gitea.com
|
base_url: https://gitea.com
|
||||||
shortcut: gitea
|
shortcut: gitea
|
||||||
|
|
||||||
- name: forgejo.com
|
|
||||||
engine: gitea
|
|
||||||
base_url: https://code.forgejo.org
|
|
||||||
shortcut: forgejo
|
|
||||||
|
|
||||||
If you would like to use additional instances, just configure new engines in the
|
If you would like to use additional instances, just configure new engines in the
|
||||||
:ref:`settings <settings engine>` and set the ``base_url``.
|
:ref:`settings <settings engine>` and set the ``base_url``.
|
||||||
|
|
||||||
|
@ -101,14 +95,13 @@ def response(resp):
|
||||||
'url': item.get('html_url'),
|
'url': item.get('html_url'),
|
||||||
'title': item.get('full_name'),
|
'title': item.get('full_name'),
|
||||||
'content': ' / '.join(content),
|
'content': ' / '.join(content),
|
||||||
# Use Repository Avatar and fall back to Owner Avatar if not set.
|
'img_src': item.get('owner', {}).get('avatar_url'),
|
||||||
'thumbnail': item.get('avatar_url') or item.get('owner', {}).get('avatar_url'),
|
|
||||||
'package_name': item.get('name'),
|
'package_name': item.get('name'),
|
||||||
'maintainer': item.get('owner', {}).get('username'),
|
'maintainer': item.get('owner', {}).get('login'),
|
||||||
'publishedDate': parser.parse(item.get("updated_at") or item.get("created_at")),
|
'publishedDate': parser.parse(item.get("updated_at") or item.get("created_at")),
|
||||||
'tags': item.get('topics', []),
|
'tags': item.get('topics', []),
|
||||||
'popularity': item.get('stars_count'),
|
'popularity': item.get('stargazers_count'),
|
||||||
'homepage': item.get('website'),
|
'homepage': item.get('homepage'),
|
||||||
'source_code_url': item.get('clone_url'),
|
'source_code_url': item.get('clone_url'),
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
|
@ -1,95 +0,0 @@
|
||||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
|
||||||
"""Engine to search in collaborative software platforms based on GitLab_ with
|
|
||||||
the `GitLab REST API`_.
|
|
||||||
|
|
||||||
.. _GitLab: https://about.gitlab.com/install/
|
|
||||||
.. _GitLab REST API: https://docs.gitlab.com/ee/api/
|
|
||||||
|
|
||||||
Configuration
|
|
||||||
=============
|
|
||||||
|
|
||||||
The engine has the following mandatory setting:
|
|
||||||
|
|
||||||
- :py:obj:`base_url`
|
|
||||||
|
|
||||||
Optional settings are:
|
|
||||||
|
|
||||||
- :py:obj:`api_path`
|
|
||||||
|
|
||||||
.. code:: yaml
|
|
||||||
|
|
||||||
- name: gitlab
|
|
||||||
engine: gitlab
|
|
||||||
base_url: https://gitlab.com
|
|
||||||
shortcut: gl
|
|
||||||
about:
|
|
||||||
website: https://gitlab.com/
|
|
||||||
wikidata_id: Q16639197
|
|
||||||
|
|
||||||
- name: gnome
|
|
||||||
engine: gitlab
|
|
||||||
base_url: https://gitlab.gnome.org
|
|
||||||
shortcut: gn
|
|
||||||
about:
|
|
||||||
website: https://gitlab.gnome.org
|
|
||||||
wikidata_id: Q44316
|
|
||||||
|
|
||||||
Implementations
|
|
||||||
===============
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
from urllib.parse import urlencode
|
|
||||||
from dateutil import parser
|
|
||||||
|
|
||||||
about = {
|
|
||||||
"website": None,
|
|
||||||
"wikidata_id": None,
|
|
||||||
"official_api_documentation": "https://docs.gitlab.com/ee/api/",
|
|
||||||
"use_official_api": True,
|
|
||||||
"require_api_key": False,
|
|
||||||
"results": "JSON",
|
|
||||||
}
|
|
||||||
|
|
||||||
categories = ['it', 'repos']
|
|
||||||
paging = True
|
|
||||||
|
|
||||||
base_url: str = ""
|
|
||||||
"""Base URL of the GitLab host."""
|
|
||||||
|
|
||||||
api_path: str = 'api/v4/projects'
|
|
||||||
"""The path the `project API <https://docs.gitlab.com/ee/api/projects.html>`_.
|
|
||||||
|
|
||||||
The default path should work fine usually.
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
def request(query, params):
|
|
||||||
args = {'search': query, 'page': params['pageno']}
|
|
||||||
params['url'] = f"{base_url}/{api_path}?{urlencode(args)}"
|
|
||||||
|
|
||||||
return params
|
|
||||||
|
|
||||||
|
|
||||||
def response(resp):
|
|
||||||
results = []
|
|
||||||
|
|
||||||
for item in resp.json():
|
|
||||||
results.append(
|
|
||||||
{
|
|
||||||
'template': 'packages.html',
|
|
||||||
'url': item.get('web_url'),
|
|
||||||
'title': item.get('name'),
|
|
||||||
'content': item.get('description'),
|
|
||||||
'thumbnail': item.get('avatar_url'),
|
|
||||||
'package_name': item.get('name'),
|
|
||||||
'maintainer': item.get('namespace', {}).get('name'),
|
|
||||||
'publishedDate': parser.parse(item.get('last_activity_at') or item.get("created_at")),
|
|
||||||
'tags': item.get('tag_list', []),
|
|
||||||
'popularity': item.get('star_count'),
|
|
||||||
'homepage': item.get('readme_url'),
|
|
||||||
'source_code_url': item.get('http_url_to_repo'),
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
return results
|
|
|
@ -62,7 +62,7 @@ filter_mapping = {0: 'off', 1: 'medium', 2: 'high'}
|
||||||
results_xpath = './/div[contains(@jscontroller, "SC7lYd")]'
|
results_xpath = './/div[contains(@jscontroller, "SC7lYd")]'
|
||||||
title_xpath = './/a/h3[1]'
|
title_xpath = './/a/h3[1]'
|
||||||
href_xpath = './/a[h3]/@href'
|
href_xpath = './/a[h3]/@href'
|
||||||
content_xpath = './/div[contains(@data-sncf, "1")]'
|
content_xpath = './/div[@data-sncf="1"]'
|
||||||
|
|
||||||
# Suggestions are links placed in a *card-section*, we extract only the text
|
# Suggestions are links placed in a *card-section*, we extract only the text
|
||||||
# from the links not the links itself.
|
# from the links not the links itself.
|
||||||
|
@ -441,7 +441,7 @@ def fetch_traits(engine_traits: EngineTraits, add_domains: bool = True):
|
||||||
try:
|
try:
|
||||||
locale = babel.Locale.parse(lang_map.get(eng_lang, eng_lang), sep='-')
|
locale = babel.Locale.parse(lang_map.get(eng_lang, eng_lang), sep='-')
|
||||||
except babel.UnknownLocaleError:
|
except babel.UnknownLocaleError:
|
||||||
print("INFO: google UI language %s (%s) is unknown by babel" % (eng_lang, x.text.split("(")[0].strip()))
|
print("ERROR: %s -> %s is unknown by babel" % (x.get("data-name"), eng_lang))
|
||||||
continue
|
continue
|
||||||
sxng_lang = language_tag(locale)
|
sxng_lang = language_tag(locale)
|
||||||
|
|
||||||
|
|
|
@ -34,7 +34,6 @@ from searx.engines.google import (
|
||||||
detect_google_sorry,
|
detect_google_sorry,
|
||||||
)
|
)
|
||||||
from searx.enginelib.traits import EngineTraits
|
from searx.enginelib.traits import EngineTraits
|
||||||
from searx.utils import get_embeded_stream_url
|
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
import logging
|
import logging
|
||||||
|
@ -126,7 +125,6 @@ def response(resp):
|
||||||
'content': content,
|
'content': content,
|
||||||
'author': pub_info,
|
'author': pub_info,
|
||||||
'thumbnail': thumbnail,
|
'thumbnail': thumbnail,
|
||||||
'iframe_src': get_embeded_stream_url(url),
|
|
||||||
'template': 'videos.html',
|
'template': 'videos.html',
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
|
@ -57,11 +57,7 @@ def request(query, params):
|
||||||
|
|
||||||
if params['time_range']:
|
if params['time_range']:
|
||||||
search_type = 'search_by_date'
|
search_type = 'search_by_date'
|
||||||
timestamp = (
|
timestamp = (datetime.now() - relativedelta(**{f"{params['time_range']}s": 1})).timestamp()
|
||||||
# pylint: disable=unexpected-keyword-arg
|
|
||||||
datetime.now()
|
|
||||||
- relativedelta(**{f"{params['time_range']}s": 1}) # type: ignore
|
|
||||||
).timestamp()
|
|
||||||
query_params["numericFilters"] = f"created_at_i>{timestamp}"
|
query_params["numericFilters"] = f"created_at_i>{timestamp}"
|
||||||
|
|
||||||
params["url"] = f"{base_url}/{search_type}?{urlencode(query_params)}"
|
params["url"] = f"{base_url}/{search_type}?{urlencode(query_params)}"
|
||||||
|
|
|
@ -0,0 +1,71 @@
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||||
|
"""Internet Archive scholar(science)
|
||||||
|
"""
|
||||||
|
|
||||||
|
from datetime import datetime
|
||||||
|
from urllib.parse import urlencode
|
||||||
|
from searx.utils import html_to_text
|
||||||
|
|
||||||
|
about = {
|
||||||
|
"website": "https://scholar.archive.org/",
|
||||||
|
"wikidata_id": "Q115667709",
|
||||||
|
"official_api_documentation": "https://scholar.archive.org/api/redoc",
|
||||||
|
"use_official_api": True,
|
||||||
|
"require_api_key": False,
|
||||||
|
"results": "JSON",
|
||||||
|
}
|
||||||
|
categories = ['science', 'scientific publications']
|
||||||
|
paging = True
|
||||||
|
|
||||||
|
base_url = "https://scholar.archive.org"
|
||||||
|
results_per_page = 15
|
||||||
|
|
||||||
|
|
||||||
|
def request(query, params):
|
||||||
|
args = {
|
||||||
|
"q": query,
|
||||||
|
"limit": results_per_page,
|
||||||
|
"offset": (params["pageno"] - 1) * results_per_page,
|
||||||
|
}
|
||||||
|
params["url"] = f"{base_url}/search?{urlencode(args)}"
|
||||||
|
params["headers"]["Accept"] = "application/json"
|
||||||
|
return params
|
||||||
|
|
||||||
|
|
||||||
|
def response(resp):
|
||||||
|
results = []
|
||||||
|
|
||||||
|
json = resp.json()
|
||||||
|
|
||||||
|
for result in json["results"]:
|
||||||
|
publishedDate, content, doi = None, '', None
|
||||||
|
|
||||||
|
if result['biblio'].get('release_date'):
|
||||||
|
publishedDate = datetime.strptime(result['biblio']['release_date'], "%Y-%m-%d")
|
||||||
|
|
||||||
|
if len(result['abstracts']) > 0:
|
||||||
|
content = result['abstracts'][0].get('body')
|
||||||
|
elif len(result['_highlights']) > 0:
|
||||||
|
content = result['_highlights'][0]
|
||||||
|
|
||||||
|
if len(result['releases']) > 0:
|
||||||
|
doi = result['releases'][0].get('doi')
|
||||||
|
|
||||||
|
results.append(
|
||||||
|
{
|
||||||
|
'template': 'paper.html',
|
||||||
|
'url': result['fulltext']['access_url'],
|
||||||
|
'title': result['biblio'].get('title') or result['biblio'].get('container_name'),
|
||||||
|
'content': html_to_text(content),
|
||||||
|
'publisher': result['biblio'].get('publisher'),
|
||||||
|
'doi': doi,
|
||||||
|
'journal': result['biblio'].get('container_name'),
|
||||||
|
'authors': result['biblio'].get('contrib_names'),
|
||||||
|
'tags': result['tags'],
|
||||||
|
'publishedDate': publishedDate,
|
||||||
|
'issns': result['biblio'].get('issns'),
|
||||||
|
'pdf_url': result['fulltext'].get('access_url'),
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
return results
|
|
@ -27,7 +27,7 @@ categories = ['images']
|
||||||
paging = True
|
paging = True
|
||||||
|
|
||||||
endpoint = 'photos'
|
endpoint = 'photos'
|
||||||
base_url = 'https://www.loc.gov'
|
base_url = 'https://loc.gov'
|
||||||
search_string = "/{endpoint}/?sp={page}&{query}&fo=json"
|
search_string = "/{endpoint}/?sp={page}&{query}&fo=json"
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -1,95 +0,0 @@
|
||||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
|
||||||
"""MariaDB is a community driven fork of MySQL. Before enabling MariaDB engine,
|
|
||||||
you must the install the pip package ``mariadb`` along with the necessary
|
|
||||||
prerequities.
|
|
||||||
|
|
||||||
`See the following documentation for more details
|
|
||||||
<https://mariadb.com/docs/server/connect/programming-languages/c/install/>`_
|
|
||||||
|
|
||||||
Example
|
|
||||||
=======
|
|
||||||
|
|
||||||
This is an example configuration for querying a MariaDB server:
|
|
||||||
|
|
||||||
.. code:: yaml
|
|
||||||
|
|
||||||
- name: my_database
|
|
||||||
engine: mariadb_server
|
|
||||||
database: my_database
|
|
||||||
username: searxng
|
|
||||||
password: password
|
|
||||||
limit: 5
|
|
||||||
query_str: 'SELECT * from my_table WHERE my_column=%(query)s'
|
|
||||||
|
|
||||||
Implementations
|
|
||||||
===============
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
from typing import TYPE_CHECKING
|
|
||||||
|
|
||||||
try:
|
|
||||||
import mariadb
|
|
||||||
except ImportError:
|
|
||||||
# import error is ignored because the admin has to install mysql manually to use
|
|
||||||
# the engine
|
|
||||||
pass
|
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
|
||||||
import logging
|
|
||||||
|
|
||||||
logger = logging.getLogger()
|
|
||||||
|
|
||||||
|
|
||||||
engine_type = 'offline'
|
|
||||||
|
|
||||||
host = "127.0.0.1"
|
|
||||||
"""Hostname of the DB connector"""
|
|
||||||
|
|
||||||
port = 3306
|
|
||||||
"""Port of the DB connector"""
|
|
||||||
|
|
||||||
database = ""
|
|
||||||
"""Name of the database."""
|
|
||||||
|
|
||||||
username = ""
|
|
||||||
"""Username for the DB connection."""
|
|
||||||
|
|
||||||
password = ""
|
|
||||||
"""Password for the DB connection."""
|
|
||||||
|
|
||||||
query_str = ""
|
|
||||||
"""SQL query that returns the result items."""
|
|
||||||
|
|
||||||
limit = 10
|
|
||||||
paging = True
|
|
||||||
result_template = 'key-value.html'
|
|
||||||
_connection = None
|
|
||||||
|
|
||||||
|
|
||||||
def init(engine_settings):
|
|
||||||
global _connection # pylint: disable=global-statement
|
|
||||||
|
|
||||||
if 'query_str' not in engine_settings:
|
|
||||||
raise ValueError('query_str cannot be empty')
|
|
||||||
|
|
||||||
if not engine_settings['query_str'].lower().startswith('select '):
|
|
||||||
raise ValueError('only SELECT query is supported')
|
|
||||||
|
|
||||||
_connection = mariadb.connect(database=database, user=username, password=password, host=host, port=port)
|
|
||||||
|
|
||||||
|
|
||||||
def search(query, params):
|
|
||||||
query_params = {'query': query}
|
|
||||||
query_to_run = query_str + ' LIMIT {0} OFFSET {1}'.format(limit, (params['pageno'] - 1) * limit)
|
|
||||||
logger.debug("SQL Query: %s", query_to_run)
|
|
||||||
|
|
||||||
with _connection.cursor() as cur:
|
|
||||||
cur.execute(query_to_run, query_params)
|
|
||||||
results = []
|
|
||||||
col_names = [i[0] for i in cur.description]
|
|
||||||
for res in cur:
|
|
||||||
result = dict(zip(col_names, map(str, res)))
|
|
||||||
result['template'] = result_template
|
|
||||||
results.append(result)
|
|
||||||
return results
|
|
|
@ -1,15 +1,12 @@
|
||||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||||
"""Mojeek (general, images, news)"""
|
"""Mojeek (general, images, news)"""
|
||||||
|
|
||||||
from typing import TYPE_CHECKING
|
|
||||||
|
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from urllib.parse import urlencode
|
from urllib.parse import urlencode
|
||||||
from lxml import html
|
from lxml import html
|
||||||
|
|
||||||
from dateutil.relativedelta import relativedelta
|
from dateutil.relativedelta import relativedelta
|
||||||
from searx.utils import eval_xpath, eval_xpath_list, extract_text
|
from searx.utils import eval_xpath, eval_xpath_list, extract_text
|
||||||
from searx.enginelib.traits import EngineTraits
|
|
||||||
|
|
||||||
about = {
|
about = {
|
||||||
'website': 'https://mojeek.com',
|
'website': 'https://mojeek.com',
|
||||||
|
@ -45,18 +42,6 @@ news_url_xpath = './/h2/a/@href'
|
||||||
news_title_xpath = './/h2/a'
|
news_title_xpath = './/h2/a'
|
||||||
news_content_xpath = './/p[@class="s"]'
|
news_content_xpath = './/p[@class="s"]'
|
||||||
|
|
||||||
language_param = 'lb'
|
|
||||||
region_param = 'arc'
|
|
||||||
|
|
||||||
_delta_kwargs = {'day': 'days', 'week': 'weeks', 'month': 'months', 'year': 'years'}
|
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
|
||||||
import logging
|
|
||||||
|
|
||||||
logger = logging.getLogger()
|
|
||||||
|
|
||||||
traits: EngineTraits
|
|
||||||
|
|
||||||
|
|
||||||
def init(_):
|
def init(_):
|
||||||
if search_type not in ('', 'images', 'news'):
|
if search_type not in ('', 'images', 'news'):
|
||||||
|
@ -68,16 +53,13 @@ def request(query, params):
|
||||||
'q': query,
|
'q': query,
|
||||||
'safe': min(params['safesearch'], 1),
|
'safe': min(params['safesearch'], 1),
|
||||||
'fmt': search_type,
|
'fmt': search_type,
|
||||||
language_param: traits.get_language(params['searxng_locale'], traits.custom['language_all']),
|
|
||||||
region_param: traits.get_region(params['searxng_locale'], traits.custom['region_all']),
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if search_type == '':
|
if search_type == '':
|
||||||
args['s'] = 10 * (params['pageno'] - 1)
|
args['s'] = 10 * (params['pageno'] - 1)
|
||||||
|
|
||||||
if params['time_range'] and search_type != 'images':
|
if params['time_range'] and search_type != 'images':
|
||||||
kwargs = {_delta_kwargs[params['time_range']]: 1}
|
args["since"] = (datetime.now() - relativedelta(**{f"{params['time_range']}s": 1})).strftime("%Y%m%d")
|
||||||
args["since"] = (datetime.now() - relativedelta(**kwargs)).strftime("%Y%m%d") # type: ignore
|
|
||||||
logger.debug(args["since"])
|
logger.debug(args["since"])
|
||||||
|
|
||||||
params['url'] = f"{base_url}/search?{urlencode(args)}"
|
params['url'] = f"{base_url}/search?{urlencode(args)}"
|
||||||
|
@ -112,7 +94,7 @@ def _image_results(dom):
|
||||||
'template': 'images.html',
|
'template': 'images.html',
|
||||||
'url': extract_text(eval_xpath(result, image_url_xpath)),
|
'url': extract_text(eval_xpath(result, image_url_xpath)),
|
||||||
'title': extract_text(eval_xpath(result, image_title_xpath)),
|
'title': extract_text(eval_xpath(result, image_title_xpath)),
|
||||||
'img_src': base_url + extract_text(eval_xpath(result, image_img_src_xpath)), # type: ignore
|
'img_src': base_url + extract_text(eval_xpath(result, image_img_src_xpath)),
|
||||||
'content': '',
|
'content': '',
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
@ -148,31 +130,3 @@ def response(resp):
|
||||||
return _news_results(dom)
|
return _news_results(dom)
|
||||||
|
|
||||||
raise ValueError(f"Invalid search type {search_type}")
|
raise ValueError(f"Invalid search type {search_type}")
|
||||||
|
|
||||||
|
|
||||||
def fetch_traits(engine_traits: EngineTraits):
|
|
||||||
# pylint: disable=import-outside-toplevel
|
|
||||||
from searx import network
|
|
||||||
from searx.locales import get_official_locales, region_tag
|
|
||||||
from babel import Locale, UnknownLocaleError
|
|
||||||
import contextlib
|
|
||||||
|
|
||||||
resp = network.get(base_url + "/preferences", headers={'Accept-Language': 'en-US,en;q=0.5'})
|
|
||||||
dom = html.fromstring(resp.text) # type: ignore
|
|
||||||
|
|
||||||
languages = eval_xpath_list(dom, f'//select[@name="{language_param}"]/option/@value')
|
|
||||||
|
|
||||||
engine_traits.custom['language_all'] = languages[0]
|
|
||||||
|
|
||||||
for code in languages[1:]:
|
|
||||||
with contextlib.suppress(UnknownLocaleError):
|
|
||||||
locale = Locale(code)
|
|
||||||
engine_traits.languages[locale.language] = code
|
|
||||||
|
|
||||||
regions = eval_xpath_list(dom, f'//select[@name="{region_param}"]/option/@value')
|
|
||||||
|
|
||||||
engine_traits.custom['region_all'] = regions[1]
|
|
||||||
|
|
||||||
for code in regions[2:]:
|
|
||||||
for locale in get_official_locales(code, engine_traits.languages):
|
|
||||||
engine_traits.regions[region_tag(locale)] = code
|
|
||||||
|
|
|
@ -34,25 +34,12 @@ except ImportError:
|
||||||
|
|
||||||
engine_type = 'offline'
|
engine_type = 'offline'
|
||||||
auth_plugin = 'caching_sha2_password'
|
auth_plugin = 'caching_sha2_password'
|
||||||
|
|
||||||
host = "127.0.0.1"
|
host = "127.0.0.1"
|
||||||
"""Hostname of the DB connector"""
|
|
||||||
|
|
||||||
port = 3306
|
port = 3306
|
||||||
"""Port of the DB connector"""
|
|
||||||
|
|
||||||
database = ""
|
database = ""
|
||||||
"""Name of the database."""
|
|
||||||
|
|
||||||
username = ""
|
username = ""
|
||||||
"""Username for the DB connection."""
|
|
||||||
|
|
||||||
password = ""
|
password = ""
|
||||||
"""Password for the DB connection."""
|
|
||||||
|
|
||||||
query_str = ""
|
query_str = ""
|
||||||
"""SQL query that returns the result items."""
|
|
||||||
|
|
||||||
limit = 10
|
limit = 10
|
||||||
paging = True
|
paging = True
|
||||||
result_template = 'key-value.html'
|
result_template = 'key-value.html'
|
||||||
|
|
|
@ -1,71 +0,0 @@
|
||||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
|
||||||
"""Open library (books)
|
|
||||||
"""
|
|
||||||
from urllib.parse import urlencode
|
|
||||||
import re
|
|
||||||
|
|
||||||
from dateutil import parser
|
|
||||||
|
|
||||||
about = {
|
|
||||||
'website': 'https://openlibrary.org',
|
|
||||||
'wikidata_id': 'Q1201876',
|
|
||||||
'require_api_key': False,
|
|
||||||
'use_official_api': False,
|
|
||||||
'official_api_documentation': 'https://openlibrary.org/developers/api',
|
|
||||||
}
|
|
||||||
|
|
||||||
paging = True
|
|
||||||
categories = []
|
|
||||||
|
|
||||||
base_url = "https://openlibrary.org"
|
|
||||||
results_per_page = 10
|
|
||||||
|
|
||||||
|
|
||||||
def request(query, params):
|
|
||||||
args = {
|
|
||||||
'q': query,
|
|
||||||
'page': params['pageno'],
|
|
||||||
'limit': results_per_page,
|
|
||||||
}
|
|
||||||
params['url'] = f"{base_url}/search.json?{urlencode(args)}"
|
|
||||||
return params
|
|
||||||
|
|
||||||
|
|
||||||
def _parse_date(date):
|
|
||||||
try:
|
|
||||||
return parser.parse(date)
|
|
||||||
except parser.ParserError:
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
def response(resp):
|
|
||||||
results = []
|
|
||||||
|
|
||||||
for item in resp.json().get("docs", []):
|
|
||||||
cover = None
|
|
||||||
if 'lending_identifier_s' in item:
|
|
||||||
cover = f"https://archive.org/services/img/{item['lending_identifier_s']}"
|
|
||||||
|
|
||||||
published = item.get('publish_date')
|
|
||||||
if published:
|
|
||||||
published_dates = [date for date in map(_parse_date, published) if date]
|
|
||||||
if published_dates:
|
|
||||||
published = min(published_dates)
|
|
||||||
|
|
||||||
if not published:
|
|
||||||
published = parser.parse(str(item.get('first_published_year')))
|
|
||||||
|
|
||||||
result = {
|
|
||||||
'template': 'paper.html',
|
|
||||||
'url': f"{base_url}{item['key']}",
|
|
||||||
'title': item['title'],
|
|
||||||
'content': re.sub(r"\{|\}", "", item['first_sentence'][0]) if item.get('first_sentence') else '',
|
|
||||||
'isbn': item.get('isbn', [])[:5],
|
|
||||||
'authors': item.get('author_name', []),
|
|
||||||
'thumbnail': cover,
|
|
||||||
'publishedDate': published,
|
|
||||||
'tags': item.get('subject', [])[:10] + item.get('place', [])[:10],
|
|
||||||
}
|
|
||||||
results.append(result)
|
|
||||||
|
|
||||||
return results
|
|
|
@ -29,25 +29,12 @@ except ImportError:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
engine_type = 'offline'
|
engine_type = 'offline'
|
||||||
|
|
||||||
host = "127.0.0.1"
|
host = "127.0.0.1"
|
||||||
"""Hostname of the DB connector"""
|
|
||||||
|
|
||||||
port = "5432"
|
port = "5432"
|
||||||
"""Port of the DB connector"""
|
|
||||||
|
|
||||||
database = ""
|
database = ""
|
||||||
"""Name of the database."""
|
|
||||||
|
|
||||||
username = ""
|
username = ""
|
||||||
"""Username for the DB connection."""
|
|
||||||
|
|
||||||
password = ""
|
password = ""
|
||||||
"""Password for the DB connection."""
|
|
||||||
|
|
||||||
query_str = ""
|
query_str = ""
|
||||||
"""SQL query that returns the result items."""
|
|
||||||
|
|
||||||
limit = 10
|
limit = 10
|
||||||
paging = True
|
paging = True
|
||||||
result_template = 'key-value.html'
|
result_template = 'key-value.html'
|
||||||
|
|
|
@ -49,11 +49,7 @@ from flask_babel import gettext
|
||||||
import babel
|
import babel
|
||||||
import lxml
|
import lxml
|
||||||
|
|
||||||
from searx.exceptions import (
|
from searx.exceptions import SearxEngineAPIException, SearxEngineTooManyRequestsException
|
||||||
SearxEngineAPIException,
|
|
||||||
SearxEngineTooManyRequestsException,
|
|
||||||
SearxEngineCaptchaException,
|
|
||||||
)
|
|
||||||
from searx.network import raise_for_httperror
|
from searx.network import raise_for_httperror
|
||||||
from searx.enginelib.traits import EngineTraits
|
from searx.enginelib.traits import EngineTraits
|
||||||
|
|
||||||
|
@ -61,7 +57,6 @@ from searx.utils import (
|
||||||
eval_xpath,
|
eval_xpath,
|
||||||
eval_xpath_list,
|
eval_xpath_list,
|
||||||
extract_text,
|
extract_text,
|
||||||
get_embeded_stream_url,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
traits: EngineTraits
|
traits: EngineTraits
|
||||||
|
@ -192,8 +187,6 @@ def parse_web_api(resp):
|
||||||
error_code = data.get('error_code')
|
error_code = data.get('error_code')
|
||||||
if error_code == 24:
|
if error_code == 24:
|
||||||
raise SearxEngineTooManyRequestsException()
|
raise SearxEngineTooManyRequestsException()
|
||||||
if search_results.get("data", {}).get("error_data", {}).get("captchaUrl") is not None:
|
|
||||||
raise SearxEngineCaptchaException()
|
|
||||||
msg = ",".join(data.get('message', ['unknown']))
|
msg = ",".join(data.get('message', ['unknown']))
|
||||||
raise SearxEngineAPIException(f"{msg} ({error_code})")
|
raise SearxEngineAPIException(f"{msg} ({error_code})")
|
||||||
|
|
||||||
|
@ -304,7 +297,6 @@ def parse_web_api(resp):
|
||||||
'title': title,
|
'title': title,
|
||||||
'url': res_url,
|
'url': res_url,
|
||||||
'content': content,
|
'content': content,
|
||||||
'iframe_src': get_embeded_stream_url(res_url),
|
|
||||||
'publishedDate': pub_date,
|
'publishedDate': pub_date,
|
||||||
'thumbnail': thumbnail,
|
'thumbnail': thumbnail,
|
||||||
'template': 'videos.html',
|
'template': 'videos.html',
|
||||||
|
|
|
@ -165,12 +165,10 @@ def fetch_traits(engine_traits: EngineTraits):
|
||||||
|
|
||||||
countrycodes = set()
|
countrycodes = set()
|
||||||
for region in country_list:
|
for region in country_list:
|
||||||
# country_list contains duplicates that differ only in upper/lower case
|
if region['iso_3166_1'] not in babel_reg_list:
|
||||||
_reg = region['iso_3166_1'].upper()
|
|
||||||
if _reg not in babel_reg_list:
|
|
||||||
print(f"ERROR: region tag {region['iso_3166_1']} is unknown by babel")
|
print(f"ERROR: region tag {region['iso_3166_1']} is unknown by babel")
|
||||||
continue
|
continue
|
||||||
countrycodes.add(_reg)
|
countrycodes.add(region['iso_3166_1'])
|
||||||
|
|
||||||
countrycodes = list(countrycodes)
|
countrycodes = list(countrycodes)
|
||||||
countrycodes.sort()
|
countrycodes.sort()
|
||||||
|
|
|
@ -41,13 +41,8 @@ import sqlite3
|
||||||
import contextlib
|
import contextlib
|
||||||
|
|
||||||
engine_type = 'offline'
|
engine_type = 'offline'
|
||||||
|
|
||||||
database = ""
|
database = ""
|
||||||
"""Filename of the SQLite DB."""
|
|
||||||
|
|
||||||
query_str = ""
|
query_str = ""
|
||||||
"""SQL query that returns the result items."""
|
|
||||||
|
|
||||||
limit = 10
|
limit = 10
|
||||||
paging = True
|
paging = True
|
||||||
result_template = 'key-value.html'
|
result_template = 'key-value.html'
|
||||||
|
|
|
@ -7,7 +7,6 @@ ends.
|
||||||
|
|
||||||
from json import dumps
|
from json import dumps
|
||||||
from searx.utils import searx_useragent
|
from searx.utils import searx_useragent
|
||||||
from searx.enginelib.traits import EngineTraits
|
|
||||||
|
|
||||||
about = {
|
about = {
|
||||||
"website": "https://stract.com/",
|
"website": "https://stract.com/",
|
||||||
|
@ -19,10 +18,7 @@ about = {
|
||||||
categories = ['general']
|
categories = ['general']
|
||||||
paging = True
|
paging = True
|
||||||
|
|
||||||
base_url = "https://stract.com/beta/api"
|
search_url = "https://stract.com/beta/api/search"
|
||||||
search_url = base_url + "/search"
|
|
||||||
|
|
||||||
traits: EngineTraits
|
|
||||||
|
|
||||||
|
|
||||||
def request(query, params):
|
def request(query, params):
|
||||||
|
@ -33,14 +29,7 @@ def request(query, params):
|
||||||
'Content-Type': 'application/json',
|
'Content-Type': 'application/json',
|
||||||
'User-Agent': searx_useragent(),
|
'User-Agent': searx_useragent(),
|
||||||
}
|
}
|
||||||
region = traits.get_region(params["searxng_locale"], default=traits.all_locale)
|
params['data'] = dumps({'query': query, 'page': params['pageno'] - 1})
|
||||||
params['data'] = dumps(
|
|
||||||
{
|
|
||||||
'query': query,
|
|
||||||
'page': params['pageno'] - 1,
|
|
||||||
'selectedRegion': region,
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
return params
|
return params
|
||||||
|
|
||||||
|
@ -58,24 +47,3 @@ def response(resp):
|
||||||
)
|
)
|
||||||
|
|
||||||
return results
|
return results
|
||||||
|
|
||||||
|
|
||||||
def fetch_traits(engine_traits: EngineTraits):
|
|
||||||
# pylint: disable=import-outside-toplevel
|
|
||||||
from searx import network
|
|
||||||
from babel import Locale, languages
|
|
||||||
from searx.locales import region_tag
|
|
||||||
|
|
||||||
territories = Locale("en").territories
|
|
||||||
|
|
||||||
json = network.get(base_url + "/docs/openapi.json").json()
|
|
||||||
regions = json['components']['schemas']['Region']['enum']
|
|
||||||
|
|
||||||
engine_traits.all_locale = regions[0]
|
|
||||||
|
|
||||||
for region in regions[1:]:
|
|
||||||
for code, name in territories.items():
|
|
||||||
if region not in (code, name):
|
|
||||||
continue
|
|
||||||
for lang in languages.get_official_languages(code, de_facto=True):
|
|
||||||
engine_traits.regions[region_tag(Locale(lang, code))] = region
|
|
||||||
|
|
|
@ -1,7 +1,6 @@
|
||||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||||
"""Exception types raised by SearXNG modules.
|
"""Exception types raised by SearXNG modules.
|
||||||
"""
|
"""
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
from typing import Optional, Union
|
from typing import Optional, Union
|
||||||
|
|
||||||
|
@ -62,7 +61,7 @@ class SearxEngineAccessDeniedException(SearxEngineResponseException):
|
||||||
"""This settings contains the default suspended time (default 86400 sec / 1
|
"""This settings contains the default suspended time (default 86400 sec / 1
|
||||||
day)."""
|
day)."""
|
||||||
|
|
||||||
def __init__(self, suspended_time: int | None = None, message: str = 'Access denied'):
|
def __init__(self, suspended_time: int = None, message: str = 'Access denied'):
|
||||||
"""Generic exception to raise when an engine denies access to the results.
|
"""Generic exception to raise when an engine denies access to the results.
|
||||||
|
|
||||||
:param suspended_time: How long the engine is going to be suspended in
|
:param suspended_time: How long the engine is going to be suspended in
|
||||||
|
@ -71,13 +70,12 @@ class SearxEngineAccessDeniedException(SearxEngineResponseException):
|
||||||
:param message: Internal message. Defaults to ``Access denied``
|
:param message: Internal message. Defaults to ``Access denied``
|
||||||
:type message: str
|
:type message: str
|
||||||
"""
|
"""
|
||||||
if suspended_time is None:
|
suspended_time = suspended_time or self._get_default_suspended_time()
|
||||||
suspended_time = self._get_default_suspended_time()
|
|
||||||
super().__init__(message + ', suspended_time=' + str(suspended_time))
|
super().__init__(message + ', suspended_time=' + str(suspended_time))
|
||||||
self.suspended_time = suspended_time
|
self.suspended_time = suspended_time
|
||||||
self.message = message
|
self.message = message
|
||||||
|
|
||||||
def _get_default_suspended_time(self) -> int:
|
def _get_default_suspended_time(self):
|
||||||
from searx import get_setting # pylint: disable=C0415
|
from searx import get_setting # pylint: disable=C0415
|
||||||
|
|
||||||
return get_setting(self.SUSPEND_TIME_SETTING)
|
return get_setting(self.SUSPEND_TIME_SETTING)
|
||||||
|
@ -90,7 +88,7 @@ class SearxEngineCaptchaException(SearxEngineAccessDeniedException):
|
||||||
"""This settings contains the default suspended time (default 86400 sec / 1
|
"""This settings contains the default suspended time (default 86400 sec / 1
|
||||||
day)."""
|
day)."""
|
||||||
|
|
||||||
def __init__(self, suspended_time: int | None = None, message='CAPTCHA'):
|
def __init__(self, suspended_time=None, message='CAPTCHA'):
|
||||||
super().__init__(message=message, suspended_time=suspended_time)
|
super().__init__(message=message, suspended_time=suspended_time)
|
||||||
|
|
||||||
|
|
||||||
|
@ -104,7 +102,7 @@ class SearxEngineTooManyRequestsException(SearxEngineAccessDeniedException):
|
||||||
"""This settings contains the default suspended time (default 3660 sec / 1
|
"""This settings contains the default suspended time (default 3660 sec / 1
|
||||||
hour)."""
|
hour)."""
|
||||||
|
|
||||||
def __init__(self, suspended_time: int | None = None, message='Too many request'):
|
def __init__(self, suspended_time=None, message='Too many request'):
|
||||||
super().__init__(message=message, suspended_time=suspended_time)
|
super().__init__(message=message, suspended_time=suspended_time)
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -1,38 +0,0 @@
|
||||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
|
||||||
"""Implementations for providing the favicons in SearXNG"""
|
|
||||||
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
__all__ = ["init", "favicon_url", "favicon_proxy"]
|
|
||||||
|
|
||||||
import pathlib
|
|
||||||
from searx import logger
|
|
||||||
from searx import get_setting
|
|
||||||
from .proxy import favicon_url, favicon_proxy
|
|
||||||
|
|
||||||
logger = logger.getChild('favicons')
|
|
||||||
|
|
||||||
|
|
||||||
def is_active():
|
|
||||||
return bool(get_setting("search.favicon_resolver", False))
|
|
||||||
|
|
||||||
|
|
||||||
def init():
|
|
||||||
|
|
||||||
# pylint: disable=import-outside-toplevel
|
|
||||||
|
|
||||||
from . import config, cache, proxy
|
|
||||||
from .. import settings_loader
|
|
||||||
|
|
||||||
cfg_file = (settings_loader.get_user_cfg_folder() or pathlib.Path("/etc/searxng")) / "favicons.toml"
|
|
||||||
if not cfg_file.exists():
|
|
||||||
if is_active():
|
|
||||||
logger.error(f"missing favicon config: {cfg_file}")
|
|
||||||
cfg_file = config.DEFAULT_CFG_TOML_PATH
|
|
||||||
|
|
||||||
logger.debug(f"load favicon config: {cfg_file}")
|
|
||||||
cfg = config.FaviconConfig.from_toml_file(cfg_file, use_cache=True)
|
|
||||||
cache.init(cfg.cache)
|
|
||||||
proxy.init(cfg.proxy)
|
|
||||||
|
|
||||||
del cache, config, proxy, cfg, settings_loader
|
|
|
@ -1,12 +0,0 @@
|
||||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
|
||||||
"""Command line implementation"""
|
|
||||||
|
|
||||||
import typer
|
|
||||||
|
|
||||||
from . import cache
|
|
||||||
from . import init
|
|
||||||
|
|
||||||
init()
|
|
||||||
app = typer.Typer()
|
|
||||||
app.add_typer(cache.app, name="cache", help="commands related to the cache")
|
|
||||||
app()
|
|
|
@ -1,476 +0,0 @@
|
||||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
|
||||||
"""Implementations for caching favicons.
|
|
||||||
|
|
||||||
:py:obj:`FaviconCacheConfig`:
|
|
||||||
Configuration of the favicon cache
|
|
||||||
|
|
||||||
:py:obj:`FaviconCache`:
|
|
||||||
Abstract base class for the implementation of a favicon cache.
|
|
||||||
|
|
||||||
:py:obj:`FaviconCacheSQLite`:
|
|
||||||
Favicon cache that manages the favicon BLOBs in a SQLite DB.
|
|
||||||
|
|
||||||
:py:obj:`FaviconCacheNull`:
|
|
||||||
Fallback solution if the configured cache cannot be used for system reasons.
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
from __future__ import annotations
|
|
||||||
from typing import Literal
|
|
||||||
|
|
||||||
import os
|
|
||||||
import abc
|
|
||||||
import dataclasses
|
|
||||||
import hashlib
|
|
||||||
import logging
|
|
||||||
import sqlite3
|
|
||||||
import tempfile
|
|
||||||
import time
|
|
||||||
import typer
|
|
||||||
|
|
||||||
import msgspec
|
|
||||||
|
|
||||||
from searx import sqlitedb
|
|
||||||
from searx import logger
|
|
||||||
from searx.utils import humanize_bytes, humanize_number
|
|
||||||
|
|
||||||
CACHE: "FaviconCache"
|
|
||||||
FALLBACK_ICON = b"FALLBACK_ICON"
|
|
||||||
|
|
||||||
logger = logger.getChild('favicons.cache')
|
|
||||||
app = typer.Typer()
|
|
||||||
|
|
||||||
|
|
||||||
@app.command()
|
|
||||||
def state():
|
|
||||||
"""show state of the cache"""
|
|
||||||
print(CACHE.state().report())
|
|
||||||
|
|
||||||
|
|
||||||
@app.command()
|
|
||||||
def maintenance(force: bool = True, debug: bool = False):
|
|
||||||
"""perform maintenance of the cache"""
|
|
||||||
root_log = logging.getLogger()
|
|
||||||
if debug:
|
|
||||||
root_log.setLevel(logging.DEBUG)
|
|
||||||
else:
|
|
||||||
root_log.handlers = []
|
|
||||||
handler = logging.StreamHandler()
|
|
||||||
handler.setFormatter(logging.Formatter("%(message)s"))
|
|
||||||
logger.addHandler(handler)
|
|
||||||
logger.setLevel(logging.DEBUG)
|
|
||||||
|
|
||||||
state_t0 = CACHE.state()
|
|
||||||
CACHE.maintenance(force=force)
|
|
||||||
state_t1 = CACHE.state()
|
|
||||||
state_delta = state_t0 - state_t1
|
|
||||||
print("The cache has been reduced by:")
|
|
||||||
print(state_delta.report("\n- {descr}: {val}").lstrip("\n"))
|
|
||||||
|
|
||||||
|
|
||||||
def init(cfg: "FaviconCacheConfig"):
|
|
||||||
"""Initialization of a global ``CACHE``"""
|
|
||||||
|
|
||||||
global CACHE # pylint: disable=global-statement
|
|
||||||
if cfg.db_type == "sqlite":
|
|
||||||
if sqlite3.sqlite_version_info <= (3, 35):
|
|
||||||
logger.critical(
|
|
||||||
"Disable favicon caching completely: SQLite library (%s) is too old! (require >= 3.35)",
|
|
||||||
sqlite3.sqlite_version,
|
|
||||||
)
|
|
||||||
CACHE = FaviconCacheNull(cfg)
|
|
||||||
else:
|
|
||||||
CACHE = FaviconCacheSQLite(cfg)
|
|
||||||
elif cfg.db_type == "mem":
|
|
||||||
logger.error("Favicons are cached in memory, don't use this in production!")
|
|
||||||
CACHE = FaviconCacheMEM(cfg)
|
|
||||||
else:
|
|
||||||
raise NotImplementedError(f"favicons db_type '{cfg.db_type}' is unknown")
|
|
||||||
|
|
||||||
|
|
||||||
class FaviconCacheConfig(msgspec.Struct): # pylint: disable=too-few-public-methods
|
|
||||||
"""Configuration of the favicon cache."""
|
|
||||||
|
|
||||||
db_type: Literal["sqlite", "mem"] = "sqlite"
|
|
||||||
"""Type of the database:
|
|
||||||
|
|
||||||
``sqlite``:
|
|
||||||
:py:obj:`.cache.FaviconCacheSQLite`
|
|
||||||
|
|
||||||
``mem``:
|
|
||||||
:py:obj:`.cache.FaviconCacheMEM` (not recommended)
|
|
||||||
"""
|
|
||||||
|
|
||||||
db_url: str = tempfile.gettempdir() + os.sep + "faviconcache.db"
|
|
||||||
"""URL of the SQLite DB, the path to the database file."""
|
|
||||||
|
|
||||||
HOLD_TIME: int = 60 * 60 * 24 * 30 # 30 days
|
|
||||||
"""Hold time (default in sec.), after which a BLOB is removed from the cache."""
|
|
||||||
|
|
||||||
LIMIT_TOTAL_BYTES: int = 1024 * 1024 * 50 # 50 MB
|
|
||||||
"""Maximum of bytes (default) stored in the cache of all blobs. Note: The
|
|
||||||
limit is only reached at each maintenance interval after which the oldest
|
|
||||||
BLOBs are deleted; the limit is exceeded during the maintenance period. If
|
|
||||||
the maintenance period is *too long* or maintenance is switched off
|
|
||||||
completely, the cache grows uncontrollably."""
|
|
||||||
|
|
||||||
BLOB_MAX_BYTES: int = 1024 * 20 # 20 KB
|
|
||||||
"""The maximum BLOB size in bytes that a favicon may have so that it can be
|
|
||||||
saved in the cache. If the favicon is larger, it is not saved in the cache
|
|
||||||
and must be requested by the client via the proxy."""
|
|
||||||
|
|
||||||
MAINTENANCE_PERIOD: int = 60 * 60
|
|
||||||
"""Maintenance period in seconds / when :py:obj:`MAINTENANCE_MODE` is set to
|
|
||||||
``auto``."""
|
|
||||||
|
|
||||||
MAINTENANCE_MODE: Literal["auto", "off"] = "auto"
|
|
||||||
"""Type of maintenance mode
|
|
||||||
|
|
||||||
``auto``:
|
|
||||||
Maintenance is carried out automatically as part of the maintenance
|
|
||||||
intervals (:py:obj:`MAINTENANCE_PERIOD`); no external process is required.
|
|
||||||
|
|
||||||
``off``:
|
|
||||||
Maintenance is switched off and must be carried out by an external process
|
|
||||||
if required.
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
@dataclasses.dataclass
|
|
||||||
class FaviconCacheStats:
|
|
||||||
"""Dataclass wich provides information on the status of the cache."""
|
|
||||||
|
|
||||||
favicons: int | None = None
|
|
||||||
bytes: int | None = None
|
|
||||||
domains: int | None = None
|
|
||||||
resolvers: int | None = None
|
|
||||||
|
|
||||||
field_descr = (
|
|
||||||
("favicons", "number of favicons in cache", humanize_number),
|
|
||||||
("bytes", "total size (approx. bytes) of cache", humanize_bytes),
|
|
||||||
("domains", "total number of domains in cache", humanize_number),
|
|
||||||
("resolvers", "number of resolvers", str),
|
|
||||||
)
|
|
||||||
|
|
||||||
def __sub__(self, other) -> FaviconCacheStats:
|
|
||||||
if not isinstance(other, self.__class__):
|
|
||||||
raise TypeError(f"unsupported operand type(s) for +: '{self.__class__}' and '{type(other)}'")
|
|
||||||
kwargs = {}
|
|
||||||
for field, _, _ in self.field_descr:
|
|
||||||
self_val, other_val = getattr(self, field), getattr(other, field)
|
|
||||||
if None in (self_val, other_val):
|
|
||||||
continue
|
|
||||||
if isinstance(self_val, int):
|
|
||||||
kwargs[field] = self_val - other_val
|
|
||||||
else:
|
|
||||||
kwargs[field] = self_val
|
|
||||||
return self.__class__(**kwargs)
|
|
||||||
|
|
||||||
def report(self, fmt: str = "{descr}: {val}\n"):
|
|
||||||
s = []
|
|
||||||
for field, descr, cast in self.field_descr:
|
|
||||||
val = getattr(self, field)
|
|
||||||
if val is None:
|
|
||||||
val = "--"
|
|
||||||
else:
|
|
||||||
val = cast(val)
|
|
||||||
s.append(fmt.format(descr=descr, val=val))
|
|
||||||
return "".join(s)
|
|
||||||
|
|
||||||
|
|
||||||
class FaviconCache(abc.ABC):
|
|
||||||
"""Abstract base class for the implementation of a favicon cache."""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def __init__(self, cfg: FaviconCacheConfig):
|
|
||||||
"""An instance of the favicon cache is build up from the configuration."""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def __call__(self, resolver: str, authority: str) -> None | tuple[None | bytes, None | str]:
|
|
||||||
"""Returns ``None`` or the tuple of ``(data, mime)`` that has been
|
|
||||||
registered in the cache. The ``None`` indicates that there was no entry
|
|
||||||
in the cache."""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def set(self, resolver: str, authority: str, mime: str | None, data: bytes | None) -> bool:
|
|
||||||
"""Set data and mime-type in the cache. If data is None, the
|
|
||||||
:py:obj:`FALLBACK_ICON` is registered. in the cache."""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def state(self) -> FaviconCacheStats:
|
|
||||||
"""Returns a :py:obj:`FaviconCacheStats` (key/values) with information
|
|
||||||
on the state of the cache."""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def maintenance(self, force=False):
|
|
||||||
"""Performs maintenance on the cache"""
|
|
||||||
|
|
||||||
|
|
||||||
class FaviconCacheNull(FaviconCache):
|
|
||||||
"""A dummy favicon cache that caches nothing / a fallback solution. The
|
|
||||||
NullCache is used when more efficient caches such as the
|
|
||||||
:py:obj:`FaviconCacheSQLite` cannot be used because, for example, the SQLite
|
|
||||||
library is only available in an old version and does not meet the
|
|
||||||
requirements."""
|
|
||||||
|
|
||||||
def __init__(self, cfg: FaviconCacheConfig):
|
|
||||||
return None
|
|
||||||
|
|
||||||
def __call__(self, resolver: str, authority: str) -> None | tuple[None | bytes, None | str]:
|
|
||||||
return None
|
|
||||||
|
|
||||||
def set(self, resolver: str, authority: str, mime: str | None, data: bytes | None) -> bool:
|
|
||||||
return False
|
|
||||||
|
|
||||||
def state(self):
|
|
||||||
return FaviconCacheStats(favicons=0)
|
|
||||||
|
|
||||||
def maintenance(self, force=False):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class FaviconCacheSQLite(sqlitedb.SQLiteAppl, FaviconCache):
|
|
||||||
"""Favicon cache that manages the favicon BLOBs in a SQLite DB. The DB
|
|
||||||
model in the SQLite DB is implemented using the abstract class
|
|
||||||
:py:obj:`sqlitedb.SQLiteAppl`.
|
|
||||||
|
|
||||||
The following configurations are required / supported:
|
|
||||||
|
|
||||||
- :py:obj:`FaviconCacheConfig.db_url`
|
|
||||||
- :py:obj:`FaviconCacheConfig.HOLD_TIME`
|
|
||||||
- :py:obj:`FaviconCacheConfig.LIMIT_TOTAL_BYTES`
|
|
||||||
- :py:obj:`FaviconCacheConfig.BLOB_MAX_BYTES`
|
|
||||||
- :py:obj:`MAINTENANCE_PERIOD`
|
|
||||||
- :py:obj:`MAINTENANCE_MODE`
|
|
||||||
"""
|
|
||||||
|
|
||||||
DB_SCHEMA = 1
|
|
||||||
|
|
||||||
DDL_BLOBS = """\
|
|
||||||
CREATE TABLE IF NOT EXISTS blobs (
|
|
||||||
sha256 TEXT,
|
|
||||||
bytes_c INTEGER,
|
|
||||||
mime TEXT NOT NULL,
|
|
||||||
data BLOB NOT NULL,
|
|
||||||
PRIMARY KEY (sha256))"""
|
|
||||||
|
|
||||||
"""Table to store BLOB objects by their sha256 hash values."""
|
|
||||||
|
|
||||||
DDL_BLOB_MAP = """\
|
|
||||||
CREATE TABLE IF NOT EXISTS blob_map (
|
|
||||||
m_time INTEGER DEFAULT (strftime('%s', 'now')), -- last modified (unix epoch) time in sec.
|
|
||||||
sha256 TEXT,
|
|
||||||
resolver TEXT,
|
|
||||||
authority TEXT,
|
|
||||||
PRIMARY KEY (resolver, authority))"""
|
|
||||||
|
|
||||||
"""Table to map from (resolver, authority) to sha256 hash values."""
|
|
||||||
|
|
||||||
DDL_CREATE_TABLES = {
|
|
||||||
"blobs": DDL_BLOBS,
|
|
||||||
"blob_map": DDL_BLOB_MAP,
|
|
||||||
}
|
|
||||||
|
|
||||||
SQL_DROP_LEFTOVER_BLOBS = (
|
|
||||||
"DELETE FROM blobs WHERE sha256 IN ("
|
|
||||||
" SELECT b.sha256"
|
|
||||||
" FROM blobs b"
|
|
||||||
" LEFT JOIN blob_map bm"
|
|
||||||
" ON b.sha256 = bm.sha256"
|
|
||||||
" WHERE bm.sha256 IS NULL)"
|
|
||||||
)
|
|
||||||
"""Delete blobs.sha256 (BLOBs) no longer in blob_map.sha256."""
|
|
||||||
|
|
||||||
SQL_ITER_BLOBS_SHA256_BYTES_C = (
|
|
||||||
"SELECT b.sha256, b.bytes_c FROM blobs b"
|
|
||||||
" JOIN blob_map bm "
|
|
||||||
" ON b.sha256 = bm.sha256"
|
|
||||||
" ORDER BY bm.m_time ASC"
|
|
||||||
)
|
|
||||||
|
|
||||||
SQL_INSERT_BLOBS = (
|
|
||||||
"INSERT INTO blobs (sha256, bytes_c, mime, data) VALUES (?, ?, ?, ?)"
|
|
||||||
" ON CONFLICT (sha256) DO NOTHING"
|
|
||||||
) # fmt: skip
|
|
||||||
|
|
||||||
SQL_INSERT_BLOB_MAP = (
|
|
||||||
"INSERT INTO blob_map (sha256, resolver, authority) VALUES (?, ?, ?)"
|
|
||||||
" ON CONFLICT DO UPDATE "
|
|
||||||
" SET sha256=excluded.sha256, m_time=strftime('%s', 'now')"
|
|
||||||
)
|
|
||||||
|
|
||||||
def __init__(self, cfg: FaviconCacheConfig):
|
|
||||||
"""An instance of the favicon cache is build up from the configuration.""" #
|
|
||||||
|
|
||||||
if cfg.db_url == ":memory:":
|
|
||||||
logger.critical("don't use SQLite DB in :memory: in production!!")
|
|
||||||
super().__init__(cfg.db_url)
|
|
||||||
self.cfg = cfg
|
|
||||||
|
|
||||||
def __call__(self, resolver: str, authority: str) -> None | tuple[None | bytes, None | str]:
|
|
||||||
|
|
||||||
sql = "SELECT sha256 FROM blob_map WHERE resolver = ? AND authority = ?"
|
|
||||||
res = self.DB.execute(sql, (resolver, authority)).fetchone()
|
|
||||||
if res is None:
|
|
||||||
return None
|
|
||||||
|
|
||||||
data, mime = (None, None)
|
|
||||||
sha256 = res[0]
|
|
||||||
if sha256 == FALLBACK_ICON:
|
|
||||||
return data, mime
|
|
||||||
|
|
||||||
sql = "SELECT data, mime FROM blobs WHERE sha256 = ?"
|
|
||||||
res = self.DB.execute(sql, (sha256,)).fetchone()
|
|
||||||
if res is not None:
|
|
||||||
data, mime = res
|
|
||||||
return data, mime
|
|
||||||
|
|
||||||
def set(self, resolver: str, authority: str, mime: str | None, data: bytes | None) -> bool:
|
|
||||||
|
|
||||||
if self.cfg.MAINTENANCE_MODE == "auto" and int(time.time()) > self.next_maintenance_time:
|
|
||||||
# Should automatic maintenance be moved to a new thread?
|
|
||||||
self.maintenance()
|
|
||||||
|
|
||||||
if data is not None and mime is None:
|
|
||||||
logger.error(
|
|
||||||
"favicon resolver %s tries to cache mime-type None for authority %s",
|
|
||||||
resolver,
|
|
||||||
authority,
|
|
||||||
)
|
|
||||||
return False
|
|
||||||
|
|
||||||
bytes_c = len(data or b"")
|
|
||||||
if bytes_c > self.cfg.BLOB_MAX_BYTES:
|
|
||||||
logger.info(
|
|
||||||
"favicon of resolver: %s / authority: %s to big to cache (bytes: %s) " % (resolver, authority, bytes_c)
|
|
||||||
)
|
|
||||||
return False
|
|
||||||
|
|
||||||
if data is None:
|
|
||||||
sha256 = FALLBACK_ICON
|
|
||||||
else:
|
|
||||||
sha256 = hashlib.sha256(data).hexdigest()
|
|
||||||
|
|
||||||
with self.connect() as conn:
|
|
||||||
if sha256 != FALLBACK_ICON:
|
|
||||||
conn.execute(self.SQL_INSERT_BLOBS, (sha256, bytes_c, mime, data))
|
|
||||||
conn.execute(self.SQL_INSERT_BLOB_MAP, (sha256, resolver, authority))
|
|
||||||
|
|
||||||
return True
|
|
||||||
|
|
||||||
@property
|
|
||||||
def next_maintenance_time(self) -> int:
|
|
||||||
"""Returns (unix epoch) time of the next maintenance."""
|
|
||||||
|
|
||||||
return self.cfg.MAINTENANCE_PERIOD + self.properties.m_time("LAST_MAINTENANCE")
|
|
||||||
|
|
||||||
def maintenance(self, force=False):
|
|
||||||
|
|
||||||
# Prevent parallel DB maintenance cycles from other DB connections
|
|
||||||
# (e.g. in multi thread or process environments).
|
|
||||||
|
|
||||||
if not force and int(time.time()) < self.next_maintenance_time:
|
|
||||||
logger.debug("no maintenance required yet, next maintenance interval is in the future")
|
|
||||||
return
|
|
||||||
self.properties.set("LAST_MAINTENANCE", "") # hint: this (also) sets the m_time of the property!
|
|
||||||
|
|
||||||
# do maintenance tasks
|
|
||||||
|
|
||||||
with self.connect() as conn:
|
|
||||||
|
|
||||||
# drop items not in HOLD time
|
|
||||||
res = conn.execute(
|
|
||||||
f"DELETE FROM blob_map"
|
|
||||||
f" WHERE cast(m_time as integer) < cast(strftime('%s', 'now') as integer) - {self.cfg.HOLD_TIME}"
|
|
||||||
)
|
|
||||||
logger.debug("dropped %s obsolete blob_map items from db", res.rowcount)
|
|
||||||
res = conn.execute(self.SQL_DROP_LEFTOVER_BLOBS)
|
|
||||||
logger.debug("dropped %s obsolete BLOBS from db", res.rowcount)
|
|
||||||
|
|
||||||
# drop old items to be in LIMIT_TOTAL_BYTES
|
|
||||||
total_bytes = conn.execute("SELECT SUM(bytes_c) FROM blobs").fetchone()[0] or 0
|
|
||||||
if total_bytes > self.cfg.LIMIT_TOTAL_BYTES:
|
|
||||||
|
|
||||||
x = total_bytes - self.cfg.LIMIT_TOTAL_BYTES
|
|
||||||
c = 0
|
|
||||||
sha_list = []
|
|
||||||
for row in conn.execute(self.SQL_ITER_BLOBS_SHA256_BYTES_C):
|
|
||||||
sha256, bytes_c = row
|
|
||||||
sha_list.append(sha256)
|
|
||||||
c += bytes_c
|
|
||||||
if c > x:
|
|
||||||
break
|
|
||||||
if sha_list:
|
|
||||||
conn.execute("DELETE FROM blobs WHERE sha256 IN ('%s')" % "','".join(sha_list))
|
|
||||||
conn.execute("DELETE FROM blob_map WHERE sha256 IN ('%s')" % "','".join(sha_list))
|
|
||||||
logger.debug("dropped %s blobs with total size of %s bytes", len(sha_list), c)
|
|
||||||
|
|
||||||
def _query_val(self, sql, default=None):
|
|
||||||
val = self.DB.execute(sql).fetchone()
|
|
||||||
if val is not None:
|
|
||||||
val = val[0]
|
|
||||||
if val is None:
|
|
||||||
val = default
|
|
||||||
return val
|
|
||||||
|
|
||||||
def state(self) -> FaviconCacheStats:
|
|
||||||
return FaviconCacheStats(
|
|
||||||
favicons=self._query_val("SELECT count(*) FROM blobs", 0),
|
|
||||||
bytes=self._query_val("SELECT SUM(bytes_c) FROM blobs", 0),
|
|
||||||
domains=self._query_val("SELECT count(*) FROM (SELECT authority FROM blob_map GROUP BY authority)", 0),
|
|
||||||
resolvers=self._query_val("SELECT count(*) FROM (SELECT resolver FROM blob_map GROUP BY resolver)", 0),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class FaviconCacheMEM(FaviconCache):
|
|
||||||
"""Favicon cache in process' memory. Its just a POC that stores the
|
|
||||||
favicons in the memory of the process.
|
|
||||||
|
|
||||||
.. attention::
|
|
||||||
|
|
||||||
Don't use it in production, it will blow up your memory!!
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, cfg):
|
|
||||||
|
|
||||||
self.cfg = cfg
|
|
||||||
self._data = {}
|
|
||||||
self._sha_mime = {}
|
|
||||||
|
|
||||||
def __call__(self, resolver: str, authority: str) -> None | tuple[bytes | None, str | None]:
|
|
||||||
|
|
||||||
sha, mime = self._sha_mime.get(f"{resolver}:{authority}", (None, None))
|
|
||||||
if sha is None:
|
|
||||||
return None
|
|
||||||
data = self._data.get(sha)
|
|
||||||
if data == FALLBACK_ICON:
|
|
||||||
data = None
|
|
||||||
return data, mime
|
|
||||||
|
|
||||||
def set(self, resolver: str, authority: str, mime: str | None, data: bytes | None) -> bool:
|
|
||||||
|
|
||||||
if data is None:
|
|
||||||
data = FALLBACK_ICON
|
|
||||||
mime = None
|
|
||||||
|
|
||||||
elif mime is None:
|
|
||||||
logger.error(
|
|
||||||
"favicon resolver %s tries to cache mime-type None for authority %s",
|
|
||||||
resolver,
|
|
||||||
authority,
|
|
||||||
)
|
|
||||||
return False
|
|
||||||
|
|
||||||
digest = hashlib.sha256(data).hexdigest()
|
|
||||||
self._data[digest] = data
|
|
||||||
self._sha_mime[f"{resolver}:{authority}"] = (digest, mime)
|
|
||||||
return True
|
|
||||||
|
|
||||||
def state(self):
|
|
||||||
return FaviconCacheStats(favicons=len(self._data.keys()))
|
|
||||||
|
|
||||||
def maintenance(self, force=False):
|
|
||||||
pass
|
|
|
@ -1,65 +0,0 @@
|
||||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
|
||||||
# pylint: disable=missing-module-docstring
|
|
||||||
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import pathlib
|
|
||||||
import msgspec
|
|
||||||
|
|
||||||
from .cache import FaviconCacheConfig
|
|
||||||
from .proxy import FaviconProxyConfig
|
|
||||||
|
|
||||||
CONFIG_SCHEMA: int = 1
|
|
||||||
"""Version of the configuration schema."""
|
|
||||||
|
|
||||||
TOML_CACHE_CFG: dict[str, "FaviconConfig"] = {}
|
|
||||||
"""Cache config objects by TOML's filename."""
|
|
||||||
|
|
||||||
DEFAULT_CFG_TOML_PATH = pathlib.Path(__file__).parent / "favicons.toml"
|
|
||||||
|
|
||||||
|
|
||||||
class FaviconConfig(msgspec.Struct): # pylint: disable=too-few-public-methods
|
|
||||||
"""The class aggregates configurations of the favicon tools"""
|
|
||||||
|
|
||||||
cfg_schema: int
|
|
||||||
"""Config's schema version. The specification of the version of the schema
|
|
||||||
is mandatory, currently only version :py:obj:`CONFIG_SCHEMA` is supported.
|
|
||||||
By specifying a version, it is possible to ensure downward compatibility in
|
|
||||||
the event of future changes to the configuration schema"""
|
|
||||||
|
|
||||||
cache: FaviconCacheConfig = msgspec.field(default_factory=FaviconCacheConfig)
|
|
||||||
"""Setup of the :py:obj:`.cache.FaviconCacheConfig`."""
|
|
||||||
|
|
||||||
proxy: FaviconProxyConfig = msgspec.field(default_factory=FaviconProxyConfig)
|
|
||||||
"""Setup of the :py:obj:`.proxy.FaviconProxyConfig`."""
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def from_toml_file(cls, cfg_file: pathlib.Path, use_cache: bool) -> "FaviconConfig":
|
|
||||||
"""Create a config object from a TOML file, the ``use_cache`` argument
|
|
||||||
specifies whether a cache should be used.
|
|
||||||
"""
|
|
||||||
|
|
||||||
cached = TOML_CACHE_CFG.get(str(cfg_file))
|
|
||||||
if use_cache and cached:
|
|
||||||
return cached
|
|
||||||
|
|
||||||
with cfg_file.open("rb") as f:
|
|
||||||
data = f.read()
|
|
||||||
|
|
||||||
cfg = msgspec.toml.decode(data, type=_FaviconConfig)
|
|
||||||
schema = cfg.favicons.cfg_schema
|
|
||||||
if schema != CONFIG_SCHEMA:
|
|
||||||
raise ValueError(
|
|
||||||
f"config schema version {CONFIG_SCHEMA} is needed, version {schema} is given in {cfg_file}"
|
|
||||||
)
|
|
||||||
|
|
||||||
cfg = cfg.favicons
|
|
||||||
if use_cache and cached:
|
|
||||||
TOML_CACHE_CFG[str(cfg_file.resolve())] = cfg
|
|
||||||
|
|
||||||
return cfg
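# Usage sketch (the path below is an assumption, not a shipped default):
#
#   import pathlib
#   cfg = FaviconConfig.from_toml_file(
#       pathlib.Path("/etc/searxng/favicons.toml"), use_cache=True)
#   print(cfg.cache.db_url, cfg.proxy.max_age)
#
# A ValueError is raised when the file's cfg_schema does not match CONFIG_SCHEMA.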
|
|
||||||
|
|
||||||
|
|
||||||
class _FaviconConfig(msgspec.Struct): # pylint: disable=too-few-public-methods
|
|
||||||
# wrapper struct for root object "favicons."
|
|
||||||
favicons: FaviconConfig
|
|
|
@ -1,25 +0,0 @@
[favicons]

cfg_schema = 1   # config's schema version no.

[favicons.proxy]

# max_age = 5184000           # 60 days / default: 7 days (604800 sec)

# [favicons.proxy.resolver_map]
#
# The available favicon resolvers are registered here.
#
# "duckduckgo" = "searx.favicons.resolvers.duckduckgo"
# "allesedv" = "searx.favicons.resolvers.allesedv"
# "google" = "searx.favicons.resolvers.google"
# "yandex" = "searx.favicons.resolvers.yandex"

[favicons.cache]

# db_url = "/var/cache/searxng/faviconcache.db"  # default: "/tmp/faviconcache.db"
# HOLD_TIME = 5184000         # 60 days / default: 30 days
# LIMIT_TOTAL_BYTES = 2147483648  # 2 GB / default: 50 MB
# BLOB_MAX_BYTES = 40960      # 40 KB / default 20 KB
# MAINTENANCE_MODE = "off"    # default: "auto"
# MAINTENANCE_PERIOD = 600    # 10min / default: 1h

@ -1,237 +0,0 @@
|
||||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
|
||||||
"""Implementations for a favicon proxy"""
|
|
||||||
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
from typing import Callable
|
|
||||||
|
|
||||||
import importlib
|
|
||||||
import base64
|
|
||||||
import pathlib
|
|
||||||
import urllib.parse
|
|
||||||
|
|
||||||
import flask
|
|
||||||
from httpx import HTTPError
|
|
||||||
import msgspec
|
|
||||||
|
|
||||||
from searx import get_setting
|
|
||||||
|
|
||||||
from searx.webutils import new_hmac, is_hmac_of
|
|
||||||
from searx.exceptions import SearxEngineResponseException
|
|
||||||
|
|
||||||
from .resolvers import DEFAULT_RESOLVER_MAP
|
|
||||||
from . import cache
|
|
||||||
|
|
||||||
DEFAULT_FAVICON_URL = {}
|
|
||||||
CFG: FaviconProxyConfig = None # type: ignore
|
|
||||||
|
|
||||||
|
|
||||||
def init(cfg: FaviconProxyConfig):
|
|
||||||
global CFG # pylint: disable=global-statement
|
|
||||||
CFG = cfg
|
|
||||||
|
|
||||||
|
|
||||||
def _initial_resolver_map():
|
|
||||||
d = {}
|
|
||||||
name: str = get_setting("search.favicon_resolver", None) # type: ignore
|
|
||||||
if name:
|
|
||||||
func = DEFAULT_RESOLVER_MAP.get(name)
|
|
||||||
if func:
|
|
||||||
d = {name: f"searx.favicons.resolvers.{func.__name__}"}
|
|
||||||
return d
|
|
||||||
|
|
||||||
|
|
||||||
class FaviconProxyConfig(msgspec.Struct):
|
|
||||||
"""Configuration of the favicon proxy."""
|
|
||||||
|
|
||||||
max_age: int = 60 * 60 * 24 * 7 # seven days
|
|
||||||
"""HTTP header Cache-Control_ ``max-age``
|
|
||||||
|
|
||||||
.. _Cache-Control: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Cache-Control
|
|
||||||
"""
|
|
||||||
|
|
||||||
secret_key: str = get_setting("server.secret_key") # type: ignore
|
|
||||||
"""By default, the value from :ref:`server.secret_key <settings server>`
|
|
||||||
setting is used."""
|
|
||||||
|
|
||||||
resolver_timeout: int = get_setting("outgoing.request_timeout") # type: ignore
|
|
||||||
"""Timeout which the resolvers should not exceed, is usually passed to the
|
|
||||||
outgoing request of the resolver. By default, the value from
|
|
||||||
:ref:`outgoing.request_timeout <settings outgoing>` setting is used."""
|
|
||||||
|
|
||||||
resolver_map: dict[str, str] = msgspec.field(default_factory=_initial_resolver_map)
|
|
||||||
"""The resolver_map is a key / value dictionary where the key is the name of
|
|
||||||
the resolver and the value is the fully qualifying name (fqn) of resolver's
|
|
||||||
function (the callable). The resolvers from the python module
|
|
||||||
:py:obj:`searx.favicons.resolver` are available by default."""
|
|
||||||
|
|
||||||
def get_resolver(self, name: str) -> Callable | None:
|
|
||||||
"""Returns the callable object (function) of the resolver with the
|
|
||||||
``name``. If no resolver is registered for the ``name``, ``None`` is
|
|
||||||
returned.
|
|
||||||
"""
|
|
||||||
fqn = self.resolver_map.get(name)
|
|
||||||
if fqn is None:
|
|
||||||
return None
|
|
||||||
mod_name, _, func_name = fqn.rpartition('.')
|
|
||||||
mod = importlib.import_module(mod_name)
|
|
||||||
func = getattr(mod, func_name)
|
|
||||||
if func is None:
|
|
||||||
raise ValueError(f"resolver {fqn} is not implemented")
|
|
||||||
return func
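# Usage sketch for get_resolver(); the "duckduckgo" key is an assumption
# and only resolves if it is registered in resolver_map:
#
#   resolve = CFG.get_resolver("duckduckgo")
#   if resolve is not None:
#       data, mime = resolve("searxng.org", timeout=CFG.resolver_timeout)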
|
|
||||||
|
|
||||||
favicon_path: str = get_setting("ui.static_path") + "/themes/{theme}/img/empty_favicon.svg" # type: ignore
|
|
||||||
favicon_mime_type: str = "image/svg+xml"
|
|
||||||
|
|
||||||
def favicon(self, **replacements):
|
|
||||||
"""Returns pathname and mimetype of the default favicon."""
|
|
||||||
return (
|
|
||||||
pathlib.Path(self.favicon_path.format(**replacements)),
|
|
||||||
self.favicon_mime_type,
|
|
||||||
)
|
|
||||||
|
|
||||||
def favicon_data_url(self, **replacements):
|
|
||||||
"""Returns data image URL of the default favicon."""
|
|
||||||
|
|
||||||
cache_key = ", ".join(f"{x}:{replacements[x]}" for x in sorted(list(replacements.keys()), key=str))
|
|
||||||
data_url = DEFAULT_FAVICON_URL.get(cache_key)
|
|
||||||
if data_url is not None:
|
|
||||||
return data_url
|
|
||||||
|
|
||||||
fav, mimetype = CFG.favicon(**replacements)
|
|
||||||
# hint: encoding utf-8 limits favicons to be an SVG image
|
|
||||||
with fav.open("r", encoding="utf-8") as f:
|
|
||||||
data_url = f.read()
|
|
||||||
|
|
||||||
data_url = urllib.parse.quote(data_url)
|
|
||||||
data_url = f"data:{mimetype};utf8,{data_url}"
|
|
||||||
DEFAULT_FAVICON_URL[cache_key] = data_url
|
|
||||||
return data_url
|
|
||||||
|
|
||||||
|
|
||||||
def favicon_proxy():
|
|
||||||
"""REST API of SearXNG's favicon proxy service
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
/favicon_proxy?authority=<...>&h=<...>
|
|
||||||
|
|
||||||
``authority``:
|
|
||||||
Domain name :rfc:`3986` / see :py:obj:`favicon_url`
|
|
||||||
|
|
||||||
``h``:
|
|
||||||
HMAC :rfc:`2104`, build up from the :ref:`server.secret_key <settings
|
|
||||||
server>` setting.
|
|
||||||
|
|
||||||
"""
|
|
||||||
authority = flask.request.args.get('authority')
|
|
||||||
|
|
||||||
# malformed request or RFC 3986 authority
|
|
||||||
if not authority or "/" in authority:
|
|
||||||
return '', 400
|
|
||||||
|
|
||||||
# malformed request / does not have authorisation
|
|
||||||
if not is_hmac_of(
|
|
||||||
CFG.secret_key,
|
|
||||||
authority.encode(),
|
|
||||||
flask.request.args.get('h', ''),
|
|
||||||
):
|
|
||||||
return '', 400
|
|
||||||
|
|
||||||
resolver = flask.request.preferences.get_value('favicon_resolver') # type: ignore
|
|
||||||
# if resolver is empty or not valid, just return HTTP 400.
|
|
||||||
if not resolver or resolver not in CFG.resolver_map.keys():
|
|
||||||
return "", 400
|
|
||||||
|
|
||||||
data, mime = search_favicon(resolver, authority)
|
|
||||||
|
|
||||||
if data is not None and mime is not None:
|
|
||||||
resp = flask.Response(data, mimetype=mime) # type: ignore
|
|
||||||
resp.headers['Cache-Control'] = f"max-age={CFG.max_age}"
|
|
||||||
return resp
|
|
||||||
|
|
||||||
# return default favicon from static path
|
|
||||||
theme = flask.request.preferences.get_value("theme") # type: ignore
|
|
||||||
fav, mimetype = CFG.favicon(theme=theme)
|
|
||||||
return flask.send_from_directory(fav.parent, fav.name, mimetype=mimetype)
|
|
||||||
|
|
||||||
|
|
||||||
def search_favicon(resolver: str, authority: str) -> tuple[None | bytes, None | str]:
|
|
||||||
"""Sends the request to the favicon resolver and returns a tuple for the
|
|
||||||
favicon. The tuple consists of ``(data, mime)``, if the resolver has not
|
|
||||||
determined a favicon, both values are ``None``.
|
|
||||||
|
|
||||||
``data``:
|
|
||||||
Binary data of the favicon.
|
|
||||||
|
|
||||||
``mime``:
|
|
||||||
Mime type of the favicon.
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
data, mime = (None, None)
|
|
||||||
|
|
||||||
func = CFG.get_resolver(resolver)
|
|
||||||
if func is None:
|
|
||||||
return data, mime
|
|
||||||
|
|
||||||
# to avoid superfluous requests to the resolver, first look in the cache
|
|
||||||
data_mime = cache.CACHE(resolver, authority)
|
|
||||||
if data_mime is not None:
|
|
||||||
return data_mime
|
|
||||||
|
|
||||||
try:
|
|
||||||
data, mime = func(authority, timeout=CFG.resolver_timeout)
|
|
||||||
if data is None or mime is None:
|
|
||||||
data, mime = (None, None)
|
|
||||||
|
|
||||||
except (HTTPError, SearxEngineResponseException):
|
|
||||||
pass
|
|
||||||
|
|
||||||
cache.CACHE.set(resolver, authority, mime, data)
|
|
||||||
return data, mime
|
|
||||||
|
|
||||||
|
|
||||||
def favicon_url(authority: str) -> str:
|
|
||||||
"""Function to generate the image URL used for favicons in SearXNG's result
|
|
||||||
lists. The ``authority`` argument (aka netloc / :rfc:`3986`) is usually a
|
|
||||||
(sub-) domain name. This function is used in the HTML (jinja) templates.
|
|
||||||
|
|
||||||
.. code:: html
|
|
||||||
|
|
||||||
<div class="favicon">
|
|
||||||
<img src="{{ favicon_url(result.parsed_url.netloc) }}">
|
|
||||||
</div>
|
|
||||||
|
|
||||||
The returned URL is a route to :py:obj:`favicon_proxy` REST API.
|
|
||||||
|
|
||||||
If the favicon is already in the cache, the returned URL is a `data URL`_
|
|
||||||
(something like ``data:image/png;base64,...``). By generating a data url from
|
|
||||||
the :py:obj:`.cache.FaviconCache`, additional HTTP roundtrips via the
|
|
||||||
:py:obj:`favicon_proxy` are saved. However, it must also be borne in mind
|
|
||||||
that data urls are not cached in the client (web browser).
|
|
||||||
|
|
||||||
.. _data URL: https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/Data_URLs
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
resolver = flask.request.preferences.get_value('favicon_resolver') # type: ignore
|
|
||||||
# if resolver is empty or not valid, just return nothing.
|
|
||||||
if not resolver or resolver not in CFG.resolver_map.keys():
|
|
||||||
return ""
|
|
||||||
|
|
||||||
data_mime = cache.CACHE(resolver, authority)
|
|
||||||
|
|
||||||
if data_mime == (None, None):
|
|
||||||
# we have already checked, the resolver does not have a favicon
|
|
||||||
theme = flask.request.preferences.get_value("theme") # type: ignore
|
|
||||||
return CFG.favicon_data_url(theme=theme)
|
|
||||||
|
|
||||||
if data_mime is not None:
|
|
||||||
data, mime = data_mime
|
|
||||||
return f"data:{mime};base64,{str(base64.b64encode(data), 'utf-8')}" # type: ignore
|
|
||||||
|
|
||||||
h = new_hmac(CFG.secret_key, authority.encode())
|
|
||||||
proxy_url = flask.url_for('favicon_proxy')
|
|
||||||
query = urllib.parse.urlencode({"authority": authority, "h": h})
|
|
||||||
return f"{proxy_url}?{query}"
|
|
|
@ -1,100 +0,0 @@
|
||||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
|
||||||
"""Implementations of the favicon *resolvers* that are available in the favicon
|
|
||||||
proxy by default. A *resolver* is a function that obtains the favicon from an
|
|
||||||
external source. The *resolver* function receives two arguments (``domain,
|
|
||||||
timeout``) and returns a tuple ``(data, mime)``.
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
__all__ = ["DEFAULT_RESOLVER_MAP", "allesedv", "duckduckgo", "google", "yandex"]
|
|
||||||
|
|
||||||
from typing import Callable
|
|
||||||
from searx import network
|
|
||||||
from searx import logger
|
|
||||||
|
|
||||||
DEFAULT_RESOLVER_MAP: dict[str, Callable]
|
|
||||||
logger = logger.getChild('favicons.resolvers')
|
|
||||||
|
|
||||||
|
|
||||||
def _req_args(**kwargs):
|
|
||||||
# add the request arguments from the searx.network
|
|
||||||
d = {"raise_for_httperror": False}
|
|
||||||
d.update(kwargs)
|
|
||||||
return d
|
|
||||||
|
|
||||||
|
|
||||||
def allesedv(domain: str, timeout: int) -> tuple[None | bytes, None | str]:
|
|
||||||
"""Favicon Resolver from allesedv.com / https://favicon.allesedv.com/"""
|
|
||||||
data, mime = (None, None)
|
|
||||||
url = f"https://f1.allesedv.com/32/{domain}"
|
|
||||||
logger.debug("fetch favicon from: %s", url)
|
|
||||||
|
|
||||||
# will just return a 200 regardless of the favicon existing or not
|
|
||||||
# sometimes will be correct size, sometimes not
|
|
||||||
response = network.get(url, **_req_args(timeout=timeout))
|
|
||||||
if response and response.status_code == 200:
|
|
||||||
mime = response.headers['Content-Type']
|
|
||||||
if mime != 'image/gif':
|
|
||||||
data = response.content
|
|
||||||
return data, mime
|
|
||||||
|
|
||||||
|
|
||||||
def duckduckgo(domain: str, timeout: int) -> tuple[None | bytes, None | str]:
|
|
||||||
"""Favicon Resolver from duckduckgo.com / https://blog.jim-nielsen.com/2021/displaying-favicons-for-any-domain/"""
|
|
||||||
data, mime = (None, None)
|
|
||||||
url = f"https://icons.duckduckgo.com/ip2/{domain}.ico"
|
|
||||||
logger.debug("fetch favicon from: %s", url)
|
|
||||||
|
|
||||||
# will return a 404 if the favicon does not exist and a 200 if it does,
|
|
||||||
response = network.get(url, **_req_args(timeout=timeout))
|
|
||||||
if response and response.status_code == 200:
|
|
||||||
# api will respond with a 32x32 png image
|
|
||||||
mime = response.headers['Content-Type']
|
|
||||||
data = response.content
|
|
||||||
return data, mime
|
|
||||||
|
|
||||||
|
|
||||||
def google(domain: str, timeout: int) -> tuple[None | bytes, None | str]:
|
|
||||||
"""Favicon Resolver from google.com"""
|
|
||||||
data, mime = (None, None)
|
|
||||||
|
|
||||||
# URL https://www.google.com/s2/favicons?sz=32&domain={domain}" will be
|
|
||||||
# redirected (HTTP 301 Moved Permanently) to t1.gstatic.com/faviconV2:
|
|
||||||
url = (
|
|
||||||
f"https://t1.gstatic.com/faviconV2?client=SOCIAL&type=FAVICON&fallback_opts=TYPE,SIZE,URL"
|
|
||||||
f"&url=https://{domain}&size=32"
|
|
||||||
)
|
|
||||||
logger.debug("fetch favicon from: %s", url)
|
|
||||||
|
|
||||||
# will return a 404 if the favicon does not exist and a 200 if it does,
|
|
||||||
response = network.get(url, **_req_args(timeout=timeout))
|
|
||||||
if response and response.status_code == 200:
|
|
||||||
# api will respond with a 32x32 png image
|
|
||||||
mime = response.headers['Content-Type']
|
|
||||||
data = response.content
|
|
||||||
return data, mime
|
|
||||||
|
|
||||||
|
|
||||||
def yandex(domain: str, timeout: int) -> tuple[None | bytes, None | str]:
|
|
||||||
"""Favicon Resolver from yandex.com"""
|
|
||||||
data, mime = (None, None)
|
|
||||||
url = f"https://favicon.yandex.net/favicon/{domain}"
|
|
||||||
logger.debug("fetch favicon from: %s", url)
|
|
||||||
|
|
||||||
# api will respond with a 16x16 png image, if it doesn't exist, it will be a
|
|
||||||
# 1x1 png image (70 bytes)
|
|
||||||
response = network.get(url, **_req_args(timeout=timeout))
|
|
||||||
if response and response.status_code == 200 and len(response.content) > 70:
|
|
||||||
mime = response.headers['Content-Type']
|
|
||||||
data = response.content
|
|
||||||
return data, mime
|
|
||||||
|
|
||||||
|
|
||||||
DEFAULT_RESOLVER_MAP = {
|
|
||||||
"allesedv": allesedv,
|
|
||||||
"duckduckgo": duckduckgo,
|
|
||||||
"google": google,
|
|
||||||
"yandex": yandex,
|
|
||||||
}
|
|
|
@ -128,6 +128,9 @@ _INSTALLED = False
|
||||||
LIMITER_CFG_SCHEMA = Path(__file__).parent / "limiter.toml"
|
LIMITER_CFG_SCHEMA = Path(__file__).parent / "limiter.toml"
|
||||||
"""Base configuration (schema) of the botdetection."""
|
"""Base configuration (schema) of the botdetection."""
|
||||||
|
|
||||||
|
LIMITER_CFG = Path('/etc/searxng/limiter.toml')
|
||||||
|
"""Local Limiter configuration."""
|
||||||
|
|
||||||
CFG_DEPRECATED = {
|
CFG_DEPRECATED = {
|
||||||
# "dummy.old.foo": "config 'dummy.old.foo' exists only for tests. Don't use it in your real project config."
|
# "dummy.old.foo": "config 'dummy.old.foo' exists only for tests. Don't use it in your real project config."
|
||||||
}
|
}
|
||||||
|
@ -135,12 +138,8 @@ CFG_DEPRECATED = {
|
||||||
|
|
||||||
def get_cfg() -> config.Config:
|
def get_cfg() -> config.Config:
|
||||||
global CFG # pylint: disable=global-statement
|
global CFG # pylint: disable=global-statement
|
||||||
|
|
||||||
if CFG is None:
|
if CFG is None:
|
||||||
from . import settings_loader # pylint: disable=import-outside-toplevel
|
CFG = config.Config.from_toml(LIMITER_CFG_SCHEMA, LIMITER_CFG, CFG_DEPRECATED)
|
||||||
|
|
||||||
cfg_file = (settings_loader.get_user_cfg_folder() or Path("/etc/searxng")) / "limiter.toml"
|
|
||||||
CFG = config.Config.from_toml(LIMITER_CFG_SCHEMA, cfg_file, CFG_DEPRECATED)
|
|
||||||
return CFG
|
return CFG
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -152,7 +152,7 @@ def locales_initialize():
|
||||||
def region_tag(locale: babel.Locale) -> str:
|
def region_tag(locale: babel.Locale) -> str:
|
||||||
"""Returns SearXNG's region tag from the locale (e.g. zh-TW , en-US)."""
|
"""Returns SearXNG's region tag from the locale (e.g. zh-TW , en-US)."""
|
||||||
if not locale.territory:
|
if not locale.territory:
|
||||||
raise ValueError('babel.Locale %s: missed a territory' % locale)
|
raise ValueError('%s missed a territory')
|
||||||
return locale.language + '-' + locale.territory
|
return locale.language + '-' + locale.territory
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -233,7 +233,8 @@ class Network:
|
||||||
del kwargs['raise_for_httperror']
|
del kwargs['raise_for_httperror']
|
||||||
return do_raise_for_httperror
|
return do_raise_for_httperror
|
||||||
|
|
||||||
def patch_response(self, response, do_raise_for_httperror):
|
@staticmethod
|
||||||
|
def patch_response(response, do_raise_for_httperror):
|
||||||
if isinstance(response, httpx.Response):
|
if isinstance(response, httpx.Response):
|
||||||
# requests compatibility (response is not streamed)
|
# requests compatibility (response is not streamed)
|
||||||
# see also https://www.python-httpx.org/compatibility/#checking-for-4xx5xx-responses
|
# see also https://www.python-httpx.org/compatibility/#checking-for-4xx5xx-responses
|
||||||
|
@ -241,11 +242,8 @@ class Network:
|
||||||
|
|
||||||
# raise an exception
|
# raise an exception
|
||||||
if do_raise_for_httperror:
|
if do_raise_for_httperror:
|
||||||
try:
|
|
||||||
raise_for_httperror(response)
|
raise_for_httperror(response)
|
||||||
except:
|
|
||||||
self._logger.warning(f"HTTP Request failed: {response.request.method} {response.request.url}")
|
|
||||||
raise
|
|
||||||
return response
|
return response
|
||||||
|
|
||||||
def is_valid_response(self, response):
|
def is_valid_response(self, response):
|
||||||
|
@ -271,7 +269,7 @@ class Network:
|
||||||
else:
|
else:
|
||||||
response = await client.request(method, url, **kwargs)
|
response = await client.request(method, url, **kwargs)
|
||||||
if self.is_valid_response(response) or retries <= 0:
|
if self.is_valid_response(response) or retries <= 0:
|
||||||
return self.patch_response(response, do_raise_for_httperror)
|
return Network.patch_response(response, do_raise_for_httperror)
|
||||||
except httpx.RemoteProtocolError as e:
|
except httpx.RemoteProtocolError as e:
|
||||||
if not was_disconnected:
|
if not was_disconnected:
|
||||||
# the server has closed the connection:
|
# the server has closed the connection:
|
||||||
|
|
|
@ -3,13 +3,9 @@
|
||||||
"""
|
"""
|
||||||
|
|
||||||
import ast
|
import ast
|
||||||
import re
|
|
||||||
import operator
|
import operator
|
||||||
from multiprocessing import Process, Queue
|
from multiprocessing import Process, Queue
|
||||||
from typing import Callable
|
|
||||||
|
|
||||||
import flask
|
|
||||||
import babel
|
|
||||||
from flask_babel import gettext
|
from flask_babel import gettext
|
||||||
|
|
||||||
from searx.plugins import logger
|
from searx.plugins import logger
|
||||||
|
@ -23,7 +19,7 @@ plugin_id = 'calculator'
|
||||||
|
|
||||||
logger = logger.getChild(plugin_id)
|
logger = logger.getChild(plugin_id)
|
||||||
|
|
||||||
operators: dict[type, Callable] = {
|
operators = {
|
||||||
ast.Add: operator.add,
|
ast.Add: operator.add,
|
||||||
ast.Sub: operator.sub,
|
ast.Sub: operator.sub,
|
||||||
ast.Mult: operator.mul,
|
ast.Mult: operator.mul,
|
||||||
|
@ -43,15 +39,11 @@ def _eval_expr(expr):
|
||||||
>>> _eval_expr('1 + 2*3**(4^5) / (6 + -7)')
|
>>> _eval_expr('1 + 2*3**(4^5) / (6 + -7)')
|
||||||
-5.0
|
-5.0
|
||||||
"""
|
"""
|
||||||
try:
|
|
||||||
return _eval(ast.parse(expr, mode='eval').body)
|
return _eval(ast.parse(expr, mode='eval').body)
|
||||||
except ZeroDivisionError:
|
|
||||||
# This is undefined
|
|
||||||
return ""
|
|
||||||
|
|
||||||
|
|
||||||
def _eval(node):
|
def _eval(node):
|
||||||
if isinstance(node, ast.Constant) and isinstance(node.value, (int, float)):
|
if isinstance(node, ast.Constant) and isinstance(node.value, int):
|
||||||
return node.value
|
return node.value
|
||||||
|
|
||||||
if isinstance(node, ast.BinOp):
|
if isinstance(node, ast.BinOp):
|
||||||
|
@ -101,19 +93,6 @@ def post_search(_request, search):
|
||||||
# replace commonly used math operators with their proper Python operator
|
# replace commonly used math operators with their proper Python operator
|
||||||
query = query.replace("x", "*").replace(":", "/")
|
query = query.replace("x", "*").replace(":", "/")
|
||||||
|
|
||||||
# use UI language
|
|
||||||
ui_locale = babel.Locale.parse(flask.request.preferences.get_value('locale'), sep='-')
|
|
||||||
|
|
||||||
# parse the number system in a localized way
|
|
||||||
def _decimal(match: re.Match) -> str:
|
|
||||||
val = match.string[match.start() : match.end()]
|
|
||||||
val = babel.numbers.parse_decimal(val, ui_locale, numbering_system="latn")
|
|
||||||
return str(val)
|
|
||||||
|
|
||||||
decimal = ui_locale.number_symbols["latn"]["decimal"]
|
|
||||||
group = ui_locale.number_symbols["latn"]["group"]
|
|
||||||
query = re.sub(f"[0-9]+[{decimal}|{group}][0-9]+[{decimal}|{group}]?[0-9]?", _decimal, query)
|
|
||||||
|
|
||||||
# only numbers and math operators are accepted
|
# only numbers and math operators are accepted
|
||||||
if any(str.isalpha(c) for c in query):
|
if any(str.isalpha(c) for c in query):
|
||||||
return True
|
return True
|
||||||
|
@ -123,8 +102,10 @@ def post_search(_request, search):
|
||||||
|
|
||||||
# Prevent the runtime from being longer than 50 ms
|
# Prevent the runtime from being longer than 50 ms
|
||||||
result = timeout_func(0.05, _eval_expr, query_py_formatted)
|
result = timeout_func(0.05, _eval_expr, query_py_formatted)
|
||||||
if result is None or result == "":
|
if result is None:
|
||||||
return True
|
return True
|
||||||
result = babel.numbers.format_decimal(result, locale=ui_locale)
|
result = str(result)
|
||||||
search.result_container.answers['calculate'] = {'answer': f"{search.search_query.query} = {result}"}
|
|
||||||
|
if result != query:
|
||||||
|
search.result_container.answers['calculate'] = {'answer': f"{query} = {result}"}
|
||||||
return True
|
return True
|
||||||
|
|
|
@ -13,7 +13,7 @@ from collections import OrderedDict
|
||||||
import flask
|
import flask
|
||||||
import babel
|
import babel
|
||||||
|
|
||||||
from searx import settings, autocomplete, favicons
|
from searx import settings, autocomplete
|
||||||
from searx.enginelib import Engine
|
from searx.enginelib import Engine
|
||||||
from searx.plugins import Plugin
|
from searx.plugins import Plugin
|
||||||
from searx.locales import LOCALE_NAMES
|
from searx.locales import LOCALE_NAMES
|
||||||
|
@ -406,11 +406,6 @@ class Preferences:
|
||||||
locked=is_locked('autocomplete'),
|
locked=is_locked('autocomplete'),
|
||||||
choices=list(autocomplete.backends.keys()) + ['']
|
choices=list(autocomplete.backends.keys()) + ['']
|
||||||
),
|
),
|
||||||
'favicon_resolver': EnumStringSetting(
|
|
||||||
settings['search']['favicon_resolver'],
|
|
||||||
locked=is_locked('favicon_resolver'),
|
|
||||||
choices=list(favicons.proxy.CFG.resolver_map.keys()) + ['']
|
|
||||||
),
|
|
||||||
'image_proxy': BooleanSetting(
|
'image_proxy': BooleanSetting(
|
||||||
settings['server']['image_proxy'],
|
settings['server']['image_proxy'],
|
||||||
locked=is_locked('image_proxy')
|
locked=is_locked('image_proxy')
|
||||||
|
@ -446,7 +441,7 @@ class Preferences:
|
||||||
'simple_style': EnumStringSetting(
|
'simple_style': EnumStringSetting(
|
||||||
settings['ui']['theme_args']['simple_style'],
|
settings['ui']['theme_args']['simple_style'],
|
||||||
locked=is_locked('simple_style'),
|
locked=is_locked('simple_style'),
|
||||||
choices=['', 'auto', 'light', 'dark', 'black']
|
choices=['', 'auto', 'light', 'dark']
|
||||||
),
|
),
|
||||||
'center_alignment': BooleanSetting(
|
'center_alignment': BooleanSetting(
|
||||||
settings['ui']['center_alignment'],
|
settings['ui']['center_alignment'],
|
||||||
|
|
|
@ -1,7 +1,6 @@
|
||||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||||
# pylint: disable=invalid-name, missing-module-docstring, missing-class-docstring
|
# pylint: disable=invalid-name, missing-module-docstring, missing-class-docstring
|
||||||
|
|
||||||
from __future__ import annotations
|
|
||||||
from abc import abstractmethod, ABC
|
from abc import abstractmethod, ABC
|
||||||
import re
|
import re
|
||||||
|
|
||||||
|
@ -259,7 +258,7 @@ class RawTextQuery:
|
||||||
FeelingLuckyParser, # redirect to the first link in the results list
|
FeelingLuckyParser, # redirect to the first link in the results list
|
||||||
]
|
]
|
||||||
|
|
||||||
def __init__(self, query: str, disabled_engines: list):
|
def __init__(self, query, disabled_engines):
|
||||||
assert isinstance(query, str)
|
assert isinstance(query, str)
|
||||||
# input parameters
|
# input parameters
|
||||||
self.query = query
|
self.query = query
|
||||||
|
|
|
@ -20,7 +20,7 @@ if (next_call_ts == false or next_call_ts == nil) then
|
||||||
-- 2/ the next call is a random time between start_after_from and start_after_to
|
-- 2/ the next call is a random time between start_after_from and start_after_to
|
||||||
local initial_delay = math.random(start_after_from, start_after_to)
|
local initial_delay = math.random(start_after_from, start_after_to)
|
||||||
redis.call('SET', redis_key, now + initial_delay)
|
redis.call('SET', redis_key, now + initial_delay)
|
||||||
return { false, initial_delay }
|
return { false, delay }
|
||||||
end
|
end
|
||||||
|
|
||||||
-- next_call_ts is defined
|
-- next_call_ts is defined
|
||||||
|
|
|
@ -137,6 +137,9 @@ class OnlineProcessor(EngineProcessor):
|
||||||
self.engine.request(query, params)
|
self.engine.request(query, params)
|
||||||
|
|
||||||
# ignoring empty urls
|
# ignoring empty urls
|
||||||
|
if params['url'] is None:
|
||||||
|
return None
|
||||||
|
|
||||||
if not params['url']:
|
if not params['url']:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
|
@ -23,7 +23,7 @@ def name_to_iso4217(name):
|
||||||
currency = CURRENCIES['names'].get(name, [name])
|
currency = CURRENCIES['names'].get(name, [name])
|
||||||
if isinstance(currency, str):
|
if isinstance(currency, str):
|
||||||
return currency
|
return currency
|
||||||
return currency[-1]
|
return currency[0]
|
||||||
|
|
||||||
|
|
||||||
def iso4217_to_name(iso4217, language):
|
def iso4217_to_name(iso4217, language):
|
||||||
|
|
|
@ -55,7 +55,6 @@ STYLE_NAMES = {
|
||||||
'AUTO': 'auto',
|
'AUTO': 'auto',
|
||||||
'LIGHT': 'light',
|
'LIGHT': 'light',
|
||||||
'DARK': 'dark',
|
'DARK': 'dark',
|
||||||
'BLACK': 'black',
|
|
||||||
}
|
}
|
||||||
|
|
||||||
BRAND_CUSTOM_LINKS = {
|
BRAND_CUSTOM_LINKS = {
|
||||||
|
|
|
@ -35,9 +35,6 @@ search:
|
||||||
autocomplete: ""
|
autocomplete: ""
|
||||||
# minimum characters to type before autocompleter starts
|
# minimum characters to type before autocompleter starts
|
||||||
autocomplete_min: 4
|
autocomplete_min: 4
|
||||||
# backend for the favicon near URL in search results.
|
|
||||||
# Available resolvers: "allesedv", "duckduckgo", "google", "yandex" - leave blank to turn it off by default.
|
|
||||||
favicon_resolver: ""
|
|
||||||
# Default search language - leave blank to detect from browser information or
|
# Default search language - leave blank to detect from browser information or
|
||||||
# use codes from 'languages.py'
|
# use codes from 'languages.py'
|
||||||
default_lang: "auto"
|
default_lang: "auto"
|
||||||
|
@ -227,12 +224,15 @@ enabled_plugins:
|
||||||
# - 'Hash plugin'
|
# - 'Hash plugin'
|
||||||
# - 'Self Information'
|
# - 'Self Information'
|
||||||
# - 'Tracker URL remover'
|
# - 'Tracker URL remover'
|
||||||
# - 'Unit converter plugin'
|
|
||||||
# - 'Ahmia blacklist' # activation depends on outgoing.using_tor_proxy
|
# - 'Ahmia blacklist' # activation depends on outgoing.using_tor_proxy
|
||||||
# # these plugins are disabled if nothing is configured ..
|
# # these plugins are disabled if nothing is configured ..
|
||||||
# - 'Hostnames plugin' # see 'hostnames' configuration below
|
# - 'Hostnames plugin' # see 'hostnames' configuration below
|
||||||
# - 'Open Access DOI rewrite'
|
# - 'Open Access DOI rewrite'
|
||||||
# - 'Tor check plugin'
|
# - 'Tor check plugin'
|
||||||
|
# # Read the docs before activate: auto-detection of the language could be
|
||||||
|
# # detrimental to users expectations / users can activate the plugin in the
|
||||||
|
# # preferences if they want.
|
||||||
|
# - 'Autodetect search language'
|
||||||
|
|
||||||
# Configuration of the "Hostnames plugin":
|
# Configuration of the "Hostnames plugin":
|
||||||
#
|
#
|
||||||
|
@ -483,23 +483,6 @@ engines:
|
||||||
# to show premium or plus results too:
|
# to show premium or plus results too:
|
||||||
# skip_premium: false
|
# skip_premium: false
|
||||||
|
|
||||||
- name: cloudflareai
|
|
||||||
engine: cloudflareai
|
|
||||||
shortcut: cfai
|
|
||||||
# get api token and account id from https://developers.cloudflare.com/workers-ai/get-started/rest-api/
|
|
||||||
cf_account_id: 'your_cf_accout_id'
|
|
||||||
cf_ai_api: 'your_cf_api'
|
|
||||||
# create your ai gateway by https://developers.cloudflare.com/ai-gateway/get-started/creating-gateway/
|
|
||||||
cf_ai_gateway: 'your_cf_ai_gateway_name'
|
|
||||||
# find the model name from https://developers.cloudflare.com/workers-ai/models/#text-generation
|
|
||||||
cf_ai_model: 'ai_model_name'
|
|
||||||
# customize your preferences
|
|
||||||
# cf_ai_model_display_name: 'Cloudflare AI'
|
|
||||||
# cf_ai_model_assistant: 'prompts_for_assistant_role'
|
|
||||||
# cf_ai_model_system: 'prompts_for_system_role'
|
|
||||||
timeout: 30
|
|
||||||
disabled: true
|
|
||||||
|
|
||||||
# - name: core.ac.uk
|
# - name: core.ac.uk
|
||||||
# engine: core
|
# engine: core
|
||||||
# categories: science
|
# categories: science
|
||||||
|
@ -825,21 +808,24 @@ engines:
|
||||||
timeout: 10
|
timeout: 10
|
||||||
|
|
||||||
- name: gitlab
|
- name: gitlab
|
||||||
engine: gitlab
|
engine: json_engine
|
||||||
base_url: https://gitlab.com
|
paging: true
|
||||||
|
search_url: https://gitlab.com/api/v4/projects?search={query}&page={pageno}
|
||||||
|
url_query: web_url
|
||||||
|
title_query: name_with_namespace
|
||||||
|
content_query: description
|
||||||
|
page_size: 20
|
||||||
|
categories: [it, repos]
|
||||||
shortcut: gl
|
shortcut: gl
|
||||||
|
timeout: 10.0
|
||||||
disabled: true
|
disabled: true
|
||||||
about:
|
about:
|
||||||
website: https://gitlab.com/
|
website: https://about.gitlab.com/
|
||||||
wikidata_id: Q16639197
|
wikidata_id: Q16639197
|
||||||
|
official_api_documentation: https://docs.gitlab.com/ee/api/
|
||||||
# - name: gnome
|
use_official_api: false
|
||||||
# engine: gitlab
|
require_api_key: false
|
||||||
# base_url: https://gitlab.gnome.org
|
results: JSON
|
||||||
# shortcut: gn
|
|
||||||
# about:
|
|
||||||
# website: https://gitlab.gnome.org
|
|
||||||
# wikidata_id: Q44316
|
|
||||||
|
|
||||||
- name: github
|
- name: github
|
||||||
engine: github
|
engine: github
|
||||||
|
@ -918,6 +904,26 @@ engines:
|
||||||
shortcut: mi
|
shortcut: mi
|
||||||
disabled: true
|
disabled: true
|
||||||
|
|
||||||
|
- name: gpodder
|
||||||
|
engine: json_engine
|
||||||
|
shortcut: gpod
|
||||||
|
timeout: 4.0
|
||||||
|
paging: false
|
||||||
|
search_url: https://gpodder.net/search.json?q={query}
|
||||||
|
url_query: url
|
||||||
|
title_query: title
|
||||||
|
content_query: description
|
||||||
|
page_size: 19
|
||||||
|
categories: music
|
||||||
|
disabled: true
|
||||||
|
about:
|
||||||
|
website: https://gpodder.net
|
||||||
|
wikidata_id: Q3093354
|
||||||
|
official_api_documentation: https://gpoddernet.readthedocs.io/en/latest/api/
|
||||||
|
use_official_api: false
|
||||||
|
requires_api_key: false
|
||||||
|
results: JSON
|
||||||
|
|
||||||
- name: habrahabr
|
- name: habrahabr
|
||||||
engine: xpath
|
engine: xpath
|
||||||
paging: true
|
paging: true
|
||||||
|
@ -1281,12 +1287,6 @@ engines:
|
||||||
require_api_key: false
|
require_api_key: false
|
||||||
results: JSON
|
results: JSON
|
||||||
|
|
||||||
- name: openlibrary
|
|
||||||
engine: openlibrary
|
|
||||||
shortcut: ol
|
|
||||||
timeout: 5
|
|
||||||
disabled: true
|
|
||||||
|
|
||||||
- name: openmeteo
|
- name: openmeteo
|
||||||
engine: open_meteo
|
engine: open_meteo
|
||||||
shortcut: om
|
shortcut: om
|
||||||
|
@ -1623,6 +1623,11 @@ engines:
|
||||||
api_site: 'askubuntu'
|
api_site: 'askubuntu'
|
||||||
categories: [it, q&a]
|
categories: [it, q&a]
|
||||||
|
|
||||||
|
- name: internetarchivescholar
|
||||||
|
engine: internet_archive_scholar
|
||||||
|
shortcut: ias
|
||||||
|
timeout: 15.0
|
||||||
|
|
||||||
- name: superuser
|
- name: superuser
|
||||||
engine: stackexchange
|
engine: stackexchange
|
||||||
shortcut: su
|
shortcut: su
|
||||||
|
@ -1865,6 +1870,25 @@ engines:
|
||||||
about:
|
about:
|
||||||
website: https://wiby.me/
|
website: https://wiby.me/
|
||||||
|
|
||||||
|
- name: alexandria
|
||||||
|
engine: json_engine
|
||||||
|
shortcut: alx
|
||||||
|
categories: general
|
||||||
|
paging: true
|
||||||
|
search_url: https://api.alexandria.org/?a=1&q={query}&p={pageno}
|
||||||
|
results_query: results
|
||||||
|
title_query: title
|
||||||
|
url_query: url
|
||||||
|
content_query: snippet
|
||||||
|
timeout: 1.5
|
||||||
|
disabled: true
|
||||||
|
about:
|
||||||
|
website: https://alexandria.org/
|
||||||
|
official_api_documentation: https://github.com/alexandria-org/alexandria-api/raw/master/README.md
|
||||||
|
use_official_api: true
|
||||||
|
require_api_key: false
|
||||||
|
results: JSON
|
||||||
|
|
||||||
- name: wikibooks
|
- name: wikibooks
|
||||||
engine: mediawiki
|
engine: mediawiki
|
||||||
weight: 0.5
|
weight: 0.5
|
||||||
|
@ -2033,16 +2057,6 @@ engines:
|
||||||
# query_str: 'SELECT * from mytable WHERE fieldname=%(query)s'
|
# query_str: 'SELECT * from mytable WHERE fieldname=%(query)s'
|
||||||
# shortcut: mysql
|
# shortcut: mysql
|
||||||
|
|
||||||
# Required dependency: mariadb
|
|
||||||
# - name: mariadb
|
|
||||||
# engine: mariadb_server
|
|
||||||
# database: mydatabase
|
|
||||||
# username: user
|
|
||||||
# password: pass
|
|
||||||
# limit: 10
|
|
||||||
# query_str: 'SELECT * from mytable WHERE fieldname=%(query)s'
|
|
||||||
# shortcut: mdb
|
|
||||||
|
|
||||||
- name: 1337x
|
- name: 1337x
|
||||||
engine: 1337x
|
engine: 1337x
|
||||||
shortcut: 1337x
|
shortcut: 1337x
|
||||||
|
|
|
@ -18,7 +18,7 @@ searx_dir = abspath(dirname(__file__))
|
||||||
logger = logging.getLogger('searx')
|
logger = logging.getLogger('searx')
|
||||||
OUTPUT_FORMATS = ['html', 'csv', 'json', 'rss']
|
OUTPUT_FORMATS = ['html', 'csv', 'json', 'rss']
|
||||||
SXNG_LOCALE_TAGS = ['all', 'auto'] + list(l[0] for l in sxng_locales)
|
SXNG_LOCALE_TAGS = ['all', 'auto'] + list(l[0] for l in sxng_locales)
|
||||||
SIMPLE_STYLE = ('auto', 'light', 'dark', 'black')
|
SIMPLE_STYLE = ('auto', 'light', 'dark')
|
||||||
CATEGORIES_AS_TABS = {
|
CATEGORIES_AS_TABS = {
|
||||||
'general': {},
|
'general': {},
|
||||||
'images': {},
|
'images': {},
|
||||||
|
@ -156,7 +156,6 @@ SCHEMA = {
|
||||||
'safe_search': SettingsValue((0, 1, 2), 0),
|
'safe_search': SettingsValue((0, 1, 2), 0),
|
||||||
'autocomplete': SettingsValue(str, ''),
|
'autocomplete': SettingsValue(str, ''),
|
||||||
'autocomplete_min': SettingsValue(int, 4),
|
'autocomplete_min': SettingsValue(int, 4),
|
||||||
'favicon_resolver': SettingsValue(str, ''),
|
|
||||||
'default_lang': SettingsValue(tuple(SXNG_LOCALE_TAGS + ['']), ''),
|
'default_lang': SettingsValue(tuple(SXNG_LOCALE_TAGS + ['']), ''),
|
||||||
'languages': SettingSublistValue(SXNG_LOCALE_TAGS, SXNG_LOCALE_TAGS),
|
'languages': SettingSublistValue(SXNG_LOCALE_TAGS, SXNG_LOCALE_TAGS),
|
||||||
'ban_time_on_fail': SettingsValue(numbers.Real, 5),
|
'ban_time_on_fail': SettingsValue(numbers.Real, 5),
|
||||||
|
|
|
@ -1,323 +0,0 @@
|
||||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
|
||||||
"""Implementations to make access to SQLite databases a little more convenient.
|
|
||||||
|
|
||||||
:py:obj:`SQLiteAppl`
|
|
||||||
Abstract class with which DB applications can be implemented.
|
|
||||||
|
|
||||||
:py:obj:`SQLiteProperties`:
|
|
||||||
Class to manage properties stored in a database.
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
"""
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import sys
|
|
||||||
import re
|
|
||||||
import sqlite3
|
|
||||||
import threading
|
|
||||||
import abc
|
|
||||||
|
|
||||||
from searx import logger
|
|
||||||
|
|
||||||
logger = logger.getChild('sqlitedb')
|
|
||||||
|
|
||||||
|
|
||||||
class SQLiteAppl(abc.ABC):
|
|
||||||
"""Abstract base class for implementing convenient DB access in SQLite
|
|
||||||
applications. In the constructor, a :py:obj:`SQLiteProperties` instance is
|
|
||||||
already aggregated under ``self.properties``."""
|
|
||||||
|
|
||||||
DDL_CREATE_TABLES: dict[str, str] = {}
|
|
||||||
|
|
||||||
DB_SCHEMA: int = 1
|
|
||||||
"""As soon as changes are made to the DB schema, the version number must be
|
|
||||||
increased. Changes to the version number require the DB to be recreated (or
|
|
||||||
migrated / if a migration path exists and is implemented)."""
|
|
||||||
|
|
||||||
SQLITE_THREADING_MODE = {
|
|
||||||
0: "single-thread",
|
|
||||||
1: "multi-thread",
|
|
||||||
3: "serialized"}[sqlite3.threadsafety] # fmt:skip
|
|
||||||
"""Threading mode of the SQLite library. Depends on the options used at
|
|
||||||
compile time and is different for different distributions and architectures.
|
|
||||||
|
|
||||||
Possible values are 0:``single-thread``, 1:``multi-thread``,
|
|
||||||
3:``serialized`` (see :py:obj:`sqlite3.threadsafety`). Pre-Python 3.11
|
|
||||||
this value was hard coded to 1.
|
|
||||||
|
|
||||||
Depending on this value, optimizations are made, e.g. in “serialized” mode
|
|
||||||
it is not necessary to create a separate DB connector for each thread.
|
|
||||||
"""
|
|
||||||
|
|
||||||
SQLITE_JOURNAL_MODE = "WAL"
|
|
||||||
SQLITE_CONNECT_ARGS = {
|
|
||||||
# "timeout": 5.0,
|
|
||||||
# "detect_types": 0,
|
|
||||||
"check_same_thread": bool(SQLITE_THREADING_MODE != "serialized"),
|
|
||||||
"cached_statements": 0, # https://github.com/python/cpython/issues/118172
|
|
||||||
# "uri": False,
|
|
||||||
"autocommit": False,
|
|
||||||
} # fmt:skip
|
|
||||||
"""Connection arguments (:py:obj:`sqlite3.connect`)
|
|
||||||
|
|
||||||
``check_same_thread``:
|
|
||||||
Is disabled by default when :py:obj:`SQLITE_THREADING_MODE` is
|
|
||||||
``serialized``. The check is more of a hindrance in this case because it
|
|
||||||
would prevent a DB connector from being used in multiple threads.
|
|
||||||
|
|
||||||
``autocommit``:
|
|
||||||
Is disabled by default. Note: autocommit option has been added in Python
|
|
||||||
3.12.
|
|
||||||
|
|
||||||
``cached_statements``:
|
|
||||||
Is set to ``0`` by default. Note: Python 3.12+ fetch results are not
|
|
||||||
consistent in multi-threading applications, causing an API misuse error.
|
|
||||||
|
|
||||||
The multithreading use in SQLiteAppl is intended and supported if
|
|
||||||
threadsafety is set to 3 (aka "serialized"). CPython supports “serialized”
|
|
||||||
from version 3.12 on, but unfortunately only with errors:
|
|
||||||
|
|
||||||
- https://github.com/python/cpython/issues/118172
|
|
||||||
- https://github.com/python/cpython/issues/123873
|
|
||||||
|
|
||||||
The workaround for SQLite3 multithreading cache inconsistency is to set
|
|
||||||
option ``cached_statements`` to ``0`` by default.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, db_url):
|
|
||||||
|
|
||||||
self.db_url = db_url
|
|
||||||
self.properties = SQLiteProperties(db_url)
|
|
||||||
self.thread_local = threading.local()
|
|
||||||
self._init_done = False
|
|
||||||
self._compatibility()
|
|
||||||
|
|
||||||
def _compatibility(self):
|
|
||||||
|
|
||||||
if self.SQLITE_THREADING_MODE == "serialized":
|
|
||||||
self._DB = None
|
|
||||||
else:
|
|
||||||
msg = (
|
|
||||||
f"SQLite library is compiled with {self.SQLITE_THREADING_MODE} mode,"
|
|
||||||
" read https://docs.python.org/3/library/sqlite3.html#sqlite3.threadsafety"
|
|
||||||
)
|
|
||||||
if threading.active_count() > 1:
|
|
||||||
logger.error(msg)
|
|
||||||
else:
|
|
||||||
logger.warning(msg)
|
|
||||||
|
|
||||||
if sqlite3.sqlite_version_info <= (3, 35):
|
|
||||||
# See "Generalize UPSERT:" in https://sqlite.org/releaselog/3_35_0.html
|
|
||||||
logger.critical(
|
|
||||||
"SQLite runtime library version %s is not supported (require >= 3.35)", sqlite3.sqlite_version
|
|
||||||
)
|
|
||||||
|
|
||||||
def connect(self) -> sqlite3.Connection:
|
|
||||||
"""Creates a new DB connection (:py:obj:`SQLITE_CONNECT_ARGS`). If not
|
|
||||||
already done, the DB schema is set up
|
|
||||||
"""
|
|
||||||
if sys.version_info < (3, 12):
|
|
||||||
# Prior to Python 3.12 there is no "autocommit" option
|
|
||||||
self.SQLITE_CONNECT_ARGS.pop("autocommit", None)
|
|
||||||
|
|
||||||
self.init()
|
|
||||||
logger.debug("%s: connect to DB: %s // %s", self.__class__.__name__, self.db_url, self.SQLITE_CONNECT_ARGS)
|
|
||||||
conn = sqlite3.Connection(self.db_url, **self.SQLITE_CONNECT_ARGS) # type: ignore
|
|
||||||
conn.execute(f"PRAGMA journal_mode={self.SQLITE_JOURNAL_MODE}")
|
|
||||||
self.register_functions(conn)
|
|
||||||
return conn
|
|
||||||
|
|
||||||
def register_functions(self, conn):
|
|
||||||
"""Create user-defined_ SQL functions.
|
|
||||||
|
|
||||||
``REGEXP(<pattern>, <field>)`` : 0 | 1
|
|
||||||
`re.search`_ returns (int) 1 for a match and 0 for none match of
|
|
||||||
``<pattern>`` in ``<field>``.
|
|
||||||
|
|
||||||
.. code:: sql
|
|
||||||
|
|
||||||
SELECT '12' AS field WHERE REGEXP('^[0-9][0-9]$', field)
|
|
||||||
-- 12
|
|
||||||
|
|
||||||
SELECT REGEXP('[0-9][0-9]', 'X12Y')
|
|
||||||
-- 1
|
|
||||||
SELECT REGEXP('[0-9][0-9]', 'X1Y')
|
|
||||||
-- 0
|
|
||||||
|
|
||||||
.. _user-defined: https://docs.python.org/3/library/sqlite3.html#sqlite3.Connection.create_function
|
|
||||||
.. _deterministic: https://sqlite.org/deterministic.html
|
|
||||||
.. _re.search: https://docs.python.org/3/library/re.html#re.search
|
|
||||||
"""
|
|
||||||
|
|
||||||
conn.create_function('regexp', 2, lambda x, y: 1 if re.search(x, y) else 0, deterministic=True)
|
|
||||||
|
|
||||||
@property
|
|
||||||
def DB(self) -> sqlite3.Connection:
|
|
||||||
"""Provides a DB connection. The connection is a *singleton* and
|
|
||||||
therefore well suited for read access. If
|
|
||||||
:py:obj:`SQLITE_THREADING_MODE` is ``serialized`` only one DB connection
|
|
||||||
is created for all threads.
|
|
||||||
|
|
||||||
.. note::
|
|
||||||
|
|
||||||
For dedicated `transaction control`_, it is recommended to create a
|
|
||||||
new connection (:py:obj:`SQLiteAppl.connect`).
|
|
||||||
|
|
||||||
.. _transaction control:
|
|
||||||
https://docs.python.org/3/library/sqlite3.html#sqlite3-controlling-transactions
|
|
||||||
"""
|
|
||||||
|
|
||||||
if getattr(self.thread_local, 'DB', None) is None:
|
|
||||||
self.thread_local.DB = self.connect()
|
|
||||||
|
|
||||||
# Theoretically it is possible to reuse the DB cursor across threads as
|
|
||||||
# of Python 3.12, in practice the threading of the cursor seems to me to
|
|
||||||
# be so faulty that I prefer to establish one connection per thread
|
|
||||||
|
|
||||||
self.thread_local.DB.commit()
|
|
||||||
return self.thread_local.DB
|
|
||||||
|
|
||||||
# In "serialized" mode, SQLite can be safely used by multiple threads
|
|
||||||
# with no restriction.
|
|
||||||
#
|
|
||||||
# if self.SQLITE_THREADING_MODE != "serialized":
|
|
||||||
# if getattr(self.thread_local, 'DB', None) is None:
|
|
||||||
# self.thread_local.DB = self.connect()
|
|
||||||
# return self.thread_local.DB
|
|
||||||
#
|
|
||||||
# if self._DB is None:
|
|
||||||
# self._DB = self.connect() # pylint: disable=attribute-defined-outside-init
|
|
||||||
# return self._DB
|
|
||||||
|
|
||||||
def init(self):
|
|
||||||
"""Initializes the DB schema and properties, is only executed once even
|
|
||||||
if called several times."""
|
|
||||||
|
|
||||||
if self._init_done:
|
|
||||||
return
|
|
||||||
self._init_done = True
|
|
||||||
|
|
||||||
logger.debug("init DB: %s", self.db_url)
|
|
||||||
self.properties.init()
|
|
||||||
ver = self.properties("DB_SCHEMA")
|
|
||||||
if ver is None:
|
|
||||||
with self.properties.DB:
|
|
||||||
self.create_schema(self.properties.DB)
|
|
||||||
else:
|
|
||||||
ver = int(ver)
|
|
||||||
if ver != self.DB_SCHEMA:
|
|
||||||
raise sqlite3.DatabaseError("Expected DB schema v%s, DB schema is v%s" % (self.DB_SCHEMA, ver))
|
|
||||||
logger.debug("DB_SCHEMA = %s", ver)
|
|
||||||
|
|
||||||
def create_schema(self, conn):
|
|
||||||
|
|
||||||
logger.debug("create schema ..")
|
|
||||||
with conn:
|
|
||||||
for table_name, sql in self.DDL_CREATE_TABLES.items():
|
|
||||||
conn.execute(sql)
|
|
||||||
self.properties.set(f"Table {table_name} created", table_name)
|
|
||||||
self.properties.set("DB_SCHEMA", self.DB_SCHEMA)
|
|
||||||
self.properties.set("LAST_MAINTENANCE", "")
|
|
||||||
|
|
||||||
|
|
||||||
class SQLiteProperties(SQLiteAppl):
|
|
||||||
"""Simple class to manage properties of a DB application in the DB. The
|
|
||||||
object has its own DB connection and transaction area.
|
|
||||||
|
|
||||||
.. code:: sql
|
|
||||||
|
|
||||||
CREATE TABLE IF NOT EXISTS properties (
|
|
||||||
name TEXT,
|
|
||||||
value TEXT,
|
|
||||||
m_time INTEGER DEFAULT (strftime('%s', 'now')),
|
|
||||||
PRIMARY KEY (name))
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
SQLITE_JOURNAL_MODE = "WAL"
|
|
||||||
|
|
||||||
DDL_PROPERTIES = """\
|
|
||||||
CREATE TABLE IF NOT EXISTS properties (
|
|
||||||
name TEXT,
|
|
||||||
value TEXT,
|
|
||||||
m_time INTEGER DEFAULT (strftime('%s', 'now')), -- last modified (unix epoch) time in sec.
|
|
||||||
PRIMARY KEY (name))"""
|
|
||||||
|
|
||||||
"""Table to store properties of the DB application"""
|
|
||||||
|
|
||||||
SQL_GET = "SELECT value FROM properties WHERE name = ?"
|
|
||||||
SQL_M_TIME = "SELECT m_time FROM properties WHERE name = ?"
|
|
||||||
SQL_SET = (
|
|
||||||
"INSERT INTO properties (name, value) VALUES (?, ?)"
|
|
||||||
" ON CONFLICT(name) DO UPDATE"
|
|
||||||
" SET value=excluded.value, m_time=strftime('%s', 'now')"
|
|
||||||
)
|
|
||||||
SQL_TABLE_EXISTS = (
|
|
||||||
"SELECT name FROM sqlite_master"
|
|
||||||
" WHERE type='table' AND name='properties'"
|
|
||||||
) # fmt:skip
|
|
||||||
SQLITE_CONNECT_ARGS = dict(SQLiteAppl.SQLITE_CONNECT_ARGS)
|
|
||||||
SQLITE_CONNECT_ARGS["autocommit"] = True # This option has no effect before Python 3.12
|
|
||||||
|
|
||||||
def __init__(self, db_url: str): # pylint: disable=super-init-not-called
|
|
||||||
|
|
||||||
self.db_url = db_url
|
|
||||||
self.thread_local = threading.local()
|
|
||||||
self._init_done = False
|
|
||||||
self._compatibility()
|
|
||||||
|
|
||||||
def init(self):
|
|
||||||
"""Initializes DB schema of the properties in the DB."""
|
|
||||||
|
|
||||||
if self._init_done:
|
|
||||||
return
|
|
||||||
self._init_done = True
|
|
||||||
logger.debug("init properties of DB: %s", self.db_url)
|
|
||||||
with self.DB as conn:
|
|
||||||
res = conn.execute(self.SQL_TABLE_EXISTS)
|
|
||||||
if res.fetchone() is None: # DB schema needs to be created
|
|
||||||
self.create_schema(conn)
|
|
||||||
|
|
||||||
def __call__(self, name, default=None):
|
|
||||||
"""Returns the value of the property ``name`` or ``default`` if property
|
|
||||||
does not exist in the DB."""
|
|
||||||
|
|
||||||
res = self.DB.execute(self.SQL_GET, (name,)).fetchone()
|
|
||||||
if res is None:
|
|
||||||
return default
|
|
||||||
return res[0]
|
|
||||||
|
|
||||||
def set(self, name, value):
|
|
||||||
"""Set ``value`` of property ``name`` in DB. If property already
|
|
||||||
exists, update the ``m_time`` (and the value)."""
|
|
||||||
|
|
||||||
self.DB.execute(self.SQL_SET, (name, value))
|
|
||||||
|
|
||||||
if sys.version_info <= (3, 12):
|
|
||||||
# Prior to Python 3.12 there is no "autocommit" option / let's commit
|
|
||||||
# explicitly.
|
|
||||||
self.DB.commit()
|
|
||||||
|
|
||||||
def row(self, name, default=None):
|
|
||||||
"""Returns the DB row of property ``name`` or ``default`` if property
|
|
||||||
does not exist in the DB."""
|
|
||||||
|
|
||||||
cur = self.DB.cursor()
|
|
||||||
cur.execute("SELECT * FROM properties WHERE name = ?", (name,))
|
|
||||||
res = cur.fetchone()
|
|
||||||
if res is None:
|
|
||||||
return default
|
|
||||||
col_names = [column[0] for column in cur.description]
|
|
||||||
return dict(zip(col_names, res))
|
|
||||||
|
|
||||||
def m_time(self, name, default: int = 0) -> int:
|
|
||||||
"""Last modification time of this property."""
|
|
||||||
res = self.DB.execute(self.SQL_M_TIME, (name,)).fetchone()
|
|
||||||
if res is None:
|
|
||||||
return default
|
|
||||||
return int(res[0])
|
|
||||||
|
|
||||||
def create_schema(self, conn):
|
|
||||||
with conn:
|
|
||||||
conn.execute(self.DDL_PROPERTIES)
|
|
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -1,4 +0,0 @@
-<svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24">
-<path fill="#58f" d="M11 20.85a.92.92 0 0 1-1.1.93A10 10 0 0 1 2.06 13c-.06-.55.4-1 .95-1h3a1 1 0 0 1 1 1 3 3 0 0 0 3 3 1 1 0 0 1 1 1v3.85Zm6-1.92c0 .77.83 1.23 1.42.74a10 10 0 0 0 2.03-2.32c.39-.61-.09-1.35-.81-1.35H18a1 1 0 0 0-1 1v1.93ZM12 2a10 10 0 0 1 6.65 2.53c.61.55.17 1.47-.65 1.47h-.15A2.85 2.85 0 0 0 15 8.85c0 .33-.18.62-.47.77l-.08.04a1 1 0 0 1-.9 0l-.08-.04a.85.85 0 0 1-.47-.77A2.85 2.85 0 0 0 10.15 6H10a1 1 0 0 1-1-1V3.2c0-.44.28-.84.7-.94C10.45 2.1 11.22 2 12 2Z"/>
-<path fill="#58f" d="M3.42 10c-.63 0-1.1-.58-.9-1.18.6-1.8 1.7-3.36 3.12-4.53C6.2 3.82 7 4.26 7 5a3 3 0 0 0 3 3h.15c.47 0 .85.38.85.85 0 1.09.61 2.07 1.58 2.56l.08.04a3 3 0 0 0 2.68 0l.08-.04A2.85 2.85 0 0 0 17 8.85c0-.47.38-.85.85-.85h2.66c.4 0 .77.23.9.6a9.98 9.98 0 0 1 .52 4.6.94.94 0 0 1-.95.8H18a3 3 0 0 0-3 3v3.8c0 .44-.28.84-.7.94l-.2.04a.92.92 0 0 1-1.1-.93V17a3 3 0 0 0-3-3 1 1 0 0 1-1-1 3 3 0 0 0-3-3H3.42Z"/>
-</svg>
@@ -56,6 +56,7 @@
 z-index: 100;
 margin-top: 3.5rem;
 border-radius: 0.8rem;
+box-shadow: 0 2px 8px rgb(34 38 46 / 25%);

 &:empty {
 display: none;
@@ -29,7 +29,7 @@
 }

 span.linenos {
-color: var(--color-line-number);
+color: #64708d;
 }
 }
 }
@@ -115,17 +115,6 @@
 --color-toolkit-loader-borderleft: rgba(255, 255, 255, 0);
 --color-doc-code: #003;
 --color-doc-code-background: #ddeaff;
-/// Other misc colors
---color-bar-chart-primary: #5bc0de;
---color-bar-chart-secondary: #deb15b;
---color-image-resolution-background: rgba(0, 0, 0, 50%);
---color-image-resolution-font: #fff;
---color-loading-indicator: rgba(255, 255, 255, 0.2);
---color-loading-indicator-gap: #fff;
---color-line-number: #64708d;
-// Favicons Colors
---color-favicon-background-color: #ddd;
---color-favicon-border-color: #ccc;
 }

 .dark-themes() {
@@ -238,17 +227,6 @@
 --color-toolkit-loader-borderleft: rgba(0, 0, 0, 0);
 --color-doc-code: #ddd;
 --color-doc-code-background: #4d5a6f;
-// Favicons Colors
---color-favicon-background-color: #ddd;
---color-favicon-border-color: #ccc;
-}
-
-.black-themes() {
---color-base-background: #000;
---color-base-background-mobile: #000;
---color-header-background: #000;
---color-footer-background: #000;
---color-sidebar-background: #000;
 }

 /// Dark Theme (autoswitch based on device pref)
@@ -263,11 +241,6 @@
 .dark-themes();
 }

-:root.theme-black {
-.dark-themes();
-.black-themes();
-}
-
 /// General Size
 @results-width: 45rem;
 @results-sidebar-width: 25rem;
@@ -277,7 +250,7 @@
 @results-margin: 0.125rem;
 @result-padding: 1rem;
 @results-image-row-height: 12rem;
-@results-image-row-height-phone: 10rem;
+@results-image-row-height-phone: 6rem;
 @search-width: 44rem;
 // heigh of #search, see detail.less
 @search-height: 7.6rem;
@@ -2,7 +2,18 @@
 .stats_endpoint {
 .github-issue-button {
 display: block;
+padding: 8px 16px;
+font-family: sans-serif;
 font-size: 16px;
+color: white;
+background-color: #238636;
+border: #2ea043;
+border-radius: 10px !important;
+box-shadow: rgba(0, 0, 0, 0) 0 0 0 0;
+}
+
+.github-issue-button:hover {
+background-color: #2ea043;
 }

 .issue-hide {
@@ -378,12 +378,3 @@ html.no-js #clear_search.hide_if_nojs {
 #categories_container {
 position: relative;
 }
-
-.favicon img {
-height: 1.5rem;
-width: 1.5rem;
-border-radius: 10%;
-background-color: var(--color-favicon-background-color);
-border: 1px solid var(--color-favicon-border-color);
-display: flex;
-}
@@ -75,18 +75,18 @@ failed-test {
 }

 .bar-chart-bar {
-border: 3px solid var(--color-bar-chart-primary);
+border: 3px solid #5bc0de;
 margin: 1px 0;
 }

 .bar-chart-serie1 {
-border: 3px solid var(--color-bar-chart-primary);
+border: 3px solid #5bc0de;
 margin: 1px 0;
 float: left;
 }

 .bar-chart-serie2 {
-border: 3px solid var(--color-bar-chart-secondary);
+border: 3px solid #deb15b;
 margin: 1px 0;
 float: left;
 }
@@ -94,10 +94,6 @@
 direction: initial;
 text-align: right;

-.result .url_header {
-direction: rtl;
-}
-
 .result .url_wrapper {
 justify-content: end;
 }
@@ -170,8 +170,6 @@ article[data-vim-selected].category-social {
 .result {
 margin: @results-margin 0;
 padding: @result-padding;
-box-sizing: border-box;
-width: 100%;
 .ltr-border-left(0.2rem solid transparent);

 h3 {
@@ -234,14 +232,8 @@ article[data-vim-selected].category-social {
 }
 }

-.url_header {
-display: flex;
-gap: 0.5rem;
-}
-
 .url_wrapper {
 display: flex;
-align-items: center;
 font-size: 1rem;
 color: var(--color-result-url-font);
 flex-wrap: nowrap;
@@ -456,7 +448,6 @@ article[data-vim-selected].category-social {
 margin: 0.25rem;
 border: none !important;
 height: @results-image-row-height;
-width: unset;

 & > a {
 position: relative;
@@ -478,10 +469,10 @@ article[data-vim-selected].category-social {
 position: absolute;
 right: 0;
 bottom: 0;
-background: var(--color-image-resolution-background);
+background: rgba(0, 0, 0, 50%);
 padding: 0.3rem 0.5rem;
 font-size: 0.9rem;
-color: var(--color-image-resolution-font);
+color: #fff;
 border-top-left-radius: 0.3rem;
 }

@@ -1099,8 +1090,7 @@ summary.title {
 .result {
 background: var(--color-result-background);
 border: 1px solid var(--color-result-background);
-margin: 1rem 2%;
-width: 96%;
+margin: 1rem 10px;
 .rounded-corners;
 }

@@ -1108,7 +1098,6 @@ summary.title {
 margin: 0;
 height: @results-image-row-height-phone;
 background: var(--color-base-background-mobile);
-width: unset;
 }

 .infobox {
@@ -10,10 +10,10 @@
 font-size: 10px;
 position: relative;
 text-indent: -9999em;
-border-top: 1.1em solid var(--color-loading-indicator);
-border-right: 1.1em solid var(--color-loading-indicator);
-border-bottom: 1.1em solid var(--color-loading-indicator);
-border-left: 1.1em solid var(--color-loading-indicator-gap);
+border-top: 1.1em solid rgba(255, 255, 255, 0.2);
+border-right: 1.1em solid rgba(255, 255, 255, 0.2);
+border-bottom: 1.1em solid rgba(255, 255, 255, 0.2);
+border-left: 1.1em solid #fff;
 -webkit-transform: translateZ(0);
 -ms-transform: translateZ(0);
 transform: translateZ(0);
@@ -16,9 +16,9 @@ sxng_locales = (
 ('bg', 'Български', '', 'Bulgarian', '\U0001f310'),
 ('bg-BG', 'Български', 'България', 'Bulgarian', '\U0001f1e7\U0001f1ec'),
 ('ca', 'Català', '', 'Catalan', '\U0001f310'),
+('ca-ES', 'Català', 'Espanya', 'Catalan', '\U0001f1ea\U0001f1f8'),
 ('cs', 'Čeština', '', 'Czech', '\U0001f310'),
 ('cs-CZ', 'Čeština', 'Česko', 'Czech', '\U0001f1e8\U0001f1ff'),
-('cy', 'Cymraeg', '', 'Welsh', '\U0001f310'),
 ('da', 'Dansk', '', 'Danish', '\U0001f310'),
 ('da-DK', 'Dansk', 'Danmark', 'Danish', '\U0001f1e9\U0001f1f0'),
 ('de', 'Deutsch', '', 'German', '\U0001f310'),
@@ -56,8 +56,6 @@ sxng_locales = (
 ('fr-CA', 'Français', 'Canada', 'French', '\U0001f1e8\U0001f1e6'),
 ('fr-CH', 'Français', 'Suisse', 'French', '\U0001f1e8\U0001f1ed'),
 ('fr-FR', 'Français', 'France', 'French', '\U0001f1eb\U0001f1f7'),
-('ga', 'Gaeilge', '', 'Irish', '\U0001f310'),
-('gd', 'Gàidhlig', '', 'Scottish Gaelic', '\U0001f310'),
 ('gl', 'Galego', '', 'Galician', '\U0001f310'),
 ('he', 'עברית', '', 'Hebrew', '\U0001f1ee\U0001f1f1'),
 ('hi', 'हिन्दी', '', 'Hindi', '\U0001f310'),
@@ -94,7 +92,6 @@ sxng_locales = (
 ('ru-RU', 'Русский', 'Россия', 'Russian', '\U0001f1f7\U0001f1fa'),
 ('sk', 'Slovenčina', '', 'Slovak', '\U0001f310'),
 ('sl', 'Slovenščina', '', 'Slovenian', '\U0001f310'),
-('sq', 'Shqip', '', 'Albanian', '\U0001f310'),
 ('sv', 'Svenska', '', 'Swedish', '\U0001f310'),
 ('sv-SE', 'Svenska', 'Sverige', 'Swedish', '\U0001f1f8\U0001f1ea'),
 ('ta', 'தமிழ்', '', 'Tamil', '\U0001f310'),
@@ -103,8 +100,10 @@ sxng_locales = (
 ('tr', 'Türkçe', '', 'Turkish', '\U0001f310'),
 ('tr-TR', 'Türkçe', 'Türkiye', 'Turkish', '\U0001f1f9\U0001f1f7'),
 ('uk', 'Українська', '', 'Ukrainian', '\U0001f310'),
+('uk-UA', 'Українська', 'Україна', 'Ukrainian', '\U0001f1fa\U0001f1e6'),
 ('ur', 'اردو', '', 'Urdu', '\U0001f310'),
 ('vi', 'Tiếng Việt', '', 'Vietnamese', '\U0001f310'),
+('vi-VN', 'Tiếng Việt', 'Việt Nam', 'Vietnamese', '\U0001f1fb\U0001f1f3'),
 ('zh', '中文', '', 'Chinese', '\U0001f310'),
 ('zh-CN', '中文', '中国', 'Chinese', '\U0001f1e8\U0001f1f3'),
 ('zh-HK', '中文', '中國香港特別行政區', 'Chinese', '\U0001f1ed\U0001f1f0'),
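As the hunks above show, each sxng_locales entry is a 5-tuple of (locale tag, native language name, native territory name, English language name, flag or globe emoji). A small editor-added sketch, not part of the diff, of how such a table can be queried by language tag; the truncated tuple below is an excerpt reproduced only for illustration.

# Editor's sketch: a truncated stand-in for the sxng_locales tuple above,
# plus a helper that lists the regional variants of one language tag.
sxng_locales = (
    ('uk', 'Українська', '', 'Ukrainian', '\U0001f310'),
    ('uk-UA', 'Українська', 'Україна', 'Ukrainian', '\U0001f1fa\U0001f1e6'),
    ('vi', 'Tiếng Việt', '', 'Vietnamese', '\U0001f310'),
    ('vi-VN', 'Tiếng Việt', 'Việt Nam', 'Vietnamese', '\U0001f1fb\U0001f1f3'),
)

def variants(lang: str):
    """Return all entries whose tag is `lang` or a `lang-REGION` variant."""
    return [e for e in sxng_locales if e[0] == lang or e[0].startswith(lang + '-')]

print(variants('uk'))   # both the generic 'uk' entry and 'uk-UA'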
@@ -1,7 +1,7 @@
 <!DOCTYPE html>
 <html class="no-js theme-{{ preferences.get_value('simple_style') or 'auto' }} center-aligment-{{ preferences.get_value('center_alignment') and 'yes' or 'no' }}" lang="{{ locale_rfc5646 }}" {% if rtl %} dir="rtl"{% endif %}>
 <head>
-<meta charset="UTF-8">
+<meta charset="UTF-8" />
 <meta name="description" content="SearXNG — a privacy-respecting, open metasearch engine">
 <meta name="keywords" content="SearXNG, search, search engine, metasearch, meta search">
 <meta name="generator" content="searxng/{{ searx_version }}">
@@ -13,23 +13,23 @@
 <title>{% block title %}{% endblock %}{{ instance_name }}</title>
 {% block meta %}{% endblock %}
 {% if rtl %}
-<link rel="stylesheet" href="{{ url_for('static', filename='css/searxng-rtl.min.css') }}" type="text/css" media="screen">
+<link rel="stylesheet" href="{{ url_for('static', filename='css/searxng-rtl.min.css') }}" type="text/css" media="screen" />
 {% else %}
-<link rel="stylesheet" href="{{ url_for('static', filename='css/searxng.min.css') }}" type="text/css" media="screen">
+<link rel="stylesheet" href="{{ url_for('static', filename='css/searxng.min.css') }}" type="text/css" media="screen" />
 {% endif %}
 {% if get_setting('server.limiter') or get_setting('server.public_instance') %}
-<link rel="stylesheet" href="{{ url_for('client_token', token=link_token) }}" type="text/css">
+<link rel="stylesheet" href="{{ url_for('client_token', token=link_token) }}" type="text/css" />
 {% endif %}
 {% block styles %}{% endblock %}
 <!--[if gte IE 9]>-->
 <script src="{{ url_for('static', filename='js/searxng.head.min.js') }}" client_settings="{{ client_settings }}"></script>
 <!--<![endif]-->
 {% block head %}
-<link title="{{ instance_name }}" type="application/opensearchdescription+xml" rel="search" href="{{ opensearch_url }}">
+<link title="{{ instance_name }}" type="application/opensearchdescription+xml" rel="search" href="{{ opensearch_url }}"/>
 {% endblock %}
 <link rel="icon" href="{{ url_for('static', filename='img/favicon.png') }}" sizes="any">
 <link rel="icon" href="{{ url_for('static', filename='img/favicon.svg') }}" type="image/svg+xml">
-<link rel="apple-touch-icon" href="{{ url_for('static', filename='img/favicon.png') }}">
+<link rel="apple-touch-icon" href="{{ url_for('static', filename='img/favicon.png') }}"/>
 </head>
 <body class="{{ endpoint }}_endpoint" >
 <main id="main_{{ self._TemplateReference__context.name|replace("simple/", "")|replace(".html", "") }}" class="{{body_class}}">
@@ -65,7 +65,7 @@
 </main>
 <footer>
 <p>
-{{ _('Powered by') }} <a href="{{ url_for('info', pagename='about') }}">searxng</a> - {{ searx_version }} — {{ _('a privacy-respecting, open metasearch engine') }}<br>
+{{ _('Powered by') }} <a href="{{ url_for('info', pagename='about') }}">searxng</a> - {{ searx_version }} — {{ _('a privacy-respecting, open metasearch engine') }}<br/>
 <a href="{{ searx_git_url }}">{{ _('Source code') }}</a>
 | <a href="{{ get_setting('brand.issue_url') }}">{{ _('Issue tracker') }}</a>
 {% if enable_metrics %}| <a href="{{ url_for('stats') }}">{{ _('Engine stats') }}</a>{% endif %}
@@ -16,7 +16,7 @@
 {%- if not search_on_category_select or not display_tooltip -%}
 {%- for category in categories -%}
 <div class="category category_checkbox">{{- '' -}}
-<input type="checkbox" id="checkbox_{{ category|replace(' ', '_') }}" name="category_{{ category }}"{% if category in selected_categories %} checked="checked"{% endif %}>
+<input type="checkbox" id="checkbox_{{ category|replace(' ', '_') }}" name="category_{{ category }}"{% if category in selected_categories %} checked="checked"{% endif %}/>
 <label for="checkbox_{{ category|replace(' ', '_') }}" class="tooltips">
 {{- icon_big(category_icons[category]) if category in category_icons else icon_big('globe-outline') -}}
 <div class="category_name">{{- _(category) -}}</div>
@@ -38,7 +38,7 @@
 <input type="hidden" name="safesearch" value="{{ safesearch }}">
 <input type="hidden" name="theme" value="{{ theme }}">
 {%- if timeout_limit -%}<input type="hidden" name="timeout_limit" value="{{ timeout_limit|e }}" >{%- endif -%}
-<input type="submit" value="{{ suggestion }}">
+<input type="submit" value="{{ suggestion }}" />
 </form>
 {%- endfor -%}
 </div>
@@ -20,15 +20,10 @@
 <!-- Draw result header -->
 {% macro result_header(result, favicons, image_proxify) -%}
 <article class="result {% if result['template'] %}result-{{ result.template|replace('.html', '') }}{% else %}result-default{% endif %} {% if result['category'] %}category-{{ result['category'] }}{% endif %}{% for e in result.engines %} {{ e }}{% endfor %}">
-{{- result_open_link(result.url, "url_header") -}}
-{%- if favicon_resolver != "" %}
-<div class="favicon"><img loading="lazy" src="{{ favicon_url(result.parsed_url.netloc) }}"></div>
-{%- endif -%}
-<div class="url_wrapper">
+{{- result_open_link(result.url, "url_wrapper") -}}
 {%- for part in get_pretty_url(result.parsed_url) -%}
 <span class="url_o{{loop.index}}"><span class="url_i{{loop.index}}">{{- part -}}</span></span>
 {%- endfor %}
-</div>
 {{- result_close_link() -}}
 {%- if result.thumbnail %}{{ result_open_link(result.url) }}<img class="thumbnail" src="{{ image_proxify(result.thumbnail) }}" title="{{ result.title|striptags }}" loading="lazy">{{ result_close_link() }}{% endif -%}
 <h3>{{ result_link(result.url, result.title|safe) }}</h3>
@@ -69,7 +69,7 @@ or manually by executing the searx/webapp.py file? -->
 <label class="step1 step1_delay" for="step2" >{{ _('I confirm there is no existing bug about the issue I encounter') }}</label>
 <div class="step2 step_content">
 <p>{{ _('If this is a public instance, please specify the URL in the bug report') }}</p>
-<button type="submit" class="github-issue-button button" title="{{ get_setting('brand.new_issue_url') }}">{{ _('Submit a new issue on Github including the above information') }}</button>
+<button type="submit" class="github-issue-button" title="{{ get_setting('brand.new_issue_url') }}">{{ _('Submit a new issue on Github including the above information') }}</button>
 </div>
 </form>
 {% endmacro %}
@@ -6,9 +6,9 @@
 {%- endmacro -%}

 {%- macro tab_header(name, id, label, checked) -%}
-<input type="radio" name="{{ name }}" id="tab-{{ id }}" {% if checked is sameas true %}checked="checked"{% endif %}>
+<input type="radio" name="{{ name }}" id="tab-{{ id }}" {% if checked is sameas true %}checked="checked"{% endif %} />
 <label id="tab-label-{{ id }}" for="tab-{{ id }}" role="tab" aria-controls="tab-content-{{ id }}">{{ label }}</label>
-<section id="tab-content-{{ id }}" role="tabpanel" aria-hidden="false">
+<section id="tab-content-{{ id }}" role="tabpanel" aria-labelledby="tab-label-{{ id }}" aria-hidden="false">
 {%- endmacro -%}

 {%- macro tab_footer() -%}
@@ -23,17 +23,17 @@
 {%- if checked == '?' -%}
 {{- icon_small('warning') -}}
 {%- else -%}
-<input type="checkbox" {%- if name %} name="{{ name }}" {%- endif %} value="None" {%- if checked %} checked {%- endif -%}{%- if disabled %} disabled {%- endif -%}>
+<input type="checkbox" {%- if name %} name="{{ name }}" {%- endif %} value="None" {%- if checked %} checked {%- endif -%}{%- if disabled %} disabled {%- endif -%}/>
 {%- endif -%}
 {%- endmacro -%}

-{%- macro checkbox_onoff_reversed(name, checked, labelledby) -%}
+{%- macro checkbox_onoff_reversed(name, checked) -%}
 <input type="checkbox" {{- ' ' -}}
 name="{{ name }}" {{- ' ' -}}
 id="{{ name }}" {{- ' ' -}}
-{%- if labelledby -%} aria-labelledby="{{ labelledby }}"{{- ' ' -}}{%- endif -%}
+aria-labelledby="pref_{{ name }}"{{- ' ' -}}
 class="checkbox-onoff reversed-checkbox"{{- ' ' -}}
-{%- if checked -%} checked{%- endif -%}>
+{%- if checked -%} checked{%- endif -%}/>
 {%- endmacro -%}

 {%- macro plugin_preferences(section) -%}
@@ -42,9 +42,9 @@
 <fieldset>{{- '' -}}
 <legend>{{ _(plugin.name) }}</legend>{{- '' -}}
 <div class="value">
-{{- checkbox_onoff_reversed('plugin_' + plugin.id, plugin.id not in allowed_plugins, 'plugin_labelledby' + plugin.id) -}}
+{{- checkbox_onoff_reversed('plugin_' + plugin.id, plugin.id not in allowed_plugins) -}}
 </div>{{- '' -}}
-<div class="description" id="{{ 'plugin_labelledby' + plugin.id }}">
+<div class="description">
 {{- _(plugin.description) -}}
 </div>{{- '' -}}
 </fieldset>
@@ -90,7 +90,7 @@
 <td class="{{ label }}">{{- '' -}}
 {%- if stats[engine_name].time != None -%}
 <span class="stacked-bar-chart-value">{{- stats[engine_name].time -}}</span>{{- '' -}}
-<span class="stacked-bar-chart" aria-hidden="true">
+<span class="stacked-bar-chart" aria-labelledby="{{engine_name}}_chart" aria-hidden="true">
 {%- if max_rate95 is not none and max_rate95 > 0 -%}
 <div class="stacked-bar-chart-median bar{{ (100 * (stats[engine_name].time / max_rate95))|round }}"></div>{{- '' -}}
 <div class="stacked-bar-chart-rate80 bar{{ (100 * ((stats[engine_name].rate80 - stats[engine_name].time) / max_rate95))|round }}"></div>{{- '' -}}
@@ -127,7 +127,7 @@
 {%- if checker_result or errors -%}
 <td class="{{ label }} column-reliability">{{- '' -}}
 <a href="{{ url_for('stats', engine=engine_name|e) }}">{{- '' -}}
-<span>
+<span aria-labelledby="{{engine_name}}_reliability">
 {{- icon_big('warning', 'The engine is not reliabled') }} {{ r -}}
 </span>{{- '' -}}
 </a>{{- '' -}}
@@ -173,9 +173,6 @@
 {%- if 'autocomplete' not in locked_preferences -%}
 {%- include 'simple/preferences/autocomplete.html' -%}
 {%- endif -%}
-{%- if 'favicon' not in locked_preferences -%}
-{%- include 'simple/preferences/favicon.html' -%}
-{%- endif -%}
 {% if 'safesearch' not in locked_preferences %}
 {%- include 'simple/preferences/safesearch.html' -%}
 {%- endif -%}
@@ -33,10 +33,10 @@
 {%- for plugin in plugins -%}
 {%- if plugin.preference_section == 'query' -%}
 <tr>{{- '' -}}
-<td class="checkbox-col">{{- checkbox_onoff_reversed('plugin_' + plugin.id, plugin.id not in allowed_plugins, 'plugin_labelledby' + plugin.id) -}}</td>{{- '' -}}
+<td class="checkbox-col">{{- checkbox_onoff_reversed('plugin_' + plugin.id, plugin.id not in allowed_plugins) -}}</td>{{- '' -}}
 <td>{{ plugin.query_keywords|join(', ') }}</td>{{- '' -}}
 <td>{{ _(plugin.name) }}</td>{{- '' -}}
-<td id="{{ 'plugin_labelledby' + plugin.id }}">{{ _(plugin.description) }}</td>{{- '' -}}
+<td>{{ _(plugin.description) }}</td>{{- '' -}}
 <td>{{ plugin.query_examples }}</td>{{- '' -}}
 </tr>
 {%- endif -%}
@@ -8,7 +8,7 @@
 {%- if preferences.get_value('center_alignment') -%}
 checked
 {%- endif -%}{{- ' ' -}}
->{{- '' -}}
+/>{{- '' -}}
 </p>{{- '' -}}
 <div class="description">
 {{- _('Displays results in the center of the page (Oscar layout).') -}}
@@ -1,7 +1,7 @@
 <p class="text-muted">
 {{- _('This is the list of cookies and their values SearXNG is storing on your computer.') }}
-<br>{{- _('With that list, you can assess SearXNG transparency.') -}}
-<br>{{- '' -}}
+<br />{{- _('With that list, you can assess SearXNG transparency.') -}}
+<br />{{- '' -}}
 </p>
 {% if cookies %}
 <table class="cookies">
Some files were not shown because too many files have changed in this diff.