Mirror of https://github.com/searxng/searxng.git — synced 2025-12-22 19:50:00 +00:00

Compare commits: dependabot ... c954e71f87 (115 commits)
| SHA1 |
|---|
| c954e71f87 |
| cbc04a839a |
| cb4a5abc8c |
| 07ff6e3ccc |
| cdaab944b4 |
| 6ecf32fd4a |
| 20de10df4e |
| 673c29efeb |
| c4abf40e6e |
| 39b9922609 |
| 7018e6583b |
| b957e587da |
| ebb9ea4571 |
| 54a97e1043 |
| 0ee78c19dd |
| bcc7a5eb2e |
| 2313b972a3 |
| 989b49335c |
| 3f30831640 |
| 5fcee9bc30 |
| 2f0e52d6eb |
| c0d69cec4e |
| c852b9a90a |
| b876d0bed0 |
| e245cade25 |
| 7c223b32a7 |
| 33a176813d |
| 20ec01c5f7 |
| 6376601ba1 |
| ca441f419c |
| 04e66a2bb4 |
| b299386d3e |
| 21a4622f23 |
| 041f457dfa |
| af111e413c |
| 431bf5d235 |
| 576c8ca99c |
| 45a4b8ad1c |
| d14d695966 |
| a2a47337cb |
| ba98030438 |
| 1e200a1107 |
| 7a1b959646 |
| b9b46431be |
| 3f18c0f40f |
| 1cfbd32a1d |
| a15b594003 |
| 24d27a7a21 |
| 7af922c9df |
| b1918dd121 |
| 1be19f8b58 |
| 3763b4bff4 |
| 52ffc4c7f4 |
| 0245327fc5 |
| b155e66fe5 |
| 5712827703 |
| 7ba53d302d |
| b8e4ebdc0c |
| b37d09557a |
| aa28af772c |
| 9c2b8f2f93 |
| c48993452f |
| 6a2196c03d |
| dce383881d |
| 1ebedcbc17 |
| 5d99877d8d |
| adc1a2a1ea |
| 43065c5026 |
| ea4a55fa57 |
| d514dea5cc |
| 22e1d30017 |
| 4ca75a0450 |
| 50a4c653dc |
| b7f9b489c9 |
| 2cdbbb249a |
| edfa71cdea |
| 8dacbbbb15 |
| b770a46e1f |
| 2c880f6084 |
| c41b769f97 |
| e363db970c |
| 16293132e3 |
| f70120b0b9 |
| a8f3644cdc |
| 4295e758c0 |
| 33e798b01b |
| d84ae96cf9 |
| 9371658531 |
| ee6d4f322f |
| 3725aef6f3 |
| e840e3f960 |
| a6bb1ecf87 |
| 636738779e |
| 1d138c5968 |
| 3e7e404fda |
| 602a73df9a |
| 57622793bf |
| 080f3a5f87 |
| f54cf643b2 |
| dd82d785ce |
| f6cdd16449 |
| 576d30ffcd |
| c34bb61284 |
| 8baefcc21e |
| fc7d8b8be2 |
| 5492de15bb |
| ced08e12aa |
| 613c1aa8eb |
| 899cf7e08a |
| 362cc13aeb |
| d28a1c434f |
| 21d0428cf2 |
| f0dfe3cc0e |
| 0559b9bfcf |
| 37f7960266 |
.github/workflows/checker.yml (vendored) — 6 changes
@@ -15,7 +15,7 @@ permissions:
   contents: read
 
 env:
-  PYTHON_VERSION: "3.13"
+  PYTHON_VERSION: "3.14"
 
 jobs:
   search:
@@ -24,12 +24,12 @@ jobs:
     runs-on: ubuntu-24.04-arm
     steps:
       - name: Setup Python
-        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+        uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
         with:
           python-version: "${{ env.PYTHON_VERSION }}"
 
       - name: Checkout
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
         with:
           persist-credentials: "false"
.github/workflows/container.yml (vendored) — 135 changes
@@ -18,106 +18,34 @@ concurrency:
 
 permissions:
   contents: read
   # Organization GHCR
   packages: read
 
 env:
-  PYTHON_VERSION: "3.13"
+  PYTHON_VERSION: "3.14"
 
 jobs:
-  build-base:
-    if: |
-      (github.repository_owner == 'searxng' && github.event.workflow_run.conclusion == 'success')
-      || github.event_name == 'workflow_dispatch'
-    name: Build base
-    runs-on: ubuntu-24.04
-    permissions:
-      # Organization GHCR
-      packages: write
-
-    steps:
-      - if: github.repository_owner == 'searxng'
-        name: Checkout
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
-        with:
-          persist-credentials: "false"
-
-      - if: github.repository_owner == 'searxng'
-        name: Get date
-        id: date
-        run: echo "date=$(date +'%Y%m%d')" >>$GITHUB_OUTPUT
-
-      - if: github.repository_owner == 'searxng'
-        name: Check cache apko
-        id: cache-apko
-        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
-        with:
-          # yamllint disable-line rule:line-length
-          key: "apko-${{ steps.date.outputs.date }}-${{ hashFiles('./container/base.yml', './container/base-builder.yml') }}"
-          path: "/tmp/.apko/"
-          lookup-only: true
-
-      - if: github.repository_owner == 'searxng' && steps.cache-apko.outputs.cache-hit != 'true'
-        name: Setup cache apko
-        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
-        with:
-          # yamllint disable-line rule:line-length
-          key: "apko-${{ steps.date.outputs.date }}-${{ hashFiles('./container/base.yml', './container/base-builder.yml') }}"
-          restore-keys: "apko-${{ steps.date.outputs.date }}-"
-          path: "/tmp/.apko/"
-
-      - if: github.repository_owner == 'searxng' && steps.cache-apko.outputs.cache-hit != 'true'
-        name: Setup apko
-        run: |
-          eval "$(/home/linuxbrew/.linuxbrew/bin/brew shellenv)"
-          brew install apko
-
-      - if: github.repository_owner == 'searxng' && steps.cache-apko.outputs.cache-hit != 'true'
-        name: Login to GHCR
-        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
-        with:
-          registry: "ghcr.io"
-          username: "${{ github.repository_owner }}"
-          password: "${{ secrets.GITHUB_TOKEN }}"
-
-      - if: github.repository_owner == 'searxng' && steps.cache-apko.outputs.cache-hit != 'true'
-        name: Build
-        run: |
-          eval "$(/home/linuxbrew/.linuxbrew/bin/brew shellenv)"
-
-          apko publish ./container/base.yml ghcr.io/${{ github.repository_owner }}/base:searxng \
-            --cache-dir=/tmp/.apko/ \
-            --sbom=false \
-            --vcs=false \
-            --log-level=debug
-
-          apko publish ./container/base-builder.yml ghcr.io/${{ github.repository_owner }}/base:searxng-builder \
-            --cache-dir=/tmp/.apko/ \
-            --sbom=false \
-            --vcs=false \
-            --log-level=debug
-
   build:
     if: github.repository_owner == 'searxng' || github.event_name == 'workflow_dispatch'
     name: Build (${{ matrix.arch }})
     runs-on: ${{ matrix.os }}
-    needs: build-base
     strategy:
       fail-fast: false
       matrix:
         include:
           - arch: amd64
             march: amd64
             os: ubuntu-24.04
             emulation: false
           - arch: arm64
             march: arm64
             os: ubuntu-24.04-arm
             emulation: false
           - arch: armv7
             march: arm64
             os: ubuntu-24.04-arm
             emulation: true
 
     permissions:
       # Organization GHCR
       packages: write
 
     outputs:
@@ -125,13 +53,37 @@ jobs:
       git_url: ${{ steps.build.outputs.git_url }}
 
     steps:
+      # yamllint disable rule:line-length
+      - name: Setup podman
+        env:
+          PODMAN_VERSION: "v5.6.2"
+        run: |
+          # dpkg man-db trigger is very slow on GHA runners
+          # https://github.com/actions/runner-images/issues/10977
+          # https://github.com/actions/runner/issues/4030
+          sudo rm -f /var/lib/man-db/auto-update
+
+          sudo apt-get purge -y podman runc crun conmon
+
+          curl -fsSLO "https://github.com/mgoltzsche/podman-static/releases/download/${{ env.PODMAN_VERSION }}/podman-linux-${{ matrix.march }}.tar.gz"
+          curl -fsSLO "https://github.com/mgoltzsche/podman-static/releases/download/${{ env.PODMAN_VERSION }}/podman-linux-${{ matrix.march }}.tar.gz.asc"
+          gpg --keyserver hkps://keyserver.ubuntu.com --recv-keys 0CCF102C4F95D89E583FF1D4F8B5AF50344BB503
+          gpg --batch --verify "podman-linux-${{ matrix.march }}.tar.gz.asc" "podman-linux-${{ matrix.march }}.tar.gz"
+
+          tar -xzf "podman-linux-${{ matrix.march }}.tar.gz"
+          sudo cp -rfv ./podman-linux-${{ matrix.march }}/etc/. /etc/
+          sudo cp -rfv ./podman-linux-${{ matrix.march }}/usr/. /usr/
+
+          sudo sysctl -w kernel.apparmor_restrict_unprivileged_userns=0
+      # yamllint enable rule:line-length
+
       - name: Setup Python
-        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+        uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
         with:
          python-version: "${{ env.PYTHON_VERSION }}"
 
       - name: Checkout
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
         with:
           persist-credentials: "false"
           fetch-depth: "0"
@@ -143,16 +95,22 @@ jobs:
           restore-keys: "python-${{ env.PYTHON_VERSION }}-${{ runner.arch }}-"
           path: "./local/"
 
-      - name: Setup cache container uv
+      - name: Get date
+        id: date
+        run: echo "date=$(date +'%Y%m%d')" >>$GITHUB_OUTPUT
+
+      - name: Setup cache container
         uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
-          key: "container-uv-${{ matrix.arch }}-${{ hashFiles('./requirements*.txt') }}"
-          restore-keys: "container-uv-${{ matrix.arch }}-"
-          path: "/var/tmp/buildah-cache-1001/uv/"
+          key: "container-${{ matrix.arch }}-${{ steps.date.outputs.date }}-${{ hashFiles('./requirements*.txt') }}"
+          restore-keys: |
+            "container-${{ matrix.arch }}-${{ steps.date.outputs.date }}-"
+            "container-${{ matrix.arch }}-"
+          path: "/var/tmp/buildah-cache-*/*"
 
       - if: ${{ matrix.emulation }}
         name: Setup QEMU
-        uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
+        uses: docker/setup-qemu-action@c7c53464625b32c7a7e944ae62b3e17d2b600130 # v3.7.0
 
       - name: Login to GHCR
         uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
@@ -187,13 +145,13 @@ jobs:
 
     steps:
       - name: Checkout
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
         with:
           persist-credentials: "false"
 
       - if: ${{ matrix.emulation }}
         name: Setup QEMU
-        uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
+        uses: docker/setup-qemu-action@c7c53464625b32c7a7e944ae62b3e17d2b600130 # v3.7.0
 
       - name: Login to GHCR
         uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
@@ -217,12 +175,11 @@ jobs:
       - test
 
     permissions:
       # Organization GHCR
       packages: write
 
     steps:
       - name: Checkout
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
         with:
           persist-credentials: "false"
 
@@ -237,8 +194,8 @@ jobs:
         uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
         with:
           registry: "docker.io"
-          username: "${{ secrets.DOCKERHUB_USERNAME }}"
-          password: "${{ secrets.DOCKERHUB_TOKEN }}"
+          username: "${{ secrets.DOCKER_USER }}"
+          password: "${{ secrets.DOCKER_TOKEN }}"
 
       - name: Release
         env:
.github/workflows/data-update.yml (vendored) — 8 changes
@@ -15,7 +15,7 @@ permissions:
   contents: read
 
 env:
-  PYTHON_VERSION: "3.13"
+  PYTHON_VERSION: "3.14"
 
 jobs:
   data:
@@ -40,12 +40,12 @@ jobs:
 
     steps:
       - name: Setup Python
-        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+        uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
         with:
           python-version: "${{ env.PYTHON_VERSION }}"
 
       - name: Checkout
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
         with:
           persist-credentials: "false"
 
@@ -64,7 +64,7 @@ jobs:
 
       - name: Create PR
         id: cpr
-        uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
+        uses: peter-evans/create-pull-request@84ae59a2cdc2258d6fa0732dd66352dddae2a412 # v7.0.9
         with:
           author: "searxng-bot <searxng-bot@users.noreply.github.com>"
           committer: "searxng-bot <searxng-bot@users.noreply.github.com>"
.github/workflows/documentation.yml (vendored) — 8 changes
@@ -19,7 +19,7 @@ permissions:
   contents: read
 
 env:
-  PYTHON_VERSION: "3.13"
+  PYTHON_VERSION: "3.14"
 
 jobs:
   release:
@@ -32,12 +32,12 @@ jobs:
 
     steps:
       - name: Setup Python
-        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+        uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
         with:
           python-version: "${{ env.PYTHON_VERSION }}"
 
       - name: Checkout
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
         with:
           persist-credentials: "false"
           fetch-depth: "0"
@@ -57,7 +57,7 @@ jobs:
 
       - if: github.ref_name == 'master'
         name: Release
-        uses: JamesIves/github-pages-deploy-action@6c2d9db40f9296374acc17b90404b6e8864128c8 # v4.7.3
+        uses: JamesIves/github-pages-deploy-action@4a3abc783e1a24aeb44c16e869ad83caf6b4cc23 # v4.7.4
         with:
           folder: "dist/docs"
           branch: "gh-pages"
.github/workflows/integration.yml (vendored) — 12 changes
@@ -18,7 +18,7 @@ permissions:
   contents: read
 
 env:
-  PYTHON_VERSION: "3.13"
+  PYTHON_VERSION: "3.14"
 
 jobs:
   test:
@@ -35,12 +35,12 @@ jobs:
 
     steps:
       - name: Setup Python
-        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+        uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
         with:
           python-version: "${{ matrix.python-version }}"
 
       - name: Checkout
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
         with:
           persist-credentials: "false"
 
@@ -62,17 +62,17 @@ jobs:
     runs-on: ubuntu-24.04-arm
     steps:
       - name: Setup Python
-        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+        uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
         with:
           python-version: "${{ env.PYTHON_VERSION }}"
 
       - name: Checkout
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
         with:
           persist-credentials: "false"
 
       - name: Setup Node.js
-        uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
+        uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
         with:
           node-version-file: "./.nvmrc"
.github/workflows/l10n.yml (vendored) — 12 changes
@@ -22,7 +22,7 @@ permissions:
   contents: read
 
 env:
-  PYTHON_VERSION: "3.13"
+  PYTHON_VERSION: "3.14"
 
 jobs:
   update:
@@ -35,12 +35,12 @@ jobs:
 
     steps:
       - name: Setup Python
-        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+        uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
         with:
           python-version: "${{ env.PYTHON_VERSION }}"
 
       - name: Checkout
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
         with:
           token: "${{ secrets.WEBLATE_GITHUB_TOKEN }}"
           fetch-depth: "0"
@@ -82,12 +82,12 @@ jobs:
 
     steps:
       - name: Setup Python
-        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+        uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
         with:
           python-version: "${{ env.PYTHON_VERSION }}"
 
       - name: Checkout
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
         with:
           token: "${{ secrets.WEBLATE_GITHUB_TOKEN }}"
           fetch-depth: "0"
@@ -117,7 +117,7 @@ jobs:
 
       - name: Create PR
         id: cpr
-        uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
+        uses: peter-evans/create-pull-request@84ae59a2cdc2258d6fa0732dd66352dddae2a412 # v7.0.9
         with:
           author: "searxng-bot <searxng-bot@users.noreply.github.com>"
           committer: "searxng-bot <searxng-bot@users.noreply.github.com>"
.github/workflows/security.yml (vendored) — 8 changes
@@ -24,7 +24,7 @@ jobs:
 
     steps:
       - name: Checkout
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
         with:
           persist-credentials: "false"
 
@@ -32,8 +32,8 @@ jobs:
         uses: docker/scout-action@f8c776824083494ab0d56b8105ba2ca85c86e4de # v1.18.2
         with:
           organization: "searxng"
-          dockerhub-user: "${{ secrets.DOCKERHUB_USERNAME }}"
-          dockerhub-password: "${{ secrets.DOCKERHUB_TOKEN }}"
+          dockerhub-user: "${{ secrets.DOCKER_USER }}"
+          dockerhub-password: "${{ secrets.DOCKER_TOKEN }}"
           image: "registry://ghcr.io/searxng/searxng:latest"
           command: "cves"
           sarif-file: "./scout.sarif"
@@ -41,6 +41,6 @@ jobs:
           write-comment: "false"
 
       - name: Upload SARIFs
-        uses: github/codeql-action/upload-sarif@64d10c13136e1c5bce3e5fbde8d4906eeaafc885 # v3.30.6
+        uses: github/codeql-action/upload-sarif@fdbfb4d2750291e159f0156def62b853c2798ca2 # v4.31.5
         with:
           sarif_file: "./scout.sarif"
@@ -162,7 +162,7 @@ no-docstring-rgx=^_
 property-classes=abc.abstractproperty
 
 # Regular expression matching correct variable names
-variable-rgx=(([a-z][a-zA-Z0-9_]{2,30})|(_[a-z0-9_]*)|([a-z]))$
+variable-rgx=([a-zA-Z0-9_]*)$
 
 
 [FORMAT]
@@ -1,8 +1,8 @@
 {
-  "$schema": "https://biomejs.dev/schemas/2.2.5/schema.json",
+  "$schema": "https://biomejs.dev/schemas/2.3.8/schema.json",
   "files": {
     "ignoreUnknown": true,
-    "includes": ["**", "!dist", "!node_modules"]
+    "includes": ["**", "!node_modules"]
   },
   "assist": {
     "enabled": true,
@@ -15,9 +15,9 @@
     }
   },
   "formatter": {
-    "enabled": true,
     "bracketSameLine": false,
     "bracketSpacing": true,
+    "enabled": true,
     "formatWithErrors": false,
     "indentStyle": "space",
     "indentWidth": 2,
@@ -35,24 +35,26 @@
     },
     "correctness": {
       "noGlobalDirnameFilename": "error",
+      "noUndeclaredVariables": {
+        "level": "error",
+        "options": {
+          "checkTypes": true
+        }
+      },
       "useImportExtensions": "error",
       "useJsonImportAttributes": "error",
       "useSingleJsDocAsterisk": "error"
     },
     "nursery": {
       "noContinue": "warn",
+      "noDeprecatedImports": "warn",
       "noEqualsToNull": "warn",
       "noFloatingPromises": "warn",
       "noForIn": "warn",
       "noImportCycles": "warn",
       "noIncrementDecrement": "warn",
       "noMisusedPromises": "warn",
       "noMultiStr": "warn",
+      "noParametersOnlyUsedInRecursion": "warn",
       "noUselessCatchBinding": "warn",
       "noUselessUndefined": "warn",
       "useExhaustiveSwitchCases": "warn",
-      "useExplicitType": "warn"
+      "useExplicitType": "warn",
+      "useFind": "warn"
     },
     "performance": {
       "noAwaitInLoops": "error",
@@ -65,6 +67,7 @@
     "style": {
       "noCommonJs": "error",
       "noEnum": "error",
+      "noImplicitBoolean": "error",
       "noInferrableTypes": "error",
       "noNamespace": "error",
       "noNegationElse": "error",
@@ -109,6 +112,12 @@
           "syntax": "explicit"
         }
       },
+      "useConsistentTypeDefinitions": {
+        "level": "error",
+        "options": {
+          "style": "type"
+        }
+      },
       "useDefaultSwitchClause": "error",
       "useExplicitLengthCheck": "error",
       "useForOf": "error",
@@ -117,6 +126,7 @@
       "useNumericSeparators": "error",
       "useObjectSpread": "error",
+      "useReadonlyClassProperties": "error",
       "useSelfClosingElements": "error",
       "useShorthandAssign": "error",
       "useSingleVarDeclarator": "error",
       "useThrowNewError": "error",
client/simple/package-lock.json (generated) — 801 changes
File diff suppressed because it is too large.

@@ -25,27 +25,27 @@
     "not dead"
   ],
   "dependencies": {
-    "ionicons": "~8.0.0",
+    "ionicons": "~8.0.13",
     "normalize.css": "8.0.1",
-    "ol": "~10.6.0",
+    "ol": "~10.7.0",
     "swiped-events": "1.2.0"
   },
   "devDependencies": {
-    "@biomejs/biome": "2.2.5",
-    "@types/node": "~24.6.2",
-    "browserslist": "~4.26.3",
-    "browserslist-to-esbuild": "~2.1.0",
+    "@biomejs/biome": "2.3.8",
+    "@types/node": "~24.10.1",
+    "browserslist": "~4.28.0",
+    "browserslist-to-esbuild": "~2.1.1",
     "edge.js": "~6.3.0",
-    "less": "~4.4.1",
+    "less": "~4.4.2",
     "lightningcss": "~1.30.2",
-    "sharp": "~0.34.4",
-    "sort-package-json": "~3.4.0",
-    "stylelint": "~16.24.0",
-    "stylelint-config-standard-less": "~3.0.0",
-    "stylelint-prettier": "~5.0.0",
+    "sharp": "~0.34.5",
+    "sort-package-json": "~3.5.0",
+    "stylelint": "~16.26.0",
+    "stylelint-config-standard-less": "~3.0.1",
+    "stylelint-prettier": "~5.0.3",
     "svgo": "~4.0.0",
     "typescript": "~5.9.3",
-    "vite": "npm:rolldown-vite@7.1.15",
+    "vite": "npm:rolldown-vite@7.2.7",
     "vite-bundle-analyzer": "~1.2.3"
   }
 }
@@ -4,11 +4,11 @@ import { Endpoints, endpoint, ready, settings } from "./toolkit.ts";
 
 ready(
   () => {
-    import("../main/keyboard.ts");
-    import("../main/search.ts");
+    void import("../main/keyboard.ts");
+    void import("../main/search.ts");
 
     if (settings.autocomplete) {
-      import("../main/autocomplete.ts");
+      void import("../main/autocomplete.ts");
     }
   },
   { on: [endpoint === Endpoints.index] }
@@ -16,17 +16,17 @@ ready(
 
 ready(
   () => {
-    import("../main/keyboard.ts");
-    import("../main/mapresult.ts");
-    import("../main/results.ts");
-    import("../main/search.ts");
+    void import("../main/keyboard.ts");
+    void import("../main/mapresult.ts");
+    void import("../main/results.ts");
+    void import("../main/search.ts");
 
     if (settings.infinite_scroll) {
-      import("../main/infinite_scroll.ts");
+      void import("../main/infinite_scroll.ts");
     }
 
     if (settings.autocomplete) {
-      import("../main/autocomplete.ts");
+      void import("../main/autocomplete.ts");
     }
   },
   { on: [endpoint === Endpoints.results] }
@@ -34,7 +34,7 @@ ready(
 
 ready(
   () => {
-    import("../main/preferences.ts");
+    void import("../main/preferences.ts");
  },
   { on: [endpoint === Endpoints.preferences] }
 );
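The `void` prefixes above exist to satisfy Biome's `noFloatingPromises` nursery rule enabled earlier in this diff: a dynamic `import()` returns a promise, and `void` documents that it is intentionally not awaited. A minimal sketch of the difference — the module path `"./example.ts"` is a placeholder, not part of the diff:

```typescript
// Flagged by noFloatingPromises: the returned Promise is silently dropped.
//   import("./example.ts");

// OK: `void` evaluates the expression and discards the result on purpose,
// telling both the reader and the linter the load is fire-and-forget.
void import("./example.ts");

// Equivalent explicit form, when a load failure should be reported:
import("./example.ts").catch((error: unknown) => {
  console.error("lazy module failed to load:", error);
});
```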
@@ -83,7 +83,7 @@ const observer: IntersectionObserver = new IntersectionObserver((entries: Inters
   if (paginationEntry?.isIntersecting) {
     observer.unobserve(paginationEntry.target);
 
-    loadNextPage(onlyImages, () => {
+    void loadNextPage(onlyImages, () => {
       const nextObservedElement = document.querySelector<HTMLElement>(observedSelector);
       if (nextObservedElement) {
         observer.observe(nextObservedElement);
@@ -407,12 +407,31 @@ const toggleHelp = (keyBindings: typeof baseKeyBinding): void => {
 };
 
 const copyURLToClipboard = async (): Promise<void> => {
-  const currentUrlElement = document.querySelector<HTMLAnchorElement>(".result[data-vim-selected] h3 a");
-  assertElement(currentUrlElement);
+  const selectedResult = document.querySelector<HTMLElement>(".result[data-vim-selected]");
+  if (!selectedResult) return;
 
-  const url = currentUrlElement.getAttribute("href");
+  const resultAnchor = selectedResult.querySelector<HTMLAnchorElement>("a");
+  assertElement(resultAnchor);
+
+  const url = resultAnchor.getAttribute("href");
   if (url) {
-    await navigator.clipboard.writeText(url);
+    if (window.isSecureContext) {
+      await navigator.clipboard.writeText(url);
+    } else {
+      const selection = window.getSelection();
+      if (selection) {
+        const node = document.createElement("span");
+        node.textContent = url;
+        resultAnchor.appendChild(node);
+
+        const range = document.createRange();
+        range.selectNodeContents(node);
+        selection.removeAllRanges();
+        selection.addRange(range);
+        document.execCommand("copy");
+        node.remove();
+      }
+    }
   }
 };
@@ -22,7 +22,7 @@ listen("click", ".searxng_init_map", async function (this: HTMLElement, event: E
     Feature,
     Point
   } = await import("../pkg/ol.ts");
-  import("ol/ol.css");
+  void import("ol/ol.css");
 
   const { leafletTarget: target, mapLon, mapLat, mapGeojson } = this.dataset;
@@ -1,6 +1,6 @@
 // SPDX-License-Identifier: AGPL-3.0-or-later
 
-import { http, listen, settings } from "../core/toolkit.ts";
+import { assertElement, http, listen, settings } from "../core/toolkit.ts";
 
 let engineDescriptions: Record<string, [string, string]> | undefined;
 
@@ -52,19 +52,25 @@ for (const engine of disableAllEngines) {
   listen("click", engine, () => toggleEngines(false, engineToggles));
 }
 
-const copyHashButton: HTMLElement | null = document.querySelector<HTMLElement>("#copy-hash");
-if (copyHashButton) {
-  listen("click", copyHashButton, async (event: Event) => {
-    event.preventDefault();
+listen("click", "#copy-hash", async function (this: HTMLElement) {
+  const target = this.parentElement?.querySelector<HTMLPreElement>("pre");
+  assertElement(target);
 
-    const { copiedText, hash } = copyHashButton.dataset;
-    if (!(copiedText && hash)) return;
-
-    try {
-      await navigator.clipboard.writeText(hash);
-      copyHashButton.innerText = copiedText;
-    } catch (error) {
-      console.error("Failed to copy hash:", error);
+  if (window.isSecureContext) {
+    await navigator.clipboard.writeText(target.innerText);
+  } else {
+    const selection = window.getSelection();
+    if (selection) {
+      const range = document.createRange();
+      range.selectNodeContents(target);
+      selection.removeAllRanges();
+      selection.addRange(range);
+      document.execCommand("copy");
     }
-  });
-}
+  }
+
+  const copiedText = this.dataset.copiedText;
+  if (copiedText) {
+    this.innerText = copiedText;
+  }
+});
@@ -121,7 +121,19 @@ listen("click", "#copy_url", async function (this: HTMLElement) {
   const target = this.parentElement?.querySelector<HTMLPreElement>("pre");
   assertElement(target);
 
-  await navigator.clipboard.writeText(target.innerText);
+  if (window.isSecureContext) {
+    await navigator.clipboard.writeText(target.innerText);
+  } else {
+    const selection = window.getSelection();
+    if (selection) {
+      const range = document.createRange();
+      range.selectNodeContents(target);
+      selection.removeAllRanges();
+      selection.addRange(range);
+      document.execCommand("copy");
+    }
+  }
 
   const copiedText = this.dataset.copiedText;
   if (copiedText) {
     this.innerText = copiedText;
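The same secure-context check plus selection fallback now appears in three handlers (`copyURLToClipboard`, `#copy-hash`, `#copy_url`). As a sketch only, the shared logic could be factored into one helper — the name `writeClipboard` is ours, not part of the diff:

```typescript
// Hypothetical helper consolidating the clipboard pattern from this diff.
// navigator.clipboard only exists in secure contexts (HTTPS or localhost),
// so plain-HTTP instances fall back to selecting the text and invoking the
// legacy document.execCommand("copy"), which copies the current selection.
const writeClipboard = async (target: HTMLElement): Promise<void> => {
  if (window.isSecureContext) {
    await navigator.clipboard.writeText(target.innerText);
    return;
  }
  const selection = window.getSelection();
  if (selection) {
    const range = document.createRange();
    range.selectNodeContents(target); // select exactly this element's text
    selection.removeAllRanges();
    selection.addRange(range);
    document.execCommand("copy"); // deprecated, but the only non-secure option
    selection.removeAllRanges(); // drop the visible selection again
  }
};
```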
client/simple/src/less/result_types/file.less (new file) — 22 lines
@@ -0,0 +1,22 @@
+// SPDX-License-Identifier: AGPL-3.0-or-later
+
+/*
+  Layout of the Files result class
+*/
+
+#main_results .result-file {
+  border: 1px solid var(--color-result-border);
+  margin: 0 @results-tablet-offset 1rem @results-tablet-offset !important;
+  .rounded-corners;
+
+  video {
+    width: 100%;
+    aspect-ratio: 16 / 9;
+    padding: 10px 0 0 0;
+  }
+
+  audio {
+    width: 100%;
+    padding: 10px 0 0 0;
+  }
+}
@@ -178,7 +178,6 @@ html.no-js #clear_search.hide_if_nojs {
 #send_search {
   display: block;
   margin: 0;
-  padding: 0.8rem;
   background: none repeat scroll 0 0 var(--color-search-background);
   border: none;
   outline: none;
@@ -196,6 +195,7 @@ html.no-js #clear_search.hide_if_nojs {
 
   #send_search {
     .ltr-rounded-right-corners(0.8rem);
+    padding: 0.8rem;
 
     &:hover {
       cursor: pointer;
@@ -163,12 +163,22 @@ article[data-vim-selected].category-videos,
 article[data-vim-selected].category-news,
 article[data-vim-selected].category-map,
 article[data-vim-selected].category-music,
+article[data-vim-selected].category-files,
 article[data-vim-selected].category-social {
   border: 1px solid var(--color-result-vim-arrow);
   .rounded-corners;
 }
 
+.image-label-bottom-right() {
+  position: absolute;
+  right: 0;
+  bottom: 0;
+  background: var(--color-image-resolution-background);
+  padding: 0.3rem 0.5rem;
+  font-size: 0.9rem;
+  color: var(--color-image-resolution-font);
+  border-top-left-radius: 0.3rem;
+}
+
 .result {
   margin: @results-margin 0;
   padding: @result-padding;
@@ -295,12 +305,22 @@ article[data-vim-selected].category-social {
     color: var(--color-result-description-highlight-font);
   }
 
-  img.thumbnail {
+  a.thumbnail_link {
+    position: relative;
+    margin-top: 0.6rem;
+    .ltr-margin-right(1rem);
     .ltr-float-left();
-    padding-top: 0.6rem;
-    .ltr-padding-right(1rem);
-    width: 7rem;
-    height: unset; // remove height value that was needed for lazy loading
+
+    img.thumbnail {
+      width: 7rem;
+      height: unset; // remove height value that was needed for lazy loading
+      display: block;
+    }
+
+    .thumbnail_length {
+      .image-label-bottom-right();
+      right: 6px;
+    }
   }
 
   .break {
@@ -366,7 +386,6 @@ article[data-vim-selected].category-social {
 .category-news,
 .category-map,
 .category-music,
-.category-files,
 .category-social {
   border: 1px solid var(--color-result-border);
   margin: 0 @results-tablet-offset 1rem @results-tablet-offset !important;
@@ -391,23 +410,19 @@ article[data-vim-selected].category-social {
 }
 
 .result-videos {
-  img.thumbnail {
-    .ltr-float-left();
-    padding-top: 0.6rem;
-    .ltr-padding-right(1rem);
+  a.thumbnail_link img.thumbnail {
     width: 20rem;
     height: unset; // remove height value that was needed for lazy loading
   }
-}
 
-.result-videos .content {
-  overflow: hidden;
-}
+  .content {
+    overflow: hidden;
+  }
 
-.result-videos .embedded-video iframe {
-  width: 100%;
-  aspect-ratio: 16 / 9;
-  padding: 10px 0 0 0;
+  .embedded-video iframe {
+    width: 100%;
+    aspect-ratio: 16 / 9;
+    padding: 10px 0 0 0;
+  }
 }
 
 @supports not (aspect-ratio: 1 / 1) {
@@ -472,14 +487,7 @@ article[data-vim-selected].category-social {
   }
 
   .image_resolution {
-    position: absolute;
-    right: 0;
-    bottom: 0;
-    background: var(--color-image-resolution-background);
-    padding: 0.3rem 0.5rem;
-    font-size: 0.9rem;
-    color: var(--color-image-resolution-font);
-    border-top-left-radius: 0.3rem;
+    .image-label-bottom-right();
   }
 
 span.title,
@@ -1158,3 +1166,4 @@ pre code {
 @import "result_types/keyvalue.less";
 @import "result_types/code.less";
 @import "result_types/paper.less";
+@import "result_types/file.less";
@@ -193,6 +193,15 @@ div.selectable_url {
   border-color: var(--color-warning);
 }
 
+.dialog-warning-block {
+  .dialog();
+
+  display: block;
+  color: var(--color-warning);
+  background: var(--color-warning-background);
+  border-color: var(--color-warning);
+}
+
 .dialog-modal {
   .dialog();
 
@@ -4,7 +4,7 @@
  * Custom vite plugins to build the web-client components of the simple theme.
  *
  * HINT:
- * This is an inital implementation for the migration of the build process
+ * This is an initial implementation for the migration of the build process
  * from grunt to vite. For fully support (vite: build & serve) more work is
  * needed.
  */
@@ -1,26 +0,0 @@
-contents:
-  repositories:
-    - https://dl-cdn.alpinelinux.org/alpine/edge/main
-    - https://dl-cdn.alpinelinux.org/alpine/edge/community
-  packages:
-    - alpine-base
-    - build-base
-    - python3-dev
-    - uv
-    - brotli
-
-entrypoint:
-  command: /bin/sh -l
-
-work-dir: /usr/local/searxng/
-
-environment:
-  PATH: /usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
-  SSL_CERT_DIR: /etc/ssl/certs
-  SSL_CERT_FILE: /etc/ssl/certs/ca-certificates.crt
-  HISTFILE: /dev/null
-
-archs:
-  - x86_64
-  - aarch64
-  - armv7
@@ -1,62 +0,0 @@
-contents:
-  repositories:
-    - https://dl-cdn.alpinelinux.org/alpine/edge/main
-  packages:
-    - alpine-baselayout
-    - ca-certificates
-    - ca-certificates-bundle
-    - musl-locales
-    - musl-locales-lang
-    - tzdata
-    - busybox
-    - python3
-    - wget
-
-entrypoint:
-  command: /bin/sh -l
-
-work-dir: /usr/local/searxng/
-
-accounts:
-  groups:
-    - groupname: searxng
-      gid: 977
-  users:
-    - username: searxng
-      uid: 977
-      shell: /bin/ash
-
-environment:
-  PATH: /usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
-  SSL_CERT_DIR: /etc/ssl/certs
-  SSL_CERT_FILE: /etc/ssl/certs/ca-certificates.crt
-  HISTFILE: /dev/null
-  CONFIG_PATH: /etc/searxng
-  DATA_PATH: /var/cache/searxng
-
-paths:
-  # Workdir
-  - path: /usr/local/searxng/
-    type: directory
-    uid: 977
-    gid: 977
-    permissions: 0o555
-
-  # Config volume
-  - path: /etc/searxng/
-    type: directory
-    uid: 977
-    gid: 977
-    permissions: 0o755
-
-  # Data volume
-  - path: /var/cache/searxng/
-    type: directory
-    uid: 977
-    gid: 977
-    permissions: 0o755
-
-archs:
-  - x86_64
-  - aarch64
-  - armv7
@@ -19,8 +19,7 @@ RUN --mount=type=cache,id=uv,target=/root/.cache/uv set -eux -o pipefail; \
     find ./.venv/lib/python*/site-packages/*.dist-info/ -type f -name "RECORD" -exec sort -t, -k1,1 -o {} {} \;; \
     find ./.venv/ -exec touch -h --date="@$TIMESTAMP_VENV" {} +
 
-# use "--exclude=./searx/version_frozen.py" when actions/runner-images updates to Podman 5.0+
-COPY ./searx/ ./searx/
+COPY --exclude=./searx/version_frozen.py ./searx/ ./searx/
 
 ARG TIMESTAMP_SETTINGS="0"
@@ -4,10 +4,10 @@ ARG CONTAINER_IMAGE_NAME="searxng"
 FROM localhost/$CONTAINER_IMAGE_ORGANIZATION/$CONTAINER_IMAGE_NAME:builder AS builder
 FROM ghcr.io/searxng/base:searxng AS dist
 
-COPY --chown=searxng:searxng --from=builder /usr/local/searxng/.venv/ ./.venv/
-COPY --chown=searxng:searxng --from=builder /usr/local/searxng/searx/ ./searx/
-COPY --chown=searxng:searxng ./container/ ./
-#COPY --chown=searxng:searxng ./searx/version_frozen.py ./searx/
+COPY --chown=977:977 --from=builder /usr/local/searxng/.venv/ ./.venv/
+COPY --chown=977:977 --from=builder /usr/local/searxng/searx/ ./searx/
+COPY --chown=977:977 ./container/ ./
+COPY --chown=977:977 ./searx/version_frozen.py ./searx/
 
 ARG CREATED="0001-01-01T00:00:00Z"
 ARG VERSION="unknown"
@@ -48,7 +48,7 @@ solve the CAPTCHA from `qwant.com <https://www.qwant.com/>`__.
 
    .. group-tab:: Firefox
 
-      .. kernel-figure:: answer-captcha/ffox-setting-proxy-socks.png
+      .. kernel-figure:: /assets/answer-captcha/ffox-setting-proxy-socks.png
          :alt: FFox proxy on SOCKS5, 127.0.0.1:8080
 
          Firefox's network settings
@@ -66,4 +66,3 @@ solve the CAPTCHA from `qwant.com <https://www.qwant.com/>`__.
 
 -N
   Do not execute a remote command. This is useful for just forwarding ports.
-
@@ -100,7 +100,7 @@ Basic container instancing example:
    $ cd ./searxng/
 
    # Run the container
-   $ docker run --name searxng --replace -d \
+   $ docker run --name searxng -d \
      -p 8888:8080 \
      -v "./config/:/etc/searxng/" \
      -v "./data/:/var/cache/searxng/" \
@@ -4,22 +4,5 @@
 ``brand:``
 ==========
 
-.. code:: yaml
-
-   brand:
-     issue_url: https://github.com/searxng/searxng/issues
-     docs_url: https://docs.searxng.org
-     public_instances: https://searx.space
-     wiki_url: https://github.com/searxng/searxng/wiki
-
-``issue_url`` :
-  If you host your own issue tracker change this URL.
-
-``docs_url`` :
-  If you host your own documentation change this URL.
-
-``public_instances`` :
-  If you host your own https://searx.space change this URL.
-
-``wiki_url`` :
-  Link to your wiki (or ``false``)
+.. autoclass:: searx.brand.SettingsBrand
+   :members:
[image diff — Before: 59 KiB | After: 59 KiB]
docs/assets/sponsors/browserstack.svg (new file) — 1 line
File diff suppressed because one or more lines are too long. [After: 7.2 KiB]

docs/assets/sponsors/docker.svg (new file) — 1 line
File diff suppressed because one or more lines are too long. [After: 5.9 KiB]

docs/assets/sponsors/tuta.svg (new file) — 1 line [After: 2.4 KiB]
@@ -0,0 +1 @@
+<svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 1024 384"><path fill="#410002" d="M479.178 119.294c-.533-.016-.998.357-1.218 1.078l-24.438 78.364.005-.004c-8.59 27.537 4.516 46.485 34.268 46.485 4.336 0 10.006-.357 11.776-.797.885-.264 1.33-.71 1.594-1.506l5.134-17.177c.264-.973-.09-1.77-1.507-1.683-4.517.445-8.588.797-12.308.797-14.964 0-21.075-7.968-16.646-22.224l10.446-33.47h30.373c.797 0 1.506-.445 1.858-1.418l5.401-17.355c.264-.973-.264-1.681-1.417-1.681H492.66l3.895-12.662c.265-.885.089-1.418-.62-2.034l-15.761-14.255c-.332-.3-.676-.45-.996-.459zm173.64 0c-.532-.016-.996.357-1.218 1.078l-24.436 78.364.005-.004c-8.59 27.537 4.517 46.485 34.268 46.485 4.342 0 10.004-.357 11.778-.797.884-.264 1.324-.71 1.593-1.506l5.133-17.177c.26-.973 0-1.77-1.508-1.683-4.517.445-8.59.797-12.307.797-14.966 0-21.077-7.968-16.646-22.224l10.445-33.47H690.3c.795 0 1.504-.445 1.854-1.418l5.402-17.355c.265-.973-.26-1.681-1.414-1.681H666.3l3.896-12.662c.265-.885.087-1.418-.618-2.034l-15.765-14.255c-.332-.3-.676-.45-.996-.459zm-48.32 29.404c-.974 0-1.503.444-1.862 1.417L590.502 188.9c-7.525 23.998-19.478 37.721-31.965 37.721-12.487 0-17.797-9.83-13.105-24.883l16.028-51.178c.351-1.149-.088-1.857-1.328-1.857H539.94c-.97 0-1.505.444-1.86 1.417l-15.497 49.233.008-.005c-8.765 27.982 5.315 46.31 27.452 46.31 12.747 0 22.756-6.111 29.93-16.118l-.176 12.838c0 1.241.621 1.593 1.681 1.593h14.17c1.064 0 1.504-.445 1.859-1.418l28.512-91.997c.35-1.15-.09-1.858-1.33-1.858zm147.96.005c-43.653 0-60.654 37.719-60.654 62.157-.09 21.339 13.282 34.798 31.08 34.798v.004c11.868 0 21.693-5.314 29.133-16.117v12.836c0 1.061.62 1.596 1.594 1.596h14.166c.974 0 1.505-.446 1.86-1.42l28.777-92.086c.265-.973-.266-1.768-1.24-1.768zm-.616 20.54h17.265l-6.197 19.57c-7.35 23.289-18.684 37.896-32.585 37.896-10.094 0-15.585-6.907-15.585-18.15 0-17.976 13.722-39.315 37.102-39.315z"/><path fill="#850122" d="M226.561 106.964c-.558.007-1.043.428-1.043 1.095V251.59c0 1.594 2.04 1.594 2.48 0L261.38 143.3c.445-1.241.446-2.039-.62-3.1l-33.204-32.762c-.299-.332-.66-.478-.996-.474zm55.983 41.739c-1.241 0-1.594.444-2.039 1.417l-43.919 142.203c-.176.797.177 1.594 1.242 1.594h145.747c1.418 0 2.04-.62 2.48-1.858l44.098-141.499c.445-1.417-.18-1.857-1.417-1.857zm-40.022-58.62c-1.418 0-1.594 1.242-.797 2.04l35.065 35.24c.796.798 1.594 1.061 2.836 1.061h149.467c1.065 0 1.68-1.24.62-2.214l-34.63-34.885c-.796-.796-1.592-1.242-3.274-1.242z"/></svg>
@@ -120,6 +120,7 @@ ${fedora_build}
   pip install -U setuptools
   pip install -U wheel
   pip install -U pyyaml
+  pip install -U msgspec
 
 # jump to SearXNG's working tree and install SearXNG into virtualenv
 (${SERVICE_USER})$ cd \"$SEARXNG_SRC\"
docs/dev/engines/online/azure.rst (new file) — 8 lines
@@ -0,0 +1,8 @@
+.. _azure engine:
+
+===============
+Azure Resources
+===============
+
+.. automodule:: searx.engines.azure
+   :members:
@@ -1,8 +0,0 @@
-.. _voidlinux mullvad_leta:
-
-============
-Mullvad-Leta
-============
-
-.. automodule:: searx.engines.mullvad_leta
-   :members:
docs/dev/engines/online/sourcehut.rst (new file) — 8 lines
@@ -0,0 +1,8 @@
+.. _sourcehut engine:
+
+=========
+Sourcehut
+=========
+
+.. automodule:: searx.engines.sourcehut
+   :members:
docs/dev/result_types/main/file.rst (new file) — 7 lines
@@ -0,0 +1,7 @@
+.. _result_types.file:
+
+============
+File Results
+============
+
+.. automodule:: searx.result_types.file
@@ -17,6 +17,7 @@ following types have been implemented so far ..
    main/keyvalue
    main/code
    main/paper
+   main/file
 
 The :ref:`LegacyResult <LegacyResult>` is used internally for the results that
 have not yet been typed. The templates can be used as orientation until the
@@ -28,5 +29,4 @@ final typing is complete.
 - :ref:`template torrent`
 - :ref:`template map`
 - :ref:`template packages`
-- :ref:`template files`
 - :ref:`template products`
@@ -60,7 +60,7 @@ Fields used in the template :origin:`macro result_sub_header
 publishedDate : :py:obj:`datetime.datetime`
   The date on which the object was published.
 
-length: :py:obj:`time.struct_time`
+length: :py:obj:`datetime.timedelta`
   Playing duration in seconds.
 
 views: :py:class:`str`
@@ -469,38 +469,6 @@ links : :py:class:`dict`
   Additional links in the form of ``{'link_name': 'http://example.com'}``
 
 
-.. _template files:
-
-``files.html``
---------------
-
-Displays result fields from:
-
-- :ref:`macro result_header` and
-- :ref:`macro result_sub_header`
-
-Additional fields used in the :origin:`code.html
-<searx/templates/simple/result_templates/files.html>`:
-
-filename, size, time: :py:class:`str`
-  Filename, Filesize and Date of the file.
-
-mtype : ``audio`` | ``video`` | :py:class:`str`
-  Mimetype type of the file.
-
-subtype : :py:class:`str`
-  Mimetype / subtype of the file.
-
-abstract : :py:class:`str`
-  Abstract of the file.
-
-author : :py:class:`str`
-  Name of the author of the file
-
-embedded : :py:class:`str`
-  URL of an embedded media type (``audio`` or ``video``) / is collapsible.
-
-
 .. _template products:
 
 ``products.html``
@@ -56,4 +56,34 @@ If you don't trust anyone, you can set up your own, see :ref:`installation`.
    utils/index
    src/index
 
+
+----------------
+Acknowledgements
+----------------
+
+The following organizations have provided SearXNG access to their paid plans at
+no cost:
+
+.. flat-table::
+   :widths: 1 1
+
+   * - .. image:: /assets/sponsors/docker.svg
+          :target: https://docker.com
+          :alt: Docker
+          :align: center
+          :height: 100 px
+
+     - .. image:: /assets/sponsors/tuta.svg
+          :target: https://tuta.com
+          :alt: Tuta
+          :align: center
+          :height: 100 px
+
+   * - .. image:: /assets/sponsors/browserstack.svg
+          :target: https://browserstack.com
+          :alt: BrowserStack
+          :align: center
+          :height: 100 px
+
+
 .. _searx.space: https://searx.space
manage — 2 changes
@@ -117,7 +117,7 @@ EOF
 
 dev.env() {
     go.env.dev
-    nvm.env
+    nvm.ensure
     node.env.dev
 
     export GOENV
@@ -2,9 +2,9 @@ mock==5.2.0
 nose2[coverage_plugin]==0.15.1
 cov-core==1.15.0
 black==25.9.0
-pylint==3.3.9
+pylint==4.0.3
 splinter==0.21.0
-selenium==4.36.0
+selenium==4.38.0
 Pallets-Sphinx-Themes==2.3.0
 Sphinx==8.2.3 ; python_version >= '3.11'
 Sphinx==8.1.3 ; python_version < '3.11'
@@ -23,6 +23,6 @@ wlc==1.16.1
 coloredlogs==15.0.1
 docutils>=0.21.2
 parameterized==0.9.0
-granian[reload]==2.5.5
-basedpyright==1.31.6
-types-lxml==2025.8.25
+granian[reload]==2.6.0
+basedpyright==1.34.0
+types-lxml==2025.11.25
@@ -1 +1,2 @@
-granian==2.5.5
+granian==2.6.0
+granian[pname]==2.6.0
@@ -1,4 +1,4 @@
-certifi==2025.10.5
+certifi==2025.11.12
 babel==2.17.0
 flask-babel==4.0.0
 flask==3.1.2
@@ -9,14 +9,13 @@ python-dateutil==2.9.0.post0
 pyyaml==6.0.3
 httpx[http2]==0.28.1
 httpx-socks[asyncio]==0.10.0
 Brotli==1.1.0
-setproctitle==1.3.7
 sniffio==1.3.1
 valkey==6.1.1
 markdown-it-py==3.0.0
 fasttext-predict==0.9.2.4
 tomli==2.3.0; python_version < '3.11'
-msgspec==0.19.0
-typer-slim==0.19.2
+msgspec==0.20.0
+typer-slim==0.20.0
 isodate==0.7.2
 whitenoise==6.11.0
-typing-extensions==4.14.1
+typing-extensions==4.15.0
@@ -9,7 +9,7 @@ from os.path import dirname, abspath
 
 import logging
 
-import searx.unixthreadname  # pylint: disable=unused-import
+import msgspec
 
 # Debug
 LOG_FORMAT_DEBUG: str = '%(levelname)-7s %(name)-30.30s: %(message)s'
@@ -76,20 +76,22 @@ def get_setting(name: str, default: t.Any = _unset) -> t.Any:
     settings and the ``default`` is unset, a :py:obj:`KeyError` is raised.
 
     """
-    value: dict[str, t.Any] = settings
+    value = settings
     for a in name.split('.'):
-        if isinstance(value, dict):
-            value = value.get(a, _unset)
+        if isinstance(value, msgspec.Struct):
+            value = getattr(value, a, _unset)
+        elif isinstance(value, dict):
+            value = value.get(a, _unset)  # pyright: ignore
         else:
-            value = _unset  # type: ignore
+            value = _unset
 
         if value is _unset:
             if default is _unset:
                 raise KeyError(name)
-            value = default  # type: ignore
+            value = default
             break
 
-    return value
+    return value  # pyright: ignore
 
 
 def _is_color_terminal():
searx/brand.py (new file) — 68 lines
@@ -0,0 +1,68 @@
+# SPDX-License-Identifier: AGPL-3.0-or-later
+"""Implementations needed for a branding of SearXNG."""
+# pylint: disable=too-few-public-methods
+
+# Struct fields aren't discovered in Python 3.14
+# - https://github.com/searxng/searxng/issues/5284
+from __future__ import annotations
+
+__all__ = ["SettingsBrand"]
+
+import msgspec
+
+
+class BrandCustom(msgspec.Struct, kw_only=True, forbid_unknown_fields=True):
+    """Custom settings in the brand section."""
+
+    links: dict[str, str] = {}
+    """Custom entries in the footer of the WEB page: ``[title]: [link]``"""
+
+
+class SettingsBrand(msgspec.Struct, kw_only=True, forbid_unknown_fields=True):
+    """Options for configuring brand properties.
+
+    .. code:: yaml
+
+       brand:
+         issue_url: https://github.com/searxng/searxng/issues
+         docs_url: https://docs.searxng.org
+         public_instances: https://searx.space
+         wiki_url: https://github.com/searxng/searxng/wiki
+
+         custom:
+           links:
+             Uptime: https://uptime.searxng.org/history/example-org
+             About: https://example.org/user/about.html
+    """
+
+    issue_url: str = "https://github.com/searxng/searxng/issues"
+    """If you host your own issue tracker change this URL."""
+
+    docs_url: str = "https://docs.searxng.org"
+    """If you host your own documentation change this URL."""
+
+    public_instances: str = "https://searx.space"
+    """If you host your own https://searx.space change this URL."""
+
+    wiki_url: str = "https://github.com/searxng/searxng/wiki"
+    """Link to your wiki (or ``false``)"""
+
+    custom: BrandCustom = msgspec.field(default_factory=BrandCustom)
+    """Optional customizing.
+
+    .. autoclass:: searx.brand.BrandCustom
+       :members:
+    """
+
+    # new_issue_url is a hackish solution tailored for only one hoster (GH). As
+    # long as we don't have a more general solution, we should support it in the
+    # given function, but it should not be expanded further.
+
+    new_issue_url: str = "https://github.com/searxng/searxng/issues/new"
+    """If you host your own issue tracker not on GitHub, then unset this URL.
+
+    Note: This URL will create a pre-filled GitHub bug report form for an
+    engine. Since this feature is implemented only for GH (and limited to
+    engines), it will probably be replaced by another solution in the near
+    future.
+    """
@@ -5,10 +5,6 @@
 ----
 """
 
-# Struct fields aren't discovered in Python 3.14
-# - https://github.com/searxng/searxng/issues/5284
-from __future__ import annotations
-
 __all__ = ["ExpireCacheCfg", "ExpireCacheStats", "ExpireCache", "ExpireCacheSQLite"]
 
 import abc
File diff suppressed because it is too large.
@@ -321,6 +321,7 @@
       "ja": "アルゼンチン・ペソ",
       "ko": "아르헨티나 페소",
       "lt": "Argentinos pesas",
+      "lv": "Argentīnas peso",
       "ms": "Peso Argentina",
       "nl": "Argentijnse peso",
       "oc": "Peso",
@@ -803,6 +804,7 @@
       "ja": "ボリビアーノ",
       "ko": "볼리비아 볼리비아노",
       "lt": "Bolivianas",
+      "lv": "Bolīvijas boliviano",
       "ms": "Boliviano",
       "nl": "Boliviaanse boliviano",
       "oc": "Boliviano",
@@ -848,6 +850,7 @@
       "ja": "レアル",
       "ko": "브라질 헤알",
       "lt": "Brazilijos realas",
+      "lv": "Brazīlijas reāls",
       "ms": "Real Brazil",
       "nl": "Braziliaanse real",
       "oc": "Real",
@@ -932,6 +935,7 @@
       "ja": "ニュルタム",
       "ko": "부탄 눌트럼",
       "lt": "Ngultrumas",
+      "lv": "ngultrums",
       "ml": "ങൾട്രം",
       "ms": "Ngultrum Bhutan",
       "nl": "Bhutaanse ngultrum",
@@ -1327,15 +1331,15 @@
       "cs": "Kolumbijské peso",
       "da": "Colombiansk peso",
       "de": "kolumbianischer Peso",
-      "en": "Colombian peso",
+      "en": "peso",
       "eo": "kolombia peso",
-      "es": "peso colombiano",
+      "es": "peso",
       "et": "Colombia peeso",
       "eu": "Peso kolonbiar",
       "fi": "Kolumbian peso",
       "fr": "peso colombien",
       "ga": "peso na Colóime",
-      "gl": "Peso colombiano",
+      "gl": "peso colombiano",
       "he": "פסו קולומביאני",
       "hr": "Kolumbijski pezo",
       "hu": "kolumbiai peso",
@@ -1411,9 +1415,9 @@
       "cy": "peso (Ciwba)",
       "da": "Cubanske pesos",
       "de": "kubanischer Peso",
-      "en": "Cuban peso",
+      "en": "peso",
       "eo": "kuba peso",
-      "es": "peso cubano",
+      "es": "peso",
       "fi": "Kuuban peso",
       "fr": "peso cubain",
       "ga": "peso Chúba",
@@ -1465,6 +1469,7 @@
       "ja": "カーボベルデ・エスクード",
       "ko": "카보베르데 이스쿠두",
       "lt": "Žaliojo Kyšulio eskudas",
+      "lv": "Kaboverdes eskudo",
       "nl": "Kaapverdische escudo",
       "oc": "Escut de Cap Verd",
       "pl": "escudo Zielonego Przylądka",
@@ -1565,7 +1570,7 @@
       "ar": "كرونة دنماركية",
       "bg": "Датска крона",
       "ca": "corona danesa",
-      "cs": "Dánská koruna",
+      "cs": "dánská koruna",
       "cy": "Krone Danaidd",
       "da": "dansk krone",
       "de": "dänische Krone",
@@ -1715,7 +1720,7 @@
       "nl": "Egyptisch pond",
       "oc": "Liura egipciana",
       "pa": "ਮਿਸਰੀ ਪਾਊਂਡ",
-      "pl": "Funt egipski",
+      "pl": "funt egipski",
       "pt": "libra egípcia",
       "ro": "Liră egipteană",
       "ru": "египетский фунт",
@@ -1772,7 +1777,7 @@
       "de": "Äthiopischer Birr",
       "en": "bir",
       "eo": "etiopa birro",
-      "es": "Birr etíope",
+      "es": "bir etíope",
       "fi": "Etiopian birr",
       "fr": "Birr",
       "ga": "birr",
@@ -2035,6 +2040,7 @@
       "ja": "セディ",
       "ko": "가나 세디",
       "lt": "Sedis",
+      "lv": "Ganas sedi",
       "ms": "Cedi Ghana",
       "nl": "Ghanese cedi",
       "oc": "Cedi",
@@ -2149,6 +2155,7 @@
       "ja": "ギニア・フラン",
       "ko": "기니 프랑",
       "lt": "Gvinėjos frankas",
+      "lv": "Gvinejas franks",
       "ms": "Franc Guinea",
       "nl": "Guineese frank",
       "oc": "Franc guinean",
@@ -2859,6 +2866,7 @@
       "sl": "kirgiški som",
       "sr": "киргиски сом",
       "sv": "Kirgizistansk som",
+      "szl": "Sōm (waluta)",
       "tr": "Kırgızistan somu",
       "tt": "кыргыз сумы",
       "uk": "сом"
@@ -2964,6 +2972,7 @@
       "ms": "Won Korea Utara",
       "nl": "Noord-Koreaanse won",
       "pa": "ਉੱਤਰੀ ਕੋਰੀਆਈ ਵੌਨ",
+      "pap": "won nortkoreano",
       "pl": "Won północnokoreański",
       "pt": "won norte-coreano",
       "ro": "Won nord-coreean",
@@ -3792,9 +3801,9 @@
       "cs": "Mexické peso",
       "cy": "peso (Mecsico)",
       "de": "Mexikanischer Peso",
-      "en": "Mexican peso",
+      "en": "peso",
       "eo": "meksika peso",
-      "es": "peso mexicano",
+      "es": "peso",
       "et": "Mehhiko peeso",
       "eu": "Mexikar peso",
       "fi": "Meksikon peso",
@@ -3810,6 +3819,7 @@
       "ja": "メキシコ・ペソ",
       "ko": "멕시코 페소",
       "lt": "Meksikos pesas",
+      "lv": "Meksikas peso",
       "ms": "Peso Mexico",
       "nl": "Mexicaanse peso",
       "pa": "ਮੈਕਸੀਕੀ ਪੇਸੋ",
@@ -3825,7 +3835,7 @@
       "tr": "Meksika pesosu",
       "tt": "Миксикә писысы",
       "uk": "мексиканський песо",
-      "vi": "Peso Mexico"
+      "vi": "Peso México"
     },
     "MXV": {
       "de": "UNIDAD DE INVERSION",
@@ -3879,7 +3889,7 @@
     "MZN": {
       "ar": "مثقال موزنبيقي",
       "ca": "metical",
-      "cs": "Mosambický metical",
+      "cs": "mosambický metical",
       "cy": "Metical Mosambic",
       "da": "Metical",
       "de": "Metical",
@@ -3972,6 +3982,7 @@
       "ja": "ナイラ",
       "ko": "나이지리아 나이라",
       "lt": "Naira",
+      "lv": "Nigērijas naira",
       "ms": "Naira Nigeria",
       "nl": "Nigeriaanse naira",
       "oc": "Naira",
@@ -4028,7 +4039,7 @@
       "ar": "كرونة نروجية",
       "bg": "норвежка крона",
       "ca": "corona noruega",
-      "cs": "Norská koruna",
+      "cs": "norská koruna",
       "cy": "krone Norwy",
       "da": "norsk krone",
       "de": "norwegische Krone",
@@ -4208,7 +4219,7 @@
       "fi": "Panaman balboa",
       "fr": "Balboa",
       "ga": "balboa Phanama",
-      "gl": "Balboa",
+      "gl": "balboa",
       "he": "בלבואה",
       "hr": "Panamska balboa",
       "hu": "panamai balboa",
@@ -4255,6 +4266,7 @@
       "ja": "ヌエボ・ソル",
       "ko": "페루 솔",
       "lt": "Naujasis solis",
+      "lv": "Peru sols",
       "ms": "Nuevo Sol Peru",
       "nl": "Peruviaanse sol",
       "oc": "Nuevo Sol",
@@ -4269,7 +4281,7 @@
       "tr": "Nuevo Sol",
       "tt": "Перу яңа соле",
       "uk": "Новий соль",
-      "vi": "Sol Peru"
+      "vi": "Sol Perú"
     },
     "PGK": {
       "ar": "كينا بابوا غينيا الجديدة",
@@ -4779,7 +4791,7 @@
       "en": "Solomon Islands dollar",
       "eo": "salomona dolaro",
       "es": "dólar de las Islas Salomón",
-      "fi": "Salomonsaarten dollari",
+      "fi": "Salomoninsaarten dollari",
       "fr": "dollar des îles Salomon",
       "ga": "dollar Oileáin Sholaimh",
       "gl": "Dólar das Illas Salomón",
@@ -4926,7 +4938,7 @@
       "ar": "دولار سنغافوري",
       "bg": "Сингапурски долар",
       "bn": "সিঙ্গাপুর ডলার",
-      "ca": "dòlar de Singapur",
+      "ca": "dòlar singapurès",
       "cs": "Singapurský dolar",
       "da": "singaporeansk dollar",
       "de": "Singapur-Dollar",
@@ -5015,6 +5027,7 @@
       "ja": "レオン",
       "ko": "시에라리온 레온",
       "lt": "leonė",
+      "lv": "Sjerraleones leone",
       "ms": "leone",
       "nl": "Sierra Leoonse leone",
       "oc": "leone",
@@ -5052,6 +5065,7 @@
       "ja": "ソマリア・シリング",
       "ko": "소말리아 실링",
       "lt": "Somalio šilingas",
+      "lv": "Somālijas šiliņš",
       "ms": "Shilling Somalia",
       "nl": "Somalische shilling",
       "pl": "Szyling somalijski",
@@ -5497,7 +5511,7 @@
     "TTD": {
       "ar": "دولار ترينيداد وتوباغو",
      "bg": "Тринидадски и тобагски долар",
-      "ca": "dòlar de Trinitat i Tobago",
+      "ca": "dòlar de Trinidad i Tobago",
       "cs": "Dolar Trinidadu a Tobaga",
       "cy": "doler Trinidad a Thobago",
       "de": "Trinidad-und-Tobago-Dollar",
|
||||
@@ -5534,7 +5548,7 @@
|
||||
"af": "Nuwe Taiwannese dollar",
|
||||
"ar": "دولار تايواني جديد",
|
||||
"bg": "Нов тайвански долар",
|
||||
"ca": "nou dòlar de Taiwan",
|
||||
"ca": "Nou dòlar taiwanès",
|
||||
"cs": "Tchajwanský dolar",
|
||||
"cy": "Doler Newydd Taiwan",
|
||||
"da": "taiwan dollar",
|
||||
@@ -5715,7 +5729,7 @@
|
||||
"lv": "ASV dolārs",
|
||||
"ml": "യുണൈറ്റഡ് സ്റ്റേറ്റ്സ് ഡോളർ",
|
||||
"ms": "Dolar Amerika Syarikat",
|
||||
"nl": "US dollar",
|
||||
"nl": "Amerikaanse dollar",
|
||||
"oc": "dolar american",
|
||||
"pa": "ਸੰਯੁਕਤ ਰਾਜ ਡਾਲਰ",
|
||||
"pap": "Dollar merikano",
|
||||
@@ -5808,7 +5822,9 @@
|
||||
"lt": "Uzbekijos sumas",
|
||||
"lv": "Uzbekistānas soms",
|
||||
"nl": "Oezbeekse sum",
|
||||
"oc": "som ozbèc",
|
||||
"pa": "ਉਜ਼ਬੇਕਿਸਤਾਨੀ ਸੋਮ",
|
||||
"pap": "som usbekistani",
|
||||
"pl": "Sum",
|
||||
"pt": "som usbeque",
|
||||
"ro": "Som uzbec",
|
||||
@@ -5834,6 +5850,7 @@
|
||||
"en": "sovereign bolivar",
|
||||
"es": "bolívar soberano",
|
||||
"fr": "bolivar souverain",
|
||||
"gl": "bolívar soberano",
|
||||
"hu": "venezuelai bolívar",
|
||||
"ja": "ボリバル・ソベラノ",
|
||||
"pt": "Bolívar soberano",
|
||||
@@ -5948,6 +5965,7 @@
|
||||
"sk": "Tala",
|
||||
"sr": "самоанска тала",
|
||||
"sv": "Samoansk Tala",
|
||||
"tr": "Samoa talası",
|
||||
"tt": "самоа таласы",
|
||||
"uk": "Самоанська тала"
|
||||
},
|
||||
@@ -6095,12 +6113,14 @@
|
||||
"hu": "karibi forint",
|
||||
"it": "fiorino caraibico",
|
||||
"ja": "カリブ・ギルダー",
|
||||
"ko": "카리브 휠던",
|
||||
"nl": "Caribische gulden",
|
||||
"pap": "Florin karibense",
|
||||
"pl": "Gulden karaibski",
|
||||
"pt": "Florim do Caribe",
|
||||
"ro": "Gulden caraibian",
|
||||
"ru": "Карибский гульден",
|
||||
"sk": "Karibský gulden",
|
||||
"sl": "karibski goldinar"
|
||||
},
|
||||
"XDR": {
|
||||
@@ -6571,10 +6591,13 @@
|
||||
"R": "ZAR",
|
||||
"R$": "BRL",
|
||||
"RD$": "DOP",
|
||||
"RF": "RWF",
|
||||
"RM": "MYR",
|
||||
"RWF": "RWF",
|
||||
"Rf": "MVR",
|
||||
"Rp": "IDR",
|
||||
"Rs": "LKR",
|
||||
"R₣": "RWF",
|
||||
"S$": "SGD",
|
||||
"S/.": "PEN",
|
||||
"SI$": "SBD",
|
||||
@@ -6594,6 +6617,7 @@
|
||||
"Ush": "UGX",
|
||||
"VT": "VUV",
|
||||
"WS$": "WST",
|
||||
"XAF": "XAF",
|
||||
"XCG": "XCG",
|
||||
"XDR": "XDR",
|
||||
"Z$": "ZWL",
|
||||
@@ -6719,6 +6743,7 @@
|
||||
"argentinské peso": "ARS",
|
||||
"argentinski peso": "ARS",
|
||||
"argentinski pezo": "ARS",
|
||||
"argentīnas peso": "ARS",
|
||||
"ariari": "MGA",
|
||||
"ariari de madagascar": "MGA",
|
||||
"ariari de madagáscar": "MGA",
|
||||
@@ -7038,6 +7063,7 @@
|
||||
"bolívar soberano": "VES",
|
||||
"bolívar sobirà": "VES",
|
||||
"bolíviai boliviano": "BOB",
|
||||
"bolīvijas boliviano": "BOB",
|
||||
"bosenská konvertibilní marka": "BAM",
|
||||
"bosna hersek değiştirilebilir markı": "BAM",
|
||||
"bosnia and herzegovina convertible mark": "BAM",
|
||||
@@ -7074,6 +7100,7 @@
|
||||
"brazilski real": "BRL",
|
||||
"brazilský real": "BRL",
|
||||
"brazílsky real": "BRL",
|
||||
"brazīlijas reāls": "BRL",
|
||||
"brezilya reali": "BRL",
|
||||
"brit font": "GBP",
|
||||
"brita pundo": "GBP",
|
||||
@@ -7147,6 +7174,7 @@
|
||||
"burundžio frankas": "BIF",
|
||||
"butana ngultrumo": "BTN",
|
||||
"butanski ngultrum": "BTN",
|
||||
"butānas ngultrums": "BTN",
|
||||
"butut": "GMD",
|
||||
"bututs": "GMD",
|
||||
"bwp": "BWP",
|
||||
@@ -7818,6 +7846,7 @@
|
||||
"dirrã marroquino": "MAD",
|
||||
"dírham de los emiratos árabes unidos": "AED",
|
||||
"dírham dels emirats àrabs units": "AED",
|
||||
"dírham emiratià": "AED",
|
||||
"dírham marroquí": "MAD",
|
||||
"djf": "DJF",
|
||||
"djiboeti frank": "DJF",
|
||||
@@ -8232,9 +8261,7 @@
|
||||
"dòlar de singapur": "SGD",
|
||||
"dòlar de surinam": "SRD",
|
||||
"dòlar de taiwan": "TWD",
|
||||
"dòlar de trinitat": "TTD",
|
||||
"dòlar de trinitat i tobago": "TTD",
|
||||
"dòlar de trinitat tobago": "TTD",
|
||||
"dòlar de trinidad i tobago": "TTD",
|
||||
"dòlar de zimbàbue": "ZWL",
|
||||
"dòlar del canadà": "CAD",
|
||||
"dòlar del carib oriental": "XCD",
|
||||
@@ -8250,6 +8277,7 @@
|
||||
"dòlar namibià": "NAD",
|
||||
"dòlar neozelandès": "NZD",
|
||||
"dòlar salomonès": "SBD",
|
||||
"dòlar singapurès": "SGD",
|
||||
"dòlar surinamès": "SRD",
|
||||
"dòlar taiwanès": "TWD",
|
||||
"dòlars canadencs": "CAD",
|
||||
@@ -8894,6 +8922,7 @@
|
||||
"gambijski dalasi": "GMD",
|
||||
"gambijský dalasi": "GMD",
|
||||
"ganaa cedio": "GHS",
|
||||
"ganas sedi": "GHS",
|
||||
"ganski cedi": "GHS",
|
||||
"gbp": "GBP",
|
||||
"gbp£": "GBP",
|
||||
@@ -9043,6 +9072,7 @@
|
||||
"gvatemalski kvecal": "GTQ",
|
||||
"gvatemalski quetzal": "GTQ",
|
||||
"gvinea franko": "GNF",
|
||||
"gvinejas franks": "GNF",
|
||||
"gvinejski franak": "GNF",
|
||||
"gvinejski frank": "GNF",
|
||||
"gvinėjos frankas": "GNF",
|
||||
@@ -9370,6 +9400,7 @@
|
||||
"kaaimaneilandse dollar": "KYD",
|
||||
"kaapverdische escudo": "CVE",
|
||||
"kaboverda eskudo": "CVE",
|
||||
"kaboverdes eskudo": "CVE",
|
||||
"kaiman dollar": "KYD",
|
||||
"kaimanu dolārs": "KYD",
|
||||
"kaimanu salu dolārs": "KYD",
|
||||
@@ -9779,6 +9810,7 @@
|
||||
"lari na seoirsia": "GEL",
|
||||
"lario": "GEL",
|
||||
"laris": "GEL",
|
||||
"lári": "GEL",
|
||||
"länsi afrikan cfa frangi": "XOF",
|
||||
"lbp": "LBP",
|
||||
"ld": "LYD",
|
||||
@@ -10305,6 +10337,7 @@
|
||||
"meksika peso": "MXN",
|
||||
"meksika pesosu": "MXN",
|
||||
"meksikaanse peso": "MXN",
|
||||
"meksikas peso": "MXN",
|
||||
"meksikon peso": "MXN",
|
||||
"meksikos pesas": "MXN",
|
||||
"meticais": "MZN",
|
||||
@@ -10513,6 +10546,7 @@
|
||||
"ngultrum na bútáine": "BTN",
|
||||
"ngultrumas": "BTN",
|
||||
"ngultrumo": "BTN",
|
||||
"ngultrums": "BTN",
|
||||
"ngwee": "ZMW",
|
||||
"nhân dân tệ": "CNY",
|
||||
"nhân dân tệ trung quốc": "CNY",
|
||||
@@ -10540,6 +10574,7 @@
|
||||
"nigerijská naira": "NGN",
|
||||
"nigériai naira": "NGN",
|
||||
"nigérijská naira": "NGN",
|
||||
"nigērijas naira": "NGN",
|
||||
"niĝera najro": "NGN",
|
||||
"niĝeria najro": "NGN",
|
||||
"nijerya nairası": "NGN",
|
||||
@@ -10668,7 +10703,6 @@
|
||||
"nuevo dólar taiwanes": "TWD",
|
||||
"nuevo dólar taiwanés": "TWD",
|
||||
"nuevo peso": [
|
||||
"UYU",
|
||||
"MXN",
|
||||
"ARS"
|
||||
],
|
||||
@@ -10866,6 +10900,7 @@
|
||||
"penny": "GBP",
|
||||
"perak sebagai pelaburan": "XAG",
|
||||
"peru nueva solü": "PEN",
|
||||
"peru sols": "PEN",
|
||||
"perua nova suno": "PEN",
|
||||
"peruanischer nuevo sol": "PEN",
|
||||
"peruanischer sol": "PEN",
|
||||
@@ -10940,7 +10975,6 @@
|
||||
"peso de méxico": "MXN",
|
||||
"peso de republica dominicana": "DOP",
|
||||
"peso de república dominicana": "DOP",
|
||||
"peso de uruguay": "UYU",
|
||||
"peso de xile": "CLP",
|
||||
"peso do chile": "CLP",
|
||||
"peso do uruguai": "UYU",
|
||||
@@ -11587,7 +11621,6 @@
|
||||
"rúpia indiana": "INR",
|
||||
"rúpies": "INR",
|
||||
"rūpija": "IDR",
|
||||
"rwanda franc": "RWF",
|
||||
"rwanda frank": "RWF",
|
||||
"rwandan franc": "RWF",
|
||||
"rwandan frank": "RWF",
|
||||
@@ -11629,6 +11662,7 @@
|
||||
"samoa dolaro": "WST",
|
||||
"samoa tala": "WST",
|
||||
"samoa talao": "WST",
|
||||
"samoa talası": "WST",
|
||||
"samoaanse tala": "WST",
|
||||
"samoan tala": "WST",
|
||||
"samoan tālā": "WST",
|
||||
@@ -11827,6 +11861,7 @@
|
||||
"sistema unificato di compensazione regionale": "XSU",
|
||||
"sistema único de compensación regional": "XSU",
|
||||
"sjekel": "ILS",
|
||||
"sjerraleones leone": "SLE",
|
||||
"sjevernokorejski von": "KPW",
|
||||
"sle": "SLE",
|
||||
"sll": "SLE",
|
||||
@@ -11839,10 +11874,10 @@
|
||||
"sol d'or": "PEN",
|
||||
"sol de oro": "PEN",
|
||||
"sol novo": "PEN",
|
||||
"sol peru": "PEN",
|
||||
"sol peruan": "PEN",
|
||||
"sol peruano": "PEN",
|
||||
"sol peruviano": "PEN",
|
||||
"sol perú": "PEN",
|
||||
"solomon adaları doları": "SBD",
|
||||
"solomon dollar": "SBD",
|
||||
"solomon islands dollar": "SBD",
|
||||
@@ -11868,8 +11903,10 @@
|
||||
"som kîrgîz": "KGS",
|
||||
"som na cirgeastáine": "KGS",
|
||||
"som na húisbéiceastáine": "UZS",
|
||||
"som ozbèc": "UZS",
|
||||
"som quirguiz": "KGS",
|
||||
"som usbeco": "UZS",
|
||||
"som usbekistani": "UZS",
|
||||
"som usbeque": "UZS",
|
||||
"som uzbec": "UZS",
|
||||
"som uzbeco": "UZS",
|
||||
@@ -11892,6 +11929,7 @@
|
||||
"somas": "KGS",
|
||||
"somálsky šiling": "SOS",
|
||||
"somálský šilink": "SOS",
|
||||
"somālijas šiliņš": "SOS",
|
||||
"some": "KGS",
|
||||
"somoni": "TJS",
|
||||
"somoni na táidsíceastáine": "TJS",
|
||||
@@ -11915,6 +11953,7 @@
|
||||
"sovjetisk rubel": "RUB",
|
||||
"soʻm": "UZS",
|
||||
"soʻm uzbekistan": "UZS",
|
||||
"sōm": "KGS",
|
||||
"söm": "UZS",
|
||||
"special drawing right": "XDR",
|
||||
"special drawing rights": "XDR",
|
||||
@@ -12660,6 +12699,7 @@
|
||||
"won nord coréen": "KPW",
|
||||
"won nordcoreano": "KPW",
|
||||
"won norte coreano": "KPW",
|
||||
"won nortkoreano": "KPW",
|
||||
"won południowokoreański": "KRW",
|
||||
"won północnokoreański": "KPW",
|
||||
"won sud corean": "KRW",
|
||||
@@ -14440,6 +14480,7 @@
|
||||
"דולר פיג'י": "FJD",
|
||||
"דולר קיימני": "KYD",
|
||||
"דולר קנדי": "CAD",
|
||||
"דולר של איי קיימן": "KYD",
|
||||
"דונג וייטנאמי ": "VND",
|
||||
"דינר אלג'ירי": "DZD",
|
||||
"דינר בחרייני": "BHD",
|
||||
@@ -14647,6 +14688,7 @@
|
||||
"الجنيه الإسترليني": "GBP",
|
||||
"الجنيه السودانى": "SDG",
|
||||
"الجنيه المصري": "EGP",
|
||||
"الدولار الامريكي": "IQD",
|
||||
"الدولار البربادوسي": "BBD",
|
||||
"الدولار البهامي": "BSD",
|
||||
"الدولار الكندي": "CAD",
|
||||
@@ -14906,6 +14948,7 @@
|
||||
"شيلينغ كينيي": "KES",
|
||||
"عملة السعودية": "SAR",
|
||||
"عملة المملكة العربية السعودية": "SAR",
|
||||
"عملة ذهبيه": "IQD",
|
||||
"عملة قطر": "QAR",
|
||||
"غواراني": "PYG",
|
||||
"غواراني باراغواي": "PYG",
|
||||
@@ -15354,7 +15397,6 @@
|
||||
"యునైటెడ్ స్టేట్స్ డాలర్": "USD",
|
||||
"యూరో": "EUR",
|
||||
"రూపాయి": "INR",
|
||||
"సంయుక్త రాష్ట్రాల డాలర్": "USD",
|
||||
"స్విస్ ఫ్రాంక్": "CHF",
|
||||
"അൾജീരിയൻ ദിനാർ": "DZD",
|
||||
"ഇന്തോനേഷ്യൻ റുപിയ": "IDR",
|
||||
@@ -15735,6 +15777,7 @@
|
||||
"203"
|
||||
],
|
||||
"칠레 페소": "CLP",
|
||||
"카리브 휠던": "XCG",
|
||||
"카보베르데 에스쿠도": "CVE",
|
||||
"카보베르데 이스쿠두": "CVE",
|
||||
"카보베르데에스쿠도": "CVE",
|
||||
|
||||
File diff suppressed because one or more lines are too long
@@ -1,5 +1,6 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
"""Simple implementation to store TrackerPatterns data in a SQL database."""
# pylint: disable=too-many-branches

import typing as t

@@ -119,6 +120,12 @@ class TrackerPatternsDB:

for rule in self.rules():

query_str: str = parsed_new_url.query
if not query_str:
# There are no more query arguments in the parsed_new_url on
# which rules can be applied, stop iterating over the rules.
break

if not re.match(rule[self.Fields.url_regexp], new_url):
# no match / ignore pattern
continue
@@ -136,18 +143,32 @@
# overlapping urlPattern like ".*"
continue

# remove tracker arguments from the url-query part
query_args: list[tuple[str, str]] = list(parse_qsl(parsed_new_url.query))
if query_args:
# remove tracker arguments from the url-query part
for name, val in query_args.copy():
# remove URL arguments
for pattern in rule[self.Fields.del_args]:
if re.match(pattern, name):
log.debug(
"TRACKER_PATTERNS: %s remove tracker arg: %s='%s'", parsed_new_url.netloc, name, val
)
query_args.remove((name, val))

for name, val in query_args.copy():
# remove URL arguments
parsed_new_url = parsed_new_url._replace(query=urlencode(query_args))
new_url = urlunparse(parsed_new_url)

else:
# The query argument for URLs like:
# - 'http://example.org?q=' --> query_str is 'q=' and query_args is []
# - 'http://example.org?/foo/bar' --> query_str is 'foo/bar' and query_args is []
# is a simple string and not a key/value dict.
for pattern in rule[self.Fields.del_args]:
if re.match(pattern, name):
log.debug("TRACKER_PATTERNS: %s remove tracker arg: %s='%s'", parsed_new_url.netloc, name, val)
query_args.remove((name, val))

parsed_new_url = parsed_new_url._replace(query=urlencode(query_args))
new_url = urlunparse(parsed_new_url)
if re.match(pattern, query_str):
log.debug("TRACKER_PATTERNS: %s remove tracker arg: '%s'", parsed_new_url.netloc, query_str)
parsed_new_url = parsed_new_url._replace(query="")
new_url = urlunparse(parsed_new_url)
break

if new_url != url:
return new_url
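
For orientation, a minimal standalone sketch of the rewrite step the hunks above implement: split the query string with parse_qsl, drop every argument whose name matches one of the rule's del_args patterns, and rebuild the URL. The patterns passed in the example are hypothetical, not rules from the shipped database.

import re
from urllib.parse import parse_qsl, urlencode, urlparse, urlunparse

def strip_tracker_args(url: str, del_args: list[str]) -> str:
    # keep only the query arguments whose name matches none of the patterns
    parsed = urlparse(url)
    query_args = [
        (name, val)
        for name, val in parse_qsl(parsed.query)
        if not any(re.match(p, name) for p in del_args)
    ]
    return urlunparse(parsed._replace(query=urlencode(query_args)))

# strip_tracker_args("https://example.org/?q=searx&utm_source=x", [r"utm_.*"])
# --> 'https://example.org/?q=searx'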
@@ -5,7 +5,7 @@
],
"ua": "Mozilla/5.0 ({os}; rv:{version}) Gecko/20100101 Firefox/{version}",
"versions": [
"143.0",
"142.0"
"145.0",
"144.0"
]
}
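
For reference, the "ua" template above is filled with an OS string and one of the listed versions; the OS value in this sketch is an illustrative assumption, not from the file:

ua = "Mozilla/5.0 ({os}; rv:{version}) Gecko/20100101 Firefox/{version}"
print(ua.format(os="X11; Linux x86_64", version="145.0"))
# Mozilla/5.0 (X11; Linux x86_64; rv:145.0) Gecko/20100101 Firefox/145.0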
@@ -3294,6 +3294,16 @@
"symbol": "slug",
"to_si_factor": 14.593903
},
"Q136416965": {
"si_name": null,
"symbol": "GT/S",
"to_si_factor": null
},
"Q136417074": {
"si_name": null,
"symbol": "MT/S",
"to_si_factor": null
},
"Q1374438": {
"si_name": "Q11574",
"symbol": "ks",
@@ -5449,6 +5459,11 @@
"symbol": "T",
"to_si_factor": 907.18474
},
"Q4741": {
"si_name": null,
"symbol": "RF",
"to_si_factor": null
},
"Q474533": {
"si_name": null,
"symbol": "At",
@@ -6375,9 +6390,9 @@
"to_si_factor": 86400.0
},
"Q577": {
"si_name": null,
"si_name": "Q11574",
"symbol": "a",
"to_si_factor": null
"to_si_factor": 31557600.0
},
"Q57899268": {
"si_name": "Q3332095",
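
The new to_si_factor for Q577 (the year, symbol "a") is consistent with the Julian year expressed in SI seconds:

print(365.25 * 86400)  # 31557600.0 (Julian year: 365.25 days of 86400 s each)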
@@ -270,7 +270,14 @@ def load_engines(engine_list: list[dict[str, t.Any]]):
categories.clear()
categories['general'] = []
for engine_data in engine_list:
if engine_data.get("inactive") is True:
continue
engine = load_engine(engine_data)
if engine:
register_engine(engine)
else:
# if an engine can't be loaded (if for example the engine is missing
# tor or some other requirements) it's set to inactive!
logger.error("loading engine %s failed: set engine to inactive!", engine_data.get("name", "???"))
engine_data["inactive"] = True
return engines
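
With this hunk an engine can also be switched off declaratively from the engine list; presumably an entry along the lines of the following sketch (name and engine are placeholders) is now skipped by load_engines():

- name: some engine
  engine: some_engine
  inactive: true  # skipped at load time; also set automatically when loading fails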
@@ -12,7 +12,7 @@ from urllib.parse import urlencode, urljoin, urlparse
import lxml
import babel

from searx.utils import extract_text, eval_xpath_list, eval_xpath_getindex
from searx.utils import extract_text, eval_xpath_list, eval_xpath_getindex, searxng_useragent
from searx.enginelib.traits import EngineTraits
from searx.locales import language_tag

@@ -45,7 +45,7 @@ def request(query, params):
query += ' (' + eng_lang + ')'
# wiki.archlinux.org is protected by anubis
# - https://github.com/searxng/searxng/issues/4646#issuecomment-2817848019
params['headers']['User-Agent'] = "SearXNG"
params['headers']['User-Agent'] = searxng_useragent()
elif netloc == 'wiki.archlinuxcn.org':
base_url = 'https://' + netloc + '/wzh/index.php?'

@@ -120,7 +120,7 @@ def fetch_traits(engine_traits: EngineTraits):
'zh': 'Special:搜索',
}

resp = get('https://wiki.archlinux.org/')
resp = get('https://wiki.archlinux.org/', timeout=3)
if not resp.ok: # type: ignore
print("ERROR: response from wiki.archlinux.org is not OK.")
@@ -50,7 +50,7 @@ def response(resp):
pos = script.index(end_tag) + len(end_tag) - 1
script = script[:pos]

json_resp = utils.js_variable_to_python(script)
json_resp = utils.js_obj_str_to_python(script)

results = []

190 searx/engines/azure.py Normal file
@@ -0,0 +1,190 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
"""Engine for Azure resources. This engine mimics the standard search bar in Azure
Portal (for resources and resource groups).

Configuration
=============

You must `register an application in Microsoft Entra ID`_ and assign it the
'Reader' role in your subscription.

To use this engine, add an entry similar to the following to your engine list in
``settings.yml``:

.. code:: yaml

  - name: azure
    engine: azure
    ...
    azure_tenant_id: "your_tenant_id"
    azure_client_id: "your_client_id"
    azure_client_secret: "your_client_secret"
    azure_token_expiration_seconds: 5000

.. _register an application in Microsoft Entra ID:
   https://learn.microsoft.com/en-us/entra/identity-platform/quickstart-register-app

"""
import typing as t

from searx.enginelib import EngineCache
from searx.network import post as http_post
from searx.result_types import EngineResults

if t.TYPE_CHECKING:
    from searx.extended_types import SXNG_Response
    from searx.search.processors import OnlineParams

engine_type = "online"
categories = ["it", "cloud"]

# Default values, should be overridden in settings.yml
azure_tenant_id = ""
azure_client_id = ""
azure_client_secret = ""
azure_token_expiration_seconds = 5000
"""Time for which an auth token is valid (sec.)"""
azure_batch_endpoint = "https://management.azure.com/batch?api-version=2020-06-01"

about = {
    "website": "https://www.portal.azure.com",
    "wikidata_id": "Q725967",
    "official_api_documentation": "https://learn.microsoft.com/en-us/\
rest/api/azure-resourcegraph/?view=rest-azureresourcegraph-resourcegraph-2024-04-01",
    "use_official_api": True,
    "require_api_key": True,
    "results": "JSON",
    "language": "en",
}

CACHE: EngineCache
"""Persistent (SQLite) key/value cache that deletes its values after ``expire``
seconds."""


def setup(engine_settings: dict[str, t.Any]) -> bool:
    """Initialization of the engine.

    - Instantiate a cache for this engine (:py:obj:`CACHE`).
    - Checks whether the tenant_id, client_id and client_secret are set,
      otherwise the engine is inactive.

    """
    global CACHE  # pylint: disable=global-statement
    CACHE = EngineCache(engine_settings["name"])

    missing_opts: list[str] = []
    for opt in ("azure_tenant_id", "azure_client_id", "azure_client_secret"):
        if not engine_settings.get(opt, ""):
            missing_opts.append(opt)
    if missing_opts:
        logger.error("missing values for options: %s", ", ".join(missing_opts))
        return False
    return True


def authenticate(t_id: str, c_id: str, c_secret: str) -> str:
    """Authenticates to Azure using Oauth2 Client Credentials Flow and returns
    an access token."""

    url = f"https://login.microsoftonline.com/{t_id}/oauth2/v2.0/token"
    body = {
        "client_id": c_id,
        "client_secret": c_secret,
        "grant_type": "client_credentials",
        "scope": "https://management.azure.com/.default",
    }

    resp: SXNG_Response = http_post(url, body, timeout=5)
    if resp.status_code != 200:
        raise RuntimeError(f"Azure authentication failed (status {resp.status_code}): {resp.text}")
    return resp.json()["access_token"]


def get_auth_token(t_id: str, c_id: str, c_secret: str) -> str:
    key = f"azure_tenant_id: {t_id:}, azure_client_id: {c_id}, azure_client_secret: {c_secret}"
    token: str | None = CACHE.get(key)
    if token:
        return token
    token = authenticate(t_id, c_id, c_secret)
    CACHE.set(key=key, value=token, expire=azure_token_expiration_seconds)
    return token


def request(query: str, params: "OnlineParams") -> None:

    token = get_auth_token(azure_tenant_id, azure_client_id, azure_client_secret)

    params["url"] = azure_batch_endpoint
    params["method"] = "POST"
    params["headers"]["Authorization"] = f"Bearer {token}"
    params["headers"]["Content-Type"] = "application/json"
    params["json"] = {
        "requests": [
            {
                "url": "/providers/Microsoft.ResourceGraph/resources?api-version=2024-04-01",
                "httpMethod": "POST",
                "name": "resourceGroups",
                "requestHeaderDetails": {"commandName": "Microsoft.ResourceGraph"},
                "content": {
                    "query": (
                        f"ResourceContainers"
                        f" | where (name contains ('{query}'))"
                        f" | where (type =~ ('Microsoft.Resources/subscriptions/resourcegroups'))"
                        f" | project id,name,type,kind,subscriptionId,resourceGroup"
                        f" | extend matchscore = name startswith '{query}'"
                        f" | extend normalizedName = tolower(tostring(name))"
                        f" | sort by matchscore desc, normalizedName asc"
                        f" | take 30"
                    )
                },
            },
            {
                "url": "/providers/Microsoft.ResourceGraph/resources?api-version=2024-04-01",
                "httpMethod": "POST",
                "name": "resources",
                "requestHeaderDetails": {
                    "commandName": "Microsoft.ResourceGraph",
                },
                "content": {
                    "query": f"Resources | where name contains '{query}' | take 30",
                },
            },
        ]
    }


def response(resp: "SXNG_Response") -> EngineResults:
    res = EngineResults()
    json_data = resp.json()

    for result in json_data["responses"]:
        if result["name"] == "resourceGroups":
            for data in result["content"]["data"]:
                res.add(
                    res.types.MainResult(
                        url=(
                            f"https://portal.azure.com/#@/resource"
                            f"/subscriptions/{data['subscriptionId']}/resourceGroups/{data['name']}/overview"
                        ),
                        title=data["name"],
                        content=f"Resource Group in Subscription: {data['subscriptionId']}",
                    )
                )
        elif result["name"] == "resources":
            for data in result["content"]["data"]:
                res.add(
                    res.types.MainResult(
                        url=(
                            f"https://portal.azure.com/#@/resource"
                            f"/subscriptions/{data['subscriptionId']}/resourceGroups/{data['resourceGroup']}"
                            f"/providers/{data['type']}/{data['name']}/overview"
                        ),
                        title=data["name"],
                        content=(
                            f"Resource of type {data['type']} in Subscription:"
                            f" {data['subscriptionId']}, Resource Group: {data['resourceGroup']}"
                        ),
                    )
                )
    return res
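
A standalone sketch of the OAuth2 client-credentials exchange that authenticate() above performs, written against plain httpx; the tenant/client values are placeholders, while the endpoint and scope are taken from the engine code:

import httpx

def fetch_token(tenant_id: str, client_id: str, client_secret: str) -> str:
    # POST the client credentials to the Entra ID token endpoint
    resp = httpx.post(
        f"https://login.microsoftonline.com/{tenant_id}/oauth2/v2.0/token",
        data={
            "client_id": client_id,
            "client_secret": client_secret,
            "grant_type": "client_credentials",
            "scope": "https://management.azure.com/.default",
        },
        timeout=5,
    )
    resp.raise_for_status()  # sketch: fail loudly instead of returning an error result
    return resp.json()["access_token"]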
@@ -108,6 +108,10 @@ def request(query, params):
time_ranges = {'day': '1', 'week': '2', 'month': '3', 'year': f'5_{unix_day-365}_{unix_day}'}
params['url'] += f'&filters=ex1:"ez{time_ranges[params["time_range"]]}"'

# in some regions where geoblocking is employed (e.g. China),
# www.bing.com redirects to the regional version of Bing
params['allow_redirects'] = True

return params


@@ -197,7 +201,6 @@ def fetch_traits(engine_traits: EngineTraits):
"User-Agent": gen_useragent(),
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
"Accept-Language": "en-US;q=0.5,en;q=0.3",
"Accept-Encoding": "gzip, deflate, br",
"DNT": "1",
"Connection": "keep-alive",
"Upgrade-Insecure-Requests": "1",
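
The 'year' entry in time_ranges above embeds a day-granular epoch range. Assuming unix_day (defined outside this hunk) is the current Unix time expressed in days, the filter expands roughly as in this sketch:

import time

unix_day = int(time.time() / 86400)  # assumption: days since the Unix epoch
print(f'&filters=ex1:"ez5_{unix_day - 365}_{unix_day}"')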
@@ -124,17 +124,17 @@ from urllib.parse import (
urlparse,
)

import json
from dateutil import parser
from lxml import html

from searx import locales
from searx.utils import (
extr,
extract_text,
eval_xpath,
eval_xpath_list,
eval_xpath_getindex,
js_variable_to_python,
js_obj_str_to_python,
js_obj_str_to_json_str,
get_embeded_stream_url,
)
from searx.enginelib.traits import EngineTraits
@@ -142,17 +142,17 @@ from searx.result_types import EngineResults
from searx.extended_types import SXNG_Response

about = {
"website": 'https://search.brave.com/',
"wikidata_id": 'Q22906900',
"website": "https://search.brave.com/",
"wikidata_id": "Q22906900",
"official_api_documentation": None,
"use_official_api": False,
"require_api_key": False,
"results": 'HTML',
"results": "HTML",
}

base_url = "https://search.brave.com/"
categories = []
brave_category: t.Literal["search", "videos", "images", "news", "goggles"] = 'search'
brave_category: t.Literal["search", "videos", "images", "news", "goggles"] = "search"
"""Brave supports common web-search, videos, images, news, and goggles search.

- ``search``: Common WEB search
@@ -182,74 +182,86 @@ to do more won't return any result and you will most likely be flagged as a bot.
"""

safesearch = True
safesearch_map = {2: 'strict', 1: 'moderate', 0: 'off'} # cookie: safesearch=off
safesearch_map = {2: "strict", 1: "moderate", 0: "off"} # cookie: safesearch=off

time_range_support = False
"""Brave only supports time-range in :py:obj:`brave_category` ``search`` (UI
category All) and in the goggles category."""

time_range_map: dict[str, str] = {
'day': 'pd',
'week': 'pw',
'month': 'pm',
'year': 'py',
"day": "pd",
"week": "pw",
"month": "pm",
"year": "py",
}


def request(query: str, params: dict[str, t.Any]) -> None:

# Don't accept br encoding / see https://github.com/searxng/searxng/pull/1787
params['headers']['Accept-Encoding'] = 'gzip, deflate'

args: dict[str, t.Any] = {
'q': query,
'source': 'web',
"q": query,
"source": "web",
}
if brave_spellcheck:
args['spellcheck'] = '1'
args["spellcheck"] = "1"

if brave_category in ('search', 'goggles'):
if params.get('pageno', 1) - 1:
args['offset'] = params.get('pageno', 1) - 1
if time_range_map.get(params['time_range']):
args['tf'] = time_range_map.get(params['time_range'])
if brave_category in ("search", "goggles"):
if params.get("pageno", 1) - 1:
args["offset"] = params.get("pageno", 1) - 1
if time_range_map.get(params["time_range"]):
args["tf"] = time_range_map.get(params["time_range"])

if brave_category == 'goggles':
args['goggles_id'] = Goggles
if brave_category == "goggles":
args["goggles_id"] = Goggles

params["url"] = f"{base_url}{brave_category}?{urlencode(args)}"
logger.debug("url %s", params["url"])

# set properties in the cookies

params['cookies']['safesearch'] = safesearch_map.get(params['safesearch'], 'off')
# the useLocation is IP based, we use cookie 'country' for the region
params['cookies']['useLocation'] = '0'
params['cookies']['summarizer'] = '0'
params["cookies"]["safesearch"] = safesearch_map.get(params["safesearch"], "off")
# the useLocation is IP based, we use cookie "country" for the region
params["cookies"]["useLocation"] = "0"
params["cookies"]["summarizer"] = "0"

engine_region = traits.get_region(params['searxng_locale'], 'all')
params['cookies']['country'] = engine_region.split('-')[-1].lower() # type: ignore
engine_region = traits.get_region(params["searxng_locale"], "all")
params["cookies"]["country"] = engine_region.split("-")[-1].lower() # type: ignore

ui_lang = locales.get_engine_locale(params['searxng_locale'], traits.custom["ui_lang"], 'en-us')
params['cookies']['ui_lang'] = ui_lang

logger.debug("cookies %s", params['cookies'])

params['headers']['Sec-Fetch-Dest'] = "document"
params['headers']['Sec-Fetch-Mode'] = "navigate"
params['headers']['Sec-Fetch-Site'] = "same-origin"
params['headers']['Sec-Fetch-User'] = "?1"
ui_lang = locales.get_engine_locale(params["searxng_locale"], traits.custom["ui_lang"], "en-us")
params["cookies"]["ui_lang"] = ui_lang
logger.debug("cookies %s", params["cookies"])


def _extract_published_date(published_date_raw):
def _extract_published_date(published_date_raw: str | None):
if published_date_raw is None:
return None

try:
return parser.parse(published_date_raw)
except parser.ParserError:
return None


def extract_json_data(text: str) -> dict[str, t.Any]:
# Example script source containing the data:
#
# kit.start(app, element, {
# node_ids: [0, 19],
# data: [{type:"data",data: .... ["q","goggles_id"],route:1,url:1}}]
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
text = text[text.index("<script") : text.index("</script")]
if not text:
raise ValueError("can't find JS/JSON data in the given text")
start = text.index("data: [{")
end = text.rindex("}}]")
js_obj_str = text[start:end]
js_obj_str = "{" + js_obj_str + "}}]}"
# js_obj_str = js_obj_str.replace("\xa0", "") # remove ASCII for
# js_obj_str = js_obj_str.replace(r"\u003C", "<").replace(r"\u003c", "<") # fix broken HTML tags in strings
json_str = js_obj_str_to_json_str(js_obj_str)
data: dict[str, t.Any] = json.loads(json_str)
return data


def response(resp: SXNG_Response) -> EngineResults:

if brave_category in ('search', 'goggles'):
@@ -264,11 +276,8 @@ def response(resp: SXNG_Response) -> EngineResults:
# node_ids: [0, 19],
# data: [{type:"data",data: .... ["q","goggles_id"],route:1,url:1}}]
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
js_object = "[{" + extr(resp.text, "data: [{", "}}],") + "}}]"
json_data = js_variable_to_python(js_object)

# json_data is a list and at the second position (0,1) in this list we find the "response" data we need ..
json_resp = json_data[1]['data']['body']['response']
json_data: dict[str, t.Any] = extract_json_data(resp.text)
json_resp: dict[str, t.Any] = json_data['data'][1]["data"]['body']['response']

if brave_category == 'images':
return _parse_images(json_resp)
@@ -278,150 +287,121 @@
raise ValueError(f"Unsupported brave category: {brave_category}")


def _parse_search(resp) -> EngineResults:
result_list = EngineResults()

def _parse_search(resp: SXNG_Response) -> EngineResults:
res = EngineResults()
dom = html.fromstring(resp.text)

# I doubt that Brave is still providing the "answer" class / I haven't seen
# answers in brave for a long time.
answer_tag = eval_xpath_getindex(dom, '//div[@class="answer"]', 0, default=None)
if answer_tag:
url = eval_xpath_getindex(dom, '//div[@id="featured_snippet"]/a[@class="result-header"]/@href', 0, default=None)
answer = extract_text(answer_tag)
if answer is not None:
result_list.add(result_list.types.Answer(answer=answer, url=url))
for result in eval_xpath_list(dom, "//div[contains(@class, 'snippet ')]"):

# xpath_results = '//div[contains(@class, "snippet fdb") and @data-type="web"]'
xpath_results = '//div[contains(@class, "snippet ")]'

for result in eval_xpath_list(dom, xpath_results):

url = eval_xpath_getindex(result, './/a[contains(@class, "h")]/@href', 0, default=None)
title_tag = eval_xpath_getindex(
result, './/a[contains(@class, "h")]//div[contains(@class, "title")]', 0, default=None
)
url: str | None = eval_xpath_getindex(result, ".//a/@href", 0, default=None)
title_tag = eval_xpath_getindex(result, ".//div[contains(@class, 'title')]", 0, default=None)
if url is None or title_tag is None or not urlparse(url).netloc: # partial url likely means it's an ad
continue

content: str = extract_text(
eval_xpath_getindex(result, './/div[contains(@class, "snippet-description")]', 0, default='')
) # type: ignore
pub_date_raw = eval_xpath(result, 'substring-before(.//div[contains(@class, "snippet-description")], "-")')
pub_date = _extract_published_date(pub_date_raw)
if pub_date and content.startswith(pub_date_raw):
content = content.lstrip(pub_date_raw).strip("- \n\t")
content: str = ""
pub_date = None

thumbnail = eval_xpath_getindex(result, './/img[contains(@class, "thumb")]/@src', 0, default='')
_content = eval_xpath_getindex(result, ".//div[contains(@class, 'content')]", 0, default="")
if len(_content):
content = extract_text(_content) # type: ignore
_pub_date = extract_text(
eval_xpath_getindex(_content, ".//span[contains(@class, 't-secondary')]", 0, default="")
)
if _pub_date:
pub_date = _extract_published_date(_pub_date)
content = content.lstrip(_pub_date).strip("- \n\t")

item = {
'url': url,
'title': extract_text(title_tag),
'content': content,
'publishedDate': pub_date,
'thumbnail': thumbnail,
}
thumbnail: str = eval_xpath_getindex(result, ".//a[contains(@class, 'thumbnail')]//img/@src", 0, default="")

item = res.types.LegacyResult(
template="default.html",
url=url,
title=extract_text(title_tag),
content=content,
publishedDate=pub_date,
thumbnail=thumbnail,
)
res.add(item)

video_tag = eval_xpath_getindex(
result, './/div[contains(@class, "video-snippet") and @data-macro="video"]', 0, default=None
result, ".//div[contains(@class, 'video-snippet') and @data-macro='video']", 0, default=[]
)
if video_tag is not None:

if len(video_tag):
# In my tests a video tag in the WEB search was most often not a
# video, except the ones from youtube ..

iframe_src = get_embeded_stream_url(url)
if iframe_src:
item['iframe_src'] = iframe_src
item['template'] = 'videos.html'
item['thumbnail'] = eval_xpath_getindex(video_tag, './/img/@src', 0, default='')
pub_date_raw = extract_text(
eval_xpath(video_tag, './/div[contains(@class, "snippet-attributes")]/div/text()')
)
item['publishedDate'] = _extract_published_date(pub_date_raw)
else:
item['thumbnail'] = eval_xpath_getindex(video_tag, './/img/@src', 0, default='')
item["iframe_src"] = iframe_src
item["template"] = "videos.html"

result_list.append(item)

return result_list
return res


def _parse_news(resp) -> EngineResults:

result_list = EngineResults()
def _parse_news(resp: SXNG_Response) -> EngineResults:
res = EngineResults()
dom = html.fromstring(resp.text)

for result in eval_xpath_list(dom, '//div[contains(@class, "results")]//div[@data-type="news"]'):
for result in eval_xpath_list(dom, "//div[contains(@class, 'results')]//div[@data-type='news']"):

# import pdb
# pdb.set_trace()

url = eval_xpath_getindex(result, './/a[contains(@class, "result-header")]/@href', 0, default=None)
url = eval_xpath_getindex(result, ".//a[contains(@class, 'result-header')]/@href", 0, default=None)
if url is None:
continue

title = extract_text(eval_xpath_list(result, './/span[contains(@class, "snippet-title")]'))
content = extract_text(eval_xpath_list(result, './/p[contains(@class, "desc")]'))
thumbnail = eval_xpath_getindex(result, './/div[contains(@class, "image-wrapper")]//img/@src', 0, default='')
title = eval_xpath_list(result, ".//span[contains(@class, 'snippet-title')]")
content = eval_xpath_list(result, ".//p[contains(@class, 'desc')]")
thumbnail = eval_xpath_getindex(result, ".//div[contains(@class, 'image-wrapper')]//img/@src", 0, default="")

item = {
"url": url,
"title": title,
"content": content,
"thumbnail": thumbnail,
}
item = res.types.LegacyResult(
template="default.html",
url=url,
title=extract_text(title),
thumbnail=thumbnail,
content=extract_text(content),
)
res.add(item)

result_list.append(item)

return result_list
return res


def _parse_images(json_resp) -> EngineResults:
result_list = EngineResults()
def _parse_images(json_resp: dict[str, t.Any]) -> EngineResults:
res = EngineResults()

for result in json_resp["results"]:
item = {
'url': result['url'],
'title': result['title'],
'content': result['description'],
'template': 'images.html',
'resolution': result['properties']['format'],
'source': result['source'],
'img_src': result['properties']['url'],
'thumbnail_src': result['thumbnail']['src'],
}
result_list.append(item)
item = res.types.LegacyResult(
template="images.html",
url=result["url"],
title=result["title"],
source=result["source"],
img_src=result["properties"]["url"],
thumbnail_src=result["thumbnail"]["src"],
)
res.add(item)

return result_list
return res


def _parse_videos(json_resp) -> EngineResults:
result_list = EngineResults()
def _parse_videos(json_resp: dict[str, t.Any]) -> EngineResults:
res = EngineResults()

for result in json_resp["results"]:

url = result['url']
item = {
'url': url,
'title': result['title'],
'content': result['description'],
'template': 'videos.html',
'length': result['video']['duration'],
'duration': result['video']['duration'],
'publishedDate': _extract_published_date(result['age']),
}

if result['thumbnail'] is not None:
item['thumbnail'] = result['thumbnail']['src']

iframe_src = get_embeded_stream_url(url)
item = res.types.LegacyResult(
template="videos.html",
url=result["url"],
title=result["title"],
content=result["description"],
length=result["video"]["duration"],
duration=result["video"]["duration"],
publishedDate=_extract_published_date(result["age"]),
)
if result["thumbnail"] is not None:
item["thumbnail"] = result["thumbnail"]["src"]
iframe_src = get_embeded_stream_url(result["url"])
if iframe_src:
item['iframe_src'] = iframe_src
item["iframe_src"] = iframe_src

result_list.append(item)
res.add(item)

return result_list
return res


def fetch_traits(engine_traits: EngineTraits):
@@ -436,34 +416,31 @@ def fetch_traits(engine_traits: EngineTraits):

engine_traits.custom["ui_lang"] = {}

headers = {
'Accept-Encoding': 'gzip, deflate',
}
lang_map = {'no': 'nb'} # norway

# languages (UI)

resp = get('https://search.brave.com/settings', headers=headers)
resp = get('https://search.brave.com/settings')

if not resp.ok: # type: ignore
if not resp.ok:
print("ERROR: response from Brave is not OK.")
dom = html.fromstring(resp.text) # type: ignore
dom = html.fromstring(resp.text)

for option in dom.xpath('//section//option[@value="en-us"]/../option'):
for option in dom.xpath("//section//option[@value='en-us']/../option"):

ui_lang = option.get('value')
ui_lang = option.get("value")
try:
l = babel.Locale.parse(ui_lang, sep='-')
l = babel.Locale.parse(ui_lang, sep="-")
if l.territory:
sxng_tag = region_tag(babel.Locale.parse(ui_lang, sep='-'))
sxng_tag = region_tag(babel.Locale.parse(ui_lang, sep="-"))
else:
sxng_tag = language_tag(babel.Locale.parse(ui_lang, sep='-'))
sxng_tag = language_tag(babel.Locale.parse(ui_lang, sep="-"))

except babel.UnknownLocaleError:
print("ERROR: can't determine babel locale of Brave's (UI) language %s" % ui_lang)
continue

conflict = engine_traits.custom["ui_lang"].get(sxng_tag)
conflict = engine_traits.custom["ui_lang"].get(sxng_tag) # type: ignore
if conflict:
if conflict != ui_lang:
print("CONFLICT: babel %s --> %s, %s" % (sxng_tag, conflict, ui_lang))
@@ -472,26 +449,26 @@ def fetch_traits(engine_traits: EngineTraits):

# search regions of brave

resp = get('https://cdn.search.brave.com/serp/v2/_app/immutable/chunks/parameters.734c106a.js', headers=headers)
resp = get("https://cdn.search.brave.com/serp/v2/_app/immutable/chunks/parameters.734c106a.js")

if not resp.ok: # type: ignore
if not resp.ok:
print("ERROR: response from Brave is not OK.")

country_js = resp.text[resp.text.index("options:{all") + len('options:') :] # type: ignore
country_js = resp.text[resp.text.index("options:{all") + len("options:") :]
country_js = country_js[: country_js.index("},k={default")]
country_tags = js_variable_to_python(country_js)
country_tags = js_obj_str_to_python(country_js)

for k, v in country_tags.items():
if k == 'all':
engine_traits.all_locale = 'all'
if k == "all":
engine_traits.all_locale = "all"
continue
country_tag = v['value']
country_tag = v["value"]

# add official languages of the country ..
for lang_tag in babel.languages.get_official_languages(country_tag, de_facto=True):
lang_tag = lang_map.get(lang_tag, lang_tag)
sxng_tag = region_tag(babel.Locale.parse('%s_%s' % (lang_tag, country_tag.upper())))
# print("%-20s: %s <-- %s" % (v['label'], country_tag, sxng_tag))
sxng_tag = region_tag(babel.Locale.parse("%s_%s" % (lang_tag, country_tag.upper())))
# print("%-20s: %s <-- %s" % (v["label"], country_tag, sxng_tag))

conflict = engine_traits.regions.get(sxng_tag)
if conflict:
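
A toy trace of the slicing done in extract_json_data() above, on a synthetic script tag (the real payload is far larger):

text = '<script>kit.start(app, element, {node_ids:[0,19], data: [{type:"data",data:{a:1}}]});</script>'
text = text[text.index("<script") : text.index("</script")]
start = text.index("data: [{")  # first occurrence is the outer "data:" key
end = text.rindex("}}]")        # last closing of the data array
js_obj_str = "{" + text[start:end] + "}}]}"
print(js_obj_str)  # {data: [{type:"data",data:{a:1}}]}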
@@ -23,14 +23,14 @@ paging = True
# search-url
base_url = 'https://www.deviantart.com'

results_xpath = '//div[@class="_2pZkk"]/div/div/a'
results_xpath = '//div[@class="V_S0t_"]/div/div/a'
url_xpath = './@href'
thumbnail_src_xpath = './div/img/@src'
img_src_xpath = './div/img/@srcset'
title_xpath = './@aria-label'
premium_xpath = '../div/div/div/text()'
premium_keytext = 'Watch the artist to view this deviation'
cursor_xpath = '(//a[@class="_1OGeq"]/@href)[last()]'
cursor_xpath = '(//a[@class="vQ2brP"]/@href)[last()]'


def request(query, params):

63 searx/engines/devicons.py Normal file
@@ -0,0 +1,63 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
"""Devicons (icons)"""

import typing as t

from searx.result_types import EngineResults

if t.TYPE_CHECKING:
    from extended_types import SXNG_Response
    from search.processors.online import OnlineParams


about = {
    "website": "https://devicon.dev/",
    "wikidata_id": None,
    "official_api_documentation": None,
    "use_official_api": True,
    "results": "JSON",
}

cdn_base_url = "https://cdn.jsdelivr.net/gh/devicons/devicon@latest"
categories = ["images", "icons"]


def request(query: str, params: "OnlineParams"):
    params["url"] = f"{cdn_base_url}/devicon.json"
    params['query'] = query
    return params


def response(resp: "SXNG_Response") -> EngineResults:
    res = EngineResults()
    query_parts = resp.search_params["query"].lower().split(" ")

    def is_result_match(result: dict[str, t.Any]) -> bool:
        for part in query_parts:
            if part in result["name"]:
                return True

            for tag in result["altnames"] + result["tags"]:
                if part in tag:
                    return True

        return False

    filtered_results = filter(is_result_match, resp.json())
    for result in filtered_results:
        for image_type in result["versions"]["svg"]:
            img_src = f"{cdn_base_url}/icons/{result['name']}/{result['name']}-{image_type}.svg"
            res.add(
                res.types.LegacyResult(
                    {
                        "template": "images.html",
                        "url": img_src,
                        "title": result["name"],
                        "content": f"Base color: {result['color']}",
                        "img_src": img_src,
                        "img_format": "SVG",
                    }
                )
            )

    return res
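
The CDN URL assembled in response() above expands like this for the "python" icon (illustrative name/image_type values):

cdn_base_url = "https://cdn.jsdelivr.net/gh/devicons/devicon@latest"
name, image_type = "python", "original"
print(f"{cdn_base_url}/icons/{name}/{name}-{image_type}.svg")
# https://cdn.jsdelivr.net/gh/devicons/devicon@latest/icons/python/python-original.svg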
@@ -407,7 +407,7 @@ def fetch_traits(engine_traits: EngineTraits):

"""
# pylint: disable=too-many-branches, too-many-statements, disable=import-outside-toplevel
from searx.utils import js_variable_to_python
from searx.utils import js_obj_str_to_python

# fetch regions

@@ -455,7 +455,7 @@ def fetch_traits(engine_traits: EngineTraits):

js_code = extr(resp.text, 'languages:', ',regions') # type: ignore

languages = js_variable_to_python(js_code)
languages: dict[str, str] = js_obj_str_to_python(js_code)
for eng_lang, name in languages.items():

if eng_lang == 'wt_WT':

52 searx/engines/grokipedia.py Normal file
@@ -0,0 +1,52 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
"""Grokipedia (general)"""

from urllib.parse import urlencode
from searx.utils import html_to_text
from searx.result_types import EngineResults

about = {
    "website": 'https://grokipedia.com',
    "wikidata_id": "Q136410803",
    "official_api_documentation": None,
    "use_official_api": False,
    "require_api_key": False,
    "results": "JSON",
}

base_url = "https://grokipedia.com/api/full-text-search"
categories = ['general']
paging = True
results_per_page = 10


def request(query, params):

    start_index = (params["pageno"] - 1) * results_per_page

    query_params = {
        "query": query,
        "limit": results_per_page,
        "offset": start_index,
    }

    params["url"] = f"{base_url}?{urlencode(query_params)}"

    return params


def response(resp) -> EngineResults:
    results = EngineResults()
    search_res = resp.json()

    for item in search_res["results"]:

        results.add(
            results.types.MainResult(
                url='https://grokipedia.com/page/' + item["slug"],
                title=item["title"],
                content=html_to_text(item["snippet"]),
            )
        )

    return results
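
For page 2 of a query, request() above builds a URL like this sketch shows (the query text is an illustrative value):

from urllib.parse import urlencode

base_url = "https://grokipedia.com/api/full-text-search"
args = {"query": "search engine", "limit": 10, "offset": 10}  # pageno=2 -> offset 10
print(f"{base_url}?{urlencode(args)}")
# https://grokipedia.com/api/full-text-search?query=search+engine&limit=10&offset=10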
@@ -6,6 +6,7 @@ from urllib.parse import urlencode
from dateutil.relativedelta import relativedelta

from flask_babel import gettext
from searx.utils import html_to_text

# Engine metadata
about = {
@@ -75,6 +76,7 @@ def response(resp):
object_id = hit["objectID"]
points = hit.get("points") or 0
num_comments = hit.get("num_comments") or 0
content = hit.get("url") or html_to_text(hit.get("comment_text")) or html_to_text(hit.get("story_text"))

metadata = ""
if points != 0 or num_comments != 0:
@@ -83,7 +85,7 @@ def response(resp):
{
"title": hit.get("title") or f"{gettext('author')}: {hit['author']}",
"url": f"https://news.ycombinator.com/item?id={object_id}",
"content": hit.get("url") or hit.get("comment_text") or hit.get("story_text") or "",
"content": content,
"metadata": metadata,
"author": hit["author"],
"publishedDate": datetime.fromtimestamp(hit["created_at_i"]),

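
The new content fallback chain prefers the story URL, then the comment text, then the story text, stripping HTML via html_to_text; a toy trace with a synthetic hit:

hit = {"url": None, "comment_text": "<p>nice &amp; fast</p>", "story_text": None}
# html_to_text as imported above; the chain short-circuits on the first truthy value
content = hit.get("url") or html_to_text(hit.get("comment_text")) or html_to_text(hit.get("story_text"))
# content == "nice & fast"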
@@ -31,7 +31,7 @@ paging = True
time_range_support = True

# base_url can be overwritten by a list of URLs in the settings.yml
base_url: list | str = []
base_url: list[str] | str = []


def init(_):

@@ -1,264 +0,0 @@
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
"""Mullvad Leta is a search engine proxy. Currently Leta only offers text
|
||||
search results not image, news or any other types of search result. Leta acts
|
||||
as a proxy to Google and Brave search results. You can select which backend
|
||||
search engine you wish to use, see (:py:obj:`leta_engine`).
|
||||
|
||||
.. hint::
|
||||
|
||||
Leta caches each search for up to 30 days. For example, if you use search
|
||||
terms like ``news``, contrary to your intention you'll get very old results!
|
||||
|
||||
|
||||
Configuration
|
||||
=============
|
||||
|
||||
The engine has the following additional settings:
|
||||
|
||||
- :py:obj:`leta_engine` (:py:obj:`LetaEnginesType`)
|
||||
|
||||
You can configure one Leta engine for Google and one for Brave:
|
||||
|
||||
.. code:: yaml
|
||||
|
||||
- name: mullvadleta
|
||||
engine: mullvad_leta
|
||||
leta_engine: google
|
||||
shortcut: ml
|
||||
|
||||
- name: mullvadleta brave
|
||||
engine: mullvad_leta
|
||||
network: mullvadleta # use network from engine "mullvadleta" configured above
|
||||
leta_engine: brave
|
||||
shortcut: mlb
|
||||
|
||||
Implementations
|
||||
===============
|
||||
|
||||
"""
|
||||
import typing as t
|
||||
|
||||
from urllib.parse import urlencode
|
||||
import babel
|
||||
from httpx import Response
|
||||
from lxml import html
|
||||
from searx.enginelib.traits import EngineTraits
|
||||
from searx.locales import get_official_locales, language_tag, region_tag
|
||||
from searx.utils import eval_xpath_list
|
||||
from searx.result_types import EngineResults, MainResult
|
||||
|
||||
search_url = "https://leta.mullvad.net"
|
||||
|
||||
# about
|
||||
about = {
|
||||
"website": search_url,
|
||||
"wikidata_id": 'Q47008412', # the Mullvad id - not leta, but related
|
||||
"official_api_documentation": 'https://leta.mullvad.net/faq',
|
||||
"use_official_api": False,
|
||||
"require_api_key": False,
|
||||
"results": 'HTML',
|
||||
}
|
||||
|
||||
# engine dependent config
|
||||
categories = ["general", "web"]
|
||||
paging = True
|
||||
max_page = 10
|
||||
time_range_support = True
|
||||
time_range_dict = {
|
||||
"day": "d",
|
||||
"week": "w",
|
||||
"month": "m",
|
||||
"year": "y",
|
||||
}
|
||||
|
||||
LetaEnginesType = t.Literal["google", "brave"]
|
||||
"""Engine types supported by mullvadleta."""
|
||||
|
||||
leta_engine: LetaEnginesType = "google"
|
||||
"""Select Leta's engine type from :py:obj:`LetaEnginesType`."""
|
||||
|
||||
|
||||
def init(_):
|
||||
l = t.get_args(LetaEnginesType)
|
||||
if leta_engine not in l:
|
||||
raise ValueError(f"leta_engine '{leta_engine}' is invalid, use one of {', '.join(l)}")
|
||||
|
||||
|
||||
class DataNodeQueryMetaDataIndices(t.TypedDict):
|
||||
"""Indices into query metadata."""
|
||||
|
||||
success: int
|
||||
q: int # pylint: disable=invalid-name
|
||||
country: int
|
||||
language: int
|
||||
lastUpdated: int
|
||||
engine: int
|
||||
items: int
|
||||
infobox: int
|
||||
news: int
|
||||
timestamp: int
|
||||
altered: int
|
||||
page: int
|
||||
next: int # if -1, there no more results are available
|
||||
previous: int
|
||||
|
||||
|
||||
class DataNodeResultIndices(t.TypedDict):
|
||||
"""Indices into query resultsdata."""
|
||||
|
||||
link: int
|
||||
snippet: int
|
||||
title: int
|
||||
favicon: int
|
||||
|
||||
|
||||
def request(query: str, params: dict):
    params["method"] = "GET"
    args = {
        "q": query,
        "engine": leta_engine,
        "x-sveltekit-invalidated": "001",  # hardcoded from all requests seen
    }

    country = traits.get_region(params.get("searxng_locale"), traits.all_locale)  # type: ignore
    if country:
        args["country"] = country

    language = traits.get_language(params.get("searxng_locale"), traits.all_locale)  # type: ignore
    if language:
        args["language"] = language

    if params["time_range"] in time_range_dict:
        args["lastUpdated"] = time_range_dict[params["time_range"]]

    if params["pageno"] > 1:
        args["page"] = params["pageno"]

    params["url"] = f"{search_url}/search/__data.json?{urlencode(args)}"

    return params

def response(resp: Response) -> EngineResults:
    json_response = resp.json()

    nodes = json_response["nodes"]
    # 0: is None
    # 1: has "connected=True", not useful
    # 2: query results within "data"

    data_nodes = nodes[2]["data"]
    # Instead of a nested object structure, all objects are flattened into one
    # list.  The first object in data_nodes provides indices into "data_nodes"
    # to access each search result (which is itself an object of more indices).
    #
    # Read the related TypedDict definitions for details.

    query_meta_data: DataNodeQueryMetaDataIndices = data_nodes[0]
    query_items_indices = query_meta_data["items"]

    results = EngineResults()
    for idx in data_nodes[query_items_indices]:
        query_item_indices: DataNodeResultIndices = data_nodes[idx]
        results.add(
            MainResult(
                url=data_nodes[query_item_indices["link"]],
                title=data_nodes[query_item_indices["title"]],
                content=data_nodes[query_item_indices["snippet"]],
            )
        )

    return results

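# A minimal sketch of the flattened structure described above -- the values
# here are illustrative, not real Leta payload data:
#
#   data_nodes = [
#       {"items": 1, "success": ...},            # 0: DataNodeQueryMetaDataIndices
#       [2],                                     # 1: list of per-result indices
#       {"link": 3, "title": 4, "snippet": 5},   # 2: DataNodeResultIndices
#       "https://example.org",                   # 3
#       "Example title",                         # 4
#       "Example snippet",                       # 5
#   ]
#
# data_nodes[data_nodes[0]["items"]] yields [2]; data_nodes[2]["link"] yields
# the index 3, and data_nodes[3] finally holds the URL.
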
def fetch_traits(engine_traits: EngineTraits) -> None:
    """Fetch languages and regions from Mullvad-Leta."""

    def extract_table_data(table):
        for row in table.xpath(".//tr")[2:]:
            cells = row.xpath(".//td | .//th")  # includes headers and data
            if len(cells) > 1:  # ensure the column exists
                cell0 = cells[0].text_content().strip()
                cell1 = cells[1].text_content().strip()
                yield [cell0, cell1]

    # pylint: disable=import-outside-toplevel
    # see https://github.com/searxng/searxng/issues/762
    from searx.network import get as http_get

    # pylint: enable=import-outside-toplevel

    resp = http_get(f"{search_url}/documentation")
    if not isinstance(resp, Response):
        print("ERROR: failed to get response from mullvad-leta. Are you connected to the VPN?")
        return
    if not resp.ok:
        print("ERROR: response from mullvad-leta is not OK. Are you connected to the VPN?")
        return

    dom = html.fromstring(resp.text)

    # There are 4 HTML tables on the documentation page for extracting information:
    # 0. Keyboard Shortcuts
    # 1. Query Parameters (shoutout to Mullvad for accessible docs for integration)
    # 2. Country Codes [Country, Code]
    # 3. Language Codes [Language, Code]
    tables = eval_xpath_list(dom.body, "//table")
    if tables is None or len(tables) <= 0:
        print("ERROR: could not find any tables. Was the page updated?")
        return

    language_table = tables[3]
    lang_map = {
        "zh-hant": "zh_Hant",
        "zh-hans": "zh_Hans",
        "jp": "ja",
    }

    for language, code in extract_table_data(language_table):

        locale_tag = lang_map.get(code, code).replace("-", "_")  # type: ignore
        try:
            locale = babel.Locale.parse(locale_tag)
        except babel.UnknownLocaleError:
            print(f"ERROR: Mullvad-Leta language {language} ({code}) is unknown by babel")
            continue

        sxng_tag = language_tag(locale)
        engine_traits.languages[sxng_tag] = code

    country_table = tables[2]
    country_map = {
        "cn": "zh-CN",
        "hk": "zh-HK",
        "jp": "ja-JP",
        "my": "ms-MY",
        "tw": "zh-TW",
        "uk": "en-GB",
        "us": "en-US",
    }

    for country, code in extract_table_data(country_table):

        sxng_tag = country_map.get(code)
        if sxng_tag:
            engine_traits.regions[sxng_tag] = code
            continue

        try:
            locale = babel.Locale.parse(f"{code.lower()}_{code.upper()}")
        except babel.UnknownLocaleError:
            locale = None

        if locale:
            engine_traits.regions[region_tag(locale)] = code
            continue

        official_locales = get_official_locales(code, engine_traits.languages.keys(), regional=True)
        if not official_locales:
            print(f"ERROR: Mullvad-Leta country '{code}' ({country}) could not be mapped as expected.")
            continue

        for locale in official_locales:
            engine_traits.regions[region_tag(locale)] = code
@@ -15,7 +15,7 @@ from searx.utils import (
     extr,
     html_to_text,
     parse_duration_string,
-    js_variable_to_python,
+    js_obj_str_to_python,
     get_embeded_stream_url,
 )

@@ -125,7 +125,7 @@ def parse_images(data):

     match = extr(data, '<script>var imageSearchTabData=', '</script>')
     if match:
-        json = js_variable_to_python(match.strip())
+        json = js_obj_str_to_python(match.strip())
         items = json.get('content', {}).get('items', [])

     for item in items:

@@ -55,15 +55,18 @@ def response(resp):
         if result['type'] == 'story':
             continue

+        main_image = result['images']['orig']
         results.append(
             {
                 'template': 'images.html',
-                'url': result['link'] or f"{base_url}/pin/{result['id']}/",
+                'url': result.get('link') or f"{base_url}/pin/{result['id']}/",
                 'title': result.get('title') or result.get('grid_title'),
                 'content': (result.get('rich_summary') or {}).get('display_description') or "",
-                'img_src': result['images']['orig']['url'],
+                'img_src': main_image['url'],
                 'thumbnail_src': result['images']['236x']['url'],
                 'source': (result.get('rich_summary') or {}).get('site_name'),
+                'resolution': f"{main_image['width']}x{main_image['height']}",
                 'author': f"{result['pinner'].get('full_name')} ({result['pinner']['username']})",
             }
         )

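The switch from ``result['link']`` to ``result.get('link')`` matters because
some pins carry no "link" key at all; a sketch with hypothetical pin data:

    # result = {"id": "123", "images": {...}}   -- no "link" key
    # result.get('link') -> None, so the fallback pin URL is used:
    # f"{base_url}/pin/{result['id']}/"  -> e.g. "https://www.pinterest.com/pin/123/"
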
@@ -72,7 +72,7 @@ categories = []
 paging = True

 # search-url
-backend_url: list[str] | str | None = None
+backend_url: list[str] | str = []
 """Piped-Backend_: The core component behind Piped.  The value is a URL or a
 list of URLs; in the latter case an instance will be selected randomly.  For a
 complete list of official instances see Piped-Instances (`JSON

@@ -20,7 +20,7 @@ categories = ['images']

 # Search URL
 base_url = "https://www.pixiv.net/ajax/search/illustrations"
-pixiv_image_proxies: list = []
+pixiv_image_proxies: list[str] = []


 def request(query, params):

@@ -89,6 +89,9 @@ time_range_support = True
 send_accept_language_header = True
 categories = ["general", "web"]  # general, images, videos, news

+# HTTP2 requests immediately get blocked by a CAPTCHA
+enable_http2 = False
+
 search_type = "search"
 """must be any of ``search``, ``images``, ``videos``, ``news``"""

@@ -137,7 +140,7 @@ def _get_request_id(query, params):
     if l.territory:
         headers['Accept-Language'] = f"{l.language}-{l.territory},{l.language};" "q=0.9,*;" "q=0.5"

-    resp = get(url, headers=headers)
+    resp = get(url, headers=headers, timeout=5)

     for line in resp.text.split("\n"):
         if "window.searchId = " in line:

@@ -64,7 +64,7 @@ def _get_algolia_api_url():
         return __CACHED_API_URL

     # fake request to extract api url
-    resp = get(f"{pdia_base_url}/search/?q=")
+    resp = get(f"{pdia_base_url}/search/?q=", timeout=3)
     if resp.status_code != 200:
         raise LookupError("Failed to fetch config location (and as such the API url) for PDImageArchive")
     pdia_config_filepart = extr(resp.text, pdia_config_start, pdia_config_end)

@@ -73,7 +73,7 @@ def request(query: str, params: "OnlineParams") -> None:
     )
     esearch_url = f"{eutils_api}/esearch.fcgi?{args}"
     # DTD: https://eutils.ncbi.nlm.nih.gov/eutils/dtd/20060628/esearch.dtd
-    esearch_resp: "SXNG_Response" = get(esearch_url)
+    esearch_resp: "SXNG_Response" = get(esearch_url, timeout=3)
     pmids_results = etree.XML(esearch_resp.content)
     pmids: list[str] = [i.text for i in pmids_results.xpath("//eSearchResult/IdList/Id")]

@@ -53,6 +53,7 @@ from searx.exceptions import (
     SearxEngineAPIException,
     SearxEngineTooManyRequestsException,
     SearxEngineCaptchaException,
+    SearxEngineAccessDeniedException,
 )
 from searx.network import raise_for_httperror
 from searx.enginelib.traits import EngineTraits

@@ -81,6 +82,9 @@ max_page = 5
 """5 pages maximum (``&p=5``): Trying to do more just results in an improper
 redirect"""

+# Otherwise Qwant will return 403 if not set
+send_accept_language_header = True
+
 qwant_categ = None
 """One of ``web-lite`` (or ``web``), ``news``, ``images`` or ``videos``"""

@@ -130,17 +134,17 @@ def request(query, params):

     elif qwant_categ == 'images':

-        args['count'] = 50
         args['locale'] = q_locale
         args['safesearch'] = params['safesearch']
+        args['count'] = 50
         args['tgp'] = 3
         args['offset'] = (params['pageno'] - 1) * args['count']

     else:  # web, news, videos

-        args['count'] = 10
         args['locale'] = q_locale
         args['safesearch'] = params['safesearch']
+        args['count'] = 10
         args['llm'] = 'false'
         args['tgp'] = 3
         args['offset'] = (params['pageno'] - 1) * args['count']

@@ -184,8 +188,12 @@ def parse_web_api(resp):

     results = []

-    # load JSON result
-    search_results = loads(resp.text)
+    # Try to load JSON result
+    try:
+        search_results = loads(resp.text)
+    except ValueError:
+        search_results = {}
+
     data = search_results.get('data', {})

     # check for an API error

@@ -195,6 +203,8 @@ def parse_web_api(resp):
         raise SearxEngineTooManyRequestsException()
     if search_results.get("data", {}).get("error_data", {}).get("captchaUrl") is not None:
         raise SearxEngineCaptchaException()
+    if resp.status_code == 403:
+        raise SearxEngineAccessDeniedException()
     msg = ",".join(data.get('message', ['unknown']))
     raise SearxEngineAPIException(f"{msg} ({error_code})")

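Why the try/except helps -- a sketch with a hypothetical non-JSON reply (e.g.
an HTML block page):

    # loads("<html>...") raises ValueError  -> search_results = {}
    # search_results.get('data', {}) -> {}, so the regular error handling
    # below runs instead of an unhandled exception in the engine.
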
@@ -13,23 +13,12 @@ Configuration

 You must configure the following settings:

-``base_url``:
-  Location where recoll-webui can be reached.
+- :py:obj:`base_url`
+- :py:obj:`mount_prefix`
+- :py:obj:`dl_prefix`
+- :py:obj:`search_dir`

-``mount_prefix``:
-  Location where the file hierarchy is mounted on your *local* filesystem.
-
-``dl_prefix``:
-  Location where the file hierarchy as indexed by recoll can be reached.
-
-``search_dir``:
-  Part of the indexed file hierarchy to be search, if empty the full domain is
-  searched.
-
 Example
 =======

-Scenario:
+Example scenario:

 #. Recoll indexes a local filesystem mounted in ``/export/documents/reference``,
 #. the Recoll search interface can be reached at https://recoll.example.org/ and

@@ -37,107 +26,131 @@ Scenario:

 .. code:: yaml

-  base_url: https://recoll.example.org/
+  base_url: https://recoll.example.org
   mount_prefix: /export/documents
   dl_prefix: https://download.example.org
-  search_dir: ''
+  search_dir: ""

 Implementations
 ===============

 """
+import typing as t

 from datetime import date, timedelta
-from json import loads
-from urllib.parse import urlencode, quote
+from urllib.parse import urlencode

+from searx.result_types import EngineResults
+from searx.utils import html_to_text
+
+if t.TYPE_CHECKING:
+    from searx.extended_types import SXNG_Response
+    from searx.search.processors import OnlineParams
+

 # about
 about = {
     "website": None,
-    "wikidata_id": 'Q15735774',
-    "official_api_documentation": 'https://www.lesbonscomptes.com/recoll/',
+    "wikidata_id": "Q15735774",
+    "official_api_documentation": "https://www.lesbonscomptes.com/recoll/",
     "use_official_api": True,
     "require_api_key": False,
-    "results": 'JSON',
+    "results": "JSON",
 }

 # engine dependent config
 paging = True
 time_range_support = True

-# parameters from settings.yml
-base_url = None
-search_dir = ''
-mount_prefix = None
-dl_prefix = None
+base_url: str = ""
+"""Location where recoll-webui can be reached."""

-# embedded
-embedded_url = '<{ttype} controls height="166px" ' + 'src="{url}" type="{mtype}"></{ttype}>'
+mount_prefix: str = ""
+"""Location where the file hierarchy is mounted on your *local* filesystem."""
+
+dl_prefix: str = ""
+"""Location where the file hierarchy as indexed by recoll can be reached."""
+
+search_dir: str = ""
+"""Part of the indexed file hierarchy to be searched, if empty the full domain
+is searched."""
+
+_s2i: dict[str | None, int] = {"day": 1, "week": 7, "month": 30, "year": 365}


-# helper functions
-def get_time_range(time_range):
-    sw = {'day': 1, 'week': 7, 'month': 30, 'year': 365}  # pylint: disable=invalid-name
+def setup(engine_settings: dict[str, t.Any]) -> bool:
+    """Initialization of the Recoll engine, checks if the mandatory values are
+    configured.
+    """
+    missing: list[str] = []
+    for cfg_name in ["base_url", "mount_prefix", "dl_prefix"]:
+        if not engine_settings.get(cfg_name):
+            missing.append(cfg_name)
+    if missing:
+        logger.error("missing recoll configuration: %s", missing)
+        return False

-    offset = sw.get(time_range, 0)
+    if engine_settings["base_url"].endswith("/"):
+        engine_settings["base_url"] = engine_settings["base_url"][:-1]
+    return True
+
+
+def search_after(time_range: str | None) -> str:
+    offset = _s2i.get(time_range, 0)
     if not offset:
-        return ''
+        return ""
     return (date.today() - timedelta(days=offset)).isoformat()


-# do search-request
-def request(query, params):
-    search_after = get_time_range(params['time_range'])
-    search_url = base_url + 'json?{query}&highlight=0'
-    params['url'] = search_url.format(
-        query=urlencode({'query': query, 'page': params['pageno'], 'after': search_after, 'dir': search_dir})
-    )
-
-    return params
+def request(query: str, params: "OnlineParams") -> None:
+    args = {
+        "query": query,
+        "page": params["pageno"],
+        "after": search_after(params["time_range"]),
+        "dir": search_dir,
+        "highlight": 0,
+    }
+    params["url"] = f"{base_url}/json?{urlencode(args)}"


-# get response from search-request
-def response(resp):
-    results = []
+def response(resp: "SXNG_Response") -> EngineResults:

-    response_json = loads(resp.text)
+    res = EngineResults()
+    json_data = resp.json()

-    if not response_json:
-        return []
+    if not json_data:
+        return res

-    for result in response_json.get('results', []):
-        title = result['label']
-        url = result['url'].replace('file://' + mount_prefix, dl_prefix)
-        content = '{}'.format(result['snippet'])
+    for result in json_data.get("results", []):

-        # append result
-        item = {'url': url, 'title': title, 'content': content, 'template': 'files.html'}
+        url = result.get("url", "").replace("file://" + mount_prefix, dl_prefix)

-        if result['size']:
-            item['size'] = int(result['size'])
-
-        for parameter in ['filename', 'abstract', 'author', 'mtype', 'time']:
-            if result[parameter]:
-                item[parameter] = result[parameter]
+        mtype = subtype = result.get("mtype", "")
+        if mtype:
+            mtype, subtype = (mtype.split("/", 1) + [""])[:2]

-        # facilitate preview support for known mime types
-        if 'mtype' in result and '/' in result['mtype']:
-            (mtype, subtype) = result['mtype'].split('/')
-            item['mtype'] = mtype
-            item['subtype'] = subtype
+        thumbnail = embedded = ""
+        if mtype in ["audio", "video"]:
+            embedded = url
+        if mtype in ["image"] and subtype in ["bmp", "gif", "jpeg", "png"]:
+            thumbnail = url

-        if mtype in ['audio', 'video']:
-            item['embedded'] = embedded_url.format(
-                ttype=mtype, url=quote(url.encode('utf8'), '/:'), mtype=result['mtype']
-            )
+        # remove HTML from snippet
+        content = html_to_text(result.get("snippet", ""))

-        if mtype in ['image'] and subtype in ['bmp', 'gif', 'jpeg', 'png']:
-            item['thumbnail'] = url
-
-        results.append(item)
-
-    if 'nres' in response_json:
-        results.append({'number_of_results': response_json['nres']})
-
-    return results
+        res.add(
+            res.types.File(
+                title=result.get("label", ""),
+                url=url,
+                content=content,
+                size=result.get("size", ""),
+                filename=result.get("filename", ""),
+                abstract=result.get("abstract", ""),
+                author=result.get("author", ""),
+                mtype=mtype,
+                subtype=subtype,
+                time=result.get("time", ""),
+                embedded=embedded,
+                thumbnail=thumbnail,
+            )
+        )
+    return res

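For illustration: with base_url = "https://recoll.example.org", an empty
search_dir and time_range = "week", search_after() returns an ISO date seven
days back, so the rewritten request() builds a URL along these lines (the date
is of course relative to "today"):

    # https://recoll.example.org/json?query=foo&page=1&after=2025-11-01&dir=&highlight=0
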
@@ -17,7 +17,6 @@ The engine has the following additional settings:
       shortcut: reu
       sort_order: "relevance"

-
 Implementations
 ===============

@@ -26,6 +25,7 @@ Implementations
 from json import dumps
 from urllib.parse import quote_plus
 from datetime import datetime, timedelta
+from dateutil import parser

 from searx.result_types import EngineResults

@@ -76,15 +76,62 @@ def request(query, params):
 def response(resp) -> EngineResults:
     res = EngineResults()

-    for result in resp.json().get("result", {}).get("articles", []):
+    resp_json = resp.json()
+    if not resp_json.get("result"):
+        return res
+
+    for result in resp_json["result"].get("articles", []):
         res.add(
             res.types.MainResult(
                 url=base_url + result["canonical_url"],
                 title=result["web"],
                 content=result["description"],
-                thumbnail=result.get("thumbnail", {}).get("url", ""),
+                thumbnail=resize_url(result.get("thumbnail", {}), height=80),
                 metadata=result.get("kicker", {}).get("name"),
-                publishedDate=datetime.fromisoformat(result["display_time"]),
+                publishedDate=parser.isoparse(result["display_time"]),
             )
         )
     return res


+def resize_url(thumbnail: dict[str, str], width: int = 0, height: int = 0) -> str:
+    """Generates a URL for a Reuters thumbnail with the dimensions *width* and
+    *height*.  If no URL can be generated from the *thumbnail data*, an empty
+    string will be returned.
+
+    width: default is *unset* (``0``)
+      Image width in pixels (negative values are ignored).  If only width is
+      specified, the height matches the original aspect ratio.
+
+    height: default is *unset* (``0``)
+      Image height in pixels (negative values are ignored).  If only height is
+      specified, the width matches the original aspect ratio.
+
+    The file size of a full-size image is usually several MB; when reduced to a
+    height of, for example, 80 points, only a few KB remain!
+
+    Fields of the *thumbnail data* (``result.articles.[<int>].thumbnail``):
+
+    thumbnail.url:
+      Is a full-size image (>MB).
+
+    thumbnail.width & .height:
+      Dimensions of the full-size image.
+
+    thumbnail.resizer_url:
+      Reuters has a *resizer* `REST-API for the images`_; this is the URL of
+      the service.  This URL includes the ``&auth`` argument, other arguments
+      are ``&width=<int>`` and ``&height=<int>``.
+
+    .. _REST-API for the images:
+       https://dev.arcxp.com/photo-center/image-resizer/resizer-v2-how-to-transform-images/#query-parameters
+    """
+
+    url = thumbnail.get("resizer_url")
+    if not url:
+        return ""
+    if int(width) > 0:
+        url += f"&width={int(width)}"
+    if int(height) > 0:
+        url += f"&height={int(height)}"
+    return url

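A minimal usage sketch of resize_url() with hypothetical thumbnail data -- the
resizer_url already carries the "&auth" argument, so the dimension is simply
appended:

    # resize_url({"resizer_url": "https://www.reuters.com/resizer/v2/ABC?auth=xyz"}, height=80)
    # -> "https://www.reuters.com/resizer/v2/ABC?auth=xyz&height=80"
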
@@ -66,7 +66,7 @@ def setup(engine_settings: dict[str, t.Any]) -> bool:
 def get_ui_version() -> str:
     ret_val: str = CACHE.get("X-S2-UI-Version")
     if not ret_val:
-        resp = get(base_url)
+        resp = get(base_url, timeout=3)
         if not resp.ok:
             raise RuntimeError("Can't determine Semantic Scholar UI version")

@@ -27,7 +27,7 @@ base_url = 'https://search.seznam.cz/'


 def request(query, params):
-    response_index = get(base_url, headers=params['headers'], raise_for_httperror=True)
+    response_index = get(base_url, headers=params['headers'], raise_for_httperror=True, timeout=3)
     dom = html.fromstring(response_index.text)

     url_params = {

@@ -124,7 +124,7 @@ def get_client_id() -> str | None:

     client_id = ""
     url = "https://soundcloud.com"
-    resp = http_get(url, timeout=10)
+    resp = http_get(url, timeout=3)

     if not resp.ok:
         logger.error("init: GET %s failed", url)

searx/engines/sourcehut.py (new file, 90 lines)
@@ -0,0 +1,90 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
"""Engine to search in the collaborative software platform SourceHut_.

.. _SourceHut: https://sourcehut.org/

Configuration
=============

You can configure the following setting:

- :py:obj:`sourcehut_sort_order`

.. code:: yaml

  - name: sourcehut
    shortcut: srht
    engine: sourcehut
    # sourcehut_sort_order: longest-active

Implementations
===============

"""

import typing as t

from urllib.parse import urlencode
from lxml import html

from searx.utils import eval_xpath, eval_xpath_list, extract_text, searxng_useragent
from searx.result_types import EngineResults

if t.TYPE_CHECKING:
    from searx.extended_types import SXNG_Response
    from searx.search.processors import OnlineParams


about = {
    "website": "https://sourcehut.org",
    "wikidata_id": "Q78514485",
    "official_api_documentation": "https://man.sr.ht/",
    "use_official_api": False,
    "require_api_key": False,
    "results": "HTML",
}

categories = ["it", "repos"]
paging = True

base_url: str = "https://sr.ht/projects"
"""Browse public projects."""

sourcehut_sort_order: str = "recently-updated"
"""The sort order of the results.  Possible values:

- ``recently-updated``
- ``longest-active``
"""


def request(query: str, params: "OnlineParams") -> None:

    args = {"search": query, "page": params["pageno"], "sort": sourcehut_sort_order}
    params["url"] = f"{base_url}?{urlencode(args)}"

    # standard user agents are blocked by 'go-away', a FOSS bot detection tool
    params["headers"]["User-Agent"] = searxng_useragent()


def response(resp: "SXNG_Response") -> EngineResults:

    res = EngineResults()
    doc = html.fromstring(resp.text)

    for item in eval_xpath_list(doc, "(//div[@class='event-list'])[1]/div[contains(@class, 'event')]"):
        res.add(
            res.types.LegacyResult(
                template="packages.html",
                url=base_url + (extract_text(eval_xpath(item, "./h4/a[2]/@href")) or ""),
                title=extract_text(eval_xpath(item, "./h4")),
                package_name=extract_text(eval_xpath(item, "./h4/a[2]")),
                content=extract_text(eval_xpath(item, "./p")),
                maintainer=(extract_text(eval_xpath(item, "./h4/a[1]")) or "").removeprefix("~"),
                tags=[
                    tag.removeprefix("#") for tag in eval_xpath_list(item, "./div[contains(@class, 'tags')]/a/text()")
                ],
            )
        )
    return res

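For illustration: with the defaults above, page 1 of a query "searx" is fetched
from a URL along these lines (a rendering of the urlencode() output):

    # https://sr.ht/projects?search=searx&page=1&sort=recently-updated
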
@@ -404,6 +404,10 @@ def _get_image_result(result) -> dict[str, t.Any] | None:
 def response(resp):
     categ = startpage_categ.capitalize()
     results_raw = '{' + extr(resp.text, f"React.createElement(UIStartpage.AppSerp{categ}, {{", '}})') + '}}'

+    if resp.headers.get('Location', '').startswith("https://www.startpage.com/sp/captcha"):
+        raise SearxEngineCaptchaException()
+
     results_json = loads(results_raw)
     results_obj = results_json.get('render', {}).get('presenter', {}).get('regions', {})

@@ -73,7 +73,6 @@ def request(query, params):
     params['headers'].update(
         {
             'Connection': 'keep-alive',
-            'Accept-Encoding': 'gzip, defalte, br',
             'Host': 'tineye.com',
             'DNT': '1',
             'TE': 'trailers',

@@ -1,102 +1,208 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-"""Wikimedia Commons (images)"""
+"""`Wikimedia Commons`_ is a collection of more than 120 million freely usable
+media files to which anyone can contribute.
+
+This engine uses the `MediaWiki query API`_, with which engines can be
+configured for searching images, videos, audio, and other files in the
+Wikimedia.
+
+.. _MediaWiki query API: https://commons.wikimedia.org/w/api.php?action=help&modules=query
+.. _Wikimedia Commons: https://commons.wikimedia.org/
+
+
+Configuration
+=============
+
+The engine has the following additional settings:
+
+.. code:: yaml
+
+  - name: wikicommons.images
+    engine: wikicommons
+    wc_search_type: image
+
+  - name: wikicommons.videos
+    engine: wikicommons
+    wc_search_type: video
+
+  - name: wikicommons.audio
+    engine: wikicommons
+    wc_search_type: audio
+
+  - name: wikicommons.files
+    engine: wikicommons
+    wc_search_type: file
+
+
+Implementations
+===============
+
+"""
+
+import typing as t
 import datetime
-
-from urllib.parse import urlencode
+import pathlib
+from urllib.parse import urlencode, unquote

 from searx.utils import html_to_text, humanize_bytes
+from searx.result_types import EngineResults
+
+if t.TYPE_CHECKING:
+    from searx.extended_types import SXNG_Response
+    from searx.search.processors import OnlineParams

 # about
 about = {
-    "website": 'https://commons.wikimedia.org/',
-    "wikidata_id": 'Q565',
-    "official_api_documentation": 'https://commons.wikimedia.org/w/api.php',
+    "website": "https://commons.wikimedia.org/",
+    "wikidata_id": "Q565",
+    "official_api_documentation": "https://commons.wikimedia.org/w/api.php",
     "use_official_api": True,
     "require_api_key": False,
-    "results": 'JSON',
+    "results": "JSON",
 }
-categories = ['images']
-search_type = 'images'
-
-base_url = "https://commons.wikimedia.org"
-search_prefix = (
-    '?action=query'
-    '&format=json'
-    '&generator=search'
-    '&gsrnamespace=6'
-    '&gsrprop=snippet'
-    '&prop=info|imageinfo'
-    '&iiprop=url|size|mime'
-    '&iiurlheight=180'  # needed for the thumb url
-)
+categories: list[str] = []
 paging = True
 number_of_results = 10

-search_types = {
-    'images': 'bitmap|drawing',
-    'videos': 'video',
-    'audio': 'audio',
-    'files': 'multimedia|office|archive|3d',
+wc_api_url = "https://commons.wikimedia.org/w/api.php"
+wc_search_type: str = ""
+
+SEARCH_TYPES: dict[str, str] = {
+    "image": "bitmap|drawing",
+    "video": "video",
+    "audio": "audio",
+    "file": "multimedia|office|archive|3d",
 }
+# FileType = t.Literal["bitmap", "drawing", "video", "audio", "multimedia", "office", "archive", "3d"]
+# FILE_TYPES = list(t.get_args(FileType))


-def request(query, params):
-    language = 'en'
-    if params['language'] != 'all':
-        language = params['language'].split('-')[0]
+def setup(engine_settings: dict[str, t.Any]) -> bool:
+    """Initialization of the Wikimedia engine, checks if the value configured in
+    :py:obj:`wc_search_type` is valid."""

-    if search_type not in search_types:
-        raise ValueError(f"Unsupported search type: {search_type}")
+    if engine_settings.get("wc_search_type") not in SEARCH_TYPES:
+        logger.error(
+            "wc_search_type: %s isn't a valid file type (%s)",
+            engine_settings.get("wc_search_type"),
+            ",".join(SEARCH_TYPES.keys()),
+        )
+        return False
+    return True

-    filetype = search_types[search_type]

+def request(query: str, params: "OnlineParams") -> None:
+    uselang: str = "en"
+    if params["searxng_locale"] != "all":
+        uselang = params["searxng_locale"].split("-")[0]
+    filetype = SEARCH_TYPES[wc_search_type]
     args = {
-        'uselang': language,
-        'gsrlimit': number_of_results,
-        'gsroffset': number_of_results * (params["pageno"] - 1),
-        'gsrsearch': f"filetype:{filetype} {query}",
+        # https://commons.wikimedia.org/w/api.php
+        "format": "json",
+        "uselang": uselang,
+        "action": "query",
+        # https://commons.wikimedia.org/w/api.php?action=help&modules=query
+        "prop": "info|imageinfo",
+        # generator (gsr options) https://commons.wikimedia.org/w/api.php?action=help&modules=query%2Bsearch
+        "generator": "search",
+        "gsrnamespace": "6",  # https://www.mediawiki.org/wiki/Help:Namespaces#Renaming_namespaces
+        "gsrprop": "snippet",
+        "gsrlimit": number_of_results,
+        "gsroffset": number_of_results * (params["pageno"] - 1),
+        "gsrsearch": f"filetype:{filetype} {query}",
+        # imageinfo: https://commons.wikimedia.org/w/api.php?action=help&modules=query%2Bimageinfo
+        "iiprop": "url|size|mime",
+        "iiurlheight": "180",  # needed for the thumb url
     }

-    params["url"] = f"{base_url}/w/api.php{search_prefix}&{urlencode(args, safe=':|')}"
-    return params
+    params["url"] = f"{wc_api_url}?{urlencode(args, safe=':|')}"


-def response(resp):
-    results = []
-    json = resp.json()
+def response(resp: "SXNG_Response") -> EngineResults:

-    if not json.get("query", {}).get("pages"):
-        return results
-    for item in json["query"]["pages"].values():
+    res = EngineResults()
+    json_data = resp.json()
+    pages = json_data.get("query", {}).get("pages", {}).values()
+
+    for item in pages:
+
         if not item.get("imageinfo", []):
             continue
         imageinfo = item["imageinfo"][0]
-        title = item["title"].replace("File:", "").rsplit('.', 1)[0]
-        result = {
-            'url': imageinfo["descriptionurl"],
-            'title': title,
-            'content': html_to_text(item["snippet"]),
-        }

-        if search_type == "images":
-            result['template'] = 'images.html'
-            result['img_src'] = imageinfo["url"]
-            result['thumbnail_src'] = imageinfo["thumburl"]
-            result['resolution'] = f'{imageinfo["width"]} x {imageinfo["height"]}'
-        else:
-            result['thumbnail'] = imageinfo["thumburl"]
+        title: str = item["title"].replace("File:", "").rsplit(".", 1)[0]
+        content = html_to_text(item["snippet"])

-        if search_type == "videos":
-            result['template'] = 'videos.html'
-            if imageinfo.get('duration'):
-                result['length'] = datetime.timedelta(seconds=int(imageinfo['duration']))
-            result['iframe_src'] = imageinfo['url']
-        elif search_type == "files":
-            result['template'] = 'files.html'
-            result['metadata'] = imageinfo['mime']
-            result['size'] = humanize_bytes(imageinfo['size'])
-        elif search_type == "audio":
-            result['iframe_src'] = imageinfo['url']
+        url: str = imageinfo["descriptionurl"]
+        media_url: str = imageinfo["url"]
+        mimetype: str = imageinfo["mime"]
+        thumbnail: str = imageinfo["thumburl"]
+        size = imageinfo.get("size")
+        if size:
+            size = humanize_bytes(size)

-        results.append(result)
+        duration = None
+        seconds: str = imageinfo.get("duration")
+        if seconds:
+            try:
+                duration = datetime.timedelta(seconds=int(seconds))
+            except OverflowError:
+                pass

-    return results
+        if wc_search_type == "file":
+            res.add(
+                res.types.File(
+                    title=title,
+                    url=url,
+                    content=content,
+                    size=size,
+                    mimetype=mimetype,
+                    filename=unquote(pathlib.Path(media_url).name),
+                    embedded=media_url,
+                    thumbnail=thumbnail,
+                )
+            )
+            continue
+
+        if wc_search_type == "image":
+            res.add(
+                res.types.LegacyResult(
+                    template="images.html",
+                    title=title,
+                    url=url,
+                    content=content,
+                    img_src=imageinfo["url"],
+                    thumbnail_src=thumbnail,
+                    resolution=f"{imageinfo['width']} x {imageinfo['height']}",
+                    img_format=imageinfo["mime"],
+                    filesize=size,
+                )
+            )
+            continue
+
+        if wc_search_type == "video":
+            res.add(
+                res.types.LegacyResult(
+                    template="videos.html",
+                    title=title,
+                    url=url,
+                    content=content,
+                    iframe_src=media_url,
+                    length=duration,
+                )
+            )
+            continue
+
+        if wc_search_type == "audio":
+            res.add(
+                res.types.MainResult(
+                    template="default.html",
+                    title=title,
+                    url=url,
+                    content=content,
+                    audio_src=media_url,
+                    length=duration,
+                )
+            )
+            continue
+
+    return res

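For illustration, the request() above with wc_search_type = "image" and the
query "tux" produces a MediaWiki API URL roughly like this (line-wrapped here;
urlencode() additionally escapes the space in gsrsearch):

    # https://commons.wikimedia.org/w/api.php?format=json&uselang=en&action=query
    #   &prop=info|imageinfo&generator=search&gsrnamespace=6&gsrprop=snippet
    #   &gsrlimit=10&gsroffset=0&gsrsearch=filetype:bitmap|drawing+tux
    #   &iiprop=url|size|mime&iiurlheight=180
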
@@ -96,7 +96,7 @@ search_type = 'text'
 ``video`` are not yet implemented (Pull-Requests are welcome).
 """

-base_url: list[str] | str | None = None
+base_url: list[str] | str = []
 """The value is a URL or a list of URLs.  In the latter case an instance will
 be selected randomly.
 """

@@ -28,6 +28,20 @@ search_type = ""
 base_url_web = 'https://yandex.com/search/site/'
 base_url_images = 'https://yandex.com/images/search'

+# Supported languages
+yandex_supported_langs = [
+    "ru",  # Russian
+    "en",  # English
+    "be",  # Belarusian
+    "fr",  # French
+    "de",  # German
+    "id",  # Indonesian
+    "kk",  # Kazakh
+    "tt",  # Tatar
+    "tr",  # Turkish
+    "uk",  # Ukrainian
+]
+
 results_xpath = '//li[contains(@class, "serp-item")]'
 url_xpath = './/a[@class="b-serp-item__title-link"]/@href'
 title_xpath = './/h3[@class="b-serp-item__title"]/a[@class="b-serp-item__title-link"]/span'

@@ -35,7 +49,7 @@ content_xpath = './/div[@class="b-serp-item__content"]//div[@class="b-serp-item_


 def catch_bad_response(resp):
-    if resp.url.path.startswith('/showcaptcha'):
+    if resp.headers.get('x-yandex-captcha') == 'captcha':
         raise SearxEngineCaptchaException()


@@ -48,6 +62,10 @@ def request(query, params):
         "searchid": "3131712",
     }

+    lang = params["language"].split("-")[0]
+    if lang in yandex_supported_langs:
+        query_params_web["lang"] = lang
+
     query_params_images = {
         "text": query,
         "uinfo": "sw-1920-sh-1080-ww-1125-wh-999",

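A short sketch of the language gating added above:

    # params["language"] = "de-DE"  -> lang = "de"  (listed)
    #   query_params_web["lang"] = "de"
    # params["language"] = "es-ES"  -> lang = "es"  (not listed) -> no "lang" parameter
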
@@ -30,7 +30,7 @@ import httpx

 if typing.TYPE_CHECKING:
     import searx.preferences
     import searx.results
-    from searx.search.processors import ParamTypes
+    from searx.search.processors import OnlineParamTypes


 class SXNG_Request(flask.Request):

@@ -83,4 +83,4 @@ class SXNG_Response(httpx.Response):
     """

     ok: bool
-    search_params: "ParamTypes"
+    search_params: "OnlineParamTypes"

@@ -17,10 +17,6 @@

 """

-# Struct fields aren't discovered in Python 3.14
-# - https://github.com/searxng/searxng/issues/5284
-from __future__ import annotations
-
 import typing as t

 import os

@@ -1,9 +1,6 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
 # pylint: disable=missing-module-docstring

-# Struct fields aren't discovered in Python 3.14
-# - https://github.com/searxng/searxng/issues/5284
-from __future__ import annotations
-
 import pathlib
 import msgspec

@@ -1,9 +1,6 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
 """Implementations for a favicon proxy"""

-# Struct fields aren't discovered in Python 3.14
-# - https://github.com/searxng/searxng/issues/5284
-from __future__ import annotations
-
 from typing import Callable

@@ -23,6 +23,7 @@ __all__ = [
     "WeatherAnswer",
     "Code",
     "Paper",
+    "File",
 ]

 import typing as t

@@ -33,6 +34,7 @@ from .answer import AnswerSet, Answer, Translations, WeatherAnswer
 from .keyvalue import KeyValue
 from .code import Code
 from .paper import Paper
+from .file import File


 class ResultList(list[Result | LegacyResult], abc.ABC):

@@ -47,6 +49,7 @@ class ResultList(list[Result | LegacyResult], abc.ABC):
     KeyValue = KeyValue
     Code = Code
     Paper = Paper
+    File = File
     MainResult = MainResult
     Result = Result
     Translations = Translations

@@ -16,10 +16,6 @@
     :members:
 """

-# Struct fields aren't discovered in Python 3.14
-# - https://github.com/searxng/searxng/issues/5284
-from __future__ import annotations
-
 __all__ = ["Result"]

 import typing as t

@@ -27,7 +23,6 @@ import typing as t
 import re
 import urllib.parse
 import warnings
-import time
 import datetime

 from collections.abc import Callable

@@ -236,13 +231,6 @@ class Result(msgspec.Struct, kw_only=True):
     url: str | None = None
     """A link related to this *result*"""

-    template: str = "default.html"
-    """Name of the template used to render the result.
-
-    By default :origin:`result_templates/default.html
-    <searx/templates/simple/result_templates/default.html>` is used.
-    """
-
     engine: str | None = ""
     """Name of the engine *this* result comes from.  In case of *plugins* a
     prefix ``plugin:`` is set, in case of *answerer* prefix ``answerer:`` is

@@ -350,6 +338,13 @@ class Result(msgspec.Struct, kw_only=True):
 class MainResult(Result):  # pylint: disable=missing-class-docstring
     """Base class of all result types displayed in :ref:`area main results`."""

+    template: str = "default.html"
+    """Name of the template used to render the result.
+
+    By default :origin:`result_templates/default.html
+    <searx/templates/simple/result_templates/default.html>` is used.
+    """
+
     title: str = ""
     """Link title of the result item."""

@@ -359,6 +354,12 @@ class MainResult(Result):  # pylint: disable=missing-class-docstring
     img_src: str = ""
     """URL of an image that is displayed in the result item."""

+    iframe_src: str = ""
+    """URL of an embedded ``<iframe>`` / the frame is collapsible."""
+
+    audio_src: str = ""
+    """URL of an embedded ``<audio controls>``."""
+
     thumbnail: str = ""
     """URL of a thumbnail that is displayed in the result item."""

@@ -372,7 +373,7 @@ class MainResult(Result):  # pylint: disable=missing-class-docstring
     completely eliminated.
     """

-    length: time.struct_time | None = None
+    length: datetime.timedelta | None = None
     """Playing duration in seconds."""

     views: str = ""

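Illustrative only: with the change above, engines pass a datetime.timedelta
(rather than a time.struct_time) for the playing duration, e.g.

    # import datetime
    # MainResult(title="clip", url="https://example.org", length=datetime.timedelta(seconds=90))
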
@@ -28,9 +28,6 @@ template.
 """
 # pylint: disable=too-few-public-methods

-# Struct fields aren't discovered in Python 3.14
-# - https://github.com/searxng/searxng/issues/5284
-from __future__ import annotations
-
 __all__ = ["AnswerSet", "Answer", "Translations", "WeatherAnswer"]

@@ -14,10 +14,6 @@ template. For highlighting the code passages, Pygments_ is used.
 """
 # pylint: disable=too-few-public-methods, disable=invalid-name

-# Struct fields aren't discovered in Python 3.14
-# - https://github.com/searxng/searxng/issues/5284
-from __future__ import annotations
-
 __all__ = ["Code"]

 import typing as t

searx/result_types/file.py (new file, 94 lines)
@@ -0,0 +1,94 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
"""
Typification of the *file* results.  Results of this type are rendered in
the :origin:`file.html <searx/templates/simple/result_templates/file.html>`
template.

----

.. autoclass:: File
   :members:
   :show-inheritance:

"""
# pylint: disable=too-few-public-methods


__all__ = ["File"]

import typing as t
import mimetypes

from ._base import MainResult


@t.final
class File(MainResult, kw_only=True):
    """Class for results of type *file*"""

    template: str = "file.html"

    filename: str = ""
    """Name of the file."""

    size: str = ""
    """File size in human readable notation (e.g. ``MB`` for a 1024 * 1024
    bytes file)."""

    time: str = ""
    """Indication of a time, such as the date of the last modification or the
    date of creation.  This is a simple string, the *date* of which can be
    freely chosen according to the context."""

    mimetype: str = ""
    """Mimetype/subtype of the file.  For ``audio`` and ``video``, a URL can be
    passed in the :py:obj:`File.embedded` field to embed the referenced media
    in the result.  If no value is specified, the MIME type is determined from
    ``self.filename`` or, alternatively, from ``self.embedded`` (if either of
    the two values is set)."""

    abstract: str = ""
    """Abstract of the file."""

    author: str = ""
    """Author of the file."""

    embedded: str = ""
    """URL of an embedded media type (audio or video) / is collapsible."""

    mtype: str = ""
    """Used for displaying :py:obj:`File.embedded`.  Its value is automatically
    populated from the base type of :py:obj:`File.mimetype`, and can be
    explicitly set to enforce e.g. ``audio`` or ``video`` when the mimetype is
    something like "application/ogg" but the content is known to be, for
    example, a video."""

    subtype: str = ""
    """Used for displaying :py:obj:`File.embedded`.  Its value is automatically
    populated from the subtype of :py:obj:`File.mimetype`, and can be
    explicitly set to enforce a subtype for the :py:obj:`File.embedded`
    element."""

    def __post_init__(self):
        super().__post_init__()

        if not self.mtype or not self.subtype:

            fn = self.filename or self.embedded
            if not self.mimetype and fn:
                self.mimetype = mimetypes.guess_type(fn, strict=False)[0] or ""

            mtype, subtype = (self.mimetype.split("/", 1) + [""])[:2]

            if not self.mtype:
                # I don't know why, but the ogg video stream is not displayed,
                # maybe https://github.com/videojs/video.js can help?
                if self.embedded.endswith(".ogv"):
                    self.mtype = "video"
                elif self.embedded.endswith(".oga"):
                    self.mtype = "audio"
                else:
                    self.mtype = mtype

            if not self.subtype:
                self.subtype = subtype

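A minimal usage sketch: when only a filename is given, __post_init__() derives
mimetype via the stdlib mimetypes module and splits it into mtype/subtype
(values assume a standard mimetypes table):

    # f = File(title="Talk", url="https://example.org/talk", filename="talk.mp4")
    # f.mimetype -> "video/mp4"
    # f.mtype    -> "video"
    # f.subtype  -> "mp4"
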
@@ -13,9 +13,6 @@ template.
 """
 # pylint: disable=too-few-public-methods

-# Struct fields aren't discovered in Python 3.14
-# - https://github.com/searxng/searxng/issues/5284
-from __future__ import annotations
-
 __all__ = ["KeyValue"]

@@ -21,10 +21,6 @@ Related topics:
 """
 # pylint: disable=too-few-public-methods, disable=invalid-name

-# Struct fields aren't discovered in Python 3.14
-# - https://github.com/searxng/searxng/issues/5284
-from __future__ import annotations
-
 __all__ = ["Paper"]

 import typing as t

@@ -22,7 +22,7 @@ from searx.network import initialize as initialize_network, check_network_config
 from searx.results import ResultContainer
 from searx.search.checker import initialize as initialize_checker
 from searx.search.processors import PROCESSORS
-
+from searx.search.processors.abstract import RequestParams

 if t.TYPE_CHECKING:
     from .models import SearchQuery

@@ -79,16 +79,20 @@ class Search:
         return bool(results)

     # do search-request
-    def _get_requests(self) -> tuple[list[tuple[str, str, dict[str, t.Any]]], int]:
+    def _get_requests(self) -> tuple[list[tuple[str, str, RequestParams]], float]:
         # init vars
-        requests: list[tuple[str, str, dict[str, t.Any]]] = []
+        requests: list[tuple[str, str, RequestParams]] = []

         # max of all selected engine timeout
         default_timeout = 0

         # start search-request for all selected engines
         for engineref in self.search_query.engineref_list:
-            processor = PROCESSORS[engineref.name]
+            processor = PROCESSORS.get(engineref.name)
+            if not processor:
+                # engine does not exist; either not yet, or the engine's
+                # 'init' method failed and the engine was not registered
+                continue

             # stop the request now if the engine is suspended
             if processor.extend_container_if_suspended(self.result_container):

@@ -133,7 +137,7 @@ class Search:

         return requests, actual_timeout

-    def search_multiple_requests(self, requests: list[tuple[str, str, dict[str, t.Any]]]):
+    def search_multiple_requests(self, requests: list[tuple[str, str, RequestParams]]):
         # pylint: disable=protected-access
         search_id = str(uuid4())

@@ -82,7 +82,6 @@ def _download_and_check_if_image(image_url: str) -> bool:
             'User-Agent': gen_useragent(),
             'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
             'Accept-Language': 'en-US;q=0.5,en;q=0.3',
-            'Accept-Encoding': 'gzip, deflate, br',
             'DNT': '1',
             'Connection': 'keep-alive',
             'Upgrade-Insecure-Requests': '1',

@@ -51,7 +51,6 @@ class ProcessorMap(dict[str, EngineProcessor]):
         eng_name: str = eng_settings["name"]

         if eng_settings.get("inactive", False) is True:
-            logger.info("Engine of name '%s' is inactive.", eng_name)
             continue

         eng_obj = engines.engines.get(eng_name)
Some files were not shown because too many files have changed in this diff.