Mirror of https://github.com/searxng/searxng.git (synced 2025-12-22 19:50:00 +00:00)
Compare commits
151 Commits
dependabot
...
8bf600cc62
Commit SHA1s:
8bf600cc62, aa607a379a, 6ebd3f4d35, 9072c77aea, c32b8100c3, f93257941e, 896863802e, 920b40253c,
07440e3332, 1827dfc071, c46aecd4e3, 21bf8a6973, f5475ba782, 265f15498c, 666409ec7e, b719d559b6,
9d3ec9a2a2, 74ec225ad1, b5a1a092f1, ddc6d68114, 32eb84d6d3, da6c635ea2, e34c356e64, 7017393647,
aa49f5b933, 3f91ac47e6, 8c631b92ce, 0ebac144f5, 5e0e1c6b31, 3c7545c6ce, aba839195b, 1f6ea41272,
5450d22796, 1174fde1f3, fb089ae297, ab8224c939, c954e71f87, cbc04a839a, cb4a5abc8c, 07ff6e3ccc,
cdaab944b4, 6ecf32fd4a, 20de10df4e, 673c29efeb, c4abf40e6e, 39b9922609, 7018e6583b, b957e587da,
ebb9ea4571, 54a97e1043, 0ee78c19dd, bcc7a5eb2e, 2313b972a3, 989b49335c, 3f30831640, 5fcee9bc30,
2f0e52d6eb, c0d69cec4e, c852b9a90a, b876d0bed0, e245cade25, 7c223b32a7, 33a176813d, 20ec01c5f7,
6376601ba1, ca441f419c, 04e66a2bb4, b299386d3e, 21a4622f23, 041f457dfa, af111e413c, 431bf5d235,
576c8ca99c, 45a4b8ad1c, d14d695966, a2a47337cb, ba98030438, 1e200a1107, 7a1b959646, b9b46431be,
3f18c0f40f, 1cfbd32a1d, a15b594003, 24d27a7a21, 7af922c9df, b1918dd121, 1be19f8b58, 3763b4bff4,
52ffc4c7f4, 0245327fc5, b155e66fe5, 5712827703, 7ba53d302d, b8e4ebdc0c, b37d09557a, aa28af772c,
9c2b8f2f93, c48993452f, 6a2196c03d, dce383881d, 1ebedcbc17, 5d99877d8d, adc1a2a1ea, 43065c5026,
ea4a55fa57, d514dea5cc, 22e1d30017, 4ca75a0450, 50a4c653dc, b7f9b489c9, 2cdbbb249a, edfa71cdea,
8dacbbbb15, b770a46e1f, 2c880f6084, c41b769f97, e363db970c, 16293132e3, f70120b0b9, a8f3644cdc,
4295e758c0, 33e798b01b, d84ae96cf9, 9371658531, ee6d4f322f, 3725aef6f3, e840e3f960, a6bb1ecf87,
636738779e, 1d138c5968, 3e7e404fda, 602a73df9a, 57622793bf, 080f3a5f87, f54cf643b2, dd82d785ce,
f6cdd16449, 576d30ffcd, c34bb61284, 8baefcc21e, fc7d8b8be2, 5492de15bb, ced08e12aa, 613c1aa8eb,
899cf7e08a, 362cc13aeb, d28a1c434f, 21d0428cf2, f0dfe3cc0e, 0559b9bfcf, 37f7960266
.github/workflows/checker.yml (vendored, 8 changed lines)
@@ -15,7 +15,7 @@ permissions:
  contents: read

env:
-  PYTHON_VERSION: "3.13"
+  PYTHON_VERSION: "3.14"

jobs:
  search:
@@ -24,17 +24,17 @@ jobs:
    runs-on: ubuntu-24.04-arm
    steps:
      - name: Setup Python
-        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+        uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
        with:
          python-version: "${{ env.PYTHON_VERSION }}"

      - name: Checkout
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
        with:
          persist-credentials: "false"

      - name: Setup cache Python
-        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
        with:
          key: "python-${{ env.PYTHON_VERSION }}-${{ runner.arch }}-${{ hashFiles('./requirements*.txt') }}"
          restore-keys: "python-${{ env.PYTHON_VERSION }}-${{ runner.arch }}-"
.github/workflows/container.yml (vendored, 139 changed lines)
@@ -18,106 +18,34 @@ concurrency:
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
# Organization GHCR
|
||||
packages: read
|
||||
|
||||
env:
|
||||
PYTHON_VERSION: "3.13"
|
||||
PYTHON_VERSION: "3.14"
|
||||
|
||||
jobs:
|
||||
build-base:
|
||||
if: |
|
||||
(github.repository_owner == 'searxng' && github.event.workflow_run.conclusion == 'success')
|
||||
|| github.event_name == 'workflow_dispatch'
|
||||
name: Build base
|
||||
runs-on: ubuntu-24.04
|
||||
permissions:
|
||||
# Organization GHCR
|
||||
packages: write
|
||||
|
||||
steps:
|
||||
- if: github.repository_owner == 'searxng'
|
||||
name: Checkout
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
persist-credentials: "false"
|
||||
|
||||
- if: github.repository_owner == 'searxng'
|
||||
name: Get date
|
||||
id: date
|
||||
run: echo "date=$(date +'%Y%m%d')" >>$GITHUB_OUTPUT
|
||||
|
||||
- if: github.repository_owner == 'searxng'
|
||||
name: Check cache apko
|
||||
id: cache-apko
|
||||
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
with:
|
||||
# yamllint disable-line rule:line-length
|
||||
key: "apko-${{ steps.date.outputs.date }}-${{ hashFiles('./container/base.yml', './container/base-builder.yml') }}"
|
||||
path: "/tmp/.apko/"
|
||||
lookup-only: true
|
||||
|
||||
- if: github.repository_owner == 'searxng' && steps.cache-apko.outputs.cache-hit != 'true'
|
||||
name: Setup cache apko
|
||||
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
with:
|
||||
# yamllint disable-line rule:line-length
|
||||
key: "apko-${{ steps.date.outputs.date }}-${{ hashFiles('./container/base.yml', './container/base-builder.yml') }}"
|
||||
restore-keys: "apko-${{ steps.date.outputs.date }}-"
|
||||
path: "/tmp/.apko/"
|
||||
|
||||
- if: github.repository_owner == 'searxng' && steps.cache-apko.outputs.cache-hit != 'true'
|
||||
name: Setup apko
|
||||
run: |
|
||||
eval "$(/home/linuxbrew/.linuxbrew/bin/brew shellenv)"
|
||||
brew install apko
|
||||
|
||||
- if: github.repository_owner == 'searxng' && steps.cache-apko.outputs.cache-hit != 'true'
|
||||
name: Login to GHCR
|
||||
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
|
||||
with:
|
||||
registry: "ghcr.io"
|
||||
username: "${{ github.repository_owner }}"
|
||||
password: "${{ secrets.GITHUB_TOKEN }}"
|
||||
|
||||
- if: github.repository_owner == 'searxng' && steps.cache-apko.outputs.cache-hit != 'true'
|
||||
name: Build
|
||||
run: |
|
||||
eval "$(/home/linuxbrew/.linuxbrew/bin/brew shellenv)"
|
||||
|
||||
apko publish ./container/base.yml ghcr.io/${{ github.repository_owner }}/base:searxng \
|
||||
--cache-dir=/tmp/.apko/ \
|
||||
--sbom=false \
|
||||
--vcs=false \
|
||||
--log-level=debug
|
||||
|
||||
apko publish ./container/base-builder.yml ghcr.io/${{ github.repository_owner }}/base:searxng-builder \
|
||||
--cache-dir=/tmp/.apko/ \
|
||||
--sbom=false \
|
||||
--vcs=false \
|
||||
--log-level=debug
|
||||
|
||||
build:
|
||||
if: github.repository_owner == 'searxng' || github.event_name == 'workflow_dispatch'
|
||||
name: Build (${{ matrix.arch }})
|
||||
runs-on: ${{ matrix.os }}
|
||||
needs: build-base
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- arch: amd64
|
||||
march: amd64
|
||||
os: ubuntu-24.04
|
||||
emulation: false
|
||||
- arch: arm64
|
||||
march: arm64
|
||||
os: ubuntu-24.04-arm
|
||||
emulation: false
|
||||
- arch: armv7
|
||||
march: arm64
|
||||
os: ubuntu-24.04-arm
|
||||
emulation: true
|
||||
|
||||
permissions:
|
||||
# Organization GHCR
|
||||
packages: write
|
||||
|
||||
outputs:
|
||||
@@ -125,34 +53,64 @@ jobs:
|
||||
git_url: ${{ steps.build.outputs.git_url }}
|
||||
|
||||
steps:
|
||||
# yamllint disable rule:line-length
|
||||
- name: Setup podman
|
||||
env:
|
||||
PODMAN_VERSION: "v5.6.2"
|
||||
run: |
|
||||
# dpkg man-db trigger is very slow on GHA runners
|
||||
# https://github.com/actions/runner-images/issues/10977
|
||||
# https://github.com/actions/runner/issues/4030
|
||||
sudo rm -f /var/lib/man-db/auto-update
|
||||
|
||||
sudo apt-get purge -y podman runc crun conmon
|
||||
|
||||
curl -fsSLO "https://github.com/mgoltzsche/podman-static/releases/download/${{ env.PODMAN_VERSION }}/podman-linux-${{ matrix.march }}.tar.gz"
|
||||
curl -fsSLO "https://github.com/mgoltzsche/podman-static/releases/download/${{ env.PODMAN_VERSION }}/podman-linux-${{ matrix.march }}.tar.gz.asc"
|
||||
gpg --keyserver hkps://keyserver.ubuntu.com --recv-keys 0CCF102C4F95D89E583FF1D4F8B5AF50344BB503
|
||||
gpg --batch --verify "podman-linux-${{ matrix.march }}.tar.gz.asc" "podman-linux-${{ matrix.march }}.tar.gz"
|
||||
|
||||
tar -xzf "podman-linux-${{ matrix.march }}.tar.gz"
|
||||
sudo cp -rfv ./podman-linux-${{ matrix.march }}/etc/. /etc/
|
||||
sudo cp -rfv ./podman-linux-${{ matrix.march }}/usr/. /usr/
|
||||
|
||||
sudo sysctl -w kernel.apparmor_restrict_unprivileged_userns=0
|
||||
# yamllint enable rule:line-length
|
||||
|
||||
- name: Setup Python
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
with:
|
||||
python-version: "${{ env.PYTHON_VERSION }}"
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
with:
|
||||
persist-credentials: "false"
|
||||
fetch-depth: "0"
|
||||
|
||||
- name: Setup cache Python
|
||||
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
|
||||
with:
|
||||
key: "python-${{ env.PYTHON_VERSION }}-${{ runner.arch }}-${{ hashFiles('./requirements*.txt') }}"
|
||||
restore-keys: "python-${{ env.PYTHON_VERSION }}-${{ runner.arch }}-"
|
||||
path: "./local/"
|
||||
|
||||
- name: Setup cache container uv
|
||||
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
- name: Get date
|
||||
id: date
|
||||
run: echo "date=$(date +'%Y%m%d')" >>$GITHUB_OUTPUT
|
||||
|
||||
- name: Setup cache container
|
||||
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
|
||||
with:
|
||||
key: "container-uv-${{ matrix.arch }}-${{ hashFiles('./requirements*.txt') }}"
|
||||
restore-keys: "container-uv-${{ matrix.arch }}-"
|
||||
path: "/var/tmp/buildah-cache-1001/uv/"
|
||||
key: "container-${{ matrix.arch }}-${{ steps.date.outputs.date }}-${{ hashFiles('./requirements*.txt') }}"
|
||||
restore-keys: |
|
||||
"container-${{ matrix.arch }}-${{ steps.date.outputs.date }}-"
|
||||
"container-${{ matrix.arch }}-"
|
||||
path: "/var/tmp/buildah-cache-*/*"
|
||||
|
||||
- if: ${{ matrix.emulation }}
|
||||
name: Setup QEMU
|
||||
uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
|
||||
uses: docker/setup-qemu-action@c7c53464625b32c7a7e944ae62b3e17d2b600130 # v3.7.0
|
||||
|
||||
- name: Login to GHCR
|
||||
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
|
||||
@@ -187,13 +145,13 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
with:
|
||||
persist-credentials: "false"
|
||||
|
||||
- if: ${{ matrix.emulation }}
|
||||
name: Setup QEMU
|
||||
uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
|
||||
uses: docker/setup-qemu-action@c7c53464625b32c7a7e944ae62b3e17d2b600130 # v3.7.0
|
||||
|
||||
- name: Login to GHCR
|
||||
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
|
||||
@@ -217,12 +175,11 @@ jobs:
|
||||
- test
|
||||
|
||||
permissions:
|
||||
# Organization GHCR
|
||||
packages: write
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
with:
|
||||
persist-credentials: "false"
|
||||
|
||||
@@ -237,8 +194,8 @@ jobs:
|
||||
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
|
||||
with:
|
||||
registry: "docker.io"
|
||||
username: "${{ secrets.DOCKERHUB_USERNAME }}"
|
||||
password: "${{ secrets.DOCKERHUB_TOKEN }}"
|
||||
username: "${{ secrets.DOCKER_USER }}"
|
||||
password: "${{ secrets.DOCKER_TOKEN }}"
|
||||
|
||||
- name: Release
|
||||
env:
|
||||
|
||||
.github/workflows/data-update.yml (vendored, 10 changed lines)
@@ -15,7 +15,7 @@ permissions:
  contents: read

env:
-  PYTHON_VERSION: "3.13"
+  PYTHON_VERSION: "3.14"

jobs:
  data:
@@ -40,17 +40,17 @@ jobs:

    steps:
      - name: Setup Python
-        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+        uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
        with:
          python-version: "${{ env.PYTHON_VERSION }}"

      - name: Checkout
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
        with:
          persist-credentials: "false"

      - name: Setup cache Python
-        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
        with:
          key: "python-${{ env.PYTHON_VERSION }}-${{ runner.arch }}-${{ hashFiles('./requirements*.txt') }}"
          restore-keys: "python-${{ env.PYTHON_VERSION }}-${{ runner.arch }}-"
@@ -64,7 +64,7 @@ jobs:

      - name: Create PR
        id: cpr
-        uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
+        uses: peter-evans/create-pull-request@98357b18bf14b5342f975ff684046ec3b2a07725 # v8.0.0
        with:
          author: "searxng-bot <searxng-bot@users.noreply.github.com>"
          committer: "searxng-bot <searxng-bot@users.noreply.github.com>"
.github/workflows/documentation.yml (vendored, 10 changed lines)
@@ -19,7 +19,7 @@ permissions:
|
||||
contents: read
|
||||
|
||||
env:
|
||||
PYTHON_VERSION: "3.13"
|
||||
PYTHON_VERSION: "3.14"
|
||||
|
||||
jobs:
|
||||
release:
|
||||
@@ -32,18 +32,18 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Setup Python
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
with:
|
||||
python-version: "${{ env.PYTHON_VERSION }}"
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
with:
|
||||
persist-credentials: "false"
|
||||
fetch-depth: "0"
|
||||
|
||||
- name: Setup cache Python
|
||||
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
|
||||
with:
|
||||
key: "python-${{ env.PYTHON_VERSION }}-${{ runner.arch }}-${{ hashFiles('./requirements*.txt') }}"
|
||||
restore-keys: "python-${{ env.PYTHON_VERSION }}-${{ runner.arch }}-"
|
||||
@@ -57,7 +57,7 @@ jobs:
|
||||
|
||||
- if: github.ref_name == 'master'
|
||||
name: Release
|
||||
uses: JamesIves/github-pages-deploy-action@6c2d9db40f9296374acc17b90404b6e8864128c8 # v4.7.3
|
||||
uses: JamesIves/github-pages-deploy-action@9d877eea73427180ae43cf98e8914934fe157a1a # v4.7.6
|
||||
with:
|
||||
folder: "dist/docs"
|
||||
branch: "gh-pages"
|
||||
|
||||
.github/workflows/integration.yml (vendored, 18 changed lines)
@@ -18,7 +18,7 @@ permissions:
|
||||
contents: read
|
||||
|
||||
env:
|
||||
PYTHON_VERSION: "3.13"
|
||||
PYTHON_VERSION: "3.14"
|
||||
|
||||
jobs:
|
||||
test:
|
||||
@@ -35,17 +35,17 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Setup Python
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
with:
|
||||
python-version: "${{ matrix.python-version }}"
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
with:
|
||||
persist-credentials: "false"
|
||||
|
||||
- name: Setup cache Python
|
||||
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
|
||||
with:
|
||||
key: "python-${{ matrix.python-version }}-${{ runner.arch }}-${{ hashFiles('./requirements*.txt') }}"
|
||||
restore-keys: "python-${{ matrix.python-version }}-${{ runner.arch }}-"
|
||||
@@ -62,28 +62,28 @@ jobs:
|
||||
runs-on: ubuntu-24.04-arm
|
||||
steps:
|
||||
- name: Setup Python
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
with:
|
||||
python-version: "${{ env.PYTHON_VERSION }}"
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
with:
|
||||
persist-credentials: "false"
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
|
||||
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
|
||||
with:
|
||||
node-version-file: "./.nvmrc"
|
||||
|
||||
- name: Setup cache Node.js
|
||||
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
|
||||
with:
|
||||
key: "nodejs-${{ runner.arch }}-${{ hashFiles('./.nvmrc', './package.json') }}"
|
||||
path: "./client/simple/node_modules/"
|
||||
|
||||
- name: Setup cache Python
|
||||
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
|
||||
with:
|
||||
key: "python-${{ env.PYTHON_VERSION }}-${{ runner.arch }}-${{ hashFiles('./requirements*.txt') }}"
|
||||
restore-keys: "python-${{ env.PYTHON_VERSION }}-${{ runner.arch }}-"
|
||||
|
||||
.github/workflows/l10n.yml (vendored, 16 changed lines)
@@ -22,7 +22,7 @@ permissions:
|
||||
contents: read
|
||||
|
||||
env:
|
||||
PYTHON_VERSION: "3.13"
|
||||
PYTHON_VERSION: "3.14"
|
||||
|
||||
jobs:
|
||||
update:
|
||||
@@ -35,18 +35,18 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Setup Python
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
with:
|
||||
python-version: "${{ env.PYTHON_VERSION }}"
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
with:
|
||||
token: "${{ secrets.WEBLATE_GITHUB_TOKEN }}"
|
||||
fetch-depth: "0"
|
||||
|
||||
- name: Setup cache Python
|
||||
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
|
||||
with:
|
||||
key: "python-${{ env.PYTHON_VERSION }}-${{ runner.arch }}-${{ hashFiles('./requirements*.txt') }}"
|
||||
restore-keys: "python-${{ env.PYTHON_VERSION }}-${{ runner.arch }}-"
|
||||
@@ -82,18 +82,18 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Setup Python
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
with:
|
||||
python-version: "${{ env.PYTHON_VERSION }}"
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
with:
|
||||
token: "${{ secrets.WEBLATE_GITHUB_TOKEN }}"
|
||||
fetch-depth: "0"
|
||||
|
||||
- name: Setup cache Python
|
||||
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
|
||||
with:
|
||||
key: "python-${{ env.PYTHON_VERSION }}-${{ runner.arch }}-${{ hashFiles('./requirements*.txt') }}"
|
||||
restore-keys: "python-${{ env.PYTHON_VERSION }}-${{ runner.arch }}-"
|
||||
@@ -117,7 +117,7 @@ jobs:
|
||||
|
||||
- name: Create PR
|
||||
id: cpr
|
||||
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
|
||||
uses: peter-evans/create-pull-request@98357b18bf14b5342f975ff684046ec3b2a07725 # v8.0.0
|
||||
with:
|
||||
author: "searxng-bot <searxng-bot@users.noreply.github.com>"
|
||||
committer: "searxng-bot <searxng-bot@users.noreply.github.com>"
|
||||
|
||||
.github/workflows/security.yml (vendored, 8 changed lines)
@@ -24,7 +24,7 @@ jobs:

    steps:
      - name: Checkout
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
        with:
          persist-credentials: "false"

@@ -32,8 +32,8 @@ jobs:
        uses: docker/scout-action@f8c776824083494ab0d56b8105ba2ca85c86e4de # v1.18.2
        with:
          organization: "searxng"
-          dockerhub-user: "${{ secrets.DOCKERHUB_USERNAME }}"
-          dockerhub-password: "${{ secrets.DOCKERHUB_TOKEN }}"
+          dockerhub-user: "${{ secrets.DOCKER_USER }}"
+          dockerhub-password: "${{ secrets.DOCKER_TOKEN }}"
          image: "registry://ghcr.io/searxng/searxng:latest"
          command: "cves"
          sarif-file: "./scout.sarif"
@@ -41,6 +41,6 @@ jobs:
          write-comment: "false"

      - name: Upload SARIFs
-        uses: github/codeql-action/upload-sarif@64d10c13136e1c5bce3e5fbde8d4906eeaafc885 # v3.30.6
+        uses: github/codeql-action/upload-sarif@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
        with:
          sarif_file: "./scout.sarif"
@@ -162,7 +162,7 @@ no-docstring-rgx=^_
property-classes=abc.abstractproperty

# Regular expression matching correct variable names
-variable-rgx=(([a-z][a-zA-Z0-9_]{2,30})|(_[a-z0-9_]*)|([a-z]))$
+variable-rgx=([a-zA-Z0-9_]*)$


[FORMAT]
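The relaxed pattern drops the lowercase-start and length requirements, so short or mixed-case variable names no longer trigger invalid-name warnings. A quick, self-contained way to see the difference (the sample names are illustrative, and the anchoring mirrors how pylint applies the pattern to the whole name):

```ts
// Old and new `variable-rgx` patterns, anchored for a full-name match.
const oldRgx = /^(([a-z][a-zA-Z0-9_]{2,30})|(_[a-z0-9_]*)|([a-z]))$/;
const newRgx = /^([a-zA-Z0-9_]*)$/;

// "db" and "Result" fail the old pattern (too short / uppercase start);
// every plain identifier passes the new one.
for (const name of ["query", "db", "Result", "_cache", "x"]) {
  console.log(`${name}: old=${oldRgx.test(name)} new=${newRgx.test(name)}`);
}
```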
@@ -1,8 +1,8 @@
|
||||
{
|
||||
"$schema": "https://biomejs.dev/schemas/2.2.5/schema.json",
|
||||
"$schema": "https://biomejs.dev/schemas/2.3.10/schema.json",
|
||||
"files": {
|
||||
"ignoreUnknown": true,
|
||||
"includes": ["**", "!dist", "!node_modules"]
|
||||
"includes": ["**", "!node_modules"]
|
||||
},
|
||||
"assist": {
|
||||
"enabled": true,
|
||||
@@ -15,9 +15,9 @@
|
||||
}
|
||||
},
|
||||
"formatter": {
|
||||
"enabled": true,
|
||||
"bracketSameLine": false,
|
||||
"bracketSpacing": true,
|
||||
"enabled": true,
|
||||
"formatWithErrors": false,
|
||||
"indentStyle": "space",
|
||||
"indentWidth": 2,
|
||||
@@ -35,24 +35,29 @@
|
||||
},
|
||||
"correctness": {
|
||||
"noGlobalDirnameFilename": "error",
|
||||
"noUndeclaredVariables": {
|
||||
"level": "error",
|
||||
"options": {
|
||||
"checkTypes": true
|
||||
}
|
||||
},
|
||||
"useImportExtensions": "error",
|
||||
"useJsonImportAttributes": "error",
|
||||
"useSingleJsDocAsterisk": "error"
|
||||
},
|
||||
"nursery": {
|
||||
"noContinue": "warn",
|
||||
"noDeprecatedImports": "warn",
|
||||
"noEqualsToNull": "warn",
|
||||
"noFloatingPromises": "warn",
|
||||
"noForIn": "warn",
|
||||
"noImportCycles": "warn",
|
||||
"noIncrementDecrement": "warn",
|
||||
"noMisusedPromises": "warn",
|
||||
"noMultiStr": "warn",
|
||||
"noParametersOnlyUsedInRecursion": "warn",
|
||||
"noUselessCatchBinding": "warn",
|
||||
"noUselessUndefined": "warn",
|
||||
"useAwaitThenable": "off",
|
||||
"useDestructuring": "warn",
|
||||
"useExhaustiveSwitchCases": "warn",
|
||||
"useExplicitType": "warn"
|
||||
"useExplicitType": "warn",
|
||||
"useFind": "warn",
|
||||
"useRegexpExec": "warn"
|
||||
},
|
||||
"performance": {
|
||||
"noAwaitInLoops": "error",
|
||||
@@ -65,6 +70,7 @@
|
||||
"style": {
|
||||
"noCommonJs": "error",
|
||||
"noEnum": "error",
|
||||
"noImplicitBoolean": "error",
|
||||
"noInferrableTypes": "error",
|
||||
"noNamespace": "error",
|
||||
"noNegationElse": "error",
|
||||
@@ -109,6 +115,12 @@
|
||||
"syntax": "explicit"
|
||||
}
|
||||
},
|
||||
"useConsistentTypeDefinitions": {
|
||||
"level": "error",
|
||||
"options": {
|
||||
"style": "type"
|
||||
}
|
||||
},
|
||||
"useDefaultSwitchClause": "error",
|
||||
"useExplicitLengthCheck": "error",
|
||||
"useForOf": "error",
|
||||
@@ -117,6 +129,7 @@
|
||||
"useNumericSeparators": "error",
|
||||
"useObjectSpread": "error",
|
||||
"useReadonlyClassProperties": "error",
|
||||
"useSelfClosingElements": "error",
|
||||
"useShorthandAssign": "error",
|
||||
"useSingleVarDeclarator": "error",
|
||||
"useThrowNewError": "error",
|
||||
|
||||
client/simple/package-lock.json (generated, 1120 changed lines)
File diff suppressed because it is too large.
@@ -19,33 +19,31 @@
    "lint:tsc": "tsc --noEmit"
  },
  "browserslist": [
-    "Chrome >= 93",
-    "Firefox >= 92",
-    "Safari >= 15.4",
+    "baseline 2022",
    "not dead"
  ],
  "dependencies": {
-    "ionicons": "~8.0.0",
+    "ionicons": "~8.0.13",
    "normalize.css": "8.0.1",
-    "ol": "~10.6.0",
+    "ol": "~10.7.0",
    "swiped-events": "1.2.0"
  },
  "devDependencies": {
-    "@biomejs/biome": "2.2.5",
-    "@types/node": "~24.6.2",
-    "browserslist": "~4.26.3",
-    "browserslist-to-esbuild": "~2.1.0",
-    "edge.js": "~6.3.0",
-    "less": "~4.4.1",
-    "lightningcss": "~1.30.2",
-    "sharp": "~0.34.4",
-    "sort-package-json": "~3.4.0",
-    "stylelint": "~16.24.0",
-    "stylelint-config-standard-less": "~3.0.0",
-    "stylelint-prettier": "~5.0.0",
+    "@biomejs/biome": "2.3.10",
+    "@types/node": "~25.0.3",
+    "browserslist": "~4.28.1",
+    "browserslist-to-esbuild": "~2.1.1",
+    "edge.js": "~6.4.0",
+    "less": "~4.5.1",
+    "mathjs": "~15.1.0",
+    "sharp": "~0.34.5",
+    "sort-package-json": "~3.6.0",
+    "stylelint": "~16.26.0",
+    "stylelint-config-standard-less": "~3.0.1",
+    "stylelint-prettier": "~5.0.3",
    "svgo": "~4.0.0",
    "typescript": "~5.9.3",
-    "vite": "npm:rolldown-vite@7.1.15",
-    "vite-bundle-analyzer": "~1.2.3"
+    "vite": "8.0.0-beta.3",
+    "vite-bundle-analyzer": "~1.3.2"
  }
}
client/simple/src/js/Plugin.ts (new file, 66 lines)
@@ -0,0 +1,66 @@
// SPDX-License-Identifier: AGPL-3.0-or-later

/**
 * Base class for client-side plugins.
 *
 * @remarks
 * Handle conditional loading of the plugin in:
 *
 * - client/simple/src/js/router.ts
 *
 * @abstract
 */
export abstract class Plugin {
  /**
   * Plugin name.
   */
  protected readonly id: string;

  /**
   * @remarks
   * Don't hold references of this instance outside the class.
   */
  protected constructor(id: string) {
    this.id = id;

    void this.invoke();
  }

  private async invoke(): Promise<void> {
    try {
      console.debug(`[PLUGIN] ${this.id}: Running...`);
      const result = await this.run();
      if (!result) return;

      console.debug(`[PLUGIN] ${this.id}: Running post-exec...`);
      // @ts-expect-error
      void (await this.post(result as NonNullable<Awaited<ReturnType<this["run"]>>>));
    } catch (error) {
      console.error(`[PLUGIN] ${this.id}:`, error);
    } finally {
      console.debug(`[PLUGIN] ${this.id}: Done.`);
    }
  }

  /**
   * Plugin goes here.
   *
   * @remarks
   * The plugin is already loaded at this point. If you wish to execute
   * conditions to exit early, consider moving the logic to:
   *
   * - client/simple/src/js/router.ts
   *
   * ...to avoid unnecessarily loading this plugin on the client.
   */
  protected abstract run(): Promise<unknown>;

  /**
   * Post-execution hook.
   *
   * @remarks
   * The hook is only executed if `#run()` returns a truthy value.
   */
  // @ts-expect-error
  protected abstract post(result: NonNullable<Awaited<ReturnType<this["run"]>>>): Promise<void>;
}
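The lifecycle is entirely driven by the constructor: super(id) schedules invoke(), which awaits run() and only calls post() when run() resolved to something truthy, with errors caught and logged by the base class. A minimal subclass therefore just implements the two hooks. The sketch below is illustrative only; the plugin id and the selector are made up and not part of this changeset:

```ts
import { Plugin } from "./Plugin.ts";

// Hypothetical plugin: counts result articles on the current page.
export default class ResultCounter extends Plugin {
  public constructor() {
    // super() stores the id and immediately schedules run() via invoke()
    super("resultCounter");
  }

  // A falsy return value (here: a count of 0) skips post() entirely.
  protected async run(): Promise<number> {
    return document.querySelectorAll("article.result").length;
  }

  protected async post(result: number): Promise<void> {
    console.debug(`[resultCounter] ${result} results on this page`);
  }
}
```

In this changeset the instantiation itself is handled by load() in loader.ts (shown further down), which only performs the dynamic import when its check passes.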
@@ -1,6 +0,0 @@
|
||||
// SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
|
||||
import "./nojs.ts";
|
||||
import "./router.ts";
|
||||
import "./toolkit.ts";
|
||||
import "./listener.ts";
|
||||
@@ -1,7 +0,0 @@
|
||||
// SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
|
||||
import { listen } from "./toolkit.ts";
|
||||
|
||||
listen("click", ".close", function (this: HTMLElement) {
|
||||
(this.parentNode as HTMLElement)?.classList.add("invisible");
|
||||
});
|
||||
@@ -1,8 +0,0 @@
|
||||
// SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
|
||||
import { ready } from "./toolkit.ts";
|
||||
|
||||
ready(() => {
|
||||
document.documentElement.classList.remove("no-js");
|
||||
document.documentElement.classList.add("js");
|
||||
});
|
||||
@@ -1,40 +0,0 @@
|
||||
// SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
|
||||
import { Endpoints, endpoint, ready, settings } from "./toolkit.ts";
|
||||
|
||||
ready(
|
||||
() => {
|
||||
import("../main/keyboard.ts");
|
||||
import("../main/search.ts");
|
||||
|
||||
if (settings.autocomplete) {
|
||||
import("../main/autocomplete.ts");
|
||||
}
|
||||
},
|
||||
{ on: [endpoint === Endpoints.index] }
|
||||
);
|
||||
|
||||
ready(
|
||||
() => {
|
||||
import("../main/keyboard.ts");
|
||||
import("../main/mapresult.ts");
|
||||
import("../main/results.ts");
|
||||
import("../main/search.ts");
|
||||
|
||||
if (settings.infinite_scroll) {
|
||||
import("../main/infinite_scroll.ts");
|
||||
}
|
||||
|
||||
if (settings.autocomplete) {
|
||||
import("../main/autocomplete.ts");
|
||||
}
|
||||
},
|
||||
{ on: [endpoint === Endpoints.results] }
|
||||
);
|
||||
|
||||
ready(
|
||||
() => {
|
||||
import("../main/preferences.ts");
|
||||
},
|
||||
{ on: [endpoint === Endpoints.preferences] }
|
||||
);
|
||||
client/simple/src/js/index.ts (new file, 4 lines)
@@ -0,0 +1,4 @@
// SPDX-License-Identifier: AGPL-3.0-or-later

// core
void import.meta.glob(["./*.ts", "./util/**/.ts"], { eager: true });
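The new entry point replaces the hand-written import list of the old core index module with an eager import.meta.glob, so every matching module is imported (and executed) at build time rather than being listed one by one. Assuming the second pattern is intended to cover the files under ./util/ (i.e. ./util/**/*.ts), the call behaves roughly like a set of static side-effect imports; the file names below are examples taken from elsewhere in this changeset, not an exhaustive list:

```ts
// Approximate equivalent of the eager glob in index.ts (sketch, not the
// actual generated code): each matched module is imported for its side effects.
import "./loader.ts";
import "./toolkit.ts";
import "./util/assertElement.ts";
import "./util/getElement.ts";
```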
client/simple/src/js/loader.ts (new file, 36 lines)
@@ -0,0 +1,36 @@
// SPDX-License-Identifier: AGPL-3.0-or-later

import type { Plugin } from "./Plugin.ts";
import { type EndpointsKeys, endpoint } from "./toolkit.ts";

type Options =
  | {
      on: "global";
    }
  | {
      on: "endpoint";
      where: EndpointsKeys[];
    };

export const load = <T extends Plugin>(instance: () => Promise<T>, options: Options): void => {
  if (!check(options)) return;

  void instance();
};

const check = (options: Options): boolean => {
  // biome-ignore lint/style/useDefaultSwitchClause: options is typed
  switch (options.on) {
    case "global": {
      return true;
    }
    case "endpoint": {
      if (!options.where.includes(endpoint)) {
        // not on the expected endpoint
        return false;
      }

      return true;
    }
  }
};
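load() takes a factory that performs the dynamic import and only calls it when check() passes, so endpoint-scoped plugins are never fetched on other pages. A sketch of how the new plugin modules might be registered; the endpoint key "results" is assumed to be one of the EndpointsKeys exported by toolkit.ts, which is not part of this diff:

```ts
import { load } from "./loader.ts";

// "global" plugins pass check() unconditionally and are imported everywhere.
load(async () => new (await import("./plugin/Calculator.ts")).default(), { on: "global" });

// "endpoint" plugins are only imported when the current endpoint is listed in `where`.
load(async () => new (await import("./plugin/InfiniteScroll.ts")).default(), {
  on: "endpoint",
  where: ["results"]
});
```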
@@ -1,6 +1,7 @@
// SPDX-License-Identifier: AGPL-3.0-or-later

-import { assertElement, http, listen, settings } from "../core/toolkit.ts";
+import { http, listen, settings } from "../toolkit.ts";
+import { assertElement } from "../util/assertElement.ts";

const fetchResults = async (qInput: HTMLInputElement, query: string): Promise<void> => {
  try {
|
||||
@@ -1,100 +0,0 @@
|
||||
// SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
|
||||
import { assertElement, http, settings } from "../core/toolkit.ts";
|
||||
|
||||
const newLoadSpinner = (): HTMLDivElement => {
|
||||
return Object.assign(document.createElement("div"), {
|
||||
className: "loader"
|
||||
});
|
||||
};
|
||||
|
||||
const loadNextPage = async (onlyImages: boolean, callback: () => void): Promise<void> => {
|
||||
const searchForm = document.querySelector<HTMLFormElement>("#search");
|
||||
assertElement(searchForm);
|
||||
|
||||
const form = document.querySelector<HTMLFormElement>("#pagination form.next_page");
|
||||
assertElement(form);
|
||||
|
||||
const action = searchForm.getAttribute("action");
|
||||
if (!action) {
|
||||
throw new Error("Form action not defined");
|
||||
}
|
||||
|
||||
const paginationElement = document.querySelector<HTMLElement>("#pagination");
|
||||
assertElement(paginationElement);
|
||||
|
||||
paginationElement.replaceChildren(newLoadSpinner());
|
||||
|
||||
try {
|
||||
const res = await http("POST", action, { body: new FormData(form) });
|
||||
const nextPage = await res.text();
|
||||
if (!nextPage) return;
|
||||
|
||||
const nextPageDoc = new DOMParser().parseFromString(nextPage, "text/html");
|
||||
const articleList = nextPageDoc.querySelectorAll<HTMLElement>("#urls article");
|
||||
const nextPaginationElement = nextPageDoc.querySelector<HTMLElement>("#pagination");
|
||||
|
||||
document.querySelector("#pagination")?.remove();
|
||||
|
||||
const urlsElement = document.querySelector<HTMLElement>("#urls");
|
||||
if (!urlsElement) {
|
||||
throw new Error("URLs element not found");
|
||||
}
|
||||
|
||||
if (articleList.length > 0 && !onlyImages) {
|
||||
// do not add <hr> element when there are only images
|
||||
urlsElement.appendChild(document.createElement("hr"));
|
||||
}
|
||||
|
||||
urlsElement.append(...Array.from(articleList));
|
||||
|
||||
if (nextPaginationElement) {
|
||||
const results = document.querySelector<HTMLElement>("#results");
|
||||
results?.appendChild(nextPaginationElement);
|
||||
callback();
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("Error loading next page:", error);
|
||||
|
||||
const errorElement = Object.assign(document.createElement("div"), {
|
||||
textContent: settings.translations?.error_loading_next_page ?? "Error loading next page",
|
||||
className: "dialog-error"
|
||||
});
|
||||
errorElement.setAttribute("role", "alert");
|
||||
document.querySelector("#pagination")?.replaceChildren(errorElement);
|
||||
}
|
||||
};
|
||||
|
||||
const resultsElement: HTMLElement | null = document.getElementById("results");
|
||||
if (!resultsElement) {
|
||||
throw new Error("Results element not found");
|
||||
}
|
||||
|
||||
const onlyImages: boolean = resultsElement.classList.contains("only_template_images");
|
||||
const observedSelector = "article.result:last-child";
|
||||
|
||||
const intersectionObserveOptions: IntersectionObserverInit = {
|
||||
rootMargin: "320px"
|
||||
};
|
||||
|
||||
const observer: IntersectionObserver = new IntersectionObserver((entries: IntersectionObserverEntry[]) => {
|
||||
const [paginationEntry] = entries;
|
||||
|
||||
if (paginationEntry?.isIntersecting) {
|
||||
observer.unobserve(paginationEntry.target);
|
||||
|
||||
loadNextPage(onlyImages, () => {
|
||||
const nextObservedElement = document.querySelector<HTMLElement>(observedSelector);
|
||||
if (nextObservedElement) {
|
||||
observer.observe(nextObservedElement);
|
||||
}
|
||||
}).then(() => {
|
||||
// wait until promise is resolved
|
||||
});
|
||||
}
|
||||
}, intersectionObserveOptions);
|
||||
|
||||
const initialObservedElement: HTMLElement | null = document.querySelector<HTMLElement>(observedSelector);
|
||||
if (initialObservedElement) {
|
||||
observer.observe(initialObservedElement);
|
||||
}
|
||||
@@ -1,6 +1,7 @@
// SPDX-License-Identifier: AGPL-3.0-or-later

-import { assertElement, listen, mutable, settings } from "../core/toolkit.ts";
+import { listen, mutable, settings } from "../toolkit.ts";
+import { assertElement } from "../util/assertElement.ts";

export type KeyBindingLayout = "default" | "vim";

@@ -219,7 +220,7 @@ const highlightResult
      // biome-ignore lint/complexity/noUselessSwitchCase: fallthrough is intended
      case "top":
      default:
-        next = results[0];
+        [next] = results;
    }
  }

@@ -342,7 +343,7 @@ const initHelpContent = (divElement: HTMLElement, keyBindings: typeof baseKeyBin
  const categories: Record<string, KeyBinding[]> = {};

  for (const binding of Object.values(keyBindings)) {
-    const cat = binding.cat;
+    const { cat } = binding;
    categories[cat] ??= [];
    categories[cat].push(binding);
  }
@@ -399,7 +400,7 @@ const toggleHelp = (keyBindings: typeof baseKeyBinding): void => {
    className: "dialog-modal"
  });
  initHelpContent(helpPanel, keyBindings);
-  const body = document.getElementsByTagName("body")[0];
+  const [body] = document.getElementsByTagName("body");
  if (body) {
    body.appendChild(helpPanel);
  }
@@ -407,12 +408,31 @@
};

const copyURLToClipboard = async (): Promise<void> => {
-  const currentUrlElement = document.querySelector<HTMLAnchorElement>(".result[data-vim-selected] h3 a");
-  assertElement(currentUrlElement);
+  const selectedResult = document.querySelector<HTMLElement>(".result[data-vim-selected]");
+  if (!selectedResult) return;

-  const url = currentUrlElement.getAttribute("href");
+  const resultAnchor = selectedResult.querySelector<HTMLAnchorElement>("a");
+  assertElement(resultAnchor);
+
+  const url = resultAnchor.getAttribute("href");
  if (url) {
-    await navigator.clipboard.writeText(url);
+    if (window.isSecureContext) {
+      await navigator.clipboard.writeText(url);
+    } else {
+      const selection = window.getSelection();
+      if (selection) {
+        const node = document.createElement("span");
+        node.textContent = url;
+        resultAnchor.appendChild(node);
+
+        const range = document.createRange();
+        range.selectNodeContents(node);
+        selection.removeAllRanges();
+        selection.addRange(range);
+        document.execCommand("copy");
+        node.remove();
+      }
+    }
  }
};
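The same clipboard strategy appears in several of these files (copyURLToClipboard above, and the copy buttons in the preferences and results scripts): write through the asynchronous Clipboard API when the page is a secure context, otherwise select a temporary node and fall back to document.execCommand("copy"). Factored out, the pattern looks roughly like the helper below; the function name and the container parameter are illustrative, not part of the diff:

```ts
// Illustrative helper: copy `text` to the clipboard, using the legacy
// selection + execCommand path when the Clipboard API is unavailable
// (i.e. outside secure contexts).
const copyText = async (text: string, container: HTMLElement): Promise<void> => {
  if (window.isSecureContext) {
    await navigator.clipboard.writeText(text);
    return;
  }

  const selection = window.getSelection();
  if (!selection) return;

  // execCommand("copy") copies the current selection, so the text has to live
  // in the DOM long enough to be selected.
  const node = document.createElement("span");
  node.textContent = text;
  container.appendChild(node);

  const range = document.createRange();
  range.selectNodeContents(node);
  selection.removeAllRanges();
  selection.addRange(range);
  document.execCommand("copy");
  node.remove();
};
```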
|
||||
|
||||
@@ -1,86 +0,0 @@
|
||||
// SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
|
||||
import { listen } from "../core/toolkit.ts";
|
||||
|
||||
listen("click", ".searxng_init_map", async function (this: HTMLElement, event: Event) {
|
||||
event.preventDefault();
|
||||
this.classList.remove("searxng_init_map");
|
||||
|
||||
const {
|
||||
View,
|
||||
OlMap,
|
||||
TileLayer,
|
||||
VectorLayer,
|
||||
OSM,
|
||||
VectorSource,
|
||||
Style,
|
||||
Stroke,
|
||||
Fill,
|
||||
Circle,
|
||||
fromLonLat,
|
||||
GeoJSON,
|
||||
Feature,
|
||||
Point
|
||||
} = await import("../pkg/ol.ts");
|
||||
import("ol/ol.css");
|
||||
|
||||
const { leafletTarget: target, mapLon, mapLat, mapGeojson } = this.dataset;
|
||||
|
||||
const lon = Number.parseFloat(mapLon || "0");
|
||||
const lat = Number.parseFloat(mapLat || "0");
|
||||
const view = new View({ maxZoom: 16, enableRotation: false });
|
||||
const map = new OlMap({
|
||||
target: target,
|
||||
layers: [new TileLayer({ source: new OSM({ maxZoom: 16 }) })],
|
||||
view: view
|
||||
});
|
||||
|
||||
try {
|
||||
const markerSource = new VectorSource({
|
||||
features: [
|
||||
new Feature({
|
||||
geometry: new Point(fromLonLat([lon, lat]))
|
||||
})
|
||||
]
|
||||
});
|
||||
|
||||
const markerLayer = new VectorLayer({
|
||||
source: markerSource,
|
||||
style: new Style({
|
||||
image: new Circle({
|
||||
radius: 6,
|
||||
fill: new Fill({ color: "#3050ff" })
|
||||
})
|
||||
})
|
||||
});
|
||||
|
||||
map.addLayer(markerLayer);
|
||||
} catch (error) {
|
||||
console.error("Failed to create marker layer:", error);
|
||||
}
|
||||
|
||||
if (mapGeojson) {
|
||||
try {
|
||||
const geoSource = new VectorSource({
|
||||
features: new GeoJSON().readFeatures(JSON.parse(mapGeojson), {
|
||||
dataProjection: "EPSG:4326",
|
||||
featureProjection: "EPSG:3857"
|
||||
})
|
||||
});
|
||||
|
||||
const geoLayer = new VectorLayer({
|
||||
source: geoSource,
|
||||
style: new Style({
|
||||
stroke: new Stroke({ color: "#3050ff", width: 2 }),
|
||||
fill: new Fill({ color: "#3050ff33" })
|
||||
})
|
||||
});
|
||||
|
||||
map.addLayer(geoLayer);
|
||||
|
||||
view.fit(geoSource.getExtent(), { padding: [20, 20, 20, 20] });
|
||||
} catch (error) {
|
||||
console.error("Failed to create GeoJSON layer:", error);
|
||||
}
|
||||
}
|
||||
});
|
||||
@@ -1,6 +1,7 @@
|
||||
// SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
|
||||
import { http, listen, settings } from "../core/toolkit.ts";
|
||||
import { http, listen, settings } from "../toolkit.ts";
|
||||
import { assertElement } from "../util/assertElement.ts";
|
||||
|
||||
let engineDescriptions: Record<string, [string, string]> | undefined;
|
||||
|
||||
@@ -52,19 +53,24 @@ for (const engine of disableAllEngines) {
|
||||
listen("click", engine, () => toggleEngines(false, engineToggles));
|
||||
}
|
||||
|
||||
const copyHashButton: HTMLElement | null = document.querySelector<HTMLElement>("#copy-hash");
|
||||
if (copyHashButton) {
|
||||
listen("click", copyHashButton, async (event: Event) => {
|
||||
event.preventDefault();
|
||||
listen("click", "#copy-hash", async function (this: HTMLElement) {
|
||||
const target = this.parentElement?.querySelector<HTMLPreElement>("pre");
|
||||
assertElement(target);
|
||||
|
||||
const { copiedText, hash } = copyHashButton.dataset;
|
||||
if (!(copiedText && hash)) return;
|
||||
|
||||
try {
|
||||
await navigator.clipboard.writeText(hash);
|
||||
copyHashButton.innerText = copiedText;
|
||||
} catch (error) {
|
||||
console.error("Failed to copy hash:", error);
|
||||
if (window.isSecureContext) {
|
||||
await navigator.clipboard.writeText(target.innerText);
|
||||
} else {
|
||||
const selection = window.getSelection();
|
||||
if (selection) {
|
||||
const range = document.createRange();
|
||||
range.selectNodeContents(target);
|
||||
selection.removeAllRanges();
|
||||
selection.addRange(range);
|
||||
document.execCommand("copy");
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (this.dataset.copiedText) {
|
||||
this.innerText = this.dataset.copiedText;
|
||||
}
|
||||
});
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
// SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
|
||||
import "../../../node_modules/swiped-events/src/swiped-events.js";
|
||||
import { assertElement, listen, mutable, settings } from "../core/toolkit.ts";
|
||||
import { listen, mutable, settings } from "../toolkit.ts";
|
||||
import { assertElement } from "../util/assertElement.ts";
|
||||
|
||||
let imgTimeoutID: number;
|
||||
|
||||
@@ -121,10 +122,21 @@ listen("click", "#copy_url", async function (this: HTMLElement) {
|
||||
const target = this.parentElement?.querySelector<HTMLPreElement>("pre");
|
||||
assertElement(target);
|
||||
|
||||
if (window.isSecureContext) {
|
||||
await navigator.clipboard.writeText(target.innerText);
|
||||
const copiedText = this.dataset.copiedText;
|
||||
if (copiedText) {
|
||||
this.innerText = copiedText;
|
||||
} else {
|
||||
const selection = window.getSelection();
|
||||
if (selection) {
|
||||
const range = document.createRange();
|
||||
range.selectNodeContents(target);
|
||||
selection.removeAllRanges();
|
||||
selection.addRange(range);
|
||||
document.execCommand("copy");
|
||||
}
|
||||
}
|
||||
|
||||
if (this.dataset.copiedText) {
|
||||
this.innerText = this.dataset.copiedText;
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
@@ -1,88 +1,51 @@
|
||||
// SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
|
||||
import { assertElement, listen, settings } from "../core/toolkit.ts";
|
||||
import { listen } from "../toolkit.ts";
|
||||
import { getElement } from "../util/getElement.ts";
|
||||
|
||||
const submitIfQuery = (qInput: HTMLInputElement): void => {
|
||||
if (qInput.value.length > 0) {
|
||||
const search = document.getElementById("search") as HTMLFormElement | null;
|
||||
search?.submit();
|
||||
}
|
||||
};
|
||||
|
||||
const updateClearButton = (qInput: HTMLInputElement, cs: HTMLElement): void => {
|
||||
cs.classList.toggle("empty", qInput.value.length === 0);
|
||||
};
|
||||
|
||||
const createClearButton = (qInput: HTMLInputElement): void => {
|
||||
const cs = document.getElementById("clear_search");
|
||||
assertElement(cs);
|
||||
|
||||
updateClearButton(qInput, cs);
|
||||
|
||||
listen("click", cs, (event: MouseEvent) => {
|
||||
event.preventDefault();
|
||||
qInput.value = "";
|
||||
qInput.focus();
|
||||
updateClearButton(qInput, cs);
|
||||
});
|
||||
|
||||
listen("input", qInput, () => updateClearButton(qInput, cs), { passive: true });
|
||||
};
|
||||
|
||||
const qInput = document.getElementById("q") as HTMLInputElement | null;
|
||||
assertElement(qInput);
|
||||
const searchForm: HTMLFormElement = getElement<HTMLFormElement>("search");
|
||||
const searchInput: HTMLInputElement = getElement<HTMLInputElement>("q");
|
||||
const searchReset: HTMLButtonElement = getElement<HTMLButtonElement>("clear_search");
|
||||
|
||||
const isMobile: boolean = window.matchMedia("(max-width: 50em)").matches;
|
||||
const isResultsPage: boolean = document.querySelector("main")?.id === "main_results";
|
||||
|
||||
const categoryButtons: HTMLButtonElement[] = Array.from(
|
||||
document.querySelectorAll<HTMLButtonElement>("#categories_container button.category")
|
||||
);
|
||||
|
||||
if (searchInput.value.length === 0) {
|
||||
searchReset.classList.add("empty");
|
||||
}
|
||||
|
||||
// focus search input on large screens
|
||||
if (!(isMobile || isResultsPage)) {
|
||||
qInput.focus();
|
||||
searchInput.focus();
|
||||
}
|
||||
|
||||
// On mobile, move cursor to the end of the input on focus
|
||||
if (isMobile) {
|
||||
listen("focus", qInput, () => {
|
||||
listen("focus", searchInput, () => {
|
||||
// Defer cursor move until the next frame to prevent a visual jump
|
||||
requestAnimationFrame(() => {
|
||||
const end = qInput.value.length;
|
||||
qInput.setSelectionRange(end, end);
|
||||
qInput.scrollLeft = qInput.scrollWidth;
|
||||
const end = searchInput.value.length;
|
||||
searchInput.setSelectionRange(end, end);
|
||||
searchInput.scrollLeft = searchInput.scrollWidth;
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
createClearButton(qInput);
|
||||
listen("input", searchInput, () => {
|
||||
searchReset.classList.toggle("empty", searchInput.value.length === 0);
|
||||
});
|
||||
|
||||
// Additionally to searching when selecting a new category, we also
|
||||
// automatically start a new search request when the user changes a search
|
||||
// filter (safesearch, time range or language) (this requires JavaScript
|
||||
// though)
|
||||
if (
|
||||
settings.search_on_category_select &&
|
||||
// If .search_filters is undefined (invisible) we are on the homepage and
|
||||
// hence don't have to set any listeners
|
||||
document.querySelector(".search_filters")
|
||||
) {
|
||||
const safesearchElement = document.getElementById("safesearch");
|
||||
if (safesearchElement) {
|
||||
listen("change", safesearchElement, () => submitIfQuery(qInput));
|
||||
}
|
||||
listen("click", searchReset, (event: MouseEvent) => {
|
||||
event.preventDefault();
|
||||
searchInput.value = "";
|
||||
searchInput.focus();
|
||||
searchReset.classList.add("empty");
|
||||
});
|
||||
|
||||
const timeRangeElement = document.getElementById("time_range");
|
||||
if (timeRangeElement) {
|
||||
listen("change", timeRangeElement, () => submitIfQuery(qInput));
|
||||
}
|
||||
|
||||
const languageElement = document.getElementById("language");
|
||||
if (languageElement) {
|
||||
listen("change", languageElement, () => submitIfQuery(qInput));
|
||||
}
|
||||
}
|
||||
|
||||
const categoryButtons: HTMLButtonElement[] = [
|
||||
...document.querySelectorAll<HTMLButtonElement>("button.category_button")
|
||||
];
|
||||
for (const button of categoryButtons) {
|
||||
listen("click", button, (event: MouseEvent) => {
|
||||
if (event.shiftKey) {
|
||||
@@ -98,21 +61,34 @@ for (const button of categoryButtons) {
|
||||
});
|
||||
}
|
||||
|
||||
const form: HTMLFormElement | null = document.querySelector<HTMLFormElement>("#search");
|
||||
assertElement(form);
|
||||
|
||||
// override form submit action to update the actually selected categories
|
||||
listen("submit", form, (event: Event) => {
|
||||
event.preventDefault();
|
||||
|
||||
const categoryValuesInput = document.querySelector<HTMLInputElement>("#selected-categories");
|
||||
if (categoryValuesInput) {
|
||||
const categoryValues = categoryButtons
|
||||
.filter((button) => button.classList.contains("selected"))
|
||||
.map((button) => button.name.replace("category_", ""));
|
||||
|
||||
categoryValuesInput.value = categoryValues.join(",");
|
||||
if (document.querySelector("div.search_filters")) {
|
||||
const safesearchElement = document.getElementById("safesearch");
|
||||
if (safesearchElement) {
|
||||
listen("change", safesearchElement, () => searchForm.submit());
|
||||
}
|
||||
|
||||
form.submit();
|
||||
const timeRangeElement = document.getElementById("time_range");
|
||||
if (timeRangeElement) {
|
||||
listen("change", timeRangeElement, () => searchForm.submit());
|
||||
}
|
||||
|
||||
const languageElement = document.getElementById("language");
|
||||
if (languageElement) {
|
||||
listen("change", languageElement, () => searchForm.submit());
|
||||
}
|
||||
}
|
||||
|
||||
// override searchForm submit event
|
||||
listen("submit", searchForm, (event: Event) => {
|
||||
event.preventDefault();
|
||||
|
||||
if (categoryButtons.length > 0) {
|
||||
const searchCategories = getElement<HTMLInputElement>("selected-categories");
|
||||
searchCategories.value = categoryButtons
|
||||
.filter((button) => button.classList.contains("selected"))
|
||||
.map((button) => button.name.replace("category_", ""))
|
||||
.join(",");
|
||||
}
|
||||
|
||||
searchForm.submit();
|
||||
});
|
||||
|
||||
@@ -1,28 +0,0 @@
|
||||
// SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
|
||||
import { Feature, Map as OlMap, View } from "ol";
|
||||
import { createEmpty } from "ol/extent";
|
||||
import { GeoJSON } from "ol/format";
|
||||
import { Point } from "ol/geom";
|
||||
import { Tile as TileLayer, Vector as VectorLayer } from "ol/layer";
|
||||
import { fromLonLat } from "ol/proj";
|
||||
import { OSM, Vector as VectorSource } from "ol/source";
|
||||
import { Circle, Fill, Stroke, Style } from "ol/style";
|
||||
|
||||
export {
|
||||
View,
|
||||
OlMap,
|
||||
TileLayer,
|
||||
VectorLayer,
|
||||
OSM,
|
||||
createEmpty,
|
||||
VectorSource,
|
||||
Style,
|
||||
Stroke,
|
||||
Fill,
|
||||
Circle,
|
||||
fromLonLat,
|
||||
GeoJSON,
|
||||
Feature,
|
||||
Point
|
||||
};
|
||||
client/simple/src/js/plugin/Calculator.ts (new file, 93 lines)
@@ -0,0 +1,93 @@
// SPDX-License-Identifier: AGPL-3.0-or-later

import {
  absDependencies,
  addDependencies,
  create,
  divideDependencies,
  eDependencies,
  evaluateDependencies,
  expDependencies,
  factorialDependencies,
  gcdDependencies,
  lcmDependencies,
  log1pDependencies,
  log2Dependencies,
  log10Dependencies,
  logDependencies,
  modDependencies,
  multiplyDependencies,
  nthRootDependencies,
  piDependencies,
  powDependencies,
  roundDependencies,
  signDependencies,
  sqrtDependencies,
  subtractDependencies
} from "mathjs/number";
import { Plugin } from "../Plugin.ts";
import { appendAnswerElement } from "../util/appendAnswerElement.ts";
import { getElement } from "../util/getElement.ts";

/**
 * Parses and solves mathematical expressions. Can do basic arithmetic and
 * evaluate some functions.
 *
 * @example
 * "(3 + 5) / 2" = "4"
 * "e ^ 2 + pi" = "10.530648752520442"
 * "gcd(48, 18) + lcm(4, 5)" = "26"
 *
 * @remarks
 * Depends on `mathjs` library.
 */
export default class Calculator extends Plugin {
  public constructor() {
    super("calculator");
  }

  /**
   * @remarks
   * Compare bundle size after adding or removing features.
   */
  private static readonly math = create({
    ...absDependencies,
    ...addDependencies,
    ...divideDependencies,
    ...eDependencies,
    ...evaluateDependencies,
    ...expDependencies,
    ...factorialDependencies,
    ...gcdDependencies,
    ...lcmDependencies,
    ...log10Dependencies,
    ...log1pDependencies,
    ...log2Dependencies,
    ...logDependencies,
    ...modDependencies,
    ...multiplyDependencies,
    ...nthRootDependencies,
    ...piDependencies,
    ...powDependencies,
    ...roundDependencies,
    ...signDependencies,
    ...sqrtDependencies,
    ...subtractDependencies
  });

  protected async run(): Promise<string | undefined> {
    const searchInput = getElement<HTMLInputElement>("q");
    const node = Calculator.math.parse(searchInput.value);

    try {
      return `${node.toString()} = ${node.evaluate()}`;
    } catch {
      // not a compatible math expression
      return;
    }
  }

  protected async post(result: string): Promise<void> {
    appendAnswerElement(result);
  }
}
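Calculator assembles a minimal mathjs instance from individual dependency objects (imported from mathjs/number, the number-only implementation) so that only the listed functions are bundled, then parses the query and renders "expression = result" when it evaluates cleanly. A standalone sketch of the same idea with a much smaller dependency set (illustrative, not the plugin itself):

```ts
import { addDependencies, create, divideDependencies, evaluateDependencies } from "mathjs/number";

// Custom instance containing only what is spread in: parse/evaluate plus
// addition and division. Unsupported operations simply throw.
const math = create({ ...addDependencies, ...divideDependencies, ...evaluateDependencies });

const tryCalculate = (query: string): string | undefined => {
  try {
    const node = math.parse(query);
    return `${node.toString()} = ${node.evaluate()}`;
  } catch {
    return undefined; // not a math expression this instance can handle
  }
};

console.log(tryCalculate("(3 + 5) / 2")); // "(3 + 5) / 2 = 4", matching the example in the docstring
```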
client/simple/src/js/plugin/InfiniteScroll.ts (new file, 110 lines)
@@ -0,0 +1,110 @@
// SPDX-License-Identifier: AGPL-3.0-or-later

import { Plugin } from "../Plugin.ts";
import { http, settings } from "../toolkit.ts";
import { assertElement } from "../util/assertElement.ts";
import { getElement } from "../util/getElement.ts";

/**
 * Automatically loads the next page when scrolling to bottom of the current page.
 */
export default class InfiniteScroll extends Plugin {
  public constructor() {
    super("infiniteScroll");
  }

  protected async run(): Promise<void> {
    const resultsElement = getElement<HTMLElement>("results");

    const onlyImages: boolean = resultsElement.classList.contains("only_template_images");
    const observedSelector = "article.result:last-child";

    const spinnerElement = document.createElement("div");
    spinnerElement.className = "loader";

    const loadNextPage = async (callback: () => void): Promise<void> => {
      const searchForm = document.querySelector<HTMLFormElement>("#search");
      assertElement(searchForm);

      const form = document.querySelector<HTMLFormElement>("#pagination form.next_page");
      assertElement(form);

      const action = searchForm.getAttribute("action");
      if (!action) {
        throw new Error("Form action not defined");
      }

      const paginationElement = document.querySelector<HTMLElement>("#pagination");
      assertElement(paginationElement);

      paginationElement.replaceChildren(spinnerElement);

      try {
        const res = await http("POST", action, { body: new FormData(form) });
        const nextPage = await res.text();
        if (!nextPage) return;

        const nextPageDoc = new DOMParser().parseFromString(nextPage, "text/html");
        const articleList = nextPageDoc.querySelectorAll<HTMLElement>("#urls article");
        const nextPaginationElement = nextPageDoc.querySelector<HTMLElement>("#pagination");

        document.querySelector("#pagination")?.remove();

        const urlsElement = document.querySelector<HTMLElement>("#urls");
        if (!urlsElement) {
          throw new Error("URLs element not found");
        }

        if (articleList.length > 0 && !onlyImages) {
          // do not add <hr> element when there are only images
          urlsElement.appendChild(document.createElement("hr"));
        }

        urlsElement.append(...articleList);

        if (nextPaginationElement) {
          const results = document.querySelector<HTMLElement>("#results");
          results?.appendChild(nextPaginationElement);
          callback();
        }
      } catch (error) {
        console.error("Error loading next page:", error);

        const errorElement = Object.assign(document.createElement("div"), {
          textContent: settings.translations?.error_loading_next_page ?? "Error loading next page",
          className: "dialog-error"
        });
        errorElement.setAttribute("role", "alert");
        document.querySelector("#pagination")?.replaceChildren(errorElement);
      }
    };

    const intersectionObserveOptions: IntersectionObserverInit = {
      rootMargin: "320px"
    };

    const observer: IntersectionObserver = new IntersectionObserver(async (entries: IntersectionObserverEntry[]) => {
      const [paginationEntry] = entries;

      if (paginationEntry?.isIntersecting) {
        observer.unobserve(paginationEntry.target);

        await loadNextPage(() => {
          const nextObservedElement = document.querySelector<HTMLElement>(observedSelector);
          if (nextObservedElement) {
            observer.observe(nextObservedElement);
          }
        });
      }
    }, intersectionObserveOptions);

    const initialObservedElement: HTMLElement | null = document.querySelector<HTMLElement>(observedSelector);
    if (initialObservedElement) {
      observer.observe(initialObservedElement);
    }
  }

  protected async post(): Promise<void> {
    // noop
  }
}
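The heart of the plugin is the observe → load → re-observe cycle around the last `article.result`. A reduced, self-contained sketch of that cycle, with a placeholder `fetchNextPage` standing in for the full `loadNextPage` above:

// Reduced sketch of the infinite-scroll loop; fetchNextPage is a placeholder.
const observedSelector = "article.result:last-child";

const fetchNextPage = async (): Promise<void> => {
  // POST the pagination form and append the new articles (see loadNextPage above)
};

const observer = new IntersectionObserver(
  async ([entry]) => {
    if (!entry?.isIntersecting) return;

    observer.unobserve(entry.target);          // stop watching the old last result
    await fetchNextPage();                     // append the next page of results
    const nextLast = document.querySelector<HTMLElement>(observedSelector);
    if (nextLast) observer.observe(nextLast);  // watch the new last result
  },
  { rootMargin: "320px" }                      // start loading ~320px before the end
);

const last = document.querySelector<HTMLElement>(observedSelector);
if (last) observer.observe(last);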
90
client/simple/src/js/plugin/MapView.ts
Normal file
@@ -0,0 +1,90 @@
// SPDX-License-Identifier: AGPL-3.0-or-later

import "ol/ol.css?inline";
import { Feature, Map as OlMap, View } from "ol";
import { GeoJSON } from "ol/format";
import { Point } from "ol/geom";
import { Tile as TileLayer, Vector as VectorLayer } from "ol/layer";
import { fromLonLat } from "ol/proj";
import { OSM, Vector as VectorSource } from "ol/source";
import { Circle, Fill, Stroke, Style } from "ol/style";
import { Plugin } from "../Plugin.ts";

/**
 * Renders an OpenLayers map for a map result: an OSM tile layer, a marker at
 * the result's coordinates and, if present, a GeoJSON overlay, all taken from
 * the element's data attributes.
 */
export default class MapView extends Plugin {
  private readonly map: HTMLElement;

  public constructor(map: HTMLElement) {
    super("mapView");

    this.map = map;
  }

  protected async run(): Promise<void> {
    const { leafletTarget: target, mapLon, mapLat, mapGeojson } = this.map.dataset;

    const lon = Number.parseFloat(mapLon || "0");
    const lat = Number.parseFloat(mapLat || "0");
    const view = new View({ maxZoom: 16, enableRotation: false });
    const map = new OlMap({
      target: target,
      layers: [new TileLayer({ source: new OSM({ maxZoom: 16 }) })],
      view: view
    });

    try {
      const markerSource = new VectorSource({
        features: [
          new Feature({
            geometry: new Point(fromLonLat([lon, lat]))
          })
        ]
      });

      const markerLayer = new VectorLayer({
        source: markerSource,
        style: new Style({
          image: new Circle({
            radius: 6,
            fill: new Fill({ color: "#3050ff" })
          })
        })
      });

      map.addLayer(markerLayer);
    } catch (error) {
      console.error("Failed to create marker layer:", error);
    }

    if (mapGeojson) {
      try {
        const geoSource = new VectorSource({
          features: new GeoJSON().readFeatures(JSON.parse(mapGeojson), {
            dataProjection: "EPSG:4326",
            featureProjection: "EPSG:3857"
          })
        });

        const geoLayer = new VectorLayer({
          source: geoSource,
          style: new Style({
            stroke: new Stroke({ color: "#3050ff", width: 2 }),
            fill: new Fill({ color: "#3050ff33" })
          })
        });

        map.addLayer(geoLayer);

        view.fit(geoSource.getExtent(), { padding: [20, 20, 20, 20] });
      } catch (error) {
        console.error("Failed to create GeoJSON layer:", error);
      }
    }
  }

  protected async post(): Promise<void> {
    // noop
  }
}
69
client/simple/src/js/router.ts
Normal file
@@ -0,0 +1,69 @@
// SPDX-License-Identifier: AGPL-3.0-or-later

import { load } from "./loader.ts";
import { Endpoints, endpoint, listen, ready, settings } from "./toolkit.ts";

ready(() => {
  document.documentElement.classList.remove("no-js");
  document.documentElement.classList.add("js");

  listen("click", ".close", function (this: HTMLElement) {
    (this.parentNode as HTMLElement)?.classList.add("invisible");
  });

  listen("click", ".searxng_init_map", async function (this: HTMLElement, event: Event) {
    event.preventDefault();
    this.classList.remove("searxng_init_map");

    load(() => import("./plugin/MapView.ts").then(({ default: Plugin }) => new Plugin(this)), {
      on: "endpoint",
      where: [Endpoints.results]
    });
  });

  if (settings.plugins?.includes("infiniteScroll")) {
    load(() => import("./plugin/InfiniteScroll.ts").then(({ default: Plugin }) => new Plugin()), {
      on: "endpoint",
      where: [Endpoints.results]
    });
  }

  if (settings.plugins?.includes("calculator")) {
    load(() => import("./plugin/Calculator.ts").then(({ default: Plugin }) => new Plugin()), {
      on: "endpoint",
      where: [Endpoints.results]
    });
  }
});

ready(
  () => {
    void import("./main/keyboard.ts");
    void import("./main/search.ts");

    if (settings.autocomplete) {
      void import("./main/autocomplete.ts");
    }
  },
  { on: [endpoint === Endpoints.index] }
);

ready(
  () => {
    void import("./main/keyboard.ts");
    void import("./main/results.ts");
    void import("./main/search.ts");

    if (settings.autocomplete) {
      void import("./main/autocomplete.ts");
    }
  },
  { on: [endpoint === Endpoints.results] }
);

ready(
  () => {
    void import("./main/preferences.ts");
  },
  { on: [endpoint === Endpoints.preferences] }
);
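Registering a further plugin would follow the same pattern: a dynamic import wrapped in `load()` and gated by the server-provided `settings.plugins` list. A hypothetical example (the plugin name and module path are placeholders, the `load()` options mirror the calls above):

// Hypothetical additional plugin, wired in the same style as the calls above.
if (settings.plugins?.includes("myPlugin")) {
  load(() => import("./plugin/MyPlugin.ts").then(({ default: Plugin }) => new Plugin()), {
    on: "endpoint",
    where: [Endpoints.results]
  });
}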
@@ -1,16 +1,16 @@
// SPDX-License-Identifier: AGPL-3.0-or-later

import type { KeyBindingLayout } from "../main/keyboard.ts";
import type { KeyBindingLayout } from "./main/keyboard.ts";

// synced with searx/webapp.py get_client_settings
type Settings = {
  plugins?: string[];
  advanced_search?: boolean;
  autocomplete?: string;
  autocomplete_min?: number;
  doi_resolver?: string;
  favicon_resolver?: string;
  hotkeys?: KeyBindingLayout;
  infinite_scroll?: boolean;
  method?: "GET" | "POST";
  query_in_title?: boolean;
  results_on_new_tab?: boolean;
@@ -32,8 +32,6 @@ type ReadyOptions = {
  on?: (boolean | undefined)[];
};

type AssertElement = (element?: HTMLElement | null) => asserts element is HTMLElement;

export type EndpointsKeys = keyof typeof Endpoints;

export const Endpoints = {
@@ -73,12 +71,6 @@ const getSettings = (): Settings => {
  }
};

export const assertElement: AssertElement = (element?: HTMLElement | null): asserts element is HTMLElement => {
  if (!element) {
    throw new Error("Bad assertion: DOM element not found");
  }
};

export const http = async (method: string, url: string | URL, options?: HTTPOptions): Promise<Response> => {
  const controller = new AbortController();
  const timeoutId = setTimeout(() => controller.abort(), options?.timeout ?? 30_000);
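The `http()` helper above couples `fetch` with an `AbortController`-based timeout. A self-contained sketch of that pattern; the `HTTPOptions` shape is assumed from the snippet, the body beyond the shown lines is not the project's implementation, and everything else is the standard Fetch API:

// Sketch of the fetch-with-timeout pattern used by the http() helper.
type HTTPOptions = RequestInit & { timeout?: number };

const httpSketch = async (method: string, url: string | URL, options: HTTPOptions = {}): Promise<Response> => {
  const { timeout = 30_000, ...init } = options;
  const controller = new AbortController();
  const timeoutId = setTimeout(() => controller.abort(), timeout); // abort once the timeout fires

  try {
    return await fetch(url, { ...init, method, signal: controller.signal });
  } finally {
    clearTimeout(timeoutId);
  }
};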
34
client/simple/src/js/util/appendAnswerElement.ts
Normal file
@@ -0,0 +1,34 @@
// SPDX-License-Identifier: AGPL-3.0-or-later

import { getElement } from "./getElement.ts";

export const appendAnswerElement = (element: HTMLElement | string | number): void => {
  const results = getElement<HTMLDivElement>("results");

  // ./searx/templates/elements/answers.html
  let answers = getElement<HTMLDivElement>("answers", { assert: false });
  if (!answers) {
    // The server did not render an answers container on this page: build one
    // that mirrors the markup of searx/templates/elements/answers.html.
    const answersTitle = document.createElement("h4");
    answersTitle.setAttribute("class", "title");
    answersTitle.setAttribute("id", "answers-title");
    answersTitle.textContent = "Answers : ";

    answers = document.createElement("div");
    answers.setAttribute("id", "answers");
    answers.setAttribute("role", "complementary");
    answers.setAttribute("aria-labelledby", "answers-title");
    answers.appendChild(answersTitle);
  }

  if (!(element instanceof HTMLElement)) {
    const span = document.createElement("span");
    span.innerHTML = element.toString();
    // biome-ignore lint/style/noParameterAssign: TODO
    element = span;
  }

  answers.appendChild(element);

  results.insertAdjacentElement("afterbegin", answers);
};
8
client/simple/src/js/util/assertElement.ts
Normal file
@@ -0,0 +1,8 @@
// SPDX-License-Identifier: AGPL-3.0-or-later

type AssertElement = <T>(element?: T | null) => asserts element is T;
export const assertElement: AssertElement = <T>(element?: T | null): asserts element is T => {
  if (!element) {
    throw new Error("DOM element not found");
  }
};
21
client/simple/src/js/util/getElement.ts
Normal file
@@ -0,0 +1,21 @@
// SPDX-License-Identifier: AGPL-3.0-or-later

import { assertElement } from "./assertElement.ts";

type Options = {
  assert?: boolean;
};

export function getElement<T>(id: string, options?: { assert: true }): T;
export function getElement<T>(id: string, options?: { assert: false }): T | null;
export function getElement<T>(id: string, options: Options = {}): T | null {
  options.assert ??= true;

  const element = document.getElementById(id) as T | null;

  if (options.assert) {
    assertElement(element);
  }

  return element;
}
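The two overloads let call sites pick between a throwing and a nullable lookup. A short usage sketch, with element ids borrowed from the plugins above:

// Throwing lookup: the default ({ assert: true }) narrows the result to T.
const searchInput = getElement<HTMLInputElement>("q");
searchInput.focus();

// Nullable lookup: with { assert: false } the caller handles a missing element.
const answers = getElement<HTMLDivElement>("answers", { assert: false });
if (!answers) {
  console.debug("no answers container on this page");
}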
@@ -1,19 +1,16 @@
|
||||
// SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
|
||||
iframe[src^="https://w.soundcloud.com"]
|
||||
{
|
||||
iframe[src^="https://w.soundcloud.com"] {
|
||||
height: 120px;
|
||||
}
|
||||
|
||||
iframe[src^="https://www.deezer.com"]
|
||||
{
|
||||
iframe[src^="https://www.deezer.com"] {
|
||||
// The real size is 92px, but 94px are needed to avoid an inner scrollbar of
|
||||
// the embedded HTML.
|
||||
height: 94px;
|
||||
}
|
||||
|
||||
iframe[src^="https://www.mixcloud.com"]
|
||||
{
|
||||
iframe[src^="https://www.mixcloud.com"] {
|
||||
// The embedded player from Mixcloud has some quirks: initially there is an
// issue with an image URL that is blocked since it is a Cross-Origin
// request. The alternative text (<img alt='Mixcloud Logo'>) then causes an
@@ -23,19 +20,16 @@ iframe[src^="https://www.mixcloud.com"]
|
||||
height: 250px;
|
||||
}
|
||||
|
||||
iframe[src^="https://bandcamp.com/EmbeddedPlayer"]
|
||||
{
|
||||
iframe[src^="https://bandcamp.com/EmbeddedPlayer"] {
|
||||
// show playlist
|
||||
height: 350px;
|
||||
}
|
||||
|
||||
iframe[src^="https://bandcamp.com/EmbeddedPlayer/track"]
|
||||
{
|
||||
iframe[src^="https://bandcamp.com/EmbeddedPlayer/track"] {
|
||||
// hide playlist
|
||||
height: 120px;
|
||||
}
|
||||
|
||||
iframe[src^="https://genius.com/songs"]
|
||||
{
|
||||
iframe[src^="https://genius.com/songs"] {
|
||||
height: 65px;
|
||||
}
|
||||
|
||||
@@ -8,7 +8,7 @@
|
||||
text-align: center;
|
||||
|
||||
.title {
|
||||
background: url("../img/searxng.png") no-repeat;
|
||||
background: url("./img/searxng.png") no-repeat;
|
||||
min-height: 4rem;
|
||||
margin: 4rem auto;
|
||||
background-position: center;
|
||||
|
||||
22
client/simple/src/less/result_types/file.less
Normal file
@@ -0,0 +1,22 @@
|
||||
// SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
|
||||
/*
|
||||
Layout of the Files result class
|
||||
*/
|
||||
|
||||
#main_results .result-file {
|
||||
border: 1px solid var(--color-result-border);
|
||||
margin: 0 @results-tablet-offset 1rem @results-tablet-offset !important;
|
||||
.rounded-corners;
|
||||
|
||||
video {
|
||||
width: 100%;
|
||||
aspect-ratio: 16 / 9;
|
||||
padding: 10px 0 0 0;
|
||||
}
|
||||
|
||||
audio {
|
||||
width: 100%;
|
||||
padding: 10px 0 0 0;
|
||||
}
|
||||
}
|
||||
@@ -178,7 +178,6 @@ html.no-js #clear_search.hide_if_nojs {
|
||||
#send_search {
|
||||
display: block;
|
||||
margin: 0;
|
||||
padding: 0.8rem;
|
||||
background: none repeat scroll 0 0 var(--color-search-background);
|
||||
border: none;
|
||||
outline: none;
|
||||
@@ -196,6 +195,7 @@ html.no-js #clear_search.hide_if_nojs {
|
||||
|
||||
#send_search {
|
||||
.ltr-rounded-right-corners(0.8rem);
|
||||
padding: 0.8rem;
|
||||
|
||||
&:hover {
|
||||
cursor: pointer;
|
||||
|
||||
@@ -163,12 +163,22 @@ article[data-vim-selected].category-videos,
|
||||
article[data-vim-selected].category-news,
|
||||
article[data-vim-selected].category-map,
|
||||
article[data-vim-selected].category-music,
|
||||
article[data-vim-selected].category-files,
|
||||
article[data-vim-selected].category-social {
|
||||
border: 1px solid var(--color-result-vim-arrow);
|
||||
.rounded-corners;
|
||||
}
|
||||
|
||||
.image-label-bottom-right() {
|
||||
position: absolute;
|
||||
right: 0;
|
||||
bottom: 0;
|
||||
background: var(--color-image-resolution-background);
|
||||
padding: 0.3rem 0.5rem;
|
||||
font-size: 0.9rem;
|
||||
color: var(--color-image-resolution-font);
|
||||
border-top-left-radius: 0.3rem;
|
||||
}
|
||||
|
||||
.result {
|
||||
margin: @results-margin 0;
|
||||
padding: @result-padding;
|
||||
@@ -295,12 +305,22 @@ article[data-vim-selected].category-social {
|
||||
color: var(--color-result-description-highlight-font);
|
||||
}
|
||||
|
||||
img.thumbnail {
|
||||
a.thumbnail_link {
|
||||
position: relative;
|
||||
margin-top: 0.6rem;
|
||||
.ltr-margin-right(1rem);
|
||||
.ltr-float-left();
|
||||
padding-top: 0.6rem;
|
||||
.ltr-padding-right(1rem);
|
||||
|
||||
img.thumbnail {
|
||||
width: 7rem;
|
||||
height: unset; // remove height value that was needed for lazy loading
|
||||
display: block;
|
||||
}
|
||||
|
||||
.thumbnail_length {
|
||||
.image-label-bottom-right();
|
||||
right: 6px;
|
||||
}
|
||||
}
|
||||
|
||||
.break {
|
||||
@@ -366,7 +386,6 @@ article[data-vim-selected].category-social {
|
||||
.category-news,
|
||||
.category-map,
|
||||
.category-music,
|
||||
.category-files,
|
||||
.category-social {
|
||||
border: 1px solid var(--color-result-border);
|
||||
margin: 0 @results-tablet-offset 1rem @results-tablet-offset !important;
|
||||
@@ -391,23 +410,19 @@ article[data-vim-selected].category-social {
|
||||
}
|
||||
|
||||
.result-videos {
|
||||
img.thumbnail {
|
||||
.ltr-float-left();
|
||||
padding-top: 0.6rem;
|
||||
.ltr-padding-right(1rem);
|
||||
a.thumbnail_link img.thumbnail {
|
||||
width: 20rem;
|
||||
height: unset; // remove height value that was needed for lazy loading
|
||||
}
|
||||
}
|
||||
|
||||
.result-videos .content {
|
||||
.content {
|
||||
overflow: hidden;
|
||||
}
|
||||
}
|
||||
|
||||
.result-videos .embedded-video iframe {
|
||||
.embedded-video iframe {
|
||||
width: 100%;
|
||||
aspect-ratio: 16 / 9;
|
||||
padding: 10px 0 0 0;
|
||||
}
|
||||
}
|
||||
|
||||
@supports not (aspect-ratio: 1 / 1) {
|
||||
@@ -472,14 +487,7 @@ article[data-vim-selected].category-social {
|
||||
}
|
||||
|
||||
.image_resolution {
|
||||
position: absolute;
|
||||
right: 0;
|
||||
bottom: 0;
|
||||
background: var(--color-image-resolution-background);
|
||||
padding: 0.3rem 0.5rem;
|
||||
font-size: 0.9rem;
|
||||
color: var(--color-image-resolution-font);
|
||||
border-top-left-radius: 0.3rem;
|
||||
.image-label-bottom-right();
|
||||
}
|
||||
|
||||
span.title,
|
||||
@@ -1158,3 +1166,4 @@ pre code {
|
||||
@import "result_types/keyvalue.less";
|
||||
@import "result_types/code.less";
|
||||
@import "result_types/paper.less";
|
||||
@import "result_types/file.less";
|
||||
|
||||
@@ -193,6 +193,15 @@ div.selectable_url {
|
||||
border-color: var(--color-warning);
|
||||
}
|
||||
|
||||
.dialog-warning-block {
|
||||
.dialog();
|
||||
|
||||
display: block;
|
||||
color: var(--color-warning);
|
||||
background: var(--color-warning-background);
|
||||
border-color: var(--color-warning);
|
||||
}
|
||||
|
||||
.dialog-modal {
|
||||
.dialog();
|
||||
|
||||
|
||||
@@ -4,7 +4,7 @@
|
||||
* Custom vite plugins to build the web-client components of the simple theme.
|
||||
*
|
||||
* HINT:
|
||||
* This is an inital implementation for the migration of the build process
|
||||
* This is an initial implementation for the migration of the build process
|
||||
* from grunt to vite. For fully support (vite: build & serve) more work is
|
||||
* needed.
|
||||
*/
|
||||
|
||||
@@ -46,39 +46,34 @@ export default {
|
||||
sourcemap: true,
|
||||
rolldownOptions: {
|
||||
input: {
|
||||
// build CSS files
|
||||
"searxng-ltr.css": `${PATH.src}/less/style-ltr.less`,
|
||||
"searxng-rtl.css": `${PATH.src}/less/style-rtl.less`,
|
||||
"rss.css": `${PATH.src}/less/rss.less`,
|
||||
// entrypoint
|
||||
core: `${PATH.src}/js/index.ts`,
|
||||
|
||||
// build script files
|
||||
"searxng.core": `${PATH.src}/js/core/index.ts`,
|
||||
|
||||
// ol pkg
|
||||
ol: `${PATH.src}/js/pkg/ol.ts`,
|
||||
"ol.css": `${PATH.modules}/ol/ol.css`
|
||||
// stylesheets
|
||||
ltr: `${PATH.src}/less/style-ltr.less`,
|
||||
rtl: `${PATH.src}/less/style-rtl.less`,
|
||||
rss: `${PATH.src}/less/rss.less`
|
||||
},
|
||||
|
||||
// file naming conventions / pathnames are relative to outDir (PATH.dist)
|
||||
output: {
|
||||
entryFileNames: "js/[name].min.js",
|
||||
chunkFileNames: "js/[name].min.js",
|
||||
entryFileNames: "sxng-[name].min.js",
|
||||
chunkFileNames: "chunk/[hash].min.js",
|
||||
assetFileNames: ({ names }: PreRenderedAsset): string => {
|
||||
const [name] = names;
|
||||
|
||||
const extension = name?.split(".").pop();
|
||||
switch (extension) {
|
||||
switch (name?.split(".").pop()) {
|
||||
case "css":
|
||||
return "css/[name].min[extname]";
|
||||
case "js":
|
||||
return "js/[name].min[extname]";
|
||||
case "png":
|
||||
case "svg":
|
||||
return "img/[name][extname]";
|
||||
return "sxng-[name].min[extname]";
|
||||
default:
|
||||
console.warn("Unknown asset:", name);
|
||||
return "[name][extname]";
|
||||
return "sxng-[name][extname]";
|
||||
}
|
||||
},
|
||||
sanitizeFileName: (name: string): string => {
|
||||
return name
|
||||
.normalize("NFD")
|
||||
.replace(/[^a-zA-Z0-9.-]/g, "_")
|
||||
.toLowerCase();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
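For illustration, the `sanitizeFileName` hook configured above maps arbitrary asset names onto lowercase, ASCII-safe names. A standalone sketch of the same transform (not part of the config; the input string is made up):

// Standalone sketch of the sanitizeFileName transform configured above.
const sanitizeFileName = (name: string): string =>
  name
    .normalize("NFD")
    .replace(/[^a-zA-Z0-9.-]/g, "_")
    .toLowerCase();

console.log(sanitizeFileName("Style (RTL).css")); // -> "style__rtl_.css"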
@@ -1,26 +0,0 @@
|
||||
contents:
|
||||
repositories:
|
||||
- https://dl-cdn.alpinelinux.org/alpine/edge/main
|
||||
- https://dl-cdn.alpinelinux.org/alpine/edge/community
|
||||
packages:
|
||||
- alpine-base
|
||||
- build-base
|
||||
- python3-dev
|
||||
- uv
|
||||
- brotli
|
||||
|
||||
entrypoint:
|
||||
command: /bin/sh -l
|
||||
|
||||
work-dir: /usr/local/searxng/
|
||||
|
||||
environment:
|
||||
PATH: /usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
|
||||
SSL_CERT_DIR: /etc/ssl/certs
|
||||
SSL_CERT_FILE: /etc/ssl/certs/ca-certificates.crt
|
||||
HISTFILE: /dev/null
|
||||
|
||||
archs:
|
||||
- x86_64
|
||||
- aarch64
|
||||
- armv7
|
||||
@@ -1,62 +0,0 @@
|
||||
contents:
|
||||
repositories:
|
||||
- https://dl-cdn.alpinelinux.org/alpine/edge/main
|
||||
packages:
|
||||
- alpine-baselayout
|
||||
- ca-certificates
|
||||
- ca-certificates-bundle
|
||||
- musl-locales
|
||||
- musl-locales-lang
|
||||
- tzdata
|
||||
- busybox
|
||||
- python3
|
||||
- wget
|
||||
|
||||
entrypoint:
|
||||
command: /bin/sh -l
|
||||
|
||||
work-dir: /usr/local/searxng/
|
||||
|
||||
accounts:
|
||||
groups:
|
||||
- groupname: searxng
|
||||
gid: 977
|
||||
users:
|
||||
- username: searxng
|
||||
uid: 977
|
||||
shell: /bin/ash
|
||||
|
||||
environment:
|
||||
PATH: /usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
|
||||
SSL_CERT_DIR: /etc/ssl/certs
|
||||
SSL_CERT_FILE: /etc/ssl/certs/ca-certificates.crt
|
||||
HISTFILE: /dev/null
|
||||
CONFIG_PATH: /etc/searxng
|
||||
DATA_PATH: /var/cache/searxng
|
||||
|
||||
paths:
|
||||
# Workdir
|
||||
- path: /usr/local/searxng/
|
||||
type: directory
|
||||
uid: 977
|
||||
gid: 977
|
||||
permissions: 0o555
|
||||
|
||||
# Config volume
|
||||
- path: /etc/searxng/
|
||||
type: directory
|
||||
uid: 977
|
||||
gid: 977
|
||||
permissions: 0o755
|
||||
|
||||
# Data volume
|
||||
- path: /var/cache/searxng/
|
||||
type: directory
|
||||
uid: 977
|
||||
gid: 977
|
||||
permissions: 0o755
|
||||
|
||||
archs:
|
||||
- x86_64
|
||||
- aarch64
|
||||
- armv7
|
||||
@@ -19,8 +19,7 @@ RUN --mount=type=cache,id=uv,target=/root/.cache/uv set -eux -o pipefail; \
|
||||
find ./.venv/lib/python*/site-packages/*.dist-info/ -type f -name "RECORD" -exec sort -t, -k1,1 -o {} {} \;; \
|
||||
find ./.venv/ -exec touch -h --date="@$TIMESTAMP_VENV" {} +
|
||||
|
||||
# use "--exclude=./searx/version_frozen.py" when actions/runner-images updates to Podman 5.0+
|
||||
COPY ./searx/ ./searx/
|
||||
COPY --exclude=./searx/version_frozen.py ./searx/ ./searx/
|
||||
|
||||
ARG TIMESTAMP_SETTINGS="0"
|
||||
|
||||
|
||||
@@ -4,10 +4,10 @@ ARG CONTAINER_IMAGE_NAME="searxng"
|
||||
FROM localhost/$CONTAINER_IMAGE_ORGANIZATION/$CONTAINER_IMAGE_NAME:builder AS builder
|
||||
FROM ghcr.io/searxng/base:searxng AS dist
|
||||
|
||||
COPY --chown=searxng:searxng --from=builder /usr/local/searxng/.venv/ ./.venv/
|
||||
COPY --chown=searxng:searxng --from=builder /usr/local/searxng/searx/ ./searx/
|
||||
COPY --chown=searxng:searxng ./container/ ./
|
||||
#COPY --chown=searxng:searxng ./searx/version_frozen.py ./searx/
|
||||
COPY --chown=977:977 --from=builder /usr/local/searxng/.venv/ ./.venv/
|
||||
COPY --chown=977:977 --from=builder /usr/local/searxng/searx/ ./searx/
|
||||
COPY --chown=977:977 ./container/ ./
|
||||
COPY --chown=977:977 ./searx/version_frozen.py ./searx/
|
||||
|
||||
ARG CREATED="0001-01-01T00:00:00Z"
|
||||
ARG VERSION="unknown"
|
||||
|
||||
@@ -48,7 +48,7 @@ solve the CAPTCHA from `qwant.com <https://www.qwant.com/>`__.
|
||||
|
||||
.. group-tab:: Firefox
|
||||
|
||||
.. kernel-figure:: answer-captcha/ffox-setting-proxy-socks.png
|
||||
.. kernel-figure:: /assets/answer-captcha/ffox-setting-proxy-socks.png
|
||||
:alt: FFox proxy on SOCKS5, 127.0.0.1:8080
|
||||
|
||||
Firefox's network settings
|
||||
@@ -66,4 +66,3 @@ solve the CAPTCHA from `qwant.com <https://www.qwant.com/>`__.
|
||||
|
||||
-N
|
||||
Do not execute a remote command. This is useful for just forwarding ports.
|
||||
|
||||
|
||||
@@ -100,7 +100,7 @@ Basic container instancing example:
|
||||
$ cd ./searxng/
|
||||
|
||||
# Run the container
|
||||
$ docker run --name searxng --replace -d \
|
||||
$ docker run --name searxng -d \
|
||||
-p 8888:8080 \
|
||||
-v "./config/:/etc/searxng/" \
|
||||
-v "./data/:/var/cache/searxng/" \
|
||||
|
||||
@@ -4,22 +4,5 @@
|
||||
``brand:``
|
||||
==========
|
||||
|
||||
.. code:: yaml
|
||||
|
||||
brand:
|
||||
issue_url: https://github.com/searxng/searxng/issues
|
||||
docs_url: https://docs.searxng.org
|
||||
public_instances: https://searx.space
|
||||
wiki_url: https://github.com/searxng/searxng/wiki
|
||||
|
||||
``issue_url`` :
|
||||
If you host your own issue tracker change this URL.
|
||||
|
||||
``docs_url`` :
|
||||
If you host your own documentation change this URL.
|
||||
|
||||
``public_instances`` :
|
||||
If you host your own https://searx.space change this URL.
|
||||
|
||||
``wiki_url`` :
|
||||
Link to your wiki (or ``false``)
|
||||
.. autoclass:: searx.brand.SettingsBrand
|
||||
:members:
|
||||
|
||||
@@ -69,6 +69,9 @@ The built-in plugins are all located in the namespace `searx.plugins`.
|
||||
searx.plugins.calculator.SXNGPlugin:
|
||||
active: true
|
||||
|
||||
searx.plugins.infinite_scroll.SXNGPlugin:
|
||||
active: false
|
||||
|
||||
searx.plugins.hash_plugin.SXNGPlugin:
|
||||
active: true
|
||||
|
||||
|
||||
@@ -12,7 +12,6 @@
|
||||
ui:
|
||||
default_locale: ""
|
||||
query_in_title: false
|
||||
infinite_scroll: false
|
||||
center_alignment: false
|
||||
cache_url: https://web.archive.org/web/
|
||||
default_theme: simple
|
||||
@@ -32,9 +31,6 @@
|
||||
When true, the result page's title contains the query; this decreases
privacy, since the browser can record the page titles.
|
||||
``infinite_scroll``:
|
||||
When true, automatically loads the next page when scrolling to bottom of the current page.
|
||||
|
||||
``center_alignment`` : default ``false``
|
||||
When enabled, the results are centered instead of being in the left (or RTL)
|
||||
side of the screen. This setting only affects the *desktop layout*
|
||||
|
||||
|
Before Width: | Height: | Size: 59 KiB After Width: | Height: | Size: 59 KiB |
1
docs/assets/sponsors/browserstack.svg
Normal file
File diff suppressed because one or more lines are too long
|
After Width: | Height: | Size: 7.2 KiB |
1
docs/assets/sponsors/docker.svg
Normal file
File diff suppressed because one or more lines are too long
|
After Width: | Height: | Size: 5.9 KiB |
1
docs/assets/sponsors/tuta.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 1024 384"><path fill="#410002" d="M479.178 119.294c-.533-.016-.998.357-1.218 1.078l-24.438 78.364.005-.004c-8.59 27.537 4.516 46.485 34.268 46.485 4.336 0 10.006-.357 11.776-.797.885-.264 1.33-.71 1.594-1.506l5.134-17.177c.264-.973-.09-1.77-1.507-1.683-4.517.445-8.588.797-12.308.797-14.964 0-21.075-7.968-16.646-22.224l10.446-33.47h30.373c.797 0 1.506-.445 1.858-1.418l5.401-17.355c.264-.973-.264-1.681-1.417-1.681H492.66l3.895-12.662c.265-.885.089-1.418-.62-2.034l-15.761-14.255c-.332-.3-.676-.45-.996-.459zm173.64 0c-.532-.016-.996.357-1.218 1.078l-24.436 78.364.005-.004c-8.59 27.537 4.517 46.485 34.268 46.485 4.342 0 10.004-.357 11.778-.797.884-.264 1.324-.71 1.593-1.506l5.133-17.177c.26-.973 0-1.77-1.508-1.683-4.517.445-8.59.797-12.307.797-14.966 0-21.077-7.968-16.646-22.224l10.445-33.47H690.3c.795 0 1.504-.445 1.854-1.418l5.402-17.355c.265-.973-.26-1.681-1.414-1.681H666.3l3.896-12.662c.265-.885.087-1.418-.618-2.034l-15.765-14.255c-.332-.3-.676-.45-.996-.459zm-48.32 29.404c-.974 0-1.503.444-1.862 1.417L590.502 188.9c-7.525 23.998-19.478 37.721-31.965 37.721-12.487 0-17.797-9.83-13.105-24.883l16.028-51.178c.351-1.149-.088-1.857-1.328-1.857H539.94c-.97 0-1.505.444-1.86 1.417l-15.497 49.233.008-.005c-8.765 27.982 5.315 46.31 27.452 46.31 12.747 0 22.756-6.111 29.93-16.118l-.176 12.838c0 1.241.621 1.593 1.681 1.593h14.17c1.064 0 1.504-.445 1.859-1.418l28.512-91.997c.35-1.15-.09-1.858-1.33-1.858zm147.96.005c-43.653 0-60.654 37.719-60.654 62.157-.09 21.339 13.282 34.798 31.08 34.798v.004c11.868 0 21.693-5.314 29.133-16.117v12.836c0 1.061.62 1.596 1.594 1.596h14.166c.974 0 1.505-.446 1.86-1.42l28.777-92.086c.265-.973-.266-1.768-1.24-1.768zm-.616 20.54h17.265l-6.197 19.57c-7.35 23.289-18.684 37.896-32.585 37.896-10.094 0-15.585-6.907-15.585-18.15 0-17.976 13.722-39.315 37.102-39.315z"/><path fill="#850122" d="M226.561 106.964c-.558.007-1.043.428-1.043 1.095V251.59c0 1.594 2.04 1.594 2.48 0L261.38 143.3c.445-1.241.446-2.039-.62-3.1l-33.204-32.762c-.299-.332-.66-.478-.996-.474zm55.983 41.739c-1.241 0-1.594.444-2.039 1.417l-43.919 142.203c-.176.797.177 1.594 1.242 1.594h145.747c1.418 0 2.04-.62 2.48-1.858l44.098-141.499c.445-1.417-.18-1.857-1.417-1.857zm-40.022-58.62c-1.418 0-1.594 1.242-.797 2.04l35.065 35.24c.796.798 1.594 1.061 2.836 1.061h149.467c1.065 0 1.68-1.24.62-2.214l-34.63-34.885c-.796-.796-1.592-1.242-3.274-1.242z"/></svg>
|
||||
|
After Width: | Height: | Size: 2.4 KiB |
@@ -120,6 +120,7 @@ ${fedora_build}
|
||||
pip install -U setuptools
|
||||
pip install -U wheel
|
||||
pip install -U pyyaml
|
||||
pip install -U msgspec
|
||||
|
||||
# jump to SearXNG's working tree and install SearXNG into virtualenv
|
||||
(${SERVICE_USER})$ cd \"$SEARXNG_SRC\"
|
||||
|
||||
8
docs/dev/engines/online/azure.rst
Normal file
@@ -0,0 +1,8 @@
|
||||
.. _azure engine:
|
||||
|
||||
===============
|
||||
Azure Resources
|
||||
===============
|
||||
|
||||
.. automodule:: searx.engines.azure
|
||||
:members:
|
||||
@@ -1,8 +0,0 @@
|
||||
.. _voidlinux mullvad_leta:
|
||||
|
||||
============
|
||||
Mullvad-Leta
|
||||
============
|
||||
|
||||
.. automodule:: searx.engines.mullvad_leta
|
||||
:members:
|
||||
8
docs/dev/engines/online/sourcehut.rst
Normal file
@@ -0,0 +1,8 @@
|
||||
.. _sourcehut engine:
|
||||
|
||||
=========
|
||||
Sourcehut
|
||||
=========
|
||||
|
||||
.. automodule:: searx.engines.sourcehut
|
||||
:members:
|
||||
@@ -10,6 +10,7 @@ Built-in Plugins
|
||||
calculator
|
||||
hash_plugin
|
||||
hostnames
|
||||
infinite_scroll
|
||||
self_info
|
||||
tor_check
|
||||
unit_converter
|
||||
|
||||
8
docs/dev/plugins/infinite_scroll.rst
Normal file
@@ -0,0 +1,8 @@
|
||||
.. _plugins.infinite_scroll:
|
||||
|
||||
===============
|
||||
Infinite scroll
|
||||
===============
|
||||
|
||||
.. automodule:: searx.plugins.infinite_scroll
|
||||
:members:
|
||||
7
docs/dev/result_types/main/file.rst
Normal file
@@ -0,0 +1,7 @@
|
||||
.. _result_types.file:
|
||||
|
||||
============
|
||||
File Results
|
||||
============
|
||||
|
||||
.. automodule:: searx.result_types.file
|
||||
@@ -17,6 +17,7 @@ following types have been implemented so far ..
|
||||
main/keyvalue
|
||||
main/code
|
||||
main/paper
|
||||
main/file
|
||||
|
||||
The :ref:`LegacyResult <LegacyResult>` is used internally for the results that
|
||||
have not yet been typed. The templates can be used as orientation until the
|
||||
@@ -28,5 +29,4 @@ final typing is complete.
|
||||
- :ref:`template torrent`
|
||||
- :ref:`template map`
|
||||
- :ref:`template packages`
|
||||
- :ref:`template files`
|
||||
- :ref:`template products`
|
||||
|
||||
@@ -60,7 +60,7 @@ Fields used in the template :origin:`macro result_sub_header
|
||||
publishedDate : :py:obj:`datetime.datetime`
|
||||
The date on which the object was published.
|
||||
|
||||
length: :py:obj:`time.struct_time`
|
||||
length: :py:obj:`datetime.timedelta`
|
||||
Playing duration.
|
||||
|
||||
views: :py:class:`str`
|
||||
@@ -469,38 +469,6 @@ links : :py:class:`dict`
|
||||
Additional links in the form of ``{'link_name': 'http://example.com'}``
|
||||
|
||||
|
||||
.. _template files:
|
||||
|
||||
``files.html``
|
||||
--------------
|
||||
|
||||
Displays result fields from:
|
||||
|
||||
- :ref:`macro result_header` and
|
||||
- :ref:`macro result_sub_header`
|
||||
|
||||
Additional fields used in the :origin:`code.html
|
||||
<searx/templates/simple/result_templates/files.html>`:
|
||||
|
||||
filename, size, time: :py:class:`str`
|
||||
Filename, Filesize and Date of the file.
|
||||
|
||||
mtype : ``audio`` | ``video`` | :py:class:`str`
|
||||
Mimetype type of the file.
|
||||
|
||||
subtype : :py:class:`str`
|
||||
Mimetype / subtype of the file.
|
||||
|
||||
abstract : :py:class:`str`
|
||||
Abstract of the file.
|
||||
|
||||
author : :py:class:`str`
|
||||
Name of the author of the file
|
||||
|
||||
embedded : :py:class:`str`
|
||||
URL of an embedded media type (``audio`` or ``video``) / is collapsible.
|
||||
|
||||
|
||||
.. _template products:
|
||||
|
||||
``products.html``
|
||||
|
||||
@@ -56,4 +56,34 @@ If you don't trust anyone, you can set up your own, see :ref:`installation`.
|
||||
utils/index
|
||||
src/index
|
||||
|
||||
|
||||
----------------
|
||||
Acknowledgements
|
||||
----------------
|
||||
|
||||
The following organizations have provided SearXNG access to their paid plans at
|
||||
no cost:
|
||||
|
||||
.. flat-table::
|
||||
:widths: 1 1
|
||||
|
||||
* - .. image:: /assets/sponsors/docker.svg
|
||||
:target: https://docker.com
|
||||
:alt: Docker
|
||||
:align: center
|
||||
:height: 100 px
|
||||
|
||||
- .. image:: /assets/sponsors/tuta.svg
|
||||
:target: https://tuta.com
|
||||
:alt: Tuta
|
||||
:align: center
|
||||
:height: 100 px
|
||||
|
||||
* - .. image:: /assets/sponsors/browserstack.svg
|
||||
:target: https://browserstack.com
|
||||
:alt: BrowserStack
|
||||
:align: center
|
||||
:height: 100 px
|
||||
|
||||
|
||||
.. _searx.space: https://searx.space
|
||||
|
||||
2
manage
@@ -117,7 +117,7 @@ EOF
|
||||
|
||||
dev.env() {
|
||||
go.env.dev
|
||||
nvm.env
|
||||
nvm.ensure
|
||||
node.env.dev
|
||||
|
||||
export GOENV
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
[tools]
|
||||
# minimal version we support
|
||||
python = "3.10"
|
||||
node = "24.3.0"
|
||||
node = "25"
|
||||
go = "1.24.5"
|
||||
shellcheck = "0.11.0"
|
||||
# python 3.10 uses 3.40.1 (on mac and win)
|
||||
|
||||
@@ -2,9 +2,9 @@ mock==5.2.0
|
||||
nose2[coverage_plugin]==0.15.1
|
||||
cov-core==1.15.0
|
||||
black==25.9.0
|
||||
pylint==3.3.9
|
||||
pylint==4.0.4
|
||||
splinter==0.21.0
|
||||
selenium==4.36.0
|
||||
selenium==4.38.0
|
||||
Pallets-Sphinx-Themes==2.3.0
|
||||
Sphinx==8.2.3 ; python_version >= '3.11'
|
||||
Sphinx==8.1.3 ; python_version < '3.11'
|
||||
@@ -23,6 +23,6 @@ wlc==1.16.1
|
||||
coloredlogs==15.0.1
|
||||
docutils>=0.21.2
|
||||
parameterized==0.9.0
|
||||
granian[reload]==2.5.5
|
||||
basedpyright==1.31.6
|
||||
types-lxml==2025.8.25
|
||||
granian[reload]==2.6.0
|
||||
basedpyright==1.35.0
|
||||
types-lxml==2025.11.25
|
||||
|
||||
@@ -1 +1,2 @@
|
||||
granian==2.5.5
|
||||
granian==2.6.0
|
||||
granian[pname]==2.6.0
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
certifi==2025.10.5
|
||||
certifi==2025.11.12
|
||||
babel==2.17.0
|
||||
flask-babel==4.0.0
|
||||
flask==3.1.2
|
||||
@@ -9,14 +9,13 @@ python-dateutil==2.9.0.post0
|
||||
pyyaml==6.0.3
|
||||
httpx[http2]==0.28.1
|
||||
httpx-socks[asyncio]==0.10.0
|
||||
Brotli==1.1.0
|
||||
setproctitle==1.3.7
|
||||
sniffio==1.3.1
|
||||
valkey==6.1.1
|
||||
markdown-it-py==3.0.0
|
||||
fasttext-predict==0.9.2.4
|
||||
tomli==2.3.0; python_version < '3.11'
|
||||
msgspec==0.19.0
|
||||
typer-slim==0.19.2
|
||||
msgspec==0.20.0
|
||||
typer-slim==0.20.0
|
||||
isodate==0.7.2
|
||||
whitenoise==6.11.0
|
||||
typing-extensions==4.14.1
|
||||
typing-extensions==4.15.0
|
||||
|
||||
@@ -9,7 +9,7 @@ from os.path import dirname, abspath
|
||||
|
||||
import logging
|
||||
|
||||
import searx.unixthreadname # pylint: disable=unused-import
|
||||
import msgspec
|
||||
|
||||
# Debug
|
||||
LOG_FORMAT_DEBUG: str = '%(levelname)-7s %(name)-30.30s: %(message)s'
|
||||
@@ -76,20 +76,22 @@ def get_setting(name: str, default: t.Any = _unset) -> t.Any:
|
||||
settings and the ``default`` is unset, a :py:obj:`KeyError` is raised.
|
||||
|
||||
"""
|
||||
value: dict[str, t.Any] = settings
|
||||
value = settings
|
||||
for a in name.split('.'):
|
||||
if isinstance(value, dict):
|
||||
value = value.get(a, _unset)
|
||||
if isinstance(value, msgspec.Struct):
|
||||
value = getattr(value, a, _unset)
|
||||
elif isinstance(value, dict):
|
||||
value = value.get(a, _unset) # pyright: ignore
|
||||
else:
|
||||
value = _unset # type: ignore
|
||||
value = _unset
|
||||
|
||||
if value is _unset:
|
||||
if default is _unset:
|
||||
raise KeyError(name)
|
||||
value = default # type: ignore
|
||||
value = default
|
||||
break
|
||||
|
||||
return value
|
||||
return value # pyright: ignore
|
||||
|
||||
|
||||
def _is_color_terminal():
|
||||
|
||||
68
searx/brand.py
Normal file
@@ -0,0 +1,68 @@
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
"""Implementations needed for a branding of SearXNG."""
|
||||
# pylint: disable=too-few-public-methods
|
||||
|
||||
# Struct fields aren't discovered in Python 3.14
|
||||
# - https://github.com/searxng/searxng/issues/5284
|
||||
from __future__ import annotations
|
||||
|
||||
__all__ = ["SettingsBrand"]
|
||||
|
||||
import msgspec
|
||||
|
||||
|
||||
class BrandCustom(msgspec.Struct, kw_only=True, forbid_unknown_fields=True):
|
||||
"""Custom settings in the brand section."""
|
||||
|
||||
links: dict[str, str] = {}
|
||||
"""Custom entries in the footer of the WEB page: ``[title]: [link]``"""
|
||||
|
||||
|
||||
class SettingsBrand(msgspec.Struct, kw_only=True, forbid_unknown_fields=True):
|
||||
"""Options for configuring brand properties.
|
||||
|
||||
.. code:: yaml
|
||||
|
||||
brand:
|
||||
issue_url: https://github.com/searxng/searxng/issues
|
||||
docs_url: https://docs.searxng.org
|
||||
public_instances: https://searx.space
|
||||
wiki_url: https://github.com/searxng/searxng/wiki
|
||||
|
||||
custom:
|
||||
links:
|
||||
Uptime: https://uptime.searxng.org/history/example-org
|
||||
About: https://example.org/user/about.html
|
||||
"""
|
||||
|
||||
issue_url: str = "https://github.com/searxng/searxng/issues"
|
||||
"""If you host your own issue tracker change this URL."""
|
||||
|
||||
docs_url: str = "https://docs.searxng.org"
|
||||
"""If you host your own documentation change this URL."""
|
||||
|
||||
public_instances: str = "https://searx.space"
|
||||
"""If you host your own https://searx.space change this URL."""
|
||||
|
||||
wiki_url: str = "https://github.com/searxng/searxng/wiki"
|
||||
"""Link to your wiki (or ``false``)"""
|
||||
|
||||
custom: BrandCustom = msgspec.field(default_factory=BrandCustom)
|
||||
"""Optional customizing.
|
||||
|
||||
.. autoclass:: searx.brand.BrandCustom
|
||||
:members:
|
||||
"""
|
||||
|
||||
# new_issue_url is a hackish solution tailored for only one hoster (GH). As
|
||||
# long as we don't have a more general solution, we should support it in the
|
||||
# given function, but it should not be expanded further.
|
||||
|
||||
new_issue_url: str = "https://github.com/searxng/searxng/issues/new"
|
||||
"""If you host your own issue tracker not on GitHub, then unset this URL.
|
||||
|
||||
Note: This URL will create a pre-filled GitHub bug report form for an
|
||||
engine. Since this feature is implemented only for GH (and limited to
|
||||
engines), it will probably be replaced by another solution in the near
|
||||
future.
|
||||
"""
|
||||
@@ -5,10 +5,6 @@
|
||||
----
|
||||
"""
|
||||
|
||||
# Struct fields aren't discovered in Python 3.14
|
||||
# - https://github.com/searxng/searxng/issues/5284
|
||||
from __future__ import annotations
|
||||
|
||||
__all__ = ["ExpireCacheCfg", "ExpireCacheStats", "ExpireCache", "ExpireCacheSQLite"]
|
||||
|
||||
import abc
|
||||
|
||||
File diff suppressed because it is too large
@@ -321,6 +321,7 @@
|
||||
"ja": "アルゼンチン・ペソ",
|
||||
"ko": "아르헨티나 페소",
|
||||
"lt": "Argentinos pesas",
|
||||
"lv": "Argentīnas peso",
|
||||
"ms": "Peso Argentina",
|
||||
"nl": "Argentijnse peso",
|
||||
"oc": "Peso",
|
||||
@@ -803,6 +804,7 @@
|
||||
"ja": "ボリビアーノ",
|
||||
"ko": "볼리비아 볼리비아노",
|
||||
"lt": "Bolivianas",
|
||||
"lv": "Bolīvijas boliviano",
|
||||
"ms": "Boliviano",
|
||||
"nl": "Boliviaanse boliviano",
|
||||
"oc": "Boliviano",
|
||||
@@ -848,6 +850,7 @@
|
||||
"ja": "レアル",
|
||||
"ko": "브라질 헤알",
|
||||
"lt": "Brazilijos realas",
|
||||
"lv": "Brazīlijas reāls",
|
||||
"ms": "Real Brazil",
|
||||
"nl": "Braziliaanse real",
|
||||
"oc": "Real",
|
||||
@@ -932,6 +935,7 @@
|
||||
"ja": "ニュルタム",
|
||||
"ko": "부탄 눌트럼",
|
||||
"lt": "Ngultrumas",
|
||||
"lv": "ngultrums",
|
||||
"ml": "ങൾട്രം",
|
||||
"ms": "Ngultrum Bhutan",
|
||||
"nl": "Bhutaanse ngultrum",
|
||||
@@ -1327,15 +1331,15 @@
|
||||
"cs": "Kolumbijské peso",
|
||||
"da": "Colombiansk peso",
|
||||
"de": "kolumbianischer Peso",
|
||||
"en": "Colombian peso",
|
||||
"en": "peso",
|
||||
"eo": "kolombia peso",
|
||||
"es": "peso colombiano",
|
||||
"es": "peso",
|
||||
"et": "Colombia peeso",
|
||||
"eu": "Peso kolonbiar",
|
||||
"fi": "Kolumbian peso",
|
||||
"fr": "peso colombien",
|
||||
"ga": "peso na Colóime",
|
||||
"gl": "Peso colombiano",
|
||||
"gl": "peso colombiano",
|
||||
"he": "פסו קולומביאני",
|
||||
"hr": "Kolumbijski pezo",
|
||||
"hu": "kolumbiai peso",
|
||||
@@ -1411,9 +1415,9 @@
|
||||
"cy": "peso (Ciwba)",
|
||||
"da": "Cubanske pesos",
|
||||
"de": "kubanischer Peso",
|
||||
"en": "Cuban peso",
|
||||
"en": "peso",
|
||||
"eo": "kuba peso",
|
||||
"es": "peso cubano",
|
||||
"es": "peso",
|
||||
"fi": "Kuuban peso",
|
||||
"fr": "peso cubain",
|
||||
"ga": "peso Chúba",
|
||||
@@ -1465,6 +1469,7 @@
|
||||
"ja": "カーボベルデ・エスクード",
|
||||
"ko": "카보베르데 이스쿠두",
|
||||
"lt": "Žaliojo Kyšulio eskudas",
|
||||
"lv": "Kaboverdes eskudo",
|
||||
"nl": "Kaapverdische escudo",
|
||||
"oc": "Escut de Cap Verd",
|
||||
"pl": "escudo Zielonego Przylądka",
|
||||
@@ -1565,7 +1570,7 @@
|
||||
"ar": "كرونة دنماركية",
|
||||
"bg": "Датска крона",
|
||||
"ca": "corona danesa",
|
||||
"cs": "Dánská koruna",
|
||||
"cs": "dánská koruna",
|
||||
"cy": "Krone Danaidd",
|
||||
"da": "dansk krone",
|
||||
"de": "dänische Krone",
|
||||
@@ -1715,7 +1720,7 @@
|
||||
"nl": "Egyptisch pond",
|
||||
"oc": "Liura egipciana",
|
||||
"pa": "ਮਿਸਰੀ ਪਾਊਂਡ",
|
||||
"pl": "Funt egipski",
|
||||
"pl": "funt egipski",
|
||||
"pt": "libra egípcia",
|
||||
"ro": "Liră egipteană",
|
||||
"ru": "египетский фунт",
|
||||
@@ -1772,7 +1777,7 @@
|
||||
"de": "Äthiopischer Birr",
|
||||
"en": "bir",
|
||||
"eo": "etiopa birro",
|
||||
"es": "Birr etíope",
|
||||
"es": "bir etíope",
|
||||
"fi": "Etiopian birr",
|
||||
"fr": "Birr",
|
||||
"ga": "birr",
|
||||
@@ -2035,6 +2040,7 @@
|
||||
"ja": "セディ",
|
||||
"ko": "가나 세디",
|
||||
"lt": "Sedis",
|
||||
"lv": "Ganas sedi",
|
||||
"ms": "Cedi Ghana",
|
||||
"nl": "Ghanese cedi",
|
||||
"oc": "Cedi",
|
||||
@@ -2149,6 +2155,7 @@
|
||||
"ja": "ギニア・フラン",
|
||||
"ko": "기니 프랑",
|
||||
"lt": "Gvinėjos frankas",
|
||||
"lv": "Gvinejas franks",
|
||||
"ms": "Franc Guinea",
|
||||
"nl": "Guineese frank",
|
||||
"oc": "Franc guinean",
|
||||
@@ -2859,6 +2866,7 @@
|
||||
"sl": "kirgiški som",
|
||||
"sr": "киргиски сом",
|
||||
"sv": "Kirgizistansk som",
|
||||
"szl": "Sōm (waluta)",
|
||||
"tr": "Kırgızistan somu",
|
||||
"tt": "кыргыз сумы",
|
||||
"uk": "сом"
|
||||
@@ -2964,6 +2972,7 @@
|
||||
"ms": "Won Korea Utara",
|
||||
"nl": "Noord-Koreaanse won",
|
||||
"pa": "ਉੱਤਰੀ ਕੋਰੀਆਈ ਵੌਨ",
|
||||
"pap": "won nortkoreano",
|
||||
"pl": "Won północnokoreański",
|
||||
"pt": "won norte-coreano",
|
||||
"ro": "Won nord-coreean",
|
||||
@@ -3792,9 +3801,9 @@
|
||||
"cs": "Mexické peso",
|
||||
"cy": "peso (Mecsico)",
|
||||
"de": "Mexikanischer Peso",
|
||||
"en": "Mexican peso",
|
||||
"en": "peso",
|
||||
"eo": "meksika peso",
|
||||
"es": "peso mexicano",
|
||||
"es": "peso",
|
||||
"et": "Mehhiko peeso",
|
||||
"eu": "Mexikar peso",
|
||||
"fi": "Meksikon peso",
|
||||
@@ -3810,6 +3819,7 @@
|
||||
"ja": "メキシコ・ペソ",
|
||||
"ko": "멕시코 페소",
|
||||
"lt": "Meksikos pesas",
|
||||
"lv": "Meksikas peso",
|
||||
"ms": "Peso Mexico",
|
||||
"nl": "Mexicaanse peso",
|
||||
"pa": "ਮੈਕਸੀਕੀ ਪੇਸੋ",
|
||||
@@ -3825,7 +3835,7 @@
|
||||
"tr": "Meksika pesosu",
|
||||
"tt": "Миксикә писысы",
|
||||
"uk": "мексиканський песо",
|
||||
"vi": "Peso Mexico"
|
||||
"vi": "Peso México"
|
||||
},
|
||||
"MXV": {
|
||||
"de": "UNIDAD DE INVERSION",
|
||||
@@ -3879,7 +3889,7 @@
|
||||
"MZN": {
|
||||
"ar": "مثقال موزنبيقي",
|
||||
"ca": "metical",
|
||||
"cs": "Mosambický metical",
|
||||
"cs": "mosambický metical",
|
||||
"cy": "Metical Mosambic",
|
||||
"da": "Metical",
|
||||
"de": "Metical",
|
||||
@@ -3972,6 +3982,7 @@
|
||||
"ja": "ナイラ",
|
||||
"ko": "나이지리아 나이라",
|
||||
"lt": "Naira",
|
||||
"lv": "Nigērijas naira",
|
||||
"ms": "Naira Nigeria",
|
||||
"nl": "Nigeriaanse naira",
|
||||
"oc": "Naira",
|
||||
@@ -4028,7 +4039,7 @@
|
||||
"ar": "كرونة نروجية",
|
||||
"bg": "норвежка крона",
|
||||
"ca": "corona noruega",
|
||||
"cs": "Norská koruna",
|
||||
"cs": "norská koruna",
|
||||
"cy": "krone Norwy",
|
||||
"da": "norsk krone",
|
||||
"de": "norwegische Krone",
|
||||
@@ -4208,7 +4219,7 @@
|
||||
"fi": "Panaman balboa",
|
||||
"fr": "Balboa",
|
||||
"ga": "balboa Phanama",
|
||||
"gl": "Balboa",
|
||||
"gl": "balboa",
|
||||
"he": "בלבואה",
|
||||
"hr": "Panamska balboa",
|
||||
"hu": "panamai balboa",
|
||||
@@ -4255,6 +4266,7 @@
|
||||
"ja": "ヌエボ・ソル",
|
||||
"ko": "페루 솔",
|
||||
"lt": "Naujasis solis",
|
||||
"lv": "Peru sols",
|
||||
"ms": "Nuevo Sol Peru",
|
||||
"nl": "Peruviaanse sol",
|
||||
"oc": "Nuevo Sol",
|
||||
@@ -4269,7 +4281,7 @@
|
||||
"tr": "Nuevo Sol",
|
||||
"tt": "Перу яңа соле",
|
||||
"uk": "Новий соль",
|
||||
"vi": "Sol Peru"
|
||||
"vi": "Sol Perú"
|
||||
},
|
||||
"PGK": {
|
||||
"ar": "كينا بابوا غينيا الجديدة",
|
||||
@@ -4779,7 +4791,7 @@
|
||||
"en": "Solomon Islands dollar",
|
||||
"eo": "salomona dolaro",
|
||||
"es": "dólar de las Islas Salomón",
|
||||
"fi": "Salomonsaarten dollari",
|
||||
"fi": "Salomoninsaarten dollari",
|
||||
"fr": "dollar des îles Salomon",
|
||||
"ga": "dollar Oileáin Sholaimh",
|
||||
"gl": "Dólar das Illas Salomón",
|
||||
@@ -4926,7 +4938,7 @@
|
||||
"ar": "دولار سنغافوري",
|
||||
"bg": "Сингапурски долар",
|
||||
"bn": "সিঙ্গাপুর ডলার",
|
||||
"ca": "dòlar de Singapur",
|
||||
"ca": "dòlar singapurès",
|
||||
"cs": "Singapurský dolar",
|
||||
"da": "singaporeansk dollar",
|
||||
"de": "Singapur-Dollar",
|
||||
@@ -5015,6 +5027,7 @@
|
||||
"ja": "レオン",
|
||||
"ko": "시에라리온 레온",
|
||||
"lt": "leonė",
|
||||
"lv": "Sjerraleones leone",
|
||||
"ms": "leone",
|
||||
"nl": "Sierra Leoonse leone",
|
||||
"oc": "leone",
|
||||
@@ -5052,6 +5065,7 @@
|
||||
"ja": "ソマリア・シリング",
|
||||
"ko": "소말리아 실링",
|
||||
"lt": "Somalio šilingas",
|
||||
"lv": "Somālijas šiliņš",
|
||||
"ms": "Shilling Somalia",
|
||||
"nl": "Somalische shilling",
|
||||
"pl": "Szyling somalijski",
|
||||
@@ -5497,7 +5511,7 @@
|
||||
"TTD": {
|
||||
"ar": "دولار ترينيداد وتوباغو",
|
||||
"bg": "Тринидадски и тобагски долар",
|
||||
"ca": "dòlar de Trinitat i Tobago",
|
||||
"ca": "dòlar de Trinidad i Tobago",
|
||||
"cs": "Dolar Trinidadu a Tobaga",
|
||||
"cy": "doler Trinidad a Thobago",
|
||||
"de": "Trinidad-und-Tobago-Dollar",
|
||||
@@ -5534,7 +5548,7 @@
|
||||
"af": "Nuwe Taiwannese dollar",
|
||||
"ar": "دولار تايواني جديد",
|
||||
"bg": "Нов тайвански долар",
|
||||
"ca": "nou dòlar de Taiwan",
|
||||
"ca": "Nou dòlar taiwanès",
|
||||
"cs": "Tchajwanský dolar",
|
||||
"cy": "Doler Newydd Taiwan",
|
||||
"da": "taiwan dollar",
|
||||
@@ -5715,7 +5729,7 @@
|
||||
"lv": "ASV dolārs",
|
||||
"ml": "യുണൈറ്റഡ് സ്റ്റേറ്റ്സ് ഡോളർ",
|
||||
"ms": "Dolar Amerika Syarikat",
|
||||
"nl": "US dollar",
|
||||
"nl": "Amerikaanse dollar",
|
||||
"oc": "dolar american",
|
||||
"pa": "ਸੰਯੁਕਤ ਰਾਜ ਡਾਲਰ",
|
||||
"pap": "Dollar merikano",
|
||||
@@ -5808,7 +5822,9 @@
|
||||
"lt": "Uzbekijos sumas",
|
||||
"lv": "Uzbekistānas soms",
|
||||
"nl": "Oezbeekse sum",
|
||||
"oc": "som ozbèc",
|
||||
"pa": "ਉਜ਼ਬੇਕਿਸਤਾਨੀ ਸੋਮ",
|
||||
"pap": "som usbekistani",
|
||||
"pl": "Sum",
|
||||
"pt": "som usbeque",
|
||||
"ro": "Som uzbec",
|
||||
@@ -5834,6 +5850,7 @@
|
||||
"en": "sovereign bolivar",
|
||||
"es": "bolívar soberano",
|
||||
"fr": "bolivar souverain",
|
||||
"gl": "bolívar soberano",
|
||||
"hu": "venezuelai bolívar",
|
||||
"ja": "ボリバル・ソベラノ",
|
||||
"pt": "Bolívar soberano",
|
||||
@@ -5948,6 +5965,7 @@
|
||||
"sk": "Tala",
|
||||
"sr": "самоанска тала",
|
||||
"sv": "Samoansk Tala",
|
||||
"tr": "Samoa talası",
|
||||
"tt": "самоа таласы",
|
||||
"uk": "Самоанська тала"
|
||||
},
|
||||
@@ -6095,12 +6113,14 @@
|
||||
"hu": "karibi forint",
|
||||
"it": "fiorino caraibico",
|
||||
"ja": "カリブ・ギルダー",
|
||||
"ko": "카리브 휠던",
|
||||
"nl": "Caribische gulden",
|
||||
"pap": "Florin karibense",
|
||||
"pl": "Gulden karaibski",
|
||||
"pt": "Florim do Caribe",
|
||||
"ro": "Gulden caraibian",
|
||||
"ru": "Карибский гульден",
|
||||
"sk": "Karibský gulden",
|
||||
"sl": "karibski goldinar"
|
||||
},
|
||||
"XDR": {
|
||||
@@ -6571,10 +6591,13 @@
|
||||
"R": "ZAR",
|
||||
"R$": "BRL",
|
||||
"RD$": "DOP",
|
||||
"RF": "RWF",
|
||||
"RM": "MYR",
|
||||
"RWF": "RWF",
|
||||
"Rf": "MVR",
|
||||
"Rp": "IDR",
|
||||
"Rs": "LKR",
|
||||
"R₣": "RWF",
|
||||
"S$": "SGD",
|
||||
"S/.": "PEN",
|
||||
"SI$": "SBD",
|
||||
@@ -6594,6 +6617,7 @@
|
||||
"Ush": "UGX",
|
||||
"VT": "VUV",
|
||||
"WS$": "WST",
|
||||
"XAF": "XAF",
|
||||
"XCG": "XCG",
|
||||
"XDR": "XDR",
|
||||
"Z$": "ZWL",
|
||||
@@ -6719,6 +6743,7 @@
|
||||
"argentinské peso": "ARS",
|
||||
"argentinski peso": "ARS",
|
||||
"argentinski pezo": "ARS",
|
||||
"argentīnas peso": "ARS",
|
||||
"ariari": "MGA",
|
||||
"ariari de madagascar": "MGA",
|
||||
"ariari de madagáscar": "MGA",
|
||||
@@ -7038,6 +7063,7 @@
|
||||
"bolívar soberano": "VES",
|
||||
"bolívar sobirà": "VES",
|
||||
"bolíviai boliviano": "BOB",
|
||||
"bolīvijas boliviano": "BOB",
|
||||
"bosenská konvertibilní marka": "BAM",
|
||||
"bosna hersek değiştirilebilir markı": "BAM",
|
||||
"bosnia and herzegovina convertible mark": "BAM",
|
||||
@@ -7074,6 +7100,7 @@
|
||||
"brazilski real": "BRL",
|
||||
"brazilský real": "BRL",
|
||||
"brazílsky real": "BRL",
|
||||
"brazīlijas reāls": "BRL",
|
||||
"brezilya reali": "BRL",
|
||||
"brit font": "GBP",
|
||||
"brita pundo": "GBP",
|
||||
@@ -7147,6 +7174,7 @@
|
||||
"burundžio frankas": "BIF",
|
||||
"butana ngultrumo": "BTN",
|
||||
"butanski ngultrum": "BTN",
|
||||
"butānas ngultrums": "BTN",
|
||||
"butut": "GMD",
|
||||
"bututs": "GMD",
|
||||
"bwp": "BWP",
|
||||
@@ -7818,6 +7846,7 @@
|
||||
"dirrã marroquino": "MAD",
|
||||
"dírham de los emiratos árabes unidos": "AED",
|
||||
"dírham dels emirats àrabs units": "AED",
|
||||
"dírham emiratià": "AED",
|
||||
"dírham marroquí": "MAD",
|
||||
"djf": "DJF",
|
||||
"djiboeti frank": "DJF",
|
||||
@@ -8232,9 +8261,7 @@
|
||||
"dòlar de singapur": "SGD",
|
||||
"dòlar de surinam": "SRD",
|
||||
"dòlar de taiwan": "TWD",
|
||||
"dòlar de trinitat": "TTD",
|
||||
"dòlar de trinitat i tobago": "TTD",
|
||||
"dòlar de trinitat tobago": "TTD",
|
||||
"dòlar de trinidad i tobago": "TTD",
|
||||
"dòlar de zimbàbue": "ZWL",
|
||||
"dòlar del canadà": "CAD",
|
||||
"dòlar del carib oriental": "XCD",
|
||||
@@ -8250,6 +8277,7 @@
|
||||
"dòlar namibià": "NAD",
|
||||
"dòlar neozelandès": "NZD",
|
||||
"dòlar salomonès": "SBD",
|
||||
"dòlar singapurès": "SGD",
|
||||
"dòlar surinamès": "SRD",
|
||||
"dòlar taiwanès": "TWD",
|
||||
"dòlars canadencs": "CAD",
|
||||
@@ -8894,6 +8922,7 @@
|
||||
"gambijski dalasi": "GMD",
|
||||
"gambijský dalasi": "GMD",
|
||||
"ganaa cedio": "GHS",
|
||||
"ganas sedi": "GHS",
|
||||
"ganski cedi": "GHS",
|
||||
"gbp": "GBP",
|
||||
"gbp£": "GBP",
|
||||
@@ -9043,6 +9072,7 @@
|
||||
"gvatemalski kvecal": "GTQ",
|
||||
"gvatemalski quetzal": "GTQ",
|
||||
"gvinea franko": "GNF",
|
||||
"gvinejas franks": "GNF",
|
||||
"gvinejski franak": "GNF",
|
||||
"gvinejski frank": "GNF",
|
||||
"gvinėjos frankas": "GNF",
|
||||
@@ -9370,6 +9400,7 @@
|
||||
"kaaimaneilandse dollar": "KYD",
|
||||
"kaapverdische escudo": "CVE",
|
||||
"kaboverda eskudo": "CVE",
|
||||
"kaboverdes eskudo": "CVE",
|
||||
"kaiman dollar": "KYD",
|
||||
"kaimanu dolārs": "KYD",
|
||||
"kaimanu salu dolārs": "KYD",
|
||||
@@ -9779,6 +9810,7 @@
|
||||
"lari na seoirsia": "GEL",
|
||||
"lario": "GEL",
|
||||
"laris": "GEL",
|
||||
"lári": "GEL",
|
||||
"länsi afrikan cfa frangi": "XOF",
|
||||
"lbp": "LBP",
|
||||
"ld": "LYD",
|
||||
@@ -10305,6 +10337,7 @@
|
||||
"meksika peso": "MXN",
|
||||
"meksika pesosu": "MXN",
|
||||
"meksikaanse peso": "MXN",
|
||||
"meksikas peso": "MXN",
|
||||
"meksikon peso": "MXN",
|
||||
"meksikos pesas": "MXN",
|
||||
"meticais": "MZN",
|
||||
@@ -10513,6 +10546,7 @@
|
||||
"ngultrum na bútáine": "BTN",
|
||||
"ngultrumas": "BTN",
|
||||
"ngultrumo": "BTN",
|
||||
"ngultrums": "BTN",
|
||||
"ngwee": "ZMW",
|
||||
"nhân dân tệ": "CNY",
|
||||
"nhân dân tệ trung quốc": "CNY",
|
||||
@@ -10540,6 +10574,7 @@
|
||||
"nigerijská naira": "NGN",
|
||||
"nigériai naira": "NGN",
|
||||
"nigérijská naira": "NGN",
|
||||
"nigērijas naira": "NGN",
|
||||
"niĝera najro": "NGN",
|
||||
"niĝeria najro": "NGN",
|
||||
"nijerya nairası": "NGN",
|
||||
@@ -10668,7 +10703,6 @@
|
||||
"nuevo dólar taiwanes": "TWD",
|
||||
"nuevo dólar taiwanés": "TWD",
|
||||
"nuevo peso": [
|
||||
"UYU",
|
||||
"MXN",
|
||||
"ARS"
|
||||
],
|
||||
@@ -10866,6 +10900,7 @@
|
||||
"penny": "GBP",
|
||||
"perak sebagai pelaburan": "XAG",
|
||||
"peru nueva solü": "PEN",
|
||||
"peru sols": "PEN",
|
||||
"perua nova suno": "PEN",
|
||||
"peruanischer nuevo sol": "PEN",
|
||||
"peruanischer sol": "PEN",
|
||||
@@ -10940,7 +10975,6 @@
|
||||
"peso de méxico": "MXN",
|
||||
"peso de republica dominicana": "DOP",
|
||||
"peso de república dominicana": "DOP",
|
||||
"peso de uruguay": "UYU",
|
||||
"peso de xile": "CLP",
|
||||
"peso do chile": "CLP",
|
||||
"peso do uruguai": "UYU",
|
||||
@@ -11587,7 +11621,6 @@
|
||||
"rúpia indiana": "INR",
|
||||
"rúpies": "INR",
|
||||
"rūpija": "IDR",
|
||||
"rwanda franc": "RWF",
|
||||
"rwanda frank": "RWF",
|
||||
"rwandan franc": "RWF",
|
||||
"rwandan frank": "RWF",
|
||||
@@ -11629,6 +11662,7 @@
|
||||
"samoa dolaro": "WST",
|
||||
"samoa tala": "WST",
|
||||
"samoa talao": "WST",
|
||||
"samoa talası": "WST",
|
||||
"samoaanse tala": "WST",
|
||||
"samoan tala": "WST",
|
||||
"samoan tālā": "WST",
|
||||
@@ -11827,6 +11861,7 @@
|
||||
"sistema unificato di compensazione regionale": "XSU",
|
||||
"sistema único de compensación regional": "XSU",
|
||||
"sjekel": "ILS",
|
||||
"sjerraleones leone": "SLE",
|
||||
"sjevernokorejski von": "KPW",
|
||||
"sle": "SLE",
|
||||
"sll": "SLE",
|
||||
@@ -11839,10 +11874,10 @@
|
||||
"sol d'or": "PEN",
|
||||
"sol de oro": "PEN",
|
||||
"sol novo": "PEN",
|
||||
"sol peru": "PEN",
|
||||
"sol peruan": "PEN",
|
||||
"sol peruano": "PEN",
|
||||
"sol peruviano": "PEN",
|
||||
"sol perú": "PEN",
|
||||
"solomon adaları doları": "SBD",
|
||||
"solomon dollar": "SBD",
|
||||
"solomon islands dollar": "SBD",
|
||||
@@ -11868,8 +11903,10 @@
|
||||
"som kîrgîz": "KGS",
|
||||
"som na cirgeastáine": "KGS",
|
||||
"som na húisbéiceastáine": "UZS",
|
||||
"som ozbèc": "UZS",
|
||||
"som quirguiz": "KGS",
|
||||
"som usbeco": "UZS",
|
||||
"som usbekistani": "UZS",
|
||||
"som usbeque": "UZS",
|
||||
"som uzbec": "UZS",
|
||||
"som uzbeco": "UZS",
|
||||
@@ -11892,6 +11929,7 @@
|
||||
"somas": "KGS",
|
||||
"somálsky šiling": "SOS",
|
||||
"somálský šilink": "SOS",
|
||||
"somālijas šiliņš": "SOS",
|
||||
"some": "KGS",
|
||||
"somoni": "TJS",
|
||||
"somoni na táidsíceastáine": "TJS",
|
||||
@@ -11915,6 +11953,7 @@
|
||||
"sovjetisk rubel": "RUB",
|
||||
"soʻm": "UZS",
|
||||
"soʻm uzbekistan": "UZS",
|
||||
"sōm": "KGS",
|
||||
"söm": "UZS",
|
||||
"special drawing right": "XDR",
|
||||
"special drawing rights": "XDR",
|
||||
@@ -12660,6 +12699,7 @@
|
||||
"won nord coréen": "KPW",
|
||||
"won nordcoreano": "KPW",
|
||||
"won norte coreano": "KPW",
|
||||
"won nortkoreano": "KPW",
|
||||
"won południowokoreański": "KRW",
|
||||
"won północnokoreański": "KPW",
|
||||
"won sud corean": "KRW",
|
||||
@@ -14440,6 +14480,7 @@
|
||||
"דולר פיג'י": "FJD",
|
||||
"דולר קיימני": "KYD",
|
||||
"דולר קנדי": "CAD",
|
||||
"דולר של איי קיימן": "KYD",
|
||||
"דונג וייטנאמי ": "VND",
|
||||
"דינר אלג'ירי": "DZD",
|
||||
"דינר בחרייני": "BHD",
|
||||
@@ -14647,6 +14688,7 @@
|
||||
"الجنيه الإسترليني": "GBP",
|
||||
"الجنيه السودانى": "SDG",
|
||||
"الجنيه المصري": "EGP",
|
||||
"الدولار الامريكي": "IQD",
|
||||
"الدولار البربادوسي": "BBD",
|
||||
"الدولار البهامي": "BSD",
|
||||
"الدولار الكندي": "CAD",
|
||||
@@ -14906,6 +14948,7 @@
|
||||
"شيلينغ كينيي": "KES",
|
||||
"عملة السعودية": "SAR",
|
||||
"عملة المملكة العربية السعودية": "SAR",
|
||||
"عملة ذهبيه": "IQD",
|
||||
"عملة قطر": "QAR",
|
||||
"غواراني": "PYG",
|
||||
"غواراني باراغواي": "PYG",
|
||||
@@ -15354,7 +15397,6 @@
|
||||
"యునైటెడ్ స్టేట్స్ డాలర్": "USD",
|
||||
"యూరో": "EUR",
|
||||
"రూపాయి": "INR",
|
||||
"సంయుక్త రాష్ట్రాల డాలర్": "USD",
|
||||
"స్విస్ ఫ్రాంక్": "CHF",
|
||||
"അൾജീരിയൻ ദിനാർ": "DZD",
|
||||
"ഇന്തോനേഷ്യൻ റുപിയ": "IDR",
|
||||
@@ -15735,6 +15777,7 @@
|
||||
"203"
|
||||
],
|
||||
"칠레 페소": "CLP",
|
||||
"카리브 휠던": "XCG",
|
||||
"카보베르데 에스쿠도": "CVE",
|
||||
"카보베르데 이스쿠두": "CVE",
|
||||
"카보베르데에스쿠도": "CVE",
|
||||
|
||||
File diff suppressed because one or more lines are too long
@@ -1,5 +1,6 @@
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
"""Simple implementation to store TrackerPatterns data in a SQL database."""
|
||||
# pylint: disable=too-many-branches
|
||||
|
||||
import typing as t
|
||||
|
||||
@@ -119,6 +120,12 @@ class TrackerPatternsDB:
|
||||
|
||||
for rule in self.rules():
|
||||
|
||||
query_str: str = parsed_new_url.query
|
||||
if not query_str:
|
||||
# There are no more query arguments in the parsed_new_url on
|
||||
# which rules can be applied, stop iterating over the rules.
|
||||
break
|
||||
|
||||
if not re.match(rule[self.Fields.url_regexp], new_url):
|
||||
# no match / ignore pattern
|
||||
continue
|
||||
@@ -136,19 +143,33 @@ class TrackerPatternsDB:
|
||||
# overlapping urlPattern like ".*"
|
||||
continue
|
||||
|
||||
# remove tracker arguments from the url-query part
|
||||
query_args: list[tuple[str, str]] = list(parse_qsl(parsed_new_url.query))
|
||||
|
||||
if query_args:
|
||||
# remove tracker arguments from the url-query part
|
||||
for name, val in query_args.copy():
|
||||
# remove URL arguments
|
||||
for pattern in rule[self.Fields.del_args]:
|
||||
if re.match(pattern, name):
|
||||
log.debug("TRACKER_PATTERNS: %s remove tracker arg: %s='%s'", parsed_new_url.netloc, name, val)
|
||||
log.debug(
|
||||
"TRACKER_PATTERNS: %s remove tracker arg: %s='%s'", parsed_new_url.netloc, name, val
|
||||
)
|
||||
query_args.remove((name, val))
|
||||
|
||||
parsed_new_url = parsed_new_url._replace(query=urlencode(query_args))
|
||||
new_url = urlunparse(parsed_new_url)
|
||||
|
||||
else:
|
||||
# The query argument for URLs like:
|
||||
# - 'http://example.org?q=' --> query_str is 'q=' and query_args is []
|
||||
# - 'http://example.org?/foo/bar' --> query_str is 'foo/bar' and query_args is []
|
||||
# is a simple string and not a key/value dict.
|
||||
for pattern in rule[self.Fields.del_args]:
|
||||
if re.match(pattern, query_str):
|
||||
log.debug("TRACKER_PATTERNS: %s remove tracker arg: '%s'", parsed_new_url.netloc, query_str)
|
||||
parsed_new_url = parsed_new_url._replace(query="")
|
||||
new_url = urlunparse(parsed_new_url)
|
||||
break
|
||||
|
||||
if new_url != url:
|
||||
return new_url
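The two branches above handle different shapes of the URL's query part; a minimal standard-library sketch (illustration only, not part of the patch) shows why the key/value loop alone would miss the second shape:

    # parse_qsl() drops blank values and strings without "=", so query_args
    # can be empty even though the URL still carries a query string.
    from urllib.parse import parse_qsl, urlparse

    print(parse_qsl(urlparse("http://example.org?q=").query))           # -> []
    print(parse_qsl(urlparse("http://example.org?/foo/bar").query))     # -> []
    print(parse_qsl(urlparse("http://example.org?utm_ref=x&q=1").query))
    # -> [('utm_ref', 'x'), ('q', '1')]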
@@ -5,7 +5,7 @@
     ],
     "ua": "Mozilla/5.0 ({os}; rv:{version}) Gecko/20100101 Firefox/{version}",
     "versions": [
-        "143.0",
-        "142.0"
+        "145.0",
+        "144.0"
     ]
 }
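With the updated version list, a rendered User-Agent reads like the following; the {os} value comes from the os list elided above, so the platform string shown here is only an assumed example:

    Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:145.0) Gecko/20100101 Firefox/145.0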
@@ -3294,6 +3294,16 @@
|
||||
"symbol": "slug",
|
||||
"to_si_factor": 14.593903
|
||||
},
|
||||
"Q136416965": {
|
||||
"si_name": null,
|
||||
"symbol": "GT/S",
|
||||
"to_si_factor": null
|
||||
},
|
||||
"Q136417074": {
|
||||
"si_name": null,
|
||||
"symbol": "MT/S",
|
||||
"to_si_factor": null
|
||||
},
|
||||
"Q1374438": {
|
||||
"si_name": "Q11574",
|
||||
"symbol": "ks",
|
||||
@@ -5449,6 +5459,11 @@
|
||||
"symbol": "T",
|
||||
"to_si_factor": 907.18474
|
||||
},
|
||||
"Q4741": {
|
||||
"si_name": null,
|
||||
"symbol": "RF",
|
||||
"to_si_factor": null
|
||||
},
|
||||
"Q474533": {
|
||||
"si_name": null,
|
||||
"symbol": "At",
|
||||
@@ -6375,9 +6390,9 @@
         "to_si_factor": 86400.0
     },
     "Q577": {
-        "si_name": null,
+        "si_name": "Q11574",
         "symbol": "a",
-        "to_si_factor": null
+        "to_si_factor": 31557600.0
     },
     "Q57899268": {
         "si_name": "Q3332095",
@@ -270,7 +270,14 @@ def load_engines(engine_list: list[dict[str, t.Any]]):
|
||||
categories.clear()
|
||||
categories['general'] = []
|
||||
for engine_data in engine_list:
|
||||
if engine_data.get("inactive") is True:
|
||||
continue
|
||||
engine = load_engine(engine_data)
|
||||
if engine:
|
||||
register_engine(engine)
|
||||
else:
|
||||
# if an engine can't be loaded (if for example the engine is missing
|
||||
# tor or some other requirements) its set to inactive!
|
||||
logger.error("loading engine %s failed: set engine to inactive!", engine_data.get("name", "???"))
|
||||
engine_data["inactive"] = True
|
||||
return engines
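Illustration only: the entries handed to load_engines() are the parsed engine items from settings.yml; a hypothetical list exercising both paths of the new check:

    engine_list = [
        {"name": "example offline", "engine": "example_module", "inactive": True},  # skipped up front
        {"name": "example online", "engine": "example_module", "shortcut": "ex"},   # loaded and registered
    ]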
@@ -12,7 +12,7 @@ from urllib.parse import urlencode, urljoin, urlparse
|
||||
import lxml
|
||||
import babel
|
||||
|
||||
from searx.utils import extract_text, eval_xpath_list, eval_xpath_getindex
|
||||
from searx.utils import extract_text, eval_xpath_list, eval_xpath_getindex, searxng_useragent
|
||||
from searx.enginelib.traits import EngineTraits
|
||||
from searx.locales import language_tag
|
||||
|
||||
@@ -45,7 +45,7 @@ def request(query, params):
|
||||
query += ' (' + eng_lang + ')'
|
||||
# wiki.archlinux.org is protected by anubis
|
||||
# - https://github.com/searxng/searxng/issues/4646#issuecomment-2817848019
|
||||
params['headers']['User-Agent'] = "SearXNG"
|
||||
params['headers']['User-Agent'] = searxng_useragent()
|
||||
elif netloc == 'wiki.archlinuxcn.org':
|
||||
base_url = 'https://' + netloc + '/wzh/index.php?'
|
||||
|
||||
@@ -120,7 +120,7 @@ def fetch_traits(engine_traits: EngineTraits):
|
||||
'zh': 'Special:搜索',
|
||||
}
|
||||
|
||||
resp = get('https://wiki.archlinux.org/')
|
||||
resp = get('https://wiki.archlinux.org/', timeout=3)
|
||||
if not resp.ok: # type: ignore
|
||||
print("ERROR: response from wiki.archlinux.org is not OK.")
|
||||
|
||||
|
||||
@@ -50,7 +50,7 @@ def response(resp):
|
||||
pos = script.index(end_tag) + len(end_tag) - 1
|
||||
script = script[:pos]
|
||||
|
||||
json_resp = utils.js_variable_to_python(script)
|
||||
json_resp = utils.js_obj_str_to_python(script)
|
||||
|
||||
results = []
|
||||
|
||||
|
||||
searx/engines/azure.py (new file, 190 lines)
@@ -0,0 +1,190 @@
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
"""Engine for Azure resources. This engine mimics the standard search bar in Azure
|
||||
Portal (for resources and resource groups).
|
||||
|
||||
Configuration
|
||||
=============
|
||||
|
||||
You must `register an application in Microsoft Entra ID`_ and assign it the
|
||||
'Reader' role in your subscription.
|
||||
|
||||
To use this engine, add an entry similar to the following to your engine list in
|
||||
``settings.yml``:
|
||||
|
||||
.. code:: yaml
|
||||
|
||||
- name: azure
|
||||
engine: azure
|
||||
...
|
||||
azure_tenant_id: "your_tenant_id"
|
||||
azure_client_id: "your_client_id"
|
||||
azure_client_secret: "your_client_secret"
|
||||
azure_token_expiration_seconds: 5000
|
||||
|
||||
.. _register an application in Microsoft Entra ID:
|
||||
https://learn.microsoft.com/en-us/entra/identity-platform/quickstart-register-app
|
||||
|
||||
"""
|
||||
import typing as t
|
||||
|
||||
from searx.enginelib import EngineCache
|
||||
from searx.network import post as http_post
|
||||
from searx.result_types import EngineResults
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
from searx.extended_types import SXNG_Response
|
||||
from searx.search.processors import OnlineParams
|
||||
|
||||
engine_type = "online"
|
||||
categories = ["it", "cloud"]
|
||||
|
||||
# Default values, should be overridden in settings.yml
|
||||
azure_tenant_id = ""
|
||||
azure_client_id = ""
|
||||
azure_client_secret = ""
|
||||
azure_token_expiration_seconds = 5000
|
||||
"""Time for which an auth token is valid (sec.)"""
|
||||
azure_batch_endpoint = "https://management.azure.com/batch?api-version=2020-06-01"
|
||||
|
||||
about = {
|
||||
"website": "https://www.portal.azure.com",
|
||||
"wikidata_id": "Q725967",
|
||||
"official_api_documentation": "https://learn.microsoft.com/en-us/\
|
||||
rest/api/azure-resourcegraph/?view=rest-azureresourcegraph-resourcegraph-2024-04-01",
|
||||
"use_official_api": True,
|
||||
"require_api_key": True,
|
||||
"results": "JSON",
|
||||
"language": "en",
|
||||
}
|
||||
|
||||
CACHE: EngineCache
|
||||
"""Persistent (SQLite) key/value cache that deletes its values after ``expire``
|
||||
seconds."""
|
||||
|
||||
|
||||
def setup(engine_settings: dict[str, t.Any]) -> bool:
|
||||
"""Initialization of the engine.
|
||||
|
||||
- Instantiate a cache for this engine (:py:obj:`CACHE`).
|
||||
- Checks whether the tenant_id, client_id and client_secret are set,
|
||||
otherwise the engine is inactive.
|
||||
|
||||
"""
|
||||
global CACHE # pylint: disable=global-statement
|
||||
CACHE = EngineCache(engine_settings["name"])
|
||||
|
||||
missing_opts: list[str] = []
|
||||
for opt in ("azure_tenant_id", "azure_client_id", "azure_client_secret"):
|
||||
if not engine_settings.get(opt, ""):
|
||||
missing_opts.append(opt)
|
||||
if missing_opts:
|
||||
logger.error("missing values for options: %s", ", ".join(missing_opts))
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def authenticate(t_id: str, c_id: str, c_secret: str) -> str:
|
||||
"""Authenticates to Azure using Oauth2 Client Credentials Flow and returns
|
||||
an access token."""
|
||||
|
||||
url = f"https://login.microsoftonline.com/{t_id}/oauth2/v2.0/token"
|
||||
body = {
|
||||
"client_id": c_id,
|
||||
"client_secret": c_secret,
|
||||
"grant_type": "client_credentials",
|
||||
"scope": "https://management.azure.com/.default",
|
||||
}
|
||||
|
||||
resp: SXNG_Response = http_post(url, body, timeout=5)
|
||||
if resp.status_code != 200:
|
||||
raise RuntimeError(f"Azure authentication failed (status {resp.status_code}): {resp.text}")
|
||||
return resp.json()["access_token"]
|
||||
|
||||
|
||||
def get_auth_token(t_id: str, c_id: str, c_secret: str) -> str:
|
||||
key = f"azure_tenant_id: {t_id:}, azure_client_id: {c_id}, azure_client_secret: {c_secret}"
|
||||
token: str | None = CACHE.get(key)
|
||||
if token:
|
||||
return token
|
||||
token = authenticate(t_id, c_id, c_secret)
|
||||
CACHE.set(key=key, value=token, expire=azure_token_expiration_seconds)
|
||||
return token
|
||||
|
||||
|
||||
def request(query: str, params: "OnlineParams") -> None:
|
||||
|
||||
token = get_auth_token(azure_tenant_id, azure_client_id, azure_client_secret)
|
||||
|
||||
params["url"] = azure_batch_endpoint
|
||||
params["method"] = "POST"
|
||||
params["headers"]["Authorization"] = f"Bearer {token}"
|
||||
params["headers"]["Content-Type"] = "application/json"
|
||||
params["json"] = {
|
||||
"requests": [
|
||||
{
|
||||
"url": "/providers/Microsoft.ResourceGraph/resources?api-version=2024-04-01",
|
||||
"httpMethod": "POST",
|
||||
"name": "resourceGroups",
|
||||
"requestHeaderDetails": {"commandName": "Microsoft.ResourceGraph"},
|
||||
"content": {
|
||||
"query": (
|
||||
f"ResourceContainers"
|
||||
f" | where (name contains ('{query}'))"
|
||||
f" | where (type =~ ('Microsoft.Resources/subscriptions/resourcegroups'))"
|
||||
f" | project id,name,type,kind,subscriptionId,resourceGroup"
|
||||
f" | extend matchscore = name startswith '{query}'"
|
||||
f" | extend normalizedName = tolower(tostring(name))"
|
||||
f" | sort by matchscore desc, normalizedName asc"
|
||||
f" | take 30"
|
||||
)
|
||||
},
|
||||
},
|
||||
{
|
||||
"url": "/providers/Microsoft.ResourceGraph/resources?api-version=2024-04-01",
|
||||
"httpMethod": "POST",
|
||||
"name": "resources",
|
||||
"requestHeaderDetails": {
|
||||
"commandName": "Microsoft.ResourceGraph",
|
||||
},
|
||||
"content": {
|
||||
"query": f"Resources | where name contains '{query}' | take 30",
|
||||
},
|
||||
},
|
||||
]
|
||||
}
|
||||
|
||||
|
||||
def response(resp: "SXNG_Response") -> EngineResults:
|
||||
res = EngineResults()
|
||||
json_data = resp.json()
|
||||
|
||||
for result in json_data["responses"]:
|
||||
if result["name"] == "resourceGroups":
|
||||
for data in result["content"]["data"]:
|
||||
res.add(
|
||||
res.types.MainResult(
|
||||
url=(
|
||||
f"https://portal.azure.com/#@/resource"
|
||||
f"/subscriptions/{data['subscriptionId']}/resourceGroups/{data['name']}/overview"
|
||||
),
|
||||
title=data["name"],
|
||||
content=f"Resource Group in Subscription: {data['subscriptionId']}",
|
||||
)
|
||||
)
|
||||
elif result["name"] == "resources":
|
||||
for data in result["content"]["data"]:
|
||||
res.add(
|
||||
res.types.MainResult(
|
||||
url=(
|
||||
f"https://portal.azure.com/#@/resource"
|
||||
f"/subscriptions/{data['subscriptionId']}/resourceGroups/{data['resourceGroup']}"
|
||||
f"/providers/{data['type']}/{data['name']}/overview"
|
||||
),
|
||||
title=data["name"],
|
||||
content=(
|
||||
f"Resource of type {data['type']} in Subscription:"
|
||||
f" {data['subscriptionId']}, Resource Group: {data['resourceGroup']}"
|
||||
),
|
||||
)
|
||||
)
|
||||
return res
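For orientation, a trimmed, hypothetical batch response in the shape this parser expects; the keys are exactly the ones accessed above, the values are invented:

    example_batch_response = {
        "responses": [
            {
                "name": "resourceGroups",
                "content": {"data": [
                    {"name": "rg-demo", "resourceGroup": "rg-demo",
                     "subscriptionId": "00000000-0000-0000-0000-000000000000",
                     "type": "Microsoft.Resources/subscriptions/resourcegroups"},
                ]},
            },
            {
                "name": "resources",
                "content": {"data": [
                    {"name": "vm-demo", "resourceGroup": "rg-demo",
                     "subscriptionId": "00000000-0000-0000-0000-000000000000",
                     "type": "Microsoft.Compute/virtualMachines"},
                ]},
            },
        ]
    }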
@@ -108,6 +108,10 @@ def request(query, params):
|
||||
time_ranges = {'day': '1', 'week': '2', 'month': '3', 'year': f'5_{unix_day-365}_{unix_day}'}
|
||||
params['url'] += f'&filters=ex1:"ez{time_ranges[params["time_range"]]}"'
|
||||
|
||||
# in some regions where geoblocking is employed (e.g. China),
|
||||
# www.bing.com redirects to the regional version of Bing
|
||||
params['allow_redirects'] = True
|
||||
|
||||
return params
|
||||
|
||||
|
||||
@@ -197,7 +201,6 @@ def fetch_traits(engine_traits: EngineTraits):
|
||||
"User-Agent": gen_useragent(),
|
||||
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
|
||||
"Accept-Language": "en-US;q=0.5,en;q=0.3",
|
||||
"Accept-Encoding": "gzip, deflate, br",
|
||||
"DNT": "1",
|
||||
"Connection": "keep-alive",
|
||||
"Upgrade-Insecure-Requests": "1",
|
||||
|
||||
@@ -124,17 +124,17 @@ from urllib.parse import (
|
||||
urlparse,
|
||||
)
|
||||
|
||||
import json
|
||||
from dateutil import parser
|
||||
from lxml import html
|
||||
|
||||
from searx import locales
|
||||
from searx.utils import (
|
||||
extr,
|
||||
extract_text,
|
||||
eval_xpath,
|
||||
eval_xpath_list,
|
||||
eval_xpath_getindex,
|
||||
js_variable_to_python,
|
||||
js_obj_str_to_python,
|
||||
js_obj_str_to_json_str,
|
||||
get_embeded_stream_url,
|
||||
)
|
||||
from searx.enginelib.traits import EngineTraits
|
||||
@@ -142,17 +142,17 @@ from searx.result_types import EngineResults
|
||||
from searx.extended_types import SXNG_Response
|
||||
|
||||
about = {
|
||||
"website": 'https://search.brave.com/',
|
||||
"wikidata_id": 'Q22906900',
|
||||
"website": "https://search.brave.com/",
|
||||
"wikidata_id": "Q22906900",
|
||||
"official_api_documentation": None,
|
||||
"use_official_api": False,
|
||||
"require_api_key": False,
|
||||
"results": 'HTML',
|
||||
"results": "HTML",
|
||||
}
|
||||
|
||||
base_url = "https://search.brave.com/"
|
||||
categories = []
|
||||
brave_category: t.Literal["search", "videos", "images", "news", "goggles"] = 'search'
|
||||
brave_category: t.Literal["search", "videos", "images", "news", "goggles"] = "search"
|
||||
"""Brave supports common web-search, videos, images, news, and goggles search.
|
||||
|
||||
- ``search``: Common WEB search
|
||||
@@ -182,74 +182,87 @@ to do more won't return any result and you will most likely be flagged as a bot.
|
||||
"""
|
||||
|
||||
safesearch = True
|
||||
safesearch_map = {2: 'strict', 1: 'moderate', 0: 'off'} # cookie: safesearch=off
|
||||
safesearch_map = {2: "strict", 1: "moderate", 0: "off"} # cookie: safesearch=off
|
||||
|
||||
time_range_support = False
|
||||
"""Brave only supports time-range in :py:obj:`brave_category` ``search`` (UI
|
||||
category All) and in the goggles category."""
|
||||
|
||||
time_range_map: dict[str, str] = {
|
||||
'day': 'pd',
|
||||
'week': 'pw',
|
||||
'month': 'pm',
|
||||
'year': 'py',
|
||||
"day": "pd",
|
||||
"week": "pw",
|
||||
"month": "pm",
|
||||
"year": "py",
|
||||
}
|
||||
|
||||
|
||||
def request(query: str, params: dict[str, t.Any]) -> None:
|
||||
|
||||
# Don't accept br encoding / see https://github.com/searxng/searxng/pull/1787
|
||||
params['headers']['Accept-Encoding'] = 'gzip, deflate'
|
||||
|
||||
args: dict[str, t.Any] = {
|
||||
'q': query,
|
||||
'source': 'web',
|
||||
"q": query,
|
||||
"source": "web",
|
||||
}
|
||||
if brave_spellcheck:
|
||||
args['spellcheck'] = '1'
|
||||
args["spellcheck"] = "1"
|
||||
|
||||
if brave_category in ('search', 'goggles'):
|
||||
if params.get('pageno', 1) - 1:
|
||||
args['offset'] = params.get('pageno', 1) - 1
|
||||
if time_range_map.get(params['time_range']):
|
||||
args['tf'] = time_range_map.get(params['time_range'])
|
||||
if brave_category in ("search", "goggles"):
|
||||
if params.get("pageno", 1) - 1:
|
||||
args["offset"] = params.get("pageno", 1) - 1
|
||||
if time_range_map.get(params["time_range"]):
|
||||
args["tf"] = time_range_map.get(params["time_range"])
|
||||
|
||||
if brave_category == 'goggles':
|
||||
args['goggles_id'] = Goggles
|
||||
if brave_category == "goggles":
|
||||
args["goggles_id"] = Goggles
|
||||
|
||||
params["headers"]["Accept-Encoding"] = "gzip, deflate"
|
||||
params["url"] = f"{base_url}{brave_category}?{urlencode(args)}"
|
||||
logger.debug("url %s", params["url"])
|
||||
|
||||
# set properties in the cookies
|
||||
|
||||
params['cookies']['safesearch'] = safesearch_map.get(params['safesearch'], 'off')
|
||||
# the useLocation is IP based, we use cookie 'country' for the region
|
||||
params['cookies']['useLocation'] = '0'
|
||||
params['cookies']['summarizer'] = '0'
|
||||
params["cookies"]["safesearch"] = safesearch_map.get(params["safesearch"], "off")
|
||||
# the useLocation is IP based, we use cookie "country" for the region
|
||||
params["cookies"]["useLocation"] = "0"
|
||||
params["cookies"]["summarizer"] = "0"
|
||||
|
||||
engine_region = traits.get_region(params['searxng_locale'], 'all')
|
||||
params['cookies']['country'] = engine_region.split('-')[-1].lower() # type: ignore
|
||||
engine_region = traits.get_region(params["searxng_locale"], "all")
|
||||
params["cookies"]["country"] = engine_region.split("-")[-1].lower() # type: ignore
|
||||
|
||||
ui_lang = locales.get_engine_locale(params['searxng_locale'], traits.custom["ui_lang"], 'en-us')
|
||||
params['cookies']['ui_lang'] = ui_lang
|
||||
|
||||
logger.debug("cookies %s", params['cookies'])
|
||||
|
||||
params['headers']['Sec-Fetch-Dest'] = "document"
|
||||
params['headers']['Sec-Fetch-Mode'] = "navigate"
|
||||
params['headers']['Sec-Fetch-Site'] = "same-origin"
|
||||
params['headers']['Sec-Fetch-User'] = "?1"
|
||||
ui_lang = locales.get_engine_locale(params["searxng_locale"], traits.custom["ui_lang"], "en-us")
|
||||
params["cookies"]["ui_lang"] = ui_lang
|
||||
logger.debug("cookies %s", params["cookies"])
|
||||
|
||||
|
||||
def _extract_published_date(published_date_raw):
|
||||
def _extract_published_date(published_date_raw: str | None):
|
||||
if published_date_raw is None:
|
||||
return None
|
||||
|
||||
try:
|
||||
return parser.parse(published_date_raw)
|
||||
except parser.ParserError:
|
||||
return None
|
||||
|
||||
|
||||
def extract_json_data(text: str) -> dict[str, t.Any]:
|
||||
# Example script source containing the data:
|
||||
#
|
||||
# kit.start(app, element, {
|
||||
# node_ids: [0, 19],
|
||||
# data: [{type:"data",data: .... ["q","goggles_id"],route:1,url:1}}]
|
||||
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
text = text[text.index("<script") : text.index("</script")]
|
||||
if not text:
|
||||
raise ValueError("can't find JS/JSON data in the given text")
|
||||
start = text.index("data: [{")
|
||||
end = text.rindex("}}]")
|
||||
js_obj_str = text[start:end]
|
||||
js_obj_str = "{" + js_obj_str + "}}]}"
|
||||
# js_obj_str = js_obj_str.replace("\xa0", "") # remove ASCII for
|
||||
# js_obj_str = js_obj_str.replace(r"\u003C", "<").replace(r"\u003c", "<") # fix broken HTML tags in strings
|
||||
json_str = js_obj_str_to_json_str(js_obj_str)
|
||||
data: dict[str, t.Any] = json.loads(json_str)
|
||||
return data
|
||||
|
||||
|
||||
def response(resp: SXNG_Response) -> EngineResults:
|
||||
|
||||
if brave_category in ('search', 'goggles'):
|
||||
@@ -264,11 +277,8 @@ def response(resp: SXNG_Response) -> EngineResults:
|
||||
# node_ids: [0, 19],
|
||||
# data: [{type:"data",data: .... ["q","goggles_id"],route:1,url:1}}]
|
||||
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
js_object = "[{" + extr(resp.text, "data: [{", "}}],") + "}}]"
|
||||
json_data = js_variable_to_python(js_object)
|
||||
|
||||
# json_data is a list and at the second position (0,1) in this list we find the "response" data we need ..
|
||||
json_resp = json_data[1]['data']['body']['response']
|
||||
json_data: dict[str, t.Any] = extract_json_data(resp.text)
|
||||
json_resp: dict[str, t.Any] = json_data['data'][1]["data"]['body']['response']
|
||||
|
||||
if brave_category == 'images':
|
||||
return _parse_images(json_resp)
|
||||
@@ -278,150 +288,124 @@ def response(resp: SXNG_Response) -> EngineResults:
|
||||
raise ValueError(f"Unsupported brave category: {brave_category}")
|
||||
|
||||
|
||||
def _parse_search(resp) -> EngineResults:
|
||||
result_list = EngineResults()
|
||||
|
||||
def _parse_search(resp: SXNG_Response) -> EngineResults:
|
||||
res = EngineResults()
|
||||
dom = html.fromstring(resp.text)
|
||||
|
||||
# I doubt that Brave is still providing the "answer" class / I haven't seen
|
||||
# answers in brave for a long time.
|
||||
answer_tag = eval_xpath_getindex(dom, '//div[@class="answer"]', 0, default=None)
|
||||
if answer_tag:
|
||||
url = eval_xpath_getindex(dom, '//div[@id="featured_snippet"]/a[@class="result-header"]/@href', 0, default=None)
|
||||
answer = extract_text(answer_tag)
|
||||
if answer is not None:
|
||||
result_list.add(result_list.types.Answer(answer=answer, url=url))
|
||||
for result in eval_xpath_list(dom, "//div[contains(@class, 'snippet ')]"):
|
||||
|
||||
# xpath_results = '//div[contains(@class, "snippet fdb") and @data-type="web"]'
|
||||
xpath_results = '//div[contains(@class, "snippet ")]'
|
||||
|
||||
for result in eval_xpath_list(dom, xpath_results):
|
||||
|
||||
url = eval_xpath_getindex(result, './/a[contains(@class, "h")]/@href', 0, default=None)
|
||||
title_tag = eval_xpath_getindex(
|
||||
result, './/a[contains(@class, "h")]//div[contains(@class, "title")]', 0, default=None
|
||||
)
|
||||
url: str | None = eval_xpath_getindex(result, ".//a/@href", 0, default=None)
|
||||
title_tag = eval_xpath_getindex(result, ".//div[contains(@class, 'title')]", 0, default=None)
|
||||
if url is None or title_tag is None or not urlparse(url).netloc: # partial url likely means it's an ad
|
||||
continue
|
||||
|
||||
content: str = extract_text(
|
||||
eval_xpath_getindex(result, './/div[contains(@class, "snippet-description")]', 0, default='')
|
||||
) # type: ignore
|
||||
pub_date_raw = eval_xpath(result, 'substring-before(.//div[contains(@class, "snippet-description")], "-")')
|
||||
pub_date = _extract_published_date(pub_date_raw)
|
||||
if pub_date and content.startswith(pub_date_raw):
|
||||
content = content.lstrip(pub_date_raw).strip("- \n\t")
|
||||
content: str = ""
|
||||
pub_date = None
|
||||
|
||||
thumbnail = eval_xpath_getindex(result, './/img[contains(@class, "thumb")]/@src', 0, default='')
|
||||
# there are other classes like 'site-name-content' we don't want to match,
|
||||
# however only using contains(@class, 'content') would e.g. also match `site-name-content`
|
||||
# thus, we explicitly also require the spaces as class separator
|
||||
_content = eval_xpath_getindex(result, ".//div[contains(concat(' ', @class, ' '), ' content ')]", 0, default="")
|
||||
if len(_content):
|
||||
content = extract_text(_content) # type: ignore
|
||||
_pub_date = extract_text(
|
||||
eval_xpath_getindex(_content, ".//span[contains(@class, 't-secondary')]", 0, default="")
|
||||
)
|
||||
if _pub_date:
|
||||
pub_date = _extract_published_date(_pub_date)
|
||||
content = content.lstrip(_pub_date).strip("- \n\t")
|
||||
|
||||
item = {
|
||||
'url': url,
|
||||
'title': extract_text(title_tag),
|
||||
'content': content,
|
||||
'publishedDate': pub_date,
|
||||
'thumbnail': thumbnail,
|
||||
}
|
||||
thumbnail: str = eval_xpath_getindex(result, ".//a[contains(@class, 'thumbnail')]//img/@src", 0, default="")
|
||||
|
||||
item = res.types.LegacyResult(
|
||||
template="default.html",
|
||||
url=url,
|
||||
title=extract_text(title_tag),
|
||||
content=content,
|
||||
publishedDate=pub_date,
|
||||
thumbnail=thumbnail,
|
||||
)
|
||||
res.add(item)
|
||||
|
||||
video_tag = eval_xpath_getindex(
|
||||
result, './/div[contains(@class, "video-snippet") and @data-macro="video"]', 0, default=None
|
||||
result, ".//div[contains(@class, 'video-snippet') and @data-macro='video']", 0, default=[]
|
||||
)
|
||||
if video_tag is not None:
|
||||
|
||||
if len(video_tag):
|
||||
# In my tests a video tag in the WEB search was most often not a
|
||||
# video, except the ones from youtube ..
|
||||
|
||||
iframe_src = get_embeded_stream_url(url)
|
||||
if iframe_src:
|
||||
item['iframe_src'] = iframe_src
|
||||
item['template'] = 'videos.html'
|
||||
item['thumbnail'] = eval_xpath_getindex(video_tag, './/img/@src', 0, default='')
|
||||
pub_date_raw = extract_text(
|
||||
eval_xpath(video_tag, './/div[contains(@class, "snippet-attributes")]/div/text()')
|
||||
)
|
||||
item['publishedDate'] = _extract_published_date(pub_date_raw)
|
||||
else:
|
||||
item['thumbnail'] = eval_xpath_getindex(video_tag, './/img/@src', 0, default='')
|
||||
item["iframe_src"] = iframe_src
|
||||
item["template"] = "videos.html"
|
||||
|
||||
result_list.append(item)
|
||||
|
||||
return result_list
|
||||
return res
|
||||
|
||||
|
||||
def _parse_news(resp) -> EngineResults:
|
||||
|
||||
result_list = EngineResults()
|
||||
def _parse_news(resp: SXNG_Response) -> EngineResults:
|
||||
res = EngineResults()
|
||||
dom = html.fromstring(resp.text)
|
||||
|
||||
for result in eval_xpath_list(dom, '//div[contains(@class, "results")]//div[@data-type="news"]'):
|
||||
for result in eval_xpath_list(dom, "//div[contains(@class, 'results')]//div[@data-type='news']"):
|
||||
|
||||
# import pdb
|
||||
# pdb.set_trace()
|
||||
|
||||
url = eval_xpath_getindex(result, './/a[contains(@class, "result-header")]/@href', 0, default=None)
|
||||
url = eval_xpath_getindex(result, ".//a[contains(@class, 'result-header')]/@href", 0, default=None)
|
||||
if url is None:
|
||||
continue
|
||||
|
||||
title = extract_text(eval_xpath_list(result, './/span[contains(@class, "snippet-title")]'))
|
||||
content = extract_text(eval_xpath_list(result, './/p[contains(@class, "desc")]'))
|
||||
thumbnail = eval_xpath_getindex(result, './/div[contains(@class, "image-wrapper")]//img/@src', 0, default='')
|
||||
title = eval_xpath_list(result, ".//span[contains(@class, 'snippet-title')]")
|
||||
content = eval_xpath_list(result, ".//p[contains(@class, 'desc')]")
|
||||
thumbnail = eval_xpath_getindex(result, ".//div[contains(@class, 'image-wrapper')]//img/@src", 0, default="")
|
||||
|
||||
item = {
|
||||
"url": url,
|
||||
"title": title,
|
||||
"content": content,
|
||||
"thumbnail": thumbnail,
|
||||
}
|
||||
item = res.types.LegacyResult(
|
||||
template="default.html",
|
||||
url=url,
|
||||
title=extract_text(title),
|
||||
thumbnail=thumbnail,
|
||||
content=extract_text(content),
|
||||
)
|
||||
res.add(item)
|
||||
|
||||
result_list.append(item)
|
||||
|
||||
return result_list
|
||||
return res
|
||||
|
||||
|
||||
def _parse_images(json_resp) -> EngineResults:
|
||||
result_list = EngineResults()
|
||||
def _parse_images(json_resp: dict[str, t.Any]) -> EngineResults:
|
||||
res = EngineResults()
|
||||
|
||||
for result in json_resp["results"]:
|
||||
item = {
|
||||
'url': result['url'],
|
||||
'title': result['title'],
|
||||
'content': result['description'],
|
||||
'template': 'images.html',
|
||||
'resolution': result['properties']['format'],
|
||||
'source': result['source'],
|
||||
'img_src': result['properties']['url'],
|
||||
'thumbnail_src': result['thumbnail']['src'],
|
||||
}
|
||||
result_list.append(item)
|
||||
item = res.types.LegacyResult(
|
||||
template="images.html",
|
||||
url=result["url"],
|
||||
title=result["title"],
|
||||
source=result["source"],
|
||||
img_src=result["properties"]["url"],
|
||||
thumbnail_src=result["thumbnail"]["src"],
|
||||
)
|
||||
res.add(item)
|
||||
|
||||
return result_list
|
||||
return res
|
||||
|
||||
|
||||
def _parse_videos(json_resp) -> EngineResults:
|
||||
result_list = EngineResults()
|
||||
def _parse_videos(json_resp: dict[str, t.Any]) -> EngineResults:
|
||||
res = EngineResults()
|
||||
|
||||
for result in json_resp["results"]:
|
||||
|
||||
url = result['url']
|
||||
item = {
|
||||
'url': url,
|
||||
'title': result['title'],
|
||||
'content': result['description'],
|
||||
'template': 'videos.html',
|
||||
'length': result['video']['duration'],
|
||||
'duration': result['video']['duration'],
|
||||
'publishedDate': _extract_published_date(result['age']),
|
||||
}
|
||||
|
||||
if result['thumbnail'] is not None:
|
||||
item['thumbnail'] = result['thumbnail']['src']
|
||||
|
||||
iframe_src = get_embeded_stream_url(url)
|
||||
item = res.types.LegacyResult(
|
||||
template="videos.html",
|
||||
url=result["url"],
|
||||
title=result["title"],
|
||||
content=result["description"],
|
||||
length=result["video"]["duration"],
|
||||
duration=result["video"]["duration"],
|
||||
publishedDate=_extract_published_date(result["age"]),
|
||||
)
|
||||
if result["thumbnail"] is not None:
|
||||
item["thumbnail"] = result["thumbnail"]["src"]
|
||||
iframe_src = get_embeded_stream_url(result["url"])
|
||||
if iframe_src:
|
||||
item['iframe_src'] = iframe_src
|
||||
item["iframe_src"] = iframe_src
|
||||
|
||||
result_list.append(item)
|
||||
res.add(item)
|
||||
|
||||
return result_list
|
||||
return res
|
||||
|
||||
|
||||
def fetch_traits(engine_traits: EngineTraits):
|
||||
@@ -436,34 +420,31 @@ def fetch_traits(engine_traits: EngineTraits):
|
||||
|
||||
engine_traits.custom["ui_lang"] = {}
|
||||
|
||||
headers = {
|
||||
'Accept-Encoding': 'gzip, deflate',
|
||||
}
|
||||
lang_map = {'no': 'nb'} # norway
|
||||
|
||||
# languages (UI)
|
||||
|
||||
resp = get('https://search.brave.com/settings', headers=headers)
|
||||
resp = get('https://search.brave.com/settings')
|
||||
|
||||
if not resp.ok: # type: ignore
|
||||
if not resp.ok:
|
||||
print("ERROR: response from Brave is not OK.")
|
||||
dom = html.fromstring(resp.text) # type: ignore
|
||||
dom = html.fromstring(resp.text)
|
||||
|
||||
for option in dom.xpath('//section//option[@value="en-us"]/../option'):
|
||||
for option in dom.xpath("//section//option[@value='en-us']/../option"):
|
||||
|
||||
ui_lang = option.get('value')
|
||||
ui_lang = option.get("value")
|
||||
try:
|
||||
l = babel.Locale.parse(ui_lang, sep='-')
|
||||
l = babel.Locale.parse(ui_lang, sep="-")
|
||||
if l.territory:
|
||||
sxng_tag = region_tag(babel.Locale.parse(ui_lang, sep='-'))
|
||||
sxng_tag = region_tag(babel.Locale.parse(ui_lang, sep="-"))
|
||||
else:
|
||||
sxng_tag = language_tag(babel.Locale.parse(ui_lang, sep='-'))
|
||||
sxng_tag = language_tag(babel.Locale.parse(ui_lang, sep="-"))
|
||||
|
||||
except babel.UnknownLocaleError:
|
||||
print("ERROR: can't determine babel locale of Brave's (UI) language %s" % ui_lang)
|
||||
continue
|
||||
|
||||
conflict = engine_traits.custom["ui_lang"].get(sxng_tag)
|
||||
conflict = engine_traits.custom["ui_lang"].get(sxng_tag) # type: ignore
|
||||
if conflict:
|
||||
if conflict != ui_lang:
|
||||
print("CONFLICT: babel %s --> %s, %s" % (sxng_tag, conflict, ui_lang))
|
||||
@@ -472,26 +453,26 @@ def fetch_traits(engine_traits: EngineTraits):
|
||||
|
||||
# search regions of brave
|
||||
|
||||
resp = get('https://cdn.search.brave.com/serp/v2/_app/immutable/chunks/parameters.734c106a.js', headers=headers)
|
||||
resp = get("https://cdn.search.brave.com/serp/v2/_app/immutable/chunks/parameters.734c106a.js")
|
||||
|
||||
if not resp.ok: # type: ignore
|
||||
if not resp.ok:
|
||||
print("ERROR: response from Brave is not OK.")
|
||||
|
||||
country_js = resp.text[resp.text.index("options:{all") + len('options:') :] # type: ignore
|
||||
country_js = resp.text[resp.text.index("options:{all") + len("options:") :]
|
||||
country_js = country_js[: country_js.index("},k={default")]
|
||||
country_tags = js_variable_to_python(country_js)
|
||||
country_tags = js_obj_str_to_python(country_js)
|
||||
|
||||
for k, v in country_tags.items():
|
||||
if k == 'all':
|
||||
engine_traits.all_locale = 'all'
|
||||
if k == "all":
|
||||
engine_traits.all_locale = "all"
|
||||
continue
|
||||
country_tag = v['value']
|
||||
country_tag = v["value"]
|
||||
|
||||
# add official languages of the country ..
|
||||
for lang_tag in babel.languages.get_official_languages(country_tag, de_facto=True):
|
||||
lang_tag = lang_map.get(lang_tag, lang_tag)
|
||||
sxng_tag = region_tag(babel.Locale.parse('%s_%s' % (lang_tag, country_tag.upper())))
|
||||
# print("%-20s: %s <-- %s" % (v['label'], country_tag, sxng_tag))
|
||||
sxng_tag = region_tag(babel.Locale.parse("%s_%s" % (lang_tag, country_tag.upper())))
|
||||
# print("%-20s: %s <-- %s" % (v["label"], country_tag, sxng_tag))
|
||||
|
||||
conflict = engine_traits.regions.get(sxng_tag)
|
||||
if conflict:
|
||||
|
||||
@@ -23,14 +23,14 @@ paging = True
|
||||
# search-url
|
||||
base_url = 'https://www.deviantart.com'
|
||||
|
||||
results_xpath = '//div[@class="_2pZkk"]/div/div/a'
|
||||
results_xpath = '//div[@class="V_S0t_"]/div/div/a'
|
||||
url_xpath = './@href'
|
||||
thumbnail_src_xpath = './div/img/@src'
|
||||
img_src_xpath = './div/img/@srcset'
|
||||
title_xpath = './@aria-label'
|
||||
premium_xpath = '../div/div/div/text()'
|
||||
premium_keytext = 'Watch the artist to view this deviation'
|
||||
cursor_xpath = '(//a[@class="_1OGeq"]/@href)[last()]'
|
||||
cursor_xpath = '(//a[@class="vQ2brP"]/@href)[last()]'
|
||||
|
||||
|
||||
def request(query, params):
|
||||
|
||||
searx/engines/devicons.py (new file, 63 lines)
@@ -0,0 +1,63 @@
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
"""Devicons (icons)"""
|
||||
|
||||
import typing as t
|
||||
|
||||
from searx.result_types import EngineResults
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
from extended_types import SXNG_Response
|
||||
from search.processors.online import OnlineParams
|
||||
|
||||
|
||||
about = {
|
||||
"website": "https://devicon.dev/",
|
||||
"wikidata_id": None,
|
||||
"official_api_documentation": None,
|
||||
"use_official_api": True,
|
||||
"results": "JSON",
|
||||
}
|
||||
|
||||
cdn_base_url = "https://cdn.jsdelivr.net/gh/devicons/devicon@latest"
|
||||
categories = ["images", "icons"]
|
||||
|
||||
|
||||
def request(query: str, params: "OnlineParams"):
|
||||
params["url"] = f"{cdn_base_url}/devicon.json"
|
||||
params['query'] = query
|
||||
return params
|
||||
|
||||
|
||||
def response(resp: "SXNG_Response") -> EngineResults:
|
||||
res = EngineResults()
|
||||
query_parts = resp.search_params["query"].lower().split(" ")
|
||||
|
||||
def is_result_match(result: dict[str, t.Any]) -> bool:
|
||||
for part in query_parts:
|
||||
if part in result["name"]:
|
||||
return True
|
||||
|
||||
for tag in result["altnames"] + result["tags"]:
|
||||
if part in tag:
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
filtered_results = filter(is_result_match, resp.json())
|
||||
for result in filtered_results:
|
||||
for image_type in result["versions"]["svg"]:
|
||||
img_src = f"{cdn_base_url}/icons/{result['name']}/{result['name']}-{image_type}.svg"
|
||||
res.add(
|
||||
res.types.LegacyResult(
|
||||
{
|
||||
"template": "images.html",
|
||||
"url": img_src,
|
||||
"title": result["name"],
|
||||
"content": f"Base color: {result['color']}",
|
||||
"img_src": img_src,
|
||||
"img_format": "SVG",
|
||||
}
|
||||
)
|
||||
)
|
||||
|
||||
return res
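devicon.json is a list of icon entries; a trimmed, hypothetical entry showing only the keys the filter and the loop above read:

    entry = {
        "name": "python",
        "altnames": [],
        "tags": ["language", "scripting"],
        "versions": {"svg": ["original", "plain"]},
        "color": "#3776AB",
    }
    # -> .../icons/python/python-original.svg and .../icons/python/python-plain.svg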
@@ -407,7 +407,7 @@ def fetch_traits(engine_traits: EngineTraits):
|
||||
|
||||
"""
|
||||
# pylint: disable=too-many-branches, too-many-statements, disable=import-outside-toplevel
|
||||
from searx.utils import js_variable_to_python
|
||||
from searx.utils import js_obj_str_to_python
|
||||
|
||||
# fetch regions
|
||||
|
||||
@@ -455,7 +455,7 @@ def fetch_traits(engine_traits: EngineTraits):
|
||||
|
||||
js_code = extr(resp.text, 'languages:', ',regions') # type: ignore
|
||||
|
||||
languages = js_variable_to_python(js_code)
|
||||
languages: dict[str, str] = js_obj_str_to_python(js_code)
|
||||
for eng_lang, name in languages.items():
|
||||
|
||||
if eng_lang == 'wt_WT':
|
||||
|
||||
@@ -42,8 +42,8 @@ def response(resp):
|
||||
|
||||
results.append(
|
||||
{
|
||||
'url': item['source_page_url'],
|
||||
'title': item['source_site'],
|
||||
'url': item.get('source_page_url'),
|
||||
'title': item.get('source_site'),
|
||||
'img_src': img if item['type'] == 'IMAGE' else thumb,
|
||||
'filesize': humanize_bytes(item['meme_file_size']),
|
||||
'publishedDate': formatted_date,
|
||||
|
||||
searx/engines/grokipedia.py (new file, 52 lines)
@@ -0,0 +1,52 @@
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
"""Grokipedia (general)"""
|
||||
|
||||
from urllib.parse import urlencode
|
||||
from searx.utils import html_to_text
|
||||
from searx.result_types import EngineResults
|
||||
|
||||
about = {
|
||||
"website": 'https://grokipedia.com',
|
||||
"wikidata_id": "Q136410803",
|
||||
"official_api_documentation": None,
|
||||
"use_official_api": False,
|
||||
"require_api_key": False,
|
||||
"results": "JSON",
|
||||
}
|
||||
|
||||
base_url = "https://grokipedia.com/api/full-text-search"
|
||||
categories = ['general']
|
||||
paging = True
|
||||
results_per_page = 10
|
||||
|
||||
|
||||
def request(query, params):
|
||||
|
||||
start_index = (params["pageno"] - 1) * results_per_page
|
||||
|
||||
query_params = {
|
||||
"query": query,
|
||||
"limit": results_per_page,
|
||||
"offset": start_index,
|
||||
}
|
||||
|
||||
params["url"] = f"{base_url}?{urlencode(query_params)}"
|
||||
|
||||
return params
|
||||
|
||||
|
||||
def response(resp) -> EngineResults:
|
||||
results = EngineResults()
|
||||
search_res = resp.json()
|
||||
|
||||
for item in search_res["results"]:
|
||||
|
||||
results.add(
|
||||
results.types.MainResult(
|
||||
url='https://grokipedia.com/page/' + item["slug"],
|
||||
title=item["title"],
|
||||
content=html_to_text(item["snippet"]),
|
||||
)
|
||||
)
|
||||
|
||||
return results
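For example, page 2 of a query like "open source" is requested with offset = (2 - 1) * 10:

    https://grokipedia.com/api/full-text-search?query=open+source&limit=10&offset=10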
@@ -6,6 +6,7 @@ from urllib.parse import urlencode
|
||||
from dateutil.relativedelta import relativedelta
|
||||
|
||||
from flask_babel import gettext
|
||||
from searx.utils import html_to_text
|
||||
|
||||
# Engine metadata
|
||||
about = {
|
||||
@@ -75,6 +76,7 @@ def response(resp):
|
||||
object_id = hit["objectID"]
|
||||
points = hit.get("points") or 0
|
||||
num_comments = hit.get("num_comments") or 0
|
||||
content = hit.get("url") or html_to_text(hit.get("comment_text")) or html_to_text(hit.get("story_text"))
|
||||
|
||||
metadata = ""
|
||||
if points != 0 or num_comments != 0:
|
||||
@@ -83,7 +85,7 @@ def response(resp):
|
||||
{
|
||||
"title": hit.get("title") or f"{gettext('author')}: {hit['author']}",
|
||||
"url": f"https://news.ycombinator.com/item?id={object_id}",
|
||||
"content": hit.get("url") or hit.get("comment_text") or hit.get("story_text") or "",
|
||||
"content": content,
|
||||
"metadata": metadata,
|
||||
"author": hit["author"],
|
||||
"publishedDate": datetime.fromtimestamp(hit["created_at_i"]),
|
||||
|
||||
@@ -31,7 +31,7 @@ paging = True
|
||||
time_range_support = True
|
||||
|
||||
# base_url can be overwritten by a list of URLs in the settings.yml
|
||||
base_url: list | str = []
|
||||
base_url: list[str] | str = []
|
||||
|
||||
|
||||
def init(_):
|
||||
|
||||
searx/engines/lucide.py (new file, 69 lines)
@@ -0,0 +1,69 @@
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
"""Browse one of the largest collections of copyleft icons
|
||||
that can be used for own projects (e.g. apps, websites).
|
||||
|
||||
.. _Website: https://lucide.dev
|
||||
|
||||
"""
|
||||
|
||||
import typing as t
|
||||
|
||||
from searx.result_types import EngineResults
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
from extended_types import SXNG_Response
|
||||
from search.processors.online import OnlineParams
|
||||
|
||||
|
||||
about = {
|
||||
"website": "https://lucide.dev/",
|
||||
"wikidata_id": None,
|
||||
"official_api_documentation": None,
|
||||
"use_official_api": True,
|
||||
"results": "JSON",
|
||||
}
|
||||
|
||||
cdn_base_url = "https://cdn.jsdelivr.net/npm/lucide-static"
|
||||
categories = ["images", "icons"]
|
||||
|
||||
|
||||
def request(query: str, params: "OnlineParams"):
|
||||
params["url"] = f"{cdn_base_url}/tags.json"
|
||||
params['query'] = query
|
||||
return params
|
||||
|
||||
|
||||
def response(resp: "SXNG_Response") -> EngineResults:
|
||||
res = EngineResults()
|
||||
query_parts = resp.search_params["query"].lower().split(" ")
|
||||
|
||||
def is_result_match(result: tuple[str, list[str]]) -> bool:
|
||||
icon_name, tags = result
|
||||
|
||||
for part in query_parts:
|
||||
if part in icon_name:
|
||||
return True
|
||||
|
||||
for tag in tags:
|
||||
if part in tag:
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
filtered_results = filter(is_result_match, resp.json().items())
|
||||
for icon_name, tags in filtered_results:
|
||||
img_src = f"{cdn_base_url}/icons/{icon_name}.svg"
|
||||
res.add(
|
||||
res.types.LegacyResult(
|
||||
{
|
||||
"template": "images.html",
|
||||
"url": img_src,
|
||||
"title": icon_name,
|
||||
"content": ", ".join(tags),
|
||||
"img_src": img_src,
|
||||
"img_format": "SVG",
|
||||
}
|
||||
)
|
||||
)
|
||||
|
||||
return res
|
||||
@@ -28,7 +28,7 @@ Implementations
|
||||
"""
|
||||
|
||||
import typing as t
|
||||
from urllib.parse import urlencode, quote_plus
|
||||
from urllib.parse import urlencode
|
||||
from searx.utils import searxng_useragent
|
||||
from searx.result_types import EngineResults
|
||||
from searx.extended_types import SXNG_Response
|
||||
@@ -42,7 +42,7 @@ about = {
|
||||
"results": "JSON",
|
||||
}
|
||||
|
||||
base_url = "https://api.marginalia.nu"
|
||||
base_url = "https://api2.marginalia-search.com"
|
||||
safesearch = True
|
||||
categories = ["general"]
|
||||
paging = False
|
||||
@@ -85,13 +85,11 @@ class ApiSearchResults(t.TypedDict):
|
||||
|
||||
def request(query: str, params: dict[str, t.Any]):
|
||||
|
||||
query_params = {
|
||||
"count": results_per_page,
|
||||
"nsfw": min(params["safesearch"], 1),
|
||||
}
|
||||
query_params = {"count": results_per_page, "nsfw": min(params["safesearch"], 1), "query": query}
|
||||
|
||||
params["url"] = f"{base_url}/{api_key}/search/{quote_plus(query)}?{urlencode(query_params)}"
|
||||
params["url"] = f"{base_url}/search?{urlencode(query_params)}"
|
||||
params["headers"]["User-Agent"] = searxng_useragent()
|
||||
params["headers"]["API-Key"] = api_key
|
||||
|
||||
|
||||
def response(resp: SXNG_Response):
|
||||
|
||||
@@ -65,7 +65,8 @@ def request(query, params):
|
||||
if search_type:
|
||||
args['fmt'] = search_type
|
||||
|
||||
if search_type == '':
|
||||
# setting the page number on the first page (i.e. s=0) triggers a rate-limit
|
||||
if search_type == '' and params['pageno'] > 1:
|
||||
args['s'] = 10 * (params['pageno'] - 1)
|
||||
|
||||
if params['time_range'] and search_type != 'images':
|
||||
|
||||
@@ -1,264 +0,0 @@
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
"""Mullvad Leta is a search engine proxy. Currently Leta only offers text
|
||||
search results not image, news or any other types of search result. Leta acts
|
||||
as a proxy to Google and Brave search results. You can select which backend
|
||||
search engine you wish to use, see (:py:obj:`leta_engine`).
|
||||
|
||||
.. hint::
|
||||
|
||||
Leta caches each search for up to 30 days. For example, if you use search
|
||||
terms like ``news``, contrary to your intention you'll get very old results!
|
||||
|
||||
|
||||
Configuration
|
||||
=============
|
||||
|
||||
The engine has the following additional settings:
|
||||
|
||||
- :py:obj:`leta_engine` (:py:obj:`LetaEnginesType`)
|
||||
|
||||
You can configure one Leta engine for Google and one for Brave:
|
||||
|
||||
.. code:: yaml
|
||||
|
||||
- name: mullvadleta
|
||||
engine: mullvad_leta
|
||||
leta_engine: google
|
||||
shortcut: ml
|
||||
|
||||
- name: mullvadleta brave
|
||||
engine: mullvad_leta
|
||||
network: mullvadleta # use network from engine "mullvadleta" configured above
|
||||
leta_engine: brave
|
||||
shortcut: mlb
|
||||
|
||||
Implementations
|
||||
===============
|
||||
|
||||
"""
|
||||
import typing as t
|
||||
|
||||
from urllib.parse import urlencode
|
||||
import babel
|
||||
from httpx import Response
|
||||
from lxml import html
|
||||
from searx.enginelib.traits import EngineTraits
|
||||
from searx.locales import get_official_locales, language_tag, region_tag
|
||||
from searx.utils import eval_xpath_list
|
||||
from searx.result_types import EngineResults, MainResult
|
||||
|
||||
search_url = "https://leta.mullvad.net"
|
||||
|
||||
# about
|
||||
about = {
|
||||
"website": search_url,
|
||||
"wikidata_id": 'Q47008412', # the Mullvad id - not leta, but related
|
||||
"official_api_documentation": 'https://leta.mullvad.net/faq',
|
||||
"use_official_api": False,
|
||||
"require_api_key": False,
|
||||
"results": 'HTML',
|
||||
}
|
||||
|
||||
# engine dependent config
|
||||
categories = ["general", "web"]
|
||||
paging = True
|
||||
max_page = 10
|
||||
time_range_support = True
|
||||
time_range_dict = {
|
||||
"day": "d",
|
||||
"week": "w",
|
||||
"month": "m",
|
||||
"year": "y",
|
||||
}
|
||||
|
||||
LetaEnginesType = t.Literal["google", "brave"]
|
||||
"""Engine types supported by mullvadleta."""
|
||||
|
||||
leta_engine: LetaEnginesType = "google"
|
||||
"""Select Leta's engine type from :py:obj:`LetaEnginesType`."""
|
||||
|
||||
|
||||
def init(_):
|
||||
l = t.get_args(LetaEnginesType)
|
||||
if leta_engine not in l:
|
||||
raise ValueError(f"leta_engine '{leta_engine}' is invalid, use one of {', '.join(l)}")
|
||||
|
||||
|
||||
class DataNodeQueryMetaDataIndices(t.TypedDict):
|
||||
"""Indices into query metadata."""
|
||||
|
||||
success: int
|
||||
q: int # pylint: disable=invalid-name
|
||||
country: int
|
||||
language: int
|
||||
lastUpdated: int
|
||||
engine: int
|
||||
items: int
|
||||
infobox: int
|
||||
news: int
|
||||
timestamp: int
|
||||
altered: int
|
||||
page: int
|
||||
next: int # if -1, there no more results are available
|
||||
previous: int
|
||||
|
||||
|
||||
class DataNodeResultIndices(t.TypedDict):
|
||||
"""Indices into query resultsdata."""
|
||||
|
||||
link: int
|
||||
snippet: int
|
||||
title: int
|
||||
favicon: int
|
||||
|
||||
|
||||
def request(query: str, params: dict):
|
||||
params["method"] = "GET"
|
||||
args = {
|
||||
"q": query,
|
||||
"engine": leta_engine,
|
||||
"x-sveltekit-invalidated": "001", # hardcoded from all requests seen
|
||||
}
|
||||
|
||||
country = traits.get_region(params.get("searxng_locale"), traits.all_locale) # type: ignore
|
||||
if country:
|
||||
args["country"] = country
|
||||
|
||||
language = traits.get_language(params.get("searxng_locale"), traits.all_locale) # type: ignore
|
||||
if language:
|
||||
args["language"] = language
|
||||
|
||||
if params["time_range"] in time_range_dict:
|
||||
args["lastUpdated"] = time_range_dict[params["time_range"]]
|
||||
|
||||
if params["pageno"] > 1:
|
||||
args["page"] = params["pageno"]
|
||||
|
||||
params["url"] = f"{search_url}/search/__data.json?{urlencode(args)}"
|
||||
|
||||
return params
|
||||
|
||||
|
||||
def response(resp: Response) -> EngineResults:
|
||||
json_response = resp.json()
|
||||
|
||||
nodes = json_response["nodes"]
|
||||
# 0: is None
|
||||
# 1: has "connected=True", not useful
|
||||
# 2: query results within "data"
|
||||
|
||||
data_nodes = nodes[2]["data"]
|
||||
# Instead of nested object structure, all objects are flattened into a
|
||||
# list. Rather, the first object in data_node provides indices into the
|
||||
# "data_nodes" to access each searchresult (which is an object of more
|
||||
# indices)
|
||||
#
|
||||
# Read the relative TypedDict definitions for details
|
||||
|
||||
query_meta_data: DataNodeQueryMetaDataIndices = data_nodes[0]
|
||||
|
||||
query_items_indices = query_meta_data["items"]
|
||||
|
||||
results = EngineResults()
|
||||
for idx in data_nodes[query_items_indices]:
|
||||
query_item_indices: DataNodeResultIndices = data_nodes[idx]
|
||||
results.add(
|
||||
MainResult(
|
||||
url=data_nodes[query_item_indices["link"]],
|
||||
title=data_nodes[query_item_indices["title"]],
|
||||
content=data_nodes[query_item_indices["snippet"]],
|
||||
)
|
||||
)
|
||||
|
||||
return results
|
||||
|
||||
|
||||
def fetch_traits(engine_traits: EngineTraits) -> None:
|
||||
"""Fetch languages and regions from Mullvad-Leta"""
|
||||
|
||||
def extract_table_data(table):
|
||||
for row in table.xpath(".//tr")[2:]:
|
||||
cells = row.xpath(".//td | .//th") # includes headers and data
|
||||
if len(cells) > 1: # ensure the column exists
|
||||
cell0 = cells[0].text_content().strip()
|
||||
cell1 = cells[1].text_content().strip()
|
||||
yield [cell0, cell1]
|
||||
|
||||
# pylint: disable=import-outside-toplevel
|
||||
# see https://github.com/searxng/searxng/issues/762
|
||||
from searx.network import get as http_get
|
||||
|
||||
# pylint: enable=import-outside-toplevel
|
||||
|
||||
resp = http_get(f"{search_url}/documentation")
|
||||
if not isinstance(resp, Response):
|
||||
print("ERROR: failed to get response from mullvad-leta. Are you connected to the VPN?")
|
||||
return
|
||||
if not resp.ok:
|
||||
print("ERROR: response from mullvad-leta is not OK. Are you connected to the VPN?")
|
||||
return
|
||||
|
||||
dom = html.fromstring(resp.text)
|
||||
|
||||
# There are 4 HTML tables on the documentation page for extracting information:
|
||||
# 0. Keyboard Shortcuts
|
||||
# 1. Query Parameters (shoutout to Mullvad for accessible docs for integration)
|
||||
# 2. Country Codes [Country, Code]
|
||||
# 3. Language Codes [Language, Code]
|
||||
tables = eval_xpath_list(dom.body, "//table")
|
||||
if tables is None or len(tables) <= 0:
|
||||
print("ERROR: could not find any tables. Was the page updated?")
|
||||
|
||||
language_table = tables[3]
|
||||
lang_map = {
|
||||
"zh-hant": "zh_Hans",
|
||||
"zh-hans": "zh_Hant",
|
||||
"jp": "ja",
|
||||
}
|
||||
|
||||
for language, code in extract_table_data(language_table):
|
||||
|
||||
locale_tag = lang_map.get(code, code).replace("-", "_") # type: ignore
|
||||
try:
|
||||
locale = babel.Locale.parse(locale_tag)
|
||||
except babel.UnknownLocaleError:
|
||||
print(f"ERROR: Mullvad-Leta language {language} ({code}) is unknown by babel")
|
||||
continue
|
||||
|
||||
sxng_tag = language_tag(locale)
|
||||
engine_traits.languages[sxng_tag] = code
|
||||
|
||||
country_table = tables[2]
|
||||
country_map = {
|
||||
"cn": "zh-CN",
|
||||
"hk": "zh-HK",
|
||||
"jp": "ja-JP",
|
||||
"my": "ms-MY",
|
||||
"tw": "zh-TW",
|
||||
"uk": "en-GB",
|
||||
"us": "en-US",
|
||||
}
|
||||
|
||||
for country, code in extract_table_data(country_table):
|
||||
|
||||
sxng_tag = country_map.get(code)
|
||||
if sxng_tag:
|
||||
engine_traits.regions[sxng_tag] = code
|
||||
continue
|
||||
|
||||
try:
|
||||
locale = babel.Locale.parse(f"{code.lower()}_{code.upper()}")
|
||||
except babel.UnknownLocaleError:
|
||||
locale = None
|
||||
|
||||
if locale:
|
||||
engine_traits.regions[region_tag(locale)] = code
|
||||
continue
|
||||
|
||||
official_locales = get_official_locales(code, engine_traits.languages.keys(), regional=True)
|
||||
if not official_locales:
|
||||
print(f"ERROR: Mullvad-Leta country '{code}' ({country}) could not be mapped as expected.")
|
||||
continue
|
||||
|
||||
for locale in official_locales:
|
||||
engine_traits.regions[region_tag(locale)] = code
|
||||
@@ -15,7 +15,7 @@ from searx.utils import (
|
||||
extr,
|
||||
html_to_text,
|
||||
parse_duration_string,
|
||||
js_variable_to_python,
|
||||
js_obj_str_to_python,
|
||||
get_embeded_stream_url,
|
||||
)
|
||||
|
||||
@@ -125,7 +125,7 @@ def parse_images(data):
|
||||
|
||||
match = extr(data, '<script>var imageSearchTabData=', '</script>')
|
||||
if match:
|
||||
json = js_variable_to_python(match.strip())
|
||||
json = js_obj_str_to_python(match.strip())
|
||||
items = json.get('content', {}).get('items', [])
|
||||
|
||||
for item in items:
|
||||
|
||||
@@ -55,15 +55,18 @@ def response(resp):
|
||||
if result['type'] == 'story':
|
||||
continue
|
||||
|
||||
main_image = result['images']['orig']
|
||||
results.append(
|
||||
{
|
||||
'template': 'images.html',
|
||||
'url': result['link'] or f"{base_url}/pin/{result['id']}/",
|
||||
'url': result.get('link') or f"{base_url}/pin/{result['id']}/",
|
||||
'title': result.get('title') or result.get('grid_title'),
|
||||
'content': (result.get('rich_summary') or {}).get('display_description') or "",
|
||||
'img_src': result['images']['orig']['url'],
|
||||
'img_src': main_image['url'],
|
||||
'thumbnail_src': result['images']['236x']['url'],
|
||||
'source': (result.get('rich_summary') or {}).get('site_name'),
|
||||
'resolution': f"{main_image['width']}x{main_image['height']}",
|
||||
'author': f"{result['pinner'].get('full_name')} ({result['pinner']['username']})",
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
@@ -40,8 +40,8 @@ Known Quirks
|
||||
The implementation to support :py:obj:`paging <searx.enginelib.Engine.paging>`
|
||||
is based on the *nextpage* method of Piped's REST API / the :py:obj:`frontend
|
||||
API <frontend_url>`. This feature is *next page driven* and plays well with the
|
||||
:ref:`infinite_scroll <settings ui>` setting in SearXNG but it does not really
|
||||
fit into SearXNG's UI to select a page by number.
|
||||
:ref:`infinite_scroll <settings plugins>` plugin in SearXNG but it does not
|
||||
really fit into SearXNG's UI to select a page by number.
|
||||
|
||||
Implementations
|
||||
===============
|
||||
@@ -72,7 +72,7 @@ categories = []
|
||||
paging = True
|
||||
|
||||
# search-url
|
||||
backend_url: list[str] | str | None = None
|
||||
backend_url: list[str] | str = []
|
||||
"""Piped-Backend_: The core component behind Piped. The value is an URL or a
|
||||
list of URLs. In the latter case instance will be selected randomly. For a
|
||||
complete list of official instances see Piped-Instances (`JSON
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.