Mirror of https://github.com/searxng/searxng.git (synced 2025-12-23 04:00:02 +00:00)

Compare commits: 20de10df4e... (1 commit)

| Author | SHA1 | Date |
|---|---|---|
| dependabot | d991c94d43 | |
.github/workflows/checker.yml
vendored
6
.github/workflows/checker.yml
vendored
@@ -15,7 +15,7 @@ permissions:
|
|||||||
contents: read
|
contents: read
|
||||||
|
|
||||||
env:
|
env:
|
||||||
PYTHON_VERSION: "3.14"
|
PYTHON_VERSION: "3.13"
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
search:
|
search:
|
||||||
@@ -24,12 +24,12 @@ jobs:
|
|||||||
runs-on: ubuntu-24.04-arm
|
runs-on: ubuntu-24.04-arm
|
||||||
steps:
|
steps:
|
||||||
- name: Setup Python
|
- name: Setup Python
|
||||||
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||||
with:
|
with:
|
||||||
python-version: "${{ env.PYTHON_VERSION }}"
|
python-version: "${{ env.PYTHON_VERSION }}"
|
||||||
|
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||||
with:
|
with:
|
||||||
persist-credentials: "false"
|
persist-credentials: "false"
|
||||||
|
|
||||||
|
|||||||
.github/workflows/container.yml (vendored, 135 changed lines)

@@ -18,34 +18,106 @@ concurrency:
 
 permissions:
   contents: read
+  # Organization GHCR
   packages: read
 
 env:
-  PYTHON_VERSION: "3.14"
+  PYTHON_VERSION: "3.13"
 
 jobs:
+  build-base:
+    if: |
+      (github.repository_owner == 'searxng' && github.event.workflow_run.conclusion == 'success')
+      || github.event_name == 'workflow_dispatch'
+    name: Build base
+    runs-on: ubuntu-24.04
+    permissions:
+      # Organization GHCR
+      packages: write
+
+    steps:
+      - if: github.repository_owner == 'searxng'
+        name: Checkout
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        with:
+          persist-credentials: "false"
+
+      - if: github.repository_owner == 'searxng'
+        name: Get date
+        id: date
+        run: echo "date=$(date +'%Y%m%d')" >>$GITHUB_OUTPUT
+
+      - if: github.repository_owner == 'searxng'
+        name: Check cache apko
+        id: cache-apko
+        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        with:
+          # yamllint disable-line rule:line-length
+          key: "apko-${{ steps.date.outputs.date }}-${{ hashFiles('./container/base.yml', './container/base-builder.yml') }}"
+          path: "/tmp/.apko/"
+          lookup-only: true
+
+      - if: github.repository_owner == 'searxng' && steps.cache-apko.outputs.cache-hit != 'true'
+        name: Setup cache apko
+        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        with:
+          # yamllint disable-line rule:line-length
+          key: "apko-${{ steps.date.outputs.date }}-${{ hashFiles('./container/base.yml', './container/base-builder.yml') }}"
+          restore-keys: "apko-${{ steps.date.outputs.date }}-"
+          path: "/tmp/.apko/"
+
+      - if: github.repository_owner == 'searxng' && steps.cache-apko.outputs.cache-hit != 'true'
+        name: Setup apko
+        run: |
+          eval "$(/home/linuxbrew/.linuxbrew/bin/brew shellenv)"
+          brew install apko
+
+      - if: github.repository_owner == 'searxng' && steps.cache-apko.outputs.cache-hit != 'true'
+        name: Login to GHCR
+        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
+        with:
+          registry: "ghcr.io"
+          username: "${{ github.repository_owner }}"
+          password: "${{ secrets.GITHUB_TOKEN }}"
+
+      - if: github.repository_owner == 'searxng' && steps.cache-apko.outputs.cache-hit != 'true'
+        name: Build
+        run: |
+          eval "$(/home/linuxbrew/.linuxbrew/bin/brew shellenv)"
+
+          apko publish ./container/base.yml ghcr.io/${{ github.repository_owner }}/base:searxng \
+            --cache-dir=/tmp/.apko/ \
+            --sbom=false \
+            --vcs=false \
+            --log-level=debug
+
+          apko publish ./container/base-builder.yml ghcr.io/${{ github.repository_owner }}/base:searxng-builder \
+            --cache-dir=/tmp/.apko/ \
+            --sbom=false \
+            --vcs=false \
+            --log-level=debug
+
   build:
     if: github.repository_owner == 'searxng' || github.event_name == 'workflow_dispatch'
     name: Build (${{ matrix.arch }})
     runs-on: ${{ matrix.os }}
+    needs: build-base
     strategy:
       fail-fast: false
       matrix:
         include:
           - arch: amd64
-            march: amd64
             os: ubuntu-24.04
             emulation: false
           - arch: arm64
-            march: arm64
             os: ubuntu-24.04-arm
             emulation: false
           - arch: armv7
-            march: arm64
             os: ubuntu-24.04-arm
             emulation: true
 
     permissions:
+      # Organization GHCR
       packages: write
 
     outputs:
@@ -53,37 +125,13 @@ jobs:
       git_url: ${{ steps.build.outputs.git_url }}
 
     steps:
-      # yamllint disable rule:line-length
-      - name: Setup podman
-        env:
-          PODMAN_VERSION: "v5.6.2"
-        run: |
-          # dpkg man-db trigger is very slow on GHA runners
-          # https://github.com/actions/runner-images/issues/10977
-          # https://github.com/actions/runner/issues/4030
-          sudo rm -f /var/lib/man-db/auto-update
-
-          sudo apt-get purge -y podman runc crun conmon
-
-          curl -fsSLO "https://github.com/mgoltzsche/podman-static/releases/download/${{ env.PODMAN_VERSION }}/podman-linux-${{ matrix.march }}.tar.gz"
-          curl -fsSLO "https://github.com/mgoltzsche/podman-static/releases/download/${{ env.PODMAN_VERSION }}/podman-linux-${{ matrix.march }}.tar.gz.asc"
-          gpg --keyserver hkps://keyserver.ubuntu.com --recv-keys 0CCF102C4F95D89E583FF1D4F8B5AF50344BB503
-          gpg --batch --verify "podman-linux-${{ matrix.march }}.tar.gz.asc" "podman-linux-${{ matrix.march }}.tar.gz"
-
-          tar -xzf "podman-linux-${{ matrix.march }}.tar.gz"
-          sudo cp -rfv ./podman-linux-${{ matrix.march }}/etc/. /etc/
-          sudo cp -rfv ./podman-linux-${{ matrix.march }}/usr/. /usr/
-
-          sudo sysctl -w kernel.apparmor_restrict_unprivileged_userns=0
-      # yamllint enable rule:line-length
-
       - name: Setup Python
-        uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: "${{ env.PYTHON_VERSION }}"
 
      - name: Checkout
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        with:
          persist-credentials: "false"
          fetch-depth: "0"
@@ -95,22 +143,16 @@ jobs:
          restore-keys: "python-${{ env.PYTHON_VERSION }}-${{ runner.arch }}-"
          path: "./local/"
 
-      - name: Get date
-        id: date
-        run: echo "date=$(date +'%Y%m%d')" >>$GITHUB_OUTPUT
-
-      - name: Setup cache container
+      - name: Setup cache container uv
        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
        with:
-          key: "container-${{ matrix.arch }}-${{ steps.date.outputs.date }}-${{ hashFiles('./requirements*.txt') }}"
-          restore-keys: |
-            "container-${{ matrix.arch }}-${{ steps.date.outputs.date }}-"
-            "container-${{ matrix.arch }}-"
-          path: "/var/tmp/buildah-cache-*/*"
+          key: "container-uv-${{ matrix.arch }}-${{ hashFiles('./requirements*.txt') }}"
+          restore-keys: "container-uv-${{ matrix.arch }}-"
+          path: "/var/tmp/buildah-cache-1001/uv/"
 
      - if: ${{ matrix.emulation }}
        name: Setup QEMU
-        uses: docker/setup-qemu-action@c7c53464625b32c7a7e944ae62b3e17d2b600130 # v3.7.0
+        uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
 
      - name: Login to GHCR
        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
@@ -145,13 +187,13 @@ jobs:
 
     steps:
      - name: Checkout
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        with:
          persist-credentials: "false"
 
      - if: ${{ matrix.emulation }}
        name: Setup QEMU
-        uses: docker/setup-qemu-action@c7c53464625b32c7a7e944ae62b3e17d2b600130 # v3.7.0
+        uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
 
      - name: Login to GHCR
        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
@@ -175,11 +217,12 @@ jobs:
       - test
 
     permissions:
+      # Organization GHCR
      packages: write
 
    steps:
      - name: Checkout
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        with:
          persist-credentials: "false"
 
@@ -194,8 +237,8 @@ jobs:
        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
        with:
          registry: "docker.io"
-          username: "${{ secrets.DOCKER_USER }}"
-          password: "${{ secrets.DOCKER_TOKEN }}"
+          username: "${{ secrets.DOCKERHUB_USERNAME }}"
+          password: "${{ secrets.DOCKERHUB_TOKEN }}"
 
      - name: Release
        env:
.github/workflows/data-update.yml (vendored, 8 changed lines)

@@ -15,7 +15,7 @@ permissions:
   contents: read
 
 env:
-  PYTHON_VERSION: "3.14"
+  PYTHON_VERSION: "3.13"
 
 jobs:
   data:
@@ -40,12 +40,12 @@ jobs:
 
     steps:
      - name: Setup Python
-        uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: "${{ env.PYTHON_VERSION }}"
 
      - name: Checkout
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        with:
          persist-credentials: "false"
 
@@ -64,7 +64,7 @@ jobs:
 
      - name: Create PR
        id: cpr
-        uses: peter-evans/create-pull-request@84ae59a2cdc2258d6fa0732dd66352dddae2a412 # v7.0.9
+        uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
        with:
          author: "searxng-bot <searxng-bot@users.noreply.github.com>"
          committer: "searxng-bot <searxng-bot@users.noreply.github.com>"
.github/workflows/documentation.yml (vendored, 8 changed lines)

@@ -19,7 +19,7 @@ permissions:
   contents: read
 
 env:
-  PYTHON_VERSION: "3.14"
+  PYTHON_VERSION: "3.13"
 
 jobs:
   release:
@@ -32,12 +32,12 @@ jobs:
 
     steps:
      - name: Setup Python
-        uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: "${{ env.PYTHON_VERSION }}"
 
      - name: Checkout
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        with:
          persist-credentials: "false"
          fetch-depth: "0"
@@ -57,7 +57,7 @@ jobs:
 
      - if: github.ref_name == 'master'
        name: Release
-        uses: JamesIves/github-pages-deploy-action@4a3abc783e1a24aeb44c16e869ad83caf6b4cc23 # v4.7.4
+        uses: JamesIves/github-pages-deploy-action@6c2d9db40f9296374acc17b90404b6e8864128c8 # v4.7.3
        with:
          folder: "dist/docs"
          branch: "gh-pages"
.github/workflows/integration.yml (vendored, 12 changed lines)

@@ -18,7 +18,7 @@ permissions:
   contents: read
 
 env:
-  PYTHON_VERSION: "3.14"
+  PYTHON_VERSION: "3.13"
 
 jobs:
   test:
@@ -35,12 +35,12 @@ jobs:
 
     steps:
      - name: Setup Python
-        uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: "${{ matrix.python-version }}"
 
      - name: Checkout
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        with:
          persist-credentials: "false"
 
@@ -62,17 +62,17 @@ jobs:
    runs-on: ubuntu-24.04-arm
    steps:
      - name: Setup Python
-        uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: "${{ env.PYTHON_VERSION }}"
 
      - name: Checkout
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        with:
          persist-credentials: "false"
 
      - name: Setup Node.js
-        uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
+        uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
        with:
          node-version-file: "./.nvmrc"
 
.github/workflows/l10n.yml (vendored, 12 changed lines)

@@ -22,7 +22,7 @@ permissions:
   contents: read
 
 env:
-  PYTHON_VERSION: "3.14"
+  PYTHON_VERSION: "3.13"
 
 jobs:
   update:
@@ -35,12 +35,12 @@ jobs:
 
     steps:
      - name: Setup Python
-        uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: "${{ env.PYTHON_VERSION }}"
 
      - name: Checkout
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        with:
          token: "${{ secrets.WEBLATE_GITHUB_TOKEN }}"
          fetch-depth: "0"
@@ -82,12 +82,12 @@ jobs:
 
    steps:
      - name: Setup Python
-        uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: "${{ env.PYTHON_VERSION }}"
 
      - name: Checkout
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        with:
          token: "${{ secrets.WEBLATE_GITHUB_TOKEN }}"
          fetch-depth: "0"
@@ -117,7 +117,7 @@ jobs:
 
      - name: Create PR
        id: cpr
-        uses: peter-evans/create-pull-request@84ae59a2cdc2258d6fa0732dd66352dddae2a412 # v7.0.9
+        uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
        with:
          author: "searxng-bot <searxng-bot@users.noreply.github.com>"
          committer: "searxng-bot <searxng-bot@users.noreply.github.com>"
.github/workflows/security.yml (vendored, 8 changed lines)

@@ -24,7 +24,7 @@ jobs:
 
    steps:
      - name: Checkout
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        with:
          persist-credentials: "false"
 
@@ -32,8 +32,8 @@ jobs:
        uses: docker/scout-action@f8c776824083494ab0d56b8105ba2ca85c86e4de # v1.18.2
        with:
          organization: "searxng"
-          dockerhub-user: "${{ secrets.DOCKER_USER }}"
-          dockerhub-password: "${{ secrets.DOCKER_TOKEN }}"
+          dockerhub-user: "${{ secrets.DOCKERHUB_USERNAME }}"
+          dockerhub-password: "${{ secrets.DOCKERHUB_TOKEN }}"
          image: "registry://ghcr.io/searxng/searxng:latest"
          command: "cves"
          sarif-file: "./scout.sarif"
@@ -41,6 +41,6 @@ jobs:
          write-comment: "false"
 
      - name: Upload SARIFs
-        uses: github/codeql-action/upload-sarif@fdbfb4d2750291e159f0156def62b853c2798ca2 # v4.31.5
+        uses: github/codeql-action/upload-sarif@64d10c13136e1c5bce3e5fbde8d4906eeaafc885 # v3.30.6
        with:
          sarif_file: "./scout.sarif"
@@ -162,7 +162,7 @@ no-docstring-rgx=^_
 property-classes=abc.abstractproperty
 
 # Regular expression matching correct variable names
-variable-rgx=([a-zA-Z0-9_]*)$
+variable-rgx=(([a-z][a-zA-Z0-9_]{2,30})|(_[a-z0-9_]*)|([a-z]))$
 
 
 [FORMAT]
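To make the change concrete, a quick check with Python's re module (the example names are invented for illustration):

    import re

    # Removed pattern (permissive): matches practically any identifier.
    OLD = r"([a-zA-Z0-9_]*)$"
    # Added pattern (strict): a lowercase-initial name of 3 to 31 chars,
    # an underscore-prefixed name, or a single lowercase letter.
    NEW = r"(([a-z][a-zA-Z0-9_]{2,30})|(_[a-z0-9_]*)|([a-z]))$"

    for name in ("value", "i", "_tmp", "Value", "x1"):
        print(name, bool(re.fullmatch(NEW, name)))
    # value True, i True, _tmp True, Value False, x1 False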
@@ -1,8 +1,8 @@
 {
-  "$schema": "https://biomejs.dev/schemas/2.3.8/schema.json",
+  "$schema": "https://biomejs.dev/schemas/2.2.5/schema.json",
   "files": {
     "ignoreUnknown": true,
-    "includes": ["**", "!node_modules"]
+    "includes": ["**", "!dist", "!node_modules"]
   },
   "assist": {
     "enabled": true,
@@ -15,9 +15,9 @@
     }
   },
   "formatter": {
-    "enabled": true,
     "bracketSameLine": false,
     "bracketSpacing": true,
+    "enabled": true,
     "formatWithErrors": false,
     "indentStyle": "space",
     "indentWidth": 2,
@@ -35,26 +35,24 @@
     },
     "correctness": {
       "noGlobalDirnameFilename": "error",
+      "noUndeclaredVariables": {
+        "level": "error",
+        "options": {
+          "checkTypes": true
+        }
+      },
       "useImportExtensions": "error",
       "useJsonImportAttributes": "error",
       "useSingleJsDocAsterisk": "error"
     },
     "nursery": {
-      "noContinue": "warn",
       "noDeprecatedImports": "warn",
-      "noEqualsToNull": "warn",
-      "noFloatingPromises": "warn",
-      "noForIn": "warn",
       "noImportCycles": "warn",
-      "noIncrementDecrement": "warn",
       "noMisusedPromises": "warn",
-      "noMultiStr": "warn",
-      "noParametersOnlyUsedInRecursion": "warn",
       "noUselessCatchBinding": "warn",
       "noUselessUndefined": "warn",
       "useExhaustiveSwitchCases": "warn",
-      "useExplicitType": "warn",
-      "useFind": "warn"
+      "useExplicitType": "warn"
     },
     "performance": {
       "noAwaitInLoops": "error",
@@ -67,7 +65,6 @@
     "style": {
       "noCommonJs": "error",
       "noEnum": "error",
-      "noImplicitBoolean": "error",
       "noInferrableTypes": "error",
       "noNamespace": "error",
       "noNegationElse": "error",
@@ -112,12 +109,6 @@
           "syntax": "explicit"
         }
       },
-      "useConsistentTypeDefinitions": {
-        "level": "error",
-        "options": {
-          "style": "type"
-        }
-      },
       "useDefaultSwitchClause": "error",
       "useExplicitLengthCheck": "error",
       "useForOf": "error",
@@ -126,7 +117,6 @@
       "useNumericSeparators": "error",
       "useObjectSpread": "error",
       "useReadonlyClassProperties": "error",
-      "useSelfClosingElements": "error",
       "useShorthandAssign": "error",
       "useSingleVarDeclarator": "error",
       "useThrowNewError": "error",
client/simple/package-lock.json (generated, 803 changed lines): diff suppressed because it is too large.
@@ -25,27 +25,27 @@
     "not dead"
   ],
   "dependencies": {
-    "ionicons": "~8.0.13",
+    "ionicons": "~8.0.0",
     "normalize.css": "8.0.1",
-    "ol": "~10.7.0",
+    "ol": "~10.6.0",
     "swiped-events": "1.2.0"
   },
   "devDependencies": {
-    "@biomejs/biome": "2.3.8",
-    "@types/node": "~24.10.1",
-    "browserslist": "~4.28.0",
-    "browserslist-to-esbuild": "~2.1.1",
+    "@biomejs/biome": "2.2.5",
+    "@types/node": "~24.6.2",
+    "browserslist": "~4.26.3",
+    "browserslist-to-esbuild": "~2.1.0",
     "edge.js": "~6.3.0",
-    "less": "~4.4.2",
+    "less": "~4.4.1",
     "lightningcss": "~1.30.2",
-    "sharp": "~0.34.5",
-    "sort-package-json": "~3.5.0",
-    "stylelint": "~16.26.0",
-    "stylelint-config-standard-less": "~3.0.1",
-    "stylelint-prettier": "~5.0.3",
+    "sharp": "~0.34.4",
+    "sort-package-json": "~3.4.0",
+    "stylelint": "~16.24.0",
+    "stylelint-config-standard-less": "~3.0.0",
+    "stylelint-prettier": "~5.0.0",
     "svgo": "~4.0.0",
     "typescript": "~5.9.3",
-    "vite": "npm:rolldown-vite@7.2.7",
+    "vite": "npm:rolldown-vite@7.1.15",
     "vite-bundle-analyzer": "~1.2.3"
   }
 }
@@ -4,11 +4,11 @@ import { Endpoints, endpoint, ready, settings } from "./toolkit.ts";
 
 ready(
   () => {
-    void import("../main/keyboard.ts");
-    void import("../main/search.ts");
+    import("../main/keyboard.ts");
+    import("../main/search.ts");
 
     if (settings.autocomplete) {
-      void import("../main/autocomplete.ts");
+      import("../main/autocomplete.ts");
     }
   },
   { on: [endpoint === Endpoints.index] }
@@ -16,17 +16,17 @@ ready(
 
 ready(
   () => {
-    void import("../main/keyboard.ts");
-    void import("../main/mapresult.ts");
-    void import("../main/results.ts");
-    void import("../main/search.ts");
+    import("../main/keyboard.ts");
+    import("../main/mapresult.ts");
+    import("../main/results.ts");
+    import("../main/search.ts");
 
     if (settings.infinite_scroll) {
-      void import("../main/infinite_scroll.ts");
+      import("../main/infinite_scroll.ts");
     }
 
     if (settings.autocomplete) {
-      void import("../main/autocomplete.ts");
+      import("../main/autocomplete.ts");
     }
   },
   { on: [endpoint === Endpoints.results] }
@@ -34,7 +34,7 @@ ready(
 
 ready(
   () => {
-    void import("../main/preferences.ts");
+    import("../main/preferences.ts");
   },
   { on: [endpoint === Endpoints.preferences] }
 );
@@ -83,7 +83,7 @@ const observer: IntersectionObserver = new IntersectionObserver((entries: Inters
   if (paginationEntry?.isIntersecting) {
     observer.unobserve(paginationEntry.target);
 
-    void loadNextPage(onlyImages, () => {
+    loadNextPage(onlyImages, () => {
       const nextObservedElement = document.querySelector<HTMLElement>(observedSelector);
       if (nextObservedElement) {
         observer.observe(nextObservedElement);
@@ -407,31 +407,12 @@ const toggleHelp = (keyBindings: typeof baseKeyBinding): void => {
 };
 
 const copyURLToClipboard = async (): Promise<void> => {
-  const selectedResult = document.querySelector<HTMLElement>(".result[data-vim-selected]");
-  if (!selectedResult) return;
-
-  const resultAnchor = selectedResult.querySelector<HTMLAnchorElement>("a");
-  assertElement(resultAnchor);
-
-  const url = resultAnchor.getAttribute("href");
+  const currentUrlElement = document.querySelector<HTMLAnchorElement>(".result[data-vim-selected] h3 a");
+  assertElement(currentUrlElement);
+
+  const url = currentUrlElement.getAttribute("href");
   if (url) {
-    if (window.isSecureContext) {
-      await navigator.clipboard.writeText(url);
-    } else {
-      const selection = window.getSelection();
-      if (selection) {
-        const node = document.createElement("span");
-        node.textContent = url;
-        resultAnchor.appendChild(node);
-
-        const range = document.createRange();
-        range.selectNodeContents(node);
-        selection.removeAllRanges();
-        selection.addRange(range);
-        document.execCommand("copy");
-        node.remove();
-      }
-    }
+    await navigator.clipboard.writeText(url);
   }
 };
 
@@ -22,7 +22,7 @@ listen("click", ".searxng_init_map", async function (this: HTMLElement, event: E
     Feature,
     Point
   } = await import("../pkg/ol.ts");
-  void import("ol/ol.css");
+  import("ol/ol.css");
 
   const { leafletTarget: target, mapLon, mapLat, mapGeojson } = this.dataset;
 
@@ -1,6 +1,6 @@
 // SPDX-License-Identifier: AGPL-3.0-or-later
 
-import { assertElement, http, listen, settings } from "../core/toolkit.ts";
+import { http, listen, settings } from "../core/toolkit.ts";
 
 let engineDescriptions: Record<string, [string, string]> | undefined;
 
@@ -52,25 +52,19 @@ for (const engine of disableAllEngines) {
   listen("click", engine, () => toggleEngines(false, engineToggles));
 }
 
-listen("click", "#copy-hash", async function (this: HTMLElement) {
-  const target = this.parentElement?.querySelector<HTMLPreElement>("pre");
-  assertElement(target);
-
-  if (window.isSecureContext) {
-    await navigator.clipboard.writeText(target.innerText);
-  } else {
-    const selection = window.getSelection();
-    if (selection) {
-      const range = document.createRange();
-      range.selectNodeContents(target);
-      selection.removeAllRanges();
-      selection.addRange(range);
-      document.execCommand("copy");
-    }
-  }
-
-  const copiedText = this.dataset.copiedText;
-  if (copiedText) {
-    this.innerText = copiedText;
-  }
-});
+const copyHashButton: HTMLElement | null = document.querySelector<HTMLElement>("#copy-hash");
+if (copyHashButton) {
+  listen("click", copyHashButton, async (event: Event) => {
+    event.preventDefault();
+
+    const { copiedText, hash } = copyHashButton.dataset;
+    if (!(copiedText && hash)) return;
+
+    try {
+      await navigator.clipboard.writeText(hash);
+      copyHashButton.innerText = copiedText;
+    } catch (error) {
+      console.error("Failed to copy hash:", error);
+    }
+  });
+}
@@ -121,19 +121,7 @@ listen("click", "#copy_url", async function (this: HTMLElement) {
   const target = this.parentElement?.querySelector<HTMLPreElement>("pre");
   assertElement(target);
 
-  if (window.isSecureContext) {
-    await navigator.clipboard.writeText(target.innerText);
-  } else {
-    const selection = window.getSelection();
-    if (selection) {
-      const range = document.createRange();
-      range.selectNodeContents(target);
-      selection.removeAllRanges();
-      selection.addRange(range);
-      document.execCommand("copy");
-    }
-  }
+  await navigator.clipboard.writeText(target.innerText);
 
   const copiedText = this.dataset.copiedText;
   if (copiedText) {
     this.innerText = copiedText;
@@ -1,22 +0,0 @@
-// SPDX-License-Identifier: AGPL-3.0-or-later
-
-/*
-   Layout of the Files result class
-*/
-
-#main_results .result-file {
-  border: 1px solid var(--color-result-border);
-  margin: 0 @results-tablet-offset 1rem @results-tablet-offset !important;
-  .rounded-corners;
-
-  video {
-    width: 100%;
-    aspect-ratio: 16 / 9;
-    padding: 10px 0 0 0;
-  }
-
-  audio {
-    width: 100%;
-    padding: 10px 0 0 0;
-  }
-}
@@ -178,6 +178,7 @@ html.no-js #clear_search.hide_if_nojs {
 #send_search {
   display: block;
   margin: 0;
+  padding: 0.8rem;
   background: none repeat scroll 0 0 var(--color-search-background);
   border: none;
   outline: none;
@@ -195,7 +196,6 @@ html.no-js #clear_search.hide_if_nojs {
 
   #send_search {
     .ltr-rounded-right-corners(0.8rem);
-    padding: 0.8rem;
 
     &:hover {
       cursor: pointer;
@@ -163,22 +163,12 @@ article[data-vim-selected].category-videos,
 article[data-vim-selected].category-news,
 article[data-vim-selected].category-map,
 article[data-vim-selected].category-music,
+article[data-vim-selected].category-files,
 article[data-vim-selected].category-social {
   border: 1px solid var(--color-result-vim-arrow);
   .rounded-corners;
 }
 
-.image-label-bottom-right() {
-  position: absolute;
-  right: 0;
-  bottom: 0;
-  background: var(--color-image-resolution-background);
-  padding: 0.3rem 0.5rem;
-  font-size: 0.9rem;
-  color: var(--color-image-resolution-font);
-  border-top-left-radius: 0.3rem;
-}
-
 .result {
   margin: @results-margin 0;
   padding: @result-padding;
@@ -305,22 +295,12 @@ article[data-vim-selected].category-social {
     color: var(--color-result-description-highlight-font);
   }
 
-  a.thumbnail_link {
-    position: relative;
-    margin-top: 0.6rem;
-    .ltr-margin-right(1rem);
+  img.thumbnail {
     .ltr-float-left();
+    padding-top: 0.6rem;
+    .ltr-padding-right(1rem);
-    img.thumbnail {
       width: 7rem;
       height: unset; // remove height value that was needed for lazy loading
-      display: block;
-    }
-
-    .thumbnail_length {
-      .image-label-bottom-right();
-      right: 6px;
-    }
   }
 
   .break {
@@ -386,6 +366,7 @@ article[data-vim-selected].category-social {
 .category-news,
 .category-map,
 .category-music,
+.category-files,
 .category-social {
   border: 1px solid var(--color-result-border);
   margin: 0 @results-tablet-offset 1rem @results-tablet-offset !important;
@@ -410,19 +391,23 @@ article[data-vim-selected].category-social {
 }
 
 .result-videos {
-  a.thumbnail_link img.thumbnail {
+  img.thumbnail {
+    .ltr-float-left();
+    padding-top: 0.6rem;
+    .ltr-padding-right(1rem);
     width: 20rem;
+    height: unset; // remove height value that was needed for lazy loading
   }
+}
 
-  .content {
+.result-videos .content {
   overflow: hidden;
 }
 
-  .embedded-video iframe {
+.result-videos .embedded-video iframe {
   width: 100%;
   aspect-ratio: 16 / 9;
   padding: 10px 0 0 0;
-  }
 }
 
 @supports not (aspect-ratio: 1 / 1) {
@@ -487,7 +472,14 @@ article[data-vim-selected].category-social {
 }
 
 .image_resolution {
-  .image-label-bottom-right();
+  position: absolute;
+  right: 0;
+  bottom: 0;
+  background: var(--color-image-resolution-background);
+  padding: 0.3rem 0.5rem;
+  font-size: 0.9rem;
+  color: var(--color-image-resolution-font);
+  border-top-left-radius: 0.3rem;
 }
 
 span.title,
@@ -1166,4 +1158,3 @@ pre code {
 @import "result_types/keyvalue.less";
 @import "result_types/code.less";
 @import "result_types/paper.less";
-@import "result_types/file.less";
@@ -193,15 +193,6 @@ div.selectable_url {
   border-color: var(--color-warning);
 }
 
-.dialog-warning-block {
-  .dialog();
-
-  display: block;
-  color: var(--color-warning);
-  background: var(--color-warning-background);
-  border-color: var(--color-warning);
-}
-
 .dialog-modal {
   .dialog();
 
@@ -4,7 +4,7 @@
  * Custom vite plugins to build the web-client components of the simple theme.
  *
  * HINT:
- * This is an initial implementation for the migration of the build process
+ * This is an inital implementation for the migration of the build process
  * from grunt to vite. For fully support (vite: build & serve) more work is
  * needed.
  */
container/base-builder.yml (new file, 26 lines)

@@ -0,0 +1,26 @@
+contents:
+  repositories:
+    - https://dl-cdn.alpinelinux.org/alpine/edge/main
+    - https://dl-cdn.alpinelinux.org/alpine/edge/community
+  packages:
+    - alpine-base
+    - build-base
+    - python3-dev
+    - uv
+    - brotli
+
+entrypoint:
+  command: /bin/sh -l
+
+work-dir: /usr/local/searxng/
+
+environment:
+  PATH: /usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
+  SSL_CERT_DIR: /etc/ssl/certs
+  SSL_CERT_FILE: /etc/ssl/certs/ca-certificates.crt
+  HISTFILE: /dev/null
+
+archs:
+  - x86_64
+  - aarch64
+  - armv7
container/base.yml (new file, 62 lines)

@@ -0,0 +1,62 @@
+contents:
+  repositories:
+    - https://dl-cdn.alpinelinux.org/alpine/edge/main
+  packages:
+    - alpine-baselayout
+    - ca-certificates
+    - ca-certificates-bundle
+    - musl-locales
+    - musl-locales-lang
+    - tzdata
+    - busybox
+    - python3
+    - wget
+
+entrypoint:
+  command: /bin/sh -l
+
+work-dir: /usr/local/searxng/
+
+accounts:
+  groups:
+    - groupname: searxng
+      gid: 977
+  users:
+    - username: searxng
+      uid: 977
+      shell: /bin/ash
+
+environment:
+  PATH: /usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
+  SSL_CERT_DIR: /etc/ssl/certs
+  SSL_CERT_FILE: /etc/ssl/certs/ca-certificates.crt
+  HISTFILE: /dev/null
+  CONFIG_PATH: /etc/searxng
+  DATA_PATH: /var/cache/searxng
+
+paths:
+  # Workdir
+  - path: /usr/local/searxng/
+    type: directory
+    uid: 977
+    gid: 977
+    permissions: 0o555
+
+  # Config volume
+  - path: /etc/searxng/
+    type: directory
+    uid: 977
+    gid: 977
+    permissions: 0o755
+
+  # Data volume
+  - path: /var/cache/searxng/
+    type: directory
+    uid: 977
+    gid: 977
+    permissions: 0o755
+
+archs:
+  - x86_64
+  - aarch64
+  - armv7
@@ -19,7 +19,8 @@ RUN --mount=type=cache,id=uv,target=/root/.cache/uv set -eux -o pipefail; \
     find ./.venv/lib/python*/site-packages/*.dist-info/ -type f -name "RECORD" -exec sort -t, -k1,1 -o {} {} \;; \
     find ./.venv/ -exec touch -h --date="@$TIMESTAMP_VENV" {} +
 
-COPY --exclude=./searx/version_frozen.py ./searx/ ./searx/
+# use "--exclude=./searx/version_frozen.py" when actions/runner-images updates to Podman 5.0+
+COPY ./searx/ ./searx/
 
 ARG TIMESTAMP_SETTINGS="0"
 
@@ -4,10 +4,10 @@ ARG CONTAINER_IMAGE_NAME="searxng"
 FROM localhost/$CONTAINER_IMAGE_ORGANIZATION/$CONTAINER_IMAGE_NAME:builder AS builder
 FROM ghcr.io/searxng/base:searxng AS dist
 
-COPY --chown=977:977 --from=builder /usr/local/searxng/.venv/ ./.venv/
-COPY --chown=977:977 --from=builder /usr/local/searxng/searx/ ./searx/
-COPY --chown=977:977 ./container/ ./
-COPY --chown=977:977 ./searx/version_frozen.py ./searx/
+COPY --chown=searxng:searxng --from=builder /usr/local/searxng/.venv/ ./.venv/
+COPY --chown=searxng:searxng --from=builder /usr/local/searxng/searx/ ./searx/
+COPY --chown=searxng:searxng ./container/ ./
+#COPY --chown=searxng:searxng ./searx/version_frozen.py ./searx/
 
 ARG CREATED="0001-01-01T00:00:00Z"
 ARG VERSION="unknown"
@@ -48,7 +48,7 @@ solve the CAPTCHA from `qwant.com <https://www.qwant.com/>`__.
 
    .. group-tab:: Firefox
 
-      .. kernel-figure:: /assets/answer-captcha/ffox-setting-proxy-socks.png
+      .. kernel-figure:: answer-captcha/ffox-setting-proxy-socks.png
         :alt: FFox proxy on SOCKS5, 127.0.0.1:8080
 
         Firefox's network settings
@@ -66,3 +66,4 @@ solve the CAPTCHA from `qwant.com <https://www.qwant.com/>`__.
 
 -N
   Do not execute a remote command. This is useful for just forwarding ports.
+

(Binary image ffox-setting-proxy-socks.png: 59 KiB before, 59 KiB after.)
@@ -100,7 +100,7 @@ Basic container instancing example:
    $ cd ./searxng/
 
    # Run the container
-   $ docker run --name searxng -d \
+   $ docker run --name searxng --replace -d \
      -p 8888:8080 \
      -v "./config/:/etc/searxng/" \
      -v "./data/:/var/cache/searxng/" \
@@ -4,5 +4,22 @@
 ``brand:``
 ==========
 
-.. autoclass:: searx.brand.SettingsBrand
-   :members:
+.. code:: yaml
+
+   brand:
+     issue_url: https://github.com/searxng/searxng/issues
+     docs_url: https://docs.searxng.org
+     public_instances: https://searx.space
+     wiki_url: https://github.com/searxng/searxng/wiki
+
+``issue_url`` :
+  If you host your own issue tracker change this URL.
+
+``docs_url`` :
+  If you host your own documentation change this URL.
+
+``public_instances`` :
+  If you host your own https://searx.space change this URL.
+
+``wiki_url`` :
+  Link to your wiki (or ``false``)
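These keys are read at runtime through the dotted-path lookup helper that appears in the last hunk of this compare; the import path and call sites below are illustrative assumptions, not taken from the diff:

    # Illustrative only: get_setting() is shown in the final hunk of this
    # compare; this import path and these call sites are assumptions.
    from searx import get_setting

    issue_url = get_setting("brand.issue_url")               # nested lookup via dotted path
    wiki_url = get_setting("brand.wiki_url", default=False)  # a default avoids a KeyError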
(Two deleted SVG assets: diffs suppressed because one or more lines are too long; sizes before: 7.2 KiB and 5.9 KiB.)

@@ -1 +0,0 @@
-<svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 1024 384">...</svg> (2.4 KiB logo deleted; inline path data omitted)
@@ -120,7 +120,6 @@ ${fedora_build}
   pip install -U setuptools
   pip install -U wheel
   pip install -U pyyaml
-  pip install -U msgspec
 
   # jump to SearXNG's working tree and install SearXNG into virtualenv
   (${SERVICE_USER})$ cd \"$SEARXNG_SRC\"
@@ -1,8 +0,0 @@
-.. _azure engine:
-
-===============
-Azure Resources
-===============
-
-.. automodule:: searx.engines.azure
-  :members:
docs/dev/engines/online/mullvad_leta.rst (new file, 8 lines)

@@ -0,0 +1,8 @@
+.. _voidlinux mullvad_leta:
+
+============
+Mullvad-Leta
+============
+
+.. automodule:: searx.engines.mullvad_leta
+  :members:
@@ -1,8 +0,0 @@
-.. _sourcehut engine:
-
-=========
-Sourcehut
-=========
-
-.. automodule:: searx.engines.sourcehut
-  :members:
@@ -1,7 +0,0 @@
-.. _result_types.file:
-
-============
-File Results
-============
-
-.. automodule:: searx.result_types.file
@@ -17,7 +17,6 @@ following types have been implemented so far ..
    main/keyvalue
    main/code
    main/paper
-   main/file
 
 The :ref:`LegacyResult <LegacyResult>` is used internally for the results that
 have not yet been typed. The templates can be used as orientation until the
@@ -29,4 +28,5 @@ final typing is complete.
 - :ref:`template torrent`
 - :ref:`template map`
 - :ref:`template packages`
+- :ref:`template files`
 - :ref:`template products`
@@ -60,7 +60,7 @@ Fields used in the template :origin:`macro result_sub_header
 publishedDate : :py:obj:`datetime.datetime`
   The date on which the object was published.
 
-length: :py:obj:`datetime.timedelta`
+length: :py:obj:`time.struct_time`
   Playing duration in seconds.
 
 views: :py:class:`str`
@@ -469,6 +469,38 @@ links : :py:class:`dict`
   Additional links in the form of ``{'link_name': 'http://example.com'}``
 
 
+.. _template files:
+
+``files.html``
+--------------
+
+Displays result fields from:
+
+- :ref:`macro result_header` and
+- :ref:`macro result_sub_header`
+
+Additional fields used in the :origin:`code.html
+<searx/templates/simple/result_templates/files.html>`:
+
+filename, size, time: :py:class:`str`
+  Filename, Filesize and Date of the file.
+
+mtype : ``audio`` | ``video`` | :py:class:`str`
+  Mimetype type of the file.
+
+subtype : :py:class:`str`
+  Mimetype / subtype of the file.
+
+abstract : :py:class:`str`
+  Abstract of the file.
+
+author : :py:class:`str`
+  Name of the author of the file
+
+embedded : :py:class:`str`
+  URL of an embedded media type (``audio`` or ``video``) / is collapsible.
+
+
 .. _template products:
 
 ``products.html``
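For orientation, a sketch of an engine result that would be rendered with this template; the field names come from the list above, while the enclosing response() callback and the "template" key follow the usual SearXNG engine pattern and are assumptions here:

    # Sketch only: fields taken from the files.html documentation above;
    # the surrounding engine callback shape is assumed, not shown in this diff.
    def response(resp):
        results = []
        results.append({
            "template": "files.html",  # select the template documented above
            "url": "https://example.org/files/report.pdf",
            "title": "report.pdf",
            "filename": "report.pdf",
            "size": "1.2 MB",
            "time": "2024-01-01",
            "mtype": "application",    # "audio" / "video" enable the embedded player
            "subtype": "pdf",
            "abstract": "Example abstract of the file.",
            "author": "Jane Doe",
        })
        return results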
@@ -56,34 +56,4 @@ If you don't trust anyone, you can set up your own, see :ref:`installation`.
    utils/index
    src/index
 
-
-----------------
-Acknowledgements
-----------------
-
-The following organizations have provided SearXNG access to their paid plans at
-no cost:
-
-.. flat-table::
-   :widths: 1 1
-
-   * - .. image:: /assets/sponsors/docker.svg
-        :target: https://docker.com
-        :alt: Docker
-        :align: center
-        :height: 100 px
-
-     - .. image:: /assets/sponsors/tuta.svg
-        :target: https://tuta.com
-        :alt: Tuta
-        :align: center
-        :height: 100 px
-
-   * - .. image:: /assets/sponsors/browserstack.svg
-        :target: https://browserstack.com
-        :alt: BrowserStack
-        :align: center
-        :height: 100 px
-
-
 .. _searx.space: https://searx.space
manage (2 changed lines)

@@ -117,7 +117,7 @@ EOF
 
 dev.env() {
     go.env.dev
-    nvm.ensure
+    nvm.env
     node.env.dev
 
     export GOENV
@@ -2,9 +2,9 @@ mock==5.2.0
 nose2[coverage_plugin]==0.15.1
 cov-core==1.15.0
 black==25.9.0
-pylint==4.0.3
+pylint==3.3.9
 splinter==0.21.0
-selenium==4.38.0
+selenium==4.36.0
 Pallets-Sphinx-Themes==2.3.0
 Sphinx==8.2.3 ; python_version >= '3.11'
 Sphinx==8.1.3 ; python_version < '3.11'
@@ -23,6 +23,6 @@ wlc==1.16.1
 coloredlogs==15.0.1
 docutils>=0.21.2
 parameterized==0.9.0
-granian[reload]==2.6.0
-basedpyright==1.34.0
-types-lxml==2025.11.25
+granian[reload]==2.5.5
+basedpyright==1.31.6
+types-lxml==2025.8.25
@@ -1,2 +1 @@
-granian==2.6.0
-granian[pname]==2.6.0
+granian==2.5.5
@@ -1,4 +1,4 @@
-certifi==2025.11.12
+certifi==2025.10.5
 babel==2.17.0
 flask-babel==4.0.0
 flask==3.1.2
@@ -9,12 +9,14 @@ python-dateutil==2.9.0.post0
 pyyaml==6.0.3
 httpx[http2]==0.28.1
 httpx-socks[asyncio]==0.10.0
+Brotli==1.1.0
+setproctitle==1.3.7
 valkey==6.1.1
-markdown-it-py==3.0.0
+markdown-it-py==4.0.0
 fasttext-predict==0.9.2.4
 tomli==2.3.0; python_version < '3.11'
-msgspec==0.20.0
-typer-slim==0.20.0
+msgspec==0.19.0
+typer-slim==0.19.2
 isodate==0.7.2
 whitenoise==6.11.0
-typing-extensions==4.15.0
+typing-extensions==4.14.1
@@ -9,7 +9,7 @@ from os.path import dirname, abspath
 
 import logging
 
-import msgspec
+import searx.unixthreadname  # pylint: disable=unused-import
 
 # Debug
 LOG_FORMAT_DEBUG: str = '%(levelname)-7s %(name)-30.30s: %(message)s'
@@ -76,22 +76,20 @@ def get_setting(name: str, default: t.Any = _unset) -> t.Any:
     settings and the ``default`` is unset, a :py:obj:`KeyError` is raised.
 
     """
-    value = settings
+    value: dict[str, t.Any] = settings
     for a in name.split('.'):
-        if isinstance(value, msgspec.Struct):
-            value = getattr(value, a, _unset)
-        elif isinstance(value, dict):
-            value = value.get(a, _unset)  # pyright: ignore
+        if isinstance(value, dict):
+            value = value.get(a, _unset)
         else:
-            value = _unset
+            value = _unset  # type: ignore
 
         if value is _unset:
             if default is _unset:
                 raise KeyError(name)
-            value = default
+            value = default  # type: ignore
             break
 
-    return value  # pyright: ignore
+    return value
 
 
 def _is_color_terminal():
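The hunk above switches get_setting() back to plain-dict traversal. A standalone sketch of the dotted-path lookup it performs, assuming the settings are an ordinary nested dict; the sentinel and helper below are stand-ins, not SearXNG's code:

_unset = object()

def lookup(settings: dict, name: str, default=_unset):
    value = settings
    for part in name.split("."):
        if isinstance(value, dict):
            value = value.get(part, _unset)
        else:
            value = _unset
        if value is _unset:
            if default is _unset:
                raise KeyError(name)
            return default
    return value

settings = {"brand": {"docs_url": "https://docs.searxng.org"}}
assert lookup(settings, "brand.docs_url") == "https://docs.searxng.org"
assert lookup(settings, "brand.missing", default=None) is None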
@@ -1,68 +0,0 @@
-# SPDX-License-Identifier: AGPL-3.0-or-later
-"""Implementations needed for a branding of SearXNG."""
-# pylint: disable=too-few-public-methods
-
-# Struct fields aren't discovered in Python 3.14
-# - https://github.com/searxng/searxng/issues/5284
-from __future__ import annotations
-
-__all__ = ["SettingsBrand"]
-
-import msgspec
-
-
-class BrandCustom(msgspec.Struct, kw_only=True, forbid_unknown_fields=True):
-    """Custom settings in the brand section."""
-
-    links: dict[str, str] = {}
-    """Custom entries in the footer of the WEB page: ``[title]: [link]``"""
-
-
-class SettingsBrand(msgspec.Struct, kw_only=True, forbid_unknown_fields=True):
-    """Options for configuring brand properties.
-
-    .. code:: yaml
-
-       brand:
-         issue_url: https://github.com/searxng/searxng/issues
-         docs_url: https://docs.searxng.org
-         public_instances: https://searx.space
-         wiki_url: https://github.com/searxng/searxng/wiki
-
-         custom:
-           links:
-             Uptime: https://uptime.searxng.org/history/example-org
-             About: https://example.org/user/about.html
-    """
-
-    issue_url: str = "https://github.com/searxng/searxng/issues"
-    """If you host your own issue tracker change this URL."""
-
-    docs_url: str = "https://docs.searxng.org"
-    """If you host your own documentation change this URL."""
-
-    public_instances: str = "https://searx.space"
-    """If you host your own https://searx.space change this URL."""
-
-    wiki_url: str = "https://github.com/searxng/searxng/wiki"
-    """Link to your wiki (or ``false``)"""
-
-    custom: BrandCustom = msgspec.field(default_factory=BrandCustom)
-    """Optional customizing.
-
-    .. autoclass:: searx.brand.BrandCustom
-       :members:
-    """
-
-    # new_issue_url is a hackish solution tailored for only one hoster (GH). As
-    # long as we don't have a more general solution, we should support it in the
-    # given function, but it should not be expanded further.
-
-    new_issue_url: str = "https://github.com/searxng/searxng/issues/new"
-    """If you host your own issue tracker not on GitHub, then unset this URL.
-
-    Note: This URL will create a pre-filled GitHub bug report form for an
-    engine. Since this feature is implemented only for GH (and limited to
-    engines), it will probably be replaced by another solution in the near
-    future.
-    """
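The deleted module relied on msgspec structs to validate the brand: section. A minimal sketch, assuming msgspec is installed, of how such a struct turns a YAML-loaded dict into typed settings; the Brand class here is a cut-down stand-in for the removed SettingsBrand:

import msgspec

class Brand(msgspec.Struct, kw_only=True, forbid_unknown_fields=True):
    # hypothetical stand-in, only two of the real fields
    issue_url: str = "https://github.com/searxng/searxng/issues"
    wiki_url: str = "https://github.com/searxng/searxng/wiki"

# A YAML loader produces a plain dict; msgspec validates it into the struct.
cfg = msgspec.convert({"issue_url": "https://example.org/issues"}, type=Brand)
print(cfg.issue_url)  # https://example.org/issues
print(cfg.wiki_url)   # default kept
# Unknown keys would raise msgspec.ValidationError (forbid_unknown_fields).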
@@ -5,6 +5,10 @@
 ----
 """
 
+# Struct fields aren't discovered in Python 3.14
+# - https://github.com/searxng/searxng/issues/5284
+from __future__ import annotations
+
 __all__ = ["ExpireCacheCfg", "ExpireCacheStats", "ExpireCache", "ExpireCacheSQLite"]
 
 import abc
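The added import works around the field-discovery problem on Python 3.14 tracked in the linked issue; the specifics live there, but the general effect of the future import is that annotations are stored lazily as strings (PEP 563), as this small sketch shows:

from __future__ import annotations

class C:
    x: int

print(C.__annotations__)  # {'x': 'int'} -- the string 'int', not the type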
File diff suppressed because it is too large
@@ -848,7 +848,6 @@
         "ja": "レアル",
         "ko": "브라질 헤알",
         "lt": "Brazilijos realas",
-        "lv": "Brazīlijas reāls",
         "ms": "Real Brazil",
         "nl": "Braziliaanse real",
         "oc": "Real",
@@ -933,7 +932,6 @@
         "ja": "ニュルタム",
         "ko": "부탄 눌트럼",
         "lt": "Ngultrumas",
-        "lv": "ngultrums",
         "ml": "ങൾട്രം",
         "ms": "Ngultrum Bhutan",
         "nl": "Bhutaanse ngultrum",
@@ -1329,15 +1327,15 @@
         "cs": "Kolumbijské peso",
         "da": "Colombiansk peso",
         "de": "kolumbianischer Peso",
-        "en": "peso",
+        "en": "Colombian peso",
         "eo": "kolombia peso",
-        "es": "peso",
+        "es": "peso colombiano",
         "et": "Colombia peeso",
         "eu": "Peso kolonbiar",
         "fi": "Kolumbian peso",
         "fr": "peso colombien",
         "ga": "peso na Colóime",
-        "gl": "peso colombiano",
+        "gl": "Peso colombiano",
         "he": "פסו קולומביאני",
         "hr": "Kolumbijski pezo",
         "hu": "kolumbiai peso",
@@ -1413,9 +1411,9 @@
         "cy": "peso (Ciwba)",
         "da": "Cubanske pesos",
         "de": "kubanischer Peso",
-        "en": "peso",
+        "en": "Cuban peso",
         "eo": "kuba peso",
-        "es": "peso",
+        "es": "peso cubano",
         "fi": "Kuuban peso",
         "fr": "peso cubain",
         "ga": "peso Chúba",
@@ -1717,7 +1715,7 @@
         "nl": "Egyptisch pond",
         "oc": "Liura egipciana",
         "pa": "ਮਿਸਰੀ ਪਾਊਂਡ",
-        "pl": "funt egipski",
+        "pl": "Funt egipski",
         "pt": "libra egípcia",
         "ro": "Liră egipteană",
         "ru": "египетский фунт",
@@ -1774,7 +1772,7 @@
         "de": "Äthiopischer Birr",
         "en": "bir",
         "eo": "etiopa birro",
-        "es": "bir etíope",
+        "es": "Birr etíope",
         "fi": "Etiopian birr",
         "fr": "Birr",
         "ga": "birr",
@@ -2861,7 +2859,6 @@
         "sl": "kirgiški som",
         "sr": "киргиски сом",
         "sv": "Kirgizistansk som",
-        "szl": "Sōm (waluta)",
         "tr": "Kırgızistan somu",
         "tt": "кыргыз сумы",
         "uk": "сом"
@@ -3795,9 +3792,9 @@
         "cs": "Mexické peso",
         "cy": "peso (Mecsico)",
         "de": "Mexikanischer Peso",
-        "en": "peso",
+        "en": "Mexican peso",
         "eo": "meksika peso",
-        "es": "peso",
+        "es": "peso mexicano",
         "et": "Mehhiko peeso",
         "eu": "Mexikar peso",
         "fi": "Meksikon peso",
@@ -4211,7 +4208,7 @@
         "fi": "Panaman balboa",
         "fr": "Balboa",
         "ga": "balboa Phanama",
-        "gl": "balboa",
+        "gl": "Balboa",
         "he": "בלבואה",
         "hr": "Panamska balboa",
         "hu": "panamai balboa",
@@ -4272,7 +4269,7 @@
         "tr": "Nuevo Sol",
         "tt": "Перу яңа соле",
         "uk": "Новий соль",
-        "vi": "Sol Perú"
+        "vi": "Sol Peru"
     },
     "PGK": {
         "ar": "كينا بابوا غينيا الجديدة",
@@ -4929,7 +4926,7 @@
         "ar": "دولار سنغافوري",
         "bg": "Сингапурски долар",
         "bn": "সিঙ্গাপুর ডলার",
-        "ca": "dòlar singapurès",
+        "ca": "dòlar de Singapur",
         "cs": "Singapurský dolar",
         "da": "singaporeansk dollar",
         "de": "Singapur-Dollar",
@@ -5537,7 +5534,7 @@
         "af": "Nuwe Taiwannese dollar",
         "ar": "دولار تايواني جديد",
         "bg": "Нов тайвански долар",
-        "ca": "Nou dòlar taiwanès",
+        "ca": "nou dòlar de Taiwan",
         "cs": "Tchajwanský dolar",
         "cy": "Doler Newydd Taiwan",
         "da": "taiwan dollar",
@@ -5811,7 +5808,6 @@
         "lt": "Uzbekijos sumas",
         "lv": "Uzbekistānas soms",
         "nl": "Oezbeekse sum",
-        "oc": "som ozbèc",
         "pa": "ਉਜ਼ਬੇਕਿਸਤਾਨੀ ਸੋਮ",
         "pl": "Sum",
         "pt": "som usbeque",
@@ -5952,7 +5948,6 @@
         "sk": "Tala",
         "sr": "самоанска тала",
         "sv": "Samoansk Tala",
-        "tr": "Samoa talası",
         "tt": "самоа таласы",
         "uk": "Самоанська тала"
     },
@@ -6100,14 +6095,12 @@
         "hu": "karibi forint",
         "it": "fiorino caraibico",
         "ja": "カリブ・ギルダー",
-        "ko": "카리브 휠던",
         "nl": "Caribische gulden",
         "pap": "Florin karibense",
         "pl": "Gulden karaibski",
         "pt": "Florim do Caribe",
         "ro": "Gulden caraibian",
         "ru": "Карибский гульден",
-        "sk": "Karibský gulden",
         "sl": "karibski goldinar"
     },
     "XDR": {
@@ -7081,7 +7074,6 @@
         "brazilski real": "BRL",
         "brazilský real": "BRL",
         "brazílsky real": "BRL",
-        "brazīlijas reāls": "BRL",
         "brezilya reali": "BRL",
         "brit font": "GBP",
         "brita pundo": "GBP",
@@ -7155,7 +7147,6 @@
         "burundžio frankas": "BIF",
         "butana ngultrumo": "BTN",
         "butanski ngultrum": "BTN",
-        "butānas ngultrums": "BTN",
         "butut": "GMD",
         "bututs": "GMD",
         "bwp": "BWP",
@@ -7827,7 +7818,6 @@
         "dirrã marroquino": "MAD",
         "dírham de los emiratos árabes unidos": "AED",
         "dírham dels emirats àrabs units": "AED",
-        "dírham emiratià": "AED",
         "dírham marroquí": "MAD",
         "djf": "DJF",
         "djiboeti frank": "DJF",
@@ -8260,7 +8250,6 @@
         "dòlar namibià": "NAD",
         "dòlar neozelandès": "NZD",
         "dòlar salomonès": "SBD",
-        "dòlar singapurès": "SGD",
         "dòlar surinamès": "SRD",
         "dòlar taiwanès": "TWD",
         "dòlars canadencs": "CAD",
@@ -10524,7 +10513,6 @@
         "ngultrum na bútáine": "BTN",
         "ngultrumas": "BTN",
         "ngultrumo": "BTN",
-        "ngultrums": "BTN",
         "ngwee": "ZMW",
         "nhân dân tệ": "CNY",
         "nhân dân tệ trung quốc": "CNY",
@@ -11641,7 +11629,6 @@
         "samoa dolaro": "WST",
         "samoa tala": "WST",
         "samoa talao": "WST",
-        "samoa talası": "WST",
         "samoaanse tala": "WST",
         "samoan tala": "WST",
         "samoan tālā": "WST",
@@ -11852,10 +11839,10 @@
         "sol d'or": "PEN",
         "sol de oro": "PEN",
         "sol novo": "PEN",
+        "sol peru": "PEN",
         "sol peruan": "PEN",
         "sol peruano": "PEN",
         "sol peruviano": "PEN",
-        "sol perú": "PEN",
         "solomon adaları doları": "SBD",
         "solomon dollar": "SBD",
         "solomon islands dollar": "SBD",
@@ -11881,7 +11868,6 @@
         "som kîrgîz": "KGS",
         "som na cirgeastáine": "KGS",
         "som na húisbéiceastáine": "UZS",
-        "som ozbèc": "UZS",
         "som quirguiz": "KGS",
         "som usbeco": "UZS",
         "som usbeque": "UZS",
@@ -11929,7 +11915,6 @@
         "sovjetisk rubel": "RUB",
         "soʻm": "UZS",
         "soʻm uzbekistan": "UZS",
-        "sōm": "KGS",
         "söm": "UZS",
         "special drawing right": "XDR",
         "special drawing rights": "XDR",
@@ -14455,7 +14440,6 @@
         "דולר פיג'י": "FJD",
         "דולר קיימני": "KYD",
         "דולר קנדי": "CAD",
-        "דולר של איי קיימן": "KYD",
         "דונג וייטנאמי ": "VND",
         "דינר אלג'ירי": "DZD",
         "דינר בחרייני": "BHD",
@@ -14663,7 +14647,6 @@
         "الجنيه الإسترليني": "GBP",
         "الجنيه السودانى": "SDG",
         "الجنيه المصري": "EGP",
-        "الدولار الامريكي": "IQD",
         "الدولار البربادوسي": "BBD",
         "الدولار البهامي": "BSD",
         "الدولار الكندي": "CAD",
@@ -14923,7 +14906,6 @@
         "شيلينغ كينيي": "KES",
         "عملة السعودية": "SAR",
         "عملة المملكة العربية السعودية": "SAR",
-        "عملة ذهبيه": "IQD",
         "عملة قطر": "QAR",
         "غواراني": "PYG",
         "غواراني باراغواي": "PYG",
@@ -15753,7 +15735,6 @@
             "203"
         ],
         "칠레 페소": "CLP",
-        "카리브 휠던": "XCG",
         "카보베르데 에스쿠도": "CVE",
         "카보베르데 이스쿠두": "CVE",
         "카보베르데에스쿠도": "CVE",
File diff suppressed because one or more lines are too long
@@ -1,6 +1,5 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
 """Simple implementation to store TrackerPatterns data in a SQL database."""
-# pylint: disable=too-many-branches
 
 import typing as t
 
@@ -120,12 +119,6 @@ class TrackerPatternsDB:
 
         for rule in self.rules():
 
-            query_str: str = parsed_new_url.query
-            if not query_str:
-                # There are no more query arguments in the parsed_new_url on
-                # which rules can be applied, stop iterating over the rules.
-                break
-
             if not re.match(rule[self.Fields.url_regexp], new_url):
                 # no match / ignore pattern
                 continue
@@ -143,32 +136,18 @@ class TrackerPatternsDB:
                 # overlapping urlPattern like ".*"
                 continue
 
+            # remove tracker arguments from the url-query part
             query_args: list[tuple[str, str]] = list(parse_qsl(parsed_new_url.query))
-            if query_args:
-                # remove tracker arguments from the url-query part
-                for name, val in query_args.copy():
-                    # remove URL arguments
-                    for pattern in rule[self.Fields.del_args]:
-                        if re.match(pattern, name):
-                            log.debug(
-                                "TRACKER_PATTERNS: %s remove tracker arg: %s='%s'", parsed_new_url.netloc, name, val
-                            )
-                            query_args.remove((name, val))
 
-                parsed_new_url = parsed_new_url._replace(query=urlencode(query_args))
-                new_url = urlunparse(parsed_new_url)
-
-            else:
-                # The query argument for URLs like:
-                # - 'http://example.org?q=' --> query_str is 'q=' and query_args is []
-                # - 'http://example.org?/foo/bar' --> query_str is 'foo/bar' and query_args is []
-                # is a simple string and not a key/value dict.
-                for pattern in rule[self.Fields.del_args]:
-                    if re.match(pattern, query_str):
-                        log.debug("TRACKER_PATTERNS: %s remove tracker arg: '%s'", parsed_new_url.netloc, query_str)
-                        parsed_new_url = parsed_new_url._replace(query="")
-                        new_url = urlunparse(parsed_new_url)
-                        break
+            for name, val in query_args.copy():
+                # remove URL arguments
+                for pattern in rule[self.Fields.del_args]:
+                    if re.match(pattern, name):
+                        log.debug("TRACKER_PATTERNS: %s remove tracker arg: %s='%s'", parsed_new_url.netloc, name, val)
+                        query_args.remove((name, val))
+
+            parsed_new_url = parsed_new_url._replace(query=urlencode(query_args))
+            new_url = urlunparse(parsed_new_url)
 
         if new_url != url:
             return new_url
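The removed else-branch handled query strings that parse_qsl() cannot represent as key/value pairs. A small standalone sketch of that edge case:

from urllib.parse import parse_qsl, urlencode, urlparse, urlunparse

# parse_qsl() silently drops query strings without real key=value pairs:
assert parse_qsl("q=") == []          # blank value -> dropped
assert parse_qsl("/foo/bar") == []    # no '=' at all -> dropped
assert parse_qsl("utm_source=x") == [("utm_source", "x")]

# Round-tripping through parse_qsl()/urlencode() therefore loses such
# "bare" query strings entirely:
parts = urlparse("http://example.org?q=")
print(urlunparse(parts._replace(query=urlencode(parse_qsl(parts.query)))))
# -> http://example.org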
@@ -5,7 +5,7 @@
     ],
     "ua": "Mozilla/5.0 ({os}; rv:{version}) Gecko/20100101 Firefox/{version}",
     "versions": [
-        "144.0",
-        "143.0"
+        "143.0",
+        "142.0"
     ]
 }
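The "ua" value above is a template; presumably it is expanded with str.format(), roughly like this sketch (the exact call site is not part of this diff):

ua = "Mozilla/5.0 ({os}; rv:{version}) Gecko/20100101 Firefox/{version}"
print(ua.format(os="X11; Linux x86_64", version="143.0"))
# Mozilla/5.0 (X11; Linux x86_64; rv:143.0) Gecko/20100101 Firefox/143.0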
@@ -3294,16 +3294,6 @@
         "symbol": "slug",
         "to_si_factor": 14.593903
     },
-    "Q136416965": {
-        "si_name": null,
-        "symbol": "GT/S",
-        "to_si_factor": null
-    },
-    "Q136417074": {
-        "si_name": null,
-        "symbol": "MT/S",
-        "to_si_factor": null
-    },
     "Q1374438": {
         "si_name": "Q11574",
         "symbol": "ks",
@@ -6385,9 +6375,9 @@
         "to_si_factor": 86400.0
     },
     "Q577": {
-        "si_name": "Q11574",
+        "si_name": null,
         "symbol": "a",
-        "to_si_factor": 31557600.0
+        "to_si_factor": null
     },
     "Q57899268": {
         "si_name": "Q3332095",
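Reading of these records, as far as the data itself suggests: to_si_factor converts a value into the SI unit referenced by si_name (Q11574 is the second), so the old Q577 entry mapped the year (symbol "a") to a Julian year of 365.25 days:

# the old Q577 factor reconstructed:
print(365.25 * 86400.0)  # 31557600.0 seconds in a Julian year
# and converting 2 of the 86400.0-factor units (days) to seconds:
print(2 * 86400.0)       # 172800.0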
@@ -270,14 +270,7 @@ def load_engines(engine_list: list[dict[str, t.Any]]):
     categories.clear()
     categories['general'] = []
     for engine_data in engine_list:
-        if engine_data.get("inactive") is True:
-            continue
         engine = load_engine(engine_data)
         if engine:
             register_engine(engine)
-        else:
-            # if an engine can't be loaded (if for example the engine is missing
-            # tor or some other requirements) its set to inactive!
-            logger.error("loading engine %s failed: set engine to inactive!", engine_data.get("name", "???"))
-            engine_data["inactive"] = True
     return engines
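For reference, a standalone sketch of the behavior the two removed blocks implemented together: engines flagged inactive are skipped, and failed loads are flagged in-place so a later pass skips them. load_engine() here is a stand-in, not SearXNG's loader:

def load_engine(engine_data):
    # stand-in loader: pretend engines without a "module" key fail to load
    return engine_data.get("module")

def load_engines(engine_list):
    engines = {}
    for engine_data in engine_list:
        if engine_data.get("inactive") is True:
            continue                        # skip engines already flagged
        engine = load_engine(engine_data)
        if engine:
            engines[engine_data["name"]] = engine
        else:
            engine_data["inactive"] = True  # flag the failure in-place
    return engines

cfg = [{"name": "ok", "module": object()}, {"name": "broken"}]
print(load_engines(cfg).keys())  # dict_keys(['ok'])
print(cfg[1]["inactive"])        # True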
@@ -12,7 +12,7 @@ from urllib.parse import urlencode, urljoin, urlparse
 import lxml
 import babel
 
-from searx.utils import extract_text, eval_xpath_list, eval_xpath_getindex, searxng_useragent
+from searx.utils import extract_text, eval_xpath_list, eval_xpath_getindex
 from searx.enginelib.traits import EngineTraits
 from searx.locales import language_tag
 
@@ -45,7 +45,7 @@ def request(query, params):
         query += ' (' + eng_lang + ')'
         # wiki.archlinux.org is protected by anubis
         # - https://github.com/searxng/searxng/issues/4646#issuecomment-2817848019
-        params['headers']['User-Agent'] = searxng_useragent()
+        params['headers']['User-Agent'] = "SearXNG"
     elif netloc == 'wiki.archlinuxcn.org':
         base_url = 'https://' + netloc + '/wzh/index.php?'
 
@@ -120,7 +120,7 @@ def fetch_traits(engine_traits: EngineTraits):
         'zh': 'Special:搜索',
     }
 
-    resp = get('https://wiki.archlinux.org/', timeout=3)
+    resp = get('https://wiki.archlinux.org/')
     if not resp.ok:  # type: ignore
         print("ERROR: response from wiki.archlinux.org is not OK.")
 
@@ -50,7 +50,7 @@ def response(resp):
     pos = script.index(end_tag) + len(end_tag) - 1
     script = script[:pos]
 
-    json_resp = utils.js_obj_str_to_python(script)
+    json_resp = utils.js_variable_to_python(script)
 
     results = []
 
@@ -1,190 +0,0 @@
-# SPDX-License-Identifier: AGPL-3.0-or-later
-"""Engine for Azure resources. This engine mimics the standard search bar in Azure
-Portal (for resources and resource groups).
-
-Configuration
-=============
-
-You must `register an application in Microsoft Entra ID`_ and assign it the
-'Reader' role in your subscription.
-
-To use this engine, add an entry similar to the following to your engine list in
-``settings.yml``:
-
-.. code:: yaml
-
-  - name: azure
-    engine: azure
-    ...
-    azure_tenant_id: "your_tenant_id"
-    azure_client_id: "your_client_id"
-    azure_client_secret: "your_client_secret"
-    azure_token_expiration_seconds: 5000
-
-.. _register an application in Microsoft Entra ID:
-   https://learn.microsoft.com/en-us/entra/identity-platform/quickstart-register-app
-
-"""
-import typing as t
-
-from searx.enginelib import EngineCache
-from searx.network import post as http_post
-from searx.result_types import EngineResults
-
-if t.TYPE_CHECKING:
-    from searx.extended_types import SXNG_Response
-    from searx.search.processors import OnlineParams
-
-engine_type = "online"
-categories = ["it", "cloud"]
-
-# Default values, should be overridden in settings.yml
-azure_tenant_id = ""
-azure_client_id = ""
-azure_client_secret = ""
-azure_token_expiration_seconds = 5000
-"""Time for which an auth token is valid (sec.)"""
-azure_batch_endpoint = "https://management.azure.com/batch?api-version=2020-06-01"
-
-about = {
-    "website": "https://www.portal.azure.com",
-    "wikidata_id": "Q725967",
-    "official_api_documentation": "https://learn.microsoft.com/en-us/\
-rest/api/azure-resourcegraph/?view=rest-azureresourcegraph-resourcegraph-2024-04-01",
-    "use_official_api": True,
-    "require_api_key": True,
-    "results": "JSON",
-    "language": "en",
-}
-
-CACHE: EngineCache
-"""Persistent (SQLite) key/value cache that deletes its values after ``expire``
-seconds."""
-
-
-def setup(engine_settings: dict[str, t.Any]) -> bool:
-    """Initialization of the engine.
-
-    - Instantiate a cache for this engine (:py:obj:`CACHE`).
-    - Checks whether the tenant_id, client_id and client_secret are set,
-      otherwise the engine is inactive.
-
-    """
-    global CACHE  # pylint: disable=global-statement
-    CACHE = EngineCache(engine_settings["name"])
-
-    missing_opts: list[str] = []
-    for opt in ("azure_tenant_id", "azure_client_id", "azure_client_secret"):
-        if not engine_settings.get(opt, ""):
-            missing_opts.append(opt)
-    if missing_opts:
-        logger.error("missing values for options: %s", ", ".join(missing_opts))
-        return False
-    return True
-
-
-def authenticate(t_id: str, c_id: str, c_secret: str) -> str:
-    """Authenticates to Azure using Oauth2 Client Credentials Flow and returns
-    an access token."""
-
-    url = f"https://login.microsoftonline.com/{t_id}/oauth2/v2.0/token"
-    body = {
-        "client_id": c_id,
-        "client_secret": c_secret,
-        "grant_type": "client_credentials",
-        "scope": "https://management.azure.com/.default",
-    }
-
-    resp: SXNG_Response = http_post(url, body, timeout=5)
-    if resp.status_code != 200:
-        raise RuntimeError(f"Azure authentication failed (status {resp.status_code}): {resp.text}")
-    return resp.json()["access_token"]
-
-
-def get_auth_token(t_id: str, c_id: str, c_secret: str) -> str:
-    key = f"azure_tenant_id: {t_id:}, azure_client_id: {c_id}, azure_client_secret: {c_secret}"
-    token: str | None = CACHE.get(key)
-    if token:
-        return token
-    token = authenticate(t_id, c_id, c_secret)
-    CACHE.set(key=key, value=token, expire=azure_token_expiration_seconds)
-    return token
-
-
-def request(query: str, params: "OnlineParams") -> None:
-
-    token = get_auth_token(azure_tenant_id, azure_client_id, azure_client_secret)
-
-    params["url"] = azure_batch_endpoint
-    params["method"] = "POST"
-    params["headers"]["Authorization"] = f"Bearer {token}"
-    params["headers"]["Content-Type"] = "application/json"
-    params["json"] = {
-        "requests": [
-            {
-                "url": "/providers/Microsoft.ResourceGraph/resources?api-version=2024-04-01",
-                "httpMethod": "POST",
-                "name": "resourceGroups",
-                "requestHeaderDetails": {"commandName": "Microsoft.ResourceGraph"},
-                "content": {
-                    "query": (
-                        f"ResourceContainers"
-                        f" | where (name contains ('{query}'))"
-                        f" | where (type =~ ('Microsoft.Resources/subscriptions/resourcegroups'))"
-                        f" | project id,name,type,kind,subscriptionId,resourceGroup"
-                        f" | extend matchscore = name startswith '{query}'"
-                        f" | extend normalizedName = tolower(tostring(name))"
-                        f" | sort by matchscore desc, normalizedName asc"
-                        f" | take 30"
-                    )
-                },
-            },
-            {
-                "url": "/providers/Microsoft.ResourceGraph/resources?api-version=2024-04-01",
-                "httpMethod": "POST",
-                "name": "resources",
-                "requestHeaderDetails": {
-                    "commandName": "Microsoft.ResourceGraph",
-                },
-                "content": {
-                    "query": f"Resources | where name contains '{query}' | take 30",
-                },
-            },
-        ]
-    }
-
-
-def response(resp: "SXNG_Response") -> EngineResults:
-    res = EngineResults()
-    json_data = resp.json()
-
-    for result in json_data["responses"]:
-        if result["name"] == "resourceGroups":
-            for data in result["content"]["data"]:
-                res.add(
-                    res.types.MainResult(
-                        url=(
-                            f"https://portal.azure.com/#@/resource"
-                            f"/subscriptions/{data['subscriptionId']}/resourceGroups/{data['name']}/overview"
-                        ),
-                        title=data["name"],
-                        content=f"Resource Group in Subscription: {data['subscriptionId']}",
-                    )
-                )
-        elif result["name"] == "resources":
-            for data in result["content"]["data"]:
-                res.add(
-                    res.types.MainResult(
-                        url=(
-                            f"https://portal.azure.com/#@/resource"
-                            f"/subscriptions/{data['subscriptionId']}/resourceGroups/{data['resourceGroup']}"
-                            f"/providers/{data['type']}/{data['name']}/overview"
-                        ),
-                        title=data["name"],
-                        content=(
-                            f"Resource of type {data['type']} in Subscription:"
-                            f" {data['subscriptionId']}, Resource Group: {data['resourceGroup']}"
-                        ),
-                    )
-                )
-    return res
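The removed authenticate() helper performed a plain OAuth2 client-credentials exchange. A standalone sketch of the same exchange using httpx directly; tenant and client values are placeholders:

import httpx

def get_token(tenant_id: str, client_id: str, client_secret: str) -> str:
    url = f"https://login.microsoftonline.com/{tenant_id}/oauth2/v2.0/token"
    resp = httpx.post(url, data={
        "client_id": client_id,
        "client_secret": client_secret,
        "grant_type": "client_credentials",
        "scope": "https://management.azure.com/.default",
    }, timeout=5)
    resp.raise_for_status()
    return resp.json()["access_token"]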
@@ -108,10 +108,6 @@ def request(query, params):
         time_ranges = {'day': '1', 'week': '2', 'month': '3', 'year': f'5_{unix_day-365}_{unix_day}'}
         params['url'] += f'&filters=ex1:"ez{time_ranges[params["time_range"]]}"'
 
-    # in some regions where geoblocking is employed (e.g. China),
-    # www.bing.com redirects to the regional version of Bing
-    params['allow_redirects'] = True
-
     return params
 
 
@@ -201,6 +197,7 @@ def fetch_traits(engine_traits: EngineTraits):
         "User-Agent": gen_useragent(),
         "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
         "Accept-Language": "en-US;q=0.5,en;q=0.3",
+        "Accept-Encoding": "gzip, deflate, br",
         "DNT": "1",
         "Connection": "keep-alive",
         "Upgrade-Insecure-Requests": "1",
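The year filter above interpolates unix_day, which is not defined in this hunk; presumably it counts whole days since the Unix epoch, along these lines:

import time

unix_day = int(time.time() // 86400)  # assumption: days since the epoch
time_ranges = {'day': '1', 'week': '2', 'month': '3', 'year': f'5_{unix_day-365}_{unix_day}'}
print(time_ranges['year'])  # e.g. 5_19996_20361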
@@ -124,17 +124,17 @@ from urllib.parse import (
     urlparse,
 )
 
-import json
 from dateutil import parser
 from lxml import html
 
 from searx import locales
 from searx.utils import (
+    extr,
     extract_text,
+    eval_xpath,
     eval_xpath_list,
     eval_xpath_getindex,
-    js_obj_str_to_python,
-    js_obj_str_to_json_str,
+    js_variable_to_python,
     get_embeded_stream_url,
 )
 from searx.enginelib.traits import EngineTraits
@@ -142,17 +142,17 @@ from searx.result_types import EngineResults
 from searx.extended_types import SXNG_Response
 
 about = {
-    "website": "https://search.brave.com/",
-    "wikidata_id": "Q22906900",
+    "website": 'https://search.brave.com/',
+    "wikidata_id": 'Q22906900',
     "official_api_documentation": None,
     "use_official_api": False,
     "require_api_key": False,
-    "results": "HTML",
+    "results": 'HTML',
 }
 
 base_url = "https://search.brave.com/"
 categories = []
-brave_category: t.Literal["search", "videos", "images", "news", "goggles"] = "search"
+brave_category: t.Literal["search", "videos", "images", "news", "goggles"] = 'search'
 """Brave supports common web-search, videos, images, news, and goggles search.
 
 - ``search``: Common WEB search
@@ -182,86 +182,74 @@ to do more won't return any result and you will most likely be flagged as a bot.
 """
 
 safesearch = True
-safesearch_map = {2: "strict", 1: "moderate", 0: "off"}  # cookie: safesearch=off
+safesearch_map = {2: 'strict', 1: 'moderate', 0: 'off'}  # cookie: safesearch=off
 
 time_range_support = False
 """Brave only supports time-range in :py:obj:`brave_category` ``search`` (UI
 category All) and in the goggles category."""
 
 time_range_map: dict[str, str] = {
-    "day": "pd",
-    "week": "pw",
-    "month": "pm",
-    "year": "py",
+    'day': 'pd',
+    'week': 'pw',
+    'month': 'pm',
+    'year': 'py',
 }
 
 
 def request(query: str, params: dict[str, t.Any]) -> None:
 
+    # Don't accept br encoding / see https://github.com/searxng/searxng/pull/1787
+    params['headers']['Accept-Encoding'] = 'gzip, deflate'
+
     args: dict[str, t.Any] = {
-        "q": query,
-        "source": "web",
+        'q': query,
+        'source': 'web',
     }
     if brave_spellcheck:
-        args["spellcheck"] = "1"
+        args['spellcheck'] = '1'
 
-    if brave_category in ("search", "goggles"):
-        if params.get("pageno", 1) - 1:
-            args["offset"] = params.get("pageno", 1) - 1
-        if time_range_map.get(params["time_range"]):
-            args["tf"] = time_range_map.get(params["time_range"])
+    if brave_category in ('search', 'goggles'):
+        if params.get('pageno', 1) - 1:
+            args['offset'] = params.get('pageno', 1) - 1
+        if time_range_map.get(params['time_range']):
+            args['tf'] = time_range_map.get(params['time_range'])
 
-    if brave_category == "goggles":
-        args["goggles_id"] = Goggles
+    if brave_category == 'goggles':
+        args['goggles_id'] = Goggles
 
     params["url"] = f"{base_url}{brave_category}?{urlencode(args)}"
-    logger.debug("url %s", params["url"])
 
     # set properties in the cookies
 
-    params["cookies"]["safesearch"] = safesearch_map.get(params["safesearch"], "off")
-    # the useLocation is IP based, we use cookie "country" for the region
-    params["cookies"]["useLocation"] = "0"
-    params["cookies"]["summarizer"] = "0"
+    params['cookies']['safesearch'] = safesearch_map.get(params['safesearch'], 'off')
+    # the useLocation is IP based, we use cookie 'country' for the region
+    params['cookies']['useLocation'] = '0'
+    params['cookies']['summarizer'] = '0'
 
-    engine_region = traits.get_region(params["searxng_locale"], "all")
-    params["cookies"]["country"] = engine_region.split("-")[-1].lower()  # type: ignore
+    engine_region = traits.get_region(params['searxng_locale'], 'all')
+    params['cookies']['country'] = engine_region.split('-')[-1].lower()  # type: ignore
 
-    ui_lang = locales.get_engine_locale(params["searxng_locale"], traits.custom["ui_lang"], "en-us")
-    params["cookies"]["ui_lang"] = ui_lang
-    logger.debug("cookies %s", params["cookies"])
+    ui_lang = locales.get_engine_locale(params['searxng_locale'], traits.custom["ui_lang"], 'en-us')
+    params['cookies']['ui_lang'] = ui_lang
+
+    logger.debug("cookies %s", params['cookies'])
+
+    params['headers']['Sec-Fetch-Dest'] = "document"
+    params['headers']['Sec-Fetch-Mode'] = "navigate"
+    params['headers']['Sec-Fetch-Site'] = "same-origin"
+    params['headers']['Sec-Fetch-User'] = "?1"
 
 
-def _extract_published_date(published_date_raw: str | None):
+def _extract_published_date(published_date_raw):
     if published_date_raw is None:
         return None
 
     try:
         return parser.parse(published_date_raw)
     except parser.ParserError:
         return None
 
 
-def extract_json_data(text: str) -> dict[str, t.Any]:
-    # Example script source containing the data:
-    #
-    # kit.start(app, element, {
-    #     node_ids: [0, 19],
-    #     data: [{type:"data",data: .... ["q","goggles_id"],route:1,url:1}}]
-    #          ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-    text = text[text.index("<script") : text.index("</script")]
-    if not text:
-        raise ValueError("can't find JS/JSON data in the given text")
-    start = text.index("data: [{")
-    end = text.rindex("}}]")
-    js_obj_str = text[start:end]
-    js_obj_str = "{" + js_obj_str + "}}]}"
-    # js_obj_str = js_obj_str.replace("\xa0", "")  # remove ASCII for
-    # js_obj_str = js_obj_str.replace(r"\u003C", "<").replace(r"\u003c", "<")  # fix broken HTML tags in strings
-    json_str = js_obj_str_to_json_str(js_obj_str)
-    data: dict[str, t.Any] = json.loads(json_str)
-    return data
-
-
 def response(resp: SXNG_Response) -> EngineResults:
 
     if brave_category in ('search', 'goggles'):
@@ -276,8 +264,11 @@ def response(resp: SXNG_Response) -> EngineResults:
     #     node_ids: [0, 19],
     #     data: [{type:"data",data: .... ["q","goggles_id"],route:1,url:1}}]
     #          ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-    json_data: dict[str, t.Any] = extract_json_data(resp.text)
-    json_resp: dict[str, t.Any] = json_data['data'][1]["data"]['body']['response']
+    js_object = "[{" + extr(resp.text, "data: [{", "}}],") + "}}]"
+    json_data = js_variable_to_python(js_object)
+
+    # json_data is a list and at the second position (0,1) in this list we find the "response" data we need ..
+    json_resp = json_data[1]['data']['body']['response']
 
     if brave_category == 'images':
         return _parse_images(json_resp)
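The new parsing path slices the JS object literal out of the page with extr() and wraps it back into balanced brackets. A standalone sketch of that idea, with a local stand-in for searx.utils.extr and a toy input:

def extr(text: str, begin: str, end: str) -> str:
    # take the substring between the first `begin` and the next `end`
    start = text.index(begin) + len(begin)
    return text[start:text.index(end, start)]

script = 'kit.start(app, element, {node_ids: [0, 19], data: [{a: {b: 1}}], more: 2})'
js_object = "[{" + extr(script, "data: [{", "}}],") + "}}]"
print(js_object)  # [{a: {b: 1}}]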
@@ -287,121 +278,150 @@ def response(resp: SXNG_Response) -> EngineResults:
     raise ValueError(f"Unsupported brave category: {brave_category}")
 
 
-def _parse_search(resp: SXNG_Response) -> EngineResults:
-    res = EngineResults()
+def _parse_search(resp) -> EngineResults:
+    result_list = EngineResults()
 
     dom = html.fromstring(resp.text)
 
-    for result in eval_xpath_list(dom, "//div[contains(@class, 'snippet ')]"):
+    # I doubt that Brave is still providing the "answer" class / I haven't seen
+    # answers in brave for a long time.
+    answer_tag = eval_xpath_getindex(dom, '//div[@class="answer"]', 0, default=None)
+    if answer_tag:
+        url = eval_xpath_getindex(dom, '//div[@id="featured_snippet"]/a[@class="result-header"]/@href', 0, default=None)
+        answer = extract_text(answer_tag)
+        if answer is not None:
+            result_list.add(result_list.types.Answer(answer=answer, url=url))
 
-        url: str | None = eval_xpath_getindex(result, ".//a/@href", 0, default=None)
-        title_tag = eval_xpath_getindex(result, ".//div[contains(@class, 'title')]", 0, default=None)
+    # xpath_results = '//div[contains(@class, "snippet fdb") and @data-type="web"]'
+    xpath_results = '//div[contains(@class, "snippet ")]'
+
+    for result in eval_xpath_list(dom, xpath_results):
+
+        url = eval_xpath_getindex(result, './/a[contains(@class, "h")]/@href', 0, default=None)
+        title_tag = eval_xpath_getindex(
+            result, './/a[contains(@class, "h")]//div[contains(@class, "title")]', 0, default=None
+        )
         if url is None or title_tag is None or not urlparse(url).netloc:  # partial url likely means it's an ad
             continue
 
-        content: str = ""
-        pub_date = None
-
-        _content = eval_xpath_getindex(result, ".//div[contains(@class, 'content')]", 0, default="")
-        if len(_content):
-            content = extract_text(_content)  # type: ignore
-            _pub_date = extract_text(
-                eval_xpath_getindex(_content, ".//span[contains(@class, 't-secondary')]", 0, default="")
-            )
-            if _pub_date:
-                pub_date = _extract_published_date(_pub_date)
-                content = content.lstrip(_pub_date).strip("- \n\t")
-
-        thumbnail: str = eval_xpath_getindex(result, ".//a[contains(@class, 'thumbnail')]//img/@src", 0, default="")
-
-        item = res.types.LegacyResult(
-            template="default.html",
-            url=url,
-            title=extract_text(title_tag),
-            content=content,
-            publishedDate=pub_date,
-            thumbnail=thumbnail,
-        )
-        res.add(item)
+        content: str = extract_text(
+            eval_xpath_getindex(result, './/div[contains(@class, "snippet-description")]', 0, default='')
+        )  # type: ignore
+        pub_date_raw = eval_xpath(result, 'substring-before(.//div[contains(@class, "snippet-description")], "-")')
+        pub_date = _extract_published_date(pub_date_raw)
+        if pub_date and content.startswith(pub_date_raw):
+            content = content.lstrip(pub_date_raw).strip("- \n\t")
+
+        thumbnail = eval_xpath_getindex(result, './/img[contains(@class, "thumb")]/@src', 0, default='')
+
+        item = {
+            'url': url,
+            'title': extract_text(title_tag),
+            'content': content,
+            'publishedDate': pub_date,
+            'thumbnail': thumbnail,
+        }
 
         video_tag = eval_xpath_getindex(
-            result, ".//div[contains(@class, 'video-snippet') and @data-macro='video']", 0, default=[]
+            result, './/div[contains(@class, "video-snippet") and @data-macro="video"]', 0, default=None
         )
-        if len(video_tag):
+        if video_tag is not None:
 
             # In my tests a video tag in the WEB search was most often not a
             # video, except the ones from youtube ..
 
             iframe_src = get_embeded_stream_url(url)
             if iframe_src:
-                item["iframe_src"] = iframe_src
-                item["template"] = "videos.html"
+                item['iframe_src'] = iframe_src
+                item['template'] = 'videos.html'
+                item['thumbnail'] = eval_xpath_getindex(video_tag, './/img/@src', 0, default='')
+                pub_date_raw = extract_text(
+                    eval_xpath(video_tag, './/div[contains(@class, "snippet-attributes")]/div/text()')
+                )
+                item['publishedDate'] = _extract_published_date(pub_date_raw)
+            else:
+                item['thumbnail'] = eval_xpath_getindex(video_tag, './/img/@src', 0, default='')
 
-    return res
+        result_list.append(item)
+
+    return result_list
 
 
-def _parse_news(resp: SXNG_Response) -> EngineResults:
-    res = EngineResults()
+def _parse_news(resp) -> EngineResults:
+    result_list = EngineResults()
     dom = html.fromstring(resp.text)
 
-    for result in eval_xpath_list(dom, "//div[contains(@class, 'results')]//div[@data-type='news']"):
+    for result in eval_xpath_list(dom, '//div[contains(@class, "results")]//div[@data-type="news"]'):
 
-        url = eval_xpath_getindex(result, ".//a[contains(@class, 'result-header')]/@href", 0, default=None)
+        # import pdb
+        # pdb.set_trace()
+
+        url = eval_xpath_getindex(result, './/a[contains(@class, "result-header")]/@href', 0, default=None)
         if url is None:
             continue
 
-        title = eval_xpath_list(result, ".//span[contains(@class, 'snippet-title')]")
-        content = eval_xpath_list(result, ".//p[contains(@class, 'desc')]")
-        thumbnail = eval_xpath_getindex(result, ".//div[contains(@class, 'image-wrapper')]//img/@src", 0, default="")
+        title = extract_text(eval_xpath_list(result, './/span[contains(@class, "snippet-title")]'))
+        content = extract_text(eval_xpath_list(result, './/p[contains(@class, "desc")]'))
+        thumbnail = eval_xpath_getindex(result, './/div[contains(@class, "image-wrapper")]//img/@src', 0, default='')
 
-        item = res.types.LegacyResult(
-            template="default.html",
-            url=url,
-            title=extract_text(title),
-            thumbnail=thumbnail,
-            content=extract_text(content),
-        )
-        res.add(item)
+        item = {
+            "url": url,
+            "title": title,
+            "content": content,
+            "thumbnail": thumbnail,
+        }
 
-    return res
+        result_list.append(item)
+
+    return result_list
 
 
-def _parse_images(json_resp: dict[str, t.Any]) -> EngineResults:
-    res = EngineResults()
+def _parse_images(json_resp) -> EngineResults:
+    result_list = EngineResults()
 
     for result in json_resp["results"]:
-        item = res.types.LegacyResult(
-            template="images.html",
-            url=result["url"],
-            title=result["title"],
-            source=result["source"],
-            img_src=result["properties"]["url"],
-            thumbnail_src=result["thumbnail"]["src"],
-        )
-        res.add(item)
+        item = {
+            'url': result['url'],
+            'title': result['title'],
+            'content': result['description'],
+            'template': 'images.html',
+            'resolution': result['properties']['format'],
+            'source': result['source'],
+            'img_src': result['properties']['url'],
+            'thumbnail_src': result['thumbnail']['src'],
+        }
+        result_list.append(item)
 
-    return res
+    return result_list
 
 
-def _parse_videos(json_resp: dict[str, t.Any]) -> EngineResults:
-    res = EngineResults()
+def _parse_videos(json_resp) -> EngineResults:
    result_list = EngineResults()
 
     for result in json_resp["results"]:
-        item = res.types.LegacyResult(
-            template="videos.html",
-            url=result["url"],
-            title=result["title"],
-            content=result["description"],
-            length=result["video"]["duration"],
-            duration=result["video"]["duration"],
-            publishedDate=_extract_published_date(result["age"]),
-        )
-        if result["thumbnail"] is not None:
-            item["thumbnail"] = result["thumbnail"]["src"]
-        iframe_src = get_embeded_stream_url(result["url"])
+
+        url = result['url']
+        item = {
+            'url': url,
+            'title': result['title'],
+            'content': result['description'],
+            'template': 'videos.html',
+            'length': result['video']['duration'],
+            'duration': result['video']['duration'],
+            'publishedDate': _extract_published_date(result['age']),
+        }
+
+        if result['thumbnail'] is not None:
+            item['thumbnail'] = result['thumbnail']['src']
+
+        iframe_src = get_embeded_stream_url(url)
         if iframe_src:
-            item["iframe_src"] = iframe_src
+            item['iframe_src'] = iframe_src
 
-        res.add(item)
+        result_list.append(item)
 
-    return res
+    return result_list
 
 
 def fetch_traits(engine_traits: EngineTraits):
@@ -416,31 +436,34 @@ def fetch_traits(engine_traits: EngineTraits):
 
     engine_traits.custom["ui_lang"] = {}
 
+    headers = {
+        'Accept-Encoding': 'gzip, deflate',
+    }
     lang_map = {'no': 'nb'}  # norway
 
     # languages (UI)
 
-    resp = get('https://search.brave.com/settings')
+    resp = get('https://search.brave.com/settings', headers=headers)
 
-    if not resp.ok:
+    if not resp.ok:  # type: ignore
         print("ERROR: response from Brave is not OK.")
-    dom = html.fromstring(resp.text)
+    dom = html.fromstring(resp.text)  # type: ignore
 
-    for option in dom.xpath("//section//option[@value='en-us']/../option"):
+    for option in dom.xpath('//section//option[@value="en-us"]/../option'):
 
-        ui_lang = option.get("value")
+        ui_lang = option.get('value')
         try:
-            l = babel.Locale.parse(ui_lang, sep="-")
+            l = babel.Locale.parse(ui_lang, sep='-')
             if l.territory:
-                sxng_tag = region_tag(babel.Locale.parse(ui_lang, sep="-"))
+                sxng_tag = region_tag(babel.Locale.parse(ui_lang, sep='-'))
             else:
-                sxng_tag = language_tag(babel.Locale.parse(ui_lang, sep="-"))
+                sxng_tag = language_tag(babel.Locale.parse(ui_lang, sep='-'))
 
         except babel.UnknownLocaleError:
             print("ERROR: can't determine babel locale of Brave's (UI) language %s" % ui_lang)
             continue
 
-        conflict = engine_traits.custom["ui_lang"].get(sxng_tag)  # type: ignore
+        conflict = engine_traits.custom["ui_lang"].get(sxng_tag)
         if conflict:
             if conflict != ui_lang:
                 print("CONFLICT: babel %s --> %s, %s" % (sxng_tag, conflict, ui_lang))
@@ -449,26 +472,26 @@ def fetch_traits(engine_traits: EngineTraits):
|
|||||||
|
|
||||||
# search regions of brave
|
# search regions of brave
|
||||||
|
|
||||||
resp = get("https://cdn.search.brave.com/serp/v2/_app/immutable/chunks/parameters.734c106a.js")
|
resp = get('https://cdn.search.brave.com/serp/v2/_app/immutable/chunks/parameters.734c106a.js', headers=headers)
|
||||||
|
|
||||||
if not resp.ok:
|
if not resp.ok: # type: ignore
|
||||||
print("ERROR: response from Brave is not OK.")
|
print("ERROR: response from Brave is not OK.")
|
||||||
|
|
||||||
country_js = resp.text[resp.text.index("options:{all") + len("options:") :]
|
country_js = resp.text[resp.text.index("options:{all") + len('options:') :] # type: ignore
|
||||||
country_js = country_js[: country_js.index("},k={default")]
|
country_js = country_js[: country_js.index("},k={default")]
|
||||||
country_tags = js_obj_str_to_python(country_js)
|
country_tags = js_variable_to_python(country_js)
|
||||||
|
|
||||||
for k, v in country_tags.items():
|
for k, v in country_tags.items():
|
||||||
if k == "all":
|
if k == 'all':
|
||||||
engine_traits.all_locale = "all"
|
engine_traits.all_locale = 'all'
|
||||||
continue
|
continue
|
||||||
country_tag = v["value"]
|
country_tag = v['value']
|
||||||
|
|
||||||
# add official languages of the country ..
|
# add official languages of the country ..
|
||||||
for lang_tag in babel.languages.get_official_languages(country_tag, de_facto=True):
|
for lang_tag in babel.languages.get_official_languages(country_tag, de_facto=True):
|
||||||
lang_tag = lang_map.get(lang_tag, lang_tag)
|
lang_tag = lang_map.get(lang_tag, lang_tag)
|
||||||
sxng_tag = region_tag(babel.Locale.parse("%s_%s" % (lang_tag, country_tag.upper())))
|
sxng_tag = region_tag(babel.Locale.parse('%s_%s' % (lang_tag, country_tag.upper())))
|
||||||
# print("%-20s: %s <-- %s" % (v["label"], country_tag, sxng_tag))
|
# print("%-20s: %s <-- %s" % (v['label'], country_tag, sxng_tag))
|
||||||
|
|
||||||
conflict = engine_traits.regions.get(sxng_tag)
|
conflict = engine_traits.regions.get(sxng_tag)
|
||||||
if conflict:
|
if conflict:
|
||||||
|
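Side note on the helper rename visible above: `js_obj_str_to_python` (previously `js_variable_to_python`) parses a JavaScript object literal into Python data. A rough sketch with a shortened, hypothetical version of the extracted `country_js` string:

from searx.utils import js_obj_str_to_python

# hypothetical, abbreviated form of the "options:{all ...}" literal
country_js = '{all:{value:"all",label:"All regions"},us:{value:"us",label:"United States"}}'
country_tags = js_obj_str_to_python(country_js)
# -> {'all': {'value': 'all', 'label': 'All regions'},
#    'us': {'value': 'us', 'label': 'United States'}}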
@@ -23,14 +23,14 @@ paging = True
# search-url
base_url = 'https://www.deviantart.com'

results_xpath = '//div[@class="V_S0t_"]/div/div/a'
results_xpath = '//div[@class="_2pZkk"]/div/div/a'
url_xpath = './@href'
thumbnail_src_xpath = './div/img/@src'
img_src_xpath = './div/img/@srcset'
title_xpath = './@aria-label'
premium_xpath = '../div/div/div/text()'
premium_keytext = 'Watch the artist to view this deviation'
cursor_xpath = '(//a[@class="vQ2brP"]/@href)[last()]'
cursor_xpath = '(//a[@class="_1OGeq"]/@href)[last()]'


def request(query, params):
@@ -1,63 +0,0 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
"""Devicons (icons)"""

import typing as t

from searx.result_types import EngineResults

if t.TYPE_CHECKING:
from searx.extended_types import SXNG_Response
from searx.search.processors.online import OnlineParams


about = {
"website": "https://devicon.dev/",
"wikidata_id": None,
"official_api_documentation": None,
"use_official_api": True,
"results": "JSON",
}

cdn_base_url = "https://cdn.jsdelivr.net/gh/devicons/devicon@latest"
categories = ["images", "icons"]


def request(query: str, params: "OnlineParams"):
params["url"] = f"{cdn_base_url}/devicon.json"
params["query"] = query
return params


def response(resp: "SXNG_Response") -> EngineResults:
res = EngineResults()
query_parts = resp.search_params["query"].lower().split(" ")

def is_result_match(result: dict[str, t.Any]) -> bool:
for part in query_parts:
if part in result["name"]:
return True

for tag in result["altnames"] + result["tags"]:
if part in tag:
return True

return False

filtered_results = filter(is_result_match, resp.json())
for result in filtered_results:
for image_type in result["versions"]["svg"]:
img_src = f"{cdn_base_url}/icons/{result['name']}/{result['name']}-{image_type}.svg"
res.add(
res.types.LegacyResult(
{
"template": "images.html",
"url": img_src,
"title": result["name"],
"content": f"Base color: {result['color']}",
"img_src": img_src,
"img_format": "SVG",
}
)
)

return res
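The `is_result_match` filter above accepts a result as soon as any query part is a substring of its name, altnames, or tags. A minimal sketch with a made-up devicon record:

result = {"name": "react", "altnames": ["reactjs"], "tags": ["javascript", "framework"]}
query_parts = "react framework".split(" ")
# "react" is a substring of result["name"], so the result matches
print(any(part in result["name"] for part in query_parts))  # True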
@@ -407,7 +407,7 @@ def fetch_traits(engine_traits: EngineTraits):

"""
# pylint: disable=too-many-branches, too-many-statements, disable=import-outside-toplevel
from searx.utils import js_obj_str_to_python
from searx.utils import js_variable_to_python

# fetch regions

@@ -455,7 +455,7 @@ def fetch_traits(engine_traits: EngineTraits):

js_code = extr(resp.text, 'languages:', ',regions')  # type: ignore

languages: dict[str, str] = js_obj_str_to_python(js_code)
languages = js_variable_to_python(js_code)
for eng_lang, name in languages.items():

if eng_lang == 'wt_WT':
@@ -1,52 +0,0 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
"""Grokipedia (general)"""

from urllib.parse import urlencode
from searx.utils import html_to_text
from searx.result_types import EngineResults

about = {
"website": 'https://grokipedia.com',
"wikidata_id": "Q136410803",
"official_api_documentation": None,
"use_official_api": False,
"require_api_key": False,
"results": "JSON",
}

base_url = "https://grokipedia.com/api/full-text-search"
categories = ['general']
paging = True
results_per_page = 10


def request(query, params):

start_index = (params["pageno"] - 1) * results_per_page

query_params = {
"query": query,
"limit": results_per_page,
"offset": start_index,
}

params["url"] = f"{base_url}?{urlencode(query_params)}"

return params


def response(resp) -> EngineResults:
results = EngineResults()
search_res = resp.json()

for item in search_res["results"]:

results.add(
results.types.MainResult(
url='https://grokipedia.com/page/' + item["slug"],
title=item["title"],
content=html_to_text(item["snippet"]),
)
)

return results
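For the Grokipedia engine above, the offset is `pageno` minus one, times `results_per_page`; page 2 of a query should therefore produce a URL along these lines (a sketch, not captured from a live request):

params = request("open source", {"pageno": 2})
# params["url"] ->
# 'https://grokipedia.com/api/full-text-search?query=open+source&limit=10&offset=10'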
@@ -6,7 +6,6 @@ from urllib.parse import urlencode
from dateutil.relativedelta import relativedelta

from flask_babel import gettext
from searx.utils import html_to_text

# Engine metadata
about = {
@@ -76,7 +75,6 @@ def response(resp):
object_id = hit["objectID"]
points = hit.get("points") or 0
num_comments = hit.get("num_comments") or 0
content = hit.get("url") or html_to_text(hit.get("comment_text")) or html_to_text(hit.get("story_text"))

metadata = ""
if points != 0 or num_comments != 0:
@@ -85,7 +83,7 @@ def response(resp):
{
"title": hit.get("title") or f"{gettext('author')}: {hit['author']}",
"url": f"https://news.ycombinator.com/item?id={object_id}",
"content": content,
"content": hit.get("url") or hit.get("comment_text") or hit.get("story_text") or "",
"metadata": metadata,
"author": hit["author"],
"publishedDate": datetime.fromtimestamp(hit["created_at_i"]),
@@ -31,7 +31,7 @@ paging = True
time_range_support = True

# base_url can be overwritten by a list of URLs in the settings.yml
base_url: list[str] | str = []
base_url: list | str = []


def init(_):
264
searx/engines/mullvad_leta.py
Normal file
@@ -0,0 +1,264 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
"""Mullvad Leta is a search engine proxy. Currently Leta only offers text
search results, not image, news, or any other type of search result. Leta acts
as a proxy to Google and Brave search results. You can select which backend
search engine you wish to use; see :py:obj:`leta_engine`.

.. hint::

   Leta caches each search for up to 30 days. For example, if you use search
   terms like ``news``, contrary to your intention you'll get very old results!


Configuration
=============

The engine has the following additional settings:

- :py:obj:`leta_engine` (:py:obj:`LetaEnginesType`)

You can configure one Leta engine for Google and one for Brave:

.. code:: yaml

  - name: mullvadleta
    engine: mullvad_leta
    leta_engine: google
    shortcut: ml

  - name: mullvadleta brave
    engine: mullvad_leta
    network: mullvadleta  # use network from engine "mullvadleta" configured above
    leta_engine: brave
    shortcut: mlb

Implementations
===============

"""
import typing as t

from urllib.parse import urlencode
import babel
from httpx import Response
from lxml import html
from searx.enginelib.traits import EngineTraits
from searx.locales import get_official_locales, language_tag, region_tag
from searx.utils import eval_xpath_list
from searx.result_types import EngineResults, MainResult

search_url = "https://leta.mullvad.net"

# about
about = {
"website": search_url,
"wikidata_id": 'Q47008412',  # the Mullvad id - not leta, but related
"official_api_documentation": 'https://leta.mullvad.net/faq',
"use_official_api": False,
"require_api_key": False,
"results": 'HTML',
}

# engine dependent config
categories = ["general", "web"]
paging = True
max_page = 10
time_range_support = True
time_range_dict = {
"day": "d",
"week": "w",
"month": "m",
"year": "y",
}

LetaEnginesType = t.Literal["google", "brave"]
"""Engine types supported by mullvadleta."""

leta_engine: LetaEnginesType = "google"
"""Select Leta's engine type from :py:obj:`LetaEnginesType`."""


def init(_):
l = t.get_args(LetaEnginesType)
if leta_engine not in l:
raise ValueError(f"leta_engine '{leta_engine}' is invalid, use one of {', '.join(l)}")


class DataNodeQueryMetaDataIndices(t.TypedDict):
"""Indices into query metadata."""

success: int
q: int  # pylint: disable=invalid-name
country: int
language: int
lastUpdated: int
engine: int
items: int
infobox: int
news: int
timestamp: int
altered: int
page: int
next: int  # if -1, no more results are available
previous: int


class DataNodeResultIndices(t.TypedDict):
"""Indices into query results data."""

link: int
snippet: int
title: int
favicon: int


def request(query: str, params: dict):
params["method"] = "GET"
args = {
"q": query,
"engine": leta_engine,
"x-sveltekit-invalidated": "001",  # hardcoded from all requests seen
}

country = traits.get_region(params.get("searxng_locale"), traits.all_locale)  # type: ignore
if country:
args["country"] = country

language = traits.get_language(params.get("searxng_locale"), traits.all_locale)  # type: ignore
if language:
args["language"] = language

if params["time_range"] in time_range_dict:
args["lastUpdated"] = time_range_dict[params["time_range"]]

if params["pageno"] > 1:
args["page"] = params["pageno"]

params["url"] = f"{search_url}/search/__data.json?{urlencode(args)}"

return params


def response(resp: Response) -> EngineResults:
json_response = resp.json()

nodes = json_response["nodes"]
# 0: is None
# 1: has "connected=True", not useful
# 2: query results within "data"

data_nodes = nodes[2]["data"]
# Instead of a nested object structure, all objects are flattened into a
# list; the first object in data_nodes provides indices into data_nodes
# to access each search result (which is itself an object of more indices).
#
# Read the relevant TypedDict definitions for details.

query_meta_data: DataNodeQueryMetaDataIndices = data_nodes[0]

query_items_indices = query_meta_data["items"]

results = EngineResults()
for idx in data_nodes[query_items_indices]:
query_item_indices: DataNodeResultIndices = data_nodes[idx]
results.add(
MainResult(
url=data_nodes[query_item_indices["link"]],
title=data_nodes[query_item_indices["title"]],
content=data_nodes[query_item_indices["snippet"]],
)
)

return results
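To illustrate the flattened `__data.json` layout the comments above describe: every field of a record holds an index back into `data_nodes`. A minimal, hypothetical payload and its resolution:

data_nodes = [
    {"items": 1},                            # index 0: query metadata (trimmed)
    [2],                                     # index 1: list of result indices
    {"link": 3, "title": 4, "snippet": 5},   # index 2: one result's indices
    "https://example.org",                   # index 3
    "Example title",                         # index 4
    "Example snippet",                       # index 5
]
url = data_nodes[data_nodes[2]["link"]]  # -> 'https://example.org'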
def fetch_traits(engine_traits: EngineTraits) -> None:
"""Fetch languages and regions from Mullvad-Leta"""

def extract_table_data(table):
for row in table.xpath(".//tr")[2:]:
cells = row.xpath(".//td | .//th")  # includes headers and data
if len(cells) > 1:  # ensure the column exists
cell0 = cells[0].text_content().strip()
cell1 = cells[1].text_content().strip()
yield [cell0, cell1]

# pylint: disable=import-outside-toplevel
# see https://github.com/searxng/searxng/issues/762
from searx.network import get as http_get

# pylint: enable=import-outside-toplevel

resp = http_get(f"{search_url}/documentation")
if not isinstance(resp, Response):
print("ERROR: failed to get response from mullvad-leta. Are you connected to the VPN?")
return
if not resp.ok:
print("ERROR: response from mullvad-leta is not OK. Are you connected to the VPN?")
return

dom = html.fromstring(resp.text)

# There are 4 HTML tables on the documentation page for extracting information:
# 0. Keyboard Shortcuts
# 1. Query Parameters (shoutout to Mullvad for accessible docs for integration)
# 2. Country Codes [Country, Code]
# 3. Language Codes [Language, Code]
tables = eval_xpath_list(dom.body, "//table")
if tables is None or len(tables) <= 0:
print("ERROR: could not find any tables. Was the page updated?")

language_table = tables[3]
lang_map = {
"zh-hant": "zh_Hant",
"zh-hans": "zh_Hans",
"jp": "ja",
}

for language, code in extract_table_data(language_table):

locale_tag = lang_map.get(code, code).replace("-", "_")  # type: ignore
try:
locale = babel.Locale.parse(locale_tag)
except babel.UnknownLocaleError:
print(f"ERROR: Mullvad-Leta language {language} ({code}) is unknown by babel")
continue

sxng_tag = language_tag(locale)
engine_traits.languages[sxng_tag] = code

country_table = tables[2]
country_map = {
"cn": "zh-CN",
"hk": "zh-HK",
"jp": "ja-JP",
"my": "ms-MY",
"tw": "zh-TW",
"uk": "en-GB",
"us": "en-US",
}

for country, code in extract_table_data(country_table):

sxng_tag = country_map.get(code)
if sxng_tag:
engine_traits.regions[sxng_tag] = code
continue

try:
locale = babel.Locale.parse(f"{code.lower()}_{code.upper()}")
except babel.UnknownLocaleError:
locale = None

if locale:
engine_traits.regions[region_tag(locale)] = code
continue

official_locales = get_official_locales(code, engine_traits.languages.keys(), regional=True)
if not official_locales:
print(f"ERROR: Mullvad-Leta country '{code}' ({country}) could not be mapped as expected.")
continue

for locale in official_locales:
engine_traits.regions[region_tag(locale)] = code
@@ -15,7 +15,7 @@ from searx.utils import (
extr,
html_to_text,
parse_duration_string,
js_obj_str_to_python,
js_variable_to_python,
get_embeded_stream_url,
)

@@ -125,7 +125,7 @@ def parse_images(data):

match = extr(data, '<script>var imageSearchTabData=', '</script>')
if match:
json = js_obj_str_to_python(match.strip())
json = js_variable_to_python(match.strip())
items = json.get('content', {}).get('items', [])

for item in items:
@@ -55,18 +55,15 @@ def response(resp):
if result['type'] == 'story':
continue

main_image = result['images']['orig']
results.append(
{
'template': 'images.html',
'url': result.get('link') or f"{base_url}/pin/{result['id']}/",
'url': result['link'] or f"{base_url}/pin/{result['id']}/",
'title': result.get('title') or result.get('grid_title'),
'content': (result.get('rich_summary') or {}).get('display_description') or "",
'img_src': main_image['url'],
'img_src': result['images']['orig']['url'],
'thumbnail_src': result['images']['236x']['url'],
'source': (result.get('rich_summary') or {}).get('site_name'),
'resolution': f"{main_image['width']}x{main_image['height']}",
'author': f"{result['pinner'].get('full_name')} ({result['pinner']['username']})",
}
)

@@ -72,7 +72,7 @@ categories = []
paging = True

# search-url
backend_url: list[str] | str = []
backend_url: list[str] | str | None = None
"""Piped-Backend_: The core component behind Piped. The value is a URL or a
list of URLs. In the latter case an instance will be selected randomly. For a
complete list of official instances see Piped-Instances (`JSON
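The random selection mentioned in the docstring above is the usual SearXNG pattern for multi-instance engines; roughly (a sketch, not the verbatim implementation):

import random

def pick_backend_url() -> str:
    # pick one instance per request when a list is configured
    return backend_url if isinstance(backend_url, str) else random.choice(backend_url)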
@@ -20,7 +20,7 @@ categories = ['images']

# Search URL
base_url = "https://www.pixiv.net/ajax/search/illustrations"
pixiv_image_proxies: list[str] = []
pixiv_image_proxies: list = []


def request(query, params):
@@ -89,9 +89,6 @@ time_range_support = True
send_accept_language_header = True
categories = ["general", "web"]  # general, images, videos, news

# HTTP2 requests immediately get blocked by a CAPTCHA
enable_http2 = False

search_type = "search"
"""must be any of ``search``, ``images``, ``videos``, ``news``"""

@@ -140,7 +137,7 @@ def _get_request_id(query, params):
if l.territory:
headers['Accept-Language'] = f"{l.language}-{l.territory},{l.language};" "q=0.9,*;" "q=0.5"

resp = get(url, headers=headers, timeout=5)
resp = get(url, headers=headers)

for line in resp.text.split("\n"):
if "window.searchId = " in line:
@@ -64,7 +64,7 @@ def _get_algolia_api_url():
return __CACHED_API_URL

# fake request to extract api url
resp = get(f"{pdia_base_url}/search/?q=", timeout=3)
resp = get(f"{pdia_base_url}/search/?q=")
if resp.status_code != 200:
raise LookupError("Failed to fetch config location (and as such the API url) for PDImageArchive")
pdia_config_filepart = extr(resp.text, pdia_config_start, pdia_config_end)
@@ -73,7 +73,7 @@ def request(query: str, params: "OnlineParams") -> None:
)
esearch_url = f"{eutils_api}/esearch.fcgi?{args}"
# DTD: https://eutils.ncbi.nlm.nih.gov/eutils/dtd/20060628/esearch.dtd
esearch_resp: "SXNG_Response" = get(esearch_url, timeout=3)
esearch_resp: "SXNG_Response" = get(esearch_url)
pmids_results = etree.XML(esearch_resp.content)
pmids: list[str] = [i.text for i in pmids_results.xpath("//eSearchResult/IdList/Id")]

@@ -53,7 +53,6 @@ from searx.exceptions import (
SearxEngineAPIException,
SearxEngineTooManyRequestsException,
SearxEngineCaptchaException,
SearxEngineAccessDeniedException,
)
from searx.network import raise_for_httperror
from searx.enginelib.traits import EngineTraits
@@ -82,9 +81,6 @@ max_page = 5
"""5 pages maximum (``&p=5``): Trying to do more just results in an improper
redirect"""

# Otherwise Qwant will return 403 if not set
send_accept_language_header = True

qwant_categ = None
"""One of ``web-lite`` (or ``web``), ``news``, ``images`` or ``videos``"""

@@ -134,17 +130,17 @@ def request(query, params):

elif qwant_categ == 'images':

args['count'] = 50
args['locale'] = q_locale
args['safesearch'] = params['safesearch']
args['count'] = 50
args['tgp'] = 3
args['offset'] = (params['pageno'] - 1) * args['count']

else:  # web, news, videos

args['count'] = 10
args['locale'] = q_locale
args['safesearch'] = params['safesearch']
args['count'] = 10
args['llm'] = 'false'
args['tgp'] = 3
args['offset'] = (params['pageno'] - 1) * args['count']
@@ -188,12 +184,8 @@ def parse_web_api(resp):

results = []

# Try to load JSON result
# load JSON result
try:
search_results = loads(resp.text)
search_results = loads(resp.text)
except ValueError:
search_results = {}

data = search_results.get('data', {})

# check for an API error
@@ -203,8 +195,6 @@ def parse_web_api(resp):
raise SearxEngineTooManyRequestsException()
if search_results.get("data", {}).get("error_data", {}).get("captchaUrl") is not None:
raise SearxEngineCaptchaException()
if resp.status_code == 403:
raise SearxEngineAccessDeniedException()
msg = ",".join(data.get('message', ['unknown']))
raise SearxEngineAPIException(f"{msg} ({error_code})")

@@ -13,12 +13,23 @@ Configuration

You must configure the following settings:

- :py:obj:`base_url`
``base_url``:
- :py:obj:`mount_prefix`
Location where recoll-webui can be reached.
- :py:obj:`dl_prefix`
- :py:obj:`search_dir`

Example scenario:
``mount_prefix``:
Location where the file hierarchy is mounted on your *local* filesystem.

``dl_prefix``:
Location where the file hierarchy as indexed by recoll can be reached.

``search_dir``:
Part of the indexed file hierarchy to be searched, if empty the full domain is
searched.

Example
=======

Scenario:

#. Recoll indexes a local filesystem mounted in ``/export/documents/reference``,
#. the Recoll search interface can be reached at https://recoll.example.org/ and
@@ -26,131 +37,107 @@ Example scenario:

.. code:: yaml

base_url: https://recoll.example.org
base_url: https://recoll.example.org/
mount_prefix: /export/documents
dl_prefix: https://download.example.org
search_dir: ""
search_dir: ''

Implementations
===============

"""
import typing as t

from datetime import date, timedelta
from urllib.parse import urlencode
from json import loads
from urllib.parse import urlencode, quote
from searx.result_types import EngineResults
from searx.utils import html_to_text

if t.TYPE_CHECKING:
from searx.extended_types import SXNG_Response
from searx.search.processors import OnlineParams


# about
about = {
"website": None,
"wikidata_id": "Q15735774",
"wikidata_id": 'Q15735774',
"official_api_documentation": "https://www.lesbonscomptes.com/recoll/",
"official_api_documentation": 'https://www.lesbonscomptes.com/recoll/',
"use_official_api": True,
"require_api_key": False,
"results": "JSON",
"results": 'JSON',
}

# engine dependent config
paging = True
time_range_support = True

base_url: str = ""
# parameters from settings.yml
"""Location where recoll-webui can be reached."""
base_url = None
search_dir = ''
mount_prefix = None
dl_prefix = None

mount_prefix: str = ""
# embedded
"""Location where the file hierarchy is mounted on your *local* filesystem."""
embedded_url = '<{ttype} controls height="166px" ' + 'src="{url}" type="{mtype}"></{ttype}>'

dl_prefix: str = ""
"""Location where the file hierarchy as indexed by recoll can be reached."""

search_dir: str = ""
"""Part of the indexed file hierarchy to be searched, if empty the full domain is
searched."""

_s2i: dict[str | None, int] = {"day": 1, "week": 7, "month": 30, "year": 365}


def setup(engine_settings: dict[str, t.Any]) -> bool:
# helper functions
"""Initialization of the Recoll engine, checks if the mandatory values are
def get_time_range(time_range):
configured.
sw = {'day': 1, 'week': 7, 'month': 30, 'year': 365}  # pylint: disable=invalid-name
"""
missing: list[str] = []
for cfg_name in ["base_url", "mount_prefix", "dl_prefix"]:
if not engine_settings.get(cfg_name):
missing.append(cfg_name)
if missing:
logger.error("missing recoll configuration: %s", missing)
return False

if engine_settings["base_url"].endswith("/"):
offset = sw.get(time_range, 0)
engine_settings["base_url"] = engine_settings["base_url"][:-1]
return True


def search_after(time_range: str | None) -> str:
offset = _s2i.get(time_range, 0)
if not offset:
return ""
return ''

return (date.today() - timedelta(days=offset)).isoformat()


def request(query: str, params: "OnlineParams") -> None:
# do search-request
args = {
def request(query, params):
"query": query,
search_after = get_time_range(params['time_range'])
"page": params["pageno"],
search_url = base_url + 'json?{query}&highlight=0'
"after": search_after(params["time_range"]),
params['url'] = search_url.format(
"dir": search_dir,
query=urlencode({'query': query, 'page': params['pageno'], 'after': search_after, 'dir': search_dir})
"highlight": 0,
)
}
params["url"] = f"{base_url}/json?{urlencode(args)}"
return params


def response(resp: "SXNG_Response") -> EngineResults:
# get response from search-request
def response(resp):
results = []

res = EngineResults()
response_json = loads(resp.text)
json_data = resp.json()

if not json_data:
if not response_json:
return res
return []

for result in json_data.get("results", []):
for result in response_json.get('results', []):
title = result['label']
url = result['url'].replace('file://' + mount_prefix, dl_prefix)
content = '{}'.format(result['snippet'])

url = result.get("url", "").replace("file://" + mount_prefix, dl_prefix)
# append result
item = {'url': url, 'title': title, 'content': content, 'template': 'files.html'}

mtype = subtype = result.get("mtype", "")
if result['size']:
if mtype:
item['size'] = int(result['size'])
mtype, subtype = (mtype.split("/", 1) + [""])[:2]
for parameter in ['filename', 'abstract', 'author', 'mtype', 'time']:
if result[parameter]:
item[parameter] = result[parameter]

# facilitate preview support for known mime types
thumbnail = embedded = ""
if 'mtype' in result and '/' in result['mtype']:
if mtype in ["audio", "video"]:
(mtype, subtype) = result['mtype'].split('/')
embedded = url
item['mtype'] = mtype
if mtype in ["image"] and subtype in ["bmp", "gif", "jpeg", "png"]:
item['subtype'] = subtype
thumbnail = url

# remove HTML from snippet
if mtype in ['audio', 'video']:
content = html_to_text(result.get("snippet", ""))
item['embedded'] = embedded_url.format(
ttype=mtype, url=quote(url.encode('utf8'), '/:'), mtype=result['mtype']
)

res.add(
if mtype in ['image'] and subtype in ['bmp', 'gif', 'jpeg', 'png']:
res.types.File(
item['thumbnail'] = url
title=result.get("label", ""),
url=url,
results.append(item)
content=content,
size=result.get("size", ""),
if 'nres' in response_json:
filename=result.get("filename", ""),
results.append({'number_of_results': response_json['nres']})
abstract=result.get("abstract", ""),
author=result.get("author", ""),
return results
mtype=mtype,
subtype=subtype,
time=result.get("time", ""),
embedded=embedded,
thumbnail=thumbnail,
)
)
return res
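The `mount_prefix`/`dl_prefix` pair drives a plain string rewrite from the indexed `file://` URL to a downloadable one, as both `response()` variants above do. With the example settings from the docstring:

mount_prefix = "/export/documents"
dl_prefix = "https://download.example.org"
url = "file:///export/documents/reference/manual.pdf"
print(url.replace("file://" + mount_prefix, dl_prefix))
# -> 'https://download.example.org/reference/manual.pdf'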
@@ -17,6 +17,7 @@ The engine has the following additional settings:
shortcut: reu
sort_order: "relevance"

Implementations
===============

@@ -25,7 +26,6 @@ Implementations
from json import dumps
from urllib.parse import quote_plus
from datetime import datetime, timedelta
from dateutil import parser

from searx.result_types import EngineResults

@@ -76,62 +76,15 @@ def request(query, params):
def response(resp) -> EngineResults:
res = EngineResults()

resp_json = resp.json()
for result in resp.json().get("result", {}).get("articles", []):
if not resp_json.get("result"):
return res

for result in resp_json["result"].get("articles", []):
res.add(
res.types.MainResult(
url=base_url + result["canonical_url"],
title=result["web"],
content=result["description"],
thumbnail=resize_url(result.get("thumbnail", {}), height=80),
thumbnail=result.get("thumbnail", {}).get("url", ""),
metadata=result.get("kicker", {}).get("name"),
publishedDate=parser.isoparse(result["display_time"]),
publishedDate=datetime.fromisoformat(result["display_time"]),
)
)
return res


def resize_url(thumbnail: dict[str, str], width: int = 0, height: int = 0) -> str:
"""Generates a URL for a Reuters thumbnail with the dimensions *width* and
*height*. If no URL can be generated from the *thumbnail data*, an empty
string will be returned.

width: default is *unset* (``0``)
Image width in pixels (negative values are ignored). If only width is
specified, the height matches the original aspect ratio.

height: default is *unset* (``0``)
Image height in pixels (negative values are ignored). If only height is
specified, the width matches the original aspect ratio.

The file size of a full-size image is usually several MB; when reduced to a
height of, for example, 80 points, only a few KB remain!

Fields of the *thumbnail data* (``result.articles.[<int>].thumbnail``):

thumbnail.url:
Is a full-size image (>MB).

thumbnail.width & .height:
Dimensions of the full-size image.

thumbnail.resizer_url:
Reuters has a *resizer* `REST-API for the images`_; this is the URL of the
service. This URL includes the ``&auth`` argument, other arguments are
``&width=<int>`` and ``&height=<int>``.

.. _REST-API for the images:
https://dev.arcxp.com/photo-center/image-resizer/resizer-v2-how-to-transform-images/#query-parameters
"""

url = thumbnail.get("resizer_url")
if not url:
return ""
if int(width) > 0:
url += f"&width={int(width)}"
if int(height) > 0:
url += f"&height={int(height)}"
return url
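A short usage sketch of the new `resize_url()` helper, with a made-up resizer URL (the real one carries an ``&auth`` token, as the docstring notes):

thumbnail = {"resizer_url": "https://www.reuters.com/resizer/v2/ABC.jpg?auth=123"}
print(resize_url(thumbnail, height=80))
# -> 'https://www.reuters.com/resizer/v2/ABC.jpg?auth=123&height=80'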
@@ -66,7 +66,7 @@ def setup(engine_settings: dict[str, t.Any]) -> bool:
def get_ui_version() -> str:
ret_val: str = CACHE.get("X-S2-UI-Version")
if not ret_val:
resp = get(base_url, timeout=3)
resp = get(base_url)
if not resp.ok:
raise RuntimeError("Can't determine Semantic Scholar UI version")

@@ -27,7 +27,7 @@ base_url = 'https://search.seznam.cz/'


def request(query, params):
response_index = get(base_url, headers=params['headers'], raise_for_httperror=True, timeout=3)
response_index = get(base_url, headers=params['headers'], raise_for_httperror=True)
dom = html.fromstring(response_index.text)

url_params = {
@@ -124,7 +124,7 @@ def get_client_id() -> str | None:

client_id = ""
url = "https://soundcloud.com"
resp = http_get(url, timeout=3)
resp = http_get(url, timeout=10)

if not resp.ok:
logger.error("init: GET %s failed", url)
@@ -1,90 +0,0 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
"""Engine to search in the collaborative software platform SourceHut_.

.. _SourceHut: https://sourcehut.org/

Configuration
=============

You can configure the following setting:

- :py:obj:`sourcehut_sort_order`

.. code:: yaml

  - name: sourcehut
    shortcut: srht
    engine: sourcehut
    # sourcehut_sort_order: longest-active

Implementations
===============

"""

import typing as t

from urllib.parse import urlencode
from lxml import html

from searx.utils import eval_xpath, eval_xpath_list, extract_text, searxng_useragent
from searx.result_types import EngineResults

if t.TYPE_CHECKING:
from searx.extended_types import SXNG_Response
from searx.search.processors import OnlineParams


about = {
"website": "https://sourcehut.org",
"wikidata_id": "Q78514485",
"official_api_documentation": "https://man.sr.ht/",
"use_official_api": False,
"require_api_key": False,
"results": "HTML",
}

categories = ["it", "repos"]
paging = True

base_url: str = "https://sr.ht/projects"
"""Browse public projects."""


sourcehut_sort_order: str = "recently-updated"
"""The sort order of the results. Possible values:

- ``recently-updated``
- ``longest-active``
"""


def request(query: str, params: "OnlineParams") -> None:

args = {"search": query, "page": params["pageno"], "sort": sourcehut_sort_order}
params["url"] = f"{base_url}?{urlencode(args)}"

# standard user agents are blocked by 'go-away', a foss bot detection tool
params["headers"]["User-Agent"] = searxng_useragent()


def response(resp: "SXNG_Response") -> EngineResults:

res = EngineResults()
doc = html.fromstring(resp.text)

for item in eval_xpath_list(doc, "(//div[@class='event-list'])[1]/div[contains(@class, 'event')]"):
res.add(
res.types.LegacyResult(
template="packages.html",
url=base_url + (extract_text(eval_xpath(item, "./h4/a[2]/@href")) or ""),
title=extract_text(eval_xpath(item, "./h4")),
package_name=extract_text(eval_xpath(item, "./h4/a[2]")),
content=extract_text(eval_xpath(item, "./p")),
maintainer=(extract_text(eval_xpath(item, "./h4/a[1]")) or "").removeprefix("~"),
tags=[
tag.removeprefix("#") for tag in eval_xpath_list(item, "./div[contains(@class, 'tags')]/a/text()")
],
)
)
return res
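With the defaults above, the SourceHut `request()` builds a URL like the following (a sketch, not taken from a live session):

params = {"pageno": 1, "headers": {}}
request("searxng", params)
# params["url"] -> 'https://sr.ht/projects?search=searxng&page=1&sort=recently-updated'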
@@ -404,10 +404,6 @@ def _get_image_result(result) -> dict[str, t.Any] | None:
|
|||||||
def response(resp):
|
def response(resp):
|
||||||
categ = startpage_categ.capitalize()
|
categ = startpage_categ.capitalize()
|
||||||
results_raw = '{' + extr(resp.text, f"React.createElement(UIStartpage.AppSerp{categ}, {{", '}})') + '}}'
|
results_raw = '{' + extr(resp.text, f"React.createElement(UIStartpage.AppSerp{categ}, {{", '}})') + '}}'
|
||||||
|
|
||||||
if resp.headers.get('Location', '').startswith("https://www.startpage.com/sp/captcha"):
|
|
||||||
raise SearxEngineCaptchaException()
|
|
||||||
|
|
||||||
results_json = loads(results_raw)
|
results_json = loads(results_raw)
|
||||||
results_obj = results_json.get('render', {}).get('presenter', {}).get('regions', {})
|
results_obj = results_json.get('render', {}).get('presenter', {}).get('regions', {})
|
||||||
|
|
||||||
|
|||||||
@@ -73,6 +73,7 @@ def request(query, params):
|
|||||||
params['headers'].update(
|
params['headers'].update(
|
||||||
{
|
{
|
||||||
'Connection': 'keep-alive',
|
'Connection': 'keep-alive',
|
||||||
|
'Accept-Encoding': 'gzip, defalte, br',
|
||||||
'Host': 'tineye.com',
|
'Host': 'tineye.com',
|
||||||
'DNT': '1',
|
'DNT': '1',
|
||||||
'TE': 'trailers',
|
'TE': 'trailers',
|
||||||
|
|||||||
@@ -1,208 +1,102 @@
|
|||||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||||
"""`Wikimedia Commons`_ is a collection of more than 120 millions freely usable
|
"""Wikimedia Commons (images)"""
|
||||||
media files to which anyone can contribute.
|
|
||||||
|
|
||||||
This engine uses the `MediaWiki query API`_, with which engines can be configured
|
|
||||||
for searching images, videos, audio, and other files in the Wikimedia.
|
|
||||||
|
|
||||||
.. _MediaWiki query API: https://commons.wikimedia.org/w/api.php?action=help&modules=query
|
|
||||||
.. _Wikimedia Commons: https://commons.wikimedia.org/
|
|
||||||
|
|
||||||
|
|
||||||
Configuration
|
|
||||||
=============
|
|
||||||
|
|
||||||
The engine has the following additional settings:
|
|
||||||
|
|
||||||
.. code:: yaml
|
|
||||||
|
|
||||||
- name: wikicommons.images
|
|
||||||
engine: wikicommons
|
|
||||||
wc_search_type: image
|
|
||||||
|
|
||||||
- name: wikicommons.videos
|
|
||||||
engine: wikicommons
|
|
||||||
wc_search_type: video
|
|
||||||
|
|
||||||
- name: wikicommons.audio
|
|
||||||
engine: wikicommons
|
|
||||||
wc_search_type: audio
|
|
||||||
|
|
||||||
- name: wikicommons.files
|
|
||||||
engine: wikicommons
|
|
||||||
wc_search_type: file
|
|
||||||
|
|
||||||
|
|
||||||
Implementations
|
|
||||||
===============
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
import typing as t
|
|
||||||
|
|
||||||
import datetime
|
import datetime
|
||||||
import pathlib
|
|
||||||
from urllib.parse import urlencode, unquote
|
from urllib.parse import urlencode
|
||||||
|
|
||||||
from searx.utils import html_to_text, humanize_bytes
|
from searx.utils import html_to_text, humanize_bytes
|
||||||
from searx.result_types import EngineResults
|
|
||||||
|
|
||||||
if t.TYPE_CHECKING:
|
|
||||||
from searx.extended_types import SXNG_Response
|
|
||||||
from searx.search.processors import OnlineParams
|
|
||||||
|
|
||||||
|
# about
|
||||||
about = {
|
about = {
|
||||||
"website": "https://commons.wikimedia.org/",
|
"website": 'https://commons.wikimedia.org/',
|
||||||
"wikidata_id": "Q565",
|
"wikidata_id": 'Q565',
|
||||||
"official_api_documentation": "https://commons.wikimedia.org/w/api.php",
|
"official_api_documentation": 'https://commons.wikimedia.org/w/api.php',
|
||||||
"use_official_api": True,
|
"use_official_api": True,
|
||||||
"require_api_key": False,
|
"require_api_key": False,
|
||||||
"results": "JSON",
|
"results": 'JSON',
|
||||||
}
|
}
|
||||||
|
categories = ['images']
|
||||||
|
search_type = 'images'
|
||||||
|
|
||||||
categories: list[str] = []
|
base_url = "https://commons.wikimedia.org"
|
||||||
|
search_prefix = (
|
||||||
|
'?action=query'
|
||||||
|
'&format=json'
|
||||||
|
'&generator=search'
|
||||||
|
'&gsrnamespace=6'
|
||||||
|
'&gsrprop=snippet'
|
||||||
|
'&prop=info|imageinfo'
|
||||||
|
'&iiprop=url|size|mime'
|
||||||
|
'&iiurlheight=180' # needed for the thumb url
|
||||||
|
)
|
||||||
paging = True
|
paging = True
|
||||||
number_of_results = 10
|
number_of_results = 10
|
||||||
|
|
||||||
wc_api_url = "https://commons.wikimedia.org/w/api.php"
|
search_types = {
|
||||||
wc_search_type: str = ""
|
'images': 'bitmap|drawing',
|
||||||
|
'videos': 'video',
|
||||||
SEARCH_TYPES: dict[str, str] = {
|
'audio': 'audio',
|
||||||
"image": "bitmap|drawing",
|
'files': 'multimedia|office|archive|3d',
|
||||||
"video": "video",
|
|
||||||
"audio": "audio",
|
|
||||||
"file": "multimedia|office|archive|3d",
|
|
||||||
}
|
}
|
||||||
# FileType = t.Literal["bitmap", "drawing", "video", "audio", "multimedia", "office", "archive", "3d"]
|
|
||||||
# FILE_TYPES = list(t.get_args(FileType))
|
|
||||||
|
|
||||||
|
|
||||||
def setup(engine_settings: dict[str, t.Any]) -> bool:
|
def request(query, params):
|
||||||
"""Initialization of the Wikimedia engine, checks if the value configured in
|
language = 'en'
|
||||||
:py:obj:`wc_search_type` is valid."""
|
if params['language'] != 'all':
|
||||||
|
language = params['language'].split('-')[0]
|
||||||
|
|
||||||
if engine_settings.get("wc_search_type") not in SEARCH_TYPES:
|
if search_type not in search_types:
|
||||||
logger.error(
|
raise ValueError(f"Unsupported search type: {search_type}")
|
||||||
"wc_search_type: %s isn't a valid file type (%s)",
|
|
||||||
engine_settings.get("wc_search_type"),
|
|
||||||
",".join(SEARCH_TYPES.keys()),
|
|
||||||
)
|
|
||||||
return False
|
|
||||||
return True
|
|
||||||
|
|
||||||
|
filetype = search_types[search_type]
|
||||||
|
|
||||||
def request(query: str, params: "OnlineParams") -> None:
|
|
||||||
uselang: str = "en"
|
|
||||||
if params["searxng_locale"] != "all":
|
|
||||||
uselang = params["searxng_locale"].split("-")[0]
|
|
||||||
filetype = SEARCH_TYPES[wc_search_type]
|
|
||||||
args = {
|
args = {
|
||||||
# https://commons.wikimedia.org/w/api.php
|
'uselang': language,
|
||||||
"format": "json",
|
'gsrlimit': number_of_results,
|
||||||
"uselang": uselang,
|
'gsroffset': number_of_results * (params["pageno"] - 1),
|
||||||
"action": "query",
|
'gsrsearch': f"filetype:{filetype} {query}",
|
||||||
# https://commons.wikimedia.org/w/api.php?action=help&modules=query
|
|
||||||
"prop": "info|imageinfo",
|
|
||||||
# generator (gsr optins) https://commons.wikimedia.org/w/api.php?action=help&modules=query%2Bsearch
|
|
||||||
"generator": "search",
|
|
||||||
"gsrnamespace": "6", # https://www.mediawiki.org/wiki/Help:Namespaces#Renaming_namespaces
|
|
||||||
"gsrprop": "snippet",
|
|
||||||
"gsrlimit": number_of_results,
|
|
||||||
"gsroffset": number_of_results * (params["pageno"] - 1),
|
|
||||||
"gsrsearch": f"filetype:{filetype} {query}",
|
|
||||||
# imageinfo: https://commons.wikimedia.org/w/api.php?action=help&modules=query%2Bimageinfo
|
|
||||||
"iiprop": "url|size|mime",
|
|
||||||
"iiurlheight": "180", # needed for the thumb url
|
|
||||||
}
|
}
|
||||||
params["url"] = f"{wc_api_url}?{urlencode(args, safe=':|')}"
|
|
||||||
|
params["url"] = f"{base_url}/w/api.php{search_prefix}&{urlencode(args, safe=':|')}"
|
||||||
|
return params
|
||||||
|
|
||||||
|
|
||||||
def response(resp: "SXNG_Response") -> EngineResults:
|
def response(resp):
|
||||||
|
results = []
|
||||||
|
json = resp.json()
|
||||||
|
|
||||||
res = EngineResults()
|
if not json.get("query", {}).get("pages"):
|
||||||
json_data = resp.json()
|
return results
|
||||||
pages = json_data.get("query", {}).get("pages", {}).values()
|
for item in json["query"]["pages"].values():
|
||||||
|
|
||||||
for item in pages:
|
|
||||||
|
|
||||||
if not item.get("imageinfo", []):
|
|
||||||
continue
|
|
||||||
imageinfo = item["imageinfo"][0]
|
imageinfo = item["imageinfo"][0]
|
||||||
|
title = item["title"].replace("File:", "").rsplit('.', 1)[0]
|
||||||
|
result = {
|
||||||
|
'url': imageinfo["descriptionurl"],
|
||||||
|
'title': title,
|
||||||
|
'content': html_to_text(item["snippet"]),
|
||||||
|
}
|
||||||
|
|
||||||
title: str = item["title"].replace("File:", "").rsplit(".", 1)[0]
|
if search_type == "images":
|
||||||
content = html_to_text(item["snippet"])
|
result['template'] = 'images.html'
|
||||||
|
result['img_src'] = imageinfo["url"]
|
||||||
|
result['thumbnail_src'] = imageinfo["thumburl"]
|
||||||
|
result['resolution'] = f'{imageinfo["width"]} x {imageinfo["height"]}'
|
||||||
|
else:
|
||||||
|
result['thumbnail'] = imageinfo["thumburl"]
|
||||||
|
|
||||||
url: str = imageinfo["descriptionurl"]
|
if search_type == "videos":
|
||||||
media_url: str = imageinfo["url"]
|
result['template'] = 'videos.html'
|
||||||
mimetype: str = imageinfo["mime"]
|
if imageinfo.get('duration'):
|
||||||
thumbnail: str = imageinfo["thumburl"]
|
result['length'] = datetime.timedelta(seconds=int(imageinfo['duration']))
|
||||||
size = imageinfo.get("size")
|
result['iframe_src'] = imageinfo['url']
|
||||||
if size:
|
elif search_type == "files":
|
||||||
size = humanize_bytes(size)
|
result['template'] = 'files.html'
|
||||||
|
result['metadata'] = imageinfo['mime']
|
||||||
|
result['size'] = humanize_bytes(imageinfo['size'])
|
||||||
|
elif search_type == "audio":
|
||||||
|
result['iframe_src'] = imageinfo['url']
|
||||||
|
|
||||||
duration = None
|
results.append(result)
|
||||||
seconds: str = imageinfo.get("duration")
|
|
||||||
if seconds:
|
|
||||||
try:
|
|
||||||
duration = datetime.timedelta(seconds=int(seconds))
|
|
||||||
except OverflowError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
if wc_search_type == "file":
|
return results
|
||||||
res.add(
|
|
||||||
res.types.File(
|
|
||||||
title=title,
|
|
||||||
url=url,
|
|
||||||
content=content,
|
|
||||||
size=size,
|
|
||||||
mimetype=mimetype,
|
|
||||||
filename=unquote(pathlib.Path(media_url).name),
|
|
||||||
embedded=media_url,
|
|
||||||
thumbnail=thumbnail,
|
|
||||||
)
|
|
||||||
)
|
|
||||||
continue
|
|
||||||
|
|
||||||
if wc_search_type == "image":
|
|
||||||
res.add(
|
|
||||||
res.types.LegacyResult(
|
|
||||||
template="images.html",
|
|
||||||
title=title,
|
|
||||||
url=url,
|
|
||||||
content=content,
|
|
||||||
img_src=imageinfo["url"],
|
|
||||||
thumbnail_src=thumbnail,
|
|
||||||
resolution=f"{imageinfo['width']} x {imageinfo['height']}",
|
|
||||||
img_format=imageinfo["mime"],
|
|
||||||
filesize=size,
|
|
||||||
)
|
|
||||||
)
|
|
||||||
continue
|
|
||||||
|
|
||||||
if wc_search_type == "video":
|
|
||||||
res.add(
|
|
||||||
res.types.LegacyResult(
|
|
||||||
template="videos.html",
|
|
||||||
title=title,
|
|
||||||
url=url,
|
|
||||||
content=content,
|
|
||||||
iframe_src=media_url,
|
|
||||||
length=duration,
|
|
||||||
)
|
|
||||||
)
|
|
||||||
continue
|
|
||||||
|
|
||||||
if wc_search_type == "audio":
|
|
||||||
res.add(
|
|
||||||
res.types.MainResult(
|
|
||||||
template="default.html",
|
|
||||||
title=title,
|
|
||||||
url=url,
|
|
||||||
content=content,
|
|
||||||
audio_src=media_url,
|
|
||||||
length=duration,
|
|
||||||
)
|
|
||||||
)
|
|
||||||
continue
|
|
||||||
|
|
||||||
return res
|
|
||||||
|
|||||||
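Note (reviewer aside, not part of the diff): the typed response() on the "-" side derives a
display filename from the media URL and converts the API's duration string defensively. A
self-contained sketch of just those two steps, with hypothetical values:

import datetime
import pathlib
from urllib.parse import unquote

# hypothetical media URL, as returned in imageinfo["url"]
media_url = "https://upload.wikimedia.org/wikipedia/commons/a/ab/Some%20file.ogg"

# filename: last path segment of the URL, percent-decoded
filename = unquote(pathlib.Path(media_url).name)   # 'Some file.ogg'

# duration: the API reports seconds as a string; values far out of range
# would overflow timedelta, hence the try/except in the diff above
duration = None
seconds = "3725"
if seconds:
    try:
        duration = datetime.timedelta(seconds=int(seconds))
    except OverflowError:
        pass

print(filename, duration)   # Some file.ogg 1:02:05
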
@@ -96,7 +96,7 @@ search_type = 'text'
 ``video`` are not yet implemented (Pull-Requests are welcome).
 """
 
-base_url: list[str] | str = []
+base_url: list[str] | str | None = None
 """The value is a URL or a list of URLs.  In the latter case the instance will be
 selected randomly.
 """

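Note (reviewer aside, not part of the diff): with base_url accepting either one URL or a
list of URLs, an engine can pick an instance per request. A minimal sketch of that
selection, under the assumption that the value comes straight from the engine settings:

import random

# hypothetical configuration value
base_url = ["https://wiki.example.org", "https://wiki-mirror.example.org"]

def pick_base_url(value):
    # a single string is used as-is; a list means "pick one at random"
    if isinstance(value, str):
        return value
    return random.choice(value)

print(pick_base_url(base_url))
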
@@ -28,20 +28,6 @@ search_type = ""
 base_url_web = 'https://yandex.com/search/site/'
 base_url_images = 'https://yandex.com/images/search'
 
-# Supported languages
-yandex_supported_langs = [
-    "ru",  # Russian
-    "en",  # English
-    "be",  # Belarusian
-    "fr",  # French
-    "de",  # German
-    "id",  # Indonesian
-    "kk",  # Kazakh
-    "tt",  # Tatar
-    "tr",  # Turkish
-    "uk",  # Ukrainian
-]
-
 results_xpath = '//li[contains(@class, "serp-item")]'
 url_xpath = './/a[@class="b-serp-item__title-link"]/@href'
 title_xpath = './/h3[@class="b-serp-item__title"]/a[@class="b-serp-item__title-link"]/span'

@@ -49,7 +35,7 @@ content_xpath = './/div[@class="b-serp-item__content"]//div[@class="b-serp-item_
 
 
 def catch_bad_response(resp):
-    if resp.headers.get('x-yandex-captcha') == 'captcha':
+    if resp.url.path.startswith('/showcaptcha'):
         raise SearxEngineCaptchaException()
 
 

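Note (reviewer aside, not part of the diff): the two captcha checks inspect different
things -- a response header on one side, the path of the final (possibly redirected)
URL on the other. A sketch of the path-based variant against an httpx-style response,
with a hypothetical URL:

import httpx

# simulate a response whose final URL is Yandex's captcha interstitial
resp = httpx.Response(
    status_code=200,
    request=httpx.Request("GET", "https://yandex.com/showcaptcha?retpath=x"),
)

# httpx parses the URL, so the path component is directly comparable
if resp.url.path.startswith("/showcaptcha"):
    print("captcha challenge detected")
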
@@ -62,10 +48,6 @@ def request(query, params):
         "searchid": "3131712",
     }
 
-    lang = params["language"].split("-")[0]
-    if lang in yandex_supported_langs:
-        query_params_web["lang"] = lang
-
    query_params_images = {
        "text": query,
        "uinfo": "sw-1920-sh-1080-ww-1125-wh-999",

@@ -30,7 +30,7 @@ import httpx
 if typing.TYPE_CHECKING:
     import searx.preferences
     import searx.results
-    from searx.search.processors import OnlineParamTypes
+    from searx.search.processors import ParamTypes
 
 
 class SXNG_Request(flask.Request):

@@ -83,4 +83,4 @@ class SXNG_Response(httpx.Response):
     """
 
     ok: bool
-    search_params: "OnlineParamTypes"
+    search_params: "ParamTypes"

@@ -17,6 +17,10 @@
 
 """
 
+# Struct fields aren't discovered in Python 3.14
+# - https://github.com/searxng/searxng/issues/5284
+from __future__ import annotations
+
 import typing as t
 
 import os

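Note (reviewer aside, not part of the diff): the recurring "+from __future__ import
annotations" lines below all cite the same upstream issue -- without stringified
annotations, msgspec does not discover Struct fields on Python 3.14 (see the linked
issue #5284). A small illustration of the field discovery these modules depend on:

from __future__ import annotations   # keep annotations as plain strings

import msgspec
from msgspec import structs

class Suggestion(msgspec.Struct, kw_only=True):
    # fields are discovered from the class annotations
    value: str
    weight: int = 0

print([f.name for f in structs.fields(Suggestion)])   # ['value', 'weight']
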
@@ -1,6 +1,9 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
 # pylint: disable=missing-module-docstring
 
+# Struct fields aren't discovered in Python 3.14
+# - https://github.com/searxng/searxng/issues/5284
+from __future__ import annotations
 
 import pathlib
 import msgspec

@@ -1,6 +1,9 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
 """Implementations for a favicon proxy"""
 
+# Struct fields aren't discovered in Python 3.14
+# - https://github.com/searxng/searxng/issues/5284
+from __future__ import annotations
 
 from typing import Callable
 

@@ -23,7 +23,6 @@ __all__ = [
     "WeatherAnswer",
     "Code",
     "Paper",
-    "File",
 ]
 
 import typing as t

@@ -34,7 +33,6 @@ from .answer import AnswerSet, Answer, Translations, WeatherAnswer
 from .keyvalue import KeyValue
 from .code import Code
 from .paper import Paper
-from .file import File
 
 
 class ResultList(list[Result | LegacyResult], abc.ABC):

@@ -49,7 +47,6 @@ class ResultList(list[Result | LegacyResult], abc.ABC):
     KeyValue = KeyValue
     Code = Code
     Paper = Paper
-    File = File
     MainResult = MainResult
     Result = Result
     Translations = Translations

@@ -16,6 +16,10 @@
    :members:
 """
 
+# Struct fields aren't discovered in Python 3.14
+# - https://github.com/searxng/searxng/issues/5284
+from __future__ import annotations
+
 __all__ = ["Result"]
 
 import typing as t

@@ -23,6 +27,7 @@ import typing as t
 import re
 import urllib.parse
 import warnings
+import time
 import datetime
 
 from collections.abc import Callable

@@ -231,6 +236,13 @@ class Result(msgspec.Struct, kw_only=True):
     url: str | None = None
     """A link related to this *result*"""
 
+    template: str = "default.html"
+    """Name of the template used to render the result.
+
+    By default :origin:`result_templates/default.html
+    <searx/templates/simple/result_templates/default.html>` is used.
+    """
+
     engine: str | None = ""
     """Name of the engine *this* result comes from. In case of *plugins* a
     prefix ``plugin:`` is set, in case of *answerer* prefix ``answerer:`` is

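Note (reviewer aside, not part of the diff): whichever base class ends up carrying the
template field, concrete result types override it the usual msgspec way -- redeclare the
field with a different default. A sketch with made-up classes:

import msgspec

class Result(msgspec.Struct, kw_only=True):
    url: str | None = None
    template: str = "default.html"

class ImageResult(Result, kw_only=True):
    # redeclaring the field only swaps the default value
    template: str = "images.html"
    img_src: str = ""

print(ImageResult(url="https://example.org").template)   # images.html
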
@@ -338,13 +350,6 @@ class Result(msgspec.Struct, kw_only=True):
 class MainResult(Result):  # pylint: disable=missing-class-docstring
     """Base class of all result types displayed in :ref:`area main results`."""
 
-    template: str = "default.html"
-    """Name of the template used to render the result.
-
-    By default :origin:`result_templates/default.html
-    <searx/templates/simple/result_templates/default.html>` is used.
-    """
-
     title: str = ""
     """Link title of the result item."""
 

@@ -354,12 +359,6 @@ class MainResult(Result):  # pylint: disable=missing-class-docstring
     img_src: str = ""
     """URL of an image that is displayed in the result item."""
 
-    iframe_src: str = ""
-    """URL of an embedded ``<iframe>`` / the frame is collapsible."""
-
-    audio_src: str = ""
-    """URL of an embedded ``<audio controls>``."""
-
     thumbnail: str = ""
     """URL of a thumbnail that is displayed in the result item."""
 

@@ -373,7 +372,7 @@ class MainResult(Result):  # pylint: disable=missing-class-docstring
     completely eliminated.
     """
 
-    length: datetime.timedelta | None = None
+    length: time.struct_time | None = None
     """Playing duration in seconds."""
 
     views: str = ""

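Note (reviewer aside, not part of the diff): the two types for length behave very
differently -- datetime.timedelta is a real duration with arithmetic and sensible
formatting, while time.struct_time is a calendar tuple pressed into service as one.
In isolation:

import time
import datetime

seconds = 3725

as_delta = datetime.timedelta(seconds=seconds)
print(as_delta)                   # 1:02:05
print(as_delta.total_seconds())   # 3725.0

# the struct_time version goes through a fake calendar date
as_struct = time.gmtime(seconds)
print(time.strftime("%H:%M:%S", as_struct))   # 01:02:05
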
@@ -28,6 +28,9 @@ template.
 """
 # pylint: disable=too-few-public-methods
 
+# Struct fields aren't discovered in Python 3.14
+# - https://github.com/searxng/searxng/issues/5284
+from __future__ import annotations
 
 __all__ = ["AnswerSet", "Answer", "Translations", "WeatherAnswer"]
 

@@ -14,6 +14,10 @@ template. For highlighting the code passages, Pygments_ is used.
 """
 # pylint: disable=too-few-public-methods, disable=invalid-name
 
+# Struct fields aren't discovered in Python 3.14
+# - https://github.com/searxng/searxng/issues/5284
+from __future__ import annotations
+
 __all__ = ["Code"]
 
 import typing as t

@@ -1,94 +0,0 @@
-# SPDX-License-Identifier: AGPL-3.0-or-later
-"""
-Typification of the *file* results.  Results of this type are rendered in
-the :origin:`file.html <searx/templates/simple/result_templates/file.html>`
-template.
-
-----
-
-.. autoclass:: File
-   :members:
-   :show-inheritance:
-
-"""
-# pylint: disable=too-few-public-methods
-
-
-__all__ = ["File"]
-
-import typing as t
-import mimetypes
-
-from ._base import MainResult
-
-
-@t.final
-class File(MainResult, kw_only=True):
-    """Class for results of type *file*"""
-
-    template: str = "file.html"
-
-    filename: str = ""
-    """Name of the file."""
-
-    size: str = ""
-    """File size in human readable notation (e.g. ``MB`` for a
-    1024 * 1024 bytes file size)."""
-
-    time: str = ""
-    """Indication of a time, such as the date of the last modification or the
-    date of creation.  This is a free-form string whose format can be chosen
-    to fit the context."""
-
-    mimetype: str = ""
-    """Mimetype/Subtype of the file.  For ``audio`` and ``video``, a URL can be
-    passed in the :py:obj:`File.embedded` field to embed the referenced media in
-    the result.  If no value is specified, the MIME type is determined from
-    ``self.filename`` or, alternatively, from ``self.embedded`` (if either of
-    the two values is set)."""
-
-    abstract: str = ""
-    """Abstract of the file."""
-
-    author: str = ""
-    """Author of the file."""
-
-    embedded: str = ""
-    """URL of an embedded media type (audio or video) / is collapsible."""
-
-    mtype: str = ""
-    """Used for displaying :py:obj:`File.embedded`.  Its value is automatically
-    populated from the base type of :py:obj:`File.mimetype`, and can be
-    explicitly set to enforce e.g. ``audio`` or ``video`` when the mimetype is
-    something like "application/ogg" but it is known that the content is, for
-    example, a video."""
-
-    subtype: str = ""
-    """Used for displaying :py:obj:`File.embedded`.  Its value is automatically
-    populated from the subtype of :py:obj:`File.mimetype`, and can be
-    explicitly set to enforce a subtype for the :py:obj:`File.embedded`
-    element."""
-
-    def __post_init__(self):
-        super().__post_init__()
-
-        if not self.mtype or not self.subtype:
-
-            fn = self.filename or self.embedded
-            if not self.mimetype and fn:
-                self.mimetype = mimetypes.guess_type(fn, strict=False)[0] or ""
-
-            mtype, subtype = (self.mimetype.split("/", 1) + [""])[:2]
-
-            if not self.mtype:
-                # I don't know why, but the ogg video stream is not displayed,
-                # maybe https://github.com/videojs/video.js can help?
-                if self.embedded.endswith(".ogv"):
-                    self.mtype = "video"
-                elif self.embedded.endswith(".oga"):
-                    self.mtype = "audio"
-                else:
-                    self.mtype = mtype
-
-            if not self.subtype:
-                self.subtype = subtype

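Note (reviewer aside, not part of the diff): the deleted File.__post_init__ above is, at
its core, a mimetypes.guess_type() fallback chain. The same logic in isolation, with
hypothetical file names:

import mimetypes

def split_mime(filename: str = "", embedded: str = "", mimetype: str = "") -> tuple[str, str]:
    """Return (mtype, subtype), guessing the MIME type from a name if needed."""
    fn = filename or embedded
    if not mimetype and fn:
        # strict=False also consults the table of common non-IANA types
        mimetype = mimetypes.guess_type(fn, strict=False)[0] or ""
    # pad with "" so names without a guessable type still unpack cleanly
    parts = (mimetype.split("/", 1) + [""])[:2]
    return parts[0], parts[1]

print(split_mime(filename="clip.mp4"))   # ('video', 'mp4')
print(split_mime(embedded="song.mp3"))   # ('audio', 'mpeg')
print(split_mime(filename="README"))     # ('', '')
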
@@ -13,6 +13,9 @@ template.
 """
 # pylint: disable=too-few-public-methods
 
+# Struct fields aren't discovered in Python 3.14
+# - https://github.com/searxng/searxng/issues/5284
+from __future__ import annotations
 
 __all__ = ["KeyValue"]
 

@@ -21,6 +21,10 @@ Related topics:
 """
 # pylint: disable=too-few-public-methods, disable=invalid-name
 
+# Struct fields aren't discovered in Python 3.14
+# - https://github.com/searxng/searxng/issues/5284
+from __future__ import annotations
+
 __all__ = ["Paper"]
 
 import typing as t

@@ -22,7 +22,7 @@ from searx.network import initialize as initialize_network, check_network_config
 from searx.results import ResultContainer
 from searx.search.checker import initialize as initialize_checker
 from searx.search.processors import PROCESSORS
-from searx.search.processors.abstract import RequestParams
 
 if t.TYPE_CHECKING:
     from .models import SearchQuery

@@ -79,20 +79,16 @@ class Search:
         return bool(results)
 
     # do search-request
-    def _get_requests(self) -> tuple[list[tuple[str, str, RequestParams]], float]:
+    def _get_requests(self) -> tuple[list[tuple[str, str, dict[str, t.Any]]], int]:
         # init vars
-        requests: list[tuple[str, str, RequestParams]] = []
+        requests: list[tuple[str, str, dict[str, t.Any]]] = []
 
         # max of all selected engine timeout
         default_timeout = 0
 
         # start search-request for all selected engines
         for engineref in self.search_query.engineref_list:
-            processor = PROCESSORS.get(engineref.name)
-            if not processor:
-                # engine does not exist; not yet, or the engine's 'init' method
-                # failed and the engine has not been registered
-                continue
+            processor = PROCESSORS[engineref.name]
 
             # stop the request now if the engine is suspended
             if processor.extend_container_if_suspended(self.result_container):

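Note (reviewer aside, not part of the diff): the "-" side's PROCESSORS.get() guards
against engines whose init failed and therefore never registered a processor; direct
indexing would raise KeyError mid-search. The pattern in isolation, with a made-up
registry:

# hypothetical stand-in for searx.search.processors.PROCESSORS
PROCESSORS = {"duckduckgo": object()}

for name in ["duckduckgo", "broken_engine"]:
    processor = PROCESSORS.get(name)
    if not processor:
        # unknown or failed engine: skip it instead of aborting the search
        continue
    print("dispatching", name)
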
@@ -137,7 +133,7 @@ class Search:
 
         return requests, actual_timeout
 
-    def search_multiple_requests(self, requests: list[tuple[str, str, RequestParams]]):
+    def search_multiple_requests(self, requests: list[tuple[str, str, dict[str, t.Any]]]):
         # pylint: disable=protected-access
         search_id = str(uuid4())
 

@@ -82,6 +82,7 @@ def _download_and_check_if_image(image_url: str) -> bool:
         'User-Agent': gen_useragent(),
         'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
         'Accept-Language': 'en-US;q=0.5,en;q=0.3',
+        'Accept-Encoding': 'gzip, deflate, br',
         'DNT': '1',
         'Connection': 'keep-alive',
         'Upgrade-Insecure-Requests': '1',

@@ -51,6 +51,7 @@ class ProcessorMap(dict[str, EngineProcessor]):
             eng_name: str = eng_settings["name"]
 
             if eng_settings.get("inactive", False) is True:
+                logger.info("Engine of name '%s' is inactive.", eng_name)
                 continue
 
             eng_obj = engines.engines.get(eng_name)

Some files were not shown because too many files have changed in this diff.