Mirror of https://github.com/searxng/searxng.git (synced 2026-01-02 00:50:01 +00:00)

Compare commits: 78 commits, b876d0bed0...master
Commits (SHA1):

a5c946a321, 29042d8e5a, c57db45672, 9491b514c9, 320c317719, abae17e6fc,
3baf5c38fc, ce46f30739, 65a95539f1, 874dc3f5ea, 7941719371, fa9729226b,
9df177af85, f45123356b, 8851f4d6b1, f954423101, 95e63ac32d, fc6e59d3ec,
da45859f32, 8bf600cc62, aa607a379a, 6ebd3f4d35, 9072c77aea, c32b8100c3,
f93257941e, 896863802e, 920b40253c, 07440e3332, 1827dfc071, c46aecd4e3,
21bf8a6973, f5475ba782, 265f15498c, 666409ec7e, b719d559b6, 9d3ec9a2a2,
74ec225ad1, b5a1a092f1, ddc6d68114, 32eb84d6d3, da6c635ea2, e34c356e64,
7017393647, aa49f5b933, 3f91ac47e6, 8c631b92ce, 0ebac144f5, 5e0e1c6b31,
3c7545c6ce, aba839195b, 1f6ea41272, 5450d22796, 1174fde1f3, fb089ae297,
ab8224c939, c954e71f87, cbc04a839a, cb4a5abc8c, 07ff6e3ccc, cdaab944b4,
6ecf32fd4a, 20de10df4e, 673c29efeb, c4abf40e6e, 39b9922609, 7018e6583b,
b957e587da, ebb9ea4571, 54a97e1043, 0ee78c19dd, bcc7a5eb2e, 2313b972a3,
989b49335c, 3f30831640, 5fcee9bc30, 2f0e52d6eb, c0d69cec4e, c852b9a90a
.github/workflows/checker.yml (vendored, 6 lines changed)

@@ -24,17 +24,17 @@ jobs:
     runs-on: ubuntu-24.04-arm
     steps:
       - name: Setup Python
-        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+        uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
         with:
           python-version: "${{ env.PYTHON_VERSION }}"

       - name: Checkout
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
         with:
           persist-credentials: "false"

       - name: Setup cache Python
-        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
         with:
           key: "python-${{ env.PYTHON_VERSION }}-${{ runner.arch }}-${{ hashFiles('./requirements*.txt') }}"
           restore-keys: "python-${{ env.PYTHON_VERSION }}-${{ runner.arch }}-"
.github/workflows/container.yml (vendored, 16 lines changed)

@@ -78,18 +78,18 @@ jobs:
       # yamllint enable rule:line-length

       - name: Setup Python
-        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+        uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
         with:
           python-version: "${{ env.PYTHON_VERSION }}"

       - name: Checkout
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
         with:
           persist-credentials: "false"
           fetch-depth: "0"

       - name: Setup cache Python
-        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
         with:
           key: "python-${{ env.PYTHON_VERSION }}-${{ runner.arch }}-${{ hashFiles('./requirements*.txt') }}"
           restore-keys: "python-${{ env.PYTHON_VERSION }}-${{ runner.arch }}-"
@@ -100,7 +100,7 @@ jobs:
         run: echo "date=$(date +'%Y%m%d')" >>$GITHUB_OUTPUT

       - name: Setup cache container
-        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
         with:
           key: "container-${{ matrix.arch }}-${{ steps.date.outputs.date }}-${{ hashFiles('./requirements*.txt') }}"
           restore-keys: |
@@ -145,7 +145,7 @@ jobs:

     steps:
       - name: Checkout
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
         with:
           persist-credentials: "false"

@@ -179,7 +179,7 @@ jobs:

     steps:
       - name: Checkout
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
         with:
           persist-credentials: "false"

@@ -194,8 +194,8 @@ jobs:
         uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
         with:
           registry: "docker.io"
-          username: "${{ secrets.DOCKERHUB_USERNAME }}"
-          password: "${{ secrets.DOCKERHUB_TOKEN }}"
+          username: "${{ secrets.DOCKER_USER }}"
+          password: "${{ secrets.DOCKER_TOKEN }}"

       - name: Release
         env:
.github/workflows/data-update.yml (vendored, 8 lines changed)

@@ -40,17 +40,17 @@ jobs:

     steps:
       - name: Setup Python
-        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+        uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
         with:
           python-version: "${{ env.PYTHON_VERSION }}"

       - name: Checkout
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
         with:
           persist-credentials: "false"

       - name: Setup cache Python
-        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
         with:
           key: "python-${{ env.PYTHON_VERSION }}-${{ runner.arch }}-${{ hashFiles('./requirements*.txt') }}"
           restore-keys: "python-${{ env.PYTHON_VERSION }}-${{ runner.arch }}-"
@@ -64,7 +64,7 @@ jobs:

       - name: Create PR
         id: cpr
-        uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
+        uses: peter-evans/create-pull-request@98357b18bf14b5342f975ff684046ec3b2a07725 # v8.0.0
         with:
           author: "searxng-bot <searxng-bot@users.noreply.github.com>"
           committer: "searxng-bot <searxng-bot@users.noreply.github.com>"
.github/workflows/documentation.yml (vendored, 8 lines changed)

@@ -32,18 +32,18 @@ jobs:

     steps:
       - name: Setup Python
-        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+        uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
         with:
           python-version: "${{ env.PYTHON_VERSION }}"

       - name: Checkout
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
         with:
           persist-credentials: "false"
           fetch-depth: "0"

       - name: Setup cache Python
-        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
         with:
           key: "python-${{ env.PYTHON_VERSION }}-${{ runner.arch }}-${{ hashFiles('./requirements*.txt') }}"
           restore-keys: "python-${{ env.PYTHON_VERSION }}-${{ runner.arch }}-"
@@ -57,7 +57,7 @@ jobs:

       - if: github.ref_name == 'master'
         name: Release
-        uses: JamesIves/github-pages-deploy-action@4a3abc783e1a24aeb44c16e869ad83caf6b4cc23 # v4.7.4
+        uses: JamesIves/github-pages-deploy-action@9d877eea73427180ae43cf98e8914934fe157a1a # v4.7.6
         with:
           folder: "dist/docs"
           branch: "gh-pages"
.github/workflows/integration.yml (vendored, 16 lines changed)

@@ -35,17 +35,17 @@ jobs:

     steps:
       - name: Setup Python
-        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+        uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
         with:
           python-version: "${{ matrix.python-version }}"

       - name: Checkout
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
         with:
           persist-credentials: "false"

       - name: Setup cache Python
-        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
         with:
           key: "python-${{ matrix.python-version }}-${{ runner.arch }}-${{ hashFiles('./requirements*.txt') }}"
           restore-keys: "python-${{ matrix.python-version }}-${{ runner.arch }}-"
@@ -62,28 +62,28 @@ jobs:
     runs-on: ubuntu-24.04-arm
     steps:
       - name: Setup Python
-        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+        uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
         with:
           python-version: "${{ env.PYTHON_VERSION }}"

       - name: Checkout
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
         with:
           persist-credentials: "false"

       - name: Setup Node.js
-        uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
+        uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
         with:
           node-version-file: "./.nvmrc"

       - name: Setup cache Node.js
-        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
         with:
           key: "nodejs-${{ runner.arch }}-${{ hashFiles('./.nvmrc', './package.json') }}"
           path: "./client/simple/node_modules/"

       - name: Setup cache Python
-        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
         with:
           key: "python-${{ env.PYTHON_VERSION }}-${{ runner.arch }}-${{ hashFiles('./requirements*.txt') }}"
           restore-keys: "python-${{ env.PYTHON_VERSION }}-${{ runner.arch }}-"
.github/workflows/l10n.yml (vendored, 14 lines changed)

@@ -35,18 +35,18 @@ jobs:

     steps:
       - name: Setup Python
-        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+        uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
         with:
           python-version: "${{ env.PYTHON_VERSION }}"

       - name: Checkout
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
         with:
           token: "${{ secrets.WEBLATE_GITHUB_TOKEN }}"
           fetch-depth: "0"

       - name: Setup cache Python
-        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
         with:
           key: "python-${{ env.PYTHON_VERSION }}-${{ runner.arch }}-${{ hashFiles('./requirements*.txt') }}"
           restore-keys: "python-${{ env.PYTHON_VERSION }}-${{ runner.arch }}-"
@@ -82,18 +82,18 @@ jobs:

     steps:
       - name: Setup Python
-        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+        uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
         with:
           python-version: "${{ env.PYTHON_VERSION }}"

       - name: Checkout
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
         with:
           token: "${{ secrets.WEBLATE_GITHUB_TOKEN }}"
           fetch-depth: "0"

       - name: Setup cache Python
-        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
         with:
           key: "python-${{ env.PYTHON_VERSION }}-${{ runner.arch }}-${{ hashFiles('./requirements*.txt') }}"
           restore-keys: "python-${{ env.PYTHON_VERSION }}-${{ runner.arch }}-"
@@ -117,7 +117,7 @@ jobs:

       - name: Create PR
         id: cpr
-        uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
+        uses: peter-evans/create-pull-request@98357b18bf14b5342f975ff684046ec3b2a07725 # v8.0.0
         with:
           author: "searxng-bot <searxng-bot@users.noreply.github.com>"
           committer: "searxng-bot <searxng-bot@users.noreply.github.com>"
.github/workflows/security.yml (vendored, 8 lines changed)

@@ -24,7 +24,7 @@ jobs:

     steps:
       - name: Checkout
-        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
         with:
           persist-credentials: "false"

@@ -32,8 +32,8 @@ jobs:
         uses: docker/scout-action@f8c776824083494ab0d56b8105ba2ca85c86e4de # v1.18.2
         with:
           organization: "searxng"
-          dockerhub-user: "${{ secrets.DOCKERHUB_USERNAME }}"
-          dockerhub-password: "${{ secrets.DOCKERHUB_TOKEN }}"
+          dockerhub-user: "${{ secrets.DOCKER_USER }}"
+          dockerhub-password: "${{ secrets.DOCKER_TOKEN }}"
           image: "registry://ghcr.io/searxng/searxng:latest"
           command: "cves"
           sarif-file: "./scout.sarif"
@@ -41,6 +41,6 @@ jobs:
           write-comment: "false"

       - name: Upload SARIFs
-        uses: github/codeql-action/upload-sarif@e12f0178983d466f2f6028f5cc7a6d786fd97f4b # v4.31.4
+        uses: github/codeql-action/upload-sarif@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
         with:
           sarif_file: "./scout.sarif"
@@ -46,11 +46,9 @@ Further information on *how-to* can be found `here <https://docs.searxng.org/adm
 Connect
 =======

-If you have questions or want to connect with others in the community,
-we have two official channels:
+If you have questions or want to connect with others in the community:

 - `#searxng:matrix.org <https://matrix.to/#/#searxng:matrix.org>`_
 - `#searxng @ libera.chat <https://web.libera.chat/?channel=#searxng>`_ (bridged to Matrix)

 Contributing
 ============
@@ -1,5 +1,5 @@
 {
-  "$schema": "https://biomejs.dev/schemas/2.3.7/schema.json",
+  "$schema": "https://biomejs.dev/schemas/2.3.10/schema.json",
   "files": {
     "ignoreUnknown": true,
     "includes": ["**", "!node_modules"]
@@ -35,12 +35,6 @@
     },
     "correctness": {
       "noGlobalDirnameFilename": "error",
-      "noUndeclaredVariables": {
-        "level": "error",
-        "options": {
-          "checkTypes": true
-        }
-      },
       "useImportExtensions": "error",
       "useJsonImportAttributes": "error",
       "useSingleJsDocAsterisk": "error"
@@ -48,17 +42,22 @@
     "nursery": {
       "noContinue": "warn",
       "noDeprecatedImports": "warn",
       "noEqualsToNull": "warn",
       "noFloatingPromises": "warn",
       "noForIn": "warn",
       "noImportCycles": "warn",
       "noIncrementDecrement": "warn",
       "noMisusedPromises": "warn",
       "noMultiStr": "warn",
       "noParametersOnlyUsedInRecursion": "warn",
       "noUselessCatchBinding": "warn",
       "noUselessUndefined": "warn",
       "useAwaitThenable": "off",
       "useDestructuring": "warn",
       "useExhaustiveSwitchCases": "warn",
       "useExplicitType": "warn",
-      "useFind": "warn"
+      "useFind": "warn",
+      "useRegexpExec": "warn"
     },
     "performance": {
       "noAwaitInLoops": "error",
@@ -160,15 +159,5 @@
         "semicolons": "always",
         "trailingCommas": "none"
       }
-    },
-    "html": {
-      "experimentalFullSupportEnabled": true,
-      "formatter": {
-        "attributePosition": "auto",
-        "bracketSameLine": false,
-        "indentScriptAndStyle": true,
-        "selfCloseVoidElements": "always",
-        "whitespaceSensitivity": "ignore"
-      }
     }
 }
client/simple/package-lock.json (generated, 729 lines changed; file diff suppressed because it is too large)
@@ -19,33 +19,31 @@
     "lint:tsc": "tsc --noEmit"
   },
   "browserslist": [
-    "Chrome >= 93",
-    "Firefox >= 92",
-    "Safari >= 15.4",
+    "baseline 2022",
     "not dead"
   ],
   "dependencies": {
-    "ionicons": "~8.0.13",
+    "ionicons": "^8.0.13",
     "normalize.css": "8.0.1",
-    "ol": "~10.7.0",
+    "ol": "^10.7.0",
     "swiped-events": "1.2.0"
   },
   "devDependencies": {
-    "@biomejs/biome": "2.3.7",
-    "@types/node": "~24.10.1",
-    "browserslist": "~4.28.0",
-    "browserslist-to-esbuild": "~2.1.1",
-    "edge.js": "~6.3.0",
-    "less": "~4.4.2",
-    "lightningcss": "~1.30.2",
+    "@biomejs/biome": "2.3.10",
+    "@types/node": "^25.0.3",
+    "browserslist": "^4.28.1",
+    "browserslist-to-esbuild": "^2.1.1",
+    "edge.js": "^6.4.0",
+    "less": "^4.5.1",
+    "mathjs": "^15.1.0",
     "sharp": "~0.34.5",
-    "sort-package-json": "~3.4.0",
-    "stylelint": "~16.25.0",
-    "stylelint-config-standard-less": "~3.0.1",
-    "stylelint-prettier": "~5.0.3",
-    "svgo": "~4.0.0",
+    "sort-package-json": "^3.6.0",
+    "stylelint": "^16.26.0",
+    "stylelint-config-standard-less": "^3.0.1",
+    "stylelint-prettier": "^5.0.3",
+    "svgo": "^4.0.0",
     "typescript": "~5.9.3",
-    "vite": "npm:rolldown-vite@7.2.7",
-    "vite-bundle-analyzer": "~1.2.3"
+    "vite": "8.0.0-beta.5",
+    "vite-bundle-analyzer": "^1.3.2"
   }
 }
client/simple/src/js/Plugin.ts (new file, 66 lines)

@@ -0,0 +1,66 @@
// SPDX-License-Identifier: AGPL-3.0-or-later

/**
 * Base class for client-side plugins.
 *
 * @remarks
 * Handle conditional loading of the plugin in:
 *
 * - client/simple/src/js/router.ts
 *
 * @abstract
 */
export abstract class Plugin {
  /**
   * Plugin name.
   */
  protected readonly id: string;

  /**
   * @remarks
   * Don't hold references of this instance outside the class.
   */
  protected constructor(id: string) {
    this.id = id;

    void this.invoke();
  }

  private async invoke(): Promise<void> {
    try {
      console.debug(`[PLUGIN] ${this.id}: Running...`);
      const result = await this.run();
      if (!result) return;

      console.debug(`[PLUGIN] ${this.id}: Running post-exec...`);
      // @ts-expect-error
      void (await this.post(result as NonNullable<Awaited<ReturnType<this["run"]>>>));
    } catch (error) {
      console.error(`[PLUGIN] ${this.id}:`, error);
    } finally {
      console.debug(`[PLUGIN] ${this.id}: Done.`);
    }
  }

  /**
   * Plugin goes here.
   *
   * @remarks
   * The plugin is already loaded at this point. If you wish to execute
   * conditions to exit early, consider moving the logic to:
   *
   * - client/simple/src/js/router.ts
   *
   * ...to avoid unnecessarily loading this plugin on the client.
   */
  protected abstract run(): Promise<unknown>;

  /**
   * Post-execution hook.
   *
   * @remarks
   * The hook is only executed if `#run()` returns a truthy value.
   */
  // @ts-expect-error
  protected abstract post(result: NonNullable<Awaited<ReturnType<this["run"]>>>): Promise<void>;
}
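For orientation, a minimal concrete subclass might look like the sketch below. It is illustrative only and not part of this changeset; the class name, plugin id, and selector are made up. A subclass passes its id to the base constructor, does its work in run(), and receives run()'s truthy result in post(); loading is wired up through loader.ts and router.ts, shown further below.

// Hypothetical example, not part of this diff: a minimal Plugin subclass.
import { Plugin } from "./Plugin.ts";

export default class ExamplePlugin extends Plugin {
  public constructor() {
    // the id is only used as the [PLUGIN] debug/error log prefix
    super("examplePlugin");
  }

  // run() is invoked from the base constructor; a falsy return skips post()
  protected async run(): Promise<string | undefined> {
    const results = document.querySelector<HTMLElement>("#results");
    return results ? "results container found" : undefined;
  }

  // post() receives the truthy value returned by run()
  protected async post(result: string): Promise<void> {
    console.debug(result);
  }
}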
@@ -1,6 +0,0 @@
// SPDX-License-Identifier: AGPL-3.0-or-later

import "./nojs.ts";
import "./router.ts";
import "./toolkit.ts";
import "./listener.ts";
@@ -1,7 +0,0 @@
// SPDX-License-Identifier: AGPL-3.0-or-later

import { listen } from "./toolkit.ts";

listen("click", ".close", function (this: HTMLElement) {
  (this.parentNode as HTMLElement)?.classList.add("invisible");
});
@@ -1,8 +0,0 @@
// SPDX-License-Identifier: AGPL-3.0-or-later

import { ready } from "./toolkit.ts";

ready(() => {
  document.documentElement.classList.remove("no-js");
  document.documentElement.classList.add("js");
});
@@ -1,40 +0,0 @@
// SPDX-License-Identifier: AGPL-3.0-or-later

import { Endpoints, endpoint, ready, settings } from "./toolkit.ts";

ready(
  () => {
    void import("../main/keyboard.ts");
    void import("../main/search.ts");

    if (settings.autocomplete) {
      void import("../main/autocomplete.ts");
    }
  },
  { on: [endpoint === Endpoints.index] }
);

ready(
  () => {
    void import("../main/keyboard.ts");
    void import("../main/mapresult.ts");
    void import("../main/results.ts");
    void import("../main/search.ts");

    if (settings.infinite_scroll) {
      void import("../main/infinite_scroll.ts");
    }

    if (settings.autocomplete) {
      void import("../main/autocomplete.ts");
    }
  },
  { on: [endpoint === Endpoints.results] }
);

ready(
  () => {
    void import("../main/preferences.ts");
  },
  { on: [endpoint === Endpoints.preferences] }
);
client/simple/src/js/index.ts (new file, 4 lines)

@@ -0,0 +1,4 @@
// SPDX-License-Identifier: AGPL-3.0-or-later

// core
void import.meta.glob(["./*.ts", "./util/**/.ts"], { eager: true });
client/simple/src/js/loader.ts (new file, 36 lines)

@@ -0,0 +1,36 @@
// SPDX-License-Identifier: AGPL-3.0-or-later

import type { Plugin } from "./Plugin.ts";
import { type EndpointsKeys, endpoint } from "./toolkit.ts";

type Options =
  | {
      on: "global";
    }
  | {
      on: "endpoint";
      where: EndpointsKeys[];
    };

export const load = <T extends Plugin>(instance: () => Promise<T>, options: Options): void => {
  if (!check(options)) return;

  void instance();
};

const check = (options: Options): boolean => {
  // biome-ignore lint/style/useDefaultSwitchClause: options is typed
  switch (options.on) {
    case "global": {
      return true;
    }
    case "endpoint": {
      if (!options.where.includes(endpoint)) {
        // not on the expected endpoint
        return false;
      }

      return true;
    }
  }
};
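Two illustrative calls, reusing the hypothetical ExamplePlugin from the earlier sketch (the real call sites are in router.ts below):

// Hypothetical usage sketches, not part of this diff.
import { load } from "./loader.ts";
import { Endpoints } from "./toolkit.ts";

// load on every page
load(() => import("./plugin/ExamplePlugin.ts").then(({ default: Plugin }) => new Plugin()), {
  on: "global"
});

// load only on the results page
load(() => import("./plugin/ExamplePlugin.ts").then(({ default: Plugin }) => new Plugin()), {
  on: "endpoint",
  where: [Endpoints.results]
});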
@@ -1,6 +1,7 @@
 // SPDX-License-Identifier: AGPL-3.0-or-later

-import { assertElement, http, listen, settings } from "../core/toolkit.ts";
+import { http, listen, settings } from "../toolkit.ts";
+import { assertElement } from "../util/assertElement.ts";

 const fetchResults = async (qInput: HTMLInputElement, query: string): Promise<void> => {
   try {
@@ -1,100 +0,0 @@
// SPDX-License-Identifier: AGPL-3.0-or-later

import { assertElement, http, settings } from "../core/toolkit.ts";

const newLoadSpinner = (): HTMLDivElement => {
  return Object.assign(document.createElement("div"), {
    className: "loader"
  });
};

const loadNextPage = async (onlyImages: boolean, callback: () => void): Promise<void> => {
  const searchForm = document.querySelector<HTMLFormElement>("#search");
  assertElement(searchForm);

  const form = document.querySelector<HTMLFormElement>("#pagination form.next_page");
  assertElement(form);

  const action = searchForm.getAttribute("action");
  if (!action) {
    throw new Error("Form action not defined");
  }

  const paginationElement = document.querySelector<HTMLElement>("#pagination");
  assertElement(paginationElement);

  paginationElement.replaceChildren(newLoadSpinner());

  try {
    const res = await http("POST", action, { body: new FormData(form) });
    const nextPage = await res.text();
    if (!nextPage) return;

    const nextPageDoc = new DOMParser().parseFromString(nextPage, "text/html");
    const articleList = nextPageDoc.querySelectorAll<HTMLElement>("#urls article");
    const nextPaginationElement = nextPageDoc.querySelector<HTMLElement>("#pagination");

    document.querySelector("#pagination")?.remove();

    const urlsElement = document.querySelector<HTMLElement>("#urls");
    if (!urlsElement) {
      throw new Error("URLs element not found");
    }

    if (articleList.length > 0 && !onlyImages) {
      // do not add <hr> element when there are only images
      urlsElement.appendChild(document.createElement("hr"));
    }

    urlsElement.append(...Array.from(articleList));

    if (nextPaginationElement) {
      const results = document.querySelector<HTMLElement>("#results");
      results?.appendChild(nextPaginationElement);
      callback();
    }
  } catch (error) {
    console.error("Error loading next page:", error);

    const errorElement = Object.assign(document.createElement("div"), {
      textContent: settings.translations?.error_loading_next_page ?? "Error loading next page",
      className: "dialog-error"
    });
    errorElement.setAttribute("role", "alert");
    document.querySelector("#pagination")?.replaceChildren(errorElement);
  }
};

const resultsElement: HTMLElement | null = document.getElementById("results");
if (!resultsElement) {
  throw new Error("Results element not found");
}

const onlyImages: boolean = resultsElement.classList.contains("only_template_images");
const observedSelector = "article.result:last-child";

const intersectionObserveOptions: IntersectionObserverInit = {
  rootMargin: "320px"
};

const observer: IntersectionObserver = new IntersectionObserver((entries: IntersectionObserverEntry[]) => {
  const [paginationEntry] = entries;

  if (paginationEntry?.isIntersecting) {
    observer.unobserve(paginationEntry.target);

    void loadNextPage(onlyImages, () => {
      const nextObservedElement = document.querySelector<HTMLElement>(observedSelector);
      if (nextObservedElement) {
        observer.observe(nextObservedElement);
      }
    }).then(() => {
      // wait until promise is resolved
    });
  }
}, intersectionObserveOptions);

const initialObservedElement: HTMLElement | null = document.querySelector<HTMLElement>(observedSelector);
if (initialObservedElement) {
  observer.observe(initialObservedElement);
}
@@ -1,6 +1,7 @@
 // SPDX-License-Identifier: AGPL-3.0-or-later

-import { assertElement, listen, mutable, settings } from "../core/toolkit.ts";
+import { listen, mutable, settings } from "../toolkit.ts";
+import { assertElement } from "../util/assertElement.ts";

 export type KeyBindingLayout = "default" | "vim";

@@ -219,7 +220,7 @@ const highlightResult =
       // biome-ignore lint/complexity/noUselessSwitchCase: fallthrough is intended
       case "top":
       default:
-        next = results[0];
+        [next] = results;
     }
   }

@@ -342,7 +343,7 @@ const initHelpContent = (divElement: HTMLElement, keyBindings: typeof baseKeyBin
   const categories: Record<string, KeyBinding[]> = {};

   for (const binding of Object.values(keyBindings)) {
-    const cat = binding.cat;
+    const { cat } = binding;
     categories[cat] ??= [];
     categories[cat].push(binding);
   }
@@ -399,7 +400,7 @@ const toggleHelp = (keyBindings: typeof baseKeyBinding): void => {
     className: "dialog-modal"
   });
   initHelpContent(helpPanel, keyBindings);
-  const body = document.getElementsByTagName("body")[0];
+  const [body] = document.getElementsByTagName("body");
   if (body) {
     body.appendChild(helpPanel);
   }
@@ -1,86 +0,0 @@
// SPDX-License-Identifier: AGPL-3.0-or-later

import { listen } from "../core/toolkit.ts";

listen("click", ".searxng_init_map", async function (this: HTMLElement, event: Event) {
  event.preventDefault();
  this.classList.remove("searxng_init_map");

  const {
    View,
    OlMap,
    TileLayer,
    VectorLayer,
    OSM,
    VectorSource,
    Style,
    Stroke,
    Fill,
    Circle,
    fromLonLat,
    GeoJSON,
    Feature,
    Point
  } = await import("../pkg/ol.ts");
  void import("ol/ol.css");

  const { leafletTarget: target, mapLon, mapLat, mapGeojson } = this.dataset;

  const lon = Number.parseFloat(mapLon || "0");
  const lat = Number.parseFloat(mapLat || "0");
  const view = new View({ maxZoom: 16, enableRotation: false });
  const map = new OlMap({
    target: target,
    layers: [new TileLayer({ source: new OSM({ maxZoom: 16 }) })],
    view: view
  });

  try {
    const markerSource = new VectorSource({
      features: [
        new Feature({
          geometry: new Point(fromLonLat([lon, lat]))
        })
      ]
    });

    const markerLayer = new VectorLayer({
      source: markerSource,
      style: new Style({
        image: new Circle({
          radius: 6,
          fill: new Fill({ color: "#3050ff" })
        })
      })
    });

    map.addLayer(markerLayer);
  } catch (error) {
    console.error("Failed to create marker layer:", error);
  }

  if (mapGeojson) {
    try {
      const geoSource = new VectorSource({
        features: new GeoJSON().readFeatures(JSON.parse(mapGeojson), {
          dataProjection: "EPSG:4326",
          featureProjection: "EPSG:3857"
        })
      });

      const geoLayer = new VectorLayer({
        source: geoSource,
        style: new Style({
          stroke: new Stroke({ color: "#3050ff", width: 2 }),
          fill: new Fill({ color: "#3050ff33" })
        })
      });

      map.addLayer(geoLayer);

      view.fit(geoSource.getExtent(), { padding: [20, 20, 20, 20] });
    } catch (error) {
      console.error("Failed to create GeoJSON layer:", error);
    }
  }
});
@@ -1,6 +1,7 @@
 // SPDX-License-Identifier: AGPL-3.0-or-later

-import { assertElement, http, listen, settings } from "../core/toolkit.ts";
+import { http, listen, settings } from "../toolkit.ts";
+import { assertElement } from "../util/assertElement.ts";

 let engineDescriptions: Record<string, [string, string]> | undefined;

@@ -69,8 +70,7 @@ listen("click", "#copy-hash", async function (this: HTMLElement) {
     }
   }

-  const copiedText = this.dataset.copiedText;
-  if (copiedText) {
-    this.innerText = copiedText;
+  if (this.dataset.copiedText) {
+    this.innerText = this.dataset.copiedText;
   }
 });
@@ -1,7 +1,8 @@
 // SPDX-License-Identifier: AGPL-3.0-or-later

 import "../../../node_modules/swiped-events/src/swiped-events.js";
-import { assertElement, listen, mutable, settings } from "../core/toolkit.ts";
+import { listen, mutable, settings } from "../toolkit.ts";
+import { assertElement } from "../util/assertElement.ts";

 let imgTimeoutID: number;

@@ -134,9 +135,8 @@ listen("click", "#copy_url", async function (this: HTMLElement) {
     }
   }

-  const copiedText = this.dataset.copiedText;
-  if (copiedText) {
-    this.innerText = copiedText;
+  if (this.dataset.copiedText) {
+    this.innerText = this.dataset.copiedText;
   }
 });
@@ -1,88 +1,51 @@
 // SPDX-License-Identifier: AGPL-3.0-or-later

-import { assertElement, listen, settings } from "../core/toolkit.ts";
+import { listen } from "../toolkit.ts";
+import { getElement } from "../util/getElement.ts";

-const submitIfQuery = (qInput: HTMLInputElement): void => {
-  if (qInput.value.length > 0) {
-    const search = document.getElementById("search") as HTMLFormElement | null;
-    search?.submit();
-  }
-};
-
-const updateClearButton = (qInput: HTMLInputElement, cs: HTMLElement): void => {
-  cs.classList.toggle("empty", qInput.value.length === 0);
-};
-
-const createClearButton = (qInput: HTMLInputElement): void => {
-  const cs = document.getElementById("clear_search");
-  assertElement(cs);
-
-  updateClearButton(qInput, cs);
-
-  listen("click", cs, (event: MouseEvent) => {
-    event.preventDefault();
-    qInput.value = "";
-    qInput.focus();
-    updateClearButton(qInput, cs);
-  });
-
-  listen("input", qInput, () => updateClearButton(qInput, cs), { passive: true });
-};
-
-const qInput = document.getElementById("q") as HTMLInputElement | null;
-assertElement(qInput);
+const searchForm: HTMLFormElement = getElement<HTMLFormElement>("search");
+const searchInput: HTMLInputElement = getElement<HTMLInputElement>("q");
+const searchReset: HTMLButtonElement = getElement<HTMLButtonElement>("clear_search");

 const isMobile: boolean = window.matchMedia("(max-width: 50em)").matches;
 const isResultsPage: boolean = document.querySelector("main")?.id === "main_results";

+const categoryButtons: HTMLButtonElement[] = Array.from(
+  document.querySelectorAll<HTMLButtonElement>("#categories_container button.category")
+);
+
+if (searchInput.value.length === 0) {
+  searchReset.classList.add("empty");
+}
+
 // focus search input on large screens
 if (!(isMobile || isResultsPage)) {
-  qInput.focus();
+  searchInput.focus();
 }

 // On mobile, move cursor to the end of the input on focus
 if (isMobile) {
-  listen("focus", qInput, () => {
+  listen("focus", searchInput, () => {
     // Defer cursor move until the next frame to prevent a visual jump
     requestAnimationFrame(() => {
-      const end = qInput.value.length;
-      qInput.setSelectionRange(end, end);
-      qInput.scrollLeft = qInput.scrollWidth;
+      const end = searchInput.value.length;
+      searchInput.setSelectionRange(end, end);
+      searchInput.scrollLeft = searchInput.scrollWidth;
     });
   });
 }

-createClearButton(qInput);
+listen("input", searchInput, () => {
+  searchReset.classList.toggle("empty", searchInput.value.length === 0);
+});

-// Additionally to searching when selecting a new category, we also
-// automatically start a new search request when the user changes a search
-// filter (safesearch, time range or language) (this requires JavaScript
-// though)
-if (
-  settings.search_on_category_select &&
-  // If .search_filters is undefined (invisible) we are on the homepage and
-  // hence don't have to set any listeners
-  document.querySelector(".search_filters")
-) {
-  const safesearchElement = document.getElementById("safesearch");
-  if (safesearchElement) {
-    listen("change", safesearchElement, () => submitIfQuery(qInput));
-  }
+listen("click", searchReset, (event: MouseEvent) => {
+  event.preventDefault();
+  searchInput.value = "";
+  searchInput.focus();
+  searchReset.classList.add("empty");
+});

-  const timeRangeElement = document.getElementById("time_range");
-  if (timeRangeElement) {
-    listen("change", timeRangeElement, () => submitIfQuery(qInput));
-  }
-
-  const languageElement = document.getElementById("language");
-  if (languageElement) {
-    listen("change", languageElement, () => submitIfQuery(qInput));
-  }
-}
-
-const categoryButtons: HTMLButtonElement[] = [
-  ...document.querySelectorAll<HTMLButtonElement>("button.category_button")
-];
 for (const button of categoryButtons) {
   listen("click", button, (event: MouseEvent) => {
     if (event.shiftKey) {
@@ -98,21 +61,34 @@ for (const button of categoryButtons) {
   });
 }

-const form: HTMLFormElement | null = document.querySelector<HTMLFormElement>("#search");
-assertElement(form);
-
-// override form submit action to update the actually selected categories
-listen("submit", form, (event: Event) => {
-  event.preventDefault();
-
-  const categoryValuesInput = document.querySelector<HTMLInputElement>("#selected-categories");
-  if (categoryValuesInput) {
-    const categoryValues = categoryButtons
-      .filter((button) => button.classList.contains("selected"))
-      .map((button) => button.name.replace("category_", ""));
-
-    categoryValuesInput.value = categoryValues.join(",");
-  }
-
-  form.submit();
+if (document.querySelector("div.search_filters")) {
+  const safesearchElement = document.getElementById("safesearch");
+  if (safesearchElement) {
+    listen("change", safesearchElement, () => searchForm.submit());
+  }
+
+  const timeRangeElement = document.getElementById("time_range");
+  if (timeRangeElement) {
+    listen("change", timeRangeElement, () => searchForm.submit());
+  }
+
+  const languageElement = document.getElementById("language");
+  if (languageElement) {
+    listen("change", languageElement, () => searchForm.submit());
+  }
+}
+
+// override searchForm submit event
+listen("submit", searchForm, (event: Event) => {
+  event.preventDefault();
+
+  if (categoryButtons.length > 0) {
+    const searchCategories = getElement<HTMLInputElement>("selected-categories");
+    searchCategories.value = categoryButtons
+      .filter((button) => button.classList.contains("selected"))
+      .map((button) => button.name.replace("category_", ""))
+      .join(",");
+  }
+
+  searchForm.submit();
 });
@@ -1,28 +0,0 @@
// SPDX-License-Identifier: AGPL-3.0-or-later

import { Feature, Map as OlMap, View } from "ol";
import { createEmpty } from "ol/extent";
import { GeoJSON } from "ol/format";
import { Point } from "ol/geom";
import { Tile as TileLayer, Vector as VectorLayer } from "ol/layer";
import { fromLonLat } from "ol/proj";
import { OSM, Vector as VectorSource } from "ol/source";
import { Circle, Fill, Stroke, Style } from "ol/style";

export {
  View,
  OlMap,
  TileLayer,
  VectorLayer,
  OSM,
  createEmpty,
  VectorSource,
  Style,
  Stroke,
  Fill,
  Circle,
  fromLonLat,
  GeoJSON,
  Feature,
  Point
};
client/simple/src/js/plugin/Calculator.ts (new file, 93 lines)

@@ -0,0 +1,93 @@
// SPDX-License-Identifier: AGPL-3.0-or-later

import {
  absDependencies,
  addDependencies,
  create,
  divideDependencies,
  eDependencies,
  evaluateDependencies,
  expDependencies,
  factorialDependencies,
  gcdDependencies,
  lcmDependencies,
  log1pDependencies,
  log2Dependencies,
  log10Dependencies,
  logDependencies,
  modDependencies,
  multiplyDependencies,
  nthRootDependencies,
  piDependencies,
  powDependencies,
  roundDependencies,
  signDependencies,
  sqrtDependencies,
  subtractDependencies
} from "mathjs/number";
import { Plugin } from "../Plugin.ts";
import { appendAnswerElement } from "../util/appendAnswerElement.ts";
import { getElement } from "../util/getElement.ts";

/**
 * Parses and solves mathematical expressions. Can do basic arithmetic and
 * evaluate some functions.
 *
 * @example
 * "(3 + 5) / 2" = "4"
 * "e ^ 2 + pi" = "10.530648752520442"
 * "gcd(48, 18) + lcm(4, 5)" = "26"
 *
 * @remarks
 * Depends on `mathjs` library.
 */
export default class Calculator extends Plugin {
  public constructor() {
    super("calculator");
  }

  /**
   * @remarks
   * Compare bundle size after adding or removing features.
   */
  private static readonly math = create({
    ...absDependencies,
    ...addDependencies,
    ...divideDependencies,
    ...eDependencies,
    ...evaluateDependencies,
    ...expDependencies,
    ...factorialDependencies,
    ...gcdDependencies,
    ...lcmDependencies,
    ...log10Dependencies,
    ...log1pDependencies,
    ...log2Dependencies,
    ...logDependencies,
    ...modDependencies,
    ...multiplyDependencies,
    ...nthRootDependencies,
    ...piDependencies,
    ...powDependencies,
    ...roundDependencies,
    ...signDependencies,
    ...sqrtDependencies,
    ...subtractDependencies
  });

  protected async run(): Promise<string | undefined> {
    const searchInput = getElement<HTMLInputElement>("q");
    const node = Calculator.math.parse(searchInput.value);

    try {
      return `${node.toString()} = ${node.evaluate()}`;
    } catch {
      // not a compatible math expression
      return;
    }
  }

  protected async post(result: string): Promise<void> {
    appendAnswerElement(result);
  }
}
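The dependency-spread pattern above is what keeps the calculator chunk small: only the listed mathjs functions end up in the bundle. A stripped-down sketch of the same idea outside the Plugin machinery (illustrative only, not part of this diff):

// Illustrative sketch of the mathjs "dependencies" pattern used above.
import { addDependencies, create, evaluateDependencies, multiplyDependencies } from "mathjs/number";

// create() assembles a math instance containing only the listed functions,
// so unused parts of mathjs can be tree-shaken out of the bundle.
const math = create({ ...addDependencies, ...evaluateDependencies, ...multiplyDependencies });

console.debug(math.evaluate("2 + 3 * 4")); // 14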
client/simple/src/js/plugin/InfiniteScroll.ts (new file, 110 lines)

@@ -0,0 +1,110 @@
// SPDX-License-Identifier: AGPL-3.0-or-later

import { Plugin } from "../Plugin.ts";
import { http, settings } from "../toolkit.ts";
import { assertElement } from "../util/assertElement.ts";
import { getElement } from "../util/getElement.ts";

/**
 * Automatically loads the next page when scrolling to bottom of the current page.
 */
export default class InfiniteScroll extends Plugin {
  public constructor() {
    super("infiniteScroll");
  }

  protected async run(): Promise<void> {
    const resultsElement = getElement<HTMLElement>("results");

    const onlyImages: boolean = resultsElement.classList.contains("only_template_images");
    const observedSelector = "article.result:last-child";

    const spinnerElement = document.createElement("div");
    spinnerElement.className = "loader";

    const loadNextPage = async (callback: () => void): Promise<void> => {
      const searchForm = document.querySelector<HTMLFormElement>("#search");
      assertElement(searchForm);

      const form = document.querySelector<HTMLFormElement>("#pagination form.next_page");
      assertElement(form);

      const action = searchForm.getAttribute("action");
      if (!action) {
        throw new Error("Form action not defined");
      }

      const paginationElement = document.querySelector<HTMLElement>("#pagination");
      assertElement(paginationElement);

      paginationElement.replaceChildren(spinnerElement);

      try {
        const res = await http("POST", action, { body: new FormData(form) });
        const nextPage = await res.text();
        if (!nextPage) return;

        const nextPageDoc = new DOMParser().parseFromString(nextPage, "text/html");
        const articleList = nextPageDoc.querySelectorAll<HTMLElement>("#urls article");
        const nextPaginationElement = nextPageDoc.querySelector<HTMLElement>("#pagination");

        document.querySelector("#pagination")?.remove();

        const urlsElement = document.querySelector<HTMLElement>("#urls");
        if (!urlsElement) {
          throw new Error("URLs element not found");
        }

        if (articleList.length > 0 && !onlyImages) {
          // do not add <hr> element when there are only images
          urlsElement.appendChild(document.createElement("hr"));
        }

        urlsElement.append(...articleList);

        if (nextPaginationElement) {
          const results = document.querySelector<HTMLElement>("#results");
          results?.appendChild(nextPaginationElement);
          callback();
        }
      } catch (error) {
        console.error("Error loading next page:", error);

        const errorElement = Object.assign(document.createElement("div"), {
          textContent: settings.translations?.error_loading_next_page ?? "Error loading next page",
          className: "dialog-error"
        });
        errorElement.setAttribute("role", "alert");
        document.querySelector("#pagination")?.replaceChildren(errorElement);
      }
    };

    const intersectionObserveOptions: IntersectionObserverInit = {
      rootMargin: "320px"
    };

    const observer: IntersectionObserver = new IntersectionObserver(async (entries: IntersectionObserverEntry[]) => {
      const [paginationEntry] = entries;

      if (paginationEntry?.isIntersecting) {
        observer.unobserve(paginationEntry.target);

        await loadNextPage(() => {
          const nextObservedElement = document.querySelector<HTMLElement>(observedSelector);
          if (nextObservedElement) {
            observer.observe(nextObservedElement);
          }
        });
      }
    }, intersectionObserveOptions);

    const initialObservedElement: HTMLElement | null = document.querySelector<HTMLElement>(observedSelector);
    if (initialObservedElement) {
      observer.observe(initialObservedElement);
    }
  }

  protected async post(): Promise<void> {
    // noop
  }
}
client/simple/src/js/plugin/MapView.ts (new file, 90 lines)

@@ -0,0 +1,90 @@
// SPDX-License-Identifier: AGPL-3.0-or-later

import "ol/ol.css?inline";
import { Feature, Map as OlMap, View } from "ol";
import { GeoJSON } from "ol/format";
import { Point } from "ol/geom";
import { Tile as TileLayer, Vector as VectorLayer } from "ol/layer";
import { fromLonLat } from "ol/proj";
import { OSM, Vector as VectorSource } from "ol/source";
import { Circle, Fill, Stroke, Style } from "ol/style";
import { Plugin } from "../Plugin.ts";

/**
 * MapView
 */
export default class MapView extends Plugin {
  private readonly map: HTMLElement;

  public constructor(map: HTMLElement) {
    super("mapView");

    this.map = map;
  }

  protected async run(): Promise<void> {
    const { leafletTarget: target, mapLon, mapLat, mapGeojson } = this.map.dataset;

    const lon = Number.parseFloat(mapLon || "0");
    const lat = Number.parseFloat(mapLat || "0");
    const view = new View({ maxZoom: 16, enableRotation: false });
    const map = new OlMap({
      target: target,
      layers: [new TileLayer({ source: new OSM({ maxZoom: 16 }) })],
      view: view
    });

    try {
      const markerSource = new VectorSource({
        features: [
          new Feature({
            geometry: new Point(fromLonLat([lon, lat]))
          })
        ]
      });

      const markerLayer = new VectorLayer({
        source: markerSource,
        style: new Style({
          image: new Circle({
            radius: 6,
            fill: new Fill({ color: "#3050ff" })
          })
        })
      });

      map.addLayer(markerLayer);
    } catch (error) {
      console.error("Failed to create marker layer:", error);
    }

    if (mapGeojson) {
      try {
        const geoSource = new VectorSource({
          features: new GeoJSON().readFeatures(JSON.parse(mapGeojson), {
            dataProjection: "EPSG:4326",
            featureProjection: "EPSG:3857"
          })
        });

        const geoLayer = new VectorLayer({
          source: geoSource,
          style: new Style({
            stroke: new Stroke({ color: "#3050ff", width: 2 }),
            fill: new Fill({ color: "#3050ff33" })
          })
        });

        map.addLayer(geoLayer);

        view.fit(geoSource.getExtent(), { padding: [20, 20, 20, 20] });
      } catch (error) {
        console.error("Failed to create GeoJSON layer:", error);
      }
    }
  }

  protected async post(): Promise<void> {
    // noop
  }
}
client/simple/src/js/router.ts (new file, 69 lines)

@@ -0,0 +1,69 @@
// SPDX-License-Identifier: AGPL-3.0-or-later

import { load } from "./loader.ts";
import { Endpoints, endpoint, listen, ready, settings } from "./toolkit.ts";

ready(() => {
  document.documentElement.classList.remove("no-js");
  document.documentElement.classList.add("js");

  listen("click", ".close", function (this: HTMLElement) {
    (this.parentNode as HTMLElement)?.classList.add("invisible");
  });

  listen("click", ".searxng_init_map", async function (this: HTMLElement, event: Event) {
    event.preventDefault();
    this.classList.remove("searxng_init_map");

    load(() => import("./plugin/MapView.ts").then(({ default: Plugin }) => new Plugin(this)), {
      on: "endpoint",
      where: [Endpoints.results]
    });
  });

  if (settings.plugins?.includes("infiniteScroll")) {
    load(() => import("./plugin/InfiniteScroll.ts").then(({ default: Plugin }) => new Plugin()), {
      on: "endpoint",
      where: [Endpoints.results]
    });
  }

  if (settings.plugins?.includes("calculator")) {
    load(() => import("./plugin/Calculator.ts").then(({ default: Plugin }) => new Plugin()), {
      on: "endpoint",
      where: [Endpoints.results]
    });
  }
});

ready(
  () => {
    void import("./main/keyboard.ts");
    void import("./main/search.ts");

    if (settings.autocomplete) {
      void import("./main/autocomplete.ts");
    }
  },
  { on: [endpoint === Endpoints.index] }
);

ready(
  () => {
    void import("./main/keyboard.ts");
    void import("./main/results.ts");
    void import("./main/search.ts");

    if (settings.autocomplete) {
      void import("./main/autocomplete.ts");
    }
  },
  { on: [endpoint === Endpoints.results] }
);

ready(
  () => {
    void import("./main/preferences.ts");
  },
  { on: [endpoint === Endpoints.preferences] }
);
@@ -1,16 +1,16 @@
 // SPDX-License-Identifier: AGPL-3.0-or-later

-import type { KeyBindingLayout } from "../main/keyboard.ts";
+import type { KeyBindingLayout } from "./main/keyboard.ts";

 // synced with searx/webapp.py get_client_settings
 type Settings = {
+  plugins?: string[];
   advanced_search?: boolean;
   autocomplete?: string;
   autocomplete_min?: number;
   doi_resolver?: string;
   favicon_resolver?: string;
   hotkeys?: KeyBindingLayout;
-  infinite_scroll?: boolean;
   method?: "GET" | "POST";
   query_in_title?: boolean;
   results_on_new_tab?: boolean;
@@ -32,8 +32,6 @@ type ReadyOptions = {
   on?: (boolean | undefined)[];
 };

-type AssertElement = (element?: HTMLElement | null) => asserts element is HTMLElement;
-
 export type EndpointsKeys = keyof typeof Endpoints;

 export const Endpoints = {
@@ -73,12 +71,6 @@ const getSettings = (): Settings => {
   }
 };

-export const assertElement: AssertElement = (element?: HTMLElement | null): asserts element is HTMLElement => {
-  if (!element) {
-    throw new Error("Bad assertion: DOM element not found");
-  }
-};
-
 export const http = async (method: string, url: string | URL, options?: HTTPOptions): Promise<Response> => {
   const controller = new AbortController();
   const timeoutId = setTimeout(() => controller.abort(), options?.timeout ?? 30_000);
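The `http` helper is cut off here; only its AbortController setup is visible in this diff. The timeout wiring follows the standard abort-on-timer pattern, sketched below in a self-contained form. This is an illustrative sketch only: the real helper's body, and any HTTPOptions fields beyond `timeout` and `body`, are assumptions rather than content of this changeset.

// Illustrative sketch of the AbortController timeout pattern;
// the actual http() in toolkit.ts is only partially shown above.
const fetchWithTimeout = async (url: string, timeout = 30_000): Promise<Response> => {
  const controller = new AbortController();
  // abort the request if it is still pending after `timeout` milliseconds
  const timeoutId = setTimeout(() => controller.abort(), timeout);

  try {
    return await fetch(url, { signal: controller.signal });
  } finally {
    // always clear the timer so it cannot fire after the request has settled
    clearTimeout(timeoutId);
  }
};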
client/simple/src/js/util/appendAnswerElement.ts (new file, 34 lines)

@@ -0,0 +1,34 @@
// SPDX-License-Identifier: AGPL-3.0-or-later

import { getElement } from "./getElement.ts";

export const appendAnswerElement = (element: HTMLElement | string | number): void => {
  const results = getElement<HTMLDivElement>("results");

  // ./searx/templates/elements/answers.html
  let answers = getElement<HTMLDivElement>("answers", { assert: false });
  if (!answers) {
    // what is this?
    const answersTitle = document.createElement("h4");
    answersTitle.setAttribute("class", "title");
    answersTitle.setAttribute("id", "answers-title");
    answersTitle.textContent = "Answers : ";

    answers = document.createElement("div");
    answers.setAttribute("id", "answers");
    answers.setAttribute("role", "complementary");
    answers.setAttribute("aria-labelledby", "answers-title");
    answers.appendChild(answersTitle);
  }

  if (!(element instanceof HTMLElement)) {
    const span = document.createElement("span");
    span.innerHTML = element.toString();
    // biome-ignore lint/style/noParameterAssign: TODO
    element = span;
  }

  answers.appendChild(element);

  results.insertAdjacentElement("afterbegin", answers);
};
client/simple/src/js/util/assertElement.ts (new file, 8 lines)

@@ -0,0 +1,8 @@
// SPDX-License-Identifier: AGPL-3.0-or-later

type AssertElement = <T>(element?: T | null) => asserts element is T;
export const assertElement: AssertElement = <T>(element?: T | null): asserts element is T => {
  if (!element) {
    throw new Error("DOM element not found");
  }
};
client/simple/src/js/util/getElement.ts (new file, 21 lines)

@@ -0,0 +1,21 @@
// SPDX-License-Identifier: AGPL-3.0-or-later

import { assertElement } from "./assertElement.ts";

type Options = {
  assert?: boolean;
};

export function getElement<T>(id: string, options?: { assert: true }): T;
export function getElement<T>(id: string, options?: { assert: false }): T | null;
export function getElement<T>(id: string, options: Options = {}): T | null {
  options.assert ??= true;

  const element = document.getElementById(id) as T | null;

  if (options.assert) {
    assertElement(element);
  }

  return element;
}
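In short: the default call asserts and returns a non-null T, while passing { assert: false } returns T | null for optional elements, as appendAnswerElement does for "#answers". A small illustrative sketch (not part of this diff, though the "q" and "answers" ids do appear in the code above):

// Illustrative usage sketch.
import { getElement } from "./getElement.ts";

// asserting form: throws "DOM element not found" if #q is missing
const searchInput = getElement<HTMLInputElement>("q");
searchInput.focus();

// non-asserting form: returns null instead of throwing
const answersPanel = getElement<HTMLDivElement>("answers", { assert: false });
if (answersPanel) {
  answersPanel.classList.add("visible");
}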
@@ -1,19 +1,16 @@
|
||||
// SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
|
||||
iframe[src^="https://w.soundcloud.com"]
|
||||
{
|
||||
iframe[src^="https://w.soundcloud.com"] {
|
||||
height: 120px;
|
||||
}
|
||||
|
||||
iframe[src^="https://www.deezer.com"]
|
||||
{
|
||||
iframe[src^="https://www.deezer.com"] {
|
||||
// The real size is 92px, but 94px are needed to avoid an inner scrollbar of
|
||||
// the embedded HTML.
|
||||
height: 94px;
|
||||
}
|
||||
|
||||
iframe[src^="https://www.mixcloud.com"]
|
||||
{
|
||||
iframe[src^="https://www.mixcloud.com"] {
|
||||
// the embedded player from mixcloud has some quirks: initially there is an
|
||||
// issue with an image URL that is blocked since it is a Cross-Origin
|
||||
// request. The alternative text (<img alt='Mixcloud Logo'> then causes an
|
||||
@@ -23,19 +20,16 @@ iframe[src^="https://www.mixcloud.com"]
|
||||
height: 250px;
|
||||
}
|
||||
|
||||
iframe[src^="https://bandcamp.com/EmbeddedPlayer"]
|
||||
{
|
||||
iframe[src^="https://bandcamp.com/EmbeddedPlayer"] {
|
||||
// show playlist
|
||||
height: 350px;
|
||||
}
|
||||
|
||||
iframe[src^="https://bandcamp.com/EmbeddedPlayer/track"]
|
||||
{
|
||||
iframe[src^="https://bandcamp.com/EmbeddedPlayer/track"] {
|
||||
// hide playlist
|
||||
height: 120px;
|
||||
}
|
||||
|
||||
iframe[src^="https://genius.com/songs"]
|
||||
{
|
||||
iframe[src^="https://genius.com/songs"] {
|
||||
height: 65px;
|
||||
}
|
||||
|
||||
@@ -8,7 +8,7 @@
|
||||
text-align: center;
|
||||
|
||||
.title {
|
||||
background: url("../img/searxng.png") no-repeat;
|
||||
background: url("./img/searxng.png") no-repeat;
|
||||
min-height: 4rem;
|
||||
margin: 4rem auto;
|
||||
background-position: center;
|
||||
|
||||
@@ -46,39 +46,34 @@ export default {
|
||||
sourcemap: true,
|
||||
rolldownOptions: {
|
||||
input: {
|
||||
// build CSS files
|
||||
"searxng-ltr.css": `${PATH.src}/less/style-ltr.less`,
|
||||
"searxng-rtl.css": `${PATH.src}/less/style-rtl.less`,
|
||||
"rss.css": `${PATH.src}/less/rss.less`,
|
||||
// entrypoint
|
||||
core: `${PATH.src}/js/index.ts`,
|
||||
|
||||
// build script files
|
||||
"searxng.core": `${PATH.src}/js/core/index.ts`,
|
||||
|
||||
// ol pkg
|
||||
ol: `${PATH.src}/js/pkg/ol.ts`,
|
||||
"ol.css": `${PATH.modules}/ol/ol.css`
|
||||
// stylesheets
|
||||
ltr: `${PATH.src}/less/style-ltr.less`,
|
||||
rtl: `${PATH.src}/less/style-rtl.less`,
|
||||
rss: `${PATH.src}/less/rss.less`
|
||||
},
|
||||
|
||||
// file naming conventions / pathnames are relative to outDir (PATH.dist)
|
||||
output: {
|
||||
entryFileNames: "js/[name].min.js",
|
||||
chunkFileNames: "js/[name].min.js",
|
||||
entryFileNames: "sxng-[name].min.js",
|
||||
chunkFileNames: "chunk/[hash].min.js",
|
||||
assetFileNames: ({ names }: PreRenderedAsset): string => {
|
||||
const [name] = names;
|
||||
|
||||
const extension = name?.split(".").pop();
|
||||
switch (extension) {
|
||||
switch (name?.split(".").pop()) {
|
||||
case "css":
|
||||
return "css/[name].min[extname]";
|
||||
case "js":
|
||||
return "js/[name].min[extname]";
|
||||
case "png":
|
||||
case "svg":
|
||||
return "img/[name][extname]";
|
||||
return "sxng-[name].min[extname]";
|
||||
default:
|
||||
console.warn("Unknown asset:", name);
|
||||
return "[name][extname]";
|
||||
return "sxng-[name][extname]";
|
||||
}
|
||||
},
|
||||
sanitizeFileName: (name: string): string => {
|
||||
return name
|
||||
.normalize("NFD")
|
||||
.replace(/[^a-zA-Z0-9.-]/g, "_")
|
||||
.toLowerCase();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -124,14 +124,17 @@ engine is shown. Most of the options have a default value or even are optional.
|
||||
|
||||
``api_key`` : optional
|
||||
In a few cases, using an API requires a secret key. How to obtain one
|
||||
is described in the file.
|
||||
is described in the file. Engines that require an API key are set to
|
||||
``inactive: true`` by default. To enable such an engine, provide the API key
|
||||
and set ``inactive: false``.
|
||||
|
||||
``disabled`` : optional
|
||||
Disables the engine by default without deleting it. It still allows the user
|
||||
to manually activate it in the settings.
|
||||
|
||||
``inactive``: optional
|
||||
Remove the engine from the settings (*disabled & removed*).
|
||||
Remove the engine from the settings (*disabled & removed*). This defaults to ``true`` for engines
|
||||
that require an API key; see the ``api_key`` section if you want to enable such an engine.
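For illustration, a minimal ``settings.yml`` sketch for enabling a hypothetical API-key engine (the engine name and key below are placeholders, not real values):

.. code-block:: yaml

  engines:
    - name: example engine
      engine: example_engine
      api_key: "YOUR-SECRET-KEY"
      inactive: false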
|
||||
|
||||
``language`` : optional
|
||||
If you want to use another language for a specific engine, you can define it
|
||||
|
||||
@@ -69,6 +69,9 @@ The built-in plugins are all located in the namespace `searx.plugins`.
|
||||
searx.plugins.calculator.SXNGPlugin:
|
||||
active: true
|
||||
|
||||
searx.plugins.infinite_scroll.SXNGPlugin:
|
||||
active: false
|
||||
|
||||
searx.plugins.hash_plugin.SXNGPlugin:
|
||||
active: true
|
||||
|
||||
|
||||
@@ -12,7 +12,6 @@
|
||||
ui:
|
||||
default_locale: ""
|
||||
query_in_title: false
|
||||
infinite_scroll: false
|
||||
center_alignment: false
|
||||
cache_url: https://web.archive.org/web/
|
||||
default_theme: simple
|
||||
@@ -32,9 +31,6 @@
|
||||
When true, the result page's title contains the query. This decreases
privacy, since the browser can record the page titles.
|
||||
|
||||
``infinite_scroll``:
|
||||
When true, automatically loads the next page when scrolling to bottom of the current page.
|
||||
|
||||
``center_alignment`` : default ``false``
|
||||
When enabled, the results are centered instead of being in the left (or RTL)
|
||||
side of the screen. This setting only affects the *desktop layout*
|
||||
|
||||
@@ -1,8 +0,0 @@
|
||||
.. _voidlinux mullvad_leta:
|
||||
|
||||
============
|
||||
Mullvad-Leta
|
||||
============
|
||||
|
||||
.. automodule:: searx.engines.mullvad_leta
|
||||
:members:
|
||||
@@ -10,6 +10,7 @@ Built-in Plugins
|
||||
calculator
|
||||
hash_plugin
|
||||
hostnames
|
||||
infinite_scroll
|
||||
self_info
|
||||
tor_check
|
||||
unit_converter
|
||||
|
||||
8
docs/dev/plugins/infinite_scroll.rst
Normal file
@@ -0,0 +1,8 @@
|
||||
.. _plugins.infinite_scroll:
|
||||
|
||||
===============
|
||||
Infinite scroll
|
||||
===============
|
||||
|
||||
.. automodule:: searx.plugins.infinite_scroll
|
||||
:members:
|
||||
@@ -4,15 +4,33 @@
|
||||
Search API
|
||||
==========
|
||||
|
||||
The search supports both ``GET`` and ``POST``.
|
||||
SearXNG supports querying via a simple HTTP API.
|
||||
Two endpoints, ``/`` and ``/search``, are supported for both GET and POST methods.
|
||||
The GET method expects parameters as URL query parameters, while the POST method expects parameters as form data.
|
||||
|
||||
Furthermore, two endpoints ``/`` and ``/search`` are available for querying.
|
||||
If you want to consume the results as JSON, CSV, or RSS, you need to set the
|
||||
``format`` parameter accordingly. Supported formats are defined in ``settings.yml``, under the ``search`` section.
|
||||
Requesting a format that is not enabled will return a 403 Forbidden error. Be aware that many public instances have these formats disabled.
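As a sketch, the output formats can be enabled in ``settings.yml`` like this (many public instances only keep ``html``):

.. code-block:: yaml

  search:
    formats:
      - html
      - csv
      - json
      - rss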
|
||||
|
||||
|
||||
Endpoints:
|
||||
|
||||
``GET /``
|
||||
|
||||
``GET /search``
|
||||
|
||||
``POST /``
|
||||
``POST /search``
|
||||
|
||||
Example cURL calls:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
curl 'https://searx.example.org/search?q=searxng&format=json'
|
||||
|
||||
curl -X POST 'https://searx.example.org/search' -d 'q=searxng&format=csv'
|
||||
|
||||
curl -L -X POST -d 'q=searxng&format=json' 'https://searx.example.org/'
|
||||
|
||||
Parameters
|
||||
==========
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
[tools]
|
||||
# minimal version we support
|
||||
python = "3.10"
|
||||
node = "24.3.0"
|
||||
node = "25"
|
||||
go = "1.24.5"
|
||||
shellcheck = "0.11.0"
|
||||
# python 3.10 uses 3.40.1 (on mac and win)
|
||||
|
||||
@@ -2,9 +2,9 @@ mock==5.2.0
|
||||
nose2[coverage_plugin]==0.15.1
|
||||
cov-core==1.15.0
|
||||
black==25.9.0
|
||||
pylint==4.0.3
|
||||
pylint==4.0.4
|
||||
splinter==0.21.0
|
||||
selenium==4.38.0
|
||||
selenium==4.39.0
|
||||
Pallets-Sphinx-Themes==2.3.0
|
||||
Sphinx==8.2.3 ; python_version >= '3.11'
|
||||
Sphinx==8.1.3 ; python_version < '3.11'
|
||||
@@ -24,5 +24,5 @@ coloredlogs==15.0.1
|
||||
docutils>=0.21.2
|
||||
parameterized==0.9.0
|
||||
granian[reload]==2.6.0
|
||||
basedpyright==1.34.0
|
||||
types-lxml==2025.8.25
|
||||
basedpyright==1.36.2
|
||||
types-lxml==2025.11.25
|
||||
|
||||
@@ -9,12 +9,13 @@ python-dateutil==2.9.0.post0
|
||||
pyyaml==6.0.3
|
||||
httpx[http2]==0.28.1
|
||||
httpx-socks[asyncio]==0.10.0
|
||||
sniffio==1.3.1
|
||||
valkey==6.1.1
|
||||
markdown-it-py==3.0.0
|
||||
fasttext-predict==0.9.2.4
|
||||
tomli==2.3.0; python_version < '3.11'
|
||||
msgspec==0.19.0
|
||||
typer-slim==0.20.0
|
||||
msgspec==0.20.0
|
||||
typer-slim==0.21.0
|
||||
isodate==0.7.2
|
||||
whitenoise==6.11.0
|
||||
typing-extensions==4.15.0
|
||||
|
||||
@@ -5,10 +5,6 @@
|
||||
----
|
||||
"""
|
||||
|
||||
# Struct fields aren't discovered in Python 3.14
|
||||
# - https://github.com/searxng/searxng/issues/5284
|
||||
from __future__ import annotations
|
||||
|
||||
__all__ = ["ExpireCacheCfg", "ExpireCacheStats", "ExpireCache", "ExpireCacheSQLite"]
|
||||
|
||||
import abc
|
||||
|
||||
File diff suppressed because it is too large
@@ -321,6 +321,7 @@
|
||||
"ja": "アルゼンチン・ペソ",
|
||||
"ko": "아르헨티나 페소",
|
||||
"lt": "Argentinos pesas",
|
||||
"lv": "Argentīnas peso",
|
||||
"ms": "Peso Argentina",
|
||||
"nl": "Argentijnse peso",
|
||||
"oc": "Peso",
|
||||
@@ -344,6 +345,7 @@
|
||||
"af": "Australiese dollar",
|
||||
"ar": "دولار أسترالي",
|
||||
"bg": "Австралийски долар",
|
||||
"bn": "অস্ট্রেলীয় ডলার",
|
||||
"ca": "dòlar australià",
|
||||
"cs": "australský dolar",
|
||||
"cy": "Doler Awstralia",
|
||||
@@ -565,6 +567,7 @@
|
||||
"fi": "Bangladeshin taka",
|
||||
"fr": "taka",
|
||||
"ga": "taka na Banglaidéise",
|
||||
"gl": "taka",
|
||||
"he": "טאקה",
|
||||
"hr": "Bangladeška taka",
|
||||
"hu": "bangladesi taka",
|
||||
@@ -645,6 +648,7 @@
|
||||
"fi": "Bahrainin dinaari",
|
||||
"fr": "dinar bahreïnien",
|
||||
"ga": "dinar Bhairéin",
|
||||
"gl": "dinar de Bahrain",
|
||||
"he": "דינר בחרייני",
|
||||
"hr": "Bahreinski dinar",
|
||||
"hu": "bahreini dinár",
|
||||
@@ -803,6 +807,7 @@
|
||||
"ja": "ボリビアーノ",
|
||||
"ko": "볼리비아 볼리비아노",
|
||||
"lt": "Bolivianas",
|
||||
"lv": "Bolīvijas boliviano",
|
||||
"ms": "Boliviano",
|
||||
"nl": "Boliviaanse boliviano",
|
||||
"oc": "Boliviano",
|
||||
@@ -967,7 +972,7 @@
|
||||
"et": "Botswana pula",
|
||||
"eu": "Pula",
|
||||
"fi": "Pula",
|
||||
"fr": "pula",
|
||||
"fr": "Pula",
|
||||
"ga": "pula na Botsuáine",
|
||||
"gl": "Pula",
|
||||
"he": "פולה",
|
||||
@@ -1273,6 +1278,10 @@
|
||||
"uk": "Чилійський песо",
|
||||
"vi": "Peso Chile"
|
||||
},
|
||||
"CNH": {
|
||||
"en": "renminbi (offshore)",
|
||||
"es": "yuan offshore"
|
||||
},
|
||||
"CNY": {
|
||||
"af": "Renminbi",
|
||||
"ar": "رنمينبي",
|
||||
@@ -1285,7 +1294,7 @@
|
||||
"da": "Renminbi",
|
||||
"de": "Renminbi",
|
||||
"dv": "ރެންމިބީ",
|
||||
"en": "renminbi",
|
||||
"en": "CNY",
|
||||
"eo": "Renminbio",
|
||||
"es": "yuan chino",
|
||||
"et": "Renminbi",
|
||||
@@ -1329,7 +1338,7 @@
|
||||
"cs": "Kolumbijské peso",
|
||||
"da": "Colombiansk peso",
|
||||
"de": "kolumbianischer Peso",
|
||||
"en": "peso",
|
||||
"en": "Colombian peso",
|
||||
"eo": "kolombia peso",
|
||||
"es": "peso",
|
||||
"et": "Colombia peeso",
|
||||
@@ -1413,7 +1422,7 @@
|
||||
"cy": "peso (Ciwba)",
|
||||
"da": "Cubanske pesos",
|
||||
"de": "kubanischer Peso",
|
||||
"en": "peso",
|
||||
"en": "Cuban peso",
|
||||
"eo": "kuba peso",
|
||||
"es": "peso",
|
||||
"fi": "Kuuban peso",
|
||||
@@ -1459,6 +1468,7 @@
|
||||
"fi": "Kap Verden escudo",
|
||||
"fr": "escudo cap-verdien",
|
||||
"ga": "escudo Rinn Verde",
|
||||
"gl": "escudo caboverdiano",
|
||||
"he": "אשקודו כף ורדי",
|
||||
"hr": "Zelenortski eskudo",
|
||||
"hu": "zöld-foki köztársasági escudo",
|
||||
@@ -1467,6 +1477,7 @@
|
||||
"ja": "カーボベルデ・エスクード",
|
||||
"ko": "카보베르데 이스쿠두",
|
||||
"lt": "Žaliojo Kyšulio eskudas",
|
||||
"lv": "Kaboverdes eskudo",
|
||||
"nl": "Kaapverdische escudo",
|
||||
"oc": "Escut de Cap Verd",
|
||||
"pl": "escudo Zielonego Przylądka",
|
||||
@@ -1567,7 +1578,7 @@
|
||||
"ar": "كرونة دنماركية",
|
||||
"bg": "Датска крона",
|
||||
"ca": "corona danesa",
|
||||
"cs": "Dánská koruna",
|
||||
"cs": "dánská koruna",
|
||||
"cy": "Krone Danaidd",
|
||||
"da": "dansk krone",
|
||||
"de": "dänische Krone",
|
||||
@@ -1806,7 +1817,7 @@
|
||||
"bg": "евро",
|
||||
"bn": "ইউরো",
|
||||
"ca": "euro",
|
||||
"cs": "Euro",
|
||||
"cs": "euro",
|
||||
"cy": "Ewro",
|
||||
"da": "Euro",
|
||||
"de": "Euro",
|
||||
@@ -1955,7 +1966,7 @@
|
||||
"lt": "svaras sterlingų",
|
||||
"lv": "sterliņu mārciņa",
|
||||
"ms": "paun sterling",
|
||||
"nl": "pond sterling",
|
||||
"nl": "Britse pond",
|
||||
"oc": "liure esterlina",
|
||||
"pa": "ਪਾਊਂਡ ਸਟਰਲਿੰਗ",
|
||||
"pl": "funt szterling",
|
||||
@@ -2026,7 +2037,7 @@
|
||||
"eo": "ganaa cedio",
|
||||
"es": "cedi",
|
||||
"fi": "Cedi",
|
||||
"fr": "cedi",
|
||||
"fr": "Cedi",
|
||||
"ga": "cedi",
|
||||
"gl": "Cedi",
|
||||
"he": "סדי גאני",
|
||||
@@ -2037,6 +2048,7 @@
|
||||
"ja": "セディ",
|
||||
"ko": "가나 세디",
|
||||
"lt": "Sedis",
|
||||
"lv": "Ganas sedi",
|
||||
"ms": "Cedi Ghana",
|
||||
"nl": "Ghanese cedi",
|
||||
"oc": "Cedi",
|
||||
@@ -2064,7 +2076,7 @@
|
||||
"es": "libra gibraltareña",
|
||||
"et": "Gibraltari nael",
|
||||
"fi": "Gibraltarin punta",
|
||||
"fr": "livre de Gibraltar",
|
||||
"fr": "Livre de Gibraltar",
|
||||
"ga": "punt Ghiobráltar",
|
||||
"gl": "Libra de Xibraltar",
|
||||
"he": "לירה גיברלטרית",
|
||||
@@ -2151,6 +2163,7 @@
|
||||
"ja": "ギニア・フラン",
|
||||
"ko": "기니 프랑",
|
||||
"lt": "Gvinėjos frankas",
|
||||
"lv": "Gvinejas franks",
|
||||
"ms": "Franc Guinea",
|
||||
"nl": "Guineese frank",
|
||||
"oc": "Franc guinean",
|
||||
@@ -2967,6 +2980,7 @@
|
||||
"ms": "Won Korea Utara",
|
||||
"nl": "Noord-Koreaanse won",
|
||||
"pa": "ਉੱਤਰੀ ਕੋਰੀਆਈ ਵੌਨ",
|
||||
"pap": "won nortkoreano",
|
||||
"pl": "Won północnokoreański",
|
||||
"pt": "won norte-coreano",
|
||||
"ro": "Won nord-coreean",
|
||||
@@ -3576,7 +3590,7 @@
|
||||
"cy": "tögrög Mongolia",
|
||||
"da": "Tugrik",
|
||||
"de": "Tögrög",
|
||||
"en": "tugrik",
|
||||
"en": "Mongolian tögrög",
|
||||
"eo": "mongola tugriko",
|
||||
"es": "tugrik mongol",
|
||||
"fi": "Mongolian tugrik",
|
||||
@@ -3793,7 +3807,7 @@
|
||||
"bg": "Мексиканско песо",
|
||||
"ca": "peso mexicà",
|
||||
"cs": "Mexické peso",
|
||||
"cy": "peso (Mecsico)",
|
||||
"cy": "peso",
|
||||
"de": "Mexikanischer Peso",
|
||||
"en": "peso",
|
||||
"eo": "meksika peso",
|
||||
@@ -3813,6 +3827,7 @@
|
||||
"ja": "メキシコ・ペソ",
|
||||
"ko": "멕시코 페소",
|
||||
"lt": "Meksikos pesas",
|
||||
"lv": "Meksikas peso",
|
||||
"ms": "Peso Mexico",
|
||||
"nl": "Mexicaanse peso",
|
||||
"pa": "ਮੈਕਸੀਕੀ ਪੇਸੋ",
|
||||
@@ -3828,7 +3843,7 @@
|
||||
"tr": "Meksika pesosu",
|
||||
"tt": "Миксикә писысы",
|
||||
"uk": "мексиканський песо",
|
||||
"vi": "Peso Mexico"
|
||||
"vi": "peso"
|
||||
},
|
||||
"MXV": {
|
||||
"de": "UNIDAD DE INVERSION",
|
||||
@@ -3841,7 +3856,7 @@
|
||||
"ar": "رينغيت ماليزي",
|
||||
"bg": "Малайзийски рингит",
|
||||
"ca": "ringgit",
|
||||
"cs": "Malajsijský ringgit",
|
||||
"cs": "malajsijský ringgit",
|
||||
"cy": "ringgit Maleisia",
|
||||
"de": "Ringgit",
|
||||
"en": "Malaysian ringgit",
|
||||
@@ -3882,7 +3897,7 @@
|
||||
"MZN": {
|
||||
"ar": "مثقال موزنبيقي",
|
||||
"ca": "metical",
|
||||
"cs": "Mosambický metical",
|
||||
"cs": "mosambický metical",
|
||||
"cy": "Metical Mosambic",
|
||||
"da": "Metical",
|
||||
"de": "Metical",
|
||||
@@ -3975,6 +3990,7 @@
|
||||
"ja": "ナイラ",
|
||||
"ko": "나이지리아 나이라",
|
||||
"lt": "Naira",
|
||||
"lv": "Nigērijas naira",
|
||||
"ms": "Naira Nigeria",
|
||||
"nl": "Nigeriaanse naira",
|
||||
"oc": "Naira",
|
||||
@@ -4031,7 +4047,7 @@
|
||||
"ar": "كرونة نروجية",
|
||||
"bg": "норвежка крона",
|
||||
"ca": "corona noruega",
|
||||
"cs": "Norská koruna",
|
||||
"cs": "norská koruna",
|
||||
"cy": "krone Norwy",
|
||||
"da": "norsk krone",
|
||||
"de": "norwegische Krone",
|
||||
@@ -4258,6 +4274,7 @@
|
||||
"ja": "ヌエボ・ソル",
|
||||
"ko": "페루 솔",
|
||||
"lt": "Naujasis solis",
|
||||
"lv": "Peru sols",
|
||||
"ms": "Nuevo Sol Peru",
|
||||
"nl": "Peruviaanse sol",
|
||||
"oc": "Nuevo Sol",
|
||||
@@ -4660,7 +4677,7 @@
|
||||
"eo": "rusa rublo",
|
||||
"es": "rublo ruso",
|
||||
"et": "Venemaa rubla",
|
||||
"eu": "Errusiar errublo",
|
||||
"eu": "errusiar errublo",
|
||||
"fi": "Venäjän rupla",
|
||||
"fr": "rouble russe",
|
||||
"ga": "rúbal na Rúise",
|
||||
@@ -4744,6 +4761,7 @@
|
||||
"fi": "Saudi-Arabian rial",
|
||||
"fr": "riyal saoudien",
|
||||
"ga": "riyal na hAraibe Sádaí",
|
||||
"gl": "riyal saudita",
|
||||
"he": "ריאל סעודי",
|
||||
"hr": "Saudijski rijal",
|
||||
"hu": "szaúdi riál",
|
||||
@@ -4782,7 +4800,7 @@
|
||||
"en": "Solomon Islands dollar",
|
||||
"eo": "salomona dolaro",
|
||||
"es": "dólar de las Islas Salomón",
|
||||
"fi": "Salomonsaarten dollari",
|
||||
"fi": "Salomoninsaarten dollari",
|
||||
"fr": "dollar des îles Salomon",
|
||||
"ga": "dollar Oileáin Sholaimh",
|
||||
"gl": "Dólar das Illas Salomón",
|
||||
@@ -5018,6 +5036,7 @@
|
||||
"ja": "レオン",
|
||||
"ko": "시에라리온 레온",
|
||||
"lt": "leonė",
|
||||
"lv": "Sjerraleones leone",
|
||||
"ms": "leone",
|
||||
"nl": "Sierra Leoonse leone",
|
||||
"oc": "leone",
|
||||
@@ -5055,6 +5074,7 @@
|
||||
"ja": "ソマリア・シリング",
|
||||
"ko": "소말리아 실링",
|
||||
"lt": "Somalio šilingas",
|
||||
"lv": "Somālijas šiliņš",
|
||||
"ms": "Shilling Somalia",
|
||||
"nl": "Somalische shilling",
|
||||
"pl": "Szyling somalijski",
|
||||
@@ -5404,6 +5424,7 @@
|
||||
"oc": "dinar tunisian",
|
||||
"pl": "Dinar tunezyjski",
|
||||
"pt": "dinar tunisiano",
|
||||
"ro": "dinar tunisian",
|
||||
"ru": "тунисский динар",
|
||||
"sk": "Tuniský dinár",
|
||||
"sl": "tunizijski dinar",
|
||||
@@ -5500,7 +5521,7 @@
|
||||
"TTD": {
|
||||
"ar": "دولار ترينيداد وتوباغو",
|
||||
"bg": "Тринидадски и тобагски долар",
|
||||
"ca": "dòlar de Trinitat i Tobago",
|
||||
"ca": "dòlar de Trinidad i Tobago",
|
||||
"cs": "Dolar Trinidadu a Tobaga",
|
||||
"cy": "doler Trinidad a Thobago",
|
||||
"de": "Trinidad-und-Tobago-Dollar",
|
||||
@@ -5718,7 +5739,7 @@
|
||||
"lv": "ASV dolārs",
|
||||
"ml": "യുണൈറ്റഡ് സ്റ്റേറ്റ്സ് ഡോളർ",
|
||||
"ms": "Dolar Amerika Syarikat",
|
||||
"nl": "US dollar",
|
||||
"nl": "Amerikaanse dollar",
|
||||
"oc": "dolar american",
|
||||
"pa": "ਸੰਯੁਕਤ ਰਾਜ ਡਾਲਰ",
|
||||
"pap": "Dollar merikano",
|
||||
@@ -5744,7 +5765,7 @@
|
||||
"en": "US Dollar (Next day)"
|
||||
},
|
||||
"UYI": {
|
||||
"en": "Uruguay peso en Unidades Indexadas"
|
||||
"en": "Uruguay Peso en Unidades Indexadas"
|
||||
},
|
||||
"UYU": {
|
||||
"af": "Uruguaanse Peso",
|
||||
@@ -5813,6 +5834,7 @@
|
||||
"nl": "Oezbeekse sum",
|
||||
"oc": "som ozbèc",
|
||||
"pa": "ਉਜ਼ਬੇਕਿਸਤਾਨੀ ਸੋਮ",
|
||||
"pap": "som usbekistani",
|
||||
"pl": "Sum",
|
||||
"pt": "som usbeque",
|
||||
"ro": "Som uzbec",
|
||||
@@ -5838,6 +5860,7 @@
|
||||
"en": "sovereign bolivar",
|
||||
"es": "bolívar soberano",
|
||||
"fr": "bolivar souverain",
|
||||
"gl": "bolívar soberano",
|
||||
"hu": "venezuelai bolívar",
|
||||
"ja": "ボリバル・ソベラノ",
|
||||
"pt": "Bolívar soberano",
|
||||
@@ -6578,10 +6601,13 @@
|
||||
"R": "ZAR",
|
||||
"R$": "BRL",
|
||||
"RD$": "DOP",
|
||||
"RF": "RWF",
|
||||
"RM": "MYR",
|
||||
"RWF": "RWF",
|
||||
"Rf": "MVR",
|
||||
"Rp": "IDR",
|
||||
"Rs": "LKR",
|
||||
"R₣": "RWF",
|
||||
"S$": "SGD",
|
||||
"S/.": "PEN",
|
||||
"SI$": "SBD",
|
||||
@@ -6601,6 +6627,7 @@
|
||||
"Ush": "UGX",
|
||||
"VT": "VUV",
|
||||
"WS$": "WST",
|
||||
"XAF": "XAF",
|
||||
"XCG": "XCG",
|
||||
"XDR": "XDR",
|
||||
"Z$": "ZWL",
|
||||
@@ -6726,6 +6753,7 @@
|
||||
"argentinské peso": "ARS",
|
||||
"argentinski peso": "ARS",
|
||||
"argentinski pezo": "ARS",
|
||||
"argentīnas peso": "ARS",
|
||||
"ariari": "MGA",
|
||||
"ariari de madagascar": "MGA",
|
||||
"ariari de madagáscar": "MGA",
|
||||
@@ -7008,6 +7036,7 @@
|
||||
"birr etiopia": "ETB",
|
||||
"birr etíope": "ETB",
|
||||
"birr éthiopien": "ETB",
|
||||
"birr éthiopienne": "ETB",
|
||||
"birr habsyah": "ETB",
|
||||
"birr na haetóipe": "ETB",
|
||||
"birre da etiópia": "ETB",
|
||||
@@ -7045,6 +7074,7 @@
|
||||
"bolívar soberano": "VES",
|
||||
"bolívar sobirà": "VES",
|
||||
"bolíviai boliviano": "BOB",
|
||||
"bolīvijas boliviano": "BOB",
|
||||
"bosenská konvertibilní marka": "BAM",
|
||||
"bosna hersek değiştirilebilir markı": "BAM",
|
||||
"bosnia and herzegovina convertible mark": "BAM",
|
||||
@@ -7193,6 +7223,7 @@
|
||||
"ceatsal": "GTQ",
|
||||
"cebelitarık sterlini": "GIP",
|
||||
"cedi": "GHS",
|
||||
"cedi du ghana": "GHS",
|
||||
"cedi ghana": "GHS",
|
||||
"cedi ghanese": "GHS",
|
||||
"centr afrika franko": "XAF",
|
||||
@@ -7260,7 +7291,10 @@
|
||||
"chilensk peso": "CLP",
|
||||
"chilské peso": "CLP",
|
||||
"chinese renminbi": "CNY",
|
||||
"chinese yuan": "CNY",
|
||||
"chinese yuan": [
|
||||
"CNY",
|
||||
"CNH"
|
||||
],
|
||||
"chinesischer renminbi": "CNY",
|
||||
"ci$": "KYD",
|
||||
"cibuti frangı": "DJF",
|
||||
@@ -7270,6 +7304,7 @@
|
||||
"clp": "CLP",
|
||||
"clp$": "CLP",
|
||||
"clps": "CLP",
|
||||
"cnh": "CNH",
|
||||
"cny": "CNY",
|
||||
"co $": "COP",
|
||||
"co$": "COP",
|
||||
@@ -7510,7 +7545,6 @@
|
||||
"203"
|
||||
],
|
||||
"cирійський фунт": "SYP",
|
||||
"d.r.": "EGP",
|
||||
"da": "DZD",
|
||||
"dalase": "GMD",
|
||||
"dalasi": "GMD",
|
||||
@@ -8190,6 +8224,7 @@
|
||||
"HKD",
|
||||
"AUD"
|
||||
],
|
||||
"dollars barbados": "BBD",
|
||||
"dom$": "DOP",
|
||||
"dominga peso": "DOP",
|
||||
"dominicaanse peso": "DOP",
|
||||
@@ -8242,9 +8277,7 @@
|
||||
"dòlar de singapur": "SGD",
|
||||
"dòlar de surinam": "SRD",
|
||||
"dòlar de taiwan": "TWD",
|
||||
"dòlar de trinitat": "TTD",
|
||||
"dòlar de trinitat i tobago": "TTD",
|
||||
"dòlar de trinitat tobago": "TTD",
|
||||
"dòlar de trinidad i tobago": "TTD",
|
||||
"dòlar de zimbàbue": "ZWL",
|
||||
"dòlar del canadà": "CAD",
|
||||
"dòlar del carib oriental": "XCD",
|
||||
@@ -8406,7 +8439,6 @@
|
||||
"dólares canadenses": "CAD",
|
||||
"dólares estadounidenses": "USD",
|
||||
"dólares neozelandeses": "NZD",
|
||||
"dr": "EGP",
|
||||
"dram": "AMD",
|
||||
"dram armean": "AMD",
|
||||
"dram armenia": "AMD",
|
||||
@@ -8434,7 +8466,6 @@
|
||||
"džibučio frankas": "DJF",
|
||||
"džibutski franak": "DJF",
|
||||
"džibutský frank": "DJF",
|
||||
"d£": "EGP",
|
||||
"e": "SZL",
|
||||
"e rupee": "INR",
|
||||
"e.m.u. 6": "XBB",
|
||||
@@ -8493,6 +8524,7 @@
|
||||
"ermenistan dramı": "AMD",
|
||||
"ern": "ERN",
|
||||
"erreal brasildar": "BRL",
|
||||
"errublo": "RUB",
|
||||
"errublo errusiar": "RUB",
|
||||
"errupia indiar": "INR",
|
||||
"errupia indonesiar": "IDR",
|
||||
@@ -8905,6 +8937,7 @@
|
||||
"gambijski dalasi": "GMD",
|
||||
"gambijský dalasi": "GMD",
|
||||
"ganaa cedio": "GHS",
|
||||
"ganas sedi": "GHS",
|
||||
"ganski cedi": "GHS",
|
||||
"gbp": "GBP",
|
||||
"gbp£": "GBP",
|
||||
@@ -9054,6 +9087,7 @@
|
||||
"gvatemalski kvecal": "GTQ",
|
||||
"gvatemalski quetzal": "GTQ",
|
||||
"gvinea franko": "GNF",
|
||||
"gvinejas franks": "GNF",
|
||||
"gvinejski franak": "GNF",
|
||||
"gvinejski frank": "GNF",
|
||||
"gvinėjos frankas": "GNF",
|
||||
@@ -9381,6 +9415,7 @@
|
||||
"kaaimaneilandse dollar": "KYD",
|
||||
"kaapverdische escudo": "CVE",
|
||||
"kaboverda eskudo": "CVE",
|
||||
"kaboverdes eskudo": "CVE",
|
||||
"kaiman dollar": "KYD",
|
||||
"kaimanu dolārs": "KYD",
|
||||
"kaimanu salu dolārs": "KYD",
|
||||
@@ -9790,6 +9825,7 @@
|
||||
"lari na seoirsia": "GEL",
|
||||
"lario": "GEL",
|
||||
"laris": "GEL",
|
||||
"lári": "GEL",
|
||||
"länsi afrikan cfa frangi": "XOF",
|
||||
"lbp": "LBP",
|
||||
"ld": "LYD",
|
||||
@@ -10214,6 +10250,7 @@
|
||||
"manat de turkmenistan": "TMT",
|
||||
"manat de turkmenistán": "TMT",
|
||||
"manat del turkmenistan": "TMT",
|
||||
"manat di azerbeidjan": "AZN",
|
||||
"manat do azerbaijão": "AZN",
|
||||
"manat na hasarbaiseáine": "AZN",
|
||||
"manat newydd tyrcmenestan": "TMT",
|
||||
@@ -10316,6 +10353,7 @@
|
||||
"meksika peso": "MXN",
|
||||
"meksika pesosu": "MXN",
|
||||
"meksikaanse peso": "MXN",
|
||||
"meksikas peso": "MXN",
|
||||
"meksikon peso": "MXN",
|
||||
"meksikos pesas": "MXN",
|
||||
"meticais": "MZN",
|
||||
@@ -10552,6 +10590,7 @@
|
||||
"nigerijská naira": "NGN",
|
||||
"nigériai naira": "NGN",
|
||||
"nigérijská naira": "NGN",
|
||||
"nigērijas naira": "NGN",
|
||||
"niĝera najro": "NGN",
|
||||
"niĝeria najro": "NGN",
|
||||
"nijerya nairası": "NGN",
|
||||
@@ -10680,7 +10719,6 @@
|
||||
"nuevo dólar taiwanes": "TWD",
|
||||
"nuevo dólar taiwanés": "TWD",
|
||||
"nuevo peso": [
|
||||
"UYU",
|
||||
"MXN",
|
||||
"ARS"
|
||||
],
|
||||
@@ -10878,6 +10916,7 @@
|
||||
"penny": "GBP",
|
||||
"perak sebagai pelaburan": "XAG",
|
||||
"peru nueva solü": "PEN",
|
||||
"peru sols": "PEN",
|
||||
"perua nova suno": "PEN",
|
||||
"peruanischer nuevo sol": "PEN",
|
||||
"peruanischer sol": "PEN",
|
||||
@@ -10952,7 +10991,6 @@
|
||||
"peso de méxico": "MXN",
|
||||
"peso de republica dominicana": "DOP",
|
||||
"peso de república dominicana": "DOP",
|
||||
"peso de uruguay": "UYU",
|
||||
"peso de xile": "CLP",
|
||||
"peso do chile": "CLP",
|
||||
"peso do uruguai": "UYU",
|
||||
@@ -11231,7 +11269,10 @@
|
||||
"rends": "ZAR",
|
||||
"renmibi": "CNY",
|
||||
"renminb": "CNY",
|
||||
"renminbi": "CNY",
|
||||
"renminbi": [
|
||||
"CNH",
|
||||
"CNY"
|
||||
],
|
||||
"renminbi cinese": "CNY",
|
||||
"renminbi yuan": "CNY",
|
||||
"renminbio": "CNY",
|
||||
@@ -11599,7 +11640,6 @@
|
||||
"rúpia indiana": "INR",
|
||||
"rúpies": "INR",
|
||||
"rūpija": "IDR",
|
||||
"rwanda franc": "RWF",
|
||||
"rwanda frank": "RWF",
|
||||
"rwandan franc": "RWF",
|
||||
"rwandan frank": "RWF",
|
||||
@@ -11840,6 +11880,7 @@
|
||||
"sistema unificato di compensazione regionale": "XSU",
|
||||
"sistema único de compensación regional": "XSU",
|
||||
"sjekel": "ILS",
|
||||
"sjerraleones leone": "SLE",
|
||||
"sjevernokorejski von": "KPW",
|
||||
"sle": "SLE",
|
||||
"sll": "SLE",
|
||||
@@ -11884,6 +11925,7 @@
|
||||
"som ozbèc": "UZS",
|
||||
"som quirguiz": "KGS",
|
||||
"som usbeco": "UZS",
|
||||
"som usbekistani": "UZS",
|
||||
"som usbeque": "UZS",
|
||||
"som uzbec": "UZS",
|
||||
"som uzbeco": "UZS",
|
||||
@@ -11906,6 +11948,7 @@
|
||||
"somas": "KGS",
|
||||
"somálsky šiling": "SOS",
|
||||
"somálský šilink": "SOS",
|
||||
"somālijas šiliņš": "SOS",
|
||||
"some": "KGS",
|
||||
"somoni": "TJS",
|
||||
"somoni na táidsíceastáine": "TJS",
|
||||
@@ -12675,6 +12718,7 @@
|
||||
"won nord coréen": "KPW",
|
||||
"won nordcoreano": "KPW",
|
||||
"won norte coreano": "KPW",
|
||||
"won nortkoreano": "KPW",
|
||||
"won południowokoreański": "KRW",
|
||||
"won północnokoreański": "KPW",
|
||||
"won sud corean": "KRW",
|
||||
@@ -12745,10 +12789,14 @@
|
||||
"yhdistyneiden arabiemiraattien dirhami": "AED",
|
||||
"yhdysvaltain dollari": "USD",
|
||||
"ytl": "TRY",
|
||||
"yuan": "CNY",
|
||||
"yuan": [
|
||||
"CNH",
|
||||
"CNY"
|
||||
],
|
||||
"yuan chinezesc": "CNY",
|
||||
"yuan chino": "CNY",
|
||||
"yuan cinese": "CNY",
|
||||
"yuan offshore": "CNH",
|
||||
"yuan renmimbi": "CNY",
|
||||
"yuan renminbi": "CNY",
|
||||
"yuan rmb": "CNY",
|
||||
@@ -12949,7 +12997,8 @@
|
||||
"£s": "SYP",
|
||||
"¥": [
|
||||
"JPY",
|
||||
"CNY"
|
||||
"CNY",
|
||||
"CNH"
|
||||
],
|
||||
"đài tệ": "TWD",
|
||||
"đại hàn dân quốc weon": "KRW",
|
||||
@@ -15043,6 +15092,7 @@
|
||||
"ޕާކިސްތާނީ ރުޕީ": "PKR",
|
||||
"रू": "NPR",
|
||||
"रू.": "INR",
|
||||
"অস্ট্রেলীয় ডলার": "AUD",
|
||||
"অ্যাঙ্গোলীয় কুয়াঞ্জা": "AOA",
|
||||
"আইসল্যান্ডীয় ক্রোনা": "ISK",
|
||||
"আজারবাইজানি মানাত": "AZN",
|
||||
@@ -15372,7 +15422,6 @@
|
||||
"యునైటెడ్ స్టేట్స్ డాలర్": "USD",
|
||||
"యూరో": "EUR",
|
||||
"రూపాయి": "INR",
|
||||
"సంయుక్త రాష్ట్రాల డాలర్": "USD",
|
||||
"స్విస్ ఫ్రాంక్": "CHF",
|
||||
"അൾജീരിയൻ ദിനാർ": "DZD",
|
||||
"ഇന്തോനേഷ്യൻ റുപിയ": "IDR",
|
||||
|
||||
File diff suppressed because one or more lines are too long
File diff suppressed because it is too large
@@ -5,7 +5,7 @@
|
||||
],
|
||||
"ua": "Mozilla/5.0 ({os}; rv:{version}) Gecko/20100101 Firefox/{version}",
|
||||
"versions": [
|
||||
"144.0",
|
||||
"143.0"
|
||||
"146.0",
|
||||
"145.0"
|
||||
]
|
||||
}
|
||||
@@ -2319,11 +2319,6 @@
|
||||
"symbol": "kJ/(kg K)",
|
||||
"to_si_factor": 1000.0
|
||||
},
|
||||
"Q108888186": {
|
||||
"si_name": "Q11570",
|
||||
"symbol": "eV/c²",
|
||||
"to_si_factor": 1.782661921627898e-36
|
||||
},
|
||||
"Q108888198": {
|
||||
"si_name": "Q11570",
|
||||
"symbol": "keV/c²",
|
||||
@@ -4394,6 +4389,11 @@
|
||||
"symbol": "m²",
|
||||
"to_si_factor": 1.0
|
||||
},
|
||||
"Q25376902": {
|
||||
"si_name": null,
|
||||
"symbol": "Mbp",
|
||||
"to_si_factor": null
|
||||
},
|
||||
"Q25377184": {
|
||||
"si_name": "Q25377184",
|
||||
"symbol": "kg/m²",
|
||||
@@ -5344,11 +5344,6 @@
|
||||
"symbol": "bhp EDR",
|
||||
"to_si_factor": 12.958174
|
||||
},
|
||||
"Q3984193": {
|
||||
"si_name": "Q25269",
|
||||
"symbol": "TeV",
|
||||
"to_si_factor": 1.602176634e-07
|
||||
},
|
||||
"Q39978339": {
|
||||
"si_name": "Q25377184",
|
||||
"symbol": "kg/cm²",
|
||||
@@ -5459,6 +5454,11 @@
|
||||
"symbol": "T",
|
||||
"to_si_factor": 907.18474
|
||||
},
|
||||
"Q4741": {
|
||||
"si_name": null,
|
||||
"symbol": "RF",
|
||||
"to_si_factor": null
|
||||
},
|
||||
"Q474533": {
|
||||
"si_name": null,
|
||||
"symbol": "At",
|
||||
|
||||
@@ -2,10 +2,18 @@
|
||||
# pylint: disable=invalid-name
|
||||
"""360Search search engine for searxng"""
|
||||
|
||||
import typing as t
|
||||
|
||||
from urllib.parse import urlencode
|
||||
from lxml import html
|
||||
|
||||
from searx import logger
|
||||
from searx.enginelib import EngineCache
|
||||
from searx.utils import extract_text
|
||||
from searx.network import get as http_get
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
from searx.extended_types import SXNG_Response
|
||||
|
||||
# Metadata
|
||||
about = {
|
||||
@@ -26,6 +34,35 @@ time_range_dict = {'day': 'd', 'week': 'w', 'month': 'm', 'year': 'y'}
|
||||
|
||||
# Base URL
|
||||
base_url = "https://www.so.com"
|
||||
COOKIE_CACHE_KEY = "cookie"
|
||||
COOKIE_CACHE_EXPIRATION_SECONDS = 3600
|
||||
|
||||
CACHE: EngineCache
|
||||
"""Stores cookies from 360search to avoid re-fetching them on every request."""
|
||||
|
||||
|
||||
def setup(engine_settings: dict[str, t.Any]) -> bool:
|
||||
"""Initialization of the engine.
|
||||
|
||||
- Instantiate a cache for this engine (:py:obj:`CACHE`).
|
||||
|
||||
"""
|
||||
global CACHE # pylint: disable=global-statement
|
||||
# engine names may start with a digit (e.g. "360search"); SQLite table names starting with a digit would need quoting, so the "cache" prefix is added to avoid sqlite complaining
|
||||
CACHE = EngineCache("cache" + engine_settings["name"])
|
||||
return True
|
||||
|
||||
|
||||
def get_cookie(url: str) -> str:
|
||||
cookie: str | None = CACHE.get(COOKIE_CACHE_KEY)
|
||||
if cookie:
|
||||
return cookie
|
||||
resp: SXNG_Response = http_get(url, timeout=10, allow_redirects=False)
|
||||
headers = resp.headers
|
||||
cookie = headers['set-cookie'].split(";")[0]
|
||||
CACHE.set(key=COOKIE_CACHE_KEY, value=cookie, expire=COOKIE_CACHE_EXPIRATION_SECONDS)
|
||||
|
||||
return cookie
|
||||
|
||||
|
||||
def request(query, params):
|
||||
@@ -36,8 +73,13 @@ def request(query, params):
|
||||
|
||||
if time_range_dict.get(params['time_range']):
|
||||
query_params["adv_t"] = time_range_dict.get(params['time_range'])
|
||||
|
||||
params["url"] = f"{base_url}/s?{urlencode(query_params)}"
|
||||
# get token by calling the query page
|
||||
logger.debug("querying url: %s", params["url"])
|
||||
cookie = get_cookie(params["url"])
|
||||
logger.debug("obtained cookie: %s", cookie)
|
||||
params['headers'] = {'Cookie': cookie}
|
||||
|
||||
return params
|
||||
|
||||
|
||||
|
||||
@@ -270,7 +270,14 @@ def load_engines(engine_list: list[dict[str, t.Any]]):
|
||||
categories.clear()
|
||||
categories['general'] = []
|
||||
for engine_data in engine_list:
|
||||
if engine_data.get("inactive") is True:
|
||||
continue
|
||||
engine = load_engine(engine_data)
|
||||
if engine:
|
||||
register_engine(engine)
|
||||
else:
|
||||
# if an engine can't be loaded (if for example the engine is missing
|
||||
# tor or some other requirements) it's set to inactive!
|
||||
logger.error("loading engine %s failed: set engine to inactive!", engine_data.get("name", "???"))
|
||||
engine_data["inactive"] = True
|
||||
return engines
|
||||
|
||||
@@ -3,9 +3,14 @@
|
||||
Ahmia (Onions)
|
||||
"""
|
||||
|
||||
import typing as t
|
||||
|
||||
from urllib.parse import urlencode, urlparse, parse_qs
|
||||
from lxml.html import fromstring
|
||||
from searx.utils import gen_useragent, ElementType
|
||||
from searx.engines.xpath import extract_url, extract_text, eval_xpath_list, eval_xpath
|
||||
from searx.network import get
|
||||
from searx.enginelib import EngineCache
|
||||
|
||||
# about
|
||||
about = {
|
||||
@@ -23,6 +28,7 @@ paging = True
|
||||
page_size = 10
|
||||
|
||||
# search url
|
||||
base_url = 'http://juhanurmihxlp77nkq76byazcldy2hlmovfu2epvl5ankdibsot4csyd.onion'
|
||||
search_url = 'http://juhanurmihxlp77nkq76byazcldy2hlmovfu2epvl5ankdibsot4csyd.onion/search/?{query}'
|
||||
time_range_support = True
|
||||
time_range_dict = {'day': 1, 'week': 7, 'month': 30}
|
||||
@@ -34,10 +40,42 @@ title_xpath = './h4/a[1]'
|
||||
content_xpath = './/p[1]'
|
||||
correction_xpath = '//*[@id="didYouMean"]//a'
|
||||
number_of_results_xpath = '//*[@id="totalResults"]'
|
||||
name_token_xpath = '//form[@id="searchForm"]/input[@type="hidden"]/@name'
|
||||
value_token_xpath = '//form[@id="searchForm"]/input[@type="hidden"]/@value'
|
||||
|
||||
CACHE: EngineCache
|
||||
|
||||
|
||||
def setup(engine_settings: dict[str, t.Any]) -> bool:
|
||||
global CACHE # pylint: disable=global-statement
|
||||
CACHE = EngineCache(engine_settings["name"])
|
||||
return True
|
||||
|
||||
|
||||
def _get_tokens(dom: ElementType | None = None) -> str:
|
||||
"""
|
||||
The tokens are carried in a hidden input field of the search form.
They update every minute, but tokens up to one hour old are still accepted.
To keep the number of requests low, it is best to always pick up the newest
tokens from each response. In the worst case, when the cached tokens have
expired, a total of 2 requests is needed (over Tor, this might be ridiculously slow).
|
||||
"""
|
||||
if dom is None:
|
||||
resp = get(base_url, headers={'User-Agent': gen_useragent()})
|
||||
dom = fromstring(resp.text)
|
||||
name_token = extract_text(dom.xpath(name_token_xpath))
|
||||
value_token = extract_text(dom.xpath(value_token_xpath))
|
||||
return f"{name_token}:{value_token}"
|
||||
|
||||
|
||||
def request(query, params):
|
||||
params['url'] = search_url.format(query=urlencode({'q': query}))
|
||||
token_str: str | None = CACHE.get('ahmia-tokens')
|
||||
if not token_str:
|
||||
token_str = _get_tokens()
|
||||
CACHE.set('ahmia-tokens', token_str, expire=60 * 60)
|
||||
name_token, value_token = token_str.split(":")
|
||||
|
||||
params['url'] = search_url.format(query=urlencode({'q': query, name_token: value_token}))
|
||||
|
||||
if params['time_range'] in time_range_dict:
|
||||
params['url'] += '&' + urlencode({'d': time_range_dict[params['time_range']]})
|
||||
@@ -77,4 +115,8 @@ def response(resp):
|
||||
except: # pylint: disable=bare-except
|
||||
pass
|
||||
|
||||
# Update the tokens to the newest ones
|
||||
token_str = _get_tokens(dom)
|
||||
CACHE.set('ahmia-tokens', token_str, expire=60 * 60)
|
||||
|
||||
return results
|
||||
|
||||
@@ -50,7 +50,7 @@ def response(resp):
|
||||
pos = script.index(end_tag) + len(end_tag) - 1
|
||||
script = script[:pos]
|
||||
|
||||
json_resp = utils.js_variable_to_python(script)
|
||||
json_resp = utils.js_obj_str_to_python(script)
|
||||
|
||||
results = []
|
||||
|
||||
|
||||
@@ -51,6 +51,7 @@ def request(query, params):
|
||||
}
|
||||
|
||||
params["url"] = f"{base_url}?{urlencode(query_params)}"
|
||||
params["headers"]["Referer"] = "https://www.bilibili.com"
|
||||
params["cookies"] = cookie
|
||||
|
||||
return params
|
||||
|
||||
@@ -124,17 +124,17 @@ from urllib.parse import (
|
||||
urlparse,
|
||||
)
|
||||
|
||||
import json
|
||||
from dateutil import parser
|
||||
from lxml import html
|
||||
|
||||
from searx import locales
|
||||
from searx.utils import (
|
||||
extr,
|
||||
extract_text,
|
||||
eval_xpath,
|
||||
eval_xpath_list,
|
||||
eval_xpath_getindex,
|
||||
js_variable_to_python,
|
||||
js_obj_str_to_python,
|
||||
js_obj_str_to_json_str,
|
||||
get_embeded_stream_url,
|
||||
)
|
||||
from searx.enginelib.traits import EngineTraits
|
||||
@@ -142,17 +142,17 @@ from searx.result_types import EngineResults
|
||||
from searx.extended_types import SXNG_Response
|
||||
|
||||
about = {
|
||||
"website": 'https://search.brave.com/',
|
||||
"wikidata_id": 'Q22906900',
|
||||
"website": "https://search.brave.com/",
|
||||
"wikidata_id": "Q22906900",
|
||||
"official_api_documentation": None,
|
||||
"use_official_api": False,
|
||||
"require_api_key": False,
|
||||
"results": 'HTML',
|
||||
"results": "HTML",
|
||||
}
|
||||
|
||||
base_url = "https://search.brave.com/"
|
||||
categories = []
|
||||
brave_category: t.Literal["search", "videos", "images", "news", "goggles"] = 'search'
|
||||
brave_category: t.Literal["search", "videos", "images", "news", "goggles"] = "search"
|
||||
"""Brave supports common web-search, videos, images, news, and goggles search.
|
||||
|
||||
- ``search``: Common WEB search
|
||||
@@ -182,71 +182,87 @@ to do more won't return any result and you will most likely be flagged as a bot.
|
||||
"""
|
||||
|
||||
safesearch = True
|
||||
safesearch_map = {2: 'strict', 1: 'moderate', 0: 'off'} # cookie: safesearch=off
|
||||
safesearch_map = {2: "strict", 1: "moderate", 0: "off"} # cookie: safesearch=off
|
||||
|
||||
time_range_support = False
|
||||
"""Brave only supports time-range in :py:obj:`brave_category` ``search`` (UI
|
||||
category All) and in the goggles category."""
|
||||
|
||||
time_range_map: dict[str, str] = {
|
||||
'day': 'pd',
|
||||
'week': 'pw',
|
||||
'month': 'pm',
|
||||
'year': 'py',
|
||||
"day": "pd",
|
||||
"week": "pw",
|
||||
"month": "pm",
|
||||
"year": "py",
|
||||
}
|
||||
|
||||
|
||||
def request(query: str, params: dict[str, t.Any]) -> None:
|
||||
|
||||
args: dict[str, t.Any] = {
|
||||
'q': query,
|
||||
'source': 'web',
|
||||
"q": query,
|
||||
"source": "web",
|
||||
}
|
||||
if brave_spellcheck:
|
||||
args['spellcheck'] = '1'
|
||||
args["spellcheck"] = "1"
|
||||
|
||||
if brave_category in ('search', 'goggles'):
|
||||
if params.get('pageno', 1) - 1:
|
||||
args['offset'] = params.get('pageno', 1) - 1
|
||||
if time_range_map.get(params['time_range']):
|
||||
args['tf'] = time_range_map.get(params['time_range'])
|
||||
if brave_category in ("search", "goggles"):
|
||||
if params.get("pageno", 1) - 1:
|
||||
args["offset"] = params.get("pageno", 1) - 1
|
||||
if time_range_map.get(params["time_range"]):
|
||||
args["tf"] = time_range_map.get(params["time_range"])
|
||||
|
||||
if brave_category == 'goggles':
|
||||
args['goggles_id'] = Goggles
|
||||
if brave_category == "goggles":
|
||||
args["goggles_id"] = Goggles
|
||||
|
||||
params["headers"]["Accept-Encoding"] = "gzip, deflate"
|
||||
params["url"] = f"{base_url}{brave_category}?{urlencode(args)}"
|
||||
logger.debug("url %s", params["url"])
|
||||
|
||||
# set properties in the cookies
|
||||
|
||||
params['cookies']['safesearch'] = safesearch_map.get(params['safesearch'], 'off')
|
||||
# the useLocation is IP based, we use cookie 'country' for the region
|
||||
params['cookies']['useLocation'] = '0'
|
||||
params['cookies']['summarizer'] = '0'
|
||||
params["cookies"]["safesearch"] = safesearch_map.get(params["safesearch"], "off")
|
||||
# the useLocation is IP based, we use cookie "country" for the region
|
||||
params["cookies"]["useLocation"] = "0"
|
||||
params["cookies"]["summarizer"] = "0"
|
||||
|
||||
engine_region = traits.get_region(params['searxng_locale'], 'all')
|
||||
params['cookies']['country'] = engine_region.split('-')[-1].lower() # type: ignore
|
||||
engine_region = traits.get_region(params["searxng_locale"], "all")
|
||||
params["cookies"]["country"] = engine_region.split("-")[-1].lower() # type: ignore
|
||||
|
||||
ui_lang = locales.get_engine_locale(params['searxng_locale'], traits.custom["ui_lang"], 'en-us')
|
||||
params['cookies']['ui_lang'] = ui_lang
|
||||
|
||||
logger.debug("cookies %s", params['cookies'])
|
||||
|
||||
params['headers']['Sec-Fetch-Dest'] = "document"
|
||||
params['headers']['Sec-Fetch-Mode'] = "navigate"
|
||||
params['headers']['Sec-Fetch-Site'] = "same-origin"
|
||||
params['headers']['Sec-Fetch-User'] = "?1"
|
||||
ui_lang = locales.get_engine_locale(params["searxng_locale"], traits.custom["ui_lang"], "en-us")
|
||||
params["cookies"]["ui_lang"] = ui_lang
|
||||
logger.debug("cookies %s", params["cookies"])
|
||||
|
||||
|
||||
def _extract_published_date(published_date_raw):
|
||||
def _extract_published_date(published_date_raw: str | None):
|
||||
if published_date_raw is None:
|
||||
return None
|
||||
|
||||
try:
|
||||
return parser.parse(published_date_raw)
|
||||
except parser.ParserError:
|
||||
return None
|
||||
|
||||
|
||||
def extract_json_data(text: str) -> dict[str, t.Any]:
|
||||
# Example script source containing the data:
|
||||
#
|
||||
# kit.start(app, element, {
|
||||
# node_ids: [0, 19],
|
||||
# data: [{type:"data",data: .... ["q","goggles_id"],route:1,url:1}}]
|
||||
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
text = text[text.index("<script") : text.index("</script")]
|
||||
if not text:
|
||||
raise ValueError("can't find JS/JSON data in the given text")
|
||||
start = text.index("data: [{")
|
||||
end = text.rindex("}}]")
|
||||
js_obj_str = text[start:end]
|
||||
js_obj_str = "{" + js_obj_str + "}}]}"
|
||||
# js_obj_str = js_obj_str.replace("\xa0", "") # remove ASCII for
|
||||
# js_obj_str = js_obj_str.replace(r"\u003C", "<").replace(r"\u003c", "<") # fix broken HTML tags in strings
|
||||
json_str = js_obj_str_to_json_str(js_obj_str)
|
||||
data: dict[str, t.Any] = json.loads(json_str)
|
||||
return data
|
||||
|
||||
|
||||
def response(resp: SXNG_Response) -> EngineResults:
|
||||
|
||||
if brave_category in ('search', 'goggles'):
|
||||
@@ -261,11 +277,8 @@ def response(resp: SXNG_Response) -> EngineResults:
|
||||
# node_ids: [0, 19],
|
||||
# data: [{type:"data",data: .... ["q","goggles_id"],route:1,url:1}}]
|
||||
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
js_object = "[{" + extr(resp.text, "data: [{", "}}],") + "}}]"
|
||||
json_data = js_variable_to_python(js_object)
|
||||
|
||||
# json_data is a list and at the second position (0,1) in this list we find the "response" data we need ..
|
||||
json_resp = json_data[1]['data']['body']['response']
|
||||
json_data: dict[str, t.Any] = extract_json_data(resp.text)
|
||||
json_resp: dict[str, t.Any] = json_data['data'][1]["data"]['body']['response']
|
||||
|
||||
if brave_category == 'images':
|
||||
return _parse_images(json_resp)
|
||||
@@ -275,150 +288,124 @@ def response(resp: SXNG_Response) -> EngineResults:
|
||||
raise ValueError(f"Unsupported brave category: {brave_category}")
|
||||
|
||||
|
||||
def _parse_search(resp) -> EngineResults:
|
||||
result_list = EngineResults()
|
||||
|
||||
def _parse_search(resp: SXNG_Response) -> EngineResults:
|
||||
res = EngineResults()
|
||||
dom = html.fromstring(resp.text)
|
||||
|
||||
# I doubt that Brave is still providing the "answer" class / I haven't seen
|
||||
# answers in brave for a long time.
|
||||
answer_tag = eval_xpath_getindex(dom, '//div[@class="answer"]', 0, default=None)
|
||||
if answer_tag:
|
||||
url = eval_xpath_getindex(dom, '//div[@id="featured_snippet"]/a[@class="result-header"]/@href', 0, default=None)
|
||||
answer = extract_text(answer_tag)
|
||||
if answer is not None:
|
||||
result_list.add(result_list.types.Answer(answer=answer, url=url))
|
||||
for result in eval_xpath_list(dom, "//div[contains(@class, 'snippet ')]"):
|
||||
|
||||
# xpath_results = '//div[contains(@class, "snippet fdb") and @data-type="web"]'
|
||||
xpath_results = '//div[contains(@class, "snippet ")]'
|
||||
|
||||
for result in eval_xpath_list(dom, xpath_results):
|
||||
|
||||
url = eval_xpath_getindex(result, './/a[contains(@class, "h")]/@href', 0, default=None)
|
||||
title_tag = eval_xpath_getindex(
|
||||
result, './/a[contains(@class, "h")]//div[contains(@class, "title")]', 0, default=None
|
||||
)
|
||||
url: str | None = eval_xpath_getindex(result, ".//a/@href", 0, default=None)
|
||||
title_tag = eval_xpath_getindex(result, ".//div[contains(@class, 'title')]", 0, default=None)
|
||||
if url is None or title_tag is None or not urlparse(url).netloc: # partial url likely means it's an ad
|
||||
continue
|
||||
|
||||
content: str = extract_text(
|
||||
eval_xpath_getindex(result, './/div[contains(@class, "snippet-description")]', 0, default='')
|
||||
) # type: ignore
|
||||
pub_date_raw = eval_xpath(result, 'substring-before(.//div[contains(@class, "snippet-description")], "-")')
|
||||
pub_date = _extract_published_date(pub_date_raw)
|
||||
if pub_date and content.startswith(pub_date_raw):
|
||||
content = content.lstrip(pub_date_raw).strip("- \n\t")
|
||||
content: str = ""
|
||||
pub_date = None
|
||||
|
||||
thumbnail = eval_xpath_getindex(result, './/img[contains(@class, "thumb")]/@src', 0, default='')
|
||||
# there are other classes like 'site-name-content' we don't want to match,
|
||||
# however only using contains(@class, 'content') would e.g. also match `site-name-content`
|
||||
# thus, we explicitly also require the spaces as class separator
|
||||
_content = eval_xpath_getindex(result, ".//div[contains(concat(' ', @class, ' '), ' content ')]", 0, default="")
|
||||
if len(_content):
|
||||
content = extract_text(_content) # type: ignore
|
||||
_pub_date = extract_text(
|
||||
eval_xpath_getindex(_content, ".//span[contains(@class, 't-secondary')]", 0, default="")
|
||||
)
|
||||
if _pub_date:
|
||||
pub_date = _extract_published_date(_pub_date)
|
||||
content = content.lstrip(_pub_date).strip("- \n\t")
|
||||
|
||||
item = {
|
||||
'url': url,
|
||||
'title': extract_text(title_tag),
|
||||
'content': content,
|
||||
'publishedDate': pub_date,
|
||||
'thumbnail': thumbnail,
|
||||
}
|
||||
thumbnail: str = eval_xpath_getindex(result, ".//a[contains(@class, 'thumbnail')]//img/@src", 0, default="")
|
||||
|
||||
item = res.types.LegacyResult(
|
||||
template="default.html",
|
||||
url=url,
|
||||
title=extract_text(title_tag),
|
||||
content=content,
|
||||
publishedDate=pub_date,
|
||||
thumbnail=thumbnail,
|
||||
)
|
||||
res.add(item)
|
||||
|
||||
video_tag = eval_xpath_getindex(
|
||||
result, './/div[contains(@class, "video-snippet") and @data-macro="video"]', 0, default=None
|
||||
result, ".//div[contains(@class, 'video-snippet') and @data-macro='video']", 0, default=[]
|
||||
)
|
||||
if video_tag is not None:
|
||||
|
||||
if len(video_tag):
|
||||
# In my tests a video tag in the WEB search was most often not a
|
||||
# video, except the ones from youtube ..
|
||||
|
||||
iframe_src = get_embeded_stream_url(url)
|
||||
if iframe_src:
|
||||
item['iframe_src'] = iframe_src
|
||||
item['template'] = 'videos.html'
|
||||
item['thumbnail'] = eval_xpath_getindex(video_tag, './/img/@src', 0, default='')
|
||||
pub_date_raw = extract_text(
|
||||
eval_xpath(video_tag, './/div[contains(@class, "snippet-attributes")]/div/text()')
|
||||
)
|
||||
item['publishedDate'] = _extract_published_date(pub_date_raw)
|
||||
else:
|
||||
item['thumbnail'] = eval_xpath_getindex(video_tag, './/img/@src', 0, default='')
|
||||
item["iframe_src"] = iframe_src
|
||||
item["template"] = "videos.html"
|
||||
|
||||
result_list.append(item)
|
||||
|
||||
return result_list
|
||||
return res
|
||||
|
||||
|
||||
def _parse_news(resp) -> EngineResults:
|
||||
|
||||
result_list = EngineResults()
|
||||
def _parse_news(resp: SXNG_Response) -> EngineResults:
|
||||
res = EngineResults()
|
||||
dom = html.fromstring(resp.text)
|
||||
|
||||
for result in eval_xpath_list(dom, '//div[contains(@class, "results")]//div[@data-type="news"]'):
|
||||
for result in eval_xpath_list(dom, "//div[contains(@class, 'results')]//div[@data-type='news']"):
|
||||
|
||||
# import pdb
|
||||
# pdb.set_trace()
|
||||
|
||||
url = eval_xpath_getindex(result, './/a[contains(@class, "result-header")]/@href', 0, default=None)
|
||||
url = eval_xpath_getindex(result, ".//a[contains(@class, 'result-header')]/@href", 0, default=None)
|
||||
if url is None:
|
||||
continue
|
||||
|
||||
title = extract_text(eval_xpath_list(result, './/span[contains(@class, "snippet-title")]'))
|
||||
content = extract_text(eval_xpath_list(result, './/p[contains(@class, "desc")]'))
|
||||
thumbnail = eval_xpath_getindex(result, './/div[contains(@class, "image-wrapper")]//img/@src', 0, default='')
|
||||
title = eval_xpath_list(result, ".//span[contains(@class, 'snippet-title')]")
|
||||
content = eval_xpath_list(result, ".//p[contains(@class, 'desc')]")
|
||||
thumbnail = eval_xpath_getindex(result, ".//div[contains(@class, 'image-wrapper')]//img/@src", 0, default="")
|
||||
|
||||
item = {
|
||||
"url": url,
|
||||
"title": title,
|
||||
"content": content,
|
||||
"thumbnail": thumbnail,
|
||||
}
|
||||
item = res.types.LegacyResult(
|
||||
template="default.html",
|
||||
url=url,
|
||||
title=extract_text(title),
|
||||
thumbnail=thumbnail,
|
||||
content=extract_text(content),
|
||||
)
|
||||
res.add(item)
|
||||
|
||||
result_list.append(item)
|
||||
|
||||
return result_list
|
||||
return res
|
||||
|
||||
|
||||
def _parse_images(json_resp) -> EngineResults:
|
||||
result_list = EngineResults()
|
||||
def _parse_images(json_resp: dict[str, t.Any]) -> EngineResults:
|
||||
res = EngineResults()
|
||||
|
||||
for result in json_resp["results"]:
|
||||
item = {
|
||||
'url': result['url'],
|
||||
'title': result['title'],
|
||||
'content': result['description'],
|
||||
'template': 'images.html',
|
||||
'resolution': result['properties']['format'],
|
||||
'source': result['source'],
|
||||
'img_src': result['properties']['url'],
|
||||
'thumbnail_src': result['thumbnail']['src'],
|
||||
}
|
||||
result_list.append(item)
|
||||
item = res.types.LegacyResult(
|
||||
template="images.html",
|
||||
url=result["url"],
|
||||
title=result["title"],
|
||||
source=result["source"],
|
||||
img_src=result["properties"]["url"],
|
||||
thumbnail_src=result["thumbnail"]["src"],
|
||||
)
|
||||
res.add(item)
|
||||
|
||||
return result_list
|
||||
return res
|
||||
|
||||
|
||||
def _parse_videos(json_resp) -> EngineResults:
|
||||
result_list = EngineResults()
|
||||
def _parse_videos(json_resp: dict[str, t.Any]) -> EngineResults:
|
||||
res = EngineResults()
|
||||
|
||||
for result in json_resp["results"]:
|
||||
|
||||
url = result['url']
|
||||
item = {
|
||||
'url': url,
|
||||
'title': result['title'],
|
||||
'content': result['description'],
|
||||
'template': 'videos.html',
|
||||
'length': result['video']['duration'],
|
||||
'duration': result['video']['duration'],
|
||||
'publishedDate': _extract_published_date(result['age']),
|
||||
}
|
||||
|
||||
if result['thumbnail'] is not None:
|
||||
item['thumbnail'] = result['thumbnail']['src']
|
||||
|
||||
iframe_src = get_embeded_stream_url(url)
|
||||
item = res.types.LegacyResult(
|
||||
template="videos.html",
|
||||
url=result["url"],
|
||||
title=result["title"],
|
||||
content=result["description"],
|
||||
length=result["video"]["duration"],
|
||||
duration=result["video"]["duration"],
|
||||
publishedDate=_extract_published_date(result["age"]),
|
||||
)
|
||||
if result["thumbnail"] is not None:
|
||||
item["thumbnail"] = result["thumbnail"]["src"]
|
||||
iframe_src = get_embeded_stream_url(result["url"])
|
||||
if iframe_src:
|
||||
item['iframe_src'] = iframe_src
|
||||
item["iframe_src"] = iframe_src
|
||||
|
||||
result_list.append(item)
|
||||
res.add(item)
|
||||
|
||||
return result_list
|
||||
return res
|
||||
|
||||
|
||||
def fetch_traits(engine_traits: EngineTraits):
|
||||
@@ -439,25 +426,25 @@ def fetch_traits(engine_traits: EngineTraits):
|
||||
|
||||
resp = get('https://search.brave.com/settings')
|
||||
|
||||
if not resp.ok: # type: ignore
|
||||
if not resp.ok:
|
||||
print("ERROR: response from Brave is not OK.")
|
||||
dom = html.fromstring(resp.text) # type: ignore
|
||||
dom = html.fromstring(resp.text)
|
||||
|
||||
for option in dom.xpath('//section//option[@value="en-us"]/../option'):
|
||||
for option in dom.xpath("//section//option[@value='en-us']/../option"):
|
||||
|
||||
ui_lang = option.get('value')
|
||||
ui_lang = option.get("value")
|
||||
try:
|
||||
l = babel.Locale.parse(ui_lang, sep='-')
|
||||
l = babel.Locale.parse(ui_lang, sep="-")
|
||||
if l.territory:
|
||||
sxng_tag = region_tag(babel.Locale.parse(ui_lang, sep='-'))
|
||||
sxng_tag = region_tag(babel.Locale.parse(ui_lang, sep="-"))
|
||||
else:
|
||||
sxng_tag = language_tag(babel.Locale.parse(ui_lang, sep='-'))
|
||||
sxng_tag = language_tag(babel.Locale.parse(ui_lang, sep="-"))
|
||||
|
||||
except babel.UnknownLocaleError:
|
||||
print("ERROR: can't determine babel locale of Brave's (UI) language %s" % ui_lang)
|
||||
continue
|
||||
|
||||
conflict = engine_traits.custom["ui_lang"].get(sxng_tag)
|
||||
conflict = engine_traits.custom["ui_lang"].get(sxng_tag) # type: ignore
|
||||
if conflict:
|
||||
if conflict != ui_lang:
|
||||
print("CONFLICT: babel %s --> %s, %s" % (sxng_tag, conflict, ui_lang))
|
||||
@@ -466,26 +453,26 @@ def fetch_traits(engine_traits: EngineTraits):
|
||||
|
||||
# search regions of brave
|
||||
|
||||
resp = get('https://cdn.search.brave.com/serp/v2/_app/immutable/chunks/parameters.734c106a.js')
|
||||
resp = get("https://cdn.search.brave.com/serp/v2/_app/immutable/chunks/parameters.734c106a.js")
|
||||
|
||||
if not resp.ok: # type: ignore
|
||||
if not resp.ok:
|
||||
print("ERROR: response from Brave is not OK.")
|
||||
|
||||
country_js = resp.text[resp.text.index("options:{all") + len('options:') :] # type: ignore
|
||||
country_js = resp.text[resp.text.index("options:{all") + len("options:") :]
|
||||
country_js = country_js[: country_js.index("},k={default")]
|
||||
country_tags = js_variable_to_python(country_js)
|
||||
country_tags = js_obj_str_to_python(country_js)
|
||||
|
||||
for k, v in country_tags.items():
|
||||
if k == 'all':
|
||||
engine_traits.all_locale = 'all'
|
||||
if k == "all":
|
||||
engine_traits.all_locale = "all"
|
||||
continue
|
||||
country_tag = v['value']
|
||||
country_tag = v["value"]
|
||||
|
||||
# add official languages of the country ..
|
||||
for lang_tag in babel.languages.get_official_languages(country_tag, de_facto=True):
|
||||
lang_tag = lang_map.get(lang_tag, lang_tag)
|
||||
sxng_tag = region_tag(babel.Locale.parse('%s_%s' % (lang_tag, country_tag.upper())))
|
||||
# print("%-20s: %s <-- %s" % (v['label'], country_tag, sxng_tag))
|
||||
sxng_tag = region_tag(babel.Locale.parse("%s_%s" % (lang_tag, country_tag.upper())))
|
||||
# print("%-20s: %s <-- %s" % (v["label"], country_tag, sxng_tag))
|
||||
|
||||
conflict = engine_traits.regions.get(sxng_tag)
|
||||
if conflict:
|
||||
|
||||
@@ -407,7 +407,7 @@ def fetch_traits(engine_traits: EngineTraits):
|
||||
|
||||
"""
|
||||
# pylint: disable=too-many-branches, too-many-statements, disable=import-outside-toplevel
|
||||
from searx.utils import js_variable_to_python
|
||||
from searx.utils import js_obj_str_to_python
|
||||
|
||||
# fetch regions
|
||||
|
||||
@@ -455,7 +455,7 @@ def fetch_traits(engine_traits: EngineTraits):
|
||||
|
||||
js_code = extr(resp.text, 'languages:', ',regions') # type: ignore
|
||||
|
||||
languages = js_variable_to_python(js_code)
|
||||
languages: dict[str, str] = js_obj_str_to_python(js_code)
|
||||
for eng_lang, name in languages.items():
|
||||
|
||||
if eng_lang == 'wt_WT':
|
||||
|
||||
@@ -42,8 +42,8 @@ def response(resp):
|
||||
|
||||
results.append(
|
||||
{
|
||||
'url': item['source_page_url'],
|
||||
'title': item['source_site'],
|
||||
'url': item.get('source_page_url'),
|
||||
'title': item.get('source_site'),
|
||||
'img_src': img if item['type'] == 'IMAGE' else thumb,
|
||||
'filesize': humanize_bytes(item['meme_file_size']),
|
||||
'publishedDate': formatted_date,
|
||||
|
||||
52
searx/engines/grokipedia.py
Normal file
@@ -0,0 +1,52 @@
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
"""Grokipedia (general)"""
|
||||
|
||||
from urllib.parse import urlencode
|
||||
from searx.utils import html_to_text
|
||||
from searx.result_types import EngineResults
|
||||
|
||||
about = {
|
||||
"website": 'https://grokipedia.com',
|
||||
"wikidata_id": "Q136410803",
|
||||
"official_api_documentation": None,
|
||||
"use_official_api": False,
|
||||
"require_api_key": False,
|
||||
"results": "JSON",
|
||||
}
|
||||
|
||||
base_url = "https://grokipedia.com/api/full-text-search"
|
||||
categories = ['general']
|
||||
paging = True
|
||||
results_per_page = 10
|
||||
|
||||
|
||||
def request(query, params):
|
||||
|
||||
start_index = (params["pageno"] - 1) * results_per_page
|
||||
|
||||
query_params = {
|
||||
"query": query,
|
||||
"limit": results_per_page,
|
||||
"offset": start_index,
|
||||
}
|
||||
|
||||
params["url"] = f"{base_url}?{urlencode(query_params)}"
|
||||
|
||||
return params
|
||||
|
||||
|
||||
def response(resp) -> EngineResults:
|
||||
results = EngineResults()
|
||||
search_res = resp.json()
|
||||
|
||||
for item in search_res["results"]:
|
||||
|
||||
results.add(
|
||||
results.types.MainResult(
|
||||
url='https://grokipedia.com/page/' + item["slug"],
|
||||
title=item["title"],
|
||||
content=html_to_text(item["snippet"]),
|
||||
)
|
||||
)
|
||||
|
||||
return results
|
||||
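A short, self-contained sketch of the paging arithmetic the new engine uses: ``offset`` is derived from the 1-based page number, and the request URL is just the base URL plus the urlencoded query parameters. The query string and page number below are only examples.

.. code:: python

    from urllib.parse import urlencode

    base_url = "https://grokipedia.com/api/full-text-search"
    results_per_page = 10

    def grokipedia_url(query: str, pageno: int) -> str:
        # offset is zero-based: page 1 -> 0, page 2 -> 10, ...
        offset = (pageno - 1) * results_per_page
        return f"{base_url}?{urlencode({'query': query, 'limit': results_per_page, 'offset': offset})}"

    print(grokipedia_url("search engine", 2))
    # https://grokipedia.com/api/full-text-search?query=search+engine&limit=10&offset=10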
@@ -31,7 +31,7 @@ paging = True
|
||||
time_range_support = True
|
||||
|
||||
# base_url can be overwritten by a list of URLs in the settings.yml
|
||||
base_url: list | str = []
|
||||
base_url: list[str] | str = []
|
||||
|
||||
|
||||
def init(_):
|
||||
|
||||
69
searx/engines/lucide.py
Normal file
@@ -0,0 +1,69 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
"""Browse one of the largest collections of copyleft icons
that can be used for your own projects (e.g. apps, websites).

.. _Website: https://lucide.dev

"""

import typing as t

from searx.result_types import EngineResults

if t.TYPE_CHECKING:
    from searx.extended_types import SXNG_Response
    from searx.search.processors.online import OnlineParams


about = {
    "website": "https://lucide.dev/",
    "wikidata_id": None,
    "official_api_documentation": None,
    "use_official_api": True,
    "results": "JSON",
}

cdn_base_url = "https://cdn.jsdelivr.net/npm/lucide-static"
categories = ["images", "icons"]


def request(query: str, params: "OnlineParams"):
    params["url"] = f"{cdn_base_url}/tags.json"
    params['query'] = query
    return params


def response(resp: "SXNG_Response") -> EngineResults:
    res = EngineResults()
    query_parts = resp.search_params["query"].lower().split(" ")

    def is_result_match(result: tuple[str, list[str]]) -> bool:
        icon_name, tags = result

        for part in query_parts:
            if part in icon_name:
                return True

            for tag in tags:
                if part in tag:
                    return True

        return False

    filtered_results = filter(is_result_match, resp.json().items())
    for icon_name, tags in filtered_results:
        img_src = f"{cdn_base_url}/icons/{icon_name}.svg"
        res.add(
            res.types.LegacyResult(
                {
                    "template": "images.html",
                    "url": img_src,
                    "title": icon_name,
                    "content": ", ".join(tags),
                    "img_src": img_src,
                    "img_format": "SVG",
                }
            )
        )

    return res
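The lucide engine fetches the complete ``tags.json`` index from the CDN and filters it locally: an icon matches if any whitespace-separated query part is a substring of the icon name or of one of its tags. A tiny standalone sketch of that filter, run against a made-up two-entry payload:

.. code:: python

    tags_json = {
        "arrow-up": ["direction", "north"],
        "calendar-check": ["date", "done", "todo"],
    }
    query_parts = "check date".lower().split(" ")

    def is_match(item: tuple[str, list[str]]) -> bool:
        icon_name, tags = item
        # same substring test as is_result_match() above, written with any()
        return any(part in icon_name or any(part in tag for tag in tags) for part in query_parts)

    print([name for name, tags in tags_json.items() if is_match((name, tags))])
    # ['calendar-check']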
@@ -28,7 +28,7 @@ Implementations
|
||||
"""
|
||||
|
||||
import typing as t
|
||||
from urllib.parse import urlencode, quote_plus
|
||||
from urllib.parse import urlencode
|
||||
from searx.utils import searxng_useragent
|
||||
from searx.result_types import EngineResults
|
||||
from searx.extended_types import SXNG_Response
|
||||
@@ -42,7 +42,7 @@ about = {
|
||||
"results": "JSON",
|
||||
}
|
||||
|
||||
base_url = "https://api.marginalia.nu"
|
||||
base_url = "https://api2.marginalia-search.com"
|
||||
safesearch = True
|
||||
categories = ["general"]
|
||||
paging = False
|
||||
@@ -85,13 +85,11 @@ class ApiSearchResults(t.TypedDict):
|
||||
|
||||
def request(query: str, params: dict[str, t.Any]):
|
||||
|
||||
query_params = {
|
||||
"count": results_per_page,
|
||||
"nsfw": min(params["safesearch"], 1),
|
||||
}
|
||||
query_params = {"count": results_per_page, "nsfw": min(params["safesearch"], 1), "query": query}
|
||||
|
||||
params["url"] = f"{base_url}/{api_key}/search/{quote_plus(query)}?{urlencode(query_params)}"
|
||||
params["url"] = f"{base_url}/search?{urlencode(query_params)}"
|
||||
params["headers"]["User-Agent"] = searxng_useragent()
|
||||
params["headers"]["API-Key"] = api_key
|
||||
|
||||
|
||||
def response(resp: SXNG_Response):
|
||||
|
||||
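For the Marginalia change above: the API key moves out of the URL path and into an ``API-Key`` header, and the query itself becomes a regular query-string parameter. A hedged sketch of the request that results; ``results_per_page`` and ``api_key`` are module-level settings of the engine, and the values below are placeholders.

.. code:: python

    from urllib.parse import urlencode

    base_url = "https://api2.marginalia-search.com"
    api_key = "my-api-key"      # placeholder, configured via settings.yml
    results_per_page = 10       # assumed value of the engine's module-level setting

    def build_request(query: str, safesearch: int) -> dict:
        query_params = {"count": results_per_page, "nsfw": min(safesearch, 1), "query": query}
        return {
            "url": f"{base_url}/search?{urlencode(query_params)}",
            "headers": {"API-Key": api_key},  # the key is now a header, not part of the path
        }

    print(build_request("searxng", 0)["url"])
    # https://api2.marginalia-search.com/search?count=10&nsfw=0&query=searxng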
@@ -65,7 +65,8 @@ def request(query, params):
|
||||
if search_type:
|
||||
args['fmt'] = search_type
|
||||
|
||||
if search_type == '':
|
||||
# setting the page number on the first page (i.e. s=0) triggers a rate-limit
|
||||
if search_type == '' and params['pageno'] > 1:
|
||||
args['s'] = 10 * (params['pageno'] - 1)
|
||||
|
||||
if params['time_range'] and search_type != 'images':
|
||||
|
||||
@@ -1,276 +0,0 @@
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
"""Mullvad Leta is a search engine proxy. Currently Leta only offers text
|
||||
search results not image, news or any other types of search result. Leta acts
|
||||
as a proxy to Google and Brave search results. You can select which backend
|
||||
search engine you wish to use, see (:py:obj:`leta_engine`).
|
||||
|
||||
.. hint::
|
||||
|
||||
Leta caches each search for up to 30 days. For example, if you use search
|
||||
terms like ``news``, contrary to your intention you'll get very old results!
|
||||
|
||||
|
||||
Configuration
|
||||
=============
|
||||
|
||||
The engine has the following additional settings:
|
||||
|
||||
- :py:obj:`leta_engine` (:py:obj:`LetaEnginesType`)
|
||||
|
||||
You can configure one Leta engine for Google and one for Brave:
|
||||
|
||||
.. code:: yaml
|
||||
|
||||
- name: mullvadleta
|
||||
engine: mullvad_leta
|
||||
leta_engine: google
|
||||
shortcut: ml
|
||||
|
||||
- name: mullvadleta brave
|
||||
engine: mullvad_leta
|
||||
network: mullvadleta # use network from engine "mullvadleta" configured above
|
||||
leta_engine: brave
|
||||
shortcut: mlb
|
||||
|
||||
Implementations
|
||||
===============
|
||||
|
||||
"""
|
||||
import typing as t
|
||||
|
||||
from urllib.parse import urlencode
|
||||
import babel
|
||||
from httpx import Response
|
||||
from lxml import html
|
||||
from searx.enginelib.traits import EngineTraits
|
||||
from searx.extended_types import SXNG_Response
|
||||
|
||||
from searx.locales import get_official_locales, language_tag, region_tag
|
||||
from searx.utils import eval_xpath_list
|
||||
from searx.result_types import EngineResults, MainResult
|
||||
from searx.network import raise_for_httperror
|
||||
|
||||
search_url = "https://leta.mullvad.net"
|
||||
|
||||
# about
|
||||
about = {
|
||||
"website": search_url,
|
||||
"wikidata_id": 'Q47008412', # the Mullvad id - not leta, but related
|
||||
"official_api_documentation": 'https://leta.mullvad.net/faq',
|
||||
"use_official_api": False,
|
||||
"require_api_key": False,
|
||||
"results": 'HTML',
|
||||
}
|
||||
|
||||
# engine dependent config
|
||||
categories = ["general", "web"]
|
||||
paging = True
|
||||
max_page = 10
|
||||
time_range_support = True
|
||||
time_range_dict = {
|
||||
"day": "d",
|
||||
"week": "w",
|
||||
"month": "m",
|
||||
"year": "y",
|
||||
}
|
||||
|
||||
LetaEnginesType = t.Literal["google", "brave"]
|
||||
"""Engine types supported by mullvadleta."""
|
||||
|
||||
leta_engine: LetaEnginesType = "google"
|
||||
"""Select Leta's engine type from :py:obj:`LetaEnginesType`."""
|
||||
|
||||
|
||||
def init(_):
|
||||
l = t.get_args(LetaEnginesType)
|
||||
if leta_engine not in l:
|
||||
raise ValueError(f"leta_engine '{leta_engine}' is invalid, use one of {', '.join(l)}")
|
||||
|
||||
|
||||
class DataNodeQueryMetaDataIndices(t.TypedDict):
|
||||
"""Indices into query metadata."""
|
||||
|
||||
success: int
|
||||
q: int # pylint: disable=invalid-name
|
||||
country: int
|
||||
language: int
|
||||
lastUpdated: int
|
||||
engine: int
|
||||
items: int
|
||||
infobox: int
|
||||
news: int
|
||||
timestamp: int
|
||||
altered: int
|
||||
page: int
|
||||
next: int  # if -1, no more results are available
|
||||
previous: int
|
||||
|
||||
|
||||
class DataNodeResultIndices(t.TypedDict):
|
||||
"""Indices into query resultsdata."""
|
||||
|
||||
link: int
|
||||
snippet: int
|
||||
title: int
|
||||
favicon: int
|
||||
|
||||
|
||||
def request(query: str, params: dict[str, t.Any]) -> None:
|
||||
params["raise_for_httperror"] = False
|
||||
params["method"] = "GET"
|
||||
args = {
|
||||
"q": query,
|
||||
"engine": leta_engine,
|
||||
"x-sveltekit-invalidated": "001", # hardcoded from all requests seen
|
||||
}
|
||||
|
||||
country = traits.get_region(params.get("searxng_locale"), traits.all_locale) # type: ignore
|
||||
if country:
|
||||
args["country"] = country
|
||||
|
||||
language = traits.get_language(params.get("searxng_locale"), traits.all_locale) # type: ignore
|
||||
if language:
|
||||
args["language"] = language
|
||||
|
||||
if params["time_range"] in time_range_dict:
|
||||
args["lastUpdated"] = time_range_dict[params["time_range"]]
|
||||
|
||||
if params["pageno"] > 1:
|
||||
args["page"] = params["pageno"]
|
||||
|
||||
params["url"] = f"{search_url}/search/__data.json?{urlencode(args)}"
|
||||
|
||||
|
||||
def response(resp: SXNG_Response) -> EngineResults:
|
||||
results = EngineResults()
|
||||
|
||||
if resp.status_code in (403, 429):
|
||||
# It doesn't matter if you're using Mullvad's VPN and a proper browser,
|
||||
# you'll still get blocked for specific searches with a 403 or 429 HTTP
|
||||
# status code.
|
||||
# https://github.com/searxng/searxng/issues/5328#issue-3518337233
|
||||
return results
|
||||
# raise for other errors
|
||||
raise_for_httperror(resp)
|
||||
|
||||
json_response = resp.json()
|
||||
|
||||
nodes = json_response["nodes"]
|
||||
# 0: is None
|
||||
# 1: has "connected=True", not useful
|
||||
# 2: query results within "data"
|
||||
|
||||
data_nodes = nodes[2]["data"]
|
||||
# Instead of a nested object structure, all objects are flattened into a
# list. The first object in data_nodes provides indices into the
# "data_nodes" list to access each search result (which in turn is an
# object of more indices)
#
# Read the related TypedDict definitions for details
|
||||
|
||||
query_meta_data: DataNodeQueryMetaDataIndices = data_nodes[0]
|
||||
|
||||
query_items_indices = query_meta_data["items"]
|
||||
|
||||
for idx in data_nodes[query_items_indices]:
|
||||
query_item_indices: DataNodeResultIndices = data_nodes[idx]
|
||||
results.add(
|
||||
MainResult(
|
||||
url=data_nodes[query_item_indices["link"]],
|
||||
title=data_nodes[query_item_indices["title"]],
|
||||
content=data_nodes[query_item_indices["snippet"]],
|
||||
)
|
||||
)
|
||||
|
||||
return results
|
||||
|
||||
|
||||
def fetch_traits(engine_traits: EngineTraits) -> None:
|
||||
"""Fetch languages and regions from Mullvad-Leta"""
|
||||
|
||||
def extract_table_data(table):
|
||||
for row in table.xpath(".//tr")[2:]:
|
||||
cells = row.xpath(".//td | .//th") # includes headers and data
|
||||
if len(cells) > 1: # ensure the column exists
|
||||
cell0 = cells[0].text_content().strip()
|
||||
cell1 = cells[1].text_content().strip()
|
||||
yield [cell0, cell1]
|
||||
|
||||
# pylint: disable=import-outside-toplevel
|
||||
# see https://github.com/searxng/searxng/issues/762
|
||||
from searx.network import get as http_get
|
||||
|
||||
# pylint: enable=import-outside-toplevel
|
||||
|
||||
resp = http_get(f"{search_url}/documentation")
|
||||
if not isinstance(resp, Response):
|
||||
print("ERROR: failed to get response from mullvad-leta. Are you connected to the VPN?")
|
||||
return
|
||||
if not resp.ok:
|
||||
print("ERROR: response from mullvad-leta is not OK. Are you connected to the VPN?")
|
||||
return
|
||||
|
||||
dom = html.fromstring(resp.text)
|
||||
|
||||
# There are 4 HTML tables on the documentation page for extracting information:
|
||||
# 0. Keyboard Shortcuts
|
||||
# 1. Query Parameters (shoutout to Mullvad for accessible docs for integration)
|
||||
# 2. Country Codes [Country, Code]
|
||||
# 3. Language Codes [Language, Code]
|
||||
tables = eval_xpath_list(dom.body, "//table")
|
||||
if tables is None or len(tables) <= 0:
|
||||
print("ERROR: could not find any tables. Was the page updated?")
|
||||
|
||||
language_table = tables[3]
|
||||
lang_map = {
|
||||
"zh-hant": "zh_Hans",
|
||||
"zh-hans": "zh_Hant",
|
||||
"jp": "ja",
|
||||
}
|
||||
|
||||
for language, code in extract_table_data(language_table):
|
||||
|
||||
locale_tag = lang_map.get(code, code).replace("-", "_") # type: ignore
|
||||
try:
|
||||
locale = babel.Locale.parse(locale_tag)
|
||||
except babel.UnknownLocaleError:
|
||||
print(f"ERROR: Mullvad-Leta language {language} ({code}) is unknown by babel")
|
||||
continue
|
||||
|
||||
sxng_tag = language_tag(locale)
|
||||
engine_traits.languages[sxng_tag] = code
|
||||
|
||||
country_table = tables[2]
|
||||
country_map = {
|
||||
"cn": "zh-CN",
|
||||
"hk": "zh-HK",
|
||||
"jp": "ja-JP",
|
||||
"my": "ms-MY",
|
||||
"tw": "zh-TW",
|
||||
"uk": "en-GB",
|
||||
"us": "en-US",
|
||||
}
|
||||
|
||||
for country, code in extract_table_data(country_table):
|
||||
|
||||
sxng_tag = country_map.get(code)
|
||||
if sxng_tag:
|
||||
engine_traits.regions[sxng_tag] = code
|
||||
continue
|
||||
|
||||
try:
|
||||
locale = babel.Locale.parse(f"{code.lower()}_{code.upper()}")
|
||||
except babel.UnknownLocaleError:
|
||||
locale = None
|
||||
|
||||
if locale:
|
||||
engine_traits.regions[region_tag(locale)] = code
|
||||
continue
|
||||
|
||||
official_locales = get_official_locales(code, engine_traits.languages.keys(), regional=True)
|
||||
if not official_locales:
|
||||
print(f"ERROR: Mullvad-Leta country '{code}' ({country}) could not be mapped as expected.")
|
||||
continue
|
||||
|
||||
for locale in official_locales:
|
||||
engine_traits.regions[region_tag(locale)] = code
|
||||
@@ -15,7 +15,7 @@ from searx.utils import (
|
||||
extr,
|
||||
html_to_text,
|
||||
parse_duration_string,
|
||||
js_variable_to_python,
|
||||
js_obj_str_to_python,
|
||||
get_embeded_stream_url,
|
||||
)
|
||||
|
||||
@@ -125,7 +125,7 @@ def parse_images(data):
|
||||
|
||||
match = extr(data, '<script>var imageSearchTabData=', '</script>')
|
||||
if match:
|
||||
json = js_variable_to_python(match.strip())
|
||||
json = js_obj_str_to_python(match.strip())
|
||||
items = json.get('content', {}).get('items', [])
|
||||
|
||||
for item in items:
|
||||
|
||||
@@ -40,8 +40,8 @@ Known Quirks
|
||||
The implementation to support :py:obj:`paging <searx.enginelib.Engine.paging>`
|
||||
is based on the *nextpage* method of Piped's REST API / the :py:obj:`frontend
|
||||
API <frontend_url>`. This feature is *next page driven* and plays well with the
|
||||
:ref:`infinite_scroll <settings ui>` setting in SearXNG but it does not really
|
||||
fit into SearXNG's UI to select a page by number.
|
||||
:ref:`infinite_scroll <settings plugins>` plugin in SearXNG but it does not
|
||||
really fit into SearXNG's UI to select a page by number.
|
||||
|
||||
Implementations
|
||||
===============
|
||||
@@ -72,7 +72,7 @@ categories = []
|
||||
paging = True
|
||||
|
||||
# search-url
|
||||
backend_url: list[str] | str | None = None
|
||||
backend_url: list[str] | str = []
|
||||
"""Piped-Backend_: The core component behind Piped. The value is an URL or a
|
||||
list of URLs. In the latter case instance will be selected randomly. For a
|
||||
complete list of official instances see Piped-Instances (`JSON
|
||||
|
||||
@@ -17,10 +17,11 @@ about = {
|
||||
# Engine configuration
|
||||
paging = True
|
||||
categories = ['images']
|
||||
remove_ai_images = False
|
||||
|
||||
# Search URL
|
||||
base_url = "https://www.pixiv.net/ajax/search/illustrations"
|
||||
pixiv_image_proxies: list = []
|
||||
pixiv_image_proxies: list[str] = []
|
||||
|
||||
|
||||
def request(query, params):
|
||||
@@ -34,6 +35,9 @@ def request(query, params):
|
||||
"lang": "en",
|
||||
}
|
||||
|
||||
if remove_ai_images is True:
|
||||
query_params.update({"ai_type": 1})
|
||||
|
||||
params["url"] = f"{base_url}/{query}?{urlencode(query_params)}"
|
||||
|
||||
return params
|
||||
|
||||
@@ -41,6 +41,7 @@ from datetime import date, timedelta
|
||||
from urllib.parse import urlencode
|
||||
|
||||
from searx.result_types import EngineResults
|
||||
from searx.utils import html_to_text
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
from searx.extended_types import SXNG_Response
|
||||
@@ -133,11 +134,14 @@ def response(resp: "SXNG_Response") -> EngineResults:
|
||||
if mtype in ["image"] and subtype in ["bmp", "gif", "jpeg", "png"]:
|
||||
thumbnail = url
|
||||
|
||||
# remove HTML from snippet
|
||||
content = html_to_text(result.get("snippet", ""))
|
||||
|
||||
res.add(
|
||||
res.types.File(
|
||||
title=result.get("label", ""),
|
||||
url=url,
|
||||
content=result.get("snippet", ""),
|
||||
content=content,
|
||||
size=result.get("size", ""),
|
||||
filename=result.get("filename", ""),
|
||||
abstract=result.get("abstract", ""),
|
||||
|
||||
@@ -32,8 +32,8 @@ Known Quirks
|
||||
|
||||
The implementation to support :py:obj:`paging <searx.enginelib.Engine.paging>`
|
||||
is based on the *nextpage* method of Seekr's REST API. This feature is *next
|
||||
page driven* and plays well with the :ref:`infinite_scroll <settings ui>`
|
||||
setting in SearXNG but it does not really fit into SearXNG's UI to select a page
|
||||
page driven* and plays well with the :ref:`infinite_scroll <settings plugins>`
|
||||
plugin in SearXNG but it does not really fit into SearXNG's UI to select a page
|
||||
by number.
|
||||
|
||||
Implementations
|
||||
|
||||
@@ -96,7 +96,7 @@ search_type = 'text'
|
||||
``video`` are not yet implemented (Pull-Requests are welcome).
|
||||
"""
|
||||
|
||||
base_url: list[str] | str | None = None
|
||||
base_url: list[str] | str = []
|
||||
"""The value is an URL or a list of URLs. In the latter case instance will be
|
||||
selected randomly.
|
||||
"""
|
||||
|
||||
@@ -28,6 +28,20 @@ search_type = ""
|
||||
base_url_web = 'https://yandex.com/search/site/'
|
||||
base_url_images = 'https://yandex.com/images/search'
|
||||
|
||||
# Supported languages
|
||||
yandex_supported_langs = [
|
||||
"ru", # Russian
|
||||
"en", # English
|
||||
"be", # Belarusian
|
||||
"fr", # French
|
||||
"de", # German
|
||||
"id", # Indonesian
|
||||
"kk", # Kazakh
|
||||
"tt", # Tatar
|
||||
"tr", # Turkish
|
||||
"uk", # Ukrainian
|
||||
]
|
||||
|
||||
results_xpath = '//li[contains(@class, "serp-item")]'
|
||||
url_xpath = './/a[@class="b-serp-item__title-link"]/@href'
|
||||
title_xpath = './/h3[@class="b-serp-item__title"]/a[@class="b-serp-item__title-link"]/span'
|
||||
@@ -48,6 +62,10 @@ def request(query, params):
|
||||
"searchid": "3131712",
|
||||
}
|
||||
|
||||
lang = params["language"].split("-")[0]
|
||||
if lang in yandex_supported_langs:
|
||||
query_params_web["lang"] = lang
|
||||
|
||||
query_params_images = {
|
||||
"text": query,
|
||||
"uinfo": "sw-1920-sh-1080-ww-1125-wh-999",
|
||||
|
||||
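The language handling added above only forwards a ``lang`` parameter to Yandex when the primary subtag of the request language is listed in ``yandex_supported_langs``. A small sketch of that fallback; ``web_lang`` is a hypothetical helper, and the locale strings are examples.

.. code:: python

    yandex_supported_langs = ["ru", "en", "be", "fr", "de", "id", "kk", "tt", "tr", "uk"]

    def web_lang(language: str) -> str | None:
        # "de-CH" reduces to "de" (supported); "pt-BR" reduces to "pt" (not supported)
        lang = language.split("-")[0]
        return lang if lang in yandex_supported_langs else None

    print(web_lang("de-CH"))  # de
    print(web_lang("pt-BR"))  # None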
@@ -17,10 +17,6 @@
|
||||
|
||||
"""
|
||||
|
||||
# Struct fields aren't discovered in Python 3.14
|
||||
# - https://github.com/searxng/searxng/issues/5284
|
||||
from __future__ import annotations
|
||||
|
||||
import typing as t
|
||||
|
||||
import os
|
||||
|
||||
@@ -1,9 +1,6 @@
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
# pylint: disable=missing-module-docstring
|
||||
|
||||
# Struct fields aren't discovered in Python 3.14
|
||||
# - https://github.com/searxng/searxng/issues/5284
|
||||
from __future__ import annotations
|
||||
|
||||
import pathlib
|
||||
import msgspec
|
||||
|
||||
@@ -1,9 +1,6 @@
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
"""Implementations for a favicon proxy"""
|
||||
|
||||
# Struct fields aren't discovered in Python 3.14
|
||||
# - https://github.com/searxng/searxng/issues/5284
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Callable
|
||||
|
||||
|
||||
@@ -106,9 +106,9 @@ class AsyncProxyTransportFixed(AsyncProxyTransport):
|
||||
except ProxyConnectionError as e:
|
||||
raise httpx.ProxyError("ProxyConnectionError: " + str(e.strerror), request=request) from e
|
||||
except ProxyTimeoutError as e:
|
||||
raise httpx.ProxyError("ProxyTimeoutError: " + e.args[0], request=request) from e
|
||||
raise httpx.ProxyError("ProxyTimeoutError: " + str(e.args[0]), request=request) from e
|
||||
except ProxyError as e:
|
||||
raise httpx.ProxyError("ProxyError: " + e.args[0], request=request) from e
|
||||
raise httpx.ProxyError("ProxyError: " + str(e.args[0]), request=request) from e
|
||||
|
||||
|
||||
def get_transport_for_socks_proxy(
|
||||
|
||||
@@ -1,31 +1,19 @@
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
"""Calculate mathematical expressions using :py:obj:`ast.parse` (mode="eval")."""
|
||||
# pylint: disable=missing-module-docstring
|
||||
|
||||
import typing
|
||||
import typing as t
|
||||
|
||||
import ast
|
||||
import math
|
||||
import re
|
||||
import operator
|
||||
import multiprocessing
|
||||
from flask_babel import gettext # pyright: ignore[reportUnknownVariableType]
|
||||
|
||||
import babel
|
||||
import babel.numbers
|
||||
from flask_babel import gettext
|
||||
|
||||
from searx.result_types import EngineResults
|
||||
from searx.plugins import Plugin, PluginInfo
|
||||
|
||||
if typing.TYPE_CHECKING:
|
||||
from searx.search import SearchWithPlugins
|
||||
from searx.extended_types import SXNG_Request
|
||||
if t.TYPE_CHECKING:
|
||||
from searx.plugins import PluginCfg
|
||||
|
||||
|
||||
@t.final
|
||||
class SXNGPlugin(Plugin):
|
||||
"""Plugin converts strings to different hash digests. The results are
|
||||
displayed in area for the "answers".
|
||||
"""
|
||||
"""Parses and solves mathematical expressions."""
|
||||
|
||||
id = "calculator"
|
||||
|
||||
@@ -34,200 +22,7 @@ class SXNGPlugin(Plugin):
|
||||
|
||||
self.info = PluginInfo(
|
||||
id=self.id,
|
||||
name=gettext("Basic Calculator"),
|
||||
description=gettext("Calculate mathematical expressions via the search bar"),
|
||||
preference_section="general",
|
||||
name=gettext("Calculator"),
|
||||
description=gettext("Parses and solves mathematical expressions."),
|
||||
preference_section="query",
|
||||
)
|
||||
|
||||
def timeout_func(self, timeout, func, *args, **kwargs):
|
||||
que = mp_fork.Queue()
|
||||
p = mp_fork.Process(target=handler, args=(que, func, args), kwargs=kwargs)
|
||||
p.start()
|
||||
p.join(timeout=timeout)
|
||||
ret_val = None
|
||||
# pylint: disable=used-before-assignment,undefined-variable
|
||||
if not p.is_alive():
|
||||
ret_val = que.get()
|
||||
else:
|
||||
self.log.debug("terminate function (%s: %s // %s) after timeout is exceeded", func.__name__, args, kwargs)
|
||||
p.terminate()
|
||||
p.join()
|
||||
p.close()
|
||||
return ret_val
|
||||
|
||||
def post_search(self, request: "SXNG_Request", search: "SearchWithPlugins") -> EngineResults:
|
||||
results = EngineResults()
|
||||
|
||||
# only show the result of the expression on the first page
|
||||
if search.search_query.pageno > 1:
|
||||
return results
|
||||
|
||||
query = search.search_query.query
|
||||
# in order to avoid DoS attacks with long expressions, ignore long expressions
|
||||
if len(query) > 100:
|
||||
return results
|
||||
|
||||
# replace commonly used math operators with their proper Python operator
|
||||
query = query.replace("x", "*").replace(":", "/")
|
||||
|
||||
# Is this a term that can be calculated?
|
||||
word, constants = "", set()
|
||||
for x in query:
|
||||
# Alphabetic characters are defined as "Letters" in the Unicode
|
||||
# character database and are the constants in an equation.
|
||||
if x.isalpha():
|
||||
word += x.strip()
|
||||
elif word:
|
||||
constants.add(word)
|
||||
word = ""
|
||||
|
||||
# In the term of an arithmetic operation there should be no other
|
||||
# alphabetic characters besides the constants
|
||||
if constants - set(math_constants):
|
||||
return results
|
||||
|
||||
# use UI language
|
||||
ui_locale = babel.Locale.parse(request.preferences.get_value("locale"), sep="-")
|
||||
|
||||
# parse the number system in a localized way
|
||||
def _decimal(match: re.Match) -> str:
|
||||
val = match.string[match.start() : match.end()]
|
||||
val = babel.numbers.parse_decimal(val, ui_locale, numbering_system="latn")
|
||||
return str(val)
|
||||
|
||||
decimal = ui_locale.number_symbols["latn"]["decimal"]
|
||||
group = ui_locale.number_symbols["latn"]["group"]
|
||||
query = re.sub(f"[0-9]+[{decimal}|{group}][0-9]+[{decimal}|{group}]?[0-9]?", _decimal, query)
|
||||
|
||||
# in python, powers are calculated via **
|
||||
query_py_formatted = query.replace("^", "**")
|
||||
|
||||
# Prevent the runtime from being longer than 50 ms
|
||||
res = self.timeout_func(0.05, _eval_expr, query_py_formatted)
|
||||
if res is None or res[0] == "":
|
||||
return results
|
||||
|
||||
res, is_boolean = res
|
||||
if is_boolean:
|
||||
res = "True" if res != 0 else "False"
|
||||
else:
|
||||
res = babel.numbers.format_decimal(res, locale=ui_locale)
|
||||
results.add(results.types.Answer(answer=f"{search.search_query.query} = {res}"))
|
||||
|
||||
return results
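A standalone illustration of the locale-aware number handling in ``post_search`` above, which reads the decimal and grouping symbols from the UI locale and lets babel parse the localized digits; the ``de-DE`` locale and the input string are chosen only for the example.

.. code:: python

    import babel
    import babel.numbers

    ui_locale = babel.Locale.parse("de-DE", sep="-")

    print(ui_locale.number_symbols["latn"]["decimal"])  # ','
    print(ui_locale.number_symbols["latn"]["group"])    # '.'

    # with German symbols, "1.234,5" parses to the decimal value 1234.5
    val = babel.numbers.parse_decimal("1.234,5", ui_locale, numbering_system="latn")
    print(val)  # 1234.5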
|
||||
|
||||
|
||||
def _compare(ops: list[ast.cmpop], values: list[int | float]) -> int:
|
||||
"""
|
||||
2 < 3 becomes ops=[ast.Lt] and values=[2,3]
|
||||
2 < 3 <= 4 becomes ops=[ast.Lt, ast.LtE] and values=[2,3, 4]
|
||||
"""
|
||||
for op, a, b in zip(ops, values, values[1:]): # pylint: disable=invalid-name
|
||||
if isinstance(op, ast.Eq) and a == b:
|
||||
continue
|
||||
if isinstance(op, ast.NotEq) and a != b:
|
||||
continue
|
||||
if isinstance(op, ast.Lt) and a < b:
|
||||
continue
|
||||
if isinstance(op, ast.LtE) and a <= b:
|
||||
continue
|
||||
if isinstance(op, ast.Gt) and a > b:
|
||||
continue
|
||||
if isinstance(op, ast.GtE) and a >= b:
|
||||
continue
|
||||
|
||||
# Ignore impossible ops:
|
||||
# * ast.Is
|
||||
# * ast.IsNot
|
||||
# * ast.In
|
||||
# * ast.NotIn
|
||||
|
||||
# the result is False for a and b and operation op
|
||||
return 0
|
||||
# the results for all the ops are True
|
||||
return 1
|
||||
|
||||
|
||||
operators: dict[type, typing.Callable] = {
|
||||
ast.Add: operator.add,
|
||||
ast.Sub: operator.sub,
|
||||
ast.Mult: operator.mul,
|
||||
ast.Div: operator.truediv,
|
||||
ast.Pow: operator.pow,
|
||||
ast.BitXor: operator.xor,
|
||||
ast.BitOr: operator.or_,
|
||||
ast.BitAnd: operator.and_,
|
||||
ast.USub: operator.neg,
|
||||
ast.RShift: operator.rshift,
|
||||
ast.LShift: operator.lshift,
|
||||
ast.Mod: operator.mod,
|
||||
ast.Compare: _compare,
|
||||
}
|
||||
|
||||
|
||||
math_constants = {
|
||||
'e': math.e,
|
||||
'pi': math.pi,
|
||||
}
|
||||
|
||||
|
||||
# with multiprocessing.get_context("fork") we are ready for Py3.14 (by emulating
|
||||
# the old behavior "fork") but it will not solve the core problem of fork, nor
|
||||
# will it remove the deprecation warnings in py3.12 & py3.13. Issue is
|
||||
# discussed here: https://github.com/searxng/searxng/issues/4159
|
||||
mp_fork = multiprocessing.get_context("fork")
|
||||
|
||||
|
||||
def _eval_expr(expr):
|
||||
"""
|
||||
Evaluates the given textual expression.
|
||||
|
||||
Returns a tuple of (numericResult, isBooleanResult).
|
||||
|
||||
>>> _eval_expr('2^6')
|
||||
64, False
|
||||
>>> _eval_expr('2**6')
|
||||
64, False
|
||||
>>> _eval_expr('1 + 2*3**(4^5) / (6 + -7)')
|
||||
-5.0, False
|
||||
>>> _eval_expr('1 < 3')
|
||||
1, True
|
||||
>>> _eval_expr('5 < 3')
|
||||
0, True
|
||||
>>> _eval_expr('17 == 11+1+5 == 7+5+5')
|
||||
1, True
|
||||
"""
|
||||
try:
|
||||
root_expr = ast.parse(expr, mode='eval').body
|
||||
return _eval(root_expr), isinstance(root_expr, ast.Compare)
|
||||
|
||||
except (SyntaxError, TypeError, ZeroDivisionError):
|
||||
# Expression that can't be evaluated (i.e. not a math expression)
|
||||
return "", False
|
||||
|
||||
|
||||
def _eval(node):
|
||||
if isinstance(node, ast.Constant) and isinstance(node.value, (int, float)):
|
||||
return node.value
|
||||
|
||||
if isinstance(node, ast.BinOp):
|
||||
return operators[type(node.op)](_eval(node.left), _eval(node.right))
|
||||
|
||||
if isinstance(node, ast.UnaryOp):
|
||||
return operators[type(node.op)](_eval(node.operand))
|
||||
|
||||
if isinstance(node, ast.Compare):
|
||||
return _compare(node.ops, [_eval(node.left)] + [_eval(c) for c in node.comparators])
|
||||
|
||||
if isinstance(node, ast.Name) and node.id in math_constants:
|
||||
return math_constants[node.id]
|
||||
|
||||
raise TypeError(node)
|
||||
|
||||
|
||||
def handler(q: multiprocessing.Queue, func, args, **kwargs): # pylint:disable=invalid-name
|
||||
try:
|
||||
q.put(func(*args, **kwargs))
|
||||
except:
|
||||
q.put(None)
|
||||
raise
|
||||
|
||||
28
searx/plugins/infinite_scroll.py
Normal file
@@ -0,0 +1,28 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# pylint: disable=missing-module-docstring

import typing as t

from flask_babel import gettext  # pyright: ignore[reportUnknownVariableType]

from searx.plugins import Plugin, PluginInfo

if t.TYPE_CHECKING:
    from searx.plugins import PluginCfg


@t.final
class SXNGPlugin(Plugin):
    """Automatically loads the next page when scrolling to bottom of the current page."""

    id = "infiniteScroll"

    def __init__(self, plg_cfg: "PluginCfg") -> None:
        super().__init__(plg_cfg)

        self.info = PluginInfo(
            id=self.id,
            name=gettext("Infinite scroll"),
            description=gettext("Automatically loads the next page when scrolling to bottom of the current page"),
            preference_section="ui",
        )
@@ -24,12 +24,6 @@ if typing.TYPE_CHECKING:
|
||||
from searx.plugins import PluginCfg
|
||||
|
||||
|
||||
name = ""
|
||||
description = gettext("")
|
||||
|
||||
plugin_id = ""
|
||||
preference_section = ""
|
||||
|
||||
CONVERT_KEYWORDS = ["in", "to", "as"]
|
||||
|
||||
|
||||
|
||||
@@ -476,10 +476,6 @@ class Preferences:
|
||||
settings['ui']['query_in_title'],
|
||||
locked=is_locked('query_in_title')
|
||||
),
|
||||
'infinite_scroll': BooleanSetting(
|
||||
settings['ui']['infinite_scroll'],
|
||||
locked=is_locked('infinite_scroll')
|
||||
),
|
||||
'search_on_category_select': BooleanSetting(
|
||||
settings['ui']['search_on_category_select'],
|
||||
locked=is_locked('search_on_category_select')
|
||||
|
||||
@@ -16,10 +16,6 @@
|
||||
:members:
|
||||
"""
|
||||
|
||||
# Struct fields aren't discovered in Python 3.14
|
||||
# - https://github.com/searxng/searxng/issues/5284
|
||||
from __future__ import annotations
|
||||
|
||||
__all__ = ["Result"]
|
||||
|
||||
import typing as t
|
||||
|
||||
@@ -28,9 +28,6 @@ template.
|
||||
"""
|
||||
# pylint: disable=too-few-public-methods
|
||||
|
||||
# Struct fields aren't discovered in Python 3.14
|
||||
# - https://github.com/searxng/searxng/issues/5284
|
||||
from __future__ import annotations
|
||||
|
||||
__all__ = ["AnswerSet", "Answer", "Translations", "WeatherAnswer"]
|
||||
|
||||
|
||||
@@ -14,10 +14,6 @@ template. For highlighting the code passages, Pygments_ is used.
|
||||
"""
|
||||
# pylint: disable=too-few-public-methods, disable=invalid-name
|
||||
|
||||
# Struct fields aren't discovered in Python 3.14
|
||||
# - https://github.com/searxng/searxng/issues/5284
|
||||
from __future__ import annotations
|
||||
|
||||
__all__ = ["Code"]
|
||||
|
||||
import typing as t
|
||||
|
||||
@@ -13,9 +13,6 @@ template.
|
||||
"""
|
||||
# pylint: disable=too-few-public-methods
|
||||
|
||||
# Struct fields aren't discovered in Python 3.14
|
||||
# - https://github.com/searxng/searxng/issues/5284
|
||||
from __future__ import annotations
|
||||
|
||||
__all__ = ["KeyValue"]
|
||||
|
||||
|
||||
@@ -21,10 +21,6 @@ Related topics:
|
||||
"""
|
||||
# pylint: disable=too-few-public-methods, disable=invalid-name
|
||||
|
||||
# Struct fields aren't discovered in Python 3.14
|
||||
# - https://github.com/searxng/searxng/issues/5284
|
||||
from __future__ import annotations
|
||||
|
||||
__all__ = ["Paper"]
|
||||
|
||||
import typing as t
|
||||
|
||||
@@ -22,7 +22,7 @@ from searx.network import initialize as initialize_network, check_network_config
|
||||
from searx.results import ResultContainer
|
||||
from searx.search.checker import initialize as initialize_checker
|
||||
from searx.search.processors import PROCESSORS
|
||||
|
||||
from searx.search.processors.abstract import RequestParams
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
from .models import SearchQuery
|
||||
@@ -79,16 +79,20 @@ class Search:
|
||||
return bool(results)
|
||||
|
||||
# do search-request
|
||||
def _get_requests(self) -> tuple[list[tuple[str, str, dict[str, t.Any]]], int]:
|
||||
def _get_requests(self) -> tuple[list[tuple[str, str, RequestParams]], float]:
|
||||
# init vars
|
||||
requests: list[tuple[str, str, dict[str, t.Any]]] = []
|
||||
requests: list[tuple[str, str, RequestParams]] = []
|
||||
|
||||
# max of all selected engine timeout
|
||||
default_timeout = 0
|
||||
|
||||
# start search-request for all selected engines
|
||||
for engineref in self.search_query.engineref_list:
|
||||
processor = PROCESSORS[engineref.name]
|
||||
processor = PROCESSORS.get(engineref.name)
|
||||
if not processor:
|
||||
# engine does not exist; either not yet, or the engine's 'init' method
# failed and the engine has not been registered.
|
||||
continue
|
||||
|
||||
# stop the request now if the engine is suspend
|
||||
if processor.extend_container_if_suspended(self.result_container):
|
||||
@@ -133,7 +137,7 @@ class Search:
|
||||
|
||||
return requests, actual_timeout
|
||||
|
||||
def search_multiple_requests(self, requests: list[tuple[str, str, dict[str, t.Any]]]):
|
||||
def search_multiple_requests(self, requests: list[tuple[str, str, RequestParams]]):
|
||||
# pylint: disable=protected-access
|
||||
search_id = str(uuid4())
|
||||
|
||||
|
||||
@@ -51,7 +51,6 @@ class ProcessorMap(dict[str, EngineProcessor]):
|
||||
eng_name: str = eng_settings["name"]
|
||||
|
||||
if eng_settings.get("inactive", False) is True:
|
||||
logger.info("Engine of name '%s' is inactive.", eng_name)
|
||||
continue
|
||||
|
||||
eng_obj = engines.engines.get(eng_name)
|
||||
|
||||
@@ -155,8 +155,15 @@ class OnlineProcessor(EngineProcessor):
|
||||
search_query.locale.language,
|
||||
)
|
||||
headers["Accept-Language"] = ac_lang
|
||||
|
||||
self.logger.debug("HTTP Accept-Language: %s", headers.get("Accept-Language", ""))
|
||||
|
||||
# https://developer.mozilla.org/en-US/docs/Glossary/Fetch_metadata_request_header
|
||||
headers["Sec-Fetch-Dest"] = "empty"
|
||||
headers["Sec-Fetch-Mode"] = "cors"
|
||||
headers["Sec-Fetch-Site"] = "same-origin"
|
||||
headers["Sec-Fetch-User"] = "?1"
|
||||
headers["Sec-GPC"] = "1"
|
||||
|
||||
return params
|
||||
|
||||
def _send_http_request(self, params: OnlineParams):
|
||||
|
||||
@@ -124,8 +124,6 @@ ui:
|
||||
# query_in_title: When true, the result page's titles contain the query;
# it decreases privacy, since the browser can record the page titles.
|
||||
query_in_title: false
|
||||
# infinite_scroll: When true, automatically loads the next page when scrolling to bottom of the current page.
|
||||
infinite_scroll: false
|
||||
# ui theme
|
||||
default_theme: simple
|
||||
# center the results ?
|
||||
@@ -162,7 +160,6 @@ ui:
|
||||
# - locale
|
||||
# - theme
|
||||
# - results_on_new_tab
|
||||
# - infinite_scroll
|
||||
# - search_on_category_select
|
||||
# - method
|
||||
# - image_proxy
|
||||
@@ -214,6 +211,7 @@ outgoing:
|
||||
# - 1.1.1.2
|
||||
# - fe80::/126
|
||||
|
||||
|
||||
# Plugin configuration, for more details see
|
||||
# https://docs.searxng.org/admin/settings/settings_plugins.html
|
||||
#
|
||||
@@ -222,6 +220,9 @@ plugins:
|
||||
searx.plugins.calculator.SXNGPlugin:
|
||||
active: true
|
||||
|
||||
searx.plugins.infinite_scroll.SXNGPlugin:
|
||||
active: false
|
||||
|
||||
searx.plugins.hash_plugin.SXNGPlugin:
|
||||
active: true
|
||||
|
||||
@@ -340,6 +341,7 @@ engines:
|
||||
- name: 360search
|
||||
engine: 360search
|
||||
shortcut: 360so
|
||||
timeout: 10.0
|
||||
disabled: true
|
||||
|
||||
- name: 360search videos
|
||||
@@ -432,6 +434,9 @@ engines:
|
||||
# Requires Tor
|
||||
- name: ahmia
|
||||
engine: ahmia
|
||||
# Might do up to two requests to perform a search.
|
||||
# Since Tor is already slow by nature, the timeout is set very high.
|
||||
timeout: 20.0
|
||||
categories: onions
|
||||
enable_http: true
|
||||
shortcut: ah
|
||||
@@ -476,14 +481,14 @@ engines:
|
||||
shortcut: ask
|
||||
disabled: true
|
||||
|
||||
# - name: azure
|
||||
# engine: azure
|
||||
# shortcut: az
|
||||
# categories: [it, cloud]
|
||||
# azure_tenant_id: "your_tenant_id"
|
||||
# azure_client_id: "your_client_id"
|
||||
# azure_client_secret: "your_client_secret"
|
||||
# disabled: true
|
||||
- name: azure
|
||||
engine: azure
|
||||
shortcut: az
|
||||
categories: [it, cloud]
|
||||
# azure_tenant_id: "your_tenant_id"
|
||||
# azure_client_id: "your_client_id"
|
||||
# azure_client_secret: "your_client_secret"
|
||||
inactive: true
|
||||
|
||||
# tmp suspended: dh key too small
|
||||
# - name: base
|
||||
@@ -645,7 +650,7 @@ engines:
|
||||
# cf_ai_model_assistant: 'prompts_for_assistant_role'
|
||||
# cf_ai_model_system: 'prompts_for_system_role'
|
||||
timeout: 30
|
||||
disabled: true
|
||||
inactive: true
|
||||
|
||||
- name: core.ac.uk
|
||||
engine: core
|
||||
@@ -779,20 +784,20 @@ engines:
|
||||
require_api_key: false
|
||||
results: HTML
|
||||
|
||||
# - name: elasticsearch
|
||||
# shortcut: els
|
||||
# engine: elasticsearch
|
||||
# base_url: http://localhost:9200
|
||||
# username: elastic
|
||||
# password: changeme
|
||||
# index: my-index
|
||||
# enable_http: true
|
||||
# # available options: match, simple_query_string, term, terms, custom
|
||||
# query_type: match
|
||||
# # if query_type is set to custom, provide your query here
|
||||
# # custom_query_json: {"query":{"match_all": {}}}
|
||||
# # show_metadata: false
|
||||
# disabled: true
|
||||
- name: elasticsearch
|
||||
shortcut: els
|
||||
engine: elasticsearch
|
||||
# base_url: http://localhost:9200
|
||||
# username: elastic
|
||||
# password: changeme
|
||||
# index: my-index
|
||||
# enable_http: true
|
||||
# available options: match, simple_query_string, term, terms, custom
|
||||
query_type: match
|
||||
# if query_type is set to custom, provide your query here
|
||||
# custom_query_json: {"query":{"match_all": {}}}
|
||||
# show_metadata: false
|
||||
inactive: true
|
||||
|
||||
- name: wikidata
|
||||
engine: wikidata
|
||||
@@ -870,12 +875,12 @@ engines:
|
||||
require_api_key: false
|
||||
results: HTML
|
||||
|
||||
# - name: ebay
|
||||
# engine: ebay
|
||||
# shortcut: eb
|
||||
# base_url: 'https://www.ebay.com'
|
||||
# disabled: true
|
||||
# timeout: 5
|
||||
- name: ebay
|
||||
engine: ebay
|
||||
shortcut: eb
|
||||
base_url: 'https://www.ebay.com'
|
||||
inactive: true
|
||||
timeout: 5
|
||||
|
||||
- name: 1x
|
||||
engine: www1x
|
||||
@@ -896,12 +901,16 @@ engines:
|
||||
- name: flickr
|
||||
categories: images
|
||||
shortcut: fl
|
||||
engine: flickr_noapi
|
||||
|
||||
- name: flickr_api
|
||||
# You can use the engine using the official stable API, but you need an API
|
||||
# key, see: https://www.flickr.com/services/apps/create/
|
||||
# engine: flickr
|
||||
engine: flickr
|
||||
categories: images
|
||||
shortcut: fla
|
||||
# api_key: 'apikey' # required!
|
||||
# Or you can use the html non-stable engine, activated by default
|
||||
engine: flickr_noapi
|
||||
inactive: true
|
||||
|
||||
- name: free software directory
|
||||
engine: mediawiki
|
||||
@@ -915,13 +924,13 @@ engines:
|
||||
website: https://directory.fsf.org/
|
||||
wikidata_id: Q2470288
|
||||
|
||||
# - name: freesound
|
||||
# engine: freesound
|
||||
# shortcut: fnd
|
||||
# disabled: true
|
||||
# timeout: 15.0
|
||||
# API key required, see: https://freesound.org/docs/api/overview.html
|
||||
# api_key: MyAPIkey
|
||||
- name: freesound
|
||||
engine: freesound
|
||||
shortcut: fnd
|
||||
timeout: 15.0
|
||||
# API key required, see: https://freesound.org/docs/api/overview.html
|
||||
# api_key: MyAPIkey
|
||||
inactive: true
|
||||
|
||||
- name: frinkiac
|
||||
engine: frinkiac
|
||||
@@ -976,7 +985,7 @@ engines:
|
||||
- name: github code
|
||||
engine: github_code
|
||||
shortcut: ghc
|
||||
disabled: true
|
||||
inactive: true
|
||||
ghc_auth:
|
||||
# type is one of:
|
||||
# * none
|
||||
@@ -1058,6 +1067,12 @@ engines:
|
||||
play_categ: movies
|
||||
disabled: true
|
||||
|
||||
- name: grokipedia
|
||||
engine: grokipedia
|
||||
shortcut: gp
|
||||
disabled: true
|
||||
inactive: true
|
||||
|
||||
- name: material icons
|
||||
engine: material_icons
|
||||
shortcut: mi
|
||||
@@ -1257,9 +1272,9 @@ engines:
|
||||
# https://github.com/LibreTranslate/LibreTranslate?tab=readme-ov-file#mirrors
|
||||
base_url:
|
||||
- https://libretranslate.com/translate
|
||||
# api_key: abc123
|
||||
# api_key: ''
|
||||
shortcut: lt
|
||||
disabled: true
|
||||
inactive: true
|
||||
|
||||
- name: lingva
|
||||
engine: lingva
|
||||
@@ -1286,12 +1301,17 @@ engines:
|
||||
require_api_key: false
|
||||
results: HTML
|
||||
|
||||
- name: lucide
|
||||
engine: lucide
|
||||
shortcut: luc
|
||||
timeout: 3.0
|
||||
|
||||
- name: marginalia
|
||||
engine: marginalia
|
||||
shortcut: mar
|
||||
# To get an API key, please follow the instructions at
|
||||
# - https://about.marginalia-search.com/article/api/
|
||||
# api_key: ...
|
||||
# api_key: ''
|
||||
disabled: true
|
||||
inactive: true
|
||||
|
||||
@@ -1420,22 +1440,6 @@ engines:
|
||||
require_api_key: false
|
||||
results: JSON
|
||||
|
||||
# https://docs.searxng.org/dev/engines/online/mullvad_leta.html
|
||||
- name: mullvadleta
|
||||
engine: mullvad_leta
|
||||
disabled: true
|
||||
leta_engine: google
|
||||
categories: [general, web]
|
||||
shortcut: ml
|
||||
|
||||
- name: mullvadleta brave
|
||||
engine: mullvad_leta
|
||||
network: mullvadleta
|
||||
disabled: true
|
||||
leta_engine: brave
|
||||
categories: [general, web]
|
||||
shortcut: mlb
|
||||
|
||||
- name: odysee
|
||||
engine: odysee
|
||||
shortcut: od
|
||||
@@ -1626,6 +1630,7 @@ engines:
|
||||
engine: pixiv
|
||||
disabled: true
|
||||
inactive: true
|
||||
remove_ai_images: false
|
||||
pixiv_image_proxies:
|
||||
- https://pximg.example.org
|
||||
# A proxy is required to load the images. Hosting an image proxy server
|
||||
@@ -2087,19 +2092,19 @@ engines:
|
||||
# engine in combination with Jackett opens the possibility to query a lot of
|
||||
# public and private indexers directly from SearXNG. More details at:
|
||||
# https://docs.searxng.org/dev/engines/online/torznab.html
|
||||
#
|
||||
# - name: Torznab EZTV
|
||||
# engine: torznab
|
||||
# shortcut: eztv
|
||||
# base_url: http://localhost:9117/api/v2.0/indexers/eztv/results/torznab
|
||||
# enable_http: true # if using localhost
|
||||
# api_key: xxxxxxxxxxxxxxx
|
||||
# show_magnet_links: true
|
||||
# show_torrent_files: false
|
||||
# # https://github.com/Jackett/Jackett/wiki/Jackett-Categories
|
||||
# torznab_categories: # optional
|
||||
# - 2000
|
||||
# - 5000
|
||||
- name: Torznab EZTV
|
||||
engine: torznab
|
||||
shortcut: eztv
|
||||
# base_url: http://localhost:9117/api/v2.0/indexers/eztv/results/torznab
|
||||
# enable_http: true # if using localhost
|
||||
# api_key: xxxxxxxxxxxxxxx
|
||||
show_magnet_links: true
|
||||
show_torrent_files: false
|
||||
# https://github.com/Jackett/Jackett/wiki/Jackett-Categories
|
||||
torznab_categories: # optional
|
||||
- 2000
|
||||
- 5000
|
||||
inactive: true
|
||||
|
||||
# tmp suspended - too slow, too many errors
|
||||
# - name: urbandictionary
|
||||
@@ -2120,22 +2125,21 @@ engines:
|
||||
search_type: web
|
||||
shortcut: yd
|
||||
disabled: true
|
||||
inactive: true
|
||||
|
||||
- name: yandex images
|
||||
engine: yandex
|
||||
network: yandex
|
||||
categories: images
|
||||
search_type: images
|
||||
shortcut: ydi
|
||||
disabled: true
|
||||
inactive: true
|
||||
|
||||
- name: yandex music
|
||||
engine: yandex_music
|
||||
network: yandex
|
||||
shortcut: ydm
|
||||
disabled: true
|
||||
# https://yandex.com/support/music/access.html
|
||||
inactive: true
|
||||
|
||||
- name: yahoo
|
||||
engine: yahoo
|
||||
@@ -2148,14 +2152,15 @@ engines:
|
||||
|
||||
- name: youtube
|
||||
shortcut: yt
|
||||
engine: youtube_noapi
|
||||
|
||||
- name: youtube_api
|
||||
# You can use the engine using the official stable API, but you need an API
|
||||
# key See: https://console.developers.google.com/project
|
||||
#
|
||||
# engine: youtube_api
|
||||
# api_key: 'apikey' # required!
|
||||
#
|
||||
# Or you can use the html non-stable engine, activated by default
|
||||
engine: youtube_noapi
|
||||
engine: youtube_api
|
||||
# api_key: '' # required!
|
||||
shortcut: yta
|
||||
inactive: true
|
||||
|
||||
- name: dailymotion
|
||||
engine: dailymotion
|
||||
@@ -2308,18 +2313,21 @@ engines:
|
||||
|
||||
- name: wolframalpha
|
||||
shortcut: wa
|
||||
# You can use the engine using the official stable API, but you need an API
|
||||
# key. See: https://products.wolframalpha.com/api/
|
||||
#
|
||||
# engine: wolframalpha_api
|
||||
# api_key: ''
|
||||
#
|
||||
# Or you can use the html non-stable engine, activated by default
|
||||
engine: wolframalpha_noapi
|
||||
timeout: 6.0
|
||||
categories: general
|
||||
disabled: true
|
||||
|
||||
- name: wolframalpha_api
|
||||
# You can use the engine using the official stable API, but you need an API
|
||||
# key. See: https://products.wolframalpha.com/api/
|
||||
engine: wolframalpha_api
|
||||
# api_key: '' # required!
|
||||
shortcut: waa
|
||||
timeout: 6.0
|
||||
categories: general
|
||||
inactive: true
|
||||
|
||||
- name: dictzone
|
||||
engine: dictzone
|
||||
shortcut: dc
|
||||
@@ -2367,14 +2375,14 @@ engines:
|
||||
engine: seznam
|
||||
disabled: true
|
||||
|
||||
# - name: deepl
|
||||
# engine: deepl
|
||||
# shortcut: dpl
|
||||
# # You can use the engine using the official stable API, but you need an API key
|
||||
# # See: https://www.deepl.com/pro-api?cta=header-pro-api
|
||||
# api_key: '' # required!
|
||||
# timeout: 5.0
|
||||
# disabled: true
|
||||
- name: deepl
|
||||
engine: deepl
|
||||
shortcut: dpl
|
||||
# You can use the engine using the official stable API, but you need an API key
|
||||
# See: https://www.deepl.com/pro-api?cta=header-pro-api
|
||||
# api_key: '' # required!
|
||||
timeout: 5.0
|
||||
inactive: true
|
||||
|
||||
- name: mojeek
|
||||
shortcut: mjk
|
||||
@@ -2589,7 +2597,7 @@ engines:
|
||||
engine: wallhaven
|
||||
# api_key: abcdefghijklmnopqrstuvwxyz
|
||||
shortcut: wh
|
||||
disabled: true
|
||||
inactive: true
|
||||
|
||||
# wikimini: online encyclopedia for children
|
||||
# The fulltext and title parameter is necessary for Wikimini because
|
||||
|
||||
@@ -238,7 +238,6 @@ SCHEMA: dict[str, t.Any] = {
|
||||
'results_on_new_tab': SettingsValue(bool, False),
|
||||
'advanced_search': SettingsValue(bool, False),
|
||||
'query_in_title': SettingsValue(bool, False),
|
||||
'infinite_scroll': SettingsValue(bool, False),
|
||||
'cache_url': SettingsValue(str, 'https://web.archive.org/web/'),
|
||||
'search_on_category_select': SettingsValue(bool, True),
|
||||
'hotkeys': SettingsValue(('default', 'vim'), 'default'),
|
||||
|
||||
2
searx/static/themes/simple/chunk/13gvpunf.min.js
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
const e=e=>{if(!e)throw Error(`DOM element not found`)};export{e as t};
|
||||
//# sourceMappingURL=13gvpunf.min.js.map
|
||||
1
searx/static/themes/simple/chunk/13gvpunf.min.js.map
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"13gvpunf.min.js","names":["assertElement: AssertElement"],"sources":["../../../../../client/simple/src/js/util/assertElement.ts"],"sourcesContent":["// SPDX-License-Identifier: AGPL-3.0-or-later\n\ntype AssertElement = <T>(element?: T | null) => asserts element is T;\nexport const assertElement: AssertElement = <T>(element?: T | null): asserts element is T => {\n if (!element) {\n throw new Error(\"DOM element not found\");\n }\n};\n"],"mappings":"AAGA,MAAaA,EAAmC,GAA6C,CAC3F,GAAI,CAAC,EACH,MAAU,MAAM,wBAAwB"}
|
||||
8
searx/static/themes/simple/chunk/BAcZkB_P.min.js
vendored
Normal file
File diff suppressed because one or more lines are too long
1
searx/static/themes/simple/chunk/BAcZkB_P.min.js.map
Normal file
File diff suppressed because one or more lines are too long
15
searx/static/themes/simple/chunk/BmuYt_wm.min.js
vendored
Normal file
File diff suppressed because one or more lines are too long
1
searx/static/themes/simple/chunk/BmuYt_wm.min.js.map
Normal file
File diff suppressed because one or more lines are too long
Some files were not shown because too many files have changed in this diff.