Compare commits


5 Commits

Author  SHA1  Message  Date

Alexandre Flament  3085474818  Merge 4398ce059f into cd384a8a60  (2024-11-06 10:03:21 +01:00)

dependabot[bot]  cd384a8a60  [upd] pypi: Bump selenium from 4.25.0 to 4.26.1  (2024-11-06 10:01:13 +01:00)

    Bumps [selenium](https://github.com/SeleniumHQ/Selenium) from 4.25.0 to 4.26.1.
    - [Release notes](https://github.com/SeleniumHQ/Selenium/releases)
    - [Commits](https://github.com/SeleniumHQ/Selenium/commits)

    ---
    updated-dependencies:
    - dependency-name: selenium
      dependency-type: direct:development
      update-type: version-update:semver-minor
    ...

    Signed-off-by: dependabot[bot] <support@github.com>

Markus Heiser  c4055e449f  [fix] issues reported by `make test.yamllint`  (2024-11-06 08:16:21 +01:00)

    Signed-off-by: Markus Heiser <markus.heiser@darmarit.de>

Markus Heiser  2fdbf2622b  [mod] lint github YAML config files  (2024-11-06 08:16:21 +01:00)

    Signed-off-by: Markus Heiser <markus.heiser@darmarit.de>

Alexandre Flament  4398ce059f  Add baidu engine (experimental)  (2022-07-26 10:52:35 +02:00)
9 changed files with 344 additions and 169 deletions
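
The two lint commits (2fdbf2622b and c4055e449f) bring the repository's `.github` YAML files under `make test.yamllint` and silence the one warning that every GitHub workflow triggers: yamllint's `truthy` rule flags the top-level `on:` key, because YAML 1.1 parses a bare `on` as a boolean. The workflow diffs below simply append an inline disable comment to that key. A minimal sketch of the pattern, assuming yamllint's default rule set and a purely hypothetical workflow file:

    # Hypothetical workflow header; only the `on:` line matters here.
    # Without the comment, yamllint's truthy rule warns about the bare `on` key;
    # the inline directive disables rule:truthy for this single line.
    name: "Example"

    on: # yamllint disable-line rule:truthy
      schedule:
        - cron: "0 4 * * 5"
      workflow_dispatch: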


@@ -1,5 +1,5 @@
 name: "Checker"
-on:
+on: # yamllint disable-line rule:truthy
   schedule:
     - cron: "0 4 * * 5"
   workflow_dispatch:


@@ -1,5 +1,5 @@
 name: "Update searx.data"
-on:
+on: # yamllint disable-line rule:truthy
   schedule:
     - cron: "59 23 28 * *"
   workflow_dispatch:


@@ -1,6 +1,6 @@
 name: Integration

-on:
+on: # yamllint disable-line rule:truthy
   push:
     branches: ["master"]
   pull_request:
@@ -16,62 +16,62 @@ jobs:
strategy:
matrix:
os: [ubuntu-20.04]
python-version: ["3.9", "3.10", "3.11", "3.12",]
python-version: ["3.9", "3.10", "3.11", "3.12"]
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Install Ubuntu packages
run: |
sudo ./utils/searxng.sh install packages
sudo apt install firefox
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
architecture: 'x64'
- name: Cache Python dependencies
id: cache-python
uses: actions/cache@v3
with:
path: |
./local
./.nvm
./node_modules
key: python-${{ matrix.os }}-${{ matrix.python-version }}-${{ hashFiles('requirements*.txt', 'setup.py') }}
- name: Install Python dependencies
if: steps.cache-python.outputs.cache-hit != 'true'
run: |
make V=1 install
make V=1 gecko.driver
- name: Run tests
run: make V=1 ci.test
- name: Checkout
uses: actions/checkout@v4
- name: Install Ubuntu packages
run: |
sudo ./utils/searxng.sh install packages
sudo apt install firefox
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
architecture: 'x64'
- name: Cache Python dependencies
id: cache-python
uses: actions/cache@v3
with:
path: |
./local
./.nvm
./node_modules
key: python-${{ matrix.os }}-${{ matrix.python-version }}-${{ hashFiles('requirements*.txt', 'setup.py') }}
- name: Install Python dependencies
if: steps.cache-python.outputs.cache-hit != 'true'
run: |
make V=1 install
make V=1 gecko.driver
- name: Run tests
run: make V=1 ci.test
themes:
name: Themes
runs-on: ubuntu-20.04
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Install Ubuntu packages
run: sudo ./utils/searxng.sh install buildhost
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: '3.12'
architecture: 'x64'
- name: Cache Python dependencies
id: cache-python
uses: actions/cache@v3
with:
path: |
./local
./.nvm
./node_modules
key: python-ubuntu-20.04-3.12-${{ hashFiles('requirements*.txt', 'setup.py','.nvmrc', 'package.json') }}
- name: Install node dependencies
run: make V=1 node.env
- name: Build themes
run: make V=1 themes.all
- name: Checkout
uses: actions/checkout@v4
- name: Install Ubuntu packages
run: sudo ./utils/searxng.sh install buildhost
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: '3.12'
architecture: 'x64'
- name: Cache Python dependencies
id: cache-python
uses: actions/cache@v3
with:
path: |
./local
./.nvm
./node_modules
key: python-ubuntu-20.04-3.12-${{ hashFiles('requirements*.txt', 'setup.py','.nvmrc', 'package.json') }}
- name: Install node dependencies
run: make V=1 node.env
- name: Build themes
run: make V=1 themes.all
documentation:
name: Documentation
@@ -79,40 +79,40 @@ jobs:
permissions:
contents: write # for JamesIves/github-pages-deploy-action to push changes in repo
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: '0'
persist-credentials: false
- name: Install Ubuntu packages
run: sudo ./utils/searxng.sh install buildhost
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: '3.12'
architecture: 'x64'
- name: Cache Python dependencies
id: cache-python
uses: actions/cache@v3
with:
path: |
./local
./.nvm
./node_modules
key: python-ubuntu-20.04-3.12-${{ hashFiles('requirements*.txt', 'setup.py','.nvmrc', 'package.json') }}
- name: Build documentation
run: |
make V=1 docs.clean docs.html
- name: Deploy
if: github.ref == 'refs/heads/master'
uses: JamesIves/github-pages-deploy-action@3.7.1
with:
GITHUB_TOKEN: ${{ github.token }}
BRANCH: gh-pages
FOLDER: dist/docs
CLEAN: true # Automatically remove deleted files from the deploy branch
SINGLE_COMMIT: True
COMMIT_MESSAGE: '[doc] build from commit ${{ github.sha }}'
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: '0'
persist-credentials: false
- name: Install Ubuntu packages
run: sudo ./utils/searxng.sh install buildhost
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: '3.12'
architecture: 'x64'
- name: Cache Python dependencies
id: cache-python
uses: actions/cache@v3
with:
path: |
./local
./.nvm
./node_modules
key: python-ubuntu-20.04-3.12-${{ hashFiles('requirements*.txt', 'setup.py','.nvmrc', 'package.json') }}
- name: Build documentation
run: |
make V=1 docs.clean docs.html
- name: Deploy
if: github.ref == 'refs/heads/master'
uses: JamesIves/github-pages-deploy-action@3.7.1
with:
GITHUB_TOKEN: ${{ github.token }}
BRANCH: gh-pages
FOLDER: dist/docs
CLEAN: true # Automatically remove deleted files from the deploy branch
SINGLE_COMMIT: true
COMMIT_MESSAGE: '[doc] build from commit ${{ github.sha }}'
babel:
name: Update translations branch
@@ -125,37 +125,37 @@ jobs:
permissions:
contents: write # for make V=1 weblate.push.translations
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: '0'
token: ${{ secrets.WEBLATE_GITHUB_TOKEN }}
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: '3.12'
architecture: 'x64'
- name: Cache Python dependencies
id: cache-python
uses: actions/cache@v3
with:
path: |
./local
./.nvm
./node_modules
key: python-ubuntu-20.04-3.12-${{ hashFiles('requirements*.txt', 'setup.py','.nvmrc', 'package.json') }}
- name: weblate & git setup
env:
WEBLATE_CONFIG: ${{ secrets.WEBLATE_CONFIG }}
run: |
mkdir -p ~/.config
echo "${WEBLATE_CONFIG}" > ~/.config/weblate
git config --global user.email "searxng-bot@users.noreply.github.com"
git config --global user.name "searxng-bot"
- name: Update transations
id: update
run: |
make V=1 weblate.push.translations
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: '0'
token: ${{ secrets.WEBLATE_GITHUB_TOKEN }}
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: '3.12'
architecture: 'x64'
- name: Cache Python dependencies
id: cache-python
uses: actions/cache@v3
with:
path: |
./local
./.nvm
./node_modules
key: python-ubuntu-20.04-3.12-${{ hashFiles('requirements*.txt', 'setup.py','.nvmrc', 'package.json') }}
- name: weblate & git setup
env:
WEBLATE_CONFIG: ${{ secrets.WEBLATE_CONFIG }}
run: |
mkdir -p ~/.config
echo "${WEBLATE_CONFIG}" > ~/.config/weblate
git config --global user.email "searxng-bot@users.noreply.github.com"
git config --global user.name "searxng-bot"
- name: Update transations
id: update
run: |
make V=1 weblate.push.translations
dockers:
name: Docker


@@ -1,5 +1,5 @@
 name: "Security checks"
-on:
+on: # yamllint disable-line rule:truthy
   schedule:
     - cron: "42 05 * * *"
   workflow_dispatch:


@@ -1,5 +1,5 @@
 name: "Update translations"
-on:
+on: # yamllint disable-line rule:truthy
   schedule:
     - cron: "05 07 * * 5"
   workflow_dispatch:
@@ -10,50 +10,50 @@ jobs:
runs-on: ubuntu-20.04
if: ${{ github.repository_owner == 'searxng' && github.ref == 'refs/heads/master' }}
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: '0'
token: ${{ secrets.WEBLATE_GITHUB_TOKEN }}
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: '3.12'
architecture: 'x64'
- name: Cache Python dependencies
id: cache-python
uses: actions/cache@v3
with:
path: |
./local
./.nvm
./node_modules
key: python-ubuntu-20.04-3.12-${{ hashFiles('requirements*.txt', 'setup.py','.nvmrc', 'package.json') }}
- name: weblate & git setup
env:
WEBLATE_CONFIG: ${{ secrets.WEBLATE_CONFIG }}
run: |
mkdir -p ~/.config
echo "${WEBLATE_CONFIG}" > ~/.config/weblate
git config --global user.email "searxng-bot@users.noreply.github.com"
git config --global user.name "searxng-bot"
- name: Merge and push transation updates
run: |
make V=1 weblate.translations.commit
- name: Create Pull Request
id: cpr
uses: peter-evans/create-pull-request@v3
with:
token: ${{ secrets.WEBLATE_GITHUB_TOKEN }}
commit-message: '[l10n] update translations from Weblate'
committer: searxng-bot <searxng-bot@users.noreply.github.com>
author: ${{ github.actor }} <${{ github.actor }}@users.noreply.github.com>
signoff: false
branch: translations_update
delete-branch: true
draft: false
title: '[l10n] update translations from Weblate'
body: |
update translations from Weblate
labels: |
translation
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: '0'
token: ${{ secrets.WEBLATE_GITHUB_TOKEN }}
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: '3.12'
architecture: 'x64'
- name: Cache Python dependencies
id: cache-python
uses: actions/cache@v3
with:
path: |
./local
./.nvm
./node_modules
key: python-ubuntu-20.04-3.12-${{ hashFiles('requirements*.txt', 'setup.py','.nvmrc', 'package.json') }}
- name: weblate & git setup
env:
WEBLATE_CONFIG: ${{ secrets.WEBLATE_CONFIG }}
run: |
mkdir -p ~/.config
echo "${WEBLATE_CONFIG}" > ~/.config/weblate
git config --global user.email "searxng-bot@users.noreply.github.com"
git config --global user.name "searxng-bot"
- name: Merge and push transation updates
run: |
make V=1 weblate.translations.commit
- name: Create Pull Request
id: cpr
uses: peter-evans/create-pull-request@v3
with:
token: ${{ secrets.WEBLATE_GITHUB_TOKEN }}
commit-message: '[l10n] update translations from Weblate'
committer: searxng-bot <searxng-bot@users.noreply.github.com>
author: ${{ github.actor }} <${{ github.actor }}@users.noreply.github.com>
signoff: false
branch: translations_update
delete-branch: true
draft: false
title: '[l10n] update translations from Weblate'
body: |
update translations from Weblate
labels: |
translation

manage

@@ -57,7 +57,7 @@ while IFS= read -r line; do
     if [ "$line" != "tests/unit/settings/syntaxerror_settings.yml" ]; then
         YAMLLINT_FILES+=("$line")
     fi
-done <<< "$(git ls-files './tests/*.yml' './searx/*.yml' './utils/templates/etc/searxng/*.yml')"
+done <<< "$(git ls-files './tests/*.yml' './searx/*.yml' './utils/templates/etc/searxng/*.yml' '.github/*.yml' '.github/*/*.yml')"

 RST_FILES=(
     'README.rst'


@@ -4,7 +4,7 @@ cov-core==1.15.0
 black==24.3.0
 pylint==3.3.1
 splinter==0.21.0
-selenium==4.25.0
+selenium==4.26.1
 Pallets-Sphinx-Themes==2.3.0
 Sphinx==7.4.7
 sphinx-issues==5.0.0

searx/engines/baidu.py (new file)

@@ -0,0 +1,169 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
"""Baidu (Web)

- https://github.com/searx/searx/issues/2019#issuecomment-648227442
"""

import re
from urllib.parse import urlencode
from lxml import html

from searx.utils import eval_xpath, extract_text, eval_xpath_list, eval_xpath_getindex
from searx.network import raise_for_httperror, multi_requests, get, Request
from searx.exceptions import SearxEngineCaptchaException

about = {
    "website": 'https://www.baidu.com',
    "wikidata_id": 'Q14772',
    "official_api_documentation": 'https://apis.baidu.com/',
    "use_official_api": False,
    "require_api_key": False,
    "results": 'HTML',
    "language": 'zh',
}

# engine dependent config
categories = ['general', 'web']
paging = False
time_range_support = False
safesearch = False

base_url = 'https://www.baidu.com/'
search_string = 's?{query}'

skip_tpls = ('img_normal', 'short_video', 'yl_music_song', 'dict3', 'recommend_list')

desc_xpath_per_tpl = {
    'se_com_default': './/span[contains(@class, "content-right_8Zs40")]',
    'kaifa_pc_open_source_software': './/p[contains(@class, "c-color-text")]',
    'bk_polysemy': './/div/@aria-label',
    'se_st_single_video_zhanzhang': './/span[contains(@class, "c-span-last")]//p[2]',
}


def get_initial_parameters(params):
    resp_index = get(base_url, headers=params['headers'], raise_for_httperror=True)
    dom = html.fromstring(resp_index.text)
    query_params = {}
    for ielement in eval_xpath_list(dom, '//form[@id="form"]//input[@name]'):
        name = ielement.attrib.get('name')
        value = ielement.attrib.get('value')
        query_params[name] = value
    return query_params, resp_index.cookies


def request(query, params):
    params['headers'].update(
        {
            'Accept-Language': 'en-US,en;q=0.5',
            'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
            'Sec-Fetch-Dest': 'document',
            'Sec-Fetch-Mode': 'navigate',
            'Sec-Fetch-Site': 'same-origin',
            'Sec-Fetch-User': '?1',
            'Sec-GPC': '1',
            'Upgrade-Insecure-Requests': '1',
            'TE': 'trailers',
        }
    )
    query_params, cookies = get_initial_parameters(params)
    query_params['wd'] = query
    params['url'] = base_url + search_string.format(query=urlencode(query_params))
    params['cookies'] = cookies
    params['raise_for_httperror'] = False
    return params


def response(resp):
    results = []

    if resp.url.host == 'wappass.baidu.com' or resp.url.path.startswith('/static/captcha'):
        raise SearxEngineCaptchaException()
    raise_for_httperror(resp)

    dom = html.fromstring(resp.text)

    # follow redirect but don't use the result page to reduce the CAPTCHA issue
    redirect_element = eval_xpath_getindex(dom, '//noscript/meta[@http-equiv="refresh"]/@content', 0, default=None)
    if redirect_element and redirect_element.startswith('0; url='):
        get(
            base_url + redirect_element[8:],
            headers=resp.search_params['headers'],
            cookies=resp.search_params['cookies'],
        )

    for result in eval_xpath_list(dom, '//div[contains(@id,"content_left")]/div[contains(@class, "c-container")]'):
        tpl = result.attrib.get('tpl')
        if tpl in skip_tpls:
            continue

        if tpl == 'kaifa_pc_blog_weak':
            # skip the result to kaifa.baidu.com (search engine for IT)
            # but includes results from kaifa
            for r2 in eval_xpath_list(result, './/div[contains(@class, "c-gap-bottom-small")]'):
                title = extract_text(eval_xpath(r2, './/div[@class="c-row"]//a'))
                url = extract_text(eval_xpath(r2, './/div[@class="c-row"]//a/@href'))
                content = extract_text(eval_xpath(r2, '//span[@class="c-line-clamp2"]'))
                results.append(
                    {
                        'url': url,
                        'title': title,
                        'content': content,
                    }
                )
            continue

        # normal results
        title = extract_text(eval_xpath(result, './/h3/a'))
        url = extract_text(eval_xpath(result, './/h3/a/@href'))
        if not title or not url:
            continue

        content = None
        if tpl in desc_xpath_per_tpl:
            # try the XPath for the Baidu template
            content = extract_text(eval_xpath(result, desc_xpath_per_tpl[tpl]))
        if not content:
            # no content was found: try all the XPath from the Baidu templates
            for xp in desc_xpath_per_tpl.values():
                content = extract_text(eval_xpath(result, xp))
                if content:
                    break

        results.append(
            {
                'url': url,
                'title': title,
                'content': content,
            }
        )

    # resolve the Baidu redirections
    # note: Baidu does not support HTTP/2
    request_list = [
        Request.get(
            u['url'].replace('http://www.baidu.com/link?url=', 'https://www.baidu.com/link?url='),
            allow_redirects=False,
            headers=resp.search_params['headers'],
        )
        for u in results
    ]
    response_list = multi_requests(request_list)
    for i, redirect_response in enumerate(response_list):
        if not isinstance(redirect_response, Exception):
            results[i]['url'] = redirect_response.headers['location']

    return results


def debug_write_content_to_file(text):
    RE_STYLE_ELEMENT = re.compile(r'<style[^>]*>[^<]+</style>')
    RE_SCRIPT_ELEMENT = re.compile(r'<script[^>]*>[^<]+</script>')
    RE_COMMENT_ELEMENT = re.compile(r'\<\!\-\-[^-]+\-\-\>')
    with open('baidu.html', 'wt', encoding='utf-8') as f:
        text = RE_STYLE_ELEMENT.sub("", text)
        text = RE_SCRIPT_ELEMENT.sub("", text)
        text = RE_COMMENT_ELEMENT.sub("", text)
        text = "\n".join([ll.rstrip() for ll in text.splitlines() if ll.strip()])
        f.write(text)


@@ -425,6 +425,12 @@ engines:
     shortcut: bi
     disabled: true

+  - name: baidu
+    engine: baidu
+    shortcut: ba
+    timeout: 15
+    disabled: true
+
   - name: bing images
     engine: bing_images
     shortcut: bii
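
The new engine entry above ships disabled and with a generous 15-second timeout. An instance that wants to try it would override that flag in its own configuration rather than edit the shipped defaults; a minimal sketch, assuming SearXNG's `use_default_settings` merge (which matches engines by `name`) and a hypothetical instance-level `settings.yml` that is not part of this diff:

    # Hypothetical instance settings.yml enabling the experimental engine;
    # the engine keys mirror the entry added above, everything else stays default.
    use_default_settings: true

    engines:
      - name: baidu
        disabled: false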