mirror of https://github.com/searxng/searxng.git
Compare commits
5 Commits
7b772cfca7
...
3085474818
Author | SHA1 | Date |
---|---|---|
Alexandre Flament | 3085474818 | |
dependabot[bot] | cd384a8a60 | |
Markus Heiser | c4055e449f | |
Markus Heiser | 2fdbf2622b | |
Alexandre Flament | 4398ce059f |
|
@ -1,5 +1,5 @@
|
||||||
name: "Checker"
|
name: "Checker"
|
||||||
on:
|
on: # yamllint disable-line rule:truthy
|
||||||
schedule:
|
schedule:
|
||||||
- cron: "0 4 * * 5"
|
- cron: "0 4 * * 5"
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
|
|
|
@ -1,5 +1,5 @@
|
||||||
name: "Update searx.data"
|
name: "Update searx.data"
|
||||||
on:
|
on: # yamllint disable-line rule:truthy
|
||||||
schedule:
|
schedule:
|
||||||
- cron: "59 23 28 * *"
|
- cron: "59 23 28 * *"
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
name: Integration
|
name: Integration
|
||||||
|
|
||||||
on:
|
on: # yamllint disable-line rule:truthy
|
||||||
push:
|
push:
|
||||||
branches: ["master"]
|
branches: ["master"]
|
||||||
pull_request:
|
pull_request:
|
||||||
|
@ -16,7 +16,7 @@ jobs:
|
||||||
strategy:
|
strategy:
|
||||||
matrix:
|
matrix:
|
||||||
os: [ubuntu-20.04]
|
os: [ubuntu-20.04]
|
||||||
python-version: ["3.9", "3.10", "3.11", "3.12",]
|
python-version: ["3.9", "3.10", "3.11", "3.12"]
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v4
|
||||||
|
@ -111,7 +111,7 @@ jobs:
|
||||||
BRANCH: gh-pages
|
BRANCH: gh-pages
|
||||||
FOLDER: dist/docs
|
FOLDER: dist/docs
|
||||||
CLEAN: true # Automatically remove deleted files from the deploy branch
|
CLEAN: true # Automatically remove deleted files from the deploy branch
|
||||||
SINGLE_COMMIT: True
|
SINGLE_COMMIT: true
|
||||||
COMMIT_MESSAGE: '[doc] build from commit ${{ github.sha }}'
|
COMMIT_MESSAGE: '[doc] build from commit ${{ github.sha }}'
|
||||||
|
|
||||||
babel:
|
babel:
|
||||||
|
|
|
@ -1,5 +1,5 @@
|
||||||
name: "Security checks"
|
name: "Security checks"
|
||||||
on:
|
on: # yamllint disable-line rule:truthy
|
||||||
schedule:
|
schedule:
|
||||||
- cron: "42 05 * * *"
|
- cron: "42 05 * * *"
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
|
|
|
@ -1,5 +1,5 @@
|
||||||
name: "Update translations"
|
name: "Update translations"
|
||||||
on:
|
on: # yamllint disable-line rule:truthy
|
||||||
schedule:
|
schedule:
|
||||||
- cron: "05 07 * * 5"
|
- cron: "05 07 * * 5"
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
|
|
2
manage
2
manage
|
@ -57,7 +57,7 @@ while IFS= read -r line; do
|
||||||
if [ "$line" != "tests/unit/settings/syntaxerror_settings.yml" ]; then
|
if [ "$line" != "tests/unit/settings/syntaxerror_settings.yml" ]; then
|
||||||
YAMLLINT_FILES+=("$line")
|
YAMLLINT_FILES+=("$line")
|
||||||
fi
|
fi
|
||||||
done <<< "$(git ls-files './tests/*.yml' './searx/*.yml' './utils/templates/etc/searxng/*.yml')"
|
done <<< "$(git ls-files './tests/*.yml' './searx/*.yml' './utils/templates/etc/searxng/*.yml' '.github/*.yml' '.github/*/*.yml')"
|
||||||
|
|
||||||
RST_FILES=(
|
RST_FILES=(
|
||||||
'README.rst'
|
'README.rst'
|
||||||
|
|
|
@ -4,7 +4,7 @@ cov-core==1.15.0
|
||||||
black==24.3.0
|
black==24.3.0
|
||||||
pylint==3.3.1
|
pylint==3.3.1
|
||||||
splinter==0.21.0
|
splinter==0.21.0
|
||||||
selenium==4.25.0
|
selenium==4.26.1
|
||||||
Pallets-Sphinx-Themes==2.3.0
|
Pallets-Sphinx-Themes==2.3.0
|
||||||
Sphinx==7.4.7
|
Sphinx==7.4.7
|
||||||
sphinx-issues==5.0.0
|
sphinx-issues==5.0.0
|
||||||
|
|
|
@ -0,0 +1,169 @@
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||||
|
"""Bing (Web)
|
||||||
|
|
||||||
|
- https://github.com/searx/searx/issues/2019#issuecomment-648227442
|
||||||
|
"""
|
||||||
|
|
||||||
|
import re
|
||||||
|
from urllib.parse import urlencode
|
||||||
|
from lxml import html
|
||||||
|
from searx.utils import eval_xpath, extract_text, eval_xpath_list, eval_xpath_getindex
|
||||||
|
from searx.network import raise_for_httperror, multi_requests, get, Request
|
||||||
|
from searx.exceptions import SearxEngineCaptchaException
|
||||||
|
|
||||||
|
# Engine metadata shown in SearXNG's engine overview.
about = {
    "website": 'https://www.baidu.com',
    "wikidata_id": 'Q14772',
    "official_api_documentation": 'https://apis.baidu.com/',
    "use_official_api": False,
    "require_api_key": False,
    "results": 'HTML',
    # ISO 639-1 code for Chinese is 'zh' — the original 'zn' is not a valid
    # language code.
    "language": 'zh',
}

# engine dependent config
categories = ['general', 'web']
paging = False
time_range_support = False
safesearch = False

base_url = 'https://www.baidu.com/'
search_string = 's?{query}'

# Baidu result templates ("tpl" attribute of a result container) that carry
# no usable web result and are skipped by response().
skip_tpls = ('img_normal', 'short_video', 'yl_music_song', 'dict3', 'recommend_list')

# Per-template XPath (relative to the result container) locating the result
# description text.
desc_xpath_per_tpl = {
    'se_com_default': './/span[contains(@class, "content-right_8Zs40")]',
    'kaifa_pc_open_source_software': './/p[contains(@class, "c-color-text")]',
    'bk_polysemy': './/div/@aria-label',
    'se_st_single_video_zhanzhang': './/span[contains(@class, "c-span-last")]//p[2]',
}
|
def get_initial_parameters(params):
    """Fetch the Baidu start page and collect its hidden form state.

    Returns a tuple ``(query_params, cookies)`` where ``query_params`` maps
    every named ``<input>`` of the start page's search form to its value
    (Baidu expects these tokens echoed back on the search request) and
    ``cookies`` are the cookies set by the start page response.
    """
    resp_index = get(base_url, headers=params['headers'], raise_for_httperror=True)
    dom = html.fromstring(resp_index.text)
    form_inputs = eval_xpath_list(dom, '//form[@id="form"]//input[@name]')
    # On duplicate names the last input wins, matching an assignment loop.
    query_params = {node.attrib.get('name'): node.attrib.get('value') for node in form_inputs}
    return query_params, resp_index.cookies
||||||
|
|
||||||
|
|
||||||
|
def request(query, params):
    """Assemble the Baidu search request for *query*.

    Mimics a regular browser navigation (header set below), seeds the query
    string with the hidden form tokens from the start page, and carries over
    the start page cookies.
    """
    browser_like_headers = {
        'Accept-Language': 'en-US,en;q=0.5',
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
        'Sec-Fetch-Dest': 'document',
        'Sec-Fetch-Mode': 'navigate',
        'Sec-Fetch-Site': 'same-origin',
        'Sec-Fetch-User': '?1',
        'Sec-GPC': '1',
        'Upgrade-Insecure-Requests': '1',
        'TE': 'trailers',
    }
    params['headers'].update(browser_like_headers)

    query_params, cookies = get_initial_parameters(params)
    query_params['wd'] = query

    params['url'] = base_url + search_string.format(query=urlencode(query_params))
    params['cookies'] = cookies
    # HTTP errors are inspected in response() (CAPTCHA detection), so the
    # automatic check is disabled here.
    params['raise_for_httperror'] = False
    return params
||||||
|
|
||||||
|
|
||||||
|
def response(resp):
    """Parse a Baidu result page into a list of SearXNG result dicts.

    Raises SearxEngineCaptchaException when Baidu redirected the request to
    its CAPTCHA host; otherwise delegates HTTP error handling to
    raise_for_httperror (request() disabled the automatic check).
    """
    results = []

    if resp.url.host == 'wappass.baidu.com' or resp.url.path.startswith('/static/captcha'):
        raise SearxEngineCaptchaException()
    raise_for_httperror(resp)

    dom = html.fromstring(resp.text)

    # follow redirect but don't use the result page to reduce the CAPTCHA issue
    redirect_element = eval_xpath_getindex(dom, '//noscript/meta[@http-equiv="refresh"]/@content', 0, default=None)
    if redirect_element and redirect_element.startswith('0; url='):
        # NOTE(review): [8:] skips one character past the "0; url=" prefix
        # (7 chars) — presumably dropping a leading '/' since base_url already
        # ends with one; confirm against a live redirect value.
        get(
            base_url + redirect_element[8:],
            headers=resp.search_params['headers'],
            cookies=resp.search_params['cookies'],
        )

    for result in eval_xpath_list(dom, '//div[contains(@id,"content_left")]/div[contains(@class, "c-container")]'):
        tpl = result.attrib.get('tpl')
        if tpl in skip_tpls:
            continue

        if tpl == 'kaifa_pc_blog_weak':
            # skip the result to kaifa.baidu.com (search engine for IT)
            # but includes results from kaifa
            for r2 in eval_xpath_list(result, './/div[contains(@class, "c-gap-bottom-small")]'):
                title = extract_text(eval_xpath(r2, './/div[@class="c-row"]//a'))
                url = extract_text(eval_xpath(r2, './/div[@class="c-row"]//a/@href'))
                # fix: use './/' so the lookup stays relative to r2 — a bare
                # '//' searches the whole document in lxml and would return
                # the first matching span page-wide for every sub-result.
                content = extract_text(eval_xpath(r2, './/span[@class="c-line-clamp2"]'))
                results.append(
                    {
                        'url': url,
                        'title': title,
                        'content': content,
                    }
                )
            continue

        # normal results
        title = extract_text(eval_xpath(result, './/h3/a'))
        url = extract_text(eval_xpath(result, './/h3/a/@href'))

        if not title or not url:
            continue

        content = None
        if tpl in desc_xpath_per_tpl:
            # try the XPath for the Baidu template
            content = extract_text(eval_xpath(result, desc_xpath_per_tpl[tpl]))
        if not content:
            # no content was found: try all the XPath from the Baidu templates
            for xp in desc_xpath_per_tpl.values():
                content = extract_text(eval_xpath(result, xp))
                if content:
                    break

        results.append(
            {
                'url': url,
                'title': title,
                'content': content,
            }
        )

    # resolve the Baidu redirections
    # note: Baidu does not support HTTP/2
    request_list = [
        Request.get(
            u['url'].replace('http://www.baidu.com/link?url=', 'https://www.baidu.com/link?url='),
            allow_redirects=False,
            headers=resp.search_params['headers'],
        )
        for u in results
    ]
    response_list = multi_requests(request_list)
    for i, redirect_response in enumerate(response_list):
        # multi_requests returns an Exception object for a failed request;
        # keep the original (redirector) URL in that case.
        if not isinstance(redirect_response, Exception):
            results[i]['url'] = redirect_response.headers['location']

    return results
||||||
|
|
||||||
|
|
||||||
|
def debug_write_content_to_file(text):
    """Debug helper: dump *text* to ``baidu.html`` in the working directory,
    with ``<style>``/``<script>`` elements, HTML comments and blank lines
    stripped so the page structure is easier to read.
    """
    strip_patterns = (
        re.compile(r'<style[^>]*>[^<]+</style>'),
        re.compile(r'<script[^>]*>[^<]+</script>'),
        re.compile(r'\<\!\-\-[^-]+\-\-\>'),
    )
    for pattern in strip_patterns:
        text = pattern.sub("", text)
    cleaned = "\n".join(line.rstrip() for line in text.splitlines() if line.strip())
    with open('baidu.html', 'wt', encoding='utf-8') as f:
        f.write(cleaned)
|
@ -425,6 +425,12 @@ engines:
|
||||||
shortcut: bi
|
shortcut: bi
|
||||||
disabled: true
|
disabled: true
|
||||||
|
|
||||||
|
- name: baidu
|
||||||
|
engine: baidu
|
||||||
|
shortcut: ba
|
||||||
|
timeout: 15
|
||||||
|
disabled: true
|
||||||
|
|
||||||
- name: bing images
|
- name: bing images
|
||||||
engine: bing_images
|
engine: bing_images
|
||||||
shortcut: bii
|
shortcut: bii
|
||||||
|
|
Loading…
Reference in New Issue