Compare commits

..

2 Commits

Author: dependabot[bot]
SHA1: b418f489cc
Date: 2024-11-01 08:30:54 +01:00
Message: Merge 3454346f99 into cc148a76b0

Author: dependabot[bot]
SHA1: 3454346f99
Date: 2024-11-01 07:28:33 +00:00
Message: [upd] pypi: Bump selenium from 4.25.0 to 4.26.1

Bumps [selenium](https://github.com/SeleniumHQ/Selenium) from 4.25.0 to 4.26.1.
- [Release notes](https://github.com/SeleniumHQ/Selenium/releases)
- [Commits](https://github.com/SeleniumHQ/Selenium/commits)

---
updated-dependencies:
- dependency-name: selenium
  dependency-type: direct:development
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2 changed files with 15 additions and 16 deletions
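The bump touches a development-only dependency (`dependency-type: direct:development`). As a quick local sanity check, one can confirm which selenium version is actually installed; this is a minimal illustrative sketch, not part of the change set, and the pinned requirement itself lives in a requirements file that this view does not name:

```python
# Illustrative only: verify the locally installed selenium matches the
# version dependabot bumped to (4.26.1). The real pin lives in the
# project's requirements file, which is not named in this compare view.
import selenium

print(selenium.__version__)
assert selenium.__version__ == "4.26.1", f"unexpected selenium version: {selenium.__version__}"
```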

Changed file 1 of 2 (a GitHub Actions workflow; the path is not shown in this compare view):

@@ -45,6 +45,14 @@ jobs:
           make V=1 gecko.driver
       - name: Run tests
         run: make V=1 ci.test
+      - name: Test coverage
+        run: make V=1 test.coverage
+      - name: Store coverage result
+        uses: actions/upload-artifact@v3
+        with:
+          name: coverage-${{ matrix.python-version }}
+          path: coverage/
+          retention-days: 60
 
   themes:
     name: Themes

Changed file 2 of 2 (the Anna's Archive engine module; exact path not shown in this view):

@@ -34,10 +34,10 @@ Implementations
 """
 
 from typing import List, Dict, Any, Optional
-from urllib.parse import urlencode
+from urllib.parse import quote
 from lxml import html
 
-from searx.utils import extract_text, eval_xpath, eval_xpath_getindex, eval_xpath_list
+from searx.utils import extract_text, eval_xpath, eval_xpath_list
 from searx.enginelib.traits import EngineTraits
 from searx.data import ENGINE_TRAITS
@@ -53,7 +53,7 @@ about: Dict[str, Any] = {
 # engine dependent config
 categories: List[str] = ["files"]
-paging: bool = True
+paging: bool = False
 
 # search-url
 base_url: str = "https://annas-archive.org"
@@ -99,18 +99,9 @@ def init(engine_settings=None): # pylint: disable=unused-argument
 def request(query, params: Dict[str, Any]) -> Dict[str, Any]:
+    q = quote(query)
     lang = traits.get_language(params["language"], traits.all_locale) # type: ignore
-    args = {
-        'lang': lang,
-        'content': aa_content,
-        'ext': aa_ext,
-        'sort': aa_sort,
-        'q': query,
-        'page': params['pageno'],
-    }
-    # filter out None and empty values
-    filtered_args = dict((k, v) for k, v in args.items() if v)
-    params["url"] = f"{base_url}/search?{urlencode(filtered_args)}"
+    params["url"] = base_url + f"/search?lang={lang or ''}&content={aa_content}&ext={aa_ext}&sort={aa_sort}&q={q}"
     return params
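The request() hunk above swaps between two ways of building the search URL: one assembles the query string with urlencode() after dropping empty values, the other percent-encodes only the query with quote() and interpolates everything into an f-string. A small standalone sketch (not the engine module itself; the sample values are made up) shows how the resulting URLs differ:

```python
from urllib.parse import quote, urlencode

base_url = "https://annas-archive.org"
lang, aa_content, aa_ext, aa_sort = "en", "", "", ""  # sample values
query, pageno = "origin of species", 1

# Removed (-) style: urlencode() a dict and drop empty values first, so
# unset filters such as content/ext/sort never appear in the URL.
args = {'lang': lang, 'content': aa_content, 'ext': aa_ext,
        'sort': aa_sort, 'q': query, 'page': pageno}
filtered_args = dict((k, v) for k, v in args.items() if v)
url_a = f"{base_url}/search?{urlencode(filtered_args)}"

# Added (+) style: quote() only the query and interpolate the rest, so
# empty parameters stay in the URL as "key=" and no page parameter is sent.
q = quote(query)
url_b = base_url + f"/search?lang={lang or ''}&content={aa_content}&ext={aa_ext}&sort={aa_sort}&q={q}"

print(url_a)  # https://annas-archive.org/search?lang=en&q=origin+of+species&page=1
print(url_b)  # https://annas-archive.org/search?lang=en&content=&ext=&sort=&q=origin%20of%20species
```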
@@ -137,12 +128,12 @@ def response(resp) -> List[Dict[str, Optional[str]]]:
 def _get_result(item):
     return {
         'template': 'paper.html',
-        'url': base_url + extract_text(eval_xpath_getindex(item, './@href', 0)),
+        'url': base_url + item.xpath('./@href')[0],
         'title': extract_text(eval_xpath(item, './/h3/text()[1]')),
         'publisher': extract_text(eval_xpath(item, './/div[contains(@class, "text-sm")]')),
         'authors': [extract_text(eval_xpath(item, './/div[contains(@class, "italic")]'))],
         'content': extract_text(eval_xpath(item, './/div[contains(@class, "text-xs")]')),
-        'thumbnail': extract_text(eval_xpath_getindex(item, './/img/@src', 0, default=None), allow_none=True),
+        'thumbnail': item.xpath('.//img/@src')[0],
     }
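The 'url' and 'thumbnail' lines in this last hunk differ mainly in how a missing node is handled: eval_xpath_getindex(..., default=None), together with extract_text(..., allow_none=True), returns None when nothing matches, while indexing item.xpath(...)[0] directly raises IndexError on a result without that node. A standalone lxml sketch (not the engine code; searx's helper is approximated here by a local first_or_none function) illustrates the difference:

```python
from lxml import html

# A result anchor with a title but no <img>, as a sparse result might look.
doc = html.fromstring('<div><a href="/md5/abc"><h3>Some title</h3></a></div>')
item = doc.xpath('.//a')[0]

# Direct indexing fails as soon as the node is absent.
try:
    thumbnail = item.xpath('.//img/@src')[0]
except IndexError:
    thumbnail = None

def first_or_none(element, xpath):
    # Rough stand-in for eval_xpath_getindex(element, xpath, 0, default=None):
    # return the first match, or None when the XPath matches nothing.
    nodes = element.xpath(xpath)
    return nodes[0] if nodes else None

print(thumbnail)                            # None
print(first_or_none(item, './/img/@src'))   # None
print(first_or_none(item, './@href'))       # /md5/abc
```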