mirror of
https://github.com/searxng/searxng.git
synced 2025-12-22 19:50:00 +00:00
[mod] Semantic Scholar engine: revision of the engine (Paper result)
Revision of the engine / use of the result type Paper as well as other typifications. Signed-off-by: Markus Heiser <markus.heiser@darmarit.de>
This commit is contained in:
committed by
Markus Heiser
parent
bb22bb1831
commit
4b4bf0ecaf
8
docs/dev/engines/online/semantic_scholar.rst
Normal file
8
docs/dev/engines/online/semantic_scholar.rst
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
.. _semantic_scholar engine:
|
||||||
|
|
||||||
|
================
|
||||||
|
Semantic Scholar
|
||||||
|
================
|
||||||
|
|
||||||
|
.. automodule:: searx.engines.semantic_scholar
|
||||||
|
:members:
|
||||||
@@ -1,125 +1,163 @@
|
|||||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||||
"""Semantic Scholar (Science)"""
|
"""`Semantic Scholar`_ provides free, AI-driven search and discovery tools, and
|
||||||
|
open resources for the global research community. `Semantic Scholar`_ indexes
|
||||||
|
over 200 million academic papers sourced from publisher partnerships, data
|
||||||
|
providers, and web crawls.
|
||||||
|
|
||||||
|
.. _Semantic Scholar: https://www.semanticscholar.org/about
|
||||||
|
|
||||||
|
Configuration
|
||||||
|
=============
|
||||||
|
|
||||||
|
To use this engine, add the following entry to your engines list in
|
||||||
|
``settings.yml``:
|
||||||
|
|
||||||
|
.. code:: yaml
|
||||||
|
|
||||||
|
- name: semantic scholar
|
||||||
|
engine: semantic_scholar
|
||||||
|
shortcut: se
|
||||||
|
|
||||||
|
Implementations
|
||||||
|
===============
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
import typing as t
|
||||||
|
|
||||||
from json import dumps
|
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from lxml import html
|
from lxml import html
|
||||||
|
from flask_babel import gettext # pyright: ignore[reportUnknownVariableType]
|
||||||
|
|
||||||
from flask_babel import gettext
|
|
||||||
from searx.network import get
|
from searx.network import get
|
||||||
from searx.utils import eval_xpath_getindex, html_to_text
|
from searx.utils import eval_xpath_getindex, html_to_text
|
||||||
|
from searx.enginelib import EngineCache
|
||||||
|
from searx.result_types import EngineResults
|
||||||
|
|
||||||
|
if t.TYPE_CHECKING:
|
||||||
|
from searx.extended_types import SXNG_Response
|
||||||
|
from searx.search.processors import OnlineParams
|
||||||
|
|
||||||
# Engine metadata displayed in the SearXNG preferences / about page.
about = {
    "website": "https://www.semanticscholar.org/",
    "wikidata_id": "Q22908627",
    "official_api_documentation": "https://api.semanticscholar.org/",
    "use_official_api": True,
    "require_api_key": False,
    "results": "JSON",
}
|
|
||||||
# Engine configuration: categories this engine serves, paging support and the
# endpoints of the (undocumented) web-app search API.
categories = ["science", "scientific publications"]
paging = True
search_url = "https://www.semanticscholar.org/api/1/search"
base_url = "https://www.semanticscholar.org"
|
|
||||||
|
CACHE: EngineCache
"""Persistent (SQLite) key/value cache that deletes its values after ``expire``
seconds."""


def setup(engine_settings: dict[str, t.Any]) -> bool:
    """Initialize the engine: create the module-wide :py:obj:`CACHE`, keyed by
    the engine's name from the settings.

    Returns ``True`` to signal a successful setup.
    """
    global CACHE  # pylint: disable=global-statement
    CACHE = EngineCache(engine_settings["name"])
    return True
||||||
|
|
||||||
def get_ui_version() -> str:
    """Return Semantic Scholar's current UI version string.

    The version is scraped from the ``s2-ui-version`` ``<meta>`` tag of the
    start page and cached for five minutes; the search API rejects requests
    without a matching ``X-S2-UI-Version`` header.

    Raises:
        RuntimeError: if the start page cannot be fetched or the meta tag is
            missing / empty.
    """
    ret_val: str = CACHE.get("X-S2-UI-Version")
    if not ret_val:
        resp = get(base_url)
        if not resp.ok:
            raise RuntimeError("Can't determine Semantic Scholar UI version")

        doc = html.fromstring(resp.text)
        ret_val = eval_xpath_getindex(doc, "//meta[@name='s2-ui-version']/@content", 0)
        if not ret_val:
            raise RuntimeError("Can't determine Semantic Scholar UI version")
        # hold the cached value for 5min
        CACHE.set("X-S2-UI-Version", value=ret_val, expire=300)
    # NOTE(review): `logger` is not defined in this module — presumably injected
    # by the engine loader; confirm against searx.engines.
    logger.debug("X-S2-UI-Version: %s", ret_val)
    return ret_val
|
|
||||||
|
|
||||||
|
def request(query: str, params: "OnlineParams") -> None:
    """Build a POST request against Semantic Scholar's web-app search API.

    Sets URL, method, the JSON body (query string, page number, fixed page
    size of 10, relevance sort) and the headers the API requires, including
    the UI version obtained via :py:obj:`get_ui_version`.
    """
    params["url"] = search_url
    params["method"] = "POST"
    params["headers"].update(
        {
            "Content-Type": "application/json",
            "X-S2-UI-Version": get_ui_version(),
            "X-S2-Client": "webapp-browser",
        }
    )
    params["json"] = {
        "queryString": query,
        "page": params["pageno"],
        "pageSize": 10,
        "sort": "relevance",
        "getQuerySuggestions": False,
        "authors": [],
        "coAuthors": [],
        "venues": [],
        "performTitleMatch": True,
    }
||||||
def response(resp: "SXNG_Response") -> EngineResults:
    """Parse the JSON answer of the search API into ``Paper`` results."""
    res = EngineResults()
    json_data = resp.json()

    for result in json_data["results"]:
        # URL: prefer the primary paper link, then the first crawled link,
        # then the first alternate link, and finally the paper's own page.
        url: str = result.get("primaryPaperLink", {}).get("url")
        if not url and result.get("links"):
            url = result.get("links")[0]
        if not url:
            alternatePaperLinks = result.get("alternatePaperLinks")
            if alternatePaperLinks:
                url = alternatePaperLinks[0].get("url")
        if not url:
            url = base_url + "/paper/%s" % result["id"]

        # publishedDate
        publishedDate: datetime | None
        if "pubDate" in result:
            publishedDate = datetime.strptime(result["pubDate"], "%Y-%m-%d")
        else:
            publishedDate = None

        # authors
        authors: list[str] = [author[0]["name"] for author in result.get("authors", [])]

        # pick for the first alternate link, but not from the crawler
        pdf_url: str = ""
        for doc in result.get("alternatePaperLinks", []):
            if doc["linkType"] not in ("crawler", "doi"):
                pdf_url = doc["url"]
                break

        # comments: a localized citation summary, when citation stats exist
        comments: str = ""
        if "citationStats" in result:
            comments = gettext(
                "{numCitations} citations from the year {firstCitationVelocityYear} to {lastCitationVelocityYear}"
            ).format(
                numCitations=result["citationStats"]["numCitations"],
                firstCitationVelocityYear=result["citationStats"]["firstCitationVelocityYear"],
                lastCitationVelocityYear=result["citationStats"]["lastCitationVelocityYear"],
            )

        res.add(
            res.types.Paper(
                title=result["title"]["text"],
                url=url,
                content=html_to_text(result["paperAbstract"]["text"]),
                journal=result.get("venue", {}).get("text") or result.get("journal", {}).get("name"),
                doi=result.get("doiInfo", {}).get("doi"),
                tags=result.get("fieldsOfStudy"),
                authors=authors,
                pdf_url=pdf_url,
                publishedDate=publishedDate,
                comments=comments,
            )
        )

    return res
|
|||||||
@@ -1964,7 +1964,6 @@ engines:
|
|||||||
|
|
||||||
- name: semantic scholar
|
- name: semantic scholar
|
||||||
engine: semantic_scholar
|
engine: semantic_scholar
|
||||||
disabled: true
|
|
||||||
shortcut: se
|
shortcut: se
|
||||||
|
|
||||||
# Spotify needs API credentials
|
# Spotify needs API credentials
|
||||||
|
|||||||
Reference in New Issue
Block a user