2021-01-13 10:31:25 +00:00
|
|
|
# SPDX-License-Identifier: AGPL-3.0-or-later
|
2016-03-29 09:59:16 +00:00
|
|
|
"""
|
|
|
|
BASE (Scholar publications)
|
|
|
|
"""
|
|
|
|
|
2020-08-06 15:42:46 +00:00
|
|
|
from urllib.parse import urlencode
|
2016-03-29 09:59:16 +00:00
|
|
|
from lxml import etree
|
|
|
|
from datetime import datetime
|
|
|
|
import re
|
2016-11-30 17:43:03 +00:00
|
|
|
from searx.utils import searx_useragent
|
2016-03-29 09:59:16 +00:00
|
|
|
|
2021-01-13 10:31:25 +00:00
|
|
|
# about — engine metadata in the schema shared by all searx engines
about = {
    "website": 'https://base-search.net',
    "wikidata_id": 'Q448335',
    "official_api_documentation": 'https://api.base-search.net/',
    "use_official_api": True,
    "require_api_key": False,
    # the API answers with XML (parsed in response() below)
    "results": 'XML',
}
|
2016-03-29 09:59:16 +00:00
|
|
|
|
|
|
|
# searx result category for this engine
categories = ['science']

# BASE HTTP search interface; 'boost=oa' appears to favour open-access
# documents -- TODO confirm against the API documentation
base_url = 'https://api.base-search.net/cgi-bin/BaseHttpSearchInterface.fcgi'\
    + '?func=PerformSearch&{query}&boost=oa&hits={hits}&offset={offset}'

# engine dependent config
paging = True
# hits requested per page; also used to compute the paging offset
number_of_results = 10
|
|
|
|
|
|
|
|
# shortcuts for advanced search: maps user-friendly query prefixes to the
# Dublin-Core field names understood by the BASE API.
# NOTE(review): "shorcut_dict" is a typo for "shortcut_dict"; renaming it
# would also require updating request(), so the name is kept as-is here.
shorcut_dict = {
    # user-friendly keywords
    'format:': 'dcformat:',
    'author:': 'dccreator:',
    'collection:': 'dccollection:',
    'hdate:': 'dchdate:',
    'contributor:': 'dccontributor:',
    'coverage:': 'dccoverage:',
    'date:': 'dcdate:',
    'abstract:': 'dcdescription:',
    'urls:': 'dcidentifier:',
    'language:': 'dclanguage:',
    'publisher:': 'dcpublisher:',
    'relation:': 'dcrelation:',
    'rights:': 'dcrights:',
    'source:': 'dcsource:',
    'subject:': 'dcsubject:',
    'title:': 'dctitle:',
    'type:': 'dcdctype:'
}
|
|
|
|
|
|
|
|
|
|
|
|
def request(query, params):
    """Build the BASE API request URL.

    :param query: user search terms, possibly containing the
        user-friendly field shortcuts from ``shorcut_dict``
    :param params: searx request parameters; ``params['pageno']`` and
        ``params['headers']`` must be present
    :returns: ``params`` with ``url`` set and a ``User-Agent`` header added
    """
    # Replace the user-friendly shortcuts with the API's Dublin-Core field
    # names in a SINGLE pass.  The previous per-key re.sub() loop corrupted
    # earlier replacements: 'hdate:' -> 'dchdate:' was later re-matched by
    # the 'date:' key and became 'dchdcdate:'.  An ordered alternation
    # cannot re-scan its own replacement text, so each shortcut is
    # translated exactly once.
    shortcut_re = re.compile('|'.join(re.escape(key) for key in shorcut_dict))
    query = shortcut_re.sub(lambda match: shorcut_dict[match.group()], query)

    # BASE paging works with a hit offset, not a page number
    offset = (params['pageno'] - 1) * number_of_results

    params['url'] = base_url.format(query=urlencode({'query': query}),
                                    offset=offset,
                                    hits=number_of_results)

    # BASE asks API clients to identify themselves via the User-Agent
    params['headers']['User-Agent'] = searx_useragent()

    return params
|
|
|
|
|
|
|
|
|
|
|
|
def response(resp):
    """Parse the XML answer of the BASE API into a list of searx results.

    :param resp: HTTP response object whose ``content`` is the raw XML
        returned by the BaseHttpSearchInterface
    :returns: list of result dicts with ``url``, ``title``, ``content``
        and, when a date could be parsed, ``publishedDate``
    """
    results = []

    search_results = etree.XML(resp.content)

    for entry in search_results.xpath('./result/doc'):
        content = "No description available"
        url = None
        title = None
        date = None

        for item in entry:
            item_name = item.attrib["name"]
            if item_name == "dcdate":
                date = item.text

            elif item_name == "dctitle":
                title = item.text

            elif item_name == "dclink":
                url = item.text

            elif item_name == "dcdescription":
                # guard against empty <dcdescription/> elements, where
                # item.text is None and slicing would raise TypeError
                if item.text:
                    content = item.text[:300]
                    if len(item.text) > 300:
                        content += "..."

        # entries without a link or title cannot be displayed; skip them
        # instead of raising NameError on the undefined variable as the
        # previous code did
        if url is None or title is None:
            continue

        # dates returned by the BASE API come in several formats; try each
        # in turn.  (The old code defaulted ``date`` to datetime.now() and
        # relied on a bare except to swallow the resulting TypeError from
        # strptime -- using None and catching only ValueError is explicit.)
        publishedDate = None
        if date is not None:
            for date_format in ['%Y-%m-%dT%H:%M:%SZ', '%Y-%m-%d', '%Y-%m', '%Y']:
                try:
                    publishedDate = datetime.strptime(date, date_format)
                    break
                except ValueError:
                    pass

        res_dict = {'url': url,
                    'title': title,
                    'content': content}
        if publishedDate is not None:
            res_dict['publishedDate'] = publishedDate

        results.append(res_dict)

    return results
|