mirror of https://github.com/searxng/searxng.git
Merge branch 'master' into docker/opencontainers
commit 359c18f9e6
@@ -23,13 +23,13 @@ Go to the `searx-docker <https://github.com/searx/searx-docker>`__ project.
 Without Docker
 ------
-For all the details, follow this `step by step installation <https://asciimoo.github.io/searx/dev/install/installation.html>`__.
+For all of the details, follow this `step by step installation <https://asciimoo.github.io/searx/dev/install/installation.html>`__.

 Note: the documentation needs to be updated.

-If you are in hurry
+If you are in a hurry
 ------
-- clone source:
+- clone the source:
 ``git clone https://github.com/asciimoo/searx.git && cd searx``
 - install dependencies: ``./manage.sh update_packages``
 - edit your
@@ -14,6 +14,7 @@ import random
 from json import loads
 from time import time
 from lxml.html import fromstring
+from searx.poolrequests import get
 from searx.url_utils import urlencode
 from searx.utils import eval_xpath

@@ -31,13 +32,9 @@ search_string = 'search?{query}'\
     '&c=main'\
     '&s={offset}'\
     '&format=json'\
-    '&qh=0'\
-    '&qlang={lang}'\
+    '&langcountry={lang}'\
     '&ff={safesearch}'\
-    '&rxiec={rxieu}'\
-    '&ulse={ulse}'\
-    '&rand={rxikd}'\
-    '&dbez={dbez}'
+    '&rand={rxikd}'

 # specific xpath variables
 results_xpath = '//response//result'
 url_xpath = './/url'

@@ -46,9 +43,26 @@ content_xpath = './/sum'

 supported_languages_url = 'https://gigablast.com/search?&rxikd=1'

+extra_param = ''  # gigablast requires a random extra parameter
+# which can be extracted from the source code of the search page
+
+
+def parse_extra_param(text):
+    global extra_param
+    param_lines = [x for x in text.splitlines() if x.startswith('var url=') or x.startswith('url=url+')]
+    extra_param = ''
+    for l in param_lines:
+        extra_param += l.split("'")[1]
+    extra_param = extra_param.split('&')[-1]
+
+
+def init(engine_settings=None):
+    parse_extra_param(get('http://gigablast.com/search?c=main&qlangcountry=en-us&q=south&s=10').text)
+
+
 # do search-request
 def request(query, params):
+    print("EXTRAPARAM:", extra_param)
     offset = (params['pageno'] - 1) * number_of_results

     if params['language'] == 'all':

@@ -67,14 +81,11 @@ def request(query, params):
     search_path = search_string.format(query=urlencode({'q': query}),
                                        offset=offset,
                                        number_of_results=number_of_results,
-                                       rxikd=int(time() * 1000),
-                                       rxieu=random.randint(1000000000, 9999999999),
-                                       ulse=random.randint(100000000, 999999999),
                                        lang=language,
-                                       safesearch=safesearch,
-                                       dbez=random.randint(100000000, 999999999))
+                                       rxikd=int(time() * 1000),
+                                       safesearch=safesearch)

-    params['url'] = base_url + search_path
+    params['url'] = base_url + search_path + '&' + extra_param

     return params

@@ -84,7 +95,11 @@ def response(resp):
     results = []

     # parse results
-    response_json = loads(resp.text)
+    try:
+        response_json = loads(resp.text)
+    except:
+        parse_extra_param(resp.text)
+        raise Exception('extra param expired, please reload')

     for result in response_json['results']:
         # append result
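Side note, not part of the diff: a minimal standalone sketch of what the new parse_extra_param helper does, run against a hypothetical page snippet. In the engine the text comes from gigablast.com's search page source, and the try/except added to response() re-runs the extraction once the token has expired.

# Illustrative only; sample_js is a made-up stand-in for the page's inline JavaScript.
sample_js = (
    "var url='/search?c=main&qlangcountry=en-us&q=south&s=10';\n"
    "url=url+'&rand=1234567890';\n"
)

param_lines = [x for x in sample_js.splitlines()
               if x.startswith('var url=') or x.startswith('url=url+')]
extra_param = ''
for line in param_lines:
    extra_param += line.split("'")[1]      # keep the single-quoted fragment of each line
extra_param = extra_param.split('&')[-1]   # the last query parameter is the random token

print(extra_param)  # -> rand=1234567890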
@@ -24,7 +24,7 @@ result_base_url = 'https://openstreetmap.org/{osm_type}/{osm_id}'

 # do search-request
 def request(query, params):
-    params['url'] = base_url + search_string.format(query=query)
+    params['url'] = base_url + search_string.format(query=query.decode('utf-8'))

     return params
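Side note, not part of the diff: why the .decode('utf-8') matters here. The query reaches the engine as UTF-8 bytes, and formatting bytes straight into a str URL embeds their b'...' repr instead of the text. The template below is a stand-in for illustration, not the engine's real search_string.

search_string = 'search/{query}?format=json'   # stand-in template
query = u'Genève'.encode('utf-8')              # the query as the engine receives it

print(search_string.format(query=query))                  # search/b'Gen\xc3\xa8ve'?format=json
print(search_string.format(query=query.decode('utf-8')))  # search/Genève?format=json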
@@ -50,6 +50,7 @@ def request(query, params):
     language = match_language(params['language'], supported_languages, language_aliases)
     params['url'] += '&locale=' + language.replace('-', '_').lower()

+    params['headers']['User-Agent'] = 'Mozilla/5.0 (X11; Linux x86_64; rv:69.0) Gecko/20100101 Firefox/69.0'
     return params
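Side note, not part of the diff: the locale line above converts an RFC-style language tag into the lowercase, underscore-separated form the API expects, for example:

language = 'fr-FR'
print('&locale=' + language.replace('-', '_').lower())  # -> &locale=fr_fr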
@@ -21,7 +21,8 @@ search_url = base_url + u'w/api.php?'\
     'action=query'\
     '&format=json'\
     '&{query}'\
-    '&prop=extracts|pageimages'\
+    '&prop=extracts|pageimages|pageprops'\
+    '&ppprop=disambiguation'\
     '&exintro'\
     '&explaintext'\
     '&pithumbsize=300'\

@@ -79,12 +80,15 @@ def response(resp):

     # wikipedia article's unique id
     # first valid id is assumed to be the requested article
+    if 'pages' not in search_result['query']:
+        return results
+
     for article_id in search_result['query']['pages']:
         page = search_result['query']['pages'][article_id]
         if int(article_id) > 0:
             break

-    if int(article_id) < 0:
+    if int(article_id) < 0 or 'disambiguation' in page.get('pageprops', {}):
         return []

     title = page.get('title')

@@ -96,6 +100,7 @@ def response(resp):
     extract = page.get('extract')

     summary = extract_first_paragraph(extract, title, image)
+    summary = summary.replace('() ', '')

     # link to wikipedia article
     wikipedia_link = base_url.format(language=url_lang(resp.search_params['language'])) \
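Side note, not part of the diff: how the added pageprops check behaves, run against a hypothetical API payload; a page flagged as a disambiguation page now produces no result.

# Hypothetical payload shaped like the MediaWiki extracts/pageprops response.
search_result = {
    'query': {
        'pages': {
            '-1': {'title': 'Missing page'},
            '12345': {'title': 'Mercury',
                      'pageprops': {'disambiguation': ''},
                      'extract': 'Mercury may refer to: ...'},
        }
    }
}

results = []
if 'pages' in search_result['query']:
    for article_id in search_result['query']['pages']:
        page = search_result['query']['pages'][article_id]
        if int(article_id) > 0:
            break
    if not (int(article_id) < 0 or 'disambiguation' in page.get('pageprops', {})):
        results.append(page['title'])

print(results)  # -> [] because "Mercury" carries the disambiguation page prop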
@@ -408,7 +408,7 @@ engines:

   - name : library genesis
     engine : xpath
-    search_url : http://libgen.io/search.php?req={query}
+    search_url : https://libgen.is/search.php?req={query}
     url_xpath : //a[contains(@href,"bookfi.net")]/@href
     title_xpath : //a[contains(@href,"book/")]/text()[1]
     content_xpath : //td/a[1][contains(@href,"=author")]/text()

@@ -464,7 +464,7 @@ engines:
   - name : openairedatasets
     engine : json_engine
     paging : True
-    search_url : http://api.openaire.eu/search/datasets?format=json&page={pageno}&size=10&title={query}
+    search_url : https://api.openaire.eu/search/datasets?format=json&page={pageno}&size=10&title={query}
     results_query : response/results/result
     url_query : metadata/oaf:entity/oaf:result/children/instance/webresource/url/$
     title_query : metadata/oaf:entity/oaf:result/title/$

@@ -476,7 +476,7 @@ engines:
   - name : openairepublications
     engine : json_engine
     paging : True
-    search_url : http://api.openaire.eu/search/publications?format=json&page={pageno}&size=10&title={query}
+    search_url : https://api.openaire.eu/search/publications?format=json&page={pageno}&size=10&title={query}
     results_query : response/results/result
     url_query : metadata/oaf:entity/oaf:result/children/instance/webresource/url/$
     title_query : metadata/oaf:entity/oaf:result/title/$

@@ -812,7 +812,7 @@ locales:
 doi_resolvers :
   oadoi.org : 'https://oadoi.org/'
   doi.org : 'https://doi.org/'
-  doai.io : 'http://doai.io/'
-  sci-hub.tw : 'http://sci-hub.tw/'
+  doai.io : 'https://doai.io/'
+  sci-hub.tw : 'https://sci-hub.tw/'

 default_doi_resolver : 'oadoi.org'
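Side note, not part of the diff: a rough sketch of how a search_url template like the ones above gets filled, under the assumption that the {query} placeholder receives the URL-encoded search terms before the request is issued.

from urllib.parse import quote_plus

search_url = 'https://libgen.is/search.php?req={query}'   # entry from the hunk above
print(search_url.format(query=quote_plus('python programming')))
# -> https://libgen.is/search.php?req=python+programming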
@@ -43,7 +43,7 @@ locales:
 doi_resolvers :
   oadoi.org : 'https://oadoi.org/'
   doi.org : 'https://doi.org/'
-  doai.io : 'http://doai.io/'
-  sci-hub.tw : 'http://sci-hub.tw/'
+  doai.io : 'https://doai.io/'
+  sci-hub.tw : 'https://sci-hub.tw/'

 default_doi_resolver : 'oadoi.org'
@@ -125,6 +125,14 @@ $(document).ready(function() {
         }
     });

+    function nextResult(current, direction) {
+        var next = current[direction]();
+        while (!next.is('.result') && next.length !== 0) {
+            next = next[direction]();
+        }
+        return next
+    }
+
     function highlightResult(which) {
         return function() {
             var current = $('.result[data-vim-selected]');

@@ -157,13 +165,13 @@ $(document).ready(function() {
             }
             break;
         case 'down':
-            next = current.next('.result');
+            next = nextResult(current, 'next');
             if (next.length === 0) {
                 next = $('.result:first');
             }
             break;
         case 'up':
-            next = current.prev('.result');
+            next = nextResult(current, 'prev');
             if (next.length === 0) {
                 next = $('.result:last');
             }
@@ -6,7 +6,7 @@
 <div class="panel-body">
     {% if infobox.img_src %}<img class="img-responsive center-block infobox_part" src="{{ image_proxify(infobox.img_src) }}" alt="{{ infobox.infobox }}" />{% endif %}

-    {% if infobox.content %}<bdi><p class="infobox_part">{{ infobox.content }}</p></bdi>{% endif %}
+    {% if infobox.content %}<bdi><p class="infobox_part">{{ infobox.content | safe }}</p></bdi>{% endif %}

     {% if infobox.attributes -%}
     <table class="table table-striped infobox_part">
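Side note, not part of the diff: what the safe filter changes. With Jinja2 autoescaping enabled, as it is for HTML templates rendered through Flask, the infobox content would otherwise be escaped; safe renders it as markup, which also means the content supplied by the engine is now trusted HTML.

from jinja2 import Environment

env = Environment(autoescape=True)
content = 'Paris is the capital of <b>France</b>.'

print(env.from_string('<p>{{ content }}</p>').render(content=content))
# -> <p>Paris is the capital of &lt;b&gt;France&lt;/b&gt;.</p>
print(env.from_string('<p>{{ content | safe }}</p>').render(content=content))
# -> <p>Paris is the capital of <b>France</b>.</p>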
@@ -606,11 +606,11 @@ def index():
     # HTML output format

     # suggestions: use RawTextQuery to get the suggestion URLs with the same bang
-    suggestion_urls = map(lambda suggestion: {
-        'url': raw_text_query.changeSearchQuery(suggestion).getFullQuery(),
-        'title': suggestion
-    },
-        result_container.suggestions)
+    suggestion_urls = list(map(lambda suggestion: {
+        'url': raw_text_query.changeSearchQuery(suggestion).getFullQuery(),
+        'title': suggestion
+    },
+        result_container.suggestions))

     correction_urls = list(map(lambda correction: {
         'url': raw_text_query.changeSearchQuery(correction).getFullQuery(),
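Side note, not part of the diff: why the list(...) wrapper matters on Python 3. map() returns a lazy, single-use iterator, so anything that serializes it or iterates it a second time sees nothing; a list can be reused, measured with len(), and passed safely to a template.

suggestions = ['searx', 'searxng']   # stand-in data for illustration

lazy = map(lambda s: {'url': '/search?q=' + s, 'title': s}, suggestions)
print(list(lazy))  # first pass: two dicts
print(list(lazy))  # second pass: [] - the iterator is already exhausted

eager = list(map(lambda s: {'url': '/search?q=' + s, 'title': s}, suggestions))
print(len(eager))  # -> 2, and eager can be iterated as often as needed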
@@ -1,37 +0,0 @@
-# -*- coding: utf-8 -*-
-from collections import defaultdict
-import mock
-from searx.engines import pubmed
-from searx.testing import SearxTestCase
-
-
-class TestPubmedEngine(SearxTestCase):
-
-    def test_request(self):
-        query = 'test_query'
-        dicto = defaultdict(dict)
-        dicto['pageno'] = 1
-        params = pubmed.request(query, dicto)
-        self.assertIn('url', params)
-        self.assertIn('eutils.ncbi.nlm.nih.gov/', params['url'])
-        self.assertIn('term', params['url'])
-
-    def test_response(self):
-        self.assertRaises(AttributeError, pubmed.response, None)
-        self.assertRaises(AttributeError, pubmed.response, [])
-        self.assertRaises(AttributeError, pubmed.response, '')
-        self.assertRaises(AttributeError, pubmed.response, '[]')
-
-        response = mock.Mock(text='<PubmedArticleSet></PubmedArticleSet>')
-        self.assertEqual(pubmed.response(response), [])
-
-        xml_mock = """<eSearchResult><Count>1</Count><RetMax>1</RetMax><RetStart>0</RetStart><IdList>
-<Id>1</Id>
-</IdList></eSearchResult>
-"""
-
-        response = mock.Mock(text=xml_mock.encode('utf-8'))
-        results = pubmed.response(response)
-        self.assertEqual(type(results), list)
-        self.assertEqual(len(results), 1)
-        self.assertEqual(results[0]['content'], 'No abstract is available for this publication.')
@ -1,110 +0,0 @@
|
|||
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
|
||||
<html xmlns="http://www.w3.org/1999/xhtml">
|
||||
<head>
|
||||
</head>
|
||||
<body>
|
||||
<div id="header">
|
||||
<div id="whoIsYou">
|
||||
<a href="/lang.php"><small>SeedPeer in your own language?</small></a> <a href="http://www.seedpeer.eu"><img src="/images/flags/uk.gif" width="16px" alt="Torrents EN" /></a> <a href="http://spanish.seedpeer.eu"><img src="/images/flags/es.gif" width="16px" alt="Torrents ES" /></a> <a href="http://german.seedpeer.eu"><img src="/images/flags/de.gif" width="16px" alt="Torrents DE" /></a> <a href="http://french.seedpeer.eu"><img src="/images/flags/fr.gif" width="16px" alt="Torrents FR" /></a> <a href="http://portuguese.seedpeer.eu"><img src="/images/flags/pt.gif" width="16px" alt="Torrents Portuguese" /></a> <a href="http://swedish.seedpeer.eu"><img src="/images/flags/se.gif" width="16px" alt="Torrents Sweden" /></a>
|
||||
</div>
|
||||
|
||||
<script type="text/javascript">
|
||||
whoIsYou();
|
||||
</script>
|
||||
<div id="search">
|
||||
<form action="/search.php" method="get">
|
||||
<input id="topsearchbar" name="search" value="narcos season 2" />
|
||||
<input type="submit" class="searchbutton" value="Torrents" />
|
||||
<input style="color:#000" type="submit" class="searchbutton" name="usenet" value="Usenet Binaries" />
|
||||
</form>
|
||||
<div id="suggestion"></div>
|
||||
</div>
|
||||
<div id="logo"><a href="/"><img src="/images/logo2.gif" alt="Seedpeer homepage" width="415" height="143" /></a></div>
|
||||
<div id="subtext"><a href="/">Home</a> > <a href="/search.html">Torrent search</a> > Narcos season 2 | page 1</div>
|
||||
</div>
|
||||
<div id="nav">
|
||||
<ul>
|
||||
<!--
|
||||
<li><font style="color:red;font-size:9px;font-weight:bold;">NEW</font><a title="Download TOP Games for FREE" rel="nofollow" href="http://www.bigrebelads.com/affiliate/index?ref=9301" target="_blank">FREE Games</a></li>
|
||||
|
||||
-->
|
||||
<li style="border-left:none" id="categories"><a title="Browse Torrent Categories" href="/browse.html">Categories</a>
|
||||
<ul>
|
||||
<li><a title="Browse Anime Torrents" href="/browse.html#6">Anime</a></li>
|
||||
<li><a title="Browse Game Torrents" href="/browse.html#4">Games</a></li>
|
||||
<li><a title="Browse Movie Torrents" href="/browse.html#1">Movies</a></li>
|
||||
<li><a title="Browse Music Torrents" href="/browse.html#3">Music</a></li>
|
||||
<li><a title="Browse Software Torrents" href="/browse.html#5">Software</a></li>
|
||||
<li><a title="Browse TV Torrents" href="/browse.html#2">TV Shows</a></li>
|
||||
<li><a title="Browse Other Torrents" href="/browse.html#7">Others</a></li>
|
||||
</ul>
|
||||
</li>
|
||||
<li><a title="Upload A Torrents" href="/upload.html">Upload torrent</a></li>
|
||||
<li id="verified"><a title="Verified Torrents" href="/verified.html">Verified</a></li>
|
||||
<li id="searchoptions"><a title="Search Torrents" href="/search.html">Torrent search</a></li>
|
||||
<li id="newsgroups"><a style="color:#212b3e" title="News Groups" href="/usenet.html">Usenet Binaries</a></li>
|
||||
<li id="about" style="border-right:none"><a rel="nofollow" href="/faq.html">About Us</a>
|
||||
<ul>
|
||||
<li><a title="SeedPeer Statistics" href="/stats.html">Statistics</a></li>
|
||||
<li><a title="Contact Us" href="/contact.html">Contact</a></li>
|
||||
<li><a title="Frequently Asked Questions" href="/faq.html">FAQ</a></li>
|
||||
<li><a title="SeedPeer API" href="http://api.seedpeer.eu">Our API</a></li>
|
||||
<li><a title="SeedPeer Blog" href="/blog">Blog</a></li>
|
||||
</ul>
|
||||
</li>
|
||||
<!--<li><a href="/toolbar.php">Our Toolbar</a></li>-->
|
||||
</ul>
|
||||
<div class="clear"></div>
|
||||
</div>
|
||||
<div id="body"><div id="pageTop"></div>
|
||||
<div id="headerbox"><h1>Verified <font class="colored">Narcos season 2</font> torrents</h1></div><table width="100%"><tr><th>
|
||||
<span style="float:right">
|
||||
<a href="/search/narcos-season-2/8/1.html"><img style="vertical-align:middle" src="/images/comments.gif" alt="comments" /></a> |
|
||||
<a href="/search/narcos-season-2/7/1.html"><img style="vertical-align:middle" src="/images/ver.gif" alt="verified" /></a>
|
||||
</span>
|
||||
<a href="/search/narcos-season-2/1/1.html">Torrent name</a></th><th class="right"><a href="/search/narcos-season-2/2/1.html">Age</a></th><th class="right"><a href="/search/narcos-season-2/3/1.html">Size</a></th><th class="right"><a href="/search/narcos-season-2/4/1.html">Seeds</a></th><th class="right"><a href="/search/narcos-season-2/5/1.html">Peers</a></th><th class="center"><a href="/search/narcos-season-2/6/1.html">Health</a></th><td class="tableAd" rowspan="6"><iframe src="http://creative.wwwpromoter.com/13689?d=300x250" width="300" height="250" style="border: none;" frameborder="0" scrolling="no"></iframe></td></tr><tr class=""><td><a class="pblink" id="pblink_table_item_1" href="" data-tad="431726" data-last-search="narcos+season+2" target="_blank" rel="nofollow"><strong class='colored'>Narcos season 2</strong> Full Version</a></td><td class="right">20 hours</td><td class="right">681.3 MB</td><td class="right"><font color="green">28</font> </td><td class="right"><font color="navy">654</font> </td><td class="center"><img src="/images/h5.gif" alt="Health" /></td></tr><tr class="tdark"><td><a class="pblink" id="pblink_table_item_2" href="" data-tad="431727" data-url="narcos+season+2" target="_blank" rel="nofollow"><strong class='colored'>Narcos season 2</strong> Trusted Source</a></td><td class="right">12 hours</td><td class="right">787.1 MB</td><td class="right"><font color="green">64</font> </td><td class="right"><font color="navy">220</font> </td><td class="center"><img src="/images/h5.gif" alt="Health" /></td></tr><tr class=""><td><a class="pblink" id="pblink_table_item_3" href="" data-tad="431729" data-last-search="narcos+season+2" target="_blank" rel="nofollow"><strong class='colored'>Full Narcos season 2 Download</strong></a> <small><a class="pblink" id="pblink_table_item_4" href="" data-tad="431729" data-last-search="narcos+season+2" target="_blank" rel="nofollow">Usenet</a></small></td><td class="right">24 hours</td><td class="right">775.5 MB</td><td class="right"><font color="green">60</font> </td><td class="right"><font color="navy">236</font> </td><td class="center"><img src="/images/h5.gif" alt="Health" /></td></tr><tr class="tdark"><td><a class="pblink" id="pblink_table_item_5" href="" data-tad="431730" data-last-search="narcos+season+2" target="_blank" rel="nofollow"><strong class='colored'>Narcos season 2</strong> 2014 - DIRECT STREAMING</a> <small><a class="pblink" id="pblink_table_item_6" href="" data-tad="431729" data-last-search="narcos+season+2" target="_blank" rel="nofollow">Movies</a></small></td><td class="right">17 hours</td><td class="right">654.1 MB</td><td class="right"><font color="green">2</font> </td><td class="right"><font color="navy">391</font> </td><td class="center"><img src="/images/h5.gif" alt="Health" /></td></tr><tr class=""><td><a class="pblink" id="pblink_table_item_7" href="" data-tad="431731" data-last-search="narcos+season+2" target="_blank" rel="nofollow"><strong class='colored'>Narcos season 2</strong> 2014</a> <small><a class="pblink" id="pblink_table_item_8" href="" data-tad="431729" data-last-search="narcos+season+2" target="_blank" rel="nofollow">Movies</a></small></td><td class="right">20 hours</td><td class="right">754.5 MB</td><td class="right"><font color="green">21</font> </td><td class="right"><font color="navy">919</font> </td><td class="center"><img src="/images/h5.gif" alt="Health" /></td></tr></table><br /><br /><center><iframe src='http://creative.wwwpromoter.com/13689?d=728x90' width='728' height='90' style='border: none;' 
frameborder='0' scrolling='no'></iframe><center><span style="float:right;margin:1em .2em 0 0"><a title="Download at the speed of your connection" href="/usenet.php?search=narcos+season+2"><img src="/images/dlf.gif" alt="Search Binaries" /></a></span><div style="margin-bottom:1em;margin-right:290px" id="headerbox"><h1><a href="/searchfeed/narcos+season+2.xml" target="_blank" title="SeedPeer RSS Torrent Search Feed fornarcos season 2"><img src="/images/feedIcon.png" border="0" /></a> 2 <font class="colored">Narcos season 2</font> Torrents were found</h1></div><table width="100%"><tr><th>
|
||||
<span style="float:right">
|
||||
<a href="/search/narcos-season-2/8/1.html"><img style="vertical-align:middle" src="/images/comments.gif" alt="comments" /></a> |
|
||||
<a href="/search/narcos-season-2/7/1.html"><img style="vertical-align:middle" src="/images/ver.gif" alt="verified" /></a>
|
||||
</span>
|
||||
<a href="/search/narcos-season-2/1/1.html">Torrent name</a></th><th class="right"><a href="/search/narcos-season-2/2/1.html">Age</a></th><th class="right"><a href="/search/narcos-season-2/3/1.html">Size</a></th><th class="right"><a href="/search/narcos-season-2/4/1.html">Seeds</a></th><th class="right"><a href="/search/narcos-season-2/5/1.html">Peers</a></th><th class="center"><a href="/search/narcos-season-2/6/1.html">Health</a></th></tr><tr class=""><td><small class="comments"><a href="http://www.facebook.com/sharer.php?t=Download%20<strong class='colored'>Narcos</strong> <strong class='colored'>Season</strong> <strong class='colored'>2</strong> Complete 7<strong class='colored'>2</strong>0p WebRip EN-SUB x<strong class='colored'>2</strong>64-[MULVAcoded] S0<strong class='colored'>2</strong>%20 torrent&u=http://seedpeer.seedpeer.eu/details/11686840/Narcos-Season-2-Complete-720p-WebRip-EN-SUB-x264-[MULVAcoded]-S02.html"><img src="/images/facebook.png" alt="Add to Facebook" width="14" height="14" /></a></small><a href="/details/11686840/Narcos-Season-2-Complete-720p-WebRip-EN-SUB-x264-[MULVAcoded]-S02.html"><strong class='colored'>Narcos</strong> <strong class='colored'>Season</strong> <strong class='colored'>2</strong> Complete 7<strong class='colored'>2</strong>0p WebRip EN-SUB x<strong class='colored'>2</strong>64-[MULVAcoded] S0<strong class='colored'>2</strong> <small><a href="/browse.html#11686840"></a></small></a></td><td class="right">19 hours</td><td class="right">4.39 GB</td><td class="right"><font color="green">715</font> </td><td class="right"><font color="navy">183</font> </td><td class="center"><img src="/images/h5.gif" alt="Health" width="40" height="11" /></td></tr><tr class="tdark"><td><small class="comments"><a href="http://www.facebook.com/sharer.php?t=Download%20<strong class='colored'>Narcos</strong> - <strong class='colored'>Season</strong> <strong class='colored'>2</strong> - 7<strong class='colored'>2</strong>0p WEBRiP - x<strong class='colored'>2</strong>65 HEVC - ShAaNiG%20 torrent&u=http://seedpeer.seedpeer.eu/details/11685972/Narcos---Season-2---720p-WEBRiP---x265-HEVC---ShAaNiG.html"><img src="/images/facebook.png" alt="Add to Facebook" width="14" height="14" /></a></small><a href="/details/11685972/Narcos---Season-2---720p-WEBRiP---x265-HEVC---ShAaNiG.html"><strong class='colored'>Narcos</strong> - <strong class='colored'>Season</strong> <strong class='colored'>2</strong> - 7<strong class='colored'>2</strong>0p WEBRiP - x<strong class='colored'>2</strong>65 HEVC - ShAaNiG <small><a href="/browse.html#11685972"></a></small></a></td><td class="right">1 day</td><td class="right">2.48 GB</td><td class="right"><font color="green">861</font> </td><td class="right"><font color="navy">332</font> </td><td class="center"><img src="/images/h5.gif" alt="Health" width="40" height="11" /></td></tr></table><div id="headerbox"><h1>Related searches for: <font class="colored">Narcos season 2</font></h1></div><div id="search_suggestions"><br />Other suggested searches: </div><br /><a href="http://torrentz2.eu/search?f=narcos-season-2">Search for "narcos-season-2" on Torrentz2.eu</a><br /><a href="http://torrent-finder.info/show.php?q=narcos-season-2">Search for "narcos-season-2" on Torrent-Finder</a><br /><center><iframe src='http://creative.wwwpromoter.com/13689?d=300x250' width='300' height='250' style='border: none;' frameborder='0' scrolling='no'></iframe> <iframe src='http://creative.wwwpromoter.com/13689?d=300x250' width='300' height='250' style='border: none;' 
frameborder='0' scrolling='no'></iframe> <iframe src='http://creative.wwwpromoter.com/13689?d=300x250' width='300' height='250' style='border: none;' frameborder='0' scrolling='no'></iframe></center><div id="footer">
|
||||
<table width="100%">
|
||||
<tr>
|
||||
<td width="30%">
|
||||
<h2>Torrents Download</h2>
|
||||
<a href="/">Torrent search</a><br />
|
||||
<a href="/browse.html">Browse categories</a><br />
|
||||
<a href="/verified.html">Verified Torrents</a><br />
|
||||
<a href="/order-date.html">Today's torrents</a><br />
|
||||
<a href="/yesterday.html">Yesterday's torrents</a><br />
|
||||
<a href="/stats.html">Statistics</a><br />
|
||||
<br />
|
||||
<a href="/faq.html#copyright"><strong>Copyright & Removal</strong></a>
|
||||
</td>
|
||||
<td width="30%"><h2>Cool Stuff</h2>
|
||||
<a href="/promotional.php">Promotional</a><br />
|
||||
<a href="/contact.html">Advertising Information</a><br />
|
||||
<strong><a href="/plugins.php" title="Add a search plugin to Firefox or Internet Explorer">Search Plugin <span style="color:red">*</span></a></strong><br />
|
||||
<a href="http://www.utorrent.com">µTorrent Client</a><br />
|
||||
<a href="/blog">Seedpeer Blog</a><br />
|
||||
</td>
|
||||
<td width="30%"><h2>Links</h2>
|
||||
<a href="http://www.sumotorrent.com" target="_blank"><strong>SumoTorrent</strong></a><br />
|
||||
<a href="http://www.torrent-finder.info" target="_blank"><strong>Torrent Finder</strong></a><br />
|
||||
<a href="http://www.torrentpond.com" target="_blank"><strong>TorrentPond</strong></a><br />
|
||||
<a href="https://www.limetorrents.cc" target="_blank">LimeTorrents.cc</a><br />
|
||||
<a href="http://www.torrents.to/" target="_blank">Torrents.to</a><br />
|
||||
<a href="http://www.torrentfunk.com" target="_blank">TorrentFunk</a><br />
|
||||
<a href="https://monova.org" target="_blank">Monova</a><br />
|
||||
<a href="http://www.torrentroom.com" target="_blank">TorrentRoom</a><br />
|
||||
<a href="http://www.katcr.co/" target="_blank">Kickass Torrents Community</a><br />
|
||||
</td>
|
||||
<td width="10%"><div id="bottomlogo"></div></td>
|
||||
</tr>
|
||||
</table>
|
||||
<br />
|
||||
<br />
|
||||
</div>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
|
@ -1,78 +0,0 @@
|
|||
# coding=utf-8
|
||||
from collections import defaultdict
|
||||
import mock
|
||||
from searx.engines import acgsou
|
||||
from searx.testing import SearxTestCase
|
||||
|
||||
|
||||
class TestAcgsouEngine(SearxTestCase):
|
||||
|
||||
def test_request(self):
|
||||
query = 'test_query'
|
||||
dic = defaultdict(dict)
|
||||
dic['pageno'] = 1
|
||||
params = acgsou.request(query, dic)
|
||||
self.assertTrue('url' in params)
|
||||
self.assertTrue(query in params['url'])
|
||||
self.assertTrue('acgsou.com' in params['url'])
|
||||
|
||||
def test_response(self):
|
||||
resp = mock.Mock(text='<html></html>')
|
||||
self.assertEqual(acgsou.response(resp), [])
|
||||
|
||||
html = u"""
|
||||
<html>
|
||||
<table id="listTable" class="list_style table_fixed">
|
||||
<thead class="tcat">
|
||||
<tr>
|
||||
<th axis="string" class="l1 tableHeaderOver">test</th>
|
||||
<th axis="string" class="l2 tableHeaderOver">test</th>
|
||||
<th axis="string" class="l3 tableHeaderOver">test</th>
|
||||
<th axis="size" class="l4 tableHeaderOver">test</th>
|
||||
<th axis="number" class="l5 tableHeaderOver">test</th>
|
||||
<th axis="number" class="l6 tableHeaderOver">test</th>
|
||||
<th axis="number" class="l7 tableHeaderOver">test</th>
|
||||
<th axis="string" class="l8 tableHeaderOver">test</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody class="tbody" id="data_list">
|
||||
<tr class="alt1 ">
|
||||
<td nowrap="nowrap">date</td>
|
||||
<td><a href="category.html">testcategory テスト</a></td>
|
||||
<td style="text-align:left;">
|
||||
<a href="show-torrentid.html" target="_blank">torrentname テスト</a>
|
||||
</td>
|
||||
<td>1MB</td>
|
||||
<td nowrap="nowrap">
|
||||
<span class="bts_1">
|
||||
29
|
||||
</span>
|
||||
</td>
|
||||
<td nowrap="nowrap">
|
||||
<span class="btl_1">
|
||||
211
|
||||
</span>
|
||||
</td>
|
||||
<td nowrap="nowrap">
|
||||
<span class="btc_">
|
||||
168
|
||||
</span>
|
||||
</td>
|
||||
<td><a href="random.html">user</a></td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</html>
|
||||
"""
|
||||
|
||||
resp = mock.Mock(text=html)
|
||||
results = acgsou.response(resp)
|
||||
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 1)
|
||||
|
||||
r = results[0]
|
||||
self.assertEqual(r['url'], 'http://www.acgsou.com/show-torrentid.html')
|
||||
self.assertEqual(r['content'], u'Category: "testcategory テスト".')
|
||||
self.assertEqual(r['title'], u'torrentname テスト')
|
||||
self.assertEqual(r['filesize'], 1048576)
|
|
@ -1,111 +0,0 @@
|
|||
from collections import defaultdict
|
||||
import mock
|
||||
from searx.engines import archlinux
|
||||
from searx.testing import SearxTestCase
|
||||
|
||||
domains = {
|
||||
'all': 'https://wiki.archlinux.org',
|
||||
'de': 'https://wiki.archlinux.de',
|
||||
'fr': 'https://wiki.archlinux.fr',
|
||||
'ja': 'https://wiki.archlinuxjp.org',
|
||||
'ro': 'http://wiki.archlinux.ro',
|
||||
'tr': 'http://archtr.org/wiki'
|
||||
}
|
||||
|
||||
|
||||
class TestArchLinuxEngine(SearxTestCase):
|
||||
|
||||
def test_request(self):
|
||||
query = 'test_query'
|
||||
dic = defaultdict(dict)
|
||||
dic['pageno'] = 1
|
||||
dic['language'] = 'en-US'
|
||||
params = archlinux.request(query, dic)
|
||||
self.assertTrue('url' in params)
|
||||
self.assertTrue(query in params['url'])
|
||||
self.assertTrue('wiki.archlinux.org' in params['url'])
|
||||
|
||||
for lang, name in archlinux.main_langs:
|
||||
dic['language'] = lang
|
||||
params = archlinux.request(query, dic)
|
||||
self.assertTrue(name in params['url'])
|
||||
|
||||
for lang, domain in domains.items():
|
||||
dic['language'] = lang
|
||||
params = archlinux.request(query, dic)
|
||||
self.assertTrue(domain in params['url'])
|
||||
|
||||
def test_response(self):
|
||||
response = mock.Mock(text='<html></html>',
|
||||
search_params={'language': 'en_US'})
|
||||
self.assertEqual(archlinux.response(response), [])
|
||||
|
||||
html = """
|
||||
<ul class="mw-search-results">
|
||||
<li>
|
||||
<div class="mw-search-result-heading">
|
||||
<a href="/index.php/ATI" title="ATI">ATI</a>
|
||||
</div>
|
||||
<div class="searchresult">
|
||||
Lorem ipsum dolor sit amet
|
||||
</div>
|
||||
<div class="mw-search-result-data">
|
||||
30 KB (4,630 words) - 19:04, 17 March 2016</div>
|
||||
</li>
|
||||
<li>
|
||||
<div class="mw-search-result-heading">
|
||||
<a href="/index.php/Frequently_asked_questions" title="Frequently asked questions">
|
||||
Frequently asked questions
|
||||
</a>
|
||||
</div>
|
||||
<div class="searchresult">
|
||||
CPUs with AMDs instruction set "AMD64"
|
||||
</div>
|
||||
<div class="mw-search-result-data">
|
||||
17 KB (2,722 words) - 20:13, 21 March 2016
|
||||
</div>
|
||||
</li>
|
||||
<li>
|
||||
<div class="mw-search-result-heading">
|
||||
<a href="/index.php/CPU_frequency_scaling" title="CPU frequency scaling">CPU frequency scaling</a>
|
||||
</div>
|
||||
<div class="searchresult">
|
||||
ondemand for AMD and older Intel CPU
|
||||
</div>
|
||||
<div class="mw-search-result-data">
|
||||
15 KB (2,319 words) - 23:46, 16 March 2016
|
||||
</div>
|
||||
</li>
|
||||
</ul>
|
||||
"""
|
||||
|
||||
expected = [
|
||||
{
|
||||
'title': 'ATI',
|
||||
'url': 'https://wiki.archlinux.org/index.php/ATI'
|
||||
},
|
||||
{
|
||||
'title': 'Frequently asked questions',
|
||||
'url': 'https://wiki.archlinux.org/index.php/Frequently_asked_questions'
|
||||
},
|
||||
{
|
||||
'title': 'CPU frequency scaling',
|
||||
'url': 'https://wiki.archlinux.org/index.php/CPU_frequency_scaling'
|
||||
}
|
||||
]
|
||||
|
||||
response = mock.Mock(text=html)
|
||||
response.search_params = {
|
||||
'language': 'en_US'
|
||||
}
|
||||
results = archlinux.response(response)
|
||||
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), len(expected))
|
||||
|
||||
i = 0
|
||||
for exp in expected:
|
||||
res = results[i]
|
||||
i += 1
|
||||
for key, value in exp.items():
|
||||
self.assertEqual(res[key], value)
|
|
@ -1,58 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from collections import defaultdict
|
||||
import mock
|
||||
from searx.engines import arxiv
|
||||
from searx.testing import SearxTestCase
|
||||
|
||||
|
||||
class TestBaseEngine(SearxTestCase):
|
||||
|
||||
def test_request(self):
|
||||
query = 'test_query'.encode('utf-8')
|
||||
dicto = defaultdict(dict)
|
||||
dicto['pageno'] = 1
|
||||
params = arxiv.request(query, dicto)
|
||||
self.assertIn('url', params)
|
||||
self.assertIn('export.arxiv.org/api/', params['url'])
|
||||
|
||||
def test_response(self):
|
||||
self.assertRaises(AttributeError, arxiv.response, None)
|
||||
self.assertRaises(AttributeError, arxiv.response, [])
|
||||
self.assertRaises(AttributeError, arxiv.response, '')
|
||||
self.assertRaises(AttributeError, arxiv.response, '[]')
|
||||
|
||||
response = mock.Mock(content=b'''<?xml version="1.0" encoding="UTF-8"?>
|
||||
<feed xmlns="http://www.w3.org/2005/Atom"></feed>''')
|
||||
self.assertEqual(arxiv.response(response), [])
|
||||
|
||||
xml_mock = b'''<?xml version="1.0" encoding="UTF-8"?>
|
||||
<feed xmlns="http://www.w3.org/2005/Atom">
|
||||
<title type="html">ArXiv Query: search_query=all:test_query&id_list=&start=0&max_results=1</title>
|
||||
<id>http://arxiv.org/api/1</id>
|
||||
<updated>2000-01-21T00:00:00-01:00</updated>
|
||||
<opensearch:totalResults xmlns:opensearch="http://a9.com/-/spec/opensearch/1.1/">1</opensearch:totalResults>
|
||||
<opensearch:startIndex xmlns:opensearch="http://a9.com/-/spec/opensearch/1.1/">0</opensearch:startIndex>
|
||||
<opensearch:itemsPerPage xmlns:opensearch="http://a9.com/-/spec/opensearch/1.1/">1</opensearch:itemsPerPage>
|
||||
<entry>
|
||||
<id>http://arxiv.org/1</id>
|
||||
<updated>2000-01-01T00:00:01Z</updated>
|
||||
<published>2000-01-01T00:00:01Z</published>
|
||||
<title>Mathematical proof.</title>
|
||||
<summary>Mathematical formula.</summary>
|
||||
<author>
|
||||
<name>A. B.</name>
|
||||
</author>
|
||||
<link href="http://arxiv.org/1" rel="alternate" type="text/html"/>
|
||||
<link title="pdf" href="http://arxiv.org/1" rel="related" type="application/pdf"/>
|
||||
<category term="math.QA" scheme="http://arxiv.org/schemas/atom"/>
|
||||
<category term="1" scheme="http://arxiv.org/schemas/atom"/>
|
||||
</entry>
|
||||
</feed>
|
||||
'''
|
||||
|
||||
response = mock.Mock(content=xml_mock)
|
||||
results = arxiv.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 1)
|
||||
self.assertEqual(results[0]['title'], 'Mathematical proof.')
|
||||
self.assertEqual(results[0]['content'], 'Mathematical formula.')
|
|
@ -1,91 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from collections import defaultdict
|
||||
import mock
|
||||
from searx.engines import base
|
||||
from searx.testing import SearxTestCase
|
||||
|
||||
|
||||
class TestBaseEngine(SearxTestCase):
|
||||
|
||||
def test_request(self):
|
||||
query = 'test_query'
|
||||
dicto = defaultdict(dict)
|
||||
dicto['pageno'] = 1
|
||||
params = base.request(query, dicto)
|
||||
self.assertIn('url', params)
|
||||
self.assertIn('base-search.net', params['url'])
|
||||
|
||||
def test_response(self):
|
||||
self.assertRaises(AttributeError, base.response, None)
|
||||
self.assertRaises(AttributeError, base.response, [])
|
||||
self.assertRaises(AttributeError, base.response, '')
|
||||
self.assertRaises(AttributeError, base.response, '[]')
|
||||
|
||||
response = mock.Mock(content=b'<response></response>')
|
||||
self.assertEqual(base.response(response), [])
|
||||
|
||||
xml_mock = b"""<?xml version="1.0"?>
|
||||
<response>
|
||||
<lst name="responseHeader">
|
||||
<int name="status">0</int>
|
||||
<int name="QTime">1</int>
|
||||
</lst>
|
||||
<result name="response" numFound="1" start="0">
|
||||
<doc>
|
||||
<date name="dchdate">2000-01-01T01:01:01Z</date>
|
||||
<str name="dcdocid">1</str>
|
||||
<str name="dccontinent">cna</str>
|
||||
<str name="dccountry">us</str>
|
||||
<str name="dccollection">ftciteseerx</str>
|
||||
<str name="dcprovider">CiteSeerX</str>
|
||||
<str name="dctitle">Science and more</str>
|
||||
<arr name="dccreator">
|
||||
<str>Someone</str>
|
||||
</arr>
|
||||
<arr name="dcperson">
|
||||
<str>Someone</str>
|
||||
</arr>
|
||||
<arr name="dcsubject">
|
||||
<str>Science and more</str>
|
||||
</arr>
|
||||
<str name="dcdescription">Science, and even more.</str>
|
||||
<arr name="dccontributor">
|
||||
<str>The neighbour</str>
|
||||
</arr>
|
||||
<str name="dcdate">2001</str>
|
||||
<int name="dcyear">2001</int>
|
||||
<arr name="dctype">
|
||||
<str>text</str>
|
||||
</arr>
|
||||
<arr name="dctypenorm">
|
||||
<str>1</str>
|
||||
</arr>
|
||||
<arr name="dcformat">
|
||||
<str>application/pdf</str>
|
||||
</arr>
|
||||
<arr name="dccontenttype">
|
||||
<str>application/pdf</str>
|
||||
</arr>
|
||||
<arr name="dcidentifier">
|
||||
<str>http://example.org/</str>
|
||||
</arr>
|
||||
<str name="dclink">http://example.org</str>
|
||||
<str name="dcsource">http://example.org</str>
|
||||
<arr name="dclanguage">
|
||||
<str>en</str>
|
||||
</arr>
|
||||
<str name="dcrights">Under the example.org licence</str>
|
||||
<int name="dcoa">1</int>
|
||||
<arr name="dclang">
|
||||
<str>eng</str>
|
||||
</arr>
|
||||
</doc>
|
||||
</result>
|
||||
</response>"""
|
||||
|
||||
response = mock.Mock(content=xml_mock)
|
||||
results = base.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 1)
|
||||
self.assertEqual(results[0]['title'], 'Science and more')
|
||||
self.assertEqual(results[0]['content'], 'Science, and even more.')
|
|
@ -1,178 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from collections import defaultdict
|
||||
import mock
|
||||
from searx.engines import bing
|
||||
from searx.testing import SearxTestCase
|
||||
|
||||
|
||||
class TestBingEngine(SearxTestCase):
|
||||
|
||||
def test_request(self):
|
||||
bing.supported_languages = ['en', 'fr', 'zh-CHS', 'zh-CHT', 'pt-PT', 'pt-BR']
|
||||
query = u'test_query'
|
||||
dicto = defaultdict(dict)
|
||||
dicto['pageno'] = 1
|
||||
dicto['language'] = 'fr-FR'
|
||||
params = bing.request(query.encode('utf-8'), dicto)
|
||||
self.assertTrue('url' in params)
|
||||
self.assertTrue(query in params['url'])
|
||||
self.assertTrue('language%3AFR' in params['url'])
|
||||
self.assertTrue('bing.com' in params['url'])
|
||||
|
||||
dicto['language'] = 'all'
|
||||
params = bing.request(query.encode('utf-8'), dicto)
|
||||
self.assertTrue('language' in params['url'])
|
||||
|
||||
def test_response(self):
|
||||
dicto = defaultdict(dict)
|
||||
dicto['pageno'] = 1
|
||||
dicto['language'] = 'fr-FR'
|
||||
self.assertRaises(AttributeError, bing.response, None)
|
||||
self.assertRaises(AttributeError, bing.response, [])
|
||||
self.assertRaises(AttributeError, bing.response, '')
|
||||
self.assertRaises(AttributeError, bing.response, '[]')
|
||||
|
||||
response = mock.Mock(text='<html></html>')
|
||||
response.search_params = dicto
|
||||
self.assertEqual(bing.response(response), [])
|
||||
|
||||
response = mock.Mock(text='<html></html>')
|
||||
response.search_params = dicto
|
||||
self.assertEqual(bing.response(response), [])
|
||||
|
||||
html = """
|
||||
<div>
|
||||
<div id="b_tween">
|
||||
<span class="sb_count" data-bm="4">23 900 000 résultats</span>
|
||||
</div>
|
||||
<ol id="b_results" role="main">
|
||||
<div class="sa_cc" u="0|5109|4755453613245655|UAGjXgIrPH5yh-o5oNHRx_3Zta87f_QO">
|
||||
<div Class="sa_mc">
|
||||
<div class="sb_tlst">
|
||||
<h3>
|
||||
<a href="http://this.should.be.the.link/" h="ID=SERP,5124.1">
|
||||
<strong>This</strong> should be the title</a>
|
||||
</h3>
|
||||
</div>
|
||||
<div class="sb_meta"><cite><strong>this</strong>.meta.com</cite>
|
||||
<span class="c_tlbxTrg">
|
||||
<span class="c_tlbxH" H="BASE:CACHEDPAGEDEFAULT" K="SERP,5125.1">
|
||||
</span>
|
||||
</span>
|
||||
</div>
|
||||
<p><strong>This</strong> should be the content.</p>
|
||||
</div>
|
||||
</div>
|
||||
</ol>
|
||||
</div>
|
||||
"""
|
||||
response = mock.Mock(text=html)
|
||||
response.search_params = dicto
|
||||
results = bing.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 2)
|
||||
self.assertEqual(results[0]['title'], 'This should be the title')
|
||||
self.assertEqual(results[0]['url'], 'http://this.should.be.the.link/')
|
||||
self.assertEqual(results[0]['content'], 'This should be the content.')
|
||||
self.assertEqual(results[-1]['number_of_results'], 23900000)
|
||||
|
||||
html = """
|
||||
<div>
|
||||
<div id="b_tween">
|
||||
<span class="sb_count" data-bm="4">9-18 résultats sur 23 900 000</span>
|
||||
</div>
|
||||
<ol id="b_results" role="main">
|
||||
<li class="b_algo" u="0|5109|4755453613245655|UAGjXgIrPH5yh-o5oNHRx_3Zta87f_QO">
|
||||
<div Class="sa_mc">
|
||||
<div class="sb_tlst">
|
||||
<h2>
|
||||
<a href="http://this.should.be.the.link/" h="ID=SERP,5124.1">
|
||||
<strong>This</strong> should be the title</a>
|
||||
</h2>
|
||||
</div>
|
||||
<div class="sb_meta"><cite><strong>this</strong>.meta.com</cite>
|
||||
<span class="c_tlbxTrg">
|
||||
<span class="c_tlbxH" H="BASE:CACHEDPAGEDEFAULT" K="SERP,5125.1">
|
||||
</span>
|
||||
</span>
|
||||
</div>
|
||||
<p><strong>This</strong> should be the content.</p>
|
||||
</div>
|
||||
</li>
|
||||
</ol>
|
||||
</div>
|
||||
"""
|
||||
dicto['pageno'] = 2
|
||||
response = mock.Mock(text=html)
|
||||
response.search_params = dicto
|
||||
results = bing.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 2)
|
||||
self.assertEqual(results[0]['title'], 'This should be the title')
|
||||
self.assertEqual(results[0]['url'], 'http://this.should.be.the.link/')
|
||||
self.assertEqual(results[0]['content'], 'This should be the content.')
|
||||
self.assertEqual(results[-1]['number_of_results'], 23900000)
|
||||
|
||||
html = """
|
||||
<div>
|
||||
<div id="b_tween">
|
||||
<span class="sb_count" data-bm="4">23 900 000 résultats</span>
|
||||
</div>
|
||||
<ol id="b_results" role="main">
|
||||
<li class="b_algo" u="0|5109|4755453613245655|UAGjXgIrPH5yh-o5oNHRx_3Zta87f_QO">
|
||||
<div Class="sa_mc">
|
||||
<div class="sb_tlst">
|
||||
<h2>
|
||||
<a href="http://this.should.be.the.link/" h="ID=SERP,5124.1">
|
||||
<strong>This</strong> should be the title</a>
|
||||
</h2>
|
||||
</div>
|
||||
<div class="sb_meta"><cite><strong>this</strong>.meta.com</cite>
|
||||
<span class="c_tlbxTrg">
|
||||
<span class="c_tlbxH" H="BASE:CACHEDPAGEDEFAULT" K="SERP,5125.1">
|
||||
</span>
|
||||
</span>
|
||||
</div>
|
||||
<p><strong>This</strong> should be the content.</p>
|
||||
</div>
|
||||
</li>
|
||||
</ol>
|
||||
</div>
|
||||
"""
|
||||
dicto['pageno'] = 33900000
|
||||
response = mock.Mock(text=html)
|
||||
response.search_params = dicto
|
||||
results = bing.response(response)
|
||||
self.assertEqual(bing.response(response), [])
|
||||
|
||||
def test_fetch_supported_languages(self):
|
||||
html = """<html></html>"""
|
||||
response = mock.Mock(text=html)
|
||||
results = bing._fetch_supported_languages(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 0)
|
||||
|
||||
html = """
|
||||
<html>
|
||||
<body>
|
||||
<form>
|
||||
<div id="limit-languages">
|
||||
<div>
|
||||
<div><input id="es" value="es"></input></div>
|
||||
</div>
|
||||
<div>
|
||||
<div><input id="pt_BR" value="pt_BR"></input></div>
|
||||
<div><input id="pt_PT" value="pt_PT"></input></div>
|
||||
</div>
|
||||
</div>
|
||||
</form>
|
||||
</body>
|
||||
</html>
|
||||
"""
|
||||
response = mock.Mock(text=html)
|
||||
languages = bing._fetch_supported_languages(response)
|
||||
self.assertEqual(type(languages), list)
|
||||
self.assertEqual(len(languages), 3)
|
||||
self.assertIn('es', languages)
|
||||
self.assertIn('pt-BR', languages)
|
||||
self.assertIn('pt-PT', languages)
|
|
@ -1,132 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from collections import defaultdict
|
||||
import mock
|
||||
from searx.engines import bing_images
|
||||
from searx.testing import SearxTestCase
|
||||
|
||||
|
||||
class TestBingImagesEngine(SearxTestCase):
|
||||
|
||||
def test_request(self):
|
||||
bing_images.supported_languages = ['fr-FR', 'en-US']
|
||||
bing_images.language_aliases = {}
|
||||
query = 'test_query'
|
||||
dicto = defaultdict(dict)
|
||||
dicto['pageno'] = 1
|
||||
dicto['language'] = 'fr-FR'
|
||||
dicto['safesearch'] = 1
|
||||
dicto['time_range'] = ''
|
||||
params = bing_images.request(query, dicto)
|
||||
self.assertTrue('url' in params)
|
||||
self.assertTrue(query in params['url'])
|
||||
self.assertTrue('bing.com' in params['url'])
|
||||
self.assertTrue('SRCHHPGUSR' in params['cookies'])
|
||||
self.assertTrue('DEMOTE' in params['cookies']['SRCHHPGUSR'])
|
||||
self.assertTrue('_EDGE_S' in params['cookies'])
|
||||
self.assertTrue('fr-fr' in params['cookies']['_EDGE_S'])
|
||||
|
||||
dicto['language'] = 'fr'
|
||||
params = bing_images.request(query, dicto)
|
||||
self.assertTrue('_EDGE_S' in params['cookies'])
|
||||
self.assertTrue('fr-fr' in params['cookies']['_EDGE_S'])
|
||||
|
||||
dicto['language'] = 'all'
|
||||
params = bing_images.request(query, dicto)
|
||||
self.assertTrue('_EDGE_S' in params['cookies'])
|
||||
self.assertTrue('en-us' in params['cookies']['_EDGE_S'])
|
||||
|
||||
def test_response(self):
|
||||
self.assertRaises(AttributeError, bing_images.response, None)
|
||||
self.assertRaises(AttributeError, bing_images.response, [])
|
||||
self.assertRaises(AttributeError, bing_images.response, '')
|
||||
self.assertRaises(AttributeError, bing_images.response, '[]')
|
||||
|
||||
response = mock.Mock(text='<html></html>')
|
||||
self.assertEqual(bing_images.response(response), [])
|
||||
|
||||
response = mock.Mock(text='<html></html>')
|
||||
self.assertEqual(bing_images.response(response), [])
|
||||
|
||||
html = """
|
||||
<div id="mmComponent_images_1">
|
||||
<ul>
|
||||
<li>
|
||||
<div>
|
||||
<div class="imgpt">
|
||||
<a m='{"purl":"page_url","murl":"img_url","turl":"thumb_url","t":"Page 1 title"}'>
|
||||
<img src="" alt="alt text" />
|
||||
</a>
|
||||
<div class="img_info">
|
||||
<span>1 x 1 - jpeg</span>
|
||||
<a>1.example.org</a>
|
||||
</div>
|
||||
</div>
|
||||
<div></div>
|
||||
</div>
|
||||
<div>
|
||||
<div class="imgpt">
|
||||
<a m='{"purl":"page_url2","murl":"img_url2","turl":"thumb_url2","t":"Page 2 title"}'>
|
||||
<img src="" alt="alt text 2" />
|
||||
</a>
|
||||
<div class="img_info">
|
||||
<span>2 x 2 - jpeg</span>
|
||||
<a>2.example.org</a>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</li>
|
||||
</ul>
|
||||
<ul>
|
||||
<li>
|
||||
<div>
|
||||
<div class="imgpt">
|
||||
<a m='{"purl":"page_url3","murl":"img_url3","turl":"thumb_url3","t":"Page 3 title"}'>
|
||||
<img src="" alt="alt text 3" />
|
||||
</a>
|
||||
<div class="img_info">
|
||||
<span>3 x 3 - jpeg</span>
|
||||
<a>3.example.org</a>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
"""
|
||||
html = html.replace('\r\n', '').replace('\n', '').replace('\r', '')
|
||||
response = mock.Mock(text=html)
|
||||
results = bing_images.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 3)
|
||||
self.assertEqual(results[0]['title'], 'Page 1 title')
|
||||
self.assertEqual(results[0]['url'], 'page_url')
|
||||
self.assertEqual(results[0]['content'], '')
|
||||
self.assertEqual(results[0]['thumbnail_src'], 'thumb_url')
|
||||
self.assertEqual(results[0]['img_src'], 'img_url')
|
||||
self.assertEqual(results[0]['img_format'], '1 x 1 - jpeg')
|
||||
self.assertEqual(results[0]['source'], '1.example.org')
|
||||
|
||||
def test_fetch_supported_languages(self):
|
||||
html = """
|
||||
<div>
|
||||
<div id="region-section-content">
|
||||
<ul class="b_vList">
|
||||
<li>
|
||||
<a href="https://bing...&setmkt=de-DE&s...">Germany</a>
|
||||
<a href="https://bing...&setmkt=nb-NO&s...">Norway</a>
|
||||
</li>
|
||||
</ul>
|
||||
<ul class="b_vList">
|
||||
<li>
|
||||
<a href="https://bing...&setmkt=es-AR&s...">Argentina</a>
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
"""
|
||||
response = mock.Mock(text=html)
|
||||
languages = list(bing_images._fetch_supported_languages(response))
|
||||
self.assertEqual(len(languages), 3)
|
||||
self.assertIn('de-DE', languages)
|
||||
self.assertIn('no-NO', languages)
|
||||
self.assertIn('es-AR', languages)
|
|
@ -1,147 +0,0 @@
|
|||
from collections import defaultdict
|
||||
import mock
|
||||
from searx.engines import bing_news
|
||||
from searx.testing import SearxTestCase
|
||||
import lxml
|
||||
|
||||
|
||||
class TestBingNewsEngine(SearxTestCase):
|
||||
|
||||
def test_request(self):
|
||||
bing_news.supported_languages = ['en', 'fr']
|
||||
query = 'test_query'
|
||||
dicto = defaultdict(dict)
|
||||
dicto['pageno'] = 1
|
||||
dicto['language'] = 'fr-FR'
|
||||
dicto['time_range'] = ''
|
||||
params = bing_news.request(query, dicto)
|
||||
self.assertIn('url', params)
|
||||
self.assertIn(query, params['url'])
|
||||
self.assertIn('bing.com', params['url'])
|
||||
self.assertIn('fr', params['url'])
|
||||
|
||||
dicto['language'] = 'all'
|
||||
params = bing_news.request(query, dicto)
|
||||
self.assertIn('en', params['url'])
|
||||
|
||||
def test_no_url_in_request_year_time_range(self):
|
||||
dicto = defaultdict(dict)
|
||||
query = 'test_query'
|
||||
dicto['time_range'] = 'year'
|
||||
params = bing_news.request(query, dicto)
|
||||
self.assertEqual({}, params['url'])
|
||||
|
||||
def test_response(self):
|
||||
self.assertRaises(AttributeError, bing_news.response, None)
|
||||
self.assertRaises(AttributeError, bing_news.response, [])
|
||||
self.assertRaises(AttributeError, bing_news.response, '')
|
||||
self.assertRaises(AttributeError, bing_news.response, '[]')
|
||||
|
||||
response = mock.Mock(content='<html></html>')
|
||||
self.assertEqual(bing_news.response(response), [])
|
||||
|
||||
response = mock.Mock(content='<html></html>')
|
||||
self.assertEqual(bing_news.response(response), [])
|
||||
|
||||
html = """<?xml version="1.0" encoding="utf-8" ?>
|
||||
<rss version="2.0" xmlns:News="https://www.bing.com:443/news/search?q=python&setmkt=en-US&first=1&format=RSS">
|
||||
<channel>
|
||||
<title>python - Bing News</title>
|
||||
<link>https://www.bing.com:443/news/search?q=python&setmkt=en-US&first=1&format=RSS</link>
|
||||
<description>Search results</description>
|
||||
<image>
|
||||
<url>http://10.53.64.9/rsslogo.gif</url>
|
||||
<title>test</title>
|
||||
<link>https://www.bing.com:443/news/search?q=test&setmkt=en-US&first=1&format=RSS</link>
|
||||
</image>
|
||||
<copyright>Copyright</copyright>
|
||||
<item>
|
||||
<title>Title</title>
|
||||
<link>https://www.bing.com/news/apiclick.aspx?ref=FexRss&aid=&tid=c237eccc50bd4758b106a5e3c94fce09&url=http%3a%2f%2furl.of.article%2f&c=xxxxxxxxx&mkt=en-us</link>
|
||||
<description>Article Content</description>
|
||||
<pubDate>Tue, 02 Jun 2015 13:37:00 GMT</pubDate>
|
||||
<News:Source>Infoworld</News:Source>
|
||||
<News:Image>http://a1.bing4.com/th?id=ON.13371337133713371337133713371337&pid=News</News:Image>
|
||||
<News:ImageSize>w={0}&h={1}&c=7</News:ImageSize>
|
||||
<News:ImageKeepOriginalRatio></News:ImageKeepOriginalRatio>
|
||||
<News:ImageMaxWidth>620</News:ImageMaxWidth>
|
||||
<News:ImageMaxHeight>413</News:ImageMaxHeight>
|
||||
</item>
|
||||
<item>
|
||||
<title>Another Title</title>
|
||||
<link>https://www.bing.com/news/apiclick.aspx?ref=FexRss&aid=&tid=c237eccc50bd4758b106a5e3c94fce09&url=http%3a%2f%2fanother.url.of.article%2f&c=xxxxxxxxx&mkt=en-us</link>
|
||||
<description>Another Article Content</description>
|
||||
<pubDate>Tue, 02 Jun 2015 13:37:00 GMT</pubDate>
|
||||
</item>
|
||||
</channel>
|
||||
</rss>""" # noqa
|
||||
response = mock.Mock(content=html.encode('utf-8'))
|
||||
results = bing_news.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 2)
|
||||
self.assertEqual(results[0]['title'], 'Title')
|
||||
self.assertEqual(results[0]['url'], 'http://url.of.article/')
|
||||
self.assertEqual(results[0]['content'], 'Article Content')
|
||||
self.assertEqual(results[0]['img_src'], 'https://www.bing.com/th?id=ON.13371337133713371337133713371337')
|
||||
self.assertEqual(results[1]['title'], 'Another Title')
|
||||
self.assertEqual(results[1]['url'], 'http://another.url.of.article/')
|
||||
self.assertEqual(results[1]['content'], 'Another Article Content')
|
||||
self.assertNotIn('img_src', results[1])
|
||||
|
||||
html = """<?xml version="1.0" encoding="utf-8" ?>
|
||||
<rss version="2.0" xmlns:News="https://www.bing.com:443/news/search?q=python&setmkt=en-US&first=1&format=RSS">
|
||||
<channel>
|
||||
<title>python - Bing News</title>
|
||||
<link>https://www.bing.com:443/news/search?q=python&setmkt=en-US&first=1&format=RSS</link>
|
||||
<description>Search results</description>
|
||||
<image>
|
||||
<url>http://10.53.64.9/rsslogo.gif</url>
|
||||
<title>test</title>
|
||||
<link>https://www.bing.com:443/news/search?q=test&setmkt=en-US&first=1&format=RSS</link>
|
||||
</image>
|
||||
<copyright>Copyright</copyright>
|
||||
<item>
|
||||
<title>Title</title>
|
||||
<link>http://another.url.of.article/</link>
|
||||
<description>Article Content</description>
|
||||
<pubDate>garbage</pubDate>
|
||||
<News:Source>Infoworld</News:Source>
|
||||
<News:Image>http://another.bing.com/image</News:Image>
|
||||
<News:ImageSize>w={0}&h={1}&c=7</News:ImageSize>
|
||||
<News:ImageKeepOriginalRatio></News:ImageKeepOriginalRatio>
|
||||
<News:ImageMaxWidth>620</News:ImageMaxWidth>
|
||||
<News:ImageMaxHeight>413</News:ImageMaxHeight>
|
||||
</item>
|
||||
</channel>
|
||||
</rss>""" # noqa
|
||||
response = mock.Mock(content=html.encode('utf-8'))
|
||||
results = bing_news.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 1)
|
||||
self.assertEqual(results[0]['title'], 'Title')
|
||||
self.assertEqual(results[0]['url'], 'http://another.url.of.article/')
|
||||
self.assertEqual(results[0]['content'], 'Article Content')
|
||||
self.assertEqual(results[0]['img_src'], 'http://another.bing.com/image')
|
||||
|
||||
html = """<?xml version="1.0" encoding="utf-8" ?>
|
||||
<rss version="2.0" xmlns:News="https://www.bing.com:443/news/search?q=python&setmkt=en-US&first=1&format=RSS">
|
||||
<channel>
|
||||
<title>python - Bing News</title>
|
||||
<link>https://www.bing.com:443/news/search?q=python&setmkt=en-US&first=1&format=RSS</link>
|
||||
<description>Search results</description>
|
||||
<image>
|
||||
<url>http://10.53.64.9/rsslogo.gif</url>
|
||||
<title>test</title>
|
||||
<link>https://www.bing.com:443/news/search?q=test&setmkt=en-US&first=1&format=RSS</link>
|
||||
</image>
|
||||
</channel>
|
||||
</rss>""" # noqa
|
||||
|
||||
response = mock.Mock(content=html.encode('utf-8'))
|
||||
results = bing_news.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 0)
|
||||
|
||||
html = """<?xml version="1.0" encoding="utf-8" ?>gabarge"""
|
||||
response = mock.Mock(content=html.encode('utf-8'))
|
||||
self.assertRaises(lxml.etree.XMLSyntaxError, bing_news.response, response)
|
|
@ -1,72 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from collections import defaultdict
|
||||
import mock
|
||||
from searx.engines import bing_videos
|
||||
from searx.testing import SearxTestCase
|
||||
|
||||
|
||||
class TestBingVideosEngine(SearxTestCase):
|
||||
|
||||
def test_request(self):
|
||||
bing_videos.supported_languages = ['fr-FR', 'en-US']
|
||||
bing_videos.language_aliases = {}
|
||||
query = 'test_query'
|
||||
dicto = defaultdict(dict)
|
||||
dicto['pageno'] = 1
|
||||
dicto['language'] = 'fr-FR'
|
||||
dicto['safesearch'] = 0
|
||||
dicto['time_range'] = ''
|
||||
params = bing_videos.request(query, dicto)
|
||||
self.assertTrue('url' in params)
|
||||
self.assertTrue(query in params['url'])
|
||||
self.assertTrue('bing.com' in params['url'])
|
||||
self.assertTrue('SRCHHPGUSR' in params['cookies'])
|
||||
self.assertTrue('OFF' in params['cookies']['SRCHHPGUSR'])
|
||||
self.assertTrue('_EDGE_S' in params['cookies'])
|
||||
self.assertTrue('fr-fr' in params['cookies']['_EDGE_S'])
|
||||
|
||||
dicto['pageno'] = 2
|
||||
dicto['time_range'] = 'day'
|
||||
dicto['safesearch'] = 2
|
||||
params = bing_videos.request(query, dicto)
|
||||
self.assertTrue('first=29' in params['url'])
|
||||
self.assertTrue('1440' in params['url'])
|
||||
self.assertIn('SRCHHPGUSR', params['cookies'])
|
||||
self.assertTrue('STRICT' in params['cookies']['SRCHHPGUSR'])
|
||||
|
||||
def test_response(self):
|
||||
self.assertRaises(AttributeError, bing_videos.response, None)
|
||||
self.assertRaises(AttributeError, bing_videos.response, [])
|
||||
self.assertRaises(AttributeError, bing_videos.response, '')
|
||||
self.assertRaises(AttributeError, bing_videos.response, '[]')
|
||||
|
||||
response = mock.Mock(text='<html></html>')
|
||||
self.assertEqual(bing_videos.response(response), [])
|
||||
|
||||
response = mock.Mock(text='<html></html>')
|
||||
self.assertEqual(bing_videos.response(response), [])
|
||||
|
||||
html = """
|
||||
<div class="dg_u">
|
||||
<div>
|
||||
<a>
|
||||
<div>
|
||||
<div>
|
||||
<div class="mc_vtvc_meta_block">
|
||||
<div><span>100 views</span><span>1 year ago</span></div><div><span>ExampleTube</span><span>Channel 1<span></div> #noqa
|
||||
</div>
|
||||
</div>
|
||||
<div class="vrhdata" vrhm='{"du":"01:11","murl":"https://www.example.com/watch?v=DEADBEEF","thid":"OVP.BINGTHUMB1","vt":"Title 1"}'></div> # noqa
|
||||
</div>
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
"""
|
||||
response = mock.Mock(text=html)
|
||||
results = bing_videos.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 1)
|
||||
self.assertEqual(results[0]['title'], 'Title 1')
|
||||
self.assertEqual(results[0]['url'], 'https://www.example.com/watch?v=DEADBEEF')
|
||||
self.assertEqual(results[0]['content'], '01:11 - 100 views - 1 year ago - ExampleTube - Channel 1')
|
||||
self.assertEqual(results[0]['thumbnail'], 'https://www.bing.com/th?id=OVP.BINGTHUMB1')
@@ -1,112 +0,0 @@
# -*- coding: utf-8 -*-
|
||||
from collections import defaultdict
|
||||
import mock
|
||||
from searx.engines import btdigg
|
||||
from searx.testing import SearxTestCase
|
||||
|
||||
|
||||
class TestBtdiggEngine(SearxTestCase):
|
||||
|
||||
def test_request(self):
|
||||
query = 'test_query'
|
||||
dicto = defaultdict(dict)
|
||||
dicto['pageno'] = 0
|
||||
params = btdigg.request(query, dicto)
|
||||
self.assertIn('url', params)
|
||||
self.assertIn(query, params['url'])
|
||||
self.assertIn('btdig.com', params['url'])
|
||||
|
||||
def test_response(self):
|
||||
self.assertRaises(AttributeError, btdigg.response, None)
|
||||
self.assertRaises(AttributeError, btdigg.response, [])
|
||||
self.assertRaises(AttributeError, btdigg.response, '')
|
||||
self.assertRaises(AttributeError, btdigg.response, '[]')
|
||||
|
||||
response = mock.Mock(text='<html></html>')
|
||||
self.assertEqual(btdigg.response(response), [])
|
||||
|
||||
html = u"""
|
||||
<div class="one_result" style="display:table-row;background-color:#e8e8e8">
|
||||
<div style="display:table-cell;color:rgb(0, 0, 0)">
|
||||
<div style="display:table">
|
||||
<div style="display:table-row">
|
||||
<div class="torrent_name" style="display:table-cell">
|
||||
<a style="color:rgb(0, 0, 204);text-decoration:underline;font-size:150%"
|
||||
href="http://btdig.com/a72f35b7ee3a10928f02bb799e40ae5db701ed1c/pdf?q=pdf&p=1&order=0"
|
||||
>3.9GBdeLibrosByHuasoFromHell(3de4)</a>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div style="display:table">
|
||||
<div style="display:table-row">
|
||||
<div style="display:table-cell">
|
||||
<span class="torrent_files" style="color:#666;padding-left:10px">4217</span> files <span
|
||||
class="torrent_size" style="color:#666;padding-left:10px">1 GB</span><span
|
||||
class="torrent_age" style="color:rgb(0, 102, 0);padding-left:10px;margin: 0px 4px"
|
||||
>found 3 years ago</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div style="display:table;width:100%;padding:10px">
|
||||
<div style="display:table-row">
|
||||
<div class="torrent_magnet" style="display:table-cell">
|
||||
<div class="fa fa-magnet" style="color:#cc0000">
|
||||
<a href="magnet:?xt=urn:btih:a72f35b7ee3a10928f02bb799e40ae5db701ed1c&dn=3.9GBdeLibrosBy..."
|
||||
title="Download via magnet-link"> magnet:?xt=urn:btih:a72f35b7ee...</a>
|
||||
</div>
|
||||
</div>
|
||||
<div style="display:table-cell;color:rgb(0, 0, 0);text-align:right">
|
||||
<span style="color:rgb(136, 136, 136);margin: 0px 0px 0px 4px"></span><span
|
||||
style="color:rgb(0, 102, 0);margin: 0px 4px">found 3 years ago</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="torrent_excerpt" style="display:table;padding:10px;white-space:nowrap">
|
||||
<div class="fa fa-folder-open" style="padding-left:0em"> 3.9GBdeLibrosByHuasoFromHell(3de4)</div><br/>
|
||||
<div class="fa fa-folder-open" style="padding-left:1em"> Libros H-Z</div><br/>
|
||||
<div class="fa fa-folder-open" style="padding-left:2em"> H</div><br/><div class="fa fa-file-archive-o"
|
||||
style="padding-left:3em"> H.H. Hollis - El truco de la espada-<b
|
||||
style="color:red; background-color:yellow">pdf</b>.zip</div><span
|
||||
style="color:#666;padding-left:10px">17 KB</span><br/>
|
||||
<div class="fa fa-file-archive-o" style="padding-left:3em"> Hagakure - El Libro del Samurai-<b
|
||||
style="color:red; background-color:yellow">pdf</b>.zip</div><span
|
||||
style="color:#666;padding-left:10px">95 KB</span><br/>
|
||||
<div class="fa fa-folder-open" style="padding-left:3em"> Hamsun, Knut (1859-1952)</div><br/>
|
||||
<div class="fa fa-file-archive-o" style="padding-left:4em"> Hamsun, Knut - Hambre-<b
|
||||
style="color:red; background-color:yellow">pdf</b>.zip</div><span
|
||||
style="color:#666;padding-left:10px">786 KB</span><br/>
|
||||
<div class="fa fa-plus-circle"><a
|
||||
href="http://btdig.com/a72f35b7ee3a10928f02bb799e40ae5db701ed1c/pdf?q=pdf&p=1&order=0"
|
||||
> 4214 hidden files<span style="color:#666;padding-left:10px">1 GB</span></a></div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
"""
|
||||
response = mock.Mock(text=html.encode('utf-8'))
|
||||
results = btdigg.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 1)
|
||||
self.assertEqual(results[0]['title'], '3.9GBdeLibrosByHuasoFromHell(3de4)')
|
||||
self.assertEqual(results[0]['url'],
|
||||
'http://btdig.com/a72f35b7ee3a10928f02bb799e40ae5db701ed1c/pdf?q=pdf&p=1&order=0')
|
||||
self.assertEqual(results[0]['content'],
|
||||
'3.9GBdeLibrosByHuasoFromHell(3de4) | ' +
|
||||
'Libros H-Z | ' +
|
||||
'H H.H. Hollis - El truco de la espada-pdf.zip17 KB | ' +
|
||||
'Hagakure - El Libro del Samurai-pdf.zip95 KB | ' +
|
||||
'Hamsun, Knut (1859-1952) | Hamsun, Knut - Hambre-pdf.zip786 KB | ' +
|
||||
'4214 hidden files1 GB')
|
||||
self.assertEqual(results[0]['filesize'], 1 * 1024 * 1024 * 1024)
|
||||
self.assertEqual(results[0]['files'], 4217)
|
||||
self.assertEqual(results[0]['magnetlink'],
|
||||
'magnet:?xt=urn:btih:a72f35b7ee3a10928f02bb799e40ae5db701ed1c&dn=3.9GBdeLibrosBy...')
|
||||
|
||||
html = """
|
||||
<div style="display:table-row;background-color:#e8e8e8">
|
||||
|
||||
</div>
|
||||
"""
|
||||
response = mock.Mock(text=html.encode('utf-8'))
|
||||
results = btdigg.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 0)
@@ -1,56 +0,0 @@
from collections import defaultdict
|
||||
from datetime import datetime
|
||||
import mock
|
||||
from searx.engines import currency_convert
|
||||
from searx.testing import SearxTestCase
|
||||
|
||||
|
||||
class TestCurrencyConvertEngine(SearxTestCase):
|
||||
|
||||
def test_request(self):
|
||||
query = b'test_query'
|
||||
dicto = defaultdict(dict)
|
||||
dicto['pageno'] = 1
|
||||
params = currency_convert.request(query, dicto)
|
||||
self.assertNotIn('url', params)
|
||||
|
||||
query = b'convert 10 Pound Sterlings to United States Dollars'
|
||||
params = currency_convert.request(query, dicto)
|
||||
self.assertIn('url', params)
|
||||
self.assertIn('duckduckgo.com', params['url'])
|
||||
self.assertIn('GBP', params['url'])
|
||||
self.assertIn('USD', params['url'])
|
||||
|
||||
def test_response(self):
|
||||
dicto = defaultdict(dict)
|
||||
dicto['amount'] = float(10)
|
||||
dicto['from'] = "GBP"
|
||||
dicto['to'] = "USD"
|
||||
dicto['from_name'] = "pound sterling"
|
||||
dicto['to_name'] = "United States dollar"
|
||||
response = mock.Mock(text='a,b,c,d', search_params=dicto)
|
||||
self.assertEqual(currency_convert.response(response), [])
|
||||
body = """ddg_spice_currency(
|
||||
{
|
||||
"conversion":{
|
||||
"converted-amount": "0.5"
|
||||
},
|
||||
"topConversions":[
|
||||
{
|
||||
},
|
||||
{
|
||||
}
|
||||
]
|
||||
}
|
||||
);
|
||||
"""
|
||||
response = mock.Mock(text=body, search_params=dicto)
|
||||
results = currency_convert.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 1)
|
||||
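# the fixture's converted-amount of 0.5 per GBP should give 10 * 0.5 = 5.0 USD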
self.assertEqual(results[0]['answer'], '10.0 GBP = 5.0 USD, 1 GBP (pound sterling)' +
|
||||
' = 0.5 USD (United States dollar)')
|
||||
|
||||
target_url = 'https://duckduckgo.com/js/spice/currency/1/{}/{}'.format(
|
||||
dicto['from'], dicto['to'])
|
||||
self.assertEqual(results[0]['url'], target_url)
@@ -1,112 +0,0 @@
# -*- coding: utf-8 -*-
|
||||
from collections import defaultdict
|
||||
import mock
|
||||
from searx.engines import dailymotion
|
||||
from searx.testing import SearxTestCase
|
||||
|
||||
|
||||
class TestDailymotionEngine(SearxTestCase):
|
||||
|
||||
def test_request(self):
|
||||
dailymotion.supported_languages = ['en', 'fr']
|
||||
query = 'test_query'
|
||||
dicto = defaultdict(dict)
|
||||
dicto['pageno'] = 0
|
||||
dicto['language'] = 'fr-FR'
|
||||
params = dailymotion.request(query, dicto)
|
||||
self.assertTrue('url' in params)
|
||||
self.assertTrue(query in params['url'])
|
||||
self.assertTrue('dailymotion.com' in params['url'])
|
||||
self.assertTrue('fr' in params['url'])
|
||||
|
||||
dicto['language'] = 'all'
|
||||
params = dailymotion.request(query, dicto)
|
||||
self.assertTrue('en' in params['url'])
|
||||
|
||||
def test_response(self):
|
||||
self.assertRaises(AttributeError, dailymotion.response, None)
|
||||
self.assertRaises(AttributeError, dailymotion.response, [])
|
||||
self.assertRaises(AttributeError, dailymotion.response, '')
|
||||
self.assertRaises(AttributeError, dailymotion.response, '[]')
|
||||
|
||||
response = mock.Mock(text='{}')
|
||||
self.assertEqual(dailymotion.response(response), [])
|
||||
|
||||
response = mock.Mock(text='{"data": []}')
|
||||
self.assertEqual(dailymotion.response(response), [])
|
||||
|
||||
json = """
|
||||
{
|
||||
"page": 1,
|
||||
"limit": 5,
|
||||
"explicit": false,
|
||||
"total": 289487,
|
||||
"has_more": true,
|
||||
"list": [
|
||||
{
|
||||
"created_time": 1422173451,
|
||||
"title": "Title",
|
||||
"description": "Description",
|
||||
"duration": 81,
|
||||
"url": "http://www.url",
|
||||
"thumbnail_360_url": "http://thumbnail",
|
||||
"id": "x2fit7q"
|
||||
}
|
||||
]
|
||||
}
|
||||
"""
|
||||
response = mock.Mock(text=json)
|
||||
results = dailymotion.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 1)
|
||||
self.assertEqual(results[0]['title'], 'Title')
|
||||
self.assertEqual(results[0]['url'], 'http://www.url')
|
||||
self.assertEqual(results[0]['content'], 'Description')
|
||||
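# the embedded player snippet should reference the video id from the fixture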
self.assertIn('x2fit7q', results[0]['embedded'])
|
||||
|
||||
json = r"""
|
||||
{"toto":[
|
||||
{"id":200,"name":"Artist Name",
|
||||
"link":"http:\/\/www.dailymotion.com\/artist\/1217","type":"artist"}
|
||||
]}
|
||||
"""
|
||||
response = mock.Mock(text=json)
|
||||
results = dailymotion.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 0)
|
||||
|
||||
def test_fetch_supported_languages(self):
|
||||
json = r"""
|
||||
{"list":[{"code":"af","name":"Afrikaans","native_name":"Afrikaans",
|
||||
"localized_name":"Afrikaans","display_name":"Afrikaans"},
|
||||
{"code":"ar","name":"Arabic","native_name":"\u0627\u0644\u0639\u0631\u0628\u064a\u0629",
|
||||
"localized_name":"Arabic","display_name":"Arabic"},
|
||||
{"code":"la","name":"Latin","native_name":null,
|
||||
"localized_name":"Latin","display_name":"Latin"}
|
||||
]}
|
||||
"""
|
||||
response = mock.Mock(text=json)
|
||||
languages = dailymotion._fetch_supported_languages(response)
|
||||
self.assertEqual(type(languages), dict)
|
||||
self.assertEqual(len(languages), 3)
|
||||
self.assertIn('af', languages)
|
||||
self.assertIn('ar', languages)
|
||||
self.assertIn('la', languages)
|
||||
|
||||
self.assertEqual(type(languages['af']), dict)
|
||||
self.assertEqual(type(languages['ar']), dict)
|
||||
self.assertEqual(type(languages['la']), dict)
|
||||
|
||||
self.assertIn('name', languages['af'])
|
||||
self.assertIn('name', languages['ar'])
|
||||
self.assertNotIn('name', languages['la'])
|
||||
|
||||
self.assertIn('english_name', languages['af'])
|
||||
self.assertIn('english_name', languages['ar'])
|
||||
self.assertIn('english_name', languages['la'])
|
||||
|
||||
self.assertEqual(languages['af']['name'], 'Afrikaans')
|
||||
self.assertEqual(languages['af']['english_name'], 'Afrikaans')
|
||||
self.assertEqual(languages['ar']['name'], u'العربية')
|
||||
self.assertEqual(languages['ar']['english_name'], 'Arabic')
|
||||
self.assertEqual(languages['la']['english_name'], 'Latin')
@@ -1,57 +0,0 @@
from collections import defaultdict
|
||||
import mock
|
||||
from searx.engines import deezer
|
||||
from searx.testing import SearxTestCase
|
||||
|
||||
|
||||
class TestDeezerEngine(SearxTestCase):
|
||||
|
||||
def test_request(self):
|
||||
query = 'test_query'
|
||||
dicto = defaultdict(dict)
|
||||
dicto['pageno'] = 0
|
||||
params = deezer.request(query, dicto)
|
||||
self.assertTrue('url' in params)
|
||||
self.assertTrue(query in params['url'])
|
||||
self.assertTrue('deezer.com' in params['url'])
|
||||
|
||||
def test_response(self):
|
||||
self.assertRaises(AttributeError, deezer.response, None)
|
||||
self.assertRaises(AttributeError, deezer.response, [])
|
||||
self.assertRaises(AttributeError, deezer.response, '')
|
||||
self.assertRaises(AttributeError, deezer.response, '[]')
|
||||
|
||||
response = mock.Mock(text='{}')
|
||||
self.assertEqual(deezer.response(response), [])
|
||||
|
||||
response = mock.Mock(text='{"data": []}')
|
||||
self.assertEqual(deezer.response(response), [])
|
||||
|
||||
json = r"""
|
||||
{"data":[
|
||||
{"id":100, "title":"Title of track",
|
||||
"link":"https:\/\/www.deezer.com\/track\/1094042","duration":232,
|
||||
"artist":{"id":200,"name":"Artist Name",
|
||||
"link":"https:\/\/www.deezer.com\/artist\/1217","type":"artist"},
|
||||
"album":{"id":118106,"title":"Album Title","type":"album"},"type":"track"}
|
||||
]}
|
||||
"""
|
||||
response = mock.Mock(text=json)
|
||||
results = deezer.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 1)
|
||||
self.assertEqual(results[0]['title'], 'Title of track')
|
||||
self.assertEqual(results[0]['url'], 'https://www.deezer.com/track/1094042')
|
||||
self.assertEqual(results[0]['content'], 'Artist Name - Album Title - Title of track')
|
||||
self.assertTrue('100' in results[0]['embedded'])
|
||||
|
||||
json = r"""
|
||||
{"data":[
|
||||
{"id":200,"name":"Artist Name",
|
||||
"link":"https:\/\/www.deezer.com\/artist\/1217","type":"artist"}
|
||||
]}
|
||||
"""
|
||||
response = mock.Mock(text=json)
|
||||
results = deezer.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 0)
@@ -1,24 +0,0 @@
from collections import defaultdict
import mock
from searx.engines import deviantart
from searx.testing import SearxTestCase


class TestDeviantartEngine(SearxTestCase):

def test_request(self):
dicto = defaultdict(dict)
query = 'test_query'
dicto['pageno'] = 0
dicto['time_range'] = ''
params = deviantart.request(query, dicto)
self.assertTrue('url' in params)
self.assertTrue(query in params['url'])
self.assertTrue('deviantart.com' in params['url'])

def test_no_url_in_request_year_time_range(self):
dicto = defaultdict(dict)
query = 'test_query'
dicto['time_range'] = 'year'
params = deviantart.request(query, dicto)
self.assertEqual({}, params['url'])
@@ -1,61 +0,0 @@
from collections import defaultdict
|
||||
import mock
|
||||
from searx.engines import digbt
|
||||
from searx.testing import SearxTestCase
|
||||
|
||||
|
||||
class TestDigBTEngine(SearxTestCase):
|
||||
|
||||
def test_request(self):
|
||||
query = 'test_query'
|
||||
dicto = defaultdict(dict)
|
||||
dicto['pageno'] = 0
|
||||
params = digbt.request(query, dicto)
|
||||
self.assertIn('url', params)
|
||||
self.assertIn(query, params['url'])
|
||||
self.assertIn('digbt.org', params['url'])
|
||||
|
||||
def test_response(self):
|
||||
self.assertRaises(AttributeError, digbt.response, None)
|
||||
self.assertRaises(AttributeError, digbt.response, [])
|
||||
self.assertRaises(AttributeError, digbt.response, '')
|
||||
self.assertRaises(AttributeError, digbt.response, '[]')
|
||||
|
||||
response = mock.Mock(text='<html></html>')
|
||||
self.assertEqual(digbt.response(response), [])
|
||||
|
||||
html = """
|
||||
<table class="table">
|
||||
<tr><td class="x-item">
|
||||
<div>
|
||||
<a title="The Big Bang Theory" class="title" href="/The-Big-Bang-Theory-d2.html">
|
||||
The Big <span class="highlight">Bang</span> Theory
|
||||
</a>
|
||||
<span class="ctime"><span style="color:red;">4 hours ago</span></span>
|
||||
</div>
|
||||
<div class="files">
|
||||
<ul>
|
||||
<li>The Big Bang Theory 2.9 GB</li>
|
||||
<li>....</li>
|
||||
</ul>
|
||||
</div>
|
||||
<div class="tail">
|
||||
Files: 1 Size: 2.9 GB Downloads: 1 Updated: <span style="color:red;">4 hours ago</span>
|
||||
|
||||
<a class="title" href="magnet:?xt=urn:btih:a&dn=The+Big+Bang+Theory">
|
||||
<span class="glyphicon glyphicon-magnet"></span> magnet-link
|
||||
</a>
|
||||
|
||||
</div>
|
||||
</td></tr>
|
||||
</table>
|
||||
"""
|
||||
response = mock.Mock(text=html.encode('utf-8'))
|
||||
results = digbt.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 1)
|
||||
self.assertEqual(results[0]['title'], 'The Big Bang Theory')
|
||||
self.assertEqual(results[0]['url'], 'https://digbt.org/The-Big-Bang-Theory-d2.html')
|
||||
self.assertEqual(results[0]['content'], 'The Big Bang Theory 2.9 GB ....')
|
||||
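# 2.9 GB in the fixture should parse to 2.9 * 1024**3, i.e. roughly 3113851289 bytes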
self.assertEqual(results[0]['filesize'], 3113851289)
|
||||
self.assertEqual(results[0]['magnetlink'], 'magnet:?xt=urn:btih:a&dn=The+Big+Bang+Theory')
@@ -1,16 +0,0 @@
from collections import defaultdict
import mock
from searx.engines import digg
from searx.testing import SearxTestCase


class TestDiggEngine(SearxTestCase):

def test_request(self):
query = 'test_query'
dicto = defaultdict(dict)
dicto['pageno'] = 1
params = digg.request(query, dicto)
self.assertIn('url', params)
self.assertIn(query, params['url'])
self.assertIn('digg.com', params['url'])
@@ -1,79 +0,0 @@
# -*- coding: utf-8 -*-
|
||||
from collections import defaultdict
|
||||
import mock
|
||||
from searx.engines import doku
|
||||
from searx.testing import SearxTestCase
|
||||
|
||||
|
||||
class TestDokuEngine(SearxTestCase):
|
||||
|
||||
def test_request(self):
|
||||
query = 'test_query'
|
||||
dicto = defaultdict(dict)
|
||||
params = doku.request(query, dicto)
|
||||
self.assertIn('url', params)
|
||||
self.assertIn(query, params['url'])
|
||||
|
||||
def test_response(self):
|
||||
self.assertRaises(AttributeError, doku.response, None)
|
||||
self.assertRaises(AttributeError, doku.response, [])
|
||||
self.assertRaises(AttributeError, doku.response, '')
|
||||
self.assertRaises(AttributeError, doku.response, '[]')
|
||||
|
||||
response = mock.Mock(text='<html></html>')
|
||||
self.assertEqual(doku.response(response), [])
|
||||
|
||||
html = u"""
|
||||
<div class="search_quickresult">
|
||||
<h3>Pages trouvées :</h3>
|
||||
<ul class="search_quickhits">
|
||||
<li> <a href="/xfconf-query" class="wikilink1" title="xfconf-query">xfconf-query</a></li>
|
||||
</ul>
|
||||
<div class="clearer"></div>
|
||||
</div>
|
||||
"""
|
||||
response = mock.Mock(text=html)
|
||||
results = doku.response(response)
|
||||
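# the expected URL presumably prefixes the engine's configured base_url (assumed to be http://localhost:8090 here)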
expected = [{'content': '', 'title': 'xfconf-query', 'url': 'http://localhost:8090/xfconf-query'}]
|
||||
self.assertEqual(doku.response(response), expected)
|
||||
|
||||
html = u"""
|
||||
<dl class="search_results">
|
||||
<dt><a href="/xvnc?s[]=query" class="wikilink1" title="xvnc">xvnc</a>: 40 Occurrences trouvées</dt>
|
||||
<dd>er = /usr/bin/Xvnc
|
||||
server_args = -inetd -<strong class="search_hit">query</strong> localhost -geometry 640x480 ... er = /usr/bin/Xvnc
|
||||
server_args = -inetd -<strong class="search_hit">query</strong> localhost -geometry 800x600 ... er = /usr/bin/Xvnc
|
||||
server_args = -inetd -<strong class="search_hit">query</strong> localhost -geometry 1024x768 ... er = /usr/bin/Xvnc
|
||||
server_args = -inetd -<strong class="search_hit">query</strong> localhost -geometry 1280x1024 -depth 8 -Sec</dd>
|
||||
<dt><a href="/postfix_mysql_tls_sasl_1404?s[]=query"
|
||||
class="wikilink1"
|
||||
title="postfix_mysql_tls_sasl_1404">postfix_mysql_tls_sasl_1404</a>: 14 Occurrences trouvées</dt>
|
||||
<dd>tdepasse
|
||||
hosts = 127.0.0.1
|
||||
dbname = postfix
|
||||
<strong class="search_hit">query</strong> = SELECT goto FROM alias WHERE address='%s' AND a... tdepasse
|
||||
hosts = 127.0.0.1
|
||||
dbname = postfix
|
||||
<strong class="search_hit">query</strong> = SELECT domain FROM domain WHERE domain='%s'
|
||||
#optional <strong class="search_hit">query</strong> to use when relaying for backup MX
|
||||
#<strong class="search_hit">query</strong> = SELECT domain FROM domain WHERE domain='%s' and backupmx =</dd>
|
||||
<dt><a href="/bind9?s[]=query" class="wikilink1" title="bind9">bind9</a>: 12 Occurrences trouvées</dt>
|
||||
<dd> printcmd
|
||||
;; Got answer:
|
||||
;; ->>HEADER<<- opcode: <strong class="search_hit">QUERY</strong>, status: NOERROR, id: 13427
|
||||
;; flags: qr aa rd ra; <strong class="search_hit">QUERY</strong>: 1, ANSWER: 1, AUTHORITY: 1, ADDITIONAL: 1
|
||||
|
||||
[...]
|
||||
|
||||
;; <strong class="search_hit">Query</strong> time: 1 msec
|
||||
;; SERVER: 127.0.0.1#53(127.0.0.1)
|
||||
;... par la requête (<strong class="search_hit">Query</strong> time) , entre la première et la deuxième requête.</dd>
|
||||
</dl>
|
||||
"""
|
||||
response = mock.Mock(text=html)
|
||||
results = doku.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 3)
|
||||
self.assertEqual(results[0]['title'], 'xvnc')
|
||||
# FIXME self.assertEqual(results[0]['url'], u'http://this.should.be.the.link/ű')
|
||||
# FIXME self.assertEqual(results[0]['content'], 'This should be the content.')
@@ -1,106 +0,0 @@
# -*- coding: utf-8 -*-
|
||||
from collections import defaultdict
|
||||
import mock
|
||||
from searx.engines import load_engine, duckduckgo
|
||||
from searx.testing import SearxTestCase
|
||||
|
||||
|
||||
class TestDuckduckgoEngine(SearxTestCase):
|
||||
|
||||
def test_request(self):
|
||||
duckduckgo = load_engine({'engine': 'duckduckgo', 'name': 'duckduckgo'})
|
||||
|
||||
query = 'test_query'
|
||||
dicto = defaultdict(dict)
|
||||
dicto['pageno'] = 1
|
||||
dicto['time_range'] = ''
|
||||
|
||||
dicto['language'] = 'de-CH'
|
||||
params = duckduckgo.request(query, dicto)
|
||||
self.assertIn('url', params)
|
||||
self.assertIn(query, params['url'])
|
||||
self.assertIn('duckduckgo.com', params['url'])
|
||||
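# DuckDuckGo region codes appear to be country-language, so de-CH becomes ch-de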
self.assertIn('ch-de', params['url'])
|
||||
self.assertIn('s=0', params['url'])
|
||||
|
||||
# when ddg uses non standard codes
|
||||
dicto['language'] = 'zh-HK'
|
||||
params = duckduckgo.request(query, dicto)
|
||||
self.assertIn('hk-tzh', params['url'])
|
||||
|
||||
dicto['language'] = 'en-GB'
|
||||
params = duckduckgo.request(query, dicto)
|
||||
self.assertIn('uk-en', params['url'])
|
||||
|
||||
# no country given
|
||||
dicto['language'] = 'en'
|
||||
params = duckduckgo.request(query, dicto)
|
||||
self.assertIn('us-en', params['url'])
|
||||
|
||||
def test_no_url_in_request_year_time_range(self):
|
||||
dicto = defaultdict(dict)
|
||||
query = 'test_query'
|
||||
dicto['time_range'] = 'year'
|
||||
params = duckduckgo.request(query, dicto)
|
||||
self.assertEqual({}, params['url'])
|
||||
|
||||
def test_response(self):
|
||||
self.assertRaises(AttributeError, duckduckgo.response, None)
|
||||
self.assertRaises(AttributeError, duckduckgo.response, [])
|
||||
self.assertRaises(AttributeError, duckduckgo.response, '')
|
||||
self.assertRaises(AttributeError, duckduckgo.response, '[]')
|
||||
|
||||
response = mock.Mock(text='<html></html>')
|
||||
self.assertEqual(duckduckgo.response(response), [])
|
||||
|
||||
html = u"""
|
||||
<div class="result results_links results_links_deep web-result result--no-result">
|
||||
<div class="links_main links_deep result__body">
|
||||
<h2 class="result__title">
|
||||
</h2>
|
||||
<div class="no-results">No results</div>
|
||||
<div class="result__extras">
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
"""
|
||||
response = mock.Mock(text=html)
|
||||
results = duckduckgo.response(response)
|
||||
self.assertEqual(duckduckgo.response(response), [])
|
||||
|
||||
html = u"""
|
||||
<div class="result results_links results_links_deep web-result ">
|
||||
<div class="links_main links_deep result__body">
|
||||
<h2 class="result__title">
|
||||
<a rel="nofollow" class="result__a" href="http://this.should.be.the.link/ű">
|
||||
This <b>is</b> <b>the</b> title
|
||||
</a>
|
||||
</h2>
|
||||
<a class="result__snippet" href="http://this.should.be.the.link/ű">
|
||||
<b>This</b> should be the content.
|
||||
</a>
|
||||
<div class="result__extras">
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
"""
|
||||
response = mock.Mock(text=html)
|
||||
results = duckduckgo.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 1)
|
||||
self.assertEqual(results[0]['title'], 'This is the title')
|
||||
self.assertEqual(results[0]['url'], u'http://this.should.be.the.link/ű')
|
||||
self.assertEqual(results[0]['content'], 'This should be the content.')
|
||||
|
||||
def test_fetch_supported_languages(self):
|
||||
js = """some code...regions:{
|
||||
"wt-wt":"All Results","ar-es":"Argentina","au-en":"Australia","at-de":"Austria","be-fr":"Belgium (fr)"
|
||||
}some more code..."""
|
||||
response = mock.Mock(text=js)
|
||||
languages = list(duckduckgo._fetch_supported_languages(response))
|
||||
self.assertEqual(len(languages), 5)
|
||||
self.assertIn('wt-WT', languages)
|
||||
self.assertIn('es-AR', languages)
|
||||
self.assertIn('en-AU', languages)
|
||||
self.assertIn('de-AT', languages)
|
||||
self.assertIn('fr-BE', languages)
@@ -1,255 +0,0 @@
from collections import defaultdict
|
||||
import mock
|
||||
from searx.engines import duckduckgo_definitions
|
||||
from searx.testing import SearxTestCase
|
||||
|
||||
|
||||
class TestDDGDefinitionsEngine(SearxTestCase):
|
||||
|
||||
def test_result_to_text(self):
|
||||
url = ''
|
||||
text = 'Text'
|
||||
html_result = 'Html'
|
||||
result = duckduckgo_definitions.result_to_text(url, text, html_result)
|
||||
self.assertEqual(result, text)
|
||||
|
||||
html_result = '<a href="url">Text in link</a>'
|
||||
result = duckduckgo_definitions.result_to_text(url, text, html_result)
|
||||
self.assertEqual(result, 'Text in link')
|
||||
|
||||
def test_request(self):
|
||||
duckduckgo_definitions.supported_languages = ['en-US', 'es-ES']
|
||||
query = 'test_query'
|
||||
dicto = defaultdict(dict)
|
||||
dicto['pageno'] = 1
|
||||
dicto['language'] = 'es'
|
||||
params = duckduckgo_definitions.request(query, dicto)
|
||||
self.assertIn('url', params)
|
||||
self.assertIn(query, params['url'])
|
||||
self.assertIn('duckduckgo.com', params['url'])
|
||||
self.assertIn('headers', params)
|
||||
self.assertIn('Accept-Language', params['headers'])
|
||||
self.assertIn('es', params['headers']['Accept-Language'])
|
||||
|
||||
def test_response(self):
|
||||
self.assertRaises(AttributeError, duckduckgo_definitions.response, None)
|
||||
self.assertRaises(AttributeError, duckduckgo_definitions.response, [])
|
||||
self.assertRaises(AttributeError, duckduckgo_definitions.response, '')
|
||||
self.assertRaises(AttributeError, duckduckgo_definitions.response, '[]')
|
||||
|
||||
response = mock.Mock(text='{}')
|
||||
self.assertEqual(duckduckgo_definitions.response(response), [])
|
||||
|
||||
response = mock.Mock(text='{"data": []}')
|
||||
self.assertEqual(duckduckgo_definitions.response(response), [])
|
||||
|
||||
json = """
|
||||
{
|
||||
"DefinitionSource": "definition source",
|
||||
"Heading": "heading",
|
||||
"ImageWidth": 0,
|
||||
"RelatedTopics": [
|
||||
{
|
||||
"Result": "Top-level domains",
|
||||
"Icon": {
|
||||
"URL": "",
|
||||
"Height": "",
|
||||
"Width": ""
|
||||
},
|
||||
"FirstURL": "https://first.url",
|
||||
"Text": "text"
|
||||
},
|
||||
{
|
||||
"Topics": [
|
||||
{
|
||||
"Result": "result topic",
|
||||
"Icon": {
|
||||
"URL": "",
|
||||
"Height": "",
|
||||
"Width": ""
|
||||
},
|
||||
"FirstURL": "https://duckduckgo.com/?q=2%2F2",
|
||||
"Text": "result topic text"
|
||||
}
|
||||
],
|
||||
"Name": "name"
|
||||
}
|
||||
],
|
||||
"Entity": "Entity",
|
||||
"Type": "A",
|
||||
"Redirect": "",
|
||||
"DefinitionURL": "http://definition.url",
|
||||
"AbstractURL": "https://abstract.url",
|
||||
"Definition": "this is the definition",
|
||||
"AbstractSource": "abstract source",
|
||||
"Infobox": {
|
||||
"content": [
|
||||
{
|
||||
"data_type": "string",
|
||||
"value": "1999",
|
||||
"label": "Introduced",
|
||||
"wiki_order": 0
|
||||
}
|
||||
],
|
||||
"meta": [
|
||||
{
|
||||
"data_type": "string",
|
||||
"value": ".test",
|
||||
"label": "article_title"
|
||||
}
|
||||
]
|
||||
},
|
||||
"Image": "image.png",
|
||||
"ImageIsLogo": 0,
|
||||
"Abstract": "abstract",
|
||||
"AbstractText": "abstract text",
|
||||
"AnswerType": "",
|
||||
"ImageHeight": 0,
|
||||
"Results": [{
|
||||
"Result" : "result title",
|
||||
"Icon" : {
|
||||
"URL" : "result url",
|
||||
"Height" : 16,
|
||||
"Width" : 16
|
||||
},
|
||||
"FirstURL" : "result first url",
|
||||
"Text" : "result text"
|
||||
}
|
||||
],
|
||||
"Answer": "answer"
|
||||
}
|
||||
"""
|
||||
response = mock.Mock(text=json)
|
||||
results = duckduckgo_definitions.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 4)
|
||||
self.assertEqual(results[0]['answer'], 'answer')
|
||||
self.assertEqual(results[1]['title'], 'heading')
|
||||
self.assertEqual(results[1]['url'], 'result first url')
|
||||
self.assertEqual(results[2]['suggestion'], 'text')
|
||||
self.assertEqual(results[3]['infobox'], 'heading')
|
||||
self.assertEqual(results[3]['id'], 'https://definition.url')
|
||||
self.assertEqual(results[3]['entity'], 'Entity')
|
||||
self.assertIn('abstract', results[3]['content'])
|
||||
self.assertIn('this is the definition', results[3]['content'])
|
||||
self.assertEqual(results[3]['img_src'], 'image.png')
|
||||
self.assertIn('Introduced', results[3]['attributes'][0]['label'])
|
||||
self.assertIn('1999', results[3]['attributes'][0]['value'])
|
||||
self.assertIn({'url': 'https://abstract.url', 'title': 'abstract source'}, results[3]['urls'])
|
||||
self.assertIn({'url': 'http://definition.url', 'title': 'definition source'}, results[3]['urls'])
|
||||
self.assertIn({'name': 'name', 'suggestions': ['result topic text']}, results[3]['relatedTopics'])
|
||||
|
||||
json = """
|
||||
{
|
||||
"DefinitionSource": "definition source",
|
||||
"Heading": "heading",
|
||||
"ImageWidth": 0,
|
||||
"RelatedTopics": [],
|
||||
"Entity": "Entity",
|
||||
"Type": "A",
|
||||
"Redirect": "",
|
||||
"DefinitionURL": "",
|
||||
"AbstractURL": "https://abstract.url",
|
||||
"Definition": "",
|
||||
"AbstractSource": "abstract source",
|
||||
"Image": "",
|
||||
"ImageIsLogo": 0,
|
||||
"Abstract": "",
|
||||
"AbstractText": "abstract text",
|
||||
"AnswerType": "",
|
||||
"ImageHeight": 0,
|
||||
"Results": [],
|
||||
"Answer": ""
|
||||
}
|
||||
"""
|
||||
response = mock.Mock(text=json)
|
||||
results = duckduckgo_definitions.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 1)
|
||||
self.assertEqual(results[0]['url'], 'https://abstract.url')
|
||||
self.assertEqual(results[0]['title'], 'heading')
|
||||
self.assertEqual(results[0]['content'], '')
|
||||
|
||||
json = """
|
||||
{
|
||||
"DefinitionSource": "definition source",
|
||||
"Heading": "heading",
|
||||
"ImageWidth": 0,
|
||||
"RelatedTopics": [
|
||||
{
|
||||
"Result": "Top-level domains",
|
||||
"Icon": {
|
||||
"URL": "",
|
||||
"Height": "",
|
||||
"Width": ""
|
||||
},
|
||||
"FirstURL": "https://first.url",
|
||||
"Text": "heading"
|
||||
},
|
||||
{
|
||||
"Name": "name"
|
||||
},
|
||||
{
|
||||
"Topics": [
|
||||
{
|
||||
"Result": "result topic",
|
||||
"Icon": {
|
||||
"URL": "",
|
||||
"Height": "",
|
||||
"Width": ""
|
||||
},
|
||||
"FirstURL": "https://duckduckgo.com/?q=2%2F2",
|
||||
"Text": "heading"
|
||||
}
|
||||
],
|
||||
"Name": "name"
|
||||
}
|
||||
],
|
||||
"Entity": "Entity",
|
||||
"Type": "A",
|
||||
"Redirect": "",
|
||||
"DefinitionURL": "http://definition.url",
|
||||
"AbstractURL": "https://abstract.url",
|
||||
"Definition": "this is the definition",
|
||||
"AbstractSource": "abstract source",
|
||||
"Infobox": {
|
||||
"meta": [
|
||||
{
|
||||
"data_type": "string",
|
||||
"value": ".test",
|
||||
"label": "article_title"
|
||||
}
|
||||
]
|
||||
},
|
||||
"Image": "image.png",
|
||||
"ImageIsLogo": 0,
|
||||
"Abstract": "abstract",
|
||||
"AbstractText": "abstract text",
|
||||
"AnswerType": "",
|
||||
"ImageHeight": 0,
|
||||
"Results": [{
|
||||
"Result" : "result title",
|
||||
"Icon" : {
|
||||
"URL" : "result url",
|
||||
"Height" : 16,
|
||||
"Width" : 16
|
||||
},
|
||||
"Text" : "result text"
|
||||
}
|
||||
],
|
||||
"Answer": ""
|
||||
}
|
||||
"""
|
||||
response = mock.Mock(text=json)
|
||||
results = duckduckgo_definitions.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 1)
|
||||
self.assertEqual(results[0]['infobox'], 'heading')
|
||||
self.assertEqual(results[0]['id'], 'https://definition.url')
|
||||
self.assertEqual(results[0]['entity'], 'Entity')
|
||||
self.assertIn('abstract', results[0]['content'])
|
||||
self.assertIn('this is the definition', results[0]['content'])
|
||||
self.assertEqual(results[0]['img_src'], 'image.png')
|
||||
self.assertIn({'url': 'https://abstract.url', 'title': 'abstract source'}, results[0]['urls'])
|
||||
self.assertIn({'url': 'http://definition.url', 'title': 'definition source'}, results[0]['urls'])
|
||||
self.assertIn({'name': 'name', 'suggestions': []}, results[0]['relatedTopics'])
@@ -1,75 +0,0 @@
# -*- coding: utf-8 -*-
|
||||
from collections import defaultdict
|
||||
import mock
|
||||
from searx.engines import duckduckgo_images
|
||||
from searx.testing import SearxTestCase
|
||||
|
||||
|
||||
class TestDuckduckgoImagesEngine(SearxTestCase):
|
||||
|
||||
def test_request(self):
|
||||
duckduckgo_images.supported_languages = ['de-CH', 'en-US']
|
||||
query = 'test_query'
|
||||
dicto = defaultdict(dict)
|
||||
dicto['is_test'] = True
|
||||
dicto['pageno'] = 1
|
||||
dicto['safesearch'] = 0
|
||||
dicto['language'] = 'all'
|
||||
params = duckduckgo_images.request(query, dicto)
|
||||
self.assertIn('url', params)
|
||||
self.assertIn(query, params['url'])
|
||||
self.assertIn('duckduckgo.com', params['url'])
|
||||
self.assertIn('s=0', params['url'])
|
||||
self.assertIn('p=-1', params['url'])
|
||||
self.assertIn('vqd=12345', params['url'])
|
||||
|
||||
# test paging, safe search and language
|
||||
dicto['pageno'] = 2
|
||||
dicto['safesearch'] = 2
|
||||
dicto['language'] = 'de'
|
||||
params = duckduckgo_images.request(query, dicto)
|
||||
self.assertIn('url', params)
|
||||
self.assertIn(query, params['url'])
|
||||
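# page 2 presumably corresponds to an offset of s=50, i.e. 50 results per page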
self.assertIn('s=50', params['url'])
|
||||
self.assertIn('p=1', params['url'])
|
||||
self.assertIn('ch-de', params['url'])
|
||||
|
||||
def test_response(self):
|
||||
self.assertRaises(AttributeError, duckduckgo_images.response, None)
|
||||
self.assertRaises(AttributeError, duckduckgo_images.response, [])
|
||||
self.assertRaises(AttributeError, duckduckgo_images.response, '')
|
||||
self.assertRaises(AttributeError, duckduckgo_images.response, '[]')
|
||||
|
||||
response = mock.Mock(text='If this error persists, please let us know: ops@duckduckgo.com')
|
||||
self.assertRaises(Exception, duckduckgo_images.response, response)
|
||||
|
||||
json = u"""
|
||||
{
|
||||
"query": "test_query",
|
||||
"results": [
|
||||
{
|
||||
"title": "Result 1",
|
||||
"url": "https://site1.url",
|
||||
"thumbnail": "https://thumb1.nail",
|
||||
"image": "https://image1"
|
||||
},
|
||||
{
|
||||
"title": "Result 2",
|
||||
"url": "https://site2.url",
|
||||
"thumbnail": "https://thumb2.nail",
|
||||
"image": "https://image2"
|
||||
}
|
||||
]
|
||||
}
|
||||
"""
|
||||
response = mock.Mock(text=json)
|
||||
results = duckduckgo_images.response(response)
|
||||
self.assertEqual(len(results), 2)
|
||||
self.assertEqual(results[0]['title'], 'Result 1')
|
||||
self.assertEqual(results[0]['url'], 'https://site1.url')
|
||||
self.assertEqual(results[0]['thumbnail_src'], 'https://thumb1.nail')
|
||||
self.assertEqual(results[0]['img_src'], 'https://image1')
|
||||
self.assertEqual(results[1]['title'], 'Result 2')
|
||||
self.assertEqual(results[1]['url'], 'https://site2.url')
|
||||
self.assertEqual(results[1]['thumbnail_src'], 'https://thumb2.nail')
|
||||
self.assertEqual(results[1]['img_src'], 'https://image2')
@@ -1,47 +0,0 @@
from collections import defaultdict
|
||||
import mock
|
||||
from searx.engines import duden
|
||||
from searx.testing import SearxTestCase
|
||||
from datetime import datetime
|
||||
|
||||
|
||||
class TestDudenEngine(SearxTestCase):
|
||||
|
||||
def test_request(self):
|
||||
query = 'Haus'
|
||||
dic = defaultdict(dict)
|
||||
data = [
|
||||
[1, 'https://www.duden.de/suchen/dudenonline/Haus'],
|
||||
[2, 'https://www.duden.de/suchen/dudenonline/Haus?search_api_fulltext=&page=1']
|
||||
]
|
||||
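# the page parameter in the URL appears to be zero-based, so page 2 maps to page=1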
for page_no, exp_res in data:
|
||||
dic['pageno'] = page_no
|
||||
params = duden.request(query, dic)
|
||||
self.assertTrue('url' in params)
|
||||
self.assertTrue(query in params['url'])
|
||||
self.assertTrue('duden.de' in params['url'])
|
||||
self.assertEqual(params['url'], exp_res)
|
||||
|
||||
def test_response(self):
|
||||
resp = mock.Mock(text='<html></html>')
|
||||
self.assertEqual(duden.response(resp), [])
|
||||
|
||||
html = """
|
||||
<section class="vignette">
|
||||
<h2"> <a href="/rechtschreibung/Haus">
|
||||
<strong>This is the title also here</strong>
|
||||
</a> </h2>
|
||||
<p>This is the content</p>
|
||||
</section>
|
||||
"""
|
||||
resp = mock.Mock(text=html)
|
||||
results = duden.response(resp)
|
||||
|
||||
self.assertEqual(len(results), 1)
|
||||
self.assertEqual(type(results), list)
|
||||
|
||||
# testing result (dictionary entry)
|
||||
r = results[0]
|
||||
self.assertEqual(r['url'], 'https://www.duden.de/rechtschreibung/Haus')
|
||||
self.assertEqual(r['title'], 'This is the title also here')
|
||||
self.assertEqual(r['content'], 'This is the content')
@@ -1,26 +0,0 @@
from searx.engines import dummy
from searx.testing import SearxTestCase


class TestDummyEngine(SearxTestCase):

def test_request(self):
test_params = [
[1, 2, 3],
['a'],
[],
1
]
for params in test_params:
self.assertEqual(dummy.request(None, params), params)

def test_response(self):
responses = [
None,
[],
True,
dict(),
tuple()
]
for response in responses:
self.assertEqual(dummy.response(response), [])
@@ -1,113 +0,0 @@
# -*- coding: utf-8 -*-
|
||||
from collections import defaultdict
|
||||
import mock
|
||||
from searx.engines import faroo
|
||||
from searx.testing import SearxTestCase
|
||||
|
||||
|
||||
class TestFarooEngine(SearxTestCase):
|
||||
|
||||
def test_request(self):
|
||||
query = 'test_query'
|
||||
dicto = defaultdict(dict)
|
||||
dicto['pageno'] = 1
|
||||
dicto['language'] = 'fr-FR'
|
||||
dicto['category'] = 'general'
|
||||
params = faroo.request(query, dicto)
|
||||
self.assertIn('url', params)
|
||||
self.assertIn(query, params['url'])
|
||||
self.assertIn('faroo.com', params['url'])
|
||||
self.assertIn('en', params['url'])
|
||||
self.assertIn('web', params['url'])
|
||||
|
||||
dicto['language'] = 'all'
|
||||
params = faroo.request(query, dicto)
|
||||
self.assertIn('en', params['url'])
|
||||
|
||||
dicto['language'] = 'de-DE'
|
||||
params = faroo.request(query, dicto)
|
||||
self.assertIn('de', params['url'])
|
||||
|
||||
def test_response(self):
|
||||
self.assertRaises(AttributeError, faroo.response, None)
|
||||
self.assertRaises(AttributeError, faroo.response, [])
|
||||
self.assertRaises(AttributeError, faroo.response, '')
|
||||
self.assertRaises(AttributeError, faroo.response, '[]')
|
||||
|
||||
response = mock.Mock(text='{}')
|
||||
self.assertEqual(faroo.response(response), [])
|
||||
|
||||
response = mock.Mock(text='{"data": []}')
|
||||
self.assertEqual(faroo.response(response), [])
|
||||
|
||||
response = mock.Mock(text='{"data": []}', status_code=429)
|
||||
self.assertRaises(Exception, faroo.response, response)
|
||||
|
||||
json = """
|
||||
{
|
||||
"results": [
|
||||
{
|
||||
"title": "This is the title",
|
||||
"kwic": "This is the content",
|
||||
"content": "",
|
||||
"url": "http://this.is.the.url/",
|
||||
"iurl": "",
|
||||
"domain": "css3test.com",
|
||||
"author": "Jim Dalrymple",
|
||||
"news": true,
|
||||
"votes": "10",
|
||||
"date": 1360622563000,
|
||||
"related": []
|
||||
},
|
||||
{
|
||||
"title": "This is the title2",
|
||||
"kwic": "This is the content2",
|
||||
"content": "",
|
||||
"url": "http://this.is.the.url2/",
|
||||
"iurl": "",
|
||||
"domain": "css3test.com",
|
||||
"author": "Jim Dalrymple",
|
||||
"news": false,
|
||||
"votes": "10",
|
||||
"related": []
|
||||
},
|
||||
{
|
||||
"title": "This is the title3",
|
||||
"kwic": "This is the content3",
|
||||
"content": "",
|
||||
"url": "http://this.is.the.url3/",
|
||||
"iurl": "http://upload.wikimedia.org/optimized.jpg",
|
||||
"domain": "css3test.com",
|
||||
"author": "Jim Dalrymple",
|
||||
"news": false,
|
||||
"votes": "10",
|
||||
"related": []
|
||||
}
|
||||
],
|
||||
"query": "test",
|
||||
"suggestions": [],
|
||||
"count": 100,
|
||||
"start": 1,
|
||||
"length": 10,
|
||||
"time": "15"
|
||||
}
|
||||
"""
|
||||
response = mock.Mock(text=json)
|
||||
results = faroo.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 3)
|
||||
self.assertEqual(results[0]['title'], 'This is the title')
|
||||
self.assertEqual(results[0]['url'], 'http://this.is.the.url/')
|
||||
self.assertEqual(results[0]['content'], 'This is the content')
|
||||
self.assertEqual(results[1]['title'], 'This is the title2')
|
||||
self.assertEqual(results[1]['url'], 'http://this.is.the.url2/')
|
||||
self.assertEqual(results[1]['content'], 'This is the content2')
|
||||
self.assertEqual(results[2]['thumbnail'], 'http://upload.wikimedia.org/optimized.jpg')
|
||||
|
||||
json = """
|
||||
{}
|
||||
"""
|
||||
response = mock.Mock(text=json)
|
||||
results = faroo.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 0)
@@ -1,60 +0,0 @@
import mock
|
||||
from collections import defaultdict
|
||||
from searx.engines import fdroid
|
||||
from searx.testing import SearxTestCase
|
||||
|
||||
|
||||
class TestFdroidEngine(SearxTestCase):
|
||||
|
||||
def test_request(self):
|
||||
query = 'test_query'
|
||||
dic = defaultdict(dict)
|
||||
dic['pageno'] = 1
|
||||
params = fdroid.request(query, dic)
|
||||
self.assertTrue('url' in params)
|
||||
self.assertTrue(query in params['url'])
|
||||
self.assertTrue('search.f-droid.org' in params['url'])
|
||||
|
||||
def test_response_empty(self):
|
||||
resp = mock.Mock(text='<html></html>')
|
||||
self.assertEqual(fdroid.response(resp), [])
|
||||
|
||||
def test_response_oneresult(self):
|
||||
html = """
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>test</title>
|
||||
</head>
|
||||
<body>
|
||||
<div class="site-wrapper">
|
||||
<div class="main-content">
|
||||
<a class="package-header" href="https://example.com/app.url">
|
||||
<img class="package-icon" src="https://example.com/appexample.logo.png" />
|
||||
|
||||
<div class="package-info">
|
||||
<h4 class="package-name">
|
||||
App Example 1
|
||||
</h4>
|
||||
|
||||
<div class="package-desc">
|
||||
<span class="package-summary">Description App Example 1</span>
|
||||
<span class="package-license">GPL-3.0-only</span>
|
||||
</div>
|
||||
</div>
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
"""
|
||||
|
||||
resp = mock.Mock(text=html)
|
||||
results = fdroid.response(resp)
|
||||
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 1)
|
||||
self.assertEqual(results[0]['url'], 'https://example.com/app.url')
|
||||
self.assertEqual(results[0]['title'], 'App Example 1')
|
||||
self.assertEqual(results[0]['content'], 'Description App Example 1 - GPL-3.0-only')
|
||||
self.assertEqual(results[0]['img_src'], 'https://example.com/appexample.logo.png')
@@ -1,142 +0,0 @@
from collections import defaultdict
|
||||
import mock
|
||||
from searx.engines import flickr
|
||||
from searx.testing import SearxTestCase
|
||||
|
||||
|
||||
class TestFlickrEngine(SearxTestCase):
|
||||
|
||||
def test_request(self):
|
||||
query = 'test_query'
|
||||
dicto = defaultdict(dict)
|
||||
dicto['pageno'] = 0
|
||||
params = flickr.request(query, dicto)
|
||||
self.assertTrue('url' in params)
|
||||
self.assertTrue(query in params['url'])
|
||||
self.assertTrue('flickr.com' in params['url'])
|
||||
|
||||
def test_response(self):
|
||||
self.assertRaises(AttributeError, flickr.response, None)
|
||||
self.assertRaises(AttributeError, flickr.response, [])
|
||||
self.assertRaises(AttributeError, flickr.response, '')
|
||||
self.assertRaises(AttributeError, flickr.response, '[]')
|
||||
|
||||
response = mock.Mock(text='{}')
|
||||
self.assertEqual(flickr.response(response), [])
|
||||
|
||||
response = mock.Mock(text='{"data": []}')
|
||||
self.assertEqual(flickr.response(response), [])
|
||||
|
||||
json = r"""
|
||||
{ "photos": { "page": 1, "pages": "41001", "perpage": 100, "total": "4100032",
|
||||
"photo": [
|
||||
{ "id": "15751017054", "owner": "66847915@N08",
|
||||
"secret": "69c22afc40", "server": "7285", "farm": 8,
|
||||
"title": "Photo title", "ispublic": 1,
|
||||
"isfriend": 0, "isfamily": 0,
|
||||
"description": { "_content": "Description" },
|
||||
"ownername": "Owner",
|
||||
"url_o": "https:\/\/farm8.staticflickr.com\/7285\/15751017054_9178e0f963_o.jpg",
|
||||
"height_o": "2100", "width_o": "2653",
|
||||
"url_n": "https:\/\/farm8.staticflickr.com\/7285\/15751017054_69c22afc40_n.jpg",
|
||||
"height_n": "253", "width_n": "320",
|
||||
"url_z": "https:\/\/farm8.staticflickr.com\/7285\/15751017054_69c22afc40_z.jpg",
|
||||
"height_z": "507", "width_z": "640" }
|
||||
] }, "stat": "ok" }
|
||||
"""
|
||||
response = mock.Mock(text=json)
|
||||
results = flickr.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 1)
|
||||
self.assertEqual(results[0]['title'], 'Photo title')
|
||||
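# the result URL is built from the owner NSID and photo id given in the fixture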
self.assertEqual(results[0]['url'], 'https://www.flickr.com/photos/66847915@N08/15751017054')
|
||||
self.assertTrue('o.jpg' in results[0]['img_src'])
|
||||
self.assertTrue('n.jpg' in results[0]['thumbnail_src'])
|
||||
self.assertTrue('Owner' in results[0]['author'])
|
||||
self.assertTrue('Description' in results[0]['content'])
|
||||
|
||||
json = r"""
|
||||
{ "photos": { "page": 1, "pages": "41001", "perpage": 100, "total": "4100032",
|
||||
"photo": [
|
||||
{ "id": "15751017054", "owner": "66847915@N08",
|
||||
"secret": "69c22afc40", "server": "7285", "farm": 8,
|
||||
"title": "Photo title", "ispublic": 1,
|
||||
"isfriend": 0, "isfamily": 0,
|
||||
"description": { "_content": "Description" },
|
||||
"ownername": "Owner",
|
||||
"url_z": "https:\/\/farm8.staticflickr.com\/7285\/15751017054_69c22afc40_z.jpg",
|
||||
"height_z": "507", "width_z": "640" }
|
||||
] }, "stat": "ok" }
|
||||
"""
|
||||
response = mock.Mock(text=json)
|
||||
results = flickr.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 1)
|
||||
self.assertEqual(results[0]['title'], 'Photo title')
|
||||
self.assertEqual(results[0]['url'], 'https://www.flickr.com/photos/66847915@N08/15751017054')
|
||||
self.assertTrue('z.jpg' in results[0]['img_src'])
|
||||
self.assertTrue('z.jpg' in results[0]['thumbnail_src'])
|
||||
self.assertTrue('Owner' in results[0]['author'])
|
||||
self.assertTrue('Description' in results[0]['content'])
|
||||
|
||||
json = r"""
|
||||
{ "photos": { "page": 1, "pages": "41001", "perpage": 100, "total": "4100032",
|
||||
"photo": [
|
||||
{ "id": "15751017054", "owner": "66847915@N08",
|
||||
"secret": "69c22afc40", "server": "7285", "farm": 8,
|
||||
"title": "Photo title", "ispublic": 1,
|
||||
"isfriend": 0, "isfamily": 0,
|
||||
"description": { "_content": "Description" },
|
||||
"ownername": "Owner",
|
||||
"url_o": "https:\/\/farm8.staticflickr.com\/7285\/15751017054_9178e0f963_o.jpg",
|
||||
"height_o": "2100", "width_o": "2653" }
|
||||
] }, "stat": "ok" }
|
||||
"""
|
||||
response = mock.Mock(text=json)
|
||||
results = flickr.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 1)
|
||||
self.assertEqual(results[0]['title'], 'Photo title')
|
||||
self.assertEqual(results[0]['url'], 'https://www.flickr.com/photos/66847915@N08/15751017054')
|
||||
self.assertTrue('o.jpg' in results[0]['img_src'])
|
||||
self.assertTrue('o.jpg' in results[0]['thumbnail_src'])
|
||||
self.assertTrue('Owner' in results[0]['author'])
|
||||
self.assertTrue('Description' in results[0]['content'])
|
||||
|
||||
json = r"""
|
||||
{ "photos": { "page": 1, "pages": "41001", "perpage": 100, "total": "4100032",
|
||||
"photo": [
|
||||
{ "id": "15751017054", "owner": "66847915@N08",
|
||||
"secret": "69c22afc40", "server": "7285", "farm": 8,
|
||||
"title": "Photo title", "ispublic": 1,
|
||||
"isfriend": 0, "isfamily": 0,
|
||||
"description": { "_content": "Description" },
|
||||
"ownername": "Owner",
|
||||
"url_n": "https:\/\/farm8.staticflickr.com\/7285\/15751017054_69c22afc40_n.jpg",
|
||||
"height_n": "253", "width_n": "320" }
|
||||
] }, "stat": "ok" }
|
||||
"""
|
||||
response = mock.Mock(text=json)
|
||||
results = flickr.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 0)
|
||||
|
||||
json = """
|
||||
{ "photos": { "page": 1, "pages": "41001", "perpage": 100, "total": "4100032",
|
||||
"toto": [] }, "stat": "ok" }
|
||||
"""
|
||||
response = mock.Mock(text=json)
|
||||
results = flickr.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 0)
|
||||
|
||||
json = r"""
|
||||
{"toto":[
|
||||
{"id":200,"name":"Artist Name",
|
||||
"link":"http:\/\/www.flickr.com\/artist\/1217","type":"artist"}
|
||||
]}
|
||||
"""
|
||||
response = mock.Mock(text=json)
|
||||
results = flickr.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 0)
@@ -1,357 +0,0 @@
from collections import defaultdict
|
||||
import mock
|
||||
from searx.engines import flickr_noapi
|
||||
from searx.testing import SearxTestCase
|
||||
|
||||
|
||||
class TestFlickrNoapiEngine(SearxTestCase):
|
||||
|
||||
def test_build_flickr_url(self):
|
||||
url = flickr_noapi.build_flickr_url("uid", "pid")
|
||||
self.assertIn("uid", url)
|
||||
self.assertIn("pid", url)
|
||||
|
||||
def test_request(self):
|
||||
query = 'test_query'
|
||||
dicto = defaultdict(dict)
|
||||
dicto['pageno'] = 1
|
||||
dicto['time_range'] = ''
|
||||
params = flickr_noapi.request(query, dicto)
|
||||
self.assertIn('url', params)
|
||||
self.assertIn(query, params['url'])
|
||||
self.assertIn('flickr.com', params['url'])
|
||||
|
||||
def test_response(self):
|
||||
self.assertRaises(AttributeError, flickr_noapi.response, None)
|
||||
self.assertRaises(AttributeError, flickr_noapi.response, [])
|
||||
self.assertRaises(AttributeError, flickr_noapi.response, '')
|
||||
self.assertRaises(AttributeError, flickr_noapi.response, '[]')
|
||||
|
||||
response = mock.Mock(text='"modelExport:{"legend":[],"main":{"search-photos-lite-models":[{"photos":{}}]}}')
|
||||
self.assertEqual(flickr_noapi.response(response), [])
|
||||
|
||||
response = \
|
||||
mock.Mock(text='"modelExport:{"legend":[],"main":{"search-photos-lite-models":[{"photos":{"_data":[]}}]}}')
|
||||
self.assertEqual(flickr_noapi.response(response), [])
|
||||
|
||||
# everything is ok test
|
||||
json = """
|
||||
modelExport: {
|
||||
"legend": [
|
||||
[
|
||||
"search-photos-lite-models",
|
||||
"0",
|
||||
"photos",
|
||||
"_data",
|
||||
"0"
|
||||
]
|
||||
],
|
||||
"main": {
|
||||
"search-photos-lite-models": [
|
||||
{
|
||||
"photos": {
|
||||
"_data": [
|
||||
{
|
||||
"_flickrModelRegistry": "photo-lite-models",
|
||||
"title": "This%20is%20the%20title",
|
||||
"username": "Owner",
|
||||
"pathAlias": "klink692",
|
||||
"realname": "Owner",
|
||||
"license": 0,
|
||||
"ownerNsid": "59729010@N00",
|
||||
"canComment": false,
|
||||
"commentCount": 14,
|
||||
"faveCount": 21,
|
||||
"id": "14001294434",
|
||||
"sizes": {
|
||||
"c": {
|
||||
"displayUrl": "//farm8.staticflickr.com/7246/14001294434_410f653777_c.jpg",
|
||||
"width": 541,
|
||||
"height": 800,
|
||||
"url": "//c4.staticflickr.com/8/7246/14001294434_410f653777_c.jpg",
|
||||
"key": "c"
|
||||
},
|
||||
"h": {
|
||||
"displayUrl": "//farm8.staticflickr.com/7246/14001294434_761d32237a_h.jpg",
|
||||
"width": 1081,
|
||||
"height": 1600,
|
||||
"url": "//c4.staticflickr.com/8/7246/14001294434_761d32237a_h.jpg",
|
||||
"key": "h"
|
||||
},
|
||||
"k": {
|
||||
"displayUrl": "//farm8.staticflickr.com/7246/14001294434_f145a2c11a_k.jpg",
|
||||
"width": 1383,
|
||||
"height": 2048,
|
||||
"url": "//c4.staticflickr.com/8/7246/14001294434_f145a2c11a_k.jpg",
|
||||
"key": "k"
|
||||
},
|
||||
"l": {
|
||||
"displayUrl": "//farm8.staticflickr.com/7246/14001294434_410f653777_b.jpg",
|
||||
"width": 692,
|
||||
"height": 1024,
|
||||
"url": "//c4.staticflickr.com/8/7246/14001294434_410f653777_b.jpg",
|
||||
"key": "l"
|
||||
},
|
||||
"m": {
|
||||
"displayUrl": "//farm8.staticflickr.com/7246/14001294434_410f653777.jpg",
|
||||
"width": 338,
|
||||
"height": 500,
|
||||
"url": "//c4.staticflickr.com/8/7246/14001294434_410f653777.jpg",
|
||||
"key": "m"
|
||||
},
|
||||
"n": {
|
||||
"displayUrl": "//farm8.staticflickr.com/7246/14001294434_410f653777_n.jpg",
|
||||
"width": 216,
|
||||
"height": 320,
|
||||
"url": "//c4.staticflickr.com/8/7246/14001294434_410f653777_n.jpg",
|
||||
"key": "n"
|
||||
},
|
||||
"q": {
|
||||
"displayUrl": "//farm8.staticflickr.com/7246/14001294434_410f653777_q.jpg",
|
||||
"width": 150,
|
||||
"height": 150,
|
||||
"url": "//c4.staticflickr.com/8/7246/14001294434_410f653777_q.jpg",
|
||||
"key": "q"
|
||||
},
|
||||
"s": {
|
||||
"displayUrl": "//farm8.staticflickr.com/7246/14001294434_410f653777_m.jpg",
|
||||
"width": 162,
|
||||
"height": 240,
|
||||
"url": "//c4.staticflickr.com/8/7246/14001294434_410f653777_m.jpg",
|
||||
"key": "s"
|
||||
},
|
||||
"sq": {
|
||||
"displayUrl": "//farm8.staticflickr.com/7246/14001294434_410f653777_s.jpg",
|
||||
"width": 75,
|
||||
"height": 75,
|
||||
"url": "//c4.staticflickr.com/8/7246/14001294434_410f653777_s.jpg",
|
||||
"key": "sq"
|
||||
},
|
||||
"t": {
|
||||
"displayUrl": "//farm8.staticflickr.com/7246/14001294434_410f653777_t.jpg",
|
||||
"width": 68,
|
||||
"height": 100,
|
||||
"url": "//c4.staticflickr.com/8/7246/14001294434_410f653777_t.jpg",
|
||||
"key": "t"
|
||||
},
|
||||
"z": {
|
||||
"displayUrl": "//farm8.staticflickr.com/7246/14001294434_410f653777_z.jpg",
|
||||
"width": 433,
|
||||
"height": 640,
|
||||
"url": "//c4.staticflickr.com/8/7246/14001294434_410f653777_z.jpg",
|
||||
"key": "z"
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
"""
# Flickr serves search results in a JSON block named 'modelExport' buried inside a script tag;
# this JSON is served as a single line terminating with a comma.
json = ''.join(json.split()) + ',\n'
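# A hedged sketch (an assumption, not flickr_noapi's actual implementation) of how such a
# single-line 'modelExport' object could be pulled out of raw page text and decoded:
import re
from json import loads as load_json

def _sketch_extract_model_export(page_text):
    # grab the one-line JSON object that follows 'modelExport:' and drop the trailing comma
    match = re.search(r'modelExport:\s*(\{.*\}),', page_text)
    return load_json(match.group(1)) if match else None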
|
||||
response = mock.Mock(text=json)
|
||||
results = flickr_noapi.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 1)
|
||||
self.assertEqual(results[0]['title'], 'This is the title')
|
||||
self.assertEqual(results[0]['url'], 'https://www.flickr.com/photos/59729010@N00/14001294434')
|
||||
self.assertIn('k.jpg', results[0]['img_src'])
|
||||
self.assertIn('n.jpg', results[0]['thumbnail_src'])
|
||||
self.assertIn('Owner', results[0]['author'])
|
||||
|
||||
# no n size, only the z size
|
||||
json = """
|
||||
modelExport: {
|
||||
"legend": [
|
||||
[
|
||||
"search-photos-lite-models",
|
||||
"0",
|
||||
"photos",
|
||||
"_data",
|
||||
"0"
|
||||
]
|
||||
],
|
||||
"main": {
|
||||
"search-photos-lite-models": [
|
||||
{
|
||||
"photos": {
|
||||
"_data": [
|
||||
{
|
||||
"_flickrModelRegistry": "photo-lite-models",
|
||||
"title": "This%20is%20the%20title",
|
||||
"username": "Owner",
|
||||
"pathAlias": "klink692",
|
||||
"realname": "Owner",
|
||||
"license": 0,
|
||||
"ownerNsid": "59729010@N00",
|
||||
"canComment": false,
|
||||
"commentCount": 14,
|
||||
"faveCount": 21,
|
||||
"id": "14001294434",
|
||||
"sizes": {
|
||||
"z": {
|
||||
"displayUrl": "//farm8.staticflickr.com/7246/14001294434_410f653777_z.jpg",
|
||||
"width": 433,
|
||||
"height": 640,
|
||||
"url": "//c4.staticflickr.com/8/7246/14001294434_410f653777_z.jpg",
|
||||
"key": "z"
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
"""
|
||||
json = ''.join(json.split()) + ',\n'
|
||||
response = mock.Mock(text=json)
|
||||
results = flickr_noapi.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 1)
|
||||
self.assertEqual(results[0]['title'], 'This is the title')
|
||||
self.assertEqual(results[0]['url'], 'https://www.flickr.com/photos/59729010@N00/14001294434')
|
||||
self.assertIn('z.jpg', results[0]['img_src'])
|
||||
self.assertIn('z.jpg', results[0]['thumbnail_src'])
|
||||
self.assertIn('Owner', results[0]['author'])
|
||||
|
||||
# no z or n size
|
||||
json = """
|
||||
modelExport: {
|
||||
"legend": [
|
||||
[
|
||||
"search-photos-lite-models",
|
||||
"0",
|
||||
"photos",
|
||||
"_data",
|
||||
"0"
|
||||
]
|
||||
],
|
||||
"main": {
|
||||
"search-photos-lite-models": [
|
||||
{
|
||||
"photos": {
|
||||
"_data": [
|
||||
{
|
||||
"_flickrModelRegistry": "photo-lite-models",
|
||||
"title": "This%20is%20the%20title",
|
||||
"username": "Owner",
|
||||
"pathAlias": "klink692",
|
||||
"realname": "Owner",
|
||||
"license": 0,
|
||||
"ownerNsid": "59729010@N00",
|
||||
"canComment": false,
|
||||
"commentCount": 14,
|
||||
"faveCount": 21,
|
||||
"id": "14001294434",
|
||||
"sizes": {
|
||||
"o": {
|
||||
"displayUrl": "//farm8.staticflickr.com/7246/14001294434_410f653777_o.jpg",
|
||||
"width": 433,
|
||||
"height": 640,
|
||||
"url": "//c4.staticflickr.com/8/7246/14001294434_410f653777_o.jpg",
|
||||
"key": "o"
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
"""
|
||||
json = ''.join(json.split()) + ',\n'
|
||||
response = mock.Mock(text=json)
|
||||
results = flickr_noapi.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 1)
|
||||
self.assertEqual(results[0]['title'], 'This is the title')
|
||||
self.assertEqual(results[0]['url'], 'https://www.flickr.com/photos/59729010@N00/14001294434')
|
||||
self.assertIn('o.jpg', results[0]['img_src'])
|
||||
self.assertIn('o.jpg', results[0]['thumbnail_src'])
|
||||
self.assertIn('Owner', results[0]['author'])
|
||||
|
||||
# no image test
|
||||
json = """
|
||||
modelExport: {
|
||||
"legend": [
|
||||
[
|
||||
"search-photos-lite-models",
|
||||
"0",
|
||||
"photos",
|
||||
"_data",
|
||||
"0"
|
||||
]
|
||||
],
|
||||
"main": {
|
||||
"search-photos-lite-models": [
|
||||
{
|
||||
"photos": {
|
||||
"_data": [
|
||||
{
|
||||
"_flickrModelRegistry": "photo-lite-models",
|
||||
"title": "This is the title",
|
||||
"username": "Owner",
|
||||
"pathAlias": "klink692",
|
||||
"realname": "Owner",
|
||||
"license": 0,
|
||||
"ownerNsid": "59729010@N00",
|
||||
"canComment": false,
|
||||
"commentCount": 14,
|
||||
"faveCount": 21,
|
||||
"id": "14001294434",
|
||||
"sizes": {
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
"""
|
||||
json = ''.join(json.split()) + ',\n'
|
||||
response = mock.Mock(text=json)
|
||||
results = flickr_noapi.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 0)
|
||||
|
||||
# null test
|
||||
json = """
|
||||
modelExport: {
|
||||
"legend": [null],
|
||||
"main": {
|
||||
"search-photos-lite-models": [
|
||||
{
|
||||
"photos": {
|
||||
"_data": [null]
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
"""
|
||||
json = ''.join(json.split()) + ',\n'
|
||||
response = mock.Mock(text=json)
|
||||
results = flickr_noapi.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 0)
|
||||
|
||||
# garbage test
|
||||
json = r"""
|
||||
{"toto":[
|
||||
{"id":200,"name":"Artist Name",
|
||||
"link":"http:\/\/www.flickr.com\/artist\/1217","type":"artist"}
|
||||
]}
|
||||
"""
|
||||
json = ''.join(json.split()) + ',\n'
|
||||
response = mock.Mock(text=json)
|
||||
results = flickr_noapi.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 0)
|
|
@@ -1,103 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from collections import defaultdict
|
||||
import mock
|
||||
from searx.engines import framalibre
|
||||
from searx.testing import SearxTestCase
|
||||
|
||||
|
||||
class TestFramalibreEngine(SearxTestCase):
|
||||
|
||||
def test_request(self):
|
||||
query = 'test_query'
|
||||
dicto = defaultdict(dict)
|
||||
dicto['pageno'] = 0
|
||||
params = framalibre.request(query, dicto)
|
||||
self.assertTrue('url' in params)
|
||||
self.assertTrue(query in params['url'])
|
||||
self.assertTrue('framalibre.org' in params['url'])
|
||||
|
||||
def test_response(self):
|
||||
self.assertRaises(AttributeError, framalibre.response, None)
|
||||
self.assertRaises(AttributeError, framalibre.response, [])
|
||||
self.assertRaises(AttributeError, framalibre.response, '')
|
||||
self.assertRaises(AttributeError, framalibre.response, '[]')
|
||||
|
||||
response = mock.Mock(text='{}')
|
||||
self.assertEqual(framalibre.response(response), [])
|
||||
|
||||
response = mock.Mock(text='{"data": []}')
|
||||
self.assertEqual(framalibre.response(response), [])
|
||||
|
||||
html = u"""
|
||||
<div class="nodes-list-row">
|
||||
<div id="node-431"
|
||||
class="node node-logiciel-annuaires node-promoted node-teaser node-teaser node-sheet clearfix nodes-list"
|
||||
about="/content/gogs" typeof="sioc:Item foaf:Document">
|
||||
<header class="media">
|
||||
<div class="media-left">
|
||||
<div class="field field-name-field-logo field-type-image field-label-hidden">
|
||||
<div class="field-items">
|
||||
<div class="field-item even">
|
||||
<a href="/content/gogs">
|
||||
<img class="media-object img-responsive" typeof="foaf:Image"
|
||||
src="https://framalibre.org/sites/default/files/styles/teaser_logo/public/leslogos/gogs-lg.png?itok=rrCxKKBy"
|
||||
width="70" height="70" alt="" />
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="media-body">
|
||||
<h3 class="node-title"><a href="/content/gogs">Gogs</a></h3>
|
||||
<span property="dc:title" content="Gogs" class="rdf-meta element-hidden"></span>
|
||||
<div class="field field-name-field-annuaires field-type-taxonomy-term-reference field-label-hidden">
|
||||
<div class="field-items">
|
||||
<div class="field-item even">
|
||||
<a href="/annuaires/cloudwebapps"
|
||||
typeof="skos:Concept" property="rdfs:label skos:prefLabel"
|
||||
datatype="" class="label label-primary">Cloud/webApps</a>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</header>
|
||||
<div class="content">
|
||||
<div class="field field-name-field-votre-appr-ciation field-type-fivestar field-label-hidden">
|
||||
<div class="field-items">
|
||||
<div class="field-item even">
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="field field-name-body field-type-text-with-summary field-label-hidden">
|
||||
<div class="field-items">
|
||||
<div class="field-item even" property="content:encoded">
|
||||
<p>Gogs est une interface web basée sur git et une bonne alternative à GitHub.</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<footer>
|
||||
<a href="/content/gogs" class="read-more btn btn-default btn-sm">Voir la notice</a>
|
||||
<div class="field field-name-field-lien-officiel field-type-link-field field-label-hidden">
|
||||
<div class="field-items">
|
||||
<div class="field-item even">
|
||||
<a href="https://gogs.io/" target="_blank" title="Voir le site officiel">
|
||||
<span class="glyphicon glyphicon-globe"></span>
|
||||
<span class="sr-only">Lien officiel</span>
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</footer>
|
||||
</div>
|
||||
</div>
|
||||
"""
|
||||
response = mock.Mock(text=html)
|
||||
results = framalibre.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 1)
|
||||
self.assertEqual(results[0]['title'], 'Gogs')
|
||||
self.assertEqual(results[0]['url'],
|
||||
'https://framalibre.org/content/gogs')
|
||||
self.assertEqual(results[0]['content'],
|
||||
u"Gogs est une interface web basée sur git et une bonne alternative à GitHub.")
|
|
@@ -1,50 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from collections import defaultdict
|
||||
import mock
|
||||
from searx.engines import frinkiac
|
||||
from searx.testing import SearxTestCase
|
||||
|
||||
|
||||
class TestFrinkiacEngine(SearxTestCase):
|
||||
|
||||
def test_request(self):
|
||||
query = 'test_query'
|
||||
request_dict = defaultdict(dict)
|
||||
params = frinkiac.request(query, request_dict)
|
||||
self.assertTrue('url' in params)
|
||||
|
||||
def test_response(self):
|
||||
self.assertRaises(AttributeError, frinkiac.response, None)
|
||||
self.assertRaises(AttributeError, frinkiac.response, [])
|
||||
self.assertRaises(AttributeError, frinkiac.response, '')
|
||||
self.assertRaises(AttributeError, frinkiac.response, '[]')
|
||||
|
||||
text = """
|
||||
[{"Id":770931,
|
||||
"Episode":"S06E18",
|
||||
"Timestamp":534616,
|
||||
"Filename":""},
|
||||
{"Id":1657080,
|
||||
"Episode":"S12E14",
|
||||
"Timestamp":910868,
|
||||
"Filename":""},
|
||||
{"Id":1943753,
|
||||
"Episode":"S14E21",
|
||||
"Timestamp":773439,
|
||||
"Filename":""},
|
||||
{"Id":107835,
|
||||
"Episode":"S02E03",
|
||||
"Timestamp":531709,
|
||||
"Filename":""}]
|
||||
"""
|
||||
|
||||
response = mock.Mock(text=text)
|
||||
results = frinkiac.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 4)
|
||||
self.assertEqual(results[0]['title'], u'S06E18')
|
||||
self.assertIn('p=caption', results[0]['url'])
|
||||
self.assertIn('e=S06E18', results[0]['url'])
|
||||
self.assertIn('t=534616', results[0]['url'])
|
||||
self.assertEqual(results[0]['thumbnail_src'], 'https://frinkiac.com/img/S06E18/534616/medium.jpg')
|
||||
self.assertEqual(results[0]['img_src'], 'https://frinkiac.com/img/S06E18/534616.jpg')
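# A hedged sketch of the URL construction the assertions above imply; the exact query path
# is an assumption, only the 'p=caption', 'e=...' and 't=...' parameters and the
# /img/<episode>/<timestamp> image paths are taken from the expected values:
def _sketch_frinkiac_urls(episode, timestamp):
    base = 'https://frinkiac.com'
    return {
        'url': '{0}/?p=caption&e={1}&t={2}'.format(base, episode, timestamp),
        'thumbnail_src': '{0}/img/{1}/{2}/medium.jpg'.format(base, episode, timestamp),
        'img_src': '{0}/img/{1}/{2}.jpg'.format(base, episode, timestamp),
    }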
|
|
@@ -1,231 +0,0 @@
|
|||
from collections import defaultdict
|
||||
import mock
|
||||
from datetime import datetime
|
||||
from searx.engines import genius
|
||||
from searx.testing import SearxTestCase
|
||||
|
||||
|
||||
class TestGeniusEngine(SearxTestCase):
|
||||
|
||||
def test_request(self):
|
||||
query = 'test_query'
|
||||
dicto = defaultdict(dict)
|
||||
dicto['pageno'] = 1
|
||||
params = genius.request(query, dicto)
|
||||
self.assertTrue('url' in params)
|
||||
self.assertTrue(query in params['url'])
|
||||
self.assertTrue('genius.com' in params['url'])
|
||||
|
||||
def test_response(self):
|
||||
|
||||
json_empty = """
|
||||
{
|
||||
"meta": {
|
||||
"status": 200
|
||||
},
|
||||
"response": {
|
||||
"sections": [
|
||||
{
|
||||
"type": "top_hit",
|
||||
"hits": []
|
||||
},
|
||||
{
|
||||
"type": "song",
|
||||
"hits": []
|
||||
},
|
||||
{
|
||||
"type": "lyric",
|
||||
"hits": []
|
||||
},
|
||||
{
|
||||
"type": "artist",
|
||||
"hits": []
|
||||
},
|
||||
{
|
||||
"type": "album",
|
||||
"hits": []
|
||||
},
|
||||
{
|
||||
"type": "tag",
|
||||
"hits": []
|
||||
},
|
||||
{
|
||||
"type": "video",
|
||||
"hits": []
|
||||
},
|
||||
{
|
||||
"type": "article",
|
||||
"hits": []
|
||||
},
|
||||
{
|
||||
"type": "user",
|
||||
"hits": []
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
resp = mock.Mock(text=json_empty)
|
||||
self.assertEqual(genius.response(resp), [])
|
||||
|
||||
json = """
|
||||
{
|
||||
"meta": {
|
||||
"status": 200
|
||||
},
|
||||
"response": {
|
||||
"sections": [
|
||||
{
|
||||
"type": "lyric",
|
||||
"hits": [
|
||||
{
|
||||
"highlights": [
|
||||
{
|
||||
"property": "lyrics",
|
||||
"value": "Sample lyrics",
|
||||
"snippet": true,
|
||||
"ranges": []
|
||||
}
|
||||
],
|
||||
"index": "lyric",
|
||||
"type": "song",
|
||||
"result": {
|
||||
"_type": "song",
|
||||
"annotation_count": 45,
|
||||
"api_path": "/songs/52916",
|
||||
"full_title": "J't'emmerde by MC Jean Gab'1",
|
||||
"header_image_thumbnail_url": "https://images.genius.com/xxx.300x300x1.jpg",
|
||||
"header_image_url": "https://images.genius.com/ef9f736a86df3c3b1772f3fb7fbdb21c.1000x1000x1.jpg",
|
||||
"id": 52916,
|
||||
"instrumental": false,
|
||||
"lyrics_owner_id": 15586,
|
||||
"lyrics_state": "complete",
|
||||
"lyrics_updated_at": 1498744545,
|
||||
"path": "/Mc-jean-gab1-jtemmerde-lyrics",
|
||||
"pyongs_count": 4,
|
||||
"song_art_image_thumbnail_url": "https://images.genius.com/xxx.300x300x1.jpg",
|
||||
"stats": {
|
||||
"hot": false,
|
||||
"unreviewed_annotations": 0,
|
||||
"pageviews": 62490
|
||||
},
|
||||
"title": "J't'emmerde",
|
||||
"title_with_featured": "J't'emmerde",
|
||||
"updated_by_human_at": 1498744546,
|
||||
"url": "https://genius.com/Mc-jean-gab1-jtemmerde-lyrics",
|
||||
"primary_artist": {
|
||||
"_type": "artist",
|
||||
"api_path": "/artists/12691",
|
||||
"header_image_url": "https://images.genius.com/c7847662a58f8c2b0f02a6e217d60907.960x657x1.jpg",
|
||||
"id": 12691,
|
||||
"image_url": "https://s3.amazonaws.com/rapgenius/Mc-jean-gab1.jpg",
|
||||
"index_character": "m",
|
||||
"is_meme_verified": false,
|
||||
"is_verified": false,
|
||||
"name": "MC Jean Gab'1",
|
||||
"slug": "Mc-jean-gab1",
|
||||
"url": "https://genius.com/artists/Mc-jean-gab1"
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "artist",
|
||||
"hits": [
|
||||
{
|
||||
"highlights": [],
|
||||
"index": "artist",
|
||||
"type": "artist",
|
||||
"result": {
|
||||
"_type": "artist",
|
||||
"api_path": "/artists/191580",
|
||||
"header_image_url": "https://assets.genius.com/images/default_avatar_300.png?1503090542",
|
||||
"id": 191580,
|
||||
"image_url": "https://assets.genius.com/images/default_avatar_300.png?1503090542",
|
||||
"index_character": "a",
|
||||
"is_meme_verified": false,
|
||||
"is_verified": false,
|
||||
"name": "ASDF Guy",
|
||||
"slug": "Asdf-guy",
|
||||
"url": "https://genius.com/artists/Asdf-guy"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "album",
|
||||
"hits": [
|
||||
{
|
||||
"highlights": [],
|
||||
"index": "album",
|
||||
"type": "album",
|
||||
"result": {
|
||||
"_type": "album",
|
||||
"api_path": "/albums/132332",
|
||||
"cover_art_thumbnail_url": "https://images.genius.com/xxx.300x300x1.jpg",
|
||||
"cover_art_url": "https://images.genius.com/xxx.600x600x1.jpg",
|
||||
"full_title": "ASD by A Skylit Drive",
|
||||
"id": 132332,
|
||||
"name": "ASD",
|
||||
"name_with_artist": "ASD (artist: A Skylit Drive)",
|
||||
"release_date_components": {
|
||||
"year": 2015,
|
||||
"month": null,
|
||||
"day": null
|
||||
},
|
||||
"url": "https://genius.com/albums/A-skylit-drive/Asd",
|
||||
"artist": {
|
||||
"_type": "artist",
|
||||
"api_path": "/artists/48712",
|
||||
"header_image_url": "https://images.genius.com/814c1551293172c56306d0e310c6aa89.620x400x1.jpg",
|
||||
"id": 48712,
|
||||
"image_url": "https://images.genius.com/814c1551293172c56306d0e310c6aa89.620x400x1.jpg",
|
||||
"index_character": "s",
|
||||
"is_meme_verified": false,
|
||||
"is_verified": false,
|
||||
"name": "A Skylit Drive",
|
||||
"slug": "A-skylit-drive",
|
||||
"url": "https://genius.com/artists/A-skylit-drive"
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
resp = mock.Mock(text=json)
|
||||
results = genius.response(resp)
|
||||
|
||||
self.assertEqual(len(results), 3)
|
||||
self.assertEqual(type(results), list)
|
||||
|
||||
# check lyric parsing
|
||||
r = results[0]
|
||||
self.assertEqual(r['url'], 'https://genius.com/Mc-jean-gab1-jtemmerde-lyrics')
|
||||
self.assertEqual(r['title'], "J't'emmerde by MC Jean Gab'1")
|
||||
self.assertEqual(r['content'], "Sample lyrics")
|
||||
self.assertEqual(r['template'], 'videos.html')
|
||||
self.assertEqual(r['thumbnail'], 'https://images.genius.com/xxx.300x300x1.jpg')
|
||||
created = datetime.fromtimestamp(1498744545)
|
||||
self.assertEqual(r['publishedDate'], created)
|
||||
|
||||
# check artist parsing
|
||||
r = results[1]
|
||||
self.assertEqual(r['url'], 'https://genius.com/artists/Asdf-guy')
|
||||
self.assertEqual(r['title'], "ASDF Guy")
|
||||
self.assertEqual(r['content'], None)
|
||||
self.assertEqual(r['template'], 'videos.html')
|
||||
self.assertEqual(r['thumbnail'], 'https://assets.genius.com/images/default_avatar_300.png?1503090542')
|
||||
|
||||
# check album parsing
|
||||
r = results[2]
|
||||
self.assertEqual(r['url'], 'https://genius.com/albums/A-skylit-drive/Asd')
|
||||
self.assertEqual(r['title'], "ASD by A Skylit Drive")
|
||||
self.assertEqual(r['content'], "Released: 2015")
|
||||
self.assertEqual(r['template'], 'videos.html')
|
||||
self.assertEqual(r['thumbnail'], 'https://images.genius.com/xxx.600x600x1.jpg')
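# A hedged sketch (an assumed mapping, not the engine's actual code) of how an 'album' hit
# from the fixture could produce the fields asserted above:
def _sketch_album_result(hit):
    album = hit['result']
    year = (album.get('release_date_components') or {}).get('year')
    return {
        'url': album['url'],
        'title': album['full_title'],
        'content': 'Released: {0}'.format(year) if year else None,
        'thumbnail': album['cover_art_url'],
        'template': 'videos.html',
    }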
|
|
@@ -1,119 +0,0 @@
|
|||
from collections import defaultdict
|
||||
import mock
|
||||
from searx.engines import gigablast
|
||||
from searx.testing import SearxTestCase
|
||||
|
||||
|
||||
class TestGigablastEngine(SearxTestCase):
|
||||
|
||||
def test_request(self):
|
||||
query = 'test_query'
|
||||
dicto = defaultdict(dict)
|
||||
dicto['pageno'] = 0
|
||||
dicto['safesearch'] = 0
|
||||
dicto['language'] = 'all'
|
||||
params = gigablast.request(query, dicto)
|
||||
self.assertTrue('url' in params)
|
||||
self.assertTrue(query in params['url'])
|
||||
self.assertTrue('gigablast.com' in params['url'])
|
||||
self.assertTrue('xx' in params['url'])
|
||||
|
||||
dicto['language'] = 'en-US'
|
||||
params = gigablast.request(query, dicto)
|
||||
self.assertTrue('en' in params['url'])
|
||||
self.assertFalse('en-US' in params['url'])
|
||||
|
||||
def test_response(self):
|
||||
self.assertRaises(AttributeError, gigablast.response, None)
|
||||
self.assertRaises(AttributeError, gigablast.response, [])
|
||||
self.assertRaises(AttributeError, gigablast.response, '')
|
||||
self.assertRaises(AttributeError, gigablast.response, '[]')
|
||||
|
||||
response = mock.Mock(text='{"results": []}')
|
||||
self.assertEqual(gigablast.response(response), [])
|
||||
|
||||
json = """{"results": [
|
||||
{
|
||||
"title":"South by Southwest 2016",
|
||||
"dmozEntry":{
|
||||
"dmozCatId":1041152,
|
||||
"directCatId":1,
|
||||
"dmozCatStr":"Top: Regional: North America: United States",
|
||||
"dmozTitle":"South by Southwest (SXSW)",
|
||||
"dmozSum":"Annual music, film, and interactive conference.",
|
||||
"dmozAnchor":""
|
||||
},
|
||||
"dmozEntry":{
|
||||
"dmozCatId":763945,
|
||||
"directCatId":1,
|
||||
"dmozCatStr":"Top: Regional: North America: United States",
|
||||
"dmozTitle":"South by Southwest (SXSW)",
|
||||
"dmozSum":"",
|
||||
"dmozAnchor":"www.sxsw.com"
|
||||
},
|
||||
"dmozEntry":{
|
||||
"dmozCatId":761446,
|
||||
"directCatId":1,
|
||||
"dmozCatStr":"Top: Regional: North America: United States",
|
||||
"dmozTitle":"South by Southwest (SXSW)",
|
||||
"dmozSum":"Music, film, and interactive conference and festival.",
|
||||
"dmozAnchor":""
|
||||
},
|
||||
"indirectDmozCatId":1041152,
|
||||
"indirectDmozCatId":763945,
|
||||
"indirectDmozCatId":761446,
|
||||
"contentType":"html",
|
||||
"sum":"This should be the content.",
|
||||
"url":"www.sxsw.com",
|
||||
"hopCount":0,
|
||||
"size":" 102k",
|
||||
"sizeInBytes":104306,
|
||||
"bytesUsedToComputeSummary":70000,
|
||||
"docId":269411794364,
|
||||
"docScore":586571136.000000,
|
||||
"summaryGenTimeMS":12,
|
||||
"summaryTagdbLookupTimeMS":0,
|
||||
"summaryTitleRecLoadTimeMS":1,
|
||||
"site":"www.sxsw.com",
|
||||
"spidered":1452203608,
|
||||
"firstIndexedDateUTC":1444167123,
|
||||
"contentHash32":2170650347,
|
||||
"language":"English",
|
||||
"langAbbr":"en"
|
||||
}
|
||||
]}
|
||||
"""
|
||||
response = mock.Mock(text=json)
|
||||
results = gigablast.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 1)
|
||||
self.assertEqual(results[0]['title'], 'South by Southwest 2016')
|
||||
self.assertEqual(results[0]['url'], 'www.sxsw.com')
|
||||
self.assertEqual(results[0]['content'], 'This should be the content.')
|
||||
|
||||
def test_fetch_supported_languages(self):
|
||||
html = """<html></html>"""
|
||||
response = mock.Mock(text=html)
|
||||
results = gigablast._fetch_supported_languages(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 0)
|
||||
|
||||
html = """
|
||||
<html>
|
||||
<body>
|
||||
<span id="menu2">
|
||||
<a href="/search?&rxikd=1&qlang=xx"></a>
|
||||
<a href="/search?&rxikd=1&qlang=en"></a>
|
||||
<a href="/search?&rxikd=1&prepend=gblang%3Aen"></a>
|
||||
<a href="/search?&rxikd=1&qlang=zh_"></a>
|
||||
<a href="/search?&rxikd=1&prepend=gblang%3Azh_tw"></a>
|
||||
</span>
|
||||
</body>
|
||||
</html>
|
||||
"""
|
||||
response = mock.Mock(text=html)
|
||||
languages = gigablast._fetch_supported_languages(response)
|
||||
self.assertEqual(type(languages), list)
|
||||
self.assertEqual(len(languages), 2)
|
||||
self.assertIn('en', languages)
|
||||
self.assertIn('zh-TW', languages)
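# A hedged sketch (an assumption, not the engine's exact code) of how the two language
# codes asserted above could be scraped from the menu links in this fixture:
from lxml.html import fromstring

def _sketch_gigablast_languages(text):
    languages = []
    dom = fromstring(text)
    for href in dom.xpath('//span[@id="menu2"]/a/@href'):
        for part in href.split('&'):
            if not (part.startswith('qlang=') or part.startswith('prepend=gblang%3A')):
                continue
            code = part.split('=')[-1].replace('gblang%3A', '')
            if code == 'xx' or code.endswith('_'):
                continue  # the 'xx' and bare 'zh_' entries are excluded in the assertions
            code = 'zh-TW' if code == 'zh_tw' else code
            if code not in languages:
                languages.append(code)
    return languages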
|
|
@@ -1,61 +0,0 @@
|
|||
from collections import defaultdict
|
||||
import mock
|
||||
from searx.engines import github
|
||||
from searx.testing import SearxTestCase
|
||||
|
||||
|
||||
class TestGitHubEngine(SearxTestCase):
|
||||
|
||||
def test_request(self):
|
||||
query = 'test_query'
|
||||
params = github.request(query, defaultdict(dict))
|
||||
self.assertTrue('url' in params)
|
||||
self.assertTrue(query in params['url'])
|
||||
self.assertTrue('github.com' in params['url'])
|
||||
self.assertEqual(params['headers']['Accept'], github.accept_header)
|
||||
|
||||
def test_response(self):
|
||||
self.assertRaises(AttributeError, github.response, None)
|
||||
self.assertRaises(AttributeError, github.response, [])
|
||||
self.assertRaises(AttributeError, github.response, '')
|
||||
self.assertRaises(AttributeError, github.response, '[]')
|
||||
|
||||
response = mock.Mock(text='{}')
|
||||
self.assertEqual(github.response(response), [])
|
||||
|
||||
response = mock.Mock(text='{"items": []}')
|
||||
self.assertEqual(github.response(response), [])
|
||||
|
||||
json = """
|
||||
{
|
||||
"items": [
|
||||
{
|
||||
"name": "title",
|
||||
"html_url": "url",
|
||||
"description": ""
|
||||
}
|
||||
]
|
||||
}
|
||||
"""
|
||||
response = mock.Mock(text=json)
|
||||
results = github.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 1)
|
||||
self.assertEqual(results[0]['title'], 'title')
|
||||
self.assertEqual(results[0]['url'], 'url')
|
||||
self.assertEqual(results[0]['content'], '')
|
||||
|
||||
json = """
|
||||
{
|
||||
"items": [
|
||||
{
|
||||
"name": "title",
|
||||
"html_url": "url",
|
||||
"description": "desc"
|
||||
}
|
||||
]
|
||||
}
|
||||
"""
|
||||
response = mock.Mock(text=json)
|
||||
results = github.response(response)
|
||||
self.assertEqual(results[0]['content'], "desc")
|
|
@@ -1,194 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from collections import defaultdict
|
||||
import mock
|
||||
import lxml
|
||||
from searx.engines import google
|
||||
from searx.testing import SearxTestCase
|
||||
|
||||
|
||||
class TestGoogleEngine(SearxTestCase):
|
||||
|
||||
def mock_response(self, text):
|
||||
response = mock.Mock(text=text, url='https://www.google.com/search?q=test&start=0&gbv=1&gws_rd=cr')
|
||||
response.search_params = mock.Mock()
|
||||
response.search_params.get = mock.Mock(return_value='www.google.com')
|
||||
return response
|
||||
|
||||
def test_request(self):
|
||||
google.supported_languages = ['en', 'fr', 'zh-CN', 'iw']
|
||||
google.language_aliases = {'he': 'iw'}
|
||||
|
||||
query = 'test_query'
|
||||
dicto = defaultdict(dict)
|
||||
dicto['pageno'] = 1
|
||||
dicto['language'] = 'fr-FR'
|
||||
dicto['time_range'] = ''
|
||||
params = google.request(query, dicto)
|
||||
self.assertIn('url', params)
|
||||
self.assertIn(query, params['url'])
|
||||
self.assertIn('google.fr', params['url'])
|
||||
self.assertIn('fr', params['url'])
|
||||
self.assertIn('fr', params['headers']['Accept-Language'])
|
||||
|
||||
dicto['language'] = 'en-US'
|
||||
params = google.request(query, dicto)
|
||||
self.assertIn('google.com', params['url'])
|
||||
self.assertIn('en', params['url'])
|
||||
self.assertIn('en', params['headers']['Accept-Language'])
|
||||
|
||||
dicto['language'] = 'zh'
|
||||
params = google.request(query, dicto)
|
||||
self.assertIn('google.com', params['url'])
|
||||
self.assertIn('zh-CN', params['url'])
|
||||
self.assertIn('zh-CN', params['headers']['Accept-Language'])
|
||||
|
||||
dicto['language'] = 'he'
|
||||
params = google.request(query, dicto)
|
||||
self.assertIn('google.com', params['url'])
|
||||
self.assertIn('iw', params['url'])
|
||||
self.assertIn('iw', params['headers']['Accept-Language'])
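# A hedged illustration (not searx's actual matching utility) of the language handling the
# assertions above rely on: an alias such as 'he' -> 'iw' is applied first, then the code
# is matched against supported_languages, falling back to an entry with the same base:
def _sketch_match_language(code, supported, aliases):
    base = code.split('-')[0]
    code = aliases.get(base, code)
    if code in supported:
        return code
    base = aliases.get(base, base)
    for lang in supported:
        if lang.split('-')[0] == base:
            return lang
    return supported[0]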
|
||||
|
||||
def test_response(self):
|
||||
self.assertRaises(AttributeError, google.response, None)
|
||||
self.assertRaises(AttributeError, google.response, [])
|
||||
self.assertRaises(AttributeError, google.response, '')
|
||||
self.assertRaises(AttributeError, google.response, '[]')
|
||||
|
||||
response = self.mock_response('<html></html>')
|
||||
self.assertEqual(google.response(response), [])
|
||||
|
||||
html = """
|
||||
<div class="ZINbbc xpd O9g5cc uUPGi">
|
||||
<div>
|
||||
<div class="kCrYT">
|
||||
<a href="/url?q=http://this.should.be.the.link/">
|
||||
<div class="BNeawe">
|
||||
<b>This</b> is <b>the</b> title
|
||||
</div>
|
||||
<div class="BNeawe">
|
||||
http://website
|
||||
</div>
|
||||
</a>
|
||||
</div>
|
||||
<div class="kCrYT">
|
||||
<div>
|
||||
<div class="BNeawe">
|
||||
<div>
|
||||
<div class="BNeawe">
|
||||
This should be the content.
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</p>
|
||||
<div class="ZINbbc xpd O9g5cc uUPGi">
|
||||
<div>
|
||||
<div class="kCrYT">
|
||||
<span>
|
||||
<div class="BNeawe">
|
||||
Related searches
|
||||
</div>
|
||||
</span>
|
||||
</div>
|
||||
<div class="rVLSBd">
|
||||
<a>
|
||||
<div>
|
||||
<div class="BNeawe">
|
||||
suggestion title
|
||||
</div>
|
||||
</div>
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
</p>
|
||||
"""
|
||||
response = self.mock_response(html)
|
||||
results = google.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 2)
|
||||
self.assertEqual(results[0]['title'], 'This is the title')
|
||||
self.assertEqual(results[0]['url'], 'http://this.should.be.the.link/')
|
||||
self.assertEqual(results[0]['content'], 'This should be the content.')
|
||||
self.assertEqual(results[1]['suggestion'], 'suggestion title')
|
||||
|
||||
html = """
|
||||
<li class="b_algo" u="0|5109|4755453613245655|UAGjXgIrPH5yh-o5oNHRx_3Zta87f_QO">
|
||||
</li>
|
||||
"""
|
||||
response = self.mock_response(html)
|
||||
results = google.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 0)
|
||||
|
||||
response = mock.Mock(text='<html></html>', url='https://sorry.google.com')
|
||||
response.search_params = mock.Mock()
|
||||
response.search_params.get = mock.Mock(return_value='www.google.com')
|
||||
self.assertRaises(RuntimeWarning, google.response, response)
|
||||
|
||||
response = mock.Mock(text='<html></html>', url='https://www.google.com/sorry/IndexRedirect')
|
||||
response.search_params = mock.Mock()
|
||||
response.search_params.get = mock.Mock(return_value='www.google.com')
|
||||
self.assertRaises(RuntimeWarning, google.response, response)
|
||||
|
||||
def test_parse_images(self):
|
||||
html = """
|
||||
<li>
|
||||
<div>
|
||||
<a href="http://www.google.com/url?q=http://this.is.the.url/">
|
||||
<img style="margin:3px 0;margin-right:6px;padding:0" height="90"
|
||||
src="https://this.is.the.image/image.jpg" width="60" align="middle" alt="" border="0">
|
||||
</a>
|
||||
</div>
|
||||
</li>
|
||||
"""
|
||||
dom = lxml.html.fromstring(html)
|
||||
results = google.parse_images(dom, 'www.google.com')
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 1)
|
||||
self.assertEqual(results[0]['url'], 'http://this.is.the.url/')
|
||||
self.assertEqual(results[0]['title'], '')
|
||||
self.assertEqual(results[0]['content'], '')
|
||||
self.assertEqual(results[0]['img_src'], 'https://this.is.the.image/image.jpg')
|
||||
|
||||
def test_fetch_supported_languages(self):
|
||||
html = """<html></html>"""
|
||||
response = mock.Mock(text=html)
|
||||
languages = google._fetch_supported_languages(response)
|
||||
self.assertEqual(type(languages), dict)
|
||||
self.assertEqual(len(languages), 0)
|
||||
|
||||
html = u"""
|
||||
<html>
|
||||
<body>
|
||||
<div id="langSec">
|
||||
<div>
|
||||
<input name="lr" data-name="english" value="lang_en" />
|
||||
<input name="lr" data-name="中文 (简体)" value="lang_zh-CN" />
|
||||
<input name="lr" data-name="中文 (繁體)" value="lang_zh-TW" />
|
||||
</div>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
"""
|
||||
response = mock.Mock(text=html)
|
||||
languages = google._fetch_supported_languages(response)
|
||||
self.assertEqual(type(languages), dict)
|
||||
self.assertEqual(len(languages), 3)
|
||||
|
||||
self.assertIn('en', languages)
|
||||
self.assertIn('zh-CN', languages)
|
||||
self.assertIn('zh-TW', languages)
|
||||
|
||||
self.assertEquals(type(languages['en']), dict)
|
||||
self.assertEquals(type(languages['zh-CN']), dict)
|
||||
self.assertEquals(type(languages['zh-TW']), dict)
|
||||
|
||||
self.assertIn('name', languages['en'])
|
||||
self.assertIn('name', languages['zh-CN'])
|
||||
self.assertIn('name', languages['zh-TW'])
|
||||
|
||||
self.assertEquals(languages['en']['name'], 'English')
|
||||
self.assertEquals(languages['zh-CN']['name'], u'中文 (简体)')
|
||||
self.assertEquals(languages['zh-TW']['name'], u'中文 (繁體)')
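# A hedged sketch (assumed selectors, not the engine's exact code) of a parser matching the
# fixture and assertions above: each <input name="lr"> contributes a 'lang_*' code and a
# display name:
from lxml.html import fromstring

def _sketch_google_languages(text):
    languages = {}
    dom = fromstring(text)
    for option in dom.xpath('//*[@id="langSec"]//input[@name="lr"]'):
        code = option.get('value').replace('lang_', '')
        # 'english' -> 'English'; names without cased characters are left untouched
        languages[code] = {'name': option.get('data-name').title()}
    return languages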
|
|
@@ -1,27 +0,0 @@
|
|||
from collections import defaultdict
|
||||
import mock
|
||||
from searx.engines import google_images
|
||||
from searx.testing import SearxTestCase
|
||||
|
||||
|
||||
class TestGoogleImagesEngine(SearxTestCase):
|
||||
|
||||
def test_request(self):
|
||||
query = 'test_query'
|
||||
dicto = defaultdict(dict)
|
||||
dicto['pageno'] = 1
|
||||
dicto['safesearch'] = 1
|
||||
dicto['time_range'] = ''
|
||||
params = google_images.request(query, dicto)
|
||||
self.assertIn('url', params)
|
||||
self.assertIn(query, params['url'])
|
||||
|
||||
dicto['safesearch'] = 0
|
||||
params = google_images.request(query, dicto)
|
||||
self.assertNotIn('safe', params['url'])
|
||||
|
||||
def test_response(self):
|
||||
self.assertRaises(AttributeError, google_images.response, None)
|
||||
self.assertRaises(AttributeError, google_images.response, [])
|
||||
self.assertRaises(AttributeError, google_images.response, '')
|
||||
self.assertRaises(AttributeError, google_images.response, '[]')
|
|
@@ -1,102 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
from collections import defaultdict
|
||||
import mock
|
||||
from searx.engines import google_news
|
||||
from searx.testing import SearxTestCase
|
||||
|
||||
|
||||
class TestGoogleNewsEngine(SearxTestCase):
|
||||
|
||||
def test_request(self):
|
||||
google_news.supported_languages = ['en-US', 'fr-FR']
|
||||
google_news.language_aliases = {}
|
||||
query = 'test_query'
|
||||
dicto = defaultdict(dict)
|
||||
dicto['pageno'] = 1
|
||||
dicto['language'] = 'fr-FR'
|
||||
dicto['time_range'] = 'w'
|
||||
params = google_news.request(query, dicto)
|
||||
self.assertIn('url', params)
|
||||
self.assertIn(query, params['url'])
|
||||
self.assertIn('fr', params['url'])
|
||||
|
||||
dicto['language'] = 'all'
|
||||
params = google_news.request(query, dicto)
|
||||
self.assertIn('url', params)
|
||||
self.assertNotIn('fr', params['url'])
|
||||
|
||||
def test_response(self):
|
||||
self.assertRaises(AttributeError, google_news.response, None)
|
||||
self.assertRaises(AttributeError, google_news.response, [])
|
||||
self.assertRaises(AttributeError, google_news.response, '')
|
||||
self.assertRaises(AttributeError, google_news.response, '[]')
|
||||
|
||||
response = mock.Mock(text='{}')
|
||||
self.assertEqual(google_news.response(response), [])
|
||||
|
||||
response = mock.Mock(text='{"data": []}')
|
||||
self.assertEqual(google_news.response(response), [])
|
||||
|
||||
html = u"""
|
||||
<h2 class="hd">Search Results</h2>
|
||||
<div data-async-context="query:searx" id="ires">
|
||||
<div eid="oC2oWcGXCafR6ASkwoCwDA" id="rso">
|
||||
<div class="_NId">
|
||||
<!--m-->
|
||||
<div class="g _cy">
|
||||
<div class="ts _JGs _JHs _tJs _KGs _jHs">
|
||||
<div class="_hJs">
|
||||
<h3 class="r _gJs">
|
||||
<a class="l lLrAF" href="https://example.com/" onmousedown="return rwt(this,'','','','11','AFQjCNEyehpzD5cJK1KUfXBx9RmsbqqG9g','','0ahUKEwjB58OR54HWAhWnKJoKHSQhAMY4ChCpAggiKAAwAA','','',event)">Example title</a>
|
||||
</h3>
|
||||
<div class="slp">
|
||||
<span class="_OHs _PHs">
|
||||
Mac & i</span>
|
||||
<span class="_QGs">
|
||||
-</span>
|
||||
<span class="f nsa _QHs">
|
||||
Mar 21, 2016</span>
|
||||
</div>
|
||||
<div class="st">Example description</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="g _cy">
|
||||
<div class="ts _JGs _JHs _oGs _KGs _jHs">
|
||||
<a class="top _xGs _SHs" href="https://example2.com/" onmousedown="return rwt(this,'','','','12','AFQjCNHObfH7sYmLWI1SC-YhWXKZFRzRjw','','0ahUKEwjB58OR54HWAhWnKJoKHSQhAMY4ChC8iAEIJDAB','','',event)">
|
||||
<img class="th _RGs" src="https://example2.com/image.jpg" alt="Story image for searx from Golem.de" onload="typeof google==='object'&&google.aft&&google.aft(this)">
|
||||
</a>
|
||||
<div class="_hJs">
|
||||
<h3 class="r _gJs">
|
||||
<a class="l lLrAF" href="https://example2.com/" onmousedown="return rwt(this,'','','','12','AFQjCNHObfH7sYmLWI1SC-YhWXKZFRzRjw','','0ahUKEwjB58OR54HWAhWnKJoKHSQhAMY4ChCpAgglKAAwAQ','','',event)">Example title 2</a>
|
||||
</h3>
|
||||
<div class="slp">
|
||||
<span class="_OHs _PHs">
|
||||
Golem.de</span>
|
||||
<span class="_QGs">
|
||||
-</span>
|
||||
<span class="f nsa _QHs">
|
||||
Oct 4, 2016</span>
|
||||
</div>
|
||||
<div class="st">Example description 2</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
|
||||
""" # noqa
|
||||
response = mock.Mock(text=html)
|
||||
results = google_news.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 2)
|
||||
self.assertEqual(results[0]['title'], u'Example title')
|
||||
self.assertEqual(results[0]['url'], 'https://example.com/')
|
||||
self.assertEqual(results[0]['content'], 'Example description')
|
||||
self.assertEqual(results[1]['title'], u'Example title 2')
|
||||
self.assertEqual(results[1]['url'], 'https://example2.com/')
|
||||
self.assertEqual(results[1]['content'], 'Example description 2')
|
||||
self.assertEqual(results[1]['img_src'], 'https://example2.com/image.jpg')
|
|
@@ -1,79 +0,0 @@
|
|||
from collections import defaultdict
|
||||
import mock
|
||||
from searx.engines import google_videos
|
||||
from searx.testing import SearxTestCase
|
||||
|
||||
|
||||
class TestGoogleVideosEngine(SearxTestCase):
|
||||
|
||||
def test_request(self):
|
||||
query = 'test_query'
|
||||
dicto = defaultdict(dict)
|
||||
dicto['pageno'] = 1
|
||||
dicto['safesearch'] = 1
|
||||
dicto['time_range'] = ''
|
||||
params = google_videos.request(query, dicto)
|
||||
self.assertIn('url', params)
|
||||
self.assertIn(query, params['url'])
|
||||
|
||||
dicto['safesearch'] = 0
|
||||
params = google_videos.request(query, dicto)
|
||||
self.assertNotIn('safe', params['url'])
|
||||
|
||||
def test_response(self):
|
||||
self.assertRaises(AttributeError, google_videos.response, None)
|
||||
self.assertRaises(AttributeError, google_videos.response, [])
|
||||
self.assertRaises(AttributeError, google_videos.response, '')
|
||||
self.assertRaises(AttributeError, google_videos.response, '[]')
|
||||
|
||||
html = r"""
|
||||
<div>
|
||||
<div>
|
||||
<div class="g">
|
||||
<div class="r">
|
||||
<a href="url_1"><h3>Title 1</h3></a>
|
||||
</div>
|
||||
<div class="s">
|
||||
<div>
|
||||
<a>
|
||||
<g-img>
|
||||
<img id="vidthumb1">
|
||||
</g-img>
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
<div>
|
||||
<span class="st">Content 1</span>
|
||||
</div>
|
||||
</div>
|
||||
<div class="g">
|
||||
<div class="r">
|
||||
<a href="url_2"><h3>Title 2</h3></a>
|
||||
</div>
|
||||
<div class="s">
|
||||
<div>
|
||||
<a>
|
||||
<g-img>
|
||||
<img id="vidthumb2">
|
||||
</g-img>
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
<div>
|
||||
<span class="st">Content 2</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<script>function _setImagesSrc(c,d,e){}</script>
|
||||
"""
|
||||
response = mock.Mock(text=html)
|
||||
results = google_videos.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 2)
|
||||
self.assertEqual(results[0]['url'], u'url_1')
|
||||
self.assertEqual(results[0]['title'], u'Title 1')
|
||||
self.assertEqual(results[0]['content'], u'Content 1')
|
||||
self.assertEqual(results[1]['url'], u'url_2')
|
||||
self.assertEqual(results[1]['title'], u'Title 2')
|
||||
self.assertEqual(results[1]['content'], u'Content 2')
|
|
@@ -1,64 +0,0 @@
|
|||
from collections import defaultdict
|
||||
import mock
|
||||
from searx.engines import ina
|
||||
from searx.testing import SearxTestCase
|
||||
|
||||
|
||||
class TestInaEngine(SearxTestCase):
|
||||
|
||||
def test_request(self):
|
||||
query = 'test_query'
|
||||
dicto = defaultdict(dict)
|
||||
dicto['pageno'] = 0
|
||||
params = ina.request(query, dicto)
|
||||
self.assertTrue('url' in params)
|
||||
self.assertTrue(query in params['url'])
|
||||
self.assertTrue('ina.fr' in params['url'])
|
||||
|
||||
def test_response(self):
|
||||
self.assertRaises(AttributeError, ina.response, None)
|
||||
self.assertRaises(AttributeError, ina.response, [])
|
||||
self.assertRaises(AttributeError, ina.response, '')
|
||||
self.assertRaises(AttributeError, ina.response, '[]')
|
||||
|
||||
response = mock.Mock(text='{}')
|
||||
self.assertEqual(ina.response(response), [])
|
||||
|
||||
response = mock.Mock(text='{"data": []}')
|
||||
self.assertEqual(ina.response(response), [])
|
||||
|
||||
json = """
|
||||
{"content":"\\t<div class=\\"container\\">\\n\\t\\n\
|
||||
<!-- DEBUT CONTENU PRINCIPAL -->\\n<div class=\\"row\\">\\n\
|
||||
<div class=\\"search-results--list\\"><div class=\\"media\\">\\n\
|
||||
\\t\\t\\t\\t<a class=\\"media-left media-video premium xiti_click_action\\" \
|
||||
data-xiti-params=\\"recherche_v4::resultats_conference_de_presse_du_general_de_gaulle::N\\" \
|
||||
href=\\"\\/video\\/CAF89035682\\/conference-de-presse-du-general-de-gaulle-video.html\\">\\n\
|
||||
<img src=\\"https:\\/\\/www.ina.fr\\/images_v2\\/140x105\\/CAF89035682.jpeg\\" \
|
||||
alt=\\"Conf\\u00e9rence de presse du G\\u00e9n\\u00e9ral de Gaulle \\">\\n\
|
||||
\\t\\t\\t\\t\\t<\\/a>\\n\
|
||||
\\t\\t\\t\\t\\t<div class=\\"media-body\\">\\n\\t\\t\\t\\t\\t\\t<h3 class=\\"h3--title media-heading\\">\\n\
|
||||
\\t\\t\\t\\t\\t\\t\\t<a class=\\"xiti_click_action\\" \
|
||||
data-xiti-params=\\"recherche_v4::resultats_conference_de_presse_du_general_de_gaulle::N\\" \
|
||||
href=\\"\\/video\\/CAF89035682\\/conference-de-presse-du-general-de-gaulle-video.html\\">\
|
||||
Conf\\u00e9rence de presse du G\\u00e9n\\u00e9ral de Gaulle <\\/a>\\n\
|
||||
<\\/h3>\\n\
|
||||
<div class=\\"media-body__info\\">\\n<span class=\\"broadcast\\">27\\/11\\/1967<\\/span>\\n\
|
||||
<span class=\\"views\\">29321 vues<\\/span>\\n\
|
||||
<span class=\\"duration\\">01h 33m 07s<\\/span>\\n\
|
||||
<\\/div>\\n\
|
||||
<p class=\\"media-body__summary\\">VERSION INTEGRALE DE LA CONFERENCE DE PRESSE DU GENERAL DE GAULLE . \
|
||||
- PA le Pr\\u00e9sident DE GAULLE : il ouvre les bras et s'assied. DP journalis...<\\/p>\\n\
|
||||
<\\/div>\\n<\\/div><!-- \\/.media -->\\n"
|
||||
}
|
||||
"""
|
||||
response = mock.Mock(text=json)
|
||||
results = ina.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 1)
|
||||
self.assertEqual(results[0]['title'], u'Conf\xe9rence de presse du G\xe9n\xe9ral de Gaulle')
|
||||
self.assertEqual(results[0]['url'],
|
||||
'https://www.ina.fr/video/CAF89035682/conference-de-presse-du-general-de-gaulle-video.html')
|
||||
self.assertEqual(results[0]['content'],
|
||||
u"VERSION INTEGRALE DE LA CONFERENCE DE PRESSE DU GENERAL DE GAULLE ."
|
||||
u" - PA le Pr\u00e9sident DE GAULLE : il ouvre les bras et s'assied. DP journalis...")
|
|
@@ -1,397 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from collections import defaultdict
|
||||
import mock
|
||||
from searx.engines import kickass
|
||||
from searx.testing import SearxTestCase
|
||||
|
||||
|
||||
class TestKickassEngine(SearxTestCase):
|
||||
|
||||
def test_request(self):
|
||||
query = 'test_query'
|
||||
dicto = defaultdict(dict)
|
||||
dicto['pageno'] = 1
|
||||
params = kickass.request(query, dicto)
|
||||
self.assertIn('url', params)
|
||||
self.assertIn(query, params['url'])
|
||||
self.assertIn('kickass.cd', params['url'])
|
||||
self.assertFalse(params['verify'])
|
||||
|
||||
def test_response(self):
|
||||
self.assertRaises(AttributeError, kickass.response, None)
|
||||
self.assertRaises(AttributeError, kickass.response, [])
|
||||
self.assertRaises(AttributeError, kickass.response, '')
|
||||
self.assertRaises(AttributeError, kickass.response, '[]')
|
||||
|
||||
response = mock.Mock(text='<html></html>')
|
||||
self.assertEqual(kickass.response(response), [])
|
||||
|
||||
html = """
|
||||
<table cellpadding="0" cellspacing="0" class="data" style="width: 100%">
|
||||
<tr class="firstr">
|
||||
<th class="width100perc nopad">torrent name</th>
|
||||
<th class="center">
|
||||
<a href="/search/test/?field=size&sorder=desc" rel="nofollow">size</a>
|
||||
</th>
|
||||
<th class="center"><span class="files">
|
||||
<a href="/search/test/?field=files_count&sorder=desc" rel="nofollow">files</a></span>
|
||||
</th>
|
||||
<th class="center"><span>
|
||||
<a href="/search/test/?field=time_add&sorder=desc" rel="nofollow">age</a></span>
|
||||
</th>
|
||||
<th class="center"><span class="seed">
|
||||
<a href="/search/test/?field=seeders&sorder=desc" rel="nofollow">seed</a></span>
|
||||
</th>
|
||||
<th class="lasttd nobr center">
|
||||
<a href="/search/test/?field=leechers&sorder=desc" rel="nofollow">leech</a>
|
||||
</th>
|
||||
</tr>
|
||||
<tr class="even" id="torrent_test6478745">
|
||||
<td>
|
||||
<div class="iaconbox center floatright">
|
||||
<a rel="6478745,0" class="icommentjs icon16" href="/test-t6478745.html#comment">
|
||||
<em style="font-size: 12px; margin: 0 4px 0 4px;" class="iconvalue">3</em>
|
||||
<i class="ka ka-comment"></i>
|
||||
</a>
|
||||
<a class="iverify icon16" href="/test-t6478745.html" title="Verified Torrent">
|
||||
<i class="ka ka16 ka-verify ka-green"></i>
|
||||
</a>
|
||||
<a href="#" onclick="_scq.push([]); return false;" class="partner1Button idownload icon16">
|
||||
<i class="ka ka16 ka-arrow-down partner1Button"></i>
|
||||
</a>
|
||||
<a title="Torrent magnet link"
|
||||
href="magnet:?xt=urn:btih:MAGNETURL&dn=test" class="imagnet icon16">
|
||||
<i class="ka ka16 ka-magnet"></i>
|
||||
</a>
|
||||
<a title="Download torrent file"
|
||||
href="http://torcache.net/torrent/53917.torrent?title=test" class="idownload icon16">
|
||||
<i class="ka ka16 ka-arrow-down"></i>
|
||||
</a>
|
||||
</div>
|
||||
<div class="torrentname">
|
||||
<a href="/test-t6478745.html" class="torType txtType"></a>
|
||||
<a href="/test-t6478745.html" class="normalgrey font12px plain bold"></a>
|
||||
<div class="markeredBlock torType txtType">
|
||||
<a href="/url.html" class="cellMainLink">
|
||||
<strong class="red">This should be the title</strong>
|
||||
</a>
|
||||
<span class="font11px lightgrey block">
|
||||
Posted by <i class="ka ka-verify" style="font-size: 16px;color:orange;"></i>
|
||||
<a class="plain" href="/user/riri/">riri</a> in
|
||||
<span id="cat_6478745">
|
||||
<strong><a href="/other/">Other</a> > <a href="/unsorted/">Unsorted</a></strong>
|
||||
</span>
|
||||
</span>
|
||||
</div>
|
||||
</td>
|
||||
<td class="nobr center">449 bytes</td>
|
||||
<td class="center">4</td>
|
||||
<td class="center">2 years</td>
|
||||
<td class="green center">10</td>
|
||||
<td class="red lasttd center">1</td>
|
||||
</tr>
|
||||
</table>
|
||||
"""
|
||||
response = mock.Mock(text=html)
|
||||
results = kickass.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 1)
|
||||
self.assertEqual(results[0]['title'], 'This should be the title')
|
||||
self.assertEqual(results[0]['url'], 'https://kickass.cd/url.html')
|
||||
self.assertEqual(results[0]['content'], 'Posted by riri in Other > Unsorted')
|
||||
self.assertEqual(results[0]['seed'], 10)
|
||||
self.assertEqual(results[0]['leech'], 1)
|
||||
self.assertEqual(results[0]['filesize'], 449)
|
||||
self.assertEqual(results[0]['files'], 4)
|
||||
self.assertEqual(results[0]['magnetlink'], 'magnet:?xt=urn:btih:MAGNETURL&dn=test')
|
||||
self.assertEqual(results[0]['torrentfile'], 'http://torcache.net/torrent/53917.torrent?title=test')
|
||||
|
||||
html = """
|
||||
<table cellpadding="0" cellspacing="0" class="data" style="width: 100%">
|
||||
<tr class="firstr">
|
||||
<th class="width100perc nopad">torrent name</th>
|
||||
<th class="center">
|
||||
<a href="/search/test/?field=size&sorder=desc" rel="nofollow">size</a>
|
||||
</th>
|
||||
<th class="center"><span class="files">
|
||||
<a href="/search/test/?field=files_count&sorder=desc" rel="nofollow">files</a></span>
|
||||
</th>
|
||||
<th class="center"><span>
|
||||
<a href="/search/test/?field=time_add&sorder=desc" rel="nofollow">age</a></span>
|
||||
</th>
|
||||
<th class="center"><span class="seed">
|
||||
<a href="/search/test/?field=seeders&sorder=desc" rel="nofollow">seed</a></span>
|
||||
</th>
|
||||
<th class="lasttd nobr center">
|
||||
<a href="/search/test/?field=leechers&sorder=desc" rel="nofollow">leech</a>
|
||||
</th>
|
||||
</tr>
|
||||
</table>
|
||||
"""
|
||||
response = mock.Mock(text=html)
|
||||
results = kickass.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 0)
|
||||
|
||||
html = """
|
||||
<table cellpadding="0" cellspacing="0" class="data" style="width: 100%">
|
||||
<tr class="firstr">
|
||||
<th class="width100perc nopad">torrent name</th>
|
||||
<th class="center">
|
||||
<a href="/search/test/?field=size&sorder=desc" rel="nofollow">size</a>
|
||||
</th>
|
||||
<th class="center"><span class="files">
|
||||
<a href="/search/test/?field=files_count&sorder=desc" rel="nofollow">files</a></span>
|
||||
</th>
|
||||
<th class="center"><span>
|
||||
<a href="/search/test/?field=time_add&sorder=desc" rel="nofollow">age</a></span>
|
||||
</th>
|
||||
<th class="center"><span class="seed">
|
||||
<a href="/search/test/?field=seeders&sorder=desc" rel="nofollow">seed</a></span>
|
||||
</th>
|
||||
<th class="lasttd nobr center">
|
||||
<a href="/search/test/?field=leechers&sorder=desc" rel="nofollow">leech</a>
|
||||
</th>
|
||||
</tr>
|
||||
<tr class="even" id="torrent_test6478745">
|
||||
<td>
|
||||
<div class="iaconbox center floatright">
|
||||
<a rel="6478745,0" class="icommentjs icon16" href="/test-t6478745.html#comment">
|
||||
<em style="font-size: 12px; margin: 0 4px 0 4px;" class="iconvalue">3</em>
|
||||
<i class="ka ka-comment"></i>
|
||||
</a>
|
||||
<a class="iverify icon16" href="/test-t6478745.html" title="Verified Torrent">
|
||||
<i class="ka ka16 ka-verify ka-green"></i>
|
||||
</a>
|
||||
<a href="#" onclick="_scq.push([]); return false;" class="partner1Button idownload icon16">
|
||||
<i class="ka ka16 ka-arrow-down partner1Button"></i>
|
||||
</a>
|
||||
<a title="Torrent magnet link"
|
||||
href="magnet:?xt=urn:btih:MAGNETURL&dn=test" class="imagnet icon16">
|
||||
<i class="ka ka16 ka-magnet"></i>
|
||||
</a>
|
||||
<a title="Download torrent file"
|
||||
href="http://torcache.net/torrent/53917.torrent?title=test" class="idownload icon16">
|
||||
<i class="ka ka16 ka-arrow-down"></i>
|
||||
</a>
|
||||
</div>
|
||||
<div class="torrentname">
|
||||
<a href="/test-t6478745.html" class="torType txtType"></a>
|
||||
<a href="/test-t6478745.html" class="normalgrey font12px plain bold"></a>
|
||||
<div class="markeredBlock torType txtType">
|
||||
<a href="/url.html" class="cellMainLink">
|
||||
<strong class="red">This should be the title</strong>
|
||||
</a>
|
||||
<span class="font11px lightgrey block">
|
||||
Posted by <i class="ka ka-verify" style="font-size: 16px;color:orange;"></i>
|
||||
<a class="plain" href="/user/riri/">riri</a> in
|
||||
<span id="cat_6478745">
|
||||
<strong><a href="/other/">Other</a> > <a href="/unsorted/">Unsorted</a></strong>
|
||||
</span>
|
||||
</span>
|
||||
</div>
|
||||
</td>
|
||||
<td class="nobr center">1 KiB</td>
|
||||
<td class="center">4</td>
|
||||
<td class="center">2 years</td>
|
||||
<td class="green center">10</td>
|
||||
<td class="red lasttd center">1</td>
|
||||
</tr>
|
||||
<tr class="even" id="torrent_test6478745">
|
||||
<td>
|
||||
<div class="iaconbox center floatright">
|
||||
<a rel="6478745,0" class="icommentjs icon16" href="/test-t6478745.html#comment">
|
||||
<em style="font-size: 12px; margin: 0 4px 0 4px;" class="iconvalue">3</em>
|
||||
<i class="ka ka-comment"></i>
|
||||
</a>
|
||||
<a class="iverify icon16" href="/test-t6478745.html" title="Verified Torrent">
|
||||
<i class="ka ka16 ka-verify ka-green"></i>
|
||||
</a>
|
||||
<a href="#" onclick="_scq.push([]); return false;" class="partner1Button idownload icon16">
|
||||
<i class="ka ka16 ka-arrow-down partner1Button"></i>
|
||||
</a>
|
||||
<a title="Torrent magnet link"
|
||||
href="magnet:?xt=urn:btih:MAGNETURL&dn=test" class="imagnet icon16">
|
||||
<i class="ka ka16 ka-magnet"></i>
|
||||
</a>
|
||||
<a title="Download torrent file"
|
||||
href="http://torcache.net/torrent/53917.torrent?title=test" class="idownload icon16">
|
||||
<i class="ka ka16 ka-arrow-down"></i>
|
||||
</a>
|
||||
</div>
|
||||
<div class="torrentname">
|
||||
<a href="/test-t6478745.html" class="torType txtType"></a>
|
||||
<a href="/test-t6478745.html" class="normalgrey font12px plain bold"></a>
|
||||
<div class="markeredBlock torType txtType">
|
||||
<a href="/url.html" class="cellMainLink">
|
||||
<strong class="red">This should be the title</strong>
|
||||
</a>
|
||||
<span class="font11px lightgrey block">
|
||||
Posted by <i class="ka ka-verify" style="font-size: 16px;color:orange;"></i>
|
||||
<a class="plain" href="/user/riri/">riri</a> in
|
||||
<span id="cat_6478745">
|
||||
<strong><a href="/other/">Other</a> > <a href="/unsorted/">Unsorted</a></strong>
|
||||
</span>
|
||||
</span>
|
||||
</div>
|
||||
</td>
|
||||
<td class="nobr center">1 MiB</td>
|
||||
<td class="center">4</td>
|
||||
<td class="center">2 years</td>
|
||||
<td class="green center">9</td>
|
||||
<td class="red lasttd center">1</td>
|
||||
</tr>
|
||||
<tr class="even" id="torrent_test6478745">
|
||||
<td>
|
||||
<div class="iaconbox center floatright">
|
||||
<a rel="6478745,0" class="icommentjs icon16" href="/test-t6478745.html#comment">
|
||||
<em style="font-size: 12px; margin: 0 4px 0 4px;" class="iconvalue">3</em>
|
||||
<i class="ka ka-comment"></i>
|
||||
</a>
|
||||
<a class="iverify icon16" href="/test-t6478745.html" title="Verified Torrent">
|
||||
<i class="ka ka16 ka-verify ka-green"></i>
|
||||
</a>
|
||||
<a href="#" onclick="_scq.push([]); return false;" class="partner1Button idownload icon16">
|
||||
<i class="ka ka16 ka-arrow-down partner1Button"></i>
|
||||
</a>
|
||||
<a title="Torrent magnet link"
|
||||
href="magnet:?xt=urn:btih:MAGNETURL&dn=test" class="imagnet icon16">
|
||||
<i class="ka ka16 ka-magnet"></i>
|
||||
</a>
|
||||
<a title="Download torrent file"
|
||||
href="http://torcache.net/torrent/53917.torrent?title=test" class="idownload icon16">
|
||||
<i class="ka ka16 ka-arrow-down"></i>
|
||||
</a>
|
||||
</div>
|
||||
<div class="torrentname">
|
||||
<a href="/test-t6478745.html" class="torType txtType"></a>
|
||||
<a href="/test-t6478745.html" class="normalgrey font12px plain bold"></a>
|
||||
<div class="markeredBlock torType txtType">
|
||||
<a href="/url.html" class="cellMainLink">
|
||||
<strong class="red">This should be the title</strong>
|
||||
</a>
|
||||
<span class="font11px lightgrey block">
|
||||
Posted by <i class="ka ka-verify" style="font-size: 16px;color:orange;"></i>
|
||||
<a class="plain" href="/user/riri/">riri</a> in
|
||||
<span id="cat_6478745">
|
||||
<strong><a href="/other/">Other</a> > <a href="/unsorted/">Unsorted</a></strong>
|
||||
</span>
|
||||
</span>
|
||||
</div>
|
||||
</td>
|
||||
<td class="nobr center">1 GiB</td>
|
||||
<td class="center">4</td>
|
||||
<td class="center">2 years</td>
|
||||
<td class="green center">8</td>
|
||||
<td class="red lasttd center">1</td>
|
||||
</tr>
|
||||
<tr class="even" id="torrent_test6478745">
|
||||
<td>
|
||||
<div class="iaconbox center floatright">
|
||||
<a rel="6478745,0" class="icommentjs icon16" href="/test-t6478745.html#comment">
|
||||
<em style="font-size: 12px; margin: 0 4px 0 4px;" class="iconvalue">3</em>
|
||||
<i class="ka ka-comment"></i>
|
||||
</a>
|
||||
<a class="iverify icon16" href="/test-t6478745.html" title="Verified Torrent">
|
||||
<i class="ka ka16 ka-verify ka-green"></i>
|
||||
</a>
|
||||
<a href="#" onclick="_scq.push([]); return false;" class="partner1Button idownload icon16">
|
||||
<i class="ka ka16 ka-arrow-down partner1Button"></i>
|
||||
</a>
|
||||
<a title="Torrent magnet link"
|
||||
href="magnet:?xt=urn:btih:MAGNETURL&dn=test" class="imagnet icon16">
|
||||
<i class="ka ka16 ka-magnet"></i>
|
||||
</a>
|
||||
<a title="Download torrent file"
|
||||
href="http://torcache.net/torrent/53917.torrent?title=test" class="idownload icon16">
|
||||
<i class="ka ka16 ka-arrow-down"></i>
|
||||
</a>
|
||||
</div>
|
||||
<div class="torrentname">
|
||||
<a href="/test-t6478745.html" class="torType txtType"></a>
|
||||
<a href="/test-t6478745.html" class="normalgrey font12px plain bold"></a>
|
||||
<div class="markeredBlock torType txtType">
|
||||
<a href="/url.html" class="cellMainLink">
|
||||
<strong class="red">This should be the title</strong>
|
||||
</a>
|
||||
<span class="font11px lightgrey block">
|
||||
Posted by <i class="ka ka-verify" style="font-size: 16px;color:orange;"></i>
|
||||
<a class="plain" href="/user/riri/">riri</a> in
|
||||
<span id="cat_6478745">
|
||||
<strong><a href="/other/">Other</a> > <a href="/unsorted/">Unsorted</a></strong>
|
||||
</span>
|
||||
</span>
|
||||
</div>
|
||||
</td>
|
||||
<td class="nobr center">1 TiB</td>
|
||||
<td class="center">4</td>
|
||||
<td class="center">2 years</td>
|
||||
<td class="green center">7</td>
|
||||
<td class="red lasttd center">1</td>
|
||||
</tr>
|
||||
<tr class="even" id="torrent_test6478745">
|
||||
<td>
|
||||
<div class="iaconbox center floatright">
|
||||
<a rel="6478745,0" class="icommentjs icon16" href="/test-t6478745.html#comment">
|
||||
<em style="font-size: 12px; margin: 0 4px 0 4px;" class="iconvalue">3</em>
|
||||
<i class="ka ka-comment"></i>
|
||||
</a>
|
||||
<a class="iverify icon16" href="/test-t6478745.html" title="Verified Torrent">
|
||||
<i class="ka ka16 ka-verify ka-green"></i>
|
||||
</a>
|
||||
<a href="#" onclick="_scq.push([]); return false;" class="partner1Button idownload icon16">
|
||||
<i class="ka ka16 ka-arrow-down partner1Button"></i>
|
||||
</a>
|
||||
<a title="Torrent magnet link"
|
||||
href="magnet:?xt=urn:btih:MAGNETURL&dn=test" class="imagnet icon16">
|
||||
<i class="ka ka16 ka-magnet"></i>
|
||||
</a>
|
||||
<a title="Download torrent file"
|
||||
href="http://torcache.net/torrent/53917.torrent?title=test" class="idownload icon16">
|
||||
<i class="ka ka16 ka-arrow-down"></i>
|
||||
</a>
|
||||
</div>
|
||||
<div class="torrentname">
|
||||
<a href="/test-t6478745.html" class="torType txtType"></a>
|
||||
<a href="/test-t6478745.html" class="normalgrey font12px plain bold"></a>
|
||||
<div class="markeredBlock torType txtType">
|
||||
<a href="/url.html" class="cellMainLink">
|
||||
<strong class="red">This should be the title</strong>
|
||||
</a>
|
||||
<span class="font11px lightgrey block">
|
||||
Posted by <i class="ka ka-verify" style="font-size: 16px;color:orange;"></i>
|
||||
<a class="plain" href="/user/riri/">riri</a> in
|
||||
<span id="cat_6478745">
|
||||
<strong><a href="/other/">Other</a> > <a href="/unsorted/">Unsorted</a></strong>
|
||||
</span>
|
||||
</span>
|
||||
</div>
|
||||
</td>
|
||||
<td class="nobr center">z bytes</td>
|
||||
<td class="center">r</td>
|
||||
<td class="center">2 years</td>
|
||||
<td class="green center">a</td>
|
||||
<td class="red lasttd center">t</td>
|
||||
</tr>
|
||||
</table>
|
||||
"""
|
||||
response = mock.Mock(text=html)
|
||||
results = kickass.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 5)
|
||||
self.assertEqual(results[0]['title'], 'This should be the title')
|
||||
self.assertEqual(results[0]['url'], 'https://kickass.cd/url.html')
|
||||
self.assertEqual(results[0]['content'], 'Posted by riri in Other > Unsorted')
|
||||
self.assertEqual(results[0]['seed'], 10)
|
||||
self.assertEqual(results[0]['leech'], 1)
|
||||
self.assertEqual(results[0]['files'], 4)
|
||||
self.assertEqual(results[0]['magnetlink'], 'magnet:?xt=urn:btih:MAGNETURL&dn=test')
|
||||
self.assertEqual(results[0]['torrentfile'], 'http://torcache.net/torrent/53917.torrent?title=test')
|
||||
self.assertEqual(results[0]['filesize'], 1000)
|
||||
self.assertEqual(results[1]['filesize'], 1000000)
|
||||
self.assertEqual(results[2]['filesize'], 1000000000)
|
||||
self.assertEqual(results[3]['filesize'], 1000000000000)
|
||||
self.assertEqual(results[4]['seed'], 0)
|
||||
self.assertEqual(results[4]['leech'], 0)
|
||||
self.assertEqual(results[4]['files'], None)
|
||||
self.assertEqual(results[4]['filesize'], None)
|
|
@@ -1,130 +0,0 @@
# -*- coding: utf-8 -*-
from collections import defaultdict
import mock
from searx.engines import mediawiki
from searx.testing import SearxTestCase


class TestMediawikiEngine(SearxTestCase):

    def test_request(self):
        query = 'test_query'
        dicto = defaultdict(dict)
        dicto['pageno'] = 1
        dicto['language'] = 'fr_FR'
        params = mediawiki.request(query, dicto)
        self.assertIn('url', params)
        self.assertIn(query, params['url'])
        self.assertIn('wikipedia.org', params['url'])
        self.assertIn('fr', params['url'])

        dicto['language'] = 'all'
        params = mediawiki.request(query, dicto)
        self.assertIn('en', params['url'])

        mediawiki.base_url = "http://test.url/"
        mediawiki.search_url = mediawiki.base_url +\
            'w/api.php?action=query'\
            '&list=search'\
            '&{query}'\
            '&srprop=timestamp'\
            '&format=json'\
            '&sroffset={offset}'\
            '&srlimit={limit}'  # noqa
        params = mediawiki.request(query, dicto)
        self.assertIn('test.url', params['url'])

    def test_response(self):
        dicto = defaultdict(dict)
        dicto['language'] = 'fr'
        mediawiki.base_url = "https://{language}.wikipedia.org/"

        self.assertRaises(AttributeError, mediawiki.response, None)
        self.assertRaises(AttributeError, mediawiki.response, [])
        self.assertRaises(AttributeError, mediawiki.response, '')
        self.assertRaises(AttributeError, mediawiki.response, '[]')

        response = mock.Mock(text='{}', search_params=dicto)
        self.assertEqual(mediawiki.response(response), [])

        response = mock.Mock(text='{"data": []}', search_params=dicto)
        self.assertEqual(mediawiki.response(response), [])

        json = """
        {
          "query-continue": {
            "search": {
              "sroffset": 1
            }
          },
          "query": {
            "searchinfo": {
              "totalhits": 29721
            },
            "search": [
              {
                "ns": 0,
                "title": "This is the title étude",
                "timestamp": "2014-12-19T17:42:52Z"
              }
            ]
          }
        }
        """
        response = mock.Mock(text=json, search_params=dicto)
        results = mediawiki.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 1)
        self.assertEqual(results[0]['title'], u'This is the title étude')
        self.assertIn('fr.wikipedia.org', results[0]['url'])
        self.assertIn('This_is_the_title', results[0]['url'])
        self.assertIn('%C3%A9tude', results[0]['url'])
        self.assertEqual(results[0]['content'], '')

        json = """
        {
          "query-continue": {
            "search": {
              "sroffset": 1
            }
          },
          "query": {
            "searchinfo": {
              "totalhits": 29721
            },
            "search": [
            ]
          }
        }
        """
        response = mock.Mock(text=json, search_params=dicto)
        results = mediawiki.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 0)

        json = """
        {
          "query-continue": {
            "search": {
              "sroffset": 1
            }
          },
          "query": {
          }
        }
        """
        response = mock.Mock(text=json, search_params=dicto)
        results = mediawiki.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 0)

        json = r"""
        {"toto":[
          {"id":200,"name":"Artist Name",
          "link":"http:\/\/www.mediawiki.com\/artist\/1217","type":"artist"}
        ]}
        """
        response = mock.Mock(text=json, search_params=dicto)
        results = mediawiki.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 0)
@@ -1,67 +0,0 @@
from collections import defaultdict
import mock
from searx.engines import mixcloud
from searx.testing import SearxTestCase


class TestMixcloudEngine(SearxTestCase):

    def test_request(self):
        query = 'test_query'
        dicto = defaultdict(dict)
        dicto['pageno'] = 0
        params = mixcloud.request(query, dicto)
        self.assertTrue('url' in params)
        self.assertTrue(query in params['url'])
        self.assertTrue('mixcloud.com' in params['url'])

    def test_response(self):
        self.assertRaises(AttributeError, mixcloud.response, None)
        self.assertRaises(AttributeError, mixcloud.response, [])
        self.assertRaises(AttributeError, mixcloud.response, '')
        self.assertRaises(AttributeError, mixcloud.response, '[]')

        response = mock.Mock(text='{}')
        self.assertEqual(mixcloud.response(response), [])

        response = mock.Mock(text='{"data": []}')
        self.assertEqual(mixcloud.response(response), [])

        json = """
        {"data":[
          {
            "user": {
              "url": "http://www.mixcloud.com/user/",
              "username": "user",
              "name": "User",
              "key": "/user/"
            },
            "key": "/user/this-is-the-url/",
            "created_time": "2014-11-14T13:30:02Z",
            "audio_length": 3728,
            "slug": "this-is-the-url",
            "name": "Title of track",
            "url": "http://www.mixcloud.com/user/this-is-the-url/",
            "updated_time": "2014-11-14T13:14:10Z"
          }
        ]}
        """
        response = mock.Mock(text=json)
        results = mixcloud.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 1)
        self.assertEqual(results[0]['title'], 'Title of track')
        self.assertEqual(results[0]['url'], 'http://www.mixcloud.com/user/this-is-the-url/')
        self.assertEqual(results[0]['content'], 'User')
        self.assertTrue('http://www.mixcloud.com/user/this-is-the-url/' in results[0]['embedded'])

        json = r"""
        {"toto":[
          {"id":200,"name":"Artist Name",
          "link":"http:\/\/www.mixcloud.com\/artist\/1217","type":"artist"}
        ]}
        """
        response = mock.Mock(text=json)
        results = mixcloud.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 0)
@@ -1,124 +0,0 @@
|
|||
from collections import defaultdict
|
||||
import mock
|
||||
from searx.engines import nyaa
|
||||
from searx.testing import SearxTestCase
|
||||
|
||||
|
||||
class TestNyaaEngine(SearxTestCase):
|
||||
|
||||
def test_request(self):
|
||||
query = 'test_query'
|
||||
dic = defaultdict(dict)
|
||||
dic['pageno'] = 1
|
||||
params = nyaa.request(query, dic)
|
||||
self.assertTrue('url' in params)
|
||||
self.assertTrue(query in params['url'])
|
||||
self.assertTrue('nyaa.si' in params['url'])
|
||||
|
||||
def test_response(self):
|
||||
resp = mock.Mock(text='<html></html>')
|
||||
self.assertEqual(nyaa.response(resp), [])
|
||||
|
||||
html = """
|
||||
<table class="table table-bordered table-hover table-striped torrent-list">
|
||||
<thead>
|
||||
<tr>
|
||||
<th class="hdr-category text-center" style="width:80px;">
|
||||
<div>Category</div>
|
||||
</th>
|
||||
<th class="hdr-name" style="width:auto;">
|
||||
<div>Name</div>
|
||||
</th>
|
||||
<th class="hdr-comments sorting text-center" title="Comments" style="width:50px;">
|
||||
<a href="/?f=0&c=0_0&q=Death+Parade&s=comments&o=desc"></a>
|
||||
<i class="fa fa-comments-o"></i>
|
||||
</th>
|
||||
<th class="hdr-link text-center" style="width:70px;">
|
||||
<div>Link</div>
|
||||
</th>
|
||||
<th class="hdr-size sorting text-center" style="width:100px;">
|
||||
<a href="/?f=0&c=0_0&q=Death+Parade&s=size&o=desc"></a>
|
||||
<div>Size</div>
|
||||
</th>
|
||||
<th class="hdr-date sorting_desc text-center" title="In local time" style="width:140px;">
|
||||
<a href="/?f=0&c=0_0&q=Death+Parade&s=id&o=asc"></a>
|
||||
<div>Date</div>
|
||||
</th>
|
||||
<th class="hdr-seeders sorting text-center" title="Seeders" style="width:50px;">
|
||||
<a href="/?f=0&c=0_0&q=Death+Parade&s=seeders&o=desc"></a>
|
||||
<i class="fa fa-arrow-up" aria-hidden="true"></i>
|
||||
</th>
|
||||
<th class="hdr-leechers sorting text-center" title="Leechers" style="width:50px;">
|
||||
<a href="/?f=0&c=0_0&q=Death+Parade&s=leechers&o=desc"></a>
|
||||
<i class="fa fa-arrow-down" aria-hidden="true"></i>
|
||||
</th>
|
||||
<th class="hdr-downloads sorting text-center" title="Completed downloads" style="width:50px;">
|
||||
<a href="/?f=0&c=0_0&q=Death+Parade&s=downloads&o=desc"></a>
|
||||
<i class="fa fa-check" aria-hidden="true"></i>
|
||||
</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<tr class="default">
|
||||
<td style="padding:0 4px;">
|
||||
<a href="/?c=1_2" title="Anime - English-translated">
|
||||
<img src="/static/img/icons/nyaa/1_2.png" alt="Anime - English-translated">
|
||||
</a>
|
||||
</td>
|
||||
<td colspan="2">
|
||||
<a href="/view/1" title="Sample title 1">Sample title 1</a>
|
||||
</td>
|
||||
<td class="text-center" style="white-space: nowrap;">
|
||||
<a href="/download/1.torrent"><i class="fa fa-fw fa-download"></i></a>
|
||||
<a href="magnet:?xt=urn:btih:2"><i class="fa fa-fw fa-magnet"></i></a>
|
||||
</td>
|
||||
<td class="text-center">723.7 MiB</td>
|
||||
<td class="text-center" data-timestamp="1503307456" title="1 week 3
|
||||
days 9 hours 44 minutes 39 seconds ago">2017-08-21 11:24</td>
|
||||
<td class="text-center" style="color: green;">1</td>
|
||||
<td class="text-center" style="color: red;">3</td>
|
||||
<td class="text-center">12</td>
|
||||
</tr>
|
||||
<tr class="default">
|
||||
<td style="padding:0 4px;">
|
||||
<a href="/?c=1_2" title="Anime - English-translated">
|
||||
<img src="/static/img/icons/nyaa/1_2.png" alt="Anime - English-translated">
|
||||
</a>
|
||||
</td>
|
||||
<td colspan="2">
|
||||
<a href="/view/2" title="Sample title 2">Sample title 2</a>
|
||||
</td>
|
||||
<td class="text-center" style="white-space: nowrap;">
|
||||
<a href="magnet:?xt=urn:btih:2"><i class="fa fa-fw fa-magnet"></i></a>
|
||||
</td>
|
||||
<td class="text-center">8.2 GiB</td>
|
||||
<td class="text-center" data-timestamp="1491608400" title="4 months 3
|
||||
weeks 4 days 19 hours 28 minutes 55 seconds ago">2017-04-08 01:40</td>
|
||||
<td class="text-center" style="color: green;">10</td>
|
||||
<td class="text-center" style="color: red;">1</td>
|
||||
<td class="text-center">206</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
"""
|
||||
|
||||
resp = mock.Mock(text=html)
|
||||
results = nyaa.response(resp)
|
||||
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 2)
|
||||
|
||||
r = results[0]
|
||||
self.assertTrue(r['url'].find('1') >= 0)
|
||||
self.assertTrue(r['torrentfile'].find('1.torrent') >= 0)
|
||||
self.assertTrue(r['content'].find('Anime - English-translated') >= 0)
|
||||
self.assertTrue(r['content'].find('Downloaded 12 times.') >= 0)
|
||||
|
||||
self.assertEqual(r['title'], 'Sample title 1')
|
||||
self.assertEqual(r['seed'], 1)
|
||||
self.assertEqual(r['leech'], 3)
|
||||
self.assertEqual(r['filesize'], 723700000)
|
||||
|
||||
r = results[1]
|
||||
self.assertTrue(r['url'].find('2') >= 0)
|
||||
self.assertTrue(r['magnetlink'].find('magnet:') >= 0)
|
|
@@ -1,199 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from collections import defaultdict
|
||||
import mock
|
||||
from searx.engines import openstreetmap
|
||||
from searx.testing import SearxTestCase
|
||||
|
||||
|
||||
class TestOpenstreetmapEngine(SearxTestCase):
|
||||
|
||||
def test_request(self):
|
||||
query = 'test_query'
|
||||
dicto = defaultdict(dict)
|
||||
dicto['pageno'] = 1
|
||||
params = openstreetmap.request(query, dicto)
|
||||
self.assertIn('url', params)
|
||||
self.assertIn(query, params['url'])
|
||||
self.assertIn('openstreetmap.org', params['url'])
|
||||
|
||||
def test_response(self):
|
||||
self.assertRaises(AttributeError, openstreetmap.response, None)
|
||||
self.assertRaises(AttributeError, openstreetmap.response, [])
|
||||
self.assertRaises(AttributeError, openstreetmap.response, '')
|
||||
self.assertRaises(AttributeError, openstreetmap.response, '[]')
|
||||
|
||||
response = mock.Mock(text='{}')
|
||||
self.assertEqual(openstreetmap.response(response), [])
|
||||
|
||||
response = mock.Mock(text='{"data": []}')
|
||||
self.assertEqual(openstreetmap.response(response), [])
|
||||
|
||||
json = """
|
||||
[
|
||||
{
|
||||
"place_id": "127732055",
|
||||
"licence": "Data © OpenStreetMap contributors, ODbL 1.0. http://www.openstreetmap.org/copyright",
|
||||
"osm_type": "relation",
|
||||
"osm_id": "7444",
|
||||
"boundingbox": [
|
||||
"48.8155755",
|
||||
"48.902156",
|
||||
"2.224122",
|
||||
"2.4697602"
|
||||
],
|
||||
"lat": "48.8565056",
|
||||
"lon": "2.3521334",
|
||||
"display_name": "This is the title",
|
||||
"class": "place",
|
||||
"type": "city",
|
||||
"importance": 0.96893459932191,
|
||||
"icon": "https://nominatim.openstreetmap.org/images/mapicons/poi_place_city.p.20.png",
|
||||
"address": {
|
||||
"city": "Paris",
|
||||
"county": "Paris",
|
||||
"state": "Île-de-France",
|
||||
"country": "France",
|
||||
"country_code": "fr"
|
||||
},
|
||||
"geojson": {
|
||||
"type": "Polygon",
|
||||
"coordinates": [
|
||||
[
|
||||
[
|
||||
2.224122,
|
||||
48.854199
|
||||
]
|
||||
]
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
"""
|
||||
response = mock.Mock(text=json)
|
||||
results = openstreetmap.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 1)
|
||||
self.assertEqual(results[0]['title'], 'This is the title')
|
||||
self.assertEqual(results[0]['url'], 'https://openstreetmap.org/relation/7444')
|
||||
self.assertIn('coordinates', results[0]['geojson'])
|
||||
self.assertEqual(results[0]['geojson']['coordinates'][0][0][0], 2.224122)
|
||||
self.assertEqual(results[0]['geojson']['coordinates'][0][0][1], 48.854199)
|
||||
self.assertEqual(results[0]['address'], None)
|
||||
self.assertIn('48.8155755', results[0]['boundingbox'])
|
||||
self.assertIn('48.902156', results[0]['boundingbox'])
|
||||
self.assertIn('2.224122', results[0]['boundingbox'])
|
||||
self.assertIn('2.4697602', results[0]['boundingbox'])
|
||||
|
||||
json = """
|
||||
[
|
||||
{
|
||||
"place_id": "127732055",
|
||||
"licence": "Data © OpenStreetMap contributors, ODbL 1.0. http://www.openstreetmap.org/copyright",
|
||||
"osm_type": "relation",
|
||||
"osm_id": "7444",
|
||||
"boundingbox": [
|
||||
"48.8155755",
|
||||
"48.902156",
|
||||
"2.224122",
|
||||
"2.4697602"
|
||||
],
|
||||
"lat": "48.8565056",
|
||||
"lon": "2.3521334",
|
||||
"display_name": "This is the title",
|
||||
"class": "tourism",
|
||||
"type": "city",
|
||||
"importance": 0.96893459932191,
|
||||
"icon": "https://nominatim.openstreetmap.org/images/mapicons/poi_place_city.p.20.png",
|
||||
"address": {
|
||||
"city": "Paris",
|
||||
"county": "Paris",
|
||||
"state": "Île-de-France",
|
||||
"country": "France",
|
||||
"country_code": "fr",
|
||||
"address29": "Address"
|
||||
},
|
||||
"geojson": {
|
||||
"type": "Polygon",
|
||||
"coordinates": [
|
||||
[
|
||||
[
|
||||
2.224122,
|
||||
48.854199
|
||||
]
|
||||
]
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"place_id": "127732055",
|
||||
"licence": "Data © OpenStreetMap contributors, ODbL 1.0. http://www.openstreetmap.org/copyright",
|
||||
"osm_type": "relation",
|
||||
"osm_id": "7444",
|
||||
"boundingbox": [
|
||||
"48.8155755",
|
||||
"48.902156",
|
||||
"2.224122",
|
||||
"2.4697602"
|
||||
],
|
||||
"lat": "48.8565056",
|
||||
"lon": "2.3521334",
|
||||
"display_name": "This is the title",
|
||||
"class": "tourism",
|
||||
"type": "city",
|
||||
"importance": 0.96893459932191,
|
||||
"icon": "https://nominatim.openstreetmap.org/images/mapicons/poi_place_city.p.20.png",
|
||||
"address": {
|
||||
"city": "Paris",
|
||||
"county": "Paris",
|
||||
"state": "Île-de-France",
|
||||
"country": "France",
|
||||
"postcode": 75000,
|
||||
"country_code": "fr"
|
||||
},
|
||||
"geojson": {
|
||||
"type": "Polygon",
|
||||
"coordinates": [
|
||||
[
|
||||
[
|
||||
2.224122,
|
||||
48.854199
|
||||
]
|
||||
]
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"place_id": "127732055",
|
||||
"licence": "Data © OpenStreetMap contributors, ODbL 1.0. http://www.openstreetmap.org/copyright",
|
||||
"osm_type": "node",
|
||||
"osm_id": "7444",
|
||||
"boundingbox": [
|
||||
"48.8155755",
|
||||
"48.902156",
|
||||
"2.224122",
|
||||
"2.4697602"
|
||||
],
|
||||
"lat": "48.8565056",
|
||||
"lon": "2.3521334",
|
||||
"display_name": "This is the title",
|
||||
"class": "tourism",
|
||||
"type": "city",
|
||||
"importance": 0.96893459932191,
|
||||
"icon": "https://nominatim.openstreetmap.org/images/mapicons/poi_place_city.p.20.png",
|
||||
"address": {
|
||||
"city": "Paris",
|
||||
"county": "Paris",
|
||||
"state": "Île-de-France",
|
||||
"country": "France",
|
||||
"country_code": "fr",
|
||||
"address29": "Address"
|
||||
}
|
||||
}
|
||||
]
|
||||
"""
|
||||
response = mock.Mock(text=json)
|
||||
results = openstreetmap.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 3)
|
||||
self.assertIn('48.8565056', results[2]['geojson']['coordinates'])
|
||||
self.assertIn('2.3521334', results[2]['geojson']['coordinates'])
|
|
@@ -1,109 +0,0 @@
|
|||
import mock
|
||||
from collections import defaultdict
|
||||
from searx.engines import pdbe
|
||||
from searx.testing import SearxTestCase
|
||||
|
||||
|
||||
class TestPdbeEngine(SearxTestCase):
|
||||
def test_request(self):
|
||||
query = 'test_query'
|
||||
dicto = defaultdict(dict)
|
||||
params = pdbe.request(query, dicto)
|
||||
self.assertTrue('url' in params)
|
||||
self.assertTrue('ebi.ac.uk' in params['url'])
|
||||
self.assertTrue('data' in params)
|
||||
self.assertTrue('q' in params['data'])
|
||||
self.assertTrue(query in params['data']['q'])
|
||||
self.assertTrue('wt' in params['data'])
|
||||
self.assertTrue('json' in params['data']['wt'])
|
||||
self.assertTrue('method' in params)
|
||||
self.assertTrue(params['method'] == 'POST')
|
||||
|
||||
def test_response(self):
|
||||
self.assertRaises(AttributeError, pdbe.response, None)
|
||||
self.assertRaises(AttributeError, pdbe.response, [])
|
||||
self.assertRaises(AttributeError, pdbe.response, '')
|
||||
self.assertRaises(AttributeError, pdbe.response, '[]')
|
||||
|
||||
json = """
|
||||
{
|
||||
"response": {
|
||||
"docs": [
|
||||
{
|
||||
"citation_title": "X-ray crystal structure of ferric Aplysia limacina myoglobin in different liganded states.",
|
||||
"citation_year": 1993,
|
||||
"entry_author_list": [
|
||||
"Conti E, Moser C, Rizzi M, Mattevi A, Lionetti C, Coda A, Ascenzi P, Brunori M, Bolognesi M"
|
||||
],
|
||||
"journal": "J. Mol. Biol.",
|
||||
"journal_page": "498-508",
|
||||
"journal_volume": "233",
|
||||
"pdb_id": "2fal",
|
||||
"status": "REL",
|
||||
"title": "X-RAY CRYSTAL STRUCTURE OF FERRIC APLYSIA LIMACINA MYOGLOBIN IN DIFFERENT LIGANDED STATES"
|
||||
}
|
||||
],
|
||||
"numFound": 1,
|
||||
"start": 0
|
||||
},
|
||||
"responseHeader": {
|
||||
"QTime": 0,
|
||||
"params": {
|
||||
"q": "2fal",
|
||||
"wt": "json"
|
||||
},
|
||||
"status": 0
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
response = mock.Mock(text=json)
|
||||
results = pdbe.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 1)
|
||||
self.assertEqual(results[0]['title'],
|
||||
'X-RAY CRYSTAL STRUCTURE OF FERRIC APLYSIA LIMACINA MYOGLOBIN IN DIFFERENT LIGANDED STATES')
|
||||
self.assertEqual(results[0]['url'], pdbe.pdbe_entry_url.format(pdb_id='2fal'))
|
||||
self.assertEqual(results[0]['img_src'], pdbe.pdbe_preview_url.format(pdb_id='2fal'))
|
||||
self.assertTrue('Conti E' in results[0]['content'])
|
||||
self.assertTrue('X-ray crystal structure of ferric Aplysia limacina myoglobin in different liganded states.' in
|
||||
results[0]['content'])
|
||||
self.assertTrue('1993' in results[0]['content'])
|
||||
|
||||
# Testing proper handling of PDB entries marked as obsolete
|
||||
json = """
|
||||
{
|
||||
"response": {
|
||||
"docs": [
|
||||
{
|
||||
"citation_title": "Obsolete entry test",
|
||||
"citation_year": 2016,
|
||||
"entry_author_list": ["Doe J"],
|
||||
"journal": "J. Obs.",
|
||||
"journal_page": "1-2",
|
||||
"journal_volume": "1",
|
||||
"pdb_id": "xxxx",
|
||||
"status": "OBS",
|
||||
"title": "OBSOLETE ENTRY TEST",
|
||||
"superseded_by": "yyyy"
|
||||
}
|
||||
],
|
||||
"numFound": 1,
|
||||
"start": 0
|
||||
},
|
||||
"responseHeader": {
|
||||
"QTime": 0,
|
||||
"params": {
|
||||
"q": "xxxx",
|
||||
"wt": "json"
|
||||
},
|
||||
"status": 0
|
||||
}
|
||||
}
|
||||
"""
|
||||
response = mock.Mock(text=json)
|
||||
results = pdbe.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 1)
|
||||
self.assertEqual(results[0]['title'], 'OBSOLETE ENTRY TEST (OBSOLETE)')
|
||||
self.assertTrue(results[0]['content'].startswith('This entry has been superseded by'))
|
|
@@ -1,166 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from collections import defaultdict
|
||||
import mock
|
||||
from searx.engines import photon
|
||||
from searx.testing import SearxTestCase
|
||||
|
||||
|
||||
class TestPhotonEngine(SearxTestCase):
|
||||
|
||||
def test_request(self):
|
||||
query = 'test_query'
|
||||
dicto = defaultdict(dict)
|
||||
dicto['pageno'] = 1
|
||||
dicto['language'] = 'all'
|
||||
params = photon.request(query, dicto)
|
||||
self.assertIn('url', params)
|
||||
self.assertIn(query, params['url'])
|
||||
self.assertIn('photon.komoot.de', params['url'])
|
||||
|
||||
dicto['language'] = 'all'
|
||||
params = photon.request(query, dicto)
|
||||
self.assertNotIn('lang', params['url'])
|
||||
|
||||
dicto['language'] = 'al'
|
||||
params = photon.request(query, dicto)
|
||||
self.assertNotIn('lang', params['url'])
|
||||
|
||||
dicto['language'] = 'fr'
|
||||
params = photon.request(query, dicto)
|
||||
self.assertIn('fr', params['url'])
|
||||
|
||||
def test_response(self):
|
||||
self.assertRaises(AttributeError, photon.response, None)
|
||||
self.assertRaises(AttributeError, photon.response, [])
|
||||
self.assertRaises(AttributeError, photon.response, '')
|
||||
self.assertRaises(AttributeError, photon.response, '[]')
|
||||
|
||||
response = mock.Mock(text='{}')
|
||||
self.assertEqual(photon.response(response), [])
|
||||
|
||||
response = mock.Mock(text='{"data": []}')
|
||||
self.assertEqual(photon.response(response), [])
|
||||
|
||||
json = """
|
||||
{
|
||||
"features": [
|
||||
{
|
||||
"properties": {
|
||||
"osm_key": "waterway",
|
||||
"extent": [
|
||||
-1.4508446,
|
||||
51.1614997,
|
||||
-1.4408036,
|
||||
51.1525635
|
||||
],
|
||||
"name": "This is the title",
|
||||
"state": "England",
|
||||
"osm_id": 114823817,
|
||||
"osm_type": "W",
|
||||
"osm_value": "river",
|
||||
"city": "Test Valley",
|
||||
"country": "United Kingdom"
|
||||
},
|
||||
"type": "Feature",
|
||||
"geometry": {
|
||||
"type": "Point",
|
||||
"coordinates": [
|
||||
-1.4458571,
|
||||
51.1576661
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"osm_key": "place",
|
||||
"street": "Rue",
|
||||
"state": "Ile-de-France",
|
||||
"osm_id": 129211377,
|
||||
"osm_type": "R",
|
||||
"housenumber": "10",
|
||||
"postcode": "75011",
|
||||
"osm_value": "house",
|
||||
"city": "Paris",
|
||||
"country": "France"
|
||||
},
|
||||
"type": "Feature",
|
||||
"geometry": {
|
||||
"type": "Point",
|
||||
"coordinates": [
|
||||
2.3725025,
|
||||
48.8654481
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"osm_key": "amenity",
|
||||
"street": "Allée",
|
||||
"name": "Bibliothèque",
|
||||
"state": "Ile-de-France",
|
||||
"osm_id": 1028573132,
|
||||
"osm_type": "N",
|
||||
"postcode": "75001",
|
||||
"osm_value": "library",
|
||||
"city": "Paris",
|
||||
"country": "France"
|
||||
},
|
||||
"type": "Feature",
|
||||
"geometry": {
|
||||
"type": "Point",
|
||||
"coordinates": [
|
||||
2.3445634,
|
||||
48.862494
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"osm_key": "amenity",
|
||||
"osm_id": 1028573132,
|
||||
"osm_type": "Y",
|
||||
"postcode": "75001",
|
||||
"osm_value": "library",
|
||||
"city": "Paris",
|
||||
"country": "France"
|
||||
},
|
||||
"type": "Feature",
|
||||
"geometry": {
|
||||
"type": "Point",
|
||||
"coordinates": [
|
||||
2.3445634,
|
||||
48.862494
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
}
|
||||
],
|
||||
"type": "FeatureCollection"
|
||||
}
|
||||
"""
|
||||
response = mock.Mock(text=json)
|
||||
results = photon.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 3)
|
||||
self.assertEqual(results[0]['title'], 'This is the title')
|
||||
self.assertEqual(results[0]['content'], '')
|
||||
self.assertEqual(results[0]['longitude'], -1.4458571)
|
||||
self.assertEqual(results[0]['latitude'], 51.1576661)
|
||||
self.assertIn(-1.4508446, results[0]['boundingbox'])
|
||||
self.assertIn(51.1614997, results[0]['boundingbox'])
|
||||
self.assertIn(-1.4408036, results[0]['boundingbox'])
|
||||
self.assertIn(51.1525635, results[0]['boundingbox'])
|
||||
self.assertIn('type', results[0]['geojson'])
|
||||
self.assertEqual(results[0]['geojson']['type'], 'Point')
|
||||
self.assertEqual(results[0]['address'], None)
|
||||
self.assertEqual(results[0]['osm']['type'], 'way')
|
||||
self.assertEqual(results[0]['osm']['id'], 114823817)
|
||||
self.assertEqual(results[0]['url'], 'https://openstreetmap.org/way/114823817')
|
||||
self.assertEqual(results[1]['osm']['type'], 'relation')
|
||||
self.assertEqual(results[2]['address']['name'], u'Bibliothèque')
|
||||
self.assertEqual(results[2]['address']['house_number'], None)
|
||||
self.assertEqual(results[2]['address']['locality'], 'Paris')
|
||||
self.assertEqual(results[2]['address']['postcode'], '75001')
|
||||
self.assertEqual(results[2]['address']['country'], 'France')
|
||||
self.assertEqual(results[2]['osm']['type'], 'node')
|
|
@@ -1,166 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from collections import defaultdict
|
||||
import mock
|
||||
from searx.engines import piratebay
|
||||
from searx.testing import SearxTestCase
|
||||
|
||||
|
||||
class TestPiratebayEngine(SearxTestCase):
|
||||
|
||||
def test_request(self):
|
||||
query = 'test_query'
|
||||
dicto = defaultdict(dict)
|
||||
dicto['pageno'] = 1
|
||||
dicto['category'] = 'Toto'
|
||||
params = piratebay.request(query, dicto)
|
||||
self.assertIn('url', params)
|
||||
self.assertIn(query, params['url'])
|
||||
self.assertIn('piratebay.org', params['url'])
|
||||
self.assertIn('0', params['url'])
|
||||
|
||||
dicto['category'] = 'music'
|
||||
params = piratebay.request(query, dicto)
|
||||
self.assertIn('100', params['url'])
|
||||
|
||||
def test_response(self):
|
||||
self.assertRaises(AttributeError, piratebay.response, None)
|
||||
self.assertRaises(AttributeError, piratebay.response, [])
|
||||
self.assertRaises(AttributeError, piratebay.response, '')
|
||||
self.assertRaises(AttributeError, piratebay.response, '[]')
|
||||
|
||||
response = mock.Mock(text='<html></html>')
|
||||
self.assertEqual(piratebay.response(response), [])
|
||||
|
||||
html = """
|
||||
<table id="searchResult">
|
||||
<tr>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="vertTh">
|
||||
<center>
|
||||
<a href="#" title="More from this category">Anime</a><br/>
|
||||
(<a href="#" title="More from this category">Anime</a>)
|
||||
</center>
|
||||
</td>
|
||||
<td>
|
||||
<div class="detName">
|
||||
<a href="/this.is.the.link" class="detLink" title="Title">
|
||||
This is the title
|
||||
</a>
|
||||
</div>
|
||||
<a href="magnet:?xt=urn:btih:MAGNETLINK" title="Download this torrent using magnet">
|
||||
<img src="/static/img/icon-magnet.gif" alt="Magnet link"/>
|
||||
</a>
|
||||
<a href="http://torcache.net/torrent/TORRENTFILE.torrent" title="Download this torrent">
|
||||
<img src="/static/img/dl.gif" class="dl" alt="Download"/>
|
||||
</a>
|
||||
<a href="/user/HorribleSubs">
|
||||
<img src="/static/img/vip.gif" alt="VIP" title="VIP" style="width:11px;" border='0'/>
|
||||
</a>
|
||||
<img src="/static/img/11x11p.png"/>
|
||||
<font class="detDesc">
|
||||
This is the content <span>and should be</span> OK
|
||||
</font>
|
||||
</td>
|
||||
<td align="right">13</td>
|
||||
<td align="right">334</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="vertTh">
|
||||
<center>
|
||||
<a href="#" title="More from this category">Anime</a><br/>
|
||||
(<a href="#" title="More from this category">Anime</a>)
|
||||
</center>
|
||||
</td>
|
||||
<td>
|
||||
<div class="detName">
|
||||
<a href="/this.is.the.link" class="detLink" title="Title">
|
||||
This is the title
|
||||
</a>
|
||||
</div>
|
||||
<a href="magnet:?xt=urn:btih:MAGNETLINK" title="Download this torrent using magnet">
|
||||
<img src="/static/img/icon-magnet.gif" alt="Magnet link"/>
|
||||
</a>
|
||||
<a href="/user/HorribleSubs">
|
||||
<img src="/static/img/vip.gif" alt="VIP" title="VIP" style="width:11px;" border='0'/>
|
||||
</a>
|
||||
<img src="/static/img/11x11p.png"/>
|
||||
<font class="detDesc">
|
||||
This is the content <span>and should be</span> OK
|
||||
</font>
|
||||
</td>
|
||||
<td align="right">13</td>
|
||||
<td align="right">334</td>
|
||||
</tr>
|
||||
</table>
|
||||
"""
|
||||
response = mock.Mock(text=html)
|
||||
results = piratebay.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 2)
|
||||
self.assertEqual(results[0]['title'], 'This is the title')
|
||||
self.assertEqual(results[0]['url'], 'https://thepiratebay.org/this.is.the.link')
|
||||
self.assertEqual(results[0]['content'], 'This is the content and should be OK')
|
||||
self.assertEqual(results[0]['seed'], 13)
|
||||
self.assertEqual(results[0]['leech'], 334)
|
||||
self.assertEqual(results[0]['magnetlink'], 'magnet:?xt=urn:btih:MAGNETLINK')
|
||||
self.assertEqual(results[0]['torrentfile'], 'http://torcache.net/torrent/TORRENTFILE.torrent')
|
||||
|
||||
self.assertEqual(results[1]['torrentfile'], None)
|
||||
|
||||
html = """
|
||||
<table id="searchResult">
|
||||
<tr>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="vertTh">
|
||||
<center>
|
||||
<a href="#" title="More from this category">Anime</a><br/>
|
||||
(<a href="#" title="More from this category">Anime</a>)
|
||||
</center>
|
||||
</td>
|
||||
<td>
|
||||
<div class="detName">
|
||||
<a href="/this.is.the.link" class="detLink" title="Title">
|
||||
This is the title
|
||||
</a>
|
||||
</div>
|
||||
<a href="magnet:?xt=urn:btih:MAGNETLINK" title="Download this torrent using magnet">
|
||||
<img src="/static/img/icon-magnet.gif" alt="Magnet link"/>
|
||||
</a>
|
||||
<a href="http://torcache.net/torrent/TORRENTFILE.torrent" title="Download this torrent">
|
||||
<img src="/static/img/dl.gif" class="dl" alt="Download"/>
|
||||
</a>
|
||||
<a href="/user/HorribleSubs">
|
||||
<img src="/static/img/vip.gif" alt="VIP" title="VIP" style="width:11px;" border='0'/>
|
||||
</a>
|
||||
<img src="/static/img/11x11p.png"/>
|
||||
<font class="detDesc">
|
||||
This is the content <span>and should be</span> OK
|
||||
</font>
|
||||
</td>
|
||||
<td align="right">s</td>
|
||||
<td align="right">d</td>
|
||||
</tr>
|
||||
</table>
|
||||
"""
|
||||
response = mock.Mock(text=html)
|
||||
results = piratebay.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 1)
|
||||
self.assertEqual(results[0]['title'], 'This is the title')
|
||||
self.assertEqual(results[0]['url'], 'https://thepiratebay.org/this.is.the.link')
|
||||
self.assertEqual(results[0]['content'], 'This is the content and should be OK')
|
||||
self.assertEqual(results[0]['seed'], 0)
|
||||
self.assertEqual(results[0]['leech'], 0)
|
||||
self.assertEqual(results[0]['magnetlink'], 'magnet:?xt=urn:btih:MAGNETLINK')
|
||||
self.assertEqual(results[0]['torrentfile'], 'http://torcache.net/torrent/TORRENTFILE.torrent')
|
||||
|
||||
html = """
|
||||
<table id="searchResult">
|
||||
</table>
|
||||
"""
|
||||
response = mock.Mock(text=html)
|
||||
results = piratebay.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 0)
|
|
@@ -1,339 +0,0 @@
|
|||
from collections import defaultdict
|
||||
import mock
|
||||
from searx.engines import qwant
|
||||
from searx.testing import SearxTestCase
|
||||
|
||||
|
||||
class TestQwantEngine(SearxTestCase):
|
||||
|
||||
def test_request(self):
|
||||
qwant.supported_languages = ['en-US', 'fr-CA', 'fr-FR']
|
||||
qwant.language_aliases = {}
|
||||
query = 'test_query'
|
||||
dicto = defaultdict(dict)
|
||||
dicto['pageno'] = 0
|
||||
dicto['language'] = 'fr-FR'
|
||||
qwant.categories = ['']
|
||||
params = qwant.request(query, dicto)
|
||||
self.assertIn('url', params)
|
||||
self.assertIn(query, params['url'])
|
||||
self.assertIn('web', params['url'])
|
||||
self.assertIn('qwant.com', params['url'])
|
||||
self.assertIn('fr_fr', params['url'])
|
||||
|
||||
dicto['language'] = 'all'
|
||||
qwant.categories = ['news']
|
||||
params = qwant.request(query, dicto)
|
||||
self.assertFalse('fr' in params['url'])
|
||||
self.assertIn('news', params['url'])
|
||||
|
||||
dicto['language'] = 'fr'
|
||||
params = qwant.request(query, dicto)
|
||||
self.assertIn('fr_fr', params['url'])
|
||||
|
||||
def test_response(self):
|
||||
self.assertRaises(AttributeError, qwant.response, None)
|
||||
self.assertRaises(AttributeError, qwant.response, [])
|
||||
self.assertRaises(AttributeError, qwant.response, '')
|
||||
self.assertRaises(AttributeError, qwant.response, '[]')
|
||||
|
||||
response = mock.Mock(text='{}')
|
||||
self.assertEqual(qwant.response(response), [])
|
||||
|
||||
response = mock.Mock(text='{"data": {}}')
|
||||
self.assertEqual(qwant.response(response), [])
|
||||
|
||||
json = """
|
||||
{
|
||||
"status": "success",
|
||||
"data": {
|
||||
"query": {
|
||||
"locale": "en_us",
|
||||
"query": "Test",
|
||||
"offset": 10
|
||||
},
|
||||
"result": {
|
||||
"items": [
|
||||
{
|
||||
"title": "Title",
|
||||
"score": 9999,
|
||||
"url": "http://www.url.xyz",
|
||||
"source": "...",
|
||||
"desc": "Description",
|
||||
"date": "",
|
||||
"_id": "db0aadd62c2a8565567ffc382f5c61fa",
|
||||
"favicon": "https://s.qwant.com/fav.ico"
|
||||
}
|
||||
],
|
||||
"filters": []
|
||||
},
|
||||
"cache": {
|
||||
"key": "e66aa864c00147a0e3a16ff7a5efafde",
|
||||
"created": 1433092754,
|
||||
"expiration": 259200,
|
||||
"status": "miss",
|
||||
"age": 0
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
response = mock.Mock(text=json)
|
||||
qwant.categories = ['general']
|
||||
results = qwant.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 1)
|
||||
self.assertEqual(results[0]['title'], 'Title')
|
||||
self.assertEqual(results[0]['url'], 'http://www.url.xyz')
|
||||
self.assertEqual(results[0]['content'], 'Description')
|
||||
|
||||
json = """
|
||||
{
|
||||
"status": "success",
|
||||
"data": {
|
||||
"query": {
|
||||
"locale": "en_us",
|
||||
"query": "Test",
|
||||
"offset": 10
|
||||
},
|
||||
"result": {
|
||||
"items": [
|
||||
{
|
||||
"title": "Title",
|
||||
"score": 9999,
|
||||
"url": "http://www.url.xyz",
|
||||
"source": "...",
|
||||
"media": "http://image.jpg",
|
||||
"desc": "",
|
||||
"thumbnail": "http://thumbnail.jpg",
|
||||
"date": "",
|
||||
"_id": "db0aadd62c2a8565567ffc382f5c61fa",
|
||||
"favicon": "https://s.qwant.com/fav.ico"
|
||||
}
|
||||
],
|
||||
"filters": []
|
||||
},
|
||||
"cache": {
|
||||
"key": "e66aa864c00147a0e3a16ff7a5efafde",
|
||||
"created": 1433092754,
|
||||
"expiration": 259200,
|
||||
"status": "miss",
|
||||
"age": 0
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
response = mock.Mock(text=json)
|
||||
qwant.categories = ['images']
|
||||
results = qwant.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 1)
|
||||
self.assertEqual(results[0]['title'], 'Title')
|
||||
self.assertEqual(results[0]['url'], 'http://www.url.xyz')
|
||||
self.assertEqual(results[0]['content'], '')
|
||||
self.assertEqual(results[0]['thumbnail_src'], 'http://thumbnail.jpg')
|
||||
self.assertEqual(results[0]['img_src'], 'http://image.jpg')
|
||||
|
||||
json = """
|
||||
{
|
||||
"status": "success",
|
||||
"data": {
|
||||
"query": {
|
||||
"locale": "en_us",
|
||||
"query": "Test",
|
||||
"offset": 10
|
||||
},
|
||||
"result": {
|
||||
"items": [
|
||||
{
|
||||
"title": "Title",
|
||||
"score": 9999,
|
||||
"url": "http://www.url.xyz",
|
||||
"source": "...",
|
||||
"desc": "Description",
|
||||
"date": 1433260920,
|
||||
"_id": "db0aadd62c2a8565567ffc382f5c61fa",
|
||||
"favicon": "https://s.qwant.com/fav.ico"
|
||||
}
|
||||
],
|
||||
"filters": []
|
||||
},
|
||||
"cache": {
|
||||
"key": "e66aa864c00147a0e3a16ff7a5efafde",
|
||||
"created": 1433092754,
|
||||
"expiration": 259200,
|
||||
"status": "miss",
|
||||
"age": 0
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
response = mock.Mock(text=json)
|
||||
qwant.categories = ['news']
|
||||
results = qwant.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 1)
|
||||
self.assertEqual(results[0]['title'], 'Title')
|
||||
self.assertEqual(results[0]['url'], 'http://www.url.xyz')
|
||||
self.assertEqual(results[0]['content'], 'Description')
|
||||
self.assertIn('publishedDate', results[0])
|
||||
|
||||
json = """
|
||||
{
|
||||
"status": "success",
|
||||
"data": {
|
||||
"query": {
|
||||
"locale": "en_us",
|
||||
"query": "Test",
|
||||
"offset": 10
|
||||
},
|
||||
"result": {
|
||||
"items": [
|
||||
{
|
||||
"title": "Title",
|
||||
"score": 9999,
|
||||
"url": "http://www.url.xyz",
|
||||
"source": "...",
|
||||
"desc": "Description",
|
||||
"date": 1433260920,
|
||||
"_id": "db0aadd62c2a8565567ffc382f5c61fa",
|
||||
"favicon": "https://s.qwant.com/fav.ico"
|
||||
}
|
||||
],
|
||||
"filters": []
|
||||
},
|
||||
"cache": {
|
||||
"key": "e66aa864c00147a0e3a16ff7a5efafde",
|
||||
"created": 1433092754,
|
||||
"expiration": 259200,
|
||||
"status": "miss",
|
||||
"age": 0
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
response = mock.Mock(text=json)
|
||||
qwant.categories = ['social media']
|
||||
results = qwant.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 1)
|
||||
self.assertEqual(results[0]['title'], 'Title')
|
||||
self.assertEqual(results[0]['url'], 'http://www.url.xyz')
|
||||
self.assertEqual(results[0]['content'], 'Description')
|
||||
self.assertIn('publishedDate', results[0])
|
||||
|
||||
json = """
|
||||
{
|
||||
"status": "success",
|
||||
"data": {
|
||||
"query": {
|
||||
"locale": "en_us",
|
||||
"query": "Test",
|
||||
"offset": 10
|
||||
},
|
||||
"result": {
|
||||
"items": [
|
||||
{
|
||||
"title": "Title",
|
||||
"score": 9999,
|
||||
"url": "http://www.url.xyz",
|
||||
"source": "...",
|
||||
"desc": "Description",
|
||||
"date": 1433260920,
|
||||
"_id": "db0aadd62c2a8565567ffc382f5c61fa",
|
||||
"favicon": "https://s.qwant.com/fav.ico"
|
||||
}
|
||||
],
|
||||
"filters": []
|
||||
},
|
||||
"cache": {
|
||||
"key": "e66aa864c00147a0e3a16ff7a5efafde",
|
||||
"created": 1433092754,
|
||||
"expiration": 259200,
|
||||
"status": "miss",
|
||||
"age": 0
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
response = mock.Mock(text=json)
|
||||
qwant.categories = ['']
|
||||
results = qwant.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 0)
|
||||
|
||||
json = """
|
||||
{
|
||||
"status": "success",
|
||||
"data": {
|
||||
"query": {
|
||||
"locale": "en_us",
|
||||
"query": "Test",
|
||||
"offset": 10
|
||||
},
|
||||
"result": {
|
||||
"filters": []
|
||||
},
|
||||
"cache": {
|
||||
"key": "e66aa864c00147a0e3a16ff7a5efafde",
|
||||
"created": 1433092754,
|
||||
"expiration": 259200,
|
||||
"status": "miss",
|
||||
"age": 0
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
response = mock.Mock(text=json)
|
||||
results = qwant.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 0)
|
||||
|
||||
json = """
|
||||
{
|
||||
"status": "success",
|
||||
"data": {
|
||||
"query": {
|
||||
"locale": "en_us",
|
||||
"query": "Test",
|
||||
"offset": 10
|
||||
},
|
||||
"cache": {
|
||||
"key": "e66aa864c00147a0e3a16ff7a5efafde",
|
||||
"created": 1433092754,
|
||||
"expiration": 259200,
|
||||
"status": "miss",
|
||||
"age": 0
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
response = mock.Mock(text=json)
|
||||
results = qwant.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 0)
|
||||
|
||||
json = """
|
||||
{
|
||||
"status": "success"
|
||||
}
|
||||
"""
|
||||
response = mock.Mock(text=json)
|
||||
results = qwant.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 0)
|
||||
|
||||
def test_fetch_supported_languages(self):
|
||||
page = """some code...
|
||||
config_set('project.regionalisation', {"continents":{},"languages":
|
||||
{"de":{"code":"de","name":"Deutsch","countries":["DE","CH","AT"]},
|
||||
"it":{"code":"it","name":"Italiano","countries":["IT","CH"]}}});
|
||||
some more code..."""
|
||||
response = mock.Mock(text=page)
|
||||
languages = qwant._fetch_supported_languages(response)
|
||||
self.assertEqual(type(languages), list)
|
||||
self.assertEqual(len(languages), 5)
|
||||
self.assertIn('de-DE', languages)
|
||||
self.assertIn('de-CH', languages)
|
||||
self.assertIn('de-AT', languages)
|
||||
self.assertIn('it-IT', languages)
|
||||
self.assertIn('it-CH', languages)
|
|
@@ -1,71 +0,0 @@
from collections import defaultdict
import mock
from searx.engines import reddit
from searx.testing import SearxTestCase
from datetime import datetime


class TestRedditEngine(SearxTestCase):

    def test_request(self):
        query = 'test_query'
        dic = defaultdict(dict)
        params = reddit.request(query, dic)
        self.assertTrue('url' in params)
        self.assertTrue(query in params['url'])
        self.assertTrue('reddit.com' in params['url'])

    def test_response(self):
        resp = mock.Mock(text='{}')
        self.assertEqual(reddit.response(resp), [])

        json = """
        {
          "kind": "Listing",
          "data": {
            "children": [{
              "data": {
                "url": "http://google2.com/",
                "permalink": "http://google.com/",
                "title": "Title number one",
                "selftext": "Sample",
                "created_utc": 1401219957.0,
                "thumbnail": "http://image.com/picture.jpg"
              }
            }, {
              "data": {
                "url": "https://reddit2.com/",
                "permalink": "https://reddit.com/",
                "title": "Title number two",
                "selftext": "Dominus vobiscum",
                "created_utc": 1438792533.0,
                "thumbnail": "self"
              }
            }]
          }
        }
        """

        resp = mock.Mock(text=json)
        results = reddit.response(resp)

        self.assertEqual(len(results), 2)
        self.assertEqual(type(results), list)

        # testing first result (picture)
        r = results[0]
        self.assertEqual(r['url'], 'http://google.com/')
        self.assertEqual(r['title'], 'Title number one')
        self.assertEqual(r['template'], 'images.html')
        self.assertEqual(r['img_src'], 'http://google2.com/')
        self.assertEqual(r['thumbnail_src'], 'http://image.com/picture.jpg')

        # testing second result (self-post)
        r = results[1]
        self.assertEqual(r['url'], 'https://reddit.com/')
        self.assertEqual(r['title'], 'Title number two')
        self.assertEqual(r['content'], 'Dominus vobiscum')
        created = datetime.fromtimestamp(1438792533.0)
        self.assertEqual(r['publishedDate'], created)
        self.assertTrue('thumbnail_src' not in r)
        self.assertTrue('img_src' not in r)
@@ -1,175 +0,0 @@
|
|||
from collections import defaultdict
|
||||
import mock
|
||||
from searx.engines import scanr_structures
|
||||
from searx.testing import SearxTestCase
|
||||
|
||||
|
||||
class TestScanrStructuresEngine(SearxTestCase):
|
||||
|
||||
def test_request(self):
|
||||
query = 'test_query'
|
||||
dicto = defaultdict(dict)
|
||||
dicto['pageno'] = 1
|
||||
params = scanr_structures.request(query, dicto)
|
||||
self.assertIn('url', params)
|
||||
self.assertIn(query, params['data'])
|
||||
self.assertIn('scanr.enseignementsup-recherche.gouv.fr', params['url'])
|
||||
|
||||
def test_response(self):
|
||||
self.assertRaises(AttributeError, scanr_structures.response, None)
|
||||
self.assertRaises(AttributeError, scanr_structures.response, [])
|
||||
self.assertRaises(AttributeError, scanr_structures.response, '')
|
||||
self.assertRaises(AttributeError, scanr_structures.response, '[]')
|
||||
|
||||
response = mock.Mock(text='{}')
|
||||
self.assertEqual(scanr_structures.response(response), [])
|
||||
|
||||
response = mock.Mock(text='{"data": []}')
|
||||
self.assertEqual(scanr_structures.response(response), [])
|
||||
|
||||
json = u"""
|
||||
{
|
||||
"request":
|
||||
{
|
||||
"query":"test_query",
|
||||
"page":1,
|
||||
"pageSize":20,
|
||||
"sortOrder":"RELEVANCY",
|
||||
"sortDirection":"ASC",
|
||||
"searchField":"ALL",
|
||||
"from":0
|
||||
},
|
||||
"total":2471,
|
||||
"results":[
|
||||
{
|
||||
"id":"200711886U",
|
||||
"label":"Laboratoire d'Informatique de Grenoble",
|
||||
"kind":"RNSR",
|
||||
"publicEntity":true,
|
||||
"address":{"city":"Grenoble","departement":"38"},
|
||||
"logo":"/static/logos/200711886U.png",
|
||||
"acronym":"LIG",
|
||||
"type":{"code":"UR","label":"Unit\xe9 de recherche"},
|
||||
"level":2,
|
||||
"institutions":[
|
||||
{
|
||||
"id":"193819125",
|
||||
"label":"Grenoble INP",
|
||||
"acronym":"IPG",
|
||||
"code":"UMR 5217"
|
||||
},
|
||||
{
|
||||
"id":"130021397",
|
||||
"label":"Universit\xe9 de Grenoble Alpes",
|
||||
"acronym":"UGA",
|
||||
"code":"UMR 5217"
|
||||
},
|
||||
{
|
||||
"id":"180089013",
|
||||
"label":"Centre national de la recherche scientifique",
|
||||
"acronym":"CNRS",
|
||||
"code":"UMR 5217"
|
||||
},
|
||||
{
|
||||
"id":"180089047",
|
||||
"label":"Institut national de recherche en informatique et en automatique",
|
||||
"acronym":"Inria",
|
||||
"code":"UMR 5217"
|
||||
}
|
||||
],
|
||||
"highlights":[
|
||||
{
|
||||
"type":"projects",
|
||||
"value":"linguicielles d\xe9velopp\xe9s jusqu'ici par le GETALP\
|
||||
du <strong>LIG</strong> en tant que prototypes op\xe9rationnels.\
|
||||
\\r\\nDans le contexte"
|
||||
},
|
||||
{
|
||||
"type":"acronym",
|
||||
"value":"<strong>LIG</strong>"
|
||||
},
|
||||
{
|
||||
"type":"websiteContents",
|
||||
"value":"S\xe9lection\\nListe structures\\nD\xe9tail\\n\
|
||||
Accueil\\n200711886U : <strong>LIG</strong>\
|
||||
Laboratoire d'Informatique de Grenoble Unit\xe9 de recherche"},
|
||||
{
|
||||
"type":"publications",
|
||||
"value":"de noms. Nous avons d'abord d\xe9velopp\xe9 LOOV \
|
||||
(pour <strong>Lig</strong> Overlaid OCR in Vid\xe9o), \
|
||||
un outil d'extraction des"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"id":"199511665F",
|
||||
"label":"Laboratoire Bordelais de Recherche en Informatique",
|
||||
"kind":"RNSR",
|
||||
"publicEntity":true,
|
||||
"address":{"city":"Talence","departement":"33"},
|
||||
"logo":"/static/logos/199511665F.png",
|
||||
"acronym":"LaBRI",
|
||||
"type":{"code":"UR","label":"Unit\xe9 de recherche"},
|
||||
"level":2,
|
||||
"institutions":[
|
||||
{
|
||||
"id":"130006356",
|
||||
"label":"Institut polytechnique de Bordeaux",
|
||||
"acronym":"IPB",
|
||||
"code":"UMR 5800"
|
||||
},
|
||||
{
|
||||
"id":"130018351",
|
||||
"label":"Universit\xe9 de Bordeaux",
|
||||
"acronym":null,
|
||||
"code":"UMR 5800"
|
||||
},
|
||||
{
|
||||
"id":"180089013",
|
||||
"label":"Centre national de la recherche scientifique",
|
||||
"acronym":"CNRS",
|
||||
"code":"UMR 5800"
|
||||
},
|
||||
{
|
||||
"id":"180089047",
|
||||
"label":"Institut national de recherche en informatique et en automatique",
|
||||
"acronym":"Inria",
|
||||
"code":"UMR 5800"
|
||||
}
|
||||
],
|
||||
"highlights":[
|
||||
{
|
||||
"type":"websiteContents",
|
||||
"value":"Samia Kerdjoudj\\n2016-07-05\\nDouble-exponential\
|
||||
and <strong>triple</strong>-exponential bounds for\
|
||||
choosability problems parameterized"
|
||||
},
|
||||
{
|
||||
"type":"publications",
|
||||
"value":"de cam\xe9ras install\xe9es dans les lieux publiques \
|
||||
a <strong>tripl\xe9</strong> en 2009, passant de 20 000 \
|
||||
\xe0 60 000. Malgr\xe9 le"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
"""
|
||||
response = mock.Mock(text=json)
|
||||
results = scanr_structures.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 2)
|
||||
self.assertEqual(results[0]['title'], u"Laboratoire d'Informatique de Grenoble")
|
||||
self.assertEqual(results[0]['url'], 'https://scanr.enseignementsup-recherche.gouv.fr/structure/200711886U')
|
||||
self.assertEqual(results[0]['content'],
|
||||
u"linguicielles d\xe9velopp\xe9s jusqu'ici par le GETALP "
|
||||
u"du LIG en tant que prototypes "
|
||||
u"op\xe9rationnels. Dans le contexte")
|
||||
self.assertEqual(results[1]['img_src'],
|
||||
'https://scanr.enseignementsup-recherche.gouv.fr//static/logos/199511665F.png')
|
||||
self.assertEqual(results[1]['content'],
|
||||
"Samia Kerdjoudj 2016-07-05 Double-exponential and"
|
||||
" triple-exponential bounds for "
|
||||
"choosability problems parameterized")
|
||||
self.assertEqual(results[1]['url'], 'https://scanr.enseignementsup-recherche.gouv.fr/structure/199511665F')
|
||||
self.assertEqual(results[1]['title'], u"Laboratoire Bordelais de Recherche en Informatique")
|
|
@@ -1,75 +0,0 @@
from collections import defaultdict
import mock
from searx.engines import searchcode_code
from searx.testing import SearxTestCase


class TestSearchcodeCodeEngine(SearxTestCase):

    def test_request(self):
        query = 'test_query'
        dicto = defaultdict(dict)
        dicto['pageno'] = 0
        params = searchcode_code.request(query, dicto)
        self.assertIn('url', params)
        self.assertIn(query, params['url'])
        self.assertIn('searchcode.com', params['url'])

    def test_response(self):
        self.assertRaises(AttributeError, searchcode_code.response, None)
        self.assertRaises(AttributeError, searchcode_code.response, [])
        self.assertRaises(AttributeError, searchcode_code.response, '')
        self.assertRaises(AttributeError, searchcode_code.response, '[]')

        response = mock.Mock(text='{}')
        self.assertEqual(searchcode_code.response(response), [])

        response = mock.Mock(text='{"data": []}')
        self.assertEqual(searchcode_code.response(response), [])

        json = """
        {
          "matchterm": "test",
          "previouspage": null,
          "searchterm": "test",
          "query": "test",
          "total": 1000,
          "page": 0,
          "nextpage": 1,
          "results": [
            {
              "repo": "https://repo",
              "linescount": 1044,
              "location": "/tests",
              "name": "Name",
              "url": "https://url",
              "md5hash": "ecac6e479edd2b9406c9e08603cec655",
              "lines": {
                "1": "// Test 011",
                "2": "// Source: "
              },
              "id": 51223527,
              "filename": "File.CPP"
            }
          ]
        }
        """
        response = mock.Mock(text=json)
        results = searchcode_code.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 1)
        self.assertEqual(results[0]['title'], 'Name - File.CPP')
        self.assertEqual(results[0]['url'], 'https://url')
        self.assertEqual(results[0]['repository'], 'https://repo')
        self.assertEqual(results[0]['code_language'], 'cpp')

        json = r"""
        {"toto":[
          {"id":200,"name":"Artist Name",
          "link":"http:\/\/www.searchcode_code.com\/artist\/1217","type":"artist"}
        ]}
        """
        response = mock.Mock(text=json)
        results = searchcode_code.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 0)
@@ -1,70 +0,0 @@
from collections import defaultdict
import mock
from searx.engines import searchcode_doc
from searx.testing import SearxTestCase


class TestSearchcodeDocEngine(SearxTestCase):

    def test_request(self):
        query = 'test_query'
        dicto = defaultdict(dict)
        dicto['pageno'] = 0
        params = searchcode_doc.request(query, dicto)
        self.assertIn('url', params)
        self.assertIn(query, params['url'])
        self.assertIn('searchcode.com', params['url'])

    def test_response(self):
        self.assertRaises(AttributeError, searchcode_doc.response, None)
        self.assertRaises(AttributeError, searchcode_doc.response, [])
        self.assertRaises(AttributeError, searchcode_doc.response, '')
        self.assertRaises(AttributeError, searchcode_doc.response, '[]')

        response = mock.Mock(text='{}')
        self.assertEqual(searchcode_doc.response(response), [])

        response = mock.Mock(text='{"data": []}')
        self.assertEqual(searchcode_doc.response(response), [])

        json = """
        {
          "matchterm": "test",
          "previouspage": null,
          "searchterm": "test",
          "query": "test",
          "total": 60,
          "page": 0,
          "nextpage": 1,
          "results": [
            {
              "synopsis": "Synopsis",
              "displayname": null,
              "name": "test",
              "url": "http://url",
              "type": "Type",
              "icon": null,
              "namespace": "Namespace",
              "description": "Description"
            }
          ]
        }
        """
        response = mock.Mock(text=json)
        results = searchcode_doc.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 1)
        self.assertEqual(results[0]['title'], '[Type] Namespace test')
        self.assertEqual(results[0]['url'], 'http://url')
        self.assertIn('Description', results[0]['content'])

        json = r"""
        {"toto":[
          {"id":200,"name":"Artist Name",
          "link":"http:\/\/www.searchcode_doc.com\/artist\/1217","type":"artist"}
        ]}
        """
        response = mock.Mock(text=json)
        results = searchcode_doc.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 0)
@@ -1,66 +0,0 @@
# -*- coding: utf-8 -*-
from collections import defaultdict
import mock
from searx.engines import seedpeer
from searx.testing import SearxTestCase


class TestBtdiggEngine(SearxTestCase):

    def test_request(self):
        query = 'test_query'
        dicto = defaultdict(dict)
        dicto['pageno'] = 1
        params = seedpeer.request(query, dicto)
        self.assertIn('url', params)
        self.assertIn(query, params['url'])
        self.assertIn('seedpeer', params['url'])

    def test_response(self):
        self.assertRaises(AttributeError, seedpeer.response, None)
        self.assertRaises(AttributeError, seedpeer.response, [])
        self.assertRaises(AttributeError, seedpeer.response, '')
        self.assertRaises(AttributeError, seedpeer.response, '[]')

        response = mock.Mock(text='<html></html>')
        self.assertEqual(seedpeer.response(response), [])

        html = u"""
        <html>
            <head>
                <script></script>
                <script type="text/javascript" src="not_here.js"></script>
                <script type="text/javascript">
                    window.initialData=
                    {"data": {"list": [{"name": "Title", "seeds": "10", "peers": "20", "size": "1024", "hash": "abc123"}]}}
                </script>
            </head>
            <body>
                <table></table>
                <table>
                    <thead><tr></tr></thead>
                    <tbody>
                        <tr>
                            <td><a href="link">Title</a></td>
                            <td>1 year</td>
                            <td>1 KB</td>
                            <td>10</td>
                            <td>20</td>
                            <td></td>
                        </tr>
                    </tbody>
                </table>
            </body>
        </html>
        """
        response = mock.Mock(text=html)
        results = seedpeer.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 1)
        self.assertEqual(results[0]['title'], 'Title')
        self.assertEqual(results[0]['url'], 'https://seedpeer.me/link')
        self.assertEqual(results[0]['seed'], 10)
        self.assertEqual(results[0]['leech'], 20)
        self.assertEqual(results[0]['filesize'], 1024)
        self.assertEqual(results[0]['torrentfile'], 'https://seedpeer.me/torrent/abc123')
        self.assertEqual(results[0]['magnetlink'], 'magnet:?xt=urn:btih:abc123')
@@ -1,192 +0,0 @@
from collections import defaultdict
|
||||
import mock
|
||||
from searx.engines import soundcloud
|
||||
from searx.testing import SearxTestCase
|
||||
from searx.url_utils import quote_plus
|
||||
|
||||
|
||||
class TestSoundcloudEngine(SearxTestCase):
|
||||
|
||||
def test_request(self):
|
||||
query = 'test_query'
|
||||
dicto = defaultdict(dict)
|
||||
dicto['pageno'] = 1
|
||||
params = soundcloud.request(query, dicto)
|
||||
self.assertIn('url', params)
|
||||
self.assertIn(query, params['url'])
|
||||
self.assertIn('soundcloud.com', params['url'])
|
||||
|
||||
def test_response(self):
|
||||
self.assertRaises(AttributeError, soundcloud.response, None)
|
||||
self.assertRaises(AttributeError, soundcloud.response, [])
|
||||
self.assertRaises(AttributeError, soundcloud.response, '')
|
||||
self.assertRaises(AttributeError, soundcloud.response, '[]')
|
||||
|
||||
response = mock.Mock(text='{}')
|
||||
self.assertEqual(soundcloud.response(response), [])
|
||||
|
||||
response = mock.Mock(text='{"data": []}')
|
||||
self.assertEqual(soundcloud.response(response), [])
|
||||
|
||||
json = """
|
||||
{
|
||||
"collection": [
|
||||
{
|
||||
"kind": "track",
|
||||
"id": 159723640,
|
||||
"created_at": "2014/07/22 00:51:21 +0000",
|
||||
"user_id": 2976616,
|
||||
"duration": 303780,
|
||||
"commentable": true,
|
||||
"state": "finished",
|
||||
"original_content_size": 13236349,
|
||||
"last_modified": "2015/01/31 15:14:50 +0000",
|
||||
"sharing": "public",
|
||||
"tag_list": "seekae flume",
|
||||
"permalink": "seekae-test-recognise-flume-re-work",
|
||||
"streamable": true,
|
||||
"embeddable_by": "all",
|
||||
"downloadable": true,
|
||||
"purchase_url": "http://www.facebook.com/seekaemusic",
|
||||
"label_id": null,
|
||||
"purchase_title": "Seekae",
|
||||
"genre": "freedownload",
|
||||
"title": "This is the title",
|
||||
"description": "This is the content",
|
||||
"label_name": "Future Classic",
|
||||
"release": "",
|
||||
"track_type": "remix",
|
||||
"key_signature": "",
|
||||
"isrc": "",
|
||||
"video_url": null,
|
||||
"bpm": null,
|
||||
"release_year": 2014,
|
||||
"release_month": 7,
|
||||
"release_day": 22,
|
||||
"original_format": "mp3",
|
||||
"license": "all-rights-reserved",
|
||||
"uri": "https://api.soundcloud.com/tracks/159723640",
|
||||
"user": {
|
||||
"id": 2976616,
|
||||
"kind": "user",
|
||||
"permalink": "flume",
|
||||
"username": "Flume",
|
||||
"last_modified": "2014/11/24 19:21:29 +0000",
|
||||
"uri": "https://api.soundcloud.com/users/2976616",
|
||||
"permalink_url": "http://soundcloud.com/flume",
|
||||
"avatar_url": "https://i1.sndcdn.com/avatars-000044475439-4zi7ii-large.jpg"
|
||||
},
|
||||
"permalink_url": "http://soundcloud.com/this.is.the.url",
|
||||
"artwork_url": "https://i1.sndcdn.com/artworks-000085857162-xdxy5c-large.jpg",
|
||||
"waveform_url": "https://w1.sndcdn.com/DWrL1lAN8BkP_m.png",
|
||||
"stream_url": "https://api.soundcloud.com/tracks/159723640/stream",
|
||||
"download_url": "https://api.soundcloud.com/tracks/159723640/download",
|
||||
"playback_count": 2190687,
|
||||
"download_count": 54856,
|
||||
"favoritings_count": 49061,
|
||||
"comment_count": 826,
|
||||
"likes_count": 49061,
|
||||
"reposts_count": 15910,
|
||||
"attachments_uri": "https://api.soundcloud.com/tracks/159723640/attachments",
|
||||
"policy": "ALLOW"
|
||||
}
|
||||
],
|
||||
"total_results": 375750,
|
||||
"next_href": "https://api.soundcloud.com/search?&q=test",
|
||||
"tx_id": ""
|
||||
}
|
||||
"""
|
||||
response = mock.Mock(text=json)
|
||||
results = soundcloud.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 1)
|
||||
self.assertEqual(results[0]['title'], 'This is the title')
|
||||
self.assertEqual(results[0]['url'], 'http://soundcloud.com/this.is.the.url')
|
||||
self.assertEqual(results[0]['content'], 'This is the content')
|
||||
self.assertIn(quote_plus('https://api.soundcloud.com/tracks/159723640'), results[0]['embedded'])
|
||||
|
||||
json = """
|
||||
{
|
||||
"collection": [
|
||||
{
|
||||
"kind": "user",
|
||||
"id": 159723640,
|
||||
"created_at": "2014/07/22 00:51:21 +0000",
|
||||
"user_id": 2976616,
|
||||
"duration": 303780,
|
||||
"commentable": true,
|
||||
"state": "finished",
|
||||
"original_content_size": 13236349,
|
||||
"last_modified": "2015/01/31 15:14:50 +0000",
|
||||
"sharing": "public",
|
||||
"tag_list": "seekae flume",
|
||||
"permalink": "seekae-test-recognise-flume-re-work",
|
||||
"streamable": true,
|
||||
"embeddable_by": "all",
|
||||
"downloadable": true,
|
||||
"purchase_url": "http://www.facebook.com/seekaemusic",
|
||||
"label_id": null,
|
||||
"purchase_title": "Seekae",
|
||||
"genre": "freedownload",
|
||||
"title": "This is the title",
|
||||
"description": "This is the content",
|
||||
"label_name": "Future Classic",
|
||||
"release": "",
|
||||
"track_type": "remix",
|
||||
"key_signature": "",
|
||||
"isrc": "",
|
||||
"video_url": null,
|
||||
"bpm": null,
|
||||
"release_year": 2014,
|
||||
"release_month": 7,
|
||||
"release_day": 22,
|
||||
"original_format": "mp3",
|
||||
"license": "all-rights-reserved",
|
||||
"uri": "https://api.soundcloud.com/tracks/159723640",
|
||||
"user": {
|
||||
"id": 2976616,
|
||||
"kind": "user",
|
||||
"permalink": "flume",
|
||||
"username": "Flume",
|
||||
"last_modified": "2014/11/24 19:21:29 +0000",
|
||||
"uri": "https://api.soundcloud.com/users/2976616",
|
||||
"permalink_url": "http://soundcloud.com/flume",
|
||||
"avatar_url": "https://i1.sndcdn.com/avatars-000044475439-4zi7ii-large.jpg"
|
||||
},
|
||||
"permalink_url": "http://soundcloud.com/this.is.the.url",
|
||||
"artwork_url": "https://i1.sndcdn.com/artworks-000085857162-xdxy5c-large.jpg",
|
||||
"waveform_url": "https://w1.sndcdn.com/DWrL1lAN8BkP_m.png",
|
||||
"stream_url": "https://api.soundcloud.com/tracks/159723640/stream",
|
||||
"download_url": "https://api.soundcloud.com/tracks/159723640/download",
|
||||
"playback_count": 2190687,
|
||||
"download_count": 54856,
|
||||
"favoritings_count": 49061,
|
||||
"comment_count": 826,
|
||||
"likes_count": 49061,
|
||||
"reposts_count": 15910,
|
||||
"attachments_uri": "https://api.soundcloud.com/tracks/159723640/attachments",
|
||||
"policy": "ALLOW"
|
||||
}
|
||||
],
|
||||
"total_results": 375750,
|
||||
"next_href": "https://api.soundcloud.com/search?&q=test",
|
||||
"tx_id": ""
|
||||
}
|
||||
"""
|
||||
response = mock.Mock(text=json)
|
||||
results = soundcloud.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 0)
|
||||
|
||||
json = """
|
||||
{
|
||||
"collection": [],
|
||||
"total_results": 375750,
|
||||
"next_href": "https://api.soundcloud.com/search?&q=test",
|
||||
"tx_id": ""
|
||||
}
|
||||
"""
|
||||
response = mock.Mock(text=json)
|
||||
results = soundcloud.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 0)
|
|
@@ -1,124 +0,0 @@
from collections import defaultdict
|
||||
import mock
|
||||
from searx.engines import spotify
|
||||
from searx.testing import SearxTestCase
|
||||
|
||||
|
||||
class TestSpotifyEngine(SearxTestCase):
|
||||
|
||||
def test_request(self):
|
||||
query = 'test_query'
|
||||
dicto = defaultdict(dict)
|
||||
dicto['pageno'] = 0
|
||||
params = spotify.request(query, dicto)
|
||||
self.assertIn('url', params)
|
||||
self.assertIn(query, params['url'])
|
||||
self.assertIn('spotify.com', params['url'])
|
||||
|
||||
def test_response(self):
|
||||
self.assertRaises(AttributeError, spotify.response, None)
|
||||
self.assertRaises(AttributeError, spotify.response, [])
|
||||
self.assertRaises(AttributeError, spotify.response, '')
|
||||
self.assertRaises(AttributeError, spotify.response, '[]')
|
||||
|
||||
response = mock.Mock(text='{}')
|
||||
self.assertEqual(spotify.response(response), [])
|
||||
|
||||
response = mock.Mock(text='{"data": []}')
|
||||
self.assertEqual(spotify.response(response), [])
|
||||
|
||||
json = """
|
||||
{
|
||||
"tracks": {
|
||||
"href": "https://api.spotify.com/v1/search?query=nosfell&offset=0&limit=20&type=track",
|
||||
"items": [
|
||||
{
|
||||
"album": {
|
||||
"album_type": "album",
|
||||
"external_urls": {
|
||||
"spotify": "https://open.spotify.com/album/5c9ap1PBkSGLxT3J73toxA"
|
||||
},
|
||||
"href": "https://api.spotify.com/v1/albums/5c9ap1PBkSGLxT3J73toxA",
|
||||
"id": "5c9ap1PBkSGLxT3J73toxA",
|
||||
"name": "Album Title",
|
||||
"type": "album",
|
||||
"uri": "spotify:album:5c9ap1PBkSGLxT3J73toxA"
|
||||
},
|
||||
"artists": [
|
||||
{
|
||||
"external_urls": {
|
||||
"spotify": "https://open.spotify.com/artist/0bMc6b75FfZEpQHG1jifKu"
|
||||
},
|
||||
"href": "https://api.spotify.com/v1/artists/0bMc6b75FfZEpQHG1jifKu",
|
||||
"id": "0bMc6b75FfZEpQHG1jifKu",
|
||||
"name": "Artist Name",
|
||||
"type": "artist",
|
||||
"uri": "spotify:artist:0bMc6b75FfZEpQHG1jifKu"
|
||||
}
|
||||
],
|
||||
"disc_number": 1,
|
||||
"duration_ms": 202386,
|
||||
"explicit": false,
|
||||
"external_ids": {
|
||||
"isrc": "FRV640600067"
|
||||
},
|
||||
"external_urls": {
|
||||
"spotify": "https://open.spotify.com/track/2GzvFiedqW8hgqUpWcASZa"
|
||||
},
|
||||
"href": "https://api.spotify.com/v1/tracks/2GzvFiedqW8hgqUpWcASZa",
|
||||
"id": "1000",
|
||||
"is_playable": true,
|
||||
"name": "Title of track",
|
||||
"popularity": 6,
|
||||
"preview_url": "https://p.scdn.co/mp3-preview/7b8ecda580965a066b768c2647f877e43f7b1a0a",
|
||||
"track_number": 3,
|
||||
"type": "track",
|
||||
"uri": "spotify:track:2GzvFiedqW8hgqUpWcASZa"
|
||||
}
|
||||
],
|
||||
"limit": 20,
|
||||
"next": "https://api.spotify.com/v1/search?query=nosfell&offset=20&limit=20&type=track",
|
||||
"offset": 0,
|
||||
"previous": null,
|
||||
"total": 107
|
||||
}
|
||||
}
|
||||
"""
|
||||
response = mock.Mock(text=json)
|
||||
results = spotify.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 1)
|
||||
self.assertEqual(results[0]['title'], 'Title of track')
|
||||
self.assertEqual(results[0]['url'], 'https://open.spotify.com/track/2GzvFiedqW8hgqUpWcASZa')
|
||||
self.assertEqual(results[0]['content'], 'Artist Name - Album Title - Title of track')
|
||||
self.assertIn('1000', results[0]['embedded'])
|
||||
|
||||
json = """
|
||||
{
|
||||
"tracks": {
|
||||
"href": "https://api.spotify.com/v1/search?query=nosfell&offset=0&limit=20&type=track",
|
||||
"items": [
|
||||
{
|
||||
"href": "https://api.spotify.com/v1/tracks/2GzvFiedqW8hgqUpWcASZa",
|
||||
"id": "1000",
|
||||
"is_playable": true,
|
||||
"name": "Title of track",
|
||||
"popularity": 6,
|
||||
"preview_url": "https://p.scdn.co/mp3-preview/7b8ecda580965a066b768c2647f877e43f7b1a0a",
|
||||
"track_number": 3,
|
||||
"type": "album",
|
||||
"uri": "spotify:track:2GzvFiedqW8hgqUpWcASZa"
|
||||
}
|
||||
],
|
||||
"limit": 20,
|
||||
"next": "https://api.spotify.com/v1/search?query=nosfell&offset=20&limit=20&type=track",
|
||||
"offset": 0,
|
||||
"previous": null,
|
||||
"total": 107
|
||||
}
|
||||
}
|
||||
"""
|
||||
response = mock.Mock(text=json)
|
||||
results = spotify.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 0)
|
|
@@ -1,106 +0,0 @@
from collections import defaultdict
|
||||
import mock
|
||||
from searx.engines import stackoverflow
|
||||
from searx.testing import SearxTestCase
|
||||
|
||||
|
||||
class TestStackoverflowEngine(SearxTestCase):
|
||||
|
||||
def test_request(self):
|
||||
query = 'test_query'
|
||||
dicto = defaultdict(dict)
|
||||
dicto['pageno'] = 0
|
||||
params = stackoverflow.request(query, dicto)
|
||||
self.assertTrue('url' in params)
|
||||
self.assertTrue(query in params['url'])
|
||||
self.assertTrue('stackoverflow.com' in params['url'])
|
||||
|
||||
def test_response(self):
|
||||
self.assertRaises(AttributeError, stackoverflow.response, None)
|
||||
self.assertRaises(AttributeError, stackoverflow.response, [])
|
||||
self.assertRaises(AttributeError, stackoverflow.response, '')
|
||||
self.assertRaises(AttributeError, stackoverflow.response, '[]')
|
||||
|
||||
response = mock.Mock(text='<html></html>')
|
||||
self.assertEqual(stackoverflow.response(response), [])
|
||||
|
||||
html = """
|
||||
<div class="question-summary search-result" id="answer-id-1783426">
|
||||
<div class="statscontainer">
|
||||
<div class="statsarrow"></div>
|
||||
<div class="stats">
|
||||
<div class="vote">
|
||||
<div class="votes answered">
|
||||
<span class="vote-count-post "><strong>2583</strong></span>
|
||||
<div class="viewcount">votes</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="summary">
|
||||
<div class="result-link">
|
||||
<span>
|
||||
<a href="/questions/this.is.the.url"
|
||||
data-searchsession="/questions"
|
||||
title="Checkout remote Git branch">
|
||||
This is the title
|
||||
</a>
|
||||
</span>
|
||||
</div>
|
||||
<div class="excerpt">
|
||||
This is the content
|
||||
</div>
|
||||
<div class="tags user-tags t-git t-git-checkout t-remote-branch">
|
||||
</div>
|
||||
<div class="started fr">
|
||||
answered <span title="2009-11-23 14:26:08Z" class="relativetime">nov 23 '09</span> by
|
||||
<a href="/users/214090/hallski">hallski</a>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
"""
|
||||
response = mock.Mock(text=html)
|
||||
results = stackoverflow.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 1)
|
||||
self.assertEqual(results[0]['title'], 'This is the title')
|
||||
self.assertEqual(results[0]['url'], 'https://stackoverflow.com/questions/this.is.the.url')
|
||||
self.assertEqual(results[0]['content'], 'This is the content')
|
||||
|
||||
html = """
|
||||
<div class="statscontainer">
|
||||
<div class="statsarrow"></div>
|
||||
<div class="stats">
|
||||
<div class="vote">
|
||||
<div class="votes answered">
|
||||
<span class="vote-count-post "><strong>2583</strong></span>
|
||||
<div class="viewcount">votes</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="summary">
|
||||
<div class="result-link">
|
||||
<span>
|
||||
<a href="/questions/this.is.the.url"
|
||||
data-searchsession="/questions"
|
||||
title="Checkout remote Git branch">
|
||||
This is the title
|
||||
</a>
|
||||
</span>
|
||||
</div>
|
||||
<div class="excerpt">
|
||||
This is the content
|
||||
</div>
|
||||
<div class="tags user-tags t-git t-git-checkout t-remote-branch">
|
||||
</div>
|
||||
<div class="started fr">
|
||||
answered <span title="2009-11-23 14:26:08Z" class="relativetime">nov 23 '09</span> by
|
||||
<a href="/users/214090/hallski">hallski</a>
|
||||
</div>
|
||||
</div>
|
||||
"""
|
||||
response = mock.Mock(text=html)
|
||||
results = stackoverflow.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 0)
|
|
@@ -1,67 +0,0 @@
# -*- coding: utf-8 -*-
|
||||
from collections import defaultdict
|
||||
import mock
|
||||
from searx.engines import startpage
|
||||
from searx.testing import SearxTestCase
|
||||
|
||||
|
||||
class TestStartpageEngine(SearxTestCase):
|
||||
|
||||
def test_request(self):
|
||||
query = 'test_query'
|
||||
dicto = defaultdict(dict)
|
||||
dicto['pageno'] = 1
|
||||
dicto['language'] = 'fr_FR'
|
||||
params = startpage.request(query, dicto)
|
||||
self.assertIn('url', params)
|
||||
self.assertIn('startpage.com', params['url'])
|
||||
self.assertIn('data', params)
|
||||
self.assertIn('query', params['data'])
|
||||
self.assertIn(query, params['data']['query'])
|
||||
|
||||
dicto['language'] = 'all'
|
||||
params = startpage.request(query, dicto)
|
||||
|
||||
def test_response(self):
|
||||
self.assertRaises(AttributeError, startpage.response, None)
|
||||
self.assertRaises(AttributeError, startpage.response, [])
|
||||
self.assertRaises(AttributeError, startpage.response, '')
|
||||
self.assertRaises(AttributeError, startpage.response, '[]')
|
||||
|
||||
response = mock.Mock(text='<html></html>')
|
||||
self.assertEqual(startpage.response(response), [])
|
||||
|
||||
html = """
|
||||
<div class="w-gl__result">
|
||||
<a
|
||||
class="w-gl__result-title"
|
||||
href="http://this.should.be.the.link/"
|
||||
data-onw="1"
|
||||
rel="noopener noreferrer"
|
||||
target="_blank">
|
||||
|
||||
<h3>This should be the title</h3>
|
||||
</a>
|
||||
<div class="w-gl__result-second-line-container">
|
||||
<div class="w-gl__result-url-container">
|
||||
<a
|
||||
class="w-gl__result-url"
|
||||
href="http://this.should.be.the.link/"
|
||||
rel="noopener noreferrer"
|
||||
target="_blank">https://www.cnbc.com/2019/10/12/dj-zedd-banned-in-china-for-liking-a-south-park-tweet.html</a>
|
||||
</div>
|
||||
<a
|
||||
class="w-gl__anonymous-view-url"
|
||||
href="https://eu-browse.startpage.com/do/proxy?ep=556b554d576b6f5054554546423167764b5445616455554d5342675441774659495246304848774f5267385453304941486b5949546c63704e33774f526b705544565647516d4a61554246304847674f4a556f6957415a4f436b455042426b6b4f7a64535a52784a56514a4f45307743446c567250445a4f4c52514e5677554e46776b4b545563704c7931554c5167465467644f42464d4f4255426f4d693152624634525741305845526c595746636b626d67494e42705743466c515252634f4267456e597a7346596b7856435134465345634f564249794b5752785643315863546769515773764a5163494c5877505246315865456f5141426b4f41774167596d6c5a4e30395758773442465251495677596c624770665a6b786344466b4151455663425249794d6a78525a55554157516f4342556766526b51314b57514e&ek=4q58686o5047786n6343527259445247576p6o38&ekdata=84abd523dc13cba5c65164d04d7d7263"
|
||||
target="_blank">Anonymous View</a>
|
||||
</div>
|
||||
<p class="w-gl__description">This should be the content.</p>
|
||||
</div>
|
||||
""" # noqa
|
||||
response = mock.Mock(text=html.encode('utf-8'))
|
||||
results = startpage.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 1)
|
||||
self.assertEqual(results[0]['title'], 'This should be the title')
|
||||
self.assertEqual(results[0]['url'], 'http://this.should.be.the.link/')
|
||||
self.assertEqual(results[0]['content'], 'This should be the content.')
|
|
@@ -1,110 +0,0 @@
import mock
from collections import defaultdict
from searx.engines import tokyotoshokan
from searx.testing import SearxTestCase
from datetime import datetime


class TestTokyotoshokanEngine(SearxTestCase):

    def test_request(self):
        query = 'test_query'
        dic = defaultdict(dict)
        dic['pageno'] = 1
        params = tokyotoshokan.request(query, dic)
        self.assertTrue('url' in params)
        self.assertTrue(query in params['url'])
        self.assertTrue('tokyotosho.info' in params['url'])

    def test_response(self):
        resp = mock.Mock(text='<html></html>')
        self.assertEqual(tokyotoshokan.response(resp), [])

        html = """
        <table class="listing">
          <tbody>
            <tr class="shade category_0">
              <td rowspan="2">
                <a href="/?cat=7"><span class="sprite_cat-raw"></span></a>
              </td>
              <td class="desc-top">
                <a href="magnet:?xt=urn:btih:4c19eb46b5113685fbd2288ed2531b0b">
                  <span class="sprite_magnet"></span>
                </a>
                <a rel="nofollow" type="application/x-bittorrent" href="http://www.nyaa.se/f">
                  Koyomimonogatari
                </a>
              </td>
              <td class="web"><a rel="nofollow" href="details.php?id=975700">Details</a></td>
            </tr>
            <tr class="shade category_0">
              <td class="desc-bot">
                Authorized: <span class="auth_ok">Yes</span>
                Submitter: <a href="?username=Ohys">Ohys</a> |
                Size: 10.5MB |
                Date: 2016-03-26 16:41 UTC |
                Comment: sample comment
              </td>
              <td style="color: #BBB; font-family: monospace" class="stats" align="right">
                S: <span style="color: red">53</span>
                L: <span style="color: red">18</span>
                C: <span style="color: red">0</span>
                ID: 975700
              </td>
            </tr>

            <tr class="category_0">
              <td rowspan="2">
                <a href="/?cat=7"><span class="sprite_cat-raw"></span></a>
              </td>
              <td class="desc-top">
                <a rel="nofollow" type="application/x-bittorrent" href="http://google.com/q">
                  Owarimonogatari
                </a>
              </td>
              <td class="web"><a rel="nofollow" href="details.php?id=975700">Details</a></td>
            </tr>
            <tr class="category_0">
              <td class="desc-bot">
                Submitter: <a href="?username=Ohys">Ohys</a> |
                Size: 932.84EB |
                Date: QWERTY-03-26 16:41 UTC
              </td>
              <td style="color: #BBB; font-family: monospace" class="stats" align="right">
                S: <span style="color: red">0</span>
              </td>
            </tr>
          </tbody>
        </table>
        """

        resp = mock.Mock(text=html)
        results = tokyotoshokan.response(resp)

        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 2)

        # testing the first result, which has correct format
        # and should have all information fields filled
        r = results[0]
        self.assertEqual(r['url'], 'http://www.nyaa.se/f')
        self.assertEqual(r['title'], 'Koyomimonogatari')
        self.assertEqual(r['magnetlink'], 'magnet:?xt=urn:btih:4c19eb46b5113685fbd2288ed2531b0b')
        self.assertEqual(r['filesize'], int(1024 * 1024 * 10.5))
        self.assertEqual(r['publishedDate'], datetime(2016, 3, 26, 16, 41))
        self.assertEqual(r['content'], 'Comment: sample comment')
        self.assertEqual(r['seed'], 53)
        self.assertEqual(r['leech'], 18)

        # testing the second result, which does not include magnet link,
        # seed & leech info, and has incorrect size & creation date
        r = results[1]
        self.assertEqual(r['url'], 'http://google.com/q')
        self.assertEqual(r['title'], 'Owarimonogatari')

        self.assertFalse('magnetlink' in r)
        self.assertFalse('filesize' in r)
        self.assertFalse('content' in r)
        self.assertFalse('publishedDate' in r)
        self.assertFalse('seed' in r)
        self.assertFalse('leech' in r)
@@ -1,87 +0,0 @@
import mock
from collections import defaultdict
from searx.engines import torrentz
from searx.testing import SearxTestCase
from datetime import datetime


class TestTorrentzEngine(SearxTestCase):

    def test_request(self):
        query = 'test_query'
        dic = defaultdict(dict)
        dic['pageno'] = 1
        params = torrentz.request(query, dic)
        self.assertTrue('url' in params)
        self.assertTrue(query in params['url'])
        self.assertTrue('torrentz2.eu' in params['url'])

    def test_response(self):
        resp = mock.Mock(text='<html></html>')
        self.assertEqual(torrentz.response(resp), [])

        html = """
        <div class="results">
          <dl>
            <dt>
              <a href="/4362e08b1d80e1820fb2550b752f9f3126fe76d6">
                Completely valid info
              </a>
              books ebooks
            </dt>
            <dd>
              <span>1</span>
              <span title="1503595924">5 hours</span>
              <span>30 MB</span>
              <span>14</span>
              <span>1</span>
            </dd>
          </dl>

          <dl>
            <dt>
              <a href="/poaskdpokaspod">
                Invalid hash and date and filesize
              </a>
              books ebooks
            </dt>
            <dd>
              <span>1</span>
              <span title="1503595924 aaa">5 hours</span>
              <span>30MB</span>
              <span>5,555</span>
              <span>1,234,567</span>
            </dd>
          </dl>
        </div>
        """

        resp = mock.Mock(text=html)
        results = torrentz.response(resp)

        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 2)

        # testing against the first result
        r = results[0]
        self.assertEqual(r['url'], 'https://torrentz2.eu/4362e08b1d80e1820fb2550b752f9f3126fe76d6')
        self.assertEqual(r['title'], 'Completely valid info books ebooks')
        # 22 Nov 2015 03:01:42
        self.assertEqual(r['publishedDate'], datetime.fromtimestamp(1503595924))
        self.assertEqual(r['seed'], 14)
        self.assertEqual(r['leech'], 1)
        self.assertEqual(r['filesize'], 30 * 1024 * 1024)
        self.assertEqual(r['magnetlink'], 'magnet:?xt=urn:btih:4362e08b1d80e1820fb2550b752f9f3126fe76d6')

        # testing against the second result
        r = results[1]
        self.assertEqual(r['url'], 'https://torrentz2.eu/poaskdpokaspod')
        self.assertEqual(r['title'], 'Invalid hash and date and filesize books ebooks')
        self.assertEqual(r['seed'], 5555)
        self.assertEqual(r['leech'], 1234567)

        # in the second result we have invalid hash, creation date & torrent size,
        # so these tests should fail
        self.assertFalse('magnetlink' in r)
        self.assertFalse('filesize' in r)
        self.assertFalse('publishedDate' in r)
@@ -1,502 +0,0 @@
# -*- coding: utf-8 -*-
|
||||
from collections import defaultdict
|
||||
import mock
|
||||
from searx.engines import twitter
|
||||
from searx.testing import SearxTestCase
|
||||
|
||||
|
||||
class TestTwitterEngine(SearxTestCase):
|
||||
|
||||
def test_request(self):
|
||||
query = 'test_query'
|
||||
dicto = defaultdict(dict)
|
||||
dicto['pageno'] = 0
|
||||
dicto['language'] = 'fr_FR'
|
||||
params = twitter.request(query, dicto)
|
||||
self.assertIn('url', params)
|
||||
self.assertIn(query, params['url'])
|
||||
self.assertIn('twitter.com', params['url'])
|
||||
self.assertIn('cookies', params)
|
||||
self.assertIn('lang', params['cookies'])
|
||||
self.assertIn('fr', params['cookies']['lang'])
|
||||
|
||||
dicto['language'] = 'all'
|
||||
params = twitter.request(query, dicto)
|
||||
self.assertIn('cookies', params)
|
||||
self.assertIn('lang', params['cookies'])
|
||||
self.assertIn('en', params['cookies']['lang'])
|
||||
|
||||
def test_response(self):
|
||||
self.assertRaises(AttributeError, twitter.response, None)
|
||||
self.assertRaises(AttributeError, twitter.response, [])
|
||||
self.assertRaises(AttributeError, twitter.response, '')
|
||||
self.assertRaises(AttributeError, twitter.response, '[]')
|
||||
|
||||
response = mock.Mock(text='<html></html>')
|
||||
self.assertEqual(twitter.response(response), [])
|
||||
|
||||
html = """
|
||||
<li class="js-stream-item stream-item stream-item expanding-stream-item" data-item-id="563005573290287105"
|
||||
id="stream-item-tweet-563005573290287105" data-item-type="tweet">
|
||||
<div class="tweet original-tweet js-stream-tweet js-actionable-tweet js-profile-popup-actionable
|
||||
js-original-tweet has-cards has-native-media" data-tweet-id="563005573290287105" data-disclosure-type=""
|
||||
data-item-id="563005573290287105" data-screen-name="Jalopnik" data-name="Jalopnik"
|
||||
data-user-id="3060631" data-has-native-media="true" data-has-cards="true" data-card-type="photo"
|
||||
data-expanded-footer="<div class="js-tweet-details-fixer
|
||||
tweet-details-fixer">
|
||||
<div class="cards-media-container js-media-container"><div
|
||||
data-card-url="//twitter.com/Jalopnik/status/563005573290287105/photo/1" data-card-type="
|
||||
photo" class="cards-base cards-multimedia" data-element-context="platform_photo_card
|
||||
"> <a class="media media-thumbnail twitter-timeline-link is-preview
|
||||
" data-url="https://pbs.twimg.com/media/B9Aylf5IMAAuziP.jpg:large"
|
||||
data-resolved-url-large="https://pbs.twimg.com/media/B9Aylf5IMAAuziP.jpg:large"
|
||||
href="//twitter.com/Jalopnik/status/563005573290287105/photo/1">
|
||||
<div class=""> <img src="
|
||||
https://pbs.twimg.com/media/B9Aylf5IMAAuziP.jpg"
|
||||
alt="Embedded image permalink" width="636" height="309">
|
||||
</div> </a> <div class="cards-content">
|
||||
<div class="byline"> </div> </div>
|
||||
</div> </div> <div
|
||||
class="js-machine-translated-tweet-container"></div> <div
|
||||
class="js-tweet-stats-container tweet-stats-container "> </div>
|
||||
<div class="client-and-actions"> <span class="metadata">
|
||||
<span>5:06 PM - 4 Feb 2015</span> &middot; <a
|
||||
class="permalink-link js-permalink js-nav" href="/Jalopnik/status/563005573290287105
|
||||
"tabindex="-1">Details</a>
|
||||
</span> </div> </div> " data-you-follow="false"
|
||||
data-you-block="false">
|
||||
<div class="context">
|
||||
</div>
|
||||
<div class="content">
|
||||
<div class="stream-item-header">
|
||||
<a class="account-group js-account-group js-action-profile js-user-profile-link js-nav"
|
||||
href="/Jalopnik" data-user-id="3060631">
|
||||
<img class="avatar js-action-profile-avatar"
|
||||
src="https://pbs.twimg.com/profile_images/2976430168/5cd4a59_bigger.jpeg" alt="">
|
||||
<strong class="fullname js-action-profile-name show-popup-with-id" data-aria-label-part>
|
||||
Jalopnik
|
||||
</strong>
|
||||
<span>‏</span>
|
||||
<span class="username js-action-profile-name" data-aria-label-part>
|
||||
<s>@</s><b>TitleName</b>
|
||||
</span>
|
||||
</a>
|
||||
<small class="time">
|
||||
<a href="/this.is.the.url"
|
||||
class="tweet-timestamp js-permalink js-nav js-tooltip" title="5:06 PM - 4 Feb 2015" >
|
||||
<span class="u-hiddenVisually" data-aria-label-part="last">17 minutes ago</span>
|
||||
</a>
|
||||
</small>
|
||||
</div>
|
||||
<p class="js-tweet-text tweet-text" lang="en" data-aria-label-part="0">
|
||||
This is the content étude à€
|
||||
<a href="http://t.co/nRWsqQAwBL" rel="nofollow" dir="ltr"
|
||||
data-expanded-url="http://jalo.ps/ReMENu4" class="twitter-timeline-link"
|
||||
target="_blank" title="http://jalo.ps/ReMENu4" >
|
||||
<span class="tco-ellipsis">
|
||||
</span>
|
||||
<span class="invisible">http://</span><span class="js-display-url">link.in.tweet</span>
|
||||
<span class="invisible"></span>
|
||||
<span class="tco-ellipsis">
|
||||
<span class="invisible"> </span>
|
||||
</span>
|
||||
</a>
|
||||
<a href="http://t.co/rbFsfeE0l3" class="twitter-timeline-link u-hidden"
|
||||
data-pre-embedded="true" dir="ltr">
|
||||
pic.twitter.com/rbFsfeE0l3
|
||||
</a>
|
||||
</p>
|
||||
<div class="expanded-content js-tweet-details-dropdown">
|
||||
</div>
|
||||
<div class="stream-item-footer">
|
||||
<a class="details with-icn js-details" href="/Jalopnik/status/563005573290287105">
|
||||
<span class="Icon Icon--photo">
|
||||
</span>
|
||||
<b>
|
||||
<span class="expand-stream-item js-view-details">
|
||||
View photo
|
||||
</span>
|
||||
<span class="collapse-stream-item js-hide-details">
|
||||
Hide photo
|
||||
</span>
|
||||
</b>
|
||||
</a>
|
||||
<span class="ProfileTweet-action--reply u-hiddenVisually">
|
||||
<span class="ProfileTweet-actionCount" aria-hidden="true" data-tweet-stat-count="0">
|
||||
<span class="ProfileTweet-actionCountForAria" >0 replies</span>
|
||||
</span>
|
||||
</span>
|
||||
<span class="ProfileTweet-action--retweet u-hiddenVisually">
|
||||
<span class="ProfileTweet-actionCount" data-tweet-stat-count="8">
|
||||
<span class="ProfileTweet-actionCountForAria" data-aria-label-part>8 retweets</span>
|
||||
</span>
|
||||
</span>
|
||||
<span class="ProfileTweet-action--favorite u-hiddenVisually">
|
||||
<span class="ProfileTweet-actionCount" data-tweet-stat-count="14">
|
||||
<span class="ProfileTweet-actionCountForAria" data-aria-label-part>14 favorites</span>
|
||||
</span>
|
||||
</span>
|
||||
<div role="group" aria-label="Tweet actions" class="ProfileTweet-actionList u-cf js-actions">
|
||||
<div class="ProfileTweet-action ProfileTweet-action--reply">
|
||||
<button class="ProfileTweet-actionButton u-textUserColorHover js-actionButton
|
||||
js-actionReply" data-modal="ProfileTweet-reply" type="button" title="Reply">
|
||||
<span class="Icon Icon--reply">
|
||||
</span>
|
||||
<span class="u-hiddenVisually">Reply</span>
|
||||
<span class="ProfileTweet-actionCount u-textUserColorHover
|
||||
ProfileTweet-actionCount--isZero">
|
||||
<span class="ProfileTweet-actionCountForPresentation" aria-hidden="true">
|
||||
</span>
|
||||
</span>
|
||||
</button>
|
||||
</div>
|
||||
<div class="ProfileTweet-action ProfileTweet-action--retweet js-toggleState js-toggleRt">
|
||||
<button class="ProfileTweet-actionButton js-actionButton js-actionRetweet js-tooltip"
|
||||
title="Retweet" data-modal="ProfileTweet-retweet" type="button">
|
||||
<span class="Icon Icon--retweet">
|
||||
</span>
|
||||
<span class="u-hiddenVisually">Retweet</span>
|
||||
<span class="ProfileTweet-actionCount">
|
||||
<span class="ProfileTweet-actionCountForPresentation">8</span>
|
||||
</span>
|
||||
</button>
|
||||
<button class="ProfileTweet-actionButtonUndo js-actionButton js-actionRetweet"
|
||||
data-modal="ProfileTweet-retweet" title="Undo retweet" type="button">
|
||||
<span class="Icon Icon--retweet">
|
||||
</span>
|
||||
<span class="u-hiddenVisually">Retweeted</span>
|
||||
<span class="ProfileTweet-actionCount">
|
||||
<span class="ProfileTweet-actionCountForPresentation">8</span>
|
||||
</span>
|
||||
</button>
|
||||
</div>
|
||||
<div class="ProfileTweet-action ProfileTweet-action--favorite js-toggleState">
|
||||
<button class="ProfileTweet-actionButton js-actionButton js-actionFavorite js-tooltip"
|
||||
title="Favorite" type="button">
|
||||
<span class="Icon Icon--favorite">
|
||||
</span>
|
||||
<span class="u-hiddenVisually">Favorite</span>
|
||||
<span class="ProfileTweet-actionCount">
|
||||
<span class="ProfileTweet-actionCountForPresentation">14</span>
|
||||
</span>
|
||||
</button>
|
||||
<button class="ProfileTweet-actionButtonUndo u-linkClean js-actionButton
|
||||
js-actionFavorite" title="Undo favorite" type="button">
|
||||
<span class="Icon Icon--favorite">
|
||||
</span>
|
||||
<span class="u-hiddenVisually">Favorited</span>
|
||||
<span class="ProfileTweet-actionCount">
|
||||
<span class="ProfileTweet-actionCountForPresentation">
|
||||
14
|
||||
</span>
|
||||
</span>
|
||||
</button>
|
||||
</div>
|
||||
<div class="ProfileTweet-action ProfileTweet-action--more js-more-ProfileTweet-actions">
|
||||
<div class="dropdown">
|
||||
<button class="ProfileTweet-actionButton u-textUserColorHover dropdown-toggle
|
||||
js-tooltip js-dropdown-toggle" type="button" title="More">
|
||||
<span class="Icon Icon--dots">
|
||||
</span>
|
||||
<span class="u-hiddenVisually">More</span>
|
||||
</button>
|
||||
<div class="dropdown-menu">
|
||||
<div class="dropdown-caret">
|
||||
<div class="caret-outer">
|
||||
</div>
|
||||
<div class="caret-inner">
|
||||
</div>
|
||||
</div>
|
||||
<ul>
|
||||
<li class="share-via-dm js-actionShareViaDM" data-nav="share_tweet_dm">
|
||||
<button type="button" class="dropdown-link">
|
||||
Share via Direct Message
|
||||
</button>
|
||||
</li>
|
||||
<li class="embed-link js-actionEmbedTweet" data-nav="embed_tweet">
|
||||
<button type="button" class="dropdown-link">
|
||||
Embed Tweet
|
||||
</button>
|
||||
</li>
|
||||
<li class="mute-user-item pretty-link">
|
||||
<button type="button" class="dropdown-link">
|
||||
Mute
|
||||
</button>
|
||||
</li>
|
||||
<li class="unmute-user-item pretty-link">
|
||||
<button type="button" class="dropdown-link">
|
||||
Unmute
|
||||
</button>
|
||||
</li>
|
||||
<li class="block-or-report-link js-actionBlockOrReport"
|
||||
data-nav="block_or_report">
|
||||
<button type="button" class="dropdown-link">
|
||||
Block or report
|
||||
</button>
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</li>
|
||||
"""
|
||||
response = mock.Mock(text=html)
|
||||
results = twitter.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 1)
|
||||
self.assertEqual(results[0]['title'], '@TitleName')
|
||||
self.assertEqual(results[0]['url'], 'https://twitter.com/this.is.the.url')
|
||||
self.assertIn(u'This is the content', results[0]['content'])
|
||||
# self.assertIn(u'This is the content étude à€', results[0]['content'])
|
||||
|
||||
html = """
|
||||
<li class="js-stream-item stream-item stream-item expanding-stream-item" data-item-id="563005573290287105"
|
||||
id="stream-item-tweet-563005573290287105" data-item-type="tweet">
|
||||
<div class="tweet original-tweet js-stream-tweet js-actionable-tweet js-profile-popup-actionable
|
||||
js-original-tweet has-cards has-native-media" data-tweet-id="563005573290287105" data-disclosure-type=""
|
||||
data-item-id="563005573290287105" data-screen-name="Jalopnik" data-name="Jalopnik"
|
||||
data-user-id="3060631" data-has-native-media="true" data-has-cards="true" data-card-type="photo"
|
||||
data-expanded-footer="<div class="js-tweet-details-fixer
|
||||
tweet-details-fixer">
|
||||
<div class="cards-media-container js-media-container"><div
|
||||
data-card-url="//twitter.com/Jalopnik/status/563005573290287105/photo/1" data-card-type="
|
||||
photo" class="cards-base cards-multimedia" data-element-context="platform_photo_card
|
||||
"> <a class="media media-thumbnail twitter-timeline-link is-preview
|
||||
" data-url="https://pbs.twimg.com/media/B9Aylf5IMAAuziP.jpg:large"
|
||||
data-resolved-url-large="https://pbs.twimg.com/media/B9Aylf5IMAAuziP.jpg:large"
|
||||
href="//twitter.com/Jalopnik/status/563005573290287105/photo/1">
|
||||
<div class=""> <img src="
|
||||
https://pbs.twimg.com/media/B9Aylf5IMAAuziP.jpg"
|
||||
alt="Embedded image permalink" width="636" height="309">
|
||||
</div> </a> <div class="cards-content">
|
||||
<div class="byline"> </div> </div>
|
||||
</div> </div> <div
|
||||
class="js-machine-translated-tweet-container"></div> <div
|
||||
class="js-tweet-stats-container tweet-stats-container "> </div>
|
||||
<div class="client-and-actions"> <span class="metadata">
|
||||
<span>5:06 PM - 4 Feb 2015</span> &middot; <a
|
||||
class="permalink-link js-permalink js-nav" href="/Jalopnik/status/563005573290287105
|
||||
"tabindex="-1">Details</a>
|
||||
</span> </div> </div> " data-you-follow="false"
|
||||
data-you-block="false">
|
||||
<div class="context">
|
||||
</div>
|
||||
<div class="content">
|
||||
<div class="stream-item-header">
|
||||
<a class="account-group js-account-group js-action-profile js-user-profile-link js-nav"
|
||||
href="/Jalopnik" data-user-id="3060631">
|
||||
<img class="avatar js-action-profile-avatar"
|
||||
src="https://pbs.twimg.com/profile_images/2976430168/5cd4a59_bigger.jpeg" alt="">
|
||||
<strong class="fullname js-action-profile-name show-popup-with-id" data-aria-label-part>
|
||||
Jalopnik
|
||||
</strong>
|
||||
<span>‏</span>
|
||||
<span class="username js-action-profile-name" data-aria-label-part>
|
||||
<s>@</s><b>TitleName</b>
|
||||
</span>
|
||||
</a>
|
||||
<small class="time">
|
||||
<a href="/this.is.the.url"
|
||||
class="tweet-timestamp js-permalink js-nav js-tooltip" title="5:06 PM - 4 Feb 2015" >
|
||||
<span class="_timestamp js-short-timestamp js-relative-timestamp" data-time="1423065963"
|
||||
data-time-ms="1423065963000" data-long-form="true" aria-hidden="true">
|
||||
17m
|
||||
</span>
|
||||
<span class="u-hiddenVisually" data-aria-label-part="last">17 minutes ago</span>
|
||||
</a>
|
||||
</small>
|
||||
</div>
|
||||
<p class="js-tweet-text tweet-text" lang="en" data-aria-label-part="0">
|
||||
This is the content étude à€
|
||||
<a href="http://t.co/nRWsqQAwBL" rel="nofollow" dir="ltr"
|
||||
data-expanded-url="http://jalo.ps/ReMENu4" class="twitter-timeline-link"
|
||||
target="_blank" title="http://jalo.ps/ReMENu4" >
|
||||
<span class="tco-ellipsis">
|
||||
</span>
|
||||
<span class="invisible">http://</span><span class="js-display-url">link.in.tweet</span>
|
||||
<span class="invisible"></span>
|
||||
<span class="tco-ellipsis">
|
||||
<span class="invisible"> </span>
|
||||
</span>
|
||||
</a>
|
||||
<a href="http://t.co/rbFsfeE0l3" class="twitter-timeline-link u-hidden"
|
||||
data-pre-embedded="true" dir="ltr">
|
||||
pic.twitter.com/rbFsfeE0l3
|
||||
</a>
|
||||
</p>
|
||||
<div class="expanded-content js-tweet-details-dropdown">
|
||||
</div>
|
||||
<div class="stream-item-footer">
|
||||
<a class="details with-icn js-details" href="/Jalopnik/status/563005573290287105">
|
||||
<span class="Icon Icon--photo">
|
||||
</span>
|
||||
<b>
|
||||
<span class="expand-stream-item js-view-details">
|
||||
View photo
|
||||
</span>
|
||||
<span class="collapse-stream-item js-hide-details">
|
||||
Hide photo
|
||||
</span>
|
||||
</b>
|
||||
</a>
|
||||
<span class="ProfileTweet-action--reply u-hiddenVisually">
|
||||
<span class="ProfileTweet-actionCount" aria-hidden="true" data-tweet-stat-count="0">
|
||||
<span class="ProfileTweet-actionCountForAria" >0 replies</span>
|
||||
</span>
|
||||
</span>
|
||||
<span class="ProfileTweet-action--retweet u-hiddenVisually">
|
||||
<span class="ProfileTweet-actionCount" data-tweet-stat-count="8">
|
||||
<span class="ProfileTweet-actionCountForAria" data-aria-label-part>8 retweets</span>
|
||||
</span>
|
||||
</span>
|
||||
<span class="ProfileTweet-action--favorite u-hiddenVisually">
|
||||
<span class="ProfileTweet-actionCount" data-tweet-stat-count="14">
|
||||
<span class="ProfileTweet-actionCountForAria" data-aria-label-part>14 favorites</span>
|
||||
</span>
|
||||
</span>
|
||||
<div role="group" aria-label="Tweet actions" class="ProfileTweet-actionList u-cf js-actions">
|
||||
<div class="ProfileTweet-action ProfileTweet-action--reply">
|
||||
<button class="ProfileTweet-actionButton u-textUserColorHover js-actionButton
|
||||
js-actionReply" data-modal="ProfileTweet-reply" type="button" title="Reply">
|
||||
<span class="Icon Icon--reply">
|
||||
</span>
|
||||
<span class="u-hiddenVisually">Reply</span>
|
||||
<span class="ProfileTweet-actionCount u-textUserColorHover
|
||||
ProfileTweet-actionCount--isZero">
|
||||
<span class="ProfileTweet-actionCountForPresentation" aria-hidden="true">
|
||||
</span>
|
||||
</span>
|
||||
</button>
|
||||
</div>
|
||||
<div class="ProfileTweet-action ProfileTweet-action--retweet js-toggleState js-toggleRt">
|
||||
<button class="ProfileTweet-actionButton js-actionButton js-actionRetweet js-tooltip"
|
||||
title="Retweet" data-modal="ProfileTweet-retweet" type="button">
|
||||
<span class="Icon Icon--retweet">
|
||||
</span>
|
||||
<span class="u-hiddenVisually">Retweet</span>
|
||||
<span class="ProfileTweet-actionCount">
|
||||
<span class="ProfileTweet-actionCountForPresentation">8</span>
|
||||
</span>
|
||||
</button>
|
||||
<button class="ProfileTweet-actionButtonUndo js-actionButton js-actionRetweet"
|
||||
data-modal="ProfileTweet-retweet" title="Undo retweet" type="button">
|
||||
<span class="Icon Icon--retweet">
|
||||
</span>
|
||||
<span class="u-hiddenVisually">Retweeted</span>
|
||||
<span class="ProfileTweet-actionCount">
|
||||
<span class="ProfileTweet-actionCountForPresentation">8</span>
|
||||
</span>
|
||||
</button>
|
||||
</div>
|
||||
<div class="ProfileTweet-action ProfileTweet-action--favorite js-toggleState">
|
||||
<button class="ProfileTweet-actionButton js-actionButton js-actionFavorite js-tooltip"
|
||||
title="Favorite" type="button">
|
||||
<span class="Icon Icon--favorite">
|
||||
</span>
|
||||
<span class="u-hiddenVisually">Favorite</span>
|
||||
<span class="ProfileTweet-actionCount">
|
||||
<span class="ProfileTweet-actionCountForPresentation">14</span>
|
||||
</span>
|
||||
</button>
|
||||
<button class="ProfileTweet-actionButtonUndo u-linkClean js-actionButton
|
||||
js-actionFavorite" title="Undo favorite" type="button">
|
||||
<span class="Icon Icon--favorite">
|
||||
</span>
|
||||
<span class="u-hiddenVisually">Favorited</span>
|
||||
<span class="ProfileTweet-actionCount">
|
||||
<span class="ProfileTweet-actionCountForPresentation">
|
||||
14
|
||||
</span>
|
||||
</span>
|
||||
</button>
|
||||
</div>
|
||||
<div class="ProfileTweet-action ProfileTweet-action--more js-more-ProfileTweet-actions">
|
||||
<div class="dropdown">
|
||||
<button class="ProfileTweet-actionButton u-textUserColorHover dropdown-toggle
|
||||
js-tooltip js-dropdown-toggle" type="button" title="More">
|
||||
<span class="Icon Icon--dots">
|
||||
</span>
|
||||
<span class="u-hiddenVisually">More</span>
|
||||
</button>
|
||||
<div class="dropdown-menu">
|
||||
<div class="dropdown-caret">
|
||||
<div class="caret-outer">
|
||||
</div>
|
||||
<div class="caret-inner">
|
||||
</div>
|
||||
</div>
|
||||
<ul>
|
||||
<li class="share-via-dm js-actionShareViaDM" data-nav="share_tweet_dm">
|
||||
<button type="button" class="dropdown-link">
|
||||
Share via Direct Message
|
||||
</button>
|
||||
</li>
|
||||
<li class="embed-link js-actionEmbedTweet" data-nav="embed_tweet">
|
||||
<button type="button" class="dropdown-link">
|
||||
Embed Tweet
|
||||
</button>
|
||||
</li>
|
||||
<li class="mute-user-item pretty-link">
|
||||
<button type="button" class="dropdown-link">
|
||||
Mute
|
||||
</button>
|
||||
</li>
|
||||
<li class="unmute-user-item pretty-link">
|
||||
<button type="button" class="dropdown-link">
|
||||
Unmute
|
||||
</button>
|
||||
</li>
|
||||
<li class="block-or-report-link js-actionBlockOrReport"
|
||||
data-nav="block_or_report">
|
||||
<button type="button" class="dropdown-link">
|
||||
Block or report
|
||||
</button>
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</li>
|
||||
"""
|
||||
response = mock.Mock(text=html)
|
||||
results = twitter.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 1)
|
||||
self.assertEqual(results[0]['title'], '@TitleName')
|
||||
self.assertEqual(results[0]['url'], 'https://twitter.com/this.is.the.url')
|
||||
self.assertIn(u'This is the content', results[0]['content'])
|
||||
|
||||
html = """
|
||||
<li class="b_algo" u="0|5109|4755453613245655|UAGjXgIrPH5yh-o5oNHRx_3Zta87f_QO">
|
||||
<div Class="sa_mc">
|
||||
<div class="sb_tlst">
|
||||
<h2>
|
||||
<a href="http://this.should.be.the.link/" h="ID=SERP,5124.1">
|
||||
<strong>This</strong> should be the title</a>
|
||||
</h2>
|
||||
</div>
|
||||
<div class="sb_meta">
|
||||
<cite>
|
||||
<strong>this</strong>.meta.com</cite>
|
||||
<span class="c_tlbxTrg">
|
||||
<span class="c_tlbxH" H="BASE:CACHEDPAGEDEFAULT" K="SERP,5125.1">
|
||||
</span>
|
||||
</span>
|
||||
</div>
|
||||
<p>
|
||||
<strong>This</strong> should be the content.</p>
|
||||
</div>
|
||||
</li>
|
||||
"""
|
||||
response = mock.Mock(text=html)
|
||||
results = twitter.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 0)
|
|
@@ -1,38 +0,0 @@
from collections import defaultdict
import mock
from searx.testing import SearxTestCase
from searx.engines import unsplash


class TestUnsplashEngine(SearxTestCase):
    def test_request(self):
        query = 'penguin'
        _dict = defaultdict(dict)
        _dict['pageno'] = 1
        params = unsplash.request(query, _dict)

        self.assertTrue('url' in params)
        self.assertTrue(query in params['url'])

    def test_response(self):
        resp = mock.Mock(text='{}')
        result = unsplash.response(resp)
        self.assertEqual([], result)

        resp.text = '{"results": []}'
        result = unsplash.response(resp)
        self.assertEqual([], result)

        # Sourced from https://unsplash.com/napi/search/photos?query=penguin&xp=&per_page=20&page=2
        with open('./tests/unit/engines/unsplash_fixture.json') as fixture:
            resp.text = fixture.read()

        result = unsplash.response(resp)
        self.assertEqual(len(result), 2)
        self.assertEqual(result[0]['title'], 'low angle photography of swimming penguin')
        self.assertEqual(result[0]['url'], 'https://unsplash.com/photos/FY8d721UO_4')
        self.assertEqual(result[0]['thumbnail_src'], 'https://images.unsplash.com/photo-1523557148507-1b77641c7e7c?ixlib=rb-0.3.5&q=80\
&fm=jpg&crop=entropy&cs=tinysrgb&w=200&fit=max')
        self.assertEqual(result[0]['img_src'], 'https://images.unsplash.com/photo-1523557148507-1b77641c7e7c\
?ixlib=rb-0.3.5')
        self.assertEqual(result[0]['content'], '')
@@ -1,36 +0,0 @@
# -*- coding: utf-8 -*-
from collections import defaultdict
import mock
from searx.engines import vimeo
from searx.testing import SearxTestCase


class TestVimeoEngine(SearxTestCase):

    def test_request(self):
        query = 'test_query'
        dicto = defaultdict(dict)
        dicto['pageno'] = 0
        params = vimeo.request(query, dicto)
        self.assertTrue('url' in params)
        self.assertTrue(query in params['url'])
        self.assertTrue('vimeo.com' in params['url'])

    def test_response(self):
        self.assertRaises(AttributeError, vimeo.response, None)
        self.assertRaises(AttributeError, vimeo.response, [])
        self.assertRaises(AttributeError, vimeo.response, '')
        self.assertRaises(AttributeError, vimeo.response, '[]')

        json = u"""
        {"filtered":{"total":274641,"page":1,"per_page":18,"paging":{"next":"?sizes=590x332&page=2","previous":null,"first":"?sizes=590x332&page=1","last":"?sizes=590x332&page=15258"},"data":[{"is_staffpick":false,"is_featured":true,"type":"clip","clip":{"uri":"\\/videos\\/106557563","name":"Hot Rod Revue: The South","link":"https:\\/\\/vimeo.com\\/106557563","duration":4069,"created_time":"2014-09-19T03:38:04+00:00","privacy":{"view":"ptv"},"pictures":{"sizes":[{"width":"590","height":"332","link":"https:\\/\\/i.vimeocdn.com\\/video\\/489717884_590x332.jpg?r=pad","link_with_play_button":"https:\\/\\/i.vimeocdn.com\\/filter\\/overlay?src0=https%3A%2F%2Fi.vimeocdn.com%2Fvideo%2F489717884_590x332.jpg&src1=http%3A%2F%2Ff.vimeocdn.com%2Fp%2Fimages%2Fcrawler_play.png"}]},"stats":{"plays":null},"metadata":{"connections":{"comments":{"total":0},"likes":{"total":5}},"interactions":[]},"user":{"name":"Cal Thorley","link":"https:\\/\\/vimeo.com\\/calthorley","pictures":{"sizes":[{"width":30,"height":30,"link":"https:\\/\\/i.vimeocdn.com\\/portrait\\/2545308_30x30?r=pad"},{"width":75,"height":75,"link":"https:\\/\\/i.vimeocdn.com\\/portrait\\/2545308_75x75?r=pad"},{"width":100,"height":100,"link":"https:\\/\\/i.vimeocdn.com\\/portrait\\/2545308_100x100?r=pad"},{"width":300,"height":300,"link":"https:\\/\\/i.vimeocdn.com\\/portrait\\/2545308_300x300?r=pad"}]}}}}]}};

        """  # noqa
        response = mock.Mock(text=json)
        results = vimeo.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 1)
        self.assertEqual(results[0]['title'], u'Hot Rod Revue: The South')
        self.assertEqual(results[0]['url'], 'https://vimeo.com/106557563')
        self.assertEqual(results[0]['content'], '')
        self.assertEqual(results[0]['thumbnail'], 'https://i.vimeocdn.com/video/489717884_590x332.jpg?r=pad')
@@ -1,514 +0,0 @@
# -*- coding: utf-8 -*-
|
||||
from lxml.html import fromstring
|
||||
from lxml import etree
|
||||
from collections import defaultdict
|
||||
import mock
|
||||
from searx.engines import wikidata
|
||||
from searx.testing import SearxTestCase
|
||||
|
||||
|
||||
class TestWikidataEngine(SearxTestCase):
|
||||
|
||||
def test_request(self):
|
||||
query = 'test_query'
|
||||
dicto = defaultdict(dict)
|
||||
dicto['language'] = 'all'
|
||||
params = wikidata.request(query, dicto)
|
||||
self.assertIn('url', params)
|
||||
self.assertIn(query, params['url'])
|
||||
self.assertIn('wikidata.org', params['url'])
|
||||
|
||||
dicto['language'] = 'es_ES'
|
||||
params = wikidata.request(query, dicto)
|
||||
self.assertIn(query, params['url'])
|
||||
|
||||
# successful cases are not tested here to avoid sending additional requests
|
||||
def test_response(self):
|
||||
self.assertRaises(AttributeError, wikidata.response, None)
|
||||
self.assertRaises(AttributeError, wikidata.response, [])
|
||||
self.assertRaises(AttributeError, wikidata.response, '')
|
||||
self.assertRaises(AttributeError, wikidata.response, '[]')
|
||||
|
||||
wikidata.supported_languages = ['en', 'es']
|
||||
wikidata.language_aliases = {}
|
||||
response = mock.Mock(content='<html></html>'.encode("utf-8"), search_params={"language": "en"})
|
||||
self.assertEqual(wikidata.response(response), [])
|
||||
|
||||
def test_getDetail(self):
|
||||
response = {}
|
||||
results = wikidata.getDetail(response, "Q123", "en", "en-US", etree.HTMLParser())
|
||||
self.assertEqual(results, [])
|
||||
|
||||
title_html = '<div><div class="wikibase-title-label">Test</div></div>'
|
||||
html = """
|
||||
<div>
|
||||
<div class="wikibase-entitytermsview-heading-description">
|
||||
</div>
|
||||
<div>
|
||||
<ul class="wikibase-sitelinklistview-listview">
|
||||
<li data-wb-siteid="enwiki"><a href="http://en.wikipedia.org/wiki/Test">Test</a></li>
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
"""
|
||||
response = {"parse": {"displaytitle": title_html, "text": html}}
|
||||
|
||||
results = wikidata.getDetail(response, "Q123", "en", "en-US", etree.HTMLParser())
|
||||
self.assertEqual(len(results), 1)
|
||||
self.assertEqual(results[0]['url'], 'https://en.wikipedia.org/wiki/Test')
|
||||
|
||||
title_html = """
|
||||
<div>
|
||||
<div class="wikibase-title-label">
|
||||
<span lang="en">Test</span>
|
||||
<sup class="wb-language-fallback-indicator">English</sup>
|
||||
</div>
|
||||
</div>
|
||||
"""
|
||||
html = """
|
||||
<div>
|
||||
<div class="wikibase-entitytermsview-heading-description">
|
||||
<span lang="en">Description</span>
|
||||
<sup class="wb-language-fallback-indicator">English</sup>
|
||||
</div>
|
||||
<div id="P856">
|
||||
<div class="wikibase-statementgroupview-property-label">
|
||||
<a href="/wiki/Property:P856">
|
||||
<span lang="en">official website</span>
|
||||
<sup class="wb-language-fallback-indicator">English</sup>
|
||||
</a>
|
||||
</div>
|
||||
<div class="wikibase-statementview-mainsnak">
|
||||
<a class="external free" href="https://officialsite.com">
|
||||
https://officialsite.com
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
<div>
|
||||
<ul class="wikibase-sitelinklistview-listview">
|
||||
<li data-wb-siteid="enwiki"><a href="http://en.wikipedia.org/wiki/Test">Test</a></li>
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
"""
|
||||
response = {"parse": {"displaytitle": title_html, "text": html}}
|
||||
|
||||
results = wikidata.getDetail(response, "Q123", "yua", "yua_MX", etree.HTMLParser())
|
||||
self.assertEqual(len(results), 2)
|
||||
self.assertEqual(results[0]['title'], 'Official website')
|
||||
self.assertEqual(results[0]['url'], 'https://officialsite.com')
|
||||
|
||||
self.assertEqual(results[1]['infobox'], 'Test')
|
||||
self.assertEqual(results[1]['id'], None)
|
||||
self.assertEqual(results[1]['content'], 'Description')
|
||||
self.assertEqual(results[1]['attributes'], [])
|
||||
self.assertEqual(results[1]['urls'][0]['title'], 'Official website')
|
||||
self.assertEqual(results[1]['urls'][0]['url'], 'https://officialsite.com')
|
||||
self.assertEqual(results[1]['urls'][1]['title'], 'Wikipedia (en)')
|
||||
self.assertEqual(results[1]['urls'][1]['url'], 'https://en.wikipedia.org/wiki/Test')
|
||||
|
||||
def test_add_image(self):
|
||||
image_src = wikidata.add_image(fromstring("<div></div>"))
|
||||
self.assertEqual(image_src, None)
|
||||
|
||||
html = u"""
|
||||
<div>
|
||||
<div id="P18">
|
||||
<div class="wikibase-statementgroupview-property-label">
|
||||
<a href="/wiki/Property:P18">
|
||||
image
|
||||
</a>
|
||||
</div>
|
||||
<div class="wikibase-statementlistview">
|
||||
<div class="wikibase-statementview listview-item">
|
||||
<div class="wikibase-statementview-rankselector">
|
||||
<span class="wikibase-rankselector-normal"></span>
|
||||
</div>
|
||||
<div class="wikibase-statementview-mainsnak">
|
||||
<div>
|
||||
<div class="wikibase-snakview-value">
|
||||
<div class="commons-media-caption">
|
||||
<a href="https://commons.wikimedia.org/wiki/File:image.png">image.png</a>
|
||||
<br/>2,687 × 3,356; 1.22 MB
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
"""
|
||||
html_etree = fromstring(html)
|
||||
id_cache = wikidata.get_id_cache(html_etree)
|
||||
image_src = wikidata.add_image(id_cache)
|
||||
self.assertEqual(image_src,
|
||||
"https://commons.wikimedia.org/wiki/Special:FilePath/image.png?width=500&height=400")
|
||||
|
||||
html = u"""
|
||||
<div>
|
||||
<div id="P2910">
|
||||
<div class="wikibase-statementgroupview-property-label">
|
||||
<a href="/wiki/Property:P2910">
|
||||
icon
|
||||
</a>
|
||||
</div>
|
||||
<div class="wikibase-statementlistview">
|
||||
<div class="wikibase-statementview listview-item">
|
||||
<div class="wikibase-statementview-rankselector">
|
||||
<span class="wikibase-rankselector-normal"></span>
|
||||
</div>
|
||||
<div class="wikibase-statementview-mainsnak">
|
||||
<div>
|
||||
<div class="wikibase-snakview-value">
|
||||
<div class="commons-media-caption">
|
||||
<a href="https://commons.wikimedia.org/wiki/File:icon.png">icon.png</a>
|
||||
<br/>671 × 671; 18 KB</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div id="P154">
|
||||
<div class="wikibase-statementgroupview-property-label">
|
||||
<a href="/wiki/Property:P154">
|
||||
logo
|
||||
</a>
|
||||
</div>
|
||||
<div class="wikibase-statementlistview">
|
||||
<div class="wikibase-statementview listview-item">
|
||||
<div class="wikibase-statementview-rankselector">
|
||||
<span class="wikibase-rankselector-normal"></span>
|
||||
</div>
|
||||
<div class="wikibase-statementview-mainsnak">
|
||||
<div>
|
||||
<div class="wikibase-snakview-value">
|
||||
<div class="commons-media-caption">
|
||||
<a href="https://commons.wikimedia.org/wiki/File:logo.png">logo.png</a>
|
||||
<br/>170 × 170; 1 KB
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
"""
|
||||
html_etree = fromstring(html)
|
||||
id_cache = wikidata.get_id_cache(html_etree)
|
||||
|
||||
image_src = wikidata.add_image(id_cache)
|
||||
self.assertEqual(image_src,
|
||||
"https://commons.wikimedia.org/wiki/Special:FilePath/logo.png?width=500&height=400")
|
||||
|
||||
def test_add_attribute(self):
|
||||
html = u"""
|
||||
<div>
|
||||
<div id="P27">
|
||||
<div class="wikibase-statementgroupview-property-label">
|
||||
<a href="/wiki/Property:P27">
|
||||
country of citizenship
|
||||
</a>
|
||||
</div>
|
||||
<div class="wikibase-statementlistview">
|
||||
<div class="wikibase-statementview listview-item">
|
||||
<div class="wikibase-statementview-rankselector">
|
||||
<span class="wikibase-rankselector-normal"></span>
|
||||
</div>
|
||||
<div class="wikibase-statementview-mainsnak">
|
||||
<div>
|
||||
<div class="wikibase-snakview-value">
|
||||
<a href="/wiki/Q145">
|
||||
United Kingdom
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
"""
|
||||
attributes = []
|
||||
html_etree = fromstring(html)
|
||||
id_cache = wikidata.get_id_cache(html_etree)
|
||||
|
||||
wikidata.add_attribute(attributes, id_cache, "Fail")
|
||||
self.assertEqual(attributes, [])
|
||||
|
||||
wikidata.add_attribute(attributes, id_cache, "P27")
|
||||
self.assertEqual(len(attributes), 1)
|
||||
self.assertEqual(attributes[0]["label"], "Country of citizenship")
|
||||
self.assertEqual(attributes[0]["value"], "United Kingdom")
|
||||
|
||||
html = u"""
|
||||
<div>
|
||||
<div id="P569">
|
||||
<div class="wikibase-statementgroupview-property-label">
|
||||
<a href="/wiki/Property:P569">
|
||||
date of birth
|
||||
</a>
|
||||
</div>
|
||||
<div class="wikibase-statementlistview">
|
||||
<div class="wikibase-statementview listview-item">
|
||||
<div class="wikibase-statementview-rankselector">
|
||||
<span class="wikibase-rankselector-normal"></span>
|
||||
</div>
|
||||
<div class="wikibase-statementview-mainsnak">
|
||||
<div>
|
||||
<div class="wikibase-snakview-value">
|
||||
27 January 1832
|
||||
<sup class="wb-calendar-name">
|
||||
Gregorian
|
||||
</sup>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
"""
|
||||
attributes = []
|
||||
html_etree = fromstring(html)
|
||||
id_cache = wikidata.get_id_cache(html_etree)
|
||||
wikidata.add_attribute(attributes, id_cache, "P569", date=True)
|
||||
self.assertEqual(len(attributes), 1)
|
||||
self.assertEqual(attributes[0]["label"], "Date of birth")
|
||||
self.assertEqual(attributes[0]["value"], "27 January 1832")
|
||||
|
||||
html = u"""
|
||||
<div>
|
||||
<div id="P6">
|
||||
<div class="wikibase-statementgroupview-property-label">
|
||||
<a href="/wiki/Property:P27">
|
||||
head of government
|
||||
</a>
|
||||
</div>
|
||||
<div class="wikibase-statementlistview">
|
||||
<div class="wikibase-statementview listview-item">
|
||||
<div class="wikibase-statementview-rankselector">
|
||||
<span class="wikibase-rankselector-normal"></span>
|
||||
</div>
|
||||
<div class="wikibase-statementview-mainsnak">
|
||||
<div>
|
||||
<div class="wikibase-snakview-value">
|
||||
<a href="/wiki/Q206">
|
||||
Old Prime Minister
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="wikibase-statementview listview-item">
|
||||
<div class="wikibase-statementview-rankselector">
|
||||
<span class="wikibase-rankselector-preferred"></span>
|
||||
</div>
|
||||
<div class="wikibase-statementview-mainsnak">
|
||||
<div>
|
||||
<div class="wikibase-snakview-value">
|
||||
<a href="/wiki/Q3099714">
|
||||
Actual Prime Minister
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
"""
|
||||
attributes = []
|
||||
html_etree = fromstring(html)
|
||||
id_cache = wikidata.get_id_cache(html_etree)
|
||||
wikidata.add_attribute(attributes, id_cache, "P6")
|
||||
self.assertEqual(len(attributes), 1)
|
||||
self.assertEqual(attributes[0]["label"], "Head of government")
|
||||
self.assertEqual(attributes[0]["value"], "Old Prime Minister, Actual Prime Minister")
|
||||
|
||||
attributes = []
|
||||
html_etree = fromstring(html)
|
||||
id_cache = wikidata.get_id_cache(html_etree)
|
||||
wikidata.add_attribute(attributes, id_cache, "P6", trim=True)
|
||||
self.assertEqual(len(attributes), 1)
|
||||
self.assertEqual(attributes[0]["value"], "Actual Prime Minister")
|
||||
|
||||
def test_add_url(self):
|
||||
html = u"""
|
||||
<div>
|
||||
<div id="P856">
|
||||
<div class="wikibase-statementgroupview-property-label">
|
||||
<a href="/wiki/Property:P856">
|
||||
official website
|
||||
</a>
|
||||
</div>
|
||||
<div class="wikibase-statementlistview">
|
||||
<div class="wikibase-statementview listview-item">
|
||||
<div class="wikibase-statementview-mainsnak">
|
||||
<div>
|
||||
<div class="wikibase-snakview-value">
|
||||
<a class="external free" href="https://searx.me">
|
||||
https://searx.me/
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
"""
|
||||
urls = []
|
||||
html_etree = fromstring(html)
|
||||
id_cache = wikidata.get_id_cache(html_etree)
|
||||
wikidata.add_url(urls, html_etree, id_cache, 'P856')
|
||||
self.assertEqual(len(urls), 1)
|
||||
self.assertIn({'title': 'Official website', 'url': 'https://searx.me/'}, urls)
|
||||
urls = []
|
||||
results = []
|
||||
wikidata.add_url(urls, html_etree, id_cache, 'P856', 'custom label', results=results)
|
||||
self.assertEqual(len(urls), 1)
|
||||
self.assertEqual(len(results), 1)
|
||||
self.assertIn({'title': 'custom label', 'url': 'https://searx.me/'}, urls)
|
||||
self.assertIn({'title': 'custom label', 'url': 'https://searx.me/'}, results)
|
||||
|
||||
html = u"""
|
||||
<div>
|
||||
<div id="P856">
|
||||
<div class="wikibase-statementgroupview-property-label">
|
||||
<a href="/wiki/Property:P856">
|
||||
official website
|
||||
</a>
|
||||
</div>
|
||||
<div class="wikibase-statementlistview">
|
||||
<div class="wikibase-statementview listview-item">
|
||||
<div class="wikibase-statementview-mainsnak">
|
||||
<div>
|
||||
<div class="wikibase-snakview-value">
|
||||
<a class="external free" href="http://www.worldofwarcraft.com">
|
||||
http://www.worldofwarcraft.com
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="wikibase-statementview listview-item">
|
||||
<div class="wikibase-statementview-mainsnak">
|
||||
<div>
|
||||
<div class="wikibase-snakview-value">
|
||||
<a class="external free" href="http://eu.battle.net/wow/en/">
|
||||
http://eu.battle.net/wow/en/
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
"""
|
||||
urls = []
|
||||
html_etree = fromstring(html)
|
||||
id_cache = wikidata.get_id_cache(html_etree)
|
||||
wikidata.add_url(urls, html_etree, id_cache, 'P856')
|
||||
self.assertEqual(len(urls), 2)
|
||||
self.assertIn({'title': 'Official website', 'url': 'http://www.worldofwarcraft.com'}, urls)
|
||||
self.assertIn({'title': 'Official website', 'url': 'http://eu.battle.net/wow/en/'}, urls)
|
||||
|
||||
def test_get_imdblink(self):
|
||||
html = u"""
|
||||
<div>
|
||||
<div class="wikibase-statementview-mainsnak">
|
||||
<div>
|
||||
<div class="wikibase-snakview-value">
|
||||
<a class="wb-external-id" href="http://www.imdb.com/tt0433664">
|
||||
tt0433664
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
"""
|
||||
html_etree = fromstring(html)
|
||||
imdblink = wikidata.get_imdblink(html_etree, 'https://www.imdb.com/')
|
||||
|
||||
html = u"""
|
||||
<div>
|
||||
<div class="wikibase-statementview-mainsnak">
|
||||
<div>
|
||||
<div class="wikibase-snakview-value">
|
||||
<a class="wb-external-id"
|
||||
href="href="http://tools.wmflabs.org/...http://www.imdb.com/&id=nm4915994"">
|
||||
nm4915994
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
"""
|
||||
html_etree = fromstring(html)
|
||||
imdblink = wikidata.get_imdblink(html_etree, 'https://www.imdb.com/')
|
||||
self.assertIn('https://www.imdb.com/name/nm4915994', imdblink)
|
||||
|
||||
def test_get_geolink(self):
|
||||
html = u"""
|
||||
<div>
|
||||
<div class="wikibase-statementview-mainsnak">
|
||||
<div>
|
||||
<div class="wikibase-snakview-value">
|
||||
60°N, 40°E
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
"""
|
||||
html_etree = fromstring(html)
|
||||
geolink = wikidata.get_geolink(html_etree)
|
||||
self.assertIn('https://www.openstreetmap.org/', geolink)
|
||||
self.assertIn('lat=60&lon=40', geolink)
|
||||
|
||||
html = u"""
|
||||
<div>
|
||||
<div class="wikibase-statementview-mainsnak">
|
||||
<div>
|
||||
<div class="wikibase-snakview-value">
|
||||
34°35'59"S, 58°22'55"W
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
"""
|
||||
html_etree = fromstring(html)
|
||||
geolink = wikidata.get_geolink(html_etree)
|
||||
self.assertIn('https://www.openstreetmap.org/', geolink)
|
||||
self.assertIn('lat=-34.59', geolink)
|
||||
self.assertIn('lon=-58.38', geolink)
|
||||
|
||||
def test_get_wikilink(self):
|
||||
html = """
|
||||
<div>
|
||||
<div>
|
||||
<ul class="wikibase-sitelinklistview-listview">
|
||||
<li data-wb-siteid="arwiki"><a href="http://ar.wikipedia.org/wiki/Test">Test</a></li>
|
||||
<li data-wb-siteid="enwiki"><a href="http://en.wikipedia.org/wiki/Test">Test</a></li>
|
||||
</ul>
|
||||
</div>
|
||||
<div>
|
||||
<ul class="wikibase-sitelinklistview-listview">
|
||||
<li data-wb-siteid="enwikiquote"><a href="https://en.wikiquote.org/wiki/Test">Test</a></li>
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
"""
|
||||
html_etree = fromstring(html)
|
||||
wikilink = wikidata.get_wikilink(html_etree, 'nowiki')
|
||||
self.assertEqual(wikilink, None)
|
||||
wikilink = wikidata.get_wikilink(html_etree, 'enwiki')
|
||||
self.assertEqual(wikilink, 'https://en.wikipedia.org/wiki/Test')
|
||||
wikilink = wikidata.get_wikilink(html_etree, 'arwiki')
|
||||
self.assertEqual(wikilink, 'https://ar.wikipedia.org/wiki/Test')
|
||||
wikilink = wikidata.get_wikilink(html_etree, 'enwikiquote')
|
||||
self.assertEqual(wikilink, 'https://en.wikiquote.org/wiki/Test')
@@ -1,263 +0,0 @@
# -*- coding: utf-8 -*-
|
||||
from collections import defaultdict
|
||||
import mock
|
||||
from searx.engines import wikipedia
|
||||
from searx.testing import SearxTestCase
|
||||
|
||||
|
||||
class TestWikipediaEngine(SearxTestCase):
|
||||
|
||||
def test_request(self):
|
||||
wikipedia.supported_languages = ['fr', 'en', 'no']
|
||||
wikipedia.language_aliases = {'nb': 'no'}
|
||||
|
||||
query = 'test_query'
|
||||
dicto = defaultdict(dict)
|
||||
dicto['language'] = 'fr-FR'
|
||||
params = wikipedia.request(query.encode('utf-8'), dicto)
|
||||
self.assertIn('url', params)
|
||||
self.assertIn(query, params['url'])
|
||||
self.assertIn('test_query', params['url'])
|
||||
self.assertIn('Test_Query', params['url'])
|
||||
self.assertIn('fr.wikipedia.org', params['url'])
|
||||
|
||||
query = u'Test_Query'
|
||||
params = wikipedia.request(query.encode('utf-8'), dicto)
|
||||
self.assertIn('Test_Query', params['url'])
|
||||
self.assertNotIn('test_query', params['url'])
|
||||
|
||||
dicto['language'] = 'nb'
|
||||
params = wikipedia.request(query, dicto)
|
||||
self.assertIn('no.wikipedia.org', params['url'])
|
||||
dicto['language'] = 'all'
|
||||
params = wikipedia.request(query, dicto)
|
||||
self.assertIn('en', params['url'])
|
||||
|
||||
dicto['language'] = 'xx'
|
||||
params = wikipedia.request(query, dicto)
|
||||
self.assertIn('en.wikipedia.org', params['url'])
|
||||
|
||||
def test_response(self):
|
||||
dicto = defaultdict(dict)
|
||||
dicto['language'] = 'fr'
|
||||
|
||||
self.assertRaises(AttributeError, wikipedia.response, None)
|
||||
self.assertRaises(AttributeError, wikipedia.response, [])
|
||||
self.assertRaises(AttributeError, wikipedia.response, '')
|
||||
self.assertRaises(AttributeError, wikipedia.response, '[]')
|
||||
|
||||
# page not found
|
||||
json = """
|
||||
{
|
||||
"batchcomplete": "",
|
||||
"query": {
|
||||
"normalized": [],
|
||||
"pages": {
|
||||
"-1": {
|
||||
"ns": 0,
|
||||
"title": "",
|
||||
"missing": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}"""
|
||||
response = mock.Mock(text=json, search_params=dicto)
|
||||
self.assertEqual(wikipedia.response(response), [])
|
||||
|
||||
# normal case
|
||||
json = """
|
||||
{
|
||||
"batchcomplete": "",
|
||||
"query": {
|
||||
"normalized": [],
|
||||
"pages": {
|
||||
"12345": {
|
||||
"pageid": 12345,
|
||||
"ns": 0,
|
||||
"title": "The Title",
|
||||
"extract": "The Title is...",
|
||||
"thumbnail": {
|
||||
"source": "img_src.jpg"
|
||||
},
|
||||
"pageimage": "img_name.jpg"
|
||||
}
|
||||
}
|
||||
}
|
||||
}"""
|
||||
response = mock.Mock(text=json, search_params=dicto)
|
||||
results = wikipedia.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 2)
|
||||
self.assertEqual(results[0]['title'], u'The Title')
|
||||
self.assertIn('fr.wikipedia.org/wiki/The_Title', results[0]['url'])
|
||||
self.assertEqual(results[1]['infobox'], u'The Title')
|
||||
self.assertIn('fr.wikipedia.org/wiki/The_Title', results[1]['id'])
|
||||
self.assertIn('The Title is...', results[1]['content'])
|
||||
self.assertEqual(results[1]['img_src'], 'img_src.jpg')
|
||||
|
||||
# disambiguation page
|
||||
json = """
|
||||
{
|
||||
"batchcomplete": "",
|
||||
"query": {
|
||||
"normalized": [],
|
||||
"pages": {
|
||||
"12345": {
|
||||
"pageid": 12345,
|
||||
"ns": 0,
|
||||
"title": "The Title",
|
||||
"extract": "The Title can be:\\nThe Title 1\\nThe Title 2\\nThe Title 3\\nThe Title 4......................................................................................................................................." """ # noqa
|
||||
json += """
|
||||
}
|
||||
}
|
||||
}
|
||||
}"""
|
||||
response = mock.Mock(text=json, search_params=dicto)
|
||||
results = wikipedia.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 2)
|
||||
|
||||
# no image
|
||||
json = """
|
||||
{
|
||||
"batchcomplete": "",
|
||||
"query": {
|
||||
"normalized": [],
|
||||
"pages": {
|
||||
"12345": {
|
||||
"pageid": 12345,
|
||||
"ns": 0,
|
||||
"title": "The Title",
|
||||
"extract": "The Title is......................................................................................................................................................................................." """ # noqa
|
||||
json += """
|
||||
}
|
||||
}
|
||||
}
|
||||
}"""
|
||||
response = mock.Mock(text=json, search_params=dicto)
|
||||
results = wikipedia.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 2)
|
||||
self.assertIn('The Title is...', results[1]['content'])
|
||||
self.assertEqual(results[1]['img_src'], None)
|
||||
|
||||
# title not in first paragraph
|
||||
json = u"""
|
||||
{
|
||||
"batchcomplete": "",
|
||||
"query": {
|
||||
"normalized": [],
|
||||
"pages": {
|
||||
"12345": {
|
||||
"pageid": 12345,
|
||||
"ns": 0,
|
||||
"title": "披頭四樂隊",
|
||||
"extract": "披头士乐队....................................................................................................................................................................................................\\n披頭四樂隊...", """ # noqa
|
||||
json += """
|
||||
"thumbnail": {
|
||||
"source": "img_src.jpg"
|
||||
},
|
||||
"pageimage": "img_name.jpg"
|
||||
}
|
||||
}
|
||||
}
|
||||
}"""
|
||||
response = mock.Mock(text=json, search_params=dicto)
|
||||
results = wikipedia.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 2)
|
||||
self.assertEqual(results[1]['infobox'], u'披頭四樂隊')
|
||||
self.assertIn(u'披头士乐队...', results[1]['content'])
|
||||
|
||||
def test_fetch_supported_languages(self):
|
||||
html = u"""<html></html>"""
|
||||
response = mock.Mock(text=html)
|
||||
languages = wikipedia._fetch_supported_languages(response)
|
||||
self.assertEqual(type(languages), dict)
|
||||
self.assertEqual(len(languages), 0)
|
||||
|
||||
html = u"""
|
||||
<html>
|
||||
<body>
|
||||
<div>
|
||||
<div>
|
||||
<h3>Table header</h3>
|
||||
<table class="sortable jquery-tablesorter">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>N</th>
|
||||
<th>Language</th>
|
||||
<th>Language (local)</th>
|
||||
<th>Wiki</th>
|
||||
<th>Articles</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<tr>
|
||||
<td>2</td>
|
||||
<td><a>Swedish</a></td>
|
||||
<td><a>Svenska</a></td>
|
||||
<td><a>sv</a></td>
|
||||
<td><a><b>3000000</b></a></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>3</td>
|
||||
<td><a>Cebuano</a></td>
|
||||
<td><a>Sinugboanong Binisaya</a></td>
|
||||
<td><a>ceb</a></td>
|
||||
<td><a><b>3000000</b></a></td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
<h3>Table header</h3>
|
||||
<table class="sortable jquery-tablesorter">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>N</th>
|
||||
<th>Language</th>
|
||||
<th>Language (local)</th>
|
||||
<th>Wiki</th>
|
||||
<th>Articles</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<tr>
|
||||
<td>2</td>
|
||||
<td><a>Norwegian (Bokmål)</a></td>
|
||||
<td><a>Norsk (Bokmål)</a></td>
|
||||
<td><a>no</a></td>
|
||||
<td><a><b>100000</b></a></td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
"""
|
||||
response = mock.Mock(text=html)
|
||||
languages = wikipedia._fetch_supported_languages(response)
|
||||
self.assertEqual(type(languages), dict)
|
||||
self.assertEqual(len(languages), 3)
|
||||
|
||||
self.assertIn('sv', languages)
|
||||
self.assertIn('ceb', languages)
|
||||
self.assertIn('no', languages)
|
||||
|
||||
self.assertEqual(type(languages['sv']), dict)
|
||||
self.assertEqual(type(languages['ceb']), dict)
|
||||
self.assertEqual(type(languages['no']), dict)
|
||||
|
||||
self.assertIn('name', languages['sv'])
|
||||
self.assertIn('english_name', languages['sv'])
|
||||
self.assertIn('articles', languages['sv'])
|
||||
|
||||
self.assertEqual(languages['sv']['name'], 'Svenska')
|
||||
self.assertEqual(languages['sv']['english_name'], 'Swedish')
|
||||
self.assertEqual(languages['sv']['articles'], 3000000)
|
||||
self.assertEqual(languages['ceb']['name'], 'Sinugboanong Binisaya')
|
||||
self.assertEqual(languages['ceb']['english_name'], 'Cebuano')
|
||||
self.assertEqual(languages['ceb']['articles'], 3000000)
|
||||
self.assertEqual(languages['no']['name'], u'Norsk (Bokmål)')
|
||||
self.assertEqual(languages['no']['english_name'], u'Norwegian (Bokmål)')
|
||||
self.assertEqual(languages['no']['articles'], 100000)
@@ -1,166 +0,0 @@
# -*- coding: utf-8 -*-
|
||||
from collections import defaultdict
|
||||
import mock
|
||||
from requests import Request
|
||||
from searx.engines import wolframalpha_api
|
||||
from searx.testing import SearxTestCase
|
||||
|
||||
|
||||
class TestWolframAlphaAPIEngine(SearxTestCase):
|
||||
|
||||
def test_request(self):
|
||||
query = 'test_query'
|
||||
dicto = defaultdict(dict)
|
||||
params = wolframalpha_api.request(query, dicto)
|
||||
|
||||
# TODO: test api_key
|
||||
self.assertIn('url', params)
|
||||
self.assertIn('https://api.wolframalpha.com/v2/query?', params['url'])
|
||||
self.assertIn(query, params['url'])
|
||||
self.assertEqual('https://www.wolframalpha.com/input/?i=test_query', params['headers']['Referer'])
|
||||
|
||||
def test_replace_pua_chars(self):
|
||||
self.assertEqual('i', wolframalpha_api.replace_pua_chars(u'\uf74e'))
|
||||
|
||||
def test_response(self):
|
||||
self.assertRaises(AttributeError, wolframalpha_api.response, None)
|
||||
self.assertRaises(AttributeError, wolframalpha_api.response, [])
|
||||
self.assertRaises(AttributeError, wolframalpha_api.response, '')
|
||||
self.assertRaises(AttributeError, wolframalpha_api.response, '[]')
|
||||
|
||||
referer_url = 'referer_url'
|
||||
request = Request(headers={'Referer': referer_url})
|
||||
|
||||
# test failure
|
||||
xml = '''<?xml version='1.0' encoding='UTF-8'?>
|
||||
<queryresult success='false' error='false' />
|
||||
'''
|
||||
response = mock.Mock(content=xml.encode('utf-8'))
|
||||
self.assertEqual(wolframalpha_api.response(response), [])
|
||||
|
||||
# test basic case
|
||||
xml = b"""<?xml version='1.0' encoding='UTF-8'?>
|
||||
<queryresult success='true'
|
||||
error='false'
|
||||
numpods='3'
|
||||
datatypes='Math'
|
||||
id='queryresult_id'
|
||||
host='http://www4c.wolframalpha.com'
|
||||
related='related_url'
|
||||
version='2.6'>
|
||||
<pod title='Input'
|
||||
scanner='Identity'
|
||||
id='Input'
|
||||
numsubpods='1'>
|
||||
<subpod title=''>
|
||||
<img src='input_img_src.gif'
|
||||
alt='input_img_alt'
|
||||
title='input_img_title' />
|
||||
<plaintext>input_plaintext</plaintext>
|
||||
</subpod>
|
||||
</pod>
|
||||
<pod title='Result'
|
||||
scanner='Simplification'
|
||||
id='Result'
|
||||
numsubpods='1'
|
||||
primary='true'>
|
||||
<subpod title=''>
|
||||
<img src='result_img_src.gif'
|
||||
alt='result_img_alt'
|
||||
title='result_img_title' />
|
||||
<plaintext>result_plaintext</plaintext>
|
||||
</subpod>
|
||||
</pod>
|
||||
<pod title='Manipulatives illustration'
|
||||
scanner='Arithmetic'
|
||||
id='Illustration'
|
||||
numsubpods='1'>
|
||||
<subpod title=''>
|
||||
<img src='illustration_img_src.gif'
|
||||
alt='illustration_img_alt' />
|
||||
<plaintext>illustration_plaintext</plaintext>
|
||||
</subpod>
|
||||
</pod>
|
||||
</queryresult>
|
||||
"""
|
||||
response = mock.Mock(content=xml, request=request)
|
||||
results = wolframalpha_api.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 2)
|
||||
self.assertEqual('input_plaintext', results[0]['infobox'])
|
||||
|
||||
self.assertEqual(len(results[0]['attributes']), 3)
|
||||
self.assertEqual('Input', results[0]['attributes'][0]['label'])
|
||||
self.assertEqual('input_plaintext', results[0]['attributes'][0]['value'])
|
||||
self.assertEqual('Result', results[0]['attributes'][1]['label'])
|
||||
self.assertEqual('result_plaintext', results[0]['attributes'][1]['value'])
|
||||
self.assertEqual('Manipulatives illustration', results[0]['attributes'][2]['label'])
|
||||
self.assertEqual('illustration_img_src.gif', results[0]['attributes'][2]['image']['src'])
|
||||
self.assertEqual('illustration_img_alt', results[0]['attributes'][2]['image']['alt'])
|
||||
|
||||
self.assertEqual(len(results[0]['urls']), 1)
|
||||
|
||||
self.assertEqual(referer_url, results[0]['urls'][0]['url'])
|
||||
self.assertEqual('Wolfram|Alpha', results[0]['urls'][0]['title'])
|
||||
self.assertEqual(referer_url, results[1]['url'])
|
||||
self.assertEqual('Wolfram|Alpha (input_plaintext)', results[1]['title'])
|
||||
self.assertIn('result_plaintext', results[1]['content'])
|
||||
|
||||
# test calc
|
||||
xml = b"""<?xml version='1.0' encoding='UTF-8'?>
|
||||
<queryresult success='true'
|
||||
error='false'
|
||||
numpods='2'
|
||||
datatypes=''
|
||||
parsetimedout='false'
|
||||
id='queryresult_id'
|
||||
host='http://www5b.wolframalpha.com'
|
||||
related='related_url'
|
||||
version='2.6' >
|
||||
<pod title='Indefinite integral'
|
||||
scanner='Integral'
|
||||
id='IndefiniteIntegral'
|
||||
error='false'
|
||||
numsubpods='1'
|
||||
primary='true'>
|
||||
<subpod title=''>
|
||||
<img src='integral_image.gif'
|
||||
alt='integral_img_alt'
|
||||
title='integral_img_title' />
|
||||
<plaintext>integral_plaintext</plaintext>
|
||||
</subpod>
|
||||
</pod>
|
||||
<pod title='Plot of the integral'
|
||||
scanner='Integral'
|
||||
id='Plot'
|
||||
error='false'
|
||||
numsubpods='1'>
|
||||
<subpod title=''>
|
||||
<img src='plot.gif'
|
||||
alt='plot_alt'
|
||||
title='' />
|
||||
<plaintext></plaintext>
|
||||
</subpod>
|
||||
</pod>
|
||||
</queryresult>
|
||||
"""
|
||||
response = mock.Mock(content=xml, request=request)
|
||||
results = wolframalpha_api.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 2)
|
||||
self.assertEqual('integral_plaintext', results[0]['infobox'])
|
||||
|
||||
self.assertEqual(len(results[0]['attributes']), 2)
|
||||
self.assertEqual('Indefinite integral', results[0]['attributes'][0]['label'])
|
||||
self.assertEqual('integral_plaintext', results[0]['attributes'][0]['value'])
|
||||
self.assertEqual('Plot of the integral', results[0]['attributes'][1]['label'])
|
||||
self.assertEqual('plot.gif', results[0]['attributes'][1]['image']['src'])
|
||||
self.assertEqual('plot_alt', results[0]['attributes'][1]['image']['alt'])
|
||||
|
||||
self.assertEqual(len(results[0]['urls']), 1)
|
||||
|
||||
self.assertEqual(referer_url, results[0]['urls'][0]['url'])
|
||||
self.assertEqual('Wolfram|Alpha', results[0]['urls'][0]['title'])
|
||||
self.assertEqual(referer_url, results[1]['url'])
|
||||
self.assertEqual('Wolfram|Alpha (integral_plaintext)', results[1]['title'])
|
||||
self.assertIn('integral_plaintext', results[1]['content'])
@@ -1,224 +0,0 @@
# -*- coding: utf-8 -*-
|
||||
from collections import defaultdict
|
||||
import mock
|
||||
from requests import Request
|
||||
from searx.engines import wolframalpha_noapi
|
||||
from searx.testing import SearxTestCase
|
||||
|
||||
|
||||
class TestWolframAlphaNoAPIEngine(SearxTestCase):
|
||||
|
||||
def test_request(self):
|
||||
query = 'test_query'
|
||||
dicto = defaultdict(dict)
|
||||
params = wolframalpha_noapi.request(query, dicto)
|
||||
|
||||
self.assertIn('url', params)
|
||||
self.assertIn('https://www.wolframalpha.com/input/json.jsp', params['url'])
|
||||
self.assertIn(query, params['url'])
|
||||
self.assertEqual('https://www.wolframalpha.com/input/?i=test_query', params['headers']['Referer'])
|
||||
|
||||
def test_response(self):
|
||||
self.assertRaises(AttributeError, wolframalpha_noapi.response, None)
|
||||
self.assertRaises(AttributeError, wolframalpha_noapi.response, [])
|
||||
self.assertRaises(AttributeError, wolframalpha_noapi.response, '')
|
||||
self.assertRaises(AttributeError, wolframalpha_noapi.response, '[]')
|
||||
|
||||
referer_url = 'referer_url'
|
||||
request = Request(headers={'Referer': referer_url})
|
||||
|
||||
# test failure
|
||||
json = r'''
|
||||
{"queryresult" : {
|
||||
"success" : false,
|
||||
"error" : false,
|
||||
"numpods" : 0,
|
||||
"id" : "",
|
||||
"host" : "https:\/\/www5a.wolframalpha.com",
|
||||
"didyoumeans" : {}
|
||||
}}
|
||||
'''
|
||||
response = mock.Mock(text=json, request=request)
|
||||
self.assertEqual(wolframalpha_noapi.response(response), [])
|
||||
|
||||
# test basic case
|
||||
json = r'''
|
||||
{"queryresult" : {
|
||||
"success" : true,
|
||||
"error" : false,
|
||||
"numpods" : 6,
|
||||
"datatypes" : "Math",
|
||||
"id" : "queryresult_id",
|
||||
"host" : "https:\/\/www5b.wolframalpha.com",
|
||||
"related" : "related_url",
|
||||
"version" : "2.6",
|
||||
"pods" : [
|
||||
{
|
||||
"title" : "Input",
|
||||
"scanners" : [
|
||||
"Identity"
|
||||
],
|
||||
"id" : "Input",
|
||||
"error" : false,
|
||||
"numsubpods" : 1,
|
||||
"subpods" : [
|
||||
{
|
||||
"title" : "",
|
||||
"img" : {
|
||||
"src" : "input_img_src.gif",
|
||||
"alt" : "input_img_alt",
|
||||
"title" : "input_img_title"
|
||||
},
|
||||
"plaintext" : "input_plaintext",
|
||||
"minput" : "input_minput"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"title" : "Result",
|
||||
"scanners" : [
|
||||
"Simplification"
|
||||
],
|
||||
"id" : "Result",
|
||||
"error" : false,
|
||||
"numsubpods" : 1,
|
||||
"primary" : true,
|
||||
"subpods" : [
|
||||
{
|
||||
"title" : "",
|
||||
"img" : {
|
||||
"src" : "result_img_src.gif",
|
||||
"alt" : "result_img_alt",
|
||||
"title" : "result_img_title"
|
||||
},
|
||||
"plaintext" : "result_plaintext",
|
||||
"moutput" : "result_moutput"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"title" : "Manipulatives illustration",
|
||||
"scanners" : [
|
||||
"Arithmetic"
|
||||
],
|
||||
"id" : "Illustration",
|
||||
"error" : false,
|
||||
"numsubpods" : 1,
|
||||
"subpods" : [
|
||||
{
|
||||
"title" : "",
|
||||
"CDFcontent" : "Resizeable",
|
||||
"img" : {
|
||||
"src" : "illustration_img_src.gif",
|
||||
"alt" : "illustration_img_alt",
|
||||
"title" : "illustration_img_title"
|
||||
},
|
||||
"plaintext" : "illustration_img_plaintext"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}}
|
||||
'''
|
||||
response = mock.Mock(text=json, request=request)
|
||||
results = wolframalpha_noapi.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 2)
|
||||
self.assertEqual('input_plaintext', results[0]['infobox'])
|
||||
|
||||
self.assertEqual(len(results[0]['attributes']), 3)
|
||||
self.assertEqual('Input', results[0]['attributes'][0]['label'])
|
||||
self.assertEqual('input_plaintext', results[0]['attributes'][0]['value'])
|
||||
self.assertEqual('Result', results[0]['attributes'][1]['label'])
|
||||
self.assertEqual('result_plaintext', results[0]['attributes'][1]['value'])
|
||||
self.assertEqual('Manipulatives illustration', results[0]['attributes'][2]['label'])
|
||||
self.assertEqual('illustration_img_src.gif', results[0]['attributes'][2]['image']['src'])
|
||||
self.assertEqual('illustration_img_alt', results[0]['attributes'][2]['image']['alt'])
|
||||
|
||||
self.assertEqual(len(results[0]['urls']), 1)
|
||||
|
||||
self.assertEqual(referer_url, results[0]['urls'][0]['url'])
|
||||
self.assertEqual('Wolfram|Alpha', results[0]['urls'][0]['title'])
|
||||
self.assertEqual(referer_url, results[1]['url'])
|
||||
self.assertEqual('Wolfram|Alpha (input_plaintext)', results[1]['title'])
|
||||
self.assertIn('result_plaintext', results[1]['content'])
|
||||
|
||||
# test calc
|
||||
json = r"""
|
||||
{"queryresult" : {
|
||||
"success" : true,
|
||||
"error" : false,
|
||||
"numpods" : 2,
|
||||
"datatypes" : "",
|
||||
"id" : "queryresult_id",
|
||||
"host" : "https:\/\/www4b.wolframalpha.com",
|
||||
"related" : "related_url",
|
||||
"version" : "2.6",
|
||||
"pods" : [
|
||||
{
|
||||
"title" : "Indefinite integral",
|
||||
"scanners" : [
|
||||
"Integral"
|
||||
],
|
||||
"id" : "IndefiniteIntegral",
|
||||
"error" : false,
|
||||
"numsubpods" : 1,
|
||||
"primary" : true,
|
||||
"subpods" : [
|
||||
{
|
||||
"title" : "",
|
||||
"img" : {
|
||||
"src" : "integral_img_src.gif",
|
||||
"alt" : "integral_img_alt",
|
||||
"title" : "integral_img_title"
|
||||
},
|
||||
"plaintext" : "integral_plaintext",
|
||||
"minput" : "integral_minput",
|
||||
"moutput" : "integral_moutput"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"title" : "Plot of the integral",
|
||||
"scanners" : [
|
||||
"Integral"
|
||||
],
|
||||
"id" : "Plot",
|
||||
"error" : false,
|
||||
"numsubpods" : 1,
|
||||
"subpods" : [
|
||||
{
|
||||
"title" : "",
|
||||
"img" : {
|
||||
"src" : "plot.gif",
|
||||
"alt" : "plot_alt",
|
||||
"title" : "plot_title"
|
||||
},
|
||||
"plaintext" : "",
|
||||
"minput" : "plot_minput"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}}
|
||||
"""
|
||||
response = mock.Mock(text=json, request=request)
|
||||
results = wolframalpha_noapi.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 2)
|
||||
self.assertEqual('integral_plaintext', results[0]['infobox'])
|
||||
|
||||
self.assertEqual(len(results[0]['attributes']), 2)
|
||||
self.assertEqual('Indefinite integral', results[0]['attributes'][0]['label'])
|
||||
self.assertEqual('integral_plaintext', results[0]['attributes'][0]['value'])
|
||||
self.assertEqual('Plot of the integral', results[0]['attributes'][1]['label'])
|
||||
self.assertEqual('plot.gif', results[0]['attributes'][1]['image']['src'])
|
||||
self.assertEqual('plot_alt', results[0]['attributes'][1]['image']['alt'])
|
||||
|
||||
self.assertEqual(len(results[0]['urls']), 1)
|
||||
|
||||
self.assertEqual(referer_url, results[0]['urls'][0]['url'])
|
||||
self.assertEqual('Wolfram|Alpha', results[0]['urls'][0]['title'])
|
||||
self.assertEqual(referer_url, results[1]['url'])
|
||||
self.assertEqual('Wolfram|Alpha (integral_plaintext)', results[1]['title'])
|
||||
self.assertIn('integral_plaintext', results[1]['content'])
@@ -1,14 +0,0 @@
from collections import defaultdict
import mock
from searx.engines import www1x
from searx.testing import SearxTestCase


class TestWww1xEngine(SearxTestCase):

    def test_request(self):
        query = 'test_query'
        params = www1x.request(query, defaultdict(dict))
        self.assertTrue('url' in params)
        self.assertTrue(query in params['url'])
        self.assertTrue('1x.com' in params['url'])
@@ -1,96 +0,0 @@
from collections import defaultdict
import mock
from searx.engines import yacy
from searx.testing import SearxTestCase


class TestYacyEngine(SearxTestCase):

    def test_request(self):
        query = 'test_query'
        dicto = defaultdict(dict)
        dicto['pageno'] = 1
        dicto['language'] = 'fr_FR'
        params = yacy.request(query, dicto)
        self.assertIn('url', params)
        self.assertIn(query, params['url'])
        self.assertIn('localhost', params['url'])
        self.assertIn('fr', params['url'])

        dicto['language'] = 'all'
        params = yacy.request(query, dicto)
        self.assertIn('url', params)
        self.assertNotIn('lr=lang_', params['url'])

    def test_response(self):
        self.assertRaises(AttributeError, yacy.response, None)
        self.assertRaises(AttributeError, yacy.response, [])
        self.assertRaises(AttributeError, yacy.response, '')
        self.assertRaises(AttributeError, yacy.response, '[]')

        response = mock.Mock(text='{}')
        self.assertEqual(yacy.response(response), [])

        response = mock.Mock(text='{"data": []}')
        self.assertEqual(yacy.response(response), [])

        json = """
        {
          "channels": [
            {
              "title": "YaCy P2P-Search for test",
              "description": "Search for test",
              "link": "http://search.yacy.de:7001/yacysearch.html?query=test&resource=global&contentdom=0",
              "image": {
                "url": "http://search.yacy.de:7001/env/grafics/yacy.png",
                "title": "Search for test",
                "link": "http://search.yacy.de:7001/yacysearch.html?query=test&resource=global&contentdom=0"
              },
              "totalResults": "249",
              "startIndex": "0",
              "itemsPerPage": "5",
              "searchTerms": "test",
              "items": [
                {
                  "title": "This is the title",
                  "link": "http://this.is.the.url",
                  "code": "",
                  "description": "This should be the content",
                  "pubDate": "Sat, 08 Jun 2013 02:00:00 +0200",
                  "size": "44213",
                  "sizename": "43 kbyte",
                  "guid": "lzh_1T_5FP-A",
                  "faviconCode": "XTS4uQ_5FP-A",
                  "host": "www.gamestar.de",
                  "path": "/spiele/city-of-heroes-freedom/47019.html",
                  "file": "47019.html",
                  "urlhash": "lzh_1T_5FP-A",
                  "ranking": "0.20106804"
                },
                {
                  "title": "This is the title2",
                  "icon": "/ViewImage.png?maxwidth=96&maxheight=96&code=7EbAbW6BpPOA",
                  "image": "http://image.url/image.png",
                  "cache": "/ViewImage.png?quadratic=&url=http://golem.ivwbox.de/cgi-bin/ivw/CP/G_INET?d=14071378",
                  "url": "http://this.is.the.url",
                  "urlhash": "7EbAbW6BpPOA",
                  "host": "www.golem.de",
                  "width": "-1",
                  "height": "-1"
                }
              ]
            }
          ]
        }
        """
        response = mock.Mock(text=json)
        results = yacy.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 2)
        self.assertEqual(results[0]['title'], 'This is the title')
        self.assertEqual(results[0]['url'], 'http://this.is.the.url')
        self.assertEqual(results[0]['content'], 'This should be the content')
        self.assertEqual(results[1]['img_src'], 'http://image.url/image.png')
        self.assertEqual(results[1]['content'], '')
        self.assertEqual(results[1]['url'], 'http://this.is.the.url')
        self.assertEqual(results[1]['title'], 'This is the title2')
@@ -1,190 +0,0 @@
# -*- coding: utf-8 -*-
|
||||
from collections import defaultdict
|
||||
import mock
|
||||
from searx.engines import yahoo
|
||||
from searx.testing import SearxTestCase
|
||||
|
||||
|
||||
class TestYahooEngine(SearxTestCase):
|
||||
|
||||
def test_parse_url(self):
|
||||
test_url = 'http://r.search.yahoo.com/_ylt=A0LEb9JUSKcAEGRXNyoA;_ylu=X3oDMTEzZm1qazYwBHNlYwNzcgRwb3MDMQRjb' +\
|
||||
'2xvA2Jm2dGlkA1NNRTcwM18x/RV=2/RE=1423106085/RO=10/RU=https%3a%2f%2fthis.is.the.url%2f/RK=0/RS=' +\
|
||||
'dtcJsfP4mEeBOjnVfUQ-'
|
||||
url = yahoo.parse_url(test_url)
|
||||
self.assertEqual('https://this.is.the.url/', url)
|
||||
|
||||
test_url = 'http://r.search.yahoo.com/_ylt=A0LElb9JUSKcAEGRXNyoA;_ylu=X3oDMTEzZm1qazYwBHNlYwNzcgRwb3MDMQRjb' +\
|
||||
'2xvA2Jm2dGlkA1NNRTcwM18x/RV=2/RE=1423106085/RO=10/RU=https%3a%2f%2fthis.is.the.url%2f/RS=' +\
|
||||
'dtcJsfP4mEeBOjnVfUQ-'
|
||||
url = yahoo.parse_url(test_url)
|
||||
self.assertEqual('https://this.is.the.url/', url)
|
||||
|
||||
test_url = 'https://this.is.the.url/'
|
||||
url = yahoo.parse_url(test_url)
|
||||
self.assertEqual('https://this.is.the.url/', url)
|
||||
|
||||
def test_request(self):
|
||||
yahoo.supported_languages = ['en', 'fr', 'zh-CHT', 'zh-CHS']
|
||||
query = 'test_query'
|
||||
dicto = defaultdict(dict)
|
||||
dicto['pageno'] = 1
|
||||
dicto['time_range'] = ''
|
||||
dicto['language'] = 'fr-FR'
|
||||
params = yahoo.request(query, dicto)
|
||||
self.assertIn('url', params)
|
||||
self.assertIn(query, params['url'])
|
||||
self.assertIn('search.yahoo.com', params['url'])
|
||||
self.assertIn('fr', params['url'])
|
||||
self.assertIn('cookies', params)
|
||||
self.assertIn('sB', params['cookies'])
|
||||
self.assertIn('fr', params['cookies']['sB'])
|
||||
|
||||
dicto['language'] = 'zh'
|
||||
params = yahoo.request(query, dicto)
|
||||
self.assertIn('zh_chs', params['url'])
|
||||
self.assertIn('zh_chs', params['cookies']['sB'])
|
||||
|
||||
dicto['language'] = 'zh-TW'
|
||||
params = yahoo.request(query, dicto)
|
||||
self.assertIn('zh_cht', params['url'])
|
||||
self.assertIn('zh_cht', params['cookies']['sB'])
|
||||
|
||||
dicto['language'] = 'all'
|
||||
params = yahoo.request(query, dicto)
|
||||
self.assertIn('cookies', params)
|
||||
self.assertIn('sB', params['cookies'])
|
||||
self.assertIn('en', params['cookies']['sB'])
|
||||
self.assertIn('en', params['url'])
|
||||
|
||||
def test_no_url_in_request_year_time_range(self):
|
||||
dicto = defaultdict(dict)
|
||||
query = 'test_query'
|
||||
dicto['time_range'] = 'year'
|
||||
params = yahoo.request(query, dicto)
|
||||
self.assertEqual({}, params['url'])
|
||||
|
||||
def test_response(self):
|
||||
self.assertRaises(AttributeError, yahoo.response, None)
|
||||
self.assertRaises(AttributeError, yahoo.response, [])
|
||||
self.assertRaises(AttributeError, yahoo.response, '')
|
||||
self.assertRaises(AttributeError, yahoo.response, '[]')
|
||||
|
||||
response = mock.Mock(text='<html></html>')
|
||||
self.assertEqual(yahoo.response(response), [])
|
||||
|
||||
html = """
|
||||
<ol class="reg mb-15 searchCenterMiddle">
|
||||
<li class="first">
|
||||
<div class="dd algo fst Sr">
|
||||
<div class="compTitle">
|
||||
<h3 class="title"><a class=" td-u" href="http://r.search.yahoo.com/_ylt=A0LEb9JUSKcAEGRXNyoA;
|
||||
_ylu=X3oDMTEzZm1qazYwBHNlYwNzcgRwb3MDMQRjb2xvA2Jm2dGlkA1NNRTcwM18x/RV=2/RE=1423106085/RO=10
|
||||
/RU=https%3a%2f%2fthis.is.the.url%2f/RK=0/RS=dtcJsfP4mEeBOjnVfUQ-"
|
||||
target="_blank" data-bid="54e712e13671c">
|
||||
<b><b>This is the title</b></b></a>
|
||||
</h3>
|
||||
</div>
|
||||
<div class="compText aAbs">
|
||||
<p class="lh-18"><b><b>This is the </b>content</b>
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</li>
|
||||
<li>
|
||||
<div class="dd algo lst Sr">
|
||||
<div class="compTitle">
|
||||
</div>
|
||||
<div class="compText aAbs">
|
||||
<p class="lh-18">This is the second content</p>
|
||||
</div>
|
||||
</div>
|
||||
</li>
|
||||
</ol>
|
||||
<div class="dd assist fst lst AlsoTry" data-bid="54e712e138d04">
|
||||
<div class="compTitle mb-4 h-17">
|
||||
<h3 class="title">Also Try</h3> </div>
|
||||
<table class="compTable m-0 ac-1st td-u fz-ms">
|
||||
<tbody>
|
||||
<tr>
|
||||
<td class="w-50p pr-28"><a href="https://search.yahoo.com/"><B>This is the </B>suggestion<B></B></a>
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
</div>
|
||||
"""
|
||||
response = mock.Mock(text=html)
|
||||
results = yahoo.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 2)
|
||||
self.assertEqual(results[0]['title'], 'This is the title')
|
||||
self.assertEqual(results[0]['url'], 'https://this.is.the.url/')
|
||||
self.assertEqual(results[0]['content'], 'This is the content')
|
||||
self.assertEqual(results[1]['suggestion'], 'This is the suggestion')
|
||||
|
||||
html = """
|
||||
<ol class="reg mb-15 searchCenterMiddle">
|
||||
<li class="first">
|
||||
<div class="dd algo fst Sr">
|
||||
<div class="compTitle">
|
||||
<h3 class="title"><a class=" td-u" href="http://r.search.yahoo.com/_ylt=A0LEb9JUSKcAEGRXNyoA;
|
||||
_ylu=X3oDMTEzZm1qazYwBHNlYwNzcgRwb3MDMQRjb2xvA2Jm2dGlkA1NNRTcwM18x/RV=2/RE=1423106085/RO=10
|
||||
/RU=https%3a%2f%2fthis.is.the.url%2f/RK=0/RS=dtcJsfP4mEeBOjnVfUQ-"
|
||||
target="_blank" data-bid="54e712e13671c">
|
||||
<b><b>This is the title</b></b></a>
|
||||
</h3>
|
||||
</div>
|
||||
<div class="compText aAbs">
|
||||
<p class="lh-18"><b><b>This is the </b>content</b>
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</li>
|
||||
</ol>
|
||||
"""
|
||||
response = mock.Mock(text=html)
|
||||
results = yahoo.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 1)
|
||||
self.assertEqual(results[0]['title'], 'This is the title')
|
||||
self.assertEqual(results[0]['url'], 'https://this.is.the.url/')
|
||||
self.assertEqual(results[0]['content'], 'This is the content')
|
||||
|
||||
html = """
|
||||
<li class="b_algo" u="0|5109|4755453613245655|UAGjXgIrPH5yh-o5oNHRx_3Zta87f_QO">
|
||||
</li>
|
||||
"""
|
||||
response = mock.Mock(text=html)
|
||||
results = yahoo.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 0)
|
||||
|
||||
def test_fetch_supported_languages(self):
|
||||
html = """<html></html>"""
|
||||
response = mock.Mock(text=html)
|
||||
results = yahoo._fetch_supported_languages(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 0)
|
||||
|
||||
html = """
|
||||
<html>
|
||||
<div>
|
||||
<div id="yschlang">
|
||||
<span>
|
||||
<label><input value="lang_ar"></input></label>
|
||||
</span>
|
||||
<span>
|
||||
<label><input value="lang_zh_chs"></input></label>
|
||||
<label><input value="lang_zh_cht"></input></label>
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
</html>
|
||||
"""
|
||||
response = mock.Mock(text=html)
|
||||
languages = yahoo._fetch_supported_languages(response)
|
||||
self.assertEqual(type(languages), list)
|
||||
self.assertEqual(len(languages), 3)
|
||||
self.assertIn('ar', languages)
|
||||
self.assertIn('zh-CHS', languages)
|
||||
self.assertIn('zh-CHT', languages)
@@ -1,150 +0,0 @@
# -*- coding: utf-8 -*-
|
||||
from collections import defaultdict
|
||||
from datetime import datetime
|
||||
import mock
|
||||
from searx.engines import yahoo_news
|
||||
from searx.testing import SearxTestCase
|
||||
|
||||
|
||||
class TestYahooNewsEngine(SearxTestCase):
|
||||
|
||||
def test_request(self):
|
||||
yahoo_news.supported_languages = ['en', 'fr']
|
||||
query = 'test_query'
|
||||
dicto = defaultdict(dict)
|
||||
dicto['pageno'] = 1
|
||||
dicto['language'] = 'fr-FR'
|
||||
params = yahoo_news.request(query, dicto)
|
||||
self.assertIn('url', params)
|
||||
self.assertIn(query, params['url'])
|
||||
self.assertIn('news.search.yahoo.com', params['url'])
|
||||
self.assertIn('fr', params['url'])
|
||||
self.assertIn('cookies', params)
|
||||
self.assertIn('sB', params['cookies'])
|
||||
self.assertIn('fr', params['cookies']['sB'])
|
||||
|
||||
dicto['language'] = 'all'
|
||||
params = yahoo_news.request(query, dicto)
|
||||
self.assertIn('cookies', params)
|
||||
self.assertIn('sB', params['cookies'])
|
||||
self.assertIn('en', params['cookies']['sB'])
|
||||
self.assertIn('en', params['url'])
|
||||
|
||||
def test_sanitize_url(self):
|
||||
url = "test.url"
|
||||
self.assertEqual(url, yahoo_news.sanitize_url(url))
|
||||
|
||||
url = "www.yahoo.com/;_ylt=test"
|
||||
self.assertEqual("www.yahoo.com/", yahoo_news.sanitize_url(url))
|
||||
|
||||
def test_response(self):
|
||||
self.assertRaises(AttributeError, yahoo_news.response, None)
|
||||
self.assertRaises(AttributeError, yahoo_news.response, [])
|
||||
self.assertRaises(AttributeError, yahoo_news.response, '')
|
||||
self.assertRaises(AttributeError, yahoo_news.response, '[]')
|
||||
|
||||
response = mock.Mock(text='<html></html>')
|
||||
self.assertEqual(yahoo_news.response(response), [])
|
||||
|
||||
html = """
|
||||
<ol class=" reg searchCenterMiddle">
|
||||
<li class="first">
|
||||
<div class="compTitle">
|
||||
<h3>
|
||||
<a class="yschttl spt" href="http://this.is.the.url" target="_blank">
|
||||
This is
|
||||
the <b>title</b>...
|
||||
</a>
|
||||
</h3>
|
||||
</div>
|
||||
<div>
|
||||
<span class="cite">Business via Yahoo!</span>
|
||||
<span class="tri fc-2nd ml-10">May 01 10:00 AM</span>
|
||||
</div>
|
||||
<div class="compText">
|
||||
This is the content
|
||||
</div>
|
||||
</li>
|
||||
<li class="first">
|
||||
<div class="compTitle">
|
||||
<h3>
|
||||
<a class="yschttl spt" target="_blank">
|
||||
</a>
|
||||
</h3>
|
||||
</div>
|
||||
<div class="compText">
|
||||
</div>
|
||||
</li>
|
||||
</ol>
|
||||
"""
|
||||
response = mock.Mock(text=html)
|
||||
results = yahoo_news.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 1)
|
||||
self.assertEqual(results[0]['title'], 'This is the title...')
|
||||
self.assertEqual(results[0]['url'], 'http://this.is.the.url/')
|
||||
self.assertEqual(results[0]['content'], 'This is the content')
|
||||
|
||||
html = """
|
||||
<ol class=" reg searchCenterMiddle">
|
||||
<li class="first">
|
||||
<div class="compTitle">
|
||||
<h3>
|
||||
<a class="yschttl spt" href="http://this.is.the.url" target="_blank">
|
||||
This is
|
||||
the <b>title</b>...
|
||||
</a>
|
||||
</h3>
|
||||
</div>
|
||||
<div>
|
||||
<span class="cite">Business via Yahoo!</span>
|
||||
<span class="tri fc-2nd ml-10">2 hours, 22 minutes ago</span>
|
||||
</div>
|
||||
<div class="compText">
|
||||
This is the content
|
||||
</div>
|
||||
</li>
|
||||
<li>
|
||||
<div class="compTitle">
|
||||
<h3>
|
||||
<a class="yschttl spt" href="http://this.is.the.url" target="_blank">
|
||||
This is
|
||||
the <b>title</b>...
|
||||
</a>
|
||||
</h3>
|
||||
</div>
|
||||
<div>
|
||||
<span class="cite">Business via Yahoo!</span>
|
||||
<span class="tri fc-2nd ml-10">22 minutes ago</span>
|
||||
</div>
|
||||
<div class="compText">
|
||||
This is the content
|
||||
</div>
|
||||
</li>
|
||||
<li>
|
||||
<div class="compTitle">
|
||||
<h3>
|
||||
<a class="yschttl spt" href="http://this.is.the.url" target="_blank">
|
||||
This is
|
||||
the <b>title</b>...
|
||||
</a>
|
||||
</h3>
|
||||
</div>
|
||||
<div>
|
||||
<span class="cite">Business via Yahoo!</span>
|
||||
<span class="tri fc-2nd ml-10">Feb 03 09:45AM 1900</span>
|
||||
</div>
|
||||
<div class="compText">
|
||||
This is the content
|
||||
</div>
|
||||
</li>
|
||||
</ol>
|
||||
"""
|
||||
response = mock.Mock(text=html)
|
||||
results = yahoo_news.response(response)
|
||||
self.assertEqual(type(results), list)
|
||||
self.assertEqual(len(results), 3)
|
||||
self.assertEqual(results[0]['title'], 'This is the title...')
|
||||
self.assertEqual(results[0]['url'], 'http://this.is.the.url/')
|
||||
self.assertEqual(results[0]['content'], 'This is the content')
|
||||
self.assertEqual(results[2]['publishedDate'].year, datetime.now().year)
@@ -1,111 +0,0 @@
from collections import defaultdict
import mock
from searx.engines import youtube_api
from searx.testing import SearxTestCase


class TestYoutubeAPIEngine(SearxTestCase):

    def test_request(self):
        query = 'test_query'
        dicto = defaultdict(dict)
        dicto['pageno'] = 0
        dicto['language'] = 'fr_FR'
        params = youtube_api.request(query, dicto)
        self.assertTrue('url' in params)
        self.assertTrue(query in params['url'])
        self.assertIn('googleapis.com', params['url'])
        self.assertIn('youtube', params['url'])
        self.assertIn('fr', params['url'])

        dicto['language'] = 'all'
        params = youtube_api.request(query, dicto)
        self.assertFalse('fr' in params['url'])

    def test_response(self):
        self.assertRaises(AttributeError, youtube_api.response, None)
        self.assertRaises(AttributeError, youtube_api.response, [])
        self.assertRaises(AttributeError, youtube_api.response, '')
        self.assertRaises(AttributeError, youtube_api.response, '[]')

        response = mock.Mock(text='{}')
        self.assertEqual(youtube_api.response(response), [])

        response = mock.Mock(text='{"data": []}')
        self.assertEqual(youtube_api.response(response), [])

        json = """
        {
          "kind": "youtube#searchListResponse",
          "etag": "xmg9xJZuZD438sF4hb-VcBBREXc/YJQDcTBCDcaBvl-sRZJoXdvy1ME",
          "nextPageToken": "CAUQAA",
          "pageInfo": {
            "totalResults": 1000000,
            "resultsPerPage": 20
          },
          "items": [
            {
              "kind": "youtube#searchResult",
              "etag": "xmg9xJZuZD438sF4hb-VcBBREXc/IbLO64BMhbHIgWLwLw7MDYe7Hs4",
              "id": {
                "kind": "youtube#video",
                "videoId": "DIVZCPfAOeM"
              },
              "snippet": {
                "publishedAt": "2015-05-29T22:41:04.000Z",
                "channelId": "UCNodmx1ERIjKqvcJLtdzH5Q",
                "title": "Title",
                "description": "Description",
                "thumbnails": {
                  "default": {
                    "url": "https://i.ytimg.com/vi/DIVZCPfAOeM/default.jpg"
                  },
                  "medium": {
                    "url": "https://i.ytimg.com/vi/DIVZCPfAOeM/mqdefault.jpg"
                  },
                  "high": {
                    "url": "https://i.ytimg.com/vi/DIVZCPfAOeM/hqdefault.jpg"
                  }
                },
                "channelTitle": "MinecraftUniverse",
                "liveBroadcastContent": "none"
              }
            }
          ]
        }
        """
        response = mock.Mock(text=json)
        results = youtube_api.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 1)
        self.assertEqual(results[0]['title'], 'Title')
        self.assertEqual(results[0]['url'], 'https://www.youtube.com/watch?v=DIVZCPfAOeM')
        self.assertEqual(results[0]['content'], 'Description')
        self.assertEqual(results[0]['thumbnail'], 'https://i.ytimg.com/vi/DIVZCPfAOeM/hqdefault.jpg')
        self.assertTrue('DIVZCPfAOeM' in results[0]['embedded'])

        json = """
        {
          "kind": "youtube#searchListResponse",
          "etag": "xmg9xJZuZD438sF4hb-VcBBREXc/YJQDcTBCDcaBvl-sRZJoXdvy1ME",
          "nextPageToken": "CAUQAA",
          "pageInfo": {
            "totalResults": 1000000,
            "resultsPerPage": 20
          }
        }
        """
        response = mock.Mock(text=json)
        results = youtube_api.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 0)

        json = """
        {"toto":{"entry":[]
        }
        }
        """
        response = mock.Mock(text=json)
        results = youtube_api.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 0)
@@ -1,124 +0,0 @@
# -*- coding: utf-8 -*-
from collections import defaultdict
import mock
from searx.engines import youtube_noapi
from searx.testing import SearxTestCase


class TestYoutubeNoAPIEngine(SearxTestCase):

    def test_request(self):
        query = 'test_query'
        dicto = defaultdict(dict)
        dicto['pageno'] = 0
        dicto['time_range'] = ''
        params = youtube_noapi.request(query, dicto)
        self.assertIn('url', params)
        self.assertIn(query, params['url'])
        self.assertIn('youtube.com', params['url'])

    def test_time_range_search(self):
        dicto = defaultdict(dict)
        query = 'test_query'
        dicto['time_range'] = 'year'
        params = youtube_noapi.request(query, dicto)
        self.assertIn('&sp=EgIIBQ%253D%253D', params['url'])

        dicto['time_range'] = 'month'
        params = youtube_noapi.request(query, dicto)
        self.assertIn('&sp=EgIIBA%253D%253D', params['url'])

        dicto['time_range'] = 'week'
        params = youtube_noapi.request(query, dicto)
        self.assertIn('&sp=EgIIAw%253D%253D', params['url'])

        dicto['time_range'] = 'day'
        params = youtube_noapi.request(query, dicto)
        self.assertIn('&sp=EgIIAg%253D%253D', params['url'])

    def test_response(self):
        self.assertRaises(AttributeError, youtube_noapi.response, None)
        self.assertRaises(AttributeError, youtube_noapi.response, [])
        self.assertRaises(AttributeError, youtube_noapi.response, '')
        self.assertRaises(AttributeError, youtube_noapi.response, '[]')

        response = mock.Mock(text='<html></html>')
        self.assertEqual(youtube_noapi.response(response), [])

        html = """
        <div></div>
        <script>
        window["ytInitialData"] = {
          "contents": {
            "twoColumnSearchResultsRenderer": {
              "primaryContents": {
                "sectionListRenderer": {
                  "contents": [
                    {
                      "itemSectionRenderer": {
                        "contents": [
                          {
                            "videoRenderer": {
                              "videoId": "DIVZCPfAOeM",
                              "title": {
                                "simpleText": "Title"
                              },
                              "descriptionSnippet": {
                                "runs": [
                                  {
                                    "text": "Des"
                                  },
                                  {
                                    "text": "cription"
                                  }
                                ]
                              }
                            }
                          },
                          {
                            "videoRenderer": {
                              "videoId": "9C_HReR_McQ",
                              "title": {
                                "simpleText": "Title"
                              },
                              "descriptionSnippet": {
                                "simpleText": "Description"
                              }
                            }
                          }
                        ]
                      }
                    }
                  ]
                }
              }
            }
          }
        };
        </script>
        """
        response = mock.Mock(text=html)
        results = youtube_noapi.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 2)
        self.assertEqual(results[0]['title'], 'Title')
        self.assertEqual(results[0]['url'], 'https://www.youtube.com/watch?v=DIVZCPfAOeM')
        self.assertEqual(results[0]['content'], 'Description')
        self.assertEqual(results[0]['thumbnail'], 'https://i.ytimg.com/vi/DIVZCPfAOeM/hqdefault.jpg')
        self.assertTrue('DIVZCPfAOeM' in results[0]['embedded'])
        self.assertEqual(results[1]['title'], 'Title')
        self.assertEqual(results[1]['url'], 'https://www.youtube.com/watch?v=9C_HReR_McQ')
        self.assertEqual(results[1]['content'], 'Description')
        self.assertEqual(results[1]['thumbnail'], 'https://i.ytimg.com/vi/9C_HReR_McQ/hqdefault.jpg')
        self.assertTrue('9C_HReR_McQ' in results[1]['embedded'])

        html = """
        <ol id="item-section-063864" class="item-section">
            <li>
            </li>
        </ol>
        """
        response = mock.Mock(text=html)
        results = youtube_noapi.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 0)
@@ -1,241 +0,0 @@
{
  "total": 2,
  "total_pages": 1,
  "results": [
    {
      "id": "FY8d721UO_4",
      "created_at": "2018-04-12T14:20:35-04:00",
      "updated_at": "2018-08-28T20:58:33-04:00",
      "width": 3891,
      "height": 5829,
      "color": "#152C33",
      "description": "low angle photography of swimming penguin",
      "urls": {
        "raw": "https://images.unsplash.com/photo-1523557148507-1b77641c7e7c?ixlib=rb-0.3.5&ixid=eyJhcHBfaWQiOjEyMDd9&s=095c5fc319c5a77c705f49ad63e0f195",
        "full": "https://images.unsplash.com/photo-1523557148507-1b77641c7e7c?ixlib=rb-0.3.5&q=85&fm=jpg&crop=entropy&cs=srgb&ixid=eyJhcHBfaWQiOjEyMDd9&s=74be977849c173d6929636d491a760c3",
        "regular": "https://images.unsplash.com/photo-1523557148507-1b77641c7e7c?ixlib=rb-0.3.5&q=80&fm=jpg&crop=entropy&cs=tinysrgb&w=1080&fit=max&ixid=eyJhcHBfaWQiOjEyMDd9&s=ad65df26970bd010085f0ca25434de33",
        "small": "https://images.unsplash.com/photo-1523557148507-1b77641c7e7c?ixlib=rb-0.3.5&q=80&fm=jpg&crop=entropy&cs=tinysrgb&w=400&fit=max&ixid=eyJhcHBfaWQiOjEyMDd9&s=5d2edfd073c31eb8ee7b305222bdc5a2",
        "thumb": "https://images.unsplash.com/photo-1523557148507-1b77641c7e7c?ixlib=rb-0.3.5&q=80&fm=jpg&crop=entropy&cs=tinysrgb&w=200&fit=max&ixid=eyJhcHBfaWQiOjEyMDd9&s=a9b9e56e63efc6f4611a87ce7e9a48f8"
      },
      "links": {
        "self": "https://api.unsplash.com/photos/FY8d721UO_4",
        "html": "https://unsplash.com/photos/FY8d721UO_4",
        "download": "https://unsplash.com/photos/FY8d721UO_4/download",
        "download_location": "https://api.unsplash.com/photos/FY8d721UO_4/download"
      },
      "categories": [],
      "sponsored": false,
      "likes": 31,
      "liked_by_user": false,
      "current_user_collections": [],
      "slug": null,
      "user": {
        "id": "N4gE4mrG8lE",
        "updated_at": "2018-10-03T02:51:19-04:00",
        "username": "gaspanik",
        "name": "Masaaki Komori",
        "first_name": "Masaaki",
        "last_name": "Komori",
        "twitter_username": "cipher",
        "portfolio_url": "https://www.instagram.com/cipher/",
        "bio": null,
        "location": "Tokyo, JAPAN",
        "links": {
          "self": "https://api.unsplash.com/users/gaspanik",
          "html": "https://unsplash.com/@gaspanik",
          "photos": "https://api.unsplash.com/users/gaspanik/photos",
          "likes": "https://api.unsplash.com/users/gaspanik/likes",
          "portfolio": "https://api.unsplash.com/users/gaspanik/portfolio",
          "following": "https://api.unsplash.com/users/gaspanik/following",
          "followers": "https://api.unsplash.com/users/gaspanik/followers"
        },
        "profile_image": {
          "small": "https://images.unsplash.com/profile-fb-1502270358-e7c86c1011ce.jpg?ixlib=rb-0.3.5&q=80&fm=jpg&crop=faces&cs=tinysrgb&fit=crop&h=32&w=32&s=9fe12f6d177bd6fdbd56d233a80c01a3",
          "medium": "https://images.unsplash.com/profile-fb-1502270358-e7c86c1011ce.jpg?ixlib=rb-0.3.5&q=80&fm=jpg&crop=faces&cs=tinysrgb&fit=crop&h=64&w=64&s=6ad7d156b62e438ae9dc794cba712fff",
          "large": "https://images.unsplash.com/profile-fb-1502270358-e7c86c1011ce.jpg?ixlib=rb-0.3.5&q=80&fm=jpg&crop=faces&cs=tinysrgb&fit=crop&h=128&w=128&s=13a08a2e72e7d11632410e92bd3a9406"
        },
        "instagram_username": "cipher",
        "total_collections": 0,
        "total_likes": 406,
        "total_photos": 196
      },
      "tags": [
        {"title": "animal"},
        {"title": "water"},
        {"title": "swim"},
        {"title": "aquarium"},
        {"title": "wallpaper"},
        {"title": "blue"},
        {"title": "sealife"},
        {"title": "wildlife"},
        {"title": "bird"},
        {"title": "deep sea"},
        {"title": "fish"},
        {"title": "water life"}
      ],
      "photo_tags": [
        {"title": "animal"},
        {"title": "water"},
        {"title": "swim"},
        {"title": "aquarium"},
        {"title": "wallpaper"}
      ]
    },
    {
      "id": "ayKyc01xLWA",
      "created_at": "2018-02-16T23:14:31-05:00",
      "updated_at": "2018-08-28T20:48:27-04:00",
      "width": 4928,
      "height": 3264,
      "color": "#161618",
      "description": "black and white penguins on ice field",
      "urls": {
        "raw": "https://images.unsplash.com/photo-1518840801558-9770b4a34eeb?ixlib=rb-0.3.5&ixid=eyJhcHBfaWQiOjEyMDd9&s=4e107a2bc49ab561ba6272eea2ec725d",
        "full": "https://images.unsplash.com/photo-1518840801558-9770b4a34eeb?ixlib=rb-0.3.5&q=85&fm=jpg&crop=entropy&cs=srgb&ixid=eyJhcHBfaWQiOjEyMDd9&s=f9b1e4d4572ab44efb2cf3d601d2b4d9",
        "regular": "https://images.unsplash.com/photo-1518840801558-9770b4a34eeb?ixlib=rb-0.3.5&q=80&fm=jpg&crop=entropy&cs=tinysrgb&w=1080&fit=max&ixid=eyJhcHBfaWQiOjEyMDd9&s=4430cedb63841f1fe055d5005316cc96",
        "small": "https://images.unsplash.com/photo-1518840801558-9770b4a34eeb?ixlib=rb-0.3.5&q=80&fm=jpg&crop=entropy&cs=tinysrgb&w=400&fit=max&ixid=eyJhcHBfaWQiOjEyMDd9&s=ee73c7af22ce445d408e240821ce07af",
        "thumb": "https://images.unsplash.com/photo-1518840801558-9770b4a34eeb?ixlib=rb-0.3.5&q=80&fm=jpg&crop=entropy&cs=tinysrgb&w=200&fit=max&ixid=eyJhcHBfaWQiOjEyMDd9&s=934302390d383cad8c571905e3a80bac"
      },
      "links": {
        "self": "https://api.unsplash.com/photos/ayKyc01xLWA",
        "html": "https://unsplash.com/photos/ayKyc01xLWA",
        "download": "https://unsplash.com/photos/ayKyc01xLWA/download",
        "download_location": "https://api.unsplash.com/photos/ayKyc01xLWA/download"
      },
      "categories": [],
      "sponsored": false,
      "likes": 37,
      "liked_by_user": false,
      "current_user_collections": [],
      "slug": null,
      "user": {
        "id": "tRb_KGw60Xk",
        "updated_at": "2018-09-20T11:51:54-04:00",
        "username": "ghost_cat",
        "name": "Danielle Barnes",
        "first_name": "Danielle",
        "last_name": "Barnes",
        "twitter_username": null,
        "portfolio_url": null,
        "bio": null,
        "location": null,
        "links": {
          "self": "https://api.unsplash.com/users/ghost_cat",
          "html": "https://unsplash.com/@ghost_cat",
          "photos": "https://api.unsplash.com/users/ghost_cat/photos",
          "likes": "https://api.unsplash.com/users/ghost_cat/likes",
          "portfolio": "https://api.unsplash.com/users/ghost_cat/portfolio",
          "following": "https://api.unsplash.com/users/ghost_cat/following",
          "followers": "https://api.unsplash.com/users/ghost_cat/followers"
        },
        "profile_image": {
          "small": "https://images.unsplash.com/profile-fb-1508491082-ae77f53e9ac3.jpg?ixlib=rb-0.3.5&q=80&fm=jpg&crop=faces&cs=tinysrgb&fit=crop&h=32&w=32&s=751bf6a557763648d52ffd7e60e79436",
          "medium": "https://images.unsplash.com/profile-fb-1508491082-ae77f53e9ac3.jpg?ixlib=rb-0.3.5&q=80&fm=jpg&crop=faces&cs=tinysrgb&fit=crop&h=64&w=64&s=e46cd1c8713035f045130e1b093b981e",
          "large": "https://images.unsplash.com/profile-fb-1508491082-ae77f53e9ac3.jpg?ixlib=rb-0.3.5&q=80&fm=jpg&crop=faces&cs=tinysrgb&fit=crop&h=128&w=128&s=352eabcf107c3ce95fe51a18485f116b"
        },
        "instagram_username": null,
        "total_collections": 0,
        "total_likes": 0,
        "total_photos": 21
      },
      "tags": [
        {"title": "ice"},
        {"title": "bird"},
        {"title": "ice field"},
        {"title": "iceberg"},
        {"title": "snow"},
        {"title": "frozen"},
        {"title": "animal"},
        {"title": "wildlife"},
        {"title": "wild"},
        {"title": "antarctica"},
        {"title": "sunshine"},
        {"title": "daylight"},
        {"title": "wilderness"},
        {"title": "south pole"},
        {"title": "flock"}
      ],
      "photo_tags": [
        {"title": "ice"},
        {"title": "bird"},
        {"title": "ice field"},
        {"title": "iceberg"},
        {"title": "snow"}
      ]
    }
  ]
}