mirror of https://github.com/searxng/searxng.git (synced 2025-12-31 08:00:02 +00:00)
[enh] py3 compatibility
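Most of the hunks below follow a handful of py2/py3 patterns: URL helpers are imported from searx.url_utils instead of urllib/urlparse, mocked engine responses carry text rather than content, and byte/unicode literals are made explicit. As a rough sketch of the import-shim idea (an assumption; the real searx.url_utils module is not shown in this diff), such a module can simply re-export the names from whichever stdlib location the running interpreter provides:

# Hypothetical sketch of a py2/py3 import shim in the spirit of searx.url_utils.
# Only the names used by these tests (quote_plus, ParseResult) are shown.
import sys

if sys.version_info[0] == 3:
    from urllib.parse import quote_plus, ParseResult
else:
    from urllib import quote_plus
    from urlparse import ParseResult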
@@ -0,0 +1,75 @@
# -*- coding: utf-8 -*-

from time import sleep

url = "http://localhost:11111/"


def test_index(browser):
    # Visit URL
    browser.visit(url)
    assert browser.is_text_present('about')


def test_404(browser):
    # Visit URL
    browser.visit(url + 'missing_link')
    assert browser.is_text_present('Page not found')


def test_about(browser):
    browser.visit(url)
    browser.click_link_by_text('about')
    assert browser.is_text_present('Why use searx?')


def test_preferences(browser):
    browser.visit(url)
    browser.click_link_by_text('preferences')
    assert browser.is_text_present('Preferences')
    assert browser.is_text_present('Cookies')

    assert browser.is_element_present_by_xpath('//label[@for="checkbox_dummy"]')


def test_preferences_engine_select(browser):
    browser.visit(url)
    browser.click_link_by_text('preferences')

    assert browser.is_element_present_by_xpath('//a[@href="#tab_engine"]')
    browser.find_by_xpath('//a[@href="#tab_engine"]').first.click()

    assert not browser.find_by_xpath('//input[@id="engine_general_dummy__general"]').first.checked
    browser.find_by_xpath('//label[@for="engine_general_dummy__general"]').first.check()
    browser.find_by_xpath('//input[@value="save"]').first.click()

    # waiting for the redirect - without this the test is flaky..
    sleep(1)

    browser.visit(url)
    browser.click_link_by_text('preferences')
    browser.find_by_xpath('//a[@href="#tab_engine"]').first.click()

    assert browser.find_by_xpath('//input[@id="engine_general_dummy__general"]').first.checked


def test_preferences_locale(browser):
    browser.visit(url)
    browser.click_link_by_text('preferences')

    browser.select('locale', 'hu')
    browser.find_by_xpath('//input[@value="save"]').first.click()

    # waiting for the redirect - without this the test is flaky..
    sleep(1)

    browser.visit(url)
    browser.click_link_by_text('beállítások')
    browser.is_text_present('Beállítások')


def test_search(browser):
    browser.visit(url)
    browser.fill('q', 'test search query')
    browser.find_by_xpath('//button[@type="submit"]').first.click()
    assert browser.is_text_present('didn\'t find any results')

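The splinter tests above receive a browser fixture; pytest-splinter ships one ready-made, but as an illustration only (not part of this commit), an equivalent hand-written fixture could look roughly like this:

# conftest.py -- illustrative sketch; the driver name is an assumption.
import pytest
from splinter import Browser


@pytest.fixture
def browser():
    b = Browser('firefox')
    yield b
    b.quit()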
@@ -1,153 +0,0 @@
*** Settings ***
Library          Selenium2Library    timeout=10    implicit_wait=0.5
Test Setup       Open Browser    http://localhost:11111/
Test Teardown    Close All Browsers


*** Keywords ***
Submit Preferences
    Set Selenium Speed    2 seconds
    Submit Form    id=search_form
    Location Should Be    http://localhost:11111/
    Set Selenium Speed    0 seconds


*** Test Cases ***
Front page
    Page Should Contain    about
    Page Should Contain    preferences

404 page
    Go To    http://localhost:11111/no-such-page
    Page Should Contain    Page not found
    Page Should Contain    Go to search page

About page
    Click Element    link=about
    Page Should Contain    Why use searx?
    Page Should Contain Element    link=search engines

Preferences page
    Click Element    link=preferences
    Page Should Contain    Preferences
    Page Should Contain    Default categories
    Page Should Contain    Currently used search engines
    Page Should Contain    dummy dummy
    Page Should Contain    general dummy

Switch category
    Go To    http://localhost:11111/preferences
    Page Should Contain Checkbox    category_general
    Page Should Contain Checkbox    category_dummy
    Click Element    xpath=//*[.="general"]
    Click Element    xpath=//*[.="dummy"]
    Submit Preferences
    Checkbox Should Not Be Selected    category_general
    Checkbox Should Be Selected    category_dummy

Change language
    Page Should Contain    about
    Page Should Contain    preferences
    Go To    http://localhost:11111/preferences
    Select From List    locale    hu
    Submit Preferences
    Page Should Contain    rólunk
    Page Should Contain    beállítások

Change method
    Page Should Contain    about
    Page Should Contain    preferences
    Go To    http://localhost:11111/preferences
    Select From List    method    GET
    Submit Preferences
    Go To    http://localhost:11111/preferences
    List Selection Should Be    method    GET
    Select From List    method    POST
    Submit Preferences
    Go To    http://localhost:11111/preferences
    List Selection Should Be    method    POST

Change theme
    Page Should Contain    about
    Page Should Contain    preferences
    Go To    http://localhost:11111/preferences
    List Selection Should Be    theme    legacy
    Select From List    theme    oscar
    Submit Preferences
    Go To    http://localhost:11111/preferences
    List Selection Should Be    theme    oscar

Change safesearch
    Page Should Contain    about
    Page Should Contain    preferences
    Go To    http://localhost:11111/preferences
    List Selection Should Be    safesearch    None
    Select From List    safesearch    Strict
    Submit Preferences
    Go To    http://localhost:11111/preferences
    List Selection Should Be    safesearch    Strict

Change image proxy
    Page Should Contain    about
    Page Should Contain    preferences
    Go To    http://localhost:11111/preferences
    List Selection Should Be    image_proxy    Disabled
    Select From List    image_proxy    Enabled
    Submit Preferences
    Go To    http://localhost:11111/preferences
    List Selection Should Be    image_proxy    Enabled

Change search language
    Page Should Contain    about
    Page Should Contain    preferences
    Go To    http://localhost:11111/preferences
    List Selection Should Be    language    Default language
    Select From List    language    Türkçe - tr-TR
    Submit Preferences
    Go To    http://localhost:11111/preferences
    List Selection Should Be    language    Türkçe - tr-TR

Change autocomplete
    Page Should Contain    about
    Page Should Contain    preferences
    Go To    http://localhost:11111/preferences
    List Selection Should Be    autocomplete    -
    Select From List    autocomplete    google
    Submit Preferences
    Go To    http://localhost:11111/preferences
    List Selection Should Be    autocomplete    google

Change allowed/disabled engines
    Page Should Contain    about
    Page Should Contain    preferences
    Go To    http://localhost:11111/preferences
    Page Should Contain    Engine name
    Element Should Contain    xpath=//label[@class="deny"][@for='engine_dummy_dummy_dummy']    Block
    Element Should Contain    xpath=//label[@class="deny"][@for='engine_general_general_dummy']    Block
    Click Element    xpath=//label[@class="deny"][@for='engine_general_general_dummy']
    Submit Preferences
    Page Should Contain    about
    Page Should Contain    preferences
    Go To    http://localhost:11111/preferences
    Page Should Contain    Engine name
    Element Should Contain    xpath=//label[@class="deny"][@for='engine_dummy_dummy_dummy']    Block
    Element Should Contain    xpath=//label[@class="deny"][@for='engine_general_general_dummy']    \

Block a plugin
    Page Should Contain    about
    Page Should Contain    preferences
    Go To    http://localhost:11111/preferences
    List Selection Should Be    theme    legacy
    Select From List    theme    oscar
    Submit Preferences
    Go To    http://localhost:11111/preferences
    List Selection Should Be    theme    oscar
    Page Should Contain    Plugins
    Click Link    Plugins
    Checkbox Should Not Be Selected    id=plugin_HTTPS_rewrite
    Click Element    xpath=//label[@for='plugin_HTTPS_rewrite']
    Submit Preferences
    Go To    http://localhost:11111/preferences
    Page Should Contain    Plugins
    Click Link    Plugins
    Checkbox Should Be Selected    id=plugin_HTTPS_rewrite

@@ -25,7 +25,7 @@ class TestArchLinuxEngine(SearxTestCase):
        self.assertTrue(query in params['url'])
        self.assertTrue('wiki.archlinux.org' in params['url'])

        for lang, domain in domains.iteritems():
        for lang, domain in domains.items():
            dic['language'] = lang
            params = archlinux.request(query, dic)
            self.assertTrue(domain in params['url'])

@@ -102,5 +102,5 @@ class TestArchLinuxEngine(SearxTestCase):
        for exp in expected:
            res = results[i]
            i += 1
            for key, value in exp.iteritems():
            for key, value in exp.items():
                self.assertEqual(res[key], value)

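Both hunks above swap dict.iteritems() for dict.items(): iteritems() was removed in Python 3, while items() exists on both versions (a list on Python 2, a view on Python 3), so the loop body stays unchanged. A minimal illustration (placeholder values):

# items() iterates on both Python 2 and 3; iteritems() raises
# AttributeError on Python 3.
domains = {'en': 'wiki.archlinux.org'}  # placeholder mapping
for lang, domain in domains.items():
    print(lang, domain)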
@@ -7,18 +7,18 @@ from searx.testing import SearxTestCase
class TestBingEngine(SearxTestCase):

    def test_request(self):
        query = 'test_query'
        query = u'test_query'
        dicto = defaultdict(dict)
        dicto['pageno'] = 0
        dicto['language'] = 'fr_FR'
        params = bing.request(query, dicto)
        params = bing.request(query.encode('utf-8'), dicto)
        self.assertTrue('url' in params)
        self.assertTrue(query in params['url'])
        self.assertTrue('language%3AFR' in params['url'])
        self.assertTrue('bing.com' in params['url'])

        dicto['language'] = 'all'
        params = bing.request(query, dicto)
        params = bing.request(query.encode('utf-8'), dicto)
        self.assertTrue('language' in params['url'])

    def test_response(self):

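The recurring mock.Mock(content=...) to mock.Mock(text=...) change reflects that the engines now read response.text rather than response.content; the tests mock that attribute with whatever shape the parser expects, a plain str or utf-8 encoded bytes when the markup carries an encoding declaration. In requests terms, .content is raw bytes and .text is the decoded str. A small sketch of the mocking pattern (illustration only, not part of the commit):

import mock

html = u'<html></html>'
response = mock.Mock(text=html)                    # engines read .text now
legacy = mock.Mock(content=html.encode('utf-8'))   # the old .content-based mock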
@@ -36,10 +36,10 @@ class TestBingNewsEngine(SearxTestCase):
        self.assertRaises(AttributeError, bing_news.response, '')
        self.assertRaises(AttributeError, bing_news.response, '[]')

        response = mock.Mock(content='<html></html>')
        response = mock.Mock(text='<html></html>')
        self.assertEqual(bing_news.response(response), [])

        response = mock.Mock(content='<html></html>')
        response = mock.Mock(text='<html></html>')
        self.assertEqual(bing_news.response(response), [])

        html = """<?xml version="1.0" encoding="utf-8" ?>

@@ -74,7 +74,7 @@ class TestBingNewsEngine(SearxTestCase):
</item>
</channel>
</rss>""" # noqa
        response = mock.Mock(content=html)
        response = mock.Mock(text=html.encode('utf-8'))
        results = bing_news.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 2)

@@ -113,7 +113,7 @@ class TestBingNewsEngine(SearxTestCase):
</item>
</channel>
</rss>""" # noqa
        response = mock.Mock(content=html)
        response = mock.Mock(text=html.encode('utf-8'))
        results = bing_news.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 1)

@@ -136,11 +136,11 @@ class TestBingNewsEngine(SearxTestCase):
</channel>
</rss>""" # noqa

        response = mock.Mock(content=html)
        response = mock.Mock(text=html.encode('utf-8'))
        results = bing_news.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 0)

        html = """<?xml version="1.0" encoding="utf-8" ?>gabarge"""
        response = mock.Mock(content=html)
        response = mock.Mock(text=html.encode('utf-8'))
        self.assertRaises(lxml.etree.XMLSyntaxError, bing_news.response, response)

@@ -22,10 +22,10 @@ class TestBtdiggEngine(SearxTestCase):
        self.assertRaises(AttributeError, btdigg.response, '')
        self.assertRaises(AttributeError, btdigg.response, '[]')

        response = mock.Mock(content='<html></html>')
        response = mock.Mock(text='<html></html>')
        self.assertEqual(btdigg.response(response), [])

        html = """
        html = u"""
<div id="search_res">
<table>
<tr>

@@ -82,7 +82,7 @@ class TestBtdiggEngine(SearxTestCase):
</table>
</div>
"""
        response = mock.Mock(content=html)
        response = mock.Mock(text=html.encode('utf-8'))
        results = btdigg.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 1)

@@ -101,12 +101,12 @@ class TestBtdiggEngine(SearxTestCase):
</table>
</div>
"""
        response = mock.Mock(content=html)
        response = mock.Mock(text=html.encode('utf-8'))
        results = btdigg.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 0)

        html = """
        html = u"""
<div id="search_res">
<table>
<tr>

@@ -367,7 +367,7 @@ class TestBtdiggEngine(SearxTestCase):
</table>
</div>
"""
        response = mock.Mock(content=html)
        response = mock.Mock(text=html.encode('utf-8'))
        results = btdigg.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 5)

@@ -8,13 +8,13 @@ from searx.testing import SearxTestCase
class TestCurrencyConvertEngine(SearxTestCase):

    def test_request(self):
        query = 'test_query'
        query = b'test_query'
        dicto = defaultdict(dict)
        dicto['pageno'] = 1
        params = currency_convert.request(query, dicto)
        self.assertNotIn('url', params)

        query = 'convert 10 Pound Sterlings to United States Dollars'
        query = b'convert 10 Pound Sterlings to United States Dollars'
        params = currency_convert.request(query, dicto)
        self.assertIn('url', params)
        self.assertIn('finance.yahoo.com', params['url'])

@@ -21,7 +21,7 @@ class TestDigBTEngine(SearxTestCase):
        self.assertRaises(AttributeError, digbt.response, '')
        self.assertRaises(AttributeError, digbt.response, '[]')

        response = mock.Mock(content='<html></html>')
        response = mock.Mock(text='<html></html>')
        self.assertEqual(digbt.response(response), [])

        html = """

@@ -50,7 +50,7 @@ class TestDigBTEngine(SearxTestCase):
</td></tr>
</table>
"""
        response = mock.Mock(content=html)
        response = mock.Mock(text=html.encode('utf-8'))
        results = digbt.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 1)

@@ -90,8 +90,7 @@ class TestDuckduckgoEngine(SearxTestCase):
"wt-wt":"All Results","ar-es":"Argentina","au-en":"Australia","at-de":"Austria","be-fr":"Belgium (fr)"
}some more code..."""
        response = mock.Mock(text=js)
        languages = duckduckgo._fetch_supported_languages(response)
        self.assertEqual(type(languages), list)
        languages = list(duckduckgo._fetch_supported_languages(response))
        self.assertEqual(len(languages), 5)
        self.assertIn('wt-WT', languages)
        self.assertIn('es-AR', languages)

@@ -1,7 +1,6 @@
# -*- coding: utf-8 -*-
from collections import defaultdict
import mock
from json import dumps
from searx.engines import frinkiac
from searx.testing import SearxTestCase

@@ -44,6 +43,8 @@ class TestFrinkiacEngine(SearxTestCase):
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 4)
        self.assertEqual(results[0]['title'], u'S06E18')
        self.assertEqual(results[0]['url'], 'https://frinkiac.com/?p=caption&e=S06E18&t=534616')
        self.assertIn('p=caption', results[0]['url'])
        self.assertIn('e=S06E18', results[0]['url'])
        self.assertIn('t=534616', results[0]['url'])
        self.assertEqual(results[0]['thumbnail_src'], 'https://frinkiac.com/img/S06E18/534616/medium.jpg')
        self.assertEqual(results[0]['img_src'], 'https://frinkiac.com/img/S06E18/534616.jpg')

@@ -10,6 +10,7 @@ class TestGigablastEngine(SearxTestCase):
        query = 'test_query'
        dicto = defaultdict(dict)
        dicto['pageno'] = 0
        dicto['safesearch'] = 0
        dicto['language'] = 'all'
        params = gigablast.request(query, dicto)
        self.assertTrue('url' in params)

@@ -2,7 +2,7 @@ from collections import defaultdict
import mock
from searx.engines import soundcloud
from searx.testing import SearxTestCase
from urllib import quote_plus
from searx.url_utils import quote_plus


class TestSoundcloudEngine(SearxTestCase):

@@ -31,7 +31,7 @@ class TestStartpageEngine(SearxTestCase):
        self.assertRaises(AttributeError, startpage.response, '')
        self.assertRaises(AttributeError, startpage.response, '[]')

        response = mock.Mock(content='<html></html>')
        response = mock.Mock(text='<html></html>')
        self.assertEqual(startpage.response(response), [])

        html = """

@@ -62,7 +62,7 @@ class TestStartpageEngine(SearxTestCase):
</p>
</div>
"""
        response = mock.Mock(content=html)
        response = mock.Mock(text=html.encode('utf-8'))
        results = startpage.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 1)

@@ -133,7 +133,7 @@ class TestStartpageEngine(SearxTestCase):
</p>
</div>
"""
        response = mock.Mock(content=html)
        response = mock.Mock(text=html.encode('utf-8'))
        results = startpage.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 1)

@@ -33,13 +33,13 @@ class TestSwisscowsEngine(SearxTestCase):
        self.assertRaises(AttributeError, swisscows.response, '')
        self.assertRaises(AttributeError, swisscows.response, '[]')

        response = mock.Mock(content='<html></html>')
        response = mock.Mock(text=b'<html></html>')
        self.assertEqual(swisscows.response(response), [])

        response = mock.Mock(content='<html></html>')
        response = mock.Mock(text=b'<html></html>')
        self.assertEqual(swisscows.response(response), [])

        html = u"""
        html = b"""
<script>
App.Dispatcher.dispatch("initialize", {
html5history: true,

@@ -111,7 +111,7 @@ class TestSwisscowsEngine(SearxTestCase):
});
</script>
"""
        response = mock.Mock(content=html)
        response = mock.Mock(text=html)
        results = swisscows.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 3)

@@ -91,7 +91,7 @@ class TestTokyotoshokanEngine(SearxTestCase):
        self.assertEqual(r['title'], 'Koyomimonogatari')
        self.assertEqual(r['magnetlink'], 'magnet:?xt=urn:btih:4c19eb46b5113685fbd2288ed2531b0b')
        self.assertEqual(r['filesize'], int(1024 * 1024 * 10.5))
        self.assertEqual(r['publishedDate'], datetime(2016, 03, 26, 16, 41))
        self.assertEqual(r['publishedDate'], datetime(2016, 3, 26, 16, 41))
        self.assertEqual(r['content'], 'Comment: sample comment')
        self.assertEqual(r['seed'], 53)
        self.assertEqual(r['leech'], 18)

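The datetime(2016, 03, ...) fix above is a Python 3 requirement: integer literals with a leading zero were octal notation in Python 2 but are a SyntaxError in Python 3, so the month is written as a plain 3 (or, for genuine octal values, 0o3). For example:

from datetime import datetime

# Python 3 rejects datetime(2016, 03, 26, 16, 41) at parse time;
# the unpadded literal works on both interpreters.
published = datetime(2016, 3, 26, 16, 41)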
@@ -1,5 +1,4 @@
# -*- coding: utf-8 -*-
from json import loads
from lxml.html import fromstring
from collections import defaultdict
import mock

@@ -31,7 +30,7 @@ class TestWikidataEngine(SearxTestCase):
        self.assertRaises(AttributeError, wikidata.response, '')
        self.assertRaises(AttributeError, wikidata.response, '[]')

        response = mock.Mock(content='<html></html>', search_params={"language": "all"})
        response = mock.Mock(text='<html></html>', search_params={"language": "all"})
        self.assertEqual(wikidata.response(response), [])

    def test_getDetail(self):

@@ -13,15 +13,15 @@ class TestWikipediaEngine(SearxTestCase):
        query = 'test_query'
        dicto = defaultdict(dict)
        dicto['language'] = 'fr-FR'
        params = wikipedia.request(query, dicto)
        params = wikipedia.request(query.encode('utf-8'), dicto)
        self.assertIn('url', params)
        self.assertIn(query, params['url'])
        self.assertIn('test_query', params['url'])
        self.assertIn('Test_Query', params['url'])
        self.assertIn('fr.wikipedia.org', params['url'])

        query = 'Test_Query'
        params = wikipedia.request(query, dicto)
        query = u'Test_Query'
        params = wikipedia.request(query.encode('utf-8'), dicto)
        self.assertIn('Test_Query', params['url'])
        self.assertNotIn('test_query', params['url'])

@@ -57,7 +57,7 @@ class TestWikipediaEngine(SearxTestCase):
}
}
}"""
        response = mock.Mock(content=json, search_params=dicto)
        response = mock.Mock(text=json, search_params=dicto)
        self.assertEqual(wikipedia.response(response), [])

        # normal case

@@ -80,7 +80,7 @@ class TestWikipediaEngine(SearxTestCase):
}
}
}"""
        response = mock.Mock(content=json, search_params=dicto)
        response = mock.Mock(text=json, search_params=dicto)
        results = wikipedia.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 2)

@@ -108,10 +108,10 @@ class TestWikipediaEngine(SearxTestCase):
}
}
}"""
        response = mock.Mock(content=json, search_params=dicto)
        response = mock.Mock(text=json, search_params=dicto)
        results = wikipedia.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 0)
        self.assertEqual(len(results), 2)

        # no image
        json = """

@@ -130,7 +130,7 @@ class TestWikipediaEngine(SearxTestCase):
}
}
}"""
        response = mock.Mock(content=json, search_params=dicto)
        response = mock.Mock(text=json, search_params=dicto)
        results = wikipedia.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 2)

@@ -158,7 +158,7 @@ class TestWikipediaEngine(SearxTestCase):
}
}
}"""
        response = mock.Mock(content=json, search_params=dicto)
        response = mock.Mock(text=json, search_params=dicto)
        results = wikipedia.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 2)

@@ -35,11 +35,11 @@ class TestWolframAlphaAPIEngine(SearxTestCase):
        xml = '''<?xml version='1.0' encoding='UTF-8'?>
<queryresult success='false' error='false' />
'''
        response = mock.Mock(content=xml)
        response = mock.Mock(text=xml.encode('utf-8'))
        self.assertEqual(wolframalpha_api.response(response), [])

        # test basic case
        xml = """<?xml version='1.0' encoding='UTF-8'?>
        xml = b"""<?xml version='1.0' encoding='UTF-8'?>
<queryresult success='true'
error='false'
numpods='3'

@@ -83,7 +83,7 @@ class TestWolframAlphaAPIEngine(SearxTestCase):
</pod>
</queryresult>
"""
        response = mock.Mock(content=xml, request=request)
        response = mock.Mock(text=xml, request=request)
        results = wolframalpha_api.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 2)

@@ -107,7 +107,7 @@ class TestWolframAlphaAPIEngine(SearxTestCase):
        self.assertIn('result_plaintext', results[1]['content'])

        # test calc
        xml = """<?xml version='1.0' encoding='UTF-8'?>
        xml = b"""<?xml version='1.0' encoding='UTF-8'?>
<queryresult success='true'
error='false'
numpods='2'

@@ -144,7 +144,7 @@ class TestWolframAlphaAPIEngine(SearxTestCase):
</pod>
</queryresult>
"""
        response = mock.Mock(content=xml, request=request)
        response = mock.Mock(text=xml, request=request)
        results = wolframalpha_api.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 2)

@@ -48,11 +48,11 @@ class SelfIPTest(SearxTestCase):
        # IP test
        request = Mock(remote_addr='127.0.0.1')
        request.headers.getlist.return_value = []
        search = get_search_mock(query='ip', pageno=1)
        search = get_search_mock(query=b'ip', pageno=1)
        store.call(store.plugins, 'post_search', request, search)
        self.assertTrue('127.0.0.1' in search.result_container.answers)

        search = get_search_mock(query='ip', pageno=2)
        search = get_search_mock(query=b'ip', pageno=2)
        store.call(store.plugins, 'post_search', request, search)
        self.assertFalse('127.0.0.1' in search.result_container.answers)

@@ -60,26 +60,26 @@ class SelfIPTest(SearxTestCase):
        request = Mock(user_agent='Mock')
        request.headers.getlist.return_value = []

        search = get_search_mock(query='user-agent', pageno=1)
        search = get_search_mock(query=b'user-agent', pageno=1)
        store.call(store.plugins, 'post_search', request, search)
        self.assertTrue('Mock' in search.result_container.answers)

        search = get_search_mock(query='user-agent', pageno=2)
        search = get_search_mock(query=b'user-agent', pageno=2)
        store.call(store.plugins, 'post_search', request, search)
        self.assertFalse('Mock' in search.result_container.answers)

        search = get_search_mock(query='user-agent', pageno=1)
        search = get_search_mock(query=b'user-agent', pageno=1)
        store.call(store.plugins, 'post_search', request, search)
        self.assertTrue('Mock' in search.result_container.answers)

        search = get_search_mock(query='user-agent', pageno=2)
        search = get_search_mock(query=b'user-agent', pageno=2)
        store.call(store.plugins, 'post_search', request, search)
        self.assertFalse('Mock' in search.result_container.answers)

        search = get_search_mock(query='What is my User-Agent?', pageno=1)
        search = get_search_mock(query=b'What is my User-Agent?', pageno=1)
        store.call(store.plugins, 'post_search', request, search)
        self.assertTrue('Mock' in search.result_container.answers)

        search = get_search_mock(query='What is my User-Agent?', pageno=2)
        search = get_search_mock(query=b'What is my User-Agent?', pageno=2)
        store.call(store.plugins, 'post_search', request, search)
        self.assertFalse('Mock' in search.result_container.answers)

@@ -1,8 +1,12 @@
# -*- coding: utf-8 -*-
import mock
import sys
from searx.testing import SearxTestCase
from searx import utils

if sys.version_info[0] == 3:
    unicode = str


class TestUtils(SearxTestCase):

@@ -30,9 +34,9 @@ class TestUtils(SearxTestCase):
        self.assertEqual(utils.highlight_content(content, None), content)

        content = 'a'
        query = 'test'
        query = b'test'
        self.assertEqual(utils.highlight_content(content, query), content)
        query = 'a test'
        query = b'a test'
        self.assertEqual(utils.highlight_content(content, query), content)

    def test_html_to_text(self):

@@ -2,10 +2,10 @@

import json
from mock import Mock
from urlparse import ParseResult
from searx import webapp
from searx.testing import SearxTestCase
from searx.search import Search
from searx.url_utils import ParseResult


class ViewsTestCase(SearxTestCase):

@@ -57,37 +57,35 @@ class ViewsTestCase(SearxTestCase):
    def test_index_empty(self):
        result = self.app.post('/')
        self.assertEqual(result.status_code, 200)
        self.assertIn('<div class="title"><h1>searx</h1></div>', result.data)
        self.assertIn(b'<div class="title"><h1>searx</h1></div>', result.data)

    def test_index_html(self):
        result = self.app.post('/', data={'q': 'test'})
        self.assertIn(
            '<h3 class="result_title"><img width="14" height="14" class="favicon" src="/static/themes/legacy/img/icons/icon_youtube.ico" alt="youtube" /><a href="http://second.test.xyz" rel="noreferrer">Second <span class="highlight">Test</span></a></h3>', # noqa
            b'<h3 class="result_title"><img width="14" height="14" class="favicon" src="/static/themes/legacy/img/icons/icon_youtube.ico" alt="youtube" /><a href="http://second.test.xyz" rel="noreferrer">Second <span class="highlight">Test</span></a></h3>', # noqa
            result.data
        )
        self.assertIn(
            '<p class="content">first <span class="highlight">test</span> content<br class="last"/></p>', # noqa
            b'<p class="content">first <span class="highlight">test</span> content<br class="last"/></p>', # noqa
            result.data
        )

    def test_index_json(self):
        result = self.app.post('/', data={'q': 'test', 'format': 'json'})

        result_dict = json.loads(result.data)
        result_dict = json.loads(result.data.decode('utf-8'))

        self.assertEqual('test', result_dict['query'])
        self.assertEqual(
            result_dict['results'][0]['content'], 'first test content')
        self.assertEqual(
            result_dict['results'][0]['url'], 'http://first.test.xyz')
        self.assertEqual(result_dict['results'][0]['content'], 'first test content')
        self.assertEqual(result_dict['results'][0]['url'], 'http://first.test.xyz')

    def test_index_csv(self):
        result = self.app.post('/', data={'q': 'test', 'format': 'csv'})

        self.assertEqual(
            'title,url,content,host,engine,score\r\n'
            'First Test,http://first.test.xyz,first test content,first.test.xyz,startpage,\r\n' # noqa
            'Second Test,http://second.test.xyz,second test content,second.test.xyz,youtube,\r\n', # noqa
            b'title,url,content,host,engine,score\r\n'
            b'First Test,http://first.test.xyz,first test content,first.test.xyz,startpage,\r\n' # noqa
            b'Second Test,http://second.test.xyz,second test content,second.test.xyz,youtube,\r\n', # noqa
            result.data
        )

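The webapp assertions switch to byte literals because the Flask test client returns response bodies (result.data) as bytes under Python 3, so substring checks compare bytes and JSON is decoded before parsing. A minimal sketch of the idiom (illustrative values, not taken from the test fixtures):

import json

data = b'{"query": "test"}'            # shape of result.data on Python 3
assert b'"query"' in data              # containment checks use byte literals
parsed = json.loads(data.decode('utf-8'))
assert parsed['query'] == 'test'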
@@ -95,65 +93,65 @@ class ViewsTestCase(SearxTestCase):
        result = self.app.post('/', data={'q': 'test', 'format': 'rss'})

        self.assertIn(
            '<description>Search results for "test" - searx</description>',
            b'<description>Search results for "test" - searx</description>',
            result.data
        )

        self.assertIn(
            '<opensearch:totalResults>3</opensearch:totalResults>',
            b'<opensearch:totalResults>3</opensearch:totalResults>',
            result.data
        )

        self.assertIn(
            '<title>First Test</title>',
            b'<title>First Test</title>',
            result.data
        )

        self.assertIn(
            '<link>http://first.test.xyz</link>',
            b'<link>http://first.test.xyz</link>',
            result.data
        )

        self.assertIn(
            '<description>first test content</description>',
            b'<description>first test content</description>',
            result.data
        )

    def test_about(self):
        result = self.app.get('/about')
        self.assertEqual(result.status_code, 200)
        self.assertIn('<h1>About <a href="/">searx</a></h1>', result.data)
        self.assertIn(b'<h1>About <a href="/">searx</a></h1>', result.data)

    def test_preferences(self):
        result = self.app.get('/preferences')
        self.assertEqual(result.status_code, 200)
        self.assertIn(
            '<form method="post" action="/preferences" id="search_form">',
            b'<form method="post" action="/preferences" id="search_form">',
            result.data
        )
        self.assertIn(
            '<legend>Default categories</legend>',
            b'<legend>Default categories</legend>',
            result.data
        )
        self.assertIn(
            '<legend>Interface language</legend>',
            b'<legend>Interface language</legend>',
            result.data
        )

    def test_stats(self):
        result = self.app.get('/stats')
        self.assertEqual(result.status_code, 200)
        self.assertIn('<h2>Engine stats</h2>', result.data)
        self.assertIn(b'<h2>Engine stats</h2>', result.data)

    def test_robots_txt(self):
        result = self.app.get('/robots.txt')
        self.assertEqual(result.status_code, 200)
        self.assertIn('Allow: /', result.data)
        self.assertIn(b'Allow: /', result.data)

    def test_opensearch_xml(self):
        result = self.app.get('/opensearch.xml')
        self.assertEqual(result.status_code, 200)
        self.assertIn('<Description>a privacy-respecting, hackable metasearch engine</Description>', result.data)
        self.assertIn(b'<Description>a privacy-respecting, hackable metasearch engine</Description>', result.data)

    def test_favicon(self):
        result = self.app.get('/favicon.ico')