mirror of https://github.com/searxng/searxng.git
Merge branch 'master' into fix-infinite-scroll
commit e64ff38217
@ -19,5 +19,8 @@ node_modules/
.tx/
build/
dist/
local/
gh-pages/
searx.egg-info/
27  Makefile
@ -1,19 +1,32 @@
# -*- coding: utf-8; mode: makefile-gmake -*-

export GIT_URL=https://github.com/asciimoo/searx
export SEARX_URL=https://searx.me
export DOCS_URL=https://asciimoo.github.io/searx

PYOBJECTS = searx
DOC = docs
PY_SETUP_EXTRAS ?= \[test\]

PYDIST=./dist/py
PYBUILD=./build/py

include utils/makefile.include
include utils/makefile.python
include utils/makefile.sphinx

all: clean install

PHONY += help
help:
	@echo '  test      - run developer tests'
	@echo '  docs      - build documentation'
	@echo '  docs-live - autobuild HTML documentation while editing'
	@echo '  run       - run developer instance'
	@echo '  install   - developer install (./local)'
	@echo '  uninstall - uninstall (./local)'
	@echo '  gh-pages  - build docs & deploy on gh-pages branch'
	@echo '  clean     - drop builds and environments'
	@echo ''
	@$(MAKE) -s -f utils/makefile.include make-help
	@echo ''

@ -40,6 +53,20 @@ run: pyenvinstall
	) &
	$(PY_ENV)/bin/python ./searx/webapp.py

# docs
# ----

PHONY += docs
docs: pyenvinstall sphinx-doc
	$(call cmd,sphinx,html,docs,docs)

PHONY += docs-live
docs-live: pyenvinstall sphinx-live
	$(call cmd,sphinx_autobuild,html,docs,docs)

$(GH_PAGES)::
	@echo "doc available at --> $(DOCS_URL)"

# test
# ----
@ -23,13 +23,13 @@ Go to the `searx-docker <https://github.com/searx/searx-docker>`__ project.

Without Docker
------
For all of the details, follow this `step by step installation <https://asciimoo.github.io/searx/dev/install/installation.html>`__.

Note: the documentation needs to be updated.

If you are in a hurry
------
- clone the source:
  ``git clone https://github.com/asciimoo/searx.git && cd searx``
- install dependencies: ``./manage.sh update_packages``
- edit your
@ -0,0 +1,130 @@
@import url("pocoo.css");

a, a.reference, a.footnote-reference {
  color: #004b6b;
  border-color: #004b6b;
}

a:hover {
  color: #6d4100;
  border-color: #6d4100;
}

p.version-warning {
  background-color: #004b6b;
}

div.sidebar {
  background-color: whitesmoke;
  border-color: lightsteelblue;
  border-radius: 3pt;
}

p.sidebar-title, .sidebar p {
  margin: 6pt;
}

.sidebar li,
.hlist li {
  list-style-type: disclosure-closed;
}

/* admonitions
*/

div.admonition, div.topic {
  background-color: #fafafa;
  margin: 8px 0px;
  padding: 1em;
  border-radius: 3pt 0 0 3pt;
  border-top: none;
  border-right: none;
  border-bottom: none;
  border-left: 5pt solid #ccc;
}

p.admonition-title:after {
  content: none;
}

.admonition.hint      { border-color: #416dc0b0; }
.admonition.note      { border-color: #6c856cb0; }
.admonition.tip       { border-color: #85c5c2b0; }
.admonition.attention { border-color: #ecec97b0; }
.admonition.caution   { border-color: #a6c677b0; }
.admonition.danger    { border-color: #d46262b0; }
.admonition.important { border-color: #dfa3a3b0; }
.admonition.error     { border-color: red; }
.admonition.warning   { border-color: darkred; }

.admonition.admonition-generic-admonition-title {
  border-color: #416dc0b0;
}

/* admonitions with (rendered) reST markup examples (:class: rst-example)
 *
 * .. admonition:: title of the example
 *     :class: rst-example
 *     ....
*/

div.rst-example {
  background-color: inherit;
  margin: 0;
  border-top: none;
  border-right: 1px solid #ccc;
  border-bottom: none;
  border-left: none;
  border-radius: none;
  padding: 0;
}

div.rst-example > p.admonition-title {
  font-family: Sans Serif;
  font-style: italic;
  font-size: 0.8em;
  display: block;
  border-bottom: 1px solid #ccc;
  padding: 0.5em 1em;
  text-align: right;
}

/* code block in figures
*/

div.highlight pre {
  text-align: left;
}

/* Table theme
*/

thead, tfoot {
  background-color: #fff;
}

th:hover, td:hover {
  background-color: #ffc;
}

thead th, tfoot th, tfoot td, tbody th {
  background-color: #fffaef;
}

tbody tr:nth-child(odd) {
  background-color: #fff;
}

tbody tr:nth-child(even) {
  background-color: #fafafa;
}

caption {
  font-family: Sans Serif;
  padding: 0.5em;
  margin: 0.5em 0 0.5em 0;
  caption-side: top;
  text-align: left;
}
@ -0,0 +1,6 @@
[theme]
inherit = pocoo
stylesheet = searx.css

[options]
touch_icon =
@ -0,0 +1,96 @@
.. _adminapi:

==================
Administration API
==================

Get configuration data
======================

.. code:: http

   GET /config HTTP/1.1

Sample response
---------------

.. code:: json

   {
     "autocomplete": "",
     "categories": [
       "map",
       "it",
       "images",
     ],
     "default_locale": "",
     "default_theme": "oscar",
     "engines": [
       {
         "categories": [
           "map"
         ],
         "enabled": true,
         "name": "openstreetmap",
         "shortcut": "osm"
       },
       {
         "categories": [
           "it"
         ],
         "enabled": true,
         "name": "arch linux wiki",
         "shortcut": "al"
       },
       {
         "categories": [
           "images"
         ],
         "enabled": true,
         "name": "google images",
         "shortcut": "goi"
       },
       {
         "categories": [
           "it"
         ],
         "enabled": false,
         "name": "bitbucket",
         "shortcut": "bb"
       },
     ],
     "instance_name": "searx",
     "locales": {
       "de": "Deutsch (German)",
       "en": "English",
       "eo": "Esperanto (Esperanto)",
     },
     "plugins": [
       {
         "enabled": true,
         "name": "HTTPS rewrite"
       },
       {
         "enabled": false,
         "name": "Vim-like hotkeys"
       }
     ],
     "safe_search": 0
   }

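A quick way to inspect this endpoint from a script is a small sketch like the
one below; it assumes a local instance on the default port and the Python
``requests`` package, neither of which is part of the API description above.

.. code:: python

   import requests

   # fetch the configuration of a (hypothetical) local instance
   resp = requests.get("http://127.0.0.1:8888/config")
   resp.raise_for_status()
   config = resp.json()

   # list the engines that are enabled by default
   for engine in config["engines"]:
       if engine["enabled"]:
           print(engine["name"], "->", engine["shortcut"])
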
Embed search bar
================

The search bar can be embedded into websites.  Just paste the example into the
HTML of the site.  The URL of the searx instance and the field values are
customizable.

.. code:: html

   <form method="post" action="https://searx.me/">
     <!-- search -->      <input type="text" name="q" />
     <!-- categories -->  <input type="hidden" name="categories" value="general,social media" />
     <!-- language -->    <input type="hidden" name="lang" value="all" />
     <!-- locale -->      <input type="hidden" name="locale" value="en" />
     <!-- date filter --> <input type="hidden" name="time_range" value="month" />
   </form>
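The same query can also be sent from a script.  A minimal sketch with the
Python ``requests`` package (the URL and values are simply the ones from the
example form above, not a fixed API):

.. code:: python

   import requests

   # submit the same fields the embedded form would post
   resp = requests.post("https://searx.me/", data={
       "q": "privacy",                        # search terms
       "categories": "general,social media",  # selected categories
       "lang": "all",
       "locale": "en",
       "time_range": "month",
   })
   print(resp.status_code)  # 200 on success; the body is the HTML result page
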
@ -0,0 +1,33 @@
digraph G {

  node [style=filled, shape=box, fillcolor="#ffffcc", fontname="Sans"];
  edge [fontname="Sans"];

  browser [label="Browser", shape=Mdiamond];
  rp      [label="Reverse Proxy", href="url to configure reverse proxy"];
  filtron [label="Filtron", href="https://github.com/asciimoo/filtron"];
  morty   [label="Morty", href="https://github.com/asciimoo/morty"];
  static  [label="Static files", href="url to configure static files"];
  uwsgi   [label="uwsgi", href="url to configure uwsgi"]
  searx1  [label="Searx #1"];
  searx2  [label="Searx #2"];
  searx3  [label="Searx #3"];
  searx4  [label="Searx #4"];

  browser -> rp [label="HTTPS"]

  subgraph cluster_searx {
    label = "Searx instance" fontname="Sans";
    bgcolor="#fafafa";
    { rank=same; static rp };
    rp -> morty [label="optional: images and HTML pages proxy"];
    rp -> static [label="optional: reverse proxy serves directly static files"];
    rp -> filtron [label="HTTP"];
    filtron -> uwsgi [label="HTTP"];
    uwsgi -> searx1;
    uwsgi -> searx2;
    uwsgi -> searx3;
    uwsgi -> searx4;
  }

}
@ -0,0 +1,24 @@
.. _architecture:

============
Architecture
============

.. sidebar:: Needs work!

   This article needs some work / Searx is a collaborative effort.  If you have
   any contribution, feel welcome to send us your :pull:`PR <../pulls>`, see
   :ref:`how to contribute`.

Herein you will find some hints and suggestions about typical architectures of
searx infrastructures.

We start with a contribution from :pull:`@dalf <1776#issuecomment-567917320>`.
It shows a *reference* setup for public searx instances.

.. _arch public:

.. kernel-figure:: arch_public.dot
   :alt: arch_public.dot

   Reference architecture of a public searx setup.
@ -0,0 +1,103 @@
.. _buildhosts:

==========
Buildhosts
==========

.. sidebar:: This article needs some work

   If you have any contribution, send us your :pull:`PR <../pulls>`, see
   :ref:`how to contribute`.

To get the best results from a build, it is recommended to install additional
packages on the build hosts.

.. _docs build:

Build docs
==========

.. _Graphviz: https://graphviz.gitlab.io
.. _ImageMagick: https://www.imagemagick.org
.. _XeTeX: https://tug.org/xetex/
.. _dvisvgm: https://dvisvgm.de/

.. sidebar:: Sphinx build needs

   - ImageMagick_
   - Graphviz_
   - XeTeX_
   - dvisvgm_

Most of the Sphinx requirements are installed from :origin:`setup.py` and the
docs can be built from scratch with ``make docs``.  For better math and image
processing additional packages are needed.  XeTeX_ is needed not only for PDF
creation, it is also needed for :ref:`math` when HTML output is built.

To be able to do :ref:`sphinx:math-support` without CDNs, the math is rendered
as images (``sphinx.ext.imgmath`` extension).  If your docs build (``make
docs``) shows warnings like this::

   WARNING: dot(1) not found, for better output quality install \
            graphviz from http://www.graphviz.org
   ..
   WARNING: LaTeX command 'latex' cannot be run (needed for math \
            display), check the imgmath_latex setting

you need to install additional packages on your build host to get better HTML
output.

.. _system requirements:

.. tabs::

   .. group-tab:: Ubuntu / debian

      .. code-block:: sh

         $ sudo apt install graphviz imagemagick texlive-xetex librsvg2-bin

   .. group-tab:: Arch Linux

      .. code-block:: sh

         $ sudo pacman -S graphviz imagemagick texlive-bin extra/librsvg

   .. group-tab:: Fedora / RHEL

      .. code-block:: sh

         $ sudo dnf install graphviz graphviz-gd texlive-xetex-bin librsvg2-tools

For PDF output you also need:

.. tabs::

   .. group-tab:: Ubuntu / debian

      .. code:: sh

         $ sudo apt install texlive-latex-recommended texlive-extra-utils ttf-dejavu

   .. group-tab:: Arch Linux

      .. code:: sh

         $ sudo pacman -S texlive-core texlive-latexextra ttf-dejavu

   .. group-tab:: Fedora / RHEL

      .. code:: sh

         $ sudo dnf install \
              texlive-collection-fontsrecommended texlive-collection-latex \
              dejavu-sans-fonts dejavu-serif-fonts dejavu-sans-mono-fonts

.. _system requirements END:

.. literalinclude:: ../conf.py
   :language: python
   :start-after: # sphinx.ext.imgmath setup
   :end-before: # sphinx.ext.imgmath setup END
@ -0,0 +1,71 @@
.. _engines generic:

=======
Engines
=======

.. sidebar:: Further reading ..

   - :ref:`settings engine`
   - :ref:`engine settings`
   - :ref:`engine file`

============= =========== ==================== ============
:ref:`engine settings`    :ref:`engine file`
------------------------- ---------------------------------
Name (cfg)    Categories
------------------------- ---------------------------------
Engine        ..          Paging support       **P**
------------------------- -------------------- ------------
Shortcut      **S**       Language support     **L**
Timeout       **TO**      Time range support   **TR**
Disabled      **D**       Offline              **O**
------------- ----------- -------------------- ------------
Safe search   **SS**
------------- ----------- ---------------------------------
Weight        **W**
------------- ----------- ---------------------------------
Disabled      **D**
============= =========== =================================

Configuration defaults (at build time):

.. _configured engines:

.. jinja:: webapp

   .. flat-table:: Engines configured at build time (defaults)
      :header-rows: 1
      :stub-columns: 2

      * - Name (cfg)
        - S
        - Engine
        - TO
        - Categories
        - P
        - L
        - SS
        - D
        - TR
        - O
        - W
        - D

      {% for name, mod in engines.items() %}

      * - {{name}}
        - !{{mod.shortcut}}
        - {{mod.__name__}}
        - {{mod.timeout}}
        - {{", ".join(mod.categories)}}
        - {{(mod.paging and "y") or ""}}
        - {{(mod.language_support and "y") or ""}}
        - {{(mod.safesearch and "y") or ""}}
        - {{(mod.disabled and "y") or ""}}
        - {{(mod.time_range_support and "y") or ""}}
        - {{(mod.offline and "y") or ""}}
        - {{mod.weight or 1 }}
        - {{(mod.disabled and "y") or ""}}

      {% endfor %}
@ -0,0 +1,148 @@
==========================
How to protect an instance
==========================

Searx depends on external search services.  To avoid the abuse of these
services it is advised to limit the number of requests processed by searx.

An application firewall, ``filtron``, solves exactly this problem.  Information
on how to install it can be found at the `project page of filtron
<https://github.com/asciimoo/filtron>`__.

Sample configuration of filtron
===============================

An example configuration can be found below.  This configuration limits the
access of:

- scripts or applications (roboagent limit)
- webcrawlers (botlimit)
- IPs which send too many requests (IP limit)
- too many json, csv, etc. requests (rss/json limit)
- the same UserAgent if it sends too many requests (useragent limit)

.. code:: json

   [{
      "name":"search request",
      "filters":[
         "Param:q",
         "Path=^(/|/search)$"
      ],
      "interval":"<time-interval-in-sec (int)>",
      "limit":"<max-request-number-in-interval (int)>",
      "subrules":[
         {
            "name":"roboagent limit",
            "interval":"<time-interval-in-sec (int)>",
            "limit":"<max-request-number-in-interval (int)>",
            "filters":[
               "Header:User-Agent=(curl|cURL|Wget|python-requests|Scrapy|FeedFetcher|Go-http-client)"
            ],
            "actions":[
               {
                  "name":"block",
                  "params":{
                     "message":"Rate limit exceeded"
                  }
               }
            ]
         },
         {
            "name":"botlimit",
            "limit":0,
            "stop":true,
            "filters":[
               "Header:User-Agent=(Googlebot|bingbot|Baiduspider|yacybot|YandexMobileBot|YandexBot|Yahoo! Slurp|MJ12bot|AhrefsBot|archive.org_bot|msnbot|MJ12bot|SeznamBot|linkdexbot|Netvibes|SMTBot|zgrab|James BOT)"
            ],
            "actions":[
               {
                  "name":"block",
                  "params":{
                     "message":"Rate limit exceeded"
                  }
               }
            ]
         },
         {
            "name":"IP limit",
            "interval":"<time-interval-in-sec (int)>",
            "limit":"<max-request-number-in-interval (int)>",
            "stop":true,
            "aggregations":[
               "Header:X-Forwarded-For"
            ],
            "actions":[
               {
                  "name":"block",
                  "params":{
                     "message":"Rate limit exceeded"
                  }
               }
            ]
         },
         {
            "name":"rss/json limit",
            "interval":"<time-interval-in-sec (int)>",
            "limit":"<max-request-number-in-interval (int)>",
            "stop":true,
            "filters":[
               "Param:format=(csv|json|rss)"
            ],
            "actions":[
               {
                  "name":"block",
                  "params":{
                     "message":"Rate limit exceeded"
                  }
               }
            ]
         },
         {
            "name":"useragent limit",
            "interval":"<time-interval-in-sec (int)>",
            "limit":"<max-request-number-in-interval (int)>",
            "aggregations":[
               "Header:User-Agent"
            ],
            "actions":[
               {
                  "name":"block",
                  "params":{
                     "message":"Rate limit exceeded"
                  }
               }
            ]
         }
      ]
   }]

Route request through filtron
=============================

Filtron can be started using the following command:

.. code:: sh

   $ filtron -rules rules.json

It listens on ``127.0.0.1:4004`` and forwards filtered requests to
``127.0.0.1:8888`` by default.

Use it along with ``nginx`` with the following example configuration.

.. code:: nginx

   location / {
       proxy_set_header Host $http_host;
       proxy_set_header X-Real-IP $remote_addr;
       proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
       proxy_set_header X-Scheme $scheme;
       proxy_pass http://127.0.0.1:4004/;
   }

Requests come in on port 4004, pass through filtron, and are then forwarded to
port 8888 where searx is running.
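To check that the limits are actually applied, you can query the filtron port
directly until the block message appears.  A small sketch, assuming the default
``127.0.0.1:4004`` listener and the rules above (the query text is arbitrary):

.. code:: python

   import requests

   # keep querying until filtron starts blocking
   for i in range(200):
       r = requests.get("http://127.0.0.1:4004/search", params={"q": "test"})
       if r.status_code != 200:
           print("blocked after", i + 1, "requests:", r.status_code, r.text)
           break
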
@ -0,0 +1,16 @@
===========================
Administrator documentation
===========================

.. toctree::
   :maxdepth: 1

   installation
   settings
   api
   architecture
   filtron
   morty
   engines
   plugins
   buildhosts
@ -0,0 +1,349 @@
.. _installation:

============
Installation
============

.. contents::
   :depth: 3

Basic installation
==================

Step by step installation for Debian/Ubuntu with virtualenv.  For Ubuntu, be
sure to have enabled the universe repository.

Install packages:

.. code:: sh

   $ sudo -H apt-get install \
        git build-essential libxslt-dev \
        python-dev python-virtualenv python-babel \
        zlib1g-dev libffi-dev libssl-dev

Install searx:

.. code:: sh

   cd /usr/local
   sudo -H git clone https://github.com/asciimoo/searx.git
   sudo -H useradd searx -d /usr/local/searx
   sudo -H chown searx:searx -R /usr/local/searx

Install dependencies in a virtualenv:

.. code:: sh

   cd /usr/local/searx
   sudo -H -u searx -i

.. code:: sh

   (searx)$ virtualenv searx-ve
   (searx)$ . ./searx-ve/bin/activate
   (searx)$ ./manage.sh update_packages

Configuration
==============

.. code:: sh

   sed -i -e "s/ultrasecretkey/`openssl rand -hex 16`/g" searx/settings.yml

Edit searx/settings.yml if necessary.

Check
=====

Start searx:

.. code:: sh

   python searx/webapp.py

Go to http://localhost:8888

If everything works fine, disable the debug option in settings.yml:

.. code:: sh

   sed -i -e "s/debug : True/debug : False/g" searx/settings.yml

At this point searx is not daemonized; uwsgi allows this.

You can exit the virtualenv and the searx user bash (enter the exit command
twice).

uwsgi
=====

Install packages:

.. code:: sh

   sudo -H apt-get install \
        uwsgi uwsgi-plugin-python

Create the configuration file ``/etc/uwsgi/apps-available/searx.ini`` with this
content:

.. code:: ini

   [uwsgi]
   # Who will run the code
   uid = searx
   gid = searx

   # disable logging for privacy
   disable-logging = true

   # Number of workers (usually CPU count)
   workers = 4

   # The right granted on the created socket
   chmod-socket = 666

   # Plugin to use and interpreter config
   single-interpreter = true
   master = true
   plugin = python
   lazy-apps = true
   enable-threads = true

   # Module to import
   module = searx.webapp

   # Support running the module from a webserver subdirectory.
   route-run = fixpathinfo:

   # Virtualenv and python path
   virtualenv = /usr/local/searx/searx-ve/
   pythonpath = /usr/local/searx/
   chdir = /usr/local/searx/searx/

Activate the uwsgi application and restart:

.. code:: sh

   cd /etc/uwsgi/apps-enabled
   ln -s ../apps-available/searx.ini
   /etc/init.d/uwsgi restart

Web server
==========

with nginx
----------

If nginx is not installed (uwsgi will not work with the package nginx-light):

.. code:: sh

   sudo -H apt-get install nginx

Hosted at /
~~~~~~~~~~~

Create the configuration file ``/etc/nginx/sites-available/searx`` with this
content:

.. code:: nginx

   server {
       listen 80;
       server_name searx.example.com;
       root /usr/local/searx/searx;

       location /static {
       }

       location / {
           include uwsgi_params;
           uwsgi_pass unix:/run/uwsgi/app/searx/socket;
       }
   }

Create a symlink to sites-enabled:

.. code:: sh

   sudo -H ln -s /etc/nginx/sites-available/searx /etc/nginx/sites-enabled/searx

Restart service:

.. code:: sh

   sudo -H service nginx restart
   sudo -H service uwsgi restart

from subdirectory URL (/searx)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

Add this configuration in the server config file
``/etc/nginx/sites-enabled/default``:

.. code:: nginx

   location /searx/static {
       alias /usr/local/searx/searx/static;
   }

   location /searx {
       uwsgi_param SCRIPT_NAME /searx;
       include uwsgi_params;
       uwsgi_pass unix:/run/uwsgi/app/searx/socket;
   }

**OR** use a reverse proxy (please note that a reverse proxy is advised only
for single-user or low-traffic instances):

.. code:: nginx

   location /searx/static {
       alias /usr/local/searx/searx/static;
   }

   location /searx {
       proxy_pass http://127.0.0.1:8888;
       proxy_set_header Host $host;
       proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
       proxy_set_header X-Scheme $scheme;
       proxy_set_header X-Script-Name /searx;
       proxy_buffering off;
   }

Enable ``base_url`` in ``searx/settings.yml``

.. code:: yaml

   base_url : http://your.domain.tld/searx/

Restart service:

.. code:: sh

   sudo -H service nginx restart
   sudo -H service uwsgi restart

disable logs
^^^^^^^^^^^^

For better privacy you can disable nginx logs about searx.

How to proceed: below ``uwsgi_pass`` in ``/etc/nginx/sites-available/default``
add:

.. code:: nginx

   access_log /dev/null;
   error_log /dev/null;

Restart service:

.. code:: sh

   sudo -H service nginx restart

with apache
-----------

Add wsgi mod:

.. code:: sh

   sudo -H apt-get install libapache2-mod-uwsgi
   sudo -H a2enmod uwsgi

Add this configuration in the file ``/etc/apache2/apache2.conf``:

.. code:: apache

   <Location />
       Options FollowSymLinks Indexes
       SetHandler uwsgi-handler
       uWSGISocket /run/uwsgi/app/searx/socket
   </Location>

Note that if your instance of searx is not at the root, you should change
``<Location />`` to the location of your instance, like ``<Location /searx>``.

Restart Apache:

.. code:: sh

   sudo -H /etc/init.d/apache2 restart

disable logs
~~~~~~~~~~~~

For better privacy you can disable Apache logs.

.. warning::

   You can only disable logs for the whole (virtual) server, not for a specific
   path.

Go back to ``/etc/apache2/apache2.conf`` and above ``<Location />`` add:

.. code:: apache

   CustomLog /dev/null combined

Restart Apache:

.. code:: sh

   sudo -H /etc/init.d/apache2 restart

How to update
=============

.. code:: sh

   cd /usr/local/searx
   sudo -H -u searx -i

.. code:: sh

   (searx)$ . ./searx-ve/bin/activate
   (searx)$ git stash
   (searx)$ git pull origin master
   (searx)$ git stash apply
   (searx)$ ./manage.sh update_packages

.. code:: sh

   sudo -H service uwsgi restart

Docker
======

Make sure you have installed Docker.  For instance, you can deploy searx like
this:

.. code:: sh

   docker pull wonderfall/searx
   docker run -d --name searx -p $PORT:8888 wonderfall/searx

Go to ``http://localhost:$PORT``.

See https://hub.docker.com/r/wonderfall/searx/ for more information.  It's also
possible to build searx from the embedded Dockerfile.

.. code:: sh

   git clone https://github.com/asciimoo/searx.git
   cd searx
   docker build -t whatever/searx .

References
==========

* https://about.okhin.fr/posts/Searx/ with some additions

* How to: `Setup searx in a couple of hours with a free SSL certificate
  <https://www.reddit.com/r/privacytoolsIO/comments/366kvn/how_to_setup_your_own_privacy_respecting_search/>`__
@ -0,0 +1,26 @@
=========================
How to setup result proxy
=========================

.. _morty: https://github.com/asciimoo/morty
.. _morty's README: https://github.com/asciimoo/morty

By default searx can only act as an image proxy for result images, but it is
possible to proxify all the result URLs with an external service, morty_.

To use this feature, morty has to be installed and activated in searx's
``settings.yml``.

Add the following snippet to your ``settings.yml`` and restart searx:

.. code:: yaml

   result_proxy:
       url : http://127.0.0.1:3000/
       key : your_morty_proxy_key

``url``
  Is the address of the running morty service.

``key``
  Is an optional argument, see `morty's README`_ for more information.
@ -0,0 +1,39 @@
.. _plugins generic:

===============
Plugins builtin
===============

.. sidebar:: Further reading ..

   - :ref:`dev plugin`

Configuration defaults (at build time):

:DO: Default on

.. _configured plugins:

.. jinja:: webapp

   .. flat-table:: Plugins configured at build time (defaults)
      :header-rows: 1
      :stub-columns: 1
      :widths: 3 1 9

      * - Name
        - DO
        - Description

          JS & CSS dependencies

      {% for plgin in plugins %}

      * - {{plgin.name}}
        - {{(plgin.default_on and "y") or ""}}
        - {{plgin.description}}

          {% for dep in (plgin.js_dependencies + plgin.css_dependencies) %}
          | ``{{dep}}`` {% endfor %}

      {% endfor %}
@ -0,0 +1,181 @@
.. _settings.yml:

================
``settings.yml``
================

.. sidebar:: Further reading ..

   - :ref:`search API`

This page describes the configuration possibilities of the settings.yml file.

.. _settings global:

Global Settings
===============

.. code:: yaml

   server:
       port : 8888
       secret_key : "ultrasecretkey" # change this!
       debug : False # debug mode, only for development
       request_timeout : 2.0 # seconds
       base_url : False # set custom base_url (or False)
       themes_path : "" # custom ui themes path
       default_theme : oscar # ui theme
       useragent_suffix : "" # suffix of searx_useragent, could contain
                             # information like the admin's email address
       image_proxy : False # proxying image results through searx
       default_locale : "" # default interface locale

       # uncomment below section if you want to use a proxy

       #outgoing_proxies :
       #    http : http://127.0.0.1:8080
       #    https: http://127.0.0.1:8080

       # uncomment below section only if you have more than one network interface
       # which can be the source of outgoing search requests

       #source_ips:
       #    - 1.1.1.1
       #    - 1.1.1.2

   locales:
       en : English
       de : Deutsch
       he : Hebrew
       hu : Magyar
       fr : Français
       es : Español
       it : Italiano
       nl : Nederlands
       ja : 日本語 (Japanese)
       tr : Türkçe
       ru : Russian
       ro : Romanian


``port`` :
  Port number of the searx web application if you run it directly using
  ``python searx/webapp.py``.  Doesn't apply to searx running on Apache or
  Nginx.

``secret_key`` :
  Used for cryptography purposes.

``debug`` :
  Allows more detailed logging if you run searx directly.  Displays *detailed*
  error messages in the browser too, so this must be deactivated in production.

``request_timeout`` :
  Global timeout of the requests made to other engines, in seconds.  A bigger
  timeout allows waiting for answers from slow engines, but in consequence
  slows searx's reactivity (the result page may take up to the specified
  timeout to load).

``base_url`` :
  The base URL where searx is deployed.  Used to create correct inbound links.

``themes_path`` :
  Path to where the themes are located.  If you didn't develop anything, leave
  it blank.

``default_theme`` :
  Name of the theme you want to use by default on your searx instance.

``useragent_suffix`` :
  Suffix to the user-agent searx uses to send requests to other engines.  If an
  engine wishes to block you, contact info here may help to avoid that.

``image_proxy`` :
  Allows your instance of searx to proxy images.  Uses memory space.

``default_locale`` :
  Searx interface language.  If blank, the locale is detected by using the
  browser language.  If it doesn't work, or you are deploying a language
  specific instance of searx, a locale can be defined using an ISO language
  code, like ``fr``, ``en``, ``de``.

.. _requests proxies: http://docs.python-requests.org/en/latest/user/advanced/#proxies
.. _PR SOCKS support: https://github.com/kennethreitz/requests/pull/478

``outgoing_proxies`` :
  Define a proxy you wish to use, see `requests proxies`_.  SOCKS proxies are
  not supported / see `PR SOCKS support`_.
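On the implementation side this setting maps to the ``proxies`` argument of the
``requests`` library; a rough sketch of what the commented-out example above
corresponds to (the proxy address is just the placeholder from that example):

.. code:: python

   import requests

   # equivalent of the outgoing_proxies example in settings.yml
   proxies = {
       "http": "http://127.0.0.1:8080",
       "https": "http://127.0.0.1:8080",
   }
   requests.get("https://example.org", proxies=proxies, timeout=2.0)
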
``source_ips`` :
  If you use multiple network interfaces, define from which IP the requests
  must be made.

``locales`` :
  Locale codes and their names.  Available translations of the searx interface.


.. _settings engine:

Engine settings
===============

.. sidebar:: Further reading ..

   - :ref:`engines-dev`

.. code:: yaml

   - name : bing
     engine : bing
     shortcut : bi
     base_url : 'https://{language}.wikipedia.org/'
     categories : general
     timeout : 3.0
     api_key : 'apikey'
     disabled : True
     language : en_US

``name`` :
  Name that will be used across searx to define this engine.  In settings, on
  the result page...

``engine`` :
  Name of the python file used to handle requests and responses to and from
  this search engine.

``shortcut`` :
  Code used to execute bang requests (in this case using ``!bi`` or ``?bi``)

``base_url`` : optional
  Part of the URL that should be stable across every request.  Can be useful to
  use multiple sites with only one engine, or to update the site URL without
  touching the code.

``categories`` : optional
  Define in which categories this engine will be active.  Most of the time, it
  is defined in the code of the engine, but in a few cases it is useful, like
  when describing multiple search engines using the same code.

``timeout`` : optional
  Timeout of the search with the current search engine.  **Be careful, it will
  modify the global timeout of searx.**

``api_key`` : optional
  In a few cases, using an API needs the use of a secret key.  How to obtain
  them is described in the file.

``disabled`` : optional
  Disables the engine by default without deleting it.  The user can still
  manually activate it in the settings.

``language`` : optional
  If you want to use another language for a specific engine, you can define it
  by using the full ISO code of language and country, like ``fr_FR``,
  ``en_US``, ``de_DE``.

``weight`` : default ``1``
  Weighting of the results of this engine.

.. note::

   A few more options are possible, but they are pretty specific to some
   engines, and so won't be described here.
@ -0,0 +1,43 @@
=============================================================
Searx admin interface
=============================================================

.. _searx-admin: https://github.com/kvch/searx-admin#searx-admin
.. _NLnet Foundation: https://nlnet.nl/

manage your instance from your browser

.. sidebar:: Installation

   The installation guide can be found in the repository of searx-admin_.

One of the biggest advantages of searx is being extremely customizable.  But at
first it can be daunting to newcomers.  A barrier to taking advantage of this
feature is our ugly settings file, which is sometimes hard to understand and
edit.

To make self-hosting searx more accessible, a new tool is introduced, called
``searx-admin``.  It is a web application which is capable of managing your
instance and manipulating its settings via a web UI.  It aims to replace
editing of ``settings.yml`` for less experienced administrators or people who
prefer graphical admin interfaces.

.. figure:: searx-admin-engines.png
   :alt: Screenshot of engine list

   Configuration page of engines

Since ``searx-admin`` acts as a supervisor for searx, we have decided to
implement it as a standalone tool instead of part of searx.  Another reason for
making it a standalone tool is that the codebase and dependencies of searx
should not grow because of a fully optional feature, which does not affect
existing instances.


Acknowledgements
================

This development was sponsored by `NLnet Foundation`_.

| Happy hacking.
| kvch // 2017.08.22 21:25
@ -0,0 +1,10 @@
====
Blog
====

.. toctree::
   :maxdepth: 1

   python3
   admin
   intro-offline
@ -0,0 +1,77 @@
===============================
Preparation for offline engines
===============================

Offline engines
===============

To extend the functionality of searx, offline engines are going to be
introduced.  An offline engine is an engine which does not need an Internet
connection to perform a search and does not use HTTP to communicate.

Offline engines can be configured like online engines, by adding them to the
`engines` list of :origin:`settings.yml <searx/settings.yml>`.  Thus, searx
finds the engine file and imports it.

Example skeleton for the new engines:

.. code:: python

   from subprocess import PIPE, Popen

   categories = ['general']
   offline = True

   def init(settings):
       pass

   def search(query, params):
       process = Popen(['ls', query], stdout=PIPE)
       return_code = process.wait()
       if return_code != 0:
           raise RuntimeError('non-zero return code', return_code)

       results = []
       line = process.stdout.readline()
       while line:
           result = parse_line(line)
           results.append(result)

           line = process.stdout.readline()

       return results

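The ``parse_line()`` helper is not part of the skeleton; what it returns
depends entirely on the engine.  A purely hypothetical version that exposes
each output line as a key/value pair (the template for such results is
mentioned under *Development progress* below) could look like this:

.. code:: python

   def parse_line(line):
       # hypothetical: expose the raw output line as a single key/value pair
       return {'filename': line.decode('utf-8').strip()}
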
Development progress
====================

First, a proposal was created as a GitHub issue.  Then it was moved to the wiki
as a design document.  You can read it here: :wiki:`Offline-engines`.

In this development step, the searx core was prepared to accept and perform
offline searches.  Offline search requests are scheduled together with regular
online requests.

As offline searches can return arbitrary results depending on the engine, the
current result templates were insufficient to present such results.  Thus, a
new template is introduced which is capable of presenting arbitrary key-value
pairs as a table.  For more details, check out the pull request :pull:`1700`.

Next steps
==========

Today, it is possible to create and run an offline engine.  However, it is
publicly available to everyone who knows the searx instance.  So the next step
is to introduce token based access for engines.  This way administrators are
able to limit the access to private engines.

Acknowledgement
===============

This development was sponsored by the `Search and Discovery Fund`_ of `NLnet
Foundation`_.

.. _Search and Discovery Fund: https://nlnet.nl/discovery
.. _NLnet Foundation: https://nlnet.nl/

| Happy hacking.
| kvch // 2019.10.21 17:03

@ -0,0 +1,68 @@
============================
Introducing Python 3 support
============================

.. _Python 2.7 clock: https://pythonclock.org/

.. sidebar:: Python 2.7 to 3 upgrade

   This chapter exists for historical reasons.  Python 2.7's support ends
   (`Python 2.7 clock`_), 11 years after Python 3 was released.

Most operating systems come with Python 3 installed by default, so it is time
for searx to support Python 3.  But don't worry, support for Python 2.7 won't
be dropped.

.. image:: searxpy3.png
   :scale: 50 %
   :alt: hurray
   :align: center


How to run searx using Python 3
===============================

Please make sure that you run at least Python 3.5.

To run searx, first a Python 3 virtualenv should be created.  After entering
the virtualenv, dependencies must be installed.  Then run searx with python3
instead of the usual python command.

.. code:: sh

   virtualenv -p python3 venv3
   source venv3/bin/activate
   pip3 install -r requirements.txt
   python3 searx/webapp.py


If you want to run searx using Python 2.7, you don't have to do anything
differently than before.

Fun facts
=========

- 115 files were changed when implementing the support for both Python
  versions.

- All of the dependencies were compatible except for robotframework, which was
  used for browser tests.  Thus, these tests were migrated to splinter.  So
  from now on both versions are being tested on Travis and can be tested
  locally.

If you found bugs
=================

Please open an issue on `GitHub`_.  Make sure that you mention your Python
version in your issue, so we can investigate it properly.

.. _GitHub: https://github.com/asciimoo/searx/issues

Acknowledgment
==============

This development was sponsored by `NLnet Foundation`_.

.. _NLnet Foundation: https://nlnet.nl/

| Happy hacking.
| kvch // 2017.05.13 22:57
Binary file not shown.
After Width: | Height: | Size: 50 KiB |
Binary file not shown.
After Width: | Height: | Size: 30 KiB |
@ -0,0 +1,114 @@
# -*- coding: utf-8 -*-

import sys, os
from searx.version import VERSION_STRING
from pallets_sphinx_themes import ProjectLink

GIT_URL = os.environ.get("GIT_URL", "https://github.com/asciimoo/searx")
SEARX_URL = os.environ.get("SEARX_URL", "https://searx.me")
DOCS_URL = os.environ.get("DOCS_URL", "https://asciimoo.github.io/searx/")

# Project --------------------------------------------------------------

project = u'searx'
copyright = u'2015-2019, Adam Tauber, Noémi Ványi'
author = u'Adam Tauber'
release, version = VERSION_STRING, VERSION_STRING
highlight_language = 'none'

# General --------------------------------------------------------------

master_doc = "index"
source_suffix = '.rst'
numfig = True

from searx import webapp
jinja_contexts = {
    'webapp': dict(**webapp.__dict__)
}

# usage:: lorem :patch:`f373169` ipsum
extlinks = {}

# upstream links
extlinks['wiki'] = ('https://github.com/asciimoo/searx/wiki/%s', ' ')
extlinks['pull'] = ('https://github.com/asciimoo/searx/pull/%s', 'PR ')

# links to custom brand
extlinks['origin'] = (GIT_URL + '/blob/master/%s', 'git://')
extlinks['patch'] = (GIT_URL + '/commit/%s', '#')
extlinks['search'] = (SEARX_URL + '/%s', '#')
extlinks['docs'] = (DOCS_URL + '/%s', 'docs: ')
extlinks['pypi'] = ('https://pypi.org/project/%s', 'PyPi: ')
extlinks['man'] = ('https://manpages.debian.org/jump?q=%s', '')
#extlinks['role'] = (
#    'https://www.sphinx-doc.org/en/master/usage/restructuredtext/roles.html#role-%s', '')
extlinks['duref'] = (
    'http://docutils.sourceforge.net/docs/ref/rst/restructuredtext.html#%s', '')
extlinks['durole'] = (
    'http://docutils.sourceforge.net/docs/ref/rst/roles.html#%s', '')
extlinks['dudir'] = (
    'http://docutils.sourceforge.net/docs/ref/rst/directives.html#%s', '')
extlinks['ctan'] = (
    'https://ctan.org/pkg/%s', 'CTAN: ')

extensions = [
    'sphinx.ext.imgmath',
    'sphinx.ext.extlinks',
    'sphinx.ext.viewcode',
    "sphinx.ext.autodoc",
    "sphinx.ext.intersphinx",
    "pallets_sphinx_themes",
    "sphinx_issues", # https://github.com/sloria/sphinx-issues/blob/master/README.rst
    "sphinxcontrib.jinja", # https://github.com/tardyp/sphinx-jinja
    'linuxdoc.rstFlatTable', # Implementation of the 'flat-table' reST-directive.
    'linuxdoc.kfigure', # Sphinx extension which implements scalable image handling.
    "sphinx_tabs.tabs", # https://github.com/djungelorm/sphinx-tabs
]

intersphinx_mapping = {
    "python": ("https://docs.python.org/3/", None),
    "flask": ("https://flask.palletsprojects.com/", None),
    # "werkzeug": ("https://werkzeug.palletsprojects.com/", None),
    "jinja": ("https://jinja.palletsprojects.com/", None),
    "linuxdoc" : ("https://return42.github.io/linuxdoc/", None),
    "sphinx" : ("https://www.sphinx-doc.org/en/master/", None),
}

issues_github_path = "asciimoo/searx"

# HTML -----------------------------------------------------------------

sys.path.append(os.path.abspath('_themes'))
html_theme_path = ['_themes']
html_theme = "searx"

# sphinx.ext.imgmath setup
html_math_renderer = 'imgmath'
imgmath_image_format = 'svg'
imgmath_font_size = 14
# sphinx.ext.imgmath setup END

html_theme_options = {"index_sidebar_logo": True}
html_context = {
    "project_links": [
        ProjectLink("Source", GIT_URL),
        ProjectLink("Wiki", "https://github.com/asciimoo/searx/wiki"),
        ProjectLink("Public instances", "https://asciimoo.github.io/searx/user/public_instances.html"),
        ProjectLink("Twitter", "https://twitter.com/Searx_engine"),
    ]
}
html_sidebars = {
    "**": ["project.html", "relations.html", "searchbox.html"],
}
singlehtml_sidebars = {"index": ["project.html", "localtoc.html"]}
html_static_path = ["static"]
html_logo = "static/img/searx_logo_small.png"
html_title = "Searx Documentation ({})".format("Searx-{}.tex".format(VERSION_STRING))
html_show_sourcelink = False

# LaTeX ----------------------------------------------------------------

latex_documents = [
    (master_doc, "searx-{}.tex".format(VERSION_STRING), html_title, author, "manual")
]
|
@ -0,0 +1,180 @@
|
||||||
|
.. _how to contribute:
|
||||||
|
|
||||||
|
=================
|
||||||
|
How to contribute
|
||||||
|
=================
|
||||||
|
|
||||||
|
Prime directives: Privacy, Hackability
|
||||||
|
======================================
|
||||||
|
|
||||||
|
Searx has two prime directives, **privacy-by-design and hackability** . The
|
||||||
|
hackability comes in three levels:
|
||||||
|
|
||||||
|
- support of search engines
|
||||||
|
- plugins to alter search behaviour
|
||||||
|
- hacking searx itself
|
||||||
|
|
||||||
|
Note the lack of "world domination" among the directives. Searx has no
|
||||||
|
intention of wide mass-adoption, rounded corners, etc. The prime directive
|
||||||
|
"privacy" deserves a separate chapter, as it's quite uncommon unfortunately.
|
||||||
|
|
||||||
|
Privacy-by-design
|
||||||
|
-----------------
|
||||||
|
|
||||||
|
Searx was born out of the need for a **privacy-respecting** search tool which
|
||||||
|
can be extended easily to maximize both, its search and its privacy protecting
|
||||||
|
capabilities.
|
||||||
|
|
||||||
|
A few widely used features work differently or turned off by default or not
|
||||||
|
implemented at all **as a consequence of privacy-by-design**.
|
||||||
|
|
||||||
|
If a feature reduces the privacy preserving aspects of searx, it should be
|
||||||
|
switched off by default or should not implemented at all. There are plenty of
|
||||||
|
search engines already providing such features. If a feature reduces the
|
||||||
|
protection of searx, users must be informed about the effect of choosing to
|
||||||
|
enable it. Features that protect privacy but differ from the expectations of
|
||||||
|
the user should also be explained.
|
||||||
|
|
||||||
|
Also, if you think that something in searx behaves strangely, it might be because
|
||||||
|
the tool you are using is designed to interfere with privacy.
|
||||||
|
Submitting a bug report to the vendor of the misbehaving tool is useful
|
||||||
|
feedback and may prompt it to respect its users (e.g. ``GET`` vs ``POST``
|
||||||
|
requests in various browsers).
|
||||||
|
|
||||||
|
Remember the other prime directive of searx is to be hackable, so if the above
|
||||||
|
privacy concerns are not to your liking, simply fork it.
|
||||||
|
|
||||||
|
*Happy hacking.*
|
||||||
|
|
||||||
|
Code
|
||||||
|
====
|
||||||
|
|
||||||
|
.. _PEP8: https://www.python.org/dev/peps/pep-0008/
|
||||||
|
.. _Conventional Commits: https://www.conventionalcommits.org/
|
||||||
|
.. _Git Commit Good Practice: https://wiki.openstack.org/wiki/GitCommitMessages
|
||||||
|
.. _Structural split of changes:
|
||||||
|
https://wiki.openstack.org/wiki/GitCommitMessages#Structural_split_of_changes
|
||||||
|
.. _gitmoji: https://gitmoji.carloscuesta.me/
|
||||||
|
.. _Semantic PR: https://github.com/zeke/semantic-pull-requests
|
||||||
|
|
||||||
|
.. sidebar:: Create good commits!
|
||||||
|
|
||||||
|
- `Structural split of changes`_
|
||||||
|
- `Conventional Commits`_
|
||||||
|
- `Git Commit Good Practice`_
|
||||||
|
- some like to use: gitmoji_
|
||||||
|
- not yet active: `Semantic PR`_
|
||||||
|
|
||||||
|
In order to submit a patch, please follow the steps below:
|
||||||
|
|
||||||
|
- Follow coding conventions.
|
||||||
|
|
||||||
|
- PEP8_ standards apply, except for the line length convention
|
||||||
|
- Maximum line length is 120 characters
|
||||||
|
|
||||||
|
- The cardinal rule for creating good commits is to ensure there is only one
|
||||||
|
*logical change* per commit / read `Structural split of changes`_
|
||||||
|
|
||||||
|
- Check if your code breaks existing tests. If so, update the tests or fix your
|
||||||
|
code.
|
||||||
|
|
||||||
|
- If your code can be unit-tested, add unit tests.
|
||||||
|
|
||||||
|
- Add yourself to the :origin:`AUTHORS.rst` file.
|
||||||
|
|
||||||
|
- Choose meaningful commit messages, read `Conventional Commits`_
|
||||||
|
|
||||||
|
.. code::
|
||||||
|
|
||||||
|
<type>[optional scope]: <description>
|
||||||
|
|
||||||
|
[optional body]
|
||||||
|
|
||||||
|
[optional footer(s)]
|
||||||
|
|
||||||
|
- Create a pull request.
|
||||||
|
|
||||||
|
For more help on getting started with searx development, see :ref:`devquickstart`.
|
||||||
|
|
||||||
|
|
||||||
|
Translation
|
||||||
|
===========
|
||||||
|
|
||||||
|
Translation currently takes place on :ref:`transifex <translation>`.
|
||||||
|
|
||||||
|
.. caution::
|
||||||
|
|
||||||
|
Please, do not update translation files in the repo.
|
||||||
|
|
||||||
|
|
||||||
|
.. _contrib docs:
|
||||||
|
|
||||||
|
Documentation
|
||||||
|
=============
|
||||||
|
|
||||||
|
.. _Sphinx: http://www.sphinx-doc.org
|
||||||
|
.. _reST: http://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html
|
||||||
|
|
||||||
|
.. sidebar:: The reST sources
|
||||||
|
|
||||||
|
have been moved from ``gh-branch`` into ``master`` (:origin:`docs`).
|
||||||
|
|
||||||
|
The documentation is built using Sphinx_. So in order to be able to generate
|
||||||
|
the required files, you have to install it on your system. It is much easier to use
|
||||||
|
our :ref:`makefile`.
|
||||||
|
|
||||||
|
Here is an example which makes a complete rebuild:
|
||||||
|
|
||||||
|
.. code:: sh
|
||||||
|
|
||||||
|
$ make docs-clean docs
|
||||||
|
...
|
||||||
|
The HTML pages are in dist/docs.
|
||||||
|
|
||||||
|
.. _make docs-live:
|
||||||
|
|
||||||
|
live build
|
||||||
|
----------
|
||||||
|
|
||||||
|
.. sidebar:: docs-clean
|
||||||
|
|
||||||
|
It is recommended to do a complete rebuild before deploying (use
|
||||||
|
``docs-clean``).
|
||||||
|
|
||||||
|
Live build is like WYSIWYG. If you want to edit the documentation, it is the
|
||||||
|
recommended way to work. The Makefile target ``docs-live`` builds the docs, opens the
|
||||||
|
URL in your favorite browser and rebuilds every time a reST file has been
|
||||||
|
changed.
|
||||||
|
|
||||||
|
.. code:: sh
|
||||||
|
|
||||||
|
$ make docs-live
|
||||||
|
...
|
||||||
|
The HTML pages are in dist/docs.
|
||||||
|
... Serving on http://0.0.0.0:8080
|
||||||
|
... Start watching changes
|
||||||
|
|
||||||
|
|
||||||
|
.. _deploy on github.io:
|
||||||
|
|
||||||
|
deploy on github.io
|
||||||
|
-------------------
|
||||||
|
|
||||||
|
To deploy documentation at :docs:`github.io <.>` use Makefile target
|
||||||
|
:ref:`make gh-pages`, which builds the documentation, clones searx into a sub
|
||||||
|
folder ``gh-pages``, cleans it, copies the doc build into it and runs all the
|
||||||
|
needed ``git add``, ``commit`` and ``push`` commands:
|
||||||
|
|
||||||
|
.. code:: sh
|
||||||
|
|
||||||
|
$ make docs-clean gh-pages
|
||||||
|
...
|
||||||
|
SPHINX docs --> file://<...>/dist/docs
|
||||||
|
The HTML pages are in dist/docs.
|
||||||
|
...
|
||||||
|
Cloning into 'gh-pages' ...
|
||||||
|
...
|
||||||
|
cd gh-pages; git checkout gh-pages >/dev/null
|
||||||
|
Switched to a new branch 'gh-pages'
|
||||||
|
...
|
||||||
|
doc available at --> https://asciimoo.github.io/searx
|
|
@ -0,0 +1,6 @@
|
||||||
|
stub col row 1, column, "Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy
|
||||||
|
eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam
|
||||||
|
voluptua."
|
||||||
|
stub col row 1, "At vero eos et accusam et justo duo dolores et ea rebum. Stet clita
|
||||||
|
kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet.", column
|
||||||
|
stub col row 1, column, column
|
|
@ -0,0 +1,267 @@
|
||||||
|
|
||||||
|
.. _engines-dev:
|
||||||
|
|
||||||
|
===============
|
||||||
|
Engine overview
|
||||||
|
===============
|
||||||
|
|
||||||
|
.. _metasearch-engine: https://en.wikipedia.org/wiki/Metasearch_engine
|
||||||
|
|
||||||
|
searx is a metasearch-engine_, so it uses different search engines to provide
|
||||||
|
better results.
|
||||||
|
|
||||||
|
Because there is no general search API which could be used for every search
|
||||||
|
engine, an adapter has to be built between searx and the external search
|
||||||
|
engines. Adapters are stored under the folder :origin:`searx/engines`.
|
||||||
|
|
||||||
|
.. contents::
|
||||||
|
:depth: 3
|
||||||
|
:backlinks: entry
|
||||||
|
|
||||||
|
general engine configuration
|
||||||
|
============================
|
||||||
|
|
||||||
|
It is required to tell searx the type of results the engine provides. The
|
||||||
|
arguments can be set in the engine file or in the settings file
|
||||||
|
(normally ``settings.yml``). The arguments in the settings file override
|
||||||
|
the ones in the engine file.
|
||||||
|
|
||||||
|
It does not matter if an option is stored in the engine file or in the
|
||||||
|
settings. However, the standard way is the following:
|
||||||
|
|
||||||
|
.. _engine file:
|
||||||
|
|
||||||
|
engine file
|
||||||
|
-----------
|
||||||
|
|
||||||
|
======================= =========== ===========================================
argument                type        information
======================= =========== ===========================================
categories              list        pages, in which the engine is working
paging                  boolean     support multiple pages
language_support        boolean     support language choosing
time_range_support      boolean     support search time range
offline                 boolean     engine runs offline
======================= =========== ===========================================
|
||||||
|
|
||||||
|
.. _engine settings:
|
||||||
|
|
||||||
|
settings.yml
|
||||||
|
------------
|
||||||
|
|
||||||
|
======================= =========== ===========================================
argument                type        information
======================= =========== ===========================================
name                    string      name of search-engine
engine                  string      name of searx-engine
                                    (filename without ``.py``)
shortcut                string      shortcut of search-engine
timeout                 string      specific timeout for search-engine
======================= =========== ===========================================
|
||||||
|
|
||||||
|
|
||||||
|
overrides
|
||||||
|
---------
|
||||||
|
|
||||||
|
A few of the options have default values in the engine, but are often
|
||||||
|
overwritten by the settings. If ``None`` is assigned to an option in the engine
|
||||||
|
file, it has to be redefined in the settings, otherwise searx will not start
|
||||||
|
with that engine.
|
||||||
|
|
||||||
|
The naming of overrides is arbitrary. But the recommended overrides are the
|
||||||
|
following:
|
||||||
|
|
||||||
|
======================= =========== ===========================================
argument                type        information
======================= =========== ===========================================
base_url                string      base-url, can be overwritten to use same
                                    engine on other URL
number_of_results       int         maximum number of results per request
language                string      ISO code of language and country like en_US
api_key                 string      api-key if required by engine
======================= =========== ===========================================
|
||||||
|
|
||||||
|
example code
|
||||||
|
------------
|
||||||
|
|
||||||
|
.. code:: python
|
||||||
|
|
||||||
|
# engine dependent config
|
||||||
|
categories = ['general']
|
||||||
|
paging = True
|
||||||
|
language_support = True
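The override options from the previous table are plain module level names as
well.  A minimal sketch with purely illustrative values (an option left as
``None`` has to be set in ``settings.yml``, as described above):

.. code:: python

   # illustrative override defaults in an engine module
   base_url = 'https://example.com/'
   number_of_results = 10
   language = 'en_US'
   api_key = None   # must be redefined in settings.yml, otherwise searx
                    # will not start with this engine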
|
||||||
|
|
||||||
|
|
||||||
|
making a request
|
||||||
|
================
|
||||||
|
|
||||||
|
To perform a search, a URL has to be specified. In addition to specifying a
|
||||||
|
URL, arguments can be passed to the query.
|
||||||
|
|
||||||
|
passed arguments
|
||||||
|
----------------
|
||||||
|
|
||||||
|
These arguments can be used to construct the search query. Furthermore,
|
||||||
|
parameters with default values can be redefined for special purposes.
|
||||||
|
|
||||||
|
====================== ============ ========================================================================
argument               type         default-value, information
====================== ============ ========================================================================
url                    string       ``''``
method                 string       ``'GET'``
headers                set          ``{}``
data                   set          ``{}``
cookies                set          ``{}``
verify                 boolean      ``True``
headers.User-Agent     string       a random User-Agent
category               string       current category, like ``'general'``
started                datetime     current date-time
pageno                 int          current pagenumber
language               string       specific language code like ``'en_US'``, or ``'all'`` if unspecified
====================== ============ ========================================================================
|
||||||
|
|
||||||
|
parsed arguments
|
||||||
|
----------------
|
||||||
|
|
||||||
|
The function ``def request(query, params):`` always returns the ``params``
|
||||||
|
variable. Inside searx, the following parameters can be used to specify a search
|
||||||
|
request:
|
||||||
|
|
||||||
|
============ =========== =========================================================
argument     type        information
============ =========== =========================================================
url          string      requested url
method       string      HTTP request method
headers      set         HTTP header information
data         set         HTTP data information (parsed if ``method != 'GET'``)
cookies      set         HTTP cookies
verify       boolean     Performing SSL-Validity check
============ =========== =========================================================
|
||||||
|
|
||||||
|
|
||||||
|
example code
|
||||||
|
------------
|
||||||
|
|
||||||
|
.. code:: python
|
||||||
|
|
||||||
|
from urllib.parse import urlencode  # or the urlencode helper used by the other engines

# search-url
|
||||||
|
base_url = 'https://example.com/'
|
||||||
|
search_string = 'search?{query}&page={page}'
|
||||||
|
|
||||||
|
# do search-request
|
||||||
|
def request(query, params):
|
||||||
|
search_path = search_string.format(
|
||||||
|
query=urlencode({'q': query}),
|
||||||
|
page=params['pageno'])
|
||||||
|
|
||||||
|
params['url'] = base_url + search_path
|
||||||
|
|
||||||
|
return params
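If an engine expects a ``POST`` request instead, the other parsed arguments
from the table above come into play.  A minimal sketch, reusing ``base_url``
from the snippet above and assuming a hypothetical JSON endpoint
``api/search``:

.. code:: python

   from json import dumps

   # do a POST based search-request (endpoint and field names are illustrative)
   def request(query, params):
       params['url'] = base_url + 'api/search'
       params['method'] = 'POST'
       params['headers']['Content-Type'] = 'application/json'
       params['data'] = dumps({'q': query, 'page': params['pageno']})
       return params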
|
||||||
|
|
||||||
|
|
||||||
|
returned results
|
||||||
|
================
|
||||||
|
|
||||||
|
Searx is able to return results of different media-types. Currently the
|
||||||
|
following media-types are supported:
|
||||||
|
|
||||||
|
- default_
|
||||||
|
- images_
|
||||||
|
- videos_
|
||||||
|
- torrent_
|
||||||
|
- map_
|
||||||
|
|
||||||
|
To set another media-type as default, the parameter ``template`` must be set to
|
||||||
|
the desired type.
|
||||||
|
|
||||||
|
default
|
||||||
|
-------
|
||||||
|
|
||||||
|
========================= =====================================================
result-parameter          information
========================= =====================================================
url                       string, url of the result
title                     string, title of the result
content                   string, general result-text
publishedDate             :py:class:`datetime.datetime`, time of publish
========================= =====================================================
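The counterpart of ``request`` is a ``response`` function which turns the
engine's answer into result dicts carrying the fields above.  A minimal
sketch, assuming the (hypothetical) service answers with JSON containing an
``items`` list:

.. code:: python

   # turn the JSON answer into searx results (default template)
   def response(resp):
       results = []
       for item in resp.json().get('items', []):
           results.append({
               'url': item['link'],
               'title': item['title'],
               'content': item.get('snippet', ''),
           })
       return results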
|
||||||
|
|
||||||
|
images
|
||||||
|
------
|
||||||
|
|
||||||
|
To use this template, set the parameters as follows:
|
||||||
|
|
||||||
|
========================= =====================================================
result-parameter          information
========================= =====================================================
template                  is set to ``images.html``
url                       string, url to the result site
title                     string, title of the result *(partly implemented)*
content                   *(partly implemented)*
publishedDate             :py:class:`datetime.datetime`,
                          time of publish *(partly implemented)*
img\_src                  string, url to the result image
thumbnail\_src            string, url to a small-preview image
========================= =====================================================
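As a sketch, an image result is appended inside ``response(resp)`` like a
default result, only with ``template`` set and the image specific fields
filled in (the ``item`` field names are hypothetical):

.. code:: python

   # inside ``response(resp)``: append one image result
   results.append({
       'template': 'images.html',
       'url': item['pageUrl'],               # link to the page the image is on
       'title': item['title'],
       'img_src': item['imageUrl'],          # full size image
       'thumbnail_src': item['thumbnailUrl'],
   })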
|
||||||
|
|
||||||
|
videos
|
||||||
|
------
|
||||||
|
|
||||||
|
========================= =====================================================
result-parameter          information
========================= =====================================================
template                  is set to ``videos.html``
url                       string, url of the result
title                     string, title of the result
content                   *(not implemented yet)*
publishedDate             :py:class:`datetime.datetime`, time of publish
thumbnail                 string, url to a small-preview image
========================= =====================================================
|
||||||
|
|
||||||
|
torrent
|
||||||
|
-------
|
||||||
|
|
||||||
|
.. _magnetlink: https://en.wikipedia.org/wiki/Magnet_URI_scheme
|
||||||
|
|
||||||
|
========================= =====================================================
result-parameter          information
========================= =====================================================
template                  is set to ``torrent.html``
url                       string, url of the result
title                     string, title of the result
content                   string, general result-text
publishedDate             :py:class:`datetime.datetime`,
                          time of publish *(not implemented yet)*
seed                      int, number of seeder
leech                     int, number of leecher
filesize                  int, size of file in bytes
files                     int, number of files
magnetlink                string, magnetlink_ of the result
torrentfile               string, torrentfile of the result
========================= =====================================================
|
||||||
|
|
||||||
|
|
||||||
|
map
|
||||||
|
---
|
||||||
|
|
||||||
|
========================= =====================================================
result-parameter          information
========================= =====================================================
url                       string, url of the result
title                     string, title of the result
content                   string, general result-text
publishedDate             :py:class:`datetime.datetime`, time of publish
latitude                  latitude of result (in decimal format)
longitude                 longitude of result (in decimal format)
boundingbox               boundingbox of result (array of 4 values
                          ``[lat-min, lat-max, lon-min, lon-max]``)
geojson                   geojson of result (http://geojson.org)
osm.type                  type of osm-object (if OSM-Result)
osm.id                    id of osm-object (if OSM-Result)
address.name              name of object
address.road              street name of object
address.house_number      house number of object
address.locality          city, place of object
address.postcode          postcode of object
address.country           country of object
========================= =====================================================
|
|
@ -0,0 +1,3 @@
|
||||||
|
graph G {
|
||||||
|
Hello -- World
|
||||||
|
}
|
|
@ -0,0 +1,15 @@
|
||||||
|
=======================
|
||||||
|
Developer documentation
|
||||||
|
=======================
|
||||||
|
|
||||||
|
.. toctree::
|
||||||
|
:maxdepth: 1
|
||||||
|
|
||||||
|
quickstart
|
||||||
|
contribution_guide
|
||||||
|
engine_overview
|
||||||
|
search_api
|
||||||
|
plugins
|
||||||
|
translation
|
||||||
|
makefile
|
||||||
|
reST
|
|
@ -0,0 +1,221 @@
|
||||||
|
.. _makefile:
|
||||||
|
|
||||||
|
================
|
||||||
|
Makefile Targets
|
||||||
|
================
|
||||||
|
|
||||||
|
.. _gnu-make: https://www.gnu.org/software/make/manual/make.html#Introduction
|
||||||
|
|
||||||
|
.. sidebar:: build environment
|
||||||
|
|
||||||
|
Before looking deeper at the targets, first read about :ref:`makefile setup`
|
||||||
|
and :ref:`make pyenv`.
|
||||||
|
|
||||||
|
With the aim of simplifying development cycles, :pull:`1756` introduced a
|
||||||
|
``Makefile`` based boilerplate. If you are not familiar with
|
||||||
|
Makefiles, we recommend reading the gnu-make_ introduction.
|
||||||
|
|
||||||
|
The usage is simple, just type ``make {target-name}`` to *build* a target.
|
||||||
|
Calling the ``help`` target gives a first overview::
|
||||||
|
|
||||||
|
$ make help
|
||||||
|
test - run developer tests
|
||||||
|
docs - build documentation
|
||||||
|
docs-live - autobuild HTML documentation while editing
|
||||||
|
run - run developer instance
|
||||||
|
install - developer install (./local)
|
||||||
|
uninstall - uninstall (./local)
|
||||||
|
gh-pages - build docs & deploy on gh-pages branch
|
||||||
|
clean - drop builds and environments
|
||||||
|
...
|
||||||
|
|
||||||
|
.. contents:: Contents
|
||||||
|
:depth: 2
|
||||||
|
:local:
|
||||||
|
:backlinks: entry
|
||||||
|
|
||||||
|
|
||||||
|
.. _makefile setup:
|
||||||
|
|
||||||
|
Setup
|
||||||
|
=====
|
||||||
|
|
||||||
|
.. _git stash: https://git-scm.com/docs/git-stash
|
||||||
|
|
||||||
|
The main setup is done in the :origin:`Makefile`::
|
||||||
|
|
||||||
|
export GIT_URL=https://github.com/asciimoo/searx
|
||||||
|
export SEARX_URL=https://searx.me
|
||||||
|
export DOCS_URL=https://asciimoo.github.io/searx
|
||||||
|
|
||||||
|
.. sidebar:: fork & upstream
|
||||||
|
|
||||||
|
Commit changes in your (local) branch, fork or whatever, but do not push them
|
||||||
|
upstream / `git stash`_ is your friend.
|
||||||
|
|
||||||
|
:GIT_URL: Change this to point to your searx fork.
|
||||||
|
|
||||||
|
:SEARX_URL: Change this to point to your searx instance.
|
||||||
|
|
||||||
|
:DOCS_URL: If you host your own (branded) documentation, change this URL.
|
||||||
|
|
||||||
|
.. _make pyenv:
|
||||||
|
|
||||||
|
Python environment
|
||||||
|
==================
|
||||||
|
|
||||||
|
.. sidebar:: activate environment
|
||||||
|
|
||||||
|
``source ./local/py3/bin/activate``
|
||||||
|
|
||||||
|
With the Makefile we no longer need to build up the virtualenv manually (as
|
||||||
|
described in the :ref:`devquickstart` guide). Jump into your git working tree
|
||||||
|
and run ``make pyenv``:
|
||||||
|
|
||||||
|
.. code:: sh
|
||||||
|
|
||||||
|
$ cd ~/searx-clone
|
||||||
|
$ make pyenv
|
||||||
|
PYENV usage: source ./local/py3/bin/activate
|
||||||
|
...
|
||||||
|
|
||||||
|
With target ``pyenv`` a development environment (aka virtualenv) is built up in
|
||||||
|
``./local/py3/``. To make a *developer install* of searx (:origin:`setup.py`)
|
||||||
|
into this environment, use make target ``install``:
|
||||||
|
|
||||||
|
.. code:: sh
|
||||||
|
|
||||||
|
$ make install
|
||||||
|
PYENV usage: source ./local/py3/bin/activate
|
||||||
|
PYENV using virtualenv from ./local/py3
|
||||||
|
PYENV install .
|
||||||
|
|
||||||
|
You never have to think about intermediate targets like ``pyenv`` or
|
||||||
|
``install``; the ``Makefile`` chains them as requisites. Just run your main
|
||||||
|
target.
|
||||||
|
|
||||||
|
.. sidebar:: drop environment
|
||||||
|
|
||||||
|
To get rid of the existing environment before re-building, use the :ref:`clean target
|
||||||
|
<make clean>` first.
|
||||||
|
|
||||||
|
If you think something has gone wrong with your ./local environment, or you change
|
||||||
|
the :origin:`setup.py` file (or the requirements listed in
|
||||||
|
:origin:`requirements-dev.txt` and :origin:`requirements.txt`), you have to call
|
||||||
|
:ref:`make clean`.
|
||||||
|
|
||||||
|
|
||||||
|
.. _make run:
|
||||||
|
|
||||||
|
``make run``
|
||||||
|
============
|
||||||
|
|
||||||
|
To get a developer instance up and running, simply call ``make run``. This enables the
|
||||||
|
*debug* option in :origin:`searx/settings.yml`, starts a ``./searx/webapp.py``
|
||||||
|
instance, disables the *debug* option again and opens the URL in your favorite web
|
||||||
|
browser (:man:`xdg-open`):
|
||||||
|
|
||||||
|
.. code:: sh
|
||||||
|
|
||||||
|
$ make run
|
||||||
|
PYENV usage: source ./local/py3/bin/activate
|
||||||
|
PYENV install .
|
||||||
|
./local/py3/bin/python ./searx/webapp.py
|
||||||
|
...
|
||||||
|
INFO:werkzeug: * Running on http://127.0.0.1:8888/ (Press CTRL+C to quit)
|
||||||
|
...
|
||||||
|
|
||||||
|
.. _make clean:
|
||||||
|
|
||||||
|
``make clean``
|
||||||
|
==============
|
||||||
|
|
||||||
|
Drop all intermediate files, all builds, but keep sources untouched. Includes
|
||||||
|
target ``pyclean`` which drops the ./local environment. Before calling ``make
|
||||||
|
clean``, stop all processes that use the :ref:`make pyenv` environment.
|
||||||
|
|
||||||
|
.. code:: sh
|
||||||
|
|
||||||
|
$ make clean
|
||||||
|
CLEAN pyclean
|
||||||
|
CLEAN clean
|
||||||
|
|
||||||
|
.. _make docs:
|
||||||
|
|
||||||
|
``make docs docs-live docs-clean``
|
||||||
|
==================================
|
||||||
|
|
||||||
|
We describe the usage of the ``doc*`` targets in the :ref:`How to contribute /
|
||||||
|
Documentation <contrib docs>` section. If you want to edit the documentation,
|
||||||
|
read our :ref:`make docs-live` section. If you are working on your own brand,
|
||||||
|
adjust your :ref:`Makefile setup <makefile setup>`.
|
||||||
|
|
||||||
|
|
||||||
|
.. _make gh-pages:
|
||||||
|
|
||||||
|
``make gh-pages``
|
||||||
|
=================
|
||||||
|
|
||||||
|
To deploy on github.io, first adjust your :ref:`Makefile setup <makefile
|
||||||
|
setup>`. For anything further, read :ref:`deploy on github.io`.
|
||||||
|
|
||||||
|
.. _make test:
|
||||||
|
|
||||||
|
``make test``
|
||||||
|
=============
|
||||||
|
|
||||||
|
Runs a series of tests: ``test.pep8``, ``test.unit``, ``test.robot`` and does
|
||||||
|
additional :ref:`pylint checks <make pylint>`. You can run tests selectively,
|
||||||
|
e.g.:
|
||||||
|
|
||||||
|
.. code:: sh
|
||||||
|
|
||||||
|
$ make test.pep8 test.unit
|
||||||
|
. ./local/py3/bin/activate; ./manage.sh pep8_check
|
||||||
|
[!] Running pep8 check
|
||||||
|
. ./local/py3/bin/activate; ./manage.sh unit_tests
|
||||||
|
[!] Running unit tests
|
||||||
|
|
||||||
|
.. _make pylint:
|
||||||
|
|
||||||
|
``make pylint``
|
||||||
|
===============
|
||||||
|
|
||||||
|
.. _Pylint: https://www.pylint.org/
|
||||||
|
|
||||||
|
Before committing, it is recommended to do some (more) linting. Pylint_ is known as
|
||||||
|
one of the best source-code, bug and quality checkers for the Python programming
|
||||||
|
language. Pylint_ is not yet a quality gate within our searx project (unlike
|
||||||
|
:ref:`test.pep8 <make test>`, which is), but Pylint_ can help to improve code
|
||||||
|
quality anyway. The pylint profile we use in the searx project is found in the
|
||||||
|
project's root folder: :origin:`.pylintrc`.
|
||||||
|
|
||||||
|
Code quality is an ongoing process. Don't try to fix all messages from Pylint;
|
||||||
|
run Pylint and check if your changed lines are bringing up new messages. If so,
|
||||||
|
fix it. This way, code quality gets incrementally better and, if the day comes
|
||||||
|
when the linting is balanced out, we might decide to add Pylint as a quality
|
||||||
|
gate.
|
||||||
|
|
||||||
|
|
||||||
|
``make pybuild``
|
||||||
|
================
|
||||||
|
|
||||||
|
.. _PyPi: https://pypi.org/
|
||||||
|
.. _twine: https://twine.readthedocs.io/en/latest/
|
||||||
|
|
||||||
|
Build Python packages in ``./dist/py``.
|
||||||
|
|
||||||
|
.. code:: sh
|
||||||
|
|
||||||
|
$ make pybuild
|
||||||
|
...
|
||||||
|
BUILD pybuild
|
||||||
|
running sdist
|
||||||
|
running egg_info
|
||||||
|
...
|
||||||
|
$ ls ./dist/py/
|
||||||
|
searx-0.15.0-py3-none-any.whl searx-0.15.0.tar.gz
|
||||||
|
|
||||||
|
To upload packages to PyPi_, there is also an ``upload-pypi`` target. It needs
|
||||||
|
twine_ to be installed. Since you are not the owner of :pypi:`searx`, you will
|
||||||
|
never need the latter.
|
|
@ -0,0 +1,54 @@
|
||||||
|
.. _dev plugin:
|
||||||
|
|
||||||
|
=======
|
||||||
|
Plugins
|
||||||
|
=======
|
||||||
|
|
||||||
|
.. sidebar:: Further reading ..
|
||||||
|
|
||||||
|
- :ref:`plugins generic`
|
||||||
|
|
||||||
|
Plugins can extend or replace functionality of various components of searx.
|
||||||
|
|
||||||
|
Example plugin
|
||||||
|
==============
|
||||||
|
|
||||||
|
.. code:: python
|
||||||
|
|
||||||
|
name = 'Example plugin'
|
||||||
|
description = 'This plugin extends the suggestions with the word "example"'
|
||||||
|
default_on = False # disabled by default
|
||||||
|
|
||||||
|
js_dependencies = tuple() # optional, list of static js files
|
||||||
|
css_dependencies = tuple() # optional, list of static css files
|
||||||
|
|
||||||
|
|
||||||
|
# attach callback to the post search hook
|
||||||
|
# request: flask request object
|
||||||
|
# ctx: the whole local context of the post search hook
|
||||||
|
def post_search(request, ctx):
|
||||||
|
ctx['search'].suggestions.add('example')
|
||||||
|
return True
|
||||||
|
|
||||||
|
Plugin entry points
|
||||||
|
===================
|
||||||
|
|
||||||
|
Entry points (hooks) define when a plugin runs. Right now only three hooks are
|
||||||
|
implemented. So feel free to implement a hook if it fits the behaviour of your
|
||||||
|
plugin.
|
||||||
|
|
||||||
|
Pre search hook
|
||||||
|
---------------
|
||||||
|
|
||||||
|
Runs BEFORE the search request. Function to implement: ``pre_search``
|
||||||
|
|
||||||
|
Post search hook
|
||||||
|
----------------
|
||||||
|
|
||||||
|
Runs AFTER the search request. Function to implement: ``post_search``
|
||||||
|
|
||||||
|
Result hook
|
||||||
|
-----------
|
||||||
|
|
||||||
|
Runs when a new result is added to the result list. Function to implement:
|
||||||
|
``on_result``
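A minimal sketch of such a result hook, modelled on the example plugin above;
the exact signature and return-value contract should be checked against the
existing plugins in :origin:`searx/plugins`:

.. code:: python

   # assumed to mirror the hooks above: the result dict (see :ref:`engines-dev`)
   # is passed in addition to the request and the local context
   def on_result(request, ctx, result):
       # purely illustrative: prefer https in result links
       result['url'] = result['url'].replace('http://', 'https://', 1)
       return True  # keep the result; see the existing plugins for the exact contract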
|
|
@ -0,0 +1,132 @@
|
||||||
|
.. _devquickstart:
|
||||||
|
|
||||||
|
======================
|
||||||
|
Development Quickstart
|
||||||
|
======================
|
||||||
|
|
||||||
|
.. sidebar:: :ref:`makefile`
|
||||||
|
|
||||||
|
For additional developer tasks, see the :ref:`makefile`.
|
||||||
|
|
||||||
|
This quickstart guide gets your environment set up with searx. Furthermore, it
|
||||||
|
gives a short introduction to the ``manage.sh`` script.
|
||||||
|
|
||||||
|
How to setup your development environment
|
||||||
|
=========================================
|
||||||
|
|
||||||
|
.. sidebar:: :ref:`make pyenv <make pyenv>`
|
||||||
|
|
||||||
|
Alternatively use the :ref:`make pyenv`.
|
||||||
|
|
||||||
|
First, clone the source code of searx to the desired folder. In this case the
|
||||||
|
source is cloned to ``~/myprojects/searx``. Then create and activate the
|
||||||
|
searx-ve virtualenv and install the required packages using ``manage.sh``.
|
||||||
|
|
||||||
|
.. code:: sh
|
||||||
|
|
||||||
|
cd ~/myprojects
|
||||||
|
git clone https://github.com/asciimoo/searx.git
|
||||||
|
cd searx
|
||||||
|
virtualenv searx-ve
|
||||||
|
. ./searx-ve/bin/activate
|
||||||
|
./manage.sh update_dev_packages
|
||||||
|
|
||||||
|
|
||||||
|
How to run tests
|
||||||
|
================
|
||||||
|
|
||||||
|
.. sidebar:: :ref:`make test.unit <make test>`
|
||||||
|
|
||||||
|
Alternatively use the ``test.pep8``, ``test.unit``, ``test.robot`` targets.
|
||||||
|
|
||||||
|
Tests can be run using the ``manage.sh`` script. The following tests and checks are
|
||||||
|
available:
|
||||||
|
|
||||||
|
- Unit tests
|
||||||
|
- Selenium tests
|
||||||
|
- PEP8 validation
|
||||||
|
- Unit test coverage check
|
||||||
|
|
||||||
|
For example unit tests are run with the command below:
|
||||||
|
|
||||||
|
.. code:: sh
|
||||||
|
|
||||||
|
./manage.sh unit_tests
|
||||||
|
|
||||||
|
For further test options, please consult the help of the ``manage.sh`` script or
|
||||||
|
read :ref:`make test`.
|
||||||
|
|
||||||
|
|
||||||
|
How to compile styles and javascript
|
||||||
|
====================================
|
||||||
|
|
||||||
|
.. _less: http://lesscss.org/
|
||||||
|
.. _NodeJS: https://nodejs.org
|
||||||
|
|
||||||
|
How to build styles
|
||||||
|
-------------------
|
||||||
|
|
||||||
|
Less_ is required to build the styles of searx. Less_ can be installed using
|
||||||
|
either NodeJS_ or Apt.
|
||||||
|
|
||||||
|
.. code:: sh
|
||||||
|
|
||||||
|
sudo -H apt-get install nodejs
|
||||||
|
sudo -H npm install -g less
|
||||||
|
|
||||||
|
OR
|
||||||
|
|
||||||
|
.. code:: sh
|
||||||
|
|
||||||
|
sudo -H apt-get install node-less
|
||||||
|
|
||||||
|
After satisfying the requirements, styles can be built using ``manage.sh``:
|
||||||
|
|
||||||
|
.. code:: sh
|
||||||
|
|
||||||
|
./manage.sh styles
|
||||||
|
|
||||||
|
|
||||||
|
How to build the source of the oscar theme
|
||||||
|
==========================================
|
||||||
|
|
||||||
|
.. _grunt: https://gruntjs.com/
|
||||||
|
|
||||||
|
Grunt_ must be installed in order to build the javascript sources. It depends on
|
||||||
|
NodeJS, so first Node has to be installed.
|
||||||
|
|
||||||
|
.. code:: sh
|
||||||
|
|
||||||
|
sudo -H apt-get install nodejs
|
||||||
|
sudo -H npm install -g grunt-cli
|
||||||
|
|
||||||
|
After installing grunt, the files can be built using the following command:
|
||||||
|
|
||||||
|
.. code:: sh
|
||||||
|
|
||||||
|
./manage.sh grunt_build
|
||||||
|
|
||||||
|
|
||||||
|
Tips for debugging/development
|
||||||
|
==============================
|
||||||
|
|
||||||
|
.. sidebar:: :ref:`make run`
|
||||||
|
|
||||||
|
Makefile target ``run`` already enables debug option for your developer
|
||||||
|
session / see :ref:`make run`.
|
||||||
|
|
||||||
|
Turn on debug logging
|
||||||
|
Whether you are working on a new engine or trying to eliminate a bug, it is
|
||||||
|
always a good idea to turn on debug logging. When debug logging is enabled a
|
||||||
|
stack trace appears, instead of the cryptic ``Internal Server Error``
|
||||||
|
message. It can be turned on by changing ``debug: False`` to ``debug: True`` in
|
||||||
|
:origin:`settings.yml <searx/settings.yml>`.
|
||||||
|
|
||||||
|
.. sidebar:: :ref:`make test`
|
||||||
|
|
||||||
|
Alternatively use the :ref:`make test` targets.
|
||||||
|
|
||||||
|
Run ``./manage.sh tests`` before creating a PR.
|
||||||
|
A failing build on Travis is common because of PEP8 checks, so a new commit
|
||||||
|
must be created containing these format fixes. This phase can be skipped if
|
||||||
|
``./manage.sh tests`` is run locally before creating a PR.
|
File diff suppressed because it is too large
|
@ -0,0 +1,120 @@
|
||||||
|
.. _search API:
|
||||||
|
|
||||||
|
==========
|
||||||
|
Search API
|
||||||
|
==========
|
||||||
|
|
||||||
|
The search supports both ``GET`` and ``POST``.
|
||||||
|
|
||||||
|
Furthermore, two endpoints ``/`` and ``/search`` are available for querying
(a minimal client sketch is given at the end of this page).
|
||||||
|
|
||||||
|
|
||||||
|
``GET /``
|
||||||
|
|
||||||
|
``GET /search``
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
==========
|
||||||
|
|
||||||
|
.. sidebar:: Further reading ..
|
||||||
|
|
||||||
|
- :ref:`engines-dev`
|
||||||
|
- :ref:`settings.yml`
|
||||||
|
- :ref:`engines generic`
|
||||||
|
|
||||||
|
``q`` : required
|
||||||
|
The search query. This string is passed to external search services. Thus,
|
||||||
|
searx supports the syntax of each search service. For example, ``site:github.com
|
||||||
|
searx`` is a valid query for Google. However, if the query above is simply
|
||||||
|
passed to any search engine which does not filter its results based on this
|
||||||
|
syntax, you might not get the results you wanted.
|
||||||
|
|
||||||
|
See more at :ref:`search-syntax`
|
||||||
|
|
||||||
|
``categories`` : optional
|
||||||
|
Comma separated list, specifies the active search categories
|
||||||
|
|
||||||
|
``engines`` : optional
|
||||||
|
Comma separated list, specifies the active search engines.
|
||||||
|
|
||||||
|
``lang`` : default ``all``
|
||||||
|
Code of the language.
|
||||||
|
|
||||||
|
``pageno`` : default ``1``
|
||||||
|
Search page number.
|
||||||
|
|
||||||
|
``time_range`` : optional
|
||||||
|
[ ``day``, ``month``, ``year`` ]
|
||||||
|
|
||||||
|
Time range of search for engines which support it. See if an engine supports
|
||||||
|
time range search in the preferences page of an instance.
|
||||||
|
|
||||||
|
``format`` : optional
|
||||||
|
[ ``json``, ``csv``, ``rss`` ]
|
||||||
|
|
||||||
|
Output format of results.
|
||||||
|
|
||||||
|
``results_on_new_tab`` : default ``0``
|
||||||
|
[ ``0``, ``1`` ]
|
||||||
|
|
||||||
|
Open search results on new tab.
|
||||||
|
|
||||||
|
``image_proxy`` : default ``False``
|
||||||
|
[ ``True``, ``False`` ]
|
||||||
|
|
||||||
|
Proxy image results through searx.
|
||||||
|
|
||||||
|
``autocomplete`` : default *empty*
|
||||||
|
[ ``google``, ``dbpedia``, ``duckduckgo``, ``startpage``, ``wikipedia`` ]
|
||||||
|
|
||||||
|
Service which completes words as you type.
|
||||||
|
|
||||||
|
``safesearch`` : default ``None``
|
||||||
|
[ ``0``, ``1``, ``None`` ]
|
||||||
|
|
||||||
|
Filter search results of engines which support safe search. See if an engine
|
||||||
|
supports safe search in the preferences page of an instance.
|
||||||
|
|
||||||
|
``theme`` : default ``oscar``
|
||||||
|
[ ``oscar``, ``simple``, ``legacy``, ``pix-art``, ``courgette`` ]
|
||||||
|
|
||||||
|
Theme of instance.
|
||||||
|
|
||||||
|
Please note that available themes depend on the instance. It is possible that an
|
||||||
|
instance administrator has deleted, created or renamed themes on their instance.
|
||||||
|
See the available options in the preferences page of the instance.
|
||||||
|
|
||||||
|
``oscar-style`` : default ``logicodev``
|
||||||
|
[ ``pointhi``, ``logicodev`` ]
|
||||||
|
|
||||||
|
Style of Oscar theme. It is only parsed if the theme of an instance is
|
||||||
|
``oscar``.
|
||||||
|
|
||||||
|
Please note that available styles depend on the instance. It is possible that an
|
||||||
|
instance administrator has deleted, created or renamed styles on their
|
||||||
|
instance. See the available options in the preferences page of the instance.
|
||||||
|
|
||||||
|
``enabled_plugins`` : optional
|
||||||
|
List of enabled plugins.
|
||||||
|
|
||||||
|
:default: ``HTTPS_rewrite``, ``Self_Informations``,
|
||||||
|
``Search_on_category_select``, ``Tracker_URL_remover``
|
||||||
|
|
||||||
|
:values: [ ``DOAI_rewrite``, ``HTTPS_rewrite``, ``Infinite_scroll``,
|
||||||
|
``Vim-like_hotkeys``, ``Self_Informations``, ``Tracker_URL_remover``,
|
||||||
|
``Search_on_category_select`` ]
|
||||||
|
|
||||||
|
``disabled_plugins``: optional
|
||||||
|
List of disabled plugins.
|
||||||
|
|
||||||
|
:default: ``DOAI_rewrite``, ``Infinite_scroll``, ``Vim-like_hotkeys``
|
||||||
|
:values: ``DOAI_rewrite``, ``HTTPS_rewrite``, ``Infinite_scroll``,
|
||||||
|
``Vim-like_hotkeys``, ``Self_Informations``, ``Tracker_URL_remover``,
|
||||||
|
``Search_on_category_select``
|
||||||
|
|
||||||
|
``enabled_engines`` : optional : *all* :origin:`engines <searx/engines>`
|
||||||
|
List of enabled engines.
|
||||||
|
|
||||||
|
``disabled_engines`` : optional : *all* :origin:`engines <searx/engines>`
|
||||||
|
List of disabled engines.
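Putting a few of the parameters above together, a minimal sketch of querying
an instance from Python; the instance URL is an assumption (it matches the
developer instance from ``make run``), and the instance has to allow the
``json`` output format:

.. code:: python

   import requests

   # hypothetical instance URL; adjust it to your own instance
   SEARX_URL = 'http://127.0.0.1:8888'

   params = {
       'q': 'privacy respecting search',
       'format': 'json',          # machine readable output
       'categories': 'general',   # comma separated list
       'pageno': 1,
       'lang': 'en',
   }
   resp = requests.get(SEARX_URL + '/search', params=params)
   for result in resp.json().get('results', []):
       print(result['url'], '-', result['title'])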
|
||||||
|
|
|
@ -0,0 +1,10 @@
|
||||||
|
<?xml version="1.0" encoding="UTF-8"?>
|
||||||
|
<!-- originate: https://commons.wikimedia.org/wiki/File:Variable_Resistor.svg -->
|
||||||
|
<svg xmlns="http://www.w3.org/2000/svg"
|
||||||
|
version="1.1" baseProfile="full"
|
||||||
|
width="70px" height="40px" viewBox="0 0 700 400">
|
||||||
|
<line x1="0" y1="200" x2="700" y2="200" stroke="black" stroke-width="20px"/>
|
||||||
|
<rect x="100" y="100" width="500" height="200" fill="white" stroke="black" stroke-width="20px"/>
|
||||||
|
<line x1="180" y1="370" x2="500" y2="50" stroke="black" stroke-width="15px"/>
|
||||||
|
<polygon points="585 0 525 25 585 50" transform="rotate(135 525 25)"/>
|
||||||
|
</svg>
|
After Width: | Height: | Size: 580 B |
|
@ -0,0 +1,71 @@
|
||||||
|
.. _translation:
|
||||||
|
|
||||||
|
===========
|
||||||
|
Translation
|
||||||
|
===========
|
||||||
|
|
||||||
|
.. _searx@transifex: https://www.transifex.com/asciimoo/searx/
|
||||||
|
|
||||||
|
Translation currently takes place on `searx@transifex`_
|
||||||
|
|
||||||
|
Requirements
|
||||||
|
============
|
||||||
|
|
||||||
|
* Transifex account
|
||||||
|
* The Transifex CLI tool installed
|
||||||
|
|
||||||
|
Init Transifex project
|
||||||
|
======================
|
||||||
|
|
||||||
|
After installing the Transifex client using pip, run the following command to
|
||||||
|
initialize the project.
|
||||||
|
|
||||||
|
.. code:: sh
|
||||||
|
|
||||||
|
tx init # Transifex instance: https://www.transifex.com/asciimoo/searx/
|
||||||
|
|
||||||
|
|
||||||
|
After ``$HOME/.transifexrc`` is created, get a Transifex API key and insert it
|
||||||
|
into the configuration file.
|
||||||
|
|
||||||
|
Create a configuration file for ``tx`` named ``$HOME/.tx/config``.
|
||||||
|
|
||||||
|
.. code:: ini
|
||||||
|
|
||||||
|
[main]
|
||||||
|
host = https://www.transifex.com
|
||||||
|
[searx.messagespo]
|
||||||
|
file_filter = searx/translations/<lang>/LC_MESSAGES/messages.po
|
||||||
|
source_file = messages.pot
|
||||||
|
source_lang = en
|
||||||
|
type = PO
|
||||||
|
|
||||||
|
|
||||||
|
Then run ``tx set``:
|
||||||
|
|
||||||
|
.. code:: shell
|
||||||
|
|
||||||
|
tx set --auto-local -r searx.messagespo 'searx/translations/<lang>/LC_MESSAGES/messages.po' \
|
||||||
|
--source-lang en --type PO --source-file messages.pot --execute
|
||||||
|
|
||||||
|
|
||||||
|
Update translations
|
||||||
|
===================
|
||||||
|
|
||||||
|
To retrieve the latest translations, pull them from Transifex.
|
||||||
|
|
||||||
|
.. code:: sh
|
||||||
|
|
||||||
|
tx pull -a
|
||||||
|
|
||||||
|
Then check the new languages. If not enough strings are translated, delete those
|
||||||
|
folders, because they should not be compiled. Call the command below to compile
|
||||||
|
the ``.po`` files.
|
||||||
|
|
||||||
|
.. code:: shell
|
||||||
|
|
||||||
|
pybabel compile -d searx/translations
|
||||||
|
|
||||||
|
|
||||||
|
After the compilation is finished commit the ``.po`` and ``.mo`` files and
|
||||||
|
create a PR.
|
|
@ -0,0 +1,32 @@
|
||||||
|
================
|
||||||
|
Welcome to searx
|
||||||
|
================
|
||||||
|
|
||||||
|
Search without being tracked.
|
||||||
|
|
||||||
|
.. sidebar:: Features
|
||||||
|
|
||||||
|
- Self hosted
|
||||||
|
- No user tracking
|
||||||
|
- No user profiling
|
||||||
|
- About 70 supported search engines
|
||||||
|
- Easy integration with any search engine
|
||||||
|
- Cookies are not used by default
|
||||||
|
- Secure, encrypted connections (HTTPS/SSL)
|
||||||
|
- Hosted by organizations, such as *La Quadrature du Net*, which promote
|
||||||
|
digital rights
|
||||||
|
|
||||||
|
Searx is a free internet metasearch engine which aggregates results from more
|
||||||
|
than 70 search services. Users are neither tracked nor profiled. Additionally,
|
||||||
|
searx can be used over Tor for online anonymity.
|
||||||
|
|
||||||
|
Get started with searx by using one of the :wiki:`Searx-instances`. If you
|
||||||
|
don't trust anyone, you can set up your own, see :ref:`installation`.
|
||||||
|
|
||||||
|
.. toctree::
|
||||||
|
:maxdepth: 2
|
||||||
|
|
||||||
|
user/index
|
||||||
|
admin/index
|
||||||
|
dev/index
|
||||||
|
blog/index
|
Binary file not shown.
After Width: | Height: | Size: 6.3 KiB |
|
@ -0,0 +1,10 @@
|
||||||
|
==================
|
||||||
|
User documentation
|
||||||
|
==================
|
||||||
|
|
||||||
|
.. toctree::
|
||||||
|
:maxdepth: 1
|
||||||
|
|
||||||
|
public_instances
|
||||||
|
search_syntax
|
||||||
|
own-instance
|
|
@ -0,0 +1,77 @@
|
||||||
|
===========================
|
||||||
|
Why use a private instance?
|
||||||
|
===========================
|
||||||
|
|
||||||
|
"Is it worth to run my own instance?" is a common question among searx users.
|
||||||
|
Before answering this question, see what options a searx user has.
|
||||||
|
|
||||||
|
Public instances are open to everyone who has access to their URL. Usually, these
|
||||||
|
are operated by unknown parties (from the users' point of view). Private
|
||||||
|
instances can be used by a select group of people. It is, for example, a searx for a
|
||||||
|
group of friends, or for a company which can be accessed through a VPN. It can also be a
|
||||||
|
single-user instance which runs on the user's laptop.
|
||||||
|
|
||||||
|
To gain more insight on how these instances work let's dive into how searx
|
||||||
|
protects its users.
|
||||||
|
|
||||||
|
How does searx protect privacy?
|
||||||
|
===============================
|
||||||
|
|
||||||
|
Searx protects the privacy of its users in multiple ways regardless of the type
|
||||||
|
of the instance (private, public). Removal of private data from search requests
|
||||||
|
comes in three forms:
|
||||||
|
|
||||||
|
1. removal of private data from requests going to search services
|
||||||
|
2. not forwarding anything from a third party services through search services
|
||||||
|
(e.g. advertisement)
|
||||||
|
3. removal of private data from requests going to the result pages
|
||||||
|
|
||||||
|
Removing private data means not sending cookies to external search engines and
|
||||||
|
generating a random browser profile for every request. Thus, it does not matter
|
||||||
|
if a public or private instance handles the request, because it is anonymized in
|
||||||
|
both cases. The IP address will be that of the instance. But searx can be
|
||||||
|
configured to use a proxy or Tor. `Result proxy
|
||||||
|
<https://github.com/asciimoo/morty>`__ is supported, too.
|
||||||
|
|
||||||
|
Unlike most search services, searx does not serve ads or tracking content. So
|
||||||
|
private data is not forwarded to third parties who might monetize it. Besides
|
||||||
|
protecting users from search services, both referring page and search query are
|
||||||
|
hidden from visited result pages.
|
||||||
|
|
||||||
|
|
||||||
|
What are the consequences of using public instances?
|
||||||
|
----------------------------------------------------
|
||||||
|
|
||||||
|
If someone uses a public instance, they have to trust the administrator of that
|
||||||
|
instance. This means that the user of the public instance does not know whether
|
||||||
|
their requests are logged, aggregated and sent or sold to a third party.
|
||||||
|
|
||||||
|
Also, public instances without proper protection are more vulnerable to abuse of
|
||||||
|
the search service. In this case the external service in turn returns
|
||||||
|
CAPTCHAs or bans the IP of the instance. Thus, search requests return fewer
|
||||||
|
results.
|
||||||
|
|
||||||
|
I see. What about private instances?
|
||||||
|
------------------------------------
|
||||||
|
|
||||||
|
If users run their own instances, everything is in their control: the source
|
||||||
|
code, logging settings and private data. Unknown instance administrators do not
|
||||||
|
have to be trusted.
|
||||||
|
|
||||||
|
Furthermore, as the default settings of their instance are editable, there is no
|
||||||
|
need to use cookies to tailor searx to their needs. So preferences will not be
|
||||||
|
reset to defaults when clearing browser cookies. As settings are stored on
|
||||||
|
their computer, they will not be accessible to others as long as their computer is
|
||||||
|
not compromised.
|
||||||
|
|
||||||
|
Conclusion
|
||||||
|
==========
|
||||||
|
|
||||||
|
Always use an instance which is operated by people you trust. The privacy
|
||||||
|
features of searx are available to users no matter what kind of instance they
|
||||||
|
use.
|
||||||
|
|
||||||
|
If someone is on the go or just wants to try searx for the first time, public
|
||||||
|
instances are the best choice. Additionally, public instances make the
|
||||||
|
world a better place, because those who cannot or do not want to run an
|
||||||
|
instance still have access to a privacy-respecting search service.
|
|
@ -0,0 +1,318 @@
|
||||||
|
.. _public instances:
|
||||||
|
|
||||||
|
..
|
||||||
|
links has been ported from markdown to reST by::
|
||||||
|
|
||||||
|
regexpr: \[([^\]]*)\]\(([^)]*)\)
|
||||||
|
substitution: `\1 <\2>`__
|
||||||
|
|
||||||
|
|
||||||
|
======================
|
||||||
|
Public Searx instances
|
||||||
|
======================
|
||||||
|
|
||||||
|
.. _mailing list: mailto:searx-instances@autistici.org
|
||||||
|
.. _subscription page: https://www.autistici.org/mailman/listinfo/searx-instances
|
||||||
|
|
||||||
|
|
||||||
|
Useful information
|
||||||
|
==================
|
||||||
|
|
||||||
|
* Up-to-date health report available on https://stats.searx.xyz [1]_, for onion
|
||||||
|
(tor) services: https://stats.searx.xyz/tor.html
|
||||||
|
|
||||||
|
* Searx instances `mailing list`_ & `subscription page`_.
|
||||||
|
|
||||||
|
* Some of the Searx instances have a CAcert SSL certificate. You can install the
|
||||||
|
missing root cert `from here <http://www.cacert.org/index.php?id=3>`__.
|
||||||
|
|
||||||
|
* To add your own Searx instance to this page, send us a PR. A GitHub account
|
||||||
|
is required to send a PR or open an issue.
|
||||||
|
|
||||||
|
.. [1] Note that most of the instances with an A+ grade in the CSP column of this
|
||||||
|
site are not fully functional - for example auto-completion may not work.
|
||||||
|
|
||||||
|
|
||||||
|
List of public Searx instances
|
||||||
|
==============================
|
||||||
|
|
||||||
|
Meta-searx instances
|
||||||
|
====================
|
||||||
|
|
||||||
|
These are websites that source from other searx instances. These are useful if
|
||||||
|
you can't decide which Searx instance to use:
|
||||||
|
|
||||||
|
|
||||||
|
.. flat-table:: Meta-searx instances
|
||||||
|
:header-rows: 1
|
||||||
|
:stub-columns: 0
|
||||||
|
:widths: 2 1 2 4 4
|
||||||
|
|
||||||
|
* - clearnet host
|
||||||
|
- onion host
|
||||||
|
- issuer
|
||||||
|
- source selection method
|
||||||
|
- extra privacy features
|
||||||
|
|
||||||
|
* - `Neocities <https://searx.neocities.org/>`__
|
||||||
|
- n/a
|
||||||
|
- Comodo (`Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.neocities.org>`__)
|
||||||
|
- Redirects users directly to a random selection of any known running
|
||||||
|
server after entering query. Requires
|
||||||
|
Javascript. `Changelog <https://searx.neocities.org/changelog.html>`__.
|
||||||
|
- Excludes servers with user tracking and analytics or are proxied through
|
||||||
|
Cloudflare.
|
||||||
|
|
||||||
|
* - `Searxes <https://searxes.danwin1210.me/>`__ @Danwin
|
||||||
|
- onion v3 `hidden service
|
||||||
|
<http://searxes.nmqnkngye4ct7bgss4bmv5ca3wpa55yugvxen5kz2bbq67lwy6ps54yd.onion/>`__
|
||||||
|
- Let's Encrypt (`Verification
|
||||||
|
<https://www.ssllabs.com/ssltest/analyze.html?d=searxes.danwin1210.me>`__)
|
||||||
|
- sources data from a randomly selected running server that satisfies
|
||||||
|
admin's quality standards which is used for post-processing
|
||||||
|
- filters out privacy-hostile websites (like CloudFlare) and either marks
|
||||||
|
them as such or folds them below the high ranking results.
|
||||||
|
|
||||||
|
|
||||||
|
Alive and running
|
||||||
|
=================
|
||||||
|
|
||||||
|
**BEFORE EDITING**: Please add your Searx instance by respecting the alphabetic order.
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
|
||||||
|
Public instances listed here may yield less accurate results as they have
|
||||||
|
much higher traffic and consequently have a higher chance of being blocked by
|
||||||
|
search providers such as Google, Qwant, Bing, Startpage, etc. Hosting your
|
||||||
|
own instance or using an instance that isn't listed here may give you a more
|
||||||
|
consistent search experience.
|
||||||
|
|
||||||
|
* `ai.deafpray.wtf/searx <https://ai.deafpray.wtf/searx>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=ai.deafpray.wtf/searx>`__
|
||||||
|
* `bamboozle.it <https://bamboozle.it/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=bamboozle.it>`__
|
||||||
|
* `bee.jaekr.dev <https://bee.jaekr.dev>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=bee.jaekr.dev>`__
|
||||||
|
* `beezboo.com <https://beezboo.com/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=beezboo.com>`__
|
||||||
|
* `burtrum.org/searx <https://burtrum.org/searx>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=burtrum.org/searx>`__
|
||||||
|
* `darmarit.cloud/searx <https://darmarit.cloud/searx>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=darmarit.cloud/searx>`__
|
||||||
|
* `dc.ax <https://dc.ax>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=dc.ax>`__
|
||||||
|
* `dynabyte.ca <https://dynabyte.ca>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=dynabyte.ca>`__
|
||||||
|
* `goso.ga <https://goso.ga/search>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=goso.ga>`__
|
||||||
|
* `gruble.de <https://www.gruble.de/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=www.gruble.de>`__
|
||||||
|
* `haku.ahmia.fi <https://haku.ahmia.fi/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=haku.ahmia.fi&latest>`__
|
||||||
|
* `haku.lelux.fi <https://haku.lelux.fi/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=haku.lelux.fi>`__
|
||||||
|
* `huyo.me <https://huyo.me/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=huyo.me>`__
|
||||||
|
* `jsearch.pw <https://jsearch.pw>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=jsearch.pw>`__
|
||||||
|
* `le-dahut.com/searx <https://le-dahut.com/searx>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=le-dahut.com/searx>`__
|
||||||
|
* `mijisou.com <https://mijisou.com/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=mijisou.com>`__
|
||||||
|
* `null.media <https://null.media>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=null.media>`__
|
||||||
|
* `openworlds.info <https://openworlds.info/>`__ - Issuer: Let's Encrypt
|
||||||
|
* `perfectpixel.de/searx/ <https://www.perfectpixel.de/searx/>`__ - Issuer: LetsEncrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=www.perfectpixel.de>`__
|
||||||
|
* `ransack.i2p <http://ransack.i2p/>`__ - I2P eepsite, only accessible with `I2P <https://geti2p.net/>`__ (`base32 address <http://mqamk4cfykdvhw5kjez2gnvse56gmnqxn7vkvvbuor4k4j2lbbnq.b32.i2p>`__)
|
||||||
|
* `rapu.nz <https://rapu.nz/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=rapu.nz>`__
|
||||||
|
* `roflcopter.fr <https://wtf.roflcopter.fr/searx>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=wtf.roflcopter.fr>`__
|
||||||
|
* `roteserver.de/searx <https://roteserver.de/searx>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=roteserver.de>`__
|
||||||
|
* `s.cmd.gg <https://s.cmd.gg>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=s.cmd.gg>`__
|
||||||
|
* `search.activemail.de <https://search.activemail.de/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.activemail.de&latest>`__
|
||||||
|
* `search.anonymize.com <https://search.anonymize.com/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.anonymize.com>`__
|
||||||
|
* `search.azkware.net <https://search.azkware.net/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.azkware.net>`__
|
||||||
|
* `search.biboumail.fr <https://search.biboumail.fr/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.biboumail.fr>`__
|
||||||
|
* `search.blankenberg.eu <https://search.blankenberg.eu>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.blankenberg.eu>`__
|
||||||
|
* `search.d4networks.com <https://search.d4networks.com/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.d4networks.com>`__
|
||||||
|
* `search.datensturm.net <https://search.datensturm.net>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.datensturm.net>`__
|
||||||
|
* `search.disroot.org <https://search.disroot.org/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.disroot.org>`__
|
||||||
|
* `search.ethibox.fr <https://search.ethibox.fr>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.ethibox.fr>`__
|
||||||
|
* `search.fossdaily.xyz <https://search.fossdaily.xyz>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.fossdaily.xyz>`__
|
||||||
|
* `search.galaxy.cat <https://search.galaxy.cat>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.galaxy.cat>`__
|
||||||
|
* `search.gibberfish.org <https://search.gibberfish.org/>`__ (as `Hidden Service <http://o2jdk5mdsijm2b7l.onion/>`__ or `Proxied through Tor <https://search.gibberfish.org/tor/>`__) - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.gibberfish.org>`__
|
||||||
|
* `search.koehn.com <https://search.koehn.com>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.koehn.com>`__
|
||||||
|
* `search.lgbtq.cool <https://search.lgbtq.cool/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.lgbtq.cool>`__
|
||||||
|
* `search.mdosch.de <https://search.mdosch.de/>`__ (as `Hidden Service <http://search.4bkxscubgtxwvhpe.onion/>`__) - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.mdosch.de>`__
|
||||||
|
* `search.modalogi.com <https://search.modalogi.com/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.modalogi.com&latest>`__
|
||||||
|
* `search.moravit.com <https://search.moravit.com>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.moravit.com>`__
|
||||||
|
* `search.nebulacentre.net <https://search.nebulacentre.net>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.nebulacentre.net>`__
|
||||||
|
* `search.paulla.asso.fr <https://search.paulla.asso.fr/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.paulla.asso.fr>`__
|
||||||
|
* `search.pifferi.info <https://search.pifferi.info/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.pifferi.info&latest>`__
|
||||||
|
* `search.poal.co <https://search.poal.co/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.poal.co>`__
|
||||||
|
* `search.privacytools.io <https://search.privacytools.io/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.privacytools.io>`__ - Uses Matomo for user tracking and analytics
|
||||||
|
* `search.seds.nl <https://search.seds.nl/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.seds.nl&latest>`__
|
||||||
|
* `search.snopyta.org <https://search.snopyta.org/>`__ (as `Hidden Service <http://juy4e6eicawzdrz7.onion/>`__) - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.snopyta.org>`__
|
||||||
|
* `search.spaeth.me <https://search.spaeth.me/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.spaeth.me&latest>`__
|
||||||
|
* `search.st8.at <https://search.st8.at/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.st8.at>`__
|
||||||
|
* `search.stinpriza.org <https://search.stinpriza.org>`__ (as `Hidden Service <http://z5vawdol25vrmorm4yydmohsd4u6rdoj2sylvoi3e3nqvxkvpqul7bqd.onion/>`__) - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.stinpriza.org&hideResults=on>`__
|
||||||
|
* `search.sudo-i.net <https://search.sudo-i.net/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.sudo-i.net>`__
|
||||||
|
* `search.tolstoevsky.ml <https://search.tolstoevsky.ml>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.tolstoevsky.ml>`__
|
||||||
|
* `searchsin.com/searx <https://searchsin.com/searx>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searchsin.com/searx>`__
|
||||||
|
* `searx.anongoth.pl <https://searx.anongoth.pl>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.anongoth.pl&latest>`__
|
||||||
|
* `searx.be <https://searx.be>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.be>`__
|
||||||
|
* `searx.ca <https://searx.ca/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.ca>`__
|
||||||
|
* `searx.canox.net <https://searx.canox.net/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.canox.net>`__
|
||||||
|
* `searx.cybt.de <https://searx.cybt.de/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.cybt.de>`__
|
||||||
|
* `searx.de <https://www.searx.de/>`__ - Issuer: COMODO `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.de>`__
|
||||||
|
* `searx.decatec.de <https://searx.decatec.de>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.decatec.de>`__
|
||||||
|
* `searx.devol.it <https://searx.devol.it/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.devol.it>`__
|
||||||
|
* `searx.dnswarden.com <https://searx.dnswarden.com>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.dnswarden.com>`__
|
||||||
|
* `searx.drakonix.net <https://searx.drakonix.net/>`__ - (down) Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.drakonix.net>`__
|
||||||
|
* `searx.dresden.network <https://searx.dresden.network/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.dresden.network>`__
|
||||||
|
* `searx.elukerio.org <https://searx.elukerio.org/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.elukerio.org/>`__
|
||||||
|
* `searx.everdot.org <https://searx.everdot.org/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.everdot.org/>`__ - Crawls using YaCy
|
||||||
|
* `searx.foo.li <https://searx.foo.li>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.foo.li&hideResults=on>`__
|
||||||
|
* `searx.fossencdi.org <https://searx.fossencdi.org>`__ (as `Hidden Service <http://searx.cwuzdtzlubq5uual.onion/>`__) - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.fossencdi.org>`__
|
||||||
|
* `searx.fr32k.de <https://searx.fr32k.de/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.fr32k.de>`__
|
||||||
|
* `searx.good.one.pl <https://searx.good.one.pl>`__ (as `Hidden Service <http://searxl7u2y6gvonm.onion/>`__) - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.good.one.pl>`__
|
||||||
|
* `searx.gotrust.de <https://searx.gotrust.de/>`__ (as `Hidden Service <http://nxhhwbbxc4khvvlw.onion/>`__) - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.gotrust.de>`__
|
||||||
|
* `searx.hardwired.link <https://searx.hardwired.link/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.hardwired.link>`__
|
||||||
|
* `searx.hlfh.space <https://searx.hlfh.space>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.hlfh.space>`__
|
||||||
|
* `searx.info <https://searx.info>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.info>`__
|
||||||
|
* `searx.itunix.eu <https://searx.itunix.eu/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.itunix.eu>`__
|
||||||
|
* `searx.john-at-me.net <https://searx.john-at-me.net/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.john-at-me.net>`__
|
||||||
|
* `searx.kvch.me <https://searx.kvch.me>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.kvch.me>`__
|
||||||
|
* `searx.laquadrature.net <https://searx.laquadrature.net>`__ (as `Hidden Service <http://searchb5a7tmimez.onion/>`__) - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.laquadrature.net>`__
|
||||||
|
* `searx.lelux.fi <https://searx.lelux.fi/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=haku.lelux.fi>`__
|
||||||
|
* `searx.lhorn.de <https://searx.lhorn.de/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.lhorn.de&latest>`__
|
||||||
|
* `searx.li <https://searx.li/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.li>`__
|
||||||
|
* `searx.libmail.eu <https://searx.libmail.eu/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.libmail.eu/>`__
|
||||||
|
* `searx.linux.pizza <https://searx.linux.pizza>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.linux.pizza>`__
|
||||||
|
* `searx.lynnesbian.space <https://searx.lynnesbian.space/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.lynnesbian.space>`__
|
||||||
|
* `searx.mastodontech.de <https://searx.mastodontech.de/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.mastodontech.de>`__
|
||||||
|
* `searx.me <https://searx.me>`__ (as `Hidden Service <http://ulrn6sryqaifefld.onion/>`__) - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.me>`__
|
||||||
|
* `searx.mxchange.org <https://searx.mxchange.org/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.mxchange.org>`__
|
||||||
|
* `searx.nakhan.net <https://searx.nakhan.net>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.nakhan.net>`__
|
||||||
|
* `searx.nixnet.xyz <https://searx.nixnet.xyz>`__ (as `Hidden Service <http://searx.l4qlywnpwqsluw65ts7md3khrivpirse744un3x7mlskqauz5pyuzgqd.onion/>`__) - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.nixnet.xyz>`__
|
||||||
|
* `searx.nnto.net <https://searx.nnto.net/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.nnto.net>`__
|
||||||
|
* `searx.openhoofd.nl <https://searx.openhoofd.nl/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=openhoofd.nl>`__
|
||||||
|
* `searx.openpandora.org <https://searx.openpandora.org/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.openpandora.org&latest>`__
|
||||||
|
* `searx.operationtulip.com <https://searx.operationtulip.com/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.operationtulip.com>`__
|
||||||
|
* `searx.orcadian.net <https://searx.orcadian.net/>`__ - Issuer: Comodo CA Limited `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.orcadian.net>`__
|
||||||
|
* `searx.ouahpiti.info <https://searx.ouahpiti.info/>`__ - Issuer: Let's Encrypt
|
||||||
|
* `searx.pofilo.fr <https://searx.pofilo.fr/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.pofilo.fr>`__
|
||||||
|
* `searx.prvcy.eu <https://searx.prvcy.eu/>`__ (as `Hidden Service <http://hmfztxt3pfhevucl.onion/>`__) - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.prvcy.eu>`__
|
||||||
|
* `searx.pwoss.org <https://searx.pwoss.org>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.pwoss.org>`__
|
||||||
|
* `searx.ro <https://searx.ro/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.ro>`__
|
||||||
|
* `searx.ru <https://searx.ru/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.ru>`__
|
||||||
|
* `searx.solusar.de <https://searx.solusar.de/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.solusar.de>`__
|
||||||
|
* `searx.targaryen.house <https://searx.targaryen.house/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.targaryen.house>`__
|
||||||
|
* `searx.tuxcloud.net <https://searx.tuxcloud.net>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.tuxcloud.net>`__
|
||||||
|
* `searx.tyil.nl <https://searx.tyil.nl>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.tyil.nl>`__
|
||||||
|
* `searx.wegeeks.win <https://searx.wegeeks.win>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.wegeeks.win>`__
|
||||||
|
* `searx.win <https://searx.win/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.win&latest>`__
|
||||||
|
* `searx.xyz <https://searx.xyz/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.xyz&latest>`__
|
||||||
|
* `searx.zareldyn.net <https://searx.zareldyn.net/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.zareldyn.net>`__
|
||||||
|
* `searx.zdechov.net <https://searx.zdechov.net>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.zdechov.net>`__
|
||||||
|
* `searxs.eu <https://www.searxs.eu>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=www.searxs.eu&hideResults=on>`__
|
||||||
|
* `seeks.hsbp.org <https://seeks.hsbp.org/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=seeks.hsbp.org>`__ - `PGP signed fingerprints of cert <https://seeks.hsbp.org/cert>`__
|
||||||
|
* `skyn3t.in/srx <https://skyn3t.in/srx/>`__ - Issuer: Let's Encrypt | onion `hidden service <http://skyn3tb3bas655mw.onion/srx/>`__
|
||||||
|
* `spot.ecloud.global <https://spot.ecloud.global/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=spot.ecloud.global>`__
|
||||||
|
* `srx.sx <https://srx.sx>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=srx.sx>`__
|
||||||
|
* `stemy.me/searx <https://stemy.me/searx>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=stemy.me>`__
|
||||||
|
* `suche.dasnetzundich.de <https://suche.dasnetzundich.de>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=suche.dasnetzundich.de>`__
|
||||||
|
* `suche.elaon.de <https://suche.elaon.de>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=suche.elaon.de>`__
|
||||||
|
* `suche.xyzco456vwisukfg.onion <http://suche.xyzco456vwisukfg.onion/>`__
|
||||||
|
* `suche.uferwerk.org <https://suche.uferwerk.org>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=suche.uferwerk.org>`__
|
||||||
|
* `timdor.noip.me/searx <https://timdor.noip.me/searx>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=timdor.noip.me/searx>`__
|
||||||
|
* `trovu.komun.org <https://trovu.komun.org>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=trovu.komun.org>`__
|
||||||
|
* `unmonito.red <https://unmonito.red/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=unmonito.red>`__
|
||||||
|
* `www.finden.tk <https://www.finden.tk/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=www.finden.tk>`__
|
||||||
|
* `zoek.anchel.nl <https://zoek.anchel.nl/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=zoek.anchel.nl>`__
Running in exclusive private walled-gardens
===========================================

These instances run in walled-gardens that exclude some segment of the general
public (e.g. Tor users and users sharing IPs with many other users). Caution:
privacy is also compromised on these sites due to exposure of cleartext traffic
to a third party other than the website operator.

* `intelme.com <https://intelme.com>`__ - Issuer: Cloudflare `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=intelme.com>`__
* `search404.io <https://www.search404.io/>`__ - Issuer: Cloudflare `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search404.io>`__
* `searx.com.au <https://searx.com.au/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.com.au>`__
* `searx.lavatech.top <https://searx.lavatech.top/>`__ - Issuer: Cloudflare `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.lavatech.top>`__
* `searchx.mobi <https://searchx.mobi/>`__ - Issuer: Cloudflare `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searchx.mobi>`__
* `searx.org <https://searx.org/>`__ - Issuer: Cloudflare `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.org>`__
* `searx.run <https://searx.run/>`__ - Issuer: Cloudflare `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.run>`__
* `searx.world <https://searx.world>`__ - Issuer: Cloudflare `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.world>`__ - Adds Amazon affiliate links

Running with an incorrect SSL certificate
=========================================

* `listi.me <https://listi.me/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=listi.me&latest>`__
* `s.matejc.com <https://s.matejc.com/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=s.matejc.com>`__
* `search.jollausers.de <https://search.jollausers.de>`__ - Incorrectly configured `SSL certificate <https://www.ssllabs.com/ssltest/analyze.html?d=search.jollausers.de>`__
* `search.paviro.de <https://search.paviro.de>`__ - Issuer: LetsEncrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.paviro.de>`__
* `searx.abenthung.it <https://searx.abenthung.it/>`__ - Issuer: Comodo CA Limited `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.abenthung.it>`__
* `searx.coding4schoki.org <https://searx.coding4schoki.org/>`__ - Incorrectly configured `SSL Certificate <https://www.ssllabs.com/ssltest/analyze.html?d=searx.coding4schoki.org>`__
* `searx.haxors.club <https://searx.haxors.club/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.haxors.club>`__
* `searx.nulltime.net <https://searx.nulltime.net/>`__ (as `Hidden Service <http://searx7gwtu5rh6wr.onion>`__) - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.nulltime.net>`__
* `searx.ch <https://searx.ch/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.ch>`__ (cert clock problems)

Offline
=======

* `a.searx.space <https://a.searx.space>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=a.searx.space>`__ (unstable, under construction).
|
||||||
|
* `anyonething.de <https://anyonething.de>`__ - (was found to have become a pastebin on or before 2019-03-01) Issuer: Comodo CA Limited (Warning: uses Cloudflare) `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=anyonething.de>`__
|
||||||
|
* `h7jwxg5rakyfvikpi.onion <http://7jwxg5rakyfvikpi.onion/>`__ - available only as Tor Hidden Service (down on 2019-06-26)
|
||||||
|
* `hacktivis.me/searx <https://hacktivis.me/searx>`__ - (down) - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=hacktivis.me/searx>`__
|
||||||
|
* `icebal.com <https://icebal.com>`__ - (down) Issuer: Let's Encrypt
|
||||||
|
* `netrangler.host <https://netrangler.host>`__ - (down) - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=netrangler.host>`__
|
||||||
|
* `opengo.nl <https://www.opengo.nl>`__ - (down) Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=www.opengo.nl>`__
|
||||||
|
* `p9e.de <https://p9e.de/>`__ - (down - timeout) Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=p9e.de>`__
|
||||||
|
* `rubri.co <https://rubri.co>`__ - (down) Issuer: Let's Encrypt
|
||||||
|
* `s.bacafe.xyz <https://s.bacafe.xyz/>`__ - (down) Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=s.bacafe.xyz&latest>`__
|
||||||
|
* `search.alecpap.com <https://search.alecpap.com/>`__ - (down) Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.alecpap.com>`__
|
||||||
|
* `search.blackit.de <https://search.blackit.de/>`__ - (down) Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.blackit.de>`__
|
||||||
|
* `search.deblan.org <https://search.deblan.org/>`__ (down) - Issuer: COMODO via GANDI `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.deblan.org>`__
|
||||||
|
* `search.homecomputing.fr <https://search.homecomputing.fr/>`__ - (down) Issuer: CAcert `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.homecomputing.fr>`__
|
||||||
|
* `search.jpope.org <https://search.jpope.org>`__ - (down - timeout) Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.jpope.org>`__
|
||||||
|
* `search.kakise.xyz <https://search.kakise.xyz/>`__ - down
|
||||||
|
* `search.kosebamse.com <https://search.kosebamse.com>`__ - Issuer: LetsEncrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.kosebamse.com>`__
|
||||||
|
* `search.kujiu.org <https://search.kujiu.org>`__ - (down) Issuer: Let's Encrypt
|
||||||
|
* `search.mailaender.coffee <https://search.mailaender.coffee/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.mailaender.coffee>`__
|
||||||
|
* `search.matrix.ac <https://search.matrix.ac>`__ - (down) Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=matrix.ac>`__
|
||||||
|
* `search.mypsc.ca <https://search.mypsc.ca/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.mypsc.ca>`__
|
||||||
|
* `search.namedkitten.pw <https://search.namedkitten.pw>`__ - (SSL error) - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.namedkitten.pw>`__
|
||||||
|
* `search.opentunisia.org <https://search.opentunisia.org>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.opentunisia.org>`__
|
||||||
|
* `search.r3d007.com <https://search.r3d007.com/>`__ - (down) Issuer: Let's Encrypt
|
||||||
|
* `search.static.lu <https://search.static.lu/>`__ - (down) Issuer: StartCom `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.static.lu>`__
|
||||||
|
* `search.teej.xyz <https://search.teej.xyz>`__ - (down) Issuer: LetsEncrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.teej.xyz>`__
|
||||||
|
* `search.wxzm.sx <https://search.wxzm.sx>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.wxzm.sx>`__
|
||||||
|
* `searx.4ray.co <https://searx.4ray.co/>`__ - (no longer an instance, redirects to main page) Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.4ray.co>`__
|
||||||
|
* `searx.32bitflo.at <https://searx.32bitflo.at/>`__ - (down) Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.32bitflo.at>`__
|
||||||
|
* `searx.ahh.si <https://searx.ahh.si/>`__ - (down) - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.ahh.si>`__
|
||||||
|
* `searx.angristan.xyz <https://searx.angristan.xyz/>`__ - (down) Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.angristan.xyz>`__
|
||||||
|
* `searx.antirep.net <https://searx.antirep.net/>`__ - (return a 502 HTTP error) Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.antirep.net>`__
|
||||||
|
* `searx.aquilenet.fr <https://searx.aquilenet.fr/>`__ - (down - 429 HTTP error) Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.aquilenet.fr>`__
|
||||||
|
* `searx.at <https://searx.at/>`__ - (return "request exception" at every search) Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.at>`__
|
||||||
|
* `searx.cc <https://searx.cc/>`__ - (down on 2019-06-26) Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.cc>`__
|
||||||
|
* `searx.dk <https://searx.dk/>`__ - (down - 429 HTTP error) Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.dk>`__
|
||||||
|
* `searx.ehrmanns.ch <https://searx.ehrmanns.ch>`__ - (down) Issuer: Let's Encrypt
|
||||||
|
* `searx.glibre.net <https://searx.glibre.net>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.glibre.net>`__
|
||||||
|
* `searx.infini.fr <https://searx.infini.fr>`__ - (return a page stating that the website is not installed) Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.infini.fr>`__
|
||||||
|
* `searx.jeanphilippemorvan.info <https://searx.jeanphilippemorvan.info/>`__ - (down) Issuer: StartCom `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.jeanphilippemorvan.info>`__
|
||||||
|
* `searx.lhorn.de <https://searx.lhorn.de/>`__ - (redirect the Searx's github repository page) Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.lhorn.de&latest>`__ (only reachable from european countries)
|
||||||
|
* `searx.lvweb.host <https://searx.lvweb.host>`__ - (down) Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.lvweb.host>`__
|
||||||
|
* `searx.mrtino.eu <https://searx.mrtino.eu>`__ - (down) Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.mrtino.eu>`__
|
||||||
|
* `searx.netzspielplatz.de <https://searx.netzspielplatz.de/>`__ - (error page about GDPR even when browsing it from USA and Asia) - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.netzspielplatz.de>`__
|
||||||
|
* `searx.new-admin.net <https://searx.new-admin.net>`__ - (down) Issuer: Let's Encrypt
|
||||||
|
* `searx.nogafa.org <https://searx.nogafa.org/>`__ - (broken CSS) Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.nogafa.org>`__
|
||||||
|
* `searx.potato.hu <https://searx.potato.hu>`__ - (not a searx instance) - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.potato.hu>`__
|
||||||
|
* `searx.rubbeldiekatz.info <https://searx.rubbeldiekatz.info/>`__ - (down) Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.rubbeldiekatz.info/>`__
|
||||||
|
* `searx.s42.space <https://searx.s42.space>`__ - (down) Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.s42.space>`__
|
||||||
|
* `searx.salcay.hu <https://searx.salcay.hu/>`__ - (down - blank page) Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.salcay.hu>`__
|
||||||
|
* `searx.selea.se <https://searx.selea.se>`__ - (Leads to default Apache page) Issuer: RapidSSL (HSTS preloaded, DNSSEC) `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.selea.se>`__ | `HSTS Preload <https://hstspreload.org/?domain=searx.selea.se>`__
|
||||||
|
* `searx.steinscraft.net <https://searx.steinscraft.net/>`__ - (down) Issuer: Cloudflare
|
||||||
|
* `searx.techregion.de <https://searx.techregion.de/>`__ - (domain expired) - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.techregion.de>`__
|
||||||
|
* `searx.tognella.com <https://searx.tognella.com/>`__ - (down) Issuer: Cloudflare
|
||||||
|
* `searx.xi.ht <https://searx.xi.ht/>`__ - (return a 502 HTTP error) Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.xi.ht>`__
|
||||||
|
* `searxist.com <https://searxist.com/>`__ - (down) - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searxist.com>`__
|
||||||
|
* `so.sb <https://so.sb/>`__ - (down) - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=so.sb>`__
|
||||||
|
* `srx.stdout.net <https://srx.stdout.net/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=srx.stdout.net>`__
|
||||||
|
* `w6f7cgdm54cyvohcuhraaafhajctyj3ihenrovuxogoagrr5g43qmoid.onion <http://w6f7cgdm54cyvohcuhraaafhajctyj3ihenrovuxogoagrr5g43qmoid.onion/>`__ - Hidden Service
|
||||||
|
* `win8linux.nohost.me <https://win8linux.nohost.me/searx/>`__ - (down) Issuer: Let's Encrypt
|
||||||
|
* `wiznet.tech <https://wiznet.tech>`__ - (down) - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=wiznet.tech>`__
|
||||||
|
* `www.mercurius.space <https://www.mercurius.space/>`__ - (down) Issuer: Let's Encrypt
|
||||||
|
* `www.ready.pm <https://www.ready.pm>`__ - Issuer: WoSign `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=www.ready.pm>`__
|
||||||
|
* `z.awsmppl.com <https://z.awsmppl.com>`__ - (down) Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=z.awsmppl.com>`__
|
||||||
|
* `zlsdzh.tk <https://zlsdzh.tk>`__ - (down - 404 HTTP error) Issuer: TrustAsia Technologies, Inc. `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=zlsdzh.tk>`__
@ -0,0 +1,42 @@
.. _search-syntax:

=============
Search syntax
=============

Searx allows you to modify the default categories, engines and search language
via the search query.

Prefix ``!``
   to set category/engine

Prefix ``:``
   to set language

Prefix ``?``
   to add engines and categories to the currently selected categories

Abbreviations of the engines and languages are also accepted. Engine/category
modifiers are chainable and inclusive (e.g. :search:`!it !ddg !wp qwer
<?q=%21it%20%21ddg%20%21wp%20qwer>` searches the IT category **and** duckduckgo
**and** wikipedia for ``qwer``).

See the :search:`/preferences page <preferences>` for the list of engines,
categories and languages.

Examples
========

Search in wikipedia for ``qwer``:

- :search:`!wp qwer <?q=%21wp%20qwer>` or
- :search:`!wikipedia qwer <?q=%21wikipedia%20qwer>`

Image search:

- :search:`!images Cthulhu <?q=%21images%20Cthulhu>`

Custom language in wikipedia:

- :search:`:hu !wp hackerspace <?q=%3Ahu%20%21wp%20hackerspace>`
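
The prefixes combine the same way when an instance is queried programmatically.
The following is only a minimal sketch, not part of the syntax reference: the
instance URL is a placeholder and it assumes the instance has the JSON output
format enabled.

.. code:: python

   # Minimal sketch: send a prefixed query to a searx instance.
   # Assumptions: INSTANCE is a placeholder and the instance has the
   # JSON output format ("format=json") enabled in its settings.
   from urllib.parse import urlencode

   import requests

   INSTANCE = "https://searx.example.org"  # placeholder instance

   def search(query):
       params = urlencode({"q": query, "format": "json"})
       response = requests.get("{}/search?{}".format(INSTANCE, params), timeout=10)
       response.raise_for_status()
       return response.json().get("results", [])

   # language prefix + engine prefix, as in the last example above
   for result in search(":hu !wp hackerspace")[:5]:
       print(result.get("title"), "-", result.get("url"))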

manage.sh

@ -18,12 +18,12 @@ ACTION="$1"
 update_packages() {
     pip install --upgrade pip
     pip install --upgrade setuptools
-    pip install -r "$BASE_DIR/requirements.txt"
+    pip install -Ur "$BASE_DIR/requirements.txt"
 }

 update_dev_packages() {
     update_packages
-    pip install -r "$BASE_DIR/requirements-dev.txt"
+    pip install -Ur "$BASE_DIR/requirements-dev.txt"
 }

 install_geckodriver() {

@ -70,6 +70,11 @@ locales() {
     pybabel compile -d "$SEARX_DIR/translations"
 }

+update_useragents() {
+    echo '[!] Updating user agent versions'
+    python utils/fetch_firefox_version.py
+}
+
 pep8_check() {
     echo '[!] Running pep8 check'
     # ignored rules:

@ -246,6 +251,7 @@ Commands
     update_dev_packages  - Check & update development and production dependency changes
     install_geckodriver  - Download & install geckodriver if not already installed (required for robot_tests)
     npm_packages         - Download & install npm dependencies
+    update_useragents    - Update useragents.json with the most recent versions of Firefox

 Build
 -----

@ -1,3 +1,6 @@
+pallets-sphinx-themes
+Sphinx
+sphinx-issues
 mock==2.0.0
 nose2[coverage_plugin]
 cov-core==1.15.0

@ -8,3 +11,6 @@ transifex-client==0.12.2
 unittest2==1.1.0
 zope.testrunner==4.5.1
 selenium==3.141.0
+linuxdoc @ git+http://github.com/return42/linuxdoc.git
+sphinx-jinja
+sphinx-tabs

@ -1,14 +1,15 @@
 {
-    "ua": "Mozilla/5.0 ({os}; rv:{version}) Gecko/20100101 Firefox/{version}",
     "versions": [
-        "61.0.1",
-        "61.0",
-        "60.0.2",
-        "60.0.1",
-        "60.0"
+        "70.0.1",
+        "70.0",
+        "69.0.3",
+        "69.0.2",
+        "69.0.1",
+        "69.0"
     ],
     "os": [
         "Windows NT 10; WOW64",
         "X11; Linux x86_64"
-    ]
+    ],
+    "ua": "Mozilla/5.0 ({os}; rv:{version}) Gecko/20100101 Firefox/{version}"
 }

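As a side note, the ``ua`` entry reordered above is a format template: a random
``os`` and ``version`` are substituted into it to build the final User-Agent
header. The sketch below is only an illustration of that expansion; the helper
name and file path are assumptions, not searx's actual API.

.. code:: python

   # Illustration only: expand useragents.json into a concrete User-Agent string.
   import json
   import random

   def random_user_agent(path="useragents.json"):  # path is illustrative
       with open(path) as f:
           data = json.load(f)
       # "ua" is a format template with {os} and {version} placeholders
       return data["ua"].format(
           os=random.choice(data["os"]),
           version=random.choice(data["versions"]),
       )

   print(random_user_agent())
   # e.g. Mozilla/5.0 (X11; Linux x86_64; rv:70.0) Gecko/20100101 Firefox/70.0
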
@ -89,8 +89,7 @@ def response(resp):
|
||||||
'content': content})
|
'content': content})
|
||||||
|
|
||||||
try:
|
try:
|
||||||
result_len_container = "".join(eval_xpath(dom, '//span[@class="sb_count"]/text()'))
|
result_len_container = "".join(eval_xpath(dom, '//span[@class="sb_count"]//text()'))
|
||||||
result_len_container = utils.to_string(result_len_container)
|
|
||||||
if "-" in result_len_container:
|
if "-" in result_len_container:
|
||||||
# Remove the part "from-to" for paginated request ...
|
# Remove the part "from-to" for paginated request ...
|
||||||
result_len_container = result_len_container[result_len_container.find("-") * 2 + 2:]
|
result_len_container = result_len_container[result_len_container.find("-") * 2 + 2:]
|
||||||
|
@ -102,7 +101,7 @@ def response(resp):
|
||||||
logger.debug('result error :\n%s', e)
|
logger.debug('result error :\n%s', e)
|
||||||
pass
|
pass
|
||||||
|
|
||||||
if _get_offset_from_pageno(resp.search_params.get("pageno", 0)) > result_len:
|
if result_len and _get_offset_from_pageno(resp.search_params.get("pageno", 0)) > result_len:
|
||||||
return []
|
return []
|
||||||
|
|
||||||
results.append({'number_of_results': result_len})
|
results.append({'number_of_results': result_len})
|
||||||
|
|
|
@ -109,14 +109,22 @@ def response(resp):
|
||||||
else:
|
else:
|
||||||
url = build_flickr_url(photo['ownerNsid'], photo['id'])
|
url = build_flickr_url(photo['ownerNsid'], photo['id'])
|
||||||
|
|
||||||
results.append({'url': url,
|
result = {
|
||||||
'title': title,
|
'url': url,
|
||||||
'img_src': img_src,
|
'img_src': img_src,
|
||||||
'thumbnail_src': thumbnail_src,
|
'thumbnail_src': thumbnail_src,
|
||||||
'content': content,
|
'source': source,
|
||||||
'author': author,
|
'img_format': img_format,
|
||||||
'source': source,
|
'template': 'images.html'
|
||||||
'img_format': img_format,
|
}
|
||||||
'template': 'images.html'})
|
try:
|
||||||
|
result['author'] = author
|
||||||
|
result['title'] = title
|
||||||
|
result['content'] = content
|
||||||
|
except:
|
||||||
|
result['author'] = ''
|
||||||
|
result['title'] = ''
|
||||||
|
result['content'] = ''
|
||||||
|
results.append(result)
|
||||||
|
|
||||||
return results
|
return results
|
||||||
|
|
|
@ -14,6 +14,7 @@ import random
|
||||||
from json import loads
|
from json import loads
|
||||||
from time import time
|
from time import time
|
||||||
from lxml.html import fromstring
|
from lxml.html import fromstring
|
||||||
|
from searx.poolrequests import get
|
||||||
from searx.url_utils import urlencode
|
from searx.url_utils import urlencode
|
||||||
from searx.utils import eval_xpath
|
from searx.utils import eval_xpath
|
||||||
|
|
||||||
|
@ -31,13 +32,9 @@ search_string = 'search?{query}'\
|
||||||
'&c=main'\
|
'&c=main'\
|
||||||
'&s={offset}'\
|
'&s={offset}'\
|
||||||
'&format=json'\
|
'&format=json'\
|
||||||
'&qh=0'\
|
'&langcountry={lang}'\
|
||||||
'&qlang={lang}'\
|
|
||||||
'&ff={safesearch}'\
|
'&ff={safesearch}'\
|
||||||
'&rxiec={rxieu}'\
|
'&rand={rxikd}'
|
||||||
'&ulse={ulse}'\
|
|
||||||
'&rand={rxikd}'\
|
|
||||||
'&dbez={dbez}'
|
|
||||||
# specific xpath variables
|
# specific xpath variables
|
||||||
results_xpath = '//response//result'
|
results_xpath = '//response//result'
|
||||||
url_xpath = './/url'
|
url_xpath = './/url'
|
||||||
|
@ -46,9 +43,26 @@ content_xpath = './/sum'
|
||||||
|
|
||||||
supported_languages_url = 'https://gigablast.com/search?&rxikd=1'
|
supported_languages_url = 'https://gigablast.com/search?&rxikd=1'
|
||||||
|
|
||||||
|
extra_param = '' # gigablast requires a random extra parameter
|
||||||
|
# which can be extracted from the source code of the search page
|
||||||
|
|
||||||
|
|
||||||
|
def parse_extra_param(text):
|
||||||
|
global extra_param
|
||||||
|
param_lines = [x for x in text.splitlines() if x.startswith('var url=') or x.startswith('url=url+')]
|
||||||
|
extra_param = ''
|
||||||
|
for l in param_lines:
|
||||||
|
extra_param += l.split("'")[1]
|
||||||
|
extra_param = extra_param.split('&')[-1]
|
||||||
|
|
||||||
|
|
||||||
|
def init(engine_settings=None):
|
||||||
|
parse_extra_param(get('http://gigablast.com/search?c=main&qlangcountry=en-us&q=south&s=10').text)
|
||||||
|
|
||||||
|
|
||||||
# do search-request
|
# do search-request
|
||||||
def request(query, params):
|
def request(query, params):
|
||||||
|
print("EXTRAPARAM:", extra_param)
|
||||||
offset = (params['pageno'] - 1) * number_of_results
|
offset = (params['pageno'] - 1) * number_of_results
|
||||||
|
|
||||||
if params['language'] == 'all':
|
if params['language'] == 'all':
|
||||||
|
@ -67,14 +81,11 @@ def request(query, params):
|
||||||
search_path = search_string.format(query=urlencode({'q': query}),
|
search_path = search_string.format(query=urlencode({'q': query}),
|
||||||
offset=offset,
|
offset=offset,
|
||||||
number_of_results=number_of_results,
|
number_of_results=number_of_results,
|
||||||
rxikd=int(time() * 1000),
|
|
||||||
rxieu=random.randint(1000000000, 9999999999),
|
|
||||||
ulse=random.randint(100000000, 999999999),
|
|
||||||
lang=language,
|
lang=language,
|
||||||
safesearch=safesearch,
|
rxikd=int(time() * 1000),
|
||||||
dbez=random.randint(100000000, 999999999))
|
safesearch=safesearch)
|
||||||
|
|
||||||
params['url'] = base_url + search_path
|
params['url'] = base_url + search_path + '&' + extra_param
|
||||||
|
|
||||||
return params
|
return params
|
||||||
|
|
||||||
|
@ -84,7 +95,11 @@ def response(resp):
|
||||||
results = []
|
results = []
|
||||||
|
|
||||||
# parse results
|
# parse results
|
||||||
response_json = loads(resp.text)
|
try:
|
||||||
|
response_json = loads(resp.text)
|
||||||
|
except:
|
||||||
|
parse_extra_param(resp.text)
|
||||||
|
raise Exception('extra param expired, please reload')
|
||||||
|
|
||||||
for result in response_json['results']:
|
for result in response_json['results']:
|
||||||
# append result
|
# append result
|
||||||
|
|
|
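The gigablast change above works around the engine's rotating URL parameter:
``parse_extra_param()`` rebuilds it from the ``var url=`` / ``url=url+`` lines
in the search page's JavaScript and appends it to every request. Below is a
self-contained sketch of that extraction; the JavaScript snippet and the
parameter name in it are made up for demonstration purposes.

.. code:: python

   # Sketch of the extra-parameter extraction used by the gigablast engine above.
   # SAMPLE_PAGE_JS is an invented page fragment, not real gigablast output.
   SAMPLE_PAGE_JS = """
   var url='/search?c=main&q=test';
   url=url+'&nsab=730';
   """

   def parse_extra_param(text):
       # keep only the lines that assemble the search URL
       param_lines = [line.strip() for line in text.splitlines()
                      if line.strip().startswith(("var url=", "url=url+"))]
       # join the quoted fragments, then keep the trailing &key=value pair
       extra = "".join(line.split("'")[1] for line in param_lines)
       return extra.split("&")[-1]

   print(parse_extra_param(SAMPLE_PAGE_JS))  # -> nsab=730
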
@ -32,7 +32,7 @@ base_url = 'https://www.ina.fr'
|
||||||
search_url = base_url + '/layout/set/ajax/recherche/result?autopromote=&hf={ps}&b={start}&type=Video&r=&{query}'
|
search_url = base_url + '/layout/set/ajax/recherche/result?autopromote=&hf={ps}&b={start}&type=Video&r=&{query}'
|
||||||
|
|
||||||
# specific xpath variables
|
# specific xpath variables
|
||||||
results_xpath = '//div[contains(@class,"search-results--list")]/div[@class="media"]'
|
results_xpath = '//div[contains(@class,"search-results--list")]//div[@class="media-body"]'
|
||||||
url_xpath = './/a/@href'
|
url_xpath = './/a/@href'
|
||||||
title_xpath = './/h3[@class="h3--title media-heading"]'
|
title_xpath = './/h3[@class="h3--title media-heading"]'
|
||||||
thumbnail_xpath = './/img/@src'
|
thumbnail_xpath = './/img/@src'
|
||||||
|
@ -65,8 +65,11 @@ def response(resp):
|
||||||
videoid = result.xpath(url_xpath)[0]
|
videoid = result.xpath(url_xpath)[0]
|
||||||
url = base_url + videoid
|
url = base_url + videoid
|
||||||
title = p.unescape(extract_text(result.xpath(title_xpath)))
|
title = p.unescape(extract_text(result.xpath(title_xpath)))
|
||||||
thumbnail = extract_text(result.xpath(thumbnail_xpath)[0])
|
try:
|
||||||
if thumbnail[0] == '/':
|
thumbnail = extract_text(result.xpath(thumbnail_xpath)[0])
|
||||||
|
except:
|
||||||
|
thumbnail = ''
|
||||||
|
if thumbnail and thumbnail[0] == '/':
|
||||||
thumbnail = base_url + thumbnail
|
thumbnail = base_url + thumbnail
|
||||||
d = extract_text(result.xpath(publishedDate_xpath)[0])
|
d = extract_text(result.xpath(publishedDate_xpath)[0])
|
||||||
d = d.split('/')
|
d = d.split('/')
|
||||||
|
|
|
@ -45,6 +45,8 @@ def request(query, params):
|
||||||
def response(resp):
|
def response(resp):
|
||||||
results = []
|
results = []
|
||||||
response_data = loads(resp.text)
|
response_data = loads(resp.text)
|
||||||
|
if not response_data:
|
||||||
|
return results
|
||||||
|
|
||||||
for result in response_data['results']:
|
for result in response_data['results']:
|
||||||
url = _get_url(result)
|
url = _get_url(result)
|
||||||
|
|
|
@ -24,7 +24,7 @@ result_base_url = 'https://openstreetmap.org/{osm_type}/{osm_id}'
|
||||||
|
|
||||||
# do search-request
|
# do search-request
|
||||||
def request(query, params):
|
def request(query, params):
|
||||||
params['url'] = base_url + search_string.format(query=query)
|
params['url'] = base_url + search_string.format(query=query.decode('utf-8'))
|
||||||
|
|
||||||
return params
|
return params
|
||||||
|
|
||||||
|
|
|
@ -50,6 +50,7 @@ def request(query, params):
|
||||||
language = match_language(params['language'], supported_languages, language_aliases)
|
language = match_language(params['language'], supported_languages, language_aliases)
|
||||||
params['url'] += '&locale=' + language.replace('-', '_').lower()
|
params['url'] += '&locale=' + language.replace('-', '_').lower()
|
||||||
|
|
||||||
|
params['headers']['User-Agent'] = 'Mozilla/5.0 (X11; Linux x86_64; rv:69.0) Gecko/20100101 Firefox/69.0'
|
||||||
return params
|
return params
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -29,7 +29,7 @@ def request(query, params):
|
||||||
params['url'] = search_url
|
params['url'] = search_url
|
||||||
params['method'] = 'POST'
|
params['method'] = 'POST'
|
||||||
params['headers']['Content-type'] = "application/json"
|
params['headers']['Content-type'] = "application/json"
|
||||||
params['data'] = dumps({"query": query,
|
params['data'] = dumps({"query": query.decode('utf-8'),
|
||||||
"searchField": "ALL",
|
"searchField": "ALL",
|
||||||
"sortDirection": "ASC",
|
"sortDirection": "ASC",
|
||||||
"sortOrder": "RELEVANCY",
|
"sortOrder": "RELEVANCY",
|
||||||
|
|
|
@ -12,10 +12,14 @@
|
||||||
|
|
||||||
from json import loads
|
from json import loads
|
||||||
from searx.url_utils import urlencode
|
from searx.url_utils import urlencode
|
||||||
|
import requests
|
||||||
|
import base64
|
||||||
|
|
||||||
# engine dependent config
|
# engine dependent config
|
||||||
categories = ['music']
|
categories = ['music']
|
||||||
paging = True
|
paging = True
|
||||||
|
api_client_id = None
|
||||||
|
api_client_secret = None
|
||||||
|
|
||||||
# search-url
|
# search-url
|
||||||
url = 'https://api.spotify.com/'
|
url = 'https://api.spotify.com/'
|
||||||
|
@ -31,6 +35,16 @@ def request(query, params):
|
||||||
|
|
||||||
params['url'] = search_url.format(query=urlencode({'q': query}), offset=offset)
|
params['url'] = search_url.format(query=urlencode({'q': query}), offset=offset)
|
||||||
|
|
||||||
|
r = requests.post(
|
||||||
|
'https://accounts.spotify.com/api/token',
|
||||||
|
data={'grant_type': 'client_credentials'},
|
||||||
|
headers={'Authorization': 'Basic ' + base64.b64encode(
|
||||||
|
"{}:{}".format(api_client_id, api_client_secret).encode('utf-8')
|
||||||
|
).decode('utf-8')}
|
||||||
|
)
|
||||||
|
j = loads(r.text)
|
||||||
|
params['headers'] = {'Authorization': 'Bearer {}'.format(j.get('access_token'))}
|
||||||
|
|
||||||
return params
|
return params
|
||||||
|
|
||||||
|
|
||||||
|
|
|
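For reference, the token request added to the spotify engine above follows the
standard OAuth client-credentials flow. A standalone sketch of that flow, with
placeholder credentials (the real values come from the engine settings), looks
like this:

.. code:: python

   # Standalone sketch of the client-credentials token request shown above.
   # The client id/secret are placeholders, not working credentials.
   import base64

   import requests

   api_client_id = "YOUR_CLIENT_ID"          # placeholder
   api_client_secret = "YOUR_CLIENT_SECRET"  # placeholder

   auth = base64.b64encode(
       "{}:{}".format(api_client_id, api_client_secret).encode("utf-8")
   ).decode("utf-8")

   resp = requests.post(
       "https://accounts.spotify.com/api/token",
       data={"grant_type": "client_credentials"},
       headers={"Authorization": "Basic " + auth},
   )
   access_token = resp.json().get("access_token")

   # the returned bearer token is then sent with every search request
   headers = {"Authorization": "Bearer {}".format(access_token)}
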
@ -21,7 +21,8 @@ search_url = base_url + u'w/api.php?'\
|
||||||
'action=query'\
|
'action=query'\
|
||||||
'&format=json'\
|
'&format=json'\
|
||||||
'&{query}'\
|
'&{query}'\
|
||||||
'&prop=extracts|pageimages'\
|
'&prop=extracts|pageimages|pageprops'\
|
||||||
|
'&ppprop=disambiguation'\
|
||||||
'&exintro'\
|
'&exintro'\
|
||||||
'&explaintext'\
|
'&explaintext'\
|
||||||
'&pithumbsize=300'\
|
'&pithumbsize=300'\
|
||||||
|
@ -79,12 +80,15 @@ def response(resp):
|
||||||
|
|
||||||
# wikipedia article's unique id
|
# wikipedia article's unique id
|
||||||
# first valid id is assumed to be the requested article
|
# first valid id is assumed to be the requested article
|
||||||
|
if 'pages' not in search_result['query']:
|
||||||
|
return results
|
||||||
|
|
||||||
for article_id in search_result['query']['pages']:
|
for article_id in search_result['query']['pages']:
|
||||||
page = search_result['query']['pages'][article_id]
|
page = search_result['query']['pages'][article_id]
|
||||||
if int(article_id) > 0:
|
if int(article_id) > 0:
|
||||||
break
|
break
|
||||||
|
|
||||||
if int(article_id) < 0:
|
if int(article_id) < 0 or 'disambiguation' in page.get('pageprops', {}):
|
||||||
return []
|
return []
|
||||||
|
|
||||||
title = page.get('title')
|
title = page.get('title')
|
||||||
|
@ -96,6 +100,7 @@ def response(resp):
|
||||||
extract = page.get('extract')
|
extract = page.get('extract')
|
||||||
|
|
||||||
summary = extract_first_paragraph(extract, title, image)
|
summary = extract_first_paragraph(extract, title, image)
|
||||||
|
summary = summary.replace('() ', '')
|
||||||
|
|
||||||
# link to wikipedia article
|
# link to wikipedia article
|
||||||
wikipedia_link = base_url.format(language=url_lang(resp.search_params['language'])) \
|
wikipedia_link = base_url.format(language=url_lang(resp.search_params['language'])) \
|
||||||
|
|
|
@ -79,9 +79,10 @@ engines:
|
||||||
categories : science
|
categories : science
|
||||||
timeout : 4.0
|
timeout : 4.0
|
||||||
|
|
||||||
- name : base
|
# tmp suspended: dh key too small
|
||||||
engine : base
|
# - name : base
|
||||||
shortcut : bs
|
# engine : base
|
||||||
|
# shortcut : bs
|
||||||
|
|
||||||
- name : wikipedia
|
- name : wikipedia
|
||||||
engine : wikipedia
|
engine : wikipedia
|
||||||
|
@ -408,7 +409,7 @@ engines:
|
||||||
|
|
||||||
- name : library genesis
|
- name : library genesis
|
||||||
engine : xpath
|
engine : xpath
|
||||||
search_url : http://libgen.io/search.php?req={query}
|
search_url : https://libgen.is/search.php?req={query}
|
||||||
url_xpath : //a[contains(@href,"bookfi.net")]/@href
|
url_xpath : //a[contains(@href,"bookfi.net")]/@href
|
||||||
title_xpath : //a[contains(@href,"book/")]/text()[1]
|
title_xpath : //a[contains(@href,"book/")]/text()[1]
|
||||||
content_xpath : //td/a[1][contains(@href,"=author")]/text()
|
content_xpath : //td/a[1][contains(@href,"=author")]/text()
|
||||||
|
@ -464,7 +465,7 @@ engines:
|
||||||
- name : openairedatasets
|
- name : openairedatasets
|
||||||
engine : json_engine
|
engine : json_engine
|
||||||
paging : True
|
paging : True
|
||||||
search_url : http://api.openaire.eu/search/datasets?format=json&page={pageno}&size=10&title={query}
|
search_url : https://api.openaire.eu/search/datasets?format=json&page={pageno}&size=10&title={query}
|
||||||
results_query : response/results/result
|
results_query : response/results/result
|
||||||
url_query : metadata/oaf:entity/oaf:result/children/instance/webresource/url/$
|
url_query : metadata/oaf:entity/oaf:result/children/instance/webresource/url/$
|
||||||
title_query : metadata/oaf:entity/oaf:result/title/$
|
title_query : metadata/oaf:entity/oaf:result/title/$
|
||||||
|
@ -476,7 +477,7 @@ engines:
|
||||||
- name : openairepublications
|
- name : openairepublications
|
||||||
engine : json_engine
|
engine : json_engine
|
||||||
paging : True
|
paging : True
|
||||||
search_url : http://api.openaire.eu/search/publications?format=json&page={pageno}&size=10&title={query}
|
search_url : https://api.openaire.eu/search/publications?format=json&page={pageno}&size=10&title={query}
|
||||||
results_query : response/results/result
|
results_query : response/results/result
|
||||||
url_query : metadata/oaf:entity/oaf:result/children/instance/webresource/url/$
|
url_query : metadata/oaf:entity/oaf:result/children/instance/webresource/url/$
|
||||||
title_query : metadata/oaf:entity/oaf:result/title/$
|
title_query : metadata/oaf:entity/oaf:result/title/$
|
||||||
|
@ -552,10 +553,11 @@ engines:
|
||||||
timeout : 10.0
|
timeout : 10.0
|
||||||
disabled : True
|
disabled : True
|
||||||
|
|
||||||
- name : scanr structures
|
# tmp suspended: bad certificate
|
||||||
shortcut: scs
|
# - name : scanr structures
|
||||||
engine : scanr_structures
|
# shortcut: scs
|
||||||
disabled : True
|
# engine : scanr_structures
|
||||||
|
# disabled : True
|
||||||
|
|
||||||
- name : soundcloud
|
- name : soundcloud
|
||||||
engine : soundcloud
|
engine : soundcloud
|
||||||
|
@ -598,9 +600,12 @@ engines:
|
||||||
shortcut : se
|
shortcut : se
|
||||||
categories : science
|
categories : science
|
||||||
|
|
||||||
- name : spotify
|
# Spotify needs API credentials
|
||||||
engine : spotify
|
# - name : spotify
|
||||||
shortcut : stf
|
# engine : spotify
|
||||||
|
# shortcut : stf
|
||||||
|
# api_client_id : *******
|
||||||
|
# api_client_secret : *******
|
||||||
|
|
||||||
- name : startpage
|
- name : startpage
|
||||||
engine : startpage
|
engine : startpage
|
||||||
|
@ -812,7 +817,7 @@ locales:
|
||||||
doi_resolvers :
|
doi_resolvers :
|
||||||
oadoi.org : 'https://oadoi.org/'
|
oadoi.org : 'https://oadoi.org/'
|
||||||
doi.org : 'https://doi.org/'
|
doi.org : 'https://doi.org/'
|
||||||
doai.io : 'http://doai.io/'
|
doai.io : 'https://doai.io/'
|
||||||
sci-hub.tw : 'http://sci-hub.tw/'
|
sci-hub.tw : 'https://sci-hub.tw/'
|
||||||
|
|
||||||
default_doi_resolver : 'oadoi.org'
|
default_doi_resolver : 'oadoi.org'
|
||||||
|
|
|
@ -43,7 +43,7 @@ locales:
|
||||||
doi_resolvers :
|
doi_resolvers :
|
||||||
oadoi.org : 'https://oadoi.org/'
|
oadoi.org : 'https://oadoi.org/'
|
||||||
doi.org : 'https://doi.org/'
|
doi.org : 'https://doi.org/'
|
||||||
doai.io : 'http://doai.io/'
|
doai.io : 'https://doai.io/'
|
||||||
sci-hub.tw : 'http://sci-hub.tw/'
|
sci-hub.tw : 'https://sci-hub.tw/'
|
||||||
|
|
||||||
default_doi_resolver : 'oadoi.org'
|
default_doi_resolver : 'oadoi.org'
|
||||||
|
|
|
@ -125,6 +125,14 @@ $(document).ready(function() {
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
function nextResult(current, direction) {
|
||||||
|
var next = current[direction]();
|
||||||
|
while (!next.is('.result') && next.length !== 0) {
|
||||||
|
next = next[direction]();
|
||||||
|
}
|
||||||
|
return next
|
||||||
|
}
|
||||||
|
|
||||||
function highlightResult(which) {
|
function highlightResult(which) {
|
||||||
return function() {
|
return function() {
|
||||||
var current = $('.result[data-vim-selected]');
|
var current = $('.result[data-vim-selected]');
|
||||||
|
@ -157,13 +165,13 @@ $(document).ready(function() {
|
||||||
}
|
}
|
||||||
break;
|
break;
|
||||||
case 'down':
|
case 'down':
|
||||||
next = current.next('.result');
|
next = nextResult(current, 'next');
|
||||||
if (next.length === 0) {
|
if (next.length === 0) {
|
||||||
next = $('.result:first');
|
next = $('.result:first');
|
||||||
}
|
}
|
||||||
break;
|
break;
|
||||||
case 'up':
|
case 'up':
|
||||||
next = current.prev('.result');
|
next = nextResult(current, 'prev');
|
||||||
if (next.length === 0) {
|
if (next.length === 0) {
|
||||||
next = $('.result:last');
|
next = $('.result:last');
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,61 +1,61 @@
|
||||||
/*
|
/*
|
||||||
* searx, A privacy-respecting, hackable metasearch engine
|
* searx, A privacy-respecting, hackable metasearch engine
|
||||||
*/
|
*/
|
||||||
|
|
||||||
ul {
|
ul {
|
||||||
&.autocompleter-choices {
|
&.autocompleter-choices {
|
||||||
position: absolute;
|
position: absolute;
|
||||||
margin: 0;
|
margin: 0;
|
||||||
padding: 0;
|
padding: 0;
|
||||||
list-style: none;
|
list-style: none;
|
||||||
border: 1px solid @color-autocompleter-choices-border;
|
border: 1px solid @color-autocompleter-choices-border;
|
||||||
border-left-color: @color-autocompleter-choices-border-left-right;
|
border-left-color: @color-autocompleter-choices-border-left-right;
|
||||||
border-right-color: @color-autocompleter-choices-border-left-right;
|
border-right-color: @color-autocompleter-choices-border-left-right;
|
||||||
border-bottom-color: @color-autocompleter-choices-border-bottom;
|
border-bottom-color: @color-autocompleter-choices-border-bottom;
|
||||||
text-align: left;
|
text-align: left;
|
||||||
font-family: Verdana, Geneva, Arial, Helvetica, sans-serif;
|
font-family: Verdana, Geneva, Arial, Helvetica, sans-serif;
|
||||||
z-index: 50;
|
z-index: 50;
|
||||||
background-color: @color-autocompleter-choices-background;
|
background-color: @color-autocompleter-choices-background;
|
||||||
color: @color-autocompleter-choices-font;
|
color: @color-autocompleter-choices-font;
|
||||||
|
|
||||||
li {
|
li {
|
||||||
position: relative;
|
position: relative;
|
||||||
margin: -2px 0 0 0;
|
margin: -2px 0 0 0;
|
||||||
padding: 0.2em 1.5em 0.2em 1em;
|
padding: 0.2em 1.5em 0.2em 1em;
|
||||||
display: block;
|
display: block;
|
||||||
float: none !important;
|
float: none !important;
|
||||||
cursor: pointer;
|
cursor: pointer;
|
||||||
font-weight: normal;
|
font-weight: normal;
|
||||||
white-space: nowrap;
|
white-space: nowrap;
|
||||||
font-size: 1em;
|
font-size: 1em;
|
||||||
line-height: 1.5em;
|
line-height: 1.5em;
|
||||||
|
|
||||||
&.autocompleter-selected {
|
&.autocompleter-selected {
|
||||||
background-color: @color-autocompleter-selected-background;
|
background-color: @color-autocompleter-selected-background;
|
||||||
color: @color-autocompleter-selected-font;
|
color: @color-autocompleter-selected-font;
|
||||||
|
|
||||||
span.autocompleter-queried {
|
span.autocompleter-queried {
|
||||||
color: @color-autocompleter-selected-queried-font;
|
color: @color-autocompleter-selected-queried-font;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
span.autocompleter-queried {
|
span.autocompleter-queried {
|
||||||
display: inline;
|
display: inline;
|
||||||
float: none;
|
float: none;
|
||||||
font-weight: bold;
|
font-weight: bold;
|
||||||
margin: 0;
|
margin: 0;
|
||||||
padding: 0;
|
padding: 0;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/*.autocompleter-loading {
|
/*.autocompleter-loading {
|
||||||
//background-image: url(images/spinner.gif);
|
//background-image: url(images/spinner.gif);
|
||||||
background-repeat: no-repeat;
|
background-repeat: no-repeat;
|
||||||
background-position: right 50%;
|
background-position: right 50%;
|
||||||
}*/
|
}*/
|
||||||
|
|
||||||
/*textarea.autocompleter-loading {
|
/*textarea.autocompleter-loading {
|
||||||
background-position: right bottom;
|
background-position: right bottom;
|
||||||
}*/
|
}*/
|
||||||
|
|
|
@@ -24,7 +24,7 @@ module.exports = function(grunt) {
    jshint: {
      files: ['gruntfile.js', 'js/searx_src/*.js'],
      options: {
        reporterOutput: "",
        // options here to override JSHint defaults
        globals: {
          jQuery: true,

@@ -55,7 +55,7 @@ module.exports = function(grunt) {
                 "css/logicodev-dark.min.css": "less/logicodev-dark/oscar.less"}
      },
      /*
      // built with ./manage.sh styles
      bootstrap: {
        options: {
          paths: ["less/bootstrap"],

@@ -90,7 +90,7 @@ module.exports = function(grunt) {
  grunt.registerTask('test', ['jshint']);

  grunt.registerTask('default', ['jshint', 'concat', 'uglify', 'less']);

  grunt.registerTask('styles', ['less']);

};

@@ -1,26 +1,26 @@
/**
 * searx is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * searx is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Affero General Public License for more details.
 *
 * You should have received a copy of the GNU Affero General Public License
 * along with searx. If not, see < http://www.gnu.org/licenses/ >.
 *
 * (C) 2014 by Thomas Pointhuber, <thomas.pointhuber@gmx.at>
 */

requirejs.config({
    baseUrl: './static/themes/oscar/js',
    paths: {
        app: '../app'
    }
});
;/**
 * searx is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as published by

@@ -51,306 +51,306 @@ window.searx = (function(d) {
        method: script.getAttribute('data-method')
    };
})(document);
;/**
 * searx is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * searx is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Affero General Public License for more details.
 *
 * You should have received a copy of the GNU Affero General Public License
 * along with searx. If not, see < http://www.gnu.org/licenses/ >.
 *
 * (C) 2014 by Thomas Pointhuber, <thomas.pointhuber@gmx.at>
 */

if(searx.autocompleter) {
    searx.searchResults = new Bloodhound({
        datumTokenizer: Bloodhound.tokenizers.obj.whitespace('value'),
        queryTokenizer: Bloodhound.tokenizers.whitespace,
        remote: './autocompleter?q=%QUERY'
    });
    searx.searchResults.initialize();
}

$(document).ready(function(){
    if(searx.autocompleter) {
        $('#q').typeahead(null, {
            name: 'search-results',
            displayKey: function(result) {
                return result;
            },
            source: searx.searchResults.ttAdapter()
        });
    }
});
;/**
 * searx is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * searx is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Affero General Public License for more details.
 *
 * You should have received a copy of the GNU Affero General Public License
 * along with searx. If not, see < http://www.gnu.org/licenses/ >.
 *
 * (C) 2014 by Thomas Pointhuber, <thomas.pointhuber@gmx.at>
 */

$(document).ready(function(){
    /**
     * focus element if class="autofocus" and id="q"
     */
    $('#q.autofocus').focus();

    /**
     * select full content on click if class="select-all-on-click"
     */
    $(".select-all-on-click").click(function () {
        $(this).select();
    });

    /**
     * change text during btn-collapse click if possible
     */
    $('.btn-collapse').click(function() {
        var btnTextCollapsed = $(this).data('btn-text-collapsed');
        var btnTextNotCollapsed = $(this).data('btn-text-not-collapsed');

        if(btnTextCollapsed !== '' && btnTextNotCollapsed !== '') {
            if($(this).hasClass('collapsed')) {
                new_html = $(this).html().replace(btnTextCollapsed, btnTextNotCollapsed);
            } else {
                new_html = $(this).html().replace(btnTextNotCollapsed, btnTextCollapsed);
            }
            $(this).html(new_html);
        }
    });

    /**
     * change text during btn-toggle click if possible
     */
    $('.btn-toggle .btn').click(function() {
        var btnClass = 'btn-' + $(this).data('btn-class');
        var btnLabelDefault = $(this).data('btn-label-default');
        var btnLabelToggled = $(this).data('btn-label-toggled');
        if(btnLabelToggled !== '') {
            if($(this).hasClass('btn-default')) {
                new_html = $(this).html().replace(btnLabelDefault, btnLabelToggled);
            } else {
                new_html = $(this).html().replace(btnLabelToggled, btnLabelDefault);
            }
            $(this).html(new_html);
        }
        $(this).toggleClass(btnClass);
        $(this).toggleClass('btn-default');
    });

    /**
     * change text during btn-toggle click if possible
     */
    $('.media-loader').click(function() {
        var target = $(this).data('target');
        var iframe_load = $(target + ' > iframe');
        var srctest = iframe_load.attr('src');
        if(srctest === undefined || srctest === false){
            iframe_load.attr('src', iframe_load.data('src'));
        }
    });

    /**
     * Select or deselect every categories on double clic
     */
    $(".btn-sm").dblclick(function() {
        var btnClass = 'btn-' + $(this).data('btn-class'); // primary
        if($(this).hasClass('btn-default')) {
            $(".btn-sm > input").attr('checked', 'checked');
            $(".btn-sm > input").prop("checked", true);
            $(".btn-sm").addClass(btnClass);
            $(".btn-sm").addClass('active');
            $(".btn-sm").removeClass('btn-default');
        } else {
            $(".btn-sm > input").attr('checked', '');
            $(".btn-sm > input").removeAttr('checked');
            $(".btn-sm > input").checked = false;
            $(".btn-sm").removeClass(btnClass);
            $(".btn-sm").removeClass('active');
            $(".btn-sm").addClass('btn-default');
        }
    });
});
;/**
 * searx is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * searx is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Affero General Public License for more details.
 *
 * You should have received a copy of the GNU Affero General Public License
 * along with searx. If not, see < http://www.gnu.org/licenses/ >.
 *
 * (C) 2014 by Thomas Pointhuber, <thomas.pointhuber@gmx.at>
 */

$(document).ready(function(){
    $(".searx_overpass_request").on( "click", function( event ) {
        var overpass_url = "https://overpass-api.de/api/interpreter?data=";
        var query_start = overpass_url + "[out:json][timeout:25];(";
        var query_end = ");out meta;";

        var osm_id = $(this).data('osm-id');
        var osm_type = $(this).data('osm-type');
        var result_table = $(this).data('result-table');
        var result_table_loadicon = "#" + $(this).data('result-table-loadicon');

        // tags which can be ignored
        var osm_ignore_tags = [ "addr:city", "addr:country", "addr:housenumber", "addr:postcode", "addr:street" ];

        if(osm_id && osm_type && result_table) {
            result_table = "#" + result_table;
            var query = null;
            switch(osm_type) {
                case 'node':
                    query = query_start + "node(" + osm_id + ");" + query_end;
                    break;
                case 'way':
                    query = query_start + "way(" + osm_id + ");" + query_end;
                    break;
                case 'relation':
                    query = query_start + "relation(" + osm_id + ");" + query_end;
                    break;
                default:
                    break;
            }
            if(query) {
                //alert(query);
                var ajaxRequest = $.ajax( query )
                .done(function( html) {
                    if(html && html.elements && html.elements[0]) {
                        var element = html.elements[0];
                        var newHtml = $(result_table).html();
                        for (var row in element.tags) {
                            if(element.tags.name === null || osm_ignore_tags.indexOf(row) == -1) {
                                newHtml += "<tr><td>" + row + "</td><td>";
                                switch(row) {
                                    case "phone":
                                    case "fax":
                                        newHtml += "<a href=\"tel:" + element.tags[row].replace(/ /g,'') + "\">" + element.tags[row] + "</a>";
                                        break;
                                    case "email":
                                        newHtml += "<a href=\"mailto:" + element.tags[row] + "\">" + element.tags[row] + "</a>";
                                        break;
                                    case "website":
                                    case "url":
                                        newHtml += "<a href=\"" + element.tags[row] + "\">" + element.tags[row] + "</a>";
                                        break;
                                    case "wikidata":
                                        newHtml += "<a href=\"https://www.wikidata.org/wiki/" + element.tags[row] + "\">" + element.tags[row] + "</a>";
                                        break;
                                    case "wikipedia":
                                        if(element.tags[row].indexOf(":") != -1) {
                                            newHtml += "<a href=\"https://" + element.tags[row].substring(0,element.tags[row].indexOf(":")) + ".wikipedia.org/wiki/" + element.tags[row].substring(element.tags[row].indexOf(":")+1) + "\">" + element.tags[row] + "</a>";
                                            break;
                                        }
                                    /* jshint ignore:start */
                                    default:
                                    /* jshint ignore:end */
                                        newHtml += element.tags[row];
                                        break;
                                }
                                newHtml += "</td></tr>";
                            }
                        }
                        $(result_table).html(newHtml);
                        $(result_table).removeClass('hidden');
                        $(result_table_loadicon).addClass('hidden');
                    }
                })
                .fail(function() {
                    $(result_table_loadicon).html($(result_table_loadicon).html() + "<p class=\"text-muted\">could not load data!</p>");
                });
            }
        }

        // this event occour only once per element
        $( this ).off( event );
    });

    $(".searx_init_map").on( "click", function( event ) {
        var leaflet_target = $(this).data('leaflet-target');
        var map_lon = $(this).data('map-lon');
        var map_lat = $(this).data('map-lat');
        var map_zoom = $(this).data('map-zoom');
        var map_boundingbox = $(this).data('map-boundingbox');
        var map_geojson = $(this).data('map-geojson');

        require(['leaflet-0.7.3.min'], function(leaflet) {
            if(map_boundingbox) {
                southWest = L.latLng(map_boundingbox[0], map_boundingbox[2]);
                northEast = L.latLng(map_boundingbox[1], map_boundingbox[3]);
                map_bounds = L.latLngBounds(southWest, northEast);
            }

            // TODO hack
            // change default imagePath
            L.Icon.Default.imagePath = "./static/themes/oscar/img/map";

            // init map
            var map = L.map(leaflet_target);

            // create the tile layer with correct attribution
            var osmMapnikUrl='https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png';
            var osmMapnikAttrib='Map data © <a href="https://openstreetmap.org">OpenStreetMap</a> contributors';
            var osmMapnik = new L.TileLayer(osmMapnikUrl, {minZoom: 1, maxZoom: 19, attribution: osmMapnikAttrib});

            var osmWikimediaUrl='https://maps.wikimedia.org/osm-intl/{z}/{x}/{y}.png';
            var osmWikimediaAttrib = 'Wikimedia maps beta | Maps data © <a href="https://openstreetmap.org">OpenStreetMap</a> contributors';
            var osmWikimedia = new L.TileLayer(osmWikimediaUrl, {minZoom: 1, maxZoom: 19, attribution: osmWikimediaAttrib});

            // init map view
            if(map_bounds) {
                // TODO hack: https://github.com/Leaflet/Leaflet/issues/2021
                setTimeout(function () {
                    map.fitBounds(map_bounds, {
                        maxZoom:17
                    });
                }, 0);
            } else if (map_lon && map_lat) {
                if(map_zoom)
                    map.setView(new L.LatLng(map_lat, map_lon),map_zoom);
                else
                    map.setView(new L.LatLng(map_lat, map_lon),8);
            }

            map.addLayer(osmMapnik);

            var baseLayers = {
                "OSM Mapnik": osmMapnik/*,
                "OSM Wikimedia": osmWikimedia*/
            };

            L.control.layers(baseLayers).addTo(map);


            if(map_geojson)
                L.geoJson(map_geojson).addTo(map);
            /*else if(map_bounds)
                L.rectangle(map_bounds, {color: "#ff7800", weight: 3, fill:false}).addTo(map);*/
        });

        // this event occour only once per element
        $( this ).off( event );
    });
});

@@ -1,23 +1,23 @@
/**
 * searx is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * searx is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Affero General Public License for more details.
 *
 * You should have received a copy of the GNU Affero General Public License
 * along with searx. If not, see < http://www.gnu.org/licenses/ >.
 *
 * (C) 2014 by Thomas Pointhuber, <thomas.pointhuber@gmx.at>
 */

requirejs.config({
    baseUrl: './static/themes/oscar/js',
    paths: {
        app: '../app'
    }
});

@@ -1,37 +1,37 @@
/**
 * searx is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * searx is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Affero General Public License for more details.
 *
 * You should have received a copy of the GNU Affero General Public License
 * along with searx. If not, see < http://www.gnu.org/licenses/ >.
 *
 * (C) 2014 by Thomas Pointhuber, <thomas.pointhuber@gmx.at>
 */

if(searx.autocompleter) {
    searx.searchResults = new Bloodhound({
        datumTokenizer: Bloodhound.tokenizers.obj.whitespace('value'),
        queryTokenizer: Bloodhound.tokenizers.whitespace,
        remote: './autocompleter?q=%QUERY'
    });
    searx.searchResults.initialize();
}

$(document).ready(function(){
    if(searx.autocompleter) {
        $('#q').typeahead(null, {
            name: 'search-results',
            displayKey: function(result) {
                return result;
            },
            source: searx.searchResults.ttAdapter()
        });
    }
});

@@ -1,99 +1,99 @@
/**
 * searx is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * searx is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Affero General Public License for more details.
 *
 * You should have received a copy of the GNU Affero General Public License
 * along with searx. If not, see < http://www.gnu.org/licenses/ >.
 *
 * (C) 2014 by Thomas Pointhuber, <thomas.pointhuber@gmx.at>
 */

$(document).ready(function(){
    /**
     * focus element if class="autofocus" and id="q"
     */
    $('#q.autofocus').focus();

    /**
     * select full content on click if class="select-all-on-click"
     */
    $(".select-all-on-click").click(function () {
        $(this).select();
    });

    /**
     * change text during btn-collapse click if possible
     */
    $('.btn-collapse').click(function() {
        var btnTextCollapsed = $(this).data('btn-text-collapsed');
        var btnTextNotCollapsed = $(this).data('btn-text-not-collapsed');

        if(btnTextCollapsed !== '' && btnTextNotCollapsed !== '') {
            if($(this).hasClass('collapsed')) {
                new_html = $(this).html().replace(btnTextCollapsed, btnTextNotCollapsed);
            } else {
                new_html = $(this).html().replace(btnTextNotCollapsed, btnTextCollapsed);
            }
            $(this).html(new_html);
        }
    });

    /**
     * change text during btn-toggle click if possible
     */
    $('.btn-toggle .btn').click(function() {
        var btnClass = 'btn-' + $(this).data('btn-class');
        var btnLabelDefault = $(this).data('btn-label-default');
        var btnLabelToggled = $(this).data('btn-label-toggled');
        if(btnLabelToggled !== '') {
            if($(this).hasClass('btn-default')) {
                new_html = $(this).html().replace(btnLabelDefault, btnLabelToggled);
            } else {
                new_html = $(this).html().replace(btnLabelToggled, btnLabelDefault);
            }
            $(this).html(new_html);
        }
        $(this).toggleClass(btnClass);
        $(this).toggleClass('btn-default');
    });

    /**
     * change text during btn-toggle click if possible
     */
    $('.media-loader').click(function() {
        var target = $(this).data('target');
        var iframe_load = $(target + ' > iframe');
        var srctest = iframe_load.attr('src');
        if(srctest === undefined || srctest === false){
            iframe_load.attr('src', iframe_load.data('src'));
        }
    });

    /**
     * Select or deselect every categories on double clic
     */
    $(".btn-sm").dblclick(function() {
        var btnClass = 'btn-' + $(this).data('btn-class'); // primary
        if($(this).hasClass('btn-default')) {
            $(".btn-sm > input").attr('checked', 'checked');
            $(".btn-sm > input").prop("checked", true);
            $(".btn-sm").addClass(btnClass);
            $(".btn-sm").addClass('active');
            $(".btn-sm").removeClass('btn-default');
        } else {
            $(".btn-sm > input").attr('checked', '');
            $(".btn-sm > input").removeAttr('checked');
            $(".btn-sm > input").checked = false;
            $(".btn-sm").removeClass(btnClass);
            $(".btn-sm").removeClass('active');
            $(".btn-sm").addClass('btn-default');
        }
    });
});

@@ -1,167 +1,167 @@
/**
 * searx is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * searx is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Affero General Public License for more details.
 *
 * You should have received a copy of the GNU Affero General Public License
 * along with searx. If not, see < http://www.gnu.org/licenses/ >.
 *
 * (C) 2014 by Thomas Pointhuber, <thomas.pointhuber@gmx.at>
 */

$(document).ready(function(){
    $(".searx_overpass_request").on( "click", function( event ) {
        var overpass_url = "https://overpass-api.de/api/interpreter?data=";
        var query_start = overpass_url + "[out:json][timeout:25];(";
        var query_end = ");out meta;";

        var osm_id = $(this).data('osm-id');
        var osm_type = $(this).data('osm-type');
        var result_table = $(this).data('result-table');
        var result_table_loadicon = "#" + $(this).data('result-table-loadicon');

        // tags which can be ignored
        var osm_ignore_tags = [ "addr:city", "addr:country", "addr:housenumber", "addr:postcode", "addr:street" ];

        if(osm_id && osm_type && result_table) {
            result_table = "#" + result_table;
            var query = null;
            switch(osm_type) {
                case 'node':
                    query = query_start + "node(" + osm_id + ");" + query_end;
                    break;
                case 'way':
                    query = query_start + "way(" + osm_id + ");" + query_end;
                    break;
                case 'relation':
                    query = query_start + "relation(" + osm_id + ");" + query_end;
                    break;
                default:
                    break;
            }
            if(query) {
                //alert(query);
                var ajaxRequest = $.ajax( query )
                .done(function( html) {
                    if(html && html.elements && html.elements[0]) {
                        var element = html.elements[0];
                        var newHtml = $(result_table).html();
                        for (var row in element.tags) {
                            if(element.tags.name === null || osm_ignore_tags.indexOf(row) == -1) {
                                newHtml += "<tr><td>" + row + "</td><td>";
                                switch(row) {
                                    case "phone":
                                    case "fax":
                                        newHtml += "<a href=\"tel:" + element.tags[row].replace(/ /g,'') + "\">" + element.tags[row] + "</a>";
                                        break;
                                    case "email":
                                        newHtml += "<a href=\"mailto:" + element.tags[row] + "\">" + element.tags[row] + "</a>";
                                        break;
                                    case "website":
                                    case "url":
                                        newHtml += "<a href=\"" + element.tags[row] + "\">" + element.tags[row] + "</a>";
                                        break;
                                    case "wikidata":
                                        newHtml += "<a href=\"https://www.wikidata.org/wiki/" + element.tags[row] + "\">" + element.tags[row] + "</a>";
                                        break;
                                    case "wikipedia":
                                        if(element.tags[row].indexOf(":") != -1) {
                                            newHtml += "<a href=\"https://" + element.tags[row].substring(0,element.tags[row].indexOf(":")) + ".wikipedia.org/wiki/" + element.tags[row].substring(element.tags[row].indexOf(":")+1) + "\">" + element.tags[row] + "</a>";
                                            break;
                                        }
                                    /* jshint ignore:start */
                                    default:
                                    /* jshint ignore:end */
                                        newHtml += element.tags[row];
                                        break;
                                }
                                newHtml += "</td></tr>";
                            }
                        }
                        $(result_table).html(newHtml);
                        $(result_table).removeClass('hidden');
                        $(result_table_loadicon).addClass('hidden');
                    }
                })
                .fail(function() {
                    $(result_table_loadicon).html($(result_table_loadicon).html() + "<p class=\"text-muted\">could not load data!</p>");
                });
            }
        }

        // this event occour only once per element
        $( this ).off( event );
    });

    $(".searx_init_map").on( "click", function( event ) {
        var leaflet_target = $(this).data('leaflet-target');
        var map_lon = $(this).data('map-lon');
        var map_lat = $(this).data('map-lat');
        var map_zoom = $(this).data('map-zoom');
        var map_boundingbox = $(this).data('map-boundingbox');
        var map_geojson = $(this).data('map-geojson');

        require(['leaflet-0.7.3.min'], function(leaflet) {
            if(map_boundingbox) {
                southWest = L.latLng(map_boundingbox[0], map_boundingbox[2]);
                northEast = L.latLng(map_boundingbox[1], map_boundingbox[3]);
                map_bounds = L.latLngBounds(southWest, northEast);
            }

            // TODO hack
            // change default imagePath
            L.Icon.Default.imagePath = "./static/themes/oscar/img/map";

            // init map
            var map = L.map(leaflet_target);

            // create the tile layer with correct attribution
            var osmMapnikUrl='https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png';
            var osmMapnikAttrib='Map data © <a href="https://openstreetmap.org">OpenStreetMap</a> contributors';
            var osmMapnik = new L.TileLayer(osmMapnikUrl, {minZoom: 1, maxZoom: 19, attribution: osmMapnikAttrib});

            var osmWikimediaUrl='https://maps.wikimedia.org/osm-intl/{z}/{x}/{y}.png';
            var osmWikimediaAttrib = 'Wikimedia maps beta | Maps data © <a href="https://openstreetmap.org">OpenStreetMap</a> contributors';
            var osmWikimedia = new L.TileLayer(osmWikimediaUrl, {minZoom: 1, maxZoom: 19, attribution: osmWikimediaAttrib});

            // init map view
            if(map_bounds) {
                // TODO hack: https://github.com/Leaflet/Leaflet/issues/2021
                setTimeout(function () {
                    map.fitBounds(map_bounds, {
                        maxZoom:17
                    });
                }, 0);
            } else if (map_lon && map_lat) {
                if(map_zoom)
                    map.setView(new L.LatLng(map_lat, map_lon),map_zoom);
                else
                    map.setView(new L.LatLng(map_lat, map_lon),8);
            }

            map.addLayer(osmMapnik);

            var baseLayers = {
                "OSM Mapnik": osmMapnik/*,
                "OSM Wikimedia": osmWikimedia*/
            };

            L.control.layers(baseLayers).addTo(map);


            if(map_geojson)
                L.geoJson(map_geojson).addTo(map);
            /*else if(map_bounds)
                L.rectangle(map_bounds, {color: "#ff7800", weight: 3, fill:false}).addTo(map);*/
        });

        // this event occour only once per element
        $( this ).off( event );
    });
});

@@ -109,7 +109,7 @@ ul.nav li a {

.btn:hover {
    color:#444 !important;
    background-color: #BBB !important;
}

.btn-primary.active {

@@ -221,7 +221,7 @@ p.btn.btn-default{
}

.table-hover > tbody > tr:hover > td, .table-hover > tbody > tr:hover > th {
    background: rgb(102, 105, 110) !important;
}

.btn-success {

@@ -78,7 +78,7 @@ pre, code{
    user-select: none;
    cursor: default;
    color: #556366;

    &::selection {
        background: transparent; /* WebKit/Blink Browsers */
    }

@@ -99,5 +99,3 @@ pre, code{
.highlight {
    font-weight: 700;
}

@@ -30,7 +30,7 @@
    table-layout: fixed;

}

.infobox_part:last-child {
    margin-bottom: 0;
}

@@ -28,4 +28,3 @@
        width: 80%;
    }
}

@@ -69,7 +69,7 @@
    -ms-user-select: none;
    user-select: none;
    cursor: default;

    &::selection {
        background: transparent; /* WebKit/Blink Browsers */
    }

|
||||||
word-wrap: break-word;
|
word-wrap: break-word;
|
||||||
table-layout: fixed;
|
table-layout: fixed;
|
||||||
}
|
}
|
||||||
|
|
||||||
.infobox_part:last-child {
|
.infobox_part:last-child {
|
||||||
margin-bottom: 0;
|
margin-bottom: 0;
|
||||||
}
|
}
|
||||||
|
|
File diff suppressed because it is too large
@@ -6,7 +6,7 @@
         <div class="panel-body">
             {% if infobox.img_src %}<img class="img-responsive center-block infobox_part" src="{{ image_proxify(infobox.img_src) }}" alt="{{ infobox.infobox }}" />{% endif %}

-            {% if infobox.content %}<bdi><p class="infobox_part">{{ infobox.content }}</p></bdi>{% endif %}
+            {% if infobox.content %}<bdi><p class="infobox_part">{{ infobox.content | safe }}</p></bdi>{% endif %}

             {% if infobox.attributes -%}
             <table class="table table-striped infobox_part">

@@ -1,12 +1,8 @@
-{% if preferences -%}
-<select class="custom-select form-control" name='language'>
-{%- else -%}
-<select class="time_range custom-select form-control" id='language' name='language'>
-{%- endif -%}
+<select class="language custom-select form-control" id="language" name="language" accesskey="l">
     <option value="all" {% if current_language == 'all' %}selected="selected"{% endif %}>{{ _('Default language') }}</option>
     {%- for lang_id,lang_name,country_name,english_name in language_codes | sort(attribute=1) -%}
     <option value="{{ lang_id }}" {% if lang_id == current_language %}selected="selected"{% endif %}>
         {{- lang_name }} {% if country_name %}({{ country_name }}) {% endif %}- {{ lang_id -}}
     </option>
     {%- endfor -%}
 </select>

@ -41,7 +41,7 @@
|
||||||
{% set language_label = _('Search language') %}
|
{% set language_label = _('Search language') %}
|
||||||
{% set language_info = _('What language do you prefer for search?') %}
|
{% set language_info = _('What language do you prefer for search?') %}
|
||||||
{{ preferences_item_header(language_info, language_label, rtl) }}
|
{{ preferences_item_header(language_info, language_label, rtl) }}
|
||||||
{% include 'oscar/languages.html' %}
|
{% include 'oscar/languages.html' %}
|
||||||
{{ preferences_item_footer(language_info, language_label, rtl) }}
|
{{ preferences_item_footer(language_info, language_label, rtl) }}
|
||||||
|
|
||||||
{% set locale_label = _('Interface language') %}
|
{% set locale_label = _('Interface language') %}
|
||||||
|
@@ -156,26 +156,26 @@
<div class="container-fluid">
<fieldset>
<div class="table-responsive">
<table class="table table-hover table-condensed table-striped">
<tr>
{% if not rtl %}
<th>{{ _("Allow") }}</th>
<th>{{ _("Engine name") }}</th>
<th>{{ _("Shortcut") }}</th>
<th>{{ _("Selected language") }}</th>
<th>{{ _("SafeSearch") }}</th>
<th>{{ _("Time range") }}</th>
<th>{{ _("Avg. time") }}</th>
<th>{{ _("Max time") }}</th>
{% else %}
<th>{{ _("Max time") }}</th>
<th>{{ _("Avg. time") }}</th>
<th>{{ _("Time range") }}</th>
<th>{{ _("SafeSearch") }}</th>
<th>{{ _("Selected language") }}</th>
<th>{{ _("Shortcut") }}</th>
<th>{{ _("Engine name") }}</th>
<th>{{ _("Allow") }}</th>
{% endif %}
</tr>
{% for search_engine in engines_by_category[categ] %}
@@ -186,19 +186,19 @@
{{ checkbox_toggle('engine_' + search_engine.name|replace(' ', '_') + '__' + categ|replace(' ', '_'), (search_engine.name, categ) in disabled_engines) }}
</td>
<th>{{ search_engine.name }}</th>
<td class="name">{{ shortcuts[search_engine.name] }}</td>
<td>{{ support_toggle(stats[search_engine.name].supports_selected_language) }}</td>
<td>{{ support_toggle(search_engine.safesearch==True) }}</td>
<td>{{ support_toggle(search_engine.time_range_support==True) }}</td>
<td class="{{ 'danger' if stats[search_engine.name]['warn_time'] else '' }}">{{ 'N/A' if stats[search_engine.name].time==None else stats[search_engine.name].time }}</td>
<td class="{{ 'danger' if stats[search_engine.name]['warn_timeout'] else '' }}">{{ search_engine.timeout }}</td>
{% else %}
<td class="{{ 'danger' if stats[search_engine.name]['warn_timeout'] else '' }}">{{ search_engine.timeout }}</td>
<td class="{{ 'danger' if stats[search_engine.name]['warn_time'] else '' }}">{{ 'N/A' if stats[search_engine.name].time==None else stats[search_engine.name].time }}</td>
<td>{{ support_toggle(search_engine.time_range_support==True) }}</td>
<td>{{ support_toggle(search_engine.safesearch==True) }}</td>
<td>{{ support_toggle(stats[search_engine.name].supports_selected_language) }}</td>
<td>{{ shortcuts[search_engine.name] }}</td>
<th>{{ search_engine.name }}</th>
<td class="onoff-checkbox">
{{ checkbox_toggle('engine_' + search_engine.name|replace(' ', '_') + '__' + categ|replace(' ', '_'), (search_engine.name, categ) in disabled_engines) }}
@@ -207,7 +207,7 @@
</tr>
{% endif %}
{% endfor %}
</table>
</div>
</fieldset>
</div>
@@ -1,18 +1,18 @@
{% from 'oscar/macros.html' import result_header, result_sub_header, result_footer, result_footer_rtl, icon %}

{{ result_header(result, favicons) }}
{{ result_sub_header(result) }}

{% if result.content %}<p class="result-content">{{ result.content|safe }}</p>{% endif %}

{% if result.repository %}<p class="result-content">{{ icon('file') }} <a href="{{ result.repository }}" {% if results_on_new_tab %}target="_blank" rel="noopener noreferrer"{% else %}rel="noreferrer"{% endif %}>{{ result.repository }}</a></p>{% endif %}

<div dir="ltr">
{{ result.codelines|code_highlighter(result.code_language)|safe }}
</div>

{% if rtl %}
{{ result_footer_rtl(result) }}
{% else %}
{{ result_footer(result) }}
{% endif %}
@@ -22,7 +22,7 @@
<span class="label label-default pull-right">{{ result.engine }}</span>{{- "" -}}
<p class="text-muted pull-left">{{ result.pretty_url }}</p>{{- "" -}}
<div class="clearfix"></div>{{- "" -}}
<div class="row">{{- "" -}}
<div class="col-md-6">{{- "" -}}
<a href="{{ result.img_src }}" {% if results_on_new_tab %}target="_blank" rel="noopener noreferrer"{% else %}rel="noreferrer"{% endif %} class="btn btn-default">{{ _('Get image') }}</a>{{- "" -}}
</div>{{- "" -}}
@@ -1,72 +1,72 @@
{% from 'oscar/macros.html' import result_header, result_sub_header, result_footer, result_footer_rtl, icon %}

{{ result_header(result, favicons) }}
{{ result_sub_header(result) }}

{% if (result.latitude and result.longitude) or result.boundingbox %}
<small> • <a class="text-info btn-collapse collapsed searx_init_map cursor-pointer disabled_if_nojs" data-toggle="collapse" data-target="#result-map-{{ index }}" data-leaflet-target="osm-map-{{ index }}" data-map-lon="{{ result.longitude }}" data-map-lat="{{ result.latitude }}" {% if result.boundingbox %}data-map-boundingbox='{{ result.boundingbox|tojson|safe }}'{% endif %} {% if result.geojson %}data-map-geojson='{{ result.geojson|tojson|safe }}'{% endif %} data-btn-text-collapsed="{{ _('show map') }}" data-btn-text-not-collapsed="{{ _('hide map') }}">{{ icon('globe') }} {{ _('show map') }}</a></small>
{% endif %}

{% if result.osm and (result.osm.type and result.osm.id) %}
<small> • <a class="text-info btn-collapse collapsed cursor-pointer searx_overpass_request disabled_if_nojs" data-toggle="collapse" data-target="#result-overpass-{{ index }}" data-osm-type="{{ result.osm.type }}" data-osm-id="{{ result.osm.id }}" data-result-table="result-overpass-table-{{ index }}" data-result-table-loadicon="result-overpass-table-loading-{{ index }}" data-btn-text-collapsed="{{ _('show details') }}" data-btn-text-not-collapsed="{{ _('hide details') }}">{{ icon('map-marker') }} {{ _('show details') }}</a></small>
{% endif %}

{# {% if (result.latitude and result.longitude) %}
<small> • <a class="text-info btn-collapse collapsed cursor-pointer disabled_if_nojs" data-toggle="collapse" data-target="#result-geodata-{{ index }}" data-btn-text-collapsed="{{ _('show geodata') }}" data-btn-text-not-collapsed="{{ _('hide geodata') }}">{{ icon('map-marker') }} {{ _('show geodata') }}</a></small>
{% endif %} #}

<div class="container-fluid">

{% if result.address %}
<p class="row result-content result-adress col-xs-12 col-sm-5 col-md-4" itemscope itemtype="http://schema.org/PostalAddress">
{% if result.address.name %}
<strong itemprop="name">{{ result.address.name }}</strong><br/>
{% endif %}
{% if result.address.road %}
<span itemprop="streetAddress">
{% if result.address.house_number %}{{ result.address.house_number }}, {% endif %}
{{ result.address.road }}
</span><br/>
{% endif %}
{% if result.address.locality %}
<span itemprop="addressLocality">{{ result.address.locality }}</span>
{% if result.address.postcode %}, <span itemprop="postalCode">{{ result.address.postcode }}</span>{% endif %}
<br/>
{% endif %}
{% if result.address.country %}
<span itemprop="addressCountry">{{ result.address.country }}</span>
{% endif %}
</p>
{% endif %}

{% if result.osm and (result.osm.type and result.osm.id) %}
<div class="row result-content collapse col-xs-12 col-sm-7 col-md-8" id="result-overpass-{{ index }}"{% if rtl %} dir="ltr"{% endif %}>
<div class="text-center" id="result-overpass-table-loading-{{ index }}"><img src="{{ url_for('static', filename='img/loader.gif') }}" alt="Loading ..."/></div>
<table class="table table-striped table-condensed hidden" id="result-overpass-table-{{ index }}">
<tr><th>key</th><th>value</th></tr>
</table>
</div>
{% endif %}

{# {% if (result.latitude and result.longitude) %}
<div class="row collapse col-xs-12 col-sm-5 col-md-4" id="result-geodata-{{ index }}">
<strong>Longitude:</strong> {{ result.longitude }} <br/>
<strong>Latitude:</strong> {{ result.latitude }}
</div>
{% endif %} #}

{% if result.content %}<p class="row result-content col-xs-12 col-sm-12 col-md-12">{{ result.content|safe }}</p>{% endif %}

</div>

{% if (result.latitude and result.longitude) or result.boundingbox %}
<div class="collapse" id="result-map-{{ index }}">
<div style="height:300px; width:100%; margin: 10px 0;" id="osm-map-{{ index }}"></div>
</div>
{% endif %}

{% if rtl %}
{{ result_footer_rtl(result) }}
{% else %}
{{ result_footer(result) }}
{% endif %}
@@ -1,27 +1,27 @@
{% from 'oscar/macros.html' import result_header, result_sub_header, result_footer, result_footer_rtl, icon %}

{{ result_header(result, favicons) }}
{{ result_sub_header(result) }}

{% if result.embedded %}
<small> • <a class="text-info btn-collapse collapsed cursor-pointer media-loader disabled_if_nojs" data-toggle="collapse" data-target="#result-video-{{ index }}" data-btn-text-collapsed="{{ _('show video') }}" data-btn-text-not-collapsed="{{ _('hide video') }}">{{ icon('film') }} {{ _('show video') }}</a></small>
{% endif %}

{% if result.embedded %}
<div id="result-video-{{ index }}" class="collapse">
{{ result.embedded|safe }}
</div>
{% endif %}

<div class="container-fluid">
<div class="row">
<a href="{{ result.url }}" {% if results_on_new_tab %}target="_blank" rel="noopener noreferrer"{% else %}rel="noreferrer"{% endif %}><img class="thumbnail col-xs-6 col-sm-4 col-md-4 result-content" src="{{ image_proxify(result.thumbnail) }}" alt="{{ result.title|striptags }} {{ result.engine }}" /></a>
{% if result.content %}<p class="col-xs-12 col-sm-8 col-md-8 result-content">{{ result.content|safe }}</p>{% endif %}
</div>
</div>

{% if rtl %}
{{ result_footer_rtl(result) }}
{% else %}
{{ result_footer(result) }}
{% endif %}
@@ -15,7 +15,68 @@
{% include 'oscar/search.html' %}

<div class="row">
-<div class="col-sm-8" id="main_results">
+<div class="col-sm-4 col-sm-push-8" id="sidebar_results">
+{% if number_of_results != '0' -%}
+<p><small>{{ _('Number of results') }}: {{ number_of_results }}</small></p>
+{%- endif %}
+
+{% if unresponsive_engines and results|length >= 1 -%}
+<div class="alert alert-danger fade in" role="alert">
+<p>{{ _('Engines cannot retrieve results') }}:</p>
+{%- for engine_name, error_type in unresponsive_engines -%}
+{{- engine_name }} ({{ error_type }}){% if not loop.last %}, {% endif %}{{- "" -}}
+{%- endfor -%}
+</div>
+{%- endif %}
+
+{% if infoboxes -%}
+{% for infobox in infoboxes %}
+{% include 'oscar/infobox.html' %}{{- "\n\n" -}}
+{% endfor %}
+{%- endif %}
+
+{% if suggestions %}
+<div class="panel panel-default">
+<div class="panel-heading">
+<h4 class="panel-title">{{ _('Suggestions') }}</h4>
+</div>
+<div class="panel-body">
+{% for suggestion in suggestions %}
+<form method="{{ method or 'POST' }}" action="{{ url_for('index') }}" role="navigation" class="form-inline pull-{% if rtl %}right{% else %}left{% endif %} suggestion_item">
+<input type="hidden" name="q" value="{{ suggestion.url }}">
+<button type="submit" class="btn btn-default btn-xs">{{ suggestion.title }}</button>
+</form>
+{% endfor %}
+</div>
+</div>
+{%- endif %}
+
+<div class="panel panel-default">
+<div class="panel-heading">{{- "" -}}
+<h4 class="panel-title">{{ _('Links') }}</h4>{{- "" -}}
+</div>
+<div class="panel-body">
+<form role="form">{{- "" -}}
+<div class="form-group">{{- "" -}}
+<label for="search_url">{{ _('Search URL') }}</label>{{- "" -}}
+<input id="search_url" type="url" class="form-control select-all-on-click cursor-text" name="search_url" value="{{ search_url() }}" readonly>{{- "" -}}
+</div>{{- "" -}}
+</form>
+<label>{{ _('Download results') }}</label>
+<div class="clearfix"></div>
+{% for output_type in ('csv', 'json', 'rss') %}
+<form method="{{ method or 'POST' }}" action="{{ url_for('index') }}" class="form-inline pull-{% if rtl %}right{% else %}left{% endif %} result_download">
+{{- search_form_attrs(pageno) -}}
+<input type="hidden" name="format" value="{{ output_type }}">{{- "" -}}
+<button type="submit" class="btn btn-default">{{ output_type }}</button>{{- "" -}}
+</form>
+{% endfor %}
+<div class="clearfix"></div>
+</div>
+</div>
+</div><!-- /#sidebar_results -->
+
+<div class="col-sm-8 col-sm-pull-4" id="main_results">
<h1 class="sr-only">{{ _('Search results') }}</h1>

{% if corrections -%}
@@ -91,66 +152,5 @@
{% endif %}
{% endif %}
</div><!-- /#main_results -->
-
-<div class="col-sm-4" id="sidebar_results">
-{% if number_of_results != '0' -%}
-<p><small>{{ _('Number of results') }}: {{ number_of_results }}</small></p>
-{%- endif %}
-
-{% if unresponsive_engines and results|length >= 1 -%}
-<div class="alert alert-danger fade in" role="alert">
-<p>{{ _('Engines cannot retrieve results') }}:</p>
-{%- for engine_name, error_type in unresponsive_engines -%}
-{{- engine_name }} ({{ error_type }}){% if not loop.last %}, {% endif %}{{- "" -}}
-{%- endfor -%}
-</div>
-{%- endif %}
-
-{% if infoboxes -%}
-{% for infobox in infoboxes %}
-{% include 'oscar/infobox.html' %}{{- "\n\n" -}}
-{% endfor %}
-{%- endif %}
-
-{% if suggestions %}
-<div class="panel panel-default">
-<div class="panel-heading">
-<h4 class="panel-title">{{ _('Suggestions') }}</h4>
-</div>
-<div class="panel-body">
-{% for suggestion in suggestions %}
-<form method="{{ method or 'POST' }}" action="{{ url_for('index') }}" role="navigation" class="form-inline pull-{% if rtl %}right{% else %}left{% endif %} suggestion_item">
-<input type="hidden" name="q" value="{{ suggestion.url }}">
-<button type="submit" class="btn btn-default btn-xs">{{ suggestion.title }}</button>
-</form>
-{% endfor %}
-</div>
-</div>
-{%- endif %}
-
-<div class="panel panel-default">
-<div class="panel-heading">{{- "" -}}
-<h4 class="panel-title">{{ _('Links') }}</h4>{{- "" -}}
-</div>
-<div class="panel-body">
-<form role="form">{{- "" -}}
-<div class="form-group">{{- "" -}}
-<label for="search_url">{{ _('Search URL') }}</label>{{- "" -}}
-<input id="search_url" type="url" class="form-control select-all-on-click cursor-text" name="search_url" value="{{ search_url() }}" readonly>{{- "" -}}
-</div>{{- "" -}}
-</form>
-<label>{{ _('Download results') }}</label>
-<div class="clearfix"></div>
-{% for output_type in ('csv', 'json', 'rss') %}
-<form method="{{ method or 'POST' }}" action="{{ url_for('index') }}" class="form-inline pull-{% if rtl %}right{% else %}left{% endif %} result_download">
-{{- search_form_attrs(pageno) -}}
-<input type="hidden" name="format" value="{{ output_type }}">{{- "" -}}
-<button type="submit" class="btn btn-default">{{ output_type }}</button>{{- "" -}}
-</form>
-{% endfor %}
-<div class="clearfix"></div>
-</div>
-</div>
-</div><!-- /#sidebar_results -->
</div>
{% endblock %}
@@ -3,7 +3,7 @@
<div class="row">
<div class="col-xs-12 col-md-8">
<div class="input-group search-margin">
-<input type="search" name="q" class="form-control" id="q" placeholder="{{ _('Search for...') }}" aria-label="{{ _('Search for...') }}" autocomplete="off" value="{{ q }}">
+<input type="search" name="q" class="form-control" id="q" placeholder="{{ _('Search for...') }}" aria-label="{{ _('Search for...') }}" autocomplete="off" value="{{ q }}" accesskey="s">
<span class="input-group-btn">
<button type="submit" class="btn btn-default" aria-label="{{ _('Start search') }}"><span class="hide_if_nojs">{{ icon('search') }}</span><span class="hidden active_if_nojs">{{ _('Start search') }}</span></button>
</span>
@@ -1,18 +1,18 @@
{% from 'oscar/macros.html' import icon %}

<form method="{{ method or 'POST' }}" action="{{ url_for('index') }}" id="search_form" role="search">
{% if rtl %}
<div class="input-group">
{% else %}
<div class="input-group col-md-8 col-md-offset-2">
{% endif %}
-<input type="search" name="q" class="form-control input-lg autofocus" id="q" placeholder="{{ _('Search for...') }}" aria-label="{{ _('Search for...') }}" autocomplete="off" value="{{ q }}">
+<input type="search" name="q" class="form-control input-lg autofocus" id="q" placeholder="{{ _('Search for...') }}" aria-label="{{ _('Search for...') }}" autocomplete="off" value="{{ q }}" accesskey="s">
<span class="input-group-btn">
<button type="submit" class="btn btn-default input-lg" aria-label="{{ _('Start search') }}"><span class="hide_if_nojs">{{ icon('search') }}</span><span class="hidden active_if_nojs">{{ _('Start search') }}</span></button>
</span>
</div>
<div class="col-md-8 col-md-offset-2 advanced">
{% include 'oscar/advanced.html' %}
</div>

</form><!-- / #search_form_full -->
@@ -1,4 +1,4 @@
-<select name="time_range" id="time-range" class="custom-select form-control">{{- "" -}}
+<select name="time_range" id="time-range" class="custom-select form-control" accesskey="t">{{- "" -}}
<option id="time-range-anytime" value="" {{ "selected" if time_range=="" or not time_range else ""}}>
{{- _('Anytime') -}}
</option>{{- "" -}}
@@ -606,11 +606,11 @@ def index():
# HTML output format

# suggestions: use RawTextQuery to get the suggestion URLs with the same bang
-suggestion_urls = map(lambda suggestion: {
+suggestion_urls = list(map(lambda suggestion: {
'url': raw_text_query.changeSearchQuery(suggestion).getFullQuery(),
'title': suggestion
},
-result_container.suggestions)
+result_container.suggestions))

correction_urls = list(map(lambda correction: {
'url': raw_text_query.changeSearchQuery(correction).getFullQuery(),
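The `list(map(...))` wrapper in the webapp.py hunk above is the Python 3 compatibility fix: on Python 3, `map()` returns a lazy, single-pass iterator rather than a list, so a Jinja template that counts or re-iterates `suggestion_urls` would see an empty value on the second pass. A minimal sketch of the difference (illustrative values only, not searx code):

# Python 3 behaviour that motivates wrapping map() in list().
suggestions = ['foo', 'bar']

lazy = map(lambda s: {'url': '!ddg ' + s, 'title': s}, suggestions)
print(len(list(lazy)))  # 2 -- consuming the iterator exhausts it
print(list(lazy))       # [] -- a second pass yields nothing

# Materialising the results once lets the template iterate them freely.
eager = list(map(lambda s: {'url': '!ddg ' + s, 'title': s}, suggestions))
print(len(eager), eager[0]['title'])  # 2 foo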
@@ -1,37 +0,0 @@
# -*- coding: utf-8 -*-
from collections import defaultdict
import mock
from searx.engines import pubmed
from searx.testing import SearxTestCase


class TestPubmedEngine(SearxTestCase):

    def test_request(self):
        query = 'test_query'
        dicto = defaultdict(dict)
        dicto['pageno'] = 1
        params = pubmed.request(query, dicto)
        self.assertIn('url', params)
        self.assertIn('eutils.ncbi.nlm.nih.gov/', params['url'])
        self.assertIn('term', params['url'])

    def test_response(self):
        self.assertRaises(AttributeError, pubmed.response, None)
        self.assertRaises(AttributeError, pubmed.response, [])
        self.assertRaises(AttributeError, pubmed.response, '')
        self.assertRaises(AttributeError, pubmed.response, '[]')

        response = mock.Mock(text='<PubmedArticleSet></PubmedArticleSet>')
        self.assertEqual(pubmed.response(response), [])

        xml_mock = """<eSearchResult><Count>1</Count><RetMax>1</RetMax><RetStart>0</RetStart><IdList>
<Id>1</Id>
</IdList></eSearchResult>
"""

        response = mock.Mock(text=xml_mock.encode('utf-8'))
        results = pubmed.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 1)
        self.assertEqual(results[0]['content'], 'No abstract is available for this publication.')
@@ -1,110 +0,0 @@
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
</head>
<body>
<div id="header">
<div id="whoIsYou">
<a href="/lang.php"><small>SeedPeer in your own language?</small></a> <a href="http://www.seedpeer.eu"><img src="/images/flags/uk.gif" width="16px" alt="Torrents EN" /></a> <a href="http://spanish.seedpeer.eu"><img src="/images/flags/es.gif" width="16px" alt="Torrents ES" /></a> <a href="http://german.seedpeer.eu"><img src="/images/flags/de.gif" width="16px" alt="Torrents DE" /></a> <a href="http://french.seedpeer.eu"><img src="/images/flags/fr.gif" width="16px" alt="Torrents FR" /></a> <a href="http://portuguese.seedpeer.eu"><img src="/images/flags/pt.gif" width="16px" alt="Torrents Portuguese" /></a> <a href="http://swedish.seedpeer.eu"><img src="/images/flags/se.gif" width="16px" alt="Torrents Sweden" /></a>
</div>

<script type="text/javascript">
whoIsYou();
</script>
<div id="search">
<form action="/search.php" method="get">
<input id="topsearchbar" name="search" value="narcos season 2" />
<input type="submit" class="searchbutton" value="Torrents" />
<input style="color:#000" type="submit" class="searchbutton" name="usenet" value="Usenet Binaries" />
</form>
<div id="suggestion"></div>
</div>
<div id="logo"><a href="/"><img src="/images/logo2.gif" alt="Seedpeer homepage" width="415" height="143" /></a></div>
<div id="subtext"><a href="/">Home</a> > <a href="/search.html">Torrent search</a> > Narcos season 2 | page 1</div>
</div>
<div id="nav">
<ul>
<!--
<li><font style="color:red;font-size:9px;font-weight:bold;">NEW</font><a title="Download TOP Games for FREE" rel="nofollow" href="http://www.bigrebelads.com/affiliate/index?ref=9301" target="_blank">FREE Games</a></li>

-->
<li style="border-left:none" id="categories"><a title="Browse Torrent Categories" href="/browse.html">Categories</a>
<ul>
<li><a title="Browse Anime Torrents" href="/browse.html#6">Anime</a></li>
<li><a title="Browse Game Torrents" href="/browse.html#4">Games</a></li>
<li><a title="Browse Movie Torrents" href="/browse.html#1">Movies</a></li>
<li><a title="Browse Music Torrents" href="/browse.html#3">Music</a></li>
<li><a title="Browse Software Torrents" href="/browse.html#5">Software</a></li>
<li><a title="Browse TV Torrents" href="/browse.html#2">TV Shows</a></li>
<li><a title="Browse Other Torrents" href="/browse.html#7">Others</a></li>
</ul>
</li>
<li><a title="Upload A Torrents" href="/upload.html">Upload torrent</a></li>
<li id="verified"><a title="Verified Torrents" href="/verified.html">Verified</a></li>
<li id="searchoptions"><a title="Search Torrents" href="/search.html">Torrent search</a></li>
<li id="newsgroups"><a style="color:#212b3e" title="News Groups" href="/usenet.html">Usenet Binaries</a></li>
<li id="about" style="border-right:none"><a rel="nofollow" href="/faq.html">About Us</a>
<ul>
<li><a title="SeedPeer Statistics" href="/stats.html">Statistics</a></li>
<li><a title="Contact Us" href="/contact.html">Contact</a></li>
<li><a title="Frequently Asked Questions" href="/faq.html">FAQ</a></li>
<li><a title="SeedPeer API" href="http://api.seedpeer.eu">Our API</a></li>
<li><a title="SeedPeer Blog" href="/blog">Blog</a></li>
</ul>
</li>
<!--<li><a href="/toolbar.php">Our Toolbar</a></li>-->
</ul>
<div class="clear"></div>
</div>
<div id="body"><div id="pageTop"></div>
<div id="headerbox"><h1>Verified <font class="colored">Narcos season 2</font> torrents</h1></div><table width="100%"><tr><th>
<span style="float:right">
<a href="/search/narcos-season-2/8/1.html"><img style="vertical-align:middle" src="/images/comments.gif" alt="comments" /></a> |
<a href="/search/narcos-season-2/7/1.html"><img style="vertical-align:middle" src="/images/ver.gif" alt="verified" /></a>
</span>
<a href="/search/narcos-season-2/1/1.html">Torrent name</a></th><th class="right"><a href="/search/narcos-season-2/2/1.html">Age</a></th><th class="right"><a href="/search/narcos-season-2/3/1.html">Size</a></th><th class="right"><a href="/search/narcos-season-2/4/1.html">Seeds</a></th><th class="right"><a href="/search/narcos-season-2/5/1.html">Peers</a></th><th class="center"><a href="/search/narcos-season-2/6/1.html">Health</a></th><td class="tableAd" rowspan="6"><iframe src="http://creative.wwwpromoter.com/13689?d=300x250" width="300" height="250" style="border: none;" frameborder="0" scrolling="no"></iframe></td></tr><tr class=""><td><a class="pblink" id="pblink_table_item_1" href="" data-tad="431726" data-last-search="narcos+season+2" target="_blank" rel="nofollow"><strong class='colored'>Narcos season 2</strong> Full Version</a></td><td class="right">20 hours</td><td class="right">681.3 MB</td><td class="right"><font color="green">28</font> </td><td class="right"><font color="navy">654</font> </td><td class="center"><img src="/images/h5.gif" alt="Health" /></td></tr><tr class="tdark"><td><a class="pblink" id="pblink_table_item_2" href="" data-tad="431727" data-url="narcos+season+2" target="_blank" rel="nofollow"><strong class='colored'>Narcos season 2</strong> Trusted Source</a></td><td class="right">12 hours</td><td class="right">787.1 MB</td><td class="right"><font color="green">64</font> </td><td class="right"><font color="navy">220</font> </td><td class="center"><img src="/images/h5.gif" alt="Health" /></td></tr><tr class=""><td><a class="pblink" id="pblink_table_item_3" href="" data-tad="431729" data-last-search="narcos+season+2" target="_blank" rel="nofollow"><strong class='colored'>Full Narcos season 2 Download</strong></a> <small><a class="pblink" id="pblink_table_item_4" href="" data-tad="431729" data-last-search="narcos+season+2" target="_blank" rel="nofollow">Usenet</a></small></td><td class="right">24 hours</td><td class="right">775.5 MB</td><td class="right"><font color="green">60</font> </td><td class="right"><font color="navy">236</font> </td><td class="center"><img src="/images/h5.gif" alt="Health" /></td></tr><tr class="tdark"><td><a class="pblink" id="pblink_table_item_5" href="" data-tad="431730" data-last-search="narcos+season+2" target="_blank" rel="nofollow"><strong class='colored'>Narcos season 2</strong> 2014 - DIRECT STREAMING</a> <small><a class="pblink" id="pblink_table_item_6" href="" data-tad="431729" data-last-search="narcos+season+2" target="_blank" rel="nofollow">Movies</a></small></td><td class="right">17 hours</td><td class="right">654.1 MB</td><td class="right"><font color="green">2</font> </td><td class="right"><font color="navy">391</font> </td><td class="center"><img src="/images/h5.gif" alt="Health" /></td></tr><tr class=""><td><a class="pblink" id="pblink_table_item_7" href="" data-tad="431731" data-last-search="narcos+season+2" target="_blank" rel="nofollow"><strong class='colored'>Narcos season 2</strong> 2014</a> <small><a class="pblink" id="pblink_table_item_8" href="" data-tad="431729" data-last-search="narcos+season+2" target="_blank" rel="nofollow">Movies</a></small></td><td class="right">20 hours</td><td class="right">754.5 MB</td><td class="right"><font color="green">21</font> </td><td class="right"><font color="navy">919</font> </td><td class="center"><img src="/images/h5.gif" alt="Health" /></td></tr></table><br /><br /><center><iframe src='http://creative.wwwpromoter.com/13689?d=728x90' width='728' height='90' style='border: none;' 
frameborder='0' scrolling='no'></iframe><center><span style="float:right;margin:1em .2em 0 0"><a title="Download at the speed of your connection" href="/usenet.php?search=narcos+season+2"><img src="/images/dlf.gif" alt="Search Binaries" /></a></span><div style="margin-bottom:1em;margin-right:290px" id="headerbox"><h1><a href="/searchfeed/narcos+season+2.xml" target="_blank" title="SeedPeer RSS Torrent Search Feed fornarcos season 2"><img src="/images/feedIcon.png" border="0" /></a> 2 <font class="colored">Narcos season 2</font> Torrents were found</h1></div><table width="100%"><tr><th>
<span style="float:right">
<a href="/search/narcos-season-2/8/1.html"><img style="vertical-align:middle" src="/images/comments.gif" alt="comments" /></a> |
<a href="/search/narcos-season-2/7/1.html"><img style="vertical-align:middle" src="/images/ver.gif" alt="verified" /></a>
</span>
<a href="/search/narcos-season-2/1/1.html">Torrent name</a></th><th class="right"><a href="/search/narcos-season-2/2/1.html">Age</a></th><th class="right"><a href="/search/narcos-season-2/3/1.html">Size</a></th><th class="right"><a href="/search/narcos-season-2/4/1.html">Seeds</a></th><th class="right"><a href="/search/narcos-season-2/5/1.html">Peers</a></th><th class="center"><a href="/search/narcos-season-2/6/1.html">Health</a></th></tr><tr class=""><td><small class="comments"><a href="http://www.facebook.com/sharer.php?t=Download%20<strong class='colored'>Narcos</strong> <strong class='colored'>Season</strong> <strong class='colored'>2</strong> Complete 7<strong class='colored'>2</strong>0p WebRip EN-SUB x<strong class='colored'>2</strong>64-[MULVAcoded] S0<strong class='colored'>2</strong>%20 torrent&u=http://seedpeer.seedpeer.eu/details/11686840/Narcos-Season-2-Complete-720p-WebRip-EN-SUB-x264-[MULVAcoded]-S02.html"><img src="/images/facebook.png" alt="Add to Facebook" width="14" height="14" /></a></small><a href="/details/11686840/Narcos-Season-2-Complete-720p-WebRip-EN-SUB-x264-[MULVAcoded]-S02.html"><strong class='colored'>Narcos</strong> <strong class='colored'>Season</strong> <strong class='colored'>2</strong> Complete 7<strong class='colored'>2</strong>0p WebRip EN-SUB x<strong class='colored'>2</strong>64-[MULVAcoded] S0<strong class='colored'>2</strong> <small><a href="/browse.html#11686840"></a></small></a></td><td class="right">19 hours</td><td class="right">4.39 GB</td><td class="right"><font color="green">715</font> </td><td class="right"><font color="navy">183</font> </td><td class="center"><img src="/images/h5.gif" alt="Health" width="40" height="11" /></td></tr><tr class="tdark"><td><small class="comments"><a href="http://www.facebook.com/sharer.php?t=Download%20<strong class='colored'>Narcos</strong> - <strong class='colored'>Season</strong> <strong class='colored'>2</strong> - 7<strong class='colored'>2</strong>0p WEBRiP - x<strong class='colored'>2</strong>65 HEVC - ShAaNiG%20 torrent&u=http://seedpeer.seedpeer.eu/details/11685972/Narcos---Season-2---720p-WEBRiP---x265-HEVC---ShAaNiG.html"><img src="/images/facebook.png" alt="Add to Facebook" width="14" height="14" /></a></small><a href="/details/11685972/Narcos---Season-2---720p-WEBRiP---x265-HEVC---ShAaNiG.html"><strong class='colored'>Narcos</strong> - <strong class='colored'>Season</strong> <strong class='colored'>2</strong> - 7<strong class='colored'>2</strong>0p WEBRiP - x<strong class='colored'>2</strong>65 HEVC - ShAaNiG <small><a href="/browse.html#11685972"></a></small></a></td><td class="right">1 day</td><td class="right">2.48 GB</td><td class="right"><font color="green">861</font> </td><td class="right"><font color="navy">332</font> </td><td class="center"><img src="/images/h5.gif" alt="Health" width="40" height="11" /></td></tr></table><div id="headerbox"><h1>Related searches for: <font class="colored">Narcos season 2</font></h1></div><div id="search_suggestions"><br />Other suggested searches: </div><br /><a href="http://torrentz2.eu/search?f=narcos-season-2">Search for "narcos-season-2" on Torrentz2.eu</a><br /><a href="http://torrent-finder.info/show.php?q=narcos-season-2">Search for "narcos-season-2" on Torrent-Finder</a><br /><center><iframe src='http://creative.wwwpromoter.com/13689?d=300x250' width='300' height='250' style='border: none;' frameborder='0' scrolling='no'></iframe> <iframe src='http://creative.wwwpromoter.com/13689?d=300x250' width='300' height='250' style='border: none;' 
frameborder='0' scrolling='no'></iframe> <iframe src='http://creative.wwwpromoter.com/13689?d=300x250' width='300' height='250' style='border: none;' frameborder='0' scrolling='no'></iframe></center><div id="footer">
<table width="100%">
<tr>
<td width="30%">
<h2>Torrents Download</h2>
<a href="/">Torrent search</a><br />
<a href="/browse.html">Browse categories</a><br />
<a href="/verified.html">Verified Torrents</a><br />
<a href="/order-date.html">Today's torrents</a><br />
<a href="/yesterday.html">Yesterday's torrents</a><br />
<a href="/stats.html">Statistics</a><br />
<br />
<a href="/faq.html#copyright"><strong>Copyright & Removal</strong></a>
</td>
<td width="30%"><h2>Cool Stuff</h2>
<a href="/promotional.php">Promotional</a><br />
<a href="/contact.html">Advertising Information</a><br />
<strong><a href="/plugins.php" title="Add a search plugin to Firefox or Internet Explorer">Search Plugin <span style="color:red">*</span></a></strong><br />
<a href="http://www.utorrent.com">µTorrent Client</a><br />
<a href="/blog">Seedpeer Blog</a><br />
</td>
<td width="30%"><h2>Links</h2>
<a href="http://www.sumotorrent.com" target="_blank"><strong>SumoTorrent</strong></a><br />
<a href="http://www.torrent-finder.info" target="_blank"><strong>Torrent Finder</strong></a><br />
<a href="http://www.torrentpond.com" target="_blank"><strong>TorrentPond</strong></a><br />
<a href="https://www.limetorrents.cc" target="_blank">LimeTorrents.cc</a><br />
<a href="http://www.torrents.to/" target="_blank">Torrents.to</a><br />
<a href="http://www.torrentfunk.com" target="_blank">TorrentFunk</a><br />
<a href="https://monova.org" target="_blank">Monova</a><br />
<a href="http://www.torrentroom.com" target="_blank">TorrentRoom</a><br />
<a href="http://www.katcr.co/" target="_blank">Kickass Torrents Community</a><br />
</td>
<td width="10%"><div id="bottomlogo"></div></td>
</tr>
</table>
<br />
<br />
</div>
</div>
</body>
</html>
@@ -1,78 +0,0 @@
# coding=utf-8
from collections import defaultdict
import mock
from searx.engines import acgsou
from searx.testing import SearxTestCase


class TestAcgsouEngine(SearxTestCase):

    def test_request(self):
        query = 'test_query'
        dic = defaultdict(dict)
        dic['pageno'] = 1
        params = acgsou.request(query, dic)
        self.assertTrue('url' in params)
        self.assertTrue(query in params['url'])
        self.assertTrue('acgsou.com' in params['url'])

    def test_response(self):
        resp = mock.Mock(text='<html></html>')
        self.assertEqual(acgsou.response(resp), [])

        html = u"""
<html>
<table id="listTable" class="list_style table_fixed">
<thead class="tcat">
<tr>
<th axis="string" class="l1 tableHeaderOver">test</th>
<th axis="string" class="l2 tableHeaderOver">test</th>
<th axis="string" class="l3 tableHeaderOver">test</th>
<th axis="size" class="l4 tableHeaderOver">test</th>
<th axis="number" class="l5 tableHeaderOver">test</th>
<th axis="number" class="l6 tableHeaderOver">test</th>
<th axis="number" class="l7 tableHeaderOver">test</th>
<th axis="string" class="l8 tableHeaderOver">test</th>
</tr>
</thead>
<tbody class="tbody" id="data_list">
<tr class="alt1 ">
<td nowrap="nowrap">date</td>
<td><a href="category.html">testcategory テスト</a></td>
<td style="text-align:left;">
<a href="show-torrentid.html" target="_blank">torrentname テスト</a>
</td>
<td>1MB</td>
<td nowrap="nowrap">
<span class="bts_1">
29
</span>
</td>
<td nowrap="nowrap">
<span class="btl_1">
211
</span>
</td>
<td nowrap="nowrap">
<span class="btc_">
168
</span>
</td>
<td><a href="random.html">user</a></td>
</tr>
</tbody>
</table>
</html>
"""

        resp = mock.Mock(text=html)
        results = acgsou.response(resp)

        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 1)

        r = results[0]
        self.assertEqual(r['url'], 'http://www.acgsou.com/show-torrentid.html')
        self.assertEqual(r['content'], u'Category: "testcategory テスト".')
        self.assertEqual(r['title'], u'torrentname テスト')
        self.assertEqual(r['filesize'], 1048576)
@@ -1,111 +0,0 @@
from collections import defaultdict
import mock
from searx.engines import archlinux
from searx.testing import SearxTestCase

domains = {
    'all': 'https://wiki.archlinux.org',
    'de': 'https://wiki.archlinux.de',
    'fr': 'https://wiki.archlinux.fr',
    'ja': 'https://wiki.archlinuxjp.org',
    'ro': 'http://wiki.archlinux.ro',
    'tr': 'http://archtr.org/wiki'
}


class TestArchLinuxEngine(SearxTestCase):

    def test_request(self):
        query = 'test_query'
        dic = defaultdict(dict)
        dic['pageno'] = 1
        dic['language'] = 'en-US'
        params = archlinux.request(query, dic)
        self.assertTrue('url' in params)
        self.assertTrue(query in params['url'])
        self.assertTrue('wiki.archlinux.org' in params['url'])

        for lang, name in archlinux.main_langs:
            dic['language'] = lang
            params = archlinux.request(query, dic)
            self.assertTrue(name in params['url'])

        for lang, domain in domains.items():
            dic['language'] = lang
            params = archlinux.request(query, dic)
            self.assertTrue(domain in params['url'])

    def test_response(self):
        response = mock.Mock(text='<html></html>',
                             search_params={'language': 'en_US'})
        self.assertEqual(archlinux.response(response), [])

        html = """
<ul class="mw-search-results">
<li>
<div class="mw-search-result-heading">
<a href="/index.php/ATI" title="ATI">ATI</a>
</div>
<div class="searchresult">
Lorem ipsum dolor sit amet
</div>
<div class="mw-search-result-data">
30 KB (4,630 words) - 19:04, 17 March 2016</div>
</li>
<li>
<div class="mw-search-result-heading">
<a href="/index.php/Frequently_asked_questions" title="Frequently asked questions">
Frequently asked questions
</a>
</div>
<div class="searchresult">
CPUs with AMDs instruction set "AMD64"
</div>
<div class="mw-search-result-data">
17 KB (2,722 words) - 20:13, 21 March 2016
</div>
</li>
<li>
<div class="mw-search-result-heading">
<a href="/index.php/CPU_frequency_scaling" title="CPU frequency scaling">CPU frequency scaling</a>
</div>
<div class="searchresult">
ondemand for AMD and older Intel CPU
</div>
<div class="mw-search-result-data">
15 KB (2,319 words) - 23:46, 16 March 2016
</div>
</li>
</ul>
"""

        expected = [
            {
                'title': 'ATI',
                'url': 'https://wiki.archlinux.org/index.php/ATI'
            },
            {
                'title': 'Frequently asked questions',
                'url': 'https://wiki.archlinux.org/index.php/Frequently_asked_questions'
            },
            {
                'title': 'CPU frequency scaling',
                'url': 'https://wiki.archlinux.org/index.php/CPU_frequency_scaling'
            }
        ]

        response = mock.Mock(text=html)
        response.search_params = {
            'language': 'en_US'
        }
        results = archlinux.response(response)

        self.assertEqual(type(results), list)
        self.assertEqual(len(results), len(expected))

        i = 0
        for exp in expected:
            res = results[i]
            i += 1
            for key, value in exp.items():
                self.assertEqual(res[key], value)
@@ -1,58 +0,0 @@
# -*- coding: utf-8 -*-
from collections import defaultdict
import mock
from searx.engines import arxiv
from searx.testing import SearxTestCase


class TestBaseEngine(SearxTestCase):

    def test_request(self):
        query = 'test_query'.encode('utf-8')
        dicto = defaultdict(dict)
        dicto['pageno'] = 1
        params = arxiv.request(query, dicto)
        self.assertIn('url', params)
        self.assertIn('export.arxiv.org/api/', params['url'])

    def test_response(self):
        self.assertRaises(AttributeError, arxiv.response, None)
        self.assertRaises(AttributeError, arxiv.response, [])
        self.assertRaises(AttributeError, arxiv.response, '')
        self.assertRaises(AttributeError, arxiv.response, '[]')

        response = mock.Mock(content=b'''<?xml version="1.0" encoding="UTF-8"?>
<feed xmlns="http://www.w3.org/2005/Atom"></feed>''')
        self.assertEqual(arxiv.response(response), [])

        xml_mock = b'''<?xml version="1.0" encoding="UTF-8"?>
<feed xmlns="http://www.w3.org/2005/Atom">
<title type="html">ArXiv Query: search_query=all:test_query&id_list=&start=0&max_results=1</title>
<id>http://arxiv.org/api/1</id>
<updated>2000-01-21T00:00:00-01:00</updated>
<opensearch:totalResults xmlns:opensearch="http://a9.com/-/spec/opensearch/1.1/">1</opensearch:totalResults>
<opensearch:startIndex xmlns:opensearch="http://a9.com/-/spec/opensearch/1.1/">0</opensearch:startIndex>
<opensearch:itemsPerPage xmlns:opensearch="http://a9.com/-/spec/opensearch/1.1/">1</opensearch:itemsPerPage>
<entry>
<id>http://arxiv.org/1</id>
<updated>2000-01-01T00:00:01Z</updated>
<published>2000-01-01T00:00:01Z</published>
<title>Mathematical proof.</title>
<summary>Mathematical formula.</summary>
<author>
<name>A. B.</name>
</author>
<link href="http://arxiv.org/1" rel="alternate" type="text/html"/>
<link title="pdf" href="http://arxiv.org/1" rel="related" type="application/pdf"/>
<category term="math.QA" scheme="http://arxiv.org/schemas/atom"/>
<category term="1" scheme="http://arxiv.org/schemas/atom"/>
</entry>
</feed>
'''

        response = mock.Mock(content=xml_mock)
        results = arxiv.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 1)
        self.assertEqual(results[0]['title'], 'Mathematical proof.')
        self.assertEqual(results[0]['content'], 'Mathematical formula.')
@@ -1,91 +0,0 @@
# -*- coding: utf-8 -*-
from collections import defaultdict
import mock
from searx.engines import base
from searx.testing import SearxTestCase


class TestBaseEngine(SearxTestCase):

    def test_request(self):
        query = 'test_query'
        dicto = defaultdict(dict)
        dicto['pageno'] = 1
        params = base.request(query, dicto)
        self.assertIn('url', params)
        self.assertIn('base-search.net', params['url'])

    def test_response(self):
        self.assertRaises(AttributeError, base.response, None)
        self.assertRaises(AttributeError, base.response, [])
        self.assertRaises(AttributeError, base.response, '')
        self.assertRaises(AttributeError, base.response, '[]')

        response = mock.Mock(content=b'<response></response>')
        self.assertEqual(base.response(response), [])

        xml_mock = b"""<?xml version="1.0"?>
<response>
<lst name="responseHeader">
<int name="status">0</int>
<int name="QTime">1</int>
</lst>
<result name="response" numFound="1" start="0">
<doc>
<date name="dchdate">2000-01-01T01:01:01Z</date>
<str name="dcdocid">1</str>
<str name="dccontinent">cna</str>
<str name="dccountry">us</str>
<str name="dccollection">ftciteseerx</str>
<str name="dcprovider">CiteSeerX</str>
<str name="dctitle">Science and more</str>
<arr name="dccreator">
<str>Someone</str>
</arr>
<arr name="dcperson">
<str>Someone</str>
</arr>
<arr name="dcsubject">
<str>Science and more</str>
</arr>
<str name="dcdescription">Science, and even more.</str>
<arr name="dccontributor">
<str>The neighbour</str>
</arr>
<str name="dcdate">2001</str>
<int name="dcyear">2001</int>
<arr name="dctype">
<str>text</str>
</arr>
<arr name="dctypenorm">
<str>1</str>
</arr>
<arr name="dcformat">
<str>application/pdf</str>
</arr>
<arr name="dccontenttype">
<str>application/pdf</str>
</arr>
<arr name="dcidentifier">
<str>http://example.org/</str>
</arr>
<str name="dclink">http://example.org</str>
<str name="dcsource">http://example.org</str>
<arr name="dclanguage">
<str>en</str>
</arr>
<str name="dcrights">Under the example.org licence</str>
<int name="dcoa">1</int>
<arr name="dclang">
<str>eng</str>
</arr>
</doc>
</result>
</response>"""

        response = mock.Mock(content=xml_mock)
        results = base.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 1)
        self.assertEqual(results[0]['title'], 'Science and more')
        self.assertEqual(results[0]['content'], 'Science, and even more.')
@@ -1,178 +0,0 @@
# -*- coding: utf-8 -*-
from collections import defaultdict
import mock
from searx.engines import bing
from searx.testing import SearxTestCase


class TestBingEngine(SearxTestCase):

    def test_request(self):
        bing.supported_languages = ['en', 'fr', 'zh-CHS', 'zh-CHT', 'pt-PT', 'pt-BR']
        query = u'test_query'
        dicto = defaultdict(dict)
        dicto['pageno'] = 1
        dicto['language'] = 'fr-FR'
        params = bing.request(query.encode('utf-8'), dicto)
        self.assertTrue('url' in params)
        self.assertTrue(query in params['url'])
        self.assertTrue('language%3AFR' in params['url'])
        self.assertTrue('bing.com' in params['url'])

        dicto['language'] = 'all'
        params = bing.request(query.encode('utf-8'), dicto)
        self.assertTrue('language' in params['url'])

    def test_response(self):
        dicto = defaultdict(dict)
        dicto['pageno'] = 1
        dicto['language'] = 'fr-FR'
        self.assertRaises(AttributeError, bing.response, None)
        self.assertRaises(AttributeError, bing.response, [])
        self.assertRaises(AttributeError, bing.response, '')
        self.assertRaises(AttributeError, bing.response, '[]')

        response = mock.Mock(text='<html></html>')
        response.search_params = dicto
        self.assertEqual(bing.response(response), [])

        response = mock.Mock(text='<html></html>')
        response.search_params = dicto
        self.assertEqual(bing.response(response), [])

        html = """
<div>
<div id="b_tween">
<span class="sb_count" data-bm="4">23 900 000 résultats</span>
</div>
<ol id="b_results" role="main">
<div class="sa_cc" u="0|5109|4755453613245655|UAGjXgIrPH5yh-o5oNHRx_3Zta87f_QO">
<div Class="sa_mc">
<div class="sb_tlst">
<h3>
<a href="http://this.should.be.the.link/" h="ID=SERP,5124.1">
<strong>This</strong> should be the title</a>
</h3>
</div>
<div class="sb_meta"><cite><strong>this</strong>.meta.com</cite>
<span class="c_tlbxTrg">
<span class="c_tlbxH" H="BASE:CACHEDPAGEDEFAULT" K="SERP,5125.1">
</span>
</span>
</div>
<p><strong>This</strong> should be the content.</p>
</div>
</div>
</ol>
</div>
"""
        response = mock.Mock(text=html)
        response.search_params = dicto
        results = bing.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 2)
        self.assertEqual(results[0]['title'], 'This should be the title')
        self.assertEqual(results[0]['url'], 'http://this.should.be.the.link/')
        self.assertEqual(results[0]['content'], 'This should be the content.')
        self.assertEqual(results[-1]['number_of_results'], 23900000)

        html = """
<div>
<div id="b_tween">
<span class="sb_count" data-bm="4">9-18 résultats sur 23 900 000</span>
</div>
<ol id="b_results" role="main">
<li class="b_algo" u="0|5109|4755453613245655|UAGjXgIrPH5yh-o5oNHRx_3Zta87f_QO">
<div Class="sa_mc">
<div class="sb_tlst">
<h2>
<a href="http://this.should.be.the.link/" h="ID=SERP,5124.1">
<strong>This</strong> should be the title</a>
</h2>
</div>
<div class="sb_meta"><cite><strong>this</strong>.meta.com</cite>
<span class="c_tlbxTrg">
<span class="c_tlbxH" H="BASE:CACHEDPAGEDEFAULT" K="SERP,5125.1">
</span>
</span>
</div>
<p><strong>This</strong> should be the content.</p>
</div>
</li>
</ol>
</div>
"""
        dicto['pageno'] = 2
        response = mock.Mock(text=html)
        response.search_params = dicto
        results = bing.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 2)
        self.assertEqual(results[0]['title'], 'This should be the title')
        self.assertEqual(results[0]['url'], 'http://this.should.be.the.link/')
        self.assertEqual(results[0]['content'], 'This should be the content.')
        self.assertEqual(results[-1]['number_of_results'], 23900000)

        html = """
<div>
<div id="b_tween">
<span class="sb_count" data-bm="4">23 900 000 résultats</span>
</div>
<ol id="b_results" role="main">
<li class="b_algo" u="0|5109|4755453613245655|UAGjXgIrPH5yh-o5oNHRx_3Zta87f_QO">
<div Class="sa_mc">
<div class="sb_tlst">
<h2>
<a href="http://this.should.be.the.link/" h="ID=SERP,5124.1">
<strong>This</strong> should be the title</a>
</h2>
</div>
<div class="sb_meta"><cite><strong>this</strong>.meta.com</cite>
<span class="c_tlbxTrg">
<span class="c_tlbxH" H="BASE:CACHEDPAGEDEFAULT" K="SERP,5125.1">
</span>
</span>
</div>
<p><strong>This</strong> should be the content.</p>
</div>
</li>
</ol>
</div>
"""
        dicto['pageno'] = 33900000
        response = mock.Mock(text=html)
        response.search_params = dicto
        results = bing.response(response)
        self.assertEqual(bing.response(response), [])

    def test_fetch_supported_languages(self):
        html = """<html></html>"""
        response = mock.Mock(text=html)
        results = bing._fetch_supported_languages(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 0)

        html = """
<html>
<body>
<form>
<div id="limit-languages">
<div>
<div><input id="es" value="es"></input></div>
</div>
<div>
<div><input id="pt_BR" value="pt_BR"></input></div>
<div><input id="pt_PT" value="pt_PT"></input></div>
</div>
</div>
</form>
</body>
</html>
"""
        response = mock.Mock(text=html)
        languages = bing._fetch_supported_languages(response)
        self.assertEqual(type(languages), list)
        self.assertEqual(len(languages), 3)
        self.assertIn('es', languages)
        self.assertIn('pt-BR', languages)
        self.assertIn('pt-PT', languages)
@@ -1,132 +0,0 @@
# -*- coding: utf-8 -*-
from collections import defaultdict
import mock
from searx.engines import bing_images
from searx.testing import SearxTestCase


class TestBingImagesEngine(SearxTestCase):

    def test_request(self):
        bing_images.supported_languages = ['fr-FR', 'en-US']
        bing_images.language_aliases = {}
        query = 'test_query'
        dicto = defaultdict(dict)
        dicto['pageno'] = 1
        dicto['language'] = 'fr-FR'
        dicto['safesearch'] = 1
        dicto['time_range'] = ''
        params = bing_images.request(query, dicto)
        self.assertTrue('url' in params)
        self.assertTrue(query in params['url'])
        self.assertTrue('bing.com' in params['url'])
        self.assertTrue('SRCHHPGUSR' in params['cookies'])
        self.assertTrue('DEMOTE' in params['cookies']['SRCHHPGUSR'])
        self.assertTrue('_EDGE_S' in params['cookies'])
        self.assertTrue('fr-fr' in params['cookies']['_EDGE_S'])

        dicto['language'] = 'fr'
        params = bing_images.request(query, dicto)
        self.assertTrue('_EDGE_S' in params['cookies'])
        self.assertTrue('fr-fr' in params['cookies']['_EDGE_S'])

        dicto['language'] = 'all'
        params = bing_images.request(query, dicto)
        self.assertTrue('_EDGE_S' in params['cookies'])
        self.assertTrue('en-us' in params['cookies']['_EDGE_S'])

    def test_response(self):
        self.assertRaises(AttributeError, bing_images.response, None)
        self.assertRaises(AttributeError, bing_images.response, [])
        self.assertRaises(AttributeError, bing_images.response, '')
        self.assertRaises(AttributeError, bing_images.response, '[]')

        response = mock.Mock(text='<html></html>')
        self.assertEqual(bing_images.response(response), [])

        response = mock.Mock(text='<html></html>')
        self.assertEqual(bing_images.response(response), [])

        html = """
<div id="mmComponent_images_1">
<ul>
<li>
<div>
<div class="imgpt">
<a m='{"purl":"page_url","murl":"img_url","turl":"thumb_url","t":"Page 1 title"}'>
<img src="" alt="alt text" />
</a>
<div class="img_info">
<span>1 x 1 - jpeg</span>
<a>1.example.org</a>
</div>
</div>
<div></div>
</div>
<div>
<div class="imgpt">
<a m='{"purl":"page_url2","murl":"img_url2","turl":"thumb_url2","t":"Page 2 title"}'>
<img src="" alt="alt text 2" />
</a>
<div class="img_info">
<span>2 x 2 - jpeg</span>
<a>2.example.org</a>
</div>
</div>
</div>
</li>
</ul>
<ul>
<li>
<div>
<div class="imgpt">
<a m='{"purl":"page_url3","murl":"img_url3","turl":"thumb_url3","t":"Page 3 title"}'>
<img src="" alt="alt text 3" />
</a>
<div class="img_info">
<span>3 x 3 - jpeg</span>
<a>3.example.org</a>
</div>
</div>
</div>
</li>
</ul>
</div>
"""
        html = html.replace('\r\n', '').replace('\n', '').replace('\r', '')
        response = mock.Mock(text=html)
        results = bing_images.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 3)
        self.assertEqual(results[0]['title'], 'Page 1 title')
        self.assertEqual(results[0]['url'], 'page_url')
        self.assertEqual(results[0]['content'], '')
        self.assertEqual(results[0]['thumbnail_src'], 'thumb_url')
        self.assertEqual(results[0]['img_src'], 'img_url')
        self.assertEqual(results[0]['img_format'], '1 x 1 - jpeg')
        self.assertEqual(results[0]['source'], '1.example.org')

    def test_fetch_supported_languages(self):
        html = """
<div>
<div id="region-section-content">
<ul class="b_vList">
<li>
<a href="https://bing...&setmkt=de-DE&s...">Germany</a>
<a href="https://bing...&setmkt=nb-NO&s...">Norway</a>
</li>
</ul>
<ul class="b_vList">
<li>
<a href="https://bing...&setmkt=es-AR&s...">Argentina</a>
</li>
</ul>
</div>
</div>
"""
        response = mock.Mock(text=html)
        languages = list(bing_images._fetch_supported_languages(response))
        self.assertEqual(len(languages), 3)
        self.assertIn('de-DE', languages)
        self.assertIn('no-NO', languages)
        self.assertIn('es-AR', languages)
@@ -1,147 +0,0 @@
from collections import defaultdict
import mock
from searx.engines import bing_news
from searx.testing import SearxTestCase
import lxml


class TestBingNewsEngine(SearxTestCase):

    def test_request(self):
        bing_news.supported_languages = ['en', 'fr']
        query = 'test_query'
        dicto = defaultdict(dict)
        dicto['pageno'] = 1
        dicto['language'] = 'fr-FR'
        dicto['time_range'] = ''
        params = bing_news.request(query, dicto)
        self.assertIn('url', params)
        self.assertIn(query, params['url'])
        self.assertIn('bing.com', params['url'])
        self.assertIn('fr', params['url'])

        dicto['language'] = 'all'
        params = bing_news.request(query, dicto)
        self.assertIn('en', params['url'])

    def test_no_url_in_request_year_time_range(self):
        dicto = defaultdict(dict)
        query = 'test_query'
        dicto['time_range'] = 'year'
        params = bing_news.request(query, dicto)
        self.assertEqual({}, params['url'])

    def test_response(self):
        self.assertRaises(AttributeError, bing_news.response, None)
        self.assertRaises(AttributeError, bing_news.response, [])
        self.assertRaises(AttributeError, bing_news.response, '')
        self.assertRaises(AttributeError, bing_news.response, '[]')

        response = mock.Mock(content='<html></html>')
        self.assertEqual(bing_news.response(response), [])

        response = mock.Mock(content='<html></html>')
        self.assertEqual(bing_news.response(response), [])

        html = """<?xml version="1.0" encoding="utf-8" ?>
<rss version="2.0" xmlns:News="https://www.bing.com:443/news/search?q=python&amp;setmkt=en-US&amp;first=1&amp;format=RSS">
<channel>
<title>python - Bing News</title>
<link>https://www.bing.com:443/news/search?q=python&amp;setmkt=en-US&amp;first=1&amp;format=RSS</link>
<description>Search results</description>
<image>
<url>http://10.53.64.9/rsslogo.gif</url>
<title>test</title>
<link>https://www.bing.com:443/news/search?q=test&amp;setmkt=en-US&amp;first=1&amp;format=RSS</link>
</image>
<copyright>Copyright</copyright>
<item>
<title>Title</title>
<link>https://www.bing.com/news/apiclick.aspx?ref=FexRss&amp;aid=&amp;tid=c237eccc50bd4758b106a5e3c94fce09&amp;url=http%3a%2f%2furl.of.article%2f&amp;c=xxxxxxxxx&amp;mkt=en-us</link>
<description>Article Content</description>
<pubDate>Tue, 02 Jun 2015 13:37:00 GMT</pubDate>
<News:Source>Infoworld</News:Source>
<News:Image>http://a1.bing4.com/th?id=ON.13371337133713371337133713371337&amp;pid=News</News:Image>
<News:ImageSize>w={0}&amp;h={1}&amp;c=7</News:ImageSize>
<News:ImageKeepOriginalRatio></News:ImageKeepOriginalRatio>
<News:ImageMaxWidth>620</News:ImageMaxWidth>
<News:ImageMaxHeight>413</News:ImageMaxHeight>
</item>
<item>
<title>Another Title</title>
<link>https://www.bing.com/news/apiclick.aspx?ref=FexRss&amp;aid=&amp;tid=c237eccc50bd4758b106a5e3c94fce09&amp;url=http%3a%2f%2fanother.url.of.article%2f&amp;c=xxxxxxxxx&amp;mkt=en-us</link>
<description>Another Article Content</description>
<pubDate>Tue, 02 Jun 2015 13:37:00 GMT</pubDate>
</item>
</channel>
</rss>"""  # noqa
        response = mock.Mock(content=html.encode('utf-8'))
        results = bing_news.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 2)
        self.assertEqual(results[0]['title'], 'Title')
        self.assertEqual(results[0]['url'], 'http://url.of.article/')
        self.assertEqual(results[0]['content'], 'Article Content')
        self.assertEqual(results[0]['img_src'], 'https://www.bing.com/th?id=ON.13371337133713371337133713371337')
        self.assertEqual(results[1]['title'], 'Another Title')
        self.assertEqual(results[1]['url'], 'http://another.url.of.article/')
        self.assertEqual(results[1]['content'], 'Another Article Content')
        self.assertNotIn('img_src', results[1])

        html = """<?xml version="1.0" encoding="utf-8" ?>
<rss version="2.0" xmlns:News="https://www.bing.com:443/news/search?q=python&amp;setmkt=en-US&amp;first=1&amp;format=RSS">
<channel>
<title>python - Bing News</title>
<link>https://www.bing.com:443/news/search?q=python&amp;setmkt=en-US&amp;first=1&amp;format=RSS</link>
<description>Search results</description>
<image>
<url>http://10.53.64.9/rsslogo.gif</url>
<title>test</title>
<link>https://www.bing.com:443/news/search?q=test&amp;setmkt=en-US&amp;first=1&amp;format=RSS</link>
</image>
<copyright>Copyright</copyright>
<item>
<title>Title</title>
<link>http://another.url.of.article/</link>
<description>Article Content</description>
<pubDate>garbage</pubDate>
<News:Source>Infoworld</News:Source>
<News:Image>http://another.bing.com/image</News:Image>
<News:ImageSize>w={0}&amp;h={1}&amp;c=7</News:ImageSize>
<News:ImageKeepOriginalRatio></News:ImageKeepOriginalRatio>
<News:ImageMaxWidth>620</News:ImageMaxWidth>
<News:ImageMaxHeight>413</News:ImageMaxHeight>
</item>
</channel>
</rss>"""  # noqa
        response = mock.Mock(content=html.encode('utf-8'))
        results = bing_news.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 1)
        self.assertEqual(results[0]['title'], 'Title')
        self.assertEqual(results[0]['url'], 'http://another.url.of.article/')
        self.assertEqual(results[0]['content'], 'Article Content')
        self.assertEqual(results[0]['img_src'], 'http://another.bing.com/image')

        html = """<?xml version="1.0" encoding="utf-8" ?>
<rss version="2.0" xmlns:News="https://www.bing.com:443/news/search?q=python&amp;setmkt=en-US&amp;first=1&amp;format=RSS">
<channel>
<title>python - Bing News</title>
<link>https://www.bing.com:443/news/search?q=python&amp;setmkt=en-US&amp;first=1&amp;format=RSS</link>
<description>Search results</description>
<image>
<url>http://10.53.64.9/rsslogo.gif</url>
<title>test</title>
<link>https://www.bing.com:443/news/search?q=test&amp;setmkt=en-US&amp;first=1&amp;format=RSS</link>
</image>
</channel>
</rss>"""  # noqa

        response = mock.Mock(content=html.encode('utf-8'))
        results = bing_news.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 0)

        html = """<?xml version="1.0" encoding="utf-8" ?>gabarge"""
        response = mock.Mock(content=html.encode('utf-8'))
        self.assertRaises(lxml.etree.XMLSyntaxError, bing_news.response, response)
@@ -1,72 +0,0 @@
# -*- coding: utf-8 -*-
from collections import defaultdict
import mock
from searx.engines import bing_videos
from searx.testing import SearxTestCase


class TestBingVideosEngine(SearxTestCase):

    def test_request(self):
        bing_videos.supported_languages = ['fr-FR', 'en-US']
        bing_videos.language_aliases = {}
        query = 'test_query'
        dicto = defaultdict(dict)
        dicto['pageno'] = 1
        dicto['language'] = 'fr-FR'
        dicto['safesearch'] = 0
        dicto['time_range'] = ''
        params = bing_videos.request(query, dicto)
        self.assertTrue('url' in params)
        self.assertTrue(query in params['url'])
        self.assertTrue('bing.com' in params['url'])
        self.assertTrue('SRCHHPGUSR' in params['cookies'])
        self.assertTrue('OFF' in params['cookies']['SRCHHPGUSR'])
        self.assertTrue('_EDGE_S' in params['cookies'])
        self.assertTrue('fr-fr' in params['cookies']['_EDGE_S'])

        dicto['pageno'] = 2
        dicto['time_range'] = 'day'
        dicto['safesearch'] = 2
        params = bing_videos.request(query, dicto)
        self.assertTrue('first=29' in params['url'])
        self.assertTrue('1440' in params['url'])
        self.assertIn('SRCHHPGUSR', params['cookies'])
        self.assertTrue('STRICT' in params['cookies']['SRCHHPGUSR'])

    def test_response(self):
        self.assertRaises(AttributeError, bing_videos.response, None)
        self.assertRaises(AttributeError, bing_videos.response, [])
        self.assertRaises(AttributeError, bing_videos.response, '')
        self.assertRaises(AttributeError, bing_videos.response, '[]')

        response = mock.Mock(text='<html></html>')
        self.assertEqual(bing_videos.response(response), [])

        response = mock.Mock(text='<html></html>')
        self.assertEqual(bing_videos.response(response), [])

        html = """
<div class="dg_u">
<div>
<a>
<div>
<div>
<div class="mc_vtvc_meta_block">
<div><span>100 views</span><span>1 year ago</span></div><div><span>ExampleTube</span><span>Channel 1<span></div> #noqa
</div>
</div>
<div class="vrhdata" vrhm='{"du":"01:11","murl":"https://www.example.com/watch?v=DEADBEEF","thid":"OVP.BINGTHUMB1","vt":"Title 1"}'></div> # noqa
</div>
</a>
</div>
</div>
"""
        response = mock.Mock(text=html)
        results = bing_videos.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 1)
        self.assertEqual(results[0]['title'], 'Title 1')
        self.assertEqual(results[0]['url'], 'https://www.example.com/watch?v=DEADBEEF')
        self.assertEqual(results[0]['content'], '01:11 - 100 views - 1 year ago - ExampleTube - Channel 1')
        self.assertEqual(results[0]['thumbnail'], 'https://www.bing.com/th?id=OVP.BINGTHUMB1')
@@ -1,112 +0,0 @@
# -*- coding: utf-8 -*-
from collections import defaultdict
import mock
from searx.engines import btdigg
from searx.testing import SearxTestCase


class TestBtdiggEngine(SearxTestCase):

    def test_request(self):
        query = 'test_query'
        dicto = defaultdict(dict)
        dicto['pageno'] = 0
        params = btdigg.request(query, dicto)
        self.assertIn('url', params)
        self.assertIn(query, params['url'])
        self.assertIn('btdig.com', params['url'])

    def test_response(self):
        self.assertRaises(AttributeError, btdigg.response, None)
        self.assertRaises(AttributeError, btdigg.response, [])
        self.assertRaises(AttributeError, btdigg.response, '')
        self.assertRaises(AttributeError, btdigg.response, '[]')

        response = mock.Mock(text='<html></html>')
        self.assertEqual(btdigg.response(response), [])

        html = u"""
<div class="one_result" style="display:table-row;background-color:#e8e8e8">
<div style="display:table-cell;color:rgb(0, 0, 0)">
<div style="display:table">
<div style="display:table-row">
<div class="torrent_name" style="display:table-cell">
<a style="color:rgb(0, 0, 204);text-decoration:underline;font-size:150%"
href="http://btdig.com/a72f35b7ee3a10928f02bb799e40ae5db701ed1c/pdf?q=pdf&p=1&order=0"
>3.9GBdeLibrosByHuasoFromHell(3de4)</a>
</div>
</div>
</div>
<div style="display:table">
<div style="display:table-row">
<div style="display:table-cell">
<span class="torrent_files" style="color:#666;padding-left:10px">4217</span> files <span
class="torrent_size" style="color:#666;padding-left:10px">1 GB</span><span
class="torrent_age" style="color:rgb(0, 102, 0);padding-left:10px;margin: 0px 4px"
>found 3 years ago</span>
</div>
</div>
</div>
<div style="display:table;width:100%;padding:10px">
<div style="display:table-row">
<div class="torrent_magnet" style="display:table-cell">
<div class="fa fa-magnet" style="color:#cc0000">
<a href="magnet:?xt=urn:btih:a72f35b7ee3a10928f02bb799e40ae5db701ed1c&dn=3.9GBdeLibrosBy..."
title="Download via magnet-link"> magnet:?xt=urn:btih:a72f35b7ee...</a>
</div>
</div>
<div style="display:table-cell;color:rgb(0, 0, 0);text-align:right">
<span style="color:rgb(136, 136, 136);margin: 0px 0px 0px 4px"></span><span
style="color:rgb(0, 102, 0);margin: 0px 4px">found 3 years ago</span>
</div>
</div>
</div>
<div class="torrent_excerpt" style="display:table;padding:10px;white-space:nowrap">
<div class="fa fa-folder-open" style="padding-left:0em"> 3.9GBdeLibrosByHuasoFromHell(3de4)</div><br/>
<div class="fa fa-folder-open" style="padding-left:1em"> Libros H-Z</div><br/>
<div class="fa fa-folder-open" style="padding-left:2em"> H</div><br/><div class="fa fa-file-archive-o"
style="padding-left:3em"> H.H. Hollis - El truco de la espada-<b
style="color:red; background-color:yellow">pdf</b>.zip</div><span
style="color:#666;padding-left:10px">17 KB</span><br/>
<div class="fa fa-file-archive-o" style="padding-left:3em"> Hagakure - El Libro del Samurai-<b
style="color:red; background-color:yellow">pdf</b>.zip</div><span
style="color:#666;padding-left:10px">95 KB</span><br/>
<div class="fa fa-folder-open" style="padding-left:3em"> Hamsun, Knut (1859-1952)</div><br/>
<div class="fa fa-file-archive-o" style="padding-left:4em"> Hamsun, Knut - Hambre-<b
style="color:red; background-color:yellow">pdf</b>.zip</div><span
style="color:#666;padding-left:10px">786 KB</span><br/>
<div class="fa fa-plus-circle"><a
href="http://btdig.com/a72f35b7ee3a10928f02bb799e40ae5db701ed1c/pdf?q=pdf&p=1&order=0"
> 4214 hidden files<span style="color:#666;padding-left:10px">1 GB</span></a></div>
</div>
</div>
</div>
"""
        response = mock.Mock(text=html.encode('utf-8'))
        results = btdigg.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 1)
        self.assertEqual(results[0]['title'], '3.9GBdeLibrosByHuasoFromHell(3de4)')
        self.assertEqual(results[0]['url'],
                         'http://btdig.com/a72f35b7ee3a10928f02bb799e40ae5db701ed1c/pdf?q=pdf&p=1&order=0')
        self.assertEqual(results[0]['content'],
                         '3.9GBdeLibrosByHuasoFromHell(3de4) | ' +
                         'Libros H-Z | ' +
                         'H H.H. Hollis - El truco de la espada-pdf.zip17 KB | ' +
                         'Hagakure - El Libro del Samurai-pdf.zip95 KB | ' +
                         'Hamsun, Knut (1859-1952) | Hamsun, Knut - Hambre-pdf.zip786 KB | ' +
                         '4214 hidden files1 GB')
        self.assertEqual(results[0]['filesize'], 1 * 1024 * 1024 * 1024)
        self.assertEqual(results[0]['files'], 4217)
        self.assertEqual(results[0]['magnetlink'],
                         'magnet:?xt=urn:btih:a72f35b7ee3a10928f02bb799e40ae5db701ed1c&dn=3.9GBdeLibrosBy...')

        html = """
<div style="display:table-row;background-color:#e8e8e8">

</div>
"""
        response = mock.Mock(text=html.encode('utf-8'))
        results = btdigg.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 0)
@@ -1,56 +0,0 @@
from collections import defaultdict
from datetime import datetime
import mock
from searx.engines import currency_convert
from searx.testing import SearxTestCase


class TestCurrencyConvertEngine(SearxTestCase):

    def test_request(self):
        query = b'test_query'
        dicto = defaultdict(dict)
        dicto['pageno'] = 1
        params = currency_convert.request(query, dicto)
        self.assertNotIn('url', params)

        query = b'convert 10 Pound Sterlings to United States Dollars'
        params = currency_convert.request(query, dicto)
        self.assertIn('url', params)
        self.assertIn('duckduckgo.com', params['url'])
        self.assertIn('GBP', params['url'])
        self.assertIn('USD', params['url'])

    def test_response(self):
        dicto = defaultdict(dict)
        dicto['amount'] = float(10)
        dicto['from'] = "GBP"
        dicto['to'] = "USD"
        dicto['from_name'] = "pound sterling"
        dicto['to_name'] = "United States dollar"
        response = mock.Mock(text='a,b,c,d', search_params=dicto)
        self.assertEqual(currency_convert.response(response), [])
        body = """ddg_spice_currency(
{
"conversion":{
"converted-amount": "0.5"
},
"topConversions":[
{
},
{
}
]
}
);
"""
        response = mock.Mock(text=body, search_params=dicto)
        results = currency_convert.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 1)
        self.assertEqual(results[0]['answer'], '10.0 GBP = 5.0 USD, 1 GBP (pound sterling)' +
                         ' = 0.5 USD (United States dollar)')

        target_url = 'https://duckduckgo.com/js/spice/currency/1/{}/{}'.format(
            dicto['from'], dicto['to'])
        self.assertEqual(results[0]['url'], target_url)
@@ -1,112 +0,0 @@
# -*- coding: utf-8 -*-
from collections import defaultdict
import mock
from searx.engines import dailymotion
from searx.testing import SearxTestCase


class TestDailymotionEngine(SearxTestCase):

    def test_request(self):
        dailymotion.supported_languages = ['en', 'fr']
        query = 'test_query'
        dicto = defaultdict(dict)
        dicto['pageno'] = 0
        dicto['language'] = 'fr-FR'
        params = dailymotion.request(query, dicto)
        self.assertTrue('url' in params)
        self.assertTrue(query in params['url'])
        self.assertTrue('dailymotion.com' in params['url'])
        self.assertTrue('fr' in params['url'])

        dicto['language'] = 'all'
        params = dailymotion.request(query, dicto)
        self.assertTrue('en' in params['url'])

    def test_response(self):
        self.assertRaises(AttributeError, dailymotion.response, None)
        self.assertRaises(AttributeError, dailymotion.response, [])
        self.assertRaises(AttributeError, dailymotion.response, '')
        self.assertRaises(AttributeError, dailymotion.response, '[]')

        response = mock.Mock(text='{}')
        self.assertEqual(dailymotion.response(response), [])

        response = mock.Mock(text='{"data": []}')
        self.assertEqual(dailymotion.response(response), [])

        json = """
{
"page": 1,
"limit": 5,
"explicit": false,
"total": 289487,
"has_more": true,
"list": [
{
"created_time": 1422173451,
"title": "Title",
"description": "Description",
"duration": 81,
"url": "http://www.url",
"thumbnail_360_url": "http://thumbnail",
"id": "x2fit7q"
}
]
}
"""
        response = mock.Mock(text=json)
        results = dailymotion.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 1)
        self.assertEqual(results[0]['title'], 'Title')
        self.assertEqual(results[0]['url'], 'http://www.url')
        self.assertEqual(results[0]['content'], 'Description')
        self.assertIn('x2fit7q', results[0]['embedded'])

        json = r"""
{"toto":[
{"id":200,"name":"Artist Name",
"link":"http:\/\/www.dailymotion.com\/artist\/1217","type":"artist"}
]}
"""
        response = mock.Mock(text=json)
        results = dailymotion.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 0)

    def test_fetch_supported_languages(self):
        json = r"""
{"list":[{"code":"af","name":"Afrikaans","native_name":"Afrikaans",
"localized_name":"Afrikaans","display_name":"Afrikaans"},
{"code":"ar","name":"Arabic","native_name":"\u0627\u0644\u0639\u0631\u0628\u064a\u0629",
"localized_name":"Arabic","display_name":"Arabic"},
{"code":"la","name":"Latin","native_name":null,
"localized_name":"Latin","display_name":"Latin"}
]}
"""
        response = mock.Mock(text=json)
        languages = dailymotion._fetch_supported_languages(response)
        self.assertEqual(type(languages), dict)
        self.assertEqual(len(languages), 3)
        self.assertIn('af', languages)
        self.assertIn('ar', languages)
        self.assertIn('la', languages)

        self.assertEqual(type(languages['af']), dict)
        self.assertEqual(type(languages['ar']), dict)
        self.assertEqual(type(languages['la']), dict)

        self.assertIn('name', languages['af'])
        self.assertIn('name', languages['ar'])
        self.assertNotIn('name', languages['la'])

        self.assertIn('english_name', languages['af'])
        self.assertIn('english_name', languages['ar'])
        self.assertIn('english_name', languages['la'])

        self.assertEqual(languages['af']['name'], 'Afrikaans')
        self.assertEqual(languages['af']['english_name'], 'Afrikaans')
        self.assertEqual(languages['ar']['name'], u'العربية')
        self.assertEqual(languages['ar']['english_name'], 'Arabic')
        self.assertEqual(languages['la']['english_name'], 'Latin')
@@ -1,57 +0,0 @@
from collections import defaultdict
import mock
from searx.engines import deezer
from searx.testing import SearxTestCase


class TestDeezerEngine(SearxTestCase):

    def test_request(self):
        query = 'test_query'
        dicto = defaultdict(dict)
        dicto['pageno'] = 0
        params = deezer.request(query, dicto)
        self.assertTrue('url' in params)
        self.assertTrue(query in params['url'])
        self.assertTrue('deezer.com' in params['url'])

    def test_response(self):
        self.assertRaises(AttributeError, deezer.response, None)
        self.assertRaises(AttributeError, deezer.response, [])
        self.assertRaises(AttributeError, deezer.response, '')
        self.assertRaises(AttributeError, deezer.response, '[]')

        response = mock.Mock(text='{}')
        self.assertEqual(deezer.response(response), [])

        response = mock.Mock(text='{"data": []}')
        self.assertEqual(deezer.response(response), [])

        json = r"""
{"data":[
{"id":100, "title":"Title of track",
"link":"https:\/\/www.deezer.com\/track\/1094042","duration":232,
"artist":{"id":200,"name":"Artist Name",
"link":"https:\/\/www.deezer.com\/artist\/1217","type":"artist"},
"album":{"id":118106,"title":"Album Title","type":"album"},"type":"track"}
]}
"""
        response = mock.Mock(text=json)
        results = deezer.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 1)
        self.assertEqual(results[0]['title'], 'Title of track')
        self.assertEqual(results[0]['url'], 'https://www.deezer.com/track/1094042')
        self.assertEqual(results[0]['content'], 'Artist Name - Album Title - Title of track')
        self.assertTrue('100' in results[0]['embedded'])

        json = r"""
{"data":[
{"id":200,"name":"Artist Name",
"link":"https:\/\/www.deezer.com\/artist\/1217","type":"artist"}
]}
"""
        response = mock.Mock(text=json)
        results = deezer.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 0)
@@ -1,24 +0,0 @@
from collections import defaultdict
import mock
from searx.engines import deviantart
from searx.testing import SearxTestCase


class TestDeviantartEngine(SearxTestCase):

    def test_request(self):
        dicto = defaultdict(dict)
        query = 'test_query'
        dicto['pageno'] = 0
        dicto['time_range'] = ''
        params = deviantart.request(query, dicto)
        self.assertTrue('url' in params)
        self.assertTrue(query in params['url'])
        self.assertTrue('deviantart.com' in params['url'])

    def test_no_url_in_request_year_time_range(self):
        dicto = defaultdict(dict)
        query = 'test_query'
        dicto['time_range'] = 'year'
        params = deviantart.request(query, dicto)
        self.assertEqual({}, params['url'])
@@ -1,61 +0,0 @@
from collections import defaultdict
import mock
from searx.engines import digbt
from searx.testing import SearxTestCase


class TestDigBTEngine(SearxTestCase):

    def test_request(self):
        query = 'test_query'
        dicto = defaultdict(dict)
        dicto['pageno'] = 0
        params = digbt.request(query, dicto)
        self.assertIn('url', params)
        self.assertIn(query, params['url'])
        self.assertIn('digbt.org', params['url'])

    def test_response(self):
        self.assertRaises(AttributeError, digbt.response, None)
        self.assertRaises(AttributeError, digbt.response, [])
        self.assertRaises(AttributeError, digbt.response, '')
        self.assertRaises(AttributeError, digbt.response, '[]')

        response = mock.Mock(text='<html></html>')
        self.assertEqual(digbt.response(response), [])

        html = """
<table class="table">
<tr><td class="x-item">
<div>
<a title="The Big Bang Theory" class="title" href="/The-Big-Bang-Theory-d2.html">
The Big <span class="highlight">Bang</span> Theory
</a>
<span class="ctime"><span style="color:red;">4 hours ago</span></span>
</div>
<div class="files">
<ul>
<li>The Big Bang Theory 2.9 GB</li>
<li>....</li>
</ul>
</div>
<div class="tail">
Files: 1 Size: 2.9 GB Downloads: 1 Updated: <span style="color:red;">4 hours ago</span>

<a class="title" href="magnet:?xt=urn:btih:a&dn=The+Big+Bang+Theory">
<span class="glyphicon glyphicon-magnet"></span> magnet-link
</a>

</div>
</td></tr>
</table>
"""
        response = mock.Mock(text=html.encode('utf-8'))
        results = digbt.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 1)
        self.assertEqual(results[0]['title'], 'The Big Bang Theory')
        self.assertEqual(results[0]['url'], 'https://digbt.org/The-Big-Bang-Theory-d2.html')
        self.assertEqual(results[0]['content'], 'The Big Bang Theory 2.9 GB ....')
        self.assertEqual(results[0]['filesize'], 3113851289)
        self.assertEqual(results[0]['magnetlink'], 'magnet:?xt=urn:btih:a&dn=The+Big+Bang+Theory')
Some files were not shown because too many files have changed in this diff.