Mirror of https://github.com/searxng/searxng.git

Commit 0f4dbc4eca: Merge branch 'master' into uwsgi_static
@@ -4,6 +4,9 @@
 */*/*/*~
 */*/*/*/*~
 
+#
+local/
+
 # Git
 .git
 .gitignore
@@ -36,6 +39,11 @@ robot_report.html
 test_basic/
 setup.cfg
 
+# node_modules
 node_modules/
+*/node_modules/
+*/*/node_modules/
+*/*/*/node_modules/
+*/*/*/*/node_modules/
 
 .tx/
@@ -15,7 +15,7 @@ setup.cfg
 */*.pyc
 *~
 
-node_modules/
+/node_modules
 
 .tx/
 
.travis.yml (31 changed lines)
@@ -1,26 +1,24 @@
+os: linux
+dist: bionic
 language: python
-sudo: false
 cache:
-  - pip
-  - npm
   - directories:
     - $HOME/.cache/pip
 
 addons:
   firefox: "latest"
 
 install:
-  - ./manage.sh install_geckodriver ~/drivers
-  - export PATH=~/drivers:$PATH
-  - ./manage.sh npm_packages
-  - ./manage.sh update_dev_packages
-  - pip install codecov
+  - env
+  - which python; python --version
+  - make V=1 install
+  - make V=1 gecko.driver
+  - make V=1 node.env
+  - make V=1 travis.codecov
 script:
-  - ./manage.sh styles
-  - ./manage.sh grunt_build
-  - ./manage.sh tests
+  - make V=1 themes
+  - make V=1 test
 after_success:
-  - ./manage.sh py_test_coverage
+  - make V=1 test.coverage
   - codecov
 
 stages:
@@ -31,10 +29,13 @@ stages:
 jobs:
   include:
     - python: "2.7"
+      env: PY=2
     - python: "3.5"
     - python: "3.6"
+    - python: "3.7"
+    - python: "3.8"
    - stage: docker
-      python: "3.6"
+      python: "3.8"
      git:
        depth: false
      services:
@@ -44,7 +45,7 @@ jobs:
      install: true
      script:
        - echo "$DOCKER_PASSWORD" | docker login -u "$DOCKER_USERNAME" --password-stdin
-        - ./manage.sh docker_build push
+        - make -e GIT_URL=$(git remote get-url origin) docker.push
      after_success: true
 
 notifications:
@@ -123,3 +123,4 @@ generally made searx better:
 - Vipul @finn0
 - @CaffeinatedTech
 - Robin Schneider @ypid
+- @splintah
@@ -4,6 +4,7 @@ EXPOSE 8080
 VOLUME /etc/searx
 VOLUME /var/log/uwsgi
 
+ARG GIT_URL=unknown
 ARG VERSION_GITCOMMIT=unknown
 ARG SEARX_GIT_VERSION=unknown
 
@@ -69,7 +70,7 @@ RUN su searx -c "/usr/bin/python3 -m compileall -q searx"; \
 
 # Keep this argument at the end since it change each time
 ARG LABEL_DATE=
-LABEL maintainer="searx <https://github.com/asciimoo/searx>" \
+LABEL maintainer="searx <${GIT_URL}>" \
      description="A privacy-respecting, hackable metasearch engine." \
      version="${SEARX_GIT_VERSION}" \
      org.label-schema.schema-version="1.0" \
Makefile (171 changed lines)
@@ -27,23 +27,28 @@ help:
	@echo ' uninstall - uninstall (./local)'
	@echo ' gh-pages  - build docs & deploy on gh-pages branch'
	@echo ' clean     - drop builds and environments'
+	@echo ' project   - re-build generic files of the searx project'
+	@echo ' buildenv  - re-build environment files (aka brand)'
+	@echo ' themes    - re-build build the source of the themes'
+	@echo ' docker    - build Docker image'
+	@echo ' node.env  - download & install npm dependencies locally'
	@echo ''
	@$(MAKE) -s -f utils/makefile.include make-help
	@echo ''
	@$(MAKE) -s -f utils/makefile.python python-help
 
 PHONY += install
-install: pyenvinstall
+install: buildenv pyenvinstall
 
 PHONY += uninstall
 uninstall: pyenvuninstall
 
 PHONY += clean
-clean: pyclean
+clean: pyclean node.clean test.clean
	$(call cmd,common_clean)
 
 PHONY += run
-run: pyenvinstall
+run: buildenv pyenvinstall
	$(Q) ( \
	sed -i -e "s/debug : False/debug : True/g" ./searx/settings.yml ; \
	sleep 2 ; \
@@ -57,36 +62,174 @@ run: pyenvinstall
 # ----
 
 PHONY += docs
-docs: pyenvinstall sphinx-doc
+docs: buildenv pyenvinstall sphinx-doc
	$(call cmd,sphinx,html,docs,docs)
 
 PHONY += docs-live
-docs-live: pyenvinstall sphinx-live
+docs-live: buildenv pyenvinstall sphinx-live
	$(call cmd,sphinx_autobuild,html,docs,docs)
 
 $(GH_PAGES)::
	@echo "doc available at --> $(DOCS_URL)"
 
+# update project files
+# --------------------
+
+PHONY += project engines.languages useragents.update buildenv
+
+project: buildenv useragents.update engines.languages
+
+engines.languages: pyenvinstall
+	$(Q)echo "fetch languages .."
+	$(Q)$(PY_ENV_ACT); python utils/fetch_languages.py
+	$(Q)echo "update searx/data/engines_languages.json"
+	$(Q)mv engines_languages.json searx/data/engines_languages.json
+	$(Q)echo "update searx/languages.py"
+	$(Q)mv languages.py searx/languages.py
+
+useragents.update: pyenvinstall
+	$(Q)echo "Update searx/data/useragents.json with the most recent versions of Firefox."
+	$(Q)$(PY_ENV_ACT); python utils/fetch_firefox_version.py
+
+buildenv:
+	$(Q)echo "build searx/brand.py"
+	$(Q)echo "GIT_URL = '$(GIT_URL)'" > searx/brand.py
+	$(Q)echo "ISSUE_URL = 'https://github.com/asciimoo/searx/issues'" >> searx/brand.py
+	$(Q)echo "SEARX_URL = '$(SEARX_URL)'" >> searx/brand.py
+	$(Q)echo "DOCS_URL = '$(DOCS_URL)'" >> searx/brand.py
+	$(Q)echo "PUBLIC_INSTANCES = 'https://searx.space'" >> searx/brand.py
+	$(Q)echo "build utils/brand.env"
+	$(Q)echo "export GIT_URL='$(GIT_URL)'" > utils/brand.env
+	$(Q)echo "export ISSUE_URL='https://github.com/asciimoo/searx/issues'" >> utils/brand.env
+	$(Q)echo "export SEARX_URL='$(SEARX_URL)'" >> utils/brand.env
+	$(Q)echo "export DOCS_URL='$(DOCS_URL)'" >> utils/brand.env
+	$(Q)echo "export PUBLIC_INSTANCES='https://searx.space'" >> utils/brand.env
+
+# node / npm
+# ----------
+
+node.env: buildenv
+	$(Q)./manage.sh npm_packages
+
+node.clean:
+	$(Q)echo "CLEAN locally installed npm dependencies"
+	$(Q)rm -rf \
+	  ./node_modules \
+	  ./package-lock.json \
+	  ./searx/static/themes/oscar/package-lock.json \
+	  ./searx/static/themes/oscar/node_modules \
+	  ./searx/static/themes/simple/package-lock.json \
+	  ./searx/static/themes/simple/node_modules
+
+# build themes
+# ------------
+
+PHONY += themes.bootstrap themes themes.oscar themes.simple themes.legacy themes.courgette themes.pixart
+themes: buildenv themes.bootstrap themes.oscar themes.simple themes.legacy themes.courgette themes.pixart
+
+quiet_cmd_lessc = LESSC $3
+      cmd_lessc = PATH="$$(npm bin):$$PATH" \
+	lessc --clean-css="--s1 --advanced --compatibility=ie9" "searx/static/$2" "searx/static/$3"
+
+quiet_cmd_grunt = GRUNT $2
+      cmd_grunt = PATH="$$(npm bin):$$PATH" \
+	grunt --gruntfile "$2"
+
+themes.oscar:
+	$(Q)echo '[!] build oscar theme'
+	$(call cmd,grunt,searx/static/themes/oscar/gruntfile.js)
+
+themes.simple:
+	$(Q)echo '[!] build simple theme'
+	$(call cmd,grunt,searx/static/themes/simple/gruntfile.js)
+
+themes.legacy:
+	$(Q)echo '[!] build legacy theme'
+	$(call cmd,lessc,themes/legacy/less/style-rtl.less,themes/legacy/css/style-rtl.css)
+	$(call cmd,lessc,themes/legacy/less/style.less,themes/legacy/css/style.css)
+
+themes.courgette:
+	$(Q)echo '[!] build courgette theme'
+	$(call cmd,lessc,themes/courgette/less/style.less,themes/courgette/css/style.css)
+	$(call cmd,lessc,themes/courgette/less/style-rtl.less,themes/courgette/css/style-rtl.css)
+
+themes.pixart:
+	$(Q)echo '[!] build pixart theme'
+	$(call cmd,lessc,themes/pix-art/less/style.less,themes/pix-art/css/style.css)
+
+themes.bootstrap:
+	$(call cmd,lessc,less/bootstrap/bootstrap.less,css/bootstrap.min.css)
+
+# docker
+# ------
+
+PHONY += docker
+docker: buildenv
+	$(Q)./manage.sh docker_build
+
+docker.push: buildenv
+	$(Q)./manage.sh docker_build push
+
+# gecko
+# -----
+
+PHONY += gecko.driver
+gecko.driver:
+	$(PY_ENV_ACT); ./manage.sh install_geckodriver
+
 # test
 # ----
 
-PHONY += test test.pylint test.pep8 test.unit test.robot
+PHONY += test test.pylint test.pep8 test.unit test.coverage test.robot
 
-test: test.pylint test.pep8 test.unit test.robot
+test: buildenv test.pylint test.pep8 test.unit gecko.driver test.robot
 
+ifeq ($(PY),2)
+test.pylint:
+	@echo "LINT skip liniting py2"
+else
 # TODO: balance linting with pylint
 test.pylint: pyenvinstall
-	$(call cmd,pylint,searx/preferences.py)
-	$(call cmd,pylint,searx/testing.py)
+	$(call cmd,pylint,\
+	  searx/preferences.py \
+	  searx/testing.py \
+	)
+endif
+
+# ignored rules:
+#  E402 module level import not at top of file
+#  W503 line break before binary operator
 
 test.pep8: pyenvinstall
-	$(PY_ENV_ACT); ./manage.sh pep8_check
+	@echo "TEST pep8"
+	$(Q)$(PY_ENV_ACT); pep8 --exclude=searx/static --max-line-length=120 --ignore "E402,W503" searx tests
 
 test.unit: pyenvinstall
-	$(PY_ENV_ACT); ./manage.sh unit_tests
+	@echo "TEST tests/unit"
+	$(Q)$(PY_ENV_ACT); python -m nose2 -s tests/unit
 
-test.robot: pyenvinstall
-	$(PY_ENV_ACT); ./manage.sh install_geckodriver
-	$(PY_ENV_ACT); ./manage.sh robot_tests
+test.coverage: pyenvinstall
+	@echo "TEST unit test coverage"
+	$(Q)$(PY_ENV_ACT); \
+	  python -m nose2 -C --log-capture --with-coverage --coverage searx -s tests/unit \
+	  && coverage report \
+	  && coverage html \
+
+test.robot: pyenvinstall gecko.driver
+	@echo "TEST robot"
+	$(Q)$(PY_ENV_ACT); PYTHONPATH=. python searx/testing.py robot
+
+test.clean:
+	@echo "CLEAN intermediate test stuff"
+	$(Q)rm -rf geckodriver.log .coverage coverage/
+
+# travis
+# ------
+
+travis.codecov:
+	$(Q)$(PY_ENV_BIN)/python -m pip install codecov
+
 .PHONY: $(PHONY)
@@ -23,7 +23,7 @@ Go to the `searx-docker <https://github.com/searx/searx-docker>`__ project.
 
 Without Docker
 --------------
-For all of the details, follow this `step by step installation <https://asciimoo.github.io/searx/dev/install/installation.html>`__.
+For all of the details, follow this `step by step installation <https://asciimoo.github.io/searx/admin/installation.html>`__.
 
 Note: the documentation needs to be updated.
 
@@ -8,3 +8,4 @@ Blog
   python3
   admin
   intro-offline
+  private-engines
@@ -0,0 +1,63 @@
+==================================
+Limit access to your searx engines
+==================================
+
+Administrators might find themselves wanting to limit access to some of the
+enabled engines on their instances. It might be because they do not want to
+expose some private information through an offline engine. Or they
+would rather share engines only with their trusted friends or colleagues.
+
+Private engines
+===============
+
+To solve this issue private engines were introduced in :pull:`1823`.
+A new option was added to engines named `tokens`. It expects a list
+of strings. If the user making a request presents one of the tokens
+of an engine, he/she is able to access information about the engine
+and make search requests.
+
+Example configuration to restrict access to the Arch Linux Wiki engine:
+
+.. code:: yaml
+
+  - name : arch linux wiki
+    engine : archlinux
+    shortcut : al
+    tokens : [ 'my-secret-token' ]
+
+
+Unless a user has configured the right token, the engine is going
+to be hidden from him/her. It is not going to be included in the
+list of engines on the Preferences page and in the output of
+`/config` REST API call.
+
+Tokens can be added to one's configuration on the Preferences page
+under "Engine tokens". The input expects a comma separated list of
+strings.
+
+The distribution of the tokens from the administrator to the users
+is not carved in stone. As providing access to such engines
+implies that the admin knows and trusts the user, we do not see
+necessary to come up with a strict process. Instead,
+we would like to add guidelines to the documentation of the feature.
+
+Next steps
+==========
+
+Now that searx has support for both offline engines and private engines,
+it is possible to add concrete engines which benefit from these features.
+For example engines which search on the local host running the instance.
+Be it searching your file system or querying a private database. Be creative
+and come up with new solutions which fit your use case.
+
+Acknowledgement
+===============
+
+This development was sponsored by `Search and Discovery Fund`_ of `NLnet Foundation`_ .
+
+.. _Search and Discovery Fund: https://nlnet.nl/discovery
+.. _NLnet Foundation: https://nlnet.nl/
+
+
+| Happy hacking.
+| kvch // 2020.02.28 22:26
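As an illustration only -- this is not the code added by :pull:`1823`, and the helper name is made up -- the access rule described in the blog post boils down to a check along these lines:

.. code:: python

  # Hedged sketch of the private-engine rule: an engine that declares a
  # 'tokens' list is only visible to users who present one of those tokens.
  def engine_is_accessible(engine_tokens, user_tokens):
      if not engine_tokens:        # no 'tokens' option -> public engine
          return True
      return any(token in engine_tokens for token in user_tokens)

  # engine configured with  tokens : [ 'my-secret-token' ]
  assert not engine_is_accessible(['my-secret-token'], [])               # hidden
  assert engine_is_accessible(['my-secret-token'], ['my-secret-token'])  # visible
  assert engine_is_accessible([], [])                                    # public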
@@ -4,9 +4,9 @@ import sys, os
 from searx.version import VERSION_STRING
 from pallets_sphinx_themes import ProjectLink
 
-GIT_URL = os.environ.get("GIT_URL", "https://github.com/asciimoo/searx")
-SEARX_URL = os.environ.get("SEARX_URL", "https://searx.me")
-DOCS_URL = os.environ.get("DOCS_URL", "https://asciimoo.github.io/searx/")
+from searx.brand import GIT_URL
+from searx.brand import SEARX_URL
+from searx.brand import DOCS_URL
 
 # Project --------------------------------------------------------------
 
@@ -87,8 +87,8 @@ After satisfying the requirements styles can be build using ``manage.sh``
   ./manage.sh styles
 
 
-How to build the source of the oscar theme
-==========================================
+How to build the source of the themes
+=====================================
 
 .. _grunt: https://gruntjs.com/
 
@@ -98,13 +98,13 @@ NodeJS, so first Node has to be installed.
 .. code:: sh
 
   sudo -H apt-get install nodejs
-  sudo -H npm install -g grunt-cli
+  make node.env
 
 After installing grunt, the files can be built using the following command:
 
 .. code:: sh
 
-  ./manage.sh grunt_build
+  make themes
 
 
 Tips for debugging/development
manage.sh (96 changed lines)
@@ -10,6 +10,7 @@ PYTHONPATH="$BASE_DIR"
 SEARX_DIR="$BASE_DIR/searx"
 ACTION="$1"
 
+. "${BASE_DIR}/utils/brand.env"
 
 #
 # Python
@@ -70,45 +71,6 @@ locales() {
     pybabel compile -d "$SEARX_DIR/translations"
 }
 
-update_useragents() {
-    echo '[!] Updating user agent versions'
-    python utils/fetch_firefox_version.py
-}
-
-pep8_check() {
-    echo '[!] Running pep8 check'
-    # ignored rules:
-    #  E402 module level import not at top of file
-    #  W503 line break before binary operator
-    pep8 --exclude=searx/static --max-line-length=120 --ignore "E402,W503" "$SEARX_DIR" "$BASE_DIR/tests"
-}
-
-unit_tests() {
-    echo '[!] Running unit tests'
-    python -m nose2 -s "$BASE_DIR/tests/unit"
-}
-
-py_test_coverage() {
-    echo '[!] Running python test coverage'
-    PYTHONPATH="`pwd`" python -m nose2 -C --log-capture --with-coverage --coverage "$SEARX_DIR" -s "$BASE_DIR/tests/unit" \
-    && coverage report \
-    && coverage html
-}
-
-robot_tests() {
-    echo '[!] Running robot tests'
-    PYTHONPATH="`pwd`" python "$SEARX_DIR/testing.py" robot
-}
-
-tests() {
-    set -e
-    pep8_check
-    unit_tests
-    install_geckodriver
-    robot_tests
-    set +e
-}
-
 #
 # Web
@@ -135,36 +97,6 @@ npm_packages() {
     npm install
 }
 
-build_style() {
-    npm_path_setup
-
-    lessc --clean-css="--s1 --advanced --compatibility=ie9" "$BASE_DIR/searx/static/$1" "$BASE_DIR/searx/static/$2"
-}
-
-styles() {
-    npm_path_setup
-
-    echo '[!] Building legacy style'
-    build_style themes/legacy/less/style.less themes/legacy/css/style.css
-    build_style themes/legacy/less/style-rtl.less themes/legacy/css/style-rtl.css
-    echo '[!] Building courgette style'
-    build_style themes/courgette/less/style.less themes/courgette/css/style.css
-    build_style themes/courgette/less/style-rtl.less themes/courgette/css/style-rtl.css
-    echo '[!] Building pix-art style'
-    build_style themes/pix-art/less/style.less themes/pix-art/css/style.css
-    echo '[!] Building bootstrap style'
-    build_style less/bootstrap/bootstrap.less css/bootstrap.min.css
-}
-
-grunt_build() {
-    npm_path_setup
-
-    echo '[!] Grunt build : oscar theme'
-    grunt --gruntfile "$SEARX_DIR/static/themes/oscar/gruntfile.js"
-    echo '[!] Grunt build : simple theme'
-    grunt --gruntfile "$SEARX_DIR/static/themes/simple/gruntfile.js"
-}
-
 docker_build() {
     # Check if it is a git repository
     if [ ! -d .git ]; then
@@ -189,8 +121,9 @@ docker_build() {
     SEARX_GIT_VERSION=$(git describe --match "v[0-9]*\.[0-9]*\.[0-9]*" HEAD 2>/dev/null | awk -F'-' '{OFS="-"; $1=substr($1, 2); $3=substr($3, 2); print}')
 
     # add the suffix "-dirty" if the repository has uncommited change
+    # /!\ HACK for searx/searx: ignore searx/brand.py and utils/brand.env
     git update-index -q --refresh
-    if [ ! -z "$(git diff-index --name-only HEAD --)" ]; then
+    if [ ! -z "$(git diff-index --name-only HEAD -- | grep -v 'searx/brand.py' | grep -v 'utils/brand.env')" ]; then
        SEARX_GIT_VERSION="${SEARX_GIT_VERSION}-dirty"
    fi
 
@@ -211,18 +144,18 @@ docker_build() {
    fi
 
    # define the docker image name
-   # /!\ HACK to get the user name /!\
-   GITHUB_USER=$(git remote get-url origin | sed 's/.*github\.com\/\([^\/]*\).*/\1/')
+   GITHUB_USER=$(echo "${GIT_URL}" | sed 's/.*github\.com\/\([^\/]*\).*/\1/')
    SEARX_IMAGE_NAME="${GITHUB_USER:-searx}/searx"
 
    # build Docker image
    echo "Building image ${SEARX_IMAGE_NAME}:${SEARX_GIT_VERSION}"
    sudo docker build \
+        --build-arg GIT_URL="${GIT_URL}" \
        --build-arg SEARX_GIT_VERSION="${SEARX_GIT_VERSION}" \
        --build-arg VERSION_GITCOMMIT="${VERSION_GITCOMMIT}" \
        --build-arg LABEL_DATE=$(date -u +"%Y-%m-%dT%H:%M:%SZ") \
        --build-arg LABEL_VCS_REF=$(git rev-parse HEAD) \
-        --build-arg LABEL_VCS_URL=$(git remote get-url origin) \
+        --build-arg LABEL_VCS_URL="${GIT_URL}" \
        --build-arg TIMESTAMP_SETTINGS=$(git log -1 --format="%cd" --date=unix -- searx/settings.yml) \
        --build-arg TIMESTAMP_UWSGI=$(git log -1 --format="%cd" --date=unix -- dockerfiles/uwsgi.ini) \
        -t ${SEARX_IMAGE_NAME}:latest -t ${SEARX_IMAGE_NAME}:${SEARX_GIT_VERSION} .
@@ -251,22 +184,17 @@ Commands
    update_dev_packages - Check & update development and production dependency changes
    install_geckodriver - Download & install geckodriver if not already installed (required for robot_tests)
    npm_packages - Download & install npm dependencies
-    update_useragents - Update useragents.json with the most recent versions of Firefox
 
    Build
    -----
    locales - Compile locales
-    styles - Build less files
-    grunt_build - Build files for themes
-    docker_build - Build Docker image
 
-    Tests
-    -----
-    unit_tests - Run unit tests
-    pep8_check - Pep8 validation
-    robot_tests - Run selenium tests
-    tests - Run all python tests (pep8, unit, robot_tests)
-    py_test_coverage - Unit test coverage
+    Environment:
+    GIT_URL: ${GIT_URL}
+    ISSUE_URL: ${ISSUE_URL}
+    SEARX_URL: ${SEARX_URL}
+    DOCS_URL: ${DOCS_URL}
+    PUBLIC_INSTANCES: ${PUBLIC_INSTANCES}
 "
 }
@@ -5,6 +5,7 @@ mock==2.0.0
 nose2[coverage_plugin]
 cov-core==1.15.0
 pep8==1.7.0
+pylint
 plone.testing==5.0.0
 splinter==0.11.0
 transifex-client==0.12.2
@@ -1,12 +1,12 @@
-certifi==2019.3.9
+certifi==2020.4.5.1
 babel==2.7.0
 flask-babel==1.0.0
-flask==1.0.2
+flask==1.1.2
-idna==2.8
+idna==2.9
-jinja2==2.10.1
+jinja2==2.11.1
-lxml==4.3.3
+lxml==4.5.0
 pygments==2.1.3
-pyopenssl==19.0.0
+pyopenssl==19.1.0
 python-dateutil==2.8.0
-pyyaml==5.1
+pyyaml==5.3.1
-requests[socks]==2.22.0
+requests[socks]==2.23.0
@@ -0,0 +1,5 @@
+GIT_URL = 'https://github.com/asciimoo/searx'
+ISSUE_URL = 'https://github.com/asciimoo/searx/issues'
+SEARX_URL = 'https://searx.me'
+DOCS_URL = 'https://asciimoo.github.io/searx'
+PUBLIC_INSTANCES = 'https://searx.space'
(File diff suppressed because it is too large.)
@@ -1,14 +1,11 @@
 {
     "versions": [
-        "70.0.1",
-        "70.0",
-        "69.0.3",
-        "69.0.2",
-        "69.0.1",
-        "69.0"
+        "75.0",
+        "74.0.1",
+        "74.0"
    ],
    "os": [
-        "Windows NT 10; WOW64",
+        "Windows NT 10.0; WOW64",
        "X11; Linux x86_64"
    ],
    "ua": "Mozilla/5.0 ({os}; rv:{version}) Gecko/20100101 Firefox/{version}"
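For context, the "ua" entry above is a format template combined with one entry from "os" and "versions". The following is only a sketch of that composition (the composing function is hypothetical, not searx's own helper):

.. code:: python

  # Sketch: build a user agent string from the JSON fields shown above.
  import random

  useragents = {
      "versions": ["75.0", "74.0.1", "74.0"],
      "os": ["Windows NT 10.0; WOW64", "X11; Linux x86_64"],
      "ua": "Mozilla/5.0 ({os}; rv:{version}) Gecko/20100101 Firefox/{version}",
  }

  def random_useragent():
      # pick a random OS string and Firefox version, then fill the template
      return useragents["ua"].format(
          os=random.choice(useragents["os"]),
          version=random.choice(useragents["versions"]),
      )

  print(random_useragent())
  # e.g. Mozilla/5.0 (X11; Linux x86_64; rv:75.0) Gecko/20100101 Firefox/75.0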
@@ -110,13 +110,18 @@ def response(resp):
 
 # get supported languages from their site
 def _fetch_supported_languages(resp):
-    supported_languages = []
-    dom = html.fromstring(resp.text)
-    options = eval_xpath(dom, '//div[@id="limit-languages"]//input')
-    for option in options:
-        code = eval_xpath(option, './@id')[0].replace('_', '-')
-        if code == 'nb':
-            code = 'no'
-        supported_languages.append(code)
-
-    return supported_languages
+    lang_tags = set()
+
+    setmkt = re.compile('setmkt=([^&]*)')
+    dom = html.fromstring(resp.text)
+    lang_links = eval_xpath(dom, "//li/a[contains(@href, 'setmkt')]")
+
+    for a in lang_links:
+        href = eval_xpath(a, './@href')[0]
+        match = setmkt.search(href)
+        l_tag = match.groups()[0]
+        _lang, _nation = l_tag.split('-', 1)
+        l_tag = _lang.lower() + '-' + _nation.upper()
+        lang_tags.add(l_tag)
+
+    return list(lang_tags)
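To make the new parsing step concrete, here is a minimal, self-contained demonstration of the market-code normalisation it performs; the href values are made up for the example:

.. code:: python

  # Sketch: normalise Bing setmkt values into <lang>-<REGION> tags,
  # mirroring the loop added in _fetch_supported_languages above.
  import re

  setmkt = re.compile('setmkt=([^&]*)')

  lang_tags = set()
  for href in ['/?setmkt=de-de&setlang=de', '/?setmkt=pt-br', '/?setmkt=en-us']:
      match = setmkt.search(href)
      l_tag = match.groups()[0]
      _lang, _nation = l_tag.split('-', 1)
      lang_tags.add(_lang.lower() + '-' + _nation.upper())

  print(sorted(lang_tags))   # ['de-DE', 'en-US', 'pt-BR']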
@@ -18,6 +18,8 @@ import re
 from searx.url_utils import urlencode
 from searx.utils import match_language
 
+from searx.engines.bing import _fetch_supported_languages, supported_languages_url, language_aliases
+
 # engine dependent config
 categories = ['images']
 paging = True
@@ -103,22 +105,3 @@ def response(resp):
            continue
 
    return results
-
-
-# get supported languages from their site
-def _fetch_supported_languages(resp):
-    supported_languages = []
-    dom = html.fromstring(resp.text)
-
-    regions_xpath = '//div[@id="region-section-content"]' \
-                    + '//ul[@class="b_vList"]/li/a/@href'
-
-    regions = dom.xpath(regions_xpath)
-    for region in regions:
-        code = re.search('setmkt=[^\&]+', region).group()[7:]
-        if code == 'nb-NO':
-            code = 'no-NO'
-
-        supported_languages.append(code)
-
-    return supported_languages
@@ -15,9 +15,10 @@ from datetime import datetime
 from dateutil import parser
 from lxml import etree
 from searx.utils import list_get, match_language
-from searx.engines.bing import _fetch_supported_languages, supported_languages_url, language_aliases
 from searx.url_utils import urlencode, urlparse, parse_qsl
 
+from searx.engines.bing import _fetch_supported_languages, supported_languages_url, language_aliases
+
 # engine dependent config
 categories = ['news']
 paging = True
@@ -58,6 +59,7 @@ def _get_url(query, language, offset, time_range):
                                          offset=offset,
                                          interval=time_range_dict[time_range])
    else:
+        # e.g. setmkt=de-de&setlang=de
        search_path = search_string.format(
            query=urlencode({'q': query, 'setmkt': language}),
            offset=offset)
@@ -12,10 +12,10 @@
 
 from json import loads
 from lxml import html
-from searx.engines.bing_images import _fetch_supported_languages, supported_languages_url
 from searx.url_utils import urlencode
 from searx.utils import match_language
 
+from searx.engines.bing import _fetch_supported_languages, supported_languages_url, language_aliases
+
 categories = ['videos']
 paging = True
@@ -67,6 +67,10 @@ def request(query, params):
    if params['time_range'] in time_range_dict:
        params['url'] += time_range_string.format(interval=time_range_dict[params['time_range']])
 
+    # bing videos did not like "older" versions < 70.0.1 when selectin other
+    # languages then 'en' .. very strange ?!?!
+    params['headers']['User-Agent'] = 'Mozilla/5.0 (X11; Linux x86_64; rv:73.0.1) Gecko/20100101 Firefox/73.0.1'
+
    return params
@@ -1,96 +0,0 @@
-"""
- Faroo (Web, News)
-
- @website     http://www.faroo.com
- @provide-api yes (http://www.faroo.com/hp/api/api.html), require API-key
-
- @using-api   no
- @results     JSON
- @stable      yes
- @parse       url, title, content, publishedDate, img_src
-"""
-
-from json import loads
-import datetime
-from searx.utils import searx_useragent
-from searx.url_utils import urlencode
-
-# engine dependent config
-categories = ['general', 'news']
-paging = True
-language_support = True
-number_of_results = 10
-
-# search-url
-url = 'http://www.faroo.com/'
-search_url = url + 'instant.json?{query}'\
-    '&start={offset}'\
-    '&length={number_of_results}'\
-    '&l={language}'\
-    '&src={categorie}'\
-    '&i=false'\
-    '&c=false'
-
-search_category = {'general': 'web',
-                   'news': 'news'}
-
-
-# do search-request
-def request(query, params):
-    offset = (params['pageno'] - 1) * number_of_results + 1
-    categorie = search_category.get(params['category'], 'web')
-
-    if params['language'] == 'all':
-        language = 'en'
-    else:
-        language = params['language'].split('-')[0]
-
-    # if language is not supported, put it in english
-    if language != 'en' and\
-       language != 'de' and\
-       language != 'zh':
-        language = 'en'
-
-    params['url'] = search_url.format(offset=offset,
-                                      number_of_results=number_of_results,
-                                      query=urlencode({'q': query}),
-                                      language=language,
-                                      categorie=categorie)
-
-    params['headers']['Referer'] = url
-
-    return params
-
-
-# get response from search-request
-def response(resp):
-    # HTTP-Code 429: rate limit exceeded
-    if resp.status_code == 429:
-        raise Exception("rate limit has been exceeded!")
-
-    results = []
-
-    search_res = loads(resp.text)
-
-    # return empty array if there are no results
-    if not search_res.get('results', {}):
-        return []
-
-    # parse results
-    for result in search_res['results']:
-        publishedDate = None
-        result_json = {'url': result['url'], 'title': result['title'],
-                       'content': result['kwic']}
-        if result['news']:
-            result_json['publishedDate'] = \
-                datetime.datetime.fromtimestamp(result['date'] / 1000.0)
-
-        # append image result if image url is set
-        if result['iurl']:
-            result_json['template'] = 'videos.html'
-            result_json['thumbnail'] = result['iurl']
-
-        results.append(result_json)
-
-    # return results
-    return results
@@ -54,7 +54,7 @@ def request(query, params):
    if params['language'] != 'all':
        language = match_language(params['language'], supported_languages, language_aliases).split('-')[0]
        if language:
-            params['url'] += '&lr=lang_' + language
+            params['url'] += '&hl=' + language
 
    return params
 
@@ -99,11 +99,14 @@ def response(resp):
        if re.match(r"^([1-9]|[1-2][0-9]|3[0-1]) [A-Z][a-z]{2} [0-9]{4} \.\.\. ", content):
            date_pos = content.find('...') + 4
            date_string = content[0:date_pos - 5]
-            published_date = parser.parse(date_string, dayfirst=True)
 
            # fix content string
            content = content[date_pos:]
 
+            try:
+                published_date = parser.parse(date_string, dayfirst=True)
+            except ValueError:
+                pass
+
        # check if search result starts with something like: "5 days ago ... "
        elif re.match(r"^[0-9]+ days? ago \.\.\. ", content):
            date_pos = content.find('...') + 4
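The reason for wrapping the call in try/except is that dateutil raises ValueError on strings it cannot parse, so one bad date prefix no longer aborts parsing of the whole result page. A minimal, self-contained illustration (the sample strings are made up):

.. code:: python

  # Sketch of the guarded parsing pattern introduced above.
  from dateutil import parser

  for date_string in ['3 Mar 2020', 'not a date']:
      try:
          published_date = parser.parse(date_string, dayfirst=True)
          print(date_string, '->', published_date.date())
      except ValueError:
          print(date_string, '-> skipped')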
@@ -3,9 +3,11 @@
 # this file is generated automatically by utils/update_search_languages.py
 
 language_codes = (
+    (u"af-NA", u"Afrikaans", u"", u"Afrikaans"),
    (u"ar-SA", u"العربية", u"", u"Arabic"),
+    (u"be-BY", u"Беларуская", u"", u"Belarusian"),
    (u"bg-BG", u"Български", u"", u"Bulgarian"),
-    (u"ca-ES", u"Català", u"", u"Catalan"),
+    (u"ca-AD", u"Català", u"", u"Catalan"),
    (u"cs-CZ", u"Čeština", u"", u"Czech"),
    (u"da-DK", u"Dansk", u"", u"Danish"),
    (u"de", u"Deutsch", u"", u"German"),
@@ -17,11 +19,15 @@ language_codes = (
    (u"en-AU", u"English", u"Australia", u"English"),
    (u"en-CA", u"English", u"Canada", u"English"),
    (u"en-GB", u"English", u"United Kingdom", u"English"),
+    (u"en-IE", u"English", u"Ireland", u"English"),
    (u"en-IN", u"English", u"India", u"English"),
-    (u"en-MY", u"English", u"Malaysia", u"English"),
+    (u"en-NZ", u"English", u"New Zealand", u"English"),
+    (u"en-PH", u"English", u"Philippines", u"English"),
+    (u"en-SG", u"English", u"Singapore", u"English"),
    (u"en-US", u"English", u"United States", u"English"),
    (u"es", u"Español", u"", u"Spanish"),
    (u"es-AR", u"Español", u"Argentina", u"Spanish"),
+    (u"es-CL", u"Español", u"Chile", u"Spanish"),
    (u"es-ES", u"Español", u"España", u"Spanish"),
    (u"es-MX", u"Español", u"México", u"Spanish"),
    (u"et-EE", u"Eesti", u"", u"Estonian"),
@@ -35,6 +41,7 @@ language_codes = (
    (u"he-IL", u"עברית", u"", u"Hebrew"),
    (u"hr-HR", u"Hrvatski", u"", u"Croatian"),
    (u"hu-HU", u"Magyar", u"", u"Hungarian"),
+    (u"hy-AM", u"Հայերեն", u"", u"Armenian"),
    (u"id-ID", u"Indonesia", u"", u"Indonesian"),
    (u"is-IS", u"Íslenska", u"", u"Icelandic"),
    (u"it-IT", u"Italiano", u"", u"Italian"),
@@ -42,7 +49,7 @@ language_codes = (
    (u"ko-KR", u"한국어", u"", u"Korean"),
    (u"lt-LT", u"Lietuvių", u"", u"Lithuanian"),
    (u"lv-LV", u"Latviešu", u"", u"Latvian"),
-    (u"ms-MY", u"Bahasa Melayu", u"", u"Malay"),
+    (u"ms-MY", u"Melayu", u"", u"Malay"),
    (u"nb-NO", u"Norsk Bokmål", u"", u"Norwegian Bokmål"),
    (u"nl", u"Nederlands", u"", u"Dutch"),
    (u"nl-BE", u"Nederlands", u"België", u"Dutch"),
@@ -55,8 +62,9 @@ language_codes = (
    (u"ru-RU", u"Русский", u"", u"Russian"),
    (u"sk-SK", u"Slovenčina", u"", u"Slovak"),
    (u"sl-SI", u"Slovenščina", u"", u"Slovenian"),
-    (u"sr-RS", u"Српски", u"", u"Serbian"),
+    (u"sr-RS", u"Srpski", u"", u"Serbian"),
    (u"sv-SE", u"Svenska", u"", u"Swedish"),
+    (u"sw-KE", u"Kiswahili", u"", u"Swahili"),
    (u"th-TH", u"ไทย", u"", u"Thai"),
    (u"tr-TR", u"Türkçe", u"", u"Turkish"),
    (u"uk-UA", u"Українська", u"", u"Ukrainian"),
@@ -345,8 +345,8 @@ class ResultContainer(object):
            return 0
        return resultnum_sum / len(self._number_of_results)
 
-    def add_unresponsive_engine(self, engine_error):
-        self.unresponsive_engines.add(engine_error)
+    def add_unresponsive_engine(self, engine_name, error_type, error_message=None):
+        self.unresponsive_engines.add((engine_name, error_type, error_message))
 
    def add_timing(self, engine_name, engine_time, page_load_time):
        self.timings.append({
@@ -127,11 +127,7 @@ def search_one_offline_request_safe(engine_name, query, request_params, result_container, start_time):
        logger.exception('engine {0} : invalid input : {1}'.format(engine_name, e))
    except Exception as e:
        record_offline_engine_stats_on_error(engine, result_container, start_time)
-        result_container.add_unresponsive_engine((
-            engine_name,
-            u'{0}: {1}'.format(gettext('unexpected crash'), e),
-        ))
+        result_container.add_unresponsive_engine(engine_name, 'unexpected crash', str(e))
        logger.exception('engine {0} : exception : {1}'.format(engine_name, e))
 
@@ -186,24 +182,21 @@ def search_one_http_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit):
        engine.stats['errors'] += 1
 
        if (issubclass(e.__class__, requests.exceptions.Timeout)):
-            result_container.add_unresponsive_engine((engine_name, gettext('timeout')))
+            result_container.add_unresponsive_engine(engine_name, 'timeout')
            # requests timeout (connect or read)
            logger.error("engine {0} : HTTP requests timeout"
                         "(search duration : {1} s, timeout: {2} s) : {3}"
                         .format(engine_name, engine_time, timeout_limit, e.__class__.__name__))
            requests_exception = True
        elif (issubclass(e.__class__, requests.exceptions.RequestException)):
-            result_container.add_unresponsive_engine((engine_name, gettext('request exception')))
+            result_container.add_unresponsive_engine(engine_name, 'request exception')
            # other requests exception
            logger.exception("engine {0} : requests exception"
                             "(search duration : {1} s, timeout: {2} s) : {3}"
                             .format(engine_name, engine_time, timeout_limit, e))
            requests_exception = True
        else:
-            result_container.add_unresponsive_engine((
-                engine_name,
-                u'{0}: {1}'.format(gettext('unexpected crash'), e),
-            ))
+            result_container.add_unresponsive_engine(engine_name, 'unexpected crash', str(e))
            # others errors
            logger.exception('engine {0} : exception : {1}'.format(engine_name, e))
 
@@ -238,7 +231,7 @@ def search_multiple_requests(requests, result_container, start_time, timeout_limit):
        remaining_time = max(0.0, timeout_limit - (time() - start_time))
        th.join(remaining_time)
        if th.isAlive():
-            result_container.add_unresponsive_engine((th._engine_name, gettext('timeout')))
+            result_container.add_unresponsive_engine(th._engine_name, 'timeout')
            logger.warning('engine timeout: {0}'.format(th._engine_name))
 
@@ -219,11 +219,6 @@ engines:
    shortcut : et
    disabled : True
 
-  - name : faroo
-    engine : faroo
-    shortcut : fa
-    disabled : True
-
  - name : 1x
    engine : www1x
    shortcut : 1x
@@ -686,6 +681,69 @@ engines:
    engine : vimeo
    shortcut : vm
 
+  - name : wikibooks
+    engine : mediawiki
+    shortcut : wb
+    categories : general
+    base_url : "https://{language}.wikibooks.org/"
+    number_of_results : 5
+    search_type : text
+    disabled : True
+
+  - name : wikinews
+    engine : mediawiki
+    shortcut : wn
+    categories : news
+    base_url : "https://{language}.wikinews.org/"
+    number_of_results : 5
+    search_type : text
+    disabled : True
+
+  - name : wikiquote
+    engine : mediawiki
+    shortcut : wq
+    categories : general
+    base_url : "https://{language}.wikiquote.org/"
+    number_of_results : 5
+    search_type : text
+    disabled : True
+
+  - name : wikisource
+    engine : mediawiki
+    shortcut : ws
+    categories : general
+    base_url : "https://{language}.wikisource.org/"
+    number_of_results : 5
+    search_type : text
+    disabled : True
+
+  - name : wiktionary
+    engine : mediawiki
+    shortcut : wt
+    categories : general
+    base_url : "https://{language}.wiktionary.org/"
+    number_of_results : 5
+    search_type : text
+    disabled : True
+
+  - name : wikiversity
+    engine : mediawiki
+    shortcut : wv
+    categories : general
+    base_url : "https://{language}.wikiversity.org/"
+    number_of_results : 5
+    search_type : text
+    disabled : True
+
+  - name : wikivoyage
+    engine : mediawiki
+    shortcut : wy
+    categories : general
+    base_url : "https://{language}.wikivoyage.org/"
+    number_of_results : 5
+    search_type : text
+    disabled : True
+
  - name : wolframalpha
    shortcut : wa
    # You can use the engine using the official stable API, but you need an API key
@@ -763,6 +821,20 @@ engines:
    engine : seedpeer
    categories: files, music, videos
 
+  - name : rubygems
+    shortcut: rbg
+    engine: xpath
+    paging : True
+    search_url : https://rubygems.org/search?page={pageno}&query={query}
+    results_xpath: /html/body/main/div/a[@class="gems__gem"]
+    url_xpath : ./@href
+    title_xpath : ./span/h2
+    content_xpath : ./span/p
+    suggestion_xpath : /html/body/main/div/div[@class="search__suggestions"]/p/a
+    first_page_num : 1
+    categories: it
+    disabled : True
+
 # - name : yacy
 #   engine : yacy
 #   shortcut : ya
@@ -1,7 +1,9 @@
-$(document).ready(function() {
-    var win = $(window);
-    win.scroll(function() {
-        if ($(document).height() - win.height() - win.scrollTop() < 150) {
+function hasScrollbar() {
+    var root = document.compatMode=='BackCompat'? document.body : document.documentElement;
+    return root.scrollHeight>root.clientHeight;
+}
+
+function loadNextPage() {
    var formData = $('#pagination form:last').serialize();
    if (formData) {
        $('#pagination').html('<div class="loading-spinner"></div>');
@@ -16,9 +18,23 @@ $(document).ready(function() {
            $('#main_results').append('<hr/>');
            $('#main_results').append(body.find('.result'));
            $('#main_results').append(body.find('#pagination'));
+            if(!hasScrollbar()) {
+                loadNextPage();
+            }
        }
    });
    }
 }
+
+$(document).ready(function() {
+    var win = $(window);
+    if(!hasScrollbar()) {
+        loadNextPage();
+    }
+    win.scroll(function() {
+        $("#pagination button").css("visibility", "hidden");
+        if ($(document).height() - win.height() - win.scrollTop() < 150) {
+            loadNextPage();
+        }
+    });
 });
 });
@@ -1 +1 @@
-node_modules/
+/node_modules
@@ -13,7 +13,7 @@ module.exports = function(grunt) {
    },
    uglify: {
      options: {
-        banner: '/*! oscar/searx.min.js | <%= grunt.template.today("dd-mm-yyyy") %> | https://github.com/asciimoo/searx */\n'
+        banner: '/*! oscar/searx.min.js | <%= grunt.template.today("dd-mm-yyyy") %> | <%= process.env.GIT_URL %> */\n'
      },
      dist: {
        files: {
@@ -38,7 +38,6 @@ module.exports = function(grunt) {
      development: {
        options: {
          paths: ["less/pointhi", "less/logicodev", "less/logicodev-dark"]
-          //banner: '/*! less/oscar/oscar.css | <%= grunt.template.today("dd-mm-yyyy") %> | https://github.com/asciimoo/searx */\n'
        },
        files: {"css/pointhi.css": "less/pointhi/oscar.less",
                "css/logicodev.css": "less/logicodev-dark/oscar.less",
@@ -47,7 +46,6 @@ module.exports = function(grunt) {
      production: {
        options: {
          paths: ["less/pointhi", "less/logicodev", "less/logicodev-dark"],
-          //banner: '/*! less/oscar/oscar.css | <%= grunt.template.today("dd-mm-yyyy") %> | https://github.com/asciimoo/searx */\n',
          cleancss: true
        },
        files: {"css/pointhi.min.css": "less/pointhi/oscar.less",
@@ -86,6 +86,9 @@ $(document).ready(function(){
            },
            source: searx.searchResults.ttAdapter()
        });
+        $('#q').bind('typeahead:selected', function(ev, suggestion) {
+            $("#search_form").submit();
+        });
    }
 });
 ;/**
@ -1,2 +1,2 @@
|
||||||
/*! oscar/searx.min.js | 06-08-2019 | https://github.com/asciimoo/searx */
|
/*! oscar/searx.min.js | 23-03-2020 | https://github.com/asciimoo/searx */
|
||||||
requirejs.config({baseUrl:"./static/themes/oscar/js",paths:{app:"../app"}}),window.searx=function(a){"use strict";var b=a.currentScript||function(){var b=a.getElementsByTagName("script");return b[b.length-1]}();return{autocompleter:"true"===b.getAttribute("data-autocompleter"),method:b.getAttribute("data-method")}}(document),searx.autocompleter&&(searx.searchResults=new Bloodhound({datumTokenizer:Bloodhound.tokenizers.obj.whitespace("value"),queryTokenizer:Bloodhound.tokenizers.whitespace,remote:"./autocompleter?q=%QUERY"}),searx.searchResults.initialize()),$(document).ready(function(){searx.autocompleter&&$("#q").typeahead(null,{name:"search-results",displayKey:function(a){return a},source:searx.searchResults.ttAdapter()})}),$(document).ready(function(){$("#q.autofocus").focus(),$(".select-all-on-click").click(function(){$(this).select()}),$(".btn-collapse").click(function(){var a=$(this).data("btn-text-collapsed"),b=$(this).data("btn-text-not-collapsed");""!==a&&""!==b&&($(this).hasClass("collapsed")?new_html=$(this).html().replace(a,b):new_html=$(this).html().replace(b,a),$(this).html(new_html))}),$(".btn-toggle .btn").click(function(){var a="btn-"+$(this).data("btn-class"),b=$(this).data("btn-label-default"),c=$(this).data("btn-label-toggled");""!==c&&($(this).hasClass("btn-default")?new_html=$(this).html().replace(b,c):new_html=$(this).html().replace(c,b),$(this).html(new_html)),$(this).toggleClass(a),$(this).toggleClass("btn-default")}),$(".media-loader").click(function(){var a=$(this).data("target"),b=$(a+" > iframe"),c=b.attr("src");void 0!==c&&!1!==c||b.attr("src",b.data("src"))}),$(".btn-sm").dblclick(function(){var a="btn-"+$(this).data("btn-class");$(this).hasClass("btn-default")?($(".btn-sm > input").attr("checked","checked"),$(".btn-sm > input").prop("checked",!0),$(".btn-sm").addClass(a),$(".btn-sm").addClass("active"),$(".btn-sm").removeClass("btn-default")):($(".btn-sm > input").attr("checked",""),$(".btn-sm > input").removeAttr("checked"),$(".btn-sm > input").checked=!1,$(".btn-sm").removeClass(a),$(".btn-sm").removeClass("active"),$(".btn-sm").addClass("btn-default"))})}),$(document).ready(function(){$(".searx_overpass_request").on("click",function(a){var b="https://overpass-api.de/api/interpreter?data=",c=b+"[out:json][timeout:25];(",d=");out meta;",e=$(this).data("osm-id"),f=$(this).data("osm-type"),g=$(this).data("result-table"),h="#"+$(this).data("result-table-loadicon"),i=["addr:city","addr:country","addr:housenumber","addr:postcode","addr:street"];if(e&&f&&g){g="#"+g;var j=null;switch(f){case"node":j=c+"node("+e+");"+d;break;case"way":j=c+"way("+e+");"+d;break;case"relation":j=c+"relation("+e+");"+d}if(j){$.ajax(j).done(function(a){if(a&&a.elements&&a.elements[0]){var b=a.elements[0],c=$(g).html();for(var d in b.tags)if(null===b.tags.name||-1==i.indexOf(d)){switch(c+="<tr><td>"+d+"</td><td>",d){case"phone":case"fax":c+='<a href="tel:'+b.tags[d].replace(/ /g,"")+'">'+b.tags[d]+"</a>";break;case"email":c+='<a href="mailto:'+b.tags[d]+'">'+b.tags[d]+"</a>";break;case"website":case"url":c+='<a href="'+b.tags[d]+'">'+b.tags[d]+"</a>";break;case"wikidata":c+='<a href="https://www.wikidata.org/wiki/'+b.tags[d]+'">'+b.tags[d]+"</a>";break;case"wikipedia":if(-1!=b.tags[d].indexOf(":")){c+='<a 
href="https://'+b.tags[d].substring(0,b.tags[d].indexOf(":"))+".wikipedia.org/wiki/"+b.tags[d].substring(b.tags[d].indexOf(":")+1)+'">'+b.tags[d]+"</a>";break}default:c+=b.tags[d]}c+="</td></tr>"}$(g).html(c),$(g).removeClass("hidden"),$(h).addClass("hidden")}}).fail(function(){$(h).html($(h).html()+'<p class="text-muted">could not load data!</p>')})}}$(this).off(a)}),$(".searx_init_map").on("click",function(a){var b=$(this).data("leaflet-target"),c=$(this).data("map-lon"),d=$(this).data("map-lat"),e=$(this).data("map-zoom"),f=$(this).data("map-boundingbox"),g=$(this).data("map-geojson");require(["leaflet-0.7.3.min"],function(a){f&&(southWest=L.latLng(f[0],f[2]),northEast=L.latLng(f[1],f[3]),map_bounds=L.latLngBounds(southWest,northEast)),L.Icon.Default.imagePath="./static/themes/oscar/img/map";var h=L.map(b),i="https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png",j='Map data © <a href="https://openstreetmap.org">OpenStreetMap</a> contributors',k=new L.TileLayer(i,{minZoom:1,maxZoom:19,attribution:j}),l="https://maps.wikimedia.org/osm-intl/{z}/{x}/{y}.png",m='Wikimedia maps beta | Maps data © <a href="https://openstreetmap.org">OpenStreetMap</a> contributors';new L.TileLayer(l,{minZoom:1,maxZoom:19,attribution:m});map_bounds?setTimeout(function(){h.fitBounds(map_bounds,{maxZoom:17})},0):c&&d&&(e?h.setView(new L.LatLng(d,c),e):h.setView(new L.LatLng(d,c),8)),h.addLayer(k);var n={"OSM Mapnik":k};L.control.layers(n).addTo(h),g&&L.geoJson(g).addTo(h)}),$(this).off(a)})});
+/*! oscar/searx.min.js | 23-03-2020 | https://github.com/asciimoo/searx */
requirejs.config({baseUrl:"./static/themes/oscar/js",paths:{app:"../app"}}),window.searx=function(a){"use strict";var b=a.currentScript||function(){var b=a.getElementsByTagName("script");return b[b.length-1]}();return{autocompleter:"true"===b.getAttribute("data-autocompleter"),method:b.getAttribute("data-method")}}(document),searx.autocompleter&&(searx.searchResults=new Bloodhound({datumTokenizer:Bloodhound.tokenizers.obj.whitespace("value"),queryTokenizer:Bloodhound.tokenizers.whitespace,remote:"./autocompleter?q=%QUERY"}),searx.searchResults.initialize()),$(document).ready(function(){searx.autocompleter&&($("#q").typeahead(null,{name:"search-results",displayKey:function(a){return a},source:searx.searchResults.ttAdapter()}),$("#q").bind("typeahead:selected",function(a,b){$("#search_form").submit()}))}),$(document).ready(function(){$("#q.autofocus").focus(),$(".select-all-on-click").click(function(){$(this).select()}),$(".btn-collapse").click(function(){var a=$(this).data("btn-text-collapsed"),b=$(this).data("btn-text-not-collapsed");""!==a&&""!==b&&($(this).hasClass("collapsed")?new_html=$(this).html().replace(a,b):new_html=$(this).html().replace(b,a),$(this).html(new_html))}),$(".btn-toggle .btn").click(function(){var a="btn-"+$(this).data("btn-class"),b=$(this).data("btn-label-default"),c=$(this).data("btn-label-toggled");""!==c&&($(this).hasClass("btn-default")?new_html=$(this).html().replace(b,c):new_html=$(this).html().replace(c,b),$(this).html(new_html)),$(this).toggleClass(a),$(this).toggleClass("btn-default")}),$(".media-loader").click(function(){var a=$(this).data("target"),b=$(a+" > iframe"),c=b.attr("src");void 0!==c&&c!==!1||b.attr("src",b.data("src"))}),$(".btn-sm").dblclick(function(){var a="btn-"+$(this).data("btn-class");$(this).hasClass("btn-default")?($(".btn-sm > input").attr("checked","checked"),$(".btn-sm > input").prop("checked",!0),$(".btn-sm").addClass(a),$(".btn-sm").addClass("active"),$(".btn-sm").removeClass("btn-default")):($(".btn-sm > input").attr("checked",""),$(".btn-sm > input").removeAttr("checked"),$(".btn-sm > input").checked=!1,$(".btn-sm").removeClass(a),$(".btn-sm").removeClass("active"),$(".btn-sm").addClass("btn-default"))})}),$(document).ready(function(){$(".searx_overpass_request").on("click",function(a){var b="https://overpass-api.de/api/interpreter?data=",c=b+"[out:json][timeout:25];(",d=");out meta;",e=$(this).data("osm-id"),f=$(this).data("osm-type"),g=$(this).data("result-table"),h="#"+$(this).data("result-table-loadicon"),i=["addr:city","addr:country","addr:housenumber","addr:postcode","addr:street"];if(e&&f&&g){g="#"+g;var j=null;switch(f){case"node":j=c+"node("+e+");"+d;break;case"way":j=c+"way("+e+");"+d;break;case"relation":j=c+"relation("+e+");"+d}if(j){$.ajax(j).done(function(a){if(a&&a.elements&&a.elements[0]){var b=a.elements[0],c=$(g).html();for(var d in b.tags)if(null===b.tags.name||i.indexOf(d)==-1){switch(c+="<tr><td>"+d+"</td><td>",d){case"phone":case"fax":c+='<a href="tel:'+b.tags[d].replace(/ /g,"")+'">'+b.tags[d]+"</a>";break;case"email":c+='<a href="mailto:'+b.tags[d]+'">'+b.tags[d]+"</a>";break;case"website":case"url":c+='<a href="'+b.tags[d]+'">'+b.tags[d]+"</a>";break;case"wikidata":c+='<a href="https://www.wikidata.org/wiki/'+b.tags[d]+'">'+b.tags[d]+"</a>";break;case"wikipedia":if(b.tags[d].indexOf(":")!=-1){c+='<a 
href="https://'+b.tags[d].substring(0,b.tags[d].indexOf(":"))+".wikipedia.org/wiki/"+b.tags[d].substring(b.tags[d].indexOf(":")+1)+'">'+b.tags[d]+"</a>";break}default:c+=b.tags[d]}c+="</td></tr>"}$(g).html(c),$(g).removeClass("hidden"),$(h).addClass("hidden")}}).fail(function(){$(h).html($(h).html()+'<p class="text-muted">could not load data!</p>')})}}$(this).off(a)}),$(".searx_init_map").on("click",function(a){var b=$(this).data("leaflet-target"),c=$(this).data("map-lon"),d=$(this).data("map-lat"),e=$(this).data("map-zoom"),f=$(this).data("map-boundingbox"),g=$(this).data("map-geojson");require(["leaflet-0.7.3.min"],function(a){f&&(southWest=L.latLng(f[0],f[2]),northEast=L.latLng(f[1],f[3]),map_bounds=L.latLngBounds(southWest,northEast)),L.Icon.Default.imagePath="./static/themes/oscar/img/map";var h=L.map(b),i="https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png",j='Map data © <a href="https://openstreetmap.org">OpenStreetMap</a> contributors',k=new L.TileLayer(i,{minZoom:1,maxZoom:19,attribution:j}),l="https://maps.wikimedia.org/osm-intl/{z}/{x}/{y}.png",m='Wikimedia maps beta | Maps data © <a href="https://openstreetmap.org">OpenStreetMap</a> contributors';new L.TileLayer(l,{minZoom:1,maxZoom:19,attribution:m});map_bounds?setTimeout(function(){h.fitBounds(map_bounds,{maxZoom:17})},0):c&&d&&(e?h.setView(new L.LatLng(d,c),e):h.setView(new L.LatLng(d,c),8)),h.addLayer(k);var n={"OSM Mapnik":k};L.control.layers(n).addTo(h),g&&L.geoJson(g).addTo(h)}),$(this).off(a)})});

@@ -33,5 +33,8 @@ $(document).ready(function(){
             },
             source: searx.searchResults.ttAdapter()
         });
+        $('#q').bind('typeahead:selected', function(ev, suggestion) {
+            $("#search_form").submit();
+        });
     }
 });
@@ -0,0 +1 @@
+/node_modules
@@ -36,7 +36,7 @@ module.exports = function(grunt) {
     },
     uglify: {
       options: {
-        banner: '/*! simple/searx.min.js | <%= grunt.template.today("dd-mm-yyyy") %> | https://github.com/asciimoo/searx */\n',
+        banner: '/*! simple/searx.min.js | <%= grunt.template.today("dd-mm-yyyy") %> | <%= process.env.GIT_URL %> */\n',
         output: {
           comments: 'some'
         },
@@ -57,7 +57,7 @@ module.exports = function(grunt) {
     development: {
       options: {
         paths: ["less"],
-        banner: '/*! searx | <%= grunt.template.today("dd-mm-yyyy") %> | https://github.com/asciimoo/searx */\n'
+        banner: '/*! searx | <%= grunt.template.today("dd-mm-yyyy") %> | <%= process.env.GIT_URL %> */\n'
       },
       files: {
         "css/searx.css": "less/style.less",
@@ -73,7 +73,7 @@ module.exports = function(grunt) {
           compatibility: '*'
         })
       ],
-      banner: '/*! searx | <%= grunt.template.today("dd-mm-yyyy") %> | https://github.com/asciimoo/searx */\n'
+      banner: '/*! searx | <%= grunt.template.today("dd-mm-yyyy") %> | <%= process.env.GIT_URL %> */\n'
     },
     files: {
       "css/searx.min.css": "less/style.less",
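Both gruntfiles now take the banner's repository URL from process.env.GIT_URL instead of hard-coding the upstream project, so forks build assets that point at their own repository. A minimal sketch of a build wrapper that feeds the variable in; the gruntfile path and the use of the `origin` remote are assumptions, not part of the diff:

# Illustrative only: derive GIT_URL from the local checkout and run the theme build.
import os
import subprocess

env = dict(os.environ)
if "GIT_URL" not in env:
    env["GIT_URL"] = subprocess.run(
        ["git", "remote", "get-url", "origin"],
        capture_output=True, text=True, check=True,
    ).stdout.strip()

subprocess.run(
    ["grunt", "--gruntfile", "searx/static/themes/simple/gruntfile.js"],
    env=env, check=True,
)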
@@ -1,63 +1,97 @@
 <div{% if rtl %} dir="ltr"{% endif %}>
     <h1>About <a href="{{ url_for('index') }}">searx</a></h1>

-    <p>Searx is a <a href="https://en.wikipedia.org/wiki/Metasearch_engine">metasearch engine</a>, aggregating the results of other <a href="{{ url_for('preferences') }}">search engines</a> while not storing information about its users.
-    </p>
-    <h2>Why use searx?</h2>
-    <ul>
-        <li>searx may not offer you as personalised results as Google, but it doesn't generate a profile about you</li>
-        <li>searx doesn't care about what you search for, never shares anything with a third party, and it can't be used to compromise you</li>
-        <li>searx is free software, the code is 100% open and you can help to make it better. See more on <a href="https://github.com/asciimoo/searx">github</a></li>
-    </ul>
-    <p>If you do care about privacy, want to be a conscious user, or otherwise believe
-    in digital freedom, make searx your default search engine or run it on your own server</p>
-
-    <h2>Technical details - How does it work?</h2>
-
-    <p>Searx is a <a href="https://en.wikipedia.org/wiki/Metasearch_engine">metasearch engine</a>,
-    inspired by the <a href="https://beniz.github.io/seeks/">seeks project</a>.<br />
-    It provides basic privacy by mixing your queries with searches on other platforms without storing search data. Queries are made using a POST request on every browser (except chrome*). Therefore they show up in neither our logs, nor your url history. In case of Chrome* users there is an exception, searx uses the search bar to perform GET requests.<br />
-    Searx can be added to your browser's search bar; moreover, it can be set as the default search engine.
+    <p>
+      Searx is a <a href="https://en.wikipedia.org/wiki/Metasearch_engine">metasearch engine</a>,
+      aggregating the results of other <a href="{{ url_for('preferences') }}">search engines</a>
+      while not storing information about its users.
     </p>

-    <h2>How can I make it my own?</h2>
-
-    <p>Searx appreciates your concern regarding logs, so take the <a href="https://github.com/asciimoo/searx">code</a> and run it yourself! <br />Add your Searx to this <a href="https://searx.space/">list</a> to help other people reclaim their privacy and make the Internet freer!
-    <br />The more decentralized the Internet is, the more freedom we have!</p>
-
-    <h2>More about searx</h2>
-
+    <p>More about searx ...</p>
+
     <ul>
         <li><a href="https://github.com/asciimoo/searx">github</a></li>
-        <li><a href="https://www.ohloh.net/p/searx/">ohloh</a></li>
         <li><a href="https://twitter.com/Searx_engine">twitter</a></li>
         <li>IRC: #searx @ freenode (<a href="https://kiwiirc.com/client/irc.freenode.com/searx">webclient</a>)</li>
         <li><a href="https://www.transifex.com/projects/p/searx/">transifex</a></li>
     </ul>

     <hr />

-    <h2 id="faq">FAQ</h2>
+    <h2>Why use searx?</h2>

-    <h3>How to add to firefox?</h3>
-    <p><a href="#" onclick="window.external.AddSearchProvider(window.location.protocol + '//' + window.location.host + '{{ url_for('opensearch') }}');">Install</a> searx as a search engine on any version of Firefox! (javascript required)</p>
-
-    <h2 id="dev_faq">Developer FAQ</h2>
-
-    <h3>New engines?</h3>
     <ul>
-        <li>Edit your <a href="https://raw.github.com/asciimoo/searx/master/searx/settings.yml">settings.yml</a></li>
-        <li>Create your custom engine module, check the <a href="https://github.com/asciimoo/searx/blob/master/examples/basic_engine.py">example engine</a></li>
+      <li>
+        Searx may not offer you as personalised results as Google, but it doesn't
+        generate a profile about you.
+      </li>
+      <li>
+        Searx doesn't care about what you search for, never shares anything with a
+        third party, and it can't be used to compromise you.
+      </li>
+      <li>
+        Searx is free software, the code is 100% open and you can help to make it
+        better. See more on <a href="https://github.com/asciimoo/searx">github</a>.
+      </li>
     </ul>
-    <p>Don't forget to restart searx after config edit!</p>

-    <h3>Installation/WSGI support?</h3>
-    <p>See the <a href="https://github.com/asciimoo/searx/wiki/Installation">installation and setup</a> wiki page</p>
+    <p>
+      If you do care about privacy, want to be a conscious user, or otherwise
+      believe in digital freedom, make searx your default search engine or run it
+      on your own server
+    </p>

-    <h3>How to debug engines?</h3>
-    <p><a href="{{ url_for('stats') }}">Stats page</a> contains some useful data about the engines used.</p>
+    <h2>Technical details - How does it work?</h2>
+
+    <p>
+      Searx is a <a href="https://en.wikipedia.org/wiki/Metasearch_engine">metasearch engine</a>,
+      inspired by the <a href="https://beniz.github.io/seeks/">seeks project</a>.
+
+      It provides basic privacy by mixing your queries with searches on other
+      platforms without storing search data. Queries are made using a POST request
+      on every browser (except chrome*). Therefore they show up in neither our
+      logs, nor your url history. In case of Chrome* users there is an exception,
+      searx uses the search bar to perform GET requests.
+
+      Searx can be added to your browser's search bar; moreover, it can be set as
+      the default search engine.
+    </p>
+
+    <h2 id='add to browser'>How to set as the default search engine?</h2>
+
+    <dt>Firefox</dt>
+
+    <dd>
+      <a href="#" onclick="window.external.AddSearchProvider(window.location.protocol + '//' + window.location.host + '{{ url_for('opensearch') }}');">Install</a>
+      searx as a search engine on any version of Firefox! (javascript required)
+    </dd>
+
+    <h2>Where to find anonymous usage statistics of this instance ?</h2>
+
+    <p>
+      <a href="{{ url_for('stats') }}">Stats page</a> contains some useful data about the engines used.
+    </p>
+
+    <h2>How can I make it my own?</h2>
+
+    <p>
+      Searx appreciates your concern regarding logs, so take the
+      code from the <a href="https://github.com/asciimoo/searx">original searx project</a> and
+      run it yourself!
+    </p>
+    <p>
+      Add your searx instance to this <a href="{{ brand.PUBLIC_INSTANCES }}"> list
+      of public searx instances</a> to help other people reclaim their privacy and
+      make the Internet freer! The more decentralized the Internet is, the more
+      freedom we have!
+    </p>
+
+    <h2>Where are the docs & code of this instance?</h2>
+
+    <p>
+      See the <a href="{{ brand.DOCS_URL }}">{{ brand.DOCS_URL }}</a>
+      and <a href="{{ brand.GIT_URL }}">{{ brand.GIT_URL }}</a>
+    </p>
+
 </div>
 {% include "__common__/aboutextend.html" ignore missing %}
@@ -25,5 +25,29 @@
     {% if r.pubdate %}<pubDate>{{ r.pubdate }}</pubDate>{% endif %}
   </item>
 {% endfor %}
+{% if answers %}
+  {% for a in answers %}
+  <item>
+    <title>{{ a }}</title>
+    <type>answer</type>
+  </item>
+  {% endfor %}
+{% endif %}
+{% if corrections %}
+  {% for a in corrections %}
+  <item>
+    <title>{{ a }}</title>
+    <type>correction</type>
+  </item>
+  {% endfor %}
+{% endif %}
+{% if suggestions %}
+  {% for a in suggestions %}
+  <item>
+    <title>{{ a }}</title>
+    <type>suggestion</type>
+  </item>
+  {% endfor %}
+{% endif %}
 </channel>
 </rss>
@@ -85,10 +85,10 @@
             {% endblock %}
             <p class="text-muted">
                 <small>
-                    {{ _('Powered by') }} <a href="https://asciimoo.github.io/searx/">searx</a> - {{ searx_version }} - {{ _('a privacy-respecting, hackable metasearch engine') }}<br/>
-                    <a href="https://github.com/asciimoo/searx">{{ _('Source code') }}</a> |
-                    <a href="https://github.com/asciimoo/searx/issues">{{ _('Issue tracker') }}</a> |
-                    <a href="https://searx.space/">{{ _('Public instances') }}</a>
+                    {{ _('Powered by') }} <a href="{{ brand.DOCS_URL }}">searx</a> - {{ searx_version }} - {{ _('a privacy-respecting, hackable metasearch engine') }}<br/>
+                    <a href="{{ brand.GIT_URL }}">{{ _('Source code') }}</a> |
+                    <a href="{{ brand.ISSUE_URL }}">{{ _('Issue tracker') }}</a> |
+                    <a href="{{ brand.PUBLIC_INSTANCES }}">{{ _('Public instances') }}</a>
                 </small>
             </p>
         </div>
@@ -6,6 +6,7 @@
         <input type="search" name="q" class="form-control" id="q" placeholder="{{ _('Search for...') }}" aria-label="{{ _('Search for...') }}" autocomplete="off" value="{{ q }}" accesskey="s">
         <span class="input-group-btn">
             <button type="submit" class="btn btn-default" aria-label="{{ _('Start search') }}"><span class="hide_if_nojs">{{ icon('search') }}</span><span class="hidden active_if_nojs">{{ _('Start search') }}</span></button>
+            <button type="reset" class="btn btn-default" aria-label="{{ _('Clear search') }}"><span class="hide_if_nojs">{{ icon('remove') }}</span><span class="hidden active_if_nojs">{{ _('Clear') }}</span></button>
         </span>
     </div>
 </div>
@@ -9,6 +9,7 @@
         <input type="search" name="q" class="form-control input-lg autofocus" id="q" placeholder="{{ _('Search for...') }}" aria-label="{{ _('Search for...') }}" autocomplete="off" value="{{ q }}" accesskey="s">
         <span class="input-group-btn">
             <button type="submit" class="btn btn-default input-lg" aria-label="{{ _('Start search') }}"><span class="hide_if_nojs">{{ icon('search') }}</span><span class="hidden active_if_nojs">{{ _('Start search') }}</span></button>
+            <button type="reset" class="btn btn-default input-lg" aria-label="{{ _('Clear search') }}"><span class="hide_if_nojs">{{ icon('remove') }}</span><span class="hidden active_if_nojs">{{ _('Clear') }}</span></button>
         </span>
     </div>
     <div class="col-md-8 col-md-offset-2 advanced">
@@ -51,9 +51,9 @@
     <footer>
       <p>
         {{ _('Powered by') }} <a href="{{ url_for('about') }}">searx</a> - {{ searx_version }} - {{ _('a privacy-respecting, hackable metasearch engine') }}<br/>
-        <a href="https://github.com/asciimoo/searx">{{ _('Source code') }}</a> |
-        <a href="https://github.com/asciimoo/searx/issues">{{ _('Issue tracker') }}</a> |
-        <a href="https://searx.space/">{{ _('Public instances') }}</a>
+        <a href="{{ brand.GIT_URL }}">{{ _('Source code') }}</a> |
+        <a href="{{ brand.ISSUE_URL }}">{{ _('Issue tracker') }}</a> |
+        <a href="{{ brand.PUBLIC_INSTANCES }}">{{ _('Public instances') }}</a>
       </p>
     </footer>
     <!--[if gte IE 9]>-->
@@ -56,7 +56,9 @@ from flask import (
 from babel.support import Translations
 import flask_babel
 from flask_babel import Babel, gettext, format_date, format_decimal
+from flask.ctx import has_request_context
 from flask.json import jsonify
+from searx import brand
 from searx import settings, searx_dir, searx_debug
 from searx.exceptions import SearxParameterException
 from searx.engines import (
@@ -164,13 +166,11 @@ _flask_babel_get_translations = flask_babel.get_translations

 # monkey patch for flask_babel.get_translations
 def _get_translations():
-    translation_locale = request.form.get('use-translation')
-    if translation_locale:
+    if has_request_context() and request.form.get('use-translation') == 'oc':
         babel_ext = flask_babel.current_app.extensions['babel']
-        translation = Translations.load(next(babel_ext.translation_directories), 'oc')
-    else:
-        translation = _flask_babel_get_translations()
-    return translation
+        return Translations.load(next(babel_ext.translation_directories), 'oc')
+    return _flask_babel_get_translations()


 flask_babel.get_translations = _get_translations
@@ -178,9 +178,12 @@ flask_babel.get_translations = _get_translations

 def _get_browser_language(request, lang_list):
     for lang in request.headers.get("Accept-Language", "en").split(","):
+        if ';' in lang:
+            lang = lang.split(';')[0]
         locale = match_language(lang, lang_list, fallback=None)
         if locale is not None:
             return locale
+    return settings['search']['default_lang'] or 'en'


 @babel.localeselector
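The two added lines strip RFC 7231 quality weights (for example `en;q=0.8`) before a tag is matched, and the new final line falls back to the instance's configured default language instead of returning nothing. A standalone sketch of the same idea, with match_language() replaced by a plain membership test purely for illustration:

# Simplified stand-in for _get_browser_language(); the real code matches against
# searx's language list via match_language() instead of a plain lookup.
def pick_locale(accept_language, lang_list, default_lang="en"):
    for lang in accept_language.split(","):
        if ';' in lang:                  # drop quality weights such as ";q=0.8"
            lang = lang.split(';')[0]
        lang = lang.strip()
        if lang in lang_list:
            return lang
    return default_lang

print(pick_locale("de-DE,de;q=0.9,en;q=0.8", ["en", "de-DE", "fr-FR"]))  # -> de-DE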
@@ -424,6 +427,8 @@ def render(template_name, override_theme=None, **kwargs):

     kwargs['preferences'] = request.preferences

+    kwargs['brand'] = brand
+
     kwargs['scripts'] = set()
     for plugin in request.user_plugins:
         for script in plugin.js_dependencies:
@@ -621,25 +626,38 @@ def index():
                 'corrections': list(result_container.corrections),
                 'infoboxes': result_container.infoboxes,
                 'suggestions': list(result_container.suggestions),
-                'unresponsive_engines': list(result_container.unresponsive_engines)},
+                'unresponsive_engines': __get_translated_errors(result_container.unresponsive_engines)},  # noqa
                 default=lambda item: list(item) if isinstance(item, set) else item),
             mimetype='application/json')
     elif output_format == 'csv':
         csv = UnicodeWriter(StringIO())
-        keys = ('title', 'url', 'content', 'host', 'engine', 'score')
+        keys = ('title', 'url', 'content', 'host', 'engine', 'score', 'type')
         csv.writerow(keys)
         for row in results:
             row['host'] = row['parsed_url'].netloc
+            row['type'] = 'result'
+            csv.writerow([row.get(key, '') for key in keys])
+        for a in result_container.answers:
+            row = {'title': a, 'type': 'answer'}
+            csv.writerow([row.get(key, '') for key in keys])
+        for a in result_container.suggestions:
+            row = {'title': a, 'type': 'suggestion'}
+            csv.writerow([row.get(key, '') for key in keys])
+        for a in result_container.corrections:
+            row = {'title': a, 'type': 'correction'}
             csv.writerow([row.get(key, '') for key in keys])
         csv.stream.seek(0)
         response = Response(csv.stream.read(), mimetype='application/csv')
-        cont_disp = 'attachment;Filename=searx_-_{0}.csv'.format(search_query.query)
+        cont_disp = 'attachment;Filename=searx_-_{0}.csv'.format(search_query.query.decode('utf-8'))
         response.headers.add('Content-Disposition', cont_disp)
         return response
     elif output_format == 'rss':
         response_rss = render(
             'opensearch_response_rss.xml',
             results=results,
+            answers=result_container.answers,
+            corrections=result_container.corrections,
+            suggestions=result_container.suggestions,
             q=request.form['q'],
             number_of_results=number_of_results,
             base_url=get_base_url(),
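With the extra `type` column, answers, suggestions and corrections travel in the same CSV export as ordinary results. A small illustration of the resulting layout; every value below is invented:

# Rows as they would appear in the exported CSV after this change.
import csv
import sys

keys = ('title', 'url', 'content', 'host', 'engine', 'score', 'type')
writer = csv.writer(sys.stdout)
writer.writerow(keys)
writer.writerow(('First Test', 'http://first.test.xyz', 'first test content',
                 'first.test.xyz', 'startpage', '', 'result'))
writer.writerow(('42', '', '', '', '', '', 'answer'))
writer.writerow(('first test', '', '', '', '', '', 'suggestion'))
writer.writerow(('first tests', '', '', '', '', '', 'correction'))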
@@ -676,7 +694,7 @@ def index():
         corrections=correction_urls,
         infoboxes=result_container.infoboxes,
         paging=result_container.paging,
-        unresponsive_engines=result_container.unresponsive_engines,
+        unresponsive_engines=__get_translated_errors(result_container.unresponsive_engines),
         current_language=match_language(search_query.lang,
                                         LANGUAGE_CODES,
                                         fallback=request.preferences.get_value("language")),
@@ -687,6 +705,16 @@ def index():
     )


+def __get_translated_errors(unresponsive_engines):
+    translated_errors = []
+    for unresponsive_engine in unresponsive_engines:
+        error_msg = gettext(unresponsive_engine[1])
+        if unresponsive_engine[2]:
+            error_msg = "{} {}".format(error_msg, unresponsive_engine[2])
+        translated_errors.append((unresponsive_engine[0], error_msg))
+    return translated_errors
+
+
 @app.route('/about', methods=['GET'])
 def about():
     """Render about page"""
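__get_translated_errors keeps the engine name, localizes the error message and appends the tuple's third element when it is set. A standalone sketch with gettext stubbed out; the example tuples and the reading of the third element as extra detail (such as an HTTP status) are assumptions:

def _gettext(message):
    return message  # the real code localizes via flask_babel.gettext

def get_translated_errors(unresponsive_engines):
    translated = []
    for engine_name, error, extra in unresponsive_engines:
        msg = _gettext(error)
        if extra:
            msg = "{} {}".format(msg, extra)
        translated.append((engine_name, msg))
    return translated

print(get_translated_errors([('wikipedia', 'timeout', ''),
                             ('bing', 'HTTP error', '403')]))
# [('wikipedia', 'timeout'), ('bing', 'HTTP error 403')]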
@@ -939,24 +967,37 @@ def clear_cookies():

 @app.route('/config')
 def config():
-    return jsonify({'categories': list(categories.keys()),
-                    'engines': [{'name': name,
+    """Return configuration in JSON format."""
+    _engines = []
+    for name, engine in engines.items():
+        if not request.preferences.validate_token(engine):
+            continue
+
+        supported_languages = engine.supported_languages
+        if isinstance(engine.supported_languages, dict):
+            supported_languages = list(engine.supported_languages.keys())
+
+        _engines.append({
+            'name': name,
             'categories': engine.categories,
             'shortcut': engine.shortcut,
             'enabled': not engine.disabled,
             'paging': engine.paging,
             'language_support': engine.language_support,
-            'supported_languages':
-            list(engine.supported_languages.keys())
-            if isinstance(engine.supported_languages, dict)
-            else engine.supported_languages,
+            'supported_languages': supported_languages,
             'safesearch': engine.safesearch,
             'time_range_support': engine.time_range_support,
-            'timeout': engine.timeout}
-            for name, engine in engines.items() if request.preferences.validate_token(engine)],
-            'plugins': [{'name': plugin.name,
-                         'enabled': plugin.default_on}
-                        for plugin in plugins],
+            'timeout': engine.timeout
+        })
+
+    _plugins = []
+    for _ in plugins:
+        _plugins.append({'name': _.name, 'enabled': _.default_on})
+
+    return jsonify({
+        'categories': list(categories.keys()),
+        'engines': _engines,
+        'plugins': _plugins,
         'instance_name': settings['general']['instance_name'],
         'locales': settings['locales'],
         'default_locale': settings['ui']['default_locale'],
@@ -964,6 +1005,10 @@ def config():
         'safe_search': settings['search']['safe_search'],
         'default_theme': settings['ui']['default_theme'],
         'version': VERSION_STRING,
+        'brand': {
+            'GIT_URL': brand.GIT_URL,
+            'DOCS_URL': brand.DOCS_URL
+        },
         'doi_resolvers': [r for r in settings['doi_resolvers']],
         'default_doi_resolver': settings['default_doi_resolver'],
     })
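The /config endpoint therefore exposes the branding URLs alongside the engine and plugin lists. A quick way to inspect the new shape against a running instance; the localhost address and port are assumptions about the local setup:

import json
from urllib.request import urlopen

with urlopen("http://127.0.0.1:8888/config") as response:
    cfg = json.load(response)

print(cfg["brand"]["GIT_URL"])    # repository this instance was built from
print(cfg["brand"]["DOCS_URL"])   # documentation of this instance
print([engine["name"] for engine in cfg["engines"]][:5])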
setup.py

@@ -10,6 +10,7 @@ import sys
 # required to load VERSION_STRING constant
 sys.path.insert(0, './searx')
 from version import VERSION_STRING
+import brand

 with open('README.rst') as f:
     long_description = f.read()
@@ -25,6 +26,11 @@ setup(
     version=VERSION_STRING,
     description="A privacy-respecting, hackable metasearch engine",
     long_description=long_description,
+    url=brand.DOCS_URL,
+    project_urls={
+        "Code": brand.GIT_URL,
+        "Issue tracker": brand.ISSUE_URL
+    },
     classifiers=[
         "Development Status :: 4 - Beta",
         "Programming Language :: Python",
@@ -36,7 +42,6 @@ setup(
     keywords='metasearch searchengine search web http',
     author='Adam Tauber',
     author_email='asciimoo@gmail.com',
-    url='https://github.com/asciimoo/searx',
     license='GNU Affero General Public License',
     packages=find_packages(exclude=["tests*"]),
     zip_safe=False,
@@ -99,9 +99,9 @@ class ViewsTestCase(SearxTestCase):
         result = self.app.post('/', data={'q': 'test', 'format': 'csv'})

         self.assertEqual(
-            b'title,url,content,host,engine,score\r\n'
-            b'First Test,http://first.test.xyz,first test content,first.test.xyz,startpage,\r\n'  # noqa
-            b'Second Test,http://second.test.xyz,second test content,second.test.xyz,youtube,\r\n',  # noqa
+            b'title,url,content,host,engine,score,type\r\n'
+            b'First Test,http://first.test.xyz,first test content,first.test.xyz,startpage,,result\r\n'  # noqa
+            b'Second Test,http://second.test.xyz,second test content,second.test.xyz,youtube,,result\r\n',  # noqa
             result.data
         )
@@ -0,0 +1,5 @@
+export GIT_URL='https://github.com/asciimoo/searx'
+export ISSUE_URL='https://github.com/asciimoo/searx/issues'
+export SEARX_URL='https://searx.me'
+export DOCS_URL='https://asciimoo.github.io/searx'
+export PUBLIC_INSTANCES='https://searx.space'
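These exported variables are the single source for the brand.* values referenced above in setup.py, webapp.py and the templates. The generated searx/brand.py module itself is not part of this excerpt; a plausible sketch of what it boils down to, with the default values copied from the environment file above:

# Hypothetical searx/brand.py as the build would generate it from the environment
# above; the real file is produced by the build tooling, not written by hand.
GIT_URL = 'https://github.com/asciimoo/searx'
ISSUE_URL = 'https://github.com/asciimoo/searx/issues'
SEARX_URL = 'https://searx.me'
DOCS_URL = 'https://asciimoo.github.io/searx'
PUBLIC_INSTANCES = 'https://searx.space'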
@@ -24,7 +24,7 @@ NORMAL_REGEX = re.compile('^[0-9]+\.[0-9](\.[0-9])?$')
 #
 useragents = {
     "versions": (),
-    "os": ('Windows NT 10; WOW64',
+    "os": ('Windows NT 10.0; WOW64',
            'X11; Linux x86_64'),
     "ua": "Mozilla/5.0 ({os}; rv:{version}) Gecko/20100101 Firefox/{version}"
 }
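The fix only touches the platform token: real Firefox builds report "Windows NT 10.0", not "Windows NT 10". Formatting the template with one of the OS strings and an example version shows the resulting header; the version value is made up, the real versions tuple is filled elsewhere:

useragents = {
    "versions": (),
    "os": ('Windows NT 10.0; WOW64',
           'X11; Linux x86_64'),
    "ua": "Mozilla/5.0 ({os}; rv:{version}) Gecko/20100101 Firefox/{version}"
}

print(useragents["ua"].format(os=useragents["os"][0], version="73.0"))
# Mozilla/5.0 (Windows NT 10.0; WOW64; rv:73.0) Gecko/20100101 Firefox/73.0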
@@ -5,7 +5,7 @@
 # Output files (engines_languages.json and languages.py)
 # are written in current directory to avoid overwriting in case something goes wrong.

-from json import dump
+import json
 import io
 from sys import path
 from babel import Locale, UnknownLocaleError
@@ -22,19 +22,22 @@ languages_file = 'languages.py'

 # Fetchs supported languages for each engine and writes json file with those.
 def fetch_supported_languages():
+
     engines_languages = {}
-    for engine_name in engines:
+    names = list(engines)
+    names.sort()
+
+    for engine_name in names:
+        print("fetching languages of engine %s" % engine_name)
+
         if hasattr(engines[engine_name], 'fetch_supported_languages'):
-            try:
             engines_languages[engine_name] = engines[engine_name].fetch_supported_languages()
             if type(engines_languages[engine_name]) == list:
                 engines_languages[engine_name] = sorted(engines_languages[engine_name])
-            except Exception as e:
-                print(e)

     # write json file
-    with io.open(engines_languages_file, "w", encoding="utf-8") as f:
-        dump(engines_languages, f, ensure_ascii=False, indent=4, separators=(',', ': '))
+    with open(engines_languages_file, 'w', encoding='utf-8') as f:
+        json.dump(engines_languages, f, indent=2, sort_keys=True)

     return engines_languages
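Sorting the engine names and writing the JSON with sort_keys=True makes engines_languages.json deterministic, so regenerating it produces minimal diffs. A tiny illustration with made-up data:

import json

# Insertion order deliberately differs from alphabetical order.
engines_languages = {"wikipedia": ["de", "en"], "bing": ["en", "fr"]}
print(json.dumps(engines_languages, indent=2, sort_keys=True))
# keys come out as "bing", "wikipedia" regardless of how the dict was built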
@@ -5,6 +5,7 @@ PYOBJECTS ?=

 SITE_PYTHON ?=$(dir $(abspath $(lastword $(MAKEFILE_LIST))))site-python
 export PYTHONPATH := $(SITE_PYTHON):$$PYTHONPATH
+export PY_ENV PYDIST PYBUILD

 # folder where the python distribution takes place
 PYDIST ?= ./py_dist
@@ -12,6 +13,9 @@ PYDIST ?= ./py_dist
 PYBUILD ?= ./py_build
 # python version to use
 PY ?=3
+# $(PYTHON) points to the python interpreter from the OS! The python from the
+# OS is needed e.g. to create a virtualenv. For tasks inside the virtualenv the
+# interpeter from '$(PY_ENV_BIN)/python' is used.
 PYTHON ?= python$(PY)
 PIP ?= pip$(PY)
 PIP_INST ?= --user
@@ -59,7 +63,7 @@ python-help::
 	@echo ' pylint - run pylint *linting*'
 	@echo ' pytest - run *tox* test on python objects'
 	@echo ' pydebug - run tests within a PDB debug session'
-	@echo ' pybuild - build python packages'
+	@echo ' pybuild - build python packages ($(PYDIST) $(PYBUILD))'
 	@echo ' pyclean - clean intermediate python objects'
 	@echo ' targets using system users environment:'
 	@echo ' py[un]install - [un]install python objects in editable mode'
@@ -94,38 +98,6 @@ python-exe:
 	@:
 endif

-msg-pip-exe:
-	@echo "\n $(PIP) is required\n\n\
-  Make sure you have updated pip installed, grab it from\n\
-  https://pip.pypa.io or install it from your package\n\
-  manager. On debian based OS these requirements are\n\
-  installed by::\n\n\
-    sudo -H apt-get install python$(PY)-pip\n" | $(FMT)
-
-ifeq ($(shell which $(PIP) >/dev/null 2>&1; echo $$?), 1)
-pip-exe: msg-pip-exe
-	$(error The '$(PIP)' command was not found)
-else
-pip-exe:
-	@:
-endif
-
-PHONY += msg-virtualenv-exe virtualenv-exe
-msg-virtualenv-exe:
-	@echo "\n virtualenv is required\n\n\
-  Make sure you have an updated virtualenv installed, grab it from\n\
-  https://virtualenv.pypa.io/en/stable/installation/ or install it\n\
-  via pip by::\n\n\
-    pip install --user https://github.com/pypa/virtualenv/tarball/master\n" | $(FMT)
-
-ifeq ($(shell which virtualenv >/dev/null 2>&1; echo $$?), 1)
-virtualenv-exe: msg-virtualenv-exe
-	$(error The 'virtualenv' command was not found)
-else
-virtualenv-exe:
-	@:
-endif
-
 # ------------------------------------------------------------------------------
 # commands
 # ------------------------------------------------------------------------------
@@ -136,7 +108,7 @@ quiet_cmd_pyinstall = INSTALL $2

 # $2 path to folder with setup.py, this uses pip from pyenv (not OS!)
 quiet_cmd_pyenvinstall = PYENV install $2
-      cmd_pyenvinstall = $(PY_ENV_BIN)/pip $(PIP_VERBOSE) install -e $2$(PY_SETUP_EXTRAS)
+      cmd_pyenvinstall = $(PY_ENV_BIN)/python -m pip $(PIP_VERBOSE) install -e $2$(PY_SETUP_EXTRAS)

 # Uninstall the package. Since pip does not uninstall the no longer needed
 # depencies (something like autoremove) the depencies remain.
@@ -147,7 +119,7 @@ quiet_cmd_pyuninstall = UNINSTALL $2

 # $2 path to folder with setup.py, this uses pip from pyenv (not OS!)
 quiet_cmd_pyenvuninstall = PYENV uninstall $2
-      cmd_pyenvuninstall = $(PY_ENV_BIN)/pip $(PIP_VERBOSE) uninstall --yes $2
+      cmd_pyenvuninstall = $(PY_ENV_BIN)/python -m pip $(PIP_VERBOSE) uninstall --yes $2

 # $2 path to folder where virtualenv take place
 quiet_cmd_virtualenv = PYENV usage: $ source ./$@/bin/activate
@@ -160,10 +132,10 @@ quiet_cmd_virtualenv = PYENV usage: $ source ./$@/bin/activate

 # $2 path to lint
 quiet_cmd_pylint = LINT $@
-      cmd_pylint = $(PY_ENV_BIN)/pylint --rcfile $(PYLINT_RC) $2
+      cmd_pylint = $(PY_ENV_BIN)/python -m pylint --rcfile $(PYLINT_RC) $2

 quiet_cmd_pytest = TEST $@
-      cmd_pytest = $(PY_ENV_BIN)/tox -vv
+      cmd_pytest = $(PY_ENV_BIN)/python -m tox -vv

 # setuptools, pip, easy_install its a mess full of cracks, a documentation hell
 # and broken by design ... all sucks, I really, really hate all this ... aaargh!
@@ -192,14 +164,14 @@ quiet_cmd_pytest = TEST $@
 # .. _installing: https://packaging.python.org/tutorials/installing-packages/
 #
 quiet_cmd_pybuild = BUILD $@
-      cmd_pybuild = $(PY_ENV_BIN)/$(PYTHON) setup.py \
+      cmd_pybuild = $(PY_ENV_BIN)/python setup.py \
        sdist -d $(PYDIST) \
        bdist_wheel --bdist-dir $(PYBUILD) -d $(PYDIST)

 quiet_cmd_pyclean = CLEAN $@
 # remove 'build' folder since bdist_wheel does not care the --bdist-dir
      cmd_pyclean = \
-       rm -rf $(PYDIST) $(PYBUILD) ./local ./.tox *.egg-info ;\
+       rm -rf $(PYDIST) $(PYBUILD) $(PY_ENV) ./.tox *.egg-info ;\
        find . -name '*.pyc' -exec rm -f {} + ;\
        find . -name '*.pyo' -exec rm -f {} + ;\
        find . -name __pycache__ -exec rm -rf {} +
@@ -230,15 +202,16 @@ PHONY += pyclean
 pyclean:
 	$(call cmd,pyclean)

-# to build *local* environment, python and virtualenv from the OS is needed!
+# to build *local* environment, python from the OS is needed!
 pyenv: $(PY_ENV)
-$(PY_ENV): virtualenv-exe python-exe
+$(PY_ENV): python-exe
 	$(call cmd,virtualenv,$(PY_ENV))
-	@$(PY_ENV_BIN)/pip install $(PIP_VERBOSE) -r requirements.txt
+	$(Q)$(PY_ENV_BIN)/python -m pip install $(PIP_VERBOSE) -U pip wheel pip setuptools
+	$(Q)$(PY_ENV_BIN)/python -m pip install $(PIP_VERBOSE) -r requirements.txt

 PHONY += pylint-exe
 pylint-exe: $(PY_ENV)
-	@$(PY_ENV_BIN)/pip $(PIP_VERBOSE) install pylint
+	@$(PY_ENV_BIN)/python -m pip $(PIP_VERBOSE) install pylint

 PHONY += pylint
 pylint: pylint-exe
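The pattern across these Makefile hunks is consistent: run tools through the environment's interpreter (`python -m pip`, `-m pylint`, `-m tox`) instead of relying on wrapper scripts, and bootstrap the environment without a separate virtualenv binary. Roughly the same bootstrap steps expressed as a small Python script; the local/py3 path mirrors the Makefile's layout, and using the stdlib venv module here is an assumption rather than something shown in this excerpt:

# Sketch of what the pyenv target does, using only the OS interpreter.
import subprocess
import sys

py_env = "local/py3"
py_env_bin = py_env + "/bin"

subprocess.run([sys.executable, "-m", "venv", py_env], check=True)
subprocess.run([py_env_bin + "/python", "-m", "pip", "install",
                "-U", "pip", "wheel", "setuptools"], check=True)
subprocess.run([py_env_bin + "/python", "-m", "pip", "install",
                "-r", "requirements.txt"], check=True)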
@@ -262,15 +235,15 @@ pydebug: $(PY_ENV)

 # install / uninstall python objects into virtualenv (PYENV)
 pyenv-install: $(PY_ENV)
-	@$(PY_ENV_BIN)/pip $(PIP_VERBOSE) install -e .
+	@$(PY_ENV_BIN)/python -m pip $(PIP_VERBOSE) install -e .
 	@echo "ACTIVATE $(call normpath,$(PY_ENV_ACT)) "

 pyenv-uninstall: $(PY_ENV)
-	@$(PY_ENV_BIN)/pip $(PIP_VERBOSE) uninstall --yes .
+	@$(PY_ENV_BIN)/python -m pip $(PIP_VERBOSE) uninstall --yes .

 # runs python interpreter from ./local/py<N>/bin/python
 pyenv-python: pyenv-install
-	cd ./local; ../$(PY_ENV_BIN)/python -i
+	$(PY_ENV_BIN)/python -i

 # With 'dependency_links=' setuptools supports dependencies on packages hosted
 # on other reposetories then PyPi, see "Packages Not On PyPI" [1]. The big
@@ -284,7 +257,7 @@ pyenv-python: pyenv-install

 # https://github.com/pypa/twine
 PHONY += upload-pypi
-upload-pypi: pyclean pybuild
+upload-pypi: pyclean pyenvinstall pybuild
 	@$(PY_ENV_BIN)/twine upload $(PYDIST)/*

 .PHONY: $(PHONY)