mirror of https://github.com/searx/searx
Merge branch 'master' into master
Commit 7463250e76
@@ -40,8 +40,7 @@ jobs:
env:
FETCH_SCRIPT: ./searx_extra/update/${{ matrix.fetch }}
run: |
source local/py3/bin/activate
$FETCH_SCRIPT
V=1 manage pyenv.cmd python "$FETCH_SCRIPT"

- name: Create Pull Request
id: cpr
@@ -59,7 +59,7 @@ jobs:
- name: Install node dependencies
run: make V=1 node.env
- name: Build themes
run: make V=1 themes
run: make V=1 themes.all

documentation:
name: Documentation
@@ -77,14 +77,15 @@ jobs:
python-version: '3.9'
architecture: 'x64'
- name: Build documentation
run: SEARX_DEBUG=1 make V=1 ci-gh-pages
run: |
make V=1 docs.clean docs.html
- name: Deploy
if: github.ref == 'refs/heads/master'
uses: JamesIves/github-pages-deploy-action@3.7.1
with:
GITHUB_TOKEN: ${{ github.token }}
BRANCH: gh-pages
FOLDER: build/gh-pages
FOLDER: dist/docs
CLEAN: true # Automatically remove deleted files from the deploy branch

dockers:
@@ -1,4 +1,4 @@
Searx was created by Adam Tauber and is maintained by Adam Tauber, Alexandre Flament, Noémi Ványi, @pofilo, Gaspard d'Hautefeuille and Markus Heiser.
Searx was created by Adam Tauber and is maintained by Adam Tauber, Noémi Ványi, @pofilo, Gaspard d'Hautefeuille and Émilien Devos.

Major contributing authors:

@@ -12,8 +12,9 @@ Major contributing authors:
- @pofilo
- Markus Heiser @return42
- Émilien Devos @unixfox
- Alexandre Flament

People who have submitted patches/translates, reported bugs, consulted features or
People who have submitted patches/translations, reported bugs, consulted features or
generally made searx better:

- Laszlo Hammerl
Dockerfile (23 changes)
@@ -4,26 +4,19 @@ EXPOSE 8080
VOLUME /etc/searx
VOLUME /var/log/uwsgi

ARG GIT_URL=unknown
ARG VERSION_GITCOMMIT=unknown
ARG SEARX_GIT_VERSION=unknown

ARG SEARX_GID=977
ARG SEARX_UID=977

RUN addgroup -g ${SEARX_GID} searx && \
adduser -u ${SEARX_UID} -D -h /usr/local/searx -s /bin/sh -G searx searx

ARG TIMESTAMP_SETTINGS=0
ARG TIMESTAMP_UWSGI=0
ARG LABEL_VCS_REF=
ARG LABEL_VCS_URL=

ENV INSTANCE_NAME=searx \
AUTOCOMPLETE= \
BASE_URL= \
MORTY_KEY= \
MORTY_URL=
MORTY_URL= \
SEARX_SETTINGS_PATH=/etc/searx/settings.yml \
UWSGI_SETTINGS_PATH=/etc/searx/uwsgi.ini

WORKDIR /usr/local/searx
@@ -60,6 +53,10 @@ RUN apk upgrade --no-cache \

COPY --chown=searx:searx . .

ARG TIMESTAMP_SETTINGS=0
ARG TIMESTAMP_UWSGI=0
ARG VERSION_GITCOMMIT=unknown

RUN su searx -c "/usr/bin/python3 -m compileall -q searx"; \
touch -c --date=@${TIMESTAMP_SETTINGS} searx/settings.yml; \
touch -c --date=@${TIMESTAMP_UWSGI} dockerfiles/uwsgi.ini; \
@@ -70,8 +67,12 @@ RUN su searx -c "/usr/bin/python3 -m compileall -q searx"; \
-o -name '*.svg' -o -name '*.ttf' -o -name '*.eot' \) \
-type f -exec gzip -9 -k {} \+ -exec brotli --best {} \+

# Keep this argument at the end since it change each time
# Keep these arguments at the end to prevent redundant layer rebuilds
ARG LABEL_DATE=
ARG GIT_URL=unknown
ARG SEARX_GIT_VERSION=unknown
ARG LABEL_VCS_REF=
ARG LABEL_VCS_URL=
LABEL maintainer="searx <${GIT_URL}>" \
description="A privacy-respecting, hackable metasearch engine." \
version="${SEARX_GIT_VERSION}" \
Makefile (320 changes)
@@ -1,265 +1,107 @@
# -*- coding: utf-8; mode: makefile-gmake -*-
# SPDX-License-Identifier: AGPL-3.0-or-later

.DEFAULT_GOAL=help
export MTOOLS=./manage

include utils/makefile.include

PYOBJECTS = searx
DOC = docs
PY_SETUP_EXTRAS ?= \[test\]
PYLINT_SEARX_DISABLE_OPTION := I,C,R,W0105,W0212,W0511,W0603,W0613,W0621,W0702,W0703,W1401,E1136
PYLINT_ADDITIONAL_BUILTINS_FOR_ENGINES := supported_languages,language_aliases

include utils/makefile.python
include utils/makefile.sphinx

all: clean install

PHONY += help-min help-all help
PHONY += help

help: help-min
@echo ''
@echo 'to get more help: make help-all'
help:
@./manage --help
@echo '----'
@echo 'run - run developer instance'
@echo 'install - developer install of searx into virtualenv'
@echo 'uninstall - uninstall developer installation'
@echo 'clean - clean up working tree'
@echo 'search.checker - check search engines'
@echo 'test - run shell & CI tests'
@echo 'test.sh - test shell scripts'
@echo 'ci.test - run CI tests'

help-min:
@echo ' test - run developer tests'
@echo ' docs - build documentation'
@echo ' docs-live - autobuild HTML documentation while editing'
@echo ' run - run developer instance'
@echo ' install - developer install (./local)'
@echo ' uninstall - uninstall (./local)'
@echo ' gh-pages - build docs & deploy on gh-pages branch'
@echo ' clean - drop builds and environments'
@echo ' project - re-build generic files of the searx project'
@echo ' buildenv - re-build environment files (aka brand)'
@echo ' themes - re-build build the source of the themes'
@echo ' docker - build Docker image'
@echo ' node.env - download & install npm dependencies locally'
@echo ''
@$(MAKE) -e -s make-help

help-all: help-min
@echo ''
@$(MAKE) -e -s python-help
@echo ''
@$(MAKE) -e -s docs-help

PHONY += install
install: buildenv pyenvinstall

PHONY += uninstall
uninstall: pyenvuninstall

PHONY += clean
clean: pyclean docs-clean node.clean test.clean
$(call cmd,common_clean)

PHONY += run
run: buildenv pyenvinstall
run: install
$(Q) ( \
sleep 2 ; \
xdg-open http://127.0.0.1:8888/ ; \
) &
SEARX_DEBUG=1 $(PY_ENV)/bin/python ./searx/webapp.py
SEARX_DEBUG=1 ./manage pyenv.cmd python ./searx/webapp.py

# docs
# ----
PHONY += install uninstall
install uninstall:
$(Q)./manage pyenv.$@

sphinx-doc-prebuilds:: buildenv pyenvinstall prebuild-includes
PHONY += clean
clean: py.clean docs.clean node.clean test.clean
$(Q)./manage build_msg CLEAN "common files"
$(Q)find . -name '*.orig' -exec rm -f {} +
$(Q)find . -name '*.rej' -exec rm -f {} +
$(Q)find . -name '*~' -exec rm -f {} +
$(Q)find . -name '*.bak' -exec rm -f {} +

PHONY += docs
docs: sphinx-doc-prebuilds
$(call cmd,sphinx,html,docs,docs)
PHONY += search.checker search.checker.%
search.checker: install
$(Q)./manage pyenv.cmd searx-checker -v

PHONY += docs-live
docs-live: sphinx-doc-prebuilds
$(call cmd,sphinx_autobuild,html,docs,docs)
search.checker.%: install
$(Q)./manage pyenv.cmd searx-checker -v "$(subst _, ,$(patsubst search.checker.%,%,$@))"

PHONY += prebuild-includes
prebuild-includes:
$(Q)mkdir -p $(DOCS_BUILD)/includes
$(Q)./utils/searx.sh doc | cat > $(DOCS_BUILD)/includes/searx.rst
$(Q)./utils/filtron.sh doc | cat > $(DOCS_BUILD)/includes/filtron.rst
$(Q)./utils/morty.sh doc | cat > $(DOCS_BUILD)/includes/morty.rst


$(GH_PAGES)::
@echo "doc available at --> $(DOCS_URL)"

# update project files
# --------------------

PHONY += project engines.languages useragents.update buildenv

project: buildenv useragents.update engines.languages

engines.languages: pyenvinstall
$(Q)echo "fetch languages .."
$(Q)$(PY_ENV_ACT); python ./searx_extra/update/update_languages.py
$(Q)echo "updated searx/data/engines_languages.json"
$(Q)echo "updated searx/languages.py"

useragents.update: pyenvinstall
$(Q)echo "fetch useragents .."
$(Q)$(PY_ENV_ACT); python ./searx_extra/update/update_firefox_version.py
$(Q)echo "updated searx/data/useragents.json with the most recent versions of Firefox."

buildenv: pyenv
$(Q)$(PY_ENV_ACT); SEARX_DEBUG=1 python utils/build_env.py

# node / npm
# ----------

node.env: buildenv
$(Q)./manage.sh npm_packages

node.clean:
$(Q)echo "CLEAN locally installed npm dependencies"
$(Q)rm -rf \
./node_modules \
./package-lock.json \
./searx/static/themes/oscar/package-lock.json \
./searx/static/themes/oscar/node_modules \
./searx/static/themes/simple/package-lock.json \
./searx/static/themes/simple/node_modules

# build themes
# ------------

PHONY += themes themes.oscar themes.simple
themes: buildenv themes.oscar themes.simple

quiet_cmd_lessc = LESSC $3
cmd_lessc = PATH="$$(npm bin):$$PATH" \
lessc --clean-css="--s1 --advanced --compatibility=ie9" "searx/static/$2" "searx/static/$3"

quiet_cmd_grunt = GRUNT $2
cmd_grunt = PATH="$$(npm bin):$$PATH" \
grunt --gruntfile "$2"

themes.oscar: node.env
$(Q)echo '[!] build oscar theme'
$(call cmd,grunt,searx/static/themes/oscar/gruntfile.js)

themes.simple: node.env
$(Q)echo '[!] build simple theme'
$(call cmd,grunt,searx/static/themes/simple/gruntfile.js)


# docker
# ------

PHONY += docker
docker: buildenv
$(Q)./manage.sh docker_build

docker.push: buildenv
$(Q)./manage.sh docker_build push

# gecko
# -----

PHONY += gecko.driver
gecko.driver:
$(PY_ENV_ACT); ./manage.sh install_geckodriver

# search.checker
# --------------

search.checker: pyenvinstall
$(Q)$(PY_ENV_ACT); searx-checker -v

ENGINE_TARGETS=$(patsubst searx/engines/%.py,search.checker.%,$(wildcard searx/engines/[!_]*.py))

$(ENGINE_TARGETS): pyenvinstall
$(Q)$(PY_ENV_ACT); searx-checker -v "$(subst _, ,$(patsubst search.checker.%,%,$@))"


# test
# ----

PHONY += test test.sh test.pylint test.pep8 test.unit test.coverage test.robot
test: buildenv test.pylint test.pep8 test.unit gecko.driver test.robot

PYLINT_FILES=\
searx/preferences.py \
searx/testing.py \
searx/engines/gigablast.py \
searx/engines/deviantart.py \
searx/engines/digg.py \
searx/engines/google.py \
searx/engines/google_news.py \
searx/engines/google_videos.py \
searx/engines/google_images.py \
searx/engines/mediathekviewweb.py \
searx/engines/solidtorrents.py \
searx/engines/solr.py \
searx/engines/google_scholar.py \
searx/engines/yahoo_news.py \
searx/engines/apkmirror.py \
searx_extra/update/update_external_bangs.py

test.pylint: pyenvinstall
$(call cmd,pylint,$(PYLINT_FILES))
$(call cmd,pylint,\
--disable=$(PYLINT_SEARX_DISABLE_OPTION) \
--additional-builtins=$(PYLINT_ADDITIONAL_BUILTINS_FOR_ENGINES) \
searx/engines \
)
$(call cmd,pylint,\
--disable=$(PYLINT_SEARX_DISABLE_OPTION) \
--ignore=searx/engines \
searx tests \
)

# ignored rules:
# E402 module level import not at top of file
# W503 line break before binary operator

# ubu1604: uses shellcheck v0.3.7 (from 04/2015), no longer supported!
PHONY += ci.test test test.sh
ci.test: test.pep8 test.pylint test.unit test.robot
test: ci.test
test.sh:
shellcheck -x -s bash utils/brand.env
shellcheck -x utils/lib.sh
shellcheck -x utils/filtron.sh
shellcheck -x utils/searx.sh
shellcheck -x utils/morty.sh
shellcheck -x utils/lxc.sh
shellcheck -x utils/lxc-searx.env
shellcheck -x .config.sh

test.pep8: pyenvinstall
@echo "TEST pycodestyle (formerly pep8)"
$(Q)$(PY_ENV_ACT); pycodestyle --exclude='searx/static, searx/languages.py, $(foreach f,$(PYLINT_FILES),$(f),)' \
--max-line-length=120 --ignore "E117,E252,E402,E722,E741,W503,W504,W605" searx tests

test.unit: pyenvinstall
@echo "TEST tests/unit"
$(Q)$(PY_ENV_ACT); python -m nose2 -s tests/unit

test.coverage: pyenvinstall
@echo "TEST unit test coverage"
$(Q)$(PY_ENV_ACT); \
python -m nose2 -C --log-capture --with-coverage --coverage searx -s tests/unit \
&& coverage report \
&& coverage html \

test.robot: pyenvinstall gecko.driver
@echo "TEST robot"
$(Q)$(PY_ENV_ACT); PYTHONPATH=. python searx/testing.py robot

test.clean:
@echo "CLEAN intermediate test stuff"
$(Q)rm -rf geckodriver.log .coverage coverage/
$(Q)shellcheck -x -s bash \
utils/brand.env \
./manage \
utils/lib.sh \
utils/filtron.sh \
utils/searx.sh \
utils/morty.sh \
utils/lxc.sh \
utils/lxc-searx.env \
.config.sh
$(Q)./manage build_msg TEST "$@ OK"


# travis
# ------
# wrap ./manage script

PHONY += ci.test
ci.test:
$(PY_ENV_BIN)/python -c "import yaml" || make clean
$(MAKE) test
MANAGE += buildenv
MANAGE += babel.compile
MANAGE += data.all data.languages data.useragents
MANAGE += docs.html docs.live docs.gh-pages docs.prebuild docs.clean
MANAGE += docker.build docker.push
MANAGE += gecko.driver
MANAGE += node.env node.clean
MANAGE += py.build py.clean
MANAGE += pyenv pyenv.install pyenv.uninstall
MANAGE += pypi.upload pypi.upload.test
MANAGE += test.pylint test.pep8 test.unit test.coverage test.robot test.clean
MANAGE += themes.all themes.oscar themes.simple themes.bootstrap

travis.codecov:
$(Q)$(PY_ENV_BIN)/python -m pip install codecov
PHONY += $(MANAGE)

.PHONY: $(PHONY)
$(MANAGE):
$(Q)$(MTOOLS) $@

# deprecated

PHONY += docs docs-clean docs-live docker themes

docs: docs.html
$(Q)./manage build_msg WARN $@ is deprecated use docs.html

docs-clean: docs.clean
$(Q)./manage build_msg WARN $@ is deprecated use docs.clean

docs-live: docs.live
$(Q)./manage build_msg WARN $@ is deprecated use docs.live

docker: docker.build
$(Q)./manage build_msg WARN $@ is deprecated use docker.build

themes: themes.all
$(Q)./manage build_msg WARN $@ is deprecated use themes.all
@@ -24,9 +24,6 @@ if [ -z "${BIND_ADDRESS}" ]; then
export BIND_ADDRESS="${DEFAULT_BIND_ADDRESS}"
fi

export UWSGI_SETTINGS_PATH=/etc/searx/uwsgi.ini
export SEARX_SETTINGS_PATH=/etc/searx/settings.yml

# Parse special command line
# see docs/admin/installation-docker.rst
# display the help message without the version
@@ -49,9 +49,9 @@ Build docs
- dvisvgm_

Most of the sphinx requirements are installed from :origin:`setup.py` and the
docs can be build from scratch with ``make docs``. For better math and image
processing additional packages are needed. The XeTeX_ needed not only for PDF
creation, its also needed for :ref:`math` when HTML output is build.
docs can be build from scratch with ``make docs.html``. For better math and
image processing additional packages are needed. The XeTeX_ needed not only for
PDF creation, its also needed for :ref:`math` when HTML output is build.

To be able to do :ref:`sphinx:math-support` without CDNs, the math are rendered
as images (``sphinx.ext.imgmath`` extension).

@@ -64,7 +64,7 @@ to ``imgmath``:
:start-after: # sphinx.ext.imgmath setup
:end-before: # sphinx.ext.imgmath setup END

If your docs build (``make docs``) shows warnings like this::
If your docs build (``make docs.html``) shows warnings like this::

WARNING: dot(1) not found, for better output quality install \
graphviz from https://www.graphviz.org
@@ -51,7 +51,7 @@ It's also possible to build searx from the embedded Dockerfile.

git clone https://github.com/searx/searx.git
cd searx
make docker
make docker.build


Public instance
@@ -61,7 +61,7 @@ from the login (*~/.profile*):

.. tip::

Open a second terminal for the configuration tasks and left the ``(searx)$``
Open a second terminal for the configuration tasks and leave the ``(searx)$``
terminal open for the tasks below.

@@ -39,13 +39,18 @@ install from ``root``, take into account that the scripts are creating a
these new created users do need read access to the clone of searx, which is not
the case if you clone into a folder below ``/root``.


.. code:: bash

$ cd ~/Downloads
$ git clone https://github.com/searx/searx searx
$ cd searx

.. sidebar:: further read

- :ref:`toolboxing`
- :ref:`update searx`
- :ref:`inspect searx`

**Install** :ref:`searx service <searx.sh>`

This installs searx as described in :ref:`installation basic`.
@@ -4,20 +4,56 @@
How to update
=============

How to update depends on the :ref:`installation` method. If you have used the
:ref:`installation scripts`, use ``update`` command from the scripts.

**Update** :ref:`searx service <searx.sh>`

.. code:: sh

sudo -H -u searx -i
(searx)$ git stash
(searx)$ git pull origin master
(searx)$ git stash apply
(searx)$ ./manage.sh update_packages
sudo -H ./utils/searx.sh update searx

Restart uwsgi:
**Update** :ref:`filtron reverse proxy <filtron.sh>`

.. tabs::
.. code:: sh

.. group-tab:: Ubuntu / debian
sudo -H ./utils/filtron.sh update filtron

.. code:: sh
**Update** :ref:`result proxy <morty.sh>`

.. code:: bash

$ sudo -H ./utils/morty.sh update morty

.. _inspect searx:

======================
How to inspect & debug
======================

.. sidebar:: further read

- :ref:`toolboxing`
- :ref:`Makefile`

How to debug depends on the :ref:`installation` method. If you have used the
:ref:`installation scripts`, use ``inspect`` command from the scripts.

**Inspect** :ref:`searx service <searx.sh>`

.. code:: sh

sudo -H ./utils/searx.sh inspect service

**Inspect** :ref:`filtron reverse proxy <filtron.sh>`

.. code:: sh

sudo -H ./utils/filtron.sh inspect service

**Inspect** :ref:`result proxy <morty.sh>`

.. code:: bash

$ sudo -H ./utils/morty.sh inspect service

sudo -H systemctl restart uwsgi
@@ -12,3 +12,4 @@ Blog
intro-offline
private-engines
command-line-engines
search-indexer-engines
@@ -31,7 +31,7 @@ might fail in some aspects we should not overlook.

The environment in which we run all our development processes matters!

The :ref:`makefile` and the :ref:`make pyenv` encapsulate a lot for us, but they
The :ref:`makefile` and the :ref:`make install` encapsulate a lot for us, but they
do not have access to all prerequisites. For example, there may have
dependencies on packages that are installed on the developer's desktop, but
usually are not preinstalled on a server or client system. Another examples
@@ -356,7 +356,7 @@ daily usage:
.. code:: sh

$ sudo -H ./utils/lxc.sh cmd searx-archlinux \
make docs
make docs.html

.. _blog-lxcdev-202006 abstract:

@@ -407,7 +407,7 @@ To get remarks from the suite of the archlinux container we can use:
...
[searx-archlinux] INFO: (eth0) filtron: http://10.174.184.156:4004/ http://10.174.184.156/searx
[searx-archlinux] INFO: (eth0) morty: http://10.174.184.156:3000/
[searx-archlinux] INFO: (eth0) docs-live: http://10.174.184.156:8080/
[searx-archlinux] INFO: (eth0) docs.live: http://10.174.184.156:8080/
[searx-archlinux] INFO: (eth0) IPv6: http://[fd42:573b:e0b3:e97e:216:3eff:fea5:9b65]
...
@@ -0,0 +1,114 @@
===============================
Query your local search engines
===============================

From now on, searx lets you query your locally running search engines. The following
ones are supported now:

* `Elasticsearch`_
* `Meilisearch`_
* `Solr`_

All of the engines above are added to ``settings.yml`` just commented out, as you have to
set the ``base_url`` for all of them.

Please note that if you are not using HTTPS to access these engines, you have to enable
HTTP requests by setting ``enable_http`` to ``True``.

Furthermore, if you do not want to expose these engines on a public instance, you can
still add them and limit the access by setting ``tokens`` as described in the `blog post about
private engines`_.
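As a minimal sketch of that token setup (hedged: ``my-secret-token`` is a placeholder, and the surrounding keys are borrowed from the examples below; see the private engines post for the authoritative description):

.. code:: yaml

   - name : elasticsearch
     engine : elasticsearch
     base_url : http://localhost:9200
     index : my-index
     # placeholder token -- only requests presenting it may use this engine
     tokens : [ 'my-secret-token' ]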
Configuring searx for search engines
====================================

Each search engine is powerful, capable of full-text search.

Elasticsearch
-------------

Elasticsearch supports numerous ways to query the data it is storing. At the moment
the engine supports the most popular search methods: ``match``, ``simple_query_string``, ``term`` and ``terms``.

If none of the methods fit your use case, you can select ``custom`` query type and provide the JSON payload
searx has to submit to Elasticsearch in ``custom_query_json``.

The following is an example configuration for an Elasticsearch instance with authentication
configured to read from ``my-index`` index.

.. code:: yaml

   - name : elasticsearch
     shortcut : es
     engine : elasticsearch
     base_url : http://localhost:9200
     username : elastic
     password : changeme
     index : my-index
     query_type : match
     enable_http : True

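The other listed methods plug in the same way. For instance, a hedged variant of the example above that switches ``query_type`` to ``simple_query_string``, with everything else unchanged:

.. code:: yaml

   - name : elasticsearch
     shortcut : es
     engine : elasticsearch
     base_url : http://localhost:9200
     index : my-index
     # one of the other documented methods; match/term/terms work alike
     query_type : simple_query_string
     enable_http : True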
Meilisearch
-----------

This search engine is aimed at individuals and small companies. It is designed for
small-scale (less than 10 million documents) data collections. E.g. it is great for storing
web pages you have visited and searching in the contents later.

The engine supports faceted search, so you can search in a subset of documents of the collection.
Furthermore, you can search in Meilisearch instances that require authentication by setting ``auth_token``.

Here is a simple example to query a Meilisearch instance:

.. code:: yaml

   - name : meilisearch
     engine : meilisearch
     shortcut: mes
     base_url : http://localhost:7700
     index : my-index
     enable_http: True

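For an instance that requires authentication, a sketch using the ``auth_token`` setting mentioned above (the key value is a placeholder):

.. code:: yaml

   - name : meilisearch
     engine : meilisearch
     shortcut: mes
     base_url : http://localhost:7700
     index : my-index
     # placeholder API key for the protected instance
     auth_token : my-master-key
     enable_http: True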
Solr
----

Solr is a popular search engine based on Lucene, just like Elasticsearch.
But instead of searching in indices, you can search in collections.

This is an example configuration for searching in the collection ``my-collection`` and
getting the results in ascending order.

.. code:: yaml

   - name : solr
     engine : solr
     shortcut : slr
     base_url : http://localhost:8983
     collection : my-collection
     sort : asc
     enable_http : True

Next steps
==========

The next step is to add support for various SQL databases.

Acknowledgement
===============

This development was sponsored by `Search and Discovery Fund`_ of `NLnet Foundation`_.

.. _blog post about private engines: private-engines.html#private-engines
.. _Elasticsearch: https://www.elastic.co/elasticsearch/
.. _Meilisearch: https://www.meilisearch.com/
.. _Solr: https://solr.apache.org/
.. _Search and Discovery Fund: https://nlnet.nl/discovery
.. _NLnet Foundation: https://nlnet.nl/

| Happy hacking.
| kvch // 2021.04.07 23:16
@@ -32,7 +32,7 @@
(${SERVICE_USER}) $ mkdir ${SERVICE_HOME}/local
(${SERVICE_USER}) $ wget --progress=bar -O \"${GO_TAR}\" \\
\"${GO_PKG_URL}\"
(${SERVICE_USER}) $ tar -C ${SERVICE_HOME}/local/go -xzf \"${GO_TAR}\"
(${SERVICE_USER}) $ tar -C ${SERVICE_HOME}/local -xzf \"${GO_TAR}\"
(${SERVICE_USER}) $ which go
${SERVICE_HOME}/local/go/bin/go

@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: AGPL-3.0-or-later

import sys, os
from sphinx_build_tools import load_sphinx_config
from pallets_sphinx_themes import ProjectLink

from searx import brand
@@ -128,9 +128,3 @@ html_show_sourcelink = False
latex_documents = [
(master_doc, "searx-{}.tex".format(VERSION_STRING), html_title, author, "manual")
]

# ------------------------------------------------------------------------------
# Since loadConfig overwrites settings from the global namespace, it has to be
# the last statement in the conf.py file
# ------------------------------------------------------------------------------
load_sphinx_config(globals())
@@ -132,11 +132,11 @@ Here is an example which makes a complete rebuild:

.. code:: sh

$ make docs-clean docs
$ make docs.clean docs.html
...
The HTML pages are in dist/docs.

.. _make docs-live:
.. _make docs.live:

live build
----------
@@ -144,19 +144,19 @@ live build
.. _sphinx-autobuild:
https://github.com/executablebooks/sphinx-autobuild/blob/master/README.md

.. sidebar:: docs-clean
.. sidebar:: docs.clean

It is recommended to assert a complete rebuild before deploying (use
``docs-clean``).
``docs.clean``).

Live build is like WYSIWYG. If you want to edit the documentation, its
recommended to use. The Makefile target ``docs-live`` builds the docs, opens
recommended to use. The Makefile target ``docs.live`` builds the docs, opens
URL in your favorite browser and rebuilds every time a reST file has been
changed.

.. code:: sh

$ make docs-live
$ make docs.live
...
The HTML pages are in dist/docs.
... Serving on http://0.0.0.0:8000
@@ -169,7 +169,7 @@ argument. E.g to find and use a free port, use:

.. code:: sh

$ SPHINXOPTS="--port 0" make docs-live
$ SPHINXOPTS="--port 0" make docs.live
...
... Serving on http://0.0.0.0:50593
...
@@ -180,21 +180,10 @@ argument. E.g to find and use a free port, use:
deploy on github.io
-------------------

To deploy documentation at :docs:`github.io <.>` use Makefile target
:ref:`make gh-pages`, which will builds the documentation, clones searx into a sub
folder ``gh-pages``, cleans it, copies the doc build into and runs all the
needed git add, commit and push:
To deploy documentation at :docs:`github.io <.>` use Makefile target :ref:`make
docs.gh-pages`, which builds the documentation and runs all the needed git add,
commit and push:

.. code:: sh

$ make docs-clean gh-pages
...
SPHINX docs --> file://<...>/dist/docs
The HTML pages are in dist/docs.
...
Cloning into 'gh-pages' ...
...
cd gh-pages; git checkout gh-pages >/dev/null
Switched to a new branch 'gh-pages'
...
doc available at --> https://searx.github.io/searx
$ make docs.clean docs.gh-pages
@@ -1,33 +1,33 @@
.. _makefile:

================
Makefile Targets
================
========
Makefile
========

.. _gnu-make: https://www.gnu.org/software/make/manual/make.html#Introduction

.. sidebar:: build environment

Before looking deeper at the targets, first read about :ref:`make pyenv`.
Before looking deeper at the targets, first read about :ref:`make
install`.

To install system requirements follow :ref:`buildhosts`.

With the aim to simplify development cycles, started with :pull:`1756` a
``Makefile`` based boilerplate was added. If you are not familiar with
Makefiles, we recommend to read gnu-make_ introduction.
All relevant build tasks are implemented in :origin:`manage.sh` and for CI or
IDE integration a small ``Makefile`` wrapper is available. If you are not
familiar with Makefiles, we recommend to read gnu-make_ introduction.

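The wrapper boils down to the delegation pattern in the rewritten ``Makefile`` shown earlier: known target names are collected in ``MANAGE`` and forwarded unchanged to ``./manage`` (a sketch, trimmed to a few targets):

.. code:: make

   export MTOOLS=./manage
   MANAGE += docs.html docs.clean themes.all

   # one rule serves every wrapped target, e.g. `make docs.html`
   # simply runs `./manage docs.html`
   $(MANAGE):
   	$(Q)$(MTOOLS) $@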
The usage is simple, just type ``make {target-name}`` to *build* a target.
Calling the ``help`` target gives a first overview (``make help``):

.. program-output:: bash -c "cd ..; make --no-print-directory help"


.. contents:: Contents
:depth: 2
:local:
:backlinks: entry

.. _make pyenv:
.. _make install:

Python environment
==================
@@ -36,31 +36,42 @@ Python environment

``source ./local/py3/bin/activate``

With Makefile we do no longer need to build up the virtualenv manually (as
described in the :ref:`devquickstart` guide). Jump into your git working tree
and release a ``make pyenv``:

.. code:: sh
We do no longer need to build up the virtualenv manually. Jump into your git
working tree and release a ``make install`` to get a virtualenv with a
*developer install* of searx (:origin:`setup.py`). ::

$ cd ~/searx-clone
$ make pyenv
PYENV usage: source ./local/py3/bin/activate
$ make install
PYENV [virtualenv] installing ./requirements*.txt into local/py3
...
PYENV OK
PYENV [install] pip install -e 'searx[test]'
...
Successfully installed argparse-1.4.0 searx
BUILDENV INFO:searx:load the default settings from ./searx/settings.yml
BUILDENV INFO:searx:Initialisation done
BUILDENV build utils/brand.env

With target ``pyenv`` a development environment (aka virtualenv) was build up in
``./local/py3/``. To make a *developer install* of searx (:origin:`setup.py`)
into this environment, use make target ``install``:

.. code:: sh
If you release ``make install`` multiple times the installation will only
rebuild if the sha256 sum of the *requirement files* fails. With other words:
the check fails if you edit the requirements listed in
:origin:`requirements-dev.txt` and :origin:`requirements.txt`). ::

$ make install
PYENV usage: source ./local/py3/bin/activate
PYENV using virtualenv from ./local/py3
PYENV install .

You have never to think about intermediate targets like ``pyenv`` or
``install``, the ``Makefile`` chains them as requisites. Just run your main
target.
PYENV OK
PYENV [virtualenv] requirements.sha256 failed
[virtualenv] - 6cea6eb6def9e14a18bf32f8a3e... ./requirements-dev.txt
[virtualenv] - 471efef6c73558e391c3adb35f4... ./requirements.txt
...
PYENV [virtualenv] installing ./requirements*.txt into local/py3
...
PYENV OK
PYENV [install] pip install -e 'searx[test]'
...
Successfully installed argparse-1.4.0 searx
BUILDENV INFO:searx:load the default settings from ./searx/settings.yml
BUILDENV INFO:searx:Initialisation done
BUILDENV build utils/brand.env

.. sidebar:: drop environment

@@ -68,10 +79,7 @@ target.
<make clean>` first.

If you think, something goes wrong with your ./local environment or you change
the :origin:`setup.py` file (or the requirements listed in
:origin:`requirements-dev.txt` and :origin:`requirements.txt`), you have to call
:ref:`make clean`.

the :origin:`setup.py` file, you have to call :ref:`make clean`.

.. _make run:

@@ -81,77 +89,44 @@ the :origin:`setup.py` file (or the requirements listed in
To get up a running a developer instance simply call ``make run``. This enables
*debug* option in :origin:`searx/settings.yml`, starts a ``./searx/webapp.py``
instance, disables *debug* option again and opens the URL in your favorite WEB
browser (:man:`xdg-open`):
browser (:man:`xdg-open`)::

.. code:: sh

$ make run
PYENV usage: source ./local/py3/bin/activate
PYENV install .
./local/py3/bin/python ./searx/webapp.py
...
INFO:werkzeug: * Running on http://127.0.0.1:8888/ (Press CTRL+C to quit)
...
$ make run
PYENV OK
SEARX_DEBUG=1 ./manage.sh pyenv.cmd python ./searx/webapp.py
...
INFO:werkzeug: * Running on http://127.0.0.1:8888/ (Press CTRL+C to quit)

.. _make clean:

``make clean``
==============

Drop all intermediate files, all builds, but keep sources untouched. Includes
target ``pyclean`` which drops ./local environment. Before calling ``make
clean`` stop all processes using :ref:`make pyenv`.

.. code:: sh
Drop all intermediate files, all builds, but keep sources untouched. Before
calling ``make clean`` stop all processes using :ref:`make install`. ::

$ make clean
CLEAN pyclean
CLEAN clean
CLEAN pyenv
PYENV [virtualenv] drop ./local/py3
CLEAN docs -- ./build/docs ./dist/docs
CLEAN locally installed npm dependencies
CLEAN test stuff
CLEAN common files

.. _make docs:

``make docs docs-live docs-clean``
==================================
``make docs docs.autobuild docs.clean``
=======================================

We describe the usage of the ``doc*`` targets in the :ref:`How to contribute /
We describe the usage of the ``doc.*`` targets in the :ref:`How to contribute /
Documentation <contrib docs>` section. If you want to edit the documentation
read our :ref:`make docs-live` section. If you are working in your own brand,
read our :ref:`make docs.live` section. If you are working in your own brand,
adjust your :ref:`settings global`.

.. _make books:
.. _make docs.gh-pages:

``make books/{name}.html books/{name}.pdf``
===========================================

.. _intersphinx: https://www.sphinx-doc.org/en/stable/ext/intersphinx.html
.. _XeTeX: https://tug.org/xetex/

.. sidebar:: info

To build PDF a XeTeX_ is needed, see :ref:`buildhosts`.


The ``books/{name}.*`` targets are building *books*. A *book* is a
sub-directory containing a ``conf.py`` file. One example is the user handbook
which can deployed separately (:origin:`docs/user/conf.py`). Such ``conf.py``
do inherit from :origin:`docs/conf.py` and overwrite values to fit *book's*
needs.

With the help of Intersphinx_ (:ref:`reST smart ref`) the links to searx's
documentation outside of the book will be bound by the object inventory of
``DOCS_URL``. Take into account that URLs will be picked from the inventary at
documentation's build time.

Use ``make docs-help`` to see which books available:

.. program-output:: bash -c "cd ..; make --no-print-directory docs-help"
:ellipsis: 0,-6


.. _make gh-pages:

``make gh-pages``
=================
``make docs.gh-pages``
======================

To deploy on github.io first adjust your :ref:`settings global`. For any
further read :ref:`deploy on github.io`.
@@ -161,37 +136,66 @@ further read :ref:`deploy on github.io`.
``make test``
=============

Runs a series of tests: ``test.pep8``, ``test.unit``, ``test.robot`` and does
additional :ref:`pylint checks <make pylint>`. You can run tests selective,
e.g.:

.. code:: sh
Runs a series of tests: :ref:`make test.pylint`, ``test.pep8``, ``test.unit``
and ``test.robot``. You can run tests selective, e.g.::

$ make test.pep8 test.unit test.sh
. ./local/py3/bin/activate; ./manage.sh pep8_check
[!] Running pep8 check
. ./local/py3/bin/activate; ./manage.sh unit_tests
[!] Running unit tests
TEST test.pep8 OK
...
TEST test.unit OK
...
TEST test.sh OK

.. _make pylint:
.. _make test.sh:

``make pylint``
===============
``make test.sh``
================

:ref:`sh lint` / if you have changed some bash scripting run this test before
commit.

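A usage sketch (hedged: the comment summarizes the shellcheck calls listed in the old ``Makefile`` recipe shown earlier):

.. code:: sh

   $ make test.sh   # runs shellcheck -x over utils/*.sh, utils/brand.env and .config.sh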
.. _make test.pylint:

``make test.pylint``
====================

.. _Pylint: https://www.pylint.org/

Before commiting its recommend to do some (more) linting. Pylint_ is known as
one of the best source-code, bug and quality checker for the Python programming
language. Pylint_ is not yet a quality gate within our searx project (like
:ref:`test.pep8 <make test>` it is), but Pylint_ can help to improve code
quality anyway. The pylint profile we use at searx project is found in
project's root folder :origin:`.pylintrc`.
Pylint_ is known as one of the best source-code, bug and quality checker for the
Python programming language. The pylint profile we use at searx project is
found in project's root folder :origin:`.pylintrc`.

Code quality is a ongoing process. Don't try to fix all messages from Pylint,
run Pylint and check if your changed lines are bringing up new messages. If so,
fix it. By this, code quality gets incremental better and if there comes the
day, the linting is balanced out, we might decide to add Pylint as a quality
gate.
.. _make search.checker:

``search.checker.{engine name}``
================================

To check all engines::

make search.checker

To check a engine with whitespace in the name like *google news* replace space
by underline::

make search.checker.google_news

To see HTTP requests and more use SEARX_DEBUG::

make SEARX_DEBUG=1 search.checker.google_news

.. _3xx: https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#3xx_redirection

To filter out HTTP redirects (3xx_)::

make SEARX_DEBUG=1 search.checker.google_news | grep -A1 "HTTP/1.1\" 3[0-9][0-9]"
...
Engine google news Checking
https://news.google.com:443 "GET /search?q=life&hl=en&lr=lang_en&ie=utf8&oe=utf8&ceid=US%3Aen&gl=US HTTP/1.1" 302 0
https://news.google.com:443 "GET /search?q=life&hl=en-US&lr=lang_en&ie=utf8&oe=utf8&ceid=US:en&gl=US HTTP/1.1" 200 None
--
https://news.google.com:443 "GET /search?q=computer&hl=en&lr=lang_en&ie=utf8&oe=utf8&ceid=US%3Aen&gl=US HTTP/1.1" 302 0
https://news.google.com:443 "GET /search?q=computer&hl=en-US&lr=lang_en&ie=utf8&oe=utf8&ceid=US:en&gl=US HTTP/1.1" 200 None
--


``make pybuild``
@@ -200,9 +204,7 @@ gate.
.. _PyPi: https://pypi.org/
.. _twine: https://twine.readthedocs.io/en/latest/

Build Python packages in ``./dist/py``.

.. code:: sh
Build Python packages in ``./dist/py``::

$ make pybuild
...
@@ -210,9 +212,11 @@ Build Python packages in ``./dist/py``.
running sdist
running egg_info
...
$ ls ./dist/py/
searx-0.15.0-py3-none-any.whl searx-0.15.0.tar.gz
running bdist_wheel

To upload packages to PyPi_, there is also a ``upload-pypi`` target. It needs
twine_ to be installed. Since you are not the owner of :pypi:`searx` you will
never need the latter.
$ ls ./dist
searx-0.18.0-py3-none-any.whl searx-0.18.0.tar.gz

To upload packages to PyPi_, there is also a ``pypi.upload`` target (to test use
``pypi.upload.test``). Since you are not the owner of :pypi:`searx` you will
never need to upload.
@@ -15,8 +15,8 @@ generated and deployed at :docs:`github.io <.>`. For build prerequisites read
:ref:`docs build`.

The source files of Searx's documentation are located at :origin:`docs`. Sphinx
assumes source files to be encoded in UTF-8 by defaul. Run :ref:`make docs-live
<make docs-live>` to build HTML while editing.
assumes source files to be encoded in UTF-8 by defaul. Run :ref:`make docs.live
<make docs.live>` to build HTML while editing.

.. sidebar:: Further reading

@@ -1276,13 +1276,12 @@ Templating

.. sidebar:: Build environment

All *generic-doc* tasks are running in the :ref:`build environment <make
pyenv>`.
All *generic-doc* tasks are running in the :ref:`make install`.

Templating is suitable for documentation which is created generic at the build
time. The sphinx-jinja_ extension evaluates jinja_ templates in the :ref:`build
environment <make pyenv>` (with searx modules installed). We use this e.g. to
build chapter: :ref:`engines generic`. Below the jinja directive from the
time. The sphinx-jinja_ extension evaluates jinja_ templates in the :ref:`make
install` (with searx modules installed). We use this e.g. to build chapter:
:ref:`engines generic`. Below the jinja directive from the
:origin:`docs/admin/engines.rst` is shown:

.. literalinclude:: ../admin/engines.rst
@@ -1,21 +0,0 @@
# -*- coding: utf-8; mode: python -*-
"""Configuration for the Searx user handbook
"""
project = 'Searx User-HB'
version = release = VERSION_STRING

intersphinx_mapping['searx'] = (brand.DOCS_URL, None)

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index' # startdocname
, 'searx-user-hb.tex' # targetname
, '' # take title from .rst
, author # author
, 'howto' # documentclass
, False # toctree_only
),
]
@@ -1,9 +1,9 @@
.. _searx_utils:
.. _toolboxing:

========================================
Tooling box ``utils`` for administrators
========================================
===================
Admin's tooling box
===================

In the folder :origin:`utils/` we maintain some tools useful for administrators.

@@ -119,15 +119,15 @@ of coffee).::

To build (live) documentation inside a archlinux_ container::

sudo -H ./utils/lxc.sh cmd searx-archlinux make docs-clean docs-live
sudo -H ./utils/lxc.sh cmd searx-archlinux make docs.clean docs.live
...
[I 200331 15:00:42 server:296] Serving on http://0.0.0.0:8080

To get IP of the container and the port number *live docs* is listening::

$ sudo ./utils/lxc.sh show suite | grep docs-live
$ sudo ./utils/lxc.sh show suite | grep docs.live
...
[searx-archlinux] INFO: (eth0) docs-live: http://n.n.n.12:8080/
[searx-archlinux] INFO: (eth0) docs.live: http://n.n.n.12:8080/


.. _lxc.sh help:
@ -0,0 +1,501 @@
|
|||
#!/usr/bin/env bash
|
||||
# -*- coding: utf-8; mode: sh indent-tabs-mode: nil -*-
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
# shellcheck disable=SC2031
|
||||
|
||||
# shellcheck source=utils/lib.sh
|
||||
source "$(dirname "${BASH_SOURCE[0]}")/utils/lib.sh"
|
||||
# shellcheck source=utils/brand.env
|
||||
source "${REPO_ROOT}/utils/brand.env"
|
||||
source_dot_config
|
||||
|
||||
# config
|
||||
|
||||
PY_SETUP_EXTRAS='[test]'
|
||||
NPM_PACKAGES="less@2.7 less-plugin-clean-css grunt-cli"
|
||||
GECKODRIVER_VERSION="v0.28.0"
|
||||
# SPHINXOPTS=
|
||||
|
||||
# These py files are linted by test.pylint(), all other files are linted by
|
||||
# test.pep8()
|
||||
PYLINT_FILES=(
|
||||
searx/preferences.py
|
||||
searx/testing.py
|
||||
searx/engines/gigablast.py
|
||||
searx/engines/deviantart.py
|
||||
searx/engines/digg.py
|
||||
searx/engines/google.py
|
||||
searx/engines/google_news.py
|
||||
searx/engines/google_videos.py
|
||||
searx/engines/google_images.py
|
||||
searx/engines/mediathekviewweb.py
|
||||
searx/engines/meilisearch.py
|
||||
searx/engines/solidtorrents.py
|
||||
searx/engines/solr.py
|
||||
searx/engines/google_scholar.py
|
||||
searx/engines/yahoo_news.py
|
||||
searx/engines/apkmirror.py
|
||||
searx_extra/update/update_external_bangs.py
|
||||
)
|
||||
|
||||
PYLINT_SEARX_DISABLE_OPTION="\
|
||||
I,C,R,\
|
||||
W0105,W0212,W0511,W0603,W0613,W0621,W0702,W0703,W1401,\
|
||||
E1136"
|
||||
PYLINT_ADDITIONAL_BUILTINS_FOR_ENGINES="supported_languages,language_aliases"
|
||||
PYLINT_OPTIONS="-m pylint -j 0 --rcfile .pylintrc"
|
||||
|
||||
help() {
|
||||
cat <<EOF
|
||||
buildenv
|
||||
rebuild ./utils/brand.env
|
||||
babel.compile
|
||||
pybabel compile ./searx/translations
|
||||
data.*
|
||||
all : update searx/languages.py and ./data/*
|
||||
languages : update searx/data/engines_languages.json & searx/languages.py
|
||||
useragents: update searx/data/useragents.json with the most recent versions of Firefox.
|
||||
docs.*
|
||||
html : build HTML documentation
|
||||
live : autobuild HTML documentation while editing
|
||||
gh-pages : deploy on gh-pages branch
|
||||
prebuild : build reST include files (./${DOCS_BUILD}/includes)
|
||||
clean : clean documentation build
|
||||
docker
|
||||
build : build docker image
|
||||
push : build and push docker image
|
||||
gecko.driver
|
||||
download & install geckodriver if not already installed (required for
|
||||
robot_tests)
|
||||
node.*
|
||||
env : download & install npm dependencies locally
|
||||
clean : drop npm installations
|
||||
py.*
|
||||
build : Build python packages at ./${PYDIST}
|
||||
clean : delete virtualenv and intermediate py files
|
||||
pyenv.* :
|
||||
install : developer install of searx into virtualenv
|
||||
uninstall : uninstall developer installation
|
||||
cmd ... : run command ... in virtualenv
|
||||
OK : test if virtualenv is OK
|
||||
pypi.upload:
|
||||
Upload python packages to PyPi (to test use pypi.upload.test)
|
||||
test.* :
|
||||
pylint : lint PYLINT_FILES, searx/engines, searx & tests
|
||||
pep8 : pycodestyle (pep8) for all files except PYLINT_FILES
|
||||
unit : run unit tests
|
||||
coverage : run unit tests with coverage
|
||||
robot : run robot test
|
||||
clean : clean intermediate test stuff
|
||||
themes.* :
|
||||
all : build all themes
|
||||
oscar : build oscar theme
|
||||
simple : build simple theme
|
||||
EOF
|
||||
}
|
||||
|
||||
|
||||
if [ "$VERBOSE" = "1" ]; then
|
||||
SPHINX_VERBOSE="-v"
|
||||
PYLINT_VERBOSE="-v"
|
||||
fi
|
||||
|
||||
# needed by sphinx-docs
|
||||
export DOCS_BUILD
|
||||
|
||||
buildenv() {
|
||||
SEARX_DEBUG=1 pyenv.cmd python utils/build_env.py 2>&1 \
|
||||
| prefix_stdout "${_Blue}BUILDENV${_creset} "
|
||||
return "${PIPESTATUS[0]}"
|
||||
}
|
||||
|
||||
babel.compile() {
|
||||
build_msg BABEL compile
|
||||
pyenv.cmd pybabel compile -d "${REPO_ROOT}/searx/translations"
|
||||
dump_return $?
|
||||
}
|
||||
|
||||
|
||||
data.all() {
|
||||
data.languages
|
||||
data.useragents
|
||||
build_msg DATA "update searx/data/ahmia_blacklist.txt"
|
||||
pyenv.cmd python searx_extra/update/update_ahmia_blacklist.py
|
||||
build_msg DATA "update searx/data/wikidata_units.json"
|
||||
pyenv.cmd python searx_extra/update/update_wikidata_units.py
|
||||
build_msg DATA "update searx/data/currencies.json"
|
||||
pyenv.cmd python searx_extra/update/update_currencies.py
|
||||
}
|
||||
|
||||
|
||||
data.languages() {
|
||||
( set -e
|
||||
build_msg ENGINES "fetch languages .."
|
||||
pyenv.cmd python searx_extra/update/update_languages.py
|
||||
build_msg ENGINES "update update searx/languages.py"
|
||||
build_msg DATA "update searx/data/engines_languages.json"
|
||||
)
|
||||
dump_return $?
|
||||
}
|
||||
|
||||
data.useragents() {
|
||||
build_msg DATA "update searx/data/useragents.json"
|
||||
pyenv.cmd python searx_extra/update/update_firefox_version.py
|
||||
dump_return $?
|
||||
}
|
||||
|
||||
docs.prebuild() {
|
||||
build_msg DOCS "build ${DOCS_BUILD}/includes"
|
||||
(
|
||||
set -e
|
||||
[ "$VERBOSE" = "1" ] && set -x
|
||||
mkdir -p "${DOCS_BUILD}/includes"
|
||||
./utils/searx.sh doc | cat > "${DOCS_BUILD}/includes/searx.rst"
|
||||
./utils/filtron.sh doc | cat > "${DOCS_BUILD}/includes/filtron.rst"
|
||||
./utils/morty.sh doc | cat > "${DOCS_BUILD}/includes/morty.rst"
|
||||
)
|
||||
dump_return $?
|
||||
}
|
||||
|
||||
docker.push() {
|
||||
docker.build push
|
||||
}
|
||||
|
||||
# shellcheck disable=SC2119
|
||||
docker.build() {
|
||||
pyenv.install
|
||||
|
||||
build_msg DOCKER build
|
||||
# run installation in a subprocess and activate pyenv
|
||||
|
||||
# See https://www.shellcheck.net/wiki/SC1001 and others ..
|
||||
# shellcheck disable=SC2031,SC2230,SC2002,SC2236,SC2143,SC1001
|
||||
( set -e
|
||||
# shellcheck source=/dev/null
|
||||
source "${PY_ENV_BIN}/activate"
|
||||
|
||||
# Check if it is a git repository
|
||||
if [ ! -d .git ]; then
|
||||
die 1 "This is not Git repository"
|
||||
fi
|
||||
if [ ! -x "$(which git)" ]; then
|
||||
die 1 "git is not installed"
|
||||
fi
|
||||
|
||||
if ! git remote get-url origin 2> /dev/null; then
|
||||
die 1 "there is no remote origin"
|
||||
fi
|
||||
|
||||
# This is a git repository
|
||||
|
||||
# "git describe" to get the Docker version (for example : v0.15.0-89-g0585788e)
|
||||
# awk to remove the "v" and the "g"
|
||||
SEARX_GIT_VERSION=$(git describe --match "v[0-9]*\.[0-9]*\.[0-9]*" HEAD 2>/dev/null | awk -F'-' '{OFS="-"; $1=substr($1, 2); if ($3) { $3=substr($3, 2); } print}')
|
||||
|
||||
# add the suffix "-dirty" if the repository has uncommited change
|
||||
# /!\ HACK for searx/searx: ignore utils/brand.env
|
||||
git update-index -q --refresh
|
||||
if [ ! -z "$(git diff-index --name-only HEAD -- | grep -v 'utils/brand.env')" ]; then
|
||||
SEARX_GIT_VERSION="${SEARX_GIT_VERSION}-dirty"
|
||||
fi
|
||||
|
||||
# Get the last git commit id, will be added to the Searx version (see Dockerfile)
|
||||
VERSION_GITCOMMIT=$(echo "$SEARX_GIT_VERSION" | cut -d- -f2-4)
|
||||
build_msg DOCKER "Last commit : $VERSION_GITCOMMIT"
|
||||
|
||||
# Check consistency between the git tag and the searx/version.py file
|
||||
# /! HACK : parse Python file with bash /!
|
||||
# otherwise it is not possible build the docker image without all Python
|
||||
# dependencies ( version.py loads __init__.py )
|
||||
# SEARX_PYTHON_VERSION=$(python3 -c "import six; import searx.version; six.print_(searx.version.VERSION_STRING)")
|
||||
SEARX_PYTHON_VERSION=$(cat searx/version.py | grep "\(VERSION_MAJOR\|VERSION_MINOR\|VERSION_BUILD\) =" | cut -d\= -f2 | sed -e 's/^[[:space:]]*//' | paste -sd "." -)
|
||||
if [ "$(echo "$SEARX_GIT_VERSION" | cut -d- -f1)" != "$SEARX_PYTHON_VERSION" ]; then
|
||||
err_msg "git tag: $SEARX_GIT_VERSION"
|
||||
err_msg "searx/version.py: $SEARX_PYTHON_VERSION"
|
||||
die 1 "Inconsistency between the last git tag and the searx/version.py file"
|
||||
fi
|
||||
|
||||
# define the docker image name
|
||||
GITHUB_USER=$(echo "${GIT_URL}" | sed 's/.*github\.com\/\([^\/]*\).*/\1/')
|
||||
SEARX_IMAGE_NAME="${SEARX_IMAGE_NAME:-${GITHUB_USER:-searx}/searx}"
|
||||
|
||||
# build Docker image
|
||||
build_msg DOCKER "Building image ${SEARX_IMAGE_NAME}:${SEARX_GIT_VERSION}"
|
||||
sudo docker build \
|
||||
--build-arg GIT_URL="${GIT_URL}" \
|
||||
--build-arg SEARX_GIT_VERSION="${SEARX_GIT_VERSION}" \
|
||||
--build-arg VERSION_GITCOMMIT="${VERSION_GITCOMMIT}" \
|
||||
--build-arg LABEL_DATE="$(date -u +"%Y-%m-%dT%H:%M:%SZ")" \
|
||||
--build-arg LABEL_VCS_REF="$(git rev-parse HEAD)" \
|
||||
--build-arg LABEL_VCS_URL="${GIT_URL}" \
|
||||
--build-arg TIMESTAMP_SETTINGS="$(git log -1 --format="%cd" --date=unix -- searx/settings.yml)" \
|
||||
--build-arg TIMESTAMP_UWSGI="$(git log -1 --format="%cd" --date=unix -- dockerfiles/uwsgi.ini)" \
|
||||
-t "${SEARX_IMAGE_NAME}:latest" -t "${SEARX_IMAGE_NAME}:${SEARX_GIT_VERSION}" .
|
||||
|
||||
if [ "$1" = "push" ]; then
|
||||
sudo docker push "${SEARX_IMAGE_NAME}:latest"
|
||||
sudo docker push "${SEARX_IMAGE_NAME}:${SEARX_GIT_VERSION}"
|
||||
fi
|
||||
)
|
||||
dump_return $?
|
||||
}
|
||||
|
||||
# shellcheck disable=SC2119
|
||||
gecko.driver() {
|
||||
pyenv.install
|
||||
|
||||
build_msg INSTALL "gecko.driver"
|
||||
# run installation in a subprocess and activate pyenv
|
||||
( set -e
|
||||
# shellcheck source=/dev/null
|
||||
source "${PY_ENV_BIN}/activate"
|
||||
|
||||
# TODO : check the current geckodriver version
|
||||
geckodriver -V > /dev/null 2>&1 || NOTFOUND=1
|
||||
set +e
|
||||
if [ -z "$NOTFOUND" ]; then
|
||||
build_msg INSTALL "geckodriver already installed"
|
||||
return
|
||||
fi
|
||||
PLATFORM="$(python3 -c 'import platform; print(platform.system().lower(), platform.architecture()[0])')"
|
||||
case "$PLATFORM" in
|
||||
"linux 32bit" | "linux2 32bit") ARCH="linux32";;
|
||||
"linux 64bit" | "linux2 64bit") ARCH="linux64";;
|
||||
"windows 32 bit") ARCH="win32";;
|
||||
"windows 64 bit") ARCH="win64";;
|
||||
"mac 64bit") ARCH="macos";;
|
||||
esac
|
||||
GECKODRIVER_URL="https://github.com/mozilla/geckodriver/releases/download/$GECKODRIVER_VERSION/geckodriver-$GECKODRIVER_VERSION-$ARCH.tar.gz";
|
||||
|
||||
build_msg GECKO "Installing ${PY_ENV_BIN}/geckodriver from $GECKODRIVER_URL"
|
||||
|
||||
FILE="$(mktemp)"
|
||||
wget -qO "$FILE" -- "$GECKODRIVER_URL" && tar xz -C "${PY_ENV_BIN}" -f "$FILE" geckodriver
|
||||
rm -- "$FILE"
|
||||
chmod 755 -- "${PY_ENV_BIN}/geckodriver"
|
||||
)
|
||||
dump_return $?
|
||||
}
|
||||
|
||||
node.env() {
|
||||
local err=0
|
||||
pushd "${REPO_ROOT}" &> /dev/null
|
||||
# shellcheck disable=SC2230
|
||||
which npm &> /dev/null || die 1 'node.env - npm is not found!'
|
||||
|
||||
( set -e
|
||||
# shellcheck disable=SC2030
|
||||
PATH="$(npm bin):$PATH"
|
||||
export PATH
|
||||
|
||||
build_msg INSTALL "npm install $NPM_PACKAGES"
|
||||
# shellcheck disable=SC2086
|
||||
npm install $NPM_PACKAGES
|
||||
|
||||
cd "${REPO_ROOT}/searx/static/themes/oscar"
|
||||
build_msg INSTALL "($(pwd)) npm install"
|
||||
npm install
|
||||
|
||||
build_msg INSTALL "($(pwd)) npm install"
|
||||
cd "${REPO_ROOT}/searx/static/themes/simple"
|
||||
npm install
|
||||
)
|
||||
err=$?
|
||||
popd &> /dev/null
|
||||
dump_return "$err"
|
||||
}
|
||||
|
||||
node.clean() {
|
||||
|
||||
build_msg CLEAN "locally installed npm dependencies"
|
||||
rm -rf \
|
||||
./node_modules \
|
||||
./package-lock.json \
|
||||
./searx/static/themes/oscar/package-lock.json \
|
||||
./searx/static/themes/oscar/node_modules \
|
||||
./searx/static/themes/simple/package-lock.json \
|
||||
./searx/static/themes/simple/node_modules
|
||||
dump_return $?
|
||||
}
|
||||
|
||||
py.build() {
|
||||
build_msg BUILD "[pylint] python package ${PYDIST}"
|
||||
pyenv.cmd python setup.py \
|
||||
sdist -d "${PYDIST}" \
|
||||
bdist_wheel --bdist-dir "${PYBUILD}" -d "${PYDIST}"
|
||||
}
|
||||
|
||||
py.clean() {
|
||||
build_msg CLEAN pyenv
|
||||
( set -e
|
||||
pyenv.drop
|
||||
[ "$VERBOSE" = "1" ] && set -x
|
||||
rm -rf "${PYDIST}" "${PYBUILD}" "${PY_ENV}" ./.tox ./*.egg-info
|
||||
find . -name '*.pyc' -exec rm -f {} +
|
||||
find . -name '*.pyo' -exec rm -f {} +
|
||||
find . -name __pycache__ -exec rm -rf {} +
|
||||
)
|
||||
}
|
||||
|
||||
pyenv.check() {
|
||||
cat <<EOF
|
||||
import yaml
|
||||
print('import yaml --> OK')
|
||||
EOF
|
||||
}
|
||||
|
||||
pyenv.install() {
|
||||
|
||||
if ! pyenv.OK; then
|
||||
py.clean > /dev/null
|
||||
fi
|
||||
if pyenv.install.OK > /dev/null; then
|
||||
return 0
|
||||
fi
|
||||
( set -e
|
||||
pyenv
|
||||
build_msg PYENV "[install] pip install -e 'searx${PY_SETUP_EXTRAS}'"
|
||||
"${PY_ENV_BIN}/python" -m pip install -e ".${PY_SETUP_EXTRAS}"
|
||||
buildenv
|
||||
) || die 42 "error while build & install pyenv (${PY_ENV_BIN})"
|
||||
}
|
||||
|
||||
pyenv.uninstall() {
|
||||
build_msg PYENV "[pyenv.uninstall] uninstall packages: ${PYOBJECTS}"
|
||||
pyenv.cmd python setup.py develop --uninstall 2>&1 \
|
||||
| prefix_stdout "${_Blue}PYENV ${_creset}[pyenv.uninstall] "
|
||||
|
||||
}
|
||||
|
||||
pypi.upload() {
|
||||
py.clean
|
||||
py.build
|
||||
# https://github.com/pypa/twine
|
||||
pyenv.cmd twine upload "${PYDIST}"/*
|
||||
}
|
||||
|
||||
pypi.upload.test() {
|
||||
py.clean
|
||||
py.build
|
||||
pyenv.cmd twine upload -r testpypi "${PYDIST}"/*
|
||||
}
|
||||
|
||||
test.pylint() {
|
||||
# shellcheck disable=SC2086
|
||||
( set -e
|
||||
build_msg TEST "[pylint] \$PYLINT_FILES"
|
||||
pyenv.cmd python ${PYLINT_OPTIONS} ${PYLINT_VERBOSE} \
|
||||
"${PYLINT_FILES[@]}"
|
||||
|
||||
build_msg TEST "[pylint] searx/engines"
|
||||
pyenv.cmd python ${PYLINT_OPTIONS} ${PYLINT_VERBOSE} \
|
||||
--disable="${PYLINT_SEARX_DISABLE_OPTION}" \
|
||||
--additional-builtins="${PYLINT_ADDITIONAL_BUILTINS_FOR_ENGINES}" \
|
||||
searx/engines
|
||||
|
||||
build_msg TEST "[pylint] searx tests"
|
||||
pyenv.cmd python ${PYLINT_OPTIONS} ${PYLINT_VERBOSE} \
|
||||
--disable="${PYLINT_SEARX_DISABLE_OPTION}" \
|
||||
--ignore=searx/engines \
|
||||
searx tests
|
||||
)
|
||||
dump_return $?
|
||||
}
|
||||
|
||||
test.pep8() {
|
||||
build_msg TEST 'pycodestyle (formerly pep8)'
|
||||
local _exclude=""
|
||||
printf -v _exclude '%s, ' "${PYLINT_FILES[@]}"
|
||||
pyenv.cmd pycodestyle \
|
||||
--exclude="searx/static, searx/languages.py, $_exclude " \
|
||||
--max-line-length=120 \
|
||||
--ignore "E117,E252,E402,E722,E741,W503,W504,W605" \
|
||||
searx tests
|
||||
dump_return $?
|
||||
}
|
||||
|
||||
test.unit() {
|
||||
build_msg TEST 'tests/unit'
|
||||
pyenv.cmd python -m nose2 -s tests/unit
|
||||
dump_return $?
|
||||
}
|
||||
|
||||
test.coverage() {
|
||||
build_msg TEST 'unit test coverage'
|
||||
( set -e
|
||||
pyenv.cmd python -m nose2 -C --log-capture --with-coverage --coverage searx -s tests/unit
|
||||
pyenv.cmd coverage report
|
||||
pyenv.cmd coverage html
|
||||
)
|
||||
dump_return $?
|
||||
}
|
||||
|
||||
test.robot() {
|
||||
build_msg TEST 'robot'
|
||||
gecko.driver
|
||||
PYTHONPATH=. pyenv.cmd python searx/testing.py robot
|
||||
dump_return $?
|
||||
}
|
||||
|
||||
test.clean() {
|
||||
build_msg CLEAN "test stuff"
|
||||
rm -rf geckodriver.log .coverage coverage/
|
||||
dump_return $?
|
||||
}
|
||||
|
||||
themes.all() {
|
||||
( set -e
|
||||
node.env
|
||||
themes.oscar
|
||||
themes.simple
|
||||
)
|
||||
dump_return $?
|
||||
}
|
||||
|
||||
themes.oscar() {
|
||||
local gruntfile=searx/static/themes/oscar/gruntfile.js
|
||||
build_msg GRUNT "${gruntfile}"
|
||||
PATH="$(npm bin):$PATH" grunt --gruntfile "${gruntfile}"
|
||||
dump_return $?
|
||||
}
|
||||
|
||||
themes.simple() {
|
||||
local gruntfile=searx/static/themes/simple/gruntfile.js
|
||||
build_msg GRUNT "${gruntfile}"
|
||||
PATH="$(npm bin):$PATH" grunt --gruntfile "${gruntfile}"
|
||||
dump_return $?
|
||||
}
|
||||
|
||||
# shellcheck disable=SC2119
|
||||
main() {
|
||||
|
||||
local _type
|
||||
local cmd="$1"; shift
|
||||
|
||||
if [ "$cmd" == "" ]; then
|
||||
help
|
||||
err_msg "missing command"
|
||||
return 42
|
||||
fi
|
||||
|
||||
case "$cmd" in
|
||||
--getenv) var="$1"; echo "${!var}";;
|
||||
--help) help;;
|
||||
--*)
|
||||
help
|
||||
err_msg "unknown option $cmd"
|
||||
return 42
|
||||
;;
|
||||
*)
|
||||
_type="$(type -t "$cmd")"
|
||||
if [ "$_type" != 'function' ]; then
|
||||
err_msg "unknown command $1 / use --help"
|
||||
return 42
|
||||
else
|
||||
"$cmd" "$@"
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
main "$@"
|
205
manage.sh
205
manage.sh
|
@ -1,205 +0,0 @@
|
|||
#!/bin/sh
|
||||
|
||||
export LANG=C
|
||||
|
||||
BASE_DIR="$(dirname -- "`readlink -f -- "$0"`")"
|
||||
|
||||
cd -- "$BASE_DIR"
|
||||
set -e
|
||||
|
||||
# subshell
|
||||
PYTHONPATH="$BASE_DIR"
|
||||
SEARX_DIR="$BASE_DIR/searx"
|
||||
ACTION="$1"
|
||||
|
||||
. "${BASE_DIR}/utils/brand.env"
|
||||
|
||||
#
|
||||
# Python
|
||||
#
|
||||
|
||||
update_packages() {
|
||||
pip install --upgrade pip
|
||||
pip install --upgrade setuptools
|
||||
pip install -Ur "$BASE_DIR/requirements.txt"
|
||||
}
|
||||
|
||||
update_dev_packages() {
|
||||
update_packages
|
||||
pip install -Ur "$BASE_DIR/requirements-dev.txt"
|
||||
}
|
||||
|
||||
install_geckodriver() {
|
||||
echo '[!] Checking geckodriver'
|
||||
# TODO : check the current geckodriver version
|
||||
set -e
|
||||
geckodriver -V > /dev/null 2>&1 || NOTFOUND=1
|
||||
set +e
|
||||
if [ -z "$NOTFOUND" ]; then
|
||||
return
|
||||
fi
|
||||
GECKODRIVER_VERSION="v0.28.0"
|
||||
PLATFORM="`python3 -c "import platform; print(platform.system().lower(), platform.architecture()[0])"`"
|
||||
case "$PLATFORM" in
|
||||
"linux 32bit" | "linux2 32bit") ARCH="linux32";;
|
||||
"linux 64bit" | "linux2 64bit") ARCH="linux64";;
|
||||
"windows 32 bit") ARCH="win32";;
|
||||
"windows 64 bit") ARCH="win64";;
|
||||
"mac 64bit") ARCH="macos";;
|
||||
esac
|
||||
GECKODRIVER_URL="https://github.com/mozilla/geckodriver/releases/download/$GECKODRIVER_VERSION/geckodriver-$GECKODRIVER_VERSION-$ARCH.tar.gz";
|
||||
|
||||
if [ -z "$1" ]; then
|
||||
if [ -z "$VIRTUAL_ENV" ]; then
|
||||
printf "geckodriver can't be installed because VIRTUAL_ENV is not set, you should download it from\n %s" "$GECKODRIVER_URL"
|
||||
exit
|
||||
else
|
||||
GECKODRIVER_DIR="$VIRTUAL_ENV/bin"
|
||||
fi
|
||||
else
|
||||
GECKODRIVER_DIR="$1"
|
||||
mkdir -p -- "$GECKODRIVER_DIR"
|
||||
fi
|
||||
|
||||
printf "Installing %s/geckodriver from\n %s" "$GECKODRIVER_DIR" "$GECKODRIVER_URL"
|
||||
|
||||
FILE="`mktemp`"
|
||||
wget -qO "$FILE" -- "$GECKODRIVER_URL" && tar xz -C "$GECKODRIVER_DIR" -f "$FILE" geckodriver
|
||||
rm -- "$FILE"
|
||||
chmod 777 -- "$GECKODRIVER_DIR/geckodriver"
|
||||
}
|
||||
|
||||
locales() {
|
||||
pybabel compile -d "$SEARX_DIR/translations"
|
||||
}
|
||||
|
||||
|
||||
#
|
||||
# Web
|
||||
#
|
||||
|
||||
npm_path_setup() {
|
||||
which npm || (printf 'Error: npm is not found\n'; exit 1)
|
||||
export PATH="$(npm bin)":$PATH
|
||||
}
|
||||
|
||||
npm_packages() {
|
||||
npm_path_setup
|
||||
|
||||
echo '[!] install NPM packages'
|
||||
cd -- "$BASE_DIR"
|
||||
npm install less@2.7 less-plugin-clean-css grunt-cli
|
||||
|
||||
echo '[!] install NPM packages for oscar theme'
|
||||
cd -- "$BASE_DIR/searx/static/themes/oscar"
|
||||
npm install
|
||||
|
||||
echo '[!] install NPM packages for simple theme'
|
||||
cd -- "$BASE_DIR/searx/static/themes/simple"
|
||||
npm install
|
||||
}
|
||||
|
||||
docker_build() {
|
||||
# Check if it is a git repository
|
||||
if [ ! -d .git ]; then
|
||||
echo "This is not Git repository"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ ! -x "$(which git)" ]; then
|
||||
echo "git is not installed"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ ! git remote get-url origin 2> /dev/null ]; then
|
||||
echo "there is no remote origin"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# This is a git repository
|
||||
|
||||
# "git describe" to get the Docker version (for example : v0.15.0-89-g0585788e)
|
||||
# awk to remove the "v" and the "g"
|
||||
SEARX_GIT_VERSION=$(git describe --match "v[0-9]*\.[0-9]*\.[0-9]*" HEAD 2>/dev/null | awk -F'-' '{OFS="-"; $1=substr($1, 2); if ($3) { $3=substr($3, 2); } print}')
|
||||
|
||||
# add the suffix "-dirty" if the repository has uncommited change
|
||||
# /!\ HACK for searx/searx: ignore utils/brand.env
|
||||
git update-index -q --refresh
|
||||
if [ ! -z "$(git diff-index --name-only HEAD -- | grep -v 'utils/brand.env')" ]; then
|
||||
SEARX_GIT_VERSION="${SEARX_GIT_VERSION}-dirty"
|
||||
fi
|
||||
|
||||
# Get the last git commit id, will be added to the Searx version (see Dockerfile)
|
||||
VERSION_GITCOMMIT=$(echo $SEARX_GIT_VERSION | cut -d- -f2-4)
|
||||
echo "Last commit : $VERSION_GITCOMMIT"
|
||||
|
||||
# Check consistency between the git tag and the searx/version.py file
|
||||
# /!\ HACK : parse Python file with bash /!\
|
||||
# otherwise it is not possible build the docker image without all Python dependencies ( version.py loads __init__.py )
|
||||
# SEARX_PYTHON_VERSION=$(python3 -c "import six; import searx.version; six.print_(searx.version.VERSION_STRING)")
|
||||
SEARX_PYTHON_VERSION=$(cat searx/version.py | grep "\(VERSION_MAJOR\|VERSION_MINOR\|VERSION_BUILD\) =" | cut -d\= -f2 | sed -e 's/^[[:space:]]*//' | paste -sd "." -)
|
||||
if [ $(echo "$SEARX_GIT_VERSION" | cut -d- -f1) != "$SEARX_PYTHON_VERSION" ]; then
|
||||
echo "Inconsistency between the last git tag and the searx/version.py file"
|
||||
echo "git tag: $SEARX_GIT_VERSION"
|
||||
echo "searx/version.py: $SEARX_PYTHON_VERSION"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# define the docker image name
|
||||
GITHUB_USER=$(echo "${GIT_URL}" | sed 's/.*github\.com\/\([^\/]*\).*/\1/')
|
||||
SEARX_IMAGE_NAME="${GITHUB_USER:-searx}/searx"
|
||||
|
||||
# build Docker image
|
||||
echo "Building image ${SEARX_IMAGE_NAME}:${SEARX_GIT_VERSION}"
|
||||
sudo docker build \
|
||||
--build-arg GIT_URL="${GIT_URL}" \
|
||||
--build-arg SEARX_GIT_VERSION="${SEARX_GIT_VERSION}" \
|
||||
--build-arg VERSION_GITCOMMIT="${VERSION_GITCOMMIT}" \
|
||||
--build-arg LABEL_DATE=$(date -u +"%Y-%m-%dT%H:%M:%SZ") \
|
||||
--build-arg LABEL_VCS_REF=$(git rev-parse HEAD) \
|
||||
--build-arg LABEL_VCS_URL="${GIT_URL}" \
|
||||
--build-arg TIMESTAMP_SETTINGS=$(git log -1 --format="%cd" --date=unix -- searx/settings.yml) \
|
||||
--build-arg TIMESTAMP_UWSGI=$(git log -1 --format="%cd" --date=unix -- dockerfiles/uwsgi.ini) \
|
||||
-t ${SEARX_IMAGE_NAME}:latest -t ${SEARX_IMAGE_NAME}:${SEARX_GIT_VERSION} .
|
||||
|
||||
if [ "$1" = "push" ]; then
|
||||
sudo docker push ${SEARX_IMAGE_NAME}:latest
|
||||
sudo docker push ${SEARX_IMAGE_NAME}:${SEARX_GIT_VERSION}
|
||||
fi
|
||||
}
|
||||
|
||||
#
|
||||
# Help
|
||||
#
|
||||
|
||||
help() {
|
||||
[ -z "$1" ] || printf 'Error: %s\n' "$1"
|
||||
echo "Searx manage.sh help
|
||||
|
||||
Commands
|
||||
========
|
||||
help - This text
|
||||
|
||||
Build requirements
|
||||
------------------
|
||||
update_packages - Check & update production dependency changes
|
||||
update_dev_packages - Check & update development and production dependency changes
|
||||
install_geckodriver - Download & install geckodriver if not already installed (required for robot_tests)
|
||||
npm_packages - Download & install npm dependencies
|
||||
|
||||
Build
|
||||
-----
|
||||
locales - Compile locales
|
||||
|
||||
Environment:
|
||||
GIT_URL: ${GIT_URL}
|
||||
ISSUE_URL: ${ISSUE_URL}
|
||||
SEARX_URL: ${SEARX_URL}
|
||||
DOCS_URL: ${DOCS_URL}
|
||||
PUBLIC_INSTANCES: ${PUBLIC_INSTANCES}
|
||||
"
|
||||
}
|
||||
|
||||
[ "$(command -V "$ACTION" | grep ' function$')" = "" ] \
|
||||
&& help "action not found" \
|
||||
|| "$ACTION" "$2"
|
|
@ -1,17 +1,18 @@
|
|||
mock==4.0.3
|
||||
nose2[coverage_plugin]==0.10.0
|
||||
cov-core==1.15.0
|
||||
pycodestyle==2.6.0
|
||||
pylint==2.7.2
|
||||
pycodestyle==2.7.0
|
||||
pylint==2.7.4
|
||||
splinter==0.14.0
|
||||
transifex-client==0.14.2
|
||||
selenium==3.141.0
|
||||
twine==3.4.1
|
||||
Pallets-Sphinx-Themes==1.2.3
|
||||
Sphinx==3.5.2
|
||||
docutils==0.16
|
||||
Sphinx==3.5.4
|
||||
sphinx-issues==1.2.0
|
||||
sphinx-jinja==1.1.1
|
||||
sphinx-tabs==2.1.0
|
||||
sphinxcontrib-programoutput==0.16
|
||||
sphinx-autobuild==2020.9.1
|
||||
sphinxcontrib-programoutput==0.17
|
||||
sphinx-autobuild==2021.3.14
|
||||
linuxdoc==20210324
|
||||
|
|
File diff suppressed because it is too large
|
@ -2790,6 +2790,7 @@
|
|||
"намібійський долар": "NAD",
|
||||
"納米比亞元": "NAD",
|
||||
"نيرة نيجيرية": "NGN",
|
||||
"нигерийска найра": "NGN",
|
||||
"naira": "NGN",
|
||||
"nigerijská naira": "NGN",
|
||||
"nigerian naira": "NGN",
|
||||
|
@ -3730,7 +3731,7 @@
|
|||
"east timor centavo coins": "TLD",
|
||||
"centavos de dólar de timor oriental": "TLD",
|
||||
"سکه کنتاووی تیمور شرقی": "TLD",
|
||||
"סנתאבוס מזרח טימוריים": "TLD",
|
||||
"סנטאבו מזרח טימורי": "TLD",
|
||||
"istočnotimorski sentavo": "TLD",
|
||||
"kelet timori centavoérmék": "TLD",
|
||||
"centavo est timorense": "TLD",
|
||||
|
@ -4123,6 +4124,7 @@
|
|||
"vietnamdar dong": "VND",
|
||||
"دانگ ویتنام": "VND",
|
||||
"vietnamin đồng": "VND",
|
||||
"dong vietnamita": "VND",
|
||||
"vijetnamski dong": "VND",
|
||||
"vietnámi đồng": "VND",
|
||||
"đồng": "VND",
|
||||
|
@ -4758,6 +4760,7 @@
|
|||
"peso d'argentina": "ARS",
|
||||
"peso de l'argentina": "ARS",
|
||||
"pesos argentins": "ARS",
|
||||
"monedas de 1 2 5 10 20 50 y billetes 1 2 5 10 20 50 100 pesos": "ARS",
|
||||
"arg$": "ARS",
|
||||
"peso ley": "ARS",
|
||||
"peso moneda nacional": "ARS",
|
||||
|
@ -5240,6 +5243,7 @@
|
|||
"ブータン・ルピー": "BTN",
|
||||
"бутанский нгултрум": "BTN",
|
||||
"валюта бутана": "BTN",
|
||||
"不丹努爾特魯姆": "BTN",
|
||||
"努尔特鲁姆": "BTN",
|
||||
"努爾特魯姆": "BTN",
|
||||
"بوتسوانا بولا": "BWP",
|
||||
|
@ -5740,6 +5744,7 @@
|
|||
"丹麦克朗": "DKK",
|
||||
"بيسو دومنيكاني": "DOP",
|
||||
"RD$": "DOP",
|
||||
"peso dominica": "DOP",
|
||||
"dom$": "DOP",
|
||||
"rd$": "DOP",
|
||||
"dop": "DOP",
|
||||
|
@ -5878,6 +5883,8 @@
|
|||
"e": "EUR",
|
||||
"யூரோ": "EUR",
|
||||
"avro": "EUR",
|
||||
"歐元": "EUR",
|
||||
"歐羅": "EUR",
|
||||
"F$": "FJD",
|
||||
"dòlar de fiji": "FJD",
|
||||
"fjd": "FJD",
|
||||
|
@ -6783,6 +6790,7 @@
|
|||
"тенге": "KZT",
|
||||
"kazašské tenge": "KZT",
|
||||
"kazašský tenge": "KZT",
|
||||
"tenge casachstan": "KZT",
|
||||
"kzt": "KZT",
|
||||
"〒": "KZT",
|
||||
"تنگه": "KZT",
|
||||
|
@ -6885,6 +6893,7 @@
|
|||
"روپیهٔ سری لانکا": "LKR",
|
||||
"روپیهٔ سریلانکا": "LKR",
|
||||
"roupie du sri lanka": "LKR",
|
||||
"רופי סרי לנקי": "LKR",
|
||||
"srilankaanse roepie": "LKR",
|
||||
"srilankaanse rupee": "LKR",
|
||||
"rupia sri lanki": "LKR",
|
||||
|
@ -7960,8 +7969,8 @@
|
|||
"lliura de síria": "SYP",
|
||||
"syp": "SYP",
|
||||
"syrisches pfund": "SYP",
|
||||
"syrian liyra": "SYP",
|
||||
"syr": "SYP",
|
||||
"ls": "SYP",
|
||||
"syrian lira": "SYP",
|
||||
"dolar sirio": "SYP",
|
||||
"لیرهٔ سوریه": "SYP",
|
||||
"پوند سوریه": "SYP",
|
||||
|
@ -8060,7 +8069,6 @@
|
|||
"centavo de dólar de timor oriental": "TLD",
|
||||
"centavos de dolar de timor oriental": "TLD",
|
||||
"سکه کنتاووی تیمور خاوری": "TLD",
|
||||
"סנטאבו מזרח טימורי": "TLD",
|
||||
"moedas de centavo do timor leste": "TLD",
|
||||
"восточно тиморские монеты": "TLD",
|
||||
"T": "TMT",
|
||||
|
@ -8402,7 +8410,6 @@
|
|||
"đôla mỹ": "USD",
|
||||
"đồng bạc mĩ": "USD",
|
||||
"đồng bạc mỹ": "USD",
|
||||
"美刀": "USD",
|
||||
"美金": "USD",
|
||||
"بيزو أوروغواني": "UYU",
|
||||
"$U": "UYU",
|
||||
|
@ -8472,7 +8479,6 @@
|
|||
"vietnamin dong": "VND",
|
||||
"dong vietnamien": "VND",
|
||||
"דונג וייטנאמי ": "VND",
|
||||
"dong vietnamita": "VND",
|
||||
"ベトナムドン": "VND",
|
||||
"ベトナム・ドン": "VND",
|
||||
"越南銅": "VND",
|
||||
|
@ -8659,8 +8665,6 @@
|
|||
"unitate monetară europeană": "XEU",
|
||||
"европейская валютная единица": "XEU",
|
||||
"歐洲通貨單位": "XEU",
|
||||
"stellar lumens": "XLM",
|
||||
"xlm": "XLM",
|
||||
"xmr": "XMR",
|
||||
"mo": "XMR",
|
||||
"bitmonero": "XMR",
|
||||
|
@ -9087,7 +9091,8 @@
|
|||
"tr": "Arjantin pesosu",
|
||||
"uk": "аргентинський песо",
|
||||
"vi": "Peso Argentina",
|
||||
"zh": "阿根廷比索"
|
||||
"zh": "阿根廷比索",
|
||||
"cy": "ars"
|
||||
},
|
||||
"AUD": {
|
||||
"ar": "دولار أسترالي",
|
||||
|
@ -9610,7 +9615,7 @@
|
|||
"tr": "Ngultrum",
|
||||
"uk": "Нгултрум",
|
||||
"vi": "Ngultrum Bhutan",
|
||||
"zh": "不丹努尔特鲁姆",
|
||||
"zh": "不丹努爾特魯姆",
|
||||
"he": "נגולטורם",
|
||||
"oc": "Ngultrum"
|
||||
},
|
||||
|
@ -10221,6 +10226,7 @@
|
|||
"tr": "Dominik pesosu",
|
||||
"uk": "Домініканський песо",
|
||||
"zh": "多明尼加比索",
|
||||
"cy": "Peso Dominica",
|
||||
"ro": "peso dominican"
|
||||
},
|
||||
"DZD": {
|
||||
|
@ -10412,7 +10418,7 @@
|
|||
"hr": "Euro",
|
||||
"hu": "euró",
|
||||
"ia": "Euro",
|
||||
"it": "Euro",
|
||||
"it": "euro",
|
||||
"ja": "ユーロ",
|
||||
"lt": "Euras",
|
||||
"nl": "euro",
|
||||
|
@ -10430,7 +10436,7 @@
|
|||
"tr": "Euro",
|
||||
"uk": "євро",
|
||||
"vi": "Euro",
|
||||
"zh": "欧元"
|
||||
"zh": "歐元"
|
||||
},
|
||||
"FJD": {
|
||||
"ar": "دولار فيجي",
|
||||
|
@ -10447,7 +10453,7 @@
|
|||
"fr": "dollar de Fidji",
|
||||
"gl": "Dólar fidxiano",
|
||||
"hr": "Fidžijski dolar",
|
||||
"hu": "Fidzsi dollár",
|
||||
"hu": "fidzsi dollár",
|
||||
"it": "dollaro delle Figi",
|
||||
"ja": "フィジー・ドル",
|
||||
"lt": "Fidžio doleris",
|
||||
|
@ -11684,6 +11690,7 @@
|
|||
"uk": "Казахстанський тенге",
|
||||
"vi": "Tenge Kazakhstan",
|
||||
"zh": "哈萨克斯坦坚戈",
|
||||
"cy": "tenge Casachstan",
|
||||
"he": "טנגה",
|
||||
"ia": "tenge kazakh",
|
||||
"oc": "tenge"
|
||||
|
@ -11780,7 +11787,8 @@
|
|||
"uk": "ланкійська рупія",
|
||||
"vi": "Rupee Sri Lanka",
|
||||
"zh": "斯里蘭卡盧比",
|
||||
"cy": "Rupee Sri Lanca"
|
||||
"cy": "Rupee Sri Lanca",
|
||||
"he": "רופי סרי לנקי"
|
||||
},
|
||||
"LRD": {
|
||||
"ar": "دولار ليبيري",
|
||||
|
@ -12397,6 +12405,7 @@
|
|||
},
|
||||
"NGN": {
|
||||
"ar": "نيرة نيجيرية",
|
||||
"bg": "Нигерийска найра",
|
||||
"ca": "naira",
|
||||
"cs": "Nigerijská naira",
|
||||
"de": "Naira",
|
||||
|
@ -13450,7 +13459,7 @@
|
|||
"cs": "Syrská libra",
|
||||
"de": "syrische Lira",
|
||||
"el": "Λίρα Συρίας",
|
||||
"en": "Syrian liyra",
|
||||
"en": "Syrian pound",
|
||||
"eo": "siria pundo",
|
||||
"es": "Dolar sirio",
|
||||
"fa": "لیره سوریه",
|
||||
|
@ -14044,6 +14053,7 @@
|
|||
"fa": "دانگ ویتنام",
|
||||
"fi": "Vietnamin đồng",
|
||||
"fr": "dong",
|
||||
"gl": "Dong vietnamita",
|
||||
"hr": "Vijetnamski dong",
|
||||
"hu": "vietnámi đồng",
|
||||
"it": "Đồng vietnamita",
|
||||
|
@ -14655,6 +14665,7 @@
|
|||
"pt": "Zcash",
|
||||
"ro": "Zcash",
|
||||
"ru": "Zcash",
|
||||
"tr": "Zcash",
|
||||
"uk": "Zcash",
|
||||
"vi": "Zcash",
|
||||
"zh": "Zcash",
|
||||
|
@ -14706,10 +14717,6 @@
|
|||
"en": "Unidad de Valor Real",
|
||||
"es": "Unidad de Valor Real",
|
||||
"fr": "Unidad de Valor Real colombienne"
|
||||
},
|
||||
"XLM": {
|
||||
"de": "Stellar Lumens",
|
||||
"en": "Stellar Lumens"
|
||||
}
|
||||
}
|
||||
}
|
|
@ -26998,7 +26998,7 @@
|
|||
},
|
||||
"lij": {
|
||||
"english_name": "Ligurian",
|
||||
"name": "L\u00edguru"
|
||||
"name": "L\u00ecgure"
|
||||
},
|
||||
"lld": {
|
||||
"english_name": "Ladin",
|
||||
|
@ -27476,6 +27476,10 @@
|
|||
"english_name": "Turkish",
|
||||
"name": "T\u00fcrk\u00e7e"
|
||||
},
|
||||
"trv": {
|
||||
"english_name": "Seediq",
|
||||
"name": "Taroko"
|
||||
},
|
||||
"ts": {
|
||||
"english_name": "Tsonga",
|
||||
"name": "Xitsonga"
|
||||
|
@ -28232,7 +28236,7 @@
|
|||
},
|
||||
"lij": {
|
||||
"english_name": "Ligurian",
|
||||
"name": "L\u00edguru"
|
||||
"name": "L\u00ecgure"
|
||||
},
|
||||
"lld": {
|
||||
"english_name": "Ladin",
|
||||
|
@ -28710,6 +28714,10 @@
|
|||
"english_name": "Turkish",
|
||||
"name": "T\u00fcrk\u00e7e"
|
||||
},
|
||||
"trv": {
|
||||
"english_name": "Seediq",
|
||||
"name": "Taroko"
|
||||
},
|
||||
"ts": {
|
||||
"english_name": "Tsonga",
|
||||
"name": "Xitsonga"
|
||||
|
@ -28876,39 +28884,5 @@
|
|||
"tr",
|
||||
"zh-CHS",
|
||||
"zh-CHT"
|
||||
],
|
||||
"yahoo news": [
|
||||
"ar",
|
||||
"bg",
|
||||
"cs",
|
||||
"da",
|
||||
"de",
|
||||
"el",
|
||||
"en",
|
||||
"es",
|
||||
"et",
|
||||
"fi",
|
||||
"fr",
|
||||
"he",
|
||||
"hr",
|
||||
"hu",
|
||||
"it",
|
||||
"ja",
|
||||
"ko",
|
||||
"lt",
|
||||
"lv",
|
||||
"nl",
|
||||
"no",
|
||||
"pl",
|
||||
"pt",
|
||||
"ro",
|
||||
"ru",
|
||||
"sk",
|
||||
"sl",
|
||||
"sv",
|
||||
"th",
|
||||
"tr",
|
||||
"zh-CHS",
|
||||
"zh-CHT"
|
||||
]
|
||||
}
|
|
@ -1,9 +1,8 @@
|
|||
{
|
||||
"versions": [
|
||||
"86.0",
|
||||
"85.0.2",
|
||||
"85.0.1",
|
||||
"85.0"
|
||||
"87.0",
|
||||
"86.0.1",
|
||||
"86.0"
|
||||
],
|
||||
"os": [
|
||||
"Windows NT 10.0; WOW64",
|
||||
|
|
|
@ -4,6 +4,8 @@
|
|||
"Q100149279": "°We",
|
||||
"Q100995": "lb",
|
||||
"Q101194838": "GHz/V",
|
||||
"Q101427873": "pk (US)",
|
||||
"Q101427917": "pk (UK)",
|
||||
"Q101463141": "ym²",
|
||||
"Q101463237": "zm²",
|
||||
"Q101463321": "am²",
|
||||
|
@ -81,6 +83,8 @@
|
|||
"Q105519288": "B SPL",
|
||||
"Q105687125": "eV⁻¹ m⁻³",
|
||||
"Q1057069": "hg",
|
||||
"Q105761866": "mV/K",
|
||||
"Q105840138": "BU",
|
||||
"Q1063756": "rad/s",
|
||||
"Q1063786": "in²",
|
||||
"Q1065153": "mrad",
|
||||
|
@ -445,7 +449,6 @@
|
|||
"Q4041686": "in H20",
|
||||
"Q4068266": "Ʒ",
|
||||
"Q4176683": "aC",
|
||||
"Q420266": "fl oz",
|
||||
"Q42319606": "people/m²",
|
||||
"Q4243638": "km³",
|
||||
"Q4456994": "mF",
|
||||
|
@ -458,6 +461,7 @@
|
|||
"Q4861171": "H",
|
||||
"Q494083": "fur",
|
||||
"Q4989854": "kJ",
|
||||
"Q4992853": "kt",
|
||||
"Q500515": "Gal",
|
||||
"Q5042194": "£",
|
||||
"Q50808017": "kg m²",
|
||||
|
@ -726,6 +730,7 @@
|
|||
"Q87262709": "kΩ",
|
||||
"Q87416053": "MΩ",
|
||||
"Q88296091": "tsp",
|
||||
"Q89187604": "bbl (US)",
|
||||
"Q89473028": "bu (UK)",
|
||||
"Q89662131": "pt (UK)",
|
||||
"Q901492": "ph",
|
||||
|
@ -1042,6 +1047,7 @@
|
|||
"Q97143843": "z°C",
|
||||
"Q97143849": "Y°C",
|
||||
"Q97143851": "a°C",
|
||||
"Q97496530": "eV s",
|
||||
"Q98492214": "den",
|
||||
"Q98538634": "eV/m²",
|
||||
"Q98635536": "eV/m",
|
||||
|
@ -1103,5 +1109,6 @@
|
|||
"Q573": "d",
|
||||
"Q577": "a",
|
||||
"Q7727": "min",
|
||||
"Q8799": "B"
|
||||
"Q8799": "B",
|
||||
"Q8805": "bit"
|
||||
}
|
|
@ -0,0 +1,73 @@
|
|||
"""
|
||||
Bandcamp (Music)
|
||||
|
||||
@website https://bandcamp.com/
|
||||
@provide-api no
|
||||
@results HTML
|
||||
@parse url, title, content, publishedDate, embedded, thumbnail
|
||||
"""
|
||||
|
||||
from urllib.parse import urlencode, urlparse, parse_qs
|
||||
from dateutil.parser import parse as dateparse
|
||||
from lxml import html
|
||||
from searx.utils import extract_text
|
||||
|
||||
categories = ['music']
|
||||
paging = True
|
||||
|
||||
base_url = "https://bandcamp.com/"
|
||||
search_string = 'search?{query}&page={page}'
|
||||
embedded_url = '''<iframe width="100%" height="166"
|
||||
scrolling="no" frameborder="no"
|
||||
data-src="https://bandcamp.com/EmbeddedPlayer/{type}={result_id}/size=large/bgcol=ffffff/linkcol=0687f5/tracklist=false/artwork=small/transparent=true/"
|
||||
></iframe>'''
|
||||
|
||||
|
||||
def request(query, params):
|
||||
'''pre-request callback
|
||||
params<dict>:
|
||||
method : POST/GET
|
||||
headers : {}
|
||||
data : {} # if method == POST
|
||||
url : ''
|
||||
category: 'search category'
|
||||
pageno : 1 # number of the requested page
|
||||
'''
|
||||
|
||||
search_path = search_string.format(
|
||||
query=urlencode({'q': query}),
|
||||
page=params['pageno'])
|
||||
|
||||
params['url'] = base_url + search_path
|
||||
|
||||
return params
|
||||
|
||||
|
||||
def response(resp):
|
||||
'''post-response callback
|
||||
resp: requests response object
|
||||
'''
|
||||
results = []
|
||||
tree = html.fromstring(resp.text)
|
||||
search_results = tree.xpath('//li[contains(@class, "searchresult")]')
|
||||
for result in search_results:
|
||||
link = result.xpath('.//div[@class="itemurl"]/a')[0]
|
||||
result_id = parse_qs(urlparse(link.get('href')).query)["search_item_id"][0]
|
||||
title = result.xpath('.//div[@class="heading"]/a/text()')
|
||||
date = dateparse(result.xpath('.//div[@class="released"]/text()')[0].replace("released ", ""))
|
||||
content = result.xpath('.//div[@class="subhead"]/text()')
|
||||
new_result = {
|
||||
"url": extract_text(link),
|
||||
"title": extract_text(title),
|
||||
"content": extract_text(content),
|
||||
"publishedDate": date,
|
||||
}
|
||||
thumbnail = result.xpath('.//div[@class="art"]/img/@src')
|
||||
if thumbnail:
|
||||
new_result['thumbnail'] = thumbnail[0]
|
||||
if "album" in result.classes:
|
||||
new_result["embedded"] = embedded_url.format(type='album', result_id=result_id)
|
||||
elif "track" in result.classes:
|
||||
new_result["embedded"] = embedded_url.format(type='track', result_id=result_id)
|
||||
results.append(new_result)
|
||||
return results
|
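A short sketch of the URL the bandcamp request() callback builds for page 2 of a query; the params dict is a simplified stand-in for the one searx passes in:

    params = {'pageno': 2}
    params = request('synthwave', params)
    print(params['url'])
    # -> https://bandcamp.com/search?q=synthwave&page=2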
|
@ -243,7 +243,7 @@ def response(resp):
|
|||
if answer:
|
||||
results.append({'answer': ' '.join(answer)})
|
||||
else:
|
||||
logger.debug("did not found 'answer'")
|
||||
logger.debug("did not find 'answer'")
|
||||
|
||||
# results --> number_of_results
|
||||
try:
|
||||
|
|
|
@ -0,0 +1,59 @@
|
|||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
"""
|
||||
Meilisearch
|
||||
"""
|
||||
|
||||
# pylint: disable=global-statement, missing-function-docstring
|
||||
|
||||
from json import loads, dumps
|
||||
|
||||
|
||||
base_url = 'http://localhost:7700'
|
||||
index = ''
|
||||
auth_key = ''
|
||||
facet_filters = list()
|
||||
_search_url = ''
|
||||
result_template = 'key-value.html'
|
||||
categories = ['general']
|
||||
paging = True
|
||||
|
||||
|
||||
def init(_):
|
||||
if index == '':
|
||||
raise ValueError('index cannot be empty')
|
||||
|
||||
global _search_url
|
||||
_search_url = base_url + '/indexes/' + index + '/search'
|
||||
|
||||
|
||||
def request(query, params):
|
||||
if auth_key != '':
|
||||
params['headers']['X-Meili-API-Key'] = auth_key
|
||||
|
||||
params['headers']['Content-Type'] = 'application/json'
|
||||
params['url'] = _search_url
|
||||
params['method'] = 'POST'
|
||||
|
||||
data = {
|
||||
'q': query,
|
||||
'offset': 10 * (params['pageno'] - 1),
|
||||
'limit': 10,
|
||||
}
|
||||
if len(facet_filters) > 0:
|
||||
data['facetFilters'] = facet_filters
|
||||
|
||||
params['data'] = dumps(data)
|
||||
|
||||
return params
|
||||
|
||||
|
||||
def response(resp):
|
||||
results = []
|
||||
|
||||
resp_json = loads(resp.text)
|
||||
for result in resp_json['hits']:
|
||||
r = {key: str(value) for key, value in result.items()}
|
||||
r['template'] = result_template
|
||||
results.append(r)
|
||||
|
||||
return results
|
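A hedged example of the JSON body the Meilisearch request() callback POSTs for page 3 of a query, assuming the hard-coded page size of 10 (offset = 10 * (pageno - 1)):

    params = {'headers': {}, 'pageno': 3}
    params = request('privacy', params)
    print(params['data'])
    # -> {"q": "privacy", "offset": 20, "limit": 10}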
|
@ -0,0 +1,60 @@
|
|||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
"""
|
||||
MySQL database (Offline)
|
||||
"""
|
||||
|
||||
# the import error is ignored because the admin has to
|
||||
# install the dependency manually to use the engine
|
||||
# pylint: disable=import-error
|
||||
|
||||
import mysql.connector
|
||||
|
||||
|
||||
engine_type = 'offline'
|
||||
auth_plugin = 'caching_sha2_password'
|
||||
host = "127.0.0.1"
|
||||
database = ""
|
||||
username = ""
|
||||
password = ""
|
||||
query_str = ""
|
||||
limit = 10
|
||||
paging = True
|
||||
result_template = 'key-value.html'
|
||||
_connection = None
|
||||
|
||||
|
||||
def init(engine_settings):
|
||||
if 'query_str' not in engine_settings:
|
||||
raise ValueError('query_str cannot be empty')
|
||||
|
||||
if not engine_settings['query_str'].lower().startswith('select '):
|
||||
raise ValueError('only SELECT queries are supported')
|
||||
|
||||
global _connection
|
||||
_connection = mysql.connector.connect(
|
||||
database=database,
|
||||
user=username,
|
||||
password=password,
|
||||
host=host,
|
||||
auth_plugin=auth_plugin,
|
||||
)
|
||||
|
||||
|
||||
def search(query, params):
|
||||
query_params = {'query': query}
|
||||
query_to_run = query_str + ' LIMIT {0} OFFSET {1}'.format(limit, (params['pageno'] - 1) * limit)
|
||||
|
||||
with _connection.cursor() as cur:
|
||||
cur.execute(query_to_run, query_params)
|
||||
|
||||
return _fetch_results(cur)
|
||||
|
||||
|
||||
def _fetch_results(cur):
|
||||
results = []
|
||||
for res in cur:
|
||||
result = dict(zip(cur.column_names, map(str, res)))
|
||||
result['template'] = result_template
|
||||
results.append(result)
|
||||
|
||||
return results
|
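A hedged sketch of the final statement the engine runs for page 2, given the example query_str from settings.yml; the user's query itself is bound separately via the %(query)s placeholder:

    query_str = 'SELECT * from mytable WHERE fieldname=%(query)s'
    limit, pageno = 10, 2
    print(query_str + ' LIMIT {0} OFFSET {1}'.format(limit, (pageno - 1) * limit))
    # -> SELECT * from mytable WHERE fieldname=%(query)s LIMIT 10 OFFSET 10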
|
@ -0,0 +1,69 @@
|
|||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
"""
|
||||
PostgreSQL database (Offline)
|
||||
"""
|
||||
|
||||
# the import error is ignored because the admin has to
|
||||
# install the dependency manually to use the engine
|
||||
# pylint: disable=import-error
|
||||
|
||||
import psycopg2
|
||||
|
||||
engine_type = 'offline'
|
||||
host = "127.0.0.1"
|
||||
port = "5432"
|
||||
database = ""
|
||||
username = ""
|
||||
password = ""
|
||||
query_str = ""
|
||||
limit = 10
|
||||
paging = True
|
||||
result_template = 'key-value.html'
|
||||
_connection = None
|
||||
|
||||
|
||||
def init(engine_settings):
|
||||
if 'query_str' not in engine_settings:
|
||||
raise ValueError('query_str cannot be empty')
|
||||
|
||||
if not engine_settings['query_str'].lower().startswith('select '):
|
||||
raise ValueError('only SELECT queries are supported')
|
||||
|
||||
global _connection
|
||||
_connection = psycopg2.connect(
|
||||
database=database,
|
||||
user=username,
|
||||
password=password,
|
||||
host=host,
|
||||
port=port,
|
||||
)
|
||||
|
||||
|
||||
def search(query, params):
|
||||
query_params = {'query': query}
|
||||
query_to_run = query_str + ' LIMIT {0} OFFSET {1}'.format(limit, (params['pageno'] - 1) * limit)
|
||||
|
||||
with _connection:
|
||||
with _connection.cursor() as cur:
|
||||
cur.execute(query_to_run, query_params)
|
||||
|
||||
return _fetch_results(cur)
|
||||
|
||||
|
||||
def _fetch_results(cur):
|
||||
results = []
|
||||
titles = []
|
||||
|
||||
try:
|
||||
titles = [column_desc.name for column_desc in cur.description]
|
||||
|
||||
for res in cur:
|
||||
result = dict(zip(titles, map(str, res)))
|
||||
result['template'] = result_template
|
||||
results.append(result)
|
||||
|
||||
# no results to fetch
|
||||
except psycopg2.ProgrammingError:
|
||||
pass
|
||||
|
||||
return results
|
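A minimal sketch of what _fetch_results() does with a single cursor row; the column names are invented, the real ones come from cur.description:

    titles = ['id', 'title']                      # hypothetical column names
    row = (1, 'searx')                            # hypothetical row
    result = dict(zip(titles, map(str, row)))
    result['template'] = 'key-value.html'
    print(result)
    # -> {'id': '1', 'title': 'searx', 'template': 'key-value.html'}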
|
@ -0,0 +1,92 @@
|
|||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
"""Słownik Języka Polskiego (general)
|
||||
|
||||
"""
|
||||
|
||||
from lxml.html import fromstring
|
||||
from searx import logger
|
||||
from searx.utils import extract_text
|
||||
from searx.raise_for_httperror import raise_for_httperror
|
||||
|
||||
logger = logger.getChild('sjp engine')
|
||||
|
||||
# about
|
||||
about = {
|
||||
"website": 'https://sjp.pwn.pl',
|
||||
"wikidata_id": 'Q55117369',
|
||||
"official_api_documentation": None,
|
||||
"use_official_api": False,
|
||||
"require_api_key": False,
|
||||
"results": 'HTML',
|
||||
}
|
||||
|
||||
categories = ['general']
|
||||
paging = False
|
||||
|
||||
URL = 'https://sjp.pwn.pl'
|
||||
SEARCH_URL = URL + '/szukaj/{query}.html'
|
||||
|
||||
word_xpath = '//div[@class="query"]'
|
||||
dict_xpath = ['//div[@class="wyniki sjp-so-wyniki sjp-so-anchor"]',
|
||||
'//div[@class="wyniki sjp-wyniki sjp-anchor"]',
|
||||
'//div[@class="wyniki sjp-doroszewski-wyniki sjp-doroszewski-anchor"]']
|
||||
|
||||
|
||||
def request(query, params):
|
||||
params['url'] = SEARCH_URL.format(query=query)
|
||||
logger.debug(f"query_url --> {params['url']}")
|
||||
return params
|
||||
|
||||
|
||||
def response(resp):
|
||||
results = []
|
||||
|
||||
raise_for_httperror(resp)
|
||||
dom = fromstring(resp.text)
|
||||
word = extract_text(dom.xpath(word_xpath))
|
||||
|
||||
definitions = []
|
||||
|
||||
for dict_src in dict_xpath:
|
||||
for src in dom.xpath(dict_src):
|
||||
src_text = extract_text(src.xpath('.//span[@class="entry-head-title"]/text()')).strip()
|
||||
|
||||
src_defs = []
|
||||
for def_item in src.xpath('.//div[contains(@class, "ribbon-element")]'):
|
||||
if def_item.xpath('./div[@class="znacz"]'):
|
||||
sub_defs = []
|
||||
for def_sub_item in def_item.xpath('./div[@class="znacz"]'):
|
||||
def_sub_text = extract_text(def_sub_item).lstrip('0123456789. ')
|
||||
sub_defs.append(def_sub_text)
|
||||
src_defs.append((word, sub_defs))
|
||||
else:
|
||||
def_text = extract_text(def_item).strip()
|
||||
def_link = def_item.xpath('./span/a/@href')
|
||||
if def_link and 'doroszewski' in def_link[0]:
|
||||
def_text = f"<a href='{def_link[0]}'>{def_text}</a>"
|
||||
src_defs.append((def_text, ''))
|
||||
|
||||
definitions.append((src_text, src_defs))
|
||||
|
||||
if not definitions:
|
||||
return results
|
||||
|
||||
infobox = ''
|
||||
for src in definitions:
|
||||
infobox += f"<div><small>{src[0]}</small>"
|
||||
infobox += "<ul>"
|
||||
for (def_text, sub_def) in src[1]:
|
||||
infobox += f"<li>{def_text}</li>"
|
||||
if sub_def:
|
||||
infobox += "<ol>"
|
||||
for sub_def_text in sub_def:
|
||||
infobox += f"<li>{sub_def_text}</li>"
|
||||
infobox += "</ol>"
|
||||
infobox += "</ul></div>"
|
||||
|
||||
results.append({
|
||||
'infobox': word,
|
||||
'content': infobox,
|
||||
})
|
||||
|
||||
return results
|
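A hedged illustration of the nested structure the sjp response() parser accumulates before rendering the infobox; all strings are invented placeholders:

    definitions = [
        ('source heading', [                           # one tuple per matched dictionary
            ('word', ['first sense', 'second sense']), # entry with numbered sub-definitions
            ('plain definition text', ''),             # entry without sub-definitions
        ]),
    ]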
|
@ -45,7 +45,7 @@ def response(resp):
|
|||
'seed': result["swarm"]["seeders"],
|
||||
'leech': result["swarm"]["leechers"],
|
||||
'title': result["title"],
|
||||
'link': "https://solidtorrents.net/view/" + result["_id"],
|
||||
'url': "https://solidtorrents.net/view/" + result["_id"],
|
||||
'filesize': result["size"],
|
||||
'magnetlink': result["magnet"],
|
||||
'template': "torrent.html",
|
||||
|
|
|
@ -0,0 +1,77 @@
|
|||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
"""Wordnik (general)
|
||||
|
||||
"""
|
||||
|
||||
from lxml.html import fromstring
|
||||
from searx import logger
|
||||
from searx.utils import extract_text
|
||||
from searx.raise_for_httperror import raise_for_httperror
|
||||
|
||||
logger = logger.getChild('Wordnik engine')
|
||||
|
||||
# about
|
||||
about = {
|
||||
"website": 'https://www.wordnik.com',
|
||||
"wikidata_id": 'Q8034401',
|
||||
"official_api_documentation": None,
|
||||
"use_official_api": False,
|
||||
"require_api_key": False,
|
||||
"results": 'HTML',
|
||||
}
|
||||
|
||||
categories = ['general']
|
||||
paging = False
|
||||
|
||||
URL = 'https://www.wordnik.com'
|
||||
SEARCH_URL = URL + '/words/{query}'
|
||||
|
||||
|
||||
def request(query, params):
|
||||
params['url'] = SEARCH_URL.format(query=query)
|
||||
logger.debug(f"query_url --> {params['url']}")
|
||||
return params
|
||||
|
||||
|
||||
def response(resp):
|
||||
results = []
|
||||
|
||||
raise_for_httperror(resp)
|
||||
dom = fromstring(resp.text)
|
||||
word = extract_text(dom.xpath('//*[@id="headword"]/text()'))
|
||||
|
||||
definitions = []
|
||||
for src in dom.xpath('//*[@id="define"]//h3[@class="source"]'):
|
||||
src_text = extract_text(src).strip()
|
||||
if src_text.startswith('from '):
|
||||
src_text = src_text[5:]
|
||||
|
||||
src_defs = []
|
||||
for def_item in src.xpath('following-sibling::ul[1]/li'):
|
||||
def_abbr = extract_text(def_item.xpath('.//abbr')).strip()
|
||||
def_text = extract_text(def_item).strip()
|
||||
if def_abbr:
|
||||
def_text = def_text[len(def_abbr):].strip()
|
||||
src_defs.append((def_abbr, def_text))
|
||||
|
||||
definitions.append((src_text, src_defs))
|
||||
|
||||
if not definitions:
|
||||
return results
|
||||
|
||||
infobox = ''
|
||||
for src_text, src_defs in definitions:
|
||||
infobox += f"<small>{src_text}</small>"
|
||||
infobox += "<ul>"
|
||||
for def_abbr, def_text in src_defs:
|
||||
if def_abbr:
|
||||
def_abbr += ": "
|
||||
infobox += f"<li><i>{def_abbr}</i> {def_text}</li>"
|
||||
infobox += "</ul>"
|
||||
|
||||
results.append({
|
||||
'infobox': word,
|
||||
'content': infobox,
|
||||
})
|
||||
|
||||
return results
|
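A small sketch of the abbreviation-stripping step in the parser above, with invented strings:

    def_abbr = 'noun'
    def_text = 'noun A domesticated carnivore.'
    def_text = def_text[len(def_abbr):].strip()
    print(def_text)
    # -> A domesticated carnivore.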
|
@ -3,6 +3,7 @@
|
|||
Youtube (Videos)
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from functools import reduce
|
||||
from json import loads, dumps
|
||||
from urllib.parse import quote_plus
|
||||
|
@ -56,6 +57,7 @@ def request(query, params):
|
|||
})
|
||||
params['headers']['Content-Type'] = 'application/json'
|
||||
|
||||
params['headers']['Cookie'] = "CONSENT=YES+cb.%s-17-p0.en+F+941;" % datetime.now().strftime("%Y%m%d")
|
||||
return params
|
||||
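A hedged example of the CONSENT cookie value the added line produces, pinned to a fixed date for reproducibility:

    from datetime import datetime

    print("CONSENT=YES+cb.%s-17-p0.en+F+941;" % datetime(2021, 4, 12).strftime("%Y%m%d"))
    # -> CONSENT=YES+cb.20210412-17-p0.en+F+941;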
|
||||
|
||||
|
|
|
@ -32,6 +32,7 @@ from searx.plugins import (oa_doi_rewrite,
|
|||
https_rewrite,
|
||||
infinite_scroll,
|
||||
self_info,
|
||||
hostname_replace,
|
||||
search_on_category_select,
|
||||
tracker_url_remover,
|
||||
vim_hotkeys)
|
||||
|
@ -168,6 +169,7 @@ plugins.register(hash_plugin)
|
|||
plugins.register(https_rewrite)
|
||||
plugins.register(infinite_scroll)
|
||||
plugins.register(self_info)
|
||||
plugins.register(hostname_replace)
|
||||
plugins.register(search_on_category_select)
|
||||
plugins.register(tracker_url_remover)
|
||||
plugins.register(vim_hotkeys)
|
||||
|
|
|
@ -0,0 +1,28 @@
|
|||
import re
|
||||
from urllib.parse import urlunparse
|
||||
from searx import settings
|
||||
from searx.plugins import logger
|
||||
from flask_babel import gettext
|
||||
|
||||
name = gettext('Hostname replace')
|
||||
description = gettext('Rewrite result hostnames')
|
||||
default_on = False
|
||||
preference_section = 'general'
|
||||
|
||||
plugin_id = 'hostname_replace'
|
||||
parsed = 'parsed_url'
|
||||
|
||||
replacements = {re.compile(p): r for (p, r) in settings[plugin_id].items()} if plugin_id in settings else {}
|
||||
|
||||
logger = logger.getChild(plugin_id)
|
||||
|
||||
|
||||
def on_result(request, search, result):
|
||||
if parsed not in result:
|
||||
return True
|
||||
for (pattern, replacement) in replacements.items():
|
||||
if pattern.search(result[parsed].netloc):
|
||||
result[parsed] = result[parsed]._replace(netloc=pattern.sub(replacement, result[parsed].netloc))
|
||||
result['url'] = urlunparse(result[parsed])
|
||||
|
||||
return True
|
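A minimal sketch of the rewrite on_result() performs, using one of the example patterns shown for settings.yml further below:

    import re

    pattern = re.compile(r'(.*\.)?youtube\.com$')
    print(pattern.sub('invidious.example.com', 'www.youtube.com'))
    # -> invidious.example.com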
|
@ -29,7 +29,7 @@ def get_doi_resolver(args, preference_doi_resolver):
|
|||
doi_resolvers = settings['doi_resolvers']
|
||||
doi_resolver = args.get('doi_resolver', preference_doi_resolver)[0]
|
||||
if doi_resolver not in doi_resolvers:
|
||||
doi_resolvers = settings['default_doi_resolver']
|
||||
doi_resolver = settings['default_doi_resolver']
|
||||
doi_resolver_url = doi_resolvers[doi_resolver]
|
||||
return doi_resolver_url
|
||||
|
||||
|
@ -40,7 +40,7 @@ def on_result(request, search, result):
|
|||
|
||||
doi = extract_doi(result['parsed_url'])
|
||||
if doi and len(doi) < 50:
|
||||
for suffix in ('/', '.pdf', '/full', '/meta', '/abstract'):
|
||||
for suffix in ('/', '.pdf', '.xml', '/full', '/meta', '/abstract'):
|
||||
if doi.endswith(suffix):
|
||||
doi = doi[:-len(suffix)]
|
||||
result['url'] = get_doi_resolver(request.args, request.preferences.get_value('doi_resolver')) + doi
|
||||
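A short demo of the suffix stripping above, including the newly added '.xml' case; the DOI is invented:

    doi = '10.1000/xyz123.xml'
    for suffix in ('/', '.pdf', '.xml', '/full', '/meta', '/abstract'):
        if doi.endswith(suffix):
            doi = doi[:-len(suffix)]
    print(doi)
    # -> 10.1000/xyz123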
|
|
|
@ -21,6 +21,7 @@ from urllib.parse import urlunparse, parse_qsl, urlencode
|
|||
|
||||
regexes = {re.compile(r'utm_[^&]+'),
|
||||
re.compile(r'(wkey|wemail)[^&]*'),
|
||||
re.compile(r'(_hsenc|_hsmi|hsCtaTracking|__hssc|__hstc|__hsfp)[^&]*'),
|
||||
re.compile(r'&$')}
|
||||
|
||||
name = gettext('Tracker URL remover')
|
||||
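A hedged demo of the HubSpot tracking-parameter pattern added above; the parameter value is invented:

    import re

    pattern = re.compile(r'(_hsenc|_hsmi|hsCtaTracking|__hssc|__hstc|__hsfp)[^&]*')
    print(bool(pattern.match('_hsenc=p2ANqtz')))
    # -> True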
|
|
|
@ -387,7 +387,7 @@ class Preferences:
|
|||
}
|
||||
),
|
||||
'doi_resolver': MultipleChoiceSetting(
|
||||
['oadoi.org'],
|
||||
[settings['default_doi_resolver'], ],
|
||||
is_locked('doi_resolver'),
|
||||
choices=DOI_RESOLVERS
|
||||
),
|
||||
|
|
|
@ -104,6 +104,18 @@ outgoing: # communication with search engines
|
|||
# - "HTTPS rewrite"
|
||||
# - ...
|
||||
|
||||
# Example to rewrite hostnames in external links
|
||||
#
|
||||
# enabled_plugins:
|
||||
# - 'Hostname replace'
|
||||
# hostname_replace:
|
||||
# '(.*\.)?youtube\.com$': 'invidious.example.com'
|
||||
# '(.*\.)?youtu\.be$': 'invidious.example.com'
|
||||
# '(.*\.)?youtube-nocookie\.com$': 'yotter.example.com'
|
||||
# '(.*\.)?reddit\.com$': 'teddit.example.com'
|
||||
# '(.*\.)?redd\.it$': 'teddit.example.com'
|
||||
# '(www\.)?twitter\.com$': 'nitter.example.com'
|
||||
|
||||
checker:
|
||||
# disable checker when in debug mode
|
||||
off_when_debug: True
|
||||
|
@ -197,6 +209,11 @@ engines:
|
|||
# engine : base
|
||||
# shortcut : bs
|
||||
|
||||
- name: bandcamp
|
||||
engine: bandcamp
|
||||
shortcut: bc
|
||||
categories: music
|
||||
|
||||
- name : wikipedia
|
||||
engine : wikipedia
|
||||
shortcut : wp
|
||||
|
@ -700,6 +717,13 @@ engines:
|
|||
require_api_key: false
|
||||
results: HTML
|
||||
|
||||
# - name : meilisearch
|
||||
# engine : meilisearch
|
||||
# shortcut: mes
|
||||
# enable_http: True
|
||||
# base_url : http://localhost:7700
|
||||
# index : my-index
|
||||
|
||||
- name : microsoft academic
|
||||
engine : microsoft_academic
|
||||
categories : science
|
||||
|
@ -828,6 +852,16 @@ engines:
|
|||
url: https://thepiratebay.org/
|
||||
timeout : 3.0
|
||||
|
||||
# Required dependency: psycopg2
|
||||
# - name : postgresql
|
||||
# engine : postgresql
|
||||
# database : postgres
|
||||
# username : postgres
|
||||
# password : postgres
|
||||
# limit : 10
|
||||
# query_str : 'SELECT * from my_table WHERE my_column = %(query)s'
|
||||
# shortcut : psql
|
||||
|
||||
- name : pubmed
|
||||
engine : pubmed
|
||||
shortcut : pub
|
||||
|
@ -1160,6 +1194,16 @@ engines:
|
|||
# See : http://mymemory.translated.net/doc/usagelimits.php
|
||||
# api_key : ''
|
||||
|
||||
# Required dependency: mysql-connector-python
|
||||
# - name : mysql
|
||||
# engine : mysql_server
|
||||
# database : mydatabase
|
||||
# username : user
|
||||
# password : pass
|
||||
# limit : 10
|
||||
# query_str : 'SELECT * from mytable WHERE fieldname=%(query)s'
|
||||
# shortcut : mysql
|
||||
|
||||
- name : 1337x
|
||||
engine : 1337x
|
||||
shortcut : 1337x
|
||||
|
@ -1264,6 +1308,22 @@ engines:
|
|||
categories: videos
|
||||
disabled : True
|
||||
|
||||
- name: wordnik
|
||||
engine: wordnik
|
||||
shortcut: def
|
||||
base_url: https://www.wordnik.com/
|
||||
categories: general
|
||||
timeout: 5.0
|
||||
disabled: True
|
||||
|
||||
- name: słownik języka polskiego
|
||||
engine: sjp
|
||||
shortcut: sjp
|
||||
base_url: https://sjp.pwn.pl/
|
||||
categories: general
|
||||
timeout: 5.0
|
||||
disabled: True
|
||||
|
||||
# The Doku engine lets you access any DokuWiki instance:
|
||||
# a public one or a private/corporate one.
|
||||
# - name : ubuntuwiki
|
||||
|
@ -1400,5 +1460,4 @@ doi_resolvers :
|
|||
sci-hub.bar : 'https://sci-hub.bar/'
|
||||
sci-hub.it.nf : 'https://sci-hub.it.nf/'
|
||||
|
||||
|
||||
default_doi_resolver : 'sci-hub.do'
|
||||
|
|
|
@ -57,7 +57,7 @@ def update_settings(default_settings, user_settings):
|
|||
# merge everything except the engines
|
||||
for k, v in user_settings.items():
|
||||
if k not in ('use_default_settings', 'engines'):
|
||||
if k in default_settings:
|
||||
if k in default_settings and isinstance(v, Mapping):
|
||||
update_dict(default_settings[k], v)
|
||||
else:
|
||||
default_settings[k] = v
|
||||
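A simplified sketch of what the isinstance(v, Mapping) guard changes: a scalar user value now replaces the default outright instead of being handed to update_dict(). This stand-in omits the 'engines' and 'use_default_settings' special cases:

    from collections.abc import Mapping

    def merge(default, user):
        for k, v in user.items():
            if k in default and isinstance(v, Mapping):
                default[k].update(v)   # the real code recurses via update_dict()
            else:
                default[k] = v
        return default

    print(merge({'server': {'port': 8888}, 'debug': False},
                {'server': {'port': 7777}, 'debug': True}))
    # -> {'server': {'port': 7777}, 'debug': True}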
|
|
|
@ -3,21 +3,21 @@
|
|||
* Google Image Layout v0.0.1
|
||||
* Description, by Anh Trinh.
|
||||
* Heavily modified for searx
|
||||
* http://trinhtrunganh.com
|
||||
* https://ptgamr.github.io/2014-09-12-google-image-layout/
|
||||
* https://ptgamr.github.io/google-image-layout/src/google-image-layout.js
|
||||
*
|
||||
* @license Free to use under the MIT License.
|
||||
*
|
||||
*/
|
||||
(function(w, d) {
|
||||
'use strict';
|
||||
|
||||
function ImageLayout(container_selector, results_selector, img_selector, maxHeight) {
|
||||
|
||||
(function (w, d) {
|
||||
function ImageLayout(container_selector, results_selector, img_selector, margin, maxHeight) {
|
||||
this.container_selector = container_selector;
|
||||
this.results_selector = results_selector;
|
||||
this.img_selector = img_selector;
|
||||
this.margin = 10;
|
||||
this.margin = margin;
|
||||
this.maxHeight = maxHeight;
|
||||
this._alignAllDone = true;
|
||||
this.isAlignDone = true;
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -31,12 +31,11 @@
|
|||
*
|
||||
* @return {[type]} the height
|
||||
*/
|
||||
ImageLayout.prototype._getHeigth = function(images, width) {
|
||||
var r = 0,
|
||||
img;
|
||||
ImageLayout.prototype._getHeigth = function (images, width) {
|
||||
var i, img;
|
||||
var r = 0;
|
||||
|
||||
width -= images.length * this.margin;
|
||||
for (var i = 0; i < images.length; i++) {
|
||||
for (i = 0; i < images.length; i++) {
|
||||
img = images[i];
|
||||
if ((img.naturalWidth > 0) && (img.naturalHeight > 0)) {
|
||||
r += img.naturalWidth / img.naturalHeight;
|
||||
|
@ -46,12 +45,14 @@
|
|||
}
|
||||
}
|
||||
|
||||
return width / r; //have to round down because Firefox will automatically roundup value with number of decimals > 3
|
||||
return (width - images.length * this.margin) / r; // have to round down because Firefox will automatically round up values with more than 3 decimals
|
||||
};
|
||||
|
||||
ImageLayout.prototype._setSize = function(images, height) {
|
||||
var img, imgWidth, imagesLength = images.length;
|
||||
for (var i = 0; i < imagesLength; i++) {
|
||||
ImageLayout.prototype._setSize = function (images, height) {
|
||||
var i, img, imgWidth;
|
||||
var imagesLength = images.length, resultNode;
|
||||
|
||||
for (i = 0; i < imagesLength; i++) {
|
||||
img = images[i];
|
||||
if ((img.naturalWidth > 0) && (img.naturalHeight > 0)) {
|
||||
imgWidth = height * img.naturalWidth / img.naturalHeight;
|
||||
|
@ -65,38 +66,52 @@
|
|||
img.style.marginTop = '3px';
|
||||
img.style.marginRight = this.margin - 7 + 'px'; // -4 is the negative margin of the inline element
|
||||
img.style.marginBottom = this.margin - 7 + 'px';
|
||||
resultNode = img.parentNode.parentNode;
|
||||
if (!resultNode.classList.contains('js')) {
|
||||
resultNode.classList.add('js');
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
ImageLayout.prototype._alignImgs = function(imgGroup) {
|
||||
var slice, h,
|
||||
containerWidth = d.querySelector(this.container_selector).clientWidth;
|
||||
ImageLayout.prototype._alignImgs = function (imgGroup) {
|
||||
var isSearching, slice, i, h;
|
||||
var containerElement = d.querySelector(this.container_selector);
|
||||
var containerCompStyles = window.getComputedStyle(containerElement);
|
||||
var containerPaddingLeft = parseInt(containerCompStyles.getPropertyValue('padding-left'), 10);
|
||||
var containerPaddingRight = parseInt(containerCompStyles.getPropertyValue('padding-right'), 10);
|
||||
var containerWidth = containerElement.clientWidth - containerPaddingLeft - containerPaddingRight;
|
||||
|
||||
w: while (imgGroup.length > 0) {
|
||||
for (var i = 1; i <= imgGroup.length; i++) {
|
||||
while (imgGroup.length > 0) {
|
||||
isSearching = true;
|
||||
for (i = 1; i <= imgGroup.length && isSearching; i++) {
|
||||
slice = imgGroup.slice(0, i);
|
||||
h = this._getHeigth(slice, containerWidth);
|
||||
if (h < this.maxHeight) {
|
||||
this._setSize(slice, h);
|
||||
// continue with the remaining images
|
||||
imgGroup = imgGroup.slice(i);
|
||||
continue w;
|
||||
isSearching = false;
|
||||
}
|
||||
}
|
||||
this._setSize(slice, Math.min(this.maxHeight, h));
|
||||
break;
|
||||
if (isSearching) {
|
||||
this._setSize(slice, Math.min(this.maxHeight, h));
|
||||
break;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
ImageLayout.prototype.align = function(results_selector) {
|
||||
var results_selectorNode = d.querySelectorAll(this.results_selector),
|
||||
results_length = results_selectorNode.length,
|
||||
previous = null,
|
||||
current = null,
|
||||
imgGroup = [];
|
||||
for (var i = 0; i < results_length; i++) {
|
||||
ImageLayout.prototype.align = function () {
|
||||
var i;
|
||||
var results_selectorNode = d.querySelectorAll(this.results_selector);
|
||||
var results_length = results_selectorNode.length;
|
||||
var previous = null;
|
||||
var current = null;
|
||||
var imgGroup = [];
|
||||
|
||||
for (i = 0; i < results_length; i++) {
|
||||
current = results_selectorNode[i];
|
||||
if (current.previousElementSibling !== previous && imgGroup.length > 0) {
|
||||
// the current image is not conected to previous one
|
||||
// the current image is not connected to previous one
|
||||
// so the current image is the start of a new group of images.
|
||||
// so call _alignImgs to align the current group
|
||||
this._alignImgs(imgGroup);
|
||||
|
@ -114,32 +129,29 @@
|
|||
}
|
||||
};
|
||||
|
||||
ImageLayout.prototype.watch = function() {
|
||||
var i, img, imgGroup, imgNodeLength,
|
||||
obj = this,
|
||||
results_nodes = d.querySelectorAll(this.results_selector),
|
||||
results_length = results_nodes.length;
|
||||
ImageLayout.prototype.watch = function () {
|
||||
var i, img;
|
||||
var obj = this;
|
||||
var results_nodes = d.querySelectorAll(this.results_selector);
|
||||
var results_length = results_nodes.length;
|
||||
|
||||
function align(e) {
|
||||
obj.align();
|
||||
}
|
||||
|
||||
function throttleAlign(e) {
|
||||
if (obj._alignAllDone) {
|
||||
obj._alignAllDone = false;
|
||||
setTimeout(function() {
|
||||
function throttleAlign() {
|
||||
if (obj.isAlignDone) {
|
||||
obj.isAlignDone = false;
|
||||
setTimeout(function () {
|
||||
obj.align();
|
||||
obj._alignAllDone = true;
|
||||
obj.isAlignDone = true;
|
||||
}, 100);
|
||||
}
|
||||
}
|
||||
|
||||
w.addEventListener('pageshow', throttleAlign);
|
||||
w.addEventListener('load', throttleAlign);
|
||||
w.addEventListener('resize', throttleAlign);
|
||||
w.addEventListener('pageshow', align);
|
||||
|
||||
for (i = 0; i < results_length; i++) {
|
||||
img = results_nodes[i].querySelector(this.img_selector);
|
||||
if (typeof img !== 'undefined') {
|
||||
if (img !== null && img !== undefined) {
|
||||
img.addEventListener('load', throttleAlign);
|
||||
img.addEventListener('error', throttleAlign);
|
||||
}
|
||||
|
@ -148,4 +160,4 @@
|
|||
|
||||
w.searx.ImageLayout = ImageLayout;
|
||||
|
||||
})(window, document);
|
||||
}(window, document));
|
|
@ -202,14 +202,39 @@ input[type=checkbox]:not(:checked) + .label_hide_if_checked + .label_hide_if_not
|
|||
}
|
||||
.result-images {
|
||||
float: left !important;
|
||||
width: 24%;
|
||||
margin: 0.5%;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
.result-images a {
|
||||
display: block;
|
||||
width: 100%;
|
||||
background-size: cover;
|
||||
}
|
||||
.result-images a .img-thumbnail {
|
||||
border: none !important;
|
||||
padding: 0;
|
||||
}
|
||||
.result-images a:hover,
|
||||
.result-images a:focus {
|
||||
outline: 0;
|
||||
}
|
||||
.result-images a:hover .img-thumbnail,
|
||||
.result-images a:focus .img-thumbnail {
|
||||
box-shadow: 5px 5px 15px 0px black;
|
||||
}
|
||||
.result-images.js a .img-thumbnail {
|
||||
max-height: inherit;
|
||||
min-height: inherit;
|
||||
}
|
||||
.result-images:not(.js) {
|
||||
width: 25%;
|
||||
padding: 3px 13px 13px 3px;
|
||||
}
|
||||
.result-images:not(.js) a .img-thumbnail {
|
||||
margin: 0;
|
||||
max-height: 128px;
|
||||
min-height: 128px;
|
||||
}
|
||||
.img-thumbnail {
|
||||
margin: 5px;
|
||||
max-height: 128px;
|
||||
|
@ -341,6 +366,10 @@ input[type=checkbox]:not(:checked) + .label_hide_if_checked + .label_hide_if_not
|
|||
width: 100%;
|
||||
text-align: center;
|
||||
margin-bottom: 0px;
|
||||
cursor: pointer;
|
||||
}
|
||||
.infobox .infobox_toggle:hover {
|
||||
background: #DDD;
|
||||
}
|
||||
.infobox .infobox_checkbox ~ .infobox_body {
|
||||
max-height: 300px;
|
||||
|
|
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
|
@ -175,14 +175,39 @@ input[type=checkbox]:not(:checked) + .label_hide_if_checked + .label_hide_if_not
|
|||
}
|
||||
.result-images {
|
||||
float: left !important;
|
||||
width: 24%;
|
||||
margin: 0.5%;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
.result-images a {
|
||||
display: block;
|
||||
width: 100%;
|
||||
background-size: cover;
|
||||
}
|
||||
.result-images a .img-thumbnail {
|
||||
border: none !important;
|
||||
padding: 0;
|
||||
}
|
||||
.result-images a:hover,
|
||||
.result-images a:focus {
|
||||
outline: 0;
|
||||
}
|
||||
.result-images a:hover .img-thumbnail,
|
||||
.result-images a:focus .img-thumbnail {
|
||||
box-shadow: 5px 5px 15px 0px black;
|
||||
}
|
||||
.result-images.js a .img-thumbnail {
|
||||
max-height: inherit;
|
||||
min-height: inherit;
|
||||
}
|
||||
.result-images:not(.js) {
|
||||
width: 25%;
|
||||
padding: 3px 13px 13px 3px;
|
||||
}
|
||||
.result-images:not(.js) a .img-thumbnail {
|
||||
margin: 0;
|
||||
max-height: 128px;
|
||||
min-height: 128px;
|
||||
}
|
||||
.img-thumbnail {
|
||||
margin: 5px;
|
||||
max-height: 128px;
|
||||
|
@ -314,6 +339,10 @@ input[type=checkbox]:not(:checked) + .label_hide_if_checked + .label_hide_if_not
|
|||
width: 100%;
|
||||
text-align: center;
|
||||
margin-bottom: 0px;
|
||||
cursor: pointer;
|
||||
}
|
||||
.infobox .infobox_toggle:hover {
|
||||
background: #DDD;
|
||||
}
|
||||
.infobox .infobox_checkbox ~ .infobox_body {
|
||||
max-height: 300px;
|
||||
|
@ -991,6 +1020,14 @@ ul.nav li a {
|
|||
background: #1d1f21 !important;
|
||||
border-color: #111 !important;
|
||||
}
|
||||
.panel-footer {
|
||||
color: #C5C8C6 !important;
|
||||
background: #282a2e !important;
|
||||
border-top: 1px solid #111 !important;
|
||||
}
|
||||
.infobox_toggle:hover {
|
||||
background: #3d3f43 !important;
|
||||
}
|
||||
p.btn.btn-default {
|
||||
background: none;
|
||||
}
|
||||
|
|
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
|
@ -60,7 +60,7 @@ module.exports = function(grunt) {
|
|||
separator: ';'
|
||||
},
|
||||
dist: {
|
||||
src: ['src/js/*.js'],
|
||||
src: ['src/js/*.js', '../__common__/js/image_layout.js'],
|
||||
dest: 'js/searx.js'
|
||||
}
|
||||
},
|
||||
|
@ -76,7 +76,7 @@ module.exports = function(grunt) {
|
|||
}
|
||||
},
|
||||
jshint: {
|
||||
files: ['gruntfile.js', 'js/searx_src/*.js'],
|
||||
files: ['gruntfile.js', 'js/searx_src/*.js', '../__common__/js/image_layout.js'],
|
||||
options: {
|
||||
reporterOutput: "",
|
||||
// options here to override JSHint defaults
|
||||
|
|
Binary file not shown.
After | Size: 919 B
|
@ -17,6 +17,9 @@
|
|||
window.searx = (function(d) {
|
||||
'use strict';
|
||||
|
||||
//
|
||||
d.getElementsByTagName("html")[0].className = "js";
|
||||
|
||||
// add data- properties
|
||||
var script = d.currentScript || (function() {
|
||||
var scripts = d.getElementsByTagName('script');
|
||||
|
@ -199,6 +202,12 @@ $(document).ready(function(){
|
|||
tabs.children().attr("aria-selected", "false");
|
||||
$(a.target).parent().attr("aria-selected", "true");
|
||||
});
|
||||
|
||||
/**
|
||||
* Layout images according to their sizes
|
||||
*/
|
||||
searx.image_thumbnail_layout = new searx.ImageLayout('#main_results', '#main_results .result-images', 'img.img-thumbnail', 15, 200);
|
||||
searx.image_thumbnail_layout.watch();
|
||||
});
|
||||
;window.addEventListener('load', function() {
|
||||
// Hide infobox toggle if shrunk size already fits all content.
|
||||
|
@@ -383,3 +392,166 @@ $(document).ready(function(){
  });
});

;/**
 *
 * Google Image Layout v0.0.1
 * Description, by Anh Trinh.
 * Heavily modified for searx
 * https://ptgamr.github.io/2014-09-12-google-image-layout/
 * https://ptgamr.github.io/google-image-layout/src/google-image-layout.js
 *
 * @license Free to use under the MIT License.
 *
 */

(function (w, d) {
  function ImageLayout(container_selector, results_selector, img_selector, margin, maxHeight) {
    this.container_selector = container_selector;
    this.results_selector = results_selector;
    this.img_selector = img_selector;
    this.margin = margin;
    this.maxHeight = maxHeight;
    this.isAlignDone = true;
  }

  /**
   * Get the height that makes all images fit the container
   *
   * width = w1 + w2 + w3 + ... = r1*h + r2*h + r3*h + ...
   *
   * @param {[type]} images the images to be calculated
   * @param {[type]} width the container width
   * @param {[type]} margin the margin between each image
   *
   * @return {[type]} the height
   */
  ImageLayout.prototype._getHeigth = function (images, width) {
    var i, img;
    var r = 0;

    for (i = 0; i < images.length; i++) {
      img = images[i];
      if ((img.naturalWidth > 0) && (img.naturalHeight > 0)) {
        r += img.naturalWidth / img.naturalHeight;
      } else {
        // assume that not loaded images are square
        r += 1;
      }
    }

    return (width - images.length * this.margin) / r; // have to round down because Firefox will automatically round up values with more than 3 decimals
  };

  ImageLayout.prototype._setSize = function (images, height) {
    var i, img, imgWidth;
    var imagesLength = images.length, resultNode;

    for (i = 0; i < imagesLength; i++) {
      img = images[i];
      if ((img.naturalWidth > 0) && (img.naturalHeight > 0)) {
        imgWidth = height * img.naturalWidth / img.naturalHeight;
      } else {
        // not loaded image: make it square, as _getHeigth assumed
        imgWidth = height;
      }
      img.style.width = imgWidth + 'px';
      img.style.height = height + 'px';
      img.style.marginLeft = '3px';
      img.style.marginTop = '3px';
      img.style.marginRight = this.margin - 7 + 'px'; // -4 is the negative margin of the inline element
      img.style.marginBottom = this.margin - 7 + 'px';
      resultNode = img.parentNode.parentNode;
      if (!resultNode.classList.contains('js')) {
        resultNode.classList.add('js');
      }
    }
  };

  ImageLayout.prototype._alignImgs = function (imgGroup) {
    var isSearching, slice, i, h;
    var containerElement = d.querySelector(this.container_selector);
    var containerCompStyles = window.getComputedStyle(containerElement);
    var containerPaddingLeft = parseInt(containerCompStyles.getPropertyValue('padding-left'), 10);
    var containerPaddingRight = parseInt(containerCompStyles.getPropertyValue('padding-right'), 10);
    var containerWidth = containerElement.clientWidth - containerPaddingLeft - containerPaddingRight;

    while (imgGroup.length > 0) {
      isSearching = true;
      for (i = 1; i <= imgGroup.length && isSearching; i++) {
        slice = imgGroup.slice(0, i);
        h = this._getHeigth(slice, containerWidth);
        if (h < this.maxHeight) {
          this._setSize(slice, h);
          // continue with the remaining images
          imgGroup = imgGroup.slice(i);
          isSearching = false;
        }
      }
      if (isSearching) {
        this._setSize(slice, Math.min(this.maxHeight, h));
        break;
      }
    }
  };

  ImageLayout.prototype.align = function () {
    var i;
    var results_selectorNode = d.querySelectorAll(this.results_selector);
    var results_length = results_selectorNode.length;
    var previous = null;
    var current = null;
    var imgGroup = [];

    for (i = 0; i < results_length; i++) {
      current = results_selectorNode[i];
      if (current.previousElementSibling !== previous && imgGroup.length > 0) {
        // the current image is not connected to the previous one,
        // so it is the start of a new group of images:
        // call _alignImgs to align the current group
        this._alignImgs(imgGroup);
        // and start a new empty group of images
        imgGroup = [];
      }
      // add the current image to the group (only the img tag)
      imgGroup.push(current.querySelector(this.img_selector));
      // update the previous variable
      previous = current;
    }
    // align the remaining images
    if (imgGroup.length > 0) {
      this._alignImgs(imgGroup);
    }
  };

  ImageLayout.prototype.watch = function () {
    var i, img;
    var obj = this;
    var results_nodes = d.querySelectorAll(this.results_selector);
    var results_length = results_nodes.length;

    function throttleAlign() {
      if (obj.isAlignDone) {
        obj.isAlignDone = false;
        setTimeout(function () {
          obj.align();
          obj.isAlignDone = true;
        }, 100);
      }
    }

    w.addEventListener('pageshow', throttleAlign);
    w.addEventListener('load', throttleAlign);
    w.addEventListener('resize', throttleAlign);

    for (i = 0; i < results_length; i++) {
      img = results_nodes[i].querySelector(this.img_selector);
      if (img !== null && img !== undefined) {
        img.addEventListener('load', throttleAlign);
        img.addEventListener('error', throttleAlign);
      }
    }
  };

  w.searx.ImageLayout = ImageLayout;

}(window, document));
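Aside (not part of the commit): the row-height formula in _getHeigth above can be sanity-checked with a few lines of Python; the numbers below are invented for illustration.

# Row height so that n images with aspect ratios r_i fill the container:
#   width = r1*h + r2*h + ... + n*margin  =>  h = (width - n*margin) / sum(r_i)
def row_height(ratios, container_width, margin):
    # images that have not loaded yet are assumed square (ratio 1) by the JS above
    return (container_width - len(ratios) * margin) / sum(ratios)

# three images (3:2, 1:1, 3:4) in a 900px container with a 15px margin
print(row_height([1.5, 1.0, 0.75], 900, 15))  # ~263.1px, later capped at maxHeight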
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -17,6 +17,9 @@
window.searx = (function(d) {
  'use strict';

  //
  d.getElementsByTagName("html")[0].className = "js";

  // add data- properties
  var script = d.currentScript || (function() {
    var scripts = d.getElementsByTagName('script');
@@ -108,4 +108,10 @@ $(document).ready(function(){
    tabs.children().attr("aria-selected", "false");
    $(a.target).parent().attr("aria-selected", "true");
  });

  /**
   * Layout images according to their sizes
   */
  searx.image_thumbnail_layout = new searx.ImageLayout('#main_results', '#main_results .result-images', 'img.img-thumbnail', 15, 200);
  searx.image_thumbnail_layout.watch();
});
@@ -132,6 +132,16 @@ ul.nav li a {
  border-color: #111 !important;
}

.panel-footer {
  color: #C5C8C6 !important;
  background: #282a2e !important;
  border-top: 1px solid #111 !important;
}

.infobox_toggle:hover {
  background: #3d3f43 !important;
}

p.btn.btn-default {
  background: none;
}
@@ -50,6 +50,11 @@
  width: 100%;
  text-align: center;
  margin-bottom: 0px;
  cursor: pointer;
}

.infobox_toggle:hover {
  background: @mild-gray;
}

// Shrink infobox size when toggle is off
@@ -77,12 +77,39 @@
// image formatting of results
.result-images {
  float: left !important;
- width: 24%;
- margin: .5%;
+ margin: 0;
+ padding: 0;
  a {
    display: block;
    width: 100%;
    background-size: cover;
    .img-thumbnail {
      border: none !important;
      padding: 0;
    }
    &:hover, &:focus {
      outline: 0;
      .img-thumbnail {
        box-shadow: 5px 5px 15px 0px black;
      }
    }
  }
}

.result-images.js a .img-thumbnail {
  max-height: inherit;
  min-height: inherit;
}

.result-images:not(.js) {
  width: 25%;
  padding: 3px 13px 13px 3px;
  a {
    .img-thumbnail {
      margin: 0;
      max-height: 128px;
      min-height: 128px;
    }
  }
}
@@ -1,4 +1,4 @@
-/*! searx | 16-03-2021 | */
+/*! searx | 23-03-2021 | */
/*
 * searx, A privacy-respecting, hackable metasearch engine
 *
File diff suppressed because one or more lines are too long
@@ -1,4 +1,4 @@
-/*! searx | 16-03-2021 | */
+/*! searx | 23-03-2021 | */
/*
 * searx, A privacy-respecting, hackable metasearch engine
 *
File diff suppressed because one or more lines are too long
@@ -11,7 +11,7 @@ module.exports = function(grunt) {
    }
  },
  jshint: {
-   files: ['js/searx_src/*.js', 'js/searx_header/*.js'],
+   files: ['js/searx_src/*.js', 'js/searx_header/*.js', '../__common__/js/*.js'],
    options: {
      reporterOutput: "",
      proto: true,
@@ -30,7 +30,7 @@ module.exports = function(grunt) {
      },
      files: {
        'js/searx.head.js': ['js/searx_head/*.js'],
-       'js/searx.js': ['js/searx_src/*.js']
+       'js/searx.js': ['js/searx_src/*.js', '../__common__/js/*.js']
      }
    }
  },
@@ -1,4 +1,4 @@
-/*! simple/searx.min.js | 16-03-2021 | */
+/*! simple/searx.min.js | 23-03-2021 | */

(function(t,e){"use strict";var a=e.currentScript||function(){var t=e.getElementsByTagName("script");return t[t.length-1]}();t.searx={touch:"ontouchstart"in t||t.DocumentTouch&&document instanceof DocumentTouch||false,method:a.getAttribute("data-method"),autocompleter:a.getAttribute("data-autocompleter")==="true",search_on_category_select:a.getAttribute("data-search-on-category-select")==="true",infinite_scroll:a.getAttribute("data-infinite-scroll")==="true",static_path:a.getAttribute("data-static-path"),translations:JSON.parse(a.getAttribute("data-translations"))};e.getElementsByTagName("html")[0].className=t.searx.touch?"js touch":"js"})(window,document);
//# sourceMappingURL=searx.head.min.js.map
@@ -698,157 +698,6 @@ module.exports = AutoComplete;

},{}]},{},[1])(1)
});
;/**
 *
 * Google Image Layout v0.0.1
 * Description, by Anh Trinh.
 * Heavily modified for searx
 * http://trinhtrunganh.com
 *
 * @license Free to use under the MIT License.
 *
 */
(function(w, d) {
  'use strict';

  function ImageLayout(container_selector, results_selector, img_selector, maxHeight) {
    this.container_selector = container_selector;
    this.results_selector = results_selector;
    this.img_selector = img_selector;
    this.margin = 10;
    this.maxHeight = maxHeight;
    this._alignAllDone = true;
  }

  /**
   * Get the height that makes all images fit the container
   *
   * width = w1 + w2 + w3 + ... = r1*h + r2*h + r3*h + ...
   *
   * @param {[type]} images the images to be calculated
   * @param {[type]} width the container width
   * @param {[type]} margin the margin between each image
   *
   * @return {[type]} the height
   */
  ImageLayout.prototype._getHeigth = function(images, width) {
    var r = 0,
      img;

    width -= images.length * this.margin;
    for (var i = 0; i < images.length; i++) {
      img = images[i];
      if ((img.naturalWidth > 0) && (img.naturalHeight > 0)) {
        r += img.naturalWidth / img.naturalHeight;
      } else {
        // assume that not loaded images are square
        r += 1;
      }
    }

    return width / r; // have to round down because Firefox will automatically round up values with more than 3 decimals
  };

  ImageLayout.prototype._setSize = function(images, height) {
    var img, imgWidth, imagesLength = images.length;
    for (var i = 0; i < imagesLength; i++) {
      img = images[i];
      if ((img.naturalWidth > 0) && (img.naturalHeight > 0)) {
        imgWidth = height * img.naturalWidth / img.naturalHeight;
      } else {
        // not loaded image: make it square, as _getHeigth assumed
        imgWidth = height;
      }
      img.style.width = imgWidth + 'px';
      img.style.height = height + 'px';
      img.style.marginLeft = '3px';
      img.style.marginTop = '3px';
      img.style.marginRight = this.margin - 7 + 'px'; // -4 is the negative margin of the inline element
      img.style.marginBottom = this.margin - 7 + 'px';
    }
  };

  ImageLayout.prototype._alignImgs = function(imgGroup) {
    var slice, h,
      containerWidth = d.querySelector(this.container_selector).clientWidth;

    w: while (imgGroup.length > 0) {
      for (var i = 1; i <= imgGroup.length; i++) {
        slice = imgGroup.slice(0, i);
        h = this._getHeigth(slice, containerWidth);
        if (h < this.maxHeight) {
          this._setSize(slice, h);
          imgGroup = imgGroup.slice(i);
          continue w;
        }
      }
      this._setSize(slice, Math.min(this.maxHeight, h));
      break;
    }
  };

  ImageLayout.prototype.align = function(results_selector) {
    var results_selectorNode = d.querySelectorAll(this.results_selector),
      results_length = results_selectorNode.length,
      previous = null,
      current = null,
      imgGroup = [];
    for (var i = 0; i < results_length; i++) {
      current = results_selectorNode[i];
      if (current.previousElementSibling !== previous && imgGroup.length > 0) {
        // the current image is not connected to the previous one,
        // so it is the start of a new group of images:
        // call _alignImgs to align the current group
        this._alignImgs(imgGroup);
        // and start a new empty group of images
        imgGroup = [];
      }
      // add the current image to the group (only the img tag)
      imgGroup.push(current.querySelector(this.img_selector));
      // update the previous variable
      previous = current;
    }
    // align the remaining images
    if (imgGroup.length > 0) {
      this._alignImgs(imgGroup);
    }
  };

  ImageLayout.prototype.watch = function() {
    var i, img, imgGroup, imgNodeLength,
      obj = this,
      results_nodes = d.querySelectorAll(this.results_selector),
      results_length = results_nodes.length;

    function align(e) {
      obj.align();
    }

    function throttleAlign(e) {
      if (obj._alignAllDone) {
        obj._alignAllDone = false;
        setTimeout(function() {
          obj.align();
          obj._alignAllDone = true;
        }, 100);
      }
    }

    w.addEventListener('resize', throttleAlign);
    w.addEventListener('pageshow', align);

    for (i = 0; i < results_length; i++) {
      img = results_nodes[i].querySelector(this.img_selector);
      if (typeof img !== 'undefined') {
        img.addEventListener('load', throttleAlign);
        img.addEventListener('error', throttleAlign);
      }
    }
  };

  w.searx.ImageLayout = ImageLayout;

})(window, document);
;searx.ready(function() {

  searx.on('.result', 'click', function() {
@@ -1411,7 +1260,7 @@ module.exports = AutoComplete;
  'use strict';

  searx.ready(function() {
-   searx.image_thumbnail_layout = new searx.ImageLayout('#urls', '#urls .result-images', 'img.image_thumbnail', 200);
+   searx.image_thumbnail_layout = new searx.ImageLayout('#urls', '#urls .result-images', 'img.image_thumbnail', 10, 200);
    searx.image_thumbnail_layout.watch();

    searx.on('.btn-collapse', 'click', function(event) {
@@ -1575,3 +1424,166 @@ module.exports = AutoComplete;
  });

})(window, document, window.searx);
;/**
 *
 * Google Image Layout v0.0.1
 * Description, by Anh Trinh.
 * Heavily modified for searx
 * https://ptgamr.github.io/2014-09-12-google-image-layout/
 * https://ptgamr.github.io/google-image-layout/src/google-image-layout.js
 *
 * @license Free to use under the MIT License.
 *
 */

(function (w, d) {
  function ImageLayout(container_selector, results_selector, img_selector, margin, maxHeight) {
    this.container_selector = container_selector;
    this.results_selector = results_selector;
    this.img_selector = img_selector;
    this.margin = margin;
    this.maxHeight = maxHeight;
    this.isAlignDone = true;
  }

  /**
   * Get the height that makes all images fit the container
   *
   * width = w1 + w2 + w3 + ... = r1*h + r2*h + r3*h + ...
   *
   * @param {[type]} images the images to be calculated
   * @param {[type]} width the container width
   * @param {[type]} margin the margin between each image
   *
   * @return {[type]} the height
   */
  ImageLayout.prototype._getHeigth = function (images, width) {
    var i, img;
    var r = 0;

    for (i = 0; i < images.length; i++) {
      img = images[i];
      if ((img.naturalWidth > 0) && (img.naturalHeight > 0)) {
        r += img.naturalWidth / img.naturalHeight;
      } else {
        // assume that not loaded images are square
        r += 1;
      }
    }

    return (width - images.length * this.margin) / r; // have to round down because Firefox will automatically round up values with more than 3 decimals
  };

  ImageLayout.prototype._setSize = function (images, height) {
    var i, img, imgWidth;
    var imagesLength = images.length, resultNode;

    for (i = 0; i < imagesLength; i++) {
      img = images[i];
      if ((img.naturalWidth > 0) && (img.naturalHeight > 0)) {
        imgWidth = height * img.naturalWidth / img.naturalHeight;
      } else {
        // not loaded image: make it square, as _getHeigth assumed
        imgWidth = height;
      }
      img.style.width = imgWidth + 'px';
      img.style.height = height + 'px';
      img.style.marginLeft = '3px';
      img.style.marginTop = '3px';
      img.style.marginRight = this.margin - 7 + 'px'; // -4 is the negative margin of the inline element
      img.style.marginBottom = this.margin - 7 + 'px';
      resultNode = img.parentNode.parentNode;
      if (!resultNode.classList.contains('js')) {
        resultNode.classList.add('js');
      }
    }
  };

  ImageLayout.prototype._alignImgs = function (imgGroup) {
    var isSearching, slice, i, h;
    var containerElement = d.querySelector(this.container_selector);
    var containerCompStyles = window.getComputedStyle(containerElement);
    var containerPaddingLeft = parseInt(containerCompStyles.getPropertyValue('padding-left'), 10);
    var containerPaddingRight = parseInt(containerCompStyles.getPropertyValue('padding-right'), 10);
    var containerWidth = containerElement.clientWidth - containerPaddingLeft - containerPaddingRight;

    while (imgGroup.length > 0) {
      isSearching = true;
      for (i = 1; i <= imgGroup.length && isSearching; i++) {
        slice = imgGroup.slice(0, i);
        h = this._getHeigth(slice, containerWidth);
        if (h < this.maxHeight) {
          this._setSize(slice, h);
          // continue with the remaining images
          imgGroup = imgGroup.slice(i);
          isSearching = false;
        }
      }
      if (isSearching) {
        this._setSize(slice, Math.min(this.maxHeight, h));
        break;
      }
    }
  };

  ImageLayout.prototype.align = function () {
    var i;
    var results_selectorNode = d.querySelectorAll(this.results_selector);
    var results_length = results_selectorNode.length;
    var previous = null;
    var current = null;
    var imgGroup = [];

    for (i = 0; i < results_length; i++) {
      current = results_selectorNode[i];
      if (current.previousElementSibling !== previous && imgGroup.length > 0) {
        // the current image is not connected to the previous one,
        // so it is the start of a new group of images:
        // call _alignImgs to align the current group
        this._alignImgs(imgGroup);
        // and start a new empty group of images
        imgGroup = [];
      }
      // add the current image to the group (only the img tag)
      imgGroup.push(current.querySelector(this.img_selector));
      // update the previous variable
      previous = current;
    }
    // align the remaining images
    if (imgGroup.length > 0) {
      this._alignImgs(imgGroup);
    }
  };

  ImageLayout.prototype.watch = function () {
    var i, img;
    var obj = this;
    var results_nodes = d.querySelectorAll(this.results_selector);
    var results_length = results_nodes.length;

    function throttleAlign() {
      if (obj.isAlignDone) {
        obj.isAlignDone = false;
        setTimeout(function () {
          obj.align();
          obj.isAlignDone = true;
        }, 100);
      }
    }

    w.addEventListener('pageshow', throttleAlign);
    w.addEventListener('load', throttleAlign);
    w.addEventListener('resize', throttleAlign);

    for (i = 0; i < results_length; i++) {
      img = results_nodes[i].querySelector(this.img_selector);
      if (img !== null && img !== undefined) {
        img.addEventListener('load', throttleAlign);
        img.addEventListener('error', throttleAlign);
      }
    }
  };

  w.searx.ImageLayout = ImageLayout;

}(window, document));
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -18,7 +18,7 @@
  'use strict';

  searx.ready(function() {
-   searx.image_thumbnail_layout = new searx.ImageLayout('#urls', '#urls .result-images', 'img.image_thumbnail', 200);
+   searx.image_thumbnail_layout = new searx.ImageLayout('#urls', '#urls .result-images', 'img.image_thumbnail', 10, 200);
    searx.image_thumbnail_layout.watch();

    searx.on('.btn-collapse', 'click', function(event) {
@@ -3,7 +3,7 @@
  <ShortName>{{ instance_name }}</ShortName>
  <Description>a privacy-respecting, hackable metasearch engine</Description>
  <InputEncoding>UTF-8</InputEncoding>
- <Image>{{ urljoin(host, url_for('static', filename='img/favicon.png')) }}</Image>
+ <Image>{{ url_for('static', filename='img/favicon.png', _external=True) }}</Image>
  <LongName>searx metasearch</LongName>
  {% if opensearch_method == 'get' %}
  <Url rel="results" type="text/html" method="get" template="{{ url_for('search', _external=True) }}?q={searchTerms}"/>
@@ -13,7 +13,7 @@
  </Url>
  {% endif %}
  {% if autocomplete %}
- <Url rel="suggestions" type="application/x-suggestions+json" template="{{ host }}autocompleter?q={searchTerms}"/>
+ <Url rel="suggestions" type="application/x-suggestions+json" template="{{ url_for('autocompleter', _external=True) }}?q={searchTerms}"/>
  {% endif %}

  <Url type="application/opensearchdescription+xml"
@@ -1,6 +1,6 @@
{% from 'oscar/macros.html' import icon %}
<!DOCTYPE html>
-<html lang="{{ preferences.get_value('locale') }}" xml:lang="{{ preferences.get_value('locale') }}"{% if rtl %} dir="rtl"{% endif %}>
+<html lang="{{ preferences.get_value('locale') }}" xml:lang="{{ preferences.get_value('locale') }}"{% if rtl %} dir="rtl"{% endif %} class="nojs">
<head>
  <meta charset="UTF-8" />
  <meta name="description" content="searx - a privacy-respecting, hackable metasearch engine" />
@@ -165,7 +165,7 @@
  {{ preferences_item_header(info, label, rtl, 'doi_resolver') }}
  <select class="form-control {{ custom_select_class(rtl) }}" name="doi_resolver" id="doi_resolver">
    {% for doi_resolver_name,doi_resolver_url in doi_resolvers.items() %}
-   <option value="{{ doi_resolver_name }}" {% if doi_resolver_name == current_doi_resolver %}selected="selected"{% endif %}>
+   <option value="{{ doi_resolver_name }}" {% if doi_resolver_url == current_doi_resolver %}selected="selected"{% endif %}>
      {{ doi_resolver_name }} - {{ doi_resolver_url }}
    </option>
    {% endfor %}
@@ -368,7 +368,7 @@
  </p>

  <div class="tab-pane">
-   <input readonly="" class="form-control select-all-on-click cursor-text" type="url" value="{{ base_url }}?preferences={{ preferences_url_params|e }}{% raw %}&q=%s{% endraw %}">
+   <input readonly="" class="form-control select-all-on-click cursor-text" type="url" value="{{ url_for('index', _external=True) }}?preferences={{ preferences_url_params|e }}{% raw %}&q=%s{% endraw %}">
    <input type="submit" class="btn btn-primary" value="{{ _('save') }}" />
    <a href="{{ url_for('index') }}"><div class="btn btn-default">{{ _('back') }}</div></a>
    <a href="{{ url_for('clear_cookies') }}"><div class="btn btn-default">{{ _('Reset defaults') }}</div></a>
@@ -13,10 +13,10 @@
  </div>
  {%- endif -%}

- {%- if result.img_src -%}
+ {%- if result.img_src or result.thumbnail -%}
  <div class="container-fluid">{{- "" -}}
    <div class="row">{{- "" -}}
-     <img src="{{ image_proxify(result.img_src) }}" title="{{ result.title|striptags }}" style="width: auto; max-height: 60px; min-height: 60px;" class="col-xs-2 col-sm-4 col-md-4 result-content">
+     <img src="{{ image_proxify(result.img_src or result.thumbnail) }}" title="{{ result.title|striptags }}" style="width: auto; max-height: 60px; min-height: 60px;" class="col-xs-2 col-sm-4 col-md-4 result-content">
      {%- if result.content %}<p class="result-content col-xs-8 col-sm-8 col-md-8">{{ result.content|safe }}</p>{% endif -%}
    </div>{{- "" -}}
  </div>
@@ -96,7 +96,7 @@
  <p class="value">
    <select id='doi_resolver' name='doi_resolver'>
      {%- for doi_resolver_name,doi_resolver_url in doi_resolvers.items() -%}
-     <option value="{{ doi_resolver_name }}" {% if doi_resolver_name == current_doi_resolver %}selected="selected"{% endif %}>
+     <option value="{{ doi_resolver_name }}" {% if doi_resolver_url == current_doi_resolver %}selected="selected"{% endif %}>
        {{- doi_resolver_name }} - {{ doi_resolver_url -}}
      </option>
      {%- endfor -%}
@@ -40,7 +40,7 @@ from datetime import datetime, timedelta
from time import time
from html import escape
from io import StringIO
-from urllib.parse import urlencode, urljoin, urlparse
+from urllib.parse import urlencode, urlparse

from pygments import highlight
from pygments.lexers import get_lexer_by_name
@@ -270,14 +270,7 @@ def extract_domain(url):


def get_base_url():
-    if settings['server']['base_url']:
-        hostname = settings['server']['base_url']
-    else:
-        scheme = 'http'
-        if request.is_secure:
-            scheme = 'https'
-        hostname = url_for('index', _external=True, _scheme=scheme)
-    return hostname
+    return url_for('index', _external=True)


def get_current_theme_name(override=None):
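A hedged sketch (not from the commit) of why the one-liner suffices: once ReverseProxyPathFix feeds the external scheme, host and script root into the WSGI environ, url_for('index', _external=True) reconstructs the base URL on its own. The base_url value below is invented for illustration.

from flask import Flask, url_for

app = Flask(__name__)

@app.route('/')
def index():
    return 'ok'

# simulate a request whose environ already carries the external base URL
with app.test_request_context('/', base_url='https://example.org/searx'):
    print(url_for('index', _external=True))  # -> https://example.org/searx/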
@@ -310,10 +303,6 @@ def url_for_theme(endpoint, override_theme=None, **values):
    if filename_with_theme in static_files:
        values['filename'] = filename_with_theme
    url = url_for(endpoint, **values)
-   if settings['server']['base_url']:
-       if url.startswith('/'):
-           url = url[1:]
-       url = urljoin(settings['server']['base_url'], url)
    return url
@@ -650,7 +639,7 @@ def search():
            result['pretty_url'] = prettify_url(result['url'])

            # TODO, check if timezone is calculated right
-           if 'publishedDate' in result:
+           if result.get('publishedDate'):  # do not try to get a date from an empty string or a None type
                try:  # test if publishedDate >= 1900 (datetime module bug)
                    result['pubdate'] = result['publishedDate'].strftime('%Y-%m-%d %H:%M:%S%z')
                except ValueError:
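The guard change is behavioral, not cosmetic: a membership test passes empty or None dates through, while .get() in boolean context filters them out. A tiny illustration (the value is invented):

result = {'publishedDate': None}
print('publishedDate' in result)           # True  -> old check tried to format None
print(bool(result.get('publishedDate')))   # False -> new check skips empty/None dates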
@@ -785,20 +774,26 @@ def autocompleter():

    # parse query
    raw_text_query = RawTextQuery(request.form.get('q', ''), disabled_engines)
+   sug_prefix = raw_text_query.getQuery()

    # normal autocompletion results only appear if no inner results returned
    # and there is a query part
-   if len(raw_text_query.autocomplete_list) == 0 and len(raw_text_query.getQuery()) > 0:
+   if len(raw_text_query.autocomplete_list) == 0 and len(sug_prefix) > 0:

        # get language from cookie
        language = request.preferences.get_value('language')
        if not language or language == 'all':
            language = 'en'
        else:
            language = language.split('-')[0]

        # run autocompletion
-       raw_results = search_autocomplete(request.preferences.get_value('autocomplete'),
-                                         raw_text_query.getQuery(), language)
+       raw_results = search_autocomplete(
+           request.preferences.get_value('autocomplete'), sug_prefix, language
+       )
        for result in raw_results:
+           # attention: this loop will change raw_text_query object and this is
+           # the reason why the sug_prefix was stored before (see above)
            results.append(raw_text_query.changeQuery(result).getFullQuery())

    if len(raw_text_query.autocomplete_list) > 0:
@@ -809,13 +804,16 @@ def autocompleter():
    for answer in answers:
        results.append(str(answer['answer']))

-   # return autocompleter results
    if request.headers.get('X-Requested-With') == 'XMLHttpRequest':
-       return Response(json.dumps(results),
-                       mimetype='application/json')
+       # the suggestion request comes from the searx search form
+       suggestions = json.dumps(results)
+       mimetype = 'application/json'
+   else:
+       # the suggestion request comes from browser's URL bar
+       suggestions = json.dumps([sug_prefix, results])
+       mimetype = 'application/x-suggestions+json'

-   return Response(json.dumps([raw_text_query.query, results]),
-                   mimetype='application/x-suggestions+json')
+   return Response(suggestions, mimetype=mimetype)


@app.route('/preferences', methods=['GET', 'POST'])
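For reference, the two payload shapes the new branch distinguishes (example values are made up):

import json

sug_prefix = 'sear'
results = ['searx', 'search engine']

# XMLHttpRequest from the searx search form -> plain JSON list
form_payload = json.dumps(results)              # '["searx", "search engine"]'

# browser URL bar -> OpenSearch x-suggestions+json: [prefix, completions]
urlbar_payload = json.dumps([sug_prefix, results])
print(form_payload, urlbar_payload)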
@@ -824,7 +822,7 @@ def preferences():

    # save preferences
    if request.method == 'POST':
-       resp = make_response(redirect(urljoin(settings['server']['base_url'], url_for('index'))))
+       resp = make_response(redirect(url_for('index', _external=True)))
        try:
            request.preferences.parse_form(request.form)
        except ValidationException:
@@ -1013,11 +1011,11 @@ def opensearch():
    if request.headers.get('User-Agent', '').lower().find('webkit') >= 0:
        method = 'get'

-   ret = render('opensearch.xml',
-                opensearch_method=method,
-                host=get_base_url(),
-                urljoin=urljoin,
-                override_theme='__common__')
+   ret = render(
+       'opensearch.xml',
+       opensearch_method=method,
+       override_theme='__common__'
+   )

    resp = Response(response=ret,
                    status=200,
@@ -1038,7 +1036,7 @@ def favicon():

@app.route('/clear_cookies')
def clear_cookies():
-   resp = make_response(redirect(urljoin(settings['server']['base_url'], url_for('index'))))
+   resp = make_response(redirect(url_for('index', _external=True)))
    for cookie_name in request.cookies:
        resp.delete_cookie(cookie_name)
    return resp
@@ -1131,19 +1129,41 @@ class ReverseProxyPathFix:
    '''

    def __init__(self, app):

        self.app = app
+       self.script_name = None
+       self.scheme = None
+       self.server = None
+
+       if settings['server']['base_url']:
+
+           # If base_url is specified, then these values are given
+           # preference over any of Flask's generics.
+
+           base_url = urlparse(settings['server']['base_url'])
+           self.script_name = base_url.path
+           if self.script_name.endswith('/'):
+               # remove trailing slash to avoid infinite redirect on the index
+               # see https://github.com/searx/searx/issues/2729
+               self.script_name = self.script_name[:-1]
+           self.scheme = base_url.scheme
+           self.server = base_url.netloc

    def __call__(self, environ, start_response):
-       script_name = environ.get('HTTP_X_SCRIPT_NAME', '')
+       script_name = self.script_name or environ.get('HTTP_X_SCRIPT_NAME', '')
        if script_name:
            environ['SCRIPT_NAME'] = script_name
            path_info = environ['PATH_INFO']
            if path_info.startswith(script_name):
                environ['PATH_INFO'] = path_info[len(script_name):]

-       scheme = environ.get('HTTP_X_SCHEME', '')
+       scheme = self.scheme or environ.get('HTTP_X_SCHEME', '')
        if scheme:
            environ['wsgi.url_scheme'] = scheme

+       server = self.server or environ.get('HTTP_X_FORWARDED_HOST', '')
+       if server:
+           environ['HTTP_HOST'] = server
        return self.app(environ, start_response)
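A sketch of the middleware's effect on a WSGI environ, assuming base_url is 'https://example.org/searx/' (all values invented for illustration, not part of the commit):

environ = {'PATH_INFO': '/searx/stats', 'wsgi.url_scheme': 'http',
           'HTTP_HOST': 'localhost:8888'}

script_name = '/searx'          # base_url.path with the trailing slash stripped
if environ['PATH_INFO'].startswith(script_name):
    environ['SCRIPT_NAME'] = script_name
    environ['PATH_INFO'] = environ['PATH_INFO'][len(script_name):]
environ['wsgi.url_scheme'] = 'https'       # base_url.scheme
environ['HTTP_HOST'] = 'example.org'       # base_url.netloc
print(environ)  # PATH_INFO is now '/stats', SCRIPT_NAME '/searx'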
@@ -31,7 +31,7 @@ Example to run it from python:
... engine_cs = list(searx.engines.categories.keys())
... # load module
... spec = importlib.util.spec_from_file_location(
-...     'utils.standalone_searx', 'utils/standalone_searx.py')
+...     'utils.standalone_searx', 'searx_extra/standalone_searx.py')
... sas = importlib.util.module_from_spec(spec)
... spec.loader.exec_module(sas)
... # use function from module
utils/lib.sh
@@ -86,7 +86,7 @@ set_terminal_colors() {
    _Red='\e[0;31m'
    _Green='\e[0;32m'
    _Yellow='\e[0;33m'
-   _Blue='\e[0;34m'
+   _Blue='\e[0;94m'
    _Violet='\e[0;35m'
    _Cyan='\e[0;36m'
@@ -95,12 +95,12 @@ set_terminal_colors() {
    _BRed='\e[1;31m'
    _BGreen='\e[1;32m'
    _BYellow='\e[1;33m'
-   _BBlue='\e[1;34m'
+   _BBlue='\e[1;94m'
    _BPurple='\e[1;35m'
    _BCyan='\e[1;36m'
}

-if [ ! -p /dev/stdout ]; then
+if [ ! -p /dev/stdout ] && [ ! "$TERM" = 'dumb' ] && [ ! "$TERM" = 'unknown' ]; then
    set_terminal_colors
fi
@@ -152,6 +152,22 @@ err_msg() { echo -e "${_BRed}ERROR:${_creset} $*" >&2; }
warn_msg() { echo -e "${_BBlue}WARN:${_creset} $*" >&2; }
info_msg() { echo -e "${_BYellow}INFO:${_creset} $*" >&2; }

build_msg() {
    local tag="$1 "
    shift
    echo -e "${_Blue}${tag:0:10}${_creset}$*"
}

dump_return() {

    # Use this as last command in your function to prompt an ERROR message if
    # the exit code is not zero.

    local err=$1
    [ "$err" -ne "0" ] && err_msg "${FUNCNAME[1]} exit with error ($err)"
    return "$err"
}

clean_stdin() {
    if [[ $(uname -s) != 'Darwin' ]]; then
        while read -r -n1 -t 0.1; do : ; done
@@ -496,6 +512,291 @@ service_is_available() {
    return "$exit_val"
}

# python
# ------

PY="${PY:=3}"
PYTHON="${PYTHON:=python$PY}"
PY_ENV="${PY_ENV:=local/py${PY}}"
PY_ENV_BIN="${PY_ENV}/bin"
PY_ENV_REQ="${PY_ENV_REQ:=${REPO_ROOT}/requirements*.txt}"

# List of python packages (folders) or modules (files) installed by command:
#   pyenv.install
PYOBJECTS="${PYOBJECTS:=.}"

# folder where the python distribution takes place
PYDIST="${PYDIST:=dist}"

# folder where the intermediate build files take place
PYBUILD="${PYBUILD:=build/py${PY}}"

# https://www.python.org/dev/peps/pep-0508/#extras
#PY_SETUP_EXTRAS='[develop,test]'
PY_SETUP_EXTRAS="${PY_SETUP_EXTRAS:=[develop,test]}"

PIP_BOILERPLATE=( pip wheel setuptools )

# shellcheck disable=SC2120
pyenv() {

    # usage: pyenv [vtenv_opts ...]
    #
    #   vtenv_opts: see 'pip install --help'
    #
    # Builds virtualenv with 'requirements*.txt' (PY_ENV_REQ) installed. The
    # virtualenv will be reused by validating sha256sum of the requirement
    # files.

    required_commands \
        sha256sum "${PYTHON}" \
        || exit

    local pip_req=()

    if ! pyenv.OK > /dev/null; then
        rm -f "${PY_ENV}/${PY_ENV_REQ}.sha256"
        pyenv.drop > /dev/null
        build_msg PYENV "[virtualenv] installing ${PY_ENV_REQ} into ${PY_ENV}"

        "${PYTHON}" -m venv "$@" "${PY_ENV}" || exit
        "${PY_ENV_BIN}/python" -m pip install -U "${PIP_BOILERPLATE[@]}" || exit

        for i in ${PY_ENV_REQ}; do
            pip_req=( "${pip_req[@]}" "-r" "$i" )
        done

        (
            [ "$VERBOSE" = "1" ] && set -x
            # shellcheck disable=SC2086
            "${PY_ENV_BIN}/python" -m pip install "${pip_req[@]}" \
                && sha256sum ${PY_ENV_REQ} > "${PY_ENV}/requirements.sha256"
        )
    fi
    pyenv.OK
}
_pyenv_OK=''
pyenv.OK() {

    # probes if pyenv exists and runs the script from pyenv.check

    [ "$_pyenv_OK" == "OK" ] && return 0

    if [ ! -f "${PY_ENV_BIN}/python" ]; then
        build_msg PYENV "[virtualenv] missing ${PY_ENV_BIN}/python"
        return 1
    fi

    if [ ! -f "${PY_ENV}/requirements.sha256" ] \
        || ! sha256sum --check --status <"${PY_ENV}/requirements.sha256" 2>/dev/null; then
        build_msg PYENV "[virtualenv] requirements.sha256 failed"
        sed 's/^/ [virtualenv] - /' <"${PY_ENV}/requirements.sha256"
        return 1
    fi

    pyenv.check \
        | "${PY_ENV_BIN}/python" 2>&1 \
        | prefix_stdout "${_Blue}PYENV ${_creset}[check] "

    local err=${PIPESTATUS[1]}
    if [ "$err" -ne "0" ]; then
        build_msg PYENV "[check] python test failed"
        return "$err"
    fi

    build_msg PYENV "OK"
    _pyenv_OK="OK"
    return 0
}

pyenv.drop() {

    build_msg PYENV "[virtualenv] drop ${PY_ENV}"
    rm -rf "${PY_ENV}"
    _pyenv_OK=''

}

pyenv.check() {

    # Prompts a python script with additional checks. Used by pyenv.OK to check
    # if virtualenv is ready to install python objects. This function should be
    # overwritten by the application script.

    local imp=""

    for i in "${PIP_BOILERPLATE[@]}"; do
        imp="$imp, $i"
    done

    cat <<EOF
import ${imp#,*}

EOF
}
pyenv.install() {

    if ! pyenv.OK; then
        py.clean > /dev/null
    fi
    if ! pyenv.install.OK > /dev/null; then
        build_msg PYENV "[install] ${PYOBJECTS}"
        if ! pyenv.OK >/dev/null; then
            pyenv
        fi
        for i in ${PYOBJECTS}; do
            build_msg PYENV "[install] pip install -e '$i${PY_SETUP_EXTRAS}'"
            "${PY_ENV_BIN}/python" -m pip install -e "$i${PY_SETUP_EXTRAS}"
        done
    fi
    pyenv.install.OK
}

_pyenv_install_OK=''
pyenv.install.OK() {

    [ "$_pyenv_install_OK" == "OK" ] && return 0

    local imp=""
    local err=""

    if [ "." = "${PYOBJECTS}" ]; then
        imp="import $(basename "$(pwd)")"
    else
        # shellcheck disable=SC2086
        for i in ${PYOBJECTS}; do imp="$imp, $i"; done
        imp="import ${imp#,*} "
    fi
    (
        [ "$VERBOSE" = "1" ] && set -x
        "${PY_ENV_BIN}/python" -c "import sys; sys.path.pop(0); $imp;" 2>/dev/null
    )

    err=$?
    if [ "$err" -ne "0" ]; then
        build_msg PYENV "[install] python installation test failed"
        return "$err"
    fi

    build_msg PYENV "[install] OK"
    _pyenv_install_OK="OK"
    return 0
}

pyenv.uninstall() {

    build_msg PYENV "[uninstall] ${PYOBJECTS}"

    if [ "." = "${PYOBJECTS}" ]; then
        pyenv.cmd python setup.py develop --uninstall 2>&1 \
            | prefix_stdout "${_Blue}PYENV ${_creset}[pyenv.uninstall] "
    else
        pyenv.cmd python -m pip uninstall --yes ${PYOBJECTS} 2>&1 \
            | prefix_stdout "${_Blue}PYENV ${_creset}[pyenv.uninstall] "
    fi
}


pyenv.cmd() {
    pyenv.install
    (   set -e
        # shellcheck source=/dev/null
        source "${PY_ENV_BIN}/activate"
        [ "$VERBOSE" = "1" ] && set -x
        "$@"
    )
}
# Sphinx doc
# ----------

GH_PAGES="build/gh-pages"
DOCS_DIST="${DOCS_DIST:=dist/docs}"
DOCS_BUILD="${DOCS_BUILD:=build/docs}"

docs.html() {
    build_msg SPHINX "HTML ./docs --> file://$(readlink -e "$(pwd)/$DOCS_DIST")"
    pyenv.install
    docs.prebuild
    # shellcheck disable=SC2086
    PATH="${PY_ENV_BIN}:${PATH}" pyenv.cmd sphinx-build \
        ${SPHINX_VERBOSE} ${SPHINXOPTS} \
        -b html -c ./docs -d "${DOCS_BUILD}/.doctrees" ./docs "${DOCS_DIST}"
    dump_return $?
}

docs.live() {
    build_msg SPHINX "autobuild ./docs --> file://$(readlink -e "$(pwd)/$DOCS_DIST")"
    pyenv.install
    docs.prebuild
    # shellcheck disable=SC2086
    PATH="${PY_ENV_BIN}:${PATH}" pyenv.cmd sphinx-autobuild \
        ${SPHINX_VERBOSE} ${SPHINXOPTS} --open-browser --host 0.0.0.0 \
        -b html -c ./docs -d "${DOCS_BUILD}/.doctrees" ./docs "${DOCS_DIST}"
    dump_return $?
}

docs.clean() {
    build_msg CLEAN "docs -- ${DOCS_BUILD} ${DOCS_DIST}"
    # shellcheck disable=SC2115
    rm -rf "${GH_PAGES}" "${DOCS_BUILD}" "${DOCS_DIST}"
    dump_return $?
}

docs.prebuild() {
    # Dummy function to run some actions before sphinx-doc build gets started.
    # This function needs to be overwritten by the application script.
    true
    dump_return $?
}

# shellcheck disable=SC2155
docs.gh-pages() {

    # The commit history in the gh-pages branch makes no sense, the history only
    # inflates the repository unnecessarily. Therefore a *new orphan* branch
    # is created each time we deploy on the gh-pages branch.

    docs.clean
    docs.prebuild
    docs.html

    [ "$VERBOSE" = "1" ] && set -x
    local head="$(git rev-parse HEAD)"
    local branch="$(git name-rev --name-only HEAD)"
    local remote="$(git config branch."${branch}".remote)"
    local remote_url="$(git config remote."${remote}".url)"

    # prepare the *orphan* gh-pages working tree
    (
        git worktree remove -f "${GH_PAGES}"
        git branch -D gh-pages
    ) &> /dev/null || true
    git worktree add --no-checkout "${GH_PAGES}" origin/master

    pushd "${GH_PAGES}" &> /dev/null
    git checkout --orphan gh-pages
    git rm -rfq .
    popd &> /dev/null

    cp -r "${DOCS_DIST}"/* "${GH_PAGES}"/
    touch "${GH_PAGES}/.nojekyll"
    cat > "${GH_PAGES}/404.html" <<EOF
<html><head><META http-equiv='refresh' content='0;URL=index.html'></head></html>
EOF

    pushd "${GH_PAGES}" &> /dev/null
    git add --all .
    git commit -q -m "gh-pages build from: ${branch}@${head} (${remote_url})"
    git push -f "${remote}" gh-pages
    popd &> /dev/null

    set +x
    build_msg GH-PAGES "deployed"
}

# golang
# ------
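The requirements-checksum gate that pyenv() and pyenv.OK() above rely on, re-sketched in Python to make the reuse condition explicit (the paths and glob pattern here are assumptions, not part of the commit):

import glob, hashlib, pathlib

def requirements_digest(pattern='requirements*.txt'):
    # hash all requirement files in a stable order, as sha256sum does
    h = hashlib.sha256()
    for name in sorted(glob.glob(pattern)):
        h.update(pathlib.Path(name).read_bytes())
    return h.hexdigest()

stamp = pathlib.Path('local/py3/requirements.sha256')
if not stamp.exists() or stamp.read_text().strip() != requirements_digest():
    print('requirements changed -> rebuild the virtualenv')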
@@ -1250,7 +1551,7 @@ pkg_install() {
        centos)
            # shellcheck disable=SC2068
            yum install -y $@
-           ;;
+           ;;
    esac
}
@@ -1382,6 +1683,12 @@ LXC_ENV_FOLDER=
if in_container; then
    # shellcheck disable=SC2034
    LXC_ENV_FOLDER="lxc-env/$(hostname)/"
    PY_ENV="${LXC_ENV_FOLDER}${PY_ENV}"
    PY_ENV_BIN="${LXC_ENV_FOLDER}${PY_ENV_BIN}"
    PYDIST="${LXC_ENV_FOLDER}${PYDIST}"
    PYBUILD="${LXC_ENV_FOLDER}${PYBUILD}"
    DOCS_DIST="${LXC_ENV_FOLDER}${DOCS_DIST}"
    DOCS_BUILD="${LXC_ENV_FOLDER}${DOCS_BUILD}"
fi

lxc_init_container_env() {
@@ -142,11 +142,11 @@ main() {
    local _usage="unknown or missing $1 command $2"

    # don't check prerequisite when in recursion
-   if [[ ! $1 == __* ]]; then
+   if [[ ! $1 == __* ]] && [[ ! $1 == --help ]]; then
        if ! in_container; then
            ! required_commands lxc && lxd_info && exit 42
        fi
-       [[ -z $LXC_SUITE ]] && err_msg "missing LXC_SUITE" && exit 42
+       [[ -z $LXC_SUITE ]] && err_msg "missing LXC_SUITE" && exit 42
    fi

    case $1 in
@@ -1,4 +1,5 @@
# -*- coding: utf-8; mode: makefile-gmake -*-
+# SPDX-License-Identifier: AGPL-3.0-or-later

ifeq (,$(wildcard /.lxcenv.mk))
PHONY += lxc-activate lxc-purge
@@ -10,60 +11,26 @@ else
include /.lxcenv.mk
endif

PHONY += make-help
ifeq (,$(wildcard /.lxcenv.mk))
make-help:
else
make-help: lxc-help
endif
	@echo 'options:'
	@echo '  make V=0|1 [targets] 0 => quiet build (default), 1 => verbose build'
	@echo '  make V=2 [targets] 2 => give reason for rebuild of target'

quiet_cmd_common_clean = CLEAN     $@
      cmd_common_clean = \
	find . -name '*.orig' -exec rm -f {} + ;\
	find . -name '*.rej' -exec rm -f {} + ;\
	find . -name '*~' -exec rm -f {} + ;\
	find . -name '*.bak' -exec rm -f {} + ;\

FMT = cat
ifeq ($(shell which fmt >/dev/null 2>&1; echo $$?), 0)
FMT = fmt
endif

# MS-Windows
#
# For a minimal *make-environment*, I'm using the gnu-tools from:
#
# - GNU MCU Eclipse Windows Build Tools, which brings 'make', 'rm' etc.
#   https://github.com/gnu-mcu-eclipse/windows-build-tools/releases
#
# - git for Windows, which brings 'find', 'grep' etc.
#   https://git-scm.com/download/win


# normpath
#
# System-dependent normalization of the path name
#
# usage: $(call normpath,/path/to/file)

normpath = $1
ifeq ($(OS),Windows_NT)
normpath = $(subst /,\,$1)
endif


# stolen from linux/Makefile
#

ifeq ("$(origin V)", "command line")
-  KBUILD_VERBOSE = $(V)
+  VERBOSE = $(V)
endif
-ifndef KBUILD_VERBOSE
-  KBUILD_VERBOSE = 0
+ifndef VERBOSE
+  VERBOSE = 0
endif

-ifeq ($(KBUILD_VERBOSE),1)
+export VERBOSE
+
+ifeq ($(VERBOSE),1)
  quiet =
  Q =
else
@@ -75,14 +42,8 @@ endif
#

# Convenient variables
comma   := ,
quote   := "
#" this comment is only for emacs highlighting
squote  := '
#' this comment is only for emacs highlighting
empty   :=
space   := $(empty) $(empty)
space_escape := _-_SPACE_-_

# Find any prerequisites that are newer than the target or that do not exist.
# PHONY targets are skipped in both cases.
@@ -107,7 +68,7 @@ any-prereq = $(filter-out $(PHONY),$?) $(filter-out $(PHONY) $(wildcard $^),$^)
# (5) No dir/.target.cmd file (used to store command line)
# (6) No dir/.target.cmd file and target not listed in $(targets)
#     This is a good hint that there is a bug in the kbuild file
-ifeq ($(KBUILD_VERBOSE),2)
+ifeq ($(VERBOSE),2)
why = \
    $(if $(filter $@, $(PHONY)),- due to target is PHONY, \
        $(if $(wildcard $@), \
@ -1,269 +0,0 @@
|
|||
# -*- coding: utf-8; mode: makefile-gmake -*-
|
||||
|
||||
# list of python packages (folders) or modules (files) of this build
|
||||
PYOBJECTS ?=
|
||||
|
||||
SITE_PYTHON ?=$(dir $(abspath $(lastword $(MAKEFILE_LIST))))site-python
|
||||
export PYTHONPATH := $(SITE_PYTHON):$$PYTHONPATH
|
||||
export PY_ENV PYDIST PYBUILD
|
||||
|
||||
# folder where the python distribution takes place
|
||||
PYDIST = ./$(LXC_ENV_FOLDER)dist
|
||||
# folder where the python intermediate build files take place
|
||||
PYBUILD = ./$(LXC_ENV_FOLDER)build
|
||||
# python version to use
|
||||
PY ?=3
|
||||
# $(PYTHON) points to the python interpreter from the OS! The python from the
|
||||
# OS is needed e.g. to create a virtualenv. For tasks inside the virtualenv the
|
||||
# interpeter from '$(PY_ENV_BIN)/python' is used.
|
||||
PYTHON ?= python$(PY)
|
||||
PIP ?= pip$(PY)
|
||||
PIP_INST ?= --user
|
||||
|
||||
# https://www.python.org/dev/peps/pep-0508/#extras
|
||||
#PY_SETUP_EXTRAS ?= \[develop,test\]
|
||||
PY_SETUP_EXTRAS ?=
|
||||
|
||||
PYDEBUG ?= --pdb
|
||||
PYLINT_RC ?= .pylintrc
|
||||
|
||||
TEST_FOLDER ?= ./tests
|
||||
TEST ?= .
|
||||
|
||||
PY_ENV = ./$(LXC_ENV_FOLDER)local/py$(PY)
|
||||
PY_ENV_BIN = $(PY_ENV)/bin
|
||||
PY_ENV_ACT = . $(PY_ENV_BIN)/activate
|
||||
|
||||
ifeq ($(OS),Windows_NT)
|
||||
PYTHON = python
|
||||
PY_ENV_BIN = $(PY_ENV)/Scripts
|
||||
PY_ENV_ACT = $(PY_ENV_BIN)/activate
|
||||
endif
|
||||
|
||||
VTENV_OPTS ?=
|
||||
|
||||
python-help::
|
||||
@echo 'makefile.python:'
|
||||
@echo ' pyenv | pyenv[un]install'
|
||||
@echo ' build $(PY_ENV) & [un]install python objects'
|
||||
@echo ' targts using pyenv $(PY_ENV):'
|
||||
@echo ' pylint - run pylint *linting*'
|
||||
@echo ' pytest - run *tox* test on python objects'
|
||||
@echo ' pydebug - run tests within a PDB debug session'
|
||||
@echo ' pybuild - build python packages ($(PYDIST) $(PYBUILD))'
|
||||
@echo ' pyclean - clean intermediate python objects'
|
||||
@echo ' targets using system users environment:'
|
||||
@echo ' py[un]install - [un]install python objects in editable mode'
|
||||
@echo ' upload-pypi - upload $(PYDIST)/* files to PyPi'
|
||||
@echo 'options:'
|
||||
@echo ' make PY=3.7 [targets] => to eval targets with python 3.7 ($(PY))'
|
||||
@echo ' make PIP_INST= => to set/unset pip install options ($(PIP_INST))'
|
||||
@echo ' make TEST=. => choose test from $(TEST_FOLDER) (default "." runs all)'
|
||||
@echo ' make DEBUG= => target "debug": do not invoke PDB on errors'
|
||||
@echo ' make PY_SETUP_EXTRAS => also install extras_require from setup.py \[develop,test\]'
|
||||
@echo ' when using target "pydebug", set breakpoints within py-source by adding::'
|
||||
@echo ' DEBUG()'
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
# OS requirements
|
||||
# ------------------------------------------------------------------------------
|
||||
|
||||
PHONY += msg-python-exe python-exe
|
||||
msg-python-exe:
|
||||
@echo "\n $(PYTHON) is required\n\n\
|
||||
Make sure you have $(PYTHON) installed, grab it from\n\
|
||||
https://www.python.org or install it from your package\n\
|
||||
manager. On debian based OS these requirements are\n\
|
||||
installed by::\n\n\
|
||||
sudo -H add-apt-repository ppa:deadsnakes/ppa\n\
|
||||
sudo -H apt update\n\
|
||||
sudo -H apt-get install $(PYTHON) $(PYTHON)-venv\n"
|
||||
|
||||
ifeq ($(shell which $(PYTHON) >/dev/null 2>&1; echo $$?), 1)
|
||||
python-exe: msg-python-exe
|
||||
$(error The '$(PYTHON)' command was not found)
|
||||
else
|
||||
python-exe:
|
||||
@:
|
||||
endif
|
||||
|
||||
msg-pip-exe:
|
||||
@echo "\n $(PIP) is required\n\n\
|
||||
Make sure you have updated pip installed, grab it from\n\
|
||||
https://pip.pypa.io or install it from your package\n\
|
||||
manager. On debian based OS these requirements are\n\
|
||||
installed by::\n\n\
|
||||
sudo -H apt-get install python$(PY)-pip\n" | $(FMT)
|
||||
|
||||
ifeq ($(shell which $(PIP) >/dev/null 2>&1; echo $$?), 1)
|
||||
pip-exe: msg-pip-exe
|
||||
$(error The '$(PIP)' command was not found)
|
||||
else
|
||||
pip-exe:
|
||||
@:
|
||||
endif
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
# commands
|
||||
# ------------------------------------------------------------------------------
|
||||
|
||||
# $2 path to folder with setup.py, this uses pip from the OS
|
||||
quiet_cmd_pyinstall = INSTALL $2
|
||||
cmd_pyinstall = $(PIP) $(PIP_VERBOSE) install $(PIP_INST) -e $2$(PY_SETUP_EXTRAS)
|
||||
|
||||
# $2 path to folder with setup.py, this uses pip from pyenv (not OS!)
|
||||
quiet_cmd_pyenvinstall = PYENV install $2
|
||||
cmd_pyenvinstall = \
|
||||
if ! cat $(PY_ENV)/requirements.sha256 2>/dev/null | sha256sum --check --status 2>/dev/null; then \
|
||||
rm -f $(PY_ENV)/requirements.sha256; \
|
||||
$(PY_ENV_BIN)/python -m pip $(PIP_VERBOSE) install -e $2$(PY_SETUP_EXTRAS) &&\
|
||||
sha256sum requirements*.txt > $(PY_ENV)/requirements.sha256 ;\
|
||||
else \
|
||||
echo "PYENV $2 already installed"; \
|
||||
fi
|
||||
|
||||
# Uninstall the package. Since pip does not uninstall the no longer needed
|
||||
# depencies (something like autoremove) the depencies remain.
|
||||
|
||||
# $2 package name to uninstall, this uses pip from the OS.
|
||||
quiet_cmd_pyuninstall = UNINSTALL $2
|
||||
cmd_pyuninstall = $(PIP) $(PIP_VERBOSE) uninstall --yes $2
|
||||
|
||||
# $2 path to folder with setup.py, this uses pip from pyenv (not OS!)
|
||||
quiet_cmd_pyenvuninstall = PYENV uninstall $2
|
||||
cmd_pyenvuninstall = $(PY_ENV_BIN)/python -m pip $(PIP_VERBOSE) uninstall --yes $2
|
||||
|
||||
# $2 path to folder where virtualenv take place
|
||||
quiet_cmd_virtualenv = PYENV usage: $ source ./$@/bin/activate
|
||||
cmd_virtualenv = \
|
||||
if [ -d "./$(PY_ENV)" -a -x "./$(PY_ENV_BIN)/python" ]; then \
|
||||
echo "PYENV using virtualenv from $2"; \
|
||||
else \
|
||||
$(PYTHON) -m venv $(VTENV_OPTS) $2; \
|
||||
$(PY_ENV_BIN)/python -m pip install $(PIP_VERBOSE) -U pip wheel setuptools; \
|
||||
$(PY_ENV_BIN)/python -m pip install $(PIP_VERBOSE) -r requirements.txt; \
|
||||
fi
|
||||
|
||||
# $2 path to lint
|
||||
quiet_cmd_pylint = LINT $@
|
||||
cmd_pylint = $(PY_ENV_BIN)/python -m pylint -j 0 --rcfile $(PYLINT_RC) $2
|
||||
|
||||
quiet_cmd_pytest = TEST $@
|
||||
cmd_pytest = $(PY_ENV_BIN)/python -m tox -vv
|
||||
|
||||
# setuptools, pip, easy_install its a mess full of cracks, a documentation hell
|
||||
# and broken by design ... all sucks, I really, really hate all this ... aaargh!
|
||||
#
|
||||
# About python packaging see `Python Packaging Authority`_. Most of the names
|
||||
# here are mapped to ``setup(<name1>=..., <name2>=...)`` arguments in
|
||||
# ``setup.py``. See `Packaging and distributing projects`_ about ``setup(...)``
|
||||
# arguments. If this is all new for you, start with `PyPI Quick and Dirty`_.
|
||||
#
|
||||
# Further read:
|
||||
#
|
||||
# - pythonwheels_
|
||||
# - setuptools_
|
||||
# - packaging_
|
||||
# - sdist_
|
||||
# - installing_
|
||||
#
|
||||
# .. _`Python Packaging Authority`: https://www.pypa.io
|
||||
# .. _`Packaging and distributing projects`: https://packaging.python.org/guides/distributing-packages-using-setuptools/
|
||||
# .. _`PyPI Quick and Dirty`: https://hynek.me/articles/sharing-your-labor-of-love-pypi-quick-and-dirty/
|
||||
# .. _pythonwheels: https://pythonwheels.com/
|
||||
# .. _setuptools: https://setuptools.readthedocs.io/en/latest/setuptools.html
|
||||
# .. _packaging: https://packaging.python.org/guides/distributing-packages-using-setuptools/#packaging-and-distributing-projects
|
||||
# .. _sdist: https://packaging.python.org/guides/distributing-packages-using-setuptools/#source-distributions
|
||||
# .. _bdist_wheel: https://packaging.python.org/guides/distributing-packages-using-setuptools/#pure-python-wheels
|
||||
# .. _installing: https://packaging.python.org/tutorials/installing-packages/
|
||||
#
|
||||
quiet_cmd_pybuild = BUILD $@
|
||||
cmd_pybuild = $(PY_ENV_BIN)/python setup.py \
|
||||
sdist -d $(PYDIST) \
|
||||
bdist_wheel --bdist-dir $(PYBUILD) -d $(PYDIST)
|
||||
|
||||
quiet_cmd_pyclean = CLEAN $@
|
||||
# remove 'build' folder since bdist_wheel does not care the --bdist-dir
|
||||
cmd_pyclean = \
|
||||
rm -rf $(PYDIST) $(PYBUILD) $(PY_ENV) ./.tox *.egg-info ;\
|
||||
find . -name '*.pyc' -exec rm -f {} + ;\
|
||||
find . -name '*.pyo' -exec rm -f {} + ;\
|
||||
find . -name __pycache__ -exec rm -rf {} +
|
||||
|
||||
# ------------------------------------------------------------------------------
# targets
# ------------------------------------------------------------------------------

# for installation use the pip from the OS!
PHONY += pyinstall
pyinstall: pip-exe
	$(call cmd,pyinstall,.)

PHONY += pyuninstall
pyuninstall: pip-exe
	$(call cmd,pyuninstall,$(PYOBJECTS))

# for installation use the pip from PY_ENV (not the OS)!
PHONY += pyenvinstall
pyenvinstall: $(PY_ENV)
	$(call cmd,pyenvinstall,.)

PHONY += pyenvuninstall
pyenvuninstall: $(PY_ENV)
	$(call cmd,pyenvuninstall,$(PYOBJECTS))

PHONY += pyclean
pyclean:
	$(call cmd,pyclean)

# to build the *local* environment, python from the OS is needed!
pyenv: $(PY_ENV)
$(PY_ENV): python-exe
	$(call cmd,virtualenv,$(PY_ENV))

PHONY += pylint-exe
pylint-exe: $(PY_ENV)
	@$(PY_ENV_BIN)/python -m pip $(PIP_VERBOSE) install pylint

PHONY += pylint
pylint: pylint-exe
	$(call cmd,pylint,$(PYOBJECTS))

PHONY += pybuild
pybuild: $(PY_ENV)
	$(call cmd,pybuild)

PHONY += pytest
pytest: $(PY_ENV)
	$(call cmd,pytest)

PHONY += pydebug
# set breakpoint with:
#    DEBUG()
# e.g. to run tests in debug mode in emacs use:
#   'M-x pdb' ... 'make pydebug'
pydebug: $(PY_ENV)
	DEBUG=$(DEBUG) $(PY_ENV_BIN)/pytest $(DEBUG) -v $(TEST_FOLDER)/$(TEST)
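
# A hypothetical invocation (the DEBUG and TEST values below are assumptions,
# not defaults defined in this file): drop into the debugger on failures of a
# single test file with
#
#   make pydebug DEBUG=--pdb TEST=test_webapp.py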

# runs the python interpreter from ./local/py<N>/bin/python
pyenv-python: pyenvinstall
	$(PY_ENV_BIN)/python -i

# With 'dependency_links=' setuptools supports dependencies on packages hosted
# in repositories other than PyPI, see "Packages Not On PyPI" [1].  The big
# drawback is that, for security reasons, this feature is not supported by pip
# [2].  That's why an upload to PyPI is required, and since uploads via
# setuptools are not recommended, we have to install / use twine ... it's
# really a mess.
#
# [1] https://python-packaging.readthedocs.io/en/latest/dependencies.html#packages-not-on-pypi
# [2] https://github.com/pypa/pip/pull/1519
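
# A typical release flow using the targets below (a sketch; it assumes PyPI
# credentials are configured, e.g. in ~/.pypirc):
#
#   make upload-pypi-test   # upload to https://test.pypi.org first and verify
#   make upload-pypi        # then upload the release to https://pypi.org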

# https://github.com/pypa/twine
PHONY += upload-pypi upload-pypi-test
upload-pypi: pyclean pyenvinstall pybuild
	@$(PY_ENV_BIN)/twine upload $(PYDIST)/*

upload-pypi-test: pyclean pyenvinstall pybuild
	@$(PY_ENV_BIN)/twine upload -r testpypi $(PYDIST)/*

.PHONY: $(PHONY)

@ -1,199 +0,0 @@
# -*- coding: utf-8; mode: makefile-gmake -*-

export DOCS_FOLDER DOCS_BUILD DOCS_DIST BOOKS_FOLDER BOOKS_DIST

# You can set these variables from the command line.
SPHINXOPTS  ?=
SPHINXBUILD ?= $(PY_ENV_BIN)/sphinx-build
SPHINX_CONF ?= conf.py

DOCS_FOLDER = ./docs
DOCS_BUILD  = ./$(LXC_ENV_FOLDER)build/docs
DOCS_DIST   = ./$(LXC_ENV_FOLDER)dist/docs
GH_PAGES   ?= build/gh-pages

BOOKS_FOLDER = ./docs
BOOKS_DIST   = ./$(LXC_ENV_FOLDER)dist/books

ifeq ($(KBUILD_VERBOSE),1)
  SPHINX_VERBOSE = "-v"
else
  SPHINX_VERBOSE =
endif
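
# A usage note (an assumption, following the kbuild convention this file
# imitates, where V=1 on the command line sets KBUILD_VERBOSE):
#
#   make V=1 docs-clean gh-pages
#
# runs sphinx-build with -v and prints the full commands instead of the
# quiet_cmd_* one-liners.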


docs-help:
	@echo 'makefile.sphinx:'
	@echo '  docs-clean  - clean intermediate doc objects'
	@echo '  $(GH_PAGES) - create & upload github pages'
	@echo '  sphinx-pdf  - run sphinx latex & pdf targets'
	@echo ''
	@echo '  books/{name}.html : build only the HTML of document {name}'
	@echo '  valid values for books/{name}.html are:'
	@echo '    $(BOOKS_HTML)' | $(FMT)
	@echo '  books/{name}.pdf : build only the PDF of document {name}'
	@echo '  valid values for books/{name}.pdf are:'
	@echo '    $(BOOKS_PDF) ' | $(FMT)

# ------------------------------------------------------------------------------
# requirements
# ------------------------------------------------------------------------------

PHONY += msg-texlive texlive

ifeq ($(shell which xelatex >/dev/null 2>&1; echo $$?), 1)
texlive: msg-texlive
	$(error The 'xelatex' command was not found)
else
texlive:
	@:
endif

msg-texlive:
	$(Q)echo "\n\
The TeX/PDF output and the *math* extension require TeXLive and latexmk:\n\n\
  Make sure you have an updated TeXLive with XeTeX engine installed, grab it\n\
  from https://www.tug.org/texlive or install it from your package manager.\n\n\
  Install latexmk from your package manager or visit https://ctan.org/pkg/latexmk\n\n\
Sphinx-doc produces (Xe)LaTeX files which might use additional TeX-packages\n\
and fonts.  To process these LaTeX files, a TeXLive installation with the\n\
additional packages is required.  On Debian based OS these requirements\n\
are installed by::\n\n\
  sudo -H apt-get install\n\
       latexmk\n\
       texlive-base texlive-xetex texlive-latex-recommended\n\
       texlive-extra-utils dvipng ttf-dejavu\n"

# ------------------------------------------------------------------------------
# commands
# ------------------------------------------------------------------------------

# $2 sphinx builder e.g. "html"
# $3 path where configuration file (conf.py) is located
# $4 sourcedir
# $5 dest subfolder e.g. "man" for man pages at $(DOCS_DIST)/man

quiet_cmd_sphinx = SPHINX $@ --> file://$(abspath $(DOCS_DIST)/$5)
      cmd_sphinx = SPHINX_CONF=$(abspath $4/$(SPHINX_CONF))\
	$(SPHINXBUILD) $(SPHINX_VERBOSE) $(SPHINXOPTS)\
	-b $2 -c $3 -d $(DOCS_BUILD)/.doctrees $4 $(DOCS_DIST)/$5
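
# For example, a hypothetical call (no such target appears in this fragment)
# that builds the HTML of the ./docs folder into $(DOCS_DIST) would be:
#
#   $(call cmd,sphinx,html,$(DOCS_FOLDER),$(DOCS_FOLDER))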

quiet_cmd_sphinx_autobuild = SPHINX $@ --> file://$(abspath $(DOCS_DIST)/$5)
      cmd_sphinx_autobuild = PATH="$(PY_ENV_BIN):$(PATH)" $(PY_ENV_BIN)/sphinx-autobuild $(SPHINX_VERBOSE) --open-browser --host 0.0.0.0 $(SPHINXOPTS)\
	-b $2 -c $3 -d $(DOCS_BUILD)/.doctrees $4 $(DOCS_DIST)/$5

quiet_cmd_sphinx_clean = CLEAN $@
      cmd_sphinx_clean = rm -rf $(DOCS_BUILD) $(DOCS_DIST) $(GH_PAGES)/* $(GH_PAGES)/.buildinfo

# ------------------------------------------------------------------------------
# targets
# ------------------------------------------------------------------------------

# build PDF of whole documentation in: $(DOCS_DIST)/pdf

PHONY += sphinx-pdf
sphinx-pdf: sphinx-latex
	$(Q)cd $(DOCS_BUILD)/latex/; make all-pdf
	$(Q)mkdir -p $(DOCS_DIST)/pdf
	$(Q)cp $(DOCS_BUILD)/latex/*.pdf $(DOCS_DIST)/pdf
	@echo "SPHINX *.pdf --> file://$(abspath $(DOCS_DIST)/pdf)"

PHONY += sphinx-latex
sphinx-latex: pyenvinstall texlive
	$(SPHINXBUILD) $(SPHINX_VERBOSE) $(SPHINXOPTS)\
	  -b latex \
	  -c $(DOCS_FOLDER) \
	  -d $(DOCS_BUILD)/.doctrees \
	  $(DOCS_FOLDER) \
	  $(DOCS_BUILD)/latex

# Sphinx projects, which we call *books* (the more common term).  Books are
# folders under $(BOOKS_FOLDER) containing a conf.py file.  The HTML output
# goes to folder $(BOOKS_DIST)/<name>, while the PDF is placed in
# $(BOOKS_DIST)/<name>/pdf.

BOOKS=$(patsubst $(BOOKS_FOLDER)/%/conf.py,books/%,$(wildcard $(BOOKS_FOLDER)/*/conf.py))

# fine grained targets
BOOKS_HTML  = $(patsubst %,%.html,$(BOOKS))
BOOKS_CLEAN = $(patsubst %,%.clean,$(BOOKS))
BOOKS_LATEX = $(patsubst %,%.latex,$(BOOKS))
BOOKS_PDF   = $(patsubst %,%.pdf,$(BOOKS))
BOOKS_LIVE  = $(patsubst %,%.live,$(BOOKS))
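
# To illustrate the pattern with a hypothetical book name: a configuration at
# $(BOOKS_FOLDER)/user/conf.py yields the fine grained targets books/user.html,
# books/user.pdf, books/user.latex, books/user.live and books/user.clean.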

$(BOOKS_DIST):
	mkdir -p $(BOOKS_DIST)

PHONY += $(BOOKS_HTML)
$(BOOKS_HTML): pyenvinstall | $(BOOKS_DIST)
	SPHINX_CONF=$(patsubst books/%.html,%,$@)/conf.py \
	$(SPHINXBUILD) $(SPHINX_VERBOSE) $(SPHINXOPTS)\
	  -b html \
	  -c $(DOCS_FOLDER) \
	  -d $(DOCS_BUILD)/books/$(patsubst books/%.html,%,$@)/.doctrees \
	  $(BOOKS_FOLDER)/$(patsubst books/%.html,%,$@) \
	  $(BOOKS_DIST)/$(patsubst books/%.html,%,$@)
	@echo "SPHINX $@ --> file://$(abspath $(BOOKS_DIST)/$(patsubst books/%.html,%,$@))"

PHONY += $(BOOKS_LIVE)
$(BOOKS_LIVE): pyenvinstall | $(BOOKS_DIST)
	PATH="$(PY_ENV_BIN):$(PATH)" \
	SPHINX_CONF=$(patsubst books/%.live,%,$@)/conf.py \
	$(PY_ENV_BIN)/sphinx-autobuild --poll -B --host 0.0.0.0 --port 8080 $(SPHINX_VERBOSE) $(SPHINXOPTS)\
	  -b html \
	  -c $(DOCS_FOLDER) \
	  -d $(DOCS_BUILD)/books/$(patsubst books/%.live,%,$@)/.doctrees \
	  $(BOOKS_FOLDER)/$(patsubst books/%.live,%,$@) \
	  $(BOOKS_DIST)/$(patsubst books/%.live,%,$@)

$(BOOKS_PDF): %.pdf : %.latex
	$(Q)cd $(DOCS_BUILD)/latex/$(patsubst books/%.pdf,%,$@); make all-pdf
	$(Q)mkdir -p $(BOOKS_DIST)/$(patsubst books/%.pdf,%,$@)/pdf
	$(Q)cp -v $(DOCS_BUILD)/latex/$(patsubst books/%.pdf,%,$@)/*.pdf $(BOOKS_DIST)/$(patsubst books/%.pdf,%,$@)/pdf
	@echo "SPHINX $@ --> file://$(abspath $(BOOKS_DIST)/$(patsubst books/%.pdf,%,$@))/pdf"

PHONY += $(BOOKS_LATEX)
$(BOOKS_LATEX): pyenvinstall | $(BOOKS_DIST)
	SPHINX_CONF=$(patsubst books/%.latex,%,$@)/conf.py \
	$(SPHINXBUILD) $(SPHINX_VERBOSE) $(SPHINXOPTS)\
	  -b latex \
	  -c $(DOCS_FOLDER) \
	  -d $(DOCS_BUILD)/books/$(patsubst books/%.latex,%,$@)/.doctrees \
	  $(BOOKS_FOLDER)/$(patsubst books/%.latex,%,$@) \
	  $(DOCS_BUILD)/latex/$(patsubst books/%.latex,%,$@)
	@echo "SPHINX $@ --> file://$(abspath $(DOCS_BUILD)/latex/$(patsubst books/%.latex,%,$@))"

$(BOOKS_CLEAN):
	$(Q)rm -rf $(BOOKS_DIST)/$(patsubst books/%.clean,%,$@) \
	   $(DOCS_BUILD)/books/$(patsubst books/%.clean,%,$@) \
	   $(DOCS_BUILD)/latex/$(patsubst books/%.clean,%,$@)

# github pages
PHONY += prepare-gh-pages
prepare-gh-pages:
	cp -r $(DOCS_DIST)/* $(GH_PAGES)/
	touch $(GH_PAGES)/.nojekyll
	echo "<html><head><META http-equiv='refresh' content='0;URL=index.html'></head></html>" > $(GH_PAGES)/404.html

PHONY += gh-pages
gh-pages: docs-clean docs
	- git worktree remove -f $(GH_PAGES) || exit 0
	- git branch -D gh-pages || exit 0
	git worktree add --no-checkout $(GH_PAGES) master
	cd $(GH_PAGES); git checkout --orphan gh-pages && git rm -rfq .
	$(MAKE) prepare-gh-pages
	cd $(GH_PAGES);\
	  git add --all . ;\
	  git commit -q -m "make gh-pages: from $(shell git config --get remote.origin.url)@$(shell git rev-parse HEAD)" ;\
	  git push -f origin gh-pages

PHONY += ci-gh-pages
ci-gh-pages: docs-clean docs
	rm -Rf $(GH_PAGES)
	mkdir -p $(GH_PAGES)
	$(MAKE) prepare-gh-pages

PHONY += docs-clean
docs-clean: $(BOOKS_CLEAN)
	$(call cmd,sphinx_clean)

.PHONY: $(PHONY)

@ -35,7 +35,7 @@ SERVICE_GROUP="${SERVICE_USER}"
GIT_BRANCH="${GIT_BRANCH:-master}"
|
||||
SEARX_PYENV="${SERVICE_HOME}/searx-pyenv"
|
||||
SEARX_SRC="${SERVICE_HOME}/searx-src"
|
||||
SEARX_SETTINGS_PATH="/etc/searx/settings.yml"
|
||||
SEARX_SETTINGS_PATH="${SEARX_SETTINGS_PATH:-/etc/searx/settings.yml}"
|
||||
SEARX_SETTINGS_TEMPLATE="${SEARX_SETTINGS_TEMPLATE:-${REPO_ROOT}/utils/templates/etc/searx/use_default_settings.yml}"
|
||||
SEARX_UWSGI_APP="searx.ini"
|
||||
# shellcheck disable=SC2034
|
||||
|
@ -481,7 +481,7 @@ pyenv_is_available() {
create_pyenv() {
    rst_title "Create virtualenv (python)" section
    echo
    if [[ ! -f "${SEARX_SRC}/manage.sh" ]]; then
    if [[ ! -f "${SEARX_SRC}/manage" ]]; then
        err_msg "to create pyenv for searx, searx has to be cloned first"
        return 42
    fi

@ -1,48 +0,0 @@
# -*- coding: utf-8; mode: python -*-
"""Implement some sphinx-build tools.

"""

import os
import sys
from sphinx.util.pycompat import execfile_

# ------------------------------------------------------------------------------
def load_sphinx_config(namespace):
# ------------------------------------------------------------------------------

    u"""Load an additional configuration file into *namespace*.

    The name of the configuration file is taken from the environment variable
    ``SPHINX_CONF``.  The external configuration file extends (or overwrites)
    the configuration values from the original ``conf.py``.  With this you are
    able to maintain *build themes*.  To your docs/conf.py add::

        from sphinx_build_tools import load_sphinx_config
        ...

        # Since load_sphinx_config overwrites settings from the global
        # namespace, it has to be the last statement in the conf.py file.

        load_sphinx_config(globals())

    """

    config_file = os.environ.get("SPHINX_CONF", None)
    if (config_file is not None
        and os.path.normpath(namespace["__file__"]) != os.path.normpath(config_file)):
        config_file = os.path.abspath(config_file)

        if os.path.isfile(config_file):
            sys.stdout.write(
                "load additional sphinx-config: %s\n" % config_file)
            config = namespace.copy()
            config['__file__'] = config_file
            execfile_(config_file, config)
            del config['__file__']
            namespace.update(config)
        else:
            sys.stderr.write(
                "WARNING: additional sphinx-config not found: %s\n" % config_file)
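
# A usage sketch (hypothetical paths): with the hook above placed at the end of
# docs/conf.py, pointing SPHINX_CONF at a second configuration file selects it
# at build time, e.g.
#
#   SPHINX_CONF=user/conf.py sphinx-build -b html -c docs docs dist/docs/user
#
# where values from user/conf.py extend or overwrite those of docs/conf.py.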