diff --git a/AUTHORS.rst b/AUTHORS.rst
index c5047438..974fbeb1 100644
--- a/AUTHORS.rst
+++ b/AUTHORS.rst
@@ -42,3 +42,4 @@ generally made searx better:
 - Noemi Vanyi
 - Kang-min Liu
 - Kirill Isakov
+- Guilhem Bonnefille
diff --git a/searx/engines/doku.py b/searx/engines/doku.py
new file mode 100644
index 00000000..93867fd0
--- /dev/null
+++ b/searx/engines/doku.py
@@ -0,0 +1,84 @@
+# Doku Wiki
+#
+# @website     https://www.dokuwiki.org/
+# @provide-api yes
+#              (https://www.dokuwiki.org/devel:xmlrpc)
+#
+# @using-api   no
+# @results     HTML
+# @stable      yes
+# @parse       (general) url, title, content
+
+from urllib import urlencode
+from lxml.html import fromstring
+from searx.engines.xpath import extract_text
+
+# engine dependent config
+categories = ['general']  # TODO , 'images', 'music', 'videos', 'files'
+paging = False
+language_support = False
+number_of_results = 5
+
+# search-url
+# Doku is OpenSearch compatible
+base_url = 'http://localhost:8090'
+search_url = '/?do=search'\
+             '&{query}'
+# TODO       '&startRecord={offset}'\
+# TODO       '&maximumRecords={limit}'\
+
+
+# do search-request
+def request(query, params):
+
+    params['url'] = base_url +\
+        search_url.format(query=urlencode({'id': query}))
+
+    return params
+
+
+# get response from search-request
+def response(resp):
+    results = []
+
+    doc = fromstring(resp.text)
+
+    # parse results
+    # Quickhits
+    for r in doc.xpath('//div[@class="search_quickresult"]/ul/li'):
+        try:
+            res_url = r.xpath('.//a[@class="wikilink1"]/@href')[-1]
+        except:
+            continue
+
+        if not res_url:
+            continue
+
+        title = extract_text(r.xpath('.//a[@class="wikilink1"]/@title'))
+
+        # append result
+        results.append({'title': title,
+                        'content': "",
+                        'url': base_url + res_url})
+
+    # Search results
+    for r in doc.xpath('//dl[@class="search_results"]/*'):
+        try:
+            if r.tag == "dt":
+                res_url = r.xpath('.//a[@class="wikilink1"]/@href')[-1]
+                title = extract_text(r.xpath('.//a[@class="wikilink1"]/@title'))
+            elif r.tag == "dd":
+                content = extract_text(r.xpath('.'))
+
+                # append result
+                results.append({'title': title,
+                                'content': content,
+                                'url': base_url + res_url})
+        except:
+            continue
+
+        if not res_url:
+            continue
+
+    # return results
+    return results
diff --git a/searx/settings.yml b/searx/settings.yml
index 462a0bcc..40f569e9 100644
--- a/searx/settings.yml
+++ b/searx/settings.yml
@@ -337,6 +337,13 @@ engines:
 #    number_of_results : 5
 #    timeout : 3.0
 
+# The Doku engine lets you access any DokuWiki instance:
+# a public one or a private/corporate one.
+#  - name : ubuntuwiki
+#    engine : doku
+#    shortcut : uw
+#    base_url : 'http://doc.ubuntu-fr.org'
+
 locales:
     en : English
     bg : Български (Bulgarian)
diff --git a/tests/unit/engines/test_doku.py b/tests/unit/engines/test_doku.py
new file mode 100644
index 00000000..22ddb7a7
--- /dev/null
+++ b/tests/unit/engines/test_doku.py
@@ -0,0 +1,79 @@
+# -*- coding: utf-8 -*-
+from collections import defaultdict
+import mock
+from searx.engines import doku
+from searx.testing import SearxTestCase
+
+
+class TestDokuEngine(SearxTestCase):
+
+    def test_request(self):
+        query = 'test_query'
+        dicto = defaultdict(dict)
+        params = doku.request(query, dicto)
+        self.assertIn('url', params)
+        self.assertIn(query, params['url'])
+
+    def test_response(self):
+        self.assertRaises(AttributeError, doku.response, None)
+        self.assertRaises(AttributeError, doku.response, [])
+        self.assertRaises(AttributeError, doku.response, '')
+        self.assertRaises(AttributeError, doku.response, '[]')
+
+        response = mock.Mock(text='')
+        self.assertEqual(doku.response(response), [])
+
+        html = u"""
+