searx/searx/engines/stackoverflow.py

from lxml import html
from urlparse import urljoin
from cgi import escape
from urllib import urlencode

# engine metadata
categories = ['it']

# base url and search endpoint
url = 'http://stackoverflow.com/'
search_url = url + 'search?'

# xpath selecting the excerpt text of a single result
result_xpath = './/div[@class="excerpt"]//text()'


def request(query, params):
    # append the url-encoded query to the search endpoint
    params['url'] = search_url + urlencode({'q': query})
    return params


def response(resp):
    results = []
    dom = html.fromstring(resp.text)

    # each search hit sits in a question-summary container
    for result in dom.xpath('//div[@class="question-summary search-result"]'):
        link = result.xpath('.//div[@class="result-link"]//a')[0]
        href = urljoin(url, link.attrib.get('href'))
        title = escape(' '.join(link.xpath('.//text()')))
        content = escape(' '.join(result.xpath(result_xpath)))
        results.append({'url': href, 'title': title, 'content': content})

    return results
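

# --- usage sketch (not part of the original engine) -------------------------
# A minimal, hypothetical example of driving this engine by hand; it assumes
# Python 2 and the third-party `requests` library. Inside searx the `params`
# dict is pre-populated by the framework, so the bare dict below is only an
# illustration.
if __name__ == '__main__':
    import requests

    # build the request parameters, fetch the page, then parse the results
    params = request('python lxml xpath', {})
    resp = requests.get(params['url'])
    for r in response(resp):
        print r['url'], '-', r['title']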