# searx/searx/engines/github.py
# SPDX-License-Identifier: AGPL-3.0-or-later
"""
Github (IT)
"""
from json import loads
from urllib.parse import urlencode
# about
about = {
"website": 'https://github.com/',
"wikidata_id": 'Q364',
"official_api_documentation": 'https://developer.github.com/v3/',
"use_official_api": True,
"require_api_key": False,
"results": 'JSON',
}
# engine dependent config
2013-10-20 21:53:49 +02:00
categories = ['it']
2014-09-02 17:37:47 +02:00
# search-url
2014-01-20 02:31:20 +01:00
search_url = 'https://api.github.com/search/repositories?sort=stars&order=desc&{query}' # noqa
accept_header = 'application/vnd.github.preview.text-match+json'
2013-10-20 21:53:49 +02:00
2014-09-02 17:37:47 +02:00
# do search-request
2013-10-20 21:53:49 +02:00
def request(query, params):
2013-10-23 23:55:37 +02:00
params['url'] = search_url.format(query=urlencode({'q': query}))
2014-09-02 17:37:47 +02:00
2014-01-20 02:31:20 +01:00
params['headers']['Accept'] = accept_header
2014-09-02 17:37:47 +02:00
2013-10-20 21:53:49 +02:00
return params
# get response from search-request
2013-10-20 21:53:49 +02:00
def response(resp):
results = []
2014-09-02 17:37:47 +02:00
2013-10-20 21:53:49 +02:00
search_res = loads(resp.text)
2014-09-02 17:37:47 +02:00
# check if items are received
if 'items' not in search_res:
2014-09-02 17:37:47 +02:00
return []
# parse results
2013-10-20 21:53:49 +02:00
for res in search_res['items']:
title = res['name']
url = res['html_url']
2014-09-02 17:37:47 +02:00
if res['description']:
content = res['description'][:500]
else:
content = ''
2014-09-02 17:37:47 +02:00
# append result
results.append({'url': url,
'title': title,
'content': content})
# return results
2013-10-20 21:53:49 +02:00
return results