mirror of https://github.com/searx/searx

[enh] result ordering and deduplication

asciimoo 2013-10-16 23:03:26 +02:00
parent a1d15c3076
commit fa9c9e090b


@@ -4,6 +4,7 @@ from os import listdir
 from imp import load_source
 import grequests
 from itertools import izip_longest, chain
+from operator import itemgetter

 engine_dir = dirname(realpath(__file__))
@@ -56,4 +57,23 @@ def search(query, request, selected_engines):
         )
         requests.append(req)
     grequests.map(requests)
-    return list(filter(None, chain(*izip_longest(*results.values()))))
+    flat_res = list(filter(None, chain(*izip_longest(*results.values()))))
+    flat_len = len(flat_res)
+    results = []
+    # deduplication + scoring
+    for i, res in enumerate(flat_res):
+        score = flat_len - i
+        duplicated = False
+        for new_res in results:
+            if res['url'] == new_res['url']:
+                duplicated = new_res
+                break
+        if duplicated:
+            # keep the longer content and pool the scores of duplicate URLs
+            if len(res['content']) > len(duplicated['content']):
+                duplicated['content'] = res['content']
+            duplicated['score'] += score
+        else:
+            res['score'] = score
+            results.append(res)
+    return sorted(results, key=itemgetter('score'), reverse=True)
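
For reference, a minimal standalone sketch of the ordering and deduplication logic introduced by this commit. The score_and_dedup helper and the sample result dictionaries are hypothetical and only illustrate the behavior; the 'url', 'content', and 'score' keys mirror the diff above.

from operator import itemgetter


def score_and_dedup(flat_res):
    # Earlier positions in the interleaved engine results get higher scores.
    flat_len = len(flat_res)
    results = []
    for i, res in enumerate(flat_res):
        score = flat_len - i
        duplicated = None
        for new_res in results:
            if res['url'] == new_res['url']:
                duplicated = new_res
                break
        if duplicated:
            # Keep the longer content and add up the scores of duplicate URLs.
            if len(res['content']) > len(duplicated['content']):
                duplicated['content'] = res['content']
            duplicated['score'] += score
        else:
            res['score'] = score
            results.append(res)
    # Highest combined score first.
    return sorted(results, key=itemgetter('score'), reverse=True)


# Hypothetical interleaved results from two engines.
sample = [
    {'url': 'http://example.com/a', 'content': 'short'},
    {'url': 'http://example.com/b', 'content': 'another hit'},
    {'url': 'http://example.com/a', 'content': 'a longer snippet'},
]
for res in score_and_dedup(sample):
    print(res['url'], res['score'])
# http://example.com/a scores 3 + 1 = 4 and keeps the longer snippet;
# http://example.com/b scores 2.

Because duplicate URLs pool their scores, a result returned by several engines ends up ranked above results that only a single engine returned.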