Use a more human-friendly format in searx/data/engines_languages.json (#1399)

Instead of a single line with 500,000 characters, use nicely formatted JSON.
Sort the lists in engine_languages.py so that, when updating, it is easier to
see the differences (search engines do change the order in which their
languages are listed).
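For illustration only (not part of the commit), here is a minimal sketch of what the extra arguments to dump() change. The engine names and language lists below are made up:

    import json

    # Made-up sample of the engines -> languages mapping.
    data = {"bing": ["de", "en", "fr"], "wikipedia": ["ar", "en"]}

    # Old behaviour: one giant line, so any change rewrites the whole file in a diff.
    print(json.dumps(data, ensure_ascii=False))
    # {"bing": ["de", "en", "fr"], "wikipedia": ["ar", "en"]}

    # New behaviour: indented, one item per line, so a diff only touches changed entries.
    print(json.dumps(data, ensure_ascii=False, indent=4, separators=(',', ': ')))
    # {
    #     "bing": [
    #         "de",
    #         "en",
    #         "fr"
    #     ],
    #     "wikipedia": [
    #         "ar",
    #         "en"
    #     ]
    # }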
Author: Ivan Skytte Jørgensen, 2018-11-29 16:31:05 +01:00 (committed by pofilo)
parent 3ff67c2e78
commit 0b7f0bde62
2 changed files with 27228 additions and 2 deletions

searx/data/engines_languages.json: file diff suppressed because one or more lines are too long


@@ -27,12 +27,14 @@ def fetch_supported_languages():
         if hasattr(engines[engine_name], 'fetch_supported_languages'):
             try:
                 engines_languages[engine_name] = engines[engine_name].fetch_supported_languages()
+                if type(engines_languages[engine_name]) == list:
+                    engines_languages[engine_name] = sorted(engines_languages[engine_name])
             except Exception as e:
                 print(e)
 
     # write json file
     with io.open(engines_languages_file, "w", encoding="utf-8") as f:
-        dump(engines_languages, f, ensure_ascii=False)
+        dump(engines_languages, f, ensure_ascii=False, indent=4, separators=(',', ': '))
 
     return engines_languages
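As an illustrative side note (made-up data, not part of the change): the sort keeps future diffs readable when an engine merely reorders its language list, and the type check presumably guards engines that return a dict (code to name) rather than a plain list:

    # Two fetches of the same engine: same languages, different order.
    last_run = ['fr', 'de', 'en']
    this_run = ['en', 'fr', 'de']

    # Unsorted, the serialized lists differ and the JSON diff is pure noise;
    # sorted, both runs produce identical output.
    assert sorted(last_run) == sorted(this_run) == ['de', 'en', 'fr']

    # The `type(...) == list` guard leaves non-list results untouched, since
    # sorted() on a dict would return only its keys and drop the values.
    result = {'de': 'Deutsch', 'en': 'English'}
    if type(result) == list:
        result = sorted(result)   # not taken for a dict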