Tweak footer and category layout, fix meta pages, update translator

2024-09-21 17:13:19 +02:00
parent c1bac2daeb
commit 7ab6681735
11 changed files with 198 additions and 133 deletions


@@ -1,7 +1,9 @@
#!/usr/bin/env python3
+ModificationMetadataKey = "lastmod"
+KeepOriginalMetadata = ["draft", "date", "lastmod"]
DestinationLanguages = ["it", "en", "es", "fr"] # "de", "eo"
IncludePaths = ["/"]
ExcludePaths = ["/categories", "/note/2024-09-19-Raspberry-Output-Audio-Both.md", "/miscellanea/Devlogs.md"] # "/miscellanea/PicoBlog.md"
ExcludePaths = ["/note/2024-09-19-Raspberry-Output-Audio-Both.md", "/miscellanea/Devlogs.md"] # "/miscellanea/PicoBlog.md"
import subprocess
from os import getcwd, listdir
@@ -9,7 +11,7 @@ from os.path import dirname, realpath, isfile, isdir
from pathlib import Path
from translate_shell.translate import translate
-# TODO somehow handle overriding frontmatter data for some translation (title, slug, ...)
+# TODO somehow handle overriding frontmatter data for some translation (title, slug, ...) (via in other files or commented metadata lines?)
# TODO handle deleted files? (it should probably be done in another sh script, not here)
def printf(*objects):
@@ -26,11 +28,11 @@ def make_destination_path(document_path, destination_language):
+ '/'.join(document_path.split('/')[1:]))
def is_translation_uptodate(source_path, destination_path):
-original_lines = split_text_with_frontmatter(read_original_document(source_path))[1].splitlines()
-translated_lines = split_text_with_frontmatter(open(destination_path, 'r').read())[1].splitlines()
+original_lines = split_with_frontmatter(read_original_document(source_path))[1].splitlines()
+translated_lines = split_with_frontmatter(open(destination_path, 'r').read())[1].splitlines()
for [index, original_line] in enumerate(original_lines):
line_key = original_line.split('=')[0]
-if line_key.strip().lower() == "lastmod":
+if line_key.strip().lower() == ModificationMetadataKey:
if original_line != translated_lines[index]:
return False
break
@@ -40,7 +42,7 @@ def is_translation_uptodate(source_path, destination_path):
def needs_translation(source_path, destination_language=None):
for exclude_path in ExcludePaths:
document_path = ('/' + '/'.join(source_path.split('/')[1:]))
-if (document_path == exclude_path) or document_path.startswith(exclude_path + '/'):
+if (document_path == exclude_path) or document_path.startswith(exclude_path.rstrip('/') + '/'):
return False
if not read_original_document(source_path).strip():
return False
@@ -63,7 +65,7 @@ def find_documents(folder_path):
documents[document].append(destination_language)
return documents
-def split_text_with_frontmatter(document_text):
+def split_with_frontmatter(document_text):
text_header = document_text.strip().splitlines()[0].strip()
if text_header in ["---", "+++"]:
text_tokens = document_text.split(text_header)
@@ -76,7 +78,7 @@ def fix_frontmatter(translated_text, reference_text):
if translated_line.strip() and (translated_line.lstrip() == translated_line):
reference_line = reference_lines[index]
line_key = reference_line.split('=')[0]
-if line_key.strip().lower() in ["draft", "date", "lastmod"]:
+if line_key.strip().lower() in KeepOriginalMetadata:
translated_line = reference_line
else:
line_value = '='.join(translated_line.split('=')[1:])
@@ -86,11 +88,50 @@ def fix_frontmatter(translated_text, reference_text):
result += (translated_line + '\n')
return result
# <https://stackoverflow.com/a/18815890>
def ascii_to_number(text:str) -> int:
return int(''.join(format(ord(i), 'b').zfill(8) for i in text), 2)
# <https://stackoverflow.com/a/699891>, <https://stackoverflow.com/a/40559005>
def number_to_ascii(number:int) -> str:
binary = format(int(number), '016b')
binary = binary.zfill(len(binary) + (8 - (len(binary) % 8)))
return ''.join(chr(int(binary[(i * 8):((i * 8) + 8)], 2)) for i in range(len(binary) // 8))
# TODO add checks for number-strings to ensure they aren't already in the literal text
# TODO handle code blocks and .notranslate HTML elements
# TODO fix strange bugs
def wrap_for_translation(original_text):
#return original_text
original_text = (original_text
.replace("{{%", "{{@%").replace("%}}", "%@}}")
.replace("{{<", "{{@<").replace(">}}", ">@}}"))
original_tokens = original_text.split("{{@")
#[(("{{@" if i else '') + c) for [i, c] in enumerate(original_text.split("{{@"))]
for i in range(1, len(original_tokens)):
token_tokens = original_tokens[i].split("@}}")
token_tokens[0] = (f"{TranslationMagic}__" + str(ascii_to_number("{{@" + token_tokens[0] + "@}}")) + "__").replace("9", "9_")
original_tokens[i] = ''.join(token_tokens)
#print(unwrap_from_translation(''.join(original_tokens)))
#exit(1)
return ''.join(original_tokens)
def unwrap_from_translation(translated_text):
#return translated_text
translated_tokens = translated_text.split(f"{TranslationMagic}__")
for i in range(1, len(translated_tokens)):
token_tokens = translated_tokens[i].split("__")
token_tokens[0] = number_to_ascii(token_tokens[0].replace(' ', '').replace('_', ''))
translated_tokens[i] = (token_tokens[0] + "__".join(token_tokens[1:]))
return (''.join(translated_tokens)
.replace("{{@%", "{{%").replace("%@}}", "%}}")
.replace("{{@<", "{{<").replace(">@}}", ">}}"))
def translate_document(document_path, documents):
printf(f"* {document_path} ->")
for destination_language in documents[document_path]:
source_language = get_source_language(document_path)
-original_text = read_original_document(document_path)
+original_text = wrap_for_translation(read_original_document(document_path))
printf('', destination_language)
try:
is_python_translator = True
@@ -101,10 +142,13 @@ def translate_document(document_path, documents):
printf('', exception)
try:
is_python_translator = False
+temporary_path = ("./tmp/" + document_path)
+Path('/'.join(temporary_path.split('/')[:-1])).mkdir(parents=True, exist_ok=True)
+open(temporary_path, 'w').write(original_text)
translated = subprocess.run(
("bash", "../Scripts/Lib/translate-shell.bash", "-brief", "-no-autocorrect",
"-t", destination_language, "-s", source_language,
("file://" + "../content/" + document_path)),
("file://" + temporary_path)),
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
if translated.stderr:
@@ -116,7 +160,7 @@ def translate_document(document_path, documents):
translated_text = (translated.results[0].paraphrase
if is_python_translator else translated.stdout.decode())
translated_preamble = ("\n\n{{< noticeAutomaticTranslation " + source_language + " >}}\n\n")
-if (translated_tokens := split_text_with_frontmatter(translated_text)):
+if (translated_tokens := split_with_frontmatter(translated_text)):
translated_tokens[1] = fix_frontmatter(translated_tokens[1], original_text.split(translated_tokens[0])[1])
if translated_tokens[3].strip():
translated_tokens.insert(3, translated_preamble)
@@ -125,7 +169,7 @@ def translate_document(document_path, documents):
translated_text = (translated_preamble + translated_text)
destination_path = make_destination_path(document_path, destination_language)
Path('/'.join(destination_path.split('/')[:-1])).mkdir(parents=True, exist_ok=True)
-open(destination_path, 'w').write(translated_text)
+open(destination_path, 'w').write(unwrap_from_translation(translated_text))
printf('\n')
def main():
@@ -139,6 +183,8 @@ def main():
def read_from_scripts(relative_path:str):
return open((dirname(realpath(__file__)) + '/../' + relative_path), 'r').read()
+TranslationMagic = ("__" + str(ascii_to_number("sitoctt")))
if __name__ == "__main__":
globals_text = read_from_scripts('Lib/Globals.sh')
exec(globals_text.split('#' + globals_text.splitlines()[0].split('#!')[1] + '!')[0])
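
For reference, a minimal standalone sketch (not taken from the commit) of the placeholder round-trip that the new wrap_for_translation / unwrap_from_translation helpers build on: a Hugo shortcode is packed into a digits-only placeholder that the machine translator should pass through untouched, then decoded back afterwards. The number_to_ascii below pads to the byte boundary slightly differently from the committed version; the function names and the example shortcode mirror the diff above.

# Illustrative only: round-trip of the digits-only shortcode placeholder.

def ascii_to_number(text: str) -> int:
    # Concatenate 8 bits per character and parse the result as one integer.
    return int(''.join(format(ord(c), 'b').zfill(8) for c in text), 2)

def number_to_ascii(number: int) -> str:
    # Inverse: pad the bit string up to a byte boundary, decode 8 bits at a time.
    binary = format(int(number), 'b')
    binary = binary.zfill(len(binary) + (-len(binary) % 8))
    return ''.join(chr(int(binary[i:i + 8], 2)) for i in range(0, len(binary), 8))

shortcode = "{{< noticeAutomaticTranslation it >}}"
placeholder = str(ascii_to_number(shortcode))  # digits only, survives machine translation
assert number_to_ascii(int(placeholder)) == shortcode
print(placeholder)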