Case-insensitive transclusion; warn on duplicate file names

This commit is contained in:
octospacc 2023-03-09 13:06:07 +01:00
parent fe4cf294d5
commit ad0a8be875
4 changed files with 50 additions and 14 deletions

View File

@ -71,7 +71,7 @@ def CheckSafeOutDir(OutDir:str):
logging.error(f"⛔ Output and Input directories ({OutDir}) can't be the same. Exiting.")
exit(1)
elif OutFolder.lower() in ReservedPaths and f"{InDir.lower()}/{OutFolder.lower()}" == OutDir.lower():
logging.error(f"⛔ Output directory {OutDir} can't be a reserved subdirectory of the Input. Exiting.")
logging.error(f"⛔ Output directory ({OutDir}) can't be a reserved subdirectory of the Input. Exiting.")
exit(1)
def GetModifiedFiles(OutDir):
@ -125,19 +125,52 @@ def WriteRedirects(Flags:dict, Pages:list, FinalPaths, Locale:dict):
def CopyBaseFiles(Flags:dict):
f = NameSpace(Flags)
Have = {"Pages": False, "Posts": False}
Prefix = {"Pages": "", "Posts": "/Posts"}
for Type in ('Pages', 'Posts'):
if os.path.isdir(Type):
Have[Type] = True
shutil.copytree(Type, f'{f.OutDir}{Prefix[Type]}', dirs_exist_ok=True)
shutil.copytree(Type, f'{f.OutDir}.Content{Prefix[Type]}', dirs_exist_ok=True)
shutil.copytree(Type, f'{f.OutDir}{PageTypeOutPrefix[Type]}', dirs_exist_ok=True)
shutil.copytree(Type, f'{f.OutDir}.Content{PageTypeOutPrefix[Type]}', dirs_exist_ok=True)
if f.GemtextOutput:
shutil.copytree('Posts', f'{f.OutDir}.gmi{Prefix[Type]}', ignore=IgnoreFiles, dirs_exist_ok=True)
shutil.copytree('Posts', f'{f.OutDir}.gmi{PageTypeOutPrefix[Type]}', ignore=IgnoreFiles, dirs_exist_ok=True)
return Have['Pages'] or Have['Posts']
def DedupeBaseFiles(Flags:dict):
    """Detect input files whose names clash case-insensitively in the copied
    output trees, warn once per kind of clash, and delete the duplicates.

    Two kinds of clash are reported:
    - same name and same extension, differing only in capitalization;
    - same name with a different extension.

    Flags: build flags dict; only OutDir is read (via NameSpace).
    Returns None; side effects are logging.warning, print, and os.remove.
    """
    f = NameSpace(Flags)
    # Runtime-visible warning; {0} is filled with the kind of difference.
    Msg = "⚠ Input files with the same name but different {0} are not allowed. Unpredictable issues will arise. You should rename your files to fix this issue."
    SaidDupes = {"Ext": False, "Name": False}  # emit each warning at most once
    Files = {}   # lowercased path-sans-extension -> (original name, extension)
    Remove = []  # duplicate file paths scheduled for deletion
    for Dir in (f.OutDir, f'{f.OutDir}.Content'):
        for File in Path(Dir).rglob('*'):
            if not os.path.isfile(File):
                continue
            File = str(File)
            # Files with no '.' in their basename (e.g. CNAME, LICENSE) would
            # all collapse to an empty name key and be wrongly removed as
            # duplicates of each other; they cannot clash by extension, so skip.
            if '.' not in os.path.basename(File):
                continue
            Name = '.'.join(File.split('.')[:-1])
            Lower = Name.lower()
            Ext = File.split('.')[-1]
            # A file with the same (case-insensitive) name was already seen
            if Lower in Files:
                if Files[Lower][-1].lower() == Ext.lower():
                    # Same extension too: names differ only by capitalization
                    if not SaidDupes['Name']:
                        logging.warning(Msg.format('capitalization'))
                        SaidDupes['Name'] = True
                else:
                    # Same name, different extension
                    if not SaidDupes['Ext']:
                        logging.warning(Msg.format('extension'))
                        SaidDupes['Ext'] = True
                Remove.append(File)
            else:
                Files[Lower] = (Name, Ext)
    for File in Remove:
        # Avoid duplicate prints executed for multiple folders: only the copy
        # under the plain OutDir (not OutDir.Content) is announced.
        if File.startswith(f'{f.OutDir}/'):
            Name = '.'.join(File[len(f'{f.OutDir}/'):].split('.')[:-1]) + '.*'
            print(Name if Name.startswith('Posts/') else f'Pages/{Name}')
        os.remove(File)
def BuildMain(Args, FeedEntries):
Flags, Snippets = {}, {}
HavePages, HavePosts = False, False
SiteConf = LoadConfFile('Site.ini')
#ConfigLogging(DefConfOptChoose('Logging', Args.Logging, ReadConf(SiteConf, 'staticoso', 'Logging')))
@ -222,9 +255,10 @@ def BuildMain(Args, FeedEntries):
LimitFiles = False
logging.info("Reading Base Files")
if not (CopyBaseFiles(Flags)):
logging.error("⛔ No Pages or posts found. Nothing to do, exiting!")
if not CopyBaseFiles(Flags):
logging.error("⛔ No Pages or Posts found. Nothing to do, exiting!")
exit(1)
DedupeBaseFiles(Flags)
logging.info("Generating HTML")
DictPages = MakeSite(
@ -236,7 +270,7 @@ def BuildMain(Args, FeedEntries):
Locale=Locale,
Threads=Threads)
# REFACTOR: Some functions below are still not changed to accept a Page as Dict, so let's convert to Lists
# REFACTOR: Some functions below are still not changed to accept a Page as Dict, so let's reconvert to Lists
ListPages = DictPages.copy()
for i, e in enumerate(ListPages):
ListPages[i] = list(e.values())

View File

@ -22,6 +22,8 @@ PageIndexStrNeg = tuple(list(NegStrBools) + ['none', 'unlisted', 'unindexed', 'h
InternalMacrosWraps = (('[', ']'), ('<', '>'))
PageTypeOutPrefix = {"Pages": "", "Posts": "/Posts"}
PageMetaDefault = {
'Template': None, # Replace with var
'Head': '',

View File

@ -49,7 +49,6 @@ def HandleDynamicParts(Flags:dict, Html:str, Snippets:dict):
# TODO: This would need to be handled either fully before or fully after all pages' content has been transformed to HTML, else other markups end up in HTML and the page is broken
def HandleTransclusions(Base:str, Caller:str, Pages:list):
#if Type == 'Evals': # [% cmd %] | {% cmd %}
Targets = []
Finding = Base
Start = Finding.find('{{')
@ -64,7 +63,7 @@ def HandleTransclusions(Base:str, Caller:str, Pages:list):
# We should show an error message on nonexistent transclusion and possible recursive transclusion, as currently this doesn't handle escaped tokens
if Target != Caller:
for File, Content, _, _ in Pages:
if File == Target:
if File.lower() == Target.lower():
Base = ReplWithEsc(Base, '{{' + Target + '}}', Content)
break
return Base
@ -215,7 +214,7 @@ def BuildPagesSearch(Flags:dict, Pages:list, Template:str, Snippets:dict, Locale
SearchContent += f'''
<div
class="staticoso-HtmlSearch-Page"
data-staticoso-htmlsearch-name="{html.escape(html.unescape(Page["Titles"][0]), quote=True)}"
data-staticoso-htmlsearch-name="{html.escape(html.unescape(Page["Titles"][0] if Page["Titles"] else ""), quote=True)}"
data-staticoso-htmlsearch-href="{StripExt(Page["File"])}.html"
>
{Page["ContentHtml"]}

View File

@ -50,8 +50,9 @@ def LoadFromDir(Dir:str, Matchs:list):
Matchs = SureList(Matchs)
for Match in Matchs:
for File in Path(Dir).rglob(Match):
File = str(File)[len(Dir)+1:]
Contents.update({File: ReadFile(f"{Dir}/{File}")})
if os.path.isfile(File):
Name = str(File)[len(Dir)+1:]
Contents.update({Name: ReadFile(File)})
return Contents
def mkdirps(Dir:str):