mirror of https://gitlab.com/octtspacc/staticoso
Refactoring: Build.py/MakeSite(), and minor modules
This commit is contained in:
parent
c35aa94345
commit
f43026b13b
|
@ -2,7 +2,7 @@ image: alpine:latest
|
|||
|
||||
before_script: |
|
||||
apk update
|
||||
apk add python3
|
||||
apk add python3 make
|
||||
|
||||
pages:
|
||||
stage: deploy
|
||||
|
|
|
@ -15,6 +15,7 @@ import time
|
|||
from ast import literal_eval
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from Modules.Assets import *
|
||||
from Modules.Config import *
|
||||
from Modules.Gemini import *
|
||||
from Modules.Globals import *
|
||||
|
@ -22,15 +23,8 @@ from Modules.Logging import *
|
|||
from Modules.Markdown import *
|
||||
from Modules.Site import *
|
||||
from Modules.Sitemap import *
|
||||
from Modules.Social import *
|
||||
from Modules.Utils import *
|
||||
try:
|
||||
from Modules.ActivityPub import *
|
||||
ActivityPub = True
|
||||
except:
|
||||
logging.warning("⚠ Can't load the ActivityPub module. Its use is disabled. Make sure the 'requests' library is installed.")
|
||||
ActivityPub = False
|
||||
from Libs import rcssmin
|
||||
cssmin = rcssmin._make_cssmin(python_only=True)
|
||||
|
||||
def ResetOutDir(OutDir):
|
||||
for e in (OutDir, f'{OutDir}.Content', f'{OutDir}.gmi'):
|
||||
|
@ -120,7 +114,7 @@ def WriteRedirects(Flags, Pages, FinalPaths, Locale):
|
|||
StrRedirect=Locale['IfNotRedirected']))
|
||||
|
||||
def BuildMain(Args, FeedEntries):
|
||||
Flags, Snippets, FinalPaths = {}, {}, []
|
||||
Flags, Snippets = {}, {}
|
||||
HavePages, HavePosts = False, False
|
||||
SiteConf = LoadConfFile('Site.ini')
|
||||
|
||||
|
@ -232,34 +226,17 @@ def BuildMain(Args, FeedEntries):
|
|||
Locale=Locale,
|
||||
Threads=Threads)
|
||||
|
||||
# REFACTOR: The functions below are still not changed to accept a Page as Dict
|
||||
for i, e in enumerate(Pages):
|
||||
Pages[i] = list(e.values())
|
||||
|
||||
if FeedEntries != 0:
|
||||
logging.info("Generating Feeds")
|
||||
for FeedType in (True, False):
|
||||
MakeFeed(Flags, Pages, FeedType)
|
||||
|
||||
if ActivityPub and MastodonURL and MastodonToken and SiteDomain:
|
||||
logging.info("Mastodon Stuff")
|
||||
MastodonPosts = MastodonShare(Flags, Pages, Locale)
|
||||
else:
|
||||
MastodonPosts = []
|
||||
|
||||
for File, Content, Titles, Meta, ContentHTML, SlimHTML, Description, Image in Pages:
|
||||
if IsLightRun(File, LimitFiles):
|
||||
continue
|
||||
File = f"{OutDir}/{StripExt(File)}.html"
|
||||
Content = ReadFile(File)
|
||||
Post = ''
|
||||
for p in MastodonPosts:
|
||||
if p['Link'] == SiteDomain + '/' + File[len(f"{OutDir}/"):]:
|
||||
Post = HTMLCommentsBlock.format(
|
||||
StrComments=Locale['Comments'],
|
||||
StrOpen=Locale['OpenInNewTab'],
|
||||
URL=p['Post'])
|
||||
break
|
||||
Content = ReplWithEsc(Content, '[staticoso:Comments]', Post)
|
||||
Content = ReplWithEsc(Content, '<staticoso:Comments>', Post)
|
||||
WriteFile(File, Content)
|
||||
FinalPaths += [File]
|
||||
logging.info("Applying Social Integrations")
|
||||
FinalPaths = ApplySocialIntegrations(Flags, Pages, LimitFiles, Locale)
|
||||
|
||||
logging.info("Creating Redirects")
|
||||
WriteRedirects(Flags, Pages, FinalPaths, Locale)
|
||||
|
@ -279,20 +256,7 @@ def BuildMain(Args, FeedEntries):
|
|||
MakeSitemap(Flags, Pages)
|
||||
|
||||
logging.info("Preparing Assets")
|
||||
#os.system(f"cp -R Assets/* {OutDir}/")
|
||||
if Flags['MinifyAssets']:
|
||||
shutil.copytree('Assets', OutDir, ignore=IgnoreFiles, dirs_exist_ok=True)
|
||||
for File in Path('Assets').rglob('*'):
|
||||
if os.path.isfile(File):
|
||||
Dest = f"{OutDir}/{str(File)[len('Assets')+1:]}"
|
||||
if str(File).lower().endswith(FileExtensions['HTML']):
|
||||
WriteFile(Dest, DoMinifyHTML(ReadFile(File), MinifyKeepComments))
|
||||
elif str(File).lower().endswith('.css'):
|
||||
WriteFile(Dest, cssmin(ReadFile(File), MinifyKeepComments))
|
||||
else:
|
||||
shutil.copy2(File, Dest)
|
||||
else:
|
||||
shutil.copytree('Assets', OutDir, dirs_exist_ok=True)
|
||||
PrepareAssets(Flags)
|
||||
|
||||
if __name__ == '__main__':
|
||||
StartTime = time.time()
|
||||
|
|
|
@ -0,0 +1,31 @@
|
|||
""" ================================== |
|
||||
| This file is part of |
|
||||
| staticoso |
|
||||
| Just a simple Static Site Generator |
|
||||
| |
|
||||
| Licensed under the AGPLv3 license |
|
||||
| Copyright (C) 2022-2023, OctoSpacc |
|
||||
| ================================== """
|
||||
|
||||
import shutil
|
||||
from Modules.HTML import DoMinifyHTML
|
||||
from Modules.Utils import *
|
||||
|
||||
from Libs import rcssmin
|
||||
cssmin = rcssmin._make_cssmin(python_only=True)
|
||||
|
||||
def PrepareAssets(Flags):
    """Copy the Assets tree into the output folder, minifying HTML/CSS when enabled."""
    f = NameSpace(Flags)
    if not f.MinifyAssets:
        # No minification requested: plain recursive copy and done
        shutil.copytree('Assets', f.OutDir, dirs_exist_ok=True)
        return
    # Replicate only the folder structure, then place each file individually
    shutil.copytree('Assets', f.OutDir, ignore=IgnoreFiles, dirs_exist_ok=True)
    for Entry in Path('Assets').rglob('*'):
        if not os.path.isfile(Entry):
            continue
        Lower = str(Entry).lower()
        Target = f"{f.OutDir}/{str(Entry)[len('Assets')+1:]}"
        if Lower.endswith(FileExtensions['HTML']):
            WriteFile(Target, DoMinifyHTML(ReadFile(Entry), f.MinifyKeepComments))
        elif Lower.endswith('.css'):
            WriteFile(Target, cssmin(ReadFile(Entry), f.MinifyKeepComments))
        else:
            # Any other asset is copied verbatim (with metadata)
            shutil.copy2(Entry, Target)
|
|
@ -156,7 +156,7 @@ def FormatTitles(Titles:list, Flatten=False):
|
|||
# Clean up a generic HTML tree such that it's compliant with the HTML Journal standard
|
||||
# (https://m15o.ichi.city/site/subscribing-to-a-journal-page.html);
|
||||
# basis is: find an element with the JournalBody attr., and group its direct children as <article>s
|
||||
def MakeHTMLJournal(Flags, Locale, FilePath, HTML):
|
||||
def MakeHTMLJournal(Flags:dict, Locale:dict, FilePath:str, HTML:str):
|
||||
Soup, Journal, Entries = MkSoup(HTML), '', []
|
||||
for t in Soup.find_all(attrs={"htmljournal":True}):
|
||||
#JournalStyle = JournalStyles[t.attrs["journalstyle"]] if 'journalstyle' in t.attrs and t.attrs["journalstyle"] in JournalStyles else JournalStyles['Default']
|
||||
|
|
|
@ -12,7 +12,7 @@
|
|||
from Libs.feedgen.feed import FeedGenerator
|
||||
from Modules.Utils import *
|
||||
|
||||
def MakeFeed(Flags, Pages, FullSite=False):
|
||||
def MakeFeed(Flags:dict, Pages:list, FullSite=False):
|
||||
CategoryFilter = Flags['FeedCategoryFilter']
|
||||
MaxEntries = Flags['FeedEntries']
|
||||
|
||||
|
|
|
@ -13,7 +13,7 @@ from Libs.bs4 import BeautifulSoup
|
|||
from Modules.HTML import *
|
||||
from Modules.Utils import *
|
||||
|
||||
def FixGemlogDateLine(Line):
|
||||
def FixGemlogDateLine(Line:str):
|
||||
if len(Line) >= 2 and Line[0] == '[' and Line[1].isdigit():
|
||||
Line = Line[1:]
|
||||
else:
|
||||
|
@ -22,7 +22,7 @@ def FixGemlogDateLine(Line):
|
|||
Line = Words[0] + '\n' + Words[1][1:] + ' ' + ' '.join(Words[2:])
|
||||
return Line
|
||||
|
||||
def GemtextCompileList(Flags, Pages, LimitFiles):
|
||||
def GemtextCompileList(Flags:dict, Pages:list, LimitFiles):
|
||||
Cmd = ''
|
||||
for File, Content, Titles, Meta, ContentHTML, SlimHTML, Description, Image in Pages:
|
||||
if IsLightRun(File, LimitFiles):
|
||||
|
@ -53,7 +53,7 @@ def FindEarliest(Str, Items):
|
|||
Str.find(Item)
|
||||
return Pos, Item
|
||||
|
||||
def ParseTag(Content):
|
||||
print(Content)
|
||||
def ParseTag(Content:str):
|
||||
#print(Content)
|
||||
Parse = BeautifulSoup(str(Content), 'html.parser')
|
||||
Tag = Parse.find()
|
||||
|
|
|
@ -15,30 +15,31 @@ from Modules.Utils import *
|
|||
|
||||
# Suppress useless bs4 warnings
|
||||
warnings.filterwarnings('ignore', message='The input looks more like a filename than markup.')
|
||||
warnings.filterwarnings('ignore', message='The soupsieve package is not installed.')
|
||||
|
||||
def MkSoup(HTML):
|
||||
return BeautifulSoup(HTML, 'html.parser')
|
||||
def MkSoup(Html:str):
|
||||
return BeautifulSoup(Html, 'html.parser')
|
||||
|
||||
def StripAttrs(HTML):
|
||||
Soup = MkSoup(HTML)
|
||||
def StripAttrs(Html:str):
|
||||
Soup = MkSoup(Html)
|
||||
Tags = Soup.find_all()
|
||||
for t in Tags:
|
||||
if 'href' not in t.attrs and 'src' not in t.attrs:
|
||||
t.attrs = {}
|
||||
return str(Soup)
|
||||
|
||||
def StripTags(HTML, ToStrip): # Remove desired tags from the HTML
|
||||
Soup = MkSoup(HTML)
|
||||
def StripTags(Html:str, ToStrip:list): # Remove desired tags from the HTML
|
||||
Soup = MkSoup(Html)
|
||||
Tags = Soup.find_all()
|
||||
for t in Tags:
|
||||
if t.name in ToStrip:
|
||||
t.replace_with('')
|
||||
return str(Soup)
|
||||
|
||||
def DoHTMLFixPre(HTML):
|
||||
if not ("<pre>" in HTML or "<pre " in HTML):
|
||||
return HTML
|
||||
Soup = MkSoup(HTML)
|
||||
def DoHTMLFixPre(Html:str):
|
||||
if not ("<pre>" in Html or "<pre " in Html):
|
||||
return Html
|
||||
Soup = MkSoup(Html)
|
||||
Tags = Soup.find_all('pre')
|
||||
for t in Tags:
|
||||
FirstLine = str(t).splitlines()[0].lstrip().rstrip()
|
||||
|
@ -78,8 +79,8 @@ def AddToTagStartEnd(HTML, MatchStart, MatchEnd, AddStart, AddEnd): # This doesn
|
|||
DidEnd -= 1
|
||||
return HTML
|
||||
|
||||
def SquareFnrefs(HTML): # Different combinations of formatting for Soup .prettify, .encode, .decode break different page elements, don't use this for now
|
||||
Soup = MkSoup(HTML)
|
||||
def SquareFnrefs(Html:str): # Different combinations of formatting for Soup .prettify, .encode, .decode break different page elements, don't use this for now
|
||||
Soup = MkSoup(Html)
|
||||
Tags = Soup.find_all('sup')
|
||||
for t in Tags:
|
||||
if 'id' in t.attrs and t.attrs['id'].startswith('fnref:'):
|
||||
|
@ -87,9 +88,9 @@ def SquareFnrefs(HTML): # Different combinations of formatting for Soup .prettif
|
|||
s.replace_with(f'[{t}]')
|
||||
return str(Soup.prettify(formatter=None))
|
||||
|
||||
def DoMinifyHTML(HTML, KeepComments):
|
||||
def DoMinifyHTML(Html:str, KeepComments:bool):
|
||||
return htmlmin.minify(
|
||||
input=HTML,
|
||||
input=Html,
|
||||
remove_comments=not KeepComments,
|
||||
remove_empty_space=True,
|
||||
remove_all_empty_space=False,
|
||||
|
|
|
@ -16,7 +16,9 @@ from Modules.Utils import *
|
|||
# Menu styles:
|
||||
# - Simple: Default, Flat, Line
|
||||
# - Others: Excerpt, Image, Preview (Excerpt + Image), Full
|
||||
def GetHTMLPagesList(Pages:list, BlogName:str, SiteRoot:str, PathPrefix:str, CallbackFile=None, Unite=[], Type=None, Limit=None, PathFilter='', Category=None, For='Menu', MarkdownExts=(), MenuStyle='Default', ShowPaths=True):
|
||||
def GetHTMLPagesList(Flags:dict, Pages:list, PathPrefix:str, CallbackFile=None, Unite=[], Type=None, Limit=None, PathFilter='', Category=None, For='Menu', MenuStyle='Default', ShowPaths=True):
|
||||
f = NameSpace(Flags)
|
||||
|
||||
Flatten, SingleLine, DoneCount, PrevDepth = False, False, 0, 0
|
||||
if MenuStyle == 'Flat':
|
||||
Flatten = True
|
||||
|
@ -57,7 +59,7 @@ def GetHTMLPagesList(Pages:list, BlogName:str, SiteRoot:str, PathPrefix:str, Cal
|
|||
Levels = '.' * ((Depth-2+i) if not Flatten else 0) + ':'
|
||||
# If search node endswith index, it's a page; else, it's a folder
|
||||
if StripExt(File).endswith('index'):
|
||||
Title = MakeListTitle(File, Meta, Titles, 'HTMLTitle', BlogName, PathPrefix)
|
||||
Title = MakeListTitle(File, Meta, Titles, 'HTMLTitle', f.BlogName, PathPrefix)
|
||||
DoneCount += 1
|
||||
else:
|
||||
Title = CurParent[Depth-2+i]
|
||||
|
@ -71,9 +73,9 @@ def GetHTMLPagesList(Pages:list, BlogName:str, SiteRoot:str, PathPrefix:str, Cal
|
|||
Levels = '.' * ((Depth-1) if not Flatten else 0) + ':'
|
||||
DoneCount += 1
|
||||
if Meta['Order'] == 'Unite':
|
||||
Title = markdown(MarkdownHTMLEscape(File, MarkdownExts), extensions=MarkdownExts).removeprefix('<p>').removesuffix('<p>')
|
||||
Title = markdown(MarkdownHTMLEscape(File, f.MarkdownExts), extensions=f.MarkdownExts).removeprefix('<p>').removesuffix('<p>')
|
||||
else:
|
||||
Title = MakeListTitle(File, Meta, Titles, 'HTMLTitle', BlogName, PathPrefix)
|
||||
Title = MakeListTitle(File, Meta, Titles, 'HTMLTitle', f.BlogName, PathPrefix)
|
||||
if SingleLine:
|
||||
List += ' <span>' + Title + '</span> '
|
||||
else:
|
||||
|
@ -122,7 +124,10 @@ def FindPreprocLine(Line:str, Meta, Macros):
|
|||
# IgnoreBlocksStart += [l]
|
||||
return (Meta, Macros, Changed)
|
||||
|
||||
def PagePreprocessor(Path:str, TempPath:str, Type:str, SiteTemplate, SiteRoot, GlobalMacros, CategoryUncategorized, LightRun:bool=False, Content=None):
|
||||
def PagePreprocessor(Flags:dict, Page:list, SiteTemplate, GlobalMacros, LightRun:bool=False):
|
||||
CategoryUncategorized = Flags['CategoriesUncategorized']
|
||||
Path, TempPath, Type, Content = Page
|
||||
|
||||
File = ReadFile(Path) if not Content else Content
|
||||
Path = Path.lower()
|
||||
Content, Titles, DashyTitles, HTMLTitlesFound, Macros, Meta, MetaDefault = '', [], [], False, '', '', {
|
||||
|
|
|
@ -12,12 +12,12 @@
|
|||
import os
|
||||
from Modules.Utils import *
|
||||
|
||||
def PugCompileList(OutputDir, Pages, LimitFiles):
|
||||
def PugCompileList(OutDir:str, Pages:list, LimitFiles):
|
||||
# Pug-cli seems to shit itself with folder paths as input, so we pass ALL the files as arguments
|
||||
Paths = ''
|
||||
for File, Content, Titles, Meta in Pages:
|
||||
if File.lower().endswith('.pug') and (LimitFiles == False or File in LimitFiles):
|
||||
Path = f'{OutputDir}/{File}'
|
||||
Path = f'{OutDir}/{File}'
|
||||
WriteFile(Path, Content)
|
||||
Paths += f'"{Path}" '
|
||||
if Paths:
|
||||
|
|
|
@ -20,7 +20,13 @@ from Modules.Meta import *
|
|||
from Modules.Pug import *
|
||||
from Modules.Utils import *
|
||||
|
||||
def PatchHTML(File, HTML, StaticPartsText, DynamicParts, DynamicPartsText, HTMLPagesList, PagePath, Content, Titles, Meta, SiteDomain, SiteRoot, SiteName, BlogName, FolderRoots, Categories, SiteLang, Locale, LightRun):
|
||||
def PatchHTML(Flags, File, HTML, StaticPartsText, DynamicParts, DynamicPartsText, HTMLPagesList, PagePath, Content, Titles, Meta, FolderRoots, Categories, Locale, LightRun):
|
||||
SiteDomain = Flags['SiteDomain']
|
||||
SiteRoot = Flags['SiteRoot']
|
||||
SiteLang = Flags['SiteLang']
|
||||
SiteName = Flags['SiteName']
|
||||
BlogName = Flags['BlogName']
|
||||
|
||||
HTMLTitles = FormatTitles(Titles)
|
||||
BodyDescription, BodyImage = '', ''
|
||||
if not File.lower().endswith('.txt'):
|
||||
|
@ -171,8 +177,9 @@ def PatchHTML(File, HTML, StaticPartsText, DynamicParts, DynamicPartsText, HTMLP
|
|||
|
||||
return HTML, ContentHTML, Description, Image
|
||||
|
||||
def HandlePage(Flags, Page, Pages, Categories, LimitFiles, Snippets, ConfMenu, Locale):
|
||||
def HandlePage(Flags:dict, Page:list, Pages, Categories, LimitFiles, Snippets, ConfMenu, Locale:dict):
|
||||
File, Content, Titles, Meta = Page
|
||||
|
||||
OutDir, MarkdownExts, Sorting, MinifyKeepComments = Flags['OutDir'], Flags['MarkdownExts'], Flags['Sorting'], Flags['MinifyKeepComments']
|
||||
SiteName, BlogName, SiteTagline = Flags['SiteName'], Flags['BlogName'], Flags['SiteTagline']
|
||||
SiteTemplate, SiteLang = Flags['SiteTemplate'], Flags['SiteLang']
|
||||
|
@ -200,17 +207,16 @@ def HandlePage(Flags, Page, Pages, Categories, LimitFiles, Snippets, ConfMenu, L
|
|||
else:
|
||||
TemplateMeta = TemplatePreprocessor(TemplatesText[Meta['Template']])
|
||||
HTMLPagesList = GetHTMLPagesList(
|
||||
Flags,
|
||||
Pages=Pages,
|
||||
BlogName=BlogName,
|
||||
SiteRoot=SiteRoot,
|
||||
PathPrefix=GetPathLevels(File),
|
||||
Unite=ConfMenu,
|
||||
Type='Page',
|
||||
For='Menu',
|
||||
MarkdownExts=MarkdownExts,
|
||||
MenuStyle=TemplateMeta['MenuStyle'])
|
||||
|
||||
HTML, ContentHTML, Description, Image = PatchHTML(
|
||||
Flags,
|
||||
File=File,
|
||||
HTML=TemplatesText[Meta['Template']],
|
||||
StaticPartsText=StaticPartsText,
|
||||
|
@ -221,26 +227,19 @@ def HandlePage(Flags, Page, Pages, Categories, LimitFiles, Snippets, ConfMenu, L
|
|||
Content=Content,
|
||||
Titles=Titles,
|
||||
Meta=Meta,
|
||||
SiteDomain=SiteDomain,
|
||||
SiteRoot=SiteRoot,
|
||||
SiteName=SiteName,
|
||||
BlogName=BlogName,
|
||||
FolderRoots=FolderRoots,
|
||||
Categories=Categories,
|
||||
SiteLang=SiteLang,
|
||||
Locale=Locale,
|
||||
LightRun=LightRun)
|
||||
|
||||
HTML = ReplWithEsc(HTML, f"<staticoso:Feed>", GetHTMLPagesList(
|
||||
Flags,
|
||||
Limit=Flags['FeedEntries'],
|
||||
Type='Post',
|
||||
Category=None if Flags['FeedCategoryFilter'] == '*' else Flags['FeedCategoryFilter'],
|
||||
Pages=Pages,
|
||||
BlogName=BlogName,
|
||||
SiteRoot=SiteRoot,
|
||||
PathPrefix=GetPathLevels(File),
|
||||
For='Categories',
|
||||
MarkdownExts=MarkdownExts,
|
||||
MenuStyle='Flat',
|
||||
ShowPaths=False))
|
||||
if 'staticoso:DirectoryList:' in HTML: # Reduce risk of unnecessary cycles
|
||||
|
@ -249,14 +248,12 @@ def HandlePage(Flags, Page, Pages, Categories, LimitFiles, Snippets, ConfMenu, L
|
|||
if Line.startswith('<staticoso:DirectoryList:') and Line.endswith('>'):
|
||||
Path = Line[len('<staticoso:DirectoryList:'):-1]
|
||||
DirectoryList = GetHTMLPagesList(
|
||||
Flags,
|
||||
CallbackFile=File,
|
||||
Pages=Pages,
|
||||
BlogName=BlogName,
|
||||
SiteRoot=SiteRoot,
|
||||
PathPrefix=GetPathLevels(File),
|
||||
PathFilter=Path,
|
||||
For='Categories',
|
||||
MarkdownExts=MarkdownExts,
|
||||
MenuStyle='Flat')
|
||||
HTML = ReplWithEsc(HTML, f"<staticoso:DirectoryList:{Path}>", DirectoryList)
|
||||
|
||||
|
@ -287,6 +284,7 @@ def HandlePage(Flags, Page, Pages, Categories, LimitFiles, Snippets, ConfMenu, L
|
|||
|
||||
if not LightRun and 'htmljournal' in ContentHTML.lower(): # Avoid extra cycles
|
||||
HTML, _, _, _ = PatchHTML(
|
||||
Flags,
|
||||
File=File,
|
||||
HTML=TemplatesText[Meta['Template']],
|
||||
StaticPartsText=StaticPartsText,
|
||||
|
@ -297,32 +295,92 @@ def HandlePage(Flags, Page, Pages, Categories, LimitFiles, Snippets, ConfMenu, L
|
|||
Content=MakeHTMLJournal(Flags, Locale, f'{StripExt(File)}.html', ContentHTML),
|
||||
Titles='',
|
||||
Meta=Meta,
|
||||
SiteDomain=SiteDomain,
|
||||
SiteRoot=SiteRoot,
|
||||
SiteName=SiteName,
|
||||
BlogName=BlogName,
|
||||
FolderRoots=FolderRoots,
|
||||
Categories=Categories,
|
||||
SiteLang=SiteLang,
|
||||
Locale=Locale,
|
||||
LightRun=LightRun)
|
||||
if Flags["JournalRedirect"]:
|
||||
HTML = HTML.replace('</head>', f"""<meta http-equiv="refresh" content="0; url='./{PagePath.split('''/''')[-1]}'"></head>""")
|
||||
WriteFile(StripExt(PagePath)+'.Journal.html', HTML)
|
||||
|
||||
return [File, Content, Titles, Meta, ContentHTML, SlimHTML, Description, Image]
|
||||
#return [File, Content, Titles, Meta, ContentHTML, SlimHTML, Description, Image]
|
||||
return {"File":File, "Content":Content, "Titles":Titles, "Meta":Meta, "ContentHtml":ContentHTML, "SlimHtml":SlimHTML, "Description":Description, "Image":Image}
|
||||
|
||||
def MultiprocPagePreprocessor(d):
|
||||
PrintProcPercentDots(d['Process'], 2)
|
||||
return PagePreprocessor(d['Path'], d['TempPath'], d['Type'], d['Template'], d['SiteRoot'], d['GlobalMacros'], d['CategoryUncategorized'], d['LightRun'])
|
||||
def MultiprocPagePreprocessor(d:dict):
    """Pool worker wrapper: print a progress tick, then preprocess one page job."""
    PrintProcPercentDots(d['Process'])
    Flags, Page = d['Flags'], d['Page']
    return PagePreprocessor(Flags, Page, d['Template'], d['GlobalMacros'], d['LightRun'])
|
||||
|
||||
def MultiprocHandlePage(d):
|
||||
def MultiprocHandlePage(d:dict):
    """Pool worker wrapper: print a progress tick, then render one page job."""
    PrintProcPercentDots(d['Process'])
    Args = (d['Flags'], d['Page'], d['Pages'], d['Categories'],
            d['LimitFiles'], d['Snippets'], d['ConfMenu'], d['Locale'])
    return HandlePage(*Args)
|
||||
|
||||
def MakeSite(Flags, LimitFiles, Snippets, ConfMenu, GlobalMacros, Locale, Threads):
|
||||
PagesPaths, PostsPaths, Pages, MadePages, Categories = [], [], [], [], {}
|
||||
def FindPagesPaths():
    """Scan the Pages/ and Posts/ source trees for page files.

    Returns a dict {"Pages": [...], "Posts": [...]} of paths relative to
    each tree, collected per extension in FileExtensions['Pages'] order.
    """
    Found = {"Pages": [], "Posts": []}
    for Kind in ('Pages', 'Posts'):
        for Ext in FileExtensions['Pages']:
            Found[Kind] += [FileToStr(File, f'{Kind}/')
                            for File in Path(Kind).rglob(f'*.{Ext}')]
    return Found
|
||||
|
||||
def ReorderPagesPaths(Paths:dict, Sorting:dict):
    """Sort each path group by the date in its filename; reverse when configured.

    Sorting[group] equal to 'Inverse' or 'Reverse' flips that group's order.
    Mutates and returns Paths.
    """
    for Group in ('Pages', 'Posts'):
        Ordered = FileNameDateSort(Paths[Group])
        if Sorting[Group] in ('Inverse', 'Reverse'):
            Ordered.reverse()
        Paths[Group] = Ordered
    return Paths
|
||||
|
||||
def PopulateCategoryLists(Flags:dict, Pages:list, Categories):
    """Append to each category its flat HTML listing of member pages, then posts.

    Mutates and returns the Categories mapping (category name -> HTML string).
    """
    for Name in Categories:
        for Kind in ('Page', 'Post'):
            Listing = GetHTMLPagesList(
                Flags,
                Pages=Pages,
                PathPrefix=GetPathLevels('Categories/'),
                Type=Kind,
                Category=Name,
                For='Categories',
                MenuStyle='Flat')
            Categories[Name] += Listing
    return Categories
|
||||
|
||||
def MakeAutoCategories(Flags:dict, Categories, GlobalMacros=None):
    """Create stub pages for categories that lack a page of their own.

    For every category with no existing `Categories/<Cat>.*` file under the
    output tree, write a templated Markdown stub, preprocess it, and collect
    it. Returns a list of [File, Content, Titles, Meta] page records; empty
    unless the 'CategoriesAutomatic' flag is set.
    GlobalMacros is optional for backward compatibility with existing callers.
    """
    Pages = []
    if not Flags['CategoriesAutomatic']:
        return Pages
    OutDir = Flags['OutDir']
    Dir = f'{OutDir}/Categories'
    for Cat in Categories:
        # Any output file named like the category means a page already exists
        Exists = any(True for _ in Path(Dir).rglob(str(Cat)+'.*'))
        if not Exists:
            File = f'Categories/{Cat}.md'
            FilePath = f'{OutDir}/{File}'
            WriteFile(FilePath, CategoryPageTemplate.format(Name=Cat))
            # NOTE(review): the original referenced undefined free names
            # (Type, SiteTemplate, GlobalMacros, LightRun) and would raise
            # NameError here; the values below are the apparent intent
            # (a full-build 'Page') — confirm against callers.
            _, Content, Titles, Meta = PagePreprocessor(
                Flags,
                [FilePath, FilePath, 'Page', None],
                Flags['SiteTemplate'],
                {} if GlobalMacros is None else GlobalMacros,
                LightRun=False)
            # Append one 4-item page record; the original extended with 4
            # loose items, which breaks downstream per-page unpacking.
            Pages += [[File, Content, Titles, Meta]]
    return Pages
|
||||
|
||||
def PreprocessSourcePages(Flags:dict, PagesPaths:dict, LimitFiles, SiteTemplate, GlobalMacros, PoolSize:int):
    """Preprocess every source page and post in a process pool.

    Builds one job dict per file and hands them to DoMultiProc with
    progress output enabled; returns the preprocessed pages in order.
    """
    Jobs = []
    for Kind, Prefix in (('Page', ''), ('Post', 'Posts/')):
        for Name in PagesPaths[f'{Kind}s']:
            TempPath = f"{Prefix}{Name}"
            # Light run (skip heavy work) unless this file is in the rebuild set
            LightRun = not (LimitFiles == False or TempPath in LimitFiles)
            Jobs.append({
                'Flags': Flags,
                'Page': [f"{Kind}s/{Name}", TempPath, Kind, None],
                'Template': SiteTemplate,
                'GlobalMacros': GlobalMacros,
                'LightRun': LightRun})
    return DoMultiProc(MultiprocPagePreprocessor, Jobs, PoolSize, True)
|
||||
|
||||
def WriteProcessedPages(Flags:dict, Pages:list, Categories, ConfMenu, Snippets, LimitFiles, PoolSize:int, Locale:dict):
    """Render and write every preprocessed page in a process pool.

    Builds one job dict per page and hands them to DoMultiProc with
    progress output enabled; returns the handled pages in order.
    """
    Jobs = [{
        'Flags': Flags,
        'Page': Page,
        'Pages': Pages,
        'Categories': Categories,
        'LimitFiles': LimitFiles,
        'Snippets': Snippets,
        'ConfMenu': ConfMenu,
        'Locale': Locale} for Page in Pages]
    return DoMultiProc(MultiprocHandlePage, Jobs, PoolSize, True)
|
||||
|
||||
def MakeSite(Flags:dict, LimitFiles, Snippets, ConfMenu, GlobalMacros, Locale:dict, Threads):
|
||||
Pages, MadePages, Categories = [], [], {}
|
||||
PoolSize = cpu_count() if Threads <= 0 else Threads
|
||||
|
||||
f = NameSpace(Flags)
|
||||
OutDir, MarkdownExts, Sorting = Flags['OutDir'], Flags['MarkdownExts'], Flags['Sorting']
|
||||
SiteName, BlogName, SiteTagline = Flags['SiteName'], Flags['BlogName'], Flags['SiteTagline']
|
||||
SiteTemplate, SiteLang = Flags['SiteTemplate'], Flags['SiteLang']
|
||||
|
@ -331,74 +389,29 @@ def MakeSite(Flags, LimitFiles, Snippets, ConfMenu, GlobalMacros, Locale, Thread
|
|||
ImgAltToTitle, ImgTitleToAlt = Flags['ImgAltToTitle'], Flags['ImgTitleToAlt']
|
||||
DynamicParts, DynamicPartsText, StaticPartsText, TemplatesText = Flags['DynamicParts'], Snippets['DynamicParts'], Snippets['StaticParts'], Snippets['Templates']
|
||||
|
||||
for Ext in FileExtensions['Pages']:
|
||||
for File in Path('Pages').rglob(f"*.{Ext}"):
|
||||
PagesPaths += [FileToStr(File, 'Pages/')]
|
||||
for File in Path('Posts').rglob(f"*.{Ext}"):
|
||||
PostsPaths += [FileToStr(File, 'Posts/')]
|
||||
logging.info(f"Pages Found: {len(PagesPaths+PostsPaths)}")
|
||||
logging.info("Finding Pages")
|
||||
PagesPaths = FindPagesPaths()
|
||||
logging.info(f"Pages Found: {len(PagesPaths['Pages']+PagesPaths['Posts'])}")
|
||||
|
||||
PagesPaths = FileNameDateSort(PagesPaths)
|
||||
if Sorting['Pages'] == 'Inverse':
|
||||
PagesPaths.reverse()
|
||||
PostsPaths = FileNameDateSort(PostsPaths)
|
||||
if Sorting['Posts'] == 'Inverse':
|
||||
PostsPaths.reverse()
|
||||
logging.info("Reordering Pages")
|
||||
PagesPaths = ReorderPagesPaths(PagesPaths, f.Sorting)
|
||||
|
||||
logging.info("Preprocessing Source Pages")
|
||||
MultiprocPages = []
|
||||
for Type in ['Page', 'Post']:
|
||||
if Type == 'Page':
|
||||
Files = PagesPaths
|
||||
PathPrefix = ''
|
||||
elif Type == 'Post':
|
||||
Files = PostsPaths
|
||||
PathPrefix = 'Posts/'
|
||||
for i,File in enumerate(Files):
|
||||
TempPath = f"{PathPrefix}{File}"
|
||||
LightRun = False if LimitFiles == False or TempPath in LimitFiles else True
|
||||
MultiprocPages += [{'Process':{'Num':i, 'Count':len(Files)}, 'Path':f"{Type}s/{File}", 'TempPath':TempPath, 'Type':Type, 'Template':SiteTemplate, 'SiteRoot':SiteRoot, 'GlobalMacros':GlobalMacros, 'CategoryUncategorized':CategoryUncategorized, 'LightRun':LightRun}]
|
||||
os.system('printf "["')
|
||||
with Pool(PoolSize) as MultiprocPool:
|
||||
Pages = MultiprocPool.map(MultiprocPagePreprocessor, MultiprocPages)
|
||||
os.system('printf "]\n"') # Make newline after percentage dots
|
||||
Pages = PreprocessSourcePages(Flags, PagesPaths, LimitFiles, SiteTemplate, GlobalMacros, PoolSize)
|
||||
|
||||
for File, Content, Titles, Meta in Pages:
|
||||
for Cat in Meta['Categories']:
|
||||
Categories.update({Cat:''})
|
||||
|
||||
PugCompileList(OutDir, Pages, LimitFiles)
|
||||
|
||||
if Categories:
|
||||
if Categories or f.CategoriesAutomatic:
|
||||
logging.info("Generating Category Lists")
|
||||
for Cat in Categories:
|
||||
for Type in ('Page', 'Post'):
|
||||
Categories[Cat] += GetHTMLPagesList(
|
||||
Pages=Pages,
|
||||
BlogName=BlogName,
|
||||
SiteRoot=SiteRoot,
|
||||
PathPrefix=GetPathLevels('Categories/'),
|
||||
Type=Type,
|
||||
Category=Cat,
|
||||
For='Categories',
|
||||
MarkdownExts=MarkdownExts,
|
||||
MenuStyle='Flat')
|
||||
|
||||
if AutoCategories:
|
||||
Dir = f"{OutDir}/Categories"
|
||||
for Cat in Categories:
|
||||
Exists = False
|
||||
for File in Path(Dir).rglob(str(Cat)+'.*'):
|
||||
Exists = True
|
||||
break
|
||||
if not Exists:
|
||||
File = f"Categories/{Cat}.md"
|
||||
FilePath = f"{OutDir}/{File}"
|
||||
WriteFile(FilePath, CategoryPageTemplate.format(Name=Cat))
|
||||
_, Content, Titles, Meta = PagePreprocessor(FilePath, FilePath, Type, SiteTemplate, SiteRoot, GlobalMacros, CategoryUncategorized, LightRun=LightRun)
|
||||
Pages += [[File, Content, Titles, Meta]]
|
||||
Categories = PopulateCategoryLists(Flags, Pages, Categories)
|
||||
Pages += MakeAutoCategories(Flags, Categories)
|
||||
|
||||
#logging.info("Building the HTML Search Page")
|
||||
#Pages += [PagePreprocessor(Path='Search.html', TempPath='Search.html', Type='Page', SiteTemplate=SiteTemplate, SiteRoot=SiteRoot, GlobalMacros=GlobalMacros, CategoryUncategorized=CategoryUncategorized, LightRun=LightRun, Content=BuildPagesSearch(Flags, Pages))]
|
||||
#Pages += [PagePreprocessor(Flags, Path='Search.html', TempPath='Search.html', Type='Page', SiteTemplate=SiteTemplate, GlobalMacros=GlobalMacros, LightRun=LightRun, Content=BuildPagesSearch(Flags, Pages))]
|
||||
|
||||
for i,e in enumerate(ConfMenu):
|
||||
for File, Content, Titles, Meta in Pages:
|
||||
|
@ -407,13 +420,7 @@ def MakeSite(Flags, LimitFiles, Snippets, ConfMenu, GlobalMacros, Locale, Thread
|
|||
ConfMenu[i] = None
|
||||
|
||||
logging.info("Writing Pages")
|
||||
MultiprocPages = []
|
||||
for i,Page in enumerate(Pages):
|
||||
MultiprocPages += [{'Process':{'Num':i, 'Count':len(Pages)}, 'Flags':Flags, 'Page':Page, 'Pages':Pages, 'Categories':Categories, 'LimitFiles':LimitFiles, 'Snippets':Snippets, 'ConfMenu':ConfMenu, 'Locale':Locale}]
|
||||
os.system('printf "["')
|
||||
with Pool(PoolSize) as MultiprocPool:
|
||||
MadePages = MultiprocPool.map(MultiprocHandlePage, MultiprocPages)
|
||||
os.system('printf "]\n"') # Make newline after percentage dots
|
||||
MadePages = WriteProcessedPages(Flags, Pages, Categories, ConfMenu, Snippets, LimitFiles, PoolSize, Locale)
|
||||
|
||||
# Do page transclusions here (?)
|
||||
#while True:
|
||||
|
|
|
@ -11,7 +11,7 @@ from urllib.parse import quote as URLEncode
|
|||
from Modules.HTML import *
|
||||
from Modules.Utils import *
|
||||
|
||||
def MakeSitemap(Flags, Pages):
|
||||
def MakeSitemap(Flags:dict, Pages:list):
|
||||
Map = ''
|
||||
Domain = Flags['SiteDomain'] + '/' if Flags['SiteDomain'] else ''
|
||||
for File, Content, Titles, Meta, ContentHtml, SlimHtml, Description, Image in Pages:
|
||||
|
|
|
@ -0,0 +1,48 @@
|
|||
""" ================================== |
|
||||
| This file is part of |
|
||||
| staticoso |
|
||||
| Just a simple Static Site Generator |
|
||||
| |
|
||||
| Licensed under the AGPLv3 license |
|
||||
| Copyright (C) 2022-2023, OctoSpacc |
|
||||
| ================================== """
|
||||
|
||||
from Modules.Logging import *
|
||||
from Modules.Utils import *
|
||||
|
||||
try:
|
||||
from Modules.ActivityPub import *
|
||||
ActivityPub = True
|
||||
except:
|
||||
logging.warning("⚠ Can't load the ActivityPub module. Its use is disabled. Make sure the 'requests' library is installed.")
|
||||
ActivityPub = False
|
||||
|
||||
def ApplySocialIntegrations(Flags, Pages, LimitFiles, Locale):
    """Inject social (Mastodon) comment blocks into the built HTML pages.

    Shares pages on Mastodon when the instance, token, and site domain are
    all configured, then replaces the <staticoso:Comments> placeholder in
    each written page with the matching post's comment block (or empty).
    Returns the list of HTML file paths that were rewritten.
    """
    f = NameSpace(Flags)
    FinalPaths = []

    if ActivityPub and f.MastodonURL and f.MastodonToken and f.SiteDomain:
        logging.info("Mastodon Operations")
        MastodonPosts = MastodonShare(Flags, Pages, Locale)
    else:
        MastodonPosts = []

    for File, Content, Titles, Meta, ContentHTML, SlimHTML, Description, Image in Pages:
        if IsLightRun(File, LimitFiles):
            continue
        File = f'{f.OutDir}/{StripExt(File)}.html'
        Content = ReadFile(File)
        Post = ''
        for p in MastodonPosts:
            # Fix: was the bare, undefined name `SiteDomain` (NameError as
            # soon as any Mastodon post existed); the domain lives on the
            # Flags namespace.
            if p['Link'] == f.SiteDomain + '/' + File[len(f'{f.OutDir}/'):]:
                Post = HTMLCommentsBlock.format(
                    StrComments=Locale['Comments'],
                    StrOpen=Locale['OpenInNewTab'],
                    URL=p['Post'])
                break
        Content = ReplWithEsc(Content, '<staticoso:Comments>', Post)
        WriteFile(File, Content)
        FinalPaths += [File]

    return FinalPaths
|
|
@ -10,7 +10,9 @@
|
|||
import json
|
||||
import os
|
||||
from datetime import datetime
|
||||
from multiprocessing import Pool, cpu_count
|
||||
from pathlib import Path
|
||||
from types import SimpleNamespace
|
||||
from Modules.Globals import *
|
||||
|
||||
def SureList(e):
|
||||
|
@ -36,14 +38,14 @@ def WriteFile(p, c, m='w'):
|
|||
logging.error(f"[E] Error writing file {p}")
|
||||
return False
|
||||
|
||||
def FileToStr(File, Truncate=''):
|
||||
def FileToStr(File:str, Truncate:str=''):
    """Stringify File and drop its first len(Truncate) characters (unconditionally)."""
    Text = str(File)
    return Text[len(Truncate):]
|
||||
|
||||
# With shutil.copytree copy only folder struct, no files; https://stackoverflow.com/a/15664273
|
||||
def IgnoreFiles(Dir, Files):
|
||||
def IgnoreFiles(Dir:str, Files):
    """shutil.copytree ignore-callback: name every plain file in Dir, so only the folder structure gets copied."""
    IsPlainFile = lambda Name: os.path.isfile(os.path.join(Dir, Name))
    return list(filter(IsPlainFile, Files))
|
||||
|
||||
def LoadFromDir(Dir, Matchs):
|
||||
def LoadFromDir(Dir:str, Matchs):
|
||||
Contents = {}
|
||||
Matchs = SureList(Matchs)
|
||||
for Match in Matchs:
|
||||
|
@ -52,10 +54,10 @@ def LoadFromDir(Dir, Matchs):
|
|||
Contents.update({File: ReadFile(f"{Dir}/{File}")})
|
||||
return Contents
|
||||
|
||||
def mkdirps(Dir):
|
||||
def mkdirps(Dir:str):
    """`mkdir -p`: create Dir and any missing parents; existing dirs are not an error. Returns None."""
    Target = Path(Dir)
    return Target.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
def StripExt(Path):
|
||||
def StripExt(Path:str):
    """Drop the last dot-separated extension; returns '' when the path has no dot."""
    Stem, _, _ = Path.rpartition('.')
    return Stem
|
||||
|
||||
def UndupeStr(Str, Known, Split):
|
||||
|
@ -174,3 +176,28 @@ def PrintProcPercentDots(Proc, DivMult=1):
|
|||
os.system('printf "="') # Using sys shell since for some reason print() without newline breaks here (doesn't print everytime)
|
||||
return True
|
||||
return False
|
||||
|
||||
def NameSpace(From):
    """Wrap a mapping in a SimpleNamespace so its keys read as attributes."""
    Space = SimpleNamespace(**From)
    return Space
|
||||
|
||||
def DoMultiProc(Funct, ArgsCollection:list, Threads:int=cpu_count(), Progress:bool=False):
    """Run Funct over ArgsCollection in a process pool; return results in order.

    Each element of ArgsCollection is a dict of data for Funct; a "Process"
    entry ({"Num": index, "Count": total}) is added to a COPY of each dict so
    workers can print progress dots (see PrintProcPercentDots) — the original
    version mutated the caller's dicts in place.
    When Progress is true, the run is wrapped in "[" ... "]" markers printed
    via the system shell (plain print() without newline misbehaves here).
    NOTE: the Threads default is evaluated once at import time.
    """
    Total = len(ArgsCollection)
    # Shallow-copy each job dict before tagging it: callers' data stays untouched
    Jobs = [{**Args, "Process": {"Num": Index, "Count": Total}}
            for Index, Args in enumerate(ArgsCollection)]
    if Progress:
        os.system('printf "["')
    with Pool(Threads) as MultiprocPool:
        Results = MultiprocPool.map(Funct, Jobs)
    if Progress:
        os.system('printf "]\n"') # Newline after percentage dots
    return Results
|
||||
|
|
Loading…
Reference in New Issue