mirror of
https://gitlab.com/octtspacc/staticoso
synced 2025-06-05 22:09:23 +02:00
Refactoring: Build.py/MakeSite(), and minor modules
This commit is contained in:
@ -2,7 +2,7 @@ image: alpine:latest
|
|||||||
|
|
||||||
before_script: |
|
before_script: |
|
||||||
apk update
|
apk update
|
||||||
apk add python3
|
apk add python3 make
|
||||||
|
|
||||||
pages:
|
pages:
|
||||||
stage: deploy
|
stage: deploy
|
||||||
|
@ -15,6 +15,7 @@ import time
|
|||||||
from ast import literal_eval
|
from ast import literal_eval
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
from Modules.Assets import *
|
||||||
from Modules.Config import *
|
from Modules.Config import *
|
||||||
from Modules.Gemini import *
|
from Modules.Gemini import *
|
||||||
from Modules.Globals import *
|
from Modules.Globals import *
|
||||||
@ -22,15 +23,8 @@ from Modules.Logging import *
|
|||||||
from Modules.Markdown import *
|
from Modules.Markdown import *
|
||||||
from Modules.Site import *
|
from Modules.Site import *
|
||||||
from Modules.Sitemap import *
|
from Modules.Sitemap import *
|
||||||
|
from Modules.Social import *
|
||||||
from Modules.Utils import *
|
from Modules.Utils import *
|
||||||
try:
|
|
||||||
from Modules.ActivityPub import *
|
|
||||||
ActivityPub = True
|
|
||||||
except:
|
|
||||||
logging.warning("⚠ Can't load the ActivityPub module. Its use is disabled. Make sure the 'requests' library is installed.")
|
|
||||||
ActivityPub = False
|
|
||||||
from Libs import rcssmin
|
|
||||||
cssmin = rcssmin._make_cssmin(python_only=True)
|
|
||||||
|
|
||||||
def ResetOutDir(OutDir):
|
def ResetOutDir(OutDir):
|
||||||
for e in (OutDir, f'{OutDir}.Content', f'{OutDir}.gmi'):
|
for e in (OutDir, f'{OutDir}.Content', f'{OutDir}.gmi'):
|
||||||
@ -120,7 +114,7 @@ def WriteRedirects(Flags, Pages, FinalPaths, Locale):
|
|||||||
StrRedirect=Locale['IfNotRedirected']))
|
StrRedirect=Locale['IfNotRedirected']))
|
||||||
|
|
||||||
def BuildMain(Args, FeedEntries):
|
def BuildMain(Args, FeedEntries):
|
||||||
Flags, Snippets, FinalPaths = {}, {}, []
|
Flags, Snippets = {}, {}
|
||||||
HavePages, HavePosts = False, False
|
HavePages, HavePosts = False, False
|
||||||
SiteConf = LoadConfFile('Site.ini')
|
SiteConf = LoadConfFile('Site.ini')
|
||||||
|
|
||||||
@ -232,34 +226,17 @@ def BuildMain(Args, FeedEntries):
|
|||||||
Locale=Locale,
|
Locale=Locale,
|
||||||
Threads=Threads)
|
Threads=Threads)
|
||||||
|
|
||||||
|
# REFACTOR: The functions below are still not changed to accept a Page as Dict
|
||||||
|
for i, e in enumerate(Pages):
|
||||||
|
Pages[i] = list(e.values())
|
||||||
|
|
||||||
if FeedEntries != 0:
|
if FeedEntries != 0:
|
||||||
logging.info("Generating Feeds")
|
logging.info("Generating Feeds")
|
||||||
for FeedType in (True, False):
|
for FeedType in (True, False):
|
||||||
MakeFeed(Flags, Pages, FeedType)
|
MakeFeed(Flags, Pages, FeedType)
|
||||||
|
|
||||||
if ActivityPub and MastodonURL and MastodonToken and SiteDomain:
|
logging.info("Applying Social Integrations")
|
||||||
logging.info("Mastodon Stuff")
|
FinalPaths = ApplySocialIntegrations(Flags, Pages, LimitFiles, Locale)
|
||||||
MastodonPosts = MastodonShare(Flags, Pages, Locale)
|
|
||||||
else:
|
|
||||||
MastodonPosts = []
|
|
||||||
|
|
||||||
for File, Content, Titles, Meta, ContentHTML, SlimHTML, Description, Image in Pages:
|
|
||||||
if IsLightRun(File, LimitFiles):
|
|
||||||
continue
|
|
||||||
File = f"{OutDir}/{StripExt(File)}.html"
|
|
||||||
Content = ReadFile(File)
|
|
||||||
Post = ''
|
|
||||||
for p in MastodonPosts:
|
|
||||||
if p['Link'] == SiteDomain + '/' + File[len(f"{OutDir}/"):]:
|
|
||||||
Post = HTMLCommentsBlock.format(
|
|
||||||
StrComments=Locale['Comments'],
|
|
||||||
StrOpen=Locale['OpenInNewTab'],
|
|
||||||
URL=p['Post'])
|
|
||||||
break
|
|
||||||
Content = ReplWithEsc(Content, '[staticoso:Comments]', Post)
|
|
||||||
Content = ReplWithEsc(Content, '<staticoso:Comments>', Post)
|
|
||||||
WriteFile(File, Content)
|
|
||||||
FinalPaths += [File]
|
|
||||||
|
|
||||||
logging.info("Creating Redirects")
|
logging.info("Creating Redirects")
|
||||||
WriteRedirects(Flags, Pages, FinalPaths, Locale)
|
WriteRedirects(Flags, Pages, FinalPaths, Locale)
|
||||||
@ -279,20 +256,7 @@ def BuildMain(Args, FeedEntries):
|
|||||||
MakeSitemap(Flags, Pages)
|
MakeSitemap(Flags, Pages)
|
||||||
|
|
||||||
logging.info("Preparing Assets")
|
logging.info("Preparing Assets")
|
||||||
#os.system(f"cp -R Assets/* {OutDir}/")
|
PrepareAssets(Flags)
|
||||||
if Flags['MinifyAssets']:
|
|
||||||
shutil.copytree('Assets', OutDir, ignore=IgnoreFiles, dirs_exist_ok=True)
|
|
||||||
for File in Path('Assets').rglob('*'):
|
|
||||||
if os.path.isfile(File):
|
|
||||||
Dest = f"{OutDir}/{str(File)[len('Assets')+1:]}"
|
|
||||||
if str(File).lower().endswith(FileExtensions['HTML']):
|
|
||||||
WriteFile(Dest, DoMinifyHTML(ReadFile(File), MinifyKeepComments))
|
|
||||||
elif str(File).lower().endswith('.css'):
|
|
||||||
WriteFile(Dest, cssmin(ReadFile(File), MinifyKeepComments))
|
|
||||||
else:
|
|
||||||
shutil.copy2(File, Dest)
|
|
||||||
else:
|
|
||||||
shutil.copytree('Assets', OutDir, dirs_exist_ok=True)
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
StartTime = time.time()
|
StartTime = time.time()
|
||||||
|
31
App/Source/Modules/Assets.py
Normal file
31
App/Source/Modules/Assets.py
Normal file
@ -0,0 +1,31 @@
|
|||||||
|
""" ================================== |
|
||||||
|
| This file is part of |
|
||||||
|
| staticoso |
|
||||||
|
| Just a simple Static Site Generator |
|
||||||
|
| |
|
||||||
|
| Licensed under the AGPLv3 license |
|
||||||
|
| Copyright (C) 2022-2023, OctoSpacc |
|
||||||
|
| ================================== """
|
||||||
|
|
||||||
|
import shutil
|
||||||
|
from Modules.HTML import DoMinifyHTML
|
||||||
|
from Modules.Utils import *
|
||||||
|
|
||||||
|
from Libs import rcssmin
|
||||||
|
cssmin = rcssmin._make_cssmin(python_only=True)
|
||||||
|
|
||||||
|
def PrepareAssets(Flags):
|
||||||
|
f = NameSpace(Flags)
|
||||||
|
if f.MinifyAssets:
|
||||||
|
shutil.copytree('Assets', f.OutDir, ignore=IgnoreFiles, dirs_exist_ok=True)
|
||||||
|
for File in Path('Assets').rglob('*'):
|
||||||
|
if os.path.isfile(File):
|
||||||
|
Dest = f"{f.OutDir}/{str(File)[len('Assets')+1:]}"
|
||||||
|
if str(File).lower().endswith(FileExtensions['HTML']):
|
||||||
|
WriteFile(Dest, DoMinifyHTML(ReadFile(File), f.MinifyKeepComments))
|
||||||
|
elif str(File).lower().endswith('.css'):
|
||||||
|
WriteFile(Dest, cssmin(ReadFile(File), f.MinifyKeepComments))
|
||||||
|
else:
|
||||||
|
shutil.copy2(File, Dest)
|
||||||
|
else:
|
||||||
|
shutil.copytree('Assets', f.OutDir, dirs_exist_ok=True)
|
@ -156,7 +156,7 @@ def FormatTitles(Titles:list, Flatten=False):
|
|||||||
# Clean up a generic HTML tree such that it's compliant with the HTML Journal standard
|
# Clean up a generic HTML tree such that it's compliant with the HTML Journal standard
|
||||||
# (https://m15o.ichi.city/site/subscribing-to-a-journal-page.html);
|
# (https://m15o.ichi.city/site/subscribing-to-a-journal-page.html);
|
||||||
# basis is: find an element with the JournalBody attr., and group its direct children as <article>s
|
# basis is: find an element with the JournalBody attr., and group its direct children as <article>s
|
||||||
def MakeHTMLJournal(Flags, Locale, FilePath, HTML):
|
def MakeHTMLJournal(Flags:dict, Locale:dict, FilePath:str, HTML:str):
|
||||||
Soup, Journal, Entries = MkSoup(HTML), '', []
|
Soup, Journal, Entries = MkSoup(HTML), '', []
|
||||||
for t in Soup.find_all(attrs={"htmljournal":True}):
|
for t in Soup.find_all(attrs={"htmljournal":True}):
|
||||||
#JournalStyle = JournalStyles[t.attrs["journalstyle"]] if 'journalstyle' in t.attrs and t.attrs["journalstyle"] in JournalStyles else JournalStyles['Default']
|
#JournalStyle = JournalStyles[t.attrs["journalstyle"]] if 'journalstyle' in t.attrs and t.attrs["journalstyle"] in JournalStyles else JournalStyles['Default']
|
||||||
|
@ -12,7 +12,7 @@
|
|||||||
from Libs.feedgen.feed import FeedGenerator
|
from Libs.feedgen.feed import FeedGenerator
|
||||||
from Modules.Utils import *
|
from Modules.Utils import *
|
||||||
|
|
||||||
def MakeFeed(Flags, Pages, FullSite=False):
|
def MakeFeed(Flags:dict, Pages:list, FullSite=False):
|
||||||
CategoryFilter = Flags['FeedCategoryFilter']
|
CategoryFilter = Flags['FeedCategoryFilter']
|
||||||
MaxEntries = Flags['FeedEntries']
|
MaxEntries = Flags['FeedEntries']
|
||||||
|
|
||||||
|
@ -13,7 +13,7 @@ from Libs.bs4 import BeautifulSoup
|
|||||||
from Modules.HTML import *
|
from Modules.HTML import *
|
||||||
from Modules.Utils import *
|
from Modules.Utils import *
|
||||||
|
|
||||||
def FixGemlogDateLine(Line):
|
def FixGemlogDateLine(Line:str):
|
||||||
if len(Line) >= 2 and Line[0] == '[' and Line[1].isdigit():
|
if len(Line) >= 2 and Line[0] == '[' and Line[1].isdigit():
|
||||||
Line = Line[1:]
|
Line = Line[1:]
|
||||||
else:
|
else:
|
||||||
@ -22,7 +22,7 @@ def FixGemlogDateLine(Line):
|
|||||||
Line = Words[0] + '\n' + Words[1][1:] + ' ' + ' '.join(Words[2:])
|
Line = Words[0] + '\n' + Words[1][1:] + ' ' + ' '.join(Words[2:])
|
||||||
return Line
|
return Line
|
||||||
|
|
||||||
def GemtextCompileList(Flags, Pages, LimitFiles):
|
def GemtextCompileList(Flags:dict, Pages:list, LimitFiles):
|
||||||
Cmd = ''
|
Cmd = ''
|
||||||
for File, Content, Titles, Meta, ContentHTML, SlimHTML, Description, Image in Pages:
|
for File, Content, Titles, Meta, ContentHTML, SlimHTML, Description, Image in Pages:
|
||||||
if IsLightRun(File, LimitFiles):
|
if IsLightRun(File, LimitFiles):
|
||||||
@ -53,7 +53,7 @@ def FindEarliest(Str, Items):
|
|||||||
Str.find(Item)
|
Str.find(Item)
|
||||||
return Pos, Item
|
return Pos, Item
|
||||||
|
|
||||||
def ParseTag(Content):
|
def ParseTag(Content:str):
|
||||||
print(Content)
|
#print(Content)
|
||||||
Parse = BeautifulSoup(str(Content), 'html.parser')
|
Parse = BeautifulSoup(str(Content), 'html.parser')
|
||||||
Tag = Parse.find()
|
Tag = Parse.find()
|
||||||
|
@ -15,30 +15,31 @@ from Modules.Utils import *
|
|||||||
|
|
||||||
# Suppress useless bs4 warnings
|
# Suppress useless bs4 warnings
|
||||||
warnings.filterwarnings('ignore', message='The input looks more like a filename than markup.')
|
warnings.filterwarnings('ignore', message='The input looks more like a filename than markup.')
|
||||||
|
warnings.filterwarnings('ignore', message='The soupsieve package is not installed.')
|
||||||
|
|
||||||
def MkSoup(HTML):
|
def MkSoup(Html:str):
|
||||||
return BeautifulSoup(HTML, 'html.parser')
|
return BeautifulSoup(Html, 'html.parser')
|
||||||
|
|
||||||
def StripAttrs(HTML):
|
def StripAttrs(Html:str):
|
||||||
Soup = MkSoup(HTML)
|
Soup = MkSoup(Html)
|
||||||
Tags = Soup.find_all()
|
Tags = Soup.find_all()
|
||||||
for t in Tags:
|
for t in Tags:
|
||||||
if 'href' not in t.attrs and 'src' not in t.attrs:
|
if 'href' not in t.attrs and 'src' not in t.attrs:
|
||||||
t.attrs = {}
|
t.attrs = {}
|
||||||
return str(Soup)
|
return str(Soup)
|
||||||
|
|
||||||
def StripTags(HTML, ToStrip): # Remove desired tags from the HTML
|
def StripTags(Html:str, ToStrip:list): # Remove desired tags from the HTML
|
||||||
Soup = MkSoup(HTML)
|
Soup = MkSoup(Html)
|
||||||
Tags = Soup.find_all()
|
Tags = Soup.find_all()
|
||||||
for t in Tags:
|
for t in Tags:
|
||||||
if t.name in ToStrip:
|
if t.name in ToStrip:
|
||||||
t.replace_with('')
|
t.replace_with('')
|
||||||
return str(Soup)
|
return str(Soup)
|
||||||
|
|
||||||
def DoHTMLFixPre(HTML):
|
def DoHTMLFixPre(Html:str):
|
||||||
if not ("<pre>" in HTML or "<pre " in HTML):
|
if not ("<pre>" in Html or "<pre " in Html):
|
||||||
return HTML
|
return Html
|
||||||
Soup = MkSoup(HTML)
|
Soup = MkSoup(Html)
|
||||||
Tags = Soup.find_all('pre')
|
Tags = Soup.find_all('pre')
|
||||||
for t in Tags:
|
for t in Tags:
|
||||||
FirstLine = str(t).splitlines()[0].lstrip().rstrip()
|
FirstLine = str(t).splitlines()[0].lstrip().rstrip()
|
||||||
@ -78,8 +79,8 @@ def AddToTagStartEnd(HTML, MatchStart, MatchEnd, AddStart, AddEnd): # This doesn
|
|||||||
DidEnd -= 1
|
DidEnd -= 1
|
||||||
return HTML
|
return HTML
|
||||||
|
|
||||||
def SquareFnrefs(HTML): # Different combinations of formatting for Soup .prettify, .encode, .decode break different page elements, don't use this for now
|
def SquareFnrefs(Html:str): # Different combinations of formatting for Soup .prettify, .encode, .decode break different page elements, don't use this for now
|
||||||
Soup = MkSoup(HTML)
|
Soup = MkSoup(Html)
|
||||||
Tags = Soup.find_all('sup')
|
Tags = Soup.find_all('sup')
|
||||||
for t in Tags:
|
for t in Tags:
|
||||||
if 'id' in t.attrs and t.attrs['id'].startswith('fnref:'):
|
if 'id' in t.attrs and t.attrs['id'].startswith('fnref:'):
|
||||||
@ -87,9 +88,9 @@ def SquareFnrefs(HTML): # Different combinations of formatting for Soup .prettif
|
|||||||
s.replace_with(f'[{t}]')
|
s.replace_with(f'[{t}]')
|
||||||
return str(Soup.prettify(formatter=None))
|
return str(Soup.prettify(formatter=None))
|
||||||
|
|
||||||
def DoMinifyHTML(HTML, KeepComments):
|
def DoMinifyHTML(Html:str, KeepComments:bool):
|
||||||
return htmlmin.minify(
|
return htmlmin.minify(
|
||||||
input=HTML,
|
input=Html,
|
||||||
remove_comments=not KeepComments,
|
remove_comments=not KeepComments,
|
||||||
remove_empty_space=True,
|
remove_empty_space=True,
|
||||||
remove_all_empty_space=False,
|
remove_all_empty_space=False,
|
||||||
|
@ -16,7 +16,9 @@ from Modules.Utils import *
|
|||||||
# Menu styles:
|
# Menu styles:
|
||||||
# - Simple: Default, Flat, Line
|
# - Simple: Default, Flat, Line
|
||||||
# - Others: Excerpt, Image, Preview (Excerpt + Image), Full
|
# - Others: Excerpt, Image, Preview (Excerpt + Image), Full
|
||||||
def GetHTMLPagesList(Pages:list, BlogName:str, SiteRoot:str, PathPrefix:str, CallbackFile=None, Unite=[], Type=None, Limit=None, PathFilter='', Category=None, For='Menu', MarkdownExts=(), MenuStyle='Default', ShowPaths=True):
|
def GetHTMLPagesList(Flags:dict, Pages:list, PathPrefix:str, CallbackFile=None, Unite=[], Type=None, Limit=None, PathFilter='', Category=None, For='Menu', MenuStyle='Default', ShowPaths=True):
|
||||||
|
f = NameSpace(Flags)
|
||||||
|
|
||||||
Flatten, SingleLine, DoneCount, PrevDepth = False, False, 0, 0
|
Flatten, SingleLine, DoneCount, PrevDepth = False, False, 0, 0
|
||||||
if MenuStyle == 'Flat':
|
if MenuStyle == 'Flat':
|
||||||
Flatten = True
|
Flatten = True
|
||||||
@ -57,7 +59,7 @@ def GetHTMLPagesList(Pages:list, BlogName:str, SiteRoot:str, PathPrefix:str, Cal
|
|||||||
Levels = '.' * ((Depth-2+i) if not Flatten else 0) + ':'
|
Levels = '.' * ((Depth-2+i) if not Flatten else 0) + ':'
|
||||||
# If search node endswith index, it's a page; else, it's a folder
|
# If search node endswith index, it's a page; else, it's a folder
|
||||||
if StripExt(File).endswith('index'):
|
if StripExt(File).endswith('index'):
|
||||||
Title = MakeListTitle(File, Meta, Titles, 'HTMLTitle', BlogName, PathPrefix)
|
Title = MakeListTitle(File, Meta, Titles, 'HTMLTitle', f.BlogName, PathPrefix)
|
||||||
DoneCount += 1
|
DoneCount += 1
|
||||||
else:
|
else:
|
||||||
Title = CurParent[Depth-2+i]
|
Title = CurParent[Depth-2+i]
|
||||||
@ -71,9 +73,9 @@ def GetHTMLPagesList(Pages:list, BlogName:str, SiteRoot:str, PathPrefix:str, Cal
|
|||||||
Levels = '.' * ((Depth-1) if not Flatten else 0) + ':'
|
Levels = '.' * ((Depth-1) if not Flatten else 0) + ':'
|
||||||
DoneCount += 1
|
DoneCount += 1
|
||||||
if Meta['Order'] == 'Unite':
|
if Meta['Order'] == 'Unite':
|
||||||
Title = markdown(MarkdownHTMLEscape(File, MarkdownExts), extensions=MarkdownExts).removeprefix('<p>').removesuffix('<p>')
|
Title = markdown(MarkdownHTMLEscape(File, f.MarkdownExts), extensions=f.MarkdownExts).removeprefix('<p>').removesuffix('<p>')
|
||||||
else:
|
else:
|
||||||
Title = MakeListTitle(File, Meta, Titles, 'HTMLTitle', BlogName, PathPrefix)
|
Title = MakeListTitle(File, Meta, Titles, 'HTMLTitle', f.BlogName, PathPrefix)
|
||||||
if SingleLine:
|
if SingleLine:
|
||||||
List += ' <span>' + Title + '</span> '
|
List += ' <span>' + Title + '</span> '
|
||||||
else:
|
else:
|
||||||
@ -122,7 +124,10 @@ def FindPreprocLine(Line:str, Meta, Macros):
|
|||||||
# IgnoreBlocksStart += [l]
|
# IgnoreBlocksStart += [l]
|
||||||
return (Meta, Macros, Changed)
|
return (Meta, Macros, Changed)
|
||||||
|
|
||||||
def PagePreprocessor(Path:str, TempPath:str, Type:str, SiteTemplate, SiteRoot, GlobalMacros, CategoryUncategorized, LightRun:bool=False, Content=None):
|
def PagePreprocessor(Flags:dict, Page:list, SiteTemplate, GlobalMacros, LightRun:bool=False):
|
||||||
|
CategoryUncategorized = Flags['CategoriesUncategorized']
|
||||||
|
Path, TempPath, Type, Content = Page
|
||||||
|
|
||||||
File = ReadFile(Path) if not Content else Content
|
File = ReadFile(Path) if not Content else Content
|
||||||
Path = Path.lower()
|
Path = Path.lower()
|
||||||
Content, Titles, DashyTitles, HTMLTitlesFound, Macros, Meta, MetaDefault = '', [], [], False, '', '', {
|
Content, Titles, DashyTitles, HTMLTitlesFound, Macros, Meta, MetaDefault = '', [], [], False, '', '', {
|
||||||
|
@ -12,12 +12,12 @@
|
|||||||
import os
|
import os
|
||||||
from Modules.Utils import *
|
from Modules.Utils import *
|
||||||
|
|
||||||
def PugCompileList(OutputDir, Pages, LimitFiles):
|
def PugCompileList(OutDir:str, Pages:list, LimitFiles):
|
||||||
# Pug-cli seems to shit itself with folder paths as input, so we pass ALL the files as arguments
|
# Pug-cli seems to shit itself with folder paths as input, so we pass ALL the files as arguments
|
||||||
Paths = ''
|
Paths = ''
|
||||||
for File, Content, Titles, Meta in Pages:
|
for File, Content, Titles, Meta in Pages:
|
||||||
if File.lower().endswith('.pug') and (LimitFiles == False or File in LimitFiles):
|
if File.lower().endswith('.pug') and (LimitFiles == False or File in LimitFiles):
|
||||||
Path = f'{OutputDir}/{File}'
|
Path = f'{OutDir}/{File}'
|
||||||
WriteFile(Path, Content)
|
WriteFile(Path, Content)
|
||||||
Paths += f'"{Path}" '
|
Paths += f'"{Path}" '
|
||||||
if Paths:
|
if Paths:
|
||||||
|
@ -20,7 +20,13 @@ from Modules.Meta import *
|
|||||||
from Modules.Pug import *
|
from Modules.Pug import *
|
||||||
from Modules.Utils import *
|
from Modules.Utils import *
|
||||||
|
|
||||||
def PatchHTML(File, HTML, StaticPartsText, DynamicParts, DynamicPartsText, HTMLPagesList, PagePath, Content, Titles, Meta, SiteDomain, SiteRoot, SiteName, BlogName, FolderRoots, Categories, SiteLang, Locale, LightRun):
|
def PatchHTML(Flags, File, HTML, StaticPartsText, DynamicParts, DynamicPartsText, HTMLPagesList, PagePath, Content, Titles, Meta, FolderRoots, Categories, Locale, LightRun):
|
||||||
|
SiteDomain = Flags['SiteDomain']
|
||||||
|
SiteRoot = Flags['SiteRoot']
|
||||||
|
SiteLang = Flags['SiteLang']
|
||||||
|
SiteName = Flags['SiteName']
|
||||||
|
BlogName = Flags['BlogName']
|
||||||
|
|
||||||
HTMLTitles = FormatTitles(Titles)
|
HTMLTitles = FormatTitles(Titles)
|
||||||
BodyDescription, BodyImage = '', ''
|
BodyDescription, BodyImage = '', ''
|
||||||
if not File.lower().endswith('.txt'):
|
if not File.lower().endswith('.txt'):
|
||||||
@ -171,8 +177,9 @@ def PatchHTML(File, HTML, StaticPartsText, DynamicParts, DynamicPartsText, HTMLP
|
|||||||
|
|
||||||
return HTML, ContentHTML, Description, Image
|
return HTML, ContentHTML, Description, Image
|
||||||
|
|
||||||
def HandlePage(Flags, Page, Pages, Categories, LimitFiles, Snippets, ConfMenu, Locale):
|
def HandlePage(Flags:dict, Page:list, Pages, Categories, LimitFiles, Snippets, ConfMenu, Locale:dict):
|
||||||
File, Content, Titles, Meta = Page
|
File, Content, Titles, Meta = Page
|
||||||
|
|
||||||
OutDir, MarkdownExts, Sorting, MinifyKeepComments = Flags['OutDir'], Flags['MarkdownExts'], Flags['Sorting'], Flags['MinifyKeepComments']
|
OutDir, MarkdownExts, Sorting, MinifyKeepComments = Flags['OutDir'], Flags['MarkdownExts'], Flags['Sorting'], Flags['MinifyKeepComments']
|
||||||
SiteName, BlogName, SiteTagline = Flags['SiteName'], Flags['BlogName'], Flags['SiteTagline']
|
SiteName, BlogName, SiteTagline = Flags['SiteName'], Flags['BlogName'], Flags['SiteTagline']
|
||||||
SiteTemplate, SiteLang = Flags['SiteTemplate'], Flags['SiteLang']
|
SiteTemplate, SiteLang = Flags['SiteTemplate'], Flags['SiteLang']
|
||||||
@ -200,17 +207,16 @@ def HandlePage(Flags, Page, Pages, Categories, LimitFiles, Snippets, ConfMenu, L
|
|||||||
else:
|
else:
|
||||||
TemplateMeta = TemplatePreprocessor(TemplatesText[Meta['Template']])
|
TemplateMeta = TemplatePreprocessor(TemplatesText[Meta['Template']])
|
||||||
HTMLPagesList = GetHTMLPagesList(
|
HTMLPagesList = GetHTMLPagesList(
|
||||||
|
Flags,
|
||||||
Pages=Pages,
|
Pages=Pages,
|
||||||
BlogName=BlogName,
|
|
||||||
SiteRoot=SiteRoot,
|
|
||||||
PathPrefix=GetPathLevels(File),
|
PathPrefix=GetPathLevels(File),
|
||||||
Unite=ConfMenu,
|
Unite=ConfMenu,
|
||||||
Type='Page',
|
Type='Page',
|
||||||
For='Menu',
|
For='Menu',
|
||||||
MarkdownExts=MarkdownExts,
|
|
||||||
MenuStyle=TemplateMeta['MenuStyle'])
|
MenuStyle=TemplateMeta['MenuStyle'])
|
||||||
|
|
||||||
HTML, ContentHTML, Description, Image = PatchHTML(
|
HTML, ContentHTML, Description, Image = PatchHTML(
|
||||||
|
Flags,
|
||||||
File=File,
|
File=File,
|
||||||
HTML=TemplatesText[Meta['Template']],
|
HTML=TemplatesText[Meta['Template']],
|
||||||
StaticPartsText=StaticPartsText,
|
StaticPartsText=StaticPartsText,
|
||||||
@ -221,26 +227,19 @@ def HandlePage(Flags, Page, Pages, Categories, LimitFiles, Snippets, ConfMenu, L
|
|||||||
Content=Content,
|
Content=Content,
|
||||||
Titles=Titles,
|
Titles=Titles,
|
||||||
Meta=Meta,
|
Meta=Meta,
|
||||||
SiteDomain=SiteDomain,
|
|
||||||
SiteRoot=SiteRoot,
|
|
||||||
SiteName=SiteName,
|
|
||||||
BlogName=BlogName,
|
|
||||||
FolderRoots=FolderRoots,
|
FolderRoots=FolderRoots,
|
||||||
Categories=Categories,
|
Categories=Categories,
|
||||||
SiteLang=SiteLang,
|
|
||||||
Locale=Locale,
|
Locale=Locale,
|
||||||
LightRun=LightRun)
|
LightRun=LightRun)
|
||||||
|
|
||||||
HTML = ReplWithEsc(HTML, f"<staticoso:Feed>", GetHTMLPagesList(
|
HTML = ReplWithEsc(HTML, f"<staticoso:Feed>", GetHTMLPagesList(
|
||||||
|
Flags,
|
||||||
Limit=Flags['FeedEntries'],
|
Limit=Flags['FeedEntries'],
|
||||||
Type='Post',
|
Type='Post',
|
||||||
Category=None if Flags['FeedCategoryFilter'] == '*' else Flags['FeedCategoryFilter'],
|
Category=None if Flags['FeedCategoryFilter'] == '*' else Flags['FeedCategoryFilter'],
|
||||||
Pages=Pages,
|
Pages=Pages,
|
||||||
BlogName=BlogName,
|
|
||||||
SiteRoot=SiteRoot,
|
|
||||||
PathPrefix=GetPathLevels(File),
|
PathPrefix=GetPathLevels(File),
|
||||||
For='Categories',
|
For='Categories',
|
||||||
MarkdownExts=MarkdownExts,
|
|
||||||
MenuStyle='Flat',
|
MenuStyle='Flat',
|
||||||
ShowPaths=False))
|
ShowPaths=False))
|
||||||
if 'staticoso:DirectoryList:' in HTML: # Reduce risk of unnecessary cycles
|
if 'staticoso:DirectoryList:' in HTML: # Reduce risk of unnecessary cycles
|
||||||
@ -249,14 +248,12 @@ def HandlePage(Flags, Page, Pages, Categories, LimitFiles, Snippets, ConfMenu, L
|
|||||||
if Line.startswith('<staticoso:DirectoryList:') and Line.endswith('>'):
|
if Line.startswith('<staticoso:DirectoryList:') and Line.endswith('>'):
|
||||||
Path = Line[len('<staticoso:DirectoryList:'):-1]
|
Path = Line[len('<staticoso:DirectoryList:'):-1]
|
||||||
DirectoryList = GetHTMLPagesList(
|
DirectoryList = GetHTMLPagesList(
|
||||||
|
Flags,
|
||||||
CallbackFile=File,
|
CallbackFile=File,
|
||||||
Pages=Pages,
|
Pages=Pages,
|
||||||
BlogName=BlogName,
|
|
||||||
SiteRoot=SiteRoot,
|
|
||||||
PathPrefix=GetPathLevels(File),
|
PathPrefix=GetPathLevels(File),
|
||||||
PathFilter=Path,
|
PathFilter=Path,
|
||||||
For='Categories',
|
For='Categories',
|
||||||
MarkdownExts=MarkdownExts,
|
|
||||||
MenuStyle='Flat')
|
MenuStyle='Flat')
|
||||||
HTML = ReplWithEsc(HTML, f"<staticoso:DirectoryList:{Path}>", DirectoryList)
|
HTML = ReplWithEsc(HTML, f"<staticoso:DirectoryList:{Path}>", DirectoryList)
|
||||||
|
|
||||||
@ -287,6 +284,7 @@ def HandlePage(Flags, Page, Pages, Categories, LimitFiles, Snippets, ConfMenu, L
|
|||||||
|
|
||||||
if not LightRun and 'htmljournal' in ContentHTML.lower(): # Avoid extra cycles
|
if not LightRun and 'htmljournal' in ContentHTML.lower(): # Avoid extra cycles
|
||||||
HTML, _, _, _ = PatchHTML(
|
HTML, _, _, _ = PatchHTML(
|
||||||
|
Flags,
|
||||||
File=File,
|
File=File,
|
||||||
HTML=TemplatesText[Meta['Template']],
|
HTML=TemplatesText[Meta['Template']],
|
||||||
StaticPartsText=StaticPartsText,
|
StaticPartsText=StaticPartsText,
|
||||||
@ -297,32 +295,92 @@ def HandlePage(Flags, Page, Pages, Categories, LimitFiles, Snippets, ConfMenu, L
|
|||||||
Content=MakeHTMLJournal(Flags, Locale, f'{StripExt(File)}.html', ContentHTML),
|
Content=MakeHTMLJournal(Flags, Locale, f'{StripExt(File)}.html', ContentHTML),
|
||||||
Titles='',
|
Titles='',
|
||||||
Meta=Meta,
|
Meta=Meta,
|
||||||
SiteDomain=SiteDomain,
|
|
||||||
SiteRoot=SiteRoot,
|
|
||||||
SiteName=SiteName,
|
|
||||||
BlogName=BlogName,
|
|
||||||
FolderRoots=FolderRoots,
|
FolderRoots=FolderRoots,
|
||||||
Categories=Categories,
|
Categories=Categories,
|
||||||
SiteLang=SiteLang,
|
|
||||||
Locale=Locale,
|
Locale=Locale,
|
||||||
LightRun=LightRun)
|
LightRun=LightRun)
|
||||||
if Flags["JournalRedirect"]:
|
if Flags["JournalRedirect"]:
|
||||||
HTML = HTML.replace('</head>', f"""<meta http-equiv="refresh" content="0; url='./{PagePath.split('''/''')[-1]}'"></head>""")
|
HTML = HTML.replace('</head>', f"""<meta http-equiv="refresh" content="0; url='./{PagePath.split('''/''')[-1]}'"></head>""")
|
||||||
WriteFile(StripExt(PagePath)+'.Journal.html', HTML)
|
WriteFile(StripExt(PagePath)+'.Journal.html', HTML)
|
||||||
|
|
||||||
return [File, Content, Titles, Meta, ContentHTML, SlimHTML, Description, Image]
|
#return [File, Content, Titles, Meta, ContentHTML, SlimHTML, Description, Image]
|
||||||
|
return {"File":File, "Content":Content, "Titles":Titles, "Meta":Meta, "ContentHtml":ContentHTML, "SlimHtml":SlimHTML, "Description":Description, "Image":Image}
|
||||||
|
|
||||||
def MultiprocPagePreprocessor(d):
|
def MultiprocPagePreprocessor(d:dict):
|
||||||
PrintProcPercentDots(d['Process'], 2)
|
PrintProcPercentDots(d['Process'])#, 2)
|
||||||
return PagePreprocessor(d['Path'], d['TempPath'], d['Type'], d['Template'], d['SiteRoot'], d['GlobalMacros'], d['CategoryUncategorized'], d['LightRun'])
|
return PagePreprocessor(d['Flags'], d['Page'], d['Template'], d['GlobalMacros'], d['LightRun'])
|
||||||
|
|
||||||
def MultiprocHandlePage(d):
|
def MultiprocHandlePage(d:dict):
|
||||||
PrintProcPercentDots(d['Process'])
|
PrintProcPercentDots(d['Process'])
|
||||||
return HandlePage(d['Flags'], d['Page'], d['Pages'], d['Categories'], d['LimitFiles'], d['Snippets'], d['ConfMenu'], d['Locale'])
|
return HandlePage(d['Flags'], d['Page'], d['Pages'], d['Categories'], d['LimitFiles'], d['Snippets'], d['ConfMenu'], d['Locale'])
|
||||||
|
|
||||||
def MakeSite(Flags, LimitFiles, Snippets, ConfMenu, GlobalMacros, Locale, Threads):
|
def FindPagesPaths():
|
||||||
PagesPaths, PostsPaths, Pages, MadePages, Categories = [], [], [], [], {}
|
Paths = {"Pages":[], "Posts":[]}
|
||||||
|
for Ext in FileExtensions['Pages']:
|
||||||
|
for Type in ('Pages', 'Posts'):
|
||||||
|
for File in Path(Type).rglob(f'*.{Ext}'):
|
||||||
|
Paths[Type] += [FileToStr(File, f'{Type}/')]
|
||||||
|
return Paths
|
||||||
|
|
||||||
|
def ReorderPagesPaths(Paths:dict, Sorting:dict):
|
||||||
|
for Type in ('Pages', 'Posts'):
|
||||||
|
Paths[Type] = FileNameDateSort(Paths[Type])
|
||||||
|
if Sorting[Type] in ('Inverse', 'Reverse'):
|
||||||
|
Paths[Type].reverse()
|
||||||
|
return Paths
|
||||||
|
|
||||||
|
def PopulateCategoryLists(Flags:dict, Pages:list, Categories):
|
||||||
|
for Cat in Categories:
|
||||||
|
for Type in ('Page', 'Post'):
|
||||||
|
Categories[Cat] += GetHTMLPagesList(
|
||||||
|
Flags,
|
||||||
|
Pages=Pages,
|
||||||
|
PathPrefix=GetPathLevels('Categories/'),
|
||||||
|
Type=Type,
|
||||||
|
Category=Cat,
|
||||||
|
For='Categories',
|
||||||
|
MenuStyle='Flat')
|
||||||
|
return Categories
|
||||||
|
|
||||||
|
def MakeAutoCategories(Flags:dict, Categories):
|
||||||
|
Pages = []
|
||||||
|
if Flags['CategoriesAutomatic']:
|
||||||
|
OutDir = Flags['OutDir']
|
||||||
|
Dir = f'{OutDir}/Categories'
|
||||||
|
for Cat in Categories:
|
||||||
|
Exists = False
|
||||||
|
for File in Path(Dir).rglob(str(Cat)+'.*'):
|
||||||
|
Exists = True
|
||||||
|
break
|
||||||
|
if not Exists:
|
||||||
|
File = f'Categories/{Cat}.md'
|
||||||
|
FilePath = f'{OutDir}/{File}'
|
||||||
|
WriteFile(FilePath, CategoryPageTemplate.format(Name=Cat))
|
||||||
|
_, Content, Titles, Meta = PagePreprocessor(Flags, [FilePath, FilePath, Type, None], SiteTemplate, GlobalMacros, LightRun=LightRun)
|
||||||
|
Pages += [File, Content, Titles, Meta]
|
||||||
|
return Pages
|
||||||
|
|
||||||
|
def PreprocessSourcePages(Flags:dict, PagesPaths:dict, LimitFiles, SiteTemplate, GlobalMacros, PoolSize:int):
|
||||||
|
MultiprocPages = []
|
||||||
|
for Type in ('Page', 'Post'):
|
||||||
|
Files, PathPrefix = {"Page": [PagesPaths['Pages'], ''], "Post": [PagesPaths['Posts'], 'Posts/']}[Type]
|
||||||
|
for i, File in enumerate(Files):
|
||||||
|
TempPath = f"{PathPrefix}{File}"
|
||||||
|
LightRun = False if LimitFiles == False or TempPath in LimitFiles else True
|
||||||
|
MultiprocPages += [{'Flags': Flags, 'Page': [f"{Type}s/{File}", TempPath, Type, None], 'Template': SiteTemplate, 'GlobalMacros': GlobalMacros, 'LightRun': LightRun}]
|
||||||
|
return DoMultiProc(MultiprocPagePreprocessor, MultiprocPages, PoolSize, True)
|
||||||
|
|
||||||
|
def WriteProcessedPages(Flags:dict, Pages:list, Categories, ConfMenu, Snippets, LimitFiles, PoolSize:int, Locale:dict):
|
||||||
|
MultiprocPages = []
|
||||||
|
for i, Page in enumerate(Pages):
|
||||||
|
MultiprocPages += [{'Flags': Flags, 'Page': Page, 'Pages': Pages, 'Categories': Categories, 'LimitFiles': LimitFiles, 'Snippets': Snippets, 'ConfMenu': ConfMenu, 'Locale': Locale}]
|
||||||
|
return DoMultiProc(MultiprocHandlePage, MultiprocPages, PoolSize, True)
|
||||||
|
|
||||||
|
def MakeSite(Flags:dict, LimitFiles, Snippets, ConfMenu, GlobalMacros, Locale:dict, Threads):
|
||||||
|
Pages, MadePages, Categories = [], [], {}
|
||||||
PoolSize = cpu_count() if Threads <= 0 else Threads
|
PoolSize = cpu_count() if Threads <= 0 else Threads
|
||||||
|
|
||||||
|
f = NameSpace(Flags)
|
||||||
OutDir, MarkdownExts, Sorting = Flags['OutDir'], Flags['MarkdownExts'], Flags['Sorting']
|
OutDir, MarkdownExts, Sorting = Flags['OutDir'], Flags['MarkdownExts'], Flags['Sorting']
|
||||||
SiteName, BlogName, SiteTagline = Flags['SiteName'], Flags['BlogName'], Flags['SiteTagline']
|
SiteName, BlogName, SiteTagline = Flags['SiteName'], Flags['BlogName'], Flags['SiteTagline']
|
||||||
SiteTemplate, SiteLang = Flags['SiteTemplate'], Flags['SiteLang']
|
SiteTemplate, SiteLang = Flags['SiteTemplate'], Flags['SiteLang']
|
||||||
@ -331,74 +389,29 @@ def MakeSite(Flags, LimitFiles, Snippets, ConfMenu, GlobalMacros, Locale, Thread
|
|||||||
ImgAltToTitle, ImgTitleToAlt = Flags['ImgAltToTitle'], Flags['ImgTitleToAlt']
|
ImgAltToTitle, ImgTitleToAlt = Flags['ImgAltToTitle'], Flags['ImgTitleToAlt']
|
||||||
DynamicParts, DynamicPartsText, StaticPartsText, TemplatesText = Flags['DynamicParts'], Snippets['DynamicParts'], Snippets['StaticParts'], Snippets['Templates']
|
DynamicParts, DynamicPartsText, StaticPartsText, TemplatesText = Flags['DynamicParts'], Snippets['DynamicParts'], Snippets['StaticParts'], Snippets['Templates']
|
||||||
|
|
||||||
for Ext in FileExtensions['Pages']:
|
logging.info("Finding Pages")
|
||||||
for File in Path('Pages').rglob(f"*.{Ext}"):
|
PagesPaths = FindPagesPaths()
|
||||||
PagesPaths += [FileToStr(File, 'Pages/')]
|
logging.info(f"Pages Found: {len(PagesPaths['Pages']+PagesPaths['Posts'])}")
|
||||||
for File in Path('Posts').rglob(f"*.{Ext}"):
|
|
||||||
PostsPaths += [FileToStr(File, 'Posts/')]
|
|
||||||
logging.info(f"Pages Found: {len(PagesPaths+PostsPaths)}")
|
|
||||||
|
|
||||||
PagesPaths = FileNameDateSort(PagesPaths)
|
logging.info("Reordering Pages")
|
||||||
if Sorting['Pages'] == 'Inverse':
|
PagesPaths = ReorderPagesPaths(PagesPaths, f.Sorting)
|
||||||
PagesPaths.reverse()
|
|
||||||
PostsPaths = FileNameDateSort(PostsPaths)
|
|
||||||
if Sorting['Posts'] == 'Inverse':
|
|
||||||
PostsPaths.reverse()
|
|
||||||
|
|
||||||
logging.info("Preprocessing Source Pages")
|
logging.info("Preprocessing Source Pages")
|
||||||
MultiprocPages = []
|
Pages = PreprocessSourcePages(Flags, PagesPaths, LimitFiles, SiteTemplate, GlobalMacros, PoolSize)
|
||||||
for Type in ['Page', 'Post']:
|
|
||||||
if Type == 'Page':
|
|
||||||
Files = PagesPaths
|
|
||||||
PathPrefix = ''
|
|
||||||
elif Type == 'Post':
|
|
||||||
Files = PostsPaths
|
|
||||||
PathPrefix = 'Posts/'
|
|
||||||
for i,File in enumerate(Files):
|
|
||||||
TempPath = f"{PathPrefix}{File}"
|
|
||||||
LightRun = False if LimitFiles == False or TempPath in LimitFiles else True
|
|
||||||
MultiprocPages += [{'Process':{'Num':i, 'Count':len(Files)}, 'Path':f"{Type}s/{File}", 'TempPath':TempPath, 'Type':Type, 'Template':SiteTemplate, 'SiteRoot':SiteRoot, 'GlobalMacros':GlobalMacros, 'CategoryUncategorized':CategoryUncategorized, 'LightRun':LightRun}]
|
|
||||||
os.system('printf "["')
|
|
||||||
with Pool(PoolSize) as MultiprocPool:
|
|
||||||
Pages = MultiprocPool.map(MultiprocPagePreprocessor, MultiprocPages)
|
|
||||||
os.system('printf "]\n"') # Make newline after percentage dots
|
|
||||||
|
|
||||||
for File, Content, Titles, Meta in Pages:
|
for File, Content, Titles, Meta in Pages:
|
||||||
for Cat in Meta['Categories']:
|
for Cat in Meta['Categories']:
|
||||||
Categories.update({Cat:''})
|
Categories.update({Cat:''})
|
||||||
|
|
||||||
PugCompileList(OutDir, Pages, LimitFiles)
|
PugCompileList(OutDir, Pages, LimitFiles)
|
||||||
|
|
||||||
if Categories:
|
if Categories or f.CategoriesAutomatic:
|
||||||
logging.info("Generating Category Lists")
|
logging.info("Generating Category Lists")
|
||||||
for Cat in Categories:
|
Categories = PopulateCategoryLists(Flags, Pages, Categories)
|
||||||
for Type in ('Page', 'Post'):
|
Pages += MakeAutoCategories(Flags, Categories)
|
||||||
Categories[Cat] += GetHTMLPagesList(
|
|
||||||
Pages=Pages,
|
|
||||||
BlogName=BlogName,
|
|
||||||
SiteRoot=SiteRoot,
|
|
||||||
PathPrefix=GetPathLevels('Categories/'),
|
|
||||||
Type=Type,
|
|
||||||
Category=Cat,
|
|
||||||
For='Categories',
|
|
||||||
MarkdownExts=MarkdownExts,
|
|
||||||
MenuStyle='Flat')
|
|
||||||
|
|
||||||
if AutoCategories:
|
|
||||||
Dir = f"{OutDir}/Categories"
|
|
||||||
for Cat in Categories:
|
|
||||||
Exists = False
|
|
||||||
for File in Path(Dir).rglob(str(Cat)+'.*'):
|
|
||||||
Exists = True
|
|
||||||
break
|
|
||||||
if not Exists:
|
|
||||||
File = f"Categories/{Cat}.md"
|
|
||||||
FilePath = f"{OutDir}/{File}"
|
|
||||||
WriteFile(FilePath, CategoryPageTemplate.format(Name=Cat))
|
|
||||||
_, Content, Titles, Meta = PagePreprocessor(FilePath, FilePath, Type, SiteTemplate, SiteRoot, GlobalMacros, CategoryUncategorized, LightRun=LightRun)
|
|
||||||
Pages += [[File, Content, Titles, Meta]]
|
|
||||||
|
|
||||||
#logging.info("Building the HTML Search Page")
|
#logging.info("Building the HTML Search Page")
|
||||||
#Pages += [PagePreprocessor(Path='Search.html', TempPath='Search.html', Type='Page', SiteTemplate=SiteTemplate, SiteRoot=SiteRoot, GlobalMacros=GlobalMacros, CategoryUncategorized=CategoryUncategorized, LightRun=LightRun, Content=BuildPagesSearch(Flags, Pages))]
|
#Pages += [PagePreprocessor(Flags, Path='Search.html', TempPath='Search.html', Type='Page', SiteTemplate=SiteTemplate, GlobalMacros=GlobalMacros, LightRun=LightRun, Content=BuildPagesSearch(Flags, Pages))]
|
||||||
|
|
||||||
for i,e in enumerate(ConfMenu):
|
for i,e in enumerate(ConfMenu):
|
||||||
for File, Content, Titles, Meta in Pages:
|
for File, Content, Titles, Meta in Pages:
|
||||||
@ -407,13 +420,7 @@ def MakeSite(Flags, LimitFiles, Snippets, ConfMenu, GlobalMacros, Locale, Thread
|
|||||||
ConfMenu[i] = None
|
ConfMenu[i] = None
|
||||||
|
|
||||||
logging.info("Writing Pages")
|
logging.info("Writing Pages")
|
||||||
MultiprocPages = []
|
MadePages = WriteProcessedPages(Flags, Pages, Categories, ConfMenu, Snippets, LimitFiles, PoolSize, Locale)
|
||||||
for i,Page in enumerate(Pages):
|
|
||||||
MultiprocPages += [{'Process':{'Num':i, 'Count':len(Pages)}, 'Flags':Flags, 'Page':Page, 'Pages':Pages, 'Categories':Categories, 'LimitFiles':LimitFiles, 'Snippets':Snippets, 'ConfMenu':ConfMenu, 'Locale':Locale}]
|
|
||||||
os.system('printf "["')
|
|
||||||
with Pool(PoolSize) as MultiprocPool:
|
|
||||||
MadePages = MultiprocPool.map(MultiprocHandlePage, MultiprocPages)
|
|
||||||
os.system('printf "]\n"') # Make newline after percentage dots
|
|
||||||
|
|
||||||
# Do page transclusions here (?)
|
# Do page transclusions here (?)
|
||||||
#while True:
|
#while True:
|
||||||
|
@ -11,7 +11,7 @@ from urllib.parse import quote as URLEncode
|
|||||||
from Modules.HTML import *
|
from Modules.HTML import *
|
||||||
from Modules.Utils import *
|
from Modules.Utils import *
|
||||||
|
|
||||||
def MakeSitemap(Flags, Pages):
|
def MakeSitemap(Flags:dict, Pages:list):
|
||||||
Map = ''
|
Map = ''
|
||||||
Domain = Flags['SiteDomain'] + '/' if Flags['SiteDomain'] else ''
|
Domain = Flags['SiteDomain'] + '/' if Flags['SiteDomain'] else ''
|
||||||
for File, Content, Titles, Meta, ContentHtml, SlimHtml, Description, Image in Pages:
|
for File, Content, Titles, Meta, ContentHtml, SlimHtml, Description, Image in Pages:
|
||||||
|
48
App/Source/Modules/Social.py
Normal file
48
App/Source/Modules/Social.py
Normal file
@ -0,0 +1,48 @@
|
|||||||
|
""" ================================== |
|
||||||
|
| This file is part of |
|
||||||
|
| staticoso |
|
||||||
|
| Just a simple Static Site Generator |
|
||||||
|
| |
|
||||||
|
| Licensed under the AGPLv3 license |
|
||||||
|
| Copyright (C) 2022-2023, OctoSpacc |
|
||||||
|
| ================================== """
|
||||||
|
|
||||||
|
from Modules.Logging import *
|
||||||
|
from Modules.Utils import *
|
||||||
|
|
||||||
|
try:
|
||||||
|
from Modules.ActivityPub import *
|
||||||
|
ActivityPub = True
|
||||||
|
except:
|
||||||
|
logging.warning("⚠ Can't load the ActivityPub module. Its use is disabled. Make sure the 'requests' library is installed.")
|
||||||
|
ActivityPub = False
|
||||||
|
|
||||||
|
def ApplySocialIntegrations(Flags, Pages, LimitFiles, Locale):
	""" Share new pages on social platforms (currently Mastodon, via the
	    optional ActivityPub module) and inject a comments block pointing to
	    the matching post into each written HTML page.
	    Returns the list of HTML file paths that were rewritten. """
	f = NameSpace(Flags)
	FinalPaths = []

	# Sharing needs the ActivityPub module plus a full Mastodon + domain config
	if ActivityPub and f.MastodonURL and f.MastodonToken and f.SiteDomain:
		logging.info("Mastodon Operations")
		MastodonPosts = MastodonShare(Flags, Pages, Locale)
	else:
		MastodonPosts = []

	for File, Content, Titles, Meta, ContentHTML, SlimHTML, Description, Image in Pages:
		if IsLightRun(File, LimitFiles):
			continue
		File = f'{f.OutDir}/{StripExt(File)}.html'
		Content = ReadFile(File)
		Post = ''
		for p in MastodonPosts:
			# Fix: SiteDomain was referenced as a bare name (NameError at runtime);
			# after the refactor it lives on the Flags namespace as f.SiteDomain
			if p['Link'] == f.SiteDomain + '/' + File[len(f'{f.OutDir}/'):]:
				Post = HTMLCommentsBlock.format(
					StrComments=Locale['Comments'],
					StrOpen=Locale['OpenInNewTab'],
					URL=p['Post'])
				break
		#Content = ReplWithEsc(Content, '[staticoso:Comments]', Post)
		Content = ReplWithEsc(Content, '<staticoso:Comments>', Post)
		WriteFile(File, Content)
		FinalPaths += [File]

	return FinalPaths
|
@ -10,7 +10,9 @@
|
|||||||
import json
|
import json
|
||||||
import os
|
import os
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
|
from multiprocessing import Pool, cpu_count
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
from types import SimpleNamespace
|
||||||
from Modules.Globals import *
|
from Modules.Globals import *
|
||||||
|
|
||||||
def SureList(e):
|
def SureList(e):
|
||||||
@ -36,14 +38,14 @@ def WriteFile(p, c, m='w'):
|
|||||||
logging.error(f"[E] Error writing file {p}")
|
logging.error(f"[E] Error writing file {p}")
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def FileToStr(File:str, Truncate:str=''):
	""" Stringify File (e.g. a pathlib.Path) and drop its first
	    len(Truncate) characters; with the default empty Truncate
	    this is just str(File). """
	Text = str(File)
	return Text[len(Truncate):]
|
||||||
|
|
||||||
# With shutil.copytree copy only folder struct, no files; https://stackoverflow.com/a/15664273
|
# With shutil.copytree copy only folder struct, no files; https://stackoverflow.com/a/15664273
|
||||||
def IgnoreFiles(Dir:str, Files):
	""" shutil.copytree ignore-callback: report every plain file in Dir as
	    ignored, so only the folder structure gets copied, no files;
	    https://stackoverflow.com/a/15664273 """
	Ignored = []
	for Name in Files:
		if os.path.isfile(os.path.join(Dir, Name)):
			Ignored += [Name]
	return Ignored
|
||||||
|
|
||||||
def LoadFromDir(Dir, Matchs):
|
def LoadFromDir(Dir:str, Matchs):
|
||||||
Contents = {}
|
Contents = {}
|
||||||
Matchs = SureList(Matchs)
|
Matchs = SureList(Matchs)
|
||||||
for Match in Matchs:
|
for Match in Matchs:
|
||||||
@ -52,10 +54,10 @@ def LoadFromDir(Dir, Matchs):
|
|||||||
Contents.update({File: ReadFile(f"{Dir}/{File}")})
|
Contents.update({File: ReadFile(f"{Dir}/{File}")})
|
||||||
return Contents
|
return Contents
|
||||||
|
|
||||||
def mkdirps(Dir:str):
	""" Like `mkdir -p`: create Dir together with any missing parent
	    directories, succeeding silently when it already exists. """
	Target = Path(Dir)
	return Target.mkdir(parents=True, exist_ok=True)
|
||||||
|
|
||||||
def StripExt(Path:str):
	""" Drop the last dot-suffix from Path (e.g. 'a/b.md' -> 'a/b');
	    returns '' when the string contains no dot at all. """
	Stem, _, _ = Path.rpartition('.')
	return Stem
|
||||||
|
|
||||||
def UndupeStr(Str, Known, Split):
|
def UndupeStr(Str, Known, Split):
|
||||||
@ -174,3 +176,28 @@ def PrintProcPercentDots(Proc, DivMult=1):
|
|||||||
os.system('printf "="') # Using sys shell since for some reason print() without newline breaks here (doesn't print everytime)
|
os.system('printf "="') # Using sys shell since for some reason print() without newline breaks here (doesn't print everytime)
|
||||||
return True
|
return True
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
def NameSpace(From):
	""" Expose the keys of the mapping From as attributes (dot access). """
	Spaced = SimpleNamespace(**From)
	return Spaced
|
||||||
|
|
||||||
|
def DoMultiProc(Funct, ArgsCollection:list, Threads:int=None, Progress:bool=False):
	""" Run Funct over ArgsCollection in a process pool of Threads workers
	    (default: one per CPU core) and return the list of results, in order.
	    Each Args dict is passed to Funct augmented with a 'Process' entry
	    ({'Num': index, 'Count': total}) so that workers can print progress;
	    Funct should look like:
	        def Funct(Args:dict):
	            PrintProcPercentDots(Args['Process'])
	            return Work(Args['a'], ...)
	    With Progress=True, brackets are printed around the workers' dots. """
	if Threads is None:
		# Resolved at call time; a def-time cpu_count() default would be frozen at import
		Threads = cpu_count()
	# Fix: shallow-copy each Args dict so the caller's dicts are not
	# mutated in place with the injected 'Process' key
	FinalArgsCollection = [
		{**Args, "Process": {"Num": Index, "Count": len(ArgsCollection)}}
		for Index, Args in enumerate(ArgsCollection)]
	if Progress:
		os.system('printf "["') # Using system print because (see PrintProcPercentDots())
	with Pool(Threads) as MultiprocPool:
		Results = MultiprocPool.map(Funct, FinalArgsCollection)
	if Progress:
		os.system('printf "]\n"') # Newline after percentage dots
	return Results
|
||||||
|
Reference in New Issue
Block a user