First addition of experimental HTML site search; Update to themes build scripts

This commit is contained in:
octospacc 2023-02-22 22:46:04 +01:00
parent 3a483dc0ef
commit f433fff8a2
15 changed files with 308 additions and 87 deletions

View File

@@ -7,6 +7,9 @@ before_script: |
pages:
stage: deploy
script: |
cd ./Themes
make All
cd ..
sh ./Sites/BuildAll.sh
artifacts:
paths:

App/Assets/PagesSearch.html (new file, +201 lines)
View File

@@ -0,0 +1,201 @@
<!-- -->
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8"/>
<style>
:root {
--staticoso-HtmlSearch-ColorMatchWord: yellow;
/*--staticoso-HtmlSearch-ColorMatchBlock: orange;*/
}
</style>
</head>
<body>
<!--
-->
<style class="staticoso-HtmlSearch-Style"></style>
<input class="staticoso-HtmlSearch-Input"/>
<div class="staticoso-HtmlSearch-Pages">
{{PagesInject}}
</div>
<script>
var SearchInput = document.querySelector('.staticoso-HtmlSearch-Input');
var SearchStyle = document.querySelector('.staticoso-HtmlSearch-Style');
var SelectPage = '.staticoso-HtmlSearch-Page';
var SelectHref = '.staticoso-HtmlSearch-Href';
var SelectBase = `${SelectPage}s > ${SelectPage}`;
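// i.e. '.staticoso-HtmlSearch-Pages > .staticoso-HtmlSearch-Page'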
// <https://developer.mozilla.org/en-US/docs/Web/HTML/Block-level_elements#elements> + some personal additions
var BlockElems = ['address', 'article', 'aside', 'blockquote', 'details', 'dialog', 'dd', 'div', 'dl', 'dt', 'fieldset', 'figcaption', 'figure', 'footer', 'form', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'header', 'hgroup', 'hr', 'li', 'main', 'nav', 'ol', 'p', 'pre', 'section', 'table', 'ul', /**/ 'iframe', 'video', 'label'/*, 'span', 'i'*/];
// <https://stackoverflow.com/a/10730777>
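// Collect every text node under El, in document order, via a TreeWalker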
function TextNodesUnder(El) {
var n, a = [], walk = document.createTreeWalker(El, NodeFilter.SHOW_TEXT, null, false);
while (n = walk.nextNode()) a.push(n);
return a;
};
function StripExcessSpace(Txt) {
return Txt
.trim()
.replaceAll('\n', ' ')
.replaceAll('\t', ' ')
.replace(/\s+/g, ' '); // Replace all multiple spaces with 1
};
// Make a CSS string emulating the :where selector, for old browsers
function CssWhere(Base, Where) {
var Style = '';
Where.forEach(function(Tgt) {
Style += `${Base} ${Tgt},`;
});
return Style.slice(0, -1);
};
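// e.g. CssWhere('.Pages > .Page', ['p', 'li']) returns '.Pages > .Page p,.Pages > .Page li'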
// Get all needed elements under our class, at any nesting depth, and set their textContent as a data attribute
function PatchHtml() {
// Block elements, just add the attribute
document.querySelectorAll(CssWhere(SelectBase, BlockElems)).forEach(function(El) {
El.dataset.staticosoHtmlsearchBlock = StripExcessSpace(El.textContent.toLowerCase());
});
// Text nodes: wrap each one in a new real element and delete the original node
TextNodesUnder(document.querySelector(`${SelectPage}s`)).forEach(function(El) {
var ElNew = document.createElement('span');
StripExcessSpace(El.textContent).split(' ').forEach(function(Word) {
var ElWord;
[Word, ' '].forEach(function(Str) {
ElWord = document.createElement('span');
ElWord.textContent = Str; // textContent, so page text is not re-parsed as HTML
ElWord.dataset.staticosoHtmlsearchWord = Str.toLowerCase();
ElNew.appendChild(ElWord);
});
});
El.replaceWith(ElNew);
});
// Delete any illegal elements that got out of their supposed div due to bad HTML
document.querySelectorAll(`${SelectPage}s > *:not(${SelectPage})`).forEach(function(El) {
El.remove();
});
};
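// After PatchHtml, a text node like "Hello World" is wrapped as:
// <span><span data-staticoso-htmlsearch-word="hello">Hello</span><span data-staticoso-htmlsearch-word=" "> </span><span data-staticoso-htmlsearch-word="world">World</span><span data-staticoso-htmlsearch-word=" "> </span></span>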
// Check if any child of a node is actually visible
function HasVisibleChild(El) {
var Childs = El.children;
for (var i = 0; i < Childs.length; i++) {
// If at least one child is CSS-displayed and has non-void content
if (getComputedStyle(Childs[i]).display != 'none' && Childs[i].textContent.trim()) {
return true;
};
};
return false;
};
function CreateSearchAnchors(Query) { // Query: the current lowercased search string, passed in by SetSearch
/*
// Create anchors redirecting to the pages that are displayed
document.querySelectorAll(SelectBase).forEach(function(Page) {
var Href = Page.dataset.staticosoHtmlsearchHref;
if (HasVisibleChild(Page)) {
if (!Page.parentNode.querySelector(`${SelectHref}[href="${Href}"]`)) {
var ElHref = document.createElement('a');
ElHref.className = SelectHref.slice(1);
ElHref.innerHTML = Page.dataset.staticosoHtmlsearchName || Href;
ElHref.href = Href;
Page.parentNode.insertBefore(ElHref, Page);
};
} else {
Page.parentNode.querySelectorAll(`${SelectHref}[href="${Href}"]`).forEach(function(ElHref) {
ElHref.remove();
});
};
});
*/
// Create anchors redirecting to the pages that are displayed
// First delete old links
document.querySelectorAll(`${SelectPage}s > ${SelectHref}`).forEach(function(Link) {
Link.remove();
});
// Then, for every visible block, check its parent to see if the link exists; if not, [re]create it
// Go page by page to skip cycles when we can
var Pages = document.querySelectorAll(SelectBase);
for (var i = 0; i < Pages.length; i++) {
//document.querySelectorAll(SelectBase).forEach(function(Page) {
var Page = Pages[i];
var Blocks = Page.querySelector/*All*/(`*[data-staticoso-htmlsearch-block*="${Query}"]`);
//for (var i = 0; i < Blocks.length; i++) {
if (Blocks) {
var Href = Page.dataset.staticosoHtmlsearchHref;
if (!Page.parentNode.querySelector(`${SelectHref}[href="${Href}"]`)) {
var Link = document.createElement('a');
Link.className = SelectHref.slice(1);
Link.innerHTML = Page.dataset.staticosoHtmlsearchName || Href;
Link.href = Href;
Page.parentNode.insertBefore(Link, Page);
};
//break;
};
//});
};
/*
document.querySelectorAll(`${SelectBase} *[data-staticoso-htmlsearch-block*="${Query}"]`).forEach(function(Block) {
var Page = Block.closest('.staticoso-HtmlSearch-Page');
var Href = Page.dataset.staticosoHtmlsearchHref;
if (!Page.parentNode.querySelector(`${SelectHref}[href="${Href}"]`)) {
var Link = document.createElement('a');
Link.className = SelectHref.slice(1);
Link.innerHTML = Page.dataset.staticosoHtmlsearchName || Href;
Link.href = Href;
Page.parentNode.insertBefore(Link, Page);
};
});
*/
// NOTE: This lags so baaad but I've not found a better solution for now, and I want the search to be continuous, without clicking a button :(
};
// Every time the search query is modified, reset our CSS that does all the content filtering
function SetSearch() {
var Query = StripExcessSpace(SearchInput.value.toLowerCase());
var WordStrictStyle = '';
var WordLooseStyle = '';
// Reset the style CSS to hide everything by default
SearchStyle.innerHTML = `
/* ${SelectBase} { cursor: pointer; } */
${SelectBase} *[data-staticoso-htmlsearch-block] { display: none; }
`;
// For every word in the search query, add a CSS selector
// Strict selection (=)
Query.split(' ').forEach(function(Token) {
WordStrictStyle += `${SelectBase} *[data-staticoso-htmlsearch-word="${Token}"],`;
});
WordStrictStyle = `${WordStrictStyle.trim().slice(0, -1)}{ background: var(--staticoso-HtmlSearch-ColorMatchWord); }`;
// Loose selection (*=)
Query.split(' ').forEach(function(Token) {
WordLooseStyle += `${SelectBase} *[data-staticoso-htmlsearch-word*="${Token}"],`;
});
WordLooseStyle = `${WordLooseStyle.trim().slice(0, -1)}{ border: 2px dotted gray; }`;
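// e.g. for the query "hello world", each word yields selectors like
// *[data-staticoso-htmlsearch-word="hello"] (exact word match, highlighted)
// and *[data-staticoso-htmlsearch-word*="hello"] (partial match, dotted border)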
// Set the style for the above tokens, then unhide needed blocks
//SearchStyle.innerHTML = `
// ${SearchStyle.innerHTML.trim().slice(0, -1)} { background: var(--staticoso-HtmlSearch-ColorMatchWord); }
SearchStyle.innerHTML += `
${WordStrictStyle}
${WordLooseStyle}
${SelectBase} *[data-staticoso-htmlsearch-block*="${Query}"] { display: revert; }
`;
CreateSearchAnchors(Query);
};
['onchange', 'oninput', 'onpaste'].forEach(function(Ev) {
SearchInput[Ev] = SetSearch;
});
PatchHtml();
SetSearch();
</script>
<!-- -->
</body>
</html>
<!--
-->

View File

Binary image file (198 B before and after)

View File

Binary image file (320 B before and after)

View File

@@ -33,7 +33,7 @@ from Libs import rcssmin
cssmin = rcssmin._make_cssmin(python_only=True)
def ResetOutDir(OutDir):
for e in (OutDir, f"{OutDir}.gmi"):
for e in (OutDir, f'{OutDir}.Content', f'{OutDir}.gmi'):
try:
shutil.rmtree(e)
except FileNotFoundError:
@@ -119,7 +119,7 @@ def WriteRedirects(Flags, Pages, FinalPaths, Locale):
StrClick=Locale['ClickHere'],
StrRedirect=Locale['IfNotRedirected']))
def Main(Args, FeedEntries):
def BuildMain(Args, FeedEntries):
Flags, Snippets, FinalPaths = {}, {}, []
HavePages, HavePosts = False, False
SiteConf = LoadConfFile('Site.ini')
@@ -208,13 +208,15 @@ def Main(Args, FeedEntries):
if os.path.isdir('Pages'):
HavePages = True
shutil.copytree('Pages', OutDir, dirs_exist_ok=True)
shutil.copytree('Pages', f'{OutDir}.Content', dirs_exist_ok=True)
if Flags['GemtextOutput']:
shutil.copytree('Pages', f"{OutDir}.gmi", ignore=IgnoreFiles, dirs_exist_ok=True)
shutil.copytree('Pages', f'{OutDir}.gmi', ignore=IgnoreFiles, dirs_exist_ok=True)
if os.path.isdir('Posts'):
HavePosts = True
shutil.copytree('Posts', f"{OutDir}/Posts", dirs_exist_ok=True)
shutil.copytree('Posts', f'{OutDir}/Posts', dirs_exist_ok=True)
shutil.copytree('Posts', f'{OutDir}.Content/Posts', dirs_exist_ok=True)
if Flags['GemtextOutput']:
shutil.copytree('Posts', f"{OutDir}.gmi/Posts", ignore=IgnoreFiles, dirs_exist_ok=True)
shutil.copytree('Posts', f'{OutDir}.gmi/Posts', ignore=IgnoreFiles, dirs_exist_ok=True)
if not (HavePages or HavePosts):
logging.error("⛔ No Pages or posts found. Nothing to do, exiting!")
@@ -259,9 +261,12 @@ def Main(Args, FeedEntries):
WriteFile(File, Content)
FinalPaths += [File]
logging.debug("Creating Redirects")
logging.info("Creating Redirects")
WriteRedirects(Flags, Pages, FinalPaths, Locale)
logging.info("Building HTML Search Page")
WriteFile(f'{OutDir}/Search.html', BuildPagesSearch(Flags, Pages))
if Flags['GemtextOutput']:
logging.info("Generating Gemtext")
GemtextCompileList(Flags, Pages, LimitFiles)
@@ -289,16 +294,6 @@ def Main(Args, FeedEntries):
else:
shutil.copytree('Assets', OutDir, dirs_exist_ok=True)
#def DoSiteBuild(Arg=None):
# #try:
# # SiteEditObserver.stop()
# # SiteEditObserver.join()
# #except:
# # pass
# Main(Args=Args, FeedEntries=FeedEntries)
# logging.info(f"✅ Done! ({round(time.time()-StartTime, 3)}s)")
# #SiteEditObserver.start()
if __name__ == '__main__':
StartTime = time.time()
@@ -351,26 +346,5 @@ if __name__ == '__main__':
logging.warning("⚠ Can't load the XML libraries. XML Feeds Generation is Disabled. Make sure the 'lxml' library is installed.")
FeedEntries = 0
#from watchdog.observers import Observer
#from watchdog.events import LoggingEventHandler
#SiteEditEvent = LoggingEventHandler()
#SiteEditEvent.on_created = DoSiteBuild
#SiteEditEvent.on_deleted = DoSiteBuild
#SiteEditEvent.on_modified = DoSiteBuild
#SiteEditEvent.on_moved = DoSiteBuild
#SiteEditObserver = Observer()
#SiteEditObserver.schedule(SiteEditEvent, ".", recursive=True)
#SiteEditObserver.start()
Main(Args=Args, FeedEntries=FeedEntries)
BuildMain(Args=Args, FeedEntries=FeedEntries)
logging.info(f"✅ Done! ({round(time.time()-StartTime, 3)}s)")
#DoSiteBuild()
#try:
# while True:
# pass
#except KeyboardInterrupt:
# logging.info("Stopped.")
#finally:
# SiteEditObserver.stop()
# SiteEditObserver.join()

View File

@@ -180,12 +180,12 @@ def MakeHTMLJournal(Flags, Locale, FilePath, HTML):
Redirect = f"""<meta http-equiv="refresh" content="0; url='./{FileName}'">""" if Flags["JournalRedirect"] else ''
# Instead of copying stuff from the full page, for now we use dedicated title, header, footer, and pagination
Title = t.attrs["journaltitle"] if 'journaltitle' in t.attrs else f'"{StripExt(FileName)}" Journal - {Flags["SiteName"]}' if Flags["SiteName"] else f'"{StripExt(FileName)}" Journal'
FeedLink = f"""<a title="Journal Atom Feed" href="https://journal.miso.town/atom?url={URL}" target="_blank" rel="noopener"><img width="88" height="31" alt="Journal Atom Feed" title="Journal Atom Feed" src="data:image/png;base64,{b64encode(ReadFile(staticosoBaseDir()+'Assets/Feed-88x31.png', 'rb')).decode()}"></a>""" if Flags["SiteDomain"] else ''
Header = t.attrs["journalheader"] if 'journalheader' in t.attrs else f"""\
Title = t.attrs['journaltitle'] if 'journaltitle' in t.attrs else f'"{StripExt(FileName)}" Journal - {Flags["SiteName"]}' if Flags["SiteName"] else f'"{StripExt(FileName)}" Journal'
FeedLink = f"""<a title="Journal Atom Feed" href="https://journal.miso.town/atom?url={URL}" target="_blank" rel="noopener"><img width="88" height="31" alt="Journal Atom Feed" title="Journal Atom Feed" src="data:image/png;base64,{b64encode(ReadFile(staticosoBaseDir()+'Assets/ThirdParty/Feed-88x31.png', 'rb')).decode()}"></a>""" if Flags['SiteDomain'] else ''
Header = t.attrs['journalheader'] if 'journalheader' in t.attrs else f"""\
<p>
<i>{Locale["StrippedDownNotice"].format(Link="./"+FileName)}</i>
<a title="Valid HTML Journal" href="https://journal.miso.town" target="_blank" rel="noopener"><img alt="Valid HTML Journal" title="Valid HTML Journal" width="88" height="31" src="data:image/png;base64,{b64encode(ReadFile(staticosoBaseDir()+'Assets/Valid-HTML-Journal-88x31.png', 'rb')).decode()}"></a>
<a title="Valid HTML Journal" href="https://journal.miso.town" target="_blank" rel="noopener"><img alt="Valid HTML Journal" title="Valid HTML Journal" width="88" height="31" src="data:image/png;base64,{b64encode(ReadFile(staticosoBaseDir()+'Assets/ThirdParty/Valid-HTML-Journal-88x31.png', 'rb')).decode()}"></a>
{FeedLink}
</p>
"""

View File

@@ -23,7 +23,7 @@ from Modules.Utils import *
# Menu styles:
# - Simple: Default, Flat, Line
# - Others: Excerpt, Image, Preview (Excerpt + Image), Full
def GetHTMLPagesList(Pages, BlogName, SiteRoot, PathPrefix, CallbackFile=None, Unite=[], Type=None, Limit=None, PathFilter='', Category=None, For='Menu', MarkdownExts=(), MenuStyle='Default', ShowPaths=True):
def GetHTMLPagesList(Pages:list, BlogName:str, SiteRoot:str, PathPrefix:str, CallbackFile=None, Unite=[], Type=None, Limit=None, PathFilter='', Category=None, For='Menu', MarkdownExts=(), MenuStyle='Default', ShowPaths=True):
Flatten, SingleLine, DoneCount, PrevDepth = False, False, 0, 0
if MenuStyle == 'Flat':
Flatten = True
@@ -91,14 +91,14 @@ def GetHTMLPagesList(Pages, BlogName, SiteRoot, PathPrefix, CallbackFile=None, U
elif MenuStyle in ('Line', 'Excerpt', 'Image', 'Preview', 'Full'):
return List
def CheckHTMLCommentLine(Line):
def CheckHTMLCommentLine(Line:str):
if Line.startswith('<!--'):
Line = Line[4:].lstrip()
if Line.endswith('-->'):
return Line
return None
def TemplatePreprocessor(Text):
def TemplatePreprocessor(Text:str):
Meta, MetaDefault = '', {
'MenuStyle': 'Default'}
for l in Text.splitlines():
@@ -113,7 +113,7 @@ def TemplatePreprocessor(Text):
Meta.update({i:MetaDefault[i]})
return Meta
def FindPreprocLine(Line, Meta, Macros):
def FindPreprocLine(Line:str, Meta, Macros):
Changed = False
Line = Line.lstrip().rstrip()
lll = CheckHTMLCommentLine(Line)
@@ -129,8 +129,8 @@ def FindPreprocLine(Line, Meta, Macros):
# IgnoreBlocksStart += [l]
return (Meta, Macros, Changed)
def PagePreprocessor(Path:str, TempPath:str, Type, SiteTemplate, SiteRoot, GlobalMacros, CategoryUncategorized, LightRun=False):
File = ReadFile(Path)
def PagePreprocessor(Path:str, TempPath:str, Type:str, SiteTemplate, SiteRoot, GlobalMacros, CategoryUncategorized, LightRun:bool=False, Content=None):
File = ReadFile(Path) if not Content else Content
Path = Path.lower()
Content, Titles, DashyTitles, HTMLTitlesFound, Macros, Meta, MetaDefault = '', [], [], False, '', '', {
'Template': SiteTemplate,
@@ -457,7 +457,8 @@ def HandlePage(Flags, Page, Pages, Categories, LimitFiles, Snippets, ConfMenu, L
DynamicParts, DynamicPartsText, StaticPartsText, TemplatesText = Flags['DynamicParts'], Snippets['DynamicParts'], Snippets['StaticParts'], Snippets['Templates']
FileLower = File.lower()
PagePath = f"{OutDir}/{StripExt(File)}.html"
PagePath = f'{OutDir}/{StripExt(File)}.html'
ContentPagePath = f'{OutDir}.Content/{StripExt(File)}.html'
LightRun = False if LimitFiles == False or File in LimitFiles else True
if FileLower.endswith(FileExtensions['Markdown']):
@@ -466,8 +467,8 @@ def HandlePage(Flags, Page, Pages, Categories, LimitFiles, Snippets, ConfMenu, L
Content = PagePostprocessor('pug', ReadFile(PagePath), Meta)
elif FileLower.endswith(('.txt')):
Content = '<pre>' + html.escape(Content) + '</pre>'
elif FileLower.endswith(FileExtensions['HTML']):
Content = ReadFile(PagePath)
#elif FileLower.endswith(FileExtensions['HTML']):
# Content = ReadFile(PagePath)
if LightRun:
HTMLPagesList = None
@@ -557,6 +558,7 @@ def HandlePage(Flags, Page, Pages, Categories, LimitFiles, Snippets, ConfMenu, L
SlimHTML = HTMLPagesList + ContentHTML
if not LightRun:
WriteFile(PagePath, HTML)
WriteFile(ContentPagePath, ContentHTML)
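# Note (assumption): the bare content mirrored under f'{OutDir}.Content' seems intended for reuse, e.g. by the new HTML search build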
if not LightRun and 'htmljournal' in ContentHTML.lower(): # Avoid extra cycles
HTML, _, _, _ = PatchHTML(
@@ -667,9 +669,12 @@ def MakeSite(Flags, LimitFiles, Snippets, ConfMenu, GlobalMacros, Locale, Thread
File = f"Categories/{Cat}.md"
FilePath = f"{OutDir}/{File}"
WriteFile(FilePath, CategoryPageTemplate.format(Name=Cat))
_, Content, Titles, Meta = PagePreprocessor(FilePath, FilePath, Type, SiteTemplate, SiteRoot, GlobalMacros, CategoryUncategorized, LightRun=LightRun)
_, Content, Titles, Meta = PagePreprocessor(FilePath, FilePath, Type, SiteTemplate, SiteRoot, GlobalMacros, CategoryUncategorized, LightRun=LightRun)
Pages += [[File, Content, Titles, Meta]]
#logging.info("Building the HTML Search Page")
#Pages += [PagePreprocessor(Path='Search.html', TempPath='Search.html', Type='Page', SiteTemplate=SiteTemplate, SiteRoot=SiteRoot, GlobalMacros=GlobalMacros, CategoryUncategorized=CategoryUncategorized, LightRun=LightRun, Content=BuildPagesSearch(Flags, Pages))]
for i,e in enumerate(ConfMenu):
for File, Content, Titles, Meta in Pages:
File = StripExt(File)+'.html'

View File

@@ -8,12 +8,30 @@
| ================================= """
from urllib.parse import quote as URLEncode
from Modules.HTML import *
from Modules.Utils import *
def MakeSitemap(Flags, Pages):
Map = ''
Domain = Flags['SiteDomain'] + '/' if Flags['SiteDomain'] else ''
for File, Content, Titles, Meta, ContentHTML, SlimHTML, Description, Image in Pages:
for File, Content, Titles, Meta, ContentHtml, SlimHtml, Description, Image in Pages:
File = f"{StripExt(File)}.html"
Map += Domain + URLEncode(File) + '\n'
WriteFile(f"{Flags['OutDir']}/sitemap.txt", Map)
def BuildPagesSearch(Flags:dict, Pages:list):
SearchContent = ''
with open(f'{staticosoBaseDir()}Assets/PagesSearch.html', 'r') as File:
Base = File.read().split('{{PagesInject}}')
for File, Content, Titles, Meta, ContentHtml, SlimHtml, Description, Image in Pages:
#for File, Content, Titles, Meta in Pages:
SearchContent += f'''
<div
class="staticoso-HtmlSearch-Page"
data-staticoso-htmlsearch-name="{html.escape(html.unescape(Titles[0]), quote=True)}"
data-staticoso-htmlsearch-href="{StripExt(File)}.html"
>
{ContentHtml}
</div>
'''
return Base[0] + SearchContent + Base[1]
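# BuildMain calls this after the pages are written, i.e.:
#   WriteFile(f'{OutDir}/Search.html', BuildPagesSearch(Flags, Pages))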

View File

@@ -105,3 +105,4 @@ All of this is because some crucial things might be changed from one commit to a
- Bad HTML included in Markdown files can cause a build to fail entirely.
- Despite differential building and multithreading, the program still needs some more optimizations.
- Ordering pages in the global menu with external configuration flags (outside the pages' source) yields broken and unpredictable results.
- If the site is small, the percentage-completion graphic is bugged (it appears shorter than it should).

View File

@@ -1,10 +1,11 @@
all: BuildThemes BuildDemos
all: All
All: BuildThemes BuildDemos
BuildThemes:
./Scripts/BuildThemes.py
python3 ./Scripts/BuildThemes.py
BuildDemos:
./Scripts/BuildDemos.sh
sh ./Scripts/BuildDemos.sh
clean: Clean
Clean:

View File

@@ -1 +1,19 @@
#!/bin/sh
cd "$( dirname "$( realpath "$0" )" )"
cd ../Sources/Themes
for Theme in *
do
cd ../../
mkdir -p ./Build/Demos/$Theme
cd ./Build/Demos/$Theme
mkdir -p ./Assets ./Templates ./Posts
cp -r ../../Themes/$Theme ./.Source
cp ./.Source/$Theme.html ./Templates/Default.html
cp ./.Source/*.css ./Assets/
cp ../../../Sources/Snippets/*.md ./Posts/
python3 ../../../../App/Source/Build.py \
--SiteName="$Theme"
cd ../../../Sources/Themes
done

View File

@@ -4,18 +4,18 @@ import shutil
from pathlib import Path
def Main():
for Theme in os.listdir("Sources/Themes"):
Path(f"Build/Themes/{Theme}").mkdir(parents=True, exist_ok=True)
for Theme in os.listdir('./Sources/Themes'):
Path(f'./Build/Themes/{Theme}').mkdir(parents=True, exist_ok=True)
try:
shutil.copyfile(f"Sources/Themes/{Theme}/Style.css", f"Build/Themes/{Theme}/Style.css")
shutil.copyfile(f'./Sources/Themes/{Theme}/{Theme}.css', f'./Build/Themes/{Theme}/{Theme}.css')
except FileExistsError:
pass
with open(f"Sources/Snippets/Base.html", "r") as f:
with open(f'./Sources/Snippets/Base.html', 'r') as f:
Base = f.read()
with open(f"Sources/Themes/{Theme}/Body.html", "r") as f:
with open(f'./Sources/Themes/{Theme}/Body.html', 'r') as f:
Body = f.read()
with open(f"Build/Themes/{Theme}/{Theme}.html", "w+") as f:
f.write(Base.replace("{{Body}}", Body))
with open(f'./Build/Themes/{Theme}/{Theme}.html', 'w+') as f:
f.write(Base.replace('{{Theme}}', Theme).replace('{{Body}}', Body))
if __name__ == "__main__":
if __name__ == '__main__':
Main()

View File

@@ -1,25 +1,25 @@
<!DOCTYPE html>
<html lang="<staticoso:PageLang>">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta name="generator" content="staticoso">
<link rel="stylesheet" href="Style.css">
<link rel="alternate" type="application/atom+xml" title="Blog Atom Feed" href="[staticoso:Site:RelativeRoot]feed/atom.xml">
<link rel="alternate" type="application/rss+xml" title="Blog RSS Feed" href="[staticoso:Site:RelativeRoot]feed/rss.xml">
<title><staticoso:PageTitle> - <staticoso:SiteName></title>
<meta name="description" content="<staticoso:PageDescription>">
<link href="[staticoso:Site:RelativeRoot]favicon.png" rel="icon" type="image/png">
<meta property="og:type" content="website">
<meta property="og:title" content="<staticoso:PageTitle> - <staticoso:SiteName>">
<meta property="og:description" content="<staticoso:PageDescription>">
<meta property="og:url" content="<staticoso:SiteDomain>/<staticoso:PagePath>">
<meta property="og:image" content="<staticoso:PageImage>">
<style>
<staticoso:PageStyle>
</style>
</head>
<body>
{{Body}}
</body>
<html lang="[staticoso:PageLang]">
<head>
<meta charset="UTF-8"/>
<meta name="viewport" content="width=device-width, initial-scale=1.0"/>
<meta name="generator" content="staticoso"/>
<link rel="stylesheet" href="[staticoso:SiteRelativeRoot]{{Theme}}.css"/>
<link rel="alternate" type="application/atom+xml" title="Blog Atom Feed" href="[staticoso:SiteRelativeRoot]feed/atom.xml"/>
<link rel="alternate" type="application/rss+xml" title="Blog RSS Feed" href="[staticoso:SiteRelativeRoot]feed/rss.xml"/>
<title><staticoso:PageTitle> - <staticoso:SiteName></title>
<meta name="description" content="[staticoso:PageDescription]"/>
<link href="[staticoso:SiteRelativeRoot]favicon.png" rel="icon" type="image/png"/>
<meta property="og:type" content="website"/>
<meta property="og:title" content="[staticoso:PageTitle] - [staticoso:SiteName]"/>
<meta property="og:description" content="[staticoso:PageDescription]"/>
<meta property="og:url" content="[staticoso:SiteDomain]/[staticoso:PagePath]"/>
<meta property="og:image" content="[staticoso:PageImage]"/>
<style>
<staticoso:PageStyle>
</style>
</head>
<body>
{{Body}}
</body>
</html>