mirror of https://gitlab.com/octtspacc/staticoso
Compare commits
2 Commits
3a483dc0ef
...
c35aa94345
Author | SHA1 | Date |
---|---|---|
octospacc | c35aa94345 | |
octospacc | f433fff8a2 |
|
@ -3,4 +3,6 @@
|
||||||
tmp.*
|
tmp.*
|
||||||
public/*
|
public/*
|
||||||
Sites/*/public/*
|
Sites/*/public/*
|
||||||
|
Sites/*/public.Content/*
|
||||||
|
Sites/*/public.gmi/*
|
||||||
Themes/Build/*
|
Themes/Build/*
|
||||||
|
|
|
@ -7,6 +7,9 @@ before_script: |
|
||||||
pages:
|
pages:
|
||||||
stage: deploy
|
stage: deploy
|
||||||
script: |
|
script: |
|
||||||
|
cd ./Themes
|
||||||
|
make All
|
||||||
|
cd ..
|
||||||
sh ./Sites/BuildAll.sh
|
sh ./Sites/BuildAll.sh
|
||||||
artifacts:
|
artifacts:
|
||||||
paths:
|
paths:
|
||||||
|
|
|
@ -0,0 +1,201 @@
|
||||||
|
<!-- -->
|
||||||
|
<!DOCTYPE html>
|
||||||
|
<html>
|
||||||
|
<head>
|
||||||
|
<meta charset="UTF-8"/>
|
||||||
|
<style>
|
||||||
|
:root {
|
||||||
|
--staticoso-HtmlSearch-ColorMatchWord: yellow;
|
||||||
|
/*--staticoso-HtmlSearch-ColorMatchBlock: orange;*/
|
||||||
|
}
|
||||||
|
</style>
|
||||||
|
</head>
|
||||||
|
<body>
|
||||||
|
<!--
|
||||||
|
-->
|
||||||
|
<style class="staticoso-HtmlSearch-Style"></style>
|
||||||
|
<input class="staticoso-HtmlSearch-Input"/>
|
||||||
|
|
||||||
|
<div class="staticoso-HtmlSearch-Pages">
|
||||||
|
{{PagesInject}}
|
||||||
|
</div>
|
||||||
|
<script>
|
||||||
|
var SearchInput = document.querySelector('.staticoso-HtmlSearch-Input');
|
||||||
|
var SearchStyle = document.querySelector('.staticoso-HtmlSearch-Style');
|
||||||
|
var SelectPage = '.staticoso-HtmlSearch-Page';
|
||||||
|
var SelectHref = '.staticoso-HtmlSearch-Href';
|
||||||
|
var SelectBase = `${SelectPage}s > ${SelectPage}`;
|
||||||
|
// <https://developer.mozilla.org/en-US/docs/Web/HTML/Block-level_elements#elements> + some personal
|
||||||
|
var BlockElems = ['address', 'article', 'aside', 'blockquote', 'details', 'dialog', 'dd', 'div', 'dl', 'dt', 'fieldset', 'figcaption', 'figure', 'footer', 'form', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'header', 'hgroup', 'hr', 'li', 'main', 'nav', 'ol', 'p', 'pre', 'section', 'table', 'ul', /**/ 'iframe', 'video', 'label'/*, 'span', 'i'*/];
|
||||||
|
|
||||||
|
// <https://stackoverflow.com/a/10730777>
|
||||||
|
function TextNodesUnder(El) {
|
||||||
|
var n, a = [], walk = document.createTreeWalker(El, NodeFilter.SHOW_TEXT, null, false);
|
||||||
|
while (n = walk.nextNode()) a.push(n);
|
||||||
|
return a;
|
||||||
|
};
|
||||||
|
|
||||||
|
function StripExcessSpace(Txt) {
|
||||||
|
return Txt
|
||||||
|
.trim()
|
||||||
|
.replaceAll('\n', ' ')
|
||||||
|
.replaceAll('\t', ' ')
|
||||||
|
.replace(/\s+/g, ' '); // Replace all multiple spaces with 1
|
||||||
|
};
|
||||||
|
|
||||||
|
// Make a CSS string emulating the :where selector, for old browsers
|
||||||
|
function CssWhere(Base, Where) {
|
||||||
|
var Style = '';
|
||||||
|
Where.forEach(function(Tgt) {
|
||||||
|
Style += `${Base} ${Tgt},`;
|
||||||
|
});
|
||||||
|
return Style.slice(0, -1);
|
||||||
|
};
|
||||||
|
|
||||||
|
// Get all needed elements under our class, infinitely nested, and set their textContent as data attribute
|
||||||
|
function PatchHtml() {
|
||||||
|
// Block elements, just add the attribute
|
||||||
|
document.querySelectorAll(CssWhere(SelectBase, BlockElems)).forEach(function(El) {
|
||||||
|
El.dataset.staticosoHtmlsearchBlock = StripExcessSpace(El.textContent.toLowerCase());
|
||||||
|
});
|
||||||
|
// Text nodes, we have to wrap each into a new real element and delete the original node
|
||||||
|
TextNodesUnder(document.querySelector(`${SelectPage}s`)).forEach(function(El) {
|
||||||
|
var ElNew = document.createElement('span');
|
||||||
|
StripExcessSpace(El.textContent).split(' ').forEach(function(Word) {
|
||||||
|
var ElWord;
|
||||||
|
[Word, ' '].forEach(function(Str) {
|
||||||
|
ElWord = document.createElement('span');
|
||||||
|
ElWord.innerHTML = Str;
|
||||||
|
ElWord.dataset.staticosoHtmlsearchWord = Str.toLowerCase();
|
||||||
|
ElNew.appendChild(ElWord);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
El.replaceWith(ElNew);
|
||||||
|
});
|
||||||
|
// Delete any illegal elements that got out of their supposed div due to bad HTML
|
||||||
|
document.querySelectorAll(`${SelectPage}s > *:not(${SelectPage})`).forEach(function(El) {
|
||||||
|
El.remove();
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
// Check if any child of a node is actually visible
|
||||||
|
function HasVisibleChild(El) {
|
||||||
|
var Childs = El.children;
|
||||||
|
for (var i = 0; i < Childs.length; i++) {
|
||||||
|
// If at least one child is CSS-displayed and has non-void content
|
||||||
|
if (getComputedStyle(Childs[i]).display != 'none' && Childs[i].textContent.trim()) {
|
||||||
|
return true;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
return false;
|
||||||
|
};
|
||||||
|
|
||||||
|
function CreateSearchAnchors(Query) {
|
||||||
|
/*
|
||||||
|
// Create anchors redirecting to the pages that are displayed
|
||||||
|
document.querySelectorAll(SelectBase).forEach(function(Page) {
|
||||||
|
var Href = Page.dataset.staticosoHtmlsearchHref;
|
||||||
|
if (HasVisibleChild(Page)) {
|
||||||
|
if (!Page.parentNode.querySelector(`${SelectHref}[href="${Href}"]`)) {
|
||||||
|
var ElHref = document.createElement('a');
|
||||||
|
ElHref.className = SelectHref.slice(1);
|
||||||
|
ElHref.innerHTML = Page.dataset.staticosoHtmlsearchName || Href;
|
||||||
|
ElHref.href = Href;
|
||||||
|
Page.parentNode.insertBefore(ElHref, Page);
|
||||||
|
};
|
||||||
|
} else {
|
||||||
|
Page.parentNode.querySelectorAll(`${SelectHref}[href="${Href}"]`).forEach(function(ElHref) {
|
||||||
|
ElHref.remove();
|
||||||
|
});
|
||||||
|
};
|
||||||
|
});
|
||||||
|
*/
|
||||||
|
// Create anchors redirecting to the pages that are displayed
|
||||||
|
// First delete old links
|
||||||
|
document.querySelectorAll(`${SelectPage}s > ${SelectHref}`).forEach(function(Link) {
|
||||||
|
Link.remove();
|
||||||
|
});
|
||||||
|
// Then for all visible blocks check their parents to see if the links exist, if not [re]create them
|
||||||
|
// Go page by page to skip cycles when we can
|
||||||
|
var Pages = document.querySelectorAll(SelectBase);
|
||||||
|
for (var i = 0; i < Pages.length; i++) {
|
||||||
|
//document.querySelectorAll(SelectBase).forEach(function(Page) {
|
||||||
|
var Page = Pages[i];
|
||||||
|
var Blocks = Page.querySelector/*All*/(`*[data-staticoso-htmlsearch-block*="${Query}"]`);
|
||||||
|
//for (var i = 0; i < Blocks.length; i++) {
|
||||||
|
if (Blocks) {
|
||||||
|
var Href = Page.dataset.staticosoHtmlsearchHref;
|
||||||
|
if (!Page.parentNode.querySelector(`${SelectHref}[href="${Href}"]`)) {
|
||||||
|
var Link = document.createElement('a');
|
||||||
|
Link.className = SelectHref.slice(1);
|
||||||
|
Link.innerHTML = Page.dataset.staticosoHtmlsearchName || Href;
|
||||||
|
Link.href = Href;
|
||||||
|
Page.parentNode.insertBefore(Link, Page);
|
||||||
|
};
|
||||||
|
//break;
|
||||||
|
};
|
||||||
|
//});
|
||||||
|
};
|
||||||
|
/*
|
||||||
|
document.querySelectorAll(`${SelectBase} *[data-staticoso-htmlsearch-block*="${Query}"]`).forEach(function(Block) {
|
||||||
|
var Page = Block.closest('.staticoso-HtmlSearch-Page');
|
||||||
|
var Href = Page.dataset.staticosoHtmlsearchHref;
|
||||||
|
if (!Page.parentNode.querySelector(`${SelectHref}[href="${Href}"]`)) {
|
||||||
|
var Link = document.createElement('a');
|
||||||
|
Link.className = SelectHref.slice(1);
|
||||||
|
Link.innerHTML = Page.dataset.staticosoHtmlsearchName || Href;
|
||||||
|
Link.href = Href;
|
||||||
|
Page.parentNode.insertBefore(Link, Page);
|
||||||
|
};
|
||||||
|
});
|
||||||
|
*/
|
||||||
|
// NOTE: This lags so baaad but I've not found a better solution for now, and I want the search to be continuos, without clicking a button :(
|
||||||
|
};
|
||||||
|
|
||||||
|
// Every time the search query is modified, reset our CSS that does all the content filtering
|
||||||
|
function SetSearch() {
|
||||||
|
var Query = StripExcessSpace(SearchInput.value.toLowerCase());
|
||||||
|
var WordStrictStyle = '';
|
||||||
|
var WordLooseStyle = '';
|
||||||
|
|
||||||
|
// Reset the style CSS to hide everything by default
|
||||||
|
SearchStyle.innerHTML = `
|
||||||
|
/* ${SelectBase} { cursor: pointer; } */
|
||||||
|
${SelectBase} *[data-staticoso-htmlsearch-block] { display: none; }
|
||||||
|
`;
|
||||||
|
|
||||||
|
// For every word in the search query, add a CSS selector
|
||||||
|
// Strict selection (=)
|
||||||
|
Query.split(' ').forEach(function(Token) {
|
||||||
|
WordStrictStyle += `${SelectBase} *[data-staticoso-htmlsearch-word="${Token}"],`;
|
||||||
|
});
|
||||||
|
WordStrictStyle = `${WordStrictStyle.trim().slice(0, -1)}{ background: var(--staticoso-HtmlSearch-ColorMatchWord); }`;
|
||||||
|
// Loose selection (*=)
|
||||||
|
Query.split(' ').forEach(function(Token) {
|
||||||
|
WordLooseStyle += `${SelectBase} *[data-staticoso-htmlsearch-word*="${Token}"],`;
|
||||||
|
});
|
||||||
|
WordLooseStyle = `${WordLooseStyle.trim().slice(0, -1)}{ border: 2px dotted gray; }`;
|
||||||
|
|
||||||
|
// Set the style for the above tokens, then unhide needed blocks
|
||||||
|
//SearchStyle.innerHTML = `
|
||||||
|
// ${SearchStyle.innerHTML.trim().slice(0, -1)} { background: var(--staticoso-HtmlSearch-ColorMatchWord); }
|
||||||
|
SearchStyle.innerHTML += `
|
||||||
|
${WordStrictStyle}
|
||||||
|
${WordLooseStyle}
|
||||||
|
${SelectBase} *[data-staticoso-htmlsearch-block*="${Query}"] { display: revert; }
|
||||||
|
`;
|
||||||
|
CreateSearchAnchors(Query);
|
||||||
|
};
|
||||||
|
|
||||||
|
['onchange', 'oninput', 'onpaste'].forEach(function(Ev) {
|
||||||
|
SearchInput[Ev] = SetSearch;
|
||||||
|
});
|
||||||
|
|
||||||
|
PatchHtml();
|
||||||
|
SetSearch();
|
||||||
|
</script>
|
||||||
|
<!-- -->
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
|
<!--
|
||||||
|
-->
|
Before Width: | Height: | Size: 198 B After Width: | Height: | Size: 198 B |
Before Width: | Height: | Size: 320 B After Width: | Height: | Size: 320 B |
|
@ -1,12 +1,12 @@
|
||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
""" ================================= |
|
""" ================================== |
|
||||||
| This file is part of |
|
| This file is part of |
|
||||||
| staticoso |
|
| staticoso |
|
||||||
| Just a simple Static Site Generator |
|
| Just a simple Static Site Generator |
|
||||||
| |
|
| |
|
||||||
| Licensed under the AGPLv3 license |
|
| Licensed under the AGPLv3 license |
|
||||||
| Copyright (C) 2022, OctoSpacc |
|
| Copyright (C) 2022-2023, OctoSpacc |
|
||||||
| ================================= """
|
| ================================== """
|
||||||
|
|
||||||
import argparse
|
import argparse
|
||||||
import os
|
import os
|
||||||
|
@ -33,7 +33,7 @@ from Libs import rcssmin
|
||||||
cssmin = rcssmin._make_cssmin(python_only=True)
|
cssmin = rcssmin._make_cssmin(python_only=True)
|
||||||
|
|
||||||
def ResetOutDir(OutDir):
|
def ResetOutDir(OutDir):
|
||||||
for e in (OutDir, f"{OutDir}.gmi"):
|
for e in (OutDir, f'{OutDir}.Content', f'{OutDir}.gmi'):
|
||||||
try:
|
try:
|
||||||
shutil.rmtree(e)
|
shutil.rmtree(e)
|
||||||
except FileNotFoundError:
|
except FileNotFoundError:
|
||||||
|
@ -119,7 +119,7 @@ def WriteRedirects(Flags, Pages, FinalPaths, Locale):
|
||||||
StrClick=Locale['ClickHere'],
|
StrClick=Locale['ClickHere'],
|
||||||
StrRedirect=Locale['IfNotRedirected']))
|
StrRedirect=Locale['IfNotRedirected']))
|
||||||
|
|
||||||
def Main(Args, FeedEntries):
|
def BuildMain(Args, FeedEntries):
|
||||||
Flags, Snippets, FinalPaths = {}, {}, []
|
Flags, Snippets, FinalPaths = {}, {}, []
|
||||||
HavePages, HavePosts = False, False
|
HavePages, HavePosts = False, False
|
||||||
SiteConf = LoadConfFile('Site.ini')
|
SiteConf = LoadConfFile('Site.ini')
|
||||||
|
@ -208,13 +208,15 @@ def Main(Args, FeedEntries):
|
||||||
if os.path.isdir('Pages'):
|
if os.path.isdir('Pages'):
|
||||||
HavePages = True
|
HavePages = True
|
||||||
shutil.copytree('Pages', OutDir, dirs_exist_ok=True)
|
shutil.copytree('Pages', OutDir, dirs_exist_ok=True)
|
||||||
|
shutil.copytree('Pages', f'{OutDir}.Content', dirs_exist_ok=True)
|
||||||
if Flags['GemtextOutput']:
|
if Flags['GemtextOutput']:
|
||||||
shutil.copytree('Pages', f"{OutDir}.gmi", ignore=IgnoreFiles, dirs_exist_ok=True)
|
shutil.copytree('Pages', f'{OutDir}.gmi', ignore=IgnoreFiles, dirs_exist_ok=True)
|
||||||
if os.path.isdir('Posts'):
|
if os.path.isdir('Posts'):
|
||||||
HavePosts = True
|
HavePosts = True
|
||||||
shutil.copytree('Posts', f"{OutDir}/Posts", dirs_exist_ok=True)
|
shutil.copytree('Posts', f'{OutDir}/Posts', dirs_exist_ok=True)
|
||||||
|
shutil.copytree('Posts', f'{OutDir}.Content/Posts', dirs_exist_ok=True)
|
||||||
if Flags['GemtextOutput']:
|
if Flags['GemtextOutput']:
|
||||||
shutil.copytree('Posts', f"{OutDir}.gmi/Posts", ignore=IgnoreFiles, dirs_exist_ok=True)
|
shutil.copytree('Posts', f'{OutDir}.gmi/Posts', ignore=IgnoreFiles, dirs_exist_ok=True)
|
||||||
|
|
||||||
if not (HavePages or HavePosts):
|
if not (HavePages or HavePosts):
|
||||||
logging.error("⛔ No Pages or posts found. Nothing to do, exiting!")
|
logging.error("⛔ No Pages or posts found. Nothing to do, exiting!")
|
||||||
|
@ -259,9 +261,12 @@ def Main(Args, FeedEntries):
|
||||||
WriteFile(File, Content)
|
WriteFile(File, Content)
|
||||||
FinalPaths += [File]
|
FinalPaths += [File]
|
||||||
|
|
||||||
logging.debug("Creating Redirects")
|
logging.info("Creating Redirects")
|
||||||
WriteRedirects(Flags, Pages, FinalPaths, Locale)
|
WriteRedirects(Flags, Pages, FinalPaths, Locale)
|
||||||
|
|
||||||
|
logging.info("Building HTML Search Page")
|
||||||
|
WriteFile(f'{OutDir}/Search.html', BuildPagesSearch(Flags, Pages))
|
||||||
|
|
||||||
if Flags['GemtextOutput']:
|
if Flags['GemtextOutput']:
|
||||||
logging.info("Generating Gemtext")
|
logging.info("Generating Gemtext")
|
||||||
GemtextCompileList(Flags, Pages, LimitFiles)
|
GemtextCompileList(Flags, Pages, LimitFiles)
|
||||||
|
@ -289,16 +294,6 @@ def Main(Args, FeedEntries):
|
||||||
else:
|
else:
|
||||||
shutil.copytree('Assets', OutDir, dirs_exist_ok=True)
|
shutil.copytree('Assets', OutDir, dirs_exist_ok=True)
|
||||||
|
|
||||||
#def DoSiteBuild(Arg=None):
|
|
||||||
# #try:
|
|
||||||
# # SiteEditObserver.stop()
|
|
||||||
# # SiteEditObserver.join()
|
|
||||||
# #except:
|
|
||||||
# # pass
|
|
||||||
# Main(Args=Args, FeedEntries=FeedEntries)
|
|
||||||
# logging.info(f"✅ Done! ({round(time.time()-StartTime, 3)}s)")
|
|
||||||
# #SiteEditObserver.start()
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
StartTime = time.time()
|
StartTime = time.time()
|
||||||
|
|
||||||
|
@ -351,26 +346,5 @@ if __name__ == '__main__':
|
||||||
logging.warning("⚠ Can't load the XML libraries. XML Feeds Generation is Disabled. Make sure the 'lxml' library is installed.")
|
logging.warning("⚠ Can't load the XML libraries. XML Feeds Generation is Disabled. Make sure the 'lxml' library is installed.")
|
||||||
FeedEntries = 0
|
FeedEntries = 0
|
||||||
|
|
||||||
#from watchdog.observers import Observer
|
BuildMain(Args=Args, FeedEntries=FeedEntries)
|
||||||
#from watchdog.events import LoggingEventHandler
|
|
||||||
#SiteEditEvent = LoggingEventHandler()
|
|
||||||
#SiteEditEvent.on_created = DoSiteBuild
|
|
||||||
#SiteEditEvent.on_deleted = DoSiteBuild
|
|
||||||
#SiteEditEvent.on_modified = DoSiteBuild
|
|
||||||
#SiteEditEvent.on_moved = DoSiteBuild
|
|
||||||
#SiteEditObserver = Observer()
|
|
||||||
#SiteEditObserver.schedule(SiteEditEvent, ".", recursive=True)
|
|
||||||
#SiteEditObserver.start()
|
|
||||||
|
|
||||||
Main(Args=Args, FeedEntries=FeedEntries)
|
|
||||||
logging.info(f"✅ Done! ({round(time.time()-StartTime, 3)}s)")
|
logging.info(f"✅ Done! ({round(time.time()-StartTime, 3)}s)")
|
||||||
#DoSiteBuild()
|
|
||||||
|
|
||||||
#try:
|
|
||||||
# while True:
|
|
||||||
# pass
|
|
||||||
#except KeyboardInterrupt:
|
|
||||||
# logging.info("Stopped.")
|
|
||||||
#finally:
|
|
||||||
# SiteEditObserver.stop()
|
|
||||||
# SiteEditObserver.join()
|
|
||||||
|
|
|
@ -1,11 +1,11 @@
|
||||||
""" ================================= |
|
""" ================================== |
|
||||||
| This file is part of |
|
| This file is part of |
|
||||||
| staticoso |
|
| staticoso |
|
||||||
| Just a simple Static Site Generator |
|
| Just a simple Static Site Generator |
|
||||||
| |
|
| |
|
||||||
| Licensed under the AGPLv3 license |
|
| Licensed under the AGPLv3 license |
|
||||||
| Copyright (C) 2022, OctoSpacc |
|
| Copyright (C) 2022-2023, OctoSpacc |
|
||||||
| ================================= """
|
| ================================== """
|
||||||
|
|
||||||
import time
|
import time
|
||||||
from Libs.dateutil.parser import parse as date_parse
|
from Libs.dateutil.parser import parse as date_parse
|
||||||
|
|
|
@ -1,11 +1,11 @@
|
||||||
""" ================================= |
|
""" ================================== |
|
||||||
| This file is part of |
|
| This file is part of |
|
||||||
| staticoso |
|
| staticoso |
|
||||||
| Just a simple Static Site Generator |
|
| Just a simple Static Site Generator |
|
||||||
| |
|
| |
|
||||||
| Licensed under the AGPLv3 license |
|
| Licensed under the AGPLv3 license |
|
||||||
| Copyright (C) 2022, OctoSpacc |
|
| Copyright (C) 2022-2023, OctoSpacc |
|
||||||
| ================================= """
|
| ================================== """
|
||||||
|
|
||||||
import configparser
|
import configparser
|
||||||
from ast import literal_eval
|
from ast import literal_eval
|
||||||
|
|
|
@ -1,11 +1,11 @@
|
||||||
""" ================================= |
|
""" ================================== |
|
||||||
| This file is part of |
|
| This file is part of |
|
||||||
| staticoso |
|
| staticoso |
|
||||||
| Just a simple Static Site Generator |
|
| Just a simple Static Site Generator |
|
||||||
| |
|
| |
|
||||||
| Licensed under the AGPLv3 license |
|
| Licensed under the AGPLv3 license |
|
||||||
| Copyright (C) 2022, OctoSpacc |
|
| Copyright (C) 2022-2023, OctoSpacc |
|
||||||
| ================================= """
|
| ================================== """
|
||||||
|
|
||||||
from base64 import b64encode
|
from base64 import b64encode
|
||||||
from Modules.Globals import *
|
from Modules.Globals import *
|
||||||
|
@ -180,12 +180,12 @@ def MakeHTMLJournal(Flags, Locale, FilePath, HTML):
|
||||||
Redirect = f"""<meta http-equiv="refresh" content="0; url='./{FileName}'">""" if Flags["JournalRedirect"] else ''
|
Redirect = f"""<meta http-equiv="refresh" content="0; url='./{FileName}'">""" if Flags["JournalRedirect"] else ''
|
||||||
|
|
||||||
# Instead of copying stuff from the full page, for now we use dedicated title, header, footer, and pagination
|
# Instead of copying stuff from the full page, for now we use dedicated title, header, footer, and pagination
|
||||||
Title = t.attrs["journaltitle"] if 'journaltitle' in t.attrs else f'"{StripExt(FileName)}" Journal - {Flags["SiteName"]}' if Flags["SiteName"] else f'"{StripExt(FileName)}" Journal'
|
Title = t.attrs['journaltitle'] if 'journaltitle' in t.attrs else f'"{StripExt(FileName)}" Journal - {Flags["SiteName"]}' if Flags["SiteName"] else f'"{StripExt(FileName)}" Journal'
|
||||||
FeedLink = f"""<a title="Journal Atom Feed" href="https://journal.miso.town/atom?url={URL}" target="_blank" rel="noopener"><img width="88" height="31" alt="Journal Atom Feed" title="Journal Atom Feed" src="data:image/png;base64,{b64encode(ReadFile(staticosoBaseDir()+'Assets/Feed-88x31.png', 'rb')).decode()}"></a>""" if Flags["SiteDomain"] else ''
|
FeedLink = f"""<a title="Journal Atom Feed" href="https://journal.miso.town/atom?url={URL}" target="_blank" rel="noopener"><img width="88" height="31" alt="Journal Atom Feed" title="Journal Atom Feed" src="data:image/png;base64,{b64encode(ReadFile(staticosoBaseDir()+'Assets/ThirdParty/Feed-88x31.png', 'rb')).decode()}"></a>""" if Flags['SiteDomain'] else ''
|
||||||
Header = t.attrs["journalheader"] if 'journalheader' in t.attrs else f"""\
|
Header = t.attrs['journalheader'] if 'journalheader' in t.attrs else f"""\
|
||||||
<p>
|
<p>
|
||||||
<i>{Locale["StrippedDownNotice"].format(Link="./"+FileName)}</i>
|
<i>{Locale["StrippedDownNotice"].format(Link="./"+FileName)}</i>
|
||||||
<a title="Valid HTML Journal" href="https://journal.miso.town" target="_blank" rel="noopener"><img alt="Valid HTML Journal" title="Valid HTML Journal" width="88" height="31" src="data:image/png;base64,{b64encode(ReadFile(staticosoBaseDir()+'Assets/Valid-HTML-Journal-88x31.png', 'rb')).decode()}"></a>
|
<a title="Valid HTML Journal" href="https://journal.miso.town" target="_blank" rel="noopener"><img alt="Valid HTML Journal" title="Valid HTML Journal" width="88" height="31" src="data:image/png;base64,{b64encode(ReadFile(staticosoBaseDir()+'Assets/ThirdParty/Valid-HTML-Journal-88x31.png', 'rb')).decode()}"></a>
|
||||||
{FeedLink}
|
{FeedLink}
|
||||||
</p>
|
</p>
|
||||||
"""
|
"""
|
||||||
|
|
|
@ -1,11 +1,11 @@
|
||||||
""" ================================= |
|
""" ================================== |
|
||||||
| This file is part of |
|
| This file is part of |
|
||||||
| staticoso |
|
| staticoso |
|
||||||
| Just a simple Static Site Generator |
|
| Just a simple Static Site Generator |
|
||||||
| |
|
| |
|
||||||
| Licensed under the AGPLv3 license |
|
| Licensed under the AGPLv3 license |
|
||||||
| Copyright (C) 2022, OctoSpacc |
|
| Copyright (C) 2022-2023, OctoSpacc |
|
||||||
| ================================= """
|
| ================================== """
|
||||||
|
|
||||||
# TODO: Either switch feed generation lib, or rewrite the 'lxml' module, so that no modules have to be compiled and the program is 100% portable
|
# TODO: Either switch feed generation lib, or rewrite the 'lxml' module, so that no modules have to be compiled and the program is 100% portable
|
||||||
|
|
||||||
|
|
|
@ -1,11 +1,11 @@
|
||||||
""" ================================= |
|
""" ================================== |
|
||||||
| This file is part of |
|
| This file is part of |
|
||||||
| staticoso |
|
| staticoso |
|
||||||
| Just a simple Static Site Generator |
|
| Just a simple Static Site Generator |
|
||||||
| |
|
| |
|
||||||
| Licensed under the AGPLv3 license |
|
| Licensed under the AGPLv3 license |
|
||||||
| Copyright (C) 2022, OctoSpacc |
|
| Copyright (C) 2022-2023, OctoSpacc |
|
||||||
| ================================= """
|
| ================================== """
|
||||||
|
|
||||||
# TODO: Write the Python HTML2Gemtext converter
|
# TODO: Write the Python HTML2Gemtext converter
|
||||||
|
|
||||||
|
|
|
@ -1,11 +1,11 @@
|
||||||
""" ================================= |
|
""" ================================== |
|
||||||
| This file is part of |
|
| This file is part of |
|
||||||
| staticoso |
|
| staticoso |
|
||||||
| Just a simple Static Site Generator |
|
| Just a simple Static Site Generator |
|
||||||
| |
|
| |
|
||||||
| Licensed under the AGPLv3 license |
|
| Licensed under the AGPLv3 license |
|
||||||
| Copyright (C) 2022, OctoSpacc |
|
| Copyright (C) 2022-2023, OctoSpacc |
|
||||||
| ================================= """
|
| ================================== """
|
||||||
|
|
||||||
ReservedPaths = ('Site.ini', 'Assets', 'Pages', 'Posts', 'Templates', 'StaticParts', 'DynamicParts')
|
ReservedPaths = ('Site.ini', 'Assets', 'Pages', 'Posts', 'Templates', 'StaticParts', 'DynamicParts')
|
||||||
FileExtensions = {
|
FileExtensions = {
|
||||||
|
|
|
@ -1,11 +1,11 @@
|
||||||
""" ================================= |
|
""" ================================== |
|
||||||
| This file is part of |
|
| This file is part of |
|
||||||
| staticoso |
|
| staticoso |
|
||||||
| Just a simple Static Site Generator |
|
| Just a simple Static Site Generator |
|
||||||
| |
|
| |
|
||||||
| Licensed under the AGPLv3 license |
|
| Licensed under the AGPLv3 license |
|
||||||
| Copyright (C) 2022, OctoSpacc |
|
| Copyright (C) 2022-2023, OctoSpacc |
|
||||||
| ================================= """
|
| ================================== """
|
||||||
|
|
||||||
import html
|
import html
|
||||||
import warnings
|
import warnings
|
||||||
|
|
|
@ -1,11 +1,11 @@
|
||||||
""" ================================= |
|
""" ================================== |
|
||||||
| This file is part of |
|
| This file is part of |
|
||||||
| staticoso |
|
| staticoso |
|
||||||
| Just a simple Static Site Generator |
|
| Just a simple Static Site Generator |
|
||||||
| |
|
| |
|
||||||
| Licensed under the AGPLv3 license |
|
| Licensed under the AGPLv3 license |
|
||||||
| Copyright (C) 2022, OctoSpacc |
|
| Copyright (C) 2022-2023, OctoSpacc |
|
||||||
| ================================= """
|
| ================================== """
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
import sys
|
import sys
|
||||||
|
|
|
@ -1,11 +1,11 @@
|
||||||
""" ================================= |
|
""" ================================== |
|
||||||
| This file is part of |
|
| This file is part of |
|
||||||
| staticoso |
|
| staticoso |
|
||||||
| Just a simple Static Site Generator |
|
| Just a simple Static Site Generator |
|
||||||
| |
|
| |
|
||||||
| Licensed under the AGPLv3 license |
|
| Licensed under the AGPLv3 license |
|
||||||
| Copyright (C) 2022, OctoSpacc |
|
| Copyright (C) 2022-2023, OctoSpacc |
|
||||||
| ================================= """
|
| ================================== """
|
||||||
|
|
||||||
from Libs.markdown import markdown
|
from Libs.markdown import markdown
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,289 @@
|
||||||
|
""" ================================== |
|
||||||
|
| This file is part of |
|
||||||
|
| staticoso |
|
||||||
|
| Just a simple Static Site Generator |
|
||||||
|
| |
|
||||||
|
| Licensed under the AGPLv3 license |
|
||||||
|
| Copyright (C) 2022-2023, OctoSpacc |
|
||||||
|
| ================================== """
|
||||||
|
|
||||||
|
from Modules.Config import *
|
||||||
|
from Modules.Elements import *
|
||||||
|
from Modules.HTML import *
|
||||||
|
from Modules.Markdown import *
|
||||||
|
from Modules.Utils import *
|
||||||
|
|
||||||
|
# Menu styles:
|
||||||
|
# - Simple: Default, Flat, Line
|
||||||
|
# - Others: Excerpt, Image, Preview (Excerpt + Image), Full
|
||||||
|
def GetHTMLPagesList(Pages:list, BlogName:str, SiteRoot:str, PathPrefix:str, CallbackFile=None, Unite=[], Type=None, Limit=None, PathFilter='', Category=None, For='Menu', MarkdownExts=(), MenuStyle='Default', ShowPaths=True):
|
||||||
|
Flatten, SingleLine, DoneCount, PrevDepth = False, False, 0, 0
|
||||||
|
if MenuStyle == 'Flat':
|
||||||
|
Flatten = True
|
||||||
|
elif MenuStyle == 'Line':
|
||||||
|
ShowPaths, SingleLine = False, True
|
||||||
|
List, ToPop, LastParent = '', [], []
|
||||||
|
IndexPages = Pages.copy()
|
||||||
|
for e in IndexPages:
|
||||||
|
if e[3]['Index'].lower() in PageIndexStrNeg:
|
||||||
|
IndexPages.remove(e)
|
||||||
|
for i,e in enumerate(IndexPages):
|
||||||
|
if Type and e[3]['Type'] != Type:
|
||||||
|
ToPop += [i]
|
||||||
|
ToPop = RevSort(ToPop)
|
||||||
|
for i in ToPop:
|
||||||
|
IndexPages.pop(i)
|
||||||
|
if Type == 'Page':
|
||||||
|
IndexPages = OrderPages(IndexPages)
|
||||||
|
for i,e in enumerate(Unite):
|
||||||
|
if e:
|
||||||
|
IndexPages.insert(i, [e, None, None, {'Type':Type, 'Index':'True', 'Order':'Unite'}])
|
||||||
|
for File, Content, Titles, Meta in IndexPages:
|
||||||
|
# Allow for the virtual "Pages/" prefix to be used in path filtering
|
||||||
|
TmpPathFilter = PathFilter
|
||||||
|
if TmpPathFilter.startswith('Pages/'):
|
||||||
|
TmpPathFilter = TmpPathFilter[len('Pages/'):]
|
||||||
|
if File.startswith('Posts/'):
|
||||||
|
continue
|
||||||
|
|
||||||
|
if (not Type or (Meta['Type'] == Type and CanIndex(Meta['Index'], For))) and (not Category or Category in Meta['Categories']) and File.startswith(TmpPathFilter) and File != CallbackFile and (not Limit or Limit > DoneCount):
|
||||||
|
Depth = (File.count('/') + 1) if Meta['Order'] != 'Unite' else 1
|
||||||
|
# Folder names are handled here
|
||||||
|
if Depth > 1 and Meta['Order'] != 'Unite':
|
||||||
|
CurParent = File.split('/')[:-1]
|
||||||
|
for i,s in enumerate(CurParent):
|
||||||
|
if LastParent != CurParent and ShowPaths:
|
||||||
|
LastParent = CurParent
|
||||||
|
Levels = '.' * ((Depth-2+i) if not Flatten else 0) + ':'
|
||||||
|
# If search node endswith index, it's a page; else, it's a folder
|
||||||
|
if StripExt(File).endswith('index'):
|
||||||
|
Title = MakeListTitle(File, Meta, Titles, 'HTMLTitle', BlogName, PathPrefix)
|
||||||
|
DoneCount += 1
|
||||||
|
else:
|
||||||
|
Title = CurParent[Depth-2+i]
|
||||||
|
if SingleLine:
|
||||||
|
List += f' <span>{Title}</span> '
|
||||||
|
else:
|
||||||
|
List += f'{Levels}<span>{Title}</span>\n'
|
||||||
|
|
||||||
|
# Pages with any other path
|
||||||
|
if not (Depth > 1 and StripExt(File).split('/')[-1] == 'index'):
|
||||||
|
Levels = '.' * ((Depth-1) if not Flatten else 0) + ':'
|
||||||
|
DoneCount += 1
|
||||||
|
if Meta['Order'] == 'Unite':
|
||||||
|
Title = markdown(MarkdownHTMLEscape(File, MarkdownExts), extensions=MarkdownExts).removeprefix('<p>').removesuffix('<p>')
|
||||||
|
else:
|
||||||
|
Title = MakeListTitle(File, Meta, Titles, 'HTMLTitle', BlogName, PathPrefix)
|
||||||
|
if SingleLine:
|
||||||
|
List += ' <span>' + Title + '</span> '
|
||||||
|
else:
|
||||||
|
List += Levels + Title + '\n'
|
||||||
|
|
||||||
|
if MenuStyle in ('Default', 'Flat'):
|
||||||
|
return GenHTMLTreeList(List, Class="staticoso-PagesList")
|
||||||
|
elif MenuStyle in ('Line', 'Excerpt', 'Image', 'Preview', 'Full'):
|
||||||
|
return List
|
||||||
|
|
||||||
|
def CheckHTMLCommentLine(Line:str):
|
||||||
|
if Line.startswith('<!--'):
|
||||||
|
Line = Line[4:].lstrip()
|
||||||
|
if Line.endswith('-->'):
|
||||||
|
return Line
|
||||||
|
return None
|
||||||
|
|
||||||
|
def TemplatePreprocessor(Text:str):
|
||||||
|
Meta, MetaDefault = '', {
|
||||||
|
'MenuStyle': 'Default'}
|
||||||
|
for l in Text.splitlines():
|
||||||
|
ll = l.lstrip().rstrip()
|
||||||
|
lll = CheckHTMLCommentLine(ll)
|
||||||
|
if lll:
|
||||||
|
if lll.startswith('%'):
|
||||||
|
Meta += lll[1:-3].lstrip().rstrip() + '\n'
|
||||||
|
Meta = dict(ReadConf(LoadConfStr('[Meta]\n' + Meta), 'Meta'))
|
||||||
|
for i in MetaDefault:
|
||||||
|
if not i in Meta:
|
||||||
|
Meta.update({i:MetaDefault[i]})
|
||||||
|
return Meta
|
||||||
|
|
||||||
|
def FindPreprocLine(Line:str, Meta, Macros):
|
||||||
|
Changed = False
|
||||||
|
Line = Line.lstrip().rstrip()
|
||||||
|
lll = CheckHTMLCommentLine(Line)
|
||||||
|
if Line.startswith('//') or lll: # Find preprocessor lines
|
||||||
|
lll = Line[2:].lstrip()
|
||||||
|
if lll.startswith('%'):
|
||||||
|
Meta += lll[1:].lstrip() + '\n'
|
||||||
|
Changed = True
|
||||||
|
elif lll.startswith('$'):
|
||||||
|
Macros += lll[1:].lstrip() + '\n'
|
||||||
|
Changed = True
|
||||||
|
#if ll.startswith('<!--') and not ll.endswith('-->'): # Find comment and code blocks
|
||||||
|
# IgnoreBlocksStart += [l]
|
||||||
|
return (Meta, Macros, Changed)
|
||||||
|
|
||||||
|
def PagePreprocessor(Path:str, TempPath:str, Type:str, SiteTemplate, SiteRoot, GlobalMacros, CategoryUncategorized, LightRun:bool=False, Content=None):
|
||||||
|
File = ReadFile(Path) if not Content else Content
|
||||||
|
Path = Path.lower()
|
||||||
|
Content, Titles, DashyTitles, HTMLTitlesFound, Macros, Meta, MetaDefault = '', [], [], False, '', '', {
|
||||||
|
'Template': SiteTemplate,
|
||||||
|
'Head': '',
|
||||||
|
'Style': '',
|
||||||
|
'Type': Type,
|
||||||
|
'Index': 'Unspecified',
|
||||||
|
'Feed': 'True',
|
||||||
|
'Title': '',
|
||||||
|
'HTMLTitle': '',
|
||||||
|
'Description': '',
|
||||||
|
'Image': '',
|
||||||
|
'Macros': {},
|
||||||
|
'Categories': [],
|
||||||
|
'URLs': [],
|
||||||
|
'CreatedOn': '',
|
||||||
|
'UpdatedOn': '',
|
||||||
|
'EditedOn': '',
|
||||||
|
'Order': None,
|
||||||
|
'Language': None,
|
||||||
|
'Downsync': None}
|
||||||
|
# Find all positions of '<!--', '-->', add them in a list=[[pos0,pos1,line0,line1],...]
|
||||||
|
for l in File.splitlines():
|
||||||
|
ll = l.lstrip().rstrip()
|
||||||
|
Meta, Macros, Changed = FindPreprocLine(ll, Meta, Macros)
|
||||||
|
if not Changed: # Find headings
|
||||||
|
#if line in ignore block:
|
||||||
|
# continue
|
||||||
|
Headings = ('h1', 'h2', 'h3', 'h4', 'h5', 'h6')
|
||||||
|
#if Path.endswith(FileExtensions['HTML']):
|
||||||
|
# if ll[1:].startswith(Headings):
|
||||||
|
# if ll[3:].startswith((" class='NoTitle", ' class="NoTitle')):
|
||||||
|
# Content += l + '\n'
|
||||||
|
# elif ll.replace(' ', ' ').startswith('// %'):
|
||||||
|
# pass
|
||||||
|
# else:
|
||||||
|
# Title = '#'*int(ll[2]) + ' ' + ll[4:]
|
||||||
|
# DashTitle = DashifyTitle(Title.lstrip('#'), DashyTitles)
|
||||||
|
# DashyTitles += [DashTitle]
|
||||||
|
# Titles += [Title]
|
||||||
|
# Content += MakeLinkableTitle(l, Title, DashTitle, 'pug') + '\n'
|
||||||
|
# else:
|
||||||
|
# Content += l + '\n'
|
||||||
|
if Path.endswith(FileExtensions['HTML']) and not HTMLTitlesFound:
|
||||||
|
Soup = MkSoup(File)
|
||||||
|
Tags = Soup.find_all()
|
||||||
|
for t in Tags:
|
||||||
|
if t.name in Headings:
|
||||||
|
Title = '#'*int(t.name[1]) + ' ' + str(t.text)
|
||||||
|
DashTitle = DashifyTitle(Title.lstrip('#'), DashyTitles)
|
||||||
|
DashyTitles += [DashTitle]
|
||||||
|
Titles += [Title]
|
||||||
|
t.replace_with(MakeLinkableTitle(None, Title, DashTitle, 'md'))
|
||||||
|
HTMLTitlesFound = True
|
||||||
|
Content = ''
|
||||||
|
TmpContent = str(Soup.prettify(formatter=None))
|
||||||
|
for cl in TmpContent.splitlines():
|
||||||
|
_, _, IsMetaLine = FindPreprocLine(cl, Meta, Macros)
|
||||||
|
if not IsMetaLine:
|
||||||
|
#print(cl)
|
||||||
|
Content += cl + '\n'
|
||||||
|
break
|
||||||
|
elif Path.endswith(FileExtensions['Markdown']):
|
||||||
|
lsuffix = ''
|
||||||
|
if ll.startswith(('-', '+', '*')):
|
||||||
|
lsuffix += ll[0]
|
||||||
|
ll = ll[1:].lstrip()
|
||||||
|
if ll.startswith('#') or (ll.startswith('<') and ll[1:].startswith(Headings)):
|
||||||
|
if ll.startswith('#'):
|
||||||
|
Title = ll
|
||||||
|
elif ll.startswith('<'):
|
||||||
|
if ll[3:].startswith((" class='NoTitle", ' class="NoTitle')):
|
||||||
|
Content += l + '\n'
|
||||||
|
continue
|
||||||
|
else:
|
||||||
|
Title = '#'*int(ll[2]) + ' ' + ll[4:]
|
||||||
|
DashTitle = DashifyTitle(MkSoup(Title.lstrip('#')).get_text(), DashyTitles)
|
||||||
|
DashyTitles += [DashTitle]
|
||||||
|
Titles += [Title]
|
||||||
|
Title = MakeLinkableTitle(None, Title, DashTitle, 'md')
|
||||||
|
# I can't remember why I put this but it was needed
|
||||||
|
Title = Title.replace('> </', '> </').replace(' </', '</')
|
||||||
|
Content += lsuffix + Title + '\n'
|
||||||
|
else:
|
||||||
|
Content += l + '\n'
|
||||||
|
elif Path.endswith('.pug'):
|
||||||
|
if ll.startswith(Headings):
|
||||||
|
if ll[2:].startswith(("(class='NoTitle", '(class="NoTitle')):
|
||||||
|
Content += l + '\n'
|
||||||
|
else:
|
||||||
|
Title = '#'*int(ll[1]) + ll[3:]
|
||||||
|
DashTitle = DashifyTitle(Title.lstrip('#'), DashyTitles)
|
||||||
|
DashyTitles += [DashTitle]
|
||||||
|
Titles += [Title]
|
||||||
|
# TODO: We should handle headers that for any reason already have parenthesis
|
||||||
|
if ll[2:] == '(':
|
||||||
|
Content += l + '\n'
|
||||||
|
else:
|
||||||
|
Content += MakeLinkableTitle(l, Title, DashTitle, 'pug') + '\n'
|
||||||
|
else:
|
||||||
|
Content += l + '\n'
|
||||||
|
elif Path.endswith('.txt'):
|
||||||
|
Content += l + '\n'
|
||||||
|
Meta = dict(ReadConf(LoadConfStr('[Meta]\n' + Meta), 'Meta'))
|
||||||
|
for i in MetaDefault:
|
||||||
|
if i in Meta:
|
||||||
|
# TODO: Handle strings with spaces but wrapped in quotes
|
||||||
|
if i == 'Categories':
|
||||||
|
Categories = Meta['Categories'].split(' ')
|
||||||
|
Meta['Categories'] = []
|
||||||
|
for j in Categories:
|
||||||
|
Meta['Categories'] += [j]
|
||||||
|
elif i == 'URLs':
|
||||||
|
URLs = Meta['URLs'].split(' ')
|
||||||
|
Meta['URLs'] = []
|
||||||
|
for j in URLs:
|
||||||
|
Meta['URLs'] += [j]
|
||||||
|
else:
|
||||||
|
Meta.update({i:MetaDefault[i]})
|
||||||
|
if Meta['UpdatedOn']:
|
||||||
|
Meta['EditedOn'] = Meta['UpdatedOn']
|
||||||
|
if Meta['Index'].lower() in ('default', 'unspecified', 'categories'):
|
||||||
|
if not Meta['Categories']:
|
||||||
|
Meta['Categories'] = [CategoryUncategorized]
|
||||||
|
if Meta['Type'].lower() == 'page':
|
||||||
|
Meta['Index'] = 'Categories'
|
||||||
|
elif Meta['Type'].lower() == 'post':
|
||||||
|
Meta['Index'] = 'True'
|
||||||
|
if GlobalMacros:
|
||||||
|
Meta['Macros'].update(GlobalMacros)
|
||||||
|
Meta['Macros'].update(ReadConf(LoadConfStr('[Macros]\n' + Macros), 'Macros'))
|
||||||
|
return [TempPath, Content, Titles, Meta]
|
||||||
|
|
||||||
|
def PagePostprocessor(FileType, Text:str, Meta:dict):
|
||||||
|
for e in Meta['Macros']:
|
||||||
|
Text = ReplWithEsc(Text, f"[: {e} :]", f"[:{e}:]")
|
||||||
|
return Text
|
||||||
|
|
||||||
|
def OrderPages(Old:list):
|
||||||
|
New, NoOrder, Max = [], [], 0
|
||||||
|
for i,e in enumerate(Old):
|
||||||
|
Curr = e[3]['Order']
|
||||||
|
if Curr:
|
||||||
|
if int(Curr) > Max:
|
||||||
|
Max = int(Curr)
|
||||||
|
else:
|
||||||
|
NoOrder += [e]
|
||||||
|
New = [None] * (Max+1)
|
||||||
|
for i,e in enumerate(Old):
|
||||||
|
Curr = e[3]['Order']
|
||||||
|
if Curr:
|
||||||
|
New[int(Curr)] = e
|
||||||
|
while None in New:
|
||||||
|
New.remove(None)
|
||||||
|
return New + NoOrder
|
||||||
|
|
||||||
|
def CanIndex(Index:str, For:str):
|
||||||
|
if Index.lower() in PageIndexStrNeg:
|
||||||
|
return False
|
||||||
|
elif Index.lower() in PageIndexStrPos:
|
||||||
|
return True
|
||||||
|
else:
|
||||||
|
return True if Index == For else False
|
|
@ -1,11 +1,11 @@
|
||||||
""" ================================= |
|
""" ================================== |
|
||||||
| This file is part of |
|
| This file is part of |
|
||||||
| staticoso |
|
| staticoso |
|
||||||
| Just a simple Static Site Generator |
|
| Just a simple Static Site Generator |
|
||||||
| |
|
| |
|
||||||
| Licensed under the AGPLv3 license |
|
| Licensed under the AGPLv3 license |
|
||||||
| Copyright (C) 2022, OctoSpacc |
|
| Copyright (C) 2022-2023, OctoSpacc |
|
||||||
| ================================= """
|
| ================================== """
|
||||||
|
|
||||||
# TODO: Write a native Pug parser; There is one already available for Python but seems broken / out-of-date
|
# TODO: Write a native Pug parser; There is one already available for Python but seems broken / out-of-date
|
||||||
|
|
||||||
|
|
|
@ -1,300 +1,25 @@
|
||||||
""" ================================= |
|
""" ================================== |
|
||||||
| This file is part of |
|
| This file is part of |
|
||||||
| staticoso |
|
| staticoso |
|
||||||
| Just a simple Static Site Generator |
|
| Just a simple Static Site Generator |
|
||||||
| |
|
| |
|
||||||
| Licensed under the AGPLv3 license |
|
| Licensed under the AGPLv3 license |
|
||||||
| Copyright (C) 2022, OctoSpacc |
|
| Copyright (C) 2022-2023, OctoSpacc |
|
||||||
| ================================= """
|
| ================================== """
|
||||||
|
|
||||||
import shutil
|
import shutil
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from multiprocessing import Pool, cpu_count
|
from multiprocessing import Pool, cpu_count
|
||||||
from Libs.bs4 import BeautifulSoup
|
|
||||||
from Modules.Config import *
|
from Modules.Config import *
|
||||||
from Modules.Elements import *
|
from Modules.Elements import *
|
||||||
from Modules.Globals import *
|
from Modules.Globals import *
|
||||||
from Modules.HTML import *
|
from Modules.HTML import *
|
||||||
from Modules.Logging import *
|
from Modules.Logging import *
|
||||||
from Modules.Markdown import *
|
from Modules.Markdown import *
|
||||||
|
from Modules.Meta import *
|
||||||
from Modules.Pug import *
|
from Modules.Pug import *
|
||||||
from Modules.Utils import *
|
from Modules.Utils import *
|
||||||
|
|
||||||
# Menu styles:
|
|
||||||
# - Simple: Default, Flat, Line
|
|
||||||
# - Others: Excerpt, Image, Preview (Excerpt + Image), Full
|
|
||||||
def GetHTMLPagesList(Pages, BlogName, SiteRoot, PathPrefix, CallbackFile=None, Unite=[], Type=None, Limit=None, PathFilter='', Category=None, For='Menu', MarkdownExts=(), MenuStyle='Default', ShowPaths=True):
|
|
||||||
Flatten, SingleLine, DoneCount, PrevDepth = False, False, 0, 0
|
|
||||||
if MenuStyle == 'Flat':
|
|
||||||
Flatten = True
|
|
||||||
elif MenuStyle == 'Line':
|
|
||||||
ShowPaths, SingleLine = False, True
|
|
||||||
List, ToPop, LastParent = '', [], []
|
|
||||||
IndexPages = Pages.copy()
|
|
||||||
for e in IndexPages:
|
|
||||||
if e[3]['Index'].lower() in PageIndexStrNeg:
|
|
||||||
IndexPages.remove(e)
|
|
||||||
for i,e in enumerate(IndexPages):
|
|
||||||
if Type and e[3]['Type'] != Type:
|
|
||||||
ToPop += [i]
|
|
||||||
ToPop = RevSort(ToPop)
|
|
||||||
for i in ToPop:
|
|
||||||
IndexPages.pop(i)
|
|
||||||
if Type == 'Page':
|
|
||||||
IndexPages = OrderPages(IndexPages)
|
|
||||||
for i,e in enumerate(Unite):
|
|
||||||
if e:
|
|
||||||
IndexPages.insert(i, [e, None, None, {'Type':Type, 'Index':'True', 'Order':'Unite'}])
|
|
||||||
for File, Content, Titles, Meta in IndexPages:
|
|
||||||
# Allow for the virtual "Pages/" prefix to be used in path filtering
|
|
||||||
TmpPathFilter = PathFilter
|
|
||||||
if TmpPathFilter.startswith('Pages/'):
|
|
||||||
TmpPathFilter = TmpPathFilter[len('Pages/'):]
|
|
||||||
if File.startswith('Posts/'):
|
|
||||||
continue
|
|
||||||
|
|
||||||
if (not Type or (Meta['Type'] == Type and CanIndex(Meta['Index'], For))) and (not Category or Category in Meta['Categories']) and File.startswith(TmpPathFilter) and File != CallbackFile and (not Limit or Limit > DoneCount):
|
|
||||||
Depth = (File.count('/') + 1) if Meta['Order'] != 'Unite' else 1
|
|
||||||
# Folder names are handled here
|
|
||||||
if Depth > 1 and Meta['Order'] != 'Unite':
|
|
||||||
CurParent = File.split('/')[:-1]
|
|
||||||
for i,s in enumerate(CurParent):
|
|
||||||
if LastParent != CurParent and ShowPaths:
|
|
||||||
LastParent = CurParent
|
|
||||||
Levels = '.' * ((Depth-2+i) if not Flatten else 0) + ':'
|
|
||||||
# If search node endswith index, it's a page; else, it's a folder
|
|
||||||
if StripExt(File).endswith('index'):
|
|
||||||
Title = MakeListTitle(File, Meta, Titles, 'HTMLTitle', BlogName, PathPrefix)
|
|
||||||
DoneCount += 1
|
|
||||||
else:
|
|
||||||
Title = CurParent[Depth-2+i]
|
|
||||||
if SingleLine:
|
|
||||||
List += f' <span>{Title}</span> '
|
|
||||||
else:
|
|
||||||
List += f'{Levels}<span>{Title}</span>\n'
|
|
||||||
|
|
||||||
# Pages with any other path
|
|
||||||
if not (Depth > 1 and StripExt(File).split('/')[-1] == 'index'):
|
|
||||||
Levels = '.' * ((Depth-1) if not Flatten else 0) + ':'
|
|
||||||
DoneCount += 1
|
|
||||||
if Meta['Order'] == 'Unite':
|
|
||||||
Title = markdown(MarkdownHTMLEscape(File, MarkdownExts), extensions=MarkdownExts).removeprefix('<p>').removesuffix('<p>')
|
|
||||||
else:
|
|
||||||
Title = MakeListTitle(File, Meta, Titles, 'HTMLTitle', BlogName, PathPrefix)
|
|
||||||
if SingleLine:
|
|
||||||
List += ' <span>' + Title + '</span> '
|
|
||||||
else:
|
|
||||||
List += Levels + Title + '\n'
|
|
||||||
|
|
||||||
if MenuStyle in ('Default', 'Flat'):
|
|
||||||
return GenHTMLTreeList(List, Class="staticoso-PagesList")
|
|
||||||
elif MenuStyle in ('Line', 'Excerpt', 'Image', 'Preview', 'Full'):
|
|
||||||
return List
|
|
||||||
|
|
||||||
def CheckHTMLCommentLine(Line):
|
|
||||||
if Line.startswith('<!--'):
|
|
||||||
Line = Line[4:].lstrip()
|
|
||||||
if Line.endswith('-->'):
|
|
||||||
return Line
|
|
||||||
return None
|
|
||||||
|
|
||||||
def TemplatePreprocessor(Text):
|
|
||||||
Meta, MetaDefault = '', {
|
|
||||||
'MenuStyle': 'Default'}
|
|
||||||
for l in Text.splitlines():
|
|
||||||
ll = l.lstrip().rstrip()
|
|
||||||
lll = CheckHTMLCommentLine(ll)
|
|
||||||
if lll:
|
|
||||||
if lll.startswith('%'):
|
|
||||||
Meta += lll[1:-3].lstrip().rstrip() + '\n'
|
|
||||||
Meta = dict(ReadConf(LoadConfStr('[Meta]\n' + Meta), 'Meta'))
|
|
||||||
for i in MetaDefault:
|
|
||||||
if not i in Meta:
|
|
||||||
Meta.update({i:MetaDefault[i]})
|
|
||||||
return Meta
|
|
||||||
|
|
||||||
def FindPreprocLine(Line, Meta, Macros):
|
|
||||||
Changed = False
|
|
||||||
Line = Line.lstrip().rstrip()
|
|
||||||
lll = CheckHTMLCommentLine(Line)
|
|
||||||
if Line.startswith('//') or lll: # Find preprocessor lines
|
|
||||||
lll = Line[2:].lstrip()
|
|
||||||
if lll.startswith('%'):
|
|
||||||
Meta += lll[1:].lstrip() + '\n'
|
|
||||||
Changed = True
|
|
||||||
elif lll.startswith('$'):
|
|
||||||
Macros += lll[1:].lstrip() + '\n'
|
|
||||||
Changed = True
|
|
||||||
#if ll.startswith('<!--') and not ll.endswith('-->'): # Find comment and code blocks
|
|
||||||
# IgnoreBlocksStart += [l]
|
|
||||||
return (Meta, Macros, Changed)
|
|
||||||
|
|
||||||
def PagePreprocessor(Path:str, TempPath:str, Type, SiteTemplate, SiteRoot, GlobalMacros, CategoryUncategorized, LightRun=False):
|
|
||||||
File = ReadFile(Path)
|
|
||||||
Path = Path.lower()
|
|
||||||
Content, Titles, DashyTitles, HTMLTitlesFound, Macros, Meta, MetaDefault = '', [], [], False, '', '', {
|
|
||||||
'Template': SiteTemplate,
|
|
||||||
'Head': '',
|
|
||||||
'Style': '',
|
|
||||||
'Type': Type,
|
|
||||||
'Index': 'Unspecified',
|
|
||||||
'Feed': 'True',
|
|
||||||
'Title': '',
|
|
||||||
'HTMLTitle': '',
|
|
||||||
'Description': '',
|
|
||||||
'Image': '',
|
|
||||||
'Macros': {},
|
|
||||||
'Categories': [],
|
|
||||||
'URLs': [],
|
|
||||||
'CreatedOn': '',
|
|
||||||
'UpdatedOn': '',
|
|
||||||
'EditedOn': '',
|
|
||||||
'Order': None,
|
|
||||||
'Language': None,
|
|
||||||
'Downsync': None}
|
|
||||||
# Find all positions of '<!--', '-->', add them in a list=[[pos0,pos1,line0,line1],...]
|
|
||||||
for l in File.splitlines():
|
|
||||||
ll = l.lstrip().rstrip()
|
|
||||||
Meta, Macros, Changed = FindPreprocLine(ll, Meta, Macros)
|
|
||||||
if not Changed: # Find headings
|
|
||||||
#if line in ignore block:
|
|
||||||
# continue
|
|
||||||
Headings = ('h1', 'h2', 'h3', 'h4', 'h5', 'h6')
|
|
||||||
#if Path.endswith(FileExtensions['HTML']):
|
|
||||||
# if ll[1:].startswith(Headings):
|
|
||||||
# if ll[3:].startswith((" class='NoTitle", ' class="NoTitle')):
|
|
||||||
# Content += l + '\n'
|
|
||||||
# elif ll.replace(' ', ' ').startswith('// %'):
|
|
||||||
# pass
|
|
||||||
# else:
|
|
||||||
# Title = '#'*int(ll[2]) + ' ' + ll[4:]
|
|
||||||
# DashTitle = DashifyTitle(Title.lstrip('#'), DashyTitles)
|
|
||||||
# DashyTitles += [DashTitle]
|
|
||||||
# Titles += [Title]
|
|
||||||
# Content += MakeLinkableTitle(l, Title, DashTitle, 'pug') + '\n'
|
|
||||||
# else:
|
|
||||||
# Content += l + '\n'
|
|
||||||
if Path.endswith(FileExtensions['HTML']) and not HTMLTitlesFound:
|
|
||||||
Soup = BeautifulSoup(File, 'html.parser')
|
|
||||||
Tags = Soup.find_all()
|
|
||||||
for t in Tags:
|
|
||||||
if t.name in Headings:
|
|
||||||
Title = '#'*int(t.name[1]) + ' ' + str(t.text)
|
|
||||||
DashTitle = DashifyTitle(Title.lstrip('#'), DashyTitles)
|
|
||||||
DashyTitles += [DashTitle]
|
|
||||||
Titles += [Title]
|
|
||||||
t.replace_with(MakeLinkableTitle(None, Title, DashTitle, 'md'))
|
|
||||||
HTMLTitlesFound = True
|
|
||||||
Content = ''
|
|
||||||
TmpContent = str(Soup.prettify(formatter=None))
|
|
||||||
for cl in TmpContent.splitlines():
|
|
||||||
_, _, IsMetaLine = FindPreprocLine(cl, Meta, Macros)
|
|
||||||
if not IsMetaLine:
|
|
||||||
#print(cl)
|
|
||||||
Content += cl + '\n'
|
|
||||||
break
|
|
||||||
elif Path.endswith(FileExtensions['Markdown']):
|
|
||||||
lsuffix = ''
|
|
||||||
if ll.startswith(('-', '+', '*')):
|
|
||||||
lsuffix += ll[0]
|
|
||||||
ll = ll[1:].lstrip()
|
|
||||||
if ll.startswith('#') or (ll.startswith('<') and ll[1:].startswith(Headings)):
|
|
||||||
if ll.startswith('#'):
|
|
||||||
Title = ll
|
|
||||||
elif ll.startswith('<'):
|
|
||||||
if ll[3:].startswith((" class='NoTitle", ' class="NoTitle')):
|
|
||||||
Content += l + '\n'
|
|
||||||
continue
|
|
||||||
else:
|
|
||||||
Title = '#'*int(ll[2]) + ' ' + ll[4:]
|
|
||||||
DashTitle = DashifyTitle(MkSoup(Title.lstrip('#')).get_text(), DashyTitles)
|
|
||||||
DashyTitles += [DashTitle]
|
|
||||||
Titles += [Title]
|
|
||||||
Title = MakeLinkableTitle(None, Title, DashTitle, 'md')
|
|
||||||
# I can't remember why I put this but it was needed
|
|
||||||
Title = Title.replace('> </', '> </').replace(' </', '</')
|
|
||||||
Content += lsuffix + Title + '\n'
|
|
||||||
else:
|
|
||||||
Content += l + '\n'
|
|
||||||
elif Path.endswith('.pug'):
|
|
||||||
if ll.startswith(Headings):
|
|
||||||
if ll[2:].startswith(("(class='NoTitle", '(class="NoTitle')):
|
|
||||||
Content += l + '\n'
|
|
||||||
else:
|
|
||||||
Title = '#'*int(ll[1]) + ll[3:]
|
|
||||||
DashTitle = DashifyTitle(Title.lstrip('#'), DashyTitles)
|
|
||||||
DashyTitles += [DashTitle]
|
|
||||||
Titles += [Title]
|
|
||||||
# TODO: We should handle headers that for any reason already have parenthesis
|
|
||||||
if ll[2:] == '(':
|
|
||||||
Content += l + '\n'
|
|
||||||
else:
|
|
||||||
Content += MakeLinkableTitle(l, Title, DashTitle, 'pug') + '\n'
|
|
||||||
else:
|
|
||||||
Content += l + '\n'
|
|
||||||
elif Path.endswith('.txt'):
|
|
||||||
Content += l + '\n'
|
|
||||||
Meta = dict(ReadConf(LoadConfStr('[Meta]\n' + Meta), 'Meta'))
|
|
||||||
for i in MetaDefault:
|
|
||||||
if i in Meta:
|
|
||||||
# TODO: Handle strings with spaces but wrapped in quotes
|
|
||||||
if i == 'Categories':
|
|
||||||
Categories = Meta['Categories'].split(' ')
|
|
||||||
Meta['Categories'] = []
|
|
||||||
for j in Categories:
|
|
||||||
Meta['Categories'] += [j]
|
|
||||||
elif i == 'URLs':
|
|
||||||
URLs = Meta['URLs'].split(' ')
|
|
||||||
Meta['URLs'] = []
|
|
||||||
for j in URLs:
|
|
||||||
Meta['URLs'] += [j]
|
|
||||||
else:
|
|
||||||
Meta.update({i:MetaDefault[i]})
|
|
||||||
if Meta['UpdatedOn']:
|
|
||||||
Meta['EditedOn'] = Meta['UpdatedOn']
|
|
||||||
if Meta['Index'].lower() in ('default', 'unspecified', 'categories'):
|
|
||||||
if not Meta['Categories']:
|
|
||||||
Meta['Categories'] = [CategoryUncategorized]
|
|
||||||
if Meta['Type'].lower() == 'page':
|
|
||||||
Meta['Index'] = 'Categories'
|
|
||||||
elif Meta['Type'].lower() == 'post':
|
|
||||||
Meta['Index'] = 'True'
|
|
||||||
if GlobalMacros:
|
|
||||||
Meta['Macros'].update(GlobalMacros)
|
|
||||||
Meta['Macros'].update(ReadConf(LoadConfStr('[Macros]\n' + Macros), 'Macros'))
|
|
||||||
return [TempPath, Content, Titles, Meta]
|
|
||||||
|
|
||||||
def PagePostprocessor(FileType, Text:str, Meta:dict):
|
|
||||||
for e in Meta['Macros']:
|
|
||||||
Text = ReplWithEsc(Text, f"[: {e} :]", f"[:{e}:]")
|
|
||||||
return Text
|
|
||||||
|
|
||||||
def OrderPages(Old:list):
|
|
||||||
New, NoOrder, Max = [], [], 0
|
|
||||||
for i,e in enumerate(Old):
|
|
||||||
Curr = e[3]['Order']
|
|
||||||
if Curr:
|
|
||||||
if int(Curr) > Max:
|
|
||||||
Max = int(Curr)
|
|
||||||
else:
|
|
||||||
NoOrder += [e]
|
|
||||||
New = [None] * (Max+1)
|
|
||||||
for i,e in enumerate(Old):
|
|
||||||
Curr = e[3]['Order']
|
|
||||||
if Curr:
|
|
||||||
New[int(Curr)] = e
|
|
||||||
while None in New:
|
|
||||||
New.remove(None)
|
|
||||||
return New + NoOrder
|
|
||||||
|
|
||||||
def CanIndex(Index:str, For:str):
|
|
||||||
if Index.lower() in PageIndexStrNeg:
|
|
||||||
return False
|
|
||||||
elif Index.lower() in PageIndexStrPos:
|
|
||||||
return True
|
|
||||||
else:
|
|
||||||
return True if Index == For else False
|
|
||||||
|
|
||||||
def PatchHTML(File, HTML, StaticPartsText, DynamicParts, DynamicPartsText, HTMLPagesList, PagePath, Content, Titles, Meta, SiteDomain, SiteRoot, SiteName, BlogName, FolderRoots, Categories, SiteLang, Locale, LightRun):
|
def PatchHTML(File, HTML, StaticPartsText, DynamicParts, DynamicPartsText, HTMLPagesList, PagePath, Content, Titles, Meta, SiteDomain, SiteRoot, SiteName, BlogName, FolderRoots, Categories, SiteLang, Locale, LightRun):
|
||||||
HTMLTitles = FormatTitles(Titles)
|
HTMLTitles = FormatTitles(Titles)
|
||||||
BodyDescription, BodyImage = '', ''
|
BodyDescription, BodyImage = '', ''
|
||||||
|
@ -457,7 +182,8 @@ def HandlePage(Flags, Page, Pages, Categories, LimitFiles, Snippets, ConfMenu, L
|
||||||
DynamicParts, DynamicPartsText, StaticPartsText, TemplatesText = Flags['DynamicParts'], Snippets['DynamicParts'], Snippets['StaticParts'], Snippets['Templates']
|
DynamicParts, DynamicPartsText, StaticPartsText, TemplatesText = Flags['DynamicParts'], Snippets['DynamicParts'], Snippets['StaticParts'], Snippets['Templates']
|
||||||
|
|
||||||
FileLower = File.lower()
|
FileLower = File.lower()
|
||||||
PagePath = f"{OutDir}/{StripExt(File)}.html"
|
PagePath = f'{OutDir}/{StripExt(File)}.html'
|
||||||
|
ContentPagePath = f'{OutDir}.Content/{StripExt(File)}.html'
|
||||||
LightRun = False if LimitFiles == False or File in LimitFiles else True
|
LightRun = False if LimitFiles == False or File in LimitFiles else True
|
||||||
|
|
||||||
if FileLower.endswith(FileExtensions['Markdown']):
|
if FileLower.endswith(FileExtensions['Markdown']):
|
||||||
|
@ -466,8 +192,8 @@ def HandlePage(Flags, Page, Pages, Categories, LimitFiles, Snippets, ConfMenu, L
|
||||||
Content = PagePostprocessor('pug', ReadFile(PagePath), Meta)
|
Content = PagePostprocessor('pug', ReadFile(PagePath), Meta)
|
||||||
elif FileLower.endswith(('.txt')):
|
elif FileLower.endswith(('.txt')):
|
||||||
Content = '<pre>' + html.escape(Content) + '</pre>'
|
Content = '<pre>' + html.escape(Content) + '</pre>'
|
||||||
elif FileLower.endswith(FileExtensions['HTML']):
|
#elif FileLower.endswith(FileExtensions['HTML']):
|
||||||
Content = ReadFile(PagePath)
|
# Content = ReadFile(PagePath)
|
||||||
|
|
||||||
if LightRun:
|
if LightRun:
|
||||||
HTMLPagesList = None
|
HTMLPagesList = None
|
||||||
|
@ -557,6 +283,7 @@ def HandlePage(Flags, Page, Pages, Categories, LimitFiles, Snippets, ConfMenu, L
|
||||||
SlimHTML = HTMLPagesList + ContentHTML
|
SlimHTML = HTMLPagesList + ContentHTML
|
||||||
if not LightRun:
|
if not LightRun:
|
||||||
WriteFile(PagePath, HTML)
|
WriteFile(PagePath, HTML)
|
||||||
|
WriteFile(ContentPagePath, ContentHTML)
|
||||||
|
|
||||||
if not LightRun and 'htmljournal' in ContentHTML.lower(): # Avoid extra cycles
|
if not LightRun and 'htmljournal' in ContentHTML.lower(): # Avoid extra cycles
|
||||||
HTML, _, _, _ = PatchHTML(
|
HTML, _, _, _ = PatchHTML(
|
||||||
|
@ -667,9 +394,12 @@ def MakeSite(Flags, LimitFiles, Snippets, ConfMenu, GlobalMacros, Locale, Thread
|
||||||
File = f"Categories/{Cat}.md"
|
File = f"Categories/{Cat}.md"
|
||||||
FilePath = f"{OutDir}/{File}"
|
FilePath = f"{OutDir}/{File}"
|
||||||
WriteFile(FilePath, CategoryPageTemplate.format(Name=Cat))
|
WriteFile(FilePath, CategoryPageTemplate.format(Name=Cat))
|
||||||
_, Content, Titles, Meta = PagePreprocessor(FilePath, FilePath, Type, SiteTemplate, SiteRoot, GlobalMacros, CategoryUncategorized, LightRun=LightRun)
|
_, Content, Titles, Meta = PagePreprocessor(FilePath, FilePath, Type, SiteTemplate, SiteRoot, GlobalMacros, CategoryUncategorized, LightRun=LightRun)
|
||||||
Pages += [[File, Content, Titles, Meta]]
|
Pages += [[File, Content, Titles, Meta]]
|
||||||
|
|
||||||
|
#logging.info("Building the HTML Search Page")
|
||||||
|
#Pages += [PagePreprocessor(Path='Search.html', TempPath='Search.html', Type='Page', SiteTemplate=SiteTemplate, SiteRoot=SiteRoot, GlobalMacros=GlobalMacros, CategoryUncategorized=CategoryUncategorized, LightRun=LightRun, Content=BuildPagesSearch(Flags, Pages))]
|
||||||
|
|
||||||
for i,e in enumerate(ConfMenu):
|
for i,e in enumerate(ConfMenu):
|
||||||
for File, Content, Titles, Meta in Pages:
|
for File, Content, Titles, Meta in Pages:
|
||||||
File = StripExt(File)+'.html'
|
File = StripExt(File)+'.html'
|
||||||
|
|
|
@ -1,19 +1,37 @@
|
||||||
""" ================================= |
|
""" ================================== |
|
||||||
| This file is part of |
|
| This file is part of |
|
||||||
| staticoso |
|
| staticoso |
|
||||||
| Just a simple Static Site Generator |
|
| Just a simple Static Site Generator |
|
||||||
| |
|
| |
|
||||||
| Licensed under the AGPLv3 license |
|
| Licensed under the AGPLv3 license |
|
||||||
| Copyright (C) 2022, OctoSpacc |
|
| Copyright (C) 2022-2023, OctoSpacc |
|
||||||
| ================================= """
|
| ================================== """
|
||||||
|
|
||||||
from urllib.parse import quote as URLEncode
|
from urllib.parse import quote as URLEncode
|
||||||
|
from Modules.HTML import *
|
||||||
from Modules.Utils import *
|
from Modules.Utils import *
|
||||||
|
|
||||||
def MakeSitemap(Flags, Pages):
|
def MakeSitemap(Flags, Pages):
|
||||||
Map = ''
|
Map = ''
|
||||||
Domain = Flags['SiteDomain'] + '/' if Flags['SiteDomain'] else ''
|
Domain = Flags['SiteDomain'] + '/' if Flags['SiteDomain'] else ''
|
||||||
for File, Content, Titles, Meta, ContentHTML, SlimHTML, Description, Image in Pages:
|
for File, Content, Titles, Meta, ContentHtml, SlimHtml, Description, Image in Pages:
|
||||||
File = f"{StripExt(File)}.html"
|
File = f"{StripExt(File)}.html"
|
||||||
Map += Domain + URLEncode(File) + '\n'
|
Map += Domain + URLEncode(File) + '\n'
|
||||||
WriteFile(f"{Flags['OutDir']}/sitemap.txt", Map)
|
WriteFile(f"{Flags['OutDir']}/sitemap.txt", Map)
|
||||||
|
|
||||||
|
def BuildPagesSearch(Flags:dict, Pages:list):
|
||||||
|
SearchContent = ''
|
||||||
|
with open(f'{staticosoBaseDir()}Assets/PagesSearch.html', 'r') as File:
|
||||||
|
Base = File.read().split('{{PagesInject}}')
|
||||||
|
for File, Content, Titles, Meta, ContentHtml, SlimHtml, Description, Image in Pages:
|
||||||
|
#for File, Content, Titles, Meta in Pages:
|
||||||
|
SearchContent += f'''
|
||||||
|
<div
|
||||||
|
class="staticoso-HtmlSearch-Page"
|
||||||
|
data-staticoso-htmlsearch-name="{html.escape(html.unescape(Titles[0]), quote=True)}"
|
||||||
|
data-staticoso-htmlsearch-href="{StripExt(File)}.html"
|
||||||
|
>
|
||||||
|
{ContentHtml}
|
||||||
|
</div>
|
||||||
|
'''
|
||||||
|
return Base[0] + SearchContent + Base[1]
|
||||||
|
|
|
@ -1,12 +1,11 @@
|
||||||
|
""" ================================== |
|
||||||
""" ================================= |
|
| This file is part of |
|
||||||
| This file is part of |
|
| staticoso |
|
||||||
| staticoso |
|
| Just a simple Static Site Generator |
|
||||||
| Just a simple Static Site Generator |
|
| |
|
||||||
| |
|
| Licensed under the AGPLv3 license |
|
||||||
| Licensed under the AGPLv3 license |
|
| Copyright (C) 2022-2023, OctoSpacc |
|
||||||
| Copyright (C) 2022, OctoSpacc |
|
| ================================== """
|
||||||
| ================================= """
|
|
||||||
|
|
||||||
import json
|
import json
|
||||||
import os
|
import os
|
||||||
|
|
|
@ -105,3 +105,4 @@ All of this is because some crucial things might be changed from one commit to a
|
||||||
- Bad HTML included in Markdown files can cause a build to fail entirely.
|
- Bad HTML included in Markdown files can cause a build to fail entirely.
|
||||||
- Despite differential building and multithreading, the program still needs some more optimizations.
|
- Despite differential building and multithreading, the program still needs some more optimizations.
|
||||||
- Ordering pages in the global menu with external configuration flags (outside the pages' source) yields broken and unpredictable results.
|
- Ordering pages in the global menu with external configuration flags (outside the pages' source) yields broken and unpredictable results.
|
||||||
|
- If site is small, the graphic of percentage completion is bugged (appears shorter).
|
||||||
|
|
|
@ -1,10 +1,11 @@
|
||||||
all: BuildThemes BuildDemos
|
all: All
|
||||||
|
All: BuildThemes BuildDemos
|
||||||
|
|
||||||
BuildThemes:
|
BuildThemes:
|
||||||
./Scripts/BuildThemes.py
|
python3 ./Scripts/BuildThemes.py
|
||||||
|
|
||||||
BuildDemos:
|
BuildDemos:
|
||||||
./Scripts/BuildDemos.sh
|
sh ./Scripts/BuildDemos.sh
|
||||||
|
|
||||||
clean: Clean
|
clean: Clean
|
||||||
Clean:
|
Clean:
|
||||||
|
|
|
@ -1 +1,19 @@
|
||||||
#!/bin/sh
|
#!/bin/sh
|
||||||
|
cd "$( dirname "$( realpath "$0" )" )"
|
||||||
|
|
||||||
|
cd ../Sources/Themes
|
||||||
|
|
||||||
|
for Theme in *
|
||||||
|
do
|
||||||
|
cd ../../
|
||||||
|
mkdir -p ./Build/Demos/$Theme
|
||||||
|
cd ./Build/Demos/$Theme
|
||||||
|
mkdir -p ./Assets ./Templates ./Posts
|
||||||
|
cp -r ../../Themes/$Theme ./.Source
|
||||||
|
cp ./.Source/$Theme.html ./Templates/Default.html
|
||||||
|
cp ./.Source/*.css ./Assets/
|
||||||
|
cp ../../../Sources/Snippets/*.md ./Posts/
|
||||||
|
python3 ../../../../App/Source/Build.py \
|
||||||
|
--SiteName="$Theme"
|
||||||
|
cd ../../../Sources/Themes
|
||||||
|
done
|
||||||
|
|
|
@ -4,18 +4,18 @@ import shutil
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
def Main():
|
def Main():
|
||||||
for Theme in os.listdir("Sources/Themes"):
|
for Theme in os.listdir('./Sources/Themes'):
|
||||||
Path(f"Build/Themes/{Theme}").mkdir(parents=True, exist_ok=True)
|
Path(f'./Build/Themes/{Theme}').mkdir(parents=True, exist_ok=True)
|
||||||
try:
|
try:
|
||||||
shutil.copyfile(f"Sources/Themes/{Theme}/Style.css", f"Build/Themes/{Theme}/Style.css")
|
shutil.copyfile(f'./Sources/Themes/{Theme}/{Theme}.css', f'./Build/Themes/{Theme}/{Theme}.css')
|
||||||
except FileExistsError:
|
except FileExistsError:
|
||||||
pass
|
pass
|
||||||
with open(f"Sources/Snippets/Base.html", "r") as f:
|
with open(f'./Sources/Snippets/Base.html', 'r') as f:
|
||||||
Base = f.read()
|
Base = f.read()
|
||||||
with open(f"Sources/Themes/{Theme}/Body.html", "r") as f:
|
with open(f'./Sources/Themes/{Theme}/Body.html', 'r') as f:
|
||||||
Body = f.read()
|
Body = f.read()
|
||||||
with open(f"Build/Themes/{Theme}/{Theme}.html", "w+") as f:
|
with open(f'./Build/Themes/{Theme}/{Theme}.html', 'w+') as f:
|
||||||
f.write(Base.replace("{{Body}}", Body))
|
f.write(Base.replace('{{Theme}}', Theme).replace('{{Body}}', Body))
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == '__main__':
|
||||||
Main()
|
Main()
|
||||||
|
|
|
@ -1,25 +1,25 @@
|
||||||
<!DOCTYPE html>
<!-- Shared base skeleton for theme demo pages.
     {{Theme}} and {{Body}} are filled in by the theme build step;
     [staticoso:*] and <staticoso:*> tokens are expanded by the staticoso engine. -->
<html lang="[staticoso:PageLang]">
<head>
	<meta charset="UTF-8"/>
	<meta name="viewport" content="width=device-width, initial-scale=1.0"/>
	<meta name="generator" content="staticoso"/>
	<link rel="stylesheet" href="[staticoso:SiteRelativeRoot]{{Theme}}.css"/>
	<link rel="alternate" type="application/atom+xml" title="Blog Atom Feed" href="[staticoso:SiteRelativeRoot]feed/atom.xml"/>
	<link rel="alternate" type="application/rss+xml" title="Blog RSS Feed" href="[staticoso:SiteRelativeRoot]feed/rss.xml"/>
	<title><staticoso:PageTitle> - <staticoso:SiteName></title>
	<meta name="description" content="[staticoso:PageDescription]"/>
	<link href="[staticoso:SiteRelativeRoot]favicon.png" rel="icon" type="image/png"/>
	<meta property="og:type" content="website"/>
	<meta property="og:title" content="[staticoso:PageTitle] - [staticoso:SiteName]"/>
	<meta property="og:description" content="[staticoso:PageDescription]"/>
	<meta property="og:url" content="[staticoso:SiteDomain]/[staticoso:PagePath]"/>
	<meta property="og:image" content="[staticoso:PageImage]"/>
	<style>
		<staticoso:PageStyle>
	</style>
</head>
<body>
{{Body}}
</body>
</html>
|
Loading…
Reference in New Issue