Mirror of https://gitlab.com/octtspacc/staticoso, synced 2025-04-18 03:47:24 +02:00
Some code restructuring, Update copyright year, Fix bug in PagesSearch.html

This commit is contained in:
parent f433fff8a2
commit c35aa94345
.gitignore (vendored): 2 lines changed
@@ -3,4 +3,6 @@
tmp.*
public/*
Sites/*/public/*
Sites/*/public.Content/*
Sites/*/public.gmi/*
Themes/Build/*
@@ -90,7 +90,7 @@
return false;
};

function CreateSearchAnchors() {
function CreateSearchAnchors(Query) {
/*
// Create anchors redirecting to the pages that are displayed
document.querySelectorAll(SelectBase).forEach(function(Page) {
@@ -184,7 +184,7 @@
${WordLooseStyle}
${SelectBase} *[data-staticoso-htmlsearch-block*="${Query}"] { display: revert; }
`;
CreateSearchAnchors();
CreateSearchAnchors(Query);
};

['onchange', 'oninput', 'onpaste'].forEach(function(Ev) {
@@ -1,12 +1,12 @@
#!/usr/bin/env python3
""" ================================= |
| This file is part of |
| staticoso |
| Just a simple Static Site Generator |
| |
| Licensed under the AGPLv3 license |
| Copyright (C) 2022, OctoSpacc |
| ================================= """
""" ================================== |
| This file is part of |
| staticoso |
| Just a simple Static Site Generator |
| |
| Licensed under the AGPLv3 license |
| Copyright (C) 2022-2023, OctoSpacc |
| ================================== """

import argparse
import os
@@ -1,11 +1,11 @@
""" ================================= |
| This file is part of |
| staticoso |
| Just a simple Static Site Generator |
| |
| Licensed under the AGPLv3 license |
| Copyright (C) 2022, OctoSpacc |
| ================================= """
""" ================================== |
| This file is part of |
| staticoso |
| Just a simple Static Site Generator |
| |
| Licensed under the AGPLv3 license |
| Copyright (C) 2022-2023, OctoSpacc |
| ================================== """

import time
from Libs.dateutil.parser import parse as date_parse
@@ -1,11 +1,11 @@
""" ================================= |
| This file is part of |
| staticoso |
| Just a simple Static Site Generator |
| |
| Licensed under the AGPLv3 license |
| Copyright (C) 2022, OctoSpacc |
| ================================= """
""" ================================== |
| This file is part of |
| staticoso |
| Just a simple Static Site Generator |
| |
| Licensed under the AGPLv3 license |
| Copyright (C) 2022-2023, OctoSpacc |
| ================================== """

import configparser
from ast import literal_eval
@@ -1,11 +1,11 @@
""" ================================= |
| This file is part of |
| staticoso |
| Just a simple Static Site Generator |
| |
| Licensed under the AGPLv3 license |
| Copyright (C) 2022, OctoSpacc |
| ================================= """
""" ================================== |
| This file is part of |
| staticoso |
| Just a simple Static Site Generator |
| |
| Licensed under the AGPLv3 license |
| Copyright (C) 2022-2023, OctoSpacc |
| ================================== """

from base64 import b64encode
from Modules.Globals import *
@@ -1,11 +1,11 @@
""" ================================= |
| This file is part of |
| staticoso |
| Just a simple Static Site Generator |
| |
| Licensed under the AGPLv3 license |
| Copyright (C) 2022, OctoSpacc |
| ================================= """
""" ================================== |
| This file is part of |
| staticoso |
| Just a simple Static Site Generator |
| |
| Licensed under the AGPLv3 license |
| Copyright (C) 2022-2023, OctoSpacc |
| ================================== """

# TODO: Either switch feed generation lib, or rewrite the 'lxml' module, so that no modules have to be compiled and the program is 100% portable
@@ -1,11 +1,11 @@
""" ================================= |
| This file is part of |
| staticoso |
| Just a simple Static Site Generator |
| |
| Licensed under the AGPLv3 license |
| Copyright (C) 2022, OctoSpacc |
| ================================= """
""" ================================== |
| This file is part of |
| staticoso |
| Just a simple Static Site Generator |
| |
| Licensed under the AGPLv3 license |
| Copyright (C) 2022-2023, OctoSpacc |
| ================================== """

# TODO: Write the Python HTML2Gemtext converter
@@ -1,11 +1,11 @@
""" ================================= |
| This file is part of |
| staticoso |
| Just a simple Static Site Generator |
| |
| Licensed under the AGPLv3 license |
| Copyright (C) 2022, OctoSpacc |
| ================================= """
""" ================================== |
| This file is part of |
| staticoso |
| Just a simple Static Site Generator |
| |
| Licensed under the AGPLv3 license |
| Copyright (C) 2022-2023, OctoSpacc |
| ================================== """

ReservedPaths = ('Site.ini', 'Assets', 'Pages', 'Posts', 'Templates', 'StaticParts', 'DynamicParts')
FileExtensions = {
@@ -1,11 +1,11 @@
""" ================================= |
| This file is part of |
| staticoso |
| Just a simple Static Site Generator |
| |
| Licensed under the AGPLv3 license |
| Copyright (C) 2022, OctoSpacc |
| ================================= """
""" ================================== |
| This file is part of |
| staticoso |
| Just a simple Static Site Generator |
| |
| Licensed under the AGPLv3 license |
| Copyright (C) 2022-2023, OctoSpacc |
| ================================== """

import html
import warnings
@@ -1,11 +1,11 @@
""" ================================= |
| This file is part of |
| staticoso |
| Just a simple Static Site Generator |
| |
| Licensed under the AGPLv3 license |
| Copyright (C) 2022, OctoSpacc |
| ================================= """
""" ================================== |
| This file is part of |
| staticoso |
| Just a simple Static Site Generator |
| |
| Licensed under the AGPLv3 license |
| Copyright (C) 2022-2023, OctoSpacc |
| ================================== """

import logging
import sys
@@ -1,11 +1,11 @@
""" ================================= |
| This file is part of |
| staticoso |
| Just a simple Static Site Generator |
| |
| Licensed under the AGPLv3 license |
| Copyright (C) 2022, OctoSpacc |
| ================================= """
""" ================================== |
| This file is part of |
| staticoso |
| Just a simple Static Site Generator |
| |
| Licensed under the AGPLv3 license |
| Copyright (C) 2022-2023, OctoSpacc |
| ================================== """

from Libs.markdown import markdown
App/Source/Modules/Meta.py (new file): 289 lines added
@@ -0,0 +1,289 @@
""" ================================== |
| This file is part of |
| staticoso |
| Just a simple Static Site Generator |
| |
| Licensed under the AGPLv3 license |
| Copyright (C) 2022-2023, OctoSpacc |
| ================================== """

from Modules.Config import *
from Modules.Elements import *
from Modules.HTML import *
from Modules.Markdown import *
from Modules.Utils import *

# Menu styles:
# - Simple: Default, Flat, Line
# - Others: Excerpt, Image, Preview (Excerpt + Image), Full
def GetHTMLPagesList(Pages:list, BlogName:str, SiteRoot:str, PathPrefix:str, CallbackFile=None, Unite=[], Type=None, Limit=None, PathFilter='', Category=None, For='Menu', MarkdownExts=(), MenuStyle='Default', ShowPaths=True):
Flatten, SingleLine, DoneCount, PrevDepth = False, False, 0, 0
if MenuStyle == 'Flat':
Flatten = True
elif MenuStyle == 'Line':
ShowPaths, SingleLine = False, True
List, ToPop, LastParent = '', [], []
IndexPages = Pages.copy()
for e in IndexPages:
if e[3]['Index'].lower() in PageIndexStrNeg:
IndexPages.remove(e)
for i,e in enumerate(IndexPages):
if Type and e[3]['Type'] != Type:
ToPop += [i]
ToPop = RevSort(ToPop)
for i in ToPop:
IndexPages.pop(i)
if Type == 'Page':
IndexPages = OrderPages(IndexPages)
for i,e in enumerate(Unite):
if e:
IndexPages.insert(i, [e, None, None, {'Type':Type, 'Index':'True', 'Order':'Unite'}])
for File, Content, Titles, Meta in IndexPages:
# Allow for the virtual "Pages/" prefix to be used in path filtering
TmpPathFilter = PathFilter
if TmpPathFilter.startswith('Pages/'):
TmpPathFilter = TmpPathFilter[len('Pages/'):]
if File.startswith('Posts/'):
continue

if (not Type or (Meta['Type'] == Type and CanIndex(Meta['Index'], For))) and (not Category or Category in Meta['Categories']) and File.startswith(TmpPathFilter) and File != CallbackFile and (not Limit or Limit > DoneCount):
Depth = (File.count('/') + 1) if Meta['Order'] != 'Unite' else 1
# Folder names are handled here
if Depth > 1 and Meta['Order'] != 'Unite':
CurParent = File.split('/')[:-1]
for i,s in enumerate(CurParent):
if LastParent != CurParent and ShowPaths:
LastParent = CurParent
Levels = '.' * ((Depth-2+i) if not Flatten else 0) + ':'
# If search node endswith index, it's a page; else, it's a folder
if StripExt(File).endswith('index'):
Title = MakeListTitle(File, Meta, Titles, 'HTMLTitle', BlogName, PathPrefix)
DoneCount += 1
else:
Title = CurParent[Depth-2+i]
if SingleLine:
List += f' <span>{Title}</span> '
else:
List += f'{Levels}<span>{Title}</span>\n'

# Pages with any other path
if not (Depth > 1 and StripExt(File).split('/')[-1] == 'index'):
Levels = '.' * ((Depth-1) if not Flatten else 0) + ':'
DoneCount += 1
if Meta['Order'] == 'Unite':
Title = markdown(MarkdownHTMLEscape(File, MarkdownExts), extensions=MarkdownExts).removeprefix('<p>').removesuffix('<p>')
else:
Title = MakeListTitle(File, Meta, Titles, 'HTMLTitle', BlogName, PathPrefix)
if SingleLine:
List += ' <span>' + Title + '</span> '
else:
List += Levels + Title + '\n'

if MenuStyle in ('Default', 'Flat'):
return GenHTMLTreeList(List, Class="staticoso-PagesList")
elif MenuStyle in ('Line', 'Excerpt', 'Image', 'Preview', 'Full'):
return List

def CheckHTMLCommentLine(Line:str):
if Line.startswith('<!--'):
Line = Line[4:].lstrip()
if Line.endswith('-->'):
return Line
return None

def TemplatePreprocessor(Text:str):
Meta, MetaDefault = '', {
'MenuStyle': 'Default'}
for l in Text.splitlines():
ll = l.lstrip().rstrip()
lll = CheckHTMLCommentLine(ll)
if lll:
if lll.startswith('%'):
Meta += lll[1:-3].lstrip().rstrip() + '\n'
Meta = dict(ReadConf(LoadConfStr('[Meta]\n' + Meta), 'Meta'))
for i in MetaDefault:
if not i in Meta:
Meta.update({i:MetaDefault[i]})
return Meta

def FindPreprocLine(Line:str, Meta, Macros):
Changed = False
Line = Line.lstrip().rstrip()
lll = CheckHTMLCommentLine(Line)
if Line.startswith('//') or lll: # Find preprocessor lines
lll = Line[2:].lstrip()
if lll.startswith('%'):
Meta += lll[1:].lstrip() + '\n'
Changed = True
elif lll.startswith('$'):
Macros += lll[1:].lstrip() + '\n'
Changed = True
#if ll.startswith('<!--') and not ll.endswith('-->'): # Find comment and code blocks
# IgnoreBlocksStart += [l]
return (Meta, Macros, Changed)

def PagePreprocessor(Path:str, TempPath:str, Type:str, SiteTemplate, SiteRoot, GlobalMacros, CategoryUncategorized, LightRun:bool=False, Content=None):
File = ReadFile(Path) if not Content else Content
Path = Path.lower()
Content, Titles, DashyTitles, HTMLTitlesFound, Macros, Meta, MetaDefault = '', [], [], False, '', '', {
'Template': SiteTemplate,
'Head': '',
'Style': '',
'Type': Type,
'Index': 'Unspecified',
'Feed': 'True',
'Title': '',
'HTMLTitle': '',
'Description': '',
'Image': '',
'Macros': {},
'Categories': [],
'URLs': [],
'CreatedOn': '',
'UpdatedOn': '',
'EditedOn': '',
'Order': None,
'Language': None,
'Downsync': None}
# Find all positions of '<!--', '-->', add them in a list=[[pos0,pos1,line0,line1],...]
for l in File.splitlines():
ll = l.lstrip().rstrip()
Meta, Macros, Changed = FindPreprocLine(ll, Meta, Macros)
if not Changed: # Find headings
#if line in ignore block:
# continue
Headings = ('h1', 'h2', 'h3', 'h4', 'h5', 'h6')
#if Path.endswith(FileExtensions['HTML']):
# if ll[1:].startswith(Headings):
# if ll[3:].startswith((" class='NoTitle", ' class="NoTitle')):
# Content += l + '\n'
# elif ll.replace(' ', ' ').startswith('// %'):
# pass
# else:
# Title = '#'*int(ll[2]) + ' ' + ll[4:]
# DashTitle = DashifyTitle(Title.lstrip('#'), DashyTitles)
# DashyTitles += [DashTitle]
# Titles += [Title]
# Content += MakeLinkableTitle(l, Title, DashTitle, 'pug') + '\n'
# else:
# Content += l + '\n'
if Path.endswith(FileExtensions['HTML']) and not HTMLTitlesFound:
Soup = MkSoup(File)
Tags = Soup.find_all()
for t in Tags:
if t.name in Headings:
Title = '#'*int(t.name[1]) + ' ' + str(t.text)
DashTitle = DashifyTitle(Title.lstrip('#'), DashyTitles)
DashyTitles += [DashTitle]
Titles += [Title]
t.replace_with(MakeLinkableTitle(None, Title, DashTitle, 'md'))
HTMLTitlesFound = True
Content = ''
TmpContent = str(Soup.prettify(formatter=None))
for cl in TmpContent.splitlines():
_, _, IsMetaLine = FindPreprocLine(cl, Meta, Macros)
if not IsMetaLine:
#print(cl)
Content += cl + '\n'
break
elif Path.endswith(FileExtensions['Markdown']):
lsuffix = ''
if ll.startswith(('-', '+', '*')):
lsuffix += ll[0]
ll = ll[1:].lstrip()
if ll.startswith('#') or (ll.startswith('<') and ll[1:].startswith(Headings)):
if ll.startswith('#'):
Title = ll
elif ll.startswith('<'):
if ll[3:].startswith((" class='NoTitle", ' class="NoTitle')):
Content += l + '\n'
continue
else:
Title = '#'*int(ll[2]) + ' ' + ll[4:]
DashTitle = DashifyTitle(MkSoup(Title.lstrip('#')).get_text(), DashyTitles)
DashyTitles += [DashTitle]
Titles += [Title]
Title = MakeLinkableTitle(None, Title, DashTitle, 'md')
# I can't remember why I put this but it was needed
Title = Title.replace('> </', '> </').replace(' </', '</')
Content += lsuffix + Title + '\n'
else:
Content += l + '\n'
elif Path.endswith('.pug'):
if ll.startswith(Headings):
if ll[2:].startswith(("(class='NoTitle", '(class="NoTitle')):
Content += l + '\n'
else:
Title = '#'*int(ll[1]) + ll[3:]
DashTitle = DashifyTitle(Title.lstrip('#'), DashyTitles)
DashyTitles += [DashTitle]
Titles += [Title]
# TODO: We should handle headers that for any reason already have parenthesis
if ll[2:] == '(':
Content += l + '\n'
else:
Content += MakeLinkableTitle(l, Title, DashTitle, 'pug') + '\n'
else:
Content += l + '\n'
elif Path.endswith('.txt'):
Content += l + '\n'
Meta = dict(ReadConf(LoadConfStr('[Meta]\n' + Meta), 'Meta'))
for i in MetaDefault:
if i in Meta:
# TODO: Handle strings with spaces but wrapped in quotes
if i == 'Categories':
Categories = Meta['Categories'].split(' ')
Meta['Categories'] = []
for j in Categories:
Meta['Categories'] += [j]
elif i == 'URLs':
URLs = Meta['URLs'].split(' ')
Meta['URLs'] = []
for j in URLs:
Meta['URLs'] += [j]
else:
Meta.update({i:MetaDefault[i]})
if Meta['UpdatedOn']:
Meta['EditedOn'] = Meta['UpdatedOn']
if Meta['Index'].lower() in ('default', 'unspecified', 'categories'):
if not Meta['Categories']:
Meta['Categories'] = [CategoryUncategorized]
if Meta['Type'].lower() == 'page':
Meta['Index'] = 'Categories'
elif Meta['Type'].lower() == 'post':
Meta['Index'] = 'True'
if GlobalMacros:
Meta['Macros'].update(GlobalMacros)
Meta['Macros'].update(ReadConf(LoadConfStr('[Macros]\n' + Macros), 'Macros'))
return [TempPath, Content, Titles, Meta]

def PagePostprocessor(FileType, Text:str, Meta:dict):
for e in Meta['Macros']:
Text = ReplWithEsc(Text, f"[: {e} :]", f"[:{e}:]")
return Text

def OrderPages(Old:list):
New, NoOrder, Max = [], [], 0
for i,e in enumerate(Old):
Curr = e[3]['Order']
if Curr:
if int(Curr) > Max:
Max = int(Curr)
else:
NoOrder += [e]
New = [None] * (Max+1)
for i,e in enumerate(Old):
Curr = e[3]['Order']
if Curr:
New[int(Curr)] = e
while None in New:
New.remove(None)
return New + NoOrder

def CanIndex(Index:str, For:str):
if Index.lower() in PageIndexStrNeg:
return False
elif Index.lower() in PageIndexStrPos:
return True
else:
return True if Index == For else False
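Note on the convention shown above: CheckHTMLCommentLine, TemplatePreprocessor and FindPreprocLine treat comment lines such as <!-- % Key = Value --> (or // % Key = Value) as page and template metadata, and $-prefixed lines as macros; the collected lines are prefixed with an [Meta] section header and handed to the project's LoadConfStr/ReadConf helpers, which are not part of this diff. The following is only a minimal, self-contained sketch of the same idea, using configparser directly as a stand-in for those helpers; the names and details here are illustrative, not the project's API.

import configparser

def extract_meta(text: str) -> dict:
    # Collect '%'-prefixed preprocessor lines hidden in comments.
    meta_lines = []
    for line in text.splitlines():
        line = line.strip()
        if line.startswith('<!--') and line.endswith('-->'):
            inner = line[4:-3].strip()  # drop the comment delimiters
        elif line.startswith('//'):
            inner = line[2:].strip()
        else:
            continue
        if inner.startswith('%'):  # '%' marks a metadata line
            meta_lines.append(inner[1:].strip())
    # Parse the collected lines as a single INI section.
    parser = configparser.ConfigParser()
    parser.read_string('[Meta]\n' + '\n'.join(meta_lines))
    return dict(parser['Meta'])

print(extract_meta('<!-- % MenuStyle = Line -->\n// % Index = True\n<p>Hello</p>'))
# configparser lowercases keys: {'menustyle': 'Line', 'index': 'True'}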
@@ -1,11 +1,11 @@
""" ================================= |
| This file is part of |
| staticoso |
| Just a simple Static Site Generator |
| |
| Licensed under the AGPLv3 license |
| Copyright (C) 2022, OctoSpacc |
| ================================= """
""" ================================== |
| This file is part of |
| staticoso |
| Just a simple Static Site Generator |
| |
| Licensed under the AGPLv3 license |
| Copyright (C) 2022-2023, OctoSpacc |
| ================================== """

# TODO: Write a native Pug parser; There is one already available for Python but seems broken / out-of-date
@@ -1,300 +1,25 @@
""" ================================= |
| This file is part of |
| staticoso |
| Just a simple Static Site Generator |
| |
| Licensed under the AGPLv3 license |
| Copyright (C) 2022, OctoSpacc |
| ================================= """
""" ================================== |
| This file is part of |
| staticoso |
| Just a simple Static Site Generator |
| |
| Licensed under the AGPLv3 license |
| Copyright (C) 2022-2023, OctoSpacc |
| ================================== """

import shutil
from datetime import datetime
from multiprocessing import Pool, cpu_count
from Libs.bs4 import BeautifulSoup
from Modules.Config import *
from Modules.Elements import *
from Modules.Globals import *
from Modules.HTML import *
from Modules.Logging import *
from Modules.Markdown import *
from Modules.Meta import *
from Modules.Pug import *
from Modules.Utils import *

# Menu styles:
# - Simple: Default, Flat, Line
# - Others: Excerpt, Image, Preview (Excerpt + Image), Full
def GetHTMLPagesList(Pages:list, BlogName:str, SiteRoot:str, PathPrefix:str, CallbackFile=None, Unite=[], Type=None, Limit=None, PathFilter='', Category=None, For='Menu', MarkdownExts=(), MenuStyle='Default', ShowPaths=True):
Flatten, SingleLine, DoneCount, PrevDepth = False, False, 0, 0
if MenuStyle == 'Flat':
Flatten = True
elif MenuStyle == 'Line':
ShowPaths, SingleLine = False, True
List, ToPop, LastParent = '', [], []
IndexPages = Pages.copy()
for e in IndexPages:
if e[3]['Index'].lower() in PageIndexStrNeg:
IndexPages.remove(e)
for i,e in enumerate(IndexPages):
if Type and e[3]['Type'] != Type:
ToPop += [i]
ToPop = RevSort(ToPop)
for i in ToPop:
IndexPages.pop(i)
if Type == 'Page':
IndexPages = OrderPages(IndexPages)
for i,e in enumerate(Unite):
if e:
IndexPages.insert(i, [e, None, None, {'Type':Type, 'Index':'True', 'Order':'Unite'}])
for File, Content, Titles, Meta in IndexPages:
# Allow for the virtual "Pages/" prefix to be used in path filtering
TmpPathFilter = PathFilter
if TmpPathFilter.startswith('Pages/'):
TmpPathFilter = TmpPathFilter[len('Pages/'):]
if File.startswith('Posts/'):
continue

if (not Type or (Meta['Type'] == Type and CanIndex(Meta['Index'], For))) and (not Category or Category in Meta['Categories']) and File.startswith(TmpPathFilter) and File != CallbackFile and (not Limit or Limit > DoneCount):
Depth = (File.count('/') + 1) if Meta['Order'] != 'Unite' else 1
# Folder names are handled here
if Depth > 1 and Meta['Order'] != 'Unite':
CurParent = File.split('/')[:-1]
for i,s in enumerate(CurParent):
if LastParent != CurParent and ShowPaths:
LastParent = CurParent
Levels = '.' * ((Depth-2+i) if not Flatten else 0) + ':'
# If search node endswith index, it's a page; else, it's a folder
if StripExt(File).endswith('index'):
Title = MakeListTitle(File, Meta, Titles, 'HTMLTitle', BlogName, PathPrefix)
DoneCount += 1
else:
Title = CurParent[Depth-2+i]
if SingleLine:
List += f' <span>{Title}</span> '
else:
List += f'{Levels}<span>{Title}</span>\n'

# Pages with any other path
if not (Depth > 1 and StripExt(File).split('/')[-1] == 'index'):
Levels = '.' * ((Depth-1) if not Flatten else 0) + ':'
DoneCount += 1
if Meta['Order'] == 'Unite':
Title = markdown(MarkdownHTMLEscape(File, MarkdownExts), extensions=MarkdownExts).removeprefix('<p>').removesuffix('<p>')
else:
Title = MakeListTitle(File, Meta, Titles, 'HTMLTitle', BlogName, PathPrefix)
if SingleLine:
List += ' <span>' + Title + '</span> '
else:
List += Levels + Title + '\n'

if MenuStyle in ('Default', 'Flat'):
return GenHTMLTreeList(List, Class="staticoso-PagesList")
elif MenuStyle in ('Line', 'Excerpt', 'Image', 'Preview', 'Full'):
return List

def CheckHTMLCommentLine(Line:str):
if Line.startswith('<!--'):
Line = Line[4:].lstrip()
if Line.endswith('-->'):
return Line
return None

def TemplatePreprocessor(Text:str):
Meta, MetaDefault = '', {
'MenuStyle': 'Default'}
for l in Text.splitlines():
ll = l.lstrip().rstrip()
lll = CheckHTMLCommentLine(ll)
if lll:
if lll.startswith('%'):
Meta += lll[1:-3].lstrip().rstrip() + '\n'
Meta = dict(ReadConf(LoadConfStr('[Meta]\n' + Meta), 'Meta'))
for i in MetaDefault:
if not i in Meta:
Meta.update({i:MetaDefault[i]})
return Meta

def FindPreprocLine(Line:str, Meta, Macros):
Changed = False
Line = Line.lstrip().rstrip()
lll = CheckHTMLCommentLine(Line)
if Line.startswith('//') or lll: # Find preprocessor lines
lll = Line[2:].lstrip()
if lll.startswith('%'):
Meta += lll[1:].lstrip() + '\n'
Changed = True
elif lll.startswith('$'):
Macros += lll[1:].lstrip() + '\n'
Changed = True
#if ll.startswith('<!--') and not ll.endswith('-->'): # Find comment and code blocks
# IgnoreBlocksStart += [l]
return (Meta, Macros, Changed)

def PagePreprocessor(Path:str, TempPath:str, Type:str, SiteTemplate, SiteRoot, GlobalMacros, CategoryUncategorized, LightRun:bool=False, Content=None):
File = ReadFile(Path) if not Content else Content
Path = Path.lower()
Content, Titles, DashyTitles, HTMLTitlesFound, Macros, Meta, MetaDefault = '', [], [], False, '', '', {
'Template': SiteTemplate,
'Head': '',
'Style': '',
'Type': Type,
'Index': 'Unspecified',
'Feed': 'True',
'Title': '',
'HTMLTitle': '',
'Description': '',
'Image': '',
'Macros': {},
'Categories': [],
'URLs': [],
'CreatedOn': '',
'UpdatedOn': '',
'EditedOn': '',
'Order': None,
'Language': None,
'Downsync': None}
# Find all positions of '<!--', '-->', add them in a list=[[pos0,pos1,line0,line1],...]
for l in File.splitlines():
ll = l.lstrip().rstrip()
Meta, Macros, Changed = FindPreprocLine(ll, Meta, Macros)
if not Changed: # Find headings
#if line in ignore block:
# continue
Headings = ('h1', 'h2', 'h3', 'h4', 'h5', 'h6')
#if Path.endswith(FileExtensions['HTML']):
# if ll[1:].startswith(Headings):
# if ll[3:].startswith((" class='NoTitle", ' class="NoTitle')):
# Content += l + '\n'
# elif ll.replace(' ', ' ').startswith('// %'):
# pass
# else:
# Title = '#'*int(ll[2]) + ' ' + ll[4:]
# DashTitle = DashifyTitle(Title.lstrip('#'), DashyTitles)
# DashyTitles += [DashTitle]
# Titles += [Title]
# Content += MakeLinkableTitle(l, Title, DashTitle, 'pug') + '\n'
# else:
# Content += l + '\n'
if Path.endswith(FileExtensions['HTML']) and not HTMLTitlesFound:
Soup = BeautifulSoup(File, 'html.parser')
Tags = Soup.find_all()
for t in Tags:
if t.name in Headings:
Title = '#'*int(t.name[1]) + ' ' + str(t.text)
DashTitle = DashifyTitle(Title.lstrip('#'), DashyTitles)
DashyTitles += [DashTitle]
Titles += [Title]
t.replace_with(MakeLinkableTitle(None, Title, DashTitle, 'md'))
HTMLTitlesFound = True
Content = ''
TmpContent = str(Soup.prettify(formatter=None))
for cl in TmpContent.splitlines():
_, _, IsMetaLine = FindPreprocLine(cl, Meta, Macros)
if not IsMetaLine:
#print(cl)
Content += cl + '\n'
break
elif Path.endswith(FileExtensions['Markdown']):
lsuffix = ''
if ll.startswith(('-', '+', '*')):
lsuffix += ll[0]
ll = ll[1:].lstrip()
if ll.startswith('#') or (ll.startswith('<') and ll[1:].startswith(Headings)):
if ll.startswith('#'):
Title = ll
elif ll.startswith('<'):
if ll[3:].startswith((" class='NoTitle", ' class="NoTitle')):
Content += l + '\n'
continue
else:
Title = '#'*int(ll[2]) + ' ' + ll[4:]
DashTitle = DashifyTitle(MkSoup(Title.lstrip('#')).get_text(), DashyTitles)
DashyTitles += [DashTitle]
Titles += [Title]
Title = MakeLinkableTitle(None, Title, DashTitle, 'md')
# I can't remember why I put this but it was needed
Title = Title.replace('> </', '> </').replace(' </', '</')
Content += lsuffix + Title + '\n'
else:
Content += l + '\n'
elif Path.endswith('.pug'):
if ll.startswith(Headings):
if ll[2:].startswith(("(class='NoTitle", '(class="NoTitle')):
Content += l + '\n'
else:
Title = '#'*int(ll[1]) + ll[3:]
DashTitle = DashifyTitle(Title.lstrip('#'), DashyTitles)
DashyTitles += [DashTitle]
Titles += [Title]
# TODO: We should handle headers that for any reason already have parenthesis
if ll[2:] == '(':
Content += l + '\n'
else:
Content += MakeLinkableTitle(l, Title, DashTitle, 'pug') + '\n'
else:
Content += l + '\n'
elif Path.endswith('.txt'):
Content += l + '\n'
Meta = dict(ReadConf(LoadConfStr('[Meta]\n' + Meta), 'Meta'))
for i in MetaDefault:
if i in Meta:
# TODO: Handle strings with spaces but wrapped in quotes
if i == 'Categories':
Categories = Meta['Categories'].split(' ')
Meta['Categories'] = []
for j in Categories:
Meta['Categories'] += [j]
elif i == 'URLs':
URLs = Meta['URLs'].split(' ')
Meta['URLs'] = []
for j in URLs:
Meta['URLs'] += [j]
else:
Meta.update({i:MetaDefault[i]})
if Meta['UpdatedOn']:
Meta['EditedOn'] = Meta['UpdatedOn']
if Meta['Index'].lower() in ('default', 'unspecified', 'categories'):
if not Meta['Categories']:
Meta['Categories'] = [CategoryUncategorized]
if Meta['Type'].lower() == 'page':
Meta['Index'] = 'Categories'
elif Meta['Type'].lower() == 'post':
Meta['Index'] = 'True'
if GlobalMacros:
Meta['Macros'].update(GlobalMacros)
Meta['Macros'].update(ReadConf(LoadConfStr('[Macros]\n' + Macros), 'Macros'))
return [TempPath, Content, Titles, Meta]

def PagePostprocessor(FileType, Text:str, Meta:dict):
for e in Meta['Macros']:
Text = ReplWithEsc(Text, f"[: {e} :]", f"[:{e}:]")
return Text

def OrderPages(Old:list):
New, NoOrder, Max = [], [], 0
for i,e in enumerate(Old):
Curr = e[3]['Order']
if Curr:
if int(Curr) > Max:
Max = int(Curr)
else:
NoOrder += [e]
New = [None] * (Max+1)
for i,e in enumerate(Old):
Curr = e[3]['Order']
if Curr:
New[int(Curr)] = e
while None in New:
New.remove(None)
return New + NoOrder

def CanIndex(Index:str, For:str):
if Index.lower() in PageIndexStrNeg:
return False
elif Index.lower() in PageIndexStrPos:
return True
else:
return True if Index == For else False

def PatchHTML(File, HTML, StaticPartsText, DynamicParts, DynamicPartsText, HTMLPagesList, PagePath, Content, Titles, Meta, SiteDomain, SiteRoot, SiteName, BlogName, FolderRoots, Categories, SiteLang, Locale, LightRun):
HTMLTitles = FormatTitles(Titles)
BodyDescription, BodyImage = '', ''
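The block removed above is the code that this commit moves into the new Modules/Meta.py shown earlier; one visible difference between the two copies is that the Meta.py version calls MkSoup(File) where Site.py constructed BeautifulSoup(File, 'html.parser') directly. As a quick illustration of one of the relocated helpers, here is a standalone sketch of the ordering behaviour that OrderPages implements, with hypothetical page tuples and simplified logic rather than the project's own code: a page whose Meta dict carries an 'Order' value is placed at that position, and pages without one are appended after the ordered ones.

# Simplified, illustrative reimplementation of the OrderPages idea (not the project's code).
def order_pages(pages):
    unordered, highest = [], 0
    for page in pages:
        order = page[3]['Order']
        if order:
            highest = max(highest, int(order))
        else:
            unordered.append(page)
    slots = [None] * (highest + 1)
    for page in pages:
        order = page[3]['Order']
        if order:
            slots[int(order)] = page  # place each ordered page at its slot
    ordered = [page for page in slots if page is not None]
    return ordered + unordered

# Hypothetical page tuples in the (File, Content, Titles, Meta) shape used above.
Example = [
    ('about.md', '', [], {'Order': '2'}),
    ('index.md', '', [], {'Order': '1'}),
    ('notes.md', '', [], {'Order': None}),
]
print([p[0] for p in order_pages(Example)])  # ['index.md', 'about.md', 'notes.md']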
@@ -1,11 +1,11 @@
""" ================================= |
| This file is part of |
| staticoso |
| Just a simple Static Site Generator |
| |
| Licensed under the AGPLv3 license |
| Copyright (C) 2022, OctoSpacc |
| ================================= """
""" ================================== |
| This file is part of |
| staticoso |
| Just a simple Static Site Generator |
| |
| Licensed under the AGPLv3 license |
| Copyright (C) 2022-2023, OctoSpacc |
| ================================== """

from urllib.parse import quote as URLEncode
from Modules.HTML import *
@@ -1,12 +1,11 @@
""" ================================= |
| This file is part of |
| staticoso |
| Just a simple Static Site Generator |
| |
| Licensed under the AGPLv3 license |
| Copyright (C) 2022, OctoSpacc |
| ================================= """
""" ================================== |
| This file is part of |
| staticoso |
| Just a simple Static Site Generator |
| |
| Licensed under the AGPLv3 license |
| Copyright (C) 2022-2023, OctoSpacc |
| ================================== """

import json
import os