Some internal refactoring

This commit is contained in:
2022-08-30 16:31:33 +02:00
parent 22918f0342
commit 91450570f4
9 changed files with 101 additions and 108 deletions

View File

@@ -51,8 +51,11 @@ def MastodonGetAllLinkPosts(Session, Domain=None):
Posts += [Post]
return Posts
def MastodonShare(InstanceURL, Token, TypeFilter, CategoryFilter, HoursLimit, Pages, SiteDomain, SiteLang, Locale):
def MastodonShare(Flags, Pages, Locale):
SaidPosting = False
SiteDomain, SiteLang = Flags['SiteDomain'], Flags['SiteLang']
InstanceURL, Token = Flags['MastodonURL'], Flags['MastodonToken']
TypeFilter, HoursLimit, CategoryFilter = Flags['ActivityPubTypeFilter'], Flags['ActivityPubHoursLimit'], Flags['FeedCategoryFilter']
Session = MastodonGetSession(InstanceURL, Token)
Posts = MastodonGetAllLinkPosts(Session, SiteDomain)
Pages.sort()

View File

@@ -12,13 +12,13 @@ from ast import literal_eval
def LoadConfFile(File):
    """Parse an INI-style configuration file and return the ConfigParser.

    File: path to the configuration file to read.

    Returns: a configparser.ConfigParser with case-sensitive option names.
    """
    Conf = configparser.ConfigParser()
    # Default optionxform lowercases option names; use str (identity) to
    # keep keys case-sensitive, matching the CamelCase keys used elsewhere.
    Conf.optionxform = str
    Conf.read(File)
    return Conf
def LoadConfStr(Str):
    """Parse an INI-style configuration string and return the ConfigParser.

    Str: configuration text in INI syntax.

    Returns: a configparser.ConfigParser with case-sensitive option names.
    """
    Conf = configparser.ConfigParser()
    # Default optionxform lowercases option names; use str (identity) to
    # keep keys case-sensitive, matching the CamelCase keys used elsewhere.
    Conf.optionxform = str
    Conf.read_string(Str)
    return Conf

View File

@@ -12,16 +12,19 @@
from Libs.feedgen.feed import FeedGenerator
from Modules.Utils import *
def MakeFeed(OutputDir, CategoryFilter, Pages, SiteName, SiteTagline, SiteDomain, MaxEntries, Lang, FullSite=False, Minify=False):
def MakeFeed(Flags, Pages, FullSite=False):
CategoryFilter = Flags['FeedCategoryFilter']
MaxEntries = Flags['FeedEntries']
Feed = FeedGenerator()
Link = SiteDomain if SiteDomain else ' '
Link = Flags['SiteDomain'] if Flags['SiteDomain'] else ' '
Feed.id(Link)
Feed.title(SiteName if SiteName else 'Untitled Site')
Feed.title(Flags['SiteName'] if Flags['SiteName'] else 'Untitled Site')
Feed.link(href=Link, rel='alternate')
Feed.description(SiteTagline if SiteTagline else ' ')
if SiteDomain:
Feed.logo(SiteDomain + '/favicon.png')
Feed.language(Lang)
Feed.description(Flags['SiteTagline'] if Flags['SiteTagline'] else ' ')
if Flags['SiteDomain']:
Feed.logo(Flags['SiteDomain'] + '/favicon.png')
Feed.language(Flags['SiteLang'])
DoPages = []
for e in Pages:
@@ -35,8 +38,8 @@ def MakeFeed(OutputDir, CategoryFilter, Pages, SiteName, SiteTagline, SiteDomain
Entry = Feed.add_entry()
FileName = File.split('/')[-1]
File = f"{StripExt(File)}.html"
Content = ReadFile(f"{OutputDir}/{File}")
Link = SiteDomain + '/' + File if SiteDomain else ' '
Content = ReadFile(f"{Flags['OutDir']}/{File}")
Link = Flags['SiteDomain'] + '/' + File if Flags['SiteDomain'] else ' '
CreatedOn = GetFullDate(Meta['CreatedOn'])
EditedOn = GetFullDate(Meta['EditedOn'])
Entry.id(Link)
@@ -51,8 +54,8 @@ def MakeFeed(OutputDir, CategoryFilter, Pages, SiteName, SiteTagline, SiteDomain
EditedOn = EditedOn if EditedOn else CreatedOn if CreatedOn and not EditedOn else '1970-01-01T00:00+00:00'
Entry.updated(EditedOn)
if not os.path.exists(f"{OutputDir}/feed"):
os.mkdir(f"{OutputDir}/feed")
if not os.path.exists(f"{Flags['OutDir']}/feed"):
os.mkdir(f"{Flags['OutDir']}/feed")
FeedType = 'site.' if FullSite else ''
Feed.atom_file(f"{OutputDir}/feed/{FeedType}atom.xml", pretty=(not Minify))
Feed.rss_file(f"{OutputDir}/feed/{FeedType}rss.xml", pretty=(not Minify))
Feed.atom_file(f"{Flags['OutDir']}/feed/{FeedType}atom.xml", pretty=(not Flags['Minify']))
Feed.rss_file(f"{Flags['OutDir']}/feed/{FeedType}rss.xml", pretty=(not Flags['Minify']))

View File

@@ -22,13 +22,13 @@ def FixGemlogDateLine(Line):
Line = Words[0] + '\n' + Words[1][1:] + ' ' + ' '.join(Words[2:])
return Line
def GemtextCompileList(OutputDir, Pages, LimitFiles, Header=''):
def GemtextCompileList(Flags, Pages, LimitFiles):
Cmd = ''
for File, Content, Titles, Meta, ContentHTML, SlimHTML, Description, Image in Pages:
if IsLightRun(File, LimitFiles):
continue
Src = f"{OutputDir}.gmi/{StripExt(File)}.html.tmp"
Dst = f"{OutputDir}.gmi/{StripExt(File)}.gmi"
Src = f"{Flags['OutDir']}.gmi/{StripExt(File)}.html.tmp"
Dst = f"{Flags['OutDir']}.gmi/{StripExt(File)}.gmi"
SlimHTML = StripAttrs(SlimHTML)
for i in ('ol', 'ul', 'li'):
for j in ('<'+i+'>', '</'+i+'>'):
@@ -40,12 +40,12 @@ def GemtextCompileList(OutputDir, Pages, LimitFiles, Header=''):
for File, Content, Titles, Meta, ContentHTML, SlimHTML, Description, Image in Pages:
if IsLightRun(File, LimitFiles):
continue
Dst = f"{OutputDir}.gmi/{StripExt(File)}.gmi"
Dst = f"{Flags['OutDir']}.gmi/{StripExt(File)}.gmi"
Gemtext = ''
for Line in ReadFile(Dst).splitlines():
Line = FixGemlogDateLine(Line)
Gemtext += Line + '\n'
WriteFile(Dst, Header + Gemtext)
WriteFile(Dst, Flags['GemtextHeader'] + Gemtext)
def FindEarliest(Str, Items):
Pos, Item = 0, ''

View File

@@ -8,6 +8,7 @@
| ================================= """
from datetime import datetime
from multiprocessing import Pool
from Libs.bs4 import BeautifulSoup
from Modules.Config import *
from Modules.HTML import *
@@ -389,8 +390,12 @@ def PatchHTML(File, HTML, StaticPartsText, DynamicParts, DynamicPartsText, HTMLP
return HTML, ContentHTML, Description, Image
def MakeSite(OutputDir, LimitFiles, TemplatesText, StaticPartsText, DynamicParts, DynamicPartsText, ConfMenu, GlobalMacros, SiteName, BlogName, SiteTagline, SiteTemplate, SiteDomain, SiteRoot, FolderRoots, SiteLang, Locale, Minify, MinifyKeepComments, NoScripts, ImgAltToTitle, ImgTitleToAlt, Sorting, MarkdownExts, AutoCategories, CategoryUncategorized):
def MakeSite(Flags, OutputDir, LimitFiles, TemplatesText, StaticPartsText, DynamicParts, DynamicPartsText, ConfMenu, GlobalMacros, SiteName, BlogName, SiteTagline, SiteTemplate, SiteDomain, SiteRoot, FolderRoots, SiteLang, Locale, Sorting, MarkdownExts):
PagesPaths, PostsPaths, Pages, MadePages, Categories = [], [], [], [], {}
AutoCategories, CategoryUncategorized = Flags['CategoriesAutomatic'], Flags['CategoriesUncategorized']
ImgAltToTitle, ImgTitleToAlt = Flags['ImgAltToTitle'], Flags['ImgTitleToAlt']
MinifyKeepComments = Flags['MinifyKeepComments']
for Ext in FileExtensions['Pages']:
for File in Path('Pages').rglob(f"*.{Ext}"):
PagesPaths += [FileToStr(File, 'Pages/')]
@@ -458,9 +463,10 @@ def MakeSite(OutputDir, LimitFiles, TemplatesText, StaticPartsText, DynamicParts
print("[I] Writing Pages")
for File, Content, Titles, Meta in Pages:
#print(f'-> {File}')
LightRun = False if LimitFiles == False or File in LimitFiles else True
PagePath = f"{OutputDir}/{StripExt(File)}.html"
if File.lower().endswith(FileExtensions['Markdown']):
Content = markdown(PagePostprocessor('md', Content, Meta), extensions=MarkdownExts)
elif File.lower().endswith(('.pug')):
@@ -505,11 +511,11 @@ def MakeSite(OutputDir, LimitFiles, TemplatesText, StaticPartsText, DynamicParts
Locale=Locale,
LightRun=LightRun)
if Minify:
if Flags['Minify']:
if not LightRun:
HTML = DoMinifyHTML(HTML, MinifyKeepComments)
ContentHTML = DoMinifyHTML(ContentHTML, MinifyKeepComments)
if NoScripts:
if Flags['NoScripts']:
if not LightRun:
HTML = StripTags(HTML, ['script'])
ContentHTML = StripTags(ContentHTML, ['script'])
@@ -522,7 +528,6 @@ def MakeSite(OutputDir, LimitFiles, TemplatesText, StaticPartsText, DynamicParts
SlimHTML = None
else:
SlimHTML = HTMLPagesList + ContentHTML
if not LightRun:
WriteFile(PagePath, HTML)

View File

@@ -10,10 +10,10 @@
from urllib.parse import quote as URLEncode
from Modules.Utils import *
def MakeSitemap(Flags, Pages):
    """Write a plain-text sitemap (one URL per line) to <OutDir>/sitemap.txt.

    Flags: site configuration dict; reads 'SiteDomain' and 'OutDir'.
    Pages: iterable of 8-tuples (File, Content, Titles, Meta, ContentHTML,
           SlimHTML, Description, Image); only File is used here.
    """
    # Prefix each entry with the site domain when one is configured,
    # otherwise emit bare relative paths.
    Domain = Flags['SiteDomain'] + '/' if Flags['SiteDomain'] else ''
    Lines = []
    for File, Content, Titles, Meta, ContentHTML, SlimHTML, Description, Image in Pages:
        # Pages are emitted as .html regardless of their source extension.
        File = f"{StripExt(File)}.html"
        # Percent-encode the path so the sitemap stays URL-safe.
        Lines.append(Domain + URLEncode(File) + '\n')
    WriteFile(f"{Flags['OutDir']}/sitemap.txt", ''.join(Lines))