Mirror of https://gitlab.com/octtspacc/staticoso (synced 2025-03-13 09:40:17 +01:00)
Gemlog format fix; Cringe XML sitemap generation
commit 55647f0ad0, parent f85f8d6696
@@ -36,7 +36,7 @@ Feel free to experiment with all of this stuff!
 - [ ] Pug support for base templates and page side parts
 - [ ] Differential recompile (to optimize resource waste on non-ephemeral servers)
 - [ ] Hot-recompile (for website development)
-- [ ] XML sitemap generation
+- [x] XML sitemap generation
 - [x] Atom + RSS feed generation for posts
 - [x] Generation of website page tree in left sidebar
 - [x] Generation of titles in right sidebar with clickable links
@@ -472,7 +472,7 @@ def GetConfMenu(Conf):
     print(Menu)
     return Menu
 
-def Main(Args, FeedEntries):
+def Main(Args, FeedEntries, SitemapOut):
     HavePages, HavePosts = False, False
     SiteConf = LoadConf('Site.ini')
     #SiteMenu = GetConfMenu(SiteConf)
@@ -541,6 +541,19 @@ def Main(Args, FeedEntries):
             SiteDomain=SiteDomain,
             MaxEntries=FeedEntries,
             Lang=SiteLang,
+            FullMap=False,
+            Minify=True if Args.Minify and Args.Minify not in ('False', 'None') else False)
+
+    if SitemapOut:
+        print("[I] Generating Sitemap")
+        MakeFeed(
+            Pages=Pages,
+            SiteName=SiteName,
+            SiteTagline=SiteTagline,
+            SiteDomain=SiteDomain,
+            MaxEntries=FeedEntries,
+            Lang=SiteLang,
+            FullMap=True,
             Minify=True if Args.Minify and Args.Minify not in ('False', 'None') else False)
 
     if ActivityPub and MastodonURL and MastodonToken and SiteDomain:
@@ -593,6 +606,7 @@ if __name__ == '__main__':
     Parser.add_argument('--GemtextOut', type=bool)
     Parser.add_argument('--GemtextHeader', type=str)
    Parser.add_argument('--SiteTagline', type=str)
+    Parser.add_argument('--SitemapOut', type=bool)
     Parser.add_argument('--FeedEntries', type=int)
     Parser.add_argument('--FolderRoots', type=str)
     Parser.add_argument('--ContextParts', type=str)
@@ -606,10 +620,13 @@ if __name__ == '__main__':
         import lxml
         from Modules.Feed import *
         FeedEntries = Args.FeedEntries if Args.FeedEntries or Args.FeedEntries == 0 else 10
+        SitemapOut = True if Args.SitemapOut else False
     except:
-        print("[E] Can't load the Atom/RSS feed libraries. Their generation is disabled. Make sure the 'lxml' library is installed.")
+        print("[E] Can't load the XML libraries. XML Feeds and Sitemaps generation is disabled. Make sure the 'lxml' library is installed.")
         FeedEntries = 0
+        SitemapOut = False
 
     Main(
         Args=Args,
-        FeedEntries=FeedEntries)
+        FeedEntries=FeedEntries,
+        SitemapOut=SitemapOut)
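A note on invoking the new option: argparse's type=bool simply calls bool() on the raw argument string, so any non-empty value (even 'False') enables the sitemap, and it stays off only when --SitemapOut is omitted or given an empty string. A minimal standalone sketch of that behavior (illustration only, not part of the commit):

import argparse

# type=bool means bool('False') == True: any non-empty string enables the option.
Parser = argparse.ArgumentParser()
Parser.add_argument('--SitemapOut', type=bool)
print(Parser.parse_args(['--SitemapOut', 'True']).SitemapOut)   # True
print(Parser.parse_args(['--SitemapOut', 'False']).SitemapOut)  # True (non-empty string)
print(Parser.parse_args([]).SitemapOut)                         # None, so SitemapOut stays False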
@@ -12,7 +12,7 @@
 from Libs.feedgen.feed import FeedGenerator
 from Modules.Utils import *
 
-def MakeFeed(Pages, SiteName, SiteTagline, SiteDomain, MaxEntries, Lang, Minify=False):
+def MakeFeed(Pages, SiteName, SiteTagline, SiteDomain, MaxEntries, Lang, FullMap=False, Minify=False):
     Feed = FeedGenerator()
     Link = SiteDomain if SiteDomain else ' '
     Feed.id(Link)
@@ -24,21 +24,22 @@ def MakeFeed(Pages, SiteName, SiteTagline, SiteDomain, MaxEntries, Lang, Minify=
     Feed.language(Lang)
 
     DoPages = []
+    if FullMap:
+        MaxEntries = 50000 # Sitemap standard limit
     for e in Pages:
-        if MaxEntries != 0 and e[3]['Type'] == 'Post':
+        if MaxEntries != 0 and (FullMap or (not FullMap and e[3]['Type'] == 'Post')):
             DoPages += [e]
             MaxEntries -= 1
     DoPages.reverse()
 
     for File, Content, Titles, Meta, ContentHTML, SlimHTML, Description, Image in DoPages:
-        if Meta['Type'] == 'Post':
+        if FullMap or (not FullMap and Meta['Type'] == 'Post'):
             Entry = Feed.add_entry()
             File = '{}.html'.format(StripExt(File))
             Content = ReadFile('public/'+File)
             Link = SiteDomain + '/' + File if SiteDomain else ' '
             CreatedOn = GetFullDate(Meta['CreatedOn'])
             EditedOn = GetFullDate(Meta['EditedOn'])
 
             Entry.id(Link)
             Entry.title(Meta['Title'] if Meta['Title'] else ' ')
             Entry.description(Description)
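Side note on the two rewritten conditions above: X or (not X and Y) is logically the same as X or Y, so both checks could be shortened without changing behavior. A quick standalone check of that identity, with the shorter forms noted in comments:

from itertools import product

# Shorter equivalents of the new checks:
#   if MaxEntries != 0 and (FullMap or e[3]['Type'] == 'Post'):
#   if FullMap or Meta['Type'] == 'Post':
# Verify that A or (not A and B) equals A or B for every combination of truth values.
assert all((A or (not A and B)) == (A or B) for A, B in product((False, True), repeat=2))
print('equivalent')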
@@ -49,6 +50,9 @@ def MakeFeed(Pages, SiteName, SiteTagline, SiteDomain, MaxEntries, Lang, Minify=
             EditedOn = EditedOn if EditedOn else CreatedOn if CreatedOn and not EditedOn else '1970-01-01T00:00+00:00'
             Entry.updated(EditedOn)
 
+    if FullMap:
+        Feed.atom_file('public/sitemap.xml', pretty=(not Minify))
+    else:
         os.mkdir('public/feed')
         Feed.atom_file('public/feed/atom.xml', pretty=(not Minify))
         Feed.rss_file('public/feed/rss.xml', pretty=(not Minify))
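As the commit title admits, the sitemap written here is really an Atom feed saved under the name public/sitemap.xml, not a sitemaps.org urlset. For comparison only, here is a rough sketch of a standards-style writer, assuming the same page tuples and the WriteFile/StripExt helpers that appear elsewhere in this diff; this is not what the commit implements:

def MakeSitemap(Pages, SiteDomain):
    # Build one <url><loc> entry per page; the sitemaps.org format allows up to 50000 per file.
    Urls = ''
    for File, Content, Titles, Meta, ContentHTML, SlimHTML, Description, Image in Pages:
        Loc = (SiteDomain if SiteDomain else '') + '/' + '{}.html'.format(StripExt(File))
        Urls += '<url><loc>{}</loc></url>'.format(Loc)
    WriteFile(
        'public/sitemap.xml',
        '<?xml version="1.0" encoding="UTF-8"?>'
        '<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">' + Urls + '</urlset>')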
@@ -31,6 +31,15 @@ def StripAttrs(HTML):
         t.attrs = {}
     return str(Soup)
 
+def FixGemlogDateLine(Line):
+    if len(Line) >= 2 and Line[0] == '[' and Line[1].isdigit():
+        Line = Line[1:]
+    else:
+        Words = Line.split(' ')
+        if len(Words) >= 2 and len(Words[1]) >= 2 and Words[1][0] == '[' and Words[1][1].isdigit():
+            Line = Words[0] + '\n' + Words[1][1:] + ' ' + ' '.join(Words[2:])
+    return Line
+
 def GemtextCompileList(Pages, Header=''):
     Cmd = ''
     for File, Content, Titles, Meta, ContentHTML, SlimHTML, Description, Image in Pages:
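To make the new helper concrete, here is a quick trace of FixGemlogDateLine (as defined above) on two hypothetical gemlog lines; the real input format is not shown in this diff, so the sample strings are assumptions:

# Hypothetical sample lines, only to illustrate the two branches:
print(repr(FixGemlogDateLine('[2022-07-05 My post')))
# -> '2022-07-05 My post'         (leading '[' stripped)
print(repr(FixGemlogDateLine('=> [2022-07-05 My post')))
# -> '=>\n2022-07-05 My post'     (the '=>' arrow is moved onto its own line)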
@@ -45,7 +54,11 @@ def GemtextCompileList(Pages, Header=''):
     os.system(Cmd)
     for File, Content, Titles, Meta, ContentHTML, SlimHTML, Description, Image in Pages:
         Dst = 'public.gmi/{}.gmi'.format(StripExt(File))
-        WriteFile(Dst, Header + ReadFile(Dst))
+        Gemtext = ''
+        for Line in ReadFile(Dst).splitlines():
+            Line = FixGemlogDateLine(Line)
+            Gemtext += Line + '\n'
+        WriteFile(Dst, Header + Gemtext)
 
 def FindEarliest(Str, Items):
     Pos, Item = 0, ''