diff --git a/Makefile b/Makefile
index dd7a8f1..0573225 100644
--- a/Makefile
+++ b/Makefile
@@ -14,7 +14,10 @@ watch:
 serve:
 	python -m http.server --directory out/
 
-publish:
+pubdate:
+	sed -i "s/pubdate: now/pubdate: $$(date -Isec)/" src/blog/*.md
+
+upload:
 	./build.py srv/
 	pagefind --site srv/
 	rsync -av srv/* ssh.alogoulogoi.com:/srv/www.alogoulogoi.com
\ No newline at end of file
diff --git a/build.py b/build.py
index ed660cd..93060db 100755
--- a/build.py
+++ b/build.py
@@ -1,12 +1,14 @@
 #!/usr/bin/env python
 
 import argparse
+from datetime import datetime, timezone
 import copy
 import os
 import pathlib
 import shutil
 
 import bs4
+from feedgen.feed import FeedGenerator
 import markdown
 
 
@@ -21,6 +23,10 @@ def main():
     md = markdown.Markdown(extensions=["attr_list", "footnotes", "md_in_html", "meta"])
     comment_md = markdown.Markdown()
 
+    # Map of feed name -> list of entry dicts for that feed
+    feeds = {}
+    build_date = datetime.now(timezone.utc)
+
     # Load the template
     template = bs4.BeautifulSoup(
         (src / ".template.html").read_text(encoding="utf8"),
@@ -107,10 +113,41 @@ def main():
            aside.extend(html.p.contents)
            page.header.append(aside)
 
+        # RSS metadata
+        if "feed" in meta and "pubdate" in meta:
+            pubdate = datetime.fromisoformat(meta["pubdate"][0])
+            link = f"https://www.alogoulogoi.com/{dest.relative_to(out).as_posix()}"
+            for feed in meta["feed"]:
+                if feed not in feeds:
+                    feeds[feed] = []
+                feeds[feed].append({
+                    "title": meta_title[0],
+                    "link": link,
+                    "description": "",
+                    "pubdate": pubdate,
+                })
+
         # Write the fully templated page
         print("Writing ", dest)
         dest.write_text(str(page))
 
+    for feed, items in feeds.items():
+        fg = FeedGenerator()
+        fg.title(f"alogoulogoi /{feed}/")
+        fg.link(href=f"https://www.alogoulogoi.com/{feed}/feed.xml")
+        fg.description("Blog posts from alogoulogoi")
+        fg.language("en-us")
+        fg.lastBuildDate(build_date)
+        for item in sorted(items, key=lambda i: i["pubdate"]):
+            entry = fg.add_entry()
+            entry.title(item["title"])
+            entry.link(href=item["link"])
+            entry.description(item["description"])
+            entry.published(item["pubdate"])
+        rss_path = (out / feed / "feed.xml")
+        os.makedirs(rss_path.parent, exist_ok=True)
+        rss_path.write_bytes(fg.rss_str(pretty=True))
+
     print("Processed", count, "files")
 
 
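
Note on the metadata the new code consumes: python-markdown's "meta" extension exposes each front-matter key as a list of strings, which is why build.py reads meta["pubdate"][0] and iterates over meta["feed"], and the `make pubdate` rule rewrites a literal "pubdate: now" line into an ISO timestamp that datetime.fromisoformat() can parse. A minimal sketch under those assumptions (the post text and the "blog" feed name below are made-up examples, not taken from the repository):

import markdown
from datetime import datetime

post = """\
title: An example post
feed: blog
pubdate: 2024-05-01T12:00:00+00:00

Post body goes here.
"""

md = markdown.Markdown(extensions=["meta"])
md.convert(post)

# The "meta" extension lowercases keys and stores every value as a list of strings.
print(md.Meta)
# -> {'title': ['An example post'], 'feed': ['blog'], 'pubdate': ['2024-05-01T12:00:00+00:00']}

# The same parsing build.py applies before handing the date to FeedGenerator.
pubdate = datetime.fromisoformat(md.Meta["pubdate"][0])
print(pubdate.isoformat())
# -> 2024-05-01T12:00:00+00:00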