
Allow txt content and deny robots

Tim Van Baak 2023-09-11 17:48:03 -07:00
parent 7755f2be31
commit c652657ea7
2 changed files with 26 additions and 20 deletions


@@ -40,7 +40,7 @@ def main():
             count += 1
             # Future-proofing
-            if not filename.endswith(".html") and not filename.endswith(".md"):
+            if filename.rsplit(".")[-1] not in ("html", "md", "txt"):
                 raise Exception("Support for this filetype is not yet supported:", filename)
             path = src / dirpath / filename
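
A minimal standalone sketch (hypothetical filenames) of what the new suffix check does compared to the old pair of endswith() calls: .txt sources now pass the filter, anything else still raises.

# Hypothetical filenames, purely to illustrate the new check.
for filename in ("index.md", "page.html", "robots.txt", "logo.png"):
    ext = filename.rsplit(".")[-1]      # last dot-separated component
    if ext in ("html", "md", "txt"):
        print(filename, "-> processed")
    else:
        print(filename, "-> rejected")  # the real script raises here
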
@@ -58,6 +58,7 @@ def main():
             content = md.convert(content)
             meta = md.Meta
+            if dest.name.endswith("html"):
             # Inject content into the template
             page_content = bs4.BeautifulSoup(content, features="html.parser")
             page = copy.copy(template)
@@ -79,9 +80,12 @@ def main():
             page.title.string = title
             page.header.h1.string = title
+            # The fully templated page is the new content
+            content = str(page)
             # Write the destination file
             print("Writing ", dest)
-            dest.write_text(str(page))
+            dest.write_text(content)
     print("Processed", count, "files")

src/robots.txt Normal file

@@ -0,0 +1,2 @@
+User-agent: *
+Disallow: /
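
The new robots.txt asks every crawler to stay out of the entire site. A quick check with the standard library's urllib.robotparser (the site URL below is hypothetical):

from urllib.robotparser import RobotFileParser

rp = RobotFileParser()
rp.parse(["User-agent: *", "Disallow: /"])
print(rp.can_fetch("*", "https://example.com/"))           # False
print(rp.can_fetch("Googlebot", "https://example.com/p"))  # False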