Add basic article publishing

parent 49ed06fcb0
commit 99622bbb3d
@@ -302,5 +302,7 @@ def command_publish_turn(args):
     settings.
     """
     # Module imports
+    from amanuensis.lexicon.manage import attempt_publish
 
-    raise NotImplementedError() # TODO
+    # Internal call
+    attempt_publish(args.lexicon)
@@ -59,7 +59,7 @@ class ConfigDirectoryContext():
         fpath = os.path.join(self.path, filename)
         if not os.path.isfile(fpath):
             raise MissingConfigError(fpath)
-        os.delete(fpath)
+        os.remove(fpath)
 
     def ls(self):
         """Lists all files in this directory."""
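The change above is a genuine bug fix rather than a rename: the standard library's os module has no delete function, so the old line would have raised AttributeError the first time a config file was deleted; os.remove is the correct call. A minimal standalone sketch of the fixed behaviour (the enclosing method name is not shown in this hunk and is assumed, and a stdlib exception stands in for MissingConfigError):

import os

class ConfigDirectoryContext:
    """Stripped-down sketch of the directory context from the hunk above."""
    def __init__(self, path):
        self.path = path

    def delete(self, filename):  # method name assumed; the diff only shows its body
        fpath = os.path.join(self.path, filename)
        if not os.path.isfile(fpath):
            raise FileNotFoundError(fpath)  # stands in for MissingConfigError here
        os.remove(fpath)  # os.delete does not exist in the stdlib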
@@ -128,6 +128,7 @@ class LexiconConfigDirectoryContext(ConfigFileMixin, ConfigDirectoryContext):
         super().__init__(path)
         self.draft = ConfigDirectoryContext(os.path.join(self.path, 'draft'))
         self.src = ConfigDirectoryContext(os.path.join(self.path, 'src'))
+        self.article = ConfigDirectoryContext(os.path.join(self.path, 'article'))
 
 
 class UserConfigDirectoryContext(ConfigFileMixin, ConfigDirectoryContext):
@@ -13,6 +13,7 @@ from amanuensis.config import prepend, json_rw, json_ro, logger
 from amanuensis.config.loader import AttrOrderedDict
 from amanuensis.errors import ArgumentError
 from amanuensis.lexicon import LexiconModel
+from amanuensis.parser import parse_raw_markdown, GetCitations, HtmlRenderer, filesafe_title
 from amanuensis.resources import get_stream
 
 def valid_name(name):
@@ -261,3 +262,62 @@ def delete_character(lex, charname):
     # Remove character from character list
     with json_rw(lex.config_path) as cfg:
         del cfg.character[char.cid]
+
+
+def attempt_publish(lexicon):
+    # Need to do checks
+
+    # Get the articles to publish
+    draft_ctx = lexicon.ctx.draft
+    drafts = draft_ctx.ls()
+    turn = []
+    for draft_fn in drafts:
+        with draft_ctx.read(draft_fn) as draft:
+            if draft.status.approved:
+                draft_fn = f'{draft.character}.{draft.aid}'
+                turn.append(draft_fn)
+
+    return publish_turn(lexicon, turn)
+
+def publish_turn(lexicon, drafts):
+    # Move the drafts to src
+    draft_ctx = lexicon.ctx.draft
+    src_ctx = lexicon.ctx.src
+    for filename in drafts:
+        with draft_ctx.read(filename) as source:
+            with src_ctx.new(filename) as dest:
+                dest.update(source)
+        draft_ctx.delete(filename)
+
+    # Rebuilding the interlink data begins with loading all articles
+    article_model_by_title = {}
+    article_renderable_by_title = {}
+    for filename in src_ctx.ls():
+        with src_ctx.read(filename) as article:
+            article_model_by_title[article.title] = article
+            article_renderable_by_title[article.title] = parse_raw_markdown(article.contents)
+
+    # Determine the full list of articles by checking for phantom citations
+    written_titles = list(article_model_by_title.keys())
+    phantom_titles = []
+    for article in article_renderable_by_title.values():
+        citations = article.render(GetCitations())
+        for target in citations:
+            if target not in written_titles and target not in phantom_titles:
+                phantom_titles.append(target)
+
+    # Render article HTML and save to cache
+    rendered_html_by_title = {}
+    for title, article in article_renderable_by_title.items():
+        html = article.render(HtmlRenderer(written_titles))
+        filename = filesafe_title(title)
+        with lexicon.ctx.article.new(filename) as f:
+            f['title'] = title
+            f['html'] = html
+
+    for title in phantom_titles:
+        html = ""
+        filename = filesafe_title(title)
+        with lexicon.ctx.article.new(filename) as f:
+            f['title'] = title
+            f['html'] = html
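Taken together, attempt_publish and publish_turn move approved drafts out of the draft directory into src and rebuild the rendered-article cache under the lexicon's article directory. A hedged usage sketch, assuming the amanuensis package is importable and a lexicon already exists; the lexicon name is hypothetical, while LexiconModel.by and attempt_publish are the identifiers this commit itself uses:

from amanuensis.lexicon import LexiconModel
from amanuensis.lexicon.manage import attempt_publish

lexicon = LexiconModel.by(name='example')  # 'example' is a made-up lexicon name
if lexicon is not None:
    # Gathers approved drafts, copies them into src, and regenerates article HTML
    attempt_publish(lexicon)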
@@ -2,6 +2,7 @@
 Module encapsulating all markdown parsing functionality
 """
 
-from amanuensis.parser.analyze import FeatureCounter
+from amanuensis.parser.analyze import FeatureCounter, GetCitations
+from amanuensis.parser.helpers import titlesort, filesafe_title
 from amanuensis.parser.tokenizer import parse_raw_markdown
-from amanuensis.parser.render import PreviewHtmlRenderer
+from amanuensis.parser.render import PreviewHtmlRenderer, HtmlRenderer
@@ -33,8 +33,11 @@ class RenderableVisitor():
 class GetCitations(RenderableVisitor):
     def __init__(self):
         self.citations = []
+    def ParsedArticle(self, span):
+        span.recurse(self)
+        return self.citations
     def CitationSpan(self, span):
-        self.citations.append(self.cite_target)
+        self.citations.append(span.cite_target)
         return self
 
 class FeatureCounter(RenderableVisitor):
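With the new ParsedArticle method, rendering a parsed article through GetCitations returns the accumulated citation list rather than the visitor itself, which is exactly what publish_turn above depends on. A sketch of that call chain, assuming amanuensis is importable; the sample text and its double-bracket citation markup are illustrative guesses, since this diff does not show the tokenizer's citation syntax:

from amanuensis.parser import parse_raw_markdown, GetCitations

draft_text = "A body paragraph citing [[Another Article]]."  # citation syntax assumed
renderable = parse_raw_markdown(draft_text)
citations = renderable.render(GetCitations())  # ParsedArticle now returns self.citations
print(citations)  # expected to be a list of cited titles, e.g. ['Another Article']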
@@ -1,3 +1,6 @@
+import re
+import urllib
+
 def normalize_title(title):
     """
     Normalizes strings as titles:
@@ -6,7 +9,7 @@ def normalize_title(title):
     - Capitalizes the first word
     """
     cleaned = re.sub(r'\s+', " ", title.strip())
-    return cleaned[0:1].upper() + cleaned[1:]
+    return cleaned[:1].capitalize() + cleaned[1:]
 
 def titlesort(title):
     """
@@ -20,4 +23,15 @@ def titlesort(title):
     elif lower.startswith("a "):
         return lower[2:]
     else:
         return lower
+
+def filesafe_title(title):
+    """
+    Makes an article title filename-safe.
+    """
+    s = re.sub(r"\s+", '_', title)  # Replace whitespace with _
+    s = re.sub(r"~", '-', s)        # parse.quote doesn't catch ~
+    s = urllib.parse.quote(s)       # Encode all other characters
+    s = re.sub(r"%", "", s)         # Strip encoding %s
+    s = s[:64]                      # Limit to 64 characters
+    return s
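Because filesafe_title leans only on re and urllib, its behaviour is easy to spot-check. The sample titles below are made up; the expected outputs follow mechanically from the new function (whitespace to underscores, ~ to -, percent-encoding via urllib.parse.quote, then the % characters stripped and the result capped at 64 characters):

from amanuensis.parser.helpers import filesafe_title

print(filesafe_title("A Tale of Two Cities"))  # A_Tale_of_Two_Cities
print(filesafe_title("The Scholar's Folly"))   # The_Scholar27s_Folly  (' -> %27 -> 27)
print(filesafe_title("Tilde ~ Title"))         # Tilde_-_Title         (~ swapped before quoting)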
@@ -4,6 +4,47 @@ readable formats.
 """
 
 
+class HtmlRenderer():
+    """
+    Renders an article token tree into published article HTML.
+    """
+    def __init__(self, written_articles):
+        self.written_articles = written_articles
+
+    def TextSpan(self, span):
+        return span.innertext
+
+    def LineBreak(self, span):
+        return '<br>'
+
+    def ParsedArticle(self, span):
+        return '\n'.join(span.recurse(self))
+
+    def BodyParagraph(self, span):
+        return f'<p>{"".join(span.recurse(self))}</p>'
+
+    def SignatureParagraph(self, span):
+        return (
+            '<hr><span class="signature"><p>'
+            f'{"".join(span.recurse(self))}'
+            '</p></span>'
+        )
+
+    def BoldSpan(self, span):
+        return f'<b>{"".join(span.recurse(self))}</b>'
+
+    def ItalicSpan(self, span):
+        return f'<i>{"".join(span.recurse(self))}</i>'
+
+    def CitationSpan(self, span):
+        if span.cite_target in self.written_articles:
+            link_class = ''
+        else:
+            link_class = ' class="phantom"'
+        return f'<a href="#"{link_class}>{"".join(span.recurse(self))}</a>'
+
+
+
 class PreviewHtmlRenderer():
     def __init__(self, article_map):
         """
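HtmlRenderer is the publishing counterpart to PreviewHtmlRenderer: publish_turn hands it the list of written titles so that citations of unwritten articles come out tagged class="phantom". A hedged sketch of the call, assuming amanuensis is importable; the body text, its citation markup, and the title list are illustrative only:

from amanuensis.parser import parse_raw_markdown, HtmlRenderer

written_titles = ["Another Article"]  # hypothetical list of already-written article titles
renderable = parse_raw_markdown("A paragraph citing [[Another Article]].")  # markup assumed
html = renderable.render(HtmlRenderer(written_titles))
# html is a string of <p>/<a>/<b>/<i> markup; citations whose target is not in
# written_titles get class="phantom" on their <a> tag.
print(html)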
@@ -31,12 +31,13 @@ def register_custom_filters(app):
 def lexicon_param(route):
     """Wrapper for loading a route's lexicon"""
     @wraps(route)
-    def with_lexicon(name):
+    def with_lexicon(**kwargs):
+        name = kwargs.get('name')
         g.lexicon = LexiconModel.by(name=name)
         if g.lexicon is None:
             flash("Couldn't find a lexicon with the name '{}'".format(name))
             return redirect(url_for("home.home"))
-        return route(name)
+        return route(**kwargs)
     return with_lexicon
 
 
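The old wrapper hard-coded a single name argument, so any lexicon route with a second URL parameter, like the /article/<title> route added later in this commit, would have failed with a TypeError. Forwarding **kwargs keeps the lexicon lookup while passing every view argument through untouched. A minimal self-contained sketch of that intent (the Flask specifics and the g.lexicon lookup are elided here):

from functools import wraps

def lexicon_param(route):
    """Wrapper for loading a route's lexicon"""
    @wraps(route)
    def with_lexicon(**kwargs):
        name = kwargs.get('name')  # used for the g.lexicon lookup, elided in this sketch
        return route(**kwargs)
    return with_lexicon

@lexicon_param
def article(name, title):  # two URL parameters now flow through unchanged
    return f"{name}/{title}"

print(article(name='example', title='Some_Article'))  # example/Some_Article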
@@ -8,8 +8,8 @@ from flask_login import login_required, current_user
 from amanuensis.config import root
 from amanuensis.config.loader import ReadOnlyOrderedDict
 from amanuensis.errors import MissingConfigError
-from amanuensis.lexicon.manage import valid_add, add_player, add_character
-from amanuensis.parser import parse_raw_markdown, PreviewHtmlRenderer, FeatureCounter
+from amanuensis.lexicon.manage import valid_add, add_player, add_character, attempt_publish
+from amanuensis.parser import parse_raw_markdown, PreviewHtmlRenderer, FeatureCounter, filesafe_title
 from amanuensis.server.forms import (
     LexiconConfigForm, LexiconJoinForm,LexiconCharacterForm, LexiconReviewForm)
 from amanuensis.server.helpers import (
@@ -54,7 +54,24 @@ def get_bp():
     @lexicon_param
     @player_required_if_not_public
     def contents(name):
-        return render_template('lexicon/contents.html')
+        articles = []
+        filenames = g.lexicon.ctx.article.ls()
+        for filename in filenames:
+            with g.lexicon.ctx.article.read(filename) as a:
+                articles.append({
+                    'title': a.title,
+                    'link': url_for('lexicon.article', name=name, title=filesafe_title(a.title)),
+                })
+        return render_template('lexicon/contents.html', articles=articles)
+
+    @bp.route('/article/<title>')
+    @lexicon_param
+    @player_required_if_not_public
+    def article(name, title):
+        with g.lexicon.ctx.article.read(title) as a:
+            article = dict(a)
+            article['html'] = Markup(a['html'])
+        return render_template('lexicon/article.html', article=article)
 
     @bp.route('/rules/', methods=['GET'])
     @lexicon_param
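Both routes read back the per-article JSON files that publish_turn writes into the lexicon's article directory; based on the f['title'] and f['html'] assignments earlier in this commit, each cached file holds roughly a title string and a rendered HTML string. A hedged read-side sketch outside of Flask (the lexicon and article names are hypothetical):

from amanuensis.lexicon import LexiconModel
from amanuensis.parser import filesafe_title

lexicon = LexiconModel.by(name='example')  # hypothetical lexicon name
with lexicon.ctx.article.read(filesafe_title("Another Article")) as a:
    print(a.title)     # the stored article title
    print(a['html'])   # the rendered HTML produced by HtmlRenderer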
@@ -177,6 +194,8 @@ def get_bp():
                 draft.status.ready = True
                 draft.status.approved = True
                 g.lexicon.add_log(f"Article '{draft.title}' approved ({draft.aid})")
+                if g.lexicon.publish.asap:
+                    attempt_publish(g.lexicon)
             else:
                 draft.status.ready = False
                 draft.status.approved = False
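The g.lexicon.publish.asap check implies the lexicon config now carries a publish.asap flag: when it is set, a turn is published the moment a draft is approved instead of waiting for an explicit publish command. A guess at how that flag could be inspected, since the config file layout itself is not part of this diff (the lexicon name is hypothetical and the config field location is inferred):

from amanuensis.config import json_ro
from amanuensis.lexicon import LexiconModel

lexicon = LexiconModel.by(name='example')  # hypothetical lexicon name
with json_ro(lexicon.config_path) as cfg:
    print(cfg.publish.asap)  # True -> approving a draft immediately triggers attempt_publish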
@@ -0,0 +1,15 @@
+{% extends "lexicon/lexicon.html" %}
+{% block title %}{{ article.title }} | {{ lexicon_title }}{% endblock %}
+
+{% block main %}
+
+{% for message in get_flashed_messages() %}
+<span style="color:#ff0000">{{ message }}</span><br>
+{% endfor %}
+
+<h1>{{ article.title }}</h1>
+
+{{ article.html }}
+
+{% endblock %}
+{% set template_content_blocks = [self.main()] %}
@@ -8,7 +8,13 @@
 <span style="color:#ff0000">{{ message }}</span><br>
 {% endfor %}
 
-Placeholder text
+{% if articles %}
+<ul>
+{% for article in articles %}
+<li><a href="{{ article.link }}">{{ article.title }}</a></li>
+{% endfor %}
+</ul>
+{% endif %}
 
 {% endblock %}
 {% set template_content_blocks = [self.main()] %}