Move publish functions to lexicon.gameloop
parent cae12b960d
commit 7d633f5201
@@ -8,6 +8,7 @@
 # run after commandline parsing has already occurred.
 #
 
+
 def server_commands(commands={}):
     if commands:
         return commands
@@ -18,6 +19,7 @@ def server_commands(commands={}):
         commands[name] = func
     return commands
 
+
 def lexicon_commands(commands={}):
     if commands:
         return commands
@@ -28,6 +30,7 @@ def lexicon_commands(commands={}):
         commands["lexicon-" + name] = func
     return commands
 
+
 def user_commands(commands={}):
     if commands:
         return commands
@@ -38,12 +41,15 @@ def user_commands(commands={}):
         commands["user-" + name] = func
     return commands
 
+
 def get_commands():
     return {**server_commands(), **lexicon_commands(), **user_commands()}
 
+
 def cmd_desc(func):
     return ((func.__doc__ or "").strip() or '\n').splitlines()[0]
 
+
 def describe_commands():
     longest = max(map(len, server_commands().keys()))
     server_desc = "General commands:\n{}\n".format("\n".join([
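The hunks above only insert blank lines around the command-registry helpers, but they show the pattern this module relies on: each *_commands() function keeps its results in a mutable default dict, so the first call populates the registry and later calls hit the early `if commands: return commands` check and return the cached dict, while cmd_desc takes a command's description from the first line of its docstring. The sketch below is a minimal, self-contained illustration of that caching pattern only; the names (example_commands, cmd_example) and the inspect-based discovery step are assumptions, not the project's actual code.

# Sketch only: a cached command registry built on a mutable default argument.
import inspect
import sys


def example_commands(commands={}):
    # The default dict persists between calls, so it acts as a cache.
    if commands:
        return commands
    # Hypothetical discovery step: collect cmd_* functions from this module.
    module = sys.modules[__name__]
    for name, func in inspect.getmembers(module, inspect.isfunction):
        if name.startswith("cmd_"):
            commands[name[len("cmd_"):]] = func
    return commands


def cmd_example(args):
    """An example command; the docstring's first line doubles as its description."""
    return 0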
@@ -294,13 +294,14 @@ def command_char_list(args):
 # Procedural commands
 #
 
+
 @alias('lpt')
 @requires_lexicon
-@add_argument(
-    "--as-deadline", action="store_true",
+@add_argument("--as-deadline",
+    action="store_true",
     help="Notifies players of the publish result")
-@add_argument(
-    "--force", action="store_true",
+@add_argument("--force",
+    action="store_true",
     help="Publish all approved articles, regardless of other checks")
 def command_publish_turn(args):
     """
@@ -313,7 +314,7 @@ def command_publish_turn(args):
     settings.
     """
     # Module imports
-    from amanuensis.lexicon.manage import attempt_publish
+    from amanuensis.lexicon import attempt_publish
 
     # Internal call
     attempt_publish(args.lexicon)
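For context on the decorator stack above: @alias, @requires_lexicon, and @add_argument appear to attach CLI metadata to the command function so it can be wired into an argparse subparser later, and the hunk at -313 shows the command body now importing attempt_publish from the package root instead of from amanuensis.lexicon.manage. The sketch below is an assumption about how such argument-recording decorators could work; it is not the project's actual machinery.

import argparse


def add_argument(*args, **kwargs):
    """Hypothetical decorator: record an argparse spec on the function."""
    def decorator(func):
        specs = getattr(func, "_arg_specs", [])
        # Prepend so the recorded specs end up in the order the decorators are written.
        func._arg_specs = [(args, kwargs)] + specs
        return func
    return decorator


def register_command(subparsers, func):
    """Hypothetical registration step replaying the recorded specs onto a subparser."""
    parser = subparsers.add_parser(func.__name__.replace("command_", ""))
    for spec_args, spec_kwargs in getattr(func, "_arg_specs", []):
        parser.add_argument(*spec_args, **spec_kwargs)
    parser.set_defaults(func=func)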
@@ -1,4 +1,5 @@
 from amanuensis.lexicon.admin import valid_name, create_lexicon
+from amanuensis.lexicon.gameloop import attempt_publish
 from amanuensis.lexicon.setup import (
     player_can_join_lexicon,
     add_player_to_lexicon,
@@ -7,6 +8,7 @@ from amanuensis.lexicon.setup import (
 __all__ = [member.__name__ for member in [
     valid_name,
     create_lexicon,
+    attempt_publish,
     player_can_join_lexicon,
     add_player_to_lexicon,
     create_character_in_lexicon,
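One note on the __all__ hunk above: the list is built from the imported function objects rather than from string literals, so a misspelled entry fails loudly at import time instead of silently exporting a nonexistent name. A standalone illustration of the same pattern follows; the two stub functions are placeholders, not the package's real members.

def valid_name(name: str) -> bool:
    """Stub standing in for amanuensis.lexicon.admin.valid_name."""
    return bool(name and name.strip())


def attempt_publish(lexicon) -> None:
    """Stub standing in for amanuensis.lexicon.gameloop.attempt_publish."""


# Evaluates to ['valid_name', 'attempt_publish']; a typo in either entry
# would raise NameError here rather than producing a bad export list.
__all__ = [member.__name__ for member in [
    valid_name,
    attempt_publish,
]]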
@@ -2,3 +2,119 @@
 Submodule of functions for managing lexicon games during the core game
 loop of writing and publishing articles.
 """
+from typing import Iterable, Any
+
+from amanuensis.models import LexiconModel
+from amanuensis.parser import (
+    parse_raw_markdown,
+    GetCitations,
+    HtmlRenderer,
+    titlesort,
+    filesafe_title)
+
+
+def attempt_publish(lexicon: LexiconModel) -> None:
+    """
+    If the lexicon's publish policy allows the current set of approved
+    articles to be published, publish them and rebuild all pages.
+    """
+    # TODO Check against lexicon publish policy
+
+    # Get the approved drafts to publish
+    draft_ctx = lexicon.ctx.draft
+    to_publish = []
+    for draft_fn in draft_ctx.ls():
+        with draft_ctx.read(draft_fn) as draft:
+            if draft.status.approved:
+                to_publish.append(draft_fn)
+
+    # Publish new articles
+    publish_drafts(lexicon, to_publish)
+
+    # Rebuild all pages
+    rebuild_pages(lexicon)
+
+
+def publish_drafts(lexicon: LexiconModel, filenames: Iterable[str]) -> None:
+    """
+    Moves the given list of drafts to the article source directory
+    """
+    # Move the drafts to src
+    draft_ctx = lexicon.ctx.draft
+    src_ctx = lexicon.ctx.src
+    for filename in filenames:
+        with draft_ctx.read(filename) as source:
+            with src_ctx.edit(filename, create=True) as dest:
+                dest.update(source)
+        draft_ctx.delete(filename)
+
+
+def rebuild_pages(lexicon: LexiconModel) -> None:
+    """
+    Rebuilds all cached html
+    """
+    src_ctx = lexicon.ctx.src
+    article: Any = None  # typing workaround
+
+    # Load all articles in the source directory and rebuild their renderable trees
+    article_model_by_title = {}
+    article_renderable_by_title = {}
+    for filename in src_ctx.ls():
+        with src_ctx.read(filename) as article:
+            article_model_by_title[article.title] = article
+            article_renderable_by_title[article.title] = (
+                parse_raw_markdown(article.contents))
+
+    # Get all citations
+    citations_by_title = {}
+    for title, article in article_renderable_by_title.items():
+        citations_by_title[title] = sorted(
+            set(article.render(GetCitations())), key=titlesort)
+
+    # Get the written and phantom lists from the citation map
+    written_titles = list(citations_by_title.keys())
+    phantom_titles = []
+    for citations in citations_by_title.values():
+        for title in citations:
+            if title not in written_titles and title not in phantom_titles:
+                phantom_titles.append(title)
+
+    # Build the citation map and save it to the info cache
+    with lexicon.ctx.edit('info', create=True) as info:
+        for title in info.keys():
+            if title not in written_titles and title not in phantom_titles:
+                del info[title]
+        for title in written_titles:
+            info[title] = {
+                'citations': citations_by_title[title],
+                'character': article_model_by_title[title].character
+            }
+        for title in phantom_titles:
+            info[title] = {
+                'citations': [],
+                'character': None,
+            }
+
+    # Render article HTML and save to article cache
+    for title, article in article_renderable_by_title.items():
+        html = article.render(HtmlRenderer(lexicon.cfg.name, written_titles))
+        filename = filesafe_title(title)
+        with lexicon.ctx.article.edit(filename, create=True) as f:
+            f['title'] = title
+            f['html'] = html
+            f['cites'] = citations_by_title[title]
+            f['citedby'] = [
+                citer for citer, citations
+                in citations_by_title.items()
+                if title in citations]
+
+    for title in phantom_titles:
+        filename = filesafe_title(title)
+        with lexicon.ctx.article.edit(filename, create=True) as f:
+            f['title'] = title
+            f['html'] = ""
+            f['cites'] = []
+            f['citedby'] = [
+                citer for citer, citations
+                in citations_by_title.items()
+                if title in citations]
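The new rebuild_pages above derives two sets of titles from the parsed articles: written titles (articles that exist in src) and phantom titles (titles that are cited somewhere but never written), then inverts the citation map to fill each page's 'citedby' list. Below is a toy, self-contained sketch of that bookkeeping with hardcoded data; the real code obtains citations_by_title by rendering each article's parse tree through GetCitations.

# Toy stand-in for {article title: sorted titles it cites}.
citations_by_title = {
    'Alpha': ['Beta', 'Gamma'],
    'Beta': ['Alpha'],
}

# Written titles have an article; phantom titles are cited but never written.
written_titles = list(citations_by_title.keys())
phantom_titles = []
for citations in citations_by_title.values():
    for title in citations:
        if title not in written_titles and title not in phantom_titles:
            phantom_titles.append(title)

# Invert the map: for each title, which articles cite it?
citedby = {
    title: [citer for citer, citations in citations_by_title.items()
            if title in citations]
    for title in written_titles + phantom_titles
}

print(phantom_titles)  # ['Gamma']
print(citedby)  # {'Alpha': ['Beta'], 'Beta': ['Alpha'], 'Gamma': ['Alpha']}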
@@ -112,88 +112,3 @@ def delete_character(lex, charname):
     with json_rw(lex.config_path) as cfg:
         del cfg.character[char.cid]
 
-
-def attempt_publish(lexicon):
-    # Need to do checks
-
-    # Get the articles to publish
-    draft_ctx = lexicon.ctx.draft
-    drafts = draft_ctx.ls()
-    turn = []
-    for draft_fn in drafts:
-        with draft_ctx.read(draft_fn) as draft:
-            if draft.status.approved:
-                draft_fn = f'{draft.character}.{draft.aid}'
-                turn.append(draft_fn)
-
-    return publish_turn(lexicon, turn)
-
-def publish_turn(lexicon, drafts):
-    # Move the drafts to src
-    draft_ctx = lexicon.ctx.draft
-    src_ctx = lexicon.ctx.src
-    for filename in drafts:
-        with draft_ctx.read(filename) as source:
-            with src_ctx.edit(filename, create=True) as dest:
-                dest.update(source)
-        draft_ctx.delete(filename)
-
-    # Load all articles in the source directory and rebuild their renderable trees
-    article_model_by_title = {}
-    article_renderable_by_title = {}
-    for filename in src_ctx.ls():
-        with src_ctx.read(filename) as article:
-            article_model_by_title[article.title] = article
-            article_renderable_by_title[article.title] = parse_raw_markdown(article.contents)
-
-    # Get all citations
-    citations_by_title = {}
-    for title, article in article_renderable_by_title.items():
-        citations_by_title[title] = sorted(set(article.render(GetCitations())), key=titlesort)
-
-    # Get the written and phantom lists from the citation map
-    written_titles = list(citations_by_title.keys())
-    phantom_titles = []
-    for citations in citations_by_title.values():
-        for title in citations:
-            if title not in written_titles and title not in phantom_titles:
-                phantom_titles.append(title)
-
-    # Build the citation map and save it to the info cache
-    # TODO delete obsolete entries?
-    with lexicon.ctx.edit('info', create=True) as info:
-        for title in written_titles:
-            info[title] = {
-                'citations': citations_by_title[title],
-                'character': article_model_by_title[title].character
-            }
-        for title in phantom_titles:
-            info[title] = {
-                'citations': [],
-                'character': None,
-            }
-
-    # Render article HTML and save to article cache
-    rendered_html_by_title = {}
-    for title, article in article_renderable_by_title.items():
-        html = article.render(HtmlRenderer(lexicon.name, written_titles))
-        filename = filesafe_title(title)
-        with lexicon.ctx.article.edit(filename, create=True) as f:
-            f['title'] = title
-            f['html'] = html
-            f['cites'] = citations_by_title[title]
-            f['citedby'] = [
-                citer for citer, citations
-                in citations_by_title.items()
-                if title in citations]
-
-    for title in phantom_titles:
-        filename = filesafe_title(title)
-        with lexicon.ctx.article.edit(filename, create=True) as f:
-            f['title'] = title
-            f['html'] = ""
-            f['cites'] = []
-            f['citedby'] = [
-                citer for citer, citations
-                in citations_by_title.items()
-                if title in citations]
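With the old manage helpers removed above, attempt_publish (now backed by publish_drafts and rebuild_pages in gameloop) is the remaining public entry point, re-exported from the package root by the __init__ hunk. A short usage sketch, assuming a loaded LexiconModel is available as lexicon; how it is obtained depends on the surrounding application (the CLI command above passes args.lexicon).

from amanuensis.lexicon import attempt_publish

# `lexicon` is assumed to be an already-loaded LexiconModel instance.
# Publishes all approved drafts, then rebuilds the cached info and article pages.
attempt_publish(lexicon)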