Move the constraint analysis code to the lexicon module
parent fc9c344a1d
commit b6d7a4a54e
@@ -6,6 +6,8 @@ from .gameloop import (
     get_player_characters,
     get_player_drafts,
     get_draft,
+    title_constraint_analysis,
+    content_constraint_analysis,
     attempt_publish)
 from .setup import (
     player_can_join_lexicon,
@@ -19,6 +21,8 @@ __all__ = [member.__name__ for member in [
     get_player_characters,
     get_player_drafts,
     get_draft,
+    title_constraint_analysis,
+    content_constraint_analysis,
     attempt_publish,
     player_can_join_lexicon,
     add_player_to_lexicon,
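Because `__all__` is built from the member objects themselves (see the hunk header above), a misspelled or unimported name fails at import time instead of silently disappearing from the package's public API. A minimal, self-contained sketch of that pattern; the stub functions stand in for the real imports:

    # Sketch of the __all__-from-members pattern; stubs replace the real imports.
    def title_constraint_analysis():
        pass

    def content_constraint_analysis():
        pass

    __all__ = [member.__name__ for member in [
        title_constraint_analysis,
        content_constraint_analysis,
    ]]
    # __all__ == ['title_constraint_analysis', 'content_constraint_analysis']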
@@ -2,16 +2,17 @@
 Submodule of functions for managing lexicon games during the core game
 loop of writing and publishing articles.
 """
-from typing import Iterable, Any, List, Optional
+from typing import Iterable, Any, List, Optional, Tuple

 from amanuensis.config import ReadOnlyOrderedDict
-from amanuensis.models import LexiconModel
+from amanuensis.models import LexiconModel, UserModel
 from amanuensis.parser import (
     parse_raw_markdown,
     GetCitations,
     HtmlRenderer,
     titlesort,
-    filesafe_title)
+    filesafe_title,
+    ConstraintAnalysis)


 def get_player_characters(
@@ -60,6 +61,94 @@ def get_draft(lexicon: LexiconModel, aid: str) -> Optional[ReadOnlyOrderedDict]:
     return article


+def title_constraint_analysis(
+        lexicon: LexiconModel,
+        player: UserModel,
+        title: str) -> Tuple[List[str], List[str]]:
+    """
+    Checks article constraints for the lexicon against a proposed
+    draft title.
+    """
+    warnings = []
+    errors = []
+    with lexicon.ctx.read('info') as info:
+        # No title
+        if not title:
+            errors.append('Missing title')
+            return warnings, errors  # No point in further analysis
+        # The article does not sort under the player's assigned index
+        pass
+        # The article's title is new, but its index is full
+        pass
+        # The article's title is a phantom, but the player has cited it before
+        info
+        # Another player is writing an article with this title
+        pass  # warning
+        # Another player has an approved article with this title
+        pass
+        # An article with this title was already written and addendums are
+        # disabled
+        pass
+        # An article with this title was already written and this player has
+        # reached the maximum number of addendum articles
+        pass
+        # The article's title matches a character's name
+        pass  # warning
+
+    return warnings, errors
+
+
+def content_constraint_analysis(
+        lexicon: LexiconModel,
+        player: UserModel,
+        cid: str,
+        parsed) -> Tuple[List[str], List[str], List[str]]:
+    """
+    Checks article constraints for the lexicon against the content of
+    a draft.
+    """
+    infos = []
+    warnings = []
+    errors = []
+    character = lexicon.cfg.character.get(cid)
+    content_analysis: ConstraintAnalysis = (
+        parsed.render(ConstraintAnalysis(lexicon)))
+    with lexicon.ctx.read('info') as info:
+        infos.append(f'Word count: {content_analysis.word_count}')
+        # Self-citation when forbidden
+        pass
+        # A new citation matches a character's name
+        pass  # warning
+        # Not enough extant citations
+        # Too many extant citations
+        # Not enough phantom citations
+        # Too many phantom citations
+        # Not enough total citations
+        # Too many total citations
+        # Not enough characters' articles cited
+        # Too many characters' articles cited
+        # Exceeded hard word limit
+        if (lexicon.cfg.article.word_limit.hard is not None
+                and content_analysis.word_count > lexicon.cfg.article.word_limit.hard):
+            errors.append('Exceeded maximum word count '
+                f'({lexicon.cfg.article.word_limit.hard})')
+        # Exceeded soft word limit
+        elif (lexicon.cfg.article.word_limit.soft is not None
+                and content_analysis.word_count > lexicon.cfg.article.word_limit.soft):
+            warnings.append('Exceeded suggested maximum word count '
+                f'({lexicon.cfg.article.word_limit.soft})')
+        # Missing signature
+        if content_analysis.signatures < 1:
+            warnings.append('Missing signature')
+        # Multiple signatures
+        if content_analysis.signatures > 1:
+            warnings.append('Multiple signatures')
+        # Signature altered from default
+        pass  # warning
+
+    return infos, warnings, errors
+
+
 def attempt_publish(lexicon: LexiconModel) -> None:
     """
     If the lexicon's publish policy allows the current set of approved
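The two passes split what the old `ConstraintAnalysis` visitor did in one place: the title checks need only the lexicon, the player, and the proposed title, while the content checks also take the character id and the parsed draft. A sketch of the call pattern, mirroring the editor code further down; `lexicon`, `player`, `title`, `cid`, and `parsed` are assumed to already be in hand:

    # Sketch only: assumes lexicon, player, title, cid, and parsed already exist.
    from amanuensis.lexicon import (
        title_constraint_analysis,
        content_constraint_analysis)

    title_warnings, title_errors = title_constraint_analysis(lexicon, player, title)
    infos, content_warnings, content_errors = content_constraint_analysis(
        lexicon, player, cid, parsed)

    # The editor merges both passes into a single report for the client.
    report = {
        'info': infos,
        'warning': title_warnings + content_warnings,
        'error': title_errors + content_errors,
    }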
@@ -4,7 +4,7 @@ for verification against constraints.
 """

 import re
-from typing import Iterable
+from typing import List

 from amanuensis.models import LexiconModel

@@ -26,29 +26,20 @@ class GetCitations(RenderableVisitor):

 class ConstraintAnalysis(RenderableVisitor):
     def __init__(self, lexicon: LexiconModel):
-        self.info: Iterable[str] = []
-        self.warning: Iterable[str] = []
-        self.error: Iterable[str] = []
+        self.info: List[str] = []
+        self.warning: List[str] = []
+        self.error: List[str] = []

         self.word_count = 0
         self.citation_count = 0
-        self.has_signature = False
-
-    def ParsedArticle(self, span):
-        # Execute over the article tree
-        span.recurse(self)
-        # Perform analysis
-        self.info.append(f'Word count: {self.word_count}')
-        if not self.has_signature:
-            self.warning.append('Missing signature')
-        return self
+        self.signatures = 0

     def TextSpan(self, span):
         self.word_count += len(re.split(r'\s+', span.innertext.strip()))
         return self

     def SignatureParagraph(self, span):
-        self.has_signature = True
+        self.signatures += 1
         span.recurse(self)
         return self

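With `ParsedArticle` removed, the visitor no longer renders verdicts itself; it only accumulates raw tallies while walking the span tree, and `content_constraint_analysis` above turns those tallies into infos, warnings, and errors. A sketch of driving the slimmed-down visitor, assuming a `lexicon` model is in hand; the draft text is made up:

    # Sketch: run the visitor over a parsed draft and read its tallies.
    from amanuensis.parser import parse_raw_markdown, ConstraintAnalysis

    parsed = parse_raw_markdown('A very short draft article.')
    analysis = parsed.render(ConstraintAnalysis(lexicon))
    print(analysis.word_count)  # summed by TextSpan from whitespace-split runs
    print(analysis.signatures)  # incremented once per SignatureParagraph span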
@@ -83,7 +83,7 @@ function updatePreview(response) {
     for (var i = 0; i < response.error.length; i++) {
         error += "<span class=\"message-error\">" + response.error[i] + "</span><br>";
     }
-    var control = info + "<br>" + warning + "<br>" + error;
+    var control = info + warning + error;
     document.getElementById("preview-control").innerHTML = control;
 }

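Each message string already carries its own trailing `<br>` (visible in the error loop above), so the extra separators only produced blank lines whenever a section was empty; plain concatenation tightens the preview panel.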
@@ -11,13 +11,14 @@ from flask_login import current_user
 from amanuensis.lexicon import (
     get_player_characters,
     get_player_drafts,
-    get_draft)
+    get_draft,
+    title_constraint_analysis,
+    content_constraint_analysis)
 from amanuensis.models import LexiconModel
 from amanuensis.parser import (
     normalize_title,
     parse_raw_markdown,
-    PreviewHtmlRenderer,
-    ConstraintAnalysis)
+    PreviewHtmlRenderer)


 def load_editor(lexicon: LexiconModel, aid: str):
@@ -109,7 +110,10 @@ def update_draft(lexicon: LexiconModel, article_json):
     # HTML parsing
     preview = parsed.render(PreviewHtmlRenderer(lexicon))
     # Constraint analysis
-    analysis = parsed.render(ConstraintAnalysis(lexicon))
+    title_warnings, title_errors = title_constraint_analysis(
+        lexicon, current_user, title)
+    content_infos, content_warnings, content_errors = content_constraint_analysis(
+        lexicon, current_user, article.character, parsed)

     # Article update
     filename = f'{article.character}.{aid}'
@@ -127,7 +131,7 @@ def update_draft(lexicon: LexiconModel, article_json):
         },
         'rendered': preview.contents,
         'citations': preview.citations,
-        'info': analysis.info,
-        'warning': analysis.warning,
-        'error': analysis.error,
+        'info': content_infos,
+        'warning': title_warnings + content_warnings,
+        'error': title_errors + content_errors,
     }
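For reference, the response consumed by `updatePreview` keeps the same three analysis keys, so the client needs no changes beyond the separator tweak above. An illustrative sketch of the payload shape (values are made up):

    # Illustrative preview-endpoint payload; values are invented.
    response = {
        'rendered': '<p>...</p>',          # preview.contents
        'citations': ['Phantom Article'],  # preview.citations
        'info': ['Word count: 123'],       # content_infos
        'warning': [],                     # title_warnings + content_warnings
        'error': ['Missing title'],        # title_errors + content_errors
    }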