Compare commits
No commits in common. "develop" and "master" have entirely different histories.
@@ -0,0 +1,3 @@
graft amanuensis/resources
graft amanuensis/templates
global-exclude *.pyc
@@ -0,0 +1,99 @@
# Standard library imports
import argparse
import logging
import os
import sys

# Module imports
from amanuensis.cli import describe_commands, get_commands
from amanuensis.config import (
    RootConfigDirectoryContext,
    ENV_CONFIG_DIR,
    ENV_LOG_FILE)
from amanuensis.errors import AmanuensisError
from amanuensis.log import init_logging
from amanuensis.models import ModelFactory


def process_doc(docstring):
    return '\n'.join([
        line.strip()
        for line in (docstring or "").strip().splitlines()
    ])


def get_parser(valid_commands):
    # Set up the top-level parser.
    parser = argparse.ArgumentParser(
        description=describe_commands(),
        formatter_class=argparse.RawDescriptionHelpFormatter)
    # The config directory.
    parser.add_argument("--config-dir",
        dest="config_dir",
        default=os.environ.get(ENV_CONFIG_DIR, "./config"),
        help="The config directory for Amanuensis")
    # Logging settings.
    parser.add_argument("--verbose", "-v",
        action="store_true",
        dest="verbose",
        help="Enable verbose console logging")
    parser.add_argument("--log-file",
        dest="log_file",
        default=os.environ.get(ENV_LOG_FILE),
        help="Enable verbose file logging")
    parser.set_defaults(func=lambda args: parser.print_help())
    subp = parser.add_subparsers(
        metavar="COMMAND",
        dest="command",
        help="The command to execute")

    # Set up command subparsers.
    # command_ functions perform setup or execution depending on
    # whether their argument is an ArgumentParser.
    for name, func in valid_commands.items():
        # Create the subparser, set the docstring as the description.
        cmd = subp.add_parser(name,
            description=process_doc(func.__doc__),
            formatter_class=argparse.RawDescriptionHelpFormatter,
            aliases=func.__dict__.get("aliases", []))
        # Delegate subparser setup to the command.
        func(cmd)
        # Store function for later execution.
        cmd.set_defaults(func=func)

    return parser


def main(argv):
    # Enumerate valid commands from the CLI module.
    commands = get_commands()

    # Parse args
    args = get_parser(commands).parse_args(argv)

    # First things first, initialize logging
    init_logging(args.verbose, args.log_file)
    logger = logging.getLogger('amanuensis')

    # The init command initializes a config directory at --config-dir.
    # All other commands assume that the config dir already exists.
    if args.command and args.command != "init":
        args.root = RootConfigDirectoryContext(args.config_dir)
        args.model_factory = ModelFactory(args.root)

    # If verbose logging, dump args namespace
    if args.verbose:
        logger.debug('amanuensis')
        for key, val in vars(args).items():
            logger.debug(f' {key}: {val}')

    # Execute command.
    try:
        args.func(args)
    except AmanuensisError as e:
        logger.error('Unexpected internal {}: {}'.format(
            type(e).__name__, str(e)))


if __name__ == "__main__":
    sys.exit(main(sys.argv[1:]))
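
Illustrative sketch (not part of the diff): process_doc above trims the leading indentation from each line of a command docstring so it reads cleanly as a subparser description. Assuming the function as defined in this hunk:

def example_command(args):
    """
    Create a character.

    Requires --lexicon and --user.
    """

# process_doc(example_command.__doc__) evaluates to:
# "Create a character.\n\nRequires --lexicon and --user."
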
@@ -1,10 +0,0 @@
import amanuensis.backend.article as artiq
import amanuensis.backend.character as charq
import amanuensis.backend.index as indq
import amanuensis.backend.indexrule as irq
import amanuensis.backend.lexicon as lexiq
import amanuensis.backend.membership as memq
import amanuensis.backend.post as postq
import amanuensis.backend.user as userq

__all__ = ["artiq", "charq", "indq", "irq", "lexiq", "memq", "postq", "userq"]
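
Illustrative sketch (not part of the diff): the deleted package init above exposes each backend query module under a short alias, and the rest of the codebase calls them through those names. The lexicon name and username below are hypothetical.

from amanuensis.backend import lexiq, memq, userq

lexicon = lexiq.try_from_name(db, "example-lexicon")  # db is a DbContext
user = userq.try_from_username(db, "alice")
memq.create(db, user.id, lexicon.id, is_editor=True)
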
@@ -1,44 +0,0 @@
"""
Article query interface
"""

from sqlalchemy import select

from amanuensis.db import *
from amanuensis.errors import ArgumentError, BackendArgumentTypeError


def create(
    db: DbContext,
    lexicon_id: int,
    character_id: int,
    ersatz: bool = False,
) -> Article:
    """
    Create a new article in a lexicon.
    """
    # Verify argument types are correct
    if not isinstance(lexicon_id, int):
        raise BackendArgumentTypeError(int, lexicon_id=lexicon_id)
    if character_id is not None and not isinstance(character_id, int):
        raise BackendArgumentTypeError(int, character_id=character_id)

    # Check that the character belongs to the lexicon
    character: Character = db(
        select(Character).where(Character.id == character_id)
    ).scalar_one_or_none()
    if not character:
        raise ArgumentError("Character does not exist")
    if character.lexicon.id != lexicon_id:
        raise ArgumentError("Character belongs to the wrong lexicon")
    signature = character.signature if not ersatz else "~Ersatz Scrivener"

    new_article = Article(
        lexicon_id=lexicon_id,
        character_id=character_id,
        title="Article title",
        body=f"\n\n{signature}",
    )
    db.session.add(new_article)
    db.session.commit()
    return new_article
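
Hedged sketch (an assumption, not part of the diff): these backend modules call the DbContext object directly with a SQLAlchemy statement and read results off the return value, which implies an interface roughly like the following; the real implementation lives in amanuensis.db.

class DbContext:
    def __call__(self, query):
        # Execute a SQLAlchemy statement against the managed session.
        return self.session.execute(query)
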
@@ -1,83 +0,0 @@
"""
Character query interface
"""

from typing import Optional, Sequence
from uuid import UUID

from sqlalchemy import select, func

from amanuensis.db import *
from amanuensis.errors import ArgumentError, BackendArgumentTypeError


def create(
    db: DbContext,
    lexicon_id: int,
    user_id: int,
    name: str,
    signature: Optional[str],
) -> Character:
    """
    Create a new character for a user.
    """
    # Verify argument types are correct
    if not isinstance(lexicon_id, int):
        raise BackendArgumentTypeError(int, lexicon_id=lexicon_id)
    if not isinstance(user_id, int):
        raise BackendArgumentTypeError(int, user_id=user_id)
    if not isinstance(name, str):
        raise BackendArgumentTypeError(str, name=name)
    if signature is not None and not isinstance(signature, str):
        raise BackendArgumentTypeError(str, signature=signature)

    # Verify character name is valid
    if not name.strip():
        raise ArgumentError("Character name cannot be blank")

    # If no signature is provided, use a default signature
    if not signature or not signature.strip():
        signature = f"~{name}"

    # Check that the user is a member of this lexicon
    mem: Membership = db(
        select(Membership)
        .where(Membership.user_id == user_id)
        .where(Membership.lexicon_id == lexicon_id)
    ).scalar_one_or_none()
    if not mem:
        raise ArgumentError("User is not a member of lexicon")

    # Check that this user is below the limit for creating characters
    num_user_chars = db(
        select(func.count(Character.id))
        .where(Character.lexicon_id == lexicon_id)
        .where(Character.user_id == user_id)
    ).scalar()
    if (
        mem.lexicon.character_limit is not None
        and num_user_chars >= mem.lexicon.character_limit
    ):
        raise ArgumentError("User is at character limit")

    new_character = Character(
        lexicon_id=lexicon_id,
        user_id=user_id,
        name=name,
        signature=signature,
    )
    db.session.add(new_character)
    db.session.commit()
    return new_character


def get_in_lexicon(db: DbContext, lexicon_id: int) -> Sequence[Character]:
    """Get all characters in a lexicon."""
    return db(select(Character).where(Character.lexicon_id == lexicon_id)).scalars()


def try_from_public_id(db: DbContext, public_id: UUID) -> Optional[Character]:
    """Get a character by its public id."""
    return db(
        select(Character).where(Character.public_id == public_id)
    ).scalar_one_or_none()
@@ -1,118 +0,0 @@
"""
Index query interface
"""

import re
from typing import Optional, Sequence

from sqlalchemy import select

from amanuensis.db import DbContext, ArticleIndex, IndexType
from amanuensis.errors import ArgumentError, BackendArgumentTypeError


def create(
    db: DbContext,
    lexicon_id: int,
    index_type: IndexType,
    pattern: str,
    logical_order: int,
    display_order: int,
    capacity: Optional[int],
) -> ArticleIndex:
    """
    Create a new index in a lexicon.
    """
    # Verify argument types are correct
    if not isinstance(lexicon_id, int):
        raise BackendArgumentTypeError(int, lexicon_id=lexicon_id)
    if not isinstance(index_type, IndexType):
        raise BackendArgumentTypeError(IndexType, index_type=index_type)
    if not isinstance(pattern, str):
        raise BackendArgumentTypeError(str, pattern=pattern)
    if not isinstance(logical_order, int):
        raise BackendArgumentTypeError(int, logical_order=logical_order)
    if not isinstance(display_order, int):
        raise BackendArgumentTypeError(int, display_order=display_order)
    if capacity is not None and not isinstance(capacity, int):
        raise BackendArgumentTypeError(int, capacity=capacity)

    # Verify the pattern is valid for the index type:
    if index_type == IndexType.CHAR:
        if len(pattern) < 1:
            raise ArgumentError(
                f"Pattern '{pattern}' too short for index type {index_type}"
            )
    elif index_type == IndexType.RANGE:
        range_def = re.match(r"^(.)-(.)$", pattern)
        if not range_def:
            raise ArgumentError(f"Pattern '{pattern}' is not a valid range format")
        start_char, end_char = range_def.group(1), range_def.group(2)
        if start_char >= end_char:
            raise ArgumentError(
                f"Range start '{start_char}' is not before range end '{end_char}'"
            )
    elif index_type == IndexType.PREFIX:
        if len(pattern) < 1:
            raise ArgumentError(
                f"Pattern '{pattern}' too short for index type {index_type}"
            )
    elif index_type == IndexType.ETC:
        if len(pattern) < 1:
            raise ArgumentError(
                f"Pattern '{pattern}' too short for index type {index_type}"
            )

    new_index = ArticleIndex(
        lexicon_id=lexicon_id,
        index_type=index_type,
        pattern=pattern,
        logical_order=logical_order,
        display_order=display_order,
        capacity=capacity,
    )
    db.session.add(new_index)
    db.session.commit()
    return new_index


def get_for_lexicon(db: DbContext, lexicon_id: int) -> Sequence[ArticleIndex]:
    """Returns all index rules for a lexicon."""
    return db(
        select(ArticleIndex).where(ArticleIndex.lexicon_id == lexicon_id)
    ).scalars()


def update(db: DbContext, lexicon_id: int, indices: Sequence[ArticleIndex]) -> None:
    """
    Update the indices for a lexicon. Indices are matched by type and pattern.
    An extant index not matched to an input is deleted, and an input index not
    matched to an extant index is created. Matched indices are updated with
    the input logical and display orders and capacity.

    Note that this scheme does not allow an existing index to have its type
    or pattern updated: such an operation will always result in the deletion of
    the old index and the creation of a new index.
    """
    extant_indices: Sequence[ArticleIndex] = list(get_for_lexicon(db, lexicon_id))
    for extant_index in extant_indices:
        match = None
        for new_index in indices:
            if extant_index.name == new_index.name:
                match = new_index
                break
        if match:
            extant_index.logical_order = new_index.logical_order
            extant_index.display_order = new_index.display_order
            extant_index.capacity = new_index.capacity
        else:
            db.session.delete(extant_index)
    for new_index in indices:
        match = None
        for extant_index in extant_indices:
            if extant_index.name == new_index.name:
                match = extant_index
                break
        if not match:
            db.session.add(new_index)
    db.session.commit()
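
Hypothetical usage sketch (not part of the diff) of the reconciliation behavior described in update()'s docstring; the patterns, orders, and capacities below are made up.

# Suppose the lexicon currently has RANGE indices "A-F" and "G-M".
desired = [
    ArticleIndex(lexicon_id=lexicon_id, index_type=IndexType.RANGE, pattern="A-F",
                 logical_order=0, display_order=0, capacity=None),
    ArticleIndex(lexicon_id=lexicon_id, index_type=IndexType.RANGE, pattern="N-Z",
                 logical_order=1, display_order=1, capacity=10),
]
update(db, lexicon_id, desired)
# "A-F" is matched, so its orders and capacity are updated in place;
# "G-M" has no match and is deleted; "N-Z" has no match and is created.
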
@@ -1,100 +0,0 @@
"""
Index rule query interface
"""

from typing import Sequence

from sqlalchemy import select

from amanuensis.db import *
from amanuensis.errors import ArgumentError, BackendArgumentTypeError


def create(
    db: DbContext,
    lexicon_id: int,
    character_id: int,
    index_id: int,
    turn: int,
) -> ArticleIndexRule:
    """Create an index assignment."""
    # Verify argument types are correct
    if not isinstance(lexicon_id, int):
        raise BackendArgumentTypeError(int, lexicon_id=lexicon_id)
    if character_id is not None and not isinstance(character_id, int):
        raise BackendArgumentTypeError(int, character_id=character_id)
    if not isinstance(index_id, int):
        raise BackendArgumentTypeError(int, index_id=index_id)
    if not isinstance(turn, int):
        raise BackendArgumentTypeError(int, turn=turn)

    # Verify the character belongs to the lexicon
    character: Character = db(
        select(Character).where(Character.id == character_id)
    ).scalar_one_or_none()
    if not character:
        raise ArgumentError("Character does not exist")
    if character.lexicon_id != lexicon_id:
        raise ArgumentError("Character belongs to the wrong lexicon")

    # Verify the index belongs to the lexicon
    index: ArticleIndex = db(
        select(ArticleIndex).where(ArticleIndex.id == index_id)
    ).scalar_one_or_none()
    if not index:
        raise ArgumentError("Index does not exist")
    if index.lexicon_id != lexicon_id:
        raise ArgumentError("Index belongs to the wrong lexicon")

    new_assignment: ArticleIndexRule = ArticleIndexRule(
        lexicon_id=lexicon_id,
        character_id=character_id,
        index_id=index_id,
        turn=turn,
    )
    db.session.add(new_assignment)
    db.session.commit()
    return new_assignment


def get_for_lexicon(db: DbContext, lexicon_id: int) -> Sequence[ArticleIndex]:
    """Returns all index rules for a lexicon."""
    return db(
        select(ArticleIndexRule)
        .join(ArticleIndexRule.index)
        .join(ArticleIndexRule.character)
        .where(ArticleIndexRule.lexicon_id == lexicon_id)
        .order_by(ArticleIndexRule.turn, ArticleIndex.pattern, Character.name)
    ).scalars()


def update(db: DbContext, lexicon_id: int, rules: Sequence[ArticleIndexRule]) -> None:
    """
    Update the index assignments for a lexicon. An index assignment is a tuple
    of turn, index, and character. Unlike indices themselves, assignments have
    no other attributes that can be updated, so they are simply created or
    deleted based on their presence or absence in the desired rule list.
    """
    print(rules)
    extant_rules: Sequence[ArticleIndexRule] = list(get_for_lexicon(db, lexicon_id))
    for extant_rule in extant_rules:
        if not any(
            [
                extant_rule.character_id == new_rule.character_id
                and extant_rule.index_id == new_rule.index_id
                and extant_rule.turn == new_rule.turn
                for new_rule in rules
            ]
        ):
            db.session.delete(extant_rule)
    for new_rule in rules:
        if not any(
            [
                extant_rule.character_id == new_rule.character_id
                and extant_rule.index_id == new_rule.index_id
                and extant_rule.turn == new_rule.turn
                for extant_rule in extant_rules
            ]
        ):
            db.session.add(new_rule)
    db.session.commit()
@@ -1,97 +0,0 @@
"""
Lexicon query interface
"""

import re
from typing import Sequence, Optional

from sqlalchemy import select, func, update
from werkzeug.security import generate_password_hash, check_password_hash

from amanuensis.db import DbContext, Lexicon, Membership
from amanuensis.errors import ArgumentError, BackendArgumentTypeError


RE_ALPHANUM_DASH_UNDER = re.compile(r"^[A-Za-z0-9-_]*$")


def create(
    db: DbContext,
    name: str,
    title: Optional[str],
    prompt: str,
) -> Lexicon:
    """
    Create a new lexicon.
    """
    # Verify name
    if not isinstance(name, str):
        raise BackendArgumentTypeError(str, name=name)
    if not name.strip():
        raise ArgumentError("Lexicon name must not be blank")
    if not RE_ALPHANUM_DASH_UNDER.match(name):
        raise ArgumentError(
            "Lexicon name may only contain alphanumerics, dash, and underscore"
        )

    # Verify title
    if title is not None and not isinstance(title, str):
        raise BackendArgumentTypeError(str, title=title)

    # Verify prompt
    if not isinstance(prompt, str):
        raise BackendArgumentTypeError(str, prompt=prompt)

    # Query the db to make sure the lexicon name isn't taken
    if db(select(func.count(Lexicon.id)).where(Lexicon.name == name)).scalar() > 0:
        raise ArgumentError("Lexicon name is already taken")

    new_lexicon = Lexicon(
        name=name,
        title=title,
        prompt=prompt,
    )
    db.session.add(new_lexicon)
    db.session.commit()
    return new_lexicon


def get_all(db: DbContext) -> Sequence[Lexicon]:
    """Get all lexicons."""
    return db(select(Lexicon)).scalars()


def get_joined(db: DbContext, user_id: int) -> Sequence[Lexicon]:
    """Get all lexicons that a player is in."""
    return db(
        select(Lexicon).join(Lexicon.memberships).where(Membership.user_id == user_id)
    ).scalars()


def get_public(db: DbContext) -> Sequence[Lexicon]:
    """Get all publicly visible lexicons."""
    return db(select(Lexicon).where(Lexicon.public == True)).scalars()


def password_check(db: DbContext, lexicon_id: int, password: str) -> bool:
    """Check if a password is correct."""
    password_hash: str = db(
        select(Lexicon.join_password).where(Lexicon.id == lexicon_id)
    ).scalar_one()
    return check_password_hash(password_hash, password)


def password_set(db: DbContext, lexicon_id: int, new_password: Optional[str]) -> None:
    """Set or clear a lexicon's password."""
    password_hash = generate_password_hash(new_password) if new_password else None
    db(
        update(Lexicon)
        .where(Lexicon.id == lexicon_id)
        .values(join_password=password_hash)
    )
    db.session.commit()


def try_from_name(db: DbContext, name: str) -> Optional[Lexicon]:
    """Get a lexicon by its name, or None if no such lexicon was found."""
    return db(select(Lexicon).where(Lexicon.name == name)).scalar_one_or_none()
@@ -1,82 +0,0 @@
"""
Membership query interface
"""

from typing import Sequence

from sqlalchemy import select, func

from amanuensis.db import DbContext, Membership
from amanuensis.db.models import Lexicon
from amanuensis.errors import ArgumentError, BackendArgumentTypeError


def create(
    db: DbContext,
    user_id: int,
    lexicon_id: int,
    is_editor: bool,
) -> Membership:
    """
    Create a new user membership in a lexicon.
    """
    # Verify argument types are correct
    if not isinstance(user_id, int):
        raise BackendArgumentTypeError(int, user_id=user_id)
    if not isinstance(lexicon_id, int):
        raise BackendArgumentTypeError(int, lexicon_id=lexicon_id)
    if not isinstance(is_editor, bool):
        raise BackendArgumentTypeError(bool, is_editor=is_editor)

    # Verify user has not already joined lexicon
    if (
        db(
            select(func.count(Membership.id))
            .where(Membership.user_id == user_id)
            .where(Membership.lexicon_id == lexicon_id)
        ).scalar()
        > 0
    ):
        raise ArgumentError("User is already a member of lexicon")

    # Get a reference to the lexicon for the next few checks
    lex: Lexicon = db(
        select(Lexicon).where(Lexicon.id == lexicon_id)
    ).scalar_one_or_none()
    if not lex:
        raise ArgumentError("could not find lexicon")

    # Verify lexicon is joinable
    if not lex.joinable:
        raise ArgumentError("Can't join: Lexicon is not joinable")

    # Verify lexicon is not full
    if lex.player_limit is not None:
        if (
            db(select(func.count()).where(Membership.lexicon_id == lexicon_id)).scalar()
            >= lex.player_limit
        ):
            raise ArgumentError("Can't join: Lexicon is full")

    new_membership = Membership(
        user_id=user_id,
        lexicon_id=lexicon_id,
        is_editor=is_editor,
    )
    db.session.add(new_membership)
    db.session.commit()
    return new_membership


def get_players_in_lexicon(db: DbContext, lexicon_id: int) -> Sequence[Membership]:
    """Get all users who are members of a lexicon."""
    return db(select(Membership).where(Membership.lexicon_id == lexicon_id)).scalars()


def try_from_ids(db: DbContext, user_id: int, lexicon_id: int) -> Membership:
    """Get a membership by the user and lexicon ids, or None if no such membership was found."""
    return db(
        select(Membership)
        .where(Membership.user_id == user_id)
        .where(Membership.lexicon_id == lexicon_id)
    ).scalar_one_or_none()
@@ -1,103 +0,0 @@
"""
Post query interface
"""

from typing import Optional, Sequence, Tuple

from sqlalchemy import select, update, func, or_, DateTime

from amanuensis.db import DbContext, Post
from amanuensis.db.models import Lexicon, Membership
from amanuensis.errors import ArgumentError, BackendArgumentTypeError


def create(
    db: DbContext,
    lexicon_id: int,
    user_id: Optional[int],
    body: str,
) -> Post:
    """
    Create a new post
    """

    # Verify lexicon id
    if not isinstance(lexicon_id, int):
        raise BackendArgumentTypeError(int, lexicon_id=lexicon_id)

    # Verify user_id
    if user_id is not None and not isinstance(user_id, int):
        raise BackendArgumentTypeError(int, user_id=user_id)

    # Verify body
    if not isinstance(body, str):
        raise BackendArgumentTypeError(str, body=body)
    if not body.strip():
        raise ArgumentError("Post body cannot be empty.")

    # Check that the lexicon allows posting
    if not (
        db(select(Lexicon).where(Lexicon.id == lexicon_id))
        .scalar_one_or_none()
        .allow_post
    ):
        raise ArgumentError("Lexicon does not allow posting.")

    new_post = Post(lexicon_id=lexicon_id, user_id=user_id, body=body)
    db.session.add(new_post)
    db.session.commit()
    return new_post


def get_posts_for_membership(
    db: DbContext, membership_id: int
) -> Tuple[Sequence[Post], Sequence[Post]]:
    """
    Returns posts for the membership's lexicon, split into posts that
    are new since the last view and posts that were previously seen.
    """
    # Save the current timestamp, so we don't miss posts created between now
    # and when we finish looking stuff up
    now: DateTime = db(select(func.now())).scalar_one()

    # Save the previous last-seen timestamp for splitting new from old posts,
    # then update the membership with the current time
    last_seen: DateTime = db(
        select(Membership.last_post_seen).where(Membership.id == membership_id)
    ).scalar_one()
    db(
        update(Membership)
        .where(Membership.id == membership_id)
        .values(last_post_seen=now)
    )
    db.session.commit()

    # Fetch posts in two groups, new ones after the last-seen time and old ones
    # If last-seen is null, then just return everything as new
    new_posts = db(
        select(Post)
        .where(last_seen is None or Post.created > last_seen)
        .order_by(Post.created.desc())
    ).scalars()
    old_posts = db(
        select(Post)
        .where(last_seen is not None and Post.created <= last_seen)
        .order_by(Post.created.desc())
    ).scalars()

    return new_posts, old_posts


def get_unread_count(db: DbContext, membership_id: int) -> int:
    """Get the number of posts that the member has not seen"""
    return db(
        select(func.count(Post.id))
        .join(Membership, Membership.lexicon_id == Post.lexicon_id)
        .where(
            or_(
                Membership.last_post_seen.is_(None),
                Post.created > Membership.last_post_seen,
            )
        )
        .where(Membership.id == membership_id)
    ).scalar()
@@ -1,111 +0,0 @@
"""
User query interface
"""

import datetime
import re
from typing import Optional, Sequence

from sqlalchemy import select, func, update
from werkzeug.security import generate_password_hash, check_password_hash

from amanuensis.db import DbContext, User
from amanuensis.errors import ArgumentError, BackendArgumentTypeError


RE_NO_LETTERS = re.compile(r"^[0-9-_]*$")
RE_ALPHANUM_DASH_UNDER = re.compile(r"^[A-Za-z0-9-_]*$")


def create(
    db: DbContext,
    username: str,
    password: str,
    display_name: Optional[str],
    email: str,
    is_site_admin: bool,
) -> User:
    """
    Create a new user.
    """
    # Verify username
    if not isinstance(username, str):
        raise BackendArgumentTypeError(str, username=username)
    if len(username) < 3 or len(username) > 32:
        raise ArgumentError("Username must be between 3 and 32 characters")
    if RE_NO_LETTERS.match(username):
        raise ArgumentError("Username must contain a letter")
    if not RE_ALPHANUM_DASH_UNDER.match(username):
        raise ArgumentError(
            "Username may only contain alphanumerics, dash, and underscore"
        )

    # Verify password
    if not isinstance(password, str):
        raise BackendArgumentTypeError(str, password=password)

    # Verify display name
    if display_name is not None and not isinstance(display_name, str):
        raise BackendArgumentTypeError(str, display_name=display_name)
    # If display name is not provided, use the username
    if not display_name or not display_name.strip():
        display_name = username

    # Verify email
    if not isinstance(email, str):
        raise BackendArgumentTypeError(str, email=email)

    # Query the db to make sure the username isn't taken
    if db(select(func.count(User.id)).where(User.username == username)).scalar() > 0:
        raise ArgumentError("Username is already taken")

    new_user = User(
        username=username,
        password=generate_password_hash(password),
        display_name=display_name,
        email=email,
        is_site_admin=is_site_admin,
    )
    db.session.add(new_user)
    db.session.commit()
    return new_user


def get_all(db: DbContext) -> Sequence[User]:
    """Get all users."""
    return db(select(User)).scalars()


def password_check(db: DbContext, username: str, password: str) -> bool:
    """Check if a password is correct."""
    user_password_hash: str = db(
        select(User.password).where(User.username == username)
    ).scalar_one()
    return check_password_hash(user_password_hash, password)


def password_set(db: DbContext, username: str, new_password: str) -> None:
    """Set a user's password."""
    password_hash = generate_password_hash(new_password)
    db(update(User).where(User.username == username).values(password=password_hash))
    db.session.commit()


def try_from_id(db: DbContext, user_id: int) -> Optional[User]:
    """Get a user by the user's id, or None if no such user was found."""
    return db(select(User).where(User.id == user_id)).scalar_one_or_none()


def try_from_username(db: DbContext, username: str) -> Optional[User]:
    """Get a user by the user's username, or None if no such user was found."""
    return db(select(User).where(User.username == username)).scalar_one_or_none()


def update_logged_in(db: DbContext, username: str) -> None:
    """Bump the value of the last_login column for a user."""
    db(
        update(User)
        .where(User.username == username)
        .values(last_login=datetime.datetime.now(datetime.timezone.utc))
    )
    db.session.commit()
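
For illustration (not part of the diff), the two regexes above combine with the length check so that a username must be 3-32 characters long, contain at least one letter, and use only alphanumerics, dash, and underscore:

# "alice_01"  -> accepted
# "ab"        -> rejected (fewer than 3 characters)
# "1234-_"    -> rejected (no letters; matches RE_NO_LETTERS)
# "bad name"  -> rejected (space fails RE_ALPHANUM_DASH_UNDER)
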
@@ -1,126 +1,72 @@
Removed:

from argparse import ArgumentParser, Namespace
import logging
import logging.config
import os
from typing import Callable

import amanuensis.cli.admin
import amanuensis.cli.character
import amanuensis.cli.index
import amanuensis.cli.lexicon
import amanuensis.cli.post
import amanuensis.cli.user
from amanuensis.db import DbContext


LOGGING_CONFIG = {
    "version": 1,
    "formatters": {
        "fmt_basic": {
            "validate": True,
            "format": "%(message)s",
        },
        "fmt_detailed": {
            "validate": True,
            "format": "%(asctime)s %(levelname)s %(message)s",
        },
    },
    "handlers": {
        "hnd_stderr": {
            "class": "logging.StreamHandler",
            "level": "INFO",
            "formatter": "fmt_basic",
        },
    },
    "loggers": {
        __name__: {
            "level": "DEBUG",
            "handlers": ["hnd_stderr"],
        },
    },
}


def add_subcommand(subparsers, module) -> None:
    """Add a cli submodule's commands as a subparser."""
    # Get the command information from the module
    command_name: str = getattr(module, "COMMAND_NAME")
    command_help: str = getattr(module, "COMMAND_HELP")
    if not command_name and command_help:
        return

    # Add the subparser for the command and set a default action
    command_parser: ArgumentParser = subparsers.add_parser(
        command_name, help=command_help
    )
    command_parser.set_defaults(func=lambda args: command_parser.print_help())

    # Add all subcommands in the command module
    subcommands = command_parser.add_subparsers(metavar="SUBCOMMAND")
    for name, obj in vars(module).items():
        if name.startswith("command_"):
            # Hyphenate subcommand names
            sc_name: str = name[8:].replace("_", "-")
            # Only the first line of the subcommand function docstring is used
            sc_help = ((obj.__doc__ or "").strip() or "\n").splitlines()[0]

            # Add the command and any arguments defined by its decorators
            subcommand: ArgumentParser = subcommands.add_parser(
                sc_name, help=sc_help, description=obj.__doc__
            )
            subcommand.set_defaults(func=obj)
            for args, kwargs in reversed(obj.__dict__.get("add_argument", [])):
                subcommand.add_argument(*args, **kwargs)


def init_logger(args):
    """Set up logging based on verbosity args"""
    if args.verbose:
        handler = LOGGING_CONFIG["handlers"]["hnd_stderr"]
        handler["formatter"] = "fmt_detailed"
        handler["level"] = "DEBUG"
    logging.config.dictConfig(LOGGING_CONFIG)


def get_db_factory(args: Namespace) -> Callable[[], DbContext]:
    """Factory function for lazy-loading the database in subcommands."""

    def get_db() -> DbContext:
        """Lazy loader for the database connection."""
        if not os.path.exists(args.db_path):
            args.parser.error(f"No database found at {args.db_path}")
        return DbContext(path=args.db_path, echo=args.verbose)

    return get_db


def main():
    """CLI entry point"""
    # Set up the top-level parser
    parser = ArgumentParser()
    parser.set_defaults(
        parser=parser,
        func=lambda args: parser.print_help(),
        get_db=None,
    )
    parser.add_argument("--verbose", "-v", action="store_true", help="Verbose output")
    parser.add_argument(
        "--db", dest="db_path", default="db.sqlite", help="Path to Amanuensis database"
    )

    # Add commands from cli submodules
    subparsers = parser.add_subparsers(metavar="COMMAND")
    add_subcommand(subparsers, amanuensis.cli.admin)
    add_subcommand(subparsers, amanuensis.cli.character)
    add_subcommand(subparsers, amanuensis.cli.index)
    add_subcommand(subparsers, amanuensis.cli.lexicon)
    add_subcommand(subparsers, amanuensis.cli.post)
    add_subcommand(subparsers, amanuensis.cli.user)

    # Parse args and perform top-level arg processing
    args = parser.parse_args()
    init_logger(args)
    args.get_db = get_db_factory(args)

    # Execute the desired action
    args.func(args)

Added:

#
# The cli module must not import other parts of the application at the module
# level. This is because most other modules depend on the config module. The
# config module may depend on __main__'s commandline parsing to locate config
# files, and __main__'s commandline parsing requires importing (but not
# executing) the functions in the cli module. Thus, cli functions must only
# import the config module inside the various command methods, which are only
# run after commandline parsing has already occurred.
#


def server_commands(commands={}):
    if commands:
        return commands
    import amanuensis.cli.server
    for name, func in vars(amanuensis.cli.server).items():
        if name.startswith("command_"):
            name = name[8:].replace("_", "-")
            commands[name] = func
    return commands


def lexicon_commands(commands={}):
    if commands:
        return commands
    import amanuensis.cli.lexicon
    for name, func in vars(amanuensis.cli.lexicon).items():
        if name.startswith("command_"):
            name = name[8:].replace("_", "-")
            commands["lexicon-" + name] = func
    return commands


def user_commands(commands={}):
    if commands:
        return commands
    import amanuensis.cli.user
    for name, func in vars(amanuensis.cli.user).items():
        if name.startswith("command_"):
            name = name[8:].replace("_", "-")
            commands["user-" + name] = func
    return commands


def get_commands():
    return {**server_commands(), **lexicon_commands(), **user_commands()}


def cmd_desc(func):
    return ((func.__doc__ or "").strip() or '\n').splitlines()[0]


def describe_commands():
    longest = max(map(len, server_commands().keys()))
    server_desc = "General commands:\n{}\n".format("\n".join([
        " {1:<{0}} : {2}".format(longest, name, cmd_desc(func))
        for name, func in server_commands().items()
    ]))

    longest = max(map(len, lexicon_commands().keys()))
    lexicon_desc = "Lexicon commands:\n{}\n".format("\n".join([
        " {1:<{0}} : {2}".format(longest, name, cmd_desc(func))
        for name, func in lexicon_commands().items()
    ]))

    longest = max(map(len, user_commands().keys()))
    user_desc = "User commands:\n{}\n".format("\n".join([
        " {1:<{0}} : {2}".format(longest, name, cmd_desc(func))
        for name, func in user_commands().items()
    ]))

    return "\n".join([server_desc, lexicon_desc, user_desc])
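
Illustrative sketch (not part of the diff): in the added registry functions, every command_* function found in a cli submodule is registered under a hyphenated name, prefixed by its group. For example, a create command in the lexicon submodule:

# In amanuensis.cli.lexicon:
def command_create(args):
    """Create a lexicon"""
    ...

# lexicon_commands() registers it as {"lexicon-create": command_create},
# and get_commands() merges the server, lexicon, and user groups into one dict.
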
@@ -1,53 +0,0 @@
import collections
import json
import logging
import os

from amanuensis.db import DbContext

from .helpers import add_argument


COMMAND_NAME = "admin"
COMMAND_HELP = "Interact with Amanuensis."

LOG = logging.getLogger(__name__)


@add_argument("--drop", "-d", action="store_true", help="Overwrite existing database")
def command_init_db(args) -> int:
    """
    Initialize the Amanuensis database.
    """
    if args.drop:
        open(args.db_path, mode="w").close()

    # Initialize the database
    LOG.info(f"Creating database at {args.db_path}")
    args.get_db().create_all()

    LOG.info("Done")
    return 0


@add_argument("path", metavar="CONFIG_PATH", help="Path to the config file")
def command_secret_key(args) -> int:
    """
    Generate a Flask secret key.

    The Flask server will not run unless a secret key has
    been generated.
    """
    # Load the json config
    with open(args.path, mode="r", encoding="utf8") as f:
        config = json.load(f, object_pairs_hook=collections.OrderedDict)

    # Set the secret key to a new random string
    config["SECRET_KEY"] = os.urandom(32).hex()

    # Write the config back out
    with open(args.path, mode="w", encoding="utf8") as f:
        json.dump(config, f, indent=2)

    LOG.info("Regenerated Flask secret key")
    return 0
@@ -1,31 +0,0 @@
import logging

from amanuensis.backend import lexiq, userq, charq
from amanuensis.db import DbContext, Character

from .helpers import add_argument


COMMAND_NAME = "char"
COMMAND_HELP = "Interact with characters."

LOG = logging.getLogger(__name__)


@add_argument("--lexicon", required=True)
@add_argument("--user", required=True)
@add_argument("--name", required=True)
def command_create(args) -> int:
    """
    Create a character.
    """
    db: DbContext = args.get_db()
    lexicon = lexiq.try_from_name(db, args.lexicon)
    if not lexicon:
        raise ValueError("Lexicon does not exist")
    user = userq.try_from_username(db, args.user)
    if not user:
        raise ValueError("User does not exist")
    char: Character = charq.create(db, lexicon.id, user.id, args.name, signature=None)
    LOG.info(f"Created {char.name} in {lexicon.full_title}")
    return 0
@@ -1,20 +1,209 @@
Removed:

"""
Helpers for cli commands.
"""


def add_argument(*args, **kwargs):
    """Defines an argument to a cli command."""

    def argument_adder(command_func):
        """Decorator function for storing parser args on the function."""
        # Store the kw/args in the function dictionary
        add_args = command_func.__dict__.get("add_argument", [])
        add_args.append((args, kwargs))
        command_func.__dict__["add_argument"] = add_args
        # Return the same function
        return command_func

    return argument_adder

Added:

# Standard library imports
from argparse import ArgumentParser
from functools import wraps
from json.decoder import JSONDecodeError
from logging import getLogger
from sys import exc_info

logger = getLogger(__name__)


#
# The add_argument and no_argument function wrappers allow the same
# function to both configure a command and execute it. This keeps
# command argument configuration close to where the command is defined
# and reduces the number of things the main parser has to handle.
#


def add_argument(*args, **kwargs):
    """Passes the given args and kwargs to subparser.add_argument"""

    def argument_adder(command):
        @wraps(command)
        def augmented_command(cmd_args):
            # Add this wrapper's command in the parser pass
            if isinstance(cmd_args, ArgumentParser):
                cmd_args.add_argument(*args, **kwargs)
                # If there are more command wrappers, pass through to them
                if command.__dict__.get('wrapper', False):
                    command(cmd_args)
                # Parser pass doesn't return a value
                return None
            # Pass through transparently in the execute pass
            return command(cmd_args)

        # Mark the command as wrapped so control passes through
        augmented_command.__dict__['wrapper'] = True
        return augmented_command

    return argument_adder


def no_argument(command):
    """Noops for subparsers"""
    @wraps(command)
    def augmented_command(cmd_args):
        # Noop in the parser pass
        if isinstance(cmd_args, ArgumentParser):
            return None
        # Pass through in the execute pass
        return command(cmd_args)

    return augmented_command


#
# Many commands require specifying a lexicon or user to operate on, so
# the requires_lexicon and requires_user wrappers replace @add_argument
# as well as automatically create the model for the object from the
# provided identifier.
#

LEXICON_ARGS = ['--lexicon']
LEXICON_KWARGS = {
    'metavar': 'LEXICON',
    'dest': 'lexicon',
    'help': 'Specify a lexicon to operate on'}


def requires_lexicon(command):
    @wraps(command)
    def augmented_command(cmd_args):
        # Add lexicon argument in parser pass
        if isinstance(cmd_args, ArgumentParser):
            cmd_args.add_argument(*LEXICON_ARGS, **LEXICON_KWARGS)
            # If there are more command wrappers, pass through to them
            if command.__dict__.get('wrapper', False):
                command(cmd_args)
            # Parser pass doesn't return a value
            return None

        # Verify lexicon argument in execute pass
        val = getattr(cmd_args, 'lexicon', None)
        if not val:
            logger.error("Missing --lexicon argument")
            return -1
        try:
            model_factory = cmd_args.model_factory
            cmd_args.lexicon = model_factory.lexicon(val)
        except Exception:
            ex_type, value, tb = exc_info()
            logger.error(
                f'Loading lexicon "{val}" failed with '
                f'{ex_type.__name__}: {value}')
            return -1
        return command(cmd_args)

    augmented_command.__dict__['wrapper'] = True
    return augmented_command


USER_ARGS = ['--user']
USER_KWARGS = {
    'metavar': 'USER',
    'dest': 'user',
    'help': 'Specify a user to operate on'}


def requires_user(command):
    @wraps(command)
    def augmented_command(cmd_args):
        # Add user argument in parser pass
        if isinstance(cmd_args, ArgumentParser):
            cmd_args.add_argument(*USER_ARGS, **USER_KWARGS)
            # If there are more command wrappers, pass through to them
            if command.__dict__.get('wrapper', False):
                command(cmd_args)
            # Parser pass doesn't return a value
            return None

        # Verify user argument in execute pass
        val = getattr(cmd_args, "user", None)
        if not val:
            logger.error("Missing --user argument")
            return -1
        try:
            model_factory = cmd_args.model_factory
            cmd_args.user = model_factory.user(val)
        except Exception:
            ex_type, value, tb = exc_info()
            logger.error(
                f'Loading user "{val}" failed with '
                f'{ex_type.__name__}: {value}')
            return -1
        return command(cmd_args)

    augmented_command.__dict__['wrapper'] = True
    return augmented_command


# Wrapper for aliasing commands
def alias(cmd_alias):
    """Adds an alias to the function dictionary"""
    def aliaser(command):
        aliases = command.__dict__.get('aliases', [])
        aliases.append(cmd_alias)
        command.__dict__['aliases'] = aliases
        return command
    return aliaser


# Helpers for common command tasks

CONFIG_GET_ROOT_VALUE = object()


def config_get(cfg, pathspec):
    """
    Performs config --get for a given config

    cfg is from a `with json_ro` context
    path is the full pathspec, unsplit
    """
    import json

    if pathspec is CONFIG_GET_ROOT_VALUE:
        path = []
    else:
        path = pathspec.split(".")
    for spec in path:
        if spec not in cfg:
            logger.error("Path not found: {}".format(pathspec))
            return -1
        cfg = cfg.get(spec)
    print(json.dumps(cfg, indent=2))
    return 0


def config_set(obj_id, cfg, set_tuple):
    """
    Performs config --set for a given config

    config is from a "with json_rw" context
    set_tuple is a tuple of the pathspec and the value
    """
    import json
    pathspec, value = set_tuple
    if not pathspec:
        logger.error("Path must be non-empty")
    path = pathspec.split('.')
    try:
        value = json.loads(value)
    except JSONDecodeError:
        pass  # Leave value as string
    for spec in path[:-1]:
        if spec not in cfg:
            logger.error("Path not found")
            return -1
        cfg = cfg.get(spec)
    key = path[-1]
    if key not in cfg:
        logger.error("Path not found")
        return -1
    old_value = cfg[key]
    cfg[key] = value
    logger.info("{}.{}: {} -> {}".format(obj_id, pathspec, old_value, value))
    return 0
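
Usage sketch of the two-pass wrapper convention described in the comments above (the command name and --count argument are hypothetical): the same decorated function is first called with an ArgumentParser to register its arguments, then later called with the parsed args namespace to execute.

@alias('xc')
@requires_lexicon
@add_argument("--count", type=int, default=1, help="How many objects to create")
def command_example_create(args):
    """Create example objects in a lexicon"""
    for _ in range(args.count):
        ...  # operate on args.lexicon, loaded by requires_lexicon
    return 0

# Parser pass: command_example_create(subparser) registers --lexicon and --count.
# Execute pass: command_example_create(parsed_args) loads the lexicon model and
# runs the body.
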
@@ -1,64 +0,0 @@
import logging

from amanuensis.backend import *
from amanuensis.db import DbContext, ArticleIndex, IndexType

from .helpers import add_argument


COMMAND_NAME = "index"
COMMAND_HELP = "Interact with indices."

LOG = logging.getLogger(__name__)


@add_argument("--lexicon", required=True)
@add_argument(
    "--type", required=True, type=lambda s: IndexType[s.upper()], choices=IndexType
)
@add_argument("--pattern", required=True)
@add_argument("--logical", type=int, default=0)
@add_argument("--display", type=int, default=0)
@add_argument("--capacity", type=int, default=None)
def command_create(args) -> int:
    """
    Create an index for a lexicon.
    """
    db: DbContext = args.get_db()
    lexicon = lexiq.try_from_name(db, args.lexicon)
    if not lexicon:
        raise ValueError("Lexicon does not exist")
    index: ArticleIndex = indq.create(
        db,
        lexicon.id,
        args.type,
        args.pattern,
        args.logical,
        args.display,
        args.capacity,
    )
    LOG.info(f"Created {index.index_type}:{index.pattern} in {lexicon.full_title}")
    return 0


@add_argument("--lexicon", required=True, help="The lexicon's name")
@add_argument("--character", help="The character's public id")
@add_argument("--index", required=True, help="The index pattern")
@add_argument("--turn", required=True, type=int)
def command_assign(args) -> int:
    """
    Create a turn assignment for a lexicon.
    """
    db: DbContext = args.get_db()
    lexicon = lexiq.try_from_name(db, args.lexicon)
    if not lexicon:
        raise ValueError("Lexicon does not exist")
    char = charq.try_from_public_id(db, args.character)
    assert char
    indices = indq.get_for_lexicon(db, lexicon.id)
    index = [i for i in indices if i.pattern == args.index]
    if not index:
        raise ValueError("Index not found")
    assignment = irq.create(db, lexicon.id, char.id, index[0].id, args.turn)
    LOG.info("Created")
    return 0
@@ -1,76 +1,324 @@
Removed:

import logging

from sqlalchemy import update

from amanuensis.backend import lexiq, memq, userq
from amanuensis.db import DbContext, Lexicon

from .helpers import add_argument


COMMAND_NAME = "lexicon"
COMMAND_HELP = "Interact with lexicons."

LOG = logging.getLogger(__name__)


@add_argument("lexicon")
@add_argument("user")
@add_argument("--editor", action="store_true")
def command_add(args) -> int:
    """
    Add a user to a lexicon.
    """
    db: DbContext = args.get_db()
    lexicon = lexiq.try_from_name(db, args.lexicon)
    if not lexicon:
        raise ValueError("Lexicon does not exist")
    user = userq.try_from_username(db, args.user)
    if not user:
        raise ValueError("User does not exist")
    memq.create(db, user.id, lexicon.id, args.editor)
    LOG.info(f"Added {args.user} to lexicon {args.lexicon}")
    return 0


@add_argument("name")
def command_create(args):
    """
    Create a lexicon.
    """
    db: DbContext = args.get_db()
    lexiq.create(db, args.name, None, f"Prompt for Lexicon {args.name}")
    LOG.info(f"Created lexicon {args.name}")
    return 0


@add_argument("name")
@add_argument("--public", dest="public", action="store_const", const=True)
@add_argument("--no-public", dest="public", action="store_const", const=False)
@add_argument("--join", dest="join", action="store_const", const=True)
@add_argument("--no-join", dest="join", action="store_const", const=False)
@add_argument("--char-limit", type=int, default=None)
def command_edit(args):
    """
    Update a lexicon's configuration.
    """
    db: DbContext = args.get_db()
    values = {}

    if args.public == True:
        values["public"] = True
    elif args.public == False:
        values["public"] = False

    if args.join == True:
        values["joinable"] = True
    elif args.join == False:
        values["joinable"] = False

    if args.char_limit:
        values["character_limit"] = args.char_limit

    result = db(update(Lexicon).where(Lexicon.name == args.name).values(**values))
    LOG.info(f"Updated {result.rowcount} lexicons")
    db.session.commit()
    return 0 if result.rowcount == 1 else -1

Added:

# Standard library imports
import logging

# Module imports
from amanuensis.config import RootConfigDirectoryContext
from amanuensis.models import LexiconModel, UserModel

from .helpers import (
    add_argument, no_argument, requires_lexicon, requires_user, alias,
    config_get, config_set, CONFIG_GET_ROOT_VALUE)

logger = logging.getLogger(__name__)


#
# CRUD commands
#


@alias('lc')
@add_argument("--name", required=True, help="The name of the new lexicon")
@requires_user
@add_argument("--prompt", help="The lexicon's prompt")
def command_create(args):
    """
    Create a lexicon

    The specified user will be the editor. A newly created lexicon is
    not open for joining and requires additional configuration before it is
    playable. The editor should ensure that all settings are as desired before
    opening the lexicon for player joins.
    """
    # Module imports
    from amanuensis.lexicon import valid_name, create_lexicon

    root: RootConfigDirectoryContext = args.root

    # Verify arguments
    if not valid_name(args.name):
        logger.error(f'Lexicon name contains illegal characters: "{args.name}"')
        return -1
    with root.lexicon.read_index() as index:
        if args.name in index.keys():
            logger.error(f'A lexicon with name "{args.name}" already exists')
            return -1

    # Perform command
    create_lexicon(root, args.name, args.user)

    # Output already logged by create_lexicon
    return 0


@alias('ld')
@requires_lexicon
@add_argument("--purge", action="store_true", help="Delete the lexicon's data")
def command_delete(args):
    """
    Delete a lexicon and optionally its data
    """
    raise NotImplementedError()
    # # Module imports
    # from amanuensis.config import logger
    # from amanuensis.lexicon.manage import delete_lexicon

    # # Perform command
    # delete_lexicon(args.lexicon, args.purge)

    # # Output
    # logger.info('Deleted lexicon "{}"'.format(args.lexicon.name))
    # return 0


@alias('ll')
@no_argument
def command_list(args):
    """
    List all lexicons and their statuses
    """
    raise NotImplementedError()
    # # Module imports
    # from amanuensis.lexicon.manage import get_all_lexicons

    # # Execute command
    # lexicons = get_all_lexicons()

    # # Output
    # statuses = []
    # for lex in lexicons:
    #     statuses.append("{0.lid} {0.name} ({1})".format(lex, lex.status()))
    # for s in statuses:
    #     print(s)
    # return 0


@alias('ln')
@requires_lexicon
@add_argument("--get",
    metavar="PATHSPEC",
    dest="get",
    nargs="?",
    const=CONFIG_GET_ROOT_VALUE,
    help="Get the value of a config key")
@add_argument("--set",
    metavar=("PATHSPEC", "VALUE"),
    dest="set",
    nargs=2,
    help="Set the value of a config key")
def command_config(args):
    """
    Interact with a lexicon's config
    """
    lexicon: LexiconModel = args.lexicon

    # Verify arguments
    if args.get and args.set:
        logger.error("Specify one of --get and --set")
        return -1

    # Execute command
    if args.get:
        config_get(lexicon.cfg, args.get)

    if args.set:
        with lexicon.ctx.edit_config() as cfg:
            config_set(lexicon.lid, cfg, args.set)
|
||||||
|
|
||||||
|
# config_* functions handle output
|
||||||
|
return 0
|
||||||
|
|
||||||
|
#
|
||||||
|
# Player/character commands
|
||||||
|
#
|
||||||
|
|
||||||
|
|
||||||
|
@alias('lpa')
|
||||||
|
@requires_lexicon
|
||||||
|
@requires_user
|
||||||
|
def command_player_add(args):
|
||||||
|
"""
|
||||||
|
Add a player to a lexicon
|
||||||
|
"""
|
||||||
|
lexicon: LexiconModel = args.lexicon
|
||||||
|
user: UserModel = args.user
|
||||||
|
|
||||||
|
# Module imports
|
||||||
|
from amanuensis.lexicon import add_player_to_lexicon
|
||||||
|
|
||||||
|
# Verify arguments
|
||||||
|
if user.uid in lexicon.cfg.join.joined:
|
||||||
|
logger.error(f'"{user.cfg.username}" is already a player '
|
||||||
|
f'in "{lexicon.cfg.name}"')
|
||||||
|
return -1
|
||||||
|
|
||||||
|
# Perform command
|
||||||
|
add_player_to_lexicon(user, lexicon)
|
||||||
|
|
||||||
|
# Output
|
||||||
|
logger.info(f'Added user "{user.cfg.username}" to '
|
||||||
|
f'lexicon "{lexicon.cfg.name}"')
|
||||||
|
return 0
|
||||||
|
|
||||||
|
|
||||||
|
@alias('lpr')
|
||||||
|
@requires_lexicon
|
||||||
|
@requires_user
|
||||||
|
def command_player_remove(args):
|
||||||
|
"""
|
||||||
|
Remove a player from a lexicon
|
||||||
|
|
||||||
|
Removing a player dissociates them from any characters
|
||||||
|
they control but does not delete any character data.
|
||||||
|
"""
|
||||||
|
raise NotImplementedError()
|
||||||
|
# # Module imports
|
||||||
|
# from amanuensis.lexicon.manage import remove_player
|
||||||
|
|
||||||
|
# # Verify arguments
|
||||||
|
# if not args.user.in_lexicon(args.lexicon):
|
||||||
|
# logger.error('"{0.username}" is not a player in lexicon "{1.name}"'
|
||||||
|
# ''.format(args.user, args.lexicon))
|
||||||
|
# return -1
|
||||||
|
# if args.user.id == args.lexicon.editor:
|
||||||
|
# logger.error("Can't remove the editor of a lexicon")
|
||||||
|
# return -1
|
||||||
|
|
||||||
|
# # Perform command
|
||||||
|
# remove_player(args.lexicon, args.user)
|
||||||
|
|
||||||
|
# # Output
|
||||||
|
# logger.info('Removed "{0.username}" from lexicon "{1.name}"'.format(
|
||||||
|
# args.user, args.lexicon))
|
||||||
|
# return 0
|
||||||
|
|
||||||
|
|
||||||
|
@alias('lpl')
|
||||||
|
@requires_lexicon
|
||||||
|
def command_player_list(args):
|
||||||
|
"""
|
||||||
|
List all players in a lexicon
|
||||||
|
"""
|
||||||
|
raise NotImplementedError()
|
||||||
|
# import json
|
||||||
|
# # Module imports
|
||||||
|
# from amanuensis.user import UserModel
|
||||||
|
|
||||||
|
# # Perform command
|
||||||
|
# players = list(map(
|
||||||
|
# lambda uid: UserModel.by(uid=uid).username,
|
||||||
|
# args.lexicon.join.joined))
|
||||||
|
|
||||||
|
# # Output
|
||||||
|
# print(json.dumps(players, indent=2))
|
||||||
|
# return 0
|
||||||
|
|
||||||
|
|
||||||
|
@alias('lcc')
|
||||||
|
@requires_lexicon
|
||||||
|
@requires_user
|
||||||
|
@add_argument("--charname", required=True, help="The character's name")
|
||||||
|
def command_char_create(args):
|
||||||
|
"""
|
||||||
|
Create a character for a lexicon
|
||||||
|
|
||||||
|
The specified player will be set as the character's player.
|
||||||
|
"""
|
||||||
|
lexicon: LexiconModel = args.lexicon
|
||||||
|
user: UserModel = args.user
|
||||||
|
|
||||||
|
# Module imports
|
||||||
|
from amanuensis.lexicon import create_character_in_lexicon
|
||||||
|
|
||||||
|
# Verify arguments
|
||||||
|
if user.uid not in lexicon.cfg.join.joined:
|
||||||
|
logger.error('"{0.username}" is not a player in lexicon "{1.name}"'
|
||||||
|
''.format(user.cfg, lexicon.cfg))
|
||||||
|
return -1
|
||||||
|
|
||||||
|
# Perform command
|
||||||
|
create_character_in_lexicon(user, lexicon, args.charname)
|
||||||
|
|
||||||
|
# Output
|
||||||
|
logger.info(f'Created character "{args.charname}" for "{user.cfg.username}"'
|
||||||
|
f' in "{lexicon.cfg.name}"')
|
||||||
|
return 0
|
||||||
|
|
||||||
|
|
||||||
|
@alias('lcd')
|
||||||
|
@requires_lexicon
|
||||||
|
@add_argument("--charname", required=True, help="The character's name")
|
||||||
|
def command_char_delete(args):
|
||||||
|
"""
|
||||||
|
Delete a character from a lexicon
|
||||||
|
|
||||||
|
Deleting a character dissociates them from any content
|
||||||
|
they have contributed rather than deleting it.
|
||||||
|
"""
|
||||||
|
raise NotImplementedError()
|
||||||
|
# # Module imports
|
||||||
|
# from amanuensis.lexicon import LexiconModel
|
||||||
|
# from amanuensis.lexicon.manage import delete_character
|
||||||
|
|
||||||
|
# # Verify arguments
|
||||||
|
# lex = LexiconModel.by(name=args.lexicon)
|
||||||
|
# if lex is None:
|
||||||
|
# logger.error("Could not find lexicon '{}'".format(args.lexicon))
|
||||||
|
# return -1
|
||||||
|
|
||||||
|
# # Internal call
|
||||||
|
# delete_character(lex, args.charname)
|
||||||
|
# return 0
|
||||||
|
|
||||||
|
|
||||||
|
@alias('lcl')
|
||||||
|
@requires_lexicon
|
||||||
|
def command_char_list(args):
|
||||||
|
"""
|
||||||
|
List all characters in a lexicon
|
||||||
|
"""
|
||||||
|
raise NotImplementedError()
|
||||||
|
# import json
|
||||||
|
# # Module imports
|
||||||
|
# from amanuensis.lexicon import LexiconModel
|
||||||
|
|
||||||
|
# # Verify arguments
|
||||||
|
# lex = LexiconModel.by(name=args.lexicon)
|
||||||
|
# if lex is None:
|
||||||
|
# logger.error("Could not find lexicon '{}'".format(args.lexicon))
|
||||||
|
# return -1
|
||||||
|
|
||||||
|
# # Internal call
|
||||||
|
# print(json.dumps(lex.character, indent=2))
|
||||||
|
# return 0
|
||||||
|
|
||||||
|
#
|
||||||
|
# Procedural commands
|
||||||
|
#
|
||||||
|
|
||||||
|
|
||||||
|
@alias('lpt')
|
||||||
|
@requires_lexicon
|
||||||
|
@add_argument("--as-deadline",
|
||||||
|
action="store_true",
|
||||||
|
help="Notifies players of the publish result")
|
||||||
|
@add_argument("--force",
|
||||||
|
action="store_true",
|
||||||
|
help="Publish all approved articles, regardless of other checks")
|
||||||
|
def command_publish_turn(args):
|
||||||
|
"""
|
||||||
|
Publishes the current turn of a lexicon
|
||||||
|
|
||||||
|
The --as-deadline flag is intended to be used only by the scheduled publish
|
||||||
|
attempts controlled by the publish.deadlines setting.
|
||||||
|
|
||||||
|
The --force flag bypasses the publish.quorum and publish.block_on_ready
|
||||||
|
settings.
|
||||||
|
"""
|
||||||
|
# Module imports
|
||||||
|
from amanuensis.lexicon import attempt_publish
|
||||||
|
|
||||||
|
# Internal call
|
||||||
|
result = attempt_publish(args.lexicon)
|
||||||
|
|
||||||
|
if not result:
|
||||||
|
logger.error('Publish failed, check lexicon log')
|
||||||
|
|
|
@@ -1,31 +0,0 @@
import logging

from amanuensis.backend import *
from amanuensis.db import *

from .helpers import add_argument


COMMAND_NAME = "post"
COMMAND_HELP = "Interact with posts."

LOG = logging.getLogger(__name__)


@add_argument("--lexicon", required=True, help="The lexicon's name")
@add_argument("--by", help="The character's public id")
@add_argument("--text", help="The text of the post")
def command_create(args) -> int:
    """
    Create a post in a lexicon.
    """
    db: DbContext = args.get_db()
    lexicon = lexiq.try_from_name(db, args.lexicon)
    if not lexicon:
        raise ValueError("Lexicon does not exist")
    user = userq.try_from_username(db, args.by)
    user_id = user.id if user else None
    post: Post = postq.create(db, lexicon.id, user_id, args.text)
    preview = post.body[:20] + "..." if len(post.body) > 20 else post.body
    LOG.info(f"Posted '{preview}' in {lexicon.full_title}")
    return 0

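The `postq.create` helper used by the post command above is outside this compare. As a rough sketch of the behavior the call site appears to assume (the signature and the commit step are guesses for illustration, not the project's actual backend code):

def create(db: DbContext, lexicon_id: int, user_id, body: str) -> Post:
    # Hypothetical sketch: insert a Post row and hand it back after committing.
    post = Post(lexicon_id=lexicon_id, user_id=user_id, body=body)
    db.session.add(post)
    db.session.commit()
    return post
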
@@ -0,0 +1,120 @@
import logging
import os

from amanuensis.config import RootConfigDirectoryContext

from .helpers import (
    add_argument,
    no_argument,
    alias,
    config_get,
    config_set,
    CONFIG_GET_ROOT_VALUE)

logger = logging.getLogger(__name__)


@alias('i')
@add_argument("--refresh",
    action="store_true",
    help="Refresh an existing config directory")
def command_init(args):
    """
    Initialize a config directory at --config-dir

    A clean config directory will contain a config.json, a
    lexicon config directory, and a user config directory.

    Refreshing an existing directory will add keys to the global config that
    are present in the default configs. Users and lexicons that are missing
    from the indexes will be deleted, and stale index entries will be removed.
    """
    # Module imports
    from amanuensis.config.init import create_config_dir

    # Verify arguments
    if args.refresh and not os.path.isdir(args.config_dir):
        print("Error: couldn't find directory '{}'".format(args.config_dir))
        return -1

    # Internal call
    create_config_dir(args.config_dir, args.refresh)
    logger.info(f'Initialized config dir at {args.config_dir}')
    return 0


@alias('gs')
@no_argument
def command_generate_secret(args):
    """
    Generate a Flask secret key

    The Flask server will not run unless a secret key has
    been generated.
    """
    root: RootConfigDirectoryContext = args.root
    secret_key: bytes = os.urandom(32)
    with root.edit_config() as cfg:
        cfg.secret_key = secret_key.hex()
    logger.info("Regenerated Flask secret key")
    return 0


@alias('r')
@add_argument("-a", "--address", default="127.0.0.1")
@add_argument("-p", "--port", default="5000")
@add_argument("--debug", action="store_true")
def command_run(args):
    """
    Run the default Flask server

    The default Flask server is not secure, and should
    only be used for development.
    """
    from amanuensis.server import get_app

    root: RootConfigDirectoryContext = args.root

    with root.read_config() as cfg:
        if cfg.secret_key is None:
            logger.error("Can't run server without a secret_key. "
                "Run generate-secret first.")
            return -1

    get_app(root).run(host=args.address, port=args.port, debug=args.debug)
    return 0


@alias('n')
@add_argument("--get",
    metavar="PATHSPEC",
    dest="get",
    nargs="?",
    const=CONFIG_GET_ROOT_VALUE,
    help="Get the value of a config key")
@add_argument("--set",
    metavar=("PATHSPEC", "VALUE"),
    dest="set",
    nargs=2,
    help="Set the value of a config key")
def command_config(args):
    """
    Interact with the global config

    PATHSPEC is a path into the config object formatted as
    a dot-separated sequence of keys.
    """
    root: RootConfigDirectoryContext = args.root

    if args.get and args.set:
        logger.error("Specify one of --get and --set")
        return -1

    if args.get:
        with root.read_config() as cfg:
            config_get(cfg, args.get)

    if args.set:
        with root.edit_config() as cfg:
            config_set("config", cfg, args.set)

    return 0

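The config command's docstring describes PATHSPEC as a dot-separated key path, but `config_get` and `config_set` live outside this compare. A minimal sketch of the traversal idea, with a hypothetical helper name and an example key:

def resolve_pathspec(cfg, pathspec: str):
    # Walk a dot-separated key path down into the nested config dict.
    node = cfg
    for key in pathspec.split('.'):
        node = node[key]
    return node

# e.g. resolve_pathspec(cfg, 'publish.quorum') reads cfg['publish']['quorum']
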
@@ -1,85 +1,158 @@
|
||||||
|
# Standard library imports
|
||||||
|
import getpass
|
||||||
import logging
|
import logging
|
||||||
from typing import Optional
|
# import shutil
|
||||||
|
|
||||||
from amanuensis.backend import userq
|
# Module imports
|
||||||
from amanuensis.db import DbContext, User
|
from amanuensis.models import UserModel
|
||||||
|
|
||||||
from .helpers import add_argument
|
from .helpers import (
|
||||||
|
add_argument,
|
||||||
|
no_argument,
|
||||||
|
requires_user,
|
||||||
|
alias,
|
||||||
|
config_get,
|
||||||
|
config_set,
|
||||||
|
CONFIG_GET_ROOT_VALUE)
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
COMMAND_NAME = "user"
|
@alias('uc')
|
||||||
COMMAND_HELP = "Interact with users."
|
@add_argument("--username", required=True, help="Name of user to create")
|
||||||
|
@add_argument("--email", help="User's email")
|
||||||
|
@add_argument("--displayname", help="User's publicly displayed name")
|
||||||
|
def command_create(args):
|
||||||
|
"""
|
||||||
|
Create a user
|
||||||
|
"""
|
||||||
|
# Module imports
|
||||||
|
from amanuensis.user import (
|
||||||
|
valid_username, valid_email, create_user)
|
||||||
|
|
||||||
LOG = logging.getLogger(__name__)
|
# Verify arguments
|
||||||
|
if not valid_username(args.username):
|
||||||
|
logger.error("Invalid username: usernames may only contain alphanumer"
|
||||||
@add_argument("username")
|
"ic characters, dashes, and underscores")
|
||||||
@add_argument("--password", default="password")
|
return -1
|
||||||
@add_argument("--email", default="")
|
if not args.displayname:
|
||||||
def command_create(args) -> int:
|
args.displayname = args.username
|
||||||
"""Create a user."""
|
if args.email and not valid_email(args.email):
|
||||||
db: DbContext = args.get_db()
|
logger.error("Invalid email")
|
||||||
userq.create(db, args.username, "password", args.username, args.email, False)
|
return -1
|
||||||
userq.password_set(db, args.username, args.password)
|
try:
|
||||||
LOG.info(f"Created user {args.username}")
|
existing_user = args.model_factory.user(args.username)
|
||||||
return 0
|
if existing_user is not None:
|
||||||
|
logger.error("Invalid username: username is already taken")
|
||||||
|
return -1
|
||||||
@add_argument("username")
|
except Exception:
|
||||||
def command_promote(args) -> int:
|
pass # User doesn't already exist, good to go
|
||||||
"""Make a user a site admin."""
|
|
||||||
db: DbContext = args.get_db()
|
# Perform command
|
||||||
user: Optional[User] = userq.try_from_username(db, args.username)
|
new_user, tmp_pw = create_user(
|
||||||
if user is None:
|
args.root,
|
||||||
args.parser.error("User not found")
|
args.model_factory,
|
||||||
return -1
|
args.username,
|
||||||
if user.is_site_admin:
|
args.displayname,
|
||||||
LOG.info(f"{user.username} is already a site admin.")
|
args.email)
|
||||||
else:
|
|
||||||
user.is_site_admin = True
|
# Output
|
||||||
LOG.info(f"Promoting {user.username} to site admin.")
|
print(tmp_pw)
|
||||||
db.session.commit()
|
return 0
|
||||||
return 0
|
|
||||||
|
|
||||||
|
|
||||||
@add_argument("username")
|
|
||||||
def command_demote(args):
|
|
||||||
"""Revoke a user's site admin status."""
|
|
||||||
db: DbContext = args.get_db()
|
|
||||||
user: Optional[User] = userq.try_from_username(db, args.username)
|
|
||||||
if user is None:
|
|
||||||
args.parser.error("User not found")
|
|
||||||
return -1
|
|
||||||
if not user.is_site_admin:
|
|
||||||
LOG.info(f"{user.username} is not a site admin.")
|
|
||||||
else:
|
|
||||||
user.is_site_admin = False
|
|
||||||
LOG.info(f"Revoking site admin status for {user.username}.")
|
|
||||||
db.session.commit()
|
|
||||||
return 0
|
|
||||||
|
|
||||||
|
|
||||||
|
@alias('ud')
|
||||||
|
@requires_user
|
||||||
def command_delete(args):
|
def command_delete(args):
|
||||||
"""
|
"""
|
||||||
Delete a user.
|
Delete a user
|
||||||
"""
|
"""
|
||||||
raise NotImplementedError()
|
raise NotImplementedError()
|
||||||
|
# # Module imports
|
||||||
|
# from amanuensis.config import logger, prepend, json_rw
|
||||||
|
|
||||||
|
# # Perform command
|
||||||
|
# user_path = prepend('user', args.user.id)
|
||||||
|
# shutil.rmtree(user_path)
|
||||||
|
# with json_rw('user', 'index.json') as index:
|
||||||
|
# del index[args.user.username]
|
||||||
|
|
||||||
|
# # TODO resolve user id references in all games
|
||||||
|
|
||||||
|
# # Output
|
||||||
|
# logger.info("Deleted user {0.username} ({0.id})".format(args.user))
|
||||||
|
# return 0
|
||||||
|
|
||||||
|
|
||||||
|
@alias('ul')
|
||||||
|
@no_argument
|
||||||
def command_list(args):
|
def command_list(args):
|
||||||
"""
|
"""List all users"""
|
||||||
List all users.
|
raise NotImplementedError()
|
||||||
"""
|
# # Module imports
|
||||||
raise NotImplementedError()
|
# from amanuensis.config import prepend, json_ro
|
||||||
|
# from amanuensis.user import UserModel
|
||||||
|
|
||||||
|
# # Perform command
|
||||||
|
# users = []
|
||||||
|
# with json_ro('user', 'index.json') as index:
|
||||||
|
# for username, uid in index.items():
|
||||||
|
# users.append(UserModel.by(uid=uid))
|
||||||
|
|
||||||
|
# # Output
|
||||||
|
# users.sort(key=lambda u: u.username)
|
||||||
|
# for user in users:
|
||||||
|
# print("{0.id} {0.displayname} ({0.username})".format(user))
|
||||||
|
# return 0
|
||||||
|
|
||||||
|
|
||||||
@add_argument("username")
|
@alias('un')
|
||||||
@add_argument("password")
|
@requires_user
|
||||||
def command_passwd(args) -> int:
|
@add_argument(
|
||||||
"""
|
"--get", metavar="PATHSPEC", dest="get",
|
||||||
Set a user's password.
|
nargs="?", const=CONFIG_GET_ROOT_VALUE, help="Get the value of a config key")
|
||||||
"""
|
@add_argument(
|
||||||
db: DbContext = args.get_db()
|
"--set", metavar=("PATHSPEC", "VALUE"), dest="set",
|
||||||
userq.password_set(db, args.username, args.password)
|
nargs=2, help="Set the value of a config key")
|
||||||
LOG.info(f"Updated password for {args.username}")
|
def command_config(args):
|
||||||
return 0
|
"""
|
||||||
|
Interact with a user's config
|
||||||
|
"""
|
||||||
|
user: UserModel = args.user
|
||||||
|
|
||||||
|
# Verify arguments
|
||||||
|
if args.get and args.set:
|
||||||
|
logger.error("Specify one of --get and --set")
|
||||||
|
return -1
|
||||||
|
|
||||||
|
# Perform command
|
||||||
|
if args.get:
|
||||||
|
config_get(user.cfg, args.get)
|
||||||
|
|
||||||
|
if args.set:
|
||||||
|
with user.ctx.edit_config() as cfg:
|
||||||
|
config_set(user.uid, cfg, args.set)
|
||||||
|
|
||||||
|
# Output
|
||||||
|
return 0
|
||||||
|
|
||||||
|
|
||||||
|
@alias('up')
|
||||||
|
@requires_user
|
||||||
|
@add_argument("--password", help="The password to set. Used for scripting; "
|
||||||
|
"not recommended for general use")
|
||||||
|
def command_passwd(args):
|
||||||
|
"""
|
||||||
|
Set a user's password
|
||||||
|
"""
|
||||||
|
user: UserModel = args.user
|
||||||
|
|
||||||
|
# Verify arguments
|
||||||
|
password: str = args.password or getpass.getpass("Password: ")
|
||||||
|
|
||||||
|
# Perform command
|
||||||
|
user.set_password(password)
|
||||||
|
|
||||||
|
# Output
|
||||||
|
logger.info('Updated password for {}'.format(user.cfg.username))
|
||||||
|
return 0
|
||||||
|
|
|
@@ -1,46 +0,0 @@
from argparse import ArgumentParser
from typing import Optional
import os


class AmanuensisConfig:
    """Base config type. Defines config keys for subclasses to override."""

    # If CONFIG_FILE is defined, the config file it points to may override
    # config values defined on the config object itself.
    CONFIG_FILE: Optional[str] = None
    STATIC_ROOT: Optional[str] = "../resources"
    SECRET_KEY: Optional[str] = "secret"
    DATABASE_URI: Optional[str] = "sqlite:///:memory:"
    TESTING: bool = False


class EnvironmentConfig(AmanuensisConfig):
    """Loads config values from environment variables."""

    CONFIG_FILE = os.environ.get("AMANUENSIS_CONFIG_FILE", AmanuensisConfig.CONFIG_FILE)
    STATIC_ROOT = os.environ.get("AMANUENSIS_STATIC_ROOT", AmanuensisConfig.STATIC_ROOT)
    SECRET_KEY = os.environ.get("AMANUENSIS_SECRET_KEY", AmanuensisConfig.SECRET_KEY)
    DATABASE_URI = os.environ.get(
        "AMANUENSIS_DATABASE_URI", AmanuensisConfig.DATABASE_URI
    )
    TESTING = os.environ.get("AMANUENSIS_TESTING", "").lower() in ("true", "1")


class CommandLineConfig(AmanuensisConfig):
    """Loads config values from command line arguments."""

    def __init__(self) -> None:
        parser = ArgumentParser()
        parser.add_argument("--config-file", default=AmanuensisConfig.CONFIG_FILE)
        parser.add_argument("--static-root", default=AmanuensisConfig.STATIC_ROOT)
        parser.add_argument("--secret-key", default=AmanuensisConfig.SECRET_KEY)
        parser.add_argument("--database-uri", default=AmanuensisConfig.DATABASE_URI)
        parser.add_argument("--debug", action="store_true")
        args = parser.parse_args()

        self.CONFIG_FILE = args.config_file
        self.STATIC_ROOT = args.static_root
        self.SECRET_KEY = args.secret_key
        self.DATABASE_URI = args.database_uri
        self.TESTING = args.debug

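A brief usage sketch for the three config classes above; the selection rule here is illustrative only (it is not part of the diff) and assumes environment configuration should win whenever any AMANUENSIS_* variable is set:

import os

if any(key.startswith("AMANUENSIS_") for key in os.environ):
    config = EnvironmentConfig()
else:
    config = CommandLineConfig()
print(config.DATABASE_URI)
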
@@ -0,0 +1,23 @@
# Module imports
from .dict import AttrOrderedDict, ReadOnlyOrderedDict
from .directory import (
    RootConfigDirectoryContext,
    UserConfigDirectoryContext,
    LexiconConfigDirectoryContext,
    is_guid)

# Environment variable name constants
ENV_SECRET_KEY = "AMANUENSIS_SECRET_KEY"
ENV_CONFIG_DIR = "AMANUENSIS_CONFIG_DIR"
ENV_LOG_FILE = "AMANUENSIS_LOG_FILE"
ENV_LOG_FILE_SIZE = "AMANUENSIS_LOG_FILE_SIZE"
ENV_LOG_FILE_NUM = "AMANUENSIS_LOG_FILE_NUM"

__all__ = [
    AttrOrderedDict.__name__,
    ReadOnlyOrderedDict.__name__,
    RootConfigDirectoryContext.__name__,
    UserConfigDirectoryContext.__name__,
    LexiconConfigDirectoryContext.__name__,
    is_guid.__name__,
]

@@ -0,0 +1,82 @@
"""
`with` context managers for mediating config file access.
"""
# Standard library imports
import fcntl
import json

# Application imports
from .dict import AttrOrderedDict, ReadOnlyOrderedDict


class open_lock():
    """A context manager that opens a file with the specified file lock"""
    def __init__(self, path, mode, lock_type):
        self.fd = open(path, mode, encoding='utf8')
        fcntl.lockf(self.fd, lock_type)

    def __enter__(self):
        return self.fd

    def __exit__(self, exc_type, exc_value, traceback):
        fcntl.lockf(self.fd, fcntl.LOCK_UN)
        self.fd.close()


class open_sh(open_lock):
    """A context manager that opens a file with a shared lock"""
    def __init__(self, path, mode):
        super().__init__(path, mode, fcntl.LOCK_SH)


class open_ex(open_lock):
    """A context manager that opens a file with an exclusive lock"""
    def __init__(self, path, mode):
        super().__init__(path, mode, fcntl.LOCK_EX)


class json_ro(open_sh):
    """
    A context manager that opens a file in a shared, read-only mode.
    The contents of the file are read as JSON and returned as a read-
    only OrderedDict.
    """
    def __init__(self, path):
        super().__init__(path, 'r')
        self.config = None

    def __enter__(self) -> ReadOnlyOrderedDict:
        self.config = json.load(self.fd, object_pairs_hook=ReadOnlyOrderedDict)
        return self.config


class json_rw(open_ex):
    """
    A context manager that opens a file with an exclusive lock. The
    file mode defaults to r+, which requires that the file exist. The
    file mode can be set to w+ to create a new file by setting the new
    kwarg in the ctor. The contents of the file are read as JSON and
    returned in an AttrOrderedDict. Any changes to the context dict
    will be written out to the file when the context manager exits,
    unless an exception is raised before exiting.
    """
    def __init__(self, path, new=False):
        mode = 'w+' if new else 'r+'
        super().__init__(path, mode)
        self.config = None
        self.new = new

    def __enter__(self) -> AttrOrderedDict:
        if not self.new:
            self.config = json.load(self.fd, object_pairs_hook=AttrOrderedDict)
        else:
            self.config = AttrOrderedDict()
        return self.config

    def __exit__(self, exc_type, exc_value, traceback):
        # Only write the new value out if there wasn't an exception
        if not exc_type:
            self.fd.seek(0)
            json.dump(self.config, self.fd, allow_nan=False, indent='\t')
            self.fd.truncate()
        super().__exit__(exc_type, exc_value, traceback)

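A short usage sketch for the locking context managers above. The path is illustrative; `secret_key` is a key the global config is shown to carry elsewhere in this compare:

# Read under a shared lock, then update under an exclusive lock.
# json_rw writes the dict back to disk on a clean exit.
with json_ro('config/config.json') as cfg:
    print(cfg.secret_key)

with json_rw('config/config.json') as cfg:
    cfg.secret_key = '0123abcd'
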
@@ -0,0 +1,52 @@
"""
Dictionary classes used to represent JSON config files in memory.
"""
from collections import OrderedDict

from amanuensis.errors import ReadOnlyError


class AttrOrderedDict(OrderedDict):
    """
    An OrderedDict with attribute access to known keys and explicit
    creation of new keys.
    """
    def __getattr__(self, key):
        if key not in self:
            raise AttributeError(key)
        return self[key]

    def __setattr__(self, key, value):
        if key not in self:
            raise AttributeError(key)
        self[key] = value

    def new(self, key, value):
        """Setter for adding new keys"""
        if key in self:
            raise KeyError("Key already exists: '{}'".format(key))
        self[key] = value


class ReadOnlyOrderedDict(OrderedDict):
    """
    An OrderedDict that cannot be modified with attribute access to
    known keys.
    """
    def __readonly__(self, *args, **kwargs):
        raise ReadOnlyError("Cannot modify a ReadOnlyOrderedDict")

    def __init__(self, *args, **kwargs):
        super(ReadOnlyOrderedDict, self).__init__(*args, **kwargs)
        self.__setitem__ = self.__readonly__
        self.__delitem__ = self.__readonly__
        self.pop = self.__readonly__
        self.popitem = self.__readonly__
        self.clear = self.__readonly__
        self.update = self.__readonly__
        self.setdefault = self.__readonly__

    def __getattr__(self, key):
        if key not in self:
            raise AttributeError(key)
        return self[key]

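A small illustration of the intended access pattern (the keys are hypothetical):

cfg = AttrOrderedDict()
cfg.new('title', 'Lexicon Example')   # new keys must be created explicitly
cfg.title = 'Renamed Example'         # attribute access works for existing keys
frozen = ReadOnlyOrderedDict(cfg)
print(frozen.title)                   # read-only view with attribute access
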
@@ -0,0 +1,160 @@
"""
Config directory abstractions that encapsulate path munging and context
manager usage.
"""
import os
import re
from typing import Iterable

from amanuensis.errors import MissingConfigError, ConfigAlreadyExistsError

from .context import json_ro, json_rw


def is_guid(s: str) -> bool:
    return bool(re.match(r'[0-9a-z]{32}', s.lower()))


class ConfigDirectoryContext():
    """
    Base class for CRUD operations on config files in a config
    directory.
    """
    def __init__(self, path: str):
        self.path: str = path
        if not os.path.isdir(self.path):
            raise MissingConfigError(path)

    def new(self, filename) -> json_rw:
        """
        Creates a JSON file that doesn't already exist.
        """
        if not filename.endswith('.json'):
            filename = f'{filename}.json'
        fpath: str = os.path.join(self.path, filename)
        if os.path.isfile(fpath):
            raise ConfigAlreadyExistsError(fpath)
        return json_rw(fpath, new=True)

    def read(self, filename) -> json_ro:
        """
        Loads a JSON file in read-only mode.
        """
        if not filename.endswith('.json'):
            filename = f'{filename}.json'
        fpath: str = os.path.join(self.path, filename)
        if not os.path.isfile(fpath):
            raise MissingConfigError(fpath)
        return json_ro(fpath)

    def edit(self, filename, create=False) -> json_rw:
        """
        Loads a JSON file in write mode.
        """
        if not filename.endswith('.json'):
            filename = f'{filename}.json'
        fpath: str = os.path.join(self.path, filename)
        if not create and not os.path.isfile(fpath):
            raise MissingConfigError(fpath)
        return json_rw(fpath, new=create)

    def delete(self, filename) -> None:
        """Deletes a file."""
        if not filename.endswith('.json'):
            filename = f'{filename}.json'
        fpath: str = os.path.join(self.path, filename)
        if not os.path.isfile(fpath):
            raise MissingConfigError(fpath)
        os.remove(fpath)

    def ls(self) -> Iterable[str]:
        """Lists all files in this directory."""
        filenames: Iterable[str] = os.listdir(self.path)
        return filenames


class ConfigFileConfigDirectoryContext(ConfigDirectoryContext):
    """
    Config directory with a `config.json`.
    """
    def __init__(self, path: str):
        super().__init__(path)
        config_path = os.path.join(self.path, 'config.json')
        if not os.path.isfile(config_path):
            raise MissingConfigError(config_path)

    def edit_config(self) -> json_rw:
        """rw context manager for this object's config file."""
        return self.edit('config')

    def read_config(self) -> json_ro:
        """ro context manager for this object's config file."""
        return self.read('config')


class IndexDirectoryContext(ConfigDirectoryContext):
    """
    A lookup layer for getting config directory contexts for lexicon
    or user directories.
    """
    def __init__(self, path: str, cdc_type: type):
        super().__init__(path)
        index_path = os.path.join(self.path, 'index.json')
        if not os.path.isfile(index_path):
            raise MissingConfigError(index_path)
        self.cdc_type = cdc_type

    def __getitem__(self, key: str) -> ConfigFileConfigDirectoryContext:
        """
        Returns a context to the given item. key is treated as the
        item's id if it's a guid string, otherwise it's treated as
        the item's indexed name and run through the index first.
        """
        if not is_guid(key):
            with self.read_index() as index:
                iid = index.get(key)
                if not iid:
                    raise MissingConfigError(key)
                key = iid
        return self.cdc_type(os.path.join(self.path, key))

    def edit_index(self) -> json_rw:
        return self.edit('index')

    def read_index(self) -> json_ro:
        return self.read('index')


class RootConfigDirectoryContext(ConfigFileConfigDirectoryContext):
    """
    Context for the config directory with links to the lexicon and
    user contexts.
    """
    def __init__(self, path):
        super().__init__(path)
        self.lexicon: IndexDirectoryContext = IndexDirectoryContext(
            os.path.join(self.path, 'lexicon'),
            LexiconConfigDirectoryContext)
        self.user: IndexDirectoryContext = IndexDirectoryContext(
            os.path.join(self.path, 'user'),
            UserConfigDirectoryContext)


class LexiconConfigDirectoryContext(ConfigFileConfigDirectoryContext):
    """
    A config context for a lexicon's config directory.
    """
    def __init__(self, path):
        super().__init__(path)
        self.draft: ConfigDirectoryContext = ConfigDirectoryContext(
            os.path.join(self.path, 'draft'))
        self.src: ConfigDirectoryContext = ConfigDirectoryContext(
            os.path.join(self.path, 'src'))
        self.article: ConfigDirectoryContext = ConfigDirectoryContext(
            os.path.join(self.path, 'article'))


class UserConfigDirectoryContext(ConfigFileConfigDirectoryContext):
    """
    A config context for a user's config directory.
    """

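Putting the directory contexts together, a hedged usage sketch; the lexicon name 'Example' and the `name` config key are assumptions about a particular config layout, not something this diff defines:

root = RootConfigDirectoryContext('./config')
# Index lookup by name, then read that lexicon's config.json.
with root.lexicon['Example'].read_config() as cfg:
    print(cfg.name)
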
@@ -0,0 +1,96 @@
# Standard library imports
from collections import OrderedDict
import fcntl
import json
import os
import shutil

# Module imports
from amanuensis.resources import get_stream

from .context import json_ro, json_rw


def create_config_dir(config_dir, refresh=False):
    """
    Create or refresh a config directory
    """

    def prepend(*path):
        joined = os.path.join(*path)
        if not joined.startswith(config_dir):
            joined = os.path.join(config_dir, joined)
        return joined

    # Create the directory if it doesn't exist.
    if not os.path.isdir(config_dir):
        os.mkdir(config_dir)

    # The directory should be empty if we're not updating an existing one.
    if len(os.listdir(config_dir)) > 0 and not refresh:
        print("Directory {} is not empty".format(config_dir))
        return -1

    # Update or create global config.
    def_cfg = get_stream("global.json")
    global_config_path = prepend("config.json")
    if refresh and os.path.isfile(global_config_path):
        # We need to write an entirely different ordereddict to the config
        # file, so we mimic the config.context functionality manually.
        with open(global_config_path, 'r+', encoding='utf8') as cfg_file:
            fcntl.lockf(cfg_file, fcntl.LOCK_EX)
            old_cfg = json.load(cfg_file, object_pairs_hook=OrderedDict)
            new_cfg = json.load(def_cfg, object_pairs_hook=OrderedDict)
            merged = {}
            for key in new_cfg:
                merged[key] = old_cfg[key] if key in old_cfg else new_cfg[key]
                if key not in old_cfg:
                    print("Added key '{}' to config".format(key))
            for key in old_cfg:
                if key not in new_cfg:
                    print("Config contains unknown key '{}'".format(key))
                    merged[key] = old_cfg[key]
            cfg_file.seek(0)
            json.dump(merged, cfg_file, allow_nan=False, indent='\t')
            cfg_file.truncate()
            fcntl.lockf(cfg_file, fcntl.LOCK_UN)
    else:
        with open(prepend("config.json"), 'wb') as f:
            f.write(def_cfg.read())

    # Ensure lexicon subdir exists.
    if not os.path.isdir(prepend("lexicon")):
        os.mkdir(prepend("lexicon"))
    if not os.path.isfile(prepend("lexicon", "index.json")):
        with open(prepend("lexicon", "index.json"), 'w') as f:
            json.dump({}, f)

    # Ensure user subdir exists.
    if not os.path.isdir(prepend("user")):
        os.mkdir(prepend("user"))
    if not os.path.isfile(prepend('user', 'index.json')):
        with open(prepend('user', 'index.json'), 'w') as f:
            json.dump({}, f)

    if refresh:
        for dir_name in ('lexicon', 'user'):
            # Clean up unindexed folders
            with json_ro(prepend(dir_name, 'index.json')) as index:
                known = list(index.values())
            entries = os.listdir(prepend(dir_name))
            for dir_entry in entries:
                if dir_entry == "index.json":
                    continue
                if dir_entry in known:
                    continue
                print("Removing unindexed folder: '{}/{}'"
                    .format(dir_name, dir_entry))
                shutil.rmtree(prepend(dir_name, dir_entry))

            # Remove orphaned index listings
            # (iterate over a copy so stale entries can be deleted mid-loop)
            with json_rw(prepend(dir_name, 'index.json')) as index:
                for name, entry in list(index.items()):
                    if not os.path.isdir(prepend(dir_name, entry)):
                        print("Removing stale {} index entry '{}: {}'"
                            .format(dir_name, name, entry))
                        del index[name]

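For reference, the two ways `create_config_dir` above is expected to be called, mirroring the `init` command earlier in this compare (the directory path is illustrative):

create_config_dir('./config')                 # first-time setup
create_config_dir('./config', refresh=True)   # refresh an existing directory
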
@@ -1,31 +0,0 @@
from .database import DbContext
from .models import (
    User,
    Lexicon,
    Membership,
    Character,
    ArticleState,
    Article,
    IndexType,
    ArticleIndex,
    ArticleIndexRule,
    ArticleContentRuleType,
    ArticleContentRule,
    Post,
)

__all__ = [
    "DbContext",
    "User",
    "Lexicon",
    "Membership",
    "Character",
    "ArticleState",
    "Article",
    "IndexType",
    "ArticleIndex",
    "ArticleIndexRule",
    "ArticleContentRuleType",
    "ArticleContentRule",
    "Post",
]

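Because the package re-exports everything in `__all__`, callers can import the database context and the ORM models from one place, for example:

from amanuensis.db import DbContext, Lexicon, Post
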
@@ -1,74 +0,0 @@
"""
Database connection setup
"""
import os

from sqlalchemy import create_engine, MetaData, event
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import scoped_session, sessionmaker

try:
    from greenlet import getcurrent as get_ident
except ImportError:
    from threading import get_ident


# Define naming conventions for generated constraints
metadata = MetaData(
    naming_convention={
        "ix": "ix_%(column_0_label)s",
        "uq": "uq_%(table_name)s_%(column_0_name)s",
        "ck": "ck_%(table_name)s_%(constraint_name)s",
        "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
        "pk": "pk_%(table_name)s",
    }
)

# Base class for ORM models
ModelBase = declarative_base(metadata=metadata)


class DbContext:
    """Class encapsulating connections to the database."""

    def __init__(self, path=None, uri=None, echo=False):
        """
        Create a database context.
        Exactly one of `path` and `uri` should be specified.
        """
        if path and uri:
            raise ValueError("Only one of path and uri may be specified")
        self.db_uri = uri if uri else f"sqlite:///{os.path.abspath(path)}"

        # Create an engine and enable foreign key constraints in sqlite
        self.engine = create_engine(self.db_uri, echo=echo)

        def set_sqlite_pragma(dbapi_connection, connection_record):
            cursor = dbapi_connection.cursor()
            cursor.execute("PRAGMA foreign_keys=ON")
            cursor.close()

        event.listens_for(self.engine, "connect")(set_sqlite_pragma)

        # Create a thread-safe session factory
        sm = sessionmaker(bind=self.engine)

        def add_lifecycle_hook(sm, from_state, to_state):
            def object_lifecycle_hook(_, obj):
                print(f"object moved from {from_state} to {to_state}: {obj}")

            event.listens_for(sm, f"{from_state}_to_{to_state}")(object_lifecycle_hook)

        if echo:
            add_lifecycle_hook(sm, "persistent", "detached")

        self.session = scoped_session(sm, scopefunc=get_ident)

    def __call__(self, *args, **kwargs):
        """Provides shortcut access to session.execute."""
        return self.session.execute(*args, **kwargs)

    def create_all(self):
        """Initializes the database schema."""
        ModelBase.metadata.create_all(self.engine)

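A minimal usage sketch for `DbContext`; the `User` model comes from the models module later in this compare, and the query style assumes SQLAlchemy 1.4-style `select`:

from sqlalchemy import select
from amanuensis.db import DbContext, User

db = DbContext(uri='sqlite:///:memory:')
db.create_all()
# __call__ proxies to session.execute, so the context can be queried directly.
users = db(select(User)).scalars().all()
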
@@ -1,669 +0,0 @@
|
||||||
"""
|
|
||||||
Data model SQL definitions
|
|
||||||
"""
|
|
||||||
import enum
|
|
||||||
import uuid
|
|
||||||
|
|
||||||
from sqlalchemy import (
|
|
||||||
Boolean,
|
|
||||||
Column,
|
|
||||||
CHAR,
|
|
||||||
DateTime,
|
|
||||||
Enum,
|
|
||||||
ForeignKey,
|
|
||||||
func,
|
|
||||||
Integer,
|
|
||||||
String,
|
|
||||||
Text,
|
|
||||||
text,
|
|
||||||
TypeDecorator,
|
|
||||||
)
|
|
||||||
from sqlalchemy.orm import relationship, backref
|
|
||||||
from sqlalchemy.sql.schema import UniqueConstraint
|
|
||||||
|
|
||||||
from .database import ModelBase
|
|
||||||
|
|
||||||
|
|
||||||
class Uuid(TypeDecorator):
|
|
||||||
"""
|
|
||||||
A uuid backed by a char(32) field in sqlite.
|
|
||||||
"""
|
|
||||||
|
|
||||||
impl = CHAR(32)
|
|
||||||
cache_ok = True
|
|
||||||
|
|
||||||
def process_bind_param(self, value, dialect):
|
|
||||||
if value is None:
|
|
||||||
return value
|
|
||||||
elif not isinstance(value, uuid.UUID):
|
|
||||||
return f"{uuid.UUID(value).int:32x}"
|
|
||||||
else:
|
|
||||||
return f"{value.int:32x}"
|
|
||||||
|
|
||||||
def process_result_value(self, value, dialect):
|
|
||||||
if value is None:
|
|
||||||
return value
|
|
||||||
elif not isinstance(value, uuid.UUID):
|
|
||||||
return uuid.UUID(value)
|
|
||||||
else:
|
|
||||||
return value
|
|
||||||
|
|
||||||
|
|
||||||
class User(ModelBase):
|
|
||||||
"""
|
|
||||||
Represents a single user of Amanuensis.
|
|
||||||
"""
|
|
||||||
|
|
||||||
__tablename__ = "user"
|
|
||||||
|
|
||||||
#############
|
|
||||||
# User info #
|
|
||||||
#############
|
|
||||||
|
|
||||||
# Primary user id
|
|
||||||
id = Column(Integer, primary_key=True)
|
|
||||||
|
|
||||||
# The user's human-readable identifier
|
|
||||||
username = Column(String, nullable=False, unique=True)
|
|
||||||
|
|
||||||
# Hashed and salted password
|
|
||||||
password = Column(String, nullable=False)
|
|
||||||
|
|
||||||
# Human-readable username as shown to other users
|
|
||||||
display_name = Column(String, nullable=False)
|
|
||||||
|
|
||||||
# The user's email address
|
|
||||||
email = Column(String, nullable=False)
|
|
||||||
|
|
||||||
# Whether the user can access site admin functions
|
|
||||||
is_site_admin = Column(Boolean, nullable=False, server_default=text("FALSE"))
|
|
||||||
|
|
||||||
####################
|
|
||||||
# History tracking #
|
|
||||||
####################
|
|
||||||
|
|
||||||
# The timestamp the user was created
|
|
||||||
created = Column(DateTime, nullable=False, server_default=func.now())
|
|
||||||
|
|
||||||
# The timestamp the user last logged in
|
|
||||||
# This is NULL if the user has never logged in
|
|
||||||
last_login = Column(DateTime, nullable=True)
|
|
||||||
|
|
||||||
# The timestamp the user last performed an action
|
|
||||||
# This is NULL if the user has never performed an action
|
|
||||||
last_activity = Column(DateTime, nullable=True)
|
|
||||||
|
|
||||||
#############################
|
|
||||||
# Foreign key relationships #
|
|
||||||
#############################
|
|
||||||
|
|
||||||
memberships = relationship("Membership", back_populates="user")
|
|
||||||
characters = relationship("Character", back_populates="user")
|
|
||||||
posts = relationship("Post", back_populates="user")
|
|
||||||
|
|
||||||
#########################
|
|
||||||
# Flask-Login interface #
|
|
||||||
#########################
|
|
||||||
|
|
||||||
@property
|
|
||||||
def is_authenticated(self: "User") -> bool:
|
|
||||||
return True
|
|
||||||
|
|
||||||
@property
|
|
||||||
def is_active(self: "User") -> bool:
|
|
||||||
return True
|
|
||||||
|
|
||||||
@property
|
|
||||||
def is_anonymous(self: "User") -> bool:
|
|
||||||
return False
|
|
||||||
|
|
||||||
def get_id(self: "User") -> str:
|
|
||||||
return str(self.id)
|
|
||||||
|
|
||||||
|
|
||||||
class Lexicon(ModelBase):
|
|
||||||
"""
|
|
||||||
Represents a single game of Lexicon.
|
|
||||||
"""
|
|
||||||
|
|
||||||
__tablename__ = "lexicon"
|
|
||||||
|
|
||||||
#############
|
|
||||||
# Game info #
|
|
||||||
#############
|
|
||||||
|
|
||||||
# Primary lexicon id
|
|
||||||
id = Column(Integer, primary_key=True)
|
|
||||||
|
|
||||||
# The lexicon's human-readable identifier
|
|
||||||
name = Column(String, nullable=False, unique=True)
|
|
||||||
|
|
||||||
# Optional title override
|
|
||||||
# If this is NULL, the title is rendered as "Lexicon <name>"
|
|
||||||
title = Column(String, nullable=True)
|
|
||||||
|
|
||||||
# The initial prompt describing the game's setting
|
|
||||||
prompt = Column(String, nullable=False, default="")
|
|
||||||
|
|
||||||
####################
|
|
||||||
# History tracking #
|
|
||||||
####################
|
|
||||||
|
|
||||||
# The timestamp the lexicon was created
|
|
||||||
created = Column(DateTime, nullable=False, server_default=func.now())
|
|
||||||
|
|
||||||
# The timestamp of the last change in game state
|
|
||||||
last_updated = Column(
|
|
||||||
DateTime, nullable=False, server_default=func.now(), onupdate=func.now()
|
|
||||||
)
|
|
||||||
|
|
||||||
# The timestamp the first turn was started
|
|
||||||
# This is NULL until the game starts
|
|
||||||
started = Column(DateTime, nullable=True)
|
|
||||||
|
|
||||||
# The timestamp when the last turn was published
|
|
||||||
# This is NULL until the game is completed
|
|
||||||
completed = Column(DateTime, nullable=True)
|
|
||||||
|
|
||||||
##############
|
|
||||||
# Turn state #
|
|
||||||
##############
|
|
||||||
|
|
||||||
# The current turn number
|
|
||||||
# This is NULL until the game strts
|
|
||||||
current_turn = Column(Integer, nullable=True)
|
|
||||||
|
|
||||||
# The number of turns in the game
|
|
||||||
turn_count = Column(Integer, nullable=False, default=8)
|
|
||||||
|
|
||||||
################################
|
|
||||||
# Visibility and join settings #
|
|
||||||
################################
|
|
||||||
|
|
||||||
# Whether players can join the game
|
|
||||||
joinable = Column(Boolean, nullable=False, default=False)
|
|
||||||
|
|
||||||
# Whether the game is listed on public pages
|
|
||||||
public = Column(Boolean, nullable=False, default=False)
|
|
||||||
|
|
||||||
# Optional password required to join
|
|
||||||
# If this is NULL, no password is required to join
|
|
||||||
join_password = Column(String, nullable=True)
|
|
||||||
|
|
||||||
# Maximum number of players who can join
|
|
||||||
# If this is NULL, there is no limit to player joins
|
|
||||||
player_limit = Column(Integer, nullable=True)
|
|
||||||
|
|
||||||
# Maximum number of characters per player
|
|
||||||
# If this is NULL, there is no limit to creating characters
|
|
||||||
character_limit = Column(Integer, nullable=True, default=1)
|
|
||||||
|
|
||||||
####################
|
|
||||||
# Publish settings #
|
|
||||||
####################
|
|
||||||
|
|
||||||
# Recurrence for turn publish attempts, as crontab spec
|
|
||||||
# If this is NULL, turns will not publish on a recurrence
|
|
||||||
publish_recur = Column(String, nullable=True)
|
|
||||||
|
|
||||||
# Whether to attempt publish when an article is approved
|
|
||||||
publish_asap = Column(Boolean, nullable=False, default=True)
|
|
||||||
|
|
||||||
# Allow an incomplete turn to be published with this many articles
|
|
||||||
# If this is NULL, the publish quorum is the number of characters
|
|
||||||
publish_quorum = Column(Integer, nullable=True)
|
|
||||||
|
|
||||||
#####################
|
|
||||||
# Addendum settings #
|
|
||||||
#####################
|
|
||||||
|
|
||||||
# Whether to allow addendum articles
|
|
||||||
allow_addendum = Column(Boolean, nullable=False, default=False)
|
|
||||||
|
|
||||||
# Maximum number of addenda per player per turn
|
|
||||||
# If this is NULL, there is no limit
|
|
||||||
addendum_turn_limit = Column(Integer, nullable=True)
|
|
||||||
|
|
||||||
# Maximum number of addenda per title
|
|
||||||
# If this is NULL, there is no limit
|
|
||||||
addendum_title_limit = Column(Integer, nullable=True)
|
|
||||||
|
|
||||||
##########################
|
|
||||||
# Collaboration settings #
|
|
||||||
##########################
|
|
||||||
|
|
||||||
# Enable the social posting feature
|
|
||||||
allow_post = Column(Boolean, nullable=False, default=True)
|
|
||||||
|
|
||||||
# Show title stubs in the index when a new article is approved
|
|
||||||
show_stubs = Column(Boolean, nullable=False, default=True)
|
|
||||||
|
|
||||||
# Show other players' progress for the current turn
|
|
||||||
show_peer_progress = Column(Boolean, nullable=False, default=True)
|
|
||||||
|
|
||||||
#############################
|
|
||||||
# Foreign key relationships #
|
|
||||||
#############################
|
|
||||||
|
|
||||||
memberships = relationship("Membership", back_populates="lexicon")
|
|
||||||
characters = relationship("Character", back_populates="lexicon")
|
|
||||||
articles = relationship("Article", back_populates="lexicon")
|
|
||||||
indices = relationship("ArticleIndex", back_populates="lexicon")
|
|
||||||
index_rules = relationship("ArticleIndexRule", back_populates="lexicon")
|
|
||||||
content_rules = relationship("ArticleContentRule", back_populates="lexicon")
|
|
||||||
posts = relationship(
|
|
||||||
"Post", back_populates="lexicon", order_by="Post.created.desc()"
|
|
||||||
)
|
|
||||||
|
|
||||||
#######################
|
|
||||||
# Derived information #
|
|
||||||
#######################
|
|
||||||
|
|
||||||
@property
|
|
||||||
def full_title(self: "Lexicon") -> str:
|
|
||||||
return self.title if self.title else f"Lexicon {self.name}"
|
|
||||||
|
|
||||||
|
|
||||||
class Membership(ModelBase):
|
|
||||||
"""
|
|
||||||
Represents a user's participation in a Lexicon game.
|
|
||||||
"""
|
|
||||||
|
|
||||||
__tablename__ = "membership"
|
|
||||||
__table_args__ = (UniqueConstraint("user_id", "lexicon_id"),)
|
|
||||||
|
|
||||||
###################
|
|
||||||
# Membership keys #
|
|
||||||
###################
|
|
||||||
|
|
||||||
# Primary membership id
|
|
||||||
id = Column(Integer, primary_key=True)
|
|
||||||
|
|
||||||
# The user who is a member of a lexicon
|
|
||||||
user_id = Column(Integer, ForeignKey("user.id"), nullable=False)
|
|
||||||
|
|
||||||
# The lexicon of which the user is a member
|
|
||||||
lexicon_id = Column(Integer, ForeignKey("lexicon.id"), nullable=False)
|
|
||||||
|
|
||||||
####################
|
|
||||||
# History tracking #
|
|
||||||
####################
|
|
||||||
|
|
||||||
# Timestamp the user joined the game
|
|
||||||
joined = Column(DateTime, nullable=False, server_default=func.now())
|
|
||||||
|
|
||||||
# Timestamp of the last time the user viewed the post feed
|
|
||||||
# This is NULL if the player has never viewed posts
|
|
||||||
last_post_seen = Column(DateTime, nullable=True)
|
|
||||||
|
|
||||||
###################
|
|
||||||
# Player settings #
|
|
||||||
###################
|
|
||||||
|
|
||||||
# Whether the user can access editor functions
|
|
||||||
is_editor = Column(Boolean, nullable=False, server_default=text("FALSE"))
|
|
||||||
|
|
||||||
#########################
|
|
||||||
# Notification settings #
|
|
||||||
#########################
|
|
||||||
|
|
||||||
# Whether the user should be notified when an article becomes reviewable
|
|
||||||
notify_ready = Column(Boolean, nullable=False, default=True)
|
|
||||||
|
|
||||||
# Whether the user should be notified when one of their articles is rejected
|
|
||||||
notify_reject = Column(Boolean, nullable=False, default=True)
|
|
||||||
|
|
||||||
# Whether the user should be notified when one of their articles is approved
|
|
||||||
notify_approve = Column(Boolean, nullable=False, default=True)
|
|
||||||
|
|
||||||
#############################
|
|
||||||
# Foreign key relationships #
|
|
||||||
#############################
|
|
||||||
|
|
||||||
user = relationship("User", back_populates="memberships")
|
|
||||||
lexicon = relationship("Lexicon", back_populates="memberships")
|
|
||||||
|
|
||||||
|
|
||||||
class Character(ModelBase):
|
|
||||||
"""
|
|
||||||
Represents a character played by a user in a Lexicon game.
|
|
||||||
"""
|
|

    __tablename__ = "character"

    ##################
    # Character info #
    ##################

    # Primary character id
    id = Column(Integer, primary_key=True)

    # Public-facing character id
    public_id = Column(Uuid, nullable=False, unique=True, default=uuid.uuid4)

    # The lexicon to which this character belongs
    lexicon_id = Column(Integer, ForeignKey("lexicon.id"), nullable=False)

    # The user to whom this character belongs
    user_id = Column(Integer, ForeignKey("user.id"), nullable=False)

    # The character's name
    name = Column(String, nullable=False)

    # The character's signature
    signature = Column(String, nullable=False)

    #############################
    # Foreign key relationships #
    #############################

    user = relationship("User", back_populates="characters")
    lexicon = relationship("Lexicon", back_populates="characters")
    articles = relationship("Article", back_populates="character")
    index_rules = relationship("ArticleIndexRule", back_populates="character")


class ArticleState(enum.Enum):
    """
    The step of the editorial process an article is in.
    """

    DRAFT = 0
    SUBMITTED = 1
    APPROVED = 2


class Article(ModelBase):
    """
    Represents a single article in a lexicon.
    """

    __tablename__ = "article"

    ################
    # Article info #
    ################

    # Primary article id
    id = Column(Integer, primary_key=True)

    # Public-facing article id
    public_id = Column(Uuid, nullable=False, unique=True, default=uuid.uuid4)

    # The lexicon to which this article belongs
    lexicon_id = Column(Integer, ForeignKey("lexicon.id"), nullable=False)

    # The character who is the author of this article
    character_id = Column(Integer, ForeignKey("character.id"), nullable=False)

    # The article to which this is an addendum
    addendum_to = Column(Integer, ForeignKey("article.id"), nullable=True)

    #################
    # Article state #
    #################

    # The turn in which the article was published
    # This is NULL until the article is published
    turn = Column(Integer, nullable=True)

    # The stage of review the article is in
    state = Column(Enum(ArticleState), nullable=False, default=ArticleState.DRAFT)

    # The number of times the article has been submitted
    submit_nonce = Column(Integer, nullable=False, default=0)

    # Whether the article is an Ersatz Scrivener article
    ersatz = Column(Boolean, nullable=False, default=False)

    ####################
    # History tracking #
    ####################

    # Timestamp the content of the article was last updated
    last_updated = Column(
        DateTime, nullable=False, server_default=func.now(), onupdate=func.now()
    )

    # Timestamp the article was last submitted
    # This is NULL until the article is submitted
    submitted = Column(DateTime, nullable=True)

    # Timestamp the article was last approved
    # This is NULL until the article is approved
    approved = Column(DateTime, nullable=True)

    ###################
    # Article content #
    ###################

    # The article's title
    title = Column(String, nullable=False, default="")

    # The article's text
    body = Column(Text, nullable=False)

    #############################
    # Foreign key relationships #
    #############################

    lexicon = relationship("Lexicon", back_populates="articles")
    character = relationship("Character", back_populates="articles")
    addenda = relationship("Article", backref=backref("parent", remote_side=[id]))


class IndexType(enum.Enum):
    """
    The title-matching behavior of an article index.
    """

    CHAR = 0
    RANGE = 1
    PREFIX = 2
    ETC = 3

    def __str__(self):
        return self.name


class ArticleIndex(ModelBase):
    """
    Represents an index definition.
    """

    __tablename__ = "article_index"
    __table_args__ = (UniqueConstraint("lexicon_id", "index_type", "pattern"),)

    ##############
    # Index info #
    ##############

    # Primary index id
    id = Column(Integer, primary_key=True)

    # The lexicon this index is in
    lexicon_id = Column(Integer, ForeignKey("lexicon.id"), nullable=False)

    # The index type
    index_type = Column(Enum(IndexType), nullable=False)

    # The index pattern
    pattern = Column(String, nullable=False)

    # The order in which the index is processed
    logical_order = Column(Integer, nullable=False, default=0)

    # The order in which the index is displayed
    display_order = Column(Integer, nullable=False, default=0)

    # The maximum number of articles allowed in this index
    # If this is NULL, there is no limit on this index
    capacity = Column(Integer, nullable=True)

    #############################
    # Foreign key relationships #
    #############################

    lexicon = relationship("Lexicon", back_populates="indices")
    index_rules = relationship("ArticleIndexRule", back_populates="index")

    @property
    def name(self):
        return f"{self.index_type}:{self.pattern}"


class ArticleIndexRule(ModelBase):
    """
    Represents a restriction of which index a character may write in for a turn.

    A character with multiple index rules may write in any index that satisfies
    a rule. A character with no index rules may write in any index.
    """

    __tablename__ = "article_index_rule"
    __table_args__ = (UniqueConstraint("character_id", "index_id", "turn"),)

    ###################
    # Index rule info #
    ###################

    # Primary index rule id
    id = Column(Integer, primary_key=True)

    # The lexicon of this index rule
    lexicon_id = Column(Integer, ForeignKey("lexicon.id"), nullable=False)

    ####################
    # Index rule scope #
    ####################

    # The character to whom this rule applies
    character_id = Column(Integer, ForeignKey("character.id"), nullable=False)

    # The index to which the character is restricted
    index_id = Column(Integer, ForeignKey("article_index.id"), nullable=False)

    # The turn in which this rule applies
    turn = Column(Integer, nullable=False)

    #############################
    # Foreign key relationships #
    #############################

    lexicon = relationship("Lexicon", back_populates="index_rules")
    index = relationship("ArticleIndex", back_populates="index_rules")
    character = relationship("Character", back_populates="index_rules")


class ArticleContentRuleType(enum.Enum):
    """
    The possible article content rules.
    """

    # Whether characters can cite themselves
    ALLOW_SELF_CITE = 0
    # Whether characters can write new articles instead of phantoms
    ALLOW_NEW_ARTICLE = 1
    # Required number of extant articles cited
    CITE_EXTANT_MIN = 2
    CITE_EXTANT_MAX = 3
    # Required number of phantom articles cited
    CITE_PHANTOM_MIN = 4
    CITE_PHANTOM_MAX = 5
    # Required number of new articles cited
    CITE_NEW_MIN = 6
    CITE_NEW_MAX = 7
    # Required number of characters among authors of articles cited
    CITE_CHARS_MIN = 8
    CITE_CHARS_MAX = 9
    # Required number of citations of any kind
    CITE_TOTAL_MIN = 10
    CITE_TOTAL_MAX = 11
    # Warn player below this wordcount
    WORD_MIN_SOFT = 12
    # Require player to exceed this wordcount
    WORD_MIN_HARD = 13
    # Warn player above this wordcount
    WORD_MAX_SOFT = 14
    # Require player to be under this wordcount
    WORD_MAX_HARD = 15


class ArticleContentRule(ModelBase):
    """
    Represents a restriction on the content of an article for a turn.
    """

    __tablename__ = "article_content_rule"

    #####################
    # Content rule info #
    #####################

    # Primary content rule id
    id = Column(Integer, primary_key=True)

    # The lexicon of this content rule
    lexicon_id = Column(Integer, ForeignKey("lexicon.id"), nullable=False)

    ######################
    # Content rule scope #
    ######################

    # The turn in which this rule applies
    turn = Column(Integer, nullable=False)

    ###########################
    # The content of the rule #
    ###########################

    # The content rule type that is being declared
    rule_name = Column(Enum(ArticleContentRuleType), nullable=False)

    # The new value for the rule
    # If this is NULL, the rule is disabled or has the default value
    rule_value = Column(Integer, nullable=True)

    #############################
    # Foreign key relationships #
    #############################

    lexicon = relationship("Lexicon", back_populates="content_rules")


class Post(ModelBase):
    """
    Represents a post in the game feed.
    """

    __tablename__ = "post"

    #############
    # Post info #
    #############

    # Primary post id
    id = Column(Integer, primary_key=True)

    # The lexicon in which the post was made
    lexicon_id = Column(Integer, ForeignKey("lexicon.id"), nullable=False)

    # The user who made the post
    # This may be NULL if the post was made by Amanuensis
    user_id = Column(Integer, ForeignKey("user.id"), nullable=True)

    ################
    # Post content #
    ################

    # The timestamp the post was created
    created = Column(DateTime, nullable=False, server_default=func.now())

    # The body of the post
    body = Column(Text, nullable=False)

    #############################
    # Foreign key relationships #
    #############################

    user = relationship("User", back_populates="posts")
    lexicon = relationship("Lexicon", back_populates="posts")
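The models above are plain declarative SQLAlchemy classes, so they can be exercised against any engine once the table metadata has been created. A minimal sketch, assuming the declarative base ModelBase and the Lexicon class defined earlier in this file, and an in-memory SQLite engine chosen purely for illustration:

    from sqlalchemy import create_engine
    from sqlalchemy.orm import sessionmaker

    engine = create_engine("sqlite:///:memory:")   # assumed backend for the sketch
    ModelBase.metadata.create_all(engine)          # create tables for the models above
    session = sessionmaker(bind=engine)()
    # No rows exist yet, so this query simply returns None
    print(session.query(Lexicon).filter_by(name="example").one_or_none())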
@ -1,25 +1,32 @@
-"""
-Submodule of custom exception types
-"""


class AmanuensisError(Exception):
-    """Base class for exceptions in Amanuensis"""
+    """Base class for exceptions in amanuensis"""


+class MissingConfigError(AmanuensisError):
+    """A config file is missing that was expected to be present"""
+    def __init__(self, path):
+        super().__init__("A config file or directory was expected to "
+            f"exist, but could not be found: {path}")


+class ConfigAlreadyExistsError(AmanuensisError):
+    """Attempted to create a config, but it already exists"""
+    def __init__(self, path):
+        super().__init__("Attempted to create a config, but it already "
+            f"exists: {path}")


+class MalformedConfigError(AmanuensisError):
+    """A config file could not be read and parsed"""


+class ReadOnlyError(AmanuensisError):
+    """A config was edited in readonly mode"""


class ArgumentError(AmanuensisError):
-    """An internal call was made with invalid arguments."""
+    """An internal call was made with invalid arguments"""


-class BackendArgumentTypeError(ArgumentError):
-    """
-    A call to a backend function was made with a value of an invalid type for the parameter.
-    Specify the invalid parameter and value as a kwarg.
-    """
-
-    def __init__(self, obj_type, **kwarg):
-        if not kwarg:
-            raise ValueError("Missing kwarg")
-        param, value = next(iter(kwarg.items()))
-        msg = f"Expected {param} of type {obj_type}, got {type(value)}"
-        super().__init__(msg)
+class IndexMismatchError(AmanuensisError):
+    """An id was obtained from an index, but the object doesn't exist"""
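As a quick illustration of how the kwarg-based BackendArgumentTypeError constructor shown above is meant to be called (an invented call, not taken from the diff):

    raise BackendArgumentTypeError(int, count="three")

produces a message along the lines of "Expected count of type <class 'int'>, got <class 'str'>", since the single keyword argument names the offending parameter and carries the bad value.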
@ -0,0 +1,104 @@
"""
Submodule of functions for creating and managing lexicons within the
general Amanuensis context.
"""
import json
import logging
import os
import re
import time
from typing import Iterable
import uuid

from amanuensis.config import RootConfigDirectoryContext, AttrOrderedDict
from amanuensis.errors import ArgumentError
from amanuensis.models import ModelFactory, UserModel, LexiconModel
from amanuensis.resources import get_stream

logger = logging.getLogger(__name__)

def valid_name(name: str) -> bool:
    """
    Validates that a lexicon name consists only of alphanumerics, dashes,
    underscores, and spaces
    """
    return re.match(r'^[A-Za-z0-9-_ ]+$', name) is not None


def create_lexicon(
        root: RootConfigDirectoryContext,
        name: str,
        editor: UserModel) -> LexiconModel:
    """
    Creates a lexicon with the given name and sets the given user as its editor
    """
    # Verify arguments
    if not name:
        raise ArgumentError(f'Empty lexicon name: "{name}"')
    if not valid_name(name):
        raise ArgumentError(f'Invalid lexicon name: "{name}"')
    with root.lexicon.read_index() as extant_lexicons:
        if name in extant_lexicons.keys():
            raise ArgumentError(f'Lexicon name already taken: "{name}"')
    if editor is None:
        raise ArgumentError('Editor must not be None')

    # Create the lexicon directory and initialize it with a blank lexicon
    lid: str = uuid.uuid4().hex
    lex_dir = os.path.join(root.lexicon.path, lid)
    os.mkdir(lex_dir)
    with get_stream("lexicon.json") as s:
        path: str = os.path.join(lex_dir, 'config.json')
        with open(path, 'wb') as f:
            f.write(s.read())

    # Create subdirectories
    os.mkdir(os.path.join(lex_dir, 'draft'))
    os.mkdir(os.path.join(lex_dir, 'src'))
    os.mkdir(os.path.join(lex_dir, 'article'))

    # Update the index with the new lexicon
    with root.lexicon.edit_index() as index:
        index[name] = lid

    # Fill out the new lexicon
    with root.lexicon[lid].edit_config() as cfg:
        cfg.lid = lid
        cfg.name = name
        cfg.editor = editor.uid
        cfg.time.created = int(time.time())

    with root.lexicon[lid].edit('info', create=True):
        pass  # Create an empty config file

    # Load the lexicon and add the editor and default character
    model_factory: ModelFactory = ModelFactory(root)
    lexicon = model_factory.lexicon(lid)
    with lexicon.ctx.edit_config() as cfg:
        cfg.join.joined.append(editor.uid)
        with get_stream('character.json') as template:
            character = json.load(template, object_pairs_hook=AttrOrderedDict)
        character.cid = 'default'
        character.name = 'Ersatz Scrivener'
        character.player = None
        cfg.character.new(character.cid, character)

    # Log the creation
    message = f'Created {lexicon.title}, ed. {editor.cfg.displayname} ({lid})'
    lexicon.log(message)
    logger.info(message)

    return lexicon


def load_all_lexicons(
        root: RootConfigDirectoryContext) -> Iterable[LexiconModel]:
    """
    Iterably loads every lexicon in the config store
    """
    model_factory: ModelFactory = ModelFactory(root)
    with root.lexicon.read_index() as index:
        for lid in index.values():
            lexicon: LexiconModel = model_factory.lexicon(lid)
            yield lexicon
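A short usage sketch for create_lexicon, assuming a config directory has already been initialized at ./config and a user named 'admin' already exists (both names are invented for the example):

    root = RootConfigDirectoryContext('./config')
    factory = ModelFactory(root)
    editor = factory.user('admin')
    lexicon = create_lexicon(root, 'Example Lexicon', editor)
    print(lexicon.title)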
@ -9,94 +9,11 @@ from amanuensis.config import ReadOnlyOrderedDict
from amanuensis.models import LexiconModel, UserModel
from amanuensis.parser import (
    parse_raw_markdown,
+    GetCitations,
+    HtmlRenderer,
    titlesort,
-    filesafe_title)
-from amanuensis.parser.core import RenderableVisitor
+    filesafe_title,
+    ConstraintAnalysis)


-class GetCitations(RenderableVisitor):
-    def __init__(self):
-        self.citations = []
-
-    def ParsedArticle(self, span):
-        span.recurse(self)
-        return self.citations
-
-    def CitationSpan(self, span):
-        self.citations.append(span.cite_target)
-        return self
-
-
-class ConstraintAnalysis(RenderableVisitor):
-    def __init__(self, lexicon: LexiconModel):
-        self.info: List[str] = []
-        self.warning: List[str] = []
-        self.error: List[str] = []
-
-        self.word_count: int = 0
-        self.citations: list = []
-        self.signatures: int = 0
-
-    def TextSpan(self, span):
-        self.word_count += len(re.split(r'\s+', span.innertext.strip()))
-        return self
-
-    def SignatureParagraph(self, span):
-        self.signatures += 1
-        span.recurse(self)
-        return self
-
-    def CitationSpan(self, span):
-        self.citations.append(span.cite_target)
-        span.recurse(self)
-        return self
-
-
-class HtmlRenderer(RenderableVisitor):
-    """
-    Renders an article token tree into published article HTML.
-    """
-    def __init__(self, lexicon_name: str, written_articles: Iterable[str]):
-        self.lexicon_name: str = lexicon_name
-        self.written_articles: Iterable[str] = written_articles
-
-    def TextSpan(self, span):
-        return span.innertext
-
-    def LineBreak(self, span):
-        return '<br>'
-
-    def ParsedArticle(self, span):
-        return '\n'.join(span.recurse(self))
-
-    def BodyParagraph(self, span):
-        return f'<p>{"".join(span.recurse(self))}</p>'
-
-    def SignatureParagraph(self, span):
-        return (
-            '<hr><span class="signature"><p>'
-            f'{"".join(span.recurse(self))}'
-            '</p></span>'
-        )
-
-    def BoldSpan(self, span):
-        return f'<b>{"".join(span.recurse(self))}</b>'
-
-    def ItalicSpan(self, span):
-        return f'<i>{"".join(span.recurse(self))}</i>'
-
-    def CitationSpan(self, span):
-        if span.cite_target in self.written_articles:
-            link_class = ''
-        else:
-            link_class = ' class="phantom"'
-        # link = url_for(
-        #     'lexicon.article',
-        #     name=self.lexicon_name,
-        #     title=filesafe_title(span.cite_target))
-        link = (f'/lexicon/{self.lexicon_name}'
-            + f'/article/{filesafe_title(span.cite_target)}')
-        return f'<a href="{link}"{link_class}>{"".join(span.recurse(self))}</a>'


def get_player_characters(

@ -392,7 +309,7 @@ def sort_by_index_spec(articles, index_specs, key=None):
    indexed = OrderedDict()
    for index in index_list_order:
        indexed[index.pattern] = []
-    # Sort articles into indices
+    # Sort articles into indexes
    for article in articles_titlesorted:
        for index in index_eval_order:
            if index_match(index, key(article)):
@ -13,7 +13,7 @@
# from amanuensis.config.loader import AttrOrderedDict
# from amanuensis.errors import ArgumentError
# from amanuensis.lexicon import LexiconModel
-# from amanuensis.parser import parse_raw_markdown, filesafe_title, titlesort
+# from amanuensis.parser import parse_raw_markdown, GetCitations, HtmlRenderer, filesafe_title, titlesort
# from amanuensis.resources import get_stream
@ -11,6 +11,58 @@ from amanuensis.models import LexiconModel, UserModel
from amanuensis.resources import get_stream


+def player_can_join_lexicon(
+        player: UserModel,
+        lexicon: LexiconModel,
+        password: str = None) -> bool:
+    """
+    Checks whether the given player can join a lexicon
+    """
+    # Trivial failures
+    if lexicon is None:
+        return False
+    if player is None:
+        return False
+    # Can't join if already in the game
+    if player.uid in lexicon.cfg.join.joined:
+        return False
+    # Can't join if the game is closed
+    if not lexicon.cfg.join.open:
+        return False
+    # Can't join if there's no room left
+    if len(lexicon.cfg.join.joined) >= lexicon.cfg.join.max_players:
+        return False
+    # Can't join if the password doesn't check out
+    if (lexicon.cfg.join.password is not None
+            and lexicon.cfg.join.password != password):
+        return False
+    return True
+
+
+def add_player_to_lexicon(
+        player: UserModel,
+        lexicon: LexiconModel) -> None:
+    """
+    Unconditionally adds a player to a lexicon
+    """
+    # Verify arguments
+    if lexicon is None:
+        raise ArgumentError(f'Invalid lexicon: {lexicon}')
+    if player is None:
+        raise ArgumentError(f'Invalid player: {player}')
+
+    # Idempotently add player
+    added = False
+    with lexicon.ctx.edit_config() as cfg:
+        if player.uid not in cfg.join.joined:
+            cfg.join.joined.append(player.uid)
+            added = True
+
+    # Log to the lexicon's log
+    if added:
+        lexicon.log('Player "{0.cfg.username}" joined ({0.uid})'.format(player))
+
+
def player_can_create_character(
        player: UserModel,
        lexicon: LexiconModel,
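A sketch of how the two helpers above are meant to be combined in a join flow (player, lexicon, and password are assumed to come from the calling view; the error handling is invented):

    if player_can_join_lexicon(player, lexicon, password):
        add_player_to_lexicon(player, lexicon)
    else:
        raise ArgumentError('Player cannot join this lexicon')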
@ -0,0 +1,5 @@
from .setup import init_logging

__all__ = [member.__name__ for member in [
    init_logging
]]
@ -0,0 +1,45 @@
import logging
import logging.handlers


basic_formatter = logging.Formatter(
    fmt='[{levelname}] {message}',
    style='{')
detailed_formatter = logging.Formatter(
    fmt='[{asctime}] [{levelname}:{filename}:{lineno}] {message}',
    style='{')
basic_console_handler = logging.StreamHandler()
basic_console_handler.setLevel(logging.INFO)
basic_console_handler.setFormatter(basic_formatter)
detailed_console_handler = logging.StreamHandler()
detailed_console_handler.setLevel(logging.DEBUG)
detailed_console_handler.setFormatter(detailed_formatter)


def get_file_handler(filename: str) -> logging.Handler:
    handler = logging.handlers.RotatingFileHandler(
        filename=filename,
        maxBytes=1000000,
        backupCount=10,
        delay=True,
        encoding='utf8',
    )
    handler.setLevel(logging.DEBUG)
    handler.setFormatter(detailed_formatter)
    return handler


def init_logging(verbose: bool, log_filename: str):
    """
    Initializes the Amanuensis logger settings
    """
    logger = logging.getLogger("amanuensis")
    if log_filename:
        logger.addHandler(get_file_handler(log_filename))
        logger.setLevel(logging.DEBUG)
    elif verbose:
        logger.addHandler(detailed_console_handler)
        logger.setLevel(logging.DEBUG)
    else:
        logger.addHandler(basic_console_handler)
        logger.setLevel(logging.INFO)
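A minimal sketch of calling init_logging directly (the log file path is invented; note that when a file path is given it takes precedence over the verbose flag in the branches above):

    init_logging(verbose=False, log_filename=None)              # INFO-level console logging
    logging.getLogger('amanuensis').info('started')

    init_logging(verbose=True, log_filename='amanuensis.log')   # DEBUG logging to a rotating file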
@ -0,0 +1,11 @@
from .factory import ModelFactory
from .lexicon import LexiconModel
from .user import UserModelBase, UserModel, AnonymousUserModel

__all__ = [member.__name__ for member in [
    ModelFactory,
    LexiconModel,
    UserModelBase,
    UserModel,
    AnonymousUserModel,
]]
@ -0,0 +1,57 @@
from typing import Optional

from amanuensis.config import is_guid, RootConfigDirectoryContext
from amanuensis.errors import ArgumentError

from .user import UserModel
from .lexicon import LexiconModel


class ModelFactory():
    def __init__(self, root: RootConfigDirectoryContext):
        self.root: RootConfigDirectoryContext = root

    def try_user(self, identifier: str) -> Optional[UserModel]:
        user: Optional[UserModel] = None
        try:
            user = self.user(identifier)
        except Exception:
            pass
        finally:
            return user

    def user(self, identifier: str) -> UserModel:
        """Get the user model for the given id or username"""
        # Ensure we have something to work with
        if identifier is None:
            raise ArgumentError('identifier must not be None')
        # Ensure we have a user guid
        if not is_guid(identifier):
            with self.root.user.read_index() as index:
                uid = index.get(identifier, None)
                if uid is None:
                    raise KeyError(f'Unknown username: {identifier}')
                if not is_guid(uid):
                    raise ValueError(f'Invalid index entry: {uid}')
        else:
            uid = identifier
        user = UserModel(self.root, uid)
        return user

    def lexicon(self, identifier: str) -> LexiconModel:
        """Get the lexicon model for the given id or name"""
        # Ensure we have something to work with
        if identifier is None:
            raise ArgumentError('identifier must not be None')
        # Ensure we have a lexicon guid
        if not is_guid(identifier):
            with self.root.lexicon.read_index() as index:
                lid = index.get(identifier, None)
                if lid is None:
                    raise KeyError(f'Unknown lexicon: {identifier}')
                if not is_guid(lid):
                    raise ValueError(f'Invalid index entry: {lid}')
        else:
            lid = identifier
        lexicon = LexiconModel(self.root, lid)
        return lexicon
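Usage sketch for the factory (assumes a config store under ./config that already contains the named user; the username is invented):

    root = RootConfigDirectoryContext('./config')
    factory = ModelFactory(root)
    user = factory.try_user('some-username')   # returns None instead of raising on any failure
    if user is not None:
        print(user.cfg.username)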
@ -0,0 +1,64 @@
import time
from typing import cast

from amanuensis.config import (
    RootConfigDirectoryContext,
    LexiconConfigDirectoryContext,
    ReadOnlyOrderedDict)


class LexiconModel():
    PREGAME = "unstarted"
    ONGOING = "ongoing"
    COMPLETE = "completed"

    """Represents a lexicon in the Amanuensis config store"""
    def __init__(self, root: RootConfigDirectoryContext, lid: str):
        self._lid: str = lid
        # Creating the config context implicitly checks for existence
        self._ctx: LexiconConfigDirectoryContext = (
            cast(LexiconConfigDirectoryContext, root.lexicon[lid]))
        with self._ctx.read_config() as config:
            self._cfg: ReadOnlyOrderedDict = cast(ReadOnlyOrderedDict, config)

    def __str__(self) -> str:
        return f'<Lexicon {self.cfg.name}>'

    def __repr__(self) -> str:
        return f'<LexiconModel({self.lid})>'

    # Properties

    @property
    def lid(self) -> str:
        """Lexicon guid"""
        return self._lid

    @property
    def ctx(self) -> LexiconConfigDirectoryContext:
        """Lexicon config directory context"""
        return self._ctx

    @property
    def cfg(self) -> ReadOnlyOrderedDict:
        """Cached lexicon config"""
        return self._cfg

    # Utilities

    @property
    def title(self) -> str:
        return self.cfg.get('title') or f'Lexicon {self.cfg.name}'

    def log(self, message: str) -> None:
        now = int(time.time())
        with self.ctx.edit_config() as cfg:
            cfg.log.append([now, message])

    @property
    def status(self) -> str:
        if self.cfg.turn.current is None:
            return LexiconModel.PREGAME
        if self.cfg.turn.current > self.cfg.turn.max:
            return LexiconModel.COMPLETE
        return LexiconModel.ONGOING
@ -0,0 +1,83 @@
from typing import cast

from werkzeug.security import generate_password_hash, check_password_hash

from amanuensis.config import (
    RootConfigDirectoryContext,
    UserConfigDirectoryContext,
    ReadOnlyOrderedDict)


class UserModelBase():
    """Common base class for auth and anon user models"""

    # Properties

    @property
    def uid(self) -> str:
        """User guid"""
        return getattr(self, '_uid', None)

    @property
    def ctx(self) -> UserConfigDirectoryContext:
        """User config directory context"""
        return getattr(self, '_ctx', None)

    @property
    def cfg(self) -> ReadOnlyOrderedDict:
        """Cached user config"""
        return getattr(self, '_cfg', None)

    # Flask-Login interfaces

    @property
    def is_authenticated(self) -> bool:
        return self.uid is not None

    @property
    def is_active(self) -> bool:
        return self.uid is not None

    @property
    def is_anonymous(self) -> bool:
        return self.uid is None

    def get_id(self) -> str:
        return self.uid


class UserModel(UserModelBase):
    """Represents a user in the Amanuensis config store"""
    def __init__(self, root: RootConfigDirectoryContext, uid: str):
        self._uid: str = uid
        # Creating the config context implicitly checks for existence
        self._ctx: UserConfigDirectoryContext = (
            cast(UserConfigDirectoryContext, root.user[uid]))
        with self._ctx.read_config() as config:
            self._cfg: ReadOnlyOrderedDict = cast(ReadOnlyOrderedDict, config)

    def __str__(self) -> str:
        return f'<{self.cfg.username}>'

    def __repr__(self) -> str:
        return f'<UserModel({self.uid})>'

    # Utility methods

    def set_password(self, password: str) -> None:
        pw_hash = generate_password_hash(password)
        with self.ctx.edit_config() as cfg:
            cfg['password'] = pw_hash

    def check_password(self, password) -> bool:
        with self.ctx.read_config() as cfg:
            return check_password_hash(cfg.password, password)


class AnonymousUserModel(UserModelBase):
    """Represents an anonymous user"""
    def __str__(self) -> str:
        return '<Anonymous>'

    def __repr__(self) -> str:
        return '<AnonymousUserModel>'
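A sketch of the password helpers above in use (the user guid is invented, and root is a RootConfigDirectoryContext as in the factory example):

    user = UserModel(root, 'some-user-guid')
    user.set_password('correct horse battery staple')
    assert user.check_password('correct horse battery staple')
    assert not user.check_password('wrong password')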
@ -2,14 +2,19 @@
Module encapsulating all markdown parsing functionality.
"""

-from .core import RenderableVisitor
-from .helpers import normalize_title, filesafe_title, titlesort
+from .analyze import ConstraintAnalysis, GetCitations
+from .core import normalize_title
+from .helpers import titlesort, filesafe_title
from .parsing import parse_raw_markdown
+from .render import PreviewHtmlRenderer, HtmlRenderer

__all__ = [
-    "RenderableVisitor",
-    "normalize_title",
-    "filesafe_title",
-    "titlesort",
-    "parse_raw_markdown",
+    ConstraintAnalysis.__name__,
+    GetCitations.__name__,
+    normalize_title.__name__,
+    titlesort.__name__,
+    filesafe_title.__name__,
+    parse_raw_markdown.__name__,
+    PreviewHtmlRenderer.__name__,
+    HtmlRenderer.__name__,
]
@ -0,0 +1,49 @@
"""
Internal module encapsulating visitors that compute metrics on articles
for verification against constraints.
"""

import re
from typing import List

from amanuensis.models import LexiconModel

from .core import RenderableVisitor


class GetCitations(RenderableVisitor):
    def __init__(self):
        self.citations = []

    def ParsedArticle(self, span):
        span.recurse(self)
        return self.citations

    def CitationSpan(self, span):
        self.citations.append(span.cite_target)
        return self


class ConstraintAnalysis(RenderableVisitor):
    def __init__(self, lexicon: LexiconModel):
        self.info: List[str] = []
        self.warning: List[str] = []
        self.error: List[str] = []

        self.word_count: int = 0
        self.citations: list = []
        self.signatures: int = 0

    def TextSpan(self, span):
        self.word_count += len(re.split(r'\s+', span.innertext.strip()))
        return self

    def SignatureParagraph(self, span):
        self.signatures += 1
        span.recurse(self)
        return self

    def CitationSpan(self, span):
        self.citations.append(span.cite_target)
        span.recurse(self)
        return self
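A sketch of running these visitors over a parsed article (parse_raw_markdown comes from the sibling parsing module; the markdown string and the lexicon=None argument are purely illustrative):

    article = parse_raw_markdown('A body that cites [[Another Article]].\n\n~Signed')
    citations = article.render(GetCitations())              # ['Another Article']
    analysis = article.render(ConstraintAnalysis(lexicon=None))
    print(analysis.word_count, analysis.signatures, analysis.citations)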
@ -5,134 +5,131 @@ which can be operated on by a visitor defining functions that hook off
of the different token types.
"""

+import re
from typing import Callable, Any, Sequence

-from .helpers import normalize_title
-
-RenderHook = Callable[["Renderable"], Any]
-Spans = Sequence["Renderable"]
+RenderHook = Callable[['Renderable'], Any]
+Spans = Sequence['Renderable']
+
+
+def normalize_title(title: str) -> str:
+    """
+    Normalizes strings as titles:
+    - Strips leading and trailing whitespace
+    - Merges internal whitespace into a single space
+    - Capitalizes the first word
+    """
+    cleaned = re.sub(r'\s+', " ", title.strip())
+    return cleaned[:1].capitalize() + cleaned[1:]


-class Renderable:
+class Renderable():
    """
    Base class for parsed markdown. Provides the `render()` method for
    visiting the token tree.
    """
-    def render(self: "Renderable", renderer: "RenderableVisitor"):
+    def render(self: 'Renderable', renderer: 'RenderableVisitor'):
        """
        Execute the appropriate visitor method on this Renderable.
-
-        Visitors implement hooks by declaring methods whose names are
-        the name of a Renderable class.
        """
        hook: RenderHook = getattr(renderer, type(self).__name__, None)
        if hook:
            return hook(self)
        return None


class TextSpan(Renderable):
-    """A length of text."""
+    """An unstyled length of text."""
    def __init__(self, innertext: str):
        self.innertext = innertext

-    def __repr__(self):
-        return f"<{self.innertext}>"
+    def __str__(self):
+        return f"[{self.innertext}]"


class LineBreak(Renderable):
    """A line break within a paragraph."""
-    def __repr__(self):
+    def __str__(self):
        return "<break>"


class SpanContainer(Renderable):
    """A formatting element that wraps some amount of text."""
    def __init__(self, spans: Spans):
        self.spans: Spans = spans

-    def __repr__(self):
-        return (
-            f"<{type(self).__name__} "
-            + f'{" ".join([repr(span) for span in self.spans])}>'
-        )
-
-    def recurse(self, renderer: "RenderableVisitor"):
-        return [child.render(renderer) for child in self.spans]
+    def __str__(self):
+        return (f'[{type(self).__name__} '
+            + f'{" ".join([str(span) for span in self.spans])}]')
+
+    def recurse(self, renderer: 'RenderableVisitor'):
+        return [child.render(renderer) for child in self.spans]


class ParsedArticle(SpanContainer):
    """Token tree root node, containing some number of paragraph tokens."""


class BodyParagraph(SpanContainer):
    """A normal paragraph."""


class SignatureParagraph(SpanContainer):
    """A paragraph preceded by a signature mark."""


class BoldSpan(SpanContainer):
    """A span of text inside bold marks."""


class ItalicSpan(SpanContainer):
    """A span of text inside italic marks."""


class CitationSpan(SpanContainer):
    """A citation to another article."""
    def __init__(self, spans: Spans, cite_target: str):
        super().__init__(spans)
        # Normalize citation target on parse, since we don't want
        # abnormal title strings lying around causing trouble.
        self.cite_target: str = normalize_title(cite_target)

-    def __repr__(self) -> str:
-        return (
-            f'{{{" ".join([repr(span) for span in self.spans])}'
-            + f":{self.cite_target}}}"
-        )
+    def __str__(self):
+        return (f'{{{" ".join([str(span) for span in self.spans])}'
+            + f':{self.cite_target}}}')


-class RenderableVisitor:
+class RenderableVisitor():
    """
    Default implementation of the visitor pattern. Executes once on
    each token in the tree and returns itself.
    """
    def TextSpan(self, span: TextSpan):
        return self

    def LineBreak(self, span: LineBreak):
        return self

    def ParsedArticle(self, span: ParsedArticle):
        span.recurse(self)
        return self

    def BodyParagraph(self, span: BodyParagraph):
        span.recurse(self)
        return self

    def SignatureParagraph(self, span: SignatureParagraph):
        span.recurse(self)
        return self

    def BoldSpan(self, span: BoldSpan):
        span.recurse(self)
        return self

    def ItalicSpan(self, span: ItalicSpan):
        span.recurse(self)
        return self

    def CitationSpan(self, span: CitationSpan):
        span.recurse(self)
        return self
@ -1,53 +1,28 @@
-"""
-Helper functions for manipulating titles during parsing
-"""
-
import re
import urllib.parse


-def normalize_title(title: str) -> str:
-    """
-    Normalizes strings as titles:
-    - Strips leading and trailing whitespace
-    - Merges internal whitespace into a single space
-    - Capitalizes the first word
-    """
-    cleaned = re.sub(r"\s+", " ", title.strip())
-    return cleaned[:1].capitalize() + cleaned[1:]
-
-
def titlesort(title: str) -> str:
    """
    Strips articles off of titles for alphabetical sorting purposes
    """
    lower = title.lower()
    if lower.startswith("the "):
        return lower[4:]
    if lower.startswith("an "):
        return lower[3:]
    if lower.startswith("a "):
        return lower[2:]
    return lower


def filesafe_title(title: str) -> str:
    """
    Makes an article title filename-safe.
    """
-    # Replace whitespace with _
-    s = re.sub(r"\s+", "_", title)
-    # parse.quote doesn't catch ~
-    s = re.sub(r"~", "-", s)
-    # Encode all other characters
-    s = urllib.parse.quote(s)
-    # Strip encoding %s
-    s = re.sub(r"%", "", s)
-    # Limit to 64 characters
-    s = s[:64]
-    return s
+    s = re.sub(r"\s+", '_', title)  # Replace whitespace with _
+    s = re.sub(r"~", '-', s)  # parse.quote doesn't catch ~
+    s = urllib.parse.quote(s)  # Encode all other characters
+    s = re.sub(r"%", "", s)  # Strip encoding %s
+    s = s[:64]  # Limit to 64 characters
+    return s
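Quick examples of what the helpers above produce, worked out from the code rather than taken from test output:

    titlesort('The Quick Fox')            # 'quick fox'
    titlesort('A Night Out')              # 'night out'
    filesafe_title('Nightfall ~ Part 2')  # 'Nightfall_-_Part_2'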
@ -7,203 +7,150 @@ import re
|
||||||
from typing import Sequence
|
from typing import Sequence
|
||||||
|
|
||||||
from .core import (
|
from .core import (
|
||||||
TextSpan,
|
TextSpan,
|
||||||
LineBreak,
|
LineBreak,
|
||||||
ParsedArticle,
|
ParsedArticle,
|
||||||
BodyParagraph,
|
BodyParagraph,
|
||||||
SignatureParagraph,
|
SignatureParagraph,
|
||||||
BoldSpan,
|
BoldSpan,
|
||||||
ItalicSpan,
|
ItalicSpan,
|
||||||
CitationSpan,
|
CitationSpan,
|
||||||
Renderable,
|
Renderable,
|
||||||
SpanContainer,
|
SpanContainer
|
||||||
)
|
)
|
||||||
|
|
||||||
Spans = Sequence[Renderable]
|
Spans = Sequence[Renderable]
|
||||||
|
|
||||||
|
|
||||||
def parse_raw_markdown(text: str) -> ParsedArticle:
|
def parse_raw_markdown(text: str) -> ParsedArticle:
|
||||||
"""
|
"""
|
||||||
Parses a body of Lexipython markdown into a Renderable tree.
|
Parses a body of Lexipython markdown into a Renderable tree.
|
||||||
"""
|
"""
|
||||||
# Parse each paragraph individually, as no formatting applies
|
# Parse each paragraph individually, as no formatting applies
|
||||||
# across paragraphs
|
# across paragraphs
|
||||||
paragraphs = re.split(r"\n\n+", text)
|
paragraphs = re.split(r'\n\n+', text)
|
||||||
parse_results = list(map(parse_paragraph, paragraphs))
|
parse_results = list(map(parse_paragraph, paragraphs))
|
||||||
return ParsedArticle(parse_results)
|
return ParsedArticle(parse_results)
|
||||||
|
|
||||||
|
|
||||||
def parse_paragraph(text: str) -> SpanContainer:
|
def parse_paragraph(text: str) -> SpanContainer:
|
||||||
"""
|
# Parse the paragraph as a span of text
|
||||||
Parses a block of text into a paragraph object.
|
text = text.strip()
|
||||||
"""
|
if text and text[0] == '~':
|
||||||
# Parse the paragraph as a span of text
|
return SignatureParagraph(parse_paired_formatting(text[1:]))
|
||||||
text = text.strip()
|
else:
|
||||||
if text and text[0] == "~":
|
return BodyParagraph(parse_paired_formatting(text))
|
||||||
return SignatureParagraph(parse_paired_formatting(text[1:]))
|
|
||||||
else:
|
|
||||||
return BodyParagraph(parse_paired_formatting(text))
|
|
||||||
|
|
||||||
|
|
||||||
def parse_paired_formatting(
|
def parse_paired_formatting(
|
||||||
text: str,
|
text: str,
|
||||||
in_cite: bool = False,
|
cite: bool = True,
|
||||||
in_bold: bool = False,
|
bold: bool = True,
|
||||||
in_italic: bool = False,
|
italic: bool = True) -> Spans:
|
||||||
) -> Spans:
|
# Find positions of any paired formatting
|
||||||
"""
|
first_cite = find_pair(text, "[[", "]]", cite)
|
||||||
Parses citations, bolds, and italics, which can be nested inside each other.
|
first_bold = find_pair(text, "**", "**", bold)
|
||||||
A single type cannot nest inside itself, which is controlled by setting the
|
first_italic = find_pair(text, "//", "//", italic)
|
||||||
flag parameters to False.
|
# Load the possible parse handlers into the map
|
||||||
"""
|
handlers = {}
|
||||||
# Find positions of any paired formatting
|
handlers[first_cite] = lambda: parse_citation(text, bold=bold, italic=italic)
|
||||||
next_cite = find_pair(text, "[[", "]]") if not in_cite else -1
|
handlers[first_bold] = lambda: parse_bold(text, cite=cite, italic=italic)
|
||||||
next_bold = find_pair(text, "**", "**") if not in_bold else -1
|
handlers[first_italic] = lambda: parse_italic(text, cite=cite, bold=bold)
|
||||||
next_italic = find_pair(text, "//", "//") if not in_italic else -1
|
# If nothing was found, move on to the next parsing step
|
||||||
# Create a map from a formatting mark's distance to its parse handler
|
handlers[-1] = lambda: parse_breaks(text)
|
||||||
handlers = {}
|
# Choose a handler based on the earliest found result
|
||||||
handlers[next_cite] = lambda: parse_citation(
|
finds = [i for i in (first_cite, first_bold, first_italic) if i > -1]
|
||||||
text, in_bold=in_bold, in_italic=in_italic
|
first = min(finds) if finds else -1
|
||||||
)
|
return handlers[first]()
|
||||||
handlers[next_bold] = lambda: parse_bold(text, in_cite=in_cite, in_italic=in_italic)
|
|
||||||
handlers[next_italic] = lambda: parse_italic(text, in_cite=in_cite, in_bold=in_bold)
|
|
||||||
# Map the next parsing step at -1. If we're currently inside a formatting
|
|
||||||
# mark pair, skip parsing line breaks, which are not allowed inside paired
|
|
||||||
# marks.
|
|
||||||
if in_cite or in_bold or in_italic:
|
|
||||||
handlers[-1] = lambda: parse_text(text)
|
|
||||||
else:
|
|
||||||
handlers[-1] = lambda: parse_breaks(text)
|
|
||||||
# Choose the handler for the earliest found pair, or the default handler
|
|
||||||
# at -1 if nothing was found.
|
|
||||||
finds = [i for i in (next_cite, next_bold, next_italic) if i > -1]
|
|
||||||
first = min(finds) if finds else -1
|
|
||||||
return handlers[first]()
|
|
||||||
|
|
||||||
|
|
||||||
def find_pair(text: str, open_tag: str, close_tag: str) -> int:
|
def find_pair(
|
||||||
"""
|
text: str,
|
||||||
Finds the beginning of a pair of formatting marks.
|
open_tag: str,
|
||||||
"""
|
close_tag: str,
|
||||||
# If the open tag wasn't found, return -1
|
valid: bool) -> int:
|
||||||
first = text.find(open_tag)
|
# If skipping, return -1
|
||||||
if first < 0:
|
if not valid:
|
||||||
return -1
|
return -1
|
||||||
# If the close tag wasn't found after the open tag, return -1
|
# If the open tag wasn't found, return -1
|
||||||
second = text.find(close_tag, first + len(open_tag))
|
first = text.find(open_tag)
|
||||||
if second < 0:
|
if first < 0:
|
||||||
return -1
|
return -1
|
||||||
# Otherwise, the pair exists
|
# If the close tag wasn't found after the open tag, return -1
|
||||||
return first
|
second = text.find(close_tag, first + len(open_tag))
|
||||||
|
if second < 0:
|
||||||
|
return -1
|
||||||
|
# Otherwise, the pair exists
|
||||||
|
return first
|
||||||
|
|
||||||
|
|
||||||
def parse_citation(
|
def parse_citation(text: str, bold: bool = True, italic: bool = True) -> Spans:
|
||||||
text: str,
|
cite_open = text.find("[[")
|
||||||
in_bold: bool = False,
|
if cite_open > -1:
|
||||||
in_italic: bool = False,
|
cite_close = text.find("]]", cite_open + 2)
|
||||||
) -> Spans:
|
# Since we searched for pairs from the beginning, there should be no
|
||||||
"""
|
# undetected pair formatting before this one, so move to the next
|
||||||
Parses text into a citation span.
|
# level of parsing
|
||||||
"""
|
spans_before = parse_breaks(text[:cite_open])
|
||||||
cite_open = text.find("[[")
|
# Continue parsing pair formatting after this one closes with all
|
||||||
if cite_open > -1:
|
# three as valid choices
|
||||||
cite_close = text.find("]]", cite_open + 2)
|
spans_after = parse_paired_formatting(text[cite_close + 2:])
|
||||||
# Since we searched for pairs from the beginning, there should be no
|
# Parse inner text and skip parsing for this format pair
|
||||||
# undetected pair formatting before this one, so move to the next
|
text_inner = text[cite_open + 2:cite_close]
|
||||||
# level of parsing
|
# For citations specifically, we may need to split off a citation
|
||||||
spans_before = parse_breaks(text[:cite_open])
|
# target from the alias text
|
||||||
# Continue parsing pair formatting after this one closes with all
|
inner_split = text_inner.split("|", 1)
|
||||||
# three as valid choices
|
text_inner_actual, cite_target = inner_split[0], inner_split[-1]
|
||||||
spans_after = parse_paired_formatting(text[cite_close + 2 :])
|
spans_inner = parse_paired_formatting(text_inner_actual,
|
||||||
# Parse inner text and skip parsing for this format pair
|
cite=False, bold=bold, italic=italic)
|
||||||
text_inner = text[cite_open + 2 : cite_close]
|
citation = CitationSpan(spans_inner, cite_target)
|
||||||
# For citations specifically, try to split off a citation target.
|
return [*spans_before, citation, *spans_after]
|
||||||
# If there's no citation target to split, use the same text as the
|
# Should never happen
|
||||||
# citation text and the target.
|
return parse_breaks(text)
|
||||||
inner_split = text_inner.split("|", 1)
|
|
||||||
text_inner_actual, cite_target = inner_split[0], inner_split[-1]
|
|
||||||
spans_inner = parse_paired_formatting(
|
|
||||||
text_inner_actual, in_cite=True, in_bold=in_bold, in_italic=in_italic
|
|
||||||
)
|
|
||||||
citation = CitationSpan(spans_inner, cite_target)
|
|
||||||
return [*spans_before, citation, *spans_after]
|
|
||||||
# Should never happen
|
|
||||||
return parse_breaks(text)
|
|
||||||
|
|
||||||
|
|
||||||
-def parse_bold(
-    text: str,
-    in_cite: bool = False,
-    in_italic: bool = False,
-) -> Spans:
-    """
-    Parses text into a bold span.
-    """
+def parse_bold(text: str, cite: bool = True, italic: bool = True) -> Spans:
     bold_open = text.find("**")
     if bold_open > -1:
         bold_close = text.find("**", bold_open + 2)
         # Should be no formatting behind us
         spans_before = parse_breaks(text[:bold_open])
         # Freely parse formatting after us
-        spans_after = parse_paired_formatting(text[bold_close + 2 :])
+        spans_after = parse_paired_formatting(text[bold_close + 2:])
         # Parse inner text minus bold parsing
-        text_inner = text[bold_open + 2 : bold_close]
-        spans_inner = parse_paired_formatting(
-            text_inner, in_cite=in_cite, in_bold=True, in_italic=in_italic
-        )
+        text_inner = text[bold_open + 2:bold_close]
+        spans_inner = parse_paired_formatting(text_inner,
+            cite=cite, bold=False, italic=italic)
         bold = BoldSpan(spans_inner)
         return [*spans_before, bold, *spans_after]
     # Should never happen
     return parse_italic(text)

-def parse_italic(
-    text: str,
-    in_cite: bool = False,
-    in_bold: bool = False,
-) -> Spans:
-    """
-    Parses text into an italic span.
-    """
+def parse_italic(text: str, cite: bool = True, bold: bool = True) -> Spans:
     italic_open = text.find("//")
     if italic_open > -1:
         italic_close = text.find("//", italic_open + 2)
         # Should be no formatting behind us
         spans_before = parse_breaks(text[:italic_open])
         # Freely parse formatting after us
-        spans_after = parse_paired_formatting(text[italic_close + 2 :])
+        spans_after = parse_paired_formatting(text[italic_close + 2:])
         # Parse inner text minus italic parsing
-        text_inner = text[italic_open + 2 : italic_close]
-        spans_inner = parse_paired_formatting(
-            text_inner, in_cite=in_cite, in_bold=in_bold, in_italic=True
-        )
+        text_inner = text[italic_open + 2:italic_close]
+        spans_inner = parse_paired_formatting(text_inner,
+            cite=cite, bold=bold, italic=False)
         italic = ItalicSpan(spans_inner)
         return [*spans_before, italic, *spans_after]
     # Should never happen
     return parse_breaks(text)

 def parse_breaks(text: str) -> Spans:
-    """
-    Parses intra-paragraph line breaks.
-    """
-    # Parse empty text into nothing
     if not text:
         return []
-    # Split on the line break mark appearing at the end of the line
     splits: Spans = list(map(TextSpan, text.split("\\\\\n")))
-    # Put a LineBreak between each TextSpan
     spans: Spans = [
         splits[i // 2] if i % 2 == 0 else LineBreak()
         for i in range(0, 2 * len(splits) - 1)
     ]
     return spans

-def parse_text(text: str) -> Spans:
-    """
-    Parses text with no remaining parseable marks.
-    """
-    if not text:
-        return []
-    return [TextSpan(text)]

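For reference, a minimal sketch of how the citation markup handled above is expected to decompose into spans. `parse_paired_formatting` is the dispatcher referenced by both versions of the parser; the exact span contents shown are approximations, not captured output.

```python
# Illustration only: expected decomposition of one line of cite markup,
# using the parser functions defined above.
spans = parse_paired_formatting("See [[the other page|Some Article]] for more.")
# spans is expected to look roughly like:
#   [TextSpan("See "),
#    CitationSpan([TextSpan("the other page")], "Some Article"),
#    TextSpan(" for more.")]
```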
@ -0,0 +1,104 @@
"""
Internal module encapsulating visitors that render articles into
readable formats.
"""

from typing import Iterable

from .core import RenderableVisitor
from .helpers import filesafe_title


class HtmlRenderer(RenderableVisitor):
    """
    Renders an article token tree into published article HTML.
    """
    def __init__(self, lexicon_name: str, written_articles: Iterable[str]):
        self.lexicon_name: str = lexicon_name
        self.written_articles: Iterable[str] = written_articles

    def TextSpan(self, span):
        return span.innertext

    def LineBreak(self, span):
        return '<br>'

    def ParsedArticle(self, span):
        return '\n'.join(span.recurse(self))

    def BodyParagraph(self, span):
        return f'<p>{"".join(span.recurse(self))}</p>'

    def SignatureParagraph(self, span):
        return (
            '<hr><span class="signature"><p>'
            f'{"".join(span.recurse(self))}'
            '</p></span>'
        )

    def BoldSpan(self, span):
        return f'<b>{"".join(span.recurse(self))}</b>'

    def ItalicSpan(self, span):
        return f'<i>{"".join(span.recurse(self))}</i>'

    def CitationSpan(self, span):
        if span.cite_target in self.written_articles:
            link_class = ''
        else:
            link_class = ' class="phantom"'
        # link = url_for(
        #     'lexicon.article',
        #     name=self.lexicon_name,
        #     title=filesafe_title(span.cite_target))
        link = (f'/lexicon/{self.lexicon_name}'
            + f'/article/{filesafe_title(span.cite_target)}')
        return f'<a href="{link}"{link_class}>{"".join(span.recurse(self))}</a>'


class PreviewHtmlRenderer(RenderableVisitor):
    def __init__(self, lexicon):
        with lexicon.ctx.read('info') as info:
            self.article_map = {
                title: article.character
                for title, article in info.items()
            }
        self.citations = []
        self.contents = ""

    def TextSpan(self, span):
        return span.innertext

    def LineBreak(self, span):
        return '<br>'

    def ParsedArticle(self, span):
        self.contents = '\n'.join(span.recurse(self))
        return self

    def BodyParagraph(self, span):
        return f'<p>{"".join(span.recurse(self))}</p>'

    def SignatureParagraph(self, span):
        return (
            '<hr><span class="signature"><p>'
            f'{"".join(span.recurse(self))}'
            '</p></span>'
        )

    def BoldSpan(self, span):
        return f'<b>{"".join(span.recurse(self))}</b>'

    def ItalicSpan(self, span):
        return f'<i>{"".join(span.recurse(self))}</i>'

    def CitationSpan(self, span):
        if span.cite_target in self.article_map:
            if self.article_map.get(span.cite_target):
                link_class = '[extant]'
            else:
                link_class = '[phantom]'
        else:
            link_class = '[new]'
        self.citations.append(f'{span.cite_target} {link_class}')
        return f'<u>{"".join(span.recurse(self))}</u>[{len(self.citations)}]'

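A small sketch of how `HtmlRenderer.CitationSpan` behaves in isolation. The stub below only mimics the two members the renderer touches (`cite_target` and `recurse`) and is not the real parser span class.

```python
# Illustration only: calling the citation visitor with a stand-in span.
class StubSpan:
    def __init__(self, cite_target, text):
        self.cite_target = cite_target
        self.text = text

    def recurse(self, renderer):
        # The real spans recurse into their children; the stub returns one chunk.
        return [self.text]


renderer = HtmlRenderer("lexicon-1", written_articles=["Existing Article"])
html = renderer.CitationSpan(StubSpan("Missing Article", "a phantom cite"))
# html is an <a> tag pointing at /lexicon/lexicon-1/article/<filesafe title>,
# carrying class="phantom" because the target is not in written_articles.
```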
@ -2,5 +2,5 @@ import pkg_resources


def get_stream(*path):
    rs_path = "/".join(path)
    return pkg_resources.resource_stream(__name__, rs_path)

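A usage sketch for `get_stream`; the resource path segments below are placeholders, not necessarily files that ship with the package.

```python
# Illustration only: reading a packaged JSON resource through get_stream.
import json

with get_stream("default_config", "lexicon.json") as stream:  # hypothetical path
    defaults = json.load(stream)
```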
@ -0,0 +1,13 @@
{
    "version": "0",
    "aid": null,
    "lexicon": null,
    "character": null,
    "title": null,
    "turn": null,
    "status": {
        "ready": false,
        "approved": false
    },
    "contents": null
}

@ -0,0 +1,7 @@
{
    "version": "0",
    "cid": null,
    "name": null,
    "player": null,
    "signature": null
}

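A sketch of stamping a new article record out of the blank template above; the id scheme and file locations are assumptions for illustration.

```python
# Illustration only: filling in the blank article template.
import json
import uuid

with open("article.json") as f:
    article = json.load(f)

article["aid"] = uuid.uuid4().hex          # hypothetical id scheme
article["lexicon"] = "lexicon-1"
article["title"] = "Example Article"

with open(f"article.{article['aid']}.json", "w") as f:  # hypothetical destination
    json.dump(article, f, indent="\t")
```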
@ -17,7 +17,7 @@ div#editor-left {
    display: flex;
    align-items: stretch;
}
-div#editor-left section {
+div#editor-left div.contentblock {
    display: flex;
    flex-direction: column;
    margin: 10px 5px 10px 10px;

@ -48,7 +48,7 @@ textarea#editor-content {
div#editor-right {
    overflow-y: scroll;
}
-div#editor-right section {
+div#editor-right div.contentblock {
    margin: 10px 5px 10px 10px;
}
span.message-warning {

@ -0,0 +1,15 @@
{
    "version": "0",
    "secret_key": null,
    "address": "127.0.0.1",
    "port": "5000",
    "lexicon_data": "./lexicon",
    "static_root": "../resources",
    "email": {
        "server": null,
        "port": null,
        "tls": null,
        "username": null,
        "password": null
    }
}

@ -0,0 +1,107 @@
{
    "version": "0",
    "lid": null,
    "name": null,
    "title": null,
    "editor": null,
    "prompt": null,
    "time": {
        "created": null,
        "completed": null
    },
    "turn": {
        "current": null,
        "max": 8,
        "assignment": {
        }
    },
    "join": {
        "public": false,
        "open": false,
        "password": null,
        "max_players": 4,
        "chars_per_player": 1,
        "joined": []
    },
    "publish": {
        "notify": {
            "editor_on_ready": true,
            "player_on_reject": true,
            "player_on_accept": false
        },
        "deadlines": null,
        "asap": false,
        "quorum": null,
        "block_on_ready": true
    },
    "article": {
        "index": {
            "list": [
                {
                    "type": "char",
                    "pri": 0,
                    "pattern": "ABC"
                },
                {
                    "type": "char",
                    "pri": 0,
                    "pattern": "DEF"
                },
                {
                    "type": "char",
                    "pri": 0,
                    "pattern": "GHI"
                },
                {
                    "type": "char",
                    "pri": 0,
                    "pattern": "JKL"
                },
                {
                    "type": "char",
                    "pri": 0,
                    "pattern": "MNO"
                },
                {
                    "type": "char",
                    "pri": 0,
                    "pattern": "PQRS"
                },
                {
                    "type": "char",
                    "pri": 0,
                    "pattern": "TUV"
                },
                {
                    "type": "char",
                    "pri": 0,
                    "pattern": "WXYZ"
                }
            ],
            "capacity": null
        },
        "citation": {
            "allow_self": false,
            "min_extant": null,
            "max_extant": null,
            "min_phantom": null,
            "max_phantom": null,
            "min_total": null,
            "max_total": null,
            "min_chars": null,
            "max_chars": null
        },
        "word_limit": {
            "soft": null,
            "hard": null
        },
        "addendum": {
            "allowed": false,
            "max": null
        }
    },
    "character": {
    },
    "log": [
    ]
}

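One plausible reading of the `article.index.list` entries above: a `"char"` index claims any title whose first letter falls in its `pattern`. This is an illustration of the data shape only, not the project's actual indexing code.

```python
# Illustration only: bucketing a title against the "char" index entries.
def index_for(title: str, index_list: list) -> str:
    first = title.strip().upper()[:1]
    for entry in index_list:
        if entry["type"] == "char" and first in entry["pattern"]:
            return entry["pattern"]
    return "etc."


index_list = [
    {"type": "char", "pri": 0, "pattern": "ABC"},
    {"type": "char", "pri": 0, "pattern": "DEF"},
]
assert index_for("Daedalus", index_list) == "DEF"
```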
@ -1,21 +1,16 @@
|
||||||
:root {
|
|
||||||
--button-default: #dddddd;
|
|
||||||
--button-hover: #cccccc;
|
|
||||||
--button-current: #bbbbbb;
|
|
||||||
}
|
|
||||||
body {
|
body {
|
||||||
background-color: #eeeeee;
|
background-color: #eeeeee;
|
||||||
line-height: 1.4;
|
line-height: 1.4;
|
||||||
font-size: 16px;
|
font-size: 16px;
|
||||||
}
|
}
|
||||||
main {
|
div#wrapper {
|
||||||
max-width: 800px;
|
max-width: 800px;
|
||||||
position: absolute;
|
position: absolute;
|
||||||
left: 0;
|
left: 0;
|
||||||
right: 0;
|
right: 0;
|
||||||
margin: 0 auto;
|
margin: 0 auto;
|
||||||
}
|
}
|
||||||
header {
|
div#header {
|
||||||
padding: 5px;
|
padding: 5px;
|
||||||
margin: 5px;
|
margin: 5px;
|
||||||
background-color: #ffffff;
|
background-color: #ffffff;
|
||||||
|
@ -24,7 +19,7 @@ header {
|
||||||
overflow: auto;
|
overflow: auto;
|
||||||
position: relative;
|
position: relative;
|
||||||
}
|
}
|
||||||
header p, header h2 {
|
div#header p, div#header h2 {
|
||||||
margin: 5px;
|
margin: 5px;
|
||||||
}
|
}
|
||||||
div#login-status {
|
div#login-status {
|
||||||
|
@ -33,7 +28,7 @@ div#login-status {
|
||||||
top: 10px;
|
top: 10px;
|
||||||
right: 10px;
|
right: 10px;
|
||||||
}
|
}
|
||||||
nav {
|
div#sidebar {
|
||||||
width: 200px;
|
width: 200px;
|
||||||
float:left;
|
float:left;
|
||||||
margin:5px;
|
margin:5px;
|
||||||
|
@ -46,8 +41,11 @@ nav {
|
||||||
img#logo {
|
img#logo {
|
||||||
max-width: 200px;
|
max-width: 200px;
|
||||||
}
|
}
|
||||||
nav table {
|
table {
|
||||||
|
table-layout: fixed;
|
||||||
width: 100%;
|
width: 100%;
|
||||||
|
}
|
||||||
|
div#sidebar table {
|
||||||
border-collapse: collapse;
|
border-collapse: collapse;
|
||||||
}
|
}
|
||||||
div.citeblock table td:first-child + td a {
|
div.citeblock table td:first-child + td a {
|
||||||
|
@ -59,36 +57,36 @@ div.misclinks table td a {
|
||||||
table a {
|
table a {
|
||||||
display: flex;
|
display: flex;
|
||||||
padding: 3px;
|
padding: 3px;
|
||||||
background-color: var(--button-default);
|
background-color: #dddddd;
|
||||||
border-radius: 5px;
|
border-radius: 5px;
|
||||||
text-decoration: none;
|
text-decoration: none;
|
||||||
}
|
}
|
||||||
nav table a {
|
div#sidebar table a {
|
||||||
justify-content: center;
|
justify-content: center;
|
||||||
}
|
}
|
||||||
table a:hover {
|
table a:hover {
|
||||||
background-color: var(--button-hover);
|
background-color: #cccccc;
|
||||||
}
|
}
|
||||||
nav table a.current-page {
|
div#sidebar table a.current-page {
|
||||||
background-color: var(--button-current);
|
background-color: #bbbbbb;
|
||||||
}
|
}
|
||||||
nav table a.current-page:hover {
|
div#sidebar table a.current-page:hover {
|
||||||
background-color: var(--button-current);
|
background-color: #bbbbbb;
|
||||||
}
|
}
|
||||||
nav table td {
|
div#sidebar table td {
|
||||||
padding: 0px; margin: 3px 0;
|
padding: 0px; margin: 3px 0;
|
||||||
border-bottom: 8px solid transparent;
|
border-bottom: 8px solid transparent;
|
||||||
}
|
}
|
||||||
article {
|
div#content {
|
||||||
margin: 5px;
|
margin: 5px;
|
||||||
}
|
}
|
||||||
article.content-2col {
|
div.content-2col {
|
||||||
position: absolute;
|
position: absolute;
|
||||||
right: 0px;
|
right: 0px;
|
||||||
left: 226px;
|
left: 226px;
|
||||||
max-width: 564px;
|
max-width: 564px;
|
||||||
}
|
}
|
||||||
section {
|
div.contentblock {
|
||||||
background-color: #ffffff;
|
background-color: #ffffff;
|
||||||
box-shadow: 2px 2px 10px #888888;
|
box-shadow: 2px 2px 10px #888888;
|
||||||
margin-bottom: 5px;
|
margin-bottom: 5px;
|
||||||
|
@ -97,6 +95,9 @@ section {
|
||||||
border-radius: 5px;
|
border-radius: 5px;
|
||||||
word-break: break-word;
|
word-break: break-word;
|
||||||
}
|
}
|
||||||
|
div.contentblock h3 {
|
||||||
|
margin: 0.3em 0;
|
||||||
|
}
|
||||||
a.phantom {
|
a.phantom {
|
||||||
color: #cc2200;
|
color: #cc2200;
|
||||||
}
|
}
|
||||||
|
@ -111,9 +112,8 @@ textarea.fullwidth {
|
||||||
width: 100%;
|
width: 100%;
|
||||||
box-sizing: border-box;
|
box-sizing: border-box;
|
||||||
}
|
}
|
||||||
input.fullwidth {
|
input.smallnumber {
|
||||||
width: 100%;
|
width: 4em;
|
||||||
box-sizing: border-box;
|
|
||||||
}
|
}
|
||||||
form#session-settings p {
|
form#session-settings p {
|
||||||
line-height: 1.8em;
|
line-height: 1.8em;
|
||||||
|
@ -126,34 +126,6 @@ div.dashboard-lexicon-item {
|
||||||
padding: 0 10px;
|
padding: 0 10px;
|
||||||
border-left: 3px solid black;
|
border-left: 3px solid black;
|
||||||
}
|
}
|
||||||
ul.blockitem-list {
|
|
||||||
list-style: none;
|
|
||||||
margin-block-start: 1em;
|
|
||||||
margin-block-end: 1em;
|
|
||||||
margin-inline-start: 0.5em;
|
|
||||||
margin-inline-end: 0.5em;
|
|
||||||
padding-inline-start: 0;
|
|
||||||
padding-inline-end: 0;
|
|
||||||
}
|
|
||||||
ul.blockitem-list li {
|
|
||||||
border-inline-start: 3px solid black;
|
|
||||||
padding-inline-start: 0.5em;
|
|
||||||
}
|
|
||||||
ul.blockitem-list * {
|
|
||||||
margin-block-start: 0.5em;
|
|
||||||
margin-block-end: 0.5em;
|
|
||||||
}
|
|
||||||
ul.blockitem-list pre {
|
|
||||||
background-color: lightgray;
|
|
||||||
padding-block-start: 2px;
|
|
||||||
padding-block-end: 2px;
|
|
||||||
padding-inline-start: 2px;
|
|
||||||
padding-inline-end: 2px;
|
|
||||||
border: 1px solid gray;
|
|
||||||
border-radius: 2px;
|
|
||||||
font-size: smaller;
|
|
||||||
white-space: break-spaces;
|
|
||||||
}
|
|
||||||
div.dashboard-lexicon-unstarted {
|
div.dashboard-lexicon-unstarted {
|
||||||
border-left-color: blue;
|
border-left-color: blue;
|
||||||
}
|
}
|
||||||
|
@ -171,79 +143,27 @@ div.dashboard-lexicon-item p {
|
||||||
margin-block-start: 0.5em;
|
margin-block-start: 0.5em;
|
||||||
margin-block-end: 0.5em;
|
margin-block-end: 0.5em;
|
||||||
}
|
}
|
||||||
ul.unordered-tabs {
|
|
||||||
list-style: none;
|
|
||||||
margin-block-start: 0;
|
|
||||||
margin-block-end: 0;
|
|
||||||
margin-inline-start: 0;
|
|
||||||
margin-inline-end: 0;
|
|
||||||
padding-block-start: 0;
|
|
||||||
padding-block-end: 0;
|
|
||||||
padding-inline-start: 0;
|
|
||||||
padding-inline-end: 0;
|
|
||||||
}
|
|
||||||
ul.unordered-tabs li {
|
|
||||||
display: inline-block;
|
|
||||||
margin: 3px;
|
|
||||||
}
|
|
||||||
ul.unordered-tabs li a {
|
|
||||||
background-color: var(--button-current);
|
|
||||||
display: flex;
|
|
||||||
border: 5px solid var(--button-current);
|
|
||||||
border-radius: 5px;
|
|
||||||
text-decoration: none;
|
|
||||||
}
|
|
||||||
ul.unordered-tabs li a[href] {
|
|
||||||
background-color: var(--button-default);
|
|
||||||
border-color: var(--button-default);
|
|
||||||
}
|
|
||||||
ul.unordered-tabs li a[href]:hover {
|
|
||||||
background-color: var(--button-hover);
|
|
||||||
border-color: var(--button-hover);
|
|
||||||
}
|
|
||||||
details.setting-help {
|
|
||||||
margin-block-start: 1em;
|
|
||||||
margin-block-end: 1em;
|
|
||||||
}
|
|
||||||
#index-definition-table td:nth-child(2) {
|
|
||||||
width: 100%;
|
|
||||||
}
|
|
||||||
#index-definition-table td:nth-child(2) *:only-child {
|
|
||||||
box-sizing: border-box;
|
|
||||||
width: 100%;
|
|
||||||
}
|
|
||||||
#index-definition-table td input[type=number] {
|
|
||||||
width: 4em;
|
|
||||||
}
|
|
||||||
section.new-post {
|
|
||||||
padding: 9px;
|
|
||||||
border: 1px dashed red;
|
|
||||||
}
|
|
||||||
p.post-byline {
|
|
||||||
color: #606060;
|
|
||||||
text-align: right;
|
|
||||||
}
|
|
||||||
@media only screen and (max-width: 816px) {
|
@media only screen and (max-width: 816px) {
|
||||||
main {
|
div#wrapper {
|
||||||
padding: 5px;
|
padding: 5px;
|
||||||
}
|
}
|
||||||
header {
|
div#header {
|
||||||
max-width: 554px;
|
max-width: 554px;
|
||||||
margin: 0 auto;
|
margin: 0 auto;
|
||||||
}
|
}
|
||||||
nav {
|
div#sidebar {
|
||||||
max-width: 548px;
|
max-width: 548px;
|
||||||
width: inherit;
|
width: inherit;
|
||||||
float: inherit;
|
float: inherit;
|
||||||
margin: 10px auto;
|
margin: 10px auto;
|
||||||
}
|
}
|
||||||
article{
|
div#content{
|
||||||
margin: 10px auto;
|
margin: 10px auto;
|
||||||
}
|
}
|
||||||
article.content-1col {
|
div.content-1col {
|
||||||
max-width: 564px;
|
max-width: 564px;
|
||||||
}
|
}
|
||||||
article.content-2col {
|
div.content-2col {
|
||||||
max-width: 564px;
|
max-width: 564px;
|
||||||
position: static;
|
position: static;
|
||||||
right: inherit;
|
right: inherit;
|
||||||
|
|
|
@ -0,0 +1,13 @@
{
    "version": "0",
    "uid": null,
    "username": null,
    "displayname": null,
    "email": null,
    "password": null,
    "created": null,
    "last_login": null,
    "last_activity": null,
    "new_password_required": true,
    "is_admin": false
}

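A sketch of how the user template above might be populated at registration time; the hashing helper is an assumed dependency and may not be what the project actually uses.

```python
# Illustration only: filling in the blank user template.
import json
import time
import uuid

from werkzeug.security import generate_password_hash  # assumed dependency

with open("user.json") as f:
    user = json.load(f)

user["uid"] = uuid.uuid4().hex
user["username"] = "alice"
user["displayname"] = "Alice"
user["created"] = int(time.time())
user["password"] = generate_password_hash("correct horse battery staple")
```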
@ -1,113 +1,46 @@
-from datetime import datetime, timezone
-import json
-import os
-
-from flask import Flask, g, url_for, redirect
-
-from amanuensis.backend import *
-from amanuensis.config import AmanuensisConfig, CommandLineConfig
-from amanuensis.db import DbContext
-from amanuensis.parser import filesafe_title
-import amanuensis.server.auth as auth
-from amanuensis.server.helpers import UuidConverter, current_lexicon, current_membership
-import amanuensis.server.home as home
-import amanuensis.server.lexicon as lexicon
-
-
-def date_format(dt: datetime, formatstr="%Y-%m-%d %H:%M:%S%z") -> str:
-    """Convert datetime to human-readable string"""
-    if dt is None:
-        return "never"
-    # Cast db time to UTC, then convert to local timezone
-    adjusted = dt.replace(tzinfo=timezone.utc).astimezone()
-    return adjusted.strftime(formatstr)
-
-
-def article_link(title):
-    """Get the url for a lexicon by its title"""
-    return url_for(
-        'lexicon.article',
-        lexicon_name=g.lexicon.name,
-        title=filesafe_title(title))
-
-
-def get_app(
-    config: AmanuensisConfig,
-    db: DbContext = None,
-) -> Flask:
-    """Application factory"""
-    # Create the Flask object
-    app = Flask(__name__, template_folder=".", static_folder=config.STATIC_ROOT)
-
-    # Load keys from the config object
-    app.config.from_object(config)
-
-    # If a config file is now specified, also load keys from there
-    if config_path := app.config.get("CONFIG_FILE", None):
-        app.config.from_file(os.path.abspath(config_path), json.load)
-
-    # Assert that all required config values are now set
-    for config_key in ("SECRET_KEY", "DATABASE_URI"):
-        if not app.config.get(config_key):
-            raise Exception(f"{config_key} must be defined")
-
-    # Create the database context, if one wasn't already given
-    if db is None:
-        db = DbContext(uri=app.config["DATABASE_URI"])
-
-    # Make the database connection available to requests via g
-    def db_setup():
-        g.db = db
-
-    app.before_request(db_setup)
-
-    # Tear down the session on request teardown
-    def db_teardown(response_or_exc):
-        db.session.remove()
-
-    app.teardown_appcontext(db_teardown)
-
-    # Configure jinja options
-    def add_jinja_context():
-        return {
-            "db": db,
-            "lexiq": lexiq,
-            "userq": userq,
-            "memq": memq,
-            "charq": charq,
-            "indq": indq,
-            "postq": postq,
-            "current_lexicon": current_lexicon,
-            "current_membership": current_membership
-        }
-
-    app.jinja_options.update(trim_blocks=True, lstrip_blocks=True)
-    app.template_filter("date")(date_format)
-    app.template_filter("articlelink")(article_link)
-    app.context_processor(add_jinja_context)
-
-    # Set up uuid route converter
-    app.url_map.converters["uuid"] = UuidConverter
-
-    # Set up Flask-Login
-    auth.get_login_manager().init_app(app)
-
-    # Register blueprints
-    app.register_blueprint(auth.bp)
-    app.register_blueprint(home.bp)
-    app.register_blueprint(lexicon.bp)
-
-    # Add a root redirect
-    def root():
-        return redirect(url_for("home.home"))
-
-    app.route("/")(root)
-
-    return app
-
-
-def run():
-    """Run the server, populating the config from the command line."""
-    config = CommandLineConfig()
-    app = get_app(config)
-    app.run(debug=app.testing)
+import os
+
+from flask import Flask
+
+from amanuensis.config import RootConfigDirectoryContext, ENV_CONFIG_DIR
+from amanuensis.models import ModelFactory
+from .auth import get_login_manager, bp_auth
+from .helpers import register_custom_filters
+from .home import bp_home
+from .lexicon import bp_lexicon
+from .session import bp_session
+
+
+def get_app(root: RootConfigDirectoryContext) -> Flask:
+    # Flask app init
+    with root.read_config() as cfg:
+        app = Flask(
+            __name__,
+            template_folder='.',
+            static_folder=cfg.static_root
+        )
+        app.secret_key = bytes.fromhex(cfg.secret_key)
+    app.config['root'] = root
+    app.config['model_factory'] = ModelFactory(root)
+    app.jinja_options['trim_blocks'] = True
+    app.jinja_options['lstrip_blocks'] = True
+    register_custom_filters(app)
+
+    # Flask-Login init
+    login_manager = get_login_manager(root)
+    login_manager.init_app(app)
+
+    # Blueprint inits
+    app.register_blueprint(bp_auth)
+    app.register_blueprint(bp_home)
+    app.register_blueprint(bp_lexicon)
+    app.register_blueprint(bp_session)
+
+    return app
+
+
+def default():
+    cwd = os.getcwd()
+    config_dir = os.environ.get(ENV_CONFIG_DIR, "amanuensis")
+    root = RootConfigDirectoryContext(os.path.join(cwd, config_dir))
+    return get_app(root)

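A sketch of driving the develop-side application factory with an in-memory database; the exact `AmanuensisConfig` interface is assumed from the keys `get_app` checks above.

```python
# Illustration only: a dev/test harness for the develop-side factory.
from amanuensis.config import AmanuensisConfig
from amanuensis.server import get_app


class DevConfig(AmanuensisConfig):  # attribute names assumed
    SECRET_KEY = "dev-only-secret"
    DATABASE_URI = "sqlite:///:memory:"
    STATIC_ROOT = None


app = get_app(DevConfig())
app.run(debug=True)
```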
@ -1,79 +1,76 @@
-import logging
-from typing import Optional
-
-from flask import (
-    Blueprint,
-    flash,
-    g,
-    redirect,
-    render_template,
-    url_for,
-)
-from flask_login import (
-    AnonymousUserMixin,
-    login_user,
-    logout_user,
-    login_required,
-    LoginManager,
-)
-
-from amanuensis.backend import userq
-from amanuensis.db import User
-
-from .forms import LoginForm
-
-LOG = logging.getLogger(__name__)
-
-bp = Blueprint("auth", __name__, url_prefix="/auth", template_folder=".")
-
-
-def get_login_manager() -> LoginManager:
-    """Login manager factory"""
-    login_manager = LoginManager()
-    login_manager.login_view = "auth.login"
-    login_manager.anonymous_user = AnonymousUserMixin
-
-    def load_user(user_id_str: str) -> Optional[User]:
-        try:
-            user_id = int(user_id_str)
-        except:
-            return None
-        return userq.try_from_id(g.db, user_id)
-
-    login_manager.user_loader(load_user)
-
-    return login_manager
-
-
-@bp.route("/login/", methods=["GET", "POST"])
-def login():
-    form = LoginForm()
-    if not form.validate_on_submit():
-        # Either the request was GET and we should render the form,
-        # or the request was POST and validation failed.
-        return render_template("auth.login.jinja", form=form)
-
-    # POST with valid data
-    username: str = form.username.data
-    password: str = form.password.data
-    user: User = userq.try_from_username(g.db, username)
-    if not user or not userq.password_check(g.db, username, password):
-        # Bad creds
-        flash("Login not recognized")
-        return redirect(url_for("auth.login"))
-
-    # Login credentials were correct
-    remember_me: bool = form.remember.data
-    login_user(user, remember=remember_me)
-    userq.update_logged_in(g.db, username)
-    LOG.info("Logged in user {0.username} ({0.id})".format(user))
-    return redirect(url_for("home.home"))
-
-
-@bp.get("/logout/")
-@login_required
-def logout():
-    logout_user()
-    return redirect(url_for("home.home"))
+import logging
+import time
+
+from flask import (
+    Blueprint,
+    render_template,
+    redirect,
+    url_for,
+    flash,
+    current_app)
+from flask_login import (
+    login_user,
+    logout_user,
+    login_required,
+    LoginManager)
+
+from amanuensis.config import RootConfigDirectoryContext
+from amanuensis.models import ModelFactory, AnonymousUserModel
+
+from .forms import LoginForm
+
+logger = logging.getLogger(__name__)
+
+
+def get_login_manager(root: RootConfigDirectoryContext) -> LoginManager:
+    """
+    Creates a login manager
+    """
+    login_manager = LoginManager()
+    login_manager.login_view = 'auth.login'
+    login_manager.anonymous_user = AnonymousUserModel
+
+    @login_manager.user_loader
+    def load_user(uid):
+        return current_app.config['model_factory'].user(str(uid))
+
+    return login_manager
+
+
+bp_auth = Blueprint('auth', __name__,
+    url_prefix='/auth',
+    template_folder='.')
+
+
+@bp_auth.route('/login/', methods=['GET', 'POST'])
+def login():
+    model_factory: ModelFactory = current_app.config['model_factory']
+    form = LoginForm()
+    if not form.validate_on_submit():
+        # Either the request was GET and we should render the form,
+        # or the request was POST and validation failed.
+        return render_template('auth.login.jinja', form=form)
+
+    # POST with valid data
+    username = form.username.data
+    user = model_factory.try_user(username)
+    if not user or not user.check_password(form.password.data):
+        # Bad creds
+        flash("Login not recognized")
+        return redirect(url_for('auth.login'))
+
+    # Login credentials were correct
+    remember_me = form.remember.data
+    login_user(user, remember=remember_me)
+    with user.ctx.edit_config() as cfg:
+        cfg.last_login = int(time.time())
+    logger.info('Logged in user "{0.username}" ({0.uid})'.format(user.cfg))
+    return redirect(url_for('home.home'))
+
+
+@bp_auth.route("/logout/", methods=['GET'])
+@login_required
+def logout():
+    logout_user()
+    return redirect(url_for('home.home'))

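A sketch of exercising the login route above with Flask's test client; it assumes a test app fixture with CSRF disabled and a user already provisioned.

```python
# Illustration only: posting credentials to the login view.
def test_login(app):
    client = app.test_client()
    response = client.post(
        "/auth/login/",
        data={"username": "alice", "password": "swordfish", "remember": "y"},
        follow_redirects=True,
    )
    assert response.status_code == 200
```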
@ -3,22 +3,21 @@
{% block header %}<h2>Amanuensis - Login</h2>{% endblock %}
{% block login_status_attr %}style="display:none"{% endblock %}
{% block main %}
-<section>
<form action="" method="post" novalidate>
    {{ form.hidden_tag() }}
    <p>{{ form.username.label }}<br>{{ form.username(size=32) }}
    {% for error in form.username.errors %}
    <br><span style="color: #ff0000">{{ error }}</span>
    {% endfor %}</p>
    <p>{{ form.password.label }}<br>{{ form.password(size=32) }}
    {% for error in form.password.errors %}
    <br><span style="color: #ff0000">{{ error }}</span>
    {% endfor %}</p>
    <p>{{ form.remember }} {{ form.remember.label }}</p>
    <p>{{ form.submit() }}</p>
</form>
{% for message in get_flashed_messages() %}
<span style="color: #ff0000">{{ message }}</span><br>
{% endfor %}
-</section>
{% endblock %}
+{% set template_content_blocks = [self.main()] %}

@ -4,9 +4,12 @@ from wtforms.validators import DataRequired


class LoginForm(FlaskForm):
    """/auth/login/"""
-    username = StringField("Username", validators=[DataRequired()])
-    password = PasswordField("Password", validators=[DataRequired()])
-    remember = BooleanField("Stay logged in")
-    submit = SubmitField("Log in")
+    username = StringField(
+        'Username',
+        validators=[DataRequired()])
+    password = PasswordField(
+        'Password',
+        validators=[DataRequired()])
+    remember = BooleanField('Stay logged in')
+    submit = SubmitField('Log in')

@ -1,173 +1,114 @@
|
||||||
|
# Standard library imports
|
||||||
|
from datetime import datetime
|
||||||
from functools import wraps
|
from functools import wraps
|
||||||
from typing import Optional, Any
|
|
||||||
from uuid import UUID
|
|
||||||
|
|
||||||
from flask import (
|
# Third party imports
|
||||||
_request_ctx_stack,
|
from flask import g, flash, redirect, url_for, current_app
|
||||||
flash,
|
|
||||||
g,
|
|
||||||
has_request_context,
|
|
||||||
redirect,
|
|
||||||
request,
|
|
||||||
url_for,
|
|
||||||
)
|
|
||||||
from flask_login import current_user
|
from flask_login import current_user
|
||||||
from werkzeug.local import LocalProxy
|
|
||||||
from werkzeug.routing import BaseConverter, ValidationError
|
|
||||||
|
|
||||||
from amanuensis.backend import lexiq, memq
|
# Module imports
|
||||||
from amanuensis.db import DbContext, Lexicon, User, Membership
|
from amanuensis.parser import filesafe_title
|
||||||
|
from amanuensis.models import ModelFactory, UserModel, LexiconModel
|
||||||
|
|
||||||
|
|
||||||
class UuidConverter(BaseConverter):
|
def register_custom_filters(app):
|
||||||
"""Converter that matches version 4 UUIDs"""
|
"""Adds custom filters to the Flask app"""
|
||||||
regex = r"[0-9A-Fa-f]{8}-[0-9A-Fa-f]{4}-4[0-9A-Fa-f]{3}-[89aAbB][0-9A-Fa-f]{3}-[0-9A-Fa-f]{12}"
|
|
||||||
|
|
||||||
def to_python(self, value: str) -> Any:
|
@app.template_filter("user_attr")
|
||||||
try:
|
def get_user_attr(uid, attr):
|
||||||
return UUID(value)
|
factory: ModelFactory = current_app.config['model_factory']
|
||||||
except:
|
user: UserModel = factory.user(uid)
|
||||||
return ValidationError(f"Invalid UUID: {value}")
|
val = getattr(user.cfg, attr)
|
||||||
|
return val
|
||||||
|
|
||||||
def to_url(self, value: Any) -> str:
|
@app.template_filter("asdate")
|
||||||
if not isinstance(value, UUID):
|
def timestamp_to_readable(ts, formatstr="%Y-%m-%d %H:%M:%S"):
|
||||||
raise ValueError(f"Expected UUID, got {type(value)}: {value}")
|
if ts is None:
|
||||||
return str(value)
|
return "null"
|
||||||
|
dt = datetime.fromtimestamp(ts)
|
||||||
|
return dt.strftime(formatstr)
|
||||||
|
|
||||||
|
@app.template_filter("articlelink")
|
||||||
|
def article_link(title):
|
||||||
|
return url_for(
|
||||||
|
'lexicon.article',
|
||||||
|
name=g.lexicon.cfg.name,
|
||||||
|
title=filesafe_title(title))
|
||||||
|
|
||||||
def get_current_lexicon():
|
@app.context_processor
|
||||||
# Check if the request context is for a lexicon page
|
def lexicon_status():
|
||||||
if not has_request_context():
|
return dict(
|
||||||
return None
|
PREGAME=LexiconModel.PREGAME,
|
||||||
lexicon_name = request.view_args.get("lexicon_name")
|
ONGOING=LexiconModel.ONGOING,
|
||||||
if not lexicon_name:
|
COMPLETE=LexiconModel.COMPLETE)
|
||||||
return None
|
|
||||||
# Pull up the lexicon if it exists and cache it in the request context
|
|
||||||
if not hasattr(_request_ctx_stack.top, "lexicon"):
|
|
||||||
db: DbContext = g.db
|
|
||||||
lexicon: Optional[Lexicon] = lexiq.try_from_name(db, lexicon_name)
|
|
||||||
setattr(_request_ctx_stack.top, "lexicon", lexicon)
|
|
||||||
# Return the cached lexicon
|
|
||||||
return getattr(_request_ctx_stack.top, "lexicon", None)
|
|
||||||
|
|
||||||
|
|
||||||
current_lexicon = LocalProxy(get_current_lexicon)
|
|
||||||
|
|
||||||
|
|
||||||
def get_current_membership():
|
|
||||||
# Base the current membership on the current user and the current lexicon
|
|
||||||
user: User = current_user
|
|
||||||
if not user or not user.is_authenticated:
|
|
||||||
return None
|
|
||||||
lexicon: Lexicon = current_lexicon
|
|
||||||
if not lexicon:
|
|
||||||
return None
|
|
||||||
# Pull up the membership and cache it in the request context
|
|
||||||
if not hasattr(_request_ctx_stack.top, "membership"):
|
|
||||||
db: DbContext = g.db
|
|
||||||
mem: Membership = memq.try_from_ids(db, user.id, lexicon.id)
|
|
||||||
setattr(_request_ctx_stack.top, "membership", mem)
|
|
||||||
# Return cached membership
|
|
||||||
return getattr(_request_ctx_stack.top, "membership", None)
|
|
||||||
|
|
||||||
|
|
||||||
current_membership = LocalProxy(get_current_membership)
|
|
||||||
|
|
||||||
|
|
||||||
def lexicon_param(route):
|
def lexicon_param(route):
|
||||||
"""
|
"""Wrapper for loading a route's lexicon"""
|
||||||
Wrapper for loading a route's lexicon to `g`.
|
@wraps(route)
|
||||||
This decorator should be applied above any other decorators that reference `g.lexicon`.
|
def with_lexicon(**kwargs):
|
||||||
"""
|
name = kwargs.get('name')
|
||||||
@wraps(route)
|
model_factory: ModelFactory = current_app.config['model_factory']
|
||||||
def with_lexicon(*args, **kwargs):
|
g.lexicon = model_factory.lexicon(name)
|
||||||
db: DbContext = g.db
|
if g.lexicon is None:
|
||||||
name: str = kwargs.get('lexicon_name')
|
flash(f'Couldn\'t find a lexicon with the name "{name}"')
|
||||||
lexicon: Optional[Lexicon] = lexiq.try_from_name(db, name)
|
return redirect(url_for("home.home"))
|
||||||
if lexicon is None:
|
return route(**kwargs)
|
||||||
flash(f"Couldn't find a lexicon with the name \"{name}\"")
|
return with_lexicon
|
||||||
return redirect(url_for("home.home"))
|
|
||||||
g.lexicon = lexicon
|
|
||||||
return route(*args, **kwargs)
|
|
||||||
return with_lexicon
|
|
||||||
|
|
||||||
|
|
||||||
def admin_required(route):
|
def admin_required(route):
|
||||||
"""
|
"""
|
||||||
Restricts a route to users who are site admins.
|
Requires the user to be an admin to load this page
|
||||||
"""
|
"""
|
||||||
@wraps(route)
|
@wraps(route)
|
||||||
def admin_route(*args, **kwargs):
|
def admin_route(*args, **kwargs):
|
||||||
user: User = current_user
|
if not current_user.cfg.is_admin:
|
||||||
if not user.is_authenticated or not user.is_site_admin:
|
flash("You must be an admin to view this page")
|
||||||
flash("You must be an admin to view this page")
|
return redirect(url_for('home.home'))
|
||||||
return redirect(url_for('home.home'))
|
return route(*args, **kwargs)
|
||||||
return route(*args, **kwargs)
|
return admin_route
|
||||||
return admin_route
|
|
||||||
|
|
||||||
|
|
||||||
def player_required(route):
|
def player_required(route):
|
||||||
"""
|
"""
|
||||||
Restricts a route to users who are players in the current lexicon.
|
Requires the user to be a player in the lexicon to load this page
|
||||||
"""
|
"""
|
||||||
@wraps(route)
|
@wraps(route)
|
||||||
def player_route(*args, **kwargs):
|
def player_route(*args, **kwargs):
|
||||||
db: DbContext = g.db
|
if current_user.uid not in g.lexicon.cfg.join.joined:
|
||||||
user: User = current_user
|
flash("You must be a player to view this page")
|
||||||
lexicon: Lexicon = current_lexicon
|
return (redirect(url_for('lexicon.contents', name=g.lexicon.cfg.name))
|
||||||
if not user.is_authenticated:
|
if g.lexicon.cfg.join.public
|
||||||
flash("You must be a player to view this page")
|
else redirect(url_for('home.home')))
|
||||||
if lexicon.public:
|
return route(*args, **kwargs)
|
||||||
return redirect(url_for('lexicon.contents', lexicon_name=lexicon.name))
|
return player_route
|
||||||
else:
|
|
||||||
return redirect(url_for('home.home'))
|
|
||||||
mem: Optional[Membership] = memq.try_from_ids(db, user.id, lexicon.id)
|
|
||||||
if not mem:
|
|
||||||
flash("You must be a player to view this page")
|
|
||||||
if lexicon.public:
|
|
||||||
return redirect(url_for('lexicon.contents', lexicon_name=lexicon.name))
|
|
||||||
else:
|
|
||||||
return redirect(url_for('home.home'))
|
|
||||||
return route(*args, **kwargs)
|
|
||||||
return player_route
|
|
||||||
|
|
||||||
|
|
||||||
def player_required_if_not_public(route):
|
def player_required_if_not_public(route):
|
||||||
"""
|
"""
|
||||||
Restricts a route to users who are players in the current lexicon if the lexicon is nonpublic.
|
Requires the user to be a player in the lexicon to load this page if the
|
||||||
"""
|
lexicon has join.public = false
|
||||||
@wraps(route)
|
"""
|
||||||
def player_route(*args, **kwargs):
|
@wraps(route)
|
||||||
db: DbContext = g.db
|
def player_route(*args, **kwargs):
|
||||||
user: User = current_user
|
if ((not g.lexicon.cfg.join.public)
|
||||||
lexicon: Lexicon = current_lexicon
|
and current_user.uid not in g.lexicon.cfg.join.joined):
|
||||||
if not user.is_authenticated and not lexicon.public:
|
flash("You must be a player to view this page")
|
||||||
mem: Optional[Membership] = memq.try_from_ids(db, user.id, lexicon.id)
|
return redirect(url_for('home.home'))
|
||||||
if not mem:
|
return route(*args, **kwargs)
|
||||||
flash("You must be a player to view this page")
|
return player_route
|
||||||
return redirect(url_for('home.home'))
|
|
||||||
return route(*args, **kwargs)
|
|
||||||
return player_route
|
|
||||||
|
|
||||||
|
|
||||||
def editor_required(route):
|
def editor_required(route):
|
||||||
"""
|
"""
|
||||||
Restricts a route to users who are editors of the current lexicon.
|
Requires the user to be the editor of the current lexicon to load this
|
||||||
"""
|
page
|
||||||
@wraps(route)
|
"""
|
||||||
def editor_route(*args, **kwargs):
|
@wraps(route)
|
||||||
db: DbContext = g.db
|
def editor_route(*args, **kwargs):
|
||||||
user: User = current_user
|
if current_user.uid != g.lexicon.cfg.editor:
|
||||||
lexicon: Lexicon = current_lexicon
|
flash("You must be the editor to view this page")
|
||||||
if not user.is_authenticated:
|
return redirect(url_for('lexicon.contents', name=g.lexicon.cfg.name))
|
||||||
flash("You must be a player to view this page")
|
return route(*args, **kwargs)
|
||||||
if lexicon.public:
|
return editor_route
|
||||||
return redirect(url_for('lexicon.contents', lexicon_name=lexicon.name))
|
|
||||||
else:
|
|
||||||
return redirect(url_for('home.home'))
|
|
||||||
mem: Optional[Membership] = memq.try_from_ids(db, user.id, lexicon.id)
|
|
||||||
if not mem or not mem.is_editor:
|
|
||||||
flash("You must be the editor to view this page")
|
|
||||||
return redirect(url_for('lexicon.contents', lexicon_name=lexicon.name))
|
|
||||||
return route(*args, **kwargs)
|
|
||||||
return editor_route
|
|
||||||
|
|
|
@ -1,43 +1,65 @@
-from flask import Blueprint, render_template, g
-
-from amanuensis.backend import userq, lexiq
-
-# from .forms import LexiconCreateForm
-
-bp = Blueprint("home", __name__, url_prefix="/home", template_folder=".")
-
-
-@bp.get("/")
-def home():
-    return render_template("home.root.jinja")
-
-
-@bp.get("/admin/")
-# @login_required
-# @admin_required
-def admin():
-    return render_template("home.admin.jinja", userq=userq, lexiq=lexiq)
-
-
-# @bp_home.route("/admin/create/", methods=['GET', 'POST'])
-# @login_required
-# @admin_required
-# def admin_create():
-#     form = LexiconCreateForm()
-
-#     if not form.validate_on_submit():
-#         # GET or POST with invalid form data
-#         return render_template('home.create.jinja', form=form)
-
-#     # POST with valid data
-#     root: RootConfigDirectoryContext = current_app.config['root']
-#     model_factory: ModelFactory = current_app.config['model_factory']
-#     lexicon_name = form.lexiconName.data
-#     editor_name = form.editorName.data
-#     prompt = form.promptText.data
-#     # Editor's existence was checked by form validators
-#     editor = model_factory.user(editor_name)
-#     lexicon = create_lexicon(root, lexicon_name, editor)
-#     with lexicon.ctx.edit_config() as cfg:
-#         cfg.prompt = prompt
-#     return redirect(url_for('session.session', name=lexicon_name))
+from flask import Blueprint, render_template, redirect, url_for, current_app
+from flask_login import login_required, current_user
+
+from amanuensis.config import RootConfigDirectoryContext
+from amanuensis.lexicon import create_lexicon, load_all_lexicons
+from amanuensis.models import UserModel, ModelFactory
+from amanuensis.server.helpers import admin_required
+from amanuensis.user import load_all_users
+
+from .forms import LexiconCreateForm
+
+bp_home = Blueprint('home', __name__,
+    url_prefix='/home',
+    template_folder='.')
+
+
+@bp_home.route('/', methods=['GET'])
+def home():
+    root: RootConfigDirectoryContext = current_app.config['root']
+    user: UserModel = current_user
+    user_lexicons = []
+    public_lexicons = []
+    for lexicon in load_all_lexicons(root):
+        if user.uid in lexicon.cfg.join.joined:
+            user_lexicons.append(lexicon)
+        elif lexicon.cfg.join.public:
+            public_lexicons.append(lexicon)
+    return render_template(
+        'home.root.jinja',
+        user_lexicons=user_lexicons,
+        public_lexicons=public_lexicons)
+
+
+@bp_home.route('/admin/', methods=['GET'])
+@login_required
+@admin_required
+def admin():
+    root: RootConfigDirectoryContext = current_app.config['root']
+    users = list(load_all_users(root))
+    lexicons = list(load_all_lexicons(root))
+    return render_template('home.admin.jinja', users=users, lexicons=lexicons)
+
+
+@bp_home.route("/admin/create/", methods=['GET', 'POST'])
+@login_required
+@admin_required
+def admin_create():
+    form = LexiconCreateForm()
+
+    if not form.validate_on_submit():
+        # GET or POST with invalid form data
+        return render_template('home.create.jinja', form=form)
+
+    # POST with valid data
+    root: RootConfigDirectoryContext = current_app.config['root']
+    model_factory: ModelFactory = current_app.config['model_factory']
+    lexicon_name = form.lexiconName.data
+    editor_name = form.editorName.data
+    prompt = form.promptText.data
+    # Editor's existence was checked by form validators
+    editor = model_factory.user(editor_name)
+    lexicon = create_lexicon(root, lexicon_name, editor)
+    with lexicon.ctx.edit_config() as cfg:
+        cfg.prompt = prompt
+    return redirect(url_for('session.session', name=lexicon_name))

@ -2,16 +2,16 @@ from flask_wtf import FlaskForm
from wtforms import StringField, SubmitField, TextAreaField
from wtforms.validators import DataRequired

-# from amanuensis.server.forms import User, Lexicon
+from amanuensis.server.forms import User, Lexicon


-# class LexiconCreateForm(FlaskForm):
-#     """/admin/create/"""
-#     lexiconName = StringField(
-#         'Lexicon name',
-#         validators=[DataRequired(), Lexicon(should_exist=False)])
-#     editorName = StringField(
-#         'Username of editor',
-#         validators=[DataRequired(), User(should_exist=True)])
-#     promptText = TextAreaField('Prompt')
-#     submit = SubmitField('Create')
+class LexiconCreateForm(FlaskForm):
+    """/admin/create/"""
+    lexiconName = StringField(
+        'Lexicon name',
+        validators=[DataRequired(), Lexicon(should_exist=False)])
+    editorName = StringField(
+        'Username of editor',
+        validators=[DataRequired(), User(should_exist=True)])
+    promptText = TextAreaField('Prompt')
+    submit = SubmitField('Create')

@ -3,20 +3,18 @@
{% block title %}Admin | Amanuensis{% endblock %}
{% block header %}<h2>Amanuensis - Admin Dashboard</h2>{% endblock %}

-{# TODO #}
{% block sb_home %}<a href="{{ url_for('home.home') }}">Home</a>{% endblock %}
-{% block sb_create %}<a href="#{#{ url_for('home.admin_create') }#}">Create a lexicon</a>{% endblock %}
+{% block sb_create %}<a href="{{ url_for('home.admin_create') }}">Create a lexicon</a>{% endblock %}
{% set template_sidebar_rows = [self.sb_home(), self.sb_create()] %}

{% block main %}
-<section>
<p>Users:</p>
-{% for user in userq.get_all(db) %}
+{% for user in users %}
{{ macros.dashboard_user_item(user) }}
{% endfor %}
<p>Lexicons:</p>
-{% for lexicon in lexiq.get_all(db) %}
+{% for lexicon in lexicons %}
{{ macros.dashboard_lexicon_item(lexicon) }}
{% endfor %}
-</section>
{% endblock %}
+{% set template_content_blocks = [self.main()] %}

@ -4,13 +4,6 @@
|
||||||
{% block header %}<h2>Amanuensis - Home</h2>{% endblock %}
|
{% block header %}<h2>Amanuensis - Home</h2>{% endblock %}
|
||||||
|
|
||||||
{% block main %}
|
{% block main %}
|
||||||
{% if current_user.is_site_admin %}
|
|
||||||
<section>
|
|
||||||
<a href="{{ url_for('home.admin') }}" style="display:block; text-align:center;">Admin dashboard</a>
|
|
||||||
</section>
|
|
||||||
{% endif %}
|
|
||||||
|
|
||||||
<section>
|
|
||||||
<h1>Welcome to Amanuensis!</h1>
|
<h1>Welcome to Amanuensis!</h1>
|
||||||
<p>Amanuensis is a hub for playing Lexicon, the encyclopedia RPG. Log in to access your Lexicon games. If you do not have an account, contact the administrator.</p>
|
<p>Amanuensis is a hub for playing Lexicon, the encyclopedia RPG. Log in to access your Lexicon games. If you do not have an account, contact the administrator.</p>
|
||||||
|
|
||||||
|
@ -18,16 +11,10 @@
|
||||||
<span style="color:#ff0000">{{ message }}</span><br>
|
<span style="color:#ff0000">{{ message }}</span><br>
|
||||||
{% endfor %}
|
{% endfor %}
|
||||||
|
|
||||||
{% if current_user.is_authenticated %}
|
|
||||||
{% set joined = lexiq.get_joined(db, current_user.id)|list %}
|
|
||||||
{% else %}
|
|
||||||
{% set joined = [] %}
|
|
||||||
{% endif %}
|
|
||||||
|
|
||||||
{% if current_user.is_authenticated %}
|
{% if current_user.is_authenticated %}
|
||||||
<h2>Your games</h2>
|
<h2>Your games</h2>
|
||||||
{% if joined %}
|
{% if user_lexicons %}
|
||||||
{% for lexicon in joined %}
|
{% for lexicon in user_lexicons %}
|
||||||
{{ macros.dashboard_lexicon_item(lexicon) }}
|
{{ macros.dashboard_lexicon_item(lexicon) }}
|
||||||
{% endfor %}
|
{% endfor %}
|
||||||
{% else %}
|
{% else %}
|
||||||
|
@ -35,14 +22,21 @@
|
||||||
{% endif %}
|
{% endif %}
|
||||||
{% endif %}
|
{% endif %}
|
||||||
|
|
||||||
{% set public = lexiq.get_public(db)|reject("in", joined)|list %}
|
|
||||||
<h2>Public games</h2>
|
<h2>Public games</h2>
|
||||||
{% if public %}
|
{% if public_lexicons %}
|
||||||
{% for lexicon in public %}
|
{% for lexicon in public_lexicons %}
|
||||||
{{ macros.dashboard_lexicon_item(lexicon) }}
|
{{ macros.dashboard_lexicon_item(lexicon) }}
|
||||||
{% endfor %}
|
{% endfor %}
|
||||||
{% else %}
|
{% else %}
|
||||||
<p>No public games available.</p>
|
<p>No public games available.</p>
|
||||||
{% endif %}
|
{% endif %}
|
||||||
</section>
|
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
{% set template_content_blocks = [self.main()] %}
|
||||||
|
|
||||||
|
{% if current_user.cfg.is_admin %}
|
||||||
|
{% block admin_dash %}
|
||||||
|
<a href="{{ url_for('home.admin') }}" style="display:block; text-align:center;">Admin dashboard</a>
|
||||||
|
{% endblock %}
|
||||||
|
{% set template_content_blocks = [self.admin_dash()] + template_content_blocks %}
|
||||||
|
{% endif %}
|
|
@ -1,41 +1,44 @@
|
||||||
{% extends "page_2col.jinja" %}
|
{% extends "page_2col.jinja" %}
|
||||||
{% set lexicon_title = g.lexicon.full_title %}
|
{% set lexicon_title = g.lexicon.title %}
|
||||||
|
|
||||||
{% block header %}
|
{% block header %}
|
||||||
<h2>{{ lexicon_title }}</h2>
|
<h2>{{ lexicon_title }}</h2>
|
||||||
<p><i>{{ g.lexicon.prompt }}</i></p>
|
<p><i>{{ g.lexicon.cfg.prompt }}</i></p>
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
|
||||||
{% block sb_logo %}{% endblock %}
|
{% block sb_logo %}{% endblock %}
|
||||||
{% block sb_characters %}<a
|
{% block sb_home %}<a href="{{ url_for('home.home') }}">Home</a>
|
||||||
{% if current_page == "characters" %}class="current-page"
|
{% endblock %}
|
||||||
{% else %}href="{{ url_for('lexicon.characters.list', lexicon_name=g.lexicon.name) }}"
|
|
||||||
{% endif %}>Characters</a>{% endblock %}
|
|
||||||
{% block sb_contents %}<a
|
{% block sb_contents %}<a
|
||||||
{% if current_page == "contents" %}class="current-page"
|
{% if current_page == "contents" %}class="current-page"
|
||||||
{% else %}href="{{ url_for('lexicon.contents', lexicon_name=g.lexicon.name) }}"
|
{% else %}href="{{ url_for('lexicon.contents', name=g.lexicon.cfg.name) }}"
|
||||||
{% endif %}>Contents</a>{% endblock %}
|
{% endif %}>Contents</a>{% endblock %}
|
||||||
{% block sb_posts %}<a
|
|
||||||
{% if current_page == "posts" %}class="current-page"
|
|
||||||
{% else %}href="{{ url_for('lexicon.posts.list', lexicon_name=g.lexicon.name) }}"
|
|
||||||
{% endif %}>Posts{% set unread_count = postq.get_unread_count(g.db, current_membership.id) if current_membership else None %}{% if unread_count %} ({{ unread_count }}){% endif %}</a>{% endblock %}
|
|
||||||
{% block sb_rules %}<a
|
{% block sb_rules %}<a
|
||||||
{% if current_page == "rules" %}class="current-page"
|
{% if current_page == "rules" %}class="current-page"
|
||||||
{% else %}href="{{ url_for('lexicon.rules', lexicon_name=g.lexicon.name) }}"
|
{% else %}href="{{ url_for('lexicon.rules', name=g.lexicon.cfg.name) }}"
|
||||||
{% endif %}>Rules</a>{% endblock %}
|
{% endif %}>Rules</a>{% endblock %}
|
||||||
{% block sb_settings %}<a
|
{% block sb_session %}<a
|
||||||
{% if current_page == "settings" %}class="current-page"
|
{% if current_page == "session" %}class="current-page"
|
||||||
{% else %}href="{{ url_for('lexicon.settings.page', lexicon_name=g.lexicon.name) }}"
|
{% else %}href="{{ url_for('session.session', name=g.lexicon.cfg.name) }}"
|
||||||
{% endif %}>Settings</a>{% endblock %}
|
{% endif %}>Session</a>{% endblock %}
|
||||||
{% block sb_stats %}<a
|
{% block sb_stats %}<a
|
||||||
{% if current_page == "statistics" %}class="current-page"
|
{% if current_page == "statistics" %}class="current-page"
|
||||||
{% else %}href="{{ url_for('lexicon.stats', lexicon_name=g.lexicon.name) }}"
|
{% else %}href="{{ url_for('lexicon.stats', name=g.lexicon.cfg.name) }}"
|
||||||
{% endif %}>Statistics</a>{% endblock %}
|
{% endif %}>Statistics</a>{% endblock %}
|
||||||
|
|
||||||
|
{% if current_user.uid in g.lexicon.cfg.join.joined %}
|
||||||
|
{# self.sb_logo(), #}
|
||||||
{% set template_sidebar_rows = [
|
{% set template_sidebar_rows = [
|
||||||
self.sb_characters(),
|
self.sb_home(),
|
||||||
self.sb_contents(),
|
self.sb_contents(),
|
||||||
self.sb_posts(),
|
self.sb_rules(),
|
||||||
self.sb_rules(),
|
self.sb_session(),
|
||||||
self.sb_settings(),
|
self.sb_stats()] %}
|
||||||
self.sb_stats()] %}
|
{% else %}
|
||||||
|
{# self.sb_logo(), #}
|
||||||
|
{% set template_sidebar_rows = [
|
||||||
|
self.sb_home(),
|
||||||
|
self.sb_contents(),
|
||||||
|
self.sb_rules(),
|
||||||
|
self.sb_stats()] %}
|
||||||
|
{% endif %}
|
||||||
|
|
|
@ -1,92 +1,96 @@
|
||||||
from flask import Blueprint, flash, redirect, url_for, g, render_template, Markup
|
from flask import (
|
||||||
|
Blueprint,
|
||||||
|
flash,
|
||||||
|
redirect,
|
||||||
|
url_for,
|
||||||
|
g,
|
||||||
|
render_template,
|
||||||
|
Markup)
|
||||||
from flask_login import login_required, current_user
|
from flask_login import login_required, current_user
|
||||||
|
|
||||||
from amanuensis.backend import lexiq, memq
|
from amanuensis.lexicon import (
|
||||||
from amanuensis.db import DbContext, Lexicon, User
|
player_can_join_lexicon,
|
||||||
from amanuensis.errors import ArgumentError
|
add_player_to_lexicon,
|
||||||
from amanuensis.server.helpers import lexicon_param, player_required_if_not_public
|
sort_by_index_spec)
|
||||||
|
from amanuensis.models import LexiconModel
|
||||||
|
from amanuensis.server.helpers import (
|
||||||
|
lexicon_param,
|
||||||
|
player_required_if_not_public)
|
||||||
|
|
||||||
from .characters import bp as characters_bp
|
|
||||||
from .forms import LexiconJoinForm
|
from .forms import LexiconJoinForm
|
||||||
from .posts import bp as posts_bp
|
|
||||||
from .settings import bp as settings_bp
|
|
||||||
|
|
||||||
|
|
||||||
bp = Blueprint(
|
bp_lexicon = Blueprint('lexicon', __name__,
|
||||||
"lexicon", __name__, url_prefix="/lexicon/<lexicon_name>", template_folder="."
|
url_prefix='/lexicon/<name>',
|
||||||
)
|
template_folder='.')
|
||||||
bp.register_blueprint(characters_bp)
|
|
||||||
bp.register_blueprint(posts_bp)
|
|
||||||
bp.register_blueprint(settings_bp)
|
|
||||||
|
|
||||||
|
|
||||||
@bp.route("/join/", methods=["GET", "POST"])
|
@bp_lexicon.route("/join/", methods=['GET', 'POST'])
|
||||||
@lexicon_param
|
@lexicon_param
|
||||||
@login_required
|
@login_required
|
||||||
def join(lexicon_name):
|
def join(name):
|
||||||
lexicon: Lexicon = g.lexicon
|
if g.lexicon.status != LexiconModel.PREGAME:
|
||||||
if not lexicon.joinable:
|
flash("Can't join a game already in progress")
|
||||||
flash("This game isn't open for joining")
|
return redirect(url_for('home.home'))
|
||||||
return redirect(url_for("home.home"))
|
|
||||||
|
|
||||||
form = LexiconJoinForm()
|
if not g.lexicon.cfg.join.open:
|
||||||
|
flash("This game isn't open for joining")
|
||||||
|
return redirect(url_for('home.home'))
|
||||||
|
|
||||||
if not form.validate_on_submit():
|
form = LexiconJoinForm()
|
||||||
# GET or POST with invalid form data
|
|
||||||
return render_template(
|
|
||||||
"lexicon.join.jinja", lexicon_name=lexicon_name, form=form
|
|
||||||
)
|
|
||||||
|
|
||||||
# POST with valid data
|
if not form.validate_on_submit():
|
||||||
# If the game is passworded, check password
|
# GET or POST with invalid form data
|
||||||
db: DbContext = g.db
|
return render_template('lexicon.join.jinja', form=form)
|
||||||
if lexicon.join_password and not lexiq.password_check(
|
|
||||||
db, lexicon.id, form.password.data
|
|
||||||
):
|
|
||||||
# Bad creds, try again
|
|
||||||
flash("Incorrect password")
|
|
||||||
return redirect(url_for("lexicon.join", lexicon_name=lexicon_name))
|
|
||||||
|
|
||||||
# If the password was correct, check if the user can join
|
# POST with valid data
|
||||||
user: User = current_user
|
# If the game is passworded, check password
|
||||||
try:
|
if (g.lexicon.cfg.join.password
|
||||||
memq.create(db, user.id, lexicon.id, is_editor=False)
|
and form.password.data != g.lexicon.cfg.join.password):
|
||||||
return redirect(url_for("session.session", lexicon_name=lexicon_name))
|
# Bad creds, try again
|
||||||
except ArgumentError:
|
flash('Incorrect password')
|
||||||
flash("Could not join game")
|
return redirect(url_for('lexicon.join', name=name))
|
||||||
return redirect(url_for("home.home", lexicon_name=lexicon_name))
|
# If the password was correct, check if the user can join
|
||||||
|
if player_can_join_lexicon(current_user, g.lexicon, form.password.data):
|
||||||
|
add_player_to_lexicon(current_user, g.lexicon)
|
||||||
|
return redirect(url_for('session.session', name=name))
|
||||||
|
else:
|
||||||
|
flash('Could not join game')
|
||||||
|
return redirect(url_for('home.home', name=name))
|
||||||
|
|
||||||
|
|
||||||
@bp.get("/contents/")
|
@bp_lexicon.route('/contents/', methods=['GET'])
|
||||||
@lexicon_param
|
@lexicon_param
|
||||||
@player_required_if_not_public
|
@player_required_if_not_public
|
||||||
def contents(lexicon_name):
|
def contents(name):
|
||||||
# indexed = sort_by_index_spec(info, g.lexicon.cfg.article.index.list)
|
with g.lexicon.ctx.read('info') as info:
|
||||||
# for articles in indexed.values():
|
indexed = sort_by_index_spec(info, g.lexicon.cfg.article.index.list)
|
||||||
# for i in range(len(articles)):
|
for articles in indexed.values():
|
||||||
# articles[i] = {
|
for i in range(len(articles)):
|
||||||
# 'title': articles[i],
|
articles[i] = {
|
||||||
# **info.get(articles[i])}
|
'title': articles[i],
|
||||||
return render_template("lexicon.contents.jinja", lexicon_name=lexicon_name)
|
**info.get(articles[i])}
|
||||||
|
return render_template('lexicon.contents.jinja', indexed=indexed)
|
||||||
|
|
||||||
|
|
||||||
@bp.get("/article/<title>")
|
@bp_lexicon.route('/article/<title>')
|
||||||
@lexicon_param
|
@lexicon_param
|
||||||
@player_required_if_not_public
|
@player_required_if_not_public
|
||||||
def article(lexicon_name, title):
|
def article(name, title):
|
||||||
# article = {**a, 'html': Markup(a['html'])}
|
with g.lexicon.ctx.article.read(title) as a:
|
||||||
return render_template("lexicon.article.jinja", lexicon_name=lexicon_name)
|
article = {**a, 'html': Markup(a['html'])}
|
||||||
|
return render_template('lexicon.article.jinja', article=article)
|
||||||
|
|
||||||
|
|
||||||
@bp.get("/rules/")
|
@bp_lexicon.route('/rules/', methods=['GET'])
|
||||||
@lexicon_param
|
@lexicon_param
|
||||||
@player_required_if_not_public
|
@player_required_if_not_public
|
||||||
def rules(lexicon_name):
|
def rules(name):
|
||||||
return render_template("lexicon.rules.jinja", lexicon_name=lexicon_name)
|
return render_template('lexicon.rules.jinja')
|
||||||
|
|
||||||
|
|
||||||
@bp.get("/statistics/")
|
@bp_lexicon.route('/statistics/', methods=['GET'])
|
||||||
@lexicon_param
|
@lexicon_param
|
||||||
@player_required_if_not_public
|
@player_required_if_not_public
|
||||||
def stats(lexicon_name):
|
def stats(name):
|
||||||
return render_template("lexicon.statistics.jinja", lexicon_name=lexicon_name)
|
return render_template('lexicon.statistics.jinja')
|
||||||
|
|
|
@@ -1,83 +0,0 @@
|
||||||
from typing import Optional
|
|
||||||
import uuid
|
|
||||||
|
|
||||||
from flask import Blueprint, render_template, url_for, g, flash
|
|
||||||
from flask_login import current_user
|
|
||||||
from werkzeug.utils import redirect
|
|
||||||
|
|
||||||
from amanuensis.backend import charq
|
|
||||||
from amanuensis.db import Character
|
|
||||||
from amanuensis.server.helpers import lexicon_param, player_required
|
|
||||||
|
|
||||||
from .forms import CharacterCreateForm
|
|
||||||
|
|
||||||
|
|
||||||
bp = Blueprint("characters", __name__, url_prefix="/characters", template_folder=".")
|
|
||||||
|
|
||||||
|
|
||||||
@bp.get("/")
|
|
||||||
@lexicon_param
|
|
||||||
@player_required
|
|
||||||
def list(lexicon_name):
|
|
||||||
return render_template("characters.jinja", lexicon_name=lexicon_name)
|
|
||||||
|
|
||||||
|
|
||||||
@bp.route("/edit/<uuid:character_id>", methods=["GET", "POST"])
|
|
||||||
@lexicon_param
|
|
||||||
@player_required
|
|
||||||
def edit(lexicon_name, character_id: uuid.UUID):
|
|
||||||
character: Optional[Character] = charq.try_from_public_id(g.db, character_id)
|
|
||||||
if not character:
|
|
||||||
flash("Character not found")
|
|
||||||
return redirect(url_for("lexicon.characters.list", lexicon_name=lexicon_name))
|
|
||||||
|
|
||||||
form = CharacterCreateForm()
|
|
||||||
|
|
||||||
if not form.is_submitted():
|
|
||||||
# GET
|
|
||||||
form.name.data = character.name
|
|
||||||
form.signature.data = character.signature
|
|
||||||
return render_template(
|
|
||||||
"characters.edit.jinja",
|
|
||||||
lexicon_name=lexicon_name,
|
|
||||||
character=character,
|
|
||||||
form=form,
|
|
||||||
)
|
|
||||||
|
|
||||||
else:
|
|
||||||
# POST
|
|
||||||
if form.validate():
|
|
||||||
# Data is valid
|
|
||||||
character.name = form.name.data
|
|
||||||
character.signature = form.signature.data
|
|
||||||
g.db.session.commit() # TODO refactor into backend
|
|
||||||
return redirect(
|
|
||||||
url_for("lexicon.characters.list", lexicon_name=lexicon_name)
|
|
||||||
)
|
|
||||||
|
|
||||||
else:
|
|
||||||
# POST submitted invalid data
|
|
||||||
return render_template(
|
|
||||||
"characters.edit.jinja",
|
|
||||||
lexicon_name=lexicon_name,
|
|
||||||
character=character,
|
|
||||||
form=form,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@bp.get("/new/")
|
|
||||||
@lexicon_param
|
|
||||||
@player_required
|
|
||||||
def new(lexicon_name):
|
|
||||||
dummy_name = f"{current_user.username}'s new character"
|
|
||||||
dummy_signature = "~"
|
|
||||||
char = charq.create(
|
|
||||||
g.db, g.lexicon.id, current_user.id, dummy_name, dummy_signature
|
|
||||||
)
|
|
||||||
return redirect(
|
|
||||||
url_for(
|
|
||||||
"lexicon.characters.edit",
|
|
||||||
lexicon_name=lexicon_name,
|
|
||||||
character_id=char.public_id,
|
|
||||||
)
|
|
||||||
)
|
|
|
@@ -1,24 +0,0 @@
|
||||||
{% extends "lexicon.jinja" %}
|
|
||||||
{% block title %}Edit {{ character.name }} | {{ lexicon_title }}{% endblock %}
|
|
||||||
|
|
||||||
{% block main %}
|
|
||||||
<section>
|
|
||||||
<form action="" method="post" novalidate>
|
|
||||||
{{ form.hidden_tag() }}
|
|
||||||
<p>
|
|
||||||
{{ form.name.label }}<br>{{ form.name(size=32) }}
|
|
||||||
</p>
|
|
||||||
{% for error in form.name.errors %}
|
|
||||||
<span style="color: #ff0000">{{ error }}</span><br>
|
|
||||||
{% endfor %}
|
|
||||||
<p>
|
|
||||||
{{ form.signature.label }}<br>{{ form.signature(class_='fullwidth') }}
|
|
||||||
</p>
|
|
||||||
<p>{{ form.submit() }}</p>
|
|
||||||
</form>
|
|
||||||
|
|
||||||
{% for message in get_flashed_messages() %}
|
|
||||||
<span style="color:#ff0000">{{ message }}</span><br>
|
|
||||||
{% endfor %}
|
|
||||||
</section>
|
|
||||||
{% endblock %}
|
|
|
@@ -1,35 +0,0 @@
|
||||||
{% extends "lexicon.jinja" %}
|
|
||||||
{% set current_page = "characters" %}
|
|
||||||
{% block title %}Characters | {{ lexicon_title }}{% endblock %}
|
|
||||||
|
|
||||||
{% block main %}
|
|
||||||
<section>
|
|
||||||
<h1>Characters</h1>
|
|
||||||
{% set players = memq.get_players_in_lexicon(db, g.lexicon.id)|list %}
|
|
||||||
{% set characters = charq.get_in_lexicon(db, g.lexicon.id)|list %}
|
|
||||||
<p>This lexicon has <b>{{ players|count }}</b> player{% if players|count != 1 %}s{% endif %} and <b>{{ characters|count }}</b> character{% if characters|count != 1 %}s{% endif %}.</p>
|
|
||||||
{% for message in get_flashed_messages() %}
|
|
||||||
<span style="color:#ff0000">{{ message }}</span><br>
|
|
||||||
{% endfor %}
|
|
||||||
<ul class="blockitem-list">
|
|
||||||
{% if characters|map(attribute="user_id")|select("equalto", current_user.id)|list|count < g.lexicon.character_limit %}
|
|
||||||
<li>
|
|
||||||
<h3><a href="{{ url_for('lexicon.characters.new', lexicon_name=lexicon_name) }}">Create a new character</a></h3>
|
|
||||||
<p>You have created {{ characters|map(attribute="user_id")|select("equalto", current_user.id)|list|count }} out of {{ g.lexicon.character_limit }} allowed characters.</p>
|
|
||||||
</li>
|
|
||||||
{% endif %}
|
|
||||||
{% for character in characters %}
|
|
||||||
<li>
|
|
||||||
<h3>{{ character.name }}</h3>
|
|
||||||
{% if character.user == current_user %}
|
|
||||||
<pre>{{ character.signature }}</pre>
|
|
||||||
{% endif %}
|
|
||||||
<p>Player: {{ character.user.username }}</p>
|
|
||||||
{% if character.user == current_user %}
|
|
||||||
<p><a href="{{ url_for('lexicon.characters.edit', lexicon_name=lexicon_name, character_id=character.public_id) }}">Edit this character</a></p>
|
|
||||||
{% endif %}
|
|
||||||
</li>
|
|
||||||
{% endfor %}
|
|
||||||
</ul>
|
|
||||||
</section>
|
|
||||||
{% endblock %}
|
|
|
@@ -1,11 +0,0 @@
|
||||||
from flask_wtf import FlaskForm
|
|
||||||
from wtforms import StringField, SubmitField, TextAreaField
|
|
||||||
from wtforms.validators import DataRequired
|
|
||||||
|
|
||||||
|
|
||||||
class CharacterCreateForm(FlaskForm):
|
|
||||||
"""/lexicon/<name>/characters/edit/<character_id>"""
|
|
||||||
|
|
||||||
name = StringField("Character name", validators=[DataRequired()])
|
|
||||||
signature = TextAreaField("Signature")
|
|
||||||
submit = SubmitField("Submit")
|
|
|
@@ -3,7 +3,6 @@ from wtforms import StringField, SubmitField
|
||||||
|
|
||||||
|
|
||||||
class LexiconJoinForm(FlaskForm):
|
class LexiconJoinForm(FlaskForm):
|
||||||
"""/lexicon/<name>/join/"""
|
"""/lexicon/<name>/join/"""
|
||||||
|
password = StringField('Password')
|
||||||
password = StringField("Password")
|
submit = SubmitField('Submit')
|
||||||
submit = SubmitField("Submit")
|
|
||||||
|
|
|
@@ -2,24 +2,27 @@
|
||||||
{% block title %}{{ article.title }} | {{ lexicon_title }}{% endblock %}
|
{% block title %}{{ article.title }} | {{ lexicon_title }}{% endblock %}
|
||||||
|
|
||||||
{% block main %}
|
{% block main %}
|
||||||
<section>
|
|
||||||
{% for message in get_flashed_messages() %}
|
{% for message in get_flashed_messages() %}
|
||||||
<span style="color:#ff0000">{{ message }}</span><br>
|
<span style="color:#ff0000">{{ message }}</span><br>
|
||||||
{% endfor %}
|
{% endfor %}
|
||||||
|
|
||||||
<h1>{{ article.title }}</h1>
|
<h1>{{ article.title }}</h1>
|
||||||
{{ article.html }}
|
{{ article.html }}
|
||||||
</section>
|
|
||||||
<section>
|
|
||||||
<p>
|
|
||||||
{% for citation in article.cites %}
|
|
||||||
<a href="{{ citation|articlelink }}">{{ citation }}</a>{% if not loop.last %} / {% endif %}
|
|
||||||
{% endfor %}
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
{% for citation in article.citedby %}
|
|
||||||
<a href="{{ citation|articlelink }}">{{ citation }}</a>{% if not loop.last %} / {% endif %}
|
|
||||||
{% endfor %}
|
|
||||||
</p>
|
|
||||||
</section>
|
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
|
||||||
|
{% block citations %}
|
||||||
|
<p>
|
||||||
|
{% for citation in article.cites %}
|
||||||
|
<a href="{{ citation|articlelink }}">{{ citation }}</a>{% if not loop.last %} / {% endif %}
|
||||||
|
{% endfor %}
|
||||||
|
</p>
|
||||||
|
<p>
|
||||||
|
{% for citation in article.citedby %}
|
||||||
|
<a href="{{ citation|articlelink }}">{{ citation }}</a>{% if not loop.last %} / {% endif %}
|
||||||
|
{% endfor %}
|
||||||
|
</p>
|
||||||
|
{% endblock %}
|
||||||
|
|
||||||
|
{% set template_content_blocks = [self.main(), self.citations()] %}
|
|
@@ -3,7 +3,7 @@
|
||||||
{% block title %}Index | {{ lexicon_title }}{% endblock %}
|
{% block title %}Index | {{ lexicon_title }}{% endblock %}
|
||||||
|
|
||||||
{% block main %}
|
{% block main %}
|
||||||
<section>
|
|
||||||
{% for message in get_flashed_messages() %}
|
{% for message in get_flashed_messages() %}
|
||||||
<span style="color:#ff0000">{{ message }}</span><br>
|
<span style="color:#ff0000">{{ message }}</span><br>
|
||||||
{% endfor %}
|
{% endfor %}
|
||||||
|
@@ -14,11 +14,12 @@
|
||||||
<ul>
|
<ul>
|
||||||
{% for article in indexed[index] %}
|
{% for article in indexed[index] %}
|
||||||
<li><a href="{{ article.title|articlelink }}" class="{{ 'phantom' if not article.character else '' }}">
|
<li><a href="{{ article.title|articlelink }}" class="{{ 'phantom' if not article.character else '' }}">
|
||||||
{{ article.title }}
|
{{ article.title }}
|
||||||
</a></li>
|
</a></li>
|
||||||
{% endfor %}
|
{% endfor %}
|
||||||
</ul>
|
</ul>
|
||||||
{% endif %}
|
{% endif %}
|
||||||
{% endfor %}
|
{% endfor %}
|
||||||
</section>
|
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
{% set template_content_blocks = [self.main()] %}
|
|
@@ -1,20 +1,19 @@
|
||||||
{% extends "lexicon.jinja" %}
|
{% extends "lexicon.jinja" %}
|
||||||
{% block title %}Join | {{ g.lexicon.full_title }}{% endblock %}
|
{% block title %}Join | {{ lexicon_title }}{% endblock %}
|
||||||
|
|
||||||
{% block main %}
|
{% block main %}
|
||||||
<section>
|
|
||||||
<form id="lexicon-join" action="" method="post" novalidate>
|
<form id="lexicon-join" action="" method="post" novalidate>
|
||||||
{{ form.hidden_tag() }}
|
{{ form.hidden_tag() }}
|
||||||
{% if g.lexicon.join_password %}
|
{% if g.lexicon.cfg.join.password %}
|
||||||
<p>{{ form.password.label }}<br>{{ form.password(size=32) }}</p>
|
<p>{{ form.password.label }}<br>{{ form.password(size=32) }}</p>
|
||||||
{% else %}
|
{% endif %}
|
||||||
<p>Join {{ g.lexicon.full_title }}?</p>
|
<p>{{ form.submit() }}</p>
|
||||||
{% endif %}
|
|
||||||
<p>{{ form.submit() }}</p>
|
|
||||||
</form>
|
</form>
|
||||||
|
|
||||||
{% for message in get_flashed_messages() %}
|
{% for message in get_flashed_messages() %}
|
||||||
<span style="color:#ff0000">{{ message }}</span><br>
|
<span style="color:#ff0000">{{ message }}</span><br>
|
||||||
{% endfor %}
|
{% endfor %}
|
||||||
</section>
|
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
{% set template_content_blocks = [self.main()] %}
|
|
@@ -3,7 +3,8 @@
|
||||||
{% block title %}Rules | {{ lexicon_title }}{% endblock %}
|
{% block title %}Rules | {{ lexicon_title }}{% endblock %}
|
||||||
|
|
||||||
{% block main %}
|
{% block main %}
|
||||||
<section>
|
|
||||||
Placeholder text
|
Placeholder text
|
||||||
</section>
|
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
{% set template_content_blocks = [self.main()] %}
|
|
@@ -3,7 +3,8 @@
|
||||||
{% block title %}Session | {{ lexicon_title }}{% endblock %}
|
{% block title %}Session | {{ lexicon_title }}{% endblock %}
|
||||||
|
|
||||||
{% block main %}
|
{% block main %}
|
||||||
<section>
|
|
||||||
Placeholder text
|
Placeholder text
|
||||||
</section>
|
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
{% set template_content_blocks = [self.main()] %}
|
|
@@ -1,95 +0,0 @@
|
||||||
from flask import Blueprint, render_template, g
|
|
||||||
from flask.helpers import url_for
|
|
||||||
from flask_login import current_user
|
|
||||||
from werkzeug.utils import redirect
|
|
||||||
|
|
||||||
from amanuensis.backend import postq
|
|
||||||
from amanuensis.db import Post
|
|
||||||
from amanuensis.parser import RenderableVisitor, parse_raw_markdown
|
|
||||||
from amanuensis.parser.core import *
|
|
||||||
from amanuensis.server.helpers import (
|
|
||||||
lexicon_param,
|
|
||||||
player_required,
|
|
||||||
current_lexicon,
|
|
||||||
current_membership,
|
|
||||||
)
|
|
||||||
|
|
||||||
from .forms import CreatePostForm
|
|
||||||
|
|
||||||
|
|
||||||
bp = Blueprint("posts", __name__, url_prefix="/posts", template_folder=".")
|
|
||||||
|
|
||||||
|
|
||||||
class PostFormatter(RenderableVisitor):
|
|
||||||
"""Parses stylistic markdown into HTML without links."""
|
|
||||||
|
|
||||||
def TextSpan(self, span: TextSpan):
|
|
||||||
return span.innertext
|
|
||||||
|
|
||||||
def LineBreak(self, span: LineBreak):
|
|
||||||
return "<br>"
|
|
||||||
|
|
||||||
def ParsedArticle(self, span: ParsedArticle):
|
|
||||||
return "\n".join(span.recurse(self))
|
|
||||||
|
|
||||||
def BodyParagraph(self, span: BodyParagraph):
|
|
||||||
return f'<p>{"".join(span.recurse(self))}</p>'
|
|
||||||
|
|
||||||
def SignatureParagraph(self, span: SignatureParagraph):
|
|
||||||
return (
|
|
||||||
'<hr><span class="signature"><p>'
|
|
||||||
f'{"".join(span.recurse(self))}'
|
|
||||||
"</p></span>"
|
|
||||||
)
|
|
||||||
|
|
||||||
def BoldSpan(self, span: BoldSpan):
|
|
||||||
return f'<b>{"".join(span.recurse(self))}</b>'
|
|
||||||
|
|
||||||
def ItalicSpan(self, span: ItalicSpan):
|
|
||||||
return f'<i>{"".join(span.recurse(self))}</i>'
|
|
||||||
|
|
||||||
def CitationSpan(self, span: CitationSpan):
|
|
||||||
return "".join(span.recurse(self))
|
|
||||||
|
|
||||||
|
|
||||||
def render_post_body(post: Post) -> str:
|
|
||||||
"""Parse and render the body of a post into post-safe HTML."""
|
|
||||||
renderable: ParsedArticle = parse_raw_markdown(post.body)
|
|
||||||
rendered: str = renderable.render(PostFormatter())
|
|
||||||
return rendered
|
|
||||||
|
|
||||||
|
|
||||||
@bp.get("/")
|
|
||||||
@lexicon_param
|
|
||||||
@player_required
|
|
||||||
def list(lexicon_name):
|
|
||||||
form = CreatePostForm()
|
|
||||||
new_posts, old_posts = postq.get_posts_for_membership(g.db, current_membership.id)
|
|
||||||
return render_template(
|
|
||||||
"posts.jinja",
|
|
||||||
lexicon_name=lexicon_name,
|
|
||||||
form=form,
|
|
||||||
render_post_body=render_post_body,
|
|
||||||
new_posts=new_posts,
|
|
||||||
old_posts=old_posts,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@bp.post("/")
|
|
||||||
@lexicon_param
|
|
||||||
@player_required
|
|
||||||
def create(lexicon_name):
|
|
||||||
form = CreatePostForm()
|
|
||||||
if form.validate():
|
|
||||||
# Data is valid
|
|
||||||
postq.create(g.db, current_lexicon.id, current_user.id, form.body.data)
|
|
||||||
return redirect(url_for("lexicon.posts.list", lexicon_name=lexicon_name))
|
|
||||||
|
|
||||||
else:
|
|
||||||
# POST received invalid data
|
|
||||||
return render_template(
|
|
||||||
"posts.jinja",
|
|
||||||
lexicon_name=lexicon_name,
|
|
||||||
form=form,
|
|
||||||
render_post_body=render_post_body,
|
|
||||||
)
|
|
|
@@ -1,10 +0,0 @@
|
||||||
from flask_wtf import FlaskForm
|
|
||||||
from wtforms import SubmitField, TextAreaField
|
|
||||||
from wtforms.validators import DataRequired
|
|
||||||
|
|
||||||
|
|
||||||
class CreatePostForm(FlaskForm):
|
|
||||||
"""/lexicon/<name>/posts/"""
|
|
||||||
|
|
||||||
body = TextAreaField(validators=[DataRequired()])
|
|
||||||
submit = SubmitField("Post")
|
|
|
@@ -1,28 +0,0 @@
|
||||||
{% extends "lexicon.jinja" %}
|
|
||||||
{% set current_page = "posts" %}
|
|
||||||
{% block title %}Posts | {{ lexicon_title }}{% endblock %}
|
|
||||||
|
|
||||||
{% macro make_post(post, is_new) %}
|
|
||||||
<section{% if is_new %} class="new-post"{% endif %}>
|
|
||||||
<p>{{ render_post_body(post) }}</p>
|
|
||||||
<p class="post-byline">Posted {% if post.user_id %}by {{ post.user.display_name }} {% endif %}at {{ post.created }}</p>
|
|
||||||
</section>
|
|
||||||
{% endmacro %}
|
|
||||||
|
|
||||||
{% block main %}
|
|
||||||
{% if current_lexicon.allow_post %}
|
|
||||||
<section>
|
|
||||||
<form action="" method="post" novalidate>
|
|
||||||
{{ form.hidden_tag() }}
|
|
||||||
<p>{{ form.body(class_='fullwidth') }}</p>
|
|
||||||
<p>{{ form.submit() }}</p>
|
|
||||||
</form>
|
|
||||||
</section>
|
|
||||||
{% endif %}
|
|
||||||
{% for post in new_posts %}
|
|
||||||
{{ make_post(post, True) }}
|
|
||||||
{% endfor %}
|
|
||||||
{% for post in old_posts %}
|
|
||||||
{{ make_post(post, False) }}
|
|
||||||
{% endfor %}
|
|
||||||
{% endblock %}
|
|
|
@@ -1,292 +0,0 @@
|
||||||
from typing import Sequence
|
|
||||||
|
|
||||||
from flask import Blueprint, render_template, url_for, g, flash, redirect
|
|
||||||
|
|
||||||
from amanuensis.backend import *
|
|
||||||
from amanuensis.db import *
|
|
||||||
from amanuensis.server.helpers import (
|
|
||||||
editor_required,
|
|
||||||
lexicon_param,
|
|
||||||
player_required,
|
|
||||||
current_membership,
|
|
||||||
current_lexicon,
|
|
||||||
)
|
|
||||||
|
|
||||||
from .forms import (
|
|
||||||
PlayerSettingsForm,
|
|
||||||
SetupSettingsForm,
|
|
||||||
IndexSchemaForm,
|
|
||||||
IndexAssignmentsForm,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
bp = Blueprint("settings", __name__, url_prefix="/settings", template_folder=".")
|
|
||||||
|
|
||||||
|
|
||||||
@bp.get("/")
|
|
||||||
@lexicon_param
|
|
||||||
@player_required
|
|
||||||
def page(lexicon_name):
|
|
||||||
return redirect(url_for("lexicon.settings.player", lexicon_name=lexicon_name))
|
|
||||||
|
|
||||||
|
|
||||||
@bp.route("/player/", methods=["GET", "POST"])
|
|
||||||
@lexicon_param
|
|
||||||
@player_required
|
|
||||||
def player(lexicon_name):
|
|
||||||
form = PlayerSettingsForm()
|
|
||||||
mem: Membership = current_membership
|
|
||||||
|
|
||||||
if not form.is_submitted():
|
|
||||||
# GET
|
|
||||||
form.notify_ready.data = mem.notify_ready
|
|
||||||
form.notify_reject.data = mem.notify_reject
|
|
||||||
form.notify_approve.data = mem.notify_approve
|
|
||||||
return render_template(
|
|
||||||
"settings.jinja",
|
|
||||||
lexicon_name=lexicon_name,
|
|
||||||
page_name=player.__name__,
|
|
||||||
form=form,
|
|
||||||
)
|
|
||||||
|
|
||||||
else:
|
|
||||||
# POST
|
|
||||||
if form.validate():
|
|
||||||
# Data is valid
|
|
||||||
mem.notify_ready = form.notify_ready.data
|
|
||||||
mem.notify_reject = form.notify_reject.data
|
|
||||||
mem.notify_approve = form.notify_approve.data
|
|
||||||
g.db.session.commit() # TODO refactor into backend
|
|
||||||
flash("Settings saved")
|
|
||||||
return redirect(
|
|
||||||
url_for("lexicon.settings.player", lexicon_name=lexicon_name)
|
|
||||||
)
|
|
||||||
|
|
||||||
else:
|
|
||||||
# Invalid POST data
|
|
||||||
return render_template(
|
|
||||||
"settings.jinja",
|
|
||||||
lexicon_name=lexicon_name,
|
|
||||||
page_name=player.__name__,
|
|
||||||
form=form,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@bp.route("/setup/", methods=["GET", "POST"])
|
|
||||||
@lexicon_param
|
|
||||||
@editor_required
|
|
||||||
def setup(lexicon_name):
|
|
||||||
form = SetupSettingsForm()
|
|
||||||
lexicon: Lexicon = current_lexicon
|
|
||||||
|
|
||||||
if not form.is_submitted():
|
|
||||||
# GET
|
|
||||||
form.title.data = lexicon.title
|
|
||||||
form.prompt.data = lexicon.prompt
|
|
||||||
form.public.data = lexicon.public
|
|
||||||
form.joinable.data = lexicon.joinable
|
|
||||||
form.has_password.data = lexicon.join_password is not None
|
|
||||||
form.turn_count.data = lexicon.turn_count
|
|
||||||
form.player_limit.data = lexicon.player_limit
|
|
||||||
form.character_limit.data = lexicon.character_limit
|
|
||||||
form.allow_post.data = lexicon.allow_post
|
|
||||||
return render_template(
|
|
||||||
"settings.jinja",
|
|
||||||
lexicon_name=lexicon_name,
|
|
||||||
page_name=setup.__name__,
|
|
||||||
form=form,
|
|
||||||
)
|
|
||||||
|
|
||||||
else:
|
|
||||||
# POST
|
|
||||||
if form.validate():
|
|
||||||
# Data is valid
|
|
||||||
lexicon.title = form.title.data
|
|
||||||
lexicon.prompt = form.prompt.data
|
|
||||||
lexicon.public = form.public.data
|
|
||||||
lexicon.joinable = form.joinable.data
|
|
||||||
new_password = form.password.data if form.has_password.data else None
|
|
||||||
lexiq.password_set(g.db, lexicon.id, new_password)
|
|
||||||
lexicon.turn_count = form.turn_count.data
|
|
||||||
lexicon.player_limit = form.player_limit.data
|
|
||||||
lexicon.character_limit = form.character_limit.data
|
|
||||||
lexicon.allow_post = form.allow_post.data
|
|
||||||
g.db.session.commit() # TODO refactor into backend
|
|
||||||
flash("Settings saved")
|
|
||||||
return redirect(
|
|
||||||
url_for("lexicon.settings.setup", lexicon_name=lexicon_name)
|
|
||||||
)
|
|
||||||
|
|
||||||
else:
|
|
||||||
# Invalid POST data
|
|
||||||
return render_template(
|
|
||||||
"settings.jinja",
|
|
||||||
lexicon_name=lexicon_name,
|
|
||||||
page_name=setup.__name__,
|
|
||||||
form=form,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@bp.get("/index/")
|
|
||||||
@lexicon_param
|
|
||||||
@editor_required
|
|
||||||
def index(lexicon_name):
|
|
||||||
# Get the current indices
|
|
||||||
indices: Sequence[ArticleIndex] = indq.get_for_lexicon(g.db, current_lexicon.id)
|
|
||||||
index_data = [
|
|
||||||
{
|
|
||||||
"index_type": str(index.index_type),
|
|
||||||
"pattern": index.pattern,
|
|
||||||
"logical_order": index.logical_order,
|
|
||||||
"display_order": index.display_order,
|
|
||||||
"capacity": index.capacity,
|
|
||||||
}
|
|
||||||
for index in indices
|
|
||||||
]
|
|
||||||
# Add a blank index to allow for adding rules
|
|
||||||
index_data.append(
|
|
||||||
{
|
|
||||||
"index_type": "",
|
|
||||||
"pattern": None,
|
|
||||||
"logical_order": None,
|
|
||||||
"display_order": None,
|
|
||||||
"capacity": None,
|
|
||||||
}
|
|
||||||
)
|
|
||||||
form = IndexSchemaForm(indices=index_data)
|
|
||||||
return render_template(
|
|
||||||
"settings.jinja", lexicon_name=lexicon_name, page_name=index.__name__, form=form
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@bp.post("/index/")
|
|
||||||
@lexicon_param
|
|
||||||
@editor_required
|
|
||||||
def index_post(lexicon_name):
|
|
||||||
# Initialize the form
|
|
||||||
form = IndexSchemaForm()
|
|
||||||
if form.validate():
|
|
||||||
# Valid data, strip out all indices with the blank type
|
|
||||||
indices = [
|
|
||||||
ArticleIndex(
|
|
||||||
lexicon_id=current_lexicon.id,
|
|
||||||
index_type=index_def.index_type.data,
|
|
||||||
pattern=index_def.pattern.data,
|
|
||||||
logical_order=index_def.logical_order.data,
|
|
||||||
display_order=index_def.display_order.data,
|
|
||||||
capacity=index_def.capacity.data,
|
|
||||||
)
|
|
||||||
for index_def in form.indices.entries
|
|
||||||
if index_def.index_type.data
|
|
||||||
]
|
|
||||||
indq.update(g.db, current_lexicon.id, indices)
|
|
||||||
return redirect(url_for("lexicon.settings.index", lexicon_name=lexicon_name))
|
|
||||||
else:
|
|
||||||
# Invalid data
|
|
||||||
return render_template(
|
|
||||||
"settings.jinja",
|
|
||||||
lexicon_name=lexicon_name,
|
|
||||||
page_name=index.__name__,
|
|
||||||
form=form,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@bp.get("/assign/")
|
|
||||||
@lexicon_param
|
|
||||||
@editor_required
|
|
||||||
def assign(lexicon_name):
|
|
||||||
# Get the current assignments
|
|
||||||
rules: Sequence[ArticleIndexRule] = list(
|
|
||||||
irq.get_for_lexicon(g.db, current_lexicon.id)
|
|
||||||
)
|
|
||||||
rule_data = [
|
|
||||||
{
|
|
||||||
"turn": rule.turn,
|
|
||||||
"index": rule.index.name,
|
|
||||||
"character": str(rule.character.public_id),
|
|
||||||
}
|
|
||||||
for rule in rules
|
|
||||||
]
|
|
||||||
# Add a blank rule to allow for adding rules
|
|
||||||
rule_data.append(
|
|
||||||
{
|
|
||||||
"turn": 0,
|
|
||||||
"index": "",
|
|
||||||
"character": "",
|
|
||||||
}
|
|
||||||
)
|
|
||||||
form = IndexAssignmentsForm(rules=rule_data)
|
|
||||||
form.populate(current_lexicon)
|
|
||||||
return render_template(
|
|
||||||
"settings.jinja",
|
|
||||||
lexicon_name=lexicon_name,
|
|
||||||
page_name=assign.__name__,
|
|
||||||
form=form,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@bp.post("/assign/")
|
|
||||||
@lexicon_param
|
|
||||||
@editor_required
|
|
||||||
def assign_post(lexicon_name):
|
|
||||||
# Initialize the form
|
|
||||||
form = IndexAssignmentsForm()
|
|
||||||
form.populate(current_lexicon)
|
|
||||||
if form.validate():
|
|
||||||
# Valid data
|
|
||||||
indices = list(current_lexicon.indices)
|
|
||||||
characters = list(current_lexicon.characters)
|
|
||||||
rules = []
|
|
||||||
for rule_def in form.rules.entries:
|
|
||||||
# Strip out all assignments with no character
|
|
||||||
if not rule_def.character.data:
|
|
||||||
continue
|
|
||||||
# Look up the necessary ids from the public representations
|
|
||||||
character = [
|
|
||||||
c for c in characters if c.public_id == rule_def.character.data
|
|
||||||
]
|
|
||||||
if not character:
|
|
||||||
return redirect(
|
|
||||||
url_for("lexicon.settings.assign", lexicon_name=lexicon_name)
|
|
||||||
)
|
|
||||||
index = [i for i in indices if i.name == rule_def.index.data]
|
|
||||||
if not index:
|
|
||||||
return redirect(
|
|
||||||
url_for("lexicon.settings.assign", lexicon_name=lexicon_name)
|
|
||||||
)
|
|
||||||
rules.append(
|
|
||||||
ArticleIndexRule(
|
|
||||||
lexicon_id=current_lexicon.id,
|
|
||||||
character_id=character[0].id,
|
|
||||||
index_id=index[0].id,
|
|
||||||
turn=rule_def.turn.data,
|
|
||||||
)
|
|
||||||
)
|
|
||||||
irq.update(g.db, current_lexicon.id, rules)
|
|
||||||
return redirect(url_for("lexicon.settings.assign", lexicon_name=lexicon_name))
|
|
||||||
else:
|
|
||||||
# Invalid data
|
|
||||||
return render_template(
|
|
||||||
"settings.jinja",
|
|
||||||
lexicon_name=lexicon_name,
|
|
||||||
page_name=assign.__name__,
|
|
||||||
form=form,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@bp.get("/publish/")
|
|
||||||
@lexicon_param
|
|
||||||
@editor_required
|
|
||||||
def publish(lexicon_name):
|
|
||||||
return render_template(
|
|
||||||
"settings.jinja", lexicon_name=lexicon_name, page_name=publish.__name__
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@bp.get("/article/")
|
|
||||||
@lexicon_param
|
|
||||||
@editor_required
|
|
||||||
def article(lexicon_name):
|
|
||||||
return render_template(
|
|
||||||
"settings.jinja", lexicon_name=lexicon_name, page_name=article.__name__
|
|
||||||
)
|
|
|
@@ -1,129 +0,0 @@
|
||||||
import uuid
|
|
||||||
|
|
||||||
from flask_wtf import FlaskForm
|
|
||||||
from wtforms import (
|
|
||||||
BooleanField,
|
|
||||||
FieldList,
|
|
||||||
FormField,
|
|
||||||
IntegerField,
|
|
||||||
PasswordField,
|
|
||||||
SelectField,
|
|
||||||
StringField,
|
|
||||||
SubmitField,
|
|
||||||
TextAreaField,
|
|
||||||
)
|
|
||||||
from wtforms.validators import Optional, DataRequired, ValidationError
|
|
||||||
from wtforms.widgets.html5 import NumberInput
|
|
||||||
|
|
||||||
from amanuensis.db import IndexType, Lexicon
|
|
||||||
|
|
||||||
|
|
||||||
class PlayerSettingsForm(FlaskForm):
|
|
||||||
"""/lexicon/<name>/settings/player/"""
|
|
||||||
|
|
||||||
notify_ready = BooleanField("Notify me when an article is submitted for review")
|
|
||||||
notify_reject = BooleanField("Notify me when an editor rejects one of my articles")
|
|
||||||
notify_approve = BooleanField(
|
|
||||||
"Notify me when an editor approves one of my articles"
|
|
||||||
)
|
|
||||||
submit = SubmitField("Submit")
|
|
||||||
|
|
||||||
|
|
||||||
class SetupSettingsForm(FlaskForm):
|
|
||||||
"""/lexicon/<name>/settings/setup/"""
|
|
||||||
|
|
||||||
title = StringField("Title override")
|
|
||||||
prompt = TextAreaField("Prompt", validators=[DataRequired()])
|
|
||||||
public = BooleanField("Make game publicly visible")
|
|
||||||
joinable = BooleanField("Allow players to join game")
|
|
||||||
has_password = BooleanField("Require password to join the game")
|
|
||||||
password = PasswordField("Game password")
|
|
||||||
turn_count = IntegerField(
|
|
||||||
"Number of turns", widget=NumberInput(), validators=[DataRequired()]
|
|
||||||
)
|
|
||||||
player_limit = IntegerField(
|
|
||||||
"Maximum number of players", widget=NumberInput(), validators=[Optional()]
|
|
||||||
)
|
|
||||||
character_limit = IntegerField(
|
|
||||||
"Maximum number of characters per player",
|
|
||||||
widget=NumberInput(),
|
|
||||||
validators=[Optional()],
|
|
||||||
)
|
|
||||||
allow_post = BooleanField("Allow players to make posts")
|
|
||||||
submit = SubmitField("Submit")
|
|
||||||
|
|
||||||
|
|
||||||
def parse_index_type(type_str):
|
|
||||||
if not type_str:
|
|
||||||
return None
|
|
||||||
return getattr(IndexType, type_str)
|
|
||||||
|
|
||||||
|
|
||||||
class IndexDefinitionForm(FlaskForm):
|
|
||||||
"""/lexicon/<name>/settings/index/"""
|
|
||||||
|
|
||||||
class Meta:
|
|
||||||
# Disable CSRF on the individual index definitions, since the schema
|
|
||||||
# form will have one
|
|
||||||
csrf = False
|
|
||||||
|
|
||||||
TYPE_CHOICES = [("", "")] + [(str(t), str(t).lower()) for t in IndexType]
|
|
||||||
|
|
||||||
index_type = SelectField(choices=TYPE_CHOICES, coerce=parse_index_type)
|
|
||||||
pattern = StringField()
|
|
||||||
logical_order = IntegerField(
|
|
||||||
widget=NumberInput(min=-99, max=99), validators=[Optional()]
|
|
||||||
)
|
|
||||||
display_order = IntegerField(
|
|
||||||
widget=NumberInput(min=-99, max=99), validators=[Optional()]
|
|
||||||
)
|
|
||||||
capacity = IntegerField(widget=NumberInput(min=0, max=99), validators=[Optional()])
|
|
||||||
|
|
||||||
def validate_pattern(form, field):
|
|
||||||
if form.index_type.data and not field.data:
|
|
||||||
raise ValidationError("Pattern must be defined")
|
|
||||||
|
|
||||||
|
|
||||||
class IndexSchemaForm(FlaskForm):
|
|
||||||
"""/lexicon/<name>/settings/index/"""
|
|
||||||
|
|
||||||
indices = FieldList(FormField(IndexDefinitionForm))
|
|
||||||
submit = SubmitField("Submit")
|
|
||||||
|
|
||||||
|
|
||||||
def parse_uuid(uuid_str):
|
|
||||||
if not uuid_str:
|
|
||||||
return None
|
|
||||||
return uuid.UUID(uuid_str)
|
|
||||||
|
|
||||||
|
|
||||||
class AssignmentDefinitionForm(FlaskForm):
|
|
||||||
"""/lexicon/<name>/settings/assign/"""
|
|
||||||
|
|
||||||
class Meta:
|
|
||||||
# Disable CSRF on the individual assignment definitions, since the
|
|
||||||
# schema form will have one
|
|
||||||
csrf = False
|
|
||||||
|
|
||||||
turn = IntegerField(widget=NumberInput(min=0, max=99))
|
|
||||||
index = SelectField()
|
|
||||||
character = SelectField(coerce=parse_uuid)
|
|
||||||
|
|
||||||
|
|
||||||
class IndexAssignmentsForm(FlaskForm):
|
|
||||||
"""/lexicon/<name>/settings/assign/"""
|
|
||||||
|
|
||||||
rules = FieldList(FormField(AssignmentDefinitionForm))
|
|
||||||
submit = SubmitField("Submit")
|
|
||||||
|
|
||||||
def populate(self, lexicon: Lexicon):
|
|
||||||
"""Populate the select fields with indices and characters"""
|
|
||||||
index_choices = []
|
|
||||||
for i in lexicon.indices:
|
|
||||||
index_choices.append((i.name, i.pattern))
|
|
||||||
char_choices = [("", "")]
|
|
||||||
for c in lexicon.characters:
|
|
||||||
char_choices.append((str(c.public_id), c.name))
|
|
||||||
for rule in self.rules:
|
|
||||||
rule.index.choices = index_choices
|
|
||||||
rule.character.choices = char_choices
|
|
|
@@ -1,179 +0,0 @@
|
||||||
{% extends "lexicon.jinja" %}
|
|
||||||
{% set current_page = "settings" %}
|
|
||||||
{% block title %}Edit | {{ lexicon_title }}{% endblock %}
|
|
||||||
|
|
||||||
{% macro settings_page_link(page, text) -%}
|
|
||||||
<a{% if page_name != page %} href="{{ url_for('lexicon.settings.' + page, lexicon_name=lexicon_name) }}"{% endif %}>{{ text }}</a>
|
|
||||||
{%- endmacro %}
|
|
||||||
|
|
||||||
{% macro flag_setting(field) %}
|
|
||||||
{{ field() }}
|
|
||||||
{{ field.label }}<br>
|
|
||||||
{% endmacro %}
|
|
||||||
|
|
||||||
{% macro number_setting(field) %}
|
|
||||||
{{ field(autocomplete="off", class_="smallnumber") }}
|
|
||||||
{{ field.label }}<br>
|
|
||||||
{% for error in field.errors %}
|
|
||||||
<span style="color: #ff0000">{{ error }}</span><br>
|
|
||||||
{% endfor %}
|
|
||||||
{% endmacro %}
|
|
||||||
|
|
||||||
{% block main %}
|
|
||||||
<section>
|
|
||||||
{% if current_membership.is_editor %}
|
|
||||||
<ul class="unordered-tabs">
|
|
||||||
<li>{{ settings_page_link("player", "Player Settings") }}</li>
|
|
||||||
<li>{{ settings_page_link("setup", "Game Setup") }}</li>
|
|
||||||
<li>{{ settings_page_link("index", "Article Indices") }}</li>
|
|
||||||
<li>{{ settings_page_link("assign", "Index Assignments") }}</li>
|
|
||||||
<li>{{ settings_page_link("publish", "Turn Publishing") }}</li>
|
|
||||||
<li>{{ settings_page_link("article", "Article Requirements") }}</li>
|
|
||||||
</ul>
|
|
||||||
{% endif %}
|
|
||||||
|
|
||||||
{% if page_name == "player" %}
|
|
||||||
<h3>Player Settings</h3>
|
|
||||||
<p>These settings are specific to you as a player in this lexicon.</p>
|
|
||||||
<form action="" method="post" novalidate>
|
|
||||||
{{ form.hidden_tag() }}
|
|
||||||
<p>
|
|
||||||
{% if current_membership.is_editor %}{{ flag_setting(form.notify_ready) }}{% endif %}
|
|
||||||
{{ flag_setting(form.notify_reject) }}
|
|
||||||
{{ flag_setting(form.notify_approve) }}
|
|
||||||
</p>
|
|
||||||
<p>{{ form.submit() }}</p>
|
|
||||||
</form>
|
|
||||||
|
|
||||||
{% for message in get_flashed_messages() %}
|
|
||||||
<span style="color:#ff0000">{{ message }}</span><br>
|
|
||||||
{% endfor %}
|
|
||||||
{% endif %}
|
|
||||||
|
|
||||||
{% if page_name == "setup" %}
|
|
||||||
<h3>Game Setup</h3>
|
|
||||||
<form action="" method="post" novalidate>
|
|
||||||
{{ form.hidden_tag() }}
|
|
||||||
<p>
|
|
||||||
{{ form.title.label }}:<br>
|
|
||||||
{{ form.title(autocomplete="off", placeholder="Lexicon " + lexicon_name, class_="fullwidth") }}<br>
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
{{ form.prompt.label }}: {{ form.prompt(class_="fullwidth") }}
|
|
||||||
{% for error in form.prompt.errors %}
|
|
||||||
<span style="color: #ff0000">{{ error }}</span><br>
|
|
||||||
{% endfor %}
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
{{ flag_setting(form.public) }}
|
|
||||||
{{ flag_setting(form.joinable) }}
|
|
||||||
{{ form.has_password() }}
|
|
||||||
{{ form.has_password.label }}:<br>
|
|
||||||
{{ form.password(autocomplete="off") }}
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
{{ number_setting(form.turn_count) }}
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
{{ number_setting(form.player_limit) }}
|
|
||||||
</p>
|
|
||||||
<p>
|
|
||||||
{{ number_setting(form.character_limit) }}
|
|
||||||
</p>
|
|
||||||
<p>{{ flag_setting(form.allow_post) }}</p>
|
|
||||||
<p>{{ form.submit() }}</p>
|
|
||||||
</form>
|
|
||||||
{% for message in get_flashed_messages() %}
|
|
||||||
<span style="color:#ff0000">{{ message }}</span><br>
|
|
||||||
{% endfor %}
|
|
||||||
{% endif %}
|
|
||||||
|
|
||||||
{% if page_name == "index" %}
|
|
||||||
<h3>Article Indices</h3>
|
|
||||||
<details class="setting-help">
|
|
||||||
<summary>Index definition help</summary>
|
|
||||||
<p>An index is a rule that matches the title of a lexicon article based on its <em>index type</em> and <em>pattern</em>. A <em>char</em> index matches a title if the first letter of the title (excluding "A", "An", and "The") is one of the letters in the pattern. A <em>range</em> index has a pattern denoting a range of letters, such as "A-F", and matches a title if the first letter of the title is in the range. A <em>prefix</em> index matches any title that begins with the pattern. An <em>etc</em> index always matches a title.</p>
|
|
||||||
<p>When a title is to be sorted under an index, indices are checked in order, sorted first by descending order of <em>logical priority</em>, and then by alphabetical order of index pattern. The title is sorted under the first index that matches it.</p>
|
|
||||||
<p>On the contents page, indices and the articles under them are displayed sorted instead by <em>display order</em> and then alphabetically by pattern.</p>
|
|
||||||
<p>The <em>capacity</em> of an index is the number of articles that may exist under that index. If an index is at capacity, no new articles may be written or created via phantom citation in that index.</p>
|
|
||||||
<p>To add an index, fill in the type and pattern in the blank row and save your changes. To remove an index, set the type to blank. Note: If you change the type or pattern of an index, all index assignments will be reset. Avoid changing index definitions during gameplay.</p>
|
|
||||||
</details>
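
As a rough illustration of the matching and ordering rules described in the help text above, the logic might be sketched as follows. This is an illustrative example only, not the project's actual implementation; the Index, first_letter, and assign_index names are invented for the sketch, and logical_order is assumed to be the "logical priority" field shown in the form.

    # Sketch only: mirrors the char/range/prefix/etc rules from the help text.
    from dataclasses import dataclass
    from typing import Iterable, Optional

    LEADING_ARTICLES = ("a ", "an ", "the ")


    @dataclass
    class Index:
        index_type: str      # "char", "range", "prefix", or "etc"
        pattern: str
        logical_order: int   # assumed to be the "logical priority"


    def first_letter(title: str) -> str:
        """First letter of the title, skipping a leading "A", "An", or "The"."""
        lowered = title.lower()
        for article in LEADING_ARTICLES:
            if lowered.startswith(article):
                title = title[len(article):]
                break
        return title[:1].upper()


    def matches(index: Index, title: str) -> bool:
        """Does this index match the title, per its type and pattern?"""
        if index.index_type == "char":
            return first_letter(title) in index.pattern.upper()
        if index.index_type == "range":
            low, _, high = index.pattern.upper().partition("-")
            return low <= first_letter(title) <= high
        if index.index_type == "prefix":
            return title.lower().startswith(index.pattern.lower())
        return True  # "etc" matches every title


    def assign_index(title: str, indices: Iterable[Index]) -> Optional[Index]:
        """Sort a title under the first matching index, checking indices in
        descending order of logical priority, then alphabetically by pattern."""
        ordered = sorted(indices, key=lambda i: (-i.logical_order, i.pattern))
        for index in ordered:
            if matches(index, title):
                return index
        return None
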
|
|
||||||
<form action="" method="post" novalidate>
|
|
||||||
{{ form.hidden_tag() }}
|
|
||||||
<table id="index-definition-table">
|
|
||||||
<tr>
|
|
||||||
<th>Type</th>
|
|
||||||
<th>Pattern</th>
|
|
||||||
<th>Disp Or</th>
|
|
||||||
<th>Log Or</th>
|
|
||||||
<th>Cap</th>
|
|
||||||
</tr>
|
|
||||||
{% for index_form in form.indices %}
|
|
||||||
<tr>
|
|
||||||
<td>{{ index_form.index_type() }}</td>
|
|
||||||
<td>{{ index_form.pattern() }}</td>
|
|
||||||
<td>{{ index_form.logical_order() }}</td>
|
|
||||||
<td>{{ index_form.display_order() }}</td>
|
|
||||||
<td>{{ index_form.capacity() }}</td>
|
|
||||||
</tr>
|
|
||||||
{% for field in index_form %}
|
|
||||||
{% for error in field.errors %}
|
|
||||||
<tr>
|
|
||||||
<td colspan="5"><span style="color: #ff0000">{{ error }}</span></td>
|
|
||||||
</tr>
|
|
||||||
{% endfor %}
|
|
||||||
{% endfor %}
|
|
||||||
{% endfor %}
|
|
||||||
</table>
|
|
||||||
<p>{{ form.submit() }}</p>
|
|
||||||
</form>
|
|
||||||
{% for message in get_flashed_messages() %}
|
|
||||||
<span style="color:#ff0000">{{ message }}</span><br>
|
|
||||||
{% endfor %}
|
|
||||||
{% endif %}
|
|
||||||
|
|
||||||
{% if page_name == "assign" %}
|
|
||||||
<h3>Index Assignments</h3>
|
|
||||||
<details class="setting-help">
|
|
||||||
<summary>Index assignment help</summary>
|
|
||||||
<p>An index assignment is a rule that requires a player to write an article under certain indices for a particular turn. If more than one rule applies to a player, any index satisfying one of those rules is permitted. If no rule applies to a player, any index is permitted.</p>
|
|
||||||
</details>
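
A minimal sketch of the assignment rule stated above (invented names, not the actual backend code): a rule constrains one character on one turn, satisfying any one applicable rule is enough, and a character with no applicable rules may use any index.

    from dataclasses import dataclass
    from typing import Sequence


    @dataclass
    class AssignmentRule:
        character_id: int
        turn: int
        index_name: str


    def index_permitted(rules: Sequence[AssignmentRule], character_id: int,
                        turn: int, index_name: str) -> bool:
        """True if the character may write under this index on this turn."""
        applicable = [r for r in rules
                      if r.character_id == character_id and r.turn == turn]
        if not applicable:
            # No rule applies to this character this turn: any index is fine.
            return True
        # Otherwise the chosen index must satisfy at least one applicable rule.
        return any(r.index_name == index_name for r in applicable)
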
|
|
||||||
<form action="" method="post" novalidate>
|
|
||||||
{{ form.hidden_tag() }}
|
|
||||||
<table id="index-definition-table2">
|
|
||||||
<tr>
|
|
||||||
<th>Turn</th>
|
|
||||||
<th>Index</th>
|
|
||||||
<th>Character</th>
|
|
||||||
</tr>
|
|
||||||
{% for rule_form in form.rules %}
|
|
||||||
<tr>
|
|
||||||
<td>{{ rule_form.turn() }}</td>
|
|
||||||
<td>{{ rule_form.index() }}</td>
|
|
||||||
<td>{{ rule_form.character() }}</td>
|
|
||||||
</tr>
|
|
||||||
{% for field in rule_form %}
|
|
||||||
{% for error in field.errors %}
|
|
||||||
<tr>
|
|
||||||
<td colspan="5"><span style="color: #ff0000">{{ error }}</span></td>
|
|
||||||
</tr>
|
|
||||||
{% endfor %}
|
|
||||||
{% endfor %}
|
|
||||||
{% endfor %}
|
|
||||||
</table>
|
|
||||||
<p>{{ form.submit() }}</p>
|
|
||||||
</form>
|
|
||||||
{% for message in get_flashed_messages() %}
|
|
||||||
<span style="color:#ff0000">{{ message }}</span><br>
|
|
||||||
{% endfor %}
|
|
||||||
{% endif %}
|
|
||||||
|
|
||||||
{% if page_name == "publish" %}
|
|
||||||
<h3>Turn Publishing</h3>
|
|
||||||
{% endif %}
|
|
||||||
|
|
||||||
{% if page_name == "article" %}
|
|
||||||
<h3>Article Requirements</h3>
|
|
||||||
{% endif %}
|
|
||||||
</section>
|
|
||||||
{% endblock %}
|
|
|
@@ -1,47 +1,45 @@
|
||||||
{% macro dashboard_lexicon_item(lexicon) %}
|
{% macro dashboard_lexicon_item(lexicon) %}
|
||||||
{% set status = "completed" if lexicon.completed else "ongoing" if lexicon.started else "unstarted" %}
|
<div class="dashboard-lexicon-item dashboard-lexicon-{{ lexicon.status }}">
|
||||||
<div class="dashboard-lexicon-item dashboard-lexicon-{{ status }}">
|
<p>
|
||||||
<p>
|
<span class="dashboard-lexicon-item-title">
|
||||||
<span class="dashboard-lexicon-item-title">
|
<a href="{{ url_for('lexicon.contents', name=lexicon.cfg.name) }}">
|
||||||
<a href="{{ url_for('lexicon.contents', lexicon_name=lexicon.name) }}">{{ lexicon.full_title }}</a>
|
Lexicon {{ lexicon.cfg.name }}</a>
|
||||||
</span>
|
</span>
|
||||||
[{{ status.capitalize() }}]
|
[{{ lexicon.status.capitalize() }}]
|
||||||
</p>
|
</p>
|
||||||
<p><i>{{ lexicon.prompt }}</i></p>
|
<p><i>{{ lexicon.cfg.prompt }}</i></p>
|
||||||
{% if current_user.is_authenticated %}
|
{% if current_user.is_authenticated %}
|
||||||
<p>
|
<p>
|
||||||
{#-
|
{%
|
||||||
Show detailed player information if the current user is a member of the lexicon or if the current user is a site admin. The filter sequence must be converted to a list because it returns a generator, which is truthy.
|
if current_user.uid in lexicon.cfg.join.joined
|
||||||
-#}
|
or current_user.cfg.is_admin
|
||||||
{%-
|
%}
|
||||||
if lexicon.memberships|map(attribute="user_id")|select("equalto", current_user.id)|list
|
Editor: {{ lexicon.cfg.editor|user_attr('username') }} /
|
||||||
or current_user.is_site_admin
|
Players:
|
||||||
-%}
|
{% for uid in lexicon.cfg.join.joined %}
|
||||||
Editor: {{
|
{{ uid|user_attr('username') }}{% if not loop.last %}, {% endif %}
|
||||||
lexicon.memberships|selectattr("is_editor")|map(attribute="user")|map(attribute="username")|join(", ")
|
{% endfor %}
|
||||||
}} / Players: {{
|
({{ lexicon.cfg.join.joined|count }}/{{ lexicon.cfg.join.max_players }})
|
||||||
lexicon.memberships|map(attribute="user")|map(attribute="username")|join(", ")
|
{% else %}
|
||||||
}} ({{ lexicon.memberships|count }}
|
Players: {{ lexicon.cfg.join.joined|count }}/{{ lexicon.cfg.join.max_players }}
|
||||||
{%- if lexicon.player_limit is not none -%}
|
{% if lexicon.cfg.join.public and lexicon.cfg.join.open %}
|
||||||
/{{ lexicon.player_limit }}
|
/ <a href="{{ url_for('lexicon.join', name=lexicon.cfg.name) }}">
|
||||||
{%- endif -%})
|
Join game
|
||||||
{%- else -%}
|
</a>
|
||||||
Players: {{ lexicon.memberships|count }}{% if lexicon.player_limit is not none %} / {{ lexicon.player_limit }}{% endif -%}
|
{% endif %}
|
||||||
{%-
|
{% endif %}
|
||||||
if lexicon.public and lexicon.joinable
|
</p>
|
||||||
%} / <a href="{{ url_for('lexicon.join', lexicon_name=lexicon.name) }}">Join game</a>
|
{% endif %}
|
||||||
{%- endif -%}
|
|
||||||
{%- endif -%}
|
|
||||||
</p>
|
|
||||||
{% endif %}
|
|
||||||
</div>
|
</div>
|
||||||
{% endmacro %}
|
{% endmacro %}
|
||||||
|
|
||||||
{% macro dashboard_user_item(user) %}
|
{% macro dashboard_user_item(user) %}
|
||||||
<div class="dashboard-lexicon-item">
|
<div class="dashboard-lexicon-item">
|
||||||
<p>
|
<p>
|
||||||
<b>{{ user.username }}</b> {% if user.username != user.display_name %} / {{ user.display_name }}{% endif %} (id #{{user.id}}){% if user.is_site_admin %} <b>[ADMIN]</b>{% endif %}
|
<b>{{ user.cfg.username }}</b>
|
||||||
</p>
|
{% if user.cfg.username != user.cfg.displayname %} / {{ user.cfg.displayname }}{% endif %}
|
||||||
<p>Last activity: {{ user.last_activity|date }} — Last login: {{ user.last_login|date }}</p>
|
({{user.uid}})
|
||||||
|
</p>
|
||||||
|
<p>Last activity: {{ user.cfg.last_activity|asdate }} — Last login: {{ user.cfg.last_login|asdate }}</p>
|
||||||
</div>
|
</div>
|
||||||
{% endmacro %}
|
{% endmacro %}
|
|
@@ -1,34 +1,33 @@
|
||||||
<!DOCTYPE html>
|
<!DOCTYPE html>
|
||||||
<html>
|
<html>
|
||||||
<head>
|
<head>
|
||||||
<meta charset="utf-8"/>
|
<meta charset="utf-8"/>
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||||
<title>{% block title %}{% endblock %}</title>
|
<title>{% block title %}{% endblock %}</title>
|
||||||
<link rel="icon" type="image/png" href="{{ url_for('static', filename='amanuensis.png') }}">
|
<link rel="icon" type="image/png" href="{{ url_for('static', filename='amanuensis.png') }}">
|
||||||
<link rel="stylesheet" href="{{ url_for('static', filename='page.css') }}">
|
<link rel="stylesheet" href="{{ url_for("static", filename="page.css") }}">
|
||||||
</head>
|
</head>
|
||||||
<body>
|
<body>
|
||||||
<main>
|
<div id="wrapper">
|
||||||
<header>
|
<div id="header">
|
||||||
<div id="login-status" {% block login_status_attr %}{% endblock %}>
|
<div id="login-status" {% block login_status_attr %}{% endblock %}>
|
||||||
{% if current_user.is_authenticated %}
|
{% if current_user.is_authenticated %}
|
||||||
<b>{{ current_user.username -}}</b>
|
<b>{{ current_user.cfg.username -}}</b>
|
||||||
‧
|
(<a href="{{ url_for('auth.logout') }}">Logout</a>)
|
||||||
<a href="{{ url_for('home.home') }}">Home</a>
|
{% else %}
|
||||||
‧
|
<a href="{{ url_for('auth.login') }}">Login</a>
|
||||||
<a href="{{ url_for('auth.logout') }}">Logout</a>
|
{% endif %}
|
||||||
{% else %}
|
</div>
|
||||||
<a href="{{ url_for('home.home') }}">Home</a>
|
{% block header %}{% endblock %}
|
||||||
‧
|
</div>
|
||||||
<a href="{{ url_for('auth.login') }}">Login</a>
|
{% block sidebar %}{% endblock %}
|
||||||
{% endif %}
|
<div id="content" class="{% block content_class %}{% endblock %}">
|
||||||
</div>
|
{% if not template_content_blocks %}{% set template_content_blocks = [] %}{% endif %}
|
||||||
{% block header %}{% endblock %}
|
{% if not content_blocks %}{% set content_blocks = [] %}{% endif %}
|
||||||
</header>
|
{% for content_block in template_content_blocks + content_blocks %}<div class="contentblock">
|
||||||
{% block sidebar %}{% endblock %}
|
{{ content_block|safe }}</div>
|
||||||
<article class="{% block content_class %}{% endblock %}">
|
{% endfor %}
|
||||||
{% block main %}{% endblock %}
|
</div>
|
||||||
</article>
|
</div>
|
||||||
</main>
|
|
||||||
</body>
|
</body>
|
||||||
</html>
|
</html>
|
||||||
|
|
|
@@ -1,12 +1,12 @@
|
||||||
{% extends "page.jinja" %}
|
{% extends "page.jinja" %}
|
||||||
{% block sidebar %}
|
{% block sidebar %}
|
||||||
<nav>
|
<div id="sidebar">
|
||||||
{% if not template_sidebar_rows %}{% set template_sidebar_rows = [] %}{% endif %}
|
{% if not template_sidebar_rows %}{% set template_sidebar_rows = [] %}{% endif %}
|
||||||
{% if not sidebar_rows %}{% set sidebar_rows = [] %}{% endif %}
|
{% if not sidebar_rows %}{% set sidebar_rows = [] %}{% endif %}
|
||||||
<table>
|
<table>
|
||||||
{% for row in template_sidebar_rows + sidebar_rows %}
|
{% for row in template_sidebar_rows + sidebar_rows %}
|
||||||
<tr><td>{{ row|safe }}</td></tr>{% endfor %}
|
<tr><td>{{ row|safe }}</td></tr>{% endfor %}
|
||||||
</table>
|
</table>
|
||||||
</nav>
|
</div>
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
{% block content_class %}content-2col{% endblock %}
|
{% block content_class %}content-2col{% endblock %}
|
|
@@ -15,7 +15,9 @@ from amanuensis.lexicon import (
|
||||||
create_character_in_lexicon,
|
create_character_in_lexicon,
|
||||||
get_draft)
|
get_draft)
|
||||||
from amanuensis.models import LexiconModel
|
from amanuensis.models import LexiconModel
|
||||||
from amanuensis.parser import parse_raw_markdown
|
from amanuensis.parser import (
|
||||||
|
parse_raw_markdown,
|
||||||
|
PreviewHtmlRenderer)
|
||||||
from amanuensis.server.helpers import (
|
from amanuensis.server.helpers import (
|
||||||
lexicon_param,
|
lexicon_param,
|
||||||
player_required,
|
player_required,
|
||||||
|
@@ -27,7 +29,7 @@ from .forms import (
|
||||||
LexiconPublishTurnForm,
|
LexiconPublishTurnForm,
|
||||||
LexiconConfigForm)
|
LexiconConfigForm)
|
||||||
|
|
||||||
from .editor import load_editor, new_draft, update_draft, PreviewHtmlRenderer
|
from .editor import load_editor, new_draft, update_draft
|
||||||
|
|
||||||
|
|
||||||
bp_session = Blueprint('session', __name__,
|
bp_session = Blueprint('session', __name__,
|
||||||
|
@@ -68,6 +70,77 @@ def session(name):
|
||||||
publish_form=form)
|
publish_form=form)
|
||||||
|
|
||||||
|
|
||||||
|
def edit_character(name, form, character):
|
||||||
|
if not form.is_submitted():
|
||||||
|
# GET, populate with values
|
||||||
|
return render_template(
|
||||||
|
'session.character.jinja', form=form.for_character(character))
|
||||||
|
|
||||||
|
if not form.validate():
|
||||||
|
# POST with invalid data, return unchanged
|
||||||
|
return render_template('session.character.jinja', form=form)
|
||||||
|
|
||||||
|
# POST with valid data, update character
|
||||||
|
with g.lexicon.ctx.edit_config() as cfg:
|
||||||
|
char = cfg.character[character.cid]
|
||||||
|
char.name = form.characterName.data
|
||||||
|
char.signature = form.defaultSignature.data
|
||||||
|
flash('Character updated')
|
||||||
|
return redirect(url_for('session.session', name=name))
|
||||||
|
|
||||||
|
|
||||||
|
def create_character(name: str, form: LexiconCharacterForm):
|
||||||
|
# Characters can't be created if the game has already started
|
||||||
|
if g.lexicon.status != LexiconModel.PREGAME:
|
||||||
|
flash("Characters can't be added after the game has started")
|
||||||
|
return redirect(url_for('session.session', name=name))
|
||||||
|
# Characters can't be created beyond the per-player limit
|
||||||
|
player_characters = get_player_characters(g.lexicon, current_user.uid)
|
||||||
|
if len(list(player_characters)) >= g.lexicon.cfg.join.chars_per_player:
|
||||||
|
flash("Can't create more characters")
|
||||||
|
return redirect(url_for('session.session', name=name))
|
||||||
|
|
||||||
|
if not form.is_submitted():
|
||||||
|
# GET, populate with default values
|
||||||
|
return render_template(
|
||||||
|
'session.character.jinja', form=form.for_new())
|
||||||
|
|
||||||
|
if not form.validate():
|
||||||
|
# POST with invalid data, return unchanged
|
||||||
|
return render_template('session.character.jinja', form=form)
|
||||||
|
|
||||||
|
# POST with valid data, create character
|
||||||
|
char_name = form.characterName.data
|
||||||
|
cid = create_character_in_lexicon(current_user, g.lexicon, char_name)
|
||||||
|
with g.lexicon.ctx.edit_config() as cfg:
|
||||||
|
cfg.character[cid].signature = form.defaultSignature.data
|
||||||
|
flash('Character created')
|
||||||
|
return redirect(url_for('session.session', name=name))
|
||||||
|
|
||||||
|
|
||||||
|
@bp_session.route('/character/', methods=['GET', 'POST'])
|
||||||
|
@lexicon_param
|
||||||
|
@player_required
|
||||||
|
def character(name):
|
||||||
|
form = LexiconCharacterForm()
|
||||||
|
cid = request.args.get('cid')
|
||||||
|
if not cid:
|
||||||
|
# No character specified, creating a new character
|
||||||
|
return create_character(name, form)
|
||||||
|
|
||||||
|
character = g.lexicon.cfg.character.get(cid)
|
||||||
|
if not character:
|
||||||
|
# Bad character id, abort
|
||||||
|
flash('Character not found')
|
||||||
|
return redirect(url_for('session.session', name=name))
|
||||||
|
if current_user.uid not in (character.player, g.lexicon.cfg.editor):
|
||||||
|
# Only its owner and the editor can edit a character
|
||||||
|
flash('Access denied')
|
||||||
|
return redirect(url_for('session.session', name=name))
|
||||||
|
# Edit allowed
|
||||||
|
return edit_character(name, form, character)
|
||||||
|
|
||||||
|
|
||||||
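The routes above lean on a LexiconCharacterForm that exposes characterName and defaultSignature fields plus for_new() and for_character() population helpers. That form lives in .forms and is not shown in this diff; the following is only a hedged sketch of the shape the routes assume, not the project's actual definition.

# Hypothetical sketch of the form the character routes rely on.
# Field names and helpers are inferred from their usage above; the real
# class in .forms may differ.
from flask_wtf import FlaskForm
from wtforms import StringField, SubmitField, TextAreaField
from wtforms.validators import DataRequired

class LexiconCharacterForm(FlaskForm):
    characterName = StringField('Character name', validators=[DataRequired()])
    defaultSignature = TextAreaField('Default signature')
    submit = SubmitField('Submit')

    def for_new(self):
        # Populate with defaults for a brand-new character
        self.characterName.data = ''
        self.defaultSignature.data = ''
        return self

    def for_character(self, character):
        # Populate from an existing character's config entry
        self.characterName.data = character.name
        self.defaultSignature.data = character.signature
        return self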
@bp_session.route('/settings/', methods=['GET', 'POST'])
@lexicon_param
@editor_required
@ -17,56 +17,8 @@ from amanuensis.lexicon import (
from amanuensis.models import LexiconModel
from amanuensis.parser import (
    normalize_title,
    parse_raw_markdown)
    parse_raw_markdown,
from amanuensis.parser.core import RenderableVisitor
    PreviewHtmlRenderer)


class PreviewHtmlRenderer(RenderableVisitor):
    def __init__(self, lexicon):
        with lexicon.ctx.read('info') as info:
            self.article_map = {
                title: article.character
                for title, article in info.items()
            }
        self.citations = []
        self.contents = ""

    def TextSpan(self, span):
        return span.innertext

    def LineBreak(self, span):
        return '<br>'

    def ParsedArticle(self, span):
        self.contents = '\n'.join(span.recurse(self))
        return self

    def BodyParagraph(self, span):
        return f'<p>{"".join(span.recurse(self))}</p>'

    def SignatureParagraph(self, span):
        return (
            '<hr><span class="signature"><p>'
            f'{"".join(span.recurse(self))}'
            '</p></span>'
        )

    def BoldSpan(self, span):
        return f'<b>{"".join(span.recurse(self))}</b>'

    def ItalicSpan(self, span):
        return f'<i>{"".join(span.recurse(self))}</i>'

    def CitationSpan(self, span):
        if span.cite_target in self.article_map:
            if self.article_map.get(span.cite_target):
                link_class = '[extant]'
            else:
                link_class = '[phantom]'
        else:
            link_class = '[new]'
        self.citations.append(f'{span.cite_target} {link_class}')
        return f'<u>{"".join(span.recurse(self))}</u>[{len(self.citations)}]'


def load_editor(lexicon: LexiconModel, aid: str):
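The class removed above is the preview renderer that the editor now imports from amanuensis.parser instead of defining locally. How callers drive it is not shown in this hunk, so the sketch below is an assumption: it presumes parse_raw_markdown returns a renderable article that can be walked by a visitor through a render-style entry point, after which the renderer's contents and citations are read (as the ParsedArticle method suggests).

# Hedged sketch only: `render(visitor)` is a guess at the dispatch call,
# not an API confirmed by this diff.
from amanuensis.parser import parse_raw_markdown, PreviewHtmlRenderer

def preview_article(lexicon, raw_text: str):
    renderer = parse_raw_markdown(raw_text).render(PreviewHtmlRenderer(lexicon))
    return renderer.contents, renderer.citations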
@ -0,0 +1,26 @@
{% extends "lexicon.jinja" %}
{% block title %}Character | {{ lexicon_title }}{% endblock %}

{% block main %}

<h1>Character</h1>
<form action="" method="post" novalidate>
    {{ form.hidden_tag() }}
    <p>
        {{ form.characterName.label }}<br>{{ form.characterName(size=32) }}
    </p>
    {% for error in form.characterName.errors %}
    <span style="color: #ff0000">{{ error }}</span><br>
    {% endfor %}</p>
    <p>
        {{ form.defaultSignature.label }}<br>{{ form.defaultSignature(class_='fullwidth') }}
    </p>
    <p>{{ form.submit() }}</p>
</form>

{% for message in get_flashed_messages() %}
<span style="color:#ff0000">{{ message }}</span><br>
{% endfor %}

{% endblock %}
{% set template_content_blocks = [self.main()] %}
@ -34,7 +34,7 @@
<body>
    <div id="wrapper">
        <div id="editor-left" class="column">
            <section>
            <div class="contentblock">
                {# Thin header bar #}
                <div id="editor-header">
                    {# Header always includes backlink to lexicon #}
@ -103,16 +103,16 @@
{# #}{{ article.contents }}{#
#}</textarea>
            {% endif %}
            </section>
            </div>
        </div>
        <div id="editor-right" class="column">
            <section id="preview">
            <div id="preview" class="contentblock">
                <p>This editor requires Javascript to function.</p>
            </div>
            <section id="preview-citations">
            <div id="preview-citations" class="contentblock">
                <p> </p>
            </div>
            <section id="preview-control">
            <div id="preview-control" class="contentblock">
                <p> </p>
            </div>
        </div>
@ -45,21 +45,47 @@

<h3>General</h3>
<p>
    {{ form.title.label }}:<br>
    {{ form.title(autocomplete="off", size=32, style="width:100%") }}<br>
    {{ form.editor.label }}: {{ form.editor(autocomplete="off") }}<br>
    {% for error in form.editor.errors %}
    <span style="color: #ff0000">{{ error }}</span><br>
    {% endfor %}
    {{ form.prompt.label }}: {{ form.prompt(class_="fullwidth") }}
    {% for error in form.prompt.errors %}
    <span style="color: #ff0000">{{ error }}</span><br>
    {% endfor %}
</p>

<h3>Game Progress</h3>
<p>
    {{ number_setting(form.turnCurrent) }}
    {{ number_setting(form.turnMax) }}
    {{ form.articleIndexList.label }}:<br>
    {{ form.articleIndexList(class_="fullwidth", rows=10) }}
    {% for error in form.articleIndexList.errors %}
    <span style="color: #ff0000">{{ error }}</span><br>
    {% endfor %}
    {{ number_setting(form.articleIndexCapacity) }}
    {{ form.turnAssignment.label }}:<br>
    {{ form.turnAssignment(class_="fullwidth", rows=10) }}
</p>

<h3>Visibility and Joining</h3>
<p>
    {{ flag_setting(form.joinPublic) }}
    {{ flag_setting(form.joinOpen) }}
    {{ form.joinPassword(autocomplete="off") }}
    {{ form.joinPassword.label }}<br>
    {{ number_setting(form.joinMaxPlayers) }}
    {{ number_setting(form.joinCharsPerPlayer) }}
</p>

<h3>Turn Publishing</h3>
<p>
    {{ flag_setting(form.publishNotifyEditorOnReady) }}
    {{ flag_setting(form.publishNotifyPlayerOnReject) }}
    {{ flag_setting(form.publishNotifyPlayerOnAccept) }}
    {{ form.publishDeadlines(autocomplete="off") }}
    {{ form.publishDeadlines.label }}<br>
    {{ flag_setting(form.publishAsap) }}
@ -158,20 +158,65 @@ class Settings():
            if name.startswith('s_'):
                yield name, setting

    s_title = Setting('title',
        StringField('Title override', validators=[Optional()]))

    s_editor = Setting('editor',
        SelectField('Editor', validators=[DataRequired(), User(True)]),
        translator=UsernameTranslator())

    s_prompt = Setting('prompt',
        TextAreaField('Prompt', validators=[DataRequired()]))

    s_turnCurrent = Setting('turn.current',
        IntegerField(
            'Current turn',
            widget=NumberInput(),
            validators=[Optional()]))

    s_turnMax = Setting('turn.max',
        IntegerField(
            'Number of turns',
            widget=NumberInput(),
            validators=[DataRequired()]))

    s_turnAssignment = Setting('turn.assignment',
        TextAreaField('index assignment raw'),
        translator=TmpAsgnTranslator())

    s_joinPublic = Setting('join.public',
        BooleanField('Show game on public pages'))

    s_joinOpen = Setting('join.open',
        BooleanField('Allow players to join game'))

    s_joinPassword = Setting('join.password',
        StringField('Password to join game', validators=[Optional()]))

    s_joinMaxPlayers = Setting('join.max_players',
        IntegerField(
            'Maximum number of players',
            widget=NumberInput(),
            validators=[DataRequired()]))

    s_joinCharsPerPlayer = Setting('join.chars_per_player',
        IntegerField(
            'Characters per player',
            widget=NumberInput(),
            validators=[DataRequired()]))

    s_publishNotifyEditorOnReady = Setting('publish.notify_editor_on_ready',
        BooleanField(
            'Notify the editor when a player marks an article as ready'))

    s_publishNotifyPlayerOnReject = Setting('publish.notify_player_on_reject',
        BooleanField(
            'Notify a player when their article is rejected by the editor'))

    s_publishNotifyPlayerOnAccept = Setting('publish.notify_player_on_accept',
        BooleanField(
            'Notify a player when their article is accepted by the editor'))

    s_publishDeadlines = Setting('publish.deadlines',
        StringField(
            'Turn deadline, as a crontab specification',
@ -191,6 +236,18 @@ class Settings():
        BooleanField(
            'Block turn publish if any articles are awaiting editor review'))

    s_articleIndexList = Setting('article.index.list',
        TextAreaField(
            'Index specifications',
            validators=[IndexList]),
        translator=IndexListTranslator())

    s_articleIndexCapacity = Setting('article.index.capacity',
        IntegerField(
            'Index capacity override',
            widget=NumberInput(),
            validators=[Optional()]))

    s_articleCitationAllowSelf = Setting('article.citation.allow_self',
        BooleanField('Allow players to cite themselves'))
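The two context lines at the top of this hunk are the tail of the settings iterator: any class attribute whose name starts with s_ is yielded as a (name, setting) pair, which the settings template then renders field by field. A minimal standalone illustration of that enumeration pattern (not the project's code) is:

# Standalone sketch of the "s_* attribute" enumeration pattern used by the
# Settings class above; the attribute values here are placeholders.
class FakeSettings:
    s_title = 'title setting'
    s_turnMax = 'turn.max setting'

    def __iter__(self):
        # dir() walks all attribute names; only s_-prefixed ones are settings
        for name in dir(self):
            setting = getattr(self, name)
            if name.startswith('s_'):
                yield name, setting

for name, setting in FakeSettings():
    print(name, setting)
# s_title title setting
# s_turnMax turn.max setting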
@ -0,0 +1,12 @@
from .manage import load_all_users
from .signup import (
    create_user,
    valid_username,
    valid_email)

__all__ = [member.__name__ for member in [
    load_all_users,
    create_user,
    valid_username,
    valid_email,
]]
@ -0,0 +1,19 @@
"""
General functions for managing users
"""
from typing import Iterable

from amanuensis.config import RootConfigDirectoryContext
from amanuensis.models import ModelFactory, UserModel


def load_all_users(
        root: RootConfigDirectoryContext) -> Iterable[UserModel]:
    """
    Iterably loads every user in the config store
    """
    model_factory: ModelFactory = ModelFactory(root)
    with root.user.read_index() as index:
        for uid in index.values():
            user: UserModel = model_factory.user(uid)
            yield user
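A hedged usage sketch for load_all_users follows. It assumes the new module lands under amanuensis.user (the file paths are not shown in this diff) and that a config directory already exists at ./config.

# Assumptions: the new files live under amanuensis/user/, and UserModel
# exposes uid and its config as `cfg` (as the signup code below suggests).
from amanuensis.config import RootConfigDirectoryContext
from amanuensis.user import load_all_users

root = RootConfigDirectoryContext('./config')
for user in load_all_users(root):
    print(user.uid, user.cfg.username)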
@ -0,0 +1,77 @@
"""
Submodule encapsulating functionality pertaining to creating users in
an Amanuensis instance.
"""
import os
import re
import time
from typing import Tuple
import uuid

from amanuensis.config import RootConfigDirectoryContext
from amanuensis.errors import ArgumentError
from amanuensis.models import ModelFactory, UserModel
from amanuensis.resources import get_stream


def valid_username(username: str) -> bool:
    """
    A valid username is at least three characters long and composed solely of
    alphanumerics, dashes, and underscores. Additionally, usernames may not
    be 32 hex digits, since that may be confused for an internal id.
    """
    length_and_characters = re.match(r'^[A-Za-z0-9-_]{3,}$', username)
    is_a_guid = re.match(r'^[A-Za-z0-9]{32}$', username)
    return bool(length_and_characters and not is_a_guid)


def valid_email(email: str) -> bool:
    """Vaguely RFC2822 email verifier"""
    atom = r"[0-9A-Za-z!#$%&'*+-/=?^_`{|}~]{1,}"
    dotatom = atom + r"(\." + atom + r")*"
    addrspec = '^' + dotatom + '@' + dotatom + '$'
    return bool(re.match(addrspec, email))


def create_user(
        root: RootConfigDirectoryContext,
        model_factory: ModelFactory,
        username: str,
        displayname: str,
        email: str) -> Tuple[UserModel, str]:
    """
    Creates a new user
    """
    # Validate arguments
    if not valid_username(username):
        raise ArgumentError('Invalid username: "{}"'.format(username))
    if email and not valid_email(email):
        raise ArgumentError('Invalid email: "{}"'.format(email))

    # Create the user directory and config file
    uid: str = uuid.uuid4().hex
    user_dir: str = os.path.join(root.user.path, uid)
    os.mkdir(user_dir)
    with get_stream('user.json') as s:
        path: str = os.path.join(user_dir, 'config.json')
        with open(path, 'wb') as f:
            f.write(s.read())

    # Create the user index entry
    with root.user.edit_index() as index:
        index[username] = uid

    # Fill out the new user
    with root.user[uid].edit_config() as cfg:
        cfg.uid = uid
        cfg.username = username
        cfg.displayname = displayname
        cfg.email = email
        cfg.created = int(time.time())

    # Load the user model and set a temporary password
    temporary_password = os.urandom(32).hex()
    user = model_factory.user(uid)
    user.set_password(temporary_password)

    return user, temporary_password
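Putting the signup helpers together, here is a hedged end-to-end sketch. The amanuensis.user module path and the ./config directory are assumptions not confirmed by this diff; create_user itself performs the same validation internally and raises ArgumentError on bad input.

# Hedged usage sketch of the signup flow added above.
from amanuensis.config import RootConfigDirectoryContext
from amanuensis.models import ModelFactory
from amanuensis.user import create_user, valid_email, valid_username

root = RootConfigDirectoryContext('./config')   # assumed existing config dir
factory = ModelFactory(root)

username, email = 'new-player', 'player@example.com'
assert valid_username(username) and valid_email(email)

# Returns the loaded user model and the generated temporary password
user, temp_password = create_user(root, factory, username, 'New Player', email)
print(f'Created user {username}; temporary password: {temp_password}')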
@ -1,7 +0,0 @@
{ pkgs ? import <nixpkgs> {} }:

let
  app = pkgs.poetry2nix.mkPoetryApplication {
    projectDir = ./.;
  };
in app.dependencyEnv
mypy.ini
@ -1,4 +0,0 @@
[mypy]
ignore_missing_imports = true
exclude = "|amanuensis/lexicon/.*|amanuensis/server/.*|amanuensis/server/session/.*|"
; mypy stable doesn't support pyproject.toml yet
@ -1,722 +0,0 @@
|
||||||
[[package]]
|
|
||||||
name = "appdirs"
|
|
||||||
version = "1.4.4"
|
|
||||||
description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
|
|
||||||
category = "dev"
|
|
||||||
optional = false
|
|
||||||
python-versions = "*"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "atomicwrites"
|
|
||||||
version = "1.4.0"
|
|
||||||
description = "Atomic file writes."
|
|
||||||
category = "dev"
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "attrs"
|
|
||||||
version = "21.2.0"
|
|
||||||
description = "Classes Without Boilerplate"
|
|
||||||
category = "dev"
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
|
|
||||||
|
|
||||||
[package.extras]
|
|
||||||
dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit"]
|
|
||||||
docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"]
|
|
||||||
tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface"]
|
|
||||||
tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins"]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "beautifulsoup4"
|
|
||||||
version = "4.9.3"
|
|
||||||
description = "Screen-scraping library"
|
|
||||||
category = "dev"
|
|
||||||
optional = false
|
|
||||||
python-versions = "*"
|
|
||||||
|
|
||||||
[package.dependencies]
|
|
||||||
soupsieve = {version = ">1.2", markers = "python_version >= \"3.0\""}
|
|
||||||
|
|
||||||
[package.extras]
|
|
||||||
html5lib = ["html5lib"]
|
|
||||||
lxml = ["lxml"]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "black"
|
|
||||||
version = "21.6b0"
|
|
||||||
description = "The uncompromising code formatter."
|
|
||||||
category = "dev"
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=3.6.2"
|
|
||||||
|
|
||||||
[package.dependencies]
|
|
||||||
appdirs = "*"
|
|
||||||
click = ">=7.1.2"
|
|
||||||
mypy-extensions = ">=0.4.3"
|
|
||||||
pathspec = ">=0.8.1,<1"
|
|
||||||
regex = ">=2020.1.8"
|
|
||||||
toml = ">=0.10.1"
|
|
||||||
|
|
||||||
[package.extras]
|
|
||||||
colorama = ["colorama (>=0.4.3)"]
|
|
||||||
d = ["aiohttp (>=3.6.0)", "aiohttp-cors (>=0.4.0)"]
|
|
||||||
python2 = ["typed-ast (>=1.4.2)"]
|
|
||||||
uvloop = ["uvloop (>=0.15.2)"]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "bs4"
|
|
||||||
version = "0.0.1"
|
|
||||||
description = "Dummy package for Beautiful Soup"
|
|
||||||
category = "dev"
|
|
||||||
optional = false
|
|
||||||
python-versions = "*"
|
|
||||||
|
|
||||||
[package.dependencies]
|
|
||||||
beautifulsoup4 = "*"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "click"
|
|
||||||
version = "8.0.1"
|
|
||||||
description = "Composable command line interface toolkit"
|
|
||||||
category = "main"
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=3.6"
|
|
||||||
|
|
||||||
[package.dependencies]
|
|
||||||
colorama = {version = "*", markers = "platform_system == \"Windows\""}
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "colorama"
|
|
||||||
version = "0.4.4"
|
|
||||||
description = "Cross-platform colored terminal text."
|
|
||||||
category = "main"
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "flask"
|
|
||||||
version = "2.0.1"
|
|
||||||
description = "A simple framework for building complex web applications."
|
|
||||||
category = "main"
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=3.6"
|
|
||||||
|
|
||||||
[package.dependencies]
|
|
||||||
click = ">=7.1.2"
|
|
||||||
itsdangerous = ">=2.0"
|
|
||||||
Jinja2 = ">=3.0"
|
|
||||||
Werkzeug = ">=2.0"
|
|
||||||
|
|
||||||
[package.extras]
|
|
||||||
async = ["asgiref (>=3.2)"]
|
|
||||||
dotenv = ["python-dotenv"]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "flask-login"
|
|
||||||
version = "0.5.0"
|
|
||||||
description = "User session management for Flask"
|
|
||||||
category = "main"
|
|
||||||
optional = false
|
|
||||||
python-versions = "*"
|
|
||||||
|
|
||||||
[package.dependencies]
|
|
||||||
Flask = "*"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "flask-wtf"
|
|
||||||
version = "0.15.1"
|
|
||||||
description = "Simple integration of Flask and WTForms."
|
|
||||||
category = "main"
|
|
||||||
optional = false
|
|
||||||
python-versions = ">= 3.6"
|
|
||||||
|
|
||||||
[package.dependencies]
|
|
||||||
Flask = "*"
|
|
||||||
itsdangerous = "*"
|
|
||||||
WTForms = "*"
|
|
||||||
|
|
||||||
[package.extras]
|
|
||||||
email = ["email-validator"]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "greenlet"
|
|
||||||
version = "1.1.0"
|
|
||||||
description = "Lightweight in-process concurrent programming"
|
|
||||||
category = "main"
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*"
|
|
||||||
|
|
||||||
[package.extras]
|
|
||||||
docs = ["sphinx"]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "itsdangerous"
|
|
||||||
version = "2.0.1"
|
|
||||||
description = "Safely pass data to untrusted environments and back."
|
|
||||||
category = "main"
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=3.6"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "jinja2"
|
|
||||||
version = "3.0.1"
|
|
||||||
description = "A very fast and expressive template engine."
|
|
||||||
category = "main"
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=3.6"
|
|
||||||
|
|
||||||
[package.dependencies]
|
|
||||||
MarkupSafe = ">=2.0"
|
|
||||||
|
|
||||||
[package.extras]
|
|
||||||
i18n = ["Babel (>=2.7)"]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "markupsafe"
|
|
||||||
version = "2.0.1"
|
|
||||||
description = "Safely add untrusted strings to HTML/XML markup."
|
|
||||||
category = "main"
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=3.6"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "more-itertools"
|
|
||||||
version = "8.8.0"
|
|
||||||
description = "More routines for operating on iterables, beyond itertools"
|
|
||||||
category = "dev"
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=3.5"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "mypy"
|
|
||||||
version = "0.812"
|
|
||||||
description = "Optional static typing for Python"
|
|
||||||
category = "dev"
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=3.5"
|
|
||||||
|
|
||||||
[package.dependencies]
|
|
||||||
mypy-extensions = ">=0.4.3,<0.5.0"
|
|
||||||
typed-ast = ">=1.4.0,<1.5.0"
|
|
||||||
typing-extensions = ">=3.7.4"
|
|
||||||
|
|
||||||
[package.extras]
|
|
||||||
dmypy = ["psutil (>=4.0)"]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "mypy-extensions"
|
|
||||||
version = "0.4.3"
|
|
||||||
description = "Experimental type system extensions for programs checked with the mypy typechecker."
|
|
||||||
category = "dev"
|
|
||||||
optional = false
|
|
||||||
python-versions = "*"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "packaging"
|
|
||||||
version = "20.9"
|
|
||||||
description = "Core utilities for Python packages"
|
|
||||||
category = "dev"
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
|
|
||||||
|
|
||||||
[package.dependencies]
|
|
||||||
pyparsing = ">=2.0.2"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "pathspec"
|
|
||||||
version = "0.8.1"
|
|
||||||
description = "Utility library for gitignore style pattern matching of file paths."
|
|
||||||
category = "dev"
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "pluggy"
|
|
||||||
version = "0.13.1"
|
|
||||||
description = "plugin and hook calling mechanisms for python"
|
|
||||||
category = "dev"
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
|
|
||||||
|
|
||||||
[package.extras]
|
|
||||||
dev = ["pre-commit", "tox"]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "py"
|
|
||||||
version = "1.10.0"
|
|
||||||
description = "library with cross-python path, ini-parsing, io, code, log facilities"
|
|
||||||
category = "dev"
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "pyparsing"
|
|
||||||
version = "2.4.7"
|
|
||||||
description = "Python parsing module"
|
|
||||||
category = "dev"
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "pytest"
|
|
||||||
version = "5.4.3"
|
|
||||||
description = "pytest: simple powerful testing with Python"
|
|
||||||
category = "dev"
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=3.5"
|
|
||||||
|
|
||||||
[package.dependencies]
|
|
||||||
atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""}
|
|
||||||
attrs = ">=17.4.0"
|
|
||||||
colorama = {version = "*", markers = "sys_platform == \"win32\""}
|
|
||||||
more-itertools = ">=4.0.0"
|
|
||||||
packaging = "*"
|
|
||||||
pluggy = ">=0.12,<1.0"
|
|
||||||
py = ">=1.5.0"
|
|
||||||
wcwidth = "*"
|
|
||||||
|
|
||||||
[package.extras]
|
|
||||||
checkqa-mypy = ["mypy (==v0.761)"]
|
|
||||||
testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "regex"
|
|
||||||
version = "2021.4.4"
|
|
||||||
description = "Alternative regular expression module, to replace re."
|
|
||||||
category = "dev"
|
|
||||||
optional = false
|
|
||||||
python-versions = "*"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "soupsieve"
|
|
||||||
version = "2.2.1"
|
|
||||||
description = "A modern CSS selector implementation for Beautiful Soup."
|
|
||||||
category = "dev"
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=3.6"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "sqlalchemy"
|
|
||||||
version = "1.4.19"
|
|
||||||
description = "Database Abstraction Library"
|
|
||||||
category = "main"
|
|
||||||
optional = false
|
|
||||||
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
|
|
||||||
|
|
||||||
[package.dependencies]
|
|
||||||
greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\""}
|
|
||||||
|
|
||||||
[package.extras]
|
|
||||||
aiomysql = ["greenlet (!=0.4.17)", "aiomysql"]
|
|
||||||
aiosqlite = ["greenlet (!=0.4.17)", "aiosqlite"]
|
|
||||||
asyncio = ["greenlet (!=0.4.17)"]
|
|
||||||
mariadb_connector = ["mariadb (>=1.0.1)"]
|
|
||||||
mssql = ["pyodbc"]
|
|
||||||
mssql_pymssql = ["pymssql"]
|
|
||||||
mssql_pyodbc = ["pyodbc"]
|
|
||||||
mypy = ["sqlalchemy2-stubs", "mypy (>=0.800)"]
|
|
||||||
mysql = ["mysqlclient (>=1.4.0,<2)", "mysqlclient (>=1.4.0)"]
|
|
||||||
mysql_connector = ["mysqlconnector"]
|
|
||||||
oracle = ["cx_oracle (>=7,<8)", "cx_oracle (>=7)"]
|
|
||||||
postgresql = ["psycopg2 (>=2.7)"]
|
|
||||||
postgresql_asyncpg = ["greenlet (!=0.4.17)", "asyncpg"]
|
|
||||||
postgresql_pg8000 = ["pg8000 (>=1.16.6)"]
|
|
||||||
postgresql_psycopg2binary = ["psycopg2-binary"]
|
|
||||||
postgresql_psycopg2cffi = ["psycopg2cffi"]
|
|
||||||
pymysql = ["pymysql (<1)", "pymysql"]
|
|
||||||
sqlcipher = ["sqlcipher3-binary"]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "toml"
|
|
||||||
version = "0.10.2"
|
|
||||||
description = "Python Library for Tom's Obvious, Minimal Language"
|
|
||||||
category = "dev"
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "typed-ast"
|
|
||||||
version = "1.4.3"
|
|
||||||
description = "a fork of Python 2 and 3 ast modules with type comment support"
|
|
||||||
category = "dev"
|
|
||||||
optional = false
|
|
||||||
python-versions = "*"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "typing-extensions"
|
|
||||||
version = "3.10.0.0"
|
|
||||||
description = "Backported and Experimental Type Hints for Python 3.5+"
|
|
||||||
category = "dev"
|
|
||||||
optional = false
|
|
||||||
python-versions = "*"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "wcwidth"
|
|
||||||
version = "0.2.5"
|
|
||||||
description = "Measures the displayed width of unicode strings in a terminal"
|
|
||||||
category = "dev"
|
|
||||||
optional = false
|
|
||||||
python-versions = "*"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "werkzeug"
|
|
||||||
version = "2.0.1"
|
|
||||||
description = "The comprehensive WSGI web application library."
|
|
||||||
category = "main"
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=3.6"
|
|
||||||
|
|
||||||
[package.extras]
|
|
||||||
watchdog = ["watchdog"]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "wtforms"
|
|
||||||
version = "2.3.3"
|
|
||||||
description = "A flexible forms validation and rendering library for Python web development."
|
|
||||||
category = "main"
|
|
||||||
optional = false
|
|
||||||
python-versions = "*"
|
|
||||||
|
|
||||||
[package.dependencies]
|
|
||||||
MarkupSafe = "*"
|
|
||||||
|
|
||||||
[package.extras]
|
|
||||||
email = ["email-validator"]
|
|
||||||
ipaddress = ["ipaddress"]
|
|
||||||
locale = ["Babel (>=1.3)"]
|
|
||||||
|
|
||||||
[metadata]
|
|
||||||
lock-version = "1.1"
|
|
||||||
python-versions = "^3.8"
|
|
||||||
content-hash = "97e970853a3db968f05e70b83348d52d1a5aaed12a844b30cc15d039827233d4"
|
|
||||||
|
|
||||||
[metadata.files]
|
|
||||||
appdirs = [
|
|
||||||
{file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"},
|
|
||||||
{file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"},
|
|
||||||
]
|
|
||||||
atomicwrites = [
|
|
||||||
{file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"},
|
|
||||||
{file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"},
|
|
||||||
]
|
|
||||||
attrs = [
|
|
||||||
{file = "attrs-21.2.0-py2.py3-none-any.whl", hash = "sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1"},
|
|
||||||
{file = "attrs-21.2.0.tar.gz", hash = "sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb"},
|
|
||||||
]
|
|
||||||
beautifulsoup4 = [
|
|
||||||
{file = "beautifulsoup4-4.9.3-py2-none-any.whl", hash = "sha256:4c98143716ef1cb40bf7f39a8e3eec8f8b009509e74904ba3a7b315431577e35"},
|
|
||||||
{file = "beautifulsoup4-4.9.3-py3-none-any.whl", hash = "sha256:fff47e031e34ec82bf17e00da8f592fe7de69aeea38be00523c04623c04fb666"},
|
|
||||||
{file = "beautifulsoup4-4.9.3.tar.gz", hash = "sha256:84729e322ad1d5b4d25f805bfa05b902dd96450f43842c4e99067d5e1369eb25"},
|
|
||||||
]
|
|
||||||
black = [
|
|
||||||
{file = "black-21.6b0-py3-none-any.whl", hash = "sha256:dfb8c5a069012b2ab1e972e7b908f5fb42b6bbabcba0a788b86dc05067c7d9c7"},
|
|
||||||
{file = "black-21.6b0.tar.gz", hash = "sha256:dc132348a88d103016726fe360cb9ede02cecf99b76e3660ce6c596be132ce04"},
|
|
||||||
]
|
|
||||||
bs4 = [
|
|
||||||
{file = "bs4-0.0.1.tar.gz", hash = "sha256:36ecea1fd7cc5c0c6e4a1ff075df26d50da647b75376626cc186e2212886dd3a"},
|
|
||||||
]
|
|
||||||
click = [
|
|
||||||
{file = "click-8.0.1-py3-none-any.whl", hash = "sha256:fba402a4a47334742d782209a7c79bc448911afe1149d07bdabdf480b3e2f4b6"},
|
|
||||||
{file = "click-8.0.1.tar.gz", hash = "sha256:8c04c11192119b1ef78ea049e0a6f0463e4c48ef00a30160c704337586f3ad7a"},
|
|
||||||
]
|
|
||||||
colorama = [
|
|
||||||
{file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"},
|
|
||||||
{file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"},
|
|
||||||
]
|
|
||||||
flask = [
|
|
||||||
{file = "Flask-2.0.1-py3-none-any.whl", hash = "sha256:a6209ca15eb63fc9385f38e452704113d679511d9574d09b2cf9183ae7d20dc9"},
|
|
||||||
{file = "Flask-2.0.1.tar.gz", hash = "sha256:1c4c257b1892aec1398784c63791cbaa43062f1f7aeb555c4da961b20ee68f55"},
|
|
||||||
]
|
|
||||||
flask-login = [
|
|
||||||
{file = "Flask-Login-0.5.0.tar.gz", hash = "sha256:6d33aef15b5bcead780acc339464aae8a6e28f13c90d8b1cf9de8b549d1c0b4b"},
|
|
||||||
{file = "Flask_Login-0.5.0-py2.py3-none-any.whl", hash = "sha256:7451b5001e17837ba58945aead261ba425fdf7b4f0448777e597ddab39f4fba0"},
|
|
||||||
]
|
|
||||||
flask-wtf = [
|
|
||||||
{file = "Flask-WTF-0.15.1.tar.gz", hash = "sha256:ff177185f891302dc253437fe63081e7a46a4e99aca61dfe086fb23e54fff2dc"},
|
|
||||||
{file = "Flask_WTF-0.15.1-py2.py3-none-any.whl", hash = "sha256:6ff7af73458f182180906a37a783e290bdc8a3817fe4ad17227563137ca285bf"},
|
|
||||||
]
|
|
||||||
greenlet = [
|
|
||||||
{file = "greenlet-1.1.0-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:60848099b76467ef09b62b0f4512e7e6f0a2c977357a036de602b653667f5f4c"},
|
|
||||||
{file = "greenlet-1.1.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:f42ad188466d946f1b3afc0a9e1a266ac8926461ee0786c06baac6bd71f8a6f3"},
|
|
||||||
{file = "greenlet-1.1.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:76ed710b4e953fc31c663b079d317c18f40235ba2e3d55f70ff80794f7b57922"},
|
|
||||||
{file = "greenlet-1.1.0-cp27-cp27m-win32.whl", hash = "sha256:b33b51ab057f8a20b497ffafdb1e79256db0c03ef4f5e3d52e7497200e11f821"},
|
|
||||||
{file = "greenlet-1.1.0-cp27-cp27m-win_amd64.whl", hash = "sha256:ed1377feed808c9c1139bdb6a61bcbf030c236dd288d6fca71ac26906ab03ba6"},
|
|
||||||
{file = "greenlet-1.1.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:da862b8f7de577bc421323714f63276acb2f759ab8c5e33335509f0b89e06b8f"},
|
|
||||||
{file = "greenlet-1.1.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:5f75e7f237428755d00e7460239a2482fa7e3970db56c8935bd60da3f0733e56"},
|
|
||||||
{file = "greenlet-1.1.0-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:258f9612aba0d06785143ee1cbf2d7361801c95489c0bd10c69d163ec5254a16"},
|
|
||||||
{file = "greenlet-1.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d928e2e3c3906e0a29b43dc26d9b3d6e36921eee276786c4e7ad9ff5665c78a"},
|
|
||||||
{file = "greenlet-1.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cc407b68e0a874e7ece60f6639df46309376882152345508be94da608cc0b831"},
|
|
||||||
{file = "greenlet-1.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c557c809eeee215b87e8a7cbfb2d783fb5598a78342c29ade561440abae7d22"},
|
|
||||||
{file = "greenlet-1.1.0-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:3d13da093d44dee7535b91049e44dd2b5540c2a0e15df168404d3dd2626e0ec5"},
|
|
||||||
{file = "greenlet-1.1.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:b3090631fecdf7e983d183d0fad7ea72cfb12fa9212461a9b708ff7907ffff47"},
|
|
||||||
{file = "greenlet-1.1.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:06ecb43b04480e6bafc45cb1b4b67c785e183ce12c079473359e04a709333b08"},
|
|
||||||
{file = "greenlet-1.1.0-cp35-cp35m-win32.whl", hash = "sha256:944fbdd540712d5377a8795c840a97ff71e7f3221d3fddc98769a15a87b36131"},
|
|
||||||
{file = "greenlet-1.1.0-cp35-cp35m-win_amd64.whl", hash = "sha256:c767458511a59f6f597bfb0032a1c82a52c29ae228c2c0a6865cfeaeaac4c5f5"},
|
|
||||||
{file = "greenlet-1.1.0-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:2325123ff3a8ecc10ca76f062445efef13b6cf5a23389e2df3c02a4a527b89bc"},
|
|
||||||
{file = "greenlet-1.1.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:598bcfd841e0b1d88e32e6a5ea48348a2c726461b05ff057c1b8692be9443c6e"},
|
|
||||||
{file = "greenlet-1.1.0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:be9768e56f92d1d7cd94185bab5856f3c5589a50d221c166cc2ad5eb134bd1dc"},
|
|
||||||
{file = "greenlet-1.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfe7eac0d253915116ed0cd160a15a88981a1d194c1ef151e862a5c7d2f853d3"},
|
|
||||||
{file = "greenlet-1.1.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a6b035aa2c5fcf3dbbf0e3a8a5bc75286fc2d4e6f9cfa738788b433ec894919"},
|
|
||||||
{file = "greenlet-1.1.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca1c4a569232c063615f9e70ff9a1e2fee8c66a6fb5caf0f5e8b21a396deec3e"},
|
|
||||||
{file = "greenlet-1.1.0-cp36-cp36m-win32.whl", hash = "sha256:3096286a6072553b5dbd5efbefc22297e9d06a05ac14ba017233fedaed7584a8"},
|
|
||||||
{file = "greenlet-1.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:c35872b2916ab5a240d52a94314c963476c989814ba9b519bc842e5b61b464bb"},
|
|
||||||
{file = "greenlet-1.1.0-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:b97c9a144bbeec7039cca44df117efcbeed7209543f5695201cacf05ba3b5857"},
|
|
||||||
{file = "greenlet-1.1.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:16183fa53bc1a037c38d75fdc59d6208181fa28024a12a7f64bb0884434c91ea"},
|
|
||||||
{file = "greenlet-1.1.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6b1d08f2e7f2048d77343279c4d4faa7aef168b3e36039cba1917fffb781a8ed"},
|
|
||||||
{file = "greenlet-1.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14927b15c953f8f2d2a8dffa224aa78d7759ef95284d4c39e1745cf36e8cdd2c"},
|
|
||||||
{file = "greenlet-1.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9bdcff4b9051fb1aa4bba4fceff6a5f770c6be436408efd99b76fc827f2a9319"},
|
|
||||||
{file = "greenlet-1.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c70c7dd733a4c56838d1f1781e769081a25fade879510c5b5f0df76956abfa05"},
|
|
||||||
{file = "greenlet-1.1.0-cp37-cp37m-win32.whl", hash = "sha256:0de64d419b1cb1bfd4ea544bedea4b535ef3ae1e150b0f2609da14bbf48a4a5f"},
|
|
||||||
{file = "greenlet-1.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:8833e27949ea32d27f7e96930fa29404dd4f2feb13cce483daf52e8842ec246a"},
|
|
||||||
{file = "greenlet-1.1.0-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:c1580087ab493c6b43e66f2bdd165d9e3c1e86ef83f6c2c44a29f2869d2c5bd5"},
|
|
||||||
{file = "greenlet-1.1.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:ad80bb338cf9f8129c049837a42a43451fc7c8b57ad56f8e6d32e7697b115505"},
|
|
||||||
{file = "greenlet-1.1.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:a9017ff5fc2522e45562882ff481128631bf35da444775bc2776ac5c61d8bcae"},
|
|
||||||
{file = "greenlet-1.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7920e3eccd26b7f4c661b746002f5ec5f0928076bd738d38d894bb359ce51927"},
|
|
||||||
{file = "greenlet-1.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:408071b64e52192869129a205e5b463abda36eff0cebb19d6e63369440e4dc99"},
|
|
||||||
{file = "greenlet-1.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be13a18cec649ebaab835dff269e914679ef329204704869f2f167b2c163a9da"},
|
|
||||||
{file = "greenlet-1.1.0-cp38-cp38-win32.whl", hash = "sha256:22002259e5b7828b05600a762579fa2f8b33373ad95a0ee57b4d6109d0e589ad"},
|
|
||||||
{file = "greenlet-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:206295d270f702bc27dbdbd7651e8ebe42d319139e0d90217b2074309a200da8"},
|
|
||||||
{file = "greenlet-1.1.0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:096cb0217d1505826ba3d723e8981096f2622cde1eb91af9ed89a17c10aa1f3e"},
|
|
||||||
{file = "greenlet-1.1.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:03f28a5ea20201e70ab70518d151116ce939b412961c33827519ce620957d44c"},
|
|
||||||
{file = "greenlet-1.1.0-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:7db68f15486d412b8e2cfcd584bf3b3a000911d25779d081cbbae76d71bd1a7e"},
|
|
||||||
{file = "greenlet-1.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70bd1bb271e9429e2793902dfd194b653221904a07cbf207c3139e2672d17959"},
|
|
||||||
{file = "greenlet-1.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f92731609d6625e1cc26ff5757db4d32b6b810d2a3363b0ff94ff573e5901f6f"},
|
|
||||||
{file = "greenlet-1.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06d7ac89e6094a0a8f8dc46aa61898e9e1aec79b0f8b47b2400dd51a44dbc832"},
|
|
||||||
{file = "greenlet-1.1.0-cp39-cp39-win32.whl", hash = "sha256:adb94a28225005890d4cf73648b5131e885c7b4b17bc762779f061844aabcc11"},
|
|
||||||
{file = "greenlet-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:aa4230234d02e6f32f189fd40b59d5a968fe77e80f59c9c933384fe8ba535535"},
|
|
||||||
{file = "greenlet-1.1.0.tar.gz", hash = "sha256:c87df8ae3f01ffb4483c796fe1b15232ce2b219f0b18126948616224d3f658ee"},
|
|
||||||
]
|
|
||||||
itsdangerous = [
|
|
||||||
{file = "itsdangerous-2.0.1-py3-none-any.whl", hash = "sha256:5174094b9637652bdb841a3029700391451bd092ba3db90600dea710ba28e97c"},
|
|
||||||
{file = "itsdangerous-2.0.1.tar.gz", hash = "sha256:9e724d68fc22902a1435351f84c3fb8623f303fffcc566a4cb952df8c572cff0"},
|
|
||||||
]
|
|
||||||
jinja2 = [
|
|
||||||
{file = "Jinja2-3.0.1-py3-none-any.whl", hash = "sha256:1f06f2da51e7b56b8f238affdd6b4e2c61e39598a378cc49345bc1bd42a978a4"},
|
|
||||||
{file = "Jinja2-3.0.1.tar.gz", hash = "sha256:703f484b47a6af502e743c9122595cc812b0271f661722403114f71a79d0f5a4"},
|
|
||||||
]
|
|
||||||
markupsafe = [
|
|
||||||
{file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"},
|
|
||||||
{file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff"},
|
|
||||||
{file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b"},
|
|
||||||
{file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94"},
|
|
||||||
{file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"},
|
|
||||||
{file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f"},
|
|
||||||
{file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"},
|
|
||||||
{file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"},
|
|
||||||
{file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"},
|
|
||||||
{file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:6557b31b5e2c9ddf0de32a691f2312a32f77cd7681d8af66c2692efdbef84c18"},
|
|
||||||
{file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:49e3ceeabbfb9d66c3aef5af3a60cc43b85c33df25ce03d0031a608b0a8b2e3f"},
|
|
||||||
{file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f"},
|
|
||||||
{file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2"},
|
|
||||||
{file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d"},
|
|
||||||
{file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"},
|
|
||||||
{file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"},
|
|
||||||
{file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066"},
|
|
||||||
{file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35"},
|
|
||||||
{file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b"},
|
|
||||||
{file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298"},
|
|
||||||
{file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75"},
|
|
||||||
{file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb"},
|
|
||||||
{file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"},
|
|
||||||
{file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"},
|
|
||||||
{file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"},
|
|
||||||
{file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3c112550557578c26af18a1ccc9e090bfe03832ae994343cfdacd287db6a6ae7"},
|
|
||||||
{file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:53edb4da6925ad13c07b6d26c2a852bd81e364f95301c66e930ab2aef5b5ddd8"},
|
|
||||||
{file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f5653a225f31e113b152e56f154ccbe59eeb1c7487b39b9d9f9cdb58e6c79dc5"},
|
|
||||||
{file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135"},
|
|
||||||
{file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902"},
|
|
||||||
{file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509"},
|
|
||||||
{file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"},
|
|
||||||
{file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"},
|
|
||||||
{file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"},
|
|
||||||
]
|
|
||||||
more-itertools = [
|
|
||||||
{file = "more-itertools-8.8.0.tar.gz", hash = "sha256:83f0308e05477c68f56ea3a888172c78ed5d5b3c282addb67508e7ba6c8f813a"},
|
|
||||||
{file = "more_itertools-8.8.0-py3-none-any.whl", hash = "sha256:2cf89ec599962f2ddc4d568a05defc40e0a587fbc10d5989713638864c36be4d"},
|
|
||||||
]
|
|
||||||
mypy = [
|
|
||||||
{file = "mypy-0.812-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:a26f8ec704e5a7423c8824d425086705e381b4f1dfdef6e3a1edab7ba174ec49"},
|
|
||||||
{file = "mypy-0.812-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:28fb5479c494b1bab244620685e2eb3c3f988d71fd5d64cc753195e8ed53df7c"},
|
|
||||||
{file = "mypy-0.812-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:9743c91088d396c1a5a3c9978354b61b0382b4e3c440ce83cf77994a43e8c521"},
|
|
||||||
{file = "mypy-0.812-cp35-cp35m-win_amd64.whl", hash = "sha256:d7da2e1d5f558c37d6e8c1246f1aec1e7349e4913d8fb3cb289a35de573fe2eb"},
|
|
||||||
{file = "mypy-0.812-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:4eec37370483331d13514c3f55f446fc5248d6373e7029a29ecb7b7494851e7a"},
|
|
||||||
{file = "mypy-0.812-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:d65cc1df038ef55a99e617431f0553cd77763869eebdf9042403e16089fe746c"},
|
|
||||||
{file = "mypy-0.812-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:61a3d5b97955422964be6b3baf05ff2ce7f26f52c85dd88db11d5e03e146a3a6"},
|
|
||||||
{file = "mypy-0.812-cp36-cp36m-win_amd64.whl", hash = "sha256:25adde9b862f8f9aac9d2d11971f226bd4c8fbaa89fb76bdadb267ef22d10064"},
|
|
||||||
{file = "mypy-0.812-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:552a815579aa1e995f39fd05dde6cd378e191b063f031f2acfe73ce9fb7f9e56"},
|
|
||||||
{file = "mypy-0.812-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:499c798053cdebcaa916eef8cd733e5584b5909f789de856b482cd7d069bdad8"},
|
|
||||||
{file = "mypy-0.812-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:5873888fff1c7cf5b71efbe80e0e73153fe9212fafdf8e44adfe4c20ec9f82d7"},
|
|
||||||
{file = "mypy-0.812-cp37-cp37m-win_amd64.whl", hash = "sha256:9f94aac67a2045ec719ffe6111df543bac7874cee01f41928f6969756e030564"},
|
|
||||||
{file = "mypy-0.812-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d23e0ea196702d918b60c8288561e722bf437d82cb7ef2edcd98cfa38905d506"},
|
|
||||||
{file = "mypy-0.812-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:674e822aa665b9fd75130c6c5f5ed9564a38c6cea6a6432ce47eafb68ee578c5"},
|
|
||||||
{file = "mypy-0.812-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:abf7e0c3cf117c44d9285cc6128856106183938c68fd4944763003decdcfeb66"},
|
|
||||||
{file = "mypy-0.812-cp38-cp38-win_amd64.whl", hash = "sha256:0d0a87c0e7e3a9becdfbe936c981d32e5ee0ccda3e0f07e1ef2c3d1a817cf73e"},
|
|
||||||
{file = "mypy-0.812-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7ce3175801d0ae5fdfa79b4f0cfed08807af4d075b402b7e294e6aa72af9aa2a"},
|
|
||||||
{file = "mypy-0.812-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:b09669bcda124e83708f34a94606e01b614fa71931d356c1f1a5297ba11f110a"},
|
|
||||||
{file = "mypy-0.812-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:33f159443db0829d16f0a8d83d94df3109bb6dd801975fe86bacb9bf71628e97"},
|
|
||||||
{file = "mypy-0.812-cp39-cp39-win_amd64.whl", hash = "sha256:3f2aca7f68580dc2508289c729bd49ee929a436208d2b2b6aab15745a70a57df"},
|
|
||||||
{file = "mypy-0.812-py3-none-any.whl", hash = "sha256:2f9b3407c58347a452fc0736861593e105139b905cca7d097e413453a1d650b4"},
|
|
||||||
{file = "mypy-0.812.tar.gz", hash = "sha256:cd07039aa5df222037005b08fbbfd69b3ab0b0bd7a07d7906de75ae52c4e3119"},
|
|
||||||
]
|
|
||||||
mypy-extensions = [
|
|
||||||
{file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"},
|
|
||||||
{file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"},
|
|
||||||
]
|
|
||||||
packaging = [
|
|
||||||
{file = "packaging-20.9-py2.py3-none-any.whl", hash = "sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a"},
|
|
||||||
{file = "packaging-20.9.tar.gz", hash = "sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5"},
|
|
||||||
]
|
|
||||||
pathspec = [
|
|
||||||
{file = "pathspec-0.8.1-py2.py3-none-any.whl", hash = "sha256:aa0cb481c4041bf52ffa7b0d8fa6cd3e88a2ca4879c533c9153882ee2556790d"},
|
|
||||||
{file = "pathspec-0.8.1.tar.gz", hash = "sha256:86379d6b86d75816baba717e64b1a3a3469deb93bb76d613c9ce79edc5cb68fd"},
|
|
||||||
]
|
|
||||||
pluggy = [
|
|
||||||
{file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"},
|
|
||||||
{file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"},
|
|
||||||
]
|
|
||||||
py = [
|
|
||||||
{file = "py-1.10.0-py2.py3-none-any.whl", hash = "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a"},
|
|
||||||
{file = "py-1.10.0.tar.gz", hash = "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3"},
|
|
||||||
]
|
|
||||||
pyparsing = [
|
|
||||||
{file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"},
|
|
||||||
{file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"},
|
|
||||||
]
|
|
||||||
pytest = [
|
|
||||||
{file = "pytest-5.4.3-py3-none-any.whl", hash = "sha256:5c0db86b698e8f170ba4582a492248919255fcd4c79b1ee64ace34301fb589a1"},
|
|
||||||
{file = "pytest-5.4.3.tar.gz", hash = "sha256:7979331bfcba207414f5e1263b5a0f8f521d0f457318836a7355531ed1a4c7d8"},
|
|
||||||
]
|
|
||||||
regex = [
|
|
||||||
{file = "regex-2021.4.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:619d71c59a78b84d7f18891fe914446d07edd48dc8328c8e149cbe0929b4e000"},
|
|
||||||
{file = "regex-2021.4.4-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:47bf5bf60cf04d72bf6055ae5927a0bd9016096bf3d742fa50d9bf9f45aa0711"},
|
|
||||||
{file = "regex-2021.4.4-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:281d2fd05555079448537fe108d79eb031b403dac622621c78944c235f3fcf11"},
|
|
||||||
{file = "regex-2021.4.4-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:bd28bc2e3a772acbb07787c6308e00d9626ff89e3bfcdebe87fa5afbfdedf968"},
|
|
||||||
{file = "regex-2021.4.4-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:7c2a1af393fcc09e898beba5dd59196edaa3116191cc7257f9224beaed3e1aa0"},
|
|
||||||
{file = "regex-2021.4.4-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c38c71df845e2aabb7fb0b920d11a1b5ac8526005e533a8920aea97efb8ec6a4"},
|
|
||||||
{file = "regex-2021.4.4-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:96fcd1888ab4d03adfc9303a7b3c0bd78c5412b2bfbe76db5b56d9eae004907a"},
|
|
||||||
{file = "regex-2021.4.4-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:ade17eb5d643b7fead300a1641e9f45401c98eee23763e9ed66a43f92f20b4a7"},
|
|
||||||
{file = "regex-2021.4.4-cp36-cp36m-win32.whl", hash = "sha256:e8e5b509d5c2ff12f8418006d5a90e9436766133b564db0abaec92fd27fcee29"},
|
|
||||||
{file = "regex-2021.4.4-cp36-cp36m-win_amd64.whl", hash = "sha256:11d773d75fa650cd36f68d7ca936e3c7afaae41b863b8c387a22aaa78d3c5c79"},
|
|
||||||
{file = "regex-2021.4.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d3029c340cfbb3ac0a71798100ccc13b97dddf373a4ae56b6a72cf70dfd53bc8"},
|
|
||||||
{file = "regex-2021.4.4-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:18c071c3eb09c30a264879f0d310d37fe5d3a3111662438889ae2eb6fc570c31"},
|
|
||||||
{file = "regex-2021.4.4-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:4c557a7b470908b1712fe27fb1ef20772b78079808c87d20a90d051660b1d69a"},
|
|
||||||
{file = "regex-2021.4.4-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:01afaf2ec48e196ba91b37451aa353cb7eda77efe518e481707e0515025f0cd5"},
|
|
||||||
{file = "regex-2021.4.4-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:3a9cd17e6e5c7eb328517969e0cb0c3d31fd329298dd0c04af99ebf42e904f82"},
|
|
||||||
{file = "regex-2021.4.4-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:90f11ff637fe8798933fb29f5ae1148c978cccb0452005bf4c69e13db951e765"},
|
|
||||||
{file = "regex-2021.4.4-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:919859aa909429fb5aa9cf8807f6045592c85ef56fdd30a9a3747e513db2536e"},
|
|
||||||
{file = "regex-2021.4.4-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:339456e7d8c06dd36a22e451d58ef72cef293112b559010db3d054d5560ef439"},
|
|
||||||
{file = "regex-2021.4.4-cp37-cp37m-win32.whl", hash = "sha256:67bdb9702427ceddc6ef3dc382455e90f785af4c13d495f9626861763ee13f9d"},
|
|
||||||
{file = "regex-2021.4.4-cp37-cp37m-win_amd64.whl", hash = "sha256:32e65442138b7b76dd8173ffa2cf67356b7bc1768851dded39a7a13bf9223da3"},
|
|
||||||
{file = "regex-2021.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1e1c20e29358165242928c2de1482fb2cf4ea54a6a6dea2bd7a0e0d8ee321500"},
|
|
||||||
{file = "regex-2021.4.4-cp38-cp38-manylinux1_i686.whl", hash = "sha256:314d66636c494ed9c148a42731b3834496cc9a2c4251b1661e40936814542b14"},
|
|
||||||
{file = "regex-2021.4.4-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:6d1b01031dedf2503631d0903cb563743f397ccaf6607a5e3b19a3d76fc10480"},
|
|
||||||
{file = "regex-2021.4.4-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:741a9647fcf2e45f3a1cf0e24f5e17febf3efe8d4ba1281dcc3aa0459ef424dc"},
|
|
||||||
{file = "regex-2021.4.4-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:4c46e22a0933dd783467cf32b3516299fb98cfebd895817d685130cc50cd1093"},
|
|
||||||
{file = "regex-2021.4.4-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:e512d8ef5ad7b898cdb2d8ee1cb09a8339e4f8be706d27eaa180c2f177248a10"},
|
|
||||||
{file = "regex-2021.4.4-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:980d7be47c84979d9136328d882f67ec5e50008681d94ecc8afa8a65ed1f4a6f"},
|
|
||||||
{file = "regex-2021.4.4-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:ce15b6d103daff8e9fee13cf7f0add05245a05d866e73926c358e871221eae87"},
|
|
||||||
{file = "regex-2021.4.4-cp38-cp38-win32.whl", hash = "sha256:a91aa8619b23b79bcbeb37abe286f2f408d2f2d6f29a17237afda55bb54e7aac"},
|
|
||||||
{file = "regex-2021.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:c0502c0fadef0d23b128605d69b58edb2c681c25d44574fc673b0e52dce71ee2"},
|
|
||||||
{file = "regex-2021.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:598585c9f0af8374c28edd609eb291b5726d7cbce16be6a8b95aa074d252ee17"},
|
|
||||||
{file = "regex-2021.4.4-cp39-cp39-manylinux1_i686.whl", hash = "sha256:ee54ff27bf0afaf4c3b3a62bcd016c12c3fdb4ec4f413391a90bd38bc3624605"},
|
|
||||||
{file = "regex-2021.4.4-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:7d9884d86dd4dd489e981d94a65cd30d6f07203d90e98f6f657f05170f6324c9"},
|
|
||||||
{file = "regex-2021.4.4-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:bf5824bfac591ddb2c1f0a5f4ab72da28994548c708d2191e3b87dd207eb3ad7"},
|
|
||||||
{file = "regex-2021.4.4-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:563085e55b0d4fb8f746f6a335893bda5c2cef43b2f0258fe1020ab1dd874df8"},
|
|
||||||
{file = "regex-2021.4.4-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b9c3db21af35e3b3c05764461b262d6f05bbca08a71a7849fd79d47ba7bc33ed"},
|
|
||||||
{file = "regex-2021.4.4-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:3916d08be28a1149fb97f7728fca1f7c15d309a9f9682d89d79db75d5e52091c"},
|
|
||||||
{file = "regex-2021.4.4-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:fd45ff9293d9274c5008a2054ecef86a9bfe819a67c7be1afb65e69b405b3042"},
|
|
||||||
{file = "regex-2021.4.4-cp39-cp39-win32.whl", hash = "sha256:fa4537fb4a98fe8fde99626e4681cc644bdcf2a795038533f9f711513a862ae6"},
|
|
||||||
{file = "regex-2021.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:97f29f57d5b84e73fbaf99ab3e26134e6687348e95ef6b48cfd2c06807005a07"},
|
|
||||||
{file = "regex-2021.4.4.tar.gz", hash = "sha256:52ba3d3f9b942c49d7e4bc105bb28551c44065f139a65062ab7912bef10c9afb"},
|
|
||||||
]
|
|
||||||
soupsieve = [
{file = "soupsieve-2.2.1-py3-none-any.whl", hash = "sha256:c2c1c2d44f158cdbddab7824a9af8c4f83c76b1e23e049479aa432feb6c4c23b"},
{file = "soupsieve-2.2.1.tar.gz", hash = "sha256:052774848f448cf19c7e959adf5566904d525f33a3f8b6ba6f6f8f26ec7de0cc"},
]
sqlalchemy = [
{file = "SQLAlchemy-1.4.19-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:ddbce8fe4d0190db21db602e38aaf4c158c540b49f1ef7475323ec682a9fbf2d"},
{file = "SQLAlchemy-1.4.19-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:942ca49b7ec7449d2473a6587825c55ad99534ddfc4eee249dd42be3cc1aa8c9"},
{file = "SQLAlchemy-1.4.19-cp27-cp27m-win32.whl", hash = "sha256:9c0945c79cbe507b49524e31a4bb8700060bbccb60bb553df6432e176baff3d5"},
{file = "SQLAlchemy-1.4.19-cp27-cp27m-win_amd64.whl", hash = "sha256:6fd1b745ade2020a1a7bf1e22536d8afe86287882c81ca5d860bdf231d5854e9"},
{file = "SQLAlchemy-1.4.19-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:0fb3f73e5009f5a4c9b24469939d3d57cc3ad8099a09c0cfefc47fe45ab7ffbe"},
{file = "SQLAlchemy-1.4.19-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:64eab458619ef759f16f0f82242813d3289e829f8557fbc7c212ca4eadf96472"},
{file = "SQLAlchemy-1.4.19-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:311051c06f905774427b4a92dcb3924d6ee563dea3a88176da02fdfc572d0d1d"},
{file = "SQLAlchemy-1.4.19-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a34a7fd3353ee61a1dca72fc0c3e38d4e56bdc2c343e712f60a8c70acd4ef5bf"},
{file = "SQLAlchemy-1.4.19-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ace9ab2af9d7d7b0e2ff2178809941c56ab8921e38128278192a73a8a1c08a2"},
{file = "SQLAlchemy-1.4.19-cp36-cp36m-win32.whl", hash = "sha256:96d3d4a7ead376d738775a1fa9786dc17a31975ec664cea284e53735c79a5686"},
{file = "SQLAlchemy-1.4.19-cp36-cp36m-win_amd64.whl", hash = "sha256:20f4bf1459548a74aade997cb045015e4d72f0fde1789b09b3bb380be28f6511"},
{file = "SQLAlchemy-1.4.19-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:8cba69545246d16c6d2a12ce45865947cbdd814bacddf2e532fdd4512e70728c"},
{file = "SQLAlchemy-1.4.19-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57ba8a96b6d058c7dcf44de8ac0955b7a787f7177a0221dd4b8016e0191268f5"},
{file = "SQLAlchemy-1.4.19-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8f1e7f4de05c15d6b46af12f3cf0c2552f2940d201a49926703249a62402d851"},
{file = "SQLAlchemy-1.4.19-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c92d9ebf4b38c22c0c9e4f203a80e101910a50dc555b4578816932015b97d7f"},
{file = "SQLAlchemy-1.4.19-cp37-cp37m-win32.whl", hash = "sha256:c6efc7477551ba9ce632d5c3b448b7de0277c86005eec190a1068fcc7115fd0e"},
{file = "SQLAlchemy-1.4.19-cp37-cp37m-win_amd64.whl", hash = "sha256:e2761b925fda550debfd5a8bc3cef9debc9a23c6a280429c4ec3a07c35c6b4b3"},
{file = "SQLAlchemy-1.4.19-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:58d4f79d119010fdced6e7fd7e4b9f2230dbf55a8235d7c58b1c8207ef74791b"},
{file = "SQLAlchemy-1.4.19-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cefd44faca7c57534503261f6fab49bd47eb9c2945ee0bab09faaa8cb047c24f"},
{file = "SQLAlchemy-1.4.19-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9133635edcec1e7fbfc16eba5dc2b5b3b11818d25b7a57cfcbfa8d3b3e9594fd"},
{file = "SQLAlchemy-1.4.19-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3cf5f543d048a7c8da500133068c5c90c97a2c4bf0c027928a85028a519f33d"},
{file = "SQLAlchemy-1.4.19-cp38-cp38-win32.whl", hash = "sha256:d04160462f874eaa4d88721a0d5ecca8ebf433616801efe779f252ef87b0e216"},
{file = "SQLAlchemy-1.4.19-cp38-cp38-win_amd64.whl", hash = "sha256:45b0f773e195d8d51e2fd67cb5b5fb32f5a1f5e7f0752016207091bed108909a"},
{file = "SQLAlchemy-1.4.19-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:93ba458b3c279581288a10a55df2aa6ac3509882228fcbad9d9d88069f899337"},
{file = "SQLAlchemy-1.4.19-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6317701c06a829b066c794545512bb70b1a10a74574cfa5658a0aaf49f31aa93"},
{file = "SQLAlchemy-1.4.19-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:95a9fd0a11f89a80d8815418eccba034f3fec8ea1f04c41b6b8decc5c95852e9"},
{file = "SQLAlchemy-1.4.19-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9014fd1d8aebcb4eb6bc69a382dd149200e1d5924412b1d08b4443f6c1ce526f"},
{file = "SQLAlchemy-1.4.19-cp39-cp39-win32.whl", hash = "sha256:fa05a77662c23226c9ec031638fd90ae767009e05cd092b948740f09d10645f0"},
{file = "SQLAlchemy-1.4.19-cp39-cp39-win_amd64.whl", hash = "sha256:d7b21a4b62921cf6dca97e8f9dea1fbe2432aebbb09895a2bd4f527105af41a4"},
{file = "SQLAlchemy-1.4.19.tar.gz", hash = "sha256:89a5a13dcf33b7e47c7a9404a297c836965a247c7f076a0fe0910cae2bee5ce2"},
]
toml = [
{file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
{file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
]
typed-ast = [
{file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6"},
{file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:c907f561b1e83e93fad565bac5ba9c22d96a54e7ea0267c708bffe863cbe4075"},
{file = "typed_ast-1.4.3-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:1b3ead4a96c9101bef08f9f7d1217c096f31667617b58de957f690c92378b528"},
{file = "typed_ast-1.4.3-cp35-cp35m-win32.whl", hash = "sha256:dde816ca9dac1d9c01dd504ea5967821606f02e510438120091b84e852367428"},
{file = "typed_ast-1.4.3-cp35-cp35m-win_amd64.whl", hash = "sha256:777a26c84bea6cd934422ac2e3b78863a37017618b6e5c08f92ef69853e765d3"},
{file = "typed_ast-1.4.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f8afcf15cc511ada719a88e013cec87c11aff7b91f019295eb4530f96fe5ef2f"},
{file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:52b1eb8c83f178ab787f3a4283f68258525f8d70f778a2f6dd54d3b5e5fb4341"},
{file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:01ae5f73431d21eead5015997ab41afa53aa1fbe252f9da060be5dad2c730ace"},
{file = "typed_ast-1.4.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c190f0899e9f9f8b6b7863debfb739abcb21a5c054f911ca3596d12b8a4c4c7f"},
{file = "typed_ast-1.4.3-cp36-cp36m-win32.whl", hash = "sha256:398e44cd480f4d2b7ee8d98385ca104e35c81525dd98c519acff1b79bdaac363"},
{file = "typed_ast-1.4.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bff6ad71c81b3bba8fa35f0f1921fb24ff4476235a6e94a26ada2e54370e6da7"},
{file = "typed_ast-1.4.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0fb71b8c643187d7492c1f8352f2c15b4c4af3f6338f21681d3681b3dc31a266"},
{file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:760ad187b1041a154f0e4d0f6aae3e40fdb51d6de16e5c99aedadd9246450e9e"},
{file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5feca99c17af94057417d744607b82dd0a664fd5e4ca98061480fd8b14b18d04"},
{file = "typed_ast-1.4.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:95431a26309a21874005845c21118c83991c63ea800dd44843e42a916aec5899"},
{file = "typed_ast-1.4.3-cp37-cp37m-win32.whl", hash = "sha256:aee0c1256be6c07bd3e1263ff920c325b59849dc95392a05f258bb9b259cf39c"},
{file = "typed_ast-1.4.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9ad2c92ec681e02baf81fdfa056fe0d818645efa9af1f1cd5fd6f1bd2bdfd805"},
{file = "typed_ast-1.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b36b4f3920103a25e1d5d024d155c504080959582b928e91cb608a65c3a49e1a"},
{file = "typed_ast-1.4.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:067a74454df670dcaa4e59349a2e5c81e567d8d65458d480a5b3dfecec08c5ff"},
{file = "typed_ast-1.4.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7538e495704e2ccda9b234b82423a4038f324f3a10c43bc088a1636180f11a41"},
{file = "typed_ast-1.4.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:af3d4a73793725138d6b334d9d247ce7e5f084d96284ed23f22ee626a7b88e39"},
{file = "typed_ast-1.4.3-cp38-cp38-win32.whl", hash = "sha256:f2362f3cb0f3172c42938946dbc5b7843c2a28aec307c49100c8b38764eb6927"},
{file = "typed_ast-1.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:dd4a21253f42b8d2b48410cb31fe501d32f8b9fbeb1f55063ad102fe9c425e40"},
{file = "typed_ast-1.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f328adcfebed9f11301eaedfa48e15bdece9b519fb27e6a8c01aa52a17ec31b3"},
{file = "typed_ast-1.4.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:2c726c276d09fc5c414693a2de063f521052d9ea7c240ce553316f70656c84d4"},
{file = "typed_ast-1.4.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:cae53c389825d3b46fb37538441f75d6aecc4174f615d048321b716df2757fb0"},
{file = "typed_ast-1.4.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b9574c6f03f685070d859e75c7f9eeca02d6933273b5e69572e5ff9d5e3931c3"},
{file = "typed_ast-1.4.3-cp39-cp39-win32.whl", hash = "sha256:209596a4ec71d990d71d5e0d312ac935d86930e6eecff6ccc7007fe54d703808"},
{file = "typed_ast-1.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:9c6d1a54552b5330bc657b7ef0eae25d00ba7ffe85d9ea8ae6540d2197a3788c"},
{file = "typed_ast-1.4.3.tar.gz", hash = "sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65"},
]
typing-extensions = [
{file = "typing_extensions-3.10.0.0-py2-none-any.whl", hash = "sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497"},
{file = "typing_extensions-3.10.0.0-py3-none-any.whl", hash = "sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84"},
{file = "typing_extensions-3.10.0.0.tar.gz", hash = "sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342"},
]
wcwidth = [
{file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"},
{file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"},
]
werkzeug = [
{file = "Werkzeug-2.0.1-py3-none-any.whl", hash = "sha256:6c1ec500dcdba0baa27600f6a22f6333d8b662d22027ff9f6202e3367413caa8"},
{file = "Werkzeug-2.0.1.tar.gz", hash = "sha256:1de1db30d010ff1af14a009224ec49ab2329ad2cde454c8a708130642d579c42"},
]
wtforms = [
{file = "WTForms-2.3.3-py2.py3-none-any.whl", hash = "sha256:7b504fc724d0d1d4d5d5c114e778ec88c37ea53144683e084215eed5155ada4c"},
{file = "WTForms-2.3.3.tar.gz", hash = "sha256:81195de0ac94fbc8368abbaf9197b88c4f3ffd6c2719b5bf5fc9da744f3d829c"},
]
@ -1,36 +0,0 @@
[tool.poetry]
name = "amanuensis"
version = "0.1.0"
description = "An application for playing Lexicon, the encyclopedia RPG"
authors = ["Tim Van Baak <tim.vanbaak@gmail.com>"]

[tool.poetry.dependencies]
python = "^3.8"
Flask = "^2.0.1"
Flask-Login = "^0.5.0"
Flask-WTF = "^0.15.1"
SQLAlchemy = "^1.4.12"

[tool.poetry.dev-dependencies]
pytest = "^5.2"
black = "^21.5b2"
mypy = "^0.812"
bs4 = "^0.0.1"

[tool.poetry.scripts]
amanuensis-cli = "amanuensis.cli:main"
amanuensis-server = "amanuensis.server:run"

[tool.black]
extend-exclude = "^/amanuensis/lexicon/.*|^/amanuensis/server/[^/]*py|^/amanuensis/server/session/.*|"

[tool.mypy]
ignore_missing_imports = true
exclude = "|amanuensis/lexicon/.*|amanuensis/server/.*|amanuensis/server/session/.*|"

[tool.pytest.ini_options]
addopts = "--show-capture=log"

[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"
@ -1,5 +0,0 @@
[pytest]
addopts = --show-capture=stdout
; pytest should be able to read the pyproject.toml file, but for some reason it
; doesn't seem to be working here. This file is a temporary fix until that gets
; resolved.
@ -0,0 +1,24 @@
astroid==2.3.3
Click==7.0
entrypoints==0.3
flake8==3.7.9
Flask==1.1.1
Flask-Login==0.4.1
Flask-WTF==0.14.2
isort==4.3.21
itsdangerous==1.1.0
Jinja2==2.10.3
lazy-object-proxy==1.4.3
MarkupSafe==1.1.1
mccabe==0.6.1
mypy==0.770
mypy-extensions==0.4.3
pkg-resources==0.0.0
pycodestyle==2.5.0
pyflakes==2.1.1
six==1.14.0
typed-ast==1.4.1
typing-extensions==3.7.4.2
Werkzeug==0.16.0
wrapt==1.11.2
WTForms==2.2.1
@ -0,0 +1,22 @@
from setuptools import setup, find_packages

with open("README.md") as f:
    long_desc = f.read()

setup(
    name='amanuensis',
    version='0.0.1',
    author='Tim Van Baak',
    description='An application for playing Lexicon, the encyclopedia RPG',
    long_description=long_desc,
    long_description_content_type='text/markdown',
    url='https://github.com/Jaculabilis/Amanuensis',
    packages=find_packages(),
    include_package_data=True,
    zip_safe=True,
    install_requires=[
        'flask',
        'flask_wtf',
        'flask_login',
    ],
)
Some files were not shown because too many files have changed in this diff.