Incorporate server and cli into new code #13
@@ -3,6 +3,7 @@ Lexicon query interface
"""

import re
from typing import Sequence

from sqlalchemy import select, func


@@ -52,3 +53,8 @@ def create(
    db.session.add(new_lexicon)
    db.session.commit()
    return new_lexicon


def get_all_lexicons(db: DbContext) -> Sequence[Lexicon]:
    """Get all lexicons."""
    return db(select(Lexicon)).scalars()


@@ -3,7 +3,7 @@ User query interface
"""

import re
import uuid
from typing import Sequence

from sqlalchemy import select, func


@@ -67,3 +67,8 @@ def create(
    db.session.add(new_user)
    db.session.commit()
    return new_user


def get_all_users(db: DbContext) -> Sequence[User]:
    """Get all users."""
    return db(select(User)).scalars()
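The new query helpers take the `DbContext` explicitly instead of reaching for global state. A minimal usage sketch, assuming the `DbContext` constructor and `create_all()` shown elsewhere in this diff; the backend module paths match the imports in the home blueprint near the end:

from amanuensis.db import DbContext
import amanuensis.backend.lexicon as lexiq
import amanuensis.backend.user as userq

db = DbContext("sqlite:///:memory:")  # positional URI, as in command_init_db below
db.create_all()
print(list(lexiq.get_all_lexicons(db)))
print(list(userq.get_all_users(db)))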
@@ -1,72 +1,98 @@
#
# The cli module must not import other parts of the application at the module
# level. This is because most other modules depend on the config module. The
# config module may depend on __main__'s commandline parsing to locate config
# files, and __main__'s commandline parsing requires importing (but not
# executing) the functions in the cli module. Thus, cli functions must only
# import the config module inside the various command methods, which are only
# run after commandline parsing has already occurred.
#
from argparse import ArgumentParser
import logging
import logging.config

import amanuensis.cli.admin
import amanuensis.cli.lexicon
import amanuensis.cli.user


def server_commands(commands={}):
    if commands:
        return commands
    import amanuensis.cli.server
    for name, func in vars(amanuensis.cli.server).items():
        if name.startswith("command_"):
            name = name[8:].replace("_", "-")
            commands[name] = func
    return commands


LOGGING_CONFIG = {
    "version": 1,
    "formatters": {
        "fmt_basic": {
            "validate": True,
            "format": "%(message)s",
        },
        "fmt_detailed": {
            "validate": True,
            "format": "%(asctime)s %(levelname)s %(message)s",
        },
    },
    "handlers": {
        "hnd_stderr": {
            "class": "logging.StreamHandler",
            "level": "INFO",
            "formatter": "fmt_basic",
        },
    },
    "loggers": {
        __name__: {
            "level": "DEBUG",
            "handlers": ["hnd_stderr"],
        },
    },
}


def lexicon_commands(commands={}):
    if commands:
        return commands
    import amanuensis.cli.lexicon
    for name, func in vars(amanuensis.cli.lexicon).items():
        if name.startswith("command_"):
            name = name[8:].replace("_", "-")
            commands["lexicon-" + name] = func
    return commands


def add_subcommand(subparsers, module) -> None:
    """Add a cli submodule's commands as a subparser."""
    # Get the command information from the module
    command_name: str = getattr(module, "COMMAND_NAME")
    command_help: str = getattr(module, "COMMAND_HELP")
    if not command_name and command_help:
        return

    # Add the subparser for the command and set a default action
    command_parser: ArgumentParser = subparsers.add_parser(
        command_name, help=command_help
    )
    command_parser.set_defaults(func=lambda args: command_parser.print_usage())

    # Add all subcommands in the command module
    subcommands = command_parser.add_subparsers(metavar="SUBCOMMAND")
    for name, obj in vars(module).items():
        if name.startswith("command_"):
            # Hyphenate subcommand names
            sc_name: str = name[8:].replace("_", "-")
            # Only the first line of the subcommand function docstring is used
            sc_help = ((obj.__doc__ or "").strip() or "\n").splitlines()[0]

            # Add the command and any arguments defined by its decorators
            subcommand: ArgumentParser = subcommands.add_parser(
                sc_name, help=sc_help, description=obj.__doc__
            )
            subcommand.set_defaults(func=obj)
            for args, kwargs in obj.__dict__.get("add_argument", []):
                subcommand.add_argument(*args, **kwargs)


def user_commands(commands={}):
    if commands:
        return commands
    import amanuensis.cli.user
    for name, func in vars(amanuensis.cli.user).items():
        if name.startswith("command_"):
            name = name[8:].replace("_", "-")
            commands["user-" + name] = func
    return commands


def init_logger(args):
    """Set up logging based on verbosity args"""
    if args.verbose:
        handler = LOGGING_CONFIG["handlers"]["hnd_stderr"]
        handler["formatter"] = "fmt_detailed"
        handler["level"] = "DEBUG"
    logging.config.dictConfig(LOGGING_CONFIG)


def get_commands():
    return {**server_commands(), **lexicon_commands(), **user_commands()}


def main():
    """CLI entry point"""
    # Set up the top-level parser
    parser = ArgumentParser()
    parser.set_defaults(
        parser=parser,
        func=lambda args: parser.print_usage(),
    )
    parser.add_argument("--verbose", "-v", action="store_true", help="Verbose output")

    # Add commands from cli submodules
    subparsers = parser.add_subparsers(metavar="COMMAND")
    add_subcommand(subparsers, amanuensis.cli.admin)
    add_subcommand(subparsers, amanuensis.cli.lexicon)
    add_subcommand(subparsers, amanuensis.cli.user)


def cmd_desc(func):
    return ((func.__doc__ or "").strip() or '\n').splitlines()[0]


def describe_commands():
    longest = max(map(len, server_commands().keys()))
    server_desc = "General commands:\n{}\n".format("\n".join([
        " {1:<{0}} : {2}".format(longest, name, cmd_desc(func))
        for name, func in server_commands().items()
    ]))

    longest = max(map(len, lexicon_commands().keys()))
    lexicon_desc = "Lexicon commands:\n{}\n".format("\n".join([
        " {1:<{0}} : {2}".format(longest, name, cmd_desc(func))
        for name, func in lexicon_commands().items()
    ]))

    longest = max(map(len, user_commands().keys()))
    user_desc = "User commands:\n{}\n".format("\n".join([
        " {1:<{0}} : {2}".format(longest, name, cmd_desc(func))
        for name, func in user_commands().items()
    ]))

    return "\n".join([server_desc, lexicon_desc, user_desc])

    # Parse args and execute the desired action
    args = parser.parse_args()
    init_logger(args)
    args.func(args)
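Putting `add_subcommand` and `main` together, a cli submodule only needs a `COMMAND_NAME`, a `COMMAND_HELP`, and `command_*` functions; argparse wiring and dispatch happen here. A hedged sketch of the flow (argv values are illustrative; `add_subcommand` is the helper defined above):

from argparse import ArgumentParser
import amanuensis.cli.admin

parser = ArgumentParser()
parser.add_argument("--verbose", "-v", action="store_true")
subparsers = parser.add_subparsers(metavar="COMMAND")
add_subcommand(subparsers, amanuensis.cli.admin)  # exposes "admin" and its command_* functions

# command_init_db surfaces as "admin init-db"; its stacked @add_argument decorators
# become the subparser's flags.
args = parser.parse_args(["admin", "init-db", "example.db", "--force"])
args.func(args)  # dispatches to command_init_db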
amanuensis/cli/admin.py (new file)
@@ -0,0 +1,60 @@
import collections
import json
import logging
import os

from amanuensis.db import DbContext

from .helpers import add_argument


COMMAND_NAME = "admin"
COMMAND_HELP = "Interact with Amanuensis."

LOG = logging.getLogger(__name__)


@add_argument(
    "path", metavar="DB_PATH", help="Path to where the database should be created"
)
@add_argument("--force", "-f", action="store_true", help="Overwrite existing database")
@add_argument("--verbose", "-v", action="store_true", help="Enable db echo")
def command_init_db(args) -> int:
    """
    Initialize the Amanuensis database.
    """
    # Check if force is required
    if not args.force and os.path.exists(args.path):
        args.parser.error(f"{args.path} already exists and --force was not specified")

    # Initialize the database
    db_uri = f"sqlite:///{os.path.abspath(args.path)}"
    LOG.info(f"Creating database at {db_uri}")
    db = DbContext(db_uri, debug=args.verbose)
    db.create_all()

    LOG.info("Done")
    return 0


@add_argument("path", metavar="CONFIG_PATH", help="Path to the config file")
def command_secret_key(args) -> int:
    """
    Generate a Flask secret key.

    The Flask server will not run unless a secret key has
    been generated.
    """
    # Load the json config
    with open(args.path, mode="r", encoding="utf8") as f:
        config = json.load(f, object_pairs_hook=collections.OrderedDict)

    # Set the secret key to a new random string
    config["SECRET_KEY"] = os.urandom(32).hex()

    # Write the config back out
    with open(args.path, mode="w", encoding="utf8") as f:
        json.dump(config, f, indent=2)

    LOG.info("Regenerated Flask secret key")
    return 0
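The stacked `@add_argument` decorators above never touch argparse themselves; the rewritten `helpers.add_argument` later in this diff records each call on the function object, and `add_subcommand` replays the recorded tuples onto the subparser. A quick inspection sketch:

from amanuensis.cli.admin import command_init_db

# Decorators apply bottom-up, so --verbose is recorded first and the positional
# "path" argument last.
for arg_names, arg_kwargs in command_init_db.__dict__["add_argument"]:
    print(arg_names, arg_kwargs)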
@@ -1,209 +1,20 @@
|
||||
# Standard library imports
|
||||
from argparse import ArgumentParser
|
||||
from functools import wraps
|
||||
from json.decoder import JSONDecodeError
|
||||
from logging import getLogger
|
||||
from sys import exc_info
|
||||
|
||||
logger = getLogger(__name__)
|
||||
|
||||
|
||||
#
|
||||
# The add_argument and no_argument function wrappers allow the same
|
||||
# function to both configure a command and execute it. This keeps
|
||||
# command argument configuration close to where the command is defined
|
||||
# and reduces the number of things the main parser has to handle.
|
||||
#
|
||||
"""
|
||||
Helpers for cli commands.
|
||||
"""
|
||||
|
||||
|
||||
def add_argument(*args, **kwargs):
|
||||
"""Passes the given args and kwargs to subparser.add_argument"""
|
||||
"""Defines an argument to a cli command."""
|
||||
|
||||
def argument_adder(command):
|
||||
@wraps(command)
|
||||
def augmented_command(cmd_args):
|
||||
# Add this wrapper's command in the parser pass
|
||||
if isinstance(cmd_args, ArgumentParser):
|
||||
cmd_args.add_argument(*args, **kwargs)
|
||||
# If there are more command wrappers, pass through to them
|
||||
if command.__dict__.get('wrapper', False):
|
||||
command(cmd_args)
|
||||
# Parser pass doesn't return a value
|
||||
return None
|
||||
def argument_adder(command_func):
|
||||
"""Decorator function for storing parser args on the function."""
|
||||
|
||||
# Pass through transparently in the execute pass
|
||||
return command(cmd_args)
|
||||
# Store the kw/args in the function dictionary
|
||||
add_args = command_func.__dict__.get("add_argument", [])
|
||||
add_args.append((args, kwargs))
|
||||
command_func.__dict__["add_argument"] = add_args
|
||||
|
||||
# Mark the command as wrapped so control passes through
|
||||
augmented_command.__dict__['wrapper'] = True
|
||||
return augmented_command
|
||||
# Return the same function
|
||||
return command_func
|
||||
|
||||
return argument_adder
|
||||
|
||||
|
||||
def no_argument(command):
|
||||
"""Noops for subparsers"""
|
||||
@wraps(command)
|
||||
def augmented_command(cmd_args):
|
||||
# Noop in the parser pass
|
||||
if isinstance(cmd_args, ArgumentParser):
|
||||
return None
|
||||
# Pass through in the execute pass
|
||||
return command(cmd_args)
|
||||
|
||||
return augmented_command
|
||||
|
||||
|
||||
#
|
||||
# Many commands require specifying a lexicon or user to operate on, so
|
||||
# the requires_lexicon and requires_user wrappers replace @add_argument
|
||||
# as well as automatically create the model for the object from the
|
||||
# provided identifier.
|
||||
#
|
||||
|
||||
|
||||
LEXICON_ARGS = ['--lexicon']
|
||||
LEXICON_KWARGS = {
|
||||
'metavar': 'LEXICON',
|
||||
'dest': 'lexicon',
|
||||
'help': 'Specify a user to operate on'}
|
||||
|
||||
|
||||
def requires_lexicon(command):
|
||||
@wraps(command)
|
||||
def augmented_command(cmd_args):
|
||||
# Add lexicon argument in parser pass
|
||||
if isinstance(cmd_args, ArgumentParser):
|
||||
cmd_args.add_argument(*LEXICON_ARGS, **LEXICON_KWARGS)
|
||||
# If there are more command wrappers, pass through to them
|
||||
if command.__dict__.get('wrapper', False):
|
||||
command(cmd_args)
|
||||
# Parser pass doesn't return a value
|
||||
return None
|
||||
|
||||
# Verify lexicon argument in execute pass
|
||||
val = getattr(cmd_args, 'lexicon', None)
|
||||
if not val:
|
||||
logger.error("Missing --lexicon argument")
|
||||
return -1
|
||||
try:
|
||||
model_factory = cmd_args.model_factory
|
||||
cmd_args.lexicon = model_factory.lexicon(val)
|
||||
except Exception:
|
||||
ex_type, value, tb = exc_info()
|
||||
logger.error(
|
||||
f'Loading lexicon "{val}" failed with '
|
||||
f'{ex_type.__name__}: {value}')
|
||||
return -1
|
||||
return command(cmd_args)
|
||||
|
||||
augmented_command.__dict__['wrapper'] = True
|
||||
return augmented_command
|
||||
|
||||
|
||||
USER_ARGS = ['--user']
|
||||
USER_KWARGS = {
|
||||
'metavar': 'USER',
|
||||
'dest': 'user',
|
||||
'help': 'Specify a user to operate on'}
|
||||
|
||||
|
||||
def requires_user(command):
|
||||
@wraps(command)
|
||||
def augmented_command(cmd_args):
|
||||
# Add user argument in parser pass
|
||||
if isinstance(cmd_args, ArgumentParser):
|
||||
cmd_args.add_argument(*USER_ARGS, **USER_KWARGS)
|
||||
# If there are more command wrappers, pass through to them
|
||||
if command.__dict__.get('wrapper', False):
|
||||
command(cmd_args)
|
||||
# Parser pass doesn't return a value
|
||||
return None
|
||||
|
||||
# Verify user argument in execute pass
|
||||
val = getattr(cmd_args, "user", None)
|
||||
if not val:
|
||||
logger.error("Missing --user argument")
|
||||
return -1
|
||||
try:
|
||||
model_factory = cmd_args.model_factory
|
||||
cmd_args.user = model_factory.user(val)
|
||||
except Exception:
|
||||
ex_type, value, tb = exc_info()
|
||||
logger.error(
|
||||
f'Loading user "{val}" failed with '
|
||||
f'{ex_type.__name__}: {value}')
|
||||
return -1
|
||||
return command(cmd_args)
|
||||
|
||||
augmented_command.__dict__['wrapper'] = True
|
||||
return augmented_command
|
||||
|
||||
|
||||
# Wrapper for aliasing commands
|
||||
def alias(cmd_alias):
|
||||
"""Adds an alias to the function dictionary"""
|
||||
def aliaser(command):
|
||||
aliases = command.__dict__.get('aliases', [])
|
||||
aliases.append(cmd_alias)
|
||||
command.__dict__['aliases'] = aliases
|
||||
return command
|
||||
return aliaser
|
||||
|
||||
|
||||
# Helpers for common command tasks
|
||||
|
||||
CONFIG_GET_ROOT_VALUE = object()
|
||||
|
||||
|
||||
def config_get(cfg, pathspec):
|
||||
"""
|
||||
Performs config --get for a given config
|
||||
|
||||
cfg is from a `with json_ro` context
|
||||
path is the full pathspec, unsplit
|
||||
"""
|
||||
import json
|
||||
|
||||
if pathspec is CONFIG_GET_ROOT_VALUE:
|
||||
path = []
|
||||
else:
|
||||
path = pathspec.split(".")
|
||||
for spec in path:
|
||||
if spec not in cfg:
|
||||
logger.error("Path not found: {}".format(pathspec))
|
||||
return -1
|
||||
cfg = cfg.get(spec)
|
||||
print(json.dumps(cfg, indent=2))
|
||||
return 0
|
||||
|
||||
|
||||
def config_set(obj_id, cfg, set_tuple):
|
||||
"""
|
||||
Performs config --set for a given config
|
||||
|
||||
config is from a "with json_rw" context
|
||||
set_tuple is a tuple of the pathspec and the value
|
||||
"""
|
||||
import json
|
||||
pathspec, value = set_tuple
|
||||
if not pathspec:
|
||||
logger.error("Path must be non-empty")
|
||||
path = pathspec.split('.')
|
||||
try:
|
||||
value = json.loads(value)
|
||||
except JSONDecodeError:
|
||||
pass # Leave value as string
|
||||
for spec in path[:-1]:
|
||||
if spec not in cfg:
|
||||
logger.error("Path not found")
|
||||
return -1
|
||||
cfg = cfg.get(spec)
|
||||
key = path[-1]
|
||||
if key not in cfg:
|
||||
logger.error("Path not found")
|
||||
return -1
|
||||
old_value = cfg[key]
|
||||
cfg[key] = value
|
||||
logger.info("{}.{}: {} -> {}".format(obj_id, pathspec, old_value, value))
|
||||
return 0
|
||||
return argument_adder
|
||||
|
@@ -1,324 +1,30 @@
|
||||
# Standard library imports
|
||||
import logging
|
||||
|
||||
# Module imports
|
||||
from amanuensis.config import RootConfigDirectoryContext
|
||||
from amanuensis.models import LexiconModel, UserModel
|
||||
|
||||
from .helpers import (
|
||||
add_argument, no_argument, requires_lexicon, requires_user, alias,
|
||||
config_get, config_set, CONFIG_GET_ROOT_VALUE)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
#
|
||||
# CRUD commands
|
||||
#
|
||||
from .helpers import add_argument
|
||||
|
||||
|
||||
COMMAND_NAME = "lexicon"
|
||||
COMMAND_HELP = "Interact with lexicons."
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@alias('lc')
|
||||
@add_argument("--name", required=True, help="The name of the new lexicon")
|
||||
@requires_user
|
||||
@add_argument("--prompt", help="The lexicon's prompt")
|
||||
def command_create(args):
|
||||
"""
|
||||
Create a lexicon
|
||||
|
||||
The specified user will be the editor. A newly created lexicon is
|
||||
not open for joining and requires additional configuration before it is
|
||||
playable. The editor should ensure that all settings are as desired before
|
||||
opening the lexicon for player joins.
|
||||
"""
|
||||
# Module imports
|
||||
from amanuensis.lexicon import valid_name, create_lexicon
|
||||
|
||||
root: RootConfigDirectoryContext = args.root
|
||||
|
||||
# Verify arguments
|
||||
if not valid_name(args.name):
|
||||
logger.error(f'Lexicon name contains illegal characters: "{args.name}"')
|
||||
return -1
|
||||
with root.lexicon.read_index() as index:
|
||||
if args.name in index.keys():
|
||||
logger.error(f'A lexicon with name "{args.name}" already exists')
|
||||
return -1
|
||||
|
||||
# Perform command
|
||||
create_lexicon(root, args.name, args.user)
|
||||
|
||||
# Output already logged by create_lexicon
|
||||
return 0
|
||||
"""
|
||||
Create a lexicon.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
@alias('ld')
|
||||
@requires_lexicon
|
||||
@add_argument("--purge", action="store_true", help="Delete the lexicon's data")
|
||||
def command_delete(args):
|
||||
"""
|
||||
Delete a lexicon and optionally its data
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
# # Module imports
|
||||
# from amanuensis.config import logger
|
||||
# from amanuensis.lexicon.manage import delete_lexicon
|
||||
|
||||
# # Perform command
|
||||
# delete_lexicon(args.lexicon, args.purge)
|
||||
|
||||
# # Output
|
||||
# logger.info('Deleted lexicon "{}"'.format(args.lexicon.name))
|
||||
# return 0
|
||||
"""
|
||||
Delete a lexicon.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
@alias('ll')
|
||||
@no_argument
|
||||
def command_list(args):
|
||||
"""
|
||||
List all lexicons and their statuses
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
# # Module imports
|
||||
# from amanuensis.lexicon.manage import get_all_lexicons
|
||||
|
||||
# # Execute command
|
||||
# lexicons = get_all_lexicons()
|
||||
|
||||
# # Output
|
||||
# statuses = []
|
||||
# for lex in lexicons:
|
||||
# statuses.append("{0.lid} {0.name} ({1})".format(lex, lex.status()))
|
||||
# for s in statuses:
|
||||
# print(s)
|
||||
# return 0
|
||||
|
||||
|
||||
@alias('ln')
|
||||
@requires_lexicon
|
||||
@add_argument("--get",
|
||||
metavar="PATHSPEC",
|
||||
dest="get",
|
||||
nargs="?",
|
||||
const=CONFIG_GET_ROOT_VALUE,
|
||||
help="Get the value of a config key")
|
||||
@add_argument("--set",
|
||||
metavar=("PATHSPEC", "VALUE"),
|
||||
dest="set",
|
||||
nargs=2,
|
||||
help="Set the value of a config key")
|
||||
def command_config(args):
|
||||
"""
|
||||
Interact with a lexicon's config
|
||||
"""
|
||||
lexicon: LexiconModel = args.lexicon
|
||||
|
||||
# Verify arguments
|
||||
if args.get and args.set:
|
||||
logger.error("Specify one of --get and --set")
|
||||
return -1
|
||||
|
||||
# Execute command
|
||||
if args.get:
|
||||
config_get(lexicon.cfg, args.get)
|
||||
|
||||
if args.set:
|
||||
with lexicon.ctx.edit_config() as cfg:
|
||||
config_set(lexicon.lid, cfg, args.set)
|
||||
|
||||
# config_* functions handle output
|
||||
return 0
|
||||
|
||||
#
|
||||
# Player/character commands
|
||||
#
|
||||
|
||||
|
||||
@alias('lpa')
|
||||
@requires_lexicon
|
||||
@requires_user
|
||||
def command_player_add(args):
|
||||
"""
|
||||
Add a player to a lexicon
|
||||
"""
|
||||
lexicon: LexiconModel = args.lexicon
|
||||
user: UserModel = args.user
|
||||
|
||||
# Module imports
|
||||
from amanuensis.lexicon import add_player_to_lexicon
|
||||
|
||||
# Verify arguments
|
||||
if user.uid in lexicon.cfg.join.joined:
|
||||
logger.error(f'"{user.cfg.username}" is already a player '
|
||||
f'in "{lexicon.cfg.name}"')
|
||||
return -1
|
||||
|
||||
# Perform command
|
||||
add_player_to_lexicon(user, lexicon)
|
||||
|
||||
# Output
|
||||
logger.info(f'Added user "{user.cfg.username}" to '
|
||||
f'lexicon "{lexicon.cfg.name}"')
|
||||
return 0
|
||||
|
||||
|
||||
@alias('lpr')
|
||||
@requires_lexicon
|
||||
@requires_user
|
||||
def command_player_remove(args):
|
||||
"""
|
||||
Remove a player from a lexicon
|
||||
|
||||
Removing a player dissociates them from any characters
|
||||
they control but does not delete any character data.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
# # Module imports
|
||||
# from amanuensis.lexicon.manage import remove_player
|
||||
|
||||
# # Verify arguments
|
||||
# if not args.user.in_lexicon(args.lexicon):
|
||||
# logger.error('"{0.username}" is not a player in lexicon "{1.name}"'
|
||||
# ''.format(args.user, args.lexicon))
|
||||
# return -1
|
||||
# if args.user.id == args.lexicon.editor:
|
||||
# logger.error("Can't remove the editor of a lexicon")
|
||||
# return -1
|
||||
|
||||
# # Perform command
|
||||
# remove_player(args.lexicon, args.user)
|
||||
|
||||
# # Output
|
||||
# logger.info('Removed "{0.username}" from lexicon "{1.name}"'.format(
|
||||
# args.user, args.lexicon))
|
||||
# return 0
|
||||
|
||||
|
||||
@alias('lpl')
|
||||
@requires_lexicon
|
||||
def command_player_list(args):
|
||||
"""
|
||||
List all players in a lexicon
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
# import json
|
||||
# # Module imports
|
||||
# from amanuensis.user import UserModel
|
||||
|
||||
# # Perform command
|
||||
# players = list(map(
|
||||
# lambda uid: UserModel.by(uid=uid).username,
|
||||
# args.lexicon.join.joined))
|
||||
|
||||
# # Output
|
||||
# print(json.dumps(players, indent=2))
|
||||
# return 0
|
||||
|
||||
|
||||
@alias('lcc')
|
||||
@requires_lexicon
|
||||
@requires_user
|
||||
@add_argument("--charname", required=True, help="The character's name")
|
||||
def command_char_create(args):
|
||||
"""
|
||||
Create a character for a lexicon
|
||||
|
||||
The specified player will be set as the character's player.
|
||||
"""
|
||||
lexicon: LexiconModel = args.lexicon
|
||||
user: UserModel = args.user
|
||||
|
||||
# Module imports
|
||||
from amanuensis.lexicon import create_character_in_lexicon
|
||||
|
||||
# Verify arguments
|
||||
if user.uid not in lexicon.cfg.join.joined:
|
||||
logger.error('"{0.username}" is not a player in lexicon "{1.name}"'
|
||||
''.format(user.cfg, lexicon.cfg))
|
||||
return -1
|
||||
|
||||
# Perform command
|
||||
create_character_in_lexicon(user, lexicon, args.charname)
|
||||
|
||||
# Output
|
||||
logger.info(f'Created character "{args.charname}" for "{user.cfg.username}"'
|
||||
f' in "{lexicon.cfg.name}"')
|
||||
return 0
|
||||
|
||||
|
||||
@alias('lcd')
|
||||
@requires_lexicon
|
||||
@add_argument("--charname", required=True, help="The character's name")
|
||||
def command_char_delete(args):
|
||||
"""
|
||||
Delete a character from a lexicon
|
||||
|
||||
Deleting a character dissociates them from any content
|
||||
they have contributed rather than deleting it.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
# # Module imports
|
||||
# from amanuensis.lexicon import LexiconModel
|
||||
# from amanuensis.lexicon.manage import delete_character
|
||||
|
||||
# # Verify arguments
|
||||
# lex = LexiconModel.by(name=args.lexicon)
|
||||
# if lex is None:
|
||||
# logger.error("Could not find lexicon '{}'".format(args.lexicon))
|
||||
# return -1
|
||||
|
||||
# # Internal call
|
||||
# delete_character(lex, args.charname)
|
||||
# return 0
|
||||
|
||||
|
||||
@alias('lcl')
|
||||
@requires_lexicon
|
||||
def command_char_list(args):
|
||||
"""
|
||||
List all characters in a lexicon
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
# import json
|
||||
# # Module imports
|
||||
# from amanuensis.lexicon import LexiconModel
|
||||
|
||||
# # Verify arguments
|
||||
# lex = LexiconModel.by(name=args.lexicon)
|
||||
# if lex is None:
|
||||
# logger.error("Could not find lexicon '{}'".format(args.lexicon))
|
||||
# return -1
|
||||
|
||||
# # Internal call
|
||||
# print(json.dumps(lex.character, indent=2))
|
||||
# return 0
|
||||
|
||||
#
|
||||
# Procedural commands
|
||||
#
|
||||
|
||||
|
||||
@alias('lpt')
|
||||
@requires_lexicon
|
||||
@add_argument("--as-deadline",
|
||||
action="store_true",
|
||||
help="Notifies players of the publish result")
|
||||
@add_argument("--force",
|
||||
action="store_true",
|
||||
help="Publish all approved articles, regardless of other checks")
|
||||
def command_publish_turn(args):
|
||||
"""
|
||||
Publishes the current turn of a lexicon
|
||||
|
||||
The --as-deadline flag is intended to be used only by the scheduled publish
|
||||
attempts controlled by the publish.deadlines setting.
|
||||
|
||||
The --force flag bypasses the publish.quorum and publish.block_on_ready
|
||||
settings.
|
||||
"""
|
||||
# Module imports
|
||||
from amanuensis.lexicon import attempt_publish
|
||||
|
||||
# Internal call
|
||||
result = attempt_publish(args.lexicon)
|
||||
|
||||
if not result:
|
||||
logger.error('Publish failed, check lexicon log')
|
||||
"""
|
||||
List all lexicons and their statuses.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
@@ -1,120 +0,0 @@
|
||||
import logging
|
||||
import os
|
||||
|
||||
from amanuensis.config import RootConfigDirectoryContext
|
||||
|
||||
from .helpers import (
|
||||
add_argument,
|
||||
no_argument,
|
||||
alias,
|
||||
config_get,
|
||||
config_set,
|
||||
CONFIG_GET_ROOT_VALUE)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@alias('i')
|
||||
@add_argument("--refresh",
|
||||
action="store_true",
|
||||
help="Refresh an existing config directory")
|
||||
def command_init(args):
|
||||
"""
|
||||
Initialize a config directory at --config-dir
|
||||
|
||||
A clean config directory will contain a config.json, a
|
||||
lexicon config directory, and a user config directory.
|
||||
|
||||
Refreshing an existing directory will add keys to the global config that
|
||||
are present in the default configs. Users and lexicons that are missing
|
||||
from the indexes will be deleted, and stale index entries will be removed.
|
||||
"""
|
||||
# Module imports
|
||||
from amanuensis.config.init import create_config_dir
|
||||
|
||||
# Verify arguments
|
||||
if args.refresh and not os.path.isdir(args.config_dir):
|
||||
print("Error: couldn't find directory '{}'".format(args.config_dir))
|
||||
|
||||
# Internal call
|
||||
create_config_dir(args.config_dir, args.refresh)
|
||||
logger.info(f'Initialized config dir at {args.config_dir}')
|
||||
return 0
|
||||
|
||||
|
||||
@alias('gs')
|
||||
@no_argument
|
||||
def command_generate_secret(args):
|
||||
"""
|
||||
Generate a Flask secret key
|
||||
|
||||
The Flask server will not run unless a secret key has
|
||||
been generated.
|
||||
"""
|
||||
root: RootConfigDirectoryContext = args.root
|
||||
secret_key: bytes = os.urandom(32)
|
||||
with root.edit_config() as cfg:
|
||||
cfg.secret_key = secret_key.hex()
|
||||
logger.info("Regenerated Flask secret key")
|
||||
return 0
|
||||
|
||||
|
||||
@alias('r')
|
||||
@add_argument("-a", "--address", default="127.0.0.1")
|
||||
@add_argument("-p", "--port", default="5000")
|
||||
@add_argument("--debug", action="store_true")
|
||||
def command_run(args):
|
||||
"""
|
||||
Run the default Flask server
|
||||
|
||||
The default Flask server is not secure, and should
|
||||
only be used for development.
|
||||
"""
|
||||
from amanuensis.server import get_app
|
||||
|
||||
root: RootConfigDirectoryContext = args.root
|
||||
|
||||
with root.read_config() as cfg:
|
||||
if cfg.secret_key is None:
|
||||
logger.error("Can't run server without a secret_key. "
|
||||
"Run generate-secet first.")
|
||||
return -1
|
||||
|
||||
get_app(root).run(host=args.address, port=args.port, debug=args.debug)
|
||||
return 0
|
||||
|
||||
|
||||
@alias('n')
|
||||
@add_argument("--get",
|
||||
metavar="PATHSPEC",
|
||||
dest="get",
|
||||
nargs="?",
|
||||
const=CONFIG_GET_ROOT_VALUE,
|
||||
help="Get the value of a config key")
|
||||
@add_argument("--set",
|
||||
metavar=("PATHSPEC", "VALUE"),
|
||||
dest="set",
|
||||
nargs=2,
|
||||
help="Set the value of a config key")
|
||||
def command_config(args):
|
||||
"""
|
||||
Interact with the global config
|
||||
|
||||
PATHSPEC is a path into the config object formatted as
|
||||
a dot-separated sequence of keys.
|
||||
"""
|
||||
root: RootConfigDirectoryContext = args.root
|
||||
|
||||
if args.get and args.set:
|
||||
logger.error("Specify one of --get and --set")
|
||||
return -1
|
||||
|
||||
if args.get:
|
||||
with root.read_config() as cfg:
|
||||
config_get(cfg, args.get)
|
||||
|
||||
if args.set:
|
||||
with root.edit_config() as cfg:
|
||||
config_set("config", cfg, args.set)
|
||||
|
||||
return 0
|
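The deleted `run` command above has a successor in the new code: `amanuensis.server.run()` near the end of this diff builds a `CommandLineConfig` and starts the development server. A minimal sketch:

from amanuensis.server import run

run()  # parses --config-file, --secret-key, --database-uri, etc., then calls app.run(debug=app.testing)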
@@ -1,158 +1,37 @@
|
||||
# Standard library imports
|
||||
import getpass
|
||||
import logging
|
||||
# import shutil
|
||||
|
||||
# Module imports
|
||||
from amanuensis.models import UserModel
|
||||
|
||||
from .helpers import (
|
||||
add_argument,
|
||||
no_argument,
|
||||
requires_user,
|
||||
alias,
|
||||
config_get,
|
||||
config_set,
|
||||
CONFIG_GET_ROOT_VALUE)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
from .helpers import add_argument
|
||||
|
||||
|
||||
COMMAND_NAME = "user"
|
||||
COMMAND_HELP = "Interact with users."
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@alias('uc')
|
||||
@add_argument("--username", required=True, help="Name of user to create")
|
||||
@add_argument("--email", help="User's email")
|
||||
@add_argument("--displayname", help="User's publicly displayed name")
|
||||
def command_create(args):
|
||||
"""
|
||||
Create a user
|
||||
"""
|
||||
# Module imports
|
||||
from amanuensis.user import (
|
||||
valid_username, valid_email, create_user)
|
||||
|
||||
# Verify arguments
|
||||
if not valid_username(args.username):
|
||||
logger.error("Invalid username: usernames may only contain alphanumer"
|
||||
"ic characters, dashes, and underscores")
|
||||
return -1
|
||||
if not args.displayname:
|
||||
args.displayname = args.username
|
||||
if args.email and not valid_email(args.email):
|
||||
logger.error("Invalid email")
|
||||
return -1
|
||||
try:
|
||||
existing_user = args.model_factory.user(args.username)
|
||||
if existing_user is not None:
|
||||
logger.error("Invalid username: username is already taken")
|
||||
return -1
|
||||
except Exception:
|
||||
pass # User doesn't already exist, good to go
|
||||
|
||||
# Perform command
|
||||
new_user, tmp_pw = create_user(
|
||||
args.root,
|
||||
args.model_factory,
|
||||
args.username,
|
||||
args.displayname,
|
||||
args.email)
|
||||
|
||||
# Output
|
||||
print(tmp_pw)
|
||||
return 0
|
||||
"""
|
||||
Create a user.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
@alias('ud')
|
||||
@requires_user
|
||||
def command_delete(args):
|
||||
"""
|
||||
Delete a user
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
# # Module imports
|
||||
# from amanuensis.config import logger, prepend, json_rw
|
||||
|
||||
# # Perform command
|
||||
# user_path = prepend('user', args.user.id)
|
||||
# shutil.rmtree(user_path)
|
||||
# with json_rw('user', 'index.json') as index:
|
||||
# del index[args.user.username]
|
||||
|
||||
# # TODO resolve user id references in all games
|
||||
|
||||
# # Output
|
||||
# logger.info("Deleted user {0.username} ({0.id})".format(args.user))
|
||||
# return 0
|
||||
"""
|
||||
Delete a user.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
@alias('ul')
|
||||
@no_argument
|
||||
def command_list(args):
|
||||
"""List all users"""
|
||||
raise NotImplementedError()
|
||||
# # Module imports
|
||||
# from amanuensis.config import prepend, json_ro
|
||||
# from amanuensis.user import UserModel
|
||||
|
||||
# # Perform command
|
||||
# users = []
|
||||
# with json_ro('user', 'index.json') as index:
|
||||
# for username, uid in index.items():
|
||||
# users.append(UserModel.by(uid=uid))
|
||||
|
||||
# # Output
|
||||
# users.sort(key=lambda u: u.username)
|
||||
# for user in users:
|
||||
# print("{0.id} {0.displayname} ({0.username})".format(user))
|
||||
# return 0
|
||||
"""
|
||||
List all users.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
@alias('un')
|
||||
@requires_user
|
||||
@add_argument(
|
||||
"--get", metavar="PATHSPEC", dest="get",
|
||||
nargs="?", const=CONFIG_GET_ROOT_VALUE, help="Get the value of a config key")
|
||||
@add_argument(
|
||||
"--set", metavar=("PATHSPEC", "VALUE"), dest="set",
|
||||
nargs=2, help="Set the value of a config key")
|
||||
def command_config(args):
|
||||
"""
|
||||
Interact with a user's config
|
||||
"""
|
||||
user: UserModel = args.user
|
||||
|
||||
# Verify arguments
|
||||
if args.get and args.set:
|
||||
logger.error("Specify one of --get and --set")
|
||||
return -1
|
||||
|
||||
# Perform command
|
||||
if args.get:
|
||||
config_get(user.cfg, args.get)
|
||||
|
||||
if args.set:
|
||||
with user.ctx.edit_config() as cfg:
|
||||
config_set(user.uid, cfg, args.set)
|
||||
|
||||
# Output
|
||||
return 0
|
||||
|
||||
|
||||
@alias('up')
|
||||
@requires_user
|
||||
@add_argument("--password", help="The password to set. Used for scripting; "
|
||||
"not recommended for general use")
|
||||
def command_passwd(args):
|
||||
"""
|
||||
Set a user's password
|
||||
"""
|
||||
user: UserModel = args.user
|
||||
|
||||
# Verify arguments
|
||||
password: str = args.password or getpass.getpass("Password: ")
|
||||
|
||||
# Perform command
|
||||
user.set_password(password)
|
||||
|
||||
# Output
|
||||
logger.info('Updated password for {}'.format(user.cfg.username))
|
||||
return 0
|
||||
"""
|
||||
Set a user's password.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
amanuensis/config.py (new file)
@@ -0,0 +1,46 @@
from argparse import ArgumentParser
from typing import Optional
import os


class AmanuensisConfig:
    """Base config type. Defines config keys for subclasses to override."""

    # If CONFIG_FILE is defined, the config file it points to may override
    # config values defined on the config object itself.
    CONFIG_FILE: Optional[str] = None
    STATIC_ROOT: Optional[str] = "../resources"
    SECRET_KEY: Optional[str] = "secret"
    DATABASE_URI: Optional[str] = "sqlite:///:memory:"
    TESTING: bool = False


class EnvironmentConfig(AmanuensisConfig):
    """Loads config values from environment variables."""

    CONFIG_FILE = os.environ.get("AMANUENSIS_CONFIG_FILE", AmanuensisConfig.CONFIG_FILE)
    STATIC_ROOT = os.environ.get("AMANUENSIS_STATIC_ROOT", AmanuensisConfig.STATIC_ROOT)
    SECRET_KEY = os.environ.get("AMANUENSIS_SECRET_KEY", AmanuensisConfig.SECRET_KEY)
    DATABASE_URI = os.environ.get(
        "AMANUENSIS_DATABASE_URI", AmanuensisConfig.DATABASE_URI
    )
    TESTING = os.environ.get("AMANUENSIS_TESTING", "").lower() in ("true", "1")


class CommandLineConfig(AmanuensisConfig):
    """Loads config values from command line arguments."""

    def __init__(self) -> None:
        parser = ArgumentParser()
        parser.add_argument("--config-file", default=AmanuensisConfig.CONFIG_FILE)
        parser.add_argument("--static-root", default=AmanuensisConfig.STATIC_ROOT)
        parser.add_argument("--secret-key", default=AmanuensisConfig.SECRET_KEY)
        parser.add_argument("--database-uri", default=AmanuensisConfig.DATABASE_URI)
        parser.add_argument("--debug", action="store_true")
        args = parser.parse_args()

        self.CONFIG_FILE = args.config_file
        self.STATIC_ROOT = args.static_root
        self.SECRET_KEY = args.secret_key
        self.DATABASE_URI = args.database_uri
        self.TESTING = args.debug
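A small sketch of how these config classes layer. Note that `EnvironmentConfig` reads the environment in its class body, so the variables must be set before `amanuensis.config` is imported; the values below are placeholders:

import os

os.environ["AMANUENSIS_SECRET_KEY"] = "not-a-real-secret"

from amanuensis.config import EnvironmentConfig

config = EnvironmentConfig()
assert config.SECRET_KEY == "not-a-real-secret"
assert config.DATABASE_URI == "sqlite:///:memory:"  # class default still applies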
@@ -1,23 +0,0 @@
|
||||
# Module imports
|
||||
from .dict import AttrOrderedDict, ReadOnlyOrderedDict
|
||||
from .directory import (
|
||||
RootConfigDirectoryContext,
|
||||
UserConfigDirectoryContext,
|
||||
LexiconConfigDirectoryContext,
|
||||
is_guid)
|
||||
|
||||
# Environment variable name constants
|
||||
ENV_SECRET_KEY = "AMANUENSIS_SECRET_KEY"
|
||||
ENV_CONFIG_DIR = "AMANUENSIS_CONFIG_DIR"
|
||||
ENV_LOG_FILE = "AMANUENSIS_LOG_FILE"
|
||||
ENV_LOG_FILE_SIZE = "AMANUENSIS_LOG_FILE_SIZE"
|
||||
ENV_LOG_FILE_NUM = "AMANUENSIS_LOG_FILE_NUM"
|
||||
|
||||
__all__ = [
|
||||
AttrOrderedDict.__name__,
|
||||
ReadOnlyOrderedDict.__name__,
|
||||
RootConfigDirectoryContext.__name__,
|
||||
UserConfigDirectoryContext.__name__,
|
||||
LexiconConfigDirectoryContext.__name__,
|
||||
is_guid.__name__,
|
||||
]
|
@@ -1,82 +0,0 @@
|
||||
"""
|
||||
`with` context managers for mediating config file access.
|
||||
"""
|
||||
# Standard library imports
|
||||
import fcntl
|
||||
import json
|
||||
|
||||
# Application imports
|
||||
from .dict import AttrOrderedDict, ReadOnlyOrderedDict
|
||||
|
||||
|
||||
class open_lock():
|
||||
"""A context manager that opens a file with the specified file lock"""
|
||||
def __init__(self, path, mode, lock_type):
|
||||
self.fd = open(path, mode, encoding='utf8')
|
||||
fcntl.lockf(self.fd, lock_type)
|
||||
|
||||
def __enter__(self):
|
||||
return self.fd
|
||||
|
||||
def __exit__(self, exc_type, exc_value, traceback):
|
||||
fcntl.lockf(self.fd, fcntl.LOCK_UN)
|
||||
self.fd.close()
|
||||
|
||||
|
||||
class open_sh(open_lock):
|
||||
"""A context manager that opens a file with a shared lock"""
|
||||
def __init__(self, path, mode):
|
||||
super().__init__(path, mode, fcntl.LOCK_SH)
|
||||
|
||||
|
||||
class open_ex(open_lock):
|
||||
"""A context manager that opens a file with an exclusive lock"""
|
||||
def __init__(self, path, mode):
|
||||
super().__init__(path, mode, fcntl.LOCK_EX)
|
||||
|
||||
|
||||
class json_ro(open_sh):
|
||||
"""
|
||||
A context manager that opens a file in a shared, read-only mode.
|
||||
The contents of the file are read as JSON and returned as a read-
|
||||
only OrderedDict.
|
||||
"""
|
||||
def __init__(self, path):
|
||||
super().__init__(path, 'r')
|
||||
self.config = None
|
||||
|
||||
def __enter__(self) -> ReadOnlyOrderedDict:
|
||||
self.config = json.load(self.fd, object_pairs_hook=ReadOnlyOrderedDict)
|
||||
return self.config
|
||||
|
||||
|
||||
class json_rw(open_ex):
|
||||
"""
|
||||
A context manager that opens a file with an exclusive lock. The
|
||||
file mode defaults to r+, which requires that the file exist. The
|
||||
file mode can be set to w+ to create a new file by setting the new
|
||||
kwarg in the ctor. The contents of the file are read as JSON and
|
||||
returned in an AttrOrderedDict. Any changes to the context dict
|
||||
will be written out to the file when the context manager exits,
|
||||
unless an exception is raised before exiting.
|
||||
"""
|
||||
def __init__(self, path, new=False):
|
||||
mode = 'w+' if new else 'r+'
|
||||
super().__init__(path, mode)
|
||||
self.config = None
|
||||
self.new = new
|
||||
|
||||
def __enter__(self) -> AttrOrderedDict:
|
||||
if not self.new:
|
||||
self.config = json.load(self.fd, object_pairs_hook=AttrOrderedDict)
|
||||
else:
|
||||
self.config = AttrOrderedDict()
|
||||
return self.config
|
||||
|
||||
def __exit__(self, exc_type, exc_value, traceback):
|
||||
# Only write the new value out if there wasn't an exception
|
||||
if not exc_type:
|
||||
self.fd.seek(0)
|
||||
json.dump(self.config, self.fd, allow_nan=False, indent='\t')
|
||||
self.fd.truncate()
|
||||
super().__exit__(exc_type, exc_value, traceback)
|
@@ -1,52 +0,0 @@
|
||||
"""
|
||||
Dictionary classes used to represent JSON config files in memory.
|
||||
"""
|
||||
from collections import OrderedDict
|
||||
|
||||
from amanuensis.errors import ReadOnlyError
|
||||
|
||||
|
||||
class AttrOrderedDict(OrderedDict):
|
||||
"""
|
||||
An OrderedDict with attribute access to known keys and explicit
|
||||
creation of new keys.
|
||||
"""
|
||||
def __getattr__(self, key):
|
||||
if key not in self:
|
||||
raise AttributeError(key)
|
||||
return self[key]
|
||||
|
||||
def __setattr__(self, key, value):
|
||||
if key not in self:
|
||||
raise AttributeError(key)
|
||||
self[key] = value
|
||||
|
||||
def new(self, key, value):
|
||||
"""Setter for adding new keys"""
|
||||
if key in self:
|
||||
raise KeyError("Key already exists: '{}'".format(key))
|
||||
self[key] = value
|
||||
|
||||
|
||||
class ReadOnlyOrderedDict(OrderedDict):
|
||||
"""
|
||||
An OrderedDict that cannot be modified with attribute access to
|
||||
known keys.
|
||||
"""
|
||||
def __readonly__(self, *args, **kwargs):
|
||||
raise ReadOnlyError("Cannot modify a ReadOnlyOrderedDict")
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(ReadOnlyOrderedDict, self).__init__(*args, **kwargs)
|
||||
self.__setitem__ = self.__readonly__
|
||||
self.__delitem__ = self.__readonly__
|
||||
self.pop = self.__readonly__
|
||||
self.popitem = self.__readonly__
|
||||
self.clear = self.__readonly__
|
||||
self.update = self.__readonly__
|
||||
self.setdefault = self.__readonly__
|
||||
|
||||
def __getattr__(self, key):
|
||||
if key not in self:
|
||||
raise AttributeError(key)
|
||||
return self[key]
|
@@ -1,160 +0,0 @@
|
||||
"""
|
||||
Config directory abstractions that encapsulate path munging and context
|
||||
manager usage.
|
||||
"""
|
||||
import os
|
||||
import re
|
||||
from typing import Iterable
|
||||
|
||||
from amanuensis.errors import MissingConfigError, ConfigAlreadyExistsError
|
||||
|
||||
from .context import json_ro, json_rw
|
||||
|
||||
|
||||
def is_guid(s: str) -> bool:
|
||||
return bool(re.match(r'[0-9a-z]{32}', s.lower()))
|
||||
|
||||
|
||||
class ConfigDirectoryContext():
|
||||
"""
|
||||
Base class for CRUD operations on config files in a config
|
||||
directory.
|
||||
"""
|
||||
def __init__(self, path: str):
|
||||
self.path: str = path
|
||||
if not os.path.isdir(self.path):
|
||||
raise MissingConfigError(path)
|
||||
|
||||
def new(self, filename) -> json_rw:
|
||||
"""
|
||||
Creates a JSON file that doesn't already exist.
|
||||
"""
|
||||
if not filename.endswith('.json'):
|
||||
filename = f'{filename}.json'
|
||||
fpath: str = os.path.join(self.path, filename)
|
||||
if os.path.isfile(fpath):
|
||||
raise ConfigAlreadyExistsError(fpath)
|
||||
return json_rw(fpath, new=True)
|
||||
|
||||
def read(self, filename) -> json_ro:
|
||||
"""
|
||||
Loads a JSON file in read-only mode.
|
||||
"""
|
||||
if not filename.endswith('.json'):
|
||||
filename = f'{filename}.json'
|
||||
fpath: str = os.path.join(self.path, filename)
|
||||
if not os.path.isfile(fpath):
|
||||
raise MissingConfigError(fpath)
|
||||
return json_ro(fpath)
|
||||
|
||||
def edit(self, filename, create=False) -> json_rw:
|
||||
"""
|
||||
Loads a JSON file in write mode.
|
||||
"""
|
||||
if not filename.endswith('.json'):
|
||||
filename = f'{filename}.json'
|
||||
fpath: str = os.path.join(self.path, filename)
|
||||
if not create and not os.path.isfile(fpath):
|
||||
raise MissingConfigError(fpath)
|
||||
return json_rw(fpath, new=create)
|
||||
|
||||
def delete(self, filename) -> None:
|
||||
"""Deletes a file."""
|
||||
if not filename.endswith('.json'):
|
||||
filename = f'{filename}.json'
|
||||
fpath: str = os.path.join(self.path, filename)
|
||||
if not os.path.isfile(fpath):
|
||||
raise MissingConfigError(fpath)
|
||||
os.remove(fpath)
|
||||
|
||||
def ls(self) -> Iterable[str]:
|
||||
"""Lists all files in this directory."""
|
||||
filenames: Iterable[str] = os.listdir(self.path)
|
||||
return filenames
|
||||
|
||||
|
||||
class ConfigFileConfigDirectoryContext(ConfigDirectoryContext):
|
||||
"""
|
||||
Config directory with a `config.json`.
|
||||
"""
|
||||
def __init__(self, path: str):
|
||||
super().__init__(path)
|
||||
config_path = os.path.join(self.path, 'config.json')
|
||||
if not os.path.isfile(config_path):
|
||||
raise MissingConfigError(config_path)
|
||||
|
||||
def edit_config(self) -> json_rw:
|
||||
"""rw context manager for this object's config file."""
|
||||
return self.edit('config')
|
||||
|
||||
def read_config(self) -> json_ro:
|
||||
"""ro context manager for this object's config file."""
|
||||
return self.read('config')
|
||||
|
||||
|
||||
class IndexDirectoryContext(ConfigDirectoryContext):
|
||||
"""
|
||||
A lookup layer for getting config directory contexts for lexicon
|
||||
or user directories.
|
||||
"""
|
||||
def __init__(self, path: str, cdc_type: type):
|
||||
super().__init__(path)
|
||||
index_path = os.path.join(self.path, 'index.json')
|
||||
if not os.path.isfile(index_path):
|
||||
raise MissingConfigError(index_path)
|
||||
self.cdc_type = cdc_type
|
||||
|
||||
def __getitem__(self, key: str) -> ConfigFileConfigDirectoryContext:
|
||||
"""
|
||||
Returns a context to the given item. key is treated as the
|
||||
item's id if it's a guid string, otherwise it's treated as
|
||||
the item's indexed name and run through the index first.
|
||||
"""
|
||||
if not is_guid(key):
|
||||
with self.read_index() as index:
|
||||
iid = index.get(key)
|
||||
if not iid:
|
||||
raise MissingConfigError(key)
|
||||
key = iid
|
||||
return self.cdc_type(os.path.join(self.path, key))
|
||||
|
||||
def edit_index(self) -> json_rw:
|
||||
return self.edit('index')
|
||||
|
||||
def read_index(self) -> json_ro:
|
||||
return self.read('index')
|
||||
|
||||
|
||||
class RootConfigDirectoryContext(ConfigFileConfigDirectoryContext):
|
||||
"""
|
||||
Context for the config directory with links to the lexicon and
|
||||
user contexts.
|
||||
"""
|
||||
def __init__(self, path):
|
||||
super().__init__(path)
|
||||
self.lexicon: IndexDirectoryContext = IndexDirectoryContext(
|
||||
os.path.join(self.path, 'lexicon'),
|
||||
LexiconConfigDirectoryContext)
|
||||
self.user: IndexDirectoryContext = IndexDirectoryContext(
|
||||
os.path.join(self.path, 'user'),
|
||||
UserConfigDirectoryContext)
|
||||
|
||||
|
||||
class LexiconConfigDirectoryContext(ConfigFileConfigDirectoryContext):
|
||||
"""
|
||||
A config context for a lexicon's config directory.
|
||||
"""
|
||||
def __init__(self, path):
|
||||
super().__init__(path)
|
||||
self.draft: ConfigDirectoryContext = ConfigDirectoryContext(
|
||||
os.path.join(self.path, 'draft'))
|
||||
self.src: ConfigDirectoryContext = ConfigDirectoryContext(
|
||||
os.path.join(self.path, 'src'))
|
||||
self.article: ConfigDirectoryContext = ConfigDirectoryContext(
|
||||
os.path.join(self.path, 'article'))
|
||||
|
||||
|
||||
class UserConfigDirectoryContext(ConfigFileConfigDirectoryContext):
|
||||
"""
|
||||
A config context for a user's config directory.
|
||||
"""
|
@@ -1,96 +0,0 @@
|
||||
# Standard library imports
|
||||
from collections import OrderedDict
|
||||
import fcntl
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
|
||||
# Module imports
|
||||
from amanuensis.resources import get_stream
|
||||
|
||||
from .context import json_ro, json_rw
|
||||
|
||||
|
||||
def create_config_dir(config_dir, refresh=False):
|
||||
"""
|
||||
Create or refresh a config directory
|
||||
"""
|
||||
|
||||
def prepend(*path):
|
||||
joined = os.path.join(*path)
|
||||
if not joined.startswith(config_dir):
|
||||
joined = os.path.join(config_dir, joined)
|
||||
return joined
|
||||
|
||||
# Create the directory if it doesn't exist.
|
||||
if not os.path.isdir(config_dir):
|
||||
os.mkdir(config_dir)
|
||||
|
||||
# The directory should be empty if we're not updating an existing one.
|
||||
if len(os.listdir(config_dir)) > 0 and not refresh:
|
||||
print("Directory {} is not empty".format(config_dir))
|
||||
return -1
|
||||
|
||||
# Update or create global config.
|
||||
def_cfg = get_stream("global.json")
|
||||
global_config_path = prepend("config.json")
|
||||
if refresh and os.path.isfile(global_config_path):
|
||||
# We need to write an entirely different ordereddict to the config
|
||||
# file, so we mimic the config.context functionality manually.
|
||||
with open(global_config_path, 'r+', encoding='utf8') as cfg_file:
|
||||
fcntl.lockf(cfg_file, fcntl.LOCK_EX)
|
||||
old_cfg = json.load(cfg_file, object_pairs_hook=OrderedDict)
|
||||
new_cfg = json.load(def_cfg, object_pairs_hook=OrderedDict)
|
||||
merged = {}
|
||||
for key in new_cfg:
|
||||
merged[key] = old_cfg[key] if key in old_cfg else new_cfg[key]
|
||||
if key not in old_cfg:
|
||||
print("Added key '{}' to config".format(key))
|
||||
for key in old_cfg:
|
||||
if key not in new_cfg:
|
||||
print("Config contains unknown key '{}'".format(key))
|
||||
merged[key] = old_cfg[key]
|
||||
cfg_file.seek(0)
|
||||
json.dump(merged, cfg_file, allow_nan=False, indent='\t')
|
||||
cfg_file.truncate()
|
||||
fcntl.lockf(cfg_file, fcntl.LOCK_UN)
|
||||
else:
|
||||
with open(prepend("config.json"), 'wb') as f:
|
||||
f.write(def_cfg.read())
|
||||
|
||||
# Ensure lexicon subdir exists.
|
||||
if not os.path.isdir(prepend("lexicon")):
|
||||
os.mkdir(prepend("lexicon"))
|
||||
if not os.path.isfile(prepend("lexicon", "index.json")):
|
||||
with open(prepend("lexicon", "index.json"), 'w') as f:
|
||||
json.dump({}, f)
|
||||
|
||||
# Ensure user subdir exists.
|
||||
if not os.path.isdir(prepend("user")):
|
||||
os.mkdir(prepend("user"))
|
||||
if not os.path.isfile(prepend('user', 'index.json')):
|
||||
with open(prepend('user', 'index.json'), 'w') as f:
|
||||
json.dump({}, f)
|
||||
|
||||
if refresh:
|
||||
for dir_name in ('lexicon', 'user'):
|
||||
# Clean up unindexed folders
|
||||
with json_ro(prepend(dir_name, 'index.json')) as index:
|
||||
known = list(index.values())
|
||||
entries = os.listdir(prepend(dir_name))
|
||||
for dir_entry in entries:
|
||||
if dir_entry == "index.json":
|
||||
continue
|
||||
if dir_entry in known:
|
||||
continue
|
||||
print("Removing unindexed folder: '{}/{}'"
|
||||
.format(dir_name, dir_entry))
|
||||
shutil.rmtree(prepend(dir_name, dir_entry))
|
||||
|
||||
# Remove orphaned index listings
|
||||
with json_rw(prepend(dir_name, 'index.json')) as index:
|
||||
for name, entry in index.items():
|
||||
if not os.path.isdir(prepend(dir_name, entry)):
|
||||
print("Removing stale {} index entry '{}: {}'"
|
||||
.format(dir_name, name, entry))
|
||||
del index[name]
|
@@ -3,8 +3,12 @@ Database connection setup
"""
from sqlalchemy import create_engine, MetaData, event
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import scoped_session
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm import scoped_session, sessionmaker

try:
    from greenlet import getcurrent as get_ident
except ImportError:
    from threading import get_ident


# Define naming conventions for generated constraints
@@ -34,7 +38,9 @@ class DbContext:
            cursor.close()

        # Create a thread-safe session factory
        self.session = scoped_session(sessionmaker(bind=self.engine))
        self.session = scoped_session(
            sessionmaker(bind=self.engine), scopefunc=get_ident
        )

    def __call__(self, *args, **kwargs):
        """Provides shortcut access to session.execute."""
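Two changes here: the scoped session is now keyed per greenlet or thread via `scopefunc=get_ident`, and `__call__` shortcuts to `session.execute`. A hedged usage sketch (the `amanuensis.db` import matches `amanuensis/cli/admin.py` above; the model import path is an assumption):

from sqlalchemy import select
from amanuensis.db import DbContext
from amanuensis.db.models import Lexicon  # assumed module path for the models

db = DbContext("sqlite:///:memory:")
db.create_all()
lexicons = db(select(Lexicon)).scalars().all()  # __call__ forwards to session.execute
db.session.remove()  # what the Flask teardown hook below does per request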
@@ -13,7 +13,6 @@ from sqlalchemy import (
    ForeignKey,
    Integer,
    String,
    Table,
    Text,
    text,
    TypeDecorator,
@@ -234,6 +233,14 @@ class Lexicon(ModelBase):
    content_rules = relationship("ArticleContentRule", back_populates="lexicon")
    posts = relationship("Post", back_populates="lexicon")

    #######################
    # Derived information #
    #######################

    @property
    def full_title(self: "Lexicon") -> str:
        return self.title if self.title else f"Lexicon {self.name}"


class Membership(ModelBase):
    """
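The new `full_title` property only derives a display string from `title` and `name`. A minimal sketch (constructing `Lexicon` with these keyword arguments is an assumption about the model's columns):

from amanuensis.db.models import Lexicon  # assumed module path, as in the sketch above

lexicon = Lexicon(name="alpha")
assert lexicon.full_title == "Lexicon alpha"
lexicon.title = "The First Lexicon"
assert lexicon.full_title == "The First Lexicon"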
@@ -1,5 +0,0 @@
|
||||
from .setup import init_logging
|
||||
|
||||
__all__ = [member.__name__ for member in [
|
||||
init_logging
|
||||
]]
|
@@ -1,45 +0,0 @@
|
||||
import logging
|
||||
import logging.handlers
|
||||
|
||||
|
||||
basic_formatter = logging.Formatter(
|
||||
fmt='[{levelname}] {message}',
|
||||
style='{')
|
||||
detailed_formatter = logging.Formatter(
|
||||
fmt='[{asctime}] [{levelname}:{filename}:{lineno}] {message}',
|
||||
style='{')
|
||||
basic_console_handler = logging.StreamHandler()
|
||||
basic_console_handler.setLevel(logging.INFO)
|
||||
basic_console_handler.setFormatter(basic_formatter)
|
||||
detailed_console_handler = logging.StreamHandler()
|
||||
detailed_console_handler.setLevel(logging.DEBUG)
|
||||
detailed_console_handler.setFormatter(detailed_formatter)
|
||||
|
||||
|
||||
def get_file_handler(filename: str) -> logging.Handler:
|
||||
handler = logging.handlers.RotatingFileHandler(
|
||||
filename=filename,
|
||||
maxBytes=1000000,
|
||||
backupCount=10,
|
||||
delay=True,
|
||||
encoding='utf8',
|
||||
)
|
||||
handler.setLevel(logging.DEBUG)
|
||||
handler.setFormatter(detailed_formatter)
|
||||
return handler
|
||||
|
||||
|
||||
def init_logging(verbose: bool, log_filename: str):
|
||||
"""
|
||||
Initializes the Amanuensis logger settings
|
||||
"""
|
||||
logger = logging.getLogger("amanuensis")
|
||||
if log_filename:
|
||||
logger.addHandler(get_file_handler(log_filename))
|
||||
logger.setLevel(logging.DEBUG)
|
||||
elif verbose:
|
||||
logger.addHandler(detailed_console_handler)
|
||||
logger.setLevel(logging.DEBUG)
|
||||
else:
|
||||
logger.addHandler(basic_console_handler)
|
||||
logger.setLevel(logging.INFO)
|
@ -1,46 +1,68 @@
import json
import os

from flask import Flask
from flask import Flask, g

from amanuensis.config import RootConfigDirectoryContext, ENV_CONFIG_DIR
from amanuensis.models import ModelFactory
from .auth import get_login_manager, bp_auth
from .helpers import register_custom_filters
from .home import bp_home
from .lexicon import bp_lexicon
from .session import bp_session
from amanuensis.config import AmanuensisConfig, CommandLineConfig
from amanuensis.db import DbContext
import amanuensis.server.home


def get_app(root: RootConfigDirectoryContext) -> Flask:
    # Flask app init
    with root.read_config() as cfg:
        app = Flask(
            __name__,
            template_folder='.',
            static_folder=cfg.static_root
        )
        app.secret_key = bytes.fromhex(cfg.secret_key)
        app.config['root'] = root
        app.config['model_factory'] = ModelFactory(root)
        app.jinja_options['trim_blocks'] = True
        app.jinja_options['lstrip_blocks'] = True
        register_custom_filters(app)
def get_app(
    config: AmanuensisConfig,
    db: DbContext = None,
) -> Flask:
    """Application factory"""
    # Create the Flask object
    app = Flask(__name__, template_folder=".", static_folder=config.STATIC_ROOT)

    # Flask-Login init
    login_manager = get_login_manager(root)
    login_manager.init_app(app)
    # Load keys from the config object
    app.config.from_object(config)

    # Blueprint inits
    app.register_blueprint(bp_auth)
    app.register_blueprint(bp_home)
    app.register_blueprint(bp_lexicon)
    app.register_blueprint(bp_session)
    # If a config file is now specified, also load keys from there
    if config_path := app.config.get("CONFIG_FILE", None):
        app.config.from_file(os.path.abspath(config_path), json.load)

    return app
    # Assert that all required config values are now set
    for config_key in ("SECRET_KEY", "DATABASE_URI"):
        if not app.config.get(config_key):
            raise Exception(f"{config_key} must be defined")

    # Create the database context, if one wasn't already given
    if db is None:
        db = DbContext(app.config["DATABASE_URI"])

    # Make the database connection available to requests via g
    def db_setup():
        g.db = db

    app.before_request(db_setup)

    # Tear down the session on request teardown
    def db_teardown(response_or_exc):
        db.session.remove()

    app.teardown_appcontext(db_teardown)

    # Configure jinja options
    app.jinja_options.update(trim_blocks=True, lstrip_blocks=True)

    # Set up Flask-Login
    # TODO

    # Register blueprints
    app.register_blueprint(amanuensis.server.home.bp)

    def test():
        return "Hello, world!"

    app.route("/")(test)

    return app


def default():
    cwd = os.getcwd()
    config_dir = os.environ.get(ENV_CONFIG_DIR, "amanuensis")
    root = RootConfigDirectoryContext(os.path.join(cwd, config_dir))
    return get_app(root)
def run():
    """Run the server, populating the config from the command line."""
    config = CommandLineConfig()
    app = get_app(config)
    app.run(debug=app.testing)
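Reviewer note: a minimal usage sketch of the new application factory. The DevConfig class below is illustrative and not part of this diff; get_app only requires an object exposing SECRET_KEY and DATABASE_URI (plus optional STATIC_ROOT and CONFIG_FILE), as asserted above.

from amanuensis.db import DbContext
from amanuensis.server import get_app


class DevConfig:
    """Hypothetical config object for local development."""
    STATIC_ROOT = "static"
    SECRET_KEY = "dev secret key"
    DATABASE_URI = "sqlite:///:memory:"


# Let get_app build the DbContext from DATABASE_URI...
app = get_app(DevConfig)
# ...or inject an existing context, as the test fixtures later in this diff do.
app = get_app(DevConfig, db=DbContext(DevConfig.DATABASE_URI))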
@ -1,64 +1,64 @@
from flask import Blueprint, render_template, redirect, url_for, current_app
from flask_login import login_required, current_user
from flask import Blueprint, render_template, g

from amanuensis.config import RootConfigDirectoryContext
from amanuensis.lexicon import create_lexicon, load_all_lexicons
from amanuensis.models import UserModel, ModelFactory
from amanuensis.server.helpers import admin_required
# from flask import Blueprint, render_template, redirect, url_for, current_app
# from flask_login import login_required, current_user

from .forms import LexiconCreateForm
import amanuensis.backend.user as userq
import amanuensis.backend.lexicon as lexiq

bp_home = Blueprint('home', __name__,
    url_prefix='/home',
    template_folder='.')
# from amanuensis.config import RootConfigDirectoryContext
# from amanuensis.lexicon import create_lexicon, load_all_lexicons
# from amanuensis.models import UserModel, ModelFactory
# from amanuensis.server.helpers import admin_required

# from .forms import LexiconCreateForm

bp = Blueprint("home", __name__, url_prefix="/home", template_folder=".")


@bp_home.route('/', methods=['GET'])
def home():
    root: RootConfigDirectoryContext = current_app.config['root']
    user: UserModel = current_user
    user_lexicons = []
    public_lexicons = []
    for lexicon in load_all_lexicons(root):
        if user.uid in lexicon.cfg.join.joined:
            user_lexicons.append(lexicon)
        elif lexicon.cfg.join.public:
            public_lexicons.append(lexicon)
    return render_template(
        'home.root.jinja',
        user_lexicons=user_lexicons,
        public_lexicons=public_lexicons)
# @bp.get("/")
# def home():
# Show lexicons that are visible to the current user
# return "TODO"
# user_lexicons = []
# public_lexicons = []
# for lexicon in load_all_lexicons(root):
# if user.uid in lexicon.cfg.join.joined:
# user_lexicons.append(lexicon)
# elif lexicon.cfg.join.public:
# public_lexicons.append(lexicon)
# return render_template(
# 'home.root.jinja',
# user_lexicons=user_lexicons,
# public_lexicons=public_lexicons)


@bp_home.route('/admin/', methods=['GET'])
@login_required
@admin_required
@bp.get("/admin/")
# @login_required
# @admin_required
def admin():
    root: RootConfigDirectoryContext = current_app.config['root']
    users = []
    lexicons = list(load_all_lexicons(root))
    return render_template('home.admin.jinja', users=users, lexicons=lexicons)
    return render_template("home.admin.jinja", db=g.db, userq=userq, lexiq=lexiq)


@bp_home.route("/admin/create/", methods=['GET', 'POST'])
@login_required
@admin_required
def admin_create():
    form = LexiconCreateForm()
# @bp_home.route("/admin/create/", methods=['GET', 'POST'])
# @login_required
# @admin_required
# def admin_create():
# form = LexiconCreateForm()

    if not form.validate_on_submit():
        # GET or POST with invalid form data
        return render_template('home.create.jinja', form=form)
# if not form.validate_on_submit():
# # GET or POST with invalid form data
# return render_template('home.create.jinja', form=form)

    # POST with valid data
    root: RootConfigDirectoryContext = current_app.config['root']
    model_factory: ModelFactory = current_app.config['model_factory']
    lexicon_name = form.lexiconName.data
    editor_name = form.editorName.data
    prompt = form.promptText.data
    # Editor's existence was checked by form validators
    editor = model_factory.user(editor_name)
    lexicon = create_lexicon(root, lexicon_name, editor)
    with lexicon.ctx.edit_config() as cfg:
        cfg.prompt = prompt
    return redirect(url_for('session.session', name=lexicon_name))
# # POST with valid data
# root: RootConfigDirectoryContext = current_app.config['root']
# model_factory: ModelFactory = current_app.config['model_factory']
# lexicon_name = form.lexiconName.data
# editor_name = form.editorName.data
# prompt = form.promptText.data
# # Editor's existence was checked by form validators
# editor = model_factory.user(editor_name)
# lexicon = create_lexicon(root, lexicon_name, editor)
# with lexicon.ctx.edit_config() as cfg:
# cfg.prompt = prompt
# return redirect(url_for('session.session', name=lexicon_name))
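Reviewer note: a sketch of how the commented-out home view could be restored on top of the new backend queries used by the admin view. This is only an assumption about the eventual shape; it lists every lexicon via lexiq rather than reimplementing the per-user visibility filter, and the view name is hypothetical.

@bp.get("/")
def home_sketch():
    # Hypothetical replacement for the old home(): pulls lexicons from the
    # request-scoped database context instead of the old config directory.
    lexicons = list(lexiq.get_all_lexicons(g.db))
    return render_template("home.root.jinja", user_lexicons=lexicons, public_lexicons=[])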
@ -3,17 +3,18 @@
{% block title %}Admin | Amanuensis{% endblock %}
{% block header %}<h2>Amanuensis - Admin Dashboard</h2>{% endblock %}

{% block sb_home %}<a href="{{ url_for('home.home') }}">Home</a>{% endblock %}
{% block sb_create %}<a href="{{ url_for('home.admin_create') }}">Create a lexicon</a>{% endblock %}
{# TODO #}
{% block sb_home %}<a href="#{#{ url_for('home.home') }#}">Home</a>{% endblock %}
{% block sb_create %}<a href="#{#{ url_for('home.admin_create') }#}">Create a lexicon</a>{% endblock %}
{% set template_sidebar_rows = [self.sb_home(), self.sb_create()] %}

{% block main %}
<p>Users:</p>
{% for user in users %}
{% for user in userq.get_all_users(db) %}
{{ macros.dashboard_user_item(user) }}
{% endfor %}
<p>Lexicons:</p>
{% for lexicon in lexicons %}
{% for lexicon in lexiq.get_all_lexicons(db) %}
{{ macros.dashboard_lexicon_item(lexicon) }}
{% endfor %}
{% endblock %}
@ -1,45 +1,47 @@
{% macro dashboard_lexicon_item(lexicon) %}
<div class="dashboard-lexicon-item dashboard-lexicon-{{ lexicon.status }}">
<p>
<span class="dashboard-lexicon-item-title">
<a href="{{ url_for('lexicon.contents', name=lexicon.cfg.name) }}">
Lexicon {{ lexicon.cfg.name }}</a>
</span>
[{{ lexicon.status.capitalize() }}]
</p>
<p><i>{{ lexicon.cfg.prompt }}</i></p>
{% if current_user.is_authenticated %}
<p>
{%
if current_user.uid in lexicon.cfg.join.joined
or current_user.cfg.is_admin
%}
Editor: {{ lexicon.cfg.editor|user_attr('username') }} /
Players:
{% for uid in lexicon.cfg.join.joined %}
{{ uid|user_attr('username') }}{% if not loop.last %}, {% endif %}
{% endfor %}
({{ lexicon.cfg.join.joined|count }}/{{ lexicon.cfg.join.max_players }})
{% else %}
Players: {{ lexicon.cfg.join.joined|count }}/{{ lexicon.cfg.join.max_players }}
{% if lexicon.cfg.join.public and lexicon.cfg.join.open %}
/ <a href="{{ url_for('lexicon.join', name=lexicon.cfg.name) }}">
Join game
</a>
{% endif %}
{% endif %}
</p>
{% endif %}
{% set status = "completed" if lexicon.completed else "ongoing" if lexicon.started else "unstarted" %}
<div class="dashboard-lexicon-item dashboard-lexicon-{{ status }}">
<p>
<span class="dashboard-lexicon-item-title">
<a href="#{#{ url_for('lexicon.contents', name=lexicon.cfg.name) }#}">
{{ lexicon.full_title }}</a>
</span>
[{{ lexicon.status.capitalize() }}]
</p>
<p><i>{{ lexicon.prompt }}</i></p>
{# {% if current_user.is_authenticated %} #}
<p>
{# TODO #}
{# {%
if current_user.uid in lexicon.cfg.join.joined
or current_user.cfg.is_admin
%} #}
Editor: {#{ lexicon.cfg.editor|user_attr('username') }#} /
Players:
{# {% for uid in lexicon.cfg.join.joined %} #}
{# {{ uid|user_attr('username') }}{% if not loop.last %}, {% endif %} #}
{# {% endfor %} #}
{# ({{ lexicon.cfg.join.joined|count }}/{{ lexicon.cfg.join.max_players }}) #}
{# {% else %} #}
{# Players: {{ lexicon.cfg.join.joined|count }}/{{ lexicon.cfg.join.max_players }} #}
{# {% if lexicon.cfg.join.public and lexicon.cfg.join.open %} #}
{# / <a href="{{ url_for('lexicon.join', name=lexicon.cfg.name) }}"> #}
{# Join game #}
{# </a> #}
{# {% endif %} #}
{# {% endif %} #}
</p>
{# {% endif %} #}
</div>
{% endmacro %}

{% macro dashboard_user_item(user) %}
<div class="dashboard-lexicon-item">
<p>
<b>{{ user.cfg.username }}</b>
{% if user.cfg.username != user.cfg.displayname %} / {{ user.cfg.displayname }}{% endif %}
({{user.uid}})
</p>
<p>Last activity: {{ user.cfg.last_activity|asdate }} — Last login: {{ user.cfg.last_login|asdate }}</p>
<p>
<b>{{ user.username }}</b>
{% if user.username != user.display_name %} / {{ user.display_name }}{% endif %}
(id #{{user.id}})
</p>
<p>Last activity: {{ user.last_activity }} — Last login: {{ user.last_login }}</p>
</div>
{% endmacro %}
@ -1,33 +1,34 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8"/>
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>{% block title %}{% endblock %}</title>
<link rel="icon" type="image/png" href="{{ url_for('static', filename='amanuensis.png') }}">
<link rel="stylesheet" href="{{ url_for("static", filename="page.css") }}">
<meta charset="utf-8"/>
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>{% block title %}{% endblock %}</title>
<link rel="icon" type="image/png" href="{{ url_for('static', filename='amanuensis.png') }}">
<link rel="stylesheet" href="{{ url_for('static', filename='page.css') }}">
</head>
<body>
<div id="wrapper">
<div id="header">
<div id="login-status" {% block login_status_attr %}{% endblock %}>
{% if current_user.is_authenticated %}
<b>{{ current_user.cfg.username -}}</b>
(<a href="{{ url_for('auth.logout') }}">Logout</a>)
{% else %}
<a href="{{ url_for('auth.login') }}">Login</a>
{% endif %}
</div>
{% block header %}{% endblock %}
</div>
{% block sidebar %}{% endblock %}
<div id="content" class="{% block content_class %}{% endblock %}">
{% if not template_content_blocks %}{% set template_content_blocks = [] %}{% endif %}
{% if not content_blocks %}{% set content_blocks = [] %}{% endif %}
{% for content_block in template_content_blocks + content_blocks %}<div class="contentblock">
{{ content_block|safe }}</div>
{% endfor %}
</div>
</div>
<div id="wrapper">
<div id="header">
<div id="login-status" {% block login_status_attr %}{% endblock %}>
{# TODO #}
{# {% if current_user.is_authenticated %}
<b>{{ current_user.cfg.username -}}</b>
(<a href="{{ url_for('auth.logout') }}">Logout</a>)
{% else %} #}
<a href="#{#{ url_for('auth.login') }#}">Login</a>
{# {% endif %} #}
</div>
{% block header %}{% endblock %}
</div>
{% block sidebar %}{% endblock %}
<div id="content" class="{% block content_class %}{% endblock %}">
{% if not template_content_blocks %}{% set template_content_blocks = [] %}{% endif %}
{% if not content_blocks %}{% set content_blocks = [] %}{% endif %}
{% for content_block in template_content_blocks + content_blocks %}<div class="contentblock">
{{ content_block|safe }}</div>
{% endfor %}
</div>
</div>
</body>
</html>
@ -1,12 +1,12 @@
{% extends "page.jinja" %}
{% block sidebar %}
<div id="sidebar">
{% if not template_sidebar_rows %}{% set template_sidebar_rows = [] %}{% endif %}
{% if not sidebar_rows %}{% set sidebar_rows = [] %}{% endif %}
<table>
{% for row in template_sidebar_rows + sidebar_rows %}
<tr><td>{{ row|safe }}</td></tr>{% endfor %}
</table>
{% if not template_sidebar_rows %}{% set template_sidebar_rows = [] %}{% endif %}
{% if not sidebar_rows %}{% set sidebar_rows = [] %}{% endif %}
<table>
{% for row in template_sidebar_rows + sidebar_rows %}
<tr><td>{{ row|safe }}</td></tr>{% endfor %}
</table>
</div>
{% endblock %}
{% block content_class %}content-2col{% endblock %}
2
mypy.ini
@ -1,4 +1,4 @@
[mypy]
ignore_missing_imports = true
exclude = "amanuensis/cli/.*|amanuensis/config/.*|amanuensis/lexicon/.*|amanuensis/log/.*|amanuensis/models/.*|amanuensis/resources/.*|amanuensis/server/.*|amanuensis/user/.*|amanuensis/__main__.py"
exclude = "|amanuensis/lexicon/.*|amanuensis/models/.*|amanuensis/resources/.*|amanuensis/server/.*|amanuensis/user/.*|amanuensis/__main__.py|"
; mypy stable doesn't support pyproject.toml yet
133
poetry.lock
generated
@ -30,7 +30,7 @@ tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>

[[package]]
name = "black"
version = "21.5b2"
version = "21.6b0"
description = "The uncompromising code formatter."
category = "dev"
optional = false
@ -52,37 +52,39 @@ uvloop = ["uvloop (>=0.15.2)"]

[[package]]
name = "click"
version = "7.1.2"
version = "8.0.1"
description = "Composable command line interface toolkit"
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
python-versions = ">=3.6"

[package.dependencies]
colorama = {version = "*", markers = "platform_system == \"Windows\""}

[[package]]
name = "colorama"
version = "0.4.4"
description = "Cross-platform colored terminal text."
category = "dev"
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"

[[package]]
name = "flask"
version = "1.1.4"
version = "2.0.1"
description = "A simple framework for building complex web applications."
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
python-versions = ">=3.6"

[package.dependencies]
click = ">=5.1,<8.0"
itsdangerous = ">=0.24,<2.0"
Jinja2 = ">=2.10.1,<3.0"
Werkzeug = ">=0.15,<2.0"
click = ">=7.1.2"
itsdangerous = ">=2.0"
Jinja2 = ">=3.0"
Werkzeug = ">=2.0"

[package.extras]
dev = ["pytest", "coverage", "tox", "sphinx", "pallets-sphinx-themes", "sphinxcontrib-log-cabinet", "sphinx-issues"]
docs = ["sphinx", "pallets-sphinx-themes", "sphinxcontrib-log-cabinet", "sphinx-issues"]
async = ["asgiref (>=3.2)"]
dotenv = ["python-dotenv"]

[[package]]
@ -122,25 +124,25 @@ docs = ["sphinx"]

[[package]]
name = "itsdangerous"
version = "1.1.0"
description = "Various helpers to pass data to untrusted environments and back."
version = "2.0.1"
description = "Safely pass data to untrusted environments and back."
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
python-versions = ">=3.6"

[[package]]
name = "jinja2"
version = "2.11.3"
version = "3.0.1"
description = "A very fast and expressive template engine."
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
python-versions = ">=3.6"

[package.dependencies]
MarkupSafe = ">=0.23"
MarkupSafe = ">=2.0"

[package.extras]
i18n = ["Babel (>=0.8)"]
i18n = ["Babel (>=2.7)"]

[[package]]
name = "markupsafe"
@ -260,7 +262,7 @@ python-versions = "*"

[[package]]
name = "sqlalchemy"
version = "1.4.17"
version = "1.4.18"
description = "Database Abstraction Library"
category = "main"
optional = false
@ -323,14 +325,13 @@ python-versions = "*"

[[package]]
name = "werkzeug"
version = "1.0.1"
version = "2.0.1"
description = "The comprehensive WSGI web application library."
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
python-versions = ">=3.6"

[package.extras]
dev = ["pytest", "pytest-timeout", "coverage", "tox", "sphinx", "pallets-sphinx-themes", "sphinx-issues"]
watchdog = ["watchdog"]

[[package]]
@ -352,7 +353,7 @@ locale = ["Babel (>=1.3)"]
[metadata]
lock-version = "1.1"
python-versions = "^3.8"
content-hash = "8c38b0703447e638ee8181a4e449f0eab57858e171cd0de9d4e9fe07c61d0071"
content-hash = "493d96d9f3aa7056057b41877a76b5d4c4bcbd7f0a3c2864e4221024547ded87"

[metadata.files]
appdirs = [
@ -368,20 +369,20 @@ attrs = [
|
||||
{file = "attrs-21.2.0.tar.gz", hash = "sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb"},
|
||||
]
|
||||
black = [
|
||||
{file = "black-21.5b2-py3-none-any.whl", hash = "sha256:e5cf21ebdffc7a9b29d73912b6a6a9a4df4ce70220d523c21647da2eae0751ef"},
|
||||
{file = "black-21.5b2.tar.gz", hash = "sha256:1fc0e0a2c8ae7d269dfcf0c60a89afa299664f3e811395d40b1922dff8f854b5"},
|
||||
{file = "black-21.6b0-py3-none-any.whl", hash = "sha256:dfb8c5a069012b2ab1e972e7b908f5fb42b6bbabcba0a788b86dc05067c7d9c7"},
|
||||
{file = "black-21.6b0.tar.gz", hash = "sha256:dc132348a88d103016726fe360cb9ede02cecf99b76e3660ce6c596be132ce04"},
|
||||
]
|
||||
click = [
|
||||
{file = "click-7.1.2-py2.py3-none-any.whl", hash = "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"},
|
||||
{file = "click-7.1.2.tar.gz", hash = "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a"},
|
||||
{file = "click-8.0.1-py3-none-any.whl", hash = "sha256:fba402a4a47334742d782209a7c79bc448911afe1149d07bdabdf480b3e2f4b6"},
|
||||
{file = "click-8.0.1.tar.gz", hash = "sha256:8c04c11192119b1ef78ea049e0a6f0463e4c48ef00a30160c704337586f3ad7a"},
|
||||
]
|
||||
colorama = [
|
||||
{file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"},
|
||||
{file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"},
|
||||
]
|
||||
flask = [
|
||||
{file = "Flask-1.1.4-py2.py3-none-any.whl", hash = "sha256:c34f04500f2cbbea882b1acb02002ad6fe6b7ffa64a6164577995657f50aed22"},
|
||||
{file = "Flask-1.1.4.tar.gz", hash = "sha256:0fbeb6180d383a9186d0d6ed954e0042ad9f18e0e8de088b2b419d526927d196"},
|
||||
{file = "Flask-2.0.1-py3-none-any.whl", hash = "sha256:a6209ca15eb63fc9385f38e452704113d679511d9574d09b2cf9183ae7d20dc9"},
|
||||
{file = "Flask-2.0.1.tar.gz", hash = "sha256:1c4c257b1892aec1398784c63791cbaa43062f1f7aeb555c4da961b20ee68f55"},
|
||||
]
|
||||
flask-login = [
|
||||
{file = "Flask-Login-0.5.0.tar.gz", hash = "sha256:6d33aef15b5bcead780acc339464aae8a6e28f13c90d8b1cf9de8b549d1c0b4b"},
|
||||
@ -443,12 +444,12 @@ greenlet = [
|
||||
{file = "greenlet-1.1.0.tar.gz", hash = "sha256:c87df8ae3f01ffb4483c796fe1b15232ce2b219f0b18126948616224d3f658ee"},
|
||||
]
|
||||
itsdangerous = [
|
||||
{file = "itsdangerous-1.1.0-py2.py3-none-any.whl", hash = "sha256:b12271b2047cb23eeb98c8b5622e2e5c5e9abd9784a153e9d8ef9cb4dd09d749"},
|
||||
{file = "itsdangerous-1.1.0.tar.gz", hash = "sha256:321b033d07f2a4136d3ec762eac9f16a10ccd60f53c0c91af90217ace7ba1f19"},
|
||||
{file = "itsdangerous-2.0.1-py3-none-any.whl", hash = "sha256:5174094b9637652bdb841a3029700391451bd092ba3db90600dea710ba28e97c"},
|
||||
{file = "itsdangerous-2.0.1.tar.gz", hash = "sha256:9e724d68fc22902a1435351f84c3fb8623f303fffcc566a4cb952df8c572cff0"},
|
||||
]
|
||||
jinja2 = [
|
||||
{file = "Jinja2-2.11.3-py2.py3-none-any.whl", hash = "sha256:03e47ad063331dd6a3f04a43eddca8a966a26ba0c5b7207a9a9e4e08f1b29419"},
|
||||
{file = "Jinja2-2.11.3.tar.gz", hash = "sha256:a6d58433de0ae800347cab1fa3043cebbabe8baa9d29e668f1c768cb87a333c6"},
|
||||
{file = "Jinja2-3.0.1-py3-none-any.whl", hash = "sha256:1f06f2da51e7b56b8f238affdd6b4e2c61e39598a378cc49345bc1bd42a978a4"},
|
||||
{file = "Jinja2-3.0.1.tar.gz", hash = "sha256:703f484b47a6af502e743c9122595cc812b0271f661722403114f71a79d0f5a4"},
|
||||
]
|
||||
markupsafe = [
|
||||
{file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"},
|
||||
@ -586,36 +587,36 @@ regex = [
|
||||
{file = "regex-2021.4.4.tar.gz", hash = "sha256:52ba3d3f9b942c49d7e4bc105bb28551c44065f139a65062ab7912bef10c9afb"},
|
||||
]
|
||||
sqlalchemy = [
|
||||
{file = "SQLAlchemy-1.4.17-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:c367ed95d41df584f412a9419b5ece85b0d6c2a08a51ae13ae47ef74ff9a9349"},
|
||||
{file = "SQLAlchemy-1.4.17-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:fdad4a33140b77df61d456922b7974c1f1bb2c35238f6809f078003a620c4734"},
|
||||
{file = "SQLAlchemy-1.4.17-cp27-cp27m-win32.whl", hash = "sha256:f1c68f7bd4a57ffdb85eab489362828dddf6cd565a4c18eda4c446c1d5d3059d"},
|
||||
{file = "SQLAlchemy-1.4.17-cp27-cp27m-win_amd64.whl", hash = "sha256:ee6e7ca09ff274c55d19a1e15ee6f884fa0230c0d9b8d22a456e249d08dee5bf"},
|
||||
{file = "SQLAlchemy-1.4.17-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a5f00a2be7d777119e15ccfb5ba0b2a92e8a193959281089d79821a001095f80"},
|
||||
{file = "SQLAlchemy-1.4.17-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:1dd77acbc19bee9c0ba858ff5e4e5d5c60895495c83b4df9bcdf4ad5e9b74f21"},
|
||||
{file = "SQLAlchemy-1.4.17-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5732858e56d32fa7e02468f4fd2d8f01ddf709e5b93d035c637762890f8ed8b6"},
|
||||
{file = "SQLAlchemy-1.4.17-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:949ac299903d2ed8419086f81847381184e2264f3431a33af4679546dcc87f01"},
|
||||
{file = "SQLAlchemy-1.4.17-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:196fb6bb2733834e506c925d7532f8eabad9d2304deef738a40846e54c31e236"},
|
||||
{file = "SQLAlchemy-1.4.17-cp36-cp36m-win32.whl", hash = "sha256:bde055c019e6e449ebc4ec61abd3e08690abeb028c7ada2a3b95d8e352b7b514"},
|
||||
{file = "SQLAlchemy-1.4.17-cp36-cp36m-win_amd64.whl", hash = "sha256:b0ad951a6e590bbcfbfeadc5748ef5ec8ede505a8119a71b235f7481cc08371c"},
|
||||
{file = "SQLAlchemy-1.4.17-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:82922a320d38d7d6aa3a8130523ec7e8c70fa95f7ca7d0fd6ec114b626e4b10b"},
|
||||
{file = "SQLAlchemy-1.4.17-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e133e2551fa99c75849848a4ac08efb79930561eb629dd7d2dc9b7ee05256e6"},
|
||||
{file = "SQLAlchemy-1.4.17-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7e45043fe11d503e1c3f9dcf5b42f92d122a814237cd9af68a11dae46ecfcae1"},
|
||||
{file = "SQLAlchemy-1.4.17-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:461a4ea803ce0834822f372617a68ac97f9fa1281f2a984624554c651d7c3ae1"},
|
||||
{file = "SQLAlchemy-1.4.17-cp37-cp37m-win32.whl", hash = "sha256:4d93b62e98248e3e1ac1e91c2e6ee1e7316f704be1f734338b350b6951e6c175"},
|
||||
{file = "SQLAlchemy-1.4.17-cp37-cp37m-win_amd64.whl", hash = "sha256:a2d225c8863a76d15468896dc5af36f1e196b403eb9c7e0151e77ffab9e7df57"},
|
||||
{file = "SQLAlchemy-1.4.17-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:b59b2c0a3b1d93027f6b6b8379a50c354483fe1ebe796c6740e157bb2e06d39a"},
|
||||
{file = "SQLAlchemy-1.4.17-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7222f3236c280fab3a2d76f903b493171f0ffc29667538cc388a5d5dd0216a88"},
|
||||
{file = "SQLAlchemy-1.4.17-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4b09191ed22af149c07a880f309b7740f3f782ff13325bae5c6168a6aa57e715"},
|
||||
{file = "SQLAlchemy-1.4.17-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:216ff28fe803885ceb5b131dcee6507d28d255808dd5bcffcb3b5fa75be2e102"},
|
||||
{file = "SQLAlchemy-1.4.17-cp38-cp38-win32.whl", hash = "sha256:dde05ae0987e43ec84e64d6722ce66305eda2a5e2b7d6fda004b37aabdfbb909"},
|
||||
{file = "SQLAlchemy-1.4.17-cp38-cp38-win_amd64.whl", hash = "sha256:bc89e37c359dcd4d75b744e5e81af128ba678aa2ecea4be957e80e6e958a1612"},
|
||||
{file = "SQLAlchemy-1.4.17-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:4c5e20666b33b03bf7f14953f0deb93007bf8c1342e985bd7c7cf25f46fac579"},
|
||||
{file = "SQLAlchemy-1.4.17-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f63e1f531a8bf52184e2afb53648511f3f8534decb7575b483a583d3cd8d13ed"},
|
||||
{file = "SQLAlchemy-1.4.17-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7dc3d3285fb682316d580d84e6e0840fdd8ffdc05cb696db74b9dd746c729908"},
|
||||
{file = "SQLAlchemy-1.4.17-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58c02d1771bb0e61bc9ced8f3b36b5714d9ece8fd4bdbe2a44a892574c3bbc3c"},
|
||||
{file = "SQLAlchemy-1.4.17-cp39-cp39-win32.whl", hash = "sha256:6fe1c8dc26bc0005439cb78ebc78772a22cccc773f5a0e67cb3002d791f53f0f"},
|
||||
{file = "SQLAlchemy-1.4.17-cp39-cp39-win_amd64.whl", hash = "sha256:7eb55d5583076c03aaf1510473fad2a61288490809049cb31028af56af7068ee"},
|
||||
{file = "SQLAlchemy-1.4.17.tar.gz", hash = "sha256:651cdb3adcee13624ba22d5ff3e96f91e16a115d2ca489ddc16a8e4c217e8509"},
|
||||
{file = "SQLAlchemy-1.4.18-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:d76abceeb6f7c564fdbc304b1ce17ec59664ca7ed0fe6dbc6fc6a960c91370e3"},
|
||||
{file = "SQLAlchemy-1.4.18-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:4cdc91bb3ee5b10e24ec59303131b791f3f82caa4dd8b36064d1918b0f4d0de4"},
|
||||
{file = "SQLAlchemy-1.4.18-cp27-cp27m-win32.whl", hash = "sha256:3690fc0fc671419debdae9b33df1434ac9253155fd76d0f66a01f7b459d56ee6"},
|
||||
{file = "SQLAlchemy-1.4.18-cp27-cp27m-win_amd64.whl", hash = "sha256:5b827d3d1d982b38d2bab551edf9893c4734b5db9b852b28d3bc809ea7e179f6"},
|
||||
{file = "SQLAlchemy-1.4.18-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:495cce8174c670f1d885e2259d710b0120888db2169ea14fc32d1f72e7950642"},
|
||||
{file = "SQLAlchemy-1.4.18-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:60cfe1fb59a34569816907cb25bb256c9490824679c46777377bcc01f6813a81"},
|
||||
{file = "SQLAlchemy-1.4.18-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3357948fa439eb5c7241a8856738605d7ab9d9f276ca5c5cc3220455a5f8e6c"},
|
||||
{file = "SQLAlchemy-1.4.18-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:93394d68f02ecbf8c0a4355b6452793000ce0ee7aef79d2c85b491da25a88af7"},
|
||||
{file = "SQLAlchemy-1.4.18-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56958dd833145f1aa75f8987dfe0cf6f149e93aa31967b7004d4eb9cb579fefc"},
|
||||
{file = "SQLAlchemy-1.4.18-cp36-cp36m-win32.whl", hash = "sha256:664c6cc84a5d2bad2a4a3984d146b6201b850ba0a7125b2fcd29ca06cddac4b1"},
|
||||
{file = "SQLAlchemy-1.4.18-cp36-cp36m-win_amd64.whl", hash = "sha256:77549e5ae996de50ad9f69f863c91daf04842b14233e133335b900b152bffb07"},
|
||||
{file = "SQLAlchemy-1.4.18-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:e2aa39fdf5bff1c325a8648ac1957a0320c66763a3fa5f0f4a02457b2afcf372"},
|
||||
{file = "SQLAlchemy-1.4.18-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffb18eb56546aa66640fef831e5d0fe1a8dfbf11cdf5b00803826a01dbbbf3b1"},
|
||||
{file = "SQLAlchemy-1.4.18-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cc474d0c40cef94d9b68980155d686d5ad43a9ca0834a8729052d3585f289d57"},
|
||||
{file = "SQLAlchemy-1.4.18-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d4b2c23d20acf631456e645227cef014e7f84a111118d530cfa1d6053fd05a9"},
|
||||
{file = "SQLAlchemy-1.4.18-cp37-cp37m-win32.whl", hash = "sha256:45bbb935b305e381bcb542bf4d952232282ba76881e3458105e4733ba0976060"},
|
||||
{file = "SQLAlchemy-1.4.18-cp37-cp37m-win_amd64.whl", hash = "sha256:3a6afb7a55374329601c8fcad277f0a47793386255764431c8f6a231a6947ee9"},
|
||||
{file = "SQLAlchemy-1.4.18-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:9a62b06ad450386a2e671d0bcc5cd430690b77a5cd41c54ede4e4bf46d7a4978"},
|
||||
{file = "SQLAlchemy-1.4.18-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70674f2ff315a74061da7af1225770578d23f4f6f74dd2e1964493abd8d804bc"},
|
||||
{file = "SQLAlchemy-1.4.18-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4f375c52fed5f2ecd06be18756f121b3167a1fdc4543d877961fba04b1713214"},
|
||||
{file = "SQLAlchemy-1.4.18-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eba098a4962e1ab0d446c814ae67e30da82c446b382cf718306cc90d4e2ad85f"},
|
||||
{file = "SQLAlchemy-1.4.18-cp38-cp38-win32.whl", hash = "sha256:ee3428f6100ff2b07e7ecec6357d865a4d604c801760094883587ecdbf8a3533"},
|
||||
{file = "SQLAlchemy-1.4.18-cp38-cp38-win_amd64.whl", hash = "sha256:5c62fff70348e3f8e4392540d31f3b8c251dc8eb830173692e5d61896d4309d6"},
|
||||
{file = "SQLAlchemy-1.4.18-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:8924d552decf1a50d57dca4984ebd0778a55ca2cb1c0ef16df8c1fed405ff290"},
|
||||
{file = "SQLAlchemy-1.4.18-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:284b6df04bc30e886998e0fdbd700ef9ffb83bcb484ffc54d4084959240dce91"},
|
||||
{file = "SQLAlchemy-1.4.18-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:146af9e67d0f821b28779d602372e65d019db01532d8f7101e91202d447c14ec"},
|
||||
{file = "SQLAlchemy-1.4.18-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2129d33b54da4d4771868a3639a07f461adc5887dbd9e0a80dbf560272245525"},
|
||||
{file = "SQLAlchemy-1.4.18-cp39-cp39-win32.whl", hash = "sha256:0653d444d52f2b9a0cba1ea5cd0fc64e616ee3838ee86c1863781b2a8670fc0c"},
|
||||
{file = "SQLAlchemy-1.4.18-cp39-cp39-win_amd64.whl", hash = "sha256:c824d14b52000597dfcced0a4e480fd8664b09fed606e746a2c67fe5fbe8dfd9"},
|
||||
{file = "SQLAlchemy-1.4.18.tar.gz", hash = "sha256:d25210f5f1a6b7b6b357d8fa199fc1d5be828c67cc1af517600c02e5b2727e4c"},
|
||||
]
|
||||
toml = [
|
||||
{file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
|
||||
@ -663,8 +664,8 @@ wcwidth = [
|
||||
{file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"},
|
||||
]
|
||||
werkzeug = [
|
||||
{file = "Werkzeug-1.0.1-py2.py3-none-any.whl", hash = "sha256:2de2a5db0baeae7b2d2664949077c2ac63fbd16d98da0ff71837f7d1dea3fd43"},
|
||||
{file = "Werkzeug-1.0.1.tar.gz", hash = "sha256:6c80b1e5ad3665290ea39320b91e1be1e0d5f60652b964a3070216de83d2e47c"},
|
||||
{file = "Werkzeug-2.0.1-py3-none-any.whl", hash = "sha256:6c1ec500dcdba0baa27600f6a22f6333d8b662d22027ff9f6202e3367413caa8"},
|
||||
{file = "Werkzeug-2.0.1.tar.gz", hash = "sha256:1de1db30d010ff1af14a009224ec49ab2329ad2cde454c8a708130642d579c42"},
|
||||
]
|
||||
wtforms = [
|
||||
{file = "WTForms-2.3.3-py2.py3-none-any.whl", hash = "sha256:7b504fc724d0d1d4d5d5c114e778ec88c37ea53144683e084215eed5155ada4c"},
|
||||
|
@ -6,7 +6,7 @@ authors = ["Tim Van Baak <tim.vanbaak@gmail.com>"]

[tool.poetry.dependencies]
python = "^3.8"
Flask = "^1.1.2"
Flask = "^2.0.1"
Flask-Login = "^0.5.0"
Flask-WTF = "^0.14.3"
SQLAlchemy = "^1.4.12"
@ -16,8 +16,12 @@ pytest = "^5.2"
black = "^21.5b2"
mypy = "^0.812"

[tool.poetry.scripts]
amanuensis-cli = "amanuensis.cli:main"
amanuensis-server = "amanuensis.server:run"

[tool.black]
extend-exclude = "^/amanuensis/cli/.*|^/amanuensis/config/.*|^/amanuensis/lexicon/.*|^/amanuensis/log/.*|^/amanuensis/models/.*|^/amanuensis/resources/.*|^/amanuensis/server/.*|^/amanuensis/user/.*|^/amanuensis/__main__.py"
extend-exclude = "^/amanuensis/lexicon/.*|^/amanuensis/models/.*|^/amanuensis/resources/.*|^/amanuensis/server/.*|^/amanuensis/user/.*|^/amanuensis/__main__.py"

[tool.mypy]
ignore_missing_imports = true
|
import amanuensis.backend.lexicon as lexiq
import amanuensis.backend.membership as memq
import amanuensis.backend.user as userq
from amanuensis.config import AmanuensisConfig
from amanuensis.server import get_app


@pytest.fixture
@ -122,3 +124,16 @@ def lexicon_with_editor(make):
    )
    assert membership
    return (lexicon, editor)


class TestConfig(AmanuensisConfig):
    TESTING = True
    SECRET_KEY = "secret key"
    DATABASE_URI = "sqlite:///:memory:"


@pytest.fixture
def app(db):
    """Provides an application running on top of the test database."""
    server_app = get_app(TestConfig, db)
    return server_app
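Reviewer note: a sketch of how the app fixture can be used to check the g.db wiring that get_app sets up with before_request. Assumptions: a db fixture providing the test DbContext exists (implied by app(db) above), and preprocess_request() is used to run the before_request hooks outside a real request.

from flask import g


def test_db_on_g_sketch(app, db):
    """Hypothetical test: the before_request hook should expose the test DbContext as g.db."""
    with app.test_request_context("/"):
        app.preprocess_request()
        assert g.db is db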
13
tests/test_server.py
Normal file
@ -0,0 +1,13 @@
from flask import Flask


def test_app_testing(app: Flask):
    """Confirm that the test config loads correctly."""
    assert app.testing


def test_client(app: Flask):
    """Test that the test client works."""
    with app.test_client() as client:
        response = client.get("/")
        assert b"world" in response.data