import sys # For argv and stderr
import os # For reading directories
import re # For parsing lex content
import io # For writing pages out as UTF-8
import networkx # For pagerank analytics
from collections import defaultdict # For rank inversion in statistics
from src import utils
from src.article import LexiconArticle
class LexiconPage:
    """
    Wraps a Lexicon page skeleton string together with a persistent set of
    format kwargs, so values that are constant across pages (title bars,
    footers, etc.) only need to be supplied once.
    """
    def __init__(self, skeleton=None, page=None):
        # Copy-constructing from an existing page takes precedence over
        # the raw skeleton argument.
        if page is not None:
            self.skeleton = page.skeleton
            self.kwargs = dict(page.kwargs)
        else:
            self.skeleton = skeleton
            self.kwargs = {}

    def add_kwargs(self, **kwargs):
        # Persist kwargs for all subsequent format() calls.
        self.kwargs.update(kwargs)

    def format(self, **kwargs):
        # One-off kwargs override the stored ones.
        merged = dict(self.kwargs)
        merged.update(kwargs)
        return self.skeleton.format(**merged)
def build_contents_page(page, articles, index_list):
    """
    Builds the full HTML of the contents page.

    Lists every article twice: once alphabetically grouped under the index
    headings in index_list (newline-separated), and once grouped by turn.
    Phantom articles (article.player is None) are linked with a "phantom"
    CSS class.

    NOTE(review): the HTML tags inside the string literals below were lost
    in a bad paste (the literals were left unterminated); they have been
    reconstructed from the surrounding structure. Verify the exact tags,
    ids, and the article link path against the stylesheet and site layout.
    """
    content = "<div class=\"contentblock\">"
    # Head the contents page with counts of written and phantom articles
    phantom_count = len([article for article in articles if article.player is None])
    if phantom_count == 0:
        content += "<p>There are <b>{0}</b> entries in this lexicon.</p>\n".format(len(articles))
    else:
        content += "<p>There are <b>{0}</b> entries, <b>{1}</b> written and <b>{2}</b> phantom.</p>\n".format(
            len(articles), len(articles) - phantom_count, phantom_count)
    # Prepare article links; presumably article pages sit next to the
    # contents page -- TODO confirm relative path
    link_by_title = {article.title: "<a href=\"{1}.html\"{2}>{0}</a>".format(
            article.title, article.title_filesafe,
            " class=\"phantom\"" if article.player is None else "")
        for article in articles}
    # Write the articles in alphabetical order
    content += utils.load_resource("contents.html")
    content += "<div id=\"index-order\" style=\"display:block\">\n<ul>\n"
    indices = index_list.split("\n")
    alphabetical_order = sorted(
        articles,
        key=lambda a: utils.titlesort(a.title))
    check_off = list(alphabetical_order)
    for index_str in indices:
        content += "<h3>{0}</h3>\n".format(index_str)
        for article in alphabetical_order:
            # An article belongs under an index heading when the first
            # character of its sort-title appears in that heading
            if (utils.titlesort(article.title)[0].upper() in index_str):
                check_off.remove(article)
                content += "<li>- {}</li>\n".format(link_by_title[article.title])
    # Anything whose initial matched no heading goes under "&c."
    if len(check_off) > 0:
        content += "<h3>&c.</h3>\n"
        for article in check_off:
            content += "<li>- {}</li>\n".format(link_by_title[article.title])
    content += "</ul>\n</div>\n"
    # Write the articles in turn order
    content += "<div id=\"turn-order\" style=\"display:none\">\n<ul>\n"
    turn_numbers = [article.turn for article in articles if article.player is not None]
    first_turn, last_turn = min(turn_numbers), max(turn_numbers)
    turn_order = sorted(
        articles,
        key=lambda a: (a.turn, utils.titlesort(a.title)))
    check_off = list(turn_order)
    for turn_num in range(first_turn, last_turn + 1):
        content += "<h3>Turn {0}</h3>\n".format(turn_num)
        for article in turn_order:
            if article.turn == turn_num:
                check_off.remove(article)
                content += "<li>- {}</li>\n".format(link_by_title[article.title])
    # Phantom articles have no turn; list them as unwritten
    if len(check_off) > 0:
        content += "<h3>Unwritten</h3>\n"
        for article in check_off:
            content += "<li>- {}</li>\n".format(link_by_title[article.title])
    content += "</ul>\n</div>\n"
    # Fill in the page skeleton
    return page.format(title="Index", content=content)
def build_rules_page(page):
    """
    Builds the full HTML of the rules page.
    """
    # The rules text is a static resource; drop it straight into the skeleton.
    rules_html = utils.load_resource("rules.html")
    return page.format(title="Rules", content=rules_html)
def build_formatting_page(page):
    """
    Builds the full HTML of the formatting page.
    """
    # The formatting help is a static resource; drop it straight into the skeleton.
    formatting_html = utils.load_resource("formatting.html")
    return page.format(title="Formatting", content=formatting_html)
def build_session_page(page, session_content):
    """
    Builds the full HTML of the session page.

    NOTE(review): the HTML tags in the literal below were lost in a bad
    paste (the literal was left unterminated); reconstructed as a content
    div wrapper -- confirm the class name against the stylesheet.
    """
    # Wrap the session text in a content block and fill in the entry skeleton
    content = "<div class=\"contentblock\">{}</div>".format(session_content)
    return page.format(title="Session", content=content)
def reverse_statistics_dict(stats, reverse=True):
    """
    Transforms a dictionary mapping titles to a value into a list of
    (value, titles) pairs. The list is sorted by the value (descending by
    default), and each list of titles is sorted alphabetically.
    """
    # Group titles under their shared value
    grouped = defaultdict(list)
    for title, value in stats.items():
        grouped[value].append(title)
    # Alphabetize each group of titles
    pairs = [
        (value, sorted(titles, key=utils.titlesort))
        for value, titles in grouped.items()
    ]
    # Order the groups by value
    pairs.sort(key=lambda pair: pair[0], reverse=reverse)
    return pairs
def itemize(stats_list):
    """Render (value, titles) pairs as "value – title; title" strings."""
    for value, titles in stats_list:
        yield "{0} – {1}".format(value, "; ".join(titles))
def build_statistics_page(page, articles):
    """
    Builds the full HTML of the statistics page.

    Computes citation-graph pagerank, citation counts, article lengths,
    and per-player aggregates, and renders each as a content block.

    NOTE(review): the HTML tags inside the string literals below were lost
    in a bad paste (the literals were left unterminated); they have been
    reconstructed -- verify tags/classes against the stylesheet.
    """
    content = ""

    # Top pages by pagerank
    # Compute pagerank for each article over the undirected citation graph
    G = networkx.Graph()
    for article in articles:
        for citation in article.citations:
            G.add_edge(article.title, citation.target)
    rank_by_article = networkx.pagerank(G)
    # Get the top ten articles by pagerank
    top_pageranks = reverse_statistics_dict(rank_by_article)[:10]
    # Replace the pageranks with ordinals
    top_ranked = enumerate(map(lambda x: x[1], top_pageranks), start=1)
    # Format the ranks into strings
    top_ranked_items = itemize(top_ranked)
    # Write the statistics to the page
    content += "<div class=\"contentblock\">\n"
    content += "<u>Top 10 articles by page rank:</u><br>\n"
    content += "<br>\n".join(top_ranked_items)
    content += "</div>\n"

    # Top number of citations made
    citations_made = {article.title: len(article.citations) for article in articles}
    top_citations = reverse_statistics_dict(citations_made)[:3]
    top_citations_items = itemize(top_citations)
    content += "<div class=\"contentblock\">\n"
    content += "<u>Top articles by citations made:</u><br>\n"
    content += "<br>\n".join(top_citations_items)
    content += "</div>\n"

    # Top number of times cited
    citations_to = {article.title: len(article.citedby) for article in articles}
    top_cited = reverse_statistics_dict(citations_to)[:3]
    top_cited_items = itemize(top_cited)
    content += "<div class=\"contentblock\">\n"
    content += "<u>Most cited articles:</u><br>\n"
    content += "<br>\n".join(top_cited_items)
    content += "</div>\n"

    # Top article length, roughly by words
    article_length = {}
    for article in articles:
        # Substitute each citation's text for its {cN} placeholder so the
        # word count reflects the rendered article
        format_map = {
            "c" + str(c.id): c.text
            for c in article.citations
        }
        plain_content = article.content.format(**format_map)
        article_length[article.title] = len(plain_content.split())
    top_length = reverse_statistics_dict(article_length)[:3]
    top_length_items = itemize(top_length)
    content += "<div class=\"contentblock\">\n"
    content += "<u>Longest articles:</u><br>\n"
    content += "<br>\n".join(top_length_items)
    content += "</div>\n"

    # Total word count
    content += "<div class=\"contentblock\">\n"
    content += "<u>Total word count:</u><br>\n"
    content += str(sum(article_length.values()))
    content += "</div>\n"

    # Player pageranks
    # Add addendums as their own nodes and recompute pagerank
    for article in articles:
        for addendum in article.addendums:
            for citation in addendum.citations:
                addendum_title = "{0.title}-T{0.turn}".format(addendum)
                G.add_edge(addendum_title, citation.target)
    rank_by_article = networkx.pagerank(G)
    players = sorted(set([article.player for article in articles if article.player is not None]))
    pagerank_by_player = {player: 0 for player in players}
    for article in articles:
        if article.player is not None:
            # .get() guards against articles that never entered the graph
            # (no citations made or received)
            pagerank_by_player[article.player] += rank_by_article.get(article.title, 0)
            for addendum in article.addendums:
                addendum_title = "{0.title}-T{0.turn}".format(addendum)
                # BUG FIX: this was keyed by addendum_title, which is not a
                # player and raised KeyError; the addendum's rank belongs to
                # the player who wrote it (cf. the citation tally below)
                pagerank_by_player[addendum.player] += rank_by_article.get(addendum_title, 0)
    for player in players:
        pagerank_by_player[player] = round(pagerank_by_player[player], 3)
    player_rank = reverse_statistics_dict(pagerank_by_player)
    player_rank_items = itemize(player_rank)
    content += "<div class=\"contentblock\">\n"
    content += "<u>Player total page rank:</u><br>\n"
    content += "<br>\n".join(player_rank_items)
    content += "</div>\n"

    # Player citations made
    cite_count_by_player = {player: 0 for player in players}
    for article in articles:
        if article.player is not None:
            # Count distinct targets only for the main article body
            unique_citations = set([a.target for a in article.citations])
            cite_count_by_player[article.player] += len(unique_citations)
            for addendum in article.addendums:
                cite_count_by_player[addendum.player] += len(addendum.citations)
    player_cites_made_ranks = reverse_statistics_dict(cite_count_by_player)
    player_cites_made_items = itemize(player_cites_made_ranks)
    content += "<div class=\"contentblock\">\n"
    content += "<u>Citations made by player:</u><br>\n"
    content += "<br>\n".join(player_cites_made_items)
    content += "</div>\n"

    # Player cited count
    cited_times = {player: 0 for player in players}
    for article in articles:
        if article.player is not None:
            cited_times[article.player] += len(article.citedby)
    cited_times_ranked = reverse_statistics_dict(cited_times)
    cited_times_items = itemize(cited_times_ranked)
    content += "<div class=\"contentblock\">\n"
    content += "<u>Citations made to player:</u><br>\n"
    content += "<br>\n".join(cited_times_items)
    content += "</div>\n"

    # Fill in the entry skeleton
    return page.format(title="Statistics", content=content)
def build_graphviz_file(cite_map):
    """
    Builds a citation graph in dot format for Graphviz.

    cite_map maps written article titles to lists of cited titles. Titles
    that are cited but never written become phantom nodes. Returns the dot
    source as a single string.
    """
    written_entries = list(cite_map.keys())
    # Titles cited but never written; sorted for deterministic output
    phantom_entries = sorted(set(
        title
        for cites in cite_map.values()
        for title in cites
        if title not in written_entries))
    # Assign each truncated label a stable sequential node id.
    # BUG FIX: ids were previously hash(label), which is randomized per
    # process (PYTHONHASHSEED), so the emitted graph differed between runs.
    # Note: titles sharing their first 20 characters still collapse into
    # one node, as before.
    node_id = {}
    for title in written_entries + phantom_entries:
        label = title[:20]
        if label not in node_id:
            node_id[label] = "n{}".format(len(node_id))
    result = ["digraph G {\n"]
    # Node labeling
    for label, name in node_id.items():
        result.append("{} [label=\"{}\"];\n".format(name, label))
    # Edges
    for citer in written_entries:
        for cited in cite_map[citer]:
            result.append("{}->{};\n".format(node_id[citer[:20]], node_id[cited[:20]]))
    result.append("overlap=false;\n}\n")
    return "".join(result)
def build_compiled_page(articles, config):
"""
Builds a page compiling all articles in the Lexicon.
"""
# Sort by turn and title
turn_order = sorted(
articles,
key=lambda a: (a.turn, utils.titlesort(a.title)))
# Build the content of each article
css = utils.load_resource("lexicon.css")
css += "\n"\
"body { background: #ffffff; }\n"\
"sup { vertical-align: top; font-size: 0.6em; }\n"
content = "\n"\
"\n"\
"
{lexicon}\n"\
"\n"\
"\n"\
"
{lexicon}
".format(
lexicon=config["LEXICON_TITLE"],
css=css)
for article in turn_order:
# Stitch in superscripts for citations
format_map = {
format_id: "{}
{}".format(cite_tuple[0], format_id[1:])
for format_id, cite_tuple in article.citations.items()
}
article_body = article.content.format(**format_map)
# Stitch a page-break-avoid div around the header and first paragraph
article_body = article_body.replace("", "
", 1)
# Append the citation block
cite_list = "