# euler/ipython/publish.py — publish Project Euler notebook solutions as HTML

#!/usr/bin/env python
import jinja2
import os
import subprocess
import shutil
from operator import itemgetter
from collections import namedtuple
from os.path import getmtime
from bs4 import BeautifulSoup
import json
def extract_metadata(ipynb_file):
    """Return the (tags, completion_date) metadata of a notebook file.

    Parameters:
        ipynb_file: path to a Jupyter .ipynb file (JSON).

    Raises:
        Exception: when the notebook's metadata lacks 'tags' or
            'completion_date'; the original KeyError is chained.
    """
    Metadata = namedtuple('Metadata', ['tags', 'completion_date'])
    # The notebook format stores JSON as UTF-8; be explicit about it.
    with open(ipynb_file, 'r', encoding='utf-8') as f:
        j = json.load(f)
    try:
        tags = j['metadata']['tags']
        completion_date = j['metadata']['completion_date']
    except KeyError as e:
        # Chain the KeyError so the missing key shows up in the traceback.
        raise Exception('Failed extracting meta from {}.'.format(ipynb_file)) from e
    return Metadata(tags, completion_date)
def file_name_to_solution(name):
    """Build a Solution record from an 'EulerProblemNNN.ipynb' file name."""
    Solution = namedtuple('Solution',
                          ["number", "ipynb", "html", "name", "metadata"])
    stem = name.replace("EulerProblem", "").replace(".ipynb", "")
    meta = extract_metadata(name)
    return Solution(
        number=int(stem),
        ipynb=name,
        html=name.replace(".ipynb", ".html"),
        name=name.replace("EulerProblem", "Problem ").replace(".ipynb", ""),
        metadata=meta,
    )
def get_solution_list(directory="./"):
    """Return Solution records for every EulerProblem notebook in *directory*,
    sorted by problem number.  The template EulerProblem000.ipynb is skipped.
    """
    candidates = (f for f in os.listdir(directory)
                  if f.startswith("EulerProblem")
                  and f.endswith(".ipynb")
                  and not f.endswith("000.ipynb"))
    return sorted((file_name_to_solution(f) for f in candidates),
                  key=itemgetter(0))
def render_solutions(solutions):
    """Render the overview page html/index.html from ./template.html."""
    env = jinja2.Environment(
        loader=jinja2.FileSystemLoader(searchpath="./"))
    page = env.get_template("template.html").render(solutions=solutions)
    with open("html/index.html", 'w') as f:
        f.write(page)
def convert_solutions_to_html(solutions):
    """Convert missing or stale notebooks to HTML with jupyter-nbconvert.

    A notebook is (re)converted when its HTML counterpart under html/ is
    missing or older than the notebook.  Fix over the original: the exit
    code of nbconvert is now checked, so post_process() no longer runs on
    a file that failed to convert (which would open a missing or stale
    HTML file).
    """
    for s in solutions:
        html = os.path.join("html", s.html)
        if not os.path.isfile(html) or getmtime(html) < getmtime(s.ipynb):
            args = ["jupyter-nbconvert", s.ipynb, "--output-dir=html"]
            # Only post-process when nbconvert exited cleanly.
            if subprocess.call(args) == 0:
                post_process(html)
def post_process(html):
    """Insert a 'Back to overview.' link after the first <h1> of an HTML file.

    Fixes over the original: no mutable default argument, the helper no
    longer shadows its own parameter, explicit UTF-8 encodings (nbconvert
    writes UTF-8), and a guard when the page has no <h1> (the original
    raised AttributeError on None).
    """

    def tag(t, string="", children=None, **attrs):
        # children=None avoids the shared-mutable-default pitfall.
        new_tag = BeautifulSoup("", 'html.parser').new_tag(t, **attrs)
        if string:
            new_tag.string = string
        for child in (children or []):
            new_tag.append(child)
        return new_tag

    with open(html, 'r', encoding='utf-8') as f:
        soup = BeautifulSoup(f.read(), 'html.parser')
    soup_h1 = soup.find("h1")
    if soup_h1 is not None:
        soup_p = tag("p", children=[tag("a", href="/euler",
                                        string="Back to overview.")])
        soup_h1.insert_after(soup_p)
    with open(html, 'w', encoding='utf-8') as f:
        f.write(str(soup))
def ship_to_failx():
    """Upload the generated html/ tree to the web server via rsync."""
    subprocess.call(
        ["rsync", "-r", "html/", "failx@felixm.de:/home/failx/html/euler"])
if __name__ == "__main__":
    # Collect notebooks, build per-problem HTML and the overview page,
    # then upload everything.
    sols = get_solution_list()
    convert_solutions_to_html(sols)
    render_solutions(sols)
    ship_to_failx()