2020-01-24 22:15:30 +01:00
|
|
|
#!/usr/bin/env python3
|
2020-03-10 08:11:54 +01:00
|
|
|
# pylint: disable=logging-format-interpolation
|
2020-01-24 22:15:30 +01:00
|
|
|
"Creates a static site for redacted.life"
|
|
|
|
|
|
|
|
import argparse
|
|
|
|
from collections import UserList
|
2020-01-25 19:10:25 +01:00
|
|
|
from datetime import datetime
|
2020-02-01 10:59:36 +01:00
|
|
|
import json
|
2020-01-24 22:15:30 +01:00
|
|
|
import os
|
|
|
|
import os.path as path
|
2020-03-10 08:11:54 +01:00
|
|
|
import logging
|
2020-01-24 22:15:30 +01:00
|
|
|
import re
|
2020-01-25 19:10:25 +01:00
|
|
|
import subprocess
|
2020-01-25 22:54:59 +01:00
|
|
|
import shutil
|
2020-01-24 22:15:30 +01:00
|
|
|
import sys
|
|
|
|
|
|
|
|
import jinja2
|
2020-01-25 20:10:29 +01:00
|
|
|
import markdown
|
2020-02-05 00:29:57 +01:00
|
|
|
from mutagen.mp3 import MP3
|
2020-01-24 22:15:30 +01:00
|
|
|
|
|
|
|
|
2020-01-25 19:10:25 +01:00
|
|
|
def gen_name(date, slug):
    """Return the episode's base file name: "YYYY-MM-DD-<slug>"."""
    return f"{date:%Y-%m-%d}-{slug}"
|
|
|
|
|
|
|
|
|
2020-02-05 00:29:57 +01:00
|
|
|
def seconds_to_str(seconds):
    """Format a duration given in seconds as a zero-padded "hh:mm:ss" string."""
    # Truncate fractional seconds first, then peel off minutes and hours.
    minutes, secs = divmod(int(seconds), 60)
    hours, minutes = divmod(minutes, 60)
    return f"{hours:02}:{minutes:02}:{secs:02}"
|
|
|
|
|
|
|
|
|
2020-01-24 22:15:30 +01:00
|
|
|
class EpisodeList(UserList):
    """Ordered collection of Episode objects.

    Knows how to render every site artefact derived from the episodes:
    thumbnails, the mp3/ogg RSS feeds, the archives page, and the per-episode
    HTML pages (plus index.html, a copy of the newest episode's page).
    """

    def __init__(self, url, data, output, template, archives):
        """Store rendering context.

        url       -- base URL of the site, with trailing slash
        data      -- initial iterable of Episode objects
        output    -- output directory, with trailing slash
        template  -- jinja2 template for a single episode page
        archives  -- jinja2 template for the archives page
        """
        super().__init__(data)
        self.url = url
        self.output = output
        self.template = template
        self.archives = archives
        logging.info(f"New EpisodeList: {url=} {output=} {template=} {archives=}")

    def sort(self, *_args, **_kwargs):
        "Sorts the EpisodeList by episode date, oldest first (args ignored)."
        super().sort(key=lambda x: x.date, reverse=False)

    def generate_thumbnails(self):
        "Generates thumbnails for all the videos."
        logging.info("Creating missing directories")
        # makedirs with exist_ok replaces the original isdir()/mkdir() pair
        # for "assets" and "assets/thumbnails" (creates both, no race).
        os.makedirs(self.output + "assets/thumbnails", exist_ok=True)
        for episode in self.data:
            location = (self.output + "assets/thumbnails/" +
                        gen_name(episode.date, episode.slug) + ".jpg")
            logging.info(f"Creating thumbnail for {episode=} at {location}")
            episode.store_thumbnail(location)

    def generate_rss(self, header):
        """Generates the RSS feeds (feed_mp3.rss and feed_ogg.rss).

        `header` is written verbatim at the top of each feed; it is expected
        to open the <rss>/<channel> elements that this method closes.
        """
        with open(self.output + "feed_mp3.rss", "w") as mp3, \
                open(self.output + "feed_ogg.rss", "w") as ogg:
            # pylint: disable = invalid-name
            for s, ext in ((mp3, "mp3"), (ogg, "ogg")):
                logging.info(f"Writing header for {ext}")
                s.write(header)
                for ep in self.data:
                    logging.info(f"Writing item for episode {ep} with {ext=}")
                    # Date-prefixed base name shared by the HTML page and the
                    # audio files written by generate_site().
                    name = gen_name(ep.date, ep.slug)
                    s.write("<item>\n")
                    # Title
                    s.write(f"<title><![CDATA[{ep.title}]]></title>\n")
                    # Description
                    s.write("<description><![CDATA["
                            f"{ep.config['description']}]]></description>\n")
                    # Date in the RFC 822 form required for RSS 2.0 <pubDate>.
                    # (The previous format string '%a, %d %b, %Y %H:%M:%Sz GMT'
                    # emitted a stray comma and a literal 'z', which breaks
                    # strict feed parsers.)
                    datestring = ep.date.strftime('%a, %d %b %Y %H:%M:%S GMT')
                    s.write(f"<pubDate>{datestring}</pubDate>\n")
                    # iTunes: explicit, author, subtitle, keywords, duration
                    s.write(f"<itunes:explicit>{ep.config['explicit']}"
                            "</itunes:explicit>\n")
                    s.write(f"<itunes:author><![CDATA[{ep.config['author']}]]>"
                            "</itunes:author>\n")
                    s.write("<itunes:subtitle><![CDATA["
                            f"{ep.config['subtitle']}]]></itunes:subtitle>\n")
                    s.write(f"<itunes:keywords>{','.join(ep.config['tags'])}"
                            "</itunes:keywords>\n")
                    s.write(f"<itunes:duration>{seconds_to_str(len(ep))}"
                            "</itunes:duration>\n")
                    # Content (show_notes, already rendered to HTML)
                    s.write(f"<content:encoded><![CDATA[{ep.show_notes}]]>"
                            "</content:encoded>\n")
                    # GUID: point at the page generate_site() actually writes
                    # ("<date>-<slug>.html"); the old "<slug>.html" URL never
                    # exists on disk.
                    s.write(f'<guid isPermaLink="true">{self.url}{name}'
                            ".html</guid>\n")
                    # Enclosure: generate_site() copies the audio to
                    # assets/audio/<date>-<slug>.<ext>; mirror that here (the
                    # old "assets/music/<slug>.<ext>" URL pointed nowhere).
                    audio = f"{self.url}assets/audio/{name}.{ext}"
                    size = path.getsize(f"{ep.audio}.{ext}")
                    s.write(
                        f'<enclosure url="{audio}" type="audio/{ext}" '
                        f'length="{size}" />'
                    )
                    s.write("\n")
                    # Categories, one element per tag
                    for tag in ep.config["tags"]:
                        s.write(f"<category><![CDATA[{tag}]]></category>\n")
                    s.write("</item>\n")
                logging.info(f"Writing end for {ext}")
                s.write("</channel>\n</rss>")

    def generate_archives(self):
        "Generates the archives page, newest episode first."
        with open(self.output + "archives.html", "w") as file:
            episodes = [{
                "slug": gen_name(i.date, i.slug) + ".html",
                "title": i.title
            } for i in self.data[::-1]]
            file.write(self.archives.render(episodes=episodes,
                                            title="Archives"))

    def generate_site(self, root):
        """Generates the entire site: CSS, static assets, media, pages.

        `root` is the directory containing the site sources (scss/, assets/,
        templates), with trailing slash.
        """
        logging.info("Generating CSS from SCSS")
        subprocess.run(["sass", "--update", f"{root}scss:{root}assets/css"],
                       check=True)
        logging.info("Copy the existing assets")
        shutil.copytree(root + "assets", self.output + "assets",
                        dirs_exist_ok=True)
        logging.info("Create the required directories")
        paths = [
            "assets",
            "assets/audio",
            "assets/videos",
        ]
        logging.info("Creating missing directories")
        for directory in paths:
            if not path.isdir(self.output + directory):
                logging.info(f"Creating directory {directory}")
                os.mkdir(self.output + directory)

        logging.info("Render episodes and copy data")
        for episode in self.data:
            logging.info(f"Rendering episode {episode}")
            name = gen_name(episode.date, episode.slug)
            html = f"{self.output}{name}.html"
            thumbnail = f"assets/thumbnails/{name}.jpg"
            video = f"{self.output}assets/videos/{name}.mp4"
            # Keep the two audio formats on distinct paths.  Previously both
            # copies targeted the same "<name>.mp3" path, so the .ogg copy
            # silently overwrote the .mp3 file.
            audio_base = f"{self.output}assets/audio/{name}"
            logging.info(f"Copying {episode.video} to {video}")
            shutil.copy2(episode.video, video)
            logging.info(f"Copying {episode.audio}.mp3 to {audio_base}.mp3")
            shutil.copy2(episode.audio + ".mp3", audio_base + ".mp3")
            logging.info(f"Copying {episode.audio}.ogg to {audio_base}.ogg")
            shutil.copy2(episode.audio + ".ogg", audio_base + ".ogg")
            logging.info(f"Writing to {html}")
            with open(html, "w") as file:
                file.write(episode.render(self.template, thumbnail))

        # index.html is simply a copy of the newest episode's page.
        last = self.data[-1]
        last_name = f"{self.output}{gen_name(last.date, last.slug)}.html"
        logging.info(f"Copying last one ({last}) to index.html")
        shutil.copy2(last_name, self.output + "index.html")
|
2020-01-24 22:15:30 +01:00
|
|
|
|
|
|
|
|
|
|
|
class Episode:
    """One episode of the podcast: metadata, media paths and rendering."""

    def __init__(self, date, slug, title, show_notes, video_src, audio_src, config):
        """Build an episode.

        show_notes arrives as Markdown and is stored pre-rendered as HTML.
        audio_src is the audio path *without* extension; both "<audio_src>.mp3"
        and "<audio_src>.ogg" are expected to exist.
        """
        self.date = date
        self.slug = slug
        self.title = title
        self.show_notes = markdown.markdown(show_notes)
        self.video = video_src
        self.audio = audio_src
        self.config = config
        # Duration in seconds (float), read from the mp3 variant's metadata.
        self.length = MP3(audio_src + ".mp3").info.length
        logging.info(f"New episode: {date=} {slug=} {title=} {self.video=} "
                     f"{self.audio=} {config=} {self.length=} {self.show_notes=}")

    def render(self, template, thumbnail_src):
        "Renders the Episode with the given template"
        # Mark the pre-rendered notes as safe HTML so autoescaping skips them.
        notes = jinja2.Markup(self.show_notes)
        video_name = path.basename(self.video)
        return template.render(
            title=self.title,
            show_notes=notes,
            thumbnail_src=thumbnail_src,
            video_src=f"assets/videos/{video_name}"
        )

    def store_thumbnail(self, location):
        "Stores the thumbnail for given image at path"
        # Grab a single frame one second into the video.  check=False: a
        # failing ffmpeg run (e.g. output file already present) is ignored.
        args = [
            "ffmpeg",
            "-i", self.video,
            "-ss", "00:00:01.000",
            "-vframes", "1",
            location,
        ]
        logging.info(f"Running {' '.join(args)}")
        subprocess.run(args, check=False)

    def __len__(self):
        "Audio duration in whole seconds."
        return int(self.length)

    def __str__(self):
        return f"{self.slug}: {self.title}"

    def __repr__(self):
        return str(self)
|
2020-01-24 22:15:30 +01:00
|
|
|
|
2020-01-27 12:44:38 +01:00
|
|
|
def parse_args():
    """Parse the command line.

    Returns (input_dir, output_dir, url), each normalised to end in exactly
    one trailing slash; also configures the root logger (DEBUG if --verbose).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("input_dir", help="Input directory")
    parser.add_argument("output_dir", help="Output directory")
    parser.add_argument("url", help="Base URL of website")
    parser.add_argument("--verbose", "-v", action="store_true", help="Verbose Logging")
    args = parser.parse_args()

    def as_dir(raw):
        # Absolute path with exactly one trailing slash.
        return path.abspath(raw.rstrip("/")) + "/"

    input_dir = as_dir(args.input_dir)
    output_dir = as_dir(args.output_dir)

    if args.verbose:
        logging.basicConfig(level=logging.DEBUG)
    else:
        logging.basicConfig()

    url = args.url.rstrip("/") + "/"
    return input_dir, output_dir, url
|
2020-01-27 12:44:38 +01:00
|
|
|
|
|
|
|
|
2020-01-30 01:13:13 +01:00
|
|
|
class ParseError(ValueError):
    """Raised when an episode file's front matter contains an invalid line."""
|
|
|
|
|
|
|
|
|
2020-02-05 00:29:57 +01:00
|
|
|
def parse_file(file, array_keys=("tags",)):
    """Parse an episode file: "key: value" front matter, "---", then body.

    Keys whose lowercased name is in `array_keys` have their value split on
    commas into a stripped list; all other values are stored as stripped
    strings.  Returns (config_dict, remaining_body_text).

    Raises ParseError on a front-matter line that is neither blank nor
    "key: value".

    Bug fix: the default used to be ("tags") — a plain *string*, not a tuple —
    so `key in array_keys` performed substring matching and any key that is a
    substring of "tags" (e.g. "s", "a", "tag") was wrongly split into a list.
    """
    config = {}
    kv_re = re.compile(r"(?P<key>\w+):\s*(?P<value>.*)")
    while line := file.readline():
        # "---" on its own line terminates the front matter.
        if line.rstrip("\n") == "---":
            break
        if line.strip() == "":
            continue
        if match := kv_re.match(line):
            key = match.group("key")
            value = match.group("value")
            if key.strip().lower() in array_keys:
                config[key] = [i.strip() for i in value.split(",")]
            else:
                config[key] = value.strip()
        else:
            raise ParseError(f"Invalid line {line}")

    # Everything after the separator is the episode body (show notes).
    return (config, file.read())
|
|
|
|
|
|
|
|
|
|
|
|
def main(args):
    """Main method.

    `args` is the (input_dir, output_dir, url) triple from parse_args().
    Returns a process exit status: 0 on success, 1 on invalid input
    directories, 2 on an episode-file parse error.
    """
    # Templates, scss/ and assets/ live next to this script.
    root = path.dirname(sys.argv[0]).rstrip("/") + "/"
    input_dir, output_dir, url = args
    logging.info(f"Input directory: {input_dir}")
    logging.info(f"Output directory: {output_dir}")
    logging.info(f"URL: {url}")

    # Input validation
    paths = [
        input_dir,
        input_dir + "md",
        input_dir + "videos",
        input_dir + "audio",
    ]
    logging.info("Checking if all paths exist.")
    # The walrus captures the last path tested, i.e. the first failure,
    # since all() short-circuits.
    if not all(path.isdir((fail := i)) for i in paths):
        logging.error(f"Invalid Input. {fail} is not a directory.")
        return 1

    logging.info("Creating output directory if it doesn't exist")
    if not path.isdir(output_dir):
        os.mkdir(output_dir)

    env = jinja2.Environment(
        loader=jinja2.FileSystemLoader(root),
        autoescape=jinja2.select_autoescape("html")
    )

    logging.info("Creating EpisodeList")
    podcast = EpisodeList(
        url,
        [],
        output_dir,
        env.get_template("index.html"),
        env.get_template("archives.html")
    )

    # Episode sources are named "YYYY-MM-DD-<slug>.md".  The extension dot is
    # escaped and the pattern anchored so that files such as "x.markdown" no
    # longer match (the previous pattern used an unescaped "." and no anchor;
    # its unused outer capture group is dropped).
    split = re.compile(r"(?P<date>\d{4}-[01]?\d-[0123]?\d)-(?P<slug>.*)\.md$")
    logging.info(f"Parsing all files in {input_dir}md")
    for file in os.listdir(input_dir + "md"):
        logging.info(f"File: {file}")
        match = split.match(file)
        logging.info(f"Match: {match}")
        if not match:
            logging.error(f"Invalid filename: {file}")
            continue
        date = datetime.strptime(match.group("date"), "%Y-%m-%d")
        logging.info(f"Date: {date}")
        slug = match.group("slug")
        logging.info(f"Slug: {slug}")
        with open(input_dir + "md/" + file) as episode:
            try:
                config, show_notes = parse_file(episode)
                logging.info(f"Config: {config}")
                logging.info(f"Show Notes: {show_notes}")
            except ParseError as err:
                logging.error(f"Error while parsing file: {file}")
                logging.error(err)
                return 2
        logging.info("Appending to EpisodeList")
        podcast.append(
            Episode(
                date,
                slug,
                config["title"],
                show_notes,
                input_dir + "videos/" + gen_name(date, slug) + ".mp4",
                # Audio path deliberately has no extension: Episode appends
                # ".mp3"/".ogg" itself.
                input_dir + "audio/" + gen_name(date, slug),
                config
            )
        )

    logging.info("Generating subscribe page")
    with open(input_dir + "subscribe.json") as subscribe, \
            open(output_dir + "subscribe.html", "w") as html:
        html.write(env.get_template("subscribe.html").render(
            subscribtions=json.load(subscribe)
        ))

    logging.info("Generating donate page")
    with open(input_dir + "donate.json") as donate, \
            open(output_dir + "donate.html", "w") as html:
        html.write(env.get_template("donate.html").render(
            donations=json.load(donate)
        ))

    logging.info("Sorting podcasts")
    podcast.sort()
    logging.info("Generating thumbnails")
    podcast.generate_thumbnails()
    logging.info("Generating archives pages")
    podcast.generate_archives()
    logging.info("Generating RSS feeds")
    with open(input_dir + "header.rss") as header:
        podcast.generate_rss(header.read())
    logging.info("Generating Site")
    podcast.generate_site(root)
    logging.info("Copying Overrides")
    # Overrides are copied last so hand-maintained files win over anything
    # generated above.
    shutil.copytree(input_dir + "overrides", output_dir, dirs_exist_ok=True)
    logging.info("Done")
    return 0
|
2020-01-24 22:15:30 +01:00
|
|
|
|
|
|
|
|
|
|
|
if __name__ == "__main__":
    # Propagate main()'s status code to the shell (0 ok, 1 invalid input
    # directories, 2 episode parse error).
    sys.exit(main(parse_args()))
|