#!/usr/bin/env python
# HTMLGDump - dumps a git repo to html (and symlinks)
# Copyright (C) 2021 Soni L.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# tl;dr: install this as a git hook (post-receive)
# then configure your webserver and stuff
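#
# for example (paths below are purely illustrative, adjust to your setup):
#   ln -s /path/to/htmlgdump.py /srv/git/example.git/hooks/post-receive
#   git -C /srv/git/example.git config htmlgdump.base /srv/git
# the generated pages land in the repo's "browse" directory, ready to be
# served as static files.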
import dataclasses
import html
import os
import os.path
import pathlib
import shutil
import subprocess
import sys
from urllib.parse import quote
import pygit2
from pygments import highlight
from pygments.formatters import HtmlFormatter
from pygments.lexers import get_lexer_for_filename
from pygments.lexers import guess_lexer
from pygments.lexers import guess_lexer_for_filename
import pygments.util
@dataclasses.dataclass
class GitChange:
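    """A single ref update as reported to post-receive on stdin.

    `deleting` is derived in __post_init__: it is true when the new value is
    the all-zeros object id (40 hex digits for SHA-1, 64 for SHA-256 repos).
    """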
old_value: str
new_value: str
ref_name: str
deleting: bool = dataclasses.field(init=False)
def __post_init__(self):
self.deleting = self.new_value == "0"*40 or self.new_value == "0"*64
def get_relative(path, target):
"""Makes target relative to path, without filesystem operations."""
return os.path.relpath(target, start=path)
def find_lexer(text, meta):
"""Attempts to find a lexer for the given text/meta."""
# TODO this can probably be improved
# try exact lexers based on filename
# this is by far the fastest, but may lead to incorrect results sometimes.
try:
if len(set(get_lexer_for_filename(f[1]).name for f in meta)) == 1:
lex = get_lexer_for_filename(meta[0][1])
return lex
except pygments.util.ClassNotFound:
pass
# try lexers based on filename and content
try:
if len(set(guess_lexer_for_filename(f[1], text).name for f in meta)) == 1:
lex = guess_lexer_for_filename(meta[0][1], text)
return lex
except pygments.util.ClassNotFound:
pass
# try lexers based only on content
try:
lex = guess_lexer(text)
return lex
except pygments.util.ClassNotFound:
pass
return None
def check_soupault_version(soupault):
"""Checks if the given soupault command provides the correct version."""
# e.g. soupault 3.1.1
# versions up to 3.1.0 have a major security flaw which makes them
# unsuitable for use with this program
version = subprocess.run([
soupault,
"--version"
], stdout=subprocess.PIPE, check=True).stdout.splitlines()[0].decode()
if not version.startswith("soupault 3."):
print("please use soupault 3.1.1 or newer")
exit()
if version.startswith("soupault 3.0."):
print("please use soupault 3.1.1 or newer")
exit()
# semver doesn't allow leading 0 on any numeric fields, so this is safe
if version.startswith("soupault 3.1.0"):
print("please use soupault 3.1.1 or newer")
exit()
def find_soupault_config(dirs):
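    """Returns an open file object for the first soupault.toml found in dirs.

    Exits the program if no config can be found.
    """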
for d in dirs:
path = pathlib.Path(d) / "soupault.toml"
try:
f = path.open()
print("using {} as soupault config".format(path))
return f
        except OSError:
pass
print("couldn't find soupault config. tried paths:")
for d in dirs:
path = pathlib.Path(d) / "soupault.toml"
print(path)
exit()
CACHE_HOME = os.environ.get('XDG_CACHE_HOME', '')
if not CACHE_HOME:
CACHE_HOME = os.environ['HOME'] + '/.cache'
CACHE_HOME = CACHE_HOME + "/htmlgdump"
CONFIG_HOME = os.environ.get('XDG_CONFIG_HOME', '')
if not CONFIG_HOME:
CONFIG_HOME = os.environ['HOME'] + '/.config'
CONFIG_HOME = CONFIG_HOME + "/htmlgdump"
CONFIG_DIRS = os.environ.get('XDG_CONFIG_DIRS', '')
if not CONFIG_DIRS:
    CONFIG_DIRS = '/etc/xdg'  # the default mandated by the XDG Base Directory spec
CONFIG_DIRS = [config_dir + "/htmlgdump" for config_dir in CONFIG_DIRS.split(':')]
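# with the defaults this ends up searching ~/.config/htmlgdump first, then
# /etc/xdg/htmlgdump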
soupault_config = find_soupault_config([CONFIG_HOME] + CONFIG_DIRS)
# post-receive hooks run with $GIT_DIR as the current directory
repo = pygit2.Repository(os.getcwd())
try:
name = pathlib.Path.cwd().relative_to(repo.config["htmlgdump.base"])
except (KeyError, ValueError):
print("please set htmlgdump.base")
exit()
soupault = "soupault"
try:
soupault = repo.config["htmlgdump.soupault"]
except (KeyError, ValueError):
pass
check_soupault_version(soupault)
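# post-receive gets one line per updated ref on stdin:
# "<old-value> SP <new-value> SP <ref-name> LF"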
changes = [GitChange(*l.rstrip("\n").split(" ", 2)) for l in sys.stdin]
gen_dir = pathlib.Path(CACHE_HOME) / name / "gen"
gen_dir.mkdir(parents=True,exist_ok=True)
todocommits = set()
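# commits whose pages still need to be generated; parents get added as we go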
print("updating refs")
# build changed refs
for c in changes:
path = gen_dir / c.ref_name
if c.deleting:
try:
shutil.rmtree(path)
except FileNotFoundError:
pass
else:
path.mkdir(parents=True,exist_ok=True)
index = path / "index.html"
link = path / "tree"
tree = gen_dir / "trees" / str(repo[c.new_value].tree_id)
with index.open("w") as f:
# TODO
f.write("<!DOCTYPE html><html><head><meta charset=\"utf-8\"><title>ref</title><body><a href=\"./tree\">view tree</a></body></html>")
todocommits.add(repo[c.new_value])
linktarget = get_relative(path, tree)
link.unlink(missing_ok=True)
link.symlink_to(linktarget, target_is_directory=True)
print("generating refs")
# create missing refs
for ref in repo.references:
ref = repo.references.get(ref)
path = gen_dir / ref.name
path.mkdir(parents=True,exist_ok=True)
index = path / "index.html"
link = path / "tree"
tree = gen_dir / "trees" / str(ref.peel(pygit2.Commit).tree_id)
try:
f = index.open("x")
except FileExistsError:
        # this ref's page already exists, so we already handled it - skip
continue
with f:
# TODO
f.write("<!DOCTYPE html><html><head><meta charset=\"utf-8\"><title>ref</title><body><a href=\"./tree\">view tree</a></body></html>")
todocommits.add(ref.peel(pygit2.Commit))
linktarget = get_relative(path, tree)
link.symlink_to(linktarget, target_is_directory=True)
todotrees = set()
print("generating commits")
# build commits
while todocommits:
c = todocommits.pop()
path = gen_dir / "commits" / str(c.id)
path.mkdir(parents=True,exist_ok=True)
index = path / "index.html"
link = path / "tree"
tree = gen_dir / "trees" / str(c.tree_id)
try:
f = index.open("x")
except FileExistsError:
# check if we've already visited this commit
continue
with f:
# TODO
f.write("<!DOCTYPE html><html><head><meta charset=\"utf-8\"><title>commit</title><body><a href=\"./tree\">view tree</a></body></html>")
todotrees.add(c.tree)
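        # queue parent commits so every commit reachable from the refs gets a page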
todocommits.update(c.parents)
linktarget = get_relative(path, tree)
link.symlink_to(linktarget, target_is_directory=True)
# a dict this time /!\
# maps each blob to a list of (filemode, filename) pairs from the trees that
# reference it
# FIXME this can get quite expensive with larger repos, and might even run out
# of RAM.
todoblobs = {}
print("generating trees")
# build trees
while todotrees:
t = todotrees.pop()
path = gen_dir / "trees" / str(t.id)
path.mkdir(parents=True,exist_ok=True)
index = path / "index.html"
try:
f = index.open("x")
except FileExistsError:
# check if we've already visited this tree
continue
with f:
f.write("<!DOCTYPE html><html><head><meta charset=\"utf-8\"><title>tree</title><body><ul>")
for obj in t:
linkname = obj.name
            # a git repo can contain any filename, including index.html, but
            # you can never (well, practically never) make a file whose name is
            # the id of the tree it's in. hashes are kinda awesome!
            # so we mangle any index.html entry with the tree id so it doesn't
            # conflict with our own index.html.
            # note that this does mean those index.html files cannot be easily
            # permalinked, sorry.
if linkname == "index.html":
linkname = str(t.id) + "_index.html"
quoted = quote(linkname, safe='')
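            # percent-encode the whole name (safe='') so it can be dropped into
            # the href as-is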
link = path / linkname
if isinstance(obj, pygit2.Blob):
blobmeta = todoblobs.setdefault(obj, [])
blobmeta += [(obj.filemode, obj.name)]
tree = gen_dir / "blobs" / str(obj.id)
linktarget = get_relative(path, tree)
link.symlink_to(linktarget, target_is_directory=True)
                f.write("<li><a href=\"./{}\">{}</a></li>".format(quoted, html.escape(linkname)))
elif isinstance(obj, pygit2.Tree):
todotrees.add(obj)
tree = gen_dir / "trees" / str(obj.id)
linktarget = get_relative(path, tree)
link.symlink_to(linktarget, target_is_directory=True)
                f.write("<li><a href=\"./{}\">{}</a></li>".format(quoted, html.escape(linkname)))
            else:
                # TODO not implemented, sorry. (submodules apparently show up
                # as commit entries inside trees, and we don't handle those.)
                raise TypeError("unhandled tree entry type for {!r}".format(obj.name))
f.write("</ul></body></html>")
print("generating blobs")
# build blobs
while todoblobs:
(b, meta) = todoblobs.popitem()
path = gen_dir / "blobs" / str(b.id)
path.mkdir(parents=True,exist_ok=True)
index = path / "index.html"
try:
f = index.open("x")
except FileExistsError:
        # this blob's page already exists, skip it
continue
with f:
f.write("<!DOCTYPE html><html><head><meta charset=\"utf-8\"><title>blob</title><body>")
f.write("<a href=\"./raw.bin\">view raw</a>")
try:
text = b.data.decode("utf-8", errors="strict")
lex = find_lexer(text, meta)
if lex is not None:
f.write(highlight(text, lex, HtmlFormatter()))
else:
# TODO maybe just write `text` (html escaped)?
pass
except UnicodeError:
pass
f.write("</body></html>")
raw = path / "raw.bin"
with raw.open("wb") as f:
f.write(b)
# create index.html
path = gen_dir / "index.html"
with path.open("w") as f:
f.write("<!DOCTYPE html><html><head><meta charset=\"utf-8\"><title>index</title><body><ul>")
if not repo.head_is_unborn:
ref = repo.head
quoted = quote(ref.name, safe='/')
        f.write("<li><a href=\"./{}\">{}</a></li>".format(quoted, html.escape(ref.name)))
for ref in repo.references:
ref = repo.references.get(ref)
quoted = quote(ref.name, safe='/')
        f.write("<li><a href=\"./{}\">{}</a></li>".format(quoted, html.escape(ref.name)))
f.write("</ul></body></html>")
print("preparing build")
# CANNOT use shutil.copytree - it doesn't do what we need here (copying into a
# possibly pre-existing directory while preserving symlinks), so we create the
# destination ourselves and let cp -R -P do the copy.
build_dir = pathlib.Path(CACHE_HOME) / name / "build"
build_dir.mkdir(parents=True,exist_ok=True)
subprocess.run(["cp", "-R", "-P", *gen_dir.glob("*"), build_dir], check=True)
print("running soupault")
# run soupault on it. note that soupault currently follows symlinks.
# FIXME: don't put symlinks in the site dir
subprocess.run(
[
soupault,
"--site-dir",
gen_dir,
"--build-dir",
build_dir,
],
cwd=pathlib.Path(CONFIG_HOME),
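    # hand soupault the already-open config through /dev/fd; pass_fds below
    # keeps that descriptor inherited by the child process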
env={
**os.environ,
'SOUPAULT_CONFIG': '/dev/fd/{}'.format(soupault_config.fileno())
},
check=True,
pass_fds=[soupault_config.fileno()]
)
print("copying to output")
# CANNOT use shutil.copytree - it doesn't do what we need here (copying into a
# possibly pre-existing directory while preserving symlinks), so we create the
# destination ourselves and let cp -R -P do the copy.
browse = pathlib.Path.cwd() / "browse"
browse.mkdir(parents=True,exist_ok=True)
subprocess.run(["cp", "-R", "-P", *build_dir.glob("*"), browse], check=True)
# └── gen
# ├── blobs
# │ └── e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
# │ ├── index.html
# │ └── raw.bin
# ├── commits
# │ ├── 21177a2933b1a9d21d8437159405c5bc68b4d32e
# │ │ ├── index.html
# │ │ └── tree -> ../../trees/1663be45d5f6b9f092c4b98d44cf7992b427172f
# │ └── 3ea9318f6271ece3c7560f18d0b22f50bd3cefe5
# │ ├── index.html
# │ └── tree -> ../../trees/17d6338b3a3dc189bdc3bea8481fe5f32fd388c8
# ├── refs
# │ └── heads
# │ └── default
# │ ├── index.html
# │ └── tree -> ../../../trees/1663be45d5f6b9f092c4b98d44cf7992b427172f
# └── trees
# ├── 1663be45d5f6b9f092c4b98d44cf7992b427172f
# │ ├── bar -> ../../blobs/e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
# │ ├── baz -> ../29ba47b07d262ad717095f2d94ec771194c4c083
# │ ├── deleteme -> ../../blobs/e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
# │ ├── foo -> ../../blobs/e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
# │ └── index.html
# ├── 17d6338b3a3dc189bdc3bea8481fe5f32fd388c8
# │ ├── bar -> ../../blobs/e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
# │ ├── baz -> ../29ba47b07d262ad717095f2d94ec771194c4c083
# │ ├── foo -> ../../blobs/e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
# │ └── index.html
# └── 29ba47b07d262ad717095f2d94ec771194c4c083
# ├── index.html
# └── qux -> ../../blobs/e69de29bb2d1d6434b8b29ae775ad8c2e48c5391