Use Typer for CLI and implement hexdoc serve

object-Object 2023-09-29 22:52:30 -04:00
parent 63e8a44db1
commit 8ae198f00a
13 changed files with 562 additions and 478 deletions

View file

@@ -52,7 +52,7 @@ jobs:
release: ${{ steps.parse-inputs.outputs.release }}
branch: ${{ steps.parse-inputs.outputs.branch }}
hexdoc-common: ${{ steps.parse-inputs.outputs.hexdoc-common }}
hexdoc: ${{ steps.parse-inputs.outputs.hexdoc }}
hexdoc-props: ${{ steps.parse-inputs.outputs.hexdoc-props }}
steps:
- uses: actions/checkout@v3
@@ -71,7 +71,7 @@ jobs:
- name: Parse inputs
id: parse-inputs
run: |
release=${{ inputs.publish-release }}
release=${{ inputs.publish-release }}
update_latest=${{ inputs.publish-latest-and-root }}
if [[ ${{ inputs.checkout }} == true ]]; then
@@ -82,17 +82,29 @@ jobs:
branch=none
props=doc/properties.toml
fi
hexdoc_common="--is-release $release --update-latest $update_latest"
hexdoc="hexdoc $props --ci $hexdoc_common"
echo "HEXDOC=$hexdoc" >> "$GITHUB_ENV"
if [[ $release == true ]]; then
release_flag="--release"
else
release_flag="--no-release"
fi
if [[ $update_latest == true ]]; then
update_latest_flag="--update-latest"
else
update_latest_flag="--no-update-latest"
fi
hexdoc_common="$release_flag $update_latest_flag"
echo "HEXDOC_COMMON=$props" >> "$GITHUB_ENV"
echo "HEXDOC_PROPS=$hexdoc_common" >> "$GITHUB_ENV"
echo "release=$release" >> "$GITHUB_OUTPUT"
echo "update-latest=$update_latest" >> "$GITHUB_OUTPUT"
echo "branch=$branch" >> "$GITHUB_OUTPUT"
echo "hexdoc-common=$hexdoc_common" >> "$GITHUB_OUTPUT"
echo "hexdoc=$hexdoc" >> "$GITHUB_OUTPUT"
echo "hexdoc-props=$props" >> "$GITHUB_OUTPUT"
- name: Checkout input branch
if: steps.parse-inputs.outputs.branch != 'none'
@@ -108,13 +120,13 @@ jobs:
- name: List book languages
id: list-langs
run: |
echo "matrix=$($HEXDOC --list-langs)" >> "$GITHUB_OUTPUT"
echo "matrix=$(hexdoc list-langs $HEXDOC_PROPS $HEXDOC_COMMON)" >> "$GITHUB_OUTPUT"
if [[ $RUNNER_DEBUG ]]; then
tree -I '__pycache__|Common|Fabric|Forge|venv'
fi
- name: Export web book
run: $HEXDOC --export-only
run: hexdoc export $HEXDOC_PROPS $HEXDOC_COMMON
- name: Bump version
if: inputs.bump-version-segment
@@ -162,7 +174,7 @@ jobs:
env:
GITHUB_PAGES_URL: ${{ needs.build.outputs.pages-url }}
HEXDOC: ${{ needs.build.outputs.hexdoc }} --lang ${{ matrix.lang }} -o _site --clean
HEXDOC: hexdoc render ${{ needs.build.outputs.hexdoc-props }} _site ${{ needs.build.outputs.hexdoc-common }} --lang ${{ matrix.lang }} --clean
steps:
- uses: actions/checkout@v3
@@ -227,7 +239,7 @@ jobs:
path: _site/src/docs
- name: Add new docs to site
run: hexdoc_merge ${{ needs.build.outputs.hexdoc-common }} --src _site/src/docs --dst _site/dst/docs
run: hexdoc merge ${{ needs.build.outputs.hexdoc-common }} --src _site/src/docs --dst _site/dst/docs
- name: Deploy to Pages
uses: JamesIves/github-pages-deploy-action@v4
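For context, the paired --release/--no-release and --update-latest/--no-update-latest flags that parse-inputs now builds are the on/off forms Typer generates automatically for boolean options. A minimal stand-alone sketch (not part of this commit; the command and runner below are illustrative stand-ins for the real render command):

import typer
from typer.testing import CliRunner

app = typer.Typer()

@app.command()
def render(release: bool = False, update_latest: bool = True):
    # Typer exposes these as --release/--no-release and --update-latest/--no-update-latest
    print(f"release={release}, update_latest={update_latest}")

runner = CliRunner()
result = runner.invoke(app, ["--release", "--no-update-latest"])
print(result.output)  # release=True, update_latest=False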

View file

@@ -1,5 +1,5 @@
{
"watch": ["doc/src/hexdoc/_templates"],
"ext": "jinja,html,css,js",
"exec": "hexdoc doc/properties.toml -o _site/src/docs --lang en_us --allow-missing && hexdoc_merge --src _site/src/docs --dst _site/dst/docs"
"exec": "hexdoc serve doc/properties.toml --src _site/src/docs --dst _site/dst/docs --lang en_us --allow-missing --release"
}

View file

@@ -0,0 +1,89 @@
import logging
from pathlib import Path
from hexdoc.hexcasting.hex_book import load_hex_book
from hexdoc.minecraft import I18n
from hexdoc.patchouli import Book
from hexdoc.plugin import PluginManager
from hexdoc.utils import ModResourceLoader, Properties
from .logging import setup_logging
def load_common_data(props_file: Path, verbosity: int):
setup_logging(verbosity)
props = Properties.load(props_file)
pm = PluginManager()
version = load_version(props, pm)
return props, pm, version
def load_version(props: Properties, pm: PluginManager):
version = pm.mod_version(props.modid)
logging.getLogger(__name__).info(f"Loading hexdoc for {props.modid} {version}")
return version
def load_book(
props: Properties,
pm: PluginManager,
lang: str | None,
allow_missing: bool,
):
"""lang, book, i18n"""
with ModResourceLoader.clean_and_load_all(props, pm) as loader:
lang, i18n = _load_i18n(loader, lang, allow_missing)[0]
_, data = Book.load_book_json(loader, props.book)
book = load_hex_book(data, pm, loader, i18n)
return lang, book, i18n
def load_books(
props: Properties,
pm: PluginManager,
lang: str | None,
allow_missing: bool,
):
"""books, mod_metadata"""
with ModResourceLoader.clean_and_load_all(props, pm) as loader:
_, book_data = Book.load_book_json(loader, props.book)
books = dict[str, tuple[Book, I18n]]()
for lang, i18n in _load_i18n(loader, lang, allow_missing):
books[lang] = (load_hex_book(book_data, pm, loader, i18n), i18n)
loader.export_dir = None # only export the first (default) book
return books, loader.mod_metadata
def _load_i18n(
loader: ModResourceLoader,
lang: str | None,
allow_missing: bool,
) -> list[tuple[str, I18n]]:
# only load the specified language
if lang is not None:
i18n = I18n.load(
loader,
lang=lang,
allow_missing=allow_missing,
)
return [(lang, i18n)]
# load everything
per_lang_i18n = I18n.load_all(
loader,
allow_missing=allow_missing,
)
# ensure the default lang is loaded first
default_lang = loader.props.default_lang
default_i18n = per_lang_i18n.pop(default_lang)
return [(default_lang, default_i18n), *per_lang_i18n.items()]
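As a usage illustration (not part of this commit), the CLI commands in main.py consume these helpers roughly like this; it assumes the module is importable as hexdoc.cli.load, that doc/properties.toml exists, and that the GITHUB_* environment variables Properties expects are set:

from pathlib import Path

from hexdoc.cli.load import load_book, load_common_data

# parse properties.toml, configure logging/plugins, and look up the mod version
props, pm, version = load_common_data(Path("doc/properties.toml"), verbosity=1)

# load and export a single language's book (lang=None would load the default language)
lang, book, i18n = load_book(props, pm, lang="en_us", allow_missing=False)
print(f"Loaded {props.modid} {version} book for {lang}")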

View file

@@ -0,0 +1,20 @@
import logging
def setup_logging(verbosity: int):
logging.basicConfig(
style="{",
format="\033[1m[{relativeCreated:.02f} | {levelname} | {name}]\033[0m {message}",
level=log_level(verbosity),
)
logging.getLogger(__name__).info("Starting.")
def log_level(verbosity: int) -> int:
match verbosity:
case 0:
return logging.WARNING
case 1:
return logging.INFO
case _:
return logging.DEBUG
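A small illustration (not part of this commit) of how the -v/--verbose count maps to log levels; the import path hexdoc.cli.logging is assumed from the package layout:

import logging

from hexdoc.cli.logging import log_level, setup_logging

assert log_level(0) == logging.WARNING  # no -v
assert log_level(1) == logging.INFO     # -v
assert log_level(2) == logging.DEBUG    # -vv or more

setup_logging(verbosity=1)  # calls logging.basicConfig and logs "Starting." at INFO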

190 doc/src/hexdoc/cli/main.py Normal file
View file

@@ -0,0 +1,190 @@
import json
import logging
import os
import shutil
from http.server import HTTPServer, SimpleHTTPRequestHandler
from pathlib import Path
from typing import Annotated, Union
import typer
from hexdoc.minecraft import I18n
from hexdoc.utils import ModResourceLoader
from .load import load_book, load_books, load_common_data
from .render import create_jinja_env, render_book
from .sitemap import (
assert_version_exists,
delete_root_book,
delete_updated_books,
dump_sitemap,
load_sitemap,
)
VerbosityOption = Annotated[int, typer.Option("--verbose", "-v", count=True)]
RequiredPathOption = Annotated[Path, typer.Option()]
app = typer.Typer()
@app.command()
def list_langs(
props_file: Path,
*,
verbosity: VerbosityOption = 0,
):
"""Get the available language codes as a JSON list."""
props, pm, _ = load_common_data(props_file, verbosity)
with ModResourceLoader.load_all(props, pm, export=False) as loader:
langs = sorted(I18n.list_all(loader))
print(json.dumps(langs))
@app.command()
def export(
props_file: Path,
*,
lang: Union[str, None] = None,
allow_missing: bool = False,
verbosity: VerbosityOption = 0,
):
"""Run hexdoc, but skip rendering the web book - just export the book resources."""
props, pm, _ = load_common_data(props_file, verbosity)
load_book(props, pm, lang, allow_missing)
@app.command()
def render(
props_file: Path,
output_dir: Path,
*,
update_latest: bool = True,
release: bool = False,
clean: bool = False,
lang: Union[str, None] = None,
allow_missing: bool = False,
verbosity: VerbosityOption = 0,
):
"""Export resources and render the web book."""
# load data
props, pm, version = load_common_data(props_file, verbosity)
books, mod_metadata = load_books(props, pm, lang, allow_missing)
# set up Jinja
env = create_jinja_env(props)
templates = {
"index.html": env.get_template(props.template.main),
"index.css": env.get_template(props.template.style),
}
if clean:
shutil.rmtree(output_dir, ignore_errors=True)
for should_render, version_, is_root in [
(update_latest, "latest", False),
(release, version, False),
(update_latest and release, version, True),
]:
if not should_render:
continue
for lang_, (book, i18n) in books.items():
render_book(
props=props,
lang=lang_,
book=book,
i18n=i18n,
templates=templates,
output_dir=output_dir,
mod_metadata=mod_metadata,
allow_missing=allow_missing,
version=version_,
is_root=is_root,
)
logging.getLogger(__name__).info("Done.")
@app.command()
def merge(
*,
src: RequiredPathOption,
dst: RequiredPathOption,
update_latest: bool = True,
release: bool = False,
):
# ensure at least the default language was built successfully
if update_latest:
assert_version_exists(root=src, version="latest")
# TODO: figure out how to do this with pluggy (we don't have the props file here)
# if is_release:
# assert_version_exists(src, GRADLE_VERSION)
dst.mkdir(parents=True, exist_ok=True)
# remove any stale data that we're about to replace
delete_updated_books(src=src, dst=dst)
if update_latest and release:
delete_root_book(root=dst)
# do the merge
shutil.copytree(src=src, dst=dst, dirs_exist_ok=True)
# rebuild the sitemap
sitemap = load_sitemap(dst)
dump_sitemap(dst, sitemap)
@app.command()
def serve(
props_file: Path,
*,
port: int = 8000,
src: RequiredPathOption,
dst: RequiredPathOption,
update_latest: bool = True,
release: bool = False,
clean: bool = False,
lang: Union[str, None] = None,
allow_missing: bool = False,
verbosity: VerbosityOption = 0,
):
book_path = dst.resolve().relative_to(Path.cwd())
base_url = f"http://localhost:{port}"
book_url = f"{base_url}/{book_path.as_posix()}"
os.environ |= {
"DEBUG_GITHUBUSERCONTENT": base_url,
"GITHUB_PAGES_URL": book_url,
}
print("Rendering...")
render(
props_file=props_file,
output_dir=src,
update_latest=update_latest,
release=release,
clean=clean,
lang=lang,
allow_missing=allow_missing,
verbosity=verbosity,
)
print("Merging...")
merge(
src=src,
dst=dst,
update_latest=update_latest,
release=release,
)
print(f"Serving web book at {book_url} (press ctrl+c to exit)\n")
with HTTPServer(("", port), SimpleHTTPRequestHandler) as httpd:
httpd.serve_forever()
if __name__ == "__main__":
app()
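As a usage sketch (not part of this commit), the new subcommands can be exercised in-process with Typer's test runner; this assumes hexdoc is installed, doc/properties.toml exists, and the GITHUB_* environment variables Properties expects are set:

from typer.testing import CliRunner

from hexdoc.cli.main import app

runner = CliRunner()

# equivalent to `hexdoc list-langs doc/properties.toml` (Typer renames list_langs to list-langs)
result = runner.invoke(app, ["list-langs", "doc/properties.toml"])
print(result.output)  # a JSON list such as ["en_us", ...]

# equivalent to `hexdoc export doc/properties.toml --verbose`
result = runner.invoke(app, ["export", "doc/properties.toml", "--verbose"])
print(result.exit_code)  # 0 on success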

View file

@@ -0,0 +1,137 @@
# pyright: reportUnknownMemberType=false, reportUnknownArgumentType=false
# pyright: reportUnknownLambdaType=false
import logging
import shutil
from pathlib import Path
from jinja2 import (
ChoiceLoader,
FileSystemLoader,
PackageLoader,
StrictUndefined,
Template,
)
from jinja2.sandbox import SandboxedEnvironment
from hexdoc.minecraft import I18n
from hexdoc.patchouli import Book
from hexdoc.utils import Properties
from hexdoc.utils.jinja_extensions import (
IncludeRawExtension,
hexdoc_block,
hexdoc_localize,
hexdoc_texture_url,
hexdoc_wrap,
)
from hexdoc.utils.path import write_to_path
from hexdoc.utils.resource_loader import HexdocMetadata
from .sitemap import MARKER_NAME, SitemapMarker
def create_jinja_env(props: Properties):
env = SandboxedEnvironment(
# search order: template_dirs, template_packages
loader=ChoiceLoader(
[FileSystemLoader(props.template.dirs)]
+ [PackageLoader(name, str(path)) for name, path in props.template.packages]
),
undefined=StrictUndefined,
lstrip_blocks=True,
trim_blocks=True,
autoescape=True,
extensions=[
IncludeRawExtension,
],
)
env.filters |= { # pyright: ignore[reportGeneralTypeIssues]
"hexdoc_block": hexdoc_block,
"hexdoc_wrap": hexdoc_wrap,
"hexdoc_localize": hexdoc_localize,
"hexdoc_texture_url": hexdoc_texture_url,
}
return env
def render_book(
*,
props: Properties,
lang: str,
book: Book,
i18n: I18n,
templates: dict[str, Template],
output_dir: Path,
mod_metadata: dict[str, HexdocMetadata],
allow_missing: bool,
version: str,
is_root: bool,
):
# /index.html
# /lang/index.html
# /v/version/index.html
# /v/version/lang/index.html
path = Path()
if not is_root:
path /= "v"
path /= version
if lang != props.default_lang:
path /= lang
output_dir /= path
page_url = "/".join([props.url, *path.parts])
logging.getLogger(__name__).info(f"Rendering {output_dir}")
template_args = {
**props.template.args,
"book": book,
"props": props,
"page_url": page_url,
"version": version,
"lang": lang,
"mod_metadata": mod_metadata,
"is_bleeding_edge": version == "latest",
"_": lambda key: hexdoc_localize( # i18n helper
key,
do_format=False,
props=props,
book=book,
i18n=i18n,
allow_missing=allow_missing,
),
"_f": lambda key: hexdoc_localize( # i18n helper with patchi formatting
key,
do_format=True,
props=props,
book=book,
i18n=i18n,
allow_missing=allow_missing,
),
}
for filename, template in templates.items():
file = template.render(template_args)
stripped_file = strip_empty_lines(file)
write_to_path(output_dir / filename, stripped_file)
if props.template.static_dir:
shutil.copytree(props.template.static_dir, output_dir, dirs_exist_ok=True)
# marker file for updating the sitemap later
# we use this because GitHub Actions matrix jobs don't have per-job outputs
# this feels scuffed but it does work
if not is_root:
marker = SitemapMarker(
version=version,
lang=lang,
path="/" + "/".join(path.parts),
is_default_lang=lang == props.default_lang,
)
(output_dir / MARKER_NAME).write_text(marker.model_dump_json())
def strip_empty_lines(text: str) -> str:
return "\n".join(s for s in text.splitlines() if s.strip())

View file

@@ -0,0 +1,81 @@
import shutil
from collections import defaultdict
from pathlib import Path
from pydantic import Field, TypeAdapter
from hexdoc.utils import DEFAULT_CONFIG, HexdocModel
from hexdoc.utils.path import write_to_path
MARKER_NAME = ".sitemap-marker.json"
class SitemapMarker(HexdocModel):
version: str
lang: str
path: str
is_default_lang: bool
@classmethod
def load(cls, path: Path):
return cls.model_validate_json(path.read_text("utf-8"))
class SitemapItem(HexdocModel):
default_path: str = Field(alias="defaultPath", default="")
lang_paths: dict[str, str] = Field(alias="langPaths", default_factory=dict)
def add_marker(self, marker: SitemapMarker):
self.lang_paths[marker.lang] = marker.path
if marker.is_default_lang:
self.default_path = marker.path
Sitemap = dict[str, SitemapItem]
def load_sitemap(root: Path) -> Sitemap:
sitemap: Sitemap = defaultdict(SitemapItem)
# crawl the new tree to rebuild the sitemap
for marker_path in root.rglob(MARKER_NAME):
marker = SitemapMarker.load(marker_path)
sitemap[marker.version].add_marker(marker)
return sitemap
def dump_sitemap(root: Path, sitemap: Sitemap):
# dump the sitemap using a TypeAdapter so it serializes the items properly
ta = TypeAdapter(Sitemap, config=DEFAULT_CONFIG)
write_to_path(
root / "meta" / "sitemap.json",
ta.dump_json(sitemap, by_alias=True),
)
def assert_version_exists(*, root: Path, version: str):
path = root / "v" / version / "index.html"
if not path.is_file():
raise FileNotFoundError(f"Missing default language for {version}: {path}")
def delete_root_book(*, root: Path):
"""Remove the book from the site root."""
for path in root.iterdir():
if path.name in ["v", "meta"]:
continue
if path.is_dir():
shutil.rmtree(path)
else:
path.unlink()
def delete_updated_books(*, src: Path, dst: Path):
src_markers = src.rglob(MARKER_NAME)
for marker in src_markers:
src_dir = marker.parent
dst_dir = dst / src_dir.relative_to(src)
shutil.rmtree(dst_dir, ignore_errors=True)
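To illustrate the serialized shape (not part of this commit): each SitemapItem collects one marker per language and is keyed by version in meta/sitemap.json. The import path hexdoc.cli.sitemap is assumed and the marker values are made up:

from hexdoc.cli.sitemap import SitemapItem, SitemapMarker

item = SitemapItem()
item.add_marker(SitemapMarker(version="latest", lang="en_us", path="/v/latest", is_default_lang=True))
item.add_marker(SitemapMarker(version="latest", lang="zh_cn", path="/v/latest/zh_cn", is_default_lang=False))

# roughly: {"defaultPath": "/v/latest", "langPaths": {"en_us": "/v/latest", "zh_cn": "/v/latest/zh_cn"}}
print(item.model_dump_json(by_alias=True))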

View file

@@ -1,356 +0,0 @@
# pyright: reportUnknownMemberType=false, reportUnknownArgumentType=false
# pyright: reportUnknownLambdaType=false
import io
import json
import logging
import os
import shutil
import sys
from argparse import ArgumentParser
from pathlib import Path
from typing import Self, Sequence
from jinja2 import (
ChoiceLoader,
FileSystemLoader,
PackageLoader,
StrictUndefined,
Template,
)
from jinja2.sandbox import SandboxedEnvironment
from pydantic import model_validator
from hexdoc.hexcasting.hex_book import load_hex_book
from hexdoc.minecraft import I18n
from hexdoc.patchouli import Book
from hexdoc.plugin import PluginManager
from hexdoc.utils import HexdocModel, ModResourceLoader, Properties
from hexdoc.utils.deserialize import cast_or_raise
from hexdoc.utils.jinja_extensions import (
IncludeRawExtension,
hexdoc_block,
hexdoc_localize,
hexdoc_texture_url,
hexdoc_wrap,
)
from hexdoc.utils.path import write_to_path
from hexdoc.utils.resource_loader import HexdocMetadata
MARKER_NAME = ".sitemap-marker.json"
def strip_empty_lines(text: str) -> str:
return "\n".join(s for s in text.splitlines() if s.strip())
# CLI arguments
class Args(HexdocModel):
"""example: main.py properties.toml -o out.html"""
properties_file: Path
verbose: int
ci: bool
allow_missing: bool
lang: str | None
is_release: bool
update_latest: bool
clean: bool
output_dir: Path | None
export_only: bool
list_langs: bool
@classmethod
def parse_args(cls, args: Sequence[str] | None = None) -> Self:
parser = cls._parser()
args_dict = vars(parser.parse_args(args))
return cls.model_validate(args_dict)
@classmethod
def _parser(cls):
parser = ArgumentParser()
parser.add_argument("properties_file", type=Path)
parser.add_argument("--verbose", "-v", action="count", default=0)
parser.add_argument("--ci", action="store_true")
parser.add_argument("--allow-missing", action="store_true")
parser.add_argument("--lang", type=str, default=None)
parser.add_argument("--clean", action="store_true")
# do this instead of store_true because it's easier to use with Actions
parser.add_argument("--is-release", default=False)
parser.add_argument("--update-latest", default=True)
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument("--output-dir", "-o", type=Path)
group.add_argument("--export-only", action="store_true")
group.add_argument("--list-langs", action="store_true")
return parser
@model_validator(mode="after")
def _post_root(self):
if self.ci and os.getenv("RUNNER_DEBUG") == "1":
self.verbose = True
# exactly one of these must be truthy (should be enforced by group above)
assert bool(self.output_dir) + self.export_only + self.list_langs == 1
return self
@property
def log_level(self) -> int:
match self.verbose:
case 0:
return logging.WARNING
case 1:
return logging.INFO
case _:
return logging.DEBUG
class SitemapMarker(HexdocModel):
version: str
lang: str
path: str
is_default_lang: bool
@classmethod
def load(cls, path: Path):
return cls.model_validate_json(path.read_text("utf-8"))
def main(args: Args | None = None) -> None:
# set stdout to utf-8 so printing to pipe or redirect doesn't break on Windows
# (common windows L)
cast_or_raise(sys.stdout, io.TextIOWrapper).reconfigure(encoding="utf-8")
cast_or_raise(sys.stderr, io.TextIOWrapper).reconfigure(encoding="utf-8")
# allow passing Args for test cases, but parse by default
if args is None:
args = Args.parse_args()
# set up logging
logging.basicConfig(
style="{",
format="\033[1m[{relativeCreated:.02f} | {levelname} | {name}]\033[0m {message}",
level=args.log_level,
)
logger = logging.getLogger(__name__)
logger.info("Starting.")
# Properties is the main config file for hexdoc
props = Properties.load(args.properties_file)
logger.debug(props)
# load plugins
pm = PluginManager()
version = pm.mod_version(props.modid)
logger.info(f"Building docs for {props.modid} {version}")
# just list the languages and exit
if args.list_langs:
with ModResourceLoader.load_all(props, pm, export=False) as loader:
langs = sorted(I18n.list_all(loader))
print(json.dumps(langs))
return
# load everything
with ModResourceLoader.clean_and_load_all(props, pm) as loader:
books = dict[str, tuple[Book, I18n]]()
if args.lang:
first_lang = args.lang
per_lang_i18n = {
first_lang: I18n.load(
loader,
lang=first_lang,
allow_missing=args.allow_missing,
)
}
else:
first_lang = props.default_lang
per_lang_i18n = I18n.load_all(
loader,
allow_missing=args.allow_missing,
)
# if export_only, skip actually loading the other languages' books
if args.export_only:
per_lang_i18n = {first_lang: per_lang_i18n[first_lang]}
_, book_data = Book.load_book_json(loader, props.book)
# load one book with exporting enabled
first_i18n = per_lang_i18n.pop(first_lang)
books[first_lang] = (
load_hex_book(book_data, pm, loader, first_i18n),
first_i18n,
)
# then load the rest with exporting disabled for efficiency
loader.export_dir = None
for lang, i18n in per_lang_i18n.items():
books[lang] = (load_hex_book(book_data, pm, loader, i18n), i18n)
mod_metadata = loader.mod_metadata
if args.export_only:
return
# set up Jinja
env = SandboxedEnvironment(
# search order: template_dirs, template_packages
loader=ChoiceLoader(
[FileSystemLoader(props.template.dirs)]
+ [PackageLoader(name, str(path)) for name, path in props.template.packages]
),
undefined=StrictUndefined,
lstrip_blocks=True,
trim_blocks=True,
autoescape=True,
extensions=[
IncludeRawExtension,
],
)
env.filters |= { # pyright: ignore[reportGeneralTypeIssues]
"hexdoc_block": hexdoc_block,
"hexdoc_wrap": hexdoc_wrap,
"hexdoc_localize": hexdoc_localize,
"hexdoc_texture_url": hexdoc_texture_url,
}
templates = {
"index.html": env.get_template(props.template.main),
"index.css": env.get_template(props.template.style),
}
# render everything
assert (output_dir := args.output_dir)
if args.clean:
shutil.rmtree(output_dir, ignore_errors=True)
if args.update_latest:
render_books(
props=props,
books=books,
templates=templates,
output_dir=output_dir,
mod_metadata=mod_metadata,
allow_missing=args.allow_missing,
version="latest",
is_root=False,
)
if args.is_release:
render_books(
props=props,
books=books,
templates=templates,
output_dir=output_dir,
mod_metadata=mod_metadata,
allow_missing=args.allow_missing,
version=version,
is_root=False,
)
# the default book should be the latest released version
if args.update_latest and args.is_release:
render_books(
props=props,
books=books,
templates=templates,
output_dir=output_dir,
mod_metadata=mod_metadata,
allow_missing=args.allow_missing,
version=version,
is_root=True,
)
logger.info("Done.")
def render_books(
*,
props: Properties,
books: dict[str, tuple[Book, I18n]],
templates: dict[str, Template],
output_dir: Path,
mod_metadata: dict[str, HexdocMetadata],
allow_missing: bool,
version: str,
is_root: bool,
):
for lang, (book, i18n) in books.items():
# /index.html
# /lang/index.html
# /v/version/index.html
# /v/version/lang/index.html
path = Path()
if not is_root:
path /= "v"
path /= version
if lang != props.default_lang:
path /= lang
output_dir /= path
page_url = "/".join([props.url, *path.parts])
logging.getLogger(__name__).info(f"Rendering {output_dir}")
template_args = {
**props.template.args,
"book": book,
"props": props,
"page_url": page_url,
"version": version,
"lang": lang,
"mod_metadata": mod_metadata,
"is_bleeding_edge": version == "latest",
"_": lambda key: hexdoc_localize( # i18n helper
key,
do_format=False,
props=props,
book=book,
i18n=i18n,
allow_missing=allow_missing,
),
"_f": lambda key: hexdoc_localize( # i18n helper with patchi formatting
key,
do_format=True,
props=props,
book=book,
i18n=i18n,
allow_missing=allow_missing,
),
}
for filename, template in templates.items():
file = template.render(template_args)
stripped_file = strip_empty_lines(file)
write_to_path(output_dir / filename, stripped_file)
if props.template.static_dir:
shutil.copytree(props.template.static_dir, output_dir, dirs_exist_ok=True)
# marker file for updating the sitemap later
# we use this because matrix doesn't have outputs
# this feels scuffed but it does work
if not is_root:
marker = SitemapMarker(
version=version,
lang=lang,
path="/" + "/".join(path.parts),
is_default_lang=lang == props.default_lang,
)
(output_dir / MARKER_NAME).write_text(marker.model_dump_json())
if __name__ == "__main__":
main()

View file

@@ -1,101 +0,0 @@
import shutil
from argparse import ArgumentParser
from collections import defaultdict
from pathlib import Path
from typing import Self, Sequence
from pydantic import Field, TypeAdapter
from hexdoc.utils import DEFAULT_CONFIG, HexdocModel
from hexdoc.utils.path import write_to_path
from .hexdoc import MARKER_NAME, SitemapMarker
class SitemapItem(HexdocModel):
default_path: str = Field(alias="defaultPath", default="")
lang_paths: dict[str, str] = Field(alias="langPaths", default_factory=dict)
def add_marker(self, marker: SitemapMarker):
self.lang_paths[marker.lang] = marker.path
if marker.is_default_lang:
self.default_path = marker.path
Sitemap = dict[str, SitemapItem]
# CLI arguments
class Args(HexdocModel):
"""example: main.py properties.toml -o out.html"""
src: Path
dst: Path
is_release: bool
update_latest: bool
@classmethod
def parse_args(cls, args: Sequence[str] | None = None) -> Self:
parser = ArgumentParser(allow_abbrev=False)
parser.add_argument("--src", type=Path, required=True)
parser.add_argument("--dst", type=Path, required=True)
parser.add_argument("--is-release", default=False)
parser.add_argument("--update-latest", default=True)
return cls.model_validate(vars(parser.parse_args(args)))
def assert_version_exists(src: Path, version: str):
path = src / "v" / version / "index.html"
if not path.is_file():
raise FileNotFoundError(f"Missing default language for {version}: {path}")
def main():
args = Args.parse_args()
# ensure at least the default language was built successfully
if args.update_latest:
assert_version_exists(args.src, "latest")
# TODO: figure out how to do this with pluggy
# if args.is_release:
# assert_version_exists(args.src, GRADLE_VERSION)
args.dst.mkdir(parents=True, exist_ok=True)
# remove the book from the root of the destination since we're adding a new one now
if args.is_release and args.update_latest:
for path in args.dst.iterdir():
if path.name in ["v", "meta"]:
continue
if path.is_dir():
shutil.rmtree(path)
else:
path.unlink()
# find all the marked directories in source and delete them from dest
for marker_path in args.src.rglob(MARKER_NAME):
dst_marker_dir = args.dst / marker_path.parent.relative_to(args.src)
shutil.rmtree(dst_marker_dir, ignore_errors=True)
# that should be all the possible conflicts, so copy src into dst now
shutil.copytree(args.src, args.dst, dirs_exist_ok=True)
# crawl the new tree to rebuild the sitemap
sitemap: Sitemap = defaultdict(SitemapItem)
for marker_path in args.dst.rglob(MARKER_NAME):
marker = SitemapMarker.load(marker_path)
sitemap[marker.version].add_marker(marker)
# dump the sitemap using a TypeAdapter so it serializes the items properly
ta = TypeAdapter(Sitemap, config=DEFAULT_CONFIG)
write_to_path(
args.dst / "meta" / "sitemap.json",
ta.dump_json(sitemap, by_alias=True),
)
if __name__ == "__main__":
main()

View file

@@ -1,5 +1,6 @@
from __future__ import annotations
import logging
import re
from pathlib import Path
from typing import Annotated, Any, Self
@@ -29,10 +30,23 @@ class EnvironmentVariableProps(BaseSettings):
# set by CI
github_pages_url: NoTrailingSlashHttpUrl
# optional for debugging
debug_githubusercontent: str | None = None
@classmethod
def model_validate_env(cls):
return cls.model_validate({})
@property
def githubusercontent(self):
if self.debug_githubusercontent is not None:
return self.debug_githubusercontent
return (
f"https://raw.githubusercontent.com"
f"/{self.repo_owner}/{self.repo_name}/{self.github_sha}"
)
@property
def repo_owner(self):
return self._github_repository_parts[0]
@@ -95,11 +109,14 @@ class Properties(StripHiddenModel):
@classmethod
def load(cls, path: Path) -> Self:
env = EnvironmentVariableProps.model_validate_env()
return cls.model_validate(
props = cls.model_validate(
load_toml_with_placeholders(path) | {"env": env},
context=RelativePathContext(root=path.parent),
)
logging.getLogger(__name__).debug(props)
return props
def mod_loc(self, path: str) -> ResourceLocation:
"""Returns a ResourceLocation with self.modid as the namespace."""
return ResourceLocation(self.modid, path)

View file

@@ -118,12 +118,7 @@ class ModResourceLoader:
# export this mod's metadata
loader.mod_metadata[props.modid] = metadata = HexdocMetadata(
book_url=f"{props.url}/v/{version}",
asset_url=(
f"https://raw.githubusercontent.com"
f"/{props.env.repo_owner}"
f"/{props.env.repo_name}"
f"/{props.env.github_sha}"
),
asset_url=props.env.githubusercontent,
textures=loader._map_own_assets("textures", root=repo_root),
sounds=loader._map_own_assets("sounds", root=repo_root),
)

View file

@@ -41,6 +41,7 @@ dependencies = [
"Jinja2>=3.1.2,<4",
"pyjson5>=1.6.3,<2",
"pluggy>=1.3.0,<2",
"typer[all]>=0.9.0,<1",
]
dynamic = ["version"]
@@ -85,8 +86,7 @@ packages = [
]
[project.scripts]
hexdoc = "hexdoc.scripts.hexdoc:main"
hexdoc_merge = "hexdoc.scripts.hexdoc_merge:main"
hexdoc = "hexdoc.cli.main:app"
# hexdoc