Stop changing working directory
parent 0b7d117de9
commit 81fff3b1d7

4 changed files with 165 additions and 165 deletions
@@ -10,13 +10,12 @@ from typing import Self, Sequence

 from jinja2 import ChoiceLoader, FileSystemLoader, PackageLoader, StrictUndefined
 from jinja2.sandbox import SandboxedEnvironment
-from pydantic import field_validator, model_validator
+from pydantic import model_validator

 from hexdoc.hexcasting.hex_book import load_hex_book
 from hexdoc.minecraft import I18n
 from hexdoc.patchouli import Book
 from hexdoc.utils import HexdocModel, ModResourceLoader, Properties
-from hexdoc.utils.cd import cd
 from hexdoc.utils.path import write_to_path

 from .__gradle_version__ import GRADLE_VERSION
@@ -49,6 +48,12 @@ class Args(HexdocModel):

     @classmethod
     def parse_args(cls, args: Sequence[str] | None = None) -> Self:
+        parser = cls._parser()
+        args_dict = vars(parser.parse_args(args))
+        return cls.model_validate(args_dict)
+
+    @classmethod
+    def _parser(cls):
         parser = ArgumentParser()

         parser.add_argument("properties_file", type=Path)
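Splitting `parse_args` into a thin wrapper plus a `_parser` classmethod separates argument parsing from model validation, which also makes the CLI easy to drive from tests. A minimal sketch of the new call path (the argument values are hypothetical):

    # Hypothetical invocation of the refactored entry point.
    args = Args.parse_args(["properties.toml", "--lang", "en_us", "--clean"])
    # Equivalent to: Args.model_validate(vars(Args._parser().parse_args([...])))
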
@@ -58,6 +63,7 @@ class Args(HexdocModel):
         parser.add_argument("--allow-missing", action="store_true")
         parser.add_argument("--lang", type=str, default=None)
+        parser.add_argument("--clean", action="store_true")

         # do this instead of store_true because it's easier to use with Actions
         parser.add_argument("--is-release", default=False)
         parser.add_argument("--update-latest", default=True)
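The Actions comment is about string inputs: a workflow can always pass the flag and let its value decide, instead of templating the flag itself in or out. Since `Args` is a pydantic model, the string is coerced during validation. A sketch, assuming `is_release` is declared as `bool` on the model:

    # e.g. in CI: hexdoc properties.toml --is-release ${{ inputs.release }}
    args = Args.parse_args(["properties.toml", "--is-release", "false"])
    assert args.is_release is False  # pydantic coerces the string "false" to bool
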
@@ -67,20 +73,7 @@ class Args(HexdocModel):
         group.add_argument("--export-only", action="store_true")
         group.add_argument("--list-langs", action="store_true")

-        return cls.model_validate(vars(parser.parse_args(args)))
-
-    @field_validator(
-        "properties_file",
-        "output_dir",
-        mode="after",
-    )
-    def _resolve_path(cls, value: Path | None):
-        # make paths absolute because we're cd'ing later
-        match value:
-            case Path():
-                return value.resolve()
-            case _:
-                return value
+        return parser

     @model_validator(mode="after")
     def _post_root(self):
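The deleted `_resolve_path` validator existed only because of the upcoming `cd`: a relative CLI path had to be pinned to an absolute one before the working directory changed under it. With the directory left alone, that footgun disappears. A small self-contained illustration of the problem the validator was papering over:

    import os
    import tempfile
    from pathlib import Path

    out = Path("out")              # a relative path, e.g. straight from the CLI
    before = out.resolve()

    prev = os.getcwd()
    with tempfile.TemporaryDirectory() as tmp:
        os.chdir(tmp)              # what the old cd() context manager did
        after = out.resolve()      # same Path object, different meaning
        os.chdir(prev)

    assert before != after
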
@@ -119,155 +112,151 @@ def main(args: Args | None = None) -> None:
     if args is None:
         args = Args.parse_args()

-    # treat all paths as relative to the location of the properties file by cd-ing there
-    with cd(args.properties_file.parent):
     # set stdout to utf-8 so printing to pipe or redirect doesn't break on Windows
     # (common windows L)
     assert isinstance(sys.stdout, io.TextIOWrapper)
     sys.stdout.reconfigure(encoding="utf-8")

     # set up logging
     logging.basicConfig(
         style="{",
         format="\033[1m[{relativeCreated:.02f} | {levelname} | {name}]\033[0m {message}",
         level=args.log_level,
     )
     logger = logging.getLogger(__name__)

     props = Properties.load(args.properties_file)
+    logger.debug(props)

     # just list the languages and exit
     if args.list_langs:
         with ModResourceLoader.load_all(props, export=False) as loader:
             langs = sorted(I18n.list_all(loader))
             print(json.dumps(langs))
             return

     # load everything
     with ModResourceLoader.clean_and_load_all(props) as loader:
         books = dict[str, Book]()

         if args.lang:
             first_lang = args.lang
             per_lang_i18n = {
                 first_lang: I18n.load(
                     loader,
                     lang=first_lang,
                     allow_missing=args.allow_missing,
                 )
             }
         else:
             first_lang = props.default_lang
             per_lang_i18n = I18n.load_all(
                 loader,
                 allow_missing=args.allow_missing,
             )

         # if export_only, skip actually loading the other languages' books
         if args.export_only:
             per_lang_i18n = {first_lang: per_lang_i18n[first_lang]}

         _, book_data = Book.load_book_json(loader, props.book)

         # load one book with exporting enabled
         books[first_lang] = load_hex_book(
             book_data,
             loader,
             i18n=per_lang_i18n.pop(first_lang),
         )

         # then load the rest with exporting disabled for efficiency
         loader.export_dir = None
         for lang, i18n in per_lang_i18n.items():
             books[lang] = load_hex_book(book_data, loader, i18n)

     if args.export_only:
         return

     assert args.output_dir
     if args.clean:
         shutil.rmtree(args.output_dir, ignore_errors=True)

     # set up Jinja environment
     env = SandboxedEnvironment(
         # search order: template_dirs, template_packages
         loader=ChoiceLoader(
             [FileSystemLoader(props.template.dirs)]
-            + [
-                PackageLoader(name, str(path))
-                for name, path in props.template.packages
-            ]
+            + [PackageLoader(name, str(path)) for name, path in props.template.packages]
         ),
         undefined=StrictUndefined,
         lstrip_blocks=True,
         trim_blocks=True,
         autoescape=True,
         extensions=[
             IncludeRawExtension,
         ],
     )

     env.filters |= {  # type: ignore
         "hexdoc_block": hexdoc_block,
         "hexdoc_wrap": hexdoc_wrap,
     }

     template = env.get_template(props.template.main)

     static_dir = props.template.static_dir

     versions = ["latest"] if args.update_latest else []

     if args.is_release:
         # root should be the latest released version
         versions.append(GRADLE_VERSION)
         if args.update_latest:
             versions.append("")

     # render each version and language separately
     for version in versions:
         for lang, book in books.items():
             is_default_lang = lang == props.default_lang

             # /index.html
             # /lang/index.html
             # /v/version/index.html
             # /v/version/lang/index.html
             parts = ("v", version) if version else ()
             if not is_default_lang:
                 parts += (lang,)

             output_dir = args.output_dir / Path(*parts)
             page_url = "/".join((props.url,) + parts)

             logger.info(f"Rendering {output_dir}")
             docs = strip_empty_lines(
                 template.render(
                     **props.template.args,
                     book=book,
                     props=props,
                     page_url=page_url,
                     version=version or GRADLE_VERSION,
                     lang=lang,
                     is_bleeding_edge=version == "latest",
                 )
             )

             write_to_path(output_dir / "index.html", docs)
             if static_dir:
                 shutil.copytree(static_dir, output_dir, dirs_exist_ok=True)

             # marker file for updating the sitemap later
             # we use this because matrix doesn't have outputs
             # this feels scuffed but it does work
             if version:
                 marker = SitemapMarker(
                     version=version,
                     lang=lang,
                     path="/" + "/".join(parts),
                     is_default_lang=is_default_lang,
                 )
                 (output_dir / MARKER_NAME).write_text(marker.model_dump_json())


 if __name__ == "__main__":
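For reference, the version/language loop above maps every (version, lang) pair onto a stable URL layout. A standalone sketch of just that layout logic, with hypothetical values standing in for `props.url` and `props.default_lang`:

    from pathlib import Path

    url = "https://example.github.io/mod"    # stands in for props.url
    default_lang = "en_us"                   # stands in for props.default_lang

    for version in ["latest", "1.0.0", ""]:  # "" renders at the site root
        for lang in ["en_us", "zh_cn"]:
            parts = ("v", version) if version else ()
            if lang != default_lang:
                parts += (lang,)
            print(Path(*parts) / "index.html", "->", "/".join((url,) + parts))
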
@@ -1,15 +1,19 @@
-import os
-from contextlib import contextmanager
-from typing import AnyStr
+from pathlib import Path
+from typing import Annotated
+
+from pydantic import AfterValidator, ValidationInfo
+
+from .deserialize import cast_or_raise
+from .model import ValidationContext


-# https://stackoverflow.com/a/24176022
-@contextmanager
-def cd(newdir: os.PathLike[AnyStr]):
-    """Context manager which temporarily changes the script's working directory."""
-    prevdir = os.getcwd()
-    os.chdir(os.path.expanduser(newdir))
-    try:
-        yield
-    finally:
-        os.chdir(prevdir)
+class RelativePathContext(ValidationContext):
+    root: Path
+
+
+def validate_relative_path(path: Path, info: ValidationInfo):
+    context = cast_or_raise(info.context, RelativePathContext)
+    return context.root / path
+
+
+RelativePath = Annotated[Path, AfterValidator(validate_relative_path)]
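This hunk swaps the `cd` context manager for validation-time path rebasing: instead of changing directory so that relative paths "just work", every `RelativePath` is joined onto an explicit root passed through pydantic's validation context. A self-contained sketch of the same pattern (`Example` is a hypothetical model; `cast_or_raise` is approximated with an isinstance check):

    from pathlib import Path
    from typing import Annotated

    from pydantic import AfterValidator, BaseModel, ValidationInfo


    class RelativePathContext(BaseModel):
        root: Path


    def validate_relative_path(path: Path, info: ValidationInfo):
        # stand-in for cast_or_raise: demand the expected context type
        if not isinstance(info.context, RelativePathContext):
            raise TypeError(f"expected RelativePathContext, got {info.context!r}")
        return info.context.root / path


    RelativePath = Annotated[Path, AfterValidator(validate_relative_path)]


    class Example(BaseModel):  # hypothetical stand-in for a props model
        export_dir: RelativePath


    ex = Example.model_validate(
        {"export_dir": "out"},
        context=RelativePathContext(root=Path("/repo/doc")),
    )
    assert ex.export_dir == Path("/repo/doc/out")
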
@@ -8,6 +8,7 @@ from typing import Annotated, Any, Self

 from pydantic import AfterValidator, Field, HttpUrl, TypeAdapter, field_validator

+from .cd import RelativePath, RelativePathContext
 from .model import DEFAULT_CONFIG, StripHiddenModel
 from .resource import ResourceDir, ResourceLocation
 from .toml_placeholders import load_toml_with_placeholders
@@ -20,14 +21,14 @@ NoTrailingSlashHttpUrl = Annotated[


 class PatternStubProps(StripHiddenModel):
-    path: Path
+    path: RelativePath
     regex: re.Pattern[str]


 class TemplateProps(StripHiddenModel):
     main: str
-    static_dir: Path | None = None
-    dirs: list[Path] = Field(default_factory=list)
+    static_dir: RelativePath | None = None
+    dirs: list[RelativePath] = Field(default_factory=list)
     packages: list[tuple[str, Path]]
     args: dict[str, Any]
@@ -52,7 +53,7 @@ class Properties(StripHiddenModel):
     the text color to the default."""

     resource_dirs: list[ResourceDir]
-    export_dir: Path | None = None
+    export_dir: RelativePath | None = None

     pattern_stubs: list[PatternStubProps]
@@ -65,7 +66,10 @@ class Properties(StripHiddenModel):

     @classmethod
     def load(cls, path: Path) -> Self:
-        return cls.model_validate(load_toml_with_placeholders(path))
+        return cls.model_validate(
+            load_toml_with_placeholders(path),
+            context=RelativePathContext(root=path.parent),
+        )

     def mod_loc(self, path: str) -> ResourceLocation:
         """Returns a ResourceLocation with self.modid as the namespace."""
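With the context wired in at `Properties.load`, every `RelativePath` field in the props tree is resolved against the properties file's own directory at load time, so the process's working directory no longer matters. A hypothetical call site:

    # cwd-independent: configured paths come back rooted under /repo/doc
    props = Properties.load(Path("/repo/doc/properties.toml"))
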
@@ -35,6 +35,8 @@ from pydantic import (
 from pydantic.dataclasses import dataclass
 from pydantic.functional_validators import ModelWrapValidatorHandler

+from hexdoc.utils.cd import RelativePath
+
 from .deserialize import JSONDict
 from .model import DEFAULT_CONFIG, HexdocModel, ValidationContext
@@ -232,7 +234,8 @@ class BaseResourceDir(HexdocModel, ABC):


 class PathResourceDir(BaseResourceDir):
-    path: Path
+    # input is relative to the props file
+    path: RelativePath

     # direct paths are probably from this mod
     external: bool = False