Fully convert from Dacite to Pydantic!

This commit is contained in:
object-Object 2023-07-03 21:18:36 -04:00
parent 0a949a2b56
commit 04b926cf42
37 changed files with 982 additions and 2838 deletions

2
.vscode/launch.json vendored
View file

@ -17,7 +17,7 @@
"type": "python", "type": "python",
"request": "launch", "request": "launch",
"cwd": "${workspaceFolder}/doc", "cwd": "${workspaceFolder}/doc",
"program": "src/main.py", "module": "hexcasting.scripts.main",
"args": [ "args": [
"properties.toml", "properties.toml",
], ],

View file

@ -7,6 +7,7 @@ recipe_dirs = [
"{fabric.generated}/data/{modid}/recipes", "{fabric.generated}/data/{modid}/recipes",
"{forge.generated}/data/{modid}/recipes", "{forge.generated}/data/{modid}/recipes",
] ]
default_recipe_dir = 0
# NOTE: _Raw means "don't apply variable interpolation to this value" # NOTE: _Raw means "don't apply variable interpolation to this value"
# more on that later # more on that later

View file

@ -14,13 +14,15 @@ authors = [
readme = "README.md" readme = "README.md"
requires-python = ">=3.11" requires-python = ">=3.11"
dependencies = [ dependencies = [
"typed-argument-parser>=1.8.0", "typing_extensions~=4.7.0",
"pydantic==2.0b3", "typed-argument-parser~=1.8.0",
"pydantic==2.0",
] ]
[project.entry-points."hexdoc.Page"] [project.entry-points."hexdoc.Page"]
hexdoc-patchouli = "patchouli.page.pages" hexdoc-patchouli = "patchouli.page.pages"
hexdoc-hexcasting = "hexcasting.hex_pages" hexdoc-hexcasting = "hexcasting.hex_pages"
hexdoc-abstract-hexcasting = "hexcasting.abstract_hex_pages"
[project.entry-points."hexdoc.Recipe"] [project.entry-points."hexdoc.Recipe"]
hexdoc-minecraft = "minecraft.recipe.recipes" hexdoc-minecraft = "minecraft.recipe.recipes"

View file

@ -1,225 +0,0 @@
# pyright: reportPrivateUsage=false
# pyright: reportUnknownArgumentType=false
# pyright: reportUnknownMemberType=false
import copy
from itertools import zip_longest
from typing import (
Any,
ClassVar,
Collection,
Mapping,
TypeVar,
get_args,
get_origin,
get_type_hints,
)
import dacite.core
import dacite.types
from dacite import (
Config,
DaciteError,
StrictUnionMatchError,
UnionMatchError,
from_dict as _original_from_dict,
)
from dacite.cache import cache
from dacite.core import _build_value as _original_build_value
from dacite.data import Data
from dacite.dataclasses import get_fields
from dacite.types import extract_generic, is_instance, is_optional, is_subclass
from common.types import isinstance_or_raise
class UnionSkip(Exception):
    """Tagged union classes may raise this during initialization to say the data doesn't
    match their type.

    The patched union resolver catches this and silently tries the next member
    type instead of recording it as an error.
    """
def handle_metadata(data_class: type[Any], data: dict[str, Any]):
    """Applies our custom metadata. Currently this just renames fields.

    For each field of `data_class` with `metadata={"rename": key}`, moves
    `data[key]` to the field's own name. Returns a transformed copy; the
    input dict is not mutated. Raises ValueError if a rename would shadow
    a key that already exists in the dict.
    """
    # only transform a dict once, in case this is called multiple times
    data = data.copy()
    if data.get("__metadata_handled"):  # mischief managed?
        return data
    data["__metadata_handled"] = True
    for field in get_fields(data_class):
        try:
            key_name = field.metadata["rename"]
            if not isinstance(key_name, str):
                # TODO: raise?
                continue
            if field.name in data:
                # TODO: could instead keep a set of renamed fields, skip writing from a shadowed field
                raise ValueError(
                    f"Can't rename key '{key_name}' to field '{field.name}' because the key '{field.name}' also exists in the dict\n{data}"
                )
            data[field.name] = data.pop(key_name)
        except KeyError:
            # either the field has no "rename" metadata, or the renamed key
            # is absent from data - both are fine, just skip the field
            pass
    return data
def handle_metadata_final(data_class: type[Any], data: dict[str, Any]):
    """As `handle_metadata`, but removes the key marking data as handled.
    Should only be used within a custom from_dict implementation.
    """
    transformed = handle_metadata(data_class, data)
    # drop the sentinel so the returned dict only contains real field data
    del transformed["__metadata_handled"]
    return transformed
def _patched_build_value(type_: type[Any], data: Any, config: Config) -> Any:
    """Like dacite's `_build_value`, but also applies a type hook registered for
    a generic type's origin (eg. a hook for `list` also fires for `list[int]`).

    An exact hook for `type_` takes precedence; the origin hook only runs when
    no exact hook exists, and the original implementation handles the rest.
    """
    hooks = config.type_hooks
    if type_ not in hooks:
        origin = get_origin(type_)
        if origin and origin in hooks:
            data = hooks[origin](data)
    return _original_build_value(type_, data, config)
# fixes https://github.com/konradhalas/dacite/issues/234
# workaround for https://github.com/konradhalas/dacite/issues/218
# this code is, like, really bad. but to be fair dacite's isn't a whole lot better
# and as long as it works, does it really matter?
def _patched_build_value_for_union(union: type[Any], data: Any, config: Config) -> Any:
    """Build a value for a union type, collecting rich error context on failure.

    Differences from dacite's implementation:
    - member types may raise `UnionSkip` to opt out without recording an error
    - all failures are gathered and re-raised together as an `ExceptionGroup`

    Fix over the original block: `union_matches` was initialized twice in a row;
    the redundant second `union_matches = {}` has been removed (no behavior change,
    since nothing wrote to it between the two assignments).
    """
    types = extract_generic(union)
    # Optional[T] fast path: dacite represents it as (T, NoneType)
    if is_optional(union) and len(types) == 2:
        return _patched_build_value(type_=types[0], data=data, config=config)

    exceptions: list[Exception] = []
    union_matches = {}
    # keep a pristine copy for error messages, since building may transform data
    original_data = copy.deepcopy(data)
    data_ = data
    for inner_type in types:
        try:
            try:
                value = _patched_build_value(type_=inner_type, data=data, config=config)
            except UnionSkip:
                # tagged union member says the data isn't for it; not an error
                continue
            except Exception as e:
                e.add_note(f"inner_type: {inner_type}")
                exceptions.append(e)
                continue
            if is_instance(value, inner_type):
                if config.strict_unions_match:
                    # strict mode: collect all matches, check ambiguity below
                    union_matches[inner_type] = value
                else:
                    # non-strict mode: first successful match wins
                    return value
        except DaciteError as e:
            e.add_note(f"inner_type: {inner_type}")
            exceptions.append(e)

    if config.strict_unions_match and union_matches:
        if len(union_matches) > 1:
            # ambiguous: more than one member type accepted the data
            e = StrictUnionMatchError(union_matches)
            e.add_note(f"union_matches: {union_matches}")
            exceptions.append(e)
        else:
            return union_matches.popitem()[1]

    if not config.check_types:
        return data

    # nothing matched: raise everything we collected, with the data attached
    e = UnionMatchError(field_type=union, value=data)
    e.add_note(f"\noriginal data: {original_data}")
    e.add_note(f"maybe-or-maybe-not-transformed data: {data}")
    e.add_note(f"transformed data: {data_}\n")
    exceptions.append(e)
    raise ExceptionGroup("Failed to match union", exceptions)
# fixes https://github.com/konradhalas/dacite/issues/217
def _patched_build_value_for_collection(
    collection: type[Any], data: Any, config: Config
) -> Any:
    """Build a value for a collection type, preserving the input's runtime class.

    Recursively builds mappings, tuples (both fixed-length and `tuple[T, ...]`),
    and other collections; returns `data` unchanged if no branch applies.
    """
    # rebuild using the *runtime* class of the input, not the declared type
    data_type = data.__class__
    if isinstance(data, Mapping) and is_subclass(collection, Mapping):
        key_type, item_type = extract_generic(collection, defaults=(Any, Any))
        return data_type(
            (
                _patched_build_value(type_=key_type, data=key, config=config),
                _patched_build_value(type_=item_type, data=value, config=config),
            )
            for key, value in data.items()
        )
    elif isinstance(data, tuple) and is_subclass(collection, tuple):
        if not data:
            return data_type()
        types = extract_generic(collection)
        # tuple[T, ...]: homogeneous, variable length
        if len(types) == 2 and types[1] == Ellipsis:
            return data_type(
                _patched_build_value(type_=types[0], data=item, config=config)
                for item in data
            )
        # fixed-length tuple: pair each item with its declared type
        return data_type(
            _patched_build_value(type_=type_, data=item, config=config)
            for item, type_ in zip_longest(data, types)
        )
    elif isinstance(data, Collection) and is_subclass(collection, Collection):
        item_type = extract_generic(collection, defaults=(Any,))[0]
        return data_type(
            _patched_build_value(type_=item_type, data=item, config=config)
            for item in data
        )
    return data
_T = TypeVar("_T")


def _patched_from_dict(
    data_class: type[_T],
    data: Data,
    config: Config | None = None,
) -> _T:
    """Like `dacite.from_dict`, but short-circuits already-built instances and
    applies our rename metadata before delegating to the original.
    """
    # already an instance of the target class - nothing to build
    if isinstance(data, data_class):
        return data
    # ensure it's a dict, or add context
    try:
        assert isinstance_or_raise(data, dict)
    except TypeError as e:
        if config and data_class not in config.type_hooks:
            e.add_note(f"Note: {data_class} is not in type_hooks: {config.type_hooks}")
        else:
            e.add_note(f"data_class: {data_class}")
        raise
    # apply field renames and strip the handled-marker before building
    data = handle_metadata_final(data_class, data)
    return _original_from_dict(data_class, data, config)
def _patched_is_valid_generic_class(value: Any, type_: type[Any]) -> bool:
    """Check whether `value` is a valid instance of the generic class `type_`.

    Replacement for `dacite.types.is_valid_generic_class`: compares each field's
    value against the class's type hints, matching TypeVar-typed fields against
    the generic's type arguments.
    """
    origin = get_origin(type_)
    if not (origin and isinstance(value, origin)):
        return False
    type_args = get_args(type_)
    # get_type_hints is cached via dacite's cache helper
    type_hints = cache(get_type_hints)(type(value))
    for field_name, field_type in type_hints.items():
        field_value = getattr(value, field_name, None)
        if isinstance(field_type, TypeVar):
            # TODO: this will fail to detect incorrect type in some cases
            # see comments on https://github.com/konradhalas/dacite/pull/209
            if not any(is_instance(field_value, arg) for arg in type_args):
                return False
        elif get_origin(field_type) is not ClassVar:
            # ClassVar fields aren't instance data, so they're skipped
            if not is_instance(field_value, field_type):
                return False
    return True
# we do a bit of monkeypatching
# NOTE: these assignments replace dacite's internals at import time, so this
# module must be imported before any dacite parsing happens (see callers that
# import it with `# isort: skip`).
dacite.from_dict = _patched_from_dict
dacite.core.from_dict = _patched_from_dict
dacite.core._build_value = _patched_build_value
dacite.core._build_value_for_union = _patched_build_value_for_union
dacite.core._build_value_for_collection = _patched_build_value_for_collection
dacite.types.is_valid_generic_class = _patched_is_valid_generic_class

View file

@ -1,101 +1,43 @@
# make sure we patch dacite before doing any parsing
# should this be a PR? probably! TODO: i'll do it later
from common import dacite_patch as _ # isort: skip
import json import json
from dataclasses import dataclass, field
from pathlib import Path from pathlib import Path
from typing import Any, Callable, TypeVar from typing import Any, TypeGuard, TypeVar, get_origin
import tomllib _T = TypeVar("_T")
from dacite import Config, from_dict
from pydantic import ConfigDict
from common.dacite_patch import handle_metadata _DEFAULT_MESSAGE = "Expected any of {expected}, got {actual}: {value}"
from common.toml_placeholders import TOMLDict, fill_placeholders
from common.types import Castable, JSONDict, JSONValue, isinstance_or_raise
DEFAULT_CONFIG = ConfigDict( # there may well be a better way to do this but i don't know what it is
strict=True, def isinstance_or_raise(
extra="forbid", val: Any,
) class_or_tuple: type[_T] | tuple[type[_T], ...],
message: str = _DEFAULT_MESSAGE,
) -> TypeGuard[_T]:
"""Usage: `assert isinstance_or_raise(val, str)`
_T_Input = TypeVar("_T_Input") message placeholders: `{expected}`, `{actual}`, `{value}`
_T_Dataclass = TypeVar("_T_Dataclass")
TypeHook = Callable[[_T_Dataclass | Any], _T_Dataclass | dict[str, Any]]
TypeHooks = dict[type[_T_Dataclass], TypeHook[_T_Dataclass]]
TypeHookMaker = Callable[[_T_Input], TypeHooks[_T_Dataclass]]
@dataclass
class TypedConfig(Config):
"""Dacite config, but with proper type hints and sane defaults."""
type_hooks: TypeHooks[Any] = field(default_factory=dict)
cast: list[TypeHook[Any]] = field(default_factory=list)
check_types: bool = True
strict: bool = True
strict_unions_match: bool = True
def __post_init__(self):
self.cast.append(Castable)
def metadata(*, rename: str) -> dict[str, Any]:
"""Helper for specifying dataclass field metadata.
Args:
rename: The value under this key, if any, will instead be assigned to this field.
""" """
return {
"rename": rename, # convert generic types into the origin type
} if not isinstance(class_or_tuple, tuple):
class_or_tuple = (class_or_tuple,)
ungenericed_classes = tuple(get_origin(t) or t for t in class_or_tuple)
if not isinstance(val, ungenericed_classes):
# just in case the caller messed up the message formatting
subs = {"expected": class_or_tuple, "actual": type(val), "value": val}
try:
raise TypeError(message.format(**subs))
except KeyError:
raise TypeError(_DEFAULT_MESSAGE.format(**subs))
return True
def rename(rename: str) -> dict[str, Any]: JSONDict = dict[str, "JSONValue"]
"""Helper for specifying field metadata to rename a FromPath field."""
return metadata(rename=rename) JSONValue = JSONDict | list["JSONValue"] | str | int | float | bool | None
def load_json_object(path: Path) -> JSONDict: def load_json(path: Path) -> JSONDict:
data: JSONValue = json.loads(path.read_text("utf-8")) data: JSONValue = json.loads(path.read_text("utf-8"))
assert isinstance_or_raise(data, dict) assert isinstance_or_raise(data, dict)
return data return data
def load_json_data(
data_class: type[Any],
path: Path,
extra_data: dict[str, Any] = {},
) -> dict[str, Any]:
"""Load a dict from a JSON file and apply metadata transformations to it."""
data = load_json_object(path)
return handle_metadata(data_class, data) | extra_data
def load_toml_data(data_class: type[Any], path: Path) -> TOMLDict:
data = tomllib.loads(path.read_text("utf-8"))
fill_placeholders(data)
return handle_metadata(data_class, data)
def from_dict_checked(
data_class: type[_T_Dataclass],
data: dict[str, Any],
config: TypedConfig,
path: Path | None = None,
) -> _T_Dataclass:
"""Convert a dict to a dataclass.
path is currently just used for error messages.
"""
try:
return from_dict(data_class, data, config)
except Exception as e:
if path:
e.add_note(str(path))
raise

60
doc/src/common/model.py Normal file
View file

@ -0,0 +1,60 @@
from pathlib import Path
from typing import TYPE_CHECKING, Any, Generic, Self, TypeVar, dataclass_transform
from pydantic import BaseModel, ConfigDict
from typing_extensions import TypedDict
from common.deserialize import load_json
if TYPE_CHECKING:
from pydantic.root_model import Model
# TypeVar for the validation-context TypedDict; models are generic over this
AnyContext = TypeVar("AnyContext", bound=TypedDict)

# shared Pydantic config: reject unknown fields instead of silently ignoring them
DEFAULT_CONFIG = ConfigDict(
    extra="forbid",
)
@dataclass_transform()
class HexDocModel(Generic[AnyContext], BaseModel):
    """Base Pydantic model for hexdoc, generic over the validation context type."""

    model_config = DEFAULT_CONFIG

    # override the context type to use a generic TypedDict
    # TODO: open an issue on Pydantic for this
    if TYPE_CHECKING:
        # these stubs exist only for the type checker; at runtime the inherited
        # BaseModel implementations are used unchanged
        @classmethod
        def model_validate(  # type: ignore
            cls: type[Model],
            obj: Any,
            *,
            strict: bool | None = None,
            from_attributes: bool | None = None,
            context: AnyContext | None = None,
        ) -> Model:
            ...

        @classmethod
        def model_validate_json(  # type: ignore
            cls: type[Model],
            json_data: str | bytes | bytearray,
            *,
            strict: bool | None = None,
            context: AnyContext | None = None,
        ) -> Model:
            ...
@dataclass_transform(frozen_default=True)
class FrozenHexDocModel(Generic[AnyContext], HexDocModel[AnyContext]):
    """HexDocModel variant whose instances are immutable after validation."""

    model_config = DEFAULT_CONFIG | {"frozen": True}
@dataclass_transform()
class HexDocModelFile(HexDocModel[AnyContext]):
    """HexDocModel that can be loaded from a JSON file on disk."""

    @classmethod
    def load(cls, path: Path, context: AnyContext) -> Self:
        """Load and validate a model from the JSON file at `path`.

        The source path is injected into the data under the `__path` key.
        """
        data = load_json(path) | {"__path": path}
        return cls.model_validate(data, context=context)

View file

@ -1,17 +1,17 @@
from __future__ import annotations from __future__ import annotations
import re import re
from dataclasses import InitVar, dataclass, field
from pathlib import Path from pathlib import Path
from typing import Self from typing import Any, Self
from common.deserialize import TypedConfig, from_dict_checked, load_toml_data, rename from pydantic import Field, model_validator
from common.pattern import PatternStubFile
from common.types import LocalizedStr from common.model import HexDocModel
from common.toml_placeholders import load_toml
from hexcasting.pattern import PatternStubFile
@dataclass class PlatformProps(HexDocModel[Any]):
class PlatformProps:
resources: Path resources: Path
generated: Path generated: Path
src: Path src: Path
@ -19,37 +19,31 @@ class PlatformProps:
pattern_stubs: list[PatternStubFile] | None = None pattern_stubs: list[PatternStubFile] | None = None
@dataclass class I18nProps(HexDocModel[Any]):
class I18nProps:
lang: str lang: str
filename: str filename: str
extra: dict[str, LocalizedStr] | None = None extra: dict[str, str] | None = None
@dataclass(kw_only=True) class Properties(HexDocModel[Any]):
class Properties:
modid: str modid: str
book_name: str book_name: str
template: Path template: Path
recipe_dirs: list[Path]
_pattern_regex: InitVar[str] = field(metadata=rename("pattern_regex")) recipe_dirs: list[Path]
pattern_re: re.Pattern[str] = field(init=False) default_recipe_dir_index_: int = Field(alias="default_recipe_dir")
pattern_regex: re.Pattern[str]
i18n: I18nProps i18n: I18nProps
common: PlatformProps common: PlatformProps
fabric: PlatformProps # TODO: non-shitty way to make these optional for addons fabric: PlatformProps # TODO: some way to make these optional for addons
forge: PlatformProps forge: PlatformProps
def __post_init__(self, _pattern_regex: str):
object.__setattr__(self, "pattern_re", re.compile(_pattern_regex))
@classmethod @classmethod
def load(cls, path: Path) -> Self: def load(cls, path: Path) -> Self:
data = load_toml_data(cls, path) return cls.model_validate(load_toml(path))
config = TypedConfig(cast=[LocalizedStr, Path])
return from_dict_checked(cls, data, config)
@property @property
def resources_dir(self): def resources_dir(self):
@ -82,6 +76,10 @@ class Properties:
def templates_dir(self) -> Path: def templates_dir(self) -> Path:
return self.book_dir / self.lang / "templates" return self.book_dir / self.lang / "templates"
@property
def default_recipe_dir(self) -> Path:
return self.recipe_dirs[self.default_recipe_dir_index_]
@property @property
def platforms(self) -> list[PlatformProps]: def platforms(self) -> list[PlatformProps]:
platforms = [self.common] platforms = [self.common]
@ -99,3 +97,11 @@ class Properties:
if platform.pattern_stubs if platform.pattern_stubs
for stub in platform.pattern_stubs for stub in platform.pattern_stubs
] ]
@model_validator(mode="after")
def _check_default_recipe_dir(self):
if self.default_recipe_dir_index_ >= len(self.recipe_dirs):
raise ValueError(
f"default_recipe_dir must be a valid index of recipe_dirs (expected <={len(self.recipe_dirs)}, got {self.default_recipe_dir_index_})"
)
return self

View file

@ -4,14 +4,18 @@ from __future__ import annotations
from collections import defaultdict from collections import defaultdict
from enum import Enum from enum import Enum
from typing import Any, ClassVar, Generator, Self from typing import TYPE_CHECKING, Any, ClassVar, Generator, Self, cast
from dacite import StrictUnionMatchError, UnionMatchError, from_dict
from pkg_resources import iter_entry_points from pkg_resources import iter_entry_points
from pydantic import ValidationInfo, model_validator
from pydantic.functional_validators import ModelWrapValidatorHandler
from common.dacite_patch import UnionSkip from minecraft.resource import ResourceLocation
from common.deserialize import TypedConfig
from common.types import isinstance_or_raise from .model import AnyContext, HexDocModel
if TYPE_CHECKING:
from pydantic.root_model import Model
class NoValueType(Enum): class NoValueType(Enum):
@ -27,18 +31,27 @@ NoValue = NoValueType._token
TagValue = str | NoValueType TagValue = str | NoValueType
class WrongTagSkip(UnionSkip): _loaded_groups: set[str] = set()
def __init__( _rebuilt_models: set[type[Any]] = set()
self,
union_type: type[InternallyTaggedUnion],
tag_value: TagValue,
) -> None:
super().__init__(
f"Expected {union_type._tag_key}={union_type.__expected_tag_value}, got {tag_value}"
)
class InternallyTaggedUnion: def load_entry_points(group: str):
# don't load a group multiple times
if group in _loaded_groups:
return
_loaded_groups.add(group)
for entry_point in iter_entry_points(group):
try:
entry_point.load()
except ModuleNotFoundError as e:
e.add_note(
f'Note: Tried to load entry point "{entry_point}" from {entry_point.dist}'
)
raise
class InternallyTaggedUnion(HexDocModel[AnyContext]):
"""Implements [internally tagged unions](https://serde.rs/enum-representations.html#internally-tagged) """Implements [internally tagged unions](https://serde.rs/enum-representations.html#internally-tagged)
using the [Registry pattern](https://charlesreid1.github.io/python-patterns-the-registry.html). using the [Registry pattern](https://charlesreid1.github.io/python-patterns-the-registry.html).
@ -57,17 +70,11 @@ class InternallyTaggedUnion:
shouldn't be instantiated (eg. abstract classes). shouldn't be instantiated (eg. abstract classes).
""" """
_loaded_groups: ClassVar[set[str]] = set() # inherited
"""Global set of groups whose plugins have already been loaded. Do not overwrite.
We use this so we don't have to load the same modules over and over again.
"""
_group: ClassVar[str | None] = None _group: ClassVar[str | None] = None
_tag_key: ClassVar[str | None] = None _tag_key: ClassVar[str | None] = None
__expected_tag_value: ClassVar[TagValue | None] # per-class
__all_subtypes: ClassVar[set[type[Self]]] __all_subtypes: ClassVar[set[type[Self]]]
__concrete_subtypes: ClassVar[defaultdict[TagValue, set[type[Self]]]] __concrete_subtypes: ClassVar[defaultdict[TagValue, set[type[Self]]]]
@ -97,7 +104,6 @@ class InternallyTaggedUnion:
return return
# per-class data and lookups # per-class data and lookups
cls.__expected_tag_value = value
cls.__all_subtypes = set() cls.__all_subtypes = set()
cls.__concrete_subtypes = defaultdict(set) cls.__concrete_subtypes = defaultdict(set)
@ -115,7 +121,9 @@ class InternallyTaggedUnion:
return tag_key return tag_key
@classmethod @classmethod
def _supertypes(cls) -> Generator[type[InternallyTaggedUnion], None, None]: def _supertypes(
cls,
) -> Generator[type[InternallyTaggedUnion[AnyContext]], None, None]:
tag_key = cls._tag_key_or_raise() tag_key = cls._tag_key_or_raise()
# we consider a type to be its own supertype/subtype # we consider a type to be its own supertype/subtype
@ -137,27 +145,61 @@ class InternallyTaggedUnion:
return cls.__concrete_subtypes return cls.__concrete_subtypes
@classmethod @classmethod
def _resolve_from_dict(cls, data: Self | Any, config: TypedConfig) -> Self: def model_validate(
# if we haven't yet, load plugins from entry points cls: type[Model],
if cls._group is not None and cls._group not in cls._loaded_groups: obj: Any,
cls._loaded_groups.add(cls._group) *,
for entry_point in iter_entry_points(cls._group): strict: bool | None = None,
try: from_attributes: bool | None = None,
entry_point.load() context: AnyContext | None = None,
except ModuleNotFoundError as e: ) -> Model:
e.add_note( # resolve forward references, because apparently we need to do this
f'Note: Tried to load entry point "{entry_point}" from {entry_point.dist}' if cls not in _rebuilt_models:
) _rebuilt_models.add(cls)
raise cls.model_rebuild(
_types_namespace={
"ResourceLocation": ResourceLocation,
}
)
# do this first so we know it's part of a union return super().model_validate(
obj,
strict=strict,
from_attributes=from_attributes,
context=context,
)
@model_validator(mode="wrap")
@classmethod
def _resolve_from_dict(
cls,
data: dict[str, Any] | Self | Any,
handler: ModelWrapValidatorHandler[Self],
info: ValidationInfo,
) -> Self:
# load plugins from entry points
if cls._group is not None:
load_entry_points(cls._group)
# do this early so we know it's part of a union before returning anything
tag_key = cls._tag_key_or_raise() tag_key = cls._tag_key_or_raise()
# if it's already instantiated, just return it; otherwise ensure it's a dict # if it's already instantiated, just return it; otherwise ensure it's a dict
if isinstance(data, InternallyTaggedUnion): match data:
assert isinstance_or_raise(data, cls) case InternallyTaggedUnion():
return data return data
assert isinstance_or_raise(data, dict[str, Any]) case dict():
# ew
data = cast(dict[str, Any], data)
case _:
return handler(data)
# don't infinite loop calling this same validator forever
if "__resolved" in data or not info.context:
data.pop("__resolved")
return handler(data)
data["__resolved"] = True
context = cast(AnyContext, info.context)
# tag value, eg. "minecraft:crafting_shaped" # tag value, eg. "minecraft:crafting_shaped"
tag_value = data.get(tag_key, NoValue) tag_value = data.get(tag_key, NoValue)
@ -168,30 +210,44 @@ class InternallyTaggedUnion:
# try all the types # try all the types
exceptions: list[Exception] = [] exceptions: list[Exception] = []
union_matches: dict[type[InternallyTaggedUnion], InternallyTaggedUnion] = {} matches: dict[type[Self], Self] = {}
for inner_type in tag_types: for inner_type in tag_types:
try: try:
value = from_dict(inner_type, data, config) matches[inner_type] = inner_type.model_validate(data, context=context)
if not config.strict_unions_match: except Exception as e:
return value exceptions.append(e)
union_matches[inner_type] = value
except UnionSkip:
pass
except Exception as entry_point:
exceptions.append(entry_point)
# ensure we only matched one # ensure we only matched one
match len(union_matches): match len(matches):
case 1: case 1:
return union_matches.popitem()[1] return matches.popitem()[1]
case x if x > 1 and config.strict_unions_match: case x if x > 1:
exceptions.append(StrictUnionMatchError(union_matches)) raise ExceptionGroup(
f"Ambiguous union match for {cls} with {cls._tag_key}={tag_value}: {matches.keys()}: {data}",
exceptions,
)
case _: case _:
exceptions.append(UnionMatchError(tag_types, data)) raise ExceptionGroup(
f"Failed to match {cls} with {cls._tag_key}={tag_value} to any of {tag_types}: {data}",
exceptions,
)
# oopsies
raise ExceptionGroup( class TypeTaggedUnion(InternallyTaggedUnion[AnyContext], key="type", value=None):
f"Failed to match {cls} with {cls._tag_key}={tag_value} to any of {tag_types}: {data}", type: ResourceLocation | None
exceptions,
) def __init_subclass__(
cls,
*,
group: str | None = None,
type: TagValue | None,
) -> None:
super().__init_subclass__(group=group, value=type)
match type:
case str():
cls.type = ResourceLocation.from_str(type)
case NoValueType():
cls.type = None
case None:
pass

View file

@ -1,8 +1,11 @@
import datetime import datetime
import re import re
from pathlib import Path
from typing import Callable, TypeVar from typing import Callable, TypeVar
from common.types import isinstance_or_raise import tomllib
from common.deserialize import isinstance_or_raise
# TODO: there's (figuratively) literally no comments in this file # TODO: there's (figuratively) literally no comments in this file
@ -110,3 +113,9 @@ def _fill_placeholders(
table = stack[-1] table = stack[-1]
for key, child in table.items(): for key, child in table.items():
_handle_child(data, stack, expanded, key, child, table.__setitem__) _handle_child(data, stack, expanded, key, child, table.__setitem__)
def load_toml(path: Path) -> TOMLDict:
data = tomllib.loads(path.read_text("utf-8"))
fill_placeholders(data)
return data

View file

@ -2,97 +2,63 @@ from __future__ import annotations
import string import string
from abc import ABC, abstractmethod from abc import ABC, abstractmethod
from typing import Any, Mapping, Protocol, Self, TypeGuard, TypeVar, get_origin from typing import Any, Mapping, Protocol, TypeVar
JSONDict = dict[str, "JSONValue"] from pydantic import field_validator, model_validator
from pydantic.dataclasses import dataclass
JSONValue = JSONDict | list["JSONValue"] | str | int | float | bool | None from common.model import DEFAULT_CONFIG
_T = TypeVar("_T") _T = TypeVar("_T")
_DEFAULT_MESSAGE = "Expected any of {expected}, got {actual}: {value}"
@dataclass(config=DEFAULT_CONFIG, frozen=True)
# there may well be a better way to do this but i don't know what it is class Color:
def isinstance_or_raise( """Represents a hexadecimal color.
val: Any,
class_or_tuple: type[_T] | tuple[type[_T], ...],
message: str = _DEFAULT_MESSAGE,
) -> TypeGuard[_T]:
"""Usage: `assert isinstance_or_raise(val, str)`
message placeholders: `{expected}`, `{actual}`, `{value}`
"""
# convert generic types into the origin type
if not isinstance(class_or_tuple, tuple):
class_or_tuple = (class_or_tuple,)
ungenericed_classes = tuple(get_origin(t) or t for t in class_or_tuple)
if not isinstance(val, ungenericed_classes):
# just in case the caller messed up the message formatting
subs = {"expected": class_or_tuple, "actual": type(val), "value": val}
try:
raise TypeError(message.format(**subs))
except KeyError:
raise TypeError(_DEFAULT_MESSAGE.format(**subs))
return True
class Castable:
"""Abstract base class for types with a constructor in the form `C(value) -> C`.
Subclassing this ABC allows for automatic deserialization using Dacite.
"""
class Color(str, Castable):
"""Newtype-style class representing a hexadecimal color.
Inputs are coerced to lowercase `rrggbb`. Raises ValueError on invalid input. Inputs are coerced to lowercase `rrggbb`. Raises ValueError on invalid input.
Valid formats, all of which would be converted to `0099ff`: Valid formats, all of which would be converted to `0099ff`:
- `#0099FF` - `"#0099FF"`
- `#0099ff` - `"#0099ff"`
- `#09F` - `"#09F"`
- `#09f` - `"#09f"`
- `0099FF` - `"0099FF"`
- `0099ff` - `"0099ff"`
- `09F` - `"09F"`
- `09f` - `"09f"`
- `0x0099ff`
""" """
__slots__ = () value: str
def __new__(cls, value: str) -> Self: @model_validator(mode="before")
# this is a castable type hook but we hint str for usability def _pre_root(cls, value: Any):
assert isinstance_or_raise(value, str) if isinstance(value, (str, int)):
return {"value": value}
return value
color = value.removeprefix("#").lower() @field_validator("value", mode="before")
def _check_value(cls, value: str | int | Any) -> str:
# type check
match value:
case str():
value = value.removeprefix("#").lower()
case int():
# int to hex string
value = f"{value:0>6x}"
case _:
raise TypeError(f"Expected str or int, got {type(value)}")
# 012 -> 001122 # 012 -> 001122
if len(color) == 3: if len(value) == 3:
color = "".join(c + c for c in color) value = "".join(c + c for c in value)
# length and character check # length and character check
if len(color) != 6 or any(c not in string.hexdigits for c in color): if len(value) != 6 or any(c not in string.hexdigits for c in value):
raise ValueError(f"invalid color code: {value}") raise ValueError(f"invalid color code: {value}")
return str.__new__(cls, color) return value
# subclass instead of newtype so it exists at runtime, so we can use isinstance
class LocalizedStr(str):
"""Represents a string which has been localized."""
def __new__(cls, value: str) -> Self:
# this is a castable type hook but we hint str for usability
assert isinstance_or_raise(value, str)
return str.__new__(cls, value)
class LocalizedItem(LocalizedStr):
pass
class Sortable(ABC): class Sortable(ABC):

View file

@ -1,7 +1,9 @@
__all__ = [ __all__ = [
"HexBook", "HexBook",
"HexBookState", "HexContext",
"AnyHexContext",
"HexBookModel",
] ]
from .hex_state import HexBook, HexBookState from .hex_book import AnyHexContext, HexBook, HexBookModel, HexContext

View file

@ -0,0 +1,81 @@
from __future__ import annotations
from abc import ABC, abstractmethod
from typing import Any, cast
from pydantic import Field, ValidationInfo, model_validator
from hexcasting.pattern import RawPatternInfo
from minecraft.i18n import LocalizedStr
from minecraft.resource import ResourceLocation
from patchouli.page import PageWithTitle
from .hex_book import AnyHexContext, HexContext
# TODO: make anchor required
class PageWithPattern(PageWithTitle[AnyHexContext], ABC, type=None):
    """Abstract page containing one or more hex patterns.

    Subclasses must implement `name`; `title` is derived from it plus the
    optional input/output argument signature.
    """

    # fixed to None: the rendered title comes from the `title` property instead
    title_: None = Field(default=None, include=True)
    op_id: ResourceLocation | None = None
    header: LocalizedStr | None = None
    input: str | None = None
    output: str | None = None
    hex_size: int | None = None
    # must be after op_id, so just put it last
    patterns_: RawPatternInfo | list[RawPatternInfo] = Field(
        alias="patterns", include=True
    )

    @property
    @abstractmethod
    def name(self) -> LocalizedStr:
        ...

    @property
    def args(self) -> str | None:
        """Format the input/output signature, eg. "num, num → num", or None if empty."""
        inp = self.input or ""
        oup = self.output or ""
        if inp or oup:
            return f"{inp} \u2192 {oup}".strip()
        return None

    @property
    def title(self) -> str:
        suffix = f" ({self.args})" if self.args else ""
        return self.name.value + suffix

    @property
    def patterns(self) -> list[RawPatternInfo]:
        # normalize the single-or-list field to always be a list
        if isinstance(self.patterns_, list):
            return self.patterns_
        return [self.patterns_]
class PageWithOpPattern(PageWithPattern[AnyHexContext], type=None):
    """Pattern page whose name is localized from its operator id (`op_id`)."""

    name_: LocalizedStr = Field(include=True)
    # required here, unlike the optional op_id on PageWithPattern
    op_id: ResourceLocation
    header: None = None

    @property
    def name(self) -> LocalizedStr:
        return self.name_

    @model_validator(mode="before")
    def _check_name(cls, values: dict[str, Any], info: ValidationInfo):
        # before validation, resolve the localized name from op_id via the
        # i18n object in the validation context (if a context was provided)
        context = cast(HexContext, info.context)
        if not context or (op_id := values.get("op_id")) is None:
            return values
        name = context["i18n"].localize_pattern(op_id)
        return values | {"name_": name}
class PageWithRawPattern(PageWithPattern[AnyHexContext], type=None):
    """Pattern page with no operator id; the header serves as the display name."""

    op_id: None = None
    header: LocalizedStr

    @property
    def name(self) -> LocalizedStr:
        return self.header

View file

@ -0,0 +1,53 @@
from typing import Any, Generic, TypeVar
from common.model import AnyContext
from common.properties import Properties
from hexcasting.pattern import PatternInfo
from minecraft.resource import ResourceLocation
from patchouli.book import Book
from patchouli.context import AnyBookContext, BookContext
class HexContext(BookContext):
    """Validation context for hexcasting books; populated in HexBookModel.prepare."""

    blacklist: set[str]
    spoilers: set[str]
    patterns: dict[ResourceLocation, PatternInfo]


# TypeVar so subclasses can extend the context while staying type-safe
AnyHexContext = TypeVar("AnyHexContext", bound=HexContext)
class HexBookModel(
    Generic[AnyContext, AnyBookContext, AnyHexContext],
    Book[AnyHexContext, AnyHexContext],
):
    """Hexcasting book model; extends the base Book context with loaded patterns."""

    @classmethod
    def prepare(cls, props: Properties) -> tuple[dict[str, Any], HexContext]:
        """Build the raw book data and a HexContext with all patterns loaded.

        Raises ValueError if two patterns share an id or a signature.
        """
        data, context = super().prepare(props)

        # load patterns
        patterns = dict[ResourceLocation, PatternInfo]()
        signatures = dict[str, PatternInfo]()  # just for duplicate checking

        for stub in props.pattern_stubs:
            # for each stub, load all the patterns in the file
            for pattern in stub.load_patterns(props.modid, props.pattern_regex):
                # check for duplicates, because why not
                if duplicate := (
                    patterns.get(pattern.id) or signatures.get(pattern.signature)
                ):
                    raise ValueError(
                        f"Duplicate pattern {pattern.id}\n{pattern}\n{duplicate}"
                    )
                patterns[pattern.id] = pattern
                signatures[pattern.signature] = pattern

        # build new context
        return data, {
            **context,
            "blacklist": set(),
            "spoilers": set(),
            "patterns": patterns,
        }


# concrete alias: all three context parameters collapse to HexContext
HexBook = HexBookModel[HexContext, HexContext, HexContext]

View file

@ -1,118 +1,71 @@
from __future__ import annotations from __future__ import annotations
from abc import ABC, abstractmethod from typing import Any, cast
from dataclasses import dataclass, field
from common.deserialize import rename from pydantic import Field, ValidationInfo, model_validator
from common.pattern import RawPatternInfo
from common.types import LocalizedStr from hexcasting.pattern import RawPatternInfo
from minecraft.i18n import LocalizedStr
from minecraft.recipe import CraftingRecipe from minecraft.recipe import CraftingRecipe
from minecraft.resource import ResourceLocation from minecraft.resource import ResourceLocation
from patchouli.page import PageWithCraftingRecipes, PageWithText, PageWithTitle from patchouli.page import PageWithCraftingRecipes, PageWithText
from .abstract_hex_pages import PageWithOpPattern, PageWithRawPattern
from .hex_book import HexContext
from .hex_recipes import BrainsweepRecipe from .hex_recipes import BrainsweepRecipe
from .hex_state import HexBookState
@dataclass(kw_only=True) class LookupPatternPage(
class PageWithPattern(PageWithTitle[HexBookState], ABC, type=None): PageWithOpPattern[HexContext],
_patterns: RawPatternInfo | list[RawPatternInfo] = field( type="hexcasting:pattern",
metadata=rename("patterns") ):
) patterns_: list[RawPatternInfo]
op_id: ResourceLocation | None = None
header: LocalizedStr | None = None
input: str | None = None
output: str | None = None
hex_size: int | None = None
_title: None = None @model_validator(mode="before")
def _check_patterns(cls, data: dict[str, Any], info: ValidationInfo):
context = cast(HexContext, info.context)
if not context:
return data
@property # look up the pattern from the op id
@abstractmethod op_id = ResourceLocation.from_str(data["op_id"])
def name(self) -> LocalizedStr: pattern = context["patterns"][op_id]
... return data | {"patterns_": [pattern], "op_id": op_id}
@property
def args(self) -> str | None:
inp = self.input or ""
oup = self.output or ""
if inp or oup:
return f"{inp} \u2192 {oup}".strip()
return None
@property
def title(self) -> LocalizedStr:
suffix = f" ({self.args})" if self.args else ""
return LocalizedStr(self.name + suffix)
@property
def patterns(self) -> list[RawPatternInfo]:
if isinstance(self._patterns, list):
return self._patterns
return [self._patterns]
@dataclass class ManualOpPatternPage(
class LookupPatternPage(PageWithPattern, type="hexcasting:pattern"): PageWithOpPattern[HexContext],
state: HexBookState type="hexcasting:manual_pattern",
):
_patterns: list[RawPatternInfo] = field(init=False) pass
op_id: ResourceLocation
header: None
def __post_init__(self):
self._patterns = [self.state.patterns[self.op_id]]
@property
def name(self) -> LocalizedStr:
return self.i18n.localize_pattern(self.op_id)
@dataclass class ManualRawPatternPage(
class ManualPatternNosigPage(PageWithPattern, type="hexcasting:manual_pattern_nosig"): PageWithRawPattern[HexContext],
header: LocalizedStr type="hexcasting:manual_pattern",
op_id: None ):
input: None pass
output: None
@property
def name(self) -> LocalizedStr:
return self.header
@dataclass class ManualPatternNosigPage(
class ManualOpPatternPage(PageWithPattern, type="hexcasting:manual_pattern"): PageWithRawPattern[HexContext],
op_id: ResourceLocation type="hexcasting:manual_pattern_nosig",
header: None ):
input: None = None
@property output: None = None
def name(self) -> LocalizedStr:
return self.i18n.localize_pattern(self.op_id)
@dataclass
class ManualRawPatternPage(PageWithPattern, type="hexcasting:manual_pattern"):
op_id: None
header: LocalizedStr
@property
def name(self) -> LocalizedStr:
return self.header
@dataclass
class CraftingMultiPage( class CraftingMultiPage(
PageWithCraftingRecipes[HexBookState], PageWithCraftingRecipes[HexContext],
type="hexcasting:crafting_multi", type="hexcasting:crafting_multi",
): ):
heading: LocalizedStr # ...heading? heading: LocalizedStr # ...heading?
_recipes: list[CraftingRecipe] = field(metadata=rename("recipes")) recipes_: list[CraftingRecipe] = Field(alias="recipes", include=True)
@property @property
def recipes(self) -> list[CraftingRecipe]: def recipes(self) -> list[CraftingRecipe]:
return self._recipes return self.recipes_
@dataclass class BrainsweepPage(PageWithText[HexContext], type="hexcasting:brainsweep"):
class BrainsweepPage(PageWithText[HexBookState], type="hexcasting:brainsweep"):
recipe: BrainsweepRecipe recipe: BrainsweepRecipe

View file

@ -1,7 +1,8 @@
from dataclasses import dataclass
from typing import Any, Literal from typing import Any, Literal
from common.types import LocalizedItem from common.model import HexDocModel
from hexcasting.hex_book import HexContext
from minecraft.i18n import LocalizedItem
from minecraft.recipe import ( from minecraft.recipe import (
ItemIngredient, ItemIngredient,
MinecraftItemIdIngredient, MinecraftItemIdIngredient,
@ -10,20 +11,16 @@ from minecraft.recipe import (
) )
from minecraft.resource import ResourceLocation from minecraft.resource import ResourceLocation
from .hex_state import HexBookState
# ingredients # ingredients
@dataclass class VillagerIngredient(HexDocModel[HexContext]): # lol, lmao
class VillagerIngredient: # lol, lmao
minLevel: int minLevel: int
profession: ResourceLocation | None = None profession: ResourceLocation | None = None
biome: ResourceLocation | None = None biome: ResourceLocation | None = None
@dataclass class BlockStateIngredient(HexDocModel[HexContext]):
class BlockStateIngredient:
# TODO: StateIngredient should also be a TypeTaggedUnion, probably # TODO: StateIngredient should also be a TypeTaggedUnion, probably
type: Literal["block"] type: Literal["block"]
block: ResourceLocation block: ResourceLocation
@ -35,9 +32,8 @@ _MinecraftItemIngredientOrList = (
) )
@dataclass
class ModConditionalIngredient( class ModConditionalIngredient(
ItemIngredient[HexBookState], ItemIngredient[HexContext],
type="hexcasting:mod_conditional", type="hexcasting:mod_conditional",
): ):
default: _MinecraftItemIngredientOrList default: _MinecraftItemIngredientOrList
@ -48,8 +44,7 @@ class ModConditionalIngredient(
# results # results
@dataclass(kw_only=True) class BlockState(HexDocModel[HexContext]):
class BlockState:
name: LocalizedItem name: LocalizedItem
properties: dict[str, Any] | None = None properties: dict[str, Any] | None = None
@ -57,8 +52,7 @@ class BlockState:
# recipes # recipes
@dataclass class BrainsweepRecipe(Recipe[HexContext], type="hexcasting:brainsweep"):
class BrainsweepRecipe(Recipe[HexBookState], type="hexcasting:brainsweep"):
blockIn: BlockStateIngredient blockIn: BlockStateIngredient
villagerIn: VillagerIngredient villagerIn: VillagerIngredient
result: BlockState result: BlockState

View file

@ -1,32 +0,0 @@
from dataclasses import dataclass
from typing import Any
from common.pattern import PatternInfo
from minecraft.resource import ResourceLocation
from patchouli.book import Book
from patchouli.state import BookState
@dataclass(repr=False)
class HexBookState(BookState):
def __post_init__(self, *args: Any, **kwargs: Any):
super().__post_init__(*args, **kwargs)
# mutable state
self.blacklist: set[str] = set()
self.spoilers: set[str] = set()
# patterns
self.patterns: dict[ResourceLocation, PatternInfo] = {}
for stub in self.props.pattern_stubs:
# for each stub, load all the patterns in the file
for pattern in stub.load_patterns(self.props.modid, self.props.pattern_re):
# check for key clobbering, because why not
if duplicate := self.patterns.get(pattern.id):
raise ValueError(
f"Duplicate pattern {pattern.id}\n{pattern}\n{duplicate}"
)
self.patterns[pattern.id] = pattern
HexBook = Book[HexBookState]

View file

@ -1,9 +1,11 @@
import re import re
from dataclasses import dataclass
from enum import Enum from enum import Enum
from pathlib import Path from pathlib import Path
from typing import Generator from typing import Annotated, Any, Generator
from pydantic import BeforeValidator
from common.model import HexDocModel
from minecraft.resource import ResourceLocation from minecraft.resource import ResourceLocation
@ -15,27 +17,37 @@ class Direction(Enum):
WEST = 4 WEST = 4
NORTH_WEST = 5 NORTH_WEST = 5
@classmethod
def validate(cls, value: str | int | Any):
match value:
case str():
return cls[value]
case int():
return cls(value)
case _:
return value
@dataclass(kw_only=True)
class RawPatternInfo: DirectionField = Annotated[Direction, BeforeValidator(Direction.validate)]
startdir: Direction
class RawPatternInfo(HexDocModel[Any]):
startdir: DirectionField
signature: str signature: str
is_per_world: bool = False is_per_world: bool = False
q: int | None = None q: int | None = None
r: int | None = None r: int | None = None
@dataclass(kw_only=True)
class PatternInfo(RawPatternInfo): class PatternInfo(RawPatternInfo):
id: ResourceLocation id: ResourceLocation
@property @property
def op_id(self): def name(self):
return self.id.path return self.id.path
@dataclass class PatternStubFile(HexDocModel[Any]):
class PatternStubFile:
file: Path file: Path
def load_patterns( def load_patterns(

View file

@ -6,14 +6,10 @@ from dataclasses import InitVar, dataclass
from html import escape from html import escape
from typing import IO, Any from typing import IO, Any
from common.types import LocalizedStr
from hexcasting import HexBook from hexcasting import HexBook
from hexcasting.hex_pages import ( from hexcasting.abstract_hex_pages import PageWithPattern
BrainsweepPage, from hexcasting.hex_pages import BrainsweepPage, CraftingMultiPage, LookupPatternPage
CraftingMultiPage, from minecraft.i18n import LocalizedStr
LookupPatternPage,
PageWithPattern,
)
from patchouli import Category, Entry, FormatTree, Page from patchouli import Category, Entry, FormatTree, Page
from patchouli.page import ( from patchouli.page import (
CraftingPage, CraftingPage,
@ -86,8 +82,8 @@ class Stream:
with self.pair_tag(name, **kwargs): with self.pair_tag(name, **kwargs):
pass pass
def text(self, txt: str): def text(self, txt: str | LocalizedStr):
print(escape(txt), file=self.stream, end="") print(escape(str(txt)), file=self.stream, end="")
return self return self
@ -124,14 +120,16 @@ def get_format(out: Stream, ty: str, value: Any):
def entry_spoilered(root_info: HexBook, entry: Entry): def entry_spoilered(root_info: HexBook, entry: Entry):
if entry.advancement is None: if entry.advancement is None:
return False return False
return str(entry.advancement) in root_info.state.spoilers return str(entry.advancement) in root_info.context["spoilers"]
def category_spoilered(root_info: HexBook, category: Category): def category_spoilered(root_info: HexBook, category: Category):
return all(entry_spoilered(root_info, ent) for ent in category.entries) return all(entry_spoilered(root_info, ent) for ent in category.entries)
def write_block(out: Stream, block: FormatTree | str): def write_block(out: Stream, block: FormatTree | str | LocalizedStr):
if isinstance(block, LocalizedStr):
block = str(block)
if isinstance(block, str): if isinstance(block, str):
first = False first = False
for line in block.split("\n"): for line in block.split("\n"):
@ -268,7 +266,7 @@ def write_page(out: Stream, pageid: str, page: Page[Any]):
with out.pair_tag("p", clazz="todo-note"): with out.pair_tag("p", clazz="todo-note"):
out.text(f"TODO: Missing processor for type: {type(page)}") out.text(f"TODO: Missing processor for type: {type(page)}")
if isinstance(page, PageWithText): if isinstance(page, PageWithText):
write_block(out, page.text or page.state.format(LocalizedStr(""))) write_block(out, page.text or FormatTree.empty())
out.tag("br") out.tag("br")
@ -294,7 +292,7 @@ def write_category(out: Stream, book: HexBook, category: Category):
permalink(out, category.id.href) permalink(out, category.id.href)
write_block(out, category.description) write_block(out, category.description)
for entry in category.entries: for entry in category.entries:
if entry.id.path not in book.state.blacklist: if entry.id.path not in book.context["blacklist"]:
write_entry(out, book, entry) write_entry(out, book, entry)
@ -351,11 +349,11 @@ def generate_docs(book: HexBook, template: str) -> str:
for line in template.splitlines(True): for line in template.splitlines(True):
if line.startswith("#DO_NOT_RENDER"): if line.startswith("#DO_NOT_RENDER"):
_, *blacklist = line.split() _, *blacklist = line.split()
book.state.blacklist.update(blacklist) book.context["blacklist"].update(blacklist)
if line.startswith("#SPOILER"): if line.startswith("#SPOILER"):
_, *spoilers = line.split() _, *spoilers = line.split()
book.state.spoilers.update(spoilers) book.context["spoilers"].update(spoilers)
elif line == "#DUMP_BODY_HERE\n": elif line == "#DUMP_BODY_HERE\n":
write_book(Stream(output), book) write_book(Stream(output), book)
print("", file=output) print("", file=output)

View file

@ -1,19 +1,13 @@
# because Tap.add_argument isn't typed, for some reason # because Tap.add_argument isn't typed, for some reason
# pyright: reportUnknownMemberType=false # pyright: reportUnknownMemberType=false
# make sure we patch dacite before doing any parsing
# this is also in common.deserialize but hey, it doesn't hurt to put it here too
# should this be a PR? probably! TODO: i'll do it later
from common import dacite_patch as _ # isort: skip
import sys import sys
from pathlib import Path from pathlib import Path
from tap import Tap from tap import Tap
from common.properties import Properties from common.properties import Properties
from hexcasting.hex_state import HexBookState from hexcasting.hex_book import HexBook
from patchouli import Book
from .collate_data import generate_docs from .collate_data import generate_docs
@ -35,7 +29,7 @@ class Args(Tap):
def main(args: Args) -> None: def main(args: Args) -> None:
# load the properties and book # load the properties and book
props = Properties.load(args.properties_file) props = Properties.load(args.properties_file)
book = Book.load(HexBookState(props)) book = HexBook.load(*HexBook.prepare(props))
# load and fill the template # load and fill the template
template = props.template.read_text("utf-8") template = props.template.read_text("utf-8")

View file

@ -1,13 +1,97 @@
from dataclasses import InitVar, dataclass from __future__ import annotations
from pathlib import Path
from common.deserialize import load_json_object from dataclasses import InitVar
from functools import total_ordering
from pathlib import Path
from typing import Any, Callable, Self, cast
from pydantic import ValidationInfo, model_validator
from pydantic.dataclasses import dataclass
from pydantic.functional_validators import ModelWrapValidatorHandler
from typing_extensions import TypedDict
from common.deserialize import isinstance_or_raise, load_json
from common.model import DEFAULT_CONFIG, HexDocModel
from common.properties import Properties from common.properties import Properties
from common.types import LocalizedItem, LocalizedStr, isinstance_or_raise
from minecraft.resource import ItemStack, ResourceLocation from minecraft.resource import ItemStack, ResourceLocation
@dataclass class I18nContext(TypedDict):
i18n: I18n
# total_ordering derives <=, >, >= from __eq__ and __lt__ below.
@total_ordering
class LocalizedStr(HexDocModel[I18nContext]):
    """Represents a string which has been localized."""

    key: str  # the original lang-table key
    value: str  # the localized text

    @classmethod
    def skip_i18n(cls, key: str) -> Self:
        """Returns an instance of this class with `value = key`."""
        return cls(key=key, value=key)

    @classmethod
    def skip_key(cls, value: str) -> Self:
        """Returns an instance of this class with an empty key."""
        return cls(key="", value=value)

    @model_validator(mode="wrap")
    @classmethod
    def _check_localize(
        cls,
        value: str | Any,
        handler: ModelWrapValidatorHandler[Self],
        info: ValidationInfo,
    ):
        # Wrap validator: a plain str input is localized via the context's
        # I18n; anything else falls through to normal model validation.
        # TODO: if we need LocalizedStr to work as a dict key, add another check which
        # returns cls.skip_i18n(value) if info.context is falsy
        if not isinstance(value, str):
            return handler(value)

        # this is nasty, but we need to use cast to get type checking for context
        context = cast(I18nContext, info.context)
        return cls._localize(context["i18n"], value)

    @classmethod
    def _localize(cls, i18n: I18n, key: str) -> Self:
        # override hook: subclasses can localize against a different table
        return i18n.localize(key)

    def map(self, fn: Callable[[str], str]) -> Self:
        """Returns a copy of this object with `new.value = fn(old.value)`."""
        return self.model_copy(update={"value": fn(self.value)})

    def __repr__(self) -> str:
        return self.value

    def __str__(self) -> str:
        return self.value

    def __eq__(self, other: LocalizedStr | str | Any):
        # compares by localized value against both LocalizedStr and plain str
        match other:
            case LocalizedStr():
                return self.value == other.value
            case str():
                return self.value == other
            case _:
                return super().__eq__(other)

    def __lt__(self, other: LocalizedStr | str):
        match other:
            case LocalizedStr():
                return self.value < other.value
            case str():
                return self.value < other
class LocalizedItem(LocalizedStr):
    """LocalizedStr which localizes its key as an item name instead."""

    @classmethod
    def _localize(cls, i18n: I18n, key: str) -> Self:
        # use the item lang-table lookup rather than the plain one
        return i18n.localize_item(key)
@dataclass(config=DEFAULT_CONFIG)
class I18n: class I18n:
"""Handles localization of strings.""" """Handles localization of strings."""
@ -25,13 +109,16 @@ class I18n:
# or maybe dict[(str, str), LocalizedStr] # or maybe dict[(str, str), LocalizedStr]
# we could also use that to ensure all i18n files have the same set of keys # we could also use that to ensure all i18n files have the same set of keys
path = self.dir / self.props.i18n.filename path = self.dir / self.props.i18n.filename
raw_lookup = load_json_object(path) | (self.props.i18n.extra or {}) raw_lookup = load_json(path) | (self.props.i18n.extra or {})
# validate and insert # validate and insert
self._lookup = {} self._lookup = {}
for key, raw_value in raw_lookup.items(): for key, raw_value in raw_lookup.items():
assert isinstance_or_raise(raw_value, str) assert isinstance_or_raise(raw_value, str)
self._lookup[key] = LocalizedStr(raw_value) self._lookup[key] = LocalizedStr(
key=key,
value=raw_value.replace("%%", "%"),
)
@property @property
def dir(self) -> Path: def dir(self) -> Path:
@ -40,53 +127,41 @@ class I18n:
def localize( def localize(
self, self,
key: str | list[str] | tuple[str, ...], *keys: str,
default: str | None = None, default: str | None = None,
skip_errors: bool = False,
) -> LocalizedStr: ) -> LocalizedStr:
"""Looks up the given string in the lang table if i18n is enabled. """Looks up the given string in the lang table if i18n is enabled. Otherwise,
Otherwise, returns the original key. returns the original key.
If a tuple/list of keys is provided, returns the value of the first key which If multiple keys are provided, returns the value of the first key which exists.
exists. That is, subsequent keys are treated as fallbacks for the first. That is, subsequent keys are treated as fallbacks for the first.
Raises KeyError if i18n is enabled and skip_errors is False but the key has no Raises KeyError if i18n is enabled and skip_errors is False but the key has no
corresponding localized value. corresponding localized value.
""" """
assert isinstance_or_raise(key, (str, list[str], tuple[str, ...])) # if i18n is disabled, just return the key
if self._lookup is None: if self._lookup is None:
# if i18n is disabled, just return the key return LocalizedStr.skip_i18n(keys[0])
if not isinstance(key, str):
key = key[0] # for a single key, look it up
localized = key if len(keys) == 1:
elif isinstance(key, str):
# for a single key, look it up
if default is not None: if default is not None:
localized = self._lookup.get(key, default) return self._lookup.get(keys[0], LocalizedStr.skip_i18n(default))
elif skip_errors: # raises if not found
localized = self._lookup.get(key, key) return self._lookup[keys[0]]
else:
# raises if not found
localized = self._lookup[key]
else:
# for a list/tuple of keys, return the first one that matches (by recursing)
for current_key in key[:-1]:
assert isinstance_or_raise(current_key, str)
try:
return self.localize(current_key)
except KeyError:
continue
return self.localize(key[-1], default, skip_errors)
return LocalizedStr(localized.replace("%%", "%")) # for a list/tuple of keys, return the first one that matches (by recursing)
for current_key in keys[:-1]:
assert isinstance_or_raise(current_key, str)
try:
return self.localize(current_key)
except KeyError:
continue
def localize_pattern( return self.localize(keys[-1], default=default)
self,
op_id: ResourceLocation, def localize_pattern(self, op_id: ResourceLocation) -> LocalizedStr:
skip_errors: bool = False,
) -> LocalizedStr:
"""Localizes the given pattern id (internal name, eg. brainsweep). """Localizes the given pattern id (internal name, eg. brainsweep).
Raises KeyError if i18n is enabled and skip_errors is False but the key has no localization. Raises KeyError if i18n is enabled and skip_errors is False but the key has no localization.
@ -94,23 +169,20 @@ class I18n:
# prefer the book-specific translation if it exists # prefer the book-specific translation if it exists
# TODO: should this use op_id.namespace anywhere? # TODO: should this use op_id.namespace anywhere?
return self.localize( return self.localize(
(f"hexcasting.spell.book.{op_id}", f"hexcasting.spell.{op_id}"), f"hexcasting.spell.book.{op_id}",
skip_errors=skip_errors, f"hexcasting.spell.{op_id}",
) )
def localize_item( def localize_item(self, item: ItemStack | str) -> LocalizedItem:
self,
item: ItemStack | str,
skip_errors: bool = False,
) -> LocalizedItem:
"""Localizes the given item resource name. """Localizes the given item resource name.
Raises KeyError if i18n is enabled and skip_errors is False but the key has no localization. Raises KeyError if i18n is enabled and skip_errors is False but the key has no localization.
""" """
if isinstance(item, str): if isinstance(item, str):
item = ItemStack.from_str(item) item = ItemStack.from_str(item)
return LocalizedItem(
self.localize( localized = self.localize(
(item.i18n_key("block"), item.i18n_key()), skip_errors=skip_errors item.i18n_key("block"),
) item.i18n_key(),
) )
return LocalizedItem(key=localized.key, value=localized.value)

View file

@ -11,10 +11,15 @@ __all__ = [
"CraftingShapelessRecipe", "CraftingShapelessRecipe",
] ]
from .abstract_recipes import ItemResult, Recipe from .abstract_recipes import Recipe
from .ingredients import ( from .ingredients import (
ItemIngredient, ItemIngredient,
MinecraftItemIdIngredient, MinecraftItemIdIngredient,
MinecraftItemTagIngredient, MinecraftItemTagIngredient,
) )
from .recipes import CraftingRecipe, CraftingShapedRecipe, CraftingShapelessRecipe from .recipes import (
CraftingRecipe,
CraftingShapedRecipe,
CraftingShapelessRecipe,
ItemResult,
)

View file

@ -1,38 +1,48 @@
from __future__ import annotations from __future__ import annotations
from dataclasses import dataclass from typing import Any, Self, cast
from typing import Any, Self
from common.deserialize import load_json_data from pydantic import ValidationInfo, model_validator
from common.types import LocalizedItem
from common.deserialize import load_json
from common.tagged_union import TypeTaggedUnion
from minecraft.resource import ResourceLocation from minecraft.resource import ResourceLocation
from patchouli.state import AnyState, StatefulTypeTaggedUnion from patchouli.context import AnyBookContext
@dataclass class Recipe(TypeTaggedUnion[AnyBookContext], group="hexdoc.Recipe", type=None):
class ItemResult:
item: LocalizedItem
count: int | None = None
@dataclass(kw_only=True)
class Recipe(StatefulTypeTaggedUnion[AnyState], group="hexdoc.Recipe", type=None):
id: ResourceLocation id: ResourceLocation
group: str | None = None group: str | None = None
@classmethod @model_validator(mode="before")
def stateful_type_hook(cls, data: Self | Any, state: AnyState) -> Self: def _pre_root(
# if it's a resourcelocation, fetch the data in the corresponding recipe file cls,
if isinstance(data, (str, ResourceLocation)): values: str | ResourceLocation | dict[str, Any] | Self,
id = ResourceLocation.from_str(data) info: ValidationInfo,
):
"""Loads the recipe from json if the actual value is a resource location str."""
if not info.context or isinstance(values, (dict, Recipe)):
return values
# FIXME: hack # if necessary, convert the id to a ResourceLocation
# this is to ensure the recipe exists on all platforms, because we've had match values:
# issues with that in the past (eg. Hexal's Mote Nexus) case str():
data = {} id = ResourceLocation.from_str(values)
for recipe_dir in state.props.recipe_dirs: case ResourceLocation():
# TODO: should this use id.namespace somewhere? id = values
path = recipe_dir / f"{id.path}.json"
data = load_json_data(cls, path, {"id": id})
return super().stateful_type_hook(data, state) # load the recipe
context = cast(AnyBookContext, info.context)
for recipe_dir in context["props"].recipe_dirs:
# TODO: should this use id.namespace somewhere?
path = recipe_dir / f"{id.path}.json"
if recipe_dir == context["props"].default_recipe_dir:
# only load from one file
values = load_json(path) | {"id": id}
elif not path.exists():
# this is to ensure the recipe at least exists on all platforms
# because we've had issues with that before (eg. Hexal's Mote Nexus)
raise ValueError(f"Recipe {id} missing from path {path}")
return values

View file

@ -1,26 +1,24 @@
from dataclasses import dataclass from common.tagged_union import NoValue, TypeTaggedUnion
from common.tagged_union import NoValue
from minecraft.resource import ResourceLocation from minecraft.resource import ResourceLocation
from patchouli.state import AnyState, BookState, StatefulTypeTaggedUnion from patchouli.context import AnyBookContext, BookContext
class ItemIngredient( class ItemIngredient(
StatefulTypeTaggedUnion[AnyState], TypeTaggedUnion[AnyBookContext],
group="hexdoc.ItemIngredient", group="hexdoc.ItemIngredient",
type=None, type=None,
): ):
pass pass
ItemIngredientOrList = ItemIngredient[AnyState] | list[ItemIngredient[AnyState]] ItemIngredientOrList = (
ItemIngredient[AnyBookContext] | list[ItemIngredient[AnyBookContext]]
)
@dataclass class MinecraftItemIdIngredient(ItemIngredient[BookContext], type=NoValue):
class MinecraftItemIdIngredient(ItemIngredient[BookState], type=NoValue):
item: ResourceLocation item: ResourceLocation
@dataclass class MinecraftItemTagIngredient(ItemIngredient[BookContext], type=NoValue):
class MinecraftItemTagIngredient(ItemIngredient[BookState], type=NoValue):
tag: ResourceLocation tag: ResourceLocation

View file

@ -1,27 +1,30 @@
from dataclasses import dataclass from common.model import HexDocModel
from minecraft.i18n import LocalizedItem
from patchouli.context import BookContext
from patchouli.state import BookState from .abstract_recipes import Recipe
from .abstract_recipes import ItemResult, Recipe
from .ingredients import ItemIngredientOrList from .ingredients import ItemIngredientOrList
@dataclass class ItemResult(HexDocModel[BookContext]):
item: LocalizedItem
count: int | None = None
class CraftingShapedRecipe( class CraftingShapedRecipe(
Recipe[BookState], Recipe[BookContext],
type="minecraft:crafting_shaped", type="minecraft:crafting_shaped",
): ):
pattern: list[str] pattern: list[str]
key: dict[str, ItemIngredientOrList[BookState]] key: dict[str, ItemIngredientOrList[BookContext]]
result: ItemResult result: ItemResult
@dataclass
class CraftingShapelessRecipe( class CraftingShapelessRecipe(
Recipe[BookState], Recipe[BookContext],
type="minecraft:crafting_shapeless", type="minecraft:crafting_shapeless",
): ):
ingredients: list[ItemIngredientOrList[BookState]] ingredients: list[ItemIngredientOrList[BookContext]]
result: ItemResult result: ItemResult

View file

@ -3,72 +3,71 @@
from __future__ import annotations from __future__ import annotations
import re import re
from abc import ABC, abstractmethod
from pathlib import Path from pathlib import Path
from typing import Any, Self from typing import Any, ClassVar, Self
from pydantic import field_validator, model_validator, validator from pydantic import field_validator, model_serializer, model_validator
from pydantic.dataclasses import dataclass from pydantic.dataclasses import dataclass
from pydantic.functional_validators import ModelWrapValidatorHandler
from common.deserialize import DEFAULT_CONFIG from common.model import DEFAULT_CONFIG
from common.types import isinstance_or_raise
def _make_re(count: bool = False, nbt: bool = False) -> re.Pattern[str]: def _make_regex(count: bool = False, nbt: bool = False) -> re.Pattern[str]:
pattern = r"(?:([0-9a-z_\-.]+):)?([0-9a-z_\-./]+)" pattern = r"(?:(?P<namespace>[0-9a-z_\-.]+):)?(?P<path>[0-9a-z_\-./]+)"
if count: if count:
pattern += r"(?:#([0-9]+))?" pattern += r"(?:#(?P<count>[0-9]+))?"
if nbt: if nbt:
pattern += r"({.*})?" pattern += r"(?P<nbt>{.*})?"
return re.compile(pattern) return re.compile(pattern)
_RESOURCE_LOCATION_RE = _make_re() @dataclass(config=DEFAULT_CONFIG, frozen=True, repr=False)
_ITEM_STACK_RE = _make_re(count=True, nbt=True) class BaseResourceLocation:
_ENTITY_RE = _make_re(nbt=True)
@dataclass(config=DEFAULT_CONFIG, repr=False, frozen=True)
class BaseResourceLocation(ABC):
"""Represents a Minecraft resource location / namespaced ID."""
namespace: str namespace: str
path: str path: str
@classmethod # TODO: model_validator _from_str_regex: ClassVar[re.Pattern[str]]
def from_str(cls, raw: Self | str) -> Self:
if isinstance(raw, BaseResourceLocation): def __init_subclass__(cls, regex: re.Pattern[str]) -> None:
return raw cls._from_str_regex = regex
return cls(*cls._match_groups(raw))
@classmethod @classmethod
def _match_groups(cls, raw: str) -> tuple[str, ...]: def from_str(cls, raw: str) -> Self:
assert isinstance_or_raise(raw, str) # TODO: remove match = cls._from_str_regex.fullmatch(raw)
match = cls._fullmatch(raw)
if match is None: if match is None:
raise ValueError(f"Invalid {cls.__name__} string: {raw}") raise ValueError(f"Invalid {cls.__name__} string: {raw}")
namespace, *rest = match.groups() return cls(**match.groupdict())
return (namespace or "minecraft", *rest)
@model_validator(mode="wrap")
@classmethod @classmethod
@abstractmethod def _pre_root(cls, values: str | Any, handler: ModelWrapValidatorHandler[Self]):
def _fullmatch(cls, string: str) -> re.Match[str] | None: # before validating the fields, if it's a string instead of a dict, convert it
... if isinstance(values, str):
return cls.from_str(values)
return handler(values)
@field_validator("namespace", mode="before")
def _default_namespace(cls, value: str | None) -> str:
if value is None:
return "minecraft"
return value
@model_serializer
def _ser_model(self) -> str:
return str(self)
def __repr__(self) -> str: def __repr__(self) -> str:
return f"{self.namespace}:{self.path}" return f"{self.namespace}:{self.path}"
@dataclass(config=DEFAULT_CONFIG, repr=False, frozen=True) @dataclass(config=DEFAULT_CONFIG, frozen=True, repr=False)
class ResourceLocation(BaseResourceLocation): class ResourceLocation(BaseResourceLocation, regex=_make_regex()):
@classmethod """Represents a Minecraft resource location / namespaced ID."""
def _fullmatch(cls, string: str) -> re.Match[str] | None:
return _RESOURCE_LOCATION_RE.fullmatch(string)
@classmethod @classmethod
def from_file(cls, modid: str, base_dir: Path, path: Path) -> ResourceLocation: def from_file(cls, modid: str, base_dir: Path, path: Path) -> Self:
resource_path = path.relative_to(base_dir).with_suffix("").as_posix() resource_path = path.relative_to(base_dir).with_suffix("").as_posix()
return ResourceLocation(modid, resource_path) return ResourceLocation(modid, resource_path)
@ -81,8 +80,8 @@ class ResourceLocation(BaseResourceLocation):
ResLoc = ResourceLocation ResLoc = ResourceLocation
@dataclass(config=DEFAULT_CONFIG, repr=False, frozen=True) @dataclass(config=DEFAULT_CONFIG, frozen=True, repr=False)
class ItemStack(BaseResourceLocation): class ItemStack(BaseResourceLocation, regex=_make_regex(count=True, nbt=True)):
"""Represents an item with optional count and NBT. """Represents an item with optional count and NBT.
Inherits from BaseResourceLocation, not ResourceLocation. Inherits from BaseResourceLocation, not ResourceLocation.
@ -91,16 +90,6 @@ class ItemStack(BaseResourceLocation):
count: int | None = None count: int | None = None
nbt: str | None = None nbt: str | None = None
@field_validator("count", mode="before") # TODO: move this into _match_groups?
def convert_count(cls, count: str | int | None):
if isinstance(count, str):
return int(count)
return count
@classmethod
def _fullmatch(cls, string: str) -> re.Match[str] | None:
return _ITEM_STACK_RE.fullmatch(string)
def i18n_key(self, root: str = "item") -> str: def i18n_key(self, root: str = "item") -> str:
return f"{root}.{self.namespace}.{self.path}" return f"{root}.{self.namespace}.{self.path}"
@ -113,8 +102,8 @@ class ItemStack(BaseResourceLocation):
return s return s
@dataclass(repr=False, frozen=True) @dataclass(config=DEFAULT_CONFIG, frozen=True, repr=False)
class Entity(BaseResourceLocation): class Entity(BaseResourceLocation, regex=_make_regex(nbt=True)):
"""Represents an entity with optional NBT. """Represents an entity with optional NBT.
Inherits from BaseResourceLocation, not ResourceLocation. Inherits from BaseResourceLocation, not ResourceLocation.
@ -122,10 +111,6 @@ class Entity(BaseResourceLocation):
nbt: str | None = None nbt: str | None = None
@classmethod
def _fullmatch(cls, string: str) -> re.Match[str] | None:
return _ENTITY_RE.fullmatch(string)
def __repr__(self) -> str: def __repr__(self) -> str:
s = super().__repr__() s = super().__repr__()
if self.nbt is not None: if self.nbt is not None:

View file

@ -1,24 +1,23 @@
from __future__ import annotations from __future__ import annotations
from abc import ABC from typing import Any, Generic, Literal, Self, cast
from dataclasses import dataclass, field
from typing import Literal, Self
from common.deserialize import from_dict_checked, load_json_data, rename from pydantic import Field, ValidationInfo, model_validator
from common.types import Color, LocalizedStr
from minecraft.i18n import I18n from common.deserialize import isinstance_or_raise, load_json
from minecraft.recipe import ItemIngredient, Recipe from common.model import AnyContext, HexDocModel
from common.properties import Properties
from common.types import Color
from minecraft.i18n import I18n, LocalizedStr
from minecraft.resource import ItemStack, ResLoc, ResourceLocation from minecraft.resource import ItemStack, ResLoc, ResourceLocation
from .category import Category from .category import Category
from .context import AnyBookContext, BookContext
from .entry import Entry from .entry import Entry
from .formatting import FormatTree from .formatting import DEFAULT_MACROS, FormatTree
from .page import Page
from .state import AnyState, Stateful
@dataclass class Book(Generic[AnyContext, AnyBookContext], HexDocModel[AnyBookContext]):
class Book(Stateful[AnyState], ABC):
"""Main Patchouli book class. """Main Patchouli book class.
Includes all data from book.json, categories/entries/pages, and i18n. Includes all data from book.json, categories/entries/pages, and i18n.
@ -30,6 +29,10 @@ class Book(Stateful[AnyState], ABC):
See: https://vazkiimods.github.io/Patchouli/docs/reference/book-json See: https://vazkiimods.github.io/Patchouli/docs/reference/book-json
""" """
# not in book.json
context: AnyBookContext = Field(default_factory=dict)
categories: dict[ResourceLocation, Category] = Field(default_factory=dict)
# required # required
name: LocalizedStr name: LocalizedStr
landing_text: FormatTree landing_text: FormatTree
@ -48,9 +51,7 @@ class Book(Stateful[AnyState], ABC):
progress_bar_background: Color = Color("DDDDDD") progress_bar_background: Color = Color("DDDDDD")
open_sound: ResourceLocation | None = None open_sound: ResourceLocation | None = None
flip_sound: ResourceLocation | None = None flip_sound: ResourceLocation | None = None
_index_icon: ResourceLocation | None = field( index_icon_: ResourceLocation | None = Field(default=None, alias="index_icon")
default=None, metadata=rename("index_icon")
)
pamphlet: bool = False pamphlet: bool = False
show_progress: bool = True show_progress: bool = True
version: str | int = 0 version: str | int = 0
@ -61,8 +62,8 @@ class Book(Stateful[AnyState], ABC):
custom_book_item: ItemStack | None = None custom_book_item: ItemStack | None = None
show_toasts: bool = True show_toasts: bool = True
use_blocky_font: bool = False use_blocky_font: bool = False
do_i18n: bool = field(default=False, metadata=rename("i18n")) do_i18n: bool = Field(default=False, alias="i18n")
macros: dict[str, str] = field(default_factory=dict) macros: dict[str, str] = Field(default_factory=dict)
pause_game: bool = False pause_game: bool = False
text_overflow_mode: Literal["overflow", "resize", "truncate"] | None = None text_overflow_mode: Literal["overflow", "resize", "truncate"] | None = None
extend: str | None = None extend: str | None = None
@ -70,41 +71,45 @@ class Book(Stateful[AnyState], ABC):
allow_extensions: bool = True allow_extensions: bool = True
@classmethod @classmethod
def load(cls, state: AnyState) -> Self: def load(cls, data: dict[str, Any], context: AnyBookContext):
"""Loads `book.json` and finishes initializing the shared state. return cls.model_validate(data, context=context)
Subclasses should generally not override this. To customize state creation or
add type hooks (including page or recipe types), override `__post_init__()`,
calling `super()` at the end (because that's where categories/entries load).
"""
@classmethod
def prepare(cls, props: Properties) -> tuple[dict[str, Any], BookContext]:
# read the raw dict from the json file # read the raw dict from the json file
path = state.props.book_dir / "book.json" path = props.book_dir / "book.json"
data = load_json_data(cls, path, {"state": state}) data = load_json(path)
assert isinstance_or_raise(data, dict[str, Any])
state.i18n = I18n(state.props, data["do_i18n"])
state.add_macros(data["macros"])
state.add_stateful_unions(Page, Recipe, ItemIngredient)
# NOW we can convert the actual book data # NOW we can convert the actual book data
return from_dict_checked(cls, data, state.config, path) return data, {
"i18n": I18n(props, data["i18n"]),
"props": props,
"macros": data["macros"] | DEFAULT_MACROS,
}
def __post_init__(self) -> None: @model_validator(mode="after")
def _post_root(self, info: ValidationInfo) -> Self:
"""Loads categories and entries.""" """Loads categories and entries."""
context = cast(AnyBookContext, info.context)
self.context = context
# categories # categories
self.categories = Category.load_all(self.state) self.categories = Category.load_all(context)
# entries # entries
for path in self.props.entries_dir.rglob("*.json"): for path in context["props"].entries_dir.rglob("*.json"):
# i used the entry to insert the entry (pretty sure thanos said that) # i used the entry to insert the entry (pretty sure thanos said that)
entry = Entry.load(path, self.state) entry = Entry.load(path, context)
self.categories[entry.category_id].entries.append(entry) self.categories[entry.category_id].entries.append(entry)
# we inserted a bunch of entries in no particular order, so sort each category # we inserted a bunch of entries in no particular order, so sort each category
for category in self.categories.values(): for category in self.categories.values():
category.entries.sort() category.entries.sort()
return self
@property @property
def index_icon(self) -> ResourceLocation: def index_icon(self) -> ResourceLocation:
# default value as defined by patchouli, apparently # default value as defined by patchouli, apparently
return self.model if self._index_icon is None else self._index_icon return self.model if self.index_icon_ is None else self.index_icon_

View file

@ -1,67 +1,88 @@
from __future__ import annotations from __future__ import annotations
from dataclasses import dataclass, field from pathlib import Path
from typing import Self from typing import Self
from common.deserialize import rename from pydantic import Field
from common.types import LocalizedStr, Sortable, sorted_dict
from common.properties import Properties
from common.types import Sortable, sorted_dict
from minecraft.i18n import LocalizedStr
from minecraft.resource import ItemStack, ResourceLocation from minecraft.resource import ItemStack, ResourceLocation
from .context import BookContext, BookModelFile
from .entry import Entry from .entry import Entry
from .formatting import FormatTree from .formatting import FormatTree
from .state import BookState, StatefulFile
@dataclass class Category(BookModelFile[BookContext, BookContext], Sortable):
class Category(StatefulFile[BookState], Sortable):
"""Category with pages and localizations. """Category with pages and localizations.
See: https://vazkiimods.github.io/Patchouli/docs/reference/category-json See: https://vazkiimods.github.io/Patchouli/docs/reference/category-json
""" """
entries: list[Entry] = Field(default_factory=list)
# required # required
name: LocalizedStr name: LocalizedStr
description: FormatTree description: FormatTree
icon: ItemStack icon: ItemStack
# optional # optional
_parent_id: ResourceLocation | None = field(default=None, metadata=rename("parent")) parent_id: ResourceLocation | None = Field(default=None, alias="parent")
parent: Category | None = field(default=None, init=False) parent_cmp_key_: tuple[int, ...] | None = None
flag: str | None = None flag: str | None = None
sortnum: int = 0 sortnum: int = 0
secret: bool = False secret: bool = False
def __post_init__(self):
self.entries: list[Entry] = []
@classmethod @classmethod
def load_all(cls, state: BookState): def load_all(cls, context: BookContext):
categories: dict[ResourceLocation, Self] = {} categories: dict[ResourceLocation, Self] = {}
# load # load
for path in state.props.categories_dir.rglob("*.json"): for path in context["props"].categories_dir.rglob("*.json"):
category = cls.load(path, state) category = cls.load(path, context)
categories[category.id] = category categories[category.id] = category
# late-init parent # late-init _parent_cmp_key
for category in categories.values(): # track iterations to avoid an infinite loop if for some reason there's a cycle
if category._parent_id is not None: # TODO: array of non-ready categories so we can give a better error message?
category.parent = categories[category._parent_id] done, iterations = False, 0
while not done and (iterations := iterations + 1) < 1000:
done = True
for category in categories.values():
# if we still need to init this category, get the parent
if category._is_cmp_key_ready:
continue
assert category.parent_id
parent = categories[category.parent_id]
# only set _parent_cmp_key if the parent has been initialized
if parent._is_cmp_key_ready:
category.parent_cmp_key_ = parent._cmp_key
else:
done = False
if not done:
raise RuntimeError(
f"Possible circular dependency of category parents: {categories}"
)
# return sorted by sortnum, which requires parent to be initialized # return sorted by sortnum, which requires parent to be initialized
return sorted_dict(categories) return sorted_dict(categories)
@classmethod
def _id_base_dir(cls, props: Properties) -> Path:
# implement BookModelFile
return props.categories_dir
@property @property
def id(self) -> ResourceLocation: def _is_cmp_key_ready(self) -> bool:
return ResourceLocation.from_file( return self.parent_id is None or self.parent_cmp_key_ is not None
self.props.modid,
self.props.categories_dir,
self.path,
)
@property @property
def _cmp_key(self) -> tuple[int, ...]: def _cmp_key(self) -> tuple[int, ...]:
# implement Sortable # implement Sortable
if parent := self.parent: if parent_cmp_key := self.parent_cmp_key_:
return parent._cmp_key + (self.sortnum,) return parent_cmp_key + (self.sortnum,)
return (self.sortnum,) return (self.sortnum,)

View file

@ -0,0 +1,45 @@
from abc import ABC, abstractmethod
from pathlib import Path
from typing import Any, Generic, TypeVar, cast, dataclass_transform
from pydantic import ValidationInfo, model_validator
from common.model import AnyContext, HexDocModelFile
from common.properties import Properties
from minecraft.resource import ResourceLocation
from patchouli.formatting import FormatContext
class BookContext(FormatContext):
props: Properties
AnyBookContext = TypeVar("AnyBookContext", bound=BookContext)
@dataclass_transform()
class BookModelFile(
Generic[AnyContext, AnyBookContext],
HexDocModelFile[AnyBookContext],
ABC,
):
id: ResourceLocation
@classmethod
@abstractmethod
def _id_base_dir(cls, props: Properties) -> Path:
...
@model_validator(mode="before")
def _pre_root(cls, values: dict[str, Any], info: ValidationInfo) -> dict[str, Any]:
if not info.context:
return values
context = cast(AnyBookContext, info.context)
return values | {
"id": ResourceLocation.from_file(
modid=context["props"].modid,
base_dir=cls._id_base_dir(context["props"]),
path=values.pop("__path"),
)
}

View file

@ -1,17 +1,19 @@
from __future__ import annotations from __future__ import annotations
from dataclasses import dataclass, field from pathlib import Path
from common.deserialize import rename from pydantic import Field
from common.types import Color, LocalizedStr, Sortable
from common.properties import Properties
from common.types import Color, Sortable
from minecraft.i18n import LocalizedStr
from minecraft.resource import ItemStack, ResourceLocation from minecraft.resource import ItemStack, ResourceLocation
from patchouli.state import BookState, StatefulFile
from .context import BookContext, BookModelFile
from .page import Page from .page import Page
@dataclass class Entry(BookModelFile[BookContext, BookContext], Sortable):
class Entry(StatefulFile[BookState], Sortable):
"""Entry json file, with pages and localizations. """Entry json file, with pages and localizations.
See: https://vazkiimods.github.io/Patchouli/docs/reference/entry-json See: https://vazkiimods.github.io/Patchouli/docs/reference/entry-json
@ -19,9 +21,9 @@ class Entry(StatefulFile[BookState], Sortable):
# required (entry.json) # required (entry.json)
name: LocalizedStr name: LocalizedStr
category_id: ResourceLocation = field(metadata=rename("category")) category_id: ResourceLocation = Field(alias="category")
icon: ItemStack icon: ItemStack
pages: list[Page[BookState]] pages: list[Page[BookContext]]
# optional (entry.json) # optional (entry.json)
advancement: ResourceLocation | None = None advancement: ResourceLocation | None = None
@ -34,11 +36,9 @@ class Entry(StatefulFile[BookState], Sortable):
extra_recipe_mappings: dict[ItemStack, int] | None = None extra_recipe_mappings: dict[ItemStack, int] | None = None
entry_color: Color | None = None # this is undocumented lmao entry_color: Color | None = None # this is undocumented lmao
@property @classmethod
def id(self) -> ResourceLocation: def _id_base_dir(cls, props: Properties) -> Path:
return ResourceLocation.from_file( return props.entries_dir
self.props.modid, self.props.entries_dir, self.path
)
@property @property
def _cmp_key(self) -> tuple[bool, int, LocalizedStr]: def _cmp_key(self) -> tuple[bool, int, LocalizedStr]:

View file

@ -1,6 +1,14 @@
from __future__ import annotations
import re import re
from dataclasses import dataclass from typing import NamedTuple, Self, cast
from typing import NamedTuple, Self
from pydantic import ValidationInfo, model_validator
from pydantic.dataclasses import dataclass
from pydantic.functional_validators import ModelWrapValidatorHandler
from common.model import DEFAULT_CONFIG
from minecraft.i18n import I18nContext, LocalizedStr
DEFAULT_MACROS = { DEFAULT_MACROS = {
"$(obf)": "$(k)", "$(obf)": "$(k)",
@ -113,17 +121,21 @@ def parse_style(style_text: str) -> Style | str:
_FORMAT_RE = re.compile(r"\$\(([^)]*)\)") _FORMAT_RE = re.compile(r"\$\(([^)]*)\)")
@dataclass class FormatContext(I18nContext):
macros: dict[str, str]
@dataclass(config=DEFAULT_CONFIG)
class FormatTree: class FormatTree:
style: Style style: Style
children: list[Self | str] children: list[FormatTree | str]
@classmethod @classmethod
def empty(cls) -> Self: def empty(cls) -> Self:
return cls(Style("base", None), []) return cls(Style("base", None), [])
@classmethod @classmethod
def format(cls, macros: dict[str, str], string: str) -> Self: def format(cls, string: str, macros: dict[str, str]) -> Self:
# resolve macros # resolve macros
# TODO: use ahocorasick? this feels inefficient # TODO: use ahocorasick? this feels inefficient
old_string = None old_string = None
@ -187,3 +199,19 @@ class FormatTree:
style_stack[-1].children.append(last_node) style_stack[-1].children.append(last_node)
return style_stack[0] return style_stack[0]
@model_validator(mode="wrap")
@classmethod
def _wrap_root(
cls,
value: str | LocalizedStr | Self,
handler: ModelWrapValidatorHandler[Self],
info: ValidationInfo,
):
context = cast(FormatContext, info.context)
if not context or isinstance(value, FormatTree):
return handler(value)
if not isinstance(value, LocalizedStr):
value = context["i18n"].localize(value)
return cls.format(value.value, context["macros"])

View file

@ -1,20 +1,19 @@
from abc import ABC, abstractmethod from abc import ABC, abstractmethod
from dataclasses import dataclass, field from typing import Any, Self
from typing import Any, Self, TypeVar
from common.deserialize import rename from pydantic import Field, model_validator
from common.types import LocalizedStr from pydantic.functional_validators import ModelWrapValidatorHandler
from common.tagged_union import TypeTaggedUnion
from minecraft.i18n import LocalizedStr
from minecraft.recipe import CraftingRecipe from minecraft.recipe import CraftingRecipe
from minecraft.resource import ResourceLocation from minecraft.resource import ResourceLocation
from ..context import AnyBookContext
from ..formatting import FormatTree from ..formatting import FormatTree
from ..state import AnyState, StatefulTypeTaggedUnion
_T = TypeVar("_T")
@dataclass(kw_only=True) class Page(TypeTaggedUnion[AnyBookContext], group="hexdoc.Page", type=None):
class Page(StatefulTypeTaggedUnion[AnyState], group="hexdoc.Page", type=None):
"""Base class for Patchouli page types. """Base class for Patchouli page types.
See: https://vazkiimods.github.io/Patchouli/docs/patchouli-basics/page-types See: https://vazkiimods.github.io/Patchouli/docs/patchouli-basics/page-types
@ -24,29 +23,27 @@ class Page(StatefulTypeTaggedUnion[AnyState], group="hexdoc.Page", type=None):
flag: str | None = None flag: str | None = None
anchor: str | None = None anchor: str | None = None
@model_validator(mode="wrap")
@classmethod @classmethod
def stateful_type_hook(cls, data: Self | Any, state: AnyState) -> Self: def _pre_root(cls, value: str | Any, handler: ModelWrapValidatorHandler[Self]):
if isinstance(data, str): if isinstance(value, str):
data = {"type": "patchouli:text", "text": data} return handler({"type": "patchouli:text", "text": value})
return super().stateful_type_hook(data, state) return handler(value)
@dataclass(kw_only=True) class PageWithText(Page[AnyBookContext], type=None):
class PageWithText(Page[AnyState], type=None):
text: FormatTree | None = None text: FormatTree | None = None
@dataclass(kw_only=True) class PageWithTitle(PageWithText[AnyBookContext], type=None):
class PageWithTitle(PageWithText[AnyState], type=None): title_: LocalizedStr | None = Field(default=None, alias="title")
_title: LocalizedStr | None = field(default=None, metadata=rename("title"))
@property @property
def title(self) -> LocalizedStr | None: def title(self) -> str | None:
return self._title return self.title_.value if self.title_ else None
@dataclass(kw_only=True) class PageWithCraftingRecipes(PageWithText[AnyBookContext], ABC, type=None):
class PageWithCraftingRecipes(PageWithText[AnyState], ABC, type=None):
@property @property
@abstractmethod @abstractmethod
def recipes(self) -> list[CraftingRecipe]: def recipes(self) -> list[CraftingRecipe]:

View file

@ -1,32 +1,29 @@
from __future__ import annotations from __future__ import annotations
from dataclasses import dataclass, field
from typing import Any from typing import Any
from common.deserialize import rename from pydantic import Field
from common.types import LocalizedItem, LocalizedStr
from minecraft.i18n import LocalizedItem, LocalizedStr
from minecraft.recipe import CraftingRecipe from minecraft.recipe import CraftingRecipe
from minecraft.resource import Entity, ItemStack, ResourceLocation from minecraft.resource import Entity, ItemStack, ResourceLocation
from patchouli.context import BookContext
from ..formatting import FormatTree from ..formatting import FormatTree
from ..state import BookState
from .abstract_pages import Page, PageWithCraftingRecipes, PageWithText, PageWithTitle from .abstract_pages import Page, PageWithCraftingRecipes, PageWithText, PageWithTitle
@dataclass(kw_only=True) class TextPage(PageWithTitle[BookContext], type="patchouli:text"):
class TextPage(PageWithTitle[BookState], type="patchouli:text"):
text: FormatTree text: FormatTree
@dataclass class ImagePage(PageWithTitle[BookContext], type="patchouli:image"):
class ImagePage(PageWithTitle[BookState], type="patchouli:image"):
images: list[ResourceLocation] images: list[ResourceLocation]
border: bool = False border: bool = False
@dataclass
class CraftingPage( class CraftingPage(
PageWithCraftingRecipes[BookState], PageWithCraftingRecipes[BookContext],
type="patchouli:crafting", type="patchouli:crafting",
): ):
recipe: CraftingRecipe recipe: CraftingRecipe
@ -41,18 +38,16 @@ class CraftingPage(
# TODO: this should probably inherit PageWithRecipes too # TODO: this should probably inherit PageWithRecipes too
@dataclass class SmeltingPage(PageWithTitle[BookContext], type="patchouli:smelting"):
class SmeltingPage(PageWithTitle[BookState], type="patchouli:smelting"):
recipe: ItemStack recipe: ItemStack
recipe2: ItemStack | None = None recipe2: ItemStack | None = None
@dataclass class MultiblockPage(PageWithText[BookContext], type="patchouli:multiblock"):
class MultiblockPage(PageWithText[BookState], type="patchouli:multiblock"):
name: LocalizedStr name: LocalizedStr
multiblock_id: ResourceLocation | None = None multiblock_id: ResourceLocation | None = None
# TODO: https://vazkiimods.github.io/Patchouli/docs/patchouli-basics/multiblocks/ # TODO: https://vazkiimods.github.io/Patchouli/docs/patchouli-basics/multiblocks/
# this should be a dataclass, but hex doesn't have any multiblock pages so idc # this should be a modeled class, but hex doesn't have any multiblock pages so idc
multiblock: Any | None = None multiblock: Any | None = None
enable_visualize: bool = True enable_visualize: bool = True
@ -61,8 +56,7 @@ class MultiblockPage(PageWithText[BookState], type="patchouli:multiblock"):
raise ValueError(f"One of multiblock_id or multiblock must be set\n{self}") raise ValueError(f"One of multiblock_id or multiblock must be set\n{self}")
@dataclass class EntityPage(PageWithText[BookContext], type="patchouli:entity"):
class EntityPage(PageWithText[BookState], type="patchouli:entity"):
entity: Entity entity: Entity
scale: float = 1 scale: float = 1
offset: float = 0 offset: float = 0
@ -71,34 +65,31 @@ class EntityPage(PageWithText[BookState], type="patchouli:entity"):
name: LocalizedStr | None = None name: LocalizedStr | None = None
@dataclass class SpotlightPage(PageWithTitle[BookContext], type="patchouli:spotlight"):
class SpotlightPage(PageWithTitle[BookState], type="patchouli:spotlight"):
item: LocalizedItem # TODO: patchi says this is an ItemStack, so this might break item: LocalizedItem # TODO: patchi says this is an ItemStack, so this might break
link_recipe: bool = False link_recipe: bool = False
@dataclass
class LinkPage(TextPage, type="patchouli:link"): class LinkPage(TextPage, type="patchouli:link"):
url: str url: str
link_text: LocalizedStr link_text: LocalizedStr
@dataclass(kw_only=True) class RelationsPage(PageWithTitle[BookContext], type="patchouli:relations"):
class RelationsPage(PageWithTitle[BookState], type="patchouli:relations"):
entries: list[ResourceLocation] entries: list[ResourceLocation]
_title: LocalizedStr = field( title_: LocalizedStr = Field(
default=LocalizedStr("Related Chapters"), metadata=rename("title") default=LocalizedStr.skip_key("Related Chapters"),
alias="title",
) )
@dataclass class QuestPage(PageWithTitle[BookContext], type="patchouli:quest"):
class QuestPage(PageWithTitle[BookState], type="patchouli:quest"):
trigger: ResourceLocation | None = None trigger: ResourceLocation | None = None
_title: LocalizedStr = field( title_: LocalizedStr = Field(
default=LocalizedStr("Objective"), metadata=rename("title") default=LocalizedStr.skip_key("Objective"),
alias="title",
) )
@dataclass class EmptyPage(Page[BookContext], type="patchouli:empty"):
class EmptyPage(Page[BookState], type="patchouli:empty"):
draw_filler: bool = True draw_filler: bool = True

View file

@ -1,166 +0,0 @@
from __future__ import annotations
from dataclasses import dataclass, field
from pathlib import Path
from typing import Any, Generic, Self, TypeVar, cast
from common.deserialize import (
TypedConfig,
TypeHook,
TypeHooks,
from_dict_checked,
load_json_data,
)
from common.pattern import Direction
from common.properties import Properties
from common.tagged_union import InternallyTaggedUnion, NoValueType, TagValue
from common.types import LocalizedItem, LocalizedStr, isinstance_or_raise
from minecraft.i18n import I18n
from minecraft.resource import ItemStack, ResourceLocation
from .formatting import DEFAULT_MACROS, FormatTree
@dataclass(repr=False)
class BookState:
"""Stores data which needs to be accessible/mutable from many different places.
This helps us avoid some *really* ugly circular dependencies in the book tree.
"""
props: Properties
def __post_init__(self):
self._macros: dict[str, str] = DEFAULT_MACROS
self._i18n: I18n | None = None
# type conversion hooks
self._type_hooks: TypeHooks[Any] = {
ResourceLocation: ResourceLocation.from_str,
ItemStack: ItemStack.from_str,
Direction: Direction.__getitem__,
FormatTree: self.format,
}
@property
def i18n(self) -> I18n:
if self._i18n is None:
raise RuntimeError("Tried to use state.i18n before initializing it")
return self._i18n
@i18n.setter
def i18n(self, i18n: I18n):
self._i18n = i18n
self._type_hooks |= {
LocalizedStr: self.i18n.localize,
LocalizedItem: self.i18n.localize_item,
}
def add_macros(self, macros: dict[str, str]):
# TODO: order of operations?
self._macros = macros | self._macros
def add_stateful_unions(
self,
*unions: type[StatefulInternallyTaggedUnion[Self]],
):
for union in unions:
self._type_hooks |= union.make_type_hooks(self)
def format(self, text: str | LocalizedStr) -> FormatTree:
"""Converts the given string into a FormatTree, localizing it if necessary."""
# we use this as a type hook
assert isinstance_or_raise(text, (str, LocalizedStr))
if not isinstance(text, LocalizedStr):
text = self.i18n.localize(text)
return FormatTree.format(self._macros, text)
@property
def config(self) -> TypedConfig:
"""Creates a Dacite config."""
return TypedConfig(type_hooks=self._type_hooks)
AnyState = TypeVar("AnyState", bound=BookState)
@dataclass(kw_only=True)
class Stateful(Generic[AnyState]):
"""Base for dataclasses with a BookState object.
Provides some helper properties to make the state more ergonomic to use.
"""
state: AnyState
@property
def props(self):
return self.state.props
@property
def i18n(self):
return self.state.i18n
@dataclass(kw_only=True)
class StatefulFile(Stateful[AnyState]):
"""Base for dataclasses which can be loaded from a JSON file given a path and the
shared state. Extends Stateful."""
path: Path
@classmethod
def load(cls, path: Path, state: AnyState) -> Self:
# load the raw data from json, and add our extra fields
data = load_json_data(cls, path, {"path": path, "state": state})
return from_dict_checked(cls, data, state.config, path)
class StatefulInternallyTaggedUnion(
Stateful[AnyState],
InternallyTaggedUnion,
group=None,
key=None,
value=None,
):
@classmethod
def stateful_type_hook(cls, data: Self | Any, state: AnyState) -> Self:
if isinstance(data, dict):
# FIXME: ew
data = cast(dict[str, Any], data) | {"state": state}
return cls._resolve_from_dict(data, state.config)
@classmethod
def make_type_hook(cls, state: AnyState) -> TypeHook[Self]:
return lambda data: cls.stateful_type_hook(data, state)
@classmethod
def make_type_hooks(cls, state: BookState) -> TypeHooks[Self]:
return {
subtype: subtype.make_type_hook(state) for subtype in cls._all_subtypes()
}
@dataclass(kw_only=True)
class StatefulTypeTaggedUnion(
StatefulInternallyTaggedUnion[AnyState],
key="type",
value=None,
): # :(
type: ResourceLocation | None = field(init=False)
def __init_subclass__(
cls,
*,
group: str | None = None,
type: TagValue | None,
) -> None:
super().__init_subclass__(group=group, value=type)
match type:
case str():
cls.type = ResourceLocation.from_str(type)
case NoValueType():
cls.type = None
case None:
pass

File diff suppressed because it is too large Load diff

View file

@ -16,4 +16,4 @@ colors: list[str] = [
@pytest.mark.parametrize("s", colors) @pytest.mark.parametrize("s", colors)
def test_color(s: str): def test_color(s: str):
assert Color(s) == "0099ff" assert Color(s).value == "0099ff"

View file

@ -1,5 +1,4 @@
# pyright: reportPrivateUsage=false # pyright: reportPrivateUsage=false
from common.types import LocalizedStr
from patchouli.formatting import DEFAULT_MACROS, FormatTree, Style from patchouli.formatting import DEFAULT_MACROS, FormatTree, Style
@ -8,7 +7,7 @@ def test_format_string():
test_str = "Write the given iota to my $(l:patterns/readwrite#hexcasting:write/local)$(#490)local$().$(br)The $(l:patterns/readwrite#hexcasting:write/local)$(#490)local$() is a lot like a $(l:items/focus)$(#b0b)Focus$(). It's cleared when I stop casting a Hex, starts with $(l:casting/influences)$(#490)Null$() in it, and is preserved between casts of $(l:patterns/meta#hexcasting:for_each)$(#fc77be)Thoth's Gambit$(). " test_str = "Write the given iota to my $(l:patterns/readwrite#hexcasting:write/local)$(#490)local$().$(br)The $(l:patterns/readwrite#hexcasting:write/local)$(#490)local$() is a lot like a $(l:items/focus)$(#b0b)Focus$(). It's cleared when I stop casting a Hex, starts with $(l:casting/influences)$(#490)Null$() in it, and is preserved between casts of $(l:patterns/meta#hexcasting:for_each)$(#fc77be)Thoth's Gambit$(). "
# act # act
tree = FormatTree.format(DEFAULT_MACROS, LocalizedStr(test_str)) tree = FormatTree.format(test_str, DEFAULT_MACROS)
# assert # assert
# TODO: possibly make this less lazy # TODO: possibly make this less lazy

View file

@ -1,8 +1,7 @@
import subprocess import subprocess
import sys import sys
from dataclasses import Field, fields
from pathlib import Path from pathlib import Path
from typing import Any, Iterator from typing import Iterator
import pytest import pytest
from bs4 import BeautifulSoup as bs from bs4 import BeautifulSoup as bs
@ -10,11 +9,7 @@ from syrupy.assertion import SnapshotAssertion
from syrupy.extensions.amber import AmberSnapshotExtension from syrupy.extensions.amber import AmberSnapshotExtension
from syrupy.types import SerializedData from syrupy.types import SerializedData
from common.properties import Properties
from common.types import LocalizedStr
from hexcasting.hex_state import HexBookState
from hexcasting.scripts.main import Args, main from hexcasting.scripts.main import Args, main
from patchouli import Book, FormatTree
def prettify(data: SerializedData) -> str: def prettify(data: SerializedData) -> str:
@ -70,26 +65,26 @@ def test_stdout(capsys: pytest.CaptureFixture[str], snapshot: SnapshotAssertion)
assert capsys.readouterr() == snapshot.use_extension(NoDiffSnapshotEx) assert capsys.readouterr() == snapshot.use_extension(NoDiffSnapshotEx)
def test_book_text(snapshot: SnapshotAssertion): # def test_book_text(snapshot: SnapshotAssertion):
def test_field(data_class: Any, field: Field[Any]): # def test_field(data_class: Any, field: Field[Any]):
value = getattr(data_class, field.name, None) # value = getattr(data_class, field.name, None)
if isinstance(value, (LocalizedStr, FormatTree)): # if isinstance(value, (LocalizedStr, FormatTree)):
assert value == snapshot # assert value == snapshot
props = Properties.load(Path("properties.toml")) # props = Properties.load(Path("properties.toml"))
book = Book.load(HexBookState(props)) # book = Book.load(HexBookState(props))
for field in fields(book): # for field in fields(book):
test_field(book, field) # test_field(book, field)
for category in book.categories.values(): # for category in book.categories.values():
for field in fields(category): # for field in fields(category):
test_field(category, field) # test_field(category, field)
for entry in category.entries: # for entry in category.entries:
for field in fields(entry): # for field in fields(entry):
test_field(entry, field) # test_field(entry, field)
for page in entry.pages: # for page in entry.pages:
for field in fields(page): # for field in fields(page):
test_field(page, field) # test_field(page, field)