Add ModResourceLoader, un-generic HexDocModel, class-based context
parent 1a19a12659
commit 12e4670b7e

35 changed files with 705 additions and 485 deletions
@@ -1,4 +1,5 @@
-- [x] Better resource loading
+- [ ] Better resource loading
+- [ ] Fix model_validator type hints (before should take Any and narrow from there)
 - [ ] Sandbox for Jinja
 - [ ] First-class addon support
 - [ ] Language picker
@@ -8,6 +8,7 @@ is_0_black = false

 # top takes priority
 resource_dirs = [
+    { path = "src/hexdoc/_export/resources", reexport = false },
     "{_common.src}/main/resources",
     "{_common.src}/generated/resources",
     "{_fabric.src}/main/resources",
@@ -15,9 +16,10 @@ resource_dirs = [
     "{_forge.src}/main/resources",
     "{_forge.src}/generated/resources",
 ]
+export_dir = "src/hexdoc/_export/generated"

 # NOTE: "!Raw" means "don't apply variable interpolation to this value"
-_pattern_regex = {"!Raw"='make\(\s*"(?P<name>[a-zA-Z0-9_\/]+)",\s*(?:new )?(?:ActionRegistryEntry|OperationAction)\(\s*HexPattern\.fromAngles\(\s*"(?P<signature>[aqweds]+)",\s*HexDir.(?P<startdir>\w+)\)'}
+_pattern_regex = { "!Raw" = 'make\(\s*"(?P<name>[a-zA-Z0-9_\/]+)",\s*(?:new )?(?:ActionRegistryEntry|OperationAction)\(\s*HexPattern\.fromAngles\(\s*"(?P<signature>[aqweds]+)",\s*HexDir.(?P<startdir>\w+)\)' }

 spoilered_advancements = [
     "hexcasting:opened_eyes",
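The `_pattern_regex` above is what the pattern-stub loader in `hex_book.py` (further down in this commit) runs over the mod's Java sources. A minimal sketch of the named-group extraction it relies on; the regex here is heavily simplified and the Java line is invented for illustration:

```py
import re

# same named groups as _pattern_regex, heavily simplified
PATTERN_RE = re.compile(
    r'make\("(?P<name>[a-zA-Z0-9_/]+)",\s*HexPattern\.fromAngles\('
    r'"(?P<signature>[aqweds]+)",\s*HexDir\.(?P<startdir>\w+)\)'
)

java_src = 'make("get_caster", HexPattern.fromAngles("qaq", HexDir.NORTH_EAST))'

for match in PATTERN_RE.finditer(java_src):
    groups = match.groupdict()
    print(groups["name"], groups["signature"], groups["startdir"])
    # -> get_caster qaq NORTH_EAST
```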
@@ -49,16 +51,6 @@ hexcasting = "https://raw.githubusercontent.com/gamma-delta/HexMod/main/Common/s
 [i18n]
 default_lang = "en_us"

-[i18n.extra]
-"item.minecraft.amethyst_shard" = "Amethyst Shard"
-"item.minecraft.budding_amethyst" = "Budding Amethyst"
-"block.hexcasting.slate" = "Blank Slate"
-
-[i18n.keys]
-use = "Right Click"
-sneak = "Left Shift"
-jump = "Space"
-

 [[pattern_stubs]]
 path = "{^_common.package}/common/lib/hex/HexActions.java"
@@ -13,42 +13,42 @@ authors = [
 readme = "README.md"
 requires-python = ">=3.11"
 dependencies = [
-    "typing_extensions~=4.7.0",
-    "pydantic~=2.1.1",
-    "Jinja2~=3.1.2",
-    "pyjson5~=1.6.3",
+    "typing_extensions>=4.7.0",
+    "importlib_resources>=6.0.1",
+    "pydantic>=2.2.0",
+    "Jinja2>=3.1.2",
+    "pyjson5>=1.6.3",
 ]

 [project.optional-dependencies]
 dev = [
     "black==23.7.0",
     "isort==5.12.0",
-    "pytest~=7.3.1",
-    "syrupy~=4.0.2",
     "pyright==1.1.318",
+    "pytest>=7.3.1",
+    "syrupy>=4.0.2",
+    "hatchling",
 ]

 [project.scripts]
 hexdoc = "hexdoc.hexdoc:main"

-[project.entry-points."hexdoc.book_data"]
-"hexcasting:thehexbook" = "hexdoc._book_data"
+[project.entry-points."hexdoc.export"]
+hexcasting = "hexdoc._export:__resources__"

 [project.entry-points."hexdoc.Page"]
-hexdoc-patchouli = "hexdoc.patchouli.page.pages"
-hexdoc-hexcasting = "hexdoc.hexcasting.page.hex_pages"
+patchouli = "hexdoc.patchouli.page.pages"
+hexcasting = "hexdoc.hexcasting.page.hex_pages"

 [project.entry-points."hexdoc.Recipe"]
-hexdoc-minecraft = "hexdoc.minecraft.recipe.recipes"
-hexdoc-hexcasting = "hexdoc.hexcasting.hex_recipes"
+minecraft = "hexdoc.minecraft.recipe.recipes"
+hexcasting = "hexdoc.hexcasting.hex_recipes"

 [project.entry-points."hexdoc.ItemIngredient"]
-hexdoc-minecraft = "hexdoc.minecraft.recipe.ingredients"
-hexdoc-hexcasting = "hexdoc.hexcasting.hex_recipes"
+minecraft = "hexdoc.minecraft.recipe.ingredients"
+hexcasting = "hexdoc.hexcasting.hex_recipes"

 [project.entry-points."hexdoc.BrainsweepeeIngredient"]
-hexdoc-hexcasting = "hexdoc.hexcasting.hex_recipes"
+hexcasting = "hexdoc.hexcasting.hex_recipes"


 [tool.hatch.build]
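The `hexdoc-` prefixes are dropped from the plugin entry point names, and the removed `hexdoc.book_data` group is superseded by `hexdoc.export`, which points at the new `_export.__resources__` list. A rough sketch of how such entry points can be enumerated; the output depends entirely on what is installed, so this is illustrative only:

```py
from importlib.metadata import entry_points

for ep in entry_points(group="hexdoc.Page"):
    # e.g. name="patchouli", value="hexdoc.patchouli.page.pages"
    print(ep.name, "->", ep.value)
    ep.load()  # importing the module presumably registers its tagged-union page types
```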
@@ -77,8 +77,6 @@ combine_as_imports = true
 pythonVersion = "3.11"
 pythonPlatform = "All"

-include = ["src"]
-
 # mostly we use strict mode
 # but pyright doesn't allow decreasing error severity in strict mode
 # so we need to manually specify all of the strict mode overrides so we can do that :/
@@ -1 +0,0 @@
-BOOK_DATA_PATH = "data.json"
doc/src/hexdoc/_export/.gitignore (vendored, new file, 1 line)

@@ -0,0 +1 @@
+/generated
doc/src/hexdoc/_export/__init__.py (new file, 1 line)

@@ -0,0 +1 @@
+__resources__: list[str] = ["resources", "generated"]
@@ -0,0 +1,16 @@
+{
+    key: {
+        use: "Right Click",
+        sneak: "Left Shift",
+        jump: "Space",
+    },
+
+    "item.minecraft": {
+        amethyst_shard: "Amethyst Shard",
+        budding_amethyst: "Budding Amethyst",
+    },
+
+    "block.hexcasting": {
+        slate: "Blank Slate",
+    },
+}
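This new JSON5 lang file carries the strings that used to live in the `[i18n.extra]` and `[i18n.keys]` TOML tables removed above; the nesting gets collapsed into dotted keys by `decode_and_flatten_json_dict` (see `deserialize.py` below). A simplified sketch of the flattening:

```py
import pyjson5

def flatten(obj: dict, prefix: str = "") -> dict[str, str]:
    # simplified stand-in for hexdoc's _flatten_inner
    out: dict[str, str] = {}
    for key, value in obj.items():
        full_key = f"{prefix}.{key}" if prefix else key
        if isinstance(value, dict):
            out |= flatten(value, full_key)
        else:
            out[full_key] = value
    return out

data = pyjson5.decode('{ key: { use: "Right Click" }, "block.hexcasting": { slate: "Blank Slate" } }')
print(flatten(data))
# {'key.use': 'Right Click', 'block.hexcasting.slate': 'Blank Slate'}
```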
@@ -1,7 +1,4 @@
 __all__ = [
-    "AnyHexContext",
-    "HexBook",
-    "HexBookType",
     "HexContext",
     "Direction",
     "PatternInfo",
@@ -9,5 +6,5 @@ __all__ = [
 ]


-from .hex_book import AnyHexContext, HexBook, HexBookType, HexContext
+from .hex_book import HexContext
 from .pattern import Direction, PatternInfo, RawPatternInfo
@@ -1,67 +1,48 @@
 import logging
-from typing import Any, Generic, TypeVar

-from hexdoc.patchouli import AnyBookContext, Book, BookContext
-from hexdoc.utils import AnyContext, Properties, ResourceLocation
+from hexdoc.patchouli.book import BookContext
+from hexdoc.utils import Properties, ResourceLocation
 from hexdoc.utils.properties import PatternStubProps

 from .pattern import Direction, PatternInfo


 # conthext, perhaps
 class HexContext(BookContext):
     patterns: dict[ResourceLocation, PatternInfo]


-AnyHexContext = TypeVar("AnyHexContext", bound=HexContext)
+def load_patterns(props: Properties):
+    patterns = dict[ResourceLocation, PatternInfo]()
+    signatures = dict[str, PatternInfo]()  # just for duplicate checking
+
+    for stub in props.pattern_stubs:
+        # for each stub, load all the patterns in the file
+        for pattern in _load_stub_patterns(stub, props):
+            logging.getLogger(__name__).debug(f"Load pattern: {pattern.id}")
+
+            # check for duplicates, because why not
+            if duplicate := (
+                patterns.get(pattern.id) or signatures.get(pattern.signature)
+            ):
+                raise ValueError(
+                    f"Duplicate pattern {pattern.id}\n{pattern}\n{duplicate}"
+                )
+
+            patterns[pattern.id] = pattern
+            signatures[pattern.signature] = pattern
+
+    return patterns


-class HexBookType(
-    Generic[AnyContext, AnyBookContext, AnyHexContext],
-    Book[AnyHexContext, AnyHexContext],
-):
-    @classmethod
-    def prepare(cls, props: Properties) -> tuple[dict[str, Any], HexContext]:
-        data, context = super().prepare(props)
-
-        # load patterns
-        patterns = dict[ResourceLocation, PatternInfo]()
-        signatures = dict[str, PatternInfo]()  # just for duplicate checking
-
-        for stub in props.pattern_stubs:
-            # for each stub, load all the patterns in the file
-            for pattern in cls.load_patterns(stub, props):
-                # check for duplicates, because why not
-                if duplicate := (
-                    patterns.get(pattern.id) or signatures.get(pattern.signature)
-                ):
-                    raise ValueError(
-                        f"Duplicate pattern {pattern.id}\n{pattern}\n{duplicate}"
-                    )
-
-                patterns[pattern.id] = pattern
-                signatures[pattern.signature] = pattern
-
-        logging.getLogger(__name__).debug(f"Patterns: {patterns.keys()}")
-
-        # build new context
-        return data, {
-            **context,
-            "patterns": patterns,
-        }
-
-    @classmethod
-    def load_patterns(cls, stub: PatternStubProps, props: Properties):
-        # TODO: add Gradle task to generate json with this data. this is dumb and fragile.
-        stub_text = stub.path.read_text("utf-8")
-        for match in stub.regex.finditer(stub_text):
-            groups = match.groupdict()
-            yield PatternInfo(
-                startdir=Direction[groups["startdir"]],
-                signature=groups["signature"],
-                # is_per_world=bool(is_per_world), # FIXME: idfk how to do this now
-                id=props.mod_loc(groups["name"]),
-            )
-
-
-# type alias for convenience
-HexBook = HexBookType[HexContext, HexContext, HexContext]
+def _load_stub_patterns(stub: PatternStubProps, props: Properties):
+    # TODO: add Gradle task to generate json with this data. this is dumb and fragile.
+    stub_text = stub.path.read_text("utf-8")
+    for match in stub.regex.finditer(stub_text):
+        groups = match.groupdict()
+        yield PatternInfo(
+            startdir=Direction[groups["startdir"]],
+            signature=groups["signature"],
+            # is_per_world=bool(is_per_world), # FIXME: idfk how to do this now
+            id=props.mod_loc(groups["name"]),
+        )
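`HexBookType`'s generic `prepare` is replaced by the free function `load_patterns`; the book itself is now loaded through plain `Book` plus an explicit `HexContext` (see `hexdoc.py` below). Hypothetical standalone usage of the new function; the TOML path is invented:

```py
# hypothetical usage; the properties path is invented
from pathlib import Path

from hexdoc.hexcasting.hex_book import load_patterns
from hexdoc.utils import Properties

props = Properties.load(Path("doc/properties.toml"))
patterns = load_patterns(props)
for id, pattern in patterns.items():
    print(id, pattern.startdir, pattern.signature)
```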
@@ -7,15 +7,12 @@ from hexdoc.minecraft.recipe import (
     MinecraftItemTagIngredient,
 )
 from hexdoc.utils import HexDocModel, ResourceLocation, TypeTaggedUnion
-from hexdoc.utils.model import AnyContext
-
-from .hex_book import HexContext

 # ingredients


 class BrainsweepeeIngredient(
-    TypeTaggedUnion[AnyContext],
+    TypeTaggedUnion,
     group="hexdoc.BrainsweepeeIngredient",
     type=None,
 ):
@@ -23,21 +20,21 @@ class BrainsweepeeIngredient(


 # lol, lmao
-class VillagerIngredient(BrainsweepeeIngredient[HexContext], type="villager"):
+class VillagerIngredient(BrainsweepeeIngredient, type="villager"):
     minLevel: int
     profession: ResourceLocation | None = None
     biome: ResourceLocation | None = None


-class EntityTypeIngredient(BrainsweepeeIngredient[HexContext], type="entity_type"):
+class EntityTypeIngredient(BrainsweepeeIngredient, type="entity_type"):
     entityType: ResourceLocation


-class EntityTagIngredient(BrainsweepeeIngredient[HexContext], type="entity_tag"):
+class EntityTagIngredient(BrainsweepeeIngredient, type="entity_tag"):
     tag: ResourceLocation


-class BlockStateIngredient(HexDocModel[HexContext]):
+class BlockStateIngredient(HexDocModel):
     # TODO: tagged union
     type: Literal["block"]
     block: ResourceLocation
@@ -50,7 +47,7 @@ _MinecraftItemIngredientOrList = (


 class ModConditionalIngredient(
-    ItemIngredient[HexContext],
+    ItemIngredient,
     type="hexcasting:mod_conditional",
 ):
     default: _MinecraftItemIngredientOrList
@@ -61,7 +58,7 @@ class ModConditionalIngredient(
 # results


-class BlockState(HexDocModel[HexContext]):
+class BlockState(HexDocModel):
     name: LocalizedItem
     properties: dict[str, Any] | None = None
@@ -69,8 +66,8 @@ class BlockState(HexDocModel[HexContext]):
 # recipes


-class BrainsweepRecipe(Recipe[HexContext], type="hexcasting:brainsweep"):
+class BrainsweepRecipe(Recipe, type="hexcasting:brainsweep"):
     blockIn: BlockStateIngredient
     cost: int
-    entityIn: BrainsweepeeIngredient[HexContext]
+    entityIn: BrainsweepeeIngredient
     result: BlockState
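For reference, a hand-written snippet shaped to match `BrainsweepRecipe`'s fields; all ids and values are invented, and in practice `Recipe._pre_root` loads this from `data/*/recipes/` and injects the `id` itself:

```py
# invented example data matching the model fields above
recipe_json = {
    "type": "hexcasting:brainsweep",
    "blockIn": {"type": "block", "block": "hexcasting:some_block"},
    "cost": 100_000,
    "entityIn": {"type": "villager", "minLevel": 3},
    "result": {"name": "hexcasting:some_other_block"},
}
```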
@@ -1,18 +1,19 @@
 from types import NoneType
-from typing import Any, cast
+from typing import Any

 from pydantic import ValidationInfo, model_validator

 from hexdoc.minecraft import LocalizedStr
+from hexdoc.minecraft.i18n import I18nContext
 from hexdoc.patchouli.page import PageWithText
 from hexdoc.utils import ResourceLocation
+from hexdoc.utils.deserialize import cast_or_raise

-from ..hex_book import AnyHexContext, HexContext
 from ..pattern import RawPatternInfo


 # TODO: make anchor required (breaks because of Greater Sentinel)
-class PageWithPattern(PageWithText[AnyHexContext], type=None):
+class PageWithPattern(PageWithText, type=None):
     header: LocalizedStr
     patterns: list[RawPatternInfo]
     input: str | None = None
@@ -41,16 +42,16 @@ class PageWithPattern(PageWithText[AnyHexContext], type=None):
         return f"{self.header}{suffix}"


-class PageWithOpPattern(PageWithPattern[AnyHexContext], type=None):
+class PageWithOpPattern(PageWithPattern, type=None):
     op_id: ResourceLocation

     @model_validator(mode="before")
     def _pre_root_header(cls, values: dict[str, Any], info: ValidationInfo):
-        context = cast(HexContext, info.context)
-        if not context:
+        if not info.context:
             return values
+        context = cast_or_raise(info.context, I18nContext)

         # use the pattern name as the header
         return values | {
-            "header": context["i18n"].localize_pattern(values["op_id"]),
+            "header": context.i18n.localize_pattern(values["op_id"]),
         }
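The same refactor appears in every `mode="before"` validator in this commit: instead of blindly `typing.cast`-ing `info.context`, the validator returns early when no context was passed, then narrows the context with `cast_or_raise` to the specific class it needs. The skeleton of that pattern, self-contained; the model and context classes here are stand-ins:

```py
from typing import Any

from pydantic import BaseModel, ValidationInfo, model_validator

def cast_or_raise(val: Any, cls: type) -> Any:
    # stand-in for hexdoc.utils.deserialize.cast_or_raise
    assert isinstance(val, cls), f"Expected {cls}, got {type(val)}"
    return val

class I18nContext(BaseModel):
    lang: str  # stand-in for the real `i18n: I18n` field

class PageStandIn(BaseModel):
    header: str

    @model_validator(mode="before")
    def _pre_root_header(cls, values: dict[str, Any], info: ValidationInfo):
        if not info.context:
            return values  # context-free validation just passes through
        context = cast_or_raise(info.context, I18nContext)
        return values | {"header": f"localized for {context.lang}"}

page = PageStandIn.model_validate({}, context=I18nContext(lang="en_us"))
print(page.header)  # -> localized for en_us
```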
@@ -1,4 +1,4 @@
-from typing import Any, cast
+from typing import Any

 from pydantic import ValidationInfo, model_validator

@@ -6,22 +6,23 @@ from hexdoc.minecraft import LocalizedStr
 from hexdoc.minecraft.recipe import CraftingRecipe
 from hexdoc.patchouli.page import PageWithText, PageWithTitle
 from hexdoc.utils import ResourceLocation
+from hexdoc.utils.deserialize import cast_or_raise

 from ..hex_book import HexContext
 from ..hex_recipes import BrainsweepRecipe
 from .abstract_hex_pages import PageWithOpPattern, PageWithPattern


-class LookupPatternPage(PageWithOpPattern[HexContext], type="hexcasting:pattern"):
+class LookupPatternPage(PageWithOpPattern, type="hexcasting:pattern"):
     @model_validator(mode="before")
     def _pre_root_lookup(cls, values: dict[str, Any], info: ValidationInfo):
-        context = cast(HexContext, info.context)
-        if not context:
+        if not info.context:
             return values
+        context = cast_or_raise(info.context, HexContext)

         # look up the pattern from the op id
         op_id = ResourceLocation.from_str(values["op_id"])
-        pattern = context["patterns"][op_id]
+        pattern = context.patterns[op_id]
         return values | {
             "op_id": op_id,
             "patterns": [pattern],
@@ -29,21 +30,21 @@ class LookupPatternPage(PageWithOpPattern[HexContext], type="hexcasting:pattern"


 class ManualOpPatternPage(
-    PageWithOpPattern[HexContext],
+    PageWithOpPattern,
     type="hexcasting:manual_pattern",
 ):
     pass


 class ManualRawPatternPage(
-    PageWithPattern[HexContext],
+    PageWithPattern,
     type="hexcasting:manual_pattern",
 ):
     pass


 class ManualPatternNosigPage(
-    PageWithPattern[HexContext],
+    PageWithPattern,
     type="hexcasting:manual_pattern_nosig",
     template_type="hexcasting:manual_pattern",
 ):
@@ -51,10 +52,10 @@ class ManualPatternNosigPage(
     output: None = None


-class CraftingMultiPage(PageWithTitle[HexContext], type="hexcasting:crafting_multi"):
+class CraftingMultiPage(PageWithTitle, type="hexcasting:crafting_multi"):
     heading: LocalizedStr  # TODO: should this be renamed to header?
     recipes: list[CraftingRecipe]


-class BrainsweepPage(PageWithText[HexContext], type="hexcasting:brainsweep"):
+class BrainsweepPage(PageWithText, type="hexcasting:brainsweep"):
     recipe: BrainsweepRecipe
@@ -28,7 +28,7 @@ class Direction(Enum):
 DirectionField = Annotated[Direction, BeforeValidator(Direction.validate)]


-class RawPatternInfo(HexDocModel[Any]):
+class RawPatternInfo(HexDocModel):
     startdir: DirectionField
     signature: str
     is_per_world: bool = False
@@ -5,7 +5,7 @@ import sys
 from argparse import ArgumentParser
 from dataclasses import dataclass
 from pathlib import Path
-from typing import Self, Sequence
+from typing import Any, Self, Sequence

 from jinja2 import (
     ChoiceLoader,
@@ -15,9 +15,13 @@ from jinja2 import (
     StrictUndefined,
 )

-from hexdoc.hexcasting import HexBook
+from hexdoc.hexcasting.hex_book import HexContext, load_patterns
+from hexdoc.minecraft.i18n import I18n
+from hexdoc.patchouli.book import Book
 from hexdoc.utils import Properties
 from hexdoc.utils.cd import cd
+from hexdoc.utils.deserialize import cast_or_raise
+from hexdoc.utils.resource_loader import ModResourceLoader

 from .jinja_extensions import IncludeRawExtension, hexdoc_block, hexdoc_wrap
@@ -51,6 +55,7 @@ class Args:
         return cls(**vars(parser.parse_args(args)))

+    def __post_init__(self):
         # make paths absolute because we're cd'ing later
         self.properties_file = self.properties_file.resolve()
         if self.output_file:
             self.output_file = self.output_file.resolve()
@@ -83,7 +88,22 @@ def main(args: Args | None = None) -> None:

     # load the book
     props = Properties.load(args.properties_file)
-    book = HexBook.load(*HexBook.prepare(props))
+    with ModResourceLoader.load_all(props) as loader:
+        _, book_data = Book.load_book_json(loader, props.book)
+        book = Book.load_all(
+            book_data,
+            HexContext(
+                props=props,
+                loader=loader,
+                i18n=I18n(
+                    props=props,
+                    loader=loader,
+                    enabled=cast_or_raise(book_data["i18n"], bool),
+                ),
+                macros=cast_or_raise(book_data["macros"], dict[Any, Any]),
+                patterns=load_patterns(props),
+            ),
+        )

     # set up Jinja environment
     # TODO: SandboxedEnvironment
@@ -1,30 +1,34 @@
 from __future__ import annotations

+import json
 from dataclasses import InitVar
 from functools import total_ordering
-from typing import Any, Callable, Self, cast
+from pathlib import Path
+from typing import Any, Callable, Self

 from pydantic import ValidationInfo, model_validator
 from pydantic.dataclasses import dataclass
 from pydantic.functional_validators import ModelWrapValidatorHandler
-from typing_extensions import TypedDict

 from hexdoc.utils import (
     DEFAULT_CONFIG,
     HexDocModel,
     ItemStack,
+    ModResourceLoader,
     Properties,
     ResourceLocation,
 )
-from hexdoc.utils.deserialize import isinstance_or_raise, load_and_flatten_json_dict
-
-
-class I18nContext(TypedDict):
-    i18n: I18n
+from hexdoc.utils.deserialize import (
+    cast_or_raise,
+    decode_and_flatten_json_dict,
+    isinstance_or_raise,
+)
+from hexdoc.utils.model import HexDocValidationContext
+from hexdoc.utils.types import without_suffix


 @total_ordering
-class LocalizedStr(HexDocModel[I18nContext]):
+class LocalizedStr(HexDocModel):
     """Represents a string which has been localized."""

     key: str
@@ -53,8 +57,8 @@ class LocalizedStr(HexDocModel[I18nContext]):
         if not isinstance(value, str):
             return handler(value)

-        context = cast(I18nContext, info.context)
-        return cls._localize(context["i18n"], value)
+        context = cast_or_raise(info.context, I18nContext)
+        return cls._localize(context.i18n, value)

     @classmethod
     def _localize(cls, i18n: I18n, key: str) -> Self:
@@ -98,11 +102,12 @@ class I18n:
     """Handles localization of strings."""

     props: InitVar[Properties]
+    loader: InitVar[ModResourceLoader]
     enabled: bool

     lookup: dict[str, LocalizedStr] | None = None

-    def __post_init__(self, props: Properties):
+    def __post_init__(self, props: Properties, loader: ModResourceLoader):
         # skip loading the files if we don't need to
         self.lookup = None
         if not self.enabled:
@@ -113,7 +118,7 @@ class I18n:
         # or maybe dict[(str, str), LocalizedStr]
         # we could also use that to ensure all i18n files have the same set of keys
         raw_lookup: dict[str, str] = {}
-        for _, path in props.find_resources(
+        for _, _, data in loader.load_resources(
             type="assets",
             folder="lang",
             base_id=ResourceLocation("*", ""),
@@ -123,9 +128,10 @@ class I18n:
                 f"{props.i18n.default_lang}.flatten.json",
                 f"{props.i18n.default_lang}.flatten.json5",
             ],
+            decode=decode_and_flatten_json_dict,
+            export=self._export,
         ):
-            raw_lookup |= load_and_flatten_json_dict(path)
-        raw_lookup |= props.i18n.extra
+            raw_lookup |= data

         # validate and insert
         self.lookup = {
@@ -133,6 +139,17 @@ class I18n:
             for key, value in raw_lookup.items()
         }

+    def _export(self, path: Path, value: dict[str, str]):
+        path = without_suffix(path).with_suffix(".json")
+
+        try:
+            current = decode_and_flatten_json_dict(path.read_text("utf-8"))
+        except FileNotFoundError:
+            current = {}
+
+        with path.open("w", encoding="utf-8") as f:
+            json.dump(current | value, f)
+
     def localize(self, *keys: str, default: str | None = None) -> LocalizedStr:
         """Looks up the given string in the lang table if i18n is enabled. Otherwise,
         returns the original key.
@@ -140,7 +157,7 @@ class I18n:
         If multiple keys are provided, returns the value of the first key which exists.
         That is, subsequent keys are treated as fallbacks for the first.

-        Raises KeyError if i18n is enabled and skip_errors is False but the key has no
+        Raises KeyError if i18n is enabled and default is None but the key has no
         corresponding localized value.
         """
@@ -190,3 +207,10 @@ class I18n:
             item.i18n_key(),
         )
         return LocalizedItem(key=localized.key, value=localized.value)
+
+    def localize_key(self, key: str) -> LocalizedStr:
+        return self.localize(f"key.{key}")
+
+
+class I18nContext(HexDocValidationContext):
+    i18n: I18n
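`I18n` now takes the resource loader and pushes merged lang data back out through `_export`, so downstream consumers get a single flattened `.json`. A rough sketch of `_export`'s merge semantics; the output path is invented:

```py
import json
from pathlib import Path

def export_lang(path: Path, value: dict[str, str]) -> None:
    # mirrors I18n._export above: new keys are merged over any previous export
    try:
        current = json.loads(path.read_text("utf-8"))
    except FileNotFoundError:
        current = {}
    path.write_text(json.dumps(current | value), "utf-8")

out = Path("/tmp/en_us.json")  # invented location
export_lang(out, {"block.hexcasting.slate": "Blank Slate"})
export_lang(out, {"key.use": "Right Click"})
print(json.loads(out.read_text("utf-8")))  # both keys survive the second call
```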
@@ -1,28 +1,25 @@
 import logging
-from typing import Any, Self, cast
+from typing import Any

 from pydantic import ValidationInfo, model_validator

-from hexdoc.utils import AnyPropsContext, ResourceLocation, TypeTaggedUnion
-from hexdoc.utils.deserialize import load_json_dict
+from hexdoc.utils import ResourceLocation, TypeTaggedUnion
+from hexdoc.utils.deserialize import cast_or_raise
+from hexdoc.utils.resource_loader import LoaderContext


-class Recipe(TypeTaggedUnion[AnyPropsContext], group="hexdoc.Recipe", type=None):
+class Recipe(TypeTaggedUnion, group="hexdoc.Recipe", type=None):
     id: ResourceLocation

     group: str | None = None
     category: str | None = None

     @model_validator(mode="before")
-    def _pre_root(
-        cls,
-        values: str | ResourceLocation | dict[str, Any] | Self,
-        info: ValidationInfo,
-    ):
+    def _pre_root(cls, values: Any, info: ValidationInfo):
         """Loads the recipe from json if the actual value is a resource location str."""
-        context = cast(AnyPropsContext, info.context)
-        if not context or isinstance(values, (dict, Recipe)):
+        if not info.context:
             return values
+        context = cast_or_raise(info.context, LoaderContext)

         # if necessary, convert the id to a ResourceLocation
         match values:
@@ -30,8 +27,10 @@ class Recipe(TypeTaggedUnion[AnyPropsContext], group="hexdoc.Recipe", type=None)
                 id = ResourceLocation.from_str(values)
             case ResourceLocation():
                 id = values
+            case _:
+                return values

         # load the recipe
-        path = context["props"].find_resource("data", "recipes", id)
-        logging.getLogger(__name__).debug(f"Load {cls}\n id: {id}\n path: {path}")
-        return load_json_dict(path) | {"id": id}
+        _, data = context.loader.load_resource("data", "recipes", id)
+        logging.getLogger(__name__).debug(f"Load {cls} from {id}")
+        return data | {"id": id}
@@ -1,22 +1,16 @@
-from typing import Any
-
-from hexdoc.utils import AnyContext, NoValue, ResourceLocation, TypeTaggedUnion
+from hexdoc.utils import NoValue, ResourceLocation, TypeTaggedUnion


-class ItemIngredient(
-    TypeTaggedUnion[AnyContext],
-    group="hexdoc.ItemIngredient",
-    type=None,
-):
+class ItemIngredient(TypeTaggedUnion, group="hexdoc.ItemIngredient", type=None):
     pass


-ItemIngredientOrList = ItemIngredient[AnyContext] | list[ItemIngredient[AnyContext]]
+ItemIngredientOrList = ItemIngredient | list[ItemIngredient]


-class MinecraftItemIdIngredient(ItemIngredient[Any], type=NoValue):
+class MinecraftItemIdIngredient(ItemIngredient, type=NoValue):
     item: ResourceLocation


-class MinecraftItemTagIngredient(ItemIngredient[Any], type=NoValue):
+class MinecraftItemTagIngredient(ItemIngredient, type=NoValue):
     tag: ResourceLocation
@@ -1,5 +1,3 @@
-from typing import Any
-
 from hexdoc.utils import HexDocModel

 from ..i18n import LocalizedItem
@@ -7,26 +5,20 @@ from .abstract_recipes import Recipe
 from .ingredients import ItemIngredientOrList


-class ItemResult(HexDocModel[Any]):
+class ItemResult(HexDocModel):
     item: LocalizedItem
     count: int | None = None


-class CraftingShapedRecipe(
-    Recipe[Any],
-    type="minecraft:crafting_shaped",
-):
-    key: dict[str, ItemIngredientOrList[Any]]
+class CraftingShapedRecipe(Recipe, type="minecraft:crafting_shaped"):
+    key: dict[str, ItemIngredientOrList]
     pattern: list[str]
     result: ItemResult
     show_notification: bool


-class CraftingShapelessRecipe(
-    Recipe[Any],
-    type="minecraft:crafting_shapeless",
-):
-    ingredients: list[ItemIngredientOrList[Any]]
+class CraftingShapelessRecipe(Recipe, type="minecraft:crafting_shapeless"):
+    ingredients: list[ItemIngredientOrList]
     result: ItemResult
@@ -4,12 +4,9 @@ __all__ = [
     "Entry",
     "Page",
     "FormatTree",
-    "AnyBookContext",
-    "BookContext",
 ]

 from .book import Book
-from .book_models import AnyBookContext, BookContext
 from .category import Category
 from .entry import Entry
 from .page import Page
@@ -1,28 +1,31 @@
-from importlib import resources
-from importlib.metadata import entry_points
-from typing import Any, Generic, Literal, Self, cast
+from typing import Any, Literal, Self

 from pydantic import Field, ValidationInfo, model_validator

 from hexdoc.minecraft import I18n, LocalizedStr
+from hexdoc.minecraft.i18n import I18nContext
 from hexdoc.utils import (
-    AnyContext,
     Color,
     HexDocModel,
     ItemStack,
-    Properties,
+    LoaderContext,
+    ModResourceLoader,
     ResLoc,
     ResourceLocation,
 )
-from hexdoc.utils.deserialize import isinstance_or_raise, load_json_dict
+from hexdoc.utils.deserialize import cast_or_raise

-from .book_models import AnyBookContext, BookContext
 from .category import Category
 from .entry import Entry
-from .text import DEFAULT_MACROS, FormatTree
+from .text import FormatTree
+from .text.formatting import FormattingContext


-class Book(Generic[AnyContext, AnyBookContext], HexDocModel[AnyBookContext]):
+class BookContext(FormattingContext, LoaderContext):
+    pass
+
+
+class Book(HexDocModel):
     """Main Patchouli book class.

     Includes all data from book.json, categories/entries/pages, and i18n.
@@ -73,62 +76,44 @@ class Book(Generic[AnyContext, AnyBookContext], HexDocModel[AnyBookContext]):
     text_overflow_mode: Literal["overflow", "resize", "truncate"] | None = None

     @classmethod
-    def prepare(cls, props: Properties) -> tuple[dict[str, Any], BookContext]:
-        # read the raw dict from the json file
-        path = props.find_resource("data", "patchouli_books", props.book / "book")
-        data = load_json_dict(path)
-
-        # set up the deserialization context object
-        assert isinstance_or_raise(data["i18n"], bool)
-        assert isinstance_or_raise(data["macros"], dict)
-        context: BookContext = {
-            "props": props,
-            "i18n": I18n(props, data["i18n"]),
-            "macros": DEFAULT_MACROS | data["macros"],
-        }
-
-        return data, context
-
-    @classmethod
-    def load(cls, data: dict[str, Any], context: AnyBookContext) -> Self:
+    def load_all(cls, data: dict[str, Any], context: BookContext) -> Self:
         return cls.model_validate(data, context=context)

     @classmethod
-    def from_id(cls, book_id: ResourceLocation) -> Self:
-        # load the module for the given book id using the entry point
-        # TODO: this is untested because it needs to change for 0.11 anyway :/
-        books = entry_points(group="hexdoc.book_data")
-        book_module = books[str(book_id)].load()
-
-        # read and validate the actual data file
-        book_path = resources.files(book_module) / book_module.BOOK_DATA_PATH
-        return cls.model_validate_json(book_path.read_text("utf-8"))
+    def load_book_json(cls, loader: ModResourceLoader, id: ResourceLocation):
+        return loader.load_resource(
+            type="data",
+            folder="patchouli_books",
+            id=id / "book",
+        )

     @model_validator(mode="before")
     def _pre_root(cls, data: dict[str, Any], info: ValidationInfo) -> dict[str, Any]:
-        context = cast(AnyBookContext, info.context)
-        if not context:
+        if not info.context:
             return data
+        context = cast_or_raise(info.context, I18nContext)

         return data | {
-            "i18n_data": context["i18n"],
+            "i18n_data": context.i18n,
         }

     @model_validator(mode="after")
     def _post_root(self, info: ValidationInfo) -> Self:
         """Loads categories and entries."""
-        context = cast(AnyBookContext, info.context)
-        if not context:
+        if not info.context:
             return self
+        context = cast_or_raise(info.context, BookContext)

         # load categories
         self._categories: dict[ResourceLocation, Category] = Category.load_all(context)

         # load entries
-        for id, path in context["props"].find_book_assets("entries"):
-            entry = Entry.load(id, path, context)
+        for resource_dir, id, data in context.loader.load_book_assets("entries"):
+            entry = Entry.load(id, data, context)

             # i used the entry to insert the entry (pretty sure thanos said that)
-            self._categories[entry.category_id].entries.append(entry)
+            if not resource_dir.external:
+                self._categories[entry.category_id].entries.append(entry)

         # we inserted a bunch of entries in no particular order, so sort each category
         for category in self._categories.values():
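`BookContext` is now built by inheritance rather than as a `TypedDict`, so one context object can be handed to `model_validate` and narrowed to whichever slice each validator needs (`I18nContext`, `PropsContext`, `LoaderContext`, ...). A toy model of the layering; the class names follow this diff, but the field types and exact base classes are stand-ins:

```py
from pydantic import BaseModel

class PropsContext(BaseModel):
    props: str  # really a Properties model

class I18nContext(BaseModel):
    i18n: str  # really an I18n instance

class LoaderContext(PropsContext):
    loader: str  # really a ModResourceLoader; assuming it also carries props

class FormattingContext(I18nContext, PropsContext):
    macros: dict[str, str]

class BookContext(FormattingContext, LoaderContext):
    pass

ctx = BookContext(props="...", i18n="...", loader="...", macros={})
# a single object satisfies every narrower context type
assert isinstance(ctx, (PropsContext, I18nContext, LoaderContext, FormattingContext))
```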
@@ -1,37 +0,0 @@
-import logging
-from abc import ABC
-from pathlib import Path
-from typing import Generic, Self, TypeVar, dataclass_transform
-
-from hexdoc.utils import AnyContext, ResourceLocation
-from hexdoc.utils.deserialize import load_json_dict
-from hexdoc.utils.model import HexDocModel
-
-from .text.formatting import FormatContext
-
-
-class BookContext(FormatContext):
-    pass
-
-
-AnyBookContext = TypeVar("AnyBookContext", bound=BookContext)
-
-
-@dataclass_transform()
-class BookFileModel(
-    Generic[AnyContext, AnyBookContext],
-    HexDocModel[AnyBookContext],
-    ABC,
-):
-    id: ResourceLocation
-
-    @classmethod
-    def load(cls, id: ResourceLocation, path: Path, context: AnyBookContext) -> Self:
-        logging.getLogger(__name__).debug(f"Load {cls}\n path: {path}")
-
-        try:
-            data = load_json_dict(path) | {"id": id}
-            return cls.model_validate(data, context=context)
-        except Exception as e:
-            e.add_note(f"File: {path}")
-            raise
@@ -3,15 +3,15 @@ from typing import Self
 from pydantic import Field

 from hexdoc.minecraft import LocalizedStr
-from hexdoc.utils import ItemStack, ResourceLocation
+from hexdoc.utils import ItemStack, LoaderContext, ResourceLocation
 from hexdoc.utils.types import Sortable, sorted_dict

-from .book_models import BookContext, BookFileModel
+from ..utils.model import HexDocFileModel
 from .entry import Entry
 from .text import FormatTree


-class Category(BookFileModel[BookContext, BookContext], Sortable):
+class Category(HexDocFileModel, Sortable):
     """Category with pages and localizations.

     See: https://vazkiimods.github.io/Patchouli/docs/reference/category-json
@@ -32,12 +32,12 @@ class Category(BookFileModel[BookContext, BookContext], Sortable):
     secret: bool = False

     @classmethod
-    def load_all(cls, context: BookContext):
+    def load_all(cls, context: LoaderContext):
         categories: dict[ResourceLocation, Self] = {}

         # load
-        for id, path in context["props"].find_book_assets("categories"):
-            category = cls.load(id, path, context)
+        for _, id, data in context.loader.load_book_assets("categories"):
+            category = cls.load(id, data, context)
             categories[id] = category

         # late-init _parent_cmp_key
@@ -1,16 +1,16 @@
-from typing import cast
-
 from pydantic import Field, ValidationInfo, model_validator

 from hexdoc.minecraft import LocalizedStr
 from hexdoc.utils import Color, ItemStack, ResourceLocation
+from hexdoc.utils.deserialize import cast_or_raise
+from hexdoc.utils.properties import PropsContext
 from hexdoc.utils.types import Sortable

-from .book_models import BookContext, BookFileModel
+from ..utils.model import HexDocFileModel
 from .page.pages import Page


-class Entry(BookFileModel[BookContext, BookContext], Sortable):
+class Entry(HexDocFileModel, Sortable):
     """Entry json file, with pages and localizations.

     See: https://vazkiimods.github.io/Patchouli/docs/reference/entry-json
@@ -22,7 +22,7 @@ class Entry(BookFileModel[BookContext, BookContext], Sortable):
     name: LocalizedStr
     category_id: ResourceLocation = Field(alias="category")
     icon: ItemStack
-    pages: list[Page[BookContext]]
+    pages: list[Page]

     # optional (entry.json)
     advancement: ResourceLocation | None = None
@@ -43,12 +43,12 @@ class Entry(BookFileModel[BookContext, BookContext], Sortable):

     @model_validator(mode="after")
     def _check_is_spoiler(self, info: ValidationInfo):
-        context = cast(BookContext | None, info.context)
-        if not context or self.advancement is None:
+        if not info.context or self.advancement is None:
             return self
+        context = cast_or_raise(info.context, PropsContext)

         self.is_spoiler = any(
             self.advancement.match(spoiler)
-            for spoiler in context["props"].spoilered_advancements
+            for spoiler in context.props.spoilered_advancements
         )
         return self
@@ -6,11 +6,10 @@ from pydantic.functional_validators import ModelWrapValidatorHandler
 from hexdoc.minecraft import LocalizedStr
 from hexdoc.utils import ResourceLocation, TypeTaggedUnion

-from ..book_models import AnyBookContext
 from ..text import FormatTree


-class Page(TypeTaggedUnion[AnyBookContext], group="hexdoc.Page", type=None):
+class Page(TypeTaggedUnion, group="hexdoc.Page", type=None):
     """Base class for Patchouli page types.

     See: https://vazkiimods.github.io/Patchouli/docs/patchouli-basics/page-types
@@ -26,7 +25,7 @@ class Page(TypeTaggedUnion[AnyBookContext], group="hexdoc.Page", type=None):
     def __init_subclass__(
         cls,
         *,
-        type: str | None,
+        type: str | None = None,  # FIXME:
         template_type: str | None = None,
     ) -> None:
         super().__init_subclass__(group=None, type=type)
@@ -52,9 +51,9 @@ class Page(TypeTaggedUnion[AnyBookContext], group="hexdoc.Page", type=None):
         return self.__template


-class PageWithText(Page[AnyBookContext], type=None):
+class PageWithText(Page, type=None):
     text: FormatTree | None = None


-class PageWithTitle(PageWithText[AnyBookContext], type=None):
+class PageWithTitle(PageWithText, type=None):
     title: LocalizedStr | None = None
@@ -1,24 +1,25 @@
-from typing import Any
+from typing import Any, Self
+
+from pydantic import model_validator

 from hexdoc.minecraft import LocalizedItem, LocalizedStr
 from hexdoc.minecraft.recipe import CraftingRecipe
 from hexdoc.utils import Entity, ItemStack, ResourceLocation

-from ..book_models import BookContext
 from ..text import FormatTree
 from .abstract_pages import Page, PageWithText, PageWithTitle


-class TextPage(PageWithTitle[BookContext], type="patchouli:text"):
+class TextPage(PageWithTitle, type="patchouli:text"):
     text: FormatTree


-class ImagePage(PageWithTitle[BookContext], type="patchouli:image"):
+class ImagePage(PageWithTitle, type="patchouli:image"):
     images: list[ResourceLocation]
     border: bool = False


-class CraftingPage(PageWithTitle[BookContext], type="patchouli:crafting"):
+class CraftingPage(PageWithTitle, type="patchouli:crafting"):
     recipe: CraftingRecipe
     recipe2: CraftingRecipe | None = None

@@ -27,12 +28,12 @@ class CraftingPage(PageWithTitle[BookContext], type="patchouli:crafting"):
         return [r for r in [self.recipe, self.recipe2] if r is not None]


-class SmeltingPage(PageWithTitle[BookContext], type="patchouli:smelting"):
+class SmeltingPage(PageWithTitle, type="patchouli:smelting"):
     recipe: ItemStack
     recipe2: ItemStack | None = None


-class MultiblockPage(PageWithText[BookContext], type="patchouli:multiblock"):
+class MultiblockPage(PageWithText, type="patchouli:multiblock"):
     name: LocalizedStr
     multiblock_id: ResourceLocation | None = None
     # TODO: https://vazkiimods.github.io/Patchouli/docs/patchouli-basics/multiblocks/
@@ -40,12 +41,14 @@ class MultiblockPage(PageWithText[BookContext], type="patchouli:multiblock"):
     multiblock: Any | None = None
     enable_visualize: bool = True

-    def __post_init__(self):
+    @model_validator(mode="after")
+    def _check_multiblock(self) -> Self:
         if self.multiblock_id is None and self.multiblock is None:
             raise ValueError(f"One of multiblock_id or multiblock must be set\n{self}")
+        return self


-class EntityPage(PageWithText[BookContext], type="patchouli:entity"):
+class EntityPage(PageWithText, type="patchouli:entity"):
     entity: Entity
     scale: float = 1
     offset: float = 0
@@ -54,7 +57,7 @@ class EntityPage(PageWithText[BookContext], type="patchouli:entity"):
     name: LocalizedStr | None = None


-class SpotlightPage(PageWithTitle[BookContext], type="patchouli:spotlight"):
+class SpotlightPage(PageWithTitle, type="patchouli:spotlight"):
     item: LocalizedItem  # TODO: patchi says this is an ItemStack, so this might break
     link_recipe: bool = False

@@ -64,18 +67,18 @@ class LinkPage(TextPage, type="patchouli:link"):
     link_text: LocalizedStr


-class RelationsPage(PageWithTitle[BookContext], type="patchouli:relations"):
+class RelationsPage(PageWithTitle, type="patchouli:relations"):
     entries: list[ResourceLocation]
     title: LocalizedStr = LocalizedStr.with_value("Related Chapters")


-class QuestPage(PageWithTitle[BookContext], type="patchouli:quest"):
+class QuestPage(PageWithTitle, type="patchouli:quest"):
     trigger: ResourceLocation | None = None
     title: LocalizedStr = LocalizedStr.with_value("Objective")


 class EmptyPage(
-    Page[BookContext],
+    Page,
     type="patchouli:empty",
     template_type="patchouli:page",
 ):
@@ -6,15 +6,16 @@ import re
 from abc import ABC, abstractmethod
 from contextlib import nullcontext
 from enum import Enum, auto
-from typing import Any, Literal, Self, cast
+from typing import Literal, Self

-from pydantic import ValidationInfo, model_validator
+from pydantic import ValidationInfo, field_validator, model_validator
 from pydantic.dataclasses import dataclass
 from pydantic.functional_validators import ModelWrapValidatorHandler

 from hexdoc.minecraft import LocalizedStr
-from hexdoc.minecraft.i18n import I18nContext
+from hexdoc.minecraft.i18n import I18n, I18nContext
 from hexdoc.utils import DEFAULT_CONFIG, HexDocModel, Properties, PropsContext
+from hexdoc.utils.deserialize import cast_or_raise
 from hexdoc.utils.types import TryGetEnum

 from .html import HTMLElement, HTMLStream
@@ -107,11 +108,11 @@ ParagraphStyleType = Literal[SpecialStyleType.paragraph]
 ColorStyleType = Literal[SpecialStyleType.color]


-class Style(ABC, HexDocModel[Any], frozen=True):
+class Style(ABC, HexDocModel, frozen=True):
     type: CommandStyleType | FunctionStyleType | SpecialStyleType

     @staticmethod
-    def parse(style_str: str, props: Properties) -> Style | _CloseTag | str:
+    def parse(style_str: str, props: Properties, i18n: I18n) -> Style | _CloseTag | str:
         # direct text replacements
         if style_str in _REPLACEMENTS:
             return _REPLACEMENTS[style_str]
@@ -141,8 +142,8 @@ class Style(ABC, HexDocModel[Any], frozen=True):
         name, value = style_str.split(":", 1)

         # keys
-        if name == "k" and (key := props.i18n.keys.get(value)):
-            return key
+        if name == "k":
+            return str(i18n.localize_key(value))

         # all the other functions
         if style_type := FunctionStyleType.get(name):
@@ -242,16 +243,21 @@ class FunctionStyle(Style, frozen=True):

 # intentionally not inheriting from Style, because this is basically an implementation
 # detail of the parser and should not be returned or exposed anywhere
-class _CloseTag(HexDocModel[Any], frozen=True):
+class _CloseTag(HexDocModel, frozen=True):
     type: FunctionStyleType | BaseStyleType | ColorStyleType


 _FORMAT_RE = re.compile(r"\$\(([^)]*)\)")


-class FormatContext(I18nContext, PropsContext):
+class FormattingContext(I18nContext, PropsContext):
     macros: dict[str, str]

+    @field_validator("macros")
+    @classmethod
+    def _add_default_macros(cls, macros: dict[str, str]) -> dict[str, str]:
+        return DEFAULT_MACROS | macros
+

 @dataclass(config=DEFAULT_CONFIG)
 class FormatTree:
@@ -259,7 +265,13 @@ class FormatTree:
     children: list[FormatTree | str]  # this can't be Self, it breaks Pydantic

     @classmethod
-    def format(cls, string: str, macros: dict[str, str], props: Properties) -> Self:
+    def format(
+        cls,
+        string: str,
+        macros: dict[str, str],
+        props: Properties,
+        i18n: I18n,
+    ) -> Self:
         # resolve macros
         # TODO: use ahocorasick? this feels inefficient
         old_string = None
@@ -280,7 +292,7 @@ class FormatTree:
             text_since_prev_style.append(leading_text)
             last_end = match.end()

-            match Style.parse(match[1], props):
+            match Style.parse(match[1], props, i18n):
                 case str(replacement):
                     # str means "use this instead of the original value"
                     text_since_prev_style.append(replacement)
@@ -335,10 +347,18 @@ class FormatTree:
         handler: ModelWrapValidatorHandler[Self],
         info: ValidationInfo,
     ):
-        context = cast(FormatContext, info.context)
-        if not context or isinstance(value, FormatTree):
+        if not info.context or isinstance(value, FormatTree):
             return handler(value)
+        context = cast_or_raise(info.context, FormattingContext)

         if isinstance(value, str):
-            value = context["i18n"].localize(value)
-        return cls.format(value.value, context["macros"], context["props"])
+            value = context.i18n.localize(value)
+        return cls.format(
+            value.value,
+            macros=context.macros,
+            props=context.props,
+            i18n=context.i18n,
+        )


 FormatTree._wrap_root
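`$(k:...)` directives in Patchouli format strings are now resolved through `I18n.localize_key` (which prefixes `key.`) instead of the deleted `[i18n.keys]` table. A tiny illustration of the dispatch; the regex matches `formatting.py`, while the keybind table is an invented stand-in for the flattened lang lookup:

```py
import re

_FORMAT_RE = re.compile(r"\$\(([^)]*)\)")  # same regex as formatting.py

KEYBINDS = {"key.use": "Right Click"}  # stand-in for the lang table

def render(text: str) -> str:
    def sub(match: re.Match[str]) -> str:
        style = match[1]
        if style.startswith("k:"):
            return KEYBINDS[f"key.{style[2:]}"]  # i18n.localize_key(value)
        return ""  # every other style type is handled elsewhere
    return _FORMAT_RE.sub(sub, text)

print(render("Press $(k:use)$() on the slate."))
# -> Press Right Click on the slate.
```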
@@ -2,24 +2,26 @@ __all__ = [
     "HexDocModel",
     "InternallyTaggedUnion",
     "Color",
-    "AnyContext",
+    "HexDocValidationContext",
     "DEFAULT_CONFIG",
     "NoValue",
     "NoValueType",
     "TagValue",
-    "AnyPropsContext",
     "Properties",
     "PropsContext",
     "Entity",
     "ItemStack",
     "ResLoc",
     "ResourceLocation",
+    "ModResourceLoader",
     "TypeTaggedUnion",
+    "LoaderContext",
 ]

-from .model import DEFAULT_CONFIG, AnyContext, HexDocModel
-from .properties import AnyPropsContext, Properties, PropsContext
+from .model import DEFAULT_CONFIG, HexDocModel, HexDocValidationContext
+from .properties import Properties, PropsContext
 from .resource import Entity, ItemStack, ResLoc, ResourceLocation
+from .resource_loader import LoaderContext, ModResourceLoader
 from .tagged_union import (
     InternallyTaggedUnion,
     NoValue,
@@ -1,6 +1,4 @@
-import logging
 import re
-from pathlib import Path
 from typing import Any, TypeGuard, TypeVar, get_origin

 import pyjson5
@@ -11,7 +9,6 @@ _T_cov = TypeVar("_T_cov", covariant=True)
 _DEFAULT_MESSAGE = "Expected any of {expected}, got {actual}: {value}"


-# there may well be a better way to do this but i don't know what it is
 def isinstance_or_raise(
     val: Any,
     class_or_tuple: type[_T] | tuple[type[_T], ...],
@@ -37,31 +34,35 @@ def isinstance_or_raise(
     return True


+def cast_or_raise(
+    val: Any,
+    class_or_tuple: type[_T] | tuple[type[_T], ...],
+    message: str = _DEFAULT_MESSAGE,
+) -> _T:
+    assert isinstance_or_raise(val, class_or_tuple, message)
+    return val
+
+
 JSONDict = dict[str, "JSONValue"]

 JSONValue = JSONDict | list["JSONValue"] | str | int | float | bool | None


-def load_json_dict(path: Path) -> JSONDict:
-    logging.getLogger(__name__).debug(f"Load json from {path}")
-    data = pyjson5.decode(path.read_text("utf-8"))
-    assert isinstance_or_raise(data, dict)
-    return data
+def decode_json_dict(data: str) -> JSONDict:
+    decoded = pyjson5.decode(data)
+    assert isinstance_or_raise(decoded, dict)
+    return decoded


 # implement pkpcpbp's flattening in python
 # https://github.com/gamma-delta/PKPCPBP/blob/786194a590f/src/main/java/at/petrak/pkpcpbp/filters/JsonUtil.java
-def load_and_flatten_json_dict(path: Path) -> dict[str, str]:
-    logging.getLogger(__name__).debug(f"Load and flatten json from {path}")
+def decode_and_flatten_json_dict(data: str) -> dict[str, str]:
+    # replace `\<LF> foobar` with `\<LF>foobar`
+    data = re.sub(r"\\\n\s*", "\\\n", data)

-    # load file, replace `\<LF> foobar` with `\<LF>foobar`
-    json_str = re.sub(r"\\\n\s*", "\\\n", path.read_text("utf-8"))
-
-    # decode json5 and flatten
-    data = pyjson5.decode(json_str)
-    assert isinstance_or_raise(data, JSONDict)
-
-    return _flatten_inner(data, "")
+    # decode and flatten
+    decoded = decode_json_dict(data)
+    return _flatten_inner(decoded, "")


 def _flatten_inner(obj: JSONDict, prefix: str) -> dict[str, str]:
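`cast_or_raise` is the runtime-checked replacement for the `typing.cast` calls deleted throughout this commit: it narrows the static type like `cast`, but actually verifies the value. Minimal demonstration of the behavior, using a simplified version of the helpers above:

```py
from typing import Any

_DEFAULT_MESSAGE = "Expected any of {expected}, got {actual}: {value}"

def isinstance_or_raise(val, class_or_tuple, message=_DEFAULT_MESSAGE) -> bool:
    # simplified version of the helper above
    if not isinstance(val, class_or_tuple):
        raise TypeError(message.format(expected=class_or_tuple, actual=type(val), value=val))
    return True

def cast_or_raise(val: Any, class_or_tuple, message=_DEFAULT_MESSAGE):
    assert isinstance_or_raise(val, class_or_tuple, message)
    return val

x: Any = "hello"
print(cast_or_raise(x, str))  # "hello", statically narrowed to str in the real version
try:
    cast_or_raise(x, dict)
except TypeError as e:
    print(e)  # fails loudly instead of silently lying like typing.cast
```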
@@ -1,14 +1,19 @@
-from typing import TYPE_CHECKING, Any, Generic, TypeVar, dataclass_transform
+from __future__ import annotations
+
+import logging
+from abc import ABC
+from typing import TYPE_CHECKING, Any, Self, dataclass_transform

 from pydantic import BaseModel, ConfigDict, model_validator
-from pydantic.config import ConfigDict
-from typing_extensions import TypedDict
+
+from .deserialize import JSONDict

 if TYPE_CHECKING:
     from pydantic.root_model import Model

+    from .resource import ResourceLocation

-AnyContext = TypeVar("AnyContext", bound=TypedDict)

 DEFAULT_CONFIG = ConfigDict(
     extra="forbid",
@@ -16,37 +21,40 @@ DEFAULT_CONFIG = ConfigDict(

 @dataclass_transform()
-class HexDocModel(Generic[AnyContext], BaseModel):
+class HexDocModel(BaseModel):
     model_config = DEFAULT_CONFIG

-    # override the context type to use a generic TypedDict
-    # TODO: open an issue on Pydantic for this
+    # pydantic core actually allows PyAny for context, so I'm pretty sure this is fine
     if TYPE_CHECKING:

         @classmethod
-        def model_validate(  # type: ignore
+        def model_validate(  # pyright: ignore[reportIncompatibleMethodOverride]
             cls: type[Model],
             obj: Any,
             *,
             strict: bool | None = None,
             from_attributes: bool | None = None,
-            context: AnyContext | None = None,
+            context: HexDocValidationContext | None = None,
         ) -> Model:
             ...

         @classmethod
-        def model_validate_json(  # type: ignore
+        def model_validate_json(  # pyright: ignore[reportIncompatibleMethodOverride]
             cls: type[Model],
             json_data: str | bytes | bytearray,
             *,
             strict: bool | None = None,
-            context: AnyContext | None = None,
+            context: HexDocValidationContext | None = None,
         ) -> Model:
             ...


+class HexDocValidationContext(HexDocModel):
+    pass
+
+
 @dataclass_transform()
-class HexDocStripHiddenModel(HexDocModel[AnyContext]):
+class HexDocStripHiddenModel(HexDocModel):
     """Base model which removes all keys starting with _ before validation."""

     @model_validator(mode="before")
@@ -59,3 +67,18 @@ class HexDocStripHiddenModel(HexDocModel[AnyContext]):
             for key, value in values.items()
             if not (isinstance(key, str) and key.startswith("_"))
         }
+
+
+@dataclass_transform()
+class HexDocFileModel(HexDocModel, ABC):
+    id: ResourceLocation
+
+    @classmethod
+    def load(
+        cls,
+        id: ResourceLocation,
+        data: JSONDict,
+        context: HexDocValidationContext,
+    ) -> Self:
+        logging.getLogger(__name__).debug(f"Load {cls} at {id}")
+        return cls.model_validate(data | {"id": id}, context=context)
@ -1,19 +1,15 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import re
|
||||
from collections.abc import Iterator
|
||||
from pathlib import Path
|
||||
from typing import Annotated, Any, Literal, Self, TypeVar
|
||||
from typing import Annotated, Any, Self
|
||||
|
||||
from pydantic import AfterValidator, Field, HttpUrl
|
||||
from typing_extensions import TypedDict
|
||||
from pydantic import AfterValidator, HttpUrl
|
||||
|
||||
from .model import HexDocStripHiddenModel
|
||||
from .resource import ResourceLocation
|
||||
from .model import HexDocModel, HexDocStripHiddenModel, HexDocValidationContext
|
||||
from .resource import ResourceDir, ResourceLocation
|
||||
from .toml_placeholders import load_toml_with_placeholders
|
||||
|
||||
ResourceType = Literal["assets", "data"]
|
||||
|
||||
NoTrailingSlashHttpUrl = Annotated[
|
||||
str,
|
||||
HttpUrl,
|
||||
|
@ -21,12 +17,16 @@ NoTrailingSlashHttpUrl = Annotated[
|
|||
]
|
||||
|
||||
|
||||
class PatternStubProps(HexDocStripHiddenModel[Any]):
|
||||
class HexDocMeta(HexDocModel):
|
||||
book_url: NoTrailingSlashHttpUrl
|
||||
|
||||
|
||||
class PatternStubProps(HexDocStripHiddenModel):
|
||||
path: Path
|
||||
regex: re.Pattern[str]
|
||||
|
||||
|
||||
class XplatProps(HexDocStripHiddenModel[Any]):
|
||||
class XplatProps(HexDocStripHiddenModel):
|
||||
src: Path
|
||||
pattern_stubs: list[PatternStubProps] | None = None
|
||||
resources: Path
|
||||
|
@ -37,13 +37,11 @@ class PlatformProps(XplatProps):
|
|||
tags: Path
|
||||
|
||||
|
||||
class I18nProps(HexDocStripHiddenModel[Any]):
|
||||
class I18nProps(HexDocStripHiddenModel):
|
||||
default_lang: str
|
||||
extra: dict[str, str] = Field(default_factory=dict)
|
||||
keys: dict[str, str] = Field(default_factory=dict)
|
||||
|
||||
|
||||
class Properties(HexDocStripHiddenModel[Any]):
|
||||
class Properties(HexDocStripHiddenModel):
|
||||
modid: str
|
||||
book: ResourceLocation
|
||||
url: NoTrailingSlashHttpUrl
|
||||
|
@ -52,7 +50,8 @@ class Properties(HexDocStripHiddenModel[Any]):
|
|||
"""If true, the style `$(0)` changes the text color to black; otherwise it resets
|
||||
the text color to the default."""
|
||||
|
||||
resource_dirs: list[Path]
|
||||
resource_dirs: list[ResourceDir]
|
||||
export_dir: Path
|
||||
|
||||
spoilered_advancements: set[ResourceLocation]
|
||||
entry_id_blacklist: set[ResourceLocation]
|
||||
|
@@ -82,106 +81,6 @@ class Properties(HexDocStripHiddenModel[Any]):
        base_url = self.base_asset_urls[id.namespace]
        return f"{base_url}/{id.file_path_stub('assets').as_posix()}"

    def find_book_assets(self, folder: Literal["categories", "entries", "templates"]):
        return self.find_resources(
            type="assets",
            folder="patchouli_books",
            base_id=self.book / self.i18n.default_lang / folder,
        )

    def find_resource(
        self,
        type: ResourceType,
        folder: str,
        id: ResourceLocation,
    ) -> Path:
        """Find the first file with this resource location in `resource_dirs`.

        If no file extension is provided, `.json` is assumed.

        Raises FileNotFoundError if the file does not exist.
        """

        # check in each directory, return the first that exists
        path_stub = id.file_path_stub(type, folder)
        for resource_dir in self.resource_dirs:
            path = resource_dir / path_stub
            if path.is_file():
                return path

        raise FileNotFoundError(f"Path {path_stub} not found in any resource dir")

    def find_resources(
        self,
        type: ResourceType,
        folder: str,
        base_id: ResourceLocation,
        glob: str | list[str] = "**/*",
        reverse: bool = True,
    ) -> Iterator[tuple[ResourceLocation, Path]]:
        """Search for a glob under a given resource location in all of `resource_dirs`.

        The path of the returned resource location is relative to the path of base_id.

        If no file extension is provided for glob, `.json` is assumed.

        Raises FileNotFoundError if no files were found in any resource dir.

        For example:
        ```py
        props.find_resources(
            type="assets",
            folder="lang",
            base_id=ResLoc("*", "subdir"),
            glob="*.flatten.json5",
        )

        # [(hexcasting:en_us, .../resources/assets/hexcasting/lang/subdir/en_us.json)]
        ```
        """

        # eg. assets/*/lang/subdir
        base_path_stub = base_id.file_path_stub(type, folder, assume_json=False)

        # glob for json files if not provided
        globs = [glob] if isinstance(glob, str) else glob
        for i in range(len(globs)):
            if not Path(globs[i]).suffix:
                globs[i] += ".json"

        # find all files matching the resloc
        found_any = False
        for resource_dir in (
            reversed(self.resource_dirs) if reverse else self.resource_dirs
        ):
            # eg. .../resources/assets/*/lang/subdir
            for base_path in resource_dir.glob(base_path_stub.as_posix()):
                for glob_ in globs:
                    # eg. .../resources/assets/hexcasting/lang/subdir/*.flatten.json5
                    for path in base_path.glob(glob_):
                        # only yield actual files
                        if not path.is_file():
                            continue
                        found_any = True

                        # determine the resource location of this file
                        # eg. en_us.flatten.json5 -> hexcasting:en_us
                        path_stub = path.relative_to(base_path)
                        while path_stub.suffix:
                            path_stub = path_stub.with_suffix("")
                        id = ResourceLocation(base_id.namespace, path_stub.as_posix())

                        yield id, path

        # if we never yielded any files, raise an error
        if not found_any:
            raise FileNotFoundError(
                f"No files found under {base_path_stub}/{globs} in any resource dir"
            )


class PropsContext(TypedDict):
class PropsContext(HexDocValidationContext):
    props: Properties


AnyPropsContext = TypeVar("AnyPropsContext", bound=PropsContext)
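
Since `resource_dirs` is now a `list[ResourceDir]` instead of `list[Path]`, each entry in the properties TOML validates into a resource dir model. A minimal sketch of the accepted shapes (the paths and modid below are illustrative, not from the real config):

```py
from pydantic import TypeAdapter

from hexdoc.utils.resource import ResourceDir

# plain strings, tables with flags, and plugin modids are all accepted
dirs = TypeAdapter(list[ResourceDir]).validate_python([
    "src/main/resources",                                    # -> PathResourceDir
    {"path": "src/generated/resources", "reexport": False},  # explicit flags
    {"modid": "examplemod"},                                 # -> EntryPointResourceDir
])
```
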
@@ -1,19 +1,35 @@
# pyright: reportPrivateUsage=false
# pyright: reportUnknownArgumentType=information, reportUnknownMemberType=information

# this file is used by basically everything
# so if it's in literally any namespace, everything fucking dies from circular deps
# so if it's in literally any other place, everything fucking dies from circular deps
# basically, just leave it here

import re
from fnmatch import fnmatch
from pathlib import Path
from typing import Any, ClassVar, Literal, Self
from __future__ import annotations

from pydantic import field_validator, model_serializer, model_validator
import re
from abc import ABC, abstractmethod
from collections.abc import Iterator
from contextlib import ExitStack, contextmanager
from fnmatch import fnmatch
from importlib import metadata
from pathlib import Path
from typing import Any, ClassVar, ContextManager, Iterable, Literal, Self

import importlib_resources as resources
from importlib_resources.abc import Traversable
from pydantic import (
    FieldValidationInfo,
    field_validator,
    model_serializer,
    model_validator,
)
from pydantic.dataclasses import dataclass
from pydantic.functional_validators import ModelWrapValidatorHandler

from .model import DEFAULT_CONFIG
from .model import DEFAULT_CONFIG, HexDocModel

HEXDOC_EXPORTS_GROUP = "hexdoc.export"
"""Entry point group name for bundled hexdoc data."""


def _make_regex(count: bool = False, nbt: bool = False) -> re.Pattern[str]:
@@ -49,7 +65,7 @@ class BaseResourceLocation:

    @model_validator(mode="wrap")
    @classmethod
    def _pre_root(cls, values: str | Any, handler: ModelWrapValidatorHandler[Self]):
    def _pre_root(cls, values: Any, handler: ModelWrapValidatorHandler[Self]):
        # before validating the fields, if it's a string instead of a dict, convert it
        if isinstance(values, str):
            return cls.from_str(values)
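
This wrap validator means any field typed as `ResourceLocation` also accepts a plain `namespace:path` string. A minimal sketch (the `Icon` model is hypothetical):

```py
from hexdoc.utils.model import HexDocModel
from hexdoc.utils.resource import ResourceLocation

class Icon(HexDocModel):  # hypothetical model
    item: ResourceLocation

# the string is converted via from_str before field validation
icon = Icon.model_validate({"item": "minecraft:amethyst_shard"})
assert icon.item.namespace == "minecraft"
assert icon.item.path == "amethyst_shard"
```
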
@@ -86,6 +102,16 @@ class ResourceLocation(BaseResourceLocation, regex=_make_regex()):
    def href(self) -> str:
        return f"#{self.path}"

    def with_namespace(self, namespace: str):
        """Returns a copy of this ResourceLocation with the given namespace."""
        return ResourceLocation(namespace, self.path)

    def with_path(self, path: str | Path):
        """Returns a copy of this ResourceLocation with the given path."""
        if isinstance(path, Path):
            path = path.as_posix()
        return ResourceLocation(self.namespace, path)

    def match(self, pattern: Self) -> bool:
        return fnmatch(str(self), str(pattern))

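
`match` compares the stringified locations with `fnmatch`, so glob wildcards work in either part of the pattern; for example:

```py
book = ResourceLocation("hexcasting", "thehexbook")

assert book.match(ResourceLocation("hexcasting", "*"))
assert book.match(ResourceLocation("*", "thehexbook"))
assert not book.match(ResourceLocation("minecraft", "*"))
```
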
@@ -156,3 +182,102 @@ class Entity(BaseResourceLocation, regex=_make_regex(nbt=True)):
        if self.nbt is not None:
            s += self.nbt
        return s


ResourceType = Literal["assets", "data"]


class BaseResourceDir(HexDocModel, ABC):
    external: bool
    reexport: bool
    """If not set, the default value will be `not self.external`.

    Must be defined AFTER `external` in the Pydantic model.
    """

    @abstractmethod
    def load(self) -> ContextManager[Iterable[PathResourceDir]]:
        ...

    @field_validator("reexport", mode="before")
    def _default_reexport(cls, value: Any, info: FieldValidationInfo):
        if value is None and "external" in info.data:
            return not info.data["external"]
        return value

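
Because `external` is declared before `reexport`, it's already in `info.data` when the `reexport` validator runs, so an explicit `None` (an unset config value) resolves to `not external`. A quick sketch using `PathResourceDir`, defined just below:

```py
# unset (None) reexport falls back to `not external`
rd = PathResourceDir.model_validate(
    {"path": "vendor/resources", "external": True, "reexport": None}
)
assert rd.reexport is False
```
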
class PathResourceDir(BaseResourceDir):
    path: Path

    # direct paths are probably from this mod
    external: bool = False
    reexport: bool = True

    @contextmanager
    def load(self):
        yield [self]

    @model_validator(mode="before")
    def _pre_root(cls: Any, value: Any):
        # treat plain strings as paths
        if isinstance(value, str):
            return {"path": value}
        return value


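The before-validator is what lets `resource_dirs` entries be bare strings; both spellings below validate to the same model:

```py
a = PathResourceDir.model_validate("src/main/resources")
b = PathResourceDir.model_validate({"path": "src/main/resources"})
assert a == b
```
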
class EntryPointResourceDir(BaseResourceDir):
    modid: str

    # entry points are probably from other mods/packages
    external: bool = True
    reexport: bool = False

    @contextmanager
    def load(self):
        with ExitStack() as stack:
            # NOT "yield from"
            yield [
                PathResourceDir(
                    path=stack.enter_context(resources.as_file(traversable)),
                    external=self.external,
                    reexport=self.reexport,
                )
                for traversable in self._load_traversables()
            ]

    def _load_traversables(self) -> Iterator[Traversable]:
        entry_point = self._entry_point()
        base_traversable = resources.files(entry_point.module)

        match entry_point.load():
            case str(stub) | Path(stub):
                yield base_traversable / stub

            case [*stubs]:
                for stub in stubs:
                    # this will probably give some vague error if stub isn't a StrPath
                    yield base_traversable / stub

            case value:
                raise TypeError(
                    f"Expected a string/path or sequence of strings/paths at {entry_point}, got {type(value)}: {value}"
                )

    def _entry_point(self) -> metadata.EntryPoint:
        match metadata.entry_points(group=HEXDOC_EXPORTS_GROUP, name=self.modid):
            case []:
                # too cold
                raise ModuleNotFoundError(
                    f"No entry points found in group {HEXDOC_EXPORTS_GROUP} with name {self.modid}"
                )
            case [entry_point]:
                # just right
                return entry_point
            case [*entry_points]:
                # too hot
                raise ImportError(
                    f"Multiple entry points found in group {HEXDOC_EXPORTS_GROUP} with name {self.modid}: {entry_points}"
                )


ResourceDir = PathResourceDir | EntryPointResourceDir
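
An addon opts in by publishing an entry point in the group above that points at a path (or list of paths) inside its package. A hedged sketch with hypothetical names:

```py
# in the addon's pyproject.toml (names are illustrative):
#   [project.entry-points."hexdoc.export"]
#   examplemod = "examplemod._export:__resources__"
#
# examplemod/_export/__init__.py:
#   __resources__ = "resources"  # relative to the entry point's module

res_dir = EntryPointResourceDir.model_validate({"modid": "examplemod"})
with res_dir.load() as path_dirs:
    for path_dir in path_dirs:
        print(path_dir.path)  # a real filesystem path, even from a zipped install
```
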
doc/src/hexdoc/utils/resource_loader.py (183 lines, new file)
@@ -0,0 +1,183 @@
# pyright: reportPrivateUsage=information, reportUnknownArgumentType=information, reportUnknownMemberType=information

import logging
import shutil
from collections.abc import Iterator
from contextlib import ExitStack, contextmanager
from pathlib import Path
from typing import Callable, Literal, Self, TypeVar

from pydantic.dataclasses import dataclass

from hexdoc.utils.deserialize import decode_json_dict
from hexdoc.utils.model import DEFAULT_CONFIG, HexDocValidationContext
from hexdoc.utils.types import without_suffix

from .properties import Properties
from .resource import PathResourceDir, ResourceLocation, ResourceType

_T = TypeVar("_T")


@dataclass(config=DEFAULT_CONFIG, kw_only=True)
class ModResourceLoader:
    props: Properties
    resource_dirs: list[PathResourceDir]

    @classmethod
    @contextmanager
    def load_all(cls, props: Properties) -> Iterator[Self]:
        # clear the export dir so we start with a clean slate
        try:
            shutil.rmtree(props.export_dir)
        except FileNotFoundError:
            pass

        with ExitStack() as stack:
            yield cls(
                props=props,
                resource_dirs=[
                    inner
                    for outer in props.resource_dirs
                    for inner in stack.enter_context(outer.load())
                ],
            )

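Callers get a loader whose entry point dirs have been extracted to real paths for the duration of the `with` block; a minimal usage sketch, assuming `props` is already loaded:

```py
with ModResourceLoader.load_all(props) as loader:
    resource_dir, book_data = loader.load_resource(
        type="data",
        folder="patchouli_books",
        id=props.book / "book",  # id is illustrative
    )
# extracted temporary paths are cleaned up when the block exits
```
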
    def load_book_assets(self, folder: Literal["categories", "entries", "templates"]):
        return self.load_resources(
            type="assets",
            folder="patchouli_books",
            base_id=self.props.book / self.props.i18n.default_lang / folder,
        )

    def load_resource(
        self,
        type: ResourceType,
        folder: str,
        id: ResourceLocation,
        *,
        decode: Callable[[str], _T] = decode_json_dict,
        export: Callable[[Path, _T], None] | None = None,
    ) -> tuple[PathResourceDir, _T]:
        """Find the first file with this resource location in `resource_dirs`.

        If no file extension is provided, `.json` is assumed.

        Raises FileNotFoundError if the file does not exist.
        """

        path_stub = id.file_path_stub(type, folder)

        # check by descending priority, return the first that exists
        for resource_dir in self.resource_dirs:
            try:
                return resource_dir, self._load_path(
                    resource_dir,
                    path=resource_dir.path / path_stub,
                    decode=decode,
                    export=export,
                )
            except FileNotFoundError:
                continue

        raise FileNotFoundError(f"Path {path_stub} not found in any resource dir")

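`decode` and `export` make the load/re-export pipeline pluggable; for example, loading a JSON5 lang file and re-exporting it as plain JSON might look like this (the id is illustrative):

```py
import json

import pyjson5

resource_dir, lang = loader.load_resource(
    type="assets",
    folder="lang",
    id=ResourceLocation("hexcasting", "en_us.flatten.json5"),
    decode=pyjson5.decode,  # parse JSON5 instead of strict JSON
    export=lambda path, value: path.write_text(json.dumps(value), "utf-8"),
)
```
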
    def load_resources(
        self,
        type: ResourceType,
        folder: str,
        base_id: ResourceLocation,
        *,
        glob: str | list[str] = "**/*",
        decode: Callable[[str], _T] = decode_json_dict,
        export: Callable[[Path, _T], None] | None = None,
    ) -> Iterator[tuple[PathResourceDir, ResourceLocation, _T]]:
        """Search for a glob under a given resource location in all of `resource_dirs`.

        Files are returned from lowest to highest priority in the load order, i.e.
        later files should overwrite earlier ones.

        The path of the returned resource location is relative to the path of base_id.

        If no file extension is provided for glob, `.json` is assumed.

        Raises FileNotFoundError if no files were found in any resource dir.

        For example (albeit somewhat contrived):
        ```py
        loader.load_resources(
            type="assets",
            folder="lang",
            base_id=ResLoc("*", "subdir"),
            glob="*.flatten.json5",
        )

        # [(resource_dir, hexcasting:en_us, <decoded contents of en_us.flatten.json5>)]
        ```
        """

        # eg. assets/*/lang/subdir
        base_path_stub = base_id.file_path_stub(type, folder, assume_json=False)

        # glob for json files if not provided
        globs = [glob] if isinstance(glob, str) else glob
        for i in range(len(globs)):
            if not Path(globs[i]).suffix:
                globs[i] += ".json"

        # find all files matching the resloc
        found_any = False
        for resource_dir in reversed(self.resource_dirs):
            # eg. .../resources/assets/*/lang/subdir
            for base_path in resource_dir.path.glob(base_path_stub.as_posix()):
                for glob_ in globs:
                    # eg. .../resources/assets/hexcasting/lang/subdir/*.flatten.json5
                    for path in base_path.glob(glob_):
                        id = ResourceLocation(
                            # eg. ["assets", "hexcasting", "lang", ...][1]
                            namespace=path.relative_to(resource_dir.path).parts[1],
                            path=without_suffix(path.relative_to(base_path)).as_posix(),
                        )

                        try:
                            value = self._load_path(resource_dir, path, decode, export)
                            found_any = True
                            yield resource_dir, id, value
                        except FileNotFoundError:
                            continue

        # if we never yielded any files, raise an error
        if not found_any:
            raise FileNotFoundError(
                f"No files found under {base_path_stub}/{globs} in any resource dir"
            )

    def _load_path(
        self,
        resource_dir: PathResourceDir,
        path: Path,
        decode: Callable[[str], _T] = decode_json_dict,
        export: Callable[[Path, _T], None] | None = None,
    ):
        if not path.is_file():
            raise FileNotFoundError(path)

        logging.getLogger(__name__).debug(f"Loading {path}")
        data = path.read_text("utf-8")
        value = decode(data)

        if resource_dir.reexport:
            out_path = self.props.export_dir / path.relative_to(resource_dir.path)
            out_path.parent.mkdir(parents=True, exist_ok=True)

            logging.getLogger(__name__).debug(f"Exporting {path} to {out_path}")
            if export:
                export(out_path, value)
            else:
                out_path.write_text(data, "utf-8")

        return value


class LoaderContext(HexDocValidationContext):
    loader: ModResourceLoader
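
With class-based contexts, validators can pull the loader out of `info.context` instead of indexing a TypedDict. A rough sketch of how a downstream model might consume it (the model and field are hypothetical):

```py
from pydantic import ValidationInfo, model_validator

from hexdoc.utils.model import HexDocModel
from hexdoc.utils.resource_loader import LoaderContext

class BookModel(HexDocModel):  # hypothetical downstream model
    @model_validator(mode="after")
    def _resolve_assets(self, info: ValidationInfo):
        # the loader arrives via pydantic's validation context
        if isinstance(info.context, LoaderContext):
            loader = info.context.loader
            ...  # e.g. loader.load_resource(...) for referenced files
        return self
```
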
@@ -1,16 +1,12 @@
# pyright: reportPrivateUsage=false

# from __future__ import annotations

from collections import defaultdict
from enum import Enum
from typing import Any, ClassVar, Generator, Self, cast
from typing import Any, ClassVar, Generator, Self

from pkg_resources import iter_entry_points
from pydantic import ValidationInfo, model_validator
from pydantic.functional_validators import ModelWrapValidatorHandler

from .model import AnyContext, HexDocModel
from .model import HexDocModel
from .resource import ResourceLocation


@@ -20,7 +16,7 @@ class NoValueType(Enum):
    _token = 0


NoValue = NoValueType._token
NoValue = NoValueType._token  # pyright: ignore[reportPrivateUsage]
"""A singleton (like None) representing the value of a nonexistent dict key."""

TagValue = str | NoValueType
@@ -44,7 +40,7 @@ def load_entry_points(group: str):
        raise


class InternallyTaggedUnion(HexDocModel[AnyContext]):
class InternallyTaggedUnion(HexDocModel):
    """Implements [internally tagged unions](https://serde.rs/enum-representations.html#internally-tagged)
    using the [Registry pattern](https://charlesreid1.github.io/python-patterns-the-registry.html).

@@ -139,7 +135,7 @@ class InternallyTaggedUnion(HexDocModel[AnyContext]):
    @classmethod
    def _resolve_from_dict(
        cls,
        data: dict[str, Any] | Self | Any,
        value: Any,
        handler: ModelWrapValidatorHandler[Self],
        info: ValidationInfo,
    ) -> Self:
@@ -151,14 +147,13 @@ class InternallyTaggedUnion(HexDocModel[AnyContext]):
        tag_key = cls._tag_key_or_raise()

        # if it's already instantiated, just return it; otherwise ensure it's a dict
        match data:
        match value:
            case InternallyTaggedUnion():
                return data
                return value
            case dict():
                # ew
                data = cast(dict[str, Any], data)
                data: dict[str, Any] = value
            case _:
                return handler(data)
                return handler(value)

        # don't infinite loop calling the same validator forever
        if "__resolved" in data:
@@ -177,10 +172,12 @@ class InternallyTaggedUnion(HexDocModel[AnyContext]):
        exceptions: list[Exception] = []
        matches: dict[type[Self], Self] = {}

        context = cast(AnyContext | None, info.context)
        for inner_type in tag_types:
            try:
                matches[inner_type] = inner_type.model_validate(data, context=context)
                matches[inner_type] = inner_type.model_validate(
                    data,
                    context=info.context,
                )
            except Exception as e:
                exceptions.append(e)

@@ -200,14 +197,14 @@ class InternallyTaggedUnion(HexDocModel[AnyContext]):
        )


class TypeTaggedUnion(InternallyTaggedUnion[AnyContext], key="type", value=None):
class TypeTaggedUnion(InternallyTaggedUnion, key="type", value=None):
    type: ResourceLocation | NoValueType | None

    def __init_subclass__(
        cls,
        *,
        group: str | None = None,
        type: TagValue | None,
        type: TagValue | None = None,  # FIXME: see pydantic/7171
    ) -> None:
        super().__init_subclass__(group=group, value=type)
        match type:
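
Concrete page and recipe types then register themselves by passing their tag when subclassing; a hypothetical page type might look like:

```py
# hypothetical subclass; the real pages live in hexdoc.patchouli.page
class ExamplePage(TypeTaggedUnion, type="examplemod:example"):
    text: str

# {"type": "examplemod:example", "text": "..."} now resolves to ExamplePage
```
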
@@ -1,6 +1,7 @@
import string
from abc import ABC, abstractmethod
from enum import Enum, unique
from pathlib import Path
from typing import Any, Mapping, Protocol, TypeVar

from pydantic import field_validator, model_validator
@@ -38,7 +39,7 @@ class Color:
        return value

    @field_validator("value", mode="before")
    def _check_value(cls, value: str | int | Any) -> str:
    def _check_value(cls, value: Any) -> str:
        # type check
        match value:
            case str():
@@ -100,3 +101,9 @@ class TryGetEnum(Enum):
            return cls(value)
        except ValueError:
            return None


def without_suffix(path: Path) -> Path:
    while path.suffix:
        path = path.with_suffix("")
    return path
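
Unlike `Path.with_suffix("")`, which only drops the last extension, this strips them all, which is what the loader's resource location handling needs:

```py
from pathlib import Path

assert without_suffix(Path("en_us.flatten.json5")) == Path("en_us")
assert Path("en_us.flatten.json5").with_suffix("") == Path("en_us.flatten")
```
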
@@ -2,6 +2,7 @@
from argparse import Namespace
from typing import cast

from hexdoc.minecraft.i18n import I18n
from hexdoc.patchouli.text import DEFAULT_MACROS, FormatTree
from hexdoc.patchouli.text.formatting import (
    CommandStyle,
@@ -16,10 +17,11 @@ from hexdoc.utils.properties import Properties
def test_format_string():
    # arrange
    test_str = "Write the given iota to my $(l:patterns/readwrite#hexcasting:write/local)$(#490)local$().$(br)The $(l:patterns/readwrite#hexcasting:write/local)$(#490)local$() is a lot like a $(l:items/focus)$(#b0b)Focus$(). It's cleared when I stop casting a Hex, starts with $(l:casting/influences)$(#490)Null$() in it, and is preserved between casts of $(l:patterns/meta#hexcasting:for_each)$(#fc77be)Thoth's Gambit$(). "
    mock_props = Namespace(is_0_black=False, i18n=Namespace(keys={}))
    mock_i18n = cast(I18n, Namespace(keys={}))
    mock_props = cast(Properties, Namespace(is_0_black=False, i18n=mock_i18n))

    # act
    tree = FormatTree.format(test_str, DEFAULT_MACROS, cast(Properties, mock_props))
    tree = FormatTree.format(test_str, DEFAULT_MACROS, mock_props, mock_i18n)

    # assert
    # TODO: possibly make this less lazy