Minor refactors and some work on bundling book data

object-Object 2023-07-27 21:56:10 -04:00
parent ba13bf3836
commit 71258eb72b
12 changed files with 87 additions and 50 deletions

View file

@@ -37,8 +37,8 @@ show_landing_text = true
 hexcasting = "https://raw.githubusercontent.com/gamma-delta/HexMod/main/Common/src/main/resources"

 [i18n]
-lang = "en_us"
-filename = "{lang}.json"
+default_lang = "en_us"
+filename = "{default_lang}.json"

 [i18n.extra]
 "item.minecraft.amethyst_shard" = "Amethyst Shard"
 "item.minecraft.budding_amethyst" = "Budding Amethyst"

View file

@@ -2,12 +2,9 @@
 requires = ["hatchling"]
 build-backend = "hatchling.build"

-[tool.hatch.build]
-packages = ["src/hexdoc"]

 [project]
-name = "HexDoc"
+name = "hexdoc"
 version = "0.1.0"
 authors = [
     { name="Alwinfy" },
@@ -26,13 +23,17 @@ dependencies = [
 dev = [
     "black==23.7.0",
     "isort==5.12.0",
-    "pytest==7.3.1",
-    "syrupy==4.0.2",
+    "pytest~=7.3.1",
+    "syrupy~=4.0.2",
+    "hatchling",
 ]

 [project.scripts]
 hexdoc = "hexdoc.hexdoc:main"

+[project.entry-points."hexdoc.book_data"]
+"hexcasting:thehexbook" = "hexdoc._book_data"
+
 [project.entry-points."hexdoc.Page"]
 hexdoc-patchouli = "hexdoc.patchouli.page.pages"
 hexdoc-hexcasting = "hexdoc.hexcasting.page.hex_pages"
@@ -46,6 +47,10 @@ hexdoc-minecraft = "hexdoc.minecraft.recipe.ingredients"
 hexdoc-hexcasting = "hexdoc.hexcasting.hex_recipes"

+[tool.hatch.build]
+packages = ["src/hexdoc"]
+
 [tool.pytest.ini_options]
 addopts = ["--import-mode=importlib"]
 markers = [
@@ -103,10 +108,6 @@ reportUntypedBaseClass = "error"
 reportUntypedClassDecorator = "error"
 reportUntypedFunctionDecorator = "error"
 reportUntypedNamedTuple = "error"
-reportUnusedClass = "error"
-reportUnusedExpression = "error"
-reportUnusedFunction = "error"
-reportUnusedVariable = "error"
 reportWildcardImportFromLibrary = "error"

 reportMissingTypeArgument = "warning"
@@ -116,6 +117,10 @@ reportUnknownLambdaType = "warning"
 reportUnknownMemberType = "warning"
 reportUnnecessaryComparison = "warning"
 reportUnnecessaryIsInstance = "warning"
+reportUnusedClass = "warning"
+reportUnusedExpression = "warning"
+reportUnusedFunction = "warning"
 reportUnusedImport = "warning"
+reportUnusedVariable = "warning"
 reportUnknownVariableType = "none"
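
Note: a hedged sketch of how the new hexdoc.book_data entry point group is meant to be consumed (mirroring the from_id logic added to book.py later in this commit); the lookup key is the book's resource location string:

    from importlib.metadata import entry_points

    # look up the module registered for a book id, e.g. "hexcasting:thehexbook"
    books = entry_points(group="hexdoc.book_data")
    book_module = books["hexcasting:thehexbook"].load()  # -> hexdoc._book_data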

View file

@@ -0,0 +1 @@
+BOOK_DATA_PATH = "data.json"
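
A hedged sketch of how this constant pairs with the entry point above: the registered module is treated as a package, and BOOK_DATA_PATH names the bundled JSON file inside it (again mirroring from_id below):

    from importlib import resources

    # resolve data.json inside the registered book-data package and read it
    book_path = resources.files(book_module) / book_module.BOOK_DATA_PATH
    raw_json = book_path.read_text("utf-8")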

View file

View file

@ -16,7 +16,7 @@ from hexdoc.utils import (
Properties, Properties,
ResourceLocation, ResourceLocation,
) )
from hexdoc.utils.deserialize import isinstance_or_raise, load_json from hexdoc.utils.deserialize import isinstance_or_raise, load_json_dict
class I18nContext(TypedDict): class I18nContext(TypedDict):
@@ -115,7 +115,7 @@ class I18n:
         # we could also use that to ensure all i18n files have the same set of keys
         lang_dir = props.resources_dir / "assets" / props.modid / "lang"
         path = lang_dir / props.i18n.filename
-        raw_lookup = load_json(path) | (props.i18n.extra or {})
+        raw_lookup = load_json_dict(path) | (props.i18n.extra or {})

         # validate and insert
         self.lookup = {}

View file

@@ -3,7 +3,7 @@ from typing import Any, Self, cast
 from pydantic import ValidationInfo, model_validator

 from hexdoc.utils import AnyPropsContext, ResourceLocation, TypeTaggedUnion
-from hexdoc.utils.deserialize import load_json
+from hexdoc.utils.deserialize import load_json_dict

 class Recipe(TypeTaggedUnion[AnyPropsContext], group="hexdoc.Recipe", type=None):
@@ -35,7 +35,7 @@ class Recipe(TypeTaggedUnion[AnyPropsContext], group="hexdoc.Recipe", type=None):
         path = recipe_dir / f"{id.path}.json"

         if recipe_dir == context["props"].default_recipe_dir:
             # only load from one file
-            values = load_json(path) | {"id": id}
+            values = load_json_dict(path) | {"id": id}
         elif not path.exists():
             # this is to ensure the recipe at least exists on all platforms
             # because we've had issues with that before (eg. Hexal's Mote Nexus)

View file

@@ -1,3 +1,5 @@
+from importlib import resources
+from importlib.metadata import entry_points
 from typing import Any, Generic, Literal, Self, cast

 from pydantic import Field, ValidationInfo, model_validator
@ -12,7 +14,7 @@ from hexdoc.utils import (
ResLoc, ResLoc,
ResourceLocation, ResourceLocation,
) )
from hexdoc.utils.deserialize import isinstance_or_raise, load_json from hexdoc.utils.deserialize import isinstance_or_raise, load_json_dict
from .book_models import AnyBookContext, BookContext from .book_models import AnyBookContext, BookContext
from .category import Category from .category import Category
@@ -33,8 +35,7 @@ class Book(Generic[AnyContext, AnyBookContext], HexDocModel[AnyBookContext]):
     """

     # not in book.json
-    context: AnyBookContext = Field(default_factory=dict)
-    categories: dict[ResourceLocation, Category] = Field(default_factory=dict)
+    i18n_data: I18n

     # required
     name: LocalizedStr
@@ -65,51 +66,73 @@ class Book(Generic[AnyContext, AnyBookContext], HexDocModel[AnyBookContext]):
     custom_book_item: ItemStack | None = None
     show_toasts: bool = True
     use_blocky_font: bool = False
-    do_i18n: bool = Field(default=False, alias="i18n")
+    i18n: bool = False
     macros: dict[str, str] = Field(default_factory=dict)
     pause_game: bool = False
     text_overflow_mode: Literal["overflow", "resize", "truncate"] | None = None
-    extend: str | None = None
+    extend: ResourceLocation | None = None
+    """NOTE: currently this WILL NOT load values from the target book!"""
     allow_extensions: bool = True

-    @classmethod
-    def load(cls, data: dict[str, Any], context: AnyBookContext):
-        return cls.model_validate(data, context=context)
-
     @classmethod
     def prepare(cls, props: Properties) -> tuple[dict[str, Any], BookContext]:
         # read the raw dict from the json file
         path = props.book_dir / "book.json"
-        data = load_json(path)
-        assert isinstance_or_raise(data, dict[str, Any])
+        data = load_json_dict(path)

-        # NOW we can convert the actual book data
-        return data, {
-            "i18n": I18n(props, data["i18n"]),
+        # set up the deserialization context object
+        assert isinstance_or_raise(data["i18n"], bool)
+        assert isinstance_or_raise(data["macros"], dict)
+        context: BookContext = {
             "props": props,
+            "i18n": I18n(props, data["i18n"]),
             "macros": DEFAULT_MACROS | data["macros"],
         }
+        return data, context
+
+    @classmethod
+    def load(cls, data: dict[str, Any], context: AnyBookContext) -> Self:
+        return cls.model_validate(data, context=context)
+
+    @classmethod
+    def from_id(cls, book_id: ResourceLocation) -> Self:
+        # load the module for the given book id using the entry point
+        # TODO: this is untested because it needs to change for 0.11 anyway :/
+        books = entry_points(group="hexdoc.book_data")
+        book_module = books[str(book_id)].load()
+
+        # read and validate the actual data file
+        book_path = resources.files(book_module) / book_module.BOOK_DATA_PATH
+        return cls.model_validate_json(book_path.read_text("utf-8"))
+
+    @model_validator(mode="before")
+    def _pre_root(cls, data: dict[str, Any], info: ValidationInfo) -> dict[str, Any]:
+        context = cast(AnyBookContext, info.context)
+        if not context:
+            return data
+        return data | {
+            "i18n_data": context["i18n"],
+        }

     @model_validator(mode="after")
     def _post_root(self, info: ValidationInfo) -> Self:
         """Loads categories and entries."""
         context = cast(AnyBookContext, info.context)
         if not context:
             return self
-        self.context = context

-        # categories
-        self.categories = Category.load_all(context)
+        # load categories
+        self._categories: dict[ResourceLocation, Category] = Category.load_all(context)

-        # entries
+        # load entries
         for path in context["props"].entries_dir.rglob("*.json"):
-            # i used the entry to insert the entry (pretty sure thanos said that)
             entry = Entry.load(path, context)
-            self.categories[entry.category_id].entries.append(entry)
+            # i used the entry to insert the entry (pretty sure thanos said that)
+            self._categories[entry.category_id].entries.append(entry)

         # we inserted a bunch of entries in no particular order, so sort each category
-        for category in self.categories.values():
+        for category in self._categories.values():
             category.entries.sort()

         return self
@@ -120,5 +143,5 @@ class Book(Generic[AnyContext, AnyBookContext], HexDocModel[AnyBookContext]):
         return self.model if self.index_icon_ is None else self.index_icon_

     @property
-    def props(self) -> Properties:
-        return self.context["props"]
+    def categories(self):
+        return self._categories
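
A hedged sketch of the two-step loading flow implied by the reordered classmethods above (props is a hexdoc Properties instance; names match the diff):

    # prepare() reads book.json and builds the validation context;
    # load() then validates it into a Book, populating categories and entries.
    data, context = Book.prepare(props)
    book = Book.load(data, context)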

View file

@@ -216,10 +216,14 @@ class ParagraphStyle(Style, frozen=True):
         return out.element("p", **self.attributes)

+def is_external_link(value: str) -> bool:
+    return value.startswith(("https:", "http:"))
+
+
 def _format_href(value: str) -> str:
-    if not value.startswith(("http:", "https:")):
-        return "#" + value.replace("#", "@")
-    return value
+    if is_external_link(value):
+        return value
+    return f"#{value.replace('#', '@')}"

 class FunctionStyle(Style, frozen=True):
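
For reference, a hedged sketch of the refactored helper's behaviour on hypothetical inputs:

    _format_href("https://example.com/page")   # external link -> returned unchanged
    _format_href("patterns/basics#anchor")     # internal link -> "#patterns/basics@anchor"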

View file

@@ -38,7 +38,7 @@ JSONDict = dict[str, "JSONValue"]
 JSONValue = JSONDict | list["JSONValue"] | str | int | float | bool | None

-def load_json(path: Path) -> JSONDict:
+def load_json_dict(path: Path) -> JSONDict:
     data: JSONValue = json.loads(path.read_text("utf-8"))
     assert isinstance_or_raise(data, dict)
     return data

View file

@@ -4,7 +4,7 @@ from typing import TYPE_CHECKING, Any, Generic, Self, TypeVar, dataclass_transform
 from pydantic import BaseModel, ConfigDict
 from typing_extensions import TypedDict

-from .deserialize import load_json
+from .deserialize import load_json_dict

 if TYPE_CHECKING:
     from pydantic.root_model import Model
@@ -56,5 +56,5 @@ class FrozenHexDocModel(Generic[AnyContext], HexDocModel[AnyContext]):
 class HexDocFileModel(HexDocModel[AnyContext]):
     @classmethod
     def load(cls, path: Path, context: AnyContext) -> Self:
-        data = load_json(path) | {"__path": path}
+        data = load_json_dict(path) | {"__path": path}
         return cls.model_validate(data, context=context)

View file

@@ -33,7 +33,7 @@ class PlatformProps(HexDocModel[Any]):
 class I18nProps(HexDocModel[Any]):
-    lang: str
+    default_lang: str
     filename: str
     extra: dict[str, str] | None = None
@@ -78,7 +78,7 @@ class Properties(HexDocModel[Any]):
     @property
     def lang(self):
-        return self.i18n.lang
+        return self.i18n.default_lang

     @property
     def book_dir(self) -> Path:

View file

@@ -35,12 +35,16 @@ class BaseResourceLocation:
         cls._from_str_regex = regex

     @classmethod
-    def from_str(cls, raw: str) -> Self:
+    def from_str(cls, raw: str, default_namespace: str | None = None) -> Self:
         match = cls._from_str_regex.fullmatch(raw)
         if match is None:
             raise ValueError(f"Invalid {cls.__name__} string: {raw}")
-        return cls(**match.groupdict())
+
+        groups = match.groupdict()
+        if not groups.get("namespace") and default_namespace is not None:
+            groups["namespace"] = default_namespace
+        return cls(**groups)

     @model_validator(mode="wrap")
     @classmethod
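
A hedged usage sketch of the new default_namespace parameter on hypothetical inputs:

    ResourceLocation.from_str("hexcasting:thehexbook")                        # namespace parsed from the string
    ResourceLocation.from_str("thehexbook", default_namespace="hexcasting")   # namespace falls back to the default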