Entry dataclass

parent 25213840b9
commit e42a8272e4

10 changed files with 242 additions and 143 deletions

@@ -105,7 +105,9 @@ def get_format(out: Stream, ty: str, value: Any):


 def entry_spoilered(root_info: Book, entry: Entry):
-    return entry.get("advancement", None) in root_info.spoilers
+    if entry.raw.advancement is None:
+        return False
+    return str(entry.raw.advancement) in root_info.spoilers


 def category_spoilered(root_info: Book, category: Category):
@@ -255,14 +257,14 @@ def write_page(out: Stream, pageid: str, page: Page):


 def write_entry(out: Stream, book: Book, entry: Entry):
-    with out.pair_tag("div", id=entry["id"]):
+    with out.pair_tag("div", id=entry.id.path):
         with out.pair_tag_if(entry_spoilered(book, entry), "div", clazz="spoilered"):
             with out.pair_tag("h3", clazz="entry-title page-header"):
-                write_block(out, entry["name"])
+                write_block(out, entry.name)
                 anchor_toc(out)
-                permalink(out, "#" + entry["id"])
-            for page in entry["pages"]:
-                write_page(out, entry["id"], page)
+                permalink(out, entry.href)
+            for page in entry.pages:
+                write_page(out, entry.id.path, page)


 def write_category(out: Stream, book: Book, category: Category):
@@ -276,7 +278,7 @@ def write_category(out: Stream, book: Book, category: Category):
                 permalink(out, category.href)
             write_block(out, category.description)
         for entry in category.entries:
-            if entry["id"] not in book.blacklist:
+            if entry.id.path not in book.blacklist:
                 write_entry(out, book, entry)


@@ -306,10 +308,10 @@ def write_toc(out: Stream, book: Book):
                     with out.pair_tag("li"):
                         with out.pair_tag(
                             "a",
-                            href="#" + entry["id"],
+                            href=entry.href,
                             clazz="spoilered" if entry_spoilered(book, entry) else "",
                         ):
-                            out.text(entry["name"])
+                            out.text(entry.name)


 def write_book(out: Stream, book: Book):

@@ -1,6 +1,6 @@
 from __future__ import annotations

-from dataclasses import dataclass
+from abc import ABC, abstractmethod
 from typing import TYPE_CHECKING, Any

 # circular imports are gross
@@ -11,12 +11,16 @@ if TYPE_CHECKING:
 else:
     Book, Category, Entry = Any, Any, Any

-@dataclass
-class WithBook:
-    """Helper base class for composition with Book."""
-
-    book: Book
+# TODO: consolidate ABCs here
+
+
+class WithBook(ABC):
+    """ABC for composition with Book."""
+
+    @property
+    @abstractmethod
+    def book(self) -> Book:
+        ...

     @property
     def resource_dir(self):

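A minimal sketch (not part of this commit) of how a concrete class can satisfy the new WithBook ABC; Holder and the stand-in Book class are hypothetical, mirroring how Category stores _book further down in this diff.

# Hypothetical sketch (not from this commit) of implementing the WithBook ABC above.
from abc import ABC, abstractmethod
from dataclasses import dataclass


class Book:  # stand-in for the real Book
    modid = "hexcasting"


class WithBook(ABC):
    @property
    @abstractmethod
    def book(self) -> Book:
        ...


@dataclass
class Holder(WithBook):
    """Hypothetical stand-in for Category/Entry, which keep the book in a field."""

    _book: Book

    @property
    def book(self) -> Book:
        # satisfies the abstract property
        return self._book


assert Holder(Book()).book.modid == "hexcasting"
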
@@ -1,10 +1,38 @@
-from pathlib import Path
-from typing import Self
+# pyright: reportUnknownArgumentType=false, reportUnknownMemberType=false
+
+import dataclasses
+from abc import ABC, abstractmethod
+from pathlib import Path
+from typing import Any, Self, dataclass_transform

 import serde
 from serde import SerdeError
 from serde.json import from_json


+class FromStr(ABC):
+    """Base class for types which are deserialized from a string."""
+
+    @classmethod
+    @abstractmethod
+    def from_str(cls, s: str) -> Self:
+        ...
+
+    @classmethod
+    def field(cls, *args: Any, factory: str | None = None, **kwargs: Any) -> Any:
+        """Helper method for using this as a dataclass field. You must use this method if
+        you're putting this in a serde class.
+
+        If `factory` is provided, `default_factory` will be set to the following:
+        `lambda: cls.from_str(factory)`
+        """
+        if factory is not None:
+            kwargs["default_factory"] = lambda: cls.from_str(factory)
+        return serde.field(*args, deserializer=cls.from_str, **kwargs)
+
+
+# dataclass_transform ensures type checkers work properly with these field specifiers
+@dataclass_transform(field_specifiers=(dataclasses.field, FromStr.field, serde.field))
 class FromJson:
     """Helper methods for JSON-deserialized dataclasses."""

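A usage sketch (not part of this commit) of the FromStr.field pattern above, assuming pyserde's field(deserializer=...), @deserialize, and from_json behave as they are used elsewhere in this diff; HexColor and Theme are made up.

# Hypothetical sketch (not from this commit); HexColor and Theme are made up.
from dataclasses import dataclass
from typing import Any, Self

import serde
from serde import deserialize
from serde.json import from_json


@dataclass(frozen=True)
class HexColor:
    """Made-up FromStr-style type; the real code does this with ResourceLocation."""

    value: str

    @classmethod
    def from_str(cls, s: str) -> Self:
        return cls(s.removeprefix("#"))

    @classmethod
    def field(cls, *args: Any, factory: str | None = None, **kwargs: Any) -> Any:
        # mirrors FromStr.field above: deserialize via from_str, optional string default
        if factory is not None:
            kwargs["default_factory"] = lambda: cls.from_str(factory)
        return serde.field(*args, deserializer=cls.from_str, **kwargs)


@deserialize
class Theme:
    text_color: HexColor = HexColor.field(factory="#000000")


print(from_json(Theme, '{"text_color": "#ff0000"}'))  # Theme(text_color=HexColor(value='ff0000'))
print(from_json(Theme, "{}"))                          # falls back to the "#000000" default
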
@@ -1,14 +1,24 @@
-from typing import Any, Mapping, Protocol, TypeVar
+from abc import ABC, abstractmethod
+from typing import Any, Mapping, TypeVar


-class Comparable(Protocol):
-    def __lt__(self, other: Any) -> bool:
+class Sortable(ABC):
+    """ABC for classes which can be sorted."""
+
+    @property
+    @abstractmethod
+    def cmp_key(self) -> Any:
         ...

+    def __lt__(self, other: Any) -> bool:
+        if isinstance(other, Sortable):
+            return self.cmp_key < other.cmp_key
+        return NotImplemented
+

 _T = TypeVar("_T")
-_T_Comparable = TypeVar("_T_Comparable", bound=Comparable)
+_T_Sortable = TypeVar("_T_Sortable", bound=Sortable)


-def sorted_dict(d: Mapping[_T, _T_Comparable]) -> dict[_T, _T_Comparable]:
+def sorted_dict(d: Mapping[_T, _T_Sortable]) -> dict[_T, _T_Sortable]:
     return dict(sorted(d.items(), key=lambda item: item[1]))

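A minimal sketch (not part of this commit) showing how Sortable's cmp_key drives sorting; Weighted is a hypothetical stand-in for Category/Entry.

# Hypothetical sketch (not from this commit): implementing Sortable and sorting by value.
from abc import ABC, abstractmethod
from typing import Any


class Sortable(ABC):
    @property
    @abstractmethod
    def cmp_key(self) -> Any:
        ...

    def __lt__(self, other: Any) -> bool:
        if isinstance(other, Sortable):
            return self.cmp_key < other.cmp_key
        return NotImplemented


class Weighted(Sortable):  # hypothetical stand-in for Category/Entry
    def __init__(self, weight: int):
        self.weight = weight

    @property
    def cmp_key(self) -> Any:
        return self.weight


items = {"b": Weighted(2), "a": Weighted(1)}
# sorting by value uses __lt__, and therefore cmp_key, exactly like sorted_dict above
print(list(dict(sorted(items.items(), key=lambda kv: kv[1]))))  # ['a', 'b']
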
@@ -1,6 +1,6 @@
 import json
 import re
-from dataclasses import InitVar, dataclass, field
+from dataclasses import InitVar, dataclass
 from pathlib import Path
 from typing import Self

@@ -20,6 +20,8 @@ _EXTRA_I18N = {
     "block.hexcasting.slate": LocalizedStr("Blank Slate"),
 }

+I18nLookup = dict[str, LocalizedStr]
+

 @dataclass
 class I18n:
@@ -30,32 +32,37 @@ class I18n:
     default_lang: str

     enabled: InitVar[bool]
-    extra_i18n: InitVar[dict[str, LocalizedStr] | None] = None
+    extra_i18n: InitVar[I18nLookup | None] = None

-    _i18n: dict[str, LocalizedStr] | None = field(default=None)
+    _lookup: I18nLookup | None = None

-    def __post_init__(self, enabled: bool, extra_i18n: dict[str, LocalizedStr] | None):
+    def __post_init__(self, enabled: bool, extra_i18n: I18nLookup | None):
         if not enabled:
             return

-        # load and deserialize
-        # TODO: load ALL of the i18n files, return dict[str, dict[str, LocalizedStr]]
-        # TODO: types, all of this is nasty
+        # load, deserialize, validate
+        # TODO: load ALL of the i18n files, return dict[str, _Lookup] | None
+        # or maybe dict[(str, str), LocalizedStr]
+        # we could also use that to ensure all i18n files have the same set of keys
         path = self.dir / f"{self.default_lang}.json"
-        self._i18n = json.loads(path.read_text("utf-8"))
-        self._i18n.update(_EXTRA_I18N)
-        if extra_i18n:
-            self._i18n.update(extra_i18n)
+        self._lookup: I18nLookup | None = json.loads(path.read_text("utf-8"))

-        # type-checking
+        # validate
         # TODO: there's probably a library we can use to do this for us
         assert isinstance(
-            self._i18n, dict
-        ), f"Unexpected top-level type `{type(self._i18n)}` in i18n: {path}"
-        for k, v in self._i18n.items():
-            assert isinstance(k, str), f"Unexpected key type `{type(k)}` in i18n: {k}"
-            assert isinstance(v, str), f"Unexpected value type `{type(v)}` in i18n: {v}"
+            self._lookup, dict
+        ), f"Unexpected top-level type `{type(self._lookup)}` in {path}"
+        for k, v in self._lookup.items():
+            assert isinstance(k, str), f"Unexpected key type `{type(k)}` in {path}: {k}"
+            assert isinstance(
+                v, str
+            ), f"Unexpected value type `{type(v)}` in {path}: {v}"
+
+        # add extras
+        self._lookup.update(_EXTRA_I18N)
+        if extra_i18n:
+            self._lookup.update(extra_i18n)

     @property
     def dir(self) -> Path:
@@ -73,16 +80,16 @@ class I18n:

         Raises KeyError if i18n is enabled and skip_errors is False but the key has no localization.
         """
-        if self._i18n is None:
+        if self._lookup is None:
             return LocalizedStr(key.replace("%%", "%"))

         if default is not None:
-            localized = self._i18n.get(key, default)
+            localized = self._lookup.get(key, default)
         elif skip_errors:
-            localized = self._i18n.get(key, key)
+            localized = self._lookup.get(key, key)
         else:
             # raises if not found
-            localized = self._i18n[key]
+            localized = self._lookup[key]

         return LocalizedStr(localized.replace("%%", "%"))

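A simplified sketch (not part of this commit) of the lookup/fallback behaviour shown above; localize here is a hypothetical standalone function with the default argument omitted, not the real I18n.localize signature.

# Simplified, hypothetical sketch of the lookup/fallback logic above.
def localize(lookup: dict[str, str] | None, key: str, skip_errors: bool = False) -> str:
    if lookup is None:
        # i18n disabled: return the raw key, unescaping doubled percent signs
        return key.replace("%%", "%")
    if skip_errors:
        localized = lookup.get(key, key)
    else:
        localized = lookup[key]  # raises KeyError if the key has no localization
    return localized.replace("%%", "%")


print(localize(None, "block.hexcasting.slate"))                                       # key passthrough
print(localize({"block.hexcasting.slate": "Blank Slate"}, "block.hexcasting.slate"))  # "Blank Slate"
print(localize({}, "missing.key", skip_errors=True))                                  # "missing.key"
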
@@ -1,8 +1,12 @@
-import re
-from dataclasses import dataclass
-from typing import Any, Self
+from __future__ import annotations

-from serde import field
+import re
+from abc import ABC, abstractmethod
+from dataclasses import dataclass
+from pathlib import Path
+from typing import Self
+
+from common.deserialize import FromStr

 _RESOURCE_LOCATION_RE = re.compile(r"(?:([0-9a-z_\-.]+):)?([0-9a-z_\-./]+)")
 _ITEM_STACK_SUFFIX_RE = re.compile(r"(?:#([0-9]+))?({.*})?")
@@ -11,7 +15,7 @@ _ITEM_STACK_SUFFIX_RE = re.compile(r"(?:#([0-9]+))?({.*})?")
 # TODO: instead of the dataclass field thing, make this subclass str
 # _namespace and _method, access via properties
 @dataclass(repr=False, frozen=True)
-class ResourceLocation:
+class ResourceLocation(FromStr):
     """Represents a Minecraft resource location / namespaced ID."""

     namespace: str
@@ -29,17 +33,6 @@ class ResourceLocation:

         return cls(namespace, path)

-    @classmethod
-    def field(cls, s: str | None = None, **kwargs: Any) -> Any:
-        """Helper method for using this as a dataclass field. You must use this method if
-        you're putting this in a serde class.
-
-        s may be a raw resource location string to construct a default value from.
-        """
-        if s is not None:
-            kwargs["default_factory"] = cls.from_str(s)
-        return field(deserializer=cls.from_str, **kwargs)
-
     def __repr__(self) -> str:
         return f"{self.namespace}:{self.path}"

@@ -76,3 +69,29 @@ class ItemStack(ResourceLocation):
         if self.nbt is not None:
             s += self.nbt
         return s
+
+
+@dataclass
+class WithPathId(ABC):
+    """ABC for classes with a ResourceLocation id."""
+
+    path: Path
+
+    @property
+    @abstractmethod
+    def base_dir(self) -> Path:
+        """Base directory. Combine with self.id.path to find this file."""
+
+    @property
+    @abstractmethod
+    def modid(self) -> str:
+        ...
+
+    @property
+    def id(self) -> ResourceLocation:
+        resource_path = self.path.relative_to(self.base_dir).with_suffix("").as_posix()
+        return ResourceLocation(self.modid, resource_path)
+
+    @property
+    def href(self) -> str:
+        return f"#{self.id.path}"

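A worked sketch (not part of this commit) of how WithPathId.id and .href fall out of the path arithmetic above; the directory and file names are made up.

# Hypothetical sketch (not from this commit); directory names are made up.
from pathlib import Path

base_dir = Path("some_mod/book/entries")          # stand-in for base_dir
path = base_dir / "basics/getting_started.json"   # stand-in for an entry's json file

resource_path = path.relative_to(base_dir).with_suffix("").as_posix()
print(resource_path)                   # basics/getting_started
print(f"somemod:{resource_path}")      # ResourceLocation(modid, resource_path)
print(f"#{resource_path}")             # href, i.e. the anchor the HTML writer links to
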
@@ -54,11 +54,11 @@ class RawBook(FromJson):

     # optional
     book_texture: ResourceLocation = ResourceLocation.field(
-        "patchouli:textures/gui/book_brown.png"
+        factory="patchouli:textures/gui/book_brown.png"
     )
     filler_texture: ResourceLocation | None = ResourceLocation.field(default=None)
     crafting_texture: ResourceLocation | None = ResourceLocation.field(default=None)
-    model: ResourceLocation = ResourceLocation.field("patchouli:book_brown")
+    model: ResourceLocation = ResourceLocation.field(factory="patchouli:book_brown")
     text_color: Color = Color("000000")
     header_color: Color = Color("333333")
     nameplate_color: Color = Color("FFDD00")
@@ -69,7 +69,8 @@
     open_sound: ResourceLocation | None = ResourceLocation.field(default=None)
     flip_sound: ResourceLocation | None = ResourceLocation.field(default=None)
     _index_icon: ResourceLocation | None = ResourceLocation.field(
-        rename="index_icon", default=None
+        rename="index_icon",
+        default=None,
     )
     pamphlet: bool = False
     show_progress: bool = True
@@ -187,6 +188,6 @@ class Book:
         have been added to the book.
         """
         categories = (
-            Category(self, path) for path in self.categories_dir.rglob("*.json")
+            Category(path, self) for path in self.categories_dir.rglob("*.json")
         )
         return {category.id: category for category in categories}

@@ -1,15 +1,15 @@
 from __future__ import annotations

-from dataclasses import InitVar, dataclass
+from dataclasses import dataclass
 from pathlib import Path
 from typing import Any

-from common.composition import WithBook
+from common.composition import Book, WithBook
 from common.deserialize import FromJson
 from common.formatting import FormatTree
+from common.utils import Sortable
 from minecraft.i18n import LocalizedStr
-from minecraft.resource import ItemStack, ResourceLocation
-from patchouli.entry import Entry, parse_entry
+from minecraft.resource import ItemStack, ResourceLocation, WithPathId
+from patchouli.entry import Entry
 from serde import deserialize


@@ -33,45 +33,20 @@ class RawCategory(FromJson):


 @dataclass
-class Category(WithBook):
+class Category(WithBook, WithPathId, Sortable):
     """Category with pages and localizations."""

-    path: InitVar[Path]
+    _book: Book

-    def __post_init__(self, path: Path):
-        self.raw: RawCategory = RawCategory.load(path)
-
-        # category id
-        id_resource_path = path.relative_to(self.dir).with_suffix("").as_posix()
-        self.id = ResourceLocation(self.modid, id_resource_path)
+    def __post_init__(self):
+        self.raw: RawCategory = RawCategory.load(self.path)

         # localized strings
         self.name: LocalizedStr = self.i18n.localize(self.raw.name)
         self.description: FormatTree = self.book.format(self.raw.description)

         # entries
-        # TODO: make not bad
-        self.entries: list[Entry] = []
-        entry_dir = self.book.entries_dir / self.id.path
-        for entry_path in entry_dir.glob("*.json"):
-            basename = entry_path.stem
-            self.entries.append(
-                parse_entry(
-                    self.book, entry_path.as_posix(), self.id.path + "/" + basename
-                )
-            )
-        self.entries.sort(
-            key=lambda ent: (
-                not ent.get("priority", False),
-                ent.get("sortnum", 0),
-                ent["name"],
-            )
-        )
-
-    @property
-    def dir(self) -> Path:
-        """Directory containing this category's json file."""
-        return self.book.categories_dir
+        self.entries: list[Entry] = self._load_entries()

     @property
     def parent(self) -> Category | None:
@@ -79,17 +54,23 @@ class Category(WithBook):
             return None
         return self.book.categories[self.raw.parent]

-    @property
-    def href(self) -> str:
-        return f"#{self.id.path}"
+    def _load_entries(self) -> list[Entry]:
+        entry_dir = self.book.entries_dir / self.id.path
+        return sorted(Entry(path, self) for path in entry_dir.glob("*.json"))

     @property
-    def sortnum(self) -> tuple[int, ...]:
+    def book(self) -> Book:
+        # implement WithBook
+        return self._book
+
+    @property
+    def base_dir(self) -> Path:
+        # implement WithPathId
+        return self.book.categories_dir
+
+    @property
+    def cmp_key(self) -> tuple[int, ...]:
+        # implement Sortable
         if self.parent:
-            return self.parent.sortnum + (self.raw.sortnum,)
+            return self.parent.cmp_key + (self.raw.sortnum,)
         return (self.raw.sortnum,)
-
-    def __lt__(self, other: Any) -> bool:
-        if isinstance(other, Category):
-            return self.sortnum < other.sortnum
-        return NotImplemented

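A small sketch (not part of this commit) of what the recursive cmp_key above yields for nested categories; FakeCategory and the sortnums are made up.

# Hypothetical sketch (not from this commit) of the recursive cmp_key ordering.
from __future__ import annotations

from dataclasses import dataclass


@dataclass
class FakeCategory:
    sortnum: int
    parent: FakeCategory | None = None

    @property
    def cmp_key(self) -> tuple[int, ...]:
        if self.parent:
            return self.parent.cmp_key + (self.sortnum,)
        return (self.sortnum,)


root = FakeCategory(1)
child_a = FakeCategory(0, parent=root)
child_b = FakeCategory(2, parent=root)
print([c.cmp_key for c in sorted([child_b, root, child_a], key=lambda c: c.cmp_key)])
# [(1,), (1, 0), (1, 2)] -- a parent sorts just before its children, siblings by their own sortnum
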
@@ -1,54 +1,101 @@
 from __future__ import annotations

-from typing import NotRequired, TypedDict
+from dataclasses import dataclass
+from pathlib import Path
+from typing import Any

-from common.composition import Book
-from patchouli.page import Page, Page_patchouli_text, page_transformers, slurp
-
-
-class Entry(TypedDict):
-    category: str
-    icon: str
-    id: str
-    name: str
-    pages: list[Page]
-    advancement: NotRequired[str]
-    entry_color: NotRequired[str]
-    extra_recipe_mappings: NotRequired[dict]
-    flag: NotRequired[str]
-    priority: NotRequired[bool]
-    read_by_default: NotRequired[bool]
-    sort_num: NotRequired[int]
-    sortnum: NotRequired[float | int]
+from common.composition import Book, Category, WithBook
+from common.deserialize import FromJson
+from common.utils import Sortable
+from minecraft.i18n import LocalizedStr
+from minecraft.resource import ItemStack, ResourceLocation, WithPathId
+from patchouli.page import Page, Page_patchouli_text, page_transformers
+from serde import deserialize


 # TODO: remove
-def do_localize(book: Book, obj: Entry | Page, *names: str) -> None:
+def do_localize(book: Book, obj: Page | dict[str, Any], *names: str) -> None:
     for name in names:
         if name in obj:
             obj[name] = book.i18n.localize(obj[name])


 # TODO: remove
-def do_format(book: Book, obj: Entry | Page, *names: str) -> None:
+def do_format(book: Book, obj: Page | dict[str, Any], *names: str) -> None:
     for name in names:
         if name in obj:
             obj[name] = book.format(obj[name])


-# TODO: move to serde
-def parse_entry(book: Book, entry_path: str, ent_name: str) -> Entry:
-    data: Entry = slurp(f"{entry_path}")
-    do_localize(book, data, "name")
-    for i, page in enumerate(data["pages"]):
-        if isinstance(page, str):
-            page = Page_patchouli_text(type="patchouli:text", text=book.format(page))
-            data["pages"][i] = page
-        else:
-            do_format(book, page, "text")
-            do_localize(book, page, "title", "header")
-            if page_transformer := page_transformers.get(page["type"]):
-                page_transformer(book, page)
-    data["id"] = ent_name
+@deserialize
+class RawEntry(FromJson):
+    """Direct representation of an Entry json file.

-    return data
+    See: https://vazkiimods.github.io/Patchouli/docs/reference/entry-json
+    """
+
+    # required
+    name: str
+    category: ResourceLocation = ResourceLocation.field()
+    icon: ItemStack = ItemStack.field()
+    pages: list[dict[str, Any] | str]  # TODO: type
+
+    # optional
+    advancement: ResourceLocation | None = ResourceLocation.field(default=None)
+    flag: str | None = None
+    priority: bool = False
+    secret: bool = False
+    read_by_default: bool = False
+    sortnum: int = 0
+    turnin: ResourceLocation | None = ResourceLocation.field(default=None)
+    # TODO: this should be dict[ItemStack, int] but I have no idea how to make that work
+    extra_recipe_mappings: dict[str, int] | None = None
+
+
+@dataclass
+class Entry(WithBook, WithPathId, Sortable):
+    """Entry with pages and localizations."""
+
+    category: Category
+
+    def __post_init__(self):
+        # load raw entry and ensure the category matches
+        self.raw: RawEntry = RawEntry.load(self.path)
+        if self.raw.category != self.category.id:
+            raise ValueError(
+                f"Entry {self.raw.name} has category {self.raw.category} but was initialized by {self.category.id}"
+            )
+
+        # localized strings
+        self.name: LocalizedStr = self.i18n.localize(self.raw.name)
+
+        # entries
+        # TODO: make badn't
+        self.pages: list[Page | dict[str, Any]] = []
+        for page in self.raw.pages:
+            if isinstance(page, str):
+                page = Page_patchouli_text(
+                    type="patchouli:text", text=self.book.format(page)
+                )
+            else:
+                do_format(self.book, page, "text")
+                do_localize(self.book, page, "title", "header")
+                if page_transformer := page_transformers.get(page["type"]):
+                    page_transformer(self.book, page)
+            self.pages.append(page)
+
+    @property
+    def book(self) -> Book:
+        # implement WithBook
+        return self.category.book
+
+    @property
+    def base_dir(self) -> Path:
+        # implement WithPathId
+        return self.book.entries_dir
+
+    @property
+    def cmp_key(self) -> tuple[bool, int, LocalizedStr]:
+        # implement Sortable
+        # note: python sorts false before true, so we invert priority
+        return (not self.raw.priority, self.raw.sortnum, self.name)

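A quick sketch (not part of this commit) of why cmp_key negates priority: False sorts before True, so priority entries come first, then sortnum, then the localized name breaks ties. The entries below are made up.

# Hypothetical sketch (not from this commit) of the ordering Entry.cmp_key produces.
entries = [
    {"name": "B", "priority": False, "sortnum": 0},
    {"name": "A", "priority": False, "sortnum": 0},
    {"name": "C", "priority": True, "sortnum": 5},
]
key = lambda e: (not e["priority"], e["sortnum"], e["name"])
print([e["name"] for e in sorted(entries, key=key)])  # ['C', 'A', 'B']
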
@@ -198,7 +198,7 @@ def fetch_bswp_recipe_result(book: Book, recipe: str):


 # TODO: remove
-def do_localize(book: Book, obj: Entry | Page, *names: str) -> None:
+def do_localize(book: Book, obj: Page, *names: str) -> None:
     for name in names:
         if name in obj:
             obj[name] = book.i18n.localize(obj[name])