Fix hexdummy pattern stub regex and duplicated entries

object-Object 2023-09-25 21:35:26 -04:00
parent 485d7f9d33
commit 68921389ed
10 changed files with 48 additions and 53 deletions

View file

@@ -11,7 +11,7 @@
 {# actual book content (ie. all the categories) #}
 <main class="book-body">
-{% for category in book.categories.values() if category.entries +%}
+{% for category in book.categories.values() if category.entries.values() +%}
 {% include "category.html.jinja" %}
 {% endfor +%}
 </main>

View file

@@ -6,7 +6,7 @@
 {{ category.description|hexdoc_block }}
 {% endcall %}
-{% for entry in category.entries if entry.id not in props.entry_id_blacklist +%}
+{% for entry in category.entries.values() if entry.id not in props.entry_id_blacklist +%}
 {% include "entry.html.jinja" %}
 {% endfor +%}
 </section>

View file

@@ -12,14 +12,14 @@
 ><i class="bi bi-list-nested"></i></a>{{ macros.permalink("table-of-contents", "toc-permalink") }}</span>
 </h2>
-{% for category in book.categories.values() if category.entries %}
+{% for category in book.categories.values() if category.entries.values() %}
 <details class="toc-category">
 {# category #}
 <summary>{{ macros.maybe_spoilered_link(category) }}</summary>
 {# list of entries in the category #}
 <ul>
-{% for entry in category.entries %}
+{% for entry in category.entries.values() %}
 <li>{{ macros.maybe_spoilered_link(entry) }}</li>
 {% endfor %}
 </ul>

View file

@@ -105,7 +105,7 @@ class HexContext(BookContext):
             self.patterns[pattern.id] = pattern
             signatures[pattern.signature] = pattern
 
-    def _load_stub_patterns(self, stub: PatternStubProps, per_world: Tag | None):
+    def _load_stub_patterns(self, stub: PatternStubProps, per_world_tag: Tag | None):
         # TODO: add Gradle task to generate json with this data. this is dumb and fragile.
         logging.getLogger(__name__).info(f"Load pattern stub from {stub.path}")
         stub_text = stub.path.read_text("utf-8")
@@ -114,11 +114,14 @@
             groups = match.groupdict()
             id = self.props.mod_loc(groups["name"])
 
+            if per_world_tag is not None:
+                is_per_world = id in per_world_tag.values
+            else:
+                is_per_world = groups.get("is_per_world") == stub.per_world_value
+
             yield PatternInfo(
+                id=id,
                 startdir=Direction[groups["startdir"]],
                 signature=groups["signature"],
-                is_per_world=(id in per_world.values)
-                if per_world
-                else groups.get("is_per_world") == "true",
-                id=id,
+                is_per_world=is_per_world,
             )
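Note: the stub regex itself lives in the properties file and is not shown in this diff, but the loader above expects it to expose named groups "name", "startdir", "signature", and optionally "is_per_world", whose captured text is now compared against the new stub.per_world_value setting (default "true") instead of a hardcoded "true". A minimal sketch of such a regex, against a made-up registry-line format (the line format and identifiers in the comment are assumptions, not taken from this commit):

import re

# Hypothetical stub line this would match:
#   PatternEntry("get_caster", HexPattern.fromAngles("qaq", HexDir.NORTH_EAST), ..., true)
STUB_REGEX = re.compile(
    r'"(?P<name>[a-z0-9_]+)"'                 # registry name, fed to props.mod_loc(...)
    r'.+?fromAngles\("(?P<signature>[aqwed]+)",\s*'
    r"HexDir\.(?P<startdir>[A-Z_]+)\)"        # start direction, looked up via Direction[...]
    r"(?:.+?(?P<is_per_world>true|false))?",  # optional flag, compared to stub.per_world_value
)

match = STUB_REGEX.search(
    'PatternEntry("get_caster", HexPattern.fromAngles("qaq", HexDir.NORTH_EAST), ..., true)'
)
assert match and match["name"] == "get_caster" and match["is_per_world"] == "true"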

View file

@@ -4,7 +4,6 @@ import json
 import logging
 from collections import defaultdict
 from functools import total_ordering
-from pathlib import Path
 from typing import Any, Callable, Self
 
 from pydantic import ValidationInfo, model_validator
@@ -17,7 +16,6 @@ from hexdoc.utils.deserialize import (
     decode_and_flatten_json_dict,
     isinstance_or_raise,
 )
-from hexdoc.utils.path import replace_suffixes
 from hexdoc.utils.resource_loader import LoaderContext
 
@@ -166,15 +164,8 @@ class I18n(HexdocModel):
         )
 
     @classmethod
-    def _export(
-        cls,
-        new: dict[str, str],
-        current: dict[str, str] | None,
-        path: Path,
-    ):
-        data = json.dumps((current or {}) | new)
-        path = replace_suffixes(path, ".json")
-        return data, path
+    def _export(cls, new: dict[str, str], current: dict[str, str] | None):
+        return json.dumps((current or {}) | new)
 
     def localize(
         self,

View file

@@ -1,6 +1,6 @@
 from __future__ import annotations
 
-from typing import Any, Iterator, Self
+from typing import Iterator, Self
 
 from pydantic import Field
@@ -63,7 +63,7 @@ class Tag(HexdocModel):
             case OptionalTagValue(id=id):
                 yield id
 
-    def _export(self, current: Self | None, *_: Any):
+    def _export(self, current: Self | None):
         if self.replace or current is None:
             tag = self
         else:

View file

@@ -14,6 +14,7 @@ from hexdoc.utils import (
 )
 from hexdoc.utils.compat import HexVersion
 from hexdoc.utils.deserialize import cast_or_raise
+from hexdoc.utils.types import sorted_dict
 
 from .book_context import BookContext
 from .category import Category
@@ -142,9 +143,23 @@ class Book(HexdocModel):
                 "Ensure the paths in your properties file are correct."
             )
 
-        # load entries
+        found_internal_entries = self._load_all_entries(context)
+        if not found_internal_entries:
+            raise ValueError(
+                "No internal entries found. "
+                "Ensure the paths in your properties file are correct."
+            )
+
+        # we inserted a bunch of entries in no particular order, so sort each category
+        for category in self._categories.values():
+            category.entries = sorted_dict(category.entries)
+
+        return self
+
+    def _load_all_entries(self, context: BookContext):
         found_internal_entries = False
 
+        # load entries
         for resource_dir, id, data in context.loader.load_book_assets(
             self.id,
             "entries",
@@ -161,19 +176,9 @@
             # i used the entry to insert the entry (pretty sure thanos said that)
             if not resource_dir.external:
                 found_internal_entries = True
-            self._categories[entry.category_id].entries.append(entry)
+            self._categories[entry.category_id].entries[entry.id] = entry
 
-        if not found_internal_entries:
-            raise ValueError(
-                "No entries found for this book. "
-                "Ensure the paths in your properties file are correct."
-            )
-
-        # we inserted a bunch of entries in no particular order, so sort each category
-        for category in self._categories.values():
-            category.entries.sort()
-
-        return self
+        return found_internal_entries
 
     @property
     def categories(self):
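The sorting now goes through sorted_dict from hexdoc.utils.types, which is imported above but whose definition is not part of this diff. Since the old code sorted the entries list in place (category.entries.sort() implies Entry is orderable), a stand-in with the behaviour the new code appears to rely on, reordering the dict by its values while keeping the ResourceLocation keys, would be roughly:

from typing import TypeVar

_K = TypeVar("_K")
_V = TypeVar("_V")  # assumed orderable, like Entry

def sorted_dict(d: dict[_K, _V]) -> dict[_K, _V]:
    # Rebuild the dict with its items ordered by value; dicts preserve insertion
    # order, so later iteration (eg. category.entries.values()) comes out sorted.
    return dict(sorted(d.items(), key=lambda item: item[1]))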

View file

@@ -17,7 +17,7 @@ class Category(HexdocIDModel, Sortable):
     See: https://vazkiimods.github.io/Patchouli/docs/reference/category-json
     """
 
-    entries: list[Entry] = Field(default_factory=list)
+    entries: dict[ResourceLocation, Entry] = Field(default_factory=dict)
 
     # required
     name: LocalizedStr
@@ -37,17 +37,16 @@
         context: LoaderContext,
         book_id: ResourceLocation,
         use_resource_pack: bool,
-    ):
-        categories: dict[ResourceLocation, Self] = {}
-
+    ) -> dict[ResourceLocation, Self]:
         # load
-        for resource_dir, id, data in context.loader.load_book_assets(
-            book_id,
-            "categories",
-            use_resource_pack,
-        ):
-            category = cls.load(resource_dir, id, data, context)
-            categories[id] = category
+        categories = {
+            id: cls.load(resource_dir, id, data, context)
+            for resource_dir, id, data in context.loader.load_book_assets(
+                book_id,
+                "categories",
+                use_resource_pack,
+            )
+        }
 
         # late-init _parent_cmp_key
         # track iterations to avoid an infinite loop if for some reason there's a cycle
@@ -78,7 +77,7 @@
     @property
     def is_spoiler(self) -> bool:
-        return all(entry.is_spoiler for entry in self.entries)
+        return all(entry.is_spoiler for entry in self.entries.values())
 
     @property
     def _is_cmp_key_ready(self) -> bool:
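Keying entries by ResourceLocation instead of storing them in a list is presumably what fixes the "duplicated entries" from the commit message: loading the same entry id a second time (eg. through a second resource dir) now overwrites the existing value rather than appending another copy. A toy comparison using plain builtins rather than hexdoc types:

entries_list: list[str] = []
entries_dict: dict[str, str] = {}

for _ in range(2):  # the same entry id discovered twice
    entries_list.append("hexcasting:basics")          # hypothetical entry id
    entries_dict["hexcasting:basics"] = "entry data"

assert len(entries_list) == 2  # old behaviour: the book gets a duplicate entry
assert len(entries_dict) == 1  # new behaviour: the second load replaces the first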

View file

@@ -23,6 +23,7 @@ NoTrailingSlashHttpUrl = Annotated[
 class PatternStubProps(StripHiddenModel):
     path: RelativePath
     regex: re.Pattern[str]
+    per_world_value: str | None = "true"
 
 
 class TemplateProps(StripHiddenModel):

View file

@@ -24,7 +24,7 @@ METADATA_SUFFIX = ".hexdoc.json"
 _T = TypeVar("_T")
 _T_Model = TypeVar("_T_Model", bound=HexdocModel)
 
-ExportFn = Callable[[_T, _T | None, Path], str | tuple[str, Path]]
+ExportFn = Callable[[_T, _T | None], str]
 
 
 class HexdocMetadata(HexdocModel):
@@ -409,11 +409,7 @@ class ModResourceLoader:
         except FileNotFoundError:
             old_value = None
 
-        match export(value, old_value, out_path):
-            case str(out_data):
-                pass
-            case (str(out_data), Path() as out_path):
-                pass
+        out_data = export(value, old_value)
 
         write_to_path(out_path, out_data)
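With the Path parameter and the str | tuple[str, Path] return type gone, an export callback now just merges the new and existing values and returns the file contents as a string; ModResourceLoader keeps ownership of out_path and writes the result itself, as shown above. A sketch of a callback matching the new ExportFn shape (the JSON-list exporter is hypothetical, not something from hexdoc):

import json
from typing import Callable, TypeVar

_T = TypeVar("_T")
ExportFn = Callable[[_T, _T | None], str]  # same shape as the alias above

def export_json_list(value: list[str], old_value: list[str] | None) -> str:
    # Hypothetical exporter: merge with whatever was already on disk, return the new contents.
    return json.dumps((old_value or []) + value)

# Loader side, mirroring the code above:
#   out_data = export(value, old_value)
#   write_to_path(out_path, out_data)
print(export_json_list(["b"], ["a"]))  # ["a", "b"]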