Book refactor (enormous)
parent b7be50bb24
commit 0cd6631c09
18 changed files with 963 additions and 704 deletions
23 .vscode/launch.json vendored Normal file
@@ -0,0 +1,23 @@
+{
+    // Use IntelliSense to learn about possible attributes.
+    // Hover to view descriptions of existing attributes.
+    // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
+    "version": "0.2.0",
+    "configurations": [
+        {
+            "name": "Python: Generate Docs",
+            "type": "python",
+            "request": "launch",
+            "cwd": "${workspaceFolder}/doc",
+            "program": "src/main.py",
+            "args": [
+                "../Common/src/main/resources",
+                "hexcasting",
+                "thehexbook",
+                "template.html",
+            ],
+            "console": "integratedTerminal",
+            "justMyCode": false
+        }
+    ]
+}
4 .vscode/settings.json vendored
@@ -16,10 +16,12 @@
     "reportUnknownParameterType": "error",
     "reportUnknownArgumentType": "warning",
     "reportUnknownLambdaType": "warning",
-    "reportUnknownVariableType": "warning",
+    "reportUnknownVariableType": "none",
     "reportUnknownMemberType": "warning",
     "reportUnnecessaryComparison": "warning",
     "reportMissingTypeArgument": "warning",
+    "reportUnusedImport": "information",
+    "reportPrivateUsage": "warning",
   },
   "python.analysis.diagnosticMode": "workspace",
   "python.languageServer": "Pylance",
@@ -4,6 +4,8 @@ Contains the Python docgen for Hex Casting.
 
 ## Setup
 
+The minimum Python version to run this script is `3.11`.
+
 ```sh
 cd doc
 python -m venv venv
@@ -2,3 +2,4 @@ black==22.10.0  # formatting
 pytest==7.3.1  # testing framework
 syrupy==4.0.2  # snapshot tests
 typed-argument-parser==1.8.0  # better argument parsing
+pyserde==0.10.8  # book deserialization
@@ -1,431 +1,17 @@
 #!/usr/bin/env python3
-import io
-import json  # codec
-import os  # listdir
-import re  # parsing
-from html import escape
-from typing import Any, Callable, Generator
-from __future__ import annotations
-
-from patchouli.types import (
-    Book,
-    Category,
-    Entry,
-    FormatTree,
-    ManualPatternPage,
-    Page,
-    Page_hexcasting_pattern,
-    Page_patchouli_text,
-    PatternPage,
-    PatternPageWithSig,
-    Registry,
-    Style,
-    Text,
-    _BasePage,
-)
+import io
+from html import escape
+from typing import IO, Any
+
+from common.formatting import FormatTree
+from patchouli.book import Book, Category, Entry, Page
 
-# extra info :(
-lang = "en_us"
-repo_names = {
-    "hexcasting": "https://raw.githubusercontent.com/gamma-delta/HexMod/main/Common/src/main/resources",
-}
-extra_i18n = {
-    "item.minecraft.amethyst_shard": "Amethyst Shard",
-    "item.minecraft.budding_amethyst": "Budding Amethyst",
-    "block.hexcasting.slate": "Blank Slate",
-}
-
-default_macros = {
-    "$(obf)": "$(k)",
-    "$(bold)": "$(l)",
-    "$(strike)": "$(m)",
-    "$(italic)": "$(o)",
-    "$(italics)": "$(o)",
-    "$(list": "$(li",
-    "$(reset)": "$()",
-    "$(clear)": "$()",
-    "$(2br)": "$(br2)",
-    "$(p)": "$(br2)",
-    "/$": "$()",
-    "<br>": "$(br)",
-    "$(nocolor)": "$(0)",
-    "$(item)": "$(#b0b)",
-    "$(thing)": "$(#490)",
-}
-
-colors: dict[str, str | None] = {
-    "0": None,
-    "1": "00a",
-    "2": "0a0",
-    "3": "0aa",
-    "4": "a00",
-    "5": "a0a",
-    "6": "fa0",
-    "7": "aaa",
-    "8": "555",
-    "9": "55f",
-    "a": "5f5",
-    "b": "5ff",
-    "c": "f55",
-    "d": "f5f",
-    "e": "ff5",
-    "f": "fff",
-}
-types = {
-    "k": "obf",
-    "l": "bold",
-    "m": "strikethrough",
-    "n": "underline",
-    "o": "italic",
-}
-
-keys = {
-    "use": "Right Click",
-    "sneak": "Left Shift",
-}
-
-pattern_pat = re.compile(
-    r'HexPattern\.fromAngles\("([qweasd]+)", HexDir\.(\w+)\),\s*modLoc\("([^"]+)"\)([^;]*true\);)?'
-)
-pattern_stubs = [
-    (None, "at/petrak/hexcasting/interop/pehkui/PehkuiInterop.java"),
-    (None, "at/petrak/hexcasting/common/casting/RegisterPatterns.java"),
-    ("Fabric", "at/petrak/hexcasting/fabric/interop/gravity/GravityApiInterop.java"),
-]
-
-# TODO: what the hell is this
-bind1 = (lambda: None).__get__(0).__class__
-
-
-# TODO: serde
-def slurp(filename: str) -> Any:
-    with open(filename, "r", encoding="utf-8") as fh:
-        return json.load(fh)
-
-
-def parse_style(sty: str) -> tuple[str, Style | None]:
-    # TODO: match, maybe
-    if sty == "br":
-        return "\n", None
-    if sty == "br2":
-        return "", Style("para", {})
-    if sty == "li":
-        return "", Style("para", {"clazz": "fake-li"})
-    if sty[:2] == "k:":
-        return keys[sty[2:]], None
-    if sty[:2] == "l:":
-        return "", Style("link", sty[2:])
-    if sty == "/l":
-        return "", Style("link", None)
-    if sty == "playername":
-        return "[Playername]", None
-    if sty[:2] == "t:":
-        return "", Style("tooltip", sty[2:])
-    if sty == "/t":
-        return "", Style("tooltip", None)
-    if sty[:2] == "c:":
-        return "", Style("cmd_click", sty[2:])
-    if sty == "/c":
-        return "", Style("cmd_click", None)
-    if sty == "r" or not sty:
-        return "", Style("base", None)
-    if sty in types:
-        return "", Style(types[sty], True)
-    if sty in colors:
-        return "", Style("color", colors[sty])
-    if sty.startswith("#") and len(sty) in [4, 7]:
-        return "", Style("color", sty[1:])
-    # TODO more style parse
-    raise ValueError("Unknown style: " + sty)
-
-
-def localize(i18n: dict[str, str], string: str, default: str | None = None) -> str:
-    return (
-        i18n.get(string, default if default else string) if i18n else string
-    ).replace("%%", "%")
-
-
-format_re = re.compile(r"\$\(([^)]*)\)")
-
-
-def format_string(root_data: Book, string: str) -> FormatTree:
-    # FIXME: ew.
-    # resolve lang
-    string = localize(root_data["i18n"], string)
-
-    # resolve macros
-    # FIXME: this is just a fancy if statement, I think
-    old_string = None
-    while old_string != string:
-        old_string = string
-        for macro, replace in root_data["macros"].items():
-            string = string.replace(macro, replace)
-        else:
-            break
-
-    # lex out parsed styles
-    text_nodes: list[str] = []
-    styles: list[Style] = []
-    last_end = 0
-    extra_text = ""
-    for mobj in re.finditer(format_re, string):
-        bonus_text, sty = parse_style(mobj.group(1))
-        text = string[last_end : mobj.start()] + bonus_text
-        if sty:
-            styles.append(sty)
-            text_nodes.append(extra_text + text)
-            extra_text = ""
-        else:
-            extra_text += text
-        last_end = mobj.end()
-    text_nodes.append(extra_text + string[last_end:])
-    first_node, *text_nodes = text_nodes
-
-    # parse
-    style_stack = [
-        FormatTree(Style("base", True), []),
-        FormatTree(Style("para", {}), [first_node]),
-    ]
-    for style, text in zip(styles, text_nodes):
-        tmp_stylestack: list[Style] = []
-        if style.type == "base":
-            while style_stack[-1].style.type != "para":
-                last_node = style_stack.pop()
-                style_stack[-1].children.append(last_node)
-        elif any(tree.style.type == style.type for tree in style_stack):
-            while len(style_stack) >= 2:
-                last_node = style_stack.pop()
-                style_stack[-1].children.append(last_node)
-                if last_node.style.type == style.type:
-                    break
-                tmp_stylestack.append(last_node.style)
-        for sty in tmp_stylestack:
-            style_stack.append(FormatTree(sty, []))
-        if style.value is None:
-            if text:
-                style_stack[-1].children.append(text)
-        else:
-            style_stack.append(FormatTree(style, [text] if text else []))
-    while len(style_stack) >= 2:
-        last_node = style_stack.pop()
-        style_stack[-1].children.append(last_node)
-
-    return style_stack[0]
-
-
-def localize_pattern(root_data: Book, op_id: str) -> str:
-    return localize(
-        root_data["i18n"],
-        "hexcasting.spell.book." + op_id,
-        localize(root_data["i18n"], "hexcasting.spell." + op_id),
-    )
-
-
-# TODO: types
-def do_localize(root_data: Book, obj: Any, *names: str) -> None:
-    for name in names:
-        if name in obj:
-            obj[name] = localize(root_data["i18n"], obj[name])
-
-
-# TODO: types
-def do_format(root_data: Book, obj: Any, *names: str) -> None:
-    for name in names:
-        if name in obj:
-            obj[name] = format_string(root_data, obj[name])
-
-
-def fetch_patterns(root_data: Book) -> Registry:
-    registry: Registry = {}
-    for loader, stub in pattern_stubs:
-        filename = f"{root_data['resource_dir']}/../java/{stub}"
-        if loader:
-            filename = filename.replace("Common", loader)
-        with open(filename, "r", encoding="utf-8") as fh:
-            pattern_data = fh.read()
-        for mobj in re.finditer(pattern_pat, pattern_data):
-            string, start_angle, name, is_per_world = mobj.groups()
-            registry[root_data["modid"] + ":" + name] = (
-                string,
-                start_angle,
-                bool(
-                    is_per_world
-                ),  # TODO: changing to is_per_world == "true" makes the tests fail??
-            )
-    return registry
-
-
-def resolve_pattern(root_data: Book, page: Page_hexcasting_pattern) -> None:
-    if "pattern_reg" not in root_data:
-        root_data["pattern_reg"] = fetch_patterns(root_data)
-    page["op"] = [root_data["pattern_reg"][page["op_id"]]]
-    page["name"] = localize_pattern(root_data, page["op_id"])
-
-
-def fixup_pattern(do_sig: bool, root_data: Book, page: ManualPatternPage) -> None:
-    patterns = page["patterns"]
-    if (op_id := page.get("op_id")) is not None:
-        page["header"] = localize_pattern(root_data, op_id)
-    if not isinstance(patterns, list):
-        patterns = [patterns]
-    if do_sig:
-        inp = page.get("input", None) or ""
-        oup = page.get("output", None) or ""
-        pipe = f"{inp} \u2192 {oup}".strip()
-        suffix = f" ({pipe})" if inp or oup else ""
-        page["header"] += suffix
-    page["op"] = [(p["signature"], p["startdir"], False) for p in patterns]
-
-
-# TODO: recipe type (not a page, apparently)
-def fetch_recipe(root_data: Book, recipe: str) -> dict[str, dict[str, str]]:
-    modid, recipeid = recipe.split(":")
-    gen_resource_dir = (
-        root_data["resource_dir"]
-        .replace("/main/", "/generated/")
-        .replace("Common/", "Forge/")
-    )  # TODO hack
-    recipe_path = f"{gen_resource_dir}/data/{modid}/recipes/{recipeid}.json"
-    return slurp(recipe_path)
-
-
-def fetch_recipe_result(root_data: Book, recipe: str):
-    return fetch_recipe(root_data, recipe)["result"]["item"]
-
-
-def fetch_bswp_recipe_result(root_data: Book, recipe: str):
-    return fetch_recipe(root_data, recipe)["result"]["name"]
-
-
-def localize_item(root_data: Book, item: str) -> str:
-    # TODO hack
-    item = re.sub("{.*", "", item.replace(":", "."))
-    block = "block." + item
-    block_l = localize(root_data["i18n"], block)
-    if block_l != block:
-        return block_l
-    return localize(root_data["i18n"], "item." + item)
-
-
-# TODO: move all of this to the individual classes, because this cannot be properly typed
-page_types: dict[str, Callable[[Book, dict[str, Any]], None]] = {
-    "hexcasting:pattern": resolve_pattern,
-    "hexcasting:manual_pattern": bind1(fixup_pattern, True),
-    "hexcasting:manual_pattern_nosig": bind1(fixup_pattern, False),
-    "hexcasting:brainsweep": lambda rd, page: page.__setitem__(
-        "output_name", localize_item(rd, fetch_bswp_recipe_result(rd, page["recipe"]))
-    ),
-    "patchouli:link": lambda rd, page: do_localize(rd, page, "link_text"),
-    "patchouli:crafting": lambda rd, page: page.__setitem__(
-        "item_name",
-        [
-            localize_item(rd, fetch_recipe_result(rd, page[ty]))
-            for ty in ("recipe", "recipe2")
-            if ty in page
-        ],
-    ),
-    "hexcasting:crafting_multi": lambda rd, page: page.__setitem__(
-        "item_name",
-        [
-            localize_item(rd, fetch_recipe_result(rd, recipe))
-            for recipe in page["recipes"]
-        ],
-    ),
-    "patchouli:spotlight": lambda rd, page: page.__setitem__(
-        "item_name", localize_item(rd, page["item"])
-    ),
-}
-
-
-def walk_dir(root_dir: str, prefix: str) -> Generator[str, None, None]:
-    search_dir = root_dir + "/" + prefix
-    for fh in os.scandir(search_dir):
-        if fh.is_dir():
-            yield from walk_dir(root_dir, prefix + fh.name + "/")
-        elif fh.name.endswith(".json"):
-            yield prefix + fh.name
-
-
-# TODO: move to serde
-def parse_entry(root_data: Book, entry_path: str, ent_name: str) -> Entry:
-    data: Entry = slurp(f"{entry_path}")
-    do_localize(root_data, data, "name")
-    for i, page in enumerate(data["pages"]):
-        if isinstance(page, str):
-            page = Page_patchouli_text(type="patchouli:text", text=page)
-            data["pages"][i] = page
-
-        do_localize(root_data, page, "title", "header")
-        do_format(root_data, page, "text")
-        if page_type := page_types.get(page["type"]):
-            page_type(root_data, page)
-    data["id"] = ent_name
-
-    return data
-
-
-def parse_category(root_data: Book, base_dir: str, cat_name: str) -> Category:
-    data: Category = slurp(f"{base_dir}/categories/{cat_name}.json")
-    do_localize(root_data, data, "name")
-    do_format(root_data, data, "description")
-
-    entry_dir = f"{base_dir}/entries/{cat_name}"
-    entries: list[Entry] = []
-    for filename in os.listdir(entry_dir):
-        if filename.endswith(".json"):
-            basename = filename[:-5]
-            entries.append(
-                parse_entry(
-                    root_data, f"{entry_dir}/{filename}", cat_name + "/" + basename
-                )
-            )
-    entries.sort(
-        key=lambda ent: (
-            not ent.get("priority", False),
-            ent.get("sortnum", 0),
-            ent["name"],
-        )
-    )
-    data["entries"] = entries
-    data["id"] = cat_name
-
-    return data
-
-
-def parse_sortnum(cats: dict[str, Category], name: str) -> tuple[int, ...]:
-    if "/" in name:
-        ix = name.rindex("/")
-        return parse_sortnum(cats, name[:ix]) + (cats[name].get("sortnum", 0),)
-    return (cats[name].get("sortnum", 0),)
-
-
-# TODO: use Path instead of strings
-def parse_book(resource_dir: str, mod_name: str, book_name: str) -> Book:
-    base_dir = f"{resource_dir}/data/{mod_name}/patchouli_books/{book_name}"
-    root_info: Book = slurp(f"{base_dir}/book.json")
-
-    root_info["resource_dir"] = resource_dir
-    root_info["modid"] = mod_name
-    root_info.setdefault("macros", {}).update(default_macros)
-    if root_info.setdefault("i18n", {}):
-        root_info["i18n"] = slurp(f"{resource_dir}/assets/{mod_name}/lang/{lang}.json")
-        root_info["i18n"].update(extra_i18n)
-
-    book_dir = f"{base_dir}/{lang}"
-
-    categories: list[Category] = []
-    for filename in walk_dir(f"{book_dir}/categories", ""):
-        basename = filename[:-5]
-        categories.append(parse_category(root_info, book_dir, basename))
-    cats = {cat["id"]: cat for cat in categories}
-    categories.sort(key=lambda cat: (parse_sortnum(cats, cat["id"]), cat["name"]))
-
-    do_localize(root_info, root_info, "name")
-    do_format(root_info, root_info, "landing_text")
-    root_info["categories"] = categories
-    root_info["blacklist"] = set()
-    root_info["spoilers"] = set()
-
-    return root_info
-
-
-# TODO: type
@@ -439,7 +25,8 @@ def tag_args(kwargs: dict[str, Any]):
 class PairTag:
     __slots__ = ["stream", "name", "kwargs"]
 
-    def __init__(self, stream, name, **kwargs):
+    # TODO: type
+    def __init__(self, stream: IO[str], name: str, **kwargs: Any):
         self.stream = stream
         self.name = name
         self.kwargs = tag_args(kwargs)
@@ -447,7 +34,7 @@ class PairTag:
     def __enter__(self):
         print(f"<{self.name}{self.kwargs}>", file=self.stream, end="")
 
-    def __exit__(self, _1, _2, _3):
+    def __exit__(self, _1: Any, _2: Any, _3: Any):
         print(f"</{self.name}>", file=self.stream, end="")
 
 
@@ -455,37 +42,38 @@ class Empty:
     def __enter__(self):
        pass
 
-    def __exit__(self, _1, _2, _3):
+    def __exit__(self, _1: Any, _2: Any, _3: Any):
         pass
 
 
 class Stream:
     __slots__ = ["stream"]
 
-    def __init__(self, stream):
+    def __init__(self, stream: IO[str]):
         self.stream = stream
 
-    def tag(self, name, **kwargs):
+    def tag(self, name: str, **kwargs: Any):
         keywords = tag_args(kwargs)
         print(f"<{name}{keywords} />", file=self.stream, end="")
         return self
 
-    def pair_tag(self, name, **kwargs):
+    def pair_tag(self, name: str, **kwargs: Any):
         return PairTag(self.stream, name, **kwargs)
 
-    def pair_tag_if(self, cond, name, **kwargs):
+    def pair_tag_if(self, cond: Any, name: str, **kwargs: Any):
         return self.pair_tag(name, **kwargs) if cond else Empty()
 
-    def empty_pair_tag(self, name, **kwargs):
+    def empty_pair_tag(self, name: str, **kwargs: Any):
         with self.pair_tag(name, **kwargs):
             pass
 
-    def text(self, txt):
+    def text(self, txt: str):
         print(escape(txt), file=self.stream, end="")
         return self
 
 
-def get_format(out, ty, value):
+# TODO: move
+def get_format(out: Stream, ty: str, value: Any):
     if ty == "para":
         return out.pair_tag("p", **value)
     if ty == "color":
@@ -514,15 +102,15 @@ def get_format(out, ty, value):
     raise ValueError("Unknown format type: " + ty)
 
 
-def entry_spoilered(root_info, entry):
-    return entry.get("advancement", None) in root_info["spoilers"]
+def entry_spoilered(root_info: Book, entry: Entry):
+    return entry.get("advancement", None) in root_info.spoilers
 
 
-def category_spoilered(root_info, category):
+def category_spoilered(root_info: Book, category: Category):
     return all(entry_spoilered(root_info, ent) for ent in category["entries"])
 
 
-def write_block(out, block):
+def write_block(out: Stream, block: FormatTree | str):
     if isinstance(block, str):
         first = False
         for line in block.split("\n"):
@@ -542,22 +130,21 @@ def write_block(out, block):
                 write_block(out, child)
 
 
-def anchor_toc(out):
+def anchor_toc(out: Stream):
     with out.pair_tag(
         "a", href="#table-of-contents", clazz="permalink small", title="Jump to top"
     ):
         out.empty_pair_tag("i", clazz="bi bi-box-arrow-up")
 
 
-def permalink(out, link):
+def permalink(out: Stream, link: str):
     with out.pair_tag("a", href=link, clazz="permalink small", title="Permalink"):
         out.empty_pair_tag("i", clazz="bi bi-link-45deg")
 
 
 # TODO modularize
-def write_page(out, pageid, page):
-    if "anchor" in page:
-        anchor_id = pageid + "@" + page["anchor"]
+def write_page(out: Stream, pageid: str, page: Page):
+    if anchor := page.get("anchor"):
        anchor_id = pageid + "@" + anchor
     else:
         anchor_id = None
 
@@ -642,19 +229,19 @@ def write_page(out, pageid, page):
             permalink(out, "#" + anchor_id)
         with out.pair_tag("details", clazz="spell-collapsible"):
             out.empty_pair_tag("summary", clazz="collapse-spell")
-            for string, start_angle, per_world in page["op"]:
+            for pattern in page["op"]:
                 with out.pair_tag(
                     "canvas",
                     clazz="spell-viz",
                     width=216,
                     height=216,
-                    data_string=string,
-                    data_start=start_angle.lower(),
-                    data_per_world=per_world,
+                    data_string=pattern.angle_sig,
+                    data_start=pattern.direction.lower(),
+                    data_per_world=pattern.is_per_world,
                 ):
                     out.text(
                         "Your browser does not support visualizing patterns. Pattern code: "
-                        + string
+                        + pattern.angle_sig
                     )
         write_block(out, page["text"])
     else:
@@ -665,7 +252,7 @@ def write_page(out, pageid, page):
         out.tag("br")
 
 
-def write_entry(out, book, entry):
+def write_entry(out: Stream, book: Book, entry: Entry):
     with out.pair_tag("div", id=entry["id"]):
         with out.pair_tag_if(entry_spoilered(book, entry), "div", clazz="spoilered"):
             with out.pair_tag("h3", clazz="entry-title page-header"):
@@ -676,7 +263,7 @@ def write_entry(out, book, entry):
             write_page(out, entry["id"], page)
 
 
-def write_category(out, book, category):
+def write_category(out: Stream, book: Book, category: Category):
     with out.pair_tag("section", id=category["id"]):
         with out.pair_tag_if(
             category_spoilered(book, category), "div", clazz="spoilered"
@@ -687,11 +274,11 @@ def write_category(out, book, category):
             permalink(out, "#" + category["id"])
         write_block(out, category["description"])
         for entry in category["entries"]:
-            if entry["id"] not in book["blacklist"]:
+            if entry["id"] not in book.blacklist:
                 write_entry(out, book, entry)
 
 
-def write_toc(out, book):
+def write_toc(out: Stream, book: Book):
     with out.pair_tag("h2", id="table-of-contents", clazz="page-header"):
         out.text("Table of Contents")
         with out.pair_tag(
@@ -703,7 +290,7 @@ def write_toc(out, book):
         ):
             out.empty_pair_tag("i", clazz="bi bi-list-nested")
         permalink(out, "#table-of-contents")
-    for category in book["categories"]:
+    for category in book.categories:
         with out.pair_tag("details", clazz="toc-category"):
             with out.pair_tag("summary"):
                 with out.pair_tag(
@@ -723,20 +310,20 @@ def write_toc(out, book):
                     out.text(entry["name"])
 
 
-def write_book(out, book):
+def write_book(out: Stream, book: Book):
     with out.pair_tag("div", clazz="container"):
         with out.pair_tag("header", clazz="jumbotron"):
             with out.pair_tag("h1", clazz="book-title"):
-                write_block(out, book["name"])
-            write_block(out, book["landing_text"])
+                write_block(out, book.name)
+            write_block(out, book.landing_text)
         with out.pair_tag("nav"):
             write_toc(out, book)
         with out.pair_tag("main", clazz="book-body"):
-            for category in book["categories"]:
+            for category in book.categories:
                 write_category(out, book, category)
 
 
-def generate_docs(book, template: str) -> str:
+def generate_docs(book: Book, template: str) -> str:
     # FIXME: super hacky temporary solution for returning this as a string
     # just pass a string buffer to everything instead of a file
     with io.StringIO() as output:
@@ -744,11 +331,11 @@ def generate_docs(book, template: str) -> str:
         for line in template.splitlines(True):
             if line.startswith("#DO_NOT_RENDER"):
                 _, *blacklist = line.split()
-                book["blacklist"].update(blacklist)
+                book.blacklist.update(blacklist)
 
             if line.startswith("#SPOILER"):
                 _, *spoilers = line.split()
-                book["spoilers"].update(spoilers)
+                book.spoilers.update(spoilers)
             elif line == "#DUMP_BODY_HERE\n":
                 write_book(Stream(output), book)
                 print("", file=output)
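The surviving `Stream`/`PairTag` helpers above are a tiny context-manager HTML writer. A minimal usage sketch, not part of the commit; it assumes `tag_args` (whose body is not shown in this diff) renders the `clazz` keyword as the HTML `class` attribute, as the `clazz=` calls throughout suggest:

```python
import io

from collate_data import Stream

buf = io.StringIO()
out = Stream(buf)

# pair_tag opens the tag on __enter__ and closes it on __exit__;
# text() HTML-escapes its argument before writing
with out.pair_tag("p", clazz="fake-li"):
    out.text("hello & <world>")

print(buf.getvalue())
# expected (assuming clazz -> class): <p class="fake-li">hello &amp; &lt;world&gt;</p>
```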
89 doc/src/common/deserialize.py Normal file
@@ -0,0 +1,89 @@
+import json
+import string
+from pathlib import Path
+from typing import Self
+
+from serde import SerdeError
+from serde.json import from_json
+
+
+# subclass instead of newtype so it exists at runtime, so we can use isinstance
+class LocalizedStr(str):
+    """Represents a string which has been localized with the i18n dict."""
+
+    def __new__(cls, s: str) -> Self:
+        return str.__new__(cls, s)
+
+
+# TODO: move to config
+_EXTRA_I18N = {
+    "item.minecraft.amethyst_shard": LocalizedStr("Amethyst Shard"),
+    "item.minecraft.budding_amethyst": LocalizedStr("Budding Amethyst"),
+    "block.hexcasting.slate": LocalizedStr("Blank Slate"),
+}
+
+
+# TODO: load ALL of the i18n files, return dict[str, dict[str, LocalizedStr]]
+# or maybe dict[(str, str), LocalizedStr]
+# we could also use that to ensure all i18n files have the same set of keys
+def load_i18n(path: Path) -> dict[str, LocalizedStr]:
+    # load, deserialize, and type-check lang file
+    # TODO: there's probably a library we can use to do this for us
+    i18n: dict[str, LocalizedStr] = json.loads(path.read_text("utf-8"))
+    i18n.update(_EXTRA_I18N)
+
+    assert isinstance(i18n, dict), f"Unexpected top-level type `{type(i18n)}` in i18n"
+    for k, v in i18n.items():
+        assert isinstance(k, str), f"Unexpected key type `{type(k)}` in i18n: {k}"
+        assert isinstance(v, str), f"Unexpected value type `{type(v)}` in i18n: {v}"
+
+    return i18n
+
+
+class Color(str):
+    """Newtype-style class representing a hexadecimal color.
+
+    Inputs are coerced to lowercase `rrggbb`. Raises ValueError on invalid input.
+
+    Valid formats, all of which would be converted to `0099ff`:
+    - `#0099FF`
+    - `#0099ff`
+    - `#09F`
+    - `#09f`
+    - `0099FF`
+    - `0099ff`
+    - `09F`
+    - `09f`
+    """
+
+    __slots__ = ()
+
+    def __new__(cls, s: str) -> Self:
+        color = s.removeprefix("#").lower()
+
+        # 012 -> 001122
+        if len(color) == 3:
+            color = "".join(c + c for c in color)
+
+        # length and character check
+        if len(color) != 6 or any(c not in string.hexdigits for c in color):
+            raise ValueError(f"invalid color code: {s}")
+
+        return str.__new__(cls, color)
+
+
+class FromJson:
+    """Helper methods for JSON-deserialized dataclasses."""
+
+    @classmethod
+    def from_json(cls, string: str | bytes) -> Self:
+        """Deserializes the given string into this class."""
+        try:
+            return from_json(cls, string)
+        except SerdeError as e:
+            e.add_note(str(string))
+            raise
+
+    @classmethod
+    def load(cls, path: Path) -> Self:
+        """Reads and deserializes the JSON file at the given path."""
+        return cls.from_json(path.read_text("utf-8"))
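A quick sketch of how the two newtypes above behave, following their docstrings directly (the assertions are illustrative, not part of the commit):

```python
from common.deserialize import Color, LocalizedStr

# Color normalizes any of the documented input forms to lowercase rrggbb
assert Color("#09F") == Color("0099ff") == "0099ff"

# invalid input fails at construction time, not at use time
try:
    Color("banana")
except ValueError:
    pass

# LocalizedStr subclasses str (rather than using typing.NewType),
# so isinstance checks work at runtime, as the comment above notes
s = LocalizedStr("Amethyst Shard")
assert isinstance(s, LocalizedStr) and isinstance(s, str)
```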
152 doc/src/common/formatting.py Normal file
@@ -0,0 +1,152 @@
+import re
+from dataclasses import dataclass
+from typing import NamedTuple, Self
+
+from common.deserialize import LocalizedStr
+
+_COLORS: dict[str, str | None] = {
+    "0": None,
+    "1": "00a",
+    "2": "0a0",
+    "3": "0aa",
+    "4": "a00",
+    "5": "a0a",
+    "6": "fa0",
+    "7": "aaa",
+    "8": "555",
+    "9": "55f",
+    "a": "5f5",
+    "b": "5ff",
+    "c": "f55",
+    "d": "f5f",
+    "e": "ff5",
+    "f": "fff",
+}
+
+_TYPES = {
+    "k": "obf",
+    "l": "bold",
+    "m": "strikethrough",
+    "n": "underline",
+    "o": "italic",
+}
+
+_KEYS = {
+    "use": "Right Click",
+    "sneak": "Left Shift",
+}
+
+
+class Style(NamedTuple):
+    type: str
+    value: str | bool | dict[str, str] | None
+
+
+def parse_style(sty: str) -> tuple[str, Style | None]:
+    # TODO: match, maybe
+    if sty == "br":
+        return "\n", None
+    if sty == "br2":
+        return "", Style("para", {})
+    if sty == "li":
+        return "", Style("para", {"clazz": "fake-li"})
+    if sty[:2] == "k:":
+        return _KEYS[sty[2:]], None
+    if sty[:2] == "l:":
+        return "", Style("link", sty[2:])
+    if sty == "/l":
+        return "", Style("link", None)
+    if sty == "playername":
+        return "[Playername]", None
+    if sty[:2] == "t:":
+        return "", Style("tooltip", sty[2:])
+    if sty == "/t":
+        return "", Style("tooltip", None)
+    if sty[:2] == "c:":
+        return "", Style("cmd_click", sty[2:])
+    if sty == "/c":
+        return "", Style("cmd_click", None)
+    if sty == "r" or not sty:
+        return "", Style("base", None)
+    if sty in _TYPES:
+        return "", Style(_TYPES[sty], True)
+    if sty in _COLORS:
+        return "", Style("color", _COLORS[sty])
+    if sty.startswith("#") and len(sty) in [4, 7]:
+        return "", Style("color", sty[1:])
+    # TODO more style parse
+    raise ValueError("Unknown style: " + sty)
+
+
+_format_re = re.compile(r"\$\(([^)]*)\)")
+
+
+@dataclass
+class FormatTree:
+    style: Style
+    children: list[Self | str]
+
+    @classmethod
+    def empty(cls) -> Self:
+        return cls(Style("base", None), [])
+
+    @classmethod
+    def format(cls, macros: dict[str, str], string: LocalizedStr) -> Self:
+        # FIXME: ew.
+
+        # resolve macros
+        # TODO: use ahocorasick
+        old_string = None
+        while old_string != string:
+            old_string = string
+            for macro, replace in macros.items():
+                string = LocalizedStr(string.replace(macro, replace))
+
+        # lex out parsed styles
+        text_nodes: list[str] = []
+        styles: list[Style] = []
+        last_end = 0
+        extra_text = ""
+        for mobj in re.finditer(_format_re, string):
+            bonus_text, sty = parse_style(mobj.group(1))
+            text = string[last_end : mobj.start()] + bonus_text
+            if sty:
+                styles.append(sty)
+                text_nodes.append(extra_text + text)
+                extra_text = ""
+            else:
+                extra_text += text
+            last_end = mobj.end()
+        text_nodes.append(extra_text + string[last_end:])
+        first_node, *text_nodes = text_nodes
+
+        # parse
+        style_stack = [
+            FormatTree(Style("base", True), []),
+            FormatTree(Style("para", {}), [first_node]),
+        ]
+        for style, text in zip(styles, text_nodes):
+            tmp_stylestack: list[Style] = []
+            if style.type == "base":
+                while style_stack[-1].style.type != "para":
+                    last_node = style_stack.pop()
+                    style_stack[-1].children.append(last_node)
+            elif any(tree.style.type == style.type for tree in style_stack):
+                while len(style_stack) >= 2:
+                    last_node = style_stack.pop()
+                    style_stack[-1].children.append(last_node)
+                    if last_node.style.type == style.type:
+                        break
+                    tmp_stylestack.append(last_node.style)
+            for sty in tmp_stylestack:
+                style_stack.append(FormatTree(sty, []))
+            if style.value is None:
+                if text:
+                    style_stack[-1].children.append(text)
+            else:
+                style_stack.append(FormatTree(style, [text] if text else []))
+        while len(style_stack) >= 2:
+            last_node = style_stack.pop()
+            style_stack[-1].children.append(last_node)
+
+        return style_stack[0]
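A small sketch of the two entry points above; the expected values follow directly from the `parse_style` branches, and the macro table here is a one-entry stand-in for `_DEFAULT_MACROS`:

```python
from common.deserialize import LocalizedStr
from common.formatting import FormatTree, Style, parse_style

# style codes map to (literal text, Style) pairs
assert parse_style("br") == ("\n", None)
assert parse_style("l:items/focus") == ("", Style("link", "items/focus"))
assert parse_style("#490") == ("", Style("color", "490"))

# macros are expanded first, then the $(...) codes are lexed into a tree
tree = FormatTree.format(
    {"$(item)": "$(#b0b)"},  # illustrative one-entry macro table
    LocalizedStr("A $(item)Focus$() is an item."),
)
assert tree.style.type == "base"  # the root of the stack is the base style
```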
75 doc/src/common/pattern_info.py Normal file
@@ -0,0 +1,75 @@
+import re
+from dataclasses import dataclass
+from pathlib import Path
+
+
+@dataclass
+class RawPatternInfo:
+    direction: str
+    angle_sig: str
+    is_per_world: bool
+
+
+@dataclass
+class PatternInfo(RawPatternInfo):
+    modid: str
+    name: str
+
+    @property
+    def resource_location(self) -> str:
+        return f"{self.modid}:{self.name}"
+
+
+_DEFAULT_PATTERN_RE = re.compile(
+    r'HexPattern\.fromAngles\("([qweasd]+)", HexDir\.(\w+)\),\s*modLoc\("([^"]+)"\)([^;]*true\);)?'
+)
+
+
+@dataclass
+class PatternStubFile:
+    loader: str | None
+    raw_path: str
+
+    def path(self, resource_dir: Path) -> Path:
+        file = resource_dir.parent / "java" / self.raw_path
+        if self.loader is not None:
+            file = Path(str(file).replace("Common", self.loader))
+        return file
+
+    def load_patterns(
+        self,
+        resource_dir: Path,
+        modid: str,
+        pattern_re: re.Pattern[str] = _DEFAULT_PATTERN_RE,
+    ) -> dict[str, PatternInfo]:
+        # TODO: add Gradle task to generate json with this data. this is dumb and fragile.
+
+        patterns: dict[str, PatternInfo] = {}
+
+        pattern_data = self.path(resource_dir).read_text("utf-8")
+        for match in pattern_re.finditer(pattern_data):
+            angle_sig, direction, name, is_per_world = match.groups()
+            pattern = PatternInfo(
+                direction,
+                angle_sig,
+                bool(is_per_world),
+                modid,
+                name,
+            )
+            patterns[pattern.resource_location] = pattern
+
+        return patterns
+
+
+def load_patterns(
+    pattern_stubs: list[PatternStubFile],
+    resource_dir: Path,
+    modid: str,
+    pattern_re: re.Pattern[str] = _DEFAULT_PATTERN_RE,
+) -> dict[str, PatternInfo]:
+    """Returns map from resource location (eg. hexcasting:brainsweep) to PatternInfo."""
+
+    patterns: dict[str, PatternInfo] = {}
+    for stub in pattern_stubs:
+        patterns.update(stub.load_patterns(resource_dir, modid, pattern_re))
+    return patterns
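To illustrate what `_DEFAULT_PATTERN_RE` scrapes out of the Java pattern registrations, here is a made-up line in that style (not a real line from RegisterPatterns.java); the groups become the angle signature, start direction, name, and the per-world flag:

```python
from common.pattern_info import _DEFAULT_PATTERN_RE

# illustrative only; the real input is read from the Java stub files above
line = 'HexPattern.fromAngles("qaq", HexDir.NORTH_EAST), modLoc("example"), true);'

match = _DEFAULT_PATTERN_RE.search(line)
assert match is not None
angle_sig, direction, name, is_per_world = match.groups()
assert (angle_sig, direction, name) == ("qaq", "NORTH_EAST", "example")
assert bool(is_per_world)  # group 4 matched ", true);", so the pattern is per-world
```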
@@ -1,8 +1,24 @@
 # pyright: reportUnknownMemberType=false
 
+import sys
+from pathlib import Path
+
-from collate_data import generate_docs, parse_book
+from collate_data import generate_docs
+from common.pattern_info import PatternStubFile
+from patchouli.book import Book
 from tap import Tap
 
+if sys.version_info < (3, 11):
+    raise RuntimeError("Minimum Python version: 3.11")
+
+_PATTERN_STUBS = [
+    PatternStubFile(None, "at/petrak/hexcasting/interop/pehkui/PehkuiInterop.java"),
+    PatternStubFile(None, "at/petrak/hexcasting/common/casting/RegisterPatterns.java"),
+    PatternStubFile(
+        "Fabric",
+        "at/petrak/hexcasting/fabric/interop/gravity/GravityApiInterop.java",
+    ),
+]
+
 # CLI arguments
 class Args(Tap):
@@ -29,7 +45,7 @@ class Args(Tap):
 
 def main(args: Args) -> None:
     # read the book and template, then fill the template
-    book = parse_book(args.root.as_posix(), args.mod_name, args.book_name)
+    book = Book(args.root, args.mod_name, args.book_name, _PATTERN_STUBS)
     template = args.template_file.read_text("utf-8")
 
     docs = generate_docs(book, template)
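Tying the entrypoint together: per launch.json above, the docgen runs from `doc/` with the resource dir, mod id, book name, and template as positional arguments. A hedged sketch of the equivalent direct call, with a hypothetical one-entry stub list standing in for `_PATTERN_STUBS`:

```python
from pathlib import Path

from collate_data import generate_docs
from common.pattern_info import PatternStubFile
from patchouli.book import Book

# hypothetical one-entry stub list; the real list is _PATTERN_STUBS above
stubs = [
    PatternStubFile(None, "at/petrak/hexcasting/common/casting/RegisterPatterns.java"),
]

book = Book(
    Path("../Common/src/main/resources"),  # resource_dir, as in launch.json
    "hexcasting",                          # modid
    "thehexbook",                          # patchouli book name
    stubs,
)
docs = generate_docs(book, Path("template.html").read_text("utf-8"))
```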
546 doc/src/patchouli/book.py Normal file
@@ -0,0 +1,546 @@
+from __future__ import annotations
+
+import dataclasses as dc
+import json
+import os
+import re
+from dataclasses import InitVar, dataclass
+from pathlib import Path
+from typing import (
+    Any,
+    Callable,
+    Generic,
+    Literal,
+    LiteralString,
+    NotRequired,
+    TypedDict,
+    TypeVar,
+)
+
+from common.deserialize import Color, FromJson, LocalizedStr, load_i18n
+from common.formatting import FormatTree
+from common.pattern_info import (
+    PatternInfo,
+    PatternStubFile,
+    RawPatternInfo,
+    load_patterns,
+)
+from serde import deserialize, field
+
+_T_LiteralString = TypeVar("_T_LiteralString", bound=LiteralString)
+
+
+class _BasePage(TypedDict, Generic[_T_LiteralString]):
+    type: _T_LiteralString
+
+
+class Page_patchouli_text(_BasePage[Literal["patchouli:text"]]):
+    text: FormatTree | list
+    anchor: NotRequired[str]
+    input: NotRequired[str]
+    op_id: NotRequired[str]
+    output: NotRequired[str]
+    title: NotRequired[str]
+
+
+class Page_patchouli_link(_BasePage[Literal["patchouli:link"]]):
+    link_text: str
+    text: FormatTree
+    url: str
+
+
+class Page_patchouli_spotlight(_BasePage[Literal["patchouli:spotlight"]]):
+    item: str
+    item_name: str
+    link_recipe: bool
+    text: FormatTree
+    anchor: NotRequired[str]
+
+
+class Page_patchouli_crafting(_BasePage[Literal["patchouli:crafting"]]):
+    item_name: list
+    recipe: str
+    anchor: NotRequired[str]
+    recipe2: NotRequired[str]
+    text: NotRequired[FormatTree | list]
+    title: NotRequired[str]
+
+
+class Page_patchouli_image(_BasePage[Literal["patchouli:image"]]):
+    border: bool
+    images: list
+    title: str
+
+
+class Page_patchouli_empty(_BasePage[Literal["patchouli:empty"]]):
+    pass
+
+
+class Page_hexcasting_pattern(_BasePage[Literal["hexcasting:pattern"]]):
+    name: str
+    op: list[PatternInfo]
+    op_id: str
+    text: FormatTree | list
+    anchor: NotRequired[str]
+    header: NotRequired[str]
+    hex_size: NotRequired[int]
+    input: NotRequired[str]
+    output: NotRequired[str]
+
+
+class Page_hexcasting_manual_pattern(_BasePage[Literal["hexcasting:manual_pattern"]]):
+    anchor: str
+    header: str
+    op: list[RawPatternInfo]
+    patterns: dict | list
+    text: FormatTree
+    input: NotRequired[str]
+    op_id: NotRequired[str]
+    output: NotRequired[str]
+
+
+class Page_hexcasting_manual_pattern_nosig(
+    _BasePage[Literal["hexcasting:manual_pattern_nosig"]]
+):
+    header: str
+    op: list[RawPatternInfo]
+    patterns: dict | list
+    text: FormatTree
+
+
+class Page_hexcasting_crafting_multi(_BasePage[Literal["hexcasting:crafting_multi"]]):
+    heading: str
+    item_name: list
+    recipes: list
+    text: FormatTree
+
+
+class Page_hexcasting_brainsweep(_BasePage[Literal["hexcasting:brainsweep"]]):
+    output_name: str
+    recipe: str
+    text: FormatTree
+
+
+# convenient type aliases
+# TODO: replace with polymorphism, probably
+
+Page = (
+    Page_patchouli_text
+    | Page_patchouli_link
+    | Page_patchouli_spotlight
+    | Page_patchouli_crafting
+    | Page_patchouli_image
+    | Page_patchouli_empty
+    | Page_hexcasting_pattern
+    | Page_hexcasting_manual_pattern
+    | Page_hexcasting_manual_pattern_nosig
+    | Page_hexcasting_crafting_multi
+    | Page_hexcasting_brainsweep
+)
+
+RecipePage = (
+    Page_patchouli_crafting
+    | Page_hexcasting_crafting_multi
+    | Page_hexcasting_brainsweep
+)
+
+PatternPageWithSig = Page_hexcasting_pattern | Page_hexcasting_manual_pattern
+
+ManualPatternPage = (
+    Page_hexcasting_manual_pattern | Page_hexcasting_manual_pattern_nosig
+)
+
+PatternPage = (
+    Page_hexcasting_pattern
+    | Page_hexcasting_manual_pattern
+    | Page_hexcasting_manual_pattern_nosig
+)
+
+
+class Entry(TypedDict):
+    category: str
+    icon: str
+    id: str
+    name: str
+    pages: list[_BasePage]
+    advancement: NotRequired[str]
+    entry_color: NotRequired[str]
+    extra_recipe_mappings: NotRequired[dict]
+    flag: NotRequired[str]
+    priority: NotRequired[bool]
+    read_by_default: NotRequired[bool]
+    sort_num: NotRequired[int]
+    sortnum: NotRequired[float | int]
+
+
+class Category(TypedDict):
+    description: FormatTree
+    entries: list[Entry]
+    icon: str
+    id: str
+    name: str
+    sortnum: int
+    entry_color: NotRequired[str]
+    flag: NotRequired[str]
+    parent: NotRequired[str]
+
+
+# TODO: what the hell is this
+bind1 = (lambda: None).__get__(0).__class__
+
+
+# TODO: serde
+def slurp(filename: str) -> Any:
+    with open(filename, "r", encoding="utf-8") as fh:
+        return json.load(fh)
+
+
+def resolve_pattern(book: Book, page: Page_hexcasting_pattern) -> None:
+    page["op"] = [book.patterns[page["op_id"]]]
+    page["name"] = book.localize_pattern(page["op_id"])
+
+
+def fixup_pattern(do_sig: bool, book: Book, page: ManualPatternPage) -> None:
+    patterns = page["patterns"]
+    if (op_id := page.get("op_id")) is not None:
+        page["header"] = book.localize_pattern(op_id)
+    if not isinstance(patterns, list):
+        patterns = [patterns]
+    if do_sig:
+        inp = page.get("input", None) or ""
+        oup = page.get("output", None) or ""
+        pipe = f"{inp} \u2192 {oup}".strip()
+        suffix = f" ({pipe})" if inp or oup else ""
+        page["header"] += suffix
+    page["op"] = [
+        RawPatternInfo(p["startdir"], p["signature"], False) for p in patterns
+    ]
+
+
+# TODO: recipe type (not a page, apparently)
+def fetch_recipe(book: Book, recipe: str) -> dict[str, dict[str, str]]:
+    modid, recipeid = recipe.split(":")
+    gen_resource_dir = (
+        book.resource_dir.as_posix()
+        .replace("/main/", "/generated/")
+        .replace("Common/", "Forge/")
+    )  # TODO hack
+    recipe_path = f"{gen_resource_dir}/data/{modid}/recipes/{recipeid}.json"
+    return slurp(recipe_path)
+
+
+def fetch_recipe_result(book: Book, recipe: str):
+    return fetch_recipe(book, recipe)["result"]["item"]
+
+
+def fetch_bswp_recipe_result(book: Book, recipe: str):
+    return fetch_recipe(book, recipe)["result"]["name"]
+
+
+# TODO: move all of this to the individual page classes
+page_transformers: dict[str, Callable[[Book, Any], None]] = {
+    "hexcasting:pattern": resolve_pattern,
+    "hexcasting:manual_pattern": bind1(fixup_pattern, True),
+    "hexcasting:manual_pattern_nosig": bind1(fixup_pattern, False),
+    "hexcasting:brainsweep": lambda book, page: page.__setitem__(
+        "output_name",
+        book.localize_item(fetch_bswp_recipe_result(book, page["recipe"])),
+    ),
+    "patchouli:link": lambda book, page: do_localize(book, page, "link_text"),
+    "patchouli:crafting": lambda book, page: page.__setitem__(
+        "item_name",
+        [
+            book.localize_item(fetch_recipe_result(book, page[ty]))
+            for ty in ("recipe", "recipe2")
+            if ty in page
+        ],
+    ),
+    "hexcasting:crafting_multi": lambda book, page: page.__setitem__(
+        "item_name",
+        [
+            book.localize_item(fetch_recipe_result(book, recipe))
+            for recipe in page["recipes"]
+        ],
+    ),
+    "patchouli:spotlight": lambda book, page: page.__setitem__(
+        "item_name", book.localize_item(page["item"])
+    ),
+}
+
+
+# TODO: remove
+def do_localize(book: Book, obj: Category | Entry | Page, *names: str) -> None:
+    for name in names:
+        if name in obj:
+            obj[name] = book.localize(obj[name])
+
+
+# TODO: remove
+def do_format(book: Book, obj: Category | Entry | Page, *names: str) -> None:
+    for name in names:
+        if name in obj:
+            obj[name] = book.format(obj[name])
+
+
+# TODO: move to serde
+def parse_entry(book: Book, entry_path: str, ent_name: str) -> Entry:
+    data: Entry = slurp(f"{entry_path}")
+    do_localize(book, data, "name")
+    for i, page in enumerate(data["pages"]):
+        if isinstance(page, str):
+            page = Page_patchouli_text(type="patchouli:text", text=book.format(page))
+            data["pages"][i] = page
+        else:
+            do_format(book, page, "text")
+            do_localize(book, page, "title", "header")
+            if page_transformer := page_transformers.get(page["type"]):
+                page_transformer(book, page)
+    data["id"] = ent_name
+
+    return data
+
+
+def parse_category(book: Book, base_dir: str, cat_name: str) -> Category:
+    data: Category = slurp(f"{base_dir}/categories/{cat_name}.json")
+    do_localize(book, data, "name")
+    do_format(book, data, "description")
+
+    entry_dir = f"{base_dir}/entries/{cat_name}"
+    entries: list[Entry] = []
+    for filename in os.listdir(entry_dir):
+        if filename.endswith(".json"):
+            basename = filename[:-5]
+            entries.append(
+                parse_entry(book, f"{entry_dir}/{filename}", cat_name + "/" + basename)
+            )
+    entries.sort(
+        key=lambda ent: (
+            not ent.get("priority", False),
+            ent.get("sortnum", 0),
+            ent["name"],
+        )
+    )
+    data["entries"] = entries
+    data["id"] = cat_name
+
+    return data
+
+
+def parse_sortnum(cats: dict[str, Category], name: str) -> tuple[int, ...]:
+    if "/" in name:
+        ix = name.rindex("/")
+        return parse_sortnum(cats, name[:ix]) + (cats[name].get("sortnum", 0),)
+    return (cats[name].get("sortnum", 0),)
+
+
+_DEFAULT_LANG_NAME = "en_us"
+
+_DEFAULT_MACROS: dict[str, str] = {
+    "$(obf)": "$(k)",
+    "$(bold)": "$(l)",
+    "$(strike)": "$(m)",
+    "$(italic)": "$(o)",
+    "$(italics)": "$(o)",
+    "$(list": "$(li",
+    "$(reset)": "$()",
+    "$(clear)": "$()",
+    "$(2br)": "$(br2)",
+    "$(p)": "$(br2)",
+    "/$": "$()",
+    "<br>": "$(br)",
+    "$(nocolor)": "$(0)",
+    "$(item)": "$(#b0b)",
+    "$(thing)": "$(#490)",
+}
+
+
+TextOverflowMode = Literal["overflow"] | Literal["resize"] | Literal["truncate"]
+
+
+@deserialize
+@dataclass
+class RawBook(FromJson):
+    """Direct representation of book.json.
+
+    You should probably not use this to edit and re-serialize book.json, because this sets
+    all the default values as defined by the docs. (TODO: superclass which doesn't do that)
+
+    See: https://vazkiimods.github.io/Patchouli/docs/reference/book-json
+    """
+
+    # required
+    name: str
+    landing_text: str
+
+    # optional
+    book_texture: str = "patchouli:textures/gui/book_brown.png"
+    filler_texture: str | None = None
+    crafting_texture: str | None = None
+    model: str = "patchouli:book_brown"
+    text_color: Color = Color("000000")
+    header_color: Color = Color("333333")
+    nameplate_color: Color = Color("FFDD00")
+    link_color: Color = Color("0000EE")
+    link_hover_color: Color = Color("8800EE")
+    progress_bar_color: Color = Color("FFFF55")
+    progress_bar_background: Color = Color("DDDDDD")
+    open_sound: str | None = None
+    flip_sound: str | None = None
+    _index_icon: str | None = field(rename="index_icon", default=None)
+    pamphlet: bool = False
+    show_progress: bool = True
+    version: str | int = 0
+    subtitle: str | None = None
+    creative_tab: str = "misc"  # TODO: this was changed in 1.19.3+, and again in 1.20
+    advancements_tab: str | None = None
+    dont_generate_book: bool = False
+    custom_book_item: str | None = None
+    show_toasts: bool = True
+    use_blocky_font: bool = False
+    i18n: bool = False
+    macros: dict[str, str] | None = None
+    pause_game: bool = False
+    text_overflow_mode: TextOverflowMode | None = None
+    extend: str | None = None
+    """NOTE: currently this WILL NOT load values from the target book!"""
+    allow_extensions: bool = True
+
+    @property
+    def index_icon(self) -> str:
+        return self.model if self._index_icon is None else self._index_icon
+
+
+@dataclass
+class Book:
+    """Main dataclass for the docgen.
+    Includes all data from book.json, some useful paths, and the pattern lookup.
+
+    Constructor opens a bunch of files.
+    """
+
+    # constructor args
+    resource_dir: Path
+    modid: str
+    patchouli_name: str
+
+    pattern_stubs: InitVar[list[PatternStubFile]]
+    lang_name: InitVar[str] = _DEFAULT_LANG_NAME
+
+    # other fields
+    blacklist: set[str] = dc.field(init=False, default_factory=set)
+    categories: list[Category] = dc.field(init=False, default_factory=list)
+    macros: dict[str, str] = dc.field(init=False, default_factory=dict)
+    spoilers: set[str] = dc.field(init=False, default_factory=set)
+
+    def __post_init__(
+        self,
+        pattern_stubs: list[PatternStubFile],
+        lang_name: str,
+    ) -> None:
+        # deserialize raw book
+        # must be initialized first
+        self.raw: RawBook = RawBook.load(self.book_dir / "book.json")
+
+        # lang
+        # must be initialized before using self.localize or self.format
+        lang_file = self.lang_dir / f"{lang_name}.json"
+        self.lang: dict[str, LocalizedStr] | None = (
+            load_i18n(lang_file) if self.raw.i18n else None
+        )
+
+        # macros
+        # must be initialized before using self.format
+        # TODO: order of operations - should default macros really be overriding book macros?
+        if self.raw.macros is not None:
+            self.macros.update(self.raw.macros)
+        self.macros.update(_DEFAULT_MACROS)
+
+        # localized strings
+        self.name: LocalizedStr = self.localize(self.raw.name)
+        self.landing_text: FormatTree = self.format(self.raw.landing_text)
+
+        # patterns
+        self.patterns: dict[str, PatternInfo] = load_patterns(
+            pattern_stubs,
+            self.resource_dir,
+            self.modid,
+        )
+
+        # categories
+        # TODO: make this not awful
+        base_dir = self.book_dir / _DEFAULT_LANG_NAME
+        categories_dir = base_dir / "categories"
+        for path in categories_dir.rglob("*.json"):
+            basename = path.relative_to(categories_dir).with_suffix("").as_posix()
+            self.categories.append(parse_category(self, base_dir.as_posix(), basename))
+        cats = {cat["id"]: cat for cat in self.categories}
+        self.categories.sort(
+            key=lambda cat: (parse_sortnum(cats, cat["id"]), cat["name"])
+        )
+
+    @property
+    def book_dir(self) -> Path:
+        return (
+            self.resource_dir
+            / "data"
+            / self.modid
+            / "patchouli_books"
+            / self.patchouli_name
+        )
+
+    @property
+    def lang_dir(self) -> Path:
+        return self.resource_dir / "assets" / self.modid / "lang"
+
+    def localize(
+        self,
+        key: str,
+        default: str | None = None,
+        skip_errors: bool = False,
+    ) -> LocalizedStr:
+        """Looks up the given string in the lang table if i18n is enabled.
+        Otherwise, returns the original key.
+
+        Raises KeyError if i18n is enabled and skip_errors is False but the key has no localization.
+        """
+        if self.lang is None:
+            return LocalizedStr(key.replace("%%", "%"))
+
+        if default is not None:
+            localized = self.lang.get(key, default)
+        elif skip_errors:
+            localized = self.lang.get(key, key)
+        else:
+            # raises if not found
+            localized = self.lang[key]
+
+        return LocalizedStr(localized.replace("%%", "%"))
+
+    def localize_pattern(self, op_id: str, skip_errors: bool = False) -> LocalizedStr:
+        """Localizes the given pattern id (internal name, eg. brainsweep).
+
+        Raises KeyError if i18n is enabled and skip_errors is False but the key has no localization.
+        """
+        try:
+            # prefer the book-specific translation if it exists
+            # don't pass skip_errors here because we need to catch it below
+            return self.localize(f"hexcasting.spell.book.{op_id}")
+        except KeyError:
+            return self.localize(f"hexcasting.spell.{op_id}", skip_errors=skip_errors)
+
+    def localize_item(self, item: str, skip_errors: bool = False) -> LocalizedStr:
+        """Localizes the given item resource name.
+
+        Raises KeyError if i18n is enabled and skip_errors is False but the key has no localization.
+        """
+        # FIXME: hack
+        item = re.sub(r"{.*", "", item.replace(":", "."))
+        try:
+            return self.localize(f"block.{item}")
+        except KeyError:
+            return self.localize(f"item.{item}", skip_errors=skip_errors)
+
+    def format(self, text: str | LocalizedStr, skip_errors: bool = False) -> FormatTree:
+        """Converts the given string into a FormatTree, localizing it if necessary."""
+        if not isinstance(text, LocalizedStr):
+            text = self.localize(text, skip_errors=skip_errors)
+        return FormatTree.format(self.macros, text)
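The localization chain above is the heart of the refactor: `Book.localize` consults the lang table only when book.json enables i18n, and `localize_pattern`/`localize_item` layer fallbacks on top of it. A sketch of the fallback order (illustrative; the stub list is hypothetical and the paths assume the layout from launch.json):

```python
from pathlib import Path

from common.pattern_info import PatternStubFile
from patchouli.book import Book

stubs = [PatternStubFile(None, "at/petrak/hexcasting/common/casting/RegisterPatterns.java")]
book = Book(Path("../Common/src/main/resources"), "hexcasting", "thehexbook", stubs)

# prefers the book-specific key, then falls back to the plain spell key:
#   "hexcasting.spell.book.brainsweep" -> "hexcasting.spell.brainsweep"
name = book.localize_pattern("brainsweep")

# item ids are rewritten to lang keys, block first, then item:
#   "block.minecraft.amethyst_shard" -> "item.minecraft.amethyst_shard"
# (the item key is pre-seeded by _EXTRA_I18N in common/deserialize.py)
item = book.localize_item("minecraft:amethyst_shard")

# with i18n disabled (book.lang is None), localize just returns the key
# with "%%" collapsed to "%"
```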
0 doc/src/patchouli/category.py Normal file
0 doc/src/patchouli/entry.py Normal file
0 doc/src/patchouli/page.py Normal file
@@ -1,209 +0,0 @@
-# these are mostly just copied from HexBug for now
-# TODO: use dataclasses-json
-
-from typing import (
-    Generic,
-    Literal,
-    LiteralString,
-    NamedTuple,
-    NotRequired,
-    Self,
-    TypedDict,
-    TypeVar,
-)
-
-T = TypeVar("T", bound=LiteralString)
-
-
-class Style(NamedTuple):
-    type: str
-    value: str | bool | dict[str, str] | None
-
-
-class FormatTree(NamedTuple):
-    style: Style
-    children: list[Self | str]
-
-
-Text = FormatTree | str
-
-
-class _BasePage(TypedDict, Generic[T]):
-    type: T
-
-
-class Page_patchouli_text(_BasePage[Literal["patchouli:text"]]):
-    text: FormatTree | list
-    anchor: NotRequired[str]
-    input: NotRequired[str]
-    op_id: NotRequired[str]
-    output: NotRequired[str]
-    title: NotRequired[str]
-
-
-class Page_patchouli_link(_BasePage[Literal["patchouli:link"]]):
-    link_text: str
-    text: FormatTree
-    url: str
-
-
-class Page_patchouli_spotlight(_BasePage[Literal["patchouli:spotlight"]]):
-    item: str
-    item_name: str
-    link_recipe: bool
-    text: FormatTree
-    anchor: NotRequired[str]
-
-
-class Page_patchouli_crafting(_BasePage[Literal["patchouli:crafting"]]):
-    item_name: list
-    recipe: str
-    anchor: NotRequired[str]
-    recipe2: NotRequired[str]
-    text: NotRequired[FormatTree | list]
-    title: NotRequired[str]
-
-
-class Page_patchouli_image(_BasePage[Literal["patchouli:image"]]):
-    border: bool
-    images: list
-    title: str
-
-
-class Page_patchouli_empty(_BasePage[Literal["patchouli:empty"]]):
-    pass
-
-
-class Page_hexcasting_pattern(_BasePage[Literal["hexcasting:pattern"]]):
-    name: str
-    op: list
-    op_id: str
-    text: FormatTree | list
-    anchor: NotRequired[str]
-    header: NotRequired[str]
-    hex_size: NotRequired[int]
-    input: NotRequired[str]
-    output: NotRequired[str]
-
-
-class Page_hexcasting_manual_pattern(_BasePage[Literal["hexcasting:manual_pattern"]]):
-    anchor: str
-    header: str
-    op: list
-    patterns: dict | list
-    text: FormatTree
-    input: NotRequired[str]
-    op_id: NotRequired[str]
-    output: NotRequired[str]
-
-
-class Page_hexcasting_manual_pattern_nosig(
-    _BasePage[Literal["hexcasting:manual_pattern_nosig"]]
-):
-    header: str
-    op: list
-    patterns: dict | list
-    text: FormatTree
-
-
-class Page_hexcasting_crafting_multi(_BasePage[Literal["hexcasting:crafting_multi"]]):
-    heading: str
-    item_name: list
-    recipes: list
-    text: FormatTree
-
-
-class Page_hexcasting_brainsweep(_BasePage[Literal["hexcasting:brainsweep"]]):
-    output_name: str
-    recipe: str
-    text: FormatTree
-
-
-# convenient type aliases
-# TODO: replace with polymorphism, probably
-
-Page = (
-    Page_patchouli_text
-    | Page_patchouli_link
-    | Page_patchouli_spotlight
-    | Page_patchouli_crafting
-    | Page_patchouli_image
-    | Page_patchouli_empty
-    | Page_hexcasting_pattern
-    | Page_hexcasting_manual_pattern
-    | Page_hexcasting_manual_pattern_nosig
-    | Page_hexcasting_crafting_multi
-    | Page_hexcasting_brainsweep
-)
-
-RecipePage = (
-    Page_patchouli_crafting
-    | Page_hexcasting_crafting_multi
-    | Page_hexcasting_brainsweep
-)
-
-PatternPageWithSig = Page_hexcasting_pattern | Page_hexcasting_manual_pattern
-
-ManualPatternPage = (
-    Page_hexcasting_manual_pattern | Page_hexcasting_manual_pattern_nosig
-)
-
-PatternPage = (
-    Page_hexcasting_pattern
-    | Page_hexcasting_manual_pattern
-    | Page_hexcasting_manual_pattern_nosig
-)
-
-
-class Entry(TypedDict):
-    category: str
-    icon: str
-    id: str
-    name: str
-    pages: list[_BasePage]
-    advancement: NotRequired[str]
-    entry_color: NotRequired[str]
-    extra_recipe_mappings: NotRequired[dict]
-    flag: NotRequired[str]
-    priority: NotRequired[bool]
-    read_by_default: NotRequired[bool]
-    sort_num: NotRequired[int]
-    sortnum: NotRequired[float | int]
-
-
-class Category(TypedDict):
-    description: FormatTree | list
-    entries: list[Entry]
-    icon: str
-    id: str
-    name: str
-    sortnum: int
-    entry_color: NotRequired[str]
-    flag: NotRequired[str]
-    parent: NotRequired[str]
-
-
-# TODO: class
-Registry = dict[str, tuple[str, str, bool]]
-
-
-class Book(TypedDict):
-    blacklist: set
-    categories: list[Category]
-    i18n: dict[str, str]
-    landing_text: FormatTree
-    macros: dict[str, str]
-    modid: str
-    name: str
-    pattern_reg: Registry
-    resource_dir: str
-    spoilers: set
-    version: int
-    book_texture: NotRequired[str]
-    creative_tab: NotRequired[str]
-    extend: NotRequired[str]
-    filler_texture: NotRequired[str]
-    model: NotRequired[str]
-    nameplate_color: NotRequired[str]
-    show_progress: NotRequired[bool]
-    src_dir: NotRequired[str]
File diff suppressed because one or more lines are too long
@@ -1,19 +0,0 @@
-import pytest
-from patchouli.types import Book, FormatTree, Style
-
-
-@pytest.fixture
-def book() -> Book:
-    return Book(
-        blacklist=set(),
-        categories=[],
-        i18n={},
-        landing_text=FormatTree(Style("", None), []),
-        macros={},
-        modid="",
-        name="",
-        pattern_reg={},
-        resource_dir="",
-        spoilers=set(),
-        version=0,
-    )
@@ -1,21 +1,15 @@
-from collate_data import FormatTree, Style, default_macros, format_string
-from patchouli.types import Book
+# pyright: reportPrivateUsage=false
+from common.deserialize import LocalizedStr
+from common.formatting import FormatTree, Style
+from patchouli.book import _DEFAULT_MACROS
 
 
-def test_format_string(book: Book):
+def test_format_string():
     # arrange
-    book.update(
-        {
-            "i18n": {},
-            "macros": default_macros,
-            "resource_dir": "Common/src/main/resources",
-            "modid": "hexcasting",
-        }
-    )
     test_str = "Write the given iota to my $(l:patterns/readwrite#hexcasting:write/local)$(#490)local$().$(br)The $(l:patterns/readwrite#hexcasting:write/local)$(#490)local$() is a lot like a $(l:items/focus)$(#b0b)Focus$(). It's cleared when I stop casting a Hex, starts with $(l:casting/influences)$(#490)Null$() in it, and is preserved between casts of $(l:patterns/meta#hexcasting:for_each)$(#fc77be)Thoth's Gambit$(). "
 
     # act
-    tree = format_string(book, test_str)
+    tree = FormatTree.format(_DEFAULT_MACROS, LocalizedStr(test_str))
 
     # assert
     # FIXME: extremely lazy
@@ -15,7 +15,7 @@ class DocgenArgs:
     argv: list[str]
 
     def assert_out_path(self):
-        actual = self.out_path.read_text()
+        actual = self.out_path.read_text("utf-8")
         assert actual == self.snapshot