Add cookiecutter template

object-Object 2023-09-08 00:55:34 -04:00
parent 7e2f60c243
commit 3375ca1b3c
17 changed files with 769 additions and 9 deletions

View file

@@ -1,10 +1,10 @@
-# doc
+# hexdoc
-Contains the Python docgen for Hex Casting.
+This is the Python docgen for Hex Casting.
## Version scheme
-This package uses [hatch-gradle-version](https://pypi.org/project/hatch-gradle-version) to generate a version number based on the mod version it was built with.
+We use [hatch-gradle-version](https://pypi.org/project/hatch-gradle-version) to generate the version number based on whichever mod version the docgen was built with.
The version is in this format: `mod-version.python-version.mod-pre.python-dev.python-post`
@@ -27,12 +27,15 @@ pip install -e .[dev]
### CI/CD
-- Under Settings > Environments, create a new environment called `pypi`
+WIP.
+- Under Settings > Environments, create a new environment called `pypi` (and optionally `testpypi`)
+- Follow these instructions: https://docs.pypi.org/trusted-publishers/creating-a-project-through-oidc/
-- TODO
## Usage
```sh
-hexdoc doc/properties.toml -o out
+# run from the repo root, not doc/
+hexdoc ./doc/properties.toml -o _site/src/docs
+hexdoc_merge --src _site/src/docs --dst _site/dst/docs
```

View file

@@ -2,3 +2,5 @@
- [ ] Unit test for mock addon book
- [ ] Re-add edified wood recipe to [Common/src/main/resources/assets/hexcasting/patchouli_books/thehexbook/en_us/entries/items/edified.json](items/edified) when it actually exists
- [ ] Fix mobile styling for navbar dropdowns
+- [ ] Remove base_asset_urls
+- [ ] Publish the CI actions/workflow

22
doc/cookiecutter.json Normal file
View file

@@ -0,0 +1,22 @@
{
    "output_directory": "",
    "modid": "",
    "mod_name": "{{ cookiecutter.modid|capitalize }}",
    "plugin_classname": "{{ cookiecutter.modid|capitalize }}Plugin",
    "author": "",
    "fallback_book_url": "https://example.com",
    "pattern_regex": [
        "hex_latest",
        "hex_0.10.3",
        "hexal_0.3.0",
        "hexal_0.2.18"
    ],
    "_hexdoc_version": "1.0",
    "__project_name": "hexdoc-{{ cookiecutter.modid|lower }}",
    "__project_slug": "hexdoc_{{ cookiecutter.modid|lower }}",
    "__src": "src/{{ cookiecutter.__project_slug }}",
    "__export_root": "src/{{ cookiecutter.__project_slug }}/_export"
}
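Not part of the commit, but for orientation: the double-underscore keys above are derived by cookiecutter from the public fields (e.g. `modid`), so a run of the template only needs to answer the plain prompts. A minimal sketch using cookiecutter's Python API, assuming the template root is this repo's `doc/` folder; the field values are made-up examples:

```py
from cookiecutter.main import cookiecutter

# Hypothetical invocation of this template; "doc" is the directory containing
# cookiecutter.json, and the extra_context values are examples only.
cookiecutter(
    "doc",
    no_input=True,  # take answers from extra_context instead of prompting
    extra_context={
        "modid": "examplemod",
        "author": "Alice",
        "pattern_regex": "hex_latest",
        # mod_name, plugin_classname, __project_name, __project_slug, etc.
        # are filled in automatically from the Jinja expressions above.
    },
)
```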

View file

@@ -0,0 +1,34 @@
{% raw -%}
name: Install wheel from artifact
description: Install wheel from artifact

inputs:
  name:
    description: Artifact name.
    required: true
  python-version:
    description: Version range or exact version of Python to use.
    required: true

runs:
  using: composite
  steps:
    - name: Set up Python
      uses: actions/setup-python@v4
      with:
        python-version: ${{ inputs.python-version }}
        cache: pip

    - name: Download artifact
      uses: actions/download-artifact@v3
      with:
        name: ${{ inputs.name }}
        path: _dist

    - name: Install wheel
      shell: bash
      run: |
        wheels=( _dist/*-py3-none-any.whl )
        pip install "${wheels[0]}"
        rm -r _dist
{% endraw %}

View file

@@ -0,0 +1,293 @@
{% raw -%}
name: Build the web book

on:
  push:
  workflow_dispatch:
    inputs:
      branch:
        description: 'Branch to generate docs from'
        type: choice
        options:
          - (same as workflow)
      release:
        description: Release this version
        type: boolean
        default: false
      update-latest:
        description: Overwrite latest (and root, if releasing)
        type: boolean
        default: true
      publish:
        description: Package index to publish to
        type: choice
        options:
          - none
          - PyPI (release)
          - TestPyPI
      segment:
        description: 'Version segment to bump with Hatch'
        type: string
        required: false

env:
  PYPI_PACKAGE: {% endraw %}{{ cookiecutter.__project_name }}{% raw %}

permissions:
  contents: read

concurrency:
  group: "docgen"
  cancel-in-progress: false

jobs:
  build:
    runs-on: ubuntu-latest
    # only execute on the default branch or when invoked manually
    if: |-
      github.event_name == 'workflow_dispatch' ||
      github.ref == format('refs/heads/{0}', github.event.repository.default_branch)
    env:
      GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
    permissions:
      contents: write
      pages: read
    outputs:
      pages-url: ${{ steps.get-url.outputs.pages-url }}
      matrix: ${{ steps.list-langs.outputs.matrix }}
      release: ${{ steps.parse-inputs.outputs.release }}
      branch: ${{ steps.parse-inputs.outputs.branch }}
      hexdoc-common: ${{ steps.parse-inputs.outputs.hexdoc-common }}
      hexdoc: ${{ steps.parse-inputs.outputs.hexdoc }}
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: "3.11"
          cache: pip

      - name: Get Pages url
        id: get-url
        run: |
          url=$(gh api "repos/$GITHUB_REPOSITORY/pages" --jq '.html_url')
          echo "pages-url=$url" >> "$GITHUB_OUTPUT"
          echo "GITHUB_PAGES_URL=$url" >> "$GITHUB_ENV"

      - name: Parse inputs
        id: parse-inputs
        run: |
          release=${{ github.event_name == 'workflow_dispatch' && inputs.release || startsWith(github.ref, 'refs/tags') || startsWith(github.event.head_commit.message, '[Release]') }}
          update_latest=${{ github.event_name != 'workflow_dispatch' || inputs.update-latest }}
          if [[ ${{ github.event_name == 'workflow_dispatch' && inputs.branch != '(same as workflow)' }} == true ]]; then
            branch='${{ inputs.branch }}'
            props=_checkout/doc/properties.toml
            echo "HATCH_GRADLE_DIR=_checkout" >> "$GITHUB_ENV"
          else
            branch=none
            props=doc/properties.toml
          fi
          hexdoc_common="--is-release $release --update-latest $update_latest"
          hexdoc="hexdoc $props --ci $hexdoc_common"
          echo "HEXDOC=$hexdoc" >> "$GITHUB_ENV"
          echo "release=$release" >> "$GITHUB_OUTPUT"
          echo "update-latest=$update_latest" >> "$GITHUB_OUTPUT"
          echo "branch=$branch" >> "$GITHUB_OUTPUT"
          echo "hexdoc-common=$hexdoc_common" >> "$GITHUB_OUTPUT"
          echo "hexdoc=$hexdoc" >> "$GITHUB_OUTPUT"

      - name: Checkout input branch
        if: steps.parse-inputs.outputs.branch != 'none'
        id: checkout-input
        uses: actions/checkout@v3
        with:
          ref: ${{ steps.parse-inputs.outputs.branch }}
          path: _checkout

      - name: Install docgen from source
        run: pip install . hatch

      - name: List book languages
        id: list-langs
        run: |
          echo "matrix=$($HEXDOC --list-langs)" >> "$GITHUB_OUTPUT"
          if [[ $RUNNER_DEBUG ]]; then
            tree -I '__pycache__|Common|Fabric|Forge|venv'
          fi

      - name: Export web book
        run: $HEXDOC --export-only

      - name: Bump version
        if: github.event_name == 'workflow_dispatch' && inputs.segment
        run: hatch version "${{ inputs.segment }}"

      - name: Commit changes
        uses: stefanzweifel/git-auto-commit-action@v4
        with:
          commit_message: Build web book from ${{ github.ref }}

      - name: Build docgen
        run: hatch build

      - name: Upload docgen artifact
        uses: actions/upload-artifact@v3
        with:
          name: docgen-build
          path: dist

      - name: Copy build to Pages
        if: steps.parse-inputs.outputs.update-latest
        run: |
          mkdir -p _site/dist
          cp dist/*.whl _site/dist/latest.whl
          cp dist/*.tar.gz _site/dist/latest.tar.gz

      - name: Upload temporary Pages artifact
        if: steps.parse-inputs.outputs.update-latest
        uses: actions/upload-artifact@v3
        with:
          name: github-pages-tmp
          path: _site

  generate:
    runs-on: ubuntu-latest
    needs: build
    continue-on-error: true
    strategy:
      fail-fast: false
      matrix:
        lang: ${{ fromJson(needs.build.outputs.matrix) }}
    env:
      GITHUB_PAGES_URL: ${{ needs.build.outputs.pages-url }}
      HEXDOC: ${{ needs.build.outputs.hexdoc }} --lang ${{ matrix.lang }} -o _site --clean
    steps:
      - uses: actions/checkout@v3
      - uses: ./.github/actions/install-artifact-wheel
        with:
          name: docgen-build
          python-version: "3.11"

      - name: Checkout input branch
        if: needs.build.outputs.branch != 'none'
        uses: actions/checkout@v3
        with:
          ref: ${{ needs.build.outputs.branch }}
          path: _checkout

      - name: Generate web book
        id: gen-normal
        continue-on-error: true
        run: $HEXDOC

      - name: Generate web book with missing translations
        if: steps.gen-normal.outcome == 'failure'
        run: $HEXDOC --allow-missing

      - name: Upload temporary Pages artifact
        uses: actions/upload-artifact@v3
        with:
          name: github-pages-tmp
          path: _site

      - name: Fail if the first generate step failed
        if: steps.gen-normal.outcome == 'failure'
        run: |
          echo "::error::Missing some i18n keys."
          exit 1

  deploy-pages:
    runs-on: ubuntu-latest
    needs: [build, generate]
    permissions:
      contents: write
    steps:
      - uses: actions/checkout@v3
      - uses: ./.github/actions/install-artifact-wheel
        with:
          name: docgen-build
          python-version: "3.11"

      - name: Checkout current Pages
        uses: actions/checkout@v3
        continue-on-error: true
        with:
          ref: gh-pages
          path: _site/dst

      - name: Download temporary Pages artifact
        uses: actions/download-artifact@v3
        with:
          name: github-pages-tmp
          path: _site/src/docs

      - name: Add new docs to site
        run: hexdoc_merge ${{ needs.build.outputs.hexdoc-common }} --src _site/src/docs --dst _site/dst/docs

      - name: Deploy to Pages
        uses: JamesIves/github-pages-deploy-action@v4
        with:
          folder: _site/dst/docs
          target-folder: docs

  publish-pypi:
    runs-on: ubuntu-latest
    needs: build
    if: |-
      needs.build.outputs.release == 'true' &&
      (github.event_name != 'workflow_dispatch' || inputs.publish == 'PyPI (release)')
    environment:
      name: pypi
      url: https://pypi.org/p/${{ env.PYPI_PACKAGE }}
    permissions:
      id-token: write
    steps:
      - name: Download docgen artifact
        uses: actions/download-artifact@v3
        with:
          name: docgen-build
          path: dist

      - name: Publish to PyPI
        uses: pypa/gh-action-pypi-publish@release/v1

  publish-testpypi:
    runs-on: ubuntu-latest
    needs: build
    if: github.event_name == 'workflow_dispatch' && inputs.publish == 'TestPyPI'
    environment:
      name: testpypi
      url: https://test.pypi.org/p/${{ env.PYPI_PACKAGE }}
    permissions:
      id-token: write
    steps:
      - name: Download docgen artifact
        uses: actions/download-artifact@v3
        with:
          name: docgen-build
          path: dist

      - name: Publish to TestPyPI
        uses: pypa/gh-action-pypi-publish@release/v1
        with:
          repository-url: https://test.pypi.org/legacy/
{% endraw %}

View file

@@ -0,0 +1,161 @@
# hexdoc
doc/**/_export/generated/
_site/
_checkout/
__gradle_version__.py
# Python
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock
# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/#use-with-ide
.pdm.toml
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/

View file

@@ -0,0 +1,41 @@
# {{ cookiecutter.modid }}
This is the Python docgen for {{ cookiecutter.modid }}.
## Version scheme
We use [hatch-gradle-version](https://pypi.org/project/hatch-gradle-version) to generate the version number based on whichever mod version the docgen was built with.
The version is in this format: `mod-version.python-version.mod-pre.python-dev.python-post`
For example:
* Mod version: `0.11.1-7`
* Python package version: `1.0.dev0`
* Full version: `0.11.1.1.0rc7.dev0`
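For illustration only (this is not hatch-gradle-version's actual implementation), the pieces of the example above combine roughly like this:

```py
# Rough sketch of how the full version string is assembled from the Gradle mod
# version and the Python package version; hatch-gradle-version does the real work.
mod_version = "0.11.1-7"  # from gradle.properties
py_version = "1.0.dev0"   # from __version__.py

mod_base, _, mod_pre = mod_version.partition("-")  # "0.11.1", "7"
py_base, _, py_dev = py_version.partition(".dev")  # "1.0", "0"

full_version = f"{mod_base}.{py_base}rc{mod_pre}.dev{py_dev}"
assert full_version == "0.11.1.1.0rc7.dev0"
```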
## Setup
```sh
python -m venv venv
.\venv\Scripts\activate # Windows
source venv/bin/activate # anything other than Windows
# run from the repo root, not doc/
pip install -e .[dev]
```
### CI/CD
WIP.
- Under Settings > Environments, create a new environment called `pypi` (and optionally `testpypi`)
- Follow these instructions: https://docs.pypi.org/trusted-publishers/creating-a-project-through-oidc/
## Usage
```sh
# run from the repo root, not doc/
hexdoc ./doc/properties.toml -o _site/src/docs
hexdoc_merge --src _site/src/docs --dst _site/dst/docs
```

View file

@@ -0,0 +1,67 @@
modid = "{{ cookiecutter.modid }}"
book = "hexcasting:thehexbook"
fallback_url = "{{ cookiecutter.fallback_book_url }}"
default_lang = "en_us"
resource_dirs = [ # top takes priority
{ path = "{{ cookiecutter.__export_root }}/resources", reexport = false },
"{_common.src}/main/resources",
"{_common.src}/generated/resources",
"{_fabric.src}/main/resources",
"{_fabric.src}/generated/resources",
"{_forge.src}/main/resources",
"{_forge.src}/generated/resources",
{ modid = "hexcasting" },
]
export_dir = "{{ cookiecutter.__export_root }}/generated"
{# beware of eldritch abominations lurking beneath these waters #}
# NOTE: "!Raw" means "don't apply variable interpolation to this value"
{% if cookiecutter.pattern_regex == "hex_latest" -%}
_pattern_regex = { "!Raw" = 'make\(\s*"(?P<name>[a-zA-Z0-9_\/]+)",\s*(?:new )?(?:ActionRegistryEntry|OperationAction)\(\s*HexPattern\.fromAngles\(\s*"(?P<signature>[aqweds]+)",\s*HexDir.(?P<startdir>\w+)\)' }
{% elif cookiecutter.pattern_regex == "hex_0.10.3" -%}
_pattern_regex = { "!Raw" = 'HexPattern\.fromAngles\("(?P<signature>[qweasd]+)", HexDir\.(?P<startdir>\w+)\),\s*modLoc\("(?P<name>[^"]+)"\)[^;]+?(?:makeConstantOp|Op\w+|Widget\.\w+)(?:[^;]*(?P<is_per_world>true)\);)?' }
{% elif cookiecutter.pattern_regex == "hexal_0.3.0" -%}
_pattern_regex = { "!Raw" = 'make\(\s*"(?P<name>[a-zA-Z0-9_\/]+)",\s*HexPattern\.fromAngles\(\s*"(?P<signature>[aqweds]+)",\s*HexDir.(?P<startdir>\w+)\)' }
{% elif cookiecutter.pattern_regex == "hexal_0.2.18" -%}
_pattern_regex = { "!Raw" = 'HexPattern\.fromAngles\("(?P<signature>[qweasd]+)", HexDir\.(?P<startdir>\w+)\),\s*modLoc\("(?P<name>[^"]+)"\),[^,]+?(?:makeConstantOp|Op\w+).*?(?P<is_per_world>\btrue)?\)(?:[^\)]+?\bval\b|(?:(?!\bval\b)(?:.))+$)' }
{% else %}
{# intentionally crash the template because we got an unhandled value #}
{{ 0/0 }}
{% endif %}
[[pattern_stubs]]
path = "{^_common.package}/TODO/TODO.java"
regex = "{^_pattern_regex}"
[template]
main = "main.html.jinja"
static_dir = "static"
packages = [
"{{ cookiecutter.__project_slug }}",
"hexdoc",
]
[template.args]
title = "{{ cookiecutter.mod_name }} Book"
mod_name = "{{ cookiecutter.mod_name }}"
author = "{{ cookiecutter.author }}"
description = "The {{ cookiecutter.mod_name }} Book, all in one place."
icon_href = "icon.png"
show_landing_text = false
# platforms
[_common]
src = "../Common/src"
package = "{src}/main/java/TODO/{{ cookiecutter.modid }}"
[_fabric]
src = "../Fabric/src"
package = "{src}/main/java/TODO/{{ cookiecutter.modid }}/fabric"
[_forge]
src = "../Forge/src"
package = "{src}/main/java/TODO/{{ cookiecutter.modid }}/forge"

View file

@@ -0,0 +1,4 @@
# This file is auto-generated by hatch-gradle-version.
# Only the value of PY_VERSION is editable. Do not edit other values.
PY_VERSION = "1.0"

View file

@@ -0,0 +1,2 @@
# You can add extra resources in this directory for hexdoc to load.
# For example, hexdoc uses this for translations which are only needed for the web book.

View file

@@ -0,0 +1,21 @@
from importlib.resources import Package

from hexdoc.plugin import LoadResourceDirsImpl, ModVersionImpl, hookimpl

from .__gradle_version__ import GRADLE_VERSION


class {{ cookiecutter.plugin_classname }}(LoadResourceDirsImpl, ModVersionImpl):
    @staticmethod
    @hookimpl
    def hexdoc_mod_version() -> str:
        return GRADLE_VERSION

    @staticmethod
    @hookimpl
    def hexdoc_load_resource_dirs() -> Package | list[Package]:
        # This needs to be a lazy import because they may not exist when this file is
        # first loaded, e.g. when generating the contents of generated.
        from ._export import generated, resources
        return [generated, resources]
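Not part of the template, but for context: hexdoc finds this plugin class through the `[project.entry-points.hexdoc]` table declared in the generated pyproject.toml (see below). A minimal sketch of entry-point discovery using the standard library; hexdoc's own plugin loader may differ:

```py
from importlib.metadata import entry_points

# Discover everything registered under the "hexdoc" entry point group,
# e.g. the plugin class defined in _hooks.py above.
for ep in entry_points(group="hexdoc"):
    plugin_class = ep.load()
    print(ep.name, plugin_class)
```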

Binary file not shown (new image, 6.5 KiB).

View file

@@ -0,0 +1,110 @@
[build-system]
requires = ["hatchling", "hatch-gradle-version>=0.7.0"]
build-backend = "hatchling.build"
# project metadata
[project]
name = "{{ cookiecutter.__project_name }}"
readme = "doc/README.md"
authors = [
{ name="{{ cookiecutter.author }}" },
]
requires-python = ">=3.11"
dynamic = ["version", "dependencies", "optional-dependencies"]
# Gradle version/deps
[tool.hatch.version]
scheme = "gradle"
source = "gradle-properties"
py-path = "doc/{{ cookiecutter.__src }}/__version__.py"
[tool.hatch.metadata.hooks.gradle-properties]
dependencies = [ # this can include normal dependency strings too
{ package="hexdoc", op="~=", py-version="{{ cookiecutter._hexdoc_version }}", key="hexcastingVersion" },
]
[tool.hatch.metadata.hooks.gradle-properties.optional-dependencies]
dev = [
"black==23.7.0",
"isort==5.12.0",
]
# directory inclusion
[tool.hatch.build]
only-include = [
"doc/{{ cookiecutter.__src }}",
"gradle.properties",
]
artifacts = [
"/doc/{{ cookiecutter.__export_root }}/generated",
"/doc/{{ cookiecutter.__src }}/__gradle_version__.py",
]
[tool.hatch.build.targets.wheel]
sources = ["doc/src"]
# hexdoc entry points
[project.entry-points.hexdoc]
{{ cookiecutter.modid }} = "{{ cookiecutter.__project_slug }}._hooks:{{ cookiecutter.plugin_classname }}"
# linting
[tool.pyright]
pythonVersion = "3.11"
pythonPlatform = "All"
include = ["doc/src"]
extraPaths = ["doc/src"]
typeCheckingMode = "basic"
strictDictionaryInference = true
strictListInference = true
strictSetInference = true
reportAssertAlwaysTrue = "error"
reportConstantRedefinition = "error"
reportDeprecated = "error"
reportDuplicateImport = "error"
reportFunctionMemberAccess = "error"
reportIncompatibleMethodOverride = "error"
reportIncompatibleVariableOverride = "error"
reportIncompleteStub = "error"
reportInconsistentConstructor = "error"
reportInvalidStringEscapeSequence = "error"
reportInvalidStubStatement = "error"
reportInvalidTypeVarUse = "error"
reportMatchNotExhaustive = "error"
reportMissingParameterType = "error"
reportMissingTypeStubs = "error"
reportOverlappingOverload = "error"
reportSelfClsParameterName = "error"
reportTypeCommentUsage = "error"
reportUnknownParameterType = "error"
reportUnnecessaryCast = "error"
reportUnnecessaryContains = "error"
reportUnsupportedDunderAll = "error"
reportUntypedBaseClass = "error"
reportUntypedClassDecorator = "error"
reportUntypedFunctionDecorator = "error"
reportUntypedNamedTuple = "error"
reportWildcardImportFromLibrary = "error"
reportMissingTypeArgument = "warning"
reportPrivateUsage = "warning"
reportUnknownArgumentType = "warning"
reportUnknownLambdaType = "warning"
reportUnknownMemberType = "warning"
reportUnnecessaryComparison = "warning"
reportUnnecessaryIsInstance = "warning"
reportUnusedClass = "warning"
reportUnusedExpression = "warning"
reportUnusedFunction = "warning"
reportUnusedImport = "warning"
reportUnusedVariable = "warning"
reportUnknownVariableType = "none"

View file

@@ -93,9 +93,9 @@ known_first_party = ["hexdoc"]
pythonVersion = "3.11"
pythonPlatform = "All"
-extraPaths = [
-    "doc/src",
-]
+include = ["doc/src"]
+extraPaths = ["doc/src"]
+exclude = ["doc/{{cookiecutter.directory}}"]
# mostly we use strict mode
# but pyright doesn't allow decreasing error severity in strict mode