Edit on GitHub

hexdoc.core

 1__all__ = [
 2    "METADATA_SUFFIX",
 3    "AssumeTag",
 4    "BaseProperties",
 5    "BaseResourceDir",
 6    "BaseResourceLocation",
 7    "BookFolder",
 8    "Entity",
 9    "ExportFn",
10    "IsVersion",
11    "ItemStack",
12    "MinecraftVersion",
13    "ModResourceLoader",
14    "PathResourceDir",
15    "PluginResourceDir",
16    "Properties",
17    "ResLoc",
18    "ResourceDir",
19    "ResourceLocation",
20    "ResourceType",
21    "ValueIfVersion",
22    "VersionSource",
23    "Versioned",
24    "compat",
25    "properties",
26]
27
28from .compat import (
29    IsVersion,
30    MinecraftVersion,
31    ValueIfVersion,
32    Versioned,
33    VersionSource,
34)
35from .loader import (
36    METADATA_SUFFIX,
37    BookFolder,
38    ExportFn,
39    ModResourceLoader,
40)
41from .properties import BaseProperties, Properties
42from .resource import (
43    AssumeTag,
44    BaseResourceLocation,
45    Entity,
46    ItemStack,
47    ResLoc,
48    ResourceLocation,
49    ResourceType,
50)
51from .resource_dir import (
52    BaseResourceDir,
53    PathResourceDir,
54    PluginResourceDir,
55    ResourceDir,
56)
METADATA_SUFFIX = '.hexdoc.json'
AssumeTag = typing.Annotated[~_T, BeforeValidator(func=<function _add_hashtag_to_tag>, json_schema_input_type=PydanticUndefined)]
class BaseProperties(hexdoc.model.strip_hidden.StripHiddenModel, hexdoc.utils.context.ValidationContext):
176class BaseProperties(StripHiddenModel, ValidationContext):
177    env: SkipJsonSchema[EnvironmentVariableProps]
178    props_dir: SkipJsonSchema[Path]
179
180    @classmethod
181    def load(cls, path: Path) -> Self:
182        return cls.load_data(
183            props_dir=path.parent,
184            data=load_toml_with_placeholders(path),
185        )
186
187    @classmethod
188    def load_data(cls, props_dir: Path, data: dict[str, Any]) -> Self:
189        props_dir = props_dir.resolve()
190
191        with relative_path_root(props_dir):
192            env = EnvironmentVariableProps.model_getenv()
193            props = cls.model_validate(
194                data
195                | {
196                    "env": env,
197                    "props_dir": props_dir,
198                },
199            )
200
201        logger.log(TRACE, props)
202        return props
203
204    @override
205    @classmethod
206    def model_json_schema(
207        cls,
208        by_alias: bool = True,
209        ref_template: str = DEFAULT_REF_TEMPLATE,
210        schema_generator: type[GenerateJsonSchema] = GenerateJsonSchemaTOML,
211        mode: Literal["validation", "serialization"] = "validation",
212    ) -> dict[str, Any]:
213        return super().model_json_schema(by_alias, ref_template, schema_generator, mode)

Base model which removes all keys starting with _ before validation.

env: typing.Annotated[hexdoc.core.properties.EnvironmentVariableProps, SkipJsonSchema()]
props_dir: typing.Annotated[pathlib.Path, SkipJsonSchema()]
@classmethod
def load(cls, path: pathlib.Path) -> Self:
180    @classmethod
181    def load(cls, path: Path) -> Self:
182        return cls.load_data(
183            props_dir=path.parent,
184            data=load_toml_with_placeholders(path),
185        )
@classmethod
def load_data(cls, props_dir: pathlib.Path, data: dict[str, typing.Any]) -> Self:
187    @classmethod
188    def load_data(cls, props_dir: Path, data: dict[str, Any]) -> Self:
189        props_dir = props_dir.resolve()
190
191        with relative_path_root(props_dir):
192            env = EnvironmentVariableProps.model_getenv()
193            props = cls.model_validate(
194                data
195                | {
196                    "env": env,
197                    "props_dir": props_dir,
198                },
199            )
200
201        logger.log(TRACE, props)
202        return props
@override
@classmethod
def model_json_schema( cls, by_alias: bool = True, ref_template: str = '#/$defs/{model}', schema_generator: type[pydantic.json_schema.GenerateJsonSchema] = <class 'hexdoc.utils.deserialize.toml.GenerateJsonSchemaTOML'>, mode: Literal['validation', 'serialization'] = 'validation') -> dict[str, typing.Any]:
204    @override
205    @classmethod
206    def model_json_schema(
207        cls,
208        by_alias: bool = True,
209        ref_template: str = DEFAULT_REF_TEMPLATE,
210        schema_generator: type[GenerateJsonSchema] = GenerateJsonSchemaTOML,
211        mode: Literal["validation", "serialization"] = "validation",
212    ) -> dict[str, Any]:
213        return super().model_json_schema(by_alias, ref_template, schema_generator, mode)

Generates a JSON schema for a model class.

Args:
- by_alias: Whether to use attribute aliases or not.
- ref_template: The reference template.
- schema_generator: To override the logic used to generate the JSON schema, as a subclass of GenerateJsonSchema with your desired modifications.
- mode: The mode in which to generate the schema.

Returns: The JSON schema for the given model class.

class BaseResourceDir(hexdoc.model.base.HexdocModel, abc.ABC):
26class BaseResourceDir(HexdocModel, ABC):
27    @staticmethod
28    def _json_schema_extra(schema: dict[str, Any]):
29        properties = schema.pop("properties")
30        new_schema = {
31            "anyOf": [
32                schema | {"properties": properties | {key: value}}
33                for key, value in {
34                    "external": properties.pop("external"),
35                    "internal": {
36                        "type": "boolean",
37                        "default": True,
38                        "title": "Internal",
39                    },
40                }.items()
41            ],
42        }
43        schema.clear()
44        schema.update(new_schema)
45
46    model_config = DEFAULT_CONFIG | {
47        "json_schema_extra": _json_schema_extra,
48    }
49
50    external: bool
51    reexport: bool
52    """If not set, the default value will be `not self.external`.
53
54    Must be defined AFTER `external` in the Pydantic model.
55    """
56
57    @abstractmethod
58    def load(
59        self,
60        pm: PluginManager,
61    ) -> ContextManager[Iterable[PathResourceDir]]: ...
62
63    @property
64    def internal(self):
65        return not self.external
66
67    @model_validator(mode="before")
68    @classmethod
69    def _default_reexport(cls, data: JSONDict | Any):
70        if not isinstance(data, dict):
71            return data
72
73        external = cls._get_external(data)
74        if external is None:
75            return data
76
77        if "reexport" not in data:
78            data["reexport"] = not external
79
80        return data
81
82    @classmethod
83    def _get_external(cls, data: JSONDict | Any):
84        match data:
85            case {"external": bool(), "internal": bool()}:
86                raise ValueError(f"Expected internal OR external, got both: {data}")
87            case {"external": bool(external)}:
88                return external
89            case {"internal": bool(internal)}:
90                data.pop("internal")
91                external = data["external"] = not internal
92                return external
93            case _:
94                return None

Base class for all Pydantic models in hexdoc.

Sets the default model config, and overrides __init__ to allow using the init_context context manager to set validation context for constructors.

external: bool
reexport: bool

If not set, the default value will be `not self.external`.

Must be defined AFTER `external` in the Pydantic model.

@abstractmethod
def load( self, pm: hexdoc.plugin.PluginManager) -> ContextManager[Iterable[PathResourceDir]]:
57    @abstractmethod
58    def load(
59        self,
60        pm: PluginManager,
61    ) -> ContextManager[Iterable[PathResourceDir]]: ...
internal
63    @property
64    def internal(self):
65        return not self.external
@dataclass(frozen=True, repr=False, config=DEFAULT_CONFIG | ConfigDict(json_schema_extra=resloc_json_schema_extra, arbitrary_types_allowed=True))
class BaseResourceLocation:
 92@dataclass(
 93    frozen=True,
 94    repr=False,
 95    config=DEFAULT_CONFIG
 96    | ConfigDict(
 97        json_schema_extra=resloc_json_schema_extra,
 98        arbitrary_types_allowed=True,
 99    ),
100)
101class BaseResourceLocation:
102    namespace: str
103    path: str
104
105    _from_str_regex: ClassVar[re.Pattern[str]]
106
107    def __init_subclass__(cls, regex: re.Pattern[str] | None) -> None:
108        if regex:
109            cls._from_str_regex = regex
110
111    @classmethod
112    def from_str(cls, raw: str) -> Self:
113        match = cls._from_str_regex.fullmatch(raw)
114        if match is None:
115            raise ValueError(f"Invalid {cls.__name__} string: {raw}")
116
117        return cls(**match.groupdict())
118
119    @classmethod
120    def model_validate(cls, value: Any, *, context: Any = None):
121        ta = TypeAdapter(cls)
122        return ta.validate_python(value, context=context)
123
124    @model_validator(mode="wrap")
125    @classmethod
126    def _pre_root(cls, values: Any, handler: ModelWrapValidatorHandler[Self]):
127        # before validating the fields, if it's a string instead of a dict, convert it
128        logger.log(TRACE, f"Convert {values} to {cls.__name__}")
129        if isinstance(values, str):
130            return cls.from_str(values)
131        return handler(values)
132
133    @field_validator("namespace", mode="before")
134    def _default_namespace(cls, value: Any):
135        match value:
136            case str():
137                return value.lower()
138            case None:
139                return "minecraft"
140            case _:
141                return value
142
143    @field_validator("path")
144    def _validate_path(cls, value: str):
145        return value.lower().rstrip("/")
146
147    @model_serializer
148    def _ser_model(self) -> str:
149        return str(self)
150
151    @property
152    def id(self) -> ResourceLocation:
153        return ResourceLocation(self.namespace, self.path)
154
155    def i18n_key(self, root: str) -> str:
156        # TODO: is this how i18n works????? (apparently, because it's working)
157        return f"{root}.{self.namespace}.{self.path.replace('/', '.')}"
158
159    def __repr__(self) -> str:
160        return f"{self.namespace}:{self.path}"
BaseResourceLocation(*args: Any, **kwargs: Any)
118    def __init__(__dataclass_self__: PydanticDataclass, *args: Any, **kwargs: Any) -> None:
119        __tracebackhide__ = True
120        s = __dataclass_self__
121        s.__pydantic_validator__.validate_python(ArgsKwargs(args, kwargs), self_instance=s)
namespace: str
path: str
@classmethod
def from_str(cls, raw: str) -> Self:
111    @classmethod
112    def from_str(cls, raw: str) -> Self:
113        match = cls._from_str_regex.fullmatch(raw)
114        if match is None:
115            raise ValueError(f"Invalid {cls.__name__} string: {raw}")
116
117        return cls(**match.groupdict())
@classmethod
def model_validate(cls, value: Any, *, context: Any = None):
119    @classmethod
120    def model_validate(cls, value: Any, *, context: Any = None):
121        ta = TypeAdapter(cls)
122        return ta.validate_python(value, context=context)
id: ResourceLocation
151    @property
152    def id(self) -> ResourceLocation:
153        return ResourceLocation(self.namespace, self.path)
def i18n_key(self, root: str) -> str:
155    def i18n_key(self, root: str) -> str:
156        # TODO: is this how i18n works????? (apparently, because it's working)
157        return f"{root}.{self.namespace}.{self.path.replace('/', '.')}"
BookFolder = typing.Literal['categories', 'entries', 'templates']
@dataclass(frozen=True, repr=False)
class Entity(hexdoc.core.BaseResourceLocation):
317@dataclass(frozen=True, repr=False)
318class Entity(BaseResourceLocation, regex=_make_regex(nbt=True)):
319    """Represents an entity with optional NBT.
320
321    Inherits from BaseResourceLocation, not ResourceLocation.
322    """
323
324    nbt: str | None = None
325
326    def __repr__(self) -> str:
327        s = super().__repr__()
328        if self.nbt is not None:
329            s += self.nbt
330        return s

Represents an entity with optional NBT.

Inherits from BaseResourceLocation, not ResourceLocation.

Entity(*args: Any, **kwargs: Any)
118    def __init__(__dataclass_self__: PydanticDataclass, *args: Any, **kwargs: Any) -> None:
119        __tracebackhide__ = True
120        s = __dataclass_self__
121        s.__pydantic_validator__.validate_python(ArgsKwargs(args, kwargs), self_instance=s)
nbt: str | None = None
ExportFn = typing.Callable[[~_T, typing.Optional[~_T]], str]
@dataclass(frozen=True)
class IsVersion(hexdoc.core.Versioned):
 65@dataclass(frozen=True)
 66class IsVersion(Versioned):
 67    """Instances of this class are truthy if version_spec matches version_source, which
 68    defaults to MinecraftVersion.
 69
 70    Can be used as a Pydantic validator annotation, which raises ValueError if
 71    version_spec doesn't match the current version. Use it like this:
 72
 73    `Annotated[str, IsVersion(">=1.20")] | Annotated[None, IsVersion("<1.20")]`
 74
 75    Can also be used as a class decorator for Pydantic models, which raises ValueError
 76    when validating the model if version_spec doesn't match the current version.
 77    Decorated classes must subclass HexdocModel (or HexdocBaseModel).
 78    """
 79
 80    def __bool__(self):
 81        return self.is_current
 82
 83    def __call__(self, cls: _T_ModelType) -> _T_ModelType:
 84        cls.__hexdoc_before_validator__ = self._model_validator
 85        return cls
 86
 87    def __get_pydantic_core_schema__(
 88        self,
 89        source_type: type[Any],
 90        handler: GetCoreSchemaHandler,
 91    ) -> core_schema.CoreSchema:
 92        return core_schema.no_info_before_validator_function(
 93            self._schema_validator,
 94            schema=handler(source_type),
 95        )
 96
 97    def _schema_validator(self, value: Any):
 98        if self.is_current:
 99            return value
100        raise ValueError(
101            f"Expected version {self.version_spec}, got {self.version_source.get()}"
102        )
103
104    def _model_validator(self, cls: Any, value: Any, info: ValidationInfo):
105        return self._schema_validator(value)

Instances of this class are truthy if version_spec matches version_source, which defaults to MinecraftVersion.

Can be used as a Pydantic validator annotation, which raises ValueError if version_spec doesn't match the current version. Use it like this:

`Annotated[str, IsVersion(">=1.20")] | Annotated[None, IsVersion("<1.20")]`

Can also be used as a class decorator for Pydantic models, which raises ValueError when validating the model if version_spec doesn't match the current version. Decorated classes must subclass HexdocModel (or HexdocBaseModel).

IsVersion( version_spec: str, *, version_source: VersionSource = <class 'MinecraftVersion'>)
@dataclass(frozen=True, repr=False)
class ItemStack(hexdoc.core.BaseResourceLocation):
260@dataclass(frozen=True, repr=False)
261class ItemStack(BaseResourceLocation, regex=_make_regex(count=True, nbt=True)):
262    """Represents an item with optional count and NBT.
263
264    Inherits from BaseResourceLocation, not ResourceLocation.
265    """
266
267    count: int | None = None
268    nbt: str | None = None
269
270    _data: SkipJsonSchema[Compound | None] = None
271
272    def __init_subclass__(cls, **kwargs: Any):
273        super().__init_subclass__(regex=cls._from_str_regex, **kwargs)
274
275    def __post_init__(self):
276        object.__setattr__(self, "_data", _parse_nbt(self.nbt))
277
278    @property
279    def data(self):
280        return self._data
281
282    def get_name(self) -> str | None:
283        if self.data is None:
284            return None
285
286        component_json = self.data.get(NBTPath("display.Name"))  # pyright: ignore[reportUnknownVariableType, reportUnknownMemberType]
287        if not isinstance(component_json, str):
288            return None
289
290        try:
291            component: JsonValue = json.loads(component_json)
292        except ValueError:
293            return None
294
295        if not isinstance(component, dict):
296            return None
297
298        name = component.get("text")
299        if not isinstance(name, str):
300            return None
301
302        return name
303
304    @override
305    def i18n_key(self, root: str = "item") -> str:
306        return super().i18n_key(root)
307
308    def __repr__(self) -> str:
309        s = super().__repr__()
310        if self.count is not None:
311            s += f"#{self.count}"
312        if self.nbt is not None:
313            s += self.nbt
314        return s

Represents an item with optional count and NBT.

Inherits from BaseResourceLocation, not ResourceLocation.

ItemStack(*args: Any, **kwargs: Any)
118    def __init__(__dataclass_self__: PydanticDataclass, *args: Any, **kwargs: Any) -> None:
119        __tracebackhide__ = True
120        s = __dataclass_self__
121        s.__pydantic_validator__.validate_python(ArgsKwargs(args, kwargs), self_instance=s)
count: int | None = None
nbt: str | None = None
data
278    @property
279    def data(self):
280        return self._data
def get_name(self) -> str | None:
282    def get_name(self) -> str | None:
283        if self.data is None:
284            return None
285
286        component_json = self.data.get(NBTPath("display.Name"))  # pyright: ignore[reportUnknownVariableType, reportUnknownMemberType]
287        if not isinstance(component_json, str):
288            return None
289
290        try:
291            component: JsonValue = json.loads(component_json)
292        except ValueError:
293            return None
294
295        if not isinstance(component, dict):
296            return None
297
298        name = component.get("text")
299        if not isinstance(name, str):
300            return None
301
302        return name
@override
def i18n_key(self, root: str = 'item') -> str:
304    @override
305    def i18n_key(self, root: str = "item") -> str:
306        return super().i18n_key(root)
class MinecraftVersion(hexdoc.core.VersionSource):
34class MinecraftVersion(VersionSource):
35    MINECRAFT_VERSION: ClassVar[str | None] = None
36
37    @override
38    @classmethod
39    def get(cls) -> str | None:
40        return cls.MINECRAFT_VERSION
41
42    @override
43    @classmethod
44    def matches(cls, specifier: str | SpecifierSet) -> bool:
45        if isinstance(specifier, str):
46            specifier = SpecifierSet(specifier)
47        if (version := cls.get()) is None:
48            return True
49        return version in specifier

Base class for protocol classes. (Note: this description is inherited from `typing.Protocol` and does not describe `MinecraftVersion` itself; see the class listing above for its actual behavior.)

Protocol classes are defined as::

class Proto(Protocol):
    def meth(self) -> int:
        ...

Such classes are primarily used with static type checkers that recognize structural subtyping (static duck-typing).

For example::

class C:
    def meth(self) -> int:
        return 0

def func(x: Proto) -> int:
    return x.meth()

func(C())  # Passes static type check

See PEP 544 for details. Protocol classes decorated with @typing.runtime_checkable act as simple-minded runtime protocols that check only the presence of given attributes, ignoring their type signatures. Protocol classes can be generic, they are defined as::

class GenProto(Protocol[T]):
    def meth(self) -> T:
        ...
MINECRAFT_VERSION: ClassVar[str | None] = None
@override
@classmethod
def get(cls) -> str | None:
37    @override
38    @classmethod
39    def get(cls) -> str | None:
40        return cls.MINECRAFT_VERSION

Returns the current version.

@override
@classmethod
def matches(cls, specifier: str | packaging.specifiers.SpecifierSet) -> bool:
42    @override
43    @classmethod
44    def matches(cls, specifier: str | SpecifierSet) -> bool:
45        if isinstance(specifier, str):
46            specifier = SpecifierSet(specifier)
47        if (version := cls.get()) is None:
48            return True
49        return version in specifier

Returns `True` if the current version matches the given version specifier.

@dataclass(config=DEFAULT_CONFIG | {'arbitrary_types_allowed': True}, kw_only=True)
class ModResourceLoader(hexdoc.utils.context.ValidationContext):
 48@dataclass(config=DEFAULT_CONFIG | {"arbitrary_types_allowed": True}, kw_only=True)
 49class ModResourceLoader(ValidationContext):
 50    props: Properties
 51    export_dir: Path | None
 52    resource_dirs: Sequence[PathResourceDir]
 53    _stack: SkipValidation[ExitStack]
 54
 55    @classmethod
 56    def clean_and_load_all(
 57        cls,
 58        props: Properties,
 59        pm: PluginManager,
 60        *,
 61        export: bool = False,
 62    ):
 63        # clear the export dir so we start with a clean slate
 64        if props.export_dir and export:
 65            subprocess.run(
 66                ["git", "clean", "-fdX", props.export_dir],
 67                cwd=props.props_dir,
 68            )
 69
 70            write_to_path(
 71                props.export_dir / "__init__.py",
 72                dedent(
 73                    """\
 74                    # This directory is auto-generated by hexdoc.
 75                    # Do not edit or commit these files.
 76                    """
 77                ),
 78            )
 79
 80        return cls.load_all(
 81            props,
 82            pm,
 83            export=export,
 84        )
 85
 86    @classmethod
 87    def load_all(
 88        cls,
 89        props: Properties,
 90        pm: PluginManager,
 91        *,
 92        export: bool = False,
 93    ) -> Self:
 94        export_dir = props.export_dir if export else None
 95        stack = ExitStack()
 96
 97        with relative_path_root(Path()):
 98            resource_dirs = [
 99                path_resource_dir
100                for resource_dir in props.resource_dirs
101                for path_resource_dir in stack.enter_context(resource_dir.load(pm))
102            ]
103
104        return cls(
105            props=props,
106            export_dir=export_dir,
107            resource_dirs=resource_dirs,
108            _stack=stack,
109        )
110
111    def __enter__(self):
112        return self
113
114    def __exit__(self, *exc_details: Any):
115        return self._stack.__exit__(*exc_details)
116
117    def close(self):
118        self._stack.close()
119
120    def _map_own_assets(self, folder: str, *, root: str | Path):
121        return {
122            id: path.resolve().relative_to(root)
123            for _, id, path in self.find_resources(
124                "assets",
125                namespace=self.props.modid,
126                folder="",
127                glob=f"{folder}/**/*.*",
128                allow_missing=True,
129            )
130        }
131
132    @property
133    def should_export(self):
134        return self.export_dir is not None
135
136    def load_metadata(
137        self,
138        *,
139        name_pattern: str = "{modid}",
140        model_type: type[_T_Model],
141        allow_missing: bool = False,
142    ) -> dict[str, _T_Model]:
143        """eg. `"{modid}.patterns"`"""
144        metadata = dict[str, _T_Model]()
145
146        # TODO: refactor
147        cached_metadata = self.props.cache_dir / (
148            name_pattern.format(modid=self.props.modid) + METADATA_SUFFIX
149        )
150        if cached_metadata.is_file():
151            metadata[self.props.modid] = model_type.model_validate_json(
152                cached_metadata.read_bytes()
153            )
154
155        for resource_dir in self.resource_dirs:
156            # skip if the resource dir has no metadata set, because we're only loading
157            # this for external mods (TODO: this feels flawed)
158            modid = resource_dir.modid
159            if modid is None or modid in metadata:
160                continue
161
162            try:
163                _, metadata[modid] = self.load_resource(
164                    Path(name_pattern.format(modid=modid) + METADATA_SUFFIX),
165                    decode=model_type.model_validate_json,
166                    export=False,
167                )
168            except FileNotFoundError:
169                if allow_missing:
170                    continue
171                raise
172
173        return metadata
174
175    @must_yield_something
176    def load_book_assets(
177        self,
178        parent_book_id: ResourceLocation,
179        folder: BookFolder,
180        use_resource_pack: bool,
181        lang: str | None = None,
182    ) -> Iterator[tuple[PathResourceDir, ResourceLocation, JSONDict]]:
183        if self.props.book_id is None:
184            raise TypeError("Can't load book assets because props.book_id is None")
185
186        if lang is None:
187            lang = self.props.default_lang
188
189        # use ordered set to be deterministic but avoid duplicate ids
190        books_to_check = PydanticOrderedSet[ResourceLocation].collect(
191            parent_book_id,
192            self.props.book_id,
193            *self.props.extra_books,
194        )
195
196        for book_id in books_to_check:
197            yield from self.load_resources(
198                type="assets" if use_resource_pack else "data",
199                folder=Path("patchouli_books") / book_id.path / lang / folder,
200                namespace=book_id.namespace,
201                allow_missing=True,
202            )
203
204    @overload
205    def load_resource(
206        self,
207        type: ResourceType,
208        folder: str | Path,
209        id: ResourceLocation,
210        *,
211        decode: Callable[[str], _T] = decode_json_dict,
212        export: ExportFn[_T] | Literal[False] | None = None,
213    ) -> tuple[PathResourceDir, _T]: ...
214
215    @overload
216    def load_resource(
217        self,
218        path: Path,
219        /,
220        *,
221        decode: Callable[[str], _T] = decode_json_dict,
222        export: ExportFn[_T] | Literal[False] | None = None,
223    ) -> tuple[PathResourceDir, _T]: ...
224
225    def load_resource(
226        self,
227        *args: Any,
228        decode: Callable[[str], _T] = decode_json_dict,
229        export: ExportFn[_T] | Literal[False] | None = None,
230        **kwargs: Any,
231    ) -> tuple[PathResourceDir, _T]:
232        """Find the first file with this resource location in `resource_dirs`.
233
234        If no file extension is provided, `.json` is assumed.
235
236        Raises FileNotFoundError if the file does not exist.
237        """
238
239        resource_dir, path = self.find_resource(*args, **kwargs)
240        return resource_dir, self._load_path(
241            resource_dir,
242            path,
243            decode=decode,
244            export=export,
245        )
246
247    @overload
248    def find_resource(
249        self,
250        type: ResourceType,
251        folder: str | Path,
252        id: ResourceLocation,
253    ) -> tuple[PathResourceDir, Path]: ...
254
255    @overload
256    def find_resource(
257        self,
258        path: Path,
259        /,
260    ) -> tuple[PathResourceDir, Path]: ...
261
262    def find_resource(
263        self,
264        type: ResourceType | Path,
265        folder: str | Path | None = None,
266        id: ResourceLocation | None = None,
267    ) -> tuple[PathResourceDir, Path]:
268        """Find the first file with this resource location in `resource_dirs`.
269
270        If no file extension is provided, `.json` / `.json5` is assumed.
271
272        Raises FileNotFoundError if the file does not exist.
273        """
274
275        if isinstance(type, Path):
276            path_stub = type
277        else:
278            assert folder is not None and id is not None
279            path_stub = id.file_path_stub(type, folder)
280
281        # check by descending priority, return the first that exists
282        for resource_dir in self.resource_dirs:
283            path = resource_dir.path / path_stub
284            if path.is_file():
285                return resource_dir, path
286            if path.suffix == ".json":
287                path = path.with_suffix(".json5")
288                if path.is_file():
289                    return resource_dir, path
290
291        raise FileNotFoundError(f"Path {path_stub} not found in any resource dir")
292
293    @overload
294    def load_resources(
295        self,
296        type: ResourceType,
297        *,
298        namespace: str,
299        folder: str | Path,
300        glob: str | list[str] = "**/*",
301        allow_missing: bool = False,
302        internal_only: bool = False,
303        decode: Callable[[str], _T] = decode_json_dict,
304        export: ExportFn[_T] | Literal[False] | None = None,
305    ) -> Iterator[tuple[PathResourceDir, ResourceLocation, _T]]: ...
306
307    @overload
308    def load_resources(
309        self,
310        type: ResourceType,
311        *,
312        folder: str | Path,
313        id: ResourceLocation,
314        allow_missing: bool = False,
315        internal_only: bool = False,
316        decode: Callable[[str], _T] = decode_json_dict,
317        export: ExportFn[_T] | Literal[False] | None = None,
318    ) -> Iterator[tuple[PathResourceDir, ResourceLocation, _T]]: ...
319
320    def load_resources(
321        self,
322        type: ResourceType,
323        *,
324        decode: Callable[[str], _T] = decode_json_dict,
325        export: ExportFn[_T] | Literal[False] | None = None,
326        **kwargs: Any,
327    ) -> Iterator[tuple[PathResourceDir, ResourceLocation, _T]]:
328        """Like `find_resources`, but also loads the file contents and reexports it."""
329        for resource_dir, value_id, path in self.find_resources(type, **kwargs):
330            value = self._load_path(
331                resource_dir,
332                path,
333                decode=decode,
334                export=export,
335            )
336            yield resource_dir, value_id, value
337
338    @overload
339    def find_resources(
340        self,
341        type: ResourceType,
342        *,
343        namespace: str,
344        folder: str | Path,
345        glob: str | list[str] = "**/*",
346        allow_missing: bool = False,
347        internal_only: bool = False,
348    ) -> Iterator[tuple[PathResourceDir, ResourceLocation, Path]]: ...
349
350    @overload
351    def find_resources(
352        self,
353        type: ResourceType,
354        *,
355        folder: str | Path,
356        id: ResourceLocation,
357        allow_missing: bool = False,
358        internal_only: bool = False,
359    ) -> Iterator[tuple[PathResourceDir, ResourceLocation, Path]]: ...
360
361    def find_resources(
362        self,
363        type: ResourceType,
364        *,
365        folder: str | Path,
366        id: ResourceLocation | None = None,
367        namespace: str | None = None,
368        glob: str | list[str] = "**/*",
369        allow_missing: bool = False,
370        internal_only: bool = False,
371    ) -> Iterator[tuple[PathResourceDir, ResourceLocation, Path]]:
372        """Search for a glob under a given resource location in all of `resource_dirs`.
373
374        Files are returned from lowest to highest priority in the load order, ie. later
375        files should overwrite earlier ones.
376
377        If no file extension is provided for glob, `.json` is assumed.
378
379        Raises FileNotFoundError if no files were found in any resource dir.
380
381        For example:
382        ```py
383        props.find_resources(
384            "assets",
385            "lang/subdir",
386            namespace="*",
387            glob="*.flatten.json5",
388        )
389
390        # [(hexcasting:en_us, .../resources/assets/hexcasting/lang/subdir/en_us.json)]
391        ```
392        """
393
394        if id is not None:
395            namespace = id.namespace
396            glob = id.path
397
398        # eg. assets/*/lang/subdir
399        if namespace is not None:
400            base_path_stub = Path(type) / namespace / folder
401        else:
402            raise RuntimeError(
403                "No overload matches the specified arguments (expected id or namespace)"
404            )
405
406        # glob for json files if not provided
407        globs = [glob] if isinstance(glob, str) else glob
408        for i in range(len(globs)):
409            if not Path(globs[i]).suffix:
410                globs.append(globs[i] + ".json5")
411                globs[i] += ".json"
412
413        # find all files matching the resloc
414        found_any = False
415        for resource_dir in reversed(self.resource_dirs):
416            if internal_only and not resource_dir.internal:
417                continue
418
419            # eg. .../resources/assets/*/lang/subdir
420            for base_path in resource_dir.path.glob(base_path_stub.as_posix()):
421                for glob_ in globs:
422                    # eg. .../resources/assets/hexcasting/lang/subdir/*.flatten.json5
423                    for path in base_path.glob(glob_):
424                        # only strip json/json5, not eg. png
425                        id_path = path.relative_to(base_path)
426                        if "json" in path.name:
427                            id_path = strip_suffixes(id_path)
428
429                        id = ResourceLocation(
430                            # eg. ["assets", "hexcasting", "lang", ...][1]
431                            namespace=path.relative_to(resource_dir.path).parts[1],
432                            path=id_path.as_posix(),
433                        )
434
435                        if path.is_file():
436                            found_any = True
437                            yield resource_dir, id, path
438
439        # if we never yielded any files, raise an error
440        if not allow_missing and not found_any:
441            raise FileNotFoundError(
442                f"No files found under {base_path_stub / repr(globs)} in any resource dir"
443            )
444
445    def _load_path(
446        self,
447        resource_dir: PathResourceDir,
448        path: Path,
449        *,
450        decode: Callable[[str], _T] = decode_json_dict,
451        export: ExportFn[_T] | Literal[False] | None = None,
452    ) -> _T:
453        if not path.is_file():
454            raise FileNotFoundError(path)
455
456        logger.debug(f"Loading {path}")
457
458        data = path.read_text("utf-8")
459        value = decode(data)
460
461        if resource_dir.reexport and export is not False:
462            self.export(
463                path.relative_to(resource_dir.path),
464                data,
465                value,
466                decode=decode,
467                export=export,
468            )
469
470        return value
471
472    @overload
473    def export(self, /, path: Path, data: str, *, cache: bool = False) -> None: ...
474
475    @overload
476    def export(
477        self,
478        /,
479        path: Path,
480        data: str,
481        value: _T,
482        *,
483        decode: Callable[[str], _T] = decode_json_dict,
484        export: ExportFn[_T] | None = None,
485        cache: bool = False,
486    ) -> None: ...
487
488    def export(
489        self,
490        path: Path,
491        data: str,
492        value: _T = None,
493        *,
494        decode: Callable[[str], _T] = decode_json_dict,
495        export: ExportFn[_T] | None = None,
496        cache: bool = False,
497    ) -> None:
498        if not self.export_dir:
499            return
500        out_path = self.export_dir / path
501
502        logger.log(TRACE, f"Exporting {path} to {out_path}")
503        if export is None:
504            out_data = data
505        else:
506            try:
507                old_value = decode(out_path.read_text("utf-8"))
508            except FileNotFoundError:
509                old_value = None
510
511            out_data = export(value, old_value)
512
513        write_to_path(out_path, out_data)
514
515        if cache:
516            write_to_path(self.props.cache_dir / path, out_data)
517
518    def export_raw(self, path: Path, data: bytes):
519        if not self.export_dir:
520            return
521        out_path = self.export_dir / path
522
523        logger.log(TRACE, f"Exporting {path} to {out_path}")
524        write_to_path(out_path, data)
525
526    def __repr__(self):
527        return f"{self.__class__.__name__}(...)"
ModResourceLoader(*args: Any, **kwargs: Any)
118    def __init__(__dataclass_self__: PydanticDataclass, *args: Any, **kwargs: Any) -> None:
119        __tracebackhide__ = True
120        s = __dataclass_self__
121        s.__pydantic_validator__.validate_python(ArgsKwargs(args, kwargs), self_instance=s)
props: Properties
export_dir: pathlib.Path | None
resource_dirs: Sequence[PathResourceDir]
@classmethod
def clean_and_load_all( cls, props: Properties, pm: hexdoc.plugin.PluginManager, *, export: bool = False):
55    @classmethod
56    def clean_and_load_all(
57        cls,
58        props: Properties,
59        pm: PluginManager,
60        *,
61        export: bool = False,
62    ):
63        # clear the export dir so we start with a clean slate
64        if props.export_dir and export:
65            subprocess.run(
66                ["git", "clean", "-fdX", props.export_dir],
67                cwd=props.props_dir,
68            )
69
70            write_to_path(
71                props.export_dir / "__init__.py",
72                dedent(
73                    """\
74                    # This directory is auto-generated by hexdoc.
75                    # Do not edit or commit these files.
76                    """
77                ),
78            )
79
80        return cls.load_all(
81            props,
82            pm,
83            export=export,
84        )
@classmethod
def load_all( cls, props: Properties, pm: hexdoc.plugin.PluginManager, *, export: bool = False) -> Self:
 86    @classmethod
 87    def load_all(
 88        cls,
 89        props: Properties,
 90        pm: PluginManager,
 91        *,
 92        export: bool = False,
 93    ) -> Self:
 94        export_dir = props.export_dir if export else None
 95        stack = ExitStack()
 96
 97        with relative_path_root(Path()):
 98            resource_dirs = [
 99                path_resource_dir
100                for resource_dir in props.resource_dirs
101                for path_resource_dir in stack.enter_context(resource_dir.load(pm))
102            ]
103
104        return cls(
105            props=props,
106            export_dir=export_dir,
107            resource_dirs=resource_dirs,
108            _stack=stack,
109        )
def close(self):
117    def close(self):
118        self._stack.close()
should_export
132    @property
133    def should_export(self):
134        return self.export_dir is not None
def load_metadata( self, *, name_pattern: str = '{modid}', model_type: type[~_T_Model], allow_missing: bool = False) -> dict[str, ~_T_Model]:
136    def load_metadata(
137        self,
138        *,
139        name_pattern: str = "{modid}",
140        model_type: type[_T_Model],
141        allow_missing: bool = False,
142    ) -> dict[str, _T_Model]:
143        """eg. `"{modid}.patterns"`"""
144        metadata = dict[str, _T_Model]()
145
146        # TODO: refactor
147        cached_metadata = self.props.cache_dir / (
148            name_pattern.format(modid=self.props.modid) + METADATA_SUFFIX
149        )
150        if cached_metadata.is_file():
151            metadata[self.props.modid] = model_type.model_validate_json(
152                cached_metadata.read_bytes()
153            )
154
155        for resource_dir in self.resource_dirs:
156            # skip if the resource dir has no metadata set, because we're only loading
157            # this for external mods (TODO: this feels flawed)
158            modid = resource_dir.modid
159            if modid is None or modid in metadata:
160                continue
161
162            try:
163                _, metadata[modid] = self.load_resource(
164                    Path(name_pattern.format(modid=modid) + METADATA_SUFFIX),
165                    decode=model_type.model_validate_json,
166                    export=False,
167                )
168            except FileNotFoundError:
169                if allow_missing:
170                    continue
171                raise
172
173        return metadata

eg. "{modid}.patterns"

@must_yield_something
def load_book_assets( self, parent_book_id: ResourceLocation, folder: Literal['categories', 'entries', 'templates'], use_resource_pack: bool, lang: str | None = None) -> Iterator[tuple[PathResourceDir, ResourceLocation, dict[str, JsonValue]]]:
175    @must_yield_something
176    def load_book_assets(
177        self,
178        parent_book_id: ResourceLocation,
179        folder: BookFolder,
180        use_resource_pack: bool,
181        lang: str | None = None,
182    ) -> Iterator[tuple[PathResourceDir, ResourceLocation, JSONDict]]:
183        if self.props.book_id is None:
184            raise TypeError("Can't load book assets because props.book_id is None")
185
186        if lang is None:
187            lang = self.props.default_lang
188
189        # use ordered set to be deterministic but avoid duplicate ids
190        books_to_check = PydanticOrderedSet[ResourceLocation].collect(
191            parent_book_id,
192            self.props.book_id,
193            *self.props.extra_books,
194        )
195
196        for book_id in books_to_check:
197            yield from self.load_resources(
198                type="assets" if use_resource_pack else "data",
199                folder=Path("patchouli_books") / book_id.path / lang / folder,
200                namespace=book_id.namespace,
201                allow_missing=True,
202            )
def load_resource( self, *args: Any, decode: Callable[[str], ~_T] = <function decode_json_dict>, export: Union[Callable[[~_T, Optional[~_T]], str], Literal[False], NoneType] = None, **kwargs: Any) -> tuple[PathResourceDir, ~_T]:
225    def load_resource(
226        self,
227        *args: Any,
228        decode: Callable[[str], _T] = decode_json_dict,
229        export: ExportFn[_T] | Literal[False] | None = None,
230        **kwargs: Any,
231    ) -> tuple[PathResourceDir, _T]:
232        """Find the first file with this resource location in `resource_dirs`.
233
234        If no file extension is provided, `.json` is assumed.
235
236        Raises FileNotFoundError if the file does not exist.
237        """
238
239        resource_dir, path = self.find_resource(*args, **kwargs)
240        return resource_dir, self._load_path(
241            resource_dir,
242            path,
243            decode=decode,
244            export=export,
245        )

Find the first file with this resource location in resource_dirs.

If no file extension is provided, .json is assumed.

Raises FileNotFoundError if the file does not exist.

def find_resource( self, type: Union[Literal['assets', 'data', ''], pathlib.Path], folder: str | pathlib.Path | None = None, id: ResourceLocation | None = None) -> tuple[PathResourceDir, pathlib.Path]:
262    def find_resource(
263        self,
264        type: ResourceType | Path,
265        folder: str | Path | None = None,
266        id: ResourceLocation | None = None,
267    ) -> tuple[PathResourceDir, Path]:
268        """Find the first file with this resource location in `resource_dirs`.
269
270        If no file extension is provided, `.json` / `.json5` is assumed.
271
272        Raises FileNotFoundError if the file does not exist.
273        """
274
275        if isinstance(type, Path):
276            path_stub = type
277        else:
278            assert folder is not None and id is not None
279            path_stub = id.file_path_stub(type, folder)
280
281        # check by descending priority, return the first that exists
282        for resource_dir in self.resource_dirs:
283            path = resource_dir.path / path_stub
284            if path.is_file():
285                return resource_dir, path
286            if path.suffix == ".json":
287                path = path.with_suffix(".json5")
288                if path.is_file():
289                    return resource_dir, path
290
291        raise FileNotFoundError(f"Path {path_stub} not found in any resource dir")

Find the first file with this resource location in resource_dirs.

If no file extension is provided, .json / .json5 is assumed.

Raises FileNotFoundError if the file does not exist.

def load_resources( self, type: Literal['assets', 'data', ''], *, decode: Callable[[str], ~_T] = <function decode_json_dict>, export: Union[Callable[[~_T, Optional[~_T]], str], Literal[False], NoneType] = None, **kwargs: Any) -> Iterator[tuple[PathResourceDir, ResourceLocation, ~_T]]:
320    def load_resources(
321        self,
322        type: ResourceType,
323        *,
324        decode: Callable[[str], _T] = decode_json_dict,
325        export: ExportFn[_T] | Literal[False] | None = None,
326        **kwargs: Any,
327    ) -> Iterator[tuple[PathResourceDir, ResourceLocation, _T]]:
328        """Like `find_resources`, but also loads the file contents and reexports it."""
329        for resource_dir, value_id, path in self.find_resources(type, **kwargs):
330            value = self._load_path(
331                resource_dir,
332                path,
333                decode=decode,
334                export=export,
335            )
336            yield resource_dir, value_id, value

Like find_resources, but also loads the file contents and reexports it.

def find_resources( self, type: Literal['assets', 'data', ''], *, folder: str | pathlib.Path, id: ResourceLocation | None = None, namespace: str | None = None, glob: str | list[str] = '**/*', allow_missing: bool = False, internal_only: bool = False) -> Iterator[tuple[PathResourceDir, ResourceLocation, pathlib.Path]]:
361    def find_resources(
362        self,
363        type: ResourceType,
364        *,
365        folder: str | Path,
366        id: ResourceLocation | None = None,
367        namespace: str | None = None,
368        glob: str | list[str] = "**/*",
369        allow_missing: bool = False,
370        internal_only: bool = False,
371    ) -> Iterator[tuple[PathResourceDir, ResourceLocation, Path]]:
372        """Search for a glob under a given resource location in all of `resource_dirs`.
373
374        Files are returned from lowest to highest priority in the load order, ie. later
375        files should overwrite earlier ones.
376
377        If no file extension is provided for glob, `.json` is assumed.
378
379        Raises FileNotFoundError if no files were found in any resource dir.
380
381        For example:
382        ```py
383        props.find_resources(
384            "assets",
385            "lang/subdir",
386            namespace="*",
387            glob="*.flatten.json5",
388        )
389
390        # [(hexcasting:en_us, .../resources/assets/hexcasting/lang/subdir/en_us.json)]
391        ```
392        """
393
394        if id is not None:
395            namespace = id.namespace
396            glob = id.path
397
398        # eg. assets/*/lang/subdir
399        if namespace is not None:
400            base_path_stub = Path(type) / namespace / folder
401        else:
402            raise RuntimeError(
403                "No overload matches the specified arguments (expected id or namespace)"
404            )
405
406        # glob for json files if not provided
407        globs = [glob] if isinstance(glob, str) else glob
408        for i in range(len(globs)):
409            if not Path(globs[i]).suffix:
410                globs.append(globs[i] + ".json5")
411                globs[i] += ".json"
412
413        # find all files matching the resloc
414        found_any = False
415        for resource_dir in reversed(self.resource_dirs):
416            if internal_only and not resource_dir.internal:
417                continue
418
419            # eg. .../resources/assets/*/lang/subdir
420            for base_path in resource_dir.path.glob(base_path_stub.as_posix()):
421                for glob_ in globs:
422                    # eg. .../resources/assets/hexcasting/lang/subdir/*.flatten.json5
423                    for path in base_path.glob(glob_):
424                        # only strip json/json5, not eg. png
425                        id_path = path.relative_to(base_path)
426                        if "json" in path.name:
427                            id_path = strip_suffixes(id_path)
428
429                        id = ResourceLocation(
430                            # eg. ["assets", "hexcasting", "lang", ...][1]
431                            namespace=path.relative_to(resource_dir.path).parts[1],
432                            path=id_path.as_posix(),
433                        )
434
435                        if path.is_file():
436                            found_any = True
437                            yield resource_dir, id, path
438
439        # if we never yielded any files, raise an error
440        if not allow_missing and not found_any:
441            raise FileNotFoundError(
442                f"No files found under {base_path_stub / repr(globs)} in any resource dir"
443            )

Search for a glob under a given resource location in all of resource_dirs.

Files are returned from lowest to highest priority in the load order, ie. later files should overwrite earlier ones.

If no file extension is provided for glob, .json / .json5 is assumed.

Raises FileNotFoundError if no files were found in any resource dir.

For example:

props.find_resources(
    "assets",
    folder="lang/subdir",
    namespace="*",
    glob="*.flatten.json5",
)

# yields (resource_dir, id, path) triples, eg.
# [(<resource_dir>, hexcasting:en_us, .../resources/assets/hexcasting/lang/subdir/en_us.json)]
def export( self, path: pathlib.Path, data: str, value: ~_T = None, *, decode: Callable[[str], ~_T] = <function decode_json_dict>, export: Optional[Callable[[~_T, Optional[~_T]], str]] = None, cache: bool = False) -> None:
488    def export(
489        self,
490        path: Path,
491        data: str,
492        value: _T = None,
493        *,
494        decode: Callable[[str], _T] = decode_json_dict,
495        export: ExportFn[_T] | None = None,
496        cache: bool = False,
497    ) -> None:
498        if not self.export_dir:
499            return
500        out_path = self.export_dir / path
501
502        logger.log(TRACE, f"Exporting {path} to {out_path}")
503        if export is None:
504            out_data = data
505        else:
506            try:
507                old_value = decode(out_path.read_text("utf-8"))
508            except FileNotFoundError:
509                old_value = None
510
511            out_data = export(value, old_value)
512
513        write_to_path(out_path, out_data)
514
515        if cache:
516            write_to_path(self.props.cache_dir / path, out_data)
def export_raw(self, path: pathlib.Path, data: bytes):
518    def export_raw(self, path: Path, data: bytes):
519        if not self.export_dir:
520            return
521        out_path = self.export_dir / path
522
523        logger.log(TRACE, f"Exporting {path} to {out_path}")
524        write_to_path(out_path, data)
class PathResourceDir(hexdoc.core.resource_dir.BasePathResourceDir):
123class PathResourceDir(BasePathResourceDir):
124    """Represents a path to a resources directory or a mod's `.jar` file."""
125
126    @staticmethod
127    def _json_schema_extra(schema: dict[str, Any]):
128        BaseResourceDir._json_schema_extra(schema)
129        new_schema = {
130            "anyOf": [
131                {
132                    "type": "string",
133                    "format": "path",
134                },
135                *schema["anyOf"],
136            ]
137        }
138        schema.clear()
139        schema.update(new_schema)
140
141    model_config = DEFAULT_CONFIG | {
142        "json_schema_extra": _json_schema_extra,
143    }
144
145    path: RelativePath
146    """A path relative to `hexdoc.toml`."""
147
148    archive: bool = Field(default=None, validate_default=False)  # type: ignore
149    """If true, treat this path as a zip archive (eg. a mod's `.jar` file).
150
151    If `path` ends with `.jar` or `.zip`, defaults to `True`.
152    """
153
154    # not a props field
155    _modid: str | None = None
156
157    @property
158    def modid(self):
159        return self._modid
160
161    @property
162    @override
163    def _paths(self):
164        return [self.path]
165
166    def set_modid(self, modid: str) -> Self:
167        self._modid = modid
168        return self
169
170    @contextmanager
171    @override
172    def load(self, pm: PluginManager):
173        if self.archive:
174            with self._extract_archive() as path:
175                update = {
176                    "path": path,
177                    "archive": False,
178                }
179                yield [self.model_copy(update=update)]
180        else:
181            yield [self]
182
183    @contextmanager
184    def _extract_archive(self) -> Iterator[Path]:
185        with (
186            ZipFile(self.path, "r") as zf,
187            TemporaryDirectory(suffix=self.path.name) as tempdir,
188        ):
189            # extract root-level files and *useful* sub-directories
190            # ie. avoid extracting classes etc
191            for info in zf.filelist:
192                path = info.filename
193                if path.startswith(("assets/", "data/")) or "/" not in path:
194                    zf.extract(info, tempdir)
195
196            yield Path(tempdir)
197
198    @model_validator(mode="before")
199    def _pre_root(cls: Any, value: Any):
200        # treat plain strings as paths
201        if isinstance(value, str):
202            return {"path": value}
203        return value
204
205    @model_validator(mode="after")
206    def _post_root(self):
207        if cast_nullable(self.archive) is None:
208            self.archive = self.path.suffix in {".jar", ".zip"}
209        return self

Represents a path to a resources directory or a mod's .jar file.

path: typing.Annotated[pathlib.Path, AfterValidator(func=<function <lambda> at 0x7f29b39d79c0>)]

A path relative to hexdoc.toml.

archive: bool

If true, treat this path as a zip archive (eg. a mod's .jar file).

If path ends with .jar or .zip, defaults to True.

modid
157    @property
158    def modid(self):
159        return self._modid
def set_modid(self, modid: str) -> Self:
166    def set_modid(self, modid: str) -> Self:
167        self._modid = modid
168        return self
@contextmanager
@override
def load(self, pm: hexdoc.plugin.PluginManager):
170    @contextmanager
171    @override
172    def load(self, pm: PluginManager):
173        if self.archive:
174            with self._extract_archive() as path:
175                update = {
176                    "path": path,
177                    "archive": False,
178                }
179                yield [self.model_copy(update=update)]
180        else:
181            yield [self]
def model_post_init(self: pydantic.main.BaseModel, context: Any, /) -> None:
337def init_private_attributes(self: BaseModel, context: Any, /) -> None:
338    """This function is meant to behave like a BaseModel method to initialise private attributes.
339
340    It takes context as an argument since that's what pydantic-core passes when calling it.
341
342    Args:
343        self: The BaseModel instance.
344        context: The context.
345    """
346    if getattr(self, '__pydantic_private__', None) is None:
347        pydantic_private = {}
348        for name, private_attr in self.__private_attributes__.items():
349            default = private_attr.get_default()
350            if default is not PydanticUndefined:
351                pydantic_private[name] = default
352        object_setattr(self, '__pydantic_private__', pydantic_private)

This function is meant to behave like a BaseModel method to initialise private attributes.

It takes context as an argument since that's what pydantic-core passes when calling it.

Args: self: The BaseModel instance. context: The context.

class PluginResourceDir(hexdoc.core.BaseResourceDir):
304class PluginResourceDir(BaseResourceDir):
305    modid: str
306
307    # if we're specifying a modid, it's probably from some other mod/package
308    external: bool = True
309    reexport: bool = False
310
311    @contextmanager
312    @override
313    def load(self, pm: PluginManager):
314        with ExitStack() as stack:
315            yield list(self._load_all(pm, stack))  # NOT "yield from"
316
317    def _load_all(self, pm: PluginManager, stack: ExitStack):
318        for module in pm.load_resources(self.modid):
319            traversable = resources.files(module)
320            path = stack.enter_context(resources.as_file(traversable))
321
322            yield PathResourceDir(
323                path=path,
324                external=self.external,
325                reexport=self.reexport,
326            ).set_modid(self.modid)  # setting _modid directly causes a validation error

Base class for all Pydantic models in hexdoc.

Sets the default model config, and overrides __init__ to allow using the init_context context manager to set validation context for constructors.

modid: str
external: bool
reexport: bool

If not set, the default value will be not self.external.

Must be defined AFTER external in the Pydantic model.

@contextmanager
@override
def load(self, pm: hexdoc.plugin.PluginManager):
311    @contextmanager
312    @override
313    def load(self, pm: PluginManager):
314        with ExitStack() as stack:
315            yield list(self._load_all(pm, stack))  # NOT "yield from"
class Properties(hexdoc.core.BaseProperties):
216class Properties(BaseProperties):
217    """Pydantic model for `hexdoc.toml` / `properties.toml`."""
218
219    modid: str
220
221    book_type: str = "patchouli"
222    """Modid of the `hexdoc.plugin.BookPlugin` to use when loading this book."""
223
224    # TODO: make another properties type without book_id
225    book_id: ResourceLocation | None = Field(alias="book", default=None)
226    extra_books: list[ResourceLocation] = Field(default_factory=list)
227
228    default_lang: str = "en_us"
229    default_branch: str = "main"
230
231    is_0_black: bool = False
232    """If true, the style `$(0)` changes the text color to black; otherwise it resets
233    the text color to the default."""
234
235    resource_dirs: Sequence[ResourceDir]
236    export_dir: RelativePath | None = None
237
238    entry_id_blacklist: set[ResourceLocation] = Field(default_factory=set)
239
240    macros: dict[str, str] = Field(default_factory=dict)
241    link_overrides: dict[str, str] = Field(default_factory=dict)
242
243    textures: TexturesProps = Field(default_factory=TexturesProps)
244
245    template: TemplateProps | None = None
246
247    lang: defaultdict[
248        str,
249        Annotated[LangProps, Field(default_factory=LangProps)],
250    ] = Field(default_factory=lambda: defaultdict(LangProps))
251    """Per-language configuration. The key should be the language code, eg. `en_us`."""
252
253    extra: dict[str, Any] = Field(default_factory=dict)
254
255    def mod_loc(self, path: str) -> ResourceLocation:
256        """Returns a ResourceLocation with self.modid as the namespace."""
257        return ResourceLocation(self.modid, path)
258
259    @property
260    def prerender_dir(self):
261        return self.cache_dir / "prerender"
262
263    @property
264    def cache_dir(self):
265        return self.repo_root / ".hexdoc"
266
267    @cached_property
268    def repo_root(self):
269        return git_root(self.props_dir)

Pydantic model for hexdoc.toml / properties.toml.

modid: str
book_type: str

Modid of the hexdoc.plugin.BookPlugin to use when loading this book.

book_id: ResourceLocation | None
extra_books: list[ResourceLocation]
default_lang: str
default_branch: str
is_0_black: bool

If true, the style $(0) changes the text color to black; otherwise it resets the text color to the default.

resource_dirs: Sequence[PathResourceDir | hexdoc.core.resource_dir.PatchouliBooksResourceDir | PluginResourceDir | hexdoc.core.resource_dir.GlobResourceDir]
export_dir: Optional[Annotated[pathlib.Path, AfterValidator(func=<function <lambda> at 0x7f29b39d79c0>)]]
entry_id_blacklist: set[ResourceLocation]
macros: dict[str, str]
lang: collections.defaultdict[str, typing.Annotated[hexdoc.core.properties.LangProps, FieldInfo(annotation=NoneType, required=False, default_factory=LangProps)]]

Per-language configuration. The key should be the language code, eg. en_us.

extra: dict[str, typing.Any]
def mod_loc(self, path: str) -> ResourceLocation:
255    def mod_loc(self, path: str) -> ResourceLocation:
256        """Returns a ResourceLocation with self.modid as the namespace."""
257        return ResourceLocation(self.modid, path)

Returns a ResourceLocation with self.modid as the namespace.

prerender_dir
259    @property
260    def prerender_dir(self):
261        return self.cache_dir / "prerender"
cache_dir
263    @property
264    def cache_dir(self):
265        return self.repo_root / ".hexdoc"
repo_root
267    @cached_property
268    def repo_root(self):
269        return git_root(self.props_dir)
ResLoc = <class 'ResourceLocation'>
ResourceDir = PathResourceDir | hexdoc.core.resource_dir.PatchouliBooksResourceDir | PluginResourceDir | hexdoc.core.resource_dir.GlobResourceDir
@dataclass(frozen=True, repr=False)
class ResourceLocation(hexdoc.core.BaseResourceLocation):
163@dataclass(frozen=True, repr=False)
164class ResourceLocation(BaseResourceLocation, regex=_make_regex()):
165    """Represents a Minecraft resource location / namespaced ID."""
166
167    is_tag: bool = False
168
169    @classmethod
170    def from_str(cls, raw: str) -> Self:
171        id = super().from_str(raw.removeprefix("#"))
172        if raw.startswith("#"):
173            object.__setattr__(id, "is_tag", True)
174        return id
175
176    @classmethod
177    def from_file(cls, modid: str, base_dir: Path, path: Path) -> Self:
178        resource_path = path.relative_to(base_dir).with_suffix("").as_posix()
179        return cls(modid, resource_path)
180
181    @classmethod
182    def from_model_path(cls, model_path: str | Path) -> Self:
183        match = MODEL_PATH_REGEX.search(Path(model_path).as_posix())
184        if not match:
185            raise ValueError(f"Failed to match model path: {model_path}")
186        return cls(match["namespace"], match["path"])
187
188    @property
189    def href(self) -> str:
190        return f"#{self.path}"
191
192    @property
193    def css_class(self) -> str:
194        stripped_path = re.sub(r"[\*\/\.]", "-", self.path)
195        return f"texture-{self.namespace}-{stripped_path}"
196
197    def with_namespace(self, namespace: str) -> Self:
198        """Returns a copy of this ResourceLocation with the given namespace."""
199        return self.__class__(namespace, self.path)
200
201    def with_path(self, path: str | Path) -> Self:
202        """Returns a copy of this ResourceLocation with the given path."""
203        if isinstance(path, Path):
204            path = path.as_posix()
205        return self.__class__(self.namespace, path)
206
207    def match(self, pattern: Self) -> bool:
208        return fnmatch(str(self), str(pattern))
209
210    def template_path(self, type: str, folder: str = "") -> str:
211        return self.file_path_stub(type, folder, assume_json=False).as_posix()
212
213    def file_path_stub(
214        self,
215        type: ResourceType | str,
216        folder: str | Path = "",
217        assume_json: bool = True,
218    ) -> Path:
219        """Returns the path to find this resource within a resource directory.
220
221        If `assume_json` is True and no file extension is provided, `.json` is assumed.
222
223        For example:
224        ```py
225        ResLoc("hexcasting", "thehexbook/book").file_path_stub("data", "patchouli_books")
226        # data/hexcasting/patchouli_books/thehexbook/book.json
227        ```
228        """
229        # if folder is an empty string, Path won't add an extra slash
230        path = Path(type) / self.namespace / folder / self.path
231        if assume_json and not path.suffix:
232            return path.with_suffix(".json")
233        return path
234
235    def removeprefix(self, prefix: str) -> Self:
236        return self.with_path(self.path.removeprefix(prefix))
237
238    def __truediv__(self, other: str) -> Self:
239        return self.with_path(f"{self.path}/{other}")
240
241    def __rtruediv__(self, other: str) -> Self:
242        return self.with_path(f"{other}/{self.path}")
243
244    def __add__(self, other: str) -> Self:
245        return self.with_path(self.path + other)
246
247    def __repr__(self) -> str:
248        s = super().__repr__()
249        if self.is_tag:
250            return f"#{s}"
251        return s

Represents a Minecraft resource location / namespaced ID.

ResourceLocation(*args: Any, **kwargs: Any)
118    def __init__(__dataclass_self__: PydanticDataclass, *args: Any, **kwargs: Any) -> None:
119        __tracebackhide__ = True
120        s = __dataclass_self__
121        s.__pydantic_validator__.validate_python(ArgsKwargs(args, kwargs), self_instance=s)
is_tag: bool = False
@classmethod
def from_str(cls, raw: str) -> Self:
169    @classmethod
170    def from_str(cls, raw: str) -> Self:
171        id = super().from_str(raw.removeprefix("#"))
172        if raw.startswith("#"):
173            object.__setattr__(id, "is_tag", True)
174        return id
@classmethod
def from_file(cls, modid: str, base_dir: pathlib.Path, path: pathlib.Path) -> Self:
176    @classmethod
177    def from_file(cls, modid: str, base_dir: Path, path: Path) -> Self:
178        resource_path = path.relative_to(base_dir).with_suffix("").as_posix()
179        return cls(modid, resource_path)
@classmethod
def from_model_path(cls, model_path: str | pathlib.Path) -> Self:
181    @classmethod
182    def from_model_path(cls, model_path: str | Path) -> Self:
183        match = MODEL_PATH_REGEX.search(Path(model_path).as_posix())
184        if not match:
185            raise ValueError(f"Failed to match model path: {model_path}")
186        return cls(match["namespace"], match["path"])
href: str
188    @property
189    def href(self) -> str:
190        return f"#{self.path}"
css_class: str
192    @property
193    def css_class(self) -> str:
194        stripped_path = re.sub(r"[\*\/\.]", "-", self.path)
195        return f"texture-{self.namespace}-{stripped_path}"
def with_namespace(self, namespace: str) -> Self:
197    def with_namespace(self, namespace: str) -> Self:
198        """Returns a copy of this ResourceLocation with the given namespace."""
199        return self.__class__(namespace, self.path)

Returns a copy of this ResourceLocation with the given namespace.

def with_path(self, path: str | pathlib.Path) -> Self:
201    def with_path(self, path: str | Path) -> Self:
202        """Returns a copy of this ResourceLocation with the given path."""
203        if isinstance(path, Path):
204            path = path.as_posix()
205        return self.__class__(self.namespace, path)

Returns a copy of this ResourceLocation with the given path.

def match(self, pattern: Self) -> bool:
207    def match(self, pattern: Self) -> bool:
208        return fnmatch(str(self), str(pattern))
def template_path(self, type: str, folder: str = '') -> str:
210    def template_path(self, type: str, folder: str = "") -> str:
211        return self.file_path_stub(type, folder, assume_json=False).as_posix()
def file_path_stub( self, type: Union[Literal['assets', 'data', ''], str], folder: str | pathlib.Path = '', assume_json: bool = True) -> pathlib.Path:
213    def file_path_stub(
214        self,
215        type: ResourceType | str,
216        folder: str | Path = "",
217        assume_json: bool = True,
218    ) -> Path:
219        """Returns the path to find this resource within a resource directory.
220
221        If `assume_json` is True and no file extension is provided, `.json` is assumed.
222
223        For example:
224        ```py
225        ResLoc("hexcasting", "thehexbook/book").file_path_stub("data", "patchouli_books")
226        # data/hexcasting/patchouli_books/thehexbook/book.json
227        ```
228        """
229        # if folder is an empty string, Path won't add an extra slash
230        path = Path(type) / self.namespace / folder / self.path
231        if assume_json and not path.suffix:
232            return path.with_suffix(".json")
233        return path

Returns the path to find this resource within a resource directory.

If assume_json is True and no file extension is provided, .json is assumed.

For example:

ResLoc("hexcasting", "thehexbook/book").file_path_stub("data", "patchouli_books")
# data/hexcasting/patchouli_books/thehexbook/book.json
def removeprefix(self, prefix: str) -> Self:
235    def removeprefix(self, prefix: str) -> Self:
236        return self.with_path(self.path.removeprefix(prefix))
ResourceType = typing.Literal['assets', 'data', '']
@dataclass(frozen=True)
class ValueIfVersion(hexdoc.core.Versioned, typing.Generic[~_If, ~_Else]):
123@dataclass(frozen=True)
124class ValueIfVersion(Versioned, Generic[_If, _Else]):
125    value_if: _If
126    value_else: _Else
127
128    def __call__(self) -> _If | _Else:
129        if self.is_current:
130            return self.value_if
131        return self.value_else
value_if: ~_If
value_else: ~_Else
class VersionSource(typing.Protocol):
22class VersionSource(Protocol):
23    @classmethod
24    def get(cls) -> str | None:
25        """Returns the current version."""
26        ...
27
28    @classmethod
29    def matches(cls, specifier: str | SpecifierSet) -> bool:
30        """Returns True if the current version matches the version_spec."""
31        ...

Base class for protocol classes.

Protocol classes are defined as:

class Proto(Protocol):
    def meth(self) -> int:
        ...

Such classes are primarily used with static type checkers that recognize structural subtyping (static duck-typing).

For example:

class C:
    def meth(self) -> int:
        return 0

def func(x: Proto) -> int:
    return x.meth()

func(C())  # Passes static type check

See PEP 544 for details. Protocol classes decorated with @typing.runtime_checkable act as simple-minded runtime protocols that check only the presence of given attributes, ignoring their type signatures. Protocol classes can be generic, they are defined as:

class GenProto(Protocol[T]):
    def meth(self) -> T:
        ...
VersionSource(*args, **kwargs)
1953def _no_init_or_replace_init(self, *args, **kwargs):
1954    cls = type(self)
1955
1956    if cls._is_protocol:
1957        raise TypeError('Protocols cannot be instantiated')
1958
1959    # Already using a custom `__init__`. No need to calculate correct
1960    # `__init__` to call. This can lead to RecursionError. See bpo-45121.
1961    if cls.__init__ is not _no_init_or_replace_init:
1962        return
1963
1964    # Initially, `__init__` of a protocol subclass is set to `_no_init_or_replace_init`.
1965    # The first instantiation of the subclass will call `_no_init_or_replace_init` which
1966    # searches for a proper new `__init__` in the MRO. The new `__init__`
1967    # replaces the subclass' old `__init__` (ie `_no_init_or_replace_init`). Subsequent
1968    # instantiation of the protocol subclass will thus use the new
1969    # `__init__` and no longer call `_no_init_or_replace_init`.
1970    for base in cls.__mro__:
1971        init = base.__dict__.get('__init__', _no_init_or_replace_init)
1972        if init is not _no_init_or_replace_init:
1973            cls.__init__ = init
1974            break
1975    else:
1976        # should not happen
1977        cls.__init__ = object.__init__
1978
1979    cls.__init__(self, *args, **kwargs)
@classmethod
def get(cls) -> str | None:
23    @classmethod
24    def get(cls) -> str | None:
25        """Returns the current version."""
26        ...

Returns the current version.

@classmethod
def matches(cls, specifier: str | packaging.specifiers.SpecifierSet) -> bool:
28    @classmethod
29    def matches(cls, specifier: str | SpecifierSet) -> bool:
30        """Returns True if the current version matches the version_spec."""
31        ...

Returns True if the current version matches the given specifier.

@dataclass(frozen=True)
class Versioned:
52@dataclass(frozen=True)
53class Versioned:
54    """Base class for types which can behave differently based on a version source,
55    which defaults to MinecraftVersion."""
56
57    version_spec: str
58    version_source: VersionSource = field(default=MinecraftVersion, kw_only=True)
59
60    @property
61    def is_current(self):
62        return self.version_source.matches(self.version_spec)

Base class for types which can behave differently based on a version source, which defaults to MinecraftVersion.

Versioned(version_spec: str, *, version_source: VersionSource = MinecraftVersion)
version_spec: str
version_source: VersionSource = <class 'MinecraftVersion'>
is_current
60    @property
61    def is_current(self):
62        return self.version_source.matches(self.version_spec)