mapack-0.0.0.tar.gz

This diff shows the content of a publicly available package version as released to its public registry. It is provided for informational purposes only and reflects the package contents exactly as they appear in that registry.
mapack-0.0.0/PKG-INFO ADDED
@@ -0,0 +1,31 @@
1
+ Metadata-Version: 2.4
2
+ Name: mapack
3
+ Version: 0.0.0
4
+ Summary: Config-driven Minecraft map packer CLI
5
+ Keywords: minecraft,cli,packaging,jsonc
6
+ Classifier: Development Status :: 3 - Alpha
7
+ Classifier: Intended Audience :: Developers
8
+ Classifier: Programming Language :: Python :: 3
9
+ Classifier: Programming Language :: Python :: 3.10
10
+ Classifier: Programming Language :: Python :: 3.11
11
+ Classifier: Programming Language :: Python :: 3.12
12
+ Classifier: Programming Language :: Python :: 3.13
13
+ Classifier: Operating System :: OS Independent
14
+ Classifier: Environment :: Console
15
+ Requires-Python: >=3.10
16
+ Description-Content-Type: text/markdown
17
+ Requires-Dist: click<9,>=8.1
18
+
19
+ # Mapack (Minecraft Map Packer)
20
+
21
+ Config-driven CLI tool to build and export Minecraft map artifacts from a JSON/JSONC config file.
22
+
23
+ ## Usage
24
+
25
+ ```bash
26
+ mapack <config.jsonc>
27
+ ```
28
+
29
+ ## Documentation
30
+
31
+ To Be Written.
mapack-0.0.0/README.md ADDED
@@ -0,0 +1,13 @@
1
+ # Mapack (Minecraft Map Packer)
2
+
3
+ Config-driven CLI tool to build and export Minecraft map artifacts from a JSON/JSONC config file.
4
+
5
+ ## Usage
6
+
7
+ ```bash
8
+ mapack <config.jsonc>
9
+ ```
10
+
11
+ ## Documentation
12
+
13
+ To Be Written.
mapack-0.0.0/app/__init__.py ADDED
@@ -0,0 +1,3 @@
1
+ from .cli import main
2
+
3
+ __all__ = ["main"]
mapack-0.0.0/app/cli.py ADDED
@@ -0,0 +1,43 @@
1
+ from __future__ import annotations
2
+
3
+ import logging
4
+ from pathlib import Path
5
+
6
+ import click
7
+
8
+ from config.parser import load_json_or_jsonc
9
+ from core.interpreter import ConfigInterpreter
10
+
11
+ logging.basicConfig(level=logging.INFO, format="[%(levelname)s] %(message)s")
12
+ logger = logging.getLogger("mapack")
13
+
14
+
15
+ @click.command(context_settings={"help_option_names": ["-h", "--help"]})
16
+ @click.argument("config_file", type=click.Path(exists=True, dir_okay=False, path_type=Path))
17
+ @click.option(
18
+ "--target",
19
+ "targets",
20
+ multiple=True,
21
+ help="Target(s) to execute. If omitted, all targets are executed.",
22
+ )
23
+ @click.option("--dry-run", is_flag=True, default=False, help="Build plan without writing output files.")
24
+ def main(config_file: Path, targets: tuple[str, ...], dry_run: bool) -> None:
25
+ """Pack maps from a JSON/JSONC config file."""
26
+ config_path = config_file.resolve()
27
+ config = load_json_or_jsonc(config_path)
28
+
29
+ interpreter = ConfigInterpreter(config=config, config_path=config_path)
30
+ outputs_by_target = interpreter.run(list(targets) if targets else None, dry_run=dry_run)
31
+
32
+ click.echo("Build finished.")
33
+ for target_name, outputs in outputs_by_target.items():
34
+ click.echo(f"- target={target_name}")
35
+ if outputs:
36
+ for output in outputs:
37
+ click.echo(f" - {output}")
38
+ else:
39
+ click.echo(" - (no exported artifacts)")
40
+
41
+
42
+ if __name__ == "__main__":
43
+ main()
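The CLI above can be exercised in-process with click's test runner. A minimal sketch follows, assuming the package is installed so that `app.cli` is importable; the config below is the smallest shape `core/interpreter.py` accepts (one target with empty `variables` and `artifacts`).

```python
# A minimal sketch, assuming mapack is installed and `app.cli` is importable.
import json
from pathlib import Path
from tempfile import TemporaryDirectory

from click.testing import CliRunner

from app.cli import main

with TemporaryDirectory() as tmp:
    config_path = Path(tmp) / "demo.jsonc"
    # Smallest config the interpreter accepts: one target, no artifacts.
    config_path.write_text(json.dumps({"targets": {"demo": {"variables": {}, "artifacts": {}}}}))

    runner = CliRunner()
    result = runner.invoke(main, [str(config_path), "--dry-run"])
    print(result.output)           # "Build finished." plus a per-target summary
    assert result.exit_code == 0
```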
mapack-0.0.0/config/__init__.py ADDED
@@ -0,0 +1,13 @@
1
+ from .expressions import ExpressionContext, evaluate_expression
2
+ from .parser import load_json_or_jsonc
3
+ from .templating import get_dotted, render_template, resolve_templates, set_dotted
4
+
5
+ __all__ = [
6
+ "ExpressionContext",
7
+ "evaluate_expression",
8
+ "load_json_or_jsonc",
9
+ "get_dotted",
10
+ "render_template",
11
+ "resolve_templates",
12
+ "set_dotted",
13
+ ]
mapack-0.0.0/config/expressions.py ADDED
@@ -0,0 +1,72 @@
1
+ from __future__ import annotations
2
+
3
+ import ast
4
+ from dataclasses import dataclass
5
+ from pathlib import Path
6
+ from typing import Any, Callable
7
+
8
+
9
+ @dataclass(slots=True)
10
+ class ExpressionContext:
11
+ cwd: Path
12
+
13
+
14
+ _ALLOWED_BOOL_NAMES = {
15
+ "true": True,
16
+ "false": False,
17
+ "null": None,
18
+ "none": None,
19
+ }
20
+
21
+
22
+ class SafeExpressionEvaluator(ast.NodeVisitor):
23
+ def __init__(self, *, functions: dict[str, Callable[..., Any]]) -> None:
24
+ self.functions = functions
25
+
26
+ def visit_Expression(self, node: ast.Expression) -> Any: # noqa: N802
27
+ return self.visit(node.body)
28
+
29
+ def visit_Constant(self, node: ast.Constant) -> Any: # noqa: N802
30
+ return node.value
31
+
32
+ def visit_Name(self, node: ast.Name) -> Any: # noqa: N802
33
+ key = node.id.casefold()
34
+ if key in _ALLOWED_BOOL_NAMES:
35
+ return _ALLOWED_BOOL_NAMES[key]
36
+ raise ValueError(f"Unknown name in expression: {node.id}")
37
+
38
+ def visit_Call(self, node: ast.Call) -> Any: # noqa: N802
39
+ if not isinstance(node.func, ast.Name):
40
+ raise ValueError("Only direct function calls are supported")
41
+ if node.func.id not in self.functions:
42
+ raise ValueError(f"Function not allowed: {node.func.id}")
43
+
44
+ fn = self.functions[node.func.id]
45
+ args = [self.visit(arg) for arg in node.args]
46
+ kwargs = {kw.arg: self.visit(kw.value) for kw in node.keywords if kw.arg is not None}
47
+ return fn(*args, **kwargs)
48
+
49
+ def generic_visit(self, node: ast.AST) -> Any:
50
+ raise ValueError(f"Unsupported expression node: {type(node).__name__}")
51
+
52
+
53
+ def evaluate_expression(text: str, *, context: ExpressionContext) -> Any:
54
+ def count_files(path: str, recursive: bool = True, include_dirs: bool = False) -> int:
55
+ root = (context.cwd / path).resolve()
56
+ if not root.exists():
57
+ return 0
58
+
59
+ total = 0
60
+ if recursive:
61
+ for child in root.rglob("*"):
62
+ if child.is_file() or (include_dirs and child.is_dir()):
63
+ total += 1
64
+ else:
65
+ for child in root.iterdir():
66
+ if child.is_file() or (include_dirs and child.is_dir()):
67
+ total += 1
68
+ return total
69
+
70
+ tree = ast.parse(text, mode="eval")
71
+ evaluator = SafeExpressionEvaluator(functions={"count_files": count_files})
72
+ return evaluator.visit(tree)
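A short usage sketch of the expression evaluator above, assuming the module is importable as `config.expressions`; `count_files` is the only function exposed to expressions.

```python
# A minimal sketch of the safe expression evaluator.
from pathlib import Path

from config.expressions import ExpressionContext, evaluate_expression

ctx = ExpressionContext(cwd=Path("."))

# count_files() is resolved relative to the context cwd; missing paths count as 0.
print(evaluate_expression("count_files('datapacks', recursive=False)", context=ctx))

# Lowercase true/false/null/none are the only bare names the evaluator resolves.
print(evaluate_expression("true", context=ctx))   # -> True

# Anything else (operators, attribute access, unknown names) raises ValueError.
```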
mapack-0.0.0/config/parser.py ADDED
@@ -0,0 +1,120 @@
1
+ from __future__ import annotations
2
+
3
+ import json
4
+ from pathlib import Path
5
+
6
+
7
+ def _strip_jsonc(text: str) -> str:
8
+ """Remove // and /* */ comments while preserving string literals."""
9
+ out: list[str] = []
10
+ i = 0
11
+ in_string = False
12
+ string_quote = ""
13
+ escaped = False
14
+ in_line_comment = False
15
+ in_block_comment = False
16
+
17
+ while i < len(text):
18
+ ch = text[i]
19
+ nxt = text[i + 1] if i + 1 < len(text) else ""
20
+
21
+ if in_line_comment:
22
+ if ch == "\n":
23
+ in_line_comment = False
24
+ out.append(ch)
25
+ i += 1
26
+ continue
27
+
28
+ if in_block_comment:
29
+ if ch == "*" and nxt == "/":
30
+ in_block_comment = False
31
+ i += 2
32
+ else:
33
+ i += 1
34
+ continue
35
+
36
+ if in_string:
37
+ out.append(ch)
38
+ if escaped:
39
+ escaped = False
40
+ elif ch == "\\":
41
+ escaped = True
42
+ elif ch == string_quote:
43
+ in_string = False
44
+ i += 1
45
+ continue
46
+
47
+ if ch in ('"', "'"):
48
+ in_string = True
49
+ string_quote = ch
50
+ out.append(ch)
51
+ i += 1
52
+ continue
53
+
54
+ if ch == "/" and nxt == "/":
55
+ in_line_comment = True
56
+ i += 2
57
+ continue
58
+
59
+ if ch == "/" and nxt == "*":
60
+ in_block_comment = True
61
+ i += 2
62
+ continue
63
+
64
+ out.append(ch)
65
+ i += 1
66
+
67
+ return "".join(out)
68
+
69
+
70
+ def _strip_trailing_commas(text: str) -> str:
71
+ """Remove trailing commas before ] or } while preserving strings."""
72
+ out: list[str] = []
73
+ i = 0
74
+ in_string = False
75
+ string_quote = ""
76
+ escaped = False
77
+
78
+ while i < len(text):
79
+ ch = text[i]
80
+
81
+ if in_string:
82
+ out.append(ch)
83
+ if escaped:
84
+ escaped = False
85
+ elif ch == "\\":
86
+ escaped = True
87
+ elif ch == string_quote:
88
+ in_string = False
89
+ i += 1
90
+ continue
91
+
92
+ if ch in ('"', "'"):
93
+ in_string = True
94
+ string_quote = ch
95
+ out.append(ch)
96
+ i += 1
97
+ continue
98
+
99
+ if ch == ",":
100
+ j = i + 1
101
+ while j < len(text) and text[j] in " \t\r\n":
102
+ j += 1
103
+ if j < len(text) and text[j] in "]}":
104
+ i += 1
105
+ continue
106
+
107
+ out.append(ch)
108
+ i += 1
109
+
110
+ return "".join(out)
111
+
112
+
113
+ def load_json_or_jsonc(config_path: Path) -> dict:
114
+ raw = config_path.read_text(encoding="utf-8")
115
+ cleaned = _strip_jsonc(raw)
116
+ cleaned = _strip_trailing_commas(cleaned)
117
+ data = json.loads(cleaned)
118
+ if not isinstance(data, dict):
119
+ raise ValueError("Top-level config must be an object")
120
+ return data
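A quick sketch of the JSONC loader above, assuming the module is importable as `config.parser`; the temporary file and its contents are illustrative.

```python
# A minimal sketch: comments and trailing commas are stripped before json.loads.
from pathlib import Path
from tempfile import TemporaryDirectory

from config.parser import load_json_or_jsonc

with TemporaryDirectory() as tmp:
    path = Path(tmp) / "example.jsonc"
    path.write_text(
        """
        {
          // line comments are stripped
          "name": "demo", /* block comments too */
          "tags": ["map", "packed",],  // trailing commas are tolerated
        }
        """,
        encoding="utf-8",
    )
    print(load_json_or_jsonc(path))   # {'name': 'demo', 'tags': ['map', 'packed']}
```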
mapack-0.0.0/config/templating.py ADDED
@@ -0,0 +1,47 @@
1
+ from __future__ import annotations
2
+
3
+ import copy
4
+ import re
5
+ from typing import Any
6
+
7
+ _TOKEN = re.compile(r"\{([a-zA-Z0-9_.-]+)\}")
8
+
9
+
10
+ def get_dotted(mapping: dict[str, Any], dotted: str) -> Any:
11
+ current: Any = mapping
12
+ for part in dotted.split("."):
13
+ if not isinstance(current, dict) or part not in current:
14
+ raise KeyError(dotted)
15
+ current = current[part]
16
+ return current
17
+
18
+
19
+ def set_dotted(mapping: dict[str, Any], dotted: str, value: Any) -> None:
20
+ parts = dotted.split(".")
21
+ current: dict[str, Any] = mapping
22
+ for part in parts[:-1]:
23
+ child = current.get(part)
24
+ if not isinstance(child, dict):
25
+ child = {}
26
+ current[part] = child
27
+ current = child
28
+ current[parts[-1]] = value
29
+
30
+
31
+ def render_template(value: str, scope: dict[str, Any]) -> str:
32
+ def replace(match: re.Match[str]) -> str:
33
+ key = match.group(1)
34
+ resolved = get_dotted(scope, key)
35
+ return str(resolved)
36
+
37
+ return _TOKEN.sub(replace, value)
38
+
39
+
40
+ def resolve_templates(obj: Any, scope: dict[str, Any]) -> Any:
41
+ if isinstance(obj, str):
42
+ return render_template(obj, scope)
43
+ if isinstance(obj, list):
44
+ return [resolve_templates(item, scope) for item in obj]
45
+ if isinstance(obj, dict):
46
+ return {k: resolve_templates(v, scope) for k, v in obj.items()}
47
+ return copy.deepcopy(obj)
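A short sketch of the templating helpers above, assuming the module is importable as `config.templating`; the scope values are illustrative.

```python
# Tokens use {dotted.path} syntax resolved against a nested scope dict.
from config.templating import get_dotted, render_template, resolve_templates, set_dotted

scope = {"map": {"name": "skyblock", "version": 3}}

print(get_dotted(scope, "map.name"))                             # skyblock
print(render_template("{map.name}-v{map.version}.zip", scope))   # skyblock-v3.zip

set_dotted(scope, "export.dir", "dist")
print(scope["export"])                                           # {'dir': 'dist'}

spec = {"dest": "{export.dir}/{map.name}.zip", "keep": ["{map.name}"]}
print(resolve_templates(spec, scope))
# {'dest': 'dist/skyblock.zip', 'keep': ['skyblock']}
```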
mapack-0.0.0/core/__init__.py ADDED
@@ -0,0 +1,4 @@
1
+ from .interpreter import ConfigInterpreter
2
+ from .runtime import ArtifactResult, InterpreterState
3
+
4
+ __all__ = ["ConfigInterpreter", "ArtifactResult", "InterpreterState"]
mapack-0.0.0/core/interpreter.py ADDED
@@ -0,0 +1,347 @@
1
+ from __future__ import annotations
2
+
3
+ import copy
4
+ import logging
5
+ import shutil
6
+ from dataclasses import dataclass
7
+ from pathlib import Path
8
+ from tempfile import TemporaryDirectory
9
+ from typing import Any
10
+
11
+ from config.expressions import ExpressionContext, evaluate_expression
12
+ from config.templating import render_template, resolve_templates, set_dotted
13
+ from transforms import load_builtin_transforms
14
+ from transforms.registry import run_transform
15
+ from .runtime import ArtifactResult, InterpreterState
16
+
17
+ logger = logging.getLogger("mapack")
18
+
19
+
20
+ @dataclass(slots=True)
21
+ class TransformContext:
22
+ interpreter: "ConfigInterpreter"
23
+ state: InterpreterState
24
+ artifact_name: str
25
+ workdir: Path
26
+
27
+ def resolve_value(self, value: Any) -> Any:
28
+ return self.interpreter._resolve_value(value, self.state)
29
+
30
+ def resolve_expr_or_value(self, value: Any) -> Any:
31
+ return self.interpreter._resolve_expr_or_value(value, self.state, self.workdir)
32
+
33
+ def resolve_source(self, source_spec: Any, *, allow_artifact_output: bool) -> Path:
34
+ return self.interpreter._resolve_source(source_spec, self.state, allow_artifact_output=allow_artifact_output)
35
+
36
+ def run_nested_transform(self, spec: dict[str, Any]) -> None:
37
+ self.interpreter._run_transform(spec, self.state, self.artifact_name, self.workdir)
38
+
39
+
40
+ class ConfigInterpreter:
41
+ def __init__(self, config: dict[str, Any], config_path: Path) -> None:
42
+ self.config = config
43
+ self.config_path = config_path.resolve()
44
+ load_builtin_transforms()
45
+
46
+ def run(self, targets: list[str] | None = None, *, dry_run: bool = False) -> dict[str, list[Path]]:
47
+ available_targets = self._get_targets()
48
+ selected = targets or list(available_targets.keys())
49
+
50
+ outputs_by_target: dict[str, list[Path]] = {}
51
+ for target_name in selected:
52
+ if target_name not in available_targets:
53
+ raise KeyError(f"Unknown target: {target_name}")
54
+
55
+ merged_target = self._materialize_target(target_name)
56
+ state = self._build_state_for_target(target_name, merged_target)
57
+ outputs_by_target[target_name] = self._execute_target(state, merged_target, dry_run=dry_run)
58
+
59
+ return outputs_by_target
60
+
61
+ def _execute_target(self, state: InterpreterState, target_config: dict[str, Any], *, dry_run: bool) -> list[Path]:
62
+ artifacts = target_config.get("artifacts")
63
+ if not isinstance(artifacts, dict):
64
+ raise ValueError("target.artifacts must be an object")
65
+
66
+ requested: list[str] = []
67
+ for name, spec in artifacts.items():
68
+ if not isinstance(spec, dict):
69
+ continue
70
+ export = spec.get("export") or {}
71
+ if isinstance(export, dict) and export.get("enabled", False):
72
+ requested.append(name)
73
+
74
+ produced: list[Path] = []
75
+ with TemporaryDirectory(prefix=f"mapack-{state.target_name}-") as tmpdir:
76
+ tmp_root = Path(tmpdir)
77
+ for artifact_name in requested:
78
+ result = self._build_artifact(
79
+ artifact_name,
80
+ state=state,
81
+ target_artifacts=artifacts,
82
+ temp_root=tmp_root,
83
+ dry_run=dry_run,
84
+ )
85
+ if result.output_path:
86
+ produced.append(result.output_path)
87
+
88
+ return produced
89
+
90
+ def _build_artifact(
91
+ self,
92
+ artifact_name: str,
93
+ *,
94
+ state: InterpreterState,
95
+ target_artifacts: dict[str, Any],
96
+ temp_root: Path,
97
+ dry_run: bool,
98
+ ) -> ArtifactResult:
99
+ existing = state.artifact_results.get(artifact_name)
100
+ if existing is not None:
101
+ return existing
102
+
103
+ if artifact_name not in target_artifacts:
104
+ raise KeyError(f"Unknown artifact: {artifact_name}")
105
+
106
+ artifact_spec = target_artifacts[artifact_name]
107
+ if not isinstance(artifact_spec, dict):
108
+ raise ValueError(f"Artifact '{artifact_name}' definition must be an object")
109
+
110
+ depends_on = artifact_spec.get("depends_on", [])
111
+ if depends_on is None:
112
+ depends_on = []
113
+ if not isinstance(depends_on, list):
114
+ raise ValueError(f"Artifact '{artifact_name}' depends_on must be a list")
115
+
116
+ for dep in depends_on:
117
+ dep_name = str(dep)
118
+ self._build_artifact(dep_name, state=state, target_artifacts=target_artifacts, temp_root=temp_root, dry_run=dry_run)
119
+
120
+ workdir = temp_root / artifact_name
121
+ workdir.mkdir(parents=True, exist_ok=True)
122
+ result = ArtifactResult(name=artifact_name, workdir=workdir)
123
+ state.artifact_results[artifact_name] = result
124
+
125
+ if not dry_run:
126
+ src_spec = artifact_spec.get("src")
127
+ if src_spec is not None:
128
+ src_path = self._resolve_source(src_spec, state, allow_artifact_output=False)
129
+ self._copy_source_to_artifact_root(src_path, workdir)
130
+
131
+ for transform in artifact_spec.get("transforms", []):
132
+ self._run_transform(transform, state, artifact_name, workdir)
133
+ else:
134
+ logger.info("artifact=%s dry-run: skipped source copy and transforms", artifact_name)
135
+
136
+ export = artifact_spec.get("export")
137
+ if isinstance(export, dict) and export.get("enabled", False):
138
+ resolved_export = self._resolve_value(export, state)
139
+ if not isinstance(resolved_export, dict):
140
+ raise ValueError(f"Artifact '{artifact_name}' export must resolve to object")
141
+
142
+ dest_raw = resolved_export.get("dest")
143
+ if not isinstance(dest_raw, str):
144
+ raise ValueError(f"Artifact '{artifact_name}' export.dest must be a string")
145
+ dest_path = self._resolve_path(dest_raw)
146
+
147
+ zipped = bool(resolved_export.get("zipped", True))
148
+ if not dry_run:
149
+ if zipped:
150
+ self._zip_directory(workdir, dest_path)
151
+ else:
152
+ if dest_path.exists():
153
+ shutil.rmtree(dest_path, ignore_errors=True)
154
+ shutil.copytree(workdir, dest_path, dirs_exist_ok=True)
155
+ result.output_path = dest_path
156
+ logger.info("artifact=%s exported -> %s", artifact_name, dest_path)
157
+ else:
158
+ logger.info("artifact=%s built (no export)", artifact_name)
159
+
160
+ return result
161
+
162
+ def _run_transform(self, spec: dict[str, Any], state: InterpreterState, artifact_name: str, workdir: Path) -> None:
163
+ if not isinstance(spec, dict):
164
+ raise ValueError(f"Transform in '{artifact_name}' must be an object")
165
+ transform_type = spec.get("type")
166
+ if not isinstance(transform_type, str):
167
+ raise ValueError(f"Transform in '{artifact_name}' missing string field 'type'")
168
+
169
+ resolved_spec = self._resolve_value(spec, state)
170
+ if not isinstance(resolved_spec, dict):
171
+ raise ValueError("Resolved transform spec must be an object")
172
+
173
+ logger.info("artifact=%s transform=%s id=%s", artifact_name, transform_type, resolved_spec.get("id"))
174
+ ctx = TransformContext(interpreter=self, state=state, artifact_name=artifact_name, workdir=workdir)
175
+ run_transform(transform_type, ctx, resolved_spec)
176
+
177
+ def _copy_source_to_artifact_root(self, src_path: Path, workdir: Path) -> None:
178
+ if not src_path.exists():
179
+ raise FileNotFoundError(f"Artifact source does not exist: {src_path}")
180
+
181
+ if src_path.is_file():
182
+ shutil.copy2(src_path, workdir / src_path.name)
183
+ return
184
+
185
+ for child in src_path.iterdir():
186
+ dest = workdir / child.name
187
+ if child.is_dir():
188
+ shutil.copytree(child, dest, dirs_exist_ok=True)
189
+ else:
190
+ shutil.copy2(child, dest)
191
+
192
+ def _resolve_source(self, source_spec: Any, state: InterpreterState, *, allow_artifact_output: bool) -> Path:
193
+ resolved = self._resolve_value(source_spec, state)
194
+
195
+ if isinstance(resolved, str):
196
+ return self._resolve_path(resolved)
197
+
198
+ if isinstance(resolved, dict):
199
+ artifact_ref = resolved.get("artifact")
200
+ if artifact_ref is not None:
201
+ ref_name = str(artifact_ref)
202
+ if ref_name not in state.artifact_results:
203
+ raise KeyError(f"Referenced artifact has not been built yet: {ref_name}")
204
+ ref = state.artifact_results[ref_name]
205
+ wants_output = bool(resolved.get("output", False))
206
+ if wants_output:
207
+ if not allow_artifact_output:
208
+ raise ValueError("This source location does not allow artifact output references")
209
+ if ref.output_path is None:
210
+ raise ValueError(f"Artifact '{ref_name}' has no output to reference")
211
+ return ref.output_path
212
+ return ref.workdir
213
+
214
+ if "path" in resolved and isinstance(resolved["path"], str):
215
+ return self._resolve_path(resolved["path"])
216
+
217
+ raise ValueError(f"Unsupported source spec: {source_spec}")
218
+
219
+ def _resolve_path(self, value: str) -> Path:
220
+ path = Path(value)
221
+ if path.is_absolute():
222
+ return path
223
+ return (self.config_path.parent / path).resolve()
224
+
225
+ def _resolve_expr_or_value(self, value: Any, state: InterpreterState, cwd: Path) -> Any:
226
+ resolved = self._resolve_value(value, state)
227
+ if not isinstance(resolved, str):
228
+ return resolved
229
+
230
+ try:
231
+ return evaluate_expression(resolved, context=ExpressionContext(cwd=cwd))
232
+ except Exception:
233
+ return resolved
234
+
235
+ def _resolve_value(self, value: Any, state: InterpreterState) -> Any:
236
+ return resolve_templates(value, state.scope)
237
+
238
+ def _build_state_for_target(self, target_name: str, target_config: dict[str, Any]) -> InterpreterState:
239
+ variables = target_config.get("variables")
240
+ if not isinstance(variables, dict):
241
+ raise ValueError(f"target '{target_name}' missing object field: variables")
242
+
243
+ scope: dict[str, Any] = copy.deepcopy(variables)
244
+
245
+ precomputed = target_config.get("precomputed_vars", {})
246
+ if not isinstance(precomputed, dict):
247
+ raise ValueError(f"target '{target_name}' precomputed_vars must be an object")
248
+
249
+ for dotted_key, raw_value in precomputed.items():
250
+ if not isinstance(dotted_key, str):
251
+ raise ValueError("precomputed_vars keys must be strings")
252
+ if not isinstance(raw_value, str):
253
+ set_dotted(scope, dotted_key, raw_value)
254
+ continue
255
+ rendered = render_template(raw_value, scope)
256
+ set_dotted(scope, dotted_key, rendered)
257
+
258
+ return InterpreterState(config_path=self.config_path, target_name=target_name, scope=scope)
259
+
260
+ def _materialize_target(self, target_name: str) -> dict[str, Any]:
261
+ globals_obj = self.config.get("globals", {})
262
+ targets = self._get_targets()
263
+ target_obj = targets[target_name]
264
+
265
+ use_global = target_obj.get("use_global", {})
266
+ merged: dict[str, Any]
267
+ if isinstance(use_global, dict) and use_global.get("use_all", False):
268
+ merged = copy.deepcopy(globals_obj)
269
+ else:
270
+ merged = {}
271
+
272
+ target_without_use_global = {k: v for k, v in target_obj.items() if k != "use_global"}
273
+ merged = self._deep_merge(merged, target_without_use_global)
274
+
275
+ artifacts = merged.get("artifacts")
276
+ if isinstance(artifacts, dict):
277
+ for artifact_name, artifact_spec in list(artifacts.items()):
278
+ if not isinstance(artifact_spec, dict):
279
+ continue
280
+ artifacts[artifact_name] = self._apply_mod_transforms(artifact_spec)
281
+
282
+ return merged
283
+
284
+ def _apply_mod_transforms(self, artifact_spec: dict[str, Any]) -> dict[str, Any]:
285
+ out = copy.deepcopy(artifact_spec)
286
+ mod_ops = out.pop("mod_transforms", None)
287
+ if not mod_ops:
288
+ return out
289
+
290
+ transforms = out.get("transforms", [])
291
+ if not isinstance(transforms, list):
292
+ raise ValueError("artifact.transforms must be a list")
293
+
294
+ for op in mod_ops:
295
+ if not isinstance(op, dict):
296
+ raise ValueError("mod_transforms entries must be objects")
297
+ action = op.get("op")
298
+ ref = op.get("ref")
299
+ transform = op.get("transform")
300
+ if not isinstance(ref, str):
301
+ raise ValueError("mod_transforms.ref must be a string")
302
+
303
+ idx = next((i for i, t in enumerate(transforms) if isinstance(t, dict) and t.get("id") == ref), None)
304
+ if idx is None:
305
+ raise ValueError(f"mod_transforms ref not found: {ref}")
306
+
307
+ if action == "replace":
308
+ if not isinstance(transform, dict):
309
+ raise ValueError("mod_transforms.replace requires object transform")
310
+ transforms[idx] = transform
311
+ elif action == "insert":
312
+ if not isinstance(transform, dict):
313
+ raise ValueError("mod_transforms.insert requires object transform")
314
+ transforms.insert(idx + 1, transform)
315
+ elif action in {"remove", "delete"}:
316
+ transforms.pop(idx)
317
+ else:
318
+ raise ValueError(f"Unsupported mod_transforms op: {action}")
319
+
320
+ out["transforms"] = transforms
321
+ return out
322
+
323
+ def _deep_merge(self, base: Any, override: Any) -> Any:
324
+ if isinstance(base, dict) and isinstance(override, dict):
325
+ merged = copy.deepcopy(base)
326
+ for key, value in override.items():
327
+ if key in merged:
328
+ merged[key] = self._deep_merge(merged[key], value)
329
+ else:
330
+ merged[key] = copy.deepcopy(value)
331
+ return merged
332
+ return copy.deepcopy(override)
333
+
334
+ def _get_targets(self) -> dict[str, Any]:
335
+ targets = self.config.get("targets")
336
+ if not isinstance(targets, dict):
337
+ raise ValueError("config.targets must be an object")
338
+ return targets
339
+
340
+ def _zip_directory(self, source_dir: Path, zip_path: Path) -> None:
341
+ zip_path.parent.mkdir(parents=True, exist_ok=True)
342
+ if zip_path.suffix.lower() == ".zip":
343
+ base_name = zip_path.with_suffix("")
344
+ else:
345
+ base_name = zip_path
346
+ zip_path = zip_path.with_suffix(".zip")
347
+ shutil.make_archive(str(base_name), "zip", root_dir=source_dir)
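The config shape this interpreter walks can be inferred from the code above: targets carry `variables`, `precomputed_vars`, and `artifacts`; each artifact carries `src`, `transforms`, and `export`. A dry-run sketch with an illustrative config follows; the concrete field values are assumptions, not a documented schema.

```python
# A sketch under the assumption that the config schema matches what the code above reads.
from pathlib import Path

from core.interpreter import ConfigInterpreter

config = {
    "globals": {
        "variables": {"map": {"name": "skyblock"}},
    },
    "targets": {
        "release": {
            "use_global": {"use_all": True},
            "artifacts": {
                "world": {
                    "src": "world_src",  # copied into a temp workdir, relative to the config file
                    "transforms": [
                        {"type": "log", "message": "packing {map.name}"},
                        {"type": "mc:feature", "feature": "delete_dimensions",
                         "args": {"keep": ["minecraft:overworld"]}},
                    ],
                    "export": {"enabled": True, "dest": "dist/{map.name}.zip", "zipped": True},
                },
            },
        },
    },
}

interpreter = ConfigInterpreter(config=config, config_path=Path("mapack.jsonc").resolve())
# dry_run skips the source copy, transforms, and writing the archive,
# but still resolves and reports the would-be export path.
print(interpreter.run(["release"], dry_run=True))
```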
mapack-0.0.0/core/runtime.py ADDED
@@ -0,0 +1,24 @@
1
+ from __future__ import annotations
2
+
3
+ from dataclasses import dataclass, field
4
+ from pathlib import Path
5
+ from typing import Any
6
+
7
+
8
+ @dataclass(slots=True)
9
+ class ArtifactResult:
10
+ name: str
11
+ workdir: Path
12
+ output_path: Path | None = None
13
+
14
+
15
+ @dataclass(slots=True)
16
+ class InterpreterState:
17
+ config_path: Path
18
+ target_name: str
19
+ scope: dict[str, Any]
20
+ artifact_results: dict[str, ArtifactResult] = field(default_factory=dict)
21
+
22
+ @property
23
+ def config_dir(self) -> Path:
24
+ return self.config_path.parent
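A tiny sketch of how these runtime objects fit together; the paths are illustrative.

```python
from pathlib import Path

from core.runtime import ArtifactResult, InterpreterState

state = InterpreterState(
    config_path=Path("mapack.jsonc").resolve(),
    target_name="release",
    scope={"map": {"name": "skyblock"}},
)
state.artifact_results["world"] = ArtifactResult(name="world", workdir=Path("build/world"))
print(state.config_dir)                              # directory containing the config file
print(state.artifact_results["world"].output_path)   # None until the artifact is exported
```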
mapack-0.0.0/mapack.egg-info/PKG-INFO ADDED
@@ -0,0 +1,31 @@
1
+ Metadata-Version: 2.4
2
+ Name: mapack
3
+ Version: 0.0.0
4
+ Summary: Config-driven Minecraft map packer CLI
5
+ Keywords: minecraft,cli,packaging,jsonc
6
+ Classifier: Development Status :: 3 - Alpha
7
+ Classifier: Intended Audience :: Developers
8
+ Classifier: Programming Language :: Python :: 3
9
+ Classifier: Programming Language :: Python :: 3.10
10
+ Classifier: Programming Language :: Python :: 3.11
11
+ Classifier: Programming Language :: Python :: 3.12
12
+ Classifier: Programming Language :: Python :: 3.13
13
+ Classifier: Operating System :: OS Independent
14
+ Classifier: Environment :: Console
15
+ Requires-Python: >=3.10
16
+ Description-Content-Type: text/markdown
17
+ Requires-Dist: click<9,>=8.1
18
+
19
+ # Mapack (Minecraft Map Packer)
20
+
21
+ Config-driven CLI tool to build and export Minecraft map artifacts from a JSON/JSONC config file.
22
+
23
+ ## Usage
24
+
25
+ ```bash
26
+ mapack <config.jsonc>
27
+ ```
28
+
29
+ ## Documentation
30
+
31
+ To Be Written.
mapack-0.0.0/mapack.egg-info/SOURCES.txt ADDED
@@ -0,0 +1,25 @@
1
+ README.md
2
+ pyproject.toml
3
+ app/__init__.py
4
+ app/cli.py
5
+ config/__init__.py
6
+ config/expressions.py
7
+ config/parser.py
8
+ config/templating.py
9
+ core/__init__.py
10
+ core/interpreter.py
11
+ core/runtime.py
12
+ mapack.egg-info/PKG-INFO
13
+ mapack.egg-info/SOURCES.txt
14
+ mapack.egg-info/dependency_links.txt
15
+ mapack.egg-info/entry_points.txt
16
+ mapack.egg-info/requires.txt
17
+ mapack.egg-info/top_level.txt
18
+ transforms/__init__.py
19
+ transforms/base.py
20
+ transforms/conditional.py
21
+ transforms/copy.py
22
+ transforms/git_ops.py
23
+ transforms/log.py
24
+ transforms/mc_feature.py
25
+ transforms/registry.py
mapack-0.0.0/mapack.egg-info/entry_points.txt ADDED
@@ -0,0 +1,2 @@
1
+ [console_scripts]
2
+ mapack = app.cli:main
mapack-0.0.0/mapack.egg-info/requires.txt ADDED
@@ -0,0 +1 @@
1
+ click<9,>=8.1
mapack-0.0.0/mapack.egg-info/top_level.txt ADDED
@@ -0,0 +1,4 @@
1
+ app
2
+ config
3
+ core
4
+ transforms
mapack-0.0.0/pyproject.toml ADDED
@@ -0,0 +1,31 @@
1
+ [build-system]
2
+ requires = ["setuptools>=69", "wheel"]
3
+ build-backend = "setuptools.build_meta"
4
+
5
+ [project]
6
+ name = "mapack"
7
+ description = "Config-driven Minecraft map packer CLI"
8
+ readme = "README.md"
9
+ requires-python = ">=3.10"
10
+ keywords = ["minecraft", "cli", "packaging", "jsonc"]
11
+ classifiers = [
12
+ "Development Status :: 3 - Alpha",
13
+ "Intended Audience :: Developers",
14
+ "Programming Language :: Python :: 3",
15
+ "Programming Language :: Python :: 3.10",
16
+ "Programming Language :: Python :: 3.11",
17
+ "Programming Language :: Python :: 3.12",
18
+ "Programming Language :: Python :: 3.13",
19
+ "Operating System :: OS Independent",
20
+ "Environment :: Console",
21
+ ]
22
+ dependencies = [
23
+ "click>=8.1,<9"
24
+ ]
25
+ dynamic = ["version"]
26
+
27
+ [project.scripts]
28
+ mapack = "app.cli:main"
29
+
30
+ [tool.setuptools.packages.find]
31
+ include = ["app*", "config*", "core*", "transforms*"]
mapack-0.0.0/setup.cfg ADDED
@@ -0,0 +1,4 @@
1
+ [egg_info]
2
+ tag_build =
3
+ tag_date = 0
4
+
mapack-0.0.0/transforms/__init__.py ADDED
@@ -0,0 +1,13 @@
1
+ from .registry import registry
2
+
3
+
4
+ def load_builtin_transforms() -> None:
5
+ # import side-effects for registration
6
+ from . import conditional # noqa: F401
7
+ from . import copy # noqa: F401
8
+ from . import git_ops # noqa: F401
9
+ from . import log # noqa: F401
10
+ from . import mc_feature # noqa: F401
11
+
12
+
13
+ __all__ = ["registry", "load_builtin_transforms"]
mapack-0.0.0/transforms/base.py ADDED
@@ -0,0 +1,11 @@
1
+ from __future__ import annotations
2
+
3
+ from typing import Any, Protocol
4
+
5
+
6
+ class TransformContextProtocol(Protocol):
7
+ def run_nested_transform(self, spec: dict[str, Any]) -> None: ...
8
+
9
+
10
+ class TransformHandler(Protocol):
11
+ def __call__(self, ctx: TransformContextProtocol, spec: dict[str, Any]) -> None: ...
mapack-0.0.0/transforms/conditional.py ADDED
@@ -0,0 +1,45 @@
1
+ from __future__ import annotations
2
+
3
+ from .registry import register_transform
4
+
5
+
6
+ def _compare(op: str, a, b) -> bool:
7
+ match op:
8
+ case "==":
9
+ return a == b
10
+ case "!=":
11
+ return a != b
12
+ case ">":
13
+ return a > b
14
+ case ">=":
15
+ return a >= b
16
+ case "<":
17
+ return a < b
18
+ case "<=":
19
+ return a <= b
20
+ case _:
21
+ raise ValueError(f"Unsupported conditional op: {op}")
22
+
23
+
24
+ def _run_transform_or_list(ctx, block):
25
+ if block is None:
26
+ return
27
+ if isinstance(block, list):
28
+ for item in block:
29
+ ctx.run_nested_transform(item)
30
+ return
31
+ if isinstance(block, dict):
32
+ ctx.run_nested_transform(block)
33
+ return
34
+ raise ValueError("conditional transform expects dict or list for then/else")
35
+
36
+
37
+ @register_transform("conditional")
38
+ def transform_conditional(ctx, spec: dict) -> None:
39
+ op = str(spec.get("op", "=="))
40
+ a = ctx.resolve_expr_or_value(spec.get("a"))
41
+ b = ctx.resolve_expr_or_value(spec.get("b"))
42
+ if _compare(op, a, b):
43
+ _run_transform_or_list(ctx, spec.get("then"))
44
+ else:
45
+ _run_transform_or_list(ctx, spec.get("else"))
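An illustrative spec for the conditional handler above, written as the Python dict the interpreter would pass in after parsing and template resolution; the paths and messages are assumptions.

```python
# Illustrative only: "a" goes through resolve_expr_or_value, so the count_files()
# call is evaluated as an expression relative to the artifact workdir.
conditional_spec = {
    "type": "conditional",
    "a": "count_files('datapacks', recursive=False)",
    "op": ">",
    "b": 0,
    "then": {"type": "log", "message": "datapacks present"},
    "else": [
        {"type": "log", "message": "no datapacks found"},
    ],
}
```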
mapack-0.0.0/transforms/copy.py ADDED
@@ -0,0 +1,44 @@
1
+ from __future__ import annotations
2
+
3
+ import shutil
4
+ from pathlib import Path
5
+
6
+ from .registry import register_transform
7
+
8
+
9
+ def _copy_file(src: Path, dst: Path) -> None:
10
+ dst.parent.mkdir(parents=True, exist_ok=True)
11
+ shutil.copy2(src, dst)
12
+
13
+
14
+ def _copy_tree_contents(src: Path, dst: Path) -> None:
15
+ dst.mkdir(parents=True, exist_ok=True)
16
+ for child in src.iterdir():
17
+ target = dst / child.name
18
+ if child.is_dir():
19
+ shutil.copytree(child, target, dirs_exist_ok=True)
20
+ else:
21
+ _copy_file(child, target)
22
+
23
+
24
+ @register_transform("copy")
25
+ def transform_copy(ctx, spec: dict) -> None:
26
+ src = ctx.resolve_source(spec.get("src"), allow_artifact_output=True)
27
+ dest_rel = str(ctx.resolve_value(spec.get("dest", ".")))
28
+ dest = (ctx.workdir / dest_rel).resolve()
29
+
30
+ if not src.exists():
31
+ raise FileNotFoundError(f"copy transform source does not exist: {src}")
32
+
33
+ if src.is_file():
34
+ if dest.exists() and dest.is_dir():
35
+ _copy_file(src, dest / src.name)
36
+ else:
37
+ _copy_file(src, dest)
38
+ return
39
+
40
+ # directory source
41
+ if dest.exists() and dest.is_file():
42
+ raise ValueError(f"Cannot copy directory into file: {dest}")
43
+
44
+ _copy_tree_contents(src, dest)
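Illustrative specs for the copy handler above; the directory and artifact names are assumptions. `src` may be a config-relative path or an `{"artifact": ...}` reference, and `dest` is taken relative to the artifact workdir.

```python
# Illustrative only.
copy_from_disk = {
    "type": "copy",
    "src": "resource_packs/base",   # directory source: its contents are merged into dest
    "dest": "resourcepacks",
}

copy_from_artifact = {
    "type": "copy",
    "src": {"artifact": "datapack", "output": True},  # referenced artifact must already be built (e.g. via depends_on)
    "dest": "datapacks/datapack.zip",
}
```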
mapack-0.0.0/transforms/git_ops.py ADDED
@@ -0,0 +1,46 @@
1
+ from __future__ import annotations
2
+
3
+ import subprocess
4
+
5
+ from .registry import register_transform
6
+
7
+
8
+ def _run_git(args: list[str], cwd) -> None:
9
+ subprocess.run(["git", *args], cwd=str(cwd), check=True)
10
+
11
+
12
+ @register_transform("git:clone")
13
+ def transform_git_clone(ctx, spec: dict) -> None:
14
+ repo_url = str(ctx.resolve_value(spec.get("repo_url")))
15
+ branch = spec.get("branch")
16
+ dest_rel = str(ctx.resolve_value(spec.get("dest", ".")))
17
+ dest = (ctx.workdir / dest_rel).resolve()
18
+ dest.parent.mkdir(parents=True, exist_ok=True)
19
+
20
+ args = ["clone"]
21
+ if branch:
22
+ args.extend(["--branch", str(ctx.resolve_value(branch))])
23
+ if dest_rel in {"", "."}:
24
+ args.extend([repo_url, "."])
25
+ else:
26
+ args.extend([repo_url, str(dest)])
27
+ _run_git(args, cwd=ctx.workdir)
28
+
29
+
30
+ @register_transform("git:pull")
31
+ def transform_git_pull(ctx, spec: dict) -> None:
32
+ repo_dir_rel = str(ctx.resolve_value(spec.get("repo_dir", ".")))
33
+ branch = spec.get("branch")
34
+ repo_dir = (ctx.workdir / repo_dir_rel).resolve()
35
+ catch = spec.get("catch")
36
+
37
+ args = ["pull"]
38
+ if branch:
39
+ args.extend(["origin", str(ctx.resolve_value(branch))])
40
+
41
+ try:
42
+ _run_git(args, cwd=repo_dir)
43
+ except Exception:
44
+ if catch is None:
45
+ raise
46
+ ctx.run_nested_transform(catch)
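Illustrative specs for the git transforms above; the repository URL, branch token, and directories are placeholders.

```python
# Illustrative only: template tokens are resolved before the handler runs.
git_clone_spec = {
    "type": "git:clone",
    "repo_url": "https://example.com/maps/skyblock.git",
    "branch": "{map.branch}",
    "dest": "upstream",
}

git_pull_spec = {
    "type": "git:pull",
    "repo_dir": "upstream",
    "branch": "{map.branch}",
    # "catch" runs another transform instead of raising when the pull fails.
    "catch": {"type": "log", "message": "pull failed, keeping the cloned checkout"},
}
```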
mapack-0.0.0/transforms/log.py ADDED
@@ -0,0 +1,13 @@
1
+ from __future__ import annotations
2
+
3
+ import logging
4
+
5
+ from .registry import register_transform
6
+
7
+ logger = logging.getLogger("mapack")
8
+
9
+
10
+ @register_transform("log")
11
+ def transform_log(ctx, spec: dict) -> None:
12
+ message = ctx.resolve_value(spec.get("message", ""))
13
+ logger.info("[transform:log] %s", message)
mapack-0.0.0/transforms/mc_feature.py ADDED
@@ -0,0 +1,46 @@
1
+ from __future__ import annotations
2
+
3
+ import shutil
4
+ from pathlib import Path
5
+
6
+ from .registry import register_transform
7
+
8
+
9
+ _DIMENSION_PATHS = {
10
+ "minecraft:the_nether": [
11
+ ["DIM-1"],
12
+ ["dimensions", "minecraft", "the_nether"],
13
+ ],
14
+ "minecraft:the_end": [
15
+ ["DIM1"],
16
+ ["dimensions", "minecraft", "the_end"],
17
+ ],
18
+ }
19
+
20
+
21
+ def _remove_dimension_folders(workdir: Path, keep: set[str]) -> None:
22
+ for dim, folders in _DIMENSION_PATHS.items():
23
+ if dim in keep:
24
+ continue
25
+ for parts in folders:
26
+ path = workdir.joinpath(*parts)
27
+ if path.is_dir():
28
+ shutil.rmtree(path, ignore_errors=True)
29
+
30
+
31
+ @register_transform("mc:feature")
32
+ def transform_mc_feature(ctx, spec: dict) -> None:
33
+ feature = str(ctx.resolve_value(spec.get("feature", "")))
34
+ args = spec.get("args") or {}
35
+ if not isinstance(args, dict):
36
+ raise ValueError("mc:feature args must be an object")
37
+
38
+ if feature == "delete_dimensions":
39
+ keep_raw = args.get("keep", ["minecraft:overworld"])
40
+ if not isinstance(keep_raw, list):
41
+ raise ValueError("mc:feature delete_dimensions args.keep must be a list")
42
+ keep = {str(ctx.resolve_value(v)) for v in keep_raw}
43
+ _remove_dimension_folders(ctx.workdir, keep)
44
+ return
45
+
46
+ raise ValueError(f"Unsupported mc:feature value: {feature}")
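An illustrative spec for the mc:feature handler above. `delete_dimensions` is the only implemented feature; it removes the folders of the dimensions it knows about (the_nether, the_end) that are not listed in `args.keep`.

```python
# Illustrative only: keeps the overworld and the end, removes the nether folders.
mc_feature_spec = {
    "type": "mc:feature",
    "feature": "delete_dimensions",
    "args": {"keep": ["minecraft:overworld", "minecraft:the_end"]},
}
```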
mapack-0.0.0/transforms/registry.py ADDED
@@ -0,0 +1,40 @@
1
+ from __future__ import annotations
2
+
3
+ from typing import Any
4
+
5
+ from .base import TransformHandler
6
+
7
+
8
+ class TransformRegistry:
9
+ def __init__(self) -> None:
10
+ self._handlers: dict[str, TransformHandler] = {}
11
+
12
+ def register(self, name: str, handler: TransformHandler) -> None:
13
+ key = name.strip()
14
+ if not key:
15
+ raise ValueError("Transform name cannot be empty")
16
+ self._handlers[key] = handler
17
+
18
+ def get(self, name: str) -> TransformHandler:
19
+ if name not in self._handlers:
20
+ raise KeyError(f"Unknown transform type: {name}")
21
+ return self._handlers[name]
22
+
23
+ def names(self) -> list[str]:
24
+ return sorted(self._handlers.keys())
25
+
26
+
27
+ registry = TransformRegistry()
28
+
29
+
30
+ def register_transform(name: str):
31
+ def wrapper(func: TransformHandler) -> TransformHandler:
32
+ registry.register(name, func)
33
+ return func
34
+
35
+ return wrapper
36
+
37
+
38
+ def run_transform(name: str, ctx: Any, spec: dict[str, Any]) -> None:
39
+ handler = registry.get(name)
40
+ handler(ctx, spec)
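A minimal sketch of registering a custom transform against the registry above; the handler name `touch` and its spec fields are assumptions used for illustration, and built-in handlers register themselves the same way via `@register_transform`.

```python
# A sketch, assuming the module is importable as `transforms.registry`.
from pathlib import Path
from typing import Any

from transforms.registry import register_transform, registry


@register_transform("touch")
def transform_touch(ctx: Any, spec: dict[str, Any]) -> None:
    # Create an empty marker file inside the artifact workdir.
    target = Path(ctx.workdir) / str(spec.get("path", ".touched"))
    target.parent.mkdir(parents=True, exist_ok=True)
    target.touch()


print(registry.names())   # includes "touch" once this module has been imported
# The interpreter would dispatch it via run_transform("touch", ctx, spec).
```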