typst-pyexec 0.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,6 @@
1
"""typst_pyexec — reactive Python execution engine for Typst documents."""

from typst_pyexec.builder import Builder

# Package metadata / public API surface.
__version__ = "0.1.1"
__all__ = ["Builder"]
@@ -0,0 +1,218 @@
1
+ """High-level build orchestrator.
2
+
3
+ Ties together parsing, DAG analysis, scheduling, execution,
4
+ rendering, and document injection.
5
+ """
6
+
7
+ from __future__ import annotations
8
+
9
+ import logging
10
+ import os
11
+ import subprocess
12
+ import time
13
+ from pathlib import Path
14
+
15
+ from typst_pyexec.core.cache import CacheStore
16
+ from typst_pyexec.core.dag import DependencyGraph
17
+ from typst_pyexec.core.executor import Executor
18
+ from typst_pyexec.core.kernel import KernelManager
19
+ from typst_pyexec.core.parser import Cell, Parser
20
+ from typst_pyexec.core.renderer import Renderer
21
+ from typst_pyexec.core.scheduler import Scheduler
22
+ from typst_pyexec.utils.hashing import sha256_text
23
+
24
logger = logging.getLogger(__name__)


class Builder:
    """Orchestrates the full build pipeline for a single Typst document.

    The pipeline is: parse cells → build dependency DAG → schedule →
    execute (with optional caching) → render/inject results → compile
    the intermediate document with Typst.

    Parameters
    ----------
    source:
        Path to the ``.typ`` source file.
    output_dir:
        Directory in which to write the intermediate ``.typst_pyexec.typ``
        file and the ``.typst_pyexec/`` state directory. Defaults to the
        directory of *source*.
    use_cache:
        Whether to use the disk-based execution cache.
    n_jobs:
        Number of parallel workers passed to ``joblib``.
    compiler:
        Name / path of the ``typst`` compiler binary.
    """

    def __init__(
        self,
        source: Path,
        output_dir: Path | None = None,
        use_cache: bool = True,
        n_jobs: int = -1,
        compiler: str = "typst",
    ) -> None:
        self.source = source.resolve()
        self.output_dir = (output_dir or self.source.parent).resolve()
        self.use_cache = use_cache
        self.n_jobs = n_jobs
        self.compiler = compiler

        # All per-document state (cache, figures, notebook) lives under
        # a hidden `.typst_pyexec/` directory next to the output.
        self._state_dir = self.output_dir / ".typst_pyexec"
        self._figures_dir = self._state_dir / "figures"
        self._state_dir.mkdir(parents=True, exist_ok=True)
        self._figures_dir.mkdir(parents=True, exist_ok=True)

        self._intermediate = self.output_dir / (self.source.stem + ".typst_pyexec.typ")

        self._cache = CacheStore(self._state_dir / "cache")
        self._kernel = KernelManager(self._state_dir)
        self._renderer = Renderer(self._figures_dir, self._state_dir)

        self._parser = Parser()
        self._dag = DependencyGraph()
        self._scheduler = Scheduler()

    # ------------------------------------------------------------------
    # Public API
    # ------------------------------------------------------------------

    def build(self) -> None:
        """Run a single build pass.

        Temporarily changes the process working directory to the source
        directory so relative paths inside cells resolve against the
        document; the previous cwd is always restored.
        """
        t0 = time.perf_counter()
        logger.info("Building %s", self.source)

        previous_cwd = Path.cwd()
        os.chdir(self.source.parent)
        try:
            source_text = self.source.read_text(encoding="utf-8")
            cells = self._parser.parse(source_text)

            if not cells:
                # Nothing to execute — pass the document straight through.
                logger.info("No Python cells found — copying source verbatim.")
                self._intermediate.write_text(source_text, encoding="utf-8")
                self._compile()
                return

            self._dag.build(cells)
            groups = self._scheduler.schedule(self._dag)

            executor = Executor(
                kernel=self._kernel,
                cache=self._cache if self.use_cache else None,
                figures_dir=self._figures_dir,
                working_dir=self.source.parent,
                n_jobs=self.n_jobs,
            )

            changed_ids = self._detect_changed_cells(cells)
            refresh_ids = {
                c.cell_id
                for c in cells
                if _as_bool(c.metadata.get("refresh"), False)
                and _as_bool(c.metadata.get("execute"), True)
            }
            # Regular changes cascade through DAG; refresh-only cells re-run
            # without cascading to downstream dependents.
            cells_to_run = self._dag.affected(changed_ids) | refresh_ids
            logger.info("%d/%d cells need execution", len(cells_to_run), len(cells))

            results = executor.run(cells, groups, cells_to_run, dag=self._dag)

            # Render results and inject them into the Typst document.
            output_text = self._renderer.inject(source_text, cells, results)
            self._intermediate.write_text(output_text, encoding="utf-8")

            # Persist a notebook mirror of the executed cells.
            self._renderer.sync_notebook(cells, results, self._state_dir / "notebook.ipynb")

            elapsed = (time.perf_counter() - t0) * 1000
            logger.info("Build finished in %.1f ms", elapsed)

            self._compile()
        finally:
            os.chdir(previous_cwd)

    def watch(self) -> None:
        """Watch the source file and rebuild on every change."""
        from watchdog.events import FileSystemEventHandler
        from watchdog.observers import Observer

        logger.info("Watching %s (Ctrl-C to stop)", self.source)

        # Initial build so the output exists before the first edit.
        try:
            self.build()
        except Exception as exc:
            logger.error("Initial build failed: %s", exc)

        class _Handler(FileSystemEventHandler):
            def __init__(self, builder: Builder) -> None:
                self._builder = builder

            def _rebuild_if_source(self, raw_path: object) -> None:
                # Compare resolved paths so relative/symlinked event paths
                # still match the watched document.
                if Path(os.fsdecode(raw_path)).resolve() == self._builder.source:
                    logger.info("Change detected — rebuilding…")
                    try:
                        self._builder.build()
                    except Exception as exc:
                        logger.error("Rebuild failed: %s", exc)

            def on_modified(self, event) -> None:  # type: ignore[override]
                self._rebuild_if_source(event.src_path)

            # BUG FIX: many editors save atomically (write a temp file, then
            # rename it over the original), which surfaces as a created or
            # moved event rather than modified — previously those saves were
            # silently ignored and no rebuild happened.
            def on_created(self, event) -> None:  # type: ignore[override]
                self._rebuild_if_source(event.src_path)

            def on_moved(self, event) -> None:  # type: ignore[override]
                # For moves the watched file is the *destination* of the rename.
                self._rebuild_if_source(getattr(event, "dest_path", event.src_path))

        observer = Observer()
        observer.schedule(_Handler(self), str(self.source.parent), recursive=False)
        observer.start()
        try:
            while True:
                time.sleep(0.5)
        except KeyboardInterrupt:
            logger.info("Stopping watcher…")
        finally:
            observer.stop()
            observer.join()
            self._kernel.shutdown()

    # ------------------------------------------------------------------
    # Internal helpers
    # ------------------------------------------------------------------

    def _detect_changed_cells(self, cells: list[Cell]) -> set[str]:
        """Return IDs of cells whose source hash is not present in cache."""
        changed: set[str] = set()
        for cell in cells:
            if not _as_bool(cell.metadata.get("execute"), True):
                continue  # non-executing cells never need a run
            h = sha256_text(cell.source)
            if self._cache.load_by_hash(h) is None:
                changed.add(cell.cell_id)
        return changed

    def _compile(self) -> None:
        """Invoke the Typst compiler on the intermediate document.

        A missing compiler binary is downgraded to a warning so the
        rendered intermediate file is still produced; compiler errors and
        timeouts are logged, never raised.
        """
        cmd = [self.compiler, "compile", str(self._intermediate)]
        logger.info("Running: %s", " ".join(cmd))
        try:
            result = subprocess.run(
                cmd,
                capture_output=True,
                text=True,
                timeout=120,
            )
        except FileNotFoundError:
            logger.warning(
                "Compiler %r not found — skipping compilation step.", self.compiler
            )
            return
        except subprocess.TimeoutExpired:
            logger.error("Typst compiler timed out.")
            return
        if result.returncode != 0:
            logger.error(
                "Typst compiler error:\n%s", result.stderr or result.stdout
            )
        else:
            logger.info("Compilation successful.")
213
+
214
+
215
def _as_bool(value: str | None, default: bool) -> bool:
    """Interpret *value* as a boolean flag, falling back to *default* when unset."""
    truthy = {"1", "true", "yes", "on"}
    return default if value is None else value.strip().lower() in truthy
typst_pyexec/cli.py ADDED
@@ -0,0 +1,114 @@
1
+ """CLI entry point for typst_pyexec."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import argparse
6
+ import logging
7
+ import sys
8
+ from pathlib import Path
9
+
10
+ from typst_pyexec.builder import Builder
11
+ from typst_pyexec.utils.logging import configure_logging
12
+
13
+
14
def _build_command(args: argparse.Namespace) -> None:
    """Handle the ``build`` subcommand: run one build pass and exit."""
    out_dir = Path(args.output_dir) if args.output_dir else None
    Builder(
        source=Path(args.file),
        output_dir=out_dir,
        use_cache=not args.no_cache,
        n_jobs=args.jobs,
        compiler=args.compiler,
    ).build()
23
+
24
+
25
def _watch_command(args: argparse.Namespace) -> None:
    """Handle the ``watch`` subcommand: rebuild whenever the source changes."""
    out_dir = Path(args.output_dir) if args.output_dir else None
    watcher = Builder(
        source=Path(args.file),
        output_dir=out_dir,
        use_cache=not args.no_cache,
        n_jobs=args.jobs,
        compiler=args.compiler,
    )
    watcher.watch()
34
+
35
+
36
def _clean_command(args: argparse.Namespace) -> None:
    """Handle the ``clean`` subcommand: delete the local state directory.

    Operates on ``.typst_pyexec`` relative to the current working directory.
    """
    import shutil

    cache_dir = Path(".typst_pyexec")
    if not cache_dir.exists():
        print(f"{cache_dir} not found — nothing to clean.")
        return
    shutil.rmtree(cache_dir)
    print(f"Removed {cache_dir}")
45
+
46
+
47
def _make_parser() -> argparse.ArgumentParser:
    """Construct the top-level CLI parser with its three subcommands."""
    parser = argparse.ArgumentParser(
        prog="typst_pyexec",
        description="Reactive Python execution engine for Typst documents.",
    )
    parser.add_argument(
        "--log-level",
        default="INFO",
        choices=["DEBUG", "INFO", "WARNING", "ERROR"],
        help="Logging verbosity (default: INFO)",
    )

    subparsers = parser.add_subparsers(dest="command", required=True)

    # (name, help text, handler, takes a positional file argument?)
    commands = (
        ("build", "Build document once and exit.", _build_command, True),
        ("watch", "Watch for changes and rebuild.", _watch_command, True),
        ("clean", "Remove .typst_pyexec/ cache directory.", _clean_command, False),
    )
    for name, help_text, handler, takes_file in commands:
        sub = subparsers.add_parser(name, help=help_text)
        if takes_file:
            sub.add_argument("file", help="Path to .typ source file")
            _add_common_args(sub)
        sub.set_defaults(func=handler)

    return parser
78
+
79
+
80
def _add_common_args(p: argparse.ArgumentParser) -> None:
    """Attach the options shared by the ``build`` and ``watch`` subcommands."""
    shared_options = (
        (
            "--output-dir",
            {
                "default": None,
                "help": "Override output directory (default: same as source file)",
            },
        ),
        (
            "--no-cache",
            {
                "action": "store_true",
                "default": False,
                "help": "Disable execution cache",
            },
        ),
        (
            "--jobs",
            {
                "type": int,
                "default": -1,
                "help": "Number of parallel workers (-1 = all CPUs, default: -1)",
            },
        ),
        (
            "--compiler",
            {
                "default": "typst",
                "help": "Typst compiler command (default: typst)",
            },
        ),
    )
    for flag, options in shared_options:
        p.add_argument(flag, **options)
103
+
104
+
105
def main(argv: list[str] | None = None) -> None:
    """Entry point for the ``typst_pyexec`` CLI."""
    # argparse falls back to sys.argv[1:] when argv is None.
    args = _make_parser().parse_args(argv)
    configure_logging(getattr(logging, args.log_level))
    args.func(args)
111
+
112
+
113
if __name__ == "__main__":  # pragma: no cover — direct script execution
    main(sys.argv[1:])
@@ -0,0 +1 @@
1
+ # typst_pyexec/core/__init__.py
@@ -0,0 +1,110 @@
1
+ """Disk-based execution cache keyed by SHA-256 of cell source."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import json
6
+ import logging
7
+ from pathlib import Path
8
+
9
+ from typst_pyexec.utils.hashing import sha256_text
10
+
11
logger = logging.getLogger(__name__)

# Bump when the on-disk entry format changes; entries written under any
# other schema version are treated as cache misses (see load_by_hash).
_CACHE_SCHEMA_VERSION = 2


class CacheStore:
    """Persistent JSON cache stored under *cache_dir*.

    Each entry is a file named ``<sha256>.json`` and contains the
    execution result for the cell with that source hash.  A small
    ``_id_<cell_id>.json`` lookup file per cell records the most-recent
    hash for that cell.

    Parameters
    ----------
    cache_dir:
        Directory in which cache files are stored. Created on demand.
    """

    def __init__(self, cache_dir: Path) -> None:
        self._dir = cache_dir
        self._dir.mkdir(parents=True, exist_ok=True)

    # ------------------------------------------------------------------
    # Core API
    # ------------------------------------------------------------------

    def load(self, cell_id: str) -> dict | None:
        """Return cached entry for *cell_id*, or ``None`` if not found.

        The cache entry is matched by *cell_id* via a lookup file that
        maps cell IDs to their most-recent hash; the entry itself is then
        loaded through :meth:`load_by_hash`, inheriting its schema check.
        """
        lookup = self._lookup_file(cell_id)
        if not lookup.exists():
            return None
        try:
            ref = json.loads(lookup.read_text(encoding="utf-8"))
            h = ref.get("hash")
            if h:
                return self.load_by_hash(h)
        except (json.JSONDecodeError, OSError) as exc:
            logger.debug("Cache miss for %s: %s", cell_id, exc)
        return None

    def load_by_hash(self, source_hash: str) -> dict | None:
        """Return cached entry by *source_hash*, or ``None``.

        Entries written under a different schema version are ignored so
        that on-disk format changes never surface stale data.
        """
        entry_file = self._entry_file(source_hash)
        if not entry_file.exists():
            return None
        try:
            entry = json.loads(entry_file.read_text(encoding="utf-8"))
        except (json.JSONDecodeError, OSError):
            return None
        # BUG FIX: the schema version was written into every entry but never
        # checked on load, so entries from older incompatible formats were
        # served as cache hits.
        if not isinstance(entry, dict) or entry.get("schema_version") != _CACHE_SCHEMA_VERSION:
            return None
        return entry

    def save(self, cell_id: str, source: str, result: dict) -> None:
        """Persist *result* for *cell_id* with *source*'s hash as key."""
        h = sha256_text(source)
        entry: dict = {
            "schema_version": _CACHE_SCHEMA_VERSION,
            "hash": h,
            "cell_id": cell_id,
            "stdout": result.get("stdout", ""),
            "stderr": result.get("stderr", ""),
            "display_data": result.get("display_data", []),
            "figures": result.get("figures", []),
            "figure_metadata": result.get("figure_metadata", []),
            "error": result.get("error"),
            "status": result.get("status", "ok"),
        }
        entry_file = self._entry_file(h)
        entry_file.write_text(json.dumps(entry, indent=2), encoding="utf-8")

        # Update the lookup file so load() finds the fresh hash.
        lookup = self._lookup_file(cell_id)
        lookup.write_text(json.dumps({"hash": h}), encoding="utf-8")

        logger.debug("Cached cell %s (hash=%s…)", cell_id, h[:8])

    def invalidate(self, cell_id: str) -> None:
        """Remove the lookup entry for *cell_id* (does not delete the data file)."""
        lookup = self._lookup_file(cell_id)
        if lookup.exists():
            lookup.unlink()

    def clear(self) -> None:
        """Delete all cache files."""
        for f in self._dir.iterdir():
            # BUG FIX: unlink() raises on directories; skip any stray
            # subdirectory instead of aborting the whole clear.
            if f.is_file():
                f.unlink(missing_ok=True)
        logger.info("Cache cleared.")

    # ------------------------------------------------------------------
    # Paths
    # ------------------------------------------------------------------

    def _entry_file(self, source_hash: str) -> Path:
        return self._dir / f"{source_hash}.json"

    def _lookup_file(self, cell_id: str) -> Path:
        # Sanitize path separators so an arbitrary cell ID cannot escape
        # the cache directory.
        safe = cell_id.replace("/", "_").replace("\\", "_")
        return self._dir / f"_id_{safe}.json"