glitchlings 0.5.0-cp312-cp312-win_amd64.whl → 0.5.1-cp312-cp312-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of glitchlings might be problematic.

glitchlings/compat.py CHANGED
@@ -6,7 +6,7 @@ import re
 from dataclasses import dataclass
 from importlib import import_module, metadata
 from types import ModuleType
-from typing import Any, Callable, Iterable, Protocol, cast
+from typing import Any, Callable, Iterable, Mapping, NoReturn, Protocol, cast
 
 
 class _MissingSentinel:
@@ -50,26 +50,106 @@ else:
 Requirement = cast(type[_RequirementProtocol], _RequirementClass)
 
 
+def _build_lightning_stub() -> ModuleType:
+    """Return a minimal PyTorch Lightning stub when the dependency is absent."""
+
+    module = ModuleType("pytorch_lightning")
+
+    class LightningDataModule:  # pragma: no cover - simple compatibility shim
+        """Lightweight stand-in for PyTorch Lightning's ``LightningDataModule``."""
+
+        def __init__(self, *args: Any, **kwargs: Any) -> None:  # noqa: D401 - parity with real class
+            pass
+
+        def prepare_data(self, *args: Any, **kwargs: Any) -> None:  # noqa: D401 - parity with real class
+            return None
+
+        def setup(self, *args: Any, **kwargs: Any) -> None:
+            return None
+
+        def teardown(self, *args: Any, **kwargs: Any) -> None:
+            return None
+
+        def state_dict(self) -> dict[str, Any]:
+            return {}
+
+        def load_state_dict(self, state_dict: Mapping[str, Any]) -> None:
+            return None
+
+        def transfer_batch_to_device(self, batch: Any, device: Any, dataloader_idx: int) -> Any:
+            return batch
+
+        def on_before_batch_transfer(self, batch: Any, dataloader_idx: int) -> Any:
+            return batch
+
+        def on_after_batch_transfer(self, batch: Any, dataloader_idx: int) -> Any:
+            return batch
+
+        def train_dataloader(self, *args: Any, **kwargs: Any) -> Any:
+            return []
+
+        def val_dataloader(self, *args: Any, **kwargs: Any) -> Any:
+            return []
+
+        def test_dataloader(self, *args: Any, **kwargs: Any) -> Any:
+            return []
+
+        def predict_dataloader(self, *args: Any, **kwargs: Any) -> Any:
+            return []
+
+    setattr(module, "LightningDataModule", LightningDataModule)
+    setattr(module, "__all__", ["LightningDataModule"])
+    setattr(
+        module,
+        "__doc__",
+        "Lightweight stub module that exposes a minimal LightningDataModule "
+        "when PyTorch Lightning is unavailable.",
+    )
+    setattr(module, "__version__", "0.0.0-stub")
+    return module
+
+
 @dataclass
 class OptionalDependency:
     """Lazily import an optional dependency and retain the import error."""
 
     module_name: str
+    fallback_factory: Callable[[], ModuleType] | None = None
     _cached: ModuleType | None | _MissingSentinel = _MISSING
     _error: ModuleNotFoundError | None = None
+    _used_fallback: bool = False
+    _fallback_instance: ModuleType | None = None
 
     def _attempt_import(self) -> ModuleType | None:
         try:
             module = import_module(self.module_name)
         except ModuleNotFoundError as exc:
+            if self.fallback_factory is not None:
+                if self._fallback_instance is None:
+                    self._fallback_instance = self.fallback_factory()
+                module = self._fallback_instance
+                self._cached = module
+                # Preserve the original error so load()/require() can re-raise it
+                self._error = exc
+                self._used_fallback = True
+                return module
             self._cached = None
             self._error = exc
             return None
         else:
             self._cached = module
             self._error = None
+            self._used_fallback = False
             return module
 
+    def _raise_missing_error(self) -> NoReturn:
+        """Raise ModuleNotFoundError for the missing dependency."""
+        error = self._error
+        if error is not None:
+            raise error
+        message = f"{self.module_name} is not installed"
+        raise ModuleNotFoundError(message)
+
     def get(self) -> ModuleType | None:
         """Return the imported module or ``None`` when unavailable."""
         cached = self._cached
@@ -82,12 +162,10 @@ class OptionalDependency:
     def load(self) -> ModuleType:
         """Return the dependency, raising the original import error when absent."""
         module = self.get()
+        if self._used_fallback:
+            self._raise_missing_error()
         if module is None:
-            error = self._error
-            if error is not None:
-                raise error
-            message = f"{self.module_name} is not installed"
-            raise ModuleNotFoundError(message)
+            self._raise_missing_error()
         return module
 
     def require(self, message: str) -> ModuleType:
@@ -99,18 +177,27 @@
 
     def available(self) -> bool:
         """Return ``True`` when the dependency can be imported."""
-        return self.get() is not None
+        module = self.get()
+        if module is None:
+            return False
+        if self._used_fallback:
+            return False
+        return True
 
     def reset(self) -> None:
         """Forget any cached import result."""
         self._cached = _MISSING
         self._error = None
+        self._used_fallback = False
+        self._fallback_instance = None
 
     def attr(self, attribute: str) -> Any | None:
         """Return ``attribute`` from the dependency when available."""
         module = self.get()
         if module is None:
             return None
+        if self._used_fallback:
+            return None
         return getattr(module, attribute, None)
 
     @property
@@ -120,7 +207,10 @@ class OptionalDependency:
         return self._error
 
 
-pytorch_lightning = OptionalDependency("pytorch_lightning")
+pytorch_lightning = OptionalDependency(
+    "pytorch_lightning",
+    fallback_factory=_build_lightning_stub,
+)
 datasets = OptionalDependency("datasets")
 verifiers = OptionalDependency("verifiers")
 jellyfish = OptionalDependency("jellyfish")
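With the fallback factory wired in, `get()` now returns the in-memory stub when PyTorch Lightning is missing, while `load()`, `require()`, `available()`, and `attr()` keep treating the dependency as absent. A minimal sketch of that behaviour, assuming the real pytorch_lightning package is not installed:

    from glitchlings.compat import pytorch_lightning

    # get() always hands back a module; without the real dependency it is the
    # stub built by _build_lightning_stub().
    module = pytorch_lightning.get()
    print(module.__version__)             # "0.0.0-stub"

    # The stub does not count as an installed dependency...
    print(pytorch_lightning.available())  # False

    # ...and load() re-raises the original ModuleNotFoundError.
    try:
        pytorch_lightning.load()
    except ModuleNotFoundError as exc:
        print(f"still missing: {exc}")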
@@ -1,17 +1,20 @@
-"""Synchronise canonical glitchling assets with the vendored Rust copies."""
+"""Maintain the canonical glitchling asset bundle shared by Python and Rust."""
 
 from __future__ import annotations
 
 import argparse
-import shutil
 import sys
 from pathlib import Path
 from typing import Iterator, Sequence
 
-RUST_VENDORED_ASSETS: frozenset[str] = frozenset({
-    "hokey_assets.json",
-    "ocr_confusions.tsv",
-})
+PIPELINE_ASSETS: frozenset[str] = frozenset(
+    {
+        "apostrofae_pairs.json",
+        "ekkokin_homophones.json",
+        "hokey_assets.json",
+        "ocr_confusions.tsv",
+    }
+)
 
 
 def _project_root(default: Path | None = None) -> Path:
@@ -21,24 +24,24 @@ def _project_root(default: Path | None = None) -> Path:
 
 
 def _canonical_asset_dir(project_root: Path) -> Path:
-    canonical = project_root / "src" / "glitchlings" / "zoo" / "assets"
+    canonical = project_root / "assets"
     if not canonical.is_dir():
         raise RuntimeError(
-            "expected canonical assets under 'src/glitchlings/zoo/assets'; "
+            "expected canonical assets under 'assets'; "
             "run this command from the repository root"
         )
     return canonical
 
 
-def _rust_asset_dir(project_root: Path) -> Path:
+def _legacy_rust_asset_dir(project_root: Path) -> Path:
     return project_root / "rust" / "zoo" / "assets"
 
 
-def _iter_extraneous_assets(rust_dir: Path) -> Iterator[Path]:
+def _iter_legacy_assets(rust_dir: Path) -> Iterator[Path]:
     if not rust_dir.exists():
         return
     for path in rust_dir.iterdir():
-        if path.is_file() and path.name not in RUST_VENDORED_ASSETS:
+        if path.is_file():
            yield path
 
 
@@ -48,81 +51,55 @@ def sync_assets(
     check: bool = False,
     quiet: bool = False,
 ) -> bool:
-    """Synchronise the vendored Rust asset copies with the canonical sources."""
+    """Ensure pipeline assets exist only at their canonical location."""
 
     root = _project_root(project_root)
     canonical_dir = _canonical_asset_dir(root)
-    rust_dir = _rust_asset_dir(root)
+    rust_dir = _legacy_rust_asset_dir(root)
 
-    missing_sources = [
-        name
-        for name in RUST_VENDORED_ASSETS
-        if not (canonical_dir / name).is_file()
-    ]
+    missing_sources = [name for name in PIPELINE_ASSETS if not (canonical_dir / name).is_file()]
     if missing_sources:
         missing_list = ", ".join(sorted(missing_sources))
         raise RuntimeError(f"missing canonical assets: {missing_list}")
 
-    extraneous = list(_iter_extraneous_assets(rust_dir))
-
-    mismatched: list[tuple[str, str]] = []
-    for name in sorted(RUST_VENDORED_ASSETS):
-        source = canonical_dir / name
-        target = rust_dir / name
-        if not target.exists():
-            mismatched.append((name, "missing"))
-            continue
-        if source.read_bytes() != target.read_bytes():
-            mismatched.append((name, "outdated"))
+    legacy_assets = list(_iter_legacy_assets(rust_dir))
 
     if check:
-        if mismatched or extraneous:
+        if legacy_assets:
             if not quiet:
-                for name, reason in mismatched:
-                    target = rust_dir / name
-                    print(
-                        f"{target.relative_to(root)} is {reason}; run sync_assets to refresh it",
-                        file=sys.stderr,
-                    )
-                for extra in extraneous:
-                    print(
-                        (
-                            "unexpected vendored asset "
-                            f"{extra.relative_to(root)}; run sync_assets to prune it"
-                        ),
-                        file=sys.stderr,
+                for duplicate in legacy_assets:
+                    message = (
+                        "legacy vendored asset "
+                        f"{duplicate.relative_to(root)} still exists; "
+                        "run sync_assets to remove it"
                     )
+                    print(message, file=sys.stderr)
             return False
         if not quiet:
-            print("Rust asset bundle is up to date.")
+            print("No legacy Rust asset copies detected.")
         return True
 
-    rust_dir.mkdir(parents=True, exist_ok=True)
-
-    for name, reason in mismatched:
-        source = canonical_dir / name
-        target = rust_dir / name
-        shutil.copy2(source, target)
-        if not quiet:
-            verb = "Copied" if reason == "missing" else "Updated"
-            print(
-                f"{verb} {source.relative_to(root)} -> {target.relative_to(root)}",
-            )
-
-    for extra in extraneous:
-        extra.unlink()
+    removed_any = False
+    for duplicate in legacy_assets:
+        duplicate.unlink()
+        removed_any = True
         if not quiet:
-            print(f"Removed extraneous vendored asset {extra.relative_to(root)}")
+            print(f"Removed legacy vendored asset {duplicate.relative_to(root)}")
 
-    if not mismatched and not extraneous and not quiet:
-        print("Rust asset bundle already aligned with canonical copies.")
+    if removed_any:
+        try:
+            rust_dir.rmdir()
+        except OSError:
+            pass
+    elif not quiet:
+        print("No legacy Rust asset copies to remove.")
 
     return True
 
 
 def build_parser() -> argparse.ArgumentParser:
     parser = argparse.ArgumentParser(
-        description="Synchronise canonical glitchling assets with the vendored Rust copies.",
+        description="Prune legacy vendored Rust assets so only canonical copies remain.",
     )
     parser.add_argument(
         "--check",
@@ -187,7 +187,19 @@ def _ensure_datamodule_class() -> Any:
     setattr(datamodule_cls, "glitch", glitch)
 
     if not issubclass(_GlitchedLightningDataModule, datamodule_cls):
-        _GlitchedLightningDataModule.__bases__ = (datamodule_cls,)
+        try:
+            _GlitchedLightningDataModule.__bases__ = (datamodule_cls,)
+        except TypeError:
+            namespace = {
+                name: value
+                for name, value in vars(_GlitchedLightningDataModule).items()
+                if name not in {"__dict__", "__weakref__"}
+            }
+            replacement = cast(
+                type[Any],
+                type("_GlitchedLightningDataModule", (datamodule_cls,), namespace),
+            )
+            globals()["_GlitchedLightningDataModule"] = replacement
 
     return datamodule_cls
 
@@ -0,0 +1,5 @@
+from __future__ import annotations
+
+from .zoo.spectroll import Spectroll, spectroll, swap_colors
+
+__all__ = ["Spectroll", "spectroll", "swap_colors"]
@@ -14,6 +14,7 @@ from .core import (
     plan_glitchling_specs,
     plan_glitchlings,
 )
+from .ekkokin import Ekkokin, ekkokin
 from .hokey import Hokey, hokey
 from .jargoyle import Jargoyle, jargoyle
 from .jargoyle import dependencies_available as _jargoyle_available
@@ -22,6 +23,7 @@ from .redactyl import Redactyl, redactyl
 from .reduple import Reduple, reduple
 from .rushmore import Rushmore, rushmore
 from .scannequin import Scannequin, scannequin
+from .spectroll import Spectroll, spectroll
 from .typogre import Typogre, typogre
 from .zeedub import Zeedub, zeedub
 
@@ -32,6 +34,8 @@ __all__ = [
     "mim1c",
     "Jargoyle",
     "jargoyle",
+    "Ekkokin",
+    "ekkokin",
     "Apostrofae",
     "apostrofae",
     "Hokey",
@@ -44,6 +48,8 @@ __all__ = [
     "rushmore",
     "Redactyl",
     "redactyl",
+    "Spectroll",
+    "spectroll",
     "Scannequin",
     "scannequin",
     "Zeedub",
@@ -64,10 +70,10 @@ __all__ = [
 
 _HAS_JARGOYLE = _jargoyle_available()
 
-_BUILTIN_GLITCHLING_LIST: list[Glitchling] = [typogre, apostrofae, hokey, mim1c]
+_BUILTIN_GLITCHLING_LIST: list[Glitchling] = [typogre, apostrofae, hokey, mim1c, ekkokin]
 if _HAS_JARGOYLE:
     _BUILTIN_GLITCHLING_LIST.append(jargoyle)
-_BUILTIN_GLITCHLING_LIST.extend([adjax, reduple, rushmore, redactyl, scannequin, zeedub])
+_BUILTIN_GLITCHLING_LIST.extend([adjax, reduple, rushmore, redactyl, spectroll, scannequin, zeedub])
 
 BUILTIN_GLITCHLINGS: dict[str, Glitchling] = {
     glitchling.name.lower(): glitchling for glitchling in _BUILTIN_GLITCHLING_LIST
@@ -75,6 +81,7 @@ BUILTIN_GLITCHLINGS: dict[str, Glitchling] = {
 
 _BUILTIN_GLITCHLING_TYPES: dict[str, type[Glitchling]] = {
     typogre.name.lower(): Typogre,
+    ekkokin.name.lower(): Ekkokin,
     apostrofae.name.lower(): Apostrofae,
     hokey.name.lower(): Hokey,
     mim1c.name.lower(): Mim1c,
@@ -82,6 +89,7 @@ _BUILTIN_GLITCHLING_TYPES: dict[str, type[Glitchling]] = {
     reduple.name.lower(): Reduple,
     rushmore.name.lower(): Rushmore,
     redactyl.name.lower(): Redactyl,
+    spectroll.name.lower(): Spectroll,
     scannequin.name.lower(): Scannequin,
     zeedub.name.lower(): Zeedub,
 }
@@ -1,19 +1,41 @@
 from __future__ import annotations
 
 import json
+from functools import cache
 from hashlib import blake2b
 from importlib import resources
 from importlib.resources.abc import Traversable
-from typing import Any, BinaryIO, TextIO, cast
+from pathlib import Path
+from typing import Any, BinaryIO, Iterable, TextIO, cast
 
 _DEFAULT_DIGEST_SIZE = 32
 
 
+def _iter_asset_roots() -> Iterable[Traversable]:
+    """Yield candidate locations for the shared glitchling asset bundle."""
+
+    package_root: Traversable | None
+    try:
+        package_root = resources.files("glitchlings").joinpath("assets")
+    except ModuleNotFoundError:  # pragma: no cover - defensive guard for install issues
+        package_root = None
+    if package_root is not None and package_root.is_dir():
+        yield package_root
+
+    repo_root = Path(__file__).resolve().parents[4] / "assets"
+    if repo_root.is_dir():
+        yield cast(Traversable, repo_root)
+
+
 def _asset(name: str) -> Traversable:
-    asset = resources.files(__name__).joinpath(name)
-    if not asset.is_file():  # pragma: no cover - defensive guard for packaging issues
-        raise FileNotFoundError(f"Asset '{name}' not found at {asset}")
-    return asset
+    asset_roots = list(_iter_asset_roots())
+    for root in asset_roots:
+        candidate = root.joinpath(name)
+        if candidate.is_file():
+            return candidate
+
+    searched = ", ".join(str(root.joinpath(name)) for root in asset_roots) or "<unavailable>"
+    raise FileNotFoundError(f"Asset '{name}' not found in: {searched}")
 
 
 def read_text(name: str, *, encoding: str = "utf-8") -> str:
@@ -51,4 +73,19 @@ def hash_asset(name: str) -> str:
     return digest.hexdigest()
 
 
-__all__ = ["read_text", "open_text", "open_binary", "load_json", "hash_asset"]
+@cache
+def load_homophone_groups(name: str = "ekkokin_homophones.json") -> tuple[tuple[str, ...], ...]:
+    """Return the curated homophone sets bundled for the Ekkokin glitchling."""
+
+    data: list[list[str]] = load_json(name)
+    return tuple(tuple(group) for group in data)
+
+
+__all__ = [
+    "read_text",
+    "open_text",
+    "open_binary",
+    "load_json",
+    "hash_asset",
+    "load_homophone_groups",
+]
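Asset lookup now prefers the installed package's `glitchlings/assets` directory and falls back to a repository checkout's top-level `assets/` directory, and `load_homophone_groups` memoises the parsed JSON via `functools.cache`. A small usage sketch, with a hypothetical import path since the diff does not name this module:

    # Hypothetical import path, for illustration only.
    from glitchlings.zoo.assets import hash_asset, load_homophone_groups

    groups = load_homophone_groups()  # reads ekkokin_homophones.json once
    assert all(isinstance(group, tuple) for group in groups)

    # functools.cache returns the identical object on repeat calls.
    assert load_homophone_groups() is groups

    # Individual assets remain addressable by name, e.g. for integrity checks.
    print(hash_asset("ekkokin_homophones.json"))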