tnfr 6.0.0__py3-none-any.whl → 7.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (176)
  1. tnfr/__init__.py +50 -5
  2. tnfr/__init__.pyi +0 -7
  3. tnfr/_compat.py +0 -1
  4. tnfr/_generated_version.py +34 -0
  5. tnfr/_version.py +44 -2
  6. tnfr/alias.py +14 -13
  7. tnfr/alias.pyi +5 -37
  8. tnfr/cache.py +9 -729
  9. tnfr/cache.pyi +8 -224
  10. tnfr/callback_utils.py +16 -31
  11. tnfr/callback_utils.pyi +3 -29
  12. tnfr/cli/__init__.py +17 -11
  13. tnfr/cli/__init__.pyi +0 -21
  14. tnfr/cli/arguments.py +175 -14
  15. tnfr/cli/arguments.pyi +5 -11
  16. tnfr/cli/execution.py +434 -48
  17. tnfr/cli/execution.pyi +14 -24
  18. tnfr/cli/utils.py +20 -3
  19. tnfr/cli/utils.pyi +5 -5
  20. tnfr/config/__init__.py +2 -1
  21. tnfr/config/__init__.pyi +2 -0
  22. tnfr/config/feature_flags.py +83 -0
  23. tnfr/config/init.py +1 -1
  24. tnfr/config/operator_names.py +1 -14
  25. tnfr/config/presets.py +6 -26
  26. tnfr/constants/__init__.py +10 -13
  27. tnfr/constants/__init__.pyi +10 -22
  28. tnfr/constants/aliases.py +31 -0
  29. tnfr/constants/core.py +4 -3
  30. tnfr/constants/init.py +1 -1
  31. tnfr/constants/metric.py +3 -3
  32. tnfr/dynamics/__init__.py +64 -10
  33. tnfr/dynamics/__init__.pyi +3 -4
  34. tnfr/dynamics/adaptation.py +79 -13
  35. tnfr/dynamics/aliases.py +10 -9
  36. tnfr/dynamics/coordination.py +77 -35
  37. tnfr/dynamics/dnfr.py +575 -274
  38. tnfr/dynamics/dnfr.pyi +1 -10
  39. tnfr/dynamics/integrators.py +47 -33
  40. tnfr/dynamics/integrators.pyi +0 -1
  41. tnfr/dynamics/runtime.py +489 -129
  42. tnfr/dynamics/sampling.py +2 -0
  43. tnfr/dynamics/selectors.py +101 -62
  44. tnfr/execution.py +15 -8
  45. tnfr/execution.pyi +5 -25
  46. tnfr/flatten.py +7 -3
  47. tnfr/flatten.pyi +1 -8
  48. tnfr/gamma.py +22 -26
  49. tnfr/gamma.pyi +0 -6
  50. tnfr/glyph_history.py +37 -26
  51. tnfr/glyph_history.pyi +1 -19
  52. tnfr/glyph_runtime.py +16 -0
  53. tnfr/glyph_runtime.pyi +9 -0
  54. tnfr/immutable.py +20 -15
  55. tnfr/immutable.pyi +4 -7
  56. tnfr/initialization.py +5 -7
  57. tnfr/initialization.pyi +1 -9
  58. tnfr/io.py +6 -305
  59. tnfr/io.pyi +13 -8
  60. tnfr/mathematics/__init__.py +81 -0
  61. tnfr/mathematics/backend.py +426 -0
  62. tnfr/mathematics/dynamics.py +398 -0
  63. tnfr/mathematics/epi.py +254 -0
  64. tnfr/mathematics/generators.py +222 -0
  65. tnfr/mathematics/metrics.py +119 -0
  66. tnfr/mathematics/operators.py +233 -0
  67. tnfr/mathematics/operators_factory.py +71 -0
  68. tnfr/mathematics/projection.py +78 -0
  69. tnfr/mathematics/runtime.py +173 -0
  70. tnfr/mathematics/spaces.py +247 -0
  71. tnfr/mathematics/transforms.py +292 -0
  72. tnfr/metrics/__init__.py +10 -10
  73. tnfr/metrics/coherence.py +123 -94
  74. tnfr/metrics/common.py +22 -13
  75. tnfr/metrics/common.pyi +42 -11
  76. tnfr/metrics/core.py +72 -14
  77. tnfr/metrics/diagnosis.py +48 -57
  78. tnfr/metrics/diagnosis.pyi +3 -7
  79. tnfr/metrics/export.py +3 -5
  80. tnfr/metrics/glyph_timing.py +41 -31
  81. tnfr/metrics/reporting.py +13 -6
  82. tnfr/metrics/sense_index.py +884 -114
  83. tnfr/metrics/trig.py +167 -11
  84. tnfr/metrics/trig.pyi +1 -0
  85. tnfr/metrics/trig_cache.py +112 -15
  86. tnfr/node.py +400 -17
  87. tnfr/node.pyi +55 -38
  88. tnfr/observers.py +111 -8
  89. tnfr/observers.pyi +0 -15
  90. tnfr/ontosim.py +9 -6
  91. tnfr/ontosim.pyi +0 -5
  92. tnfr/operators/__init__.py +529 -42
  93. tnfr/operators/__init__.pyi +14 -0
  94. tnfr/operators/definitions.py +350 -18
  95. tnfr/operators/definitions.pyi +0 -14
  96. tnfr/operators/grammar.py +760 -0
  97. tnfr/operators/jitter.py +28 -22
  98. tnfr/operators/registry.py +7 -12
  99. tnfr/operators/registry.pyi +0 -2
  100. tnfr/operators/remesh.py +38 -61
  101. tnfr/rng.py +17 -300
  102. tnfr/schemas/__init__.py +8 -0
  103. tnfr/schemas/grammar.json +94 -0
  104. tnfr/selector.py +3 -4
  105. tnfr/selector.pyi +1 -1
  106. tnfr/sense.py +22 -24
  107. tnfr/sense.pyi +0 -7
  108. tnfr/structural.py +504 -21
  109. tnfr/structural.pyi +41 -18
  110. tnfr/telemetry/__init__.py +23 -1
  111. tnfr/telemetry/cache_metrics.py +226 -0
  112. tnfr/telemetry/nu_f.py +423 -0
  113. tnfr/telemetry/nu_f.pyi +123 -0
  114. tnfr/tokens.py +1 -4
  115. tnfr/tokens.pyi +1 -6
  116. tnfr/trace.py +20 -53
  117. tnfr/trace.pyi +9 -37
  118. tnfr/types.py +244 -15
  119. tnfr/types.pyi +200 -14
  120. tnfr/units.py +69 -0
  121. tnfr/units.pyi +16 -0
  122. tnfr/utils/__init__.py +107 -48
  123. tnfr/utils/__init__.pyi +80 -11
  124. tnfr/utils/cache.py +1705 -65
  125. tnfr/utils/cache.pyi +370 -58
  126. tnfr/utils/chunks.py +104 -0
  127. tnfr/utils/chunks.pyi +21 -0
  128. tnfr/utils/data.py +95 -5
  129. tnfr/utils/data.pyi +8 -17
  130. tnfr/utils/graph.py +2 -4
  131. tnfr/utils/init.py +31 -7
  132. tnfr/utils/init.pyi +4 -11
  133. tnfr/utils/io.py +313 -14
  134. tnfr/{helpers → utils}/numeric.py +50 -24
  135. tnfr/utils/numeric.pyi +21 -0
  136. tnfr/validation/__init__.py +92 -4
  137. tnfr/validation/__init__.pyi +77 -17
  138. tnfr/validation/compatibility.py +79 -43
  139. tnfr/validation/compatibility.pyi +4 -6
  140. tnfr/validation/grammar.py +55 -133
  141. tnfr/validation/grammar.pyi +37 -8
  142. tnfr/validation/graph.py +138 -0
  143. tnfr/validation/graph.pyi +17 -0
  144. tnfr/validation/rules.py +161 -74
  145. tnfr/validation/rules.pyi +55 -18
  146. tnfr/validation/runtime.py +263 -0
  147. tnfr/validation/runtime.pyi +31 -0
  148. tnfr/validation/soft_filters.py +170 -0
  149. tnfr/validation/soft_filters.pyi +37 -0
  150. tnfr/validation/spectral.py +159 -0
  151. tnfr/validation/spectral.pyi +46 -0
  152. tnfr/validation/syntax.py +28 -139
  153. tnfr/validation/syntax.pyi +7 -4
  154. tnfr/validation/window.py +39 -0
  155. tnfr/validation/window.pyi +1 -0
  156. tnfr/viz/__init__.py +9 -0
  157. tnfr/viz/matplotlib.py +246 -0
  158. {tnfr-6.0.0.dist-info → tnfr-7.0.0.dist-info}/METADATA +63 -19
  159. tnfr-7.0.0.dist-info/RECORD +185 -0
  160. {tnfr-6.0.0.dist-info → tnfr-7.0.0.dist-info}/licenses/LICENSE.md +1 -1
  161. tnfr/constants_glyphs.py +0 -16
  162. tnfr/constants_glyphs.pyi +0 -12
  163. tnfr/grammar.py +0 -25
  164. tnfr/grammar.pyi +0 -13
  165. tnfr/helpers/__init__.py +0 -151
  166. tnfr/helpers/__init__.pyi +0 -66
  167. tnfr/helpers/numeric.pyi +0 -12
  168. tnfr/presets.py +0 -15
  169. tnfr/presets.pyi +0 -7
  170. tnfr/utils/io.pyi +0 -10
  171. tnfr/utils/validators.py +0 -130
  172. tnfr/utils/validators.pyi +0 -19
  173. tnfr-6.0.0.dist-info/RECORD +0 -157
  174. {tnfr-6.0.0.dist-info → tnfr-7.0.0.dist-info}/WHEEL +0 -0
  175. {tnfr-6.0.0.dist-info → tnfr-7.0.0.dist-info}/entry_points.txt +0 -0
  176. {tnfr-6.0.0.dist-info → tnfr-7.0.0.dist-info}/top_level.txt +0 -0
tnfr/dynamics/sampling.py CHANGED
@@ -1,3 +1,5 @@
+"""Sampling helpers used by runtime selectors and glyph application."""
+
 from __future__ import annotations
 
 from typing import cast
tnfr/dynamics/selectors.py CHANGED
@@ -3,33 +3,36 @@
 from __future__ import annotations
 
 import math
-from abc import ABC, abstractmethod
 import sys
+from abc import ABC, abstractmethod
 from collections.abc import Mapping, MutableMapping, Sequence
 from concurrent.futures import ProcessPoolExecutor
 from dataclasses import dataclass
 from operator import itemgetter
 from typing import Any, cast
-
 from ..alias import collect_attr, get_attr
 from ..constants import get_graph_param, get_param
-from ..glyph_history import ensure_history, recent_glyph
-from ..helpers.numeric import clamp01
+from ..glyph_history import ensure_history
+from ..utils import clamp01, resolve_chunk_size
 from ..metrics.common import compute_dnfr_accel_max, merge_and_normalize_weights
 from ..operators import apply_glyph
+from ..validation import (
+    GrammarContext,
+    StructuralGrammarError,
+    enforce_canonical_grammar,
+    on_applied_glyph,
+    record_grammar_violation,
+)
 from ..selector import (
     _apply_selector_hysteresis,
     _calc_selector_score,
     _selector_norms,
     _selector_thresholds,
 )
-from ..types import Glyph, GlyphSelector, HistoryState, NodeId, TNFRGraph
+from ..types import Glyph, GlyphCode, GlyphSelector, HistoryState, NodeId, TNFRGraph
 from ..utils import get_numpy
-from ..validation.grammar import enforce_canonical_grammar, on_applied_glyph
+from ..validation import soft_grammar_filters
 from .aliases import ALIAS_D2EPI, ALIAS_DNFR, ALIAS_DSI, ALIAS_SI
-from .._compat import TypeAlias
-
-GlyphCode: TypeAlias = Glyph | str
 
 __all__ = (
     "GlyphCode",
@@ -92,23 +95,12 @@ def _soft_grammar_prefilter(
     G: TNFRGraph,
     n: NodeId,
     cand: GlyphCode,
-    dnfr: float,
-    accel: float,
 ) -> GlyphCode:
     """Soft grammar: avoid repetitions before the canonical one."""
 
-    gram = get_graph_param(G, "GRAMMAR", dict)
-    gwin = int(gram.get("window", 3))
-    avoid = {str(item) for item in gram.get("avoid_repeats", [])}
-    force_dn = float(gram.get("force_dnfr", 0.60))
-    force_ac = float(gram.get("force_accel", 0.60))
-    fallbacks = cast(Mapping[str, GlyphCode], gram.get("fallbacks", {}))
-    nd = G.nodes[n]
-    cand_key = str(cand)
-    if cand_key in avoid and recent_glyph(nd, cand_key, gwin):
-        if not (dnfr >= force_dn or accel >= force_ac):
-            cand = fallbacks.get(cand_key, cand)
-    return cand
+    ctx = GrammarContext.from_graph(G)
+    filtered = soft_grammar_filters(ctx, n, cand)
+    return cast(GlyphCode, filtered)
 
 
 def _selector_normalized_metrics(
@@ -125,9 +117,9 @@ def _selector_normalized_metrics(
 def _selector_base_choice(
     Si: float, dnfr: float, accel: float, thr: Mapping[str, float]
 ) -> GlyphCode:
-    si_hi, si_lo, dnfr_hi, acc_hi = itemgetter(
-        "si_hi", "si_lo", "dnfr_hi", "accel_hi"
-    )(thr)
+    si_hi, si_lo, dnfr_hi, acc_hi = itemgetter("si_hi", "si_lo", "dnfr_hi", "accel_hi")(
+        thr
+    )
     if Si >= si_hi:
         return "IL"
     if Si <= si_lo:
@@ -140,6 +132,8 @@
 
 
 def _configure_selector_weights(G: TNFRGraph) -> Mapping[str, float]:
+    """Load and cache selector weight configuration from graph parameters."""
+
     weights = merge_and_normalize_weights(
         G, "SELECTOR_WEIGHTS", ("w_si", "w_dnfr", "w_accel")
     )
@@ -202,11 +196,13 @@ def _parametric_selector_logic(G: TNFRGraph, n: NodeId) -> GlyphCode:
 
     cand = _apply_score_override(cand, score, dnfr, thr["dnfr_lo"])
 
-    return _soft_grammar_prefilter(G, n, cand, dnfr, accel)
+    return _soft_grammar_prefilter(G, n, cand)
 
 
 @dataclass(slots=True)
 class _SelectorPreselection:
+    """Precomputed selector context shared across glyph decisions."""
+
     kind: str
     metrics: Mapping[Any, tuple[float, float, float]]
     base_choices: Mapping[Any, GlyphCode]
@@ -265,10 +261,14 @@ class DefaultGlyphSelector(AbstractSelector):
         self._prepared_graph_id: int | None = None
 
     def prepare(self, graph: TNFRGraph, nodes: Sequence[NodeId]) -> None:
+        """Precompute default selector metrics for ``nodes``."""
+
         self._preselection = _build_default_preselection(graph, nodes)
         self._prepared_graph_id = id(graph)
 
     def select(self, graph: TNFRGraph, node: NodeId) -> GlyphCode:
+        """Return the canonical glyph for ``node`` using cached metrics when available."""
+
         if self._prepared_graph_id == id(graph):
             preselection = self._preselection
         else:
@@ -288,12 +288,16 @@ class ParametricGlyphSelector(AbstractSelector):
         self._prepared_graph_id: int | None = None
 
     def prepare(self, graph: TNFRGraph, nodes: Sequence[NodeId]) -> None:
+        """Precompute parametric selector metrics and hysteresis thresholds."""
+
         _selector_norms(graph)
         _configure_selector_weights(graph)
         self._preselection = _build_param_preselection(graph, nodes)
         self._prepared_graph_id = id(graph)
 
     def select(self, graph: TNFRGraph, node: NodeId) -> GlyphCode:
+        """Return the parametric glyph decision for ``node``."""
+
         if self._prepared_graph_id == id(graph):
             preselection = self._preselection
         else:
@@ -317,17 +321,29 @@ def _choose_glyph(
     al_max: int,
     en_max: int,
 ) -> GlyphCode:
+    """Return glyph for ``n`` considering forced lags and canonical grammar."""
+
     if h_al[n] > al_max:
         return Glyph.AL
     if h_en[n] > en_max:
         return Glyph.EN
     g = selector(G, n)
     if use_canon:
-        g = enforce_canonical_grammar(G, n, g)
+        try:
+            g = enforce_canonical_grammar(G, n, g)
+        except StructuralGrammarError as err:
+            nd = G.nodes[n]
+            history = tuple(str(item) for item in nd.get("glyph_history", ()))
+            selector_name = getattr(selector, "__name__", selector.__class__.__name__)
+            err.attach_context(node=n, selector=selector_name, history=history, stage="selector")
+            record_grammar_violation(G, n, err, stage="selector")
+            raise
     return g
 
 
 def _selector_parallel_jobs(G: TNFRGraph) -> int | None:
+    """Return worker count for selector helpers when parallelism is enabled."""
+
     raw_jobs = G.graph.get("GLYPH_SELECTOR_N_JOBS")
     try:
         n_jobs = None if raw_jobs is None else int(raw_jobs)
@@ -339,8 +355,10 @@ def _selector_parallel_jobs(G: TNFRGraph) -> int | None:
 
 
 def _selector_metrics_chunk(
-    args: tuple[list[float], list[float], list[float], float, float]
+    args: tuple[list[float], list[float], list[float], float, float],
 ) -> tuple[list[float], list[float], list[float]]:
+    """Normalise metric chunk values for multiprocessing execution."""
+
     si_values, dnfr_values, accel_values, dnfr_max, accel_max = args
     si_seq = [clamp01(float(v)) for v in si_values]
     dnfr_seq = [abs(float(v)) / dnfr_max for v in dnfr_values]
@@ -354,6 +372,8 @@ def _collect_selector_metrics(
     norms: Mapping[str, float],
     n_jobs: int | None = None,
 ) -> dict[Any, tuple[float, float, float]]:
+    """Return normalised (Si, ΔNFR, acceleration) triples for ``nodes``."""
+
     if not nodes:
         return {}
 
@@ -369,12 +389,16 @@
     if np_mod is not None:
         si_seq_np = collect_attr(G, nodes, ALIAS_SI, 0.5, np=np_mod).astype(float)
         si_seq_np = np_mod.clip(si_seq_np, 0.0, 1.0)
-        dnfr_seq_np = np_mod.abs(
-            collect_attr(G, nodes, ALIAS_DNFR, 0.0, np=np_mod).astype(float)
-        ) / dnfr_max
-        accel_seq_np = np_mod.abs(
-            collect_attr(G, nodes, ALIAS_D2EPI, 0.0, np=np_mod).astype(float)
-        ) / accel_max
+        dnfr_seq_np = (
+            np_mod.abs(collect_attr(G, nodes, ALIAS_DNFR, 0.0, np=np_mod).astype(float))
+            / dnfr_max
+        )
+        accel_seq_np = (
+            np_mod.abs(
+                collect_attr(G, nodes, ALIAS_D2EPI, 0.0, np=np_mod).astype(float)
+            )
+            / accel_max
+        )
 
         si_seq = si_seq_np.tolist()
         dnfr_seq = dnfr_seq_np.tolist()
@@ -390,7 +414,12 @@
             dnfr_seq = [abs(float(v)) / dnfr_max for v in dnfr_values]
             accel_seq = [abs(float(v)) / accel_max for v in accel_values]
         else:
-            chunk_size = max(1, math.ceil(len(nodes) / worker_count))
+            approx_chunk = math.ceil(len(nodes) / worker_count) if worker_count else None
+            chunk_size = resolve_chunk_size(
+                approx_chunk,
+                len(nodes),
+                minimum=1,
+            )
             chunk_bounds = [
                 (start, min(start + chunk_size, len(nodes)))
                 for start in range(0, len(nodes), chunk_size)
@@ -400,7 +429,9 @@
             dnfr_seq = []
            accel_seq = []
 
-            def _args_iter() -> Sequence[tuple[list[float], list[float], list[float], float, float]]:
+            def _args_iter() -> (
+                Sequence[tuple[list[float], list[float], list[float], float, float]]
+            ):
                 for start, end in chunk_bounds:
                     yield (
                         si_values[start:end],
@@ -449,7 +480,7 @@ def _compute_default_base_choices(
 
 
 def _param_base_worker(
-    args: tuple[Mapping[str, float], list[tuple[Any, tuple[float, float, float]]]]
+    args: tuple[Mapping[str, float], list[tuple[Any, tuple[float, float, float]]]],
 ) -> list[tuple[Any, str]]:
     thresholds, chunk = args
     return [
@@ -473,11 +504,13 @@ def _compute_param_base_choices(
             for node, (Si, dnfr, accel) in items
         }
 
-    chunk_size = max(1, math.ceil(len(items) / n_jobs))
-    chunks = [
-        items[i : i + chunk_size]
-        for i in range(0, len(items), chunk_size)
-    ]
+    approx_chunk = math.ceil(len(items) / n_jobs) if n_jobs else None
+    chunk_size = resolve_chunk_size(
+        approx_chunk,
+        len(items),
+        minimum=1,
+    )
+    chunks = [items[i : i + chunk_size] for i in range(0, len(items), chunk_size)]
     base: dict[Any, str] = {}
     args = ((thresholds, chunk) for chunk in chunks)
     executor_cls = ProcessPoolExecutor
@@ -498,6 +531,8 @@ def _prepare_selector_preselection(
     selector: GlyphSelector,
     nodes: Sequence[NodeId],
 ) -> _SelectorPreselection | None:
+    """Build cached selector metrics when ``selector`` supports them."""
+
     if selector is default_glyph_selector:
         return _build_default_preselection(G, nodes)
     if selector is parametric_glyph_selector:
@@ -511,6 +546,8 @@ def _resolve_preselected_glyph(
     selector: GlyphSelector,
     preselection: _SelectorPreselection | None,
 ) -> GlyphCode:
+    """Return glyph for ``n`` using ``preselection`` shortcuts when possible."""
+
     if preselection is None:
         return selector(G, n)
 
@@ -534,15 +571,13 @@
         cand = _selector_base_choice(Si, dnfr, accel, thresholds)
 
         nd = G.nodes[n]
-        hist_cand = _apply_selector_hysteresis(
-            nd, Si, dnfr, accel, thresholds, margin
-        )
+        hist_cand = _apply_selector_hysteresis(nd, Si, dnfr, accel, thresholds, margin)
         if hist_cand is not None:
             return hist_cand
 
         score = _compute_selector_score(G, nd, Si, dnfr, accel, cand)
         cand = _apply_score_override(cand, score, dnfr, thresholds["dnfr_lo"])
-        return _soft_grammar_prefilter(G, n, cand, dnfr, accel)
+        return _soft_grammar_prefilter(G, n, cand)
 
     return selector(G, n)
 
@@ -553,20 +588,19 @@ def _glyph_proposal_worker(
         TNFRGraph,
         GlyphSelector,
         _SelectorPreselection | None,
-    ]
+    ],
 ) -> list[tuple[NodeId, GlyphCode]]:
     nodes, G, selector, preselection = args
     return [
-        (n, _resolve_preselected_glyph(G, n, selector, preselection))
-        for n in nodes
+        (n, _resolve_preselected_glyph(G, n, selector, preselection)) for n in nodes
     ]
 
 
 def _apply_glyphs(G: TNFRGraph, selector: GlyphSelector, hist: HistoryState) -> None:
+    """Apply glyph decisions across the graph updating hysteresis trackers."""
+
     window = int(get_param(G, "GLYPH_HYSTERESIS_WINDOW"))
-    use_canon = bool(
-        get_graph_param(G, "GRAMMAR_CANON", dict).get("enabled", False)
-    )
+    use_canon = bool(get_graph_param(G, "GRAMMAR_CANON", dict).get("enabled", False))
     al_max = get_graph_param(G, "AL_MAX_LAG", int)
     en_max = get_graph_param(G, "EN_MAX_LAG", int)
 
@@ -601,11 +635,14 @@ def _apply_glyphs(G: TNFRGraph, selector: GlyphSelector, hist: HistoryState) ->
     n_jobs = _selector_parallel_jobs(G)
     if n_jobs is None:
         for n in to_select:
-            decisions[n] = _resolve_preselected_glyph(
-                G, n, selector, preselection
-            )
+            decisions[n] = _resolve_preselected_glyph(G, n, selector, preselection)
     else:
-        chunk_size = max(1, math.ceil(len(to_select) / n_jobs))
+        approx_chunk = math.ceil(len(to_select) / n_jobs) if n_jobs else None
+        chunk_size = resolve_chunk_size(
+            approx_chunk,
+            len(to_select),
+            minimum=1,
+        )
         chunks = [
             to_select[idx : idx + chunk_size]
             for idx in range(0, len(to_select), chunk_size)
@@ -617,9 +654,7 @@
             dynamics_module, "ProcessPoolExecutor", ProcessPoolExecutor
         )
        with executor_cls(max_workers=n_jobs) as executor:
-            args_iter = (
-                (chunk, G, selector, preselection) for chunk in chunks
-            )
+            args_iter = ((chunk, G, selector, preselection) for chunk in chunks)
             for results in executor.map(_glyph_proposal_worker, args_iter):
                 for node, glyph in results:
                     decisions[node] = glyph
@@ -657,6 +692,8 @@
 
 
 def _apply_selector(G: TNFRGraph) -> GlyphSelector:
+    """Resolve the glyph selector callable configured on ``G``."""
+
     raw_selector = G.graph.get("glyph_selector")
 
     selector: GlyphSelector
@@ -673,8 +710,10 @@ def _apply_selector(G: TNFRGraph) -> GlyphSelector:
     else:
         selector = default_glyph_selector
 
-    if isinstance(selector, ParametricGlyphSelector) or selector is parametric_glyph_selector:
+    if (
+        isinstance(selector, ParametricGlyphSelector)
+        or selector is parametric_glyph_selector
+    ):
         _selector_norms(G)
         _configure_selector_weights(G)
         return selector
-
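The chunking edits in this file replace the previous max(1, math.ceil(len(x) / n_jobs)) computation with the new resolve_chunk_size helper (the file list adds tnfr/utils/chunks.py). The snippet below is only an illustrative stand-in for the behaviour these call sites appear to rely on, namely that a None hint collapses to a single chunk and the result is clamped between the minimum and the collection size; the real helper may differ.

import math

def resolve_chunk_size_sketch(approx: int | None, total: int, *, minimum: int = 1) -> int:
    # Illustrative stand-in, not the tnfr implementation: clamp a chunk-size
    # hint to [minimum, total], treating a None hint as "one chunk".
    if total <= 0:
        return minimum
    if approx is None:
        approx = total
    return max(minimum, min(int(approx), total))

# Mirrors the call sites above.
nodes = list(range(10))
n_jobs = 4
approx_chunk = math.ceil(len(nodes) / n_jobs) if n_jobs else None
chunk_size = resolve_chunk_size_sketch(approx_chunk, len(nodes), minimum=1)
chunks = [nodes[i : i + chunk_size] for i in range(0, len(nodes), chunk_size)]
assert chunk_size == 3 and len(chunks) == 4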
tnfr/execution.py CHANGED
@@ -7,15 +7,14 @@ from collections.abc import Callable, Iterable, Sequence
 from typing import Any, Optional, cast
 
 from ._compat import TypeAlias
-
-from .utils import MAX_MATERIALIZE_DEFAULT, ensure_collection, is_non_string_sequence
 from .constants import get_param
 from .dynamics import step
 from .flatten import _flatten
 from .glyph_history import ensure_history
-from .validation.grammar import apply_glyph_with_grammar
-from .tokens import OpTag, TARGET, THOL, WAIT, Token
+from .tokens import TARGET, THOL, WAIT, OpTag, Token
 from .types import Glyph, NodeId, TNFRGraph
+from .utils import MAX_MATERIALIZE_DEFAULT, ensure_collection, is_non_string_sequence
+from .validation import apply_glyph_with_grammar
 
 AdvanceFn = Callable[[TNFRGraph], None]
 TraceEntry = dict[str, Any]
@@ -72,6 +71,8 @@ def _advance(G: TNFRGraph, step_fn: AdvanceFn) -> None:
 
 
 def _record_trace(trace: ProgramTrace, G: TNFRGraph, op: OpTag, **data: Any) -> None:
+    """Append an operation snapshot to ``trace`` using graph time metadata."""
+
     trace.append({"t": float(G.graph.get("_t", 0.0)), "op": op.name, **data})
 
 
@@ -189,25 +190,31 @@
 
 
 def seq(*tokens: Token) -> list[Token]:
+    """Return a mutable list of ``tokens`` for explicit sequence editing."""
+
     return list(tokens)
 
 
-def block(
-    *tokens: Token, repeat: int = 1, close: Optional[Glyph] = None
-) -> THOL:
+def block(*tokens: Token, repeat: int = 1, close: Optional[Glyph] = None) -> THOL:
+    """Build a THOL block with optional repetition and forced closure."""
+
     return THOL(body=list(tokens), repeat=repeat, force_close=close)
 
 
 def target(nodes: Optional[Iterable[NodeId]] = None) -> TARGET:
+    """Return a TARGET token selecting ``nodes`` (defaults to all nodes)."""
+
     return TARGET(nodes=nodes)
 
 
 def wait(steps: int = 1) -> WAIT:
+    """Return a WAIT token forcing ``steps`` structural updates before resuming."""
+
     return WAIT(steps=max(1, int(steps)))
 
 
 def basic_canonical_example() -> list[Token]:
-    """Reference canonical sequence.
+    """Return the canonical preset sequence.
 
     Returns a copy of the canonical preset tokens to keep CLI defaults aligned
     with :func:`tnfr.config.presets.get_preset`.
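Based on the helper signatures shown above (seq, block, target, wait) and the play stub in execution.pyi, a program could plausibly be assembled as sketched below. The import paths and the use of bare Glyph members as sequence tokens are assumptions drawn from this diff rather than documented API.

from tnfr.execution import block, play, seq, target, wait  # paths assumed from this diff
from tnfr.types import Glyph

program = seq(
    target(),                   # TARGET token; nodes=None addresses every node
    Glyph.AL,                   # a glyph used directly as a token (assumed valid)
    block(Glyph.EN, repeat=2),  # THOL block whose body repeats twice
    wait(3),                    # wait three structural updates before continuing
)
# play(G, program) would then execute the sequence against a TNFRGraph G.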
tnfr/execution.pyi CHANGED
@@ -5,16 +5,13 @@ from collections.abc import Callable, Iterable, Sequence
 from typing import Any, Optional
 
 from ._compat import TypeAlias
-
-from .tokens import OpTag, TARGET, THOL, WAIT, Token
+from .tokens import TARGET, THOL, WAIT, OpTag, Token
 from .types import Glyph, NodeId, TNFRGraph
 
 __all__: list[str]
 
-
 def __getattr__(name: str) -> Any: ...
 
-
 AdvanceFn = Callable[[TNFRGraph], None]
 TraceEntry = dict[str, Any]
 ProgramTrace: TypeAlias = deque[TraceEntry]
@@ -27,39 +24,22 @@ CANONICAL_PRESET_NAME: str
 CANONICAL_PROGRAM_TOKENS: tuple[Token, ...]
 HANDLERS: dict[OpTag, HandlerFn]
 
-
 def _apply_glyph_to_targets(
     G: TNFRGraph, g: Glyph | str, nodes: Iterable[NodeId] | None = ...
 ) -> None: ...
-
-
-def _record_trace(trace: ProgramTrace, G: TNFRGraph, op: OpTag, **data: Any) -> None: ...
-
-
+def _record_trace(
+    trace: ProgramTrace, G: TNFRGraph, op: OpTag, **data: Any
+) -> None: ...
 def compile_sequence(
     sequence: Iterable[Token] | Sequence[Token] | Any,
     *,
     max_materialize: int | None = ...,
 ) -> list[tuple[OpTag, Any]]: ...
-
-
 def play(
     G: TNFRGraph, sequence: Sequence[Token], step_fn: Optional[AdvanceFn] = ...
 ) -> None: ...
-
-
 def seq(*tokens: Token) -> list[Token]: ...
-
-
-def block(
-    *tokens: Token, repeat: int = ..., close: Glyph | None = ...
-) -> THOL: ...
-
-
+def block(*tokens: Token, repeat: int = ..., close: Glyph | None = ...) -> THOL: ...
 def target(nodes: Iterable[NodeId] | None = ...) -> TARGET: ...
-
-
 def wait(steps: int = ...) -> WAIT: ...
-
-
 def basic_canonical_example() -> list[Token]: ...
tnfr/flatten.py CHANGED
@@ -7,6 +7,9 @@ from dataclasses import dataclass
 from itertools import chain
 from typing import Any, Callable
 
+from .config.constants import GLYPHS_CANONICAL_SET
+from .tokens import TARGET, THOL, THOL_SENTINEL, WAIT, OpTag, Token
+from .types import Glyph
 from .utils import (
     MAX_MATERIALIZE_DEFAULT,
     STRING_TYPES,
@@ -14,9 +17,6 @@ from .utils import (
     flatten_structure,
     normalize_materialize_limit,
 )
-from .config.constants import GLYPHS_CANONICAL_SET
-from .tokens import THOL, TARGET, WAIT, OpTag, THOL_SENTINEL, Token
-from .types import Glyph
 
 __all__ = [
     "THOLEvaluator",
@@ -124,9 +124,13 @@ class THOLEvaluator:
         self._started = False
 
     def __iter__(self) -> "THOLEvaluator":
+        """Return the evaluator itself to stream THOL expansion."""
+
         return self
 
     def __next__(self) -> Token | object:
+        """Yield the next token or :data:`THOL_SENTINEL` during evaluation."""
+
         if not self._started:
             self._started = True
             return THOL_SENTINEL
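A hedged sketch of consuming the streaming evaluator documented above: THOLEvaluator yields THOL_SENTINEL before expanding the block body, so a consumer that only wants tokens can filter the sentinel out. Import locations follow the diffed modules; the expansion semantics beyond the sentinel are assumed.

from tnfr.flatten import THOLEvaluator
from tnfr.tokens import THOL, THOL_SENTINEL
from tnfr.types import Glyph

thol_block = THOL(body=[Glyph.EN], repeat=2)  # constructor keywords as used by execution.block()
tokens = [item for item in THOLEvaluator(thol_block) if item is not THOL_SENTINEL]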
tnfr/flatten.pyi CHANGED
@@ -7,20 +7,13 @@ from .tokens import THOL, Token
 
 __all__: list[str]
 
-
 def __getattr__(name: str) -> Any: ...
 
-
 class THOLEvaluator(Iterator[Token | object]):
-    def __init__(
-        self, item: THOL, *, max_materialize: int | None = ...
-    ) -> None: ...
-
+    def __init__(self, item: THOL, *, max_materialize: int | None = ...) -> None: ...
     def __iter__(self) -> THOLEvaluator: ...
-
     def __next__(self) -> Token | object: ...
 
-
 def parse_program_tokens(
     obj: Iterable[Any] | Sequence[Any] | Any,
     *,