tnfr 6.0.0__py3-none-any.whl → 7.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of tnfr might be problematic. Click here for more details.
- tnfr/__init__.py +50 -5
- tnfr/__init__.pyi +0 -7
- tnfr/_compat.py +0 -1
- tnfr/_generated_version.py +34 -0
- tnfr/_version.py +44 -2
- tnfr/alias.py +14 -13
- tnfr/alias.pyi +5 -37
- tnfr/cache.py +9 -729
- tnfr/cache.pyi +8 -224
- tnfr/callback_utils.py +16 -31
- tnfr/callback_utils.pyi +3 -29
- tnfr/cli/__init__.py +17 -11
- tnfr/cli/__init__.pyi +0 -21
- tnfr/cli/arguments.py +175 -14
- tnfr/cli/arguments.pyi +5 -11
- tnfr/cli/execution.py +434 -48
- tnfr/cli/execution.pyi +14 -24
- tnfr/cli/utils.py +20 -3
- tnfr/cli/utils.pyi +5 -5
- tnfr/config/__init__.py +2 -1
- tnfr/config/__init__.pyi +2 -0
- tnfr/config/feature_flags.py +83 -0
- tnfr/config/init.py +1 -1
- tnfr/config/operator_names.py +1 -14
- tnfr/config/presets.py +6 -26
- tnfr/constants/__init__.py +10 -13
- tnfr/constants/__init__.pyi +10 -22
- tnfr/constants/aliases.py +31 -0
- tnfr/constants/core.py +4 -3
- tnfr/constants/init.py +1 -1
- tnfr/constants/metric.py +3 -3
- tnfr/dynamics/__init__.py +64 -10
- tnfr/dynamics/__init__.pyi +3 -4
- tnfr/dynamics/adaptation.py +79 -13
- tnfr/dynamics/aliases.py +10 -9
- tnfr/dynamics/coordination.py +77 -35
- tnfr/dynamics/dnfr.py +575 -274
- tnfr/dynamics/dnfr.pyi +1 -10
- tnfr/dynamics/integrators.py +47 -33
- tnfr/dynamics/integrators.pyi +0 -1
- tnfr/dynamics/runtime.py +489 -129
- tnfr/dynamics/sampling.py +2 -0
- tnfr/dynamics/selectors.py +101 -62
- tnfr/execution.py +15 -8
- tnfr/execution.pyi +5 -25
- tnfr/flatten.py +7 -3
- tnfr/flatten.pyi +1 -8
- tnfr/gamma.py +22 -26
- tnfr/gamma.pyi +0 -6
- tnfr/glyph_history.py +37 -26
- tnfr/glyph_history.pyi +1 -19
- tnfr/glyph_runtime.py +16 -0
- tnfr/glyph_runtime.pyi +9 -0
- tnfr/immutable.py +20 -15
- tnfr/immutable.pyi +4 -7
- tnfr/initialization.py +5 -7
- tnfr/initialization.pyi +1 -9
- tnfr/io.py +6 -305
- tnfr/io.pyi +13 -8
- tnfr/mathematics/__init__.py +81 -0
- tnfr/mathematics/backend.py +426 -0
- tnfr/mathematics/dynamics.py +398 -0
- tnfr/mathematics/epi.py +254 -0
- tnfr/mathematics/generators.py +222 -0
- tnfr/mathematics/metrics.py +119 -0
- tnfr/mathematics/operators.py +233 -0
- tnfr/mathematics/operators_factory.py +71 -0
- tnfr/mathematics/projection.py +78 -0
- tnfr/mathematics/runtime.py +173 -0
- tnfr/mathematics/spaces.py +247 -0
- tnfr/mathematics/transforms.py +292 -0
- tnfr/metrics/__init__.py +10 -10
- tnfr/metrics/coherence.py +123 -94
- tnfr/metrics/common.py +22 -13
- tnfr/metrics/common.pyi +42 -11
- tnfr/metrics/core.py +72 -14
- tnfr/metrics/diagnosis.py +48 -57
- tnfr/metrics/diagnosis.pyi +3 -7
- tnfr/metrics/export.py +3 -5
- tnfr/metrics/glyph_timing.py +41 -31
- tnfr/metrics/reporting.py +13 -6
- tnfr/metrics/sense_index.py +884 -114
- tnfr/metrics/trig.py +167 -11
- tnfr/metrics/trig.pyi +1 -0
- tnfr/metrics/trig_cache.py +112 -15
- tnfr/node.py +400 -17
- tnfr/node.pyi +55 -38
- tnfr/observers.py +111 -8
- tnfr/observers.pyi +0 -15
- tnfr/ontosim.py +9 -6
- tnfr/ontosim.pyi +0 -5
- tnfr/operators/__init__.py +529 -42
- tnfr/operators/__init__.pyi +14 -0
- tnfr/operators/definitions.py +350 -18
- tnfr/operators/definitions.pyi +0 -14
- tnfr/operators/grammar.py +760 -0
- tnfr/operators/jitter.py +28 -22
- tnfr/operators/registry.py +7 -12
- tnfr/operators/registry.pyi +0 -2
- tnfr/operators/remesh.py +38 -61
- tnfr/rng.py +17 -300
- tnfr/schemas/__init__.py +8 -0
- tnfr/schemas/grammar.json +94 -0
- tnfr/selector.py +3 -4
- tnfr/selector.pyi +1 -1
- tnfr/sense.py +22 -24
- tnfr/sense.pyi +0 -7
- tnfr/structural.py +504 -21
- tnfr/structural.pyi +41 -18
- tnfr/telemetry/__init__.py +23 -1
- tnfr/telemetry/cache_metrics.py +226 -0
- tnfr/telemetry/nu_f.py +423 -0
- tnfr/telemetry/nu_f.pyi +123 -0
- tnfr/tokens.py +1 -4
- tnfr/tokens.pyi +1 -6
- tnfr/trace.py +20 -53
- tnfr/trace.pyi +9 -37
- tnfr/types.py +244 -15
- tnfr/types.pyi +200 -14
- tnfr/units.py +69 -0
- tnfr/units.pyi +16 -0
- tnfr/utils/__init__.py +107 -48
- tnfr/utils/__init__.pyi +80 -11
- tnfr/utils/cache.py +1705 -65
- tnfr/utils/cache.pyi +370 -58
- tnfr/utils/chunks.py +104 -0
- tnfr/utils/chunks.pyi +21 -0
- tnfr/utils/data.py +95 -5
- tnfr/utils/data.pyi +8 -17
- tnfr/utils/graph.py +2 -4
- tnfr/utils/init.py +31 -7
- tnfr/utils/init.pyi +4 -11
- tnfr/utils/io.py +313 -14
- tnfr/{helpers → utils}/numeric.py +50 -24
- tnfr/utils/numeric.pyi +21 -0
- tnfr/validation/__init__.py +92 -4
- tnfr/validation/__init__.pyi +77 -17
- tnfr/validation/compatibility.py +79 -43
- tnfr/validation/compatibility.pyi +4 -6
- tnfr/validation/grammar.py +55 -133
- tnfr/validation/grammar.pyi +37 -8
- tnfr/validation/graph.py +138 -0
- tnfr/validation/graph.pyi +17 -0
- tnfr/validation/rules.py +161 -74
- tnfr/validation/rules.pyi +55 -18
- tnfr/validation/runtime.py +263 -0
- tnfr/validation/runtime.pyi +31 -0
- tnfr/validation/soft_filters.py +170 -0
- tnfr/validation/soft_filters.pyi +37 -0
- tnfr/validation/spectral.py +159 -0
- tnfr/validation/spectral.pyi +46 -0
- tnfr/validation/syntax.py +28 -139
- tnfr/validation/syntax.pyi +7 -4
- tnfr/validation/window.py +39 -0
- tnfr/validation/window.pyi +1 -0
- tnfr/viz/__init__.py +9 -0
- tnfr/viz/matplotlib.py +246 -0
- {tnfr-6.0.0.dist-info → tnfr-7.0.0.dist-info}/METADATA +63 -19
- tnfr-7.0.0.dist-info/RECORD +185 -0
- {tnfr-6.0.0.dist-info → tnfr-7.0.0.dist-info}/licenses/LICENSE.md +1 -1
- tnfr/constants_glyphs.py +0 -16
- tnfr/constants_glyphs.pyi +0 -12
- tnfr/grammar.py +0 -25
- tnfr/grammar.pyi +0 -13
- tnfr/helpers/__init__.py +0 -151
- tnfr/helpers/__init__.pyi +0 -66
- tnfr/helpers/numeric.pyi +0 -12
- tnfr/presets.py +0 -15
- tnfr/presets.pyi +0 -7
- tnfr/utils/io.pyi +0 -10
- tnfr/utils/validators.py +0 -130
- tnfr/utils/validators.pyi +0 -19
- tnfr-6.0.0.dist-info/RECORD +0 -157
- {tnfr-6.0.0.dist-info → tnfr-7.0.0.dist-info}/WHEEL +0 -0
- {tnfr-6.0.0.dist-info → tnfr-7.0.0.dist-info}/entry_points.txt +0 -0
- {tnfr-6.0.0.dist-info → tnfr-7.0.0.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,760 @@
|
|
|
1
|
+
"""Canonical grammar and sequence validation for structural operators."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import json
|
|
6
|
+
import os
|
|
7
|
+
from collections.abc import Iterable, MutableMapping
|
|
8
|
+
from copy import deepcopy
|
|
9
|
+
from dataclasses import dataclass
|
|
10
|
+
from importlib import resources
|
|
11
|
+
from json import JSONDecodeError
|
|
12
|
+
from types import MappingProxyType
|
|
13
|
+
from typing import Any, Mapping, Optional, Sequence
|
|
14
|
+
|
|
15
|
+
from ..config.operator_names import (
|
|
16
|
+
CONTRACTION,
|
|
17
|
+
COHERENCE,
|
|
18
|
+
COUPLING,
|
|
19
|
+
DISSONANCE,
|
|
20
|
+
EMISSION,
|
|
21
|
+
INTERMEDIATE_OPERATORS,
|
|
22
|
+
MUTATION,
|
|
23
|
+
RECEPTION,
|
|
24
|
+
RECURSIVITY,
|
|
25
|
+
RESONANCE,
|
|
26
|
+
SELF_ORGANIZATION,
|
|
27
|
+
SELF_ORGANIZATION_CLOSURES,
|
|
28
|
+
SILENCE,
|
|
29
|
+
TRANSITION,
|
|
30
|
+
EXPANSION,
|
|
31
|
+
VALID_END_OPERATORS,
|
|
32
|
+
VALID_START_OPERATORS,
|
|
33
|
+
canonical_operator_name,
|
|
34
|
+
operator_display_name,
|
|
35
|
+
)
|
|
36
|
+
from ..constants import DEFAULTS, get_param
|
|
37
|
+
from ..types import Glyph, NodeId, TNFRGraph
|
|
38
|
+
from ..validation import ValidationOutcome, rules as _rules
|
|
39
|
+
from ..validation.soft_filters import soft_grammar_filters
|
|
40
|
+
from ..utils import get_logger
|
|
41
|
+
from .registry import OPERATORS
|
|
42
|
+
|
|
43
|
+
try: # pragma: no cover - optional dependency import
|
|
44
|
+
from jsonschema import Draft7Validator
|
|
45
|
+
from jsonschema import exceptions as _jsonschema_exceptions
|
|
46
|
+
except Exception: # pragma: no cover - jsonschema optional
|
|
47
|
+
Draft7Validator = None # type: ignore[assignment]
|
|
48
|
+
_jsonschema_exceptions = None # type: ignore[assignment]
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
__all__ = [
|
|
52
|
+
"GrammarContext",
|
|
53
|
+
"GrammarConfigurationError",
|
|
54
|
+
"StructuralGrammarError",
|
|
55
|
+
"RepeatWindowError",
|
|
56
|
+
"MutationPreconditionError",
|
|
57
|
+
"TholClosureError",
|
|
58
|
+
"TransitionCompatibilityError",
|
|
59
|
+
"SequenceSyntaxError",
|
|
60
|
+
"SequenceValidationResult",
|
|
61
|
+
"record_grammar_violation",
|
|
62
|
+
"_gram_state",
|
|
63
|
+
"apply_glyph_with_grammar",
|
|
64
|
+
"enforce_canonical_grammar",
|
|
65
|
+
"on_applied_glyph",
|
|
66
|
+
"parse_sequence",
|
|
67
|
+
"validate_sequence",
|
|
68
|
+
"FUNCTION_TO_GLYPH",
|
|
69
|
+
"GLYPH_TO_FUNCTION",
|
|
70
|
+
"glyph_function_name",
|
|
71
|
+
"function_name_to_glyph",
|
|
72
|
+
]
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
logger = get_logger(__name__)
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
# Lazy schema-validation state. ``_ensure_schema_validators`` populates these
# exactly once per process; a non-None ``_SCHEMA_LOAD_ERROR`` records why the
# validators could not be built so later calls can skip (or raise) quickly.
_SCHEMA_LOAD_ERROR: str | None = None
# Draft-7 validator for the soft grammar config section (None until built).
_SOFT_VALIDATOR: Draft7Validator | None = None
# Draft-7 validator for the canonical grammar config section (None until built).
_CANON_VALIDATOR: Draft7Validator | None = None
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
# Structural mapping ---------------------------------------------------------

# NOTE: Glyph comments describe the structural function following TNFR canon
# so downstream modules can reason in terms of operator semantics rather than
# internal glyph codes. This mapping is the single source of truth for
# translating between glyph identifiers and the structural operators defined
# in :mod:`tnfr.config.operator_names`.
GLYPH_TO_FUNCTION: dict[Glyph, str] = {
    Glyph.AL: EMISSION,  # Emission — seeds coherence outward from the node.
    Glyph.EN: RECEPTION,  # Reception — anchors inbound energy into the EPI.
    Glyph.IL: COHERENCE,  # Coherence — compresses ΔNFR drift to stabilise C(t).
    Glyph.OZ: DISSONANCE,  # Dissonance — injects controlled tension for probes.
    Glyph.UM: COUPLING,  # Coupling — synchronises bidirectional coherence links.
    Glyph.RA: RESONANCE,  # Resonance — amplifies aligned structural frequency.
    Glyph.SHA: SILENCE,  # Silence — suspends reorganisation while preserving form.
    Glyph.VAL: EXPANSION,  # Expansion — dilates the structure to explore volume.
    Glyph.NUL: CONTRACTION,  # Contraction — concentrates trajectories into the core.
    Glyph.THOL: SELF_ORGANIZATION,  # Self-organisation — spawns autonomous cascades.
    Glyph.ZHIR: MUTATION,  # Mutation — pivots the node across structural thresholds.
    Glyph.NAV: TRANSITION,  # Transition — guides controlled regime hand-offs.
    Glyph.REMESH: RECURSIVITY,  # Recursivity — echoes patterns across nested EPIs.
}

# Inverse view of ``GLYPH_TO_FUNCTION``. Every entry above uses a distinct
# operator name, so the inversion is lossless.
FUNCTION_TO_GLYPH: dict[str, Glyph] = {name: glyph for glyph, name in GLYPH_TO_FUNCTION.items()}
|
|
109
|
+
def glyph_function_name(val: Glyph | str | None, *, default: str | None = None) -> str | None:
    """Translate ``val`` into its structural operator name.

    Parameters
    ----------
    val:
        A :class:`Glyph` member, a glyph code string, or a structural
        operator identifier.
    default:
        Fallback returned when no translation exists.
    """

    if val is None:
        return default
    if isinstance(val, Glyph):
        return GLYPH_TO_FUNCTION.get(val, default)
    text = str(val)
    try:
        resolved = Glyph(text)
    except (TypeError, ValueError):
        # Not a glyph code — try the canonical operator vocabulary instead.
        canon = canonical_operator_name(text)
        if canon in FUNCTION_TO_GLYPH:
            return canon
        return default
    return GLYPH_TO_FUNCTION.get(resolved, default)
|
|
131
|
+
|
|
132
|
+
|
|
133
|
+
def function_name_to_glyph(val: str | Glyph | None, *, default: Glyph | None = None) -> Glyph | None:
    """Return the :class:`Glyph` associated with the structural identifier ``val``."""

    if val is None:
        return default
    if isinstance(val, Glyph):
        return val
    text = str(val)
    try:
        return Glyph(text)
    except (TypeError, ValueError):
        # Interpret ``val`` as an operator name and map it back to its glyph.
        return FUNCTION_TO_GLYPH.get(canonical_operator_name(text), default)
|
|
145
|
+
|
|
146
|
+
|
|
147
|
+
@dataclass(slots=True)
class GrammarContext:
    """Shared context for grammar helpers.

    Bundles the graph under validation with its soft and canonical grammar
    configuration mappings plus the selector normalisation table. The
    configuration is schema-checked eagerly in ``__post_init__``.
    """

    # Graph whose operator sequences are being validated.
    G: TNFRGraph
    # Soft-grammar settings (graph override of ``DEFAULTS["GRAMMAR"]``).
    cfg_soft: dict[str, Any]
    # Canonical-grammar settings (graph override of ``DEFAULTS["GRAMMAR_CANON"]``).
    cfg_canon: dict[str, Any]
    # Selector normalisation values cached on the graph under ``_sel_norms``.
    norms: dict[str, Any]

    def __post_init__(self) -> None:
        # Fail fast on configurations that violate the bundled JSON schema.
        _validate_grammar_configs(self)

    @classmethod
    def from_graph(cls, G: TNFRGraph) -> "GrammarContext":
        """Create a context pulling graph overrides or isolated defaults.

        When a graph omits grammar configuration, copies of
        :data:`tnfr.constants.DEFAULTS` are materialised so each context can
        mutate its settings without leaking state into other graphs.
        """

        def _copy_default(key: str) -> dict[str, Any]:
            # Deep-copy mapping defaults so contexts never share mutable state.
            default = DEFAULTS.get(key, {})
            if isinstance(default, Mapping):
                return {k: deepcopy(v) for k, v in default.items()}
            # A non-mapping iterable of pairs is still accepted; anything else
            # degrades to an empty config.
            if isinstance(default, Iterable) and not isinstance(default, (str, bytes)):
                return dict(default)
            return {}

        cfg_soft = G.graph.get("GRAMMAR")
        if cfg_soft is None:
            cfg_soft = _copy_default("GRAMMAR")

        cfg_canon = G.graph.get("GRAMMAR_CANON")
        if cfg_canon is None:
            cfg_canon = _copy_default("GRAMMAR_CANON")

        return cls(
            G=G,
            cfg_soft=cfg_soft,
            cfg_canon=cfg_canon,
            norms=G.graph.get("_sel_norms") or {},
        )
|
|
190
|
+
|
|
191
|
+
|
|
192
|
+
def _gram_state(nd: dict[str, Any]) -> dict[str, Any]:
    """Return the node's grammar state dict, creating a fresh default if absent."""

    fresh_state: dict[str, Any] = {"thol_open": False, "thol_len": 0}
    return nd.setdefault("_GRAM", fresh_state)
|
|
194
|
+
|
|
195
|
+
|
|
196
|
+
class GrammarConfigurationError(ValueError):
    """Raised when grammar configuration violates the bundled JSON schema."""

    __slots__ = ("section", "messages", "details")

    def __init__(
        self,
        section: str,
        messages: Sequence[str],
        *,
        details: Sequence[tuple[str, str]] | None = None,
    ) -> None:
        joined = "; ".join(messages)
        super().__init__(f"invalid {section} configuration: {joined}")
        # Configuration section that failed (e.g. "cfg_soft" or "cfg_canon").
        self.section = section
        # Individual human-readable violation messages.
        self.messages = tuple(messages)
        # Optional (section, message) pairs preserving per-issue provenance.
        self.details = tuple(() if details is None else details)
|
|
213
|
+
|
|
214
|
+
|
|
215
|
+
def _validation_env_flag() -> bool | None:
    """Interpret ``TNFR_GRAMMAR_VALIDATE`` as a tri-state toggle.

    Returns ``False`` for an explicit off value, ``True`` for an explicit on
    value, and ``None`` when the variable is unset or unrecognised.
    """

    raw = os.environ.get("TNFR_GRAMMAR_VALIDATE")
    if raw is None:
        return None
    value = raw.strip().lower()
    off_values = {"0", "false", "off", "no"}
    on_values = {"1", "true", "on", "yes"}
    if value in off_values:
        return False
    return True if value in on_values else None
|
|
225
|
+
|
|
226
|
+
|
|
227
|
+
def _ensure_schema_validators() -> tuple[Draft7Validator | None, Draft7Validator | None] | None:
    """Build (once) and return the Draft-7 validators for grammar configs.

    Returns the cached ``(_SOFT_VALIDATOR, _CANON_VALIDATOR)`` pair, or
    ``None`` when the validators cannot be constructed; in that case
    ``_SCHEMA_LOAD_ERROR`` records the reason so later calls short-circuit.
    """

    global _SCHEMA_LOAD_ERROR, _SOFT_VALIDATOR, _CANON_VALIDATOR
    # Fast path: already built on a previous call.
    if _SOFT_VALIDATOR is not None or _CANON_VALIDATOR is not None:
        return _SOFT_VALIDATOR, _CANON_VALIDATOR
    # A previous attempt failed; do not retry.
    if _SCHEMA_LOAD_ERROR is not None:
        return None
    # jsonschema is an optional dependency (see the guarded import above).
    if Draft7Validator is None:
        _SCHEMA_LOAD_ERROR = "jsonschema package is not installed"
        return None
    try:
        schema_text = resources.files("tnfr.schemas").joinpath("grammar.json").read_text("utf-8")
    except FileNotFoundError:
        _SCHEMA_LOAD_ERROR = "grammar schema resource not found"
        return None
    try:
        schema = json.loads(schema_text)
    except JSONDecodeError as exc:
        _SCHEMA_LOAD_ERROR = f"unable to decode grammar schema: {exc}"
        return None
    # The schema must carry both config definitions as JSON objects.
    definitions = schema.get("definitions")
    if not isinstance(definitions, Mapping):
        _SCHEMA_LOAD_ERROR = "grammar schema missing definitions"
        return None
    soft_schema = definitions.get("cfg_soft")
    canon_schema = definitions.get("cfg_canon")
    if soft_schema is None or canon_schema is None:
        _SCHEMA_LOAD_ERROR = "grammar schema missing cfg_soft/cfg_canon definitions"
        return None
    if not isinstance(soft_schema, Mapping) or not isinstance(canon_schema, Mapping):
        _SCHEMA_LOAD_ERROR = "grammar schema definitions must be objects"
        return None
    # Embed the shared definitions so internal ``$ref``s resolve when each
    # section is validated stand-alone.
    soft_payload = dict(soft_schema)
    canon_payload = dict(canon_schema)
    soft_payload.setdefault("definitions", definitions)
    canon_payload.setdefault("definitions", definitions)
    try:
        _SOFT_VALIDATOR = Draft7Validator(soft_payload)
        _CANON_VALIDATOR = Draft7Validator(canon_payload)
    except Exception as exc:  # pragma: no cover - delegated to jsonschema
        # Distinguish a malformed schema from any other construction failure.
        if _jsonschema_exceptions is not None and isinstance(
            exc, _jsonschema_exceptions.SchemaError
        ):
            _SCHEMA_LOAD_ERROR = f"invalid grammar schema: {exc.message}"
        else:
            _SCHEMA_LOAD_ERROR = f"unable to construct grammar validators: {exc}"
        _SOFT_VALIDATOR = None
        _CANON_VALIDATOR = None
        return None
    return _SOFT_VALIDATOR, _CANON_VALIDATOR
|
|
276
|
+
|
|
277
|
+
|
|
278
|
+
def _format_validation_error(err: Any) -> str:
    """Render a jsonschema error as ``path: message`` with a readable path."""

    pieces: list[str] = []
    for part in err.absolute_path:
        # Integer parts are sequence indices; everything else is a key.
        pieces.append(f"[{part}]" if isinstance(part, int) else f".{part}")
    rendered = "".join(pieces).lstrip(".")
    return f"{rendered or '<root>'}: {err.message}"
|
|
282
|
+
|
|
283
|
+
|
|
284
|
+
def _validate_grammar_configs(ctx: GrammarContext) -> None:
    """Schema-check ``ctx.cfg_soft``/``ctx.cfg_canon``; raise on violations.

    Honours the ``TNFR_GRAMMAR_VALIDATE`` environment toggle: an explicit
    ``False`` skips validation entirely, an explicit ``True`` turns missing
    validators into a hard ``RuntimeError``, and unset keeps best-effort
    behaviour. Raises :class:`GrammarConfigurationError` when any issue is
    collected.
    """

    flag = _validation_env_flag()
    if flag is False:
        logger.debug("TNFR_GRAMMAR_VALIDATE=0 → skipping grammar schema validation")
        return
    validators = _ensure_schema_validators()
    if validators is None:
        # Validators unavailable (jsonschema missing, schema broken, ...).
        if flag is True:
            reason = _SCHEMA_LOAD_ERROR or "validators unavailable"
            raise RuntimeError(
                "grammar schema validation requested but unavailable: " f"{reason}"
            )
        if _SCHEMA_LOAD_ERROR is not None:
            logger.debug("Skipping grammar schema validation: %s", _SCHEMA_LOAD_ERROR)
        return
    soft_validator, canon_validator = validators
    # Collect every issue as a (section, message) pair before raising once.
    issues: list[tuple[str, str]] = []
    if soft_validator is not None:
        for err in soft_validator.iter_errors(ctx.cfg_soft):  # type: ignore[union-attr]
            issues.append(("cfg_soft", _format_validation_error(err)))

    # The canonical config must be a mapping for the length checks below.
    canon_cfg: Mapping[str, Any] | None
    if isinstance(ctx.cfg_canon, Mapping):
        canon_cfg = ctx.cfg_canon
    else:
        canon_cfg = None
        issues.append(
            (
                "cfg_canon",
                "GRAMMAR_CANON must be a mapping"
                f" (received {type(ctx.cfg_canon).__name__})",
            )
        )

    if canon_validator is not None:
        for err in canon_validator.iter_errors(ctx.cfg_canon):  # type: ignore[union-attr]
            issues.append(("cfg_canon", _format_validation_error(err)))

    # Cross-field invariant the JSON schema cannot express: max >= min.
    cfg_for_lengths: Mapping[str, Any] = canon_cfg or {}
    min_len = cfg_for_lengths.get("thol_min_len")
    max_len = cfg_for_lengths.get("thol_max_len")
    if isinstance(min_len, int) and isinstance(max_len, int) and max_len < min_len:
        issues.append(
            (
                "cfg_canon",
                "thol_max_len must be greater than or equal to thol_min_len",
            )
        )
    if not issues:
        return
    # Report under the first offending section but include every detail.
    section = issues[0][0]
    raise GrammarConfigurationError(
        section,
        [msg for _, msg in issues],
        details=issues,
    )
|
|
340
|
+
|
|
341
|
+
|
|
342
|
+
class SequenceSyntaxError(ValueError):
    """Raised when an operator sequence violates the canonical grammar.

    ``index`` is the zero-based position of the offending token (``-1`` when
    the failure concerns the sequence as a whole), ``token`` is the offending
    value (``None`` for whole-sequence errors) and ``message`` is the
    human-readable description.
    """

    __slots__ = ("index", "token", "message")

    def __init__(self, index: int, token: object, message: str) -> None:
        super().__init__(message)
        self.index, self.token, self.message = index, token, message

    def __str__(self) -> str:  # pragma: no cover - delegated to ValueError
        return self.message
|
|
355
|
+
|
|
356
|
+
|
|
357
|
+
class SequenceValidationResult(ValidationOutcome[tuple[str, ...]]):
    """Structured report emitted by :func:`validate_sequence`.

    Carries the raw tokens, their canonical forms, a pass/fail verdict with a
    message, read-only metadata from the automaton, and the underlying
    :class:`SequenceSyntaxError` when validation failed.
    """

    __slots__ = ("tokens", "canonical_tokens", "message", "metadata", "error")

    def __init__(
        self,
        *,
        tokens: Sequence[str],
        canonical_tokens: Sequence[str],
        passed: bool,
        message: str,
        metadata: Mapping[str, object],
        error: SequenceSyntaxError | None = None,
    ) -> None:
        # Freeze all inputs so the result is effectively immutable.
        tokens_tuple = tuple(tokens)
        canonical_tuple = tuple(canonical_tokens)
        metadata_dict = dict(metadata)
        metadata_view = MappingProxyType(metadata_dict)
        summary: dict[str, object] = {
            "message": message,
            "passed": passed,
            "tokens": tokens_tuple,
        }
        # Only attach optional sections when they carry information.
        if metadata_dict:
            summary["metadata"] = metadata_view
        if error is not None:
            summary["error"] = {"index": error.index, "token": error.token}
        super().__init__(
            subject=canonical_tuple,
            passed=passed,
            summary=summary,
            artifacts={"canonical_tokens": canonical_tuple},
        )
        self.tokens = tokens_tuple
        self.canonical_tokens = canonical_tuple
        self.message = message
        # Read-only proxy: callers cannot mutate the automaton metadata.
        self.metadata = metadata_view
        self.error = error
|
|
396
|
+
|
|
397
|
+
|
|
398
|
+
# Sorted, immutable snapshots of the canonical token groups; used to render
# deterministic error messages in the sequence automaton below.
_CANONICAL_START = tuple(sorted(VALID_START_OPERATORS))
_CANONICAL_INTERMEDIATE = tuple(sorted(INTERMEDIATE_OPERATORS))
_CANONICAL_END = tuple(sorted(VALID_END_OPERATORS))
|
|
401
|
+
|
|
402
|
+
|
|
403
|
+
def _format_token_group(tokens: Sequence[str]) -> str:
    """Join the display names of ``tokens`` into one comma-separated string."""

    return ", ".join(map(operator_display_name, tokens))
|
|
405
|
+
|
|
406
|
+
|
|
407
|
+
class _SequenceAutomaton:
    """Single-pass automaton enforcing the canonical operator-sequence grammar.

    ``run`` consumes the tokens once, tracking whether the mandatory
    reception→coherence segment and an intermediate operator were seen and
    whether a THOL (self-organisation) block is still open, then applies the
    terminal checks in ``_finalize``.
    """

    __slots__ = (
        "_canonical",          # canonical form of every consumed token, in order
        "_found_reception",    # a RECEPTION token has been seen
        "_found_coherence",    # a COHERENCE token was seen after reception
        "_seen_intermediate",  # an intermediate operator followed coherence
        "_open_thol",          # a SELF_ORGANIZATION block awaits its closure
        "_unknown_tokens",     # (index, raw token) pairs not found in OPERATORS
    )

    def __init__(self) -> None:
        self._canonical: list[str] = []
        self._found_reception = False
        self._found_coherence = False
        self._seen_intermediate = False
        self._open_thol = False
        self._unknown_tokens: list[tuple[int, str]] = []

    def run(self, names: Sequence[str]) -> None:
        """Consume ``names`` and raise :class:`SequenceSyntaxError` on violation."""
        if not names:
            raise SequenceSyntaxError(index=-1, token=None, message="empty sequence")
        for index, token in enumerate(names):
            self._consume(token, index)
        self._finalize(names)

    def _consume(self, token: str, index: int) -> None:
        # Per-token transition: canonicalise, then update the phase flags.
        if not isinstance(token, str):
            raise SequenceSyntaxError(index=index, token=token, message="tokens must be str")
        canonical = canonical_operator_name(token)
        self._canonical.append(canonical)
        # Unknown tokens are collected and reported together in _finalize.
        if canonical not in OPERATORS:
            self._unknown_tokens.append((index, token))
        if index == 0:
            # The opening token must come from the canonical start set.
            if canonical not in VALID_START_OPERATORS:
                expected = _format_token_group(_CANONICAL_START)
                raise SequenceSyntaxError(index=index, token=token, message=f"must start with {expected}")
        # Ordered phase tracking: reception → coherence → intermediate.
        if canonical == RECEPTION and not self._found_reception:
            self._found_reception = True
        elif self._found_reception and canonical == COHERENCE and not self._found_coherence:
            self._found_coherence = True
        elif self._found_coherence and canonical in INTERMEDIATE_OPERATORS:
            self._seen_intermediate = True
        # Independent THOL open/close bookkeeping.
        if canonical == SELF_ORGANIZATION:
            self._open_thol = True
        elif self._open_thol and canonical in SELF_ORGANIZATION_CLOSURES:
            self._open_thol = False

    def _finalize(self, names: Sequence[str]) -> None:
        # Terminal checks, ordered from most to least specific diagnosis.
        if self._unknown_tokens:
            ordered = ", ".join(sorted({token for _, token in self._unknown_tokens}))
            first_index, first_token = self._unknown_tokens[0]
            raise SequenceSyntaxError(
                index=first_index,
                token=first_token,
                message=f"unknown tokens: {ordered}",
            )
        if not (self._found_reception and self._found_coherence):
            raise SequenceSyntaxError(
                index=-1,
                token=None,
                message=f"missing {RECEPTION}→{COHERENCE} segment",
            )
        if not self._seen_intermediate:
            intermediate = _format_token_group(_CANONICAL_INTERMEDIATE)
            raise SequenceSyntaxError(
                index=-1,
                token=None,
                message=f"missing {intermediate} segment",
            )
        if self._canonical[-1] not in VALID_END_OPERATORS:
            cierre = _format_token_group(_CANONICAL_END)
            raise SequenceSyntaxError(
                index=len(names) - 1,
                token=names[-1],
                message=f"sequence must end with {cierre}",
            )
        if self._open_thol:
            raise SequenceSyntaxError(
                index=len(names) - 1,
                token=names[-1],
                message=f"{operator_display_name(SELF_ORGANIZATION)} block without closure",
            )

    @property
    def canonical(self) -> tuple[str, ...]:
        """Canonical form of every consumed token, as an immutable tuple."""
        return tuple(self._canonical)

    def metadata(self) -> Mapping[str, object]:
        """Snapshot of the automaton flags for inclusion in validation results."""
        return {
            "has_reception": self._found_reception,
            "has_coherence": self._found_coherence,
            "has_intermediate": self._seen_intermediate,
            "open_thol": self._open_thol,
            "unknown_tokens": frozenset(token for _, token in self._unknown_tokens),
        }
|
|
502
|
+
|
|
503
|
+
|
|
504
|
+
# Sentinel distinguishing "argument omitted" from an explicit ``None``.
_MISSING = object()
|
|
505
|
+
|
|
506
|
+
|
|
507
|
+
class StructuralGrammarError(RuntimeError):
    """Raised when canonical grammar invariants are violated.

    Carries the violated ``rule``, the ``candidate`` operator, optional
    ``window``/``threshold``/``order`` details, and a free-form ``context``
    mapping that callers may extend via :meth:`attach_context`.
    """

    __slots__ = (
        "rule",
        "candidate",
        "window",
        "threshold",
        "order",
        "context",
        "message",
    )

    def __init__(
        self,
        *,
        rule: str,
        candidate: str,
        message: str,
        window: int | None = None,
        threshold: float | None = None,
        order: Sequence[str] | None = None,
        context: Mapping[str, object] | None = None,
    ) -> None:
        super().__init__(message)
        self.rule = rule
        self.candidate = candidate
        self.message = message
        self.window = window
        self.threshold = threshold
        # Freeze the ordering hint; keep None when absent.
        self.order = None if order is None else tuple(order)
        # Own a mutable copy so attach_context never aliases caller state.
        self.context: dict[str, object] = {} if context is None else dict(context)

    def attach_context(self, **context: object) -> "StructuralGrammarError":
        """Return ``self`` after merging the non-``None`` keyword values."""

        meaningful = {key: value for key, value in context.items() if value is not None}
        self.context.update(meaningful)
        return self

    def to_payload(self) -> dict[str, object]:
        """Return a structured payload suitable for telemetry sinks."""

        payload: dict[str, object] = {
            "rule": self.rule,
            "candidate": self.candidate,
            "message": self.message,
        }
        # Optional fields appear only when set, keeping payloads compact.
        for attr in ("window", "threshold", "order"):
            value = getattr(self, attr)
            if value is not None:
                payload[attr] = value
        if self.context:
            payload["context"] = dict(self.context)
        return payload
|
|
565
|
+
|
|
566
|
+
|
|
567
|
+
class RepeatWindowError(StructuralGrammarError):
    """Repeated glyph within the configured hysteresis window."""

    # Intentionally empty: all payload fields live on StructuralGrammarError;
    # the subclass exists so callers can catch this violation specifically.
|
|
569
|
+
|
|
570
|
+
|
|
571
|
+
class MutationPreconditionError(StructuralGrammarError):
    """Mutation attempted without satisfying canonical dissonance requirements."""

    # Intentionally empty: all payload fields live on StructuralGrammarError;
    # the subclass exists so callers can catch this violation specifically.
|
|
573
|
+
|
|
574
|
+
|
|
575
|
+
class TholClosureError(StructuralGrammarError):
    """THOL block reached closure conditions without a canonical terminator."""

    # Intentionally empty: all payload fields live on StructuralGrammarError;
    # the subclass exists so callers can catch this violation specifically.
|
|
577
|
+
|
|
578
|
+
|
|
579
|
+
class TransitionCompatibilityError(StructuralGrammarError):
    """Transition attempted that violates canonical compatibility tables."""

    # Intentionally empty: all payload fields live on StructuralGrammarError;
    # the subclass exists so callers can catch this violation specifically.
|
|
581
|
+
|
|
582
|
+
|
|
583
|
+
def _record_grammar_violation(
    G: TNFRGraph, node: NodeId, error: StructuralGrammarError, *, stage: str
) -> None:
    """Store ``error`` telemetry on ``G`` and emit a structured log."""

    # Defensive normalisation: if a previous writer stored a non-mapping under
    # "telemetry", replace it so the error channel can always be attached.
    telemetry = G.graph.setdefault("telemetry", {})
    if not isinstance(telemetry, MutableMapping):
        telemetry = {}
        G.graph["telemetry"] = telemetry
    # Same guard for the per-violation list.
    channel = telemetry.setdefault("grammar_errors", [])
    if not isinstance(channel, list):
        channel = []
        telemetry["grammar_errors"] = channel
    # Node/stage first, then the error's own payload fields.
    payload = {"node": node, "stage": stage, **error.to_payload()}
    channel.append(payload)
    logger.warning(
        "grammar violation on node %s during %s: %s", node, stage, payload, exc_info=error
    )
|
|
601
|
+
|
|
602
|
+
|
|
603
|
+
def record_grammar_violation(
    G: TNFRGraph, node: NodeId, error: StructuralGrammarError, *, stage: str
) -> None:
    """Public shim for recording grammar violations with telemetry hooks intact."""

    # Delegate to the private implementation so internal callers and the
    # public API share a single telemetry/logging code path.
    _record_grammar_violation(G, node, error, stage=stage)
|
|
609
|
+
|
|
610
|
+
|
|
611
|
+
def _analyse_sequence(names: Iterable[str]) -> SequenceValidationResult:
    """Run the grammar automaton over ``names`` and package the outcome."""

    tokens = list(names)
    automaton = _SequenceAutomaton()
    failure: SequenceSyntaxError | None
    try:
        automaton.run(tokens)
    except SequenceSyntaxError as exc:
        # Capture the violation instead of propagating: callers decide whether
        # to raise (parse_sequence) or report (validate_sequence).
        failure = exc
        verdict = exc.message
    else:
        failure = None
        verdict = "ok"
    return SequenceValidationResult(
        tokens=tuple(tokens),
        canonical_tokens=automaton.canonical,
        passed=failure is None,
        message=verdict,
        metadata=automaton.metadata(),
        error=failure,
    )
|
|
629
|
+
|
|
630
|
+
|
|
631
|
+
def validate_sequence(
    names: Iterable[str] | object = _MISSING, **kwargs: object
) -> ValidationOutcome[tuple[str, ...]]:
    """Validate ``names`` against the canonical grammar.

    Rejects any keyword arguments and enforces that ``names`` was supplied,
    raising ``TypeError`` in both cases with a CPython-style message, then
    delegates the actual analysis to ``_analyse_sequence``.
    """

    if kwargs:
        # Mirror CPython's wording for unknown keyword arguments.
        raise TypeError(
            "validate_sequence() got unexpected keyword argument(s): "
            + ", ".join(sorted(kwargs))
        )
    if names is _MISSING:
        raise TypeError("validate_sequence() missing required argument: 'names'")
    return _analyse_sequence(names)
644
|
+
def parse_sequence(names: Iterable[str]) -> SequenceValidationResult:
    """Analyse ``names`` and raise on failure instead of returning a failed result.

    On success the validation result is returned unchanged; on failure the
    captured ``SequenceSyntaxError`` is re-raised, or a generic one is
    synthesized when the result carries no error object.
    """

    outcome = _analyse_sequence(names)
    if outcome.passed:
        return outcome
    if outcome.error is not None:
        raise outcome.error
    raise SequenceSyntaxError(index=-1, token=None, message=outcome.message)
653
|
+
def enforce_canonical_grammar(
    G: TNFRGraph,
    n: NodeId,
    cand: Glyph | str,
    ctx: Optional[GrammarContext] = None,
) -> Glyph | str:
    """Validate ``cand`` against canonical grammar rules and preserve structural identifiers.

    When callers provide textual operator identifiers (for example ``"emission"``), the
    returned value mirrors the canonical structural token instead of the raw glyph code.
    This keeps downstream traces aligned with TNFR operator semantics while still
    permitting glyph inputs for internal workflows.

    Parameters
    ----------
    G : graph the node belongs to.
    n : node whose grammar state is consulted.
    cand : candidate glyph or operator name to validate.
    ctx : optional pre-built context; built from ``G`` when omitted.
    """
    if ctx is None:
        ctx = GrammarContext.from_graph(G)

    nd = ctx.G.nodes[n]
    st = _gram_state(nd)

    # Remember the caller's exact input so non-glyph values can be returned
    # unchanged, and so string inputs round-trip as structural names.
    raw_cand = cand
    cand = _rules.coerce_glyph(cand)
    input_was_str = isinstance(raw_cand, str)

    if not isinstance(cand, Glyph):
        # Coercion failed: try translating an operator name into a glyph,
        # first from the raw input, then from the coerced value if distinct.
        translated = function_name_to_glyph(raw_cand if input_was_str else cand)
        if translated is None and cand is not raw_cand:
            translated = function_name_to_glyph(cand)
        if translated is not None:
            cand = translated

    from ..validation.compatibility import CANON_COMPAT

    # Candidates outside the canonical compatibility table bypass grammar
    # checks entirely and are echoed back in the caller's original form.
    if not isinstance(cand, Glyph) or cand not in CANON_COMPAT:
        return raw_cand if input_was_str else cand

    # Rule chain: each stage may substitute the candidate. Order matters —
    # soft filters run before the hard OZ→ZHIR, THOL-closure and
    # compatibility checks.
    cand = soft_grammar_filters(ctx, n, cand)
    cand = _rules._check_oz_to_zhir(ctx, n, cand)
    cand = _rules._check_thol_closure(ctx, n, cand, st)
    cand = _rules._check_compatibility(ctx, n, cand)

    coerced_final = _rules.coerce_glyph(cand)
    if input_was_str:
        # String callers get the structural operator name back when one can
        # be resolved; otherwise fall back to the glyph code or raw text.
        resolved = glyph_function_name(coerced_final)
        if resolved is None:
            resolved = glyph_function_name(cand)
        if resolved is not None:
            return resolved
        if isinstance(coerced_final, Glyph):
            return coerced_final.value
        return str(cand)
    return coerced_final if isinstance(coerced_final, Glyph) else cand
706
|
+
def on_applied_glyph(G: TNFRGraph, n: NodeId, applied: Glyph | str) -> None:
    """Update node ``n``'s THOL bookkeeping after ``applied`` lands on it."""

    state = _gram_state(G.nodes[n])
    glyph = function_name_to_glyph(applied)

    if glyph is Glyph.THOL:
        # THOL opens a block; restart its length counter.
        state["thol_open"] = True
        state["thol_len"] = 0
    elif glyph in (Glyph.SHA, Glyph.NUL):
        # SHA/NUL close any open THOL block.
        state["thol_open"] = False
        state["thol_len"] = 0
719
|
+
def apply_glyph_with_grammar(
    G: TNFRGraph,
    nodes: Optional[Iterable[NodeId | "NodeProtocol"]],
    glyph: Glyph | str,
    window: Optional[int] = None,
) -> None:
    """Apply ``glyph`` to ``nodes`` (or all nodes) under canonical grammar enforcement.

    For each node the candidate is run through ``enforce_canonical_grammar``;
    grammar violations are enriched with the node's glyph history, recorded as
    telemetry, and re-raised. Valid candidates are resolved to a ``Glyph`` and
    applied, after which ``on_applied_glyph`` updates per-node grammar state.

    Parameters
    ----------
    G : target graph.
    nodes : node ids or node-like objects exposing ``.n``; ``None`` means all nodes.
    glyph : glyph or operator name to apply.
    window : hysteresis window; defaults to the graph's ``GLYPH_HYSTERESIS_WINDOW`` param.

    Raises
    ------
    StructuralGrammarError : when a node rejects the candidate (after telemetry).
    ValueError : when the enforced candidate cannot be resolved to any glyph.
    """
    if window is None:
        window = get_param(G, "GLYPH_HYSTERESIS_WINDOW")

    g_str = glyph.value if isinstance(glyph, Glyph) else str(glyph)
    iter_nodes = G.nodes() if nodes is None else nodes
    # Build the grammar context once; it is reused for every node.
    ctx = GrammarContext.from_graph(G)
    from . import apply_glyph as _apply_glyph

    for node_ref in iter_nodes:
        # Accept both bare node ids and node-like wrappers exposing ``.n``.
        node_id = node_ref.n if hasattr(node_ref, "n") else node_ref
        try:
            g_eff = enforce_canonical_grammar(G, node_id, g_str, ctx)
        except StructuralGrammarError as err:
            nd = G.nodes[node_id]

            def _structural_history(value: Glyph | str) -> str:
                # Prefer the structural operator name; fall back to the glyph
                # code (or raw text) so the history is always printable.
                default = value.value if isinstance(value, Glyph) else str(value)
                resolved = glyph_function_name(value, default=default)
                return default if resolved is None else resolved

            history = tuple(
                _structural_history(item) for item in nd.get("glyph_history", ())
            )
            # Attach context and record telemetry before propagating.
            err.attach_context(node=node_id, stage="apply_glyph", history=history)
            _record_grammar_violation(G, node_id, err, stage="apply_glyph")
            raise
        # Keep the enforced token (possibly a structural name) for telemetry,
        # but resolve an actual Glyph object for application.
        telemetry_token = g_eff
        glyph_obj = g_eff if isinstance(g_eff, Glyph) else function_name_to_glyph(g_eff)
        if glyph_obj is None:
            coerced = _rules.coerce_glyph(g_eff)
            glyph_obj = coerced if isinstance(coerced, Glyph) else None
        if glyph_obj is None:
            raise ValueError(f"unknown glyph: {g_eff}")

        _apply_glyph(G, node_id, glyph_obj, window=window)
        on_applied_glyph(G, node_id, telemetry_token)