tnfr-4.5.2-py3-none-any.whl → tnfr-7.0.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of tnfr might be problematic.
- tnfr/__init__.py +275 -51
- tnfr/__init__.pyi +33 -0
- tnfr/_compat.py +10 -0
- tnfr/_generated_version.py +34 -0
- tnfr/_version.py +49 -0
- tnfr/_version.pyi +7 -0
- tnfr/alias.py +117 -31
- tnfr/alias.pyi +108 -0
- tnfr/cache.py +6 -572
- tnfr/cache.pyi +16 -0
- tnfr/callback_utils.py +16 -38
- tnfr/callback_utils.pyi +79 -0
- tnfr/cli/__init__.py +34 -14
- tnfr/cli/__init__.pyi +26 -0
- tnfr/cli/arguments.py +211 -28
- tnfr/cli/arguments.pyi +27 -0
- tnfr/cli/execution.py +470 -50
- tnfr/cli/execution.pyi +70 -0
- tnfr/cli/utils.py +18 -3
- tnfr/cli/utils.pyi +8 -0
- tnfr/config/__init__.py +13 -0
- tnfr/config/__init__.pyi +10 -0
- tnfr/{constants_glyphs.py → config/constants.py} +26 -20
- tnfr/config/constants.pyi +12 -0
- tnfr/config/feature_flags.py +83 -0
- tnfr/{config.py → config/init.py} +11 -7
- tnfr/config/init.pyi +8 -0
- tnfr/config/operator_names.py +93 -0
- tnfr/config/operator_names.pyi +28 -0
- tnfr/config/presets.py +84 -0
- tnfr/config/presets.pyi +7 -0
- tnfr/constants/__init__.py +80 -29
- tnfr/constants/__init__.pyi +92 -0
- tnfr/constants/aliases.py +31 -0
- tnfr/constants/core.py +4 -4
- tnfr/constants/core.pyi +17 -0
- tnfr/constants/init.py +1 -1
- tnfr/constants/init.pyi +12 -0
- tnfr/constants/metric.py +7 -15
- tnfr/constants/metric.pyi +19 -0
- tnfr/dynamics/__init__.py +165 -633
- tnfr/dynamics/__init__.pyi +82 -0
- tnfr/dynamics/adaptation.py +267 -0
- tnfr/dynamics/aliases.py +23 -0
- tnfr/dynamics/coordination.py +385 -0
- tnfr/dynamics/dnfr.py +2283 -400
- tnfr/dynamics/dnfr.pyi +24 -0
- tnfr/dynamics/integrators.py +406 -98
- tnfr/dynamics/integrators.pyi +34 -0
- tnfr/dynamics/runtime.py +881 -0
- tnfr/dynamics/sampling.py +10 -5
- tnfr/dynamics/sampling.pyi +7 -0
- tnfr/dynamics/selectors.py +719 -0
- tnfr/execution.py +70 -48
- tnfr/execution.pyi +45 -0
- tnfr/flatten.py +13 -9
- tnfr/flatten.pyi +21 -0
- tnfr/gamma.py +66 -53
- tnfr/gamma.pyi +34 -0
- tnfr/glyph_history.py +110 -52
- tnfr/glyph_history.pyi +35 -0
- tnfr/glyph_runtime.py +16 -0
- tnfr/glyph_runtime.pyi +9 -0
- tnfr/immutable.py +69 -28
- tnfr/immutable.pyi +34 -0
- tnfr/initialization.py +16 -16
- tnfr/initialization.pyi +65 -0
- tnfr/io.py +6 -240
- tnfr/io.pyi +16 -0
- tnfr/locking.pyi +7 -0
- tnfr/mathematics/__init__.py +81 -0
- tnfr/mathematics/backend.py +426 -0
- tnfr/mathematics/dynamics.py +398 -0
- tnfr/mathematics/epi.py +254 -0
- tnfr/mathematics/generators.py +222 -0
- tnfr/mathematics/metrics.py +119 -0
- tnfr/mathematics/operators.py +233 -0
- tnfr/mathematics/operators_factory.py +71 -0
- tnfr/mathematics/projection.py +78 -0
- tnfr/mathematics/runtime.py +173 -0
- tnfr/mathematics/spaces.py +247 -0
- tnfr/mathematics/transforms.py +292 -0
- tnfr/metrics/__init__.py +10 -10
- tnfr/metrics/__init__.pyi +20 -0
- tnfr/metrics/coherence.py +993 -324
- tnfr/metrics/common.py +23 -16
- tnfr/metrics/common.pyi +46 -0
- tnfr/metrics/core.py +251 -35
- tnfr/metrics/core.pyi +13 -0
- tnfr/metrics/diagnosis.py +708 -111
- tnfr/metrics/diagnosis.pyi +85 -0
- tnfr/metrics/export.py +27 -15
- tnfr/metrics/glyph_timing.py +232 -42
- tnfr/metrics/reporting.py +33 -22
- tnfr/metrics/reporting.pyi +12 -0
- tnfr/metrics/sense_index.py +987 -43
- tnfr/metrics/sense_index.pyi +9 -0
- tnfr/metrics/trig.py +214 -23
- tnfr/metrics/trig.pyi +13 -0
- tnfr/metrics/trig_cache.py +115 -22
- tnfr/metrics/trig_cache.pyi +10 -0
- tnfr/node.py +542 -136
- tnfr/node.pyi +178 -0
- tnfr/observers.py +152 -35
- tnfr/observers.pyi +31 -0
- tnfr/ontosim.py +23 -19
- tnfr/ontosim.pyi +28 -0
- tnfr/operators/__init__.py +601 -82
- tnfr/operators/__init__.pyi +45 -0
- tnfr/operators/definitions.py +513 -0
- tnfr/operators/definitions.pyi +78 -0
- tnfr/operators/grammar.py +760 -0
- tnfr/operators/jitter.py +107 -38
- tnfr/operators/jitter.pyi +11 -0
- tnfr/operators/registry.py +75 -0
- tnfr/operators/registry.pyi +13 -0
- tnfr/operators/remesh.py +149 -88
- tnfr/py.typed +0 -0
- tnfr/rng.py +46 -143
- tnfr/rng.pyi +14 -0
- tnfr/schemas/__init__.py +8 -0
- tnfr/schemas/grammar.json +94 -0
- tnfr/selector.py +25 -19
- tnfr/selector.pyi +19 -0
- tnfr/sense.py +72 -62
- tnfr/sense.pyi +23 -0
- tnfr/structural.py +522 -262
- tnfr/structural.pyi +69 -0
- tnfr/telemetry/__init__.py +35 -0
- tnfr/telemetry/cache_metrics.py +226 -0
- tnfr/telemetry/nu_f.py +423 -0
- tnfr/telemetry/nu_f.pyi +123 -0
- tnfr/telemetry/verbosity.py +37 -0
- tnfr/tokens.py +1 -3
- tnfr/tokens.pyi +36 -0
- tnfr/trace.py +270 -113
- tnfr/trace.pyi +40 -0
- tnfr/types.py +574 -6
- tnfr/types.pyi +331 -0
- tnfr/units.py +69 -0
- tnfr/units.pyi +16 -0
- tnfr/utils/__init__.py +217 -0
- tnfr/utils/__init__.pyi +202 -0
- tnfr/utils/cache.py +2395 -0
- tnfr/utils/cache.pyi +468 -0
- tnfr/utils/chunks.py +104 -0
- tnfr/utils/chunks.pyi +21 -0
- tnfr/{collections_utils.py → utils/data.py} +147 -90
- tnfr/utils/data.pyi +64 -0
- tnfr/utils/graph.py +85 -0
- tnfr/utils/graph.pyi +10 -0
- tnfr/utils/init.py +770 -0
- tnfr/utils/init.pyi +78 -0
- tnfr/utils/io.py +456 -0
- tnfr/{helpers → utils}/numeric.py +51 -24
- tnfr/utils/numeric.pyi +21 -0
- tnfr/validation/__init__.py +113 -0
- tnfr/validation/__init__.pyi +77 -0
- tnfr/validation/compatibility.py +95 -0
- tnfr/validation/compatibility.pyi +6 -0
- tnfr/validation/grammar.py +71 -0
- tnfr/validation/grammar.pyi +40 -0
- tnfr/validation/graph.py +138 -0
- tnfr/validation/graph.pyi +17 -0
- tnfr/validation/rules.py +281 -0
- tnfr/validation/rules.pyi +55 -0
- tnfr/validation/runtime.py +263 -0
- tnfr/validation/runtime.pyi +31 -0
- tnfr/validation/soft_filters.py +170 -0
- tnfr/validation/soft_filters.pyi +37 -0
- tnfr/validation/spectral.py +159 -0
- tnfr/validation/spectral.pyi +46 -0
- tnfr/validation/syntax.py +40 -0
- tnfr/validation/syntax.pyi +10 -0
- tnfr/validation/window.py +39 -0
- tnfr/validation/window.pyi +1 -0
- tnfr/viz/__init__.py +9 -0
- tnfr/viz/matplotlib.py +246 -0
- tnfr-7.0.0.dist-info/METADATA +179 -0
- tnfr-7.0.0.dist-info/RECORD +185 -0
- {tnfr-4.5.2.dist-info → tnfr-7.0.0.dist-info}/licenses/LICENSE.md +1 -1
- tnfr/grammar.py +0 -344
- tnfr/graph_utils.py +0 -84
- tnfr/helpers/__init__.py +0 -71
- tnfr/import_utils.py +0 -228
- tnfr/json_utils.py +0 -162
- tnfr/logging_utils.py +0 -116
- tnfr/presets.py +0 -60
- tnfr/validators.py +0 -84
- tnfr/value_utils.py +0 -59
- tnfr-4.5.2.dist-info/METADATA +0 -379
- tnfr-4.5.2.dist-info/RECORD +0 -67
- {tnfr-4.5.2.dist-info → tnfr-7.0.0.dist-info}/WHEEL +0 -0
- {tnfr-4.5.2.dist-info → tnfr-7.0.0.dist-info}/entry_points.txt +0 -0
- {tnfr-4.5.2.dist-info → tnfr-7.0.0.dist-info}/top_level.txt +0 -0
tnfr/execution.py
CHANGED
@@ -4,28 +4,24 @@ from __future__ import annotations
 
 from collections import deque
 from collections.abc import Callable, Iterable, Sequence
-from typing import Any, Optional
+from typing import Any, Optional, cast
 
-
-
-from .collections_utils import (
-    MAX_MATERIALIZE_DEFAULT,
-    ensure_collection,
-    is_non_string_sequence,
-)
+from ._compat import TypeAlias
 from .constants import get_param
 from .dynamics import step
 from .flatten import _flatten
 from .glyph_history import ensure_history
-from .
-from .
-from .
-
-
-AdvanceFn = Callable[[
+from .tokens import TARGET, THOL, WAIT, OpTag, Token
+from .types import Glyph, NodeId, TNFRGraph
+from .utils import MAX_MATERIALIZE_DEFAULT, ensure_collection, is_non_string_sequence
+from .validation import apply_glyph_with_grammar
+
+AdvanceFn = Callable[[TNFRGraph], None]
+TraceEntry = dict[str, Any]
+ProgramTrace: TypeAlias = deque[TraceEntry]
 HandlerFn = Callable[
-    [
-    Optional[
+    [TNFRGraph, Any, Optional[Sequence[NodeId]], ProgramTrace, AdvanceFn],
+    Optional[Sequence[NodeId]],
 ]
 
 __all__ = [
@@ -45,7 +41,7 @@ __all__ = [
 ]
 
 
-CANONICAL_PRESET_NAME = "
+CANONICAL_PRESET_NAME = "canonical_example"
 CANONICAL_PROGRAM_TOKENS: tuple[Token, ...] = (
     Glyph.SHA,
     Glyph.AL,
@@ -56,13 +52,13 @@ CANONICAL_PROGRAM_TOKENS: tuple[Token, ...] = (
 )
 
 
-def _window(G) -> int:
+def _window(G: TNFRGraph) -> int:
     return int(get_param(G, "GLYPH_HYSTERESIS_WINDOW"))
 
 
 def _apply_glyph_to_targets(
-    G, g: Glyph | str, nodes: Optional[Iterable[
-):
+    G: TNFRGraph, g: Glyph | str, nodes: Optional[Iterable[NodeId]] = None
+) -> None:
     """Apply ``g`` to ``nodes`` (or all nodes) respecting the grammar."""
 
     nodes_iter = G.nodes() if nodes is None else nodes
@@ -70,22 +66,24 @@ def _apply_glyph_to_targets(
     apply_glyph_with_grammar(G, nodes_iter, g, w)
 
 
-def _advance(G, step_fn: AdvanceFn):
+def _advance(G: TNFRGraph, step_fn: AdvanceFn) -> None:
     step_fn(G)
 
 
-def _record_trace(trace:
+def _record_trace(trace: ProgramTrace, G: TNFRGraph, op: OpTag, **data: Any) -> None:
+    """Append an operation snapshot to ``trace`` using graph time metadata."""
+
     trace.append({"t": float(G.graph.get("_t", 0.0)), "op": op.name, **data})
 
 
 def _advance_and_record(
-    G,
-    trace:
+    G: TNFRGraph,
+    trace: ProgramTrace,
     label: OpTag,
     step_fn: AdvanceFn,
     *,
     times: int = 1,
-    **data,
+    **data: Any,
 ) -> None:
     for _ in range(times):
         _advance(G, step_fn)
@@ -93,40 +91,55 @@ def _advance_and_record(
 
 
 def _handle_target(
-    G
-
+    G: TNFRGraph,
+    payload: TARGET,
+    _curr_target: Optional[Sequence[NodeId]],
+    trace: ProgramTrace,
+    _step_fn: AdvanceFn,
+) -> Sequence[NodeId]:
     """Handle a ``TARGET`` token and return the active node set."""
 
     nodes_src = G.nodes() if payload.nodes is None else payload.nodes
     nodes = ensure_collection(nodes_src, max_materialize=None)
-
+    if is_non_string_sequence(nodes):
+        curr_target = cast(Sequence[NodeId], nodes)
+    else:
+        curr_target = tuple(nodes)
     _record_trace(trace, G, OpTag.TARGET, n=len(curr_target))
     return curr_target
 
 
 def _handle_wait(
-    G
-
+    G: TNFRGraph,
+    steps: int,
+    curr_target: Optional[Sequence[NodeId]],
+    trace: ProgramTrace,
+    step_fn: AdvanceFn,
+) -> Optional[Sequence[NodeId]]:
     _advance_and_record(G, trace, OpTag.WAIT, step_fn, times=steps, k=steps)
     return curr_target
 
 
 def _handle_glyph(
-    G,
-    g: str,
-    curr_target,
-    trace:
+    G: TNFRGraph,
+    g: Glyph | str,
+    curr_target: Optional[Sequence[NodeId]],
+    trace: ProgramTrace,
     step_fn: AdvanceFn,
     label: OpTag = OpTag.GLYPH,
-):
+) -> Optional[Sequence[NodeId]]:
     _apply_glyph_to_targets(G, g, curr_target)
     _advance_and_record(G, trace, label, step_fn, g=g)
     return curr_target
 
 
 def _handle_thol(
-    G
-
+    G: TNFRGraph,
+    g: Glyph | str | None,
+    curr_target: Optional[Sequence[NodeId]],
+    trace: ProgramTrace,
+    step_fn: AdvanceFn,
+) -> Optional[Sequence[NodeId]]:
     return _handle_glyph(
         G, g or Glyph.THOL.value, curr_target, trace, step_fn, label=OpTag.THOL
     )
@@ -141,20 +154,23 @@ HANDLERS: dict[OpTag, HandlerFn] = {
 
 
 def play(
-    G, sequence: Sequence[Token], step_fn: Optional[AdvanceFn] = None
+    G: TNFRGraph, sequence: Sequence[Token], step_fn: Optional[AdvanceFn] = None
 ) -> None:
     """Execute a canonical sequence on graph ``G``."""
 
     step_fn = step_fn or step
 
-    curr_target: Optional[
+    curr_target: Optional[Sequence[NodeId]] = None
 
     history = ensure_history(G)
     maxlen = int(get_param(G, "PROGRAM_TRACE_MAXLEN"))
-
-
-
+    trace_obj = history.get("program_trace")
+    trace: ProgramTrace
+    if not isinstance(trace_obj, deque) or trace_obj.maxlen != maxlen:
+        trace = cast(ProgramTrace, deque(trace_obj or [], maxlen=maxlen))
         history["program_trace"] = trace
+    else:
+        trace = cast(ProgramTrace, trace_obj)
 
     for op, payload in _flatten(sequence):
         handler: HandlerFn | None = HANDLERS.get(op)
@@ -174,28 +190,34 @@ def compile_sequence(
 
 
 def seq(*tokens: Token) -> list[Token]:
+    """Return a mutable list of ``tokens`` for explicit sequence editing."""
+
     return list(tokens)
 
 
-def block(
-
-
+def block(*tokens: Token, repeat: int = 1, close: Optional[Glyph] = None) -> THOL:
+    """Build a THOL block with optional repetition and forced closure."""
+
     return THOL(body=list(tokens), repeat=repeat, force_close=close)
 
 
-def target(nodes: Optional[Iterable[
+def target(nodes: Optional[Iterable[NodeId]] = None) -> TARGET:
+    """Return a TARGET token selecting ``nodes`` (defaults to all nodes)."""
+
     return TARGET(nodes=nodes)
 
 
 def wait(steps: int = 1) -> WAIT:
+    """Return a WAIT token forcing ``steps`` structural updates before resuming."""
+
     return WAIT(steps=max(1, int(steps)))
 
 
 def basic_canonical_example() -> list[Token]:
-    """
+    """Return the canonical preset sequence.
 
     Returns a copy of the canonical preset tokens to keep CLI defaults aligned
-    with :func:`tnfr.presets.get_preset`.
+    with :func:`tnfr.config.presets.get_preset`.
     """
 
     return list(CANONICAL_PROGRAM_TOKENS)
tnfr/execution.pyi
ADDED
@@ -0,0 +1,45 @@
+from __future__ import annotations
+
+from collections import deque
+from collections.abc import Callable, Iterable, Sequence
+from typing import Any, Optional
+
+from ._compat import TypeAlias
+from .tokens import TARGET, THOL, WAIT, OpTag, Token
+from .types import Glyph, NodeId, TNFRGraph
+
+__all__: list[str]
+
+def __getattr__(name: str) -> Any: ...
+
+AdvanceFn = Callable[[TNFRGraph], None]
+TraceEntry = dict[str, Any]
+ProgramTrace: TypeAlias = deque[TraceEntry]
+HandlerFn = Callable[
+    [TNFRGraph, Any, Sequence[NodeId] | None, ProgramTrace, AdvanceFn],
+    Sequence[NodeId] | None,
+]
+
+CANONICAL_PRESET_NAME: str
+CANONICAL_PROGRAM_TOKENS: tuple[Token, ...]
+HANDLERS: dict[OpTag, HandlerFn]
+
+def _apply_glyph_to_targets(
+    G: TNFRGraph, g: Glyph | str, nodes: Iterable[NodeId] | None = ...
+) -> None: ...
+def _record_trace(
+    trace: ProgramTrace, G: TNFRGraph, op: OpTag, **data: Any
+) -> None: ...
+def compile_sequence(
+    sequence: Iterable[Token] | Sequence[Token] | Any,
+    *,
+    max_materialize: int | None = ...,
+) -> list[tuple[OpTag, Any]]: ...
+def play(
+    G: TNFRGraph, sequence: Sequence[Token], step_fn: Optional[AdvanceFn] = ...
+) -> None: ...
+def seq(*tokens: Token) -> list[Token]: ...
+def block(*tokens: Token, repeat: int = ..., close: Glyph | None = ...) -> THOL: ...
+def target(nodes: Iterable[NodeId] | None = ...) -> TARGET: ...
+def wait(steps: int = ...) -> WAIT: ...
+def basic_canonical_example() -> list[Token]: ...
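The new stubs make the sequencing surface explicit: `seq`, `block`, `target`, `wait`, `play`, and `basic_canonical_example` now carry full signatures. Below is a minimal sketch of how these helpers compose, assuming `G` is an already-prepared TNFR graph whose construction is outside this diff.

# Sketch only: compose a token program with the helpers typed above and run it.
from tnfr.execution import basic_canonical_example, block, play, seq, target, wait
from tnfr.types import Glyph

G = ...  # an already-prepared TNFRGraph (assumed; not built in this diff)

program = seq(
    target(),                    # TARGET token: act on every node
    Glyph.AL,                    # bare glyph token
    wait(2),                     # WAIT token: two structural updates
    block(Glyph.SHA, repeat=2),  # THOL block repeated twice
)
play(G, program)                    # uses the default `step` advance function
play(G, basic_canonical_example())  # or replay the canonical preset sequence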
tnfr/flatten.py
CHANGED
@@ -7,16 +7,16 @@ from dataclasses import dataclass
 from itertools import chain
 from typing import Any, Callable
 
-from .
+from .config.constants import GLYPHS_CANONICAL_SET
+from .tokens import TARGET, THOL, THOL_SENTINEL, WAIT, OpTag, Token
+from .types import Glyph
+from .utils import (
     MAX_MATERIALIZE_DEFAULT,
+    STRING_TYPES,
     ensure_collection,
     flatten_structure,
-    STRING_TYPES,
     normalize_materialize_limit,
 )
-from .constants_glyphs import GLYPHS_CANONICAL_SET
-from .tokens import THOL, TARGET, WAIT, OpTag, THOL_SENTINEL, Token
-from .types import Glyph
 
 __all__ = [
     "THOLEvaluator",
@@ -124,9 +124,13 @@ class THOLEvaluator:
         self._started = False
 
     def __iter__(self) -> "THOLEvaluator":
+        """Return the evaluator itself to stream THOL expansion."""
+
         return self
 
-    def __next__(self):
+    def __next__(self) -> Token | object:
+        """Yield the next token or :data:`THOL_SENTINEL` during evaluation."""
+
         if not self._started:
             self._started = True
             return THOL_SENTINEL
@@ -211,7 +215,7 @@ def _coerce_mapping_token(
     if isinstance(close, str):
         close_enum = Glyph.__members__.get(close)
         if close_enum is None:
-            raise ValueError(f"
+            raise ValueError(f"Unknown closing glyph: {close!r}")
         close = close_enum
     elif close is not None and not isinstance(close, Glyph):
         raise TypeError("THOL close glyph must be a Glyph or string name")
@@ -235,7 +239,7 @@ def parse_program_tokens(
 
     sequence = _iter_source(obj, max_materialize=max_materialize)
 
-    def _expand(item: Any):
+    def _expand(item: Any) -> Iterable[Any] | None:
         if isinstance(item, Mapping):
             return (_coerce_mapping_token(item, max_materialize=max_materialize),)
         return None
@@ -259,7 +263,7 @@ def _flatten(
     ops: list[tuple[OpTag, Any]] = []
     sequence = _iter_source(seq, max_materialize=max_materialize)
 
-    def _expand(item: Any):
+    def _expand(item: Any) -> Iterable[Any] | None:
         if isinstance(item, THOL):
             return THOLEvaluator(item, max_materialize=max_materialize)
         if isinstance(item, Mapping):
tnfr/flatten.pyi
ADDED
@@ -0,0 +1,21 @@
+from __future__ import annotations
+
+from collections.abc import Iterable, Iterator, Sequence
+from typing import Any
+
+from .tokens import THOL, Token
+
+__all__: list[str]
+
+def __getattr__(name: str) -> Any: ...
+
+class THOLEvaluator(Iterator[Token | object]):
+    def __init__(self, item: THOL, *, max_materialize: int | None = ...) -> None: ...
+    def __iter__(self) -> THOLEvaluator: ...
+    def __next__(self) -> Token | object: ...
+
+def parse_program_tokens(
+    obj: Iterable[Any] | Sequence[Any] | Any,
+    *,
+    max_materialize: int | None = ...,
+) -> list[Token]: ...
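`THOLEvaluator` is now typed as `Iterator[Token | object]`, and its `__next__` (shown in the flatten.py hunk above) yields `THOL_SENTINEL` before the block body. A sketch follows; the exact yield order beyond that first sentinel is treated as an assumption here.

# Sketch only: stream the expansion of a THOL block built with tnfr.execution.block.
from tnfr.execution import block
from tnfr.flatten import THOLEvaluator
from tnfr.tokens import THOL_SENTINEL
from tnfr.types import Glyph

thol_block = block(Glyph.AL, Glyph.SHA, repeat=2)
for item in THOLEvaluator(thol_block):
    if item is THOL_SENTINEL:
        print("<block boundary>")  # sentinel marking THOL structure
    else:
        print(item)                # a Token from the block body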
tnfr/gamma.py
CHANGED
@@ -1,24 +1,26 @@
 """Gamma registry."""
 
 from __future__ import annotations
-
-import math
-import logging
+
 import hashlib
+import logging
+import math
 from collections.abc import Mapping
 from functools import lru_cache
 from types import MappingProxyType
+from typing import Any, Callable, NamedTuple
 
-from .
-from .
-from .
-from .cache import edge_version_cache, node_set_checksum
-from .json_utils import json_dumps
-from .logging_utils import get_logger
+from .alias import get_theta_attr
+from .constants import DEFAULTS
+from .utils import json_dumps
 from .metrics.trig_cache import get_trig_cache
-
-
-
+from .types import GammaSpec, NodeId, TNFRGraph
+from .utils import (
+    edge_version_cache,
+    get_graph_mapping,
+    get_logger,
+    node_set_checksum,
+)
 
 logger = get_logger(__name__)
 
@@ -44,9 +46,8 @@ def _default_gamma_spec() -> tuple[bytes, str]:
     return dumped, hash_
 
 
-def _ensure_kuramoto_cache(G, t) -> None:
-    """Cache ``(R, ψ)`` for the current step ``t`` using
-    ``edge_version_cache``."""
+def _ensure_kuramoto_cache(G: TNFRGraph, t: float | int) -> None:
+    """Cache ``(R, ψ)`` for the current step ``t`` using ``edge_version_cache``."""
     checksum = G.graph.get("_dnfr_nodes_checksum")
     if checksum is None:
         # reuse checksum from cached_nodes_and_A when available
@@ -63,7 +64,7 @@ def _ensure_kuramoto_cache(G, t) -> None:
     G.graph["_kuramoto_cache"] = entry
 
 
-def kuramoto_R_psi(G) -> tuple[float, float]:
+def kuramoto_R_psi(G: TNFRGraph) -> tuple[float, float]:
     """Return ``(R, ψ)`` for Kuramoto order using θ from all nodes."""
     max_steps = int(G.graph.get("KURAMOTO_CACHE_STEPS", 1))
     trig = get_trig_cache(G, cache_size=max_steps)
@@ -78,7 +79,9 @@ def kuramoto_R_psi(G) -> tuple[float, float]:
     return R, psi
 
 
-def _kuramoto_common(
+def _kuramoto_common(
+    G: TNFRGraph, node: NodeId, _cfg: GammaSpec
+) -> tuple[float, float, float]:
     """Return ``(θ_i, R, ψ)`` for Kuramoto-based Γ functions.
 
     Reads cached global order ``R`` and mean phase ``ψ`` and obtains node
@@ -88,11 +91,12 @@ def _kuramoto_common(G, node, _cfg):
     cache = G.graph.get("_kuramoto_cache", {})
     R = float(cache.get("R", 0.0))
     psi = float(cache.get("psi", 0.0))
-
+    th_val = get_theta_attr(G.nodes[node], 0.0)
+    th_i = float(th_val if th_val is not None else 0.0)
     return th_i, R, psi
 
 
-def _read_gamma_raw(G) ->
+def _read_gamma_raw(G: TNFRGraph) -> GammaSpec | None:
     """Return raw Γ specification from ``G.graph['GAMMA']``.
 
     The returned value is the direct contents of ``G.graph['GAMMA']`` when
@@ -104,11 +108,13 @@ def _read_gamma_raw(G) -> Mapping[str, Any] | None:
     if raw is None or isinstance(raw, Mapping):
         return raw
     return get_graph_mapping(
-        G,
+        G,
+        "GAMMA",
+        "G.graph['GAMMA'] is not a mapping; using {'type': 'none'}",
     )
 
 
-def _get_gamma_spec(G) ->
+def _get_gamma_spec(G: TNFRGraph) -> GammaSpec:
    """Return validated Γ specification caching results.
 
     The raw value from ``G.graph['GAMMA']`` is cached together with the
@@ -122,7 +128,7 @@ def _get_gamma_spec(G) -> Mapping[str, Any]:
     cached_spec = G.graph.get("_gamma_spec")
     cached_hash = G.graph.get("_gamma_spec_hash")
 
-    def _hash_mapping(mapping:
+    def _hash_mapping(mapping: GammaSpec) -> str:
         dumped = json_dumps(mapping, sort_keys=True, to_bytes=True)
         return hashlib.blake2b(dumped, digest_size=16).hexdigest()
 
@@ -165,9 +171,7 @@ def _get_gamma_spec(G) -> Mapping[str, Any]:
 # -----------------
 
 
-def _gamma_params(
-    cfg: Mapping[str, Any], **defaults: float
-) -> tuple[float, ...]:
+def _gamma_params(cfg: GammaSpec, **defaults: float) -> tuple[float, ...]:
     """Return normalized Γ parameters from ``cfg``.
 
     Parameters are retrieved from ``cfg`` using the keys in ``defaults`` and
@@ -179,28 +183,28 @@ def _gamma_params(
     >>> beta, R0 = _gamma_params(cfg, beta=0.0, R0=0.0)
     """
 
-    return tuple(
-        float(cfg.get(name, default)) for name, default in defaults.items()
-    )
+    return tuple(float(cfg.get(name, default)) for name, default in defaults.items())
 
 
 # -----------------
-# Γi(R)
+# Canonical Γi(R)
 # -----------------
 
 
-def gamma_none(G, node, t, cfg:
+def gamma_none(G: TNFRGraph, node: NodeId, t: float | int, cfg: GammaSpec) -> float:
+    """Return ``0.0`` to disable Γ forcing for the given node."""
+
     return 0.0
 
 
 def _gamma_kuramoto(
-    G,
-    node,
-    cfg:
+    G: TNFRGraph,
+    node: NodeId,
+    cfg: GammaSpec,
     builder: Callable[..., float],
     **defaults: float,
 ) -> float:
-    """
+    """Construct a Kuramoto-based Γ function.
 
     ``builder`` receives ``(θ_i, R, ψ, *params)`` where ``params`` are
     extracted from ``cfg`` according to ``defaults``.
@@ -220,11 +224,15 @@ def _builder_bandpass(th_i: float, R: float, psi: float, beta: float) -> float:
     return beta * R * (1.0 - R) * sgn
 
 
-def _builder_tanh(
+def _builder_tanh(
+    th_i: float, R: float, psi: float, beta: float, k: float, R0: float
+) -> float:
     return beta * math.tanh(k * (R - R0)) * math.cos(th_i - psi)
 
 
-def gamma_kuramoto_linear(
+def gamma_kuramoto_linear(
+    G: TNFRGraph, node: NodeId, t: float | int, cfg: GammaSpec
+) -> float:
     """Linear Kuramoto coupling for Γi(R).
 
     Formula: Γ = β · (R - R0) · cos(θ_i - ψ)
@@ -239,13 +247,17 @@ def gamma_kuramoto_linear(G, node, t, cfg: dict[str, Any]) -> float:
     return _gamma_kuramoto(G, node, cfg, _builder_linear, beta=0.0, R0=0.0)
 
 
-def gamma_kuramoto_bandpass(
-
+def gamma_kuramoto_bandpass(
+    G: TNFRGraph, node: NodeId, t: float | int, cfg: GammaSpec
+) -> float:
+    """Compute Γ = β · R(1-R) · sign(cos(θ_i - ψ))."""
 
     return _gamma_kuramoto(G, node, cfg, _builder_bandpass, beta=0.0)
 
 
-def gamma_kuramoto_tanh(
+def gamma_kuramoto_tanh(
+    G: TNFRGraph, node: NodeId, t: float | int, cfg: GammaSpec
+) -> float:
     """Saturating tanh coupling for Γi(R).
 
     Formula: Γ = β · tanh(k·(R - R0)) · cos(θ_i - ψ)
@@ -257,7 +269,7 @@ def gamma_kuramoto_tanh(G, node, t, cfg: dict[str, Any]) -> float:
     return _gamma_kuramoto(G, node, cfg, _builder_tanh, beta=0.0, k=1.0, R0=0.0)
 
 
-def gamma_harmonic(G, node, t, cfg:
+def gamma_harmonic(G: TNFRGraph, node: NodeId, t: float | int, cfg: GammaSpec) -> float:
     """Harmonic forcing aligned with the global phase field.
 
     Formula: Γ = β · sin(ω·t + φ) · cos(θ_i - ψ)
@@ -271,13 +283,15 @@ def gamma_harmonic(G, node, t, cfg: dict[str, Any]) -> float:
 
 
 class GammaEntry(NamedTuple):
-
+    """Lookup entry linking Γ evaluators with their preconditions."""
+
+    fn: Callable[[TNFRGraph, NodeId, float | int, GammaSpec], float]
     needs_kuramoto: bool
 
 
-# ``GAMMA_REGISTRY``
-# ``
-#
+# ``GAMMA_REGISTRY`` associates each coupling name with a ``GammaEntry`` where
+# ``fn`` is the evaluation function and ``needs_kuramoto`` indicates whether
+# the global phase order must be precomputed.
 GAMMA_REGISTRY: dict[str, GammaEntry] = {
     "none": GammaEntry(gamma_none, False),
     "kuramoto_linear": GammaEntry(gamma_kuramoto_linear, True),
@@ -288,20 +302,19 @@ GAMMA_REGISTRY: dict[str, GammaEntry] = {
 
 
 def eval_gamma(
-    G,
-    node,
-    t,
+    G: TNFRGraph,
+    node: NodeId,
+    t: float | int,
     *,
     strict: bool = False,
     log_level: int | None = None,
 ) -> float:
-    """Evaluate Γi for ``node``
-    specification.
+    """Evaluate Γi for ``node`` using ``G.graph['GAMMA']`` specification.
 
     If ``strict`` is ``True`` exceptions raised during evaluation are
     propagated instead of returning ``0.0``. Likewise, if the specified
-    Γ type is not registered a warning is emitted (
-
+    Γ type is not registered a warning is emitted (or ``ValueError`` in
+    strict mode) and ``gamma_none`` is used.
 
     ``log_level`` controls the logging level for captured errors when
     ``strict`` is ``False``. If omitted, ``logging.ERROR`` is used in
@@ -311,7 +324,7 @@ def eval_gamma(
     spec_type = spec.get("type", "none")
     reg_entry = GAMMA_REGISTRY.get(spec_type)
     if reg_entry is None:
-        msg = f"
+        msg = f"Unknown GAMMA type: {spec_type}"
         if strict:
             raise ValueError(msg)
         logger.warning(msg)
@@ -330,7 +343,7 @@
         )
         logger.log(
             level,
-            "
+            "Failed to evaluate Γi for node %s at t=%s: %s: %s",
             node,
             t,
             exc.__class__.__name__,
tnfr/gamma.pyi
ADDED
@@ -0,0 +1,34 @@
+from typing import Callable, NamedTuple
+
+from .types import GammaSpec, NodeId, TNFRGraph
+
+__all__: tuple[str, ...]
+
+class GammaEntry(NamedTuple):
+    fn: Callable[[TNFRGraph, NodeId, float | int, GammaSpec], float]
+    needs_kuramoto: bool
+
+GAMMA_REGISTRY: dict[str, GammaEntry]
+
+def kuramoto_R_psi(G: TNFRGraph) -> tuple[float, float]: ...
+def gamma_none(G: TNFRGraph, node: NodeId, t: float | int, cfg: GammaSpec) -> float: ...
+def gamma_kuramoto_linear(
+    G: TNFRGraph, node: NodeId, t: float | int, cfg: GammaSpec
+) -> float: ...
+def gamma_kuramoto_bandpass(
+    G: TNFRGraph, node: NodeId, t: float | int, cfg: GammaSpec
+) -> float: ...
+def gamma_kuramoto_tanh(
+    G: TNFRGraph, node: NodeId, t: float | int, cfg: GammaSpec
+) -> float: ...
+def gamma_harmonic(
+    G: TNFRGraph, node: NodeId, t: float | int, cfg: GammaSpec
+) -> float: ...
+def eval_gamma(
+    G: TNFRGraph,
+    node: NodeId,
+    t: float | int,
+    *,
+    strict: bool = ...,
+    log_level: int | None = ...,
+) -> float: ...
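The typed signatures spell out the Γ configuration pattern: `G.graph["GAMMA"]` holds a mapping whose `"type"` key selects an entry in `GAMMA_REGISTRY` and whose remaining keys (`beta`, `R0`, ...) are read by `_gamma_params`. A minimal sketch, again assuming an already-prepared TNFR graph `G`:

# Sketch only: pick a registered Γ coupling and evaluate it per node.
from tnfr.gamma import GAMMA_REGISTRY, eval_gamma

G = ...  # an already-prepared TNFRGraph (assumed; not built in this diff)
print(sorted(GAMMA_REGISTRY))  # includes "none" and "kuramoto_linear" per the registry above

G.graph["GAMMA"] = {"type": "kuramoto_linear", "beta": 0.2, "R0": 0.0}
for node in G.nodes():
    gamma_i = eval_gamma(G, node, t=0.0, strict=False)  # returns 0.0 on errors unless strict=True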