tnfr 4.5.2__py3-none-any.whl → 8.5.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of tnfr might be problematic. Click here for more details.
- tnfr/__init__.py +334 -50
- tnfr/__init__.pyi +33 -0
- tnfr/_compat.py +10 -0
- tnfr/_generated_version.py +34 -0
- tnfr/_version.py +49 -0
- tnfr/_version.pyi +7 -0
- tnfr/alias.py +214 -37
- tnfr/alias.pyi +108 -0
- tnfr/backends/__init__.py +354 -0
- tnfr/backends/jax_backend.py +173 -0
- tnfr/backends/numpy_backend.py +238 -0
- tnfr/backends/optimized_numpy.py +420 -0
- tnfr/backends/torch_backend.py +408 -0
- tnfr/cache.py +149 -556
- tnfr/cache.pyi +13 -0
- tnfr/cli/__init__.py +51 -16
- tnfr/cli/__init__.pyi +26 -0
- tnfr/cli/arguments.py +344 -32
- tnfr/cli/arguments.pyi +29 -0
- tnfr/cli/execution.py +676 -50
- tnfr/cli/execution.pyi +70 -0
- tnfr/cli/interactive_validator.py +614 -0
- tnfr/cli/utils.py +18 -3
- tnfr/cli/utils.pyi +7 -0
- tnfr/cli/validate.py +236 -0
- tnfr/compat/__init__.py +85 -0
- tnfr/compat/dataclass.py +136 -0
- tnfr/compat/jsonschema_stub.py +61 -0
- tnfr/compat/matplotlib_stub.py +73 -0
- tnfr/compat/numpy_stub.py +155 -0
- tnfr/config/__init__.py +224 -0
- tnfr/config/__init__.pyi +10 -0
- tnfr/{constants_glyphs.py → config/constants.py} +26 -20
- tnfr/config/constants.pyi +12 -0
- tnfr/config/defaults.py +54 -0
- tnfr/{constants/core.py → config/defaults_core.py} +59 -6
- tnfr/config/defaults_init.py +33 -0
- tnfr/config/defaults_metric.py +104 -0
- tnfr/config/feature_flags.py +81 -0
- tnfr/config/feature_flags.pyi +16 -0
- tnfr/config/glyph_constants.py +31 -0
- tnfr/config/init.py +77 -0
- tnfr/config/init.pyi +8 -0
- tnfr/config/operator_names.py +254 -0
- tnfr/config/operator_names.pyi +36 -0
- tnfr/config/physics_derivation.py +354 -0
- tnfr/config/presets.py +83 -0
- tnfr/config/presets.pyi +7 -0
- tnfr/config/security.py +927 -0
- tnfr/config/thresholds.py +114 -0
- tnfr/config/tnfr_config.py +498 -0
- tnfr/constants/__init__.py +51 -133
- tnfr/constants/__init__.pyi +92 -0
- tnfr/constants/aliases.py +33 -0
- tnfr/constants/aliases.pyi +27 -0
- tnfr/constants/init.py +3 -1
- tnfr/constants/init.pyi +12 -0
- tnfr/constants/metric.py +9 -15
- tnfr/constants/metric.pyi +19 -0
- tnfr/core/__init__.py +33 -0
- tnfr/core/container.py +226 -0
- tnfr/core/default_implementations.py +329 -0
- tnfr/core/interfaces.py +279 -0
- tnfr/dynamics/__init__.py +213 -633
- tnfr/dynamics/__init__.pyi +83 -0
- tnfr/dynamics/adaptation.py +267 -0
- tnfr/dynamics/adaptation.pyi +7 -0
- tnfr/dynamics/adaptive_sequences.py +189 -0
- tnfr/dynamics/adaptive_sequences.pyi +14 -0
- tnfr/dynamics/aliases.py +23 -0
- tnfr/dynamics/aliases.pyi +19 -0
- tnfr/dynamics/bifurcation.py +232 -0
- tnfr/dynamics/canonical.py +229 -0
- tnfr/dynamics/canonical.pyi +48 -0
- tnfr/dynamics/coordination.py +385 -0
- tnfr/dynamics/coordination.pyi +25 -0
- tnfr/dynamics/dnfr.py +2699 -398
- tnfr/dynamics/dnfr.pyi +26 -0
- tnfr/dynamics/dynamic_limits.py +225 -0
- tnfr/dynamics/feedback.py +252 -0
- tnfr/dynamics/feedback.pyi +24 -0
- tnfr/dynamics/fused_dnfr.py +454 -0
- tnfr/dynamics/homeostasis.py +157 -0
- tnfr/dynamics/homeostasis.pyi +14 -0
- tnfr/dynamics/integrators.py +496 -102
- tnfr/dynamics/integrators.pyi +36 -0
- tnfr/dynamics/learning.py +310 -0
- tnfr/dynamics/learning.pyi +33 -0
- tnfr/dynamics/metabolism.py +254 -0
- tnfr/dynamics/nbody.py +796 -0
- tnfr/dynamics/nbody_tnfr.py +783 -0
- tnfr/dynamics/propagation.py +326 -0
- tnfr/dynamics/runtime.py +908 -0
- tnfr/dynamics/runtime.pyi +77 -0
- tnfr/dynamics/sampling.py +10 -5
- tnfr/dynamics/sampling.pyi +7 -0
- tnfr/dynamics/selectors.py +711 -0
- tnfr/dynamics/selectors.pyi +85 -0
- tnfr/dynamics/structural_clip.py +207 -0
- tnfr/errors/__init__.py +37 -0
- tnfr/errors/contextual.py +492 -0
- tnfr/execution.py +77 -55
- tnfr/execution.pyi +45 -0
- tnfr/extensions/__init__.py +205 -0
- tnfr/extensions/__init__.pyi +18 -0
- tnfr/extensions/base.py +173 -0
- tnfr/extensions/base.pyi +35 -0
- tnfr/extensions/business/__init__.py +71 -0
- tnfr/extensions/business/__init__.pyi +11 -0
- tnfr/extensions/business/cookbook.py +88 -0
- tnfr/extensions/business/cookbook.pyi +8 -0
- tnfr/extensions/business/health_analyzers.py +202 -0
- tnfr/extensions/business/health_analyzers.pyi +9 -0
- tnfr/extensions/business/patterns.py +183 -0
- tnfr/extensions/business/patterns.pyi +8 -0
- tnfr/extensions/medical/__init__.py +73 -0
- tnfr/extensions/medical/__init__.pyi +11 -0
- tnfr/extensions/medical/cookbook.py +88 -0
- tnfr/extensions/medical/cookbook.pyi +8 -0
- tnfr/extensions/medical/health_analyzers.py +181 -0
- tnfr/extensions/medical/health_analyzers.pyi +9 -0
- tnfr/extensions/medical/patterns.py +163 -0
- tnfr/extensions/medical/patterns.pyi +8 -0
- tnfr/flatten.py +29 -50
- tnfr/flatten.pyi +21 -0
- tnfr/gamma.py +66 -53
- tnfr/gamma.pyi +36 -0
- tnfr/glyph_history.py +144 -57
- tnfr/glyph_history.pyi +35 -0
- tnfr/glyph_runtime.py +19 -0
- tnfr/glyph_runtime.pyi +8 -0
- tnfr/immutable.py +70 -30
- tnfr/immutable.pyi +36 -0
- tnfr/initialization.py +22 -16
- tnfr/initialization.pyi +65 -0
- tnfr/io.py +5 -241
- tnfr/io.pyi +13 -0
- tnfr/locking.pyi +7 -0
- tnfr/mathematics/__init__.py +79 -0
- tnfr/mathematics/backend.py +453 -0
- tnfr/mathematics/backend.pyi +99 -0
- tnfr/mathematics/dynamics.py +408 -0
- tnfr/mathematics/dynamics.pyi +90 -0
- tnfr/mathematics/epi.py +391 -0
- tnfr/mathematics/epi.pyi +65 -0
- tnfr/mathematics/generators.py +242 -0
- tnfr/mathematics/generators.pyi +29 -0
- tnfr/mathematics/metrics.py +119 -0
- tnfr/mathematics/metrics.pyi +16 -0
- tnfr/mathematics/operators.py +239 -0
- tnfr/mathematics/operators.pyi +59 -0
- tnfr/mathematics/operators_factory.py +124 -0
- tnfr/mathematics/operators_factory.pyi +11 -0
- tnfr/mathematics/projection.py +87 -0
- tnfr/mathematics/projection.pyi +33 -0
- tnfr/mathematics/runtime.py +182 -0
- tnfr/mathematics/runtime.pyi +64 -0
- tnfr/mathematics/spaces.py +256 -0
- tnfr/mathematics/spaces.pyi +83 -0
- tnfr/mathematics/transforms.py +305 -0
- tnfr/mathematics/transforms.pyi +62 -0
- tnfr/metrics/__init__.py +47 -9
- tnfr/metrics/__init__.pyi +20 -0
- tnfr/metrics/buffer_cache.py +163 -0
- tnfr/metrics/buffer_cache.pyi +24 -0
- tnfr/metrics/cache_utils.py +214 -0
- tnfr/metrics/coherence.py +1510 -330
- tnfr/metrics/coherence.pyi +129 -0
- tnfr/metrics/common.py +23 -16
- tnfr/metrics/common.pyi +35 -0
- tnfr/metrics/core.py +251 -36
- tnfr/metrics/core.pyi +13 -0
- tnfr/metrics/diagnosis.py +709 -110
- tnfr/metrics/diagnosis.pyi +86 -0
- tnfr/metrics/emergence.py +245 -0
- tnfr/metrics/export.py +60 -18
- tnfr/metrics/export.pyi +7 -0
- tnfr/metrics/glyph_timing.py +233 -43
- tnfr/metrics/glyph_timing.pyi +81 -0
- tnfr/metrics/learning_metrics.py +280 -0
- tnfr/metrics/learning_metrics.pyi +21 -0
- tnfr/metrics/phase_coherence.py +351 -0
- tnfr/metrics/phase_compatibility.py +349 -0
- tnfr/metrics/reporting.py +63 -28
- tnfr/metrics/reporting.pyi +25 -0
- tnfr/metrics/sense_index.py +1126 -43
- tnfr/metrics/sense_index.pyi +9 -0
- tnfr/metrics/trig.py +215 -23
- tnfr/metrics/trig.pyi +13 -0
- tnfr/metrics/trig_cache.py +148 -24
- tnfr/metrics/trig_cache.pyi +10 -0
- tnfr/multiscale/__init__.py +32 -0
- tnfr/multiscale/hierarchical.py +517 -0
- tnfr/node.py +646 -140
- tnfr/node.pyi +139 -0
- tnfr/observers.py +160 -45
- tnfr/observers.pyi +31 -0
- tnfr/ontosim.py +23 -19
- tnfr/ontosim.pyi +28 -0
- tnfr/operators/__init__.py +1358 -106
- tnfr/operators/__init__.pyi +31 -0
- tnfr/operators/algebra.py +277 -0
- tnfr/operators/canonical_patterns.py +420 -0
- tnfr/operators/cascade.py +267 -0
- tnfr/operators/cycle_detection.py +358 -0
- tnfr/operators/definitions.py +4108 -0
- tnfr/operators/definitions.pyi +78 -0
- tnfr/operators/grammar.py +1164 -0
- tnfr/operators/grammar.pyi +140 -0
- tnfr/operators/hamiltonian.py +710 -0
- tnfr/operators/health_analyzer.py +809 -0
- tnfr/operators/jitter.py +107 -38
- tnfr/operators/jitter.pyi +11 -0
- tnfr/operators/lifecycle.py +314 -0
- tnfr/operators/metabolism.py +618 -0
- tnfr/operators/metrics.py +2138 -0
- tnfr/operators/network_analysis/__init__.py +27 -0
- tnfr/operators/network_analysis/source_detection.py +186 -0
- tnfr/operators/nodal_equation.py +395 -0
- tnfr/operators/pattern_detection.py +660 -0
- tnfr/operators/patterns.py +669 -0
- tnfr/operators/postconditions/__init__.py +38 -0
- tnfr/operators/postconditions/mutation.py +236 -0
- tnfr/operators/preconditions/__init__.py +1226 -0
- tnfr/operators/preconditions/coherence.py +305 -0
- tnfr/operators/preconditions/dissonance.py +236 -0
- tnfr/operators/preconditions/emission.py +128 -0
- tnfr/operators/preconditions/mutation.py +580 -0
- tnfr/operators/preconditions/reception.py +125 -0
- tnfr/operators/preconditions/resonance.py +364 -0
- tnfr/operators/registry.py +74 -0
- tnfr/operators/registry.pyi +9 -0
- tnfr/operators/remesh.py +1415 -91
- tnfr/operators/remesh.pyi +26 -0
- tnfr/operators/structural_units.py +268 -0
- tnfr/operators/unified_grammar.py +105 -0
- tnfr/parallel/__init__.py +54 -0
- tnfr/parallel/auto_scaler.py +234 -0
- tnfr/parallel/distributed.py +384 -0
- tnfr/parallel/engine.py +238 -0
- tnfr/parallel/gpu_engine.py +420 -0
- tnfr/parallel/monitoring.py +248 -0
- tnfr/parallel/partitioner.py +459 -0
- tnfr/py.typed +0 -0
- tnfr/recipes/__init__.py +22 -0
- tnfr/recipes/cookbook.py +743 -0
- tnfr/rng.py +75 -151
- tnfr/rng.pyi +26 -0
- tnfr/schemas/__init__.py +8 -0
- tnfr/schemas/grammar.json +94 -0
- tnfr/sdk/__init__.py +107 -0
- tnfr/sdk/__init__.pyi +19 -0
- tnfr/sdk/adaptive_system.py +173 -0
- tnfr/sdk/adaptive_system.pyi +21 -0
- tnfr/sdk/builders.py +370 -0
- tnfr/sdk/builders.pyi +51 -0
- tnfr/sdk/fluent.py +1121 -0
- tnfr/sdk/fluent.pyi +74 -0
- tnfr/sdk/templates.py +342 -0
- tnfr/sdk/templates.pyi +41 -0
- tnfr/sdk/utils.py +341 -0
- tnfr/secure_config.py +46 -0
- tnfr/security/__init__.py +70 -0
- tnfr/security/database.py +514 -0
- tnfr/security/subprocess.py +503 -0
- tnfr/security/validation.py +290 -0
- tnfr/selector.py +59 -22
- tnfr/selector.pyi +19 -0
- tnfr/sense.py +92 -67
- tnfr/sense.pyi +23 -0
- tnfr/services/__init__.py +17 -0
- tnfr/services/orchestrator.py +325 -0
- tnfr/sparse/__init__.py +39 -0
- tnfr/sparse/representations.py +492 -0
- tnfr/structural.py +639 -263
- tnfr/structural.pyi +83 -0
- tnfr/telemetry/__init__.py +35 -0
- tnfr/telemetry/cache_metrics.py +226 -0
- tnfr/telemetry/cache_metrics.pyi +64 -0
- tnfr/telemetry/nu_f.py +422 -0
- tnfr/telemetry/nu_f.pyi +108 -0
- tnfr/telemetry/verbosity.py +36 -0
- tnfr/telemetry/verbosity.pyi +15 -0
- tnfr/tokens.py +2 -4
- tnfr/tokens.pyi +36 -0
- tnfr/tools/__init__.py +20 -0
- tnfr/tools/domain_templates.py +478 -0
- tnfr/tools/sequence_generator.py +846 -0
- tnfr/topology/__init__.py +13 -0
- tnfr/topology/asymmetry.py +151 -0
- tnfr/trace.py +300 -126
- tnfr/trace.pyi +42 -0
- tnfr/tutorials/__init__.py +38 -0
- tnfr/tutorials/autonomous_evolution.py +285 -0
- tnfr/tutorials/interactive.py +1576 -0
- tnfr/tutorials/structural_metabolism.py +238 -0
- tnfr/types.py +743 -12
- tnfr/types.pyi +357 -0
- tnfr/units.py +68 -0
- tnfr/units.pyi +13 -0
- tnfr/utils/__init__.py +282 -0
- tnfr/utils/__init__.pyi +215 -0
- tnfr/utils/cache.py +4223 -0
- tnfr/utils/cache.pyi +470 -0
- tnfr/{callback_utils.py → utils/callbacks.py} +26 -39
- tnfr/utils/callbacks.pyi +49 -0
- tnfr/utils/chunks.py +108 -0
- tnfr/utils/chunks.pyi +22 -0
- tnfr/utils/data.py +428 -0
- tnfr/utils/data.pyi +74 -0
- tnfr/utils/graph.py +85 -0
- tnfr/utils/graph.pyi +10 -0
- tnfr/utils/init.py +821 -0
- tnfr/utils/init.pyi +80 -0
- tnfr/utils/io.py +559 -0
- tnfr/utils/io.pyi +66 -0
- tnfr/{helpers → utils}/numeric.py +51 -24
- tnfr/utils/numeric.pyi +21 -0
- tnfr/validation/__init__.py +257 -0
- tnfr/validation/__init__.pyi +85 -0
- tnfr/validation/compatibility.py +460 -0
- tnfr/validation/compatibility.pyi +6 -0
- tnfr/validation/config.py +73 -0
- tnfr/validation/graph.py +139 -0
- tnfr/validation/graph.pyi +18 -0
- tnfr/validation/input_validation.py +755 -0
- tnfr/validation/invariants.py +712 -0
- tnfr/validation/rules.py +253 -0
- tnfr/validation/rules.pyi +44 -0
- tnfr/validation/runtime.py +279 -0
- tnfr/validation/runtime.pyi +28 -0
- tnfr/validation/sequence_validator.py +162 -0
- tnfr/validation/soft_filters.py +170 -0
- tnfr/validation/soft_filters.pyi +32 -0
- tnfr/validation/spectral.py +164 -0
- tnfr/validation/spectral.pyi +42 -0
- tnfr/validation/validator.py +1266 -0
- tnfr/validation/window.py +39 -0
- tnfr/validation/window.pyi +1 -0
- tnfr/visualization/__init__.py +98 -0
- tnfr/visualization/cascade_viz.py +256 -0
- tnfr/visualization/hierarchy.py +284 -0
- tnfr/visualization/sequence_plotter.py +784 -0
- tnfr/viz/__init__.py +60 -0
- tnfr/viz/matplotlib.py +278 -0
- tnfr/viz/matplotlib.pyi +35 -0
- tnfr-8.5.0.dist-info/METADATA +573 -0
- tnfr-8.5.0.dist-info/RECORD +353 -0
- {tnfr-4.5.2.dist-info → tnfr-8.5.0.dist-info}/entry_points.txt +1 -0
- {tnfr-4.5.2.dist-info → tnfr-8.5.0.dist-info}/licenses/LICENSE.md +1 -1
- tnfr/collections_utils.py +0 -300
- tnfr/config.py +0 -32
- tnfr/grammar.py +0 -344
- tnfr/graph_utils.py +0 -84
- tnfr/helpers/__init__.py +0 -71
- tnfr/import_utils.py +0 -228
- tnfr/json_utils.py +0 -162
- tnfr/logging_utils.py +0 -116
- tnfr/presets.py +0 -60
- tnfr/validators.py +0 -84
- tnfr/value_utils.py +0 -59
- tnfr-4.5.2.dist-info/METADATA +0 -379
- tnfr-4.5.2.dist-info/RECORD +0 -67
- {tnfr-4.5.2.dist-info → tnfr-8.5.0.dist-info}/WHEEL +0 -0
- {tnfr-4.5.2.dist-info → tnfr-8.5.0.dist-info}/top_level.txt +0 -0
tnfr/collections_utils.py
DELETED
|
@@ -1,300 +0,0 @@
|
|
|
1
|
-
"""Utilities for working with generic collections and weight mappings."""
|
|
2
|
-
|
|
3
|
-
from __future__ import annotations
|
|
4
|
-
|
|
5
|
-
import logging
|
|
6
|
-
from collections import deque
|
|
7
|
-
from collections.abc import Collection, Iterable, Mapping, Sequence
|
|
8
|
-
from itertools import islice
|
|
9
|
-
from typing import Any, Callable, Iterator, TypeVar, cast
|
|
10
|
-
|
|
11
|
-
from .logging_utils import get_logger
|
|
12
|
-
from .logging_utils import warn_once as _warn_once_factory
|
|
13
|
-
from .value_utils import convert_value
|
|
14
|
-
from .helpers.numeric import kahan_sum_nd
|
|
15
|
-
|
|
16
|
-
T = TypeVar("T")
|
|
17
|
-
|
|
18
|
-
logger = get_logger(__name__)
|
|
19
|
-
|
|
20
|
-
STRING_TYPES = (str, bytes, bytearray)
|
|
21
|
-
|
|
22
|
-
NEGATIVE_WEIGHTS_MSG = "Negative weights detected: %s"
|
|
23
|
-
|
|
24
|
-
_NEGATIVE_WARN_ONCE_MAXSIZE = 1024
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
def negative_weights_warn_once(
    *, maxsize: int = _NEGATIVE_WARN_ONCE_MAXSIZE
) -> Callable[[Mapping[str, float]], None]:
    """Build a ``WarnOnce`` callable that reports negative weights.

    The returned callable can be shared across several
    :func:`normalize_weights` invocations so that duplicate warnings for
    the same keys are suppressed.
    """
    handler = _warn_once_factory(logger, NEGATIVE_WEIGHTS_MSG, maxsize=maxsize)
    return handler
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
def _log_negative_weights(negatives: Mapping[str, float]) -> None:
    """Warn about negative weights on every call, with no key deduplication."""
    logger.warning(NEGATIVE_WEIGHTS_MSG, negatives)
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
def _resolve_negative_warn_handler(
    warn_once: bool | Callable[[Mapping[str, float]], None]
) -> Callable[[Mapping[str, float]], None]:
    """Map the ``warn_once`` flag or callable onto a warning handler."""
    if callable(warn_once):
        # The caller supplied its own handler; use it verbatim.
        return warn_once
    # Truthy -> fresh deduplicating handler; falsy -> plain logging.
    return negative_weights_warn_once() if warn_once else _log_negative_weights
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
def is_non_string_sequence(obj: Any) -> bool:
    """Tell whether ``obj`` is iterable but neither string-like nor a mapping."""
    if not isinstance(obj, Iterable):
        return False
    return not isinstance(obj, (*STRING_TYPES, Mapping))
|
|
61
|
-
|
|
62
|
-
|
|
63
|
-
def flatten_structure(
    obj: Any,
    *,
    expand: Callable[[Any], Iterable[Any] | None] | None = None,
) -> Iterator[Any]:
    """Yield the leaf items contained in ``obj``.

    Iteration order follows the input's order when it has one; unordered
    inputs such as :class:`set` yield in arbitrary order. Mappings count
    as leaves and are never expanded.

    Parameters
    ----------
    obj:
        Possibly nested iterable structure.
    expand:
        Optional hook returning a replacement iterable for an item.
        Returning ``None`` lets the item be processed normally.
    """
    pending = deque([obj])
    # Ids of containers already expanded; guards against cycles.
    expanded_ids: set[int] = set()
    while pending:
        current = pending.pop()
        marker = id(current)
        if marker in expanded_ids:
            continue
        if expand is not None:
            replacement = expand(current)
            if replacement is not None:
                expanded_ids.add(marker)
                pending.extendleft(replacement)
                continue
        if not is_non_string_sequence(current):
            yield current
        else:
            expanded_ids.add(marker)
            pending.extendleft(current)
|
|
101
|
-
|
|
102
|
-
|
|
103
|
-
__all__ = (
|
|
104
|
-
"MAX_MATERIALIZE_DEFAULT",
|
|
105
|
-
"normalize_materialize_limit",
|
|
106
|
-
"is_non_string_sequence",
|
|
107
|
-
"flatten_structure",
|
|
108
|
-
"STRING_TYPES",
|
|
109
|
-
"ensure_collection",
|
|
110
|
-
"normalize_weights",
|
|
111
|
-
"negative_weights_warn_once",
|
|
112
|
-
"normalize_counter",
|
|
113
|
-
"mix_groups",
|
|
114
|
-
)
|
|
115
|
-
|
|
116
|
-
MAX_MATERIALIZE_DEFAULT: int = 1000
|
|
117
|
-
"""Default materialization limit used by :func:`ensure_collection`.
|
|
118
|
-
|
|
119
|
-
This guard prevents accidentally consuming huge or infinite iterables when a
|
|
120
|
-
limit is not explicitly provided. Pass ``max_materialize=None`` to disable the
|
|
121
|
-
limit.
|
|
122
|
-
"""
|
|
123
|
-
|
|
124
|
-
|
|
125
|
-
def normalize_materialize_limit(max_materialize: int | None) -> int | None:
    """Validate ``max_materialize`` and return it as a usable limit.

    ``None`` means "no limit" and is passed through unchanged. Any other
    value is coerced with :class:`int`.

    Raises
    ------
    ValueError
        If the coerced limit is negative.
    """
    if max_materialize is None:
        return None
    normalized = int(max_materialize)
    if normalized >= 0:
        return normalized
    raise ValueError("'max_materialize' must be non-negative")
|
|
133
|
-
|
|
134
|
-
|
|
135
|
-
def ensure_collection(
    it: Iterable[T],
    *,
    max_materialize: int | None = MAX_MATERIALIZE_DEFAULT,
    error_msg: str | None = None,
) -> Collection[T]:
    """Coerce ``it`` into a :class:`Collection`, materializing if required.

    The checks run in this order:

    1. An object that already is a collection is returned as-is, except
       string-like input (``str``, ``bytes``, ``bytearray``) which is
       wrapped in a one-element tuple.
    2. Non-iterable input raises :class:`TypeError`.
    3. Anything else is drained into a tuple, bounded by
       ``max_materialize`` items (``None`` disables the bound).
       ``error_msg`` overrides the :class:`ValueError` message used when
       the bound is exceeded. The input is consumed at most once and no
       items beyond the bound (plus one sentinel) are held in memory.
    """
    # Step 1: collections (and raw strings/bytes) need no materialization.
    if isinstance(it, Collection):
        return (cast(T, it),) if isinstance(it, STRING_TYPES) else it

    # Step 2: reject non-iterables explicitly.
    if not isinstance(it, Iterable):
        raise TypeError(f"{it!r} is not iterable")

    # Step 3: validate the limit, then materialize in a single pass.
    bound = normalize_materialize_limit(max_materialize)
    if bound is None:
        return tuple(it)
    if bound == 0:
        return ()

    # Take one extra item so an over-long iterable is detectable.
    collected = tuple(islice(it, bound + 1))
    if len(collected) <= bound:
        return collected
    preview = ", ".join(repr(x) for x in collected[:3])
    raise ValueError(
        error_msg
        or f"Iterable produced {len(collected)} items, exceeds limit {bound}; first items: [{preview}]"
    )
|
|
183
|
-
|
|
184
|
-
|
|
185
|
-
def _convert_and_validate_weights(
    dict_like: Mapping[str, Any],
    keys: Iterable[str] | Sequence[str],
    default: float,
    *,
    error_on_conversion: bool,
    error_on_negative: bool,
    warn_once: bool | Callable[[Mapping[str, float]], None],
) -> tuple[dict[str, float], list[str], float]:
    """Return converted weights, deduplicated keys and the accumulated total."""

    # Collapse duplicate keys while preserving first-occurrence order.
    keys_list = list(dict.fromkeys(keys))
    default_float = float(default)

    def convert(k: str) -> float:
        # Convert one entry to float; when conversion fails and
        # ``error_on_conversion`` is disabled, fall back to the default.
        ok, val = convert_value(
            dict_like.get(k, default_float),
            float,
            strict=error_on_conversion,
            key=k,
            log_level=logging.WARNING,
        )
        return cast(float, val) if ok else default_float

    weights = {k: convert(k) for k in keys_list}
    negatives = {k: w for k, w in weights.items() if w < 0}
    # Compensated (Kahan) summation keeps the total numerically stable.
    total = kahan_sum_nd(((w,) for w in weights.values()), dims=1)[0]

    if negatives:
        if error_on_negative:
            raise ValueError(NEGATIVE_WEIGHTS_MSG % negatives)
        warn_negative = _resolve_negative_warn_handler(warn_once)
        warn_negative(negatives)
        # Zero out negative entries and remove their contribution from the
        # running total so a later renormalization stays consistent.
        for key, weight in negatives.items():
            weights[key] = 0.0
            total -= weight

    return weights, keys_list, total
|
|
223
|
-
|
|
224
|
-
|
|
225
|
-
def normalize_weights(
    dict_like: Mapping[str, Any],
    keys: Iterable[str] | Sequence[str],
    default: float = 0.0,
    *,
    error_on_negative: bool = False,
    warn_once: bool | Callable[[Mapping[str, float]], None] = True,
    error_on_conversion: bool = False,
) -> dict[str, float]:
    """Scale the weights selected by ``keys`` so that they sum to 1.

    ``keys`` may be any iterable of strings; it is materialized once and
    duplicate entries collapse to their first occurrence.

    Negative weights follow ``error_on_negative``: when ``True`` a
    :class:`ValueError` is raised; otherwise they are logged, replaced by
    ``0`` and the remaining weights are renormalized. When no weight is
    positive a uniform distribution is returned.

    Conversion failures are governed separately by ``error_on_conversion``.
    When ``True`` a :class:`TypeError` or :class:`ValueError` raised while
    converting a value to ``float`` is propagated; otherwise the error is
    logged and ``default`` is substituted.

    ``warn_once`` accepts a boolean or a callable. ``False`` logs every
    batch of negative weights via :func:`logging.Logger.warning`. ``True``
    (the default) builds a fresh ``WarnOnce`` instance for the call,
    emitting one warning that covers all negative keys. Pass a callable
    such as :func:`negative_weights_warn_once` to reuse deduplication
    state across calls.
    """
    weights, keys_list, total = _convert_and_validate_weights(
        dict_like,
        keys,
        default,
        error_on_conversion=error_on_conversion,
        error_on_negative=error_on_negative,
        warn_once=warn_once,
    )
    if not keys_list:
        return {}
    if total > 0:
        return {k: w / total for k, w in weights.items()}
    # All weights non-positive: fall back to a uniform distribution.
    share = 1.0 / len(keys_list)
    return {k: share for k in keys_list}
|
|
270
|
-
|
|
271
|
-
|
|
272
|
-
def normalize_counter(
    counts: Mapping[str, float | int],
) -> tuple[dict[str, float], float]:
    """Normalize a counter-like mapping into proportions.

    Parameters
    ----------
    counts:
        Mapping from keys to (possibly integer) counts.

    Returns
    -------
    tuple[dict[str, float], float]
        The distribution of proportions (entries whose count is falsy are
        dropped) and the accumulated total. When the total is not
        positive an empty distribution and ``0.0`` are returned.
    """
    # Compensated summation keeps the total stable for float counts.
    total = kahan_sum_nd(((c,) for c in counts.values()), dims=1)[0]
    if total <= 0:
        # Fix: return a float here — the previous ``0`` (int) contradicted
        # the declared ``float`` return type.
        return {}, 0.0
    dist = {k: v / total for k, v in counts.items() if v}
    return dist, total
|
|
281
|
-
|
|
282
|
-
|
|
283
|
-
def mix_groups(
    dist: Mapping[str, float],
    groups: Mapping[str, Iterable[str]],
    *,
    prefix: str = "_",
) -> dict[str, float]:
    """Extend ``dist`` with one aggregate entry per group.

    Each group label yields a ``f"{prefix}{label}"`` key whose value is
    the compensated sum of the member keys' values in ``dist`` (missing
    members count as ``0.0``). The original entries are preserved.
    """
    out: dict[str, float] = dict(dist)
    for label, keys in groups.items():
        member_values = ((dist.get(k, 0.0),) for k in keys)
        out[f"{prefix}{label}"] = kahan_sum_nd(member_values, dims=1)[0]
    return out
|
tnfr/config.py
DELETED
|
@@ -1,32 +0,0 @@
|
|
|
1
|
-
"""Configuration utilities."""
|
|
2
|
-
|
|
3
|
-
from __future__ import annotations
|
|
4
|
-
from typing import Any, TYPE_CHECKING
|
|
5
|
-
from collections.abc import Mapping
|
|
6
|
-
from pathlib import Path
|
|
7
|
-
from .io import read_structured_file
|
|
8
|
-
|
|
9
|
-
from .constants import inject_defaults
|
|
10
|
-
|
|
11
|
-
if TYPE_CHECKING: # pragma: no cover - only for type checkers
|
|
12
|
-
import networkx as nx # type: ignore[import-untyped]
|
|
13
|
-
|
|
14
|
-
__all__ = ("load_config", "apply_config")
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
def load_config(path: str | Path) -> Mapping[str, Any]:
    """Parse a JSON/YAML configuration file into a parameter mapping."""
    path_obj = Path(path) if not isinstance(path, Path) else path
    data = read_structured_file(path_obj)
    if isinstance(data, Mapping):
        return data
    raise ValueError("Configuration file must contain an object")
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
def apply_config(G: nx.Graph, path: str | Path) -> None:
    """Load parameters from ``path`` and inject them into ``G.graph``.

    Delegates to :func:`inject_defaults` (with ``override=True``) so the
    canonical default semantics are reused.
    """
    inject_defaults(G, load_config(path), override=True)
|
tnfr/grammar.py
DELETED
|
@@ -1,344 +0,0 @@
|
|
|
1
|
-
"""Grammar rules."""
|
|
2
|
-
|
|
3
|
-
from __future__ import annotations
|
|
4
|
-
from dataclasses import dataclass
|
|
5
|
-
from typing import Any, Iterable, Optional, Callable
|
|
6
|
-
|
|
7
|
-
from .constants import DEFAULTS, get_aliases, get_param
|
|
8
|
-
from .alias import get_attr
|
|
9
|
-
from .helpers.numeric import clamp01
|
|
10
|
-
from .glyph_history import recent_glyph
|
|
11
|
-
from .types import Glyph
|
|
12
|
-
from .operators import apply_glyph # avoid repeated import inside functions
|
|
13
|
-
from .metrics.common import normalize_dnfr
|
|
14
|
-
|
|
15
|
-
ALIAS_SI = get_aliases("SI")
|
|
16
|
-
ALIAS_D2EPI = get_aliases("D2EPI")
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
@dataclass
class GrammarContext:
    """Graph-level settings shared by the grammar helpers.

    Bundles the configuration mappings into one object so helper
    functions do not need long positional parameter lists.
    """

    G: Any
    cfg_soft: dict[str, Any]
    cfg_canon: dict[str, Any]
    norms: dict[str, Any]

    @classmethod
    def from_graph(cls, G: Any) -> "GrammarContext":
        """Build a context from ``G``'s graph-level attributes."""
        soft = G.graph.get("GRAMMAR", DEFAULTS.get("GRAMMAR", {}))
        canon = G.graph.get("GRAMMAR_CANON", DEFAULTS.get("GRAMMAR_CANON", {}))
        norms = G.graph.get("_sel_norms") or {}
        return cls(G=G, cfg_soft=soft, cfg_canon=canon, norms=norms)
|
|
43
|
-
|
|
44
|
-
|
|
45
|
-
__all__ = (
|
|
46
|
-
"CANON_COMPAT",
|
|
47
|
-
"CANON_FALLBACK",
|
|
48
|
-
"enforce_canonical_grammar",
|
|
49
|
-
"on_applied_glyph",
|
|
50
|
-
"apply_glyph_with_grammar",
|
|
51
|
-
"GrammarContext",
|
|
52
|
-
)
|
|
53
|
-
|
|
54
|
-
# -------------------------
|
|
55
|
-
# Per-node grammar state
|
|
56
|
-
# -------------------------
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
def _gram_state(nd: dict[str, Any]) -> dict[str, Any]:
|
|
60
|
-
"""Create or return the node grammar state.
|
|
61
|
-
|
|
62
|
-
Fields:
|
|
63
|
-
- thol_open (bool)
|
|
64
|
-
- thol_len (int)
|
|
65
|
-
"""
|
|
66
|
-
return nd.setdefault("_GRAM", {"thol_open": False, "thol_len": 0})
|
|
67
|
-
|
|
68
|
-
|
|
69
|
-
# -------------------------
|
|
70
|
-
# Canonical compatibilities (allowed next glyphs)
|
|
71
|
-
# -------------------------
|
|
72
|
-
# For each glyph, the set of glyphs allowed to follow it in a canonical
# sequence. Transitions outside these sets fall back via CANON_FALLBACK.
CANON_COMPAT: dict[Glyph, set[Glyph]] = {
    # Start / opening
    Glyph.AL: {Glyph.EN, Glyph.RA, Glyph.NAV, Glyph.VAL, Glyph.UM},
    Glyph.EN: {Glyph.IL, Glyph.UM, Glyph.RA, Glyph.NAV},
    # Stabilization / diffusion / coupling
    Glyph.IL: {Glyph.RA, Glyph.VAL, Glyph.UM, Glyph.SHA},
    Glyph.UM: {Glyph.RA, Glyph.IL, Glyph.VAL, Glyph.NAV},
    Glyph.RA: {Glyph.IL, Glyph.VAL, Glyph.UM, Glyph.NAV},
    Glyph.VAL: {Glyph.UM, Glyph.RA, Glyph.IL, Glyph.NAV},
    # Dissonance → transition → mutation
    Glyph.OZ: {Glyph.ZHIR, Glyph.NAV},
    Glyph.ZHIR: {Glyph.IL, Glyph.NAV},
    Glyph.NAV: {Glyph.OZ, Glyph.ZHIR, Glyph.RA, Glyph.IL, Glyph.UM},
    # Closures / latencies
    Glyph.SHA: {Glyph.AL, Glyph.EN},
    Glyph.NUL: {Glyph.AL, Glyph.IL},
    # Self-organizing blocks
    Glyph.THOL: {
        Glyph.OZ,
        Glyph.ZHIR,
        Glyph.NAV,
        Glyph.RA,
        Glyph.IL,
        Glyph.UM,
        Glyph.SHA,
        Glyph.NUL,
    },
}
|
|
100
|
-
|
|
101
|
-
# Canonical fallbacks when a transition is not allowed: the glyph applied
# instead of a candidate rejected by CANON_COMPAT.
CANON_FALLBACK: dict[Glyph, Glyph] = {
    Glyph.AL: Glyph.EN,
    Glyph.EN: Glyph.IL,
    Glyph.IL: Glyph.RA,
    Glyph.NAV: Glyph.RA,
    Glyph.NUL: Glyph.AL,
    Glyph.OZ: Glyph.ZHIR,
    Glyph.RA: Glyph.IL,
    Glyph.SHA: Glyph.AL,
    Glyph.THOL: Glyph.NAV,
    Glyph.UM: Glyph.RA,
    Glyph.VAL: Glyph.RA,
    Glyph.ZHIR: Glyph.IL,
}
|
|
116
|
-
|
|
117
|
-
|
|
118
|
-
def _coerce_glyph(val: Any) -> Glyph | Any:
    """Coerce ``val`` into a :class:`Glyph`, returning it unchanged on failure."""
    try:
        coerced = Glyph(val)
    except (ValueError, TypeError):
        return val
    return coerced
|
|
124
|
-
|
|
125
|
-
|
|
126
|
-
def _glyph_fallback(cand_key: str, fallbacks: dict[str, Any]) -> Glyph | str:
    """Resolve the fallback glyph for ``cand_key``.

    An explicit entry in ``fallbacks`` wins over the canonical table; the
    chosen value is coerced to :class:`Glyph` when possible.
    """
    glyph_key = _coerce_glyph(cand_key)
    if isinstance(glyph_key, Glyph):
        canonical = CANON_FALLBACK.get(glyph_key, cand_key)
    else:
        canonical = cand_key
    chosen = fallbacks.get(cand_key, canonical)
    return _coerce_glyph(chosen)
|
|
136
|
-
|
|
137
|
-
|
|
138
|
-
# -------------------------
|
|
139
|
-
# THOL closures and ZHIR preconditions
|
|
140
|
-
# -------------------------
|
|
141
|
-
|
|
142
|
-
|
|
143
|
-
def get_norm(ctx: GrammarContext, key: str) -> float:
    """Return the global normalisation constant ``key`` from ``ctx.norms``.

    Defaults to 1.0 when missing; a stored value of 0.0 is also replaced
    by 1.0 so callers can divide by the result safely.
    """
    value = float(ctx.norms.get(key, 1.0))
    return value if value else 1.0
|
|
146
|
-
|
|
147
|
-
|
|
148
|
-
def _norm_attr(ctx: GrammarContext, nd, attr_alias: str, norm_key: str) -> float:
    """Normalise the magnitude of ``attr_alias`` on ``nd`` by ``norm_key``.

    The result is clamped to [0, 1]; ``get_norm`` guarantees a non-zero
    divisor.
    """
    ceiling = get_norm(ctx, norm_key)
    magnitude = abs(get_attr(nd, attr_alias, 0.0))
    return clamp01(magnitude / ceiling)
|
|
153
|
-
|
|
154
|
-
|
|
155
|
-
def _si(nd) -> float:
    """Return the node's sense index, clamped to [0, 1] (defaults to 0.5)."""
    return clamp01(get_attr(nd, ALIAS_SI, 0.5))
|
|
157
|
-
|
|
158
|
-
|
|
159
|
-
def _accel_norm(ctx: GrammarContext, nd) -> float:
    """Normalise acceleration using the global maximum.

    Delegates to ``_norm_attr`` with the second-derivative alias and the
    global ``"accel_max"`` ceiling; the result lies in [0, 1].
    """
    return _norm_attr(ctx, nd, ALIAS_D2EPI, "accel_max")
|
|
162
|
-
|
|
163
|
-
|
|
164
|
-
def _check_repeats(ctx: GrammarContext, n, cand: Glyph | str) -> Glyph | str:
    """Avoid recent repetitions according to ``ctx.cfg_soft``.

    When ``cand`` appears in the configured avoid-list and was applied to
    the node within the repeat window, a fallback glyph is substituted.
    """
    cfg = ctx.cfg_soft
    window = int(cfg.get("window", 0))
    if window <= 0:
        return cand
    key = cand.value if isinstance(cand, Glyph) else str(cand)
    if key not in set(cfg.get("avoid_repeats", [])):
        return cand
    nd = ctx.G.nodes[n]
    if not recent_glyph(nd, key, window):
        return cand
    return _glyph_fallback(key, cfg.get("fallbacks", {}))
|
|
175
|
-
|
|
176
|
-
|
|
177
|
-
def _maybe_force(
    ctx: GrammarContext,
    n,
    cand: Glyph | str,
    original: Glyph | str,
    accessor: Callable[[GrammarContext, dict[str, Any]], float],
    key: str,
) -> Glyph | str:
    """Restore ``original`` if ``accessor`` exceeds the ``key`` threshold.

    No-op when a previous stage did not change the candidate.
    """
    if cand == original:
        return cand
    threshold = float(ctx.cfg_soft.get(key, 0.60))
    metric = accessor(ctx, ctx.G.nodes[n])
    return original if metric >= threshold else cand
|
|
192
|
-
|
|
193
|
-
|
|
194
|
-
def _check_oz_to_zhir(ctx: GrammarContext, n, cand: Glyph | str) -> Glyph | str:
    """Gate mutation (ZHIR) behind recent dissonance (OZ) or high |ΔNFR|.

    A ZHIR candidate with neither precondition is downgraded to OZ.
    """
    if _coerce_glyph(cand) != Glyph.ZHIR:
        return cand
    nd = ctx.G.nodes[n]
    cfg = ctx.cfg_canon
    window = int(cfg.get("zhir_requires_oz_window", 3))
    dnfr_floor = float(cfg.get("zhir_dnfr_min", 0.05))
    dnfr_max = get_norm(ctx, "dnfr_max")
    has_recent_oz = recent_glyph(nd, Glyph.OZ, window)
    if not has_recent_oz and normalize_dnfr(nd, dnfr_max) < dnfr_floor:
        return Glyph.OZ
    return cand
|
|
208
|
-
|
|
209
|
-
|
|
210
|
-
def _check_thol_closure(
    ctx: GrammarContext, n, cand: Glyph | str, st: dict[str, Any]
) -> Glyph | str:
    """Close an open THOL block once it stabilises or reaches max length.

    Mutates ``st`` by advancing ``thol_len`` on every call while the block
    is open. Closure chooses NUL when the sense index is high, SHA
    otherwise.
    """
    if not st.get("thol_open", False):
        return cand
    run_length = int(st.get("thol_len", 0)) + 1
    st["thol_len"] = run_length
    cfg = ctx.cfg_canon
    min_len = int(cfg.get("thol_min_len", 2))
    max_len = int(cfg.get("thol_max_len", 6))
    close_dnfr = float(cfg.get("thol_close_dnfr", 0.15))
    nd = ctx.G.nodes[n]
    if run_length >= max_len or (
        run_length >= min_len
        and normalize_dnfr(nd, get_norm(ctx, "dnfr_max")) <= close_dnfr
    ):
        high_si = _si(nd) >= float(cfg.get("si_high", 0.66))
        return Glyph.NUL if high_si else Glyph.SHA
    return cand
|
|
231
|
-
|
|
232
|
-
|
|
233
|
-
def _check_compatibility(ctx: GrammarContext, n, cand: Glyph | str) -> Glyph | str:
    """Enforce the canonical transition table against the node's history.

    Looks at the last applied glyph; when the candidate is not an allowed
    successor, substitutes the canonical fallback for the previous glyph.
    """
    nd = ctx.G.nodes[n]
    hist = nd.get("glyph_history")
    prev_glyph = _coerce_glyph(hist[-1] if hist else None)
    if not isinstance(prev_glyph, Glyph):
        # No usable history: nothing to check against.
        return cand
    allowed = CANON_COMPAT.get(prev_glyph)
    if allowed is None:
        # Previous glyph imposes no restriction.
        return cand
    cand_glyph = _coerce_glyph(cand)
    if not isinstance(cand_glyph, Glyph):
        # Non-glyph candidate after a restricted glyph: fall back, keeping
        # the raw candidate as the default when no fallback exists.
        return CANON_FALLBACK.get(prev_glyph, cand)
    if cand_glyph in allowed:
        return cand
    return CANON_FALLBACK.get(prev_glyph, cand_glyph)
|
|
249
|
-
|
|
250
|
-
|
|
251
|
-
# -------------------------
|
|
252
|
-
# Core: enforce grammar on a candidate
|
|
253
|
-
# -------------------------
|
|
254
|
-
|
|
255
|
-
|
|
256
|
-
def enforce_canonical_grammar(
    G, n, cand: Glyph | str, ctx: Optional[GrammarContext] = None
) -> Glyph | str:
    """Validate and adjust a candidate glyph according to canonical grammar.

    Key rules:
    - Repeat window with forces based on |ΔNFR| and acceleration.
    - Transition compatibilities (TNFR path).
    - OZ→ZHIR: mutation requires recent dissonance or high |ΔNFR|.
    - THOL[...]: forces closure with SHA or NUL when the field stabilises
      or block length is reached; maintains per-node state.

    Returns the effective glyph to apply; a ``str`` input yields a ``str``
    result, a ``Glyph`` input yields a ``Glyph``.
    """
    if ctx is None:
        ctx = GrammarContext.from_graph(G)

    state = _gram_state(ctx.G.nodes[n])

    raw = cand
    was_str = isinstance(raw, str)
    candidate = _coerce_glyph(raw)

    # Glyphs outside the canonical alphabet pass through untouched.
    if not isinstance(candidate, Glyph) or candidate not in CANON_COMPAT:
        return raw if was_str else candidate

    requested = candidate
    candidate = _check_repeats(ctx, n, candidate)

    def _dnfr_metric(ctx_: GrammarContext, node_data) -> float:
        # |ΔNFR| normalised by the global maximum.
        return normalize_dnfr(node_data, get_norm(ctx_, "dnfr_max"))

    candidate = _maybe_force(ctx, n, candidate, requested, _dnfr_metric, "force_dnfr")
    candidate = _maybe_force(ctx, n, candidate, requested, _accel_norm, "force_accel")
    candidate = _check_oz_to_zhir(ctx, n, candidate)
    candidate = _check_thol_closure(ctx, n, candidate, state)
    candidate = _check_compatibility(ctx, n, candidate)

    final = _coerce_glyph(candidate)
    if was_str:
        return final.value if isinstance(final, Glyph) else str(candidate)
    return final if isinstance(final, Glyph) else candidate
|
|
300
|
-
|
|
301
|
-
|
|
302
|
-
# -------------------------
|
|
303
|
-
# Post-selection: update grammar state
|
|
304
|
-
# -------------------------
|
|
305
|
-
|
|
306
|
-
|
|
307
|
-
def on_applied_glyph(G, n, applied: str) -> None:
    """Update per-node grammar state after ``applied`` was committed.

    THOL opens a block (resetting its length counter); SHA or NUL closes
    it. Other glyphs leave the state untouched.
    """
    nd = G.nodes[n]
    st = _gram_state(nd)
    # ``applied`` arrives as a string (see ``apply_glyph_with_grammar``);
    # coerce it so the enum comparisons below hold even when ``Glyph`` is
    # not a ``str`` subclass — comparing a plain str to a non-str enum
    # member is always False and would leave THOL state stale.
    glyph = _coerce_glyph(applied)
    if glyph == Glyph.THOL:
        st["thol_open"] = True
        st["thol_len"] = 0
    elif glyph in (Glyph.SHA, Glyph.NUL):
        st["thol_open"] = False
        st["thol_len"] = 0
|
|
316
|
-
|
|
317
|
-
|
|
318
|
-
# -------------------------
|
|
319
|
-
# Direct application with canonical grammar
|
|
320
|
-
# -------------------------
|
|
321
|
-
|
|
322
|
-
|
|
323
|
-
def apply_glyph_with_grammar(
    G,
    nodes: Optional[Iterable[Any]],
    glyph: Glyph | str,
    window: Optional[int] = None,
) -> None:
    """Apply ``glyph`` to ``nodes`` while enforcing the canonical grammar.

    ``nodes`` may be a ``NodeView`` or any iterable; ``None`` means every
    node in ``G``. The iterable is consumed directly to avoid unnecessary
    materialisation; callers must materialise it if they need indexing.
    """
    if window is None:
        window = get_param(G, "GLYPH_HYSTERESIS_WINDOW")

    requested = glyph.value if isinstance(glyph, Glyph) else str(glyph)
    targets = G.nodes() if nodes is None else nodes
    # Build the context once and reuse it across the whole batch.
    ctx = GrammarContext.from_graph(G)
    for node in targets:
        effective = enforce_canonical_grammar(G, node, requested, ctx)
        apply_glyph(G, node, effective, window=window)
        on_applied_glyph(G, node, effective)
|