tnfr 3.0.3__py3-none-any.whl → 8.5.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of tnfr might be problematic. Click here for more details.
- tnfr/__init__.py +375 -56
- tnfr/__init__.pyi +33 -0
- tnfr/_compat.py +10 -0
- tnfr/_generated_version.py +34 -0
- tnfr/_version.py +49 -0
- tnfr/_version.pyi +7 -0
- tnfr/alias.py +723 -0
- tnfr/alias.pyi +108 -0
- tnfr/backends/__init__.py +354 -0
- tnfr/backends/jax_backend.py +173 -0
- tnfr/backends/numpy_backend.py +238 -0
- tnfr/backends/optimized_numpy.py +420 -0
- tnfr/backends/torch_backend.py +408 -0
- tnfr/cache.py +171 -0
- tnfr/cache.pyi +13 -0
- tnfr/cli/__init__.py +110 -0
- tnfr/cli/__init__.pyi +26 -0
- tnfr/cli/arguments.py +489 -0
- tnfr/cli/arguments.pyi +29 -0
- tnfr/cli/execution.py +914 -0
- tnfr/cli/execution.pyi +70 -0
- tnfr/cli/interactive_validator.py +614 -0
- tnfr/cli/utils.py +51 -0
- tnfr/cli/utils.pyi +7 -0
- tnfr/cli/validate.py +236 -0
- tnfr/compat/__init__.py +85 -0
- tnfr/compat/dataclass.py +136 -0
- tnfr/compat/jsonschema_stub.py +61 -0
- tnfr/compat/matplotlib_stub.py +73 -0
- tnfr/compat/numpy_stub.py +155 -0
- tnfr/config/__init__.py +224 -0
- tnfr/config/__init__.pyi +10 -0
- tnfr/config/constants.py +104 -0
- tnfr/config/constants.pyi +12 -0
- tnfr/config/defaults.py +54 -0
- tnfr/config/defaults_core.py +212 -0
- tnfr/config/defaults_init.py +33 -0
- tnfr/config/defaults_metric.py +104 -0
- tnfr/config/feature_flags.py +81 -0
- tnfr/config/feature_flags.pyi +16 -0
- tnfr/config/glyph_constants.py +31 -0
- tnfr/config/init.py +77 -0
- tnfr/config/init.pyi +8 -0
- tnfr/config/operator_names.py +254 -0
- tnfr/config/operator_names.pyi +36 -0
- tnfr/config/physics_derivation.py +354 -0
- tnfr/config/presets.py +83 -0
- tnfr/config/presets.pyi +7 -0
- tnfr/config/security.py +927 -0
- tnfr/config/thresholds.py +114 -0
- tnfr/config/tnfr_config.py +498 -0
- tnfr/constants/__init__.py +92 -0
- tnfr/constants/__init__.pyi +92 -0
- tnfr/constants/aliases.py +33 -0
- tnfr/constants/aliases.pyi +27 -0
- tnfr/constants/init.py +33 -0
- tnfr/constants/init.pyi +12 -0
- tnfr/constants/metric.py +104 -0
- tnfr/constants/metric.pyi +19 -0
- tnfr/core/__init__.py +33 -0
- tnfr/core/container.py +226 -0
- tnfr/core/default_implementations.py +329 -0
- tnfr/core/interfaces.py +279 -0
- tnfr/dynamics/__init__.py +238 -0
- tnfr/dynamics/__init__.pyi +83 -0
- tnfr/dynamics/adaptation.py +267 -0
- tnfr/dynamics/adaptation.pyi +7 -0
- tnfr/dynamics/adaptive_sequences.py +189 -0
- tnfr/dynamics/adaptive_sequences.pyi +14 -0
- tnfr/dynamics/aliases.py +23 -0
- tnfr/dynamics/aliases.pyi +19 -0
- tnfr/dynamics/bifurcation.py +232 -0
- tnfr/dynamics/canonical.py +229 -0
- tnfr/dynamics/canonical.pyi +48 -0
- tnfr/dynamics/coordination.py +385 -0
- tnfr/dynamics/coordination.pyi +25 -0
- tnfr/dynamics/dnfr.py +3034 -0
- tnfr/dynamics/dnfr.pyi +26 -0
- tnfr/dynamics/dynamic_limits.py +225 -0
- tnfr/dynamics/feedback.py +252 -0
- tnfr/dynamics/feedback.pyi +24 -0
- tnfr/dynamics/fused_dnfr.py +454 -0
- tnfr/dynamics/homeostasis.py +157 -0
- tnfr/dynamics/homeostasis.pyi +14 -0
- tnfr/dynamics/integrators.py +661 -0
- tnfr/dynamics/integrators.pyi +36 -0
- tnfr/dynamics/learning.py +310 -0
- tnfr/dynamics/learning.pyi +33 -0
- tnfr/dynamics/metabolism.py +254 -0
- tnfr/dynamics/nbody.py +796 -0
- tnfr/dynamics/nbody_tnfr.py +783 -0
- tnfr/dynamics/propagation.py +326 -0
- tnfr/dynamics/runtime.py +908 -0
- tnfr/dynamics/runtime.pyi +77 -0
- tnfr/dynamics/sampling.py +36 -0
- tnfr/dynamics/sampling.pyi +7 -0
- tnfr/dynamics/selectors.py +711 -0
- tnfr/dynamics/selectors.pyi +85 -0
- tnfr/dynamics/structural_clip.py +207 -0
- tnfr/errors/__init__.py +37 -0
- tnfr/errors/contextual.py +492 -0
- tnfr/execution.py +223 -0
- tnfr/execution.pyi +45 -0
- tnfr/extensions/__init__.py +205 -0
- tnfr/extensions/__init__.pyi +18 -0
- tnfr/extensions/base.py +173 -0
- tnfr/extensions/base.pyi +35 -0
- tnfr/extensions/business/__init__.py +71 -0
- tnfr/extensions/business/__init__.pyi +11 -0
- tnfr/extensions/business/cookbook.py +88 -0
- tnfr/extensions/business/cookbook.pyi +8 -0
- tnfr/extensions/business/health_analyzers.py +202 -0
- tnfr/extensions/business/health_analyzers.pyi +9 -0
- tnfr/extensions/business/patterns.py +183 -0
- tnfr/extensions/business/patterns.pyi +8 -0
- tnfr/extensions/medical/__init__.py +73 -0
- tnfr/extensions/medical/__init__.pyi +11 -0
- tnfr/extensions/medical/cookbook.py +88 -0
- tnfr/extensions/medical/cookbook.pyi +8 -0
- tnfr/extensions/medical/health_analyzers.py +181 -0
- tnfr/extensions/medical/health_analyzers.pyi +9 -0
- tnfr/extensions/medical/patterns.py +163 -0
- tnfr/extensions/medical/patterns.pyi +8 -0
- tnfr/flatten.py +262 -0
- tnfr/flatten.pyi +21 -0
- tnfr/gamma.py +354 -0
- tnfr/gamma.pyi +36 -0
- tnfr/glyph_history.py +377 -0
- tnfr/glyph_history.pyi +35 -0
- tnfr/glyph_runtime.py +19 -0
- tnfr/glyph_runtime.pyi +8 -0
- tnfr/immutable.py +218 -0
- tnfr/immutable.pyi +36 -0
- tnfr/initialization.py +203 -0
- tnfr/initialization.pyi +65 -0
- tnfr/io.py +10 -0
- tnfr/io.pyi +13 -0
- tnfr/locking.py +37 -0
- tnfr/locking.pyi +7 -0
- tnfr/mathematics/__init__.py +79 -0
- tnfr/mathematics/backend.py +453 -0
- tnfr/mathematics/backend.pyi +99 -0
- tnfr/mathematics/dynamics.py +408 -0
- tnfr/mathematics/dynamics.pyi +90 -0
- tnfr/mathematics/epi.py +391 -0
- tnfr/mathematics/epi.pyi +65 -0
- tnfr/mathematics/generators.py +242 -0
- tnfr/mathematics/generators.pyi +29 -0
- tnfr/mathematics/metrics.py +119 -0
- tnfr/mathematics/metrics.pyi +16 -0
- tnfr/mathematics/operators.py +239 -0
- tnfr/mathematics/operators.pyi +59 -0
- tnfr/mathematics/operators_factory.py +124 -0
- tnfr/mathematics/operators_factory.pyi +11 -0
- tnfr/mathematics/projection.py +87 -0
- tnfr/mathematics/projection.pyi +33 -0
- tnfr/mathematics/runtime.py +182 -0
- tnfr/mathematics/runtime.pyi +64 -0
- tnfr/mathematics/spaces.py +256 -0
- tnfr/mathematics/spaces.pyi +83 -0
- tnfr/mathematics/transforms.py +305 -0
- tnfr/mathematics/transforms.pyi +62 -0
- tnfr/metrics/__init__.py +79 -0
- tnfr/metrics/__init__.pyi +20 -0
- tnfr/metrics/buffer_cache.py +163 -0
- tnfr/metrics/buffer_cache.pyi +24 -0
- tnfr/metrics/cache_utils.py +214 -0
- tnfr/metrics/coherence.py +2009 -0
- tnfr/metrics/coherence.pyi +129 -0
- tnfr/metrics/common.py +158 -0
- tnfr/metrics/common.pyi +35 -0
- tnfr/metrics/core.py +316 -0
- tnfr/metrics/core.pyi +13 -0
- tnfr/metrics/diagnosis.py +833 -0
- tnfr/metrics/diagnosis.pyi +86 -0
- tnfr/metrics/emergence.py +245 -0
- tnfr/metrics/export.py +179 -0
- tnfr/metrics/export.pyi +7 -0
- tnfr/metrics/glyph_timing.py +379 -0
- tnfr/metrics/glyph_timing.pyi +81 -0
- tnfr/metrics/learning_metrics.py +280 -0
- tnfr/metrics/learning_metrics.pyi +21 -0
- tnfr/metrics/phase_coherence.py +351 -0
- tnfr/metrics/phase_compatibility.py +349 -0
- tnfr/metrics/reporting.py +183 -0
- tnfr/metrics/reporting.pyi +25 -0
- tnfr/metrics/sense_index.py +1203 -0
- tnfr/metrics/sense_index.pyi +9 -0
- tnfr/metrics/trig.py +373 -0
- tnfr/metrics/trig.pyi +13 -0
- tnfr/metrics/trig_cache.py +233 -0
- tnfr/metrics/trig_cache.pyi +10 -0
- tnfr/multiscale/__init__.py +32 -0
- tnfr/multiscale/hierarchical.py +517 -0
- tnfr/node.py +763 -0
- tnfr/node.pyi +139 -0
- tnfr/observers.py +255 -130
- tnfr/observers.pyi +31 -0
- tnfr/ontosim.py +144 -137
- tnfr/ontosim.pyi +28 -0
- tnfr/operators/__init__.py +1672 -0
- tnfr/operators/__init__.pyi +31 -0
- tnfr/operators/algebra.py +277 -0
- tnfr/operators/canonical_patterns.py +420 -0
- tnfr/operators/cascade.py +267 -0
- tnfr/operators/cycle_detection.py +358 -0
- tnfr/operators/definitions.py +4108 -0
- tnfr/operators/definitions.pyi +78 -0
- tnfr/operators/grammar.py +1164 -0
- tnfr/operators/grammar.pyi +140 -0
- tnfr/operators/hamiltonian.py +710 -0
- tnfr/operators/health_analyzer.py +809 -0
- tnfr/operators/jitter.py +272 -0
- tnfr/operators/jitter.pyi +11 -0
- tnfr/operators/lifecycle.py +314 -0
- tnfr/operators/metabolism.py +618 -0
- tnfr/operators/metrics.py +2138 -0
- tnfr/operators/network_analysis/__init__.py +27 -0
- tnfr/operators/network_analysis/source_detection.py +186 -0
- tnfr/operators/nodal_equation.py +395 -0
- tnfr/operators/pattern_detection.py +660 -0
- tnfr/operators/patterns.py +669 -0
- tnfr/operators/postconditions/__init__.py +38 -0
- tnfr/operators/postconditions/mutation.py +236 -0
- tnfr/operators/preconditions/__init__.py +1226 -0
- tnfr/operators/preconditions/coherence.py +305 -0
- tnfr/operators/preconditions/dissonance.py +236 -0
- tnfr/operators/preconditions/emission.py +128 -0
- tnfr/operators/preconditions/mutation.py +580 -0
- tnfr/operators/preconditions/reception.py +125 -0
- tnfr/operators/preconditions/resonance.py +364 -0
- tnfr/operators/registry.py +74 -0
- tnfr/operators/registry.pyi +9 -0
- tnfr/operators/remesh.py +1809 -0
- tnfr/operators/remesh.pyi +26 -0
- tnfr/operators/structural_units.py +268 -0
- tnfr/operators/unified_grammar.py +105 -0
- tnfr/parallel/__init__.py +54 -0
- tnfr/parallel/auto_scaler.py +234 -0
- tnfr/parallel/distributed.py +384 -0
- tnfr/parallel/engine.py +238 -0
- tnfr/parallel/gpu_engine.py +420 -0
- tnfr/parallel/monitoring.py +248 -0
- tnfr/parallel/partitioner.py +459 -0
- tnfr/py.typed +0 -0
- tnfr/recipes/__init__.py +22 -0
- tnfr/recipes/cookbook.py +743 -0
- tnfr/rng.py +178 -0
- tnfr/rng.pyi +26 -0
- tnfr/schemas/__init__.py +8 -0
- tnfr/schemas/grammar.json +94 -0
- tnfr/sdk/__init__.py +107 -0
- tnfr/sdk/__init__.pyi +19 -0
- tnfr/sdk/adaptive_system.py +173 -0
- tnfr/sdk/adaptive_system.pyi +21 -0
- tnfr/sdk/builders.py +370 -0
- tnfr/sdk/builders.pyi +51 -0
- tnfr/sdk/fluent.py +1121 -0
- tnfr/sdk/fluent.pyi +74 -0
- tnfr/sdk/templates.py +342 -0
- tnfr/sdk/templates.pyi +41 -0
- tnfr/sdk/utils.py +341 -0
- tnfr/secure_config.py +46 -0
- tnfr/security/__init__.py +70 -0
- tnfr/security/database.py +514 -0
- tnfr/security/subprocess.py +503 -0
- tnfr/security/validation.py +290 -0
- tnfr/selector.py +247 -0
- tnfr/selector.pyi +19 -0
- tnfr/sense.py +378 -0
- tnfr/sense.pyi +23 -0
- tnfr/services/__init__.py +17 -0
- tnfr/services/orchestrator.py +325 -0
- tnfr/sparse/__init__.py +39 -0
- tnfr/sparse/representations.py +492 -0
- tnfr/structural.py +705 -0
- tnfr/structural.pyi +83 -0
- tnfr/telemetry/__init__.py +35 -0
- tnfr/telemetry/cache_metrics.py +226 -0
- tnfr/telemetry/cache_metrics.pyi +64 -0
- tnfr/telemetry/nu_f.py +422 -0
- tnfr/telemetry/nu_f.pyi +108 -0
- tnfr/telemetry/verbosity.py +36 -0
- tnfr/telemetry/verbosity.pyi +15 -0
- tnfr/tokens.py +58 -0
- tnfr/tokens.pyi +36 -0
- tnfr/tools/__init__.py +20 -0
- tnfr/tools/domain_templates.py +478 -0
- tnfr/tools/sequence_generator.py +846 -0
- tnfr/topology/__init__.py +13 -0
- tnfr/topology/asymmetry.py +151 -0
- tnfr/trace.py +543 -0
- tnfr/trace.pyi +42 -0
- tnfr/tutorials/__init__.py +38 -0
- tnfr/tutorials/autonomous_evolution.py +285 -0
- tnfr/tutorials/interactive.py +1576 -0
- tnfr/tutorials/structural_metabolism.py +238 -0
- tnfr/types.py +775 -0
- tnfr/types.pyi +357 -0
- tnfr/units.py +68 -0
- tnfr/units.pyi +13 -0
- tnfr/utils/__init__.py +282 -0
- tnfr/utils/__init__.pyi +215 -0
- tnfr/utils/cache.py +4223 -0
- tnfr/utils/cache.pyi +470 -0
- tnfr/utils/callbacks.py +375 -0
- tnfr/utils/callbacks.pyi +49 -0
- tnfr/utils/chunks.py +108 -0
- tnfr/utils/chunks.pyi +22 -0
- tnfr/utils/data.py +428 -0
- tnfr/utils/data.pyi +74 -0
- tnfr/utils/graph.py +85 -0
- tnfr/utils/graph.pyi +10 -0
- tnfr/utils/init.py +821 -0
- tnfr/utils/init.pyi +80 -0
- tnfr/utils/io.py +559 -0
- tnfr/utils/io.pyi +66 -0
- tnfr/utils/numeric.py +114 -0
- tnfr/utils/numeric.pyi +21 -0
- tnfr/validation/__init__.py +257 -0
- tnfr/validation/__init__.pyi +85 -0
- tnfr/validation/compatibility.py +460 -0
- tnfr/validation/compatibility.pyi +6 -0
- tnfr/validation/config.py +73 -0
- tnfr/validation/graph.py +139 -0
- tnfr/validation/graph.pyi +18 -0
- tnfr/validation/input_validation.py +755 -0
- tnfr/validation/invariants.py +712 -0
- tnfr/validation/rules.py +253 -0
- tnfr/validation/rules.pyi +44 -0
- tnfr/validation/runtime.py +279 -0
- tnfr/validation/runtime.pyi +28 -0
- tnfr/validation/sequence_validator.py +162 -0
- tnfr/validation/soft_filters.py +170 -0
- tnfr/validation/soft_filters.pyi +32 -0
- tnfr/validation/spectral.py +164 -0
- tnfr/validation/spectral.pyi +42 -0
- tnfr/validation/validator.py +1266 -0
- tnfr/validation/window.py +39 -0
- tnfr/validation/window.pyi +1 -0
- tnfr/visualization/__init__.py +98 -0
- tnfr/visualization/cascade_viz.py +256 -0
- tnfr/visualization/hierarchy.py +284 -0
- tnfr/visualization/sequence_plotter.py +784 -0
- tnfr/viz/__init__.py +60 -0
- tnfr/viz/matplotlib.py +278 -0
- tnfr/viz/matplotlib.pyi +35 -0
- tnfr-8.5.0.dist-info/METADATA +573 -0
- tnfr-8.5.0.dist-info/RECORD +353 -0
- tnfr-8.5.0.dist-info/entry_points.txt +3 -0
- tnfr-3.0.3.dist-info/licenses/LICENSE.txt → tnfr-8.5.0.dist-info/licenses/LICENSE.md +1 -1
- tnfr/constants.py +0 -183
- tnfr/dynamics.py +0 -543
- tnfr/helpers.py +0 -198
- tnfr/main.py +0 -37
- tnfr/operators.py +0 -296
- tnfr-3.0.3.dist-info/METADATA +0 -35
- tnfr-3.0.3.dist-info/RECORD +0 -13
- {tnfr-3.0.3.dist-info → tnfr-8.5.0.dist-info}/WHEEL +0 -0
- {tnfr-3.0.3.dist-info → tnfr-8.5.0.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,162 @@
|
|
|
1
|
+
"""Semantic validation for TNFR operator sequences.
|
|
2
|
+
|
|
3
|
+
This module validates the semantic correctness of operator sequences beyond
|
|
4
|
+
syntactic grammar rules. It checks for patterns that may indicate structural
|
|
5
|
+
instability or non-canonical reorganization.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from __future__ import annotations
|
|
9
|
+
|
|
10
|
+
from .invariants import InvariantSeverity, InvariantViolation
|
|
11
|
+
|
|
12
|
+
__all__ = [
|
|
13
|
+
"SequenceSemanticValidator",
|
|
14
|
+
]
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
class SequenceSemanticValidator:
    """Validate the semantics of TNFR operator sequences.

    Beyond syntactic grammar rules, certain operator orderings indicate
    structural instability or non-canonical reorganization.  Each entry in
    ``semantic_rules`` describes a consecutive-operator pattern to search
    for, optional positional constraints, and the severity of the finding
    when the rule matches.
    """

    def __init__(self) -> None:
        """Initialize the semantic validator with the default rules.

        Rule schema:

        * ``pattern`` -- consecutive operator names that trigger the rule.
        * ``not_followed_by`` (optional) -- operators expected right after
          the pattern; anything else (or end of sequence) is a violation.
        * ``not_preceded_by`` (optional) -- operators expected right before
          the pattern; anything else (or start of sequence) is a violation.
        * ``severity`` -- :class:`InvariantSeverity` of the finding.
        * ``message`` -- human-readable explanation included in the report.
        """
        # Semantic rules between operators.
        self.semantic_rules: dict[str, dict] = {
            "mutation_without_stabilization": {
                "pattern": ["mutation"],
                "not_followed_by": ["coherence", "silence"],
                "severity": InvariantSeverity.WARNING,
                "message": "Mutation should be followed by stabilization (coherence or silence)",
            },
            "excessive_dissonance": {
                "pattern": ["dissonance", "dissonance", "dissonance"],
                "severity": InvariantSeverity.ERROR,
                "message": "Excessive consecutive dissonance may destabilize structure",
            },
            "contraction_after_silence": {
                "pattern": ["silence", "contraction"],
                "severity": InvariantSeverity.WARNING,
                "message": "Contraction immediately after silence may be redundant",
            },
            "excessive_recursivity": {
                "pattern": ["recursivity", "recursivity", "recursivity"],
                "severity": InvariantSeverity.WARNING,
                "message": "Excessive consecutive recursivity may indicate inefficient sequence design",
            },
            "silence_after_mutation": {
                "pattern": ["mutation", "silence"],
                "severity": InvariantSeverity.INFO,
                "message": "Silence after mutation preserves the new phase state (valid pattern)",
            },
        }

    def validate_semantic_sequence(
        self, sequence: list[str]
    ) -> list[InvariantViolation]:
        """Validates semantics of the operator sequence.

        Parameters
        ----------
        sequence : list[str]
            List of operator names in sequence.

        Returns
        -------
        list[InvariantViolation]
            List of semantic violations found.
        """
        violations: list[InvariantViolation] = []

        for rule_name, rule in self.semantic_rules.items():
            violations.extend(self._check_rule(sequence, rule_name, rule))

        return violations

    @staticmethod
    def _make_violation(rule: dict, suggestion: str) -> InvariantViolation:
        """Build a violation record for ``rule`` with the given suggestion."""
        return InvariantViolation(
            invariant_id=0,  # Semantic rule, not a numeric invariant.
            severity=rule["severity"],
            description=f"Semantic rule violation: {rule['message']}",
            suggestion=suggestion,
        )

    def _check_rule(
        self, sequence: list[str], rule_name: str, rule: dict
    ) -> list[InvariantViolation]:
        """Verifies a specific semantic rule.

        Parameters
        ----------
        sequence : list[str]
            Operator sequence to check.
        rule_name : str
            Name of the rule being checked.
        rule : dict
            Rule definition with pattern and constraints.

        Returns
        -------
        list[InvariantViolation]
            Violations found for this rule.
        """
        violations: list[InvariantViolation] = []
        pattern = rule["pattern"]

        # Slide a window of the pattern's length over the sequence.
        for i in range(len(sequence) - len(pattern) + 1):
            if sequence[i : i + len(pattern)] != pattern:
                continue

            # Without positional constraints the pattern match itself is
            # the violation.
            if "not_followed_by" not in rule and "not_preceded_by" not in rule:
                violations.append(
                    self._make_violation(
                        rule, rule.get("suggestion", "Review operator sequence")
                    )
                )
                continue

            # Check 'not_followed_by' constraints: the operator right after
            # the pattern must be one of the listed stabilizers.
            if "not_followed_by" in rule:
                next_pos = i + len(pattern)
                if next_pos < len(sequence):
                    next_op = sequence[next_pos]
                    if next_op not in rule["not_followed_by"]:
                        violations.append(
                            self._make_violation(
                                rule,
                                f"Consider adding one of: {rule['not_followed_by']}",
                            )
                        )
                else:
                    # The pattern ends the sequence but the rule requires a
                    # follow-up operator.
                    violations.append(
                        self._make_violation(
                            rule,
                            f"Sequence should end with one of: {rule['not_followed_by']}",
                        )
                    )

            # Check 'not_preceded_by' constraints: the operator right before
            # the pattern must be one of the listed predecessors.
            if "not_preceded_by" in rule:
                if i == 0:  # No preceding operator at all.
                    violations.append(
                        self._make_violation(
                            rule,
                            f"Sequence should start with one of: {rule['not_preceded_by']}",
                        )
                    )
                else:
                    prev_op = sequence[i - 1]
                    if prev_op not in rule["not_preceded_by"]:
                        violations.append(
                            self._make_violation(
                                rule,
                                f"'{pattern[0]}' should be preceded by one of: {rule['not_preceded_by']}",
                            )
                        )

        return violations
|
|
@@ -0,0 +1,170 @@
|
|
|
1
|
+
"""Soft grammar filters harmonising canonical selector heuristics."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from typing import TYPE_CHECKING, Any, Callable
|
|
6
|
+
|
|
7
|
+
from ..alias import get_attr
|
|
8
|
+
from ..constants.aliases import ALIAS_D2EPI
|
|
9
|
+
from ..glyph_history import recent_glyph
|
|
10
|
+
from ..types import Glyph
|
|
11
|
+
from ..utils import clamp01
|
|
12
|
+
from .rules import glyph_fallback, get_norm, normalized_dnfr
|
|
13
|
+
|
|
14
|
+
if TYPE_CHECKING: # pragma: no cover - import cycle guard
|
|
15
|
+
from collections.abc import Mapping
|
|
16
|
+
from ..operators.grammar import GrammarContext
|
|
17
|
+
|
|
18
|
+
__all__ = (
|
|
19
|
+
"acceleration_norm",
|
|
20
|
+
"check_repeats",
|
|
21
|
+
"maybe_force",
|
|
22
|
+
"soft_grammar_filters",
|
|
23
|
+
)
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
def acceleration_norm(ctx: "GrammarContext", nd: "Mapping[str, Any]") -> float:
    """Return the node acceleration normalised to ``[0, 1]``.

    The canonical ``accel_max`` bound stored in
    :class:`~tnfr.validation.GrammarContext` acts as the scale.  Magnitudes
    beyond the bound are clamped so the score remains structurally
    comparable with ΔNFR-based heuristics.
    """

    raw_accel = abs(get_attr(nd, ALIAS_D2EPI, 0.0))
    return clamp01(raw_accel / get_norm(ctx, "accel_max"))
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
def check_repeats(ctx: "GrammarContext", n: Any, cand: Glyph | str) -> Glyph | str:
    """Swap ``cand`` when it breaches the configured repetition window.

    Behaviour is driven by the soft grammar configuration in
    ``ctx.cfg_soft``:

    * ``window`` -- history length inspected via
      :func:`tnfr.glyph_history.recent_glyph`.
    * ``avoid_repeats`` -- glyph codes to dodge within that window.
    * ``fallbacks`` -- optional explicit replacements per avoided code;
      canonical fallbacks are used when unspecified.

    Raises a ``RepeatWindowError`` when the candidate repeats inside the
    window and no distinct fallback is available.
    """

    node_data = ctx.G.nodes[n]
    cfg = ctx.cfg_soft
    window = int(cfg.get("window", 0))
    cand_key = cand.value if isinstance(cand, Glyph) else str(cand)

    # Fast path: no window configured, candidate not on the avoid list, or
    # no recent occurrence inside the window.
    if (
        window <= 0
        or cand_key not in set(cfg.get("avoid_repeats", []))
        or not recent_glyph(node_data, cand_key, window)
    ):
        return cand

    fallback = glyph_fallback(cand_key, cfg.get("fallbacks", {}))
    fallback_key = fallback.value if isinstance(fallback, Glyph) else str(fallback)
    if fallback_key != cand_key:
        return fallback

    # No distinct fallback exists: surface the repetition as a hard error
    # carrying the offending window in structural (function-name) terms.
    def _glyph_code(item: Any) -> str:
        if isinstance(item, Glyph):
            return item.value
        try:
            return Glyph(str(item)).value
        except (TypeError, ValueError):
            return str(item)

    history = [_glyph_code(item) for item in node_data.get("glyph_history", ())]
    order = (*history[-window:], cand_key)

    from ..operators import grammar as _grammar

    def _structural(value: Glyph | str) -> str:
        default = value.value if isinstance(value, Glyph) else str(value)
        name = _grammar.glyph_function_name(value, default=default)
        return default if name is None else name

    cand_name = _structural(cand_key)
    raise _grammar.RepeatWindowError(
        rule="repeat-window",
        candidate=cand_name,
        message=f"{cand_name} repeats within window {window}",
        window=window,
        order=tuple(_structural(item) for item in order),
        context={"fallback": _structural(fallback_key)},
    )
|
|
91
|
+
|
|
92
|
+
|
|
93
|
+
def maybe_force(
    ctx: "GrammarContext",
    n: Any,
    cand: Glyph | str,
    original: Glyph | str,
    accessor: Callable[["GrammarContext", "Mapping[str, Any]"], float],
    key: str,
) -> Glyph | str:
    """Return ``original`` when ``accessor`` crosses the soft threshold ``key``.

    ``accessor`` receives the grammar context and node attributes.  When the
    resulting score reaches the configured threshold (default ``0.60``) the
    original candidate is restored, so soft filters never override
    high-confidence canonical choices.
    """

    if cand != original:
        threshold = float(ctx.cfg_soft.get(key, 0.60))
        score = accessor(ctx, ctx.G.nodes[n])
        if score >= threshold:
            return original
    return cand
|
|
115
|
+
|
|
116
|
+
|
|
117
|
+
def _match_template(result: Glyph | str, template: Glyph | str) -> Glyph | str:
    """Coerce ``result`` to mirror ``template``'s representation when possible.

    A ``Glyph`` result becomes its string code when the template is a
    string; a string result is promoted to a ``Glyph`` when the template is
    one and the code is valid.  Otherwise ``result`` is returned unchanged.
    """

    if isinstance(template, str) and isinstance(result, Glyph):
        return result.value
    if isinstance(template, Glyph) and isinstance(result, str):
        try:
            return Glyph(result)
        except (TypeError, ValueError):
            pass  # Unknown code: keep the plain string form.
    return result
|
|
128
|
+
|
|
129
|
+
|
|
130
|
+
def soft_grammar_filters(
    ctx: "GrammarContext",
    n: Any,
    cand: Glyph | str,
    *,
    original: Glyph | str | None = None,
    template: Glyph | str | None = None,
) -> Glyph | str:
    """Apply the canonical soft grammar pipeline for ``cand``.

    Three ordered checks are performed:

    1. :func:`check_repeats` swaps recent repetitions for the configured
       fallback glyphs.
    2. :func:`maybe_force` with :func:`tnfr.validation.rules.normalized_dnfr`
       restores the original glyph when ΔNFR already exceeds ``force_dnfr``.
    3. :func:`maybe_force` with :func:`acceleration_norm` performs the same
       safeguard using the ``force_accel`` threshold.

    Parameters
    ----------
    ctx:
        Active grammar context providing node access and configuration.
    n:
        Node identifier inside ``ctx.G``.
    cand:
        Candidate glyph (``Glyph`` or code string) to validate softly.
    original:
        Optional anchor glyph for :func:`maybe_force`; defaults to the
        unfiltered ``cand``.
    template:
        Optional value whose representation (``Glyph`` vs ``str``) the
        returned result mirrors; defaults to ``cand``.
    """

    reference = original if original is not None else cand
    shape = template if template is not None else cand

    result = check_repeats(ctx, n, cand)
    for accessor, key in (
        (normalized_dnfr, "force_dnfr"),
        (acceleration_norm, "force_accel"),
    ):
        result = maybe_force(ctx, n, result, reference, accessor, key)
    return _match_template(result, shape)
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from typing import Any, Callable, Mapping
|
|
4
|
+
|
|
5
|
+
from ..types import Glyph
|
|
6
|
+
from .grammar import GrammarContext
|
|
7
|
+
|
|
8
|
+
__all__ = (
|
|
9
|
+
"acceleration_norm",
|
|
10
|
+
"check_repeats",
|
|
11
|
+
"maybe_force",
|
|
12
|
+
"soft_grammar_filters",
|
|
13
|
+
)
|
|
14
|
+
|
|
15
|
+
# Type stubs mirroring tnfr/validation/soft_filters.py.
# Node acceleration normalised to [0, 1] via the ``accel_max`` bound.
def acceleration_norm(ctx: GrammarContext, nd: Mapping[str, Any]) -> float: ...
# Swap ``cand`` when it breaches the configured repetition window.
def check_repeats(ctx: GrammarContext, n: Any, cand: Glyph | str) -> Glyph | str: ...
# Restore ``original`` when ``accessor`` crosses the soft threshold ``key``.
def maybe_force(
    ctx: GrammarContext,
    n: Any,
    cand: Glyph | str,
    original: Glyph | str,
    accessor: Callable[[GrammarContext, Mapping[str, Any]], float],
    key: str,
) -> Glyph | str: ...
# Full soft pipeline: check_repeats, then the two maybe_force safeguards.
def soft_grammar_filters(
    ctx: GrammarContext,
    n: Any,
    cand: Glyph | str,
    *,
    original: Glyph | str | None = ...,
    template: Glyph | str | None = ...,
) -> Glyph | str: ...
|
|
@@ -0,0 +1,164 @@
|
|
|
1
|
+
"""Spectral validation helpers aligned with the TNFR canonical interface."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from ..compat.dataclass import dataclass
|
|
6
|
+
from typing import Any, Mapping, Sequence
|
|
7
|
+
|
|
8
|
+
import numpy as np
|
|
9
|
+
|
|
10
|
+
from ..mathematics.operators import CoherenceOperator, FrequencyOperator
|
|
11
|
+
from ..mathematics.spaces import HilbertSpace
|
|
12
|
+
from ..mathematics.runtime import (
|
|
13
|
+
coherence as runtime_coherence,
|
|
14
|
+
frequency_positive as runtime_frequency_positive,
|
|
15
|
+
normalized as runtime_normalized,
|
|
16
|
+
stable_unitary as runtime_stable_unitary,
|
|
17
|
+
)
|
|
18
|
+
from . import ValidationOutcome, Validator
|
|
19
|
+
|
|
20
|
+
__all__ = ("NFRValidator",)
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
@dataclass(slots=True)
|
|
24
|
+
class NFRValidator(Validator[np.ndarray]):
|
|
25
|
+
"""Validate spectral states against TNFR canonical invariants."""
|
|
26
|
+
|
|
27
|
+
hilbert_space: HilbertSpace
|
|
28
|
+
coherence_operator: CoherenceOperator
|
|
29
|
+
coherence_threshold: float
|
|
30
|
+
frequency_operator: FrequencyOperator | None = None
|
|
31
|
+
atol: float = 1e-9
|
|
32
|
+
|
|
33
|
+
    def _compute_summary(
        self,
        state: Sequence[complex] | np.ndarray,
        *,
        enforce_frequency_positivity: bool | None = None,
    ) -> tuple[bool, dict[str, Any], np.ndarray]:
        """Run all spectral checks on ``state``.

        Parameters
        ----------
        state : Sequence[complex] | np.ndarray
            Spectral state; projected onto ``self.hilbert_space`` first.
        enforce_frequency_positivity : bool | None
            Passed to the frequency check.  ``None`` means "enforce when a
            frequency operator is configured".  A truthy value without a
            configured operator raises :class:`ValueError`.

        Returns
        -------
        tuple[bool, dict[str, Any], np.ndarray]
            Overall pass flag, a per-check summary dict, and the
            normalised state vector.

        Raises
        ------
        ValueError
            If the projected state has (near-)zero norm, or positivity
            enforcement is requested without a frequency operator.
        """
        vector = self.hilbert_space.project(state)

        # Normalisation check runs on the raw projection; subsequent checks
        # use the explicitly normalised vector (with normalise=False).
        normalized_passed, norm_value = runtime_normalized(
            vector, self.hilbert_space, atol=self.atol
        )
        if np.isclose(norm_value, 0.0, atol=self.atol):
            raise ValueError("Cannot normalise a null state vector.")
        normalised_vector = vector / norm_value

        coherence_passed, coherence_value = runtime_coherence(
            normalised_vector,
            self.coherence_operator,
            self.coherence_threshold,
            normalise=False,
            atol=self.atol,
        )

        frequency_summary: dict[str, Any] | None = None
        # freq_ok stays True when no frequency operator is configured, so
        # the frequency check never blocks the overall verdict in that case.
        freq_ok = True
        if self.frequency_operator is not None:
            if enforce_frequency_positivity is None:
                enforce_frequency_positivity = True

            runtime_summary = runtime_frequency_positive(
                normalised_vector,
                self.frequency_operator,
                normalise=False,
                enforce=enforce_frequency_positivity,
                atol=self.atol,
            )
            freq_ok = bool(runtime_summary["passed"])
            # Expose the flag under the public key "enforced" rather than
            # the runtime helper's internal "enforce".
            frequency_summary = {
                **runtime_summary,
                "enforced": runtime_summary["enforce"],
            }
            frequency_summary.pop("enforce", None)
        elif enforce_frequency_positivity:
            raise ValueError(
                "Frequency positivity enforcement requested without operator."
            )

        unitary_passed, unitary_norm = runtime_stable_unitary(
            normalised_vector,
            self.coherence_operator,
            self.hilbert_space,
            normalise=False,
            atol=self.atol,
        )

        # Per-check summary; "frequency" is None when no operator is set.
        summary: dict[str, Any] = {
            "normalized": bool(normalized_passed),
            "coherence": {
                "passed": bool(coherence_passed),
                "value": coherence_value,
                "threshold": self.coherence_threshold,
            },
            "frequency": frequency_summary,
            "unitary_stability": {
                "passed": bool(unitary_passed),
                "norm_after": unitary_norm,
            },
        }

        overall = bool(
            normalized_passed and coherence_passed and freq_ok and unitary_passed
        )
        return overall, summary, normalised_vector
|
|
106
|
+
|
|
107
|
+
def validate(
    self,
    subject: Sequence[complex] | np.ndarray,
    /,
    *,
    enforce_frequency_positivity: bool | None = None,
) -> ValidationOutcome[np.ndarray]:
    """Run the full validation pipeline and wrap it in a :class:`ValidationOutcome`.

    Delegates the actual checks to ``_compute_summary`` and packages the
    overall verdict, the per-check summary, and the normalised state (also
    exposed under the ``"normalised_state"`` artifact key).
    """

    passed, summary, state = self._compute_summary(
        subject,
        enforce_frequency_positivity=enforce_frequency_positivity,
    )
    return ValidationOutcome(
        subject=state,
        passed=passed,
        summary=summary,
        artifacts={"normalised_state": state},
    )
|
|
126
|
+
|
|
127
|
+
def validate_state(
    self,
    state: Sequence[complex] | np.ndarray,
    *,
    enforce_frequency_positivity: bool | None = None,
) -> tuple[bool, dict[str, Any]]:
    """Backward compatible validation returning ``(passed, summary)``.

    Same checks as :meth:`validate`, but the normalised state vector is
    discarded and only the boolean verdict plus the summary mapping are
    returned, matching the legacy API shape.
    """

    passed, summary, _vector = self._compute_summary(
        state,
        enforce_frequency_positivity=enforce_frequency_positivity,
    )
    return passed, summary
|
|
139
|
+
|
|
140
|
+
def report(self, outcome: ValidationOutcome[np.ndarray]) -> str:
    """Return a human-readable report naming failed conditions.

    Inspects ``outcome.summary`` and lists, in check order, every
    condition that did not pass. The frequency check is only considered
    when its summary is a mapping (i.e. a frequency operator was used).
    """

    summary = outcome.summary
    failures: list[str] = []

    if not summary.get("normalized", False):
        failures.append("normalization")

    if not summary.get("coherence", {}).get("passed", False):
        failures.append("coherence threshold")

    freq = summary.get("frequency")
    if isinstance(freq, Mapping) and not freq.get("passed", False):
        failures.append("frequency positivity")

    if not summary.get("unitary_stability", {}).get("passed", False):
        failures.append("unitary stability")

    if failures:
        return "Failed checks: " + ", ".join(failures) + "."
    return "All validation checks passed."
|
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from typing import Any, Mapping, Sequence
|
|
4
|
+
|
|
5
|
+
import numpy as np
|
|
6
|
+
|
|
7
|
+
from ..mathematics.operators import CoherenceOperator, FrequencyOperator
|
|
8
|
+
from ..mathematics.spaces import HilbertSpace
|
|
9
|
+
from . import ValidationOutcome, Validator
|
|
10
|
+
|
|
11
|
+
class NFRValidator(Validator[np.ndarray]):
    """Typing stub for the NFR state validator.

    Mirrors the runtime implementation, which checks normalisation, a
    coherence-expectation threshold, optional frequency positivity, and
    unitary stability of a state vector.
    """

    # Space providing the norm / inner product used by the checks.
    hilbert_space: HilbertSpace
    # Operator whose coherence value is compared against the threshold.
    coherence_operator: CoherenceOperator
    coherence_threshold: float
    # Optional; when None, frequency positivity cannot be enforced.
    frequency_operator: FrequencyOperator | None
    # Absolute tolerance used by the numeric comparisons.
    atol: float

    def __init__(
        self,
        hilbert_space: HilbertSpace,
        coherence_operator: CoherenceOperator,
        *,
        coherence_threshold: float,
        frequency_operator: FrequencyOperator | None = ...,
        atol: float = ...,
    ) -> None: ...
    # Positional-only subject; returns a rich outcome with the normalised
    # state attached as an artifact.
    def validate(
        self,
        subject: Sequence[complex] | np.ndarray,
        /,
        *,
        enforce_frequency_positivity: bool | None = ...,
    ) -> ValidationOutcome[np.ndarray]: ...  # type: ignore[override]
    # Legacy API: returns only (passed, summary).
    def validate_state(
        self,
        state: Sequence[complex] | np.ndarray,
        *,
        enforce_frequency_positivity: bool | None = ...,
    ) -> tuple[bool, Mapping[str, Any]]: ...
    # Human-readable report listing failed checks for an outcome.
    def report(self, outcome: ValidationOutcome[np.ndarray]) -> str: ...

__all__ = ("NFRValidator",)
|