tnfr 3.0.3__py3-none-any.whl → 8.5.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of tnfr might be problematic; see the package's registry page for more details.
- tnfr/__init__.py +375 -56
- tnfr/__init__.pyi +33 -0
- tnfr/_compat.py +10 -0
- tnfr/_generated_version.py +34 -0
- tnfr/_version.py +49 -0
- tnfr/_version.pyi +7 -0
- tnfr/alias.py +723 -0
- tnfr/alias.pyi +108 -0
- tnfr/backends/__init__.py +354 -0
- tnfr/backends/jax_backend.py +173 -0
- tnfr/backends/numpy_backend.py +238 -0
- tnfr/backends/optimized_numpy.py +420 -0
- tnfr/backends/torch_backend.py +408 -0
- tnfr/cache.py +171 -0
- tnfr/cache.pyi +13 -0
- tnfr/cli/__init__.py +110 -0
- tnfr/cli/__init__.pyi +26 -0
- tnfr/cli/arguments.py +489 -0
- tnfr/cli/arguments.pyi +29 -0
- tnfr/cli/execution.py +914 -0
- tnfr/cli/execution.pyi +70 -0
- tnfr/cli/interactive_validator.py +614 -0
- tnfr/cli/utils.py +51 -0
- tnfr/cli/utils.pyi +7 -0
- tnfr/cli/validate.py +236 -0
- tnfr/compat/__init__.py +85 -0
- tnfr/compat/dataclass.py +136 -0
- tnfr/compat/jsonschema_stub.py +61 -0
- tnfr/compat/matplotlib_stub.py +73 -0
- tnfr/compat/numpy_stub.py +155 -0
- tnfr/config/__init__.py +224 -0
- tnfr/config/__init__.pyi +10 -0
- tnfr/config/constants.py +104 -0
- tnfr/config/constants.pyi +12 -0
- tnfr/config/defaults.py +54 -0
- tnfr/config/defaults_core.py +212 -0
- tnfr/config/defaults_init.py +33 -0
- tnfr/config/defaults_metric.py +104 -0
- tnfr/config/feature_flags.py +81 -0
- tnfr/config/feature_flags.pyi +16 -0
- tnfr/config/glyph_constants.py +31 -0
- tnfr/config/init.py +77 -0
- tnfr/config/init.pyi +8 -0
- tnfr/config/operator_names.py +254 -0
- tnfr/config/operator_names.pyi +36 -0
- tnfr/config/physics_derivation.py +354 -0
- tnfr/config/presets.py +83 -0
- tnfr/config/presets.pyi +7 -0
- tnfr/config/security.py +927 -0
- tnfr/config/thresholds.py +114 -0
- tnfr/config/tnfr_config.py +498 -0
- tnfr/constants/__init__.py +92 -0
- tnfr/constants/__init__.pyi +92 -0
- tnfr/constants/aliases.py +33 -0
- tnfr/constants/aliases.pyi +27 -0
- tnfr/constants/init.py +33 -0
- tnfr/constants/init.pyi +12 -0
- tnfr/constants/metric.py +104 -0
- tnfr/constants/metric.pyi +19 -0
- tnfr/core/__init__.py +33 -0
- tnfr/core/container.py +226 -0
- tnfr/core/default_implementations.py +329 -0
- tnfr/core/interfaces.py +279 -0
- tnfr/dynamics/__init__.py +238 -0
- tnfr/dynamics/__init__.pyi +83 -0
- tnfr/dynamics/adaptation.py +267 -0
- tnfr/dynamics/adaptation.pyi +7 -0
- tnfr/dynamics/adaptive_sequences.py +189 -0
- tnfr/dynamics/adaptive_sequences.pyi +14 -0
- tnfr/dynamics/aliases.py +23 -0
- tnfr/dynamics/aliases.pyi +19 -0
- tnfr/dynamics/bifurcation.py +232 -0
- tnfr/dynamics/canonical.py +229 -0
- tnfr/dynamics/canonical.pyi +48 -0
- tnfr/dynamics/coordination.py +385 -0
- tnfr/dynamics/coordination.pyi +25 -0
- tnfr/dynamics/dnfr.py +3034 -0
- tnfr/dynamics/dnfr.pyi +26 -0
- tnfr/dynamics/dynamic_limits.py +225 -0
- tnfr/dynamics/feedback.py +252 -0
- tnfr/dynamics/feedback.pyi +24 -0
- tnfr/dynamics/fused_dnfr.py +454 -0
- tnfr/dynamics/homeostasis.py +157 -0
- tnfr/dynamics/homeostasis.pyi +14 -0
- tnfr/dynamics/integrators.py +661 -0
- tnfr/dynamics/integrators.pyi +36 -0
- tnfr/dynamics/learning.py +310 -0
- tnfr/dynamics/learning.pyi +33 -0
- tnfr/dynamics/metabolism.py +254 -0
- tnfr/dynamics/nbody.py +796 -0
- tnfr/dynamics/nbody_tnfr.py +783 -0
- tnfr/dynamics/propagation.py +326 -0
- tnfr/dynamics/runtime.py +908 -0
- tnfr/dynamics/runtime.pyi +77 -0
- tnfr/dynamics/sampling.py +36 -0
- tnfr/dynamics/sampling.pyi +7 -0
- tnfr/dynamics/selectors.py +711 -0
- tnfr/dynamics/selectors.pyi +85 -0
- tnfr/dynamics/structural_clip.py +207 -0
- tnfr/errors/__init__.py +37 -0
- tnfr/errors/contextual.py +492 -0
- tnfr/execution.py +223 -0
- tnfr/execution.pyi +45 -0
- tnfr/extensions/__init__.py +205 -0
- tnfr/extensions/__init__.pyi +18 -0
- tnfr/extensions/base.py +173 -0
- tnfr/extensions/base.pyi +35 -0
- tnfr/extensions/business/__init__.py +71 -0
- tnfr/extensions/business/__init__.pyi +11 -0
- tnfr/extensions/business/cookbook.py +88 -0
- tnfr/extensions/business/cookbook.pyi +8 -0
- tnfr/extensions/business/health_analyzers.py +202 -0
- tnfr/extensions/business/health_analyzers.pyi +9 -0
- tnfr/extensions/business/patterns.py +183 -0
- tnfr/extensions/business/patterns.pyi +8 -0
- tnfr/extensions/medical/__init__.py +73 -0
- tnfr/extensions/medical/__init__.pyi +11 -0
- tnfr/extensions/medical/cookbook.py +88 -0
- tnfr/extensions/medical/cookbook.pyi +8 -0
- tnfr/extensions/medical/health_analyzers.py +181 -0
- tnfr/extensions/medical/health_analyzers.pyi +9 -0
- tnfr/extensions/medical/patterns.py +163 -0
- tnfr/extensions/medical/patterns.pyi +8 -0
- tnfr/flatten.py +262 -0
- tnfr/flatten.pyi +21 -0
- tnfr/gamma.py +354 -0
- tnfr/gamma.pyi +36 -0
- tnfr/glyph_history.py +377 -0
- tnfr/glyph_history.pyi +35 -0
- tnfr/glyph_runtime.py +19 -0
- tnfr/glyph_runtime.pyi +8 -0
- tnfr/immutable.py +218 -0
- tnfr/immutable.pyi +36 -0
- tnfr/initialization.py +203 -0
- tnfr/initialization.pyi +65 -0
- tnfr/io.py +10 -0
- tnfr/io.pyi +13 -0
- tnfr/locking.py +37 -0
- tnfr/locking.pyi +7 -0
- tnfr/mathematics/__init__.py +79 -0
- tnfr/mathematics/backend.py +453 -0
- tnfr/mathematics/backend.pyi +99 -0
- tnfr/mathematics/dynamics.py +408 -0
- tnfr/mathematics/dynamics.pyi +90 -0
- tnfr/mathematics/epi.py +391 -0
- tnfr/mathematics/epi.pyi +65 -0
- tnfr/mathematics/generators.py +242 -0
- tnfr/mathematics/generators.pyi +29 -0
- tnfr/mathematics/metrics.py +119 -0
- tnfr/mathematics/metrics.pyi +16 -0
- tnfr/mathematics/operators.py +239 -0
- tnfr/mathematics/operators.pyi +59 -0
- tnfr/mathematics/operators_factory.py +124 -0
- tnfr/mathematics/operators_factory.pyi +11 -0
- tnfr/mathematics/projection.py +87 -0
- tnfr/mathematics/projection.pyi +33 -0
- tnfr/mathematics/runtime.py +182 -0
- tnfr/mathematics/runtime.pyi +64 -0
- tnfr/mathematics/spaces.py +256 -0
- tnfr/mathematics/spaces.pyi +83 -0
- tnfr/mathematics/transforms.py +305 -0
- tnfr/mathematics/transforms.pyi +62 -0
- tnfr/metrics/__init__.py +79 -0
- tnfr/metrics/__init__.pyi +20 -0
- tnfr/metrics/buffer_cache.py +163 -0
- tnfr/metrics/buffer_cache.pyi +24 -0
- tnfr/metrics/cache_utils.py +214 -0
- tnfr/metrics/coherence.py +2009 -0
- tnfr/metrics/coherence.pyi +129 -0
- tnfr/metrics/common.py +158 -0
- tnfr/metrics/common.pyi +35 -0
- tnfr/metrics/core.py +316 -0
- tnfr/metrics/core.pyi +13 -0
- tnfr/metrics/diagnosis.py +833 -0
- tnfr/metrics/diagnosis.pyi +86 -0
- tnfr/metrics/emergence.py +245 -0
- tnfr/metrics/export.py +179 -0
- tnfr/metrics/export.pyi +7 -0
- tnfr/metrics/glyph_timing.py +379 -0
- tnfr/metrics/glyph_timing.pyi +81 -0
- tnfr/metrics/learning_metrics.py +280 -0
- tnfr/metrics/learning_metrics.pyi +21 -0
- tnfr/metrics/phase_coherence.py +351 -0
- tnfr/metrics/phase_compatibility.py +349 -0
- tnfr/metrics/reporting.py +183 -0
- tnfr/metrics/reporting.pyi +25 -0
- tnfr/metrics/sense_index.py +1203 -0
- tnfr/metrics/sense_index.pyi +9 -0
- tnfr/metrics/trig.py +373 -0
- tnfr/metrics/trig.pyi +13 -0
- tnfr/metrics/trig_cache.py +233 -0
- tnfr/metrics/trig_cache.pyi +10 -0
- tnfr/multiscale/__init__.py +32 -0
- tnfr/multiscale/hierarchical.py +517 -0
- tnfr/node.py +763 -0
- tnfr/node.pyi +139 -0
- tnfr/observers.py +255 -130
- tnfr/observers.pyi +31 -0
- tnfr/ontosim.py +144 -137
- tnfr/ontosim.pyi +28 -0
- tnfr/operators/__init__.py +1672 -0
- tnfr/operators/__init__.pyi +31 -0
- tnfr/operators/algebra.py +277 -0
- tnfr/operators/canonical_patterns.py +420 -0
- tnfr/operators/cascade.py +267 -0
- tnfr/operators/cycle_detection.py +358 -0
- tnfr/operators/definitions.py +4108 -0
- tnfr/operators/definitions.pyi +78 -0
- tnfr/operators/grammar.py +1164 -0
- tnfr/operators/grammar.pyi +140 -0
- tnfr/operators/hamiltonian.py +710 -0
- tnfr/operators/health_analyzer.py +809 -0
- tnfr/operators/jitter.py +272 -0
- tnfr/operators/jitter.pyi +11 -0
- tnfr/operators/lifecycle.py +314 -0
- tnfr/operators/metabolism.py +618 -0
- tnfr/operators/metrics.py +2138 -0
- tnfr/operators/network_analysis/__init__.py +27 -0
- tnfr/operators/network_analysis/source_detection.py +186 -0
- tnfr/operators/nodal_equation.py +395 -0
- tnfr/operators/pattern_detection.py +660 -0
- tnfr/operators/patterns.py +669 -0
- tnfr/operators/postconditions/__init__.py +38 -0
- tnfr/operators/postconditions/mutation.py +236 -0
- tnfr/operators/preconditions/__init__.py +1226 -0
- tnfr/operators/preconditions/coherence.py +305 -0
- tnfr/operators/preconditions/dissonance.py +236 -0
- tnfr/operators/preconditions/emission.py +128 -0
- tnfr/operators/preconditions/mutation.py +580 -0
- tnfr/operators/preconditions/reception.py +125 -0
- tnfr/operators/preconditions/resonance.py +364 -0
- tnfr/operators/registry.py +74 -0
- tnfr/operators/registry.pyi +9 -0
- tnfr/operators/remesh.py +1809 -0
- tnfr/operators/remesh.pyi +26 -0
- tnfr/operators/structural_units.py +268 -0
- tnfr/operators/unified_grammar.py +105 -0
- tnfr/parallel/__init__.py +54 -0
- tnfr/parallel/auto_scaler.py +234 -0
- tnfr/parallel/distributed.py +384 -0
- tnfr/parallel/engine.py +238 -0
- tnfr/parallel/gpu_engine.py +420 -0
- tnfr/parallel/monitoring.py +248 -0
- tnfr/parallel/partitioner.py +459 -0
- tnfr/py.typed +0 -0
- tnfr/recipes/__init__.py +22 -0
- tnfr/recipes/cookbook.py +743 -0
- tnfr/rng.py +178 -0
- tnfr/rng.pyi +26 -0
- tnfr/schemas/__init__.py +8 -0
- tnfr/schemas/grammar.json +94 -0
- tnfr/sdk/__init__.py +107 -0
- tnfr/sdk/__init__.pyi +19 -0
- tnfr/sdk/adaptive_system.py +173 -0
- tnfr/sdk/adaptive_system.pyi +21 -0
- tnfr/sdk/builders.py +370 -0
- tnfr/sdk/builders.pyi +51 -0
- tnfr/sdk/fluent.py +1121 -0
- tnfr/sdk/fluent.pyi +74 -0
- tnfr/sdk/templates.py +342 -0
- tnfr/sdk/templates.pyi +41 -0
- tnfr/sdk/utils.py +341 -0
- tnfr/secure_config.py +46 -0
- tnfr/security/__init__.py +70 -0
- tnfr/security/database.py +514 -0
- tnfr/security/subprocess.py +503 -0
- tnfr/security/validation.py +290 -0
- tnfr/selector.py +247 -0
- tnfr/selector.pyi +19 -0
- tnfr/sense.py +378 -0
- tnfr/sense.pyi +23 -0
- tnfr/services/__init__.py +17 -0
- tnfr/services/orchestrator.py +325 -0
- tnfr/sparse/__init__.py +39 -0
- tnfr/sparse/representations.py +492 -0
- tnfr/structural.py +705 -0
- tnfr/structural.pyi +83 -0
- tnfr/telemetry/__init__.py +35 -0
- tnfr/telemetry/cache_metrics.py +226 -0
- tnfr/telemetry/cache_metrics.pyi +64 -0
- tnfr/telemetry/nu_f.py +422 -0
- tnfr/telemetry/nu_f.pyi +108 -0
- tnfr/telemetry/verbosity.py +36 -0
- tnfr/telemetry/verbosity.pyi +15 -0
- tnfr/tokens.py +58 -0
- tnfr/tokens.pyi +36 -0
- tnfr/tools/__init__.py +20 -0
- tnfr/tools/domain_templates.py +478 -0
- tnfr/tools/sequence_generator.py +846 -0
- tnfr/topology/__init__.py +13 -0
- tnfr/topology/asymmetry.py +151 -0
- tnfr/trace.py +543 -0
- tnfr/trace.pyi +42 -0
- tnfr/tutorials/__init__.py +38 -0
- tnfr/tutorials/autonomous_evolution.py +285 -0
- tnfr/tutorials/interactive.py +1576 -0
- tnfr/tutorials/structural_metabolism.py +238 -0
- tnfr/types.py +775 -0
- tnfr/types.pyi +357 -0
- tnfr/units.py +68 -0
- tnfr/units.pyi +13 -0
- tnfr/utils/__init__.py +282 -0
- tnfr/utils/__init__.pyi +215 -0
- tnfr/utils/cache.py +4223 -0
- tnfr/utils/cache.pyi +470 -0
- tnfr/utils/callbacks.py +375 -0
- tnfr/utils/callbacks.pyi +49 -0
- tnfr/utils/chunks.py +108 -0
- tnfr/utils/chunks.pyi +22 -0
- tnfr/utils/data.py +428 -0
- tnfr/utils/data.pyi +74 -0
- tnfr/utils/graph.py +85 -0
- tnfr/utils/graph.pyi +10 -0
- tnfr/utils/init.py +821 -0
- tnfr/utils/init.pyi +80 -0
- tnfr/utils/io.py +559 -0
- tnfr/utils/io.pyi +66 -0
- tnfr/utils/numeric.py +114 -0
- tnfr/utils/numeric.pyi +21 -0
- tnfr/validation/__init__.py +257 -0
- tnfr/validation/__init__.pyi +85 -0
- tnfr/validation/compatibility.py +460 -0
- tnfr/validation/compatibility.pyi +6 -0
- tnfr/validation/config.py +73 -0
- tnfr/validation/graph.py +139 -0
- tnfr/validation/graph.pyi +18 -0
- tnfr/validation/input_validation.py +755 -0
- tnfr/validation/invariants.py +712 -0
- tnfr/validation/rules.py +253 -0
- tnfr/validation/rules.pyi +44 -0
- tnfr/validation/runtime.py +279 -0
- tnfr/validation/runtime.pyi +28 -0
- tnfr/validation/sequence_validator.py +162 -0
- tnfr/validation/soft_filters.py +170 -0
- tnfr/validation/soft_filters.pyi +32 -0
- tnfr/validation/spectral.py +164 -0
- tnfr/validation/spectral.pyi +42 -0
- tnfr/validation/validator.py +1266 -0
- tnfr/validation/window.py +39 -0
- tnfr/validation/window.pyi +1 -0
- tnfr/visualization/__init__.py +98 -0
- tnfr/visualization/cascade_viz.py +256 -0
- tnfr/visualization/hierarchy.py +284 -0
- tnfr/visualization/sequence_plotter.py +784 -0
- tnfr/viz/__init__.py +60 -0
- tnfr/viz/matplotlib.py +278 -0
- tnfr/viz/matplotlib.pyi +35 -0
- tnfr-8.5.0.dist-info/METADATA +573 -0
- tnfr-8.5.0.dist-info/RECORD +353 -0
- tnfr-8.5.0.dist-info/entry_points.txt +3 -0
- tnfr-3.0.3.dist-info/licenses/LICENSE.txt → tnfr-8.5.0.dist-info/licenses/LICENSE.md +1 -1
- tnfr/constants.py +0 -183
- tnfr/dynamics.py +0 -543
- tnfr/helpers.py +0 -198
- tnfr/main.py +0 -37
- tnfr/operators.py +0 -296
- tnfr-3.0.3.dist-info/METADATA +0 -35
- tnfr-3.0.3.dist-info/RECORD +0 -13
- {tnfr-3.0.3.dist-info → tnfr-8.5.0.dist-info}/WHEEL +0 -0
- {tnfr-3.0.3.dist-info → tnfr-8.5.0.dist-info}/top_level.txt +0 -0
tnfr/operators/patterns.py (new file)
@@ -0,0 +1,669 @@
"""Advanced pattern detection for structural operator sequences.

.. deprecated:: 0.2.0
    This module is deprecated and will be removed in version 1.0.0.
    Use :mod:`tnfr.operators.pattern_detection` instead, which provides unified
    pattern detection with explicit U1-U4 grammar rule mappings.

    The :class:`AdvancedPatternDetector` class remains available for backward
    compatibility, but new code should use :class:`UnifiedPatternDetector`.

This module provides unified pattern detection using coherence-weighted scoring.
All patterns are evaluated independently, but their match scores are weighted by
their structural coherence level. This respects TNFR's principle that emergent
patterns (with self-organization, phase transitions) have fundamentally higher
structural complexity than simple compositional patterns.

Coherence weights reflect observable structural depth, not arbitrary rankings.
"""

from __future__ import annotations

import warnings
from functools import lru_cache
from typing import TYPE_CHECKING, Any, Mapping, Sequence, Tuple

# Issue deprecation warning on import
warnings.warn(
    "patterns module is deprecated. "
    "Use tnfr.operators.pattern_detection.UnifiedPatternDetector instead. "
    "AdvancedPatternDetector remains available for backward compatibility.",
    DeprecationWarning,
    stacklevel=2,
)

if TYPE_CHECKING:
    from .grammar import StructuralPattern

from ..config.operator_names import (
    COHERENCE,
    CONTRACTION,
    COUPLING,
    DISSONANCE,
    EMISSION,
    EXPANSION,
    MUTATION,
    RECEPTION,
    RECURSIVITY,
    RESONANCE,
    SELF_ORGANIZATION,
    SILENCE,
    TRANSITION,
)

__all__ = ["AdvancedPatternDetector"]


class AdvancedPatternDetector:
    """Pattern detector using coherence-weighted scoring.

    Each pattern is evaluated independently and scored based on match quality.
    The final score is weighted by the pattern's structural coherence level,
    which reflects observable complexity (emergence, self-organization, phase
    transitions) rather than arbitrary ranking.

    This allows: weak match with emergent pattern > strong match with simple pattern,
    while still enabling perfect simple matches to win over weak emergent matches.
    """

    # Coherence weights based on structural complexity (not arbitrary hierarchy)
    # These reflect measurable properties: emergence, nested structure, phase transitions
    COHERENCE_WEIGHTS = {
        # Level 3: Emergent self-organizing patterns (highest structural depth)
        "THERAPEUTIC": 3.0,  # Full healing cycle with controlled crisis resolution
        "EDUCATIONAL": 3.0,  # Complete transformative learning with phase shift
        "ORGANIZATIONAL": 3.0,  # Institutional evolution with emergent reorganization
        "CREATIVE": 3.0,  # Artistic emergence through self-organization
        "REGENERATIVE": 3.0,  # Self-sustaining cycle with autonomous renewal
        # Level 2: Structural transformations (medium complexity)
        "HIERARCHICAL": 2.0,  # Self-organization creates nested structure
        "BIFURCATED": 2.0,  # Phase transition (OZ→ZHIR) branches possibility space
        "FRACTAL": 2.0,  # Recursive structure across scales
        "CYCLIC": 2.0,  # Regenerative loops with multiple state transitions
        "DEEP_LEARNING": 2.0,  # Deep adaptive learning with self-organization
        "EXPLORATORY_LEARNING": 2.0,  # Learning through exploration and resonance
        "ADAPTIVE_MUTATION": 2.0,  # Transformative learning with mutation
        # Level 1: Compositional patterns (building blocks)
        "BOOTSTRAP": 1.0,  # Initialization sequence
        "EXPLORE": 1.0,  # Controlled exploration
        "STABILIZE": 1.0,  # Consolidation ending
        "RESONATE": 1.0,  # Amplification through coupling
        "COMPRESS": 1.0,  # Simplification sequence
        "BASIC_LEARNING": 1.0,  # Simple learning sequence
        "CONSOLIDATION_CYCLE": 1.0,  # Memory consolidation
        # Canonical IL (Coherence) compositional patterns
        "SAFE_ACTIVATION": 1.0,  # emission → coherence: Emission stabilized
        "STABLE_INTEGRATION": 1.0,  # reception → coherence: Reception consolidated
        "CREATIVE_RESOLUTION": 1.0,  # dissonance → coherence: Dissonance resolved
        "RESONANCE_CONSOLIDATION": 1.0,  # resonance → coherence: Resonance locked
        "STABLE_TRANSFORMATION": 1.0,  # coherence → mutation: Controlled mutation
        # Level 0: Simple patterns
        "LINEAR": 0.5,  # Basic progression without transformation
        "MINIMAL": 0.5,  # Single or very few operators
        # Special cases
        "COMPLEX": 1.5,  # Multiple patterns combined
        "UNKNOWN": 0.1,  # Fallback for unclassified sequences
    }

    def __init__(self) -> None:
        """Initialize the coherence-weighted pattern detector with caching."""
        # Pattern signatures: each pattern maps to a list of operator subsequences
        # that characterize it, along with required/optional operators
        self._patterns = {
            # Fundamental patterns
            "LINEAR": {
                "max_length": 5,
                "excludes": {DISSONANCE, MUTATION, SELF_ORGANIZATION},
                "min_score": 0.1,  # Base score for matching criteria
            },
            "HIERARCHICAL": {
                "requires": {SELF_ORGANIZATION},
            },
            "FRACTAL": {
                "requires": {TRANSITION},
                "requires_any": {COUPLING, RECURSIVITY},
            },
            "CYCLIC": {
                "min_count": {TRANSITION: 2},
            },
            "BIFURCATED": {
                "adjacent_pairs": [(DISSONANCE, MUTATION), (DISSONANCE, CONTRACTION)],
            },
            # Domain-specific patterns
            "THERAPEUTIC": {
                "subsequences": [
                    [
                        RECEPTION,
                        EMISSION,
                        COHERENCE,
                        DISSONANCE,
                        SELF_ORGANIZATION,
                        COHERENCE,
                    ]
                ],
            },
            "EDUCATIONAL": {
                "subsequences": [
                    [RECEPTION, EMISSION, COHERENCE, EXPANSION, DISSONANCE, MUTATION]
                ],
            },
            "ORGANIZATIONAL": {
                "subsequences": [
                    [
                        TRANSITION,
                        EMISSION,
                        RECEPTION,
                        COUPLING,
                        RESONANCE,
                        DISSONANCE,
                        SELF_ORGANIZATION,
                    ]
                ],
            },
            "CREATIVE": {
                "subsequences": [
                    [
                        SILENCE,
                        EMISSION,
                        EXPANSION,
                        DISSONANCE,
                        MUTATION,
                        SELF_ORGANIZATION,
                    ]
                ],
            },
            "REGENERATIVE": {
                "subsequences": [
                    [
                        COHERENCE,
                        RESONANCE,
                        EXPANSION,
                        SILENCE,
                        TRANSITION,
                        EMISSION,
                        RECEPTION,
                        COUPLING,
                        COHERENCE,
                    ]
                ],
            },
            # Compositional patterns
            "BOOTSTRAP": {
                "subsequences": [[EMISSION, COUPLING, COHERENCE]],
                "max_length": 5,
            },
            "EXPLORE": {
                "subsequences": [[DISSONANCE, MUTATION, COHERENCE]],
            },
            "STABILIZE": {
                "ending_pairs": [(COHERENCE, SILENCE), (COHERENCE, RESONANCE)],
            },
            "RESONATE": {
                "subsequences": [[RESONANCE, COUPLING, RESONANCE]],
            },
            "COMPRESS": {
                "subsequences": [[CONTRACTION, COHERENCE, SILENCE]],
            },
            # Canonical IL (Coherence) sequences - compositional patterns
            # These are fundamental 2-operator sequences involving Coherence that appear
            # frequently in TNFR structural transformations
            "SAFE_ACTIVATION": {
                "subsequences": [[EMISSION, COHERENCE]],
                "max_length": 3,
                "description": "Emission stabilized immediately (emission → coherence)",
            },
            "STABLE_INTEGRATION": {
                "subsequences": [[RECEPTION, COHERENCE]],
                "max_length": 3,
                "description": "Reception consolidated (reception → coherence)",
            },
            "CREATIVE_RESOLUTION": {
                "subsequences": [[DISSONANCE, COHERENCE]],
                "max_length": 4,
                "description": "Dissonance resolved into coherence (dissonance → coherence)",
            },
            "RESONANCE_CONSOLIDATION": {
                "subsequences": [[RESONANCE, COHERENCE]],
                "max_length": 4,
                "description": "Propagated coherence locked in (resonance → coherence)",
            },
            "STABLE_TRANSFORMATION": {
                "subsequences": [[COHERENCE, MUTATION]],
                "max_length": 4,
                "description": "Coherence enabling controlled mutation (coherence → mutation)",
                "note": "Generates CAUTION warning due to coherence→mutation compatibility level",
            },
            # Adaptive learning patterns (AL + T'HOL canonical sequences)
            "BASIC_LEARNING": {
                "subsequences": [[EMISSION, RECEPTION, COHERENCE, SILENCE]],
                "max_length": 6,
            },
            "DEEP_LEARNING": {
                "subsequences": [
                    [
                        EMISSION,
                        RECEPTION,
                        COHERENCE,
                        DISSONANCE,
                        SELF_ORGANIZATION,
                        COHERENCE,
                        SILENCE,
                    ]
                ],
                "requires": {EMISSION, SELF_ORGANIZATION},
            },
            "EXPLORATORY_LEARNING": {
                "subsequences": [
                    [
                        EMISSION,
                        RECEPTION,
                        COHERENCE,
                        DISSONANCE,
                        SELF_ORGANIZATION,
                        RESONANCE,
                        COHERENCE,
                        SILENCE,
                    ]
                ],
                "requires": {EMISSION, DISSONANCE, SELF_ORGANIZATION, RESONANCE},
            },
            "CONSOLIDATION_CYCLE": {
                "subsequences": [[EMISSION, RECEPTION, COHERENCE, RECURSIVITY]],
                "max_length": 6,
            },
            "ADAPTIVE_MUTATION": {
                "subsequences": [
                    [EMISSION, RECEPTION, COHERENCE, DISSONANCE, MUTATION, TRANSITION]
                ],
                "requires": {EMISSION, DISSONANCE, MUTATION},
            },
            "MINIMAL": {
                "max_length": 1,
                "min_score": 0.1,
            },
        }

        # Cache for pattern detection results (maxsize=256 to cache common sequences)
        self._detect_cache = lru_cache(maxsize=256)(self._detect_pattern_cached)

    def detect_pattern(self, sequence: Sequence[str]) -> StructuralPattern:
        """Detect the best matching pattern using coherence-weighted scoring.

        Parameters
        ----------
        sequence : Sequence[str]
            Canonical operator names to analyze.

        Returns
        -------
        StructuralPattern
            The pattern with the highest coherence-weighted score.

        Notes
        -----
        All patterns are evaluated and scored independently. The final score for
        each pattern is: match_score * coherence_weight.

        This means:
        - A weak match (0.3) with an emergent pattern (weight 3.0) scores 0.9
        - A strong match (0.4) with a compositional pattern (weight 1.0) scores 0.4
        - The emergent pattern wins, respecting structural depth

        However:
        - A perfect match (0.8) with LINEAR (weight 0.5) scores 0.4
        - A weak match (0.3) with THERAPEUTIC (weight 3.0) scores 0.9
        - THERAPEUTIC wins, but if LINEAR had 1.0 match, it would score 0.5 and still lose
        - This is correct: even a perfect simple pattern has less structural depth
        """
        # Convert to tuple for caching (sequences must be hashable)
        sequence_tuple = (
            tuple(sequence) if not isinstance(sequence, tuple) else sequence
        )
        return self._detect_cache(sequence_tuple)

    def _detect_pattern_cached(
        self, sequence_tuple: Tuple[str, ...]
    ) -> StructuralPattern:
        """Cached implementation of pattern detection with grammar validation.

        Parameters
        ----------
        sequence_tuple : Tuple[str, ...]
            Immutable sequence of canonical operator names.

        Returns
        -------
        StructuralPattern
            The pattern with the highest coherence-weighted score.

        Notes
        -----
        **Grammar Validation**: Sequences are validated against TNFR canonical
        grammar before pattern matching. Invalid sequences return UNKNOWN pattern.
        This ensures that only grammatically coherent sequences are recognized
        as canonical patterns.

        **Note**: Grammar validation is performed without triggering pattern detection
        to avoid recursion.
        """
        from .grammar import StructuralPattern

        sequence = list(sequence_tuple)

        if not sequence:
            return StructuralPattern.UNKNOWN

        # **NEW**: Validate sequence grammar before pattern detection
        # Use validate_sequence which now won't call back to pattern detection
        try:
            from ..validation import validate_sequence

            validation_result = validate_sequence(sequence)
            if not validation_result.passed:
                # Sequence violates TNFR grammar - cannot be a canonical pattern
                return StructuralPattern.UNKNOWN
        except RecursionError:
            # If recursion occurs, skip grammar validation to break the cycle
            # This can happen if validate_sequence internally calls pattern detection
            pass

        # Score all patterns with coherence weighting
        weighted_scores = {}
        raw_scores = {}
        for pattern_name, criteria in self._patterns.items():
            raw_score = self._score_pattern(sequence, criteria)
            if raw_score > 0:
                weight = self.COHERENCE_WEIGHTS.get(pattern_name, 1.0)
                weighted_scores[pattern_name] = raw_score * weight
                raw_scores[pattern_name] = raw_score

        # Handle COMPLEX: long sequences with multiple good raw matches
        # But NOT if we have a clear domain pattern match
        if len(sequence) > 8 and len(raw_scores) >= 3:
            # Check if we have a strong domain pattern match (even if weighted)
            domain_patterns = [
                "THERAPEUTIC",
                "EDUCATIONAL",
                "ORGANIZATIONAL",
                "CREATIVE",
                "REGENERATIVE",
            ]
            domain_weighted_scores = {
                p: weighted_scores.get(p, 0)
                for p in domain_patterns
                if p in weighted_scores
            }
            if domain_weighted_scores:
                max_domain_score = max(domain_weighted_scores.values())
                # If a domain pattern has the highest weighted score, don't mark as COMPLEX
                if max_domain_score == max(weighted_scores.values()):
                    pass  # Let it fall through to return the best pattern
                else:
                    # Check if we have diverse high-coherence patterns
                    high_coherence_patterns = [
                        p
                        for p, s in raw_scores.items()
                        if s > 0.3 and self.COHERENCE_WEIGHTS.get(p, 1.0) >= 2.0
                    ]
                    if len(high_coherence_patterns) >= 2:
                        return StructuralPattern.COMPLEX
            else:
                # No domain patterns, check for general complexity
                high_coherence_patterns = [
                    p
                    for p, s in raw_scores.items()
                    if s > 0.3 and self.COHERENCE_WEIGHTS.get(p, 1.0) >= 2.0
                ]
                if len(high_coherence_patterns) >= 2:
                    return StructuralPattern.COMPLEX

        # Return best weighted match or UNKNOWN
        if not weighted_scores:
            return StructuralPattern.UNKNOWN

        best_pattern = max(weighted_scores, key=weighted_scores.get)
        return StructuralPattern[best_pattern]

    def _score_pattern(
        self, sequence: Sequence[str], criteria: dict[str, Any]
    ) -> float:
        """Score how well a sequence matches pattern criteria.

        Returns
        -------
        float
            Score from 0.0 (no match) to 1.0+ (perfect match).
        """
        score = 0.0

        # Check subsequences (highest weight)
        if "subsequences" in criteria:
            for subseq in criteria["subsequences"]:
                if self._contains_subsequence(sequence, subseq):
                    # Score based on coverage: how much of sequence is in pattern
                    coverage = len(subseq) / len(sequence)
                    score += 0.8 * coverage

        # Check required operators
        if "requires" in criteria:
            if all(op in sequence for op in criteria["requires"]):
                score += 0.3
            else:
                return 0.0  # Hard requirement

        # Check any-of requirements
        if "requires_any" in criteria:
            if any(op in sequence for op in criteria["requires_any"]):
                score += 0.2
            else:
                return 0.0  # Hard requirement

        # Check minimum counts
        if "min_count" in criteria:
            for op, min_val in criteria["min_count"].items():
                if sequence.count(op) >= min_val:
                    score += 0.4
                else:
                    return 0.0  # Hard requirement

        # Check adjacent pairs
        if "adjacent_pairs" in criteria:
            for op1, op2 in criteria["adjacent_pairs"]:
                if self._has_adjacent_pair(sequence, op1, op2):
                    score += 0.5
                    break
            else:
                return 0.0  # None found

        # Check ending pairs
        if "ending_pairs" in criteria and len(sequence) >= 2:
            for op1, op2 in criteria["ending_pairs"]:
                if sequence[-2] == op1 and sequence[-1] == op2:
                    score += 0.4
                    break
            else:
                return 0.0  # None found

        # Check excludes
        if "excludes" in criteria:
            if any(op in sequence for op in criteria["excludes"]):
                return 0.0  # Exclusion violated

        # Check max length
        if "max_length" in criteria:
            if len(sequence) > criteria["max_length"]:
                return 0.0  # Too long

        # Add base score if pattern has min_score (e.g., for LINEAR)
        if "min_score" in criteria:
            score += criteria["min_score"]

        return score

    def analyze_sequence_composition(
        self, sequence: Sequence[str]
    ) -> Mapping[str, Any]:
        """Perform comprehensive analysis of sequence structure.

        Parameters
        ----------
        sequence : Sequence[str]
            Canonical operator names to analyze.

        Returns
        -------
        Mapping[str, Any]
            Analysis containing:
            - primary_pattern: Best matching pattern (coherence-weighted)
            - pattern_scores: Raw match scores for all patterns
            - weighted_scores: Coherence-weighted scores for all patterns
            - coherence_weights: The weight applied to each pattern
            - components: List of identified sub-patterns
            - complexity_score: Measure of sequence complexity (0.0-1.0)
            - domain_suitability: Scores for different application domains
            - structural_health: Coherence and stability metrics
        """
        primary = self.detect_pattern(sequence)

        # Get raw and weighted scores for all patterns
        pattern_scores = {}
        weighted_scores = {}
        for pattern_name, criteria in self._patterns.items():
            raw_score = self._score_pattern(sequence, criteria)
            if raw_score > 0:
                pattern_scores[pattern_name] = raw_score
                weight = self.COHERENCE_WEIGHTS.get(pattern_name, 1.0)
                weighted_scores[pattern_name] = raw_score * weight

        components = self._identify_components(sequence)

        return {
            "primary_pattern": primary.value,
            "pattern_scores": pattern_scores,
            "weighted_scores": weighted_scores,
            "coherence_weights": dict(self.COHERENCE_WEIGHTS),
            "components": components,
            "complexity_score": self._calculate_complexity(sequence),
            "domain_suitability": self._assess_domain_fit(sequence),
            "structural_health": self._calculate_health_metrics(sequence),
        }

    # Helper methods (keep existing implementations) --------------------------

    def _contains_subsequence(
        self, sequence: Sequence[str], pattern: Sequence[str]
    ) -> bool:
        """Check if pattern exists as a subsequence within sequence."""
        if len(pattern) > len(sequence):
            return False

        pattern_len = len(pattern)
        for i in range(len(sequence) - pattern_len + 1):
            if all(sequence[i + j] == pattern[j] for j in range(pattern_len)):
                return True
        return False

    def _has_adjacent_pair(self, sequence: Sequence[str], op1: str, op2: str) -> bool:
        """Check if op1 is immediately followed by op2."""
        for i in range(len(sequence) - 1):
            if sequence[i] == op1 and sequence[i + 1] == op2:
                return True
        return False

    def _identify_components(self, seq: Sequence[str]) -> list[str]:
        """Identify all pattern components present in the sequence.

        Components are identified based on partial matches OR presence of
        characteristic subsequences, even if the full pattern doesn't match.
        """
        components = []

        # Check each pattern for partial matches (score > 0 but not dominant)
        for pattern_name, criteria in self._patterns.items():
            score = self._score_pattern(seq, criteria)
            if 0 < score < 0.8:  # Partial match
                components.append(pattern_name.lower())

        # Also check for characteristic subsequences even if pattern doesn't fully match
        # (e.g., BOOTSTRAP subsequence in a long sequence)
        bootstrap_seq = [EMISSION, COUPLING, COHERENCE]
        if (
            self._contains_subsequence(seq, bootstrap_seq)
            and "bootstrap" not in components
        ):
            components.append("bootstrap")

        explore_seq = [DISSONANCE, MUTATION, COHERENCE]
        if self._contains_subsequence(seq, explore_seq) and "explore" not in components:
            components.append("explore")

        # Add structural indicators
        if DISSONANCE in seq:
            components.append("crisis")
        if SELF_ORGANIZATION in seq:
            components.append("reorganization")
        if MUTATION in seq:
            components.append("transformation")
        if seq.count(TRANSITION) >= 2:
            components.append("cyclic_navigation")

        return components

    def _calculate_complexity(self, seq: Sequence[str]) -> float:
        """Calculate complexity score based on sequence characteristics."""
        if not seq:
            return 0.0

        # Factors contributing to complexity
        length_factor = min(len(seq) / 15.0, 1.0)

        unique_count = len(set(seq))
        diversity_factor = unique_count / len(seq)

        complex_ops = {DISSONANCE, MUTATION, SELF_ORGANIZATION, TRANSITION}
        complex_count = sum(1 for op in seq if op in complex_ops)
        complexity_factor = min(complex_count / 5.0, 1.0)

        return 0.3 * length_factor + 0.3 * diversity_factor + 0.4 * complexity_factor

    def _assess_domain_fit(self, seq: Sequence[str]) -> Mapping[str, float]:
        """Assess suitability for different application domains."""
        scores: dict[str, float] = {}

        # Score based on domain-specific pattern matches
        domain_patterns = {
            "therapeutic": "THERAPEUTIC",
            "educational": "EDUCATIONAL",
            "organizational": "ORGANIZATIONAL",
            "creative": "CREATIVE",
            "regenerative": "REGENERATIVE",
            "basic_learning": "BASIC_LEARNING",
            "deep_learning": "DEEP_LEARNING",
            "exploratory_learning": "EXPLORATORY_LEARNING",
            "consolidation_cycle": "CONSOLIDATION_CYCLE",
            "adaptive_mutation": "ADAPTIVE_MUTATION",
        }

        for domain, pattern_name in domain_patterns.items():
            if pattern_name in self._patterns:
                score = self._score_pattern(seq, self._patterns[pattern_name])
                scores[domain] = min(score, 1.0)

        return scores

    def _calculate_health_metrics(self, seq: Sequence[str]) -> Mapping[str, Any]:
        """Calculate structural health indicators."""
        stabilizers = sum(1 for op in seq if op in {COHERENCE, SILENCE, RESONANCE})
        destabilizers = sum(1 for op in seq if op in {DISSONANCE, MUTATION, EXPANSION})

        total = len(seq)
        balance = (stabilizers - destabilizers) / total if total > 0 else 0.0

        return {
            "stabilizer_count": stabilizers,
            "destabilizer_count": destabilizers,
            "balance": balance,
            "has_closure": (
                seq[-1] in {SILENCE, TRANSITION, RECURSIVITY} if seq else False
            ),
        }
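
To make the coherence-weighted API above concrete, here is a minimal usage sketch. It assumes tnfr 8.5.0 is installed and uses only names that appear in this diff (AdvancedPatternDetector, detect_pattern, analyze_sequence_composition, and the operator constants exported by tnfr.config.operator_names); the printed pattern is illustrative, since the actual result also depends on the grammar validation step inside _detect_pattern_cached.

import warnings

from tnfr.config.operator_names import COHERENCE, COUPLING, EMISSION

with warnings.catch_warnings():
    # The deprecated module emits DeprecationWarning at import time (see warnings.warn above).
    warnings.simplefilter("ignore", DeprecationWarning)
    from tnfr.operators.patterns import AdvancedPatternDetector

detector = AdvancedPatternDetector()

# emission -> coupling -> coherence is the BOOTSTRAP subsequence registered in self._patterns.
sequence = [EMISSION, COUPLING, COHERENCE]

# Selection rule: argmax over raw_match_score * COHERENCE_WEIGHTS[name]; e.g. a 0.3 raw match
# on THERAPEUTIC (weight 3.0 -> 0.9) outranks a 0.4 raw match on BOOTSTRAP (weight 1.0 -> 0.4),
# exactly as the detect_pattern docstring describes.
print(detector.detect_pattern(sequence))

# Full composition report: raw scores, coherence-weighted scores, components, health metrics.
report = detector.analyze_sequence_composition(sequence)
print(report["primary_pattern"], report["weighted_scores"])

Because detect_pattern normalizes its input to a tuple, repeated calls with the same sequence hit the lru_cache created in __init__ instead of re-scoring every pattern.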