tnfr 4.5.2__py3-none-any.whl → 7.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of tnfr might be problematic.
- tnfr/__init__.py +275 -51
- tnfr/__init__.pyi +33 -0
- tnfr/_compat.py +10 -0
- tnfr/_generated_version.py +34 -0
- tnfr/_version.py +49 -0
- tnfr/_version.pyi +7 -0
- tnfr/alias.py +117 -31
- tnfr/alias.pyi +108 -0
- tnfr/cache.py +6 -572
- tnfr/cache.pyi +16 -0
- tnfr/callback_utils.py +16 -38
- tnfr/callback_utils.pyi +79 -0
- tnfr/cli/__init__.py +34 -14
- tnfr/cli/__init__.pyi +26 -0
- tnfr/cli/arguments.py +211 -28
- tnfr/cli/arguments.pyi +27 -0
- tnfr/cli/execution.py +470 -50
- tnfr/cli/execution.pyi +70 -0
- tnfr/cli/utils.py +18 -3
- tnfr/cli/utils.pyi +8 -0
- tnfr/config/__init__.py +13 -0
- tnfr/config/__init__.pyi +10 -0
- tnfr/{constants_glyphs.py → config/constants.py} +26 -20
- tnfr/config/constants.pyi +12 -0
- tnfr/config/feature_flags.py +83 -0
- tnfr/{config.py → config/init.py} +11 -7
- tnfr/config/init.pyi +8 -0
- tnfr/config/operator_names.py +93 -0
- tnfr/config/operator_names.pyi +28 -0
- tnfr/config/presets.py +84 -0
- tnfr/config/presets.pyi +7 -0
- tnfr/constants/__init__.py +80 -29
- tnfr/constants/__init__.pyi +92 -0
- tnfr/constants/aliases.py +31 -0
- tnfr/constants/core.py +4 -4
- tnfr/constants/core.pyi +17 -0
- tnfr/constants/init.py +1 -1
- tnfr/constants/init.pyi +12 -0
- tnfr/constants/metric.py +7 -15
- tnfr/constants/metric.pyi +19 -0
- tnfr/dynamics/__init__.py +165 -633
- tnfr/dynamics/__init__.pyi +82 -0
- tnfr/dynamics/adaptation.py +267 -0
- tnfr/dynamics/aliases.py +23 -0
- tnfr/dynamics/coordination.py +385 -0
- tnfr/dynamics/dnfr.py +2283 -400
- tnfr/dynamics/dnfr.pyi +24 -0
- tnfr/dynamics/integrators.py +406 -98
- tnfr/dynamics/integrators.pyi +34 -0
- tnfr/dynamics/runtime.py +881 -0
- tnfr/dynamics/sampling.py +10 -5
- tnfr/dynamics/sampling.pyi +7 -0
- tnfr/dynamics/selectors.py +719 -0
- tnfr/execution.py +70 -48
- tnfr/execution.pyi +45 -0
- tnfr/flatten.py +13 -9
- tnfr/flatten.pyi +21 -0
- tnfr/gamma.py +66 -53
- tnfr/gamma.pyi +34 -0
- tnfr/glyph_history.py +110 -52
- tnfr/glyph_history.pyi +35 -0
- tnfr/glyph_runtime.py +16 -0
- tnfr/glyph_runtime.pyi +9 -0
- tnfr/immutable.py +69 -28
- tnfr/immutable.pyi +34 -0
- tnfr/initialization.py +16 -16
- tnfr/initialization.pyi +65 -0
- tnfr/io.py +6 -240
- tnfr/io.pyi +16 -0
- tnfr/locking.pyi +7 -0
- tnfr/mathematics/__init__.py +81 -0
- tnfr/mathematics/backend.py +426 -0
- tnfr/mathematics/dynamics.py +398 -0
- tnfr/mathematics/epi.py +254 -0
- tnfr/mathematics/generators.py +222 -0
- tnfr/mathematics/metrics.py +119 -0
- tnfr/mathematics/operators.py +233 -0
- tnfr/mathematics/operators_factory.py +71 -0
- tnfr/mathematics/projection.py +78 -0
- tnfr/mathematics/runtime.py +173 -0
- tnfr/mathematics/spaces.py +247 -0
- tnfr/mathematics/transforms.py +292 -0
- tnfr/metrics/__init__.py +10 -10
- tnfr/metrics/__init__.pyi +20 -0
- tnfr/metrics/coherence.py +993 -324
- tnfr/metrics/common.py +23 -16
- tnfr/metrics/common.pyi +46 -0
- tnfr/metrics/core.py +251 -35
- tnfr/metrics/core.pyi +13 -0
- tnfr/metrics/diagnosis.py +708 -111
- tnfr/metrics/diagnosis.pyi +85 -0
- tnfr/metrics/export.py +27 -15
- tnfr/metrics/glyph_timing.py +232 -42
- tnfr/metrics/reporting.py +33 -22
- tnfr/metrics/reporting.pyi +12 -0
- tnfr/metrics/sense_index.py +987 -43
- tnfr/metrics/sense_index.pyi +9 -0
- tnfr/metrics/trig.py +214 -23
- tnfr/metrics/trig.pyi +13 -0
- tnfr/metrics/trig_cache.py +115 -22
- tnfr/metrics/trig_cache.pyi +10 -0
- tnfr/node.py +542 -136
- tnfr/node.pyi +178 -0
- tnfr/observers.py +152 -35
- tnfr/observers.pyi +31 -0
- tnfr/ontosim.py +23 -19
- tnfr/ontosim.pyi +28 -0
- tnfr/operators/__init__.py +601 -82
- tnfr/operators/__init__.pyi +45 -0
- tnfr/operators/definitions.py +513 -0
- tnfr/operators/definitions.pyi +78 -0
- tnfr/operators/grammar.py +760 -0
- tnfr/operators/jitter.py +107 -38
- tnfr/operators/jitter.pyi +11 -0
- tnfr/operators/registry.py +75 -0
- tnfr/operators/registry.pyi +13 -0
- tnfr/operators/remesh.py +149 -88
- tnfr/py.typed +0 -0
- tnfr/rng.py +46 -143
- tnfr/rng.pyi +14 -0
- tnfr/schemas/__init__.py +8 -0
- tnfr/schemas/grammar.json +94 -0
- tnfr/selector.py +25 -19
- tnfr/selector.pyi +19 -0
- tnfr/sense.py +72 -62
- tnfr/sense.pyi +23 -0
- tnfr/structural.py +522 -262
- tnfr/structural.pyi +69 -0
- tnfr/telemetry/__init__.py +35 -0
- tnfr/telemetry/cache_metrics.py +226 -0
- tnfr/telemetry/nu_f.py +423 -0
- tnfr/telemetry/nu_f.pyi +123 -0
- tnfr/telemetry/verbosity.py +37 -0
- tnfr/tokens.py +1 -3
- tnfr/tokens.pyi +36 -0
- tnfr/trace.py +270 -113
- tnfr/trace.pyi +40 -0
- tnfr/types.py +574 -6
- tnfr/types.pyi +331 -0
- tnfr/units.py +69 -0
- tnfr/units.pyi +16 -0
- tnfr/utils/__init__.py +217 -0
- tnfr/utils/__init__.pyi +202 -0
- tnfr/utils/cache.py +2395 -0
- tnfr/utils/cache.pyi +468 -0
- tnfr/utils/chunks.py +104 -0
- tnfr/utils/chunks.pyi +21 -0
- tnfr/{collections_utils.py → utils/data.py} +147 -90
- tnfr/utils/data.pyi +64 -0
- tnfr/utils/graph.py +85 -0
- tnfr/utils/graph.pyi +10 -0
- tnfr/utils/init.py +770 -0
- tnfr/utils/init.pyi +78 -0
- tnfr/utils/io.py +456 -0
- tnfr/{helpers → utils}/numeric.py +51 -24
- tnfr/utils/numeric.pyi +21 -0
- tnfr/validation/__init__.py +113 -0
- tnfr/validation/__init__.pyi +77 -0
- tnfr/validation/compatibility.py +95 -0
- tnfr/validation/compatibility.pyi +6 -0
- tnfr/validation/grammar.py +71 -0
- tnfr/validation/grammar.pyi +40 -0
- tnfr/validation/graph.py +138 -0
- tnfr/validation/graph.pyi +17 -0
- tnfr/validation/rules.py +281 -0
- tnfr/validation/rules.pyi +55 -0
- tnfr/validation/runtime.py +263 -0
- tnfr/validation/runtime.pyi +31 -0
- tnfr/validation/soft_filters.py +170 -0
- tnfr/validation/soft_filters.pyi +37 -0
- tnfr/validation/spectral.py +159 -0
- tnfr/validation/spectral.pyi +46 -0
- tnfr/validation/syntax.py +40 -0
- tnfr/validation/syntax.pyi +10 -0
- tnfr/validation/window.py +39 -0
- tnfr/validation/window.pyi +1 -0
- tnfr/viz/__init__.py +9 -0
- tnfr/viz/matplotlib.py +246 -0
- tnfr-7.0.0.dist-info/METADATA +179 -0
- tnfr-7.0.0.dist-info/RECORD +185 -0
- {tnfr-4.5.2.dist-info → tnfr-7.0.0.dist-info}/licenses/LICENSE.md +1 -1
- tnfr/grammar.py +0 -344
- tnfr/graph_utils.py +0 -84
- tnfr/helpers/__init__.py +0 -71
- tnfr/import_utils.py +0 -228
- tnfr/json_utils.py +0 -162
- tnfr/logging_utils.py +0 -116
- tnfr/presets.py +0 -60
- tnfr/validators.py +0 -84
- tnfr/value_utils.py +0 -59
- tnfr-4.5.2.dist-info/METADATA +0 -379
- tnfr-4.5.2.dist-info/RECORD +0 -67
- {tnfr-4.5.2.dist-info → tnfr-7.0.0.dist-info}/WHEEL +0 -0
- {tnfr-4.5.2.dist-info → tnfr-7.0.0.dist-info}/entry_points.txt +0 -0
- {tnfr-4.5.2.dist-info → tnfr-7.0.0.dist-info}/top_level.txt +0 -0
tnfr/cli/execution.py
CHANGED
@@ -1,48 +1,76 @@
+"""CLI execution helpers for running canonical TNFR programs."""
+
 from __future__ import annotations

 import argparse
-
+from collections import deque
+from collections.abc import Mapping
+from copy import deepcopy
+from importlib import import_module
 from pathlib import Path
-from typing import Any, Optional
+from typing import Any, Optional, Sequence

-import networkx as nx
+import networkx as nx
+import numpy as np

-from ..
-from ..
+from ..config import apply_config
+from ..config.presets import (
+    PREFERRED_PRESET_NAMES,
+    get_preset,
+)
+from ..alias import get_attr
+from ..constants import METRIC_DEFAULTS, VF_PRIMARY, get_aliases, get_param
+from ..dynamics import default_glyph_selector, parametric_glyph_selector, run
+from ..execution import CANONICAL_PRESET_NAME, play
+from ..flatten import parse_program_tokens
+from ..glyph_history import ensure_history
+from ..mathematics import (
+    BasicStateProjector,
+    CoherenceOperator,
+    FrequencyOperator,
+    HilbertSpace,
+    MathematicalDynamicsEngine,
+    NFRValidator,
+    make_coherence_operator,
+    make_frequency_operator,
+)
 from ..metrics import (
-    register_metrics_callbacks,
-    glyph_top,
-    export_metrics,
     build_metrics_summary,
+    export_metrics,
+    glyph_top,
+    register_metrics_callbacks,
 )
 from ..metrics.core import _metrics_step
+from ..ontosim import prepare_network
+from ..sense import register_sigma_callback
 from ..trace import register_trace
-from ..
-from ..
-
-
-
-
+from ..types import ProgramTokens
+from ..utils import (
+    StructuredFileError,
+    clamp01,
+    get_logger,
+    json_dumps,
+    read_structured_file,
+    safe_write,
 )
-from ..presets import get_preset
-from ..config import apply_config
-from ..io import read_structured_file, safe_write, StructuredFileError
-from ..glyph_history import ensure_history
-from ..ontosim import preparar_red
-from ..logging_utils import get_logger
-from ..types import Glyph
-from ..json_utils import json_dumps
-from ..flatten import parse_program_tokens
-
 from .arguments import _args_to_dict
+from .utils import _parse_cli_variants
+from ..validation import validate_canon

 logger = get_logger(__name__)

+_VF_ALIASES = get_aliases("VF")
+VF_ALIAS_KEYS: tuple[str, ...] = (VF_PRIMARY,) + tuple(
+    alias for alias in _VF_ALIASES if alias != VF_PRIMARY
+)
+

 # CLI summaries should remain concise by default while allowing callers to
 # inspect the full glyphogram series when needed.
 DEFAULT_SUMMARY_SERIES_LIMIT = 10

+_PREFERRED_PRESETS_DISPLAY = ", ".join(PREFERRED_PRESET_NAMES)
+

 def _save_json(path: str, data: Any) -> None:
     payload = json_dumps(data, ensure_ascii=False, indent=2, default=list)
@@ -53,11 +81,17 @@ def _attach_callbacks(G: "nx.Graph") -> None:
     register_sigma_callback(G)
     register_metrics_callbacks(G)
     register_trace(G)
+    history = ensure_history(G)
+    maxlen = int(get_param(G, "PROGRAM_TRACE_MAXLEN"))
+    history.setdefault("program_trace", deque(maxlen=maxlen))
+    history.setdefault("trace_meta", [])
     _metrics_step(G, ctx=None)


 def _persist_history(G: "nx.Graph", args: argparse.Namespace) -> None:
-    if getattr(args, "save_history", None) or getattr(
+    if getattr(args, "save_history", None) or getattr(
+        args, "export_history_base", None
+    ):
         history = ensure_history(G)
         if getattr(args, "save_history", None):
             _save_json(args.save_history, history)
@@ -65,7 +99,155 @@ def _persist_history(G: "nx.Graph", args: argparse.Namespace) -> None:
         export_metrics(G, args.export_history_base, fmt=args.export_format)


+def _to_float_array(values: Sequence[float] | None, *, name: str) -> np.ndarray | None:
+    if values is None:
+        return None
+    array = np.asarray(list(values), dtype=float)
+    if array.ndim != 1:
+        raise ValueError(f"{name} must be a one-dimensional sequence of numbers")
+    return array
+
+
+def _resolve_math_dimension(
+    args: argparse.Namespace, fallback: int
+) -> int:
+    dimension = getattr(args, "math_dimension", None)
+    candidate_lengths: list[int] = []
+    for attr in (
+        "math_coherence_spectrum",
+        "math_frequency_diagonal",
+        "math_generator_diagonal",
+    ):
+        seq = getattr(args, attr, None)
+        if seq is not None:
+            candidate_lengths.append(len(seq))
+    if dimension is None:
+        if candidate_lengths:
+            unique = set(candidate_lengths)
+            if len(unique) > 1:
+                raise ValueError(
+                    "Math engine configuration requires matching sequence lengths"
+                )
+            dimension = unique.pop()
+        else:
+            dimension = fallback
+    else:
+        for length in candidate_lengths:
+            if length != dimension:
+                raise ValueError(
+                    "Math engine sequence lengths must match the requested dimension"
+                )
+    if dimension is None or dimension <= 0:
+        raise ValueError("Hilbert space dimension must be a positive integer")
+    return int(dimension)
+
+
+def _build_math_engine_config(
+    G: "nx.Graph", args: argparse.Namespace
+) -> dict[str, Any]:
+    node_count = G.number_of_nodes() if hasattr(G, "number_of_nodes") else len(list(G.nodes))
+    fallback_dim = max(1, int(node_count) if node_count is not None else 1)
+    dimension = _resolve_math_dimension(args, fallback=fallback_dim)
+
+    coherence_spectrum = _to_float_array(
+        getattr(args, "math_coherence_spectrum", None),
+        name="--math-coherence-spectrum",
+    )
+    if coherence_spectrum is not None and coherence_spectrum.size != dimension:
+        raise ValueError("Coherence spectrum length must equal the Hilbert dimension")
+
+    frequency_diagonal = _to_float_array(
+        getattr(args, "math_frequency_diagonal", None),
+        name="--math-frequency-diagonal",
+    )
+    if frequency_diagonal is not None and frequency_diagonal.size != dimension:
+        raise ValueError("Frequency diagonal length must equal the Hilbert dimension")
+
+    generator_diagonal = _to_float_array(
+        getattr(args, "math_generator_diagonal", None),
+        name="--math-generator-diagonal",
+    )
+    if generator_diagonal is not None and generator_diagonal.size != dimension:
+        raise ValueError("Generator diagonal length must equal the Hilbert dimension")
+
+    coherence_c_min = getattr(args, "math_coherence_c_min", None)
+    if coherence_spectrum is None:
+        coherence_operator = make_coherence_operator(
+            dimension,
+            c_min=float(coherence_c_min) if coherence_c_min is not None else 0.1,
+        )
+    else:
+        if coherence_c_min is not None:
+            coherence_operator = CoherenceOperator(
+                coherence_spectrum, c_min=float(coherence_c_min)
+            )
+        else:
+            coherence_operator = CoherenceOperator(coherence_spectrum)
+        if not coherence_operator.is_positive_semidefinite():
+            raise ValueError("Coherence spectrum must be positive semidefinite")
+
+    frequency_matrix: np.ndarray
+    if frequency_diagonal is None:
+        frequency_matrix = np.eye(dimension, dtype=float)
+    else:
+        frequency_matrix = np.diag(frequency_diagonal)
+    frequency_operator = make_frequency_operator(frequency_matrix)
+
+    generator_matrix: np.ndarray
+    if generator_diagonal is None:
+        generator_matrix = np.zeros((dimension, dimension), dtype=float)
+    else:
+        generator_matrix = np.diag(generator_diagonal)
+
+    hilbert_space = HilbertSpace(dimension)
+    dynamics_engine = MathematicalDynamicsEngine(
+        generator_matrix,
+        hilbert_space=hilbert_space,
+    )
+
+    coherence_threshold = getattr(args, "math_coherence_threshold", None)
+    if coherence_threshold is None:
+        coherence_threshold = float(coherence_operator.c_min)
+    else:
+        coherence_threshold = float(coherence_threshold)
+
+    state_projector = BasicStateProjector()
+    validator = NFRValidator(
+        hilbert_space,
+        coherence_operator,
+        coherence_threshold,
+        frequency_operator=frequency_operator,
+    )
+
+    return {
+        "enabled": True,
+        "dimension": dimension,
+        "hilbert_space": hilbert_space,
+        "coherence_operator": coherence_operator,
+        "frequency_operator": frequency_operator,
+        "coherence_threshold": coherence_threshold,
+        "state_projector": state_projector,
+        "validator": validator,
+        "dynamics_engine": dynamics_engine,
+        "generator_matrix": generator_matrix,
+    }
+
+
+def _configure_math_engine(G: "nx.Graph", args: argparse.Namespace) -> None:
+    if not getattr(args, "math_engine", False):
+        G.graph.pop("MATH_ENGINE", None)
+        return
+    try:
+        config = _build_math_engine_config(G, args)
+    except ValueError as exc:
+        logger.error("Math engine configuration error: %s", exc)
+        raise SystemExit(1) from exc
+    G.graph["MATH_ENGINE"] = config
+
+
 def build_basic_graph(args: argparse.Namespace) -> "nx.Graph":
+    """Construct the base graph topology described by CLI ``args``."""
+
     n = args.nodes
     topology = getattr(args, "topology", "ring").lower()
     seed = getattr(args, "seed", None)
@@ -81,7 +263,7 @@ def build_basic_graph(args: argparse.Namespace) -> "nx.Graph":
         fallback = 0.0
     else:
         fallback = 3.0 / n
-    prob =
+    prob = clamp01(fallback)
     if not 0.0 <= prob <= 1.0:
         raise ValueError(f"p must be between 0 and 1; received {prob}")
     G = nx.gnp_random_graph(n, prob, seed=seed)
@@ -95,8 +277,14 @@ def build_basic_graph(args: argparse.Namespace) -> "nx.Graph":


 def apply_cli_config(G: "nx.Graph", args: argparse.Namespace) -> None:
+    """Apply CLI overrides from ``args`` to graph-level configuration."""
+
     if args.config:
-
+        try:
+            apply_config(G, Path(args.config))
+        except (StructuredFileError, ValueError) as exc:
+            logger.error("%s", exc)
+            raise SystemExit(1) from exc
     arg_map = {
         "dt": ("DT", float),
         "integrator": ("INTEGRATOR_METHOD", str),
@@ -108,8 +296,18 @@ def apply_cli_config(G: "nx.Graph", args: argparse.Namespace) -> None:
         if val is not None:
             G.graph[key] = conv(val)

+    base_gcanon: dict[str, Any]
+    existing_gcanon = G.graph.get("GRAMMAR_CANON")
+    if isinstance(existing_gcanon, Mapping):
+        base_gcanon = {
+            **METRIC_DEFAULTS["GRAMMAR_CANON"],
+            **dict(existing_gcanon),
+        }
+    else:
+        base_gcanon = dict(METRIC_DEFAULTS["GRAMMAR_CANON"])
+
     gcanon = {
-        **
+        **base_gcanon,
         **_args_to_dict(args, prefix="grammar_"),
     }
     if getattr(args, "grammar_canon", None) is not None:
@@ -122,9 +320,7 @@ def apply_cli_config(G: "nx.Graph", args: argparse.Namespace) -> None:
         "basic": default_glyph_selector,
         "param": parametric_glyph_selector,
     }
-    G.graph["glyph_selector"] = sel_map.get(
-        selector, default_glyph_selector
-    )
+    G.graph["glyph_selector"] = sel_map.get(selector, default_glyph_selector)

     if hasattr(args, "gamma_type"):
         G.graph["GAMMA"] = {
@@ -133,8 +329,41 @@ def apply_cli_config(G: "nx.Graph", args: argparse.Namespace) -> None:
             "R0": args.gamma_R0,
         }

+    for attr, key in (
+        ("trace_verbosity", "TRACE"),
+        ("metrics_verbosity", "METRICS"),
+    ):
+        cfg = G.graph.get(key)
+        if not isinstance(cfg, dict):
+            cfg = deepcopy(METRIC_DEFAULTS[key])
+            G.graph[key] = cfg
+        value = getattr(args, attr, None)
+        if value is not None:
+            cfg["verbosity"] = value
+
+    candidate_count = getattr(args, "um_candidate_count", None)
+    if candidate_count is not None:
+        G.graph["UM_CANDIDATE_COUNT"] = int(candidate_count)
+
+    stop_window = getattr(args, "stop_early_window", None)
+    stop_fraction = getattr(args, "stop_early_fraction", None)
+    if stop_window is not None or stop_fraction is not None:
+        stop_cfg = G.graph.get("STOP_EARLY")
+        if isinstance(stop_cfg, Mapping):
+            next_cfg = {**stop_cfg}
+        else:
+            next_cfg = deepcopy(METRIC_DEFAULTS["STOP_EARLY"])
+        if stop_window is not None:
+            next_cfg["window"] = int(stop_window)
+        if stop_fraction is not None:
+            next_cfg["fraction"] = float(stop_fraction)
+        next_cfg.setdefault("enabled", True)
+        G.graph["STOP_EARLY"] = next_cfg
+

 def register_callbacks_and_observer(G: "nx.Graph") -> None:
+    """Attach callbacks and validators required for CLI runs."""
+
     _attach_callbacks(G)
     validate_canon(G)

@@ -144,12 +373,13 @@ def _build_graph_from_args(args: argparse.Namespace) -> "nx.Graph":
     apply_cli_config(G, args)
     if getattr(args, "observer", False):
         G.graph["ATTACH_STD_OBSERVER"] = True
-
+    prepare_network(G)
     register_callbacks_and_observer(G)
+    _configure_math_engine(G, args)
     return G


-def _load_sequence(path: Path) ->
+def _load_sequence(path: Path) -> ProgramTokens:
     try:
         data = read_structured_file(path)
     except (StructuredFileError, OSError) as exc:
@@ -159,19 +389,29 @@ def _load_sequence(path: Path) -> list[Any]:
         message = str(StructuredFileError(path, exc))
         logger.error("%s", message)
         raise SystemExit(1) from exc
+    if isinstance(data, Mapping) and "sequence" in data:
+        data = data["sequence"]
     return parse_program_tokens(data)


 def resolve_program(
-    args: argparse.Namespace, default: Optional[
-) -> Optional[
+    args: argparse.Namespace, default: Optional[ProgramTokens] = None
+) -> Optional[ProgramTokens]:
+    """Resolve preset/sequence inputs into program tokens."""
+
     if getattr(args, "preset", None):
         try:
             return get_preset(args.preset)
         except KeyError as exc:
+            details = exc.args[0] if exc.args else "Preset lookup failed."
             logger.error(
-
+                (
+                    "Unknown preset '%s'. Available presets: %s. %s "
+                    "Use --sequence-file to execute custom sequences."
+                ),
                 args.preset,
+                _PREFERRED_PRESETS_DISPLAY,
+                details,
             )
             raise SystemExit(1) from exc
     if getattr(args, "sequence_file", None):
@@ -180,8 +420,12 @@ def resolve_program(


 def run_program(
-    G: Optional["nx.Graph"],
+    G: Optional["nx.Graph"],
+    program: Optional[ProgramTokens],
+    args: argparse.Namespace,
 ) -> "nx.Graph":
+    """Execute ``program`` (or timed run) on ``G`` using CLI options."""
+
     if G is None:
         G = _build_graph_from_args(args)

@@ -197,6 +441,13 @@ def run_program(
             if value is not None:
                 run_kwargs[attr] = value

+        job_overrides: dict[str, Any] = {}
+        dnfr_jobs = getattr(args, "dnfr_n_jobs", None)
+        if dnfr_jobs is not None:
+            job_overrides["dnfr_n_jobs"] = int(dnfr_jobs)
+        if job_overrides:
+            run_kwargs["n_jobs"] = job_overrides
+
         run(G, steps=steps, **run_kwargs)
     else:
         play(G, program)
@@ -208,7 +459,7 @@ def run_program(
 def _run_cli_program(
     args: argparse.Namespace,
     *,
-    default_program: Optional[
+    default_program: Optional[ProgramTokens] = None,
     graph: Optional["nx.Graph"] = None,
 ) -> tuple[int, Optional["nx.Graph"]]:
     try:
@@ -217,10 +468,118 @@ def _run_cli_program(
         code = exc.code if isinstance(exc.code, int) else 1
         return code or 1, None

-
+    try:
+        result_graph = run_program(graph, program, args)
+    except SystemExit as exc:
+        code = exc.code if isinstance(exc.code, int) else 1
+        return code or 1, None
     return 0, result_graph


+def _log_math_engine_summary(G: "nx.Graph") -> None:
+    math_cfg = G.graph.get("MATH_ENGINE")
+    if not isinstance(math_cfg, Mapping) or not math_cfg.get("enabled"):
+        return
+
+    nodes = list(G.nodes)
+    if not nodes:
+        logger.info("[MATH] Math engine validation skipped: no nodes present")
+        return
+
+    hilbert_space: HilbertSpace = math_cfg["hilbert_space"]
+    coherence_operator: CoherenceOperator = math_cfg["coherence_operator"]
+    frequency_operator: FrequencyOperator | None = math_cfg.get("frequency_operator")
+    state_projector: BasicStateProjector = math_cfg.get(
+        "state_projector", BasicStateProjector()
+    )
+    validator: NFRValidator | None = math_cfg.get("validator")
+    if validator is None:
+        coherence_threshold = math_cfg.get("coherence_threshold")
+        validator = NFRValidator(
+            hilbert_space,
+            coherence_operator,
+            float(coherence_threshold) if coherence_threshold is not None else 0.0,
+            frequency_operator=frequency_operator,
+        )
+        math_cfg["validator"] = validator
+
+    enforce_frequency = bool(frequency_operator is not None)
+
+    norm_values: list[float] = []
+    normalized_flags: list[bool] = []
+    coherence_flags: list[bool] = []
+    coherence_values: list[float] = []
+    coherence_threshold: float | None = None
+    frequency_flags: list[bool] = []
+    frequency_values: list[float] = []
+    frequency_spectrum_min: float | None = None
+
+    for node_id in nodes:
+        data = G.nodes[node_id]
+        epi = float(data.get("EPI", 0.0))
+        nu_f = float(
+            get_attr(
+                data,
+                VF_ALIAS_KEYS,
+                default=float(data.get(VF_PRIMARY, 0.0)),
+            )
+        )
+        theta = float(data.get("theta", 0.0))
+        state = state_projector(epi=epi, nu_f=nu_f, theta=theta, dim=hilbert_space.dimension)
+        norm_values.append(float(hilbert_space.norm(state)))
+        outcome = validator.validate(
+            state,
+            enforce_frequency_positivity=enforce_frequency,
+        )
+        summary = outcome.summary
+        normalized_flags.append(bool(summary.get("normalized", False)))
+
+        coherence_summary = summary.get("coherence")
+        if isinstance(coherence_summary, Mapping):
+            coherence_flags.append(bool(coherence_summary.get("passed", False)))
+            coherence_values.append(float(coherence_summary.get("value", 0.0)))
+            if coherence_threshold is None and "threshold" in coherence_summary:
+                coherence_threshold = float(coherence_summary.get("threshold", 0.0))
+
+        frequency_summary = summary.get("frequency")
+        if isinstance(frequency_summary, Mapping):
+            frequency_flags.append(bool(frequency_summary.get("passed", False)))
+            frequency_values.append(float(frequency_summary.get("value", 0.0)))
+            if frequency_spectrum_min is None and "spectrum_min" in frequency_summary:
+                frequency_spectrum_min = float(frequency_summary.get("spectrum_min", 0.0))
+
+    if norm_values:
+        logger.info(
+            "[MATH] Hilbert norm preserved=%s (min=%.6f, max=%.6f)",
+            all(normalized_flags),
+            min(norm_values),
+            max(norm_values),
+        )
+
+    if coherence_values and coherence_threshold is not None:
+        logger.info(
+            "[MATH] Coherence ≥ C_min=%s (C_min=%.6f, min=%.6f)",
+            all(coherence_flags),
+            float(coherence_threshold),
+            min(coherence_values),
+        )
+
+    if frequency_values:
+        if frequency_spectrum_min is not None:
+            logger.info(
+                "[MATH] νf positivity=%s (min=%.6f, spectrum_min=%.6f)",
+                all(frequency_flags),
+                min(frequency_values),
+                frequency_spectrum_min,
+            )
+        else:
+            logger.info(
+                "[MATH] νf positivity=%s (min=%.6f)",
+                all(frequency_flags),
+                min(frequency_values),
+            )
+
+
 def _log_run_summaries(G: "nx.Graph", args: argparse.Namespace) -> None:
     cfg_coh = G.graph.get("COHERENCE", METRIC_DEFAULTS["COHERENCE"])
     cfg_diag = G.graph.get("DIAGNOSIS", METRIC_DEFAULTS["DIAGNOSIS"])
@@ -229,26 +588,30 @@ def _log_run_summaries(G: "nx.Graph", args: argparse.Namespace) -> None:
     if cfg_coh.get("enabled", True):
         Wstats = hist.get(cfg_coh.get("stats_history_key", "W_stats"), [])
         if Wstats:
-            logger.info("[COHERENCE]
+            logger.info("[COHERENCE] last step: %s", Wstats[-1])

     if cfg_diag.get("enabled", True):
         last_diag = hist.get(cfg_diag.get("history_key", "nodal_diag"), [])
         if last_diag:
             sample = list(last_diag[-1].values())[:3]
-            logger.info("[DIAGNOSIS]
+            logger.info("[DIAGNOSIS] sample: %s", sample)

     if args.summary:
         summary_limit = getattr(args, "summary_limit", DEFAULT_SUMMARY_SERIES_LIMIT)
         summary, has_latency_values = build_metrics_summary(
             G, series_limit=summary_limit
         )
-        logger.info("Tg
-        logger.info("Top
+        logger.info("Global Tg: %s", summary["Tg_global"])
+        logger.info("Top operators by Tg: %s", glyph_top(G, k=5))
         if has_latency_values:
-            logger.info("
+            logger.info("Average latency: %s", summary["latency_mean"])
+
+    _log_math_engine_summary(G)


 def cmd_run(args: argparse.Namespace) -> int:
+    """Execute ``tnfr run`` returning the exit status."""
+
     code, graph = _run_cli_program(args)
     if code != 0:
         return code
@@ -259,18 +622,18 @@


 def cmd_sequence(args: argparse.Namespace) -> int:
+    """Execute ``tnfr sequence`` returning the exit status."""
+
     if args.preset and args.sequence_file:
-        logger.error(
-            "No se puede usar --preset y --sequence-file al mismo tiempo"
-        )
+        logger.error("Cannot use --preset and --sequence-file at the same time")
         return 1
-    code, _ = _run_cli_program(
-        args, default_program=get_preset(CANONICAL_PRESET_NAME)
-    )
+    code, _ = _run_cli_program(args, default_program=get_preset(CANONICAL_PRESET_NAME))
     return code


 def cmd_metrics(args: argparse.Namespace) -> int:
+    """Execute ``tnfr metrics`` returning the exit status."""
+
     if getattr(args, "steps", None) is None:
         # Default a longer run for metrics stability
         args.steps = 200
@@ -286,3 +649,60 @@ def cmd_metrics(args: argparse.Namespace) -> int:
     else:
         logger.info("%s", json_dumps(out))
     return 0
+
+
+def cmd_profile_si(args: argparse.Namespace) -> int:
+    """Execute ``tnfr profile-si`` returning the exit status."""
+
+    try:
+        profile_module = import_module("benchmarks.compute_si_profile")
+    except ModuleNotFoundError as exc:  # pragma: no cover - optional dependency
+        logger.error("Sense Index profiling helpers unavailable: %s", exc)
+        return 1
+
+    profile_compute_si = getattr(profile_module, "profile_compute_si")
+
+    profile_compute_si(
+        node_count=int(args.nodes),
+        chord_step=int(args.chord_step),
+        loops=int(args.loops),
+        output_dir=Path(args.output_dir),
+        fmt=str(args.format),
+        sort=str(args.sort),
+    )
+    return 0
+
+
+def cmd_profile_pipeline(args: argparse.Namespace) -> int:
+    """Execute ``tnfr profile-pipeline`` returning the exit status."""
+
+    try:
+        profile_module = import_module("benchmarks.full_pipeline_profile")
+    except ModuleNotFoundError as exc:  # pragma: no cover - optional dependency
+        logger.error("Full pipeline profiling helpers unavailable: %s", exc)
+        return 1
+
+    profile_full_pipeline = getattr(profile_module, "profile_full_pipeline")
+
+    try:
+        si_chunk_sizes = _parse_cli_variants(getattr(args, "si_chunk_sizes", None))
+        dnfr_chunk_sizes = _parse_cli_variants(getattr(args, "dnfr_chunk_sizes", None))
+        si_workers = _parse_cli_variants(getattr(args, "si_workers", None))
+        dnfr_workers = _parse_cli_variants(getattr(args, "dnfr_workers", None))
+    except ValueError as exc:
+        logger.error("%s", exc)
+        return 2
+
+    profile_full_pipeline(
+        node_count=int(args.nodes),
+        edge_probability=float(args.edge_probability),
+        loops=int(args.loops),
+        seed=int(args.seed),
+        output_dir=Path(args.output_dir),
+        sort=str(args.sort),
+        si_chunk_sizes=si_chunk_sizes,
+        dnfr_chunk_sizes=dnfr_chunk_sizes,
+        si_workers=si_workers,
+        dnfr_workers=dnfr_workers,
+    )
+    return 0