tnfr 4.5.1__py3-none-any.whl → 4.5.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of tnfr might be problematic. Click here for more details.

Files changed (78)
  1. tnfr/__init__.py +91 -90
  2. tnfr/alias.py +546 -0
  3. tnfr/cache.py +578 -0
  4. tnfr/callback_utils.py +388 -0
  5. tnfr/cli/__init__.py +75 -0
  6. tnfr/cli/arguments.py +177 -0
  7. tnfr/cli/execution.py +288 -0
  8. tnfr/cli/utils.py +36 -0
  9. tnfr/collections_utils.py +300 -0
  10. tnfr/config.py +19 -28
  11. tnfr/constants/__init__.py +174 -0
  12. tnfr/constants/core.py +159 -0
  13. tnfr/constants/init.py +31 -0
  14. tnfr/constants/metric.py +110 -0
  15. tnfr/constants_glyphs.py +98 -0
  16. tnfr/dynamics/__init__.py +658 -0
  17. tnfr/dynamics/dnfr.py +733 -0
  18. tnfr/dynamics/integrators.py +267 -0
  19. tnfr/dynamics/sampling.py +31 -0
  20. tnfr/execution.py +201 -0
  21. tnfr/flatten.py +283 -0
  22. tnfr/gamma.py +302 -88
  23. tnfr/glyph_history.py +290 -0
  24. tnfr/grammar.py +285 -96
  25. tnfr/graph_utils.py +84 -0
  26. tnfr/helpers/__init__.py +71 -0
  27. tnfr/helpers/numeric.py +87 -0
  28. tnfr/immutable.py +178 -0
  29. tnfr/import_utils.py +228 -0
  30. tnfr/initialization.py +197 -0
  31. tnfr/io.py +246 -0
  32. tnfr/json_utils.py +162 -0
  33. tnfr/locking.py +37 -0
  34. tnfr/logging_utils.py +116 -0
  35. tnfr/metrics/__init__.py +41 -0
  36. tnfr/metrics/coherence.py +829 -0
  37. tnfr/metrics/common.py +151 -0
  38. tnfr/metrics/core.py +101 -0
  39. tnfr/metrics/diagnosis.py +234 -0
  40. tnfr/metrics/export.py +137 -0
  41. tnfr/metrics/glyph_timing.py +189 -0
  42. tnfr/metrics/reporting.py +148 -0
  43. tnfr/metrics/sense_index.py +120 -0
  44. tnfr/metrics/trig.py +181 -0
  45. tnfr/metrics/trig_cache.py +109 -0
  46. tnfr/node.py +214 -159
  47. tnfr/observers.py +126 -136
  48. tnfr/ontosim.py +134 -134
  49. tnfr/operators/__init__.py +420 -0
  50. tnfr/operators/jitter.py +203 -0
  51. tnfr/operators/remesh.py +485 -0
  52. tnfr/presets.py +46 -14
  53. tnfr/rng.py +254 -0
  54. tnfr/selector.py +210 -0
  55. tnfr/sense.py +284 -131
  56. tnfr/structural.py +207 -79
  57. tnfr/tokens.py +60 -0
  58. tnfr/trace.py +329 -94
  59. tnfr/types.py +43 -17
  60. tnfr/validators.py +70 -24
  61. tnfr/value_utils.py +59 -0
  62. tnfr-4.5.2.dist-info/METADATA +379 -0
  63. tnfr-4.5.2.dist-info/RECORD +67 -0
  64. tnfr/cli.py +0 -322
  65. tnfr/constants.py +0 -277
  66. tnfr/dynamics.py +0 -814
  67. tnfr/helpers.py +0 -264
  68. tnfr/main.py +0 -47
  69. tnfr/metrics.py +0 -597
  70. tnfr/operators.py +0 -525
  71. tnfr/program.py +0 -176
  72. tnfr/scenarios.py +0 -34
  73. tnfr-4.5.1.dist-info/METADATA +0 -221
  74. tnfr-4.5.1.dist-info/RECORD +0 -28
  75. {tnfr-4.5.1.dist-info → tnfr-4.5.2.dist-info}/WHEEL +0 -0
  76. {tnfr-4.5.1.dist-info → tnfr-4.5.2.dist-info}/entry_points.txt +0 -0
  77. {tnfr-4.5.1.dist-info → tnfr-4.5.2.dist-info}/licenses/LICENSE.md +0 -0
  78. {tnfr-4.5.1.dist-info → tnfr-4.5.2.dist-info}/top_level.txt +0 -0
tnfr/io.py ADDED
@@ -0,0 +1,246 @@
1
+ """Structured file I/O utilities.
2
+
3
+ Optional parsers such as ``tomllib``/``tomli`` and ``pyyaml`` are loaded via
4
+ the :func:`tnfr.import_utils.cached_import` helper. Their import results and
5
+ failure states are cached and can be cleared with
6
+ ``cached_import.cache_clear()`` and :func:`tnfr.import_utils.prune_failed_imports`
7
+ when needed.
8
+ """
9
+
10
+ from __future__ import annotations
11
+
12
+ import json
13
+ import os
14
+ import tempfile
15
+ from pathlib import Path
16
+ from typing import Any, Callable
17
+ from functools import partial
18
+
19
+ from .import_utils import cached_import
20
+ from .logging_utils import get_logger
21
+
22
+
23
def _raise_import_error(name: str, *_: Any, **__: Any) -> Any:
    """Stand-in callable that always fails with a clear message for ``name``.

    Used as a fallback for optional dependencies: any positional or keyword
    arguments are accepted and discarded, then :class:`ImportError` is raised.
    """
    message = f"{name} is not installed"
    raise ImportError(message)
25
+
26
+
27
# Dynamically created exception classes used as safe fallbacks so that
# ``except TOMLDecodeError`` / ``except YAMLError`` clauses below remain
# valid Python even when the optional parser packages are absent.
_MISSING_TOML_ERROR = type(
    "MissingTOMLDependencyError",
    (Exception,),
    {"__doc__": "Fallback error used when tomllib/tomli is missing."},
)

_MISSING_YAML_ERROR = type(
    "MissingPyYAMLDependencyError",
    (Exception,),
    {"__doc__": "Fallback error used when pyyaml is missing."},
)
38
+
39
+
40
# Resolve the TOML reader module: prefer stdlib ``tomllib`` (3.11+), fall
# back to the third-party ``tomli`` backport. ``None`` when neither exists.
# NOTE(review): both fallbacks are resolved eagerly even when the primary
# import succeeds; ``cached_import`` caches results so this is cheap.
tomllib = cached_import(
    "tomllib",
    emit="log",
    fallback=cached_import("tomli", emit="log"),
)
# True when any TOML parser module was found.
has_toml = tomllib is not None


# Concrete decode-error type from whichever TOML module is present, else the
# synthetic ``_MISSING_TOML_ERROR`` so ``except TOMLDecodeError`` stays valid.
TOMLDecodeError = cached_import(
    "tomllib",
    "TOMLDecodeError",
    emit="log",
    fallback=cached_import(
        "tomli",
        "TOMLDecodeError",
        emit="log",
        fallback=_MISSING_TOML_ERROR,
    ),
)


# ``loads`` from tomllib/tomli; the fallback raises ImportError lazily, at
# parse time, when neither module is installed.
_TOML_LOADS: Callable[[str], Any] = cached_import(
    "tomllib",
    "loads",
    emit="log",
    fallback=cached_import(
        "tomli",
        "loads",
        emit="log",
        fallback=partial(_raise_import_error, "tomllib/tomli"),
    ),
)


# Optional PyYAML module (``None`` when not installed).
yaml = cached_import("yaml", emit="log")


# PyYAML's error type, or the synthetic fallback when pyyaml is absent.
YAMLError = cached_import(
    "yaml",
    "YAMLError",
    emit="log",
    fallback=_MISSING_YAML_ERROR,
)


# ``yaml.safe_load``; raises ImportError lazily when pyyaml is missing.
_YAML_SAFE_LOAD: Callable[[str], Any] = cached_import(
    "yaml",
    "safe_load",
    emit="log",
    fallback=partial(_raise_import_error, "pyyaml"),
)
91
+
92
+
93
def _parse_yaml(text: str) -> Any:
    """Deserialize YAML ``text`` via ``yaml.safe_load`` when available.

    Raises :class:`ImportError` lazily when pyyaml is not installed.
    """
    return _YAML_SAFE_LOAD(text)


def _parse_toml(text: str) -> Any:
    """Deserialize TOML ``text`` via ``tomllib``/``tomli`` when available.

    Raises :class:`ImportError` lazily when neither module is installed.
    """
    return _TOML_LOADS(text)
101
+
102
+
103
# Maps a lower-cased file suffix to the callable used to parse its text.
# JSON always works (stdlib); the YAML/TOML entries raise ImportError at
# parse time when their optional dependency is missing.
PARSERS = {
    ".json": json.loads,
    ".yaml": _parse_yaml,
    ".yml": _parse_yaml,
    ".toml": _parse_toml,
}
109
+
110
+
111
def _get_parser(suffix: str) -> Callable[[str], Any]:
    """Look up the parser registered for ``suffix`` (e.g. ``".json"``).

    Raises :class:`ValueError` (chained from the ``KeyError``) for suffixes
    without a registered parser.
    """
    try:
        return PARSERS[suffix]
    except KeyError as err:
        raise ValueError(f"Unsupported suffix: {suffix}") from err
116
+
117
+
118
# Maps exception types to message templates consumed by
# ``_format_structured_file_error``; matched with ``isinstance`` in
# insertion order, so more specific types should come first.
ERROR_MESSAGES = {
    OSError: "Could not read {path}: {e}",
    UnicodeDecodeError: "Encoding error while reading {path}: {e}",
    json.JSONDecodeError: "Error parsing JSON file at {path}: {e}",
    YAMLError: "Error parsing YAML file at {path}: {e}",
    ImportError: "Missing dependency parsing {path}: {e}",
}
# Only register the TOML template when a real decoder module was found;
# otherwise ``TOMLDecodeError`` is the synthetic fallback class.
if has_toml:
    ERROR_MESSAGES[TOMLDecodeError] = "Error parsing TOML file at {path}: {e}"
127
+
128
+
129
def _format_structured_file_error(path: Path, e: Exception) -> str:
    """Build a human-readable message for ``e`` raised while loading ``path``.

    Uses the first matching template from ``ERROR_MESSAGES``; falls back to a
    generic message for unrecognised exception types.
    """
    for exc_type, template in ERROR_MESSAGES.items():
        if isinstance(e, exc_type):
            return template.format(path=path, e=e)
    return f"Error parsing {path}: {e}"
134
+
135
+
136
class StructuredFileError(Exception):
    """Error while reading or parsing a structured file.

    Attributes
    ----------
    path:
        The file that failed to load.
    original:
        The underlying exception that triggered this error. Previously only
        its formatted text survived; keeping the object allows programmatic
        inspection without relying on ``__cause__`` being set by the raiser.
    """

    def __init__(self, path: Path, original: Exception):
        super().__init__(_format_structured_file_error(path, original))
        self.path = path
        self.original = original
142
+
143
+
144
def read_structured_file(path: Path) -> Any:
    """Read a JSON, YAML or TOML file and return the parsed data.

    The parser is selected from the (lower-cased) file suffix. Any read,
    decode or parse failure is re-raised as :class:`StructuredFileError`.
    """
    try:
        parser = _get_parser(path.suffix.lower())
    except ValueError as err:
        raise StructuredFileError(path, err) from err

    # Exceptions that are expected while reading/parsing and therefore
    # wrapped; anything else propagates unchanged.
    recoverable = (
        OSError,
        UnicodeDecodeError,
        json.JSONDecodeError,
        YAMLError,
        TOMLDecodeError,
        ImportError,
    )
    try:
        return parser(path.read_text(encoding="utf-8"))
    except recoverable as err:
        raise StructuredFileError(path, err) from err
163
+
164
+
165
# Module-level logger shared by the write helpers below.
logger = get_logger(__name__)
166
+
167
+
168
def safe_write(
    path: str | Path,
    write: Callable[[Any], Any],
    *,
    mode: str = "w",
    encoding: str | None = "utf-8",
    atomic: bool = True,
    sync: bool | None = None,
    **open_kwargs: Any,
) -> None:
    """Write to ``path`` ensuring parent directory exists and handle errors.

    Parameters
    ----------
    path:
        Destination file path.
    write:
        Callback receiving the opened file object and performing the actual
        write.
    mode:
        File mode passed to :func:`open`. Text modes (default) use UTF-8
        encoding unless ``encoding`` is ``None``. When a binary mode is used
        (``'b'`` in ``mode``) no encoding parameter is supplied so
        ``write`` may write bytes.
    encoding:
        Encoding for text modes. Ignored for binary modes.
    atomic:
        When ``True`` (default) writes to a temporary file and atomically
        replaces the destination after flushing to disk. When ``False``
        writes directly to ``path`` without any atomicity guarantee.
    sync:
        When ``True`` flushes and fsyncs the file descriptor after writing.
        ``None`` uses ``atomic`` to determine syncing behaviour.

    Raises
    ------
    OSError, ValueError, TypeError
        Re-raised with a message identifying ``path``. When the original
        exception type cannot be reconstructed from a single message
        argument (e.g. :class:`UnicodeEncodeError`), the original exception
        is re-raised unchanged instead of masking it with a spurious
        ``TypeError`` from the failed constructor call.
    """
    path = Path(path)
    path.parent.mkdir(parents=True, exist_ok=True)
    open_params = dict(mode=mode, **open_kwargs)
    if "b" not in mode and encoding is not None:
        open_params["encoding"] = encoding
    if sync is None:
        sync = atomic
    tmp_path: Path | None = None
    try:
        if atomic:
            # Create the temp file in the destination directory so that
            # ``os.replace`` stays on one filesystem and remains atomic.
            tmp_fd = tempfile.NamedTemporaryFile(dir=path.parent, delete=False)
            tmp_path = Path(tmp_fd.name)
            tmp_fd.close()
            with open(tmp_path, **open_params) as fd:
                write(fd)
                if sync:
                    fd.flush()
                    os.fsync(fd.fileno())
            try:
                os.replace(tmp_path, path)
            except OSError as e:
                logger.error(
                    "Atomic replace failed for %s -> %s: %s", tmp_path, path, e
                )
                raise
        else:
            with open(path, **open_params) as fd:
                write(fd)
                if sync:
                    fd.flush()
                    os.fsync(fd.fileno())
    except (OSError, ValueError, TypeError) as e:
        message = f"Failed to write file {path}: {e}"
        try:
            wrapped = type(e)(message)
        except Exception:
            # Exception types with mandatory extra constructor arguments
            # (e.g. UnicodeEncodeError) cannot be rebuilt from a message
            # alone; propagate the original error in that case.
            wrapped = None
        if wrapped is None:
            raise
        raise wrapped from e
    finally:
        # Remove the temp file on failure; after a successful replace it no
        # longer exists and ``missing_ok`` makes this a no-op.
        if tmp_path is not None:
            tmp_path.unlink(missing_ok=True)
238
+
239
+
240
# Public API of this module; the re-exported error types let callers catch
# parser failures without importing the optional dependencies themselves.
__all__ = (
    "read_structured_file",
    "safe_write",
    "StructuredFileError",
    "TOMLDecodeError",
    "YAMLError",
)
tnfr/json_utils.py ADDED
@@ -0,0 +1,162 @@
1
+ """JSON helpers with optional :mod:`orjson` support.
2
+
3
+ This module lazily imports :mod:`orjson` on first use of :func:`json_dumps`.
4
+ The fast serializer is brought in through
5
+ ``tnfr.import_utils.cached_import``; its cache and failure registry can be
6
+ reset using ``cached_import.cache_clear()`` and
7
+ :func:`tnfr.import_utils.prune_failed_imports`.
8
+ """
9
+
10
+ from __future__ import annotations
11
+
12
+ from dataclasses import dataclass
13
+ import json
14
+ from typing import Any, Callable
15
+
16
+ from .import_utils import cached_import
17
+ from .logging_utils import get_logger, warn_once
18
+
19
# %-style template for the one-time warning listing orjson-incompatible
# keyword arguments that ``json_dumps`` silently ignores.
_ORJSON_PARAMS_MSG = (
    "'ensure_ascii', 'separators', 'cls' and extra kwargs are ignored when using orjson: %s"
)

logger = get_logger(__name__)

# Deduplicates the warning above per unique ignored-parameter combination.
_warn_ignored_params_once = warn_once(logger, _ORJSON_PARAMS_MSG)
26
+
27
+
28
def clear_orjson_param_warnings() -> None:
    """Reset cached warnings for ignored :mod:`orjson` parameters.

    After calling this, the next :func:`json_dumps` call that ignores
    parameters will emit the warning again even for previously seen
    combinations.
    """

    _warn_ignored_params_once.clear()
32
+
33
+
34
def _format_ignored_params(combo: frozenset[str]) -> str:
    """Render ``combo`` as a sorted ``{'a', 'b'}`` string.

    Sorting makes the output deterministic so identical combinations always
    produce identical warning text.
    """
    inner = ", ".join(repr(name) for name in sorted(combo))
    return "{" + inner + "}"
37
+
38
+
39
@dataclass(frozen=True)
class JsonDumpsParams:
    """Container describing the parameters used by :func:`json_dumps`.

    Mirrors the keyword arguments of :func:`json.dumps`; frozen so the
    shared all-defaults instance below can be reused safely.
    """

    sort_keys: bool = False  # sort object keys deterministically
    default: Callable[[Any], Any] | None = None  # fallback serializer hook
    ensure_ascii: bool = True  # escape non-ASCII (honoured by stdlib only)
    separators: tuple[str, str] = (",", ":")  # compact output (stdlib only)
    cls: type[json.JSONEncoder] | None = None  # custom encoder (stdlib only)
    to_bytes: bool = False  # return bytes instead of str


# Shared instance for the all-defaults fast path in ``json_dumps``.
DEFAULT_PARAMS = JsonDumpsParams()
52
+
53
+
54
def _collect_ignored_params(
    params: JsonDumpsParams, extra_kwargs: dict[str, Any]
) -> frozenset[str]:
    """Name every parameter in ``params``/``extra_kwargs`` orjson ignores.

    Only non-default values count: orjson supports ``sort_keys`` and
    ``default``, so those are never reported.
    """

    ignored: set[str] = set(extra_kwargs)
    if params.ensure_ascii is not True:
        ignored.add("ensure_ascii")
    if params.separators != (",", ":"):
        ignored.add("separators")
    if params.cls is not None:
        ignored.add("cls")
    return frozenset(ignored)
69
+
70
+
71
def _json_dumps_orjson(
    orjson: Any,
    obj: Any,
    params: JsonDumpsParams,
    **kwargs: Any,
) -> bytes | str:
    """Serialize with :mod:`orjson`, warning once about unsupported params."""

    dropped = _collect_ignored_params(params, kwargs)
    if dropped:
        _warn_ignored_params_once(dropped, _format_ignored_params(dropped))

    opts = orjson.OPT_SORT_KEYS if params.sort_keys else 0
    payload = orjson.dumps(obj, option=opts, default=params.default)
    if params.to_bytes:
        return payload
    return payload.decode("utf-8")
86
+
87
+
88
def _json_dumps_std(
    obj: Any,
    params: JsonDumpsParams,
    **kwargs: Any,
) -> bytes | str:
    """Serialize with the standard library :func:`json.dumps`.

    Unlike the orjson path, every field of ``params`` is honoured here.
    """

    text = json.dumps(
        obj,
        sort_keys=params.sort_keys,
        ensure_ascii=params.ensure_ascii,
        separators=params.separators,
        cls=params.cls,
        default=params.default,
        **kwargs,
    )
    if params.to_bytes:
        return text.encode("utf-8")
    return text
104
+
105
+
106
def _validate_json_dumps_args(
    sort_keys: bool,
    default: Callable[[Any], Any] | None,
    ensure_ascii: bool,
    separators: tuple[str, str],
    cls: type[json.JSONEncoder] | None,
    to_bytes: bool,
) -> None:
    """Raise :class:`TypeError` for any malformed :func:`json_dumps` argument."""
    if not isinstance(sort_keys, bool):
        raise TypeError("sort_keys must be a boolean")
    if default is not None and not callable(default):
        raise TypeError("default must be callable when provided")
    if not isinstance(ensure_ascii, bool):
        raise TypeError("ensure_ascii must be a boolean")
    if not isinstance(separators, tuple) or len(separators) != 2:
        raise TypeError("separators must be a tuple of two strings")
    if not all(isinstance(part, str) for part in separators):
        raise TypeError("separators must be a tuple of two strings")
    if cls is not None:
        if not isinstance(cls, type) or not issubclass(cls, json.JSONEncoder):
            raise TypeError("cls must be a subclass of json.JSONEncoder")
    if not isinstance(to_bytes, bool):
        raise TypeError("to_bytes must be a boolean")


def json_dumps(
    obj: Any,
    *,
    sort_keys: bool = False,
    default: Callable[[Any], Any] | None = None,
    ensure_ascii: bool = True,
    separators: tuple[str, str] = (",", ":"),
    cls: type[json.JSONEncoder] | None = None,
    to_bytes: bool = False,
    **kwargs: Any,
) -> bytes | str:
    """Serialize ``obj`` to JSON using ``orjson`` when available.

    Returns a ``str`` by default. Pass ``to_bytes=True`` to obtain a ``bytes``
    result. When :mod:`orjson` is used, the ``ensure_ascii``, ``separators``,
    ``cls`` and any additional keyword arguments are ignored because they are
    not supported by :func:`orjson.dumps`. A warning is emitted whenever such
    ignored parameters are detected.
    """
    _validate_json_dumps_args(
        sort_keys, default, ensure_ascii, separators, cls, to_bytes
    )

    # Reuse the shared frozen instance on the common all-defaults path.
    all_defaults = (
        sort_keys is False
        and default is None
        and ensure_ascii is True
        and separators == (",", ":")
        and cls is None
        and to_bytes is False
    )
    if all_defaults:
        params = DEFAULT_PARAMS
    else:
        params = JsonDumpsParams(
            sort_keys=sort_keys,
            default=default,
            ensure_ascii=ensure_ascii,
            separators=separators,
            cls=cls,
            to_bytes=to_bytes,
        )

    orjson = cached_import("orjson", emit="log")
    if orjson is not None:
        return _json_dumps_orjson(orjson, obj, params, **kwargs)
    return _json_dumps_std(obj, params, **kwargs)
tnfr/locking.py ADDED
@@ -0,0 +1,37 @@
1
+ """Utilities for named locks.
2
+
3
+ This module provides helpers to obtain process-wide ``threading.Lock``
4
+ instances identified by name. Locks are created lazily and reused,
5
+ allowing different modules to synchronise on shared resources without
6
+ redefining locks repeatedly.
7
+ """
8
+
9
+ from __future__ import annotations
10
+
11
+ import threading
12
+ from weakref import WeakValueDictionary
13
+
14
# Registry of named locks, guarded by ``_REGISTRY_LOCK``. A
# ``WeakValueDictionary`` drops an entry automatically once no caller holds
# a reference to its lock anymore, so the registry never grows unboundedly
# and stays aligned with the active coherence nodes.
_locks: WeakValueDictionary[str, threading.Lock] = WeakValueDictionary()
_REGISTRY_LOCK = threading.Lock()


def get_lock(name: str) -> threading.Lock:
    """Return the process-wide lock registered under ``name``.

    Repeated calls with the same ``name`` yield the same lock object; a
    fresh lock is created and registered on first use.
    """

    with _REGISTRY_LOCK:
        try:
            return _locks[name]
        except KeyError:
            fresh = threading.Lock()
            _locks[name] = fresh
            return fresh


__all__ = ["get_lock"]
tnfr/logging_utils.py ADDED
@@ -0,0 +1,116 @@
1
+ """Logging utilities for TNFR.
2
+
3
+ Centralises creation of module-specific loggers so that all TNFR
4
+ modules share a consistent configuration.
5
+ """
6
+
7
+ from __future__ import annotations
8
+
9
+ import logging
10
+ import threading
11
+ from typing import Any, Hashable, Mapping
12
+
13
+ __all__ = ("_configure_root", "get_logger", "WarnOnce", "warn_once")
14
+
15
# Tracks whether ``_configure_root`` already ran; configuration is one-shot.
_LOGGING_CONFIGURED = False


def _configure_root() -> None:
    """Ensure the root logger has handlers and a default format.

    Only calls :func:`logging.basicConfig` when the root logger is still
    unconfigured, so applications that set up logging first are untouched.
    """

    global _LOGGING_CONFIGURED
    if _LOGGING_CONFIGURED:
        return

    root = logging.getLogger()
    if not root.handlers:
        kwargs: dict[str, Any] = {
            "format": "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
        }
        # Only force a level when none was chosen yet.
        if root.level == logging.NOTSET:
            kwargs["level"] = logging.INFO
        logging.basicConfig(**kwargs)

    _LOGGING_CONFIGURED = True


def get_logger(name: str) -> logging.Logger:
    """Return the logger for ``name``, configuring root logging on first use."""

    _configure_root()
    return logging.getLogger(name)
39
+
40
+
41
class WarnOnce:
    """Log a warning only once for each unique key.

    ``WarnOnce`` tracks seen keys in a bounded :class:`set`. When ``maxsize``
    is reached an arbitrary key is evicted to keep memory usage stable;
    ordered eviction is intentionally avoided to keep the implementation
    lightweight. Instances are callable and accept either a mapping of keys
    to values or a single key/value pair. Passing ``maxsize <= 0`` disables
    caching and logs on every invocation.
    """

    def __init__(self, logger: logging.Logger, msg: str, *, maxsize: int = 1024) -> None:
        self._logger = logger
        self._msg = msg
        self._maxsize = maxsize
        self._seen: set[Hashable] = set()
        self._lock = threading.Lock()

    def _mark_seen(self, key: Hashable) -> bool:
        """Record ``key``; return ``True`` when it is new and should log."""

        if self._maxsize <= 0:
            return True  # caching disabled: every invocation logs
        if key in self._seen:
            return False
        if len(self._seen) >= self._maxsize:
            # ``set.pop()`` evicts an arbitrary element, which is acceptable
            # for this lightweight bounded cache.
            self._seen.pop()
        self._seen.add(key)
        return True

    def __call__(
        self,
        data: Mapping[Hashable, Any] | Hashable,
        value: Any | None = None,
    ) -> None:
        """Log new keys found in ``data``.

        ``data`` may be a mapping of keys to payloads or a single key. When
        called with a single key ``value`` customises the payload passed to
        the logging message; the key itself is used when ``value`` is
        omitted.
        """

        if isinstance(data, Mapping):
            with self._lock:
                fresh = {
                    key: payload
                    for key, payload in data.items()
                    if self._mark_seen(key)
                }
            if fresh:
                self._logger.warning(self._msg, fresh)
            return

        payload = data if value is None else value
        with self._lock:
            is_new = self._mark_seen(data)
        if is_new:
            self._logger.warning(self._msg, payload)

    def clear(self) -> None:
        """Forget all previously seen keys."""
        with self._lock:
            self._seen.clear()


def warn_once(
    logger: logging.Logger,
    msg: str,
    *,
    maxsize: int = 1024,
) -> WarnOnce:
    """Create a :class:`WarnOnce` bound to ``logger`` and ``msg``."""
    return WarnOnce(logger, msg, maxsize=maxsize)
@@ -0,0 +1,41 @@
1
+ """Registerable metrics."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from .core import register_metrics_callbacks
6
+ from .reporting import (
7
+ Tg_global,
8
+ Tg_by_node,
9
+ latency_series,
10
+ glyphogram_series,
11
+ glyph_top,
12
+ build_metrics_summary,
13
+ )
14
+ from .coherence import (
15
+ coherence_matrix,
16
+ local_phase_sync,
17
+ local_phase_sync_weighted,
18
+ register_coherence_callbacks,
19
+ )
20
+ from .diagnosis import (
21
+ register_diagnosis_callbacks,
22
+ dissonance_events,
23
+ )
24
+ from .export import export_metrics
25
+
26
+ __all__ = (
27
+ "register_metrics_callbacks",
28
+ "Tg_global",
29
+ "Tg_by_node",
30
+ "latency_series",
31
+ "glyphogram_series",
32
+ "glyph_top",
33
+ "build_metrics_summary",
34
+ "coherence_matrix",
35
+ "local_phase_sync",
36
+ "local_phase_sync_weighted",
37
+ "register_coherence_callbacks",
38
+ "register_diagnosis_callbacks",
39
+ "dissonance_events",
40
+ "export_metrics",
41
+ )