krons-0.1.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kronos/__init__.py +0 -0
- kronos/core/__init__.py +145 -0
- kronos/core/broadcaster.py +116 -0
- kronos/core/element.py +225 -0
- kronos/core/event.py +316 -0
- kronos/core/eventbus.py +116 -0
- kronos/core/flow.py +356 -0
- kronos/core/graph.py +442 -0
- kronos/core/node.py +982 -0
- kronos/core/pile.py +575 -0
- kronos/core/processor.py +494 -0
- kronos/core/progression.py +296 -0
- kronos/enforcement/__init__.py +57 -0
- kronos/enforcement/common/__init__.py +34 -0
- kronos/enforcement/common/boolean.py +85 -0
- kronos/enforcement/common/choice.py +97 -0
- kronos/enforcement/common/mapping.py +118 -0
- kronos/enforcement/common/model.py +102 -0
- kronos/enforcement/common/number.py +98 -0
- kronos/enforcement/common/string.py +140 -0
- kronos/enforcement/context.py +129 -0
- kronos/enforcement/policy.py +80 -0
- kronos/enforcement/registry.py +153 -0
- kronos/enforcement/rule.py +312 -0
- kronos/enforcement/service.py +370 -0
- kronos/enforcement/validator.py +198 -0
- kronos/errors.py +146 -0
- kronos/operations/__init__.py +32 -0
- kronos/operations/builder.py +228 -0
- kronos/operations/flow.py +398 -0
- kronos/operations/node.py +101 -0
- kronos/operations/registry.py +92 -0
- kronos/protocols.py +414 -0
- kronos/py.typed +0 -0
- kronos/services/__init__.py +81 -0
- kronos/services/backend.py +286 -0
- kronos/services/endpoint.py +608 -0
- kronos/services/hook.py +471 -0
- kronos/services/imodel.py +465 -0
- kronos/services/registry.py +115 -0
- kronos/services/utilities/__init__.py +36 -0
- kronos/services/utilities/header_factory.py +87 -0
- kronos/services/utilities/rate_limited_executor.py +271 -0
- kronos/services/utilities/rate_limiter.py +180 -0
- kronos/services/utilities/resilience.py +414 -0
- kronos/session/__init__.py +41 -0
- kronos/session/exchange.py +258 -0
- kronos/session/message.py +60 -0
- kronos/session/session.py +411 -0
- kronos/specs/__init__.py +25 -0
- kronos/specs/adapters/__init__.py +0 -0
- kronos/specs/adapters/_utils.py +45 -0
- kronos/specs/adapters/dataclass_field.py +246 -0
- kronos/specs/adapters/factory.py +56 -0
- kronos/specs/adapters/pydantic_adapter.py +309 -0
- kronos/specs/adapters/sql_ddl.py +946 -0
- kronos/specs/catalog/__init__.py +36 -0
- kronos/specs/catalog/_audit.py +39 -0
- kronos/specs/catalog/_common.py +43 -0
- kronos/specs/catalog/_content.py +59 -0
- kronos/specs/catalog/_enforcement.py +70 -0
- kronos/specs/factory.py +120 -0
- kronos/specs/operable.py +314 -0
- kronos/specs/phrase.py +405 -0
- kronos/specs/protocol.py +140 -0
- kronos/specs/spec.py +506 -0
- kronos/types/__init__.py +60 -0
- kronos/types/_sentinel.py +311 -0
- kronos/types/base.py +369 -0
- kronos/types/db_types.py +260 -0
- kronos/types/identity.py +66 -0
- kronos/utils/__init__.py +40 -0
- kronos/utils/_hash.py +234 -0
- kronos/utils/_json_dump.py +392 -0
- kronos/utils/_lazy_init.py +63 -0
- kronos/utils/_to_list.py +165 -0
- kronos/utils/_to_num.py +85 -0
- kronos/utils/_utils.py +375 -0
- kronos/utils/concurrency/__init__.py +205 -0
- kronos/utils/concurrency/_async_call.py +333 -0
- kronos/utils/concurrency/_cancel.py +122 -0
- kronos/utils/concurrency/_errors.py +96 -0
- kronos/utils/concurrency/_patterns.py +363 -0
- kronos/utils/concurrency/_primitives.py +328 -0
- kronos/utils/concurrency/_priority_queue.py +135 -0
- kronos/utils/concurrency/_resource_tracker.py +110 -0
- kronos/utils/concurrency/_run_async.py +67 -0
- kronos/utils/concurrency/_task.py +95 -0
- kronos/utils/concurrency/_utils.py +79 -0
- kronos/utils/fuzzy/__init__.py +14 -0
- kronos/utils/fuzzy/_extract_json.py +90 -0
- kronos/utils/fuzzy/_fuzzy_json.py +288 -0
- kronos/utils/fuzzy/_fuzzy_match.py +149 -0
- kronos/utils/fuzzy/_string_similarity.py +187 -0
- kronos/utils/fuzzy/_to_dict.py +396 -0
- kronos/utils/sql/__init__.py +13 -0
- kronos/utils/sql/_sql_validation.py +142 -0
- krons-0.1.0.dist-info/METADATA +70 -0
- krons-0.1.0.dist-info/RECORD +101 -0
- krons-0.1.0.dist-info/WHEEL +4 -0
- krons-0.1.0.dist-info/licenses/LICENSE +201 -0
kronos/utils/_utils.py
ADDED
@@ -0,0 +1,375 @@
# Copyright (c) 2025 - 2026, HaiyangLi <quantocean.li at gmail dot com>
# SPDX-License-Identifier: Apache-2.0

import contextlib
import importlib.util
from collections.abc import Awaitable, Callable
from datetime import UTC, datetime
from functools import wraps
from pathlib import Path as StdPath
from types import UnionType
from typing import Any, ParamSpec, TypeVar, Union, get_args, get_origin
from uuid import UUID, uuid4

from anyio import Path as AsyncPath

from kronos.protocols import Observable

__all__ = (
    "create_path",
    "get_bins",
    "import_module",
    "is_import_installed",
    "now_utc",
    "to_uuid",
    "coerce_created_at",
    "load_type_from_string",
    "async_synchronized",
    "synchronized",
)

P = ParamSpec("P")
R = TypeVar("R")


def now_utc() -> datetime:
    """Get current UTC datetime."""
    return datetime.now(UTC)


async def create_path(
    directory: StdPath | AsyncPath | str,
    filename: str,
    extension: str | None = None,
    timestamp: bool = False,
    dir_exist_ok: bool = True,
    file_exist_ok: bool = False,
    time_prefix: bool = False,
    timestamp_format: str | None = None,
    random_hash_digits: int = 0,
    timeout: float | None = None,
) -> AsyncPath:
    """Generate file path asynchronously with optional timeout.

    Args:
        directory: Base directory path
        filename: Target filename (may contain subdirectory with /)
        extension: File extension (if filename doesn't have one)
        timestamp: Add timestamp to filename
        dir_exist_ok: Allow existing directories
        file_exist_ok: Allow existing files
        time_prefix: Put timestamp before filename instead of after
        timestamp_format: Custom strftime format for timestamp
        random_hash_digits: Add random hash suffix (0 = disabled)
        timeout: Maximum time in seconds for async I/O operations (None = no timeout)

    Returns:
        AsyncPath to the created/validated file path

    Raises:
        ValueError: If filename contains backslash
        FileExistsError: If file exists and file_exist_ok is False
        TimeoutError: If timeout is exceeded
    """
    from .concurrency import move_on_after

    async def _impl() -> AsyncPath:
        nonlocal directory, filename

        if "/" in filename:
            sub_dir, filename = filename.split("/")[:-1], filename.split("/")[-1]
            directory = AsyncPath(directory) / "/".join(sub_dir)

        if "\\" in filename:
            raise ValueError("Filename cannot contain directory separators.")

        directory = AsyncPath(directory)
        if "." in filename:
            name, ext = filename.rsplit(".", 1)
        else:
            name = filename
            ext = extension or ""
        ext = f".{ext.lstrip('.')}" if ext else ""

        if timestamp:
            ts_str = datetime.now().strftime(timestamp_format or "%Y%m%d%H%M%S")
            name = f"{ts_str}_{name}" if time_prefix else f"{name}_{ts_str}"

        if random_hash_digits > 0:
            random_suffix = uuid4().hex[:random_hash_digits]
            name = f"{name}-{random_suffix}"

        full_path = directory / f"{name}{ext}"

        await full_path.parent.mkdir(parents=True, exist_ok=dir_exist_ok)

        if await full_path.exists() and not file_exist_ok:
            raise FileExistsError(f"File {full_path} already exists.")

        return full_path

    if timeout is None:
        return await _impl()

    with move_on_after(timeout) as cancel_scope:
        result = await _impl()
    if cancel_scope.cancelled_caught:
        raise TimeoutError(f"create_path timed out after {timeout}s")
    return result

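A minimal usage sketch for create_path, assuming the wheel is installed and the private module path kronos.utils._utils is importable exactly as shown in this diff:

    import anyio
    from kronos.utils._utils import create_path

    async def main() -> None:
        # Builds out/report_<timestamp>-<4 hex>.json, creating out/ if needed
        # and raising FileExistsError if the file already exists.
        path = await create_path("out", "report.json", timestamp=True, random_hash_digits=4)
        print(path)

    anyio.run(main)

Note that the helper only creates the parent directory and returns the path; it does not write the file itself.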

def get_bins(input_: list[str], upper: int) -> list[list[int]]:
    """Partition string list indices into bins by cumulative length.

    Args:
        input_: List of strings to partition.
        upper: Maximum cumulative length per bin.

    Returns:
        List of index lists, each bin's total string length < upper.
    """
    current = 0
    bins = []
    current_bin = []
    for idx, item in enumerate(input_):
        if current + len(item) < upper:
            current_bin.append(idx)
            current += len(item)
        else:
            bins.append(current_bin)
            current_bin = [idx]
            current = len(item)
    if current_bin:
        bins.append(current_bin)
    return bins

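A quick worked example of get_bins: with upper=10, bins are closed whenever adding the next string would push the running length to the limit or beyond.

    from kronos.utils._utils import get_bins

    chunks = ["alpha", "beta", "gamma", "delta"]  # lengths 5, 4, 5, 5
    print(get_bins(chunks, upper=10))             # [[0, 1], [2], [3]]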

def import_module(
    package_name: str,
    module_name: str | None = None,
    import_name: str | list | None = None,
) -> Any:
    """Import module or attribute(s) by dotted path.

    Args:
        package_name: Base package name (e.g., 'kron').
        module_name: Optional submodule (e.g., 'utils').
        import_name: Attribute(s) to import from module.

    Returns:
        Module object, single attribute, or list of attributes.

    Raises:
        ImportError: If module or attribute not found.
    """
    try:
        full_import_path = f"{package_name}.{module_name}" if module_name else package_name

        if import_name:
            import_name = [import_name] if not isinstance(import_name, list) else import_name
            a = __import__(
                full_import_path,
                fromlist=import_name,
            )
            if len(import_name) == 1:
                return getattr(a, import_name[0])
            return [getattr(a, name) for name in import_name]
        else:
            return __import__(full_import_path)

    except ImportError as e:
        raise ImportError(f"Failed to import module {full_import_path}: {e}") from e


def is_import_installed(package_name: str) -> bool:
    """Check if package is installed."""
    return importlib.util.find_spec(package_name) is not None

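A usage sketch for the two import helpers, guarding an optional dependency; pandas here is only an illustrative third-party name, not something the package requires:

    from kronos.utils._utils import import_module, is_import_installed

    if is_import_installed("pandas"):
        DataFrame = import_module("pandas", import_name="DataFrame")
    else:
        DataFrame = None  # degrade gracefully when the optional dependency is absent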

_TYPE_CACHE: dict[str, type] = {}

_DEFAULT_ALLOWED_PREFIXES: frozenset[str] = frozenset({"kronos."})
_ALLOWED_MODULE_PREFIXES: set[str] = set(_DEFAULT_ALLOWED_PREFIXES)


def register_type_prefix(prefix: str) -> None:
    """Register module prefix for dynamic type loading allowlist.

    Security: Only register prefixes for modules you control.

    Args:
        prefix: Module prefix to allow (e.g., "myapp.models.").
            Must end with "." to prevent prefix attacks.

    Raises:
        ValueError: If prefix doesn't end with ".".
    """
    if not prefix.endswith("."):
        raise ValueError(f"Prefix must end with '.': {prefix}")
    _ALLOWED_MODULE_PREFIXES.add(prefix)


def load_type_from_string(type_str: str) -> type:
    """Load type from fully qualified path (e.g., 'kronos.core.Node').

    Security: Only allowlisted module prefixes can be loaded.

    Args:
        type_str: Fully qualified type path.

    Returns:
        The loaded type class.

    Raises:
        ValueError: If path invalid, not allowlisted, or type not found.
    """
    if type_str in _TYPE_CACHE:
        return _TYPE_CACHE[type_str]

    if not isinstance(type_str, str):
        raise ValueError(f"Expected string, got {type(type_str)}")

    if "." not in type_str:
        raise ValueError(f"Invalid type path (no module): {type_str}")

    if not any(type_str.startswith(prefix) for prefix in _ALLOWED_MODULE_PREFIXES):
        raise ValueError(
            f"Module '{type_str}' not in allowed prefixes: {sorted(_ALLOWED_MODULE_PREFIXES)}"
        )

    try:
        module_path, class_name = type_str.rsplit(".", 1)
        import importlib

        module = importlib.import_module(module_path)
        if module is None:
            raise ImportError(f"Module '{module_path}' not found")

        type_class = getattr(module, class_name)
        if not isinstance(type_class, type):
            raise ValueError(f"'{type_str}' is not a type")

        _TYPE_CACHE[type_str] = type_class
        return type_class

    except (ValueError, ImportError, AttributeError) as e:
        raise ValueError(f"Failed to load type '{type_str}': {e}") from e

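A sketch of the allowlist flow: outside the default "kronos." prefix, a type path must be registered before it can be loaded. The myapp.models module below is a hypothetical name used only for illustration:

    from kronos.utils._utils import load_type_from_string, register_type_prefix

    # load_type_from_string("myapp.models.User")  # would raise ValueError: not allowlisted

    register_type_prefix("myapp.models.")               # prefix must end with "."
    user_cls = load_type_from_string("myapp.models.User")  # cached in _TYPE_CACHE after first load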

def extract_types(item_type: Any) -> set[type]:
    """Extract concrete types from type annotations.

    Handles Union, list, set, and single types recursively.

    Args:
        item_type: Type annotation (Union[X, Y], list[type], set[type], or type).

    Returns:
        Set of concrete types extracted from the annotation.
    """

    def is_union(t):
        origin = get_origin(t)
        return origin is Union or isinstance(t, UnionType)

    extracted: set[type] = set()

    if isinstance(item_type, set):
        for t in item_type:
            if is_union(t):
                extracted.update(get_args(t))
            else:
                extracted.add(t)
        return extracted

    if isinstance(item_type, list):
        for t in item_type:
            if is_union(t):
                extracted.update(get_args(t))
            else:
                extracted.add(t)
        return extracted

    if is_union(item_type):
        return set(get_args(item_type))

    return {item_type}

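A small demonstration of extract_types on the annotation shapes it accepts (set ordering in the printed results may vary):

    from kronos.utils._utils import extract_types

    print(extract_types(int | str))           # {int, str}
    print(extract_types([int | str, bytes]))  # {int, str, bytes}
    print(extract_types(float))               # {float}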

def to_uuid(value: Any) -> UUID:
    """Convert value to UUID instance.

    Args:
        value: UUID, UUID string, or Observable with .id attribute.

    Returns:
        UUID instance.

    Raises:
        ValueError: If value cannot be converted to UUID.
    """
    if isinstance(value, Observable):
        return value.id
    if isinstance(value, UUID):
        return value
    if isinstance(value, str):
        return UUID(value)
    raise ValueError("Cannot get ID from item.")


def coerce_created_at(v) -> datetime:
    """Coerce value to UTC-aware datetime.

    Args:
        v: datetime, Unix timestamp (int/float), or ISO string.

    Returns:
        UTC-aware datetime instance.

    Raises:
        ValueError: If value cannot be parsed as datetime.
    """
    if isinstance(v, datetime):
        return v.replace(tzinfo=UTC) if v.tzinfo is None else v

    if isinstance(v, (int, float)):
        return datetime.fromtimestamp(v, tz=UTC)

    if isinstance(v, str):
        with contextlib.suppress(ValueError):
            return datetime.fromtimestamp(float(v), tz=UTC)
        with contextlib.suppress(ValueError):
            return datetime.fromisoformat(v)
        raise ValueError(f"String '{v}' is neither timestamp nor ISO format")

    raise ValueError(f"Expected datetime/timestamp/string, got {type(v).__name__}")

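A short sketch of the coercion helpers on the input shapes they accept:

    from kronos.utils._utils import coerce_created_at, to_uuid

    coerce_created_at(1735689600)             # Unix timestamp -> 2025-01-01 00:00:00+00:00
    coerce_created_at("2025-01-01T00:00:00")  # ISO string; note fromisoformat returns it as parsed
    to_uuid("12345678-1234-5678-1234-567812345678")  # str -> UUID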

def synchronized(func: Callable[P, R]) -> Callable[P, R]:
    """Decorator for thread-safe method execution.

    Requires decorated method's instance to have self._lock (threading.Lock).
    """

    @wraps(func)
    def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
        self = args[0]
        with self._lock:
            return func(*args, **kwargs)

    return wrapper


def async_synchronized(func: Callable[P, Awaitable[R]]) -> Callable[P, Awaitable[R]]:
    """Decorator for async-safe method execution.

    Requires decorated method's instance to have self._async_lock (anyio.Lock).
    """

    @wraps(func)
    async def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
        self = args[0]
        async with self._async_lock:  # type: ignore[attr-defined]
            return await func(*args, **kwargs)

    return wrapper
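A minimal sketch of the lock contract both decorators rely on; Counter is a hypothetical class that supplies the _lock and _async_lock attributes the wrappers expect:

    import threading

    import anyio

    from kronos.utils._utils import async_synchronized, synchronized

    class Counter:
        def __init__(self) -> None:
            self._lock = threading.Lock()
            self._async_lock = anyio.Lock()
            self.value = 0

        @synchronized
        def bump(self) -> int:  # serialized across threads via self._lock
            self.value += 1
            return self.value

        @async_synchronized
        async def bump_async(self) -> int:  # serialized across tasks via self._async_lock
            self.value += 1
            return self.value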
kronos/utils/concurrency/__init__.py
ADDED
@@ -0,0 +1,205 @@
# Copyright (c) 2025 - 2026, HaiyangLi <quantocean.li at gmail dot com>
# SPDX-License-Identifier: Apache-2.0

"""Async concurrency utilities with lazy loading for fast import.

Core Patterns:
    gather: Run awaitables concurrently, collect results in order.
    race: Return first completion, cancel the rest.
    bounded_map: Apply async function with concurrency limit.
    retry: Exponential backoff with deadline awareness.
    CompletionStream: Iterate results as they complete.

Batch Processing:
    alcall: Apply function to list with retry, timeout, concurrency control.
    bcall: Batch wrapper yielding results per batch.

Primitives (anyio wrappers):
    Lock, Semaphore, Event, Condition, Queue, CapacityLimiter
    TaskGroup, create_task_group

Cancellation:
    CancelScope, move_on_after, move_on_at, fail_after, fail_at
    effective_deadline, get_cancelled_exc_class, is_cancelled

Utilities:
    run_async: Execute coroutine from sync context.
    run_sync: Run sync function in thread pool.
    sleep, current_time, is_coro_func

Resource Tracking:
    LeakTracker, track_resource, untrack_resource
"""

from __future__ import annotations

from typing import TYPE_CHECKING

_LAZY_IMPORTS: dict[str, tuple[str, str]] = {
    # _cancel
    "CancelScope": ("kronos.utils.concurrency._cancel", "CancelScope"),
    "effective_deadline": ("kronos.utils.concurrency._cancel", "effective_deadline"),
    "fail_after": ("kronos.utils.concurrency._cancel", "fail_after"),
    "fail_at": ("kronos.utils.concurrency._cancel", "fail_at"),
    "move_on_after": ("kronos.utils.concurrency._cancel", "move_on_after"),
    "move_on_at": ("kronos.utils.concurrency._cancel", "move_on_at"),
    # _errors
    "get_cancelled_exc_class": (
        "kronos.utils.concurrency._errors",
        "get_cancelled_exc_class",
    ),
    "is_cancelled": ("kronos.utils.concurrency._errors", "is_cancelled"),
    "non_cancel_subgroup": (
        "kronos.utils.concurrency._errors",
        "non_cancel_subgroup",
    ),
    "shield": ("kronos.utils.concurrency._errors", "shield"),
    # _patterns
    "CompletionStream": ("kronos.utils.concurrency._patterns", "CompletionStream"),
    "bounded_map": ("kronos.utils.concurrency._patterns", "bounded_map"),
    "gather": ("kronos.utils.concurrency._patterns", "gather"),
    "race": ("kronos.utils.concurrency._patterns", "race"),
    "retry": ("kronos.utils.concurrency._patterns", "retry"),
    # _primitives
    "CapacityLimiter": ("kronos.utils.concurrency._primitives", "CapacityLimiter"),
    "Condition": ("kronos.utils.concurrency._primitives", "Condition"),
    "Event": ("kronos.utils.concurrency._primitives", "Event"),
    "Lock": ("kronos.utils.concurrency._primitives", "Lock"),
    "Queue": ("kronos.utils.concurrency._primitives", "Queue"),
    "Semaphore": ("kronos.utils.concurrency._primitives", "Semaphore"),
    # _priority_queue
    "PriorityQueue": ("kronos.utils.concurrency._priority_queue", "PriorityQueue"),
    "QueueEmpty": ("kronos.utils.concurrency._priority_queue", "QueueEmpty"),
    "QueueFull": ("kronos.utils.concurrency._priority_queue", "QueueFull"),
    # _resource_tracker
    "LeakInfo": ("kronos.utils.concurrency._resource_tracker", "LeakInfo"),
    "LeakTracker": ("kronos.utils.concurrency._resource_tracker", "LeakTracker"),
    "track_resource": (
        "kronos.utils.concurrency._resource_tracker",
        "track_resource",
    ),
    "untrack_resource": (
        "kronos.utils.concurrency._resource_tracker",
        "untrack_resource",
    ),
    # _run_async
    "run_async": ("kronos.utils.concurrency._run_async", "run_async"),
    # _task
    "TaskGroup": ("kronos.utils.concurrency._task", "TaskGroup"),
    "create_task_group": ("kronos.utils.concurrency._task", "create_task_group"),
    # _utils
    "current_time": ("kronos.utils.concurrency._utils", "current_time"),
    "is_coro_func": ("kronos.utils.concurrency._utils", "is_coro_func"),
    "run_sync": ("kronos.utils.concurrency._utils", "run_sync"),
    "sleep": ("kronos.utils.concurrency._utils", "sleep"),
    "alcall": ("kronos.utils.concurrency._async_call", "alcall"),
    "bcall": ("kronos.utils.concurrency._async_call", "bcall"),
}

_LOADED: dict[str, object] = {}

# Re-export built-in ExceptionGroup
ExceptionGroup = ExceptionGroup


def __getattr__(name: str) -> object:
    """Lazy import attributes on first access."""
    if name in _LOADED:
        return _LOADED[name]

    if name in _LAZY_IMPORTS:
        from importlib import import_module

        module_name, attr_name = _LAZY_IMPORTS[name]
        module = import_module(module_name)
        value = getattr(module, attr_name)
        _LOADED[name] = value
        return value

    # Special case: ConcurrencyEvent is alias for Event
    if name == "ConcurrencyEvent":
        value = __getattr__("Event")
        _LOADED[name] = value
        return value

    raise AttributeError(f"module 'kronos.utils.concurrency' has no attribute {name!r}")

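The module-level __getattr__ above is a PEP 562 lazy-export hook: nothing in _LAZY_IMPORTS is imported until the name is first accessed, and resolved attributes are memoized in _LOADED. A brief sketch of the access path:

    from kronos.utils import concurrency

    # First access imports kronos.utils.concurrency._patterns and caches the attribute.
    gather = concurrency.gather
    same = concurrency.gather  # served from _LOADED, no re-import
    assert gather is same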

def __dir__() -> list[str]:
    """Return all available attributes for autocomplete."""
    return list(__all__)


# TYPE_CHECKING block for static analysis
if TYPE_CHECKING:
    from ._async_call import alcall, bcall
    from ._cancel import (
        CancelScope,
        effective_deadline,
        fail_after,
        fail_at,
        move_on_after,
        move_on_at,
    )
    from ._errors import (
        get_cancelled_exc_class,
        is_cancelled,
        non_cancel_subgroup,
        shield,
    )
    from ._patterns import CompletionStream, bounded_map, gather, race, retry
    from ._primitives import CapacityLimiter, Condition, Event, Lock, Queue, Semaphore
    from ._priority_queue import PriorityQueue, QueueEmpty, QueueFull
    from ._resource_tracker import (
        LeakInfo,
        LeakTracker,
        track_resource,
        untrack_resource,
    )
    from ._run_async import run_async
    from ._task import TaskGroup, create_task_group
    from ._utils import current_time, is_coro_func, run_sync, sleep

    ConcurrencyEvent = Event

__all__ = (
    "CancelScope",
    "CapacityLimiter",
    "CompletionStream",
    "ConcurrencyEvent",
    "Condition",
    "Event",
    "ExceptionGroup",
    "LeakInfo",
    "LeakTracker",
    "Lock",
    "PriorityQueue",
    "Queue",
    "QueueEmpty",
    "QueueFull",
    "Semaphore",
    "TaskGroup",
    "alcall",
    "bcall",
    "bounded_map",
    "create_task_group",
    "current_time",
    "effective_deadline",
    "fail_after",
    "fail_at",
    "gather",
    "get_cancelled_exc_class",
    "is_cancelled",
    "is_coro_func",
    "move_on_after",
    "move_on_at",
    "non_cancel_subgroup",
    "race",
    "retry",
    "run_async",
    "run_sync",
    "shield",
    "sleep",
    "track_resource",
    "untrack_resource",
)
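A closing sketch tying the two files together: the same move_on_after used inside create_path is available lazily from this package. The sleep call is assumed to take a delay in seconds, mirroring the anyio primitive the docstring says it wraps:

    import anyio

    from kronos.utils.concurrency import move_on_after, sleep

    async def main() -> None:
        with move_on_after(0.5) as scope:  # cancel the block after 0.5 seconds
            await sleep(5)                 # assumed anyio-style signature
        print("timed out:", scope.cancelled_caught)

    anyio.run(main)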