krons-0.1.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (101)
  1. kronos/__init__.py +0 -0
  2. kronos/core/__init__.py +145 -0
  3. kronos/core/broadcaster.py +116 -0
  4. kronos/core/element.py +225 -0
  5. kronos/core/event.py +316 -0
  6. kronos/core/eventbus.py +116 -0
  7. kronos/core/flow.py +356 -0
  8. kronos/core/graph.py +442 -0
  9. kronos/core/node.py +982 -0
  10. kronos/core/pile.py +575 -0
  11. kronos/core/processor.py +494 -0
  12. kronos/core/progression.py +296 -0
  13. kronos/enforcement/__init__.py +57 -0
  14. kronos/enforcement/common/__init__.py +34 -0
  15. kronos/enforcement/common/boolean.py +85 -0
  16. kronos/enforcement/common/choice.py +97 -0
  17. kronos/enforcement/common/mapping.py +118 -0
  18. kronos/enforcement/common/model.py +102 -0
  19. kronos/enforcement/common/number.py +98 -0
  20. kronos/enforcement/common/string.py +140 -0
  21. kronos/enforcement/context.py +129 -0
  22. kronos/enforcement/policy.py +80 -0
  23. kronos/enforcement/registry.py +153 -0
  24. kronos/enforcement/rule.py +312 -0
  25. kronos/enforcement/service.py +370 -0
  26. kronos/enforcement/validator.py +198 -0
  27. kronos/errors.py +146 -0
  28. kronos/operations/__init__.py +32 -0
  29. kronos/operations/builder.py +228 -0
  30. kronos/operations/flow.py +398 -0
  31. kronos/operations/node.py +101 -0
  32. kronos/operations/registry.py +92 -0
  33. kronos/protocols.py +414 -0
  34. kronos/py.typed +0 -0
  35. kronos/services/__init__.py +81 -0
  36. kronos/services/backend.py +286 -0
  37. kronos/services/endpoint.py +608 -0
  38. kronos/services/hook.py +471 -0
  39. kronos/services/imodel.py +465 -0
  40. kronos/services/registry.py +115 -0
  41. kronos/services/utilities/__init__.py +36 -0
  42. kronos/services/utilities/header_factory.py +87 -0
  43. kronos/services/utilities/rate_limited_executor.py +271 -0
  44. kronos/services/utilities/rate_limiter.py +180 -0
  45. kronos/services/utilities/resilience.py +414 -0
  46. kronos/session/__init__.py +41 -0
  47. kronos/session/exchange.py +258 -0
  48. kronos/session/message.py +60 -0
  49. kronos/session/session.py +411 -0
  50. kronos/specs/__init__.py +25 -0
  51. kronos/specs/adapters/__init__.py +0 -0
  52. kronos/specs/adapters/_utils.py +45 -0
  53. kronos/specs/adapters/dataclass_field.py +246 -0
  54. kronos/specs/adapters/factory.py +56 -0
  55. kronos/specs/adapters/pydantic_adapter.py +309 -0
  56. kronos/specs/adapters/sql_ddl.py +946 -0
  57. kronos/specs/catalog/__init__.py +36 -0
  58. kronos/specs/catalog/_audit.py +39 -0
  59. kronos/specs/catalog/_common.py +43 -0
  60. kronos/specs/catalog/_content.py +59 -0
  61. kronos/specs/catalog/_enforcement.py +70 -0
  62. kronos/specs/factory.py +120 -0
  63. kronos/specs/operable.py +314 -0
  64. kronos/specs/phrase.py +405 -0
  65. kronos/specs/protocol.py +140 -0
  66. kronos/specs/spec.py +506 -0
  67. kronos/types/__init__.py +60 -0
  68. kronos/types/_sentinel.py +311 -0
  69. kronos/types/base.py +369 -0
  70. kronos/types/db_types.py +260 -0
  71. kronos/types/identity.py +66 -0
  72. kronos/utils/__init__.py +40 -0
  73. kronos/utils/_hash.py +234 -0
  74. kronos/utils/_json_dump.py +392 -0
  75. kronos/utils/_lazy_init.py +63 -0
  76. kronos/utils/_to_list.py +165 -0
  77. kronos/utils/_to_num.py +85 -0
  78. kronos/utils/_utils.py +375 -0
  79. kronos/utils/concurrency/__init__.py +205 -0
  80. kronos/utils/concurrency/_async_call.py +333 -0
  81. kronos/utils/concurrency/_cancel.py +122 -0
  82. kronos/utils/concurrency/_errors.py +96 -0
  83. kronos/utils/concurrency/_patterns.py +363 -0
  84. kronos/utils/concurrency/_primitives.py +328 -0
  85. kronos/utils/concurrency/_priority_queue.py +135 -0
  86. kronos/utils/concurrency/_resource_tracker.py +110 -0
  87. kronos/utils/concurrency/_run_async.py +67 -0
  88. kronos/utils/concurrency/_task.py +95 -0
  89. kronos/utils/concurrency/_utils.py +79 -0
  90. kronos/utils/fuzzy/__init__.py +14 -0
  91. kronos/utils/fuzzy/_extract_json.py +90 -0
  92. kronos/utils/fuzzy/_fuzzy_json.py +288 -0
  93. kronos/utils/fuzzy/_fuzzy_match.py +149 -0
  94. kronos/utils/fuzzy/_string_similarity.py +187 -0
  95. kronos/utils/fuzzy/_to_dict.py +396 -0
  96. kronos/utils/sql/__init__.py +13 -0
  97. kronos/utils/sql/_sql_validation.py +142 -0
  98. krons-0.1.0.dist-info/METADATA +70 -0
  99. krons-0.1.0.dist-info/RECORD +101 -0
  100. krons-0.1.0.dist-info/WHEEL +4 -0
  101. krons-0.1.0.dist-info/licenses/LICENSE +201 -0
kronos/core/event.py ADDED
@@ -0,0 +1,316 @@
+ # Copyright (c) 2025 - 2026, HaiyangLi <quantocean.li at gmail dot com>
+ # SPDX-License-Identifier: Apache-2.0
+
+ from __future__ import annotations
+
+ import contextlib
+ import math
+ from dataclasses import dataclass
+ from typing import Any, final
+
+ import orjson
+ from pydantic import Field, field_serializer, field_validator
+
+ from kronos.errors import KronError, KronTimeoutError
+ from kronos.protocols import Invocable, Serializable, implements
+ from kronos.types import Enum, MaybeSentinel, MaybeUnset, Unset, is_sentinel, is_unset
+ from kronos.utils import async_synchronized, concurrency, json_dumpb
+
+ from .element import LN_ELEMENT_FIELDS, Element
+
+ __all__ = (
+     "Event",
+     "EventStatus",
+     "Execution",
+ )
+
+
+ class EventStatus(Enum):
+     """Event execution status states.
+
+     Values:
+         PENDING: Not yet started
+         PROCESSING: Currently executing
+         COMPLETED: Finished successfully
+         FAILED: Execution failed with error
+         CANCELLED: Interrupted by timeout or cancellation
+         SKIPPED: Bypassed due to condition
+         ABORTED: Pre-validation rejected, never started
+     """
+
+     PENDING = "pending"
+     PROCESSING = "processing"
+     COMPLETED = "completed"
+     FAILED = "failed"
+     CANCELLED = "cancelled"
+     SKIPPED = "skipped"
+     ABORTED = "aborted"
+
+
+ @implements(Serializable)
+ @dataclass(slots=True)
+ class Execution:
+     """Execution state (status, duration, response, error, retryable).
+
+     Attributes:
+         status: Current execution status
+         duration: Elapsed time in seconds (Unset until complete)
+         response: Result (Unset if unavailable, None if legitimate null)
+         error: Exception if failed (Unset/None/BaseException)
+         retryable: Whether retry is safe (Unset/bool)
+     """
+
+     status: EventStatus = EventStatus.PENDING
+     duration: MaybeUnset[float] = Unset
+     response: MaybeSentinel[Any] = Unset
+     error: MaybeUnset[BaseException] | None = Unset
+     retryable: MaybeUnset[bool] = Unset
+
+     def to_dict(self, **kwargs: Any) -> dict[str, Any]:
+         """Serialize to dict. Sentinels become None; errors become dicts."""
+         res_ = Unset
+         if is_sentinel(self.response):
+             res_ = None
+         else:
+             with contextlib.suppress(orjson.JSONDecodeError, TypeError):
+                 res_ = json_dumpb(self.response)
+             if is_unset(res_):
+                 res_ = "<unserializable>"
+
+         error_dict = None
+         if not is_unset(self.error) and self.error is not None:
+             if isinstance(self.error, Serializable):
+                 error_dict = self.error.to_dict()
+             elif isinstance(self.error, ExceptionGroup):
+                 error_dict = self._serialize_exception_group(self.error)
+             else:
+                 error_dict = {
+                     "error": type(self.error).__name__,
+                     "message": str(self.error),
+                 }
+
+         duration_value = None if is_unset(self.duration) else self.duration
+         retryable_value = None if is_unset(self.retryable) else self.retryable
+
+         return {
+             "status": self.status.value,
+             "duration": duration_value,
+             "response": res_,
+             "error": error_dict,
+             "retryable": retryable_value,
+         }
+
+     def _serialize_exception_group(
+         self,
+         eg: ExceptionGroup,
+         depth: int = 0,
+         _seen: set[int] | None = None,
+     ) -> dict[str, Any]:
+         """Recursively serialize ExceptionGroup with depth limit and cycle detection.
+
+         Args:
+             eg: ExceptionGroup to serialize.
+             depth: Current recursion depth (internal).
+             _seen: Object IDs already visited for cycle detection (internal).
+
+         Returns:
+             Dict with error type, message, and nested exceptions.
+         """
+         MAX_DEPTH = 100
+         if depth > MAX_DEPTH:
+             return {
+                 "error": "ExceptionGroup",
+                 "message": f"Max nesting depth ({MAX_DEPTH}) exceeded",
+                 "nested_count": len(eg.exceptions) if hasattr(eg, "exceptions") else 0,
+             }
+
+         if _seen is None:
+             _seen = set()
+
+         eg_id = id(eg)
+         if eg_id in _seen:
+             return {
+                 "error": "ExceptionGroup",
+                 "message": "Circular reference detected",
+             }
+
+         _seen.add(eg_id)
+
+         try:
+             exceptions = []
+             for exc in eg.exceptions:
+                 if isinstance(exc, Serializable):
+                     exceptions.append(exc.to_dict())
+                 elif isinstance(exc, ExceptionGroup):
+                     exceptions.append(self._serialize_exception_group(exc, depth + 1, _seen))
+                 else:
+                     exceptions.append(
+                         {
+                             "error": type(exc).__name__,
+                             "message": str(exc),
+                         }
+                     )
+
+             return {
+                 "error": type(eg).__name__,
+                 "message": str(eg),
+                 "exceptions": exceptions,
+             }
+         finally:
+             _seen.discard(eg_id)
+
+     def add_error(self, exc: BaseException) -> None:
+         """Add error; creates ExceptionGroup if multiple errors accumulated."""
+         if is_unset(self.error) or self.error is None:
+             self.error = exc
+         elif isinstance(self.error, ExceptionGroup):
+             self.error = ExceptionGroup( # type: ignore[type-var]
+                 "multiple errors",
+                 [*self.error.exceptions, exc],
+             )
+         else:
+             self.error = ExceptionGroup( # type: ignore[type-var]
+                 "multiple errors",
+                 [self.error, exc],
+             )
+
+
+ @implements(Invocable)
+ class Event(Element):
+     """Base event with lifecycle tracking and execution state.
+
+     Subclasses implement _invoke(). invoke() manages transitions, timing, errors.
+
+     Attributes:
+         execution: Execution state
+         timeout: Optional timeout in seconds (None = no timeout)
+     """
+
+     execution: Execution = Field(default_factory=Execution)
+     timeout: MaybeUnset[float] = Field(Unset, exclude=True)
+     streaming: bool = Field(False, exclude=True)
+
+     def model_post_init(self, __context) -> None:
+         """Initialize async lock for thread-safe invoke()."""
+         super().model_post_init(__context)
+         self._async_lock = concurrency.Lock()
+
+     @field_validator("timeout")
+     @classmethod
+     def _validate_timeout(cls, v: Any) -> MaybeUnset[float]:
+         """Validate timeout is positive and finite (raises ValueError if not)."""
+         if is_sentinel(v, {"none", "empty"}):
+             return Unset
+         if not math.isfinite(v):
+             raise ValueError(f"timeout must be finite, got {v}")
+         if v <= 0:
+             raise ValueError(f"timeout must be positive, got {v}")
+         return v
+
+     @field_serializer("execution")
+     def _serialize_execution(self, val: Execution) -> dict:
+         """Serialize Execution to dict."""
+         return val.to_dict()
+
+     @property
+     def request(self) -> dict:
+         """Request parameters for this event. Override in subclasses."""
+         return {}
+
+     async def _invoke(self) -> Any:
+         """Execute event logic. Subclasses must override."""
+         raise NotImplementedError("Subclasses must implement _invoke()")
+
+     @final
+     @async_synchronized
+     async def invoke(self) -> None:
+         """Execute with lifecycle management: status tracking, timing, error capture.
+
+         Idempotent: no-op if status is not PENDING. Thread-safe via async lock.
+         Sets execution.status, duration, response/error, and retryable flag.
+         """
+         if self.execution.status != EventStatus.PENDING:
+             return
+
+         start = concurrency.current_time()
+
+         try:
+             self.execution.status = EventStatus.PROCESSING
+
+             if not is_unset(self.timeout):
+                 with concurrency.fail_after(self.timeout):
+                     result = await self._invoke()
+             else:
+                 result = await self._invoke()
+
+             self.execution.response = result
+             self.execution.error = None
+             self.execution.status = EventStatus.COMPLETED
+             self.execution.retryable = False
+
+         except TimeoutError:
+             lion_timeout = KronTimeoutError(
+                 f"Operation timed out after {self.timeout}s",
+                 retryable=True,
+             )
+
+             self.execution.response = Unset
+             self.execution.error = lion_timeout
+             self.execution.status = EventStatus.CANCELLED
+             self.execution.retryable = lion_timeout.retryable
+
+         except Exception as e:
+             if isinstance(e, ExceptionGroup):
+                 retryable = all(
+                     not isinstance(exc, KronError) or exc.retryable for exc in e.exceptions
+                 )
+                 self.execution.retryable = retryable
+             else:
+                 self.execution.retryable = e.retryable if isinstance(e, KronError) else True
+
+             self.execution.response = Unset
+             self.execution.error = e
+             self.execution.status = EventStatus.FAILED
+
+         except BaseException as e:
+             if isinstance(e, concurrency.get_cancelled_exc_class()):
+                 self.execution.response = Unset
+                 self.execution.error = e
+                 self.execution.status = EventStatus.CANCELLED
+                 self.execution.retryable = True
+
+             raise
+
+         finally:
+             self.execution.duration = concurrency.current_time() - start
+
+     async def stream(self) -> Any:
+         """Stream execution results. Override if streaming=True."""
+         raise NotImplementedError("Subclasses must implement stream() if streaming=True")
+
+     def as_fresh_event(self, copy_meta: bool = False) -> Event:
+         """Clone with reset execution state (new ID, PENDING status).
+
+         Args:
+             copy_meta: If True, copy original metadata to clone.
+
+         Returns:
+             Fresh Event with original ID/created_at stored in metadata["original"].
+         """
+         d_ = self.to_dict()
+         for key in ["execution", *LN_ELEMENT_FIELDS]:
+             d_.pop(key, None)
+
+         fresh = self.__class__(**d_)
+
+         if not is_sentinel(self.timeout):
+             fresh.timeout = self.timeout
+
+         if copy_meta:
+             fresh.metadata = self.metadata.copy()
+
+         fresh.metadata["original"] = {
+             "id": str(self.id),
+             "created_at": self.created_at,
+         }
+         return fresh
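
A minimal usage sketch of the Event API above (not part of the wheel): it assumes the package imports as kronos, that kronos.utils.concurrency works on a plain asyncio event loop, and it invents an EchoEvent subclass and its payload field purely for illustration.

import asyncio

from kronos.core.event import Event, EventStatus


class EchoEvent(Event):
    """Hypothetical Event subclass: only _invoke() is overridden."""

    payload: str = "hello"  # illustrative field, not defined in the package

    async def _invoke(self) -> str:
        # invoke() wraps this with status transitions, timing, and error capture
        return self.payload.upper()


async def main() -> None:
    ev = EchoEvent(payload="ping", timeout=5.0)
    await ev.invoke()  # COMPLETED on success; CANCELLED on timeout; FAILED on error
    assert ev.execution.status is EventStatus.COMPLETED
    print(ev.execution.to_dict())

    retry = ev.as_fresh_event()  # new PENDING clone; original id kept in metadata["original"]
    assert retry.execution.status is EventStatus.PENDING


asyncio.run(main())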
kronos/core/eventbus.py ADDED
@@ -0,0 +1,116 @@
+ # Copyright (c) 2025 - 2026, HaiyangLi <quantocean.li at gmail dot com>
+ # SPDX-License-Identifier: Apache-2.0
+
+ from __future__ import annotations
+
+ import weakref
+ from collections import defaultdict
+ from collections.abc import Awaitable, Callable
+ from typing import Any
+
+ from kronos.utils.concurrency import gather
+
+ __all__ = ("EventBus", "Handler")
+
+ #: Async event handler signature: any args/kwargs, returns None.
+ Handler = Callable[..., Awaitable[None]]
+
+
+ class EventBus:
+     """In-process pub/sub with weakref-based automatic handler cleanup.
+
+     Provides topic-based event routing where handlers are stored as weak
+     references, enabling automatic cleanup when handler objects are
+     garbage collected.
+
+     Example:
+         bus = EventBus()
+         async def on_update(data): print(data)
+         bus.subscribe("updates", on_update)
+         await bus.emit("updates", {"key": "value"})
+     """
+
+     def __init__(self) -> None:
+         """Initialize with empty subscription registry."""
+         self._subs: dict[str, list[weakref.ref[Handler]]] = defaultdict(list)
+
+     def subscribe(self, topic: str, handler: Handler) -> None:
+         """Subscribe async handler to topic.
+
+         Args:
+             topic: Event topic name.
+             handler: Async callable to invoke on emit. Stored as weakref.
+         """
+         self._subs[topic].append(weakref.ref(handler))
+
+     def unsubscribe(self, topic: str, handler: Handler) -> bool:
+         """Remove handler from topic subscription.
+
+         Args:
+             topic: Event topic name.
+             handler: Previously subscribed handler.
+
+         Returns:
+             True if handler was found and removed, False otherwise.
+         """
+         if topic not in self._subs:
+             return False
+         for weak_ref in list(self._subs[topic]):
+             if weak_ref() is handler:
+                 self._subs[topic].remove(weak_ref)
+                 return True
+         return False
+
+     def _cleanup_dead_refs(self, topic: str) -> list[Handler]:
+         """Prune dead weakrefs, return live handlers."""
+         handlers, alive_refs = [], []
+         for weak_ref in self._subs[topic]:
+             if (handler := weak_ref()) is not None:
+                 handlers.append(handler)
+                 alive_refs.append(weak_ref)
+         self._subs[topic] = alive_refs
+         return handlers
+
+     async def emit(self, topic: str, *args: Any, **kwargs: Any) -> None:
+         """Emit event to all topic subscribers concurrently.
+
+         Args:
+             topic: Event topic name.
+             *args: Positional args passed to handlers.
+             **kwargs: Keyword args passed to handlers.
+
+         Note:
+             Handler exceptions are suppressed (logged via gather).
+         """
+         if topic not in self._subs:
+             return
+         if handlers := self._cleanup_dead_refs(topic):
+             await gather(*(h(*args, **kwargs) for h in handlers), return_exceptions=True)
+
+     def clear(self, topic: str | None = None) -> None:
+         """Clear subscriptions.
+
+         Args:
+             topic: Specific topic to clear, or None for all topics.
+         """
+         if topic is None:
+             self._subs.clear()
+         else:
+             self._subs.pop(topic, None)
+
+     def topics(self) -> list[str]:
+         """Return list of all registered topic names."""
+         return list(self._subs.keys())
+
+     def handler_count(self, topic: str) -> int:
+         """Count live handlers for topic (triggers dead ref cleanup).
+
+         Args:
+             topic: Event topic name.
+
+         Returns:
+             Number of active handlers (excludes GC'd handlers).
+         """
+         if topic not in self._subs:
+             return 0
+         return len(self._cleanup_dead_refs(topic))
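
A similar sketch for EventBus (not part of the wheel), showing the weakref cleanup described in the docstrings. The printed counts assume CPython, where dropping the last strong reference collects the handler immediately.

import asyncio

from kronos.core.eventbus import EventBus


async def main() -> None:
    bus = EventBus()

    async def on_update(data: dict) -> None:
        print("got", data)

    bus.subscribe("updates", on_update)          # stored as a weakref, not a strong reference
    await bus.emit("updates", {"key": "value"})  # handlers run concurrently; their errors are suppressed
    print(bus.handler_count("updates"))          # 1: on_update is still strongly referenced here

    del on_update                                # drop the only strong reference
    print(bus.handler_count("updates"))          # 0: the dead weakref is pruned on the next count


asyncio.run(main())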