lionherd-core 1.0.0a3__py3-none-any.whl
This diff represents the content of a publicly available package version released to one of the supported registries. The information is provided for informational purposes only and reflects the package as it appears in its public registry.
- lionherd_core/__init__.py +84 -0
- lionherd_core/base/__init__.py +30 -0
- lionherd_core/base/_utils.py +295 -0
- lionherd_core/base/broadcaster.py +128 -0
- lionherd_core/base/element.py +300 -0
- lionherd_core/base/event.py +322 -0
- lionherd_core/base/eventbus.py +112 -0
- lionherd_core/base/flow.py +236 -0
- lionherd_core/base/graph.py +616 -0
- lionherd_core/base/node.py +212 -0
- lionherd_core/base/pile.py +811 -0
- lionherd_core/base/progression.py +261 -0
- lionherd_core/errors.py +104 -0
- lionherd_core/libs/__init__.py +2 -0
- lionherd_core/libs/concurrency/__init__.py +60 -0
- lionherd_core/libs/concurrency/_cancel.py +85 -0
- lionherd_core/libs/concurrency/_errors.py +80 -0
- lionherd_core/libs/concurrency/_patterns.py +238 -0
- lionherd_core/libs/concurrency/_primitives.py +253 -0
- lionherd_core/libs/concurrency/_priority_queue.py +135 -0
- lionherd_core/libs/concurrency/_resource_tracker.py +66 -0
- lionherd_core/libs/concurrency/_task.py +58 -0
- lionherd_core/libs/concurrency/_utils.py +61 -0
- lionherd_core/libs/schema_handlers/__init__.py +35 -0
- lionherd_core/libs/schema_handlers/_function_call_parser.py +122 -0
- lionherd_core/libs/schema_handlers/_minimal_yaml.py +88 -0
- lionherd_core/libs/schema_handlers/_schema_to_model.py +251 -0
- lionherd_core/libs/schema_handlers/_typescript.py +153 -0
- lionherd_core/libs/string_handlers/__init__.py +15 -0
- lionherd_core/libs/string_handlers/_extract_json.py +65 -0
- lionherd_core/libs/string_handlers/_fuzzy_json.py +103 -0
- lionherd_core/libs/string_handlers/_string_similarity.py +347 -0
- lionherd_core/libs/string_handlers/_to_num.py +63 -0
- lionherd_core/ln/__init__.py +45 -0
- lionherd_core/ln/_async_call.py +314 -0
- lionherd_core/ln/_fuzzy_match.py +166 -0
- lionherd_core/ln/_fuzzy_validate.py +151 -0
- lionherd_core/ln/_hash.py +141 -0
- lionherd_core/ln/_json_dump.py +347 -0
- lionherd_core/ln/_list_call.py +110 -0
- lionherd_core/ln/_to_dict.py +373 -0
- lionherd_core/ln/_to_list.py +190 -0
- lionherd_core/ln/_utils.py +156 -0
- lionherd_core/lndl/__init__.py +62 -0
- lionherd_core/lndl/errors.py +30 -0
- lionherd_core/lndl/fuzzy.py +321 -0
- lionherd_core/lndl/parser.py +427 -0
- lionherd_core/lndl/prompt.py +137 -0
- lionherd_core/lndl/resolver.py +323 -0
- lionherd_core/lndl/types.py +287 -0
- lionherd_core/protocols.py +181 -0
- lionherd_core/py.typed +0 -0
- lionherd_core/types/__init__.py +46 -0
- lionherd_core/types/_sentinel.py +131 -0
- lionherd_core/types/base.py +341 -0
- lionherd_core/types/operable.py +133 -0
- lionherd_core/types/spec.py +313 -0
- lionherd_core/types/spec_adapters/__init__.py +10 -0
- lionherd_core/types/spec_adapters/_protocol.py +125 -0
- lionherd_core/types/spec_adapters/pydantic_field.py +177 -0
- lionherd_core-1.0.0a3.dist-info/METADATA +502 -0
- lionherd_core-1.0.0a3.dist-info/RECORD +64 -0
- lionherd_core-1.0.0a3.dist-info/WHEEL +4 -0
- lionherd_core-1.0.0a3.dist-info/licenses/LICENSE +201 -0
lionherd_core/base/element.py
@@ -0,0 +1,300 @@
+# Copyright (c) 2025, HaiyangLi <quantocean.li at gmail dot com>
+# SPDX-License-Identifier: Apache-2.0
+
+from __future__ import annotations
+
+import datetime as dt
+from typing import Any, Literal
+from uuid import UUID, uuid4
+
+from pydantic import BaseModel, Field, field_validator
+
+from ..protocols import (
+    Deserializable,
+    Hashable,
+    Observable,
+    Serializable,
+    implements,
+)
+
+__all__ = ("DEFAULT_ELEMENT_SERIALIZER", "LN_ELEMENT_FIELDS", "Element")
+
+
+@implements(Observable, Serializable, Deserializable, Hashable)
+class Element(BaseModel):
+    """Base element with UUID identity, timestamps, polymorphic serialization.
+
+    Attributes:
+        id: UUID identifier (frozen, auto-generated)
+        created_at: UTC datetime (frozen, auto-generated)
+        metadata: Arbitrary metadata dict
+
+    Serialization injects lion_class for polymorphic deserialization.
+    """
+
+    id: UUID = Field(default_factory=uuid4, frozen=True)
+    created_at: dt.datetime = Field(default_factory=lambda: dt.datetime.now(dt.UTC), frozen=True)
+    metadata: dict[str, Any] = Field(default_factory=dict)
+
+    model_config = {
+        "arbitrary_types_allowed": True,
+        "validate_assignment": True,
+        "use_enum_values": True,
+        "extra": "forbid",
+    }
+
+    @field_validator("id", mode="before")
+    @classmethod
+    def _coerce_id(cls, v) -> UUID:
+        """Coerce to UUID4."""
+        from ._utils import to_uuid
+
+        return to_uuid(v)
+
+    @field_validator("created_at", mode="before")
+    @classmethod
+    def _coerce_created_at(cls, v) -> dt.datetime:
+        """Coerce to UTC datetime."""
+        from ._utils import coerce_created_at
+
+        return coerce_created_at(v)
+
+    @field_validator("metadata", mode="before")
+    @classmethod
+    def _validate_meta_integrity(cls, val: dict[str, Any] | None) -> dict[str, Any]:
+        """Validate and coerce metadata to dict. Raises ValueError if conversion fails."""
+        if not val:
+            return {}
+
+        if not isinstance(val, dict):
+            from lionherd_core.ln import to_dict
+
+            val = to_dict(val, recursive=True, suppress=True)
+
+        if not isinstance(val, dict):
+            raise ValueError("Invalid metadata: must be a dictionary")
+
+        return val
+
+    @classmethod
+    def class_name(cls, full: bool = False) -> str:
+        """Returns this class's name, stripping generic type parameters.
+
+        For generic classes (e.g., Flow[Item, Prog]), returns the origin class name
+        without type parameters (e.g., Flow).
+
+        Args:
+            full: If True, returns fully qualified name (module.Class); otherwise class name only
+
+        Returns:
+            Class name string without generic parameters
+
+        Note:
+            For Pydantic generic models, runtime classes have type parameters in __name__.
+            We strip these using string parsing since typing.get_origin() doesn't work
+            on Pydantic runtime instances.
+        """
+        # For Pydantic generic models, __name__ and __qualname__ include type params at runtime
+        # e.g., "Flow[Item, Prog]" instead of "Flow"
+        name = cls.__qualname__ if full else cls.__name__
+
+        # Strip generic type parameters (Flow[E, P] -> Flow)
+        if "[" in name:
+            name = name.split("[")[0]
+
+        if full:
+            return f"{cls.__module__}.{name}"
+        return name
+
+    def _to_dict(self, **kwargs: Any) -> dict[str, Any]:
+        """Serialize to dict with lion_class injected in metadata."""
+        data = self.model_dump(**kwargs)
+
+        # Inject lion_class for polymorphic deserialization, if not explicitly excluded
+        if "metadata" in data:
+            data["metadata"]["lion_class"] = self.__class__.class_name(full=True)
+
+        return data
+
+    def to_dict(
+        self,
+        mode: Literal["python", "json", "db"] = "python",
+        created_at_format: Literal["datetime", "isoformat", "timestamp"] | None = None,
+        meta_key: str | None = None,
+        **kwargs: Any,
+    ) -> dict[str, Any]:
+        """Serialize to dict with lion_class metadata.
+
+        Args:
+            mode: python/json/db (db auto-renames metadata to node_metadata)
+            created_at_format: datetime/isoformat/timestamp (auto-selected by mode)
+            meta_key: Rename metadata field (overrides db default)
+            **kwargs: Passed to model_dump()
+        """
+        if created_at_format is None:
+            created_at_format = "isoformat" if mode in ("json", "db") else "datetime"
+
+        if meta_key is None and mode == "db":
+            meta_key = "node_metadata"
+
+        if mode == "python":
+            data = self._to_dict(**kwargs)
+        elif mode in ("json", "db"):
+            import orjson
+
+            kwargs.pop("mode", None)  # Avoid recursion
+            json_bytes = self.to_json(decode=False, **kwargs)
+            data = orjson.loads(json_bytes)
+        else:
+            raise ValueError(f"Invalid mode: {mode}. Must be 'python', 'json', or 'db'")
+
+        if "created_at" in data and mode == "python":
+            if created_at_format == "isoformat":
+                data["created_at"] = self.created_at.isoformat()
+            elif created_at_format == "timestamp":
+                data["created_at"] = self.created_at.timestamp()
+            # "datetime" is default for python mode, already in correct format
+
+        # Rename metadata key if specified (works with any mode)
+        if meta_key and "metadata" in data:
+            data[meta_key] = data.pop("metadata")
+
+        return data
+
+    @classmethod
+    def from_dict(cls, data: dict[str, Any], meta_key: str | None = None, **kwargs: Any) -> Element:
+        """Deserialize from dict with polymorphic type restoration via lion_class.
+
+        Args:
+            data: Serialized element dict
+            meta_key: Restore metadata from this key (db mode compatibility)
+            **kwargs: Passed to model_validate()
+
+        Raises:
+            ValueError: If lion_class invalid or not Element subclass
+        """
+        data = data.copy()  # avoid mutating input
+
+        # Restore metadata from custom key if specified (db mode deserialization)
+        if meta_key and meta_key in data:
+            data["metadata"] = data.pop(meta_key)
+        elif "node_metadata" in data and "metadata" not in data:  # backward compatibility
+            data["metadata"] = data.pop("node_metadata")
+
+        data.pop("node_metadata", None)  # remove legacy key if present
+
+        # Extract and remove lion_class from metadata (serialization-only metadata)
+        metadata = data.get("metadata", {})
+        if isinstance(metadata, dict):
+            metadata = metadata.copy()
+            data["metadata"] = metadata
+            lion_class = metadata.pop("lion_class", None)
+        else:
+            lion_class = None
+
+        if lion_class and lion_class != cls.class_name(full=True):
+            from ._utils import load_type_from_string  # Dynamic import to load the target class
+
+            try:
+                target_cls = load_type_from_string(lion_class)
+            except ValueError as e:
+                raise ValueError(f"Failed to deserialize class '{lion_class}': {e}") from e
+
+            if not issubclass(target_cls, Element):
+                raise ValueError(
+                    f"'{lion_class}' is not an Element subclass. "
+                    f"Cannot deserialize into {cls.__name__}"
+                )
+
+            # Polymorphic deserialization requires from_dict
+            if not hasattr(target_cls, "from_dict") or not callable(
+                getattr(target_cls, "from_dict", None)
+            ):
+                raise ValueError(
+                    f"'{lion_class}' does not implement from_dict(). "
+                    f"Cannot perform polymorphic deserialization"
+                )
+
+            # Prevent infinite recursion: check if target has different from_dict implementation
+            # Use getattr to safely access __func__ (classmethods have it, but type system doesn't guarantee)
+            target_func = getattr(target_cls.from_dict, "__func__", target_cls.from_dict)
+            cls_func = getattr(cls.from_dict, "__func__", cls.from_dict)
+            if target_func is cls_func:
+                return target_cls.model_validate(data, **kwargs)
+
+            # Delegate to target class's from_dict (different implementation)
+            return target_cls.from_dict(data, **kwargs)
+
+        return cls.model_validate(data, **kwargs)
+
+    @classmethod
+    def from_json(cls, json_str: str, /, **kwargs: Any) -> Element:
+        """Create from JSON string."""
+        import orjson
+
+        return cls.from_dict(orjson.loads(json_str), **kwargs)
+
+    def to_json(
+        self,
+        *,
+        pretty: bool = False,
+        sort_keys: bool = False,
+        decode: bool = True,
+        **kwargs: Any,
+    ) -> str | bytes:
+        """Serialize to JSON with nested Element/BaseModel support.
+
+        Args:
+            pretty: Indent output
+            sort_keys: Sort dict keys
+            decode: Return str (True) or bytes (False)
+            **kwargs: Passed to model_dump()
+        """
+        from lionherd_core.ln import json_dumps
+
+        # Get dict with lion_class metadata (python mode for nested object handling)
+        data = self._to_dict(**kwargs)
+
+        return json_dumps(
+            data,
+            default=_get_default_serializer(),
+            pretty=pretty,
+            sort_keys=sort_keys,
+            decode=decode,
+        )
+
+    def __eq__(self, other: Any) -> bool:
+        """Elements are equal if they have the same ID."""
+        if not isinstance(other, Element):
+            return NotImplemented
+        return self.id == other.id
+
+    def __hash__(self) -> int:
+        """Hash by ID for use in sets/dicts."""
+        return hash(self.id)
+
+    def __bool__(self) -> bool:
+        """Elements are always truthy."""
+        return True
+
+    def __repr__(self) -> str:
+        return f"{self.__class__.__name__}(id={self.id})"
+
+
+LN_ELEMENT_FIELDS = frozenset(("id", "created_at", "metadata"))
+DEFAULT_ELEMENT_SERIALIZER = None
+
+
+def _get_default_serializer():
+    """Get or create default orjson serializer (lazy init to avoid circular imports)."""
+    global DEFAULT_ELEMENT_SERIALIZER
+
+    if DEFAULT_ELEMENT_SERIALIZER is None:
+        from lionherd_core.ln import get_orjson_default
+
+        from ._utils import get_element_serializer_config
+
+        order, additional = get_element_serializer_config()
+        DEFAULT_ELEMENT_SERIALIZER = get_orjson_default(order=order, additional=additional)
+
+    return DEFAULT_ELEMENT_SERIALIZER
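A minimal round-trip sketch of the Element API added above (illustrative only, not part of the package diff; the Note subclass is hypothetical and assumes lionherd-core 1.0.0a3 is installed):

from lionherd_core.base.element import Element

class Note(Element):
    text: str = ""

note = Note(text="hello", metadata={"source": "example"})

# to_dict() injects metadata["lion_class"] = "<module>.Note" for polymorphic restore
payload = note.to_dict(mode="json")

# from_dict() on the base class re-imports the subclass named by lion_class
restored = Element.from_dict(payload)
assert isinstance(restored, Note)
assert restored == note  # __eq__ compares by id, not field values

In db mode, to_dict(mode="db") renames metadata to node_metadata, and from_dict(meta_key="node_metadata") (or the built-in backward-compatibility path) restores it.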
lionherd_core/base/event.py
@@ -0,0 +1,322 @@
+# Copyright (c) 2025, HaiyangLi <quantocean.li at gmail dot com>
+# SPDX-License-Identifier: Apache-2.0
+
+from __future__ import annotations
+
+import math
+from dataclasses import dataclass
+from typing import Any, final
+
+from pydantic import Field, field_serializer, field_validator
+
+from ..errors import TimeoutError as LionherdTimeoutError
+from ..libs.concurrency import Lock
+from ..protocols import Invocable, Serializable, implements
+from ..types import Enum, MaybeSentinel, MaybeUnset, Unset, is_sentinel
+from ._utils import async_synchronized
+from .element import LN_ELEMENT_FIELDS, Element
+
+__all__ = (
+    "Event",
+    "EventStatus",
+    "Execution",
+)
+
+
+class EventStatus(Enum):
+    """Event execution status states.
+
+    Values:
+        PENDING: Not yet started
+        PROCESSING: Currently executing
+        COMPLETED: Finished successfully
+        FAILED: Execution failed with error
+        CANCELLED: Interrupted by timeout or cancellation
+        SKIPPED: Bypassed due to condition
+        ABORTED: Pre-validation rejected, never started
+    """
+
+    PENDING = "pending"
+    PROCESSING = "processing"
+    COMPLETED = "completed"
+    FAILED = "failed"
+    CANCELLED = "cancelled"
+    SKIPPED = "skipped"
+    ABORTED = "aborted"
+
+
+@implements(Serializable)
+@dataclass(slots=True)
+class Execution:
+    """Execution state (status, duration, response, error, retryable).
+
+    Attributes:
+        status: Current execution status
+        duration: Elapsed time in seconds (Unset until complete)
+        response: Result (Unset if unavailable, None if legitimate null)
+        error: Exception if failed (Unset/None/BaseException)
+        retryable: Whether retry is safe (Unset/bool)
+    """
+
+    status: EventStatus = EventStatus.PENDING
+    duration: MaybeUnset[float] = Unset
+    response: MaybeSentinel[Any] = Unset
+    error: MaybeUnset[BaseException] | None = Unset
+    retryable: MaybeUnset[bool] = Unset
+
+    def to_dict(self) -> dict[str, Any]:
+        """Serialize to dict with sentinel handling."""
+        from ._utils import get_json_serializable
+
+        if is_sentinel(self.response):
+            res_ = None
+        else:
+            res_ = get_json_serializable(self.response)
+            if res_ is Unset:
+                res_ = "<unserializable>"
+
+        error_dict = None
+        if self.error is not Unset and self.error is not None:
+            from lionherd_core.errors import LionherdError
+
+            if isinstance(self.error, LionherdError):
+                error_dict = self.error.to_dict()
+            elif isinstance(self.error, ExceptionGroup):
+                error_dict = self._serialize_exception_group(self.error)
+            else:
+                error_dict = {
+                    "error": type(self.error).__name__,
+                    "message": str(self.error),
+                }
+
+        duration_value = None if self.duration is Unset else self.duration
+        retryable_value = None if self.retryable is Unset else self.retryable
+
+        return {
+            "status": self.status.value,
+            "duration": duration_value,
+            "response": res_,
+            "error": error_dict,
+            "retryable": retryable_value,
+        }
+
+    def _serialize_exception_group(self, eg: ExceptionGroup) -> dict[str, Any]:
+        """Recursively serialize ExceptionGroup and nested exceptions."""
+        from lionherd_core.errors import LionherdError
+
+        exceptions = []
+        for exc in eg.exceptions:
+            if isinstance(exc, LionherdError):
+                exceptions.append(exc.to_dict())
+            elif isinstance(exc, ExceptionGroup):
+                exceptions.append(self._serialize_exception_group(exc))
+            else:
+                exceptions.append(
+                    {
+                        "error": type(exc).__name__,
+                        "message": str(exc),
+                    }
+                )
+
+        return {
+            "error": type(eg).__name__,
+            "message": str(eg),
+            "exceptions": exceptions,
+        }
+
+    def add_error(self, exc: BaseException) -> None:
+        """Add error to execution. Creates ExceptionGroup if multiple errors."""
+        if self.error is Unset or self.error is None:
+            self.error = exc
+        elif isinstance(self.error, ExceptionGroup):
+            # Already have group - extend it
+            self.error = ExceptionGroup(  # type: ignore[type-var]
+                "multiple errors",
+                [*self.error.exceptions, exc],
+            )
+        else:
+            self.error = ExceptionGroup(  # type: ignore[type-var]
+                "multiple errors",
+                [self.error, exc],
+            )
+
+
+@implements(Invocable)
+class Event(Element):
+    """Base event with lifecycle tracking and execution state.
+
+    Subclasses implement _invoke(). invoke() manages transitions, timing, errors.
+
+    Attributes:
+        execution: Execution state
+        timeout: Optional timeout in seconds (None = no timeout)
+    """
+
+    execution: Execution = Field(default_factory=Execution)
+    timeout: float | None = Field(None, exclude=True)
+
+    def model_post_init(self, __context) -> None:
+        """Initialize async lock for thread-safe invoke()."""
+        super().model_post_init(__context)
+        self._async_lock = Lock()
+
+    @field_validator("timeout")
+    @classmethod
+    def _validate_timeout(cls, v: float | None) -> float | None:
+        """Validate timeout is positive and finite (raises ValueError if not)."""
+        if v is not None:
+            if not math.isfinite(v):
+                raise ValueError(f"timeout must be finite, got {v}")
+            if v <= 0:
+                raise ValueError(f"timeout must be positive, got {v}")
+        return v
+
+    @field_serializer("execution")
+    def _serialize_execution(self, val: Execution) -> dict:
+        """Serialize Execution to dict."""
+        return val.to_dict()
+
+    @property
+    def request(self) -> dict:
+        """Get request info."""
+        return {}
+
+    @property
+    def status(self) -> EventStatus:
+        """Get execution status."""
+        return self.execution.status
+
+    @status.setter
+    def status(self, val: EventStatus | str) -> None:
+        """Set execution status."""
+        if isinstance(val, str):
+            val = EventStatus(val)
+        elif not isinstance(val, EventStatus):
+            raise ValueError(f"Invalid status type: {type(val).__name__}")
+        self.execution.status = val
+
+    @property
+    def response(self) -> Any:
+        """Get execution response (read-only)."""
+        return self.execution.response
+
+    async def _invoke(self) -> Any:
+        """Execute event. Override in subclasses."""
+        raise NotImplementedError("Subclasses must implement _invoke()")
+
+    @final
+    @async_synchronized
+    async def invoke(self) -> Any:
+        """Execute with status tracking, timing, error capture (idempotent).
+
+        **Idempotency**: Multiple concurrent calls execute _invoke() exactly once.
+        Subsequent calls return cached result without re-execution. Once COMPLETED
+        or FAILED, invoke() will return the cached response.
+
+        **Retry Pattern**: To retry after FAILED status, use `as_fresh_event()` to
+        create a new Event with reset execution state. Direct invoke() calls on
+        completed events always return cached results.
+
+        Returns:
+            Execution result (same for all concurrent callers)
+        """
+        from lionherd_core.libs.concurrency import current_time
+
+        # Idempotency: Return cached result if already executed
+        if self.execution.status != EventStatus.PENDING:
+            return self.execution.response
+
+        start = current_time()
+
+        try:
+            self.execution.status = EventStatus.PROCESSING
+
+            if self.timeout is not None:
+                from lionherd_core.libs.concurrency import fail_after
+
+                with fail_after(self.timeout):
+                    result = await self._invoke()
+            else:
+                result = await self._invoke()
+
+            # Success path
+            self.execution.response = result
+            self.execution.error = None
+            self.execution.status = EventStatus.COMPLETED
+            self.execution.retryable = False
+            return result
+
+        except TimeoutError:
+            lionherd_timeout = LionherdTimeoutError(
+                f"Operation timed out after {self.timeout}s",
+                retryable=True,
+            )
+
+            self.execution.response = Unset
+            self.execution.error = lionherd_timeout
+            self.execution.status = EventStatus.CANCELLED
+            self.execution.retryable = lionherd_timeout.retryable
+            return None
+
+        except Exception as e:
+            from lionherd_core.errors import LionherdError
+
+            if isinstance(e, ExceptionGroup):
+                # All exceptions must be retryable for group to be retryable
+                retryable = True
+                for exc in e.exceptions:
+                    if isinstance(exc, LionherdError) and not exc.retryable:
+                        retryable = False
+                        break
+
+                self.execution.retryable = retryable
+            else:
+                if isinstance(e, LionherdError):
+                    self.execution.retryable = e.retryable
+                else:
+                    self.execution.retryable = True
+
+            self.execution.response = Unset
+            self.execution.error = e
+            self.execution.status = EventStatus.FAILED
+            return None
+
+        except BaseException as e:
+            from lionherd_core.libs.concurrency import get_cancelled_exc_class
+
+            if isinstance(e, get_cancelled_exc_class()):
+                self.execution.response = Unset
+                self.execution.error = e
+                self.execution.status = EventStatus.CANCELLED
+                self.execution.retryable = True
+
+            raise
+
+        finally:
+            self.execution.duration = current_time() - start
+
+    async def stream(self) -> Any:
+        """Stream execution. Override if supported."""
+        raise NotImplementedError("Subclasses must implement stream() if streaming=True")
+
+    def as_fresh_event(self, copy_meta: bool = False) -> Event:
+        """Clone with reset execution (fresh ID, PENDING status)."""
+        d_ = self.to_dict()
+        for key in ["execution", *LN_ELEMENT_FIELDS]:
+            d_.pop(key, None)
+
+        fresh = self.__class__(**d_)
+
+        if hasattr(self, "timeout") and self.timeout is not None:
+            fresh.timeout = self.timeout
+
+        if copy_meta and hasattr(self, "metadata"):
+            fresh.metadata = self.metadata.copy()
+
+        if hasattr(fresh, "metadata"):
+            fresh.metadata["original"] = {
+                "id": str(self.id),
+                "created_at": self.created_at,
+            }
+
+        return fresh
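A minimal sketch of the Event lifecycle from the module above (illustrative only; the Sleep subclass, the main() runner, and running the package's concurrency primitives under a plain asyncio event loop are assumptions, not part of the package diff):

import asyncio

from lionherd_core.base.event import Event, EventStatus

class Sleep(Event):
    """Hypothetical Event subclass for illustration."""
    seconds: float = 0.1

    async def _invoke(self) -> str:
        await asyncio.sleep(self.seconds)
        return "done"

async def main() -> None:
    event = Sleep(seconds=0.05, timeout=1.0)

    result = await event.invoke()           # PENDING -> PROCESSING -> COMPLETED, duration recorded
    assert result == "done"
    assert event.status == EventStatus.COMPLETED

    assert await event.invoke() == "done"   # idempotent: cached response, _invoke() not re-run

    retry = event.as_fresh_event()          # new id, PENDING status, original id kept in metadata
    assert retry.status == EventStatus.PENDING

asyncio.run(main())

If the timeout elapses, the fail_after path catches TimeoutError and records a CANCELLED status with a retryable LionherdTimeoutError instead of raising to the caller.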