lionherd_core-1.0.0a3-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (64)
  1. lionherd_core/__init__.py +84 -0
  2. lionherd_core/base/__init__.py +30 -0
  3. lionherd_core/base/_utils.py +295 -0
  4. lionherd_core/base/broadcaster.py +128 -0
  5. lionherd_core/base/element.py +300 -0
  6. lionherd_core/base/event.py +322 -0
  7. lionherd_core/base/eventbus.py +112 -0
  8. lionherd_core/base/flow.py +236 -0
  9. lionherd_core/base/graph.py +616 -0
  10. lionherd_core/base/node.py +212 -0
  11. lionherd_core/base/pile.py +811 -0
  12. lionherd_core/base/progression.py +261 -0
  13. lionherd_core/errors.py +104 -0
  14. lionherd_core/libs/__init__.py +2 -0
  15. lionherd_core/libs/concurrency/__init__.py +60 -0
  16. lionherd_core/libs/concurrency/_cancel.py +85 -0
  17. lionherd_core/libs/concurrency/_errors.py +80 -0
  18. lionherd_core/libs/concurrency/_patterns.py +238 -0
  19. lionherd_core/libs/concurrency/_primitives.py +253 -0
  20. lionherd_core/libs/concurrency/_priority_queue.py +135 -0
  21. lionherd_core/libs/concurrency/_resource_tracker.py +66 -0
  22. lionherd_core/libs/concurrency/_task.py +58 -0
  23. lionherd_core/libs/concurrency/_utils.py +61 -0
  24. lionherd_core/libs/schema_handlers/__init__.py +35 -0
  25. lionherd_core/libs/schema_handlers/_function_call_parser.py +122 -0
  26. lionherd_core/libs/schema_handlers/_minimal_yaml.py +88 -0
  27. lionherd_core/libs/schema_handlers/_schema_to_model.py +251 -0
  28. lionherd_core/libs/schema_handlers/_typescript.py +153 -0
  29. lionherd_core/libs/string_handlers/__init__.py +15 -0
  30. lionherd_core/libs/string_handlers/_extract_json.py +65 -0
  31. lionherd_core/libs/string_handlers/_fuzzy_json.py +103 -0
  32. lionherd_core/libs/string_handlers/_string_similarity.py +347 -0
  33. lionherd_core/libs/string_handlers/_to_num.py +63 -0
  34. lionherd_core/ln/__init__.py +45 -0
  35. lionherd_core/ln/_async_call.py +314 -0
  36. lionherd_core/ln/_fuzzy_match.py +166 -0
  37. lionherd_core/ln/_fuzzy_validate.py +151 -0
  38. lionherd_core/ln/_hash.py +141 -0
  39. lionherd_core/ln/_json_dump.py +347 -0
  40. lionherd_core/ln/_list_call.py +110 -0
  41. lionherd_core/ln/_to_dict.py +373 -0
  42. lionherd_core/ln/_to_list.py +190 -0
  43. lionherd_core/ln/_utils.py +156 -0
  44. lionherd_core/lndl/__init__.py +62 -0
  45. lionherd_core/lndl/errors.py +30 -0
  46. lionherd_core/lndl/fuzzy.py +321 -0
  47. lionherd_core/lndl/parser.py +427 -0
  48. lionherd_core/lndl/prompt.py +137 -0
  49. lionherd_core/lndl/resolver.py +323 -0
  50. lionherd_core/lndl/types.py +287 -0
  51. lionherd_core/protocols.py +181 -0
  52. lionherd_core/py.typed +0 -0
  53. lionherd_core/types/__init__.py +46 -0
  54. lionherd_core/types/_sentinel.py +131 -0
  55. lionherd_core/types/base.py +341 -0
  56. lionherd_core/types/operable.py +133 -0
  57. lionherd_core/types/spec.py +313 -0
  58. lionherd_core/types/spec_adapters/__init__.py +10 -0
  59. lionherd_core/types/spec_adapters/_protocol.py +125 -0
  60. lionherd_core/types/spec_adapters/pydantic_field.py +177 -0
  61. lionherd_core-1.0.0a3.dist-info/METADATA +502 -0
  62. lionherd_core-1.0.0a3.dist-info/RECORD +64 -0
  63. lionherd_core-1.0.0a3.dist-info/WHEEL +4 -0
  64. lionherd_core-1.0.0a3.dist-info/licenses/LICENSE +201 -0
@@ -0,0 +1,261 @@
+ # Copyright (c) 2025, HaiyangLi <quantocean.li at gmail dot com>
+ # SPDX-License-Identifier: Apache-2.0
+
+ from __future__ import annotations
+
+ import contextlib
+ from typing import Any, overload
+ from uuid import UUID
+
+ from pydantic import Field, field_validator
+
+ from ..protocols import Containable, implements
+ from .element import Element
+
+ __all__ = ("Progression",)
+
+
+ @implements(Containable)
+ class Progression(Element):
+     """Ordered sequence of UUIDs with Element identity.
+
+     Attributes:
+         name: Optional progression name
+         order: Ordered UUIDs (allows duplicates)
+
+     Supports list-like operations (append/insert/remove/pop/extend), reordering (move/swap/reverse),
+     and idempotent set-like operations (include/exclude).
+     """
+
+     name: str | None = Field(
+         default=None,
+         description="Optional name for this progression (e.g., 'execution_order')",
+     )
+     order: list[UUID] = Field(
+         default_factory=list,
+         description="Ordered sequence of UUIDs",
+     )
+
+     def __init__(
+         self, order: list[UUID] | list[Element] | None = None, name: str | None = None, **data
+     ):
+         """Initialize Progression.
+
+         Args:
+             order: Initial items (UUIDs or Elements)
+             name: Optional name for this progression
+             **data: Additional Element fields
+         """
+         # Convert Elements to UUIDs
+         if order:
+             from ._utils import to_uuid
+
+             order = [to_uuid(item) for item in order]
+
+         # Pass all field values through **kwargs to satisfy mypy
+         super().__init__(**{"name": name, "order": order or [], **data})
+
+     @field_validator("order", mode="before")
+     @classmethod
+     def _validate_order(cls, value: Any) -> list[UUID]:
+         """Validate and coerce order field."""
+         if value is None:
+             return []
+
+         from ._utils import to_uuid
+
+         if not isinstance(value, list):
+             value = [value]
+
+         result = []
+         for item in value:
+             with contextlib.suppress(Exception):
+                 result.append(to_uuid(item))
+         return result
+
+     # ==================== Core Operations ====================
+
+     def append(self, item_id: UUID | Element) -> None:
+         """Add item to end of progression."""
+         from ._utils import to_uuid
+
+         uid = to_uuid(item_id)
+         self.order.append(uid)
+
+     def insert(self, index: int, item_id: UUID | Element) -> None:
+         """Insert item at specific position."""
+         from ._utils import to_uuid
+
+         uid = to_uuid(item_id)
+         self.order.insert(index, uid)
+
+     def remove(self, item_id: UUID | Element) -> None:
+         """Remove first occurrence of item from progression."""
+         from ._utils import to_uuid
+
+         uid = to_uuid(item_id)
+         self.order.remove(uid)
+
+     def pop(self, index: int = -1) -> UUID:
+         """Remove and return item at index."""
+         return self.order.pop(index)
+
+     def popleft(self) -> UUID:
+         """Remove and return first item (queue behavior)."""
+         if not self.order:
+             raise IndexError("Progression is empty")
+         return self.order.pop(0)
+
+     def clear(self) -> None:
+         """Remove all items from progression."""
+         self.order.clear()
+
+     def extend(self, items: list[UUID | Element]) -> None:
+         """Extend progression with multiple items."""
+         from ._utils import to_uuid
+
+         for item in items:
+             uid = to_uuid(item)
+             self.order.append(uid)
+
+     # ==================== Query Operations ====================
+
+     def __contains__(self, item: UUID | Element) -> bool:
+         """Check if item is in progression."""
+         from ._utils import to_uuid
+
+         with contextlib.suppress(Exception):
+             uid = to_uuid(item)
+             return uid in self.order
+         return False
+
+     def __len__(self) -> int:
+         """Return number of items."""
+         return len(self.order)
+
+     def __iter__(self):
+         """Iterate over UUIDs in order."""
+         return iter(self.order)
+
+     @overload
+     def __getitem__(self, index: int) -> UUID:
+         """Get single item by index."""
+         ...
+
+     @overload
+     def __getitem__(self, index: slice) -> list[UUID]:
+         """Get multiple items by slice."""
+         ...
+
+     def __getitem__(self, index: int | slice) -> UUID | list[UUID]:
+         """Get item(s) by index."""
+         return self.order[index]
+
+     def __setitem__(self, index: int | slice, value: UUID | Element | list) -> None:
+         """Set item(s) at index."""
+         from ._utils import to_uuid
+
+         if isinstance(index, slice):
+             # Type guard: ensure value is a list when using slice
+             if not isinstance(value, list):
+                 raise TypeError(f"Cannot assign {type(value).__name__} to slice, expected list")
+             self.order[index] = [to_uuid(v) for v in value]
+         else:
+             self.order[index] = to_uuid(value)
+
+     def index(self, item_id: UUID | Element) -> int:
+         """Get index of item in progression."""
+         from ._utils import to_uuid
+
+         uid = to_uuid(item_id)
+         return self.order.index(uid)
+
+     def __reversed__(self):
+         """Iterate over UUIDs in reverse order."""
+         return reversed(self.order)
+
+     def _validate_index(self, index: int, allow_end: bool = False) -> int:
+         """Validate and normalize index (supports negative). Raises IndexError if out of bounds."""
+         length = len(self.order)
+         if length == 0 and not allow_end:
+             raise IndexError("Progression is empty")
+
+         # Normalize negative indices
+         if index < 0:
+             index = length + index
+
+         # Check bounds
+         max_index = length if allow_end else length - 1
+         if index < 0 or index > max_index:
+             raise IndexError(f"Index {index} out of range for progression of length {length}")
+
+         return index
+
+     # ==================== Workflow Operations ====================
+
+     def move(self, from_index: int, to_index: int) -> None:
+         """Move item from one position to another.
+
+         Args:
+             from_index: Current position (supports negative indexing)
+             to_index: Target position (supports negative indexing)
+         """
+         from_index = self._validate_index(from_index)
+         # For to_index, allow insertion at end
+         to_index = self._validate_index(to_index, allow_end=True)
+
+         item = self.order.pop(from_index)
+         # Adjust to_index if we removed item before it
+         if from_index < to_index:
+             to_index -= 1
+         self.order.insert(to_index, item)
+
+     def swap(self, index1: int, index2: int) -> None:
+         """Swap two items by index.
+
+         Args:
+             index1: First position (supports negative indexing)
+             index2: Second position (supports negative indexing)
+         """
+         index1 = self._validate_index(index1)
+         index2 = self._validate_index(index2)
+
+         self.order[index1], self.order[index2] = self.order[index2], self.order[index1]
+
+     def reverse(self) -> None:
+         """Reverse the progression in-place."""
+         self.order.reverse()
+
+     # ==================== Set-like Operations ====================
+
+     def include(self, item: UUID | Element) -> bool:
+         """Include item in progression (idempotent).
+
+         Returns:
+             bool: True if item was added, False if already present
+         """
+         from ._utils import to_uuid
+
+         uid = to_uuid(item)
+         if uid not in self.order:
+             self.order.append(uid)
+             return True
+         return False
+
+     def exclude(self, item: UUID | Element) -> bool:
+         """Exclude item from progression (idempotent).
+
+         Returns:
+             bool: True if item was removed, False if not present
+         """
+         from ._utils import to_uuid
+
+         uid = to_uuid(item)
+         if uid in self.order:
+             self.order.remove(uid)
+             return True
+         return False
+
+     def __repr__(self) -> str:
+         name_str = f" name='{self.name}'" if self.name else ""
+         return f"Progression(len={len(self)}{name_str})"
@@ -0,0 +1,104 @@
+ # Copyright (c) 2025, HaiyangLi <quantocean.li at gmail dot com>
+ # SPDX-License-Identifier: Apache-2.0
+
+ from __future__ import annotations
+
+ from typing import Any
+
+ from .protocols import Serializable, implements
+
+ __all__ = (
+     "ConfigurationError",
+     "ConnectionError",
+     "ExecutionError",
+     "LionherdError",
+     "TimeoutError",
+     "ValidationError",
+ )
+
+
+ @implements(Serializable)
+ class LionherdError(Exception):
+     """Base exception for all lionherd errors.
+
+     Attributes:
+         message: Human-readable error message
+         details: Additional structured context
+         retryable: Whether this error can be retried
+     """
+
+     default_message: str = "Lionherd error"
+     default_retryable: bool = True
+
+     def __init__(
+         self,
+         message: str | None = None,
+         *,
+         details: dict[str, Any] | None = None,
+         retryable: bool | None = None,
+         cause: Exception | None = None,
+     ):
+         """Initialize error.
+
+         Args:
+             message: Error message (uses default_message if None)
+             details: Additional context dict
+             retryable: Whether error can be retried (uses default_retryable if None)
+             cause: Original exception that caused this error
+         """
+         self.message = message or self.default_message
+         self.details = details or {}
+         self.retryable = retryable if retryable is not None else self.default_retryable
+
+         if cause:
+             self.__cause__ = cause  # Preserve traceback
+
+         super().__init__(self.message)
+
+     def to_dict(self) -> dict[str, Any]:
+         """Serialize error to dict for logging/debugging.
+
+         Returns:
+             Dict with error type, message, details, retryable flag
+         """
+         return {
+             "error": self.__class__.__name__,
+             "message": self.message,
+             "retryable": self.retryable,
+             **({"details": self.details} if self.details else {}),
+         }
+
+
+ class ValidationError(LionherdError):
+     """Validation failure. Not retryable."""
+
+     default_message = "Validation failed"
+     default_retryable = False  # Validation errors won't fix themselves
+
+
+ class ConfigurationError(LionherdError):
+     """Configuration error. Not retryable."""
+
+     default_message = "Configuration error"
+     default_retryable = False  # Config errors need manual fixes
+
+
+ class ExecutionError(LionherdError):
+     """Event/Calling execution failure. Retryable by default."""
+
+     default_message = "Execution failed"
+     default_retryable = True  # Most execution failures are transient
+
+
+ class ConnectionError(LionherdError):
+     """Connection/network failure. Retryable by default."""
+
+     default_message = "Connection error"
+     default_retryable = True  # Network issues are often transient
+
+
+ class TimeoutError(LionherdError):
+     """Operation timeout. Retryable by default."""
+
+     default_message = "Operation timed out"
+     default_retryable = True  # Timeouts might succeed with more time
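Not part of the packaged diff: a short sketch of how the error hierarchy above is meant to be used, assuming lionherd_core.errors is importable from the installed wheel.

# Illustrative sketch only.
from lionherd_core.errors import ExecutionError, ValidationError

try:
    raise ExecutionError(
        "tool call failed",
        details={"tool": "search", "attempt": 1},
        cause=RuntimeError("upstream 503"),
    )
except ExecutionError as exc:
    assert exc.retryable is True      # ExecutionError defaults to retryable
    print(exc.to_dict())
    # {'error': 'ExecutionError', 'message': 'tool call failed',
    #  'retryable': True, 'details': {'tool': 'search', 'attempt': 1}}

assert ValidationError().retryable is False   # class defaults apply when no args given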
@@ -0,0 +1,2 @@
+ # Copyright (c) 2025, HaiyangLi <quantocean.li at gmail dot com>
+ # SPDX-License-Identifier: Apache-2.0
@@ -0,0 +1,60 @@
+ # Copyright (c) 2025, HaiyangLi <quantocean.li at gmail dot com>
+ # SPDX-License-Identifier: Apache-2.0
+
+ from ._cancel import (
+     CancelScope,
+     effective_deadline,
+     fail_after,
+     fail_at,
+     move_on_after,
+     move_on_at,
+ )
+ from ._errors import get_cancelled_exc_class, is_cancelled, non_cancel_subgroup, shield
+ from ._patterns import CompletionStream, bounded_map, gather, race, retry
+ from ._primitives import CapacityLimiter, Condition, Event, Lock, Queue, Semaphore
+ from ._priority_queue import PriorityQueue, QueueEmpty, QueueFull
+ from ._resource_tracker import LeakInfo, LeakTracker, track_resource, untrack_resource
+ from ._task import TaskGroup, create_task_group
+ from ._utils import current_time, is_coro_func, run_sync, sleep
+
+ ExceptionGroup = ExceptionGroup  # Re-export built-in
+ ConcurrencyEvent = Event
+
+ __all__ = (
+     "CancelScope",
+     "CapacityLimiter",
+     "CompletionStream",
+     "ConcurrencyEvent",
+     "Condition",
+     "Event",
+     "ExceptionGroup",
+     "LeakInfo",
+     "LeakTracker",
+     "Lock",
+     "PriorityQueue",
+     "Queue",
+     "QueueEmpty",
+     "QueueFull",
+     "Semaphore",
+     "TaskGroup",
+     "bounded_map",
+     "create_task_group",
+     "current_time",
+     "effective_deadline",
+     "fail_after",
+     "fail_at",
+     "gather",
+     "get_cancelled_exc_class",
+     "is_cancelled",
+     "is_coro_func",
+     "move_on_after",
+     "move_on_at",
+     "non_cancel_subgroup",
+     "race",
+     "retry",
+     "run_sync",
+     "shield",
+     "sleep",
+     "track_resource",
+     "untrack_resource",
+ )
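Not part of the packaged diff: the module above flattens all concurrency helpers into a single namespace, so callers import from lionherd_core.libs.concurrency directly; ConcurrencyEvent is just an alias of Event, as a quick sketch shows.

# Illustrative sketch only.
from lionherd_core.libs.concurrency import ConcurrencyEvent, Event, Lock, create_task_group

assert ConcurrencyEvent is Event   # alias defined in the __init__ above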
@@ -0,0 +1,85 @@
+ # Copyright (c) 2025, HaiyangLi <quantocean.li at gmail dot com>
+ # SPDX-License-Identifier: Apache-2.0
+
+ from __future__ import annotations
+
+ from collections.abc import Iterator
+ from contextlib import contextmanager
+ from math import isinf
+
+ import anyio
+
+ from ._utils import current_time
+
+ CancelScope = anyio.CancelScope
+
+
+ __all__ = (
+     "CancelScope",
+     "effective_deadline",
+     "fail_after",
+     "fail_at",
+     "move_on_after",
+     "move_on_at",
+ )
+
+
+ @contextmanager
+ def fail_after(seconds: float | None) -> Iterator[CancelScope]:
+     """Create context with timeout that raises TimeoutError."""
+     if seconds is None:
+         # No timeout, but still cancellable by outer scopes
+         with CancelScope() as scope:
+             yield scope
+         return
+     with anyio.fail_after(seconds) as scope:
+         yield scope
+
+
+ @contextmanager
+ def move_on_after(seconds: float | None) -> Iterator[CancelScope]:
+     """Create context with timeout that silently cancels."""
+     if seconds is None:
+         # No timeout, but still cancellable by outer scopes
+         with CancelScope() as scope:
+             yield scope
+         return
+     with anyio.move_on_after(seconds) as scope:
+         yield scope
+
+
+ @contextmanager
+ def fail_at(deadline: float | None) -> Iterator[CancelScope]:
+     """Create context that raises TimeoutError at absolute deadline."""
+     if deadline is None:
+         # No timeout, but still cancellable by outer scopes
+         with CancelScope() as scope:
+             yield scope
+         return
+     now = current_time()
+     seconds = max(0.0, deadline - now)
+     with fail_after(seconds) as scope:
+         yield scope
+
+
+ @contextmanager
+ def move_on_at(deadline: float | None) -> Iterator[CancelScope]:
+     """Create context that silently cancels at absolute deadline."""
+     if deadline is None:
+         # No timeout, but still cancellable by outer scopes
+         with CancelScope() as scope:
+             yield scope
+         return
+     now = current_time()
+     seconds = max(0.0, deadline - now)
+     with anyio.move_on_after(seconds) as scope:
+         yield scope
+
+
+ def effective_deadline() -> float | None:
+     """Return the ambient effective deadline, or None if unlimited.
+
+     AnyIO uses +inf to indicate "no deadline".
+     """
+     d = anyio.current_effective_deadline()
+     return None if isinf(d) else d
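Not part of the packaged diff: a minimal sketch of the timeout helpers above, run with anyio and importing through the package's flat concurrency namespace; passing None adds no deadline but keeps the block cancellable by outer scopes.

# Illustrative sketch only.
import anyio

from lionherd_core.libs.concurrency import effective_deadline, fail_after, move_on_after


async def main() -> None:
    # move_on_after: silently cancels the block when the deadline passes
    with move_on_after(0.1) as scope:
        print(effective_deadline())    # finite while a deadline is active
        await anyio.sleep(1)
    print(scope.cancelled_caught)      # True

    # fail_after: raises TimeoutError instead of cancelling silently
    try:
        with fail_after(0.1):
            await anyio.sleep(1)
    except TimeoutError:
        print("timed out")

    with fail_after(None):             # no deadline, still cancellable from outside
        await anyio.sleep(0)


anyio.run(main)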
@@ -0,0 +1,80 @@
+ # Copyright (c) 2025, HaiyangLi <quantocean.li at gmail dot com>
+ # SPDX-License-Identifier: Apache-2.0
+
+ from __future__ import annotations
+
+ from collections.abc import Awaitable, Callable
+ from typing import ParamSpec, TypeVar
+
+ import anyio
+
+ T = TypeVar("T")
+ P = ParamSpec("P")
+
+
+ __all__ = (
+     "get_cancelled_exc_class",
+     "is_cancelled",
+     "non_cancel_subgroup",
+     "shield",
+     "split_cancellation",
+ )
+
+
+ def get_cancelled_exc_class() -> type[BaseException]:
+     """Return the backend-native cancellation exception class."""
+     return anyio.get_cancelled_exc_class()
+
+
+ def is_cancelled(exc: BaseException) -> bool:
+     """True if this is the backend-native cancellation exception."""
+     return isinstance(exc, anyio.get_cancelled_exc_class())
+
+
+ async def shield(func: Callable[P, Awaitable[T]], *args: P.args, **kwargs: P.kwargs) -> T:
+     """Run ``func`` immune to outer cancellation.
+
+     Args:
+         func: Async function to shield from cancellation
+         *args: Positional arguments for func
+         **kwargs: Keyword arguments for func
+
+     Returns:
+         Result of func
+     """
+     with anyio.CancelScope(shield=True):
+         return await func(*args, **kwargs)  # type: ignore[return-value]
+
+
+ # -------- ExceptionGroup helpers (Python 3.11+) --------
+
+
+ def split_cancellation(
+     eg: BaseExceptionGroup,
+ ) -> tuple[BaseExceptionGroup | None, BaseExceptionGroup | None]:
+     """Split an exception group into (cancel_subgroup, non_cancel_subgroup).
+
+     Uses Python 3.11+ ExceptionGroup.split() to preserve structure, tracebacks,
+     __cause__/__context__/__notes__.
+
+     Args:
+         eg: Exception group to split
+
+     Returns:
+         Tuple of (cancellation exceptions, non-cancellation exceptions).
+         Either element may be None if that category is empty.
+     """
+     return eg.split(anyio.get_cancelled_exc_class())
+
+
+ def non_cancel_subgroup(eg: BaseExceptionGroup) -> BaseExceptionGroup | None:
+     """Return subgroup without cancellations, or None if empty.
+
+     Args:
+         eg: Exception group to filter
+
+     Returns:
+         Subgroup containing only non-cancellation exceptions, or None.
+     """
+     _, rest = eg.split(anyio.get_cancelled_exc_class())
+     return rest
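Not part of the packaged diff: a sketch of the cancellation helpers above under anyio (Python 3.11+); flush_logs is a hypothetical cleanup coroutine, and the exception group is constructed by hand purely for illustration.

# Illustrative sketch only.
import anyio

from lionherd_core.libs.concurrency import (
    get_cancelled_exc_class,
    is_cancelled,
    non_cancel_subgroup,
    shield,
)


async def flush_logs() -> str:
    await anyio.sleep(0.05)            # still awaitable: the inner scope is shielded
    return "flushed"


async def main() -> None:
    with anyio.move_on_after(0.01):
        try:
            await anyio.sleep(1)       # cancelled by the deadline
        except get_cancelled_exc_class() as exc:
            assert is_cancelled(exc)
            print(await shield(flush_logs))   # cleanup completes despite cancellation
            raise                             # always re-raise cancellation

    # Keep only non-cancellation failures from an exception group
    eg = BaseExceptionGroup("demo", [ValueError("bad input")])
    rest = non_cancel_subgroup(eg)
    print(rest is not None and isinstance(rest.exceptions[0], ValueError))  # True


anyio.run(main)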