lionherd_core-1.0.0a3-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (64)
  1. lionherd_core/__init__.py +84 -0
  2. lionherd_core/base/__init__.py +30 -0
  3. lionherd_core/base/_utils.py +295 -0
  4. lionherd_core/base/broadcaster.py +128 -0
  5. lionherd_core/base/element.py +300 -0
  6. lionherd_core/base/event.py +322 -0
  7. lionherd_core/base/eventbus.py +112 -0
  8. lionherd_core/base/flow.py +236 -0
  9. lionherd_core/base/graph.py +616 -0
  10. lionherd_core/base/node.py +212 -0
  11. lionherd_core/base/pile.py +811 -0
  12. lionherd_core/base/progression.py +261 -0
  13. lionherd_core/errors.py +104 -0
  14. lionherd_core/libs/__init__.py +2 -0
  15. lionherd_core/libs/concurrency/__init__.py +60 -0
  16. lionherd_core/libs/concurrency/_cancel.py +85 -0
  17. lionherd_core/libs/concurrency/_errors.py +80 -0
  18. lionherd_core/libs/concurrency/_patterns.py +238 -0
  19. lionherd_core/libs/concurrency/_primitives.py +253 -0
  20. lionherd_core/libs/concurrency/_priority_queue.py +135 -0
  21. lionherd_core/libs/concurrency/_resource_tracker.py +66 -0
  22. lionherd_core/libs/concurrency/_task.py +58 -0
  23. lionherd_core/libs/concurrency/_utils.py +61 -0
  24. lionherd_core/libs/schema_handlers/__init__.py +35 -0
  25. lionherd_core/libs/schema_handlers/_function_call_parser.py +122 -0
  26. lionherd_core/libs/schema_handlers/_minimal_yaml.py +88 -0
  27. lionherd_core/libs/schema_handlers/_schema_to_model.py +251 -0
  28. lionherd_core/libs/schema_handlers/_typescript.py +153 -0
  29. lionherd_core/libs/string_handlers/__init__.py +15 -0
  30. lionherd_core/libs/string_handlers/_extract_json.py +65 -0
  31. lionherd_core/libs/string_handlers/_fuzzy_json.py +103 -0
  32. lionherd_core/libs/string_handlers/_string_similarity.py +347 -0
  33. lionherd_core/libs/string_handlers/_to_num.py +63 -0
  34. lionherd_core/ln/__init__.py +45 -0
  35. lionherd_core/ln/_async_call.py +314 -0
  36. lionherd_core/ln/_fuzzy_match.py +166 -0
  37. lionherd_core/ln/_fuzzy_validate.py +151 -0
  38. lionherd_core/ln/_hash.py +141 -0
  39. lionherd_core/ln/_json_dump.py +347 -0
  40. lionherd_core/ln/_list_call.py +110 -0
  41. lionherd_core/ln/_to_dict.py +373 -0
  42. lionherd_core/ln/_to_list.py +190 -0
  43. lionherd_core/ln/_utils.py +156 -0
  44. lionherd_core/lndl/__init__.py +62 -0
  45. lionherd_core/lndl/errors.py +30 -0
  46. lionherd_core/lndl/fuzzy.py +321 -0
  47. lionherd_core/lndl/parser.py +427 -0
  48. lionherd_core/lndl/prompt.py +137 -0
  49. lionherd_core/lndl/resolver.py +323 -0
  50. lionherd_core/lndl/types.py +287 -0
  51. lionherd_core/protocols.py +181 -0
  52. lionherd_core/py.typed +0 -0
  53. lionherd_core/types/__init__.py +46 -0
  54. lionherd_core/types/_sentinel.py +131 -0
  55. lionherd_core/types/base.py +341 -0
  56. lionherd_core/types/operable.py +133 -0
  57. lionherd_core/types/spec.py +313 -0
  58. lionherd_core/types/spec_adapters/__init__.py +10 -0
  59. lionherd_core/types/spec_adapters/_protocol.py +125 -0
  60. lionherd_core/types/spec_adapters/pydantic_field.py +177 -0
  61. lionherd_core-1.0.0a3.dist-info/METADATA +502 -0
  62. lionherd_core-1.0.0a3.dist-info/RECORD +64 -0
  63. lionherd_core-1.0.0a3.dist-info/WHEEL +4 -0
  64. lionherd_core-1.0.0a3.dist-info/licenses/LICENSE +201 -0
lionherd_core/base/eventbus.py
@@ -0,0 +1,112 @@
+ # Copyright (c) 2025, HaiyangLi <quantocean.li at gmail dot com>
+ # SPDX-License-Identifier: Apache-2.0
+
+ from __future__ import annotations
+
+ import weakref
+ from collections import defaultdict
+ from collections.abc import Awaitable, Callable
+ from typing import Any
+
+ from ..libs.concurrency import gather
+
+ __all__ = ("EventBus", "Handler")
+
+ Handler = Callable[..., Awaitable[None]]
+
+
+ class EventBus:
+     """In-process pub/sub with concurrent handler execution.
+
+     Fire-and-forget: handlers run concurrently via gather(), exceptions suppressed.
+
+     Memory Management:
+         Uses weakref for automatic handler cleanup when handler objects are garbage collected.
+         Prevents memory leaks in long-running services where handlers are registered dynamically.
+     """
+
+     def __init__(self) -> None:
+         """Initialize with empty subscription registry."""
+         # Store weak references to handlers for automatic cleanup
+         self._subs: dict[str, list[weakref.ref[Handler]]] = defaultdict(list)
+
+     def subscribe(self, topic: str, handler: Handler) -> None:
+         """Subscribe async handler to topic.
+
+         Handlers are stored as weak references for automatic cleanup when
+         handler objects are garbage collected. Prevents memory leaks in
+         long-running services.
+         """
+         # Store weakref without callback - cleanup happens lazily
+         weak_handler = weakref.ref(handler)
+         self._subs[topic].append(weak_handler)
+
+     def unsubscribe(self, topic: str, handler: Handler) -> bool:
+         """Unsubscribe handler from topic. Returns True if found and removed."""
+         if topic not in self._subs:
+             return False
+
+         # Find and remove weakref that points to this handler
+         for weak_ref in list(self._subs[topic]):
+             if weak_ref() is handler:
+                 self._subs[topic].remove(weak_ref)
+                 return True
+         return False
+
+     def _cleanup_dead_refs(self, topic: str) -> list[Handler]:
+         """Remove garbage-collected handlers and return list of live handlers.
+
+         Lazily cleans up dead weakrefs during normal operations (emit/handler_count).
+         Updates subscription list in-place to remove dead references.
+
+         Returns:
+             List of live handler callables (weakrefs resolved).
+         """
+         weak_refs = self._subs[topic]
+         handlers = []
+         alive_refs = []
+
+         for weak_ref in weak_refs:
+             handler = weak_ref()
+             if handler is not None:
+                 handlers.append(handler)
+                 alive_refs.append(weak_ref)
+
+         # Update subscription list to remove dead references
+         self._subs[topic] = alive_refs
+         return handlers
+
+     async def emit(self, topic: str, *args: Any, **kwargs: Any) -> None:
+         """Emit event to all subscribers.
+
+         Handlers run concurrently via gather(), exceptions suppressed.
+         Dead weakrefs are lazily cleaned up during emission.
+         """
+         if topic not in self._subs:
+             return
+
+         handlers = self._cleanup_dead_refs(topic)
+         if not handlers:
+             return
+
+         # Run all handlers concurrently, suppress exceptions
+         await gather(*(h(*args, **kwargs) for h in handlers), return_exceptions=True)
+
+     def clear(self, topic: str | None = None) -> None:
+         """Clear subscriptions for topic (or all if None)."""
+         if topic is None:
+             self._subs.clear()
+         else:
+             self._subs.pop(topic, None)
+
+     def topics(self) -> list[str]:
+         """Get list of all registered topics."""
+         return list(self._subs.keys())
+
+     def handler_count(self, topic: str) -> int:
+         """Get number of live handlers for topic (excludes garbage-collected handlers)."""
+         if topic not in self._subs:
+             return 0
+
+         handlers = self._cleanup_dead_refs(topic)
+         return len(handlers)
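
For orientation, a minimal usage sketch of the EventBus shown above (not taken from the package's documentation; the import uses the module path from this diff). Because subscribe() stores only a weak reference, the caller must keep a strong reference to the handler, for example a module-level function; an inline lambda or local closure would be garbage collected and silently dropped.

    import asyncio

    from lionherd_core.base.eventbus import EventBus


    async def on_user_created(user_id: str) -> None:
        # Module-level coroutine function: the module keeps it referenced,
        # so the EventBus weakref stays alive.
        print(f"user created: {user_id}")


    async def main() -> None:
        bus = EventBus()
        bus.subscribe("user.created", on_user_created)

        # Live handlers for the topic run concurrently; handler exceptions are suppressed.
        await bus.emit("user.created", "u-123")

        assert bus.handler_count("user.created") == 1
        bus.unsubscribe("user.created", on_user_created)


    asyncio.run(main())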
lionherd_core/base/flow.py
@@ -0,0 +1,236 @@
+ # Copyright (c) 2025, HaiyangLi <quantocean.li at gmail dot com>
+ # SPDX-License-Identifier: Apache-2.0
+
+ from __future__ import annotations
+
+ from typing import Any, Generic, Literal, TypeVar
+ from uuid import UUID
+
+ from pydantic import Field, PrivateAttr, field_validator
+
+ from ..protocols import Serializable, implements
+ from ._utils import extract_types
+ from .element import Element
+ from .pile import Pile
+ from .progression import Progression
+
+ __all__ = ("Flow",)
+
+ E = TypeVar("E", bound=Element)  # Element type for items
+ P = TypeVar("P", bound=Progression)  # Progression type
+
+
+ @implements(Serializable)
+ class Flow(Element, Generic[E, P]):
+     """Workflow state machine with ordered progressions and referenced items.
+
+     Flow uses composition: two Pile instances for clear separation.
+     - progressions: Named sequences of item UUIDs (workflow stages)
+     - items: Referenced elements (Nodes, Agents, etc.)
+
+     Generic Parameters:
+         E: Element type for items
+         P: Progression type
+     """
+
+     name: str | None = Field(
+         default=None,
+         description="Optional name for this flow (e.g., 'task_workflow')",
+     )
+     progressions: Pile[P] = Field(
+         default_factory=Pile,
+         description="Workflow stages as named progressions",
+     )
+     items: Pile[E] = Field(
+         default_factory=Pile,
+         description="Items that progressions reference",
+     )
+     _progression_names: dict[str, UUID] = PrivateAttr(default_factory=dict)
+
+     @field_validator("items", "progressions", mode="wrap")
+     @classmethod
+     def _validate_piles(cls, v: Any, handler: Any) -> Any:
+         """Convert dict to Pile during deserialization."""
+         if isinstance(v, dict):
+             return Pile.from_dict(v)
+         # Let Pydantic handle it
+         return handler(v)
+
+     def model_post_init(self, __context: Any) -> None:
+         """Rebuild _progression_names index after deserialization."""
+         super().model_post_init(__context)
+         # Rebuild name index from progressions
+         for progression in self.progressions:
+             if progression.name:
+                 self._progression_names[progression.name] = progression.id
+
+     def __init__(
+         self,
+         items: list[E] | None = None,
+         name: str | None = None,
+         item_type: type[E] | set[type] | list[type] | None = None,
+         strict_type: bool = False,
+         **data,
+     ):
+         """Initialize Flow with optional items and type validation.
+
+         Args:
+             items: Initial items to add to items pile
+             name: Flow name
+             item_type: Type(s) for validation
+             strict_type: Enforce exact type match (no subclasses)
+             **data: Additional Element fields
+         """
+         # Let Pydantic create default piles, then populate
+         super().__init__(name=name, **data)
+
+         # Normalize item_type to set and extract types from unions
+         if item_type is not None:
+             item_type = extract_types(item_type)
+
+         # Set item_type and strict_type on items pile if provided
+         if item_type:
+             self.items.item_type = item_type
+         if strict_type:
+             self.items.strict_type = strict_type
+
+         # Add items after initialization (only if items is a list, not during deserialization)
+         if items and isinstance(items, list):
+             for item in items:
+                 self.items.add(item)
+
+     # ==================== Progression Management ====================
+
+     def add_progression(self, progression: P) -> None:
+         """Add progression with name registration. Raises ValueError if UUID or name exists."""
+         # Check name uniqueness
+         if progression.name and progression.name in self._progression_names:
+             raise ValueError(
+                 f"Progression with name '{progression.name}' already exists. Names must be unique."
+             )
+
+         # Add to progressions pile
+         self.progressions.add(progression)
+
+         # Register name if present
+         if progression.name:
+             self._progression_names[progression.name] = progression.id
+
+     def remove_progression(self, progression_id: UUID | str | P) -> P:
+         """Remove progression by UUID or name. Raises ValueError if not found."""
+         # Resolve name to UUID if needed
+         if isinstance(progression_id, str) and progression_id in self._progression_names:
+             uid = self._progression_names[progression_id]
+             del self._progression_names[progression_id]
+             return self.progressions.remove(uid)
+
+         # Convert to UUID for type-safe removal
+         from ._utils import to_uuid
+
+         uid = to_uuid(progression_id)
+         prog: P = self.progressions[uid]
+
+         if prog.name and prog.name in self._progression_names:
+             del self._progression_names[prog.name]
+         return self.progressions.remove(uid)
+
+     def get_progression(self, key: UUID | str | P) -> P:
+         """Get progression by UUID or name. Raises KeyError if not found."""
+         if isinstance(key, str):
+             # Check name index first
+             if key in self._progression_names:
+                 uid = self._progression_names[key]
+                 return self.progressions[uid]
+
+             # Try parsing as UUID string
+             from ._utils import to_uuid
+
+             try:
+                 uid = to_uuid(key)
+                 return self.progressions[uid]
+             except (ValueError, TypeError):
+                 raise KeyError(f"Progression '{key}' not found in flow")
+
+         # UUID or Progression instance
+         return self.progressions[key]
+
+     # ==================== Item Management ====================
+
+     def add_item(
+         self,
+         item: E,
+         progression_ids: list[UUID | str] | UUID | str | None = None,
+     ) -> None:
+         """Add item to items pile and optionally to progressions. Raises ValueError if exists."""
+         # Add to items pile
+         self.items.add(item)
+
+         # Add to specified progressions
+         if progression_ids is not None:
+             # Normalize to list
+             ids = [progression_ids] if not isinstance(progression_ids, list) else progression_ids
+
+             for prog_id in ids:
+                 progression = self.get_progression(prog_id)
+                 progression.append(item)
+
+     def remove_item(
+         self,
+         item_id: UUID | str | Element,
+         remove_from_progressions: bool = True,
+     ) -> E:
+         """Remove item from items pile and optionally from progressions. Raises ValueError if not found."""
+         from ._utils import to_uuid
+
+         uid = to_uuid(item_id)
+
+         # Remove from progressions first
+         if remove_from_progressions:
+             for progression in self.progressions:
+                 if uid in progression:
+                     progression.remove(uid)
+
+         # Remove from items pile
+         return self.items.remove(uid)
+
+     def __repr__(self) -> str:
+         name_str = f" name='{self.name}'" if self.name else ""
+         return f"Flow(items={len(self.items)}, progressions={len(self.progressions)}{name_str})"
+
+     def to_dict(
+         self,
+         mode: Literal["python", "json", "db"] = "python",
+         created_at_format: Literal["datetime", "isoformat", "timestamp"] | None = None,
+         meta_key: str | None = None,
+         **kwargs: Any,
+     ) -> dict[str, Any]:
+         """Serialize Flow with proper Pile serialization for items and progressions.
+
+         Overrides Element.to_dict() to ensure Pile fields are properly serialized
+         with their items, not just metadata.
+         """
+         # Exclude items and progressions from parent serialization
+         exclude = kwargs.pop("exclude", set())
+         if isinstance(exclude, set):
+             exclude = exclude | {"items", "progressions"}
+         else:
+             exclude = set(exclude) | {"items", "progressions"}
+
+         # Get base Element serialization (without Pile fields)
+         data = super().to_dict(
+             mode=mode,
+             created_at_format=created_at_format,
+             meta_key=meta_key,
+             exclude=exclude,
+             **kwargs,
+         )
+
+         # Add Pile fields with their proper serialization (includes items)
+         data["items"] = self.items.to_dict(
+             mode=mode, created_at_format=created_at_format, meta_key=meta_key
+         )
+         data["progressions"] = self.progressions.to_dict(
+             mode=mode, created_at_format=created_at_format, meta_key=meta_key
+         )
+
+         return data
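
A hypothetical end-to-end sketch of the Flow API above. The Flow methods match this diff, but the Node and Progression constructors (Node(), Progression(name=...)) are assumptions about other modules in the package, not confirmed here.

    from lionherd_core.base.flow import Flow
    from lionherd_core.base.node import Node
    from lionherd_core.base.progression import Progression

    # Assumed constructors: Node() and Progression(name=...) are illustrative only.
    flow = Flow(name="task_workflow", item_type=Node)

    todo = Progression(name="todo")
    flow.add_progression(todo)                     # registers the name in the internal index

    task = Node()
    flow.add_item(task, progression_ids="todo")    # name resolved via get_progression()

    stage = flow.get_progression("todo")           # lookup by name or UUID string
    assert task.id in stage                        # progressions hold item UUIDs

    # Serializes both piles with their items (see the to_dict override above).
    payload = flow.to_dict(mode="json")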