lionherd-core 1.0.0a3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (64)
  1. lionherd_core/__init__.py +84 -0
  2. lionherd_core/base/__init__.py +30 -0
  3. lionherd_core/base/_utils.py +295 -0
  4. lionherd_core/base/broadcaster.py +128 -0
  5. lionherd_core/base/element.py +300 -0
  6. lionherd_core/base/event.py +322 -0
  7. lionherd_core/base/eventbus.py +112 -0
  8. lionherd_core/base/flow.py +236 -0
  9. lionherd_core/base/graph.py +616 -0
  10. lionherd_core/base/node.py +212 -0
  11. lionherd_core/base/pile.py +811 -0
  12. lionherd_core/base/progression.py +261 -0
  13. lionherd_core/errors.py +104 -0
  14. lionherd_core/libs/__init__.py +2 -0
  15. lionherd_core/libs/concurrency/__init__.py +60 -0
  16. lionherd_core/libs/concurrency/_cancel.py +85 -0
  17. lionherd_core/libs/concurrency/_errors.py +80 -0
  18. lionherd_core/libs/concurrency/_patterns.py +238 -0
  19. lionherd_core/libs/concurrency/_primitives.py +253 -0
  20. lionherd_core/libs/concurrency/_priority_queue.py +135 -0
  21. lionherd_core/libs/concurrency/_resource_tracker.py +66 -0
  22. lionherd_core/libs/concurrency/_task.py +58 -0
  23. lionherd_core/libs/concurrency/_utils.py +61 -0
  24. lionherd_core/libs/schema_handlers/__init__.py +35 -0
  25. lionherd_core/libs/schema_handlers/_function_call_parser.py +122 -0
  26. lionherd_core/libs/schema_handlers/_minimal_yaml.py +88 -0
  27. lionherd_core/libs/schema_handlers/_schema_to_model.py +251 -0
  28. lionherd_core/libs/schema_handlers/_typescript.py +153 -0
  29. lionherd_core/libs/string_handlers/__init__.py +15 -0
  30. lionherd_core/libs/string_handlers/_extract_json.py +65 -0
  31. lionherd_core/libs/string_handlers/_fuzzy_json.py +103 -0
  32. lionherd_core/libs/string_handlers/_string_similarity.py +347 -0
  33. lionherd_core/libs/string_handlers/_to_num.py +63 -0
  34. lionherd_core/ln/__init__.py +45 -0
  35. lionherd_core/ln/_async_call.py +314 -0
  36. lionherd_core/ln/_fuzzy_match.py +166 -0
  37. lionherd_core/ln/_fuzzy_validate.py +151 -0
  38. lionherd_core/ln/_hash.py +141 -0
  39. lionherd_core/ln/_json_dump.py +347 -0
  40. lionherd_core/ln/_list_call.py +110 -0
  41. lionherd_core/ln/_to_dict.py +373 -0
  42. lionherd_core/ln/_to_list.py +190 -0
  43. lionherd_core/ln/_utils.py +156 -0
  44. lionherd_core/lndl/__init__.py +62 -0
  45. lionherd_core/lndl/errors.py +30 -0
  46. lionherd_core/lndl/fuzzy.py +321 -0
  47. lionherd_core/lndl/parser.py +427 -0
  48. lionherd_core/lndl/prompt.py +137 -0
  49. lionherd_core/lndl/resolver.py +323 -0
  50. lionherd_core/lndl/types.py +287 -0
  51. lionherd_core/protocols.py +181 -0
  52. lionherd_core/py.typed +0 -0
  53. lionherd_core/types/__init__.py +46 -0
  54. lionherd_core/types/_sentinel.py +131 -0
  55. lionherd_core/types/base.py +341 -0
  56. lionherd_core/types/operable.py +133 -0
  57. lionherd_core/types/spec.py +313 -0
  58. lionherd_core/types/spec_adapters/__init__.py +10 -0
  59. lionherd_core/types/spec_adapters/_protocol.py +125 -0
  60. lionherd_core/types/spec_adapters/pydantic_field.py +177 -0
  61. lionherd_core-1.0.0a3.dist-info/METADATA +502 -0
  62. lionherd_core-1.0.0a3.dist-info/RECORD +64 -0
  63. lionherd_core-1.0.0a3.dist-info/WHEEL +4 -0
  64. lionherd_core-1.0.0a3.dist-info/licenses/LICENSE +201 -0
lionherd_core/base/graph.py
@@ -0,0 +1,616 @@
+ # Copyright (c) 2025, HaiyangLi <quantocean.li at gmail dot com>
+ # SPDX-License-Identifier: Apache-2.0
+
+ from __future__ import annotations
+
+ import threading
+ from collections import deque
+ from typing import Any, Literal
+ from uuid import UUID
+
+ from pydantic import Field, PrivateAttr, field_validator, model_validator
+ from pydapter import (
+     Adaptable as PydapterAdaptable,
+     AsyncAdaptable as PydapterAsyncAdaptable,
+ )
+ from typing_extensions import override
+
+ from ..protocols import (
+     Adaptable,
+     AsyncAdaptable,
+     Containable,
+     Deserializable,
+     Serializable,
+     implements,
+ )
+ from ._utils import synchronized
+ from .element import Element
+ from .node import Node
+ from .pile import Pile
+
+ __all__ = ("Edge", "EdgeCondition", "Graph")
+
+ GRAPH_REGISTRY: dict[str, type[Graph]] = {}
+
+
+ # ==================== EdgeCondition ====================
+
+
+ class EdgeCondition:
+     """Runtime predicate for edge traversal (not serialized).
+
+     Override apply() for custom async logic. Callable via __call__() for sync contexts.
+     """
+
+     def __init__(self, **kwargs: Any):
+         """Initialize condition. Subclasses can store state as needed."""
+         for key, value in kwargs.items():
+             setattr(self, key, value)
+
+     async def apply(self, *args: Any, **kwargs: Any) -> bool:
+         """Evaluate condition. Override for custom logic. Default: always True."""
+         return True
+
+     def __call__(self, *args: Any, **kwargs: Any) -> bool:
+         """Sync callable interface. Prefer async apply() for async contexts."""
+         import anyio
+
+         async def _run():
+             return await self.apply(*args, **kwargs)
+
+         return anyio.run(_run)
+
+
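For illustration only (not part of the published file): a minimal sketch of a custom condition, assuming nothing beyond the EdgeCondition API above and the module path lionherd_core.base.graph; the threshold logic is invented for the example.

```python
# Hypothetical example; the threshold attribute and values are assumptions.
import anyio

from lionherd_core.base.graph import EdgeCondition


class WeightAbove(EdgeCondition):
    """Traversable only when the supplied weight exceeds the stored threshold."""

    async def apply(self, weight: float = 0.0, **kwargs) -> bool:
        # self.threshold is set by EdgeCondition.__init__(**kwargs)
        return weight > self.threshold


cond = WeightAbove(threshold=2.5)
print(cond(weight=3.0))            # True, via the sync __call__ wrapper
print(anyio.run(cond.apply, 0.0))  # False, awaited directly in async code
```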
+ # ==================== Edge ====================
+
+
+ class Edge(Element):
+     """Directed edge with labels, conditions, properties.
+
+     Attributes:
+         head: Source node UUID
+         tail: Target node UUID
+         label: Edge labels (list of strings)
+         condition: Runtime traversal predicate (not serialized)
+         properties: Custom edge attributes
+     """
+
+     head: UUID = Field(description="Source node ID")
+     tail: UUID = Field(description="Target node ID")
+     label: list[str] = Field(default_factory=list, description="Edge labels")
+     condition: EdgeCondition | None = Field(
+         default=None, exclude=True, description="Runtime traversal condition (not serialized)"
+     )
+     properties: dict[str, Any] = Field(default_factory=dict, description="Custom edge properties")
+
+     @field_validator("head", "tail", mode="before")
+     @classmethod
+     def _validate_uuid(cls, value: Any) -> UUID:
+         """Coerce to UUID."""
+         from ._utils import to_uuid
+
+         return to_uuid(value)
+
+     async def check_condition(self, *args: Any, **kwargs: Any) -> bool:
+         """Check if edge is traversable. Returns True if no condition or condition passes."""
+         if self.condition is None:
+             return True
+         return await self.condition.apply(*args, **kwargs)
+
+
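Another illustrative sketch (not from the package): constructing an Edge directly with UUID endpoints, which the head/tail validator coerces, and checking its condition. Element's defaults (such as an auto-generated id) are assumed, since element.py is not shown in this hunk.

```python
# Hypothetical example; Element defaults are assumed, not shown in this hunk.
from uuid import uuid4

import anyio

from lionherd_core.base.graph import Edge, EdgeCondition

edge = Edge(
    head=uuid4(),
    tail=uuid4(),
    label=["depends_on"],
    properties={"weight": 3.0},
    condition=EdgeCondition(),  # base condition: apply() always returns True
)


async def main() -> None:
    print(await edge.check_condition())  # True (also True when condition is None)


anyio.run(main)
```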
+ # ==================== Graph ====================
+
+
+ @implements(Serializable, Deserializable, Containable, Adaptable, AsyncAdaptable)
+ class Graph(Element, PydapterAdaptable, PydapterAsyncAdaptable):
+     """Directed graph with Pile-backed storage, O(1) operations, graph algorithms.
+
+     Adjacency lists (_out_edges, _in_edges) provide O(1) node/edge queries.
+     Supports cycle detection, topological sort, pathfinding.
+
+     Adapter Registration (Rust-like isolated pattern):
+         Each Graph subclass has its own independent adapter registry. No auto-registration.
+         Must explicitly register adapters on each class that needs them:
+
+         ```python
+         from pydapter.adapters import TomlAdapter
+
+
+         class CustomGraph(Graph):
+             pass
+
+
+         # Must register explicitly (no inheritance from parent)
+         CustomGraph.register_adapter(TomlAdapter)
+         custom_graph = CustomGraph()
+         custom_graph.adapt_to("toml")  # Now works
+         ```
+
+     This prevents adapter pollution and ensures explicit control per class.
+     """
+
+     nodes: Pile[Node] = Field(
+         default_factory=lambda: Pile(item_type=Node),
+         description="Node storage via Pile",
+     )
+     edges: Pile[Edge] = Field(
+         default_factory=lambda: Pile(item_type=Edge),
+         description="Edge storage via Pile",
+     )
+     _out_edges: dict[UUID, set[UUID]] = PrivateAttr(default_factory=dict)
+     _in_edges: dict[UUID, set[UUID]] = PrivateAttr(default_factory=dict)
+     _lock: threading.RLock = PrivateAttr(default_factory=threading.RLock)
+
+     @field_validator("nodes", "edges", mode="wrap")
+     @classmethod
+     def _deserialize_nodes_edges(cls, v: Any, handler) -> Pile:
+         """Deserialize nodes/edges from dict format."""
+         if isinstance(v, Pile):
+             return v
+         if isinstance(v, dict):
+             return Pile.from_dict(v)
+         # Let Pydantic handle other cases (like default_factory)
+         return handler(v)
+
+     @model_validator(mode="after")
+     def _rebuild_adjacency_after_init(self) -> Graph:
+         """Rebuild adjacency lists after model initialization."""
+         self._rebuild_adjacency()
+         return self
+
+     def _rebuild_adjacency(self) -> None:
+         """Rebuild adjacency lists from nodes and edges."""
+         self._out_edges = {node_id: set() for node_id in self.nodes.items}
+         self._in_edges = {node_id: set() for node_id in self.nodes.items}
+
+         for edge_id in self.edges.items:
+             edge = self.edges.items[edge_id]
+             if edge.head in self._out_edges:
+                 self._out_edges[edge.head].add(edge_id)
+             if edge.tail in self._in_edges:
+                 self._in_edges[edge.tail].add(edge_id)
+
+     # ==================== Node Operations ====================
+
+     @synchronized
+     def add_node(self, node: Node) -> None:
+         """Add node to graph. Raises ValueError if exists.
+
+         Thread-safe: Uses @synchronized to ensure atomic operation across
+         nodes.add() and adjacency dict initialization.
+         """
+         if node.id in self.nodes:
+             raise ValueError(f"Node {node.id} already exists in graph")
+
+         self.nodes.add(node)
+         self._out_edges[node.id] = set()
+         self._in_edges[node.id] = set()
+
+     @synchronized
+     def remove_node(self, node_id: UUID | Node) -> Node:
+         """Remove node and all connected edges. Raises ValueError if not found.
+
+         Thread-safe: Uses @synchronized with RLock to allow nested calls to
+         remove_edge(). Ensures atomic operation across edge removal, dict
+         cleanup, and node removal.
+         """
+         from ._utils import to_uuid
+
+         nid = to_uuid(node_id)
+         if nid not in self.nodes:
+             raise ValueError(f"Node {nid} not found in graph")
+
+         # Remove all connected edges
+         for edge_id in list(self._in_edges[nid]):
+             self.remove_edge(edge_id)
+         for edge_id in list(self._out_edges[nid]):
+             self.remove_edge(edge_id)
+
+         # Remove adjacency entries
+         del self._in_edges[nid]
+         del self._out_edges[nid]
+
+         # Remove and return node
+         return self.nodes.remove(nid)
+
+     def get_node(self, node_id: UUID | Node) -> Node:
+         """Get node by ID. Raises ValueError if not found."""
+         from ._utils import to_uuid
+
+         nid = to_uuid(node_id)
+         if nid not in self.nodes:
+             raise ValueError(f"Node {nid} not found in graph")
+         return self.nodes.get(nid)
+
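Illustrative node handling (not from the package); it assumes Node() can be constructed with defaults, which this hunk does not show (node.py is a separate file).

```python
# Hypothetical example; Node() defaults are assumed.
from lionherd_core.base.graph import Graph
from lionherd_core.base.node import Node

graph = Graph()
a, b = Node(), Node()
graph.add_node(a)
graph.add_node(b)

print(graph.get_node(a.id).id == a.id)  # True

try:
    graph.add_node(a)                   # duplicate id -> ValueError
except ValueError as exc:
    print(exc)

graph.remove_node(b)                    # also removes any connected edges
```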
+     # ==================== Edge Operations ====================
+
+     @synchronized
+     def add_edge(self, edge: Edge) -> None:
+         """Add edge to graph. Raises ValueError if exists or head/tail missing.
+
+         Thread-safe: Uses @synchronized to ensure atomic operation across
+         edges.add() and adjacency list updates. Critical for the Rust port and
+         Python 3.13+ nogil builds, where the GIL won't protect dict operations.
+         """
+         if edge.id in self.edges:
+             raise ValueError(f"Edge {edge.id} already exists in graph")
+         if edge.head not in self.nodes:
+             raise ValueError(f"Head node {edge.head} not in graph")
+         if edge.tail not in self.nodes:
+             raise ValueError(f"Tail node {edge.tail} not in graph")
+
+         self.edges.add(edge)
+         self._out_edges[edge.head].add(edge.id)
+         self._in_edges[edge.tail].add(edge.id)
+
+     @synchronized
+     def remove_edge(self, edge_id: UUID | Edge) -> Edge:
+         """Remove edge from graph. Raises ValueError if not found.
+
+         Thread-safe: Uses @synchronized to ensure atomic operation across
+         adjacency dict updates and edges.remove(). RLock allows nested calls
+         from remove_node().
+         """
+         from ._utils import to_uuid
+
+         eid = to_uuid(edge_id)
+         if eid not in self.edges:
+             raise ValueError(f"Edge {eid} not found in graph")
+
+         edge = self.edges.get(eid)
+         self._out_edges[edge.head].discard(eid)
+         self._in_edges[edge.tail].discard(eid)
+
+         return self.edges.remove(eid)
+
+     def get_edge(self, edge_id: UUID | Edge) -> Edge:
+         """Get edge by ID. Raises ValueError if not found."""
+         from ._utils import to_uuid
+
+         eid = to_uuid(edge_id)
+         if eid not in self.edges:
+             raise ValueError(f"Edge {eid} not found in graph")
+         return self.edges.get(eid)
+
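A similarly hedged sketch of the edge lifecycle; both endpoints must already be registered nodes.

```python
# Hypothetical example; Node() defaults are assumed.
from lionherd_core.base.graph import Edge, Graph
from lionherd_core.base.node import Node

graph = Graph()
a, b = Node(), Node()
graph.add_node(a)
graph.add_node(b)

edge = Edge(head=a.id, tail=b.id, label=["next"])
graph.add_edge(edge)                          # raises ValueError if head/tail are missing
print(graph.get_edge(edge.id).id == edge.id)  # True

graph.remove_edge(edge)                       # accepts an Edge or its UUID
```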
+     # ==================== Graph Queries ====================
+
+     def get_predecessors(self, node_id: UUID | Node) -> list[Node]:
+         """Get all nodes with edges pointing to this node."""
+         from ._utils import to_uuid
+
+         nid = to_uuid(node_id)
+         predecessors = []
+         for edge_id in self._in_edges.get(nid, set()):
+             edge = self.edges.get(edge_id)
+             predecessors.append(self.nodes.get(edge.head))
+         return predecessors
+
+     def get_successors(self, node_id: UUID | Node) -> list[Node]:
+         """Get all nodes this node points to."""
+         from ._utils import to_uuid
+
+         nid = to_uuid(node_id)
+         successors = []
+         for edge_id in self._out_edges.get(nid, set()):
+             edge = self.edges.get(edge_id)
+             successors.append(self.nodes.get(edge.tail))
+         return successors
+
+     def get_node_edges(
+         self,
+         node_id: UUID | Node,
+         direction: Literal["in", "out", "both"] = "both",
+     ) -> list[Edge]:
+         """Get edges connected to node.
+
+         Args:
+             node_id: Node ID or Node
+             direction: in/out/both
+
+         Raises:
+             ValueError: If invalid direction
+         """
+         from ._utils import to_uuid
+
+         if direction not in {"in", "out", "both"}:
+             raise ValueError(f"Invalid direction: {direction}")
+
+         nid = to_uuid(node_id)
+         result = []
+
+         if direction in {"in", "both"}:
+             for edge_id in self._in_edges.get(nid, set()):
+                 result.append(self.edges.get(edge_id))
+
+         if direction in {"out", "both"}:
+             for edge_id in self._out_edges.get(nid, set()):
+                 result.append(self.edges.get(edge_id))
+
+         return result
+
+     def get_heads(self) -> list[Node]:
+         """Get all nodes with no incoming edges (source nodes)."""
+         return [self.nodes.get(nid) for nid, in_edges in self._in_edges.items() if not in_edges]
+
+     def get_tails(self) -> list[Node]:
+         """Get all nodes with no outgoing edges (sink nodes)."""
+         return [self.nodes.get(nid) for nid, out_edges in self._out_edges.items() if not out_edges]
+
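Query methods on a small chain a -> b -> c, again as an editor's sketch under the same Node() assumption.

```python
# Hypothetical example; Node() defaults are assumed.
from lionherd_core.base.graph import Edge, Graph
from lionherd_core.base.node import Node

g = Graph()
a, b, c = Node(), Node(), Node()
for n in (a, b, c):
    g.add_node(n)
g.add_edge(Edge(head=a.id, tail=b.id))
g.add_edge(Edge(head=b.id, tail=c.id))

print([n.id == b.id for n in g.get_successors(a)])    # [True]
print([n.id == a.id for n in g.get_predecessors(b)])  # [True]
print(len(g.get_heads()), len(g.get_tails()))         # 1 1  (a is a source, c is a sink)
print(len(g.get_node_edges(b, direction="both")))     # 2
```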
+     # ==================== Graph Algorithms ====================
+
+     def is_acyclic(self) -> bool:
+         """Check if graph is acyclic using three-color DFS."""
+         WHITE, GRAY, BLACK = 0, 1, 2
+         colors = {nid: WHITE for nid in self.nodes.items}
+
+         def dfs(node_id: UUID) -> bool:
+             """DFS visit. Returns True if acyclic, False if cycle found."""
+             colors[node_id] = GRAY
+
+             for edge_id in self._out_edges[node_id]:
+                 neighbor_id = self.edges.items[edge_id].tail
+                 if colors[neighbor_id] == GRAY:
+                     # Back edge -> cycle detected
+                     return False
+                 if colors[neighbor_id] == WHITE and not dfs(neighbor_id):
+                     return False
+
+             colors[node_id] = BLACK
+             return True
+
+         # Check all components
+         return all(
+             not (colors[node_id] == WHITE and not dfs(node_id)) for node_id in self.nodes.items
+         )
+
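Cycle detection on the same kind of toy graph, purely illustrative:

```python
# Hypothetical example; Node() defaults are assumed.
from lionherd_core.base.graph import Edge, Graph
from lionherd_core.base.node import Node

g = Graph()
a, b = Node(), Node()
g.add_node(a)
g.add_node(b)
g.add_edge(Edge(head=a.id, tail=b.id))
print(g.is_acyclic())                   # True

g.add_edge(Edge(head=b.id, tail=a.id))  # close the loop a -> b -> a
print(g.is_acyclic())                   # False
```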
+     def topological_sort(self) -> list[Node]:
+         """Topological sort using Kahn's algorithm. Raises ValueError if cyclic."""
+         if not self.is_acyclic():
+             raise ValueError("Cannot topologically sort graph with cycles")
+
+         # Calculate in-degrees
+         in_degree = {nid: len(edges) for nid, edges in self._in_edges.items()}
+
+         # Queue of nodes with no incoming edges
+         queue: deque[UUID] = deque([nid for nid, deg in in_degree.items() if deg == 0])
+         result: list[Node] = []
+
+         while queue:
+             node_id = queue.popleft()
+             result.append(self.nodes.get(node_id))
+
+             # Reduce in-degree of neighbors
+             for edge_id in self._out_edges[node_id]:
+                 neighbor_id = self.edges.get(edge_id).tail
+                 in_degree[neighbor_id] -= 1
+                 if in_degree[neighbor_id] == 0:
+                     queue.append(neighbor_id)
+
+         return result
+
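Kahn's algorithm yields nodes in dependency order; on a cyclic graph topological_sort() raises ValueError, per the guard above. A sketch under the same assumptions:

```python
# Hypothetical example; Node() defaults are assumed.
from lionherd_core.base.graph import Edge, Graph
from lionherd_core.base.node import Node

g = Graph()
a, b, c = Node(), Node(), Node()
for n in (a, b, c):
    g.add_node(n)
g.add_edge(Edge(head=a.id, tail=b.id))
g.add_edge(Edge(head=b.id, tail=c.id))

order = [n.id for n in g.topological_sort()]
print(order == [a.id, b.id, c.id])      # True for this chain

g.add_edge(Edge(head=c.id, tail=a.id))  # introduce a cycle
try:
    g.topological_sort()
except ValueError as exc:
    print(exc)
```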
+     def find_path(
+         self,
+         start: UUID | Node,
+         end: UUID | Node,
+         check_conditions: bool = False,
+     ) -> list[Edge] | None:
+         """Find path from start to end using BFS. Returns edges or None if no path."""
+         from ._utils import to_uuid
+
+         start_id = to_uuid(start)
+         end_id = to_uuid(end)
+
+         if start_id not in self.nodes or end_id not in self.nodes:
+             raise ValueError("Start or end node not in graph")
+
+         # BFS with parent tracking
+         queue: deque[UUID] = deque([start_id])
+         parent: dict[UUID, tuple[UUID, UUID]] = {}  # {node_id: (parent_id, edge_id)}
+         visited = {start_id}
+
+         while queue:
+             current_id = queue.popleft()
+
+             if current_id == end_id:
+                 # Reconstruct path
+                 path = []
+                 node_id = end_id
+                 while node_id in parent:
+                     parent_id, edge_id = parent[node_id]
+                     path.append(self.edges.get(edge_id))
+                     node_id = parent_id
+                 return list(reversed(path))
+
+             # Explore neighbors
+             for edge_id in self._out_edges[current_id]:
+                 edge = self.edges.get(edge_id)
+                 neighbor_id = edge.tail
+
+                 if neighbor_id not in visited:
+                     # Check condition if requested
+                     if check_conditions:
+                         import anyio
+
+                         if not anyio.run(edge.check_condition):
+                             continue
+
+                     visited.add(neighbor_id)
+                     parent[neighbor_id] = (current_id, edge_id)
+                     queue.append(neighbor_id)
+
+         return None  # No path found
+
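BFS pathfinding returns the edge sequence (or None); with check_conditions=True each candidate edge's condition is evaluated synchronously via anyio.run. An illustrative sketch:

```python
# Hypothetical example; Node() defaults are assumed.
from lionherd_core.base.graph import Edge, Graph
from lionherd_core.base.node import Node

g = Graph()
a, b, c = Node(), Node(), Node()
for n in (a, b, c):
    g.add_node(n)
g.add_edge(Edge(head=a.id, tail=b.id))
g.add_edge(Edge(head=b.id, tail=c.id))

path = g.find_path(a, c)                     # list[Edge] in traversal order
print(path is not None and len(path) == 2)   # True
print(g.find_path(c, a))                     # None: no reverse path in a directed chain
```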
+     def __contains__(self, item: object) -> bool:
+         """Check if node or edge is in graph."""
+         if isinstance(item, Node):
+             return item in self.nodes
+         if isinstance(item, Edge):
+             return item in self.edges
+         if isinstance(item, UUID):
+             return item in self.nodes or item in self.edges
+         return False
+
+     def __len__(self) -> int:
+         """Return number of nodes."""
+         return len(self.nodes)
+
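Membership accepts a Node, an Edge, or a bare UUID; len() counts nodes only. Illustrative:

```python
# Hypothetical example; Node() defaults are assumed.
from lionherd_core.base.graph import Graph
from lionherd_core.base.node import Node

g = Graph()
n = Node()
g.add_node(n)

print(n in g, n.id in g)  # True True
print(len(g))             # 1 (nodes only, not edges)
```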
+     # ==================== Serialization ====================
+
+     @override
+     def to_dict(
+         self,
+         mode: Literal["python", "json", "db"] = "python",
+         created_at_format: Literal["datetime", "isoformat", "timestamp"] | None = None,
+         meta_key: str | None = None,
+         item_meta_key: str | None = None,
+         item_created_at_format: Literal["datetime", "isoformat", "timestamp"] | None = None,
+         **kwargs: Any,
+     ) -> dict[str, Any]:
+         """Serialize graph with manual Pile field handling.
+
+         Args:
+             mode: python/json/db
+             created_at_format: Timestamp format for Graph
+             meta_key: Rename Graph metadata field
+             item_meta_key: Pass to Pile.to_dict for node/edge metadata
+             item_created_at_format: Pass to Pile.to_dict for node/edge timestamps
+             **kwargs: Passed to model_dump()
+         """
+         # Merge exclude set with any user-provided exclude
+         exclude = kwargs.pop("exclude", set())
+         if isinstance(exclude, set):
+             exclude = exclude | {"nodes", "edges"}
+         else:
+             exclude = set(exclude) | {"nodes", "edges"}
+
+         # Get base Element serialization, excluding nodes and edges
+         data = super().to_dict(
+             mode=mode,
+             created_at_format=created_at_format,
+             meta_key=meta_key,
+             exclude=exclude,
+             **kwargs,
+         )
+
+         # Manually serialize Pile fields with item parameters
+         data["nodes"] = self.nodes.to_dict(
+             mode=mode,
+             item_meta_key=item_meta_key,
+             item_created_at_format=item_created_at_format,
+         )
+         data["edges"] = self.edges.to_dict(
+             mode=mode,
+             item_meta_key=item_meta_key,
+             item_created_at_format=item_created_at_format,
+         )
+
+         return data
+
+     @classmethod
+     def from_dict(
+         cls,
+         data: dict[str, Any],
+         meta_key: str | None = None,
+         item_meta_key: str | None = None,
+         **kwargs: Any,
+     ) -> Graph:
+         """Deserialize Graph from dict.
+
+         Args:
+             data: Serialized graph data
+             meta_key: Restore Graph metadata from this key (db compatibility)
+             item_meta_key: Pass to Pile.from_dict for node/edge deserialization
+             **kwargs: Additional arguments
+         """
+         from .pile import Pile
+
+         # Make a copy to avoid mutating input
+         data = data.copy()
+
+         # Restore metadata from custom key if specified (db mode deserialization)
+         if meta_key and meta_key in data:
+             data["metadata"] = data.pop(meta_key)
+
+         # Extract, deserialize, and restore nodes and edges Piles
+         nodes_data = data.pop("nodes", None)
+         edges_data = data.pop("edges", None)
+
+         # Deserialize Piles and put them back in data for proper construction
+         if nodes_data:
+             data["nodes"] = Pile.from_dict(
+                 nodes_data, meta_key=item_meta_key, item_meta_key=item_meta_key
+             )
+         if edges_data:
+             data["edges"] = Pile.from_dict(
+                 edges_data, meta_key=item_meta_key, item_meta_key=item_meta_key
+             )
+
+         # Create graph with all fields properly deserialized
+         graph = cls.model_validate(data, **kwargs)
+
+         return graph
+
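A round trip through to_dict()/from_dict() is the intended persistence path; the model validator rebuilds adjacency on load. The exact payload shape depends on Element and Pile serialization, which this hunk does not show, so treat this as a shape sketch only:

```python
# Hypothetical example; Node() defaults and Pile payload shapes are assumed.
from lionherd_core.base.graph import Edge, Graph
from lionherd_core.base.node import Node

g = Graph()
a, b = Node(), Node()
g.add_node(a)
g.add_node(b)
g.add_edge(Edge(head=a.id, tail=b.id))

payload = g.to_dict(mode="json")     # nodes/edges serialized via their Piles
restored = Graph.from_dict(payload)  # adjacency rebuilt by _rebuild_adjacency_after_init

print(len(restored))                               # 2
print(restored.find_path(a.id, b.id) is not None)  # True
```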
+     # ==================== Adapter Methods ====================
+
+     def adapt_to(self, obj_key: str, many: bool = False, **kwargs: Any) -> Any:
+         """Convert to external format via pydapter adapter.
+
+         Args:
+             obj_key: Adapter key (e.g., "neo4j")
+             many: Whether to adapt multiple Graph instances
+             **kwargs: Passed to adapter
+
+         Returns:
+             Adapted object (format depends on adapter)
+         """
+         kwargs.setdefault("adapt_meth", "to_dict")
+         kwargs.setdefault("adapt_kw", {"mode": "db"})
+         return super().adapt_to(obj_key=obj_key, many=many, **kwargs)
+
+     @classmethod
+     def adapt_from(cls, obj: Any, obj_key: str, many: bool = False, **kwargs: Any) -> Graph:
+         """Create from external format via pydapter adapter.
+
+         Args:
+             obj: Source object
+             obj_key: Adapter key (e.g., "neo4j")
+             many: Whether to deserialize multiple Graph instances
+             **kwargs: Passed to adapter
+
+         Returns:
+             Graph instance
+         """
+         kwargs.setdefault("adapt_meth", "from_dict")
+         return super().adapt_from(obj, obj_key=obj_key, many=many, **kwargs)
+
+     async def adapt_to_async(self, obj_key: str, many: bool = False, **kwargs: Any) -> Any:
+         """Async convert to external format via pydapter async adapter.
+
+         Args:
+             obj_key: Adapter key
+             many: Whether to adapt multiple Graph instances
+             **kwargs: Passed to adapter
+
+         Returns:
+             Adapted object
+         """
+         kwargs.setdefault("adapt_meth", "to_dict")
+         kwargs.setdefault("adapt_kw", {"mode": "db"})
+         return await super().adapt_to_async(obj_key=obj_key, many=many, **kwargs)
+
+     @classmethod
+     async def adapt_from_async(
+         cls, obj: Any, obj_key: str, many: bool = False, **kwargs: Any
+     ) -> Graph:
+         """Async create from external format via pydapter async adapter.
+
+         Args:
+             obj: Source object
+             obj_key: Adapter key
+             many: Whether to deserialize multiple Graph instances
+             **kwargs: Passed to adapter
+
+         Returns:
+             Graph instance
+         """
+         kwargs.setdefault("adapt_meth", "from_dict")
+         return await super().adapt_from_async(obj, obj_key=obj_key, many=many, **kwargs)
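Finally, an adapter sketch mirroring the class docstring's TomlAdapter example; register_adapter/adapt_to/adapt_from come from pydapter, and whether a given adapter round-trips a Graph cleanly depends on that adapter rather than on this file.

```python
# Hypothetical example; adapter behavior depends on pydapter, not on graph.py.
from pydapter.adapters import TomlAdapter

from lionherd_core.base.graph import Graph
from lionherd_core.base.node import Node


class TomlGraph(Graph):
    """Subclass with its own isolated adapter registry."""


TomlGraph.register_adapter(TomlAdapter)

g = TomlGraph()
g.add_node(Node())

toml_text = g.adapt_to("toml")                      # serialized via to_dict(mode="db")
restored = TomlGraph.adapt_from(toml_text, "toml")  # classmethod counterpart
```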