krons 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kronos/__init__.py +0 -0
- kronos/core/__init__.py +145 -0
- kronos/core/broadcaster.py +116 -0
- kronos/core/element.py +225 -0
- kronos/core/event.py +316 -0
- kronos/core/eventbus.py +116 -0
- kronos/core/flow.py +356 -0
- kronos/core/graph.py +442 -0
- kronos/core/node.py +982 -0
- kronos/core/pile.py +575 -0
- kronos/core/processor.py +494 -0
- kronos/core/progression.py +296 -0
- kronos/enforcement/__init__.py +57 -0
- kronos/enforcement/common/__init__.py +34 -0
- kronos/enforcement/common/boolean.py +85 -0
- kronos/enforcement/common/choice.py +97 -0
- kronos/enforcement/common/mapping.py +118 -0
- kronos/enforcement/common/model.py +102 -0
- kronos/enforcement/common/number.py +98 -0
- kronos/enforcement/common/string.py +140 -0
- kronos/enforcement/context.py +129 -0
- kronos/enforcement/policy.py +80 -0
- kronos/enforcement/registry.py +153 -0
- kronos/enforcement/rule.py +312 -0
- kronos/enforcement/service.py +370 -0
- kronos/enforcement/validator.py +198 -0
- kronos/errors.py +146 -0
- kronos/operations/__init__.py +32 -0
- kronos/operations/builder.py +228 -0
- kronos/operations/flow.py +398 -0
- kronos/operations/node.py +101 -0
- kronos/operations/registry.py +92 -0
- kronos/protocols.py +414 -0
- kronos/py.typed +0 -0
- kronos/services/__init__.py +81 -0
- kronos/services/backend.py +286 -0
- kronos/services/endpoint.py +608 -0
- kronos/services/hook.py +471 -0
- kronos/services/imodel.py +465 -0
- kronos/services/registry.py +115 -0
- kronos/services/utilities/__init__.py +36 -0
- kronos/services/utilities/header_factory.py +87 -0
- kronos/services/utilities/rate_limited_executor.py +271 -0
- kronos/services/utilities/rate_limiter.py +180 -0
- kronos/services/utilities/resilience.py +414 -0
- kronos/session/__init__.py +41 -0
- kronos/session/exchange.py +258 -0
- kronos/session/message.py +60 -0
- kronos/session/session.py +411 -0
- kronos/specs/__init__.py +25 -0
- kronos/specs/adapters/__init__.py +0 -0
- kronos/specs/adapters/_utils.py +45 -0
- kronos/specs/adapters/dataclass_field.py +246 -0
- kronos/specs/adapters/factory.py +56 -0
- kronos/specs/adapters/pydantic_adapter.py +309 -0
- kronos/specs/adapters/sql_ddl.py +946 -0
- kronos/specs/catalog/__init__.py +36 -0
- kronos/specs/catalog/_audit.py +39 -0
- kronos/specs/catalog/_common.py +43 -0
- kronos/specs/catalog/_content.py +59 -0
- kronos/specs/catalog/_enforcement.py +70 -0
- kronos/specs/factory.py +120 -0
- kronos/specs/operable.py +314 -0
- kronos/specs/phrase.py +405 -0
- kronos/specs/protocol.py +140 -0
- kronos/specs/spec.py +506 -0
- kronos/types/__init__.py +60 -0
- kronos/types/_sentinel.py +311 -0
- kronos/types/base.py +369 -0
- kronos/types/db_types.py +260 -0
- kronos/types/identity.py +66 -0
- kronos/utils/__init__.py +40 -0
- kronos/utils/_hash.py +234 -0
- kronos/utils/_json_dump.py +392 -0
- kronos/utils/_lazy_init.py +63 -0
- kronos/utils/_to_list.py +165 -0
- kronos/utils/_to_num.py +85 -0
- kronos/utils/_utils.py +375 -0
- kronos/utils/concurrency/__init__.py +205 -0
- kronos/utils/concurrency/_async_call.py +333 -0
- kronos/utils/concurrency/_cancel.py +122 -0
- kronos/utils/concurrency/_errors.py +96 -0
- kronos/utils/concurrency/_patterns.py +363 -0
- kronos/utils/concurrency/_primitives.py +328 -0
- kronos/utils/concurrency/_priority_queue.py +135 -0
- kronos/utils/concurrency/_resource_tracker.py +110 -0
- kronos/utils/concurrency/_run_async.py +67 -0
- kronos/utils/concurrency/_task.py +95 -0
- kronos/utils/concurrency/_utils.py +79 -0
- kronos/utils/fuzzy/__init__.py +14 -0
- kronos/utils/fuzzy/_extract_json.py +90 -0
- kronos/utils/fuzzy/_fuzzy_json.py +288 -0
- kronos/utils/fuzzy/_fuzzy_match.py +149 -0
- kronos/utils/fuzzy/_string_similarity.py +187 -0
- kronos/utils/fuzzy/_to_dict.py +396 -0
- kronos/utils/sql/__init__.py +13 -0
- kronos/utils/sql/_sql_validation.py +142 -0
- krons-0.1.0.dist-info/METADATA +70 -0
- krons-0.1.0.dist-info/RECORD +101 -0
- krons-0.1.0.dist-info/WHEEL +4 -0
- krons-0.1.0.dist-info/licenses/LICENSE +201 -0
kronos/core/graph.py
ADDED
|
@@ -0,0 +1,442 @@
|
|
|
1
|
+
# Copyright (c) 2025 - 2026, HaiyangLi <quantocean.li at gmail dot com>
|
|
2
|
+
# SPDX-License-Identifier: Apache-2.0
|
|
3
|
+
|
|
4
|
+
from __future__ import annotations
|
|
5
|
+
|
|
6
|
+
import threading
|
|
7
|
+
from collections import deque
|
|
8
|
+
from typing import Any, Literal
|
|
9
|
+
from uuid import UUID
|
|
10
|
+
|
|
11
|
+
from pydantic import Field, PrivateAttr, field_validator, model_validator
|
|
12
|
+
from typing_extensions import override
|
|
13
|
+
|
|
14
|
+
from kronos.errors import NotFoundError
|
|
15
|
+
from kronos.protocols import Containable, Deserializable, Serializable, implements
|
|
16
|
+
from kronos.types import Unset, UnsetType, is_unset
|
|
17
|
+
from kronos.utils import synchronized
|
|
18
|
+
|
|
19
|
+
from .element import Element
|
|
20
|
+
from .node import Node
|
|
21
|
+
from .pile import Pile
|
|
22
|
+
|
|
23
|
+
__all__ = ("Edge", "EdgeCondition", "Graph")
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
class EdgeCondition:
    """Async predicate deciding whether an edge may be traversed.

    The base class is unconditional: apply() always returns True.
    Subclass and override apply() to implement custom traversal rules;
    any keyword arguments given at construction become instance state.

    Example:
        class WeightThreshold(EdgeCondition):
            async def apply(self, context) -> bool:
                return context.get("weight", 0) > self.threshold
    """

    def __init__(self, **kwargs: Any):
        """Store every keyword argument as an instance attribute."""
        for name, attr_value in kwargs.items():
            setattr(self, name, attr_value)

    async def apply(self, *args: Any, **kwargs: Any) -> bool:
        """Decide traversability. Base implementation always passes; override for custom logic."""
        return True

    async def __call__(self, *args: Any, **kwargs: Any) -> bool:
        """Make the condition awaitable-callable; forwards everything to apply()."""
        return await self.apply(*args, **kwargs)
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
class Edge(Element):
    """Directed edge from head to tail with optional labels, a runtime condition, and properties.

    Direction is head -> tail. The condition is runtime-only state and is
    excluded from serialization (exclude=True on the field).
    """

    head: UUID = Field(description="Source node ID")
    tail: UUID = Field(description="Target node ID")
    label: list[str] = Field(default_factory=list, description="Edge labels/tags")
    condition: EdgeCondition | None = Field(
        default=None, exclude=True, description="Runtime traversal predicate"
    )
    properties: dict[str, Any] = Field(default_factory=dict, description="Custom edge attributes")

    @field_validator("head", "tail", mode="before")
    @classmethod
    def _validate_uuid(cls, value: Any) -> UUID:
        """Normalize head/tail inputs into UUIDs via the Element coercion helper."""
        return cls._coerce_id(value)

    async def check_condition(self, *args: Any, **kwargs: Any) -> bool:
        """Return True when the edge is traversable: either no condition is set, or it passes."""
        cond = self.condition
        return True if cond is None else await cond.apply(*args, **kwargs)
|
|
77
|
+
|
|
78
|
+
|
|
79
|
+
@implements(
    Serializable,
    Deserializable,
    Containable,
)
class Graph(Element):
    """Directed graph with Pile-backed storage and O(1) adjacency operations.

    Features:
        - O(1) node/edge lookup via adjacency lists
        - Cycle detection, topological sort, pathfinding
        - Thread-safe mutations (RLock synchronized)
        - Conditional edge traversal

    Example:
        graph = Graph()
        graph.add_node(Node())
        graph.add_edge(Edge(head=n1.id, tail=n2.id))
        path = await graph.find_path(n1, n2)
    """

    nodes: Pile[Node] = Field(
        default_factory=lambda: Pile(item_type=Node), description="Node storage"
    )
    edges: Pile[Edge] = Field(
        default_factory=lambda: Pile(item_type=Edge), description="Edge storage"
    )
    # Runtime-only adjacency: node id -> set of edge ids; rebuilt after (de)serialization.
    _out_edges: dict[UUID, set[UUID]] = PrivateAttr(default_factory=dict)
    _in_edges: dict[UUID, set[UUID]] = PrivateAttr(default_factory=dict)
    _lock: threading.RLock = PrivateAttr(default_factory=threading.RLock)

    @field_validator("nodes", "edges", mode="wrap")
    @classmethod
    def _deserialize_nodes_edges(cls, v: Any, handler) -> Pile:
        """Deserialize nodes/edges from dict if needed; pass Piles through untouched."""
        if isinstance(v, Pile):
            return v
        if isinstance(v, dict):
            return Pile.from_dict(v)
        return handler(v)  # pragma: no cover

    @model_validator(mode="after")
    def _rebuild_adjacency_after_init(self) -> Graph:
        """Rebuild adjacency lists after model initialization."""
        self._rebuild_adjacency()
        return self

    def _rebuild_adjacency(self) -> None:
        """Rebuild _out_edges and _in_edges from current nodes/edges.

        Edges referencing nodes absent from the graph are silently skipped.
        """
        self._out_edges = {node_id: set() for node_id in self.nodes.keys()}  # noqa: SIM118
        self._in_edges = {node_id: set() for node_id in self.nodes.keys()}  # noqa: SIM118

        for edge_id in self.edges.keys():  # noqa: SIM118
            edge = self.edges[edge_id]
            if edge.head in self._out_edges:
                self._out_edges[edge.head].add(edge_id)
            if edge.tail in self._in_edges:
                self._in_edges[edge.tail].add(edge_id)

    def _check_node_exists(self, node_id: UUID) -> Node:
        """Verify node exists, returning it. Raises NotFoundError with graph context."""
        try:
            return self.nodes[node_id]
        except NotFoundError as e:
            raise NotFoundError(
                f"Node {node_id} not found in graph",
                details=e.details,
                retryable=e.retryable,
                cause=e,
            )

    def _check_edge_exists(self, edge_id: UUID) -> Edge:
        """Verify edge exists, returning it. Raises NotFoundError with graph context."""
        try:
            return self.edges[edge_id]
        except NotFoundError as e:
            raise NotFoundError(
                f"Edge {edge_id} not found in graph",
                details=e.details,
                retryable=e.retryable,
                cause=e,
            )

    # ==================== Node Operations ====================

    @synchronized
    def add_node(self, node: Node) -> None:
        """Add node to graph. Raises ExistsError if duplicate.

        Pile.add raises before adjacency is touched, so a failed add
        leaves the adjacency maps unchanged.
        """
        self.nodes.add(node)
        self._out_edges[node.id] = set()
        self._in_edges[node.id] = set()

    @synchronized
    def remove_node(self, node_id: UUID | Node) -> Node:
        """Remove node and all connected edges. Raises NotFoundError if missing.

        Returns:
            The removed Node.
        """
        nid = self._coerce_id(node_id)
        self._check_node_exists(nid)

        # Copy to list: remove_edge mutates the sets we iterate. A self-loop
        # is discarded from both sets on its first removal, so it is never
        # removed twice.
        for edge_id in list(self._in_edges[nid]):
            self.remove_edge(edge_id)
        for edge_id in list(self._out_edges[nid]):
            self.remove_edge(edge_id)

        del self._in_edges[nid]
        del self._out_edges[nid]
        return self.nodes.remove(nid)

    # ==================== Edge Operations ====================

    @synchronized
    def add_edge(self, edge: Edge) -> None:
        """Add edge to graph. Raises NotFoundError if head/tail missing."""
        if edge.head not in self.nodes:
            raise NotFoundError(f"Head node {edge.head} not in graph")
        if edge.tail not in self.nodes:
            raise NotFoundError(f"Tail node {edge.tail} not in graph")

        self.edges.add(edge)
        self._out_edges[edge.head].add(edge.id)
        self._in_edges[edge.tail].add(edge.id)

    @synchronized
    def remove_edge(self, edge_id: UUID | Edge) -> Edge:
        """Remove edge from graph. Raises NotFoundError if missing.

        Returns:
            The removed Edge.
        """
        eid = self._coerce_id(edge_id)
        edge = self._check_edge_exists(eid)

        self._out_edges[edge.head].discard(eid)
        self._in_edges[edge.tail].discard(eid)
        return self.edges.remove(eid)

    # ==================== Graph Queries ====================

    def get_predecessors(self, node_id: UUID | Node) -> list[Node]:
        """Get nodes with edges pointing to this node (in-neighbors)."""
        nid = self._coerce_id(node_id)
        return [self.nodes[self.edges[eid].head] for eid in self._in_edges.get(nid, set())]

    def get_successors(self, node_id: UUID | Node) -> list[Node]:
        """Get nodes this node points to (out-neighbors)."""
        nid = self._coerce_id(node_id)
        return [self.nodes[self.edges[eid].tail] for eid in self._out_edges.get(nid, set())]

    def get_node_edges(
        self,
        node_id: UUID | Node,
        direction: Literal["in", "out", "both"] = "both",
    ) -> list[Edge]:
        """Get edges connected to node by direction (in/out/both).

        Raises:
            ValueError: If direction is not one of "in", "out", "both".
        """
        if direction not in {"in", "out", "both"}:
            raise ValueError(f"Invalid direction: {direction}")

        nid = self._coerce_id(node_id)
        result = []

        if direction in {"in", "both"}:
            result.extend(self.edges[eid] for eid in self._in_edges.get(nid, set()))
        if direction in {"out", "both"}:
            result.extend(self.edges[eid] for eid in self._out_edges.get(nid, set()))

        return result

    def get_heads(self) -> list[Node]:
        """Get source nodes (no incoming edges)."""
        return [self.nodes[nid] for nid, in_edges in self._in_edges.items() if not in_edges]

    def get_tails(self) -> list[Node]:
        """Get sink nodes (no outgoing edges)."""
        return [self.nodes[nid] for nid, out_edges in self._out_edges.items() if not out_edges]

    # ==================== Graph Algorithms ====================

    def is_acyclic(self) -> bool:
        """Check if graph is acyclic using iterative three-color DFS. O(V+E).

        Uses an explicit stack instead of recursion so that long chains of
        nodes cannot exhaust the interpreter's recursion limit.
        """
        WHITE, GRAY, BLACK = 0, 1, 2
        colors = {nid: WHITE for nid in self.nodes.keys()}  # noqa: SIM118

        for root in self.nodes.keys():  # noqa: SIM118
            if colors[root] != WHITE:
                continue
            colors[root] = GRAY
            # Each frame holds (node, iterator over its out-edges); the
            # iterator resumes where it left off when the frame is revisited.
            stack = [(root, iter(self._out_edges[root]))]
            while stack:
                node_id, out_iter = stack[-1]
                for edge_id in out_iter:
                    neighbor_id = self.edges[edge_id].tail
                    if colors[neighbor_id] == GRAY:
                        return False  # back edge: cycle found
                    if colors[neighbor_id] == WHITE:
                        colors[neighbor_id] = GRAY
                        stack.append((neighbor_id, iter(self._out_edges[neighbor_id])))
                        break
                else:
                    # All out-edges explored: finish this node.
                    colors[node_id] = BLACK
                    stack.pop()
        return True

    def topological_sort(self) -> list[Node]:
        """Topological sort via Kahn's algorithm. Raises ValueError if cyclic.

        Single pass: Kahn's algorithm itself detects cycles (some nodes are
        never consumed), so no separate acyclicity check is needed.
        """
        in_degree = {nid: len(edges) for nid, edges in self._in_edges.items()}
        queue: deque[UUID] = deque([nid for nid, deg in in_degree.items() if deg == 0])
        result: list[Node] = []

        while queue:
            node_id = queue.popleft()
            result.append(self.nodes[node_id])

            for edge_id in self._out_edges[node_id]:
                neighbor_id = self.edges[edge_id].tail
                in_degree[neighbor_id] -= 1
                if in_degree[neighbor_id] == 0:
                    queue.append(neighbor_id)

        if len(result) != len(self.nodes):
            raise ValueError("Cannot topologically sort graph with cycles")
        return result

    async def find_path(
        self,
        start: UUID | Node,
        end: UUID | Node,
        check_conditions: bool = False,
    ) -> list[Edge] | None:
        """Find path via BFS. Returns edge list or None if no path exists.

        Args:
            start: Source node
            end: Target node
            check_conditions: If True, respect edge conditions during traversal

        Raises:
            NotFoundError: If start or end node not in graph
        """
        start_id = self._coerce_id(start)
        end_id = self._coerce_id(end)

        if start_id not in self.nodes or end_id not in self.nodes:
            raise NotFoundError("Start or end node not in graph")

        queue: deque[UUID] = deque([start_id])
        # child id -> (parent id, connecting edge id), for path reconstruction
        parent: dict[UUID, tuple[UUID, UUID]] = {}
        visited = {start_id}

        while queue:
            current_id = queue.popleft()

            if current_id == end_id:
                # Walk parent links back from end to start, then reverse.
                path = []
                node_id = end_id
                while node_id in parent:
                    parent_id, edge_id = parent[node_id]
                    path.append(self.edges[edge_id])
                    node_id = parent_id
                return list(reversed(path))

            for edge_id in self._out_edges[current_id]:
                edge: Edge = self.edges[edge_id]
                neighbor_id = edge.tail

                if neighbor_id not in visited:
                    # Conditions are only evaluated for unvisited neighbors,
                    # so each condition runs at most once per search.
                    if check_conditions and not await edge.check_condition():
                        continue
                    visited.add(neighbor_id)
                    parent[neighbor_id] = (current_id, edge_id)
                    queue.append(neighbor_id)

        return None

    def __contains__(self, item: object) -> bool:
        """Check if node, edge, or UUID is in graph."""
        if isinstance(item, Node):
            return item in self.nodes
        if isinstance(item, Edge):
            return item in self.edges
        if isinstance(item, UUID):
            return item in self.nodes or item in self.edges
        return False

    def __len__(self) -> int:
        """Return node count."""
        return len(self.nodes)

    # ==================== Serialization ====================

    @override
    def to_dict(
        self,
        mode: Literal["python", "json", "db"] = "python",
        created_at_format: (Literal["datetime", "isoformat", "timestamp"] | UnsetType) = Unset,
        meta_key: str | UnsetType = Unset,
        item_meta_key: str | UnsetType = Unset,
        item_created_at_format: (Literal["datetime", "isoformat", "timestamp"] | UnsetType) = Unset,
        **kwargs: Any,
    ) -> dict[str, Any]:
        """Serialize graph with nodes and edges as nested Pile dicts.

        Args:
            mode: Serialization mode (python/json/db)
            created_at_format: Timestamp format for Graph
            meta_key: Rename Graph metadata field
            item_meta_key: Metadata key for nodes/edges
            item_created_at_format: Timestamp format for nodes/edges
            **kwargs: Passed to model_dump()
        """
        # Exclude nodes/edges from the base dump; they are re-serialized
        # below with their own Pile-level options.
        exclude = kwargs.pop("exclude", set())
        exclude = (exclude if isinstance(exclude, set) else set(exclude)) | {
            "nodes",
            "edges",
        }

        data = super().to_dict(
            mode=mode,
            created_at_format=created_at_format,
            meta_key=meta_key,
            exclude=exclude,
            **kwargs,
        )

        data["nodes"] = self.nodes.to_dict(
            mode=mode,
            item_meta_key=item_meta_key,
            item_created_at_format=item_created_at_format,
        )
        data["edges"] = self.edges.to_dict(
            mode=mode,
            item_meta_key=item_meta_key,
            item_created_at_format=item_created_at_format,
        )

        return data

    @classmethod
    def from_dict(
        cls,
        data: dict[str, Any],
        meta_key: str | UnsetType = Unset,
        item_meta_key: str | UnsetType = Unset,
        **kwargs: Any,
    ) -> Graph:
        """Deserialize Graph from dict. Adjacency lists rebuilt automatically.

        Args:
            data: Serialized graph data
            meta_key: Restore Graph metadata from this key
            item_meta_key: Metadata key for node/edge deserialization
            **kwargs: Additional model_validate arguments
        """
        data = data.copy()

        if not is_unset(meta_key) and meta_key in data:
            data["metadata"] = data.pop(meta_key)

        nodes_data = data.pop("nodes", None)
        edges_data = data.pop("edges", None)

        if nodes_data:
            data["nodes"] = Pile.from_dict(
                nodes_data, meta_key=item_meta_key, item_meta_key=item_meta_key
            )
        if edges_data:
            data["edges"] = Pile.from_dict(
                edges_data, meta_key=item_meta_key, item_meta_key=item_meta_key
            )

        # Adjacency is rebuilt by the model_validator after validation.
        return cls.model_validate(data, **kwargs)
|