duragraph-python 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- duragraph/__init__.py +35 -0
- duragraph/cli/__init__.py +5 -0
- duragraph/cli/main.py +163 -0
- duragraph/edges.py +116 -0
- duragraph/graph.py +429 -0
- duragraph/nodes.py +252 -0
- duragraph/prompts/__init__.py +6 -0
- duragraph/prompts/decorators.py +43 -0
- duragraph/prompts/store.py +171 -0
- duragraph/py.typed +0 -0
- duragraph/types.py +100 -0
- duragraph/worker/__init__.py +5 -0
- duragraph/worker/worker.py +327 -0
- duragraph_python-0.1.0.dist-info/METADATA +224 -0
- duragraph_python-0.1.0.dist-info/RECORD +18 -0
- duragraph_python-0.1.0.dist-info/WHEEL +4 -0
- duragraph_python-0.1.0.dist-info/entry_points.txt +2 -0
- duragraph_python-0.1.0.dist-info/licenses/LICENSE +190 -0
duragraph/graph.py
ADDED
|
@@ -0,0 +1,429 @@
|
|
|
1
|
+
"""Graph decorator and class for DuraGraph workflows."""
|
|
2
|
+
|
|
3
|
+
from collections.abc import AsyncIterator, Callable
|
|
4
|
+
from typing import Any, TypeVar
|
|
5
|
+
|
|
6
|
+
from duragraph.edges import Edge, NodeProxy
|
|
7
|
+
from duragraph.nodes import NodeMetadata
|
|
8
|
+
from duragraph.types import Event, GraphConfig, RunResult, State
|
|
9
|
+
|
|
10
|
+
T = TypeVar("T")
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
class GraphDefinition:
    """Internal representation of a graph definition.

    Holds the static description of a graph — its id, node metadata,
    edges, and entrypoint — independently of any runtime instance.
    """

    def __init__(
        self,
        graph_id: str,
        nodes: dict[str, NodeMetadata],
        edges: list[Edge],
        entrypoint: str | None = None,
    ):
        self.graph_id = graph_id
        self.nodes = nodes
        self.edges = edges
        self.entrypoint = entrypoint

    def to_ir(self) -> dict[str, Any]:
        """Convert to Intermediate Representation for the control plane."""
        # One IR entry per node; the attribute name doubles as the node id.
        nodes_ir = [
            {"id": node_name, "type": meta.node_type, "config": meta.config}
            for node_name, meta in self.nodes.items()
        ]
        # Edges know how to serialize themselves.
        edges_ir = [edge.to_dict() for edge in self.edges]

        return {
            "version": "1.0",
            "graph": {
                "id": self.graph_id,
                "entrypoint": self.entrypoint,
                "nodes": nodes_ir,
                "edges": edges_ir,
            },
        }
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
class GraphInstance:
    """Runtime instance of a graph that can be executed.

    Wraps a :class:`GraphDefinition` together with the user's decorated
    class instance and walks the graph locally (``run``/``arun``/``stream``)
    or registers it with a control plane (``serve``/``aserve``).
    """

    def __init__(self, definition: GraphDefinition, instance: Any):
        self._definition = definition
        self._instance = instance
        self._control_plane_url: str | None = None

    @staticmethod
    def _now() -> str:
        """Return a timezone-aware ISO-8601 UTC timestamp.

        datetime.utcnow() is deprecated (naive datetimes); use an aware
        UTC timestamp instead.
        """
        from datetime import datetime, timezone

        return datetime.now(timezone.utc).isoformat()

    def _resolve_next_node(self, current_node: str, result: Any) -> str | None:
        """Resolve which node follows *current_node*.

        The first edge whose source matches wins. A plain string target is
        followed unconditionally; a dict target is a router mapping keyed by
        the (string) value the node just returned. Returns None when no edge
        applies, which terminates traversal.
        """
        for edge in self._definition.edges:
            if edge.source == current_node:
                if isinstance(edge.target, str):
                    return edge.target
                if isinstance(edge.target, dict):
                    # Router edge: the node's return value selects the branch.
                    if isinstance(result, str) and result in edge.target:
                        return edge.target[result]
                return None
        return None

    def run(
        self,
        input: State,
        *,
        config: GraphConfig | None = None,
        thread_id: str | None = None,
    ) -> RunResult:
        """Execute the graph synchronously.

        Args:
            input: Initial state for the graph.
            config: Optional execution configuration.
            thread_id: Optional thread ID for conversation context.

        Returns:
            RunResult with execution output.

        Raises:
            ValueError: If no entrypoint is defined or a node method is
                missing on the instance.
        """
        # Local execution - traverse graph and execute nodes.
        state = input.copy()
        nodes_executed: list[str] = []

        current_node = self._definition.entrypoint
        if current_node is None:
            raise ValueError("No entrypoint defined for graph")

        # NOTE(review): no cycle guard — a graph whose edges loop back will
        # run until a router stops matching.
        while current_node is not None:
            # Node implementations are plain methods on the user's instance.
            node_method = getattr(self._instance, current_node, None)
            if node_method is None:
                raise ValueError(f"Node method '{current_node}' not found")

            result = node_method(state)
            # Dict results are merged into state; other results (e.g. a
            # router's string) only steer edge resolution.
            if isinstance(result, dict):
                state.update(result)
            nodes_executed.append(current_node)

            current_node = self._resolve_next_node(current_node, result)

        return RunResult(
            run_id="local-run",
            status="completed",
            output=state,
            nodes_executed=nodes_executed,
        )

    async def arun(
        self,
        input: State,
        *,
        config: GraphConfig | None = None,
        thread_id: str | None = None,
    ) -> RunResult:
        """Execute the graph asynchronously.

        Args:
            input: Initial state for the graph.
            config: Optional execution configuration.
            thread_id: Optional thread ID for conversation context.

        Returns:
            RunResult with execution output.
        """
        # For now, delegate to sync implementation. NOTE: this blocks the
        # running event loop while the graph executes.
        return self.run(input, config=config, thread_id=thread_id)

    async def stream(
        self,
        input: State,
        *,
        config: GraphConfig | None = None,
        thread_id: str | None = None,
    ) -> AsyncIterator[Event]:
        """Stream graph execution events.

        Args:
            input: Initial state for the graph.
            config: Optional execution configuration.
            thread_id: Optional thread ID for conversation context.

        Yields:
            Event objects for each execution step.
        """
        run_id = "local-stream"
        state = input.copy()

        yield Event(
            type="run_started",
            run_id=run_id,
            data={"input": input},
            timestamp=self._now(),
        )

        current_node = self._definition.entrypoint
        if current_node is None:
            # Unlike run(), streaming reports failure as a terminal event
            # instead of raising, so consumers always see an end event.
            yield Event(
                type="run_failed",
                run_id=run_id,
                data={"error": "No entrypoint defined"},
                timestamp=self._now(),
            )
            return

        while current_node is not None:
            yield Event(
                type="node_started",
                run_id=run_id,
                node_id=current_node,
                data={},
                timestamp=self._now(),
            )

            node_method = getattr(self._instance, current_node, None)
            if node_method is None:
                yield Event(
                    type="run_failed",
                    run_id=run_id,
                    data={"error": f"Node '{current_node}' not found"},
                    timestamp=self._now(),
                )
                return

            result = node_method(state)
            if isinstance(result, dict):
                state.update(result)

            yield Event(
                type="node_completed",
                run_id=run_id,
                node_id=current_node,
                data={"output": result},
                timestamp=self._now(),
            )

            current_node = self._resolve_next_node(current_node, result)

        yield Event(
            type="run_completed",
            run_id=run_id,
            data={"output": state},
            timestamp=self._now(),
        )

    def serve(
        self,
        control_plane_url: str,
        *,
        worker_name: str | None = None,
        capabilities: list[str] | None = None,
    ) -> None:
        """Register and serve this graph on the control plane.

        Args:
            control_plane_url: URL of the DuraGraph control plane.
            worker_name: Optional name for the worker.
            capabilities: Optional list of worker capabilities.
        """
        # Imported lazily to avoid a hard dependency for local-only use.
        from duragraph.worker import Worker

        worker = Worker(
            control_plane_url=control_plane_url,
            name=worker_name,
            capabilities=capabilities,
        )
        worker.register_graph(self._definition)
        # NOTE(review): Worker.run() presumably blocks until shutdown — confirm.
        worker.run()

    async def aserve(
        self,
        control_plane_url: str,
        *,
        worker_name: str | None = None,
        capabilities: list[str] | None = None,
    ) -> None:
        """Async version of serve().

        Args:
            control_plane_url: URL of the DuraGraph control plane.
            worker_name: Optional name for the worker.
            capabilities: Optional list of worker capabilities.
        """
        from duragraph.worker import Worker

        worker = Worker(
            control_plane_url=control_plane_url,
            name=worker_name,
            capabilities=capabilities,
        )
        worker.register_graph(self._definition)
        await worker.arun()
|
|
277
|
+
|
|
278
|
+
|
|
279
|
+
def Graph(
    id: str,
    *,
    description: str | None = None,
    version: str = "1.0.0",
) -> Callable[[type[T]], type[T]]:
    """Decorator to define a graph from a class.

    The decorated class keeps its own ``__init__`` (it is wrapped, not
    replaced) and gains graph plumbing: ``run``/``arun``/``stream``/``serve``
    methods, edge bookkeeping, and an ``as_subgraph`` classmethod.

    Args:
        id: Unique identifier for the graph.
        description: Optional description of the graph.
        version: Version string for the graph.

    Returns:
        Decorated class that can be instantiated as a graph.

    Example:
        @Graph(id="customer_support")
        class CustomerSupportAgent:
            @entrypoint
            @llm_node(model="gpt-4o-mini")
            def classify(self, state):
                return {"intent": "billing"}

            @llm_node(model="gpt-4o-mini")
            def respond(self, state):
                return {"response": "I'll help with billing."}

            classify >> respond
    """

    def decorator(cls: type[T]) -> type[T]:
        # Preserve the user's constructor; new_init chains to it before
        # attaching graph state onto the instance.
        original_init = cls.__init__

        def new_init(self: Any, *args: Any, **kwargs: Any) -> None:
            original_init(self, *args, **kwargs)
            # `id`, `description`, `version` are closed over from Graph().
            self._graph_id = id
            self._graph_description = description
            self._graph_version = version
            self._edges: list[Edge] = []
            self._setup_node_proxies()

        def _setup_node_proxies(self: Any) -> None:
            """Set up NodeProxy objects for >> operator."""
            for name in dir(self):
                if name.startswith("_"):
                    continue
                attr = getattr(self, name)
                # Node methods are recognized by the _node_metadata marker
                # set by the node decorators.
                if callable(attr) and hasattr(attr, "_node_metadata"):
                    # Create a proxy that enables >> operator
                    # NOTE(review): the proxy is stored under `_{name}_proxy`
                    # rather than over the method itself — confirm NodeProxy
                    # wiring actually supports the `classify >> respond`
                    # syntax shown in the docstring example.
                    proxy = NodeProxy(name, self)
                    setattr(self, f"_{name}_proxy", proxy)

        def _add_edge(self: Any, source: str, target: str) -> None:
            """Add an edge between nodes."""
            self._edges.append(Edge(source, target))

        def _get_definition(self: Any) -> GraphDefinition:
            """Get the graph definition.

            Scans public attributes for callables carrying ``_node_metadata``;
            a node whose config has ``is_entrypoint`` set becomes the
            entrypoint (if several are marked, the last one in ``dir()``
            order wins).
            """
            nodes: dict[str, NodeMetadata] = {}
            entrypoint: str | None = None

            for name in dir(self):
                if name.startswith("_"):
                    continue
                attr = getattr(self, name)
                if callable(attr) and hasattr(attr, "_node_metadata"):
                    meta: NodeMetadata = attr._node_metadata
                    nodes[name] = meta
                    if meta.config.get("is_entrypoint"):
                        entrypoint = name

            return GraphDefinition(
                graph_id=self._graph_id,
                nodes=nodes,
                edges=self._edges,
                entrypoint=entrypoint,
            )

        def run(
            self: Any,
            input: State,
            *,
            config: GraphConfig | None = None,
            thread_id: str | None = None,
        ) -> RunResult:
            """Execute the graph."""
            # A fresh definition/instance pair is built per call so node or
            # edge changes made after construction are picked up.
            definition = self._get_definition()
            instance = GraphInstance(definition, self)
            return instance.run(input, config=config, thread_id=thread_id)

        async def arun(
            self: Any,
            input: State,
            *,
            config: GraphConfig | None = None,
            thread_id: str | None = None,
        ) -> RunResult:
            """Execute the graph asynchronously."""
            definition = self._get_definition()
            instance = GraphInstance(definition, self)
            return await instance.arun(input, config=config, thread_id=thread_id)

        async def stream(
            self: Any,
            input: State,
            *,
            config: GraphConfig | None = None,
            thread_id: str | None = None,
        ) -> AsyncIterator[Event]:
            """Stream graph execution events."""
            definition = self._get_definition()
            instance = GraphInstance(definition, self)
            # Re-yield so this function is itself an async generator.
            async for event in instance.stream(input, config=config, thread_id=thread_id):
                yield event

        def serve(
            self: Any,
            control_plane_url: str,
            *,
            worker_name: str | None = None,
            capabilities: list[str] | None = None,
        ) -> None:
            """Register and serve this graph."""
            definition = self._get_definition()
            instance = GraphInstance(definition, self)
            instance.serve(
                control_plane_url,
                worker_name=worker_name,
                capabilities=capabilities,
            )

        def as_subgraph(cls_self: type[Any]) -> Any:
            """Return this graph as a subgraph node."""
            # Create a subgraph node that can be used in another graph
            # NOTE(review): instantiates with no arguments — the decorated
            # class must have a zero-arg constructor for this to work.
            instance = cls_self()
            return instance._get_definition()

        # Monkey-patch the graph API onto the class. Order matters only in
        # that new_init calls _setup_node_proxies at instance creation time.
        cls.__init__ = new_init
        cls._setup_node_proxies = _setup_node_proxies
        cls._add_edge = _add_edge
        cls._get_definition = _get_definition
        cls.run = run
        cls.arun = arun
        cls.stream = stream
        cls.serve = serve
        cls.as_subgraph = classmethod(as_subgraph)
        # NOTE(review): GraphInstance.aserve exists but no async serve is
        # attached here — confirm whether that is intentional.

        return cls

    return decorator
|
duragraph/nodes.py
ADDED
|
@@ -0,0 +1,252 @@
|
|
|
1
|
+
"""Node decorators for DuraGraph workflows."""
|
|
2
|
+
|
|
3
|
+
from collections.abc import Callable
|
|
4
|
+
from functools import wraps
|
|
5
|
+
from typing import Any, TypeVar
|
|
6
|
+
|
|
7
|
+
F = TypeVar("F", bound=Callable[..., Any])
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class NodeMetadata:
|
|
11
|
+
"""Metadata attached to node functions."""
|
|
12
|
+
|
|
13
|
+
def __init__(
|
|
14
|
+
self,
|
|
15
|
+
node_type: str,
|
|
16
|
+
name: str | None = None,
|
|
17
|
+
config: dict[str, Any] | None = None,
|
|
18
|
+
):
|
|
19
|
+
self.node_type = node_type
|
|
20
|
+
self.name = name
|
|
21
|
+
self.config = config or {}
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
def node(
    name: str | None = None,
    *,
    retry_on: list[str] | None = None,
    max_retries: int = 3,
    retry_delay: float = 1.0,
) -> Callable[[F], F]:
    """Basic node decorator for custom logic.

    Args:
        name: Optional name for the node. Defaults to function name.
        retry_on: List of exception types to retry on.
        max_retries: Maximum number of retry attempts.
        retry_delay: Delay between retries in seconds.

    Example:
        @node()
        def my_processor(self, state):
            return {"processed": True}
    """

    def decorator(func: F) -> F:
        # Execution is a plain pass-through; the metadata attached below is
        # what the graph machinery actually consumes.
        @wraps(func)
        def passthrough(*call_args: Any, **call_kwargs: Any) -> Any:
            return func(*call_args, **call_kwargs)

        node_config: dict[str, Any] = {
            "retry_on": retry_on or [],
            "max_retries": max_retries,
            "retry_delay": retry_delay,
        }
        passthrough._node_metadata = NodeMetadata(  # type: ignore
            node_type="function",
            name=name or func.__name__,
            config=node_config,
        )
        return passthrough  # type: ignore

    return decorator
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
def llm_node(
    model: str = "gpt-4o-mini",
    *,
    name: str | None = None,
    temperature: float = 0.7,
    max_tokens: int | None = None,
    system_prompt: str | None = None,
    tools: list[str] | None = None,
    stream: bool = True,
) -> Callable[[F], F]:
    """LLM node decorator for AI-powered processing.

    Args:
        model: LLM model identifier (e.g., "gpt-4o-mini", "claude-3-sonnet").
        name: Optional name for the node. Defaults to function name.
        temperature: Sampling temperature (0.0 to 2.0).
        max_tokens: Maximum tokens in response.
        system_prompt: System prompt for the LLM.
        tools: List of tool names available to the LLM.
        stream: Whether to stream responses.

    Example:
        @llm_node(model="gpt-4o-mini", temperature=0.3)
        def classify_intent(self, state):
            return state
    """

    def decorator(func: F) -> F:
        # The function body runs untouched; the LLM parameters travel in
        # the attached metadata.
        @wraps(func)
        def passthrough(*call_args: Any, **call_kwargs: Any) -> Any:
            return func(*call_args, **call_kwargs)

        llm_config: dict[str, Any] = {
            "model": model,
            "temperature": temperature,
            "max_tokens": max_tokens,
            "system_prompt": system_prompt,
            "tools": tools or [],
            "stream": stream,
        }
        passthrough._node_metadata = NodeMetadata(  # type: ignore
            node_type="llm",
            name=name or func.__name__,
            config=llm_config,
        )
        return passthrough  # type: ignore

    return decorator
|
|
111
|
+
|
|
112
|
+
|
|
113
|
+
def tool_node(
    name: str | None = None,
    *,
    timeout: float = 30.0,
    retry_on: list[str] | None = None,
    max_retries: int = 3,
) -> Callable[[F], F]:
    """Tool node decorator for external tool execution.

    Args:
        name: Optional name for the node. Defaults to function name.
        timeout: Execution timeout in seconds.
        retry_on: List of exception types to retry on.
        max_retries: Maximum number of retry attempts.

    Example:
        @tool_node()
        def search_database(self, state):
            results = db.search(state["query"])
            return {"results": results}
    """

    def decorator(func: F) -> F:
        # Pure pass-through wrapper; timeout/retry settings ride along as
        # metadata for the executor.
        @wraps(func)
        def passthrough(*call_args: Any, **call_kwargs: Any) -> Any:
            return func(*call_args, **call_kwargs)

        tool_config: dict[str, Any] = {
            "timeout": timeout,
            "retry_on": retry_on or [],
            "max_retries": max_retries,
        }
        passthrough._node_metadata = NodeMetadata(  # type: ignore
            node_type="tool",
            name=name or func.__name__,
            config=tool_config,
        )
        return passthrough  # type: ignore

    return decorator
|
|
152
|
+
|
|
153
|
+
|
|
154
|
+
def router_node(
    name: str | None = None,
) -> Callable[[F], F]:
    """Router node decorator for conditional branching.

    The decorated function should return the name of the next node to execute.

    Args:
        name: Optional name for the node. Defaults to function name.

    Example:
        @router_node()
        def route_by_intent(self, state):
            if state["intent"] == "billing":
                return "billing_handler"
            return "general_handler"
    """

    def decorator(func: F) -> F:
        # Routers carry no configuration of their own — the node_type alone
        # tells the graph to treat the return value as a branch key.
        @wraps(func)
        def passthrough(*call_args: Any, **call_kwargs: Any) -> Any:
            return func(*call_args, **call_kwargs)

        passthrough._node_metadata = NodeMetadata(  # type: ignore
            node_type="router",
            name=name or func.__name__,
            config={},
        )
        return passthrough  # type: ignore

    return decorator
|
|
185
|
+
|
|
186
|
+
|
|
187
|
+
def human_node(
    prompt: str = "Please review and continue",
    *,
    name: str | None = None,
    timeout: float | None = None,
    interrupt_before: bool = True,
) -> Callable[[F], F]:
    """Human-in-the-loop node decorator.

    Args:
        prompt: Message to display to the human reviewer.
        name: Optional name for the node. Defaults to function name.
        timeout: Optional timeout for human response in seconds.
        interrupt_before: If True, interrupt before node execution.

    Example:
        @human_node(prompt="Please approve this response")
        def review_response(self, state):
            return state
    """

    def decorator(func: F) -> F:
        # Wrapper just forwards the call; the review prompt and interrupt
        # behavior are described entirely by the metadata.
        @wraps(func)
        def passthrough(*call_args: Any, **call_kwargs: Any) -> Any:
            return func(*call_args, **call_kwargs)

        human_config: dict[str, Any] = {
            "prompt": prompt,
            "timeout": timeout,
            "interrupt_before": interrupt_before,
        }
        passthrough._node_metadata = NodeMetadata(  # type: ignore
            node_type="human",
            name=name or func.__name__,
            config=human_config,
        )
        return passthrough  # type: ignore

    return decorator
|
|
225
|
+
|
|
226
|
+
|
|
227
|
+
def entrypoint(func: F) -> F:
    """Mark a node as the graph entry point.

    Sets ``is_entrypoint`` in the node's metadata config. A plain function
    (not yet decorated by a node decorator) is first promoted to a basic
    "function" node.

    Example:
        @entrypoint
        @llm_node(model="gpt-4o-mini")
        def start(self, state):
            return state
    """
    if hasattr(func, "_node_metadata"):
        # Already a node: flag it in place and hand back the same callable.
        func._node_metadata.config["is_entrypoint"] = True  # type: ignore
        return func

    # Plain function: wrap it as a basic node marked as the entrypoint.
    @wraps(func)
    def passthrough(*call_args: Any, **call_kwargs: Any) -> Any:
        return func(*call_args, **call_kwargs)

    passthrough._node_metadata = NodeMetadata(  # type: ignore
        node_type="function",
        name=func.__name__,
        config={"is_entrypoint": True},
    )
    return passthrough  # type: ignore
|