agentic-blocks 0.1.17__py3-none-any.whl → 0.1.19__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agentic_blocks/agent.py +1 -2
- agentic_blocks/llm.py +8 -8
- agentic_blocks/tracing/__init__.py +13 -0
- agentic_blocks/tracing/config.py +111 -0
- agentic_blocks/tracing/core.py +287 -0
- agentic_blocks/tracing/decorator.py +316 -0
- agentic_blocks/visualization/async_flow.py +165 -0
- agentic_blocks/visualization/async_loop_flow.py +73 -0
- agentic_blocks/visualization/visualize.py +1016 -0
- {agentic_blocks-0.1.17.dist-info → agentic_blocks-0.1.19.dist-info}/METADATA +3 -1
- agentic_blocks-0.1.19.dist-info/RECORD +18 -0
- agentic_blocks-0.1.17.dist-info/RECORD +0 -11
- {agentic_blocks-0.1.17.dist-info → agentic_blocks-0.1.19.dist-info}/WHEEL +0 -0
- {agentic_blocks-0.1.17.dist-info → agentic_blocks-0.1.19.dist-info}/licenses/LICENSE +0 -0
- {agentic_blocks-0.1.17.dist-info → agentic_blocks-0.1.19.dist-info}/top_level.txt +0 -0

agentic_blocks/tracing/decorator.py
@@ -0,0 +1,316 @@
"""
Decorator for tracing PocketFlow workflows with Langfuse.
"""

import functools
import inspect
import uuid
from typing import Any, Callable, Dict, Optional, Union

from .config import TracingConfig
from .core import LangfuseTracer


def trace_flow(
    config: Optional[TracingConfig] = None,
    flow_name: Optional[str] = None,
    session_id: Optional[str] = None,
    user_id: Optional[str] = None,
):
    """
    Decorator to add Langfuse tracing to PocketFlow flows.

    This decorator automatically traces:
    - Flow execution start/end
    - Each node's prep, exec, and post phases
    - Input and output data for each phase
    - Errors and exceptions

    Args:
        config: TracingConfig instance. If None, loads from environment.
        flow_name: Custom name for the flow. If None, uses the flow class name.
        session_id: Session ID for grouping related traces.
        user_id: User ID for the trace.

    Returns:
        Decorated flow class or function.

    Example:
        ```python
        from tracing import trace_flow

        @trace_flow()
        class MyFlow(Flow):
            def __init__(self):
                super().__init__(start=MyNode())

        # Or with custom configuration
        config = TracingConfig.from_env()

        @trace_flow(config=config, flow_name="CustomFlow")
        class MyFlow(Flow):
            pass
        ```
    """

    def decorator(flow_class_or_func):
        # Handle both class and function decoration
        if inspect.isclass(flow_class_or_func):
            return _trace_flow_class(
                flow_class_or_func, config, flow_name, session_id, user_id
            )
        else:
            return _trace_flow_function(
                flow_class_or_func, config, flow_name, session_id, user_id
            )

    return decorator


def _trace_flow_class(flow_class, config, flow_name, session_id, user_id):
    """Trace a Flow class by wrapping its methods."""

    # Get or create config
    if config is None:
        config = TracingConfig.from_env()

    # Override session/user if provided
    if session_id:
        config.session_id = session_id
    if user_id:
        config.user_id = user_id

    # Get flow name
    if flow_name is None:
        flow_name = flow_class.__name__

    # Store original methods
    original_init = flow_class.__init__
    original_run = getattr(flow_class, "run", None)
    original_run_async = getattr(flow_class, "run_async", None)

    def traced_init(self, *args, **kwargs):
        """Initialize the flow with tracing capabilities."""
        # Call original init
        original_init(self, *args, **kwargs)

        # Add tracing attributes
        self._tracer = LangfuseTracer(config)
        self._flow_name = flow_name
        self._trace_id = None

        # Patch all nodes in the flow
        self._patch_nodes()

    def traced_run(self, shared):
        """Traced version of the run method."""
        if not hasattr(self, "_tracer"):
            # Fallback if not properly initialized
            return original_run(self, shared) if original_run else None

        # Start trace
        self._trace_id = self._tracer.start_trace(self._flow_name, shared)

        try:
            # Run the original flow
            result = original_run(self, shared) if original_run else None

            # End trace successfully
            self._tracer.end_trace(shared, "success")

            return result

        except Exception as e:
            # End trace with error
            self._tracer.end_trace(shared, "error")
            raise
        finally:
            # Ensure cleanup
            self._tracer.flush()

    async def traced_run_async(self, shared):
        """Traced version of the async run method."""
        if not hasattr(self, "_tracer"):
            # Fallback if not properly initialized
            return (
                await original_run_async(self, shared) if original_run_async else None
            )

        # Start trace
        self._trace_id = self._tracer.start_trace(self._flow_name, shared)

        try:
            # Run the original flow
            result = (
                await original_run_async(self, shared) if original_run_async else None
            )

            # End trace successfully
            self._tracer.end_trace(shared, "success")

            return result

        except Exception as e:
            # End trace with error
            self._tracer.end_trace(shared, "error")
            raise
        finally:
            # Ensure cleanup
            self._tracer.flush()

    def patch_nodes(self):
        """Patch all nodes in the flow to add tracing."""
        if not hasattr(self, "start_node") or not self.start_node:
            return

        visited = set()
        nodes_to_patch = [self.start_node]

        while nodes_to_patch:
            node = nodes_to_patch.pop(0)
            if id(node) in visited:
                continue

            visited.add(id(node))

            # Patch this node
            self._patch_node(node)

            # Add successors to patch list
            if hasattr(node, "successors"):
                for successor in node.successors.values():
                    if successor and id(successor) not in visited:
                        nodes_to_patch.append(successor)

    def patch_node(self, node):
        """Patch a single node to add tracing."""
        if hasattr(node, "_pocketflow_traced"):
            return  # Already patched

        node_id = str(uuid.uuid4())
        node_name = type(node).__name__

        # Store original methods
        original_prep = getattr(node, "prep", None)
        original_exec = getattr(node, "exec", None)
        original_post = getattr(node, "post", None)
        original_prep_async = getattr(node, "prep_async", None)
        original_exec_async = getattr(node, "exec_async", None)
        original_post_async = getattr(node, "post_async", None)

        # Create traced versions
        if original_prep:
            node.prep = self._create_traced_method(
                original_prep, node_id, node_name, "prep"
            )
        if original_exec:
            node.exec = self._create_traced_method(
                original_exec, node_id, node_name, "exec"
            )
        if original_post:
            node.post = self._create_traced_method(
                original_post, node_id, node_name, "post"
            )
        if original_prep_async:
            node.prep_async = self._create_traced_async_method(
                original_prep_async, node_id, node_name, "prep"
            )
        if original_exec_async:
            node.exec_async = self._create_traced_async_method(
                original_exec_async, node_id, node_name, "exec"
            )
        if original_post_async:
            node.post_async = self._create_traced_async_method(
                original_post_async, node_id, node_name, "post"
            )

        # Mark as traced
        node._pocketflow_traced = True

    def create_traced_method(self, original_method, node_id, node_name, phase):
        """Create a traced version of a synchronous method."""

        @functools.wraps(original_method)
        def traced_method(*args, **kwargs):
            span_id = self._tracer.start_node_span(node_name, node_id, phase)

            try:
                result = original_method(*args, **kwargs)
                self._tracer.end_node_span(span_id, input_data=args, output_data=result)
                return result
            except Exception as e:
                self._tracer.end_node_span(span_id, input_data=args, error=e)
                raise

        return traced_method

    def create_traced_async_method(self, original_method, node_id, node_name, phase):
        """Create a traced version of an asynchronous method."""

        @functools.wraps(original_method)
        async def traced_async_method(*args, **kwargs):
            span_id = self._tracer.start_node_span(node_name, node_id, phase)

            try:
                result = await original_method(*args, **kwargs)
                self._tracer.end_node_span(span_id, input_data=args, output_data=result)
                return result
            except Exception as e:
                self._tracer.end_node_span(span_id, input_data=args, error=e)
                raise

        return traced_async_method

    # Replace methods on the class
    flow_class.__init__ = traced_init
    flow_class._patch_nodes = patch_nodes
    flow_class._patch_node = patch_node
    flow_class._create_traced_method = create_traced_method
    flow_class._create_traced_async_method = create_traced_async_method

    if original_run:
        flow_class.run = traced_run
    if original_run_async:
        flow_class.run_async = traced_run_async

    return flow_class


def _trace_flow_function(flow_func, config, flow_name, session_id, user_id):
    """Trace a flow function (for functional-style flows)."""

    # Get or create config
    if config is None:
        config = TracingConfig.from_env()

    # Override session/user if provided
    if session_id:
        config.session_id = session_id
    if user_id:
        config.user_id = user_id

    # Get flow name
    if flow_name is None:
        flow_name = flow_func.__name__

    tracer = LangfuseTracer(config)

    @functools.wraps(flow_func)
    def traced_flow_func(*args, **kwargs):
        # Assume first argument is shared data
        shared = args[0] if args else {}

        # Start trace
        trace_id = tracer.start_trace(flow_name, shared)

        try:
            result = flow_func(*args, **kwargs)
            tracer.end_trace(shared, "success")
            return result
        except Exception as e:
            tracer.end_trace(shared, "error")
            raise
        finally:
            tracer.flush()

    return traced_flow_func
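Note on the function path: the docstring above only demonstrates class decoration, but `_trace_flow_function` also lets `trace_flow` wrap a plain callable, treating the first positional argument as the shared dict passed to `start_trace`/`end_trace`. A minimal sketch under that assumption (the `run_order_checks` function and its contents are illustrative, not part of the package; the import path simply follows the new file layout `agentic_blocks/tracing/decorator.py`):

# Hypothetical functional-style flow wrapped by the new decorator.
# Assumes Langfuse settings are configured so TracingConfig.from_env() succeeds.
from agentic_blocks.tracing.decorator import trace_flow


@trace_flow(flow_name="OrderChecks", session_id="session-123")
def run_order_checks(shared: dict) -> dict:
    # The decorator treats this first argument as the shared state for the trace.
    shared["payment_status"] = "Payment validated successfully"
    return shared


result = run_order_checks({"order_id": "ORD-12345"})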

agentic_blocks/visualization/async_flow.py
@@ -0,0 +1,165 @@
from pocketflow import AsyncNode, AsyncFlow
import asyncio


# Define Payment Nodes
class ValidatePayment(AsyncNode):
    async def exec_async(self, prep_res):
        print("1.1.Validating payment...")
        return "Payment validated successfully"

    async def post_async(self, shared, prep_res, exec_res):
        shared["payment_status"] = exec_res
        return "default"


class ProcessPayment(AsyncNode):
    async def exec_async(self, prep_res):
        print("1.2.Processing payment...")
        return "Payment processed successfully"

    async def post_async(self, shared, prep_res, exec_res):
        shared["payment_result"] = exec_res
        return "default"


class PaymentConfirmation(AsyncNode):
    async def exec_async(self, prep_res):
        print("1.3.Confirming payment...")
        return "Payment confirmed"

    async def post_async(self, shared, prep_res, exec_res):
        shared["payment_confirmation"] = exec_res
        return "default"


# Define Inventory Nodes
class CheckStock(AsyncNode):
    async def exec_async(self, prep_res):
        print("2.1.Checking inventory stock...")
        return "Stock available"

    async def post_async(self, shared, prep_res, exec_res):
        shared["stock_status"] = exec_res
        return "default"


class ReserveItems(AsyncNode):
    async def exec_async(self, prep_res):
        print("2.2.Reserving items...")
        return "Items reserved"

    async def post_async(self, shared, prep_res, exec_res):
        shared["reservation_status"] = exec_res
        return "default"


class UpdateInventory(AsyncNode):
    async def exec_async(self, prep_res):
        print("2.3. Updating inventory...")
        return "Inventory updated"

    async def post_async(self, shared, prep_res, exec_res):
        shared["inventory_update"] = exec_res
        return "default"


# Define Shipping Nodes
class CreateLabel(AsyncNode):
    async def exec_async(self, prep_res):
        print("3.1 Creating shipping label...")
        return "Shipping label created"

    async def post_async(self, shared, prep_res, exec_res):
        shared["shipping_label"] = exec_res
        return "default"


class AssignCarrier(AsyncNode):
    async def exec_async(self, prep_res):
        print("3.2 Assigning carrier...")
        return "Carrier assigned"

    async def post_async(self, shared, prep_res, exec_res):
        shared["carrier"] = exec_res
        return "default"


class SchedulePickup(AsyncNode):
    async def exec_async(self, prep_res):
        print("3.3 Scheduling pickup...")
        return "Pickup scheduled"

    async def post_async(self, shared, prep_res, exec_res):
        shared["pickup_status"] = exec_res
        return "default"


# Create node instances
validate_payment = ValidatePayment()
process_payment = ProcessPayment()
payment_confirmation = PaymentConfirmation()

check_stock = CheckStock()
reserve_items = ReserveItems()
update_inventory = UpdateInventory()

create_label = CreateLabel()
assign_carrier = AssignCarrier()
schedule_pickup = SchedulePickup()

# Payment processing sub-flow
validate_payment >> process_payment >> payment_confirmation
payment_flow = AsyncFlow(start=validate_payment)

# Inventory sub-flow
check_stock >> reserve_items >> update_inventory
inventory_flow = AsyncFlow(start=check_stock)

# Shipping sub-flow
create_label >> assign_carrier >> schedule_pickup
shipping_flow = AsyncFlow(start=create_label)

# Connect the flows into a main order pipeline
payment_flow >> inventory_flow >> shipping_flow
# payment_flow >> inventory_flow >> create_label
# payment_flow >> inventory_flow >> assign_carrier


# Create the master flow
class OrderFlow(AsyncFlow):
    pass


order_pipeline = OrderFlow(start=payment_flow)

# Create shared data structure
shared_data = {
    "order_id": "ORD-12345",
    "customer": "John Doe",
    "items": [
        {"id": "ITEM-001", "name": "Smartphone", "price": 999.99, "quantity": 1},
        {"id": "ITEM-002", "name": "Phone case", "price": 29.99, "quantity": 1},
    ],
    "shipping_address": {
        "street": "123 Main St",
        "city": "Anytown",
        "state": "CA",
        "zip": "12345",
    },
}


# Run the entire pipeline asynchronously
async def main():
    await order_pipeline.run_async(shared_data)

    # Print final status
    print("\nOrder processing completed!")
    print(f"Payment: {shared_data.get('payment_confirmation')}")
    print(f"Inventory: {shared_data.get('inventory_update')}")
    print(f"Shipping: {shared_data.get('pickup_status')}")


if __name__ == "__main__":
    asyncio.run(main())
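Since 0.1.19 ships the tracing package alongside this example, the `OrderFlow` class above could also be wrapped with `trace_flow` so that `run_async` is traced (node patching is attempted via the decorator's `_patch_nodes`). A hedged sketch only, assuming the decorator and config are imported directly from their new modules and that Langfuse settings are available in the environment for `TracingConfig.from_env()`; `TracedOrderFlow` is illustrative, not part of the package:

# Sketch only: applying the new tracing decorator to the example flow above.
from agentic_blocks.tracing.config import TracingConfig
from agentic_blocks.tracing.decorator import trace_flow

config = TracingConfig.from_env()  # tracing settings read from the environment


@trace_flow(config=config, flow_name="OrderFlow", user_id="customer-john-doe")
class TracedOrderFlow(AsyncFlow):
    pass


traced_pipeline = TracedOrderFlow(start=payment_flow)

# traced_run_async starts a trace, awaits the original run_async, ends the
# trace with "success" or "error", and flushes the tracer.
# asyncio.run(traced_pipeline.run_async(shared_data))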

agentic_blocks/visualization/async_loop_flow.py
@@ -0,0 +1,73 @@
from async_flow import *
from pocketflow import Flow, AsyncParallelBatchNode, Node

# Create node instances
validate_payment = ValidatePayment()
process_payment = ProcessPayment()
payment_confirmation = PaymentConfirmation()

check_stock = CheckStock()
reserve_items = ReserveItems()
update_inventory = UpdateInventory()

create_label = CreateLabel()
assign_carrier = AssignCarrier()
schedule_pickup = SchedulePickup()

# Payment processing sub-flow
validate_payment >> process_payment
validate_payment - "out_of_stock" >> validate_payment  # retry loop
process_payment - 'something fail' >> validate_payment
process_payment - 'pass' >> payment_confirmation
payment_flow = AsyncFlow(start=validate_payment)

# Inventory sub-flow
check_stock >> reserve_items >> update_inventory
inventory_flow = AsyncFlow(start=check_stock)

# Shipping sub-flow
create_label >> assign_carrier >> schedule_pickup
shipping_flow = AsyncFlow(start=create_label)

# Connect the flows into a main order pipeline
payment_flow >> inventory_flow >> shipping_flow
# payment_flow >> inventory_flow >> create_label
# payment_flow >> inventory_flow >> assign_carrier


# Create the master flow
class OrderFlow(AsyncFlow):
    pass

order_pipeline = OrderFlow(start=payment_flow)

# Create shared data structure
shared_data = {
    "order_id": "ORD-12345",
    "customer": "John Doe",
    "items": [
        {"id": "ITEM-001", "name": "Smartphone", "price": 999.99, "quantity": 1},
        {"id": "ITEM-002", "name": "Phone case", "price": 29.99, "quantity": 1},
    ],
    "shipping_address": {
        "street": "123 Main St",
        "city": "Anytown",
        "state": "CA",
        "zip": "12345",
    },
}


# Run the entire pipeline asynchronously
async def main():
    await order_pipeline.run_async(shared_data)

    # Print final status
    print("\nOrder processing completed!")
    print(f"Payment: {shared_data.get('payment_confirmation')}")
    print(f"Inventory: {shared_data.get('inventory_update')}")
    print(f"Shipping: {shared_data.get('pickup_status')}")


if __name__ == "__main__":
    asyncio.run(main())
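As written, every node imported from `async_flow` returns "default" from `post_async`, so the "out_of_stock", "something fail", and "pass" transitions above are never taken; in PocketFlow, the string returned by `post_async` selects which successor edge the flow follows. A minimal sketch of a node that would actually drive those branches (the subclass and the attempt counter are illustrative, not part of the package):

# Sketch only: a ProcessPayment variant that returns the action strings used
# by the conditional transitions above instead of "default".
class ProcessPaymentWithRetry(ProcessPayment):
    async def post_async(self, shared, prep_res, exec_res):
        shared["payment_result"] = exec_res
        attempts = shared.get("payment_attempts", 0) + 1
        shared["payment_attempts"] = attempts
        # Illustrative condition: fail the first attempt, then pass.
        return "pass" if attempts > 1 else "something fail"

Wiring `process_payment = ProcessPaymentWithRetry()` before building `payment_flow` would make the retry edge back to `validate_payment` reachable.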