vellum-ai 0.14.37__py3-none-any.whl → 0.14.39__py3-none-any.whl
This diff represents the changes between two publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the packages exactly as they appear in their public registries.
- vellum/__init__.py +10 -0
- vellum/client/core/client_wrapper.py +1 -1
- vellum/client/reference.md +6272 -0
- vellum/client/types/__init__.py +10 -0
- vellum/client/types/ad_hoc_fulfilled_prompt_execution_meta.py +2 -0
- vellum/client/types/fulfilled_prompt_execution_meta.py +2 -0
- vellum/client/types/test_suite_run_exec_config_request.py +4 -0
- vellum/client/types/test_suite_run_progress.py +20 -0
- vellum/client/types/test_suite_run_prompt_sandbox_exec_config_data_request.py +27 -0
- vellum/client/types/test_suite_run_prompt_sandbox_exec_config_request.py +29 -0
- vellum/client/types/test_suite_run_read.py +3 -0
- vellum/client/types/test_suite_run_workflow_sandbox_exec_config_data_request.py +22 -0
- vellum/client/types/test_suite_run_workflow_sandbox_exec_config_request.py +29 -0
- vellum/client/types/vellum_sdk_error_code_enum.py +1 -0
- vellum/client/types/workflow_execution_event_error_code.py +1 -0
- vellum/plugins/pydantic.py +1 -1
- vellum/types/test_suite_run_progress.py +3 -0
- vellum/types/test_suite_run_prompt_sandbox_exec_config_data_request.py +3 -0
- vellum/types/test_suite_run_prompt_sandbox_exec_config_request.py +3 -0
- vellum/types/test_suite_run_workflow_sandbox_exec_config_data_request.py +3 -0
- vellum/types/test_suite_run_workflow_sandbox_exec_config_request.py +3 -0
- vellum/workflows/errors/types.py +1 -0
- vellum/workflows/events/node.py +2 -1
- vellum/workflows/events/tests/test_event.py +1 -0
- vellum/workflows/events/types.py +3 -40
- vellum/workflows/events/workflow.py +15 -4
- vellum/workflows/nodes/displayable/bases/base_prompt_node/node.py +7 -1
- vellum/workflows/nodes/displayable/bases/prompt_deployment_node.py +94 -3
- vellum/workflows/nodes/displayable/conftest.py +2 -6
- vellum/workflows/nodes/displayable/guardrail_node/node.py +1 -1
- vellum/workflows/nodes/displayable/guardrail_node/tests/__init__.py +0 -0
- vellum/workflows/nodes/displayable/guardrail_node/tests/test_node.py +50 -0
- vellum/workflows/nodes/displayable/inline_prompt_node/tests/test_node.py +6 -1
- vellum/workflows/nodes/displayable/prompt_deployment_node/tests/test_node.py +323 -0
- vellum/workflows/runner/runner.py +78 -57
- vellum/workflows/state/base.py +177 -50
- vellum/workflows/state/tests/test_state.py +26 -20
- vellum/workflows/types/definition.py +71 -0
- vellum/workflows/types/generics.py +34 -1
- vellum/workflows/workflows/base.py +26 -19
- vellum/workflows/workflows/tests/test_base_workflow.py +232 -1
- {vellum_ai-0.14.37.dist-info → vellum_ai-0.14.39.dist-info}/METADATA +1 -1
- {vellum_ai-0.14.37.dist-info → vellum_ai-0.14.39.dist-info}/RECORD +49 -35
- vellum_cli/push.py +2 -3
- vellum_cli/tests/test_push.py +52 -0
- vellum_ee/workflows/display/vellum.py +0 -5
- {vellum_ai-0.14.37.dist-info → vellum_ai-0.14.39.dist-info}/LICENSE +0 -0
- {vellum_ai-0.14.37.dist-info → vellum_ai-0.14.39.dist-info}/WHEEL +0 -0
- {vellum_ai-0.14.37.dist-info → vellum_ai-0.14.39.dist-info}/entry_points.txt +0 -0
vellum/workflows/runner/runner.py

@@ -4,11 +4,11 @@ from dataclasses import dataclass
 import logging
 from queue import Empty, Queue
 from threading import Event as ThreadingEvent, Thread
-from uuid import UUID
+from uuid import UUID, uuid4
 from typing import TYPE_CHECKING, Any, Dict, Generic, Iterable, Iterator, Optional, Sequence, Set, Tuple, Type, Union

 from vellum.workflows.constants import undefined
-from vellum.workflows.context import ExecutionContext, execution_context, get_execution_context
+from vellum.workflows.context import ExecutionContext, execution_context, get_execution_context
 from vellum.workflows.descriptors.base import BaseDescriptor
 from vellum.workflows.edges.edge import Edge
 from vellum.workflows.errors import WorkflowError, WorkflowErrorCode
@@ -30,7 +30,7 @@ from vellum.workflows.events.node import (
     NodeExecutionRejectedBody,
     NodeExecutionStreamingBody,
 )
-from vellum.workflows.events.types import BaseEvent, NodeParentContext, WorkflowParentContext
+from vellum.workflows.events.types import BaseEvent, NodeParentContext, ParentContext, WorkflowParentContext
 from vellum.workflows.events.workflow import (
     WorkflowExecutionFulfilledBody,
     WorkflowExecutionInitiatedBody,
@@ -90,6 +90,7 @@ class WorkflowRunner(Generic[StateType]):

         self.workflow = workflow
         self._is_resuming = False
+        self._should_emit_initial_state = True
         if entrypoint_nodes:
             if len(list(entrypoint_nodes)) > 1:
                 raise ValueError("Cannot resume from multiple nodes")
@@ -98,7 +99,8 @@ class WorkflowRunner(Generic[StateType]):
             # https://app.shortcut.com/vellum/story/4408
             node = next(iter(entrypoint_nodes))
             if state:
-                self._initial_state = state
+                self._initial_state = deepcopy(state)
+                self._initial_state.meta.span_id = uuid4()
             else:
                 self._initial_state = self.workflow.get_state_at_node(node)
             self._entrypoints = entrypoint_nodes
@@ -123,8 +125,13 @@ class WorkflowRunner(Generic[StateType]):
             if state:
                 self._initial_state = deepcopy(state)
                 self._initial_state.meta.workflow_inputs = normalized_inputs
+                self._initial_state.meta.span_id = uuid4()
             else:
                 self._initial_state = self.workflow.get_default_state(normalized_inputs)
+                # We don't want to emit the initial state on the base case of Workflow Runs, since
+                # all of that data is redundant and is derivable. It also clearly communicates that
+                # there was no initial state provided by the user to invoke the workflow.
+                self._should_emit_initial_state = False
             self._entrypoints = self.workflow.get_entrypoints()

         # This queue is responsible for sending events from WorkflowRunner to the outside world
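The two `if state:` hunks above change how the runner seeds its initial state: a caller-provided state is deep-copied and stamped with a fresh `span_id` via `uuid4()`, while the derivable default state flips `_should_emit_initial_state` off so it is not echoed back in events. A minimal sketch of that pattern follows; `prepare_initial_state`, `SketchState`, and `SketchStateMeta` are illustrative names, not part of the SDK.

```python
# Illustrative sketch only; these names are hypothetical, not the vellum SDK's API.
from copy import deepcopy
from dataclasses import dataclass, field
from typing import Optional, Tuple
from uuid import UUID, uuid4


@dataclass
class SketchStateMeta:
    span_id: UUID = field(default_factory=uuid4)


@dataclass
class SketchState:
    meta: SketchStateMeta = field(default_factory=SketchStateMeta)
    values: dict = field(default_factory=dict)


def prepare_initial_state(provided: Optional[SketchState]) -> Tuple[SketchState, bool]:
    """Return (initial_state, should_emit_initial_state)."""
    if provided is not None:
        # Never mutate the caller's object, and stamp a fresh span_id so the new
        # run is tracked as its own execution instead of reusing the old one.
        initial_state = deepcopy(provided)
        initial_state.meta.span_id = uuid4()
        return initial_state, True
    # A default state is fully derivable, so there is nothing worth echoing back.
    return SketchState(), False


state, emit = prepare_initial_state(SketchState(values={"count": 3}))
print(state.meta.span_id, emit)  # fresh UUID, True
```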
@@ -146,7 +153,6 @@ class WorkflowRunner(Generic[StateType]):
         self._active_nodes_by_execution_id: Dict[UUID, ActiveNode[StateType]] = {}
         self._cancel_signal = cancel_signal
         self._execution_context = init_execution_context or get_execution_context()
-        self._parent_context = self._execution_context.parent_context

         setattr(
             self._initial_state,
@@ -159,13 +165,13 @@ class WorkflowRunner(Generic[StateType]):
     def _snapshot_state(self, state: StateType) -> StateType:
         self._workflow_event_inner_queue.put(
             WorkflowExecutionSnapshottedEvent(
-                trace_id=
+                trace_id=self._execution_context.trace_id,
                 span_id=state.meta.span_id,
                 body=WorkflowExecutionSnapshottedBody(
                     workflow_definition=self.workflow.__class__,
                     state=state,
                 ),
-                parent=self.
+                parent=self._execution_context.parent_context,
             )
         )
         self.workflow._store.append_state_snapshot(state)
@@ -178,16 +184,16 @@ class WorkflowRunner(Generic[StateType]):
         return event

     def _run_work_item(self, node: BaseNode[StateType], span_id: UUID) -> None:
-
+        execution = get_execution_context()
         self._workflow_event_inner_queue.put(
             NodeExecutionInitiatedEvent(
-                trace_id=
+                trace_id=execution.trace_id,
                 span_id=span_id,
                 body=NodeExecutionInitiatedBody(
                     node_definition=node.__class__,
                     inputs=node._inputs,
                 ),
-                parent=parent_context,
+                parent=execution.parent_context,
             )
         )

@@ -197,7 +203,7 @@ class WorkflowRunner(Generic[StateType]):
             updated_parent_context = NodeParentContext(
                 span_id=span_id,
                 node_definition=node.__class__,
-                parent=parent_context,
+                parent=execution.parent_context,
             )
             node_run_response: NodeRunResponse
             was_mocked: Optional[bool] = None
@@ -209,7 +215,7 @@ class WorkflowRunner(Generic[StateType]):
                     break

             if not was_mocked:
-                with execution_context(parent_context=updated_parent_context, trace_id=
+                with execution_context(parent_context=updated_parent_context, trace_id=execution.trace_id):
                     node_run_response = node.run()

             ports = node.Ports()
@@ -232,7 +238,7 @@ class WorkflowRunner(Generic[StateType]):
             outputs = node.Outputs()

             def initiate_node_streaming_output(output: BaseOutput) -> None:
-
+                execution = get_execution_context()
                 streaming_output_queues[output.name] = Queue()
                 output_descriptor = OutputReference(
                     name=output.name,
@@ -240,23 +246,24 @@ class WorkflowRunner(Generic[StateType]):
                     instance=None,
                     outputs_class=node.Outputs,
                 )
-                node.state.
+                with node.state.__quiet__():
+                    node.state.meta.node_outputs[output_descriptor] = streaming_output_queues[output.name]
                 initiated_output: BaseOutput = BaseOutput(name=output.name)
                 initiated_ports = initiated_output > ports
                 self._workflow_event_inner_queue.put(
                     NodeExecutionStreamingEvent(
-                        trace_id=
+                        trace_id=execution.trace_id,
                         span_id=span_id,
                         body=NodeExecutionStreamingBody(
                             node_definition=node.__class__,
                             output=initiated_output,
                             invoked_ports=initiated_ports,
                         ),
-                        parent=parent_context,
+                        parent=execution.parent_context,
                     ),
                 )

-            with execution_context(parent_context=updated_parent_context, trace_id=
+            with execution_context(parent_context=updated_parent_context, trace_id=execution.trace_id):
                 for output in node_run_response:
                     invoked_ports = output > ports
                     if output.is_initiated:
@@ -268,14 +275,14 @@ class WorkflowRunner(Generic[StateType]):
                        streaming_output_queues[output.name].put(output.delta)
                        self._workflow_event_inner_queue.put(
                            NodeExecutionStreamingEvent(
-                                trace_id=
+                                trace_id=execution.trace_id,
                                span_id=span_id,
                                body=NodeExecutionStreamingBody(
                                    node_definition=node.__class__,
                                    output=output,
                                    invoked_ports=invoked_ports,
                                ),
-                                parent=parent_context,
+                                parent=execution.parent_context,
                            ),
                        )
                    elif output.is_fulfilled:
@@ -285,31 +292,32 @@ class WorkflowRunner(Generic[StateType]):
                        setattr(outputs, output.name, output.value)
                        self._workflow_event_inner_queue.put(
                            NodeExecutionStreamingEvent(
-                                trace_id=
+                                trace_id=execution.trace_id,
                                span_id=span_id,
                                body=NodeExecutionStreamingBody(
                                    node_definition=node.__class__,
                                    output=output,
                                    invoked_ports=invoked_ports,
                                ),
-                                parent=parent_context,
+                                parent=execution.parent_context,
                            )
                        )

            node.state.meta.node_execution_cache.fulfill_node_execution(node.__class__, span_id)

-
-
-                if
-
-
+            with node.state.__atomic__():
+                for descriptor, output_value in outputs:
+                    if output_value is undefined:
+                        if descriptor in node.state.meta.node_outputs:
+                            del node.state.meta.node_outputs[descriptor]
+                        continue

-
+                    node.state.meta.node_outputs[descriptor] = output_value

            invoked_ports = ports(outputs, node.state)
            self._workflow_event_inner_queue.put(
                NodeExecutionFulfilledEvent(
-                    trace_id=
+                    trace_id=execution.trace_id,
                    span_id=span_id,
                    body=NodeExecutionFulfilledBody(
                        node_definition=node.__class__,
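The `__quiet__()` and `__atomic__()` blocks above (new in this version, alongside the `vellum/workflows/state/base.py` changes listed earlier) suggest that writes to `node.state` normally emit snapshots, that wiring the internal streaming queues should stay silent, and that the final output writes should collapse into a single snapshot. The sketch below is only an assumption-laden illustration of that idea; the real context managers live in the SDK's state base class and are not reproduced here.

```python
# Toy model of "quiet" vs. "atomic" state mutation; illustrative names only.
from contextlib import contextmanager
from typing import Callable, Dict, Iterator


class SketchState:
    """Reports a snapshot after every write by default."""

    def __init__(self, on_snapshot: Callable[[Dict[str, object]], None]) -> None:
        self._on_snapshot = on_snapshot
        self._emitting = True
        self.node_outputs: Dict[str, object] = {}

    def write(self, key: str, value: object) -> None:
        self.node_outputs[key] = value
        if self._emitting:
            self._on_snapshot(dict(self.node_outputs))

    @contextmanager
    def quiet(self) -> Iterator[None]:
        # Mutate without emitting anything, e.g. wiring internal streaming queues.
        previous, self._emitting = self._emitting, False
        try:
            yield
        finally:
            self._emitting = previous

    @contextmanager
    def atomic(self) -> Iterator[None]:
        # Batch several writes into a single snapshot emitted when the block exits.
        previous, self._emitting = self._emitting, False
        try:
            yield
        finally:
            self._emitting = previous
            if self._emitting:
                self._on_snapshot(dict(self.node_outputs))


snapshots = []
state = SketchState(on_snapshot=snapshots.append)
with state.quiet():
    state.write("stream_queue", object())  # no snapshot
with state.atomic():
    state.write("answer", 42)              # still no snapshot...
    state.write("citations", [])           # ...until the block exits
print(len(snapshots))  # 1
```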
@@ -317,33 +325,33 @@ class WorkflowRunner(Generic[StateType]):
                        invoked_ports=invoked_ports,
                        mocked=was_mocked,
                    ),
-                    parent=parent_context,
+                    parent=execution.parent_context,
                )
            )
        except NodeException as e:
            logger.info(e)
            self._workflow_event_inner_queue.put(
                NodeExecutionRejectedEvent(
-                    trace_id=
+                    trace_id=execution.trace_id,
                    span_id=span_id,
                    body=NodeExecutionRejectedBody(
                        node_definition=node.__class__,
                        error=e.error,
                    ),
-                    parent=parent_context,
+                    parent=execution.parent_context,
                )
            )
        except WorkflowInitializationException as e:
            logger.info(e)
            self._workflow_event_inner_queue.put(
                NodeExecutionRejectedEvent(
-                    trace_id=
+                    trace_id=execution.trace_id,
                    span_id=span_id,
                    body=NodeExecutionRejectedBody(
                        node_definition=node.__class__,
                        error=e.error,
                    ),
-                    parent=parent_context,
+                    parent=execution.parent_context,
                )
            )
        except Exception as e:
@@ -351,7 +359,7 @@ class WorkflowRunner(Generic[StateType]):

            self._workflow_event_inner_queue.put(
                NodeExecutionRejectedEvent(
-                    trace_id=
+                    trace_id=execution.trace_id,
                    span_id=span_id,
                    body=NodeExecutionRejectedBody(
                        node_definition=node.__class__,
@@ -360,17 +368,23 @@ class WorkflowRunner(Generic[StateType]):
                            code=WorkflowErrorCode.INTERNAL_ERROR,
                        ),
                    ),
-                    parent=parent_context,
+                    parent=execution.parent_context,
                ),
            )

        logger.debug(f"Finished running node: {node.__class__.__name__}")

-    def _context_run_work_item(
-
-
-
-
+    def _context_run_work_item(
+        self,
+        node: BaseNode[StateType],
+        span_id: UUID,
+        parent_context: ParentContext,
+        trace_id: UUID,
+    ) -> None:
+        with execution_context(
+            parent_context=parent_context,
+            trace_id=trace_id,
+        ):
            self._run_work_item(node, span_id)

    def _handle_invoked_ports(self, state: StateType, ports: Optional[Iterable[Port]]) -> None:
@@ -419,14 +433,19 @@ class WorkflowRunner(Generic[StateType]):
        if not node_class.Trigger.should_initiate(state, all_deps, node_span_id):
            return

-
+        execution = get_execution_context()
        node = node_class(state=state, context=self.workflow.context)
        state.meta.node_execution_cache.initiate_node_execution(node_class, node_span_id)
        self._active_nodes_by_execution_id[node_span_id] = ActiveNode(node=node)

        worker_thread = Thread(
            target=self._context_run_work_item,
-            kwargs={
+            kwargs={
+                "node": node,
+                "span_id": node_span_id,
+                "parent_context": execution.parent_context,
+                "trace_id": execution.trace_id,
+            },
        )
        worker_thread.start()

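In the two hunks above, `_context_run_work_item` now receives `parent_context` and `trace_id` explicitly and re-enters `execution_context(...)` inside the worker thread, while `_run_node_if_ready` captures `get_execution_context()` before spawning the `Thread`. That is the usual shape when the ambient execution context is stored per thread (or per `contextvars` context) and therefore does not follow a newly spawned thread. Below is a standalone illustration of the pattern, assuming contextvars-style storage; the diff only shows the call sites, not the storage mechanism.

```python
# Why the context is passed explicitly: values set in one thread's context are not
# visible inside a freshly spawned Thread, so the scheduler captures them and the
# worker re-enters them. Illustrative only; not the vellum execution_context itself.
import threading
from contextlib import contextmanager
from contextvars import ContextVar
from typing import Iterator, Optional
from uuid import UUID, uuid4

_trace_id: ContextVar[Optional[UUID]] = ContextVar("trace_id", default=None)


@contextmanager
def execution_scope(trace_id: UUID) -> Iterator[None]:
    token = _trace_id.set(trace_id)
    try:
        yield
    finally:
        _trace_id.reset(token)


def work_item(trace_id: UUID) -> None:
    # Without re-entering the scope here, _trace_id.get() would be None in this thread.
    with execution_scope(trace_id):
        print("worker trace_id:", _trace_id.get())


def schedule() -> None:
    with execution_scope(uuid4()):
        captured = _trace_id.get()  # captured on the scheduling thread
        worker = threading.Thread(target=work_item, kwargs={"trace_id": captured})
        worker.start()
        worker.join()


schedule()
```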
@@ -495,66 +514,68 @@ class WorkflowRunner(Generic[StateType]):

    def _initiate_workflow_event(self) -> WorkflowExecutionInitiatedEvent:
        return WorkflowExecutionInitiatedEvent(
-            trace_id=self.
+            trace_id=self._execution_context.trace_id,
            span_id=self._initial_state.meta.span_id,
            body=WorkflowExecutionInitiatedBody(
                workflow_definition=self.workflow.__class__,
                inputs=self._initial_state.meta.workflow_inputs,
+                initial_state=deepcopy(self._initial_state) if self._should_emit_initial_state else None,
            ),
-            parent=self.
+            parent=self._execution_context.parent_context,
        )

    def _stream_workflow_event(self, output: BaseOutput) -> WorkflowExecutionStreamingEvent:
        return WorkflowExecutionStreamingEvent(
-            trace_id=self.
+            trace_id=self._execution_context.trace_id,
            span_id=self._initial_state.meta.span_id,
            body=WorkflowExecutionStreamingBody(
                workflow_definition=self.workflow.__class__,
                output=output,
            ),
-            parent=self.
+            parent=self._execution_context.parent_context,
        )

    def _fulfill_workflow_event(self, outputs: OutputsType) -> WorkflowExecutionFulfilledEvent:
        return WorkflowExecutionFulfilledEvent(
-            trace_id=self.
+            trace_id=self._execution_context.trace_id,
            span_id=self._initial_state.meta.span_id,
            body=WorkflowExecutionFulfilledBody(
                workflow_definition=self.workflow.__class__,
                outputs=outputs,
            ),
-            parent=self.
+            parent=self._execution_context.parent_context,
        )

    def _reject_workflow_event(self, error: WorkflowError) -> WorkflowExecutionRejectedEvent:
        return WorkflowExecutionRejectedEvent(
-            trace_id=self.
+            trace_id=self._execution_context.trace_id,
            span_id=self._initial_state.meta.span_id,
            body=WorkflowExecutionRejectedBody(
                workflow_definition=self.workflow.__class__,
                error=error,
            ),
-            parent=self.
+            parent=self._execution_context.parent_context,
        )

    def _resume_workflow_event(self) -> WorkflowExecutionResumedEvent:
        return WorkflowExecutionResumedEvent(
-            trace_id=self.
+            trace_id=self._execution_context.trace_id,
            span_id=self._initial_state.meta.span_id,
            body=WorkflowExecutionResumedBody(
                workflow_definition=self.workflow.__class__,
            ),
+            parent=self._execution_context.parent_context,
        )

    def _pause_workflow_event(self, external_inputs: Iterable[ExternalInputReference]) -> WorkflowExecutionPausedEvent:
        return WorkflowExecutionPausedEvent(
-            trace_id=self.
+            trace_id=self._execution_context.trace_id,
            span_id=self._initial_state.meta.span_id,
            body=WorkflowExecutionPausedBody(
                workflow_definition=self.workflow.__class__,
                external_inputs=external_inputs,
            ),
-            parent=self.
+            parent=self._execution_context.parent_context,
        )

    def _stream(self) -> None:
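All of the workflow-level lifecycle events above are now stamped from the single captured `self._execution_context`, and the initiated event carries `initial_state` only when `_should_emit_initial_state` is set. A small sketch of that stamping pattern follows; `RunnerSketch`, `ContextSketch`, and `EventSketch` are illustrative stand-ins, not SDK classes.

```python
# Illustrative only: one captured context stamps every lifecycle event of a run.
from dataclasses import dataclass
from typing import Optional
from uuid import UUID, uuid4


@dataclass
class ContextSketch:
    trace_id: UUID
    parent_context: Optional[str] = None


@dataclass
class EventSketch:
    name: str
    trace_id: UUID
    span_id: UUID
    parent: Optional[str]


class RunnerSketch:
    def __init__(self, execution_context: ContextSketch) -> None:
        self._execution_context = execution_context
        self._span_id = uuid4()  # stands in for self._initial_state.meta.span_id

    def _event(self, name: str) -> EventSketch:
        # Every event shares the same trace_id and parent, so initiated, streaming,
        # fulfilled, and rejected events all line up under one trace.
        return EventSketch(
            name=name,
            trace_id=self._execution_context.trace_id,
            span_id=self._span_id,
            parent=self._execution_context.parent_context,
        )


runner = RunnerSketch(ContextSketch(trace_id=uuid4(), parent_context="parent workflow"))
print(runner._event("workflow.execution.initiated"))
print(runner._event("workflow.execution.fulfilled"))
```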
@@ -564,13 +585,13 @@ class WorkflowRunner(Generic[StateType]):
        current_parent = WorkflowParentContext(
            span_id=self._initial_state.meta.span_id,
            workflow_definition=self.workflow.__class__,
-            parent=self.
+            parent=self._execution_context.parent_context,
            type="WORKFLOW",
        )
        for node_cls in self._entrypoints:
            try:
                if not self._max_concurrency or len(self._active_nodes_by_execution_id) < self._max_concurrency:
-                    with execution_context(parent_context=current_parent, trace_id=self.
+                    with execution_context(parent_context=current_parent, trace_id=self._execution_context.trace_id):
                        self._run_node_if_ready(self._initial_state, node_cls)
                else:
                    self._concurrency_queue.put((self._initial_state, node_cls, None))
@@ -600,7 +621,7 @@ class WorkflowRunner(Generic[StateType]):

            self._workflow_event_outer_queue.put(event)

-            with execution_context(parent_context=current_parent, trace_id=self.
+            with execution_context(parent_context=current_parent, trace_id=self._execution_context.trace_id):
                rejection_error = self._handle_work_item_event(event)

            if rejection_error:
@@ -611,7 +632,7 @@ class WorkflowRunner(Generic[StateType]):
            while event := self._workflow_event_inner_queue.get_nowait():
                self._workflow_event_outer_queue.put(event)

-                with execution_context(parent_context=current_parent, trace_id=self.
+                with execution_context(parent_context=current_parent, trace_id=self._execution_context.trace_id):
                    rejection_error = self._handle_work_item_event(event)

            if rejection_error:
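The last two hunks re-enter the execution context around `_handle_work_item_event` while draining the inner event queue with `get_nowait()`. Below is a runnable mini-version of that non-blocking drain; the surrounding `try`/`except Empty` is not visible in the hunk, but the `Empty` import at the top of the file suggests it.

```python
# Forward whatever is already in the inner queue to the outer queue, stopping on Empty.
from queue import Empty, Queue

inner: Queue = Queue()
outer: Queue = Queue()

for name in ("node.execution.initiated", "node.execution.fulfilled"):
    inner.put(name)

try:
    while event := inner.get_nowait():
        outer.put(event)
except Empty:
    pass

print([outer.get_nowait() for _ in range(outer.qsize())])
```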