agno 2.1.3__py3-none-any.whl → 2.1.4__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as published to a supported registry, and is provided for informational purposes only.
agno/workflow/loop.py CHANGED
@@ -13,6 +13,7 @@ from agno.run.workflow import (
     WorkflowRunOutput,
     WorkflowRunOutputEvent,
 )
+from agno.session.workflow import WorkflowSession
 from agno.utils.log import log_debug, logger
 from agno.workflow.step import Step
 from agno.workflow.types import StepInput, StepOutput, StepType
@@ -132,6 +133,9 @@ class Loop:
         workflow_run_response: Optional[WorkflowRunOutput] = None,
         store_executor_outputs: bool = True,
         session_state: Optional[Dict[str, Any]] = None,
+        workflow_session: Optional[WorkflowSession] = None,
+        add_workflow_history_to_steps: Optional[bool] = False,
+        num_history_runs: int = 3,
     ) -> StepOutput:
         """Execute loop steps with iteration control - mirrors workflow execution logic"""
         # Use workflow logger for loop orchestration
@@ -157,6 +161,9 @@ class Loop:
                     workflow_run_response=workflow_run_response,
                     store_executor_outputs=store_executor_outputs,
                     session_state=session_state,
+                    workflow_session=workflow_session,
+                    add_workflow_history_to_steps=add_workflow_history_to_steps,
+                    num_history_runs=num_history_runs,
                 )
 
                 # Handle both single StepOutput and List[StepOutput] (from Loop/Condition steps)
@@ -225,6 +232,9 @@ class Loop:
         store_executor_outputs: bool = True,
         session_state: Optional[Dict[str, Any]] = None,
         parent_step_id: Optional[str] = None,
+        workflow_session: Optional[WorkflowSession] = None,
+        add_workflow_history_to_steps: Optional[bool] = False,
+        num_history_runs: int = 3,
     ) -> Iterator[Union[WorkflowRunOutputEvent, StepOutput]]:
         """Execute loop steps with streaming support - mirrors workflow execution logic"""
         log_debug(f"Loop Start: {self.name}", center=True, symbol="=")
@@ -297,6 +307,9 @@ class Loop:
                     store_executor_outputs=store_executor_outputs,
                     session_state=session_state,
                     parent_step_id=loop_step_id,
+                    add_workflow_history_to_steps=add_workflow_history_to_steps,
+                    workflow_session=workflow_session,
+                    num_history_runs=num_history_runs,
                 ):
                     if isinstance(event, StepOutput):
                         step_outputs_for_iteration.append(event)
@@ -410,6 +423,9 @@ class Loop:
         workflow_run_response: Optional[WorkflowRunOutput] = None,
         store_executor_outputs: bool = True,
         session_state: Optional[Dict[str, Any]] = None,
+        workflow_session: Optional[WorkflowSession] = None,
+        add_workflow_history_to_steps: Optional[bool] = False,
+        num_history_runs: int = 3,
     ) -> StepOutput:
         """Execute loop steps asynchronously with iteration control - mirrors workflow execution logic"""
         # Use workflow logger for async loop orchestration
@@ -437,6 +453,9 @@ class Loop:
                     workflow_run_response=workflow_run_response,
                     store_executor_outputs=store_executor_outputs,
                     session_state=session_state,
+                    workflow_session=workflow_session,
+                    add_workflow_history_to_steps=add_workflow_history_to_steps,
+                    num_history_runs=num_history_runs,
                 )
 
                 # Handle both single StepOutput and List[StepOutput] (from Loop/Condition steps)
@@ -508,6 +527,9 @@ class Loop:
         store_executor_outputs: bool = True,
         session_state: Optional[Dict[str, Any]] = None,
         parent_step_id: Optional[str] = None,
+        workflow_session: Optional[WorkflowSession] = None,
+        add_workflow_history_to_steps: Optional[bool] = False,
+        num_history_runs: int = 3,
     ) -> AsyncIterator[Union[WorkflowRunOutputEvent, TeamRunOutputEvent, RunOutputEvent, StepOutput]]:
         """Execute loop steps with async streaming support - mirrors workflow execution logic"""
         log_debug(f"Loop Start: {self.name}", center=True, symbol="=")
@@ -580,6 +602,9 @@ class Loop:
                     store_executor_outputs=store_executor_outputs,
                     session_state=session_state,
                     parent_step_id=loop_step_id,
+                    workflow_session=workflow_session,
+                    add_workflow_history_to_steps=add_workflow_history_to_steps,
+                    num_history_runs=num_history_runs,
                 ):
                     if isinstance(event, StepOutput):
                         step_outputs_for_iteration.append(event)
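The loop.py change is pure parameter plumbing: every execute/aexecute/execute_stream/aexecute_stream variant of Loop now accepts the workflow session and history settings and forwards them unchanged to each inner step. A minimal sketch of calling one of these entry points with history enabled follows; the keyword names come from this diff, while the call shape and helper names are illustrative assumptions:

# Hedged sketch: invoking Loop.execute with the new history arguments.
# Only the keyword names are taken from this diff; the positional
# step_input and the surrounding wiring are assumptions.
from agno.session.workflow import WorkflowSession
from agno.workflow.loop import Loop

def run_with_history(loop: Loop, step_input, session: WorkflowSession, state: dict):
    return loop.execute(
        step_input,
        session_state=state,
        workflow_session=session,            # session holding prior workflow runs
        add_workflow_history_to_steps=True,  # opt in; the default stays False
        num_history_runs=3,                  # prior runs to expose (default 3)
    )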
agno/workflow/parallel.py CHANGED
@@ -14,6 +14,7 @@ from agno.run.workflow import (
     WorkflowRunOutput,
     WorkflowRunOutputEvent,
 )
+from agno.session.workflow import WorkflowSession
 from agno.utils.log import log_debug, logger
 from agno.utils.merge_dict import merge_parallel_session_states
 from agno.workflow.condition import Condition
@@ -200,6 +201,9 @@ class Parallel:
         workflow_run_response: Optional[WorkflowRunOutput] = None,
         store_executor_outputs: bool = True,
         session_state: Optional[Dict[str, Any]] = None,
+        workflow_session: Optional[WorkflowSession] = None,
+        add_workflow_history_to_steps: Optional[bool] = False,
+        num_history_runs: int = 3,
     ) -> StepOutput:
         """Execute all steps in parallel and return aggregated result"""
         # Use workflow logger for parallel orchestration
@@ -228,6 +232,9 @@ class Parallel:
                     user_id=user_id,
                     workflow_run_response=workflow_run_response,
                     store_executor_outputs=store_executor_outputs,
+                    workflow_session=workflow_session,
+                    add_workflow_history_to_steps=add_workflow_history_to_steps,
+                    num_history_runs=num_history_runs,
                     session_state=step_session_state,
                 )  # type: ignore[union-attr]
                 return idx, step_result, step_session_state
@@ -315,6 +322,9 @@ class Parallel:
         store_executor_outputs: bool = True,
         session_state: Optional[Dict[str, Any]] = None,
         parent_step_id: Optional[str] = None,
+        workflow_session: Optional[WorkflowSession] = None,
+        add_workflow_history_to_steps: Optional[bool] = False,
+        num_history_runs: int = 3,
     ) -> Iterator[Union[WorkflowRunOutputEvent, StepOutput]]:
         """Execute all steps in parallel with streaming support"""
         log_debug(f"Parallel Start: {self.name} ({len(self.steps)} steps)", center=True, symbol="=")
@@ -345,14 +355,20 @@ class Parallel:
                 parent_step_id=parent_step_id,
             )
 
+        import queue
+
+        event_queue = queue.Queue()  # type: ignore
+        step_results = []
+        modified_session_states = []
+
         def execute_step_stream_with_index(step_with_index):
-            """Execute a single step with streaming and preserve its original index"""
+            """Execute a single step with streaming and put events in queue immediately"""
             idx, step = step_with_index
             # Use the individual session_state copy for this step
             step_session_state = session_state_copies[idx]
 
             try:
-                step_events = []
+                step_outputs = []
 
                 # If step_index is None or integer (main step): create (step_index, sub_index)
                 # If step_index is tuple (child step): all parallel sub-steps get same index
@@ -374,79 +390,81 @@ class Parallel:
                     store_executor_outputs=store_executor_outputs,
                     session_state=step_session_state,
                     parent_step_id=parallel_step_id,
+                    workflow_session=workflow_session,
+                    add_workflow_history_to_steps=add_workflow_history_to_steps,
+                    num_history_runs=num_history_runs,
                 ):
-                    step_events.append(event)
-                return idx, step_events, step_session_state
+                    # Put event immediately in queue
+                    event_queue.put(("event", idx, event))
+                    if isinstance(event, StepOutput):
+                        step_outputs.append(event)
+
+                # Signal completion for this step
+                event_queue.put(("complete", idx, step_outputs, step_session_state))
+                return idx, step_outputs, step_session_state
             except Exception as exc:
                 parallel_step_name = getattr(step, "name", f"step_{idx}")
                 logger.error(f"Parallel step {parallel_step_name} streaming failed: {exc}")
-                return (
-                    idx,
-                    [
-                        StepOutput(
-                            step_name=parallel_step_name,
-                            content=f"Step {parallel_step_name} failed: {str(exc)}",
-                            success=False,
-                            error=str(exc),
-                        )
-                    ],
-                    step_session_state,
+                error_event = StepOutput(
+                    step_name=parallel_step_name,
+                    content=f"Step {parallel_step_name} failed: {str(exc)}",
+                    success=False,
+                    error=str(exc),
                 )
+                event_queue.put(("event", idx, error_event))
+                event_queue.put(("complete", idx, [error_event], step_session_state))
+                return idx, [error_event], step_session_state
 
-        # Use index to preserve order
+        # Submit all parallel tasks
         indexed_steps = list(enumerate(self.steps))
-        all_events_with_indices = []
-        step_results = []
-        modified_session_states = []
 
         with ThreadPoolExecutor(max_workers=len(self.steps)) as executor:
-            # Submit all tasks with their original indices
-            future_to_index = {
-                executor.submit(execute_step_stream_with_index, indexed_step): indexed_step[0]
-                for indexed_step in indexed_steps
-            }
+            # Submit all tasks
+            futures = [executor.submit(execute_step_stream_with_index, indexed_step) for indexed_step in indexed_steps]
 
-            # Collect results and modified session_state copies
-            for future in as_completed(future_to_index):
+            # Process events from queue as they arrive
+            completed_steps = 0
+            total_steps = len(self.steps)
+
+            while completed_steps < total_steps:
                 try:
-                    index, events, modified_session_state = future.result()
-                    all_events_with_indices.append((index, events))
-                    modified_session_states.append(modified_session_state)
+                    message_type, step_idx, *data = event_queue.get(timeout=1.0)
+
+                    if message_type == "event":
+                        event = data[0]
+                        # Yield events immediately as they arrive (except StepOutputs)
+                        if not isinstance(event, StepOutput):
+                            yield event
 
-                    # Extract StepOutput from events for the final result
-                    step_outputs = [event for event in events if isinstance(event, StepOutput)]
-                    if step_outputs:
+                    elif message_type == "complete":
+                        step_outputs, step_session_state = data
                         step_results.extend(step_outputs)
+                        modified_session_states.append(step_session_state)
+                        completed_steps += 1
+
+                        step_name = getattr(self.steps[step_idx], "name", f"step_{step_idx}")
+                        log_debug(f"Parallel step {step_name} streaming completed")
+
+                except queue.Empty:
+                    for i, future in enumerate(futures):
+                        if future.done() and future.exception():
+                            logger.error(f"Parallel step {i} failed: {future.exception()}")
+                            if completed_steps < total_steps:
+                                completed_steps += 1
+                except Exception as e:
+                    logger.error(f"Error processing parallel step events: {e}")
+                    completed_steps += 1
 
-                    step_name = getattr(self.steps[index], "name", f"step_{index}")
-                    log_debug(f"Parallel step {step_name} streaming completed")
+            for future in futures:
+                try:
+                    future.result()
                 except Exception as e:
-                    index = future_to_index[future]
-                    step_name = getattr(self.steps[index], "name", f"step_{index}")
-                    logger.error(f"Parallel step {step_name} streaming failed: {e}")
-                    error_event = StepOutput(
-                        step_name=step_name,
-                        content=f"Step {step_name} failed: {str(e)}",
-                        success=False,
-                        error=str(e),
-                    )
-                    all_events_with_indices.append((index, [error_event]))
-                    step_results.append(error_event)
+                    logger.error(f"Future completion error: {e}")
 
         # Merge all session_state changes back into the original session_state
         if session_state is not None:
             merge_parallel_session_states(session_state, modified_session_states)
 
-        # Sort events by original index to preserve order
-        all_events_with_indices.sort(key=lambda x: x[0])
-
-        # Yield all collected streaming events in order (but not final StepOutputs)
-        for _, events in all_events_with_indices:
-            for event in events:
-                # Only yield non-StepOutput events during streaming to avoid duplication
-                if not isinstance(event, StepOutput):
-                    yield event
-
         # Flatten step_results - handle steps that return List[StepOutput] (like Condition/Loop)
         flattened_step_results: List[StepOutput] = []
         for result in step_results:
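This is the substantive change in parallel.py. Instead of buffering each step's events in a list, sorting by step index after all futures finish, and only then replaying them, each worker thread now pushes ("event", idx, event) tuples onto a shared queue.Queue the moment they occur, followed by a ("complete", idx, outputs, state) sentinel; the main thread drains the queue and yields non-StepOutput events immediately. Events therefore interleave in arrival order rather than step order, which is what makes the stream live. A self-contained sketch of the same producer/consumer pattern, with toy generators standing in for agno steps:

# Standalone illustration of the queue-based streaming pattern adopted above.
# The worker generators are toys; only the message protocol mirrors the diff
# ("event" tuples as they happen, then one "complete" sentinel per worker).
import queue
import time
from concurrent.futures import ThreadPoolExecutor

def stream_parallel(workers):
    event_queue: queue.Queue = queue.Queue()

    def run(idx, worker):
        try:
            for event in worker():                  # producer: forward immediately
                event_queue.put(("event", idx, event))
        finally:
            event_queue.put(("complete", idx))      # always signal completion

    with ThreadPoolExecutor(max_workers=len(workers)) as executor:
        futures = [executor.submit(run, i, w) for i, w in enumerate(workers)]
        done = 0
        while done < len(workers):
            # The real implementation uses get(timeout=1.0) and, on
            # queue.Empty, checks the futures for silent failures.
            kind, idx, *payload = event_queue.get()
            if kind == "event":
                yield idx, payload[0]               # arrival order, not step order
            else:
                done += 1
        for f in futures:
            f.result()                              # surface any worker exception

def ticker(name):
    def gen():
        for i in range(3):
            time.sleep(0.01)
            yield f"{name}-{i}"
    return gen

for idx, event in stream_parallel([ticker("a"), ticker("b")]):
    print(idx, event)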
@@ -473,7 +491,7 @@ class Parallel:
             step_name=self.name,
             step_index=step_index,
             parallel_step_count=len(self.steps),
-            step_results=[aggregated_result],  # Now single aggregated result
+            step_results=flattened_step_results,
             step_id=parallel_step_id,
             parent_step_id=parent_step_id,
         )
@@ -486,6 +504,9 @@ class Parallel:
         workflow_run_response: Optional[WorkflowRunOutput] = None,
         store_executor_outputs: bool = True,
         session_state: Optional[Dict[str, Any]] = None,
+        workflow_session: Optional[WorkflowSession] = None,
+        add_workflow_history_to_steps: Optional[bool] = False,
+        num_history_runs: int = 3,
     ) -> StepOutput:
         """Execute all steps in parallel using asyncio and return aggregated result"""
         # Use workflow logger for async parallel orchestration
@@ -514,6 +535,9 @@ class Parallel:
                     user_id=user_id,
                     workflow_run_response=workflow_run_response,
                     store_executor_outputs=store_executor_outputs,
+                    workflow_session=workflow_session,
+                    add_workflow_history_to_steps=add_workflow_history_to_steps,
+                    num_history_runs=num_history_runs,
                     session_state=step_session_state,
                 )  # type: ignore[union-attr]
                 return idx, inner_step_result, step_session_state
@@ -602,6 +626,9 @@ class Parallel:
         store_executor_outputs: bool = True,
         session_state: Optional[Dict[str, Any]] = None,
         parent_step_id: Optional[str] = None,
+        workflow_session: Optional[WorkflowSession] = None,
+        add_workflow_history_to_steps: Optional[bool] = False,
+        num_history_runs: int = 3,
     ) -> AsyncIterator[Union[WorkflowRunOutputEvent, TeamRunOutputEvent, RunOutputEvent, StepOutput]]:
         """Execute all steps in parallel with async streaming support"""
         log_debug(f"Parallel Start: {self.name} ({len(self.steps)} steps)", center=True, symbol="=")
@@ -632,14 +659,20 @@ class Parallel:
                 parent_step_id=parent_step_id,
             )
 
+        import asyncio
+
+        event_queue = asyncio.Queue()  # type: ignore
+        step_results = []
+        modified_session_states = []
+
         async def execute_step_stream_async_with_index(step_with_index):
-            """Execute a single step with async streaming and preserve its original index"""
+            """Execute a single step with async streaming and yield events immediately"""
             idx, step = step_with_index
             # Use the individual session_state copy for this step
             step_session_state = session_state_copies[idx]
 
             try:
-                step_events = []
+                step_outputs = []
 
                 # If step_index is None or integer (main step): create (step_index, sub_index)
                 # If step_index is tuple (child step): all parallel sub-steps get same index
@@ -661,78 +694,69 @@ class Parallel:
                     store_executor_outputs=store_executor_outputs,
                     session_state=step_session_state,
                     parent_step_id=parallel_step_id,
+                    workflow_session=workflow_session,
+                    add_workflow_history_to_steps=add_workflow_history_to_steps,
+                    num_history_runs=num_history_runs,
                 ):  # type: ignore[union-attr]
-                    step_events.append(event)
-                return idx, step_events, step_session_state
+                    # Yield events immediately to the queue
+                    await event_queue.put(("event", idx, event))
+                    if isinstance(event, StepOutput):
+                        step_outputs.append(event)
+
+                # Signal completion for this step
+                await event_queue.put(("complete", idx, step_outputs, step_session_state))
+                return idx, step_outputs, step_session_state
             except Exception as e:
                 parallel_step_name = getattr(step, "name", f"step_{idx}")
                 logger.error(f"Parallel step {parallel_step_name} async streaming failed: {e}")
-                return (
-                    idx,
-                    [
-                        StepOutput(
-                            step_name=parallel_step_name,
-                            content=f"Step {parallel_step_name} failed: {str(e)}",
-                            success=False,
-                            error=str(e),
-                        )
-                    ],
-                    step_session_state,
+                error_event = StepOutput(
+                    step_name=parallel_step_name,
+                    content=f"Step {parallel_step_name} failed: {str(e)}",
+                    success=False,
+                    error=str(e),
                 )
+                await event_queue.put(("event", idx, error_event))
+                await event_queue.put(("complete", idx, [error_event], step_session_state))
+                return idx, [error_event], step_session_state
 
-        # Use index to preserve order
+        # Start all parallel tasks
         indexed_steps = list(enumerate(self.steps))
-        all_events_with_indices = []
-        step_results = []
-        modified_session_states = []
+        tasks = [
+            asyncio.create_task(execute_step_stream_async_with_index(indexed_step)) for indexed_step in indexed_steps
+        ]
 
-        # Create tasks for all steps with their indices
-        tasks = [execute_step_stream_async_with_index(indexed_step) for indexed_step in indexed_steps]
+        # Process events as they arrive and track completion
+        completed_steps = 0
+        total_steps = len(self.steps)
 
-        # Execute all tasks concurrently
-        results_with_indices = await asyncio.gather(*tasks, return_exceptions=True)
+        while completed_steps < total_steps:
+            try:
+                message_type, step_idx, *data = await event_queue.get()
 
-        # Process results and handle exceptions, preserving order
-        for i, result in enumerate(results_with_indices):
-            if isinstance(result, Exception):
-                step_name = getattr(self.steps[i], "name", f"step_{i}")
-                logger.error(f"Parallel step {step_name} async streaming failed: {result}")
-                error_event = StepOutput(
-                    step_name=step_name,
-                    content=f"Step {step_name} failed: {str(result)}",
-                    success=False,
-                    error=str(result),
-                )
-                all_events_with_indices.append((i, [error_event]))
-                step_results.append(error_event)
-                modified_session_states.append(session_state_copies[i])
-            else:
-                index, events, modified_session_state = result  # type: ignore[misc]
-                all_events_with_indices.append((index, events))
-                modified_session_states.append(modified_session_state)
+                if message_type == "event":
+                    event = data[0]
+                    if not isinstance(event, StepOutput):
+                        yield event
 
-                # Extract StepOutput from events for the final result
-                step_outputs = [event for event in events if isinstance(event, StepOutput)]
-                if step_outputs:
+                elif message_type == "complete":
+                    step_outputs, step_session_state = data
                     step_results.extend(step_outputs)
+                    modified_session_states.append(step_session_state)
+                    completed_steps += 1
 
-                step_name = getattr(self.steps[index], "name", f"step_{index}")
-                log_debug(f"Parallel step {step_name} async streaming completed")
+                    step_name = getattr(self.steps[step_idx], "name", f"step_{step_idx}")
+                    log_debug(f"Parallel step {step_name} async streaming completed")
+
+            except Exception as e:
+                logger.error(f"Error processing parallel step events: {e}")
+                completed_steps += 1
+
+        await asyncio.gather(*tasks, return_exceptions=True)
 
         # Merge all session_state changes back into the original session_state
        if session_state is not None:
             merge_parallel_session_states(session_state, modified_session_states)
 
-        # Sort events by original index to preserve order
-        all_events_with_indices.sort(key=lambda x: x[0])
-
-        # Yield all collected streaming events in order (but not final StepOutputs)
-        for _, events in all_events_with_indices:
-            for event in events:
-                # Only yield non-StepOutput events during streaming to avoid duplication
-                if not isinstance(event, StepOutput):
-                    yield event
-
         # Flatten step_results - handle steps that return List[StepOutput] (like Condition/Loop)
         flattened_step_results: List[StepOutput] = []
         for result in step_results:
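The async variant mirrors the threaded one with asyncio primitives: asyncio.create_task starts each step's stream, producers await event_queue.put(...), and the consumer awaits event_queue.get() until every step has sent its completion sentinel, with a final asyncio.gather to reap the tasks. A standalone sketch of the same pattern, with toy async generators in place of agno steps:

# Standalone asyncio illustration of the queue-based streaming pattern above.
import asyncio

async def stream_parallel_async(workers):
    event_queue: asyncio.Queue = asyncio.Queue()

    async def run(idx, worker):
        try:
            async for event in worker():              # producer side
                await event_queue.put(("event", idx, event))
        finally:
            await event_queue.put(("complete", idx))  # completion sentinel

    tasks = [asyncio.create_task(run(i, w)) for i, w in enumerate(workers)]
    done = 0
    while done < len(workers):                        # consumer side
        kind, idx, *payload = await event_queue.get()
        if kind == "event":
            yield idx, payload[0]                     # yield in arrival order
        else:
            done += 1
    await asyncio.gather(*tasks, return_exceptions=True)  # reap the tasks

def ticker(name):
    async def gen():
        for i in range(3):
            await asyncio.sleep(0.01)
            yield f"{name}-{i}"
    return gen

async def main():
    async for idx, event in stream_parallel_async([ticker("a"), ticker("b")]):
        print(idx, event)

asyncio.run(main())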
@@ -759,7 +783,7 @@ class Parallel:
             step_name=self.name,
             step_index=step_index,
             parallel_step_count=len(self.steps),
-            step_results=[aggregated_result],  # Now single aggregated result
+            step_results=flattened_step_results,
             step_id=parallel_step_id,
             parent_step_id=parent_step_id,
         )
agno/workflow/router.py CHANGED
@@ -11,6 +11,7 @@ from agno.run.workflow import (
     WorkflowRunOutput,
     WorkflowRunOutputEvent,
 )
+from agno.session.workflow import WorkflowSession
 from agno.utils.log import log_debug, logger
 from agno.workflow.step import Step
 from agno.workflow.types import StepInput, StepOutput, StepType
@@ -150,6 +151,9 @@ class Router:
         workflow_run_response: Optional[WorkflowRunOutput] = None,
         session_state: Optional[Dict[str, Any]] = None,
         store_executor_outputs: bool = True,
+        workflow_session: Optional[WorkflowSession] = None,
+        add_workflow_history_to_steps: Optional[bool] = False,
+        num_history_runs: int = 3,
     ) -> StepOutput:
         """Execute the router and its selected steps with sequential chaining"""
         log_debug(f"Router Start: {self.name}", center=True, symbol="-")
@@ -184,6 +188,9 @@ class Router:
                     workflow_run_response=workflow_run_response,
                     store_executor_outputs=store_executor_outputs,
                     session_state=session_state,
+                    workflow_session=workflow_session,
+                    add_workflow_history_to_steps=add_workflow_history_to_steps,
+                    num_history_runs=num_history_runs,
                 )
 
                 # Handle both single StepOutput and List[StepOutput]
@@ -243,6 +250,9 @@ class Router:
         step_index: Optional[Union[int, tuple]] = None,
         store_executor_outputs: bool = True,
         parent_step_id: Optional[str] = None,
+        workflow_session: Optional[WorkflowSession] = None,
+        add_workflow_history_to_steps: Optional[bool] = False,
+        num_history_runs: int = 3,
     ) -> Iterator[Union[WorkflowRunOutputEvent, StepOutput]]:
         """Execute the router with streaming support"""
         log_debug(f"Router Start: {self.name}", center=True, symbol="-")
@@ -305,6 +315,9 @@ class Router:
                     store_executor_outputs=store_executor_outputs,
                     session_state=session_state,
                     parent_step_id=router_step_id,
+                    workflow_session=workflow_session,
+                    add_workflow_history_to_steps=add_workflow_history_to_steps,
+                    num_history_runs=num_history_runs,
                 ):
                     if isinstance(event, StepOutput):
                         step_outputs_for_step.append(event)
@@ -386,6 +399,9 @@ class Router:
         workflow_run_response: Optional[WorkflowRunOutput] = None,
         session_state: Optional[Dict[str, Any]] = None,
         store_executor_outputs: bool = True,
+        workflow_session: Optional[WorkflowSession] = None,
+        add_workflow_history_to_steps: Optional[bool] = False,
+        num_history_runs: int = 3,
     ) -> StepOutput:
         """Async execute the router and its selected steps with sequential chaining"""
         log_debug(f"Router Start: {self.name}", center=True, symbol="-")
@@ -421,6 +437,9 @@ class Router:
                     workflow_run_response=workflow_run_response,
                     store_executor_outputs=store_executor_outputs,
                     session_state=session_state,
+                    workflow_session=workflow_session,
+                    add_workflow_history_to_steps=add_workflow_history_to_steps,
+                    num_history_runs=num_history_runs,
                 )
                 # Handle both single StepOutput and List[StepOutput]
                 if isinstance(step_output, list):
@@ -482,6 +501,9 @@ class Router:
         step_index: Optional[Union[int, tuple]] = None,
         store_executor_outputs: bool = True,
         parent_step_id: Optional[str] = None,
+        workflow_session: Optional[WorkflowSession] = None,
+        add_workflow_history_to_steps: Optional[bool] = False,
+        num_history_runs: int = 3,
     ) -> AsyncIterator[Union[WorkflowRunOutputEvent, TeamRunOutputEvent, RunOutputEvent, StepOutput]]:
         """Async execute the router with streaming support"""
         log_debug(f"Router Start: {self.name}", center=True, symbol="-")
@@ -546,6 +568,9 @@ class Router:
                     store_executor_outputs=store_executor_outputs,
                     session_state=session_state,
                     parent_step_id=router_step_id,
+                    workflow_session=workflow_session,
+                    add_workflow_history_to_steps=add_workflow_history_to_steps,
+                    num_history_runs=num_history_runs,
                 ):
                     if isinstance(event, StepOutput):
                         step_outputs_for_step.append(event)
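Taken together, 2.1.4 threads the same three history parameters through Loop, Parallel, and Router, so prior-run context can reach steps at any nesting depth. A hedged sketch of the intended wiring at the workflow level; the flag names mirror the new parameters, but treat this exact surface as an assumption rather than documented API:

# Hedged usage sketch: enabling per-step workflow history on a workflow that
# nests the containers patched in this release. Import path and constructor
# flags are assumptions inferred from the diff, not documented API.
from agno.workflow import Workflow  # import path assumed

workflow = Workflow(
    name="research",
    steps=[...],  # any mix of Step / Loop / Parallel / Router instances
    add_workflow_history_to_steps=True,  # inject prior runs into every step
    num_history_runs=3,                  # include the last 3 runs (the default)
)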