flock-core 0.5.11-py3-none-any.whl → 0.5.20-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of flock-core might be problematic.

Files changed (91)
  1. flock/__init__.py +1 -1
  2. flock/agent/__init__.py +30 -0
  3. flock/agent/builder_helpers.py +192 -0
  4. flock/agent/builder_validator.py +169 -0
  5. flock/agent/component_lifecycle.py +325 -0
  6. flock/agent/context_resolver.py +141 -0
  7. flock/agent/mcp_integration.py +212 -0
  8. flock/agent/output_processor.py +304 -0
  9. flock/api/__init__.py +20 -0
  10. flock/{api_models.py → api/models.py} +0 -2
  11. flock/{service.py → api/service.py} +3 -3
  12. flock/cli.py +2 -2
  13. flock/components/__init__.py +41 -0
  14. flock/components/agent/__init__.py +22 -0
  15. flock/{components.py → components/agent/base.py} +4 -3
  16. flock/{utility/output_utility_component.py → components/agent/output_utility.py} +12 -7
  17. flock/components/orchestrator/__init__.py +22 -0
  18. flock/{orchestrator_component.py → components/orchestrator/base.py} +5 -293
  19. flock/components/orchestrator/circuit_breaker.py +95 -0
  20. flock/components/orchestrator/collection.py +143 -0
  21. flock/components/orchestrator/deduplication.py +78 -0
  22. flock/core/__init__.py +30 -0
  23. flock/core/agent.py +953 -0
  24. flock/{artifacts.py → core/artifacts.py} +1 -1
  25. flock/{context_provider.py → core/context_provider.py} +3 -3
  26. flock/core/orchestrator.py +1102 -0
  27. flock/{store.py → core/store.py} +99 -454
  28. flock/{subscription.py → core/subscription.py} +1 -1
  29. flock/dashboard/collector.py +5 -5
  30. flock/dashboard/graph_builder.py +7 -7
  31. flock/dashboard/routes/__init__.py +21 -0
  32. flock/dashboard/routes/control.py +327 -0
  33. flock/dashboard/routes/helpers.py +340 -0
  34. flock/dashboard/routes/themes.py +76 -0
  35. flock/dashboard/routes/traces.py +521 -0
  36. flock/dashboard/routes/websocket.py +108 -0
  37. flock/dashboard/service.py +43 -1316
  38. flock/engines/dspy/__init__.py +20 -0
  39. flock/engines/dspy/artifact_materializer.py +216 -0
  40. flock/engines/dspy/signature_builder.py +474 -0
  41. flock/engines/dspy/streaming_executor.py +858 -0
  42. flock/engines/dspy_engine.py +45 -1330
  43. flock/engines/examples/simple_batch_engine.py +2 -2
  44. flock/examples.py +7 -7
  45. flock/logging/logging.py +1 -16
  46. flock/models/__init__.py +10 -0
  47. flock/orchestrator/__init__.py +45 -0
  48. flock/{artifact_collector.py → orchestrator/artifact_collector.py} +3 -3
  49. flock/orchestrator/artifact_manager.py +168 -0
  50. flock/{batch_accumulator.py → orchestrator/batch_accumulator.py} +2 -2
  51. flock/orchestrator/component_runner.py +389 -0
  52. flock/orchestrator/context_builder.py +167 -0
  53. flock/{correlation_engine.py → orchestrator/correlation_engine.py} +2 -2
  54. flock/orchestrator/event_emitter.py +167 -0
  55. flock/orchestrator/initialization.py +184 -0
  56. flock/orchestrator/lifecycle_manager.py +226 -0
  57. flock/orchestrator/mcp_manager.py +202 -0
  58. flock/orchestrator/scheduler.py +189 -0
  59. flock/orchestrator/server_manager.py +234 -0
  60. flock/orchestrator/tracing.py +147 -0
  61. flock/storage/__init__.py +10 -0
  62. flock/storage/artifact_aggregator.py +158 -0
  63. flock/storage/in_memory/__init__.py +6 -0
  64. flock/storage/in_memory/artifact_filter.py +114 -0
  65. flock/storage/in_memory/history_aggregator.py +115 -0
  66. flock/storage/sqlite/__init__.py +10 -0
  67. flock/storage/sqlite/agent_history_queries.py +154 -0
  68. flock/storage/sqlite/consumption_loader.py +100 -0
  69. flock/storage/sqlite/query_builder.py +112 -0
  70. flock/storage/sqlite/query_params_builder.py +91 -0
  71. flock/storage/sqlite/schema_manager.py +168 -0
  72. flock/storage/sqlite/summary_queries.py +194 -0
  73. flock/utils/__init__.py +14 -0
  74. flock/utils/async_utils.py +67 -0
  75. flock/{runtime.py → utils/runtime.py} +3 -3
  76. flock/utils/time_utils.py +53 -0
  77. flock/utils/type_resolution.py +38 -0
  78. flock/{utilities.py → utils/utilities.py} +2 -2
  79. flock/utils/validation.py +57 -0
  80. flock/utils/visibility.py +79 -0
  81. flock/utils/visibility_utils.py +134 -0
  82. {flock_core-0.5.11.dist-info → flock_core-0.5.20.dist-info}/METADATA +18 -4
  83. {flock_core-0.5.11.dist-info → flock_core-0.5.20.dist-info}/RECORD +89 -33
  84. flock/agent.py +0 -1578
  85. flock/orchestrator.py +0 -1983
  86. /flock/{visibility.py → core/visibility.py} +0 -0
  87. /flock/{system_artifacts.py → models/system_artifacts.py} +0 -0
  88. /flock/{helper → utils}/cli_helper.py +0 -0
  89. {flock_core-0.5.11.dist-info → flock_core-0.5.20.dist-info}/WHEEL +0 -0
  90. {flock_core-0.5.11.dist-info → flock_core-0.5.20.dist-info}/entry_points.txt +0 -0
  91. {flock_core-0.5.11.dist-info → flock_core-0.5.20.dist-info}/licenses/LICENSE +0 -0
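
The only hunk reproduced below is entry 41, the new flock/engines/dspy/streaming_executor.py (+858 lines). Most of the remaining churn is a restructuring of the flat flock/*.py modules into sub-packages (flock.agent, flock.api, flock.components, flock.core, flock.orchestrator, flock.storage, flock.utils). As a rough migration sketch only: the module paths below are read directly off the rename list above, and whether 0.5.20 also keeps the old flat paths importable via re-exports is not something this diff shows, so treat the new paths as the safe target.

    # Hypothetical import migration inferred from the rename list above.
    # Old 0.5.11 flat modules:
    #   import flock.store
    #   import flock.api_models
    #   import flock.orchestrator_component
    # New 0.5.20 package layout:
    import flock.core.store
    import flock.api.models
    import flock.components.orchestrator.base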
@@ -0,0 +1,858 @@
+ """DSPy streaming execution with Rich display and WebSocket support.
+
+ Phase 6: Extracted from dspy_engine.py to reduce file size and improve modularity.
+
+ This module handles all streaming-related logic for DSPy program execution,
+ including two modes:
+ - CLI mode: Rich Live display with terminal formatting (agents.run())
+ - Dashboard mode: WebSocket-only streaming for parallel execution (no Rich overhead)
+ """
+
+ from __future__ import annotations
+
+ import asyncio
+ from collections import OrderedDict, defaultdict
+ from contextlib import nullcontext
+ from datetime import UTC
+ from typing import Any
+
+ from pydantic import BaseModel
+
+ from flock.dashboard.events import StreamingOutputEvent
+ from flock.logging.logging import get_logger
+
+
+ logger = get_logger(__name__)
+
+
+ class DSPyStreamingExecutor:
+     """Executes DSPy programs in streaming mode with Rich or WebSocket output.
+
+     Responsibilities:
+     - Standard (non-streaming) execution
+     - WebSocket-only streaming (dashboard mode, no Rich overhead)
+     - Rich CLI streaming with formatted tables
+     - Stream formatter setup (themes, styles)
+     - Final display rendering with artifact metadata
+     """
+
+     def __init__(
+         self,
+         *,
+         status_output_field: str,
+         stream_vertical_overflow: str,
+         theme: str,
+         no_output: bool,
+     ):
+         """Initialize streaming executor with configuration.
+
+         Args:
+             status_output_field: Field name for status output
+             stream_vertical_overflow: Rich Live vertical overflow strategy
+             theme: Theme name for Rich output formatting
+             no_output: Whether to disable output
+         """
+         self.status_output_field = status_output_field
+         self.stream_vertical_overflow = stream_vertical_overflow
+         self.theme = theme
+         self.no_output = no_output
+
+     async def execute_standard(
+         self, dspy_mod, program, *, description: str, payload: dict[str, Any]
+     ) -> Any:
+         """Execute DSPy program in standard mode (no streaming).
+
+         Args:
+             dspy_mod: DSPy module
+             program: DSPy program (Predict or ReAct)
+             description: System description
+             payload: Execution payload with semantic field names
+
+         Returns:
+             DSPy Prediction result
+         """
+         # Handle semantic fields format: {"description": ..., "task": ..., "report": ...}
+         if isinstance(payload, dict) and "description" in payload:
+             # Semantic fields: pass all fields as kwargs
+             return program(**payload)
+
+         # Fallback for unexpected payload format
+         raise ValueError(
+             f"Invalid payload format: expected dict with 'description' key, got {type(payload).__name__}"
+         )
+
+     async def execute_streaming_websocket_only(
+         self,
+         dspy_mod,
+         program,
+         signature,
+         *,
+         description: str,
+         payload: dict[str, Any],
+         agent: Any,
+         ctx: Any = None,
+         pre_generated_artifact_id: Any = None,
+         output_group=None,
+     ) -> tuple[Any, None]:
+         """Execute streaming for WebSocket only (no Rich display).
+
+         Optimized path for dashboard mode that skips all Rich formatting overhead.
+         Used when multiple agents stream in parallel to avoid terminal conflicts
+         and deadlocks with MCP tools.
+
+         This method eliminates the Rich Live context that can cause deadlocks when
+         combined with MCP tool execution and parallel agent streaming.
+
+         Args:
+             dspy_mod: DSPy module
+             program: DSPy program (Predict or ReAct)
+             signature: DSPy Signature
+             description: System description
+             payload: Execution payload with semantic field names
+             agent: Agent instance
+             ctx: Execution context
+             pre_generated_artifact_id: Pre-generated artifact ID for streaming
+             output_group: OutputGroup defining expected outputs
+
+         Returns:
+             Tuple of (DSPy Prediction result, None)
+         """
+         logger.info(
+             f"Agent {agent.name}: Starting WebSocket-only streaming (dashboard mode)"
+         )
+
+         # Get WebSocket broadcast function (security: wrapper prevents object traversal)
+         # Phase 6+7 Security Fix: Use broadcast wrapper from Agent class variable (prevents GOD MODE restoration)
+         from flock.core import Agent
+
+         ws_broadcast = Agent._websocket_broadcast_global
+
+         if not ws_broadcast:
+             logger.warning(
+                 f"Agent {agent.name}: No WebSocket manager, falling back to standard execution"
+             )
+             result = await self.execute_standard(
+                 dspy_mod, program, description=description, payload=payload
+             )
+             return result, None
+
+         # Get artifact type name for WebSocket events
+         artifact_type_name = "output"
+         # Use output_group.outputs (current group) if available, otherwise fallback to agent.outputs (all groups)
+         outputs_to_display = (
+             output_group.outputs
+             if output_group and hasattr(output_group, "outputs")
+             else agent.outputs
+             if hasattr(agent, "outputs")
+             else []
+         )
+
+         if outputs_to_display:
+             artifact_type_name = outputs_to_display[0].spec.type_name
+
+         # Prepare stream listeners
+         listeners = []
+         try:
+             streaming_mod = getattr(dspy_mod, "streaming", None)
+             if streaming_mod and hasattr(streaming_mod, "StreamListener"):
+                 for name, field in signature.output_fields.items():
+                     if field.annotation is str:
+                         listeners.append(
+                             streaming_mod.StreamListener(signature_field_name=name)
+                         )
+         except Exception:
+             listeners = []
+
+         # Create streaming task
+         streaming_task = dspy_mod.streamify(
+             program,
+             is_async_program=True,
+             stream_listeners=listeners if listeners else None,
+         )
+
+         # Execute with appropriate payload format
+         if isinstance(payload, dict) and "description" in payload:
+             # Semantic fields: pass all fields as kwargs
+             stream_generator = streaming_task(**payload)
+         elif isinstance(payload, dict) and "input" in payload:
+             # Legacy format: {"input": ..., "context": ...}
+             stream_generator = streaming_task(
+                 description=description,
+                 input=payload["input"],
+                 context=payload.get("context", []),
+             )
+         else:
+             # Old format: direct payload
+             stream_generator = streaming_task(
+                 description=description, input=payload, context=[]
+             )
+
+         # Process stream (WebSocket only, no Rich display)
+         final_result = None
+         stream_sequence = 0
+
+         # Track background WebSocket broadcast tasks to prevent garbage collection
+         # Using fire-and-forget pattern to avoid blocking DSPy's streaming loop
+         ws_broadcast_tasks: set[asyncio.Task] = set()
+
+         async for value in stream_generator:
+             try:
+                 from dspy.streaming import StatusMessage, StreamResponse
+                 from litellm import ModelResponseStream
+             except Exception:
+                 StatusMessage = object  # type: ignore
+                 StreamResponse = object  # type: ignore
+                 ModelResponseStream = object  # type: ignore
+
+             if isinstance(value, StatusMessage):
+                 token = getattr(value, "message", "")
+                 if token:
+                     try:
+                         event = StreamingOutputEvent(
+                             correlation_id=str(ctx.correlation_id)
+                             if ctx and ctx.correlation_id
+                             else "",
+                             agent_name=agent.name,
+                             run_id=ctx.task_id if ctx else "",
+                             output_type="log",
+                             content=str(token + "\n"),
+                             sequence=stream_sequence,
+                             is_final=False,
+                             artifact_id=str(pre_generated_artifact_id),
+                             artifact_type=artifact_type_name,
+                         )
+                         # Fire-and-forget to avoid blocking DSPy's streaming loop
+                         task = asyncio.create_task(ws_broadcast(event))
+                         ws_broadcast_tasks.add(task)
+                         task.add_done_callback(ws_broadcast_tasks.discard)
+                         stream_sequence += 1
+                     except Exception as e:
+                         logger.warning(f"Failed to emit streaming event: {e}")
+
+             elif isinstance(value, StreamResponse):
+                 token = getattr(value, "chunk", None)
+                 if token:
+                     try:
+                         event = StreamingOutputEvent(
+                             correlation_id=str(ctx.correlation_id)
+                             if ctx and ctx.correlation_id
+                             else "",
+                             agent_name=agent.name,
+                             run_id=ctx.task_id if ctx else "",
+                             output_type="llm_token",
+                             content=str(token),
+                             sequence=stream_sequence,
+                             is_final=False,
+                             artifact_id=str(pre_generated_artifact_id),
+                             artifact_type=artifact_type_name,
+                         )
+                         # Fire-and-forget to avoid blocking DSPy's streaming loop
+                         task = asyncio.create_task(ws_broadcast(event))
+                         ws_broadcast_tasks.add(task)
+                         task.add_done_callback(ws_broadcast_tasks.discard)
+                         stream_sequence += 1
+                     except Exception as e:
+                         logger.warning(f"Failed to emit streaming event: {e}")
+
+             elif isinstance(value, ModelResponseStream):
+                 chunk = value
+                 token = chunk.choices[0].delta.content or ""
+                 if token:
+                     try:
+                         event = StreamingOutputEvent(
+                             correlation_id=str(ctx.correlation_id)
+                             if ctx and ctx.correlation_id
+                             else "",
+                             agent_name=agent.name,
+                             run_id=ctx.task_id if ctx else "",
+                             output_type="llm_token",
+                             content=str(token),
+                             sequence=stream_sequence,
+                             is_final=False,
+                             artifact_id=str(pre_generated_artifact_id),
+                             artifact_type=artifact_type_name,
+                         )
+                         # Fire-and-forget to avoid blocking DSPy's streaming loop
+                         task = asyncio.create_task(ws_broadcast(event))
+                         ws_broadcast_tasks.add(task)
+                         task.add_done_callback(ws_broadcast_tasks.discard)
+                         stream_sequence += 1
+                     except Exception as e:
+                         logger.warning(f"Failed to emit streaming event: {e}")
+
+             elif isinstance(value, dspy_mod.Prediction):
+                 final_result = value
+                 # Send final events
+                 try:
+                     event = StreamingOutputEvent(
+                         correlation_id=str(ctx.correlation_id)
+                         if ctx and ctx.correlation_id
+                         else "",
+                         agent_name=agent.name,
+                         run_id=ctx.task_id if ctx else "",
+                         output_type="log",
+                         content=f"\nAmount of output tokens: {stream_sequence}",
+                         sequence=stream_sequence,
+                         is_final=True,
+                         artifact_id=str(pre_generated_artifact_id),
+                         artifact_type=artifact_type_name,
+                     )
+                     # Fire-and-forget to avoid blocking DSPy's streaming loop
+                     task = asyncio.create_task(ws_broadcast(event))
+                     ws_broadcast_tasks.add(task)
+                     task.add_done_callback(ws_broadcast_tasks.discard)
+
+                     event = StreamingOutputEvent(
+                         correlation_id=str(ctx.correlation_id)
+                         if ctx and ctx.correlation_id
+                         else "",
+                         agent_name=agent.name,
+                         run_id=ctx.task_id if ctx else "",
+                         output_type="log",
+                         content="--- End of output ---",
+                         sequence=stream_sequence + 1,
+                         is_final=True,
+                         artifact_id=str(pre_generated_artifact_id),
+                         artifact_type=artifact_type_name,
+                     )
+                     # Fire-and-forget to avoid blocking DSPy's streaming loop
+                     task = asyncio.create_task(ws_broadcast(event))
+                     ws_broadcast_tasks.add(task)
+                     task.add_done_callback(ws_broadcast_tasks.discard)
+                 except Exception as e:
+                     logger.warning(f"Failed to emit final streaming event: {e}")
+
+         if final_result is None:
+             raise RuntimeError(
+                 f"Agent {agent.name}: Streaming did not yield a final prediction"
+             )
+
+         logger.info(
+             f"Agent {agent.name}: WebSocket streaming completed ({stream_sequence} tokens)"
+         )
+         return final_result, None
+
+     async def execute_streaming(
+         self,
+         dspy_mod,
+         program,
+         signature,
+         *,
+         description: str,
+         payload: dict[str, Any],
+         agent: Any,
+         ctx: Any = None,
+         pre_generated_artifact_id: Any = None,
+         output_group=None,
+     ) -> Any:
+         """Execute DSPy program in streaming mode with Rich table updates.
+
+         Args:
+             dspy_mod: DSPy module
+             program: DSPy program (Predict or ReAct)
+             signature: DSPy Signature
+             description: System description
+             payload: Execution payload with semantic field names
+             agent: Agent instance
+             ctx: Execution context
+             pre_generated_artifact_id: Pre-generated artifact ID for streaming
+             output_group: OutputGroup defining expected outputs
+
+         Returns:
+             Tuple of (DSPy Prediction result, stream display data for final rendering)
+         """
+         from rich.console import Console
+         from rich.live import Live
+
+         console = Console()
+
+         # Get WebSocket broadcast function (security: wrapper prevents object traversal)
+         # Phase 6+7 Security Fix: Use broadcast wrapper from Agent class variable (prevents GOD MODE restoration)
+         from flock.core import Agent
+
+         ws_broadcast = Agent._websocket_broadcast_global
+
+         # Prepare stream listeners for output field
+         listeners = []
+         try:
+             streaming_mod = getattr(dspy_mod, "streaming", None)
+             if streaming_mod and hasattr(streaming_mod, "StreamListener"):
+                 for name, field in signature.output_fields.items():
+                     if field.annotation is str:
+                         listeners.append(
+                             streaming_mod.StreamListener(signature_field_name=name)
+                         )
+         except Exception:
+             listeners = []
+
+         streaming_task = dspy_mod.streamify(
+             program,
+             is_async_program=True,
+             stream_listeners=listeners if listeners else None,
+         )
+
+         # Execute with appropriate payload format
+         if isinstance(payload, dict) and "description" in payload:
+             # Semantic fields: pass all fields as kwargs
+             stream_generator = streaming_task(**payload)
+         elif isinstance(payload, dict) and "input" in payload:
+             # Legacy format: {"input": ..., "context": ...}
+             stream_generator = streaming_task(
+                 description=description,
+                 input=payload["input"],
+                 context=payload.get("context", []),
+             )
+         else:
+             # Old format: direct payload
+             stream_generator = streaming_task(
+                 description=description, input=payload, context=[]
+             )
+
+         signature_order = []
+         status_field = self.status_output_field
+         try:
+             signature_order = list(signature.output_fields.keys())
+         except Exception:
+             signature_order = []
+
+         # Initialize display data in full artifact format (matching OutputUtilityComponent display)
+         display_data: OrderedDict[str, Any] = OrderedDict()
+
+         # Use the pre-generated artifact ID that was created before execution started
+         display_data["id"] = str(pre_generated_artifact_id)
+
+         # Get the artifact type name from agent configuration
+         artifact_type_name = "output"
+         # Use output_group.outputs (current group) if available, otherwise fallback to agent.outputs (all groups)
+         outputs_to_display = (
+             output_group.outputs
+             if output_group and hasattr(output_group, "outputs")
+             else agent.outputs
+             if hasattr(agent, "outputs")
+             else []
+         )
+
+         if outputs_to_display:
+             artifact_type_name = outputs_to_display[0].spec.type_name
+             for output in outputs_to_display:
+                 if output.spec.type_name not in artifact_type_name:
+                     artifact_type_name += ", " + output.spec.type_name
+
+         display_data["type"] = artifact_type_name
+         display_data["payload"] = OrderedDict()
+
+         # Add output fields to payload section
+         for field_name in signature_order:
+             if field_name != "description":  # Skip description field
+                 display_data["payload"][field_name] = ""
+
+         display_data["produced_by"] = agent.name
+         display_data["correlation_id"] = (
+             str(ctx.correlation_id) if ctx and ctx.correlation_id else None
+         )
+         display_data["partition_key"] = None
+         display_data["tags"] = "set()"
+         display_data["visibility"] = OrderedDict([("kind", "Public")])
+         display_data["created_at"] = "streaming..."
+         display_data["version"] = 1
+         display_data["status"] = status_field
+
+         stream_buffers: defaultdict[str, list[str]] = defaultdict(list)
+         stream_buffers[status_field] = []
+         stream_sequence = 0  # Monotonic sequence for ordering
+
+         # Track background WebSocket broadcast tasks to prevent garbage collection
+         ws_broadcast_tasks: set[asyncio.Task] = set()
+
+         formatter = theme_dict = styles = agent_label = None
+         live_cm = nullcontext()
+         overflow_mode = self.stream_vertical_overflow
+
+         if not self.no_output:
+             # Import the patch function here to ensure it's applied
+             from flock.engines.dspy_engine import _ensure_live_crop_above
+
+             _ensure_live_crop_above()
+             (
+                 formatter,
+                 theme_dict,
+                 styles,
+                 agent_label,
+             ) = self.prepare_stream_formatter(agent)
+             initial_panel = formatter.format_result(
+                 display_data, agent_label, theme_dict, styles
+             )
+             live_cm = Live(
+                 initial_panel,
+                 console=console,
+                 refresh_per_second=4,
+                 transient=False,
+                 vertical_overflow=overflow_mode,
+             )
+
+         final_result: Any = None
+
+         with live_cm as live:
+
+             def _refresh_panel() -> None:
+                 if formatter is None or live is None:
+                     return
+                 live.update(
+                     formatter.format_result(
+                         display_data, agent_label, theme_dict, styles
+                     )
+                 )
+
+             async for value in stream_generator:
+                 try:
+                     from dspy.streaming import StatusMessage, StreamResponse
+                     from litellm import ModelResponseStream
+                 except Exception:
+                     StatusMessage = object  # type: ignore
+                     StreamResponse = object  # type: ignore
+                     ModelResponseStream = object  # type: ignore
+
+                 if isinstance(value, StatusMessage):
+                     token = getattr(value, "message", "")
+                     if token:
+                         stream_buffers[status_field].append(str(token) + "\n")
+                         display_data["status"] = "".join(stream_buffers[status_field])
+
+                     # Emit to WebSocket (non-blocking to prevent deadlock)
+                     if ws_broadcast and token:
+                         try:
+                             event = StreamingOutputEvent(
+                                 correlation_id=str(ctx.correlation_id)
+                                 if ctx and ctx.correlation_id
+                                 else "",
+                                 agent_name=agent.name,
+                                 run_id=ctx.task_id if ctx else "",
+                                 output_type="llm_token",
+                                 content=str(token + "\n"),
+                                 sequence=stream_sequence,
+                                 is_final=False,
+                                 artifact_id=str(
+                                     pre_generated_artifact_id
+                                 ),  # Phase 6: Track artifact for message streaming
+                                 artifact_type=artifact_type_name,  # Phase 6: Artifact type name
+                             )
+                             # Use create_task to avoid blocking the streaming loop
+                             task = asyncio.create_task(ws_broadcast(event))
+                             ws_broadcast_tasks.add(task)
+                             task.add_done_callback(ws_broadcast_tasks.discard)
+                             stream_sequence += 1
+                         except Exception as e:
+                             logger.warning(f"Failed to emit streaming event: {e}")
+                     else:
+                         logger.debug(
+                             "No WebSocket manager present for streaming event."
+                         )
+
+                     if formatter is not None:
+                         _refresh_panel()
+                     continue
+
+                 if isinstance(value, StreamResponse):
+                     token = getattr(value, "chunk", None)
+                     signature_field = getattr(value, "signature_field_name", None)
+                     if signature_field and signature_field != "description":
+                         # Update payload section - accumulate in "output" buffer
+                         buffer_key = f"_stream_{signature_field}"
+                         if token:
+                             stream_buffers[buffer_key].append(str(token))
+                             # Show streaming text in payload
+                             display_data["payload"]["_streaming"] = "".join(
+                                 stream_buffers[buffer_key]
+                             )
+
+                     # Emit to WebSocket (non-blocking to prevent deadlock)
+                     if ws_broadcast:
+                         logger.info(
+                             f"[STREAMING] Emitting StreamResponse token='{token}', sequence={stream_sequence}"
+                         )
+                         try:
+                             event = StreamingOutputEvent(
+                                 correlation_id=str(ctx.correlation_id)
+                                 if ctx and ctx.correlation_id
+                                 else "",
+                                 agent_name=agent.name,
+                                 run_id=ctx.task_id if ctx else "",
+                                 output_type="llm_token",
+                                 content=str(token),
+                                 sequence=stream_sequence,
+                                 is_final=False,
+                                 artifact_id=str(
+                                     pre_generated_artifact_id
+                                 ),  # Phase 6: Track artifact for message streaming
+                                 artifact_type=artifact_type_name,  # Phase 6: Artifact type name
+                             )
+                             # Use create_task to avoid blocking the streaming loop
+                             task = asyncio.create_task(ws_broadcast(event))
+                             ws_broadcast_tasks.add(task)
+                             task.add_done_callback(ws_broadcast_tasks.discard)
+                             stream_sequence += 1
+                         except Exception as e:
+                             logger.warning(
+                                 f"Failed to emit streaming event: {e}"
+                             )
+
+                     if formatter is not None:
+                         _refresh_panel()
+                     continue
+
+                 if isinstance(value, ModelResponseStream):
+                     chunk = value
+                     token = chunk.choices[0].delta.content or ""
+                     signature_field = getattr(value, "signature_field_name", None)
+
+                     if signature_field and signature_field != "description":
+                         # Update payload section - accumulate in buffer
+                         buffer_key = f"_stream_{signature_field}"
+                         if token:
+                             stream_buffers[buffer_key].append(str(token))
+                             # Show streaming text in payload
+                             display_data["payload"]["_streaming"] = "".join(
+                                 stream_buffers[buffer_key]
+                             )
+                     elif token:
+                         stream_buffers[status_field].append(str(token))
+                         display_data["status"] = "".join(stream_buffers[status_field])
+
+                     # Emit to WebSocket (non-blocking to prevent deadlock)
+                     if ws_broadcast and token:
+                         try:
+                             event = StreamingOutputEvent(
+                                 correlation_id=str(ctx.correlation_id)
+                                 if ctx and ctx.correlation_id
+                                 else "",
+                                 agent_name=agent.name,
+                                 run_id=ctx.task_id if ctx else "",
+                                 output_type="llm_token",
+                                 content=str(token),
+                                 sequence=stream_sequence,
+                                 is_final=False,
+                                 artifact_id=str(
+                                     pre_generated_artifact_id
+                                 ),  # Phase 6: Track artifact for message streaming
+                                 artifact_type=display_data[
+                                     "type"
+                                 ],  # Phase 6: Artifact type name from display_data
+                             )
+                             # Use create_task to avoid blocking the streaming loop
+                             task = asyncio.create_task(ws_broadcast(event))
+                             ws_broadcast_tasks.add(task)
+                             task.add_done_callback(ws_broadcast_tasks.discard)
+                             stream_sequence += 1
+                         except Exception as e:
+                             logger.warning(f"Failed to emit streaming event: {e}")
+
+                     if formatter is not None:
+                         _refresh_panel()
+                     continue
+
+                 if isinstance(value, dspy_mod.Prediction):
+                     final_result = value
+
+                     # Emit final streaming event (non-blocking to prevent deadlock)
+                     if ws_broadcast:
+                         try:
+                             event = StreamingOutputEvent(
+                                 correlation_id=str(ctx.correlation_id)
+                                 if ctx and ctx.correlation_id
+                                 else "",
+                                 agent_name=agent.name,
+                                 run_id=ctx.task_id if ctx else "",
+                                 output_type="log",
+                                 content="\nAmount of output tokens: "
+                                 + str(stream_sequence),
+                                 sequence=stream_sequence,
+                                 is_final=True,  # Mark as final
+                                 artifact_id=str(
+                                     pre_generated_artifact_id
+                                 ),  # Phase 6: Track artifact for message streaming
+                                 artifact_type=display_data[
+                                     "type"
+                                 ],  # Phase 6: Artifact type name
+                             )
+                             # Use create_task to avoid blocking the streaming loop
+                             task = asyncio.create_task(ws_broadcast(event))
+                             ws_broadcast_tasks.add(task)
+                             task.add_done_callback(ws_broadcast_tasks.discard)
+                             event = StreamingOutputEvent(
+                                 correlation_id=str(ctx.correlation_id)
+                                 if ctx and ctx.correlation_id
+                                 else "",
+                                 agent_name=agent.name,
+                                 run_id=ctx.task_id if ctx else "",
+                                 output_type="log",
+                                 content="--- End of output ---",
+                                 sequence=stream_sequence,
+                                 is_final=True,  # Mark as final
+                                 artifact_id=str(
+                                     pre_generated_artifact_id
+                                 ),  # Phase 6: Track artifact for message streaming
+                                 artifact_type=display_data[
+                                     "type"
+                                 ],  # Phase 6: Artifact type name
+                             )
+                             # Use create_task to avoid blocking the streaming loop
+                             task = asyncio.create_task(ws_broadcast(event))
+                             ws_broadcast_tasks.add(task)
+                             task.add_done_callback(ws_broadcast_tasks.discard)
+                         except Exception as e:
+                             logger.warning(f"Failed to emit final streaming event: {e}")
+
+                     if formatter is not None:
+                         # Update payload section with final values
+                         payload_data = OrderedDict()
+                         for field_name in signature_order:
+                             if field_name != "description" and hasattr(
+                                 final_result, field_name
+                             ):
+                                 field_value = getattr(final_result, field_name)
+
+                                 # Convert BaseModel instances to dicts for proper table rendering
+                                 if isinstance(field_value, list):
+                                     # Handle lists of BaseModel instances (fan-out/batch)
+                                     payload_data[field_name] = [
+                                         item.model_dump()
+                                         if isinstance(item, BaseModel)
+                                         else item
+                                         for item in field_value
+                                     ]
+                                 elif isinstance(field_value, BaseModel):
+                                     # Handle single BaseModel instance
+                                     payload_data[field_name] = field_value.model_dump()
+                                 else:
+                                     # Handle primitive types
+                                     payload_data[field_name] = field_value
+
+                         # Update all fields with actual values
+                         display_data["payload"].clear()
+                         display_data["payload"].update(payload_data)
+
+                         # Update timestamp
+                         from datetime import datetime
+
+                         display_data["created_at"] = datetime.now(UTC).isoformat()
+
+                         # Remove status field from display
+                         display_data.pop("status", None)
+                         _refresh_panel()
+
+         if final_result is None:
+             raise RuntimeError("Streaming did not yield a final prediction.")
+
+         # Return both the result and the display data for final ID update
+         return final_result, (formatter, display_data, theme_dict, styles, agent_label)
+
+     def prepare_stream_formatter(
+         self, agent: Any
+     ) -> tuple[Any, dict[str, Any], dict[str, Any], str]:
+         """Build formatter + theme metadata for streaming tables.
+
+         Args:
+             agent: Agent instance
+
+         Returns:
+             Tuple of (formatter, theme_dict, styles, agent_label)
+         """
+         import pathlib
+
+         # Import model from local context since we're in a separate module
+         from flock.engines.dspy_engine import DSPyEngine
+         from flock.logging.formatters.themed_formatter import (
+             ThemedAgentResultFormatter,
+             create_pygments_syntax_theme,
+             get_default_styles,
+             load_syntax_theme_from_file,
+             load_theme_from_file,
+         )
+
+         # Get themes directory relative to engine module
+         themes_dir = (
+             pathlib.Path(DSPyEngine.__module__.replace(".", "/")).parent.parent
+             / "themes"
+         )
+         # Fallback: use __file__ if module path doesn't work
+         if not themes_dir.exists():
+             import flock.engines.dspy_engine as engine_mod
+
+             themes_dir = (
+                 pathlib.Path(engine_mod.__file__).resolve().parents[1] / "themes"
+             )
+
+         theme_filename = self.theme
+         if not theme_filename.endswith(".toml"):
+             theme_filename = f"{theme_filename}.toml"
+         theme_path = themes_dir / theme_filename
+
+         try:
+             theme_dict = load_theme_from_file(theme_path)
+         except Exception:
+             fallback_path = themes_dir / "afterglow.toml"
+             theme_dict = load_theme_from_file(fallback_path)
+             theme_path = fallback_path
+
+         from flock.logging.formatters.themes import OutputTheme
+
+         formatter = ThemedAgentResultFormatter(theme=OutputTheme.afterglow)
+         styles = get_default_styles(theme_dict)
+         formatter.styles = styles
+
+         try:
+             syntax_theme = load_syntax_theme_from_file(theme_path)
+             formatter.syntax_style = create_pygments_syntax_theme(syntax_theme)
+         except Exception:
+             formatter.syntax_style = None
+
+         # Get model label from agent if available
+         model_label = getattr(agent, "engine", None)
+         if model_label and hasattr(model_label, "model"):
+             model_label = model_label.model or ""
+         else:
+             model_label = ""
+
+         agent_label = agent.name if not model_label else f"{agent.name} - {model_label}"
+
+         return formatter, theme_dict, styles, agent_label
+
+     def print_final_stream_display(
+         self,
+         stream_display_data: tuple[Any, OrderedDict, dict, dict, str],
+         artifact_id: str,
+         artifact,
+     ) -> None:
+         """Print the final streaming display with the real artifact ID.
+
+         Args:
+             stream_display_data: Tuple of (formatter, display_data, theme_dict, styles, agent_label)
+             artifact_id: Final artifact ID
+             artifact: Artifact instance with metadata
+         """
+         from rich.console import Console
+
+         formatter, display_data, theme_dict, styles, agent_label = stream_display_data
+
+         # Update display_data with the real artifact information
+         display_data["id"] = artifact_id
+         display_data["created_at"] = artifact.created_at.isoformat()
+
+         # Update all artifact metadata
+         display_data["correlation_id"] = (
+             str(artifact.correlation_id) if artifact.correlation_id else None
+         )
+         display_data["partition_key"] = artifact.partition_key
+         display_data["tags"] = (
+             "set()" if not artifact.tags else f"set({list(artifact.tags)})"
+         )
+
+         # Print the final panel
+         console = Console()
+         final_panel = formatter.format_result(
+             display_data, agent_label, theme_dict, styles
+         )
+         console.print(final_panel)
+
+
+ __all__ = ["DSPyStreamingExecutor"]
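
For orientation, a minimal sketch of how this extracted executor appears to be driven, based only on the signatures above. The DSPy program, signature, agent, context, artifact ID, and constructor argument values are placeholders; how dspy_engine.py actually constructs them and chooses between the Rich CLI path (execute_streaming) and the dashboard path (execute_streaming_websocket_only) is not part of this hunk.

    # Illustrative usage only; argument values are assumptions, not flock-core API guarantees.
    import dspy

    from flock.engines.dspy.streaming_executor import DSPyStreamingExecutor


    async def stream_once(agent, ctx, signature, payload: dict, artifact_id, output_group=None):
        executor = DSPyStreamingExecutor(
            status_output_field="status",         # assumed field name
            stream_vertical_overflow="ellipsis",  # any Rich Live overflow mode
            theme="afterglow",                    # theme used as the fallback above
            no_output=False,
        )
        program = dspy.Predict(signature)

        # Dashboard path: WebSocket-only streaming, no Rich Live context.
        prediction, _ = await executor.execute_streaming_websocket_only(
            dspy, program, signature,
            description="", payload=payload, agent=agent, ctx=ctx,
            pre_generated_artifact_id=artifact_id, output_group=output_group,
        )
        return prediction

The CLI path instead returns (prediction, stream_display_data); once the real artifact exists, print_final_stream_display(stream_display_data, artifact_id, artifact) re-renders the panel with the real artifact ID and timestamp, replacing the "streaming..." placeholder set during execution.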