flock-core 0.5.8-py3-none-any.whl → 0.5.10-py3-none-any.whl
This diff shows the content of publicly released package versions from a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in that registry.
Potentially problematic release: this version of flock-core might be problematic.
- flock/agent.py +149 -62
- flock/api/themes.py +6 -2
- flock/artifact_collector.py +6 -3
- flock/batch_accumulator.py +3 -1
- flock/cli.py +3 -1
- flock/components.py +45 -56
- flock/context_provider.py +531 -0
- flock/correlation_engine.py +8 -4
- flock/dashboard/collector.py +48 -29
- flock/dashboard/events.py +10 -4
- flock/dashboard/launcher.py +3 -1
- flock/dashboard/models/graph.py +9 -3
- flock/dashboard/service.py +143 -72
- flock/dashboard/websocket.py +17 -4
- flock/engines/dspy_engine.py +174 -98
- flock/engines/examples/simple_batch_engine.py +9 -3
- flock/examples.py +6 -2
- flock/frontend/src/services/indexeddb.test.ts +4 -4
- flock/frontend/src/services/indexeddb.ts +1 -1
- flock/helper/cli_helper.py +14 -1
- flock/logging/auto_trace.py +6 -1
- flock/logging/formatters/enum_builder.py +3 -1
- flock/logging/formatters/theme_builder.py +32 -17
- flock/logging/formatters/themed_formatter.py +38 -22
- flock/logging/logging.py +21 -7
- flock/logging/telemetry.py +9 -3
- flock/logging/telemetry_exporter/duckdb_exporter.py +27 -25
- flock/logging/trace_and_logged.py +14 -5
- flock/mcp/__init__.py +3 -6
- flock/mcp/client.py +49 -19
- flock/mcp/config.py +12 -6
- flock/mcp/manager.py +6 -2
- flock/mcp/servers/sse/flock_sse_server.py +9 -3
- flock/mcp/servers/streamable_http/flock_streamable_http_server.py +6 -2
- flock/mcp/tool.py +18 -6
- flock/mcp/types/handlers.py +3 -1
- flock/mcp/types/types.py +9 -3
- flock/orchestrator.py +204 -50
- flock/orchestrator_component.py +15 -5
- flock/patches/dspy_streaming_patch.py +12 -4
- flock/registry.py +9 -3
- flock/runtime.py +69 -18
- flock/service.py +19 -6
- flock/store.py +29 -10
- flock/subscription.py +6 -4
- flock/utilities.py +41 -13
- flock/utility/output_utility_component.py +31 -11
- {flock_core-0.5.8.dist-info → flock_core-0.5.10.dist-info}/METADATA +134 -4
- {flock_core-0.5.8.dist-info → flock_core-0.5.10.dist-info}/RECORD +52 -51
- {flock_core-0.5.8.dist-info → flock_core-0.5.10.dist-info}/WHEEL +0 -0
- {flock_core-0.5.8.dist-info → flock_core-0.5.10.dist-info}/entry_points.txt +0 -0
- {flock_core-0.5.8.dist-info → flock_core-0.5.10.dist-info}/licenses/LICENSE +0 -0
flock/dashboard/websocket.py
CHANGED
@@ -72,7 +72,11 @@ class WebSocketManager:
         logger.info(f"WebSocket client added. Total clients: {len(self.clients)}")
 
         # Start heartbeat task if enabled and not already running
-        if self.enable_heartbeat and self._heartbeat_task is None and not self._shutdown:
+        if (
+            self.enable_heartbeat
+            and self._heartbeat_task is None
+            and not self._shutdown
+        ):
             self._heartbeat_task = asyncio.create_task(self._heartbeat_loop())
 
     async def remove_client(self, websocket: WebSocket) -> None:
@@ -167,7 +171,9 @@ class WebSocketManager:
 
         # Send ping to all clients
         ping_tasks = []
-        for client in list(self.clients):  # Copy to avoid modification during iteration
+        for client in list(
+            self.clients
+        ):  # Copy to avoid modification during iteration
             ping_tasks.append(self._ping_client(client))
 
         # Execute pings concurrently
@@ -186,7 +192,10 @@ class WebSocketManager:
             client: WebSocket client to ping
         """
         try:
-            await client.send_json({"type": "ping", "timestamp": asyncio.get_event_loop().time()})
+            await client.send_json({
+                "type": "ping",
+                "timestamp": asyncio.get_event_loop().time(),
+            })
         except Exception as e:
             logger.warning(f"Failed to ping client: {e}")
             await self.remove_client(client)
@@ -197,7 +206,11 @@ class WebSocketManager:
         In production, heartbeat is disabled by default (enable_heartbeat=False).
         Only starts if enable_heartbeat=True.
         """
-        if self.enable_heartbeat and self._heartbeat_task is None and not self._shutdown:
+        if (
+            self.enable_heartbeat
+            and self._heartbeat_task is None
+            and not self._shutdown
+        ):
             self._heartbeat_task = asyncio.create_task(self._heartbeat_loop())
 
     async def shutdown(self) -> None:
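The websocket.py changes above are formatting-only rewraps; the condition they reformat is the guard that ensures at most one heartbeat task is ever started and none is started during shutdown. Below is a minimal sketch of that guard pattern in isolation, assuming a heartbeat_interval attribute and omitting the client bookkeeping shown elsewhere in the file:

import asyncio


class HeartbeatSketch:
    """Illustrative stand-in for the guarded heartbeat start in WebSocketManager."""

    def __init__(self, enable_heartbeat: bool = False, heartbeat_interval: float = 30.0) -> None:
        self.enable_heartbeat = enable_heartbeat
        self.heartbeat_interval = heartbeat_interval  # assumed attribute; not shown in the diff
        self._heartbeat_task: asyncio.Task | None = None
        self._shutdown = False

    def _maybe_start_heartbeat(self) -> None:
        # Same three-part guard as the reformatted lines: feature enabled,
        # no heartbeat task already running, and not shutting down.
        if (
            self.enable_heartbeat
            and self._heartbeat_task is None
            and not self._shutdown
        ):
            self._heartbeat_task = asyncio.create_task(self._heartbeat_loop())

    async def _heartbeat_loop(self) -> None:
        while not self._shutdown:
            await asyncio.sleep(self.heartbeat_interval)
            # Ping connected clients here (omitted in this sketch).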
flock/engines/dspy_engine.py
CHANGED
@@ -62,7 +62,9 @@ def _ensure_live_crop_above() -> None:
     # Extend the accepted literal at runtime so type checks don't block the new option.
     current_args = getattr(_lr.VerticalOverflowMethod, "__args__", ())
     if "crop_above" not in current_args:
-        _lr.VerticalOverflowMethod = _Literal["crop", "crop_above", "ellipsis", "visible"]  # type: ignore[assignment]
+        _lr.VerticalOverflowMethod = _Literal[
+            "crop", "crop_above", "ellipsis", "visible"
+        ]  # type: ignore[assignment]
 
     if getattr(_lr.LiveRender.__rich_console__, "_flock_crop_above", False):
         _live_patch_applied = True
@@ -134,11 +136,13 @@ class DSPyEngine(EngineComponent):
         default=False,
         description="Disable output from the underlying DSPy program.",
     )
-    stream_vertical_overflow: Literal["crop", "ellipsis", "crop_above", "visible"] =
-
-
-
-
+    stream_vertical_overflow: Literal["crop", "ellipsis", "crop_above", "visible"] = (
+        Field(
+            default="crop_above",
+            description=(
+                "Rich Live vertical overflow strategy; select how tall output is handled; 'crop_above' keeps the most recent rows visible."
+            ),
+        )
     )
     status_output_field: str = Field(
         default="_status_output",
@@ -153,7 +157,9 @@ class DSPyEngine(EngineComponent):
         description="Enable caching of DSPy program results",
     )
 
-    async def evaluate(self, agent, ctx, inputs: EvalInputs, output_group) -> EvalResult:  # type: ignore[override]
+    async def evaluate(
+        self, agent, ctx, inputs: EvalInputs, output_group
+    ) -> EvalResult:  # type: ignore[override]
         """Universal evaluation with auto-detection of batch and fan-out modes.
 
         This single method handles ALL evaluation scenarios by auto-detecting:
@@ -221,11 +227,18 @@ class DSPyEngine(EngineComponent):
                 for artifact in inputs.artifacts
             ]
         else:
-            validated_input = self._validate_input_payload(input_model, primary_artifact.payload)
+            validated_input = self._validate_input_payload(
+                input_model, primary_artifact.payload
+            )
         output_model = self._resolve_output_model(agent)
 
-        #
-
+        # Phase 8: Use pre-filtered conversation context from Context (security fix)
+        # Orchestrator evaluates context BEFORE creating Context, so engines just read ctx.artifacts
+        # This fixes Vulnerability #4: Engines can no longer query arbitrary data via ctx.store
+
+        # Filter out input artifacts to avoid duplication in context
+        context_history = ctx.artifacts if ctx else []
+
         has_context = bool(context_history) and self.should_use_context(inputs)
 
         # Generate signature with semantic field naming
@@ -280,27 +293,37 @@ class DSPyEngine(EngineComponent):
 
         # Detect if there's already an active Rich Live context
         should_stream = self.stream
-
-        if
-
-
-        if
-
-
-
-
-
+        # Phase 6+7 Security Fix: Use Agent class variables for streaming coordination
+        if ctx:
+            from flock.agent import Agent
+
+            # Check if dashboard mode (WebSocket broadcast is set)
+            is_dashboard = Agent._websocket_broadcast_global is not None
+            # if dashboard we always stream, streaming queue only for CLI output
+            if should_stream and not is_dashboard:
+                # Get current active streams count from Agent class variable (shared across all agents)
+                active_streams = Agent._streaming_counter
+
+                if active_streams > 0:
+                    should_stream = False  # Suppress - another agent streaming
                 else:
-
+                    Agent._streaming_counter = (
+                        active_streams + 1
+                    )  # Mark as streaming
 
         try:
             if should_stream:
                 # Choose streaming method based on dashboard mode
-
+                # Phase 6+7 Security Fix: Check dashboard mode via Agent class variable
+                from flock.agent import Agent
+
+                is_dashboard = (
+                    Agent._websocket_broadcast_global is not None if ctx else False
+                )
 
                 # DEBUG: Log routing decision
                 logger.info(
-                    f"[STREAMING ROUTER] agent={agent.name}, is_dashboard={is_dashboard}
+                    f"[STREAMING ROUTER] agent={agent.name}, is_dashboard={is_dashboard}"
                 )
 
                 if is_dashboard:
@@ -345,22 +368,23 @@ class DSPyEngine(EngineComponent):
                 if not self.no_output and ctx:
                     ctx.state["_flock_stream_live_active"] = True
             else:
-                orchestrator = getattr(ctx, "orchestrator", None) if ctx else None
-
                 raw_result = await self._execute_standard(
                     dspy_mod,
                     program,
                     description=sys_desc,
                     payload=execution_payload,
                 )
-
+                # Phase 6+7 Security Fix: Check streaming state from Agent class variable
+                from flock.agent import Agent
+
+                if ctx and Agent._streaming_counter > 0:
                     ctx.state["_flock_output_queued"] = True
         finally:
+            # Phase 6+7 Security Fix: Decrement counter using Agent class variable
            if should_stream and ctx:
-
-
-
-                orchestrator._active_streams = max(0, orchestrator._active_streams - 1)
+                from flock.agent import Agent
+
+                Agent._streaming_counter = max(0, Agent._streaming_counter - 1)
 
        # Extract semantic fields from Prediction
        normalized_output = self._extract_multi_output_payload(raw_result, output_group)
@@ -405,7 +429,9 @@ class DSPyEngine(EngineComponent):
        try:
            import dspy
        except Exception as exc:
-            raise NotImplementedError("DSPy is not installed or failed to import.") from exc
+            raise NotImplementedError(
+                "DSPy is not installed or failed to import."
+            ) from exc
        return dspy
 
    def _select_primary_artifact(self, artifacts: Sequence[Artifact]) -> Artifact:
@@ -474,7 +500,11 @@ class DSPyEngine(EngineComponent):
            Pluralized field name
        """
        # Simple English pluralization rules
-        if field_name.endswith("y") and len(field_name) > 1 and field_name[-2] not in "aeiou":
+        if (
+            field_name.endswith("y")
+            and len(field_name) > 1
+            and field_name[-2] not in "aeiou"
+        ):
            # story → stories (consonant + y)
            return field_name[:-1] + "ies"
        if field_name.endswith(("s", "x", "z", "ch", "sh")):
@@ -492,7 +522,11 @@ class DSPyEngine(EngineComponent):
        Returns:
            True if multi-output signature needed, False for single output (backward compat)
        """
-        if not output_group or not hasattr(output_group, "outputs") or not output_group.outputs:
+        if (
+            not output_group
+            or not hasattr(output_group, "outputs")
+            or not output_group.outputs
+        ):
            return False
 
        # Multiple different types → multi-output
@@ -542,9 +576,13 @@ class DSPyEngine(EngineComponent):
 
        signature = dspy_mod.Signature(fields)
 
-        instruction = description or "Produce a valid output that matches the 'output' schema."
+        instruction = (
+            description or "Produce a valid output that matches the 'output' schema."
+        )
        if has_context:
-            instruction += " Consider the conversation context provided to inform your response."
+            instruction += (
+                " Consider the conversation context provided to inform your response."
+            )
        if batched:
            instruction += (
                " The 'input' field will contain a list of items representing the batch; "
@@ -694,26 +732,25 @@ class DSPyEngine(EngineComponent):
        # 4. Build instruction
        description = self.instructions or agent.description
        instruction = (
-            description or f"Process input and generate {len(output_group.outputs)} outputs."
+            description
+            or f"Process input and generate {len(output_group.outputs)} outputs."
        )
 
        if has_context:
-            instruction += " Consider the conversation context provided to inform your response."
+            instruction += (
+                " Consider the conversation context provided to inform your response."
+            )
 
        # Add batching hint
        if batched:
-            instruction += (
-                " Process the batch of inputs coherently, generating outputs for each item."
-            )
+            instruction += " Process the batch of inputs coherently, generating outputs for each item."
 
        # Add semantic field names to instruction for clarity
        output_field_names = [
            name for name in fields.keys() if name not in {"description", "context"}
        ]
        if len(output_field_names) > 2:  # Multiple outputs
-            instruction += (
-                f" Generate ALL output fields as specified: {', '.join(output_field_names[1:])}."
-            )
+            instruction += f" Generate ALL output fields as specified: {', '.join(output_field_names[1:])}."
 
        # instruction += " Return only valid JSON."
 
@@ -784,7 +821,9 @@ class DSPyEngine(EngineComponent):
        else:
            # Single mode: use first (or only) artifact
            # For multi-input joins, we have one artifact per type
-            payload[field_name] = validated_payloads[0] if validated_payloads else {}
+            payload[field_name] = (
+                validated_payloads[0] if validated_payloads else {}
+            )
 
        return payload
 
@@ -840,7 +879,9 @@ class DSPyEngine(EngineComponent):
        tools_list = list(tools or [])
        try:
            if tools_list:
-                return dspy_mod.ReAct(signature, tools=tools_list, max_iters=self.max_tool_calls)
+                return dspy_mod.ReAct(
+                    signature, tools=tools_list, max_iters=self.max_tool_calls
+                )
            return dspy_mod.Predict(signature)
        except Exception:
            return dspy_mod.Predict(signature)
@@ -923,7 +964,9 @@ class DSPyEngine(EngineComponent):
        errors: list[str] = []
        for output in outputs or []:
            model_cls = output.spec.model
-            data = self._select_output_payload(payload, model_cls, output.spec.type_name)
+            data = self._select_output_payload(
+                payload, model_cls, output.spec.type_name
+            )
 
            # FAN-OUT: If count > 1, data should be a list and we create multiple artifacts
            if output.count > 1:
@@ -1050,18 +1093,17 @@ class DSPyEngine(EngineComponent):
        This method eliminates the Rich Live context that can cause deadlocks when
        combined with MCP tool execution and parallel agent streaming.
        """
-        logger.info(
-
-
-
-
-
-
-
-
-
-        if not ws_manager:
+        logger.info(
+            f"Agent {agent.name}: Starting WebSocket-only streaming (dashboard mode)"
+        )
+
+        # Get WebSocket broadcast function (security: wrapper prevents object traversal)
+        # Phase 6+7 Security Fix: Use broadcast wrapper from Agent class variable (prevents GOD MODE restoration)
+        from flock.agent import Agent
+
+        ws_broadcast = Agent._websocket_broadcast_global
+
+        if not ws_broadcast:
            logger.warning(
                f"Agent {agent.name}: No WebSocket manager, falling back to standard execution"
            )
@@ -1091,7 +1133,9 @@ class DSPyEngine(EngineComponent):
            if streaming_mod and hasattr(streaming_mod, "StreamListener"):
                for name, field in signature.output_fields.items():
                    if field.annotation is str:
-                        listeners.append(streaming_mod.StreamListener(signature_field_name=name))
+                        listeners.append(
+                            streaming_mod.StreamListener(signature_field_name=name)
+                        )
        except Exception:
            listeners = []
 
@@ -1115,7 +1159,9 @@ class DSPyEngine(EngineComponent):
            )
        else:
            # Old format: direct payload
-            stream_generator = streaming_task(description=description, input=payload, context=[])
+            stream_generator = streaming_task(
+                description=description, input=payload, context=[]
+            )
 
        # Process stream (WebSocket only, no Rich display)
        final_result = None
@@ -1152,7 +1198,7 @@ class DSPyEngine(EngineComponent):
                            artifact_type=artifact_type_name,
                        )
                        # Fire-and-forget to avoid blocking DSPy's streaming loop
-                        task = asyncio.create_task(
+                        task = asyncio.create_task(ws_broadcast(event))
                        ws_broadcast_tasks.add(task)
                        task.add_done_callback(ws_broadcast_tasks.discard)
                        stream_sequence += 1
@@ -1177,7 +1223,7 @@ class DSPyEngine(EngineComponent):
                            artifact_type=artifact_type_name,
                        )
                        # Fire-and-forget to avoid blocking DSPy's streaming loop
-                        task = asyncio.create_task(
+                        task = asyncio.create_task(ws_broadcast(event))
                        ws_broadcast_tasks.add(task)
                        task.add_done_callback(ws_broadcast_tasks.discard)
                        stream_sequence += 1
@@ -1203,7 +1249,7 @@ class DSPyEngine(EngineComponent):
                            artifact_type=artifact_type_name,
                        )
                        # Fire-and-forget to avoid blocking DSPy's streaming loop
-                        task = asyncio.create_task(
+                        task = asyncio.create_task(ws_broadcast(event))
                        ws_broadcast_tasks.add(task)
                        task.add_done_callback(ws_broadcast_tasks.discard)
                        stream_sequence += 1
@@ -1228,7 +1274,7 @@ class DSPyEngine(EngineComponent):
                            artifact_type=artifact_type_name,
                        )
                        # Fire-and-forget to avoid blocking DSPy's streaming loop
-                        task = asyncio.create_task(
+                        task = asyncio.create_task(ws_broadcast(event))
                        ws_broadcast_tasks.add(task)
                        task.add_done_callback(ws_broadcast_tasks.discard)
 
@@ -1246,16 +1292,20 @@ class DSPyEngine(EngineComponent):
                        artifact_type=artifact_type_name,
                    )
                    # Fire-and-forget to avoid blocking DSPy's streaming loop
-                    task = asyncio.create_task(
+                    task = asyncio.create_task(ws_broadcast(event))
                    ws_broadcast_tasks.add(task)
                    task.add_done_callback(ws_broadcast_tasks.discard)
        except Exception as e:
            logger.warning(f"Failed to emit final streaming event: {e}")
 
        if final_result is None:
-            raise RuntimeError(
+            raise RuntimeError(
+                f"Agent {agent.name}: Streaming did not yield a final prediction"
+            )
 
-        logger.info(
+        logger.info(
+            f"Agent {agent.name}: WebSocket streaming completed ({stream_sequence} tokens)"
+        )
        return final_result, None
 
    async def _execute_streaming(
@@ -1277,14 +1327,11 @@ class DSPyEngine(EngineComponent):
 
        console = Console()
 
-        # Get
-
-
-
-
-        collector = getattr(orchestrator, "_dashboard_collector", None)
-        if collector:
-            ws_manager = getattr(collector, "_websocket_manager", None)
+        # Get WebSocket broadcast function (security: wrapper prevents object traversal)
+        # Phase 6+7 Security Fix: Use broadcast wrapper from Agent class variable (prevents GOD MODE restoration)
+        from flock.agent import Agent
+
+        ws_broadcast = Agent._websocket_broadcast_global
 
        # Prepare stream listeners for output field
        listeners = []
@@ -1293,7 +1340,9 @@ class DSPyEngine(EngineComponent):
            if streaming_mod and hasattr(streaming_mod, "StreamListener"):
                for name, field in signature.output_fields.items():
                    if field.annotation is str:
-                        listeners.append(
+                        listeners.append(
+                            streaming_mod.StreamListener(signature_field_name=name)
+                        )
        except Exception:
            listeners = []
 
@@ -1316,7 +1365,9 @@ class DSPyEngine(EngineComponent):
            )
        else:
            # Old format: direct payload
-            stream_generator = streaming_task(
+            stream_generator = streaming_task(
+                description=description, input=payload, context=[]
+            )
 
        signature_order = []
        status_field = self.status_output_field
@@ -1386,7 +1437,9 @@ class DSPyEngine(EngineComponent):
            styles,
            agent_label,
        ) = self._prepare_stream_formatter(agent)
-        initial_panel = formatter.format_result(display_data, agent_label, theme_dict, styles)
+        initial_panel = formatter.format_result(
+            display_data, agent_label, theme_dict, styles
+        )
        live_cm = Live(
            initial_panel,
            console=console,
@@ -1402,7 +1455,11 @@ class DSPyEngine(EngineComponent):
        def _refresh_panel() -> None:
            if formatter is None or live is None:
                return
-            live.update(formatter.format_result(display_data, agent_label, theme_dict, styles))
+            live.update(
+                formatter.format_result(
+                    display_data, agent_label, theme_dict, styles
+                )
+            )
 
        async for value in stream_generator:
            try:
@@ -1420,7 +1477,7 @@ class DSPyEngine(EngineComponent):
                        display_data["status"] = "".join(stream_buffers[status_field])
 
                    # Emit to WebSocket (non-blocking to prevent deadlock)
-                    if
+                    if ws_broadcast and token:
                        try:
                            event = StreamingOutputEvent(
                                correlation_id=str(ctx.correlation_id)
@@ -1438,14 +1495,16 @@ class DSPyEngine(EngineComponent):
                                artifact_type=artifact_type_name,  # Phase 6: Artifact type name
                            )
                            # Use create_task to avoid blocking the streaming loop
-                            task = asyncio.create_task(
+                            task = asyncio.create_task(ws_broadcast(event))
                            ws_broadcast_tasks.add(task)
                            task.add_done_callback(ws_broadcast_tasks.discard)
                            stream_sequence += 1
                        except Exception as e:
                            logger.warning(f"Failed to emit streaming event: {e}")
                    else:
-                        logger.debug(
+                        logger.debug(
+                            "No WebSocket manager present for streaming event."
+                        )
 
                    if formatter is not None:
                        _refresh_panel()
@@ -1465,7 +1524,7 @@ class DSPyEngine(EngineComponent):
                    )
 
                    # Emit to WebSocket (non-blocking to prevent deadlock)
-                    if
+                    if ws_broadcast:
                        logger.info(
                            f"[STREAMING] Emitting StreamResponse token='{token}', sequence={stream_sequence}"
                        )
@@ -1486,12 +1545,14 @@ class DSPyEngine(EngineComponent):
                                artifact_type=artifact_type_name,  # Phase 6: Artifact type name
                            )
                            # Use create_task to avoid blocking the streaming loop
-                            task = asyncio.create_task(
+                            task = asyncio.create_task(ws_broadcast(event))
                            ws_broadcast_tasks.add(task)
                            task.add_done_callback(ws_broadcast_tasks.discard)
                            stream_sequence += 1
                        except Exception as e:
-                            logger.warning(
+                            logger.warning(
+                                f"Failed to emit streaming event: {e}"
+                            )
 
                    if formatter is not None:
                        _refresh_panel()
@@ -1516,7 +1577,7 @@ class DSPyEngine(EngineComponent):
                        display_data["status"] = "".join(stream_buffers[status_field])
 
                    # Emit to WebSocket (non-blocking to prevent deadlock)
-                    if
+                    if ws_broadcast and token:
                        try:
                            event = StreamingOutputEvent(
                                correlation_id=str(ctx.correlation_id)
@@ -1536,7 +1597,7 @@ class DSPyEngine(EngineComponent):
                                ],  # Phase 6: Artifact type name from display_data
                            )
                            # Use create_task to avoid blocking the streaming loop
-                            task = asyncio.create_task(
+                            task = asyncio.create_task(ws_broadcast(event))
                            ws_broadcast_tasks.add(task)
                            task.add_done_callback(ws_broadcast_tasks.discard)
                            stream_sequence += 1
@@ -1551,7 +1612,7 @@ class DSPyEngine(EngineComponent):
                final_result = value
 
        # Emit final streaming event (non-blocking to prevent deadlock)
-        if
+        if ws_broadcast:
            try:
                event = StreamingOutputEvent(
                    correlation_id=str(ctx.correlation_id)
@@ -1560,16 +1621,19 @@ class DSPyEngine(EngineComponent):
                    agent_name=agent.name,
                    run_id=ctx.task_id if ctx else "",
                    output_type="log",
-                    content="\nAmount of output tokens: "
+                    content="\nAmount of output tokens: "
+                    + str(stream_sequence),
                    sequence=stream_sequence,
                    is_final=True,  # Mark as final
                    artifact_id=str(
                        pre_generated_artifact_id
                    ),  # Phase 6: Track artifact for message streaming
-                    artifact_type=display_data["type"],  # Phase 6: Artifact type name
+                    artifact_type=display_data[
+                        "type"
+                    ],  # Phase 6: Artifact type name
                )
                # Use create_task to avoid blocking the streaming loop
-                task = asyncio.create_task(
+                task = asyncio.create_task(ws_broadcast(event))
                ws_broadcast_tasks.add(task)
                task.add_done_callback(ws_broadcast_tasks.discard)
                event = StreamingOutputEvent(
@@ -1585,10 +1649,12 @@ class DSPyEngine(EngineComponent):
                    artifact_id=str(
                        pre_generated_artifact_id
                    ),  # Phase 6: Track artifact for message streaming
-                    artifact_type=display_data["type"],  # Phase 6: Artifact type name
+                    artifact_type=display_data[
+                        "type"
+                    ],  # Phase 6: Artifact type name
                )
                # Use create_task to avoid blocking the streaming loop
-                task = asyncio.create_task(
+                task = asyncio.create_task(ws_broadcast(event))
                ws_broadcast_tasks.add(task)
                task.add_done_callback(ws_broadcast_tasks.discard)
            except Exception as e:
@@ -1598,14 +1664,18 @@ class DSPyEngine(EngineComponent):
        # Update payload section with final values
        payload_data = OrderedDict()
        for field_name in signature_order:
-            if field_name != "description" and hasattr(final_result, field_name):
+            if field_name != "description" and hasattr(
+                final_result, field_name
+            ):
                field_value = getattr(final_result, field_name)
 
                # Convert BaseModel instances to dicts for proper table rendering
                if isinstance(field_value, list):
                    # Handle lists of BaseModel instances (fan-out/batch)
                    payload_data[field_name] = [
-                        item.model_dump() if isinstance(item, BaseModel) else item
+                        item.model_dump()
+                        if isinstance(item, BaseModel)
+                        else item
                        for item in field_value
                    ]
                elif isinstance(field_value, BaseModel):
@@ -1698,11 +1768,15 @@ class DSPyEngine(EngineComponent):
            str(artifact.correlation_id) if artifact.correlation_id else None
        )
        display_data["partition_key"] = artifact.partition_key
-        display_data["tags"] =
+        display_data["tags"] = (
+            "set()" if not artifact.tags else f"set({list(artifact.tags)})"
+        )
 
        # Print the final panel
        console = Console()
-        final_panel = formatter.format_result(display_data, agent_label, theme_dict, styles)
+        final_panel = formatter.format_result(
+            display_data, agent_label, theme_dict, styles
+        )
        console.print(final_panel)
 
 
@@ -1714,7 +1788,9 @@ _apply_live_patch_on_import()
 
 # Apply the DSPy streaming patch to fix deadlocks with MCP tools
 try:
-    from flock.patches.dspy_streaming_patch import apply_patch as apply_dspy_streaming_patch
+    from flock.patches.dspy_streaming_patch import (
+        apply_patch as apply_dspy_streaming_patch,
+    )
 
     apply_dspy_streaming_patch()
 except Exception: