flock-core 0.5.0b63-py3-none-any.whl → 0.5.0b70-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- flock/agent.py +205 -27
- flock/cli.py +74 -2
- flock/dashboard/websocket.py +13 -2
- flock/engines/dspy_engine.py +70 -13
- flock/examples.py +4 -1
- flock/frontend/README.md +15 -1
- flock/frontend/package-lock.json +11 -21
- flock/frontend/package.json +1 -1
- flock/frontend/src/App.tsx +74 -6
- flock/frontend/src/__tests__/e2e/critical-scenarios.test.tsx +4 -5
- flock/frontend/src/__tests__/integration/filtering-e2e.test.tsx +7 -3
- flock/frontend/src/components/filters/ArtifactTypeFilter.tsx +21 -0
- flock/frontend/src/components/filters/FilterFlyout.module.css +104 -0
- flock/frontend/src/components/filters/FilterFlyout.tsx +80 -0
- flock/frontend/src/components/filters/FilterPills.module.css +186 -45
- flock/frontend/src/components/filters/FilterPills.test.tsx +115 -99
- flock/frontend/src/components/filters/FilterPills.tsx +120 -44
- flock/frontend/src/components/filters/ProducerFilter.tsx +21 -0
- flock/frontend/src/components/filters/SavedFiltersControl.module.css +60 -0
- flock/frontend/src/components/filters/SavedFiltersControl.test.tsx +158 -0
- flock/frontend/src/components/filters/SavedFiltersControl.tsx +159 -0
- flock/frontend/src/components/filters/TagFilter.tsx +21 -0
- flock/frontend/src/components/filters/TimeRangeFilter.module.css +24 -0
- flock/frontend/src/components/filters/TimeRangeFilter.tsx +6 -1
- flock/frontend/src/components/filters/VisibilityFilter.tsx +21 -0
- flock/frontend/src/components/graph/GraphCanvas.tsx +24 -0
- flock/frontend/src/components/layout/DashboardLayout.css +13 -0
- flock/frontend/src/components/layout/DashboardLayout.tsx +8 -24
- flock/frontend/src/components/modules/HistoricalArtifactsModule.module.css +288 -0
- flock/frontend/src/components/modules/HistoricalArtifactsModule.tsx +460 -0
- flock/frontend/src/components/modules/HistoricalArtifactsModuleWrapper.tsx +13 -0
- flock/frontend/src/components/modules/ModuleRegistry.ts +7 -1
- flock/frontend/src/components/modules/registerModules.ts +9 -10
- flock/frontend/src/hooks/useModules.ts +11 -1
- flock/frontend/src/services/api.ts +140 -0
- flock/frontend/src/services/indexeddb.ts +56 -2
- flock/frontend/src/services/websocket.ts +129 -0
- flock/frontend/src/store/filterStore.test.ts +105 -185
- flock/frontend/src/store/filterStore.ts +173 -26
- flock/frontend/src/store/graphStore.test.ts +19 -0
- flock/frontend/src/store/graphStore.ts +166 -27
- flock/frontend/src/types/filters.ts +34 -1
- flock/frontend/src/types/graph.ts +7 -0
- flock/frontend/src/utils/artifacts.ts +24 -0
- flock/mcp/client.py +25 -1
- flock/mcp/config.py +1 -10
- flock/mcp/manager.py +34 -3
- flock/mcp/types/callbacks.py +4 -1
- flock/orchestrator.py +56 -5
- flock/service.py +146 -9
- flock/store.py +971 -24
- {flock_core-0.5.0b63.dist-info → flock_core-0.5.0b70.dist-info}/METADATA +27 -1
- {flock_core-0.5.0b63.dist-info → flock_core-0.5.0b70.dist-info}/RECORD +56 -49
- flock/frontend/src/components/filters/FilterBar.module.css +0 -29
- flock/frontend/src/components/filters/FilterBar.test.tsx +0 -133
- flock/frontend/src/components/filters/FilterBar.tsx +0 -33
- flock/frontend/src/components/modules/EventLogModule.test.tsx +0 -401
- flock/frontend/src/components/modules/EventLogModule.tsx +0 -396
- flock/frontend/src/components/modules/EventLogModuleWrapper.tsx +0 -17
- {flock_core-0.5.0b63.dist-info → flock_core-0.5.0b70.dist-info}/WHEEL +0 -0
- {flock_core-0.5.0b63.dist-info → flock_core-0.5.0b70.dist-info}/entry_points.txt +0 -0
- {flock_core-0.5.0b63.dist-info → flock_core-0.5.0b70.dist-info}/licenses/LICENSE +0 -0
flock/agent.py
CHANGED
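The hunks below add a typed MCPServerConfig, per-server mount points, and a tool whitelist to Agent and AgentBuilder.with_mcps(), plus a deprecated mount() shim. As a quick orientation, a hedged sketch of the builder call: with_mcps() and the config shape are taken from the diff below, while the orchestrator setup and server names are illustrative assumptions.

    from flock.orchestrator import Flock

    # assumes MCP servers "filesystem" and "github" are already registered on the orchestrator
    flock = Flock()
    agent = (
        flock.agent("file_reader")
        # new format: restrict the filesystem server to one root and one tool
        .with_mcps({
            "filesystem": {"roots": ["/workspace/data"], "tool_whitelist": ["read_file"]},
            "github": {},  # no restrictions
        })
    )

The older forms remain accepted: a plain list of server names, or a dict mapping a server name to a list of mount paths.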
@@ -5,7 +5,7 @@ from __future__ import annotations
 import asyncio
 import os
 from dataclasses import dataclass
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, TypedDict

 from pydantic import BaseModel

@@ -27,6 +27,38 @@ if TYPE_CHECKING:  # pragma: no cover - type hints only
     from flock.orchestrator import Flock


+class MCPServerConfig(TypedDict, total=False):
+    """Configuration for MCP server assignment to an agent.
+
+    All fields are optional. If omitted, no restrictions apply.
+
+    Attributes:
+        roots: Filesystem paths this server can access.
+            Empty list or omitted = no mount restrictions.
+        tool_whitelist: Tool names the agent can use from this server.
+            Empty list or omitted = all tools available.
+
+    Examples:
+        >>> # No restrictions
+        >>> config: MCPServerConfig = {}
+
+        >>> # Mount restrictions only
+        >>> config: MCPServerConfig = {"roots": ["/workspace/data"]}
+
+        >>> # Tool whitelist only
+        >>> config: MCPServerConfig = {"tool_whitelist": ["read_file", "write_file"]}
+
+        >>> # Both restrictions
+        >>> config: MCPServerConfig = {
+        ...     "roots": ["/workspace/data"],
+        ...     "tool_whitelist": ["read_file"]
+        ... }
+    """
+
+    roots: list[str]
+    tool_whitelist: list[str]
+
+
 @dataclass
 class AgentOutput:
     spec: ArtifactSpec
@@ -67,7 +99,7 @@ class Agent(metaclass=AutoTracedMeta):
         self.engines: list[EngineComponent] = []
         self.best_of_n: int = 1
         self.best_of_score: Callable[[EvalResult], float] | None = None
-        self.max_concurrency: int =
+        self.max_concurrency: int = 2
         self._semaphore = asyncio.Semaphore(self.max_concurrency)
         self.calls_func: Callable[..., Any] | None = None
         self.tools: set[Callable[..., Any]] = set()
@@ -77,6 +109,9 @@ class Agent(metaclass=AutoTracedMeta):
         self.prevent_self_trigger: bool = True  # T065: Prevent infinite feedback loops
         # MCP integration
         self.mcp_server_names: set[str] = set()
+        self.mcp_mount_points: list[str] = []  # Deprecated: Use mcp_server_mounts instead
+        self.mcp_server_mounts: dict[str, list[str]] = {}  # Server-specific mount points
+        self.tool_whitelist: list[str] | None = None

     @property
     def identity(self) -> AgentIdentity:
@@ -137,15 +172,30 @@ class Agent(metaclass=AutoTracedMeta):
         # Get the MCP manager from orchestrator
         manager = self._orchestrator.get_mcp_manager()

-        # Import tool wrapper
-        …
         # Fetch tools from all assigned servers
         tools_dict = await manager.get_tools_for_agent(
             agent_id=self.name,
             run_id=ctx.task_id,
             server_names=self.mcp_server_names,
+            server_mounts=self.mcp_server_mounts,  # Pass server-specific mounts
         )

+        # Whitelisting logic
+        tool_whitelist = self.tool_whitelist
+        if (
+            tool_whitelist is not None
+            and isinstance(tool_whitelist, list)
+            and len(tool_whitelist) > 0
+        ):
+            filtered_tools: dict[str, Any] = {}
+            for tool_key, tool_entry in tools_dict.items():
+                if isinstance(tool_entry, dict):
+                    original_name = tool_entry.get("original_name", None)
+                    if original_name is not None and original_name in tool_whitelist:
+                        filtered_tools[tool_key] = tool_entry
+
+            tools_dict = filtered_tools
+
         # Convert to DSPy tool callables
         dspy_tools = []
         for namespaced_name, tool_info in tools_dict.items():
@@ -630,30 +680,103 @@ class AgentBuilder:
         self._agent.tools.update(funcs)
         return self

-    def with_mcps(
-        … (remaining removed lines not captured in this diff view)
+    def with_mcps(
+        self,
+        servers: (
+            Iterable[str]
+            | dict[str, MCPServerConfig | list[str]]  # Support both new and old format
+            | list[str | dict[str, MCPServerConfig | list[str]]]
+        ),
+    ) -> AgentBuilder:
+        """Assign MCP servers to this agent with optional server-specific mount points.
+
+        Architecture Decision: AD001 - Two-Level Architecture
+        Agents reference servers registered at orchestrator level.
+
+        Args:
+            servers: One of:
+                - List of server names (strings) - no specific mounts
+                - Dict mapping server names to MCPServerConfig or list[str] (backward compatible)
+                - Mixed list of strings and dicts for flexibility
+
+        Returns:
+            self for method chaining
+
+        Raises:
+            ValueError: If any server name is not registered with orchestrator
+
+        Examples:
+            >>> # Simple: no mount restrictions
+            >>> agent.with_mcps(["filesystem", "github"])
+
+            >>> # New format: Server-specific config with roots and tool whitelist
+            >>> agent.with_mcps({
+            ...     "filesystem": {"roots": ["/workspace/dir/data"], "tool_whitelist": ["read_file"]},
+            ...     "github": {}  # No restrictions for github
+            ... })
+
+            >>> # Old format: Direct list (backward compatible)
+            >>> agent.with_mcps({
+            ...     "filesystem": ["/workspace/dir/data"],  # Old format still works
+            ... })
+
+            >>> # Mixed: backward compatible
+            >>> agent.with_mcps([
+            ...     "github",  # No mounts
+            ...     {"filesystem": {"roots": ["mount1", "mount2"]}}
+            ... ])
         """
-        #
-        server_set = set(
+        # Parse input into server_names and mounts
+        server_set: set[str] = set()
+        server_mounts: dict[str, list[str]] = {}
+        whitelist = None
+
+        if isinstance(servers, dict):
+            # Dict format: supports both old and new formats
+            # Old: {"server": ["/path1", "/path2"]}
+            # New: {"server": {"roots": ["/path1"], "tool_whitelist": ["tool1"]}}
+            for server_name, server_config in servers.items():
+                server_set.add(server_name)
+
+                # Check if it's the old format (direct list) or new format (MCPServerConfig dict)
+                if isinstance(server_config, list):
+                    # Old format: direct list of paths (backward compatibility)
+                    if len(server_config) > 0:
+                        server_mounts[server_name] = list(server_config)
+                elif isinstance(server_config, dict):
+                    # New format: MCPServerConfig with optional roots and tool_whitelist
+                    mounts = server_config.get("roots", None)
+                    if mounts is not None and isinstance(mounts, list) and len(mounts) > 0:
+                        server_mounts[server_name] = list(mounts)
+
+                    config_whitelist = server_config.get("tool_whitelist", None)
+                    if (
+                        config_whitelist is not None
+                        and isinstance(config_whitelist, list)
+                        and len(config_whitelist) > 0
+                    ):
+                        whitelist = config_whitelist
+        elif isinstance(servers, list):
+            # List format: can be mixed
+            for item in servers:
+                if isinstance(item, str):
+                    # Simple server name
+                    server_set.add(item)
+                elif isinstance(item, dict):
+                    # Dict with mounts
+                    for server_name, mounts in item.items():
+                        server_set.add(server_name)
+                        if mounts:
+                            server_mounts[server_name] = list(mounts)
+                else:
+                    raise TypeError(
+                        f"Invalid server specification: {item}. "
+                        f"Expected string or dict, got {type(item).__name__}"
+                    )
+        else:
+            # Assume it's an iterable of strings (backward compatibility)
+            server_set = set(servers)

         # Validate all servers exist in orchestrator
         registered_servers = set(self._orchestrator._mcp_configs.keys())
@@ -669,6 +792,61 @@ class AgentBuilder:

         # Store in agent
         self._agent.mcp_server_names = server_set
+        self._agent.mcp_server_mounts = server_mounts
+        self._agent.tool_whitelist = whitelist
+
+        return self
+
+    def mount(self, paths: str | list[str], *, validate: bool = False) -> AgentBuilder:
+        """Mount agent in specific directories for MCP root access.
+
+        .. deprecated:: 0.2.0
+            Use `.with_mcps({"server_name": ["/path"]})` instead for server-specific mounts.
+            This method applies mounts globally to all MCP servers.
+
+        This sets the filesystem roots that MCP servers will operate under for this agent.
+        Paths are cumulative across multiple calls.
+
+        Args:
+            paths: Single path or list of paths to mount
+            validate: If True, validate that paths exist (default: False)
+
+        Returns:
+            AgentBuilder for method chaining
+
+        Example:
+            >>> # Old way (deprecated)
+            >>> agent.with_mcps(["filesystem"]).mount("/workspace/src")
+            >>>
+            >>> # New way (recommended)
+            >>> agent.with_mcps({"filesystem": ["/workspace/src"]})
+        """
+        import warnings
+
+        warnings.warn(
+            "Agent.mount() is deprecated. Use .with_mcps({'server': ['/path']}) "
+            "for server-specific mounts instead.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+
+        if isinstance(paths, str):
+            paths = [paths]
+        if validate:
+            from pathlib import Path
+
+            for path in paths:
+                if not Path(path).exists():
+                    raise ValueError(f"Mount path does not exist: {path}")
+
+        # Add to agent's mount points (cumulative) - for backward compatibility
+        self._agent.mcp_mount_points.extend(paths)
+
+        # Also add to all configured servers for backward compatibility
+        for server_name in self._agent.mcp_server_names:
+            if server_name not in self._agent.mcp_server_mounts:
+                self._agent.mcp_server_mounts[server_name] = []
+            self._agent.mcp_server_mounts[server_name].extend(paths)

         return self
flock/cli.py
CHANGED
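The hunks below wire a SQLite-backed blackboard store into the CLI: serve gains a --sqlite-db option, and two new commands (init-sqlite-store, sqlite-maintenance) manage the store. A hedged sketch of driving the new commands through typer's test runner; the command names come from the diff, while the dashed option spellings are typer's defaults and assumed here.

    from typer.testing import CliRunner

    from flock.cli import app

    runner = CliRunner()

    # create the schema in a fresh database file
    result = runner.invoke(app, ["init-sqlite-store", "blackboard.db"])
    print(result.output)

    # prune artifacts older than a cut-off, then compact the file
    result = runner.invoke(
        app,
        ["sqlite-maintenance", "blackboard.db", "--delete-before", "2025-01-01T00:00:00", "--vacuum"],
    )
    print(result.output)

From a shell, the same commands should be reachable through the package's console entry point, with --sqlite-db on serve persisting artifacts across restarts.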
@@ -3,14 +3,17 @@
 from __future__ import annotations

 import asyncio
+from datetime import datetime

 import typer
 from rich.console import Console
 from rich.table import Table
+from typer.models import OptionInfo

 # Lazy import: only import examples when CLI commands are invoked
 # This prevents polluting type_registry on every package import
 from flock.service import BlackboardHTTPService
+from flock.store import SQLiteBlackboardStore


 app = typer.Typer(help="Blackboard Agents CLI")
@@ -58,16 +61,85 @@ def list_agents() -> None:


 @app.command()
-def serve(
+def serve(
+    host: str = "127.0.0.1",
+    port: int = 8000,
+    sqlite_db: str | None = typer.Option(None, help="Path to SQLite blackboard store"),
+) -> None:
     """Run the HTTP control plane bound to the demo orchestrator."""
     from flock.examples import create_demo_orchestrator

-    …
+    if isinstance(sqlite_db, OptionInfo):  # Allow direct invocation in tests
+        sqlite_db = sqlite_db.default
+
+    store = None
+    if sqlite_db is not None:
+        sqlite_store = SQLiteBlackboardStore(sqlite_db)
+
+        async def _prepare() -> SQLiteBlackboardStore:
+            await sqlite_store.ensure_schema()
+            return sqlite_store
+
+        store = asyncio.run(_prepare())
+
+    orchestrator, _ = create_demo_orchestrator(store=store)
     service = BlackboardHTTPService(orchestrator)
     service.run(host=host, port=port)


+@app.command("init-sqlite-store")
+def init_sqlite_store(
+    db_path: str = typer.Argument(..., help="Path to SQLite blackboard database"),
+) -> None:
+    """Initialise the SQLite store schema."""
+
+    store = SQLiteBlackboardStore(db_path)
+
+    async def _init() -> None:
+        await store.ensure_schema()
+        await store.close()
+
+    asyncio.run(_init())
+    console.print(f"[green]Initialised SQLite blackboard at {db_path}[/green]")
+
+
+@app.command("sqlite-maintenance")
+def sqlite_maintenance(
+    db_path: str = typer.Argument(..., help="Path to SQLite blackboard database"),
+    delete_before: str | None = typer.Option(
+        None, help="ISO timestamp; delete artifacts before this time"
+    ),
+    vacuum: bool = typer.Option(False, help="Run VACUUM after maintenance"),
+) -> None:
+    """Perform maintenance tasks for the SQLite store."""
+
+    store = SQLiteBlackboardStore(db_path)
+
+    async def _maintain() -> tuple[int, bool]:
+        await store.ensure_schema()
+        deleted = 0
+        if delete_before is not None:
+            try:
+                before_dt = datetime.fromisoformat(delete_before)
+            except ValueError as exc:  # pragma: no cover - Typer handles but defensive
+                raise typer.BadParameter(f"Invalid ISO timestamp: {delete_before}") from exc
+            deleted = await store.delete_before(before_dt)
+        if vacuum:
+            await store.vacuum()
+        await store.close()
+        return deleted, vacuum
+
+    deleted, vacuum_run = asyncio.run(_maintain())
+    console.print(
+        f"[yellow]Deleted {deleted} artifacts[/yellow]"
+        if delete_before is not None
+        else "[yellow]No deletions requested[/yellow]"
+    )
+    if vacuum_run:
+        console.print("[yellow]VACUUM completed[/yellow]")
+
+
 def main() -> None:
     app()
flock/dashboard/websocket.py
CHANGED
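The hunk below guards the broadcast loop against slow WebSocket clients: each send is wrapped in asyncio.wait_for() with a 500 ms timeout, and clients that time out under backpressure are dropped alongside clients whose send raised. A minimal standalone sketch of the same pattern, using a hypothetical client object with a send_text() coroutine (mirroring the FastAPI WebSocket API used in the diff):

    import asyncio


    async def broadcast(clients: list, message: str, timeout: float = 0.5) -> list:
        """Send message to every client; return the clients that should be dropped."""
        tasks = [asyncio.wait_for(c.send_text(message), timeout=timeout) for c in clients]
        results = await asyncio.gather(*tasks, return_exceptions=True)

        failed = []
        for client, result in zip(clients, results, strict=False):
            if isinstance(result, asyncio.TimeoutError):
                failed.append(client)  # backpressure: send buffer stayed full
            elif isinstance(result, Exception):
                failed.append(client)  # disconnects and other send errors
        return failed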
@@ -124,15 +124,26 @@ class WebSocketManager:
         # Broadcast to all clients concurrently
         # Use return_exceptions=True to handle client failures gracefully
         # Use send_text() for FastAPI WebSocket (send JSON string as text)
+        # CRITICAL: Add timeout to prevent deadlock when client send buffer is full
         clients_list = list(self.clients)  # Copy to avoid modification during iteration
-        …
+
+        send_tasks = [
+            asyncio.wait_for(client.send_text(message), timeout=0.5)  # 500ms timeout
+            for client in clients_list
+        ]
        results = await asyncio.gather(*send_tasks, return_exceptions=True)

        # Remove clients that failed to receive the message
        failed_clients = []
        for client, result in zip(clients_list, results, strict=False):
            if isinstance(result, Exception):
-                …
+                # Check if it's a timeout (backpressure) or other error
+                if isinstance(result, asyncio.TimeoutError):
+                    logger.warning(
+                        "Client send timeout (backpressure) - client is slow or disconnected, removing client"
+                    )
+                else:
+                    logger.warning(f"Failed to send to client: {result}")
                failed_clients.append(client)

        # Clean up failed clients
flock/engines/dspy_engine.py
CHANGED
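Two themes run through the hunks below: string outputs from DSPy are now parsed through a list of JSON candidates (the raw text, the text after a [[ ## … ## ]] marker, fenced blocks, and the first {...} or [...] segment) before falling back to {"text": ...}, and every WebSocket broadcast during streaming is scheduled with asyncio.create_task() instead of being awaited, so a slow dashboard client cannot stall token streaming. The create_task pattern keeps strong references to pending tasks; a minimal sketch of that idiom, where broadcast() stands in for the manager call in the diff:

    import asyncio

    background_tasks: set[asyncio.Task] = set()


    def emit(event, broadcast) -> None:
        # Must be called from inside a running event loop.
        # Schedule the broadcast without awaiting it; hold a reference so the
        # task is not garbage-collected before it finishes.
        task = asyncio.create_task(broadcast(event))
        background_tasks.add(task)
        task.add_done_callback(background_tasks.discard)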
@@ -2,6 +2,7 @@

 from __future__ import annotations

+import asyncio
 import json
 import os
 from collections import OrderedDict, defaultdict
@@ -395,10 +396,46 @@ class DSPyEngine(EngineComponent):
         if isinstance(raw, BaseModel):
             return raw.model_dump()
         if isinstance(raw, str):
-            … (removed lines not captured in this diff view)
+            text = raw.strip()
+            candidates: list[str] = []
+
+            # Primary attempt - full string
+            if text:
+                candidates.append(text)
+
+            # Handle DSPy streaming markers like `[[ ## output ## ]]`
+            if text.startswith("[[") and "]]" in text:
+                _, remainder = text.split("]]", 1)
+                remainder = remainder.strip()
+                if remainder:
+                    candidates.append(remainder)
+
+            # Handle Markdown-style fenced blocks
+            if text.startswith("```") and text.endswith("```"):
+                fenced = text.strip("`").strip()
+                if fenced:
+                    candidates.append(fenced)
+
+            # Extract first JSON-looking segment if present
+            for opener, closer in (("{", "}"), ("[", "]")):
+                start = text.find(opener)
+                end = text.rfind(closer)
+                if start != -1 and end != -1 and end > start:
+                    segment = text[start : end + 1].strip()
+                    if segment:
+                        candidates.append(segment)
+
+            seen: set[str] = set()
+            for candidate in candidates:
+                if candidate in seen:
+                    continue
+                seen.add(candidate)
+                try:
+                    return json.loads(candidate)
+                except json.JSONDecodeError:
+                    continue
+
+            return {"text": text}
         if isinstance(raw, Mapping):
             return dict(raw)
         return {"value": raw}
@@ -562,6 +599,9 @@ class DSPyEngine(EngineComponent):
             stream_buffers[status_field] = []
         stream_sequence = 0  # Monotonic sequence for ordering

+        # Track background WebSocket broadcast tasks to prevent garbage collection
+        ws_broadcast_tasks: set[asyncio.Task] = set()
+
         formatter = theme_dict = styles = agent_label = None
         live_cm = nullcontext()
         overflow_mode = self.stream_vertical_overflow
@@ -607,7 +647,7 @@ class DSPyEngine(EngineComponent):
             stream_buffers[status_field].append(str(token) + "\n")
             display_data["status"] = "".join(stream_buffers[status_field])

-            # Emit to WebSocket
+            # Emit to WebSocket (non-blocking to prevent deadlock)
             if ws_manager and token:
                 try:
                     event = StreamingOutputEvent(
@@ -621,10 +661,15 @@ class DSPyEngine(EngineComponent):
                         sequence=stream_sequence,
                         is_final=False,
                     )
-                    …
+                    # Use create_task to avoid blocking the streaming loop
+                    task = asyncio.create_task(ws_manager.broadcast(event))
+                    ws_broadcast_tasks.add(task)
+                    task.add_done_callback(ws_broadcast_tasks.discard)
                     stream_sequence += 1
                 except Exception as e:
                     logger.warning(f"Failed to emit streaming event: {e}")
+            else:
+                logger.exception("NO WS_MANAGER PRESENT!!!!")
@@ -643,7 +688,7 @@ class DSPyEngine(EngineComponent):
                 stream_buffers[buffer_key]
             )

-            # Emit to WebSocket
+            # Emit to WebSocket (non-blocking to prevent deadlock)
             if ws_manager:
                 logger.info(
                     f"[STREAMING] Emitting StreamResponse token='{token}', sequence={stream_sequence}"
@@ -660,7 +705,10 @@ class DSPyEngine(EngineComponent):
                         sequence=stream_sequence,
                         is_final=False,
                     )
-                    …
+                    # Use create_task to avoid blocking the streaming loop
+                    task = asyncio.create_task(ws_manager.broadcast(event))
+                    ws_broadcast_tasks.add(task)
+                    task.add_done_callback(ws_broadcast_tasks.discard)
                     stream_sequence += 1
                 except Exception as e:
                     logger.warning(f"Failed to emit streaming event: {e}")
@@ -690,7 +738,7 @@ class DSPyEngine(EngineComponent):
             stream_buffers[status_field].append(str(token))
             display_data["status"] = "".join(stream_buffers[status_field])

-            # Emit to WebSocket
+            # Emit to WebSocket (non-blocking to prevent deadlock)
             if ws_manager and token:
                 try:
                     event = StreamingOutputEvent(
@@ -704,7 +752,10 @@ class DSPyEngine(EngineComponent):
                         sequence=stream_sequence,
                         is_final=False,
                     )
-                    …
+                    # Use create_task to avoid blocking the streaming loop
+                    task = asyncio.create_task(ws_manager.broadcast(event))
+                    ws_broadcast_tasks.add(task)
+                    task.add_done_callback(ws_broadcast_tasks.discard)
                     stream_sequence += 1
                 except Exception as e:
                     logger.warning(f"Failed to emit streaming event: {e}")
@@ -716,7 +767,7 @@ class DSPyEngine(EngineComponent):
         if isinstance(value, dspy_mod.Prediction):
             final_result = value

-            # Emit final streaming event
+            # Emit final streaming event (non-blocking to prevent deadlock)
             if ws_manager:
                 try:
                     event = StreamingOutputEvent(
@@ -730,7 +781,10 @@ class DSPyEngine(EngineComponent):
                         sequence=stream_sequence,
                         is_final=True,  # Mark as final
                     )
-                    …
+                    # Use create_task to avoid blocking the streaming loop
+                    task = asyncio.create_task(ws_manager.broadcast(event))
+                    ws_broadcast_tasks.add(task)
+                    task.add_done_callback(ws_broadcast_tasks.discard)
                     event = StreamingOutputEvent(
                         correlation_id=str(ctx.correlation_id)
                         if ctx and ctx.correlation_id
@@ -742,7 +796,10 @@ class DSPyEngine(EngineComponent):
                         sequence=stream_sequence,
                         is_final=True,  # Mark as final
                     )
-                    …
+                    # Use create_task to avoid blocking the streaming loop
+                    task = asyncio.create_task(ws_manager.broadcast(event))
+                    ws_broadcast_tasks.add(task)
+                    task.add_done_callback(ws_broadcast_tasks.discard)
                 except Exception as e:
                     logger.warning(f"Failed to emit final streaming event: {e}")

flock/examples.py
CHANGED
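The change below lets the demo orchestrator accept an externally created blackboard store. Combined with the SQLiteBlackboardStore used elsewhere in this release, a hedged end-to-end sketch (the database file name is illustrative):

    import asyncio

    from flock.examples import create_demo_orchestrator
    from flock.store import SQLiteBlackboardStore


    async def main() -> None:
        store = SQLiteBlackboardStore("blackboard.db")
        await store.ensure_schema()  # create tables on first run

        orchestrator, agents = create_demo_orchestrator(store=store)
        # run or serve the demo; artifacts are now persisted in blackboard.db


    asyncio.run(main())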
@@ -18,6 +18,7 @@ from flock.components import EngineComponent
 from flock.orchestrator import Flock
 from flock.registry import flock_tool, flock_type, type_registry
 from flock.runtime import EvalInputs, EvalResult
+from flock.store import BlackboardStore
 from flock.utilities import LoggingUtility, MetricsUtility


@@ -75,8 +76,10 @@ class TaglineEngine(EngineComponent):

 def create_demo_orchestrator(
     model: str | None = None,
+    *,
+    store: BlackboardStore | None = None,
 ) -> tuple[Flock, dict[str, AgentBuilder]]:
-    orchestrator = Flock(model)
+    orchestrator = Flock(model, store=store)

     movie = (
         orchestrator.agent("movie")
flock/frontend/README.md
CHANGED
@@ -33,7 +33,7 @@ The dashboard offers two complementary visualization modes:

 ### Extensible Module System
 - **Custom Visualizations**: Add specialized views via the module system
-- **…
+- **Historical Blackboard Module**: Persisted artifact browser with retention insights
 - **Trace Viewer Module**: Jaeger-style distributed tracing with timeline and statistics
 - **Context Menu Integration**: Right-click to add modules at any location
 - **Persistent Layout**: Module positions and sizes are saved across sessions
@@ -123,6 +123,20 @@ Every traced operation captures:
 - **Multi-Trace Comparison**: Open related traces to compare execution patterns
 - **JSON Navigation**: Use "Expand All" for complex nested structures

+### Historical Blackboard Module 📚
+
+The new Historical Blackboard module brings persisted artifacts into the dashboard so operators can rewind the blackboard, not just watch the live firehose.
+
+#### Highlights
+
+- **SQLite-first loading**: Fetches paginated artifacts before WebSocket replay, so the graph and detail views start with real history.
+- **Rich filtering**: Mirrors server-side `FilterConfig` capabilities (type, producer, tags, visibility, correlation, time range) with multi-select controls and saved presets.
+- **Consumption awareness**: Displays who consumed each artifact, run IDs, and consumption timestamps, ideal for reconciling downstream behaviour.
+- **Retention transparency**: Inline banners show the oldest/latest artifacts on disk and whether additional data can be loaded.
+- **Virtualized table**: Efficiently scroll through thousands of artifacts with keyboard navigation, quick selection, and payload inspection via the JSON renderer.
+
+Launch the module via the context menu (or `Add Module → Historical Blackboard`) after running `examples/03-the-dashboard/04_persistent_pizza_dashboard.py` against a SQLite-backed orchestrator.
+
 ### Modern UI/UX
 - **Glassmorphism Design**: Modern dark theme with semi-transparent surfaces and blur effects
 - **Keyboard Shortcuts**: Navigate efficiently with Ctrl+M, Ctrl+F, and Esc