flock_core-0.5.9-py3-none-any.whl → flock_core-0.5.11-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of flock-core might be problematic.
- flock/agent.py +149 -62
- flock/api/themes.py +6 -2
- flock/api_models.py +285 -0
- flock/artifact_collector.py +6 -3
- flock/batch_accumulator.py +3 -1
- flock/cli.py +3 -1
- flock/components.py +45 -56
- flock/context_provider.py +531 -0
- flock/correlation_engine.py +8 -4
- flock/dashboard/collector.py +48 -29
- flock/dashboard/events.py +10 -4
- flock/dashboard/launcher.py +3 -1
- flock/dashboard/models/graph.py +9 -3
- flock/dashboard/service.py +187 -93
- flock/dashboard/websocket.py +17 -4
- flock/engines/dspy_engine.py +174 -98
- flock/engines/examples/simple_batch_engine.py +9 -3
- flock/examples.py +6 -2
- flock/frontend/src/services/indexeddb.test.ts +4 -4
- flock/frontend/src/services/indexeddb.ts +1 -1
- flock/helper/cli_helper.py +14 -1
- flock/logging/auto_trace.py +6 -1
- flock/logging/formatters/enum_builder.py +3 -1
- flock/logging/formatters/theme_builder.py +32 -17
- flock/logging/formatters/themed_formatter.py +38 -22
- flock/logging/logging.py +21 -7
- flock/logging/telemetry.py +9 -3
- flock/logging/telemetry_exporter/duckdb_exporter.py +27 -25
- flock/logging/trace_and_logged.py +14 -5
- flock/mcp/__init__.py +3 -6
- flock/mcp/client.py +49 -19
- flock/mcp/config.py +12 -6
- flock/mcp/manager.py +6 -2
- flock/mcp/servers/sse/flock_sse_server.py +9 -3
- flock/mcp/servers/streamable_http/flock_streamable_http_server.py +6 -2
- flock/mcp/tool.py +18 -6
- flock/mcp/types/handlers.py +3 -1
- flock/mcp/types/types.py +9 -3
- flock/orchestrator.py +449 -58
- flock/orchestrator_component.py +15 -5
- flock/patches/dspy_streaming_patch.py +12 -4
- flock/registry.py +9 -3
- flock/runtime.py +69 -18
- flock/service.py +135 -64
- flock/store.py +29 -10
- flock/subscription.py +6 -4
- flock/system_artifacts.py +33 -0
- flock/utilities.py +41 -13
- flock/utility/output_utility_component.py +31 -11
- {flock_core-0.5.9.dist-info → flock_core-0.5.11.dist-info}/METADATA +150 -26
- {flock_core-0.5.9.dist-info → flock_core-0.5.11.dist-info}/RECORD +54 -51
- {flock_core-0.5.9.dist-info → flock_core-0.5.11.dist-info}/WHEEL +0 -0
- {flock_core-0.5.9.dist-info → flock_core-0.5.11.dist-info}/entry_points.txt +0 -0
- {flock_core-0.5.9.dist-info → flock_core-0.5.11.dist-info}/licenses/LICENSE +0 -0
flock/orchestrator_component.py
CHANGED
@@ -150,7 +150,9 @@ class OrchestratorComponent(BaseModel, metaclass=TracedModelMeta):
     >>> # Simple component
     >>> class LoggingComponent(OrchestratorComponent):
     ...     async def on_agent_scheduled(self, orch, agent, artifacts, task):
-    ...         print(
+    ...         print(
+    ...             f"Agent {agent.name} scheduled with {len(artifacts)} artifacts"
+    ...         )

     >>> # Circuit breaker component
     >>> class CircuitBreakerComponent(OrchestratorComponent):
@@ -166,7 +168,9 @@ class OrchestratorComponent(BaseModel, metaclass=TracedModelMeta):
     """

     name: str | None = None
-    config: OrchestratorComponentConfig = Field(
+    config: OrchestratorComponentConfig = Field(
+        default_factory=OrchestratorComponentConfig
+    )
     priority: int = 0  # Lower priority = earlier execution

     # ──────────────────────────────────────────────────────────
@@ -355,7 +359,7 @@ class OrchestratorComponent(BaseModel, metaclass=TracedModelMeta):
     ...         await self.ws.broadcast({
     ...             "event": "agent_scheduled",
     ...             "agent": agent.name,
-    ...             "count": len(artifacts)
+    ...             "count": len(artifacts),
     ...         })
     """

@@ -484,7 +488,10 @@ class BuiltinCollectionComponent(OrchestratorComponent):
                 subscription_index=subscription_index,
             )

-            if
+            if (
+                subscription.batch.timeout
+                and orchestrator._batch_timeout_task is None
+            ):
                 import asyncio

                 orchestrator._batch_timeout_task = asyncio.create_task(
@@ -500,7 +507,10 @@ class BuiltinCollectionComponent(OrchestratorComponent):
                 subscription_index=subscription_index,
             )

-            if
+            if (
+                subscription.batch.timeout
+                and orchestrator._batch_timeout_task is None
+            ):
                 import asyncio

                 orchestrator._batch_timeout_task = asyncio.create_task(
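The two hunks above guard the batch-timeout watchdog so it is only scheduled when the subscription actually defines a timeout and no watchdog task is already running. A minimal standalone sketch of that lazy, create-once pattern; the class, the `_flush_after` coroutine, and the flush body are illustrative stand-ins, not Flock's actual implementation:

```python
import asyncio


class BatchWatchdog:
    """Illustrative sketch: start a single timeout task on demand."""

    def __init__(self) -> None:
        self._batch_timeout_task: asyncio.Task | None = None

    def ensure_timeout_task(self, timeout: float | None) -> None:
        # Mirrors the guard added in the diff: only schedule the watchdog
        # when a timeout is configured and no task is already running.
        if timeout and self._batch_timeout_task is None:
            self._batch_timeout_task = asyncio.create_task(self._flush_after(timeout))

    async def _flush_after(self, timeout: float) -> None:
        await asyncio.sleep(timeout)
        print("flush accumulated batch here")  # placeholder for the real flush logic
        self._batch_timeout_task = None
```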
flock/patches/dspy_streaming_patch.py
CHANGED
@@ -45,7 +45,9 @@ def patched_sync_send_to_stream(stream, message):
     try:
         asyncio.run(_send())
     except Exception as e:
-        logger.debug(
+        logger.debug(
+            f"DSPy status message send failed in sync context (non-critical): {e}"
+        )


 def apply_patch():
@@ -55,12 +57,16 @@ def apply_patch():

         # Store original for reference (in case we need to restore)
         if not hasattr(dspy_messages, "_original_sync_send_to_stream"):
-            dspy_messages._original_sync_send_to_stream =
+            dspy_messages._original_sync_send_to_stream = (
+                dspy_messages.sync_send_to_stream
+            )

         # Replace with our non-blocking version
         dspy_messages.sync_send_to_stream = patched_sync_send_to_stream

-        logger.info(
+        logger.info(
+            "Applied DSPy streaming patch - status messages are now non-blocking"
+        )
         return True

     except Exception as e:
@@ -74,7 +80,9 @@ def restore_original():
         import dspy.streaming.messages as dspy_messages

         if hasattr(dspy_messages, "_original_sync_send_to_stream"):
-            dspy_messages.sync_send_to_stream =
+            dspy_messages.sync_send_to_stream = (
+                dspy_messages._original_sync_send_to_stream
+            )
             logger.info("Restored original DSPy streaming function")
             return True

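Per the diff, apply_patch() swaps DSPy's sync_send_to_stream for the non-blocking version and stashes the original so restore_original() can undo it; both return True on success. A hedged usage sketch; the import path follows the file listing above, and calling the pair at startup/shutdown is an assumption rather than documented Flock behaviour:

```python
from flock.patches.dspy_streaming_patch import apply_patch, restore_original

# Make DSPy streaming status messages non-blocking before running engines.
if apply_patch():
    try:
        ...  # run workloads that emit DSPy streaming status messages
    finally:
        # Put DSPy's original sync_send_to_stream back when done.
        restore_original()
```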
flock/registry.py
CHANGED
@@ -25,9 +25,13 @@ class TypeRegistry:

     def register(self, model: type[BaseModel], name: str | None = None) -> str:
         if not issubclass(model, BaseModel):
-            raise RegistryError(
+            raise RegistryError(
+                "Only Pydantic models can be registered as artifact types."
+            )
         type_name = (
-            name
+            name
+            or getattr(model, "__flock_type__", None)
+            or f"{model.__module__}.{model.__name__}"
         )
         existing_model = self._by_name.get(type_name)
         if existing_model is not None and existing_model is not model:
@@ -127,7 +131,9 @@ def flock_type(model: type[BaseModel] | None = None, *, name: str | None = None)
     return _wrap(model)


-def flock_tool(
+def flock_tool(
+    func: Callable[..., Any] | None = None, *, name: str | None = None
+) -> Any:
     """Decorator to register a deterministic helper function for agents."""

     def _wrap(callable_: Callable[..., Any]) -> Callable[..., Any]:
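TypeRegistry.register() now resolves an artifact's type name by falling back from an explicit name, to a __flock_type__ attribute, to the model's module-qualified class name. A short sketch of how the flock_type and flock_tool decorators from this module might be applied; the Order model and lookup_price helper are made-up examples, and bare-decorator use follows the dual-mode signatures shown in the diff:

```python
from pydantic import BaseModel

from flock.registry import flock_tool, flock_type


@flock_type(name="orders.Order")  # explicit name wins over the fallback chain
class Order(BaseModel):
    sku: str
    quantity: int


@flock_tool
def lookup_price(sku: str) -> float:
    """Deterministic helper an agent can call."""
    return 9.99
```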
flock/runtime.py
CHANGED
@@ -5,7 +5,7 @@ from __future__ import annotations
 from typing import Any
 from uuid import UUID

-from pydantic import BaseModel, Field
+from pydantic import BaseModel, ConfigDict, Field

 from flock.artifacts import Artifact

@@ -28,7 +28,9 @@ class EvalInputs(BaseModel):

     Example:
         >>> class TaskProcessor(EngineComponent):
-        ...     async def evaluate(
+        ...     async def evaluate(
+        ...         self, agent, ctx, inputs: EvalInputs
+        ...     ) -> EvalResult:
         ...         task = inputs.first_as(Task)
         ...         if not task:
         ...             return EvalResult.empty()
@@ -88,9 +90,13 @@ class EvalResult(BaseModel):

     Example:
         >>> class TaskProcessor(EngineComponent):
-        ...     async def evaluate(
+        ...     async def evaluate(
+        ...         self, agent, ctx, inputs: EvalInputs
+        ...     ) -> EvalResult:
         ...         task = inputs.first_as(Task)
-        ...         processed = Task(
+        ...         processed = Task(
+        ...             name=f"Done: {task.name}", priority=task.priority
+        ...         )
         ...         return EvalResult.from_object(processed, agent=agent)
     """
     from flock.artifacts import Artifact
@@ -136,14 +142,16 @@

     Example:
         >>> class MovieEngine(EngineComponent):
-        ...     async def evaluate(
+        ...     async def evaluate(
+        ...         self, agent, ctx, inputs: EvalInputs
+        ...     ) -> EvalResult:
         ...         idea = inputs.first_as(Idea)
-        ...         movie = Movie(
+        ...         movie = Movie(
+        ...             title=idea.topic.upper(), runtime=240, synopsis="..."
+        ...         )
         ...         tagline = Tagline(line="Don't miss it!")
         ...         return EvalResult.from_objects(
-        ...             movie, tagline,
-        ...             agent=agent,
-        ...             metrics={"confidence": 0.9}
+        ...             movie, tagline, agent=agent, metrics={"confidence": 0.9}
         ...         )
     """
     from flock.artifacts import Artifact
@@ -190,7 +198,9 @@ class EvalResult(BaseModel):

     Example:
         >>> class ConditionalProcessor(EngineComponent):
-        ...     async def evaluate(
+        ...     async def evaluate(
+        ...         self, agent, ctx, inputs: EvalInputs
+        ...     ) -> EvalResult:
         ...         task = inputs.first_as(Task)
         ...         if task.priority < 3:
         ...             return EvalResult.empty()  # Skip low priority
@@ -229,12 +239,15 @@ class EvalResult(BaseModel):

     Example:
         >>> class ValidationAgent(EngineComponent):
-        ...     async def evaluate(
+        ...     async def evaluate(
+        ...         self, agent, ctx, inputs: EvalInputs
+        ...     ) -> EvalResult:
         ...         task = inputs.first_as(Task)
         ...         is_valid = task.priority >= 1
-        ...         return EvalResult.with_state(
-        ...
-        ...
+        ...         return EvalResult.with_state({
+        ...             "validation_passed": is_valid,
+        ...             "validator": "priority_check",
+        ...         })
     """
     return cls(
         artifacts=[],
@@ -245,13 +258,51 @@


 class Context(BaseModel):
-
-
-
+    """Runtime context for agent execution.
+
+    SECURITY FIX (2025-10-17): Simplified to data-only design.
+    Context is now just pre-filtered data with ZERO capabilities.
+
+    Vulnerabilities fixed:
+    - Vulnerability #1 (READ): Agents could bypass visibility via ctx.board.list()
+    - Vulnerability #2 (WRITE): Agents could bypass validation via ctx.board.publish()
+    - Vulnerability #3 (GOD MODE): Agents had unlimited ctx.orchestrator access
+    - Vulnerability #4 (STORE ACCESS): Agents could access ctx.store or ctx.provider._store
+
+    Solution: Orchestrator evaluates context BEFORE creating Context.
+    Engines receive only pre-filtered artifact data via ctx.artifacts.
+    No provider, no store, no capabilities - just immutable data.
+
+    Design Philosophy: Engines are pure functions (input + context → output).
+    They don't query, they don't mutate - they only transform data.
+    """
+
+    model_config = ConfigDict(frozen=True)
+
+    # ❌ REMOVED: board: Any (security vulnerability)
+    # ❌ REMOVED: orchestrator: Any (security vulnerability)
+    # ❌ REMOVED: provider: Any (security vulnerability - engines could call provider methods)
+    # ❌ REMOVED: store: Any (security vulnerability - direct store access)
+
+    # ✅ FINAL SOLUTION: Pre-filtered artifacts (evaluated by orchestrator)
+    # Engines can only read this list - they cannot query for more data
+    artifacts: list[Artifact] = Field(
+        default_factory=list,
+        description="Pre-filtered conversation context artifacts (evaluated by orchestrator using context provider)",
+    )
+
+    # ✅ Agent identity (informational only - used by orchestrator for logging/tracing)
+    agent_identity: Any = Field(
+        default=None,
+        description="Agent identity (informational) - engines cannot use this to query data",
+    )
+
+    correlation_id: UUID | None = None
     task_id: str
     state: dict[str, Any] = Field(default_factory=dict)
     is_batch: bool = Field(
-        default=False,
+        default=False,
+        description="True if this execution is processing a BatchSpec accumulation",
     )

     def get_variable(self, key: str, default: Any = None) -> Any:
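With the data-only Context, an engine only sees the artifacts the orchestrator pre-filtered for it (ctx.artifacts) plus inert metadata, and the frozen model rejects mutation. A sketch of an engine written against this contract, following the docstring examples above; the Task model is a made-up artifact payload, EvalInputs/EvalResult come from flock.runtime as shown in the diff, and the import path for EngineComponent is an assumption:

```python
from pydantic import BaseModel

# EngineComponent's import path is assumed here; EvalInputs/EvalResult are from the diff.
from flock.components import EngineComponent
from flock.runtime import EvalInputs, EvalResult


class Task(BaseModel):  # illustrative artifact payload model
    name: str
    priority: int


class TaskProcessor(EngineComponent):
    async def evaluate(self, agent, ctx, inputs: EvalInputs) -> EvalResult:
        # ctx.artifacts is the orchestrator-filtered context; there is no
        # ctx.board, ctx.store, or ctx.orchestrator left to reach around it.
        seen = len(ctx.artifacts)

        task = inputs.first_as(Task)
        if task is None:
            return EvalResult.empty()

        processed = Task(
            name=f"Done: {task.name} ({seen} context artifacts seen)",
            priority=task.priority,
        )
        return EvalResult.from_object(processed, agent=agent)
```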
flock/service.py
CHANGED
@@ -10,6 +10,20 @@ from uuid import UUID
 from fastapi import FastAPI, HTTPException, Query
 from fastapi.responses import PlainTextResponse

+from flock.api_models import (
+    Agent,
+    AgentListResponse,
+    AgentRunRequest,
+    AgentRunResponse,
+    AgentSubscription,
+    ArtifactListResponse,
+    ArtifactPublishRequest,
+    ArtifactPublishResponse,
+    ArtifactSummaryResponse,
+    CorrelationStatusResponse,
+    HealthResponse,
+    ProducedArtifact,
+)
 from flock.registry import type_registry
 from flock.store import ArtifactEnvelope, ConsumptionRecord, FilterConfig

@@ -21,7 +35,21 @@ if TYPE_CHECKING:
 class BlackboardHTTPService:
     def __init__(self, orchestrator: Flock) -> None:
         self.orchestrator = orchestrator
-        self.app = FastAPI(
+        self.app = FastAPI(
+            title="Flock REST API Documentation",
+            version="1.0.0",
+            description="RESTful API for interacting with Flock agents and artifacts",
+            openapi_tags=[
+                {
+                    "name": "Public API",
+                    "description": "**Production-ready endpoints** for publishing artifacts, running agents, and querying data. Use these in your applications.",
+                },
+                {
+                    "name": "Health & Metrics",
+                    "description": "Monitoring endpoints for health checks and metrics collection.",
+                },
+            ],
+        )
         self._register_routes()

     def _register_routes(self) -> None:
@@ -40,7 +68,9 @@ class BlackboardHTTPService:
                 "visibility": artifact.visibility.model_dump(mode="json"),
                 "visibility_kind": getattr(artifact.visibility, "kind", "Unknown"),
                 "created_at": artifact.created_at.isoformat(),
-                "correlation_id": str(artifact.correlation_id)
+                "correlation_id": str(artifact.correlation_id)
+                if artifact.correlation_id
+                else None,
                 "partition_key": artifact.partition_key,
                 "tags": sorted(artifact.tags),
                 "version": artifact.version,
@@ -56,7 +86,9 @@ class BlackboardHTTPService:
                 }
                 for record in consumptions
             ]
-            data["consumed_by"] = sorted({
+            data["consumed_by"] = sorted({
+                record.consumer for record in consumptions
+            })
             return data

         def _parse_datetime(value: str | None, label: str) -> datetime | None:
@@ -65,7 +97,9 @@ class BlackboardHTTPService:
             try:
                 return datetime.fromisoformat(value)
             except ValueError as exc:  # pragma: no cover - FastAPI converts
-                raise HTTPException(
+                raise HTTPException(
+                    status_code=400, detail=f"Invalid {label}: {value}"
+                ) from exc

         def _make_filter_config(
             type_names: list[str] | None,
@@ -86,19 +120,25 @@ class BlackboardHTTPService:
                 end=_parse_datetime(end, "to"),
             )

-        @app.post(
-
-
-
-
-
+        @app.post(
+            "/api/v1/artifacts",
+            response_model=ArtifactPublishResponse,
+            tags=["Public API"],
+        )
+        async def publish_artifact(
+            body: ArtifactPublishRequest,
+        ) -> ArtifactPublishResponse:
             try:
-                await orchestrator.publish({"type":
+                await orchestrator.publish({"type": body.type, **body.payload})
             except Exception as exc:  # pragma: no cover - FastAPI converts
                 raise HTTPException(status_code=400, detail=str(exc)) from exc
-            return
+            return ArtifactPublishResponse(status="accepted")

-        @app.get(
+        @app.get(
+            "/api/v1/artifacts",
+            response_model=ArtifactListResponse,
+            tags=["Public API"],
+        )
         async def list_artifacts(
             type_names: list[str] | None = Query(None, alias="type"),
             produced_by: list[str] | None = Query(None),
@@ -110,7 +150,7 @@ class BlackboardHTTPService:
             limit: int = Query(50, ge=1, le=500),
             offset: int = Query(0, ge=0),
             embed_meta: bool = Query(False, alias="embed_meta"),
-        ) ->
+        ) -> ArtifactListResponse:
             filters = _make_filter_config(
                 type_names,
                 produced_by,
@@ -129,15 +169,21 @@ class BlackboardHTTPService:
             items: list[dict[str, Any]] = []
             for artifact in artifacts:
                 if isinstance(artifact, ArtifactEnvelope):
-                    items.append(
+                    items.append(
+                        _serialize_artifact(artifact.artifact, artifact.consumptions)
+                    )
                 else:
                     items.append(_serialize_artifact(artifact))
-            return
-
-
-
+            return ArtifactListResponse(
+                items=items,
+                pagination={"limit": limit, "offset": offset, "total": total},
+            )

-        @app.get(
+        @app.get(
+            "/api/v1/artifacts/summary",
+            response_model=ArtifactSummaryResponse,
+            tags=["Public API"],
+        )
         async def summarize_artifacts(
             type_names: list[str] | None = Query(None, alias="type"),
             produced_by: list[str] | None = Query(None),
@@ -146,7 +192,7 @@ class BlackboardHTTPService:
             start: str | None = Query(None, alias="from"),
             end: str | None = Query(None, alias="to"),
             visibility: list[str] | None = Query(None),
-        ) ->
+        ) -> ArtifactSummaryResponse:
             filters = _make_filter_config(
                 type_names,
                 produced_by,
@@ -157,31 +203,30 @@ class BlackboardHTTPService:
                 end,
             )
             summary = await orchestrator.store.summarize_artifacts(filters)
-            return
+            return ArtifactSummaryResponse(summary=summary)

-        @app.get("/api/v1/artifacts/{artifact_id}")
+        @app.get("/api/v1/artifacts/{artifact_id}", tags=["Public API"])
         async def get_artifact(artifact_id: UUID) -> dict[str, Any]:
             artifact = await orchestrator.store.get(artifact_id)
             if artifact is None:
                 raise HTTPException(status_code=404, detail="artifact not found")
             return _serialize_artifact(artifact)

-        @app.post(
-
+        @app.post(
+            "/api/v1/agents/{name}/run",
+            response_model=AgentRunResponse,
+            tags=["Public API"],
+        )
+        async def run_agent(name: str, body: AgentRunRequest) -> AgentRunResponse:
             try:
                 agent = orchestrator.get_agent(name)
             except KeyError as exc:
                 raise HTTPException(status_code=404, detail="agent not found") from exc

-            inputs_data: list[dict[str, Any]] = body.get("inputs") or []
             inputs = []
-            for item in
-
-
-                if not type_name:
-                    raise HTTPException(status_code=400, detail="Each input requires 'type'.")
-                model = type_registry.resolve(type_name)
-                instance = model(**payload)
+            for item in body.inputs:
+                model = type_registry.resolve(item.type)
+                instance = model(**item.payload)
                 inputs.append(instance)

             try:
@@ -191,40 +236,42 @@ class BlackboardHTTPService:
                     status_code=500, detail=f"Agent execution failed: {exc}"
                 ) from exc

-            return
-
-
-
-
-
-
-
+            return AgentRunResponse(
+                artifacts=[
+                    ProducedArtifact(
+                        id=str(artifact.id),
+                        type=artifact.type,
+                        payload=artifact.payload,
+                        produced_by=artifact.produced_by,
+                    )
                     for artifact in outputs
                 ]
-
+            )

-        @app.get(
-
-
-
-
-
-
-
-
-
-
-
+        @app.get(
+            "/api/v1/agents", response_model=AgentListResponse, tags=["Public API"]
+        )
+        async def list_agents() -> AgentListResponse:
+            return AgentListResponse(
+                agents=[
+                    Agent(
+                        name=agent.name,
+                        description=agent.description or "",
+                        subscriptions=[
+                            AgentSubscription(
+                                types=list(subscription.type_names),
+                                mode=subscription.mode,
+                                delivery=subscription.delivery,
+                            )
                             for subscription in agent.subscriptions
                         ],
-
-
+                        outputs=[output.spec.type_name for output in agent.outputs],
+                    )
                     for agent in orchestrator.agents
                 ]
-
+            )

-        @app.get("/api/v1/agents/{agent_id}/history-summary")
+        @app.get("/api/v1/agents/{agent_id}/history-summary", tags=["Public API"])
         async def agent_history(
             agent_id: str,
             type_names: list[str] | None = Query(None, alias="type"),
@@ -247,13 +294,37 @@ class BlackboardHTTPService:
             summary = await orchestrator.store.agent_history_summary(agent_id, filters)
             return {"agent_id": agent_id, "summary": summary}

-        @app.get(
-
-
+        @app.get(
+            "/api/v1/correlations/{correlation_id}/status",
+            response_model=CorrelationStatusResponse,
+            tags=["Public API"],
+        )
+        async def get_correlation_status(
+            correlation_id: str,
+        ) -> CorrelationStatusResponse:
+            """Get the status of a workflow by correlation ID.
+
+            Returns workflow state (active/completed/failed/not_found), pending work status,
+            artifact counts, error counts, and timestamps.
+
+            This endpoint is useful for polling to check if a workflow has completed.
+            """
+            try:
+                status = await orchestrator.get_correlation_status(correlation_id)
+                return CorrelationStatusResponse(**status)
+            except ValueError as exc:
+                raise HTTPException(status_code=400, detail=str(exc)) from exc
+
+        @app.get("/health", response_model=HealthResponse, tags=["Health & Metrics"])
+        async def health() -> HealthResponse:  # pragma: no cover - trivial
+            return HealthResponse(status="ok")

-        @app.get("/metrics")
+        @app.get("/metrics", tags=["Health & Metrics"])
         async def metrics() -> PlainTextResponse:
-            lines = [
+            lines = [
+                f"blackboard_{key} {value}"
+                for key, value in orchestrator.metrics.items()
+            ]
             return PlainTextResponse("\n".join(lines))

     def run(
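The service now exposes typed endpoints under /api/v1 with response models and OpenAPI tags. A hedged client sketch against those routes; the host/port, the InvoiceSubmitted type name, and the billing_agent name are assumptions, while the request shapes follow ArtifactPublishRequest and AgentRunRequest as used in the diff:

```python
import httpx

BASE = "http://localhost:8000"  # assumed bind address for the Flock HTTP service

with httpx.Client(base_url=BASE) as client:
    # POST /api/v1/artifacts -> ArtifactPublishResponse(status="accepted")
    client.post(
        "/api/v1/artifacts",
        json={"type": "InvoiceSubmitted", "payload": {"amount": 42}},
    ).raise_for_status()

    # POST /api/v1/agents/{name}/run -> AgentRunResponse with produced artifacts
    run = client.post(
        "/api/v1/agents/billing_agent/run",
        json={"inputs": [{"type": "InvoiceSubmitted", "payload": {"amount": 42}}]},
    ).json()

    # GET /api/v1/artifacts and /health round out the public surface
    artifacts = client.get("/api/v1/artifacts", params={"limit": 10}).json()
    health = client.get("/health").json()  # {"status": "ok"}
```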
|