flock-core 0.5.10__py3-none-any.whl → 0.5.20__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of flock-core might be problematic.
- flock/__init__.py +1 -1
- flock/agent/__init__.py +30 -0
- flock/agent/builder_helpers.py +192 -0
- flock/agent/builder_validator.py +169 -0
- flock/agent/component_lifecycle.py +325 -0
- flock/agent/context_resolver.py +141 -0
- flock/agent/mcp_integration.py +212 -0
- flock/agent/output_processor.py +304 -0
- flock/api/__init__.py +20 -0
- flock/api/models.py +283 -0
- flock/{service.py → api/service.py} +121 -63
- flock/cli.py +2 -2
- flock/components/__init__.py +41 -0
- flock/components/agent/__init__.py +22 -0
- flock/{components.py → components/agent/base.py} +4 -3
- flock/{utility/output_utility_component.py → components/agent/output_utility.py} +12 -7
- flock/components/orchestrator/__init__.py +22 -0
- flock/{orchestrator_component.py → components/orchestrator/base.py} +5 -293
- flock/components/orchestrator/circuit_breaker.py +95 -0
- flock/components/orchestrator/collection.py +143 -0
- flock/components/orchestrator/deduplication.py +78 -0
- flock/core/__init__.py +30 -0
- flock/core/agent.py +953 -0
- flock/{artifacts.py → core/artifacts.py} +1 -1
- flock/{context_provider.py → core/context_provider.py} +3 -3
- flock/core/orchestrator.py +1102 -0
- flock/{store.py → core/store.py} +99 -454
- flock/{subscription.py → core/subscription.py} +1 -1
- flock/dashboard/collector.py +5 -5
- flock/dashboard/graph_builder.py +7 -7
- flock/dashboard/routes/__init__.py +21 -0
- flock/dashboard/routes/control.py +327 -0
- flock/dashboard/routes/helpers.py +340 -0
- flock/dashboard/routes/themes.py +76 -0
- flock/dashboard/routes/traces.py +521 -0
- flock/dashboard/routes/websocket.py +108 -0
- flock/dashboard/service.py +44 -1294
- flock/engines/dspy/__init__.py +20 -0
- flock/engines/dspy/artifact_materializer.py +216 -0
- flock/engines/dspy/signature_builder.py +474 -0
- flock/engines/dspy/streaming_executor.py +858 -0
- flock/engines/dspy_engine.py +45 -1330
- flock/engines/examples/simple_batch_engine.py +2 -2
- flock/examples.py +7 -7
- flock/logging/logging.py +1 -16
- flock/models/__init__.py +10 -0
- flock/models/system_artifacts.py +33 -0
- flock/orchestrator/__init__.py +45 -0
- flock/{artifact_collector.py → orchestrator/artifact_collector.py} +3 -3
- flock/orchestrator/artifact_manager.py +168 -0
- flock/{batch_accumulator.py → orchestrator/batch_accumulator.py} +2 -2
- flock/orchestrator/component_runner.py +389 -0
- flock/orchestrator/context_builder.py +167 -0
- flock/{correlation_engine.py → orchestrator/correlation_engine.py} +2 -2
- flock/orchestrator/event_emitter.py +167 -0
- flock/orchestrator/initialization.py +184 -0
- flock/orchestrator/lifecycle_manager.py +226 -0
- flock/orchestrator/mcp_manager.py +202 -0
- flock/orchestrator/scheduler.py +189 -0
- flock/orchestrator/server_manager.py +234 -0
- flock/orchestrator/tracing.py +147 -0
- flock/storage/__init__.py +10 -0
- flock/storage/artifact_aggregator.py +158 -0
- flock/storage/in_memory/__init__.py +6 -0
- flock/storage/in_memory/artifact_filter.py +114 -0
- flock/storage/in_memory/history_aggregator.py +115 -0
- flock/storage/sqlite/__init__.py +10 -0
- flock/storage/sqlite/agent_history_queries.py +154 -0
- flock/storage/sqlite/consumption_loader.py +100 -0
- flock/storage/sqlite/query_builder.py +112 -0
- flock/storage/sqlite/query_params_builder.py +91 -0
- flock/storage/sqlite/schema_manager.py +168 -0
- flock/storage/sqlite/summary_queries.py +194 -0
- flock/utils/__init__.py +14 -0
- flock/utils/async_utils.py +67 -0
- flock/{runtime.py → utils/runtime.py} +3 -3
- flock/utils/time_utils.py +53 -0
- flock/utils/type_resolution.py +38 -0
- flock/{utilities.py → utils/utilities.py} +2 -2
- flock/utils/validation.py +57 -0
- flock/utils/visibility.py +79 -0
- flock/utils/visibility_utils.py +134 -0
- {flock_core-0.5.10.dist-info → flock_core-0.5.20.dist-info}/METADATA +69 -61
- {flock_core-0.5.10.dist-info → flock_core-0.5.20.dist-info}/RECORD +89 -31
- flock/agent.py +0 -1578
- flock/orchestrator.py +0 -1746
- /flock/{visibility.py → core/visibility.py} +0 -0
- /flock/{helper → utils}/cli_helper.py +0 -0
- {flock_core-0.5.10.dist-info → flock_core-0.5.20.dist-info}/WHEEL +0 -0
- {flock_core-0.5.10.dist-info → flock_core-0.5.20.dist-info}/entry_points.txt +0 -0
- {flock_core-0.5.10.dist-info → flock_core-0.5.20.dist-info}/licenses/LICENSE +0 -0
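The bulk of this release is a structural refactor: the monolithic agent.py (-1578), orchestrator.py (-1746), store.py, dashboard/service.py, and engines/dspy_engine.py are split into flock.agent, flock.orchestrator, flock.core, flock.storage, flock.utils, flock.api, and flock.engines.dspy packages. For downstream code importing from the old module paths, the renames above imply updates like the following sketch; whether 0.5.20 re-exports the old paths for backward compatibility is not shown in this diff.

    # Import-path changes implied by the rename entries above (sketch only;
    # class names such as SQLiteBlackboardStore are taken from the store diff
    # below, the rest of each old module's public API is an assumption).

    # 0.5.10
    from flock.artifacts import Artifact
    from flock.store import SQLiteBlackboardStore
    from flock.subscription import Subscription

    # 0.5.20
    from flock.core.artifacts import Artifact
    from flock.core.store import SQLiteBlackboardStore
    from flock.core.subscription import Subscription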
flock/{store.py → core/store.py}
RENAMED
@@ -10,12 +10,11 @@ contract expected by the REST layer and dashboard.
 
 import asyncio
 import json
-import re
 from asyncio import Lock
 from collections import defaultdict
 from collections.abc import Iterable
 from dataclasses import dataclass, field
-from datetime import UTC, datetime, timedelta
+from datetime import UTC, datetime
 from pathlib import Path
 from typing import Any, TypeVar
 from uuid import UUID
@@ -23,59 +22,16 @@ from uuid import UUID
 import aiosqlite
 from opentelemetry import trace
 
-from flock.artifacts import Artifact
+from flock.core.artifacts import Artifact
 from flock.registry import type_registry
-from flock.visibility import (
-    AfterVisibility,
-    LabelledVisibility,
-    PrivateVisibility,
-    PublicVisibility,
-    TenantVisibility,
-    Visibility,
-)
+from flock.storage.artifact_aggregator import ArtifactAggregator
+from flock.utils.type_resolution import TypeResolutionHelper
+from flock.utils.visibility_utils import deserialize_visibility
 
 
 T = TypeVar("T")
 tracer = trace.get_tracer(__name__)
 
-ISO_DURATION_RE = re.compile(
-    r"^P(?:T?(?:(?P<hours>\d+)H)?(?:(?P<minutes>\d+)M)?(?:(?P<seconds>\d+)S)?)$"
-)
-
-
-def _parse_iso_duration(value: str | None) -> timedelta:
-    if not value:
-        return timedelta(0)
-    match = ISO_DURATION_RE.match(value)
-    if not match:
-        return timedelta(0)
-    hours = int(match.group("hours") or 0)
-    minutes = int(match.group("minutes") or 0)
-    seconds = int(match.group("seconds") or 0)
-    return timedelta(hours=hours, minutes=minutes, seconds=seconds)
-
-
-def _deserialize_visibility(data: Any) -> Visibility:
-    if isinstance(data, Visibility):
-        return data
-    if not data:
-        return PublicVisibility()
-    kind = data.get("kind") if isinstance(data, dict) else None
-    if kind == "Public":
-        return PublicVisibility()
-    if kind == "Private":
-        return PrivateVisibility(agents=set(data.get("agents", [])))
-    if kind == "Labelled":
-        return LabelledVisibility(required_labels=set(data.get("required_labels", [])))
-    if kind == "Tenant":
-        return TenantVisibility(tenant_id=data.get("tenant_id"))
-    if kind == "After":
-        ttl = _parse_iso_duration(data.get("ttl"))
-        then_data = data.get("then") if isinstance(data, dict) else None
-        then_visibility = _deserialize_visibility(then_data) if then_data else None
-        return AfterVisibility(ttl=ttl, then=then_visibility)
-    return PublicVisibility()
-
 
 @dataclass(slots=True)
 class ConsumptionRecord:
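The deleted module-level helpers move into flock/utils/visibility_utils.py (new in this release, +134 lines). That module is not part of this diff; below is a minimal sketch of it, assuming it mirrors the deleted _deserialize_visibility/_parse_iso_duration code above and imports the visibility classes from their new flock.core.visibility home (see the rename entry in the file list). Function names beyond deserialize_visibility, which appears in the new import, are assumptions.

    # Sketch of flock/utils/visibility_utils.py, reconstructed from the
    # deleted helpers; the actual module is not shown in this diff.
    import re
    from datetime import timedelta
    from typing import Any

    from flock.core.visibility import (  # moved from flock.visibility in 0.5.20
        AfterVisibility,
        LabelledVisibility,
        PrivateVisibility,
        PublicVisibility,
        TenantVisibility,
        Visibility,
    )

    ISO_DURATION_RE = re.compile(
        r"^P(?:T?(?:(?P<hours>\d+)H)?(?:(?P<minutes>\d+)M)?(?:(?P<seconds>\d+)S)?)$"
    )


    def parse_iso_duration(value: str | None) -> timedelta:
        """Parse the ISO-8601 duration subset used by After-visibility TTLs."""
        if not value:
            return timedelta(0)
        match = ISO_DURATION_RE.match(value)
        if not match:
            return timedelta(0)
        return timedelta(
            hours=int(match.group("hours") or 0),
            minutes=int(match.group("minutes") or 0),
            seconds=int(match.group("seconds") or 0),
        )


    def deserialize_visibility(data: Any) -> Visibility:
        """Rebuild a Visibility from its JSON form, defaulting to Public."""
        if isinstance(data, Visibility):
            return data
        if not data:
            return PublicVisibility()
        kind = data.get("kind") if isinstance(data, dict) else None
        if kind == "Private":
            return PrivateVisibility(agents=set(data.get("agents", [])))
        if kind == "Labelled":
            return LabelledVisibility(
                required_labels=set(data.get("required_labels", []))
            )
        if kind == "Tenant":
            return TenantVisibility(tenant_id=data.get("tenant_id"))
        if kind == "After":
            then_data = data.get("then")
            return AfterVisibility(
                ttl=parse_iso_duration(data.get("ttl")),
                then=deserialize_visibility(then_data) if then_data else None,
            )
        return PublicVisibility()  # covers "Public" and unknown kinds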
@@ -232,6 +188,12 @@ class InMemoryBlackboardStore(BlackboardStore):
         )
         self._agent_snapshots: dict[str, AgentSnapshotRecord] = {}
 
+        # Initialize helper subsystems
+        from flock.storage.in_memory.history_aggregator import HistoryAggregator
+
+        self._aggregator = ArtifactAggregator()
+        self._history_aggregator = HistoryAggregator()
+
     async def publish(self, artifact: Artifact) -> None:
         async with self._lock:
             self._by_id[artifact.id] = artifact
@@ -278,39 +240,19 @@ class InMemoryBlackboardStore(BlackboardStore):
         offset: int = 0,
         embed_meta: bool = False,
     ) -> tuple[list[Artifact | ArtifactEnvelope], int]:
+        """Query artifacts using artifact filter helper."""
         async with self._lock:
             artifacts = list(self._by_id.values())
 
+        # Use artifact filter helper for filtering logic
         filters = filters or FilterConfig()
-
-        if filters.type_names:
-            canonical = {
-                type_registry.resolve_name(name) for name in filters.type_names
-            }
+        from flock.storage.in_memory.artifact_filter import ArtifactFilter
 
-
-
-        def _matches(artifact: Artifact) -> bool:
-            if canonical and artifact.type not in canonical:
-                return False
-            if filters.produced_by and artifact.produced_by not in filters.produced_by:
-                return False
-            if filters.correlation_id and (
-                artifact.correlation_id is None
-                or str(artifact.correlation_id) != filters.correlation_id
-            ):
-                return False
-            if filters.tags and not filters.tags.issubset(artifact.tags):
-                return False
-            if visibility_filter and artifact.visibility.kind not in visibility_filter:
-                return False
-            if filters.start and artifact.created_at < filters.start:
-                return False
-            return not (filters.end and artifact.created_at > filters.end)
-
-        filtered = [artifact for artifact in artifacts if _matches(artifact)]
+        artifact_filter = ArtifactFilter(filters)
+        filtered = [a for a in artifacts if artifact_filter.matches(a)]
         filtered.sort(key=lambda a: (a.created_at, a.id))
 
+        # Apply pagination
         total = len(filtered)
        offset = max(offset, 0)
         if limit <= 0:
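The inlined _matches predicate moves behind an ArtifactFilter helper constructed from the FilterConfig. A sketch of what flock/storage/in_memory/artifact_filter.py (+114 lines) plausibly contains, reconstructed from the deleted predicate; only ArtifactFilter(filters) and .matches(...) are visible in the added call sites, everything else here is an assumption. The deleted code also consulted a visibility_filter whose setup is not fully visible in this hunk, so that check is omitted.

    # Hypothetical sketch of ArtifactFilter, reconstructed from the deleted
    # _matches() closure above; import paths are assumptions.
    from flock.core.artifacts import Artifact
    from flock.core.store import FilterConfig  # assumed location
    from flock.registry import type_registry


    class ArtifactFilter:
        def __init__(self, filters: FilterConfig) -> None:
            self._filters = filters
            # Resolve type-name aliases to canonical names once, up front.
            self._canonical = (
                {type_registry.resolve_name(n) for n in filters.type_names}
                if filters.type_names
                else set()
            )

        def matches(self, artifact: Artifact) -> bool:
            f = self._filters
            if self._canonical and artifact.type not in self._canonical:
                return False
            if f.produced_by and artifact.produced_by not in f.produced_by:
                return False
            if f.correlation_id and (
                artifact.correlation_id is None
                or str(artifact.correlation_id) != f.correlation_id
            ):
                return False
            if f.tags and not f.tags.issubset(artifact.tags):
                return False
            if f.start and artifact.created_at < f.start:
                return False
            return not (f.end and artifact.created_at > f.end)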
@@ -334,6 +276,7 @@ class InMemoryBlackboardStore(BlackboardStore):
         self,
         filters: FilterConfig | None = None,
     ) -> dict[str, Any]:
+        """Summarize artifacts using artifact aggregator."""
         filters = filters or FilterConfig()
         artifacts, total = await self.query_artifacts(
             filters=filters,
@@ -342,61 +285,21 @@ class InMemoryBlackboardStore(BlackboardStore):
             embed_meta=False,
         )
 
-        by_type: dict[str, int] = {}
-        by_producer: dict[str, int] = {}
-        by_visibility: dict[str, int] = {}
-        tag_counts: dict[str, int] = {}
-        earliest: datetime | None = None
-        latest: datetime | None = None
-
+        # Validate artifacts are correct type
         for artifact in artifacts:
             if not isinstance(artifact, Artifact):
                 raise TypeError("Expected Artifact instance")
-            by_type[artifact.type] = by_type.get(artifact.type, 0) + 1
-            by_producer[artifact.produced_by] = (
-                by_producer.get(artifact.produced_by, 0) + 1
-            )
-            kind = getattr(artifact.visibility, "kind", "Unknown")
-            by_visibility[kind] = by_visibility.get(kind, 0) + 1
-            for tag in artifact.tags:
-                tag_counts[tag] = tag_counts.get(tag, 0) + 1
-            if earliest is None or artifact.created_at < earliest:
-                earliest = artifact.created_at
-            if latest is None or artifact.created_at > latest:
-                latest = artifact.created_at
-
-        if earliest and latest:
-            span = latest - earliest
-            if span.days >= 2:
-                span_label = f"{span.days} days"
-            elif span.total_seconds() >= 3600:
-                hours = span.total_seconds() / 3600
-                span_label = f"{hours:.1f} hours"
-            elif span.total_seconds() > 0:
-                minutes = max(1, int(span.total_seconds() / 60))
-                span_label = f"{minutes} minutes"
-            else:
-                span_label = "moments"
-        else:
-            span_label = "empty"
 
-        return {
-            "total": total,
-            "by_type": by_type,
-            "by_producer": by_producer,
-            "by_visibility": by_visibility,
-            "tag_counts": tag_counts,
-            "earliest_created_at": earliest.isoformat() if earliest else None,
-            "latest_created_at": latest.isoformat() if latest else None,
-            "is_full_window": filters.start is None and filters.end is None,
-            "window_span_label": span_label,
-        }
+        # Delegate to aggregator for all aggregation logic
+        is_full_window = filters.start is None and filters.end is None
+        return self._aggregator.build_summary(artifacts, total, is_full_window)
 
     async def agent_history_summary(
         self,
         agent_id: str,
         filters: FilterConfig | None = None,
     ) -> dict[str, Any]:
+        """Summarize agent history using history aggregator."""
         filters = filters or FilterConfig()
         envelopes, _ = await self.query_artifacts(
             filters=filters,
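The counting and window-span logic deleted above now lives in ArtifactAggregator (flock/storage/artifact_aggregator.py, +158 lines), which is not shown in this diff. A sketch of its build_summary, assuming it reproduces the deleted loop; only the call build_summary(artifacts, total, is_full_window) is visible, so the private helper name is hypothetical.

    # Sketch of ArtifactAggregator, reconstructed from the deleted
    # aggregation and span-label code; the real module is not in this diff.
    from datetime import datetime
    from typing import Any

    from flock.core.artifacts import Artifact


    class ArtifactAggregator:
        def build_summary(
            self, artifacts: list[Artifact], total: int, is_full_window: bool
        ) -> dict[str, Any]:
            by_type: dict[str, int] = {}
            by_producer: dict[str, int] = {}
            by_visibility: dict[str, int] = {}
            tag_counts: dict[str, int] = {}
            earliest: datetime | None = None
            latest: datetime | None = None
            for artifact in artifacts:
                by_type[artifact.type] = by_type.get(artifact.type, 0) + 1
                by_producer[artifact.produced_by] = (
                    by_producer.get(artifact.produced_by, 0) + 1
                )
                kind = getattr(artifact.visibility, "kind", "Unknown")
                by_visibility[kind] = by_visibility.get(kind, 0) + 1
                for tag in artifact.tags:
                    tag_counts[tag] = tag_counts.get(tag, 0) + 1
                if earliest is None or artifact.created_at < earliest:
                    earliest = artifact.created_at
                if latest is None or artifact.created_at > latest:
                    latest = artifact.created_at
            return {
                "total": total,
                "by_type": by_type,
                "by_producer": by_producer,
                "by_visibility": by_visibility,
                "tag_counts": tag_counts,
                "earliest_created_at": earliest.isoformat() if earliest else None,
                "latest_created_at": latest.isoformat() if latest else None,
                "is_full_window": is_full_window,
                "window_span_label": self._span_label(earliest, latest),
            }

        def _span_label(
            self, earliest: datetime | None, latest: datetime | None
        ) -> str:
            # Human-readable label for the covered time window.
            if not (earliest and latest):
                return "empty"
            span = latest - earliest
            if span.days >= 2:
                return f"{span.days} days"
            if span.total_seconds() >= 3600:
                return f"{span.total_seconds() / 3600:.1f} hours"
            if span.total_seconds() > 0:
                return f"{max(1, int(span.total_seconds() / 60))} minutes"
            return "moments"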
@@ -405,27 +308,8 @@ class InMemoryBlackboardStore(BlackboardStore):
             embed_meta=True,
         )
 
-        produced_total = 0
-        produced_by_type: dict[str, int] = defaultdict(int)
-        consumed_total = 0
-        consumed_by_type: dict[str, int] = defaultdict(int)
-
-        for envelope in envelopes:
-            if not isinstance(envelope, ArtifactEnvelope):
-                raise TypeError("Expected ArtifactEnvelope instance")
-            artifact = envelope.artifact
-            if artifact.produced_by == agent_id:
-                produced_total += 1
-                produced_by_type[artifact.type] += 1
-            for consumption in envelope.consumptions:
-                if consumption.consumer == agent_id:
-                    consumed_total += 1
-                    consumed_by_type[artifact.type] += 1
-
-        return {
-            "produced": {"total": produced_total, "by_type": dict(produced_by_type)},
-            "consumed": {"total": consumed_total, "by_type": dict(consumed_by_type)},
-        }
+        # Delegate to history aggregator for aggregation logic
+        return self._history_aggregator.aggregate(envelopes, agent_id)
 
     async def upsert_agent_snapshot(self, snapshot: AgentSnapshotRecord) -> None:
         async with self._lock:
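Likewise, the produced/consumed tally moves to HistoryAggregator (flock/storage/in_memory/history_aggregator.py, +115 lines), not shown here. A sketch assuming it mirrors the deleted loop; only aggregate(envelopes, agent_id) is visible in the new call.

    # Sketch of HistoryAggregator, reconstructed from the deleted loop above.
    from collections import defaultdict
    from typing import Any


    class HistoryAggregator:
        def aggregate(self, envelopes, agent_id: str) -> dict[str, Any]:
            produced: dict[str, int] = defaultdict(int)
            consumed: dict[str, int] = defaultdict(int)
            for envelope in envelopes:
                artifact = envelope.artifact
                if artifact.produced_by == agent_id:
                    produced[artifact.type] += 1
                for consumption in envelope.consumptions:
                    if consumption.consumer == agent_id:
                        consumed[artifact.type] += 1
            return {
                "produced": {
                    "total": sum(produced.values()),
                    "by_type": dict(produced),
                },
                "consumed": {
                    "total": sum(consumed.values()),
                    "by_type": dict(consumed),
                },
            }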
@@ -451,8 +335,6 @@ __all__ = [
 class SQLiteBlackboardStore(BlackboardStore):
     """SQLite-backed implementation of :class:`BlackboardStore`."""
 
-    SCHEMA_VERSION = 3
-
     def __init__(self, db_path: str, *, timeout: float = 5.0) -> None:
         self._db_path = Path(db_path)
         self._timeout = timeout
@@ -461,6 +343,21 @@ class SQLiteBlackboardStore(BlackboardStore):
         self._write_lock = asyncio.Lock()
         self._schema_ready = False
 
+        # Initialize helper subsystems
+        from flock.storage.sqlite.agent_history_queries import AgentHistoryQueries
+        from flock.storage.sqlite.consumption_loader import SQLiteConsumptionLoader
+        from flock.storage.sqlite.query_builder import SQLiteQueryBuilder
+        from flock.storage.sqlite.query_params_builder import QueryParamsBuilder
+        from flock.storage.sqlite.schema_manager import SQLiteSchemaManager
+        from flock.storage.sqlite.summary_queries import SQLiteSummaryQueries
+
+        self._schema_manager = SQLiteSchemaManager()
+        self._query_builder = SQLiteQueryBuilder()
+        self._consumption_loader = SQLiteConsumptionLoader()
+        self._summary_queries = SQLiteSummaryQueries()
+        self._query_params_builder = QueryParamsBuilder()
+        self._agent_history_queries = AgentHistoryQueries()
+
     async def publish(self, artifact: Artifact) -> None:  # type: ignore[override]
         with tracer.start_as_current_span("sqlite_store.publish"):
             conn = await self._get_connection()
@@ -470,10 +367,9 @@ class SQLiteBlackboardStore(BlackboardStore):
             tags_json = json.dumps(sorted(artifact.tags))
             created_at = artifact.created_at.isoformat()
 
-
-
-
-            canonical_type = artifact.type
+            canonical_type = TypeResolutionHelper.safe_resolve(
+                type_registry, artifact.type
+            )
 
             record = {
                 "artifact_id": str(artifact.id),
@@ -693,16 +589,18 @@ class SQLiteBlackboardStore(BlackboardStore):
         offset: int = 0,
         embed_meta: bool = False,
     ) -> tuple[list[Artifact | ArtifactEnvelope], int]:
+        """Query artifacts using query params builder."""
         filters = filters or FilterConfig()
         conn = await self._get_connection()
 
         where_clause, params = self._build_filters(filters)
-        count_query = f"SELECT COUNT(*) AS total FROM artifacts{where_clause}"  # nosec B608
-        cursor = await conn.execute(count_query, tuple(params))
+        count_query = f"SELECT COUNT(*) AS total FROM artifacts{where_clause}"  # nosec B608
+        cursor = await conn.execute(count_query, tuple(params))
         total_row = await cursor.fetchone()
         await cursor.close()
         total = total_row["total"] if total_row else 0
 
+        # Build base query
         query = f"""
             SELECT
                 artifact_id,
@@ -719,17 +617,13 @@ class SQLiteBlackboardStore(BlackboardStore):
             FROM artifacts
             {where_clause}
             ORDER BY created_at ASC, rowid ASC
-        """  # nosec B608
-
-
-
-
-
-        if limit <= 0:
-            query_params = tuple(params)
-        else:
-            query += " LIMIT ? OFFSET ?"
-            query_params = (*params, limit, max(offset, 0))
+        """  # nosec B608
+
+        # Use query params builder for pagination
+        pagination_clause, query_params = (
+            self._query_params_builder.build_pagination_params(params, limit, offset)
+        )
+        query += pagination_clause
 
         cursor = await conn.execute(query, query_params)
         rows = await cursor.fetchall()
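The pagination branch deleted above becomes QueryParamsBuilder (flock/storage/sqlite/query_params_builder.py, +91 lines). Its body is not shown in this diff; the call site implies a clause/params pair, so a sketch is straightforward:

    # Sketch of QueryParamsBuilder, reconstructed from the deleted branch;
    # the return shape (clause, params) is inferred from the new call site.
    from typing import Any


    class QueryParamsBuilder:
        def build_pagination_params(
            self, params: list[Any], limit: int, offset: int
        ) -> tuple[str, tuple[Any, ...]]:
            # limit <= 0 keeps the old behavior: no LIMIT clause, all rows.
            if limit <= 0:
                return "", tuple(params)
            return " LIMIT ? OFFSET ?", (*params, limit, max(offset, 0))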
@@ -739,35 +633,11 @@ class SQLiteBlackboardStore(BlackboardStore):
         if not embed_meta or not artifacts:
             return artifacts, total
 
+        # Load consumptions using consumption loader
         artifact_ids = [str(artifact.id) for artifact in artifacts]
-        placeholders = ", ".join("?" for _ in artifact_ids)
-        consumption_query = f"""
-            SELECT
-                artifact_id,
-                consumer,
-                run_id,
-                correlation_id,
-                consumed_at
-            FROM artifact_consumptions
-            WHERE artifact_id IN ({placeholders})
-            ORDER BY consumed_at ASC
-        """  # nosec B608 - placeholders string contains only '?' characters
-        cursor = await conn.execute(consumption_query, artifact_ids)
-        consumption_rows = await cursor.fetchall()
-        await cursor.close()
-
-        consumptions_map: dict[UUID, list[ConsumptionRecord]] = defaultdict(list)
-        for row in consumption_rows:
-            artifact_uuid = UUID(row["artifact_id"])
-            consumptions_map[artifact_uuid].append(
-                ConsumptionRecord(
-                    artifact_id=artifact_uuid,
-                    consumer=row["consumer"],
-                    run_id=row["run_id"],
-                    correlation_id=row["correlation_id"],
-                    consumed_at=datetime.fromisoformat(row["consumed_at"]),
-                )
-            )
+        consumptions_map = await self._consumption_loader.load_for_artifacts(
+            conn, artifact_ids
+        )
 
         envelopes: list[ArtifactEnvelope] = [
             ArtifactEnvelope(
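The deleted consumption query moves to SQLiteConsumptionLoader (flock/storage/sqlite/consumption_loader.py, +100 lines), not shown here. A sketch assuming it reproduces the deleted code; the import path for ConsumptionRecord is an assumption, and rows are accessed by key, which presumes aiosqlite.Row as row_factory (as the original code also did).

    # Sketch of SQLiteConsumptionLoader, reconstructed from the deleted
    # inline query and mapping loop above.
    from collections import defaultdict
    from datetime import datetime
    from uuid import UUID

    import aiosqlite

    from flock.core.store import ConsumptionRecord  # assumed location


    class SQLiteConsumptionLoader:
        async def load_for_artifacts(
            self, conn: aiosqlite.Connection, artifact_ids: list[str]
        ) -> dict[UUID, list[ConsumptionRecord]]:
            placeholders = ", ".join("?" for _ in artifact_ids)
            query = f"""
                SELECT artifact_id, consumer, run_id, correlation_id, consumed_at
                FROM artifact_consumptions
                WHERE artifact_id IN ({placeholders})
                ORDER BY consumed_at ASC
            """  # nosec B608 - placeholders contains only '?' characters
            cursor = await conn.execute(query, artifact_ids)
            rows = await cursor.fetchall()
            await cursor.close()
            consumptions: dict[UUID, list[ConsumptionRecord]] = defaultdict(list)
            for row in rows:
                artifact_uuid = UUID(row["artifact_id"])
                consumptions[artifact_uuid].append(
                    ConsumptionRecord(
                        artifact_id=artifact_uuid,
                        consumer=row["consumer"],
                        run_id=row["run_id"],
                        correlation_id=row["correlation_id"],
                        consumed_at=datetime.fromisoformat(row["consumed_at"]),
                    )
                )
            return consumptions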
@@ -782,78 +652,32 @@ class SQLiteBlackboardStore(BlackboardStore):
         self,
         filters: FilterConfig | None = None,
     ) -> dict[str, Any]:
+        """Summarize artifacts using summary query builder."""
         filters = filters or FilterConfig()
         conn = await self._get_connection()
 
         where_clause, params = self._build_filters(filters)
         params_tuple = tuple(params)
 
-        count_query = f"SELECT COUNT(*) AS total FROM artifacts{where_clause}"  # nosec B608
-        cursor = await conn.execute(count_query, params_tuple)
-        total_row = await cursor.fetchone()
-        await cursor.close()
-        total = total_row["total"] if total_row else 0
-
-        by_type_query = f"""
-            SELECT canonical_type, COUNT(*) AS count
-            FROM artifacts
-            {where_clause}
-            GROUP BY canonical_type
-        """  # nosec B608
-        cursor = await conn.execute(by_type_query, params_tuple)
-        by_type_rows = await cursor.fetchall()
-        await cursor.close()
-        by_type = {row["canonical_type"]: row["count"] for row in by_type_rows}
-
-        by_producer_query = f"""
-            SELECT produced_by, COUNT(*) AS count
-            FROM artifacts
-            {where_clause}
-            GROUP BY produced_by
-        """  # nosec B608 - where_clause contains only parameter placeholders from _build_filters
-        cursor = await conn.execute(by_producer_query, params_tuple)
-        by_producer_rows = await cursor.fetchall()
-        await cursor.close()
-        by_producer = {row["produced_by"]: row["count"] for row in by_producer_rows}
-
-        by_visibility_query = f"""
-            SELECT json_extract(visibility, '$.kind') AS visibility_kind, COUNT(*) AS count
-            FROM artifacts
-            {where_clause}
-            GROUP BY json_extract(visibility, '$.kind')
-        """  # nosec B608 - where_clause contains only parameter placeholders from _build_filters
-        cursor = await conn.execute(by_visibility_query, params_tuple)
-        by_visibility_rows = await cursor.fetchall()
-        await cursor.close()
-        by_visibility = {
-            (row["visibility_kind"] or "Unknown"): row["count"]
-            for row in by_visibility_rows
-        }
-
-        tag_query = f"""
-            SELECT json_each.value AS tag, COUNT(*) AS count
-            FROM artifacts
-            JOIN json_each(artifacts.tags)
-            {where_clause}
-            GROUP BY json_each.value
-        """  # nosec B608 - where_clause contains only parameter placeholders produced by _build_filters
-        cursor = await conn.execute(tag_query, params_tuple)
-        tag_rows = await cursor.fetchall()
-        await cursor.close()
-        tag_counts = {row["tag"]: row["count"] for row in tag_rows}
-
-        range_query = f"""
-            SELECT MIN(created_at) AS earliest, MAX(created_at) AS latest
-            FROM artifacts
-            {where_clause}
-        """  # nosec B608 - safe composition using parameterized where_clause
-        cursor = await conn.execute(range_query, params_tuple)
-        range_row = await cursor.fetchone()
-        await cursor.close()
-        earliest = (
-            range_row["earliest"] if range_row and range_row["earliest"] else None
+        # Execute all summary queries using summary query builder
+        total = await self._summary_queries.count_total(
+            conn, where_clause, params_tuple
+        )
+        by_type = await self._summary_queries.group_by_type(
+            conn, where_clause, params_tuple
+        )
+        by_producer = await self._summary_queries.group_by_producer(
+            conn, where_clause, params_tuple
+        )
+        by_visibility = await self._summary_queries.group_by_visibility(
+            conn, where_clause, params_tuple
+        )
+        tag_counts = await self._summary_queries.count_tags(
+            conn, where_clause, params_tuple
+        )
+        earliest, latest = await self._summary_queries.get_date_range(
+            conn, where_clause, params_tuple
         )
-        latest = range_row["latest"] if range_row and range_row["latest"] else None
 
         return {
             "total": total,
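Each deleted GROUP BY query becomes one method on SQLiteSummaryQueries (flock/storage/sqlite/summary_queries.py, +194 lines). The module is not shown in this diff; a sketch of one representative method, reusing the deleted by_producer SQL verbatim, with the other five helpers following the same pattern:

    # Sketch of one SQLiteSummaryQueries method, reconstructed from the
    # deleted by_producer query above; method signatures match the new calls.
    from typing import Any

    import aiosqlite


    class SQLiteSummaryQueries:
        async def group_by_producer(
            self,
            conn: aiosqlite.Connection,
            where_clause: str,
            params: tuple[Any, ...],
        ) -> dict[str, int]:
            query = f"""
                SELECT produced_by, COUNT(*) AS count
                FROM artifacts
                {where_clause}
                GROUP BY produced_by
            """  # nosec B608 - where_clause contains only parameter placeholders
            cursor = await conn.execute(query, params)
            rows = await cursor.fetchall()
            await cursor.close()
            return {row["produced_by"]: row["count"] for row in rows}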
@@ -870,59 +694,27 @@ class SQLiteBlackboardStore(BlackboardStore):
         agent_id: str,
         filters: FilterConfig | None = None,
     ) -> dict[str, Any]:
+        """Summarize agent history using agent history queries."""
         filters = filters or FilterConfig()
         conn = await self._get_connection()
 
-
-        produced_by_type
-
-
-
-
-        produced_filter = FilterConfig(
-            type_names=set(filters.type_names) if filters.type_names else None,
-            produced_by={agent_id},
-            correlation_id=filters.correlation_id,
-            tags=set(filters.tags) if filters.tags else None,
-            visibility=set(filters.visibility) if filters.visibility else None,
-            start=filters.start,
-            end=filters.end,
-        )
-        where_clause, params = self._build_filters(produced_filter)
-        produced_query = f"""
-            SELECT canonical_type, COUNT(*) AS count
-            FROM artifacts
-            {where_clause}
-            GROUP BY canonical_type
-        """  # nosec B608 - produced_filter yields parameter placeholders only
-        cursor = await conn.execute(produced_query, tuple(params))
-        rows = await cursor.fetchall()
-        await cursor.close()
-        produced_by_type = {row["canonical_type"]: row["count"] for row in rows}
-        produced_total = sum(produced_by_type.values())
-
-        where_clause, params = self._build_filters(filters, table_alias="a")
-        params_with_consumer = (*params, agent_id)
-        consumption_query = f"""
-            SELECT a.canonical_type AS canonical_type, COUNT(*) AS count
-            FROM artifact_consumptions c
-            JOIN artifacts a ON a.artifact_id = c.artifact_id
-            {where_clause}
-            {"AND" if where_clause else "WHERE"} c.consumer = ?
-            GROUP BY a.canonical_type
-        """  # nosec B608 - where_clause joins parameter placeholders only
-        cursor = await conn.execute(consumption_query, params_with_consumer)
-        consumption_rows = await cursor.fetchall()
-        await cursor.close()
-
-        consumed_by_type = {
-            row["canonical_type"]: row["count"] for row in consumption_rows
-        }
-        consumed_total = sum(consumed_by_type.values())
+        # Use agent history queries helper for both produced and consumed
+        produced_by_type = await self._agent_history_queries.query_produced(
+            conn, agent_id, filters, self._build_filters
+        )
+        consumed_by_type = await self._agent_history_queries.query_consumed(
+            conn, agent_id, filters, self._build_filters
+        )
 
         return {
-            "produced": {"total": produced_total, "by_type": produced_by_type},
-            "consumed": {"total": consumed_total, "by_type": consumed_by_type},
+            "produced": {
+                "total": sum(produced_by_type.values()),
+                "by_type": produced_by_type,
+            },
+            "consumed": {
+                "total": sum(consumed_by_type.values()),
+                "by_type": consumed_by_type,
+            },
         }
 
     async def upsert_agent_snapshot(self, snapshot: AgentSnapshotRecord) -> None:
@@ -1051,113 +843,9 @@ class SQLiteBlackboardStore(BlackboardStore):
         return conn
 
     async def _apply_schema(self, conn: aiosqlite.Connection) -> None:
+        """Apply database schema using schema manager."""
         async with self._connection_lock:
-            await conn.execute(
-                """
-                CREATE TABLE IF NOT EXISTS schema_meta (
-                    id INTEGER PRIMARY KEY CHECK (id = 1),
-                    version INTEGER NOT NULL,
-                    applied_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP
-                )
-                """
-            )
-            await conn.execute(
-                """
-                INSERT OR IGNORE INTO schema_meta (id, version)
-                VALUES (1, ?)
-                """,
-                (self.SCHEMA_VERSION,),
-            )
-            await conn.execute(
-                """
-                CREATE TABLE IF NOT EXISTS artifacts (
-                    artifact_id TEXT PRIMARY KEY,
-                    type TEXT NOT NULL,
-                    canonical_type TEXT NOT NULL,
-                    produced_by TEXT NOT NULL,
-                    payload TEXT NOT NULL,
-                    version INTEGER NOT NULL,
-                    visibility TEXT NOT NULL,
-                    tags TEXT NOT NULL,
-                    correlation_id TEXT,
-                    partition_key TEXT,
-                    created_at TEXT NOT NULL
-                )
-                """
-            )
-            await conn.execute(
-                """
-                CREATE INDEX IF NOT EXISTS idx_artifacts_canonical_type_created
-                ON artifacts(canonical_type, created_at)
-                """
-            )
-            await conn.execute(
-                """
-                CREATE INDEX IF NOT EXISTS idx_artifacts_produced_by_created
-                ON artifacts(produced_by, created_at)
-                """
-            )
-            await conn.execute(
-                """
-                CREATE INDEX IF NOT EXISTS idx_artifacts_correlation
-                ON artifacts(correlation_id)
-                """
-            )
-            await conn.execute(
-                """
-                CREATE INDEX IF NOT EXISTS idx_artifacts_partition
-                ON artifacts(partition_key)
-                """
-            )
-            await conn.execute(
-                """
-                CREATE TABLE IF NOT EXISTS artifact_consumptions (
-                    artifact_id TEXT NOT NULL,
-                    consumer TEXT NOT NULL,
-                    run_id TEXT,
-                    correlation_id TEXT,
-                    consumed_at TEXT NOT NULL,
-                    PRIMARY KEY (artifact_id, consumer, consumed_at)
-                )
-                """
-            )
-            await conn.execute(
-                """
-                CREATE INDEX IF NOT EXISTS idx_consumptions_artifact
-                ON artifact_consumptions(artifact_id)
-                """
-            )
-            await conn.execute(
-                """
-                CREATE INDEX IF NOT EXISTS idx_consumptions_consumer
-                ON artifact_consumptions(consumer)
-                """
-            )
-            await conn.execute(
-                """
-                CREATE INDEX IF NOT EXISTS idx_consumptions_correlation
-                ON artifact_consumptions(correlation_id)
-                """
-            )
-            await conn.execute(
-                """
-                CREATE TABLE IF NOT EXISTS agent_snapshots (
-                    agent_name TEXT PRIMARY KEY,
-                    description TEXT NOT NULL,
-                    subscriptions TEXT NOT NULL,
-                    output_types TEXT NOT NULL,
-                    labels TEXT NOT NULL,
-                    first_seen TEXT NOT NULL,
-                    last_seen TEXT NOT NULL,
-                    signature TEXT NOT NULL
-                )
-                """
-            )
-            await conn.execute(
-                "UPDATE schema_meta SET version=? WHERE id=1",
-                (self.SCHEMA_VERSION,),
-            )
-            await conn.commit()
+            await self._schema_manager.apply_schema(conn)
         self._schema_ready = True
 
     def _build_filters(
@@ -1166,54 +854,11 @@ class SQLiteBlackboardStore(BlackboardStore):
         *,
         table_alias: str | None = None,
     ) -> tuple[str, list[Any]]:
-        prefix = f"{table_alias}." if table_alias else ""
-        conditions: list[str] = []
-        params: list[Any] = []
-
-        if filters.type_names:
-            canonical = {
-                type_registry.resolve_name(name) for name in filters.type_names
-            }
-            placeholders = ", ".join("?" for _ in canonical)
-            conditions.append(f"{prefix}canonical_type IN ({placeholders})")
-            params.extend(sorted(canonical))
-
-        if filters.produced_by:
-            placeholders = ", ".join("?" for _ in filters.produced_by)
-            conditions.append(f"{prefix}produced_by IN ({placeholders})")
-            params.extend(sorted(filters.produced_by))
-
-        if filters.correlation_id:
-            conditions.append(f"{prefix}correlation_id = ?")
-            params.append(filters.correlation_id)
-
-        if filters.visibility:
-            placeholders = ", ".join("?" for _ in filters.visibility)
-            conditions.append(
-                f"json_extract({prefix}visibility, '$.kind') IN ({placeholders})"
-            )
-            params.extend(sorted(filters.visibility))
-
-        if filters.start is not None:
-            conditions.append(f"{prefix}created_at >= ?")
-            params.append(filters.start.isoformat())
-
-        if filters.end is not None:
-            conditions.append(f"{prefix}created_at <= ?")
-            params.append(filters.end.isoformat())
-
-        if filters.tags:
-            column = f"{prefix}tags" if table_alias else "artifacts.tags"
-            for tag in sorted(filters.tags):
-                conditions.append(
-                    f"EXISTS (SELECT 1 FROM json_each({column}) WHERE json_each.value = ?)"  # nosec B608 - column is internal constant
-                )
-                params.append(tag)
-
-        where_clause = f" WHERE {' AND '.join(conditions)}" if conditions else ""
-        return where_clause, params
+        """Build WHERE clause using query builder."""
+        return self._query_builder.build_filters(filters, table_alias=table_alias)
 
     def _row_to_artifact(self, row: Any) -> Artifact:
+        """Convert database row to Artifact using visibility utils."""
         payload = json.loads(row["payload"])
         visibility_data = json.loads(row["visibility"])
         tags = json.loads(row["tags"])
@@ -1224,7 +869,7 @@ class SQLiteBlackboardStore(BlackboardStore):
             type=row["type"],
             payload=payload,
             produced_by=row["produced_by"],
-            visibility=_deserialize_visibility(visibility_data),
+            visibility=deserialize_visibility(visibility_data),
             tags=set(tags),
             correlation_id=correlation,
             partition_key=row["partition_key"],