flock-core 0.5.11__py3-none-any.whl → 0.5.20__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Note: this release of flock-core has been flagged as potentially problematic; consult the registry's advisory page for details.
- flock/__init__.py +1 -1
- flock/agent/__init__.py +30 -0
- flock/agent/builder_helpers.py +192 -0
- flock/agent/builder_validator.py +169 -0
- flock/agent/component_lifecycle.py +325 -0
- flock/agent/context_resolver.py +141 -0
- flock/agent/mcp_integration.py +212 -0
- flock/agent/output_processor.py +304 -0
- flock/api/__init__.py +20 -0
- flock/{api_models.py → api/models.py} +0 -2
- flock/{service.py → api/service.py} +3 -3
- flock/cli.py +2 -2
- flock/components/__init__.py +41 -0
- flock/components/agent/__init__.py +22 -0
- flock/{components.py → components/agent/base.py} +4 -3
- flock/{utility/output_utility_component.py → components/agent/output_utility.py} +12 -7
- flock/components/orchestrator/__init__.py +22 -0
- flock/{orchestrator_component.py → components/orchestrator/base.py} +5 -293
- flock/components/orchestrator/circuit_breaker.py +95 -0
- flock/components/orchestrator/collection.py +143 -0
- flock/components/orchestrator/deduplication.py +78 -0
- flock/core/__init__.py +30 -0
- flock/core/agent.py +953 -0
- flock/{artifacts.py → core/artifacts.py} +1 -1
- flock/{context_provider.py → core/context_provider.py} +3 -3
- flock/core/orchestrator.py +1102 -0
- flock/{store.py → core/store.py} +99 -454
- flock/{subscription.py → core/subscription.py} +1 -1
- flock/dashboard/collector.py +5 -5
- flock/dashboard/graph_builder.py +7 -7
- flock/dashboard/routes/__init__.py +21 -0
- flock/dashboard/routes/control.py +327 -0
- flock/dashboard/routes/helpers.py +340 -0
- flock/dashboard/routes/themes.py +76 -0
- flock/dashboard/routes/traces.py +521 -0
- flock/dashboard/routes/websocket.py +108 -0
- flock/dashboard/service.py +43 -1316
- flock/engines/dspy/__init__.py +20 -0
- flock/engines/dspy/artifact_materializer.py +216 -0
- flock/engines/dspy/signature_builder.py +474 -0
- flock/engines/dspy/streaming_executor.py +858 -0
- flock/engines/dspy_engine.py +45 -1330
- flock/engines/examples/simple_batch_engine.py +2 -2
- flock/examples.py +7 -7
- flock/logging/logging.py +1 -16
- flock/models/__init__.py +10 -0
- flock/orchestrator/__init__.py +45 -0
- flock/{artifact_collector.py → orchestrator/artifact_collector.py} +3 -3
- flock/orchestrator/artifact_manager.py +168 -0
- flock/{batch_accumulator.py → orchestrator/batch_accumulator.py} +2 -2
- flock/orchestrator/component_runner.py +389 -0
- flock/orchestrator/context_builder.py +167 -0
- flock/{correlation_engine.py → orchestrator/correlation_engine.py} +2 -2
- flock/orchestrator/event_emitter.py +167 -0
- flock/orchestrator/initialization.py +184 -0
- flock/orchestrator/lifecycle_manager.py +226 -0
- flock/orchestrator/mcp_manager.py +202 -0
- flock/orchestrator/scheduler.py +189 -0
- flock/orchestrator/server_manager.py +234 -0
- flock/orchestrator/tracing.py +147 -0
- flock/storage/__init__.py +10 -0
- flock/storage/artifact_aggregator.py +158 -0
- flock/storage/in_memory/__init__.py +6 -0
- flock/storage/in_memory/artifact_filter.py +114 -0
- flock/storage/in_memory/history_aggregator.py +115 -0
- flock/storage/sqlite/__init__.py +10 -0
- flock/storage/sqlite/agent_history_queries.py +154 -0
- flock/storage/sqlite/consumption_loader.py +100 -0
- flock/storage/sqlite/query_builder.py +112 -0
- flock/storage/sqlite/query_params_builder.py +91 -0
- flock/storage/sqlite/schema_manager.py +168 -0
- flock/storage/sqlite/summary_queries.py +194 -0
- flock/utils/__init__.py +14 -0
- flock/utils/async_utils.py +67 -0
- flock/{runtime.py → utils/runtime.py} +3 -3
- flock/utils/time_utils.py +53 -0
- flock/utils/type_resolution.py +38 -0
- flock/{utilities.py → utils/utilities.py} +2 -2
- flock/utils/validation.py +57 -0
- flock/utils/visibility.py +79 -0
- flock/utils/visibility_utils.py +134 -0
- {flock_core-0.5.11.dist-info → flock_core-0.5.20.dist-info}/METADATA +18 -4
- {flock_core-0.5.11.dist-info → flock_core-0.5.20.dist-info}/RECORD +89 -33
- flock/agent.py +0 -1578
- flock/orchestrator.py +0 -1983
- /flock/{visibility.py → core/visibility.py} +0 -0
- /flock/{system_artifacts.py → models/system_artifacts.py} +0 -0
- /flock/{helper → utils}/cli_helper.py +0 -0
- {flock_core-0.5.11.dist-info → flock_core-0.5.20.dist-info}/WHEEL +0 -0
- {flock_core-0.5.11.dist-info → flock_core-0.5.20.dist-info}/entry_points.txt +0 -0
- {flock_core-0.5.11.dist-info → flock_core-0.5.20.dist-info}/licenses/LICENSE +0 -0
|
@@ -0,0 +1,154 @@
|
|
|
1
|
+
"""Agent history query utilities for SQLite storage.
|
|
2
|
+
|
|
3
|
+
Handles agent-specific produced/consumed queries for history summaries.
|
|
4
|
+
Extracted from store.py to reduce complexity from B (10) to A (5).
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from __future__ import annotations
|
|
8
|
+
|
|
9
|
+
from typing import TYPE_CHECKING, Any
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
if TYPE_CHECKING:
|
|
13
|
+
import aiosqlite
|
|
14
|
+
|
|
15
|
+
from flock.core.store import FilterConfig
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
class AgentHistoryQueries:
    """
    Execute SQLite queries for agent history summaries.

    Provides focused methods for querying produced and consumed artifacts
    for a specific agent.
    """

    async def query_produced(
        self,
        conn: aiosqlite.Connection,
        agent_id: str,
        filters: FilterConfig,
        build_filters_fn: Any,  # Callable for filter building
    ) -> dict[str, int]:
        """
        Query artifacts produced by agent, grouped by type.

        Args:
            conn: Active database connection
            agent_id: Producer to query for
            filters: Base filter configuration
            build_filters_fn: Function to build WHERE clause from filters

        Returns:
            Dict mapping canonical types to production counts

        Examples:
            >>> queries = AgentHistoryQueries()
            >>> produced = await queries.query_produced(
            ...     conn, "agent1", filters, builder
            ... )
            >>> produced
            {"Result": 10, "Message": 5}
        """
        # Short-circuit: the caller's producer filter excludes this agent.
        if filters.produced_by and agent_id not in filters.produced_by:
            return {}

        # Narrow the base filters to this specific agent's production.
        produced_filter = self._derive_produced_filter(filters, agent_id)

        # Build WHERE clause (contains only '?' placeholders, values in params).
        where_clause, params = build_filters_fn(produced_filter)

        produced_query = f"""
            SELECT canonical_type, COUNT(*) AS count
            FROM artifacts
            {where_clause}
            GROUP BY canonical_type
        """  # nosec B608 - where_clause contains only parameter placeholders

        rows = await self._fetch_all(conn, produced_query, tuple(params))
        return {row["canonical_type"]: row["count"] for row in rows}

    async def query_consumed(
        self,
        conn: aiosqlite.Connection,
        agent_id: str,
        filters: FilterConfig,
        build_filters_fn: Any,  # Callable for filter building
    ) -> dict[str, int]:
        """
        Query artifacts consumed by agent, grouped by type.

        Args:
            conn: Active database connection
            agent_id: Consumer to query for
            filters: Base filter configuration
            build_filters_fn: Function to build WHERE clause from filters

        Returns:
            Dict mapping canonical types to consumption counts

        Examples:
            >>> queries = AgentHistoryQueries()
            >>> consumed = await queries.query_consumed(
            ...     conn, "agent1", filters, builder
            ... )
            >>> consumed
            {"Result": 8, "Message": 3}
        """
        # Build WHERE clause with table alias for the JOIN below.
        where_clause, params = build_filters_fn(filters, table_alias="a")
        params_with_consumer = (*params, agent_id)

        # build_filters_fn emits " WHERE ..." only when it has conditions,
        # so the consumer predicate must adapt its connector keyword.
        connector = "AND" if where_clause else "WHERE"
        consumption_query = f"""
            SELECT a.canonical_type AS canonical_type, COUNT(*) AS count
            FROM artifact_consumptions c
            JOIN artifacts a ON a.artifact_id = c.artifact_id
            {where_clause}
            {connector} c.consumer = ?
            GROUP BY a.canonical_type
        """  # nosec B608 - where_clause contains only parameter placeholders

        rows = await self._fetch_all(conn, consumption_query, params_with_consumer)
        return {row["canonical_type"]: row["count"] for row in rows}

    @staticmethod
    async def _fetch_all(
        conn: aiosqlite.Connection,
        query: str,
        params: tuple[Any, ...],
    ) -> list[Any]:
        """
        Execute *query* with *params* and return all rows.

        Fix: the previous inline execute/fetch/close sequence leaked the
        cursor when fetchall() raised; the cursor is now always closed.
        """
        cursor = await conn.execute(query, params)
        try:
            return await cursor.fetchall()
        finally:
            await cursor.close()

    def _derive_produced_filter(
        self, base_filters: FilterConfig, agent_id: str
    ) -> FilterConfig:
        """
        Derive a filter configuration specific to agent's production.

        Creates a new FilterConfig with agent_id as producer while
        preserving other filter criteria.

        Args:
            base_filters: Base filter configuration
            agent_id: Agent to filter production for

        Returns:
            New FilterConfig with agent_id as producer
        """
        # Local import avoids a circular dependency with flock.core.store.
        from flock.core.store import FilterConfig

        return FilterConfig(
            type_names=set(base_filters.type_names)
            if base_filters.type_names
            else None,
            produced_by={agent_id},
            correlation_id=base_filters.correlation_id,
            tags=set(base_filters.tags) if base_filters.tags else None,
            visibility=set(base_filters.visibility)
            if base_filters.visibility
            else None,
            start=base_filters.start,
            end=base_filters.end,
        )
|
|
@@ -0,0 +1,100 @@
|
|
|
1
|
+
"""SQLite consumption record loading utilities.
|
|
2
|
+
|
|
3
|
+
Handles loading and organizing consumption records for artifacts.
|
|
4
|
+
Extracted from query_artifacts to reduce complexity.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from __future__ import annotations
|
|
8
|
+
|
|
9
|
+
from collections import defaultdict
|
|
10
|
+
from datetime import datetime
|
|
11
|
+
from typing import TYPE_CHECKING
|
|
12
|
+
from uuid import UUID
|
|
13
|
+
|
|
14
|
+
from flock.core.store import ConsumptionRecord
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
if TYPE_CHECKING:
|
|
18
|
+
import aiosqlite
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
class SQLiteConsumptionLoader:
    """
    Loads consumption records from SQLite database.

    Separates consumption loading logic from main query method
    for better testability and maintainability.
    """

    async def load_for_artifacts(
        self,
        conn: aiosqlite.Connection,
        artifact_ids: list[str],
    ) -> dict[UUID, list[ConsumptionRecord]]:
        """
        Load consumption records for given artifact IDs.

        Args:
            conn: Active database connection
            artifact_ids: List of artifact ID strings

        Returns:
            Dict mapping artifact UUIDs to their consumption records

        Example:
            >>> loader = SQLiteConsumptionLoader()
            >>> consumptions = await loader.load_for_artifacts(conn, ["id1", "id2"])
            >>> consumptions[UUID("id1")]  # List[ConsumptionRecord]
        """
        # Nothing to look up: skip the round-trip (an empty IN () is invalid SQL).
        if not artifact_ids:
            return {}

        # One '?' per artifact id, bound safely below.
        marks = ", ".join("?" for _ in artifact_ids)
        sql = f"""
            SELECT
                artifact_id,
                consumer,
                run_id,
                correlation_id,
                consumed_at
            FROM artifact_consumptions
            WHERE artifact_id IN ({marks})
            ORDER BY consumed_at ASC
        """  # nosec B608 - placeholders string contains only '?' characters

        cursor = await conn.execute(sql, artifact_ids)
        fetched = await cursor.fetchall()
        await cursor.close()

        return self._build_consumption_map(fetched)

    def _build_consumption_map(
        self, rows: list[aiosqlite.Row]
    ) -> dict[UUID, list[ConsumptionRecord]]:
        """
        Group raw consumption rows by artifact UUID.

        Args:
            rows: Database rows with consumption data

        Returns:
            Dict mapping artifact UUIDs to consumption records
        """
        grouped: dict[UUID, list[ConsumptionRecord]] = defaultdict(list)

        for entry in rows:
            key = UUID(entry["artifact_id"])
            record = ConsumptionRecord(
                artifact_id=key,
                consumer=entry["consumer"],
                run_id=entry["run_id"],
                correlation_id=entry["correlation_id"],
                consumed_at=datetime.fromisoformat(entry["consumed_at"]),
            )
            grouped[key].append(record)

        return grouped
|
|
@@ -0,0 +1,112 @@
|
|
|
1
|
+
"""SQLite query building utilities for artifact filtering.
|
|
2
|
+
|
|
3
|
+
This module constructs safe, parameterized SQL queries from filter configurations.
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
from __future__ import annotations
|
|
7
|
+
|
|
8
|
+
from typing import TYPE_CHECKING, Any
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
if TYPE_CHECKING:
|
|
12
|
+
from flock.core.store import FilterConfig
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class SQLiteQueryBuilder:
    """
    Builds safe SQL queries with proper parameter binding.

    Responsibilities:
    - Build SELECT queries from filter configurations
    - Construct WHERE clauses with parameter placeholders
    - Prevent SQL injection via proper parameter binding
    - Support complex filtering (types, producers, tags, visibility, dates)

    All queries use parameter placeholders (?) and return both the SQL string
    and parameter list to ensure safe execution.
    """

    def build_filters(
        self,
        filters: FilterConfig,
        *,
        table_alias: str | None = None,
    ) -> tuple[str, list[Any]]:
        """
        Build WHERE clause and parameters from filter configuration.

        Args:
            filters: Filter configuration specifying query constraints
            table_alias: Optional table alias prefix (e.g., "a" for "a.type")

        Returns:
            Tuple of (where_clause, parameters):
            - where_clause: SQL WHERE clause string (e.g., " WHERE type = ?")
            - parameters: List of values for parameter binding

        Example:
            >>> filters = FilterConfig(type_names={"BugReport"}, limit=10)
            >>> where, params = builder.build_filters(filters)
            >>> # where = " WHERE canonical_type IN (?)"
            >>> # params = ["flock.BugReport"]

        Security:
            All values are bound via parameters, preventing SQL injection.
            The WHERE clause contains only placeholders (?), never raw values.
        """
        # Import here to avoid circular dependency
        from flock.registry import type_registry

        prefix = f"{table_alias}." if table_alias else ""
        clauses: list[str] = []
        bound: list[Any] = []

        def emit(clause: str, *values: Any) -> None:
            # Record one condition together with its bound values.
            clauses.append(clause)
            bound.extend(values)

        # Type filter: resolve user-facing names to canonical ones first.
        if filters.type_names:
            canonical = {
                type_registry.resolve_name(name) for name in filters.type_names
            }
            marks = ", ".join("?" for _ in canonical)
            emit(f"{prefix}canonical_type IN ({marks})", *sorted(canonical))

        # Producer filter
        if filters.produced_by:
            marks = ", ".join("?" for _ in filters.produced_by)
            emit(f"{prefix}produced_by IN ({marks})", *sorted(filters.produced_by))

        # Correlation ID filter
        if filters.correlation_id:
            emit(f"{prefix}correlation_id = ?", filters.correlation_id)

        # Visibility filter (kind is stored inside a JSON column)
        if filters.visibility:
            marks = ", ".join("?" for _ in filters.visibility)
            emit(
                f"json_extract({prefix}visibility, '$.kind') IN ({marks})",
                *sorted(filters.visibility),
            )

        # Date range filters (ISO-8601 strings compare chronologically)
        if filters.start is not None:
            emit(f"{prefix}created_at >= ?", filters.start.isoformat())
        if filters.end is not None:
            emit(f"{prefix}created_at <= ?", filters.end.isoformat())

        # Tag filter: each requested tag must appear in the JSON tags array.
        if filters.tags:
            column = f"{prefix}tags" if table_alias else "artifacts.tags"
            for tag in sorted(filters.tags):
                emit(
                    f"EXISTS (SELECT 1 FROM json_each({column}) WHERE json_each.value = ?)",  # nosec B608 - column is internal constant
                    tag,
                )

        if not clauses:
            return "", bound
        return f" WHERE {' AND '.join(clauses)}", bound
|
|
@@ -0,0 +1,91 @@
|
|
|
1
|
+
"""Query parameter building utilities for SQLite storage.
|
|
2
|
+
|
|
3
|
+
Handles pagination parameter construction for SQLite queries.
|
|
4
|
+
Extracted from store.py to reduce complexity from B (10) to A (4).
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from __future__ import annotations
|
|
8
|
+
|
|
9
|
+
from typing import Any
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class QueryParamsBuilder:
    """
    Build query parameters for SQLite pagination.

    Simplifies limit/offset parameter handling by providing focused
    handling for the different pagination scenarios.
    """

    def build_pagination_params(
        self,
        base_params: list[Any],
        limit: int,
        offset: int,
    ) -> tuple[str, tuple[Any, ...]]:
        """
        Build LIMIT/OFFSET clause and parameters for pagination.

        Handles three scenarios:
        1. No limit, no offset: return all results (empty clause)
        2. No limit, with offset: skip the first N results
        3. With limit: standard pagination

        Args:
            base_params: Base query parameters (from WHERE clause)
            limit: Maximum number of results (0 or negative = unlimited)
            offset: Number of results to skip (negative treated as 0)

        Returns:
            Tuple of (SQL clause suffix, complete parameters tuple)

        Examples:
            >>> builder = QueryParamsBuilder()
            >>> clause, params = builder.build_pagination_params([], 10, 0)
            >>> clause
            ' LIMIT ? OFFSET ?'
            >>> params
            (10, 0)
        """
        skip = max(offset, 0)

        if limit > 0:
            # Standard pagination: bounded page starting after `skip` rows.
            return " LIMIT ? OFFSET ?", (*base_params, limit, skip)

        if skip > 0:
            # SQLite requires LIMIT -1 so that OFFSET applies without a row cap.
            return " LIMIT -1 OFFSET ?", (*base_params, skip)

        # No limit and no offset: leave the query untouched.
        return "", tuple(base_params)
|
|
@@ -0,0 +1,168 @@
|
|
|
1
|
+
"""SQLite schema management for Flock blackboard store.
|
|
2
|
+
|
|
3
|
+
This module handles database schema creation, versioning, and migrations.
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
from __future__ import annotations
|
|
7
|
+
|
|
8
|
+
from typing import TYPE_CHECKING
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
if TYPE_CHECKING:
|
|
12
|
+
import aiosqlite
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class SQLiteSchemaManager:
    """
    Manages SQLite database schema for blackboard storage.

    Responsibilities:
    - Schema version tracking
    - Table creation
    - Index creation
    - Schema migrations

    The schema includes:
    - artifacts table: Core artifact storage
    - artifact_consumptions table: Consumption tracking
    - agent_snapshots table: Agent metadata
    - schema_meta table: Version tracking
    """

    # Bump this when the DDL below changes shape; apply_schema records it.
    SCHEMA_VERSION = 3

    async def apply_schema(self, conn: aiosqlite.Connection) -> None:
        """
        Apply the blackboard schema to the SQLite connection.

        Creates all tables and indices if they don't exist (the DDL is
        idempotent), then records SCHEMA_VERSION and commits. Decomposed
        into one helper per logical table group for readability.

        Args:
            conn: Active SQLite connection

        Schema Tables:
            - schema_meta: Tracks schema version
            - artifacts: Core artifact storage
            - artifact_consumptions: Consumption events
            - agent_snapshots: Agent metadata
        """
        await self._create_version_tracking(conn)
        await self._create_artifacts(conn)
        await self._create_consumptions(conn)
        await self._create_agent_snapshots(conn)

        # Stamp the version this code knows about (the id=1 row always exists
        # after _create_version_tracking).
        await conn.execute(
            "UPDATE schema_meta SET version=? WHERE id=1",
            (self.SCHEMA_VERSION,),
        )
        await conn.commit()

    async def _create_version_tracking(self, conn: aiosqlite.Connection) -> None:
        """Create schema_meta and seed its singleton version row."""
        await conn.execute(
            """
            CREATE TABLE IF NOT EXISTS schema_meta (
                id INTEGER PRIMARY KEY CHECK (id = 1),
                version INTEGER NOT NULL,
                applied_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP
            )
            """
        )
        # INSERT OR IGNORE leaves an existing version row untouched.
        await conn.execute(
            """
            INSERT OR IGNORE INTO schema_meta (id, version)
            VALUES (1, ?)
            """,
            (self.SCHEMA_VERSION,),
        )

    async def _create_artifacts(self, conn: aiosqlite.Connection) -> None:
        """Create the main artifacts table and its performance indices."""
        await conn.execute(
            """
            CREATE TABLE IF NOT EXISTS artifacts (
                artifact_id TEXT PRIMARY KEY,
                type TEXT NOT NULL,
                canonical_type TEXT NOT NULL,
                produced_by TEXT NOT NULL,
                payload TEXT NOT NULL,
                version INTEGER NOT NULL,
                visibility TEXT NOT NULL,
                tags TEXT NOT NULL,
                correlation_id TEXT,
                partition_key TEXT,
                created_at TEXT NOT NULL
            )
            """
        )
        # Indices match the dominant filter patterns (type/producer + time).
        for index_ddl in (
            """
            CREATE INDEX IF NOT EXISTS idx_artifacts_canonical_type_created
            ON artifacts(canonical_type, created_at)
            """,
            """
            CREATE INDEX IF NOT EXISTS idx_artifacts_produced_by_created
            ON artifacts(produced_by, created_at)
            """,
            """
            CREATE INDEX IF NOT EXISTS idx_artifacts_correlation
            ON artifacts(correlation_id)
            """,
            """
            CREATE INDEX IF NOT EXISTS idx_artifacts_partition
            ON artifacts(partition_key)
            """,
        ):
            await conn.execute(index_ddl)

    async def _create_consumptions(self, conn: aiosqlite.Connection) -> None:
        """Create the consumption-tracking table and its indices."""
        await conn.execute(
            """
            CREATE TABLE IF NOT EXISTS artifact_consumptions (
                artifact_id TEXT NOT NULL,
                consumer TEXT NOT NULL,
                run_id TEXT,
                correlation_id TEXT,
                consumed_at TEXT NOT NULL,
                PRIMARY KEY (artifact_id, consumer, consumed_at)
            )
            """
        )
        for index_ddl in (
            """
            CREATE INDEX IF NOT EXISTS idx_consumptions_artifact
            ON artifact_consumptions(artifact_id)
            """,
            """
            CREATE INDEX IF NOT EXISTS idx_consumptions_consumer
            ON artifact_consumptions(consumer)
            """,
            """
            CREATE INDEX IF NOT EXISTS idx_consumptions_correlation
            ON artifact_consumptions(correlation_id)
            """,
        ):
            await conn.execute(index_ddl)

    async def _create_agent_snapshots(self, conn: aiosqlite.Connection) -> None:
        """Create the agent_snapshots metadata table."""
        await conn.execute(
            """
            CREATE TABLE IF NOT EXISTS agent_snapshots (
                agent_name TEXT PRIMARY KEY,
                description TEXT NOT NULL,
                subscriptions TEXT NOT NULL,
                output_types TEXT NOT NULL,
                labels TEXT NOT NULL,
                first_seen TEXT NOT NULL,
                last_seen TEXT NOT NULL,
                signature TEXT NOT NULL
            )
            """
        )
|