flock-core 0.5.10__py3-none-any.whl → 0.5.20__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of flock-core might be problematic. Click here for more details.

Files changed (91)
  1. flock/__init__.py +1 -1
  2. flock/agent/__init__.py +30 -0
  3. flock/agent/builder_helpers.py +192 -0
  4. flock/agent/builder_validator.py +169 -0
  5. flock/agent/component_lifecycle.py +325 -0
  6. flock/agent/context_resolver.py +141 -0
  7. flock/agent/mcp_integration.py +212 -0
  8. flock/agent/output_processor.py +304 -0
  9. flock/api/__init__.py +20 -0
  10. flock/api/models.py +283 -0
  11. flock/{service.py → api/service.py} +121 -63
  12. flock/cli.py +2 -2
  13. flock/components/__init__.py +41 -0
  14. flock/components/agent/__init__.py +22 -0
  15. flock/{components.py → components/agent/base.py} +4 -3
  16. flock/{utility/output_utility_component.py → components/agent/output_utility.py} +12 -7
  17. flock/components/orchestrator/__init__.py +22 -0
  18. flock/{orchestrator_component.py → components/orchestrator/base.py} +5 -293
  19. flock/components/orchestrator/circuit_breaker.py +95 -0
  20. flock/components/orchestrator/collection.py +143 -0
  21. flock/components/orchestrator/deduplication.py +78 -0
  22. flock/core/__init__.py +30 -0
  23. flock/core/agent.py +953 -0
  24. flock/{artifacts.py → core/artifacts.py} +1 -1
  25. flock/{context_provider.py → core/context_provider.py} +3 -3
  26. flock/core/orchestrator.py +1102 -0
  27. flock/{store.py → core/store.py} +99 -454
  28. flock/{subscription.py → core/subscription.py} +1 -1
  29. flock/dashboard/collector.py +5 -5
  30. flock/dashboard/graph_builder.py +7 -7
  31. flock/dashboard/routes/__init__.py +21 -0
  32. flock/dashboard/routes/control.py +327 -0
  33. flock/dashboard/routes/helpers.py +340 -0
  34. flock/dashboard/routes/themes.py +76 -0
  35. flock/dashboard/routes/traces.py +521 -0
  36. flock/dashboard/routes/websocket.py +108 -0
  37. flock/dashboard/service.py +44 -1294
  38. flock/engines/dspy/__init__.py +20 -0
  39. flock/engines/dspy/artifact_materializer.py +216 -0
  40. flock/engines/dspy/signature_builder.py +474 -0
  41. flock/engines/dspy/streaming_executor.py +858 -0
  42. flock/engines/dspy_engine.py +45 -1330
  43. flock/engines/examples/simple_batch_engine.py +2 -2
  44. flock/examples.py +7 -7
  45. flock/logging/logging.py +1 -16
  46. flock/models/__init__.py +10 -0
  47. flock/models/system_artifacts.py +33 -0
  48. flock/orchestrator/__init__.py +45 -0
  49. flock/{artifact_collector.py → orchestrator/artifact_collector.py} +3 -3
  50. flock/orchestrator/artifact_manager.py +168 -0
  51. flock/{batch_accumulator.py → orchestrator/batch_accumulator.py} +2 -2
  52. flock/orchestrator/component_runner.py +389 -0
  53. flock/orchestrator/context_builder.py +167 -0
  54. flock/{correlation_engine.py → orchestrator/correlation_engine.py} +2 -2
  55. flock/orchestrator/event_emitter.py +167 -0
  56. flock/orchestrator/initialization.py +184 -0
  57. flock/orchestrator/lifecycle_manager.py +226 -0
  58. flock/orchestrator/mcp_manager.py +202 -0
  59. flock/orchestrator/scheduler.py +189 -0
  60. flock/orchestrator/server_manager.py +234 -0
  61. flock/orchestrator/tracing.py +147 -0
  62. flock/storage/__init__.py +10 -0
  63. flock/storage/artifact_aggregator.py +158 -0
  64. flock/storage/in_memory/__init__.py +6 -0
  65. flock/storage/in_memory/artifact_filter.py +114 -0
  66. flock/storage/in_memory/history_aggregator.py +115 -0
  67. flock/storage/sqlite/__init__.py +10 -0
  68. flock/storage/sqlite/agent_history_queries.py +154 -0
  69. flock/storage/sqlite/consumption_loader.py +100 -0
  70. flock/storage/sqlite/query_builder.py +112 -0
  71. flock/storage/sqlite/query_params_builder.py +91 -0
  72. flock/storage/sqlite/schema_manager.py +168 -0
  73. flock/storage/sqlite/summary_queries.py +194 -0
  74. flock/utils/__init__.py +14 -0
  75. flock/utils/async_utils.py +67 -0
  76. flock/{runtime.py → utils/runtime.py} +3 -3
  77. flock/utils/time_utils.py +53 -0
  78. flock/utils/type_resolution.py +38 -0
  79. flock/{utilities.py → utils/utilities.py} +2 -2
  80. flock/utils/validation.py +57 -0
  81. flock/utils/visibility.py +79 -0
  82. flock/utils/visibility_utils.py +134 -0
  83. {flock_core-0.5.10.dist-info → flock_core-0.5.20.dist-info}/METADATA +69 -61
  84. {flock_core-0.5.10.dist-info → flock_core-0.5.20.dist-info}/RECORD +89 -31
  85. flock/agent.py +0 -1578
  86. flock/orchestrator.py +0 -1746
  87. /flock/{visibility.py → core/visibility.py} +0 -0
  88. /flock/{helper → utils}/cli_helper.py +0 -0
  89. {flock_core-0.5.10.dist-info → flock_core-0.5.20.dist-info}/WHEEL +0 -0
  90. {flock_core-0.5.10.dist-info → flock_core-0.5.20.dist-info}/entry_points.txt +0 -0
  91. {flock_core-0.5.10.dist-info → flock_core-0.5.20.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,147 @@
1
+ """Unified tracing utilities for orchestrator workflows.
2
+
3
+ Handles OpenTelemetry workflow spans and trace database management.
4
+ Extracted from orchestrator.py to reduce complexity.
5
+ """
6
+
7
+ from __future__ import annotations
8
+
9
+ from contextlib import asynccontextmanager
10
+ from pathlib import Path
11
+ from typing import TYPE_CHECKING, Any
12
+
13
+ from opentelemetry import trace
14
+ from opentelemetry.trace import Status, StatusCode
15
+
16
+
17
+ if TYPE_CHECKING:
18
+ from collections.abc import AsyncGenerator
19
+
20
+
21
class TracingManager:
    """Coordinates unified tracing for orchestrator workflows.

    Offers an async context manager that wraps a whole execution in one
    workflow span, plus a static helper for wiping the trace database.
    """

    def __init__(self) -> None:
        """Initialize with no active workflow span."""
        # Span of the innermost active traced_run, if any; None otherwise.
        self._workflow_span = None

    @asynccontextmanager
    async def traced_run(
        self, name: str = "workflow", flock_id: str | None = None
    ) -> AsyncGenerator[Any, None]:
        """Wrap an entire execution in a single unified trace.

        Creates one parent span so that every operation performed inside the
        context (publish, run_until_idle, ...) shares the same trace_id.

        Args:
            name: Name for the workflow trace (default: "workflow").
            flock_id: Optional Flock instance ID for attribution.

        Yields:
            The workflow span, so callers may set custom attributes.

        Examples:
            async with tracing_manager.traced_run("pizza_workflow"):
                await flock.publish(pizza_idea)
                await flock.run_until_idle()

            async with tracing_manager.traced_run("data_pipeline") as span:
                span.set_attribute("pipeline.version", "2.0")
                await flock.publish(data)
        """
        tracer = trace.get_tracer(__name__)
        with tracer.start_as_current_span(name) as span:
            # Tag the span so dashboards can recognize workflow roots.
            span.set_attribute("flock.workflow", True)
            span.set_attribute("workflow.name", name)
            if flock_id:
                span.set_attribute("workflow.flock_id", flock_id)

            # Remember the enclosing workflow span (supports nesting) and
            # expose this one to nested operations.
            outer_span = self._workflow_span
            self._workflow_span = span
            try:
                yield span
            except Exception as exc:
                span.set_status(Status(StatusCode.ERROR, str(exc)))
                span.record_exception(exc)
                raise
            else:
                span.set_status(Status(StatusCode.OK))
            finally:
                # Always restore the previous workflow span on exit.
                self._workflow_span = outer_span

    @property
    def current_workflow_span(self) -> Any:
        """The currently active workflow span (for nested operations)."""
        return self._workflow_span

    @staticmethod
    def clear_traces(db_path: str = ".flock/traces.duckdb") -> dict[str, Any]:
        """Delete every span from the DuckDB trace database.

        Useful for resetting debug sessions or cleaning up test data.

        Args:
            db_path: Path to the DuckDB database file
                (default: ".flock/traces.duckdb").

        Returns:
            Result dictionary with keys:
            - success: Whether the operation succeeded.
            - deleted_count: Number of spans removed.
            - error: Error message, or None on success.

        Examples:
            result = TracingManager.clear_traces()
            if result['success']:
                print(f"Deleted {result['deleted_count']} spans")
            else:
                print(f"Error: {result['error']}")
        """
        try:
            # Imported lazily so the manager works without duckdb installed;
            # a missing module simply surfaces as a failed result below.
            import duckdb

            target = Path(db_path)
            if not target.exists():
                return {
                    "success": False,
                    "deleted_count": 0,
                    "error": f"Database file not found: {db_path}",
                }

            conn = duckdb.connect(str(target))
            try:
                # Capture the row count before wiping so we can report it.
                row = conn.execute("SELECT COUNT(*) FROM spans").fetchone()
                removed = row[0] if row else 0

                conn.execute("DELETE FROM spans")
                # Reclaim file space after the bulk delete.
                conn.execute("VACUUM")

                return {"success": True, "deleted_count": removed, "error": None}
            finally:
                conn.close()

        except Exception as exc:
            return {"success": False, "deleted_count": 0, "error": str(exc)}
@@ -0,0 +1,10 @@
1
+ """Storage backends for Flock blackboard."""
2
+
3
+ from flock.storage.sqlite.query_builder import SQLiteQueryBuilder
4
+ from flock.storage.sqlite.schema_manager import SQLiteSchemaManager
5
+
6
+
7
+ __all__ = [
8
+ "SQLiteQueryBuilder",
9
+ "SQLiteSchemaManager",
10
+ ]
@@ -0,0 +1,158 @@
1
+ """Artifact aggregation utilities for summary statistics.
2
+
3
+ Handles aggregation logic for artifact collections, computing statistics
4
+ like type distribution, producer counts, and time ranges.
5
+ """
6
+
7
+ from __future__ import annotations
8
+
9
+ from datetime import datetime
10
+ from typing import Any
11
+
12
+ from flock.core.artifacts import Artifact
13
+ from flock.utils.time_utils import format_time_span
14
+
15
+
16
class ArtifactAggregator:
    """
    Compute summary statistics over collections of artifacts.

    Keeps aggregation logic separate from the storage implementations;
    each public method is a small, independent reduction.
    """

    @staticmethod
    def _tally(keys) -> dict[str, int]:
        """Count occurrences of each key yielded by *keys*."""
        counts: dict[str, int] = {}
        for key in keys:
            counts[key] = counts.get(key, 0) + 1
        return counts

    def aggregate_by_type(self, artifacts: list[Artifact]) -> dict[str, int]:
        """
        Count artifacts grouped by type.

        Args:
            artifacts: List of artifacts to aggregate

        Returns:
            Dict mapping type names to counts
        """
        return self._tally(artifact.type for artifact in artifacts)

    def aggregate_by_producer(self, artifacts: list[Artifact]) -> dict[str, int]:
        """
        Count artifacts grouped by producer.

        Args:
            artifacts: List of artifacts to aggregate

        Returns:
            Dict mapping producer names to counts
        """
        return self._tally(artifact.produced_by for artifact in artifacts)

    def aggregate_by_visibility(self, artifacts: list[Artifact]) -> dict[str, int]:
        """
        Count artifacts grouped by visibility kind.

        Artifacts whose visibility object lacks a ``kind`` attribute are
        bucketed under "Unknown".

        Args:
            artifacts: List of artifacts to aggregate

        Returns:
            Dict mapping visibility kinds to counts
        """
        return self._tally(
            getattr(artifact.visibility, "kind", "Unknown") for artifact in artifacts
        )

    def aggregate_tags(self, artifacts: list[Artifact]) -> dict[str, int]:
        """
        Count tag occurrences across all artifacts.

        Args:
            artifacts: List of artifacts to aggregate

        Returns:
            Dict mapping tag names to occurrence counts
        """
        return self._tally(tag for artifact in artifacts for tag in artifact.tags)

    def get_date_range(
        self, artifacts: list[Artifact]
    ) -> tuple[datetime | None, datetime | None]:
        """
        Find the earliest and latest creation times.

        Args:
            artifacts: List of artifacts to analyze

        Returns:
            Tuple of (earliest, latest) datetimes, or (None, None) if empty
        """
        if not artifacts:
            return None, None
        stamps = [artifact.created_at for artifact in artifacts]
        return min(stamps), max(stamps)

    def build_summary(
        self,
        artifacts: list[Artifact],
        total: int,
        is_full_window: bool,
    ) -> dict[str, Any]:
        """
        Build complete summary statistics for artifacts.

        Args:
            artifacts: List of artifacts to summarize
            total: Total count (may differ from len(artifacts) if paginated)
            is_full_window: Whether this represents all artifacts (no filters)

        Returns:
            Dictionary with:
            - total: Total artifact count
            - by_type / by_producer / by_visibility: distributions
            - tag_counts: Tag occurrence counts
            - earliest_created_at / latest_created_at: ISO strings or None
            - is_full_window: Whether all artifacts were included
            - window_span_label: Human-readable time span
        """
        earliest, latest = self.get_date_range(artifacts)
        return {
            "total": total,
            "by_type": self.aggregate_by_type(artifacts),
            "by_producer": self.aggregate_by_producer(artifacts),
            "by_visibility": self.aggregate_by_visibility(artifacts),
            "tag_counts": self.aggregate_tags(artifacts),
            "earliest_created_at": earliest.isoformat() if earliest else None,
            "latest_created_at": latest.isoformat() if latest else None,
            "is_full_window": is_full_window,
            "window_span_label": format_time_span(earliest, latest),
        }
@@ -0,0 +1,6 @@
1
+ """In-memory storage implementation utilities."""
2
+
3
+ from __future__ import annotations
4
+
5
+
6
+ __all__ = ["ArtifactFilter", "HistoryAggregator"]
@@ -0,0 +1,114 @@
1
+ """Artifact filtering utilities for in-memory storage.
2
+
3
+ Provides focused filtering logic for InMemoryBlackboardStore.query_artifacts.
4
+ Extracted from store.py to reduce complexity from B (10) to A (4).
5
+ """
6
+
7
+ from __future__ import annotations
8
+
9
+ from typing import TYPE_CHECKING
10
+
11
+
12
+ if TYPE_CHECKING:
13
+ from flock.core.artifacts import Artifact
14
+ from flock.core.store import FilterConfig
15
+
16
+
17
class ArtifactFilter:
    """
    Apply FilterConfig criteria to individual artifacts.

    Resolves type names once at construction time, then answers
    ``matches`` by AND-ing one small predicate per criterion. Keeps
    filtering logic out of the query orchestration for testability.
    """

    def __init__(self, filters: FilterConfig):
        """
        Capture filter criteria from the configuration.

        Args:
            filters: Filter configuration; unset criteria match everything
        """
        from flock.registry import type_registry

        # Resolve type aliases to canonical names a single time up front.
        self.canonical_types: set[str] | None = None
        if filters.type_names:
            self.canonical_types = {
                type_registry.resolve_name(type_name)
                for type_name in filters.type_names
            }

        self.produced_by = filters.produced_by or set()
        self.correlation_id = filters.correlation_id
        self.tags = filters.tags or set()
        self.visibility_kinds = filters.visibility or set()
        self.start = filters.start
        self.end = filters.end

    def matches(self, artifact: Artifact) -> bool:
        """
        Check whether the artifact satisfies every configured criterion.

        Each criterion lives in its own small predicate for clarity.

        Args:
            artifact: Artifact to check against filters

        Returns:
            True if the artifact matches all criteria, False otherwise

        Examples:
            >>> filter = ArtifactFilter(FilterConfig(produced_by={"agent1"}))
            >>> artifact = Artifact(type="Result", produced_by="agent1", ...)
            >>> filter.matches(artifact)
            True
        """
        predicates = (
            self._matches_type,
            self._matches_producer,
            self._matches_correlation,
            self._matches_tags,
            self._matches_visibility,
            self._matches_time_range,
        )
        return all(predicate(artifact) for predicate in predicates)

    def _matches_type(self, artifact: Artifact) -> bool:
        """Type criterion; passes when no types are configured."""
        return not self.canonical_types or artifact.type in self.canonical_types

    def _matches_producer(self, artifact: Artifact) -> bool:
        """Producer criterion; passes when no producers are configured."""
        return not self.produced_by or artifact.produced_by in self.produced_by

    def _matches_correlation(self, artifact: Artifact) -> bool:
        """Correlation-ID criterion; artifacts without an ID never match."""
        if not self.correlation_id:
            return True
        return (
            artifact.correlation_id is not None
            and str(artifact.correlation_id) == self.correlation_id
        )

    def _matches_tags(self, artifact: Artifact) -> bool:
        """Tag criterion: every configured tag must be on the artifact."""
        return not self.tags or self.tags.issubset(artifact.tags)

    def _matches_visibility(self, artifact: Artifact) -> bool:
        """Visibility-kind criterion; passes when none are configured."""
        return (
            not self.visibility_kinds
            or artifact.visibility.kind in self.visibility_kinds
        )

    def _matches_time_range(self, artifact: Artifact) -> bool:
        """Creation-time window criterion; either bound may be absent."""
        created = artifact.created_at
        if self.start and created < self.start:
            return False
        return not (self.end and created > self.end)
@@ -0,0 +1,115 @@
1
+ """Agent history aggregation for in-memory storage.
2
+
3
+ Handles aggregation of produced/consumed artifacts for agent history summaries.
4
+ Extracted from store.py to reduce complexity from B (7) to A (4).
5
+ """
6
+
7
+ from __future__ import annotations
8
+
9
+ from collections import defaultdict
10
+ from typing import TYPE_CHECKING, Any
11
+
12
+
13
+ if TYPE_CHECKING:
14
+ from flock.core.store import ArtifactEnvelope
15
+
16
+
17
class HistoryAggregator:
    """
    Summarize an agent's produced/consumed artifact history.

    Scans ArtifactEnvelope records and groups counts by artifact type
    for both directions (production and consumption).
    """

    def aggregate(
        self, envelopes: list[ArtifactEnvelope], agent_id: str
    ) -> dict[str, Any]:
        """
        Build the produced/consumed summary for an agent.

        Args:
            envelopes: List of artifact envelopes with consumptions
            agent_id: Agent to aggregate history for

        Returns:
            Dictionary with produced and consumed statistics:
            {
                "produced": {"total": int, "by_type": dict[str, int]},
                "consumed": {"total": int, "by_type": dict[str, int]}
            }

        Examples:
            >>> aggregator = HistoryAggregator()
            >>> summary = aggregator.aggregate(envelopes, "agent1")
            >>> summary["produced"]["total"]
            42
        """
        produced_counts = self._aggregate_produced(envelopes, agent_id)
        consumed_counts = self._aggregate_consumed(envelopes, agent_id)
        return {
            "produced": {
                "total": sum(produced_counts.values()),
                "by_type": dict(produced_counts),
            },
            "consumed": {
                "total": sum(consumed_counts.values()),
                "by_type": dict(consumed_counts),
            },
        }

    def _aggregate_produced(
        self, envelopes: list[ArtifactEnvelope], agent_id: str
    ) -> defaultdict[str, int]:
        """
        Tally artifacts produced by the agent, keyed by artifact type.

        Args:
            envelopes: Artifact envelopes to analyze
            agent_id: Producer to match

        Returns:
            Dict mapping artifact types to counts

        Raises:
            TypeError: If any entry is not an ArtifactEnvelope.
        """
        # Imported here to avoid a circular import with flock.core.store.
        from flock.core.store import ArtifactEnvelope

        tally: defaultdict[str, int] = defaultdict(int)
        for envelope in envelopes:
            # Fail fast on anything that is not a proper envelope.
            if not isinstance(envelope, ArtifactEnvelope):
                raise TypeError("Expected ArtifactEnvelope instance")
            if envelope.artifact.produced_by == agent_id:
                tally[envelope.artifact.type] += 1
        return tally

    def _aggregate_consumed(
        self, envelopes: list[ArtifactEnvelope], agent_id: str
    ) -> defaultdict[str, int]:
        """
        Tally artifacts consumed by the agent, keyed by artifact type.

        One artifact counts once per matching consumption record.

        Args:
            envelopes: Artifact envelopes with consumption records
            agent_id: Consumer to match

        Returns:
            Dict mapping artifact types to consumption counts

        Raises:
            TypeError: If any entry is not an ArtifactEnvelope.
        """
        # Imported here to avoid a circular import with flock.core.store.
        from flock.core.store import ArtifactEnvelope

        tally: defaultdict[str, int] = defaultdict(int)
        for envelope in envelopes:
            if not isinstance(envelope, ArtifactEnvelope):
                raise TypeError("Expected ArtifactEnvelope instance")
            artifact_type = envelope.artifact.type
            for record in envelope.consumptions:
                if record.consumer == agent_id:
                    tally[artifact_type] += 1
        return tally
@@ -0,0 +1,10 @@
1
+ """SQLite storage backend components."""
2
+
3
+ from flock.storage.sqlite.query_builder import SQLiteQueryBuilder
4
+ from flock.storage.sqlite.schema_manager import SQLiteSchemaManager
5
+
6
+
7
+ __all__ = [
8
+ "SQLiteQueryBuilder",
9
+ "SQLiteSchemaManager",
10
+ ]