flock-core 0.5.11__py3-none-any.whl → 0.5.21__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of flock-core has been flagged as potentially problematic.
Files changed (94)
  1. flock/__init__.py +1 -1
  2. flock/agent/__init__.py +30 -0
  3. flock/agent/builder_helpers.py +192 -0
  4. flock/agent/builder_validator.py +169 -0
  5. flock/agent/component_lifecycle.py +325 -0
  6. flock/agent/context_resolver.py +141 -0
  7. flock/agent/mcp_integration.py +212 -0
  8. flock/agent/output_processor.py +304 -0
  9. flock/api/__init__.py +20 -0
  10. flock/{api_models.py → api/models.py} +0 -2
  11. flock/{service.py → api/service.py} +3 -3
  12. flock/cli.py +2 -2
  13. flock/components/__init__.py +41 -0
  14. flock/components/agent/__init__.py +22 -0
  15. flock/{components.py → components/agent/base.py} +4 -3
  16. flock/{utility/output_utility_component.py → components/agent/output_utility.py} +12 -7
  17. flock/components/orchestrator/__init__.py +22 -0
  18. flock/{orchestrator_component.py → components/orchestrator/base.py} +5 -293
  19. flock/components/orchestrator/circuit_breaker.py +95 -0
  20. flock/components/orchestrator/collection.py +143 -0
  21. flock/components/orchestrator/deduplication.py +78 -0
  22. flock/core/__init__.py +30 -0
  23. flock/core/agent.py +953 -0
  24. flock/{artifacts.py → core/artifacts.py} +1 -1
  25. flock/{context_provider.py → core/context_provider.py} +3 -3
  26. flock/core/orchestrator.py +1102 -0
  27. flock/{store.py → core/store.py} +99 -454
  28. flock/{subscription.py → core/subscription.py} +1 -1
  29. flock/dashboard/collector.py +5 -5
  30. flock/dashboard/events.py +1 -1
  31. flock/dashboard/graph_builder.py +7 -7
  32. flock/dashboard/routes/__init__.py +21 -0
  33. flock/dashboard/routes/control.py +327 -0
  34. flock/dashboard/routes/helpers.py +340 -0
  35. flock/dashboard/routes/themes.py +76 -0
  36. flock/dashboard/routes/traces.py +521 -0
  37. flock/dashboard/routes/websocket.py +108 -0
  38. flock/dashboard/service.py +43 -1316
  39. flock/engines/dspy/__init__.py +20 -0
  40. flock/engines/dspy/artifact_materializer.py +216 -0
  41. flock/engines/dspy/signature_builder.py +474 -0
  42. flock/engines/dspy/streaming_executor.py +812 -0
  43. flock/engines/dspy_engine.py +45 -1330
  44. flock/engines/examples/simple_batch_engine.py +2 -2
  45. flock/engines/streaming/__init__.py +3 -0
  46. flock/engines/streaming/sinks.py +489 -0
  47. flock/examples.py +7 -7
  48. flock/logging/logging.py +1 -16
  49. flock/models/__init__.py +10 -0
  50. flock/orchestrator/__init__.py +45 -0
  51. flock/{artifact_collector.py → orchestrator/artifact_collector.py} +3 -3
  52. flock/orchestrator/artifact_manager.py +168 -0
  53. flock/{batch_accumulator.py → orchestrator/batch_accumulator.py} +2 -2
  54. flock/orchestrator/component_runner.py +389 -0
  55. flock/orchestrator/context_builder.py +167 -0
  56. flock/{correlation_engine.py → orchestrator/correlation_engine.py} +2 -2
  57. flock/orchestrator/event_emitter.py +167 -0
  58. flock/orchestrator/initialization.py +184 -0
  59. flock/orchestrator/lifecycle_manager.py +226 -0
  60. flock/orchestrator/mcp_manager.py +202 -0
  61. flock/orchestrator/scheduler.py +189 -0
  62. flock/orchestrator/server_manager.py +234 -0
  63. flock/orchestrator/tracing.py +147 -0
  64. flock/storage/__init__.py +10 -0
  65. flock/storage/artifact_aggregator.py +158 -0
  66. flock/storage/in_memory/__init__.py +6 -0
  67. flock/storage/in_memory/artifact_filter.py +114 -0
  68. flock/storage/in_memory/history_aggregator.py +115 -0
  69. flock/storage/sqlite/__init__.py +10 -0
  70. flock/storage/sqlite/agent_history_queries.py +154 -0
  71. flock/storage/sqlite/consumption_loader.py +100 -0
  72. flock/storage/sqlite/query_builder.py +112 -0
  73. flock/storage/sqlite/query_params_builder.py +91 -0
  74. flock/storage/sqlite/schema_manager.py +168 -0
  75. flock/storage/sqlite/summary_queries.py +194 -0
  76. flock/utils/__init__.py +14 -0
  77. flock/utils/async_utils.py +67 -0
  78. flock/{runtime.py → utils/runtime.py} +3 -3
  79. flock/utils/time_utils.py +53 -0
  80. flock/utils/type_resolution.py +38 -0
  81. flock/{utilities.py → utils/utilities.py} +2 -2
  82. flock/utils/validation.py +57 -0
  83. flock/utils/visibility.py +79 -0
  84. flock/utils/visibility_utils.py +134 -0
  85. {flock_core-0.5.11.dist-info → flock_core-0.5.21.dist-info}/METADATA +19 -5
  86. {flock_core-0.5.11.dist-info → flock_core-0.5.21.dist-info}/RECORD +92 -34
  87. flock/agent.py +0 -1578
  88. flock/orchestrator.py +0 -1983
  89. /flock/{visibility.py → core/visibility.py} +0 -0
  90. /flock/{system_artifacts.py → models/system_artifacts.py} +0 -0
  91. /flock/{helper → utils}/cli_helper.py +0 -0
  92. {flock_core-0.5.11.dist-info → flock_core-0.5.21.dist-info}/WHEEL +0 -0
  93. {flock_core-0.5.11.dist-info → flock_core-0.5.21.dist-info}/entry_points.txt +0 -0
  94. {flock_core-0.5.11.dist-info → flock_core-0.5.21.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,168 @@
+ """SQLite schema management for Flock blackboard store.
+
+ This module handles database schema creation, versioning, and migrations.
+ """
+
+ from __future__ import annotations
+
+ from typing import TYPE_CHECKING
+
+
+ if TYPE_CHECKING:
+     import aiosqlite
+
+
+ class SQLiteSchemaManager:
+     """
+     Manages SQLite database schema for blackboard storage.
+
+     Responsibilities:
+     - Schema version tracking
+     - Table creation
+     - Index creation
+     - Schema migrations
+
+     The schema includes:
+     - artifacts table: Core artifact storage
+     - artifact_consumptions table: Consumption tracking
+     - agent_snapshots table: Agent metadata
+     - schema_meta table: Version tracking
+     """
+
+     SCHEMA_VERSION = 3
+
+     async def apply_schema(self, conn: aiosqlite.Connection) -> None:
+         """
+         Apply the blackboard schema to the SQLite connection.
+
+         Creates all tables and indices if they don't exist. Handles schema
+         versioning and migrations.
+
+         Args:
+             conn: Active SQLite connection
+
+         Schema Tables:
+         - schema_meta: Tracks schema version
+         - artifacts: Core artifact storage
+         - artifact_consumptions: Consumption events
+         - agent_snapshots: Agent metadata
+         """
+         # Schema version tracking
+         await conn.execute(
+             """
+             CREATE TABLE IF NOT EXISTS schema_meta (
+                 id INTEGER PRIMARY KEY CHECK (id = 1),
+                 version INTEGER NOT NULL,
+                 applied_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP
+             )
+             """
+         )
+         await conn.execute(
+             """
+             INSERT OR IGNORE INTO schema_meta (id, version)
+             VALUES (1, ?)
+             """,
+             (self.SCHEMA_VERSION,),
+         )
+
+         # Main artifacts table
+         await conn.execute(
+             """
+             CREATE TABLE IF NOT EXISTS artifacts (
+                 artifact_id TEXT PRIMARY KEY,
+                 type TEXT NOT NULL,
+                 canonical_type TEXT NOT NULL,
+                 produced_by TEXT NOT NULL,
+                 payload TEXT NOT NULL,
+                 version INTEGER NOT NULL,
+                 visibility TEXT NOT NULL,
+                 tags TEXT NOT NULL,
+                 correlation_id TEXT,
+                 partition_key TEXT,
+                 created_at TEXT NOT NULL
+             )
+             """
+         )
+
+         # Artifact indices for performance
+         await conn.execute(
+             """
+             CREATE INDEX IF NOT EXISTS idx_artifacts_canonical_type_created
+                 ON artifacts(canonical_type, created_at)
+             """
+         )
+         await conn.execute(
+             """
+             CREATE INDEX IF NOT EXISTS idx_artifacts_produced_by_created
+                 ON artifacts(produced_by, created_at)
+             """
+         )
+         await conn.execute(
+             """
+             CREATE INDEX IF NOT EXISTS idx_artifacts_correlation
+                 ON artifacts(correlation_id)
+             """
+         )
+         await conn.execute(
+             """
+             CREATE INDEX IF NOT EXISTS idx_artifacts_partition
+                 ON artifacts(partition_key)
+             """
+         )
+
+         # Consumption tracking table
+         await conn.execute(
+             """
+             CREATE TABLE IF NOT EXISTS artifact_consumptions (
+                 artifact_id TEXT NOT NULL,
+                 consumer TEXT NOT NULL,
+                 run_id TEXT,
+                 correlation_id TEXT,
+                 consumed_at TEXT NOT NULL,
+                 PRIMARY KEY (artifact_id, consumer, consumed_at)
+             )
+             """
+         )
+
+         # Consumption indices
+         await conn.execute(
+             """
+             CREATE INDEX IF NOT EXISTS idx_consumptions_artifact
+                 ON artifact_consumptions(artifact_id)
+             """
+         )
+         await conn.execute(
+             """
+             CREATE INDEX IF NOT EXISTS idx_consumptions_consumer
+                 ON artifact_consumptions(consumer)
+             """
+         )
+         await conn.execute(
+             """
+             CREATE INDEX IF NOT EXISTS idx_consumptions_correlation
+                 ON artifact_consumptions(correlation_id)
+             """
+         )
+
+         # Agent snapshots table
+         await conn.execute(
+             """
+             CREATE TABLE IF NOT EXISTS agent_snapshots (
+                 agent_name TEXT PRIMARY KEY,
+                 description TEXT NOT NULL,
+                 subscriptions TEXT NOT NULL,
+                 output_types TEXT NOT NULL,
+                 labels TEXT NOT NULL,
+                 first_seen TEXT NOT NULL,
+                 last_seen TEXT NOT NULL,
+                 signature TEXT NOT NULL
+             )
+             """
+         )
+
+         # Update schema version
+         await conn.execute(
+             "UPDATE schema_meta SET version=? WHERE id=1",
+             (self.SCHEMA_VERSION,),
+         )
+         await conn.commit()
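
A minimal usage sketch (not part of this diff) of how the new SQLiteSchemaManager might be applied to an open aiosqlite connection; because every statement uses CREATE ... IF NOT EXISTS, applying it on each startup should be idempotent. The open_blackboard helper and the database path are hypothetical.

import asyncio

import aiosqlite

from flock.storage.sqlite.schema_manager import SQLiteSchemaManager


async def open_blackboard(path: str) -> None:
    # Hypothetical helper: open the database, apply the schema, close again.
    conn = await aiosqlite.connect(path)
    conn.row_factory = aiosqlite.Row  # rows addressable by column name
    try:
        # Safe to call repeatedly: tables/indices use IF NOT EXISTS and
        # schema_meta records SCHEMA_VERSION (currently 3).
        await SQLiteSchemaManager().apply_schema(conn)
    finally:
        await conn.close()


asyncio.run(open_blackboard("blackboard.db"))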
@@ -0,0 +1,194 @@
+ """SQLite summary query utilities.
+
+ Provides focused methods for executing summary/aggregation queries.
+ Extracted from summarize_artifacts to reduce complexity and improve testability.
+ """
+
+ from __future__ import annotations
+
+ from typing import TYPE_CHECKING, Any
+
+
+ if TYPE_CHECKING:
+     import aiosqlite
+
+
+ class SQLiteSummaryQueries:
+     """
+     Executes summary SQL queries for artifact statistics.
+
+     Each method handles one specific aggregation query, making them
+     simple, testable, and easy to maintain.
+     """
+
+     async def count_total(
+         self,
+         conn: aiosqlite.Connection,
+         where_clause: str,
+         params: tuple[Any, ...],
+     ) -> int:
+         """
+         Get total artifact count.
+
+         Args:
+             conn: Database connection
+             where_clause: SQL WHERE clause (e.g., " WHERE type = ?")
+             params: Parameter values for WHERE clause
+
+         Returns:
+             Total count of matching artifacts
+         """
+         count_query = f"SELECT COUNT(*) AS total FROM artifacts{where_clause}"  # nosec B608
+         cursor = await conn.execute(count_query, params)
+         total_row = await cursor.fetchone()
+         await cursor.close()
+         return total_row["total"] if total_row else 0
+
+     async def group_by_type(
+         self,
+         conn: aiosqlite.Connection,
+         where_clause: str,
+         params: tuple[Any, ...],
+     ) -> dict[str, int]:
+         """
+         Get artifact counts grouped by type.
+
+         Args:
+             conn: Database connection
+             where_clause: SQL WHERE clause
+             params: Parameter values for WHERE clause
+
+         Returns:
+             Dict mapping canonical type names to counts
+         """
+         by_type_query = f"""
+             SELECT canonical_type, COUNT(*) AS count
+             FROM artifacts
+             {where_clause}
+             GROUP BY canonical_type
+         """  # nosec B608
+         cursor = await conn.execute(by_type_query, params)
+         by_type_rows = await cursor.fetchall()
+         await cursor.close()
+         return {row["canonical_type"]: row["count"] for row in by_type_rows}
+
+     async def group_by_producer(
+         self,
+         conn: aiosqlite.Connection,
+         where_clause: str,
+         params: tuple[Any, ...],
+     ) -> dict[str, int]:
+         """
+         Get artifact counts grouped by producer.
+
+         Args:
+             conn: Database connection
+             where_clause: SQL WHERE clause
+             params: Parameter values for WHERE clause
+
+         Returns:
+             Dict mapping producer names to counts
+         """
+         by_producer_query = f"""
+             SELECT produced_by, COUNT(*) AS count
+             FROM artifacts
+             {where_clause}
+             GROUP BY produced_by
+         """  # nosec B608
+         cursor = await conn.execute(by_producer_query, params)
+         by_producer_rows = await cursor.fetchall()
+         await cursor.close()
+         return {row["produced_by"]: row["count"] for row in by_producer_rows}
+
+     async def group_by_visibility(
+         self,
+         conn: aiosqlite.Connection,
+         where_clause: str,
+         params: tuple[Any, ...],
+     ) -> dict[str, int]:
+         """
+         Get artifact counts grouped by visibility kind.
+
+         Args:
+             conn: Database connection
+             where_clause: SQL WHERE clause
+             params: Parameter values for WHERE clause
+
+         Returns:
+             Dict mapping visibility kinds to counts
+         """
+         by_visibility_query = f"""
+             SELECT json_extract(visibility, '$.kind') AS visibility_kind, COUNT(*) AS count
+             FROM artifacts
+             {where_clause}
+             GROUP BY json_extract(visibility, '$.kind')
+         """  # nosec B608
+         cursor = await conn.execute(by_visibility_query, params)
+         by_visibility_rows = await cursor.fetchall()
+         await cursor.close()
+         return {
+             (row["visibility_kind"] or "Unknown"): row["count"]
+             for row in by_visibility_rows
+         }
+
+     async def count_tags(
+         self,
+         conn: aiosqlite.Connection,
+         where_clause: str,
+         params: tuple[Any, ...],
+     ) -> dict[str, int]:
+         """
+         Get tag occurrence counts.
+
+         Args:
+             conn: Database connection
+             where_clause: SQL WHERE clause
+             params: Parameter values for WHERE clause
+
+         Returns:
+             Dict mapping tag names to occurrence counts
+         """
+         tag_query = f"""
+             SELECT json_each.value AS tag, COUNT(*) AS count
+             FROM artifacts
+             JOIN json_each(artifacts.tags)
+             {where_clause}
+             GROUP BY json_each.value
+         """  # nosec B608
+         cursor = await conn.execute(tag_query, params)
+         tag_rows = await cursor.fetchall()
+         await cursor.close()
+         return {row["tag"]: row["count"] for row in tag_rows}
+
+     async def get_date_range(
+         self,
+         conn: aiosqlite.Connection,
+         where_clause: str,
+         params: tuple[Any, ...],
+     ) -> tuple[str | None, str | None]:
+         """
+         Get earliest and latest creation timestamps.
+
+         Args:
+             conn: Database connection
+             where_clause: SQL WHERE clause
+             params: Parameter values for WHERE clause
+
+         Returns:
+             Tuple of (earliest, latest) ISO timestamp strings, or (None, None)
+         """
+         range_query = f"""
+             SELECT MIN(created_at) AS earliest, MAX(created_at) AS latest
+             FROM artifacts
+             {where_clause}
+         """  # nosec B608
+         cursor = await conn.execute(range_query, params)
+         range_row = await cursor.fetchone()
+         await cursor.close()
+
+         if not range_row:
+             return None, None
+
+         earliest = range_row["earliest"] if range_row["earliest"] else None
+         latest = range_row["latest"] if range_row["latest"] else None
+         return earliest, latest
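
Taken together, these helpers appear designed to be composed by a caller that builds a single WHERE clause and parameter tuple and fans it out across the aggregations. A hedged sketch, assuming a connection whose row_factory is aiosqlite.Row (the helpers index rows by column name); the summarize function and its return shape are illustrative, not the package's API.

import aiosqlite

from flock.storage.sqlite.summary_queries import SQLiteSummaryQueries


async def summarize(conn: aiosqlite.Connection, producer: str | None = None) -> dict:
    # One WHERE clause + params pair is shared by every aggregation query.
    where_clause = " WHERE produced_by = ?" if producer else ""
    params: tuple = (producer,) if producer else ()

    queries = SQLiteSummaryQueries()
    earliest, latest = await queries.get_date_range(conn, where_clause, params)
    return {
        "total": await queries.count_total(conn, where_clause, params),
        "by_type": await queries.group_by_type(conn, where_clause, params),
        "by_producer": await queries.group_by_producer(conn, where_clause, params),
        "by_visibility": await queries.group_by_visibility(conn, where_clause, params),
        "tag_counts": await queries.count_tags(conn, where_clause, params),
        "date_range": (earliest, latest),
    }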
@@ -0,0 +1,14 @@
+ """Shared utilities for Flock framework."""
+
+ from flock.utils.type_resolution import TypeResolutionHelper
+ from flock.utils.visibility import VisibilityDeserializer
+ from flock.utils.async_utils import async_lock_required, AsyncLockRequired
+ from flock.utils.validation import ArtifactValidator
+
+ __all__ = [
+     "TypeResolutionHelper",
+     "VisibilityDeserializer",
+     "async_lock_required",
+     "AsyncLockRequired",
+     "ArtifactValidator",
+ ]
@@ -0,0 +1,67 @@
+ """Async utility decorators and helpers."""
+
+ from collections.abc import Callable
+ from functools import wraps
+ from typing import Any, TypeVar
+
+
+ T = TypeVar("T")
+
+
+ class AsyncLockRequired:
+     """Decorator ensuring async lock acquisition.
+
+     This utility eliminates 15+ duplicate lock acquisition patterns
+     scattered throughout orchestrator.py and agent.py.
+     """
+
+     def __init__(self, lock_attr: str = "_lock"):
+         """
+         Initialize decorator.
+
+         Args:
+             lock_attr: Name of lock attribute on class (default: "_lock")
+         """
+         self.lock_attr = lock_attr
+
+     def __call__(self, func: Callable[..., Any]) -> Callable[..., Any]:
+         """Apply decorator to function."""
+         lock_attr = self.lock_attr  # Capture in closure
+
+         @wraps(func)
+         async def wrapper(instance: Any, *args: Any, **kwargs: Any) -> Any:
+             lock = getattr(instance, lock_attr)
+             async with lock:
+                 return await func(instance, *args, **kwargs)
+
+         return wrapper
+
+
+ def async_lock_required(lock_attr: str = "_lock") -> AsyncLockRequired:
+     """
+     Decorator ensuring async lock acquisition.
+
+     This decorator automatically acquires and releases an async lock
+     before executing the decorated method, preventing race conditions.
+
+     Args:
+         lock_attr: Name of the lock attribute on the class (default: "_lock")
+
+     Returns:
+         AsyncLockRequired decorator instance
+
+     Example:
+         >>> class MyClass:
+         ...     def __init__(self):
+         ...         self._lock = asyncio.Lock()
+         ...
+         ...     @async_lock_required()
+         ...     async def my_method(self):
+         ...         # Lock automatically acquired here
+         ...         await asyncio.sleep(0.1)
+         ...         return "done"
+
+         >>> obj = MyClass()
+         >>> result = await obj.my_method()  # Lock acquired/released automatically
+     """
+     return AsyncLockRequired(lock_attr)
@@ -7,7 +7,7 @@ from uuid import UUID
 
 from pydantic import BaseModel, ConfigDict, Field
 
- from flock.artifacts import Artifact
+ from flock.core.artifacts import Artifact
 
 
 class EvalInputs(BaseModel):
@@ -99,7 +99,7 @@ class EvalResult(BaseModel):
         ... )
         ... return EvalResult.from_object(processed, agent=agent)
         """
-         from flock.artifacts import Artifact
+         from flock.core.artifacts import Artifact
         from flock.registry import type_registry
 
         type_name = type_registry.name_for(type(obj))
@@ -154,7 +154,7 @@ class EvalResult(BaseModel):
         ... movie, tagline, agent=agent, metrics={"confidence": 0.9}
         ... )
         """
-         from flock.artifacts import Artifact
+         from flock.core.artifacts import Artifact
         from flock.registry import type_registry
 
         artifacts = []
@@ -0,0 +1,53 @@
+ """Time formatting utilities.
+
+ Provides human-readable time span formatting for date ranges.
+ """
+
+ from __future__ import annotations
+
+ from datetime import datetime
+
+
+ def format_time_span(earliest: datetime | None, latest: datetime | None) -> str:
+     """
+     Format time span between two datetimes as human-readable string.
+
+     Args:
+         earliest: Start datetime
+         latest: End datetime
+
+     Returns:
+         Human-readable span description:
+         - "X days" for spans >= 2 days
+         - "X.Y hours" for spans >= 1 hour
+         - "X minutes" for spans > 0
+         - "moments" for zero span
+         - "empty" if no dates provided
+
+     Examples:
+         >>> from datetime import datetime, timedelta
+         >>> now = datetime.now()
+         >>> format_time_span(now, now + timedelta(days=3))
+         "3 days"
+         >>> format_time_span(now, now + timedelta(hours=2))
+         "2.0 hours"
+         >>> format_time_span(now, now + timedelta(minutes=45))
+         "45 minutes"
+     """
+     if not earliest or not latest:
+         return "empty"
+
+     span = latest - earliest
+
+     if span.days >= 2:
+         return f"{span.days} days"
+
+     if span.total_seconds() >= 3600:
+         hours = span.total_seconds() / 3600
+         return f"{hours:.1f} hours"
+
+     if span.total_seconds() > 0:
+         minutes = max(1, int(span.total_seconds() / 60))
+         return f"{minutes} minutes"
+
+     return "moments"
@@ -0,0 +1,38 @@
+ """Type registry resolution utilities."""
+
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+     from flock.registry import TypeRegistry
+
+ from flock.registry import RegistryError
+
+
+ class TypeResolutionHelper:
+     """Helper for safe type resolution.
+
+     This utility eliminates 8+ duplicate type resolution patterns
+     scattered across agent.py, store.py, orchestrator.py, and context_provider.py.
+     """
+
+     @staticmethod
+     def safe_resolve(registry: "TypeRegistry", type_name: str) -> str:
+         """
+         Safely resolve type name to canonical form.
+
+         Args:
+             registry: Type registry instance
+             type_name: Type name to resolve
+
+         Returns:
+             Canonical type name (or original if not found)
+
+         Example:
+             >>> canonical = TypeResolutionHelper.safe_resolve(registry, "MyType")
+             >>> # Returns "my_module.MyType" if found, else "MyType"
+         """
+         try:
+             return registry.resolve_name(type_name)
+         except RegistryError:
+             # Type not found or ambiguous - return original name
+             return type_name
@@ -19,11 +19,11 @@ from rich.pretty import Pretty
 from rich.table import Table
 from rich.text import Text
 
- from flock.components import AgentComponent
+ from flock.components.agent import AgentComponent
 
 
 if TYPE_CHECKING:
-     from flock.runtime import Context, EvalInputs, EvalResult
+     from flock.utils.runtime import Context, EvalInputs, EvalResult
 
 
 class MetricsUtility(AgentComponent):
@@ -0,0 +1,57 @@
+ """Common validation utilities."""
+
+ from typing import Any, Callable
+
+ from pydantic import BaseModel, ValidationError
+
+
+ class ArtifactValidator:
+     """Validates artifacts against predicates.
+
+     This utility consolidates artifact validation patterns
+     used across agent.py for output validation.
+     """
+
+     @staticmethod
+     def validate_artifact(
+         artifact: Any,
+         model_cls: type[BaseModel],
+         predicate: Callable[[BaseModel], bool] | None = None,
+     ) -> tuple[bool, BaseModel | None, str | None]:
+         """
+         Validate artifact payload against model and optional predicate.
+
+         Args:
+             artifact: Artifact to validate
+             model_cls: Pydantic model class
+             predicate: Optional validation predicate
+
+         Returns:
+             Tuple of (is_valid, model_instance, error_message)
+
+         Example:
+             >>> from pydantic import BaseModel
+             >>> class MyModel(BaseModel):
+             ...     name: str
+             ...     age: int
+             >>> artifact = type("obj", (), {"payload": {"name": "Alice", "age": 30}})()
+             >>> is_valid, model, error = ArtifactValidator.validate_artifact(
+             ...     artifact, MyModel, lambda m: m.age >= 18
+             ... )
+             >>> assert is_valid
+             >>> assert model.name == "Alice"
+         """
+         try:
+             # Validate against model
+             model_instance = model_cls(**artifact.payload)
+
+             # Apply predicate if provided
+             if predicate and not predicate(model_instance):
+                 return False, model_instance, "Predicate validation failed"
+
+             return True, model_instance, None
+
+         except ValidationError as e:
+             return False, None, str(e)
+         except Exception as e:
+             return False, None, f"Validation error: {e}"