flock-core 0.5.7-py3-none-any.whl → 0.5.8-py3-none-any.whl
Potentially problematic release: this version of flock-core has been flagged as possibly problematic.
- flock/agent.py +336 -80
- flock/artifacts.py +2 -2
- flock/components.py +38 -30
- flock/correlation_engine.py +3 -6
- flock/dashboard/collector.py +9 -9
- flock/dashboard/events.py +8 -8
- flock/dashboard/service.py +7 -7
- flock/engines/dspy_engine.py +560 -64
- flock/engines/examples/simple_batch_engine.py +36 -20
- flock/examples.py +2 -2
- flock/helper/cli_helper.py +2 -2
- flock/logging/formatters/themed_formatter.py +3 -1
- flock/mcp/config.py +1 -2
- flock/mcp/tool.py +1 -2
- flock/orchestrator.py +2 -2
- flock/store.py +2 -2
- flock/utilities.py +1 -1
- flock/visibility.py +3 -3
- {flock_core-0.5.7.dist-info → flock_core-0.5.8.dist-info}/METADATA +97 -2
- {flock_core-0.5.7.dist-info → flock_core-0.5.8.dist-info}/RECORD +23 -23
- {flock_core-0.5.7.dist-info → flock_core-0.5.8.dist-info}/WHEEL +0 -0
- {flock_core-0.5.7.dist-info → flock_core-0.5.8.dist-info}/entry_points.txt +0 -0
- {flock_core-0.5.7.dist-info → flock_core-0.5.8.dist-info}/licenses/LICENSE +0 -0
flock/dashboard/collector.py
CHANGED
@@ -11,7 +11,7 @@ import json
 import traceback
 from collections import defaultdict, deque
 from dataclasses import dataclass, field
-from datetime import
+from datetime import UTC, datetime
 from typing import TYPE_CHECKING, Any, Optional
 
 from pydantic import PrivateAttr
@@ -159,7 +159,7 @@ class DashboardEventCollector(AgentComponent):
 Unmodified inputs (pass-through)
 """
 # Record start time for duration calculation
-self._run_start_times[ctx.task_id] = datetime.now(
+self._run_start_times[ctx.task_id] = datetime.now(UTC).timestamp()
 
 # Extract consumed types and artifact IDs
 consumed_types = list({artifact.type for artifact in inputs})
@@ -280,7 +280,7 @@ class DashboardEventCollector(AgentComponent):
 # Calculate duration
 start_time = self._run_start_times.get(ctx.task_id)
 if start_time:
-duration_ms = (datetime.now(
+duration_ms = (datetime.now(UTC).timestamp() - start_time) * 1000
 del self._run_start_times[ctx.task_id]
 else:
 duration_ms = 0.0
@@ -319,7 +319,7 @@ class DashboardEventCollector(AgentComponent):
 run.status = "completed"
 run.duration_ms = duration_ms
 run.metrics = dict(metrics)
-run.completed_at = datetime.now(
+run.completed_at = datetime.now(UTC)
 for artifact_id in artifacts_produced:
 if artifact_id not in run.produced_artifacts:
 run.produced_artifacts.append(artifact_id)
@@ -345,7 +345,7 @@ class DashboardEventCollector(AgentComponent):
 error_traceback = "".join(
 traceback.format_exception(type(error), error, error.__traceback__)
 )
-failed_at = datetime.now(
+failed_at = datetime.now(UTC).isoformat().replace("+00:00", "Z")
 
 # Clean up start time tracking
 if ctx.task_id in self._run_start_times:
@@ -374,7 +374,7 @@ class DashboardEventCollector(AgentComponent):
 )
 run.status = "error"
 run.error_message = error_message
-run.completed_at = datetime.now(
+run.completed_at = datetime.now(UTC)
 self._agent_status[agent.name] = "error"
 await self._update_agent_snapshot_locked(agent)
 
@@ -442,7 +442,7 @@ class DashboardEventCollector(AgentComponent):
 run_id=run_id,
 agent_name=agent_name,
 correlation_id=correlation_id,
-started_at=datetime.now(
+started_at=datetime.now(UTC) if ensure_started else None,
 )
 self._run_registry[run_id] = run
 else:
@@ -450,11 +450,11 @@ class DashboardEventCollector(AgentComponent):
 if correlation_id:
 run.correlation_id = correlation_id
 if ensure_started and run.started_at is None:
-run.started_at = datetime.now(
+run.started_at = datetime.now(UTC)
 return run
 
 async def _update_agent_snapshot_locked(self, agent: "Agent") -> None:
-now = datetime.now(
+now = datetime.now(UTC)
 description = agent.description or ""
 subscriptions = sorted(
 {
flock/dashboard/events.py
CHANGED
@@ -4,7 +4,7 @@ Defines 5 core event types that capture agent execution lifecycle.
 All schemas match DATA_MODEL.md specification exactly.
 """
 
-from datetime import
+from datetime import UTC, datetime
 from typing import Any
 
 from pydantic import BaseModel, Field
@@ -40,7 +40,7 @@ class AgentActivatedEvent(BaseModel):
 # Event metadata
 correlation_id: str
 timestamp: str = Field(
-default_factory=lambda: datetime.now(
+default_factory=lambda: datetime.now(UTC).isoformat().replace("+00:00", "Z")
 )
 
 # Agent identification
@@ -74,7 +74,7 @@ class MessagePublishedEvent(BaseModel):
 # Event metadata
 correlation_id: str
 timestamp: str = Field(
-default_factory=lambda: datetime.now(
+default_factory=lambda: datetime.now(UTC).isoformat().replace("+00:00", "Z")
 )
 
 # Artifact identification
@@ -109,7 +109,7 @@ class StreamingOutputEvent(BaseModel):
 # Event metadata
 correlation_id: str
 timestamp: str = Field(
-default_factory=lambda: datetime.now(
+default_factory=lambda: datetime.now(UTC).isoformat().replace("+00:00", "Z")
 )
 
 # Agent identification
@@ -137,7 +137,7 @@ class AgentCompletedEvent(BaseModel):
 # Event metadata
 correlation_id: str
 timestamp: str = Field(
-default_factory=lambda: datetime.now(
+default_factory=lambda: datetime.now(UTC).isoformat().replace("+00:00", "Z")
 )
 
 # Agent identification
@@ -163,7 +163,7 @@ class AgentErrorEvent(BaseModel):
 # Event metadata
 correlation_id: str
 timestamp: str = Field(
-default_factory=lambda: datetime.now(
+default_factory=lambda: datetime.now(UTC).isoformat().replace("+00:00", "Z")
 )
 
 # Agent identification
@@ -187,7 +187,7 @@ class CorrelationGroupUpdatedEvent(BaseModel):
 
 # Event metadata
 timestamp: str = Field(
-default_factory=lambda: datetime.now(
+default_factory=lambda: datetime.now(UTC).isoformat().replace("+00:00", "Z")
 )
 
 # Agent identification
@@ -224,7 +224,7 @@ class BatchItemAddedEvent(BaseModel):
 
 # Event metadata
 timestamp: str = Field(
-default_factory=lambda: datetime.now(
+default_factory=lambda: datetime.now(UTC).isoformat().replace("+00:00", "Z")
 )
 
 # Agent identification
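Each event model above now stamps itself with a Z-suffixed ISO 8601 UTC string via a Pydantic Field default_factory. A minimal sketch of the same pattern follows, assuming Python 3.11+; ExampleEvent and _utc_timestamp are illustrative names, not part of flock (the real classes such as AgentActivatedEvent inline the lambda directly).

from datetime import UTC, datetime

from pydantic import BaseModel, Field

def _utc_timestamp() -> str:
    # ISO 8601 with a trailing "Z" instead of "+00:00", matching the
    # default_factory lambdas in the event models above.
    return datetime.now(UTC).isoformat().replace("+00:00", "Z")

class ExampleEvent(BaseModel):
    # Illustrative model only.
    correlation_id: str
    timestamp: str = Field(default_factory=_utc_timestamp)

print(ExampleEvent(correlation_id="run-1").timestamp)
# e.g. "2025-01-01T12:00:00.000000Z"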
flock/dashboard/service.py
CHANGED
@@ -8,7 +8,7 @@ Provides real-time dashboard capabilities by:
 """
 
 import os
-from datetime import datetime, timedelta
+from datetime import UTC, datetime, timedelta
 from importlib.metadata import PackageNotFoundError, version
 from pathlib import Path
 from typing import Any
@@ -716,10 +716,10 @@ class DashboardHTTPService(BlackboardHTTPService):
 if time_range and time_range[0]:
 # Convert nanoseconds to datetime
 oldest_trace = datetime.fromtimestamp(
-time_range[0] / 1_000_000_000, tz=
+time_range[0] / 1_000_000_000, tz=UTC
 ).isoformat()
 newest_trace = datetime.fromtimestamp(
-time_range[1] / 1_000_000_000, tz=
+time_range[1] / 1_000_000_000, tz=UTC
 ).isoformat()
 
 # Get file size
@@ -1051,7 +1051,7 @@ def _get_correlation_groups(
 if not groups:
 return []
 
-now = datetime.now(
+now = datetime.now(UTC)
 result = []
 
 for corr_key, group in groups.items():
@@ -1059,7 +1059,7 @@ def _get_correlation_groups(
 if group.created_at_time:
 created_at_time = group.created_at_time
 if created_at_time.tzinfo is None:
-created_at_time = created_at_time.replace(tzinfo=
+created_at_time = created_at_time.replace(tzinfo=UTC)
 elapsed = (now - created_at_time).total_seconds()
 else:
 elapsed = 0
@@ -1146,11 +1146,11 @@ def _get_batch_state(
 if not accumulator or not accumulator.artifacts:
 return None
 
-now = datetime.now(
+now = datetime.now(UTC)
 # Ensure accumulator.created_at is timezone-aware
 created_at = accumulator.created_at
 if created_at.tzinfo is None:
-created_at = created_at.replace(tzinfo=
+created_at = created_at.replace(tzinfo=UTC)
 elapsed = (now - created_at).total_seconds()
 
 # Calculate items collected (needed for all batch types)