flock_core-0.5.0b71-py3-none-any.whl → flock_core-0.5.0b75-py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, exactly as they appear in their public registry, and is provided for informational purposes only.

This version of flock-core is flagged as potentially problematic.
Files changed (62)
  1. flock/agent.py +39 -1
  2. flock/artifacts.py +17 -10
  3. flock/cli.py +1 -1
  4. flock/dashboard/__init__.py +2 -0
  5. flock/dashboard/collector.py +282 -6
  6. flock/dashboard/events.py +6 -0
  7. flock/dashboard/graph_builder.py +563 -0
  8. flock/dashboard/launcher.py +11 -6
  9. flock/dashboard/models/graph.py +156 -0
  10. flock/dashboard/service.py +175 -14
  11. flock/dashboard/static_v2/assets/index-DFRnI_mt.js +111 -0
  12. flock/dashboard/static_v2/assets/index-fPLNdmp1.css +1 -0
  13. flock/dashboard/static_v2/index.html +13 -0
  14. flock/dashboard/websocket.py +2 -2
  15. flock/engines/dspy_engine.py +27 -8
  16. flock/frontend/README.md +6 -6
  17. flock/frontend/src/App.tsx +23 -31
  18. flock/frontend/src/__tests__/integration/graph-snapshot.test.tsx +647 -0
  19. flock/frontend/src/components/details/DetailWindowContainer.tsx +13 -17
  20. flock/frontend/src/components/details/MessageDetailWindow.tsx +439 -0
  21. flock/frontend/src/components/details/MessageHistoryTab.tsx +128 -53
  22. flock/frontend/src/components/details/RunStatusTab.tsx +79 -38
  23. flock/frontend/src/components/graph/AgentNode.test.tsx +3 -1
  24. flock/frontend/src/components/graph/AgentNode.tsx +8 -6
  25. flock/frontend/src/components/graph/GraphCanvas.tsx +13 -8
  26. flock/frontend/src/components/graph/MessageNode.test.tsx +3 -1
  27. flock/frontend/src/components/graph/MessageNode.tsx +16 -3
  28. flock/frontend/src/components/layout/DashboardLayout.tsx +12 -9
  29. flock/frontend/src/components/modules/HistoricalArtifactsModule.tsx +4 -14
  30. flock/frontend/src/components/modules/ModuleRegistry.ts +5 -3
  31. flock/frontend/src/hooks/useModules.ts +12 -4
  32. flock/frontend/src/hooks/usePersistence.ts +5 -3
  33. flock/frontend/src/services/api.ts +3 -19
  34. flock/frontend/src/services/graphService.test.ts +330 -0
  35. flock/frontend/src/services/graphService.ts +75 -0
  36. flock/frontend/src/services/websocket.ts +104 -268
  37. flock/frontend/src/store/filterStore.test.ts +89 -1
  38. flock/frontend/src/store/filterStore.ts +38 -16
  39. flock/frontend/src/store/graphStore.test.ts +538 -173
  40. flock/frontend/src/store/graphStore.ts +374 -465
  41. flock/frontend/src/store/moduleStore.ts +51 -33
  42. flock/frontend/src/store/uiStore.ts +23 -11
  43. flock/frontend/src/types/graph.ts +77 -44
  44. flock/frontend/src/utils/mockData.ts +16 -3
  45. flock/frontend/vite.config.ts +2 -2
  46. flock/orchestrator.py +24 -6
  47. flock/service.py +2 -2
  48. flock/store.py +169 -4
  49. flock/themes/darkmatrix.toml +2 -2
  50. flock/themes/deep.toml +2 -2
  51. flock/themes/neopolitan.toml +4 -4
  52. {flock_core-0.5.0b71.dist-info → flock_core-0.5.0b75.dist-info}/METADATA +1 -1
  53. {flock_core-0.5.0b71.dist-info → flock_core-0.5.0b75.dist-info}/RECORD +56 -53
  54. flock/frontend/src/__tests__/e2e/critical-scenarios.test.tsx +0 -586
  55. flock/frontend/src/__tests__/integration/filtering-e2e.test.tsx +0 -391
  56. flock/frontend/src/__tests__/integration/graph-rendering.test.tsx +0 -640
  57. flock/frontend/src/services/websocket.test.ts +0 -595
  58. flock/frontend/src/utils/transforms.test.ts +0 -860
  59. flock/frontend/src/utils/transforms.ts +0 -323
  60. {flock_core-0.5.0b71.dist-info → flock_core-0.5.0b75.dist-info}/WHEEL +0 -0
  61. {flock_core-0.5.0b71.dist-info → flock_core-0.5.0b75.dist-info}/entry_points.txt +0 -0
  62. {flock_core-0.5.0b71.dist-info → flock_core-0.5.0b75.dist-info}/licenses/LICENSE +0 -0
flock/dashboard/graph_builder.py
@@ -0,0 +1,563 @@
+from __future__ import annotations
+
+import json
+from collections import defaultdict
+from collections.abc import Iterable, Mapping, Sequence
+from datetime import datetime, timedelta, timezone
+
+from flock.dashboard.collector import AgentSnapshot, DashboardEventCollector
+from flock.dashboard.models.graph import (
+    GraphAgentMetrics,
+    GraphArtifact,
+    GraphEdge,
+    GraphFilters,
+    GraphMarker,
+    GraphNode,
+    GraphPosition,
+    GraphRequest,
+    GraphRun,
+    GraphSnapshot,
+    GraphState,
+    GraphStatistics,
+    GraphTimeRange,
+    GraphTimeRangePreset,
+)
+from flock.logging.auto_trace import AutoTracedMeta
+from flock.orchestrator import Flock
+from flock.store import (
+    Artifact,
+    BlackboardStore,
+    FilterConfig,
+)
+from flock.store import (
+    ArtifactEnvelope as StoreArtifactEnvelope,
+)
+
+
+class GraphAssembler(metaclass=AutoTracedMeta):
+    """Build graph snapshots for dashboard consumption."""
+
+    def __init__(
+        self,
+        store: BlackboardStore,
+        collector: DashboardEventCollector,
+        orchestrator: Flock,
+    ) -> None:
+        self._store = store
+        self._collector = collector
+        self._orchestrator = orchestrator
+
+    async def build_snapshot(self, request: GraphRequest) -> GraphSnapshot:
+        filters = request.filters or GraphFilters()
+        filter_config = self._to_filter_config(filters)
+        limit = max(1, request.options.limit if request.options else 500)
+
+        envelopes, total_available = await self._store.fetch_graph_artifacts(
+            filter_config,
+            limit=limit,
+            offset=0,
+        )
+
+        graph_state: GraphState = await self._collector.snapshot_graph_state()
+        agent_snapshots = await self._collector.snapshot_agent_registry()
+        artifacts = self._convert_envelopes_to_artifacts(envelopes, graph_state.consumptions)
+
+        produced_metrics, consumed_metrics = self._calculate_agent_metrics(artifacts.values())
+
+        if request.view_mode == "agent":
+            nodes = self._build_agent_nodes(
+                artifacts,
+                produced_metrics,
+                consumed_metrics,
+                graph_state,
+                agent_snapshots,
+            )
+            edges = self._derive_agent_edges(artifacts)
+        else:
+            nodes = self._build_message_nodes(artifacts)
+            edges = self._derive_blackboard_edges(artifacts, graph_state)
+
+        statistics = None
+        if request.options.include_statistics:
+            artifact_summary = await self._store.summarize_artifacts(filter_config)
+            statistics = GraphStatistics(
+                produced_by_agent=produced_metrics,
+                consumed_by_agent=consumed_metrics,
+                artifact_summary=artifact_summary,
+            )
+
+        filters_copy = filters.model_copy(deep=True)
+        generated_at = datetime.now(timezone.utc)
+
+        return GraphSnapshot(
+            generated_at=generated_at,
+            view_mode=request.view_mode,
+            filters=filters_copy,
+            nodes=nodes,
+            edges=edges,
+            statistics=statistics,
+            total_artifacts=total_available,
+            truncated=total_available > len(artifacts),
+        )
+
+    def _convert_envelopes_to_artifacts(
+        self,
+        envelopes: Sequence[StoreArtifactEnvelope],
+        runtime_consumptions: Mapping[str, Sequence[str]],
+    ) -> dict[str, GraphArtifact]:
+        artifacts: dict[str, GraphArtifact] = {}
+        for envelope in envelopes:
+            artifact: Artifact = envelope.artifact
+            artifact_id = str(artifact.id)
+            consumers = {record.consumer for record in envelope.consumptions}
+            runtime = runtime_consumptions.get(artifact_id, [])
+            consumers.update(runtime)
+
+            correlation_id = (
+                str(artifact.correlation_id) if artifact.correlation_id is not None else None
+            )
+            visibility_kind = getattr(artifact.visibility, "kind", None)
+            if visibility_kind is None:
+                cls_name = type(artifact.visibility).__name__
+                visibility_kind = cls_name[:-10] if cls_name.endswith("Visibility") else cls_name
+
+            artifacts[artifact_id] = GraphArtifact(
+                artifact_id=artifact_id,
+                artifact_type=artifact.type,
+                produced_by=artifact.produced_by,
+                consumed_by=sorted(consumers),
+                published_at=artifact.created_at,
+                payload=dict(artifact.payload),
+                correlation_id=correlation_id,
+                visibility_kind=visibility_kind,
+                tags=sorted(artifact.tags),
+            )
+        return artifacts
+
+    def _calculate_agent_metrics(
+        self,
+        artifacts: Iterable[GraphArtifact],
+    ) -> tuple[dict[str, GraphAgentMetrics], dict[str, GraphAgentMetrics]]:
+        produced_acc: dict[str, dict[str, int]] = defaultdict(lambda: defaultdict(int))
+        consumed_acc: dict[str, dict[str, int]] = defaultdict(lambda: defaultdict(int))
+        produced_totals: dict[str, int] = defaultdict(int)
+        consumed_totals: dict[str, int] = defaultdict(int)
+
+        for artifact in artifacts:
+            producer = artifact.produced_by or "external"
+            produced_totals[producer] += 1
+            produced_acc[producer][artifact.artifact_type] += 1
+
+            for consumer in artifact.consumed_by:
+                consumed_totals[consumer] += 1
+                consumed_acc[consumer][artifact.artifact_type] += 1
+
+        produced_metrics: dict[str, GraphAgentMetrics] = {}
+        for agent, total in produced_totals.items():
+            produced_metrics[agent] = GraphAgentMetrics(
+                total=total,
+                by_type=dict(produced_acc[agent]),
+            )
+
+        consumed_metrics: dict[str, GraphAgentMetrics] = {}
+        for agent, total in consumed_totals.items():
+            consumed_metrics[agent] = GraphAgentMetrics(
+                total=total,
+                by_type=dict(consumed_acc[agent]),
+            )
+
+        return produced_metrics, consumed_metrics
+
+    def _build_agent_nodes(
+        self,
+        artifacts: Mapping[str, GraphArtifact],
+        produced_metrics: Mapping[str, GraphAgentMetrics],
+        consumed_metrics: Mapping[str, GraphAgentMetrics],
+        graph_state: GraphState,
+        agent_snapshots: Mapping[str, AgentSnapshot],
+    ) -> list[GraphNode]:
+        nodes: list[GraphNode] = []
+        agent_status = graph_state.agent_status
+        active_names: set[str] = set()
+
+        existing_names: set[str] = set()
+
+        for agent in self._orchestrator.agents:
+            subscriptions = sorted(
+                {type_name for sub in agent.subscriptions for type_name in sub.type_names}
+            )
+            output_types = sorted({output.spec.type_name for output in agent.outputs})
+
+            produced = produced_metrics.get(agent.name)
+            consumed = consumed_metrics.get(agent.name)
+            snapshot = agent_snapshots.get(agent.name)
+
+            node_data = {
+                "name": agent.name,
+                "status": agent_status.get(agent.name, "idle"),
+                "subscriptions": subscriptions,
+                "outputTypes": output_types,
+                "sentCount": produced.total if produced else 0,
+                "recvCount": consumed.total if consumed else 0,
+                "sentByType": produced.by_type if produced else {},
+                "receivedByType": consumed.by_type if consumed else {},
+                "streamingTokens": [],
+                "labels": sorted(agent.labels),
+                "firstSeen": snapshot.first_seen.isoformat() if snapshot else None,
+                "lastSeen": snapshot.last_seen.isoformat() if snapshot else None,
+                "signature": snapshot.signature if snapshot else None,
+            }
+
+            nodes.append(
+                GraphNode(
+                    id=agent.name,
+                    type="agent",
+                    data=node_data,
+                    position=GraphPosition(),
+                    hidden=False,
+                )
+            )
+            active_names.add(agent.name)
+            existing_names.add(agent.name)
+
+        for name, snapshot in agent_snapshots.items():
+            if name in active_names:
+                continue
+
+            produced = produced_metrics.get(name)
+            consumed = consumed_metrics.get(name)
+
+            node_data = {
+                "name": name,
+                "status": "inactive",
+                "subscriptions": list(snapshot.subscriptions),
+                "outputTypes": list(snapshot.output_types),
+                "sentCount": produced.total if produced else 0,
+                "recvCount": consumed.total if consumed else 0,
+                "sentByType": produced.by_type if produced else {},
+                "receivedByType": consumed.by_type if consumed else {},
+                "streamingTokens": [],
+                "labels": list(snapshot.labels),
+                "firstSeen": snapshot.first_seen.isoformat(),
+                "lastSeen": snapshot.last_seen.isoformat(),
+                "signature": snapshot.signature,
+            }
+
+            nodes.append(
+                GraphNode(
+                    id=name,
+                    type="agent",
+                    data=node_data,
+                    position=GraphPosition(),
+                    hidden=False,
+                )
+            )
+            existing_names.add(name)
+
+        metric_names = set(produced_metrics.keys()) | set(consumed_metrics.keys())
+        for name in metric_names:
+            if name in existing_names:
+                continue
+            produced = produced_metrics.get(name)
+            consumed = consumed_metrics.get(name)
+            node_data = {
+                "name": name,
+                "status": "unknown",
+                "subscriptions": [],
+                "outputTypes": [],
+                "sentCount": produced.total if produced else 0,
+                "recvCount": consumed.total if consumed else 0,
+                "sentByType": produced.by_type if produced else {},
+                "receivedByType": consumed.by_type if consumed else {},
+                "streamingTokens": [],
+                "labels": [],
+                "firstSeen": None,
+                "lastSeen": None,
+                "signature": None,
+            }
+
+            nodes.append(
+                GraphNode(
+                    id=name,
+                    type="agent",
+                    data=node_data,
+                    position=GraphPosition(),
+                    hidden=False,
+                )
+            )
+            existing_names.add(name)
+
+        return nodes
+
+    def _build_message_nodes(
+        self,
+        artifacts: Mapping[str, GraphArtifact],
+    ) -> list[GraphNode]:
+        nodes: list[GraphNode] = []
+
+        for artifact in artifacts.values():
+            payload_preview = self._payload_preview(artifact.payload)
+            timestamp_ms = int(artifact.published_at.timestamp() * 1000)
+
+            node_data = {
+                "artifactType": artifact.artifact_type,
+                "payloadPreview": payload_preview,
+                "payload": artifact.payload,
+                "producedBy": artifact.produced_by,
+                "consumedBy": list(artifact.consumed_by),
+                "timestamp": timestamp_ms,
+                "tags": artifact.tags,
+                "visibilityKind": artifact.visibility_kind or "Unknown",
+                "correlationId": artifact.correlation_id,
+            }
+
+            nodes.append(
+                GraphNode(
+                    id=artifact.artifact_id,
+                    type="message",
+                    data=node_data,
+                    position=GraphPosition(),
+                    hidden=False,
+                )
+            )
+
+        return nodes
+
+    def _derive_agent_edges(
+        self,
+        artifacts: Mapping[str, GraphArtifact],
+    ) -> list[GraphEdge]:
+        edge_payloads: dict[str, dict] = {}
+        pair_group: dict[tuple[str, str], list[str]] = defaultdict(list)
+
+        for artifact in artifacts.values():
+            producer = artifact.produced_by or "external"
+            message_type = artifact.artifact_type
+            for consumer in artifact.consumed_by:
+                edge_id = f"{producer}__{consumer}__{message_type}"
+                payload = edge_payloads.setdefault(
+                    edge_id,
+                    {
+                        "source": producer,
+                        "target": consumer,
+                        "message_type": message_type,
+                        "artifact_ids": [],
+                        "latest_timestamp": artifact.published_at,
+                    },
+                )
+                payload["artifact_ids"].append(artifact.artifact_id)
+                payload["latest_timestamp"] = max(payload["latest_timestamp"], artifact.published_at)
+                pair_key = tuple(sorted((producer, consumer)))
+                if edge_id not in pair_group[pair_key]:
+                    pair_group[pair_key].append(edge_id)
+
+        offsets = self._calculate_label_offsets(pair_group)
+        edges: list[GraphEdge] = []
+        for edge_id, payload in edge_payloads.items():
+            message_type = payload["message_type"]
+            artifact_ids = payload["artifact_ids"]
+            label = f"{message_type} ({len(artifact_ids)})"
+            edges.append(
+                GraphEdge(
+                    id=edge_id,
+                    source=payload["source"],
+                    target=payload["target"],
+                    type="message_flow",
+                    label=label,
+                    data={
+                        "messageType": message_type,
+                        "messageCount": len(artifact_ids),
+                        "artifactIds": artifact_ids,
+                        "latestTimestamp": payload["latest_timestamp"].isoformat(),
+                        "labelOffset": offsets.get(edge_id, 0.0),
+                    },
+                    marker_end=GraphMarker(),
+                    hidden=False,
+                )
+            )
+
+        return edges
+
+    def _derive_blackboard_edges(
+        self,
+        artifacts: Mapping[str, GraphArtifact],
+        graph_state: GraphState,
+    ) -> list[GraphEdge]:
+        artifact_ids = set(artifacts.keys())
+        edge_payloads: dict[str, dict] = {}
+        pair_group: dict[tuple[str, str], list[str]] = defaultdict(list)
+
+        for run in self._collect_runs_for_blackboard(artifacts, graph_state):
+            if run.status == "active":
+                continue
+            consumed = [
+                artifact_id for artifact_id in run.consumed_artifacts if artifact_id in artifact_ids
+            ]
+            produced = [
+                artifact_id for artifact_id in run.produced_artifacts if artifact_id in artifact_ids
+            ]
+            if not consumed or not produced:
+                continue
+            for consumed_id in consumed:
+                for produced_id in produced:
+                    edge_id = f"{consumed_id}__{produced_id}__{run.run_id}"
+                    payload = edge_payloads.setdefault(
+                        edge_id,
+                        {
+                            "source": consumed_id,
+                            "target": produced_id,
+                            "agent_name": run.agent_name,
+                            "run_id": run.run_id,
+                            "duration_ms": run.duration_ms,
+                        },
+                    )
+                    pair_key = tuple(sorted((consumed_id, produced_id)))
+                    if edge_id not in pair_group[pair_key]:
+                        pair_group[pair_key].append(edge_id)
+
+        offsets = self._calculate_label_offsets(pair_group)
+        edges: list[GraphEdge] = []
+        for edge_id, payload in edge_payloads.items():
+            edges.append(
+                GraphEdge(
+                    id=edge_id,
+                    source=payload["source"],
+                    target=payload["target"],
+                    type="transformation",
+                    label=payload["agent_name"],
+                    data={
+                        "transformerAgent": payload["agent_name"],
+                        "runId": payload["run_id"],
+                        "durationMs": payload["duration_ms"],
+                        "labelOffset": offsets.get(edge_id, 0.0),
+                    },
+                    marker_end=GraphMarker(),
+                    hidden=False,
+                )
+            )
+
+        return edges
+
+    def _payload_preview(self, payload: Mapping[str, object]) -> str:
+        try:
+            serialized = json.dumps(payload, ensure_ascii=False)
+        except Exception:
+            serialized = str(payload)
+        return serialized[:120]
+
+    def _to_filter_config(self, filters: GraphFilters) -> FilterConfig:
+        start, end = self._resolve_time_bounds(filters.time_range)
+        return FilterConfig(
+            type_names=self._optional_set(filters.artifact_types),
+            produced_by=self._optional_set(filters.producers),
+            correlation_id=filters.correlation_id or None,
+            tags=self._optional_set(filters.tags),
+            visibility=self._optional_set(filters.visibility),
+            start=start,
+            end=end,
+        )

+    def _collect_runs_for_blackboard(
+        self,
+        artifacts: Mapping[str, GraphArtifact],
+        graph_state: GraphState,
+    ) -> list[GraphRun]:
+        existing_runs = list(graph_state.runs)
+        synthetic_runs = self._build_synthetic_runs(
+            artifacts, graph_state.consumptions, existing_runs
+        )
+        return existing_runs + synthetic_runs
+
+    def _build_synthetic_runs(
+        self,
+        artifacts: Mapping[str, GraphArtifact],
+        consumptions: Mapping[str, Sequence[str]],
+        existing_runs: Sequence[GraphRun],
+    ) -> list[GraphRun]:
+        existing_keys = {(run.agent_name, run.correlation_id or "") for run in existing_runs}
+
+        produced_buckets: dict[tuple[str, str], list[str]] = defaultdict(list)
+        consumed_buckets: dict[tuple[str, str], list[str]] = defaultdict(list)
+
+        for artifact in artifacts.values():
+            correlation = artifact.correlation_id or ""
+            producer = artifact.produced_by or "external"
+            produced_buckets[(producer, correlation)].append(artifact.artifact_id)
+
+            consumer_list = consumptions.get(artifact.artifact_id, artifact.consumed_by)
+            for consumer in consumer_list:
+                consumed_buckets[(consumer, correlation)].append(artifact.artifact_id)
+
+        synthetic_runs: list[GraphRun] = []
+        counter = 0
+        for key, consumed in consumed_buckets.items():
+            produced = produced_buckets.get(key)
+            if not consumed or not produced:
+                continue
+            if key in existing_keys:
+                continue
+
+            agent_name, correlation = key
+            run_id = f"synthetic_{agent_name}_{correlation or 'uncorrelated'}_{counter}"
+            counter += 1
+
+            synthetic_runs.append(
+                GraphRun(
+                    run_id=run_id,
+                    agent_name=agent_name,
+                    correlation_id=correlation or None,
+                    status="completed",
+                    consumed_artifacts=sorted(set(consumed)),
+                    produced_artifacts=sorted(set(produced)),
+                )
+            )
+
+        return synthetic_runs
+
+    def _resolve_time_bounds(
+        self, time_range: GraphTimeRange
+    ) -> tuple[datetime | None, datetime | None]:
+        now = datetime.now(timezone.utc)
+        preset = time_range.preset
+
+        if preset == GraphTimeRangePreset.ALL:
+            return None, None
+        if preset == GraphTimeRangePreset.CUSTOM:
+            return self._ensure_timezone(time_range.start), self._ensure_timezone(time_range.end)
+
+        if preset == GraphTimeRangePreset.LAST_5_MIN:
+            delta = timedelta(minutes=5)
+        elif preset == GraphTimeRangePreset.LAST_1_HOUR:
+            delta = timedelta(hours=1)
+        else:
+            delta = timedelta(minutes=10)
+
+        return now - delta, now
+
+    @staticmethod
+    def _ensure_timezone(value: datetime | None) -> datetime | None:
+        if value is None:
+            return None
+        if value.tzinfo is None:
+            return value.replace(tzinfo=timezone.utc)
+        return value.astimezone(timezone.utc)
+
+    @staticmethod
+    def _optional_set(values: Sequence[str]) -> set[str] | None:
+        cleaned = {value for value in values if value}
+        return cleaned if cleaned else None
+
+    @staticmethod
+    def _calculate_label_offsets(groups: Mapping[tuple[str, str], list[str]]) -> dict[str, float]:
+        offsets: dict[str, float] = {}
+        for edge_ids in groups.values():
+            total = len(edge_ids)
+            if total <= 1:
+                for edge_id in edge_ids:
+                    offsets[edge_id] = 0.0
+                continue
+            offset_range = min(40.0, total * 15.0)
+            step = offset_range / (total - 1)
+            for index, edge_id in enumerate(edge_ids):
+                offsets[edge_id] = index * step - offset_range / 2
+        return offsets
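The new GraphAssembler above moves graph assembly server-side; the frontend's transforms.ts pipeline is deleted in this release and graphService.ts consumes server-built snapshots instead. For orientation, here is a minimal sketch of how a service endpoint might drive it. The GraphRequest construction is an assumption inferred only from the fields build_snapshot() reads (view_mode, filters, options.limit, options.include_statistics), not a confirmed API:

# Sketch only (not the package's actual service code). GraphRequest is
# presumably a Pydantic model whose "options" field defaults to an object
# exposing limit and include_statistics, since build_snapshot() reads
# options.include_statistics unconditionally.
from flock.dashboard.graph_builder import GraphAssembler
from flock.dashboard.models.graph import GraphRequest

async def graph_endpoint(store, collector, orchestrator):
    assembler = GraphAssembler(store=store, collector=collector, orchestrator=orchestrator)
    # "agent" view aggregates per-agent metrics; any other view_mode yields message nodes.
    snapshot = await assembler.build_snapshot(GraphRequest(view_mode="agent"))
    return snapshot

Worth noting the _calculate_label_offsets arithmetic: for three parallel edges between the same node pair, offset_range = min(40.0, 3 * 15.0) = 40.0 and step = 40.0 / 2 = 20.0, so the labels land at -20.0, 0.0, and +20.0 and fan out symmetrically instead of overlapping.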
flock/dashboard/launcher.py
@@ -29,26 +29,32 @@ class DashboardLauncher:
     - Clean up npm processes on shutdown
 
     Usage:
-        launcher = DashboardLauncher(port=8000)
+        launcher = DashboardLauncher(port=8344)
         launcher.start()  # Starts npm and opens browser
         # ... orchestrator runs ...
         launcher.stop()  # Cleanup
 
     Or as context manager:
-        with DashboardLauncher(port=8000):
+        with DashboardLauncher(port=8344):
             # orchestrator.serve() runs
             pass  # Automatically cleaned up
     """
 
-    def __init__(self, port: int = 8000, frontend_dir: Path | None = None):
+    def __init__(
+        self,
+        port: int = 8344,
+        frontend_dir: Path | None = None,
+        static_dir: Path | None = None,
+    ):
         """Initialize dashboard launcher.
 
         Args:
-            port: HTTP port where dashboard will be served (default: 8000)
+            port: HTTP port where dashboard will be served (default: 8344)
             frontend_dir: Optional frontend directory path (defaults to FRONTEND_DIR)
         """
         self.port = port
         self.frontend_dir = frontend_dir or FRONTEND_DIR
+        self.static_dir = static_dir or Path(__file__).parent / "static"
         self.dev_mode = os.getenv("DASHBOARD_DEV", "0") == "1"
         self._npm_process: subprocess.Popen | None = None
 
@@ -130,8 +136,7 @@ class DashboardLauncher:
         import shutil
 
         source_dir = self.frontend_dir / "dist"
-        # Dashboard directory is src/flock/dashboard
-        target_dir = Path(__file__).parent / "static"
+        target_dir = self.static_dir
 
         if not source_dir.exists():
             print(f"[Dashboard] Warning: Build output not found at {source_dir}")