penguiflow 2.0.0__tar.gz → 2.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


Files changed (50)
  1. {penguiflow-2.0.0 → penguiflow-2.1.0}/PKG-INFO +84 -4
  2. {penguiflow-2.0.0 → penguiflow-2.1.0}/README.md +79 -3
  3. {penguiflow-2.0.0 → penguiflow-2.1.0}/penguiflow/__init__.py +20 -1
  4. penguiflow-2.1.0/penguiflow/admin.py +174 -0
  5. penguiflow-2.1.0/penguiflow/bus.py +30 -0
  6. {penguiflow-2.0.0 → penguiflow-2.1.0}/penguiflow/core.py +222 -13
  7. penguiflow-2.1.0/penguiflow/remote.py +486 -0
  8. penguiflow-2.1.0/penguiflow/state.py +64 -0
  9. {penguiflow-2.0.0 → penguiflow-2.1.0}/penguiflow.egg-info/PKG-INFO +84 -4
  10. {penguiflow-2.0.0 → penguiflow-2.1.0}/penguiflow.egg-info/SOURCES.txt +10 -0
  11. penguiflow-2.1.0/penguiflow.egg-info/entry_points.txt +2 -0
  12. {penguiflow-2.0.0 → penguiflow-2.1.0}/penguiflow.egg-info/requires.txt +5 -0
  13. penguiflow-2.1.0/penguiflow.egg-info/top_level.txt +2 -0
  14. penguiflow-2.1.0/penguiflow_a2a/__init__.py +19 -0
  15. penguiflow-2.1.0/penguiflow_a2a/server.py +695 -0
  16. {penguiflow-2.0.0 → penguiflow-2.1.0}/pyproject.toml +11 -3
  17. penguiflow-2.1.0/tests/test_a2a_server.py +341 -0
  18. penguiflow-2.1.0/tests/test_distribution_hooks.py +140 -0
  19. penguiflow-2.1.0/tests/test_remote.py +216 -0
  20. penguiflow-2.0.0/penguiflow.egg-info/top_level.txt +0 -1
  21. {penguiflow-2.0.0 → penguiflow-2.1.0}/LICENSE +0 -0
  22. {penguiflow-2.0.0 → penguiflow-2.1.0}/penguiflow/errors.py +0 -0
  23. {penguiflow-2.0.0 → penguiflow-2.1.0}/penguiflow/metrics.py +0 -0
  24. {penguiflow-2.0.0 → penguiflow-2.1.0}/penguiflow/middlewares.py +0 -0
  25. {penguiflow-2.0.0 → penguiflow-2.1.0}/penguiflow/node.py +0 -0
  26. {penguiflow-2.0.0 → penguiflow-2.1.0}/penguiflow/patterns.py +0 -0
  27. {penguiflow-2.0.0 → penguiflow-2.1.0}/penguiflow/policies.py +0 -0
  28. {penguiflow-2.0.0 → penguiflow-2.1.0}/penguiflow/registry.py +0 -0
  29. {penguiflow-2.0.0 → penguiflow-2.1.0}/penguiflow/streaming.py +0 -0
  30. {penguiflow-2.0.0 → penguiflow-2.1.0}/penguiflow/testkit.py +0 -0
  31. {penguiflow-2.0.0 → penguiflow-2.1.0}/penguiflow/types.py +0 -0
  32. {penguiflow-2.0.0 → penguiflow-2.1.0}/penguiflow/viz.py +0 -0
  33. {penguiflow-2.0.0 → penguiflow-2.1.0}/penguiflow.egg-info/dependency_links.txt +0 -0
  34. {penguiflow-2.0.0 → penguiflow-2.1.0}/setup.cfg +0 -0
  35. {penguiflow-2.0.0 → penguiflow-2.1.0}/tests/test_budgets.py +0 -0
  36. {penguiflow-2.0.0 → penguiflow-2.1.0}/tests/test_cancel.py +0 -0
  37. {penguiflow-2.0.0 → penguiflow-2.1.0}/tests/test_controller.py +0 -0
  38. {penguiflow-2.0.0 → penguiflow-2.1.0}/tests/test_core.py +0 -0
  39. {penguiflow-2.0.0 → penguiflow-2.1.0}/tests/test_errors.py +0 -0
  40. {penguiflow-2.0.0 → penguiflow-2.1.0}/tests/test_metadata.py +0 -0
  41. {penguiflow-2.0.0 → penguiflow-2.1.0}/tests/test_metrics.py +0 -0
  42. {penguiflow-2.0.0 → penguiflow-2.1.0}/tests/test_middlewares.py +0 -0
  43. {penguiflow-2.0.0 → penguiflow-2.1.0}/tests/test_node.py +0 -0
  44. {penguiflow-2.0.0 → penguiflow-2.1.0}/tests/test_patterns.py +0 -0
  45. {penguiflow-2.0.0 → penguiflow-2.1.0}/tests/test_registry.py +0 -0
  46. {penguiflow-2.0.0 → penguiflow-2.1.0}/tests/test_routing_policy.py +0 -0
  47. {penguiflow-2.0.0 → penguiflow-2.1.0}/tests/test_streaming.py +0 -0
  48. {penguiflow-2.0.0 → penguiflow-2.1.0}/tests/test_testkit.py +0 -0
  49. {penguiflow-2.0.0 → penguiflow-2.1.0}/tests/test_types.py +0 -0
  50. {penguiflow-2.0.0 → penguiflow-2.1.0}/tests/test_viz.py +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: penguiflow
- Version: 2.0.0
+ Version: 2.1.0
  Summary: Async agent orchestration primitives.
  Author: PenguiFlow Team
  License: MIT License
@@ -37,6 +37,10 @@ Requires-Dist: pytest-asyncio>=0.23; extra == "dev"
  Requires-Dist: pytest-cov>=4.0; extra == "dev"
  Requires-Dist: coverage[toml]>=7.0; extra == "dev"
  Requires-Dist: ruff>=0.2; extra == "dev"
+ Requires-Dist: fastapi>=0.110; extra == "dev"
+ Requires-Dist: httpx>=0.27; extra == "dev"
+ Provides-Extra: a2a-server
+ Requires-Dist: fastapi>=0.110; extra == "a2a-server"
  Dynamic: license-file

  # PenguiFlow 🐧❄️
@@ -77,6 +81,18 @@ It provides:
  * **Observability hooks** (`FlowEvent` callbacks for logging, MLflow, or custom metrics sinks)
  * **Policy-driven routing** (optional policies steer routers without breaking existing flows)
  * **Traceable exceptions** (`FlowError` captures node/trace metadata and optionally emits to Rookery)
+ * **Distribution hooks (opt-in)** — plug a `StateStore` to persist trace history and a
+   `MessageBus` to publish floe traffic for remote workers without changing existing flows.
+ * **Remote calls (opt-in)** — `RemoteNode` bridges the runtime to external agents through a
+   pluggable `RemoteTransport` interface (A2A-ready) while propagating streaming chunks and
+   cancellation.
+ * **A2A server adapter (opt-in)** — wrap a PenguiFlow graph in a FastAPI surface using
+   `penguiflow_a2a.A2AServerAdapter` so other agents can call `message/send`,
+   `message/stream`, and `tasks/cancel` while reusing the runtime's backpressure and
+   cancellation semantics.
+ * **Observability & ops polish** — remote calls emit structured metrics (latency, payload
+   sizes, cancel reasons) and the `penguiflow-admin` CLI replays trace history from any
+   configured `StateStore` for debugging.

  Built on pure `asyncio` (no threads), PenguiFlow is small, predictable, and repo-agnostic.
  Product repos only define **their models + node functions** — the core stays dependency-light.
@@ -168,6 +184,10 @@ print(out.payload) # PackOut(...)
  await flow.stop()
  ```

+ > **Opt-in distribution:** pass `state_store=` and/or `message_bus=` when calling
+ > `penguiflow.core.create(...)` to persist trace history and publish floe traffic
+ > without changing node logic.
+
  ---

  ## 🧭 Design Principles
@@ -222,6 +242,60 @@ sacrificing backpressure or ordering guarantees. The helper wraps the payload i
  increments per-stream sequence numbers. See `tests/test_streaming.py` and
  `examples/streaming_llm/` for an end-to-end walk-through.

+ ### Remote orchestration
+
+ Phase 2 introduces `RemoteNode` and the `RemoteTransport` protocol so flows can delegate
+ work to remote agents (e.g., the A2A JSON-RPC/SSE ecosystem) without changing existing
+ nodes. The helper records remote bindings via the `StateStore`, mirrors streaming
+ partials back into the graph, and propagates per-trace cancellation to remote tasks via
+ `RemoteTransport.cancel`. See `tests/test_remote.py` for reference in-memory transports.
+
+ ### Exposing a flow over A2A
+
+ Install the optional extra to expose PenguiFlow as an A2A-compatible FastAPI service:
+
+ ```bash
+ pip install "penguiflow[a2a-server]"
+ ```
+
+ Create the adapter and mount the routes:
+
+ ```python
+ from penguiflow import Message, Node, create
+ from penguiflow_a2a import A2AAgentCard, A2AServerAdapter, A2ASkill, create_a2a_app
+
+ async def orchestrate(message: Message, ctx):
+     await ctx.emit_chunk(parent=message, text="thinking...")
+     return {"result": "done"}
+
+ node = Node(orchestrate, name="main")
+ flow = create(node.to())
+
+ card = A2AAgentCard(
+     name="Main Agent",
+     description="Primary entrypoint for orchestration",
+     version="2.1.0",
+     skills=[A2ASkill(name="orchestrate", description="Handles orchestration")],
+ )
+
+ adapter = A2AServerAdapter(
+     flow,
+     agent_card=card,
+     agent_url="https://agent.example",
+ )
+ app = create_a2a_app(adapter)
+ ```
+
+ The generated FastAPI app implements:
+
+ * `GET /agent` for discovery (Agent Card)
+ * `POST /message/send` for unary execution
+ * `POST /message/stream` for SSE streaming
+ * `POST /tasks/cancel` to mirror cancellation into PenguiFlow traces
+
+ `A2AServerAdapter` reuses the runtime's `StateStore` hooks, so bindings between trace IDs
+ and external `taskId`/`contextId` pairs are persisted automatically.
+
  ### Reliability & guardrails

  PenguiFlow enforces reliability boundaries out of the box:
@@ -478,9 +552,15 @@ docs or diagramming pipelines.
  * **Structured `FlowEvent`s**: every node event carries `{ts, trace_id, node_name, event,
    latency_ms, q_depth_in, q_depth_out, attempt}` plus a mutable `extra` map for custom
    annotations.
+ * **Remote call telemetry**: `RemoteNode` executions emit extra metrics (latency, request
+   and response bytes, context/task identifiers, cancel reasons) so remote hops can be
+   traced end-to-end.
  * **Middleware hooks**: subscribe observers (e.g., MLflow) to the structured `FlowEvent`
    stream. See `examples/mlflow_metrics/` for an MLflow integration and
    `examples/reliability_middleware/` for a concrete timeout + retry walkthrough.
+ * **`penguiflow-admin` CLI**: inspect or replay stored trace history from any configured
+   `StateStore` (`penguiflow-admin history <trace>` or `penguiflow-admin replay <trace>`)
+   when debugging distributed runs.

  ---

@@ -488,9 +568,9 @@ docs or diagramming pipelines.

  - **In-process runtime**: there is no built-in distribution layer yet. Long-running CPU work should be delegated to your own pools or services.
  - **Registry-driven typing**: nodes default to validation. Provide a `ModelRegistry` when calling `flow.run(...)` or set `validate="none"` explicitly for untyped hops.
- - **Observability**: structured `FlowEvent` callbacks power logs/metrics; integrations with
-   third-party stacks (OTel, Prometheus, Datadog) remain DIY. See the MLflow middleware
-   example for a lightweight pattern.
+ - **Observability**: structured `FlowEvent` callbacks and the `penguiflow-admin` CLI power
+   local debugging; integrations with third-party stacks (OTel, Prometheus, Datadog) remain
+   DIY. See the MLflow middleware example for a lightweight pattern.
  - **Roadmap**: follow-up releases focus on optional distributed backends, deeper observability integrations, and additional playbook patterns. Contributions and proposals are welcome!

  ---
@@ -36,6 +36,18 @@ It provides:
  * **Observability hooks** (`FlowEvent` callbacks for logging, MLflow, or custom metrics sinks)
  * **Policy-driven routing** (optional policies steer routers without breaking existing flows)
  * **Traceable exceptions** (`FlowError` captures node/trace metadata and optionally emits to Rookery)
+ * **Distribution hooks (opt-in)** — plug a `StateStore` to persist trace history and a
+   `MessageBus` to publish floe traffic for remote workers without changing existing flows.
+ * **Remote calls (opt-in)** — `RemoteNode` bridges the runtime to external agents through a
+   pluggable `RemoteTransport` interface (A2A-ready) while propagating streaming chunks and
+   cancellation.
+ * **A2A server adapter (opt-in)** — wrap a PenguiFlow graph in a FastAPI surface using
+   `penguiflow_a2a.A2AServerAdapter` so other agents can call `message/send`,
+   `message/stream`, and `tasks/cancel` while reusing the runtime's backpressure and
+   cancellation semantics.
+ * **Observability & ops polish** — remote calls emit structured metrics (latency, payload
+   sizes, cancel reasons) and the `penguiflow-admin` CLI replays trace history from any
+   configured `StateStore` for debugging.

  Built on pure `asyncio` (no threads), PenguiFlow is small, predictable, and repo-agnostic.
  Product repos only define **their models + node functions** — the core stays dependency-light.
@@ -127,6 +139,10 @@ print(out.payload) # PackOut(...)
  await flow.stop()
  ```

+ > **Opt-in distribution:** pass `state_store=` and/or `message_bus=` when calling
+ > `penguiflow.core.create(...)` to persist trace history and publish floe traffic
+ > without changing node logic.
+
  ---

  ## 🧭 Design Principles
@@ -181,6 +197,60 @@ sacrificing backpressure or ordering guarantees. The helper wraps the payload i
  increments per-stream sequence numbers. See `tests/test_streaming.py` and
  `examples/streaming_llm/` for an end-to-end walk-through.

+ ### Remote orchestration
+
+ Phase 2 introduces `RemoteNode` and the `RemoteTransport` protocol so flows can delegate
+ work to remote agents (e.g., the A2A JSON-RPC/SSE ecosystem) without changing existing
+ nodes. The helper records remote bindings via the `StateStore`, mirrors streaming
+ partials back into the graph, and propagates per-trace cancellation to remote tasks via
+ `RemoteTransport.cancel`. See `tests/test_remote.py` for reference in-memory transports.
+
+ ### Exposing a flow over A2A
+
+ Install the optional extra to expose PenguiFlow as an A2A-compatible FastAPI service:
+
+ ```bash
+ pip install "penguiflow[a2a-server]"
+ ```
+
+ Create the adapter and mount the routes:
+
+ ```python
+ from penguiflow import Message, Node, create
+ from penguiflow_a2a import A2AAgentCard, A2AServerAdapter, A2ASkill, create_a2a_app
+
+ async def orchestrate(message: Message, ctx):
+     await ctx.emit_chunk(parent=message, text="thinking...")
+     return {"result": "done"}
+
+ node = Node(orchestrate, name="main")
+ flow = create(node.to())
+
+ card = A2AAgentCard(
+     name="Main Agent",
+     description="Primary entrypoint for orchestration",
+     version="2.1.0",
+     skills=[A2ASkill(name="orchestrate", description="Handles orchestration")],
+ )
+
+ adapter = A2AServerAdapter(
+     flow,
+     agent_card=card,
+     agent_url="https://agent.example",
+ )
+ app = create_a2a_app(adapter)
+ ```
+
+ The generated FastAPI app implements:
+
+ * `GET /agent` for discovery (Agent Card)
+ * `POST /message/send` for unary execution
+ * `POST /message/stream` for SSE streaming
+ * `POST /tasks/cancel` to mirror cancellation into PenguiFlow traces
+
+ `A2AServerAdapter` reuses the runtime's `StateStore` hooks, so bindings between trace IDs
+ and external `taskId`/`contextId` pairs are persisted automatically.
+
  ### Reliability & guardrails

  PenguiFlow enforces reliability boundaries out of the box:
@@ -437,9 +507,15 @@ docs or diagramming pipelines.
  * **Structured `FlowEvent`s**: every node event carries `{ts, trace_id, node_name, event,
    latency_ms, q_depth_in, q_depth_out, attempt}` plus a mutable `extra` map for custom
    annotations.
+ * **Remote call telemetry**: `RemoteNode` executions emit extra metrics (latency, request
+   and response bytes, context/task identifiers, cancel reasons) so remote hops can be
+   traced end-to-end.
  * **Middleware hooks**: subscribe observers (e.g., MLflow) to the structured `FlowEvent`
    stream. See `examples/mlflow_metrics/` for an MLflow integration and
    `examples/reliability_middleware/` for a concrete timeout + retry walkthrough.
+ * **`penguiflow-admin` CLI**: inspect or replay stored trace history from any configured
+   `StateStore` (`penguiflow-admin history <trace>` or `penguiflow-admin replay <trace>`)
+   when debugging distributed runs.

  ---

@@ -447,9 +523,9 @@ docs or diagramming pipelines.

  - **In-process runtime**: there is no built-in distribution layer yet. Long-running CPU work should be delegated to your own pools or services.
  - **Registry-driven typing**: nodes default to validation. Provide a `ModelRegistry` when calling `flow.run(...)` or set `validate="none"` explicitly for untyped hops.
- - **Observability**: structured `FlowEvent` callbacks power logs/metrics; integrations with
-   third-party stacks (OTel, Prometheus, Datadog) remain DIY. See the MLflow middleware
-   example for a lightweight pattern.
+ - **Observability**: structured `FlowEvent` callbacks and the `penguiflow-admin` CLI power
+   local debugging; integrations with third-party stacks (OTel, Prometheus, Datadog) remain
+   DIY. See the MLflow middleware example for a lightweight pattern.
  - **Roadmap**: follow-up releases focus on optional distributed backends, deeper observability integrations, and additional playbook patterns. Contributions and proposals are welcome!

  ---
@@ -3,6 +3,7 @@
  from __future__ import annotations

  from . import testkit
+ from .bus import BusEnvelope, MessageBus
  from .core import (
      DEFAULT_QUEUE_MAXSIZE,
      Context,
@@ -18,6 +19,14 @@ from .node import Node, NodePolicy
  from .patterns import join_k, map_concurrent, predicate_router, union_router
  from .policies import DictRoutingPolicy, RoutingPolicy, RoutingRequest
  from .registry import ModelRegistry
+ from .remote import (
+     RemoteCallRequest,
+     RemoteCallResult,
+     RemoteNode,
+     RemoteStreamEvent,
+     RemoteTransport,
+ )
+ from .state import RemoteBinding, StateStore, StoredEvent
  from .streaming import (
      chunk_to_ws_json,
      emit_stream_events,
@@ -40,6 +49,8 @@ __all__ = [
      "FlowEvent",
      "FlowError",
      "FlowErrorCode",
+     "MessageBus",
+     "BusEnvelope",
      "call_playbook",
      "Headers",
      "Message",
@@ -63,6 +74,14 @@ __all__ = [
      "flow_to_dot",
      "create",
      "testkit",
+     "StateStore",
+     "StoredEvent",
+     "RemoteBinding",
+     "RemoteTransport",
+     "RemoteCallRequest",
+     "RemoteCallResult",
+     "RemoteStreamEvent",
+     "RemoteNode",
  ]

- __version__ = "2.0.0"
+ __version__ = "2.1.0"
@@ -0,0 +1,174 @@
+ """Developer CLI helpers for inspecting PenguiFlow trace history."""
+
+ from __future__ import annotations
+
+ import argparse
+ import asyncio
+ import importlib
+ import json
+ import sys
+ from collections.abc import Callable, Sequence
+ from typing import Any
+
+ from .state import StateStore, StoredEvent
+
+ __all__ = ["load_state_store", "render_events", "main"]
+
+
+ class _Args(argparse.Namespace):
+     handler: Callable[[_Args], Any]
+     state_store: str
+     trace_id: str
+     tail: int | None
+     delay: float
+
+
+ def _resolve_factory(spec: str) -> Callable[[], Any]:
+     module_name, _, attr = spec.partition(":")
+     if not module_name or not attr:
+         raise ValueError(
+             "state store spec must be in the form 'package.module:callable'"
+         )
+     module = importlib.import_module(module_name)
+     try:
+         factory = getattr(module, attr)
+     except AttributeError as exc:  # pragma: no cover - defensive guard
+         raise ValueError(f"{spec!r} does not resolve to a callable") from exc
+     if not callable(factory):
+         raise TypeError(f"{spec!r} resolved to {type(factory)!r}, not a callable")
+     return factory
+
+
+ async def load_state_store(spec: str) -> StateStore:
+     """Instantiate a :class:`StateStore` from ``module:callable`` spec."""
+
+     factory = _resolve_factory(spec)
+     instance = factory()
+     if asyncio.iscoroutine(instance):
+         instance = await instance
+     required = ("save_event", "load_history", "save_remote_binding")
+     if not all(hasattr(instance, attr) for attr in required):  # pragma: no cover
+         raise TypeError(
+             "StateStore factories must implement "
+             "save_event/load_history/save_remote_binding"
+         )
+     return instance
+
+
+ def _trim_events(events: Sequence[StoredEvent], tail: int | None) -> list[StoredEvent]:
+     items = list(events)
+     if tail is None:
+         return items
+     if tail <= 0:
+         return []
+     return items[-tail:]
+
+
+ def render_events(
+     events: Sequence[StoredEvent], *, tail: int | None = None
+ ) -> list[str]:
+     """Return JSON line representations of ``events`` (optionally tail-truncated)."""
+
+     trimmed = _trim_events(events, tail)
+     lines: list[str] = []
+     for event in trimmed:
+         payload = dict(event.payload)
+         payload.setdefault("event", event.kind)
+         payload.setdefault("trace_id", event.trace_id)
+         payload.setdefault("node_name", event.node_name)
+         payload.setdefault("node_id", event.node_id)
+         payload.setdefault("ts", event.ts)
+         lines.append(json.dumps(payload, sort_keys=True, default=str))
+     return lines
+
+
+ async def _cmd_history(args: _Args) -> None:
+     store = await load_state_store(args.state_store)
+     events = await store.load_history(args.trace_id)
+     for line in render_events(events, tail=args.tail):
+         print(line)
+
+
+ async def _cmd_replay(args: _Args) -> None:
+     store = await load_state_store(args.state_store)
+     events = _trim_events(await store.load_history(args.trace_id), args.tail)
+     total = len(events)
+     if not total:
+         print(f"# trace {args.trace_id} has no stored events")
+         return
+     print(f"# replay trace={args.trace_id} events={total}")
+     for event in events:
+         payload = render_events([event])[0]
+         print(payload)
+         if args.delay > 0:
+             await asyncio.sleep(args.delay)
+
+
+ def _build_parser() -> argparse.ArgumentParser:
+     parser = argparse.ArgumentParser(
+         prog="penguiflow-admin",
+         description=(
+             "Inspect PenguiFlow trace history via configured StateStore "
+             "adapters."
+         ),
+     )
+     common = argparse.ArgumentParser(add_help=False)
+     common.add_argument(
+         "--state-store",
+         required=True,
+         help="Import path to a factory returning a StateStore (module:callable)",
+     )
+     common.add_argument(
+         "--tail",
+         type=int,
+         default=None,
+         help="Only show the last N events from the trace history.",
+     )
+     subparsers = parser.add_subparsers(dest="command", required=True)
+
+     history = subparsers.add_parser(
+         "history",
+         parents=[common],
+         help="Print stored events for a trace as JSON lines.",
+     )
+     history.add_argument("trace_id", help="Trace identifier to inspect")
+     history.set_defaults(handler=_cmd_history)
+
+     replay = subparsers.add_parser(
+         "replay",
+         parents=[common],
+         help="Replay events with optional delay to mimic runtime emission.",
+     )
+     replay.add_argument("trace_id", help="Trace identifier to replay")
+     replay.add_argument(
+         "--delay",
+         type=float,
+         default=0.0,
+         help="Sleep duration (seconds) between events when replaying.",
+     )
+     replay.set_defaults(handler=_cmd_replay)
+
+     return parser
+
+
+ def main(argv: Sequence[str] | None = None) -> int:
+     """Entry point for the ``penguiflow-admin`` CLI."""
+
+     parser = _build_parser()
+     args = parser.parse_args(argv)
+     handler = getattr(args, "handler", None)
+     if handler is None:  # pragma: no cover - argparse guard
+         parser.print_help()
+         return 1
+
+     try:
+         asyncio.run(handler(args))
+     except Exception as exc:  # pragma: no cover - runtime guard
+         print(f"error: {exc}", file=sys.stderr)
+         return 1
+     return 0
+
+
+ if __name__ == "__main__":  # pragma: no cover - manual invocation
+     raise SystemExit(main())
+
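The new `admin.py` resolves `--state-store` specs of the form `module:callable` and only requires the returned object to expose `save_event`, `load_history`, and `save_remote_binding`. As a rough illustration of what such a factory could look like, here is a minimal in-memory sketch: the module path, class name, and storage backend are hypothetical, only the method names mirror what the CLI validates, and a real adapter would persist events somewhere durable so the CLI can read them from a separate process.

```python
# Hypothetical myproject/stores.py, usable as:
#   penguiflow-admin history <trace_id> --state-store myproject.stores:make_store
from __future__ import annotations

from typing import Any


class InMemoryStateStore:
    """Illustrative StateStore; keeps everything in process memory."""

    def __init__(self) -> None:
        self._events: dict[str, list[Any]] = {}
        self._bindings: list[Any] = []

    async def save_event(self, event: Any) -> None:
        # Group events by trace so load_history returns them in arrival order.
        self._events.setdefault(event.trace_id, []).append(event)

    async def load_history(self, trace_id: str) -> list[Any]:
        return list(self._events.get(trace_id, []))

    async def save_remote_binding(self, binding: Any) -> None:
        self._bindings.append(binding)


def make_store() -> InMemoryStateStore:
    # Factory referenced by the CLI's --state-store module:callable spec.
    return InMemoryStateStore()
```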
@@ -0,0 +1,30 @@
+ """Message bus protocol for distributed PenguiFlow edges."""
+
+ from __future__ import annotations
+
+ from collections.abc import Mapping
+ from dataclasses import dataclass
+ from typing import Any, Protocol
+
+
+ @dataclass(slots=True)
+ class BusEnvelope:
+     """Structured payload published to a :class:`MessageBus`."""
+
+     edge: str
+     source: str | None
+     target: str | None
+     trace_id: str | None
+     payload: Any
+     headers: Mapping[str, Any] | None
+     meta: Mapping[str, Any] | None
+
+
+ class MessageBus(Protocol):
+     """Protocol for pluggable message bus adapters."""
+
+     async def publish(self, envelope: BusEnvelope) -> None:
+         """Publish an envelope for downstream workers."""
+
+
+ __all__ = ["BusEnvelope", "MessageBus"]
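Since `MessageBus` is a `Protocol` with a single async `publish` method, any object with that signature can be plugged in. Below is a hedged sketch of a logging adapter, assuming the `message_bus=` keyword on `create(...)` described in the README's opt-in distribution note; the class name and logger wiring are illustrative, not part of the package.

```python
import logging

from penguiflow import BusEnvelope  # exported via penguiflow.__init__ in 2.1.0


class LoggingBus:
    """Illustrative MessageBus adapter: logs floe envelopes instead of publishing to a broker."""

    def __init__(self, logger: logging.Logger | None = None) -> None:
        self._log = logger or logging.getLogger("penguiflow.bus")

    async def publish(self, envelope: BusEnvelope) -> None:
        # Satisfies the MessageBus protocol: one async publish(envelope) hook.
        self._log.info(
            "edge=%s trace=%s source=%s target=%s",
            envelope.edge,
            envelope.trace_id,
            envelope.source,
            envelope.target,
        )


# Wiring (per the README note): flow = create(node.to(), message_bus=LoggingBus())
```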