penguiflow 2.2.3-py3-none-any.whl → 2.2.5-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.



Files changed (46)
  1. examples/__init__.py +0 -0
  2. examples/controller_multihop/__init__.py +0 -0
  3. examples/controller_multihop/flow.py +54 -0
  4. examples/fanout_join/__init__.py +0 -0
  5. examples/fanout_join/flow.py +54 -0
  6. examples/map_concurrent/__init__.py +0 -0
  7. examples/map_concurrent/flow.py +56 -0
  8. examples/metadata_propagation/flow.py +61 -0
  9. examples/mlflow_metrics/__init__.py +1 -0
  10. examples/mlflow_metrics/flow.py +120 -0
  11. examples/playbook_retrieval/__init__.py +0 -0
  12. examples/playbook_retrieval/flow.py +61 -0
  13. examples/quickstart/__init__.py +0 -0
  14. examples/quickstart/flow.py +71 -0
  15. examples/react_minimal/main.py +109 -0
  16. examples/react_parallel/main.py +121 -0
  17. examples/react_pause_resume/main.py +157 -0
  18. examples/react_replan/main.py +133 -0
  19. examples/reliability_middleware/__init__.py +0 -0
  20. examples/reliability_middleware/flow.py +67 -0
  21. examples/roadmap_status_updates/__init__.py +0 -0
  22. examples/roadmap_status_updates/flow.py +640 -0
  23. examples/roadmap_status_updates_subflows/__init__.py +0 -0
  24. examples/roadmap_status_updates_subflows/flow.py +814 -0
  25. examples/routing_policy/__init__.py +0 -0
  26. examples/routing_policy/flow.py +89 -0
  27. examples/routing_predicate/__init__.py +0 -0
  28. examples/routing_predicate/flow.py +51 -0
  29. examples/routing_union/__init__.py +0 -0
  30. examples/routing_union/flow.py +56 -0
  31. examples/status_roadmap_flow/__init__.py +0 -0
  32. examples/status_roadmap_flow/flow.py +458 -0
  33. examples/streaming_llm/__init__.py +3 -0
  34. examples/streaming_llm/flow.py +77 -0
  35. examples/testkit_demo/flow.py +34 -0
  36. examples/trace_cancel/flow.py +79 -0
  37. examples/traceable_errors/flow.py +51 -0
  38. examples/visualizer/flow.py +49 -0
  39. penguiflow/__init__.py +1 -1
  40. {penguiflow-2.2.3.dist-info → penguiflow-2.2.5.dist-info}/METADATA +4 -1
  41. penguiflow-2.2.5.dist-info/RECORD +68 -0
  42. {penguiflow-2.2.3.dist-info → penguiflow-2.2.5.dist-info}/top_level.txt +1 -0
  43. penguiflow-2.2.3.dist-info/RECORD +0 -30
  44. {penguiflow-2.2.3.dist-info → penguiflow-2.2.5.dist-info}/WHEEL +0 -0
  45. {penguiflow-2.2.3.dist-info → penguiflow-2.2.5.dist-info}/entry_points.txt +0 -0
  46. {penguiflow-2.2.3.dist-info → penguiflow-2.2.5.dist-info}/licenses/LICENSE +0 -0
examples/roadmap_status_updates/flow.py
@@ -0,0 +1,640 @@
from __future__ import annotations

import asyncio
from collections import defaultdict
from pathlib import Path
from typing import Any, Literal

from pydantic import BaseModel, Field

from penguiflow import (
    FinalAnswer,
    Headers,
    Message,
    ModelRegistry,
    Node,
    NodePolicy,
    PenguiFlow,
    StreamChunk,
    create,
    flow_to_mermaid,
    map_concurrent,
)


class UserQuery(BaseModel):
    """Incoming query payload from the frontend."""

    text: str


class RoadmapStep(BaseModel):
    """Describes an item in the UI roadmap."""

    id: int
    name: str
    description: str


class StatusUpdate(BaseModel):
    """UI status message emitted through the websocket."""

    status: Literal["thinking", "ok", "error"]
    message: str | None = None
    roadmap_step_list: list[RoadmapStep] | None = None
    roadmap_step_id: int | None = None
    roadmap_step_status: Literal["running", "ok", "error"] | None = None


class FlowResponse(BaseModel):
    """Pydantic model for Flow response structure."""

    raw_output: str
    artifacts: dict[str, Any] | None = None
    session_info: str | None = None


class RouteDecision(BaseModel):
    """Selected branch for the query."""

    query: UserQuery
    route: Literal["documents", "bug"]
    reason: str


class DocumentState(BaseModel):
    """Mutable state for the document analysis branch."""

    query: UserQuery
    route: Literal["documents"] = "documents"
    steps: list[RoadmapStep]
    sources: list[str] = Field(default_factory=list)
    metadata: list[str] = Field(default_factory=list)
    summary: str | None = None


class BugState(BaseModel):
    """Mutable state for the bug triage branch."""

    query: UserQuery
    route: Literal["bug"] = "bug"
    steps: list[RoadmapStep]
    logs: list[str] = Field(default_factory=list)
    checks: dict[str, str] = Field(default_factory=dict)
    diagnosis: str | None = None


class SynthesisInput(BaseModel):
    """Payload handed to the final synthesis node."""

    query: UserQuery
    route: Literal["documents", "bug"]
    steps: list[RoadmapStep]
    subflow_response: FlowResponse


FINAL_STEP = RoadmapStep(
    id=99,
    name="Compose final answer",
    description="Merge context and model output for the UI",
)

DOCUMENT_STEPS: list[RoadmapStep] = [
    RoadmapStep(id=1, name="Parse files", description="Enumerate candidate documents"),
    RoadmapStep(id=2, name="Extract metadata", description="Analyze files in parallel"),
    RoadmapStep(
        id=3, name="Generate summary", description="Produce branch summary text"
    ),
    RoadmapStep(
        id=4, name="Render HTML report", description="Attach structured artifacts"
    ),
    FINAL_STEP,
]

BUG_STEPS: list[RoadmapStep] = [
    RoadmapStep(id=10, name="Collect error logs", description="Gather stack traces"),
    RoadmapStep(
        id=11, name="Reproduce failure", description="Run lightweight diagnostics"
    ),
    RoadmapStep(id=12, name="Outline fix", description="Summarize remediation plan"),
    FINAL_STEP,
]

STATUS_BUFFER: defaultdict[str, list[StatusUpdate]] = defaultdict(list)
CHUNK_BUFFER: defaultdict[str, list[StreamChunk]] = defaultdict(list)


def reset_buffers() -> None:
    """Helper used by tests to clear captured telemetry."""

    STATUS_BUFFER.clear()
    CHUNK_BUFFER.clear()


def _find_target(ctx, target_name: str) -> Node | None:
    for candidate in getattr(ctx, "_outgoing", {}):
        if getattr(candidate, "name", None) == target_name:
            return candidate
    return None


async def _emit_to_successors(
    ctx,
    parent: Message,
    payload: Any,
    *,
    extra_exclude: set[str] | None = None,
) -> None:
    exclude = {"status_updates"}
    if extra_exclude:
        exclude.update(extra_exclude)

    for candidate in getattr(ctx, "_outgoing", {}):
        name = getattr(candidate, "name", None)
        if name in exclude:
            continue
        next_message = parent.model_copy(update={"payload": payload})
        await ctx.emit(next_message, to=candidate)


async def _emit_to_target(ctx, parent: Message, payload: Any, target_name: str) -> None:
    target = _find_target(ctx, target_name)
    if target is None:  # pragma: no cover - defensive guard for misconfigured graphs
        raise RuntimeError(
            f"{target_name} is not connected to {getattr(ctx.owner, 'name', ctx.owner)}"
        )
    next_message = parent.model_copy(update={"payload": payload})
    await ctx.emit(next_message, to=target)


async def emit_status(
    ctx,
    parent: Message,
    *,
    status: Literal["thinking", "ok", "error"] = "thinking",
    message: str | None = None,
    roadmap_step_id: int | None = None,
    roadmap_step_status: Literal["running", "ok", "error"] | None = None,
    roadmap_step_list: list[RoadmapStep] | None = None,
) -> None:
    """Fan-out helper that pushes a :class:`StatusUpdate` to the status sink."""

    update = StatusUpdate(
        status=status,
        message=message,
        roadmap_step_id=roadmap_step_id,
        roadmap_step_status=roadmap_step_status,
        roadmap_step_list=roadmap_step_list,
    )
    STATUS_BUFFER[parent.trace_id].append(update)
    status_message = parent.model_copy(update={"payload": update})
    target = _find_target(ctx, "status_updates")
    if target is None:  # pragma: no cover - defensive guard for misconfigured graphs
        raise RuntimeError("status_updates node is not connected to this context")
    await ctx.emit(status_message, to=target)


async def status_collector(message: Message, _ctx) -> None:
    return None


async def chunk_collector(message: Message, _ctx) -> None:
    chunk = message.payload
    if isinstance(chunk, StreamChunk):
        CHUNK_BUFFER[message.trace_id].append(chunk)


async def announce_start(message: Message, ctx) -> None:
    await emit_status(ctx, message, message="Determining message path")
    await _emit_to_successors(ctx, message, message.payload)


async def triage(message: Message, ctx) -> None:
    payload = message.payload
    if not isinstance(payload, UserQuery):
        raise TypeError("triage expects a UserQuery payload")

    text = payload.text.lower()
    if any(keyword in text for keyword in ("bug", "error", "stacktrace")):
        route: Literal["documents", "bug"] = "bug"
        reason = "Detected incident keywords"
    else:
        route = "documents"
        reason = "Defaulted to document summarizer"

    await emit_status(ctx, message, message=f"Routing to {route} subflow")

    decision = RouteDecision(query=payload, route=route, reason=reason)
    target = "documents_plan" if route == "documents" else "bug_plan"
    await _emit_to_target(ctx, message, decision, target)


async def document_plan(message: Message, ctx) -> None:
    decision = message.payload
    assert isinstance(decision, RouteDecision) and decision.route == "documents"

    await emit_status(ctx, message, roadmap_step_list=DOCUMENT_STEPS)
    state = DocumentState(query=decision.query, steps=DOCUMENT_STEPS)
    await _emit_to_successors(ctx, message, state)


async def parse_documents(message: Message, ctx) -> None:
    state = message.payload
    assert isinstance(state, DocumentState)

    step = DOCUMENT_STEPS[0]
    await emit_status(
        ctx,
        message,
        roadmap_step_id=step.id,
        roadmap_step_status="running",
        message="Parsing repository sources",
    )

    sources = ["README.md", "metrics.md", "changelog.md"]
    updated = state.model_copy(update={"sources": sources})

    await emit_status(
        ctx,
        message,
        roadmap_step_id=step.id,
        roadmap_step_status="ok",
        message="Done!",
    )
    await _emit_to_successors(ctx, message, updated)


async def extract_metadata(message: Message, ctx) -> None:
    state = message.payload
    assert isinstance(state, DocumentState)

    step = DOCUMENT_STEPS[1]
    await emit_status(
        ctx,
        message,
        roadmap_step_id=step.id,
        roadmap_step_status="running",
        message="Extracting metadata from 3 sources",
    )

    async def analyse(source: str) -> str:
        await asyncio.sleep(0.01)
        return f"{source}:tokens={len(source)}"

    metadata = await map_concurrent(state.sources, analyse, max_concurrency=2)
    updated = state.model_copy(update={"metadata": list(metadata)})

    await emit_status(
        ctx,
        message,
        roadmap_step_id=step.id,
        roadmap_step_status="ok",
        message="Done!",
    )
    await _emit_to_successors(ctx, message, updated)


async def generate_summary(message: Message, ctx) -> None:
    state = message.payload
    assert isinstance(state, DocumentState)

    step = DOCUMENT_STEPS[2]
    await emit_status(
        ctx,
        message,
        roadmap_step_id=step.id,
        roadmap_step_status="running",
        message="Summarizing findings",
    )

    summary = f"Summarized {len(state.sources)} files with {len(state.metadata)}."
    updated = state.model_copy(update={"summary": summary})

    await emit_status(
        ctx,
        message,
        roadmap_step_id=step.id,
        roadmap_step_status="ok",
        message="Done!",
    )
    await _emit_to_successors(ctx, message, updated)


async def render_report(message: Message, ctx) -> None:
    state = message.payload
    assert isinstance(state, DocumentState)

    step = DOCUMENT_STEPS[3]
    await emit_status(
        ctx,
        message,
        roadmap_step_id=step.id,
        roadmap_step_status="running",
        message="Assembling HTML report",
    )

    artifacts = {
        "sources": state.sources,
        "metadata": state.metadata,
    }
    subflow_response = FlowResponse(
        raw_output=state.summary or "No summary available",
        artifacts=artifacts,
        session_info="documents-branch",
    )

    await emit_status(
        ctx,
        message,
        roadmap_step_id=step.id,
        roadmap_step_status="ok",
        message="Done!",
    )
    payload = SynthesisInput(
        query=state.query,
        route="documents",
        steps=state.steps,
        subflow_response=subflow_response,
    )
    await _emit_to_successors(ctx, message, payload)


async def bug_plan(message: Message, ctx) -> None:
    decision = message.payload
    assert isinstance(decision, RouteDecision) and decision.route == "bug"

    await emit_status(ctx, message, roadmap_step_list=BUG_STEPS)
    state = BugState(query=decision.query, steps=BUG_STEPS)
    await _emit_to_successors(ctx, message, state)


async def collect_logs(message: Message, ctx) -> None:
    state = message.payload
    assert isinstance(state, BugState)

    step = BUG_STEPS[0]
    await emit_status(
        ctx,
        message,
        roadmap_step_id=step.id,
        roadmap_step_status="running",
        message="Collecting stack traces",
    )

    logs = ["ValueError: invalid status", "Traceback (most recent call last)"]
    updated = state.model_copy(update={"logs": logs})

    await emit_status(
        ctx,
        message,
        roadmap_step_id=step.id,
        roadmap_step_status="ok",
        message="Done!",
    )
    await _emit_to_successors(ctx, message, updated)


async def run_diagnostics(message: Message, ctx) -> None:
    state = message.payload
    assert isinstance(state, BugState)

    step = BUG_STEPS[1]
    await emit_status(
        ctx,
        message,
        roadmap_step_id=step.id,
        roadmap_step_status="running",
        message="Running smoke diagnostics",
    )

    checks = {"unit": "pass", "integration": "fail"}
    updated = state.model_copy(update={"checks": checks})

    await emit_status(
        ctx,
        message,
        roadmap_step_id=step.id,
        roadmap_step_status="ok",
        message="Done!",
    )
    await _emit_to_successors(ctx, message, updated)


async def propose_fix(message: Message, ctx) -> None:
    state = message.payload
    assert isinstance(state, BugState)

    step = BUG_STEPS[2]
    await emit_status(
        ctx,
        message,
        roadmap_step_id=step.id,
        roadmap_step_status="running",
        message="Drafting fix recommendations",
    )

    diagnosis = "Integration regression detected. Roll back deployment."
    # updated = state.model_copy(update={"diagnosis": diagnosis})

    subflow_response = FlowResponse(
        raw_output=diagnosis,
        artifacts={"logs": state.logs, "checks": state.checks},
        session_info="bug-branch",
    )

    await emit_status(
        ctx,
        message,
        roadmap_step_id=step.id,
        roadmap_step_status="ok",
        message="Done!",
    )
    payload = SynthesisInput(
        query=state.query,
        route="bug",
        steps=state.steps,
        subflow_response=subflow_response,
    )
    await _emit_to_successors(ctx, message, payload)


async def compose_final(message: Message, ctx) -> None:
    payload = message.payload
    assert isinstance(payload, SynthesisInput)

    final_step = payload.steps[-1]
    await emit_status(
        ctx,
        message,
        roadmap_step_id=final_step.id,
        roadmap_step_status="running",
        message="Synthesizing final response",
    )

    chunk_target = _find_target(ctx, "chunk_sink")
    if chunk_target is not None:
        await ctx.emit_chunk(
            parent=message,
            text="Synthesizing insights... ",
            meta={"phase": "compose", "stage": 1},
            to=chunk_target,
        )
        await ctx.emit_chunk(
            parent=message,
            text="ready.",
            meta={"phase": "compose", "stage": 2},
            done=True,
            to=chunk_target,
        )

    raw_output = f"{payload.subflow_response.raw_output}\n\nRoute: {payload.route}."
    artifacts = dict(payload.subflow_response.artifacts or {})
    artifacts.setdefault("route", payload.route)

    final_response = FlowResponse(
        raw_output=raw_output,
        artifacts=artifacts,
        session_info=f"steps={len(payload.steps)}",
    )

    await emit_status(
        ctx,
        message,
        roadmap_step_id=final_step.id,
        roadmap_step_status="ok",
        message="Done!",
    )
    await _emit_to_target(ctx, message, final_response, "deliver_final")


async def deliver_final(message: Message, _ctx) -> FinalAnswer:
    payload = message.payload
    assert isinstance(payload, FlowResponse)

    text = payload.raw_output
    if payload.artifacts:
        text += f"\nArtifacts: {sorted(payload.artifacts)}"

    final_answer = FinalAnswer(text=text)
    return final_answer


def build_flow() -> tuple[PenguiFlow, ModelRegistry]:
    status_node = Node(
        status_collector, name="status_updates", policy=NodePolicy(validate="none")
    )
    chunk_node = Node(
        chunk_collector, name="chunk_sink", policy=NodePolicy(validate="none")
    )

    start_node = Node(announce_start, name="start", policy=NodePolicy(validate="none"))
    triage_node = Node(triage, name="triage", policy=NodePolicy(validate="none"))

    doc_plan_node = Node(
        document_plan, name="documents_plan", policy=NodePolicy(validate="none")
    )
    parse_node = Node(
        parse_documents, name="parse_documents", policy=NodePolicy(validate="none")
    )
    metadata_node = Node(
        extract_metadata, name="extract_metadata", policy=NodePolicy(validate="none")
    )
    summary_node = Node(
        generate_summary, name="generate_summary", policy=NodePolicy(validate="none")
    )
    render_node = Node(
        render_report, name="render_report", policy=NodePolicy(validate="none")
    )

    bug_plan_node = Node(bug_plan, name="bug_plan", policy=NodePolicy(validate="none"))
    logs_node = Node(
        collect_logs, name="collect_logs", policy=NodePolicy(validate="none")
    )
    diagnostics_node = Node(
        run_diagnostics, name="run_diagnostics", policy=NodePolicy(validate="none")
    )
    fix_node = Node(propose_fix, name="propose_fix", policy=NodePolicy(validate="none"))

    compose_node = Node(
        compose_final, name="compose_final", policy=NodePolicy(validate="none")
    )
    final_node = Node(
        deliver_final, name="deliver_final", policy=NodePolicy(validate="none")
    )

    flow = create(
        start_node.to(triage_node, status_node),
        triage_node.to(doc_plan_node, bug_plan_node, status_node),
        doc_plan_node.to(parse_node, status_node),
        parse_node.to(metadata_node, status_node),
        metadata_node.to(summary_node, status_node),
        summary_node.to(render_node, status_node),
        render_node.to(compose_node, status_node),
        bug_plan_node.to(logs_node, status_node),
        logs_node.to(diagnostics_node, status_node),
        diagnostics_node.to(fix_node, status_node),
        fix_node.to(compose_node, status_node),
        compose_node.to(status_node, chunk_node, final_node),
        status_node.to(),
        chunk_node.to(),
        final_node.to(),
    )

    registry = ModelRegistry()
    registry.register("documents_plan", RouteDecision, DocumentState)
    registry.register("parse_documents", DocumentState, DocumentState)
    registry.register("extract_metadata", DocumentState, DocumentState)
    registry.register("generate_summary", DocumentState, DocumentState)
    registry.register("render_report", DocumentState, SynthesisInput)
    registry.register("bug_plan", RouteDecision, BugState)
    registry.register("collect_logs", BugState, BugState)
    registry.register("run_diagnostics", BugState, BugState)
    registry.register("propose_fix", BugState, SynthesisInput)
    registry.register("compose_final", SynthesisInput, FlowResponse)
    registry.register("deliver_final", FlowResponse, FinalAnswer)

    return flow, registry


async def run_example(query: str) -> FinalAnswer:
    reset_buffers()
    flow, registry = build_flow()
    flow.run(registry=registry)
    try:
        message = Message(payload=UserQuery(text=query), headers=Headers(tenant="demo"))
        await flow.emit(message)
        result = await flow.fetch()
        assert isinstance(result, FinalAnswer)
        return result
    finally:
        await flow.stop()


def export_mermaid(flow: PenguiFlow, destination: Path | None = None) -> Path:
    mermaid = flow_to_mermaid(flow, direction="TD")
    path = destination or Path(__file__).with_name("flow.mermaid.md")
    path.write_text(f"```mermaid\n{mermaid}\n```\n")
    return path


async def main() -> None:  # pragma: no cover - manual entrypoint
    answer = await run_example("Summarize the latest release notes")

    print("\n=== ROOKERY STATUS UPDATES ===")
    for trace_id, updates in STATUS_BUFFER.items():
        print(f"\nTrace: {trace_id}")
        for i, update in enumerate(updates, 1):
            print(f" [{i}] {update.model_dump_json(indent=2)}")

    print("\n=== ROOKERY STREAM CHUNKS ===")
    for trace_id, chunks in CHUNK_BUFFER.items():
        print(f"\nTrace: {trace_id}")
        for i, chunk in enumerate(chunks, 1):
            print(f" [{i}] {chunk.model_dump_json(indent=2)}")

    print("\n=== FINAL ANSWER ===")
    print(answer.text)


if __name__ == "__main__":  # pragma: no cover - manual entrypoint
    asyncio.run(main())
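For orientation only (not part of the diff): the module above already ships its own main() entrypoint, but a minimal external driver using its exported helpers might look like the sketch below. The import path assumes the examples package installed from this wheel is importable; the query string and the fields printed are illustrative choices, not package behaviour guarantees.

import asyncio

from examples.roadmap_status_updates.flow import STATUS_BUFFER, run_example


async def demo() -> None:
    # "bug" in the query text makes triage() choose the bug branch.
    answer = await run_example("We hit a bug: the worker raises a stacktrace")

    # emit_status() mirrors every StatusUpdate into STATUS_BUFFER keyed by
    # trace_id, so the roadmap progression can be inspected after the run.
    for trace_id, updates in STATUS_BUFFER.items():
        steps = [u.roadmap_step_status for u in updates if u.roadmap_step_id]
        print(trace_id, steps)

    print(answer.text)


asyncio.run(demo())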