penguiflow 2.2.3__py3-none-any.whl → 2.2.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of penguiflow has been flagged as potentially problematic; follow the advisory link in the registry listing for details.
- examples/__init__.py +0 -0
- examples/controller_multihop/__init__.py +0 -0
- examples/controller_multihop/flow.py +54 -0
- examples/fanout_join/__init__.py +0 -0
- examples/fanout_join/flow.py +54 -0
- examples/map_concurrent/__init__.py +0 -0
- examples/map_concurrent/flow.py +56 -0
- examples/metadata_propagation/flow.py +61 -0
- examples/mlflow_metrics/__init__.py +1 -0
- examples/mlflow_metrics/flow.py +120 -0
- examples/playbook_retrieval/__init__.py +0 -0
- examples/playbook_retrieval/flow.py +61 -0
- examples/quickstart/__init__.py +0 -0
- examples/quickstart/flow.py +71 -0
- examples/react_minimal/main.py +109 -0
- examples/react_parallel/main.py +121 -0
- examples/react_pause_resume/main.py +157 -0
- examples/react_replan/main.py +133 -0
- examples/reliability_middleware/__init__.py +0 -0
- examples/reliability_middleware/flow.py +67 -0
- examples/roadmap_status_updates/__init__.py +0 -0
- examples/roadmap_status_updates/flow.py +640 -0
- examples/roadmap_status_updates_subflows/__init__.py +0 -0
- examples/roadmap_status_updates_subflows/flow.py +814 -0
- examples/routing_policy/__init__.py +0 -0
- examples/routing_policy/flow.py +89 -0
- examples/routing_predicate/__init__.py +0 -0
- examples/routing_predicate/flow.py +51 -0
- examples/routing_union/__init__.py +0 -0
- examples/routing_union/flow.py +56 -0
- examples/status_roadmap_flow/__init__.py +0 -0
- examples/status_roadmap_flow/flow.py +458 -0
- examples/streaming_llm/__init__.py +3 -0
- examples/streaming_llm/flow.py +77 -0
- examples/testkit_demo/flow.py +34 -0
- examples/trace_cancel/flow.py +79 -0
- examples/traceable_errors/flow.py +51 -0
- examples/visualizer/flow.py +49 -0
- penguiflow/__init__.py +1 -1
- {penguiflow-2.2.3.dist-info → penguiflow-2.2.5.dist-info}/METADATA +4 -1
- penguiflow-2.2.5.dist-info/RECORD +68 -0
- {penguiflow-2.2.3.dist-info → penguiflow-2.2.5.dist-info}/top_level.txt +1 -0
- penguiflow-2.2.3.dist-info/RECORD +0 -30
- {penguiflow-2.2.3.dist-info → penguiflow-2.2.5.dist-info}/WHEEL +0 -0
- {penguiflow-2.2.3.dist-info → penguiflow-2.2.5.dist-info}/entry_points.txt +0 -0
- {penguiflow-2.2.3.dist-info → penguiflow-2.2.5.dist-info}/licenses/LICENSE +0 -0
|
@@ -0,0 +1,814 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import asyncio
|
|
4
|
+
from collections import defaultdict
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
from typing import Any, Literal
|
|
7
|
+
|
|
8
|
+
from pydantic import BaseModel, Field
|
|
9
|
+
|
|
10
|
+
from penguiflow import (
|
|
11
|
+
FinalAnswer,
|
|
12
|
+
Headers,
|
|
13
|
+
Message,
|
|
14
|
+
ModelRegistry,
|
|
15
|
+
Node,
|
|
16
|
+
NodePolicy,
|
|
17
|
+
PenguiFlow,
|
|
18
|
+
StreamChunk,
|
|
19
|
+
create,
|
|
20
|
+
flow_to_mermaid,
|
|
21
|
+
join_k,
|
|
22
|
+
map_concurrent,
|
|
23
|
+
)
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
class UserQuery(BaseModel):
    """Incoming query payload from the frontend."""

    # Free-form natural-language request typed by the user.
    text: str
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
class RoadmapStep(BaseModel):
    """Describes an item in the UI roadmap."""

    # Stable numeric identifier referenced by StatusUpdate.roadmap_step_id.
    id: int
    # Short human-readable title shown in the UI.
    name: str
    # Longer explanation of what the step does.
    description: str
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
class StatusUpdate(BaseModel):
    """UI status message emitted through the websocket."""

    # Overall flow state signalled to the frontend.
    status: Literal["thinking", "ok", "error"]
    # Optional human-readable progress text.
    message: str | None = None
    # Full roadmap, sent once when a branch is selected.
    roadmap_step_list: list[RoadmapStep] | None = None
    # Id of the step this update refers to (matches RoadmapStep.id).
    roadmap_step_id: int | None = None
    # Per-step progress marker for the step identified above.
    roadmap_step_status: Literal["running", "ok", "error"] | None = None
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
class FlowResponse(BaseModel):
    """Pydantic model for Flow response structure."""

    # Main textual output of a branch or of the whole flow.
    raw_output: str
    # Structured extras (sources, metadata, logs, checks, ...) keyed by name.
    artifacts: dict[str, Any] | None = None
    # Free-form marker identifying the producing branch/session.
    session_info: str | None = None
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
class RouteDecision(BaseModel):
    """Selected branch for the query."""

    # Original query that was triaged.
    query: UserQuery
    # Which branch the flow will take.
    route: Literal["documents", "bug"]
    # Human-readable justification for the routing choice.
    reason: str
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
class DocumentState(BaseModel):
    """Mutable state for the document analysis branch."""

    query: UserQuery
    # Fixed discriminator so routed payloads can be told apart.
    route: Literal["documents"] = "documents"
    # Roadmap shown in the UI for this branch.
    steps: list[RoadmapStep]
    # Candidate files discovered by parse_documents.
    sources: list[str] = Field(default_factory=list)
    # Per-source enrichment strings produced by the metadata subflow.
    metadata: list[str] = Field(default_factory=list)
    # Branch summary text; filled in by generate_summary.
    summary: str | None = None
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
class BugState(BaseModel):
    """Mutable state for the bug triage branch."""

    query: UserQuery
    # Fixed discriminator so routed payloads can be told apart.
    route: Literal["bug"] = "bug"
    # Roadmap shown in the UI for this branch.
    steps: list[RoadmapStep]
    # Collected stack traces / diagnostic log lines.
    logs: list[str] = Field(default_factory=list)
    # check name -> outcome, filled in by the diagnostics subflow.
    checks: dict[str, str] = Field(default_factory=dict)
    # Remediation summary; not set by the visible example nodes.
    diagnosis: str | None = None
|
|
86
|
+
|
|
87
|
+
|
|
88
|
+
class DiagnosticTask(BaseModel):
    """Work item forwarded to diagnostics subflows."""

    # Branch state snapshot travelling with the task.
    state: BugState
    # Which runner handles this task ("unit" or "integration").
    check_name: str
    # Human-readable description; amended with the result by the runners.
    detail: str
    # Set by the runners; None until the check has executed.
    outcome: Literal["pass", "fail"] | None = None
|
|
95
|
+
|
|
96
|
+
|
|
97
|
+
class DiagnosticBatch(BaseModel):
    """Aggregated diagnostics returned by join_k."""

    # All tasks gathered by the join node.
    tasks: list[DiagnosticTask]
|
|
101
|
+
|
|
102
|
+
|
|
103
|
+
class SynthesisInput(BaseModel):
    """Payload handed to the final synthesis node."""

    # Original user query.
    query: UserQuery
    # Branch that produced this input.
    route: Literal["documents", "bug"]
    # Roadmap of the producing branch (last entry is FINAL_STEP).
    steps: list[RoadmapStep]
    # Branch-level response to be merged into the final answer.
    subflow_response: FlowResponse
|
|
110
|
+
|
|
111
|
+
|
|
112
|
+
# Shared terminal roadmap step appended to both branches' step lists.
FINAL_STEP = RoadmapStep(
    id=99,
    name="Compose final answer",
    description="Merge context and model output for the UI",
)
|
|
117
|
+
|
|
118
|
+
# Roadmap for the documents branch; the branch nodes index into this list
# positionally (DOCUMENT_STEPS[0]..[3]), so order matters.
DOCUMENT_STEPS: list[RoadmapStep] = [
    RoadmapStep(id=1, name="Parse files", description="Enumerate candidate documents"),
    RoadmapStep(id=2, name="Extract metadata", description="Analyze files in parallel"),
    RoadmapStep(
        id=3, name="Generate summary", description="Produce branch summary text"
    ),
    RoadmapStep(
        id=4, name="Render HTML report", description="Attach structured artifacts"
    ),
    FINAL_STEP,
]
|
|
129
|
+
|
|
130
|
+
# Roadmap for the bug branch; indexed positionally (BUG_STEPS[0]..[2]).
BUG_STEPS: list[RoadmapStep] = [
    RoadmapStep(id=10, name="Collect error logs", description="Gather stack traces"),
    RoadmapStep(
        id=11, name="Reproduce failure", description="Run lightweight diagnostics"
    ),
    RoadmapStep(id=12, name="Outline fix", description="Summarize remediation plan"),
    FINAL_STEP,
]
|
|
138
|
+
|
|
139
|
+
# Captured telemetry keyed by trace id; inspected by tests and main().
STATUS_BUFFER: defaultdict[str, list[StatusUpdate]] = defaultdict(list)
CHUNK_BUFFER: defaultdict[str, list[StreamChunk]] = defaultdict(list)
|
|
141
|
+
|
|
142
|
+
|
|
143
|
+
def reset_buffers() -> None:
    """Helper used by tests to clear captured telemetry."""

    for buffer in (STATUS_BUFFER, CHUNK_BUFFER):
        buffer.clear()
|
|
148
|
+
|
|
149
|
+
|
|
150
|
+
def _find_target(ctx, target_name: str) -> Node | None:
    """Return the outgoing node named *target_name*, or ``None`` if absent."""

    matches = (
        node
        for node in getattr(ctx, "_outgoing", {})
        if getattr(node, "name", None) == target_name
    )
    return next(matches, None)
|
|
155
|
+
|
|
156
|
+
|
|
157
|
+
async def _emit_to_successors(
    ctx,
    parent: Message,
    payload: Any,
    *,
    extra_exclude: set[str] | None = None,
) -> None:
    """Forward *payload* to every successor except the status sink.

    ``extra_exclude`` names additional successors to skip.
    """

    skipped = {"status_updates"}
    if extra_exclude:
        skipped |= extra_exclude

    for successor in getattr(ctx, "_outgoing", {}):
        if getattr(successor, "name", None) in skipped:
            continue
        forwarded = parent.model_copy(update={"payload": payload})
        await ctx.emit(forwarded, to=successor)
|
|
174
|
+
|
|
175
|
+
|
|
176
|
+
async def _emit_to_target(ctx, parent: Message, payload: Any, target_name: str) -> None:
    """Send *payload* to the named successor, failing loudly when it is missing."""

    destination = _find_target(ctx, target_name)
    if destination is None:  # pragma: no cover - defensive guard for misconfigured graphs
        raise RuntimeError(
            f"{target_name} is not connected to {getattr(ctx.owner, 'name', ctx.owner)}"
        )
    forwarded = parent.model_copy(update={"payload": payload})
    await ctx.emit(forwarded, to=destination)
|
|
184
|
+
|
|
185
|
+
|
|
186
|
+
async def emit_status(
    ctx,
    parent: Message,
    *,
    status: Literal["thinking", "ok", "error"] = "thinking",
    message: str | None = None,
    roadmap_step_id: int | None = None,
    roadmap_step_status: Literal["running", "ok", "error"] | None = None,
    roadmap_step_list: list[RoadmapStep] | None = None,
) -> None:
    """Fan-out helper that pushes a :class:`StatusUpdate` to the status sink."""

    # Record the update in the test buffer before attempting delivery.
    payload = StatusUpdate(
        status=status,
        message=message,
        roadmap_step_id=roadmap_step_id,
        roadmap_step_status=roadmap_step_status,
        roadmap_step_list=roadmap_step_list,
    )
    STATUS_BUFFER[parent.trace_id].append(payload)

    sink = _find_target(ctx, "status_updates")
    if sink is None:  # pragma: no cover - defensive guard for misconfigured graphs
        raise RuntimeError("status_updates node is not connected to this context")
    await ctx.emit(parent.model_copy(update={"payload": payload}), to=sink)
|
|
211
|
+
|
|
212
|
+
|
|
213
|
+
async def status_collector(message: Message, _ctx) -> None:
    """Terminal sink for status messages; intentionally discards its input."""
    return None
|
|
215
|
+
|
|
216
|
+
|
|
217
|
+
def build_metadata_playbook() -> tuple[PenguiFlow, ModelRegistry]:
    """Create a subflow that enriches document metadata concurrently."""

    async def compute_metadata(message: Message, _ctx) -> DocumentState:
        doc_state = message.payload
        if not isinstance(doc_state, DocumentState):
            raise TypeError("metadata_mapper expects a DocumentState payload")

        async def analyse(source: str) -> str:
            # Simulated per-file analysis with a tiny delay.
            await asyncio.sleep(0.01)
            checksum = sum(ord(char) for char in source) % 97
            return f"{source}:tokens={len(source)}:digest={checksum}"

        enriched = await map_concurrent(doc_state.sources, analyse, max_concurrency=2)
        return doc_state.model_copy(update={"metadata": enriched})

    mapper = Node(
        compute_metadata, name="metadata_mapper", policy=NodePolicy(validate="none")
    )
    subflow = create(mapper.to())
    models = ModelRegistry()
    models.register("metadata_mapper", DocumentState, DocumentState)
    return subflow, models
|
|
240
|
+
|
|
241
|
+
|
|
242
|
+
def build_diagnostics_playbook() -> tuple[PenguiFlow, ModelRegistry]:
    """Create a subflow that fans out diagnostics and joins them with ``join_k``."""

    async def seed_checks(message: Message, ctx) -> None:
        # Fan-out: build one DiagnosticTask per runner and dispatch each to
        # its dedicated runner node.
        state = message.payload
        if not isinstance(state, BugState):
            raise TypeError("seed_checks expects a BugState payload")

        unit_target = _find_target(ctx, "unit_runner")
        integration_target = _find_target(ctx, "integration_runner")
        if unit_target is None or integration_target is None:
            raise RuntimeError(
                "diagnostics playbook requires unit and integration runners"
            )

        tasks = [
            DiagnosticTask(
                state=state, check_name="unit", detail="Unit regression suite"
            ),
            DiagnosticTask(
                state=state,
                check_name="integration",
                detail="Integration smoke tests",
            ),
        ]

        for task in tasks:
            # Route by check name; anything not "unit" goes to integration.
            target = unit_target if task.check_name == "unit" else integration_target
            await ctx.emit(
                message.model_copy(update={"payload": task}), to=target
            )

    async def run_unit_check(message: Message, ctx) -> None:
        # Simulated unit suite: always reports success.
        task = message.payload
        if not isinstance(task, DiagnosticTask):
            raise TypeError("run_unit_check expects DiagnosticTask payloads")

        join_target = _find_target(ctx, "join_diagnostics")
        if join_target is None:
            raise RuntimeError("diagnostics playbook missing join_diagnostics node")

        updated = task.model_copy(
            update={"outcome": "pass", "detail": f"{task.detail} :: ok"}
        )
        await ctx.emit(
            message.model_copy(update={"payload": updated}), to=join_target
        )

    async def run_integration_check(message: Message, ctx) -> None:
        # Simulated integration suite: always reports a failure so the demo
        # produces an interesting diagnosis downstream.
        task = message.payload
        if not isinstance(task, DiagnosticTask):
            raise TypeError("run_integration_check expects DiagnosticTask payloads")

        join_target = _find_target(ctx, "join_diagnostics")
        if join_target is None:
            raise RuntimeError("diagnostics playbook missing join_diagnostics node")

        updated = task.model_copy(
            update={"outcome": "fail", "detail": f"{task.detail} :: incident"}
        )
        await ctx.emit(
            message.model_copy(update={"payload": updated}), to=join_target
        )

    async def shape_batch(message: Message, _ctx) -> Message:
        # The join node delivers a raw list of joined payloads; wrap it in a
        # DiagnosticBatch model for the merge step.
        tasks = message.payload
        if not isinstance(tasks, list):
            raise TypeError("format_diagnostic_batch expects a list payload")
        batch = DiagnosticBatch(
            tasks=[DiagnosticTask.model_validate(task) for task in tasks]
        )
        return message.model_copy(update={"payload": batch})

    async def merge_batch(message: Message, _ctx) -> BugState:
        # Fold all task outcomes back into a single BugState.
        batch = message.payload
        if not isinstance(batch, DiagnosticBatch):
            raise TypeError("merge_diagnostics expects a DiagnosticBatch payload")
        if not batch.tasks:
            raise ValueError("merge_diagnostics received an empty batch")

        # Every task carries a snapshot of the originating state; use the first.
        base_state = batch.tasks[0].state
        checks = {task.check_name: task.outcome or "unknown" for task in batch.tasks}
        log_entries = [
            f"{task.check_name}: {task.detail} ({task.outcome or 'unknown'})"
            for task in batch.tasks
        ]
        updated_logs = [*base_state.logs, *log_entries]
        return base_state.model_copy(update={"checks": checks, "logs": updated_logs})

    seed_node = Node(
        seed_checks, name="seed_checks", policy=NodePolicy(validate="none")
    )
    unit_node = Node(
        run_unit_check, name="unit_runner", policy=NodePolicy(validate="none")
    )
    integration_node = Node(
        run_integration_check,
        name="integration_runner",
        policy=NodePolicy(validate="none"),
    )
    # join_k waits for 2 payloads (one per runner) before continuing.
    join_node = join_k("join_diagnostics", 2)
    batch_node = Node(
        shape_batch,
        name="format_diagnostic_batch",
        policy=NodePolicy(validate="none"),
    )
    merge_node = Node(
        merge_batch, name="merge_diagnostics", policy=NodePolicy(validate="none")
    )

    # Topology: seed -> (unit | integration) -> join -> batch -> merge.
    flow = create(
        seed_node.to(unit_node, integration_node),
        unit_node.to(join_node),
        integration_node.to(join_node),
        join_node.to(batch_node),
        batch_node.to(merge_node),
        merge_node.to(),
    )

    registry = ModelRegistry()
    registry.register("merge_diagnostics", DiagnosticBatch, BugState)
    return flow, registry
|
|
364
|
+
|
|
365
|
+
|
|
366
|
+
async def chunk_collector(message: Message, _ctx) -> None:
    """Capture streamed chunks into ``CHUNK_BUFFER``, keyed by trace id."""

    payload = message.payload
    if not isinstance(payload, StreamChunk):
        return
    CHUNK_BUFFER[message.trace_id].append(payload)
|
|
370
|
+
|
|
371
|
+
|
|
372
|
+
async def announce_start(message: Message, ctx) -> None:
    """Entry node: report that routing is underway, then pass the query on."""

    original_payload = message.payload
    await emit_status(ctx, message, message="Determining message path")
    await _emit_to_successors(ctx, message, original_payload)
|
|
375
|
+
|
|
376
|
+
|
|
377
|
+
async def triage(message: Message, ctx) -> None:
    """Pick the documents or bug branch based on incident keywords."""

    query = message.payload
    if not isinstance(query, UserQuery):
        raise TypeError("triage expects a UserQuery payload")

    lowered = query.text.lower()
    incident = any(word in lowered for word in ("bug", "error", "stacktrace"))
    route: Literal["documents", "bug"] = "bug" if incident else "documents"
    reason = (
        "Detected incident keywords"
        if incident
        else "Defaulted to document summarizer"
    )

    await emit_status(ctx, message, message=f"Routing to {route} subflow")

    decision = RouteDecision(query=query, route=route, reason=reason)
    branch = "bug_plan" if route == "bug" else "documents_plan"
    await _emit_to_target(ctx, message, decision, branch)
|
|
395
|
+
|
|
396
|
+
|
|
397
|
+
async def document_plan(message: Message, ctx) -> None:
    """Publish the document roadmap and seed the branch state.

    Raises:
        TypeError: if the payload is not a ``RouteDecision`` routed to
            ``documents``.
    """

    decision = message.payload
    # Validate explicitly: ``assert`` would vanish under ``python -O``.
    if not (isinstance(decision, RouteDecision) and decision.route == "documents"):
        raise TypeError("documents_plan expects a RouteDecision routed to documents")

    # Send the whole roadmap to the UI before any step starts running.
    await emit_status(ctx, message, roadmap_step_list=DOCUMENT_STEPS)
    state = DocumentState(query=decision.query, steps=DOCUMENT_STEPS)
    await _emit_to_successors(ctx, message, state)
|
|
404
|
+
|
|
405
|
+
|
|
406
|
+
async def parse_documents(message: Message, ctx) -> None:
    """First document step: discover sources and record them on the state.

    Raises:
        TypeError: if the payload is not a ``DocumentState``.
    """

    state = message.payload
    # Validate explicitly: ``assert`` is stripped under ``python -O``.
    if not isinstance(state, DocumentState):
        raise TypeError("parse_documents expects a DocumentState payload")

    step = DOCUMENT_STEPS[0]
    await emit_status(
        ctx,
        message,
        roadmap_step_id=step.id,
        roadmap_step_status="running",
        message="Parsing repository sources",
    )

    # Demo stand-in for a real filesystem scan.
    sources = ["README.md", "metrics.md", "changelog.md"]
    updated = state.model_copy(update={"sources": sources})

    await emit_status(
        ctx,
        message,
        roadmap_step_id=step.id,
        roadmap_step_status="ok",
        message="Done!",
    )
    await _emit_to_successors(ctx, message, updated)
|
|
430
|
+
|
|
431
|
+
|
|
432
|
+
async def extract_metadata(message: Message, ctx) -> None:
    """Second document step: delegate enrichment to the metadata subflow."""

    doc_state = message.payload
    assert isinstance(doc_state, DocumentState)

    step = DOCUMENT_STEPS[1]
    await emit_status(
        ctx,
        message,
        roadmap_step_id=step.id,
        roadmap_step_status="running",
        message="Launching metadata subflow",
    )

    result = await ctx.call_playbook(build_metadata_playbook, message)
    if not isinstance(result, DocumentState):
        raise TypeError("metadata subflow must return a DocumentState payload")

    await emit_status(
        ctx,
        message,
        roadmap_step_id=step.id,
        roadmap_step_status="ok",
        message="Done!",
    )
    await _emit_to_successors(ctx, message, result)
|
|
458
|
+
|
|
459
|
+
|
|
460
|
+
async def generate_summary(message: Message, ctx) -> None:
    """Third document step: produce the branch summary text.

    Raises:
        TypeError: if the payload is not a ``DocumentState``.
    """

    state = message.payload
    # Validate explicitly: ``assert`` is stripped under ``python -O``.
    if not isinstance(state, DocumentState):
        raise TypeError("generate_summary expects a DocumentState payload")

    step = DOCUMENT_STEPS[2]
    await emit_status(
        ctx,
        message,
        roadmap_step_id=step.id,
        roadmap_step_status="running",
        message="Summarizing findings",
    )

    # Fix: the previous message ended "... with 3." — missing its noun.
    summary = (
        f"Summarized {len(state.sources)} files "
        f"with {len(state.metadata)} metadata entries."
    )
    updated = state.model_copy(update={"summary": summary})

    await emit_status(
        ctx,
        message,
        roadmap_step_id=step.id,
        roadmap_step_status="ok",
        message="Done!",
    )
    await _emit_to_successors(ctx, message, updated)
|
|
484
|
+
|
|
485
|
+
|
|
486
|
+
async def render_report(message: Message, ctx) -> None:
    """Final document step: package artifacts and hand off to synthesis."""

    doc_state = message.payload
    assert isinstance(doc_state, DocumentState)

    step = DOCUMENT_STEPS[3]
    await emit_status(
        ctx,
        message,
        roadmap_step_id=step.id,
        roadmap_step_status="running",
        message="Assembling HTML report",
    )

    branch_response = FlowResponse(
        raw_output=doc_state.summary or "No summary available",
        artifacts={"sources": doc_state.sources, "metadata": doc_state.metadata},
        session_info="documents-branch",
    )

    await emit_status(
        ctx,
        message,
        roadmap_step_id=step.id,
        roadmap_step_status="ok",
        message="Done!",
    )
    synthesis = SynthesisInput(
        query=doc_state.query,
        route="documents",
        steps=doc_state.steps,
        subflow_response=branch_response,
    )
    await _emit_to_successors(ctx, message, synthesis)
|
|
523
|
+
|
|
524
|
+
|
|
525
|
+
async def bug_plan(message: Message, ctx) -> None:
    """Publish the bug roadmap and seed the branch state.

    Raises:
        TypeError: if the payload is not a ``RouteDecision`` routed to ``bug``.
    """

    decision = message.payload
    # Validate explicitly: ``assert`` would vanish under ``python -O``.
    if not (isinstance(decision, RouteDecision) and decision.route == "bug"):
        raise TypeError("bug_plan expects a RouteDecision routed to bug")

    # Send the whole roadmap to the UI before any step starts running.
    await emit_status(ctx, message, roadmap_step_list=BUG_STEPS)
    state = BugState(query=decision.query, steps=BUG_STEPS)
    await _emit_to_successors(ctx, message, state)
|
|
532
|
+
|
|
533
|
+
|
|
534
|
+
async def collect_logs(message: Message, ctx) -> None:
    """First bug step: gather sample stack traces onto the state.

    Raises:
        TypeError: if the payload is not a ``BugState``.
    """

    state = message.payload
    # Validate explicitly: ``assert`` is stripped under ``python -O``.
    if not isinstance(state, BugState):
        raise TypeError("collect_logs expects a BugState payload")

    step = BUG_STEPS[0]
    await emit_status(
        ctx,
        message,
        roadmap_step_id=step.id,
        roadmap_step_status="running",
        message="Collecting stack traces",
    )

    # Demo stand-in for a real log search.
    logs = ["ValueError: invalid status", "Traceback (most recent call last)"]
    updated = state.model_copy(update={"logs": logs})

    await emit_status(
        ctx,
        message,
        roadmap_step_id=step.id,
        roadmap_step_status="ok",
        message="Done!",
    )
    await _emit_to_successors(ctx, message, updated)
|
|
558
|
+
|
|
559
|
+
|
|
560
|
+
async def run_diagnostics(message: Message, ctx) -> None:
    """Second bug step: delegate the checks to the diagnostics subflow."""

    bug_state = message.payload
    assert isinstance(bug_state, BugState)

    step = BUG_STEPS[1]
    await emit_status(
        ctx,
        message,
        roadmap_step_id=step.id,
        roadmap_step_status="running",
        message="Launching diagnostics subflow",
    )

    result = await ctx.call_playbook(build_diagnostics_playbook, message)
    if not isinstance(result, BugState):
        raise TypeError("diagnostics subflow must return a BugState payload")

    await emit_status(
        ctx,
        message,
        roadmap_step_id=step.id,
        roadmap_step_status="ok",
        message="Done!",
    )
    await _emit_to_successors(ctx, message, result)
|
|
586
|
+
|
|
587
|
+
|
|
588
|
+
async def propose_fix(message: Message, ctx) -> None:
    """Third bug step: draft the remediation plan and hand off to synthesis.

    Raises:
        TypeError: if the payload is not a ``BugState``.
    """

    state = message.payload
    # Validate explicitly: ``assert`` is stripped under ``python -O``.
    if not isinstance(state, BugState):
        raise TypeError("propose_fix expects a BugState payload")

    step = BUG_STEPS[2]
    await emit_status(
        ctx,
        message,
        roadmap_step_id=step.id,
        roadmap_step_status="running",
        message="Drafting fix recommendations",
    )

    # The diagnosis only feeds the branch response; it is not written back to
    # the state (commented-out dead code removed).
    diagnosis = "Integration regression detected. Roll back deployment."

    branch_response = FlowResponse(
        raw_output=diagnosis,
        artifacts={"logs": state.logs, "checks": state.checks},
        session_info="bug-branch",
    )

    await emit_status(
        ctx,
        message,
        roadmap_step_id=step.id,
        roadmap_step_status="ok",
        message="Done!",
    )
    synthesis = SynthesisInput(
        query=state.query,
        route="bug",
        steps=state.steps,
        subflow_response=branch_response,
    )
    await _emit_to_successors(ctx, message, synthesis)
|
|
624
|
+
|
|
625
|
+
|
|
626
|
+
async def compose_final(message: Message, ctx) -> None:
    """Merge a branch's ``SynthesisInput`` into the final ``FlowResponse``.

    Streams two progress chunks to ``chunk_sink`` when one is connected,
    builds the final response, then forwards it to ``deliver_final`` with a
    serialized copy stashed in ``message.meta``.
    """

    payload = message.payload
    assert isinstance(payload, SynthesisInput)

    # The terminal roadmap step is last in both branches (FINAL_STEP).
    final_step = payload.steps[-1]
    await emit_status(
        ctx,
        message,
        roadmap_step_id=final_step.id,
        roadmap_step_status="running",
        message="Synthesizing final response",
    )

    # Streaming is best-effort: silently skipped when no chunk sink is wired.
    chunk_target = _find_target(ctx, "chunk_sink")
    if chunk_target is not None:
        await ctx.emit_chunk(
            parent=message,
            text="Synthesizing insights... ",
            meta={"phase": "compose", "stage": 1},
            to=chunk_target,
        )
        await ctx.emit_chunk(
            parent=message,
            text="ready.",
            meta={"phase": "compose", "stage": 2},
            done=True,  # marks the end of the stream for this trace
            to=chunk_target,
        )

    raw_output = f"{payload.subflow_response.raw_output}\n\nRoute: {payload.route}."
    # Copy so the branch's artifacts dict is never mutated in place.
    artifacts = dict(payload.subflow_response.artifacts or {})
    artifacts.setdefault("route", payload.route)

    final_response = FlowResponse(
        raw_output=raw_output,
        artifacts=artifacts,
        session_info=f"steps={len(payload.steps)}",
    )

    await emit_status(
        ctx,
        message,
        roadmap_step_id=final_step.id,
        roadmap_step_status="ok",
        message="Done!",
    )

    # Stash the structured response in meta so consumers can read it without
    # re-parsing the payload.
    meta = dict(message.meta)
    meta["flow_response"] = final_response.model_dump()
    target = _find_target(ctx, "deliver_final")
    if target is None:  # pragma: no cover - defensive guard
        raise RuntimeError("deliver_final node is not connected")
    response_message = message.model_copy(
        update={"payload": final_response, "meta": meta}
    )
    await ctx.emit(response_message, to=target)
|
|
682
|
+
|
|
683
|
+
|
|
684
|
+
async def deliver_final(message: Message, _ctx) -> FinalAnswer:
    """Convert the ``FlowResponse`` into the ``FinalAnswer`` surfaced to the caller.

    Raises:
        TypeError: if the payload is not a ``FlowResponse``.
    """

    payload = message.payload
    # Validate explicitly: ``assert`` is stripped under ``python -O``.
    if not isinstance(payload, FlowResponse):
        raise TypeError("deliver_final expects a FlowResponse payload")

    text = payload.raw_output
    if payload.artifacts:
        # Append the sorted artifact keys so they are visible in the answer.
        text += f"\nArtifacts: {sorted(payload.artifacts)}"

    return FinalAnswer(text=text)
|
|
694
|
+
|
|
695
|
+
|
|
696
|
+
def build_flow() -> tuple[PenguiFlow, ModelRegistry]:
    """Assemble the full demo graph and its model registry.

    Topology: start -> triage -> (documents | bug) branch -> compose_final
    -> deliver_final, with every processing node also wired to the
    ``status_updates`` sink and ``compose_final`` additionally streaming to
    ``chunk_sink``.
    """

    # Telemetry sinks shared by every node.
    status_node = Node(
        status_collector, name="status_updates", policy=NodePolicy(validate="none")
    )
    chunk_node = Node(
        chunk_collector, name="chunk_sink", policy=NodePolicy(validate="none")
    )

    start_node = Node(announce_start, name="start", policy=NodePolicy(validate="none"))
    triage_node = Node(triage, name="triage", policy=NodePolicy(validate="none"))

    # Documents branch.
    doc_plan_node = Node(
        document_plan, name="documents_plan", policy=NodePolicy(validate="none")
    )
    parse_node = Node(
        parse_documents, name="parse_documents", policy=NodePolicy(validate="none")
    )
    metadata_node = Node(
        extract_metadata, name="extract_metadata", policy=NodePolicy(validate="none")
    )
    summary_node = Node(
        generate_summary, name="generate_summary", policy=NodePolicy(validate="none")
    )
    render_node = Node(
        render_report, name="render_report", policy=NodePolicy(validate="none")
    )

    # Bug branch.
    bug_plan_node = Node(bug_plan, name="bug_plan", policy=NodePolicy(validate="none"))
    logs_node = Node(
        collect_logs, name="collect_logs", policy=NodePolicy(validate="none")
    )
    diagnostics_node = Node(
        run_diagnostics, name="run_diagnostics", policy=NodePolicy(validate="none")
    )
    fix_node = Node(propose_fix, name="propose_fix", policy=NodePolicy(validate="none"))

    # Shared tail.
    compose_node = Node(
        compose_final, name="compose_final", policy=NodePolicy(validate="none")
    )
    final_node = Node(
        deliver_final, name="deliver_final", policy=NodePolicy(validate="none")
    )

    flow = create(
        start_node.to(triage_node, status_node),
        triage_node.to(doc_plan_node, bug_plan_node, status_node),
        doc_plan_node.to(parse_node, status_node),
        parse_node.to(metadata_node, status_node),
        metadata_node.to(summary_node, status_node),
        summary_node.to(render_node, status_node),
        render_node.to(compose_node, status_node),
        bug_plan_node.to(logs_node, status_node),
        logs_node.to(diagnostics_node, status_node),
        diagnostics_node.to(fix_node, status_node),
        fix_node.to(compose_node, status_node),
        compose_node.to(status_node, chunk_node, final_node),
        status_node.to(),
        chunk_node.to(),
        final_node.to(),
    )

    # NOTE(review): every node runs with validate="none", so these
    # registrations presumably document intent rather than enforce it —
    # confirm against penguiflow's registry semantics.
    registry = ModelRegistry()
    registry.register("documents_plan", RouteDecision, DocumentState)
    registry.register("parse_documents", DocumentState, DocumentState)
    registry.register("extract_metadata", DocumentState, DocumentState)
    registry.register("generate_summary", DocumentState, DocumentState)
    registry.register("render_report", DocumentState, SynthesisInput)
    registry.register("bug_plan", RouteDecision, BugState)
    registry.register("collect_logs", BugState, BugState)
    registry.register("run_diagnostics", BugState, BugState)
    registry.register("propose_fix", BugState, SynthesisInput)
    registry.register("compose_final", SynthesisInput, FlowResponse)
    registry.register("deliver_final", FlowResponse, FinalAnswer)

    return flow, registry
|
|
771
|
+
|
|
772
|
+
|
|
773
|
+
async def run_example(query: str) -> FinalAnswer:
    """Run the full flow once for *query* and return its ``FinalAnswer``.

    Telemetry buffers are reset first so they reflect only this run; the
    flow is always stopped, even when fetching fails.

    Raises:
        TypeError: if the flow does not produce a ``FinalAnswer``.
    """

    reset_buffers()
    flow, registry = build_flow()
    flow.run(registry=registry)
    try:
        message = Message(payload=UserQuery(text=query), headers=Headers(tenant="demo"))
        await flow.emit(message)
        result = await flow.fetch()
        # Explicit check: ``assert`` disappears under ``python -O``.
        if not isinstance(result, FinalAnswer):
            raise TypeError("flow did not produce a FinalAnswer")
        return result
    finally:
        await flow.stop()
|
|
785
|
+
|
|
786
|
+
|
|
787
|
+
def export_mermaid(flow: PenguiFlow, destination: Path | None = None) -> Path:
    """Render the flow graph as a fenced Mermaid block and return the path.

    Writes to *destination* when given, otherwise to ``flow.mermaid.md``
    next to this module.
    """

    mermaid = flow_to_mermaid(flow, direction="TD")
    path = destination or Path(__file__).with_name("flow.mermaid.md")
    # Pin the encoding so output is identical across platforms (the default
    # text encoding is platform-dependent).
    path.write_text(f"```mermaid\n{mermaid}\n```\n", encoding="utf-8")
    return path
|
|
792
|
+
|
|
793
|
+
|
|
794
|
+
async def main() -> None:  # pragma: no cover - manual entrypoint
    """Run the demo query and dump captured telemetry to stdout."""

    answer = await run_example("Summarize the latest release notes")

    print("\n=== ROOKERY STATUS UPDATES ===")
    for trace_id, updates in STATUS_BUFFER.items():
        print(f"\nTrace: {trace_id}")
        for index, update in enumerate(updates, 1):
            print(f" [{index}] {update.model_dump_json(indent=2)}")

    print("\n=== ROOKERY STREAM CHUNKS ===")
    for trace_id, chunks in CHUNK_BUFFER.items():
        print(f"\nTrace: {trace_id}")
        for index, chunk in enumerate(chunks, 1):
            print(f" [{index}] {chunk.model_dump_json(indent=2)}")

    print("\n=== FINAL ANSWER ===")
    print(answer.text)
|
|
811
|
+
|
|
812
|
+
|
|
813
|
+
# Manual demo entrypoint: ``python flow.py``.
if __name__ == "__main__":  # pragma: no cover - manual entrypoint
    asyncio.run(main())
|