glaip-sdk 0.0.18__py3-none-any.whl → 0.0.20__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50) hide show
  1. glaip_sdk/_version.py +2 -2
  2. glaip_sdk/branding.py +27 -2
  3. glaip_sdk/cli/auth.py +93 -28
  4. glaip_sdk/cli/commands/__init__.py +2 -2
  5. glaip_sdk/cli/commands/agents.py +108 -21
  6. glaip_sdk/cli/commands/configure.py +141 -90
  7. glaip_sdk/cli/commands/mcps.py +371 -48
  8. glaip_sdk/cli/commands/models.py +4 -3
  9. glaip_sdk/cli/commands/tools.py +27 -14
  10. glaip_sdk/cli/commands/update.py +66 -0
  11. glaip_sdk/cli/config.py +13 -2
  12. glaip_sdk/cli/display.py +35 -26
  13. glaip_sdk/cli/io.py +14 -5
  14. glaip_sdk/cli/main.py +185 -73
  15. glaip_sdk/cli/pager.py +2 -1
  16. glaip_sdk/cli/parsers/json_input.py +62 -14
  17. glaip_sdk/cli/resolution.py +4 -1
  18. glaip_sdk/cli/slash/__init__.py +3 -4
  19. glaip_sdk/cli/slash/agent_session.py +88 -36
  20. glaip_sdk/cli/slash/prompt.py +20 -48
  21. glaip_sdk/cli/slash/session.py +440 -189
  22. glaip_sdk/cli/transcript/__init__.py +71 -0
  23. glaip_sdk/cli/transcript/cache.py +338 -0
  24. glaip_sdk/cli/transcript/capture.py +278 -0
  25. glaip_sdk/cli/transcript/export.py +38 -0
  26. glaip_sdk/cli/transcript/launcher.py +79 -0
  27. glaip_sdk/cli/transcript/viewer.py +624 -0
  28. glaip_sdk/cli/update_notifier.py +29 -5
  29. glaip_sdk/cli/utils.py +256 -74
  30. glaip_sdk/client/agents.py +3 -1
  31. glaip_sdk/client/run_rendering.py +2 -2
  32. glaip_sdk/icons.py +19 -0
  33. glaip_sdk/models.py +6 -0
  34. glaip_sdk/rich_components.py +29 -1
  35. glaip_sdk/utils/__init__.py +1 -1
  36. glaip_sdk/utils/client_utils.py +6 -4
  37. glaip_sdk/utils/display.py +61 -32
  38. glaip_sdk/utils/rendering/formatting.py +6 -5
  39. glaip_sdk/utils/rendering/renderer/base.py +213 -66
  40. glaip_sdk/utils/rendering/renderer/debug.py +73 -16
  41. glaip_sdk/utils/rendering/renderer/panels.py +27 -15
  42. glaip_sdk/utils/rendering/renderer/progress.py +61 -38
  43. glaip_sdk/utils/serialization.py +5 -2
  44. glaip_sdk/utils/validation.py +1 -2
  45. {glaip_sdk-0.0.18.dist-info → glaip_sdk-0.0.20.dist-info}/METADATA +1 -1
  46. glaip_sdk-0.0.20.dist-info/RECORD +80 -0
  47. glaip_sdk/utils/rich_utils.py +0 -29
  48. glaip_sdk-0.0.18.dist-info/RECORD +0 -73
  49. {glaip_sdk-0.0.18.dist-info → glaip_sdk-0.0.20.dist-info}/WHEEL +0 -0
  50. {glaip_sdk-0.0.18.dist-info → glaip_sdk-0.0.20.dist-info}/entry_points.txt +0 -0
@@ -0,0 +1,71 @@
1
+ """Transcript utilities package for CLI.
2
+
3
+ Authors:
4
+ Raymond Christopher (raymond.christopher@gdplabs.id)
5
+ """
6
+
7
+ from glaip_sdk.cli.transcript.cache import (
8
+ TranscriptCacheStats,
9
+ TranscriptPayload,
10
+ TranscriptStoreResult,
11
+ ensure_cache_dir,
12
+ get_transcript_cache_stats,
13
+ latest_manifest_entry,
14
+ manifest_path,
15
+ resolve_manifest_entry,
16
+ store_transcript,
17
+ suggest_filename,
18
+ )
19
+ from glaip_sdk.cli.transcript.cache import (
20
+ export_transcript as export_cached_transcript,
21
+ )
22
+ from glaip_sdk.cli.transcript.capture import (
23
+ StoredTranscriptContext,
24
+ coerce_events,
25
+ coerce_result_text,
26
+ compute_finished_at,
27
+ extract_server_run_id,
28
+ register_last_transcript,
29
+ store_transcript_for_session,
30
+ )
31
+ from glaip_sdk.cli.transcript.export import (
32
+ normalise_export_destination,
33
+ resolve_manifest_for_export,
34
+ )
35
+ from glaip_sdk.cli.transcript.launcher import (
36
+ maybe_launch_post_run_viewer,
37
+ should_launch_post_run_viewer,
38
+ )
39
+ from glaip_sdk.cli.transcript.viewer import (
40
+ PostRunViewer,
41
+ ViewerContext,
42
+ run_viewer_session,
43
+ )
44
+
45
# Public surface of the transcript package, grouped by the submodule each
# name is imported from above (cache, capture, export, launcher, viewer).
__all__ = [
    # cache: payload/result types and on-disk cache helpers
    "TranscriptCacheStats",
    "TranscriptPayload",
    "TranscriptStoreResult",
    "ensure_cache_dir",
    "get_transcript_cache_stats",
    "manifest_path",
    "store_transcript",
    "suggest_filename",
    "latest_manifest_entry",
    "resolve_manifest_entry",
    "export_cached_transcript",
    # capture: renderer-output capture and session storage helpers
    "StoredTranscriptContext",
    "coerce_events",
    "coerce_result_text",
    "compute_finished_at",
    "extract_server_run_id",
    "register_last_transcript",
    "store_transcript_for_session",
    # export: destination/manifest resolution for transcript export
    "resolve_manifest_for_export",
    "normalise_export_destination",
    # launcher: post-run viewer launch decisions
    "maybe_launch_post_run_viewer",
    "should_launch_post_run_viewer",
    # viewer: interactive post-run transcript viewer
    "ViewerContext",
    "PostRunViewer",
    "run_viewer_session",
]
@@ -0,0 +1,338 @@
1
+ """Helpers for storing and exporting agent run transcripts.
2
+
3
+ Authors:
4
+ Raymond Christopher (raymond.christopher@gdplabs.id)
5
+ """
6
+
7
+ from __future__ import annotations
8
+
9
+ import json
10
+ import os
11
+ import uuid
12
+ from collections.abc import Iterable
13
+ from dataclasses import dataclass
14
+ from datetime import datetime, timezone
15
+ from pathlib import Path
16
+ from typing import Any
17
+
18
# Root directory for the transcript cache.  The AIP_TRANSCRIPT_CACHE_DIR
# environment variable overrides the default of
# ~/.config/glaip-sdk/transcripts.  Note: os.getenv returns the default
# object unchanged when the variable is unset, and Path(Path(...)) is a
# no-op, so both branches produce a Path.
DEFAULT_CACHE_ROOT = Path(
    os.getenv(
        "AIP_TRANSCRIPT_CACHE_DIR",
        Path.home() / ".config" / "glaip-sdk" / "transcripts",
    )
)
# Name of the JSON-lines manifest kept alongside the cached transcripts.
MANIFEST_FILENAME = "manifest.jsonl"
25
+
26
+
27
@dataclass(slots=True)
class TranscriptPayload:
    """Data bundle representing a captured agent run."""

    # Ordered renderer events captured from the stream.
    events: list[dict[str, Any]]
    # Aggregated renderer output (the default textual transcript).
    default_output: str
    # Final result text of the run.
    final_output: str
    # Agent identity, when known.
    agent_id: str | None
    agent_name: str | None
    # Model name used for the run, when known.
    model: str | None
    # Server-side run identifier, when one could be derived.
    server_run_id: str | None
    # Stream start/end times; presumably epoch seconds — confirm at caller.
    started_at: float | None
    finished_at: float | None
    # UTC time the payload was built (set by build_payload).
    created_at: datetime
    # Caller-supplied origin tag for the run (free-form).
    source: str
    # Arbitrary extra metadata from the renderer.
    meta: dict[str, Any]
    # Local cache identifier for this transcript (uuid4 hex in build_payload).
    run_id: str
44
+
45
+
46
@dataclass(slots=True)
class TranscriptStoreResult:
    """Result of writing a transcript to the local cache."""

    # Filesystem path of the written JSONL transcript file.
    path: Path
    # The manifest record appended for this transcript.
    manifest_entry: dict[str, Any]
    # Entries removed during pruning; store_transcript always leaves this empty.
    pruned_entries: list[dict[str, Any]]
53
+
54
+
55
@dataclass(slots=True)
class TranscriptCacheStats:
    """Lightweight usage snapshot for the transcript cache."""

    # Directory the stats were computed for.
    cache_dir: Path
    # Number of manifest entries found.
    entry_count: int
    # Sum of the manifest entries' recorded size_bytes values.
    total_bytes: int
62
+
63
+
64
def ensure_cache_dir(cache_dir: Path | None = None) -> Path:
    """Ensure the cache directory exists and return it.

    Falls back to a directory under the current working tree when the
    preferred location cannot be created or is not writable.

    Args:
        cache_dir: Preferred cache directory; defaults to DEFAULT_CACHE_ROOT.

    Returns:
        An existing, writable directory.
    """
    directory = cache_dir or DEFAULT_CACHE_ROOT
    try:
        directory.mkdir(parents=True, exist_ok=True)
    except OSError:
        # mkdir can fail with more than PermissionError (read-only
        # filesystem, a non-directory occupying the path, ...); any
        # OS-level failure means the location is unusable, so fall back.
        return _fallback_cache_dir()

    if not os.access(directory, os.W_OK):
        # The directory exists but this process cannot write into it.
        return _fallback_cache_dir()

    return directory
76
+
77
+
78
+ def _fallback_cache_dir() -> Path:
79
+ """Return a writable fallback cache directory under the current working tree."""
80
+ fallback = Path.cwd() / ".glaip-transcripts"
81
+ fallback.mkdir(parents=True, exist_ok=True)
82
+ return fallback
83
+
84
+
85
def manifest_path(cache_dir: Path | None = None) -> Path:
    """Return the manifest file path inside the (ensured) cache directory."""
    directory = ensure_cache_dir(cache_dir)
    return directory / MANIFEST_FILENAME
88
+
89
+
90
+ def _parse_iso(ts: str | None) -> datetime | None:
91
+ if not ts:
92
+ return None
93
+ try:
94
+ return datetime.fromisoformat(ts.replace("Z", "+00:00"))
95
+ except Exception:
96
+ return None
97
+
98
+
99
def _load_manifest_entries(cache_dir: Path | None = None) -> list[dict[str, Any]]:
    """Load manifest records (one JSON object per line).

    Blank lines and lines that fail to parse are silently skipped so a
    partially-written manifest never breaks reads.
    """
    path = manifest_path(cache_dir)
    if not path.exists():
        return []

    collected: list[dict[str, Any]] = []
    with path.open("r", encoding="utf-8") as handle:
        for raw_line in handle:
            text = raw_line.strip()
            if not text:
                continue
            try:
                collected.append(json.loads(text))
            except json.JSONDecodeError:
                # Tolerate corrupt/truncated lines.
                continue
    return collected
116
+
117
+
118
+ def _json_default(value: Any) -> Any:
119
+ """Ensure non-serialisable values degrade to readable strings."""
120
+ if isinstance(value, (str, int, float, bool)) or value is None:
121
+ return value
122
+ if isinstance(value, Path):
123
+ return str(value)
124
+ return repr(value)
125
+
126
+
127
def _write_manifest(
    entries: Iterable[dict[str, Any]], cache_dir: Path | None = None
) -> None:
    """Rewrite the manifest file from scratch, one JSON object per line."""
    target = manifest_path(cache_dir)
    with target.open("w", encoding="utf-8") as handle:
        for entry in entries:
            line = json.dumps(entry, ensure_ascii=False, default=_json_default)
            handle.write(line + "\n")
135
+
136
+
137
def store_transcript(
    payload: TranscriptPayload,
    *,
    cache_dir: Path | None = None,
) -> TranscriptStoreResult:
    """Persist a transcript to disk and update the manifest.

    Writes one JSONL file per run — a leading ``meta`` record followed by one
    ``event`` record per captured event — then appends a summary entry to the
    manifest in the same directory.

    Args:
        payload: Captured run data to persist.
        cache_dir: Optional cache directory override.

    Returns:
        TranscriptStoreResult with the written path and its manifest entry.
        ``pruned_entries`` is always empty — no pruning is performed here.
    """
    directory = ensure_cache_dir(cache_dir)
    filename = f"run-{payload.run_id}.jsonl"
    transcript_path = directory / filename

    # First line of the transcript file: run-level metadata.
    meta_line = {
        "type": "meta",
        "run_id": payload.run_id,
        "agent_id": payload.agent_id,
        "agent_name": payload.agent_name,
        "model": payload.model,
        "created_at": payload.created_at.isoformat(),
        "default_output": payload.default_output,
        "final_output": payload.final_output,
        "server_run_id": payload.server_run_id,
        "started_at": payload.started_at,
        "finished_at": payload.finished_at,
        "meta": payload.meta,
        "source": payload.source,
    }

    def _write_transcript(path: Path) -> None:
        # Meta record first, then one line per event; _json_default keeps
        # non-serialisable values from raising.
        with path.open("w", encoding="utf-8") as fh:
            fh.write(json.dumps(meta_line, ensure_ascii=False, default=_json_default))
            fh.write("\n")
            for event in payload.events:
                fh.write(
                    json.dumps(
                        {"type": "event", "event": event},
                        ensure_ascii=False,
                        default=_json_default,
                    )
                )
                fh.write("\n")

    try:
        _write_transcript(transcript_path)
    except PermissionError:
        # Retry once in the fallback directory.  Note the manifest below is
        # then read from and written to the fallback directory too, so
        # entries recorded in the original directory are not merged in.
        directory = _fallback_cache_dir()
        transcript_path = directory / filename
        _write_transcript(transcript_path)

    # Stat after writing so size_bytes reflects the final file.
    size_bytes = transcript_path.stat().st_size
    manifest_entry = {
        "run_id": payload.run_id,
        "agent_id": payload.agent_id,
        "agent_name": payload.agent_name,
        "created_at": payload.created_at.isoformat(),
        "cache_path": str(transcript_path),
        "size_bytes": size_bytes,
        "retained": True,
        "source": payload.source,
        "server_run_id": payload.server_run_id,
    }

    # Append-and-rewrite: load whatever is in the manifest, add this run,
    # and write the whole file back.
    existing_entries = _load_manifest_entries(directory)
    existing_entries.append(manifest_entry)
    _write_manifest(existing_entries, directory)

    return TranscriptStoreResult(
        path=transcript_path,
        manifest_entry=manifest_entry,
        pruned_entries=[],
    )
206
+
207
+
208
def latest_manifest_entry(cache_dir: Path | None = None) -> dict[str, Any] | None:
    """Return the most recent manifest entry, if any."""
    entries = _load_manifest_entries(cache_dir)
    if not entries:
        return None

    epoch = datetime.min.replace(tzinfo=timezone.utc)

    def _created(entry: dict[str, Any]) -> datetime:
        # Missing/unparseable timestamps sort as oldest.
        return _parse_iso(entry.get("created_at")) or epoch

    return max(entries, key=_created)
218
+
219
+
220
def resolve_manifest_entry(
    run_id: str,
    cache_dir: Path | None = None,
) -> dict[str, Any] | None:
    """Find a manifest entry by run id; None when absent."""
    return next(
        (
            candidate
            for candidate in _load_manifest_entries(cache_dir)
            if candidate.get("run_id") == run_id
        ),
        None,
    )
230
+
231
+
232
def export_transcript(
    *,
    destination: Path,
    run_id: str | None = None,
    cache_dir: Path | None = None,
) -> Path:
    """Copy a cached transcript to the requested destination path.

    Re-serialises each cached record pretty-printed (indent=2), with a blank
    line between records.  All failure modes raise FileNotFoundError.

    Args:
        destination: File path to write the export to.
        run_id: Specific run to export; defaults to the most recent run.
        cache_dir: Optional cache directory override.

    Returns:
        The destination path that was written.
    """
    directory = ensure_cache_dir(cache_dir)
    if run_id:
        entry = resolve_manifest_entry(run_id, directory)
    else:
        entry = latest_manifest_entry(directory)
    if entry is None:
        raise FileNotFoundError("No cached transcripts available for export.")

    cache_path = entry.get("cache_path")
    if not cache_path:
        raise FileNotFoundError("Cached transcript path missing from manifest.")

    cache_file = Path(cache_path)
    if not cache_file.exists():
        raise FileNotFoundError(f"Cached transcript file not found: {cache_file}")

    destination.parent.mkdir(parents=True, exist_ok=True)

    try:
        raw_lines = cache_file.read_text(encoding="utf-8").splitlines()
        records = [json.loads(text) for text in raw_lines if text.strip()]
    except json.JSONDecodeError as exc:
        raise FileNotFoundError(
            f"Cached transcript file is corrupted: {cache_file}"
        ) from exc

    with destination.open("w", encoding="utf-8") as handle:
        last = len(records) - 1
        for position, record in enumerate(records):
            json.dump(record, handle, ensure_ascii=False, indent=2)
            # Blank line between records, single newline after the last one.
            handle.write("\n" if position == last else "\n\n")

    return destination
274
+
275
+
276
def suggest_filename(entry: dict[str, Any] | None = None) -> str:
    """Return a friendly filename suggestion for exporting a transcript.

    Args:
        entry: Optional manifest entry.  Missing fields fall back to a fresh
            uuid / the current UTC time instead of crashing.

    Returns:
        A name of the form ``aip-run-<timestamp>-<run_id>.jsonl``.
    """
    entry = entry or {}
    # Fall back field-by-field: the original crashed with AttributeError when
    # an entry existed but lacked "created_at" (None.replace), and embedded
    # the literal "None" when "run_id" was missing.
    run_id = entry.get("run_id") or uuid.uuid4().hex
    created_at = entry.get("created_at") or datetime.now(timezone.utc).isoformat()
    # Compact the ISO timestamp: drop separators and the UTC offset suffix.
    timestamp = (
        created_at.replace(":", "").replace("-", "").replace("T", "_").split("+")[0]
    )
    return f"aip-run-{timestamp}-{run_id}.jsonl"
286
+
287
+
288
def build_payload(
    *,
    events: list[dict[str, Any]],
    renderer_output: str,
    final_output: str,
    agent_id: str | None,
    agent_name: str | None,
    model: str | None,
    server_run_id: str | None,
    started_at: float | None,
    finished_at: float | None,
    meta: dict[str, Any],
    source: str,
) -> TranscriptPayload:
    """Factory helper to prepare payload objects consistently.

    Stamps every payload with the current UTC time and a fresh uuid4 hex run
    id so each captured run is uniquely addressable in the cache.
    """
    field_values: dict[str, Any] = {
        "events": events,
        "default_output": renderer_output,
        "final_output": final_output,
        "agent_id": agent_id,
        "agent_name": agent_name,
        "model": model,
        "server_run_id": server_run_id,
        "started_at": started_at,
        "finished_at": finished_at,
        "created_at": datetime.now(timezone.utc),
        "source": source,
        "meta": meta,
        "run_id": uuid.uuid4().hex,
    }
    return TranscriptPayload(**field_values)
318
+
319
+
320
def get_transcript_cache_stats(
    cache_dir: Path | None = None,
) -> TranscriptCacheStats:
    """Return basic usage information about the transcript cache."""
    directory = ensure_cache_dir(cache_dir)
    manifest_entries = _load_manifest_entries(directory)

    usage = 0
    for item in manifest_entries:
        try:
            usage += int(item.get("size_bytes") or 0)
        except Exception:
            # Ignore malformed size fields rather than failing the stats call.
            continue

    return TranscriptCacheStats(
        cache_dir=directory,
        entry_count=len(manifest_entries),
        total_bytes=usage,
    )
@@ -0,0 +1,278 @@
1
+ """Helpers for capturing and caching agent run transcripts.
2
+
3
+ Authors:
4
+ Raymond Christopher (raymond.christopher@gdplabs.id)
5
+ """
6
+
7
+ from __future__ import annotations
8
+
9
+ import json
10
+ from dataclasses import dataclass
11
+ from typing import Any
12
+
13
+ from glaip_sdk.cli.transcript.cache import (
14
+ TranscriptPayload,
15
+ TranscriptStoreResult,
16
+ store_transcript,
17
+ )
18
+ from glaip_sdk.cli.transcript.cache import (
19
+ build_payload as build_transcript_payload,
20
+ )
21
+ from glaip_sdk.utils.rendering.renderer.progress import format_tool_title
22
+
23
+
24
@dataclass(slots=True)
class StoredTranscriptContext:
    """Simple container linking payload and manifest data."""

    # The captured run data that was written to the cache.
    payload: TranscriptPayload
    # Where the transcript landed on disk, plus its manifest entry.
    store_result: TranscriptStoreResult
30
+
31
+
32
def coerce_events(value: Any) -> list[dict[str, Any]]:
    """Normalise renderer events into a list of dictionaries.

    Non-dict items are dropped.  Falsy or non-iterable inputs yield an
    empty list.  (The original had a dedicated ``isinstance(value, list)``
    branch that duplicated the generic comprehension verbatim; one branch
    covers both cases.)
    """
    if not value:
        return []
    try:
        return [item for item in value if isinstance(item, dict)]
    except Exception:
        # Truthy but not iterable.
        return []
42
+
43
+
44
def coerce_result_text(result: Any) -> str:
    """Serialise renderer output to a string for transcript payloads.

    Strings pass through unchanged, None becomes "", and anything else is
    pretty-printed JSON with a str() fallback for non-serialisable values.
    """
    if isinstance(result, str):
        return result
    if result is None:
        return ""
    try:
        rendered = json.dumps(result, ensure_ascii=False, indent=2)
    except Exception:
        rendered = str(result)
    return rendered
54
+
55
+
56
def compute_finished_at(renderer: Any) -> float | None:
    """Best-effort end-time calculation based on renderer state.

    Returns start + duration when both are known, otherwise None.
    """
    state = getattr(renderer, "state", None)
    if state is None:
        started_at = None
        duration = None
    else:
        started_at = getattr(state, "streaming_started_at", None)
        duration = getattr(state, "final_duration_seconds", None)

    if started_at is None:
        # Fall back to the stream processor's record of the start time.
        processor = getattr(renderer, "stream_processor", None)
        if processor is not None:
            started_at = getattr(processor, "streaming_started_at", None)

    if started_at is None or duration is None:
        return None

    try:
        return float(started_at) + float(duration)
    except Exception:
        # Non-numeric values degrade to "unknown".
        return None
79
+
80
+
81
def extract_server_run_id(
    meta: dict[str, Any], events: list[dict[str, Any]]
) -> str | None:
    """Derive a server-side run identifier from renderer metadata.

    Prefers the top-level meta ("run_id", then "id"); otherwise scans each
    event's metadata for "run_id" / "request_id".
    """
    direct = meta.get("run_id") or meta.get("id")
    if direct:
        return str(direct)

    for event in events:
        event_meta = event.get("metadata") or {}
        found = event_meta.get("run_id") or event_meta.get("request_id")
        if found:
            return str(found)

    return None
94
+
95
+
96
+ def _coerce_meta(meta: Any) -> dict[str, Any]:
97
+ """Ensure renderer metadata is recorded as a plain dictionary."""
98
+ if meta is None:
99
+ return {}
100
+ if isinstance(meta, dict):
101
+ return meta
102
+ if hasattr(meta, "items"):
103
+ try:
104
+ return {str(key): value for key, value in meta.items()}
105
+ except Exception:
106
+ pass
107
+ return {"value": coerce_result_text(meta)}
108
+
109
+
110
def register_last_transcript(
    ctx: Any, payload: TranscriptPayload, store_result: TranscriptStoreResult
) -> None:
    """Persist last-run transcript references onto the Click context."""
    holder = getattr(ctx, "obj", None)
    if not isinstance(holder, dict):
        # No dict-shaped context object to record on — nothing to do.
        return
    holder.update(
        {
            "_last_transcript_payload": payload,
            "_last_transcript_manifest": store_result.manifest_entry,
            "_last_transcript_path": str(store_result.path),
        }
    )
120
+
121
+
122
def _extract_step_summaries(renderer: Any) -> list[dict[str, Any]]:
    """Return lightweight step summaries for the transcript viewer."""
    steps = getattr(renderer, "steps", None)
    if steps is None:
        return []

    order = getattr(steps, "order", []) or []
    by_id = getattr(steps, "by_id", {}) or {}

    # Walrus keeps it to a single by_id lookup per step; unknown ids are skipped.
    return [
        _build_step_summary(step, index)
        for index, step_id in enumerate(order)
        if (step := by_id.get(step_id)) is not None
    ]
136
+
137
+
138
def _build_step_summary(step: Any, index: int) -> dict[str, Any]:
    """Construct a single step summary entry."""
    step_kind = getattr(step, "kind", "") or ""
    step_name = getattr(step, "name", "") or ""
    step_status = getattr(step, "status", "") or ""

    return {
        "index": index,
        "step_id": getattr(step, "step_id", f"step-{index}"),
        "kind": step_kind,
        "name": step_name,
        "display_name": _format_step_display_name(step_name),
        "status": step_status,
        "duration_ms": _coerce_duration_ms(getattr(step, "duration_ms", None)),
    }
155
+
156
+
157
+ def _coerce_duration_ms(value: Any) -> int | None:
158
+ """Return duration in milliseconds if numeric, otherwise None."""
159
+ try:
160
+ if isinstance(value, (int, float)):
161
+ return int(value)
162
+ except Exception:
163
+ return None
164
+ return None
165
+
166
+
167
+ def _format_step_display_name(name: str) -> str:
168
+ """Apply tool title formatting with a safe fallback."""
169
+ try:
170
+ return format_tool_title(name)
171
+ except Exception:
172
+ return name
173
+
174
+
175
def _collect_renderer_outputs(
    renderer: Any, final_result: Any
) -> tuple[
    list[dict[str, Any]],
    str,
    str,
]:
    """Collect events and text outputs from a renderer with safe fallbacks."""

    def _safe_call(method_name: str, fallback: Any) -> Any:
        # A missing method or one that raises both degrade to the fallback.
        method = getattr(renderer, method_name, None)
        if method is None:
            return fallback
        try:
            return method()
        except Exception:
            return fallback

    events = coerce_events(_safe_call("get_transcript_events", []))
    aggregated_output = coerce_result_text(_safe_call("get_aggregated_output", ""))
    final_output = coerce_result_text(final_result)
    return events, aggregated_output, final_output
201
+
202
+
203
def _derive_transcript_meta(
    renderer: Any, model: str | None
) -> tuple[dict[str, Any], float | None, float | None, str | None]:
    """Build transcript metadata including step summaries and timings."""
    state = getattr(renderer, "state", None)
    meta = _coerce_meta(getattr(state, "meta", {}) or {})

    summaries = _extract_step_summaries(renderer)
    if summaries:
        meta["transcript_steps"] = summaries

    processor = getattr(renderer, "stream_processor", None)
    started_at = (
        getattr(processor, "streaming_started_at", None)
        if processor is not None
        else None
    )

    # Prefer the model recorded in renderer metadata over the CLI argument.
    model_name = meta.get("model") or model
    return meta, started_at, compute_finished_at(renderer), model_name
223
+
224
+
225
def store_transcript_for_session(
    ctx: Any,
    renderer: Any,
    *,
    final_result: Any,
    agent_id: str | None,
    agent_name: str | None,
    model: str | None,
    source: str,
) -> StoredTranscriptContext | None:
    """Capture renderer output and persist the transcript for later reuse.

    Returns None when the renderer cannot produce transcript events or when
    nothing at all was captured.
    """
    if not hasattr(renderer, "get_transcript_events"):
        return None

    events, aggregated_output, final_output = _collect_renderer_outputs(
        renderer, final_result
    )
    if not any((events, aggregated_output, final_output)):
        # Nothing worth storing.
        return None

    meta, stream_started_at, finished_at, model_name = _derive_transcript_meta(
        renderer, model
    )

    payload: TranscriptPayload = build_transcript_payload(
        events=events,
        renderer_output=aggregated_output,
        final_output=final_output,
        agent_id=agent_id,
        agent_name=agent_name,
        model=model_name,
        server_run_id=extract_server_run_id(meta, events),
        started_at=stream_started_at,
        finished_at=finished_at,
        meta=meta,
        source=source,
    )

    store_result = store_transcript(payload)
    register_last_transcript(ctx, payload, store_result)
    return StoredTranscriptContext(payload=payload, store_result=store_result)
268
+
269
+
270
# Public helpers re-exported via glaip_sdk.cli.transcript; underscore-prefixed
# functions in this module are internal and deliberately omitted.
__all__ = [
    "StoredTranscriptContext",
    "coerce_events",
    "coerce_result_text",
    "compute_finished_at",
    "extract_server_run_id",
    "register_last_transcript",
    "store_transcript_for_session",
]