glaip-sdk 0.1.0__py3-none-any.whl → 0.6.10__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (156)
  1. glaip_sdk/__init__.py +5 -2
  2. glaip_sdk/_version.py +10 -3
  3. glaip_sdk/agents/__init__.py +27 -0
  4. glaip_sdk/agents/base.py +1191 -0
  5. glaip_sdk/branding.py +15 -6
  6. glaip_sdk/cli/account_store.py +540 -0
  7. glaip_sdk/cli/agent_config.py +2 -6
  8. glaip_sdk/cli/auth.py +265 -45
  9. glaip_sdk/cli/commands/__init__.py +2 -2
  10. glaip_sdk/cli/commands/accounts.py +746 -0
  11. glaip_sdk/cli/commands/agents.py +251 -173
  12. glaip_sdk/cli/commands/common_config.py +101 -0
  13. glaip_sdk/cli/commands/configure.py +735 -143
  14. glaip_sdk/cli/commands/mcps.py +266 -134
  15. glaip_sdk/cli/commands/models.py +13 -9
  16. glaip_sdk/cli/commands/tools.py +67 -88
  17. glaip_sdk/cli/commands/transcripts.py +755 -0
  18. glaip_sdk/cli/commands/update.py +3 -8
  19. glaip_sdk/cli/config.py +49 -7
  20. glaip_sdk/cli/constants.py +38 -0
  21. glaip_sdk/cli/context.py +8 -0
  22. glaip_sdk/cli/core/__init__.py +79 -0
  23. glaip_sdk/cli/core/context.py +124 -0
  24. glaip_sdk/cli/core/output.py +846 -0
  25. glaip_sdk/cli/core/prompting.py +649 -0
  26. glaip_sdk/cli/core/rendering.py +187 -0
  27. glaip_sdk/cli/display.py +45 -32
  28. glaip_sdk/cli/hints.py +57 -0
  29. glaip_sdk/cli/io.py +14 -17
  30. glaip_sdk/cli/main.py +232 -143
  31. glaip_sdk/cli/masking.py +21 -33
  32. glaip_sdk/cli/mcp_validators.py +5 -15
  33. glaip_sdk/cli/pager.py +12 -19
  34. glaip_sdk/cli/parsers/__init__.py +1 -3
  35. glaip_sdk/cli/parsers/json_input.py +11 -22
  36. glaip_sdk/cli/resolution.py +3 -9
  37. glaip_sdk/cli/rich_helpers.py +1 -3
  38. glaip_sdk/cli/slash/__init__.py +0 -9
  39. glaip_sdk/cli/slash/accounts_controller.py +578 -0
  40. glaip_sdk/cli/slash/accounts_shared.py +75 -0
  41. glaip_sdk/cli/slash/agent_session.py +65 -29
  42. glaip_sdk/cli/slash/prompt.py +24 -10
  43. glaip_sdk/cli/slash/remote_runs_controller.py +566 -0
  44. glaip_sdk/cli/slash/session.py +807 -225
  45. glaip_sdk/cli/slash/tui/__init__.py +9 -0
  46. glaip_sdk/cli/slash/tui/accounts.tcss +86 -0
  47. glaip_sdk/cli/slash/tui/accounts_app.py +876 -0
  48. glaip_sdk/cli/slash/tui/background_tasks.py +72 -0
  49. glaip_sdk/cli/slash/tui/loading.py +58 -0
  50. glaip_sdk/cli/slash/tui/remote_runs_app.py +628 -0
  51. glaip_sdk/cli/transcript/__init__.py +12 -52
  52. glaip_sdk/cli/transcript/cache.py +258 -60
  53. glaip_sdk/cli/transcript/capture.py +72 -21
  54. glaip_sdk/cli/transcript/history.py +815 -0
  55. glaip_sdk/cli/transcript/launcher.py +1 -3
  56. glaip_sdk/cli/transcript/viewer.py +79 -499
  57. glaip_sdk/cli/update_notifier.py +177 -24
  58. glaip_sdk/cli/utils.py +242 -1308
  59. glaip_sdk/cli/validators.py +16 -18
  60. glaip_sdk/client/__init__.py +2 -1
  61. glaip_sdk/client/_agent_payloads.py +53 -37
  62. glaip_sdk/client/agent_runs.py +147 -0
  63. glaip_sdk/client/agents.py +320 -92
  64. glaip_sdk/client/base.py +78 -35
  65. glaip_sdk/client/main.py +19 -10
  66. glaip_sdk/client/mcps.py +123 -15
  67. glaip_sdk/client/run_rendering.py +136 -101
  68. glaip_sdk/client/shared.py +21 -0
  69. glaip_sdk/client/tools.py +163 -34
  70. glaip_sdk/client/validators.py +20 -48
  71. glaip_sdk/config/constants.py +11 -0
  72. glaip_sdk/exceptions.py +1 -3
  73. glaip_sdk/mcps/__init__.py +21 -0
  74. glaip_sdk/mcps/base.py +345 -0
  75. glaip_sdk/models/__init__.py +90 -0
  76. glaip_sdk/models/agent.py +47 -0
  77. glaip_sdk/models/agent_runs.py +116 -0
  78. glaip_sdk/models/common.py +42 -0
  79. glaip_sdk/models/mcp.py +33 -0
  80. glaip_sdk/models/tool.py +33 -0
  81. glaip_sdk/payload_schemas/__init__.py +1 -13
  82. glaip_sdk/payload_schemas/agent.py +1 -3
  83. glaip_sdk/registry/__init__.py +55 -0
  84. glaip_sdk/registry/agent.py +164 -0
  85. glaip_sdk/registry/base.py +139 -0
  86. glaip_sdk/registry/mcp.py +253 -0
  87. glaip_sdk/registry/tool.py +232 -0
  88. glaip_sdk/rich_components.py +58 -2
  89. glaip_sdk/runner/__init__.py +59 -0
  90. glaip_sdk/runner/base.py +84 -0
  91. glaip_sdk/runner/deps.py +115 -0
  92. glaip_sdk/runner/langgraph.py +706 -0
  93. glaip_sdk/runner/mcp_adapter/__init__.py +13 -0
  94. glaip_sdk/runner/mcp_adapter/base_mcp_adapter.py +43 -0
  95. glaip_sdk/runner/mcp_adapter/langchain_mcp_adapter.py +257 -0
  96. glaip_sdk/runner/mcp_adapter/mcp_config_builder.py +95 -0
  97. glaip_sdk/runner/tool_adapter/__init__.py +18 -0
  98. glaip_sdk/runner/tool_adapter/base_tool_adapter.py +44 -0
  99. glaip_sdk/runner/tool_adapter/langchain_tool_adapter.py +219 -0
  100. glaip_sdk/tools/__init__.py +22 -0
  101. glaip_sdk/tools/base.py +435 -0
  102. glaip_sdk/utils/__init__.py +58 -12
  103. glaip_sdk/utils/a2a/__init__.py +34 -0
  104. glaip_sdk/utils/a2a/event_processor.py +188 -0
  105. glaip_sdk/utils/agent_config.py +4 -14
  106. glaip_sdk/utils/bundler.py +267 -0
  107. glaip_sdk/utils/client.py +111 -0
  108. glaip_sdk/utils/client_utils.py +46 -28
  109. glaip_sdk/utils/datetime_helpers.py +58 -0
  110. glaip_sdk/utils/discovery.py +78 -0
  111. glaip_sdk/utils/display.py +25 -21
  112. glaip_sdk/utils/export.py +143 -0
  113. glaip_sdk/utils/general.py +1 -36
  114. glaip_sdk/utils/import_export.py +15 -16
  115. glaip_sdk/utils/import_resolver.py +492 -0
  116. glaip_sdk/utils/instructions.py +101 -0
  117. glaip_sdk/utils/rendering/__init__.py +115 -1
  118. glaip_sdk/utils/rendering/formatting.py +7 -35
  119. glaip_sdk/utils/rendering/layout/__init__.py +64 -0
  120. glaip_sdk/utils/rendering/{renderer → layout}/panels.py +10 -3
  121. glaip_sdk/utils/rendering/{renderer → layout}/progress.py +73 -12
  122. glaip_sdk/utils/rendering/layout/summary.py +74 -0
  123. glaip_sdk/utils/rendering/layout/transcript.py +606 -0
  124. glaip_sdk/utils/rendering/models.py +3 -6
  125. glaip_sdk/utils/rendering/renderer/__init__.py +9 -49
  126. glaip_sdk/utils/rendering/renderer/base.py +258 -1577
  127. glaip_sdk/utils/rendering/renderer/config.py +1 -5
  128. glaip_sdk/utils/rendering/renderer/debug.py +30 -34
  129. glaip_sdk/utils/rendering/renderer/factory.py +138 -0
  130. glaip_sdk/utils/rendering/renderer/stream.py +10 -51
  131. glaip_sdk/utils/rendering/renderer/summary_window.py +79 -0
  132. glaip_sdk/utils/rendering/renderer/thinking.py +273 -0
  133. glaip_sdk/utils/rendering/renderer/toggle.py +1 -3
  134. glaip_sdk/utils/rendering/renderer/tool_panels.py +442 -0
  135. glaip_sdk/utils/rendering/renderer/transcript_mode.py +162 -0
  136. glaip_sdk/utils/rendering/state.py +204 -0
  137. glaip_sdk/utils/rendering/step_tree_state.py +1 -3
  138. glaip_sdk/utils/rendering/steps/__init__.py +34 -0
  139. glaip_sdk/utils/rendering/{steps.py → steps/event_processor.py} +76 -517
  140. glaip_sdk/utils/rendering/steps/format.py +176 -0
  141. glaip_sdk/utils/rendering/steps/manager.py +387 -0
  142. glaip_sdk/utils/rendering/timing.py +36 -0
  143. glaip_sdk/utils/rendering/viewer/__init__.py +21 -0
  144. glaip_sdk/utils/rendering/viewer/presenter.py +184 -0
  145. glaip_sdk/utils/resource_refs.py +29 -26
  146. glaip_sdk/utils/runtime_config.py +425 -0
  147. glaip_sdk/utils/serialization.py +32 -46
  148. glaip_sdk/utils/sync.py +142 -0
  149. glaip_sdk/utils/tool_detection.py +33 -0
  150. glaip_sdk/utils/validation.py +20 -28
  151. {glaip_sdk-0.1.0.dist-info → glaip_sdk-0.6.10.dist-info}/METADATA +42 -4
  152. glaip_sdk-0.6.10.dist-info/RECORD +159 -0
  153. {glaip_sdk-0.1.0.dist-info → glaip_sdk-0.6.10.dist-info}/WHEEL +1 -1
  154. glaip_sdk/models.py +0 -259
  155. glaip_sdk-0.1.0.dist-info/RECORD +0 -82
  156. {glaip_sdk-0.1.0.dist-info → glaip_sdk-0.6.10.dist-info}/entry_points.txt +0 -0
glaip_sdk/cli/transcript/__init__.py
@@ -4,68 +4,28 @@ Authors:
 Raymond Christopher (raymond.christopher@gdplabs.id)
 """
 
-from glaip_sdk.cli.transcript.cache import (
-    TranscriptCacheStats,
-    TranscriptPayload,
-    TranscriptStoreResult,
-    ensure_cache_dir,
-    get_transcript_cache_stats,
-    latest_manifest_entry,
-    manifest_path,
-    resolve_manifest_entry,
-    store_transcript,
-    suggest_filename,
-)
 from glaip_sdk.cli.transcript.cache import (
     export_transcript as export_cached_transcript,
 )
-from glaip_sdk.cli.transcript.capture import (
-    StoredTranscriptContext,
-    coerce_events,
-    coerce_result_text,
-    compute_finished_at,
-    extract_server_run_id,
-    register_last_transcript,
-    store_transcript_for_session,
+from glaip_sdk.cli.transcript.cache import (
+    get_transcript_cache_stats,
+    suggest_filename,
 )
+from glaip_sdk.cli.transcript.capture import store_transcript_for_session
 from glaip_sdk.cli.transcript.export import (
     normalise_export_destination,
     resolve_manifest_for_export,
 )
-from glaip_sdk.cli.transcript.launcher import (
-    maybe_launch_post_run_viewer,
-    should_launch_post_run_viewer,
-)
-from glaip_sdk.cli.transcript.viewer import (
-    PostRunViewer,
-    ViewerContext,
-    run_viewer_session,
-)
+from glaip_sdk.cli.transcript.history import load_history_snapshot
+from glaip_sdk.cli.transcript.launcher import maybe_launch_post_run_viewer
 
 __all__ = [
-    "TranscriptCacheStats",
-    "TranscriptPayload",
-    "TranscriptStoreResult",
-    "ensure_cache_dir",
-    "get_transcript_cache_stats",
-    "manifest_path",
-    "store_transcript",
-    "suggest_filename",
-    "latest_manifest_entry",
-    "resolve_manifest_entry",
     "export_cached_transcript",
-    "StoredTranscriptContext",
-    "coerce_events",
-    "coerce_result_text",
-    "compute_finished_at",
-    "extract_server_run_id",
-    "register_last_transcript",
-    "store_transcript_for_session",
-    "resolve_manifest_for_export",
-    "normalise_export_destination",
+    "get_transcript_cache_stats",
+    "load_history_snapshot",
     "maybe_launch_post_run_viewer",
-    "should_launch_post_run_viewer",
-    "ViewerContext",
-    "PostRunViewer",
-    "run_viewer_session",
+    "normalise_export_destination",
+    "resolve_manifest_for_export",
+    "store_transcript_for_session",
+    "suggest_filename",
 ]
glaip_sdk/cli/transcript/cache.py
@@ -8,13 +8,17 @@ from __future__ import annotations
 
 import json
 import os
-import uuid
-from collections.abc import Iterable
+import secrets
+from collections.abc import Iterable, Iterator
 from dataclasses import dataclass
-from datetime import datetime, timezone
+from datetime import datetime, timedelta, timezone
 from pathlib import Path
 from typing import Any
 
+from glaip_sdk.utils.datetime_helpers import (
+    coerce_datetime as _coerce_datetime,
+)
+
 DEFAULT_CACHE_ROOT = Path(
     os.getenv(
         "AIP_TRANSCRIPT_CACHE_DIR",
@@ -22,6 +26,11 @@ DEFAULT_CACHE_ROOT = Path(
     )
 )
 MANIFEST_FILENAME = "manifest.jsonl"
+JSONL_SUFFIX = ".jsonl"
+UTC_OFFSET_SUFFIX = "+00:00"
+
+_RUN_ID_PREFIX = "run_"
+_RUN_ID_ALPHABET = "23456789abcdefghjkmnpqrstuvwxyz"
 
 
 @dataclass(slots=True)
@@ -61,6 +70,124 @@ class TranscriptCacheStats:
     total_bytes: int
 
 
+def generate_run_id(length: int = 6) -> str:
+    """Return a short, human-friendly run identifier."""
+    length = max(4, min(int(length or 0), 16)) or 6
+    return _RUN_ID_PREFIX + "".join(secrets.choice(_RUN_ID_ALPHABET) for _ in range(length))
+
+
+def _timestamp_to_iso(value: Any) -> str | None:
+    """Convert supported timestamp-like values to an ISO8601 string with UTC designator."""
+    dt = _coerce_datetime(value)
+    if dt is None:
+        return None
+    if dt.year < 2000:
+        return None
+    return dt.isoformat().replace(UTC_OFFSET_SUFFIX, "Z")
+
+
+def _compute_duration_seconds(start: Any, end: Any) -> int | None:
+    """Compute whole-second duration between two timestamp-like values."""
+    start_dt = _coerce_datetime(start)
+    end_dt = _coerce_datetime(end)
+    if start_dt is None or end_dt is None:
+        return None
+    delta = (end_dt - start_dt).total_seconds()
+    if delta < 0:
+        return None
+    return int(round(delta))
+
+
+def _iter_candidate_paths(entry: dict[str, Any], directory: Path) -> Iterator[Path]:
+    """Yield plausible transcript paths for a manifest entry, deduplicated."""
+    seen: set[str] = set()
+
+    def _offer(path: Path) -> Iterator[Path]:
+        key = str(path)
+        if key not in seen:
+            seen.add(key)
+            yield path
+
+    for candidate in _filename_candidate_paths(entry, directory):
+        yield from _offer(candidate)
+    for candidate in _cache_path_candidate_paths(entry):
+        yield from _offer(candidate)
+    for candidate in _run_id_candidate_paths(entry, directory):
+        yield from _offer(candidate)
+
+
+def _filename_candidate_paths(entry: dict[str, Any], directory: Path) -> tuple[Path, ...]:
+    """Return possible transcript paths derived from the manifest filename."""
+    filename = entry.get("filename")
+    if not filename:
+        return ()
+    candidate = Path(str(filename))
+    if not candidate.is_absolute():
+        candidate = directory / candidate
+    return (candidate,)
+
+
+def _cache_path_candidate_paths(entry: dict[str, Any]) -> tuple[Path, ...]:
+    """Return legacy cache_path-derived transcript candidates."""
+    cache_path = entry.get("cache_path")
+    if not cache_path:
+        return ()
+    return (Path(str(cache_path)).expanduser(),)
+
+
+def _run_id_candidate_paths(entry: dict[str, Any], directory: Path) -> tuple[Path, ...]:
+    """Return candidate transcript paths derived from the run id."""
+    run_id = entry.get("run_id")
+    if not run_id:
+        return ()
+    paths: list[Path] = []
+    for variant in _run_id_variants(str(run_id)):
+        name = variant if variant.endswith(JSONL_SUFFIX) else f"{variant}{JSONL_SUFFIX}"
+        paths.append(directory / name)
+    return tuple(paths)
+
+
+def _run_id_variants(run_id: str) -> set[str]:
+    """Return plausible filename stems derived from a run id."""
+    variants = {run_id}
+    if run_id.startswith(_RUN_ID_PREFIX):
+        suffix = run_id[len(_RUN_ID_PREFIX) :]
+        if suffix:
+            variants.update({suffix, f"run-{suffix}"})
+        variants.add(f"run-{run_id}")
+    else:
+        variants.update({f"run-{run_id}", _RUN_ID_PREFIX + run_id})
+    return variants
+
+
+def transcript_path_candidates(entry: dict[str, Any], cache_dir: Path | None = None) -> list[Path]:
+    """Return possible transcript file locations for a manifest entry."""
+    directory = ensure_cache_dir(cache_dir)
+    return list(_iter_candidate_paths(entry, directory))
+
+
+def resolve_transcript_path(entry: dict[str, Any], cache_dir: Path | None = None) -> Path:
+    """Resolve the cached transcript path for a manifest entry or raise informative errors."""
+    candidates = transcript_path_candidates(entry, cache_dir)
+    if not candidates:
+        raise FileNotFoundError("Cached transcript path missing from manifest.")
+
+    for candidate in candidates:
+        if candidate.exists():
+            return candidate
+
+    raise FileNotFoundError(f"Cached transcript file not found: {candidates[0]}")
+
+
+def _manifest_sort_key(entry: dict[str, Any]) -> datetime:
+    """Return a datetime for ordering manifest rows, defaulting to the distant past."""
+    for key in ("started_at", "created_at"):
+        dt = _coerce_datetime(entry.get(key))
+        if dt is not None:
+            return dt
+    return datetime.min.replace(tzinfo=timezone.utc)
+
+
 def ensure_cache_dir(cache_dir: Path | None = None) -> Path:
     """Ensure the cache directory exists and return it."""
     directory = cache_dir or DEFAULT_CACHE_ROOT
@@ -88,15 +215,17 @@ def manifest_path(cache_dir: Path | None = None) -> Path:
 
 
 def _parse_iso(ts: str | None) -> datetime | None:
+    """Parse metadata timestamps that may use the legacy Z suffix."""
     if not ts:
         return None
     try:
-        return datetime.fromisoformat(ts.replace("Z", "+00:00"))
+        return datetime.fromisoformat(ts.replace("Z", UTC_OFFSET_SUFFIX))
     except Exception:
         return None
 
 
 def _load_manifest_entries(cache_dir: Path | None = None) -> list[dict[str, Any]]:
+    """Read manifest entries from disk, returning an empty list when missing."""
     path = manifest_path(cache_dir)
     entries: list[dict[str, Any]] = []
     if not path.exists():
@@ -124,14 +253,25 @@ def _json_default(value: Any) -> Any:
     return repr(value)
 
 
-def _write_manifest(
-    entries: Iterable[dict[str, Any]], cache_dir: Path | None = None
-) -> None:
+def _write_manifest(entries: Iterable[dict[str, Any]], cache_dir: Path | None = None) -> None:
+    """Atomically write manifest entries back to disk."""
     path = manifest_path(cache_dir)
-    with path.open("w", encoding="utf-8") as fh:
+    tmp_path = path.with_name(f"{path.name}.tmp")
+    with tmp_path.open("w", encoding="utf-8") as fh:
         for entry in entries:
             fh.write(json.dumps(entry, ensure_ascii=False, default=_json_default))
             fh.write("\n")
+    tmp_path.replace(path)
+
+
+def load_manifest_entries(cache_dir: Path | None = None) -> list[dict[str, Any]]:
+    """Public wrapper around manifest loading for downstream tooling."""
+    return _load_manifest_entries(cache_dir)
+
+
+def write_manifest(entries: Iterable[dict[str, Any]], cache_dir: Path | None = None) -> None:
+    """Persist manifest entries atomically."""
+    _write_manifest(entries, cache_dir)
 
 
 def store_transcript(
@@ -141,10 +281,38 @@ def store_transcript(
 ) -> TranscriptStoreResult:
     """Persist a transcript to disk and update the manifest."""
     directory = ensure_cache_dir(cache_dir)
-    filename = f"run-{payload.run_id}.jsonl"
+    filename = _normalise_run_filename(payload.run_id)
     transcript_path = directory / filename
 
-    meta_line = {
+    meta_line = _build_meta_line(payload)
+    transcript_path = _write_transcript_file(transcript_path, filename, meta_line, payload.events)
+    size_bytes = _safe_file_size(transcript_path)
+    manifest_entry = _build_manifest_entry(payload, transcript_path.name, size_bytes)
+    if transcript_path.parent != directory:
+        manifest_entry["cache_path"] = str(transcript_path)
+
+    existing_entries = _load_manifest_entries(directory)
+    existing_entries.append(manifest_entry)
+    _write_manifest(existing_entries, directory)
+
+    return TranscriptStoreResult(
+        path=transcript_path,
+        manifest_entry=manifest_entry,
+        pruned_entries=[],
+    )
+
+
+def _normalise_run_filename(run_id: str) -> str:
+    """Ensure cached run filenames always end with .jsonl."""
+    run_basename = run_id.rstrip()
+    if run_basename.endswith(JSONL_SUFFIX):
+        run_basename = run_basename[: -len(JSONL_SUFFIX)]
+    return f"{run_basename}{JSONL_SUFFIX}"
+
+
+def _build_meta_line(payload: TranscriptPayload) -> dict[str, Any]:
+    """Return the metadata header stored at the top of transcript files."""
+    return {
         "type": "meta",
         "run_id": payload.run_id,
         "agent_id": payload.agent_id,
@@ -160,11 +328,20 @@ def store_transcript(
         "source": payload.source,
     }
 
-    def _write_transcript(path: Path) -> None:
-        with path.open("w", encoding="utf-8") as fh:
+
+def _write_transcript_file(
+    path: Path,
+    filename: str,
+    meta_line: dict[str, Any],
+    events: list[dict[str, Any]],
+) -> Path:
+    """Persist the transcript JSONL file, falling back to cwd when necessary."""
+
+    def _write(target: Path) -> None:
+        with target.open("w", encoding="utf-8") as fh:
             fh.write(json.dumps(meta_line, ensure_ascii=False, default=_json_default))
             fh.write("\n")
-            for event in payload.events:
+            for event in events:
                 fh.write(
                     json.dumps(
                         {"type": "event", "event": event},
@@ -175,34 +352,62 @@ def store_transcript(
                 fh.write("\n")
 
     try:
-        _write_transcript(transcript_path)
+        _write(path)
+        return path
     except PermissionError:
-        directory = _fallback_cache_dir()
-        transcript_path = directory / filename
-        _write_transcript(transcript_path)
+        fallback_dir = _fallback_cache_dir()
+        fallback_path = fallback_dir / filename
+        _write(fallback_path)
+        return fallback_path
+
+
+def _safe_file_size(path: Path) -> int:
+    """Return the file size, tolerating missing paths."""
+    try:
+        return path.stat().st_size
+    except FileNotFoundError:
+        return 0
+
 
-    size_bytes = transcript_path.stat().st_size
-    manifest_entry = {
+def _build_manifest_entry(payload: TranscriptPayload, filename: str, size_bytes: int) -> dict[str, Any]:
+    """Generate the manifest row corresponding to a stored transcript."""
+    entry: dict[str, Any] = {
         "run_id": payload.run_id,
         "agent_id": payload.agent_id,
         "agent_name": payload.agent_name,
-        "created_at": payload.created_at.isoformat(),
-        "cache_path": str(transcript_path),
+        "started_at": _timestamp_to_iso(payload.started_at) or payload.created_at.isoformat(),
+        "finished_at": _timestamp_to_iso(payload.finished_at),
+        "duration_seconds": _compute_duration_seconds(payload.started_at, payload.finished_at),
        "size_bytes": size_bytes,
+        "filename": filename,
         "retained": True,
-        "source": payload.source,
-        "server_run_id": payload.server_run_id,
+        "model": payload.model,
     }
 
-    existing_entries = _load_manifest_entries(directory)
-    existing_entries.append(manifest_entry)
-    _write_manifest(existing_entries, directory)
+    api_url = payload.meta.get("api_url")
+    if api_url:
+        entry["api_url"] = api_url
 
-    return TranscriptStoreResult(
-        path=transcript_path,
-        manifest_entry=manifest_entry,
-        pruned_entries=[],
-    )
+    if entry["duration_seconds"] is None:
+        entry["duration_seconds"] = _coerce_duration_hint(payload.meta.get("final_duration_seconds"))
+
+    if entry.get("finished_at") is None and entry.get("started_at") and entry.get("duration_seconds") is not None:
+        start_dt = _coerce_datetime(entry["started_at"])
+        if start_dt is not None:
+            finished_dt = start_dt + timedelta(seconds=int(entry["duration_seconds"]))
+            entry["finished_at"] = finished_dt.isoformat().replace(UTC_OFFSET_SUFFIX, "Z")
+
+    return entry
+
+
+def _coerce_duration_hint(value: Any) -> int | None:
+    """Convert loose duration hints to whole seconds."""
+    try:
+        if value is None:
+            return None
+        return int(round(float(value)))
+    except Exception:
+        return None
 
 
 def latest_manifest_entry(cache_dir: Path | None = None) -> dict[str, Any] | None:
@@ -210,11 +415,7 @@ def latest_manifest_entry(cache_dir: Path | None = None) -> dict[str, Any] | None:
     entries = _load_manifest_entries(cache_dir)
     if not entries:
         return None
-    return max(
-        entries,
-        key=lambda e: _parse_iso(e.get("created_at"))
-        or datetime.min.replace(tzinfo=timezone.utc),
-    )
+    return max(entries, key=_manifest_sort_key)
 
 
 def resolve_manifest_entry(
@@ -237,21 +438,14 @@ def export_transcript(
 ) -> Path:
     """Copy a cached transcript to the requested destination path."""
     directory = ensure_cache_dir(cache_dir)
-    entry = (
-        resolve_manifest_entry(run_id, directory)
-        if run_id
-        else latest_manifest_entry(directory)
-    )
+    entry = resolve_manifest_entry(run_id, directory) if run_id else latest_manifest_entry(directory)
     if entry is None:
         raise FileNotFoundError("No cached transcripts available for export.")
 
-    cache_path = entry.get("cache_path")
-    if not cache_path:
-        raise FileNotFoundError("Cached transcript path missing from manifest.")
-
-    cache_file = Path(cache_path)
-    if not cache_file.exists():
-        raise FileNotFoundError(f"Cached transcript file not found: {cache_file}")
+    try:
+        cache_file = resolve_transcript_path(entry, directory)
+    except FileNotFoundError as exc:
+        raise FileNotFoundError(str(exc)) from exc
 
 
     destination.parent.mkdir(parents=True, exist_ok=True)
@@ -259,9 +453,7 @@ def export_transcript(
         lines = cache_file.read_text(encoding="utf-8").splitlines()
         records = [json.loads(line) for line in lines if line.strip()]
     except json.JSONDecodeError as exc:
-        raise FileNotFoundError(
-            f"Cached transcript file is corrupted: {cache_file}"
-        ) from exc
+        raise FileNotFoundError(f"Cached transcript file is corrupted: {cache_file}") from exc
 
     with destination.open("w", encoding="utf-8") as fh:
         for idx, record in enumerate(records):
@@ -275,14 +467,20 @@
 
 def suggest_filename(entry: dict[str, Any] | None = None) -> str:
     """Return a friendly filename suggestion for exporting a transcript."""
-    run_id = entry.get("run_id") if entry else uuid.uuid4().hex
-    created_at = (
-        entry.get("created_at") if entry else datetime.now(timezone.utc).isoformat()
-    )
-    timestamp = (
-        created_at.replace(":", "").replace("-", "").replace("T", "_").split("+")[0]
-    )
-    return f"aip-run-{timestamp}-{run_id}.jsonl"
+    run_id = entry.get("run_id") if entry else None
+    if not run_id:
+        run_id = generate_run_id()
+
+    timestamp_source = None
+    if entry:
+        timestamp_source = entry.get("started_at") or entry.get("created_at")
+
+    if not timestamp_source:
+        timestamp_source = datetime.now(timezone.utc).isoformat()
+
+    timestamp = str(timestamp_source).replace(":", "").replace("-", "").replace("T", "_").split("+")[0]
+    safe_run_id = str(run_id).replace("/", "-").replace(" ", "-")
+    return f"aip-run-{timestamp}-{safe_run_id}{JSONL_SUFFIX}"
 
 
 def build_payload(
@@ -313,7 +511,7 @@ def build_payload(
         created_at=datetime.now(timezone.utc),
         source=source,
         meta=meta,
-        run_id=uuid.uuid4().hex,
+        run_id=generate_run_id(),
     )
 
 
glaip_sdk/cli/transcript/capture.py
@@ -8,8 +8,13 @@ from __future__ import annotations
 
 import json
 from dataclasses import dataclass
+from io import StringIO
 from typing import Any
 
+from rich.console import Console
+
+from glaip_sdk.cli.auth import resolve_api_url_from_context
+from glaip_sdk.cli.context import get_ctx_value
 from glaip_sdk.cli.transcript.cache import (
     TranscriptPayload,
     TranscriptStoreResult,
@@ -18,7 +23,7 @@ from glaip_sdk.cli.transcript.cache import (
 from glaip_sdk.cli.transcript.cache import (
     build_payload as build_transcript_payload,
 )
-from glaip_sdk.utils.rendering.renderer.progress import format_tool_title
+from glaip_sdk.utils.rendering.layout.progress import format_tool_title
 
 
 @dataclass(slots=True)
@@ -65,11 +70,7 @@ def compute_finished_at(renderer: Any) -> float | None:
 
     if started_at is None:
         stream_processor = getattr(renderer, "stream_processor", None)
-        started_at = (
-            getattr(stream_processor, "streaming_started_at", None)
-            if stream_processor is not None
-            else None
-        )
+        started_at = getattr(stream_processor, "streaming_started_at", None) if stream_processor is not None else None
     if started_at is None or duration is None:
         return None
     try:
@@ -78,9 +79,7 @@
         return None
 
 
-def extract_server_run_id(
-    meta: dict[str, Any], events: list[dict[str, Any]]
-) -> str | None:
+def extract_server_run_id(meta: dict[str, Any], events: list[dict[str, Any]]) -> str | None:
     """Derive a server-side run identifier from renderer metadata."""
     run_id = meta.get("run_id") or meta.get("id")
     if run_id:
@@ -107,9 +106,7 @@ def _coerce_meta(meta: Any) -> dict[str, Any]:
     return {"value": coerce_result_text(meta)}
 
 
-def register_last_transcript(
-    ctx: Any, payload: TranscriptPayload, store_result: TranscriptStoreResult
-) -> None:
+def register_last_transcript(ctx: Any, payload: TranscriptPayload, store_result: TranscriptStoreResult) -> None:
     """Persist last-run transcript references onto the Click context."""
     ctx_obj = getattr(ctx, "obj", None)
     if not isinstance(ctx_obj, dict):
@@ -119,6 +116,15 @@ def register_last_transcript(
     ctx_obj["_last_transcript_path"] = str(store_result.path)
 
 
+def _resolve_api_url(ctx: Any) -> str | None:
+    """Resolve API URL from context or account store (CLI/palette ignores env creds)."""
+    return resolve_api_url_from_context(
+        ctx,
+        get_api_url=lambda c: get_ctx_value(c, "api_url"),
+        get_account_name=lambda c: get_ctx_value(c, "account_name"),
+    )
+
+
 def _extract_step_summaries(renderer: Any) -> list[dict[str, Any]]:
     """Return lightweight step summaries for the transcript viewer."""
     steps = getattr(renderer, "steps", None)
@@ -172,6 +178,38 @@ def _format_step_display_name(name: str) -> str:
     return name
 
 
+def _extract_step_summary_lines(renderer: Any) -> list[str]:
+    """Render the live steps summary to plain text lines."""
+    if not hasattr(renderer, "_render_steps_text"):
+        return []
+
+    try:
+        renderable = renderer._render_steps_text()
+    except Exception:
+        return []
+
+    buffer = StringIO()
+    console = Console(file=buffer, record=True, force_terminal=False, width=120)
+    try:
+        console.print(renderable)
+    except Exception:
+        return []
+
+    text = console.export_text() or buffer.getvalue()
+    lines = [line.rstrip() for line in text.splitlines()]
+    half = len(lines) // 2
+    if half and lines[:half] == lines[half : half * 2]:
+        return lines[:half]
+    start = 0
+    prefixes = ("🤖", "🔧", "💭", "├", "└", "│", "•")
+    for idx, line in enumerate(lines):
+        if line.lstrip().startswith(prefixes):
+            start = idx
+            break
+    trimmed = lines[start:]
+    return [line for line in trimmed if line]
+
+
 def _collect_renderer_outputs(
     renderer: Any, final_result: Any
 ) -> tuple[
@@ -211,13 +249,23 @@ def _derive_transcript_meta(
     if step_summaries:
         meta["transcript_steps"] = step_summaries
 
+    step_lines = _extract_step_summary_lines(renderer)
+    if step_lines:
+        meta["transcript_step_lines"] = step_lines
+
     stream_processor = getattr(renderer, "stream_processor", None)
     stream_started_at = (
-        getattr(stream_processor, "streaming_started_at", None)
-        if stream_processor is not None
-        else None
+        getattr(stream_processor, "streaming_started_at", None) if stream_processor is not None else None
     )
     finished_at = compute_finished_at(renderer)
+    state = getattr(renderer, "state", None)
+    if state is not None:
+        duration_hint = getattr(state, "final_duration_seconds", None)
+        if duration_hint is not None:
+            try:
+                meta["final_duration_seconds"] = float(duration_hint)
+            except Exception:
+                pass
     model_name = meta.get("model") or model
     return meta, stream_started_at, finished_at, model_name
 
@@ -236,16 +284,19 @@ def store_transcript_for_session(
     if not hasattr(renderer, "get_transcript_events"):
         return None
 
-    events, aggregated_output, final_output = _collect_renderer_outputs(
-        renderer, final_result
-    )
+    events, aggregated_output, final_output = _collect_renderer_outputs(renderer, final_result)
 
     if not (events or aggregated_output or final_output):
         return None
 
-    meta, stream_started_at, finished_at, model_name = _derive_transcript_meta(
-        renderer, model
-    )
+    meta, stream_started_at, finished_at, model_name = _derive_transcript_meta(renderer, model)
+
+    try:
+        api_url = _resolve_api_url(ctx)
+    except Exception:
+        api_url = None
+    if api_url:
+        meta["api_url"] = api_url
 
     payload: TranscriptPayload = build_transcript_payload(
         events=events,