libretto 0.5.5 → 0.5.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (101)
  1. package/README.md +23 -10
  2. package/README.template.md +23 -10
  3. package/dist/cli/cli.js +10 -0
  4. package/dist/cli/commands/ai.js +77 -2
  5. package/dist/cli/commands/browser.js +71 -6
  6. package/dist/cli/commands/execution.js +101 -44
  7. package/dist/cli/commands/setup.js +376 -0
  8. package/dist/cli/commands/snapshot.js +2 -2
  9. package/dist/cli/commands/status.js +62 -0
  10. package/dist/cli/core/{snapshot-api-config.js → ai-model.js} +81 -7
  11. package/dist/cli/core/api-snapshot-analyzer.js +7 -5
  12. package/dist/cli/core/browser.js +39 -26
  13. package/dist/cli/core/{ai-config.js → config.js} +13 -79
  14. package/dist/cli/core/context.js +1 -25
  15. package/dist/cli/core/deploy-artifact.js +121 -61
  16. package/dist/cli/core/readonly-exec.js +231 -0
  17. package/dist/{shared/llm/client.js → cli/core/resolve-model.js} +4 -68
  18. package/dist/cli/core/session.js +44 -0
  19. package/dist/cli/core/skill-version.js +73 -0
  20. package/dist/cli/core/telemetry.js +1 -54
  21. package/dist/cli/index.js +1 -7
  22. package/dist/cli/router.js +4 -4
  23. package/dist/cli/workers/run-integration-runtime.js +17 -13
  24. package/dist/cli/workers/run-integration-worker-protocol.js +3 -2
  25. package/dist/index.d.ts +2 -4
  26. package/dist/index.js +2 -2
  27. package/dist/runtime/extract/extract.d.ts +2 -2
  28. package/dist/runtime/extract/extract.js +4 -2
  29. package/dist/runtime/extract/index.d.ts +1 -1
  30. package/dist/runtime/recovery/agent.d.ts +2 -3
  31. package/dist/runtime/recovery/agent.js +5 -3
  32. package/dist/runtime/recovery/errors.d.ts +2 -3
  33. package/dist/runtime/recovery/errors.js +4 -2
  34. package/dist/runtime/recovery/index.d.ts +1 -2
  35. package/dist/runtime/recovery/recovery.d.ts +2 -3
  36. package/dist/runtime/recovery/recovery.js +3 -3
  37. package/dist/shared/debug/pause.js +4 -21
  38. package/dist/shared/run/api.d.ts +2 -0
  39. package/dist/shared/run/browser.d.ts +4 -1
  40. package/dist/shared/run/browser.js +5 -3
  41. package/dist/shared/state/index.d.ts +1 -1
  42. package/dist/shared/state/index.js +2 -0
  43. package/dist/shared/state/session-state.d.ts +10 -1
  44. package/dist/shared/state/session-state.js +3 -0
  45. package/dist/shared/workflow/workflow.d.ts +2 -1
  46. package/dist/shared/workflow/workflow.js +16 -9
  47. package/package.json +17 -16
  48. package/scripts/postinstall.mjs +13 -11
  49. package/scripts/skills-libretto.mjs +14 -4
  50. package/skills/AGENTS.md +11 -0
  51. package/skills/libretto/SKILL.md +30 -9
  52. package/skills/libretto/references/auth-profiles.md +1 -1
  53. package/skills/libretto/references/code-generation-rules.md +3 -3
  54. package/skills/libretto/references/configuration-file-reference.md +11 -6
  55. package/skills/libretto-readonly/SKILL.md +95 -0
  56. package/src/cli/cli.ts +10 -0
  57. package/src/cli/commands/ai.ts +111 -1
  58. package/src/cli/commands/browser.ts +81 -7
  59. package/src/cli/commands/execution.ts +128 -61
  60. package/src/cli/commands/setup.ts +499 -0
  61. package/src/cli/commands/snapshot.ts +2 -2
  62. package/src/cli/commands/status.ts +77 -0
  63. package/src/cli/core/{snapshot-api-config.ts → ai-model.ts} +154 -14
  64. package/src/cli/core/api-snapshot-analyzer.ts +7 -5
  65. package/src/cli/core/browser.ts +45 -26
  66. package/src/cli/core/{ai-config.ts → config.ts} +13 -108
  67. package/src/cli/core/context.ts +1 -45
  68. package/src/cli/core/deploy-artifact.ts +141 -71
  69. package/src/cli/core/readonly-exec.ts +284 -0
  70. package/src/{shared/llm/client.ts → cli/core/resolve-model.ts} +3 -85
  71. package/src/cli/core/session.ts +62 -2
  72. package/src/cli/core/skill-version.ts +93 -0
  73. package/src/cli/core/telemetry.ts +0 -52
  74. package/src/cli/index.ts +0 -6
  75. package/src/cli/router.ts +4 -4
  76. package/src/cli/workers/run-integration-runtime.ts +16 -16
  77. package/src/cli/workers/run-integration-worker-protocol.ts +2 -1
  78. package/src/index.ts +1 -7
  79. package/src/runtime/extract/extract.ts +6 -5
  80. package/src/runtime/recovery/agent.ts +5 -4
  81. package/src/runtime/recovery/errors.ts +4 -3
  82. package/src/runtime/recovery/recovery.ts +4 -4
  83. package/src/shared/debug/pause.ts +4 -23
  84. package/src/shared/run/browser.ts +5 -1
  85. package/src/shared/state/index.ts +2 -0
  86. package/src/shared/state/session-state.ts +3 -0
  87. package/src/shared/workflow/workflow.ts +24 -13
  88. package/dist/cli/commands/init.js +0 -286
  89. package/dist/cli/commands/logs.js +0 -117
  90. package/dist/shared/llm/ai-sdk-adapter.d.ts +0 -22
  91. package/dist/shared/llm/ai-sdk-adapter.js +0 -49
  92. package/dist/shared/llm/client.d.ts +0 -13
  93. package/dist/shared/llm/index.d.ts +0 -5
  94. package/dist/shared/llm/index.js +0 -6
  95. package/dist/shared/llm/types.d.ts +0 -67
  96. package/dist/shared/llm/types.js +0 -0
  97. package/src/cli/commands/init.ts +0 -331
  98. package/src/cli/commands/logs.ts +0 -128
  99. package/src/shared/llm/ai-sdk-adapter.ts +0 -81
  100. package/src/shared/llm/index.ts +0 -3
  101. package/src/shared/llm/types.ts +0 -63
@@ -0,0 +1,62 @@
1
+ import { LIBRETTO_CONFIG_PATH } from "../core/context.js";
2
+ import { resolveAiSetupStatus } from "../core/ai-model.js";
3
+ import { listRunningSessions } from "../core/session.js";
4
+ import { SimpleCLI } from "../framework/simple-cli.js";
5
+ function printAiStatus(status) {
6
+ console.log("AI configuration:");
7
+ switch (status.kind) {
8
+ case "ready":
9
+ console.log(` \u2713 Model: ${status.model}`);
10
+ if (status.source === "config") {
11
+ console.log(` Config: ${LIBRETTO_CONFIG_PATH}`);
12
+ } else {
13
+ console.log(` Source: ${status.source}`);
14
+ }
15
+ console.log(
16
+ " To change: npx libretto ai configure openai | anthropic | gemini | vertex"
17
+ );
18
+ break;
19
+ case "configured-missing-credentials":
20
+ console.log(
21
+ ` \u2717 ${status.provider} is configured (model: ${status.model}), but credentials are missing.`
22
+ );
23
+ console.log(" Run `npx libretto setup` to repair.");
24
+ break;
25
+ case "invalid-config":
26
+ console.log(" \u2717 Config is invalid:");
27
+ for (const line of status.message.split("\n")) {
28
+ console.log(` ${line}`);
29
+ }
30
+ console.log(" Run `npx libretto setup` to reconfigure.");
31
+ break;
32
+ case "unconfigured":
33
+ console.log(" \u2717 No AI model configured.");
34
+ console.log(
35
+ " Run `npx libretto setup` or `npx libretto ai configure` to set up."
36
+ );
37
+ break;
38
+ }
39
+ }
40
+ function printOpenSessions(sessions) {
41
+ console.log("\nOpen sessions:");
42
+ if (sessions.length === 0) {
43
+ console.log(" No open sessions.");
44
+ return;
45
+ }
46
+ for (const session of sessions) {
47
+ const statusLabel = session.status ? ` [${session.status}]` : "";
48
+ const endpoint = `http://127.0.0.1:${session.port}`;
49
+ console.log(` ${session.session}${statusLabel} \u2014 ${endpoint}`);
50
+ }
51
+ }
52
+ const statusCommand = SimpleCLI.command({
53
+ description: "Show workspace status: AI configuration and open sessions"
54
+ }).input(SimpleCLI.input({ positionals: [], named: {} })).handle(async () => {
55
+ const aiStatus = resolveAiSetupStatus();
56
+ printAiStatus(aiStatus);
57
+ const sessions = listRunningSessions();
58
+ printOpenSessions(sessions);
59
+ });
60
+ export {
61
+ statusCommand
62
+ };
@@ -1,17 +1,34 @@
1
1
  import { existsSync, readFileSync } from "node:fs";
2
2
  import { dirname, join, resolve } from "node:path";
3
- import { readAiConfig } from "./ai-config.js";
3
+ import { readAiConfig } from "./config.js";
4
4
  import { LIBRETTO_CONFIG_PATH, REPO_ROOT } from "./context.js";
5
5
  import {
6
6
  hasProviderCredentials,
7
7
  parseModel
8
- } from "../../shared/llm/client.js";
8
+ } from "./resolve-model.js";
9
9
  const DEFAULT_SNAPSHOT_MODELS = {
10
10
  openai: "openai/gpt-5.4",
11
11
  anthropic: "anthropic/claude-sonnet-4-6",
12
12
  google: "google/gemini-3-flash-preview",
13
- vertex: "vertex/gemini-2.5-pro"
13
+ vertex: "vertex/gemini-2.5-flash"
14
14
  };
15
+ function detectProviderEnvVar(provider, env = process.env) {
16
+ switch (provider) {
17
+ case "openai":
18
+ return env.OPENAI_API_KEY?.trim() ? "OPENAI_API_KEY" : null;
19
+ case "anthropic":
20
+ return env.ANTHROPIC_API_KEY?.trim() ? "ANTHROPIC_API_KEY" : null;
21
+ case "google":
22
+ if (env.GEMINI_API_KEY?.trim()) return "GEMINI_API_KEY";
23
+ if (env.GOOGLE_GENERATIVE_AI_API_KEY?.trim())
24
+ return "GOOGLE_GENERATIVE_AI_API_KEY";
25
+ return null;
26
+ case "vertex":
27
+ if (env.GOOGLE_CLOUD_PROJECT?.trim()) return "GOOGLE_CLOUD_PROJECT";
28
+ if (env.GCLOUD_PROJECT?.trim()) return "GCLOUD_PROJECT";
29
+ return null;
30
+ }
31
+ }
15
32
  class SnapshotApiUnavailableError extends Error {
16
33
  constructor(message) {
17
34
  super(message);
@@ -49,7 +66,7 @@ function noSnapshotApiConfiguredMessage() {
49
66
  return [
50
67
  "Failed to analyze snapshot because no snapshot analyzer is configured.",
51
68
  `Add OPENAI_API_KEY, ANTHROPIC_API_KEY, GEMINI_API_KEY or GOOGLE_GENERATIVE_AI_API_KEY, or GOOGLE_CLOUD_PROJECT to .env or as a shell environment variable, or choose a default model with \`${defaultModelCommandLine()}\`.`,
52
- "For more info, run `npx libretto init`."
69
+ "For more info, run `npx libretto setup`."
53
70
  ].join(" ");
54
71
  }
55
72
  function missingProviderSnapshotMessage(selection) {
@@ -57,7 +74,7 @@ function missingProviderSnapshotMessage(selection) {
57
74
  return [
58
75
  `Failed to analyze snapshot because ${selection.provider} is configured${configuredSource}, but ${providerMissingCredentialSummary(selection.provider)}.`,
59
76
  providerSetupSentence(selection.provider),
60
- "For more info, run `npx libretto init`."
77
+ "For more info, run `npx libretto setup`."
61
78
  ].join(" ");
62
79
  }
63
80
  function readWorktreeEnvPath() {
@@ -128,11 +145,12 @@ function inferAutoSnapshotModel() {
128
145
  "vertex"
129
146
  ];
130
147
  for (const provider of providersInPriorityOrder) {
131
- if (!hasProviderCredentials(provider)) continue;
148
+ const envVar = detectProviderEnvVar(provider);
149
+ if (!envVar) continue;
132
150
  return {
133
151
  model: DEFAULT_SNAPSHOT_MODELS[provider],
134
152
  provider,
135
- source: `env:auto-${provider}`
153
+ source: `env:${envVar}`
136
154
  };
137
155
  }
138
156
  return null;
@@ -164,11 +182,67 @@ function resolveSnapshotApiModelOrThrow(config = readAiConfig()) {
164
182
  function isSnapshotApiUnavailableError(error) {
165
183
  return error instanceof SnapshotApiUnavailableError;
166
184
  }
185
+ function readAiConfigSafely(configPath) {
186
+ try {
187
+ return { ok: true, config: readAiConfig(configPath) };
188
+ } catch (err) {
189
+ return {
190
+ ok: false,
191
+ message: err instanceof Error ? err.message : String(err)
192
+ };
193
+ }
194
+ }
195
+ function resolveAiSetupStatus(configPath = LIBRETTO_CONFIG_PATH) {
196
+ loadSnapshotEnv();
197
+ const configResult = readAiConfigSafely(configPath);
198
+ if (!configResult.ok) {
199
+ return { kind: "invalid-config", message: configResult.message };
200
+ }
201
+ if (configResult.config) {
202
+ let selection;
203
+ try {
204
+ selection = resolveSnapshotApiModel(configResult.config);
205
+ } catch (err) {
206
+ return {
207
+ kind: "invalid-config",
208
+ message: err instanceof Error ? err.message : String(err)
209
+ };
210
+ }
211
+ if (!selection) {
212
+ return { kind: "unconfigured" };
213
+ }
214
+ if (hasProviderCredentials(selection.provider)) {
215
+ return {
216
+ kind: "ready",
217
+ model: selection.model,
218
+ provider: selection.provider,
219
+ source: selection.source
220
+ };
221
+ }
222
+ return {
223
+ kind: "configured-missing-credentials",
224
+ model: selection.model,
225
+ provider: selection.provider
226
+ };
227
+ }
228
+ const envSelection = resolveSnapshotApiModel(null);
229
+ if (envSelection && hasProviderCredentials(envSelection.provider)) {
230
+ return {
231
+ kind: "ready",
232
+ model: envSelection.model,
233
+ provider: envSelection.provider,
234
+ source: envSelection.source
235
+ };
236
+ }
237
+ return { kind: "unconfigured" };
238
+ }
167
239
  export {
240
+ DEFAULT_SNAPSHOT_MODELS,
168
241
  SnapshotApiUnavailableError,
169
242
  isSnapshotApiUnavailableError,
170
243
  loadSnapshotEnv,
171
244
  parseDotEnvAssignment,
245
+ resolveAiSetupStatus,
172
246
  resolveSnapshotApiModel,
173
247
  resolveSnapshotApiModelOrThrow
174
248
  };
@@ -1,13 +1,14 @@
1
1
  import { readFileSync } from "node:fs";
2
- import { createLLMClient } from "../../shared/llm/client.js";
2
+ import { generateObject } from "ai";
3
+ import { resolveModel } from "./resolve-model.js";
3
4
  import {
4
5
  InterpretResultSchema,
5
6
  buildInlinePromptSelection,
6
7
  getMimeType,
7
8
  readFileAsBase64
8
9
  } from "./snapshot-analyzer.js";
9
- import { readAiConfig } from "./ai-config.js";
10
- import { resolveSnapshotApiModelOrThrow } from "./snapshot-api-config.js";
10
+ import { readAiConfig } from "./config.js";
11
+ import { resolveSnapshotApiModelOrThrow } from "./ai-model.js";
11
12
  async function runApiInterpret(args, logger, configuredAi = readAiConfig()) {
12
13
  const selection = resolveSnapshotApiModelOrThrow(configuredAi);
13
14
  logger.info("api-interpret-start", {
@@ -41,8 +42,9 @@ async function runApiInterpret(args, logger, configuredAi = readAiConfig()) {
41
42
  const imageBase64 = readFileAsBase64(args.pngPath);
42
43
  const imageMimeType = getMimeType(args.pngPath);
43
44
  const imageBytes = Buffer.from(imageBase64, "base64");
44
- const client = createLLMClient(selection.model);
45
- const result = await client.generateObjectFromMessages({
45
+ const model = await resolveModel(selection.model);
46
+ const { object: result } = await generateObject({
47
+ model,
46
48
  schema: InterpretResultSchema,
47
49
  messages: [
48
50
  {
@@ -19,10 +19,11 @@ import {
19
19
  getSessionNetworkLogPath,
20
20
  PROFILES_DIR
21
21
  } from "./context.js";
22
- import { readLibrettoConfig } from "./ai-config.js";
22
+ import { readLibrettoConfig } from "./config.js";
23
23
  import {
24
24
  assertSessionAvailableForStart,
25
25
  clearSessionState,
26
+ isPidRunning,
26
27
  listSessionsWithStateFile,
27
28
  readSessionStateOrThrow,
28
29
  logFileForSession,
@@ -293,8 +294,16 @@ async function runOpen(rawUrl, headed, session, logger, options) {
293
294
  const parsedUrl = normalizeUrl(rawUrl);
294
295
  const url = parsedUrl.href;
295
296
  const viewport = resolveViewport(options?.viewport, logger);
297
+ const accessMode = options?.accessMode ?? "write-access";
296
298
  const windowPosition = headed ? resolveWindowPosition(logger) : void 0;
297
- logger.info("open-start", { url, headed, session, viewport, windowPosition });
299
+ logger.info("open-start", {
300
+ url,
301
+ headed,
302
+ session,
303
+ viewport,
304
+ windowPosition,
305
+ accessMode
306
+ });
298
307
  assertSessionAvailableForStart(session, logger);
299
308
  const port = await pickFreePort();
300
309
  const runLogPath = logFileForSession(session);
@@ -523,6 +532,7 @@ await new Promise(() => {});
523
532
  session,
524
533
  startedAt: (/* @__PURE__ */ new Date()).toISOString(),
525
534
  status: "active",
535
+ mode: accessMode,
526
536
  viewport
527
537
  },
528
538
  logger
@@ -633,14 +643,6 @@ async function runClose(session, logger) {
633
643
  function waitForCloseSignalWindow(ms) {
634
644
  return new Promise((r) => setTimeout(r, ms));
635
645
  }
636
- function isPidRunning(pid) {
637
- try {
638
- process.kill(pid, 0);
639
- return true;
640
- } catch {
641
- return false;
642
- }
643
- }
644
646
  function sendSignalToProcessGroupOrPid(pid, signal, logger, session) {
645
647
  try {
646
648
  process.kill(pid, signal);
@@ -773,8 +775,8 @@ async function runCloseAll(logger, options) {
773
775
  console.log(`Force-killed ${forceKilled} session(s).`);
774
776
  }
775
777
  }
776
- async function runConnect(cdpUrl, session, logger) {
777
- logger.info("connect-start", { cdpUrl, session });
778
+ async function runConnect(cdpUrl, session, logger, accessMode = "write-access") {
779
+ logger.info("connect-start", { cdpUrl, session, accessMode });
778
780
  assertSessionAvailableForStart(session, logger);
779
781
  let parsedUrl;
780
782
  try {
@@ -784,28 +786,38 @@ async function runConnect(cdpUrl, session, logger) {
784
786
  [
785
787
  `Invalid CDP URL: ${cdpUrl}`,
786
788
  ``,
787
- `Expected an HTTP URL pointing to a Chrome DevTools Protocol endpoint, for example:`,
789
+ `Expected an HTTP or WebSocket URL pointing to a Chrome DevTools Protocol endpoint, for example:`,
788
790
  ` libretto connect http://127.0.0.1:9222`,
789
791
  ` libretto connect http://remote-host:9222`,
790
- ` libretto connect http://remote-host:9222/devtools/browser/<id>`
792
+ ` libretto connect http://remote-host:9222/devtools/browser/<id>`,
793
+ ` libretto connect ws://remote-host:9222/devtools/browser/<id>`,
794
+ ` libretto connect wss://remote-host/cdp-endpoint`
791
795
  ].join("\n")
792
796
  );
793
797
  }
794
798
  const endpoint = parsedUrl.href;
795
- const port = parsedUrl.port ? Number(parsedUrl.port) : parsedUrl.protocol === "https:" ? 443 : 80;
799
+ const isWebSocket = parsedUrl.protocol === "ws:" || parsedUrl.protocol === "wss:";
800
+ const port = parsedUrl.port ? Number(parsedUrl.port) : parsedUrl.protocol === "https:" || parsedUrl.protocol === "wss:" ? 443 : 80;
796
801
  console.log(
797
802
  `Connecting to CDP endpoint at ${endpoint} (session: ${session})...`
798
803
  );
799
- const versionUrl = `${parsedUrl.protocol}//${parsedUrl.host}/json/version`;
800
- try {
801
- const resp = await fetch(versionUrl);
802
- const versionInfo = await resp.json();
803
- logger.info("connect-version-ok", { versionUrl, versionInfo });
804
- } catch (err) {
805
- logger.error("connect-version-failed", { versionUrl, error: err });
806
- throw new Error(
807
- `Cannot reach CDP endpoint at ${versionUrl}. Make sure the target is running and accessible at ${parsedUrl.host}.`
808
- );
804
+ if (!isWebSocket) {
805
+ const versionUrl = `${parsedUrl.protocol}//${parsedUrl.host}/json/version`;
806
+ try {
807
+ const resp = await fetch(versionUrl);
808
+ const versionInfo = await resp.json();
809
+ logger.info("connect-version-ok", { versionUrl, versionInfo });
810
+ } catch (err) {
811
+ logger.error("connect-version-failed", { versionUrl, error: err });
812
+ throw new Error(
813
+ `Cannot reach CDP endpoint at ${versionUrl}. Make sure the target is running and accessible at ${parsedUrl.host}.`
814
+ );
815
+ }
816
+ } else {
817
+ logger.info("connect-skip-version-check", {
818
+ reason: "WebSocket-only endpoint, skipping HTTP version check",
819
+ endpoint
820
+ });
809
821
  }
810
822
  const browser = await tryConnectToCDP(endpoint, logger, 1e4);
811
823
  if (!browser) {
@@ -826,7 +838,8 @@ async function runConnect(cdpUrl, session, logger) {
826
838
  cdpEndpoint: endpoint,
827
839
  session,
828
840
  startedAt: (/* @__PURE__ */ new Date()).toISOString(),
829
- status: "active"
841
+ status: "active",
842
+ mode: accessMode
830
843
  },
831
844
  logger
832
845
  );
@@ -1,6 +1,7 @@
1
1
  import { existsSync, mkdirSync, readFileSync, writeFileSync } from "node:fs";
2
2
  import { dirname } from "node:path";
3
3
  import { z } from "zod";
4
+ import { SessionAccessModeSchema } from "../../shared/state/index.js";
4
5
  import { LIBRETTO_CONFIG_PATH } from "./context.js";
5
6
  const CURRENT_CONFIG_VERSION = 1;
6
7
  const AiConfigSchema = z.object({
@@ -19,27 +20,9 @@ const LibrettoConfigSchema = z.object({
19
20
  version: z.literal(CURRENT_CONFIG_VERSION),
20
21
  ai: AiConfigSchema.optional(),
21
22
  viewport: ViewportConfigSchema.optional(),
22
- windowPosition: WindowPositionConfigSchema.optional()
23
+ windowPosition: WindowPositionConfigSchema.optional(),
24
+ sessionMode: SessionAccessModeSchema.optional()
23
25
  }).passthrough();
24
- const DEFAULT_MODELS = {
25
- openai: "openai/gpt-5.4",
26
- anthropic: "anthropic/claude-sonnet-4-6",
27
- gemini: "google/gemini-3-flash-preview",
28
- vertex: "vertex/gemini-2.5-pro"
29
- };
30
- const PROVIDER_ALIASES = {
31
- claude: DEFAULT_MODELS.anthropic,
32
- google: DEFAULT_MODELS.gemini
33
- };
34
- const CONFIGURE_PROVIDERS = [
35
- "openai",
36
- "anthropic",
37
- "gemini",
38
- "vertex"
39
- ];
40
- function formatConfigureProviders(separator = " | ") {
41
- return CONFIGURE_PROVIDERS.join(separator);
42
- }
43
26
  function formatConfigIssues(error) {
44
27
  return error.issues.map((issue) => ` - ${issue.path.join(".") || "root"}: ${issue.message}`).join("\n");
45
28
  }
@@ -58,7 +41,8 @@ function formatExpectedConfigExample() {
58
41
  windowPosition: {
59
42
  x: 1600,
60
43
  y: 120
61
- }
44
+ },
45
+ sessionMode: "write-access"
62
46
  },
63
47
  null,
64
48
  2
@@ -73,10 +57,10 @@ ${detail}` : null,
73
57
  "Expected config example:",
74
58
  formatExpectedConfigExample(),
75
59
  "Notes:",
76
- ' - "ai", "viewport", and "windowPosition" are optional.',
60
+ ' - "ai", "viewport", "windowPosition", and "sessionMode" are optional.',
77
61
  ' - "ai.model" must be a provider/model string like "openai/gpt-5.4" or "anthropic/claude-sonnet-4-6".',
78
62
  "Fix the file to match this shape, or delete it and rerun:",
79
- ` npx libretto ai configure ${formatConfigureProviders()}`
63
+ ` npx libretto ai configure openai | anthropic | gemini | vertex`
80
64
  ].filter(Boolean).join("\n")
81
65
  );
82
66
  }
@@ -112,7 +96,12 @@ function readAiConfig(configPath = LIBRETTO_CONFIG_PATH) {
112
96
  return readLibrettoConfig(configPath).ai ?? null;
113
97
  }
114
98
  function writeAiConfig(model, configPath = LIBRETTO_CONFIG_PATH) {
115
- const librettoConfig = readLibrettoConfig(configPath);
99
+ let librettoConfig;
100
+ try {
101
+ librettoConfig = readLibrettoConfig(configPath);
102
+ } catch {
103
+ librettoConfig = { version: CURRENT_CONFIG_VERSION };
104
+ }
116
105
  const ai = AiConfigSchema.parse({
117
106
  model,
118
107
  updatedAt: (/* @__PURE__ */ new Date()).toISOString()
@@ -139,60 +128,6 @@ function clearAiConfig(configPath = LIBRETTO_CONFIG_PATH) {
139
128
  );
140
129
  return true;
141
130
  }
142
- function printAiConfig(config, configPath) {
143
- console.log(`Model: ${config.model}`);
144
- console.log(`Config file: ${configPath}`);
145
- console.log(`Updated at: ${config.updatedAt}`);
146
- }
147
- function resolveModelFromInput(input) {
148
- const trimmed = input.trim();
149
- if (!trimmed) return null;
150
- if (trimmed.includes("/")) return trimmed;
151
- const normalized = trimmed.toLowerCase();
152
- return DEFAULT_MODELS[normalized] ?? PROVIDER_ALIASES[normalized] ?? null;
153
- }
154
- function runAiConfigure(input, options = {}) {
155
- const configureCommandName = options.configureCommandName ?? "npx libretto ai configure";
156
- const configPath = options.configPath ?? LIBRETTO_CONFIG_PATH;
157
- const presetArg = input.preset?.trim();
158
- if (!presetArg && !input.clear) {
159
- const config2 = readAiConfig(configPath);
160
- if (!config2) {
161
- console.log(
162
- `No AI config set. Choose a default model: ${configureCommandName} ${formatConfigureProviders()}`
163
- );
164
- console.log(
165
- "Provider credentials still come from your shell or .env file."
166
- );
167
- return;
168
- }
169
- printAiConfig(config2, configPath);
170
- return;
171
- }
172
- if (input.clear) {
173
- const removed = clearAiConfig(configPath);
174
- if (removed) {
175
- console.log(`Cleared AI config: ${configPath}`);
176
- } else {
177
- console.log("No AI config was set.");
178
- }
179
- return;
180
- }
181
- const model = resolveModelFromInput(presetArg);
182
- if (!model) {
183
- console.log(
184
- `Usage: ${configureCommandName} <${CONFIGURE_PROVIDERS.join("|")}|provider/model-id>
185
- ${configureCommandName}
186
- ${configureCommandName} --clear`
187
- );
188
- throw new Error(
189
- `Invalid provider or model. Use one of: ${formatConfigureProviders()}, or a full model string like "openai/gpt-4o".`
190
- );
191
- }
192
- const config = writeAiConfig(model, configPath);
193
- console.log("AI config saved.");
194
- printAiConfig(config, configPath);
195
- }
196
131
  export {
197
132
  AiConfigSchema,
198
133
  CURRENT_CONFIG_VERSION,
@@ -202,7 +137,6 @@ export {
202
137
  clearAiConfig,
203
138
  readAiConfig,
204
139
  readLibrettoConfig,
205
- runAiConfigure,
206
140
  writeAiConfig,
207
141
  writeLibrettoConfig
208
142
  };
@@ -54,34 +54,14 @@ function createLoggerForSession(session) {
54
54
  [createFileLogSink({ filePath: logFilePath })]
55
55
  );
56
56
  }
57
- async function closeLogger(logger) {
58
- if (!logger) return;
59
- await logger.close();
60
- }
61
57
  async function withSessionLogger(session, run) {
62
58
  const logger = createLoggerForSession(session);
63
59
  try {
64
60
  return await run(logger);
65
61
  } finally {
66
- await closeLogger(logger);
62
+ await logger.close();
67
63
  }
68
64
  }
69
- let llmClientFactory = null;
70
- function setLLMClientFactory(factory) {
71
- llmClientFactory = factory;
72
- }
73
- function getLLMClientFactory() {
74
- return llmClientFactory;
75
- }
76
- function maybeConfigureLLMClientFactoryFromEnv() {
77
- if (llmClientFactory) return;
78
- const hasAnyCreds = process.env.GOOGLE_CLOUD_PROJECT || process.env.GCLOUD_PROJECT || process.env.ANTHROPIC_API_KEY || process.env.OPENAI_API_KEY || process.env.GEMINI_API_KEY || process.env.GOOGLE_GENERATIVE_AI_API_KEY;
79
- if (!hasAnyCreds) return;
80
- setLLMClientFactory(async (_logger, model) => {
81
- const { createLLMClient } = await import("../../shared/llm/index.js");
82
- return createLLMClient(model);
83
- });
84
- }
85
65
  export {
86
66
  LIBRETTO_CONFIG_DIR,
87
67
  LIBRETTO_CONFIG_PATH,
@@ -89,10 +69,8 @@ export {
89
69
  LIBRETTO_SESSIONS_DIR,
90
70
  PROFILES_DIR,
91
71
  REPO_ROOT,
92
- closeLogger,
93
72
  createLoggerForSession,
94
73
  ensureLibrettoSetup,
95
- getLLMClientFactory,
96
74
  getSessionActionsLogPath,
97
75
  getSessionDir,
98
76
  getSessionLogsPath,
@@ -100,7 +78,5 @@ export {
100
78
  getSessionSnapshotRunDir,
101
79
  getSessionSnapshotsDir,
102
80
  getSessionStatePath,
103
- maybeConfigureLLMClientFactoryFromEnv,
104
- setLLMClientFactory,
105
81
  withSessionLogger
106
82
  };