open-agents-ai 0.11.1 → 0.11.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/index.js +231 -82
  2. package/package.json +1 -1
package/dist/index.js CHANGED
@@ -302,6 +302,17 @@ function performUpdate() {
302
302
  return false;
303
303
  }
304
304
  }
305
+ function performSilentUpdate() {
306
+ try {
307
+ execSync(`npm install -g ${PACKAGE_NAME}@latest`, {
308
+ stdio: "pipe",
309
+ timeout: 12e4
310
+ });
311
+ return true;
312
+ } catch {
313
+ return false;
314
+ }
315
+ }
305
316
  function restartProcess() {
306
317
  const args = process.argv.slice(1);
307
318
  try {
@@ -1153,7 +1164,7 @@ var init_shell = __esm({
1153
1164
  const timeout = args["timeout"] ?? this.defaultTimeout;
1154
1165
  const stdinInput = args["stdin"];
1155
1166
  const start = performance.now();
1156
- return new Promise((resolve13) => {
1167
+ return new Promise((resolve14) => {
1157
1168
  const child = spawn("bash", ["-c", command], {
1158
1169
  cwd: this.workingDir,
1159
1170
  env: {
@@ -1206,7 +1217,7 @@ var init_shell = __esm({
1206
1217
  const combined = stdout + stderr;
1207
1218
  const looksInteractive = /\? .+[›>]|y\/n|yes\/no|\(Y\/n\)|\[y\/N\]/i.test(combined);
1208
1219
  const hint = looksInteractive ? " The command appears to be waiting for interactive input. Use non-interactive flags (e.g., --yes, --no-input) or provide input via the stdin parameter." : "";
1209
- resolve13({
1220
+ resolve14({
1210
1221
  success: false,
1211
1222
  output: stdout,
1212
1223
  error: `Command timed out after ${timeout}ms.${hint}`,
@@ -1215,7 +1226,7 @@ var init_shell = __esm({
1215
1226
  return;
1216
1227
  }
1217
1228
  const success = code === 0;
1218
- resolve13({
1229
+ resolve14({
1219
1230
  success,
1220
1231
  output: stdout + (stderr && success ? `
1221
1232
  STDERR:
@@ -1226,7 +1237,7 @@ ${stderr}` : ""),
1226
1237
  });
1227
1238
  child.on("error", (err) => {
1228
1239
  clearTimeout(timer);
1229
- resolve13({
1240
+ resolve14({
1230
1241
  success: false,
1231
1242
  output: stdout,
1232
1243
  error: err.message,
@@ -2908,11 +2919,11 @@ var init_diagnostic = __esm({
2908
2919
  }
2909
2920
  return steps;
2910
2921
  }
2911
- runStep(step, command, cwd3) {
2922
+ runStep(step, command, cwd4) {
2912
2923
  const start = performance.now();
2913
2924
  try {
2914
2925
  const output = execSync5(command, {
2915
- cwd: cwd3,
2926
+ cwd: cwd4,
2916
2927
  encoding: "utf8",
2917
2928
  timeout: 12e4,
2918
2929
  stdio: ["pipe", "pipe", "pipe"],
@@ -3055,10 +3066,10 @@ var init_git_info = __esm({
3055
3066
  durationMs: performance.now() - start
3056
3067
  };
3057
3068
  }
3058
- git(cwd3, cmd) {
3069
+ git(cwd4, cmd) {
3059
3070
  try {
3060
3071
  return execSync6(`git ${cmd}`, {
3061
- cwd: cwd3,
3072
+ cwd: cwd4,
3062
3073
  encoding: "utf8",
3063
3074
  timeout: 1e4,
3064
3075
  stdio: ["pipe", "pipe", "pipe"]
@@ -3092,10 +3103,10 @@ var init_background_task = __esm({
3092
3103
  BackgroundTaskManager = class {
3093
3104
  tasks = /* @__PURE__ */ new Map();
3094
3105
  nextId = 1;
3095
- spawn(command, cwd3, timeoutMs = 6e5) {
3106
+ spawn(command, cwd4, timeoutMs = 6e5) {
3096
3107
  const id = `task-${this.nextId++}`;
3097
3108
  const child = spawn2("bash", ["-c", command], {
3098
- cwd: cwd3,
3109
+ cwd: cwd4,
3099
3110
  env: { ...process.env, CI: "true", NONINTERACTIVE: "1", NO_COLOR: "1" },
3100
3111
  stdio: ["ignore", "pipe", "pipe"],
3101
3112
  detached: false
@@ -6930,7 +6941,9 @@ ${summary}
6930
6941
  });
6931
6942
  if (!resp.ok) {
6932
6943
  const text = await resp.text().catch(() => "");
6933
- throw new Error(`Ollama HTTP ${resp.status}: ${text.slice(0, 200)}`);
6944
+ const isHtml = text.trimStart().startsWith("<!") || text.trimStart().startsWith("<html");
6945
+ const detail = isHtml ? `(received HTML error page \u2014 backend may be behind a proxy/CDN that is timing out)` : text.slice(0, 200);
6946
+ throw new Error(`Backend HTTP ${resp.status}: ${detail}`);
6934
6947
  }
6935
6948
  const data = await resp.json();
6936
6949
  const choices = data.choices ?? [];
@@ -7232,6 +7245,14 @@ function renderSlashHelp() {
7232
7245
  process.stdout.write(` ${c2.cyan(cmd.padEnd(30))} ${c2.dim(desc)}
7233
7246
  `);
7234
7247
  }
7248
+ process.stdout.write(`
7249
+ ${c2.bold("Project-local overrides:")}
7250
+
7251
+ `);
7252
+ process.stdout.write(` ${c2.dim("Append")} ${c2.yellow("--local")} ${c2.dim("to save settings to .oa/settings.json (this project only).")}
7253
+ `);
7254
+ process.stdout.write(` ${c2.dim("Example:")} ${c2.cyan("/model qwen3:32b --local")} ${c2.dim("/endpoint http://remote:8000/v1 --local")}
7255
+ `);
7235
7256
  process.stdout.write(`
7236
7257
  ${c2.bold("Mid-task steering:")}
7237
7258
 
@@ -7502,7 +7523,9 @@ async function handleSlashCommand(input, ctx) {
7502
7523
  if (!trimmed.startsWith("/"))
7503
7524
  return "not_a_command";
7504
7525
  const [cmd, ...rest] = trimmed.slice(1).split(/\s+/);
7505
- const arg = rest.join(" ").trim();
7526
+ const hasLocal = rest.includes("--local");
7527
+ const filteredRest = rest.filter((r) => r !== "--local");
7528
+ const arg = filteredRest.join(" ").trim();
7506
7529
  switch (cmd) {
7507
7530
  case "help":
7508
7531
  case "h":
@@ -7520,8 +7543,13 @@ async function handleSlashCommand(input, ctx) {
7520
7543
  case "verbose":
7521
7544
  case "v":
7522
7545
  ctx.setVerbose(!ctx.config.verbose);
7523
- ctx.saveSettings({ verbose: ctx.config.verbose });
7524
- renderInfo(`Verbose mode: ${ctx.config.verbose ? "on" : "off"}`);
7546
+ if (hasLocal) {
7547
+ ctx.saveLocalSettings({ verbose: ctx.config.verbose });
7548
+ renderInfo(`Verbose mode: ${ctx.config.verbose ? "on" : "off"} (project-local)`);
7549
+ } else {
7550
+ ctx.saveSettings({ verbose: ctx.config.verbose });
7551
+ renderInfo(`Verbose mode: ${ctx.config.verbose ? "on" : "off"}`);
7552
+ }
7525
7553
  return "handled";
7526
7554
  case "config":
7527
7555
  case "cfg":
@@ -7537,7 +7565,7 @@ async function handleSlashCommand(input, ctx) {
7537
7565
  return "handled";
7538
7566
  case "model":
7539
7567
  if (arg) {
7540
- await switchModel(arg, ctx);
7568
+ await switchModel(arg, ctx, hasLocal);
7541
7569
  } else {
7542
7570
  await showModelPicker(ctx);
7543
7571
  }
@@ -7547,22 +7575,23 @@ async function handleSlashCommand(input, ctx) {
7547
7575
  return "handled";
7548
7576
  case "endpoint":
7549
7577
  case "ep":
7550
- await handleEndpoint(arg, ctx);
7578
+ await handleEndpoint(arg, ctx, hasLocal);
7551
7579
  return "handled";
7552
7580
  case "update":
7553
7581
  case "upgrade":
7554
7582
  await handleUpdate();
7555
7583
  return "handled";
7556
7584
  case "voice": {
7585
+ const save = hasLocal ? ctx.saveLocalSettings.bind(ctx) : ctx.saveSettings.bind(ctx);
7557
7586
  if (arg) {
7558
7587
  const msg = await ctx.voiceSetModel(arg);
7559
- ctx.saveSettings({ voice: true, voiceModel: arg });
7560
- renderInfo(msg);
7588
+ save({ voice: true, voiceModel: arg });
7589
+ renderInfo(msg + (hasLocal ? " (project-local)" : ""));
7561
7590
  } else {
7562
7591
  const msg = await ctx.voiceToggle();
7563
7592
  const isOn = msg.toLowerCase().includes("enabled") || msg.toLowerCase().includes("on");
7564
- ctx.saveSettings({ voice: isOn });
7565
- renderInfo(msg);
7593
+ save({ voice: isOn });
7594
+ renderInfo(msg + (hasLocal ? " (project-local)" : ""));
7566
7595
  }
7567
7596
  return "handled";
7568
7597
  }
@@ -7591,7 +7620,7 @@ async function showModelPicker(ctx) {
7591
7620
  renderError(`Failed to fetch models: ${err instanceof Error ? err.message : String(err)}`);
7592
7621
  }
7593
7622
  }
7594
- async function handleEndpoint(arg, ctx) {
7623
+ async function handleEndpoint(arg, ctx, local = false) {
7595
7624
  if (!arg) {
7596
7625
  process.stdout.write(`
7597
7626
  ${c2.bold("Current endpoint:")}
@@ -7654,14 +7683,19 @@ async function handleEndpoint(arg, ctx) {
7654
7683
  renderInfo("Setting endpoint anyway \u2014 it may come online later.");
7655
7684
  }
7656
7685
  ctx.setEndpoint(url, backendType, apiKey);
7657
- setConfigValue("backendUrl", url);
7658
- setConfigValue("backendType", backendType);
7659
- if (apiKey) {
7660
- setConfigValue("apiKey", apiKey);
7686
+ const endpointSettings = { backendUrl: url, backendType, ...apiKey ? { apiKey } : {} };
7687
+ if (local) {
7688
+ ctx.saveLocalSettings(endpointSettings);
7689
+ } else {
7690
+ setConfigValue("backendUrl", url);
7691
+ setConfigValue("backendType", backendType);
7692
+ if (apiKey) {
7693
+ setConfigValue("apiKey", apiKey);
7694
+ }
7695
+ ctx.saveSettings(endpointSettings);
7661
7696
  }
7662
- ctx.saveSettings({ backendUrl: url, backendType, ...apiKey ? { apiKey } : {} });
7663
7697
  process.stdout.write(`
7664
- ${c2.green("\u2714")} Endpoint updated and saved:
7698
+ ${c2.green("\u2714")} Endpoint updated and saved${local ? " (project-local)" : ""}:
7665
7699
  `);
7666
7700
  process.stdout.write(` ${c2.cyan("URL".padEnd(8))} ${url}
7667
7701
  `);
@@ -7727,7 +7761,7 @@ async function handleUpdate() {
7727
7761
  `);
7728
7762
  restartProcess();
7729
7763
  }
7730
- async function switchModel(query, ctx) {
7764
+ async function switchModel(query, ctx, local = false) {
7731
7765
  try {
7732
7766
  const models = await fetchOllamaModels(ctx.config.backendUrl);
7733
7767
  const match = findModel(models, query);
@@ -7741,8 +7775,15 @@ async function switchModel(query, ctx) {
7741
7775
  }
7742
7776
  const oldModel = ctx.config.model;
7743
7777
  ctx.setModel(match.name);
7744
- ctx.saveSettings({ model: match.name });
7778
+ if (local) {
7779
+ ctx.saveLocalSettings({ model: match.name });
7780
+ } else {
7781
+ ctx.saveSettings({ model: match.name });
7782
+ }
7745
7783
  renderModelSwitch(oldModel, match.name);
7784
+ if (local) {
7785
+ renderInfo("Saved as project-local override.");
7786
+ }
7746
7787
  } catch (err) {
7747
7788
  renderError(`Failed to switch model: ${err instanceof Error ? err.message : String(err)}`);
7748
7789
  }
@@ -7845,8 +7886,8 @@ function modelSupportsToolCalling(modelName) {
7845
7886
  return false;
7846
7887
  }
7847
7888
  function ask(rl, question) {
7848
- return new Promise((resolve13) => {
7849
- rl.question(question, (answer) => resolve13(answer.trim()));
7889
+ return new Promise((resolve14) => {
7890
+ rl.question(question, (answer) => resolve14(answer.trim()));
7850
7891
  });
7851
7892
  }
7852
7893
  function pullModelWithAutoUpdate(tag) {
@@ -9273,7 +9314,7 @@ var init_voice = __esm({
9273
9314
  const cmd = this.getPlayCommand(path);
9274
9315
  if (!cmd)
9275
9316
  return;
9276
- return new Promise((resolve13) => {
9317
+ return new Promise((resolve14) => {
9277
9318
  const child = nodeSpawn(cmd[0], cmd.slice(1), {
9278
9319
  stdio: "ignore",
9279
9320
  detached: false
@@ -9282,16 +9323,16 @@ var init_voice = __esm({
9282
9323
  child.on("close", () => {
9283
9324
  if (this.currentPlayback === child)
9284
9325
  this.currentPlayback = null;
9285
- resolve13();
9326
+ resolve14();
9286
9327
  });
9287
9328
  child.on("error", () => {
9288
9329
  if (this.currentPlayback === child)
9289
9330
  this.currentPlayback = null;
9290
- resolve13();
9331
+ resolve14();
9291
9332
  });
9292
9333
  setTimeout(() => {
9293
9334
  this.killPlayback();
9294
- resolve13();
9335
+ resolve14();
9295
9336
  }, 15e3);
9296
9337
  });
9297
9338
  }
@@ -9711,6 +9752,10 @@ function startTask(task, config, repoRoot, voice) {
9711
9752
  }
9712
9753
  async function startInteractive(config, repoPath) {
9713
9754
  const repoRoot = resolve11(repoPath ?? cwd());
9755
+ const isResumed = !!process.env.__OA_RESUMED;
9756
+ if (isResumed) {
9757
+ delete process.env.__OA_RESUMED;
9758
+ }
9714
9759
  initOaDirectory(repoRoot);
9715
9760
  const savedSettings = resolveSettings(repoRoot);
9716
9761
  if (savedSettings.model)
@@ -9724,30 +9769,45 @@ async function startInteractive(config, repoPath) {
9724
9769
  config = { ...config, apiKey: savedSettings.apiKey };
9725
9770
  if (savedSettings.verbose !== void 0)
9726
9771
  config = { ...config, verbose: savedSettings.verbose };
9727
- const needsSetup = isFirstRun() || !await isModelAvailable(config);
9728
- if (needsSetup && config.backendType === "ollama") {
9729
- const setupModel = await runSetupWizard(config);
9730
- if (setupModel === null) {
9731
- process.exit(0);
9772
+ if (savedSettings.maxRetries !== void 0)
9773
+ config = { ...config, maxRetries: savedSettings.maxRetries };
9774
+ if (savedSettings.timeoutMs !== void 0)
9775
+ config = { ...config, timeoutMs: savedSettings.timeoutMs };
9776
+ if (savedSettings.dryRun !== void 0)
9777
+ config = { ...config, dryRun: savedSettings.dryRun };
9778
+ if (savedSettings.dbPath)
9779
+ config = { ...config, dbPath: savedSettings.dbPath };
9780
+ if (!isResumed) {
9781
+ const needsSetup = isFirstRun() || !await isModelAvailable(config);
9782
+ if (needsSetup && config.backendType === "ollama") {
9783
+ const setupModel = await runSetupWizard(config);
9784
+ if (setupModel === null) {
9785
+ process.exit(0);
9786
+ }
9787
+ config = { ...config, model: setupModel };
9732
9788
  }
9733
- config = { ...config, model: setupModel };
9734
9789
  }
9735
- try {
9736
- const healthUrl = config.backendType === "ollama" ? `${config.backendUrl}/api/tags` : `${config.backendUrl}/v1/models`;
9737
- const resp = await fetch(healthUrl, { signal: AbortSignal.timeout(1e4) });
9738
- if (!resp.ok)
9739
- throw new Error(`HTTP ${resp.status}`);
9740
- } catch {
9741
- renderError(`Cannot reach ${config.backendType} at ${config.backendUrl}`);
9742
- if (config.backendType === "ollama") {
9743
- renderInfo("Start Ollama with: ollama serve");
9790
+ if (!isResumed) {
9791
+ try {
9792
+ const healthUrl = config.backendType === "ollama" ? `${config.backendUrl}/api/tags` : `${config.backendUrl}/v1/models`;
9793
+ const resp = await fetch(healthUrl, { signal: AbortSignal.timeout(1e4) });
9794
+ if (!resp.ok)
9795
+ throw new Error(`HTTP ${resp.status}`);
9796
+ } catch {
9797
+ renderError(`Cannot reach ${config.backendType} at ${config.backendUrl}`);
9798
+ if (config.backendType === "ollama") {
9799
+ renderInfo("Start Ollama with: ollama serve");
9800
+ }
9801
+ renderInfo("Use /endpoint to configure a different backend.");
9802
+ process.exit(1);
9744
9803
  }
9745
- renderInfo("Use /endpoint to configure a different backend.");
9746
- process.exit(1);
9747
9804
  }
9748
9805
  process.stdout.write("\x1B[2J\x1B[H");
9749
9806
  const carousel = new Carousel();
9750
- const carouselLines = carousel.start();
9807
+ let carouselLines = 0;
9808
+ if (!isResumed) {
9809
+ carouselLines = carousel.start();
9810
+ }
9751
9811
  const version = getVersion();
9752
9812
  renderRichHeader({
9753
9813
  model: config.model,
@@ -9755,6 +9815,10 @@ async function startInteractive(config, repoPath) {
9755
9815
  workspace: repoRoot,
9756
9816
  carouselLines
9757
9817
  });
9818
+ if (isResumed) {
9819
+ renderInfo(`Auto-updated to v${version} \u2014 session resumed.
9820
+ `);
9821
+ }
9758
9822
  const voiceEngine = new VoiceEngine();
9759
9823
  if (savedSettings.voice) {
9760
9824
  voiceEngine.toggle().catch(() => {
@@ -9767,7 +9831,7 @@ async function startInteractive(config, repoPath) {
9767
9831
  let currentConfig = { ...config };
9768
9832
  let activeTask = null;
9769
9833
  let messageQueue = [];
9770
- let carouselRetired = false;
9834
+ let carouselRetired = isResumed;
9771
9835
  const idlePrompt = `${c2.bold(c2.blue("> "))}`;
9772
9836
  const activePrompt = `${c2.dim(c2.cyan("+ "))}`;
9773
9837
  const rl = readline2.createInterface({
@@ -9836,6 +9900,12 @@ async function startInteractive(config, repoPath) {
9836
9900
  saveGlobalSettings(settings);
9837
9901
  } catch {
9838
9902
  }
9903
+ },
9904
+ saveLocalSettings(settings) {
9905
+ try {
9906
+ saveProjectSettings(repoRoot, settings);
9907
+ } catch {
9908
+ }
9839
9909
  }
9840
9910
  };
9841
9911
  showPrompt();
@@ -9909,6 +9979,26 @@ ${c2.dim("Goodbye!")}
9909
9979
  } finally {
9910
9980
  activeTask = null;
9911
9981
  }
9982
+ try {
9983
+ const updateInfo = await checkForUpdate(version);
9984
+ if (updateInfo) {
9985
+ renderInfo(`Update available: v${version} \u2192 v${updateInfo.latestVersion}. Installing...`);
9986
+ const ok = performSilentUpdate();
9987
+ if (ok) {
9988
+ renderInfo(`Updated to v${updateInfo.latestVersion}. Reloading...
9989
+ `);
9990
+ process.env.__OA_RESUMED = "1";
9991
+ if (carousel.isRunning)
9992
+ carousel.stop();
9993
+ voiceEngine.dispose();
9994
+ rl.close();
9995
+ restartProcess();
9996
+ } else {
9997
+ renderWarning("Auto-update failed. Use /update to retry manually.");
9998
+ }
9999
+ }
10000
+ } catch {
10001
+ }
9912
10002
  showPrompt();
9913
10003
  });
9914
10004
  rl.on("close", () => {
@@ -9968,6 +10058,7 @@ var init_interactive = __esm({
9968
10058
  "use strict";
9969
10059
  init_dist5();
9970
10060
  init_dist2();
10061
+ init_updater();
9971
10062
  init_commands();
9972
10063
  init_setup();
9973
10064
  init_project_context();
@@ -10442,8 +10533,17 @@ var config_exports = {};
10442
10533
  __export(config_exports, {
10443
10534
  configCommand: () => configCommand
10444
10535
  });
10445
- import { join as join19 } from "node:path";
10536
+ import { join as join19, resolve as resolve13 } from "node:path";
10446
10537
  import { homedir as homedir7 } from "node:os";
10538
+ import { cwd as cwd3 } from "node:process";
10539
+ function coerceForSettings(key, value) {
10540
+ if (INT_KEYS.has(key))
10541
+ return parseInt(value, 10);
10542
+ if (BOOL_KEYS.has(key)) {
10543
+ return value === "1" || value.toLowerCase() === "true" || value.toLowerCase() === "yes";
10544
+ }
10545
+ return value;
10546
+ }
10447
10547
  async function configCommand(opts, config) {
10448
10548
  if (opts.subCommand === "set") {
10449
10549
  return handleSet(opts, config);
@@ -10454,9 +10554,11 @@ async function configCommand(opts, config) {
10454
10554
  return handleShow(opts, config);
10455
10555
  }
10456
10556
  function handleShow(opts, config) {
10557
+ const repoRoot = resolve13(opts.repoPath ?? cwd3());
10457
10558
  printHeader("Configuration");
10458
- printSection("Active Settings");
10559
+ printSection("Active Settings (merged)");
10459
10560
  printKeyValue("backendUrl", config.backendUrl, 2);
10561
+ printKeyValue("backendType", config.backendType, 2);
10460
10562
  printKeyValue("model", config.model, 2);
10461
10563
  printKeyValue("apiKey", config.apiKey ? "[set]" : "[not set]", 2);
10462
10564
  printKeyValue("maxRetries", String(config.maxRetries), 2);
@@ -10464,18 +10566,34 @@ function handleShow(opts, config) {
10464
10566
  printKeyValue("dryRun", String(config.dryRun), 2);
10465
10567
  printKeyValue("verbose", String(config.verbose), 2);
10466
10568
  printKeyValue("dbPath", config.dbPath, 2);
10569
+ const projectSettings = loadProjectSettings(repoRoot);
10570
+ const projectKeys = Object.entries(projectSettings).filter(([, v]) => v !== void 0);
10571
+ if (projectKeys.length > 0) {
10572
+ printSection(`Project Overrides (.oa/settings.json)`);
10573
+ for (const [k, v] of projectKeys) {
10574
+ printKeyValue(k, String(v), 2);
10575
+ }
10576
+ } else {
10577
+ printSection("Project Overrides");
10578
+ printInfo(" (none \u2014 use 'config set KEY VALUE --local' to add)");
10579
+ }
10580
+ const globalSettings = loadGlobalSettings();
10581
+ const globalKeys = Object.entries(globalSettings).filter(([, v]) => v !== void 0);
10582
+ if (globalKeys.length > 0) {
10583
+ printSection("Global Settings (~/.open-agents/settings.json)");
10584
+ for (const [k, v] of globalKeys) {
10585
+ printKeyValue(k, String(v), 2);
10586
+ }
10587
+ }
10467
10588
  printSection("Config File");
10468
10589
  printInfo(`~/.open-agents/config.json (${join19(homedir7(), ".open-agents", "config.json")})`);
10469
- printSection("Environment Variables");
10470
- printInfo("OPEN_AGENTS_BACKEND_URL \u2014 override backendUrl");
10471
- printInfo("OPEN_AGENTS_MODEL \u2014 override model");
10472
- printInfo("OPEN_AGENTS_API_KEY \u2014 override apiKey");
10473
- printInfo("OPEN_AGENTS_MAX_RETRIES \u2014 override maxRetries");
10474
- printInfo("OPEN_AGENTS_TIMEOUT_MS \u2014 override timeoutMs");
10475
- printInfo("OPEN_AGENTS_DRY_RUN \u2014 override dryRun (true/false)");
10476
- printInfo("OPEN_AGENTS_VERBOSE \u2014 override verbose (true/false)");
10477
- printInfo("OPEN_AGENTS_DB_PATH \u2014 override dbPath");
10478
- printInfo("VLLM_BASE_URL \u2014 fallback for backendUrl");
10590
+ printSection("Priority Chain");
10591
+ printInfo(" 1. CLI flags (--model, --backend-url, etc.)");
10592
+ printInfo(" 2. Project .oa/settings.json (--local)");
10593
+ printInfo(" 3. Global ~/.open-agents/settings.json");
10594
+ printInfo(" 4. Environment variables (OPEN_AGENTS_*)");
10595
+ printInfo(" 5. Global ~/.open-agents/config.json");
10596
+ printInfo(" 6. Built-in defaults");
10479
10597
  if (opts.verbose) {
10480
10598
  printSection("All Settable Keys");
10481
10599
  for (const [key, desc] of Object.entries(CONFIG_KEYS)) {
@@ -10486,7 +10604,7 @@ function handleShow(opts, config) {
10486
10604
  function handleSet(opts, _config) {
10487
10605
  const { key, value } = opts;
10488
10606
  if (!key) {
10489
- printError("Usage: open-agents config set KEY VALUE");
10607
+ printError("Usage: open-agents config set KEY VALUE [--local]");
10490
10608
  printInfo("Run 'open-agents config keys' to see available keys");
10491
10609
  process.exit(1);
10492
10610
  }
@@ -10500,37 +10618,63 @@ function handleSet(opts, _config) {
10500
10618
  printInfo("Run 'open-agents config keys' to see available keys");
10501
10619
  process.exit(1);
10502
10620
  }
10503
- try {
10504
- setConfigValue(key, value);
10505
- printSuccess(`Config updated: ${key} = ${value}`);
10506
- printInfo(`Saved to ~/.open-agents/config.json`);
10507
- } catch (err) {
10508
- printError(`Failed to save config: ${err instanceof Error ? err.message : String(err)}`);
10509
- process.exit(1);
10621
+ if (opts.local) {
10622
+ const repoRoot = resolve13(opts.repoPath ?? cwd3());
10623
+ try {
10624
+ initOaDirectory(repoRoot);
10625
+ const coerced = coerceForSettings(key, value);
10626
+ saveProjectSettings(repoRoot, { [key]: coerced });
10627
+ printSuccess(`Project override set: ${key} = ${value}`);
10628
+ printInfo(`Saved to ${join19(repoRoot, ".oa", "settings.json")}`);
10629
+ printInfo("This override applies only when running in this workspace.");
10630
+ } catch (err) {
10631
+ printError(`Failed to save: ${err instanceof Error ? err.message : String(err)}`);
10632
+ process.exit(1);
10633
+ }
10634
+ } else {
10635
+ try {
10636
+ setConfigValue(key, value);
10637
+ printSuccess(`Config updated: ${key} = ${value}`);
10638
+ printInfo(`Saved to ~/.open-agents/config.json`);
10639
+ printInfo("Tip: Use --local to set project-specific overrides.");
10640
+ } catch (err) {
10641
+ printError(`Failed to save config: ${err instanceof Error ? err.message : String(err)}`);
10642
+ process.exit(1);
10643
+ }
10510
10644
  }
10511
10645
  }
10512
10646
  function handleKeys() {
10513
10647
  printHeader("Config Keys");
10648
+ printInfo("All keys can be set globally or per-project (--local):\n");
10514
10649
  for (const [key, desc] of Object.entries(CONFIG_KEYS)) {
10515
10650
  printKeyValue(key, desc, 2);
10516
10651
  }
10652
+ printInfo("\nUsage:");
10653
+ printInfo(" oa config set model qwen3.5:122b # global default");
10654
+ printInfo(" oa config set model qwen3.5:122b --local # this project only");
10517
10655
  }
10518
- var CONFIG_KEYS;
10656
+ var CONFIG_KEYS, INT_KEYS, BOOL_KEYS;
10519
10657
  var init_config3 = __esm({
10520
10658
  "packages/cli/dist/commands/config.js"() {
10521
10659
  "use strict";
10522
10660
  init_config();
10661
+ init_oa_directory();
10523
10662
  init_output();
10524
10663
  CONFIG_KEYS = {
10525
- backendUrl: "vLLM backend base URL",
10526
- model: "Model name served by vLLM",
10664
+ backendUrl: "Backend base URL (Ollama or OpenAI-compatible)",
10665
+ backendType: "Backend type: ollama, vllm, fake",
10666
+ model: "Model name to use",
10527
10667
  apiKey: "Bearer token for authenticated deployments",
10528
10668
  maxRetries: "Maximum HTTP retries (integer)",
10529
10669
  timeoutMs: "Per-request timeout in milliseconds (integer)",
10530
10670
  dryRun: "Dry-run mode - patches not written (true/false)",
10531
10671
  verbose: "Verbose output (true/false)",
10532
- dbPath: "Path to SQLite memory database"
10672
+ dbPath: "Path to SQLite memory database",
10673
+ voice: "Enable TTS voice feedback (true/false)",
10674
+ voiceModel: "TTS voice model: glados, overwatch"
10533
10675
  };
10676
+ INT_KEYS = /* @__PURE__ */ new Set(["maxRetries", "timeoutMs"]);
10677
+ BOOL_KEYS = /* @__PURE__ */ new Set(["dryRun", "verbose", "voice"]);
10534
10678
  }
10535
10679
  });
10536
10680
 
@@ -10631,7 +10775,7 @@ async function serveVllm(opts, config) {
10631
10775
  await runVllmServer(args, opts.verbose ?? false);
10632
10776
  }
10633
10777
  async function runVllmServer(args, verbose) {
10634
- return new Promise((resolve13, reject) => {
10778
+ return new Promise((resolve14, reject) => {
10635
10779
  const child = spawn3("python", args, {
10636
10780
  stdio: verbose ? "inherit" : ["ignore", "pipe", "pipe"],
10637
10781
  env: { ...process.env }
@@ -10666,10 +10810,10 @@ async function runVllmServer(args, verbose) {
10666
10810
  child.once("exit", (code, signal) => {
10667
10811
  if (signal) {
10668
10812
  printInfo(`vLLM server stopped by signal ${signal}`);
10669
- resolve13();
10813
+ resolve14();
10670
10814
  } else if (code === 0) {
10671
10815
  printSuccess("vLLM server exited cleanly");
10672
- resolve13();
10816
+ resolve14();
10673
10817
  } else {
10674
10818
  printError(`vLLM server exited with code ${code}`);
10675
10819
  reject(new Error(`vLLM exited with code ${code}`));
@@ -11013,6 +11157,7 @@ function parseCliArgs(argv) {
11013
11157
  "max-retries": { type: "string" },
11014
11158
  "timeout-ms": { type: "string" },
11015
11159
  offline: { type: "boolean" },
11160
+ local: { type: "boolean", short: "l" },
11016
11161
  port: { type: "string" },
11017
11162
  suite: { type: "string" },
11018
11163
  help: { type: "boolean", short: "h" },
@@ -11034,6 +11179,7 @@ function parseCliArgs(argv) {
11034
11179
  maxRetries: typeof values["max-retries"] === "string" ? parseInt(values["max-retries"], 10) : void 0,
11035
11180
  timeoutMs: typeof values["timeout-ms"] === "string" ? parseInt(values["timeout-ms"], 10) : void 0,
11036
11181
  offline: values.offline === true,
11182
+ local: values.local === true,
11037
11183
  help: values.help === true,
11038
11184
  version: values.version === true
11039
11185
  };
@@ -11098,6 +11244,7 @@ Flags:
11098
11244
  -r, --repo <path> Repository root (default: cwd)
11099
11245
  --dry-run Validate patches, don't write to disk
11100
11246
  --offline Use FakeBackend, no backend connection needed
11247
+ -l, --local Save settings to .oa/settings.json (project-local)
11101
11248
  -v, --verbose Verbose output
11102
11249
  --max-retries <n> Max retries per model request
11103
11250
  --timeout-ms <ms> Overall task timeout
@@ -11192,7 +11339,9 @@ async function main() {
11192
11339
  subCommand: parsed.configSubCommand,
11193
11340
  key: parsed.configKey,
11194
11341
  value: parsed.configValue,
11195
- verbose: parsed.verbose
11342
+ verbose: parsed.verbose,
11343
+ local: parsed.local,
11344
+ repoPath: parsed.repoPath
11196
11345
  }, config);
11197
11346
  break;
11198
11347
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "open-agents-ai",
3
- "version": "0.11.1",
3
+ "version": "0.11.3",
4
4
  "description": "AI coding agent powered by open-source models (Ollama/vLLM) — interactive TUI with agentic tool-calling loop",
5
5
  "type": "module",
6
6
  "main": "./dist/index.js",