open-agents-ai 0.12.5 → 0.12.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/index.js +306 -53
  2. package/package.json +1 -1
package/dist/index.js CHANGED
@@ -7477,6 +7477,8 @@ TASK: ${task}` : task }
7477
7477
  ];
7478
7478
  const toolDefs = this.buildToolDefinitions();
7479
7479
  let totalTokens = 0;
7480
+ let promptTokens = 0;
7481
+ let completionTokens = 0;
7480
7482
  let estimatedTokens = 0;
7481
7483
  let toolCallCount = 0;
7482
7484
  let completed = false;
@@ -7550,9 +7552,17 @@ Integrate this guidance into your current approach. Continue working on the task
7550
7552
  };
7551
7553
  const response = this.options.streamEnabled && this.hasStreamingSupport() ? await this.streamingRequest(chatRequest, turn) : await this.backend.chatCompletion(chatRequest);
7552
7554
  totalTokens += response.usage?.totalTokens ?? 0;
7555
+ promptTokens += response.usage?.promptTokens ?? 0;
7556
+ completionTokens += response.usage?.completionTokens ?? 0;
7553
7557
  const choiceContent = response.choices[0]?.message?.content ?? "";
7554
7558
  const choiceArgs = response.choices[0]?.message?.toolCalls?.map((tc) => JSON.stringify(tc.arguments)).join("") ?? "";
7555
7559
  estimatedTokens += Math.ceil((choiceContent.length + choiceArgs.length) / 4);
7560
+ const estimatedContextTokens = Math.ceil(messages.reduce((sum, m) => sum + (typeof m.content === "string" ? m.content.length : 100), 0) / 4);
7561
+ this.emit({
7562
+ type: "token_usage",
7563
+ timestamp: (/* @__PURE__ */ new Date()).toISOString(),
7564
+ tokenUsage: { promptTokens, completionTokens, totalTokens, estimatedContextTokens }
7565
+ });
7556
7566
  const choice = response.choices[0];
7557
7567
  if (!choice)
7558
7568
  break;
@@ -7712,9 +7722,17 @@ Integrate this guidance into your current approach. Continue working on the task
7712
7722
  const chatRequest = { messages: compactedMsgs, tools: toolDefs, temperature: this.options.temperature, maxTokens: this.options.maxTokens, timeoutMs: this.options.requestTimeoutMs };
7713
7723
  const response = this.options.streamEnabled && this.hasStreamingSupport() ? await this.streamingRequest(chatRequest, turn) : await this.backend.chatCompletion(chatRequest);
7714
7724
  totalTokens += response.usage?.totalTokens ?? 0;
7725
+ promptTokens += response.usage?.promptTokens ?? 0;
7726
+ completionTokens += response.usage?.completionTokens ?? 0;
7715
7727
  const choiceContent2 = response.choices[0]?.message?.content ?? "";
7716
7728
  const choiceArgs2 = response.choices[0]?.message?.toolCalls?.map((tc) => JSON.stringify(tc.arguments)).join("") ?? "";
7717
7729
  estimatedTokens += Math.ceil((choiceContent2.length + choiceArgs2.length) / 4);
7730
+ const bfEstCtx = Math.ceil(messages.reduce((sum, m) => sum + (typeof m.content === "string" ? m.content.length : 100), 0) / 4);
7731
+ this.emit({
7732
+ type: "token_usage",
7733
+ timestamp: (/* @__PURE__ */ new Date()).toISOString(),
7734
+ tokenUsage: { promptTokens, completionTokens, totalTokens, estimatedContextTokens: bfEstCtx }
7735
+ });
7718
7736
  const choice = response.choices[0];
7719
7737
  if (!choice)
7720
7738
  break;
@@ -7773,7 +7791,7 @@ ${result.output}`;
7773
7791
  success: completed,
7774
7792
  timestamp: (/* @__PURE__ */ new Date()).toISOString()
7775
7793
  });
7776
- return { completed, turns: messages.filter((m) => m.role === "assistant").length, toolCalls: toolCallCount, totalTokens, estimatedTokens, summary, durationMs };
7794
+ return { completed, turns: messages.filter((m) => m.role === "assistant").length, toolCalls: toolCallCount, totalTokens, promptTokens, completionTokens, estimatedTokens, summary, durationMs };
7777
7795
  }
7778
7796
  // -------------------------------------------------------------------------
7779
7797
  // Image / multimodal support
@@ -8245,7 +8263,11 @@ ${newerSummary}` : newerSummary;
8245
8263
  }
8246
8264
  };
8247
8265
  }),
8248
- usage: usage ? { totalTokens: usage.total_tokens ?? 0 } : void 0
8266
+ usage: usage ? {
8267
+ totalTokens: usage.total_tokens ?? 0,
8268
+ promptTokens: usage.prompt_tokens,
8269
+ completionTokens: usage.completion_tokens
8270
+ } : void 0
8249
8271
  };
8250
8272
  }
8251
8273
  /**
@@ -12606,6 +12628,184 @@ ${files.map((f) => `- [\`${f}\`](./${f})`).join("\n")}
12606
12628
  }
12607
12629
  });
12608
12630
 
12631
+ // packages/cli/dist/tui/status-bar.js
12632
+ var FOOTER_ROWS, StatusBar;
12633
+ var init_status_bar = __esm({
12634
+ "packages/cli/dist/tui/status-bar.js"() {
12635
+ "use strict";
12636
+ init_render();
12637
+ FOOTER_ROWS = 5;
12638
+ StatusBar = class {
12639
+ metrics = {
12640
+ promptTokens: 0,
12641
+ completionTokens: 0,
12642
+ totalTokens: 0,
12643
+ estimatedContextTokens: 0,
12644
+ contextWindowSize: 131072
12645
+ };
12646
+ active = false;
12647
+ scrollRegionTop = 1;
12648
+ stdinHooked = false;
12649
+ /** Context window size to display. Can be updated if model changes. */
12650
+ setContextWindowSize(size) {
12651
+ this.metrics.contextWindowSize = size;
12652
+ }
12653
+ /** Update token metrics from a token_usage event */
12654
+ updateMetrics(update) {
12655
+ if (update.promptTokens !== void 0)
12656
+ this.metrics.promptTokens = update.promptTokens;
12657
+ if (update.completionTokens !== void 0)
12658
+ this.metrics.completionTokens = update.completionTokens;
12659
+ if (update.totalTokens !== void 0)
12660
+ this.metrics.totalTokens = update.totalTokens;
12661
+ if (update.estimatedContextTokens !== void 0)
12662
+ this.metrics.estimatedContextTokens = update.estimatedContextTokens;
12663
+ if (this.active)
12664
+ this.renderFooterPreserveCursor();
12665
+ }
12666
+ /** Reset metrics (e.g. on session start) */
12667
+ resetMetrics() {
12668
+ this.metrics.promptTokens = 0;
12669
+ this.metrics.completionTokens = 0;
12670
+ this.metrics.totalTokens = 0;
12671
+ this.metrics.estimatedContextTokens = 0;
12672
+ if (this.active)
12673
+ this.renderFooterPreserveCursor();
12674
+ }
12675
+ /** Activate the status bar — set scroll region, draw footer, hook stdin */
12676
+ activate(scrollRegionTop) {
12677
+ this.scrollRegionTop = scrollRegionTop ?? 1;
12678
+ this.active = true;
12679
+ this.applyScrollRegion();
12680
+ this.renderFooterAndPositionInput();
12681
+ this.hookStdin();
12682
+ }
12683
+ /** Deactivate — restore full-screen scroll region */
12684
+ deactivate() {
12685
+ this.active = false;
12686
+ const rows = process.stdout.rows ?? 24;
12687
+ process.stdout.write(`\x1B[1;${rows}r`);
12688
+ }
12689
+ /** Whether the status bar is currently active */
12690
+ get isActive() {
12691
+ return this.active;
12692
+ }
12693
+ /** Number of rows reserved at the bottom */
12694
+ get reservedRows() {
12695
+ return FOOTER_ROWS;
12696
+ }
12697
+ /** Handle terminal resize — reapply scroll region and redraw footer */
12698
+ handleResize() {
12699
+ if (!this.active)
12700
+ return;
12701
+ this.applyScrollRegion();
12702
+ this.renderFooterAndPositionInput();
12703
+ }
12704
+ /**
12705
+ * Call BEFORE writing content to the scrollable area above the footer.
12706
+ * Moves cursor into the scroll region so content output scrolls properly.
12707
+ */
12708
+ beginContentWrite() {
12709
+ if (!this.active)
12710
+ return;
12711
+ const rows = process.stdout.rows ?? 24;
12712
+ const scrollEnd = rows - FOOTER_ROWS;
12713
+ process.stdout.write(`\x1B[${scrollEnd};1H`);
12714
+ }
12715
+ /**
12716
+ * Call AFTER writing content to the scrollable area.
12717
+ * Redraws footer and parks cursor on the input row.
12718
+ * Does NOT try to restore to a previous cursor position — uses explicit
12719
+ * absolute positioning instead (avoids SCP/RCP alias issues on some terminals).
12720
+ */
12721
+ endContentWrite() {
12722
+ if (!this.active)
12723
+ return;
12724
+ this.renderFooterAndPositionInput();
12725
+ }
12726
+ /**
12727
+ * Clear the input row and position cursor there at column 1.
12728
+ * Used by showPrompt() before readline writes the prompt.
12729
+ */
12730
+ positionAtInput() {
12731
+ if (!this.active)
12732
+ return;
12733
+ const rows = process.stdout.rows ?? 24;
12734
+ const inputRow = rows - 2;
12735
+ process.stdout.write(`\x1B[${inputRow};1H\x1B[2K`);
12736
+ }
12737
+ /** Build the metrics line string */
12738
+ buildMetricsLine() {
12739
+ const m = this.metrics;
12740
+ const pipe = c2.white(" \u2502 ");
12741
+ const tokIn = m.promptTokens > 0 ? m.promptTokens.toLocaleString() : `~${Math.max(m.estimatedContextTokens, 0).toLocaleString()}`;
12742
+ const tokInLabel = c2.cyan("In: ") + c2.bold(tokIn);
12743
+ const tokOut = m.completionTokens > 0 ? m.completionTokens.toLocaleString() : `~${Math.ceil(m.totalTokens > 0 ? m.totalTokens - m.promptTokens : m.estimatedContextTokens * 0.3).toLocaleString()}`;
12744
+ const tokOutLabel = c2.green("Out: ") + c2.bold(tokOut);
12745
+ const ctxUsed = m.estimatedContextTokens;
12746
+ const ctxTotal = m.contextWindowSize;
12747
+ const ctxPct = ctxTotal > 0 ? Math.max(0, Math.min(100, Math.round((1 - ctxUsed / ctxTotal) * 100))) : 100;
12748
+ const ctxColor = ctxPct > 50 ? c2.green : ctxPct > 20 ? c2.yellow : c2.red;
12749
+ const ctxLabel = c2.blue("Ctx: ") + c2.bold(`${ctxUsed.toLocaleString()}/${ctxTotal.toLocaleString()}`) + ` ${ctxColor(`${ctxPct}%`)}`;
12750
+ return ` ${tokInLabel}${pipe}${tokOutLabel}${pipe}${ctxLabel}`;
12751
+ }
12752
+ // -------------------------------------------------------------------------
12753
+ // Private
12754
+ // -------------------------------------------------------------------------
12755
+ /** Set the DECSTBM scroll region to exclude the footer rows */
12756
+ applyScrollRegion() {
12757
+ const rows = process.stdout.rows ?? 24;
12758
+ const scrollEnd = Math.max(rows - FOOTER_ROWS, this.scrollRegionTop + 1);
12759
+ process.stdout.write(`\x1B[${this.scrollRegionTop};${scrollEnd}r\x1B[${scrollEnd};1H`);
12760
+ }
12761
+ /**
12762
+ * Draw the footer (separators + metrics) and leave cursor on the input row.
12763
+ * All ANSI sequences are batched into a SINGLE write to prevent interleaving.
12764
+ * Does NOT touch the input row content — readline manages that.
12765
+ */
12766
+ renderFooterAndPositionInput() {
12767
+ if (!this.active)
12768
+ return;
12769
+ const rows = process.stdout.rows ?? 24;
12770
+ const w = getTermWidth();
12771
+ const sep = c2.dim("\u2500".repeat(w));
12772
+ const inputRow = rows - 2;
12773
+ const buf = `\x1B[?7l\x1B[${rows - 4};1H\x1B[2K\x1B[${rows - 3};1H\x1B[2K${sep}\x1B[${rows - 1};1H\x1B[2K${sep}\x1B[${rows};1H\x1B[2K${this.buildMetricsLine()}\x1B[?7h\x1B[${inputRow};1H`;
12774
+ process.stdout.write(buf);
12775
+ }
12776
+ /**
12777
+ * Redraw footer while preserving the current cursor position.
12778
+ * Used by keystroke protection and metrics updates — these must not
12779
+ * move the cursor away from where readline left it.
12780
+ * Uses DEC DECSC/DECRC (\x1B7/\x1B8) for save/restore in a single write.
12781
+ */
12782
+ renderFooterPreserveCursor() {
12783
+ if (!this.active)
12784
+ return;
12785
+ const rows = process.stdout.rows ?? 24;
12786
+ const w = getTermWidth();
12787
+ const sep = c2.dim("\u2500".repeat(w));
12788
+ const buf = `\x1B7\x1B[?7l\x1B[${rows - 4};1H\x1B[2K\x1B[${rows - 3};1H\x1B[2K${sep}\x1B[${rows - 1};1H\x1B[2K${sep}\x1B[${rows};1H\x1B[2K${this.buildMetricsLine()}\x1B[?7h\x1B8`;
12789
+ process.stdout.write(buf);
12790
+ }
12791
+ /**
12792
+ * Hook into process.stdin to redraw footer after every keystroke.
12793
+ * Protects against corruption from backspace, delete, or other editing keys.
12794
+ */
12795
+ hookStdin() {
12796
+ if (this.stdinHooked)
12797
+ return;
12798
+ this.stdinHooked = true;
12799
+ process.stdin.on("data", () => {
12800
+ if (!this.active)
12801
+ return;
12802
+ setImmediate(() => this.renderFooterPreserveCursor());
12803
+ });
12804
+ }
12805
+ };
12806
+ }
12807
+ });
12808
+
12609
12809
  // packages/cli/dist/tui/interactive.js
12610
12810
  import * as readline2 from "node:readline";
12611
12811
  import { cwd } from "node:process";
@@ -12779,7 +12979,7 @@ Use task_status("${taskId}") or task_output("${taskId}") to check progress.`
12779
12979
  }
12780
12980
  };
12781
12981
  }
12782
- function startTask(task, config, repoRoot, voice, stream, taskStores, bruteForce) {
12982
+ function startTask(task, config, repoRoot, voice, stream, taskStores, bruteForce, statusBar) {
12783
12983
  const projectCtx = buildProjectContext(repoRoot, taskStores?.contextStores);
12784
12984
  const dynamicContext = formatContextForPrompt(projectCtx);
12785
12985
  const backend = new OllamaAgenticBackend(config.backendUrl.replace(/\/$/, ""), config.model);
@@ -12800,6 +13000,15 @@ function startTask(task, config, repoRoot, voice, stream, taskStores, bruteForce
12800
13000
  const editSessionId = `task-${Date.now()}`;
12801
13001
  const editHistory = createEditHistoryLogger(repoRoot, editSessionId);
12802
13002
  let lastToolCall = null;
13003
+ const contentWrite = (fn) => {
13004
+ if (statusBar?.isActive) {
13005
+ statusBar.beginContentWrite();
13006
+ fn();
13007
+ statusBar.endContentWrite();
13008
+ } else {
13009
+ fn();
13010
+ }
13011
+ };
12803
13012
  runner.onEvent((event) => {
12804
13013
  switch (event.type) {
12805
13014
  case "tool_call":
@@ -12814,35 +13023,42 @@ function startTask(task, config, repoRoot, voice, stream, taskStores, bruteForce
12814
13023
  }
12815
13024
  }
12816
13025
  lastToolCall = { name: event.toolName ?? "unknown", args: event.toolArgs ?? {} };
12817
- if (voice?.enabled) {
12818
- const desc = describeToolCall(event.toolName ?? "unknown", event.toolArgs ?? {});
12819
- renderVoiceText(desc);
12820
- voice.speak(desc);
12821
- }
12822
- renderToolCallStart(event.toolName ?? "unknown", event.toolArgs ?? {});
13026
+ contentWrite(() => {
13027
+ if (voice?.enabled) {
13028
+ const desc = describeToolCall(event.toolName ?? "unknown", event.toolArgs ?? {});
13029
+ renderVoiceText(desc);
13030
+ voice.speak(desc);
13031
+ }
13032
+ renderToolCallStart(event.toolName ?? "unknown", event.toolArgs ?? {});
13033
+ });
12823
13034
  break;
12824
13035
  case "tool_result":
12825
13036
  if (lastToolCall) {
12826
13037
  editHistory.logToolCall(lastToolCall.name, lastToolCall.args, event.success ?? false);
12827
13038
  lastToolCall = null;
12828
13039
  }
12829
- renderToolResult(event.toolName ?? "unknown", event.success ?? false, event.content ?? "");
12830
- if (voice?.enabled && !(event.success ?? true)) {
12831
- const desc = describeToolResult(event.toolName ?? "unknown", false);
12832
- if (desc) {
12833
- renderVoiceText(desc);
12834
- voice.speak(desc);
13040
+ contentWrite(() => {
13041
+ renderToolResult(event.toolName ?? "unknown", event.success ?? false, event.content ?? "");
13042
+ if (voice?.enabled && !(event.success ?? true)) {
13043
+ const desc = describeToolResult(event.toolName ?? "unknown", false);
13044
+ if (desc) {
13045
+ renderVoiceText(desc);
13046
+ voice.speak(desc);
13047
+ }
12835
13048
  }
12836
- }
13049
+ });
12837
13050
  break;
12838
13051
  case "model_response":
12839
13052
  if (config.verbose && !stream?.enabled && event.content) {
12840
- renderAssistantText(event.content);
13053
+ contentWrite(() => renderAssistantText(event.content));
12841
13054
  }
12842
13055
  break;
12843
13056
  case "stream_start":
12844
- if (stream?.enabled)
13057
+ if (stream?.enabled) {
13058
+ if (statusBar?.isActive)
13059
+ statusBar.beginContentWrite();
12845
13060
  stream.renderer.onStreamStart();
13061
+ }
12846
13062
  break;
12847
13063
  case "stream_token":
12848
13064
  if (stream?.enabled) {
@@ -12850,16 +13066,24 @@ function startTask(task, config, repoRoot, voice, stream, taskStores, bruteForce
12850
13066
  }
12851
13067
  break;
12852
13068
  case "stream_end":
12853
- if (stream?.enabled)
13069
+ if (stream?.enabled) {
12854
13070
  stream.renderer.onStreamEnd();
13071
+ if (statusBar?.isActive)
13072
+ statusBar.endContentWrite();
13073
+ }
12855
13074
  break;
12856
13075
  case "user_interrupt":
12857
13076
  break;
12858
13077
  case "compaction":
12859
- renderWarning(`Context compacted: ${event.content}`);
13078
+ contentWrite(() => renderWarning(`Context compacted: ${event.content}`));
12860
13079
  break;
12861
13080
  case "error":
12862
- renderError(event.content ?? "Unknown error");
13081
+ contentWrite(() => renderError(event.content ?? "Unknown error"));
13082
+ break;
13083
+ case "token_usage":
13084
+ if (statusBar && event.tokenUsage) {
13085
+ statusBar.updateMetrics(event.tokenUsage);
13086
+ }
12863
13087
  break;
12864
13088
  case "complete":
12865
13089
  break;
@@ -12868,18 +13092,20 @@ function startTask(task, config, repoRoot, voice, stream, taskStores, bruteForce
12868
13092
  const sessionId = `${Date.now()}-${Math.random().toString(36).slice(2, 8)}`;
12869
13093
  const promise = runner.run(task, `Working directory: ${repoRoot}`).then((result) => {
12870
13094
  const tokens = { total: result.totalTokens, estimated: result.estimatedTokens };
12871
- if (result.completed) {
12872
- renderTaskComplete(result.summary, result.turns, result.toolCalls, result.durationMs, tokens);
12873
- if (voice?.enabled && result.summary) {
12874
- const ttsText = result.summary.length > 300 ? result.summary.slice(0, 300) + "..." : result.summary;
12875
- voice.speak(`Task complete. ${ttsText}`);
12876
- }
12877
- } else {
12878
- renderTaskIncomplete(result.turns, result.toolCalls, result.durationMs, tokens);
12879
- if (voice?.enabled) {
12880
- voice.speak("Task did not complete.");
13095
+ contentWrite(() => {
13096
+ if (result.completed) {
13097
+ renderTaskComplete(result.summary, result.turns, result.toolCalls, result.durationMs, tokens);
13098
+ if (voice?.enabled && result.summary) {
13099
+ const ttsText = result.summary.length > 300 ? result.summary.slice(0, 300) + "..." : result.summary;
13100
+ voice.speak(`Task complete. ${ttsText}`);
13101
+ }
13102
+ } else {
13103
+ renderTaskIncomplete(result.turns, result.toolCalls, result.durationMs, tokens);
13104
+ if (voice?.enabled) {
13105
+ voice.speak("Task did not complete.");
13106
+ }
12881
13107
  }
12882
- }
13108
+ });
12883
13109
  try {
12884
13110
  saveSession(repoRoot, {
12885
13111
  id: sessionId,
@@ -13008,6 +13234,11 @@ async function startInteractive(config, repoPath) {
13008
13234
  carouselLines
13009
13235
  });
13010
13236
  }
13237
+ const statusBar = new StatusBar();
13238
+ if (process.stdout.isTTY) {
13239
+ const scrollTop = carouselLines > 0 ? carouselLines + 1 : 1;
13240
+ statusBar.activate(scrollTop);
13241
+ }
13011
13242
  const voiceEngine = new VoiceEngine();
13012
13243
  const streamRenderer = new StreamRenderer();
13013
13244
  if (savedSettings.voice) {
@@ -13036,10 +13267,12 @@ async function startInteractive(config, repoPath) {
13036
13267
  historySize: 100
13037
13268
  });
13038
13269
  process.stdout.on("resize", () => {
13270
+ statusBar.handleResize();
13039
13271
  if (!carouselRetired) {
13040
13272
  const termRows = process.stdout.rows ?? 24;
13041
13273
  const scrollStart = carousel.reservedRows + 1;
13042
- process.stdout.write(`\x1B[${scrollStart};${termRows}r`);
13274
+ const scrollEnd = Math.max(termRows - statusBar.reservedRows, scrollStart + 1);
13275
+ process.stdout.write(`\x1B[${scrollStart};${scrollEnd}r`);
13043
13276
  process.stdout.write(`\x1B[${scrollStart};1H\x1B[J`);
13044
13277
  renderRichHeader({
13045
13278
  model: currentConfig.model,
@@ -13052,8 +13285,20 @@ async function startInteractive(config, repoPath) {
13052
13285
  });
13053
13286
  function showPrompt() {
13054
13287
  rl.setPrompt(activeTask ? activePrompt : idlePrompt);
13288
+ if (statusBar.isActive) {
13289
+ statusBar.positionAtInput();
13290
+ }
13055
13291
  rl.prompt();
13056
13292
  }
13293
+ function writeContent(fn) {
13294
+ if (statusBar.isActive) {
13295
+ statusBar.beginContentWrite();
13296
+ fn();
13297
+ statusBar.endContentWrite();
13298
+ } else {
13299
+ fn();
13300
+ }
13301
+ }
13057
13302
  const commandCtx = {
13058
13303
  get config() {
13059
13304
  return currentConfig;
@@ -13076,8 +13321,11 @@ async function startInteractive(config, repoPath) {
13076
13321
  clearScreen() {
13077
13322
  process.stdout.write("\x1B[2J\x1B[H");
13078
13323
  renderCompactHeader(currentConfig.model);
13324
+ if (statusBar.isActive)
13325
+ statusBar.handleResize();
13079
13326
  },
13080
13327
  exit() {
13328
+ statusBar.deactivate();
13081
13329
  if (carousel.isRunning)
13082
13330
  carousel.stop();
13083
13331
  voiceEngine.dispose();
@@ -13130,23 +13378,23 @@ async function startInteractive(config, repoPath) {
13130
13378
  },
13131
13379
  dreamStart(mode) {
13132
13380
  if (activeTask) {
13133
- renderWarning("Cannot dream while a task is running.");
13381
+ writeContent(() => renderWarning("Cannot dream while a task is running."));
13134
13382
  return;
13135
13383
  }
13136
13384
  dreamEngine = new DreamEngine(currentConfig, repoRoot);
13137
- renderDreamStart(mode);
13385
+ writeContent(() => renderDreamStart(mode));
13138
13386
  dreamEngine.start(mode, (event) => {
13139
13387
  if (event.type === "tool_call") {
13140
- renderToolCallStart(event.toolName ?? "unknown", event.toolArgs ?? {});
13388
+ writeContent(() => renderToolCallStart(event.toolName ?? "unknown", event.toolArgs ?? {}));
13141
13389
  } else if (event.type === "tool_result") {
13142
- renderToolResult(event.toolName ?? "unknown", event.success ?? false, event.content ?? "");
13390
+ writeContent(() => renderToolResult(event.toolName ?? "unknown", event.success ?? false, event.content ?? ""));
13143
13391
  }
13144
13392
  }).then((state) => {
13145
- renderDreamEnd(state);
13393
+ writeContent(() => renderDreamEnd(state));
13146
13394
  dreamEngine = null;
13147
13395
  showPrompt();
13148
13396
  }).catch((err) => {
13149
- renderError(`Dream error: ${err instanceof Error ? err.message : String(err)}`);
13397
+ writeContent(() => renderError(`Dream error: ${err instanceof Error ? err.message : String(err)}`));
13150
13398
  dreamEngine = null;
13151
13399
  showPrompt();
13152
13400
  });
@@ -13173,7 +13421,7 @@ async function startInteractive(config, repoPath) {
13173
13421
  `Tool calls completed before update: ${pendingTask.toolCallCount}`,
13174
13422
  "Continue where you left off. Do not repeat work already done."
13175
13423
  ].filter(Boolean).join("\n\n");
13176
- renderInfo(`Resuming task: ${pendingTask.prompt.slice(0, 100)}${pendingTask.prompt.length > 100 ? "..." : ""}`);
13424
+ writeContent(() => renderInfo(`Resuming task: ${pendingTask.prompt.slice(0, 100)}${pendingTask.prompt.length > 100 ? "..." : ""}`));
13177
13425
  rl.emit("line", resumeContext);
13178
13426
  }, 100);
13179
13427
  }
@@ -13213,14 +13461,14 @@ ${c2.dim("Goodbye!")}
13213
13461
  const ext = extname5(cleanPath).toLowerCase();
13214
13462
  const mime = ext === ".png" ? "image/png" : ext === ".gif" ? "image/gif" : ext === ".webp" ? "image/webp" : "image/jpeg";
13215
13463
  activeTask.runner.injectImage(base64, mime, `User shared image: ${cleanPath}`);
13216
- renderUserInterrupt(`[Image: ${cleanPath}]`);
13464
+ writeContent(() => renderUserInterrupt(`[Image: ${cleanPath}]`));
13217
13465
  } catch {
13218
13466
  activeTask.runner.injectUserMessage(input);
13219
- renderUserInterrupt(input);
13467
+ writeContent(() => renderUserInterrupt(input));
13220
13468
  }
13221
13469
  } else {
13222
13470
  activeTask.runner.injectUserMessage(input);
13223
- renderUserInterrupt(input);
13471
+ writeContent(() => renderUserInterrupt(input));
13224
13472
  }
13225
13473
  showPrompt();
13226
13474
  return;
@@ -13236,8 +13484,10 @@ ${c2.dim("Goodbye!")}
13236
13484
  if (!carouselRetired && carousel.isRunning) {
13237
13485
  carousel.stop();
13238
13486
  carouselRetired = true;
13487
+ if (statusBar.isActive)
13488
+ statusBar.handleResize();
13239
13489
  }
13240
- renderUserMessage(isImage ? `[Image: ${cleanPath}]` : fullInput);
13490
+ writeContent(() => renderUserMessage(isImage ? `[Image: ${cleanPath}]` : fullInput));
13241
13491
  lastSubmittedPrompt = fullInput;
13242
13492
  try {
13243
13493
  const task = startTask(fullInput, currentConfig, repoRoot, voiceEngine, {
@@ -13248,13 +13498,13 @@ ${c2.dim("Goodbye!")}
13248
13498
  taskMemoryStore: taskMemoryStore ?? void 0,
13249
13499
  failureStore: failureStore ?? void 0,
13250
13500
  toolPatternStore: toolPatternStore ?? void 0
13251
- }, bruteForceEnabled);
13501
+ }, bruteForceEnabled, statusBar);
13252
13502
  activeTask = task;
13253
13503
  showPrompt();
13254
13504
  await task.promise;
13255
13505
  } catch (err) {
13256
13506
  const errMsg = err instanceof Error ? err.message : String(err);
13257
- renderError(errMsg);
13507
+ writeContent(() => renderError(errMsg));
13258
13508
  if (failureStore) {
13259
13509
  try {
13260
13510
  const { createHash: createHash2 } = await import("node:crypto");
@@ -13286,7 +13536,7 @@ ${c2.dim("Goodbye!")}
13286
13536
  try {
13287
13537
  const updateInfo = await checkForUpdate(version);
13288
13538
  if (updateInfo) {
13289
- renderInfo(`Update available: v${version} \u2192 v${updateInfo.latestVersion}. Installing...`);
13539
+ writeContent(() => renderInfo(`Update available: v${version} \u2192 v${updateInfo.latestVersion}. Installing...`));
13290
13540
  const ok = performSilentUpdate();
13291
13541
  if (ok) {
13292
13542
  if (lastSubmittedPrompt) {
@@ -13302,16 +13552,17 @@ ${c2.dim("Goodbye!")}
13302
13552
  } catch {
13303
13553
  }
13304
13554
  }
13305
- renderInfo(`Updated to v${updateInfo.latestVersion}. Reloading...
13306
- `);
13555
+ writeContent(() => renderInfo(`Updated to v${updateInfo.latestVersion}. Reloading...
13556
+ `));
13307
13557
  process.env.__OA_RESUMED = "1";
13558
+ statusBar.deactivate();
13308
13559
  if (carousel.isRunning)
13309
13560
  carousel.stop();
13310
13561
  voiceEngine.dispose();
13311
13562
  rl.close();
13312
13563
  restartProcess();
13313
13564
  } else {
13314
- renderWarning("Auto-update failed. Use /update to retry manually.");
13565
+ writeContent(() => renderWarning("Auto-update failed. Use /update to retry manually."));
13315
13566
  }
13316
13567
  }
13317
13568
  } catch {
@@ -13320,6 +13571,7 @@ ${c2.dim("Goodbye!")}
13320
13571
  showPrompt();
13321
13572
  });
13322
13573
  rl.on("close", () => {
13574
+ statusBar.deactivate();
13323
13575
  process.stdout.write(`
13324
13576
  ${c2.dim("Goodbye!")}
13325
13577
 
@@ -13329,11 +13581,11 @@ ${c2.dim("Goodbye!")}
13329
13581
  rl.on("SIGINT", () => {
13330
13582
  if (activeTask) {
13331
13583
  activeTask.runner.abort();
13332
- renderTaskAborted();
13584
+ writeContent(() => renderTaskAborted());
13333
13585
  } else {
13334
- process.stdout.write(`
13586
+ writeContent(() => process.stdout.write(`
13335
13587
  ${c2.dim("(Use /quit to exit)")}
13336
- `);
13588
+ `));
13337
13589
  }
13338
13590
  showPrompt();
13339
13591
  });
@@ -13388,6 +13640,7 @@ var init_interactive = __esm({
13388
13640
  init_stream_renderer();
13389
13641
  init_edit_history();
13390
13642
  init_dream_engine();
13643
+ init_status_bar();
13391
13644
  taskManager = new BackgroundTaskManager();
13392
13645
  }
13393
13646
  });
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "open-agents-ai",
3
- "version": "0.12.5",
3
+ "version": "0.12.7",
4
4
  "description": "AI coding agent powered by open-source models (Ollama/vLLM) — interactive TUI with agentic tool-calling loop",
5
5
  "type": "module",
6
6
  "main": "./dist/index.js",