open-agents-ai 0.187.281 → 0.187.282

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/index.js +107 -2
  2. package/package.json +1 -1
package/dist/index.js CHANGED
@@ -328288,6 +328288,23 @@ function computeSignalFromText(text, confidence) {
 function truncateForLog(s2, n2) {
   return s2.length <= n2 ? s2 : s2.slice(0, n2 - 1) + "…";
 }
+function extractToolJson(text) {
+  const lines = text.split(/\r?\n/);
+  for (const line of lines) {
+    const t2 = line.trim();
+    if (!t2.startsWith("{") || !t2.endsWith("}")) continue;
+    try {
+      const obj = JSON.parse(t2);
+      if (typeof obj.tool === "string") {
+        const name10 = obj.tool;
+        const args = obj.args && typeof obj.args === "object" ? obj.args : {};
+        return { name: name10, args };
+      }
+    } catch {
+    }
+  }
+  return null;
+}
 var VAD_SILENCE_MS, MAX_SEGMENT_MS, MAX_CONTEXT_TURNS, SYSTEM_PROMPT2, MIN_SIGNAL_SCORE, NOISE_ONLY_RE, VoiceChatSession;
 var init_voicechat = __esm({
   "packages/cli/src/tui/voicechat.ts"() {
@@ -328295,7 +328312,7 @@ var init_voicechat = __esm({
     VAD_SILENCE_MS = 2e3;
     MAX_SEGMENT_MS = 6500;
     MAX_CONTEXT_TURNS = 20;
-    SYSTEM_PROMPT2 = `You are a voice assistant having a live spoken conversation. Keep responses extremely brief — 1-2 sentences max. You're speaking aloud, not writing. Be conversational, direct, and helpful. Don't use markdown, bullet points, or formatting — just natural speech. If you don't know something, say so briefly. Do not over-think respond quickly and concisely.`;
+    SYSTEM_PROMPT2 = `You are a voice assistant having a live spoken conversation. Keep responses extremely brief — 1-2 sentences max. You're speaking aloud, not writing. Be conversational, direct, and helpful. Don't use markdown or formatting — just natural speech. Never invent environment facts (like cwd, OS, specs). If you need a precise fact from the main agent, output on a single line EXACTLY one JSON object with fields {"tool": string, "args": object} and nothing else; then wait for the tool result before answering. Prefer tools for factual queries; otherwise, answer directly.`;
     MIN_SIGNAL_SCORE = 0.4;
     NOISE_ONLY_RE = /^(?:[.·…\s,;:!?\-–—_()\[\]{}"'`]+|(?:uh|um|erm|hmm|mm+|uhh+|umm+)[\s.!?]*)+$/i;
     VoiceChatSession = class extends EventEmitter10 {
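
Per the rewritten SYSTEM_PROMPT2, the model signals a tool request by emitting exactly one JSON object on a single line and nothing else, which is precisely the shape extractToolJson above accepts. Against the relay defined later in this diff, such a line might look like (the path and max values are illustrative):

    {"tool": "voice_read_file", "args": {"path": "README.md", "max": 1024}}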
@@ -328307,6 +328324,7 @@ var init_voicechat = __esm({
       runner;
       verbose = false;
       debugSnr = false;
+      toolRelay = null;
       // State machine
       _state = "IDLE";
       active = false;
@@ -328341,6 +328359,7 @@ var init_voicechat = __esm({
         this.runner = opts.runner ?? null;
         this.verbose = Boolean(opts.verbose);
         this.debugSnr = Boolean(opts.debugSnr);
+        this.toolRelay = opts.toolRelay ?? null;
         this.onStatus = opts.onStatus ?? (() => {
         });
         this.onUserSpeech = opts.onUserSpeech ?? (() => {
@@ -328545,7 +328564,30 @@ var init_voicechat = __esm({
         if (this.verbose) this.onStatus("Thinking...");
         this.abortController = new AbortController();
         try {
-          const response = await this.streamOllamaInference(this.abortController.signal);
+          if (this.toolRelay?.contextSnapshot) {
+            try {
+              const snap = await Promise.resolve(this.toolRelay.contextSnapshot());
+              if (snap && snap.trim()) {
+                this.context.push({ role: "system", content: `Context snapshot (read-only):
+${snap.trim()}` });
+              }
+            } catch {
+            }
+          }
+          let response = await this.streamOllamaInference(this.abortController.signal);
+          const toolReq = extractToolJson(response);
+          if (toolReq && this.toolRelay) {
+            const { name: name10, args } = toolReq;
+            let toolOutput = "";
+            try {
+              toolOutput = await this.toolRelay.call(name10, args);
+            } catch (e2) {
+              toolOutput = `Tool ${name10} failed: ${e2 instanceof Error ? e2.message : String(e2)}`;
+            }
+            this.context.push({ role: "system", content: `Tool ${name10} result (authoritative):
+${toolOutput}` });
+            response = await this.streamOllamaInference(this.abortController.signal);
+          }
           if (!this.active) return;
           if (response.trim()) {
             this.context.push({ role: "assistant", content: response.trim() });
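
This hunk adds at most one tool round-trip per utterance: inject an optional context snapshot, run inference, and if the response contains a tool request, call the relay, push the result as an authoritative system message, and run inference once more; a tool request in the second response is not dispatched again. The ToolRelay contract implied by this usage can be sketched as follows (the interface name and typings are inferred, not present in the untyped bundle):

    // Inferred shape; either member may be sync or async, since `call` is
    // awaited directly and `contextSnapshot` is wrapped in Promise.resolve().
    interface ToolRelay {
      call(name: string, args: Record<string, unknown>): string | Promise<string>;
      contextSnapshot?(): string | Promise<string>;
    }

Note that failures are swallowed at both points: a failed snapshot is simply omitted, and a failed relay call is surfaced to the model as a `Tool ${name10} failed: ...` result rather than aborting the turn.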
@@ -333129,6 +333171,69 @@ Respond concisely and safely. Remember: you are talking to the general public.`;
     model: currentConfig.model,
     apiKey: currentConfig.apiKey,
     runner: summaryRunner,
+    toolRelay: {
+      async call(name10, args) {
+        try {
+          if (name10 === "voice_env") {
+            const os8 = __require("node:os");
+            const p2 = __require("node:process");
+            const info = {
+              cwd: repoRoot,
+              platform: os8.platform(),
+              arch: os8.arch(),
+              cpu: (os8.cpus() || [])[0]?.model || "unknown",
+              memGB: Math.round(os8.totalmem() / (1024 * 1024 * 1024)),
+              node: p2.version,
+              model: currentConfig.model
+            };
+            return JSON.stringify(info, null, 2);
+          }
+          if (name10 === "voice_status") {
+            const status = activeTask ? {
+              active: true,
+              toolCalls: activeTask.toolCallCount,
+              filesTouched: Array.from(activeTask.filesTouched).slice(-20)
+            } : { active: false };
+            return JSON.stringify(status, null, 2);
+          }
+          if (name10 === "voice_list_files") {
+            const baseDir = String(args?.dir ?? ".");
+            const { readdirSync: readdirSync31, statSync: statSync25 } = __require("node:fs");
+            const { join: join106, resolve: resolve40 } = __require("node:path");
+            const base3 = baseDir.startsWith("/") ? baseDir : resolve40(join106(repoRoot, baseDir));
+            const items = readdirSync31(base3).slice(0, 200).map((f2) => {
+              const s2 = statSync25(join106(base3, f2));
+              return { name: f2, dir: s2.isDirectory(), size: s2.size };
+            });
+            return JSON.stringify({ dir: base3, items }, null, 2);
+          }
+          if (name10 === "voice_read_file") {
+            const { readFileSync: readFileSync68 } = __require("node:fs");
+            const { join: join106, resolve: resolve40 } = __require("node:path");
+            const rel = String(args?.path || "");
+            const max = Math.max(0, Math.min(8192, Number(args?.max) || 2048));
+            const full = rel.startsWith("/") ? rel : resolve40(join106(repoRoot, rel));
+            const buf = readFileSync68(full);
+            const txt = buf.toString("utf8");
+            return txt.length > max ? txt.slice(0, max) + `
+... [truncated ${txt.length - max} chars]` : txt;
+          }
+          return `Unknown tool: ${name10}`;
+        } catch (e2) {
+          return `Error: ${e2?.message || String(e2)}`;
+        }
+      },
+      contextSnapshot() {
+        const parts = [];
+        parts.push(`cwd: ${repoRoot}`);
+        if (activeTask) {
+          parts.push(`active: yes, toolCalls: ${activeTask.toolCallCount}, filesTouched: ${activeTask.filesTouched.size}`);
+        } else {
+          parts.push("active: no");
+        }
+        return parts.join("\n");
+      }
+    },
     verbose: false,
     debugSnr: false,
     onStatus(msg) {
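
The concrete relay wires four read-only tools against the enclosing scope (repoRoot, activeTask, currentConfig): voice_env, voice_status, voice_list_files (capped at 200 entries), and voice_read_file (output capped at 8192 chars). A sketch of how dispatched requests flow through it, assuming the object literal above is bound to a toolRelay variable and the argument values are illustrative:

    // Relative paths resolve against repoRoot; absolute paths pass through as-is.
    const listing = await toolRelay.call("voice_list_files", { dir: "src" });
    // -> JSON string like {"dir": "<repoRoot>/src", "items": [{"name": ..., "dir": ..., "size": ...}, ...]}

    const head = await toolRelay.call("voice_read_file", { path: "package.json", max: 512 });
    // -> first 512 chars, plus a "... [truncated N chars]" marker if the file is longer.

    // Unknown names and thrown errors both come back as plain strings,
    // so the voice loop never handles a rejected promise from this path.
    await toolRelay.call("voice_git_log", {}); // (hypothetical name)
    // -> "Unknown tool: voice_git_log"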
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "open-agents-ai",
-  "version": "0.187.281",
+  "version": "0.187.282",
   "description": "AI coding agent powered by open-source models (Ollama/vLLM) — interactive TUI with agentic tool-calling loop",
   "type": "module",
   "main": "./dist/index.js",