@ouro.bot/cli 0.1.0-alpha.505 → 0.1.0-alpha.507

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/changelog.json CHANGED
@@ -1,6 +1,21 @@
1
1
  {
2
2
  "_note": "This changelog is maintained as part of the PR/version-bump workflow. Agent-curated, not auto-generated. Agents read this file directly via read_file to understand what changed between versions.",
3
3
  "versions": [
4
+ {
5
+ "version": "0.1.0-alpha.507",
6
+ "changes": [
7
+ "Drop the second of two consecutive assistant messages with identical (trim-equal) content in `repairSessionMessages`. The existing back-to-back-assistant repair concatenated content with `\\n\\n`, which produced visible duplicate text on the surface when the second message was a retry/double-persist artifact (a common shape when a turn is interrupted mid-save and re-emitted).",
8
+ "Behavior change is narrow: trim-equality only, both sides must have non-empty content, and the second assistant must not have its own tool_calls. Different content still concatenates (legitimate continuation). Empty-content assistants still concatenate (the existing test case for null+undefined is preserved). Emits `mind.session_duplicate_assistant_dropped` (info) with the count so #622 (nerves-review) can show how often it fires in real traffic. 2 new tests cover the dedup path and the no-false-positive case (different content concatenates as before)."
9
+ ]
10
+ },
11
+ {
12
+ "version": "0.1.0-alpha.506",
13
+ "changes": [
14
+ "Detect duplicate tool_call_id across assistant messages in `validateSessionMessages`. MiniMax-M2.7 emits canonical tool_call ids of the form `call_function_<hash>_<n>` and reuses the same id across turns when the same function gets called — which causes provider rejections on replay because tool_call_id is supposed to be unique per request. The session sanitize pass already had position-aware orphan detection (#613) and inline-reasoning strip (#612); this adds the third member of the family — collision detection.",
15
+ "New exported `detectDuplicateToolCallIds(messages)` returns `{ id, indices }[]` for each tool_call_id that appears in multiple assistant messages. Same-message duplicates (one assistant calling the same id twice) are not flagged — those are a legitimate parallel-call shape. `validateSessionMessages` now folds collisions into its violations list with a message that calls out MiniMax specifically so operators reading nerves know what they're looking at.",
16
+ "Detection only — no rewriting yet, since rewriting tool_call_ids and the matching tool_results requires careful pairing logic that risks regression. The collision is visible to operators via the `mind.session_invariant_violation` nerves event the sanitize pass already emits when violations are present, and the existing `nerves-review` CLI from #622 makes it filterable. 3 new tests cover collision detection, single-message parallel-call shape (no false positive), and the all-distinct happy path."
17
+ ]
18
+ },
4
19
  {
5
20
  "version": "0.1.0-alpha.505",
6
21
  "changes": [
@@ -35,6 +35,7 @@ var __importStar = (this && this.__importStar) || (function () {
35
35
  Object.defineProperty(exports, "__esModule", { value: true });
36
36
  exports.normalizeContinuityState = normalizeContinuityState;
37
37
  exports.validateSessionMessages = validateSessionMessages;
38
+ exports.detectDuplicateToolCallIds = detectDuplicateToolCallIds;
38
39
  exports.repairSessionMessages = repairSessionMessages;
39
40
  exports.migrateToolNames = migrateToolNames;
40
41
  exports.sanitizeProviderMessages = sanitizeProviderMessages;
@@ -305,26 +306,86 @@ function validateSessionMessages(messages) {
305
306
  sawToolResultSincePrevAssistant = false;
306
307
  prevNonToolRole = msg.role;
307
308
  }
309
+ for (const collision of detectDuplicateToolCallIds(messages)) {
310
+ violations.push(`duplicate tool_call_id '${collision.id}' across assistant messages at indices ${collision.indices.join(", ")} — provider may reject (MiniMax canonicalizes call_function_<hash>_<n> across turns)`);
311
+ }
308
312
  return violations;
309
313
  }
314
+ /**
315
+ * Detect tool_call_ids that appear in more than one assistant message
316
+ * within the conversation. MiniMax-M2.7 in particular emits canonical
317
+ * ids of the form `call_function_<hash>_<n>` and reuses the same id
318
+ * across turns when the same function is called — which causes provider
319
+ * rejections on replay because tool_call_id is supposed to be unique
320
+ * per request. We don't (yet) rewrite these here; this function exists
321
+ * so the sanitize pipeline can surface the collision through nerves
322
+ * (`mind.session_invariant_violation`) and operators can decide.
323
+ *
324
+ * Same-message duplicates (one assistant calling the same id twice)
325
+ * are not collisions — they're a legitimate parallel call shape and
326
+ * would be handled by the assistant's own emit logic. We only flag
327
+ * cross-message reuse.
328
+ */
329
+ function detectDuplicateToolCallIds(messages) {
330
+ const idsByFirstIndex = new Map();
331
+ for (let i = 0; i < messages.length; i++) {
332
+ const msg = normalizeMessage(messages[i]);
333
+ if (msg.role !== "assistant")
334
+ continue;
335
+ const seenInThisMessage = new Set();
336
+ for (const call of msg.toolCalls) {
337
+ if (!call.id || seenInThisMessage.has(call.id))
338
+ continue;
339
+ seenInThisMessage.add(call.id);
340
+ const indices = idsByFirstIndex.get(call.id) ?? [];
341
+ indices.push(i);
342
+ idsByFirstIndex.set(call.id, indices);
343
+ }
344
+ }
345
+ const collisions = [];
346
+ for (const [id, indices] of idsByFirstIndex) {
347
+ if (indices.length > 1)
348
+ collisions.push({ id, indices });
349
+ }
350
+ return collisions;
351
+ }
310
352
  function repairSessionMessages(messages) {
311
353
  const normalized = messages.map(normalizeMessage);
312
354
  const violations = validateSessionMessages(messages);
313
355
  if (violations.length === 0)
314
356
  return normalized.map(toProviderMessage);
315
357
  const result = [];
358
+ let duplicateAssistantsDropped = 0;
316
359
  for (const msg of normalized) {
317
360
  if (msg.role === "assistant" && result.length > 0) {
318
361
  const prev = result[result.length - 1];
319
362
  if (prev.role === "assistant" && prev.toolCalls.length === 0) {
320
363
  const prevContent = contentText(prev.content);
321
364
  const curContent = contentText(msg.content);
365
+ // Drop the second of two consecutive assistants when the content is
366
+ // byte-identical (after trim) — that's a retry/double-persist artifact,
367
+ // not legitimate continuation. Concatenating them produced visible
368
+ // duplicate text in surfaces. Empty strings still concatenate (could
369
+ // be "" + real content).
370
+ if (prevContent.trim().length > 0 && prevContent.trim() === curContent.trim() && msg.toolCalls.length === 0) {
371
+ duplicateAssistantsDropped += 1;
372
+ continue;
373
+ }
322
374
  prev.content = `${prevContent}\n\n${curContent}`;
323
375
  continue;
324
376
  }
325
377
  }
326
378
  result.push(msg);
327
379
  }
380
+ if (duplicateAssistantsDropped > 0) {
381
+ (0, runtime_1.emitNervesEvent)({
382
+ level: "info",
383
+ event: "mind.session_duplicate_assistant_dropped",
384
+ component: "mind",
385
+ message: "dropped consecutive assistant messages with identical content (retry/double-persist artifact)",
386
+ meta: { count: duplicateAssistantsDropped },
387
+ });
388
+ }
328
389
  (0, runtime_1.emitNervesEvent)({
329
390
  level: "info",
330
391
  event: "mind.session_invariant_repair",
@@ -33,7 +33,7 @@ var __importStar = (this && this.__importStar) || (function () {
33
33
  };
34
34
  })();
35
35
  Object.defineProperty(exports, "__esModule", { value: true });
36
- exports.validateSessionMessages = exports.repairSessionMessages = exports.migrateToolNames = void 0;
36
+ exports.validateSessionMessages = exports.repairSessionMessages = exports.migrateToolNames = exports.detectDuplicateToolCallIds = void 0;
37
37
  exports.trimMessages = trimMessages;
38
38
  exports.saveSession = saveSession;
39
39
  exports.appendSyntheticAssistantMessage = appendSyntheticAssistantMessage;
@@ -50,6 +50,7 @@ const fs = __importStar(require("fs"));
50
50
  const path = __importStar(require("path"));
51
51
  const token_estimate_1 = require("./token-estimate");
52
52
  var session_events_2 = require("../heart/session-events");
53
+ Object.defineProperty(exports, "detectDuplicateToolCallIds", { enumerable: true, get: function () { return session_events_2.detectDuplicateToolCallIds; } });
53
54
  Object.defineProperty(exports, "migrateToolNames", { enumerable: true, get: function () { return session_events_2.migrateToolNames; } });
54
55
  Object.defineProperty(exports, "repairSessionMessages", { enumerable: true, get: function () { return session_events_2.repairSessionMessages; } });
55
56
  Object.defineProperty(exports, "validateSessionMessages", { enumerable: true, get: function () { return session_events_2.validateSessionMessages; } });
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@ouro.bot/cli",
3
- "version": "0.1.0-alpha.505",
3
+ "version": "0.1.0-alpha.507",
4
4
  "main": "dist/heart/daemon/ouro-entry.js",
5
5
  "bin": {
6
6
  "cli": "dist/heart/daemon/ouro-bot-entry.js",
@@ -37,8 +37,7 @@
37
37
  "lint": "eslint src/",
38
38
  "release:preflight": "node scripts/release-preflight.cjs",
39
39
  "release:smoke": "node scripts/release-smoke.cjs",
40
- "audit:nerves": "npm run build && node dist/nerves/coverage/cli-main.js",
41
- "session:stats": "npm run build && node dist/heart/session-stats-cli-main.js"
40
+ "audit:nerves": "npm run build && node dist/nerves/coverage/cli-main.js"
42
41
  },
43
42
  "dependencies": {
44
43
  "@anthropic-ai/sdk": "^0.78.0",