@yemi33/minions 0.1.1743 → 0.1.1745

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,10 @@
1
1
  # Changelog
2
2
 
3
+ ## 0.1.1745 (2026-05-06)
4
+
5
+ ### Fixes
6
+ - fix doc-chat failure bugs from Ripley investigation (#2103)
7
+
3
8
  ## 0.1.1743 (2026-05-06)
4
9
 
5
10
  ### Fixes
@@ -499,7 +499,7 @@ async function _processQaMessage(message, selection, opts) {
499
499
  selection: selection,
500
500
  filePath: capturedFilePath || null,
501
501
  model: window._lastStatus?.autoMode?.ccModel || undefined,
502
- contentHash: capturedDocContext.content ? capturedDocContext.content.length + ':' + capturedDocContext.content.charCodeAt(0) + ':' + capturedDocContext.content.charCodeAt(capturedDocContext.content.length - 1) : undefined,
502
+ contentHash: capturedDocContext.content ? (function(s) { const m = Math.floor(s.length / 2); return s.length + ':' + s.charCodeAt(0) + ':' + s.charCodeAt(m) + ':' + s.charCodeAt(s.length - 1); })(capturedDocContext.content) : undefined,
503
503
  }),
504
504
  });
505
505
  let sessionDocContext = { ...capturedDocContext };
@@ -521,6 +521,12 @@ async function _processQaMessage(message, selection, opts) {
521
521
  async function _qaHandleStreamEvent(evt) {
522
522
  if (!evt || !evt.type) return;
523
523
  if (evt.type === 'heartbeat') return;
524
+ if (evt.type === 'progress') {
525
+ // Backend is retrying (attempt 2 or 3) — reset stall watchdog so the next
526
+ // attempt gets its own full stall window instead of sharing the first attempt's.
527
+ _resetQaStreamWatchdog();
528
+ return;
529
+ }
524
530
  if (evt.type === 'chunk') {
525
531
  _resetQaStreamWatchdog();
526
532
  streamedText = evt.text || '';
package/dashboard.js CHANGED
@@ -2377,7 +2377,7 @@ async function ccCall(message, { store = 'cc', sessionKey, extraContext, label =
2377
2377
  return result;
2378
2378
  }
2379
2379
 
2380
- async function ccCallStreaming(message, { store = 'cc', sessionKey, extraContext, label = 'command-center', timeout = CC_CALL_TIMEOUT_MS, maxTurns, allowedTools = 'Bash,Read,Write,Edit,Glob,Grep,WebFetch,WebSearch', skipStatePreamble = false, model, onAbortReady, onChunk, onToolUse, systemPrompt = CC_STATIC_SYSTEM_PROMPT } = {}) {
2380
+ async function ccCallStreaming(message, { store = 'cc', sessionKey, extraContext, label = 'command-center', timeout = CC_CALL_TIMEOUT_MS, maxTurns, allowedTools = 'Bash,Read,Write,Edit,Glob,Grep,WebFetch,WebSearch', skipStatePreamble = false, model, onAbortReady, onChunk, onToolUse, onRetry, systemPrompt = CC_STATIC_SYSTEM_PROMPT } = {}) {
2381
2381
  if (!maxTurns) maxTurns = CONFIG.engine?.ccMaxTurns || shared.ENGINE_DEFAULTS.ccMaxTurns;
2382
2382
  if (!model) model = CONFIG.engine?.ccModel || shared.ENGINE_DEFAULTS.ccModel;
2383
2383
  const ccEffort = CONFIG.engine?.ccEffort || shared.ENGINE_DEFAULTS.ccEffort;
@@ -2430,6 +2430,7 @@ async function ccCallStreaming(message, { store = 'cc', sessionKey, extraContext
2430
2430
  }
2431
2431
  }
2432
2432
 
2433
+ if (onRetry) onRetry(2);
2433
2434
  const freshPrompt = buildPrompt();
2434
2435
  const p2 = llm.callLLMStreaming(freshPrompt, systemPrompt, {
2435
2436
  timeout, label, model, maxTurns, allowedTools, effort: ccEffort, direct: true,
@@ -2450,6 +2451,7 @@ async function ccCallStreaming(message, { store = 'cc', sessionKey, extraContext
2450
2451
  if (maxTurns <= 1) return result;
2451
2452
  console.log(`[${label}] Fresh call also failed (code=${result.code}, empty=${!result.text}), retrying once more...`);
2452
2453
  await new Promise(r => setTimeout(r, 2000));
2454
+ if (onRetry) onRetry(3);
2453
2455
  const p3 = llm.callLLMStreaming(freshPrompt, systemPrompt, {
2454
2456
  timeout, label, model, maxTurns, allowedTools, effort: ccEffort, direct: true,
2455
2457
  engineConfig: CONFIG.engine,
@@ -2467,10 +2469,13 @@ async function ccCallStreaming(message, { store = 'cc', sessionKey, extraContext
2467
2469
  return result;
2468
2470
  }
2469
2471
 
2470
- // Lightweight content fingerprint — same algorithm used browser-side (no crypto needed)
2472
+ // Lightweight content fingerprint — same algorithm used browser-side (no crypto needed).
2473
+ // Uses length + first + middle + last char codes. Collisions on same-length strings with
2474
+ // identical first/middle/last chars are rare enough for a staleness check (not a security hash).
2471
2475
  function contentFingerprint(str) {
2472
2476
  if (!str) return '';
2473
- return str.length + ':' + str.charCodeAt(0) + ':' + str.charCodeAt(str.length - 1);
2477
+ const mid = Math.floor(str.length / 2);
2478
+ return str.length + ':' + str.charCodeAt(0) + ':' + str.charCodeAt(mid) + ':' + str.charCodeAt(str.length - 1);
2474
2479
  }
2475
2480
 
2476
2481
  function _parseDocChatResultText(text, { allowActions = false } = {}) {
@@ -2524,6 +2529,17 @@ function _formatDocChatContext({ document, title, filePath, selection, canEdit,
2524
2529
  return context;
2525
2530
  }
2526
2531
 
2532
+ // Map errorClass codes from the runtime adapter to actionable user-facing messages.
2533
+ // sessionPreserved=true means ccCall preserved the session — user can retry immediately.
2534
+ function _docChatErrorMessage(errorClass, sessionPreserved = false) {
2535
+ if (errorClass === 'auth-failure') return 'Claude authentication failed — run `claude auth` or check your API key, then try again.';
2536
+ if (errorClass === 'context-limit') return 'Session context is too long. Click "Clear" to start a fresh conversation.';
2537
+ if (errorClass === 'budget-exceeded') return 'API budget exceeded — check your Claude account spending limit.';
2538
+ if (errorClass === 'crash') return 'Claude runtime crashed unexpectedly. Try again.';
2539
+ if (sessionPreserved) return 'Temporary connection issue — your conversation is intact, send your message again.';
2540
+ return 'Failed to process request. Try again.';
2541
+ }
2542
+
2527
2543
  // Build the doc-chat extraContext for a single ccCall pass — refreshed on retry
2528
2544
  // so a fresh-session retry includes the full document instead of relying on the
2529
2545
  // dead session's prior turn for context.
@@ -2555,14 +2571,14 @@ async function _retryDocChatAfterResumeFailure({ result, initialPass, freshSessi
2555
2571
  }
2556
2572
 
2557
2573
  // Build the {error} envelope returned to the dashboard when doc-chat ultimately
2558
- // fails. Keeps the polite user-facing message but exposes the runtime's real
2559
- // stderr / exit code / errorClass so the UI can render the cause and so future
2560
- // failures are debuggable from logs.
2561
- function _docChatFailureResponse(label, filePath, result) {
2574
+ // fails. Surfaces an actionable user-facing message (via _docChatErrorMessage)
2575
+ // plus the runtime's real stderr / exit code / errorClass so the UI can render
2576
+ // the cause and so future failures are debuggable from logs.
2577
+ function _docChatFailureResponse(label, filePath, result, sessionPreserved = false) {
2562
2578
  const stderrTail = (result.stderr || '').slice(-2048);
2563
- console.error(`[${label}] Failed: code=${result.code}, empty=${!result.text}, filePath=${filePath}, errorClass=${result.errorClass || 'null'}, stderr=${stderrTail.slice(0, 200)}`);
2579
+ console.error(`[${label}] Failed: code=${result.code}, errorClass=${result.errorClass || 'null'}, sessionPreserved=${sessionPreserved}, empty=${!result.text}, filePath=${filePath}, stderr=${stderrTail.slice(0, 200)}`);
2564
2580
  return {
2565
- answer: 'Failed to process request. Try again.',
2581
+ answer: _docChatErrorMessage(result.errorClass, sessionPreserved),
2566
2582
  content: null,
2567
2583
  actions: [],
2568
2584
  error: {
@@ -2574,6 +2590,7 @@ function _docChatFailureResponse(label, filePath, result) {
2574
2590
  };
2575
2591
  }
2576
2592
 
2593
+
2577
2594
  // Doc-specific wrapper — adds document context, parses ---DOCUMENT---
2578
2595
  async function ccDocCall({ message, document, title, filePath, selection, canEdit, isJson, model, freshSession, onAbortReady }) {
2579
2596
  const sessionKey = filePath || title;
@@ -2628,13 +2645,14 @@ async function ccDocCall({ message, document, title, filePath, selection, canEdi
2628
2645
  }
2629
2646
 
2630
2647
  if (result.code !== 0 || !result.text) {
2631
- return _docChatFailureResponse('doc-chat', filePath, result);
2648
+ const sessionPreserved = !!(resolveSession('doc', sessionKey)?.sessionId);
2649
+ return _docChatFailureResponse('doc-chat', filePath, result, sessionPreserved);
2632
2650
  }
2633
2651
 
2634
2652
  return _parseDocChatResultText(result.text, { allowActions });
2635
2653
  }
2636
2654
 
2637
- async function ccDocCallStreaming({ message, document, title, filePath, selection, canEdit, isJson, model, freshSession, onAbortReady, onChunk, onToolUse }) {
2655
+ async function ccDocCallStreaming({ message, document, title, filePath, selection, canEdit, isJson, model, freshSession, onAbortReady, onChunk, onToolUse, onRetry }) {
2638
2656
  const sessionKey = filePath || title;
2639
2657
  const docSlice = document.slice(0, 20000);
2640
2658
 
@@ -2660,6 +2678,7 @@ async function ccDocCallStreaming({ message, document, title, filePath, selectio
2660
2678
  onAbortReady,
2661
2679
  onChunk: (text) => { if (onChunk) onChunk(_docChatDisplayText(text, { allowActions })); },
2662
2680
  onToolUse,
2681
+ onRetry,
2663
2682
  });
2664
2683
  };
2665
2684
 
@@ -2682,7 +2701,8 @@ async function ccDocCallStreaming({ message, document, title, filePath, selectio
2682
2701
  }
2683
2702
 
2684
2703
  if (result.code !== 0 || !result.text) {
2685
- return _docChatFailureResponse('doc-chat-stream', filePath, result);
2704
+ const sessionPreserved = !!(resolveSession('doc', sessionKey)?.sessionId);
2705
+ return _docChatFailureResponse('doc-chat-stream', filePath, result, sessionPreserved);
2686
2706
  }
2687
2707
 
2688
2708
  return _parseDocChatResultText(result.text, { allowActions });
@@ -4903,6 +4923,7 @@ What would you like to discuss or change? When you're happy, say "approve" and I
4903
4923
  onAbortReady: (abort) => { _docAbort = abort; },
4904
4924
  onChunk: (text) => { writeDocEvent({ type: 'chunk', text }); },
4905
4925
  onToolUse: (name, input) => { writeDocEvent({ type: 'tool', name, input: _lightToolInput(input) }); },
4926
+ onRetry: (attempt) => { writeDocEvent({ type: 'progress', attempt }); },
4906
4927
  });
4907
4928
  const actionResults = await executeDocChatActions(actions);
4908
4929
  const donePayload = (extra = {}) => ({
@@ -1,5 +1,5 @@
1
1
  {
2
2
  "runtime": "copilot",
3
3
  "models": null,
4
- "cachedAt": "2026-05-06T01:38:23.998Z"
4
+ "cachedAt": "2026-05-06T02:33:28.238Z"
5
5
  }
package/engine/llm.js CHANGED
@@ -23,8 +23,12 @@ const { resolveRuntime } = require('./runtimes');
23
23
  const MINIONS_DIR = shared.MINIONS_DIR;
24
24
  const ENGINE_DIR = path.join(MINIONS_DIR, 'engine');
25
25
  const COPILOT_TASK_COMPLETE_GRACE_MS = 3000;
26
- const LLM_EXIT_SETTLE_GRACE_MS = 1000;
27
26
  const MISSING_RUNTIME_EXIT_CODE = 78;
27
+ // When the spawned process emits 'exit' but 'close' is delayed (a detached
28
+ // grandchild inherited stdio), wait this long for trailing stdout data to
29
+ // drain into our buffer before finalizing on the exit fallback path. 'close'
30
+ // is preferred; this is a safety net so callers don't hang on inherited pipes.
31
+ const EXIT_DRAIN_FALLBACK_MS = 100;
28
32
 
29
33
  // ─── Engine-Usage Metrics ────────────────────────────────────────────────────
30
34
  //
@@ -423,6 +427,7 @@ function _createStreamAccumulator({
423
427
  onChunk = null,
424
428
  onToolUse = null,
425
429
  onTaskComplete = null,
430
+ onTerminalResult = null,
426
431
  onThinking = null,
427
432
  }) {
428
433
  if (!runtime?.capabilities?.streamConsumer || typeof runtime.createStreamConsumer !== 'function') {
@@ -438,6 +443,7 @@ function _createStreamAccumulator({
438
443
  let lastTextSent = '';
439
444
  let thinkingSent = false;
440
445
  let taskCompleteFired = false;
446
+ let terminalResultFired = false;
441
447
  let lastTaskCompleteSummary = '';
442
448
  const toolUses = [];
443
449
 
@@ -462,8 +468,18 @@ function _createStreamAccumulator({
462
468
  // override any streamed text (Claude's `result`, Copilot's final
463
469
  // assistant.message). onChunk is NOT fired here; this is the
464
470
  // authoritative final-text path, not a streaming chunk.
471
+ //
472
+ // Fire onTerminalResult once on the first non-empty terminal text so
473
+ // callers can early-resolve without waiting for the OS-level 'exit' /
474
+ // 'close' events — those can be delayed indefinitely on Linux when a
475
+ // detached grandchild has inherited the stdout pipe (e.g. Claude/Copilot
476
+ // CLIs that spawn background workers).
465
477
  if (typeof value !== 'string') return;
466
478
  text = _streamText(value);
479
+ if (value && onTerminalResult && !terminalResultFired) {
480
+ terminalResultFired = true;
481
+ onTerminalResult();
482
+ }
467
483
  },
468
484
  pushToolUse(name, input) {
469
485
  if (!name) return;
@@ -610,8 +626,6 @@ function callLLM(promptText, sysPromptText, opts = {}) {
610
626
  maxBudget, bare, fallbackModel,
611
627
  ...runtimeFeatureOpts,
612
628
  });
613
- let settled = false;
614
- let exitSettleTimer = null;
615
629
  let taskCompleteTimer = null;
616
630
  const scheduleTaskCompleteClose = () => {
617
631
  if (taskCompleteTimer) return;
@@ -623,12 +637,23 @@ function callLLM(promptText, sysPromptText, opts = {}) {
623
637
  taskCompleteTimer = null;
624
638
  }
625
639
  };
640
+ let resolved = false;
641
+ let exitFallbackTimer = null;
642
+ let exitCode = null;
643
+ const scheduleExitFallback = (code) => {
644
+ if (resolved || exitFallbackTimer) return;
645
+ exitFallbackTimer = setTimeout(() => finalizeAndResolve(code), EXIT_DRAIN_FALLBACK_MS);
646
+ };
626
647
  const acc = _createStreamAccumulator({
627
648
  runtime,
628
649
  maxRawBytes: ENGINE_DEFAULTS.maxLlmRawBytes,
629
650
  maxStderrBytes: ENGINE_DEFAULTS.maxLlmStderrBytes,
630
651
  maxLineBufferBytes: ENGINE_DEFAULTS.maxLlmLineBufferBytes,
631
652
  onTaskComplete: scheduleTaskCompleteClose,
653
+ // Terminal text from the runtime adapter signals the LLM has logically
654
+ // completed — kick the drain timer so we don't block on a delayed
655
+ // 'exit'/'close' when an inherited pipe keeps the parent's FDs open.
656
+ onTerminalResult: () => scheduleExitFallback(exitCode != null ? exitCode : 0),
632
657
  });
633
658
 
634
659
  _abort = () => { shared.killImmediate(proc); };
@@ -638,16 +663,14 @@ function callLLM(promptText, sysPromptText, opts = {}) {
638
663
 
639
664
  const timer = setTimeout(() => { shared.killImmediate(proc); }, timeout);
640
665
 
641
- function finish(code) {
642
- if (settled) return;
643
- settled = true;
666
+ const finalizeAndResolve = (code) => {
667
+ if (resolved) return;
668
+ resolved = true;
644
669
  clearTimeout(timer);
645
- if (exitSettleTimer) clearTimeout(exitSettleTimer);
646
670
  clearTaskCompleteTimer();
671
+ if (exitFallbackTimer) { clearTimeout(exitFallbackTimer); exitFallbackTimer = null; }
647
672
  for (const f of cleanupFiles) safeUnlink(f);
648
673
  const parsed = acc.finalize();
649
- try { proc.stdout?.destroy(); } catch {}
650
- try { proc.stderr?.destroy(); } catch {}
651
674
  const durationMs = Date.now() - _startMs;
652
675
  const usage = parsed.usage ? { ...parsed.usage, durationMs } : { durationMs };
653
676
  // parseError lets the adapter classify obvious failure modes (auth /
@@ -667,20 +690,22 @@ function callLLM(promptText, sysPromptText, opts = {}) {
667
690
  runtime: runtime.name,
668
691
  errorClass: errInfo.code,
669
692
  });
670
- }
693
+ };
671
694
 
672
- proc.on('close', finish);
673
- proc.on('exit', (code) => {
674
- if (settled) return;
675
- exitSettleTimer = setTimeout(() => finish(code), LLM_EXIT_SETTLE_GRACE_MS);
676
- });
695
+ // 'close' fires after stdio streams close; if a detached grandchild
696
+ // inherited stdout, that can be delayed indefinitely. 'exit' fires when
697
+ // the child itself exits — schedule a short drain window then resolve.
698
+ // On Linux, 'exit' itself can be delayed by an inherited pipe handle, so
699
+ // the accumulator's onTerminalResult provides a third early-resolve path.
700
+ proc.on('exit', (code) => { exitCode = code; scheduleExitFallback(code); });
701
+ proc.on('close', (code) => { finalizeAndResolve(code); });
677
702
 
678
703
  proc.on('error', (err) => {
679
- if (settled) return;
680
- settled = true;
704
+ if (resolved) return;
705
+ resolved = true;
681
706
  clearTimeout(timer);
682
- if (exitSettleTimer) clearTimeout(exitSettleTimer);
683
707
  clearTaskCompleteTimer();
708
+ if (exitFallbackTimer) { clearTimeout(exitFallbackTimer); exitFallbackTimer = null; }
684
709
  for (const f of cleanupFiles) safeUnlink(f);
685
710
  shared.log('error', `LLM spawn error (${label}): ${err.message}`);
686
711
  resolve({
@@ -726,8 +751,6 @@ function callLLMStreaming(promptText, sysPromptText, opts = {}) {
726
751
  maxBudget, bare, fallbackModel,
727
752
  ...runtimeFeatureOpts,
728
753
  });
729
- let settled = false;
730
- let exitSettleTimer = null;
731
754
  let taskCompleteTimer = null;
732
755
  const scheduleTaskCompleteClose = () => {
733
756
  if (taskCompleteTimer) return;
@@ -739,6 +762,13 @@ function callLLMStreaming(promptText, sysPromptText, opts = {}) {
739
762
  taskCompleteTimer = null;
740
763
  }
741
764
  };
765
+ let resolved = false;
766
+ let exitFallbackTimer = null;
767
+ let exitCode = null;
768
+ const scheduleExitFallback = (code) => {
769
+ if (resolved || exitFallbackTimer) return;
770
+ exitFallbackTimer = setTimeout(() => finalizeAndResolve(code), EXIT_DRAIN_FALLBACK_MS);
771
+ };
742
772
  const acc = _createStreamAccumulator({
743
773
  runtime,
744
774
  maxRawBytes: ENGINE_DEFAULTS.maxLlmRawBytes,
@@ -747,6 +777,10 @@ function callLLMStreaming(promptText, sysPromptText, opts = {}) {
747
777
  onChunk,
748
778
  onToolUse,
749
779
  onTaskComplete: scheduleTaskCompleteClose,
780
+ // Terminal text from the runtime adapter signals the LLM has logically
781
+ // completed — kick the drain timer so we don't block on a delayed
782
+ // 'exit'/'close' when an inherited pipe keeps the parent's FDs open.
783
+ onTerminalResult: () => scheduleExitFallback(exitCode != null ? exitCode : 0),
750
784
  onThinking: opts.onThinking || null,
751
785
  });
752
786
 
@@ -757,16 +791,14 @@ function callLLMStreaming(promptText, sysPromptText, opts = {}) {
757
791
 
758
792
  const timer = setTimeout(() => { shared.killImmediate(proc); }, timeout);
759
793
 
760
- function finish(code) {
761
- if (settled) return;
762
- settled = true;
794
+ const finalizeAndResolve = (code) => {
795
+ if (resolved) return;
796
+ resolved = true;
763
797
  clearTimeout(timer);
764
- if (exitSettleTimer) clearTimeout(exitSettleTimer);
765
798
  clearTaskCompleteTimer();
799
+ if (exitFallbackTimer) { clearTimeout(exitFallbackTimer); exitFallbackTimer = null; }
766
800
  for (const f of cleanupFiles) safeUnlink(f);
767
801
  const parsed = acc.finalize();
768
- try { proc.stdout?.destroy(); } catch {}
769
- try { proc.stderr?.destroy(); } catch {}
770
802
  const durationMs = Date.now() - _startMs;
771
803
  const usage = parsed.usage ? { ...parsed.usage, durationMs } : { durationMs };
772
804
  const errInfo = code !== 0
@@ -783,23 +815,22 @@ function callLLMStreaming(promptText, sysPromptText, opts = {}) {
783
815
  runtime: runtime.name,
784
816
  errorClass: errInfo.code,
785
817
  });
786
- }
818
+ };
787
819
 
788
- proc.on('close', finish);
789
- proc.on('exit', (code) => {
790
- // 'close' waits for stdio to close. If the runtime spawned a detached
791
- // grandchild that inherited stdout/stderr, the OS pipe stays open and
792
- // 'close' may never fire. Fall back to 'exit' after a drain window.
793
- if (settled) return;
794
- exitSettleTimer = setTimeout(() => finish(code), LLM_EXIT_SETTLE_GRACE_MS);
795
- });
820
+ // 'close' fires after stdio streams close; if a detached grandchild
821
+ // inherited stdout, that can be delayed indefinitely. 'exit' fires when
822
+ // the child itself exits — schedule a short drain window then resolve.
823
+ // On Linux, 'exit' itself can be delayed by an inherited pipe handle, so
824
+ // the accumulator's onTerminalResult provides a third early-resolve path.
825
+ proc.on('exit', (code) => { exitCode = code; scheduleExitFallback(code); });
826
+ proc.on('close', (code) => { finalizeAndResolve(code); });
796
827
 
797
828
  proc.on('error', (err) => {
798
- if (settled) return;
799
- settled = true;
829
+ if (resolved) return;
830
+ resolved = true;
800
831
  clearTimeout(timer);
801
- if (exitSettleTimer) clearTimeout(exitSettleTimer);
802
832
  clearTaskCompleteTimer();
833
+ if (exitFallbackTimer) { clearTimeout(exitFallbackTimer); exitFallbackTimer = null; }
803
834
  for (const f of cleanupFiles) safeUnlink(f);
804
835
  shared.log('error', `LLM-stream spawn error (${label}): ${err.message}`);
805
836
  resolve({
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@yemi33/minions",
3
- "version": "0.1.1743",
3
+ "version": "0.1.1745",
4
4
  "description": "Multi-agent AI dev team that runs from ~/.minions/ — five autonomous agents share a single engine, dashboard, and knowledge base",
5
5
  "bin": {
6
6
  "minions": "bin/minions.js"