@yemi33/minions 0.1.1762 → 0.1.1764

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,10 @@
1
1
  # Changelog
2
2
 
3
+ ## 0.1.1764 (2026-05-07)
4
+
5
+ ### Other
6
+ - Speed up meetings pagination
7
+
3
8
  ## 0.1.1762 (2026-05-07)
4
9
 
5
10
  ### Fixes
@@ -4,15 +4,19 @@ let _showArchived = false;
4
4
  const MTG_PER_PAGE = 10;
5
5
  let _mtgPage = 0;
6
6
  let _lastMeetingHash = '';
7
+ let _lastMeetingsForPaging = [];
8
+ let _mtgTotalPages = 1;
7
9
 
8
10
  function renderMeetings(meetings) {
9
11
  meetings = (meetings || []).filter(function(m) { return !isDeleted('mtg:' + m.id); });
10
12
  meetings.sort((a, b) => (b.createdAt || b.completedAt || '').localeCompare(a.createdAt || a.completedAt || ''));
13
+ _lastMeetingsForPaging = meetings;
11
14
  const el = document.getElementById('meetings-content');
12
15
  const countEl = document.getElementById('meetings-count');
13
16
  if (!meetings || meetings.length === 0) {
14
17
  countEl.textContent = '0';
15
18
  el.innerHTML = '<p class="empty">No meetings yet. Start one to have agents investigate, debate, and conclude on a topic.</p>';
19
+ _mtgTotalPages = 1;
16
20
  return;
17
21
  }
18
22
 
@@ -27,10 +31,12 @@ function renderMeetings(meetings) {
27
31
  if (visible.length === 0) {
28
32
  el.innerHTML = '<p class="empty">No active meetings.</p>';
29
33
  if (archived.length) el.insertAdjacentHTML('beforeend', '<div style="text-align:center;margin-top:8px"><button class="pr-pager-btn" style="font-size:10px" onclick="_toggleArchivedMeetings()">Show ' + archived.length + ' archived</button></div>');
34
+ _mtgTotalPages = 1;
30
35
  return;
31
36
  }
32
37
 
33
38
  const totalPages = Math.ceil(visible.length / MTG_PER_PAGE);
39
+ _mtgTotalPages = totalPages;
34
40
  if (_mtgPage >= totalPages) _mtgPage = totalPages - 1;
35
41
  const start = _mtgPage * MTG_PER_PAGE;
36
42
  const pageItems = visible.slice(start, start + MTG_PER_PAGE);
@@ -81,13 +87,26 @@ function renderMeetings(meetings) {
81
87
  restoreNotifBadges();
82
88
  }
83
89
 
84
- function _mtgPrev() { if (_mtgPage > 0) { _mtgPage--; refresh(); } }
85
- function _mtgNext() { _mtgPage++; refresh(); }
90
+ function _rerenderMeetingPageFromCache() {
91
+ renderMeetings(_lastMeetingsForPaging || []);
92
+ }
93
+ function _mtgPrev() {
94
+ if (_mtgPage > 0) {
95
+ _mtgPage--;
96
+ _rerenderMeetingPageFromCache();
97
+ }
98
+ }
99
+ function _mtgNext() {
100
+ if (_mtgPage < _mtgTotalPages - 1) {
101
+ _mtgPage++;
102
+ _rerenderMeetingPageFromCache();
103
+ }
104
+ }
86
105
 
87
106
  function _toggleArchivedMeetings() {
88
107
  _showArchived = !_showArchived;
89
108
  _mtgPage = 0;
90
- refresh();
109
+ _rerenderMeetingPageFromCache();
91
110
  }
92
111
 
93
112
  let _meetingPollInterval = null;
package/engine/cleanup.js CHANGED
@@ -48,6 +48,67 @@ function worktreeMatchesBranch(dirLower, branch, actualBranch = '') {
48
48
  return worktreeBranchMatches(actualBranch, branch) || worktreeDirMatchesBranch(dirLower, branch);
49
49
  }
50
50
 
51
+ /**
52
+ * Sweep leaked test-fixture meetings from a `meetings/` directory.
53
+ *
54
+ * Meeting unit/integration tests don't all honor MINIONS_TEST_DIR
55
+ * (engine/meeting.js is in ISOLATED_MODULES, but tests that load it after the
56
+ * env var is unset, or that fail before cleanup, leave `.json` + `.json.backup`
57
+ * sidecars behind). The engine's tick loop then re-discovers these fixtures
58
+ * every tick, fails playbook validation ("missing required template variables:
59
+ * agenda"), and spams log.json with the same error.
60
+ *
61
+ * Filter is conservative: only files whose ID begins with `TEST-` AND whose
62
+ * JSON either lacks an `agenda` or has an empty/whitespace-only one. Both the
63
+ * live `.json` and any `.json.backup` sidecar (which would otherwise
64
+ * auto-restore the file via safeJson on next read) are removed together.
65
+ *
66
+ * Returns the number of files unlinked.
67
+ */
68
+ function sweepLeakedTestMeetings(meetingsDir) {
69
+ let cleaned = 0;
70
+ try {
71
+ if (!fs.existsSync(meetingsDir)) return 0;
72
+ const candidates = new Set();
73
+ for (const f of fs.readdirSync(meetingsDir)) {
74
+ if (!f.startsWith('TEST-')) continue;
75
+ // Match "<id>.json" or "<id>.json.backup" — both belong to the same fixture
76
+ const idMatch = f.match(/^(TEST-[^/]+?)\.json(\.backup)?$/);
77
+ if (!idMatch) continue;
78
+ candidates.add(idMatch[1]);
79
+ }
80
+ for (const id of candidates) {
81
+ const livePath = path.join(meetingsDir, `${id}.json`);
82
+ const backupPath = `${livePath}.backup`;
83
+ // Decide whether to delete based on the live file's contents, falling
84
+ // back to the .backup sidecar if the live file is missing/corrupt.
85
+ let agendaPresent = false;
86
+ let saw = false;
87
+ for (const candidatePath of [livePath, backupPath]) {
88
+ try {
89
+ const data = JSON.parse(fs.readFileSync(candidatePath, 'utf8'));
90
+ saw = true;
91
+ if (data && typeof data.agenda === 'string' && data.agenda.trim()) {
92
+ agendaPresent = true;
93
+ break;
94
+ }
95
+ } catch { /* missing or corrupt — try next candidate */ }
96
+ }
97
+ // Delete only when we confirmed the fixture exists AND has no usable
98
+ // agenda. If neither file parses (saw=false), the directory entry is
99
+ // already useless; unlink both so safeJson can't resurrect it.
100
+ if (saw && agendaPresent) continue;
101
+ for (const target of [livePath, backupPath]) {
102
+ try { fs.unlinkSync(target); cleaned++; } catch { /* not present */ }
103
+ }
104
+ }
105
+ if (cleaned > 0) {
106
+ log('info', `Cleaned ${cleaned} leaked test-fixture meeting file(s) from ${meetingsDir}`);
107
+ }
108
+ } catch (e) { log('warn', 'cleanup leaked test meetings: ' + e.message); }
109
+ return cleaned;
110
+ }
111
+
51
112
  function getWorktreeBranch(wtPath) {
52
113
  try {
53
114
  return exec(`git -C "${wtPath}" branch --show-current`, { encoding: 'utf8', stdio: 'pipe', timeout: 5000, windowsHide: true }).trim();
@@ -418,6 +479,9 @@ async function runCleanup(config, verbose = false) {
418
479
  // 5. Clean spawn-debug.log
419
480
  try { fs.unlinkSync(path.join(ENGINE_DIR, 'spawn-debug.log')); } catch { /* cleanup */ }
420
481
 
482
+ // 5b. Sweep leaked test-fixture meetings from the live `meetings/` directory.
483
+ cleaned.leakedTestMeetings = sweepLeakedTestMeetings(path.join(MINIONS_DIR, 'meetings'));
484
+
421
485
  // 6. Prune old output archive files (keep last 30 per agent)
422
486
  for (const agentId of Object.keys(config.agents || {})) {
423
487
  const agentDir = path.join(MINIONS_DIR, 'agents', agentId);
@@ -862,6 +926,7 @@ function scrubStaleMetrics() {
862
926
  module.exports = {
863
927
  runCleanup,
864
928
  scrubStaleMetrics,
929
+ sweepLeakedTestMeetings, // exported for testing
865
930
  worktreeDirMatchesBranch, // exported for testing
866
931
  worktreeMatchesBranch, // exported for testing
867
932
  getWorktreeBranch, // exported for lifecycle cleanup
@@ -1,5 +1,5 @@
1
1
  {
2
2
  "runtime": "copilot",
3
3
  "models": null,
4
- "cachedAt": "2026-05-07T00:54:25.703Z"
4
+ "cachedAt": "2026-05-07T01:23:41.823Z"
5
5
  }
package/engine/github.js CHANGED
@@ -109,9 +109,13 @@ function recordSlugFailure(slug) {
109
109
  const backoffMs = Math.min(GH_POLL_BACKOFF_BASE_MS * Math.pow(2, failures - 1), GH_POLL_BACKOFF_MAX_MS);
110
110
  _ghPollBackoff.set(slug, { failures, backoffUntil: Date.now() + backoffMs });
111
111
  if (failures === 1) {
112
+ // First failure is genuinely actionable — surface it.
112
113
  log('warn', `GitHub poll: repo ${slug} failed — will retry in ${Math.round(backoffMs / 1000)}s`);
113
114
  } else {
114
- log('warn', `GitHub poll: repo ${slug} failed ${failures} times backoff ${Math.round(backoffMs / 1000)}s`);
115
+ // Subsequent escalations are deterministic backoff math — info, not warn.
116
+ // We already warned on the first failure; spamming the same operator
117
+ // with 12 escalation lines per outage adds no signal.
118
+ log('info', `GitHub poll: repo ${slug} failed ${failures} times — backoff ${Math.round(backoffMs / 1000)}s`);
115
119
  }
116
120
  }
117
121
 
@@ -1956,7 +1956,10 @@ function checkForLearnings(agentId, agentInfo, taskDesc) {
1956
1956
  log('info', `${agentInfo?.name || agentId} wrote ${agentFiles.length} finding(s) to inbox`);
1957
1957
  return;
1958
1958
  }
1959
- log('warn', `${agentInfo?.name || agentId} didn't write learnings no follow-up queued`);
1959
+ // Info, not warn: not writing learnings is a soft signal (the agent finished
1960
+ // and the engine continues fine). Operators don't need to act on it; keeping
1961
+ // it at warn floods log.json (~50 entries/day) with non-actionable noise.
1962
+ log('info', `${agentInfo?.name || agentId} didn't write learnings — no follow-up queued`);
1960
1963
  }
1961
1964
 
1962
1965
  function skillWriteTargets(runtimeName, project = null) {
@@ -3043,7 +3046,14 @@ async function runPostCompletionHooks(dispatchItem, agentId, code, stdout, confi
3043
3046
  }
3044
3047
  }
3045
3048
  } catch (err) {
3046
- log('warn', `Post-completion worktree cleanup error: ${err.message}`);
3049
+ // ENOENT = worktree root doesn't exist yet (no worktrees ever created
3050
+ // for this project, or already cleaned up). That's not a failure — there
3051
+ // is simply nothing to clean. Other errors still warn.
3052
+ if (err && err.code === 'ENOENT') {
3053
+ log('debug', `Post-completion worktree cleanup: no worktree root yet (${err.message})`);
3054
+ } else {
3055
+ log('warn', `Post-completion worktree cleanup error: ${err.message}`);
3056
+ }
3047
3057
  }
3048
3058
  }
3049
3059
 
package/engine/meeting.js CHANGED
@@ -33,6 +33,18 @@ function isTerminalMeetingStatus(status) {
33
33
  return TERMINAL_MEETING_STATUSES.has(String(status || '').toLowerCase());
34
34
  }
35
35
 
36
+ // Process-scoped dedup so a stuck meeting (missing agenda) logs the warning
37
+ // once per id rather than every tick (~1/min). Module-scoped lifetime is
38
+ // intentional: a fresh process should re-warn at startup so the operator sees
39
+ // the issue, but the same engine run shouldn't spam.
40
+ const _warnedMissingAgendaIds = new Set();
41
+ function _warnOnceMissingAgenda(meetingId) {
42
+ if (!meetingId || _warnedMissingAgendaIds.has(meetingId)) return;
43
+ _warnedMissingAgendaIds.add(meetingId);
44
+ log('warn', `Meeting ${meetingId}: skipping discovery — agenda is missing or empty (will not be re-logged this process)`);
45
+ }
46
+ function _resetMissingAgendaWarnings() { _warnedMissingAgendaIds.clear(); }
47
+
36
48
  function expectedMeetingStatusForRound(roundName) {
37
49
  return ROUND_STATUS_BY_NAME[String(roundName || '').toLowerCase()] || null;
38
50
  }
@@ -445,6 +457,16 @@ function discoverMeetingWork(config) {
445
457
  if (!ACTIVE_MEETING_STATUSES.has(roundName)) continue;
446
458
  const agents = config.agents || {};
447
459
 
460
+ // Pre-flight validation: meetings missing required template vars (agenda)
461
+ // would otherwise fail playbook rendering on every tick (~1/min), spamming
462
+ // log.json with the same "missing required template variables: agenda"
463
+ // error. Skip them silently here; emit one structured warning per meeting
464
+ // ID per process so the operator still has signal without the spam.
465
+ if (!meeting.agenda || !String(meeting.agenda).trim()) {
466
+ _warnOnceMissingAgenda(meeting.id);
467
+ continue;
468
+ }
469
+
448
470
  if (roundName === 'concluding') {
449
471
  // Only one agent should conclude — skip if already concluded or any conclude dispatch is active
450
472
  if (meeting.conclusion) continue;
@@ -902,4 +924,5 @@ module.exports = {
902
924
  collectMeetingTakeaways,
903
925
  collectMeetingNextSteps,
904
926
  buildTimedOutMeetingConclusion,
927
+ _resetMissingAgendaWarnings, // exported for testing only
905
928
  };
package/engine/shared.js CHANGED
@@ -128,6 +128,18 @@ function log(level, msg, meta = {}) {
128
128
  // Console output remains immediate (also redacted)
129
129
  console.log(`[${logTs()}] [${level}] ${safeMsg}`);
130
130
 
131
+ // Capture the resolved log file path AT WRITE TIME (not flush time).
132
+ // Stops test pollution: a test sets MINIONS_TEST_DIR, calls log(), the test
133
+ // ends and clears MINIONS_TEST_DIR, then the buffer flushes — without
134
+ // capture-at-write-time the entry would land in the production log.json.
135
+ // Stripped before persisting (see _flushLogBuffer).
136
+ Object.defineProperty(entry, '_logPath', {
137
+ value: _currentLogPath(),
138
+ enumerable: false,
139
+ writable: true,
140
+ configurable: true,
141
+ });
142
+
131
143
  _logBuffer.push(entry);
132
144
 
133
145
  // Start the flush timer lazily on first buffered entry
@@ -167,17 +179,33 @@ function _currentLogPath() {
167
179
 
168
180
  function _flushLogBuffer() {
169
181
  if (_logBuffer.length === 0) return;
170
- // SEC-09 defense-in-depth: redact again at flush time so any direct
171
- // `_logBuffer.push(entry)` callers (tests, future paths) can't leak secrets.
172
- const entries = _logBuffer.splice(0).map(redactSecrets);
173
- try {
174
- mutateJsonFileLocked(_currentLogPath(), (logData) => {
175
- if (!Array.isArray(logData)) logData = logData?.entries || [];
176
- logData.push(...entries);
177
- if (logData.length >= 2500) logData.splice(0, logData.length - 2000);
178
- return logData;
179
- }, { defaultValue: [] });
180
- } catch { /* logging should never crash the caller */ }
182
+ const drained = _logBuffer.splice(0);
183
+ // Group entries by their captured _logPath so test-originated entries always
184
+ // land in the test dir's log.json even if MINIONS_TEST_DIR has been cleared
185
+ // by the time we flush. Entries without _logPath fall back to current path
186
+ // (eg. direct _logBuffer.push() from tests).
187
+ const fallbackPath = _currentLogPath();
188
+ const byPath = new Map();
189
+ for (const raw of drained) {
190
+ const target = raw._logPath || fallbackPath;
191
+ // SEC-09 defense-in-depth: redact again at flush time so any direct
192
+ // `_logBuffer.push(entry)` callers (tests, future paths) can't leak secrets.
193
+ const entry = redactSecrets(raw);
194
+ // Strip the routing-only metadata before persisting.
195
+ delete entry._logPath;
196
+ if (!byPath.has(target)) byPath.set(target, []);
197
+ byPath.get(target).push(entry);
198
+ }
199
+ for (const [target, entries] of byPath) {
200
+ try {
201
+ mutateJsonFileLocked(target, (logData) => {
202
+ if (!Array.isArray(logData)) logData = logData?.entries || [];
203
+ logData.push(...entries);
204
+ if (logData.length >= 2500) logData.splice(0, logData.length - 2000);
205
+ return logData;
206
+ }, { defaultValue: [] });
207
+ } catch { /* logging should never crash the caller */ }
208
+ }
181
209
  }
182
210
 
183
211
  /** Flush buffered log entries to disk. Call during graceful shutdown to drain the buffer. */
@@ -2877,7 +2905,10 @@ function createThrottleTracker({ label, baseBackoffMs = 60000, maxBackoffMs = 32
2877
2905
  const waitMs = (retryAfterMs > 0) ? retryAfterMs : state.backoffMs;
2878
2906
  state.throttled = true;
2879
2907
  state.retryAfter = Date.now() + waitMs;
2880
- log('warn', `[${label}] Throttled retry after ${Math.round(waitMs / 1000)}s, consecutive hits: ${state.consecutiveHits}`);
2908
+ // Throttle retries are deterministic backoff math — info, not warn.
2909
+ // Operator already sees rate-limit signals via the underlying API errors
2910
+ // upstream (which still log at warn). The retry-after restatement is housekeeping.
2911
+ log('info', `[${label}] Throttled — retry after ${Math.round(waitMs / 1000)}s, consecutive hits: ${state.consecutiveHits}`);
2881
2912
  }
2882
2913
 
2883
2914
  function recordSuccess() {
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@yemi33/minions",
3
- "version": "0.1.1762",
3
+ "version": "0.1.1764",
4
4
  "description": "Multi-agent AI dev team that runs from ~/.minions/ — five autonomous agents share a single engine, dashboard, and knowledge base",
5
5
  "bin": {
6
6
  "minions": "bin/minions.js"