agileflow 2.99.8 → 3.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (65) hide show
  1. package/CHANGELOG.md +5 -0
  2. package/lib/cache-provider.js +155 -0
  3. package/lib/codebase-indexer.js +1 -1
  4. package/lib/content-sanitizer.js +1 -0
  5. package/lib/dashboard-protocol.js +25 -0
  6. package/lib/dashboard-server.js +184 -133
  7. package/lib/errors.js +18 -0
  8. package/lib/file-cache.js +1 -1
  9. package/lib/flag-detection.js +11 -20
  10. package/lib/git-operations.js +15 -33
  11. package/lib/merge-operations.js +40 -34
  12. package/lib/process-executor.js +199 -0
  13. package/lib/registry-cache.js +13 -47
  14. package/lib/skill-loader.js +206 -0
  15. package/lib/smart-json-file.js +2 -4
  16. package/package.json +1 -1
  17. package/scripts/agileflow-configure.js +13 -12
  18. package/scripts/agileflow-statusline.sh +30 -0
  19. package/scripts/agileflow-welcome.js +181 -212
  20. package/scripts/auto-self-improve.js +3 -3
  21. package/scripts/claude-smart.sh +67 -0
  22. package/scripts/claude-tmux.sh +248 -161
  23. package/scripts/damage-control-multi-agent.js +227 -0
  24. package/scripts/lib/bus-utils.js +471 -0
  25. package/scripts/lib/configure-detect.js +5 -6
  26. package/scripts/lib/configure-features.js +44 -0
  27. package/scripts/lib/configure-repair.js +5 -6
  28. package/scripts/lib/configure-utils.js +2 -3
  29. package/scripts/lib/context-formatter.js +87 -8
  30. package/scripts/lib/damage-control-utils.js +37 -3
  31. package/scripts/lib/file-lock.js +392 -0
  32. package/scripts/lib/ideation-index.js +2 -5
  33. package/scripts/lib/lifecycle-detector.js +123 -0
  34. package/scripts/lib/process-cleanup.js +55 -81
  35. package/scripts/lib/scale-detector.js +357 -0
  36. package/scripts/lib/signal-detectors.js +779 -0
  37. package/scripts/lib/story-state-machine.js +1 -1
  38. package/scripts/lib/sync-ideation-status.js +2 -3
  39. package/scripts/lib/task-registry.js +7 -1
  40. package/scripts/lib/team-events.js +357 -0
  41. package/scripts/messaging-bridge.js +79 -36
  42. package/scripts/migrate-ideation-index.js +37 -14
  43. package/scripts/obtain-context.js +37 -19
  44. package/scripts/ralph-loop.js +3 -4
  45. package/scripts/smart-detect.js +390 -0
  46. package/scripts/team-manager.js +174 -30
  47. package/src/core/commands/audit.md +13 -11
  48. package/src/core/commands/babysit.md +162 -115
  49. package/src/core/commands/changelog.md +21 -4
  50. package/src/core/commands/configure.md +105 -2
  51. package/src/core/commands/debt.md +12 -2
  52. package/src/core/commands/feedback.md +7 -6
  53. package/src/core/commands/ideate/history.md +1 -1
  54. package/src/core/commands/ideate/new.md +5 -5
  55. package/src/core/commands/logic/audit.md +2 -2
  56. package/src/core/commands/pr.md +7 -6
  57. package/src/core/commands/research/analyze.md +28 -20
  58. package/src/core/commands/research/ask.md +43 -0
  59. package/src/core/commands/research/import.md +29 -21
  60. package/src/core/commands/research/list.md +8 -7
  61. package/src/core/commands/research/synthesize.md +356 -20
  62. package/src/core/commands/research/view.md +8 -5
  63. package/src/core/commands/review.md +24 -6
  64. package/src/core/commands/skill/create.md +34 -0
  65. package/tools/cli/lib/docs-setup.js +4 -0
@@ -16,7 +16,8 @@
16
16
 
17
17
  const fs = require('fs');
18
18
  const path = require('path');
19
- const { execFileSync, spawnSync } = require('child_process');
19
+ const { spawnSync } = require('child_process');
20
+ const { executeCommandSync } = require('../../lib/process-executor');
20
21
 
21
22
  // Configuration constants
22
23
  const KILL_GRACE_PERIOD_MS = 5000; // Wait before SIGKILL
@@ -102,17 +103,12 @@ function getProcessStartTime(pid) {
102
103
  }
103
104
 
104
105
  if (process.platform === 'darwin') {
105
- try {
106
- const output = execFileSync('ps', ['-o', 'lstart=', '-p', String(pid)], {
107
- encoding: 'utf8',
108
- timeout: 2000,
109
- stdio: ['pipe', 'pipe', 'pipe'],
110
- });
111
- const ts = new Date(output.trim()).getTime();
112
- return Number.isFinite(ts) ? ts : null;
113
- } catch (e) {
114
- return null;
115
- }
106
+ const result = executeCommandSync('ps', ['-o', 'lstart=', '-p', String(pid)], {
107
+ timeout: 2000, fallback: null,
108
+ });
109
+ if (result.data === null) return null;
110
+ const ts = new Date(result.data).getTime();
111
+ return Number.isFinite(ts) ? ts : null;
116
112
  }
117
113
 
118
114
  return null;
@@ -145,17 +141,12 @@ function getParentPid(pid) {
145
141
  }
146
142
 
147
143
  if (process.platform === 'darwin') {
148
- try {
149
- const output = execFileSync('ps', ['-o', 'ppid=', '-p', String(pid)], {
150
- encoding: 'utf8',
151
- timeout: 2000,
152
- stdio: ['pipe', 'pipe', 'pipe'],
153
- });
154
- const ppid = parseInt(output.trim(), 10);
155
- return Number.isFinite(ppid) ? ppid : null;
156
- } catch (e) {
157
- return null;
158
- }
144
+ const result = executeCommandSync('ps', ['-o', 'ppid=', '-p', String(pid)], {
145
+ timeout: 2000, fallback: null,
146
+ });
147
+ if (result.data === null) return null;
148
+ const ppid = parseInt(result.data, 10);
149
+ return Number.isFinite(ppid) ? ppid : null;
159
150
  }
160
151
 
161
152
  return null;
@@ -180,17 +171,11 @@ function getArgsForPid(pid) {
180
171
  }
181
172
 
182
173
  if (process.platform === 'darwin') {
183
- try {
184
- const output = execFileSync('ps', ['-o', 'command=', '-p', String(pid)], {
185
- encoding: 'utf8',
186
- timeout: 2000,
187
- stdio: ['pipe', 'pipe', 'pipe'],
188
- });
189
- const cmd = output.trim();
190
- return cmd ? [cmd] : [];
191
- } catch (e) {
192
- return [];
193
- }
174
+ const result = executeCommandSync('ps', ['-o', 'command=', '-p', String(pid)], {
175
+ timeout: 2000, fallback: null,
176
+ });
177
+ if (result.data === null) return [];
178
+ return result.data ? [result.data] : [];
194
179
  }
195
180
 
196
181
  return [];
@@ -283,54 +268,43 @@ function findClaudeProcesses() {
283
268
  }
284
269
  } else if (process.platform === 'darwin') {
285
270
  // macOS: Use ps command
286
- try {
287
- const output = execFileSync(
288
- 'bash',
289
- ['-c', "ps -axo pid,lstart,command | grep -E 'claude' | grep -v grep"],
290
- {
291
- encoding: 'utf8',
292
- timeout: 5000,
293
- stdio: ['pipe', 'pipe', 'pipe'],
294
- }
295
- );
296
-
297
- for (const line of output.split('\n')) {
298
- if (!line.trim()) continue;
299
-
300
- // Parse: PID LSTART COMMAND
301
- // e.g.: 1234 Mon Feb 3 08:00:00 2026 claude --flag
302
- const match = line.match(/^\s*(\d+)\s+(\w+\s+\w+\s+\d+\s+[\d:]+\s+\d+)\s+(.*)$/);
303
- if (!match) continue;
304
-
305
- const pid = parseInt(match[1], 10);
306
- if (pid === currentPid || pid === parentPid) continue;
307
-
308
- const cmdline = match[3];
309
- if (!isClaudeProcess([cmdline])) continue;
310
-
311
- // Get cwd via lsof (slower but works on macOS)
312
- let cwd = null;
313
- try {
314
- const lsofOutput = execFileSync('lsof', ['-p', String(pid)], {
315
- encoding: 'utf8',
316
- timeout: 1000,
317
- stdio: ['pipe', 'pipe', 'pipe'],
318
- });
319
- const cwdLine = lsofOutput.split('\n').find(l => l.includes('cwd'));
320
- cwd = cwdLine ? cwdLine.split(/\s+/).pop().trim() : null;
321
- } catch (e) {
322
- // lsof failed
323
- }
324
-
325
- processes.push({
326
- pid,
327
- cwd,
328
- cmdline,
329
- startTime: new Date(match[2]).getTime(),
330
- });
271
+ // Note: uses bash -c for pipeline (grep) which can't be expressed with execFileSync
272
+ const psResult = executeCommandSync(
273
+ 'bash',
274
+ ['-c', "ps -axo pid,lstart,command | grep -E 'claude' | grep -v grep"],
275
+ { timeout: 5000, fallback: '' }
276
+ );
277
+
278
+ for (const line of psResult.data.split('\n')) {
279
+ if (!line.trim()) continue;
280
+
281
+ // Parse: PID LSTART COMMAND
282
+ // e.g.: 1234 Mon Feb 3 08:00:00 2026 claude --flag
283
+ const match = line.match(/^\s*(\d+)\s+(\w+\s+\w+\s+\d+\s+[\d:]+\s+\d+)\s+(.*)$/);
284
+ if (!match) continue;
285
+
286
+ const pid = parseInt(match[1], 10);
287
+ if (pid === currentPid || pid === parentPid) continue;
288
+
289
+ const cmdline = match[3];
290
+ if (!isClaudeProcess([cmdline])) continue;
291
+
292
+ // Get cwd via lsof (slower but works on macOS)
293
+ let cwd = null;
294
+ const lsofResult = executeCommandSync('lsof', ['-p', String(pid)], {
295
+ timeout: 1000, fallback: null,
296
+ });
297
+ if (lsofResult.data) {
298
+ const cwdLine = lsofResult.data.split('\n').find(l => l.includes('cwd'));
299
+ cwd = cwdLine ? cwdLine.split(/\s+/).pop().trim() : null;
331
300
  }
332
- } catch (e) {
333
- // ps/grep failed (no claude processes found)
301
+
302
+ processes.push({
303
+ pid,
304
+ cwd,
305
+ cmdline,
306
+ startTime: new Date(match[2]).getTime(),
307
+ });
334
308
  }
335
309
  }
336
310
 
@@ -0,0 +1,357 @@
1
+ #!/usr/bin/env node
2
+ /**
3
+ * scale-detector.js
4
+ *
5
+ * Project scope detection for scale-adaptive workflows (EP-0033)
6
+ *
7
+ * Detects project scale based on:
8
+ * - Source file count (excluding node_modules, .git, dist, etc.)
9
+ * - Active stories in status.json
10
+ * - Git commit count (last 6 months)
11
+ * - Dependency count (from package.json)
12
+ *
13
+ * Scale tiers:
14
+ * micro - <20 files, <5 stories, <50 commits
15
+ * small - <100 files, <20 stories, <200 commits
16
+ * medium - <500 files, <50 stories, <1000 commits
17
+ * large - <2000 files, <200 stories, <5000 commits
18
+ * enterprise - 2000+ files
19
+ *
20
+ * Performance target: <200ms detection, cached with 60s TTL
21
+ */
22
+
23
+ const fs = require('fs');
24
+ const path = require('path');
25
+ const { git } = require('../../lib/process-executor');
26
+
27
// Cache TTL in milliseconds (60 seconds) — how long a scale_detection
// entry in session-state.json is considered fresh before re-detection.
const CACHE_TTL_MS = 60000;

// Scale tier thresholds — a project fits a tier when files, stories AND
// commits are all at or below that tier's limits (see classifyScale).
const SCALE_THRESHOLDS = {
  micro: { maxFiles: 20, maxStories: 5, maxCommits: 50 },
  small: { maxFiles: 100, maxStories: 20, maxCommits: 200 },
  medium: { maxFiles: 500, maxStories: 50, maxCommits: 1000 },
  large: { maxFiles: 2000, maxStories: 200, maxCommits: 5000 },
  // enterprise: anything above large
};

// Directories to exclude from file counting. Note countSourceFiles also
// skips every dot-directory unconditionally, so the dot-entries here are
// belt-and-braces.
const EXCLUDE_DIRS = new Set([
  'node_modules', '.git', 'dist', 'build', '.next', '.nuxt',
  'coverage', '.agileflow', '.claude', '__pycache__', '.venv',
  'vendor', 'target', 'out', '.cache', '.turbo', '.vercel',
]);

// Source file extensions to count (compared lowercase).
const SOURCE_EXTENSIONS = new Set([
  '.js', '.jsx', '.ts', '.tsx', '.py', '.rb', '.go', '.rs',
  '.java', '.kt', '.swift', '.c', '.cpp', '.h', '.cs',
  '.vue', '.svelte', '.astro', '.php', '.sh', '.bash',
  '.css', '.scss', '.less', '.html', '.sql', '.graphql',
]);
53
+
54
/**
 * Count source files under a directory (fast, synchronous).
 *
 * Uses readdirSync with withFileTypes so no per-entry stat calls are
 * needed. Directories named in EXCLUDE_DIRS and any dot-directory are
 * skipped; only files whose extension is in SOURCE_EXTENSIONS count.
 * Unreadable directories are silently ignored.
 *
 * @param {string} dir - Directory to scan
 * @param {number} maxDepth - Maximum recursion depth (default 6)
 * @returns {number} Number of matching source files
 */
function countSourceFiles(dir, maxDepth = 6) {
  let total = 0;
  // Iterative traversal: each stack entry is [directory, depth].
  const pending = [[dir, 0]];

  while (pending.length > 0) {
    const [current, depth] = pending.pop();
    if (depth > maxDepth) continue;

    let entries;
    try {
      entries = fs.readdirSync(current, { withFileTypes: true });
    } catch {
      continue; // unreadable directory — skip it
    }

    for (const entry of entries) {
      if (entry.isFile()) {
        const ext = path.extname(entry.name).toLowerCase();
        if (SOURCE_EXTENSIONS.has(ext)) {
          total += 1;
        }
      } else if (
        entry.isDirectory() &&
        !entry.name.startsWith('.') &&
        !EXCLUDE_DIRS.has(entry.name)
      ) {
        pending.push([path.join(current, entry.name), depth + 1]);
      }
    }
  }

  return total;
}
92
+
93
/**
 * Count stories recorded in status.json.
 *
 * Prefers the pre-loaded statusJson when it carries a `stories` map;
 * otherwise falls back to reading docs/09-agents/status.json from disk.
 * Any read/parse failure yields 0.
 *
 * @param {Object|null} statusJson - Pre-loaded status.json data
 * @param {string} rootDir - Project root directory
 * @returns {number} Total story count (0 when unavailable)
 */
function countStories(statusJson, rootDir) {
  if (statusJson?.stories) {
    return Object.keys(statusJson.stories).length;
  }

  // Fallback: load status.json directly from disk.
  try {
    const statusPath = path.join(rootDir, 'docs', '09-agents', 'status.json');
    if (fs.existsSync(statusPath)) {
      const parsed = JSON.parse(fs.readFileSync(statusPath, 'utf8'));
      return parsed.stories ? Object.keys(parsed.stories).length : 0;
    }
  } catch {
    // Unreadable or malformed status.json — treat as zero stories.
  }
  return 0;
}
117
+
118
/**
 * Count git commits in the last 6 months.
 *
 * Runs through the shared process-executor `git` wrapper; on failure
 * the '0' fallback flows through and the count comes back as 0.
 *
 * @param {string} rootDir - Project root directory
 * @returns {number} Commit count (0 when git is unavailable or fails)
 */
function countGitCommits(rootDir) {
  const result = git(['rev-list', '--count', '--since=6 months ago', 'HEAD'], {
    cwd: rootDir, timeout: 5000, fallback: '0',
  });
  // Number.parseInt / Number.isNaN instead of the coercing globals.
  const count = Number.parseInt(result.data, 10);
  return Number.isNaN(count) ? 0 : count;
}
131
+
132
/**
 * Count dependencies declared in package.json.
 *
 * Sums `dependencies` and `devDependencies` entries; a missing or
 * malformed package.json yields 0.
 *
 * @param {string} rootDir - Project root directory
 * @returns {number} Total dependency count
 */
function countDependencies(rootDir) {
  try {
    const pkgPath = path.join(rootDir, 'package.json');
    if (!fs.existsSync(pkgPath)) return 0;
    const manifest = JSON.parse(fs.readFileSync(pkgPath, 'utf8'));
    const sections = [manifest.dependencies, manifest.devDependencies];
    return sections.reduce(
      (sum, section) => sum + Object.keys(section ?? {}).length,
      0
    );
  } catch {
    // Malformed package.json — report zero rather than crash.
    return 0;
  }
}
150
+
151
/**
 * Determine the scale tier for a set of project metrics.
 *
 * A project lands in the SMALLEST tier whose thresholds accommodate ALL
 * of files, stories, and commits — equivalently, the single largest
 * metric decides the tier, so complexity is never under-estimated.
 * (`dependencies` is carried in metrics but does not affect the tier.)
 *
 * @param {Object} metrics - { files, stories, commits, dependencies }
 * @returns {string} Scale tier: micro|small|medium|large|enterprise
 */
function classifyScale(metrics) {
  const { files, stories, commits } = metrics;

  for (const tier of ['micro', 'small', 'medium', 'large']) {
    const t = SCALE_THRESHOLDS[tier];
    if (files <= t.maxFiles && stories <= t.maxStories && commits <= t.maxCommits) {
      return tier;
    }
  }
  // Any metric beyond the 'large' thresholds.
  return 'enterprise';
}
170
+
171
/**
 * Load a cached scale detection from session-state.json, if still fresh.
 *
 * Uses the pre-loaded sessionState when given; otherwise reads
 * docs/09-agents/session-state.json from disk. Entries older than
 * CACHE_TTL_MS are treated as missing.
 *
 * @param {string} rootDir - Project root directory
 * @param {Object|null} sessionState - Pre-loaded session state
 * @returns {Object|null} Cached result, or null when missing/expired/unreadable
 */
function readCache(rootDir, sessionState) {
  try {
    let state = sessionState;
    if (!state) {
      const statePath = path.join(rootDir, 'docs', '09-agents', 'session-state.json');
      if (!fs.existsSync(statePath)) return null;
      state = JSON.parse(fs.readFileSync(statePath, 'utf8'));
    }

    const entry = state.scale_detection;
    if (!entry?.detected_at) return null;

    const ageMs = Date.now() - new Date(entry.detected_at).getTime();
    return ageMs > CACHE_TTL_MS ? null : entry;
  } catch {
    return null;
  }
}
198
+
199
/**
 * Persist a scale-detection result into session-state.json.
 *
 * Merges into the existing state file (if any) under the
 * `scale_detection` key. Failures are swallowed — caching is
 * best-effort only.
 *
 * @param {string} rootDir - Project root directory
 * @param {Object} result - Detection result to store
 */
function writeCache(rootDir, result) {
  try {
    const statePath = path.join(rootDir, 'docs', '09-agents', 'session-state.json');
    const state = fs.existsSync(statePath)
      ? JSON.parse(fs.readFileSync(statePath, 'utf8'))
      : {};
    state.scale_detection = result;
    fs.writeFileSync(statePath, `${JSON.stringify(state, null, 2)}\n`);
  } catch {
    // Non-critical: a failed cache write just means re-detection later.
  }
}
218
+
219
/**
 * Detect project scale, serving a cached result when fresh.
 *
 * Collects file/story/commit/dependency metrics, classifies them into a
 * tier, and writes the result back to the session-state cache.
 *
 * @param {Object} options
 * @param {string} options.rootDir - Project root directory (default: cwd)
 * @param {Object|null} options.statusJson - Pre-loaded status.json (optional)
 * @param {Object|null} options.sessionState - Pre-loaded session-state.json (optional)
 * @param {boolean} options.forceRefresh - Skip cache (default: false)
 * @returns {Object} { scale, metrics, detected_at, detection_ms, fromCache }
 */
function detectScale(options = {}) {
  const {
    rootDir = process.cwd(),
    statusJson = null,
    sessionState = null,
    forceRefresh = false,
  } = options;

  // Serve a fresh cached detection unless the caller opted out.
  if (!forceRefresh) {
    const cached = readCache(rootDir, sessionState);
    if (cached) return { ...cached, fromCache: true };
  }

  const t0 = Date.now();
  const metrics = {
    files: countSourceFiles(rootDir),
    stories: countStories(statusJson, rootDir),
    commits: countGitCommits(rootDir),
    dependencies: countDependencies(rootDir),
  };

  const result = {
    scale: classifyScale(metrics),
    metrics,
    detected_at: new Date().toISOString(),
    detection_ms: Date.now() - t0,
    fromCache: false,
  };

  writeCache(rootDir, result); // best-effort; failures are swallowed there
  return result;
}
271
+
272
/**
 * Get a human-readable label for a scale tier.
 *
 * @param {string} scale - Scale tier (micro|small|medium|large|enterprise)
 * @returns {string} Capitalized label; unknown tiers pass through unchanged
 */
function getScaleLabel(scale) {
  const labels = {
    micro: 'Micro',
    small: 'Small',
    medium: 'Medium',
    large: 'Large',
    enterprise: 'Enterprise',
  };
  return labels[scale] || scale;
}
288
+
289
/**
 * Get workflow recommendations based on scale.
 *
 * @param {string} scale - Scale tier
 * @returns {Object} Workflow-depth recommendations; unknown tiers fall
 *   back to the 'medium' profile
 */
function getScaleRecommendations(scale) {
  // Per-tier workflow tuning; 'medium' doubles as the default profile.
  const profiles = {
    micro: {
      planningDepth: 'minimal',
      skipArchival: true,
      skipEpicPlanning: true,
      contextDepth: 'summary',
      expertCount: 2,
      welcomeDetail: 'compact',
      description: 'Quick specs, direct implementation. Skip epics and full planning.',
    },
    small: {
      planningDepth: 'light',
      skipArchival: true,
      skipEpicPlanning: false,
      contextDepth: 'summary',
      expertCount: 3,
      welcomeDetail: 'compact',
      description: 'Light stories, optional epics. Streamlined workflow.',
    },
    medium: {
      planningDepth: 'standard',
      skipArchival: false,
      skipEpicPlanning: false,
      contextDepth: 'standard',
      expertCount: 4,
      welcomeDetail: 'standard',
      description: 'Full story workflow with epics and planning.',
    },
    large: {
      planningDepth: 'thorough',
      skipArchival: false,
      skipEpicPlanning: false,
      contextDepth: 'full',
      expertCount: 5,
      welcomeDetail: 'full',
      description: 'Thorough planning with architecture review and multi-expert analysis.',
    },
    enterprise: {
      planningDepth: 'comprehensive',
      skipArchival: false,
      skipEpicPlanning: false,
      contextDepth: 'full',
      expertCount: 5,
      welcomeDetail: 'full',
      description: 'Comprehensive planning with council review and full documentation.',
    },
  };

  const profile = profiles[scale];
  return profile !== undefined ? profile : profiles.medium;
}
345
+
346
// Public API: the detection entry point, classification helpers, the
// individual metric counters, and tuning constants (exported for tests
// and dependent tooling).
module.exports = {
  detectScale,
  classifyScale,
  getScaleLabel,
  getScaleRecommendations,
  countSourceFiles,
  countStories,
  countGitCommits,
  countDependencies,
  SCALE_THRESHOLDS,
  CACHE_TTL_MS,
};