claude-autopm 1.28.0 → 1.30.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +75 -15
- package/autopm/.claude/scripts/pm/analytics.js +425 -0
- package/autopm/.claude/scripts/pm/sync-batch.js +337 -0
- package/lib/README-FILTER-SEARCH.md +285 -0
- package/lib/analytics-engine.js +689 -0
- package/lib/batch-processor-integration.js +366 -0
- package/lib/batch-processor.js +278 -0
- package/lib/burndown-chart.js +415 -0
- package/lib/conflict-history.js +316 -0
- package/lib/conflict-resolver.js +330 -0
- package/lib/dependency-analyzer.js +466 -0
- package/lib/filter-engine.js +414 -0
- package/lib/query-parser.js +322 -0
- package/lib/visual-diff.js +297 -0
- package/package.json +5 -4
|
@@ -0,0 +1,466 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* DependencyAnalyzer - Dependency graph analysis and bottleneck detection
|
|
3
|
+
*
|
|
4
|
+
* Analyzes task dependencies to find bottlenecks, critical paths,
|
|
5
|
+
* and parallelizable work.
|
|
6
|
+
*
|
|
7
|
+
* @example Basic Usage
|
|
8
|
+
* ```javascript
|
|
9
|
+
* const DependencyAnalyzer = require('./lib/dependency-analyzer');
|
|
10
|
+
* const analyzer = new DependencyAnalyzer();
|
|
11
|
+
*
|
|
12
|
+
* const analysis = await analyzer.analyze('epic-001', {
|
|
13
|
+
* basePath: '.claude'
|
|
14
|
+
* });
|
|
15
|
+
*
|
|
16
|
+
* console.log(analysis.bottlenecks); // Tasks blocking multiple others
|
|
17
|
+
* console.log(analysis.criticalPath); // Longest dependency chain
|
|
18
|
+
* console.log(analysis.parallelizable); // Tasks that can run in parallel
|
|
19
|
+
* ```
|
|
20
|
+
*
|
|
21
|
+
* @example Graph Structure
|
|
22
|
+
* ```javascript
|
|
23
|
+
* // analysis.graph structure:
|
|
24
|
+
* {
|
|
25
|
+
* nodes: ['task-001', 'task-002', 'task-003'],
|
|
26
|
+
* edges: [
|
|
27
|
+
* { from: 'task-001', to: 'task-002', type: 'depends_on' }
|
|
28
|
+
* ]
|
|
29
|
+
* }
|
|
30
|
+
* ```
|
|
31
|
+
*
|
|
32
|
+
* @module DependencyAnalyzer
|
|
33
|
+
* @version 1.0.0
|
|
34
|
+
* @since v1.29.0
|
|
35
|
+
*/
|
|
36
|
+
|
|
37
|
+
const FilterEngine = require('./filter-engine');
|
|
38
|
+
const path = require('path');
|
|
39
|
+
const fs = require('fs').promises;
|
|
40
|
+
|
|
41
|
+
class DependencyAnalyzer {
  /**
   * Create DependencyAnalyzer instance.
   *
   * The analyzer is stateless; every method receives its inputs explicitly.
   */
  constructor() {
    // No configuration needed
  }

  /**
   * Analyze dependencies for an epic.
   *
   * Loads all files under `<basePath>/epics/<epicId>`, builds the dependency
   * graph from task frontmatter and derives bottlenecks, critical path,
   * parallelizable groups and circular dependencies from it.
   *
   * @param {string} epicId - Epic ID
   * @param {Object} [options] - Options
   * @param {string} [options.basePath='.claude'] - Base path
   * @returns {Promise<Object|null>} - Analysis object, or null if the epic
   *   does not exist (no directory, or no epic.md marker file)
   *
   * @example
   * const analysis = await analyzer.analyze('epic-001');
   * // Returns:
   * // {
   * //   graph: { nodes: [...], edges: [...] },
   * //   bottlenecks: [{ taskId: 'task-003', blocking: 5, impact: 'high' }],
   * //   criticalPath: ['task-001', 'task-003', 'task-008'],
   * //   parallelizable: [['task-002', 'task-004'], ['task-006', 'task-007']],
   * //   circularDependencies: []
   * // }
   */
  async analyze(epicId, options = {}) {
    const basePath = options.basePath || '.claude';
    const filterEngine = new FilterEngine({ basePath });

    // Load every file in the epic directory (epic.md + task files).
    const epicDir = path.join(basePath, 'epics', epicId);
    const tasks = await filterEngine.loadFiles(epicDir);

    // No files loaded => the epic directory doesn't exist.
    if (tasks.length === 0) {
      return null;
    }

    // Without an epic.md marker the directory is not a valid epic.
    const epicFile = tasks.find(t => t.path && t.path.endsWith('epic.md'));
    if (!epicFile) {
      return null;
    }

    // Task files are everything with an id other than the epic's own id.
    const taskFiles = tasks.filter(t => t.frontmatter.id && t.frontmatter.id !== epicId);

    if (taskFiles.length === 0) {
      // Valid epic with no tasks: empty (but non-null) analysis.
      return {
        graph: { nodes: [], edges: [] },
        bottlenecks: [],
        criticalPath: [],
        parallelizable: [],
        circularDependencies: []
      };
    }

    try {
      const graph = this._buildGraph(taskFiles);

      return {
        graph,
        bottlenecks: this.findBottlenecks(graph),
        criticalPath: this.findCriticalPath(graph),
        parallelizable: this.findParallelizable(graph),
        circularDependencies: this._detectCircularDependencies(graph)
      };
    } catch (error) {
      // NOTE(review): every analysis error is reported as "epic not found".
      // Kept for backward compatibility with existing callers, but consider
      // rethrowing unexpected errors with { cause: error } so real bugs are
      // not silently hidden.
      return null;
    }
  }

  /**
   * Find bottlenecks in the dependency graph.
   *
   * A bottleneck is a task that blocks 3 or more other tasks. Impact is
   * 'low' at 3, 'medium' at 4 and 'high' at 5+.
   *
   * @param {Object} graph - Dependency graph ({ nodes, edges })
   * @returns {Array} - Bottlenecks sorted by blocking count, descending
   *
   * @example
   * const bottlenecks = analyzer.findBottlenecks(graph);
   * // Returns: [
   * //   { taskId: 'task-003', blocking: 5, impact: 'high', reason: 'Blocks 5 other tasks' }
   * // ]
   */
  findBottlenecks(graph) {
    // Count outgoing edges per task (how many other tasks each one blocks).
    const blockingCounts = new Map();
    for (const edge of graph.edges) {
      blockingCounts.set(edge.from, (blockingCounts.get(edge.from) || 0) + 1);
    }

    const bottlenecks = [];
    for (const [taskId, count] of blockingCounts) {
      if (count < 3) {
        continue;
      }

      let impact = 'low';
      if (count >= 5) {
        impact = 'high';
      } else if (count === 4) {
        impact = 'medium';
      }

      bottlenecks.push({
        taskId,
        blocking: count,
        impact,
        // count is always >= 3 here, so the plural form is always correct
        // (the original singular branch was unreachable).
        reason: `Blocks ${count} other tasks`
      });
    }

    // Most-blocking first.
    bottlenecks.sort((a, b) => b.blocking - a.blocking);

    return bottlenecks;
  }

  /**
   * Find the critical path (longest dependency chain) through the graph.
   *
   * Uses Kahn's topological sort with a longest-path DP over the DAG.
   * Edges whose endpoints are not graph nodes (dangling references to tasks
   * outside the epic) are ignored instead of crashing.
   *
   * @param {Object} graph - Dependency graph ({ nodes, edges })
   * @returns {Array} - Task IDs in critical path (a single node when the
   *   graph has no usable edges)
   *
   * @example
   * const criticalPath = analyzer.findCriticalPath(graph);
   * // Returns: ['task-001', 'task-003', 'task-008', 'task-015']
   */
  findCriticalPath(graph) {
    if (graph.nodes.length === 0) {
      return [];
    }

    const { adjList, inDegree } = this._toAdjacency(graph);

    // longestPath[n] = length (in edges) of the longest chain ending at n;
    // parent[n] = predecessor on that chain, for path reconstruction.
    const longestPath = {};
    const parent = {};
    for (const node of graph.nodes) {
      longestPath[node] = 0;
      parent[node] = null;
    }

    // Kahn's algorithm: process nodes in topological order, relaxing the
    // longest-path estimate along each edge.
    const queue = graph.nodes.filter(node => inDegree[node] === 0);
    while (queue.length > 0) {
      const node = queue.shift();

      for (const neighbor of adjList[node]) {
        if (longestPath[node] + 1 > longestPath[neighbor]) {
          longestPath[neighbor] = longestPath[node] + 1;
          parent[neighbor] = node;
        }

        inDegree[neighbor]--;
        if (inDegree[neighbor] === 0) {
          queue.push(neighbor);
        }
      }
    }

    // Locate the node at the end of the deepest chain.
    let maxLength = 0;
    let endNode = null;
    for (const node of graph.nodes) {
      if (longestPath[node] > maxLength) {
        maxLength = longestPath[node];
        endNode = node;
      }
    }

    if (!endNode) {
      // No dependencies at all: any single task is the "path".
      return graph.nodes.slice(0, 1);
    }

    // Walk parent links back from the end node. (Renamed from `path` to
    // avoid shadowing the module-level `path` import.)
    const criticalPath = [];
    for (let current = endNode; current !== null; current = parent[current]) {
      criticalPath.unshift(current);
    }

    return criticalPath;
  }

  /**
   * Find tasks that can be executed in parallel.
   *
   * Tasks are grouped by dependency level (BFS over the DAG); any level
   * containing two or more tasks is parallelizable. Dangling edges are
   * ignored, and a fallback keeps the walk progressing when cycles leave
   * nodes unreachable.
   *
   * @param {Object} graph - Dependency graph ({ nodes, edges })
   * @returns {Array<Array<string>>} - Groups of parallelizable tasks
   *
   * @example
   * const parallelizable = analyzer.findParallelizable(graph);
   * // Returns: [
   * //   ['task-002', 'task-004', 'task-005'], // Can work in parallel
   * //   ['task-006', 'task-007']              // Can work in parallel
   * // ]
   */
  findParallelizable(graph) {
    if (graph.nodes.length === 0) {
      return [];
    }

    const { adjList, inDegree } = this._toAdjacency(graph);

    const levels = [];
    const visited = new Set();

    // Start with tasks that have no dependencies.
    let queue = graph.nodes.filter(node => inDegree[node] === 0);

    while (queue.length > 0 || visited.size < graph.nodes.length) {
      // Current level = everything that became ready in the previous pass.
      const currentLevel = queue;
      queue = [];

      if (currentLevel.length > 1) {
        // Two or more tasks at the same level have no mutual dependencies.
        levels.push(currentLevel);
      }

      // Mark the level as done and release its dependents.
      for (const node of currentLevel) {
        visited.add(node);

        for (const neighbor of adjList[node]) {
          inDegree[neighbor]--;
          if (inDegree[neighbor] === 0 && !visited.has(neighbor)) {
            queue.push(neighbor);
          }
        }
      }

      // Stalled before visiting everything: a cycle (or isolated subgraph)
      // is holding nodes back. Force the unvisited node with the lowest
      // remaining in-degree so the walk can continue.
      if (queue.length === 0 && visited.size < graph.nodes.length) {
        let minDegree = Infinity;
        let nextNode = null;

        for (const node of graph.nodes) {
          if (!visited.has(node) && inDegree[node] < minDegree) {
            minDegree = inDegree[node];
            nextNode = node;
          }
        }

        if (nextNode) {
          queue.push(nextNode);
          inDegree[nextNode] = 0;
        } else {
          break;
        }
      }
    }

    return levels;
  }

  // ============================================================================
  // Private Helper Methods
  // ============================================================================

  /**
   * Build a dependency graph from task files.
   *
   * Nodes are task IDs. Edges run from blocker to blocked: a `depends_on`
   * entry produces an edge into the declaring task, a `blocks` entry
   * produces an edge out of it. Edges may reference IDs outside this set of
   * tasks; graph consumers skip such dangling edges.
   *
   * @param {Array<Object>} taskFiles - Parsed task files with frontmatter
   * @returns {{nodes: Array<string>, edges: Array<Object>}}
   */
  _buildGraph(taskFiles) {
    const nodes = taskFiles.map(task => task.frontmatter.id);

    const edges = [];
    for (const task of taskFiles) {
      const taskId = task.frontmatter.id;

      // "I depend on X" => edge X -> me.
      if (Array.isArray(task.frontmatter.depends_on)) {
        for (const depId of task.frontmatter.depends_on) {
          edges.push({
            from: depId,
            to: taskId,
            type: 'depends_on'
          });
        }
      }

      // "I block X" => edge me -> X.
      if (Array.isArray(task.frontmatter.blocks)) {
        for (const blockedId of task.frontmatter.blocks) {
          edges.push({
            from: taskId,
            to: blockedId,
            type: 'blocks'
          });
        }
      }
    }

    return { nodes, edges };
  }

  /**
   * Build adjacency list and in-degree map for a graph.
   *
   * Edges whose endpoints are not graph nodes are skipped; previously a
   * dangling `depends_on`/`blocks` reference crashed the traversals with a
   * TypeError on `adjList[edge.from].push`, which `analyze()` then silently
   * reported as "epic not found".
   *
   * @param {Object} graph - Dependency graph ({ nodes, edges })
   * @returns {{adjList: Object, inDegree: Object}}
   */
  _toAdjacency(graph) {
    const nodeSet = new Set(graph.nodes);
    const adjList = {};
    const inDegree = {};

    for (const node of graph.nodes) {
      adjList[node] = [];
      inDegree[node] = 0;
    }

    for (const edge of graph.edges) {
      if (!nodeSet.has(edge.from) || !nodeSet.has(edge.to)) {
        continue; // dangling reference to a task outside this graph
      }
      adjList[edge.from].push(edge.to);
      inDegree[edge.to]++;
    }

    return { adjList, inDegree };
  }

  /**
   * Detect circular dependencies via DFS with a recursion stack.
   *
   * Reports at most one cycle per DFS tree (the search unwinds as soon as a
   * cycle is found). Each entry's `cycle` array starts and ends with the
   * same node; `length` is the number of edges in the cycle.
   *
   * @param {Object} graph - Dependency graph ({ nodes, edges })
   * @returns {Array<{cycle: Array<string>, length: number}>}
   */
  _detectCircularDependencies(graph) {
    const { adjList } = this._toAdjacency(graph);

    const cycles = [];
    const visited = new Set();
    const recStack = new Set();

    const dfs = (node, trail) => {
      visited.add(node);
      recStack.add(node);
      trail.push(node);

      for (const neighbor of adjList[node]) {
        if (!visited.has(neighbor)) {
          if (dfs(neighbor, trail)) {
            return true;
          }
        } else if (recStack.has(neighbor)) {
          // Back edge: everything from the neighbor onwards is a cycle.
          const cycleStart = trail.indexOf(neighbor);
          const cycle = trail.slice(cycleStart);
          cycle.push(neighbor); // Close the cycle

          cycles.push({
            cycle,
            length: cycle.length - 1
          });

          return true;
        }
      }

      recStack.delete(node);
      trail.pop();
      return false;
    };

    // Run DFS from every node not yet covered by a previous tree.
    for (const node of graph.nodes) {
      if (!visited.has(node)) {
        dfs(node, []);
      }
    }

    return cycles;
  }
}
|
|
465
|
+
|
|
466
|
+
module.exports = DependencyAnalyzer;
|