codeswarm 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.codeswarm/skills/prd_template.md +98 -0
- package/AGENT_TIPS.md +206 -0
- package/BROWSER_TESTING.md +177 -0
- package/COORDINATOR.md +151 -0
- package/LICENSE +21 -0
- package/README.md +253 -0
- package/TASK_PROTOCOL.md +111 -0
- package/WORKFLOWS.md +174 -0
- package/bin/codeswarm.js +15 -0
- package/config.yaml +55 -0
- package/coordinator.sh +1762 -0
- package/dashboard/package-lock.json +1036 -0
- package/dashboard/package.json +14 -0
- package/dashboard/public/index.html +758 -0
- package/dashboard/server.js +444 -0
- package/docs/prd-example.md +90 -0
- package/docs/prd-template.md +45 -0
- package/orchestrate.sh +467 -0
- package/package.json +62 -0
- package/playwright.config.ts +19 -0
- package/setup.sh +142 -0
|
@@ -0,0 +1,444 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
const express = require('express');
|
|
3
|
+
const http = require('http');
|
|
4
|
+
const { WebSocketServer } = require('ws');
|
|
5
|
+
const chokidar = require('chokidar');
|
|
6
|
+
const fs = require('fs');
|
|
7
|
+
const path = require('path');
|
|
8
|
+
|
|
9
|
+
// ─── Parse args ───────────────────────────────────────
// Minimal hand-rolled CLI parsing: flags may appear in any order; a flag's
// value is consumed by advancing the index (`args[++i]`).
const args = process.argv.slice(2);
let projectDir = '';
let sessionId = '';
let port = 3777;

for (let i = 0; i < args.length; i++) {
  const flag = args[i];
  if (flag === '--project' || flag === '-p') {
    projectDir = args[++i];
  } else if (flag === '--session' || flag === '-s') {
    sessionId = args[++i];
  } else if (flag === '--port') {
    port = parseInt(args[++i], 10);
  } else if (flag === '--help' || flag === '-h') {
    console.log(`
Agentic Dashboard — Real-time monitoring for multi-agent coordinator

Usage:
  node server.js --project <path> [--session <id>] [--port 3777]

Options:
  --project, -p   Path to project with .codeswarm/ directory (required)
  --session, -s   Specific session ID (default: latest)
  --port          Server port (default: 3777)
`);
    process.exit(0);
  }
}

// --project is mandatory: everything below is rooted at the project dir.
if (!projectDir) {
  console.error('ERROR: --project <path> is required');
  process.exit(1);
}

projectDir = path.resolve(projectDir);
const agenticDir = path.join(projectDir, '.codeswarm');
const sessionsDir = path.join(agenticDir, 'sessions');

// Fail fast if the project has never been initialized for codeswarm.
if (!fs.existsSync(agenticDir)) {
  console.error(`ERROR: ${agenticDir} does not exist`);
  process.exit(1);
}
|
|
48
|
+
|
|
49
|
+
// ─── Find session ─────────────────────────────────────
/**
 * Return the most recent session directory name, or null when none exist.
 * Session dirs are named `session_<...>` where lexicographic order matches
 * chronological order, so the last sorted entry is the latest.
 */
function findLatestSession() {
  if (!fs.existsSync(sessionsDir)) return null;
  const candidates = fs
    .readdirSync(sessionsDir)
    .filter((name) => name.startsWith('session_'))
    .sort();
  return candidates.length > 0 ? candidates[candidates.length - 1] : null;
}
|
|
58
|
+
|
|
59
|
+
/**
 * Absolute path of the currently selected session directory.
 * Prefers the explicitly selected `sessionId`; otherwise falls back to the
 * newest session on disk. Returns null when no session exists yet.
 */
function getSessionDir() {
  const sid = sessionId || findLatestSession();
  return sid ? path.join(sessionsDir, sid) : null;
}
|
|
64
|
+
|
|
65
|
+
// ─── Parse session data ───────────────────────────────
/**
 * Remove ANSI CSI escape sequences from a string.
 *
 * Fix: the previous regex (`\x1b\[[0-9;]*m`) only removed SGR color codes,
 * so cursor-movement/erase sequences such as `\x1b[2K` or `\x1b[1A`, and
 * private-mode sequences like `\x1b[?25l`, leaked into parsed log output.
 * This version matches any CSI sequence: parameters `[0-9;?]*` followed by
 * a final byte in the `@`–`~` range (per ECMA-48).
 *
 * @param {string} str - Raw text possibly containing ANSI escapes.
 * @returns {string} The text with all CSI sequences removed.
 */
function stripAnsi(str) {
  return str.replace(/\x1b\[[0-9;?]*[@-~]/g, '');
}
|
|
69
|
+
|
|
70
|
+
/**
 * Read the session's coordinator.log and return one entry per non-empty
 * line: `{ raw, ts }` where `raw` is the ANSI-stripped line and `ts` is the
 * leading `[HH:MM:SS]` timestamp if present (otherwise null).
 * Returns [] when the log file does not exist.
 */
function parseCoordinatorLog(sessionPath) {
  const logFile = path.join(sessionPath, 'coordinator.log');
  if (!fs.existsSync(logFile)) return [];
  return fs
    .readFileSync(logFile, 'utf-8')
    .split('\n')
    .filter(Boolean)
    .map((line) => {
      const clean = stripAnsi(line);
      return { raw: clean, ts: extractTimestamp(clean) };
    });
}
|
|
80
|
+
|
|
81
|
+
/**
 * Pull a leading `[HH:MM:SS]` timestamp off a log line.
 * @param {string} line - ANSI-stripped log line.
 * @returns {?string} The `HH:MM:SS` portion, or null when absent.
 */
function extractTimestamp(line) {
  const match = /^\[(\d{2}:\d{2}:\d{2})\]/.exec(line);
  return match === null ? null : match[1];
}
|
|
85
|
+
|
|
86
|
+
/**
 * Parse `.codeswarm/task.md` into `{ title, subtasks, isPrd }`.
 *
 * Each subtask is `{ num, status, title, storyId, acceptanceCriteria }`.
 * Checkbox chars map to status: `x` → done, `/` → in_progress, else pending
 * (for AC entries: `x` → pass).
 *
 * Bug fix: the fallback subtask pattern was unanchored, so indented
 * acceptance-criteria lines (`    - [ ] ...`) matched it first and were
 * recorded as new top-level subtasks — the AC branch was unreachable.
 * Indented AC lines are now consumed before subtask matching, and the
 * fallback subtask pattern is anchored to column 0.
 *
 * NOTE(review): `sessionPath` is unused — task.md is always read from the
 * project root; parameter kept for caller compatibility.
 */
function parseTaskFile(sessionPath) {
  const taskFile = path.join(projectDir, '.codeswarm', 'task.md');
  if (!fs.existsSync(taskFile)) return { title: '', subtasks: [], isPrd: false };

  const raw = fs.readFileSync(taskFile, 'utf-8');
  const titleMatch = raw.match(/^# Task:\s*(.+)/m);
  const title = titleMatch ? titleMatch[1].trim() : '';

  // Detect if this is a PRD-derived task (has [US-XXX] tags)
  const isPrd = /\[US-\d+\]/.test(raw);

  const subtasks = [];
  let currentSubtask = null;
  for (const line of raw.split('\n')) {
    // Indented checkbox lines (4+ spaces) are acceptance criteria of the
    // current subtask — handle them before subtask matching so they are
    // never mistaken for new subtasks.
    const acMatch = line.match(/^\s{4,}- \[(.)\]\s*(.+)/);
    if (currentSubtask && acMatch) {
      currentSubtask.acceptanceCriteria.push({
        status: acMatch[1] === 'x' ? 'pass' : acMatch[1] === '/' ? 'in_progress' : 'pending',
        text: acMatch[2].trim()
      });
      continue;
    }

    // Subtasks: either `N: - [ ] ...` / `N - [ ] ...` or a plain top-level
    // `- [ ] ...` (fallback anchored at column 0).
    const m = line.match(/^(\d+):?\s*- \[(.)\]\s*(.+)/);
    const m2 = !m ? line.match(/^- \[(.)\]\s*(.+)/) : null;
    if (m || m2) {
      const checkbox = m ? m[2] : m2[1];
      const text = m ? m[3].trim() : m2[2].trim();
      const status = checkbox === 'x' ? 'done' : checkbox === '/' ? 'in_progress' : 'pending';

      // Extract [US-XXX] tag if present
      const usMatch = text.match(/^\*?\*?\[?(US-\d+)\]?/);
      const storyId = usMatch ? usMatch[1] : null;

      currentSubtask = { num: subtasks.length + 1, status, title: text, storyId, acceptanceCriteria: [] };
      subtasks.push(currentSubtask);
    }
    // "- Acceptance Criteria:" header lines need no handling: they match
    // neither pattern and the indented items that follow are caught above.
  }
  return { title, subtasks, isPrd };
}
|
|
128
|
+
|
|
129
|
+
/**
 * Scan a session directory for per-agent artifacts.
 * `log_<seq>_<name>.md` and `prompt_<seq>_<name>.md` files are grouped by
 * agent name. Returns one object per agent with its logs (including size and
 * mtime), prompts (both sorted by sequence number), and the latest log.
 * Returns [] when the directory does not exist.
 */
function getAgentFiles(sessionPath) {
  if (!fs.existsSync(sessionPath)) return [];

  const byName = {};
  // Lazily create the per-agent accumulator so an agent is registered
  // whether its first file seen is a log or a prompt.
  const ensure = (name) => {
    if (!byName[name]) byName[name] = { name, logs: [], prompts: [] };
    return byName[name];
  };

  for (const fileName of fs.readdirSync(sessionPath)) {
    const logMatch = fileName.match(/^log_(\d+)_(\w+)\.md$/);
    if (logMatch) {
      const stat = fs.statSync(path.join(sessionPath, fileName));
      ensure(logMatch[2]).logs.push({
        seq: parseInt(logMatch[1], 10),
        file: fileName,
        size: stat.size,
        mtime: stat.mtimeMs
      });
    }

    const promptMatch = fileName.match(/^prompt_(\d+)_(\w+)\.md$/);
    if (promptMatch) {
      ensure(promptMatch[2]).prompts.push({ seq: parseInt(promptMatch[1], 10), file: fileName });
    }
  }

  return Object.values(byName).map((agent) => {
    const logs = agent.logs.sort((a, b) => a.seq - b.seq);
    const prompts = agent.prompts.sort((a, b) => a.seq - b.seq);
    return {
      ...agent,
      logs,
      prompts,
      latestLog: logs.length > 0 ? logs[logs.length - 1] : null
    };
  });
}
|
|
166
|
+
|
|
167
|
+
/**
 * Return the last `lines` lines of a log file in the session dir, with ANSI
 * escapes stripped. Returns '' when the file does not exist.
 * @param {string} sessionPath - Session directory.
 * @param {string} logFile - File name relative to the session directory.
 * @param {number} [lines=50] - Number of trailing lines to keep.
 */
function getAgentLogTail(sessionPath, logFile, lines = 50) {
  const filePath = path.join(sessionPath, logFile);
  if (!fs.existsSync(filePath)) return '';
  const allLines = fs.readFileSync(filePath, 'utf-8').split('\n');
  const tail = allLines.slice(-lines);
  return stripAnsi(tail.join('\n'));
}
|
|
174
|
+
|
|
175
|
+
/**
 * Parse coordinator directive files (`directive_*`) from a directory, in
 * sorted (chronological) filename order. Each directive is reduced to
 * `{ file, action, subtask, instructions }` via ACTION:/SUBTASK:/INSTRUCTIONS:
 * markers; missing markers yield 'UNKNOWN' / null / ''.
 */
function parseDirectives(sessionPath) {
  if (!fs.existsSync(sessionPath)) return [];
  const directiveFiles = fs
    .readdirSync(sessionPath)
    .filter((name) => name.startsWith('directive_'))
    .sort();
  return directiveFiles.map((name) => {
    const clean = stripAnsi(fs.readFileSync(path.join(sessionPath, name), 'utf-8'));
    const action = clean.match(/ACTION:\s*(\w+)/i);
    const subtask = clean.match(/SUBTASK:\s*(\d+)/i);
    const instructions = clean.match(/INSTRUCTIONS?:\s*(.+)/i);
    return {
      file: name,
      action: action ? action[1] : 'UNKNOWN',
      subtask: subtask ? parseInt(subtask[1], 10) : null,
      instructions: instructions ? instructions[1].trim() : ''
    };
  });
}
|
|
196
|
+
|
|
197
|
+
/**
 * Determine which agent fills each coordinator role for a session.
 *
 * Primary source: `metadata.json` in the session dir (written by the
 * coordinator). If it is absent or unparseable, falls back to scanning the
 * coordinator log for "Planner:", "Executor:", "Reviewers:", "FE Dev:" and
 * "FE Review:" announcements.
 *
 * @param {string} sessionPath - Absolute path of the session directory.
 * @returns {{planner: ?string, executor: ?string, reviewers: string[],
 *            frontendDev: ?string, frontendReviewers: string[]}}
 */
function detectAgentRoles(sessionPath) {
  // Primary: read metadata.json written by coordinator
  const metaFile = path.join(sessionPath, 'metadata.json');
  if (fs.existsSync(metaFile)) {
    try {
      const meta = JSON.parse(fs.readFileSync(metaFile, 'utf-8'));
      // Normalize missing fields so callers always see the same shape.
      return {
        planner: meta.planner || null,
        executor: meta.executor || null,
        reviewers: meta.reviewers || [],
        frontendDev: meta.frontendDev || null,
        frontendReviewers: meta.frontendReviewers || []
      };
    } catch (e) { /* fall through to log parsing */ }
  }

  // Fallback: parse coordinator log
  // Later log lines overwrite earlier ones, so the roles reflect the most
  // recent announcement in the log.
  const log = parseCoordinatorLog(sessionPath);
  const roles = { planner: null, executor: null, reviewers: [], frontendDev: null, frontendReviewers: [] };
  for (const entry of log) {
    const line = entry.raw;
    // Cheap substring guard first, then a capturing match on the same line.
    if (line.includes('Planner:')) {
      const m = line.match(/Planner:\s*(\w+)/);
      if (m) roles.planner = m[1];
    }
    if (line.includes('Executor:')) {
      const m = line.match(/Executor:\s*(\w+)/);
      if (m) roles.executor = m[1];
    }
    if (line.includes('Reviewers:') || line.includes('reviewers:')) {
      // Capture up to an opening paren (or end of line), then split the
      // name list on commas/whitespace.
      const m = line.match(/Reviewers?:\s*(.+?)(?:\(|$)/i);
      if (m) roles.reviewers = m[1].trim().split(/[\s,]+/).filter(Boolean);
    }
    if (line.includes('FE Dev:')) {
      const m = line.match(/FE Dev:\s*(\w+)/);
      if (m) roles.frontendDev = m[1];
    }
    if (line.includes('FE Review:')) {
      const m = line.match(/FE Review:\s*(.+?)(?:\(|$)/i);
      if (m) roles.frontendReviewers = m[1].trim().split(/[\s,]+/).filter(Boolean);
    }
  }
  return roles;
}
|
|
241
|
+
|
|
242
|
+
/**
 * Build the full dashboard state snapshot for the active session.
 *
 * Aggregates the coordinator log, task file, per-agent files, directives
 * and roles, plus two derived fields:
 * - currentRound: highest "Round N" number seen in the coordinator log;
 * - runningAgents: agents whose latest log was modified within 60 seconds.
 *
 * When no session exists, returns `{ error, sessions }` instead so the UI
 * can still offer a session picker.
 *
 * @returns {object} Dashboard state (coordLog truncated to last 100 entries
 *   to bound payload size).
 */
function getSessionState() {
  const sessionPath = getSessionDir();
  if (!sessionPath || !fs.existsSync(sessionPath)) {
    return { error: 'No session found', sessions: listSessions() };
  }

  const sid = path.basename(sessionPath);
  const coordLog = parseCoordinatorLog(sessionPath);
  // NOTE(review): parseTaskFile ignores its argument and reads the project's
  // task.md — confirm if per-session task files are ever intended.
  const task = parseTaskFile(sessionPath);
  const agents = getAgentFiles(sessionPath);
  const directives = parseDirectives(path.join(sessionPath, 'directives'));
  const roles = detectAgentRoles(sessionPath);

  // Detect current round from coordinator log
  let currentRound = 0;
  for (const entry of coordLog) {
    const m = entry.raw.match(/Round\s+(\d+)/i);
    if (m) currentRound = Math.max(currentRound, parseInt(m[1], 10));
  }

  // Detect running agents (files modified in last 60s)
  const now = Date.now();
  const runningAgents = agents
    .filter(a => a.latestLog && (now - a.latestLog.mtime) < 60000)
    .map(a => a.name);

  return {
    sessionId: sid,
    sessionPath,
    project: projectDir,
    roles,
    currentRound,
    task,
    agents,
    runningAgents,
    directives,
    coordLog: coordLog.slice(-100),
    sessions: listSessions()
  };
}
|
|
282
|
+
|
|
283
|
+
/**
 * List up to the 20 most recent session directory names, newest first.
 * Returns [] when the sessions directory does not exist.
 */
function listSessions() {
  if (!fs.existsSync(sessionsDir)) return [];
  const names = fs.readdirSync(sessionsDir).filter((d) => d.startsWith('session_'));
  names.sort();
  names.reverse();
  return names.slice(0, 20);
}
|
|
291
|
+
|
|
292
|
+
// ─── Express + WebSocket ──────────────────────────────
const app = express();
const server = http.createServer(app);
const wss = new WebSocketServer({ server });

app.use(express.static(path.join(__dirname, 'public')));

// Security fix: `req.params.file` is URL-decoded by Express, so an encoded
// `..%2F` could previously escape the session directory in the four log
// routes below. Reducing the name to its basename confines every read to
// the session directory.
function safeLogName(name) {
  return path.basename(name);
}

app.get('/api/state', (req, res) => {
  res.json(getSessionState());
});

app.get('/api/sessions', (req, res) => {
  res.json(listSessions());
});

// Selecting a session mutates the module-level sessionId; subsequent
// requests and the file watcher follow the newly selected session.
app.get('/api/session/:sid', (req, res) => {
  sessionId = req.params.sid;
  res.json(getSessionState());
});

// Tail of a log file (default 200 lines), ANSI-stripped.
app.get('/api/log/:file', (req, res) => {
  const sessionPath = getSessionDir();
  if (!sessionPath) return res.status(404).json({ error: 'No session' });
  const lines = parseInt(req.query.lines, 10) || 200;
  const tail = getAgentLogTail(sessionPath, safeLogName(req.params.file), lines);
  res.json({ content: tail });
});

// Search within a log file
app.get('/api/log-search/:file', (req, res) => {
  const sessionPath = getSessionDir();
  if (!sessionPath) return res.status(404).json({ error: 'No session' });
  const query = (req.query.q || '').toLowerCase();
  if (!query) return res.json({ matches: [], total: 0 });

  const filePath = path.join(sessionPath, safeLogName(req.params.file));
  if (!fs.existsSync(filePath)) return res.status(404).json({ error: 'File not found' });

  const raw = fs.readFileSync(filePath, 'utf-8');
  const allLines = raw.split('\n');
  const matches = [];
  for (let i = 0; i < allLines.length; i++) {
    const clean = stripAnsi(allLines[i]);
    if (clean.toLowerCase().includes(query)) {
      matches.push({ line: i + 1, content: clean });
    }
  }
  // Cap the response at 100 matches but report the true total.
  res.json({ matches: matches.slice(0, 100), total: matches.length });
});

// Download raw log file
app.get('/api/log-download/:file', (req, res) => {
  const sessionPath = getSessionDir();
  if (!sessionPath) return res.status(404).json({ error: 'No session' });
  const fileName = safeLogName(req.params.file);
  const filePath = path.join(sessionPath, fileName);
  if (!fs.existsSync(filePath)) return res.status(404).json({ error: 'File not found' });

  // Strip ANSI codes for clean download
  const raw = fs.readFileSync(filePath, 'utf-8');
  const clean = stripAnsi(raw);
  // Remove quotes so the header stays well-formed (no header injection).
  res.setHeader('Content-Disposition', `attachment; filename="${fileName.replace(/"/g, '')}"`);
  res.setHeader('Content-Type', 'text/plain');
  res.send(clean);
});

// Detect agent phase from log content
app.get('/api/log-phases/:file', (req, res) => {
  const sessionPath = getSessionDir();
  if (!sessionPath) return res.status(404).json({ error: 'No session' });
  const filePath = path.join(sessionPath, safeLogName(req.params.file));
  if (!fs.existsSync(filePath)) return res.status(404).json({ error: 'File not found' });

  const raw = fs.readFileSync(filePath, 'utf-8');
  const clean = stripAnsi(raw);
  const phases = detectPhases(clean);
  res.json({ phases, currentPhase: phases.length > 0 ? phases[phases.length - 1].phase : null });
});
|
|
369
|
+
|
|
370
|
+
/**
 * Infer a coarse sequence of work phases from agent log text by keyword
 * matching each line against ordered phase patterns (first match wins).
 * Consecutive lines in the same phase are collapsed into one entry.
 *
 * @param {string} logContent - ANSI-stripped log text.
 * @returns {{phase: string, line: number}[]} Phase transitions (1-based lines).
 */
function detectPhases(logContent) {
  // Order matters: the first pattern that matches a line decides its phase.
  const phasePatterns = [
    { pattern: /reading|read file|view_file|cat |examining|analyzing/i, phase: 'Reading' },
    { pattern: /writing|write_to_file|creating file|edit |modifying|implement/i, phase: 'Implementing' },
    { pattern: /test|jest|mocha|pytest|mvn test|npm test|running tests/i, phase: 'Testing' },
    { pattern: /build|compile|mvn compile|npm run build|tsc/i, phase: 'Building' },
    { pattern: /git commit|git add|committing/i, phase: 'Committing' },
    { pattern: /review|checking|verif|validat/i, phase: 'Reviewing' },
  ];

  const phases = [];
  logContent.split('\n').forEach((line, index) => {
    const hit = phasePatterns.find(({ pattern }) => pattern.test(line));
    if (hit && (phases.length === 0 || phases[phases.length - 1].phase !== hit.phase)) {
      phases.push({ phase: hit.phase, line: index + 1 });
    }
  });
  return phases;
}
|
|
394
|
+
|
|
395
|
+
// WebSocket broadcast
/**
 * Serialize `data` once and push it to every connected client that is in
 * the OPEN state (readyState === 1).
 */
function broadcast(data) {
  const payload = JSON.stringify(data);
  for (const client of wss.clients) {
    if (client.readyState === 1) client.send(payload);
  }
}
|
|
402
|
+
|
|
403
|
+
// File watcher
let watcher = null;
// Watcher for new session dirs. Created once; the per-session `watcher`
// above is the only one recreated when the active session changes.
let sessionsWatcher = null;

/**
 * (Re)start the chokidar watcher over the active session dir and task.md,
 * broadcasting a fresh state snapshot to dashboard clients on any change.
 *
 * Fix: the sessions-directory watcher used to be created on EVERY call and
 * was never closed — and startWatching() recurses whenever a new session
 * appears, so watchers accumulated for the lifetime of the process. It is
 * now created exactly once.
 */
function startWatching() {
  const sessionPath = getSessionDir();
  if (!sessionPath) return;

  // Close the previous session watcher before replacing it.
  if (watcher) watcher.close();

  const watchPaths = [sessionPath, path.join(agenticDir, 'task.md')];
  watcher = chokidar.watch(watchPaths, {
    ignoreInitial: true,
    // Debounce partially-written files before emitting events.
    awaitWriteFinish: { stabilityThreshold: 500, pollInterval: 200 }
  });

  watcher.on('all', (event, filePath) => {
    broadcast({ type: 'update', event, file: path.basename(filePath), state: getSessionState() });
  });

  // Also watch for new sessions (once): switch to the newest session and
  // restart the per-session watcher when one appears.
  if (!sessionsWatcher) {
    sessionsWatcher = chokidar.watch(sessionsDir, { depth: 0, ignoreInitial: true }).on('addDir', () => {
      const latest = findLatestSession();
      if (latest && latest !== sessionId) {
        sessionId = latest;
        startWatching();
        broadcast({ type: 'new_session', sessionId: latest, state: getSessionState() });
      }
    });
  }
}
|
|
432
|
+
|
|
433
|
+
// Push the full current state to every client as soon as it connects.
wss.on('connection', (socket) => {
  socket.send(JSON.stringify({ type: 'init', state: getSessionState() }));
});

// ─── Start ────────────────────────────────────────────
server.listen(port, () => {
  console.log(`\n🎯 Agentic Dashboard running at http://localhost:${port}`);
  console.log(`📁 Project: ${projectDir}`);
  const activeSession = sessionId || findLatestSession();
  console.log(`📋 Session: ${activeSession || '(waiting for session...)'}\n`);
  startWatching();
});
|
|
@@ -0,0 +1,90 @@
|
|
|
1
|
+
# PRD: CMMS Work Order Management API
|
|
2
|
+
|
|
3
|
+
## Overview
|
|
4
|
+
Build a RESTful API for managing maintenance work orders in a CMMS (Computerized Maintenance Management System). The API handles work order lifecycle from creation through completion, with assignment, priority management, and status tracking.
|
|
5
|
+
|
|
6
|
+
## Tech Stack
|
|
7
|
+
Java 21, Spring Boot 3.x, PostgreSQL, Liquibase, Maven
|
|
8
|
+
|
|
9
|
+
## User Stories
|
|
10
|
+
|
|
11
|
+
### US-001: Database Schema & Entity [priority: 1]
|
|
12
|
+
**Description:** Create the database schema and JPA entity for work orders. This is the foundation for all other stories.
|
|
13
|
+
**Files:** `src/main/resources/db/changelog/001-create-work-orders.xml`, `src/main/java/com/app/domain/model/WorkOrder.java`, `src/main/java/com/app/domain/model/WorkOrderStatus.java`, `src/main/java/com/app/domain/model/WorkOrderPriority.java`
|
|
14
|
+
**Acceptance Criteria:**
|
|
15
|
+
- [ ] Liquibase migration creates `work_orders` table with columns: id, title, description, status, priority, assigned_to, created_by, due_date, completed_at, created_at, updated_at
|
|
16
|
+
- [ ] WorkOrder JPA entity maps correctly with proper annotations (@Entity, @Table, @Id, @Version)
|
|
17
|
+
- [ ] WorkOrderStatus enum: OPEN, IN_PROGRESS, ON_HOLD, COMPLETED, CANCELLED
|
|
18
|
+
- [ ] WorkOrderPriority enum: CRITICAL, HIGH, MEDIUM, LOW
|
|
19
|
+
- [ ] `mvn compile` passes with no errors
|
|
20
|
+
**Dependencies:** none
|
|
21
|
+
**Notes:** Follow hexagonal architecture. Use `@GeneratedValue(strategy = GenerationType.SEQUENCE)` with a dedicated sequence.
|
|
22
|
+
|
|
23
|
+
### US-002: Repository & Basic Service [priority: 2]
|
|
24
|
+
**Description:** Create the repository interface and service layer with basic CRUD operations.
|
|
25
|
+
**Files:** `src/main/java/com/app/infrastructure/adapter/out/persistence/WorkOrderRepository.java`, `src/main/java/com/app/domain/service/WorkOrderService.java`, `src/main/java/com/app/application/dto/WorkOrderDto.java`, `src/main/java/com/app/application/dto/CreateWorkOrderRequest.java`
|
|
26
|
+
**Acceptance Criteria:**
|
|
27
|
+
- [ ] JpaRepository with custom query methods: findByStatus, findByAssignedTo, findByPriority
|
|
28
|
+
- [ ] WorkOrderService implements: create, findById, findAll, update, delete
|
|
29
|
+
- [ ] CreateWorkOrderRequest DTO with Jakarta validation annotations (@NotBlank, @NotNull)
|
|
30
|
+
- [ ] WorkOrderDto as a Java record for responses
|
|
31
|
+
- [ ] Service uses constructor injection (no @Autowired on fields)
|
|
32
|
+
- [ ] `mvn compile` passes
|
|
33
|
+
**Dependencies:** US-001
|
|
34
|
+
**Notes:** Use MapStruct for entity-DTO mapping if the project already uses it, otherwise manual mapping.
|
|
35
|
+
|
|
36
|
+
### US-003: REST Controller [priority: 3]
|
|
37
|
+
**Description:** Create REST endpoints for work order CRUD operations with proper HTTP status codes and error handling.
|
|
38
|
+
**Files:** `src/main/java/com/app/infrastructure/adapter/in/web/WorkOrderController.java`, `src/main/java/com/app/shared/exception/ResourceNotFoundException.java`
|
|
39
|
+
**Acceptance Criteria:**
|
|
40
|
+
- [ ] POST /api/work-orders — creates work order, returns 201 with Location header
|
|
41
|
+
- [ ] GET /api/work-orders — lists all work orders with pagination (Pageable)
|
|
42
|
+
- [ ] GET /api/work-orders/{id} — returns single work order or 404
|
|
43
|
+
- [ ] PUT /api/work-orders/{id} — updates work order or 404
|
|
44
|
+
- [ ] DELETE /api/work-orders/{id} — soft-delete or 404
|
|
45
|
+
- [ ] @Valid on request bodies triggers 400 with field-level error messages
|
|
46
|
+
- [ ] Global exception handler returns consistent error JSON: { timestamp, status, message, path }
|
|
47
|
+
- [ ] `mvn compile` passes
|
|
48
|
+
**Dependencies:** US-002
|
|
49
|
+
**Notes:** Use @RestController with @RequestMapping("/api/work-orders"). Follow existing controller patterns in the project.
|
|
50
|
+
|
|
51
|
+
### US-004: Status Transitions & Business Rules [priority: 4]
|
|
52
|
+
**Description:** Implement work order status transition logic with validation. Not all transitions are valid (e.g., CANCELLED → IN_PROGRESS is not allowed).
|
|
53
|
+
**Files:** `src/main/java/com/app/domain/service/WorkOrderService.java`, `src/main/java/com/app/domain/model/WorkOrder.java`, `src/main/java/com/app/shared/exception/InvalidStatusTransitionException.java`
|
|
54
|
+
**Acceptance Criteria:**
|
|
55
|
+
- [ ] Valid transitions: OPEN→IN_PROGRESS, OPEN→CANCELLED, IN_PROGRESS→ON_HOLD, IN_PROGRESS→COMPLETED, IN_PROGRESS→CANCELLED, ON_HOLD→IN_PROGRESS, ON_HOLD→CANCELLED
|
|
56
|
+
- [ ] Invalid transitions throw InvalidStatusTransitionException (returns 409 Conflict)
|
|
57
|
+
- [ ] PATCH /api/work-orders/{id}/status endpoint accepts { "status": "IN_PROGRESS" }
|
|
58
|
+
- [ ] completed_at is automatically set when transitioning to COMPLETED
|
|
59
|
+
- [ ] assigned_to is required before transitioning to IN_PROGRESS
|
|
60
|
+
- [ ] `mvn compile` passes
|
|
61
|
+
**Dependencies:** US-003
|
|
62
|
+
**Notes:** Consider using a state machine pattern or a simple Map<Status, Set<Status>> for allowed transitions.
|
|
63
|
+
|
|
64
|
+
### US-005: Search & Filtering [priority: 5]
|
|
65
|
+
**Description:** Add search and filtering capabilities to the work orders list endpoint.
|
|
66
|
+
**Files:** `src/main/java/com/app/infrastructure/adapter/in/web/WorkOrderController.java`, `src/main/java/com/app/infrastructure/adapter/out/persistence/WorkOrderRepository.java`, `src/main/java/com/app/application/dto/WorkOrderFilterRequest.java`
|
|
67
|
+
**Acceptance Criteria:**
|
|
68
|
+
- [ ] GET /api/work-orders?status=OPEN&priority=HIGH — filter by status and/or priority
|
|
69
|
+
- [ ] GET /api/work-orders?search=pump — case-insensitive search in title and description
|
|
70
|
+
- [ ] GET /api/work-orders?assignedTo=user123 — filter by assignee
|
|
71
|
+
- [ ] GET /api/work-orders?dueBefore=2025-01-01 — filter by due date
|
|
72
|
+
- [ ] Filters are combinable (AND logic)
|
|
73
|
+
- [ ] Pagination still works with filters (page, size, sort parameters)
|
|
74
|
+
- [ ] `mvn compile` passes
|
|
75
|
+
**Dependencies:** US-003
|
|
76
|
+
**Notes:** Use Spring Data JPA Specifications or @Query with dynamic parameters. Do NOT use Criteria API directly.
|
|
77
|
+
|
|
78
|
+
---
|
|
79
|
+
|
|
80
|
+
## Non-Functional Requirements
|
|
81
|
+
- All endpoints require authentication (existing Spring Security config handles this)
|
|
82
|
+
- Use SLF4J Logger, not System.out.println
|
|
83
|
+
- Follow existing naming conventions in the project
|
|
84
|
+
- Database columns use snake_case, Java fields use camelCase
|
|
85
|
+
|
|
86
|
+
## Out of Scope
|
|
87
|
+
- File attachments on work orders
|
|
88
|
+
- Work order comments/notes
|
|
89
|
+
- Email notifications
|
|
90
|
+
- Audit log integration
|
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
# PRD: <Project Title>
|
|
2
|
+
|
|
3
|
+
## Overview
|
|
4
|
+
<2-3 sentence description of what this project/feature delivers and why>
|
|
5
|
+
|
|
6
|
+
## Tech Stack
|
|
7
|
+
<language, framework, database, etc.>
|
|
8
|
+
|
|
9
|
+
## User Stories
|
|
10
|
+
|
|
11
|
+
### US-001: <Title> [priority: 1]
|
|
12
|
+
**Description:** <what needs to be done and why>
|
|
13
|
+
**Files:** `path/to/file1.java`, `path/to/file2.java`
|
|
14
|
+
**Acceptance Criteria:**
|
|
15
|
+
- [ ] <specific, testable criterion>
|
|
16
|
+
- [ ] <specific, testable criterion>
|
|
17
|
+
- [ ] <specific, testable criterion>
|
|
18
|
+
**Dependencies:** none
|
|
19
|
+
**Notes:** <optional context, patterns to follow, gotchas>
|
|
20
|
+
|
|
21
|
+
### US-002: <Title> [priority: 2]
|
|
22
|
+
**Description:** <what needs to be done and why>
|
|
23
|
+
**Files:** `path/to/file1.java`
|
|
24
|
+
**Acceptance Criteria:**
|
|
25
|
+
- [ ] <specific, testable criterion>
|
|
26
|
+
- [ ] <specific, testable criterion>
|
|
27
|
+
**Dependencies:** US-001
|
|
28
|
+
**Notes:** <optional>
|
|
29
|
+
|
|
30
|
+
### US-003: <Title> [priority: 3]
|
|
31
|
+
**Description:** <what needs to be done and why>
|
|
32
|
+
**Files:** `path/to/new-file.java`
|
|
33
|
+
**Acceptance Criteria:**
|
|
34
|
+
- [ ] <specific, testable criterion>
|
|
35
|
+
- [ ] <specific, testable criterion>
|
|
36
|
+
**Dependencies:** US-001, US-002
|
|
37
|
+
**Notes:** <optional>
|
|
38
|
+
|
|
39
|
+
---
|
|
40
|
+
|
|
41
|
+
## Non-Functional Requirements
|
|
42
|
+
- <performance, security, logging, etc.>
|
|
43
|
+
|
|
44
|
+
## Out of Scope
|
|
45
|
+
- <what is explicitly NOT included>
|