@mmmbuto/nexuscli 0.7.5 → 0.7.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +12 -4
- package/bin/nexuscli.js +6 -6
- package/frontend/dist/assets/{index-CikJbUR5.js → index-BAY_sRAu.js} +1704 -1704
- package/frontend/dist/assets/{index-Bn_l1e6e.css → index-CHOlrfA0.css} +1 -1
- package/frontend/dist/index.html +2 -2
- package/lib/server/.env.example +1 -1
- package/lib/server/db.js.old +225 -0
- package/lib/server/docs/API_WRAPPER_CONTRACT.md +682 -0
- package/lib/server/docs/ARCHITECTURE.md +441 -0
- package/lib/server/docs/DATABASE_SCHEMA.md +783 -0
- package/lib/server/docs/DESIGN_PRINCIPLES.md +598 -0
- package/lib/server/docs/NEXUSCHAT_ANALYSIS.md +488 -0
- package/lib/server/docs/PIPELINE_INTEGRATION.md +636 -0
- package/lib/server/docs/README.md +272 -0
- package/lib/server/docs/UI_DESIGN.md +916 -0
- package/lib/server/lib/pty-adapter.js +15 -1
- package/lib/server/routes/chat.js +70 -8
- package/lib/server/routes/codex.js +61 -7
- package/lib/server/routes/gemini.js +66 -12
- package/lib/server/routes/sessions.js +7 -2
- package/lib/server/server.js +2 -0
- package/lib/server/services/base-cli-wrapper.js +137 -0
- package/lib/server/services/claude-wrapper.js +11 -1
- package/lib/server/services/cli-loader.js.backup +446 -0
- package/lib/server/services/codex-output-parser.js +8 -0
- package/lib/server/services/codex-wrapper.js +13 -4
- package/lib/server/services/context-bridge.js +24 -20
- package/lib/server/services/gemini-wrapper.js +26 -8
- package/lib/server/services/session-manager.js +20 -0
- package/lib/server/services/workspace-manager.js +1 -1
- package/lib/server/tests/performance.test.js +1 -1
- package/lib/server/tests/services.test.js +2 -2
- package/package.json +1 -1
@@ -0,0 +1,636 @@
# NexusCLI Pipeline Integration

**Version**: 0.1.0
**Created**: 2025-11-17
**Based on**: NexusChat development workflow (CLAUDE.md)

---

## 🔄 Pipeline Overview

NexusCLI follows a **5-phase pipeline** for distributed CLI orchestration, adapted from the NexusChat development workflow:

```
Analysis → Coordination → Execution → Test → Deploy
```

Each phase has specific responsibilities and integrates with the Control Plane and Node Wrappers.

---

## 📋 Phase Breakdown

### Phase 1: Analysis

**Purpose**: Understand what needs to be done, where, and validate feasibility.

**Control Plane Responsibilities**:

1. **Request Validation**
   - Parse job specification (tool, command, target nodes)
   - Validate command syntax
   - Check tool availability on target nodes
   - Verify user permissions

2. **Resource Assessment**
   - Query Node Registry for available nodes
   - Check node capacity (active jobs vs max concurrent)
   - Estimate execution time and resource requirements

3. **Risk Analysis**
   - Identify destructive commands (rm, mkfs, dd, etc.)
   - Flag production nodes for approval workflows
   - Check command whitelists/blacklists

**API Endpoints**:
```
POST /api/v1/jobs/validate
```

**Example**:
```json
// Request
{
  "tool": "bash",
  "command": "rm -rf /var/www/*",
  "nodeIds": ["prod-001"]
}

// Response
{
  "valid": false,
  "warnings": [
    {
      "type": "destructive_command",
      "message": "Command contains 'rm -rf' - requires approval",
      "severity": "critical"
    }
  ],
  "suggestions": [
    "Use 'rm -rf /var/www/tmp/*' for safer scoped deletion",
    "Add --dry-run flag to test first"
  ]
}
```

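How the risk analysis flags a request like the one above is not spelled out in this document. The sketch below is one possible shape for it; `DESTRUCTIVE_PATTERNS`, `analyzeRisk`, and the `productionNodes` parameter are illustrative names, not the actual NexusCLI implementation.

```javascript
// Hypothetical sketch — not the shipped NexusCLI code.
const DESTRUCTIVE_PATTERNS = [
  { pattern: /\brm\s+-\w*r\w*f|\brm\s+-\w*f\w*r/, label: 'rm -rf' },
  { pattern: /\bmkfs(\.\w+)?\b/, label: 'mkfs' },
  { pattern: /\bdd\b.*\bof=\/dev\//, label: 'dd to a raw device' },
];

function analyzeRisk({ command, nodeIds = [] }, productionNodes = []) {
  const warnings = [];

  // Destructive command detection
  for (const { pattern, label } of DESTRUCTIVE_PATTERNS) {
    if (pattern.test(command)) {
      warnings.push({
        type: 'destructive_command',
        message: `Command contains '${label}' - requires approval`,
        severity: 'critical',
      });
    }
  }

  // Flag production targets for an approval workflow
  if (nodeIds.some((id) => productionNodes.includes(id))) {
    warnings.push({
      type: 'production_target',
      message: 'Job targets a production node',
      severity: 'warning',
    });
  }

  return { valid: !warnings.some((w) => w.severity === 'critical'), warnings };
}
```
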
---

### Phase 2: Coordination

**Purpose**: Plan execution strategy, select nodes, queue jobs.

**Control Plane Responsibilities**:

1. **Node Selection**
   - Choose optimal node(s) based on:
     - Current load
     - Tool availability
     - Geographic location (latency)
     - Cost (if WellaNet integration)

2. **Job Scheduling**
   - Queue jobs if nodes are busy
   - Prioritize based on user roles
   - Schedule batch jobs with dependencies

3. **Session Initialization**
   - Create job records in storage
   - Generate unique jobIds
   - Set up SSE stream endpoints

**Algorithm** (Node Selection):
```javascript
function selectNode(jobSpec, availableNodes) {
  // 1. Filter by tool availability
  const capableNodes = availableNodes.filter(node =>
    node.capabilities.tools.includes(jobSpec.tool)
  );

  // 2. Filter by load threshold
  const readyNodes = capableNodes.filter(node =>
    node.activeJobs < node.capabilities.maxConcurrentJobs
  );

  // 3. Sort by load (ascending)
  readyNodes.sort((a, b) => a.activeJobs - b.activeJobs);

  // 4. Return least loaded node
  return readyNodes[0] || null;
}
```

**API Endpoints**:
```
POST /api/v1/jobs                       (creates and coordinates job)
GET  /api/v1/nodes/optimal?tool=bash    (suggest best node)
```

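The job scheduling responsibilities above (queue when nodes are busy, prioritize by role) are not shown in code anywhere in this document. A minimal sketch of how they could sit on top of `selectNode` follows; `JobQueue` and `roleWeight` are illustrative names and the role set is an assumption.

```javascript
// Illustrative only — queueing details are not specified by this document.
const roleWeight = { admin: 0, operator: 1, viewer: 2 };

class JobQueue {
  constructor() {
    this.pending = [];
  }

  enqueue(jobSpec) {
    this.pending.push(jobSpec);
    // Lower weight = higher priority; ties keep insertion (FIFO) order
    this.pending.sort(
      (a, b) => (roleWeight[a.user?.role] ?? 9) - (roleWeight[b.user?.role] ?? 9)
    );
  }

  // Called whenever a node reports free capacity (e.g. on job completion)
  dispatch(availableNodes) {
    const started = [];
    for (const jobSpec of [...this.pending]) {
      const node = selectNode(jobSpec, availableNodes);
      if (!node) continue; // stays queued until a capable node frees up

      this.pending.splice(this.pending.indexOf(jobSpec), 1);
      node.activeJobs += 1;
      started.push({ jobSpec, nodeId: node.nodeId });
    }
    return started;
  }
}
```
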
---

### Phase 3: Execution

**Purpose**: Execute command on remote node(s), stream real-time output.

**Workflow**:

```
 Control Plane                               Wrapper (Node)
      │                                        │
      │  1. POST /jobs                         │
      ├───────────────────────────────────────▶│
      │     {jobId, tool, command}             │
      │                                        │
      │  2. Accept job                         │
      │◀───────────────────────────────────────┤
      │     {status: "accepted"}               │
      │                                        │
      │                                        │  3. Spawn PTY
      │                                        │     pty.spawn(bash, ['-c', cmd])
      │                                        │
      │  4. GET /jobs/:id/stream (SSE)         │
      ├───────────────────────────────────────▶│
      │                                        │
      │  5. Stream events                      │
      │◀───────────────────────────────────────┤
      │     data: {type:"status",...}          │
      │     data: {type:"output_chunk"}        │
      │     ...                                │
      │     data: {type:"done"}                │
      │                                        │
      │  6. Close stream                       │
      │◀───────────────────────────────────────┤
```

**Wrapper Execution Flow** (based on NexusChat claude-wrapper.js):

```javascript
async function executeJob({ jobId, command, workingDir, timeout }) {
  const startTime = Date.now(); // Track duration from job start

  // 1. Spawn PTY process
  const ptyProcess = pty.spawn('/bin/bash', ['-c', command], {
    cwd: workingDir,
    env: process.env,
    cols: 80,
    rows: 30,
  });

  // 2. Attach output parser
  const parser = new OutputParser();
  let stdout = '';
  let stderr = '';

  ptyProcess.onData((data) => {
    stdout += data;

    // Parse and emit events
    const events = parser.parse(data);
    events.forEach(event => {
      emitSSE(jobId, event); // Send to Control Plane
    });
  });

  // 3. Handle timeout
  const timer = setTimeout(() => {
    ptyProcess.kill('SIGTERM');
    emitSSE(jobId, {
      type: 'error',
      error: 'Job timeout exceeded',
      timeout: timeout,
    });
  }, timeout);

  // 4. Wait for completion
  ptyProcess.onExit(({ exitCode }) => {
    clearTimeout(timer);

    // Save result
    saveJobResult(jobId, {
      exitCode,
      stdout: cleanAnsi(stdout),
      stderr: cleanAnsi(stderr),
      duration: Date.now() - startTime,
    });

    // Emit done event
    emitSSE(jobId, {
      type: 'done',
      exitCode,
    });
  });
}
```

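`emitSSE` and `cleanAnsi` are used above but not defined in this document. A rough sketch of what they might look like, assuming Express-style request/response objects for the SSE subscribers and a simplified ANSI strip; these are assumed shapes, not the shipped wrapper code.

```javascript
// Assumed shapes only — the real helpers live in the wrapper services.
const sseClients = new Map(); // jobId → Set of response objects

// GET /jobs/:id/stream — register an SSE subscriber (Express-style handler)
function streamHandler(req, res) {
  res.writeHead(200, {
    'Content-Type': 'text/event-stream',
    'Cache-Control': 'no-cache',
    Connection: 'keep-alive',
  });
  const clients = sseClients.get(req.params.id) || new Set();
  clients.add(res);
  sseClients.set(req.params.id, clients);
  req.on('close', () => clients.delete(res));
}

// Push one event to every subscriber of a job, in SSE wire format
function emitSSE(jobId, event) {
  const payload = `data: ${JSON.stringify(event)}\n\n`;
  for (const res of sseClients.get(jobId) || []) {
    res.write(payload);
  }
}

// Remove common ANSI CSI sequences (colors, cursor movement) from PTY output
function cleanAnsi(text) {
  return text.replace(/\x1b\[[0-9;]*[A-Za-z]/g, '');
}
```
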
**Output Parser** (adapted from NexusChat output-parser.js):

```javascript
class OutputParser {
  constructor() {
    this.state = 'idle';
    this.buffer = '';
  }

  parse(chunk) {
    const events = [];
    this.buffer += chunk;

    // Parse line by line
    const lines = this.buffer.split('\n');
    this.buffer = lines.pop(); // Keep incomplete line

    for (const line of lines) {
      // Detect tool execution
      if (/^Running (bash|git|docker):/.test(line)) {
        events.push({
          type: 'status',
          category: 'tool',
          message: line,
          icon: '🔧',
        });
      }

      // Detect errors
      if (/^(Error:|FATAL:)/.test(line)) {
        events.push({
          type: 'status',
          category: 'warning',
          message: line,
          icon: '⚠️',
        });
      }

      // Stream output
      events.push({
        type: 'output_chunk',
        stream: 'stdout',
        text: line + '\n',
      });
    }

    return events;
  }
}
```

---

### Phase 4: Test

**Purpose**: Validate execution results, assert expected outcomes.

**Validation Types**:

1. **Exit Code Validation**
   ```javascript
   if (job.result.exitCode !== 0) {
     throw new Error(`Job failed with exit code ${job.result.exitCode}`);
   }
   ```

2. **Output Validation** (regex patterns)
   ```javascript
   const expectedPattern = /Successfully deployed to production/;
   if (!expectedPattern.test(job.result.stdout)) {
     throw new Error('Expected success message not found in output');
   }
   ```

3. **Side-Effect Validation** (chained jobs)
   ```javascript
   // Example: After "deploy to prod", verify service is running
   const verifyJob = await JobManager.create({
     nodeId: 'prod-001',
     tool: 'bash',
     command: 'systemctl status myapp',
     parent: job.jobId, // Link to parent job
   });
   await JobManager.waitForCompletion(verifyJob.jobId);

   if (verifyJob.result.exitCode !== 0) {
     // Rollback parent job
     await JobManager.rollback(job.jobId);
   }
   ```

**Test Configuration** (per job):
```json
{
  "jobId": "job-123",
  "tool": "bash",
  "command": "npm run build",
  "tests": [
    {
      "type": "exit_code",
      "expected": 0
    },
    {
      "type": "output_contains",
      "pattern": "Build successful",
      "stream": "stdout"
    },
    {
      "type": "file_exists",
      "path": "/var/www/dist/index.html"
    }
  ]
}
```

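How these assertion types are evaluated is not shown here; the sketch below is one plausible evaluator, with `runTests` as an illustrative name. The `file_exists` check has to run on the node that executed the job.

```javascript
const fs = require('fs');

// Illustrative evaluator for the test types shown above — not the shipped code.
function runTests(tests, result) {
  return tests.map((test) => {
    switch (test.type) {
      case 'exit_code':
        return { test, passed: result.exitCode === test.expected };
      case 'output_contains':
        return {
          test,
          passed: new RegExp(test.pattern).test(result[test.stream || 'stdout']),
        };
      case 'file_exists':
        return { test, passed: fs.existsSync(test.path) };
      default:
        return { test, passed: false, error: `Unknown test type: ${test.type}` };
    }
  });
}

// const results = runTests(job.tests, job.result);
// const passed = results.every((r) => r.passed); // reported as { passed, results }
```
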
**API Endpoints**:
```
POST /api/v1/jobs/:id/test          (run tests on completed job)
GET  /api/v1/jobs/:id/test-results
```

---

### Phase 5: Deploy

**Purpose**: Execute follow-up actions, trigger dependent jobs, update state.

**Deployment Patterns**:

1. **Sequential Chain**
   ```javascript
   // Build → Test → Deploy → Verify
   const pipeline = [
     { tool: 'bash', command: 'npm run build' },
     { tool: 'bash', command: 'npm test' },
     { tool: 'bash', command: 'rsync -av dist/ /var/www/prod/' },
     { tool: 'bash', command: 'systemctl restart nginx' },
   ];

   for (const step of pipeline) {
     const job = await JobManager.create(step);
     await JobManager.waitForCompletion(job.jobId);

     if (job.result.exitCode !== 0) {
       // Rollback previous steps (see the rollback sketch below)
       await rollbackPipeline(pipeline.slice(0, pipeline.indexOf(step)));
       break;
     }
   }
   ```

2. **Parallel Fanout**
   ```javascript
   // Deploy to multiple nodes simultaneously
   const deployJobs = await Promise.all([
     JobManager.create({ nodeId: 'web-001', command: 'deploy.sh' }),
     JobManager.create({ nodeId: 'web-002', command: 'deploy.sh' }),
     JobManager.create({ nodeId: 'web-003', command: 'deploy.sh' }),
   ]);

   // Wait for all to complete
   await Promise.all(deployJobs.map(j => JobManager.waitForCompletion(j.jobId)));
   ```

3. **Conditional Branching**
   ```javascript
   const buildJob = await JobManager.create({ command: 'npm run build' });
   await JobManager.waitForCompletion(buildJob.jobId);

   if (buildJob.result.exitCode === 0) {
     // Success: deploy to production
     await JobManager.create({
       nodeId: 'prod-001',
       command: 'deploy-prod.sh',
       parent: buildJob.jobId,
     });
   } else {
     // Failure: notify team
     await JobManager.create({
       tool: 'bash',
       command: 'curl -X POST slack-webhook -d "Build failed"',
     });
   }
   ```

**API Endpoints**:
```
POST /api/v1/pipelines            (create multi-job pipeline)
GET  /api/v1/pipelines/:id        (get pipeline status)
POST /api/v1/jobs/:id/rollback    (rollback deployed job)
```

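`rollbackPipeline`, used in the sequential chain above, and the rollback endpoint imply some compensation mechanism that this document does not define. One possible shape, assuming each pipeline step may declare a compensating `rollbackCommand` (a hypothetical field) and reusing the JobManager API from the patterns above:

```javascript
// Sketch only — assumes steps optionally carry a compensating command.
async function rollbackPipeline(completedSteps) {
  // Undo in reverse order: most recent change first
  for (const step of [...completedSteps].reverse()) {
    if (!step.rollbackCommand) continue; // nothing to compensate for this step

    const job = await JobManager.create({
      nodeId: step.nodeId,
      tool: step.tool || 'bash',
      command: step.rollbackCommand,
    });
    await JobManager.waitForCompletion(job.jobId);

    if (job.result.exitCode !== 0) {
      // Surface the failure instead of cascading further
      throw new Error(`Rollback failed at step: ${step.command}`);
    }
  }
}
```

The `rsync` deploy step above could, for example, declare a compensating command that restores the previous release from a backup directory kept on the node.
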
---

## 🔗 Pipeline Example: Full Deployment Workflow

### Scenario: Deploy web application to production

```javascript
// Phase 1: ANALYSIS
const analysisResult = await fetch('/api/v1/jobs/validate', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    tool: 'bash',
    command: 'cd /var/www/myapp && git pull && npm run build',
    nodeId: 'prod-web-001',
  }),
}).then((res) => res.json());
// → { valid: true, estimatedDuration: 120000, warnings: [] }

// Phase 2: COORDINATION
const buildJob = await fetch('/api/v1/jobs', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    nodeId: 'build-server',
    tool: 'bash',
    command: 'cd /var/www/myapp && npm run build',
    metadata: { pipeline: 'production-deploy' },
  }),
}).then((res) => res.json());
// → { jobId: "job-001", streamEndpoint: "/jobs/job-001/stream" }

// Phase 3: EXECUTION (real-time streaming)
const eventSource = new EventSource('/api/v1/jobs/job-001/stream');
eventSource.onmessage = (event) => {
  const data = JSON.parse(event.data);

  if (data.type === 'status') {
    console.log(`[${data.category}] ${data.message}`);
    // → [tool] Bash: npm run build
  }

  if (data.type === 'output_chunk') {
    console.log(data.text);
    // → > webpack compiled successfully
  }

  if (data.type === 'done') {
    console.log('✅ Build completed');
    eventSource.close();
  }
};

// Phase 4: TEST
const testResult = await fetch('/api/v1/jobs/job-001/test', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    tests: [
      { type: 'exit_code', expected: 0 },
      { type: 'file_exists', path: '/var/www/myapp/dist/index.html' },
    ],
  }),
}).then((res) => res.json());
// → { passed: true, results: [...] }

// Phase 5: DEPLOY
if (testResult.passed) {
  const deployJob = await fetch('/api/v1/jobs', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      nodeId: 'prod-web-001',
      tool: 'bash',
      command: 'rsync -av /var/www/myapp/dist/ /var/www/production/',
      parent: 'job-001', // Link to build job
    }),
  }).then((res) => res.json());

  // Verify deployment
  const verifyJob = await fetch('/api/v1/jobs', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      nodeId: 'prod-web-001',
      tool: 'bash',
      command: 'systemctl status nginx && curl -I http://localhost',
      parent: deployJob.jobId,
    }),
  }).then((res) => res.json());
}
```

---

## 🔄 Relationship to NexusChat Workflow

**NexusChat CLAUDE.md Pipeline** (adapted):

```
1. MODIFICA       → Analysis (validate changes)
2. TEST           → Coordination (select test environment)
3. DOCUMENTA      → Execution (run tests, generate docs)
4. COMMIT ATOMICO → Test (verify all tests pass)
5. PUSH           → Deploy (push to git, deploy to production)
```

**NexusCLI Equivalent**:

```
1. ANALYSIS     → What needs to be done? Where? How?
2. COORDINATION → Which nodes? In what order? When?
3. EXECUTION    → Run commands, stream output
4. TEST         → Did it work? Assert expectations
5. DEPLOY       → Apply changes, rollback if needed
```

---

## 📊 Monitoring & Observability

### Metrics (per phase)

**Analysis**:
- Validation failures per hour
- Average validation time
- Most common validation errors

**Coordination**:
- Node selection time
- Queue depth
- Jobs pending vs executing

**Execution**:
- Average job duration (per tool)
- Success rate (exit code 0)
- Timeout rate

**Test**:
- Test pass rate
- Failed assertion types
- Rollback frequency

**Deploy**:
- Deployment success rate
- Rollback time
- Pipeline completion time

**API Endpoint**:
```
GET /api/v1/metrics/pipeline
```

**Response**:
```json
{
  "analysis": {
    "totalValidations": 1523,
    "validationFailures": 45,
    "avgValidationTime": 12
  },
  "execution": {
    "totalJobs": 8421,
    "successfulJobs": 8103,
    "failedJobs": 318,
    "avgDuration": 3421
  },
  "deploy": {
    "totalDeployments": 234,
    "successRate": 0.963,
    "avgRollbackTime": 8234
  }
}
```

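How these numbers would be computed is left open; a minimal aggregation sketch over stored job records follows. The record fields (`status`, `result.exitCode`, `result.duration`) mirror the shapes used earlier in this document, but the real storage schema may differ.

```javascript
// Illustrative aggregation for the "execution" block above — not the shipped code.
function executionMetrics(jobs) {
  const finished = jobs.filter((j) => j.status === 'completed' || j.status === 'failed');
  const successful = finished.filter((j) => j.result && j.result.exitCode === 0);
  const totalDuration = finished.reduce((sum, j) => sum + (j.result?.duration || 0), 0);

  return {
    totalJobs: finished.length,
    successfulJobs: successful.length,
    failedJobs: finished.length - successful.length,
    avgDuration: finished.length ? Math.round(totalDuration / finished.length) : 0,
  };
}
```
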
---

## 🚀 Future Enhancements

### Pipeline Templates

**Predefined Workflows**:
```yaml
# templates/web-deploy.yaml
name: "Production Web Deployment"
phases:
  - analysis:
      validate: true
      approvers: ["admin@example.com"]

  - coordination:
      nodes: ["build-server"]

  - execution:
      steps:
        - { tool: "bash", command: "git pull" }
        - { tool: "bash", command: "npm install" }
        - { tool: "bash", command: "npm run build" }

  - test:
      tests:
        - { type: "exit_code", expected: 0 }
        - { type: "file_exists", path: "dist/index.html" }

  - deploy:
      strategy: "rolling"
      nodes: ["prod-web-001", "prod-web-002", "prod-web-003"]
      healthCheck: "curl -f http://localhost/health"
```

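Since this is listed as a future enhancement, no loader exists yet. A rough sketch of how such a template could be expanded into the sequential pattern from Phase 5, assuming the `js-yaml` package and the hypothetical JobManager API used earlier:

```javascript
const fs = require('fs');
const yaml = require('js-yaml');

// Future-enhancement sketch — expands a template into sequential jobs.
async function runTemplate(path) {
  const template = yaml.load(fs.readFileSync(path, 'utf8'));
  // "phases" is a list of single-key maps → merge into one object
  const phases = Object.assign({}, ...template.phases);
  const nodeId = phases.coordination.nodes[0];

  for (const step of phases.execution.steps) {
    const job = await JobManager.create({ nodeId, ...step });
    await JobManager.waitForCompletion(job.jobId);
    if (job.result.exitCode !== 0) {
      throw new Error(`Template step failed: ${step.command}`);
    }
  }
  // The test and deploy phases would follow the same pattern,
  // using the assertion runner and rolling strategy described above.
}

// runTemplate('templates/web-deploy.yaml');
```
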
### AI-Assisted Analysis

**Future Integration**:
- Use Claude/GPT to analyze commands for risks
- Suggest safer alternatives
- Auto-generate test assertions
- Predict optimal node selection

---

## 📚 Related Documents

- [API & Wrapper Contract](./API_WRAPPER_CONTRACT.md) - API specification
- [Architecture Overview](./ARCHITECTURE.md) - System architecture
- [NexusChat CLAUDE.md](../../CLAUDE.md) - Original workflow

---

_Generated by Claude Code (Sonnet 4.5) - 2025-11-17_