smash-os-install 0.2.2 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/install.mjs +791 -670
- package/package.json +2 -2
package/install.mjs
CHANGED
|
@@ -1,671 +1,792 @@
|
|
|
1
1
|
#!/usr/bin/env node
|
|
2
|
-
/**
|
|
3
|
-
* smash-os-install — Install the SmashOS Claude Code harness into any repo.
|
|
4
|
-
*
|
|
5
|
-
* Usage (inside your repo root):
|
|
6
|
-
* npx smash-os-install
|
|
7
|
-
*
|
|
8
|
-
*
|
|
9
|
-
*
|
|
10
|
-
*
|
|
11
|
-
*
|
|
12
|
-
*
|
|
13
|
-
|
|
14
|
-
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
const
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
}
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
}
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
|
|
81
|
-
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
|
|
86
|
-
|
|
87
|
-
}
|
|
88
|
-
|
|
89
|
-
|
|
90
|
-
|
|
91
|
-
|
|
92
|
-
|
|
93
|
-
|
|
94
|
-
|
|
95
|
-
|
|
96
|
-
|
|
97
|
-
|
|
98
|
-
|
|
99
|
-
|
|
100
|
-
|
|
101
|
-
|
|
102
|
-
|
|
103
|
-
|
|
104
|
-
|
|
105
|
-
|
|
106
|
-
|
|
107
|
-
|
|
108
|
-
|
|
109
|
-
|
|
110
|
-
|
|
111
|
-
|
|
112
|
-
|
|
113
|
-
|
|
114
|
-
|
|
115
|
-
|
|
116
|
-
|
|
117
|
-
|
|
118
|
-
|
|
119
|
-
|
|
120
|
-
|
|
121
|
-
|
|
122
|
-
|
|
123
|
-
|
|
124
|
-
|
|
125
|
-
|
|
126
|
-
|
|
127
|
-
|
|
128
|
-
|
|
129
|
-
|
|
130
|
-
|
|
131
|
-
|
|
132
|
-
|
|
133
|
-
|
|
134
|
-
|
|
135
|
-
|
|
136
|
-
|
|
137
|
-
|
|
138
|
-
|
|
139
|
-
|
|
140
|
-
|
|
141
|
-
|
|
142
|
-
|
|
143
|
-
|
|
144
|
-
|
|
145
|
-
|
|
146
|
-
|
|
147
|
-
|
|
148
|
-
|
|
149
|
-
|
|
150
|
-
|
|
151
|
-
|
|
152
|
-
|
|
153
|
-
|
|
154
|
-
|
|
155
|
-
|
|
156
|
-
|
|
157
|
-
|
|
158
|
-
|
|
159
|
-
|
|
160
|
-
|
|
161
|
-
|
|
162
|
-
|
|
163
|
-
|
|
164
|
-
|
|
165
|
-
|
|
166
|
-
|
|
167
|
-
|
|
168
|
-
|
|
169
|
-
|
|
170
|
-
|
|
171
|
-
|
|
172
|
-
|
|
173
|
-
|
|
174
|
-
|
|
175
|
-
|
|
176
|
-
|
|
177
|
-
|
|
178
|
-
|
|
179
|
-
|
|
180
|
-
|
|
181
|
-
|
|
182
|
-
|
|
183
|
-
|
|
184
|
-
|
|
185
|
-
|
|
186
|
-
|
|
187
|
-
|
|
188
|
-
|
|
189
|
-
|
|
190
|
-
|
|
191
|
-
|
|
192
|
-
|
|
193
|
-
|
|
194
|
-
|
|
195
|
-
|
|
196
|
-
|
|
197
|
-
|
|
198
|
-
|
|
199
|
-
|
|
200
|
-
|
|
201
|
-
|
|
202
|
-
|
|
203
|
-
|
|
204
|
-
|
|
205
|
-
|
|
206
|
-
|
|
207
|
-
|
|
208
|
-
|
|
209
|
-
|
|
210
|
-
|
|
211
|
-
|
|
212
|
-
|
|
213
|
-
|
|
214
|
-
|
|
215
|
-
|
|
216
|
-
|
|
217
|
-
|
|
218
|
-
|
|
219
|
-
|
|
220
|
-
|
|
221
|
-
|
|
222
|
-
|
|
223
|
-
|
|
224
|
-
|
|
225
|
-
|
|
226
|
-
|
|
227
|
-
|
|
228
|
-
|
|
229
|
-
|
|
230
|
-
|
|
231
|
-
|
|
232
|
-
|
|
233
|
-
|
|
234
|
-
|
|
235
|
-
|
|
236
|
-
|
|
237
|
-
|
|
238
|
-
|
|
239
|
-
|
|
240
|
-
|
|
241
|
-
|
|
242
|
-
|
|
243
|
-
|
|
244
|
-
|
|
245
|
-
|
|
246
|
-
|
|
247
|
-
|
|
248
|
-
|
|
249
|
-
|
|
250
|
-
|
|
251
|
-
|
|
252
|
-
|
|
253
|
-
|
|
254
|
-
|
|
255
|
-
|
|
256
|
-
|
|
257
|
-
|
|
258
|
-
|
|
259
|
-
|
|
260
|
-
|
|
261
|
-
|
|
262
|
-
|
|
263
|
-
|
|
264
|
-
|
|
265
|
-
|
|
266
|
-
|
|
267
|
-
|
|
268
|
-
|
|
269
|
-
|
|
270
|
-
|
|
271
|
-
|
|
272
|
-
|
|
273
|
-
|
|
274
|
-
|
|
275
|
-
|
|
276
|
-
|
|
277
|
-
|
|
278
|
-
|
|
279
|
-
|
|
280
|
-
|
|
281
|
-
|
|
282
|
-
|
|
283
|
-
|
|
284
|
-
|
|
285
|
-
|
|
286
|
-
|
|
287
|
-
|
|
288
|
-
|
|
289
|
-
|
|
290
|
-
|
|
291
|
-
|
|
292
|
-
|
|
293
|
-
|
|
294
|
-
|
|
295
|
-
|
|
296
|
-
|
|
297
|
-
|
|
298
|
-
|
|
299
|
-
|
|
300
|
-
|
|
301
|
-
|
|
302
|
-
|
|
303
|
-
|
|
304
|
-
|
|
305
|
-
|
|
306
|
-
|
|
307
|
-
|
|
308
|
-
|
|
309
|
-
|
|
310
|
-
|
|
311
|
-
|
|
312
|
-
|
|
313
|
-
|
|
314
|
-
|
|
315
|
-
|
|
316
|
-
|
|
317
|
-
|
|
318
|
-
|
|
319
|
-
|
|
320
|
-
|
|
321
|
-
|
|
322
|
-
|
|
323
|
-
|
|
324
|
-
|
|
325
|
-
|
|
326
|
-
|
|
327
|
-
|
|
328
|
-
|
|
329
|
-
|
|
330
|
-
|
|
331
|
-
|
|
332
|
-
|
|
333
|
-
|
|
334
|
-
|
|
335
|
-
|
|
336
|
-
|
|
337
|
-
|
|
338
|
-
|
|
339
|
-
|
|
340
|
-
|
|
341
|
-
|
|
342
|
-
|
|
343
|
-
|
|
344
|
-
|
|
345
|
-
|
|
346
|
-
|
|
347
|
-
|
|
348
|
-
|
|
349
|
-
|
|
350
|
-
|
|
351
|
-
|
|
352
|
-
|
|
353
|
-
|
|
354
|
-
|
|
355
|
-
|
|
356
|
-
|
|
357
|
-
|
|
358
|
-
|
|
359
|
-
|
|
360
|
-
|
|
361
|
-
|
|
362
|
-
|
|
363
|
-
|
|
364
|
-
|
|
365
|
-
|
|
366
|
-
|
|
367
|
-
|
|
368
|
-
|
|
369
|
-
|
|
370
|
-
|
|
371
|
-
|
|
372
|
-
|
|
373
|
-
|
|
374
|
-
|
|
375
|
-
|
|
376
|
-
|
|
377
|
-
|
|
378
|
-
|
|
379
|
-
|
|
380
|
-
|
|
381
|
-
|
|
382
|
-
##
|
|
383
|
-
|
|
384
|
-
|
|
385
|
-
|
|
386
|
-
|
|
387
|
-
|
|
388
|
-
|
|
389
|
-
|
|
390
|
-
|
|
391
|
-
|
|
392
|
-
|
|
393
|
-
|
|
394
|
-
|
|
395
|
-
|
|
396
|
-
|
|
397
|
-
|
|
398
|
-
|
|
399
|
-
|
|
400
|
-
|
|
401
|
-
|
|
402
|
-
|
|
403
|
-
##
|
|
404
|
-
|
|
405
|
-
|
|
406
|
-
|
|
407
|
-
|
|
408
|
-
|
|
409
|
-
|
|
410
|
-
|
|
411
|
-
|
|
412
|
-
|
|
413
|
-
'
|
|
414
|
-
|
|
415
|
-
|
|
416
|
-
|
|
417
|
-
|
|
418
|
-
|
|
419
|
-
|
|
420
|
-
|
|
421
|
-
|
|
422
|
-
|
|
423
|
-
|
|
424
|
-
|
|
425
|
-
|
|
426
|
-
|
|
427
|
-
|
|
428
|
-
|
|
429
|
-
|
|
430
|
-
|
|
431
|
-
|
|
432
|
-
|
|
433
|
-
|
|
434
|
-
|
|
435
|
-
|
|
436
|
-
|
|
437
|
-
|
|
438
|
-
|
|
439
|
-
|
|
440
|
-
|
|
441
|
-
|
|
442
|
-
|
|
443
|
-
|
|
444
|
-
|
|
445
|
-
|
|
446
|
-
|
|
447
|
-
|
|
448
|
-
}
|
|
449
|
-
|
|
450
|
-
|
|
451
|
-
|
|
452
|
-
|
|
453
|
-
|
|
454
|
-
|
|
455
|
-
|
|
456
|
-
|
|
457
|
-
|
|
458
|
-
|
|
459
|
-
|
|
460
|
-
|
|
461
|
-
|
|
462
|
-
|
|
463
|
-
|
|
464
|
-
|
|
465
|
-
|
|
466
|
-
|
|
467
|
-
|
|
468
|
-
|
|
469
|
-
|
|
470
|
-
|
|
471
|
-
|
|
472
|
-
|
|
473
|
-
|
|
474
|
-
|
|
475
|
-
|
|
476
|
-
|
|
477
|
-
|
|
478
|
-
|
|
479
|
-
|
|
480
|
-
|
|
481
|
-
|
|
482
|
-
|
|
483
|
-
|
|
484
|
-
|
|
485
|
-
|
|
486
|
-
|
|
487
|
-
|
|
488
|
-
|
|
489
|
-
|
|
490
|
-
|
|
491
|
-
|
|
492
|
-
|
|
493
|
-
|
|
494
|
-
|
|
495
|
-
|
|
496
|
-
|
|
497
|
-
|
|
498
|
-
|
|
499
|
-
|
|
500
|
-
|
|
501
|
-
|
|
502
|
-
|
|
503
|
-
|
|
504
|
-
|
|
505
|
-
|
|
506
|
-
|
|
507
|
-
|
|
508
|
-
|
|
509
|
-
|
|
510
|
-
|
|
511
|
-
|
|
512
|
-
|
|
513
|
-
|
|
514
|
-
|
|
515
|
-
|
|
516
|
-
|
|
517
|
-
|
|
518
|
-
|
|
519
|
-
|
|
520
|
-
|
|
521
|
-
|
|
522
|
-
|
|
523
|
-
|
|
524
|
-
|
|
525
|
-
|
|
526
|
-
|
|
527
|
-
|
|
528
|
-
|
|
529
|
-
|
|
530
|
-
|
|
531
|
-
|
|
532
|
-
|
|
533
|
-
|
|
534
|
-
: '
|
|
535
|
-
|
|
536
|
-
|
|
537
|
-
|
|
538
|
-
|
|
539
|
-
|
|
540
|
-
|
|
541
|
-
|
|
542
|
-
|
|
543
|
-
|
|
544
|
-
|
|
545
|
-
|
|
546
|
-
|
|
547
|
-
|
|
548
|
-
|
|
549
|
-
|
|
550
|
-
|
|
551
|
-
|
|
552
|
-
|
|
553
|
-
const
|
|
554
|
-
|
|
555
|
-
|
|
556
|
-
|
|
557
|
-
|
|
558
|
-
|
|
559
|
-
|
|
560
|
-
|
|
561
|
-
|
|
562
|
-
|
|
563
|
-
|
|
564
|
-
|
|
565
|
-
|
|
566
|
-
|
|
567
|
-
|
|
568
|
-
|
|
569
|
-
|
|
570
|
-
|
|
571
|
-
|
|
572
|
-
|
|
573
|
-
|
|
574
|
-
|
|
575
|
-
|
|
576
|
-
|
|
577
|
-
|
|
578
|
-
|
|
579
|
-
|
|
580
|
-
|
|
581
|
-
)
|
|
582
|
-
|
|
583
|
-
|
|
584
|
-
|
|
585
|
-
|
|
586
|
-
|
|
587
|
-
|
|
588
|
-
|
|
589
|
-
|
|
590
|
-
|
|
591
|
-
|
|
592
|
-
|
|
593
|
-
|
|
594
|
-
|
|
595
|
-
|
|
596
|
-
|
|
597
|
-
|
|
598
|
-
|
|
599
|
-
|
|
600
|
-
|
|
601
|
-
|
|
602
|
-
|
|
603
|
-
|
|
604
|
-
|
|
605
|
-
}
|
|
606
|
-
|
|
607
|
-
|
|
608
|
-
|
|
609
|
-
|
|
610
|
-
}
|
|
611
|
-
|
|
612
|
-
|
|
613
|
-
|
|
614
|
-
|
|
615
|
-
|
|
616
|
-
|
|
617
|
-
|
|
618
|
-
|
|
619
|
-
|
|
620
|
-
|
|
621
|
-
|
|
622
|
-
|
|
623
|
-
|
|
624
|
-
|
|
625
|
-
|
|
626
|
-
|
|
627
|
-
|
|
628
|
-
|
|
629
|
-
|
|
630
|
-
|
|
631
|
-
|
|
632
|
-
|
|
633
|
-
|
|
634
|
-
|
|
635
|
-
|
|
636
|
-
|
|
637
|
-
|
|
638
|
-
|
|
639
|
-
|
|
640
|
-
|
|
641
|
-
|
|
642
|
-
|
|
643
|
-
|
|
644
|
-
|
|
645
|
-
|
|
646
|
-
|
|
647
|
-
|
|
648
|
-
|
|
649
|
-
|
|
650
|
-
|
|
651
|
-
|
|
652
|
-
|
|
653
|
-
|
|
654
|
-
|
|
655
|
-
|
|
656
|
-
|
|
657
|
-
|
|
658
|
-
|
|
659
|
-
|
|
660
|
-
|
|
661
|
-
|
|
662
|
-
|
|
663
|
-
|
|
664
|
-
|
|
665
|
-
|
|
666
|
-
|
|
667
|
-
|
|
668
|
-
|
|
669
|
-
|
|
670
|
-
|
|
671
|
-
|
|
2
|
+
/**
|
|
3
|
+
* smash-os-install — Install the SmashOS Claude Code harness into any repo.
|
|
4
|
+
*
|
|
5
|
+
* Usage (inside your repo root):
|
|
6
|
+
* npx smash-os-install
|
|
7
|
+
*
|
|
8
|
+
* What it does:
|
|
9
|
+
* 1. Prompts for project name and tech stack
|
|
10
|
+
* 2. Writes CLAUDE.md + /ai/ skeleton + .smash-os-mode=local
|
|
11
|
+
* 3. Installs SmashOS skills globally (~/.claude/skills/)
|
|
12
|
+
* 4. No web app, no API keys, no cloud dependencies required
|
|
13
|
+
*/
|
|
14
|
+
|
|
15
|
+
import { execSync } from 'child_process';
|
|
16
|
+
import { existsSync, readFileSync, writeFileSync, mkdirSync, readdirSync, copyFileSync } from 'fs';
|
|
17
|
+
import { join, dirname, basename } from 'path';
|
|
18
|
+
import { homedir } from 'os';
|
|
19
|
+
import prompts from 'prompts';
|
|
20
|
+
import chalk from 'chalk';
|
|
21
|
+
|
|
22
|
+
const cwd = process.cwd();
|
|
23
|
+
const isMarketing = process.argv.includes('--marketing');
|
|
24
|
+
const vaultConventions = join(process.env.USERPROFILE || process.env.HOME || homedir(), 'Desktop', 'SmashBurgerBar', 'SmashVault', 'Architecture', 'conventions.md');
|
|
25
|
+
const globalConventions = join(homedir(), '.claude', 'conventions.md');
|
|
26
|
+
const conventionsFile = existsSync(vaultConventions) ? vaultConventions : globalConventions;
|
|
27
|
+
|
|
28
|
+
// ─── Helpers ──────────────────────────────────────────────────────────────────
|
|
29
|
+
|
|
30
|
+
function writeFile(relPath, content) {
|
|
31
|
+
const abs = join(cwd, relPath);
|
|
32
|
+
mkdirSync(dirname(abs), { recursive: true });
|
|
33
|
+
writeFileSync(abs, content, 'utf8');
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
function mergeSettingsJson(newSettingsContent) {
|
|
37
|
+
const settingsPath = join(cwd, '.claude', 'settings.json');
|
|
38
|
+
|
|
39
|
+
if (!existsSync(settingsPath)) {
|
|
40
|
+
writeFile('.claude/settings.json', newSettingsContent);
|
|
41
|
+
return false;
|
|
42
|
+
}
|
|
43
|
+
|
|
44
|
+
let existing;
|
|
45
|
+
try {
|
|
46
|
+
existing = JSON.parse(readFileSync(settingsPath, 'utf8'));
|
|
47
|
+
} catch {
|
|
48
|
+
writeFile('.claude/settings.json', newSettingsContent);
|
|
49
|
+
return false;
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
const incoming = JSON.parse(newSettingsContent);
|
|
53
|
+
if (!existing.hooks) existing.hooks = {};
|
|
54
|
+
|
|
55
|
+
for (const [event, hookList] of Object.entries(incoming.hooks ?? {})) {
|
|
56
|
+
if (!existing.hooks[event]) existing.hooks[event] = [];
|
|
57
|
+
for (const hookGroup of hookList) {
|
|
58
|
+
for (const hook of hookGroup.hooks ?? []) {
|
|
59
|
+
const alreadyPresent = existing.hooks[event].some((g) =>
|
|
60
|
+
g.hooks?.some((h) => h.command === hook.command)
|
|
61
|
+
);
|
|
62
|
+
if (!alreadyPresent) {
|
|
63
|
+
existing.hooks[event].push({ hooks: [hook] });
|
|
64
|
+
}
|
|
65
|
+
}
|
|
66
|
+
}
|
|
67
|
+
}
|
|
68
|
+
|
|
69
|
+
writeFile('.claude/settings.json', JSON.stringify(existing, null, 2));
|
|
70
|
+
return true;
|
|
71
|
+
}
|
|
72
|
+
|
|
73
|
+
// ─── MCP Detection & Installation ─────────────────────────────────────────────
|
|
74
|
+
|
|
75
|
+
function getInstalledMcpOutput() {
|
|
76
|
+
try {
|
|
77
|
+
return execSync('claude mcp list', { encoding: 'utf8', stdio: 'pipe' }).toLowerCase();
|
|
78
|
+
} catch { return ''; }
|
|
79
|
+
}
|
|
80
|
+
|
|
81
|
+
const MCP_CATALOG = [
|
|
82
|
+
{
|
|
83
|
+
name: 'context7',
|
|
84
|
+
description: 'Live docs lookup (GSD research phases, library references)',
|
|
85
|
+
enhancementLevels: ['light', 'medium', 'high'],
|
|
86
|
+
autoInstall: 'claude mcp add --transport http context7 https://mcp.context7.com/mcp',
|
|
87
|
+
},
|
|
88
|
+
{
|
|
89
|
+
name: 'jcodemunch',
|
|
90
|
+
description: 'Code intelligence — symbol search, blast radius, dependency graphs',
|
|
91
|
+
enhancementLevels: ['light', 'medium', 'high'],
|
|
92
|
+
autoInstall: null,
|
|
93
|
+
docs: 'https://jcodemunch.com',
|
|
94
|
+
},
|
|
95
|
+
{
|
|
96
|
+
name: 'jdocmunch',
|
|
97
|
+
description: 'Documentation intelligence — companion to jcodemunch',
|
|
98
|
+
enhancementLevels: ['medium', 'high'],
|
|
99
|
+
autoInstall: null,
|
|
100
|
+
docs: 'https://jcodemunch.com',
|
|
101
|
+
},
|
|
102
|
+
{
|
|
103
|
+
name: 'chrome-devtools',
|
|
104
|
+
description: 'Browser automation — visual testing and verification',
|
|
105
|
+
enhancementLevels: ['medium', 'high'],
|
|
106
|
+
autoInstall: null,
|
|
107
|
+
docs: 'https://github.com/modelcontextprotocol/servers',
|
|
108
|
+
},
|
|
109
|
+
{
|
|
110
|
+
name: 'claude-peers',
|
|
111
|
+
description: 'Parallel pipeline execution + cross-session awareness (Security+QA run simultaneously)',
|
|
112
|
+
enhancementLevels: ['medium', 'high'],
|
|
113
|
+
autoInstall: 'claude mcp add claude-peers npx @louislva/claude-peers-mcp',
|
|
114
|
+
docs: 'https://github.com/louislva/claude-peers-mcp',
|
|
115
|
+
},
|
|
116
|
+
{
|
|
117
|
+
name: 'openspace',
|
|
118
|
+
description: 'Self-improving skill library — captures execution patterns, reduces token usage over time',
|
|
119
|
+
enhancementLevels: ['high'],
|
|
120
|
+
autoInstall: 'claude mcp add openspace npx openspace-mcp',
|
|
121
|
+
docs: 'https://github.com/HKUDS/OpenSpace',
|
|
122
|
+
},
|
|
123
|
+
];
|
|
124
|
+
|
|
125
|
+
async function checkAndInstallMcps(enhancement) {
|
|
126
|
+
if (enhancement === 'off') return;
|
|
127
|
+
|
|
128
|
+
const installed = getInstalledMcpOutput();
|
|
129
|
+
const relevant = MCP_CATALOG.filter(m => m.enhancementLevels.includes(enhancement));
|
|
130
|
+
const present = relevant.filter(m => installed.includes(m.name.toLowerCase()));
|
|
131
|
+
const missing = relevant.filter(m => !installed.includes(m.name.toLowerCase()));
|
|
132
|
+
|
|
133
|
+
console.log('');
|
|
134
|
+
console.log(chalk.bold(' Recommended MCPs') + chalk.dim(` for enhancement: ${enhancement}`));
|
|
135
|
+
console.log('');
|
|
136
|
+
|
|
137
|
+
for (const m of present) {
|
|
138
|
+
console.log(' ' + chalk.green('✓') + ' ' + chalk.white(m.name) + chalk.dim(` — ${m.description}`));
|
|
139
|
+
}
|
|
140
|
+
|
|
141
|
+
if (missing.length === 0) {
|
|
142
|
+
console.log(chalk.dim(' All recommended MCPs already installed.'));
|
|
143
|
+
return;
|
|
144
|
+
}
|
|
145
|
+
|
|
146
|
+
const autoInstallable = missing.filter(m => m.autoInstall);
|
|
147
|
+
const manualOnly = missing.filter(m => !m.autoInstall);
|
|
148
|
+
|
|
149
|
+
if (autoInstallable.length > 0) {
|
|
150
|
+
const { toInstall } = await prompts({
|
|
151
|
+
type: 'multiselect',
|
|
152
|
+
name: 'toInstall',
|
|
153
|
+
message: 'Install these MCPs now?',
|
|
154
|
+
choices: autoInstallable.map(m => ({
|
|
155
|
+
title: `${m.name} — ${m.description}`,
|
|
156
|
+
value: m.name,
|
|
157
|
+
selected: true,
|
|
158
|
+
})),
|
|
159
|
+
}, { onCancel: () => ({ toInstall: [] }) });
|
|
160
|
+
|
|
161
|
+
for (const name of (toInstall || [])) {
|
|
162
|
+
const mcp = autoInstallable.find(m => m.name === name);
|
|
163
|
+
try {
|
|
164
|
+
execSync(mcp.autoInstall, { encoding: 'utf8', stdio: 'pipe' });
|
|
165
|
+
console.log(' ' + chalk.green('✓') + ' ' + chalk.white(name) + chalk.dim(' installed'));
|
|
166
|
+
} catch (e) {
|
|
167
|
+
console.error(' ' + chalk.red('✗') + ' ' + name + ' — ' + (e.stderr?.toString().trim() || e.message));
|
|
168
|
+
}
|
|
169
|
+
}
|
|
170
|
+
}
|
|
171
|
+
|
|
172
|
+
if (manualOnly.length > 0) {
|
|
173
|
+
console.log('');
|
|
174
|
+
console.log(chalk.dim(' Manual setup required:'));
|
|
175
|
+
for (const m of manualOnly) {
|
|
176
|
+
console.log(' ' + chalk.yellow('→') + ' ' + chalk.white(m.name) + chalk.dim(` — ${m.docs}`));
|
|
177
|
+
}
|
|
178
|
+
}
|
|
179
|
+
}
|
|
180
|
+
|
|
181
|
+
// ─── Automation (Windows Task Scheduler) ──────────────────────────────────────
|
|
182
|
+
|
|
183
|
+
const SMASH_BASE = String.raw`C:\SmashOS`;
|
|
184
|
+
const REGISTRY_FILE = join(SMASH_BASE, 'projects.json');
|
|
185
|
+
const SCRIPTS_DIR = join(SMASH_BASE, 'scripts');
|
|
186
|
+
const SKILL_SCRIPTS = join(SMASH_BASE, '_skills', 'scripts');
|
|
187
|
+
const SKILL_LOGS = join(SMASH_BASE, '_skills', 'logs');
|
|
188
|
+
|
|
189
|
+
function autoRun(cmd) {
|
|
190
|
+
return execSync(cmd, { encoding: 'utf8', stdio: 'pipe' });
|
|
191
|
+
}
|
|
192
|
+
|
|
193
|
+
function findClaude() {
|
|
194
|
+
try {
|
|
195
|
+
const found = autoRun('where claude').trim().split('\n')[0].trim();
|
|
196
|
+
if (found && existsSync(found)) return found;
|
|
197
|
+
} catch { /* not in PATH */ }
|
|
198
|
+
const home = process.env.USERPROFILE || process.env.HOME || 'C:\\Users\\Administrator';
|
|
199
|
+
const candidates = [
|
|
200
|
+
join(home, '.local', 'bin', 'claude.exe'),
|
|
201
|
+
join(home, 'AppData', 'Local', 'Programs', 'claude', 'claude.exe'),
|
|
202
|
+
join(home, 'AppData', 'Roaming', 'npm', 'claude.cmd'),
|
|
203
|
+
'C:\\Program Files\\claude\\claude.exe',
|
|
204
|
+
];
|
|
205
|
+
for (const c of candidates) if (existsSync(c)) return c;
|
|
206
|
+
return null;
|
|
207
|
+
}
|
|
208
|
+
|
|
209
|
+
function loadRegistry() {
|
|
210
|
+
if (!existsSync(REGISTRY_FILE)) return [];
|
|
211
|
+
try { return JSON.parse(readFileSync(REGISTRY_FILE, 'utf8')); } catch { return []; }
|
|
212
|
+
}
|
|
213
|
+
|
|
214
|
+
function saveRegistry(projects) {
|
|
215
|
+
mkdirSync(SMASH_BASE, { recursive: true });
|
|
216
|
+
writeFileSync(REGISTRY_FILE, JSON.stringify(projects, null, 2), 'utf8');
|
|
217
|
+
}
|
|
218
|
+
|
|
219
|
+
function schtask(name, batPath, schedule) {
|
|
220
|
+
const user = process.env.USERNAME || process.env.USER || 'Administrator';
|
|
221
|
+
try {
|
|
222
|
+
autoRun(`schtasks /create /tn "SmashOS\\${name}" /tr "${batPath}" ${schedule} /f /ru "${user}"`);
|
|
223
|
+
console.log(' ' + chalk.green('✓') + ' SmashOS\\' + name);
|
|
224
|
+
} catch (e) {
|
|
225
|
+
console.error(' ' + chalk.red('✗') + ' SmashOS\\' + name + ' — ' + (e.stderr?.toString().trim() || e.message));
|
|
226
|
+
}
|
|
227
|
+
}
|
|
228
|
+
|
|
229
|
+
function buildBats(projects, claudeExe) {
|
|
230
|
+
const C = `"${claudeExe}" --dangerously-skip-permissions --print`;
|
|
231
|
+
const home = process.env.USERPROFILE || process.env.HOME || 'C:\\Users\\Administrator';
|
|
232
|
+
const hdr = `@echo off\nset CLAUDE="${claudeExe}" --dangerously-skip-permissions --print\n`;
|
|
233
|
+
|
|
234
|
+
function block(p, prompt, slug) {
|
|
235
|
+
const log = join(SMASH_BASE, p.name, 'logs', `${slug}.log`);
|
|
236
|
+
return `\n:: === ${p.name} ===\ncd /d "${p.path}"\necho [%date% %time%] Running ${slug}... >> "${log}"\n${C} "${prompt}" >> "${log}" 2>&1\necho [%date% %time%] Done. >> "${log}"`;
|
|
237
|
+
}
|
|
238
|
+
function blockCmd(p, cmd, slug) {
|
|
239
|
+
const log = join(SMASH_BASE, p.name, 'logs', `${slug}.log`);
|
|
240
|
+
return `\n:: === ${p.name} ===\ncd /d "${p.path}"\necho [%date% %time%] Running ${slug}... >> "${log}"\n${C} "${cmd}" >> "${log}" 2>&1\necho [%date% %time%] Done. >> "${log}"`;
|
|
241
|
+
}
|
|
242
|
+
function bat(slug, getBlock) { return hdr + '\n' + projects.map(p => getBlock(p)).join('\n') + '\n'; }
|
|
243
|
+
|
|
244
|
+
return [
|
|
245
|
+
{ taskName: 'Projects\\LockCleanup', batFile: join(SCRIPTS_DIR, 'lock-cleanup.bat'), schedule: '/sc hourly /st 00:00', content: bat('lock-cleanup', p => block(p, 'Review .claude/scheduled_tasks.lock and any stale pipeline lock files. Delete any locks older than 2 hours and report what was cleaned.', 'lock-cleanup')) },
|
|
246
|
+
{ taskName: 'Projects\\MemoryConsolidation', batFile: join(SCRIPTS_DIR, 'memory-consolidation.bat'), schedule: '/sc daily /st 01:00', content: bat('memory-consolidation', p => block(p, 'Read all files in ai/memory/. Consolidate duplicates and summarise entries older than 30 days into a single summary entry. Save the updated files.', 'memory-consolidation')) },
|
|
247
|
+
{ taskName: 'Projects\\NightlyAudit', batFile: join(SCRIPTS_DIR, 'nightly-audit.bat'), schedule: '/sc daily /st 02:00', content: bat('nightly-audit', p => blockCmd(p, '/smash-os:audit', 'nightly-audit')) },
|
|
248
|
+
{ taskName: 'Projects\\DocsRegeneration', batFile: join(SCRIPTS_DIR, 'docs-regeneration.bat'), schedule: '/sc daily /st 03:00', content: bat('docs-regeneration', p => block(p, 'Read ai/context/ files. Check if they are still accurate against the codebase. Flag outdated sections. Do NOT overwrite orchestrator.md.', 'docs-regeneration')) },
|
|
249
|
+
{ taskName: 'Projects\\CTOLoop', batFile: join(SCRIPTS_DIR, 'cto-loop.bat'), schedule: '/sc weekly /d MON /st 04:00', content: bat('cto-loop', p => block(p, 'Act as CTO. Read ai/memory/decisions.md and ai/memory/audits.md. Score codebase health out of 100 across security, architecture, tests, risks. Output structured report with top 3 risks and recommended next pipeline.', 'cto-loop')) },
|
|
250
|
+
{ taskName: 'Projects\\WeeklyImprovement', batFile: join(SCRIPTS_DIR, 'weekly-improvement.bat'), schedule: '/sc weekly /d MON /st 05:00', content: bat('weekly-improvement', p => blockCmd(p, '/smash-os:run weekly-improvement', 'weekly-improvement')) },
|
|
251
|
+
{ taskName: 'Projects\\RoleEvolution', batFile: join(SCRIPTS_DIR, 'role-evolution.bat'), schedule: '/sc weekly /d MON /st 06:30', content: bat('role-evolution', p => blockCmd(p, '/smash-os:evolve-roles', 'role-evolution')) },
|
|
252
|
+
{ taskName: '_skills\DreamMemory', batFile: join(SKILL_SCRIPTS, 'dream-memory.bat'), schedule: '/sc daily /st 04:00', content: `@echo off
|
|
253
|
+
cd /d "${home}"
|
|
254
|
+
set CONVENTIONS=%USERPROFILE%\Desktop\SmashBurgerBar\SmashVault\Architecture\conventions.md
|
|
255
|
+
if not exist "%CONVENTIONS%" set CONVENTIONS=%USERPROFILE%\.claude\conventions.md
|
|
256
|
+
echo [%date% %time%] Running dream-memory --all... >> "${join(SKILL_LOGS, 'dream-memory.log')}"
|
|
257
|
+
"${claudeExe}" --dangerously-skip-permissions --print "/dream-memory --all" >> "${join(SKILL_LOGS, 'dream-memory.log')}" 2>&1
|
|
258
|
+
echo [%date% %time%] Running dream-memory --vault on %CONVENTIONS%... >> "${join(SKILL_LOGS, 'dream-memory.log')}"
|
|
259
|
+
"${claudeExe}" --dangerously-skip-permissions --print "/dream-memory --vault %CONVENTIONS%" >> "${join(SKILL_LOGS, 'dream-memory.log')}" 2>&1
|
|
260
|
+
echo [%date% %time%] Done. >> "${join(SKILL_LOGS, 'dream-memory.log')}"
|
|
261
|
+
` },
|
|
262
|
+
{ taskName: '_skills\\SkillEvolution', batFile: join(SKILL_SCRIPTS, 'skill-evolution.bat'), schedule: '/sc weekly /d MON /st 06:00', content: `@echo off\ncd /d "${home}"\necho [%date% %time%] Running skill evolution... >> "${join(SKILL_LOGS, 'skill-evolution.log')}"\n"${claudeExe}" --dangerously-skip-permissions --print "/skill-evolution" >> "${join(SKILL_LOGS, 'skill-evolution.log')}" 2>&1\necho [%date% %time%] Done. >> "${join(SKILL_LOGS, 'skill-evolution.log')}"\n` },
|
|
263
|
+
{ taskName: '_skills\\SkillResearch', batFile: join(SKILL_SCRIPTS, 'skill-research.bat'), schedule: '/sc monthly /d 1 /st 07:00', content: `@echo off\ncd /d "${home}"\necho [%date% %time%] Running skill research... >> "${join(SKILL_LOGS, 'skill-research.log')}"\nfor %%S in (close-session open-session smash-os-run smash-os-audit smash-os-onboarding) do (\n "${claudeExe}" --dangerously-skip-permissions --print "/skill-researcher %%S" >> "${join(SKILL_LOGS, 'skill-research.log')}" 2>&1\n timeout /t 30 /nobreak >nul\n)\necho [%date% %time%] Done. >> "${join(SKILL_LOGS, 'skill-research.log')}"\n` },
|
|
264
|
+
];
|
|
265
|
+
}
|
|
266
|
+
|
|
267
|
+
function runAutomation(projectPath) {
|
|
268
|
+
console.log('');
|
|
269
|
+
console.log(chalk.bold(' Setting up Windows Task Scheduler...'));
|
|
270
|
+
console.log('');
|
|
271
|
+
|
|
272
|
+
const claudeExe = findClaude();
|
|
273
|
+
if (!claudeExe) {
|
|
274
|
+
console.error(' ' + chalk.red('✗') + ' claude.exe not found in PATH — skipping automation.');
|
|
275
|
+
console.error(chalk.dim(' Re-run: node install-automation.mjs after adding claude to PATH'));
|
|
276
|
+
return;
|
|
277
|
+
}
|
|
278
|
+
console.log(' ' + chalk.green('✓') + ' claude: ' + chalk.dim(claudeExe));
|
|
279
|
+
|
|
280
|
+
const projectName = (() => {
|
|
281
|
+
try { return JSON.parse(readFileSync(join(projectPath, 'package.json'), 'utf8')).name || basename(projectPath); }
|
|
282
|
+
catch { return basename(projectPath); }
|
|
283
|
+
})();
|
|
284
|
+
const current = { name: projectName, path: projectPath };
|
|
285
|
+
|
|
286
|
+
let projects = loadRegistry();
|
|
287
|
+
if (!projects.find(p => p.path === current.path)) {
|
|
288
|
+
projects.push({ name: current.name, path: current.path, addedAt: new Date().toISOString().slice(0, 10) });
|
|
289
|
+
saveRegistry(projects);
|
|
290
|
+
}
|
|
291
|
+
console.log(' ' + chalk.green('✓') + ` registry: ${projects.length} project(s)`);
|
|
292
|
+
|
|
293
|
+
// Ensure ~/.claude/conventions.md exists as fallback
|
|
294
|
+
const globalConv = join(homedir(), '.claude', 'conventions.md');
|
|
295
|
+
if (!existsSync(globalConv)) {
|
|
296
|
+
mkdirSync(join(homedir(), '.claude'), { recursive: true });
|
|
297
|
+
writeFileSync(globalConv, '# Conventions\n\n<!-- Consolidated by /dream-memory --vault -->\n', 'utf8');
|
|
298
|
+
console.log(' ' + chalk.green('✓') + ' ~/.claude/conventions.md (created)');
|
|
299
|
+
} else {
|
|
300
|
+
console.log(' ' + chalk.dim('↷') + ' ~/.claude/conventions.md (exists)');
|
|
301
|
+
}
|
|
302
|
+
|
|
303
|
+
for (const p of projects) mkdirSync(join(SMASH_BASE, p.name, 'logs'), { recursive: true });
|
|
304
|
+
mkdirSync(SCRIPTS_DIR, { recursive: true });
|
|
305
|
+
mkdirSync(SKILL_SCRIPTS, { recursive: true });
|
|
306
|
+
mkdirSync(SKILL_LOGS, { recursive: true });
|
|
307
|
+
|
|
308
|
+
const bats = buildBats(projects, claudeExe);
|
|
309
|
+
for (const b of bats) writeFileSync(b.batFile, b.content, 'utf8');
|
|
310
|
+
console.log(' ' + chalk.green('✓') + ` ${bats.length} bat files written`);
|
|
311
|
+
|
|
312
|
+
console.log('');
|
|
313
|
+
for (const b of bats) schtask(b.taskName, b.batFile, b.schedule);
|
|
314
|
+
|
|
315
|
+
console.log('');
|
|
316
|
+
console.log(chalk.dim(' Schedule: hourly lock-cleanup · 1am memory · 2am audit · 3am docs · Mon CTO+improvement · Mon 6am skills · Mon 6:30am role-evolution · 1st skill-research'));
|
|
317
|
+
console.log(chalk.dim(` Logs: C:\\SmashOS\\${projectName}\\logs\\`));
|
|
318
|
+
}
|
|
319
|
+
|
|
320
|
+
// ─── Main ─────────────────────────────────────────────────────────────────────
|
|
321
|
+
|
|
322
|
+
console.log('');
|
|
323
|
+
console.log(chalk.bold(` SmashOS ${isMarketing ? 'Marketing Harness' : 'Harness'} Installer`));
|
|
324
|
+
console.log(chalk.dim(' Local mode — no web app, no API keys required'));
|
|
325
|
+
console.log('');
|
|
326
|
+
|
|
327
|
+
// ─── Marketing install (--marketing flag) ─────────────────────────────────────
|
|
328
|
+
|
|
329
|
+
if (isMarketing) {
|
|
330
|
+
const mAnswers = await prompts([
|
|
331
|
+
{
|
|
332
|
+
type: 'text',
|
|
333
|
+
name: 'brandName',
|
|
334
|
+
message: 'Brand / project name',
|
|
335
|
+
initial: basename(cwd),
|
|
336
|
+
validate: (v) => (v.trim().length > 0 ? true : 'Required'),
|
|
337
|
+
},
|
|
338
|
+
{
|
|
339
|
+
type: 'select',
|
|
340
|
+
name: 'enhancement',
|
|
341
|
+
message: 'Enhancement mode',
|
|
342
|
+
choices: [
|
|
343
|
+
{ title: 'off — roles only, manual framework use', value: 'off' },
|
|
344
|
+
{ title: 'light — roles hint relevant workflows', value: 'light' },
|
|
345
|
+
{ title: 'medium — structured workflows enforced (recommended)', value: 'medium' },
|
|
346
|
+
{ title: 'high — full GSD phase engine + learning extraction', value: 'high' },
|
|
347
|
+
],
|
|
348
|
+
initial: 2,
|
|
349
|
+
},
|
|
350
|
+
], { onCancel: () => { console.log(chalk.yellow('\n Cancelled.')); process.exit(0); } });
|
|
351
|
+
|
|
352
|
+
const { brandName, enhancement: mEnhancement } = mAnswers;
|
|
353
|
+
|
|
354
|
+
const smashOsRoot = join(dirname(new URL(import.meta.url).pathname.replace(/^\/([A-Z]:)/, '$1')), '..');
|
|
355
|
+
const marketingSrc = join(smashOsRoot, 'other', 'cross-industry-domains', 'marketing');
|
|
356
|
+
|
|
357
|
+
function copyDir(src, destPrefix, skipIfExists = false) {
|
|
358
|
+
for (const entry of readdirSync(src, { withFileTypes: true })) {
|
|
359
|
+
const srcPath = join(src, entry.name);
|
|
360
|
+
const destRel = join(destPrefix, entry.name);
|
|
361
|
+
if (entry.isDirectory()) {
|
|
362
|
+
copyDir(srcPath, destRel, skipIfExists);
|
|
363
|
+
} else {
|
|
364
|
+
const destAbs = join(cwd, destRel);
|
|
365
|
+
if (skipIfExists && existsSync(destAbs)) {
|
|
366
|
+
console.log(' ' + chalk.dim('↷') + ' ' + chalk.dim(`${destRel} (exists — skipped)`));
|
|
367
|
+
} else {
|
|
368
|
+
mkdirSync(dirname(destAbs), { recursive: true });
|
|
369
|
+
copyFileSync(srcPath, destAbs);
|
|
370
|
+
console.log(' ' + chalk.green('✓') + ' ' + chalk.white(destRel));
|
|
371
|
+
}
|
|
372
|
+
}
|
|
373
|
+
}
|
|
374
|
+
}
|
|
375
|
+
|
|
376
|
+
console.log('');
|
|
377
|
+
writeFile('.smash-os-mode', 'marketing\n');
|
|
378
|
+
console.log(' ' + chalk.green('✓') + ' ' + chalk.white('.smash-os-mode') + chalk.dim(' (marketing)'));
|
|
379
|
+
writeFile('.smash-os-enhancement', (mEnhancement || 'medium') + '\n');
|
|
380
|
+
console.log(' ' + chalk.green('✓') + ' ' + chalk.white('.smash-os-enhancement') + chalk.dim(` (${mEnhancement || 'medium'})`));
|
|
381
|
+
|
|
382
|
+
const mClaudeMd = `# ${brandName} — SmashOS Marketing Harness Active\n\nYou are operating inside the SmashOS Marketing Workflow Harness.\nYou are not a single assistant. You are a coordinated AI marketing organisation.\nThis is a **fully local** installation — no web app, no API keys, no cloud dependencies.\n\n## On Session Start\n1. Read \`marketing/context/product.md\` (what are we marketing?)\n2. Read \`marketing/context/brand-guidelines.md\` (tone, colours, fonts, voice)\n3. Read \`marketing/context/target-audience.md\` (ICPs, segments, pain points)\n4. Read \`marketing/memory/decisions.md\` — last 20 entries (if it exists)\n5. Adopt the role: Brand Strategist\n\n## Cognitive Mode Rules\nTHINKING → Brand Strategist, Marketing Analyst (no content creation)\nEXECUTION → Content Writer, Social Media Manager (no strategy, no decisions)\nVALIDATION → SEO Specialist, Paid Media Analyst (no content creation)\n\n## Golden Rules\n- Never create content without an approved brief\n- Never publish without SEO + brand validation\n- Never run paid campaigns without ROI projections and goal alignment\n- Output structured results only\n- Save campaign decisions to memory after every significant choice\n\n## Slash Commands\n- /marketing:run [type] — trigger pipeline (campaign | content | seo-audit | paid | custom)\n- /marketing:role [name] — switch cognitive mode + load role definition\n- /marketing:memory — show recent campaign decisions and lessons\n- /marketing:status — print active campaigns, recent content, pipeline health\n- /marketing:brief [topic] — create a campaign brief for a specific topic\n\n## Enhancement Mode\nCurrent level: ${mEnhancement || 'medium'}\nRead \`.smash-os-enhancement\` at session start.\n\n## Mode\nLocal mode — all pipelines run inline via Claude Code.\n`;
|
|
383
|
+
writeFile('CLAUDE.md', mClaudeMd);
|
|
384
|
+
console.log(' ' + chalk.green('✓') + ' ' + chalk.white('CLAUDE.md') + chalk.dim(' (marketing)'));
|
|
385
|
+
|
|
386
|
+
console.log('');
|
|
387
|
+
console.log(chalk.dim(' Roles + workflows:'));
|
|
388
|
+
copyDir(join(marketingSrc, 'roles'), 'marketing/roles', false);
|
|
389
|
+
copyDir(join(marketingSrc, 'workflows'), 'marketing/workflows', false);
|
|
390
|
+
|
|
391
|
+
console.log('');
|
|
392
|
+
console.log(chalk.dim(' Context templates (skipped if already customised):'));
|
|
393
|
+
copyDir(join(marketingSrc, 'context'), 'marketing/context', true);
|
|
394
|
+
|
|
395
|
+
if (!existsSync(join(cwd, 'marketing/memory/decisions.md'))) {
|
|
396
|
+
writeFile('marketing/memory/decisions.md', '# Campaign Decisions Log\n\n<!-- SmashOS writes here after each campaign decision. -->\n<!-- ## YYYY-MM-DD — Decision title -->\n<!-- **Rationale:** why this was chosen -->\n<!-- **Outcome:** what changed -->\n');
|
|
397
|
+
console.log('');
|
|
398
|
+
console.log(' ' + chalk.green('✓') + ' ' + chalk.white('marketing/memory/decisions.md'));
|
|
399
|
+
}
|
|
400
|
+
|
|
401
|
+
const marketingSkills = {
|
|
402
|
+
'marketing-run': `---\nname: marketing-run\ndescription: Trigger a SmashOS marketing pipeline. Usage: /marketing:run [type] where type is campaign | content | seo-audit | paid | custom.\nallowed-tools: Bash, Read, Write, Edit, Glob, Grep\n---\n\n# /marketing:run\n\nTrigger a marketing pipeline for this project (local mode).\n\nLoad context from \`marketing/context/\` and \`marketing/memory/\` before starting.\n\n## Pipeline Types\n- \`campaign\` — full 6-phase campaign launch pipeline\n- \`content\` — weekly 30-day content calendar\n- \`seo-audit\` — monthly SEO health check\n- \`paid\` — paid media pre-launch review\n- \`custom\` — user-defined pipeline\n\n## Phase Output Format\n\n\`\`\`\n▸ SMASH MARKETING · phase {N} · {role}\n─────────────────────────────────────\n{output}\n─────────────────────────────────────\n\`\`\`\n\n## After All Phases\n\nSave key campaign decisions to \`marketing/memory/decisions.md\`.\n`,
|
|
403
|
+
'marketing-role': `---\nname: marketing-role\ndescription: Switch to a SmashOS marketing role. Usage: /marketing:role [name].\n---\n\n# /marketing:role\n\nSwitch to the named marketing role and adopt its cognitive mode.\n\n## Roles\n\n| Role | Mode |\n|---|---|\n| Brand Strategist | THINKING |\n| Marketing Analyst | THINKING |\n| Content Writer | EXECUTION |\n| Social Media Manager | EXECUTION |\n| SEO Specialist | VALIDATION |\n| Paid Media Analyst | VALIDATION |\n\n## Steps\n1. Read the role name from the argument\n2. Announce: "Switching to [Role] — [Mode] mode"\n3. Load \`marketing/roles/<role-slug>.md\` if it exists\n4. Apply cognitive mode restrictions\n`,
|
|
404
|
+
'marketing-brief': `---\nname: marketing-brief\ndescription: Generate a campaign brief for a topic. Usage: /marketing:brief [topic].\nallowed-tools: Read\n---\n\n# /marketing:brief\n\nGenerate a campaign brief as Brand Strategist.\n\n## Steps\n1. Read \`marketing/context/product.md\`\n2. Read \`marketing/context/brand-guidelines.md\`\n3. Read \`marketing/context/target-audience.md\`\n4. Adopt Brand Strategist [THINKING] mode\n5. Produce a full approved_brief JSON for the topic\n`,
|
|
405
|
+
};
|
|
406
|
+
|
|
407
|
+
console.log('');
|
|
408
|
+
const mSkillsHome = join(homedir(), '.claude', 'skills');
|
|
409
|
+
for (const [skillName, skillContent] of Object.entries(marketingSkills)) {
|
|
410
|
+
const skillDir = join(mSkillsHome, skillName);
|
|
411
|
+
mkdirSync(skillDir, { recursive: true });
|
|
412
|
+
writeFileSync(join(skillDir, 'SKILL.md'), skillContent, 'utf8');
|
|
413
|
+
console.log(' ' + chalk.green('✓') + ' ' + chalk.white(`/marketing:${skillName.replace('marketing-', '')}`) + chalk.dim(' → ~/.claude/skills/'));
|
|
414
|
+
}
|
|
415
|
+
|
|
416
|
+
console.log('');
|
|
417
|
+
console.log(chalk.bold.green(' SmashOS Marketing Harness installed!'));
|
|
418
|
+
console.log('');
|
|
419
|
+
console.log(chalk.dim(' Open Claude Code in this directory:'));
|
|
420
|
+
console.log(' ' + chalk.white(' claude .'));
|
|
421
|
+
console.log('');
|
|
422
|
+
console.log(chalk.dim(' Fill in marketing/context/ with your brand details.'));
|
|
423
|
+
console.log(chalk.dim(' Then run /marketing:run campaign to start a campaign.'));
|
|
424
|
+
console.log('');
|
|
425
|
+
process.exit(0);
|
|
426
|
+
}
|
|
427
|
+
|
|
428
|
+
const answers = await prompts([
|
|
429
|
+
{
|
|
430
|
+
type: 'text',
|
|
431
|
+
name: 'projectName',
|
|
432
|
+
message: 'Project name',
|
|
433
|
+
initial: basename(cwd),
|
|
434
|
+
validate: (v) => (v.trim().length > 0 ? true : 'Required'),
|
|
435
|
+
},
|
|
436
|
+
{
|
|
437
|
+
type: 'text',
|
|
438
|
+
name: 'techStack',
|
|
439
|
+
message: 'Tech stack (e.g. React + Node.js + Postgres)',
|
|
440
|
+
initial: 'TypeScript',
|
|
441
|
+
},
|
|
442
|
+
{
|
|
443
|
+
type: 'select',
|
|
444
|
+
name: 'enhancement',
|
|
445
|
+
message: 'Enhancement mode (superpowers + GSD framework integration)',
|
|
446
|
+
choices: [
|
|
447
|
+
{ title: 'off — roles only, manual framework use', value: 'off' },
|
|
448
|
+
{ title: 'light — roles hint relevant workflows', value: 'light' },
|
|
449
|
+
{ title: 'medium — structured workflows enforced (recommended)', value: 'medium' },
|
|
450
|
+
{ title: 'high — full GSD phase engine + learning extraction', value: 'high' },
|
|
451
|
+
],
|
|
452
|
+
initial: 2,
|
|
453
|
+
},
|
|
454
|
+
{
|
|
455
|
+
type: 'confirm',
|
|
456
|
+
name: 'isFrontend',
|
|
457
|
+
message: 'Is this a frontend project? (React / Next.js / Vue / Svelte — adds UI/UX Designer, Frontend Developer, Frontend QA roles)',
|
|
458
|
+
initial: false,
|
|
459
|
+
},
|
|
460
|
+
{
|
|
461
|
+
type: process.platform === 'win32' ? 'confirm' : null,
|
|
462
|
+
name: 'setupAutomation',
|
|
463
|
+
message: 'Set up Windows Task Scheduler automation? (nightly AI, weekly improvements)',
|
|
464
|
+
initial: true,
|
|
465
|
+
},
|
|
466
|
+
], { onCancel: () => { console.log(chalk.yellow('\n Cancelled.')); process.exit(0); } });
|
|
467
|
+
|
|
468
|
+
const { projectName, techStack, enhancement, isFrontend, setupAutomation } = answers;
|
|
469
|
+
|
|
470
|
+
// ─── File content ─────────────────────────────────────────────────────────────
|
|
471
|
+
|
|
472
|
+
const claudeMd = `# ${projectName} — SmashOS Harness Active
|
|
473
|
+
|
|
474
|
+
You are operating inside the SmashOS AI Workflow Harness.
|
|
475
|
+
You are not a single assistant. You are a coordinated AI engineering organisation.
|
|
476
|
+
This is a **fully local** installation — no web app, no API keys, no cloud dependencies.
|
|
477
|
+
|
|
478
|
+
## On Session Start
|
|
479
|
+
1. Read \`ai/context/product.md\` (if it exists)
|
|
480
|
+
2. Read \`ai/context/architecture.md\` (if it exists)
|
|
481
|
+
3. Read \`ai/context/coding-standards.md\` (if it exists)
|
|
482
|
+
4. Read \`ai/memory/decisions.md\` — last 20 entries (if it exists)
|
|
483
|
+
5. Adopt the role: Staff Engineer
|
|
484
|
+
|
|
485
|
+
## Cognitive Mode Rules
|
|
486
|
+
THINKING → Staff Engineer, Product Manager${isFrontend ? ', UI/UX Designer' : ''} (no file writes)
|
|
487
|
+
EXECUTION → Senior Developer, DevOps${isFrontend ? ', Frontend Developer' : ''} (no specs or decisions)
|
|
488
|
+
VALIDATION → Security Engineer, QA Engineer${isFrontend ? ', Frontend QA' : ''} (no production code)${isFrontend ? `
|
|
489
|
+
|
|
490
|
+
## Frontend Team (active — frontend files detected trigger these roles)
|
|
491
|
+
- UI/UX Designer [THINKING] — design review, brand compliance, UX audit
|
|
492
|
+
- Frontend Developer [EXECUTION] — component code, a11y, performance, design system
|
|
493
|
+
- Frontend QA [VALIDATION] — Playwright E2E tests, coverage sign-off
|
|
494
|
+
Trigger: any change to .tsx/.jsx/.css/.scss or components/app/pages/ directories
|
|
495
|
+
Manual: /smash-os:run frontend-audit` : ''}
|
|
496
|
+
|
|
497
|
+
## Golden Rules
|
|
498
|
+
- Never skip architecture review
|
|
499
|
+
- Never write code without an approved spec
|
|
500
|
+
- Never deploy without QA + Security validation
|
|
501
|
+
- Output structured results only
|
|
502
|
+
- Save decisions to memory after every significant choice
|
|
503
|
+
|
|
504
|
+
## Slash Commands
|
|
505
|
+
- /smash-os:run [type] — trigger pipeline (feature | bug-fix | security-audit | weekly-improvement | custom)
|
|
506
|
+
- /smash-os:role [name] — switch cognitive mode + load role definition
|
|
507
|
+
- /smash-os:memory — show recent decisions and lessons
|
|
508
|
+
- /smash-os:status — print health, pipelines, signals, recent decisions
|
|
509
|
+
- /smash-os:enhancement [lvl] — view or change enhancement mode (off | light | medium | high)
|
|
510
|
+
|
|
511
|
+
## Enhancement Mode
|
|
512
|
+
Current level: ${enhancement || 'medium'}
|
|
513
|
+
|
|
514
|
+
Read \`.smash-os-enhancement\` at session start and apply the corresponding framework integration.
|
|
515
|
+
|
|
516
|
+
## Tech Stack
|
|
517
|
+
${techStack}
|
|
518
|
+
`;
|
|
519
|
+
|
|
520
|
+
const aiFiles = {
|
|
521
|
+
'ai/context/product.md': `# Product Context — ${projectName}\n\n## What is this project?\n<!-- Describe what this project does -->\n\n## Who uses it?\n<!-- Describe the users -->\n\n## Core goals\n<!-- List the main goals -->\n\n## Tech Stack\n${techStack}\n`,
|
|
522
|
+
'ai/context/architecture.md': `# Architecture — ${projectName}\n\n## Overview\n<!-- Describe the high-level architecture -->\n\n## Key decisions\n<!-- List major architectural decisions and why they were made -->\n\n## Constraints\n<!-- List things that must not change -->\n`,
|
|
523
|
+
'ai/context/coding-standards.md': `# Coding Standards — ${projectName}\n\n## Language & formatting\n<!-- ESLint config, prettier, etc. -->\n\n## Naming conventions\n<!-- Variables, files, functions -->\n\n## Patterns to follow\n<!-- e.g. always use server actions, never bypass RLS -->\n\n## Patterns to avoid\n<!-- e.g. no raw SQL, no any types -->\n`,
|
|
524
|
+
'ai/memory/decisions.md': `# Decisions Log\n\n<!-- SmashOS writes here after each session. Format: -->\n<!-- ## YYYY-MM-DD — Decision title -->\n<!-- **Rationale:** why this was chosen -->\n<!-- **Outcome:** what changed -->\n`,
|
|
525
|
+
'ai/memory/lessons.md': `# Lessons Learned\n\n<!-- SmashOS writes here after bugs and incidents. Format: -->\n<!-- ## YYYY-MM-DD — Lesson title -->\n<!-- **What happened:** -->\n<!-- **What to do differently:** -->\n`,
|
|
526
|
+
};
|
|
527
|
+
|
|
528
|
+
const settingsJson = JSON.stringify({
|
|
529
|
+
hooks: {
|
|
530
|
+
SessionStart: [
|
|
531
|
+
{
|
|
532
|
+
hooks: [
|
|
533
|
+
{
|
|
534
|
+
type: 'command',
|
|
535
|
+
command: 'node "$CLAUDE_PROJECT_DIR/.claude/hooks/smash-os-boot.mjs"',
|
|
536
|
+
},
|
|
537
|
+
],
|
|
538
|
+
},
|
|
539
|
+
],
|
|
540
|
+
Stop: [
|
|
541
|
+
{
|
|
542
|
+
hooks: [
|
|
543
|
+
{
|
|
544
|
+
type: 'command',
|
|
545
|
+
command: 'node "$CLAUDE_PROJECT_DIR/.claude/hooks/smash-os-sync.mjs"',
|
|
546
|
+
},
|
|
547
|
+
],
|
|
548
|
+
},
|
|
549
|
+
],
|
|
550
|
+
},
|
|
551
|
+
}, null, 2);
|
|
552
|
+
|
|
553
|
+
const bootHook = `#!/usr/bin/env node
|
|
554
|
+
/**
|
|
555
|
+
* smash-os-boot.mjs — SessionStart hook (local mode)
|
|
556
|
+
* Reads ai/memory/decisions.md and injects recent decisions as session context.
|
|
557
|
+
* No network calls. No credentials required.
|
|
558
|
+
*/
|
|
559
|
+
|
|
560
|
+
import { readFileSync, existsSync } from 'fs';
|
|
561
|
+
import { join } from 'path';
|
|
562
|
+
|
|
563
|
+
const cwd = process.cwd();
|
|
564
|
+
|
|
565
|
+
function readFile(relPath) {
|
|
566
|
+
const abs = join(cwd, relPath);
|
|
567
|
+
return existsSync(abs) ? readFileSync(abs, 'utf8') : null;
|
|
568
|
+
}
|
|
569
|
+
|
|
570
|
+
function extractLastN(content, n) {
|
|
571
|
+
if (!content) return [];
|
|
572
|
+
const sections = content.split(/\\n(?=## )/).filter(s => s.trim());
|
|
573
|
+
return sections.slice(-n);
|
|
574
|
+
}
|
|
575
|
+
|
|
576
|
+
async function main() {
|
|
577
|
+
const enhancement = readFile('.smash-os-enhancement');
|
|
578
|
+
const enhancementLevel = enhancement ? enhancement.trim() : 'off';
|
|
579
|
+
|
|
580
|
+
const lines = [
|
|
581
|
+
'--- SmashOS Context (local mode) ---',
|
|
582
|
+
'',
|
|
583
|
+
'Mode: local',
|
|
584
|
+
\`Enhancement: \${enhancementLevel}\`,
|
|
585
|
+
'',
|
|
586
|
+
];
|
|
587
|
+
|
|
588
|
+
const decisions = readFile('ai/memory/decisions.md');
|
|
589
|
+
if (decisions) {
|
|
590
|
+
const recent = extractLastN(decisions, 5);
|
|
591
|
+
if (recent.length) {
|
|
592
|
+
lines.push('Recent Decisions (last 5):');
|
|
593
|
+
for (const section of recent) {
|
|
594
|
+
const firstLine = section.split('\\n')[0].replace(/^## /, '').trim();
|
|
595
|
+
lines.push(\` • \${firstLine}\`);
|
|
596
|
+
}
|
|
597
|
+
lines.push('');
|
|
598
|
+
}
|
|
599
|
+
}
|
|
600
|
+
|
|
601
|
+
lines.push('Run /smash-os:status for full health check.');
|
|
602
|
+
lines.push('Run /smash-os:run [type] to start a pipeline.');
|
|
603
|
+
lines.push('--- End SmashOS Context ---');
|
|
604
|
+
|
|
605
|
+
const output = { type: 'context', content: lines.join('\\n') };
|
|
606
|
+
process.stdout.write(JSON.stringify(output) + '\\n');
|
|
607
|
+
}
|
|
608
|
+
|
|
609
|
+
main().catch((err) => {
|
|
610
|
+
process.stderr.write(\`[SmashOS boot] Error: \${err.message}\\n\`);
|
|
611
|
+
process.exit(0);
|
|
612
|
+
});
|
|
613
|
+
`;
|
|
614
|
+
|
|
615
|
+
const syncHook = `#!/usr/bin/env node
|
|
616
|
+
/**
|
|
617
|
+
* smash-os-sync.mjs — Stop hook (local mode)
|
|
618
|
+
* Appends session summary to ai/memory/sessions.md locally.
|
|
619
|
+
* No network calls. No credentials required.
|
|
620
|
+
*/
|
|
621
|
+
|
|
622
|
+
import { readFileSync, existsSync, writeFileSync, mkdirSync } from 'fs';
|
|
623
|
+
import { join, dirname } from 'path';
|
|
624
|
+
import { homedir } from 'os';
|
|
625
|
+
|
|
626
|
+
const cwd = process.cwd();
|
|
627
|
+
const vaultConventions = join(process.env.USERPROFILE || process.env.HOME || homedir(), 'Desktop', 'SmashBurgerBar', 'SmashVault', 'Architecture', 'conventions.md');
|
|
628
|
+
const globalConventions = join(homedir(), '.claude', 'conventions.md');
|
|
629
|
+
const conventionsFile = existsSync(vaultConventions) ? vaultConventions : globalConventions;
|
|
630
|
+
|
|
631
|
+
function appendFile(relPath, content) {
|
|
632
|
+
const abs = join(cwd, relPath);
|
|
633
|
+
mkdirSync(dirname(abs), { recursive: true });
|
|
634
|
+
const existing = existsSync(abs) ? readFileSync(abs, 'utf8') : '';
|
|
635
|
+
writeFileSync(abs, existing + content, 'utf8');
|
|
636
|
+
}
|
|
637
|
+
|
|
638
|
+
async function main() {
|
|
639
|
+
let hookInput = {};
|
|
640
|
+
try {
|
|
641
|
+
const chunks = [];
|
|
642
|
+
for await (const chunk of process.stdin) chunks.push(chunk);
|
|
643
|
+
const raw = Buffer.concat(chunks).toString('utf8');
|
|
644
|
+
if (raw.trim()) hookInput = JSON.parse(raw);
|
|
645
|
+
} catch { /* stdin may be empty or non-JSON */ }
|
|
646
|
+
|
|
647
|
+
const summary = hookInput.session_summary || hookInput.summary || '';
|
|
648
|
+
const decisions = hookInput.decisions_made || [];
|
|
649
|
+
const files = hookInput.files_changed || [];
|
|
650
|
+
|
|
651
|
+
if (!summary && !decisions.length && !files.length) process.exit(0);
|
|
652
|
+
|
|
653
|
+
const timestamp = new Date().toISOString().slice(0, 16).replace('T', ' ');
|
|
654
|
+
const entry = [
|
|
655
|
+
\`\\n## \${timestamp}\`,
|
|
656
|
+
summary ? \`\\n\${summary}\` : '',
|
|
657
|
+
decisions.length ? \`\\n**Decisions:** \${decisions.join(', ')}\` : '',
|
|
658
|
+
files.length ? \`\\n**Files changed:** \${files.join(', ')}\` : '',
|
|
659
|
+
'',
|
|
660
|
+
].join('\\n');
|
|
661
|
+
|
|
662
|
+
appendFile('ai/memory/sessions.md', entry);
|
|
663
|
+
process.stderr.write('[SmashOS] Session saved locally to ai/memory/sessions.md\\n');
|
|
664
|
+
|
|
665
|
+
if (summary || decisions.length) {
|
|
666
|
+
const convEntry = [
|
|
667
|
+
\`\\n## \${timestamp} — pending consolidation\`,
|
|
668
|
+
summary ? \`\\nContext: \${summary.slice(0, 300)}\` : '',
|
|
669
|
+
decisions.length ? \`\\nDecisions: \${decisions.join('; ')}\` : '',
|
|
670
|
+
'\\n<!-- awaiting /dream-memory --vault -->',
|
|
671
|
+
'',
|
|
672
|
+
].join('\\n');
|
|
673
|
+
if (!existsSync(conventionsFile)) {
|
|
674
|
+
mkdirSync(dirname(conventionsFile), { recursive: true });
|
|
675
|
+
writeFileSync(conventionsFile, '# Conventions\\n\\n<!-- Consolidated by /dream-memory --vault -->\\n', 'utf8');
|
|
676
|
+
}
|
|
677
|
+
const existing = readFileSync(conventionsFile, 'utf8');
|
|
678
|
+
writeFileSync(conventionsFile, existing + convEntry, 'utf8');
|
|
679
|
+
process.stderr.write(\`[SmashOS] Conventions updated at \${conventionsFile}\\n\`);
|
|
680
|
+
}
|
|
681
|
+
process.exit(0);
|
|
682
|
+
}
|
|
683
|
+
|
|
684
|
+
main().catch((err) => {
|
|
685
|
+
process.stderr.write(\`[SmashOS sync] Error: \${err.message}\\n\`);
|
|
686
|
+
process.exit(0);
|
|
687
|
+
});
|
|
688
|
+
`;
|
|
689
|
+
|
|
690
|
+
const localSkills = {
|
|
691
|
+
'smash-os-role': `---\nname: smash-os-role\ndescription: Switch cognitive mode and load a SmashOS role definition. Use when switching roles or at session start.\n---\n\n# /smash-os:role\n\nSwitch to the named role and adopt its cognitive mode.\n\n## Roles\n\n| Role | Mode | Allowed |\n|---|---|---|\n| Staff Engineer | THINKING | Architecture, decisions, reviews |\n| Product Manager | THINKING | Specs, user stories, acceptance criteria |\n| Senior Developer | EXECUTION | Writing code, editing files |\n| Security Engineer | VALIDATION | Security review only, no code changes |\n| QA Engineer | VALIDATION | Testing and verification only |\n| DevOps Engineer | EXECUTION | Infrastructure, deployment |\n| UI/UX Designer | THINKING | Design review, UX audit, brand compliance (frontend projects only) |\n| Frontend Developer | EXECUTION | Component code, a11y, performance, design system (frontend projects only) |\n| Frontend QA | VALIDATION | Playwright E2E tests, coverage sign-off (frontend projects only) |\n\n## Steps\n1. Read the role name from the argument (default: Staff Engineer)\n2. Read \`.smash-os-enhancement\` (default: off if missing)\n3. Announce: "Switching to [Role] — [Mode] mode · Enhancement: [level]"\n4. Load \`ai/roles/<role-slug>.md\` if it exists\n5. Apply cognitive mode restrictions AND the enhancement behaviors below\n\n## Enhancement Mode Behaviors\n\n**off** — Role operates standalone. 
Use your own judgment on process.\n\n**light** — Surface framework hints before significant tasks:\n- Architecture or complex decisions → suggest \`/superpowers:brainstorming\` first\n- New code → remind about TDD (\`/superpowers:test-driven-development\`)\n- Bug investigation → suggest \`/superpowers:systematic-debugging\`\n- Do not force these — surface as a suggestion, then proceed\n\n**medium** — Enforce structured workflows:\n- Feature work: brainstorm → write-plans → TDD → verification-before-completion (mandatory gates)\n- Bug work: systematic-debugging with hypothesis log\n- Before claiming complete: always invoke \`/superpowers:verification-before-completion\`\n- Complex decisions: brainstorm alternatives before committing\n- After significant work: extract key learnings to \`ai/memory/lessons.md\`\n\n**high** — Full GSD phase management + active learning extraction:\n- Complex work (3+ implementation steps): use GSD (discuss → plan → execute → verify)\n- Use \`/gsd:plan-phase\` before any multi-step implementation\n- Simple/reversible tasks (< 30 min, single obvious approach): use medium workflows instead\n- After each phase: run \`/smash-os:extract-learnings\` to capture conventions and decisions\n- Completion: \`/gsd:verify-work\` before signing off\n- Learnings feed into \`ai/memory/\` and inform skill evolution\n`,
|
|
692
|
+
'smash-os-memory': `---\nname: smash-os-memory\ndescription: Show recent SmashOS decisions and lessons from the ai/memory/ folder.\n---\n\n# /smash-os:memory\n\nRead and display the local SmashOS memory files.\n\n## Steps\n1. Read \`ai/memory/decisions.md\` (last 20 entries)\n2. Read \`ai/memory/lessons.md\` (last 10 entries)\n3. Display them clearly — decisions first, then lessons\n4. If either file is empty or missing, say so\n`,
|
|
693
|
+
'smash-os-onboarding': `---\nname: smash-os:onboarding\ndescription: Run SmashOS onboarding — scans the codebase, pre-fills answers from what it finds, then walks through registration questions section by section. Each question can be skipped. Writes all ai/context/ files when done.\nallowed-tools: Bash, Read, Glob, Grep, Write\n---\n\n# /smash-os:onboarding\n\nOnboard this repo into SmashOS. Always local — no API keys required.\n\n## Phase 1 — Codebase Scan\n\nAnnounce start, then silently read:\n- \`package.json\`, \`README.md\`, \`CLAUDE.md\`, \`.smash-os-mode\`\n- Top-level dirs + \`src/\`, \`app/\`, \`lib/\` contents\n- Key config files: vite, next, react-router, tsconfig, tailwind, drizzle\n- \`supabase/migrations/\`, \`prisma/schema.prisma\`, \`.env.example\`\n- Any existing \`ai/context/\` files\n- Up to 5 representative source files\n\nBuild internal knowledge from what you find — used to pre-fill Phase 2.\n\n## Phase 2 — Registration Interview\n\nTell the user:\n\`\`\`\n▸ SMASH OS · registration interview\n─────────────────────────────────────\n I've scanned your codebase. Now I'll ask questions to fill in\n your AI context files. Each question shows what I already found.\n Type SKIP to skip any question. Type DONE to finish a section early.\n─────────────────────────────────────\n\`\`\`\n\nFor each question: show your pre-filled guess first, then ask. If user types SKIP — accept the pre-fill and move on. User answers always override pre-fills. One section at a time.\n\n**Section 1 — Product & Business → ai/context/product.md**\nQ1 What does this product do? | Q2 Who are the users? | Q3 Main modules/features? | Q4 Revenue/business model? | Q5 3–5 core daily workflows? | Q6 Non-negotiable rules? | Q7 What does "broken" look like?\n\n**Section 2 — Architecture & Tech Stack → ai/context/architecture.md**\nQ1 Frontend framework + version? | Q2 Backend/API layer? | Q3 Database(s)? | Q4 Auth method? | Q5 Deployment target? | Q6 External services/APIs? 
| Q7 Folder/module structure? | Q8 Architectural rules/boundaries?\n\n**Section 3 — Coding Standards → ai/context/coding-standards.md**\nQ1 Language + strictness? | Q2 Naming conventions? | Q3 How are mutations handled? | Q4 Styling approach? | Q5 Testing setup? | Q6 Consistent patterns/abstractions? | Q7 What should AI-generated code always avoid?\n\n**Section 4 — Database → ai/context/database.md** (skip if no DB detected)\nQ1 Main tables + what each stores? | Q2 Key relationships? | Q3 RLS or data isolation? | Q4 How is DB accessed in code? | Q5 Business-critical queries to document? | Q6 Tables needing special care?\n\n**Section 5 — Current State → ai/memory/decisions.md**\nQ1 Current state of codebase? | Q2 3 biggest pain points? | Q3 Actively being worked on? | Q4 Settled decisions not to revisit? | Q5 What has been tried and failed?\n\n## Phase 3 — Write Context Files\n\nMerge pre-fills + user answers. Write real prose — no placeholder comments.\nIf a file exists with real content, append rather than overwrite.\n\nFiles: ai/context/product.md | ai/context/architecture.md | ai/context/coding-standards.md | ai/context/database.md | ai/memory/decisions.md (append) | ai/context/orchestrator.md (update summaries)\n\n## Phase 4 — Confirm\n\n\`\`\`\n▸ SMASH OS · onboarded\n─────────────────────────────────────\n FILES WRITTEN\n · ai/context/product.md\n · ai/context/architecture.md\n · ai/context/coding-standards.md\n · ai/context/database.md\n · ai/memory/decisions.md (initial context appended)\n · ai/context/orchestrator.md (summaries updated)\n\n WHAT I NOW KNOW\n {2–4 sentence natural language summary}\n\n Run /smash-os:run to start your first pipeline.\n─────────────────────────────────────\n\`\`\`\n\n## Rules\n- Never ask for credentials\n- Always show pre-fill before asking\n- Never repeat a skipped question\n- Write real content — no placeholder comments\n- Re-runs append, never overwrite history\n`,
|
|
694
|
+
'smash-os-run': `---\nname: smash-os-run\ndescription: Trigger a SmashOS pipeline manually. Usage: /smash-os:run [type] where type is feature | bug-fix | weekly-improvement | security-audit | custom.\nallowed-tools: Bash, Read, Write, Edit, Glob, Grep\n---\n\n# /smash-os:run\n\nTrigger a SmashOS pipeline for this repo (local mode).\n\nAll pipelines execute inline — Claude Code IS the pipeline engine.\nLoad context from \`ai/context/\` and \`ai/memory/\` before starting.\n\n## Pipeline Types\n- \`feature\` — full 7-phase feature development\n- \`bug-fix\` — diagnose and fix a bug\n- \`security-audit\` — security review of recent changes\n- \`weekly-improvement\` — code quality and refactor pass\n- \`frontend-audit\` — UI/UX design review + component quality + Playwright tests (frontend projects only)\n- \`playwright\` — Frontend QA phase only: generate/review Playwright E2E tests (frontend projects only)\n- \`custom\` — user-defined pipeline (ask for description first)\n\n## Phase Sequence\n\nRun all 7 phases in order. 
For each phase adopt the role, do the work, output the result.\n\n**Phase 0 — Debate (Staff Engineer)**\n- Analyse from multiple architectural angles\n- Surface trade-offs and open questions\n\n**Phase 1 — Product Manager**\n- Define acceptance criteria and user stories\n- Output: spec with measurable outcomes\n\n**Phase 2 — Staff Engineer / Architecture**\n- Design implementation approach\n- Identify files to create/modify\n\n**Phase 3 — Senior Developer**\n- Write the actual code changes\n- Follow \`ai/context/coding-standards.md\`\n\n**Phase 4 — Security Engineer**\n- Review for auth, input validation, injection risks\n- Output: security sign-off or blockers\n\n**Phase 5 — QA Engineer**\n- Write test cases, identify edge cases\n- Output: test plan + pass/fail verdict\n\n**Phase 6 — DevOps**\n- Deployment impact, migrations, config changes\n- Output: deployment checklist\n\n## Phase Output Format\n\n\`\`\`\n▸ SMASH OS · phase {N} · {role}\n─────────────────────────────────────\n{output}\n─────────────────────────────────────\n\`\`\`\n\n## After All Phases\n\nSave key decisions to \`ai/memory/decisions.md\`.\n\nFinal output:\n\`\`\`\n▸ SMASH OS · pipeline complete [local]\n─────────────────────────────────────\n type {type}\n phases 7 / 7\n status completed\n─────────────────────────────────────\n\`\`\`\n\nIf enhancement is medium or high, also run \`/smash-os:extract-learnings\` after completion.\n`,
|
|
695
|
+
'smash-os-enhancement': `---\nname: smash-os-enhancement\ndescription: View or change the SmashOS enhancement level (off/light/medium/high). Controls how deeply superpowers and GSD frameworks integrate into role workflows.\nallowed-tools: Read, Write\n---\n\n# /smash-os:enhancement [level]\n\nView or set the SmashOS enhancement mode.\n\n## Steps\n1. Read the argument (if any). Valid values: \`off\`, \`light\`, \`medium\`, \`high\`\n2. If no argument: read \`.smash-os-enhancement\` (default: off) and display current level + description\n3. If argument provided: validate, write to \`.smash-os-enhancement\`, confirm change\n\n## Level Descriptions\n\n| Level | What it does |\n|---|---|\n| off | Roles operate standalone — no framework integration |\n| light | Roles surface hints for relevant superpowers skills |\n| medium | Structured workflows enforced: brainstorm → plan → TDD → verify + learning extraction |\n| high | Full GSD phase management + active learning extraction → feeds skill evolution |\n\n## Output Format\n\n\`\`\`\n▸ SMASH OS · enhancement mode\n─────────────────────────────────────\n level {level}\n {description}\n─────────────────────────────────────\n\`\`\`\n`,
|
|
696
|
+
'smash-os-extract-learnings': `---\nname: smash-os-extract-learnings\ndescription: Extract conventions, decisions, and lessons from the current session and save to ai/memory/. At high enhancement mode this runs automatically after each significant phase. Feeds into skill evolution.\nallowed-tools: Read, Write, Glob\n---\n\n# /smash-os:extract-learnings\n\nExtract and persist learnings from the current session.\n\n## Steps\n\n1. Review recent work in this session — what was built, what decisions were made, what friction was encountered\n2. Identify items in each category:\n - **Conventions discovered**: patterns or standards that emerged or were reinforced\n - **Decisions made**: architectural or product choices with rationale\n - **Lessons learned**: what went wrong or could be improved\n - **Role improvements**: behaviors that should be added/removed from a role definition\n3. Write to \`ai/memory/lessons.md\` (append) — lessons + conventions\n4. Write to \`ai/memory/decisions.md\` (append) — decisions with rationale\n5. If role improvements were identified:\n - Read the relevant \`ai/roles/<role-slug>.md\` file (create it if missing)\n - Propose the improvement inline\n - Ask: "Apply this improvement to the [Role] definition? (y/n)"\n - If yes: write the updated role file\n6. Output a brief summary of what was extracted\n\n## Output Format\n\n\`\`\`\n▸ SMASH OS · learnings extracted\n─────────────────────────────────────\n conventions {N}\n decisions {N}\n lessons {N}\n role updates {N applied | none}\n─────────────────────────────────────\n\`\`\`\n\n## Notes\n- Never overwrite existing entries — always append\n- Role improvements should be additive, not destructive\n- These files are read by skill-evolution — write clearly for future context\n- If nothing significant happened this session: output "No learnings to extract"\n`,
|
|
697
|
+
'smash-os-role-improve': `---\nname: smash-os-role-improve\ndescription: Role improvement pass — reads accumulated learnings from ai/memory/role-learnings/, applies insights back to each role definition. Gains new rules, prunes stale ones, consolidates duplicates, enhances the Enhancement Layer. Run manually or via the RoleImprovement scheduled task.\nallowed-tools: Read, Write, Edit\n---\n\n# /smash-os:role-improve\n\nRead all role learning logs. For each role with new entries, analyse and apply improvements back to the role definition.\n\n## Step 1 — Discover roles with learnings\n\nRead all files in \`ai/memory/role-learnings/\`. Skip any with no entries beyond the header (nothing after the \`---\` separator).\n\n## Step 2 — Analyse each learning log\n\nFor each role that has entries, extract:\n- **Gains** — new constraints or patterns that consistently improved output\n- **Prunes** — Hard Rules flagged as redundant, too slow, or consistently skipped\n- **Consolidations** — duplicate or near-duplicate rules that can be merged\n- **Enhancement Layer improvements** — which skills actually added value at which levels\n\n## Step 3 — Apply improvements to each role file\n\n1. Add new Hard Rules from Gains — each dated \`[YYYY-MM-DD]\`\n2. Remove pruned rules if flagged 3+ times; convert to comment if 1–2 times\n3. Consolidate duplicate rules\n4. Update Enhancement Layer skill mappings\n5. Append to \`## Improvement History\` (create if missing): \`- YYYY-MM-DD: {N gains, M prunes, K consolidations}\`\n\nDo not change Purpose, Responsibilities, Output Format, or Selective Context Load unless a learning entry explicitly proposes it.\n\n## Step 4 — Archive processed learnings\n\nAfter updating a role file, append an archive marker and move processed entries below it. 
New entries append above the archive.\n\n## Step 5 — Report\n\n\`\`\`\n▸ SMASH OS · role improvement pass\n─────────────────────────────────────\n {date}\n ROLES UPDATED {list with counts}\n ROLES SKIPPED {list — no new learnings}\n─────────────────────────────────────\n\`\`\`\n\n## Rules\n- Never remove a Hard Rule without 3+ learning entries flagging it\n- All new Hard Rules carry a derivation date\n- Idempotent — no new entries = no changes\n`,
|
|
698
|
+
'smash-os-evolve-roles': `---\nname: smash-os-evolve-roles\ndescription: Analyze accumulated learnings from ai/memory/ and propose targeted improvements to SmashOS role files. Bridges extract-learnings to skill evolution. Runs automatically weekly via Task Scheduler.\nallowed-tools: Read, Write, Glob\n---\n\n# /smash-os:evolve-roles\n\nAnalyze learnings accumulated across sessions and evolve role definitions based on real project experience.\n\n## Steps\n\n1. Read \`ai/memory/lessons.md\` — find entries tagged with role improvement suggestions (look for "Role improvements:", "role:", "[role improvement]" markers)\n2. Read \`ai/memory/decisions.md\` — find conventions that should be encoded into specific roles\n3. Skip any learning already marked with \`[applied YYYY-MM-DD]\`\n4. List all files in \`ai/roles/\` — project-specific role overrides\n - If \`ai/roles/\` is empty or missing: offer to scaffold defaults (staff-engineer, senior-developer, security-engineer)\n5. For each pending improvement, identify the target role and present:\n - Current relevant section (or "no file yet")\n - Proposed addition/change (be specific — not "be more careful", but "always check for stale migrations before running tests")\n - Ask: "Apply to [role-name].md? (y/n)"\n6. If approved: write/update \`ai/roles/<role-slug>.md\`, mark the source lesson as \`[applied {date}]\` in lessons.md\n7. After all project-level changes, ask: "Any improvements universal enough for the global skill? (y/n)"\n - If yes: list candidates, confirm each, then append to \`~/.claude/skills/smash-os-role/SKILL.md\` under a "## Project-Learned Behaviors" section\n8. 
Output summary\n\n## Output Format\n\n\`\`\`\n▸ SMASH OS · role evolution\n─────────────────────────────────────\n learnings reviewed {N}\n improvements found {N}\n applied to roles {N}\n global skill updated {yes | no}\n─────────────────────────────────────\n\`\`\`\n\n## Rules\n- Never remove existing role capabilities — only add or refine\n- Improvements must be specific and actionable, not vague\n- Mark applied learnings so they are not re-proposed on the next run\n- Global skill updates: only universal, project-agnostic behaviors (not "use Postgres migrations" — yes to "always confirm schema changes with Security Engineer before applying")\n- If no pending improvements: output "Roles are up to date — no changes needed"\n`,
|
|
699
|
+
};
|
|
700
|
+
|
|
701
|
+
// ─── Write files ──────────────────────────────────────────────────────────────

let written = 0;

// Print the green-check line for a freshly written file (with an optional dim
// annotation) and bump the running total shown in the final summary.
const logCreated = (label, note) => {
  const suffix = note ? chalk.dim(note) : '';
  console.log(` ${chalk.green('✓')} ${chalk.white(label)}${suffix}`);
  written += 1;
};

writeFile('.smash-os-mode', 'local\n');
logCreated('.smash-os-mode', ' (local)');

// Enhancement level falls back to "medium" when none was chosen.
const level = enhancement || 'medium';
writeFile('.smash-os-enhancement', `${level}\n`);
logCreated('.smash-os-enhancement', ` (${level})`);

writeFile('CLAUDE.md', claudeMd);
logCreated('CLAUDE.md');

// Scaffold the ai/ tree, but never clobber files the user already owns.
for (const [target, body] of Object.entries(aiFiles)) {
  if (existsSync(join(cwd, target))) {
    console.log(` ${chalk.dim('↷')} ${chalk.dim(`${target} (already exists — skipped)`)}`);
  } else {
    writeFile(target, body);
    logCreated(target);
  }
}
// ─── Frontend roles (opt-in) ──────────────────────────────────────────────────

if (isFrontend) {
  // Resolve the installed package root from this module's URL.
  // decodeURIComponent is required: URL pathnames are percent-encoded, so an
  // install path containing spaces (e.g. "My Projects") would otherwise keep
  // "%20" in it and every existsSync(srcPath) check below would fail,
  // silently skipping all frontend roles. The leading-slash strip handles
  // Windows paths ("/C:/..." → "C:/..."); drive letters may be either case.
  // NOTE(review): url.fileURLToPath would be the canonical conversion, but it
  // is not imported at the top of this file.
  const modulePath = decodeURIComponent(new URL(import.meta.url).pathname)
    .replace(/^\/([A-Za-z]:)/, '$1');
  const smashOsRoot = join(dirname(modulePath), '..');

  // src = path inside the published package; dest = path inside the user repo.
  const frontendRoles = [
    { src: 'ai/roles/ui-ux-designer.md', dest: 'ai/roles/ui-ux-designer.md' },
    { src: 'ai/roles/frontend-developer.md', dest: 'ai/roles/frontend-developer.md' },
    { src: 'ai/roles/frontend-qa.md', dest: 'ai/roles/frontend-qa.md' },
    { src: 'ai/workflows/frontend-audit.md', dest: 'ai/workflows/frontend-audit.md' },
  ];
  console.log('');
  console.log(chalk.dim(' Frontend team:'));
  for (const { src, dest } of frontendRoles) {
    const srcPath = join(smashOsRoot, src);
    if (existsSync(join(cwd, dest))) {
      // Never overwrite a role file the user has already customized.
      console.log(' ' + chalk.dim('↷') + ' ' + chalk.dim(`${dest} (already exists — skipped)`));
    } else if (existsSync(srcPath)) {
      writeFile(dest, readFileSync(srcPath, 'utf8'));
      console.log(' ' + chalk.green('✓') + ' ' + chalk.white(dest));
      written++;
    } else {
      // Package layout changed or partial install — warn instead of throwing.
      console.log(' ' + chalk.yellow('!') + ' ' + chalk.dim(`${src} — source not found, skipped`));
    }
  }
}
// Claude Code hooks — always overwritten so upgrades ship the latest hook logic.
const hooks = [
  ['.claude/hooks/smash-os-boot.mjs', bootHook],
  ['.claude/hooks/smash-os-sync.mjs', syncHook],
];
for (const [hookPath, hookSource] of hooks) {
  writeFile(hookPath, hookSource);
  console.log(` ${chalk.green('✓')} ${chalk.white(hookPath)}`);
  written += 1;
}

// Settings are merged into any pre-existing .claude/settings.json rather than
// replacing it; mergeSettingsJson reports which path was taken.
const didMerge = mergeSettingsJson(settingsJson);
const settingsNote = didMerge ? ' (merged with existing)' : ' (created)';
console.log(` ${chalk.green('✓')} ${chalk.white('.claude/settings.json')}${chalk.dim(settingsNote)}`);
written += 1;
console.log('');
// Install the slash-command skills globally under ~/.claude/skills/ so they
// are available in every Claude Code session, not just this repo.
for (const [skill, body] of Object.entries(localSkills)) {
  const dir = join(homedir(), '.claude', 'skills', skill);
  mkdirSync(dir, { recursive: true });
  writeFileSync(join(dir, 'SKILL.md'), body, 'utf8');
  const command = `/smash-os:${skill.replace('smash-os-', '')}`;
  console.log(` ${chalk.green('✓')} ${chalk.white(command)}${chalk.dim(' → ~/.claude/skills/')}`);
  written += 1;
}
// ─── Final summary ────────────────────────────────────────────────────────────

const summaryLines = [
  '',
  chalk.bold.green(' SmashOS harness installed!') + chalk.dim(` (${written} files — local mode)`),
  '',
  chalk.dim(' Open Claude Code in this directory:'),
  ` ${chalk.white(' claude .')}`,
  '',
  chalk.dim(' Fill in ai/context/ with your project details.'),
  chalk.dim(' Then run /smash-os:run to start a pipeline.'),
  '',
];
for (const line of summaryLines) {
  console.log(line);
}

// Optional OS-level scheduled tasks (set up only if the user opted in).
if (setupAutomation) {
  runAutomation(cwd);
}

// MCP server installation scales with the chosen enhancement level.
await checkAndInstallMcps(enhancement || 'medium');
|