ai-dev-cli 2.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md ADDED
@@ -0,0 +1,83 @@
1
+ # ai-dev-cli
2
+
3
+ Scaffold & audit tool for [Claude Code](https://docs.anthropic.com/en/docs/claude-code) projects.
4
+
5
+ One command to set up CLAUDE.md, permissions, custom commands, hooks, MCP config, and coding standards.
6
+
7
+ ## Quick Start
8
+
9
+ ```bash
10
+ npx ai-dev-cli init
11
+ ```
12
+
13
+ ## Init Levels
14
+
15
+ | Level | What you get |
16
+ |-------|-------------|
17
+ | 0 — Minimal | `CLAUDE.md` + `.claude/settings.json` (permissions) |
18
+ | 1 — Commands | + `/plan` `/review` `/commit` custom commands |
19
+ | 2 — Quality | + Hook scripts (bash firewall, format guard, size guard, dep guard) |
20
+ | 3 — Full | + MCP config, coding standards skill, approved-deps, ADR directory |
21
+
22
+ Each level includes all lower levels.
23
+
24
+ ## Usage
25
+
26
+ ```bash
27
+ # Interactive (prompts for project name, database, level)
28
+ npx ai-dev-cli init
29
+
30
+ # Non-interactive
31
+ npx ai-dev-cli init --level 2 --name my-app --database prisma-sqlite
32
+
33
+ # Compliance audit
34
+ npx ai-dev-cli check
35
+ npx ai-dev-cli check --fix
36
+
37
+ # Takeover an existing project
38
+ npx ai-dev-cli takeover
39
+ ```
40
+
41
+ ### Database Options
42
+
43
+ | Option | Stack |
44
+ |--------|-------|
45
+ | `prisma-sqlite` | Prisma + SQLite |
46
+ | `prisma-pg` | Prisma + PostgreSQL |
47
+ | `supabase-cloud` | Supabase (Cloud) |
48
+ | `supabase-cn` | Supabase (China, self-hosted) |
49
+
50
+ ## What It Generates
51
+
52
+ ```
53
+ your-project/
54
+ CLAUDE.md # AI assistant instructions
55
+ .claude/
56
+ settings.json # Permissions + hooks config
57
+ commands/ # Custom slash commands
58
+ plan.md
59
+ review.md
60
+ commit.md
61
+ debug.md
62
+ doc.md
63
+ hooks/ # Automation hooks
64
+ pre-bash-firewall.sh
65
+ post-edit-format.sh
66
+ ...
67
+ skills/ # Coding standards skill
68
+ code-standards/
69
+ SKILL.md
70
+ .mcp.json # MCP server config
71
+ docs/
72
+ approved-deps.md
73
+ adr/
74
+ ```
75
+
76
+ ## Requirements
77
+
78
+ - Node.js >= 18
79
+ - Git
80
+
81
+ ## License
82
+
83
+ MIT
package/dist/index.js ADDED
@@ -0,0 +1,752 @@
1
+ #!/usr/bin/env node
2
+ "use strict";
3
+ var __create = Object.create;
4
+ var __defProp = Object.defineProperty;
5
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
6
+ var __getOwnPropNames = Object.getOwnPropertyNames;
7
+ var __getProtoOf = Object.getPrototypeOf;
8
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
9
+ var __copyProps = (to, from, except, desc) => {
10
+ if (from && typeof from === "object" || typeof from === "function") {
11
+ for (let key of __getOwnPropNames(from))
12
+ if (!__hasOwnProp.call(to, key) && key !== except)
13
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
14
+ }
15
+ return to;
16
+ };
17
+ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
18
+ // If the importer is in node compatibility mode or this is not an ESM
19
+ // file that has been converted to a CommonJS file using a Babel-
20
+ // compatible transform (i.e. "__esModule" has not been set), then set
21
+ // "default" to the CommonJS "module.exports" for node compatibility.
22
+ isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
23
+ mod
24
+ ));
25
+
26
+ // src/index.ts
27
+ var import_commander = require("commander");
28
+
29
+ // src/commands/init.ts
30
+ var import_node_fs4 = __toESM(require("fs"));
31
+ var import_node_path3 = __toESM(require("path"));
32
+
33
+ // src/utils/logger.ts
34
+ var isColorSupported = process.stdout.isTTY === true && process.env.CI !== "true";
35
+ function colorize(code, text) {
36
+ return isColorSupported ? `\x1B[${code}m${text}\x1B[0m` : text;
37
+ }
38
+ var red = (s) => colorize(31, s);
39
+ var green = (s) => colorize(32, s);
40
+ var yellow = (s) => colorize(33, s);
41
+ var blue = (s) => colorize(34, s);
42
+ function info(msg) {
43
+ console.log(`${blue("[INFO]")} ${msg}`);
44
+ }
45
+ function ok(msg) {
46
+ console.log(`${green("[OK]")} ${msg}`);
47
+ }
48
+ function warn(msg) {
49
+ console.log(`${yellow("[WARN]")} ${msg}`);
50
+ }
51
+ function err(msg) {
52
+ console.error(`${red("[ERROR]")} ${msg}`);
53
+ }
54
+ function checkPass(msg) {
55
+ console.log(`${green("[PASS]")} ${msg}`);
56
+ }
57
+ function checkFail(msg) {
58
+ console.log(`${red("[FAIL]")} ${msg}`);
59
+ }
60
+ function checkWarn(msg) {
61
+ console.log(`${yellow("[WARN]")} ${msg}`);
62
+ }
63
+
64
+ // src/utils/file.ts
65
+ var import_node_fs = __toESM(require("fs"));
66
+ var import_node_path = __toESM(require("path"));
67
+ function safeWrite(filepath, content) {
68
+ const dir = import_node_path.default.dirname(filepath);
69
+ import_node_fs.default.mkdirSync(dir, { recursive: true });
70
+ if (import_node_fs.default.existsSync(filepath)) {
71
+ warn(`Skipped (already exists): ${filepath}`);
72
+ return false;
73
+ }
74
+ import_node_fs.default.writeFileSync(filepath, content, "utf-8");
75
+ ok(`Created: ${filepath}`);
76
+ return true;
77
+ }
78
+ function safeWriteExecutable(filepath, content) {
79
+ const created = safeWrite(filepath, content);
80
+ if (created && process.platform !== "win32") {
81
+ import_node_fs.default.chmodSync(filepath, 493);
82
+ }
83
+ return created;
84
+ }
85
+ function appendGitignore(entry) {
86
+ const gitignore = ".gitignore";
87
+ if (!import_node_fs.default.existsSync(gitignore)) {
88
+ import_node_fs.default.writeFileSync(gitignore, "", "utf-8");
89
+ }
90
+ const content = import_node_fs.default.readFileSync(gitignore, "utf-8");
91
+ const lines = content.split("\n");
92
+ if (!lines.includes(entry)) {
93
+ import_node_fs.default.appendFileSync(gitignore, `${entry}
94
+ `, "utf-8");
95
+ ok(`.gitignore += ${entry}`);
96
+ }
97
+ }
98
+ function readJsonFile(filepath) {
99
+ try {
100
+ const raw = import_node_fs.default.readFileSync(filepath, "utf-8");
101
+ return JSON.parse(raw);
102
+ } catch {
103
+ return null;
104
+ }
105
+ }
106
+ function mergeJsonFile(filepath, data) {
107
+ const existing = readJsonFile(filepath);
108
+ if (!existing) return;
109
+ const merged = deepMerge(existing, data);
110
+ import_node_fs.default.writeFileSync(filepath, JSON.stringify(merged, null, 2) + "\n", "utf-8");
111
+ ok(`${filepath} updated`);
112
+ }
113
+ function deepMerge(target, source) {
114
+ const result2 = { ...target };
115
+ for (const key of Object.keys(source)) {
116
+ if (isObject(target[key]) && isObject(source[key])) {
117
+ result2[key] = deepMerge(
118
+ target[key],
119
+ source[key]
120
+ );
121
+ } else {
122
+ result2[key] = source[key];
123
+ }
124
+ }
125
+ return result2;
126
+ }
127
+ function isObject(val) {
128
+ return val !== null && typeof val === "object" && !Array.isArray(val);
129
+ }
130
+
131
+ // src/utils/template.ts
132
+ var import_node_fs2 = __toESM(require("fs"));
133
+ var import_node_path2 = __toESM(require("path"));
134
+ function resolveTemplatesDir() {
135
+ let dir = __dirname;
136
+ for (let i = 0; i < 5; i++) {
137
+ const candidate = import_node_path2.default.join(dir, "templates");
138
+ if (import_node_fs2.default.existsSync(candidate)) return candidate;
139
+ dir = import_node_path2.default.dirname(dir);
140
+ }
141
+ throw new Error(
142
+ "Could not find templates directory. Ensure it exists alongside the package."
143
+ );
144
+ }
145
+ var TEMPLATES_DIR = resolveTemplatesDir();
146
+ function loadTemplate(name) {
147
+ const filepath = import_node_path2.default.join(TEMPLATES_DIR, name);
148
+ return import_node_fs2.default.readFileSync(filepath, "utf-8");
149
+ }
150
+ function renderTemplate(content, vars) {
151
+ return Object.entries(vars).reduce(
152
+ (result2, [key, value]) => result2.replaceAll(`{{${key}}}`, value),
153
+ content
154
+ );
155
+ }
156
+ function loadAndRender(name, vars) {
157
+ return renderTemplate(loadTemplate(name), vars);
158
+ }
159
+
160
+ // src/utils/detect.ts
161
+ var import_node_fs3 = __toESM(require("fs"));
162
+ function detectTechStack() {
163
+ const result2 = {
164
+ framework: "unknown",
165
+ language: "unknown",
166
+ runtime: "unknown",
167
+ database: "unknown"
168
+ };
169
+ const pkg = readJsonFile("package.json");
170
+ if (!pkg) return result2;
171
+ result2.runtime = "Node.js";
172
+ const deps = pkg.dependencies ?? {};
173
+ const devDeps = pkg.devDependencies ?? {};
174
+ if (import_node_fs3.default.existsSync("tsconfig.json") || devDeps["typescript"]) {
175
+ result2.language = "TypeScript";
176
+ } else {
177
+ result2.language = "JavaScript";
178
+ }
179
+ if (deps["next"]) result2.framework = "Next.js";
180
+ else if (deps["nuxt"]) result2.framework = "Nuxt";
181
+ else if (deps["react"]) result2.framework = "React";
182
+ else if (deps["vue"]) result2.framework = "Vue";
183
+ else if (deps["express"]) result2.framework = "Express";
184
+ if (deps["prisma"] || deps["@prisma/client"]) result2.database = "Prisma";
185
+ else if (deps["mongoose"]) result2.database = "MongoDB/Mongoose";
186
+ else if (deps["typeorm"]) result2.database = "TypeORM";
187
+ else if (deps["drizzle-orm"]) result2.database = "Drizzle";
188
+ else if (deps["sequelize"]) result2.database = "Sequelize";
189
+ return result2;
190
+ }
191
+ function extractNpmScripts() {
192
+ const pkg = readJsonFile("package.json");
193
+ if (!pkg || !pkg.scripts) return [];
194
+ const scripts = pkg.scripts;
195
+ return Object.keys(scripts).map((key) => `- \`npm run ${key}\` \u2014 ${key}`);
196
+ }
197
+ function extractDependencies() {
198
+ const pkg = readJsonFile("package.json");
199
+ if (!pkg) return [];
200
+ const deps = pkg.dependencies ?? {};
201
+ return Object.keys(deps).sort();
202
+ }
203
+ function detectDirectories() {
204
+ const candidates = [
205
+ "app",
206
+ "src",
207
+ "components",
208
+ "lib",
209
+ "services",
210
+ "pages",
211
+ "api",
212
+ "prisma",
213
+ "public",
214
+ "hooks",
215
+ "utils",
216
+ "types",
217
+ "middleware",
218
+ "routes",
219
+ "controllers",
220
+ "models",
221
+ "views"
222
+ ];
223
+ const found = [];
224
+ for (const d of candidates) {
225
+ if (import_node_fs3.default.existsSync(d) && import_node_fs3.default.statSync(d).isDirectory()) {
226
+ found.push(`/${d}`);
227
+ }
228
+ const srcPath = `src/${d}`;
229
+ if (import_node_fs3.default.existsSync(srcPath) && import_node_fs3.default.statSync(srcPath).isDirectory()) {
230
+ found.push(`/src/${d}`);
231
+ }
232
+ }
233
+ return found;
234
+ }
235
+ function commandExists(cmd) {
236
+ const { execSync: execSync2 } = require("child_process");
237
+ try {
238
+ execSync2(`command -v ${cmd}`, { stdio: "ignore" });
239
+ return true;
240
+ } catch {
241
+ return false;
242
+ }
243
+ }
244
+
245
+ // src/utils/prompt.ts
246
+ var import_node_readline = __toESM(require("readline"));
247
+ function ask(question, defaultValue) {
248
+ const rl = import_node_readline.default.createInterface({
249
+ input: process.stdin,
250
+ output: process.stdout
251
+ });
252
+ const label = defaultValue ? `${question} [${defaultValue}]: ` : `${question}: `;
253
+ return new Promise((resolve) => {
254
+ rl.question(label, (answer) => {
255
+ rl.close();
256
+ resolve(answer.trim() || defaultValue || "");
257
+ });
258
+ });
259
+ }
260
+ function select(question, options, defaultIndex = 0) {
261
+ const rl = import_node_readline.default.createInterface({
262
+ input: process.stdin,
263
+ output: process.stdout
264
+ });
265
+ console.log("");
266
+ console.log(question);
267
+ options.forEach((opt, i) => {
268
+ const marker = i === defaultIndex ? "\u276F" : " ";
269
+ console.log(` ${marker} ${i + 1}) ${opt}`);
270
+ });
271
+ console.log("");
272
+ return new Promise((resolve) => {
273
+ rl.question(`Select [1-${options.length}, default ${defaultIndex + 1}]: `, (answer) => {
274
+ rl.close();
275
+ const trimmed = answer.trim();
276
+ if (!trimmed) {
277
+ resolve(defaultIndex);
278
+ return;
279
+ }
280
+ const num = parseInt(trimmed, 10);
281
+ if (isNaN(num) || num < 1 || num > options.length) {
282
+ resolve(defaultIndex);
283
+ return;
284
+ }
285
+ resolve(num - 1);
286
+ });
287
+ });
288
+ }
289
+
290
+ // src/commands/init.ts
291
+ var VERSION = "2.0.0";
292
+ var DATABASE_OPTIONS = [
293
+ "prisma-sqlite",
294
+ "prisma-pg",
295
+ "supabase-cloud",
296
+ "supabase-cn"
297
+ ];
298
+ var DATABASE_LABELS = {
299
+ "prisma-sqlite": "Prisma + SQLite \u2014 zero-config local dev",
300
+ "prisma-pg": "Prisma + PostgreSQL \u2014 traditional relational DB",
301
+ "supabase-cloud": "Supabase (Cloud) \u2014 BaaS with auth/storage/realtime",
302
+ "supabase-cn": "Supabase (China) \u2014 self-hosted for China region"
303
+ };
304
+ var DATABASE_CONFIGS = {
305
+ "prisma-sqlite": {
306
+ TECH_STACK: "Next.js 14 App Router + TypeScript + Prisma + SQLite + Tailwind CSS",
307
+ DB_COMMANDS: "- `npm run db:migrate` \u2014 Prisma migrate dev\n- `npm run db:studio` \u2014 Prisma Studio data browser",
308
+ DB_ARCHITECTURE: "`/prisma` \u2014 Schema definitions and migrations",
309
+ DB_DEPS: "prisma / @prisma/client / zod",
310
+ DB_BOUNDARY_RULE: "DB queries go through Service layer only \u2014 no Prisma in components/API Routes",
311
+ CALL_DIRECTION: "components \u2192 hooks \u2192 API \u2192 services \u2192 prisma"
312
+ },
313
+ "prisma-pg": {
314
+ TECH_STACK: "Next.js 14 App Router + TypeScript + Prisma + PostgreSQL + Tailwind CSS",
315
+ DB_COMMANDS: "- `npm run db:migrate` \u2014 Prisma migrate dev\n- `npm run db:studio` \u2014 Prisma Studio data browser",
316
+ DB_ARCHITECTURE: "`/prisma` \u2014 Schema definitions and migrations",
317
+ DB_DEPS: "prisma / @prisma/client / zod",
318
+ DB_BOUNDARY_RULE: "DB queries go through Service layer only \u2014 no Prisma in components/API Routes",
319
+ CALL_DIRECTION: "components \u2192 hooks \u2192 API \u2192 services \u2192 prisma"
320
+ },
321
+ "supabase-cloud": {
322
+ TECH_STACK: "Next.js 14 App Router + TypeScript + Supabase + Tailwind CSS",
323
+ DB_COMMANDS: "- `npx supabase start` \u2014 Local dev server\n- `npx supabase db push` \u2014 Push schema changes",
324
+ DB_ARCHITECTURE: "`/supabase` \u2014 Migrations and edge functions",
325
+ DB_DEPS: "@supabase/supabase-js / @supabase/ssr / zod",
326
+ DB_BOUNDARY_RULE: "Supabase client calls go through Service layer only \u2014 no direct supabase calls in components",
327
+ CALL_DIRECTION: "components \u2192 hooks \u2192 API \u2192 services \u2192 supabase"
328
+ },
329
+ "supabase-cn": {
330
+ TECH_STACK: "Next.js 14 App Router + TypeScript + Supabase (self-hosted) + Tailwind CSS",
331
+ DB_COMMANDS: "- `npx supabase start` \u2014 Local dev server\n- `npx supabase db push` \u2014 Push schema changes",
332
+ DB_ARCHITECTURE: "`/supabase` \u2014 Migrations and edge functions",
333
+ DB_DEPS: "@supabase/supabase-js / @supabase/ssr / zod",
334
+ DB_BOUNDARY_RULE: "Supabase client calls go through Service layer only \u2014 no direct supabase calls in components",
335
+ CALL_DIRECTION: "components \u2192 hooks \u2192 API \u2192 services \u2192 supabase"
336
+ }
337
+ };
338
+ function resolveDatabaseOption(input) {
339
+ if (DATABASE_OPTIONS.includes(input)) {
340
+ return input;
341
+ }
342
+ err(`Invalid database option: ${input}`);
343
+ err(`Valid options: ${DATABASE_OPTIONS.join(", ")}`);
344
+ process.exit(1);
345
+ }
346
+ async function runInit(options) {
347
+ console.log("");
348
+ console.log("======================================");
349
+ console.log(` AI Dev Lifecycle Init v${VERSION}`);
350
+ console.log("======================================");
351
+ console.log("");
352
+ checkPrerequisites();
353
+ const defaultName = import_node_path3.default.basename(process.cwd());
354
+ const projectName = options.name || await ask("Project name", defaultName);
355
+ let dbOption;
356
+ if (options.database) {
357
+ dbOption = resolveDatabaseOption(options.database);
358
+ } else {
359
+ const labels = DATABASE_OPTIONS.map((key) => DATABASE_LABELS[key]);
360
+ const idx = await select("Select database solution:", labels, 0);
361
+ dbOption = DATABASE_OPTIONS[idx];
362
+ }
363
+ const dbConfig = DATABASE_CONFIGS[dbOption];
364
+ let level;
365
+ if (options.level !== void 0) {
366
+ level = parseInt(options.level, 10);
367
+ } else {
368
+ console.log("");
369
+ console.log("Select initialization level:");
370
+ console.log(" 0) Minimal \u2014 CLAUDE.md + basic permissions");
371
+ console.log(" 1) Commands \u2014 + /plan /review /commit");
372
+ console.log(" 2) Quality \u2014 + Hooks automation");
373
+ console.log(" 3) Full \u2014 + MCP + Skills + docs");
374
+ console.log("");
375
+ const input = await ask("Level [0-3]", "1");
376
+ level = parseInt(input, 10);
377
+ }
378
+ if (isNaN(level) || level < 0 || level > 3) {
379
+ err(`Invalid level: ${options.level}`);
380
+ process.exit(1);
381
+ }
382
+ console.log("");
383
+ info(`Database: ${dbOption}`);
384
+ info(`Starting Level ${level} initialization ...`);
385
+ console.log("");
386
+ const vars = {
387
+ PROJECT_NAME: projectName,
388
+ ...dbConfig
389
+ };
390
+ initLevel0(vars);
391
+ if (level >= 1) initLevel1();
392
+ if (level >= 2) initLevel2();
393
+ if (level >= 3) initLevel3(vars);
394
+ console.log("");
395
+ console.log("======================================");
396
+ ok(`Initialization complete! (Level ${level})`);
397
+ console.log("======================================");
398
+ console.log("");
399
+ console.log("Next steps:");
400
+ console.log(" 1. Edit CLAUDE.md \u2014 fill in project description and actual commands");
401
+ console.log(" 2. Run 'claude' to start Claude Code");
402
+ if (level < 3) {
403
+ console.log(` 3. Want more? Re-run: npx ai-dev-cli init --level 3`);
404
+ }
405
+ if (level >= 2) {
406
+ console.log(` 3. Test hooks: echo '{"tool_input":{"command":"sudo rm"}}' | .claude/hooks/pre-bash-firewall.sh`);
407
+ }
408
+ console.log("");
409
+ }
410
+ function checkPrerequisites() {
411
+ const missing = [];
412
+ if (!commandExists("git")) missing.push("git");
413
+ if (!commandExists("node")) missing.push("node");
414
+ if (missing.length > 0) {
415
+ err(`Missing required tools: ${missing.join(", ")}`);
416
+ err("Please install them before running this script.");
417
+ process.exit(1);
418
+ }
419
+ if (!commandExists("claude")) {
420
+ warn("Claude Code CLI not detected. Config files will still be generated.");
421
+ }
422
+ ok("Prerequisites check passed");
423
+ }
424
+ function initLevel0(vars) {
425
+ info("=== Level 0: Minimal Setup ===");
426
+ const claudeContent = loadAndRender("level-0/CLAUDE.md.tpl", vars);
427
+ safeWrite("CLAUDE.md", claudeContent);
428
+ const settingsContent = loadTemplate("level-0/settings.json.tpl");
429
+ safeWrite(".claude/settings.json", settingsContent);
430
+ appendGitignore("CLAUDE.local.md");
431
+ appendGitignore(".claude/settings.local.json");
432
+ appendGitignore(".claude/memory.jsonl");
433
+ }
434
+ function initLevel1() {
435
+ info("=== Level 1: Core Commands ===");
436
+ safeWrite(".claude/commands/plan.md", loadTemplate("level-1/plan.md"));
437
+ safeWrite(".claude/commands/review.md", loadTemplate("level-1/review.md"));
438
+ safeWrite(".claude/commands/commit.md", loadTemplate("level-1/commit.md"));
439
+ }
440
+ function initLevel2() {
441
+ info("=== Level 2: Quality Gates (Hooks) ===");
442
+ const hooks = [
443
+ "pre-bash-firewall.sh",
444
+ "post-edit-format.sh",
445
+ "pre-compact-preserve.sh",
446
+ "post-write-size-guard.sh",
447
+ "pre-bash-dep-guard.sh"
448
+ ];
449
+ for (const hook of hooks) {
450
+ safeWriteExecutable(
451
+ `.claude/hooks/${hook}`,
452
+ loadTemplate(`level-2/${hook}`)
453
+ );
454
+ }
455
+ const settingsPath = ".claude/settings.json";
456
+ if (import_node_fs4.default.existsSync(settingsPath)) {
457
+ const existing = readJsonFile(settingsPath);
458
+ if (existing && !existing["hooks"]) {
459
+ const hooksConfig = JSON.parse(loadTemplate("level-2/hooks-config.json"));
460
+ mergeJsonFile(settingsPath, hooksConfig);
461
+ } else {
462
+ warn("settings.json already has hooks config \u2014 skipped");
463
+ }
464
+ }
465
+ }
466
+ function initLevel3(vars) {
467
+ info("=== Level 3: Full System ===");
468
+ safeWrite(".mcp.json", loadTemplate("level-3/mcp.json"));
469
+ safeWrite(".claude/commands/debug.md", loadTemplate("level-3/debug.md"));
470
+ safeWrite(".claude/commands/doc.md", loadTemplate("level-3/doc.md"));
471
+ safeWrite(
472
+ ".claude/skills/code-standards/SKILL.md",
473
+ loadTemplate("level-3/code-standards.md")
474
+ );
475
+ safeWrite("docs/approved-deps.md", loadAndRender("level-3/approved-deps.md", vars));
476
+ const adrKeep = "docs/adr/.gitkeep";
477
+ if (!import_node_fs4.default.existsSync(adrKeep)) {
478
+ import_node_fs4.default.mkdirSync("docs/adr", { recursive: true });
479
+ import_node_fs4.default.writeFileSync(adrKeep, "");
480
+ ok("Created: docs/adr/.gitkeep");
481
+ }
482
+ }
483
+
484
+ // src/commands/check.ts
485
+ var import_node_fs5 = __toESM(require("fs"));
486
+ var import_node_path4 = __toESM(require("path"));
487
+ var import_node_child_process = require("child_process");
488
+ var result = { pass: 0, fail: 0, warn: 0 };
489
+ function pass(msg) {
490
+ checkPass(msg);
491
+ result.pass++;
492
+ }
493
+ function fail(msg) {
494
+ checkFail(msg);
495
+ result.fail++;
496
+ }
497
+ function warning(msg) {
498
+ checkWarn(msg);
499
+ result.warn++;
500
+ }
501
+ async function runCheck(options) {
502
+ result.pass = 0;
503
+ result.fail = 0;
504
+ result.warn = 0;
505
+ console.log("");
506
+ console.log("====== AI Dev Lifecycle Check ======");
507
+ console.log("");
508
+ checkClaudeMd(options.fix);
509
+ checkSettingsJson(options.fix);
510
+ checkHookPermissions(options.fix);
511
+ checkGitignore(options.fix);
512
+ checkEnvSafety();
513
+ checkLongFiles();
514
+ checkBannedDeps();
515
+ checkEnvFilesTracked();
516
+ console.log("");
517
+ console.log(
518
+ `====== Results: ${result.pass} passed, ${result.fail} failed, ${result.warn} warnings ======`
519
+ );
520
+ if (options.ci && result.fail > 0) {
521
+ process.exit(1);
522
+ }
523
+ }
524
+ function checkClaudeMd(fix) {
525
+ if (import_node_fs5.default.existsSync("CLAUDE.md")) {
526
+ pass("CLAUDE.md exists");
527
+ } else {
528
+ fail("CLAUDE.md does not exist");
529
+ if (fix) {
530
+ info("Auto-fix: Run `npx ai-dev-cli init --level 0` to create CLAUDE.md");
531
+ }
532
+ }
533
+ }
534
+ function checkSettingsJson(fix) {
535
+ const settingsPath = ".claude/settings.json";
536
+ if (!import_node_fs5.default.existsSync(settingsPath)) {
537
+ fail(".claude/settings.json does not exist");
538
+ if (fix) {
539
+ info("Auto-fix: Run `npx ai-dev-cli init --level 0` to create settings.json");
540
+ }
541
+ return;
542
+ }
543
+ const data = readJsonFile(settingsPath);
544
+ if (data) {
545
+ pass(".claude/settings.json format is valid");
546
+ } else {
547
+ fail(".claude/settings.json has invalid JSON format");
548
+ return;
549
+ }
550
+ const permissions = data["permissions"];
551
+ const deny = permissions?.["deny"];
552
+ if (deny && deny.length > 0) {
553
+ pass("deny rules configured");
554
+ } else {
555
+ warning("settings.json has no deny rules");
556
+ }
557
+ }
558
+ function checkHookPermissions(fix) {
559
+ const hooksDir = ".claude/hooks";
560
+ if (!import_node_fs5.default.existsSync(hooksDir)) return;
561
+ const files = import_node_fs5.default.readdirSync(hooksDir).filter((f) => f.endsWith(".sh"));
562
+ for (const file of files) {
563
+ const hookPath = import_node_path4.default.join(hooksDir, file);
564
+ try {
565
+ import_node_fs5.default.accessSync(hookPath, import_node_fs5.default.constants.X_OK);
566
+ pass(`Hook executable: ${file}`);
567
+ } catch {
568
+ fail(`Hook missing execute permission: ${hookPath}`);
569
+ if (fix && process.platform !== "win32") {
570
+ import_node_fs5.default.chmodSync(hookPath, 493);
571
+ info(`Auto-fix: chmod +x ${hookPath}`);
572
+ }
573
+ }
574
+ }
575
+ }
576
+ function checkGitignore(fix) {
577
+ const entries = [
578
+ "CLAUDE.local.md",
579
+ ".claude/settings.local.json",
580
+ ".claude/memory.jsonl"
581
+ ];
582
+ for (const entry of entries) {
583
+ if (import_node_fs5.default.existsSync(".gitignore") && import_node_fs5.default.readFileSync(".gitignore", "utf-8").split("\n").includes(entry)) {
584
+ pass(`.gitignore contains ${entry}`);
585
+ } else {
586
+ warning(`.gitignore missing ${entry}`);
587
+ if (fix) {
588
+ appendGitignore(entry);
589
+ }
590
+ }
591
+ }
592
+ }
593
+ function checkEnvSafety() {
594
+ if (!import_node_fs5.default.existsSync(".env")) {
595
+ pass("No .env file present (or not tracked)");
596
+ return;
597
+ }
598
+ try {
599
+ (0, import_node_child_process.execSync)("git ls-files --error-unmatch .env", { stdio: "ignore" });
600
+ fail(".env file is tracked by git (security risk!)");
601
+ } catch {
602
+ pass(".env exists but is not tracked by git");
603
+ }
604
+ }
605
+ function checkLongFiles() {
606
+ const longFiles = [];
607
+ walkDir(".", (filepath) => {
608
+ if (!filepath.match(/\.(ts|tsx)$/) || filepath.includes("node_modules") || filepath.includes(".next") || filepath.includes("dist")) {
609
+ return;
610
+ }
611
+ const content = import_node_fs5.default.readFileSync(filepath, "utf-8");
612
+ const lines = content.split("\n").length;
613
+ if (lines > 300) {
614
+ longFiles.push(`${filepath} (${lines} lines)`);
615
+ }
616
+ });
617
+ if (longFiles.length > 0) {
618
+ warning(`Files exceeding 300 lines:
619
+ ${longFiles.map((f) => ` - ${f}`).join("\n")}`);
620
+ } else {
621
+ pass("All .ts/.tsx files are within 300 lines");
622
+ }
623
+ }
624
+ function checkBannedDeps() {
625
+ const pkg = readJsonFile("package.json");
626
+ if (!pkg) return;
627
+ const deps = pkg["dependencies"] ?? {};
628
+ const devDeps = pkg["devDependencies"] ?? {};
629
+ let foundBanned = false;
630
+ for (const dep of ["moment", "jquery"]) {
631
+ if (deps[dep] || devDeps[dep]) {
632
+ fail(`Banned dependency found: ${dep}`);
633
+ foundBanned = true;
634
+ }
635
+ }
636
+ if (deps["lodash"]) {
637
+ warning("Using full lodash package \u2014 consider lodash-es with submodule imports");
638
+ }
639
+ if (!foundBanned) {
640
+ pass("No banned dependencies");
641
+ }
642
+ }
643
+ function checkEnvFilesTracked() {
644
+ if (!import_node_fs5.default.existsSync(".")) return;
645
+ const envFiles = import_node_fs5.default.readdirSync(".").filter(
646
+ (f) => f.startsWith(".env.") && f !== ".env.example" && f !== ".env.sample"
647
+ );
648
+ for (const envFile of envFiles) {
649
+ try {
650
+ (0, import_node_child_process.execSync)(`git ls-files --error-unmatch ${envFile}`, { stdio: "ignore" });
651
+ fail(`${envFile} is tracked by git`);
652
+ } catch {
653
+ }
654
+ }
655
+ }
656
+ function walkDir(dir, callback) {
657
+ if (!import_node_fs5.default.existsSync(dir)) return;
658
+ const entries = import_node_fs5.default.readdirSync(dir, { withFileTypes: true });
659
+ for (const entry of entries) {
660
+ const fullPath = import_node_path4.default.join(dir, entry.name);
661
+ if (entry.isDirectory()) {
662
+ if (["node_modules", ".next", "dist", ".git", ".claude"].includes(entry.name)) continue;
663
+ walkDir(fullPath, callback);
664
+ } else {
665
+ callback(fullPath);
666
+ }
667
+ }
668
+ }
669
+
670
+ // src/commands/takeover.ts
671
+ var import_node_fs6 = __toESM(require("fs"));
672
+ var import_node_path5 = __toESM(require("path"));
673
+ var VERSION2 = "2.0.0";
674
+ async function runTakeover() {
675
+ console.log("");
676
+ console.log("======================================");
677
+ console.log(` AI Dev Lifecycle Takeover v${VERSION2}`);
678
+ console.log("======================================");
679
+ console.log("");
680
+ const missing = [];
681
+ if (!commandExists("git")) missing.push("git");
682
+ if (!commandExists("node")) missing.push("node");
683
+ if (missing.length > 0) {
684
+ console.error(`Missing required tools: ${missing.join(", ")}`);
685
+ process.exit(1);
686
+ }
687
+ info("=== Analyzing existing project ===");
688
+ console.log("");
689
+ const projectName = import_node_path5.default.basename(process.cwd()).replace(/[^a-zA-Z0-9._-]/g, "");
690
+ const tech = detectTechStack();
691
+ ok(`Tech stack: ${tech.framework} + ${tech.language} + ${tech.runtime} + ${tech.database}`);
692
+ const scripts = extractNpmScripts();
693
+ const scriptsSection = scripts.length > 0 ? scripts.join("\n") : "[TODO: Fill in development commands]";
694
+ const dirs = detectDirectories();
695
+ const dirsSection = dirs.length > 0 ? dirs.map((d) => `- \`${d}\``).join("\n") : "[TODO: Fill in directory structure]";
696
+ let techLine = tech.framework;
697
+ if (tech.language !== "unknown") techLine += ` + ${tech.language}`;
698
+ if (tech.database !== "unknown") techLine += ` + ${tech.database}`;
699
+ const claudeContent = loadAndRender("takeover/CLAUDE.md.tpl", {
700
+ PROJECT_NAME: projectName,
701
+ TECH_LINE: techLine,
702
+ SCRIPTS_SECTION: scriptsSection,
703
+ DIRS_SECTION: dirsSection
704
+ });
705
+ if (safeWrite("CLAUDE.md", claudeContent)) {
706
+ ok("Created: CLAUDE.md (DRAFT \u2014 review required)");
707
+ }
708
+ const deps = extractDependencies();
709
+ if (deps.length > 0 && !import_node_fs6.default.existsSync("docs/approved-deps.md")) {
710
+ import_node_fs6.default.mkdirSync("docs", { recursive: true });
711
+ const content = [
712
+ "# Approved Dependencies (auto-generated from package.json)",
713
+ "",
714
+ "Review and mark banned items.",
715
+ "",
716
+ "## Current Dependencies",
717
+ ...deps.map((d) => `- ${d}`),
718
+ "",
719
+ "## Banned",
720
+ "[TODO: Add based on project conventions]",
721
+ ""
722
+ ].join("\n");
723
+ import_node_fs6.default.writeFileSync("docs/approved-deps.md", content, "utf-8");
724
+ ok("Created: docs/approved-deps.md (DRAFT \u2014 review required)");
725
+ }
726
+ safeWrite(".claude/settings.json", loadTemplate("level-0/settings.json.tpl"));
727
+ appendGitignore("CLAUDE.local.md");
728
+ appendGitignore(".claude/settings.local.json");
729
+ appendGitignore(".claude/memory.jsonl");
730
+ console.log("");
731
+ console.log("======================================");
732
+ ok("Takeover drafts generated");
733
+ console.log("======================================");
734
+ console.log("");
735
+ warn("All generated files are DRAFTS. You MUST review and refine:");
736
+ console.log(" 1. CLAUDE.md \u2014 add project description, business context, coding rules");
737
+ console.log(" 2. docs/approved-deps.md \u2014 mark banned dependencies");
738
+ console.log(" 3. After running 'claude', use this prompt to let AI help refine:");
739
+ console.log("");
740
+ console.log(' "Read the entire codebase, understand the architecture and module');
741
+ console.log(" relationships, then help me refine CLAUDE.md with architecture");
742
+ console.log(' descriptions and key rules."');
743
+ console.log("");
744
+ }
745
+
746
+ // src/index.ts
747
+ var program = new import_commander.Command();
748
+ program.name("ai-dev-cli").description("AI Dev Lifecycle \u2014 scaffold & audit tool for Claude Code projects").version("2.0.0");
749
+ program.command("init").description("Initialize AI dev system (Level 0-3)").option("-l, --level <0-3>", "initialization level").option("-n, --name <name>", "project name").option("-d, --database <option>", "database: prisma-sqlite|prisma-pg|supabase-cloud|supabase-cn").action(runInit);
750
+ program.command("check").description("Compliance audit").option("--fix", "auto-fix issues").option("--ci", "CI mode \u2014 exit non-zero on failure").action(runCheck);
751
+ program.command("takeover").description("Analyze and scaffold existing project").action(runTakeover);
752
+ program.parse();
package/package.json ADDED
@@ -0,0 +1,47 @@
1
+ {
2
+ "name": "ai-dev-cli",
3
+ "version": "2.0.0",
4
+ "description": "AI Dev Lifecycle — scaffold & audit tool for Claude Code projects",
5
+ "bin": {
6
+ "ai-dev-cli": "dist/index.js"
7
+ },
8
+ "files": [
9
+ "dist",
10
+ "templates"
11
+ ],
12
+ "scripts": {
13
+ "build": "tsup",
14
+ "dev": "tsx src/index.ts",
15
+ "test": "vitest run",
16
+ "test:watch": "vitest",
17
+ "lint": "tsc --noEmit"
18
+ },
19
+ "dependencies": {
20
+ "commander": "^13.0.0"
21
+ },
22
+ "devDependencies": {
23
+ "@types/node": "^22.0.0",
24
+ "tsup": "^8.0.0",
25
+ "tsx": "^4.0.0",
26
+ "typescript": "^5.7.0",
27
+ "vitest": "^3.0.0"
28
+ },
29
+ "engines": {
30
+ "node": ">=18"
31
+ },
32
+ "license": "MIT",
33
+ "repository": {
34
+ "type": "git",
35
+ "url": "git+https://github.com/wh759705-creator/ai-dev-lifecycle.git",
36
+ "directory": "cli"
37
+ },
38
+ "homepage": "https://github.com/wh759705-creator/ai-dev-lifecycle#readme",
39
+ "keywords": [
40
+ "claude-code",
41
+ "ai-dev",
42
+ "scaffold",
43
+ "cli",
44
+ "claude",
45
+ "ai-coding"
46
+ ]
47
+ }
@@ -0,0 +1,71 @@
1
+ # Project: {{PROJECT_NAME}}
2
+
3
+ [One-line project description].
4
+ Tech stack: {{TECH_STACK}}.
5
+
6
+ ## Commands
7
+
8
+ - `npm run dev` — Dev server (port 3000)
9
+ - `npm run build` — Production build
10
+ - `npm run test` — Run Vitest unit tests
11
+ - `npm run test:e2e` — Run Playwright E2E tests
12
+ - `npm run lint` — ESLint check
13
+ {{DB_COMMANDS}}
14
+
15
+ ## Architecture
16
+
17
+ - `/app` — Page routes and layouts (App Router)
18
+ - `/app/api` — API Route Handlers
19
+ - `/components` — React components
20
+ - `/components/ui` — Base UI components (shadcn/ui)
21
+ - `/lib` — Utilities, client instances, auth helpers
22
+ - `/services` — Business logic layer
23
+ - {{DB_ARCHITECTURE}}
24
+ - `/types` — Shared TypeScript type definitions
25
+ - `/hooks` — Custom React Hooks
26
+ - `/__tests__` — Test files
27
+
28
+ ## Key Rules
29
+
30
+ - NEVER commit .env files or any files containing secrets
31
+ - NEVER use `any` type — TypeScript strict mode
32
+ - NEVER rewrite entire modules at once — small steps, verify each
33
+ - MUST use named exports, no default export
34
+ - MUST validate all API inputs with zod
35
+ - MUST use unified AppError class for error handling
36
+ - MUST run related tests after code changes
37
+ - File naming: kebab-case; Component naming: PascalCase
38
+ - Single file must not exceed 300 lines — split if exceeded
39
+ - {{DB_BOUNDARY_RULE}}
40
+
41
+ ## Work Mode
42
+
43
+ - Produce a plan before coding (list files to modify and change points), wait for approval
44
+ - When modifying shared types/interfaces, search all references first, list impact scope
45
+ - Read the full content of any file BEFORE modifying it
46
+ - Search for existing similar functionality BEFORE implementing new features
47
+ - Verify that any referenced types/functions actually exist in the project (no fabrication)
48
+ - Commit at each working state — don't wait until everything is done
49
+
50
+ ## Module Boundaries
51
+
52
+ - Components (/components) never call the database directly
53
+ - API Routes only parse requests and format responses — business logic lives in Service layer
54
+ - Service layer is the sole home of business logic
55
+ - Call direction (one-way): {{CALL_DIRECTION}}
56
+
57
+ ## Project Glossary
58
+
59
+ <!-- Project-specific terms to prevent AI misunderstanding. Format: term — meaning in this project -->
60
+
61
+ ## Dependency Policy
62
+
63
+ Check docs/approved-deps.md before introducing new dependencies. Unlisted deps require justification.
64
+
65
+ ## Common Pitfalls (update when AI makes mistakes)
66
+
67
+ <!-- Format: - Describe the issue and correct approach (date) -->
68
+
69
+ ## Compact Preservation
70
+
71
+ Preserve: modified file paths, current task progress, test commands, architecture decisions, failed attempts and reasons
@@ -0,0 +1,33 @@
1
+ {
2
+ "permissions": {
3
+ "allow": [
4
+ "Bash(npm run lint)",
5
+ "Bash(npm run test:*)",
6
+ "Bash(npm run build)",
7
+ "Bash(npx prettier:*)",
8
+ "Bash(npx tsc:*)",
9
+ "Bash(git log:*)",
10
+ "Bash(git diff:*)",
11
+ "Bash(git status)",
12
+ "Bash(git branch:*)",
13
+ "Bash(git checkout:*)",
14
+ "Bash(git stash:*)"
15
+ ],
16
+ "deny": [
17
+ "Bash(sudo:*)",
18
+ "Bash(curl:*|*sh)",
19
+ "Bash(wget:*|*sh)",
20
+ "Bash(rm -rf /)",
21
+ "Bash(git push:*--force*)",
22
+ "Bash(git reset --hard)",
23
+ "Bash(chmod 777:*)",
24
+ "Read(.env)",
25
+ "Read(.env.*)",
26
+ "Read(**/*.key)",
27
+ "Read(**/*.pem)"
28
+ ]
29
+ },
30
+ "env": {
31
+ "BASH_DEFAULT_TIMEOUT_MS": "30000"
32
+ }
33
+ }
@@ -0,0 +1,17 @@
1
+ ---
2
+ description: "Smart commit — generate semantic commit message and execute"
3
+ allowed-tools: Bash, Read
4
+ ---
5
+
6
+ Execute a smart commit:
7
+
8
+ 1. Run `git status` and `git diff --staged` to see changes
9
+ 2. If no staged files, `git add` relevant files individually (never use `git add .`)
10
+ 3. Analyze changes and generate Conventional Commits format message:
11
+ feat|fix|refactor|docs|test|chore(scope): description
12
+
13
+ Rules:
14
+ - scope = module name or key part of file path
15
+ - Description in English, under 50 chars, explain WHY not WHAT
16
+ - Never commit .env or key files
17
+ - If changes span more than 10 files, suggest splitting into multiple commits
@@ -0,0 +1,16 @@
1
+ ---
2
+ description: "Requirements analysis and plan design — produce a plan, wait for approval before coding"
3
+ allowed-tools: Read, Grep, Glob, Bash
4
+ ---
5
+
6
+ Analyze requirement "$ARGUMENTS" and design an implementation plan:
7
+
8
+ 1. Use AskUserQuestion to confirm requirement details and boundaries
9
+ 2. Search the codebase for existing similar features or reusable modules
10
+ 3. Output the implementation plan:
11
+ - Overview (one sentence)
12
+ - Impact scope (new files, modified files, dependency changes)
13
+ - Implementation steps (specific to file and function level)
14
+ - Risk points and mitigations
15
+
16
+ IMPORTANT: After outputting the plan, you MUST wait for user confirmation. Do NOT start implementation on your own.
@@ -0,0 +1,19 @@
1
+ ---
2
+ description: "Code review — focus on correctness, security, and edge cases"
3
+ allowed-tools: Read, Grep, Glob, Bash
4
+ ---
5
+
6
+ Review recent git changes:
7
+
8
+ 1. Run `git diff HEAD` to see changes
9
+ 2. Review dimensions:
10
+ - Correctness: logic, edge cases, null defense, async error handling
11
+ - Security: injection risks, input validation, sensitive info exposure
12
+ - Performance: unnecessary re-renders, N+1 queries, large list pagination
13
+ - Standards: compliance with CLAUDE.md rules
14
+
15
+ 3. Output format — categorize by severity:
16
+ - **Must fix**: security vulnerabilities, logic errors, data loss risk
17
+ - **Suggested improvement**: code smells, missing error handling, perf issues
18
+ - **Minor suggestion**: naming optimization, style consistency
19
+ Each item includes: filename:line, problem description, fix suggestion
@@ -0,0 +1,24 @@
1
+ {
2
+ "hooks": {
3
+ "PreToolUse": [{
4
+ "matcher": "Bash",
5
+ "hooks": [
6
+ {"type": "command", "command": ".claude/hooks/pre-bash-firewall.sh"},
7
+ {"type": "command", "command": ".claude/hooks/pre-bash-dep-guard.sh"}
8
+ ]
9
+ }],
10
+ "PostToolUse": [{
11
+ "matcher": "Write|Edit",
12
+ "hooks": [
13
+ {"type": "command", "command": ".claude/hooks/post-edit-format.sh"},
14
+ {"type": "command", "command": ".claude/hooks/post-write-size-guard.sh"}
15
+ ]
16
+ }],
17
+ "PreCompact": [{
18
+ "matcher": "auto",
19
+ "hooks": [
20
+ {"type": "command", "command": ".claude/hooks/pre-compact-preserve.sh"}
21
+ ]
22
+ }]
23
+ }
24
+ }
@@ -0,0 +1,16 @@
1
+ #!/usr/bin/env bash
2
+ set -euo pipefail
3
+
4
+ input=$(cat)
5
+ file_path=$(echo "$input" | jq -r '.tool_input.file_path // ""')
6
+
7
+ # Only format project source files
8
+ case "$file_path" in
9
+ *.ts|*.tsx|*.js|*.jsx|*.json)
10
+ if command -v npx &>/dev/null; then
11
+ npx prettier --write "$file_path" 2>/dev/null || true
12
+ fi
13
+ ;;
14
+ esac
15
+
16
+ exit 0
@@ -0,0 +1,18 @@
1
+ #!/usr/bin/env bash
2
+ set -euo pipefail
3
+ input=$(cat)
4
+ file_path=$(echo "$input" | jq -r '.tool_input.file_path // ""')
5
+
6
+ # Only check TS/TSX files
7
+ case "$file_path" in
8
+ *.ts|*.tsx)
9
+ if [ -f "$file_path" ]; then
10
+ lines=$(wc -l < "$file_path")
11
+ if [ "$lines" -gt 300 ]; then
12
+ echo "WARNING: $file_path has ${lines} lines, exceeding the 300-line limit. Consider splitting." >&2
13
+ exit 1
14
+ fi
15
+ fi
16
+ ;;
17
+ esac
18
+ exit 0
@@ -0,0 +1,43 @@
1
+ #!/usr/bin/env bash
2
+ set -euo pipefail
3
+ input=$(cat)
4
+ cmd=$(echo "$input" | jq -r '.tool_input.command // ""')
5
+
6
+ # Only intercept package install commands
7
+ if ! echo "$cmd" | grep -qE '(npm install|npm add|yarn add|pnpm add)'; then
8
+ exit 0
9
+ fi
10
+
11
+ # Extract all package names (strip flags and versions)
12
+ pkgs=$(echo "$cmd" | sed -E 's/^(npm install|npm add|yarn add|pnpm add)[[:space:]]+//' | tr ' ' '\n' | grep -v '^-')
13
+
14
+ if [ -z "$pkgs" ]; then
15
+ exit 0
16
+ fi
17
+
18
+ # Pass 1: Check all packages against banned list (blocking)
19
+ banned=("moment" "jquery" "lodash")
20
+ for pkg in $pkgs; do
21
+ pkg=$(echo "$pkg" | sed 's/@.*//')
22
+ [ -z "$pkg" ] && continue
23
+ for b in "${banned[@]}"; do
24
+ if [ "$pkg" = "$b" ]; then
25
+ echo "BLOCKED: $pkg is in the banned dependency list. See docs/approved-deps.md for alternatives." >&2
26
+ exit 2
27
+ fi
28
+ done
29
+ done
30
+
31
+ # Pass 2: Check allowlist (non-blocking warning)
32
+ if [ -f "docs/approved-deps.md" ]; then
33
+ for pkg in $pkgs; do
34
+ pkg=$(echo "$pkg" | sed 's/@.*//')
35
+ [ -z "$pkg" ] && continue
36
+ if ! grep -q "$pkg" docs/approved-deps.md; then
37
+ echo "WARNING: $pkg is not in the approved dependency list. Evaluate before adding to docs/approved-deps.md." >&2
38
+ exit 1
39
+ fi
40
+ done
41
+ fi
42
+
43
+ exit 0
@@ -0,0 +1,28 @@
1
+ #!/usr/bin/env bash
2
+ set -euo pipefail
3
+
4
+ # Read tool input from stdin
5
+ input=$(cat)
6
+ cmd=$(echo "$input" | jq -r '.tool_input.command // ""')
7
+
8
+ # Dangerous command patterns
9
+ dangerous_patterns=(
10
+ 'rm -rf /'
11
+ 'git reset --hard'
12
+ 'git push.*--force'
13
+ 'git clean -f'
14
+ 'sudo '
15
+ 'chmod 777'
16
+ 'curl.*|.*sh'
17
+ 'wget.*|.*sh'
18
+ )
19
+
20
+ for pattern in "${dangerous_patterns[@]}"; do
21
+ if echo "$cmd" | grep -qE "$pattern"; then
22
+ echo "BLOCKED: Dangerous command pattern detected '$pattern'" >&2
23
+ echo "Command: $cmd" >&2
24
+ exit 2
25
+ fi
26
+ done
27
+
28
+ exit 0
@@ -0,0 +1,10 @@
1
+ #!/usr/bin/env bash
2
+ cat <<'EOF'
3
+ {
4
+ "hookSpecificOutput": {
5
+ "hookEventName": "PreCompact",
6
+ "additionalContext": "MUST preserve during compaction: 1. All modified file paths 2. Current task progress and next steps 3. Test commands and results 4. Architecture decisions 5. Failed attempts and reasons"
7
+ }
8
+ }
9
+ EOF
10
+ exit 0
@@ -0,0 +1,24 @@
1
+ # Approved Dependencies
2
+
3
+ Check this list before introducing new dependencies. Unlisted deps require evaluation.
4
+
5
+ ## Core Framework
6
+ - next / react / react-dom / typescript
7
+
8
+ ## Database
9
+ - {{DB_DEPS}}
10
+
11
+ ## UI
12
+ - tailwindcss / @radix-ui/* / lucide-react
13
+
14
+ ## Utilities
15
+ - date-fns (moment.js is BANNED)
16
+ - clsx / tailwind-merge
17
+
18
+ ## Testing
19
+ - vitest / @playwright/test / @testing-library/react
20
+
21
+ ## Banned
22
+ - moment.js — use date-fns instead
23
+ - lodash (full package) — use native methods or lodash-es submodule imports
24
+ - jquery
@@ -0,0 +1,37 @@
1
+ ---
2
+ name: code-standards
3
+ description: >
4
+ Coding standards guard. Automatically referenced during code generation.
5
+ Ensures TypeScript type safety, API input validation, unified error handling, and naming conventions.
6
+ ---
7
+
8
+ # Coding Standards
9
+
10
+ Check these rules before generating code:
11
+
12
+ ## TypeScript
13
+ - All functions must have complete type annotations — no `any`
14
+ - Use named exports — no default export
15
+ - Interface names without `I` prefix (use `User` not `IUser`)
16
+
17
+ ## API Route
18
+ - All inputs validated with zod schema
19
+ - Unified response format: `{ data, error, meta }`
20
+ - Errors use AppError class with error code
21
+
22
+ ## React Components
23
+ - Functional components + hooks
24
+ - Props defined as a separate type (ComponentNameProps)
25
+ - State management priority: useState → useReducer → Zustand → Server State
26
+
27
+ ## Files
28
+ - Single file ≤ 300 lines
29
+ - File naming: kebab-case; Component naming: PascalCase
30
+ - Test files colocated with source or in __tests__ directory
31
+
32
+ ## Template Reference
33
+ | Scenario | Reference |
34
+ |----------|-----------|
35
+ | API Route | See existing implementations under /app/api/ |
36
+ | Service | See existing implementations under /services/ |
37
+ | Component | See existing implementations under /components/ |
@@ -0,0 +1,15 @@
1
+ ---
2
+ description: "Bug diagnosis — systematically locate root cause"
3
+ allowed-tools: Read, Grep, Glob, Bash, Edit
4
+ ---
5
+
6
+ Diagnose the problem: "$ARGUMENTS"
7
+
8
+ Steps:
9
+ 1. Collect info: read error logs, find related source code
10
+ 2. Check recent git changes: `git log --oneline -10`
11
+ 3. Trace the call chain upward from the error point
12
+ 4. Rank possible root causes by probability
13
+ 5. Output: root cause analysis + fix plan + verification method
14
+
15
+ IMPORTANT: If unsure about the root cause, say so explicitly. Do NOT speculatively modify code.
@@ -0,0 +1,15 @@
1
+ ---
2
+ description: "Check and update project documentation to stay in sync with code"
3
+ allowed-tools: Read, Write, Edit, Glob, Bash
4
+ ---
5
+
6
+ Check which docs need updating after code changes:
7
+
8
+ 1. `git diff --name-only` to see changed files
9
+ 2. Check:
10
+ - New module → update CLAUDE.md "Architecture" section
11
+ - New command → update CLAUDE.md "Commands" section
12
+ - New dependency → update docs/approved-deps.md
13
+ - Major decision → suggest creating an ADR in docs/adr/
14
+ 3. Execute necessary updates
15
+ 4. If no updates needed, say so explicitly
@@ -0,0 +1,12 @@
1
+ {
2
+ "mcpServers": {
3
+ "context7": {
4
+ "command": "npx",
5
+ "args": ["-y", "@upstash/context7-mcp@latest"]
6
+ },
7
+ "playwright": {
8
+ "command": "npx",
9
+ "args": ["@playwright/mcp@latest"]
10
+ }
11
+ }
12
+ }
@@ -0,0 +1,25 @@
1
+ # Project: {{PROJECT_NAME}}
2
+
3
+ [TODO: One-line project description].
4
+ Tech stack: {{TECH_LINE}} (verify and adjust as needed).
5
+
6
+ ## Commands
7
+
8
+ {{SCRIPTS_SECTION}}
9
+
10
+ ## Architecture
11
+
12
+ {{DIRS_SECTION}}
13
+
14
+ ## Key Rules
15
+
16
+ - NEVER commit .env files
17
+ - MUST run related tests after code changes
18
+ - Produce a plan before coding, wait for approval
19
+ [TODO: Add project-specific rules]
20
+
21
+ ## Common Pitfalls (update when AI makes mistakes)
22
+
23
+ ## Compact Preservation
24
+
25
+ Preserve: modified file paths, current task progress, test commands, architecture decisions