@codifier/cli 2.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (40) hide show
  1. package/README.md +511 -0
  2. package/commands/init.md +3 -0
  3. package/commands/onboard.md +3 -0
  4. package/commands/research.md +3 -0
  5. package/dist/cli/add.d.ts +5 -0
  6. package/dist/cli/add.d.ts.map +1 -0
  7. package/dist/cli/add.js +23 -0
  8. package/dist/cli/add.js.map +1 -0
  9. package/dist/cli/bin/codifier.d.ts +7 -0
  10. package/dist/cli/bin/codifier.d.ts.map +1 -0
  11. package/dist/cli/bin/codifier.js +43 -0
  12. package/dist/cli/bin/codifier.js.map +1 -0
  13. package/dist/cli/detect.d.ts +12 -0
  14. package/dist/cli/detect.d.ts.map +1 -0
  15. package/dist/cli/detect.js +35 -0
  16. package/dist/cli/detect.js.map +1 -0
  17. package/dist/cli/doctor.d.ts +5 -0
  18. package/dist/cli/doctor.d.ts.map +1 -0
  19. package/dist/cli/doctor.js +58 -0
  20. package/dist/cli/doctor.js.map +1 -0
  21. package/dist/cli/init.d.ts +6 -0
  22. package/dist/cli/init.d.ts.map +1 -0
  23. package/dist/cli/init.js +93 -0
  24. package/dist/cli/init.js.map +1 -0
  25. package/dist/cli/update.d.ts +5 -0
  26. package/dist/cli/update.d.ts.map +1 -0
  27. package/dist/cli/update.js +25 -0
  28. package/dist/cli/update.js.map +1 -0
  29. package/dist/index.js +87 -0
  30. package/package.json +40 -0
  31. package/skills/brownfield-onboard/SKILL.md +107 -0
  32. package/skills/initialize-project/SKILL.md +145 -0
  33. package/skills/initialize-project/templates/evals-prompt.md +39 -0
  34. package/skills/initialize-project/templates/requirements-prompt.md +44 -0
  35. package/skills/initialize-project/templates/roadmap-prompt.md +44 -0
  36. package/skills/initialize-project/templates/rules-prompt.md +34 -0
  37. package/skills/research-analyze/SKILL.md +131 -0
  38. package/skills/research-analyze/templates/query-generation-prompt.md +61 -0
  39. package/skills/research-analyze/templates/synthesis-prompt.md +67 -0
  40. package/skills/shared/codifier-tools.md +123 -0
@@ -0,0 +1,35 @@
1
+ /**
2
+ * Environment detection — identifies which LLM client is in use
3
+ * by checking for client-specific directories.
4
+ */
5
+ import { existsSync } from 'fs';
6
+ import { join } from 'path';
7
/**
 * Environment detection — identifies which LLM client is in use
 * by checking for client-specific directories.
 *
 * @param {string} [cwd=process.cwd()] - Project root to probe.
 * @returns {{clientType: string, commandsDir: string, mcpConfigPath: string}}
 */
export function detectEnvironment(cwd = process.cwd()) {
  // Known clients, probed in priority order; the first marker directory
  // present under cwd determines the client profile.
  const clients = [
    {
      marker: '.claude',
      profile: {
        clientType: 'claude-code',
        commandsDir: join(cwd, '.claude', 'commands'),
        mcpConfigPath: join(cwd, '.mcp.json'),
      },
    },
    {
      marker: '.cursor',
      profile: {
        clientType: 'cursor',
        commandsDir: join(cwd, '.cursor', 'rules'),
        mcpConfigPath: join(cwd, '.cursor', 'mcp.json'),
      },
    },
    {
      marker: '.windsurf',
      profile: {
        clientType: 'windsurf',
        commandsDir: join(cwd, '.windsurf', 'commands'),
        mcpConfigPath: join(cwd, '.windsurf', 'mcp.json'),
      },
    },
  ];

  const match = clients.find(({ marker }) => existsSync(join(cwd, marker)));
  if (match) {
    return match.profile;
  }

  // No known client detected — fall back to a generic .codifier layout.
  return {
    clientType: 'generic',
    commandsDir: join(cwd, '.codifier', 'commands'),
    mcpConfigPath: join(cwd, '.codifier', 'mcp.json'),
  };
}
35
+ //# sourceMappingURL=detect.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"detect.js","sourceRoot":"","sources":["../../cli/detect.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,EAAE,UAAU,EAAE,MAAM,IAAI,CAAC;AAChC,OAAO,EAAE,IAAI,EAAE,MAAM,MAAM,CAAC;AAU5B,MAAM,UAAU,iBAAiB,CAAC,MAAc,OAAO,CAAC,GAAG,EAAE;IAC3D,IAAI,UAAU,CAAC,IAAI,CAAC,GAAG,EAAE,SAAS,CAAC,CAAC,EAAE,CAAC;QACrC,OAAO;YACL,UAAU,EAAE,aAAa;YACzB,WAAW,EAAE,IAAI,CAAC,GAAG,EAAE,SAAS,EAAE,UAAU,CAAC;YAC7C,aAAa,EAAE,IAAI,CAAC,GAAG,EAAE,WAAW,CAAC;SACtC,CAAC;IACJ,CAAC;IAED,IAAI,UAAU,CAAC,IAAI,CAAC,GAAG,EAAE,SAAS,CAAC,CAAC,EAAE,CAAC;QACrC,OAAO;YACL,UAAU,EAAE,QAAQ;YACpB,WAAW,EAAE,IAAI,CAAC,GAAG,EAAE,SAAS,EAAE,OAAO,CAAC;YAC1C,aAAa,EAAE,IAAI,CAAC,GAAG,EAAE,SAAS,EAAE,UAAU,CAAC;SAChD,CAAC;IACJ,CAAC;IAED,IAAI,UAAU,CAAC,IAAI,CAAC,GAAG,EAAE,WAAW,CAAC,CAAC,EAAE,CAAC;QACvC,OAAO;YACL,UAAU,EAAE,UAAU;YACtB,WAAW,EAAE,IAAI,CAAC,GAAG,EAAE,WAAW,EAAE,UAAU,CAAC;YAC/C,aAAa,EAAE,IAAI,CAAC,GAAG,EAAE,WAAW,EAAE,UAAU,CAAC;SAClD,CAAC;IACJ,CAAC;IAED,OAAO;QACL,UAAU,EAAE,SAAS;QACrB,WAAW,EAAE,IAAI,CAAC,GAAG,EAAE,WAAW,EAAE,UAAU,CAAC;QAC/C,aAAa,EAAE,IAAI,CAAC,GAAG,EAAE,WAAW,EAAE,UAAU,CAAC;KAClD,CAAC;AACJ,CAAC"}
@@ -0,0 +1,5 @@
1
+ /**
2
+ * `codifier doctor` — verify installation health.
3
+ */
4
+ export declare function runDoctor(): Promise<void>;
5
+ //# sourceMappingURL=doctor.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"doctor.d.ts","sourceRoot":"","sources":["../../cli/doctor.ts"],"names":[],"mappings":"AAAA;;GAEG;AAWH,wBAAsB,SAAS,IAAI,OAAO,CAAC,IAAI,CAAC,CAiD/C"}
@@ -0,0 +1,58 @@
1
+ /**
2
+ * `codifier doctor` — verify installation health.
3
+ */
4
+ import { existsSync, readFileSync } from 'fs';
5
+ import { join } from 'path';
6
// Skill files that `codifier init` installs; doctor verifies each exists
// and is non-empty.
const REQUIRED_SKILLS = [
  'initialize-project/SKILL.md',
  'brownfield-onboard/SKILL.md',
  'research-analyze/SKILL.md',
];

/**
 * `codifier doctor` — verify installation health.
 *
 * Checks, in order: the local config file, each required skill file, and
 * (when a config exists) reachability of the configured MCP server.
 * Exits with code 1 when any check fails.
 *
 * @returns {Promise<void>}
 */
export async function runDoctor() {
  const cwd = process.cwd();
  const configPath = join(cwd, '.codifier', 'config.json');
  let healthy = true;

  console.log('\nCodifier Doctor\n');

  // Config presence
  const hasConfig = existsSync(configPath);
  if (hasConfig) {
    console.log('✓ .codifier/config.json found');
  } else {
    console.error('✗ .codifier/config.json not found — run `codifier init` first');
    healthy = false;
  }

  // Each required skill must exist and contain non-whitespace content
  for (const skillFile of REQUIRED_SKILLS) {
    const fullPath = join(cwd, '.codifier', 'skills', skillFile);
    const present =
      existsSync(fullPath) && readFileSync(fullPath, 'utf8').trim().length > 0;
    if (present) {
      console.log(`✓ .codifier/skills/${skillFile}`);
    } else {
      console.error(`✗ Missing or empty: .codifier/skills/${skillFile}`);
      healthy = false;
    }
  }

  // MCP connectivity — only attempted when a config is available
  if (hasConfig) {
    try {
      const config = JSON.parse(readFileSync(configPath, 'utf8'));
      console.log('\nChecking MCP connectivity...');
      const response = await fetch(`${config.serverUrl}/health`);
      if (response.ok) {
        console.log('✓ MCP server reachable');
      } else {
        console.warn(`⚠ Health check returned ${response.status}`);
        healthy = false;
      }
    } catch {
      // Network failure or malformed config — reported, not fatal here.
      console.warn('⚠ Could not reach MCP server');
      healthy = false;
    }
  }

  console.log(healthy ? '\n✅ All checks passed\n' : '\n⚠ Some checks failed — see above\n');
  if (!healthy) process.exit(1);
}
58
+ //# sourceMappingURL=doctor.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"doctor.js","sourceRoot":"","sources":["../../cli/doctor.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,OAAO,EAAE,UAAU,EAAE,YAAY,EAAE,MAAM,IAAI,CAAC;AAC9C,OAAO,EAAE,IAAI,EAAE,MAAM,MAAM,CAAC;AAE5B,MAAM,eAAe,GAAG;IACtB,6BAA6B;IAC7B,6BAA6B;IAC7B,2BAA2B;CAC5B,CAAC;AAEF,MAAM,CAAC,KAAK,UAAU,SAAS;IAC7B,MAAM,GAAG,GAAG,OAAO,CAAC,GAAG,EAAE,CAAC;IAC1B,MAAM,UAAU,GAAG,IAAI,CAAC,GAAG,EAAE,WAAW,EAAE,aAAa,CAAC,CAAC;IACzD,IAAI,OAAO,GAAG,IAAI,CAAC;IAEnB,OAAO,CAAC,GAAG,CAAC,qBAAqB,CAAC,CAAC;IAEnC,eAAe;IACf,IAAI,CAAC,UAAU,CAAC,UAAU,CAAC,EAAE,CAAC;QAC5B,OAAO,CAAC,KAAK,CAAC,+DAA+D,CAAC,CAAC;QAC/E,OAAO,GAAG,KAAK,CAAC;IAClB,CAAC;SAAM,CAAC;QACN,OAAO,CAAC,GAAG,CAAC,+BAA+B,CAAC,CAAC;IAC/C,CAAC;IAED,oBAAoB;IACpB,KAAK,MAAM,SAAS,IAAI,eAAe,EAAE,CAAC;QACxC,MAAM,QAAQ,GAAG,IAAI,CAAC,GAAG,EAAE,WAAW,EAAE,QAAQ,EAAE,SAAS,CAAC,CAAC;QAC7D,IAAI,CAAC,UAAU,CAAC,QAAQ,CAAC,IAAI,YAAY,CAAC,QAAQ,EAAE,MAAM,CAAC,CAAC,IAAI,EAAE,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;YAChF,OAAO,CAAC,KAAK,CAAC,wCAAwC,SAAS,EAAE,CAAC,CAAC;YACnE,OAAO,GAAG,KAAK,CAAC;QAClB,CAAC;aAAM,CAAC;YACN,OAAO,CAAC,GAAG,CAAC,sBAAsB,SAAS,EAAE,CAAC,CAAC;QACjD,CAAC;IACH,CAAC;IAED,yBAAyB;IACzB,IAAI,UAAU,CAAC,UAAU,CAAC,EAAE,CAAC;QAC3B,IAAI,CAAC;YACH,MAAM,MAAM,GAAG,IAAI,CAAC,KAAK,CAAC,YAAY,CAAC,UAAU,EAAE,MAAM,CAAC,CAGzD,CAAC;YACF,OAAO,CAAC,GAAG,CAAC,gCAAgC,CAAC,CAAC;YAC9C,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC,GAAG,MAAM,CAAC,SAAS,SAAS,CAAC,CAAC;YAC3D,IAAI,QAAQ,CAAC,EAAE,EAAE,CAAC;gBAChB,OAAO,CAAC,GAAG,CAAC,wBAAwB,CAAC,CAAC;YACxC,CAAC;iBAAM,CAAC;gBACN,OAAO,CAAC,IAAI,CAAC,2BAA2B,QAAQ,CAAC,MAAM,EAAE,CAAC,CAAC;gBAC3D,OAAO,GAAG,KAAK,CAAC;YAClB,CAAC;QACH,CAAC;QAAC,MAAM,CAAC;YACP,OAAO,CAAC,IAAI,CAAC,8BAA8B,CAAC,CAAC;YAC7C,OAAO,GAAG,KAAK,CAAC;QAClB,CAAC;IACH,CAAC;IAED,OAAO,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,yBAAyB,CAAC,CAAC,CAAC,sCAAsC,CAAC,CAAC;IAC1F,IAAI,CAAC,OAAO;QAAE,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;AAChC,CAAC"}
@@ -0,0 +1,6 @@
1
+ /**
2
+ * `codifier init` — one-time scaffolder.
3
+ * Copies skills, slash commands, and writes MCP config.
4
+ */
5
+ export declare function runInit(): Promise<void>;
6
+ //# sourceMappingURL=init.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"init.d.ts","sourceRoot":"","sources":["../../cli/init.ts"],"names":[],"mappings":"AAAA;;;GAGG;AA0BH,wBAAsB,OAAO,IAAI,OAAO,CAAC,IAAI,CAAC,CA+D7C"}
@@ -0,0 +1,93 @@
1
+ /**
2
+ * `codifier init` — one-time scaffolder.
3
+ * Copies skills, slash commands, and writes MCP config.
4
+ */
5
+ import { mkdirSync, cpSync, writeFileSync, existsSync } from 'fs';
6
+ import { join, dirname } from 'path';
7
+ import { fileURLToPath } from 'url';
8
+ import * as readline from 'readline';
9
+ import { detectEnvironment } from './detect.js';
10
// ESM modules have no __dirname; reconstruct it from import.meta.url so the
// relative path math below works the same as it would under CommonJS.
const __dirname = dirname(fileURLToPath(import.meta.url));
// In the published npm package, skills/ is at the package root (sibling of dist/)
// When running from dist/cli/, the package root is two levels up
const PACKAGE_ROOT = join(__dirname, '..', '..');
const SKILLS_SOURCE = join(PACKAGE_ROOT, 'skills');
const COMMANDS_SOURCE = join(PACKAGE_ROOT, 'commands');
16
/**
 * Ask a single question on stdin and resolve with the trimmed answer.
 * A fresh readline interface is opened for the question and closed as soon
 * as the answer arrives.
 *
 * @param {string} question - Text shown to the user.
 * @returns {Promise<string>} The user's answer, whitespace-trimmed.
 */
function prompt(question) {
  return new Promise((resolve) => {
    const rl = readline.createInterface({ input: process.stdin, output: process.stdout });
    rl.question(question, (answer) => {
      rl.close();
      resolve(answer.trim());
    });
  });
}
25
/**
 * `codifier init` — one-time scaffolder.
 *
 * Detects the host LLM client, copies bundled skills and slash commands into
 * place, collects server credentials, writes both the local config and the
 * client-specific MCP config, and finishes with a best-effort health check.
 *
 * @returns {Promise<void>}
 */
export async function runInit() {
  const cwd = process.cwd();
  const env = detectEnvironment(cwd);

  console.log(`\nCodifier Init — detected client: ${env.clientType}\n`);

  // 1. Skills into the shared .codifier/skills/ directory.
  copyTree(
    SKILLS_SOURCE,
    join(cwd, '.codifier', 'skills'),
    '✓ Skills copied to .codifier/skills/',
    `⚠ Skills source not found at ${SKILLS_SOURCE} — skipping`,
  );

  // 2. Slash commands into the client-specific location.
  copyTree(
    COMMANDS_SOURCE,
    env.commandsDir,
    `✓ Commands copied to ${env.commandsDir}`,
    `⚠ Commands source not found at ${COMMANDS_SOURCE} — skipping`,
  );

  // 3. Collect server credentials interactively.
  const serverUrl = await prompt('Codifier MCP server URL (e.g., https://codifier-mcp.fly.dev): ');
  const apiKey = await prompt('Codifier API key: ');

  // 4. Persist .codifier/config.json (note: stores the API key in plaintext
  // in the project tree — callers should gitignore .codifier/).
  const configDir = join(cwd, '.codifier');
  mkdirSync(configDir, { recursive: true });
  writeFileSync(
    join(configDir, 'config.json'),
    JSON.stringify({ serverUrl, apiKey, installedAt: new Date().toISOString() }, null, 2),
  );
  console.log('✓ Config saved to .codifier/config.json');

  // 5. Client-specific MCP wiring.
  writeFileSync(env.mcpConfigPath, JSON.stringify(buildMcpConfig(serverUrl, apiKey), null, 2));
  console.log(`✓ MCP config written to ${env.mcpConfigPath}`);

  // 6. Best-effort connectivity probe — failures warn but never abort init.
  console.log('\nVerifying MCP connectivity...');
  try {
    const response = await fetch(`${serverUrl}/health`);
    if (response.ok) {
      console.log('✓ MCP server reachable');
    } else {
      console.warn(`⚠ Health check returned ${response.status} — check your server URL`);
    }
  } catch {
    console.warn('⚠ Could not reach MCP server — check the URL and ensure the server is running');
  }

  // 7. Summary.
  console.log('\n✅ Codifier installed successfully!\n');
  console.log('Available skills:');
  console.log(' • Initialize Project → /init');
  console.log(' • Brownfield Onboard → /onboard');
  console.log(' • Research & Analyze → /research');
  console.log('\nRun /init in your LLM client to start your first project.\n');
}

/**
 * Ensure `destination` exists, then recursively copy `source` into it when
 * the source is present; otherwise emit the "missing" warning.
 */
function copyTree(source, destination, okMessage, missingMessage) {
  mkdirSync(destination, { recursive: true });
  if (existsSync(source)) {
    cpSync(source, destination, { recursive: true });
    console.log(okMessage);
  } else {
    console.warn(missingMessage);
  }
}
83
/**
 * Build the client MCP configuration pointing at the Codifier server's SSE
 * endpoint, authenticated with a bearer token.
 *
 * @param {string} serverUrl - Base URL of the Codifier MCP server.
 * @param {string} apiKey - Bearer token sent on every MCP request.
 * @returns {{mcpServers: {codifier: {url: string, headers: {Authorization: string}}}}}
 */
function buildMcpConfig(serverUrl, apiKey) {
  const codifier = {
    url: `${serverUrl}/sse`,
    headers: { Authorization: `Bearer ${apiKey}` },
  };
  return { mcpServers: { codifier } };
}
93
+ //# sourceMappingURL=init.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"init.js","sourceRoot":"","sources":["../../cli/init.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,EAAE,SAAS,EAAE,MAAM,EAAE,aAAa,EAAE,UAAU,EAAE,MAAM,IAAI,CAAC;AAClE,OAAO,EAAE,IAAI,EAAE,OAAO,EAAE,MAAM,MAAM,CAAC;AACrC,OAAO,EAAE,aAAa,EAAE,MAAM,KAAK,CAAC;AACpC,OAAO,KAAK,QAAQ,MAAM,UAAU,CAAC;AACrC,OAAO,EAAE,iBAAiB,EAAE,MAAM,aAAa,CAAC;AAEhD,MAAM,SAAS,GAAG,OAAO,CAAC,aAAa,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC;AAE1D,kFAAkF;AAClF,iEAAiE;AACjE,MAAM,YAAY,GAAG,IAAI,CAAC,SAAS,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;AACjD,MAAM,aAAa,GAAG,IAAI,CAAC,YAAY,EAAE,QAAQ,CAAC,CAAC;AACnD,MAAM,eAAe,GAAG,IAAI,CAAC,YAAY,EAAE,UAAU,CAAC,CAAC;AAEvD,SAAS,MAAM,CAAC,QAAgB;IAC9B,MAAM,EAAE,GAAG,QAAQ,CAAC,eAAe,CAAC,EAAE,KAAK,EAAE,OAAO,CAAC,KAAK,EAAE,MAAM,EAAE,OAAO,CAAC,MAAM,EAAE,CAAC,CAAC;IACtF,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,EAAE;QAC7B,EAAE,CAAC,QAAQ,CAAC,QAAQ,EAAE,CAAC,MAAM,EAAE,EAAE;YAC/B,EAAE,CAAC,KAAK,EAAE,CAAC;YACX,OAAO,CAAC,MAAM,CAAC,IAAI,EAAE,CAAC,CAAC;QACzB,CAAC,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;AACL,CAAC;AAED,MAAM,CAAC,KAAK,UAAU,OAAO;IAC3B,MAAM,GAAG,GAAG,OAAO,CAAC,GAAG,EAAE,CAAC;IAC1B,MAAM,GAAG,GAAG,iBAAiB,CAAC,GAAG,CAAC,CAAC;IAEnC,OAAO,CAAC,GAAG,CAAC,sCAAsC,GAAG,CAAC,UAAU,IAAI,CAAC,CAAC;IAEtE,kDAAkD;IAClD,MAAM,UAAU,GAAG,IAAI,CAAC,GAAG,EAAE,WAAW,EAAE,QAAQ,CAAC,CAAC;IACpD,SAAS,CAAC,UAAU,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAE3C,IAAI,UAAU,CAAC,aAAa,CAAC,EAAE,CAAC;QAC9B,MAAM,CAAC,aAAa,EAAE,UAAU,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;QACvD,OAAO,CAAC,GAAG,CAAC,sCAAsC,CAAC,CAAC;IACtD,CAAC;SAAM,CAAC;QACN,OAAO,CAAC,IAAI,CAAC,gCAAgC,aAAa,aAAa,CAAC,CAAC;IAC3E,CAAC;IAED,qDAAqD;IACrD,SAAS,CAAC,GAAG,CAAC,WAAW,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAEhD,IAAI,UAAU,CAAC,eAAe,CAAC,EAAE,CAAC;QAChC,MAAM,CAAC,eAAe,EAAE,GAAG,CAAC,WAAW,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;QAC9D,OAAO,CAAC,GAAG,CAAC,wBAAwB,GAAG,CAAC,WAAW,EAAE,CAAC,CAAC;IACzD,CAAC;SAAM,CAAC;QACN,OAAO,CAAC,IAAI,CAAC,kCAAkC,eAAe,aAAa,CAAC,CAAC;IAC/E,CAAC;IAED,uCAAuC;IACvC,MAAM,SAAS,GAAG,MAAM,MAAM,CAAC,gEAAgE,CAAC,CAAC;IACj
G,MAAM,MAAM,GAAG,MAAM,MAAM,CAAC,oBAAoB,CAAC,CAAC;IAElD,iCAAiC;IACjC,MAAM,SAAS,GAAG,IAAI,CAAC,GAAG,EAAE,WAAW,CAAC,CAAC;IACzC,SAAS,CAAC,SAAS,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAC1C,MAAM,MAAM,GAAG,EAAE,SAAS,EAAE,MAAM,EAAE,WAAW,EAAE,IAAI,IAAI,EAAE,CAAC,WAAW,EAAE,EAAE,CAAC;IAC5E,aAAa,CAAC,IAAI,CAAC,SAAS,EAAE,aAAa,CAAC,EAAE,IAAI,CAAC,SAAS,CAAC,MAAM,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC,CAAC;IAC/E,OAAO,CAAC,GAAG,CAAC,yCAAyC,CAAC,CAAC;IAEvD,+CAA+C;IAC/C,MAAM,SAAS,GAAG,cAAc,CAAC,SAAS,EAAE,MAAM,CAAC,CAAC;IACpD,aAAa,CAAC,GAAG,CAAC,aAAa,EAAE,IAAI,CAAC,SAAS,CAAC,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC,CAAC;IACrE,OAAO,CAAC,GAAG,CAAC,2BAA2B,GAAG,CAAC,aAAa,EAAE,CAAC,CAAC;IAE5D,uCAAuC;IACvC,OAAO,CAAC,GAAG,CAAC,iCAAiC,CAAC,CAAC;IAC/C,IAAI,CAAC;QACH,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC,GAAG,SAAS,SAAS,CAAC,CAAC;QACpD,IAAI,QAAQ,CAAC,EAAE,EAAE,CAAC;YAChB,OAAO,CAAC,GAAG,CAAC,wBAAwB,CAAC,CAAC;QACxC,CAAC;aAAM,CAAC;YACN,OAAO,CAAC,IAAI,CAAC,2BAA2B,QAAQ,CAAC,MAAM,0BAA0B,CAAC,CAAC;QACrF,CAAC;IACH,CAAC;IAAC,MAAM,CAAC;QACP,OAAO,CAAC,IAAI,CAAC,+EAA+E,CAAC,CAAC;IAChG,CAAC;IAED,mBAAmB;IACnB,OAAO,CAAC,GAAG,CAAC,wCAAwC,CAAC,CAAC;IACtD,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,CAAC;IACjC,OAAO,CAAC,GAAG,CAAC,kCAAkC,CAAC,CAAC;IAChD,OAAO,CAAC,GAAG,CAAC,qCAAqC,CAAC,CAAC;IACnD,OAAO,CAAC,GAAG,CAAC,sCAAsC,CAAC,CAAC;IACpD,OAAO,CAAC,GAAG,CAAC,+DAA+D,CAAC,CAAC;AAC/E,CAAC;AAED,SAAS,cAAc,CAAC,SAAiB,EAAE,MAAc;IACvD,OAAO;QACL,UAAU,EAAE;YACV,QAAQ,EAAE;gBACR,GAAG,EAAE,GAAG,SAAS,MAAM;gBACvB,OAAO,EAAE,EAAE,aAAa,EAAE,UAAU,MAAM,EAAE,EAAE;aAC/C;SACF;KACF,CAAC;AACJ,CAAC"}
@@ -0,0 +1,5 @@
1
+ /**
2
+ * `codifier update` — pull latest skills from the npm package.
3
+ */
4
+ export declare function runUpdate(): Promise<void>;
5
+ //# sourceMappingURL=update.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"update.d.ts","sourceRoot":"","sources":["../../cli/update.ts"],"names":[],"mappings":"AAAA;;GAEG;AAUH,wBAAsB,SAAS,IAAI,OAAO,CAAC,IAAI,CAAC,CAiB/C"}
@@ -0,0 +1,25 @@
1
+ /**
2
+ * `codifier update` — pull latest skills from the npm package.
3
+ */
4
+ import { cpSync, existsSync } from 'fs';
5
+ import { join, dirname } from 'path';
6
+ import { fileURLToPath } from 'url';
7
// ESM has no __dirname; derive it so we can locate the skills bundled with
// this package (two levels above dist/cli/ in the published layout).
const __dirname = dirname(fileURLToPath(import.meta.url));
const PACKAGE_ROOT = join(__dirname, '..', '..');
const SKILLS_SOURCE = join(PACKAGE_ROOT, 'skills');

/**
 * `codifier update` — pull latest skills from the npm package.
 *
 * Overwrites .codifier/skills/ in the current project with the skills
 * shipped in this package version; .codifier/config.json is untouched.
 * Exits with code 1 when either side of the copy is missing.
 *
 * @returns {Promise<void>}
 */
export async function runUpdate() {
  const cwd = process.cwd();
  const skillsDest = join(cwd, '.codifier', 'skills');

  // Preconditions, checked in order: destination first, then source.
  const preconditions = [
    [skillsDest, 'Error: .codifier/skills/ not found. Run `codifier init` first.'],
    [SKILLS_SOURCE, `Error: Skills source not found at ${SKILLS_SOURCE}`],
  ];
  for (const [dir, message] of preconditions) {
    if (!existsSync(dir)) {
      console.error(message);
      process.exit(1);
    }
  }

  cpSync(SKILLS_SOURCE, skillsDest, { recursive: true });
  console.log('✓ Skills updated in .codifier/skills/');
  console.log('Note: .codifier/config.json was preserved.');
}
25
+ //# sourceMappingURL=update.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"update.js","sourceRoot":"","sources":["../../cli/update.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,OAAO,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,IAAI,CAAC;AACxC,OAAO,EAAE,IAAI,EAAE,OAAO,EAAE,MAAM,MAAM,CAAC;AACrC,OAAO,EAAE,aAAa,EAAE,MAAM,KAAK,CAAC;AAEpC,MAAM,SAAS,GAAG,OAAO,CAAC,aAAa,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC;AAC1D,MAAM,YAAY,GAAG,IAAI,CAAC,SAAS,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;AACjD,MAAM,aAAa,GAAG,IAAI,CAAC,YAAY,EAAE,QAAQ,CAAC,CAAC;AAEnD,MAAM,CAAC,KAAK,UAAU,SAAS;IAC7B,MAAM,GAAG,GAAG,OAAO,CAAC,GAAG,EAAE,CAAC;IAC1B,MAAM,UAAU,GAAG,IAAI,CAAC,GAAG,EAAE,WAAW,EAAE,QAAQ,CAAC,CAAC;IAEpD,IAAI,CAAC,UAAU,CAAC,UAAU,CAAC,EAAE,CAAC;QAC5B,OAAO,CAAC,KAAK,CAAC,gEAAgE,CAAC,CAAC;QAChF,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;IAClB,CAAC;IAED,IAAI,CAAC,UAAU,CAAC,aAAa,CAAC,EAAE,CAAC;QAC/B,OAAO,CAAC,KAAK,CAAC,qCAAqC,aAAa,EAAE,CAAC,CAAC;QACpE,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;IAClB,CAAC;IAED,MAAM,CAAC,aAAa,EAAE,UAAU,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IACvD,OAAO,CAAC,GAAG,CAAC,uCAAuC,CAAC,CAAC;IACrD,OAAO,CAAC,GAAG,CAAC,4CAA4C,CAAC,CAAC;AAC5D,CAAC"}
package/dist/index.js ADDED
@@ -0,0 +1,87 @@
1
+ /**
2
+ * CodifierMcp - MCP Server Entry Point
3
+ *
4
+ * Institutional memory system for AI-driven development.
5
+ * Provides fetch_context and update_memory tools via MCP protocol.
6
+ */
7
+ import { getConfig } from './config/env.js';
8
+ import { logger } from './utils/logger.js';
9
+ import { createDataStore } from './datastore/factory.js';
10
+ import { initializeMcpServer, connectStdioTransport } from './mcp/server.js';
11
+ import { startHttpServer } from './http/server.js';
12
+ /**
13
+ * Main entry point
14
+ */
15
/**
 * CodifierMcp server bootstrap: load and validate configuration, construct
 * the data store, start the MCP server on the configured transport, and wire
 * graceful shutdown for termination signals.
 *
 * Any startup failure is logged and terminates the process with exit code 1.
 * Fix: the SIGINT and SIGTERM handlers were duplicated verbatim; they are now
 * registered from a single loop, so shutdown behavior cannot drift apart.
 */
async function main() {
    try {
        logger.info('Starting CodifierMcp server');
        // Load and validate configuration
        const config = getConfig();
        logger.info('Configuration loaded', {
            dataStore: config.DATA_STORE,
            transportMode: config.TRANSPORT_MODE,
            httpPort: config.TRANSPORT_MODE === 'http' ? config.HTTP_PORT : undefined,
            logLevel: config.LOG_LEVEL,
        });
        // Create data store instance via factory
        const dataStore = createDataStore(config);
        logger.debug('Data store instance created', { backend: config.DATA_STORE });
        // Initialize MCP server (transport-agnostic)
        const server = await initializeMcpServer({
            name: 'codifier-mcp',
            version: '0.1.0',
            dataStore,
        });
        // Connect the transport selected by configuration.
        if (config.TRANSPORT_MODE === 'stdio') {
            logger.info('Starting server in stdio mode');
            await connectStdioTransport(server);
            logger.info('CodifierMcp server is ready (stdio transport)');
        }
        else if (config.TRANSPORT_MODE === 'http') {
            logger.info('Starting server in HTTP mode');
            await startHttpServer(server, {
                port: config.HTTP_PORT,
                apiAuthToken: config.API_AUTH_TOKEN,
                dataStore,
            });
            logger.info('CodifierMcp server is ready (HTTP transport)');
        }
        // NOTE(review): an unrecognized TRANSPORT_MODE falls through silently
        // with no transport connected — presumably getConfig() rejects anything
        // but 'stdio'/'http'; confirm against config/env.js.
        logger.info('Tools available: fetch_context, update_memory');
        // Graceful shutdown — identical handling for both termination signals.
        for (const signal of ['SIGINT', 'SIGTERM']) {
            process.on(signal, async () => {
                logger.info(`Received ${signal}, shutting down gracefully`);
                try {
                    await server.close();
                    logger.info('Server closed successfully');
                    process.exit(0);
                }
                catch (error) {
                    logger.error('Error during shutdown', error);
                    process.exit(1);
                }
            });
        }
    }
    catch (error) {
        logger.error('Fatal error during startup', {
            error: error instanceof Error ? error.message : 'Unknown error',
            stack: error instanceof Error ? error.stack : undefined,
        });
        process.exit(1);
    }
}
85
+ // Start the server
86
+ main();
87
+ //# sourceMappingURL=index.js.map
package/package.json ADDED
@@ -0,0 +1,40 @@
1
+ {
2
+ "name": "@codifier/cli",
3
+ "version": "2.0.0",
4
+ "description": "MCP server for institutional memory management",
5
+ "main": "dist/index.js",
6
+ "bin": {
7
+ "codifier": "dist/cli/bin/codifier.js"
8
+ },
9
+ "type": "module",
10
+ "files": [
11
+ "dist/cli/",
12
+ "skills/",
13
+ "commands/"
14
+ ],
15
+ "scripts": {
16
+ "build": "tsc",
17
+ "dev": "tsc && node dist/index.js",
18
+ "watch": "tsc --watch",
19
+ "start": "node dist/index.js",
20
+ "test": "node --test tests/http-server.test.js",
21
+ "prepublishOnly": "npm run build"
22
+ },
23
+ "dependencies": {
24
+ "@modelcontextprotocol/sdk": "^1.21.1",
25
+ "@supabase/supabase-js": "^2.95.3",
26
+ "commander": "^14.0.3",
27
+ "cors": "^2.8.5",
28
+ "express": "^5.0.0",
29
+ "js-yaml": "^4.1.0",
30
+ "repomix": "^0.3.0",
31
+ "zod": "^3.23.8"
32
+ },
33
+ "devDependencies": {
34
+ "@types/cors": "^2.8.17",
35
+ "@types/express": "^5.0.0",
36
+ "@types/js-yaml": "^4.0.9",
37
+ "@types/node": "^20.12.7",
38
+ "typescript": "^5.5.3"
39
+ }
40
+ }
@@ -0,0 +1,107 @@
1
+ # Skill: Brownfield Onboard
2
+
3
+ **Role:** Developer
4
+ **Purpose:** Onboard existing codebases into the Codifier shared knowledge base by packing repositories, generating architectural summaries, and persisting learnings.
5
+
6
+ See `../shared/codifier-tools.md` for full MCP tool reference.
7
+
8
+ ---
9
+
10
+ ## Prerequisites
11
+
12
+ - Active MCP connection to the Codifier server
13
+ - At least one repository URL (GitHub, GitLab, or local path)
14
+ - A project to associate the snapshots with (existing or new)
15
+
16
+ ---
17
+
18
+ ## Workflow
19
+
20
+ ### Step 1 — Identify or Create the Project
21
+
22
+ Call `manage_projects` with `operation: "list"` and show the user their existing projects.
23
+
24
+ Ask: **"Which project should these repositories be associated with, or should we create a new one?"**
25
+
26
+ - If **existing**: use the selected `project_id`.
27
+ - If **new**: collect name and optionally org, then call `manage_projects` with `operation: "create"`.
28
+
29
+ ### Step 2 — Collect Repository URLs
30
+
31
+ Ask the user to provide all repository URLs to onboard. They may provide:
32
+ - One or more GitHub/GitLab/Bitbucket HTTPS URLs
33
+ - Local filesystem paths (absolute)
34
+
35
+ Ask: **"Are there any other repos to include, or is this the complete list?"**
36
+
37
+ Also ask: **"What is the current state of these repos — active development, legacy, recently archived?"**
38
+
39
+ ### Step 3 — Fetch Existing Context
40
+
41
+ Call `fetch_context` with `{ project_id }` to retrieve any prior memories for this project. Summarize relevant findings to the user — prior architectural decisions, existing rules, or previous onboarding notes are important context.
42
+
43
+ ### Step 4 — Pack Repositories
44
+
45
+ For each repository URL:
46
+ 1. Call `pack_repo` with the URL, `project_id`, and a `version_label` (use current date: `"YYYY-MM"` or a tag like `"initial-onboard"`)
47
+ 2. Note the returned `repository_id`, `token_count`, and `file_count`
48
+ 3. Inform the user: "Packed `<repo-url>` — `<N>` files, `<M>` tokens"
49
+
50
+ If a pack fails, log the error and continue with remaining repos.
51
+
52
+ ### Step 5 — Generate Architectural Summary
53
+
54
+ Using the packed repository content (available in your context from the pack results) and any prior memories, generate a comprehensive architectural summary covering:
55
+
56
+ 1. **System Overview** — what the system does, its primary users, and its business purpose
57
+ 2. **Technology Stack** — languages, frameworks, databases, infrastructure
58
+ 3. **Module Structure** — major directories/packages and their responsibilities
59
+ 4. **Key Interfaces** — APIs, event buses, shared contracts between components
60
+ 5. **Data Flow** — how data moves through the system from input to output
61
+ 6. **External Dependencies** — third-party services, APIs, or systems integrated with
62
+ 7. **Known Issues / Technical Debt** — observations from the code (if apparent)
63
+ 8. **Conventions Observed** — naming patterns, file organisation, testing approach
64
+
65
+ Present the summary to the user and ask: **"Does this accurately describe the system? What should be added or corrected?"**
66
+
67
+ Incorporate feedback.
68
+
69
+ ### Step 6 — Persist Architectural Summary
70
+
71
+ Call `update_memory`:
72
+ ```
73
+ memory_type: "learning"
74
+ title: "Architectural Summary — <repo-name or project-name>"
75
+ content: { text: "<full summary markdown>", repos: ["<url1>", "<url2>"] }
76
+ tags: ["architecture", "onboarding", "brownfield"]
77
+ source_role: "developer"
78
+ ```
79
+
80
+ ### Step 7 — Persist Architectural Decisions
81
+
82
+ For any significant architectural decisions uncovered (e.g., "uses event sourcing", "monorepo with Turborepo", "Postgres as primary store"), ask the user which to persist as formal documents.
83
+
84
+ For each confirmed decision, call `update_memory`:
85
+ ```
86
+ memory_type: "document"
87
+ title: "ADR: <decision title>"
88
+ content: { text: "<decision description, rationale, and consequences>" }
89
+ tags: ["adr", "architecture"]
90
+ source_role: "developer"
91
+ ```
92
+
93
+ ### Step 8 — Summarize
94
+
95
+ Tell the user:
96
+ - Project ID
97
+ - Repositories packed (with IDs and token counts)
98
+ - Memories persisted (IDs and titles)
99
+ - How to retrieve this context in future: `fetch_context` with `{ project_id, tags: ["architecture"] }`
100
+
101
+ ---
102
+
103
+ ## Error Handling
104
+
105
+ - If `pack_repo` times out or fails: note the error in the summary, ask the user if they want to retry or skip.
106
+ - If a repo is private and credentials are not configured: inform the user that the server needs the relevant token (`GITHUB_TOKEN`, `GITLAB_TOKEN`) configured as an environment variable.
107
+ - If the packed content is very large (>500K tokens): focus the architectural summary on the highest-level structural observations rather than deep code analysis.
@@ -0,0 +1,145 @@
1
+ # Skill: Initialize Project
2
+
3
+ **Role:** Developer
4
+ **Purpose:** Set up a new project in the Codifier shared knowledge base — collecting context, optionally packing repositories, and generating four key artifacts: Rules.md, Evals.md, Requirements.md, and Roadmap.md.
5
+
6
+ See `../shared/codifier-tools.md` for full MCP tool reference.
7
+
8
+ ---
9
+
10
+ ## Prerequisites
11
+
12
+ - Active MCP connection to the Codifier server
13
+ - Project context: name, description, and optionally a Scope of Work (SOW) document and repo URLs
14
+
15
+ ---
16
+
17
+ ## Workflow
18
+
19
+ Follow these steps conversationally. You are the state machine — call MCP tools only for data operations.
20
+
21
+ ### Step 1 — Identify or Create the Project
22
+
23
+ Call `manage_projects` with `operation: "list"` to show the user their existing projects.
24
+
25
+ Ask: **"Is this a new project, or do you want to use an existing one?"**
26
+
27
+ - If **existing**: ask the user to select from the list; use that `project_id` for all subsequent calls.
28
+ - If **new**: collect a project name and optionally an org name, then call `manage_projects` with `operation: "create"`. Use the returned `project_id` for all subsequent calls.
29
+
30
+ ### Step 2 — Collect Project Context
31
+
32
+ Gather the following from the user in a single conversational turn:
33
+
34
+ 1. **Project name** (if not already set)
35
+ 2. **Description** — what does this project build and for whom?
36
+ 3. **Scope of Work (SOW)** — paste the SOW document, or describe key deliverables if no formal SOW exists
37
+ 4. **Repository URLs** (optional) — GitHub/GitLab URLs of codebases relevant to this project
38
+ 5. **Additional context** — any constraints, tech stack, team conventions, or prior decisions
39
+
40
+ Confirm you have understood all provided context before proceeding.
41
+
42
+ ### Step 3 — Pack Repositories (if URLs provided)
43
+
44
+ For each repository URL provided:
45
+ 1. Call `pack_repo` with the URL, `project_id`, and a `version_label` (use the current date or sprint label, e.g., `"2026-02"`)
46
+ 2. Note the returned `repository_id` and `token_count`
47
+ 3. Inform the user: "Packed `<repo-url>` — `<N>` tokens"
48
+
49
+ If no URLs were provided, skip this step.
50
+
51
+ ### Step 4 — Fetch Existing Context
52
+
53
+ Call `fetch_context` with `{ project_id }` (no type filter) to retrieve any prior memories for this project. This surfaces research findings, prior rules, or existing docs that should inform the new artifacts.
54
+
55
+ Summarize any relevant findings to the user before generating artifacts.
56
+
57
+ ### Step 5 — Generate Rules.md
58
+
59
+ Using the prompt template in `templates/rules-prompt.md`, generate a comprehensive set of development rules and coding standards for this project.
60
+
61
+ **Substitute these placeholders with actual values:**
62
+ - `{project_name}` — the project name
63
+ - `{description}` — the project description
64
+ - `{sow}` — the SOW or deliverables description
65
+ - `{repo_urls}` — list of repo URLs (or "none provided")
66
+ - `{additional_context}` — any extra context, including relevant memories from Step 4
67
+
68
+ Present the generated Rules.md to the user inline. Ask: **"Does this look right? Any rules to add, remove, or change?"**
69
+
70
+ Incorporate feedback before proceeding.
71
+
72
+ ### Step 6 — Generate Evals.md
73
+
74
+ Using the prompt template in `templates/evals-prompt.md`, generate evaluation criteria from the confirmed Rules.md.
75
+
76
+ **Substitute:**
77
+ - `{rules}` — the confirmed Rules.md content
78
+ - `{project_name}` — the project name
79
+ - `{description}` — the project description
80
+
81
+ Present Evals.md inline and ask for confirmation.
82
+
83
+ ### Step 7 — Generate Requirements.md
84
+
85
+ Using the prompt template in `templates/requirements-prompt.md`, generate a detailed requirements document.
86
+
87
+ **Substitute:**
88
+ - `{project_name}`, `{description}`, `{sow}`, `{repo_urls}`, `{additional_context}`
89
+
90
+ Present Requirements.md inline and ask for confirmation.
91
+
92
+ ### Step 8 — Generate Roadmap.md
93
+
94
+ Using the prompt template in `templates/roadmap-prompt.md`, generate a phased implementation roadmap from Requirements.md.
95
+
96
+ **Substitute:**
97
+ - `{requirements}` — the confirmed Requirements.md content
98
+ - `{project_name}`, `{description}`, `{repo_urls}`
99
+
100
+ Present Roadmap.md inline and ask for confirmation.
101
+
102
+ ### Step 9 — Persist All Artifacts
103
+
104
+ Call `update_memory` four times — once per artifact:
105
+
106
+ | Artifact | `memory_type` | `title` | `source_role` |
107
+ |----------|--------------|---------|---------------|
108
+ | Rules.md | `document` | `"Rules.md — <project_name>"` | `"developer"` |
109
+ | Evals.md | `document` | `"Evals.md — <project_name>"` | `"developer"` |
110
+ | Requirements.md | `document` | `"Requirements.md — <project_name>"` | `"developer"` |
111
+ | Roadmap.md | `document` | `"Roadmap.md — <project_name>"` | `"developer"` |
112
+
113
+ For each call, set `content: { text: "<full artifact markdown>" }` and add relevant `tags` (e.g., `["rules", "standards"]` for Rules.md).
114
+
115
+ ### Step 10 — Summarize
116
+
117
+ Tell the user:
118
+ - Project ID (so they can reference it later)
119
+ - Which artifacts were generated and persisted
120
+ - How many MCP tool calls were made total
121
+ - How to retrieve context in future sessions: `fetch_context` with `{ project_id, memory_type: "document" }`
122
+
123
+ ---
124
+
125
+ ## Context Assembly by Scenario
126
+
127
+ ### Greenfield + SOW
128
+ Emphasize SOW deliverables and functional requirements in rules and requirements generation. The roadmap should sequence SOW milestones explicitly.
129
+
130
+ ### Greenfield — No SOW
131
+ Prompt the user for key deliverables and target users before generating. Rules should be general-purpose but tailored to the tech stack described.
132
+
133
+ ### Brownfield + SOW
134
+ Pack all repos first (Step 3). Fetch existing memories (Step 4) — prior rules and learnings are especially important. SOW delta (what's changing vs. what exists) should drive Requirements.md.
135
+
136
+ ### Brownfield — No SOW
137
+ Pack all repos first. Spend extra time in conversation understanding the existing system before generating rules — ask about pain points, constraints, and what must not change.
138
+
139
+ ---
140
+
141
+ ## Error Handling
142
+
143
+ - If `pack_repo` fails for a URL: log the error, inform the user, and continue with remaining URLs.
144
+ - If `update_memory` fails: retry once. If still failing, present the artifact as a code block the user can save manually.
145
+ - If the user provides no description or SOW: ask at least 3 clarifying questions before attempting artifact generation.