@uxmaltech/collab-cli 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (109)
  1. package/README.md +227 -0
  2. package/bin/collab +10 -0
  3. package/dist/cli.js +34 -0
  4. package/dist/commands/canon/index.js +16 -0
  5. package/dist/commands/canon/rebuild.js +95 -0
  6. package/dist/commands/compose/generate.js +63 -0
  7. package/dist/commands/compose/index.js +18 -0
  8. package/dist/commands/compose/validate.js +53 -0
  9. package/dist/commands/doctor.js +153 -0
  10. package/dist/commands/index.js +27 -0
  11. package/dist/commands/infra/down.js +23 -0
  12. package/dist/commands/infra/index.js +20 -0
  13. package/dist/commands/infra/shared.js +59 -0
  14. package/dist/commands/infra/status.js +64 -0
  15. package/dist/commands/infra/up.js +29 -0
  16. package/dist/commands/init.js +830 -0
  17. package/dist/commands/mcp/index.js +20 -0
  18. package/dist/commands/mcp/shared.js +57 -0
  19. package/dist/commands/mcp/start.js +45 -0
  20. package/dist/commands/mcp/status.js +62 -0
  21. package/dist/commands/mcp/stop.js +23 -0
  22. package/dist/commands/seed.js +55 -0
  23. package/dist/commands/uninstall.js +36 -0
  24. package/dist/commands/up.js +78 -0
  25. package/dist/commands/update-canons.js +48 -0
  26. package/dist/commands/upgrade.js +54 -0
  27. package/dist/index.js +14 -0
  28. package/dist/lib/ai-client.js +317 -0
  29. package/dist/lib/ansi.js +58 -0
  30. package/dist/lib/canon-index-generator.js +64 -0
  31. package/dist/lib/canon-index-targets.js +68 -0
  32. package/dist/lib/canon-resolver.js +262 -0
  33. package/dist/lib/canon-scaffold.js +57 -0
  34. package/dist/lib/cli-detection.js +149 -0
  35. package/dist/lib/command-context.js +23 -0
  36. package/dist/lib/compose-defaults.js +47 -0
  37. package/dist/lib/compose-env.js +24 -0
  38. package/dist/lib/compose-paths.js +36 -0
  39. package/dist/lib/compose-renderer.js +134 -0
  40. package/dist/lib/compose-validator.js +56 -0
  41. package/dist/lib/config.js +195 -0
  42. package/dist/lib/credentials.js +63 -0
  43. package/dist/lib/docker-checks.js +73 -0
  44. package/dist/lib/docker-compose.js +15 -0
  45. package/dist/lib/docker-status.js +151 -0
  46. package/dist/lib/domain-gen.js +376 -0
  47. package/dist/lib/ecosystem.js +150 -0
  48. package/dist/lib/env-file.js +77 -0
  49. package/dist/lib/errors.js +30 -0
  50. package/dist/lib/executor.js +85 -0
  51. package/dist/lib/github-auth.js +204 -0
  52. package/dist/lib/hash.js +7 -0
  53. package/dist/lib/health-checker.js +140 -0
  54. package/dist/lib/logger.js +87 -0
  55. package/dist/lib/mcp-client.js +88 -0
  56. package/dist/lib/mode.js +36 -0
  57. package/dist/lib/model-listing.js +102 -0
  58. package/dist/lib/model-registry.js +55 -0
  59. package/dist/lib/npm-operations.js +69 -0
  60. package/dist/lib/orchestrator.js +170 -0
  61. package/dist/lib/parsers.js +42 -0
  62. package/dist/lib/port-resolver.js +57 -0
  63. package/dist/lib/preconditions.js +35 -0
  64. package/dist/lib/preflight.js +88 -0
  65. package/dist/lib/process.js +6 -0
  66. package/dist/lib/prompt.js +125 -0
  67. package/dist/lib/providers.js +117 -0
  68. package/dist/lib/repo-analysis-helpers.js +379 -0
  69. package/dist/lib/repo-scanner.js +195 -0
  70. package/dist/lib/service-health.js +79 -0
  71. package/dist/lib/shell.js +49 -0
  72. package/dist/lib/state.js +38 -0
  73. package/dist/lib/update-checker.js +130 -0
  74. package/dist/lib/version.js +27 -0
  75. package/dist/stages/agent-skills-setup.js +301 -0
  76. package/dist/stages/assistant-setup.js +325 -0
  77. package/dist/stages/canon-ingest.js +249 -0
  78. package/dist/stages/canon-rebuild-graph.js +33 -0
  79. package/dist/stages/canon-rebuild-indexes.js +40 -0
  80. package/dist/stages/canon-rebuild-snapshot.js +75 -0
  81. package/dist/stages/canon-rebuild-validate.js +57 -0
  82. package/dist/stages/canon-rebuild-vectors.js +30 -0
  83. package/dist/stages/canon-scaffold.js +15 -0
  84. package/dist/stages/canon-sync.js +49 -0
  85. package/dist/stages/ci-setup.js +56 -0
  86. package/dist/stages/domain-gen.js +363 -0
  87. package/dist/stages/graph-seed.js +26 -0
  88. package/dist/stages/repo-analysis-fileonly.js +111 -0
  89. package/dist/stages/repo-analysis.js +112 -0
  90. package/dist/stages/repo-scaffold.js +110 -0
  91. package/dist/templates/canon/contracts-readme.js +39 -0
  92. package/dist/templates/canon/domain-readme.js +40 -0
  93. package/dist/templates/canon/evolution/changelog.js +53 -0
  94. package/dist/templates/canon/governance/confidence-levels.js +38 -0
  95. package/dist/templates/canon/governance/implementation-process.js +34 -0
  96. package/dist/templates/canon/governance/review-process.js +29 -0
  97. package/dist/templates/canon/governance/schema-versioning.js +25 -0
  98. package/dist/templates/canon/governance/what-enters-the-canon.js +44 -0
  99. package/dist/templates/canon/index.js +28 -0
  100. package/dist/templates/canon/knowledge-readme.js +129 -0
  101. package/dist/templates/canon/system-prompt.js +101 -0
  102. package/dist/templates/ci/architecture-merge.js +29 -0
  103. package/dist/templates/ci/architecture-pr.js +26 -0
  104. package/dist/templates/ci/index.js +7 -0
  105. package/dist/templates/consolidated.js +114 -0
  106. package/dist/templates/infra.js +90 -0
  107. package/dist/templates/mcp.js +32 -0
  108. package/install.sh +455 -0
  109. package/package.json +48 -0
@@ -0,0 +1,363 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ exports.buildFileOnlyDomainPipeline = buildFileOnlyDomainPipeline;
7
+ exports.buildIndexedDomainPipeline = buildIndexedDomainPipeline;
8
+ const node_fs_1 = __importDefault(require("node:fs"));
9
+ const node_path_1 = __importDefault(require("node:path"));
10
+ const ai_client_1 = require("../lib/ai-client");
11
+ const canon_resolver_1 = require("../lib/canon-resolver");
12
+ const domain_gen_1 = require("../lib/domain-gen");
13
+ const errors_1 = require("../lib/errors");
14
+ const github_auth_1 = require("../lib/github-auth");
15
+ const mcp_client_1 = require("../lib/mcp-client");
16
+ const providers_1 = require("../lib/providers");
17
+ const repo_scanner_1 = require("../lib/repo-scanner");
18
+ const service_health_1 = require("../lib/service-health");
19
+ // ────────────────────────────────────────────────────────────────
20
+ // Helper: collect markdown files recursively
21
+ // ────────────────────────────────────────────────────────────────
22
/**
 * Recursively walks `dir` and returns the path of every `.md` file found,
 * in directory-traversal order. A missing directory yields an empty array.
 */
function collectMarkdownFiles(dir) {
    if (!node_fs_1.default.existsSync(dir)) {
        return [];
    }
    const entries = node_fs_1.default.readdirSync(dir, { withFileTypes: true });
    return entries.flatMap((entry) => {
        const fullPath = node_path_1.default.join(dir, entry.name);
        if (entry.isDirectory()) {
            return collectMarkdownFiles(fullPath);
        }
        return entry.isFile() && entry.name.endsWith('.md') ? [fullPath] : [];
    });
}
39
+ // ────────────────────────────────────────────────────────────────
40
+ // Shared stage: domain-analysis (used by both file-only & indexed)
41
+ // ────────────────────────────────────────────────────────────────
42
/**
 * Builds the AI-driven domain analysis stage shared by both the file-only
 * and the indexed pipelines. The stage scans the target repository, asks a
 * configured AI provider for a domain definition, and publishes the parsed
 * result on `ctx.options._domainResult` plus a JSON cache file so that a
 * `--resume` run can skip the expensive AI call.
 */
function buildDomainAnalysisStage() {
    // Reads a previously cached result; returns null when absent or invalid.
    const readCachedResult = (cacheFile) => {
        if (!node_fs_1.default.existsSync(cacheFile)) {
            return null;
        }
        try {
            const parsed = JSON.parse(node_fs_1.default.readFileSync(cacheFile, 'utf8'));
            return parsed.domainName && parsed.domainSlug ? parsed : null;
        }
        catch {
            // Corrupt cache: treat as missing and re-run the analysis.
            return null;
        }
    };
    // Normalizes the --providers CLI flag into an array of provider ids.
    const parseProvidersFlag = (raw) => {
        if (typeof raw === 'string') {
            return raw.split(',').map((part) => part.trim()).filter(Boolean);
        }
        return Array.isArray(raw) ? raw : [];
    };
    return {
        id: 'domain-analysis',
        title: 'Analyze repository and generate domain definition',
        recovery: [
            'Ensure an AI provider is configured (OPENAI_API_KEY, claude CLI, etc.).',
            'Run collab init --repo <package> --resume to retry analysis.',
        ],
        run: async (ctx) => {
            const targetRepo = ctx.options?._repoPath;
            if (!targetRepo) {
                throw new errors_1.CliError('No repo path provided for domain analysis.');
            }
            // Resume support: reuse a cached result from a previous run.
            const cacheFile = node_path_1.default.join(ctx.config.collabDir, 'domain-gen-result.json');
            const cached = readCachedResult(cacheFile);
            if (cached) {
                ctx.logger.info(`Resuming with cached domain result: "${cached.domainName}".`);
                if (ctx.options) {
                    ctx.options._domainResult = cached;
                }
                return;
            }
            if (ctx.executor.dryRun) {
                ctx.logger.info(`[dry-run] Would analyze ${node_path_1.default.basename(targetRepo)} and generate domain definition.`);
                return;
            }
            // 1. Scan the target repository.
            ctx.logger.info(`Scanning repository at ${targetRepo}...`);
            const repoCtx = (0, repo_scanner_1.scanRepository)(targetRepo);
            ctx.logger.info(`Detected: ${repoCtx.language}${repoCtx.framework ? ` / ${repoCtx.framework}` : ''}, ` +
                `${repoCtx.totalSourceFiles} source files.`);
            // 2. Build the generation prompt from the scan results.
            const prompt = (0, domain_gen_1.buildDomainGenPrompt)(repoCtx);
            // 3. Pick an AI client — the --providers flag wins over config.
            const flagProviders = parseProvidersFlag(ctx.options?.providers);
            const configuredProviders = (0, providers_1.getEnabledProviders)(ctx.config);
            const providers = flagProviders.length > 0 ? flagProviders : configuredProviders;
            const client = (0, ai_client_1.createFirstAvailableClient)(providers, ctx.config, ctx.logger);
            if (!client) {
                throw new errors_1.CliError('No AI provider available for domain analysis. ' +
                    'Set OPENAI_API_KEY, configure claude CLI, install another supported provider, ' +
                    'or pass --providers to select a specific provider.');
            }
            // 4. Ask the AI for a domain definition.
            ctx.logger.info('Generating domain definition via AI...');
            const rawResponse = await client.complete([
                { role: 'system', content: prompt.system },
                { role: 'user', content: prompt.user },
            ], { maxTokens: 8192, temperature: 0.2 });
            // 5. Parse and publish the result for the downstream stages.
            const result = (0, domain_gen_1.parseDomainGenerationResponse)(rawResponse);
            ctx.logger.info(`Domain "${result.domainName}" generated: ` +
                `${result.principles.length} principles, ${result.rules.length} rules, ` +
                `${result.patterns.length} patterns, ${result.technologies.length} technologies.`);
            if (ctx.options) {
                ctx.options._domainResult = result;
            }
            ctx.executor.writeFile(cacheFile, JSON.stringify(result, null, 2), {
                description: 'cache domain generation result for resume support',
            });
        },
    };
}
122
+ // ────────────────────────────────────────────────────────────────
123
+ // File-only stages
124
+ // ────────────────────────────────────────────────────────────────
125
/**
 * Builds the stage that writes the generated domain files into the local
 * repository under `docs/architecture/repo/domains/<slug>`.
 *
 * Skips quietly when no analysis result is available (analysis may have
 * been skipped or failed), and throws a CliError when the repo path is
 * missing so the failure is actionable instead of an opaque TypeError
 * from path.join — consistent with the domain-analysis stage.
 */
function buildDomainFileWriteLocalStage() {
    return {
        id: 'domain-file-write-local',
        title: 'Write domain files to local repo',
        recovery: [
            'Verify write permissions for the target repo directory.',
            'Run collab init --repo <package> --resume to retry.',
        ],
        run: (ctx) => {
            const repoPath = ctx.options?._repoPath;
            const result = ctx.options?._domainResult;
            if (!result) {
                ctx.logger.info('No domain analysis result available; skipping file write.');
                return;
            }
            // Guard: without a repo path, path.join below would throw a bare TypeError.
            if (!repoPath) {
                throw new errors_1.CliError('No repo path provided for domain file write.');
            }
            const targetDir = node_path_1.default.join(repoPath, 'docs', 'architecture', 'repo', 'domains', result.domainSlug);
            if (ctx.executor.dryRun) {
                ctx.logger.info(`[dry-run] Would write domain files to ${targetDir}`);
                return;
            }
            const count = (0, domain_gen_1.writeDomainFiles)(targetDir, result, ctx.executor);
            ctx.logger.info(`Domain files written: ${count} file(s) to ${targetDir}`);
        },
    };
}
151
+ // ────────────────────────────────────────────────────────────────
152
+ // Indexed stages
153
+ // ────────────────────────────────────────────────────────────────
154
/**
 * Builds the stage that brings the local clone of the business canon
 * repository up to date before any domain files are written into it.
 * Fails fast with a CliError when no business canon is configured.
 */
function buildDomainCanonSyncStage() {
    return {
        id: 'domain-canon-sync',
        title: 'Sync business canon repository',
        recovery: [
            'Ensure GitHub access is configured.',
            'Run collab init --repo <package> --resume to retry.',
        ],
        run: (ctx) => {
            if (!(0, canon_resolver_1.isBusinessCanonConfigured)(ctx.config)) {
                throw new errors_1.CliError('Business canon is not configured. Use --business-canon owner/repo to set it, or use --mode file-only.');
            }
            if (ctx.executor.dryRun) {
                ctx.logger.info('[dry-run] Would sync business canon repository.');
                return;
            }
            // Pass along a stored GitHub token when available (may be undefined).
            const storedAuth = (0, github_auth_1.loadGitHubAuth)(ctx.config.collabDir);
            const synced = (0, canon_resolver_1.syncBusinessCanon)(ctx.config, (msg) => ctx.logger.info(msg), storedAuth?.token);
            if (!synced) {
                throw new errors_1.CliError('Failed to sync business canon repository.');
            }
        },
    };
}
180
/**
 * Builds the stage that writes the generated domain files into the
 * business canon working copy under `domains/<slug>`.
 */
function buildDomainFileWriteCanonStage() {
    return {
        id: 'domain-file-write-canon',
        title: 'Write domain files to business canon',
        recovery: [
            'Verify write permissions for the business canon directory.',
            'Run collab init --repo <package> --resume to retry.',
        ],
        run: (ctx) => {
            const analysis = ctx.options?._domainResult;
            if (!analysis) {
                ctx.logger.info('No domain analysis result available; skipping file write.');
                return;
            }
            const destination = node_path_1.default.join((0, canon_resolver_1.getBusinessCanonDir)(ctx.config), 'domains', analysis.domainSlug);
            if (ctx.executor.dryRun) {
                ctx.logger.info(`[dry-run] Would write domain files to ${destination}`);
                return;
            }
            const written = (0, domain_gen_1.writeDomainFiles)(destination, analysis, ctx.executor);
            ctx.logger.info(`Domain files written: ${written} file(s) to ${destination}`);
        },
    };
}
206
/**
 * Builds the stage that appends the new domain's vertices and edges to the
 * business canon's nGQL graph seed file (graph/seed/data.ngql).
 */
function buildDomainGraphUpdateStage() {
    return {
        id: 'domain-graph-update',
        title: 'Update graph seed with domain vertices',
        recovery: [
            'Verify write permissions for the business canon graph/seed directory.',
            'Run collab init --repo <package> --resume to retry.',
        ],
        run: (ctx) => {
            const analysis = ctx.options?._domainResult;
            if (!analysis) {
                ctx.logger.info('No domain analysis result available; skipping graph update.');
                return;
            }
            const seedPath = node_path_1.default.join((0, canon_resolver_1.getBusinessCanonDir)(ctx.config), 'graph', 'seed', 'data.ngql');
            if (ctx.executor.dryRun) {
                ctx.logger.info(`[dry-run] Would append graph seed data to ${seedPath}`);
                return;
            }
            // New vertex IDs continue from whatever the seed file already contains.
            const nextIds = (0, domain_gen_1.findNextIds)(seedPath);
            const statements = (0, domain_gen_1.generateDomainGraphSeed)(analysis, nextIds);
            (0, domain_gen_1.appendGraphSeed)(seedPath, statements, ctx.executor);
            const technologies = analysis.technologies.length;
            const patterns = analysis.patterns.length;
            ctx.logger.info(`Graph seed updated: 1 domain + ${technologies} technology + ${patterns} pattern vertices, ` +
                `${technologies + patterns} edges.`);
        },
    };
}
237
/**
 * Builds the stage that commits the new domain files plus the graph seed
 * in the business canon working copy and pushes the commit upstream.
 * When a stored GitHub token exists, it is supplied through a one-shot
 * `http.<url>.extraHeader` git config instead of being embedded in the
 * remote URL, and the command is echoed only in verbose mode.
 */
function buildDomainCanonPushStage() {
    return {
        id: 'domain-canon-push',
        title: 'Commit and push domain to business canon',
        recovery: [
            'Ensure GitHub access is configured with push permissions.',
            'Run collab init --repo <package> --resume to retry.',
        ],
        run: (ctx) => {
            const analysis = ctx.options?._domainResult;
            const sourceRepo = ctx.options?._repoPath;
            if (!analysis) {
                ctx.logger.info('No domain analysis result available; skipping push.');
                return;
            }
            const canonDir = (0, canon_resolver_1.getBusinessCanonDir)(ctx.config);
            const sourceName = sourceRepo ? node_path_1.default.basename(sourceRepo) : 'unknown';
            if (ctx.executor.dryRun) {
                ctx.logger.info(`[dry-run] Would commit and push domain "${analysis.domainName}" to business canon.`);
                return;
            }
            // Small helper: run git inside the canon working copy.
            const git = (...args) => ctx.executor.run('git', ['-C', canonDir, ...args]);
            // Stage the domain directory and the updated graph seed, then commit.
            git('add', `domains/${analysis.domainSlug}/`);
            git('add', 'graph/seed/data.ngql');
            git('commit', '-m', `feat(domain): add ${analysis.domainName} from ${sourceName}`);
            // Push — prefer token auth via http.extraHeader to avoid leaking
            // secrets in logs or the remote URL.
            const auth = (0, github_auth_1.loadGitHubAuth)(ctx.config.collabDir);
            if (auth?.token) {
                const canon = ctx.config.canons.business;
                const remoteUrl = `https://github.com/${canon.repo}.git`;
                ctx.executor.run('git', [
                    '-C', canonDir,
                    '-c', `http.${remoteUrl}.extraHeader=Authorization: Bearer ${auth.token}`,
                    'push', remoteUrl, canon.branch || 'main',
                ], { verboseOnly: true });
            }
            else {
                git('push');
            }
            ctx.logger.info(`Domain "${analysis.domainName}" committed and pushed to business canon.`);
        },
    };
}
284
/**
 * Builds the stage that ingests the freshly written domain markdown files
 * into the MCP service over HTTP, then triggers a graph re-seed so the
 * knowledge graph reflects the new domain.
 */
function buildDomainIngestStage() {
    return {
        id: 'domain-ingest',
        title: 'Ingest domain files into MCP',
        recovery: [
            'Ensure MCP service is running and accessible.',
            'Run collab init --repo <package> --resume to retry ingestion.',
        ],
        run: async (ctx) => {
            const analysis = ctx.options?._domainResult;
            if (!analysis) {
                ctx.logger.info('No domain analysis result available; skipping ingestion.');
                return;
            }
            if (ctx.executor.dryRun) {
                ctx.logger.info('[dry-run] Would ingest domain files into MCP via HTTP.');
                return;
            }
            const canonDir = (0, canon_resolver_1.getBusinessCanonDir)(ctx.config);
            const domainDir = node_path_1.default.join(canonDir, 'domains', analysis.domainSlug);
            // MCP connection settings come from config plus the runtime env.
            const baseUrl = (0, mcp_client_1.getMcpBaseUrl)(ctx.config);
            const runtimeEnv = (0, service_health_1.loadRuntimeEnv)(ctx.config);
            const apiKey = (0, mcp_client_1.resolveMcpApiKey)(runtimeEnv);
            const timeoutMs = (0, mcp_client_1.resolveMcpHttpTimeoutMs)(runtimeEnv);
            const markdownFiles = collectMarkdownFiles(domainDir);
            if (markdownFiles.length > 0) {
                // Normalize the configured repo ("https://github.com/org/name.git"
                // or "org/name") down to an "org/name" slug.
                const repoSlug = ctx.config.canons.business.repo
                    .replace(/\.git$/, '')
                    .replace(/^https?:\/\/github\.com\//i, '');
                const scope = repoSlug.split('/').pop() ?? repoSlug;
                const org = repoSlug.includes('/') ? repoSlug.split('/')[0] : scope;
                const documents = markdownFiles.map((file) => ({
                    path: node_path_1.default.relative(canonDir, file),
                    content: node_fs_1.default.readFileSync(file, 'utf8'),
                }));
                ctx.logger.info(`Ingesting ${documents.length} domain file(s) via HTTP...`);
                const ingestResult = await (0, mcp_client_1.ingestDocuments)(baseUrl, {
                    context: 'technical',
                    scope,
                    organization: org,
                    repo: repoSlug.includes('/') ? repoSlug : `${org}/${repoSlug}`,
                    documents,
                }, apiKey, timeoutMs);
                ctx.logger.info(`Domain ingested: ${ingestResult.vector.ingested_files} files, ` +
                    `${ingestResult.vector.total_points} vectors, ` +
                    `${ingestResult.graph.nodes_created} graph nodes.`);
            }
            // Re-seed the graph even when no markdown files were found.
            ctx.logger.info('Triggering graph re-seed...');
            const seedResult = await (0, mcp_client_1.triggerGraphSeed)(baseUrl, apiKey, timeoutMs);
            ctx.logger.info(`Graph re-seed complete: ${seedResult.nodes_created} nodes, ${seedResult.edges_created} edges.`);
        },
    };
}
339
+ // ────────────────────────────────────────────────────────────────
340
+ // Pipeline builders
341
+ // ────────────────────────────────────────────────────────────────
342
/**
 * Builds the file-only domain generation pipeline: analyze the repository,
 * then write the domain files into the local repo (2 stages).
 */
function buildFileOnlyDomainPipeline() {
    const analyze = buildDomainAnalysisStage();
    const writeLocal = buildDomainFileWriteLocalStage();
    return [analyze, writeLocal];
}
351
/**
 * Builds the indexed domain generation pipeline: sync the canon, analyze,
 * write files, update the graph seed, push, then ingest into MCP (6 stages).
 */
function buildIndexedDomainPipeline() {
    const builders = [
        buildDomainCanonSyncStage,
        buildDomainAnalysisStage,
        buildDomainFileWriteCanonStage,
        buildDomainGraphUpdateStage,
        buildDomainCanonPushStage,
        buildDomainIngestStage,
    ];
    return builders.map((build) => build());
}
@@ -0,0 +1,26 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.graphSeedStage = void 0;
4
+ const mcp_client_1 = require("../lib/mcp-client");
5
+ const service_health_1 = require("../lib/service-health");
6
+ exports.graphSeedStage = {
7
+ id: 'graph-seed',
8
+ title: 'Seed NebulaGraph knowledge graph',
9
+ recovery: [
10
+ 'Ensure MCP service is running and accessible.',
11
+ 'Run collab init --resume to retry graph seeding.',
12
+ ],
13
+ run: async (ctx) => {
14
+ if (ctx.executor.dryRun) {
15
+ ctx.logger.info('[dry-run] Would seed NebulaGraph with canonical knowledge graph via HTTP.');
16
+ return;
17
+ }
18
+ ctx.logger.info('Seeding NebulaGraph with canonical architecture graph via HTTP...');
19
+ const baseUrl = (0, mcp_client_1.getMcpBaseUrl)(ctx.config);
20
+ const env = (0, service_health_1.loadRuntimeEnv)(ctx.config);
21
+ const apiKey = (0, mcp_client_1.resolveMcpApiKey)(env);
22
+ const timeoutMs = (0, mcp_client_1.resolveMcpHttpTimeoutMs)(env);
23
+ const result = await (0, mcp_client_1.triggerGraphSeed)(baseUrl, apiKey, timeoutMs);
24
+ ctx.logger.info(`NebulaGraph seeding complete: ${result.nodes_created} nodes, ${result.edges_created} edges.`);
25
+ },
26
+ };
@@ -0,0 +1,111 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ exports.repoAnalysisFileOnlyStage = void 0;
7
+ const node_path_1 = __importDefault(require("node:path"));
8
+ const ai_client_1 = require("../lib/ai-client");
9
+ const canon_resolver_1 = require("../lib/canon-resolver");
10
+ const orchestrator_1 = require("../lib/orchestrator");
11
+ const providers_1 = require("../lib/providers");
12
+ const repo_analysis_helpers_1 = require("../lib/repo-analysis-helpers");
13
+ const repo_scanner_1 = require("../lib/repo-scanner");
14
/**
 * True when the provider list contains exactly one entry and that entry is
 * 'copilot'. Copilot cannot run AI completions, so analysis is skipped.
 */
function onlyCopilotEnabled(providers) {
    const [first, ...rest] = providers;
    return rest.length === 0 && first === 'copilot';
}
21
+ exports.repoAnalysisFileOnlyStage = {
22
+ id: 'repo-analysis-fileonly',
23
+ title: 'AI-powered repository analysis',
24
+ recovery: [
25
+ 'Ensure AI provider API keys are set, or configure a CLI provider (codex, claude, gemini).',
26
+ 'Run collab init --resume to retry repository analysis.',
27
+ ],
28
+ run: async (ctx) => {
29
+ if (ctx.options?.skipAnalysis) {
30
+ ctx.logger.info('Skipping repository analysis by user choice.');
31
+ return;
32
+ }
33
+ const enabledProviders = (0, providers_1.getEnabledProviders)(ctx.config);
34
+ if (enabledProviders.length === 0 || onlyCopilotEnabled(enabledProviders)) {
35
+ ctx.logger.info('No AI-capable providers enabled; skipping repository analysis.');
36
+ // Still scan repo for basic AI helper files
37
+ const repoCtx = (0, repo_scanner_1.scanRepository)((0, orchestrator_1.getRepoBaseDir)(ctx));
38
+ (0, repo_analysis_helpers_1.generateAiHelpers)(ctx, repoCtx, {});
39
+ return;
40
+ }
41
+ if (ctx.executor.dryRun) {
42
+ ctx.logger.info('[dry-run] Would analyze repository and generate architecture files.');
43
+ return;
44
+ }
45
+ const client = (0, ai_client_1.createFirstAvailableClient)(enabledProviders, ctx.config, ctx.logger);
46
+ if (!client) {
47
+ ctx.logger.warn('No AI provider credentials or CLI available; generating basic AI helpers only.');
48
+ const repoCtx = (0, repo_scanner_1.scanRepository)((0, orchestrator_1.getRepoBaseDir)(ctx));
49
+ (0, repo_analysis_helpers_1.generateAiHelpers)(ctx, repoCtx, {});
50
+ return;
51
+ }
52
+ // Load system prompt from canons (should already exist from canon-sync stage)
53
+ if (!(0, canon_resolver_1.isCanonsAvailable)()) {
54
+ ctx.logger.info('Canons not installed. Downloading collab-architecture...');
55
+ const ok = (0, canon_resolver_1.syncCanons)((msg) => ctx.logger.info(msg));
56
+ if (!ok || !(0, canon_resolver_1.isCanonsAvailable)()) {
57
+ ctx.logger.warn('Failed to download canons. Skipping repository analysis.');
58
+ return;
59
+ }
60
+ }
61
+ const systemPrompt = (0, canon_resolver_1.resolveCanonFile)('prompts/init/system-prompt.md');
62
+ if (!systemPrompt) {
63
+ ctx.logger.warn('System prompt not found in canons. Run "collab update-canons" to refresh. Skipping analysis.');
64
+ return;
65
+ }
66
+ ctx.logger.info('Scanning repository structure...');
67
+ const repoCtx = (0, repo_scanner_1.scanRepository)((0, orchestrator_1.getRepoBaseDir)(ctx));
68
+ ctx.logger.info(`Repository: ${repoCtx.name} (${repoCtx.language}${repoCtx.framework ? ` / ${repoCtx.framework}` : ''}, ${repoCtx.totalSourceFiles} source files)`);
69
+ const messages = [
70
+ { role: 'system', content: systemPrompt },
71
+ { role: 'user', content: (0, repo_analysis_helpers_1.buildUserMessage)(repoCtx) },
72
+ ];
73
+ // Write debug prompt
74
+ const promptDebugPath = node_path_1.default.join(ctx.config.collabDir, `init-prompt-${client.provider}-cli.md`);
75
+ const promptContent = [
76
+ `# Init Prompt — ${client.provider} CLI`,
77
+ `<!-- Generated: ${new Date().toISOString()} -->`,
78
+ '',
79
+ '## System Prompt',
80
+ '',
81
+ messages[0].content,
82
+ '',
83
+ '## User Prompt',
84
+ '',
85
+ messages[1].content,
86
+ '',
87
+ ].join('\n');
88
+ ctx.executor.ensureDirectory(node_path_1.default.dirname(promptDebugPath));
89
+ ctx.executor.writeFile(promptDebugPath, promptContent, {
90
+ description: `write init prompt debug file for ${client.provider}`,
91
+ });
92
+ ctx.logger.info('Analyzing repository with AI provider...');
93
+ const response = await client.complete(messages, { maxTokens: 8192 });
94
+ let analysis;
95
+ try {
96
+ const jsonStr = (0, repo_analysis_helpers_1.extractJson)(response);
97
+ analysis = JSON.parse(jsonStr);
98
+ }
99
+ catch (err) {
100
+ ctx.logger.warn(`Failed to parse AI analysis response: ${String(err)}`);
101
+ ctx.logger.debug(`Raw response (first 500 chars): ${response.slice(0, 500)}`);
102
+ // Still generate AI helpers with empty analysis
103
+ (0, repo_analysis_helpers_1.generateAiHelpers)(ctx, repoCtx, {});
104
+ return;
105
+ }
106
+ // File-only mode writes to repoDir (docs/architecture/repo/)
107
+ (0, repo_analysis_helpers_1.writeAnalysisResults)(ctx, ctx.config.repoDir, analysis);
108
+ // Generate docs/ai/ helper files
109
+ (0, repo_analysis_helpers_1.generateAiHelpers)(ctx, repoCtx, analysis);
110
+ },
111
+ };
@@ -0,0 +1,112 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ exports.repoAnalysisStage = void 0;
7
+ const node_path_1 = __importDefault(require("node:path"));
8
+ const ai_client_1 = require("../lib/ai-client");
9
+ const canon_resolver_1 = require("../lib/canon-resolver");
10
+ const orchestrator_1 = require("../lib/orchestrator");
11
+ const providers_1 = require("../lib/providers");
12
+ const repo_analysis_helpers_1 = require("../lib/repo-analysis-helpers");
13
+ const repo_scanner_1 = require("../lib/repo-scanner");
14
/**
 * Returns true when 'copilot' is the sole enabled provider; it cannot run
 * AI completions, so the analysis stage bails out in that case.
 */
function onlyCopilotEnabled(providers) {
    if (providers.length !== 1) {
        return false;
    }
    return providers[0] === 'copilot';
}
20
+ exports.repoAnalysisStage = {
21
+ id: 'repo-analysis',
22
+ title: 'AI-powered repository analysis',
23
+ recovery: [
24
+ 'Ensure AI provider API keys are set in environment, or configure a CLI provider (codex, claude, gemini).',
25
+ 'Run collab init --resume to retry repository analysis.',
26
+ ],
27
+ run: async (ctx) => {
28
+ if (ctx.options?.skipAnalysis) {
29
+ ctx.logger.info('Skipping repository analysis by user choice.');
30
+ return;
31
+ }
32
+ const enabledProviders = (0, providers_1.getEnabledProviders)(ctx.config);
33
+ if (enabledProviders.length === 0 || onlyCopilotEnabled(enabledProviders)) {
34
+ ctx.logger.info('No AI-capable providers enabled; skipping repository analysis.');
35
+ // Still generate basic AI helper files
36
+ const repoCtx = (0, repo_scanner_1.scanRepository)((0, orchestrator_1.getRepoBaseDir)(ctx));
37
+ (0, repo_analysis_helpers_1.generateAiHelpers)(ctx, repoCtx, {});
38
+ return;
39
+ }
40
+ // In dry-run mode, just show what would happen
41
+ if (ctx.executor.dryRun) {
42
+ ctx.logger.info('[dry-run] Would analyze repository and generate architecture files.');
43
+ return;
44
+ }
45
+ const client = (0, ai_client_1.createFirstAvailableClient)(enabledProviders, ctx.config, ctx.logger);
46
+ if (!client) {
47
+ ctx.logger.warn('No AI provider credentials or CLI available; generating basic AI helpers only.');
48
+ const repoCtx = (0, repo_scanner_1.scanRepository)((0, orchestrator_1.getRepoBaseDir)(ctx));
49
+ (0, repo_analysis_helpers_1.generateAiHelpers)(ctx, repoCtx, {});
50
+ return;
51
+ }
52
+ // Load system prompt from canons — auto-clone if missing
53
+ if (!(0, canon_resolver_1.isCanonsAvailable)()) {
54
+ ctx.logger.info('Canons not installed. Downloading collab-architecture...');
55
+ const ok = (0, canon_resolver_1.syncCanons)((msg) => ctx.logger.info(msg));
56
+ if (!ok || !(0, canon_resolver_1.isCanonsAvailable)()) {
57
+ ctx.logger.warn('Failed to download canons. Skipping repository analysis.');
58
+ return;
59
+ }
60
+ }
61
+ const systemPrompt = (0, canon_resolver_1.resolveCanonFile)('prompts/init/system-prompt.md');
62
+ if (!systemPrompt) {
63
+ ctx.logger.warn('System prompt not found in canons (prompts/init/system-prompt.md). Run "collab update-canons" to refresh. Skipping repository analysis.');
64
+ return;
65
+ }
66
+ ctx.logger.info('Scanning repository structure...');
67
+ const repoCtx = (0, repo_scanner_1.scanRepository)((0, orchestrator_1.getRepoBaseDir)(ctx));
68
+ ctx.logger.info(`Repository: ${repoCtx.name} (${repoCtx.language}${repoCtx.framework ? ` / ${repoCtx.framework}` : ''}, ${repoCtx.totalSourceFiles} source files)`);
69
+ const messages = [
70
+ { role: 'system', content: systemPrompt },
71
+ { role: 'user', content: (0, repo_analysis_helpers_1.buildUserMessage)(repoCtx) },
72
+ ];
73
+ // Write the init prompt to .collab/ for debugging and tracking
74
+ const promptDebugPath = node_path_1.default.join(ctx.config.workspaceDir, '.collab', `init-prompt-${client.provider}-cli.md`);
75
+ const promptContent = [
76
+ `# Init Prompt — ${client.provider} CLI`,
77
+ `<!-- Generated: ${new Date().toISOString()} -->`,
78
+ '',
79
+ '## System Prompt',
80
+ '',
81
+ messages[0].content,
82
+ '',
83
+ '## User Prompt',
84
+ '',
85
+ messages[1].content,
86
+ '',
87
+ ].join('\n');
88
+ ctx.executor.ensureDirectory(node_path_1.default.dirname(promptDebugPath));
89
+ ctx.executor.writeFile(promptDebugPath, promptContent, {
90
+ description: `write init prompt debug file for ${client.provider}`,
91
+ });
92
+ ctx.logger.info('Analyzing repository with AI provider...');
93
+ const response = await client.complete(messages, { maxTokens: 8192 });
94
+ // Parse the response
95
+ let analysis;
96
+ try {
97
+ const jsonStr = (0, repo_analysis_helpers_1.extractJson)(response);
98
+ analysis = JSON.parse(jsonStr);
99
+ }
100
+ catch (err) {
101
+ ctx.logger.warn(`Failed to parse AI analysis response: ${String(err)}`);
102
+ ctx.logger.debug(`Raw response (first 500 chars): ${response.slice(0, 500)}`);
103
+ // Still generate AI helpers with empty analysis
104
+ (0, repo_analysis_helpers_1.generateAiHelpers)(ctx, repoCtx, {});
105
+ return;
106
+ }
107
+ // Indexed mode writes to repoDir (docs/architecture/repo/)
108
+ (0, repo_analysis_helpers_1.writeAnalysisResults)(ctx, ctx.config.repoDir, analysis);
109
+ // Generate docs/ai/ helper files
110
+ (0, repo_analysis_helpers_1.generateAiHelpers)(ctx, repoCtx, analysis);
111
+ },
112
+ };