@uxmaltech/collab-cli 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (109) hide show
  1. package/README.md +227 -0
  2. package/bin/collab +10 -0
  3. package/dist/cli.js +34 -0
  4. package/dist/commands/canon/index.js +16 -0
  5. package/dist/commands/canon/rebuild.js +95 -0
  6. package/dist/commands/compose/generate.js +63 -0
  7. package/dist/commands/compose/index.js +18 -0
  8. package/dist/commands/compose/validate.js +53 -0
  9. package/dist/commands/doctor.js +153 -0
  10. package/dist/commands/index.js +27 -0
  11. package/dist/commands/infra/down.js +23 -0
  12. package/dist/commands/infra/index.js +20 -0
  13. package/dist/commands/infra/shared.js +59 -0
  14. package/dist/commands/infra/status.js +64 -0
  15. package/dist/commands/infra/up.js +29 -0
  16. package/dist/commands/init.js +830 -0
  17. package/dist/commands/mcp/index.js +20 -0
  18. package/dist/commands/mcp/shared.js +57 -0
  19. package/dist/commands/mcp/start.js +45 -0
  20. package/dist/commands/mcp/status.js +62 -0
  21. package/dist/commands/mcp/stop.js +23 -0
  22. package/dist/commands/seed.js +55 -0
  23. package/dist/commands/uninstall.js +36 -0
  24. package/dist/commands/up.js +78 -0
  25. package/dist/commands/update-canons.js +48 -0
  26. package/dist/commands/upgrade.js +54 -0
  27. package/dist/index.js +14 -0
  28. package/dist/lib/ai-client.js +317 -0
  29. package/dist/lib/ansi.js +58 -0
  30. package/dist/lib/canon-index-generator.js +64 -0
  31. package/dist/lib/canon-index-targets.js +68 -0
  32. package/dist/lib/canon-resolver.js +262 -0
  33. package/dist/lib/canon-scaffold.js +57 -0
  34. package/dist/lib/cli-detection.js +149 -0
  35. package/dist/lib/command-context.js +23 -0
  36. package/dist/lib/compose-defaults.js +47 -0
  37. package/dist/lib/compose-env.js +24 -0
  38. package/dist/lib/compose-paths.js +36 -0
  39. package/dist/lib/compose-renderer.js +134 -0
  40. package/dist/lib/compose-validator.js +56 -0
  41. package/dist/lib/config.js +195 -0
  42. package/dist/lib/credentials.js +63 -0
  43. package/dist/lib/docker-checks.js +73 -0
  44. package/dist/lib/docker-compose.js +15 -0
  45. package/dist/lib/docker-status.js +151 -0
  46. package/dist/lib/domain-gen.js +376 -0
  47. package/dist/lib/ecosystem.js +150 -0
  48. package/dist/lib/env-file.js +77 -0
  49. package/dist/lib/errors.js +30 -0
  50. package/dist/lib/executor.js +85 -0
  51. package/dist/lib/github-auth.js +204 -0
  52. package/dist/lib/hash.js +7 -0
  53. package/dist/lib/health-checker.js +140 -0
  54. package/dist/lib/logger.js +87 -0
  55. package/dist/lib/mcp-client.js +88 -0
  56. package/dist/lib/mode.js +36 -0
  57. package/dist/lib/model-listing.js +102 -0
  58. package/dist/lib/model-registry.js +55 -0
  59. package/dist/lib/npm-operations.js +69 -0
  60. package/dist/lib/orchestrator.js +170 -0
  61. package/dist/lib/parsers.js +42 -0
  62. package/dist/lib/port-resolver.js +57 -0
  63. package/dist/lib/preconditions.js +35 -0
  64. package/dist/lib/preflight.js +88 -0
  65. package/dist/lib/process.js +6 -0
  66. package/dist/lib/prompt.js +125 -0
  67. package/dist/lib/providers.js +117 -0
  68. package/dist/lib/repo-analysis-helpers.js +379 -0
  69. package/dist/lib/repo-scanner.js +195 -0
  70. package/dist/lib/service-health.js +79 -0
  71. package/dist/lib/shell.js +49 -0
  72. package/dist/lib/state.js +38 -0
  73. package/dist/lib/update-checker.js +130 -0
  74. package/dist/lib/version.js +27 -0
  75. package/dist/stages/agent-skills-setup.js +301 -0
  76. package/dist/stages/assistant-setup.js +325 -0
  77. package/dist/stages/canon-ingest.js +249 -0
  78. package/dist/stages/canon-rebuild-graph.js +33 -0
  79. package/dist/stages/canon-rebuild-indexes.js +40 -0
  80. package/dist/stages/canon-rebuild-snapshot.js +75 -0
  81. package/dist/stages/canon-rebuild-validate.js +57 -0
  82. package/dist/stages/canon-rebuild-vectors.js +30 -0
  83. package/dist/stages/canon-scaffold.js +15 -0
  84. package/dist/stages/canon-sync.js +49 -0
  85. package/dist/stages/ci-setup.js +56 -0
  86. package/dist/stages/domain-gen.js +363 -0
  87. package/dist/stages/graph-seed.js +26 -0
  88. package/dist/stages/repo-analysis-fileonly.js +111 -0
  89. package/dist/stages/repo-analysis.js +112 -0
  90. package/dist/stages/repo-scaffold.js +110 -0
  91. package/dist/templates/canon/contracts-readme.js +39 -0
  92. package/dist/templates/canon/domain-readme.js +40 -0
  93. package/dist/templates/canon/evolution/changelog.js +53 -0
  94. package/dist/templates/canon/governance/confidence-levels.js +38 -0
  95. package/dist/templates/canon/governance/implementation-process.js +34 -0
  96. package/dist/templates/canon/governance/review-process.js +29 -0
  97. package/dist/templates/canon/governance/schema-versioning.js +25 -0
  98. package/dist/templates/canon/governance/what-enters-the-canon.js +44 -0
  99. package/dist/templates/canon/index.js +28 -0
  100. package/dist/templates/canon/knowledge-readme.js +129 -0
  101. package/dist/templates/canon/system-prompt.js +101 -0
  102. package/dist/templates/ci/architecture-merge.js +29 -0
  103. package/dist/templates/ci/architecture-pr.js +26 -0
  104. package/dist/templates/ci/index.js +7 -0
  105. package/dist/templates/consolidated.js +114 -0
  106. package/dist/templates/infra.js +90 -0
  107. package/dist/templates/mcp.js +32 -0
  108. package/install.sh +455 -0
  109. package/package.json +48 -0
@@ -0,0 +1,102 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ exports.listModels = listModels;
7
+ const node_https_1 = __importDefault(require("node:https"));
8
+ const node_url_1 = require("node:url");
9
/**
 * Performs an HTTPS GET and buffers the entire response body.
 * Resolves with `{ statusCode, body }`; rejects on socket errors or when
 * no response completes within `timeoutMs`.
 */
function httpsGet(url, headers = {}, timeoutMs = 15_000) {
    return new Promise((resolve, reject) => {
        const target = new node_url_1.URL(url);
        const requestOptions = {
            hostname: target.hostname,
            // Default to 443 when the URL carries no explicit port.
            port: target.port || 443,
            path: target.pathname + target.search,
            method: 'GET',
            headers: { Accept: 'application/json', ...headers },
        };
        const req = node_https_1.default.request(requestOptions, (res) => {
            const chunks = [];
            res.on('data', (chunk) => {
                chunks.push(chunk.toString());
            });
            res.on('end', () => {
                resolve({ statusCode: res.statusCode ?? 0, body: chunks.join('') });
            });
        });
        // Destroying with an Error makes the 'error' handler reject the promise.
        req.setTimeout(timeoutMs, () => {
            req.destroy(new Error('Model listing request timed out'));
        });
        req.on('error', reject);
        req.end();
    });
}
34
// ---------- Gemini ----------
/**
 * Lists Gemini models that support `generateContent`.
 * Throws on a non-2xx response (e.g. invalid API key).
 * @param {string} apiKey - Google Generative Language API key.
 * @returns {Promise<Array<{id: string, name: string}>>}
 */
async function listGeminiModels(apiKey) {
    // Encode the key: reserved characters would otherwise corrupt the query string.
    const url = `https://generativelanguage.googleapis.com/v1beta/models?key=${encodeURIComponent(apiKey)}&pageSize=100`;
    const res = await httpsGet(url);
    if (res.statusCode < 200 || res.statusCode >= 300) {
        throw new Error(`Gemini API error (${res.statusCode})`);
    }
    const parsed = JSON.parse(res.body);
    const models = (parsed.models ?? []);
    return models
        .filter((m) => m.supportedGenerationMethods?.includes('generateContent'))
        .filter((m) => m.name.includes('gemini'))
        .map((m) => ({
            // API returns names like "models/gemini-..." — strip the prefix.
            id: m.name.replace('models/', ''),
            name: m.displayName,
        }));
}
51
// ---------- OpenAI ----------
/**
 * Lists OpenAI chat-capable models sorted by id.
 * Throws on a non-2xx response (e.g. invalid API key).
 */
async function listOpenAiModels(apiKey) {
    const res = await httpsGet('https://api.openai.com/v1/models', { Authorization: `Bearer ${apiKey}` });
    if (res.statusCode < 200 || res.statusCode >= 300) {
        throw new Error(`OpenAI API error (${res.statusCode})`);
    }
    const allModels = JSON.parse(res.body).data ?? [];
    // Filter to chat/completion models, exclude fine-tuned, audio, realtime, embedding
    const chatPrefixes = ['gpt-4', 'gpt-3.5', 'o3', 'o4', 'o1'];
    const excludePatterns = ['realtime', 'audio', 'embed', 'tts', 'whisper', 'dall-e', 'davinci', 'babbage'];
    const isChatModel = (m) => chatPrefixes.some((p) => m.id.startsWith(p)) &&
        !excludePatterns.some((e) => m.id.includes(e));
    return allModels
        .filter(isChatModel)
        .map((m) => ({ id: m.id }))
        .sort((a, b) => a.id.localeCompare(b.id));
}
69
// ---------- Anthropic ----------
/**
 * Lists Anthropic models via the public models endpoint.
 * Throws on a non-2xx response (e.g. invalid API key).
 */
async function listAnthropicModels(apiKey) {
    const res = await httpsGet('https://api.anthropic.com/v1/models?limit=100', {
        'x-api-key': apiKey,
        'anthropic-version': '2023-06-01',
    });
    if (res.statusCode < 200 || res.statusCode >= 300) {
        throw new Error(`Anthropic API error (${res.statusCode})`);
    }
    const entries = JSON.parse(res.body).data ?? [];
    return entries.map((entry) => ({
        id: entry.id,
        name: entry.display_name,
    }));
}
86
// ---------- Public API ----------
/**
 * Lists available models from a provider's API.
 * Validates the API key in the process — throws on invalid key.
 * @param {'gemini'|'codex'|'claude'|'copilot'} provider - CLI provider id.
 * @param {string} apiKey - Provider API key (unused for copilot).
 * @returns {Promise<Array<{id: string, name?: string}>>}
 * @throws {Error} On API errors or an unrecognized provider.
 */
async function listModels(provider, apiKey) {
    switch (provider) {
        case 'gemini':
            return listGeminiModels(apiKey);
        case 'codex':
            return listOpenAiModels(apiKey);
        case 'claude':
            return listAnthropicModels(apiKey);
        case 'copilot':
            // No listing endpoint is queried for copilot; an empty set is returned.
            return [];
        default:
            // Previously an unknown provider fell through and resolved `undefined`;
            // fail loudly instead so callers see misconfiguration immediately.
            throw new Error(`Unknown provider: ${provider}`);
    }
}
@@ -0,0 +1,55 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ exports.getRegistryPath = getRegistryPath;
7
+ exports.loadRegistry = loadRegistry;
8
+ exports.saveProviderModels = saveProviderModels;
9
+ exports.loadProviderModels = loadProviderModels;
10
+ const node_fs_1 = __importDefault(require("node:fs"));
11
+ const node_path_1 = __importDefault(require("node:path"));
12
/** Absolute path of the model registry file inside the collab directory. */
function getRegistryPath(config) {
    const { collabDir } = config;
    return node_path_1.default.join(collabDir, 'models.json');
}
15
/**
 * Loads the full model registry. Returns an empty registry if the file
 * does not exist or cannot be parsed.
 */
function loadRegistry(config) {
    // Fresh, empty registry used for both the missing-file and parse-error cases.
    const emptyRegistry = () => ({ updatedAt: new Date().toISOString(), providers: {} });
    const registryPath = getRegistryPath(config);
    try {
        if (!node_fs_1.default.existsSync(registryPath)) {
            return emptyRegistry();
        }
        return JSON.parse(node_fs_1.default.readFileSync(registryPath, 'utf8'));
    }
    catch {
        // Corrupt or unreadable file: fall back to an empty registry.
        return emptyRegistry();
    }
}
32
/**
 * Saves model listing results for a provider into the registry.
 * Entries for other providers are preserved (read-modify-write).
 */
function saveProviderModels(config, provider, models) {
    const registry = loadRegistry(config);
    const timestamp = new Date().toISOString();
    registry.updatedAt = timestamp;
    registry.providers[provider] = { queriedAt: timestamp, models };
    const registryPath = getRegistryPath(config);
    // Make sure the parent directory exists before writing.
    node_fs_1.default.mkdirSync(node_path_1.default.dirname(registryPath), { recursive: true });
    node_fs_1.default.writeFileSync(registryPath, JSON.stringify(registry, null, 2), 'utf8');
}
48
/**
 * Loads stored models for a single provider.
 * @returns The provider's registry entry, or null when no entry exists.
 */
function loadProviderModels(config, provider) {
    const entry = loadRegistry(config).providers[provider];
    return entry ?? null;
}
@@ -0,0 +1,69 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.requireNpm = requireNpm;
4
+ exports.npmGlobalInstall = npmGlobalInstall;
5
+ exports.npmGlobalUninstall = npmGlobalUninstall;
6
+ const node_child_process_1 = require("node:child_process");
7
+ const ansi_1 = require("./ansi");
8
+ const shell_1 = require("./shell");
9
+ const NPM_PACKAGE = '@uxmaltech/collab-cli';
10
+ const PERMISSION_ERROR = /EACCES|permission denied/i;
11
/**
 * Resolves the npm binary path or writes an error and returns null.
 */
function requireNpm() {
    const npmPath = (0, shell_1.resolveCommandPath)('npm');
    if (npmPath) {
        return npmPath;
    }
    process.stderr.write((0, ansi_1.red)(`${ansi_1.CROSS} npm not found in PATH. Install Node.js/npm first.\n`));
    return npmPath;
}
21
/**
 * Runs `npm install -g @uxmaltech/collab-cli@<version>`.
 * Returns true on success, false on failure (with error already printed).
 */
function npmGlobalInstall(npmPath, version) {
    const spec = `${NPM_PACKAGE}@${version}`;
    try {
        (0, node_child_process_1.execFileSync)(npmPath, ['install', '-g', spec], {
            stdio: 'inherit',
            timeout: 60_000,
        });
        return true;
    }
    catch (error) {
        const message = error instanceof Error ? error.message : String(error);
        // EACCES-style failures get a sudo suggestion; everything else is reported as-is.
        const report = PERMISSION_ERROR.test(message)
            ? (0, ansi_1.red)(`${ansi_1.CROSS} Permission denied. Try:\n`) +
                ` sudo npm install -g ${spec}\n`
            : (0, ansi_1.red)(`${ansi_1.CROSS} Install failed: ${message}\n`);
        process.stderr.write(report);
        return false;
    }
}
46
/**
 * Runs `npm uninstall -g @uxmaltech/collab-cli`.
 * Returns true on success, false on failure (with error already printed).
 */
function npmGlobalUninstall(npmPath) {
    try {
        (0, node_child_process_1.execFileSync)(npmPath, ['uninstall', '-g', NPM_PACKAGE], {
            stdio: 'inherit',
            timeout: 60_000,
        });
        return true;
    }
    catch (error) {
        const message = error instanceof Error ? error.message : String(error);
        // Mirror npmGlobalInstall: suggest sudo for permission errors.
        const report = PERMISSION_ERROR.test(message)
            ? (0, ansi_1.red)(`${ansi_1.CROSS} Permission denied. Try:\n`) +
                ` sudo npm uninstall -g ${NPM_PACKAGE}\n`
            : (0, ansi_1.red)(`${ansi_1.CROSS} Uninstall failed: ${message}\n`);
        process.stderr.write(report);
        return false;
    }
}
@@ -0,0 +1,170 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.getRepoBaseDir = getRepoBaseDir;
4
+ exports.runOrchestration = runOrchestration;
5
+ exports.runPerRepoOrchestration = runPerRepoOrchestration;
6
+ const errors_1 = require("./errors");
7
+ const state_1 = require("./state");
8
/**
 * Returns the effective base directory for stages that write per-repo files.
 * In workspace mode (repoConfig present) this is the individual repo dir;
 * in single-repo mode it falls back to workspaceDir.
 */
function getRepoBaseDir(ctx) {
    const repoConfig = ctx.options?._repoConfig;
    if (repoConfig) {
        return repoConfig.repoDir;
    }
    return ctx.config.workspaceDir;
}
17
/**
 * Inspects stderr for well-known platform-specific errors and returns
 * additional recovery hints that are appended to the stage's recovery list.
 */
function detectPlatformHints(stderr) {
    if (!stderr) {
        return [];
    }
    // Each rule pairs a match predicate with the hint it produces; order matters
    // because hints are emitted in declaration order.
    const rules = [
        {
            // macOS: Docker credential helper cannot access the keychain
            applies: process.platform === 'darwin' &&
                /keychain.*cannot be accessed|error getting credentials/i.test(stderr),
            hint: 'macOS keychain is locked. Run: security unlock-keychain ~/Library/Keychains/login.keychain-db',
        },
        {
            // Docker daemon not running
            applies: /cannot connect to.*docker daemon|is the docker daemon running/i.test(stderr),
            hint: 'Docker daemon is not running. Start Docker Desktop or run: sudo systemctl start docker',
        },
        {
            // Docker registry authentication failure (only in Docker/image contexts)
            applies: /(denied: requested access to the resource|unauthorized: authentication required|pull access denied)/i.test(stderr) &&
                /(docker|ghcr\.io|image|manifest|pull)/i.test(stderr),
            hint: 'Docker registry auth failed. Run: docker login ghcr.io',
        },
        {
            // ARM64 / multi-platform manifest mismatch
            applies: /no matching manifest.*arm64|no matching manifest for.*platform/i.test(stderr),
            hint: 'Image not available for this platform. Verify the image supports linux/arm64 or rebuild with --platform.',
        },
        {
            // Network errors during image pull (only in Docker/image contexts)
            applies: /(network.*unreachable|timeout.*pull|dial tcp.*connection refused)/i.test(stderr) &&
                /(docker|image|pull|manifest|registry)/i.test(stderr),
            hint: 'Network error pulling images. Check internet connection and DNS settings.',
        },
    ];
    return rules.filter((rule) => rule.applies).map((rule) => rule.hint);
}
50
/**
 * Renders a stage failure as a multi-line, human-readable report:
 * headline, optional command, optional stderr, and recovery actions
 * (stage-declared steps plus platform-specific hints from stderr).
 */
function formatFailure(failure) {
    const sections = [`Stage '${failure.stage}' failed.`, failure.message];
    if (failure.command) {
        sections.push('', `Command: ${failure.command}`);
    }
    const stderrText = failure.stderr?.trim();
    if (stderrText) {
        sections.push('', 'stderr:', stderrText);
    }
    const recoverySteps = [...failure.recovery, ...detectPlatformHints(failure.stderr)];
    if (recoverySteps.length > 0) {
        sections.push('', 'Recovery actions:');
        for (const step of recoverySteps) {
            sections.push(`- ${step}`);
        }
    }
    return sections.join('\n');
}
74
/**
 * Normalizes a possibly-missing workflow entry into a full state object,
 * defaulting to no completed stages and a fresh timestamp.
 */
function currentWorkflowState(workflow) {
    const source = workflow ?? {};
    return {
        completedStages: source.completedStages ?? [],
        updatedAt: source.updatedAt ?? new Date().toISOString(),
        failure: source.failure,
    };
}
81
/**
 * Executes the given stages in order, persisting progress after each one so
 * an interrupted workflow can be resumed. On stage failure, records the
 * failure in state and throws a CliError with a formatted recovery report.
 */
async function runOrchestration(options, stages) {
    const state = (0, state_1.loadState)(options.config);
    // Writes the workflow entry and flushes the whole state file.
    const persist = (workflow) => {
        state.workflows[options.workflowId] = workflow;
        (0, state_1.saveState)(options.config, state, options.executor);
    };
    const previous = currentWorkflowState(state.workflows[options.workflowId]);
    // Only honor previously-completed stages when resuming.
    const completed = new Set(options.resume ? previous.completedStages : []);
    if (options.mode) {
        options.logger.workflowHeader(options.workflowId, options.mode);
    }
    if (options.resume && completed.size > 0) {
        options.logger.info(`Resuming '${options.workflowId}': ${completed.size}/${stages.length} stages complete.`);
        for (const stage of stages) {
            const status = completed.has(stage.id) ? '[done]' : '[pending]';
            options.logger.info(` ${status} ${stage.title}`);
        }
    }
    let position = 0;
    for (const stage of stages) {
        position++;
        if (options.resume && completed.has(stage.id)) {
            options.logger.info(`Skipping completed stage '${stage.title}'`);
            continue;
        }
        options.logger.stageHeader(position, stages.length, stage.title);
        try {
            await stage.run({
                config: options.config,
                executor: options.executor,
                logger: options.logger,
                options: options.stageOptions,
            });
            completed.add(stage.id);
            // Checkpoint after every successful stage for --resume.
            persist({
                completedStages: [...completed],
                updatedAt: new Date().toISOString(),
            });
            options.logger.step(true, stage.title);
        }
        catch (error) {
            const failure = {
                stage: stage.id,
                message: error instanceof Error ? error.message : String(error),
                recovery: stage.recovery.length > 0 ? stage.recovery : ['Run the workflow again with --resume.'],
                failedAt: new Date().toISOString(),
            };
            // Enrich the record with command details when available.
            if (error instanceof errors_1.CommandExecutionError) {
                failure.command = error.details.command;
                failure.stderr = error.details.stderr || error.details.stdout;
            }
            persist({
                completedStages: [...completed],
                updatedAt: new Date().toISOString(),
                failure,
            });
            throw new errors_1.CliError(formatFailure(failure));
        }
    }
    // Final save clears any stale failure from a previous run.
    persist({
        completedStages: [...completed],
        updatedAt: new Date().toISOString(),
    });
}
145
/**
 * Runs a set of stages scoped to a single repo inside a workspace.
 *
 * - Uses a namespaced workflow ID (`{baseId}:{repoName}`) for resume support.
 * - Overrides `config.repoDir` and `config.aiDir` so existing stages write
 *   into the repo instead of the workspace root.
 * - Passes the `RepoConfig` via `stageOptions._repoConfig`.
 */
async function runPerRepoOrchestration(baseOptions, repoConfig, stages) {
    const repoAwareConfig = {
        ...baseOptions.config,
        repoDir: repoConfig.architectureRepoDir,
        aiDir: repoConfig.aiDir,
    };
    await runOrchestration({
        ...baseOptions,
        workflowId: `${baseOptions.workflowId}:${repoConfig.name}`,
        config: repoAwareConfig,
        stageOptions: { ...baseOptions.stageOptions, _repoConfig: repoConfig },
    }, stages);
}
@@ -0,0 +1,42 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.parseNumber = parseNumber;
4
+ exports.parsePositiveInt = parsePositiveInt;
5
+ exports.parseHealthOptions = parseHealthOptions;
6
+ const errors_1 = require("./errors");
7
/**
 * Parses a string to a number, returning the fallback when the value
 * is undefined/empty or not a valid integer.
 */
function parseNumber(value, fallback) {
    if (!value) {
        return fallback;
    }
    const parsed = Number.parseInt(value, 10);
    if (Number.isNaN(parsed)) {
        return fallback;
    }
    return parsed;
}
18
/**
 * Parses a string to a positive integer, throwing a CliError when the
 * value is present but invalid. Returns the fallback for undefined values.
 */
function parsePositiveInt(flagName, value, fallback) {
    if (value === undefined) {
        return fallback;
    }
    const parsed = Number(value);
    const isValid = Number.isInteger(parsed) && parsed > 0;
    if (!isValid) {
        throw new errors_1.CliError(`${flagName} must be a positive integer.`);
    }
    return parsed;
}
32
/**
 * Builds a ServiceHealthOptions object from raw CLI option strings.
 * Uses strict positive-integer validation to match `mcp start` behavior.
 */
function parseHealthOptions(options) {
    const timeoutMs = parsePositiveInt('--timeout-ms', options.timeoutMs, 5_000);
    const retries = parsePositiveInt('--retries', options.retries, 15);
    const retryDelayMs = parsePositiveInt('--retry-delay-ms', options.retryDelayMs, 2_000);
    return { timeoutMs, retries, retryDelayMs };
}
@@ -0,0 +1,57 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ exports.resolveAvailablePorts = resolveAvailablePorts;
7
+ const node_net_1 = __importDefault(require("node:net"));
8
+ const DEFAULT_PORTS = {
9
+ qdrant: 6333,
10
+ nebula: 9669,
11
+ mcp: 7337,
12
+ };
13
/**
 * Tests whether a TCP port is available on 127.0.0.1.
 * Returns true if the port is free (a probe server could bind to it).
 */
function isPortAvailable(port) {
    return new Promise((resolve) => {
        const probe = node_net_1.default.createServer();
        // Bind failure (e.g. EADDRINUSE) means the port is taken.
        probe.once('error', () => {
            resolve(false);
        });
        probe.once('listening', () => {
            // Successfully bound: release the port before resolving.
            probe.close(() => resolve(true));
        });
        probe.listen(port, '127.0.0.1');
    });
}
27
/**
 * Finds the next available port starting from `start`, incrementing by 1.
 * Skips ports already claimed in `reserved` to avoid collisions when
 * multiple services are resolved in the same run.
 * Throws if no port is found within 100 attempts.
 */
async function findAvailablePort(start, reserved) {
    const maxAttempts = 100;
    for (let offset = 0; offset < maxAttempts; offset++) {
        const candidate = start + offset;
        if (reserved.has(candidate)) {
            continue;
        }
        if (await isPortAvailable(candidate)) {
            return candidate;
        }
    }
    throw new Error(`No available port found in range ${start}–${start + 99}`);
}
43
/**
 * Resolves available ports for Qdrant, NebulaGraph, and MCP.
 * If default ports are in use, automatically increments to find free ones.
 * Ports are resolved sequentially to avoid assigning the same port to
 * multiple services.
 */
async function resolveAvailablePorts(defaults = DEFAULT_PORTS) {
    const reserved = new Set();
    // Resolve one service's port and mark it taken for subsequent lookups.
    const claim = async (startPort) => {
        const port = await findAvailablePort(startPort, reserved);
        reserved.add(port);
        return port;
    };
    const qdrant = await claim(defaults.qdrant);
    const nebula = await claim(defaults.nebula);
    const mcp = await claim(defaults.mcp);
    return { qdrant, nebula, mcp };
}
@@ -0,0 +1,35 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ exports.ensureCommandAvailable = ensureCommandAvailable;
7
+ exports.ensureFileExists = ensureFileExists;
8
+ exports.ensureWritableDirectory = ensureWritableDirectory;
9
+ const node_fs_1 = __importDefault(require("node:fs"));
10
+ const errors_1 = require("./errors");
11
+ const shell_1 = require("./shell");
12
/**
 * Resolves a command from PATH. Throws a CliError when missing, unless
 * `options.dryRun` is set, in which case null is returned instead.
 */
function ensureCommandAvailable(commandName, options = {}) {
    const resolved = (0, shell_1.resolveCommandPath)(commandName);
    if (resolved) {
        return resolved;
    }
    if (options.dryRun) {
        // Dry runs tolerate missing tools; callers get null instead of an error.
        return null;
    }
    throw new errors_1.CliError(`Required command '${commandName}' is not available in PATH. Install it and retry.`);
}
22
/** Throws a CliError when `filePath` does not exist on disk. */
function ensureFileExists(filePath, label = 'File') {
    const exists = node_fs_1.default.existsSync(filePath);
    if (!exists) {
        throw new errors_1.CliError(`${label} not found: ${filePath}`);
    }
}
27
/**
 * Creates the directory (recursively) if needed, then verifies it is
 * writable. Throws a CliError when the write-access check fails.
 */
function ensureWritableDirectory(directoryPath) {
    node_fs_1.default.mkdirSync(directoryPath, { recursive: true });
    let writable = true;
    try {
        node_fs_1.default.accessSync(directoryPath, node_fs_1.default.constants.W_OK);
    }
    catch {
        writable = false;
    }
    if (!writable) {
        throw new errors_1.CliError(`Directory is not writable: ${directoryPath}`);
    }
}
@@ -0,0 +1,88 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.runPreflightChecks = runPreflightChecks;
4
+ exports.assertPreflightChecks = assertPreflightChecks;
5
+ const docker_checks_1 = require("./docker-checks");
6
+ const errors_1 = require("./errors");
7
+ const shell_1 = require("./shell");
8
/**
 * Builds a preflight result for a single command: fails when the binary
 * is missing from PATH or when `<command> --version` exits non-zero.
 */
function commandCheck(commandName, fix, executor) {
    const resolved = (0, shell_1.resolveCommandPath)(commandName);
    if (!resolved) {
        return {
            id: commandName,
            ok: false,
            detail: `${commandName} not found in PATH`,
            fix,
        };
    }
    const version = executor.run(commandName, ['--version'], { check: false, verboseOnly: true });
    const ok = version.status === 0;
    // Prefer the tool's own version output; fall back to a synthesized detail.
    let detail = version.stdout.trim() || version.stderr.trim();
    if (!detail) {
        detail = ok ? `found at ${resolved}` : `${commandName} --version exited with code ${version.status}`;
    }
    return { id: commandName, ok, detail, fix };
}
28
/**
 * Verifies the `docker compose` plugin: fails when the docker binary is
 * missing or when `docker compose version` exits non-zero.
 */
function dockerComposeCheck(executor) {
    const id = 'docker-compose-plugin';
    if (!(0, shell_1.resolveCommandPath)('docker')) {
        return {
            id,
            ok: false,
            detail: 'docker command not found; cannot verify compose plugin',
            fix: 'Install Docker Desktop or Docker Engine with compose plugin.',
        };
    }
    const version = executor.run('docker', ['compose', 'version'], { check: false, verboseOnly: true });
    const output = `${version.stdout}\n${version.stderr}`.trim();
    if (version.status === 0) {
        return {
            id,
            ok: true,
            detail: output || 'docker compose plugin available',
            fix: 'Install Docker Compose plugin.',
        };
    }
    return {
        id,
        ok: false,
        detail: output || 'docker compose plugin unavailable',
        fix: 'Install Docker Compose plugin and verify with: docker compose version',
    };
}
55
/**
 * Runs the full preflight suite (node, npm, python3, docker, compose).
 * In indexed mode it additionally probes the Docker daemon — but only
 * when the docker binary check itself passed.
 */
function runPreflightChecks(executor, opts) {
    const results = [
        commandCheck('node', 'Install Node.js >= 20.', executor),
        commandCheck('npm', 'Install npm >= 10 (bundled with modern Node.js).', executor),
        commandCheck('python3', 'Install Python 3 (used by ingestion and tooling).', executor),
        commandCheck('docker', 'Install Docker Desktop or Docker Engine.', executor),
        dockerComposeCheck(executor),
    ];
    if (opts?.mode === 'indexed') {
        // Skip the daemon probe if the docker binary is missing — already reported above.
        const dockerBinaryOk = results.find((r) => r.id === 'docker')?.ok;
        if (dockerBinaryOk) {
            const daemon = (0, docker_checks_1.checkDockerDaemon)(executor);
            results.push({
                id: 'docker-daemon',
                ok: daemon.ok,
                detail: daemon.ok
                    ? `Docker daemon v${daemon.version}`
                    : (daemon.error ?? 'Docker daemon unreachable'),
                fix: 'Start Docker Desktop or run: sudo systemctl start docker',
            });
        }
    }
    return results;
}
76
/**
 * Logs each preflight result as [PASS]/[FAIL] (with a fix line for
 * failures) and throws a CliError when any check failed.
 */
function assertPreflightChecks(results, logger) {
    let failures = 0;
    for (const item of results) {
        const prefix = item.ok ? '[PASS]' : '[FAIL]';
        logger.result(`${prefix} ${item.id}: ${item.detail}`);
        if (!item.ok) {
            failures++;
            logger.result(` fix: ${item.fix}`);
        }
    }
    if (failures > 0) {
        throw new errors_1.CliError(`Preflight failed with ${failures} issue(s).`);
    }
}
@@ -0,0 +1,6 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.runProcess = runProcess;
4
/**
 * Thin delegation layer: spawns a process through the shared executor so
 * all process launches flow through one entry point.
 * @returns Whatever `executor.run` returns for the invocation.
 */
function runProcess(executor, commandName, args, options = {}) {
    return executor.run(commandName, args, options);
}