@uxmaltech/collab-cli 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (109) hide show
  1. package/README.md +227 -0
  2. package/bin/collab +10 -0
  3. package/dist/cli.js +34 -0
  4. package/dist/commands/canon/index.js +16 -0
  5. package/dist/commands/canon/rebuild.js +95 -0
  6. package/dist/commands/compose/generate.js +63 -0
  7. package/dist/commands/compose/index.js +18 -0
  8. package/dist/commands/compose/validate.js +53 -0
  9. package/dist/commands/doctor.js +153 -0
  10. package/dist/commands/index.js +27 -0
  11. package/dist/commands/infra/down.js +23 -0
  12. package/dist/commands/infra/index.js +20 -0
  13. package/dist/commands/infra/shared.js +59 -0
  14. package/dist/commands/infra/status.js +64 -0
  15. package/dist/commands/infra/up.js +29 -0
  16. package/dist/commands/init.js +830 -0
  17. package/dist/commands/mcp/index.js +20 -0
  18. package/dist/commands/mcp/shared.js +57 -0
  19. package/dist/commands/mcp/start.js +45 -0
  20. package/dist/commands/mcp/status.js +62 -0
  21. package/dist/commands/mcp/stop.js +23 -0
  22. package/dist/commands/seed.js +55 -0
  23. package/dist/commands/uninstall.js +36 -0
  24. package/dist/commands/up.js +78 -0
  25. package/dist/commands/update-canons.js +48 -0
  26. package/dist/commands/upgrade.js +54 -0
  27. package/dist/index.js +14 -0
  28. package/dist/lib/ai-client.js +317 -0
  29. package/dist/lib/ansi.js +58 -0
  30. package/dist/lib/canon-index-generator.js +64 -0
  31. package/dist/lib/canon-index-targets.js +68 -0
  32. package/dist/lib/canon-resolver.js +262 -0
  33. package/dist/lib/canon-scaffold.js +57 -0
  34. package/dist/lib/cli-detection.js +149 -0
  35. package/dist/lib/command-context.js +23 -0
  36. package/dist/lib/compose-defaults.js +47 -0
  37. package/dist/lib/compose-env.js +24 -0
  38. package/dist/lib/compose-paths.js +36 -0
  39. package/dist/lib/compose-renderer.js +134 -0
  40. package/dist/lib/compose-validator.js +56 -0
  41. package/dist/lib/config.js +195 -0
  42. package/dist/lib/credentials.js +63 -0
  43. package/dist/lib/docker-checks.js +73 -0
  44. package/dist/lib/docker-compose.js +15 -0
  45. package/dist/lib/docker-status.js +151 -0
  46. package/dist/lib/domain-gen.js +376 -0
  47. package/dist/lib/ecosystem.js +150 -0
  48. package/dist/lib/env-file.js +77 -0
  49. package/dist/lib/errors.js +30 -0
  50. package/dist/lib/executor.js +85 -0
  51. package/dist/lib/github-auth.js +204 -0
  52. package/dist/lib/hash.js +7 -0
  53. package/dist/lib/health-checker.js +140 -0
  54. package/dist/lib/logger.js +87 -0
  55. package/dist/lib/mcp-client.js +88 -0
  56. package/dist/lib/mode.js +36 -0
  57. package/dist/lib/model-listing.js +102 -0
  58. package/dist/lib/model-registry.js +55 -0
  59. package/dist/lib/npm-operations.js +69 -0
  60. package/dist/lib/orchestrator.js +170 -0
  61. package/dist/lib/parsers.js +42 -0
  62. package/dist/lib/port-resolver.js +57 -0
  63. package/dist/lib/preconditions.js +35 -0
  64. package/dist/lib/preflight.js +88 -0
  65. package/dist/lib/process.js +6 -0
  66. package/dist/lib/prompt.js +125 -0
  67. package/dist/lib/providers.js +117 -0
  68. package/dist/lib/repo-analysis-helpers.js +379 -0
  69. package/dist/lib/repo-scanner.js +195 -0
  70. package/dist/lib/service-health.js +79 -0
  71. package/dist/lib/shell.js +49 -0
  72. package/dist/lib/state.js +38 -0
  73. package/dist/lib/update-checker.js +130 -0
  74. package/dist/lib/version.js +27 -0
  75. package/dist/stages/agent-skills-setup.js +301 -0
  76. package/dist/stages/assistant-setup.js +325 -0
  77. package/dist/stages/canon-ingest.js +249 -0
  78. package/dist/stages/canon-rebuild-graph.js +33 -0
  79. package/dist/stages/canon-rebuild-indexes.js +40 -0
  80. package/dist/stages/canon-rebuild-snapshot.js +75 -0
  81. package/dist/stages/canon-rebuild-validate.js +57 -0
  82. package/dist/stages/canon-rebuild-vectors.js +30 -0
  83. package/dist/stages/canon-scaffold.js +15 -0
  84. package/dist/stages/canon-sync.js +49 -0
  85. package/dist/stages/ci-setup.js +56 -0
  86. package/dist/stages/domain-gen.js +363 -0
  87. package/dist/stages/graph-seed.js +26 -0
  88. package/dist/stages/repo-analysis-fileonly.js +111 -0
  89. package/dist/stages/repo-analysis.js +112 -0
  90. package/dist/stages/repo-scaffold.js +110 -0
  91. package/dist/templates/canon/contracts-readme.js +39 -0
  92. package/dist/templates/canon/domain-readme.js +40 -0
  93. package/dist/templates/canon/evolution/changelog.js +53 -0
  94. package/dist/templates/canon/governance/confidence-levels.js +38 -0
  95. package/dist/templates/canon/governance/implementation-process.js +34 -0
  96. package/dist/templates/canon/governance/review-process.js +29 -0
  97. package/dist/templates/canon/governance/schema-versioning.js +25 -0
  98. package/dist/templates/canon/governance/what-enters-the-canon.js +44 -0
  99. package/dist/templates/canon/index.js +28 -0
  100. package/dist/templates/canon/knowledge-readme.js +129 -0
  101. package/dist/templates/canon/system-prompt.js +101 -0
  102. package/dist/templates/ci/architecture-merge.js +29 -0
  103. package/dist/templates/ci/architecture-pr.js +26 -0
  104. package/dist/templates/ci/index.js +7 -0
  105. package/dist/templates/consolidated.js +114 -0
  106. package/dist/templates/infra.js +90 -0
  107. package/dist/templates/mcp.js +32 -0
  108. package/install.sh +455 -0
  109. package/package.json +48 -0
@@ -0,0 +1,134 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ exports.generateComposeFiles = generateComposeFiles;
7
+ const node_fs_1 = __importDefault(require("node:fs"));
8
+ const node_path_1 = __importDefault(require("node:path"));
9
+ const compose_env_1 = require("./compose-env");
10
+ const compose_paths_1 = require("./compose-paths");
11
+ const hash_1 = require("./hash");
12
+ const state_1 = require("./state");
13
+ const compose_defaults_1 = require("./compose-defaults");
14
+ const consolidated_1 = require("../templates/consolidated");
15
+ const infra_1 = require("../templates/infra");
16
+ const mcp_1 = require("../templates/mcp");
17
+ const GENERATED_HEADER = '# Generated by collab-cli. Do not edit manually unless you accept drift warnings.\n\n';
18
/**
 * Resolves the effective env-file path. An explicit `envFile` argument is
 * resolved against the workspace directory; otherwise the configured
 * default path is used.
 */
function resolveEnvFilePath(config, envFile) {
    if (!envFile) {
        return config.envFile;
    }
    return node_path_1.default.resolve(config.workspaceDir, envFile);
}
21
/**
 * Renders compose file contents (file paths assigned later) for the given
 * mode: consolidated mode yields a single file; split mode yields the
 * infra and mcp templates. Every file is prefixed with GENERATED_HEADER.
 */
function renderContent(mode) {
    const withHeader = (template) => ({
        filePath: '',
        content: `${GENERATED_HEADER}${template}`,
    });
    if (mode === 'consolidated') {
        return [withHeader(consolidated_1.consolidatedTemplate)];
    }
    return [
        withHeader(infra_1.infraTemplate),
        withHeader(mcp_1.mcpTemplate),
    ];
}
41
/**
 * Assigns output paths to rendered compose files. In consolidated mode an
 * explicit `outputFile` (resolved against the workspace) wins over the
 * default consolidated path; in split mode files map to infra/mcp paths.
 */
function assignOutputPaths(files, config, mode, outputDirectory, outputFile) {
    const composePaths = (0, compose_paths_1.getComposeFilePaths)(config, outputDirectory);
    if (mode === 'consolidated') {
        let filePath;
        if (outputFile) {
            filePath = node_path_1.default.resolve(config.workspaceDir, outputFile);
        }
        else {
            filePath = composePaths.consolidated;
        }
        return [{ filePath, content: files[0].content }];
    }
    return [
        { filePath: composePaths.infra, content: files[0].content },
        { filePath: composePaths.mcp, content: files[1].content },
    ];
}
65
/**
 * Extracts the repository name ("scope") from a repo URL or path: trims
 * whitespace, strips trailing slashes and a `.git` suffix, then takes the
 * last non-empty path segment. Returns `fallback` when nothing remains.
 */
function scopeFromRepo(repo, fallback) {
    const cleaned = repo.trim().replace(/\/+$/, '').replace(/\.git$/, '');
    const segments = cleaned.split('/').filter(Boolean);
    const last = segments.length > 0 ? segments[segments.length - 1] : undefined;
    return last && last.length > 0 ? last : fallback;
}
70
/**
 * Computes env-file overrides. MCP_TECHNICAL_SCOPES is always the base
 * 'uxmaltech' scope; when a business canon repo is configured, its derived
 * scope is appended (unless it equals the base scope).
 */
function computeEnvOverrides(config) {
    const defaultScope = 'uxmaltech';
    const repoUrl = config.canons?.business?.repo;
    if (!repoUrl) {
        return { MCP_TECHNICAL_SCOPES: defaultScope };
    }
    const derived = scopeFromRepo(repoUrl, defaultScope);
    const scopes = derived === defaultScope ? defaultScope : `${defaultScope},${derived}`;
    return { MCP_TECHNICAL_SCOPES: scopes };
}
81
/**
 * Generates Docker Compose file(s) for the configured mode, recording a
 * content hash per generated file in state so later runs can detect manual
 * edits ("drift") before overwriting.
 *
 * Flow: resolve env-file path → ensure the compose env file (with scope
 * overrides and per-workspace defaults) → render templates → assign output
 * paths → write each file, collecting drift warnings → persist state →
 * pre-create external Docker resources. Returns { files, envFilePath, env,
 * driftWarnings }.
 */
function generateComposeFiles(options) {
    const envFilePath = resolveEnvFilePath(options.config, options.envFile);
    const overrides = computeEnvOverrides(options.config);
    // Scope compose defaults per workspace when a workspace name is set.
    const defaults = options.config.workspace?.name
        ? (0, compose_defaults_1.scopedComposeDefaults)(options.config.workspace.name)
        : compose_defaults_1.COMPOSE_ENV_DEFAULTS;
    const env = (0, compose_env_1.ensureComposeEnvFile)(envFilePath, options.logger, options.executor, overrides, defaults);
    const rendered = renderContent(options.mode);
    const files = assignOutputPaths(rendered, options.config, options.mode, options.outputDirectory, options.outputFile);
    const state = (0, state_1.loadState)(options.config);
    const driftWarnings = [];
    for (const file of files) {
        options.executor.ensureDirectory(node_path_1.default.dirname(file.filePath));
        const stateKey = (0, state_1.toStateKey)(options.config, file.filePath);
        const previous = state.generatedFiles[stateKey];
        // Drift detection: if the on-disk hash differs from the hash recorded
        // at the previous generation, the user edited the file manually.
        if (previous && node_fs_1.default.existsSync(file.filePath)) {
            const existingContent = node_fs_1.default.readFileSync(file.filePath, 'utf8');
            const existingHash = (0, hash_1.sha256)(existingContent);
            if (existingHash !== previous.hash) {
                driftWarnings.push(`Manual edits detected in ${file.filePath}; file will be regenerated from templates.`);
            }
        }
        // The file is regenerated regardless; drift is only a warning.
        options.executor.writeFile(file.filePath, file.content, { description: 'write compose file' });
        state.generatedFiles[stateKey] = {
            hash: (0, hash_1.sha256)(file.content),
            generatedAt: new Date().toISOString(),
        };
    }
    (0, state_1.saveState)(options.config, state, options.executor);
    // Pre-create external Docker resources (network + volumes) so that
    // `docker compose up` succeeds even when run from a different project
    // directory. These are no-ops when the resources already exist.
    ensureExternalDockerResources(env, options.executor);
    return {
        files,
        envFilePath,
        env,
        driftWarnings,
    };
}
121
/**
 * Idempotently pre-creates the external Docker network and volumes used by
 * the compose files, falling back to the stock defaults for any name not
 * present in `env`. Runs with check:false because "already exists" exits
 * non-zero but is not an error here.
 */
function ensureExternalDockerResources(env, executor) {
    const fallback = compose_defaults_1.COMPOSE_ENV_DEFAULTS;
    const network = env.COLLAB_NETWORK || fallback.COLLAB_NETWORK;
    executor.run('docker', ['network', 'create', network], { verboseOnly: true, check: false });
    const volumeKeys = ['QDRANT_VOLUME', 'NEBULA_METAD_VOLUME', 'NEBULA_STORAGED_VOLUME', 'MCP_VOLUME'];
    for (const key of volumeKeys) {
        const volume = env[key] || fallback[key];
        executor.run('docker', ['volume', 'create', volume], { verboseOnly: true, check: false });
    }
}
@@ -0,0 +1,56 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.validateComposeFiles = validateComposeFiles;
4
+ exports.assertComposeFilesValid = assertComposeFilesValid;
5
+ const errors_1 = require("./errors");
6
+ const docker_compose_1 = require("./docker-compose");
7
+ const preconditions_1 = require("./preconditions");
8
/**
 * Condenses raw CLI error output for display: trailing whitespace is
 * trimmed per line, blank lines are dropped, and at most the first 12
 * remaining lines are kept.
 */
function compactError(raw) {
    const meaningful = [];
    for (const line of raw.split(/\r?\n/)) {
        const cleaned = line.trimEnd();
        if (cleaned.length > 0) {
            meaningful.push(cleaned);
        }
    }
    return meaningful.slice(0, 12).join('\n');
}
15
/**
 * Validates each compose file by running `docker compose config` against
 * it. In dry-run mode the commands are only echoed (never checked) and an
 * empty error list is returned. Otherwise each file must exist and docker
 * must be available; returns one { filePath, message } per failing file.
 */
function validateComposeFiles(filePaths, cwd, executor) {
    if (executor.dryRun) {
        for (const filePath of filePaths) {
            executor.run('docker', ['compose', '-f', filePath, 'config'], {
                cwd,
                check: false,
            });
        }
        return [];
    }
    filePaths.forEach((filePath) => (0, preconditions_1.ensureFileExists)(filePath, 'Compose file'));
    (0, preconditions_1.ensureCommandAvailable)('docker', { dryRun: executor.dryRun });
    const failures = [];
    for (const filePath of filePaths) {
        const result = (0, docker_compose_1.runDockerCompose)({
            executor,
            files: [filePath],
            arguments: ['config'],
            cwd,
            check: false,
        });
        if (result.status === 0) {
            continue;
        }
        failures.push({
            filePath,
            message: compactError(result.stderr || result.stdout),
        });
    }
    return failures;
}
47
/**
 * Runs compose validation and throws a CliError summarizing every failing
 * file; returns normally when all files validate.
 */
function assertComposeFilesValid(filePaths, cwd, executor) {
    const errors = validateComposeFiles(filePaths, cwd, executor);
    if (errors.length > 0) {
        const formatted = errors
            .map((item) => `- ${item.filePath}\n${item.message || '(no error details available)'}`)
            .join('\n\n');
        throw new errors_1.CliError(`Compose validation failed:\n${formatted}`);
    }
}
@@ -0,0 +1,195 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ exports.defaultCollabConfig = defaultCollabConfig;
7
+ exports.loadCollabConfig = loadCollabConfig;
8
+ exports.ensureCollabDirectory = ensureCollabDirectory;
9
+ exports.serializeUserConfig = serializeUserConfig;
10
+ exports.isWorkspaceMode = isWorkspaceMode;
11
+ exports.resolveRepoConfigs = resolveRepoConfigs;
12
+ exports.discoverRepos = discoverRepos;
13
+ exports.isWorkspaceRoot = isWorkspaceRoot;
14
+ exports.deriveWorkspaceName = deriveWorkspaceName;
15
+ exports.detectWorkspaceLayout = detectWorkspaceLayout;
16
+ const node_fs_1 = __importDefault(require("node:fs"));
17
+ const node_path_1 = __importDefault(require("node:path"));
18
+ const mode_1 = require("./mode");
19
// Default filenames for generated compose files, relative to the workspace
// directory; individual entries can be overridden via config.json.
const DEFAULT_COMPOSE_PATHS = {
    consolidatedFile: 'docker-compose.yml',
    infraFile: 'docker-compose.infra.yml',
    mcpFile: 'docker-compose.mcp.yml',
};
24
/**
 * Builds the default collab configuration rooted at `cwd` (defaults to the
 * current working directory). Every path in the result is absolute.
 */
function defaultCollabConfig(cwd = process.cwd()) {
    const p = node_path_1.default;
    const workspaceDir = p.resolve(cwd);
    const collabDir = p.join(workspaceDir, '.collab');
    const architectureDir = p.join(workspaceDir, 'docs', 'architecture');
    return {
        workspaceDir,
        collabDir,
        configFile: p.join(collabDir, 'config.json'),
        stateFile: p.join(collabDir, 'state.json'),
        envFile: p.join(workspaceDir, '.env'),
        mode: mode_1.DEFAULT_MODE,
        compose: { ...DEFAULT_COMPOSE_PATHS },
        architectureDir,
        uxmaltechDir: p.join(architectureDir, 'uxmaltech'),
        repoDir: p.join(architectureDir, 'repo'),
        aiDir: p.join(workspaceDir, 'docs', 'ai'),
    };
}
42
/**
 * Reads and parses the user config file. Returns an empty object when the
 * file does not exist.
 *
 * Fix: a corrupt config previously surfaced as a bare SyntaxError with no
 * hint of which file was at fault. Parse errors are now wrapped in a
 * descriptive Error naming the file, with the original error as `cause`.
 */
function readRawConfig(configFile) {
    if (!node_fs_1.default.existsSync(configFile)) {
        return {};
    }
    const raw = node_fs_1.default.readFileSync(configFile, 'utf8');
    try {
        return JSON.parse(raw);
    }
    catch (err) {
        const detail = err instanceof Error ? err.message : String(err);
        throw new Error(`Invalid JSON in config file ${configFile}: ${detail}`, { cause: err });
    }
}
50
/**
 * Loads the effective collab configuration for `cwd`.
 *
 * Starts from defaultCollabConfig(), overlays values read from the user's
 * config file (mode, envFile, compose filenames/project name,
 * architectureDir, assistants, workspace, canons), and re-derives the
 * architecture-relative directories from the resolved architectureDir.
 * Relative paths from the config are resolved against the workspace dir.
 */
function loadCollabConfig(cwd = process.cwd()) {
    const defaults = defaultCollabConfig(cwd);
    const raw = readRawConfig(defaults.configFile);
    const architectureDir = raw.architectureDir
        ? node_path_1.default.resolve(defaults.workspaceDir, raw.architectureDir)
        : defaults.architectureDir;
    // Older configs stored only a repo list; migrate to { name, type, repos }.
    const workspace = migrateWorkspaceConfig(raw.workspace, defaults.workspaceDir);
    return {
        ...defaults,
        mode: (0, mode_1.parseMode)(raw.mode, defaults.mode),
        envFile: raw.envFile ? node_path_1.default.resolve(defaults.workspaceDir, raw.envFile) : defaults.envFile,
        compose: {
            consolidatedFile: raw.compose?.consolidatedFile ?? defaults.compose.consolidatedFile,
            infraFile: raw.compose?.infraFile ?? defaults.compose.infraFile,
            mcpFile: raw.compose?.mcpFile ?? defaults.compose.mcpFile,
            // Default project name is scoped to the workspace when one exists.
            projectName: raw.compose?.projectName ?? (workspace ? `collab-${workspace.name}` : undefined),
        },
        architectureDir,
        uxmaltechDir: node_path_1.default.join(architectureDir, 'uxmaltech'),
        repoDir: node_path_1.default.join(architectureDir, 'repo'),
        aiDir: node_path_1.default.join(defaults.workspaceDir, 'docs', 'ai'),
        assistants: raw.assistants,
        workspace,
        canons: raw.canons,
    };
}
76
/**
 * Creates the `.collab` directory (and any missing parents) if it does not
 * already exist.
 */
function ensureCollabDirectory(config) {
    node_fs_1.default.mkdirSync(config.collabDir, { recursive: true });
}
79
/**
 * Serializes the user-editable portion of the config to pretty JSON. The
 * env file path is stored relative to the workspace; optional sections
 * (assistants, workspace, canons) are emitted only when present.
 */
function serializeUserConfig(config) {
    const data = {
        mode: config.mode,
        compose: config.compose,
        envFile: node_path_1.default.relative(config.workspaceDir, config.envFile),
    };
    for (const key of ['assistants', 'workspace', 'canons']) {
        if (config[key]) {
            data[key] = config[key];
        }
    }
    return JSON.stringify(data, null, 2);
}
96
+ // ────────────────────────────────────────────────────────────────
97
+ // Workspace helpers
98
+ // ────────────────────────────────────────────────────────────────
99
/**
 * True when the config declares a workspace with at least one repo.
 */
function isWorkspaceMode(config) {
    if (config.workspace === undefined) {
        return false;
    }
    return config.workspace.repos.length > 0;
}
102
/**
 * Builds per-repo path configuration for every workspace repo. Returns an
 * empty array when no workspace is configured.
 */
function resolveRepoConfigs(config) {
    const workspace = config.workspace;
    if (!workspace) {
        return [];
    }
    const configs = [];
    for (const repoName of workspace.repos) {
        const repoDir = node_path_1.default.join(config.workspaceDir, repoName);
        configs.push({
            name: repoName,
            repoDir,
            architectureRepoDir: node_path_1.default.join(repoDir, 'docs', 'architecture', 'repo'),
            aiDir: node_path_1.default.join(repoDir, 'docs', 'ai'),
        });
    }
    return configs;
}
116
/**
 * Detects subdirectories of `workspaceDir` that contain a `.git` directory.
 * Hidden directories (dot-prefixed) are skipped. Returns sorted directory
 * names (not full paths).
 */
function discoverRepos(workspaceDir) {
    const entries = node_fs_1.default.readdirSync(workspaceDir, { withFileTypes: true });
    return entries
        .filter((entry) => entry.isDirectory() && !entry.name.startsWith('.'))
        .filter((entry) => node_fs_1.default.existsSync(node_path_1.default.join(workspaceDir, entry.name, '.git')))
        .map((entry) => entry.name)
        .sort();
}
135
/**
 * Returns true when `dir` looks like a workspace root: it has no `.git`
 * of its own and contains at least one child git repository.
 */
function isWorkspaceRoot(dir) {
    if (node_fs_1.default.existsSync(node_path_1.default.join(dir, '.git'))) {
        return false;
    }
    return discoverRepos(dir).length >= 1;
}
145
/**
 * Derives a slugified workspace name from a directory path: the lowercased
 * basename with non-alphanumeric runs collapsed to single dashes and edge
 * dashes trimmed. Falls back to 'workspace' when nothing remains.
 */
function deriveWorkspaceName(dir) {
    const base = node_path_1.default.basename(node_path_1.default.resolve(dir));
    const slug = base
        .toLowerCase()
        .replace(/[^a-z0-9]+/g, '-')
        .replace(/^-|-$/g, '');
    return slug || 'workspace';
}
154
/**
 * Detects the workspace layout of a directory.
 *
 * - If the directory is itself a git repo → mono-repo with repos=["."]
 * - If it has ≥2 child git repos → multi-repo
 * - If it has exactly 1 child git repo → mono-repo
 * - If it has 0 child git repos → null (caller decides)
 */
function detectWorkspaceLayout(dir) {
    // The directory itself being a git repo takes precedence over children.
    if (node_fs_1.default.existsSync(node_path_1.default.join(dir, '.git'))) {
        return { type: 'mono-repo', repos: ['.'] };
    }
    const childRepos = discoverRepos(dir);
    if (childRepos.length === 0) {
        return null;
    }
    const type = childRepos.length >= 2 ? 'multi-repo' : 'mono-repo';
    return { type, repos: childRepos };
}
178
/**
 * Migrates an older workspace config (repos-only) to the full format with
 * `name` and `type` fields. Returns undefined when there is no usable
 * (non-empty string) repo list.
 */
function migrateWorkspaceConfig(raw, workspaceDir) {
    if (!raw || !Array.isArray(raw.repos)) {
        return undefined;
    }
    const repos = raw.repos.filter((r) => typeof r === 'string' && r.length > 0);
    if (repos.length === 0) {
        return undefined;
    }
    const name = raw.name || deriveWorkspaceName(workspaceDir);
    const type = raw.type || (repos.length >= 2 ? 'multi-repo' : 'mono-repo');
    return { name, type, repos };
}
@@ -0,0 +1,63 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ exports.getCredentialsPath = getCredentialsPath;
7
+ exports.saveApiKey = saveApiKey;
8
+ exports.loadApiKey = loadApiKey;
9
+ const node_fs_1 = __importDefault(require("node:fs"));
10
+ const node_path_1 = __importDefault(require("node:path"));
11
/**
 * Returns the absolute path of the credentials file inside `.collab`.
 */
function getCredentialsPath(config) {
    const { collabDir } = config;
    return node_path_1.default.join(collabDir, 'credentials.json');
}
14
/**
 * Persists an API key for `provider` in the credentials file, creating the
 * `.collab` directory on demand and tightening permissions (0700 dir /
 * 0600 file) where the platform supports it.
 */
function saveApiKey(config, provider, apiKey) {
    const credPath = getCredentialsPath(config);
    const credDir = node_path_1.default.dirname(credPath);
    node_fs_1.default.mkdirSync(credDir, { recursive: true });
    // Merge into any previously stored credentials.
    const credentials = loadAllCredentials(config);
    credentials[provider] = apiKey;
    const serialized = JSON.stringify(credentials, null, 2);
    node_fs_1.default.writeFileSync(credPath, serialized, {
        encoding: 'utf8',
        mode: 0o600,
    });
    // `mode` only applies when the file is created, so re-apply permissions
    // explicitly for pre-existing files and the containing directory.
    try {
        node_fs_1.default.chmodSync(credDir, 0o700);
        node_fs_1.default.chmodSync(credPath, 0o600);
    }
    catch {
        // Non-critical: permissions may not be applicable on all platforms
    }
}
39
/**
 * Loads the API key for a single provider from the credentials file, or
 * null when none is stored.
 */
function loadApiKey(config, provider) {
    const stored = loadAllCredentials(config)[provider];
    return stored ?? null;
}
47
/**
 * Loads all stored credentials. Returns an empty object when the file does
 * not exist or cannot be read/parsed (corrupt data is treated as absent).
 */
function loadAllCredentials(config) {
    const credPath = getCredentialsPath(config);
    try {
        // A missing file throws ENOENT here and falls into the catch below,
        // matching the "no credentials yet" case.
        const raw = node_fs_1.default.readFileSync(credPath, 'utf8');
        return JSON.parse(raw);
    }
    catch {
        return {};
    }
}
@@ -0,0 +1,73 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.checkDockerDaemon = checkDockerDaemon;
4
+ exports.checkDockerImages = checkDockerImages;
5
+ const shell_1 = require("./shell");
6
/**
 * Checks whether the Docker daemon is reachable and returns its server
 * version. Distinguishes "docker not installed" from "daemon not running";
 * dry-run mode short-circuits with a placeholder version.
 */
function checkDockerDaemon(executor) {
    if (!(0, shell_1.resolveCommandPath)('docker')) {
        return { ok: false, error: 'docker command not found in PATH' };
    }
    if (executor.dryRun) {
        return { ok: true, version: 'dry-run' };
    }
    const result = executor.run('docker', ['info', '--format', '{{.ServerVersion}}'], {
        check: false,
        verboseOnly: true,
    });
    const version = result.stdout.trim();
    if (result.status === 0 && version) {
        return { ok: true, version };
    }
    const stderr = result.stderr.trim();
    const daemonDown = /cannot connect to.*docker daemon|is the docker daemon running/i.test(stderr);
    if (daemonDown) {
        return { ok: false, error: 'Docker daemon is not running. Start Docker Desktop or run: sudo systemctl start docker' };
    }
    return { ok: false, error: stderr || 'Docker daemon check failed' };
}
30
/**
 * Checks whether Docker images are locally available.
 *
 * Uses `docker image inspect` for local presence only — avoids network
 * calls that may fail behind firewalls or need authentication. Daemon-down
 * errors are distinguished from genuinely missing images, and dry-run
 * reports every image as present.
 */
function checkDockerImages(executor, images) {
    if (!(0, shell_1.resolveCommandPath)('docker')) {
        return images.map((image) => ({
            image,
            ok: false,
            error: 'docker command not found',
        }));
    }
    const inspectOne = (image) => {
        if (executor.dryRun) {
            return { image, ok: true };
        }
        const result = executor.run('docker', ['image', 'inspect', image, '--format', '{{.Id}}'], {
            check: false,
            verboseOnly: true,
        });
        if (result.status === 0 && result.stdout.trim()) {
            return { image, ok: true };
        }
        const stderr = result.stderr.trim();
        // Distinguish daemon/auth errors from genuine "image not found".
        if (/cannot connect to.*docker daemon|is the docker daemon running/i.test(stderr)) {
            return {
                image,
                ok: false,
                error: 'Docker daemon is not running. Start Docker Desktop or run: sudo systemctl start docker',
            };
        }
        const missing = /no such image|not found/i.test(stderr);
        return {
            image,
            ok: false,
            error: missing
                ? `Image not found locally. Pull with: docker pull ${image}`
                : (stderr || `docker image inspect failed for ${image}`),
        };
    };
    return images.map(inspectOne);
}
@@ -0,0 +1,15 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.runDockerCompose = runDockerCompose;
4
+ const process_1 = require("./process");
5
/**
 * Builds the argv tail for `docker compose`: the subcommand name, an
 * optional project flag (-p), one -f flag per file, then the command args.
 */
function composeArgs(files, args, projectName) {
    const argv = ['compose'];
    if (projectName) {
        argv.push('-p', projectName);
    }
    for (const filePath of files) {
        argv.push('-f', filePath);
    }
    argv.push(...args);
    return argv;
}
10
/**
 * Runs the `docker compose` invocation described by `command` through the
 * shared process runner.
 */
function runDockerCompose(command) {
    const { executor, files, arguments: args, projectName, cwd, check } = command;
    return (0, process_1.runProcess)(executor, 'docker', composeArgs(files, args, projectName), { cwd, check });
}
@@ -0,0 +1,151 @@
1
+ "use strict";
2
+ /**
3
+ * Docker Compose container status parsing and display.
4
+ * Parses `docker compose ps --format json` output, merges with health
5
+ * check results, and renders a formatted status table.
6
+ */
7
+ Object.defineProperty(exports, "__esModule", { value: true });
8
+ exports.SERVICE_LABELS = void 0;
9
+ exports.parseComposePs = parseComposePs;
10
+ exports.buildServiceStatusList = buildServiceStatusList;
11
+ exports.printStatusTable = printStatusTable;
12
+ const ansi_1 = require("./ansi");
13
/**
 * Human-readable labels for known Docker Compose services, keyed by compose
 * service name. Services without an entry fall back to their raw name.
 */
exports.SERVICE_LABELS = {
    qdrant: 'Qdrant (Vector DB)',
    metad0: 'NebulaGraph metad',
    storaged0: 'NebulaGraph storaged',
    graphd: 'NebulaGraph graphd',
    mcp: 'MCP Server',
};
21
/** Coerces a parsed JSON field to a string; non-strings become ''. */
function str(value) {
    if (typeof value === 'string') {
        return value;
    }
    return '';
}
25
/**
 * Parses the stdout of `docker compose ps --format json`.
 *
 * Compose v2.21+ emits one JSON object per line (NDJSON); earlier Compose
 * v2 releases emit a single JSON array. Both formats are accepted here —
 * previously only NDJSON was handled, so array output produced an empty
 * result. Malformed lines are skipped gracefully.
 */
function parseComposePs(stdout) {
    const output = stdout.trim();
    if (!output) {
        return [];
    }
    // Safely coerce a parsed JSON field to a string.
    const field = (value) => (typeof value === 'string' ? value : '');
    // Accept both PascalCase (documented) and lowercase key variants.
    const toContainer = (raw) => ({
        name: field(raw['Name']) || field(raw['name']),
        service: field(raw['Service']) || field(raw['service']),
        state: field(raw['State']) || field(raw['state']),
        status: field(raw['Status']) || field(raw['status']),
        ports: field(raw['Ports']) || field(raw['ports']),
        health: field(raw['Health']) || field(raw['health']),
    });
    // Legacy array format (Compose < v2.21).
    if (output.startsWith('[')) {
        try {
            const parsed = JSON.parse(output);
            if (Array.isArray(parsed)) {
                return parsed
                    .filter((item) => item && typeof item === 'object')
                    .map(toContainer);
            }
        }
        catch {
            // Fall through to line-by-line (NDJSON) parsing.
        }
    }
    const containers = [];
    for (const line of output.split('\n')) {
        const trimmed = line.trim();
        if (!trimmed) {
            continue;
        }
        try {
            containers.push(toContainer(JSON.parse(trimmed)));
        }
        catch {
            // Skip malformed JSON lines
        }
    }
    return containers;
}
58
/**
 * Simplifies Docker port mappings for display, e.g.
 * "0.0.0.0:6333->6333/tcp" becomes "6333/tcp". Mappings without an arrow
 * are passed through unchanged; empty input yields ''.
 */
function formatPorts(raw) {
    if (!raw) {
        return '';
    }
    const simplify = (mapping) => {
        const arrow = mapping.indexOf('->');
        if (arrow === -1) {
            return mapping;
        }
        return mapping.slice(arrow + 2);
    };
    return raw.split(', ').map(simplify).join(', ');
}
74
/**
 * Merges container info with health check results into a unified status
 * list, one entry per requested service. Services without a container are
 * reported as "Not running"; health results are matched to the service by
 * name, with healthOk = null when no health check ran.
 */
function buildServiceStatusList(services, containers, healthResults) {
    const byService = new Map(containers.map((c) => [c.service, c]));
    const byName = new Map(healthResults.map((h) => [h.name, h]));
    return services.map((service) => {
        const container = byService.get(service);
        const health = byName.get(service);
        const label = exports.SERVICE_LABELS[service] ?? service;
        const healthOk = health ? health.ok : null;
        const healthDetail = health?.detail ?? '';
        if (!container) {
            return {
                service,
                label,
                running: false,
                status: 'Not running',
                ports: '',
                healthOk,
                healthDetail,
            };
        }
        return {
            service,
            label,
            container: container.name,
            running: container.state === 'running',
            status: container.status || container.state,
            ports: formatPorts(container.ports),
            healthOk,
            healthDetail,
        };
    });
}
116
/**
 * Renders a formatted status table to the terminal via `logger.result`.
 * Each service shows a colored status marker, its ports, and (when a
 * health check ran) its health result; a summary line at the bottom shows
 * the running count and, optionally, the compose file path.
 */
function printStatusTable(logger, title, services, composePath) {
    // U+2500 box-drawing horizontal rule, dimmed.
    const line = (0, ansi_1.dim)('\u2500'.repeat(48));
    logger.result('');
    logger.result(` ${line}`);
    logger.result(` ${(0, ansi_1.bold)(title)}`);
    logger.result(` ${line}`);
    for (const svc of services) {
        logger.result('');
        logger.result(` ${(0, ansi_1.bold)(svc.label)}`);
        const statusMarker = svc.running ? (0, ansi_1.green)(ansi_1.CHECK) : (0, ansi_1.red)(ansi_1.CROSS);
        logger.result(` Status: ${statusMarker} ${svc.status}`);
        if (svc.ports) {
            logger.result(` Ports: ${svc.ports}`);
        }
        // healthOk === null means no health check was run for this service.
        if (svc.healthOk !== null) {
            const healthMarker = svc.healthOk ? (0, ansi_1.green)(ansi_1.CHECK) : (0, ansi_1.red)(ansi_1.CROSS);
            logger.result(` Health: ${healthMarker} ${svc.healthDetail}`);
        }
    }
    // Summary
    logger.result('');
    logger.result(` ${line}`);
    const running = services.filter((s) => s.running).length;
    const total = services.length;
    const allUp = running === total;
    const summaryMarker = allUp ? (0, ansi_1.green)(ansi_1.CHECK) : (0, ansi_1.red)(ansi_1.CROSS);
    const summaryText = `${running}/${total} services running`;
    const composeNote = composePath ? ` | Compose: ${composePath}` : '';
    logger.result(` ${summaryMarker} ${summaryText}${(0, ansi_1.dim)(composeNote)}`);
    logger.result('');
}