tlc-claude-code 2.0.1 → 2.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/commands/tlc/deploy.md +194 -2
- package/.claude/commands/tlc/e2e-verify.md +214 -0
- package/.claude/commands/tlc/guard.md +191 -0
- package/.claude/commands/tlc/help.md +32 -0
- package/.claude/commands/tlc/init.md +73 -37
- package/.claude/commands/tlc/llm.md +19 -4
- package/.claude/commands/tlc/preflight.md +134 -0
- package/.claude/commands/tlc/review.md +17 -4
- package/.claude/commands/tlc/watchci.md +159 -0
- package/.claude/hooks/tlc-block-tools.sh +41 -0
- package/.claude/hooks/tlc-capture-exchange.sh +50 -0
- package/.claude/hooks/tlc-post-build.sh +38 -0
- package/.claude/hooks/tlc-post-push.sh +22 -0
- package/.claude/hooks/tlc-prompt-guard.sh +69 -0
- package/.claude/hooks/tlc-session-init.sh +123 -0
- package/CLAUDE.md +12 -0
- package/bin/install.js +171 -2
- package/bin/postinstall.js +45 -26
- package/dashboard-web/dist/assets/index-CdS5CHqu.css +1 -0
- package/dashboard-web/dist/assets/index-CwNPPVpg.js +483 -0
- package/dashboard-web/dist/assets/index-CwNPPVpg.js.map +1 -0
- package/dashboard-web/dist/index.html +2 -2
- package/docker-compose.dev.yml +18 -12
- package/package.json +3 -1
- package/server/index.js +228 -2
- package/server/lib/capture-bridge.js +242 -0
- package/server/lib/capture-bridge.test.js +363 -0
- package/server/lib/capture-guard.js +140 -0
- package/server/lib/capture-guard.test.js +182 -0
- package/server/lib/command-runner.js +159 -0
- package/server/lib/command-runner.test.js +92 -0
- package/server/lib/deploy/runners/dependency-runner.js +106 -0
- package/server/lib/deploy/runners/dependency-runner.test.js +148 -0
- package/server/lib/deploy/runners/secrets-runner.js +174 -0
- package/server/lib/deploy/runners/secrets-runner.test.js +127 -0
- package/server/lib/deploy/security-gates.js +11 -24
- package/server/lib/deploy/security-gates.test.js +9 -2
- package/server/lib/deploy-engine.js +182 -0
- package/server/lib/deploy-engine.test.js +147 -0
- package/server/lib/docker-api.js +137 -0
- package/server/lib/docker-api.test.js +202 -0
- package/server/lib/docker-client.js +297 -0
- package/server/lib/docker-client.test.js +308 -0
- package/server/lib/input-sanitizer.js +86 -0
- package/server/lib/input-sanitizer.test.js +117 -0
- package/server/lib/launchd-agent.js +225 -0
- package/server/lib/launchd-agent.test.js +185 -0
- package/server/lib/memory-api.js +3 -1
- package/server/lib/memory-api.test.js +3 -5
- package/server/lib/memory-bridge-e2e.test.js +160 -0
- package/server/lib/memory-committer.js +18 -4
- package/server/lib/memory-committer.test.js +21 -0
- package/server/lib/memory-hooks-capture.test.js +69 -4
- package/server/lib/memory-hooks-integration.test.js +98 -0
- package/server/lib/memory-hooks.js +42 -4
- package/server/lib/memory-store-adapter.js +105 -0
- package/server/lib/memory-store-adapter.test.js +141 -0
- package/server/lib/memory-wiring-e2e.test.js +93 -0
- package/server/lib/nginx-config.js +114 -0
- package/server/lib/nginx-config.test.js +82 -0
- package/server/lib/ollama-health.js +91 -0
- package/server/lib/ollama-health.test.js +74 -0
- package/server/lib/port-guard.js +44 -0
- package/server/lib/port-guard.test.js +65 -0
- package/server/lib/project-scanner.js +37 -2
- package/server/lib/project-scanner.test.js +152 -0
- package/server/lib/remember-command.js +2 -0
- package/server/lib/remember-command.test.js +23 -0
- package/server/lib/security/crypto-utils.test.js +2 -2
- package/server/lib/semantic-recall.js +1 -1
- package/server/lib/semantic-recall.test.js +17 -0
- package/server/lib/ssh-client.js +184 -0
- package/server/lib/ssh-client.test.js +127 -0
- package/server/lib/vps-api.js +184 -0
- package/server/lib/vps-api.test.js +208 -0
- package/server/lib/vps-bootstrap.js +124 -0
- package/server/lib/vps-bootstrap.test.js +79 -0
- package/server/lib/vps-monitor.js +126 -0
- package/server/lib/vps-monitor.test.js +98 -0
- package/server/lib/workspace-api.js +182 -1
- package/server/lib/workspace-api.test.js +474 -0
- package/server/package-lock.json +737 -0
- package/server/package.json +3 -0
- package/dashboard-web/dist/assets/index-Uhc49PE-.css +0 -1
- package/dashboard-web/dist/assets/index-W36XHPC5.js +0 -431
- package/dashboard-web/dist/assets/index-W36XHPC5.js.map +0 -1
|
/**
 * Secrets Runner Tests
 */
import { describe, it, expect, vi, beforeEach } from 'vitest';
import { createSecretsRunner } from './secrets-runner.js';

describe('secrets-runner', () => {
  let mockGlob;
  let mockReadFile;
  let runner;

  // Every test scans the same fake project root.
  const scan = () => runner('/test/project', {});

  beforeEach(() => {
    mockGlob = vi.fn().mockResolvedValue([]);
    mockReadFile = vi.fn().mockResolvedValue('');
    runner = createSecretsRunner({ glob: mockGlob, readFile: mockReadFile });
  });

  it('passes when no secrets found in clean project', async () => {
    mockGlob.mockResolvedValue(['src/index.js']);
    mockReadFile.mockResolvedValue('const x = 1;\nconsole.log(x);');

    const result = await scan();
    expect(result.passed).toBe(true);
    expect(result.findings).toEqual([]);
  });

  it('detects hardcoded password assignment', async () => {
    mockGlob.mockResolvedValue(['src/config.js']);
    mockReadFile.mockResolvedValue('const config = {\n password: "supersecret123"\n};');

    const result = await scan();
    expect(result.passed).toBe(false);
    expect(result.findings.length).toBeGreaterThan(0);
    expect(result.findings[0].file).toBe('src/config.js');
    expect(result.findings[0].pattern).toBeDefined();
  });

  it('detects AWS access key pattern', async () => {
    mockGlob.mockResolvedValue(['src/aws.js']);
    mockReadFile.mockResolvedValue('const key = "AKIAIOSFODNN7EXAMPLE";\n');

    const result = await scan();
    expect(result.passed).toBe(false);
    expect(result.findings.length).toBeGreaterThan(0);
  });

  it('detects private key header', async () => {
    mockGlob.mockResolvedValue(['certs/key.pem']);
    mockReadFile.mockResolvedValue(
      '-----BEGIN RSA PRIVATE KEY-----\nMIIEpA...\n-----END RSA PRIVATE KEY-----'
    );

    const result = await scan();
    expect(result.passed).toBe(false);
    expect(result.findings.length).toBeGreaterThan(0);
  });

  it('detects generic API key pattern', async () => {
    mockGlob.mockResolvedValue(['src/api.js']);
    mockReadFile.mockResolvedValue('const token = "ghp_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx";\n');

    const result = await scan();
    expect(result.passed).toBe(false);
    expect(result.findings.length).toBeGreaterThan(0);
  });

  it('excludes test files by default', async () => {
    mockGlob.mockResolvedValue([]);

    const result = await scan();
    expect(result.passed).toBe(true);

    // Verify glob was called with exclusion patterns
    const callArgs = mockGlob.mock.calls[0];
    expect(callArgs[0]).toBeDefined(); // pattern
    expect(callArgs[1]).toBeDefined(); // options with ignore
  });

  it('excludes node_modules by default', async () => {
    mockGlob.mockResolvedValue([]);

    await scan();
    const [, options] = mockGlob.mock.calls[0];
    expect(options.ignore).toContain('**/node_modules/**');
  });

  it('supports configurable exclusion patterns', async () => {
    const customRunner = createSecretsRunner({
      glob: mockGlob,
      readFile: mockReadFile,
      extraIgnore: ['**/fixtures/**'],
    });

    mockGlob.mockResolvedValue([]);

    await customRunner('/test/project', {});
    const [, options] = mockGlob.mock.calls[0];
    expect(options.ignore).toContain('**/fixtures/**');
  });

  it('handles empty project directory', async () => {
    mockGlob.mockResolvedValue([]);

    const result = await scan();
    expect(result.passed).toBe(true);
    expect(result.findings).toEqual([]);
  });

  it('includes line number in findings', async () => {
    mockGlob.mockResolvedValue(['src/config.js']);
    mockReadFile.mockResolvedValue('const a = 1;\nconst password = "secret";\nconst b = 2;');

    const result = await scan();
    expect(result.passed).toBe(false);
    expect(result.findings[0].line).toBe(2);
  });
});
|
@@ -5,6 +5,9 @@
|
|
|
5
5
|
* running security gates during deployment validation.
|
|
6
6
|
*/
|
|
7
7
|
|
|
8
|
+
import { createDependencyRunner } from './runners/dependency-runner.js';
|
|
9
|
+
import { createSecretsRunner } from './runners/secrets-runner.js';
|
|
10
|
+
|
|
8
11
|
/**
|
|
9
12
|
* Gate type constants
|
|
10
13
|
*/
|
|
@@ -36,29 +39,13 @@ const DEFAULT_TIER_GATES = {
|
|
|
36
39
|
};
|
|
37
40
|
|
|
38
41
|
/**
|
|
39
|
-
*
|
|
42
|
+
* Built-in runners for dependencies and secrets gates.
|
|
43
|
+
* SAST, DAST, and container gates require custom runner injection.
|
|
44
|
+
* Gates without runners will SKIP (not fake-pass).
|
|
40
45
|
*/
|
|
41
|
-
const
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
return { passed: true, findings: [] };
|
|
45
|
-
},
|
|
46
|
-
dast: async (projectPath, options) => {
|
|
47
|
-
// Placeholder DAST implementation
|
|
48
|
-
return { passed: true, findings: [] };
|
|
49
|
-
},
|
|
50
|
-
dependencies: async (projectPath, options) => {
|
|
51
|
-
// Placeholder dependency scanning implementation
|
|
52
|
-
return { passed: true, findings: [] };
|
|
53
|
-
},
|
|
54
|
-
container: async (projectPath, options) => {
|
|
55
|
-
// Placeholder container scanning implementation
|
|
56
|
-
return { passed: true, findings: [] };
|
|
57
|
-
},
|
|
58
|
-
secrets: async (projectPath, options) => {
|
|
59
|
-
// Placeholder secrets scanning implementation
|
|
60
|
-
return { passed: true, findings: [] };
|
|
61
|
-
},
|
|
46
|
+
const builtInRunners = {
|
|
47
|
+
dependencies: createDependencyRunner(),
|
|
48
|
+
secrets: createSecretsRunner(),
|
|
62
49
|
};
|
|
63
50
|
|
|
64
51
|
/**
|
|
@@ -197,8 +184,8 @@ export async function runAllGates(tier, options = {}) {
|
|
|
197
184
|
export function createSecurityGates(config = {}) {
|
|
198
185
|
const { runners = {}, gateConfig = null } = config;
|
|
199
186
|
|
|
200
|
-
// Merge
|
|
201
|
-
const allRunners = { ...
|
|
187
|
+
// Merge built-in runners with custom runners (custom overrides built-in)
|
|
188
|
+
const allRunners = { ...builtInRunners, ...runners };
|
|
202
189
|
|
|
203
190
|
return {
|
|
204
191
|
/**
|
|
@@ -213,10 +213,17 @@ describe('security-gates', () => {
|
|
|
213
213
|
expect(gates.hasRunner('custom')).toBe(true);
|
|
214
214
|
});
|
|
215
215
|
|
|
216
|
-
it('
|
|
216
|
+
it('has built-in runners for dependencies and secrets', () => {
|
|
217
217
|
const gates = createSecurityGates();
|
|
218
|
-
expect(gates.hasRunner('sast')).toBe(true);
|
|
219
218
|
expect(gates.hasRunner('dependencies')).toBe(true);
|
|
219
|
+
expect(gates.hasRunner('secrets')).toBe(true);
|
|
220
|
+
});
|
|
221
|
+
|
|
222
|
+
it('skips SAST/DAST/container without custom runners', () => {
|
|
223
|
+
const gates = createSecurityGates();
|
|
224
|
+
expect(gates.hasRunner('sast')).toBe(false);
|
|
225
|
+
expect(gates.hasRunner('dast')).toBe(false);
|
|
226
|
+
expect(gates.hasRunner('container')).toBe(false);
|
|
220
227
|
});
|
|
221
228
|
});
|
|
222
229
|
});
|
|
/**
 * Deploy Engine — deploy projects to VPS via SSH
 * Phase 80 Task 6
 *
 * Each public method takes an sshConfig (host/credentials) and a project
 * ({ name, repoUrl }) and drives the remote host by running shell commands
 * through the injected sshClient. All values interpolated into remote
 * commands are validated first (see the isValid* checks below).
 */

const { generateSiteConfig } = require('./nginx-config.js');
const { isValidBranch, isValidRepoUrl, isValidDomain, isValidProjectName } = require('./input-sanitizer.js');

/**
 * Sanitize branch name for DNS/container use: lowercase, only [a-z0-9-],
 * collapsed dashes, no leading/trailing dash, max 63 chars (DNS label limit).
 * @param {string} branch
 * @returns {string}
 */
function sanitizeBranch(branch) {
  if (!branch) return 'unknown';
  return branch.toLowerCase().replace(/[^a-z0-9-]/g, '-').replace(/-+/g, '-').replace(/^-|-$/g, '').slice(0, 63);
}

/**
 * Create deploy engine
 * @param {Object} options
 * @param {Object} options.sshClient - SSH client instance
 * @returns {Object} Deploy engine API
 */
function createDeployEngine({ sshClient }) {
  const BASE_PORT = 4000;

  /**
   * Deploy a project to VPS.
   * Steps: mkdir → git clone/pull → docker compose up → (optional) nginx + SSL.
   * @param {Object} sshConfig - SSH connection config
   * @param {Object} project - { name, repoUrl }
   * @param {Object} [options] - { domain, branch = 'main' }
   * @param {Function} [onProgress] - called with { step, message }
   * @throws {Error} on invalid project name, branch, repo URL, or domain
   */
  async function deploy(sshConfig, project, options = {}, onProgress) {
    const { domain, branch = 'main' } = options;
    // Validate everything that gets interpolated into remote shell commands.
    if (!isValidProjectName(project.name)) throw new Error(`Invalid project name: ${project.name}`);
    if (!isValidBranch(branch)) throw new Error(`Invalid branch name: ${branch}`);
    if (project.repoUrl && !isValidRepoUrl(project.repoUrl)) throw new Error(`Invalid repo URL: ${project.repoUrl}`);
    if (domain && !isValidDomain(domain)) throw new Error(`Invalid domain: ${domain}`);
    const deployDir = `/opt/deploys/${project.name}`;
    const report = (step, msg) => onProgress && onProgress({ step, message: msg });

    // Step 1: Ensure deploy directory
    report('prepare', 'Creating deploy directory...');
    await sshClient.exec(sshConfig, `mkdir -p ${deployDir}`);

    // Step 2: Clone or pull
    report('git', 'Fetching latest code...');
    const checkGit = await sshClient.exec(sshConfig, `test -d ${deployDir}/.git && echo "exists" || echo "new"`);
    if (checkGit.stdout.trim() === 'exists') {
      await sshClient.exec(sshConfig, `cd ${deployDir} && git fetch origin && git checkout ${branch} && git pull origin ${branch}`);
    } else {
      await sshClient.exec(sshConfig, `git clone ${project.repoUrl} ${deployDir} && cd ${deployDir} && git checkout ${branch}`);
    }

    // Step 3: Docker compose up
    report('docker', 'Starting containers...');
    await sshClient.exec(sshConfig, `cd ${deployDir} && docker compose up -d --build`);

    // Step 4: Nginx config (app assumed to listen on 3000 — TODO confirm)
    if (domain) {
      report('nginx', 'Configuring Nginx...');
      const nginxConf = generateSiteConfig({ domain, port: 3000, proxyPass: 'http://127.0.0.1:3000' });
      await sshClient.exec(sshConfig, `cat > /etc/nginx/sites-available/${project.name} << 'NGINX_EOF'\n${nginxConf}\nNGINX_EOF`);
      await sshClient.exec(sshConfig, `ln -sf /etc/nginx/sites-available/${project.name} /etc/nginx/sites-enabled/`);
      await sshClient.exec(sshConfig, `nginx -t && nginx -s reload`);
    }

    // Step 5: SSL (best-effort; certbot failure does not abort the deploy)
    if (domain) {
      report('ssl', 'Setting up SSL...');
      await sshClient.exec(sshConfig, `certbot --nginx -d ${domain} --non-interactive --agree-tos --email admin@${domain} 2>/dev/null || true`);
    }

    report('done', 'Deployment complete');
  }

  /**
   * Deploy a branch preview on its own port and subdomain.
   * @param {Object} sshConfig - SSH connection config
   * @param {Object} project - { name, repoUrl }
   * @param {string} branch - branch to deploy (original name used for git)
   * @param {string} baseDomain - preview is served at <sanitized>.<baseDomain>
   * @param {Function} [onProgress] - called with { step, message }
   * @returns {Promise<{subdomain: string, port: number, containerName: string}>}
   * @throws {Error} on invalid project name, branch, repo URL, or base domain
   */
  async function deployBranch(sshConfig, project, branch, baseDomain, onProgress) {
    if (!isValidProjectName(project.name)) throw new Error(`Invalid project name: ${project.name}`);
    if (!isValidBranch(branch)) throw new Error(`Invalid branch name: ${branch}`);
    if (project.repoUrl && !isValidRepoUrl(project.repoUrl)) throw new Error(`Invalid repo URL: ${project.repoUrl}`);
    if (baseDomain && !isValidDomain(baseDomain)) throw new Error(`Invalid base domain: ${baseDomain}`);
    const sanitized = sanitizeBranch(branch);
    const deployDir = `/opt/deploys/${project.name}/branches/${sanitized}`;
    const containerName = `tlc-${sanitizeBranch(project.name)}-${sanitized}`;
    const report = (step, msg) => onProgress && onProgress({ step, message: msg });

    // Allocate port: first free integer >= BASE_PORT, persisted in ports.json.
    report('prepare', 'Allocating port...');
    let portData = {};
    try {
      const portsResult = await sshClient.exec(sshConfig, `cat /opt/deploys/${project.name}/ports.json 2>/dev/null || echo "{}"`);
      portData = JSON.parse(portsResult.stdout.trim());
    } catch {
      // Missing/corrupt ports.json → start from an empty allocation map.
    }
    const usedPorts = Object.values(portData);
    let port = BASE_PORT;
    while (usedPorts.includes(port)) port++;
    portData[sanitized] = port;
    // base64 round-trip avoids quoting/escaping issues inside the remote shell.
    const portJson = Buffer.from(JSON.stringify(portData)).toString('base64');
    await sshClient.exec(sshConfig, `mkdir -p /opt/deploys/${project.name} && echo '${portJson}' | base64 -d > /opt/deploys/${project.name}/ports.json`);

    // Clone branch — git commands use the ORIGINAL branch name, not the
    // sanitized one (Phase 81 Task 5).
    report('git', `Cloning branch ${branch}...`);
    await sshClient.exec(sshConfig, `mkdir -p ${deployDir}`);
    const checkGit = await sshClient.exec(sshConfig, `test -d ${deployDir}/.git && echo "exists" || echo "new"`);
    if (checkGit.stdout.trim() === 'exists') {
      await sshClient.exec(sshConfig, `cd ${deployDir} && git fetch origin && git reset --hard origin/${branch} 2>/dev/null || git checkout -b ${branch} origin/${branch}`);
    } else {
      await sshClient.exec(sshConfig, `git clone -b ${branch} ${project.repoUrl} ${deployDir}`);
    }

    // Docker compose with custom port
    report('docker', 'Starting container...');
    await sshClient.exec(sshConfig, `cd ${deployDir} && APP_PORT=${port} COMPOSE_PROJECT_NAME=${containerName} docker compose up -d --build`);

    // Nginx for subdomain
    report('nginx', `Configuring ${sanitized}.${baseDomain}...`);
    const nginxConf = generateSiteConfig({
      domain: `${sanitized}.${baseDomain}`,
      port,
      proxyPass: `http://127.0.0.1:${port}`,
    });
    await sshClient.exec(sshConfig, `cat > /etc/nginx/sites-available/${containerName} << 'NGINX_EOF'\n${nginxConf}\nNGINX_EOF`);
    await sshClient.exec(sshConfig, `ln -sf /etc/nginx/sites-available/${containerName} /etc/nginx/sites-enabled/`);
    await sshClient.exec(sshConfig, `nginx -t && nginx -s reload`);

    report('done', `Preview at ${sanitized}.${baseDomain}`);
    return { subdomain: `${sanitized}.${baseDomain}`, port, containerName };
  }

  /**
   * Rollback to previous commit and rebuild.
   * @throws {Error} on invalid project name
   */
  async function rollback(sshConfig, project, onProgress) {
    // FIX: validate before interpolating into remote shell commands
    // (previously only deploy/deployBranch validated these values).
    if (!isValidProjectName(project.name)) throw new Error(`Invalid project name: ${project.name}`);
    const deployDir = `/opt/deploys/${project.name}`;
    const report = (step, msg) => onProgress && onProgress({ step, message: msg });

    report('rollback', 'Rolling back...');
    await sshClient.exec(sshConfig, `cd ${deployDir} && git checkout HEAD~1`);
    await sshClient.exec(sshConfig, `cd ${deployDir} && docker compose up -d --build`);
    report('done', 'Rollback complete');
  }

  /**
   * Clean up a branch preview: containers, nginx config, deploy dir,
   * and the port allocation entry.
   * @throws {Error} on invalid project name or branch
   */
  async function cleanupBranch(sshConfig, project, branch) {
    // FIX: validate before interpolating into remote shell commands.
    if (!isValidProjectName(project.name)) throw new Error(`Invalid project name: ${project.name}`);
    if (!isValidBranch(branch)) throw new Error(`Invalid branch name: ${branch}`);
    const sanitized = sanitizeBranch(branch);
    const containerName = `tlc-${sanitizeBranch(project.name)}-${sanitized}`;
    const deployDir = `/opt/deploys/${project.name}/branches/${sanitized}`;

    // Stop and remove containers
    await sshClient.exec(sshConfig, `cd ${deployDir} && docker compose down 2>/dev/null; docker stop ${containerName} 2>/dev/null; docker rm ${containerName} 2>/dev/null || true`);

    // Remove nginx config
    await sshClient.exec(sshConfig, `rm -f /etc/nginx/sites-enabled/${containerName} /etc/nginx/sites-available/${containerName}`);
    await sshClient.exec(sshConfig, `nginx -t && nginx -s reload 2>/dev/null || true`);

    // Remove deploy directory
    await sshClient.exec(sshConfig, `rm -rf ${deployDir}`);

    // Remove from port allocation
    try {
      const portsResult = await sshClient.exec(sshConfig, `cat /opt/deploys/${project.name}/ports.json 2>/dev/null || echo "{}"`);
      const portData = JSON.parse(portsResult.stdout.trim());
      delete portData[sanitized];
      const portJson = Buffer.from(JSON.stringify(portData)).toString('base64');
      await sshClient.exec(sshConfig, `echo '${portJson}' | base64 -d > /opt/deploys/${project.name}/ports.json`);
    } catch {
      // Best-effort: a missing/corrupt ports.json must not fail the cleanup.
    }
  }

  /**
   * List active branch deployments for a project.
   * @returns {Promise<Array<{branch: string, directory: string}>>}
   * @throws {Error} on invalid project name
   */
  async function listDeployments(sshConfig, project) {
    // FIX: validate before interpolating into remote shell commands.
    if (!isValidProjectName(project.name)) throw new Error(`Invalid project name: ${project.name}`);
    const result = await sshClient.exec(sshConfig, `ls /opt/deploys/${project.name}/branches/ 2>/dev/null || echo ""`);
    const branches = result.stdout.trim().split('\n').filter(Boolean);
    return branches.map(name => ({ branch: name, directory: `/opt/deploys/${project.name}/branches/${name}` }));
  }

  return { deploy, deployBranch, rollback, cleanupBranch, listDeployments, sanitizeBranch };
}

module.exports = { createDeployEngine };
import { describe, it, expect, beforeEach, vi } from 'vitest';

const { createDeployEngine } = await import('./deploy-engine.js');

// Shared, never-mutated fixtures used by every test.
const sshConfig = { host: '1.2.3.4', username: 'deploy', privateKeyPath: '/key' };
const project = { name: 'myapp', repoUrl: 'git@github.com:user/myapp.git' };

function createMockSsh() {
  return {
    exec: vi.fn().mockResolvedValue({ stdout: '', stderr: '', exitCode: 0 }),
    execStream: vi.fn().mockResolvedValue(0),
    upload: vi.fn().mockResolvedValue(),
  };
}

describe('DeployEngine', () => {
  let engine;
  let mockSsh;

  // Every shell command issued over the mocked SSH connection so far.
  const sentCommands = () => mockSsh.exec.mock.calls.map((call) => call[1]);

  beforeEach(() => {
    mockSsh = createMockSsh();
    engine = createDeployEngine({ sshClient: mockSsh });
  });

  describe('deploy', () => {
    it('executes git clone + docker compose + nginx steps', async () => {
      const progress = [];

      await engine.deploy(sshConfig, project, { domain: 'myapp.dev', branch: 'main' }, (step) => progress.push(step));

      // Should have called ssh exec multiple times
      expect(mockSsh.exec.mock.calls.length).toBeGreaterThan(0);
      // Should have progress steps
      expect(progress.length).toBeGreaterThan(0);
      // Verify key steps happened
      const commands = sentCommands();
      expect(commands.some(c => c.includes('git'))).toBe(true);
      expect(commands.some(c => c.includes('docker'))).toBe(true);
    });

    it('generates correct Nginx config for project domain', async () => {
      await engine.deploy(sshConfig, project, { domain: 'myapp.dev', branch: 'main' });

      // Should write nginx config
      const nginxWrite = sentCommands().find(c => c.includes('sites-available') || c.includes('nginx'));
      expect(nginxWrite).toBeTruthy();
    });
  });

  describe('deployBranch', () => {
    it('creates subdomain config for branch', async () => {
      await engine.deployBranch(sshConfig, project, 'feat-login', 'myapp.dev');

      expect(sentCommands().some(c => c.includes('feat-login'))).toBe(true);
    });

    it('sanitizes branch name for DNS', async () => {
      await engine.deployBranch(sshConfig, project, 'feature/login-page', 'myapp.dev');

      // Should contain sanitized name (slashes → dashes)
      expect(sentCommands().some(c => c.includes('feature-login-page'))).toBe(true);
    });

    // Phase 81 Task 5: git commands must use original branch name
    it('git clone uses original branch name not sanitized', async () => {
      await engine.deployBranch(sshConfig, project, 'feature/login-page', 'myapp.dev');

      const commands = sentCommands();
      // git clone -b must use the ORIGINAL branch name (feature/login-page)
      const cloneCmd = commands.find(c => c.includes('git clone'));
      if (cloneCmd) {
        expect(cloneCmd).toContain('-b feature/login-page');
      }
      // git reset/checkout must use the ORIGINAL branch name
      const resetCmd = commands.find(c => c.includes('git reset') || c.includes('git checkout'));
      if (resetCmd) {
        expect(resetCmd).toContain('origin/feature/login-page');
      }
    });

    it('allocates unique port', async () => {
      mockSsh.exec.mockImplementation(async (config, cmd) => {
        const isPortsRead = cmd.includes('cat') && cmd.includes('ports.json');
        return { stdout: isPortsRead ? '{}' : '', stderr: '', exitCode: 0 };
      });

      const result = await engine.deployBranch(sshConfig, project, 'main', 'myapp.dev');
      expect(result.port).toBeGreaterThan(0);
    });
  });

  describe('rollback', () => {
    it('checks out previous commit', async () => {
      await engine.rollback(sshConfig, { name: 'myapp' });

      expect(sentCommands().some(c => c.includes('git') && c.includes('HEAD~1'))).toBe(true);
    });
  });

  describe('cleanupBranch', () => {
    it('removes container and nginx config', async () => {
      await engine.cleanupBranch(sshConfig, { name: 'myapp' }, 'feat-login');

      const commands = sentCommands();
      expect(commands.some(c => c.includes('docker') && (c.includes('stop') || c.includes('rm')))).toBe(true);
      expect(commands.some(c => c.includes('rm') && c.includes('sites-enabled'))).toBe(true);
    });
  });

  describe('listDeployments', () => {
    it('returns active deploys', async () => {
      mockSsh.exec.mockResolvedValue({
        stdout: 'main\nfeat-login\n',
        stderr: '',
        exitCode: 0,
      });

      const deploys = await engine.listDeployments(sshConfig, { name: 'myapp' });
      expect(Array.isArray(deploys)).toBe(true);
    });
  });
});
/**
 * Docker API Router — Express routes for Docker management
 * Phase 80 Task 1
 */

const express = require('express');

/**
 * Map a docker-client error to an HTTP status code.
 * FIX: previously only GET /containers/:id translated the daemon's 404
 * (err.statusCode) into an HTTP 404; every other per-container route
 * answered 500 even when the container simply did not exist. All
 * per-container routes now share this mapping.
 * @param {Error} err - error from the docker client (may carry statusCode)
 * @returns {number} HTTP status code (404 or 500)
 */
function errorStatus(err) {
  return err.statusCode === 404 ? 404 : 500;
}

/**
 * Create Docker API router
 * @param {Object} options
 * @param {Object} options.dockerClient - Docker client instance
 * @returns {express.Router}
 */
function createDockerRouter({ dockerClient }) {
  const router = express.Router();

  // GET /docker/status — daemon availability probe; 503 when unreachable
  router.get('/status', async (req, res) => {
    try {
      const status = await dockerClient.isAvailable();
      if (!status.available) {
        return res.status(503).json(status);
      }
      res.json(status);
    } catch (err) {
      res.status(503).json({ available: false, error: err.message });
    }
  });

  // GET /docker/containers?all=true — list containers (running-only unless all=true)
  router.get('/containers', async (req, res) => {
    try {
      const all = req.query.all === 'true';
      const containers = await dockerClient.listContainers(all);
      res.json(containers);
    } catch (err) {
      res.status(500).json({ error: err.message });
    }
  });

  // GET /docker/containers/:id — inspect a single container
  router.get('/containers/:id', async (req, res) => {
    try {
      const detail = await dockerClient.getContainer(req.params.id);
      res.json(detail);
    } catch (err) {
      res.status(errorStatus(err)).json({ error: err.message });
    }
  });

  // POST /docker/containers/:id/start
  router.post('/containers/:id/start', async (req, res) => {
    try {
      await dockerClient.startContainer(req.params.id);
      res.json({ ok: true });
    } catch (err) {
      res.status(errorStatus(err)).json({ error: err.message });
    }
  });

  // POST /docker/containers/:id/stop
  router.post('/containers/:id/stop', async (req, res) => {
    try {
      await dockerClient.stopContainer(req.params.id);
      res.json({ ok: true });
    } catch (err) {
      res.status(errorStatus(err)).json({ error: err.message });
    }
  });

  // POST /docker/containers/:id/restart
  router.post('/containers/:id/restart', async (req, res) => {
    try {
      await dockerClient.restartContainer(req.params.id);
      res.json({ ok: true });
    } catch (err) {
      res.status(errorStatus(err)).json({ error: err.message });
    }
  });

  // DELETE /docker/containers/:id?force=true — remove (optionally force) a container
  router.delete('/containers/:id', async (req, res) => {
    try {
      const force = req.query.force === 'true';
      await dockerClient.removeContainer(req.params.id, force);
      res.json({ ok: true });
    } catch (err) {
      res.status(errorStatus(err)).json({ error: err.message });
    }
  });

  // GET /docker/containers/:id/logs?tail=N — last N log lines (default 100)
  router.get('/containers/:id/logs', async (req, res) => {
    try {
      // NaN and 0 both fall back to 100 (|| is intentional here).
      const tail = Number.parseInt(req.query.tail, 10) || 100;
      const logs = await dockerClient.getContainerLogs(req.params.id, { tail });
      res.json({ logs });
    } catch (err) {
      res.status(errorStatus(err)).json({ error: err.message });
    }
  });

  // GET /docker/containers/:id/stats — resource usage snapshot
  router.get('/containers/:id/stats', async (req, res) => {
    try {
      const stats = await dockerClient.getContainerStats(req.params.id);
      res.json(stats);
    } catch (err) {
      res.status(errorStatus(err)).json({ error: err.message });
    }
  });

  // GET /docker/images
  router.get('/images', async (req, res) => {
    try {
      const images = await dockerClient.listImages();
      res.json(images);
    } catch (err) {
      res.status(500).json({ error: err.message });
    }
  });

  // GET /docker/volumes
  router.get('/volumes', async (req, res) => {
    try {
      const volumes = await dockerClient.listVolumes();
      res.json(volumes);
    } catch (err) {
      res.status(500).json({ error: err.message });
    }
  });

  return router;
}

module.exports = { createDockerRouter };