tlc-claude-code 2.0.1 → 2.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/agents/builder.md +144 -0
- package/.claude/agents/planner.md +143 -0
- package/.claude/agents/reviewer.md +160 -0
- package/.claude/commands/tlc/build.md +4 -0
- package/.claude/commands/tlc/deploy.md +194 -2
- package/.claude/commands/tlc/e2e-verify.md +214 -0
- package/.claude/commands/tlc/guard.md +191 -0
- package/.claude/commands/tlc/help.md +32 -0
- package/.claude/commands/tlc/init.md +73 -37
- package/.claude/commands/tlc/llm.md +19 -4
- package/.claude/commands/tlc/preflight.md +134 -0
- package/.claude/commands/tlc/review-plan.md +363 -0
- package/.claude/commands/tlc/review.md +172 -57
- package/.claude/commands/tlc/watchci.md +159 -0
- package/.claude/hooks/tlc-block-tools.sh +41 -0
- package/.claude/hooks/tlc-capture-exchange.sh +50 -0
- package/.claude/hooks/tlc-post-build.sh +38 -0
- package/.claude/hooks/tlc-post-push.sh +22 -0
- package/.claude/hooks/tlc-prompt-guard.sh +69 -0
- package/.claude/hooks/tlc-session-init.sh +123 -0
- package/CLAUDE.md +13 -0
- package/bin/install.js +268 -2
- package/bin/postinstall.js +102 -24
- package/bin/setup-autoupdate.js +206 -0
- package/bin/setup-autoupdate.test.js +124 -0
- package/bin/tlc.js +0 -0
- package/dashboard-web/dist/assets/index-CdS5CHqu.css +1 -0
- package/dashboard-web/dist/assets/index-CwNPPVpg.js +483 -0
- package/dashboard-web/dist/assets/index-CwNPPVpg.js.map +1 -0
- package/dashboard-web/dist/index.html +2 -2
- package/docker-compose.dev.yml +18 -12
- package/package.json +4 -2
- package/scripts/project-docs.js +1 -1
- package/server/index.js +228 -2
- package/server/lib/capture-bridge.js +242 -0
- package/server/lib/capture-bridge.test.js +363 -0
- package/server/lib/capture-guard.js +140 -0
- package/server/lib/capture-guard.test.js +182 -0
- package/server/lib/command-runner.js +159 -0
- package/server/lib/command-runner.test.js +92 -0
- package/server/lib/cost-tracker.test.js +49 -12
- package/server/lib/deploy/runners/dependency-runner.js +106 -0
- package/server/lib/deploy/runners/dependency-runner.test.js +148 -0
- package/server/lib/deploy/runners/secrets-runner.js +174 -0
- package/server/lib/deploy/runners/secrets-runner.test.js +127 -0
- package/server/lib/deploy/security-gates.js +11 -24
- package/server/lib/deploy/security-gates.test.js +9 -2
- package/server/lib/deploy-engine.js +182 -0
- package/server/lib/deploy-engine.test.js +147 -0
- package/server/lib/docker-api.js +137 -0
- package/server/lib/docker-api.test.js +202 -0
- package/server/lib/docker-client.js +297 -0
- package/server/lib/docker-client.test.js +308 -0
- package/server/lib/input-sanitizer.js +86 -0
- package/server/lib/input-sanitizer.test.js +117 -0
- package/server/lib/launchd-agent.js +225 -0
- package/server/lib/launchd-agent.test.js +185 -0
- package/server/lib/memory-api.js +3 -1
- package/server/lib/memory-api.test.js +3 -5
- package/server/lib/memory-bridge-e2e.test.js +160 -0
- package/server/lib/memory-committer.js +18 -4
- package/server/lib/memory-committer.test.js +21 -0
- package/server/lib/memory-hooks-capture.test.js +69 -4
- package/server/lib/memory-hooks-integration.test.js +98 -0
- package/server/lib/memory-hooks.js +42 -4
- package/server/lib/memory-store-adapter.js +105 -0
- package/server/lib/memory-store-adapter.test.js +141 -0
- package/server/lib/memory-wiring-e2e.test.js +93 -0
- package/server/lib/nginx-config.js +114 -0
- package/server/lib/nginx-config.test.js +82 -0
- package/server/lib/ollama-health.js +91 -0
- package/server/lib/ollama-health.test.js +74 -0
- package/server/lib/orchestration/agent-dispatcher.js +114 -0
- package/server/lib/orchestration/agent-dispatcher.test.js +110 -0
- package/server/lib/orchestration/orchestrator.js +130 -0
- package/server/lib/orchestration/orchestrator.test.js +192 -0
- package/server/lib/orchestration/tmux-manager.js +101 -0
- package/server/lib/orchestration/tmux-manager.test.js +109 -0
- package/server/lib/orchestration/worktree-manager.js +132 -0
- package/server/lib/orchestration/worktree-manager.test.js +129 -0
- package/server/lib/port-guard.js +44 -0
- package/server/lib/port-guard.test.js +65 -0
- package/server/lib/project-scanner.js +37 -2
- package/server/lib/project-scanner.test.js +152 -0
- package/server/lib/remember-command.js +2 -0
- package/server/lib/remember-command.test.js +23 -0
- package/server/lib/review/plan-reviewer.js +260 -0
- package/server/lib/review/plan-reviewer.test.js +269 -0
- package/server/lib/review/review-schemas.js +173 -0
- package/server/lib/review/review-schemas.test.js +152 -0
- package/server/lib/security/crypto-utils.test.js +2 -2
- package/server/lib/semantic-recall.js +1 -1
- package/server/lib/semantic-recall.test.js +17 -0
- package/server/lib/ssh-client.js +184 -0
- package/server/lib/ssh-client.test.js +127 -0
- package/server/lib/vps-api.js +184 -0
- package/server/lib/vps-api.test.js +208 -0
- package/server/lib/vps-bootstrap.js +124 -0
- package/server/lib/vps-bootstrap.test.js +79 -0
- package/server/lib/vps-monitor.js +126 -0
- package/server/lib/vps-monitor.test.js +98 -0
- package/server/lib/workspace-api.js +182 -1
- package/server/lib/workspace-api.test.js +474 -0
- package/server/package-lock.json +737 -0
- package/server/package.json +3 -0
- package/server/setup.sh +271 -271
- package/dashboard-web/dist/assets/index-Uhc49PE-.css +0 -1
- package/dashboard-web/dist/assets/index-W36XHPC5.js +0 -431
- package/dashboard-web/dist/assets/index-W36XHPC5.js.map +0 -1
|
@@ -0,0 +1,174 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Secrets Runner
|
|
3
|
+
*
|
|
4
|
+
* Scans project files for hardcoded secrets using regex patterns.
|
|
5
|
+
* No external tools required — pure pattern matching.
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
import { readdir, readFile as fsReadFile } from 'node:fs/promises';
|
|
9
|
+
import path from 'node:path';
|
|
10
|
+
|
|
11
|
+
/** Secret detection patterns applied line-by-line to every scanned file. */
const SECRET_PATTERNS = [
  {
    // password/passwd/pwd assigned a quoted literal of 4+ characters
    name: 'hardcoded-password',
    pattern: /(?:password|passwd|pwd)\s*[:=]\s*["'][^"']{4,}["']/i,
    severity: 'high',
  },
  {
    // AWS access key IDs: literal "AKIA" followed by 16 uppercase alphanumerics
    name: 'aws-access-key',
    pattern: /AKIA[0-9A-Z]{16}/,
    severity: 'critical',
  },
  {
    // PEM private-key header (RSA/EC/DSA-qualified or unqualified)
    name: 'private-key',
    pattern: /-----BEGIN (?:RSA |EC |DSA )?PRIVATE KEY-----/,
    severity: 'critical',
  },
  {
    // api_key / api-key / apikey assigned a quoted literal of 8+ characters
    name: 'generic-api-key',
    pattern: /(?:api[_-]?key|apikey)\s*[:=]\s*["'][^"']{8,}["']/i,
    severity: 'high',
  },
  {
    // secret/token assigned a value with a well-known secret prefix
    // (sk_live_/sk_test_/ghp_/gho_/ghs_)
    name: 'generic-secret',
    pattern: /(?:secret|token)\s*[:=]\s*["'](?:sk_live_|sk_test_|ghp_|gho_|ghs_)[^"']+["']/i,
    severity: 'high',
  },
];

/** File extensions to scan (compared against path.extname of each file) */
const SCAN_EXTENSIONS = new Set([
  '.js', '.ts', '.json', '.env', '.yml', '.yaml', '.jsx', '.tsx', '.mjs', '.cjs',
]);

/** Default file glob pattern (used with injected glob) */
const DEFAULT_GLOB = '**/*.{js,ts,json,env,yml,yaml,jsx,tsx,mjs,cjs}';

/**
 * Default exclusion patterns (glob path only).
 * Lockfiles and test fixtures routinely contain fake credentials, so they are
 * excluded to keep the signal high.
 */
const DEFAULT_IGNORE = [
  '**/node_modules/**',
  '**/.git/**',
  '**/package-lock.json',
  '**/yarn.lock',
  '**/pnpm-lock.yaml',
  '**/*.test.*',
  '**/*.spec.*',
  '**/__tests__/**',
];

/** Directory names to skip during recursive walk (walker path only) */
const SKIP_DIRS = new Set(['node_modules', '.git', '__tests__']);

/** File patterns to skip — the walker-side mirror of DEFAULT_IGNORE */
const SKIP_FILE_PATTERNS = [
  /\.test\./,
  /\.spec\./,
  /package-lock\.json$/,
  /yarn\.lock$/,
  /pnpm-lock\.yaml$/,
];
|
|
71
|
+
|
|
72
|
+
/**
|
|
73
|
+
* Recursively find files matching scan extensions
|
|
74
|
+
* @param {string} dir - Directory to walk
|
|
75
|
+
* @param {string} baseDir - Base directory for relative paths
|
|
76
|
+
* @returns {Promise<string[]>} Relative file paths
|
|
77
|
+
*/
|
|
78
|
+
async function walkDir(dir, baseDir) {
  const results = [];
  const entries = await readdir(dir, { withFileTypes: true });

  for (const entry of entries) {
    if (entry.isDirectory()) {
      if (SKIP_DIRS.has(entry.name)) continue;
      const subResults = await walkDir(path.join(dir, entry.name), baseDir);
      results.push(...subResults);
    } else if (entry.isFile()) {
      // Bug fix: path.extname('.env') === '' for bare dotfiles, so a file
      // named exactly `.env` — a prime location for secrets — was silently
      // skipped. Match the full file name against SCAN_EXTENSIONS as well.
      const ext = path.extname(entry.name);
      if (!SCAN_EXTENSIONS.has(ext) && !SCAN_EXTENSIONS.has(entry.name)) continue;

      const relPath = path.relative(baseDir, path.join(dir, entry.name));
      if (SKIP_FILE_PATTERNS.some((p) => p.test(relPath))) continue;

      results.push(relPath);
    }
  }

  return results;
}
|
|
100
|
+
|
|
101
|
+
/**
|
|
102
|
+
* Create a secrets scanning runner
|
|
103
|
+
* @param {Object} [deps] - Injectable dependencies for testing
|
|
104
|
+
* @param {Function} [deps.glob] - Glob function (pattern, options) => string[]
|
|
105
|
+
* @param {Function} [deps.readFile] - File reader (path) => string
|
|
106
|
+
* @param {string[]} [deps.extraIgnore] - Additional exclusion patterns
|
|
107
|
+
* @returns {Function} Runner function: (projectPath, options) => { passed, findings }
|
|
108
|
+
*/
|
|
109
|
+
export function createSecretsRunner(deps = {}) {
  const {
    glob: globFn,
    readFile: readFileFn,
    extraIgnore = [],
  } = deps;

  const ignorePatterns = [...DEFAULT_IGNORE, ...extraIgnore];

  return async (projectPath, options = {}) => {
    let files;

    if (globFn) {
      // Use injected glob (testing)
      files = await globFn(DEFAULT_GLOB, {
        cwd: projectPath,
        ignore: ignorePatterns,
      });
    } else {
      // Use built-in recursive walker (production)
      try {
        files = await walkDir(projectPath, projectPath);
      } catch {
        // Unreadable/missing project root — nothing to scan.
        files = [];
      }
    }

    if (files.length === 0) {
      return { passed: true, findings: [] };
    }

    const findings = [];

    for (const file of files) {
      let content;
      try {
        if (readFileFn) {
          content = await readFileFn(file);
        } else {
          content = await fsReadFile(path.join(projectPath, file), 'utf-8');
        }
      } catch {
        // Bug fix: a single unreadable file (permissions, deleted between
        // listing and reading) previously aborted the entire scan with an
        // unhandled rejection. Skip it and keep scanning the rest.
        continue;
      }

      const lines = content.split('\n');

      for (let i = 0; i < lines.length; i++) {
        const line = lines[i];

        for (const secretPattern of SECRET_PATTERNS) {
          if (secretPattern.pattern.test(line)) {
            findings.push({
              severity: secretPattern.severity,
              file,
              line: i + 1, // 1-based line number for reporting
              pattern: secretPattern.name,
              // Truncate so full secret values are not echoed into reports
              match: line.trim().substring(0, 80),
            });
          }
        }
      }
    }

    return {
      passed: findings.length === 0,
      findings,
    };
  };
}
|
|
@@ -0,0 +1,127 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Secrets Runner Tests
|
|
3
|
+
*/
|
|
4
|
+
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
|
5
|
+
import { createSecretsRunner } from './secrets-runner.js';
|
|
6
|
+
|
|
7
|
+
describe('secrets-runner', () => {
  let globMock;
  let readFileMock;
  let runner;

  beforeEach(() => {
    // Fresh injected dependencies per test; defaults model an empty project.
    globMock = vi.fn().mockResolvedValue([]);
    readFileMock = vi.fn().mockResolvedValue('');
    runner = createSecretsRunner({ glob: globMock, readFile: readFileMock });
  });

  it('passes when no secrets found in clean project', async () => {
    globMock.mockResolvedValue(['src/index.js']);
    readFileMock.mockResolvedValue('const x = 1;\nconsole.log(x);');

    const result = await runner('/test/project', {});
    expect(result.passed).toBe(true);
    expect(result.findings).toEqual([]);
  });

  it('detects hardcoded password assignment', async () => {
    globMock.mockResolvedValue(['src/config.js']);
    readFileMock.mockResolvedValue(
      'const config = {\n password: "supersecret123"\n};'
    );

    const result = await runner('/test/project', {});
    expect(result.passed).toBe(false);
    expect(result.findings.length).toBeGreaterThan(0);
    expect(result.findings[0].file).toBe('src/config.js');
    expect(result.findings[0].pattern).toBeDefined();
  });

  it('detects AWS access key pattern', async () => {
    globMock.mockResolvedValue(['src/aws.js']);
    readFileMock.mockResolvedValue(
      'const key = "AKIAIOSFODNN7EXAMPLE";\n'
    );

    const result = await runner('/test/project', {});
    expect(result.passed).toBe(false);
    expect(result.findings.length).toBeGreaterThan(0);
  });

  it('detects private key header', async () => {
    globMock.mockResolvedValue(['certs/key.pem']);
    readFileMock.mockResolvedValue(
      '-----BEGIN RSA PRIVATE KEY-----\nMIIEpA...\n-----END RSA PRIVATE KEY-----'
    );

    const result = await runner('/test/project', {});
    expect(result.passed).toBe(false);
    expect(result.findings.length).toBeGreaterThan(0);
  });

  // Title fix: a `token = "ghp_..."` assignment matches the `generic-secret`
  // pattern (secret/token key with a known prefix), not `generic-api-key`
  // (which requires an api_key/apikey key).
  it('detects GitHub token via generic-secret pattern', async () => {
    globMock.mockResolvedValue(['src/api.js']);
    readFileMock.mockResolvedValue(
      'const token = "ghp_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx";\n'
    );

    const result = await runner('/test/project', {});
    expect(result.passed).toBe(false);
    expect(result.findings.length).toBeGreaterThan(0);
  });

  it('excludes test files by default', async () => {
    globMock.mockResolvedValue([]);

    const result = await runner('/test/project', {});
    expect(result.passed).toBe(true);

    // Verify glob was called with exclusion patterns
    const callArgs = globMock.mock.calls[0];
    expect(callArgs[0]).toBeDefined(); // pattern
    expect(callArgs[1]).toBeDefined(); // options with ignore
  });

  it('excludes node_modules by default', async () => {
    globMock.mockResolvedValue([]);

    await runner('/test/project', {});
    const callArgs = globMock.mock.calls[0];
    const options = callArgs[1];
    expect(options.ignore).toContain('**/node_modules/**');
  });

  it('supports configurable exclusion patterns', async () => {
    const customRunner = createSecretsRunner({
      glob: globMock,
      readFile: readFileMock,
      extraIgnore: ['**/fixtures/**'],
    });

    globMock.mockResolvedValue([]);

    await customRunner('/test/project', {});
    const callArgs = globMock.mock.calls[0];
    const options = callArgs[1];
    expect(options.ignore).toContain('**/fixtures/**');
  });

  it('handles empty project directory', async () => {
    globMock.mockResolvedValue([]);

    const result = await runner('/test/project', {});
    expect(result.passed).toBe(true);
    expect(result.findings).toEqual([]);
  });

  it('includes line number in findings', async () => {
    globMock.mockResolvedValue(['src/config.js']);
    readFileMock.mockResolvedValue(
      'const a = 1;\nconst password = "secret";\nconst b = 2;'
    );

    const result = await runner('/test/project', {});
    expect(result.passed).toBe(false);
    expect(result.findings[0].line).toBe(2);
  });
});
|
|
@@ -5,6 +5,9 @@
|
|
|
5
5
|
* running security gates during deployment validation.
|
|
6
6
|
*/
|
|
7
7
|
|
|
8
|
+
import { createDependencyRunner } from './runners/dependency-runner.js';
|
|
9
|
+
import { createSecretsRunner } from './runners/secrets-runner.js';
|
|
10
|
+
|
|
8
11
|
/**
|
|
9
12
|
* Gate type constants
|
|
10
13
|
*/
|
|
@@ -36,29 +39,13 @@ const DEFAULT_TIER_GATES = {
|
|
|
36
39
|
};
|
|
37
40
|
|
|
38
41
|
/**
|
|
39
|
-
*
|
|
42
|
+
* Built-in runners for dependencies and secrets gates.
|
|
43
|
+
* SAST, DAST, and container gates require custom runner injection.
|
|
44
|
+
* Gates without runners will SKIP (not fake-pass).
|
|
40
45
|
*/
|
|
41
|
-
const
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
return { passed: true, findings: [] };
|
|
45
|
-
},
|
|
46
|
-
dast: async (projectPath, options) => {
|
|
47
|
-
// Placeholder DAST implementation
|
|
48
|
-
return { passed: true, findings: [] };
|
|
49
|
-
},
|
|
50
|
-
dependencies: async (projectPath, options) => {
|
|
51
|
-
// Placeholder dependency scanning implementation
|
|
52
|
-
return { passed: true, findings: [] };
|
|
53
|
-
},
|
|
54
|
-
container: async (projectPath, options) => {
|
|
55
|
-
// Placeholder container scanning implementation
|
|
56
|
-
return { passed: true, findings: [] };
|
|
57
|
-
},
|
|
58
|
-
secrets: async (projectPath, options) => {
|
|
59
|
-
// Placeholder secrets scanning implementation
|
|
60
|
-
return { passed: true, findings: [] };
|
|
61
|
-
},
|
|
46
|
+
const builtInRunners = {
|
|
47
|
+
dependencies: createDependencyRunner(),
|
|
48
|
+
secrets: createSecretsRunner(),
|
|
62
49
|
};
|
|
63
50
|
|
|
64
51
|
/**
|
|
@@ -197,8 +184,8 @@ export async function runAllGates(tier, options = {}) {
|
|
|
197
184
|
export function createSecurityGates(config = {}) {
|
|
198
185
|
const { runners = {}, gateConfig = null } = config;
|
|
199
186
|
|
|
200
|
-
// Merge
|
|
201
|
-
const allRunners = { ...
|
|
187
|
+
// Merge built-in runners with custom runners (custom overrides built-in)
|
|
188
|
+
const allRunners = { ...builtInRunners, ...runners };
|
|
202
189
|
|
|
203
190
|
return {
|
|
204
191
|
/**
|
|
@@ -213,10 +213,17 @@ describe('security-gates', () => {
|
|
|
213
213
|
expect(gates.hasRunner('custom')).toBe(true);
|
|
214
214
|
});
|
|
215
215
|
|
|
216
|
-
it('
|
|
216
|
+
it('has built-in runners for dependencies and secrets', () => {
|
|
217
217
|
const gates = createSecurityGates();
|
|
218
|
-
expect(gates.hasRunner('sast')).toBe(true);
|
|
219
218
|
expect(gates.hasRunner('dependencies')).toBe(true);
|
|
219
|
+
expect(gates.hasRunner('secrets')).toBe(true);
|
|
220
|
+
});
|
|
221
|
+
|
|
222
|
+
it('skips SAST/DAST/container without custom runners', () => {
|
|
223
|
+
const gates = createSecurityGates();
|
|
224
|
+
expect(gates.hasRunner('sast')).toBe(false);
|
|
225
|
+
expect(gates.hasRunner('dast')).toBe(false);
|
|
226
|
+
expect(gates.hasRunner('container')).toBe(false);
|
|
220
227
|
});
|
|
221
228
|
});
|
|
222
229
|
});
|
|
@@ -0,0 +1,182 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Deploy Engine — deploy projects to VPS via SSH
|
|
3
|
+
* Phase 80 Task 6
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
const { generateSiteConfig } = require('./nginx-config.js');
|
|
7
|
+
const { isValidBranch, isValidRepoUrl, isValidDomain, isValidProjectName } = require('./input-sanitizer.js');
|
|
8
|
+
|
|
9
|
+
/**
 * Sanitize a branch name into a DNS-label / container-name safe slug.
 *
 * Lowercases, replaces every character outside [a-z0-9-] with '-', collapses
 * runs of '-', strips leading/trailing '-', and truncates to 63 characters
 * (the DNS label limit).
 *
 * @param {string} branch - Raw git branch name
 * @returns {string} Safe slug; 'unknown' when branch is empty or sanitizes to nothing
 */
function sanitizeBranch(branch) {
  if (!branch) return 'unknown';
  const slug = branch
    .toLowerCase()
    .replace(/[^a-z0-9-]/g, '-')
    .replace(/-+/g, '-')
    .replace(/^-|-$/g, '')
    .slice(0, 63);
  // Bug fix: a branch made up entirely of invalid characters (e.g. '@@@')
  // previously yielded '', producing invalid subdomains like '.example.com'
  // and container names with a trailing dash.
  return slug === '' ? 'unknown' : slug;
}
|
|
16
|
+
|
|
17
|
+
/**
|
|
18
|
+
* Create deploy engine
|
|
19
|
+
* @param {Object} options
|
|
20
|
+
* @param {Object} options.sshClient - SSH client instance
|
|
21
|
+
* @returns {Object} Deploy engine API
|
|
22
|
+
*/
|
|
23
|
+
function createDeployEngine({ sshClient }) {
  // First port tried when allocating branch-preview ports.
  const BASE_PORT = 4000;

  /**
   * Validate that a project name is safe to interpolate into remote shell
   * commands. Security fix: deploy() and deployBranch() already validated,
   * but rollback(), cleanupBranch(), and listDeployments() passed
   * project.name to sshClient.exec unvalidated — a command-injection vector.
   */
  function validateProject(project) {
    if (!isValidProjectName(project.name)) throw new Error(`Invalid project name: ${project.name}`);
  }

  /**
   * Deploy a project to VPS
   * @param {Object} sshConfig - SSH connection config forwarded to sshClient.exec
   * @param {Object} project - { name, repoUrl }
   * @param {Object} [options] - { domain, branch = 'main' }
   * @param {Function} [onProgress] - Progress callback ({ step, message })
   */
  async function deploy(sshConfig, project, options = {}, onProgress) {
    const { domain, branch = 'main' } = options;
    validateProject(project);
    if (!isValidBranch(branch)) throw new Error(`Invalid branch name: ${branch}`);
    if (project.repoUrl && !isValidRepoUrl(project.repoUrl)) throw new Error(`Invalid repo URL: ${project.repoUrl}`);
    if (domain && !isValidDomain(domain)) throw new Error(`Invalid domain: ${domain}`);
    const deployDir = `/opt/deploys/${project.name}`;
    const report = (step, msg) => onProgress && onProgress({ step, message: msg });

    // Step 1: Ensure deploy directory
    report('prepare', 'Creating deploy directory...');
    await sshClient.exec(sshConfig, `mkdir -p ${deployDir}`);

    // Step 2: Clone or pull
    report('git', 'Fetching latest code...');
    const checkGit = await sshClient.exec(sshConfig, `test -d ${deployDir}/.git && echo "exists" || echo "new"`);
    if (checkGit.stdout.trim() === 'exists') {
      await sshClient.exec(sshConfig, `cd ${deployDir} && git fetch origin && git checkout ${branch} && git pull origin ${branch}`);
    } else {
      await sshClient.exec(sshConfig, `git clone ${project.repoUrl} ${deployDir} && cd ${deployDir} && git checkout ${branch}`);
    }

    // Step 3: Docker compose up
    report('docker', 'Starting containers...');
    await sshClient.exec(sshConfig, `cd ${deployDir} && docker compose up -d --build`);

    // Step 4: Nginx config (the app is proxied on port 3000)
    if (domain) {
      report('nginx', 'Configuring Nginx...');
      const nginxConf = generateSiteConfig({ domain, port: 3000, proxyPass: 'http://127.0.0.1:3000' });
      await sshClient.exec(sshConfig, `cat > /etc/nginx/sites-available/${project.name} << 'NGINX_EOF'\n${nginxConf}\nNGINX_EOF`);
      await sshClient.exec(sshConfig, `ln -sf /etc/nginx/sites-available/${project.name} /etc/nginx/sites-enabled/`);
      await sshClient.exec(sshConfig, `nginx -t && nginx -s reload`);
    }

    // Step 5: SSL (best-effort; certbot failure is deliberately swallowed)
    if (domain) {
      report('ssl', 'Setting up SSL...');
      await sshClient.exec(sshConfig, `certbot --nginx -d ${domain} --non-interactive --agree-tos --email admin@${domain} 2>/dev/null || true`);
    }

    report('done', 'Deployment complete');
  }

  /**
   * Deploy a branch preview reachable at <sanitized-branch>.<baseDomain>
   * @returns {Promise<{subdomain: string, port: number, containerName: string}>}
   */
  async function deployBranch(sshConfig, project, branch, baseDomain, onProgress) {
    validateProject(project);
    if (!isValidBranch(branch)) throw new Error(`Invalid branch name: ${branch}`);
    if (project.repoUrl && !isValidRepoUrl(project.repoUrl)) throw new Error(`Invalid repo URL: ${project.repoUrl}`);
    if (baseDomain && !isValidDomain(baseDomain)) throw new Error(`Invalid base domain: ${baseDomain}`);
    const sanitized = sanitizeBranch(branch);
    const deployDir = `/opt/deploys/${project.name}/branches/${sanitized}`;
    const containerName = `tlc-${sanitizeBranch(project.name)}-${sanitized}`;
    const report = (step, msg) => onProgress && onProgress({ step, message: msg });

    // Allocate port: read the project's ports.json, take the first free port
    // at or above BASE_PORT, and persist the updated allocation.
    report('prepare', 'Allocating port...');
    let portData = {};
    try {
      const portsResult = await sshClient.exec(sshConfig, `cat /opt/deploys/${project.name}/ports.json 2>/dev/null || echo "{}"`);
      portData = JSON.parse(portsResult.stdout.trim());
    } catch {}
    const usedPorts = Object.values(portData);
    let port = BASE_PORT;
    while (usedPorts.includes(port)) port++;
    portData[sanitized] = port;
    // Base64-encode the JSON so remote shell quoting stays trivial.
    const portJson = Buffer.from(JSON.stringify(portData)).toString('base64');
    await sshClient.exec(sshConfig, `mkdir -p /opt/deploys/${project.name} && echo '${portJson}' | base64 -d > /opt/deploys/${project.name}/ports.json`);

    // Clone branch
    report('git', `Cloning branch ${branch}...`);
    await sshClient.exec(sshConfig, `mkdir -p ${deployDir}`);
    const checkGit = await sshClient.exec(sshConfig, `test -d ${deployDir}/.git && echo "exists" || echo "new"`);
    if (checkGit.stdout.trim() === 'exists') {
      await sshClient.exec(sshConfig, `cd ${deployDir} && git fetch origin && git reset --hard origin/${branch} 2>/dev/null || git checkout -b ${branch} origin/${branch}`);
    } else {
      await sshClient.exec(sshConfig, `git clone -b ${branch} ${project.repoUrl} ${deployDir}`);
    }

    // Docker compose with custom port
    report('docker', 'Starting container...');
    await sshClient.exec(sshConfig, `cd ${deployDir} && APP_PORT=${port} COMPOSE_PROJECT_NAME=${containerName} docker compose up -d --build`);

    // Nginx for subdomain
    report('nginx', `Configuring ${sanitized}.${baseDomain}...`);
    const nginxConf = generateSiteConfig({
      domain: `${sanitized}.${baseDomain}`,
      port,
      proxyPass: `http://127.0.0.1:${port}`,
    });
    await sshClient.exec(sshConfig, `cat > /etc/nginx/sites-available/${containerName} << 'NGINX_EOF'\n${nginxConf}\nNGINX_EOF`);
    await sshClient.exec(sshConfig, `ln -sf /etc/nginx/sites-available/${containerName} /etc/nginx/sites-enabled/`);
    await sshClient.exec(sshConfig, `nginx -t && nginx -s reload`);

    report('done', `Preview at ${sanitized}.${baseDomain}`);
    return { subdomain: `${sanitized}.${baseDomain}`, port, containerName };
  }

  /**
   * Rollback to previous commit (git checkout HEAD~1 detaches HEAD; the next
   * deploy() re-checks-out the branch).
   */
  async function rollback(sshConfig, project, onProgress) {
    validateProject(project); // security fix: was missing here
    const deployDir = `/opt/deploys/${project.name}`;
    const report = (step, msg) => onProgress && onProgress({ step, message: msg });

    report('rollback', 'Rolling back...');
    await sshClient.exec(sshConfig, `cd ${deployDir} && git checkout HEAD~1`);
    await sshClient.exec(sshConfig, `cd ${deployDir} && docker compose up -d --build`);
    report('done', 'Rollback complete');
  }

  /**
   * Clean up a branch preview: containers, nginx config, files, port record.
   */
  async function cleanupBranch(sshConfig, project, branch) {
    validateProject(project); // security fix: was missing here
    const sanitized = sanitizeBranch(branch);
    const containerName = `tlc-${sanitizeBranch(project.name)}-${sanitized}`;
    const deployDir = `/opt/deploys/${project.name}/branches/${sanitized}`;

    // Stop and remove containers
    await sshClient.exec(sshConfig, `cd ${deployDir} && docker compose down 2>/dev/null; docker stop ${containerName} 2>/dev/null; docker rm ${containerName} 2>/dev/null || true`);

    // Remove nginx config
    await sshClient.exec(sshConfig, `rm -f /etc/nginx/sites-enabled/${containerName} /etc/nginx/sites-available/${containerName}`);
    await sshClient.exec(sshConfig, `nginx -t && nginx -s reload 2>/dev/null || true`);

    // Remove deploy directory
    await sshClient.exec(sshConfig, `rm -rf ${deployDir}`);

    // Remove from port allocation (best-effort; a corrupt ports.json is ignored)
    try {
      const portsResult = await sshClient.exec(sshConfig, `cat /opt/deploys/${project.name}/ports.json 2>/dev/null || echo "{}"`);
      const portData = JSON.parse(portsResult.stdout.trim());
      delete portData[sanitized];
      const portJson = Buffer.from(JSON.stringify(portData)).toString('base64');
      await sshClient.exec(sshConfig, `echo '${portJson}' | base64 -d > /opt/deploys/${project.name}/ports.json`);
    } catch {}
  }

  /**
   * List active deployments
   * @returns {Promise<Array<{branch: string, directory: string}>>}
   */
  async function listDeployments(sshConfig, project) {
    validateProject(project); // security fix: was missing here
    const result = await sshClient.exec(sshConfig, `ls /opt/deploys/${project.name}/branches/ 2>/dev/null || echo ""`);
    const branches = result.stdout.trim().split('\n').filter(Boolean);
    return branches.map(name => ({ branch: name, directory: `/opt/deploys/${project.name}/branches/${name}` }));
  }

  return { deploy, deployBranch, rollback, cleanupBranch, listDeployments, sanitizeBranch };
}
|
|
181
|
+
|
|
182
|
+
module.exports = { createDeployEngine };
|