tlc-claude-code 2.4.10 → 2.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/commands/tlc/autofix.md +34 -1
- package/.claude/commands/tlc/build.md +203 -27
- package/.claude/commands/tlc/ci.md +178 -414
- package/.claude/commands/tlc/coverage.md +34 -0
- package/.claude/commands/tlc/deploy.md +19 -6
- package/.claude/commands/tlc/discuss.md +34 -0
- package/.claude/commands/tlc/docs.md +35 -1
- package/.claude/commands/tlc/e2e.md +300 -0
- package/.claude/commands/tlc/edge-cases.md +35 -1
- package/.claude/commands/tlc/init.md +38 -8
- package/.claude/commands/tlc/issues.md +46 -0
- package/.claude/commands/tlc/new-project.md +46 -4
- package/.claude/commands/tlc/plan.md +76 -0
- package/.claude/commands/tlc/quick.md +33 -0
- package/.claude/commands/tlc/release.md +85 -135
- package/.claude/commands/tlc/restore.md +14 -0
- package/.claude/commands/tlc/review.md +80 -1
- package/.claude/commands/tlc/tlc.md +134 -0
- package/.claude/commands/tlc/verify.md +64 -65
- package/.claude/commands/tlc/watchci.md +10 -0
- package/.claude/hooks/tlc-block-tools.sh +13 -0
- package/.claude/hooks/tlc-session-init.sh +9 -0
- package/CODING-STANDARDS.md +35 -10
- package/package.json +1 -1
- package/server/lib/block-tools-hook.js +23 -0
- package/server/lib/e2e/acceptance-parser.js +132 -0
- package/server/lib/e2e/acceptance-parser.test.js +110 -0
- package/server/lib/e2e/framework-detector.js +47 -0
- package/server/lib/e2e/framework-detector.test.js +94 -0
- package/server/lib/e2e/log-assertions.js +107 -0
- package/server/lib/e2e/log-assertions.test.js +68 -0
- package/server/lib/e2e/test-generator.js +159 -0
- package/server/lib/e2e/test-generator.test.js +121 -0
- package/server/lib/e2e/verify-runner.js +191 -0
- package/server/lib/e2e/verify-runner.test.js +167 -0
- package/server/lib/github/config.js +458 -0
- package/server/lib/github/config.test.js +385 -0
- package/server/lib/github/gh-client.js +303 -0
- package/server/lib/github/gh-client.test.js +499 -0
- package/server/lib/github/gh-projects.js +594 -0
- package/server/lib/github/gh-projects.test.js +583 -0
- package/server/lib/github/index.js +19 -0
- package/server/lib/github/plan-sync.js +456 -0
- package/server/lib/github/plan-sync.test.js +805 -0
- package/server/lib/hooks/block-tools-hook.test.js +54 -0
- package/server/lib/orchestration/cli-dispatch.js +16 -1
- package/server/lib/orchestration/cli-dispatch.test.js +94 -8
- package/server/lib/orchestration/completion-checker.js +101 -0
- package/server/lib/orchestration/completion-checker.test.js +177 -0
- package/server/lib/orchestration/result-verifier.js +143 -0
- package/server/lib/orchestration/result-verifier.test.js +291 -0
- package/server/lib/orchestration/session-dispatcher.js +99 -0
- package/server/lib/orchestration/session-dispatcher.test.js +215 -0
- package/server/lib/orchestration/session-status.js +147 -0
- package/server/lib/orchestration/session-status.test.js +130 -0
- package/server/lib/release/agent-runner-updates.js +24 -0
- package/server/lib/release/agent-runner-updates.test.js +22 -0
- package/server/lib/release/changelog-generator.js +142 -0
- package/server/lib/release/changelog-generator.test.js +113 -0
- package/server/lib/release/ci-watcher.js +83 -0
- package/server/lib/release/ci-watcher.test.js +81 -0
- package/server/lib/release/health-checker.js +111 -0
- package/server/lib/release/health-checker.test.js +121 -0
- package/server/lib/release/release-pipeline.js +187 -0
- package/server/lib/release/release-pipeline.test.js +262 -0
- package/server/lib/release/version-bumper.js +183 -0
- package/server/lib/release/version-bumper.test.js +142 -0
- package/server/lib/routing-preamble.integration.test.js +12 -0
- package/server/lib/routing-preamble.js +13 -2
- package/server/lib/routing-preamble.test.js +49 -0
- package/server/lib/scaffolding/ci-detector.js +139 -0
- package/server/lib/scaffolding/ci-detector.test.js +198 -0
- package/server/lib/scaffolding/ci-scaffolder.js +347 -0
- package/server/lib/scaffolding/ci-scaffolder.test.js +157 -0
- package/server/lib/scaffolding/deploy-detector.js +135 -0
- package/server/lib/scaffolding/deploy-detector.test.js +106 -0
- package/server/lib/scaffolding/health-scaffold.js +374 -0
- package/server/lib/scaffolding/health-scaffold.test.js +99 -0
- package/server/lib/scaffolding/logger-scaffold.js +196 -0
- package/server/lib/scaffolding/logger-scaffold.test.js +146 -0
- package/server/lib/scaffolding/migration-detector.js +78 -0
- package/server/lib/scaffolding/migration-detector.test.js +127 -0
- package/server/lib/scaffolding/snapshot-manager.js +142 -0
- package/server/lib/scaffolding/snapshot-manager.test.js +225 -0
- package/server/lib/task-router-config.js +50 -20
- package/server/lib/task-router-config.test.js +29 -15
|
@@ -0,0 +1,196 @@
|
|
|
1
|
+
const path = require('path');
|
|
2
|
+
|
|
3
|
+
/**
 * Scaffold a structured-logging setup for a project.
 *
 * @param {Object} options
 * @param {string} options.projectDir - Project root (used to probe for existing loggers).
 * @param {string} options.language - 'javascript' | 'typescript' | 'python' | 'go'.
 * @param {string} options.framework - Currently unused; accepted for interface stability.
 * @param {Object} options.fs - fs-like object with existsSync/readFileSync (injectable for tests).
 * @returns {{files: Array<{path: string, content: string}>, dependencies: string[],
 *           skipped: boolean, skipReason: (string|null)}}
 */
function scaffoldLogger({ projectDir, language, framework, fs }) {
  void framework; // reserved for future framework-specific templates

  // Local helper: success results share one shape, mirroring createSkippedResult.
  const createSuccessResult = (files, dependencies) => ({
    files,
    dependencies,
    skipped: false,
    skipReason: null,
  });

  const supported = ['javascript', 'typescript', 'python', 'go'];
  if (!supported.includes(language)) {
    // Consistency fix: reuse the shared helper instead of duplicating the
    // skipped-result literal inline.
    return createSkippedResult('unsupported language');
  }

  if (language === 'javascript' || language === 'typescript') {
    if (hasExistingNodeLogger(projectDir, fs)) {
      return createSkippedResult('existing logger dependency found in package.json');
    }

    return createSuccessResult(
      [{ path: 'src/lib/logger.js', content: createNodeLoggerContent() }],
      ['pino', 'pino-http'],
    );
  }

  if (language === 'python') {
    if (hasExistingPythonLogger(projectDir, fs)) {
      return createSkippedResult('existing logger dependency found in requirements.txt');
    }

    return createSuccessResult(
      [{ path: 'src/logger.py', content: createPythonLoggerContent() }],
      ['structlog'],
    );
  }

  // Remaining supported language: Go (stdlib log/slog — no dependencies needed).
  return createSuccessResult(
    [{ path: 'internal/logger/logger.go', content: createGoLoggerContent() }],
    [],
  );
}
|
|
63
|
+
|
|
64
|
+
/** Uniform result object for the "nothing was generated" case. */
function createSkippedResult(reason) {
  const empty = { files: [], dependencies: [] };
  return { ...empty, skipped: true, skipReason: reason };
}
|
|
72
|
+
|
|
73
|
+
/**
 * True when package.json already declares a known logging library
 * (pino, winston, or bunyan) in any dependency section.
 * Returns false for a missing/unusable fs, a missing manifest, or
 * an unreadable/malformed package.json.
 */
function hasExistingNodeLogger(projectDir, fs) {
  const manifestPath = path.join(projectDir, 'package.json');
  const fsUsable = Boolean(fs && fs.existsSync && fs.readFileSync);

  if (!fsUsable || !fs.existsSync(manifestPath)) {
    return false;
  }

  try {
    const manifest = JSON.parse(fs.readFileSync(manifestPath, 'utf8'));

    // Collapse every dependency section into one lookup table.
    const declared = {};
    for (const section of ['dependencies', 'devDependencies', 'peerDependencies', 'optionalDependencies']) {
      Object.assign(declared, manifest[section] || {});
    }

    return ['pino', 'winston', 'bunyan'].some((name) => Boolean(declared[name]));
  } catch {
    return false; // unreadable or malformed manifest → treat as "no logger"
  }
}
|
|
94
|
+
|
|
95
|
+
/**
 * True when requirements.txt already contains a structlog requirement line
 * (with or without a version specifier). Returns false when the file is
 * missing, the fs object is unusable, or the file cannot be read.
 */
function hasExistingPythonLogger(projectDir, fs) {
  const requirementsPath = path.join(projectDir, 'requirements.txt');
  const fsUsable = Boolean(fs && fs.existsSync && fs.readFileSync);

  if (!fsUsable || !fs.existsSync(requirementsPath)) {
    return false;
  }

  let contents;
  try {
    contents = fs.readFileSync(requirementsPath, 'utf8');
  } catch {
    return false; // unreadable file → assume no existing logger
  }

  // Match a whole "structlog" requirement line, optionally followed by a
  // version specifier such as ==, >=, ~=, or !=.
  return /(^|\n)\s*structlog(?:[<=>~!].*)?\s*($|\n)/i.test(contents);
}
|
|
109
|
+
|
|
110
|
+
/**
 * Template for the generated Node.js logger module (pino + pino-http).
 *
 * Returned verbatim as file content — the string bytes ARE the product,
 * so do not reformat. The escaped `\${...}` sequences are interpolated in
 * the GENERATED file, not here.
 */
function createNodeLoggerContent() {
  return `const pino = require('pino');
const pinoHttp = require('pino-http');

const logger = pino({
  level: process.env.LOG_LEVEL || 'info',
  base: undefined,
  timestamp: pino.stdTimeFunctions.isoTime,
});

function createChildLogger(bindings = {}) {
  return logger.child(bindings);
}

function createRequestLogger() {
  return pinoHttp({
    logger,
    genReqId(req) {
      return req.id || req.headers['x-request-id'];
    },
    customSuccessMessage(req, res) {
      return \`\${req.method} \${req.url} completed with \${res.statusCode}\`;
    },
    customErrorMessage(req, res, error) {
      return \`\${req.method} \${req.url} failed with \${res.statusCode}: \${error.message}\`;
    },
  });
}

module.exports = {
  logger,
  createChildLogger,
  createRequestLogger,
};
`;
}
|
|
146
|
+
|
|
147
|
+
/**
 * Template for the generated Python logger module (structlog over stdlib
 * logging, JSON output to stdout). Returned verbatim as file content —
 * do not reformat; the string bytes are the product.
 */
function createPythonLoggerContent() {
  return `import logging
import sys

import structlog


logging.basicConfig(
    format="%(message)s",
    stream=sys.stdout,
    level=logging.INFO,
)

structlog.configure(
    processors=[
        structlog.contextvars.merge_contextvars,
        structlog.processors.add_log_level,
        structlog.processors.TimeStamper(fmt="iso"),
        structlog.processors.JSONRenderer(),
    ],
    wrapper_class=structlog.make_filtering_bound_logger(logging.INFO),
    logger_factory=structlog.PrintLoggerFactory(file=sys.stdout),
    cache_logger_on_first_use=True,
)


def get_logger(**bindings):
    return structlog.get_logger().bind(**bindings)
`;
}
|
|
177
|
+
|
|
178
|
+
/**
 * Template for the generated Go logger package (stdlib log/slog with a JSON
 * handler — no external dependencies). Returned verbatim as file content —
 * do not reformat; the string bytes are the product.
 */
function createGoLoggerContent() {
  return `package logger

import (
	"log/slog"
	"os"
)

func New() *slog.Logger {
	return slog.New(slog.NewJSONHandler(os.Stdout, &slog.HandlerOptions{
		Level: slog.LevelInfo,
	}))
}
`;
}
|
|
193
|
+
|
|
194
|
+
// Public API: only the orchestrating entry point is exported; the content
// generators and existing-logger probes are module-private helpers.
module.exports = {
  scaffoldLogger,
};
|
|
@@ -0,0 +1,146 @@
|
|
|
1
|
+
import { describe, expect, it } from 'vitest';
|
|
2
|
+
|
|
3
|
+
import { scaffoldLogger } from './logger-scaffold.js';
|
|
4
|
+
|
|
5
|
+
/** Minimal in-memory stand-in for fs, backed by a path → content map. */
function createFsMock(files = {}) {
  const mock = {
    existsSync: (targetPath) => Object.prototype.hasOwnProperty.call(files, targetPath),
    readFileSync(targetPath) {
      // Mirror real fs: throw an ENOENT-style error for unknown paths.
      if (!this.existsSync(targetPath)) {
        throw new Error(`ENOENT: ${targetPath}`);
      }
      return files[targetPath];
    },
  };
  return mock;
}
|
|
19
|
+
|
|
20
|
+
// Suite for scaffoldLogger: per-language happy paths, skip-when-already-present,
// and unsupported-language handling. Assertions pin exact emitted paths,
// dependency lists, and template substrings — treat them as the contract.
describe('scaffoldLogger', () => {
  // Happy path: Node.js projects get a pino module at src/lib/logger.js.
  it('generates pino setup for Node.js projects', () => {
    const result = scaffoldLogger({
      projectDir: '/project',
      language: 'javascript',
      framework: 'express',
      fs: createFsMock(),
    });

    expect(result.skipped).toBe(false);
    expect(result.skipReason).toBeNull();
    expect(result.dependencies).toEqual(['pino', 'pino-http']);
    expect(result.files).toHaveLength(1);
    expect(result.files[0].path).toBe('src/lib/logger.js');
    expect(result.files[0].content).toContain("const pino = require('pino');");
    expect(result.files[0].content).toContain("const pinoHttp = require('pino-http');");
    expect(result.files[0].content).toContain('const logger = pino(');
    expect(result.files[0].content).toContain('function createChildLogger(bindings = {})');
    expect(result.files[0].content).toContain('function createRequestLogger()');
  });

  // Happy path: Python projects get a structlog module at src/logger.py.
  it('generates structlog setup for Python projects', () => {
    const result = scaffoldLogger({
      projectDir: '/project',
      language: 'python',
      framework: 'fastapi',
      fs: createFsMock(),
    });

    expect(result.skipped).toBe(false);
    expect(result.skipReason).toBeNull();
    expect(result.dependencies).toEqual(['structlog']);
    expect(result.files).toHaveLength(1);
    expect(result.files[0].path).toBe('src/logger.py');
    expect(result.files[0].content).toContain('import logging');
    expect(result.files[0].content).toContain('import structlog');
    expect(result.files[0].content).toContain('structlog.configure(');
    expect(result.files[0].content).toContain('def get_logger(**bindings):');
  });

  // Happy path: Go projects get a stdlib slog package with no dependencies.
  it('generates slog setup for Go projects', () => {
    const result = scaffoldLogger({
      projectDir: '/project',
      language: 'go',
      framework: 'gin',
      fs: createFsMock(),
    });

    expect(result.skipped).toBe(false);
    expect(result.skipReason).toBeNull();
    expect(result.dependencies).toEqual([]);
    expect(result.files).toHaveLength(1);
    expect(result.files[0].path).toBe('internal/logger/logger.go');
    expect(result.files[0].content).toContain('package logger');
    expect(result.files[0].content).toContain('"log/slog"');
    expect(result.files[0].content).toContain('slog.NewJSONHandler');
  });

  // Skip path: an existing pino dependency (also covers typescript branch).
  it('skips Node.js generation when pino already exists in package.json', () => {
    const result = scaffoldLogger({
      projectDir: '/project',
      language: 'typescript',
      framework: 'express',
      fs: createFsMock({
        '/project/package.json': JSON.stringify({
          dependencies: {
            pino: '^9.0.0',
          },
        }),
      }),
    });

    expect(result.skipped).toBe(true);
    expect(result.skipReason).toContain('existing logger dependency');
    expect(result.files).toEqual([]);
  });

  // Unknown languages yield the empty skipped result.
  it('skips unsupported languages', () => {
    const result = scaffoldLogger({
      projectDir: '/project',
      language: 'ruby',
      framework: 'rails',
      fs: createFsMock(),
    });

    expect(result.skipped).toBe(true);
    expect(result.skipReason).toBe('unsupported language');
    expect(result.files).toEqual([]);
    expect(result.dependencies).toEqual([]);
  });

  // The generated Node module must expose a child-logger factory.
  it('includes a child logger factory in generated Node.js logger', () => {
    const result = scaffoldLogger({
      projectDir: '/project',
      language: 'javascript',
      framework: 'express',
      fs: createFsMock(),
    });

    expect(result.files[0].content).toContain('return logger.child(bindings);');
  });

  // Cross-language dependency matrix in one place.
  it('returns the correct dependencies for each supported language', () => {
    const nodeResult = scaffoldLogger({
      projectDir: '/project',
      language: 'javascript',
      framework: 'express',
      fs: createFsMock(),
    });
    const pythonResult = scaffoldLogger({
      projectDir: '/project',
      language: 'python',
      framework: 'fastapi',
      fs: createFsMock(),
    });
    const goResult = scaffoldLogger({
      projectDir: '/project',
      language: 'go',
      framework: 'gin',
      fs: createFsMock(),
    });

    expect(nodeResult.dependencies).toEqual(['pino', 'pino-http']);
    expect(pythonResult.dependencies).toEqual(['structlog']);
    expect(goResult.dependencies).toEqual([]);
  });
});
|
|
@@ -0,0 +1,78 @@
|
|
|
1
|
+
const path = require('path');
|
|
2
|
+
|
|
3
|
+
// Precedence when a diff touches several ORM styles: the first style in this
// list that was detected wins as the reported ormType.
const ORM_ORDER = ['prisma', 'drizzle', 'typeorm', 'alembic', 'sql'];
|
5
|
+
/** Coerce to string (falsy → '') and convert Windows separators to '/'. */
function normalizePath(filePath) {
  const asString = String(filePath || '');
  return asString.split('\\').join('/');
}
|
|
8
|
+
|
|
9
|
+
/** True when the basename looks like a test/spec file (e.g. foo.test.js). */
function isTestFile(filePath) {
  const basename = path.basename(filePath);
  return /\.(test|spec)\.[^.]+$/i.test(basename);
}
|
|
12
|
+
|
|
13
|
+
/**
 * Classify a changed file path as one ORM's migration artifact, or null.
 * Rules are checked in order: the more specific directory layouts come
 * before the generic raw-SQL locations.
 */
function detectOrmType(filePath) {
  const normalized = normalizePath(filePath);

  const rules = [
    ['prisma', /^prisma\/migrations\/.+/],
    ['drizzle', /^drizzle\/.+/],
    ['drizzle', /(^|.+\/)drizzle\/[^/]+\.sql$/i],
    ['typeorm', /^migrations\/[^/]+\.(ts|js)$/i],
    ['alembic', /^alembic\/versions\/.+/],
    ['sql', /^migrations\/[^/]+\.sql$/i],
    ['sql', /^db\/migrate\/[^/]+\.sql$/i],
  ];

  for (const [ormType, pattern] of rules) {
    if (pattern.test(normalized)) {
      return ormType;
    }
  }

  return null;
}
|
|
38
|
+
|
|
39
|
+
/**
 * Scan a list of changed file paths for database migration files.
 *
 * @param {Object} [options]
 * @param {string[]} [options.diffFiles] - Paths from the diff; the caller's
 *   original path strings are echoed back in migrationFiles.
 * @returns {{hasMigrations: boolean, migrationFiles: string[], ormType: (string|null)}}
 */
function detectMigrations({ diffFiles = [] } = {}) {
  const migrationFiles = [];
  const seenOrmTypes = new Set();

  for (const diffFile of diffFiles) {
    const normalized = normalizePath(diffFile);

    // Skip empty entries and test/spec files that merely mention "migration".
    if (!normalized || isTestFile(normalized)) {
      continue;
    }

    const ormType = detectOrmType(normalized);
    if (ormType) {
      migrationFiles.push(diffFile);
      seenOrmTypes.add(ormType);
    }
  }

  const hasMigrations = migrationFiles.length > 0;
  // Report the highest-precedence ORM among those detected (see ORM_ORDER).
  const ormType = hasMigrations
    ? ORM_ORDER.find((candidate) => seenOrmTypes.has(candidate)) || null
    : null;

  return {
    hasMigrations,
    migrationFiles,
    ormType,
  };
}
|
|
75
|
+
|
|
76
|
+
// Public API: only the scanning entry point; path helpers stay private.
module.exports = {
  detectMigrations,
};
|
|
@@ -0,0 +1,127 @@
|
|
|
1
|
+
import { describe, it, expect } from 'vitest';

// Fix: this ESM test file previously loaded the module under test with
// CommonJS `require`, which is not defined in an ES module context; the
// sibling suites in this package use `import` for the unit under test.
import { detectMigrations } from './migration-detector';
|
|
4
|
+
|
|
5
|
+
// Suite for detectMigrations: one case per supported ORM layout, plus the
// test-file exclusion, the empty case, and mixed/multiple-file behavior.
describe('migration-detector', () => {
  it('detects Prisma migration with correct ormType', () => {
    const result = detectMigrations({
      diffFiles: ['prisma/migrations/20260330120000_init/migration.sql'],
    });

    expect(result).toEqual({
      hasMigrations: true,
      migrationFiles: ['prisma/migrations/20260330120000_init/migration.sql'],
      ormType: 'prisma',
    });
  });

  // Drizzle files are matched even when nested under src/.
  it('detects Drizzle migration', () => {
    const result = detectMigrations({
      diffFiles: ['src/drizzle/0002_add_users.sql'],
    });

    expect(result).toEqual({
      hasMigrations: true,
      migrationFiles: ['src/drizzle/0002_add_users.sql'],
      ormType: 'drizzle',
    });
  });

  it('detects TypeORM migration', () => {
    const result = detectMigrations({
      diffFiles: ['migrations/1711824000000-create-users.ts'],
    });

    expect(result).toEqual({
      hasMigrations: true,
      migrationFiles: ['migrations/1711824000000-create-users.ts'],
      ormType: 'typeorm',
    });
  });

  it('detects Alembic migration', () => {
    const result = detectMigrations({
      diffFiles: ['alembic/versions/abc123_create_users.py'],
    });

    expect(result).toEqual({
      hasMigrations: true,
      migrationFiles: ['alembic/versions/abc123_create_users.py'],
      ormType: 'alembic',
    });
  });

  it('detects raw SQL migration', () => {
    const result = detectMigrations({
      diffFiles: ['db/migrate/20260330121000_create_users.sql'],
    });

    expect(result).toEqual({
      hasMigrations: true,
      migrationFiles: ['db/migrate/20260330121000_create_users.sql'],
      ormType: 'sql',
    });
  });

  // Test/spec files must never be classified as migrations.
  it('does not detect test files with migration in the name', () => {
    const result = detectMigrations({
      diffFiles: ['tests/migration-detector.test.js', 'specs/user-migration.spec.ts'],
    });

    expect(result).toEqual({
      hasMigrations: false,
      migrationFiles: [],
      ormType: null,
    });
  });

  it('returns false when there are no migration files', () => {
    const result = detectMigrations({
      diffFiles: ['src/index.js', 'README.md'],
    });

    expect(result).toEqual({
      hasMigrations: false,
      migrationFiles: [],
      ormType: null,
    });
  });

  it('returns all matching migration files when multiple are present', () => {
    const result = detectMigrations({
      diffFiles: [
        'prisma/migrations/20260330120000_init/migration.sql',
        'prisma/migrations/20260330130000_add_users/migration.sql',
      ],
    });

    expect(result).toEqual({
      hasMigrations: true,
      migrationFiles: [
        'prisma/migrations/20260330120000_init/migration.sql',
        'prisma/migrations/20260330130000_add_users/migration.sql',
      ],
      ormType: 'prisma',
    });
  });

  // Non-migration entries are filtered out; input order is preserved.
  it('returns only migration files from a mixed file list', () => {
    const result = detectMigrations({
      diffFiles: [
        'src/index.js',
        'migrations/1711824000000-create-users.js',
        'docs/migrations.md',
        'db/migrate/20260330121000_create_users.sql',
      ],
    });

    expect(result).toEqual({
      hasMigrations: true,
      migrationFiles: [
        'migrations/1711824000000-create-users.js',
        'db/migrate/20260330121000_create_users.sql',
      ],
      ormType: 'typeorm',
    });
  });
});
|
|
@@ -0,0 +1,142 @@
|
|
|
1
|
+
const fs = require('fs');
|
|
2
|
+
const path = require('path');
|
|
3
|
+
|
|
4
|
+
/** Wrap a value in double quotes for shell use, escaping embedded quotes. */
function quote(value) {
  const escaped = String(value).replace(/"/g, '\\"');
  return `"${escaped}"`;
}
|
|
7
|
+
|
|
8
|
+
/** Format a date as YYYY-MM-DD (UTC calendar date); defaults to "now". */
function formatDate(date = new Date()) {
  const [isoDay] = date.toISOString().split('T');
  return isoDay;
}
|
|
11
|
+
|
|
12
|
+
/** Replace runs of filename-unsafe characters with '-'; falsy → 'unknown'. */
function sanitizeSegment(value) {
  const source = String(value || 'unknown');
  return source.replace(/[^a-zA-Z0-9._-]+/g, '-');
}
|
|
15
|
+
|
|
16
|
+
/**
 * Derive a filename-safe database name from the connection info.
 * SQLite: the db file's basename without extension; otherwise the URL's
 * path component ('database' when the path is empty).
 */
function getDbName(dbType, connectionString) {
  if (dbType === 'sqlite') {
    const extension = path.extname(connectionString);
    const baseName = path.basename(connectionString, extension);
    return sanitizeSegment(baseName);
  }

  const url = new URL(connectionString);
  const pathName = url.pathname.replace(/^\/+/, '');
  return sanitizeSegment(pathName || 'database');
}
|
|
24
|
+
|
|
25
|
+
/**
 * Compute the snapshot file path: <dir>/<YYYY-MM-DD>-<gitRef>-<dbName>.dump.
 * NOTE(review): a sanitized gitRef may itself contain hyphens, which
 * parseSnapshotFilename later resolves greedily (dbName = text after the
 * LAST hyphen) — confirm round-trip expectations for hyphenated refs.
 */
function buildSnapshotPath({ dbType, connectionString, snapshotDir, gitRef }) {
  const segments = [
    formatDate(),
    sanitizeSegment(gitRef),
    getDbName(dbType, connectionString),
  ];
  return path.join(snapshotDir, `${segments.join('-')}.dump`);
}
|
|
31
|
+
|
|
32
|
+
/**
 * Split a database URL into its connection components.
 * Username/password are percent-decoded; port is kept as a string
 * ('' when the URL carries no explicit port).
 */
function parseDbUrl(connectionString) {
  const url = new URL(connectionString);

  const host = url.hostname;
  const port = url.port;
  const user = decodeURIComponent(url.username || '');
  const password = decodeURIComponent(url.password || '');
  const dbName = url.pathname.replace(/^\/+/, '');

  return { host, port, user, password, dbName };
}
|
|
42
|
+
|
|
43
|
+
/**
 * Dump the database to a freshly-named snapshot file.
 *
 * @param {Object} options
 * @param {string} options.dbType - 'postgresql' | 'mysql' | 'sqlite'.
 * @param {string} options.connectionString - DB URL, or the file path for sqlite.
 * @param {string} options.snapshotDir - Directory that receives the dump file.
 * @param {string} options.gitRef - Ref embedded in the snapshot filename.
 * @param {Function} options.exec - Async shell-command runner (injectable for tests).
 * @param {Object} [options.fs] - fs module override; defaults to node's fs.
 * @returns {Promise<{success: boolean, snapshotPath?: string, error?: Error, reason?: string}>}
 *   Never throws: failures are reported via success=false.
 */
async function takeSnapshot({ dbType, connectionString, snapshotDir, gitRef, exec, fs: fileSystem = fs }) {
  const snapshotPath = buildSnapshotPath({ dbType, connectionString, snapshotDir, gitRef });

  try {
    if (dbType === 'postgresql') {
      // Custom format so the restore path can use pg_restore --clean.
      await exec(`pg_dump --format=custom -f ${quote(snapshotPath)} ${quote(connectionString)}`);
    } else if (dbType === 'mysql') {
      const { host, port, user, password, dbName } = parseDbUrl(connectionString);
      // NOTE(review): host/user/password/dbName are interpolated unquoted into
      // a shell command — shell metacharacters in credentials would break or
      // inject, and the password is visible in the process list. Confirm
      // inputs are trusted, or switch to quoted args / option-file auth.
      await exec(
        `mysqldump --host=${host} --port=${port || 3306} --user=${user} --password=${password} ${dbName} > ${quote(snapshotPath)}`
      );
    } else if (dbType === 'sqlite') {
      // A SQLite "snapshot" is just a copy of the database file.
      fileSystem.copyFileSync(connectionString, snapshotPath);
    } else {
      throw new Error(`Unsupported dbType: ${dbType}`);
    }

    return { success: true, snapshotPath };
  } catch (error) {
    // Best-effort contract: report the failure instead of propagating it.
    return { success: false, error, reason: error.message };
  }
}
|
|
65
|
+
|
|
66
|
+
/**
 * Restore a database from a previously taken snapshot file.
 *
 * @param {Object} options
 * @param {string} options.snapshotPath - Dump file produced by takeSnapshot.
 * @param {string} options.dbType - 'postgresql' | 'mysql' | 'sqlite'.
 * @param {string} options.connectionString - DB URL, or the file path for sqlite.
 * @param {Function} options.exec - Async shell-command runner (injectable for tests).
 * @param {Object} [options.fs] - fs module override; defaults to node's fs.
 * @returns {Promise<{success: boolean, error?: Error, reason?: string}>}
 *   Never throws: failures are reported via success=false.
 */
async function restoreSnapshot({ snapshotPath, dbType, connectionString, exec, fs: fileSystem = fs }) {
  try {
    if (dbType === 'postgresql') {
      // --clean --if-exists drops existing objects before recreating them.
      await exec(
        `pg_restore --clean --if-exists --dbname=${quote(connectionString)} ${quote(snapshotPath)}`
      );
    } else if (dbType === 'mysql') {
      const { host, port, user, password, dbName } = parseDbUrl(connectionString);
      // NOTE(review): same unquoted-credential caveat as takeSnapshot —
      // shell metacharacters in host/user/password/dbName are unsafe here.
      await exec(
        `mysql --host=${host} --port=${port || 3306} --user=${user} --password=${password} ${dbName} < ${quote(snapshotPath)}`
      );
    } else if (dbType === 'sqlite') {
      // Restore by copying the snapshot file back over the database file.
      fileSystem.copyFileSync(snapshotPath, connectionString);
    } else {
      throw new Error(`Unsupported dbType: ${dbType}`);
    }

    return { success: true };
  } catch (error) {
    // Best-effort contract: report the failure instead of propagating it.
    return { success: false, error, reason: error.message };
  }
}
|
|
88
|
+
|
|
89
|
+
/**
 * Parse "<YYYY-MM-DD>-<gitRef>-<dbName>.dump" back into its parts.
 * gitRef is matched greedily, so dbName is everything after the LAST
 * hyphen. Returns null when the name does not fit the pattern.
 */
function parseSnapshotFilename(filename) {
  const pattern = /^(\d{4}-\d{2}-\d{2})-(.+)-([^-]+)\.dump$/;
  const match = pattern.exec(filename);

  if (match === null) {
    return null;
  }

  const [, date, gitRef, dbName] = match;
  return { date, gitRef, dbName };
}
|
|
101
|
+
|
|
102
|
+
/**
 * List parseable snapshot files in a directory, newest first.
 * Files whose names do not match the snapshot pattern are ignored
 * (and never stat'ed).
 */
function listSnapshots({ snapshotDir, fs: fileSystem = fs }) {
  const entries = [];

  for (const filename of fileSystem.readdirSync(snapshotDir)) {
    const parsed = parseSnapshotFilename(filename);
    if (!parsed) {
      continue;
    }

    const fullPath = path.join(snapshotDir, filename);
    const stats = fileSystem.statSync(fullPath);

    entries.push({
      path: fullPath,
      date: parsed.date,
      gitRef: parsed.gitRef,
      dbName: parsed.dbName,
      size: stats.size,
    });
  }

  // Newest date first; ties broken by reverse path order for determinism.
  entries.sort((a, b) => b.date.localeCompare(a.date) || b.path.localeCompare(a.path));
  return entries;
}
|
|
125
|
+
|
|
126
|
+
/**
 * Delete snapshots beyond the newest `maxSnapshots`, oldest first.
 * Returns the paths that were removed (in deletion order).
 */
function enforceRetention({ snapshotDir, maxSnapshots = 10, fs: fileSystem = fs }) {
  const ordered = listSnapshots({ snapshotDir, fs: fileSystem });
  const doomed = ordered.slice(maxSnapshots).reverse(); // oldest first

  const removedPaths = [];
  for (const snapshot of doomed) {
    fileSystem.unlinkSync(snapshot.path);
    removedPaths.push(snapshot.path);
  }

  return removedPaths;
}
|
|
136
|
+
|
|
137
|
+
// Public API: snapshot lifecycle (take/restore/list) plus retention cleanup;
// filename parsing and URL helpers stay module-private.
module.exports = {
  takeSnapshot,
  restoreSnapshot,
  listSnapshots,
  enforceRetention,
};
|