deepflow 0.1.106 → 0.1.108
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/install.js +50 -17
- package/bin/install.test.js +206 -167
- package/bin/plan-consolidator.js +19 -1
- package/bin/plan-consolidator.test.js +150 -0
- package/bin/ratchet.js +5 -5
- package/bin/ratchet.test.js +172 -0
- package/bin/worktree-deps.js +127 -0
- package/hooks/df-spec-lint.js +13 -2
- package/hooks/df-spec-lint.test.js +133 -0
- package/package.json +1 -1
- package/src/commands/df/execute.md +53 -2
- package/src/commands/df/plan.md +244 -16
- package/src/commands/df/verify.md +45 -7
- package/templates/explore-protocol.md.bak +69 -0
- package/templates/plan-template.md +11 -0
- package/templates/spec-template.md +15 -0
|
@@ -880,3 +880,153 @@ describe('formatConsolidated — additional edge cases', () => {
|
|
|
880
880
|
assert.ok(output.includes('Blocked by: T1, T2'));
|
|
881
881
|
});
|
|
882
882
|
});
|
|
883
|
+
|
|
884
|
+
// ---------------------------------------------------------------------------
|
|
885
|
+
// 10. parseArgs — --specs-dir (AC-1)
|
|
886
|
+
// ---------------------------------------------------------------------------
|
|
887
|
+
|
|
888
|
+
describe('parseArgs — --specs-dir', () => {
|
|
889
|
+
test('recognizes --specs-dir and stores it in args.specsDir', () => {
|
|
890
|
+
const args = parseArgs(['node', 'plan-consolidator.js', '--plans-dir', '/tmp/plans', '--specs-dir', 'specs/']);
|
|
891
|
+
assert.equal(args.specsDir, 'specs/');
|
|
892
|
+
});
|
|
893
|
+
|
|
894
|
+
test('specsDir is null when --specs-dir is not provided', () => {
|
|
895
|
+
const args = parseArgs(['node', 'plan-consolidator.js', '--plans-dir', '/tmp/plans']);
|
|
896
|
+
assert.equal(args.specsDir, null);
|
|
897
|
+
});
|
|
898
|
+
|
|
899
|
+
test('specsDir is null when --specs-dir has no following value', () => {
|
|
900
|
+
const args = parseArgs(['node', 'plan-consolidator.js', '--plans-dir', '/tmp/plans', '--specs-dir']);
|
|
901
|
+
assert.equal(args.specsDir, null);
|
|
902
|
+
});
|
|
903
|
+
|
|
904
|
+
test('--specs-dir works without --plans-dir (args are independent)', () => {
|
|
905
|
+
const args = parseArgs(['node', 'plan-consolidator.js', '--specs-dir', 'my-specs/']);
|
|
906
|
+
assert.equal(args.specsDir, 'my-specs/');
|
|
907
|
+
assert.equal(args.plansDir, null);
|
|
908
|
+
});
|
|
909
|
+
});
|
|
910
|
+
|
|
911
|
+
// ---------------------------------------------------------------------------
|
|
912
|
+
// 11. CLI integration — stale-filter with --specs-dir (AC-2, AC-3, AC-4)
|
|
913
|
+
// ---------------------------------------------------------------------------
|
|
914
|
+
|
|
915
|
+
describe('CLI integration — stale-filter', () => {
|
|
916
|
+
test('AC-2: mini-plan is included when matching spec exists in --specs-dir', () => {
|
|
917
|
+
const plansDir = makeTmpDir();
|
|
918
|
+
const specsDir = makeTmpDir();
|
|
919
|
+
try {
|
|
920
|
+
// Create a mini-plan doing-foo.md
|
|
921
|
+
fs.writeFileSync(path.join(plansDir, 'doing-foo.md'), `## Tasks
|
|
922
|
+
- [ ] **T1**: Foo task
|
|
923
|
+
`);
|
|
924
|
+
// Create matching spec in specsDir
|
|
925
|
+
fs.writeFileSync(path.join(specsDir, 'doing-foo.md'), '# Spec for foo\n');
|
|
926
|
+
|
|
927
|
+
const result = runConsolidator(['--plans-dir', plansDir, '--specs-dir', specsDir]);
|
|
928
|
+
assert.equal(result.code, 0);
|
|
929
|
+
assert.ok(result.stdout.includes('**T1**: Foo task'), 'task from matched mini-plan should appear in output');
|
|
930
|
+
assert.ok(!result.stderr.includes('skipping stale'), 'no stale warning should appear when spec exists');
|
|
931
|
+
} finally {
|
|
932
|
+
rmrf(plansDir);
|
|
933
|
+
rmrf(specsDir);
|
|
934
|
+
}
|
|
935
|
+
});
|
|
936
|
+
|
|
937
|
+
test('AC-3: mini-plan is excluded with stderr warning when spec is absent from --specs-dir', () => {
|
|
938
|
+
const plansDir = makeTmpDir();
|
|
939
|
+
const specsDir = makeTmpDir();
|
|
940
|
+
try {
|
|
941
|
+
// Create a mini-plan doing-bar.md
|
|
942
|
+
fs.writeFileSync(path.join(plansDir, 'doing-bar.md'), `## Tasks
|
|
943
|
+
- [ ] **T1**: Bar task
|
|
944
|
+
`);
|
|
945
|
+
// No matching spec in specsDir — specsDir is empty
|
|
946
|
+
|
|
947
|
+
const result = runConsolidator(['--plans-dir', plansDir, '--specs-dir', specsDir]);
|
|
948
|
+
assert.equal(result.code, 0);
|
|
949
|
+
assert.ok(!result.stdout.includes('**T1**: Bar task'), 'stale mini-plan task should be excluded from output');
|
|
950
|
+
assert.ok(result.stderr.includes('skipping stale mini-plan doing-bar.md'), 'stderr should warn about stale mini-plan');
|
|
951
|
+
} finally {
|
|
952
|
+
rmrf(plansDir);
|
|
953
|
+
rmrf(specsDir);
|
|
954
|
+
}
|
|
955
|
+
});
|
|
956
|
+
|
|
957
|
+
test('AC-3: stderr warning includes the specsDir path', () => {
|
|
958
|
+
const plansDir = makeTmpDir();
|
|
959
|
+
const specsDir = makeTmpDir();
|
|
960
|
+
try {
|
|
961
|
+
fs.writeFileSync(path.join(plansDir, 'doing-bar.md'), `## Tasks
|
|
962
|
+
- [ ] **T1**: Bar task
|
|
963
|
+
`);
|
|
964
|
+
|
|
965
|
+
const result = runConsolidator(['--plans-dir', plansDir, '--specs-dir', specsDir]);
|
|
966
|
+
// Warning format: "plan-consolidator: skipping stale mini-plan {file} (no matching spec in {specsDir})"
|
|
967
|
+
assert.ok(result.stderr.includes('no matching spec in'), 'stderr should mention no matching spec');
|
|
968
|
+
} finally {
|
|
969
|
+
rmrf(plansDir);
|
|
970
|
+
rmrf(specsDir);
|
|
971
|
+
}
|
|
972
|
+
});
|
|
973
|
+
|
|
974
|
+
test('AC-2+AC-3: only matched mini-plans are included when some specs exist and some do not', () => {
|
|
975
|
+
const plansDir = makeTmpDir();
|
|
976
|
+
const specsDir = makeTmpDir();
|
|
977
|
+
try {
|
|
978
|
+
// Two mini-plans: doing-present.md (spec exists) and doing-stale.md (spec absent)
|
|
979
|
+
fs.writeFileSync(path.join(plansDir, 'doing-present.md'), `## Tasks
|
|
980
|
+
- [ ] **T1**: Present task
|
|
981
|
+
`);
|
|
982
|
+
fs.writeFileSync(path.join(plansDir, 'doing-stale.md'), `## Tasks
|
|
983
|
+
- [ ] **T1**: Stale task
|
|
984
|
+
`);
|
|
985
|
+
// Only doing-present.md has a spec
|
|
986
|
+
fs.writeFileSync(path.join(specsDir, 'doing-present.md'), '# Spec for present\n');
|
|
987
|
+
|
|
988
|
+
const result = runConsolidator(['--plans-dir', plansDir, '--specs-dir', specsDir]);
|
|
989
|
+
assert.equal(result.code, 0);
|
|
990
|
+
assert.ok(result.stdout.includes('Present task'), 'matched mini-plan task should be included');
|
|
991
|
+
assert.ok(!result.stdout.includes('Stale task'), 'stale mini-plan task should be excluded');
|
|
992
|
+
assert.ok(result.stderr.includes('skipping stale mini-plan doing-stale.md'), 'stderr should warn about stale entry');
|
|
993
|
+
} finally {
|
|
994
|
+
rmrf(plansDir);
|
|
995
|
+
rmrf(specsDir);
|
|
996
|
+
}
|
|
997
|
+
});
|
|
998
|
+
|
|
999
|
+
test('AC-4: without --specs-dir, all doing-* mini-plans are included (backward compatible)', () => {
|
|
1000
|
+
const plansDir = makeTmpDir();
|
|
1001
|
+
try {
|
|
1002
|
+
fs.writeFileSync(path.join(plansDir, 'doing-alpha.md'), `## Tasks
|
|
1003
|
+
- [ ] **T1**: Alpha task
|
|
1004
|
+
`);
|
|
1005
|
+
fs.writeFileSync(path.join(plansDir, 'doing-beta.md'), `## Tasks
|
|
1006
|
+
- [ ] **T1**: Beta task
|
|
1007
|
+
`);
|
|
1008
|
+
|
|
1009
|
+
const result = runConsolidator(['--plans-dir', plansDir]);
|
|
1010
|
+
assert.equal(result.code, 0);
|
|
1011
|
+
assert.ok(result.stdout.includes('Alpha task'), 'alpha mini-plan should be included without --specs-dir');
|
|
1012
|
+
assert.ok(result.stdout.includes('Beta task'), 'beta mini-plan should be included without --specs-dir');
|
|
1013
|
+
} finally {
|
|
1014
|
+
rmrf(plansDir);
|
|
1015
|
+
}
|
|
1016
|
+
});
|
|
1017
|
+
|
|
1018
|
+
test('AC-4: backward-compat — no stale warnings emitted without --specs-dir', () => {
|
|
1019
|
+
const plansDir = makeTmpDir();
|
|
1020
|
+
try {
|
|
1021
|
+
fs.writeFileSync(path.join(plansDir, 'doing-orphan.md'), `## Tasks
|
|
1022
|
+
- [ ] **T1**: Orphan task (no spec file anywhere)
|
|
1023
|
+
`);
|
|
1024
|
+
|
|
1025
|
+
const result = runConsolidator(['--plans-dir', plansDir]);
|
|
1026
|
+
assert.equal(result.code, 0);
|
|
1027
|
+
assert.ok(!result.stderr.includes('skipping stale'), 'no stale warnings without --specs-dir');
|
|
1028
|
+
} finally {
|
|
1029
|
+
rmrf(plansDir);
|
|
1030
|
+
}
|
|
1031
|
+
});
|
|
1032
|
+
});
|
package/bin/ratchet.js
CHANGED
|
@@ -74,7 +74,7 @@ function loadConfig(repoRoot) {
|
|
|
74
74
|
// Snapshot: read auto-snapshot.txt and absolutize paths
|
|
75
75
|
// ---------------------------------------------------------------------------
|
|
76
76
|
|
|
77
|
-
function loadSnapshotFiles(repoRoot) {
|
|
77
|
+
function loadSnapshotFiles(repoRoot, resolveBase = repoRoot) {
|
|
78
78
|
const snapshotPath = path.join(repoRoot, '.deepflow', 'auto-snapshot.txt');
|
|
79
79
|
if (!fs.existsSync(snapshotPath)) return [];
|
|
80
80
|
|
|
@@ -82,7 +82,7 @@ function loadSnapshotFiles(repoRoot) {
|
|
|
82
82
|
.split('\n')
|
|
83
83
|
.map(l => l.trim())
|
|
84
84
|
.filter(l => l.length > 0)
|
|
85
|
-
.map(rel => path.join(
|
|
85
|
+
.map(rel => path.join(resolveBase, rel));
|
|
86
86
|
}
|
|
87
87
|
|
|
88
88
|
// ---------------------------------------------------------------------------
|
|
@@ -340,13 +340,13 @@ function main() {
|
|
|
340
340
|
|
|
341
341
|
const cfg = loadConfig(repoRoot);
|
|
342
342
|
const projectType = detectProjectType(repoRoot);
|
|
343
|
-
const snapshotFiles = loadSnapshotFiles(repoRoot);
|
|
343
|
+
const snapshotFiles = loadSnapshotFiles(repoRoot, cwd);
|
|
344
344
|
const cmds = buildCommands(repoRoot, projectType, snapshotFiles, cfg);
|
|
345
345
|
// --snapshot flag overrides the snapshot-derived test command
|
|
346
346
|
if (cliArgs.snapshot && fs.existsSync(cliArgs.snapshot)) {
|
|
347
347
|
const snapFiles = fs.readFileSync(cliArgs.snapshot, 'utf8')
|
|
348
348
|
.split('\n').map(l => l.trim()).filter(l => l.length > 0)
|
|
349
|
-
.map(rel => path.isAbsolute(rel) ? rel : path.join(
|
|
349
|
+
.map(rel => path.isAbsolute(rel) ? rel : path.join(cwd, rel));
|
|
350
350
|
if (snapFiles.length > 0 && projectType === 'node' && !cfg.test_command) {
|
|
351
351
|
cmds.test = ['node', '--test', ...snapFiles];
|
|
352
352
|
}
|
|
@@ -363,7 +363,7 @@ function main() {
|
|
|
363
363
|
if (exe !== 'npx' && !commandExists(exe)) continue;
|
|
364
364
|
}
|
|
365
365
|
|
|
366
|
-
const { ok, log } = runCommand(cmd,
|
|
366
|
+
const { ok, log } = runCommand(cmd, cwd);
|
|
367
367
|
|
|
368
368
|
if (ok === null) {
|
|
369
369
|
// executable spawning error — skip stage
|
package/bin/ratchet.test.js
CHANGED
|
@@ -1171,3 +1171,175 @@ describe('Subprocess integration — --task flag updates PLAN.md on PASS', () =>
|
|
|
1171
1171
|
assert.ok(plan.includes('- [ ] **T42**'), 'PLAN.md should remain unchecked without --task');
|
|
1172
1172
|
});
|
|
1173
1173
|
});
|
|
1174
|
+
|
|
1175
|
+
// ---------------------------------------------------------------------------
|
|
1176
|
+
// 19. Worktree cwd routing — commands execute in worktree path
|
|
1177
|
+
// ---------------------------------------------------------------------------
|
|
1178
|
+
|
|
1179
|
+
describe('loadSnapshotFiles — resolveBase parameter resolves paths against cwd not repoRoot', () => {
|
|
1180
|
+
let repoRoot;
|
|
1181
|
+
let worktreeDir;
|
|
1182
|
+
|
|
1183
|
+
beforeEach(() => {
|
|
1184
|
+
repoRoot = makeTmpDir();
|
|
1185
|
+
worktreeDir = makeTmpDir();
|
|
1186
|
+
});
|
|
1187
|
+
|
|
1188
|
+
afterEach(() => {
|
|
1189
|
+
rmrf(repoRoot);
|
|
1190
|
+
rmrf(worktreeDir);
|
|
1191
|
+
});
|
|
1192
|
+
|
|
1193
|
+
test('resolveBase defaults to repoRoot when not provided', () => {
|
|
1194
|
+
const deepflowDir = path.join(repoRoot, '.deepflow');
|
|
1195
|
+
fs.mkdirSync(deepflowDir, { recursive: true });
|
|
1196
|
+
fs.writeFileSync(
|
|
1197
|
+
path.join(deepflowDir, 'auto-snapshot.txt'),
|
|
1198
|
+
'bin/ratchet.test.js\n'
|
|
1199
|
+
);
|
|
1200
|
+
|
|
1201
|
+
const files = loadSnapshotFiles(repoRoot);
|
|
1202
|
+
assert.equal(files.length, 1);
|
|
1203
|
+
assert.equal(files[0], path.join(repoRoot, 'bin/ratchet.test.js'));
|
|
1204
|
+
});
|
|
1205
|
+
|
|
1206
|
+
test('resolveBase overrides path resolution when cwd differs from repoRoot', () => {
|
|
1207
|
+
// Snapshot lives in repoRoot's .deepflow dir, but paths should resolve against worktreeDir
|
|
1208
|
+
const deepflowDir = path.join(repoRoot, '.deepflow');
|
|
1209
|
+
fs.mkdirSync(deepflowDir, { recursive: true });
|
|
1210
|
+
fs.writeFileSync(
|
|
1211
|
+
path.join(deepflowDir, 'auto-snapshot.txt'),
|
|
1212
|
+
'bin/ratchet.test.js\ntest/integration.test.js\n'
|
|
1213
|
+
);
|
|
1214
|
+
|
|
1215
|
+
// Pass worktreeDir as resolveBase — paths should resolve against it, not repoRoot
|
|
1216
|
+
const files = loadSnapshotFiles(repoRoot, worktreeDir);
|
|
1217
|
+
assert.equal(files.length, 2);
|
|
1218
|
+
assert.equal(files[0], path.join(worktreeDir, 'bin/ratchet.test.js'));
|
|
1219
|
+
assert.equal(files[1], path.join(worktreeDir, 'test/integration.test.js'));
|
|
1220
|
+
// Confirm the paths do NOT point into repoRoot
|
|
1221
|
+
assert.ok(!files[0].startsWith(repoRoot), 'resolveBase should override repoRoot for path resolution');
|
|
1222
|
+
});
|
|
1223
|
+
|
|
1224
|
+
test('resolveBase changes where test files are expected to live', () => {
|
|
1225
|
+
const deepflowDir = path.join(repoRoot, '.deepflow');
|
|
1226
|
+
fs.mkdirSync(deepflowDir, { recursive: true });
|
|
1227
|
+
fs.writeFileSync(
|
|
1228
|
+
path.join(deepflowDir, 'auto-snapshot.txt'),
|
|
1229
|
+
'spec/my.test.js\n'
|
|
1230
|
+
);
|
|
1231
|
+
|
|
1232
|
+
const filesFromRepo = loadSnapshotFiles(repoRoot, repoRoot);
|
|
1233
|
+
const filesFromWorktree = loadSnapshotFiles(repoRoot, worktreeDir);
|
|
1234
|
+
|
|
1235
|
+
assert.equal(filesFromRepo[0], path.join(repoRoot, 'spec/my.test.js'));
|
|
1236
|
+
assert.equal(filesFromWorktree[0], path.join(worktreeDir, 'spec/my.test.js'));
|
|
1237
|
+
assert.notEqual(filesFromRepo[0], filesFromWorktree[0]);
|
|
1238
|
+
});
|
|
1239
|
+
});
|
|
1240
|
+
|
|
1241
|
+
describe('Subprocess integration — --worktree flag routes commands to worktree cwd', () => {
|
|
1242
|
+
let repoDir;
|
|
1243
|
+
let worktreeDir;
|
|
1244
|
+
|
|
1245
|
+
beforeEach(() => {
|
|
1246
|
+
// Set up main repo
|
|
1247
|
+
repoDir = makeTmpDir();
|
|
1248
|
+
execFileSync('git', ['init'], { cwd: repoDir, stdio: 'ignore' });
|
|
1249
|
+
execFileSync('git', ['config', 'user.email', 'test@test.com'], { cwd: repoDir, stdio: 'ignore' });
|
|
1250
|
+
execFileSync('git', ['config', 'user.name', 'Test'], { cwd: repoDir, stdio: 'ignore' });
|
|
1251
|
+
fs.writeFileSync(path.join(repoDir, 'dummy.txt'), 'hello');
|
|
1252
|
+
execFileSync('git', ['add', '.'], { cwd: repoDir, stdio: 'ignore' });
|
|
1253
|
+
execFileSync('git', ['commit', '-m', 'init'], { cwd: repoDir, stdio: 'ignore' });
|
|
1254
|
+
|
|
1255
|
+
// Set up worktree directory as a separate git repo (simulating a worktree checkout)
|
|
1256
|
+
worktreeDir = makeTmpDir();
|
|
1257
|
+
execFileSync('git', ['init'], { cwd: worktreeDir, stdio: 'ignore' });
|
|
1258
|
+
execFileSync('git', ['config', 'user.email', 'test@test.com'], { cwd: worktreeDir, stdio: 'ignore' });
|
|
1259
|
+
execFileSync('git', ['config', 'user.name', 'Test'], { cwd: worktreeDir, stdio: 'ignore' });
|
|
1260
|
+
fs.writeFileSync(path.join(worktreeDir, 'dummy.txt'), 'hello');
|
|
1261
|
+
execFileSync('git', ['add', '.'], { cwd: worktreeDir, stdio: 'ignore' });
|
|
1262
|
+
execFileSync('git', ['commit', '-m', 'init'], { cwd: worktreeDir, stdio: 'ignore' });
|
|
1263
|
+
});
|
|
1264
|
+
|
|
1265
|
+
afterEach(() => {
|
|
1266
|
+
rmrf(repoDir);
|
|
1267
|
+
rmrf(worktreeDir);
|
|
1268
|
+
});
|
|
1269
|
+
|
|
1270
|
+
test('--worktree flag causes test command to execute in worktree path', () => {
|
|
1271
|
+
// Write a test file in the WORKTREE dir that prints its cwd via process.cwd()
|
|
1272
|
+
const testFile = path.join(worktreeDir, 'cwd-check.test.js');
|
|
1273
|
+
fs.writeFileSync(testFile, [
|
|
1274
|
+
"'use strict';",
|
|
1275
|
+
"const { test } = require('node:test');",
|
|
1276
|
+
"const assert = require('node:assert/strict');",
|
|
1277
|
+
"const path = require('node:path');",
|
|
1278
|
+
"test('cwd is worktree path', () => {",
|
|
1279
|
+
" // This file lives in the worktree dir — if cwd is correct, __dirname matches cwd prefix",
|
|
1280
|
+
" assert.ok(process.cwd().startsWith(path.dirname(__dirname) || '/'), 'cwd should be set');",
|
|
1281
|
+
"});",
|
|
1282
|
+
].join('\n'));
|
|
1283
|
+
|
|
1284
|
+
// Write snapshot pointing to the test file (relative path from worktreeDir)
|
|
1285
|
+
const deepflowDir = path.join(worktreeDir, '.deepflow');
|
|
1286
|
+
fs.mkdirSync(deepflowDir, { recursive: true });
|
|
1287
|
+
fs.writeFileSync(
|
|
1288
|
+
path.join(deepflowDir, 'auto-snapshot.txt'),
|
|
1289
|
+
'cwd-check.test.js\n'
|
|
1290
|
+
);
|
|
1291
|
+
|
|
1292
|
+
// Write package.json in worktreeDir so it's detected as node project
|
|
1293
|
+
fs.writeFileSync(path.join(worktreeDir, 'package.json'), JSON.stringify({ name: 'test-worktree' }));
|
|
1294
|
+
|
|
1295
|
+
const result = spawnSync(process.execPath, [RATCHET_PATH, '--worktree', worktreeDir], {
|
|
1296
|
+
cwd: repoDir, // invoked from a different cwd (repoDir)
|
|
1297
|
+
encoding: 'utf8',
|
|
1298
|
+
stdio: ['ignore', 'pipe', 'pipe'],
|
|
1299
|
+
});
|
|
1300
|
+
|
|
1301
|
+
const output = (result.stdout || '').trim();
|
|
1302
|
+
assert.ok(output.length > 0, 'ratchet should produce output');
|
|
1303
|
+
const parsed = JSON.parse(output);
|
|
1304
|
+
// The test should pass because the test file exists in worktreeDir and runs correctly
|
|
1305
|
+
assert.equal(parsed.result, 'PASS', `Expected PASS but got: ${JSON.stringify(parsed)}`);
|
|
1306
|
+
});
|
|
1307
|
+
|
|
1308
|
+
test('--worktree flag: snapshot paths resolve against worktreeDir, not process.cwd()', () => {
|
|
1309
|
+
// Place test file only in worktreeDir (NOT in repoDir)
|
|
1310
|
+
const testFile = path.join(worktreeDir, 'only-in-worktree.test.js');
|
|
1311
|
+
fs.writeFileSync(testFile, [
|
|
1312
|
+
"'use strict';",
|
|
1313
|
+
"const { test } = require('node:test');",
|
|
1314
|
+
"const assert = require('node:assert/strict');",
|
|
1315
|
+
"test('exists only in worktree', () => { assert.ok(true); });",
|
|
1316
|
+
].join('\n'));
|
|
1317
|
+
|
|
1318
|
+
// Snapshot is in worktreeDir's .deepflow
|
|
1319
|
+
const deepflowDir = path.join(worktreeDir, '.deepflow');
|
|
1320
|
+
fs.mkdirSync(deepflowDir, { recursive: true });
|
|
1321
|
+
fs.writeFileSync(
|
|
1322
|
+
path.join(deepflowDir, 'auto-snapshot.txt'),
|
|
1323
|
+
'only-in-worktree.test.js\n'
|
|
1324
|
+
);
|
|
1325
|
+
fs.writeFileSync(path.join(worktreeDir, 'package.json'), JSON.stringify({ name: 'wt' }));
|
|
1326
|
+
|
|
1327
|
+
// Verify the test file does NOT exist in repoDir (to confirm routing works)
|
|
1328
|
+
assert.ok(
|
|
1329
|
+
!fs.existsSync(path.join(repoDir, 'only-in-worktree.test.js')),
|
|
1330
|
+
'Test file should not exist in repoDir'
|
|
1331
|
+
);
|
|
1332
|
+
|
|
1333
|
+
const result = spawnSync(process.execPath, [RATCHET_PATH, '--worktree', worktreeDir], {
|
|
1334
|
+
cwd: repoDir,
|
|
1335
|
+
encoding: 'utf8',
|
|
1336
|
+
stdio: ['ignore', 'pipe', 'pipe'],
|
|
1337
|
+
});
|
|
1338
|
+
|
|
1339
|
+
const output = (result.stdout || '').trim();
|
|
1340
|
+
assert.ok(output.length > 0, 'ratchet should produce output');
|
|
1341
|
+
const parsed = JSON.parse(output);
|
|
1342
|
+
// PASS means the test file was found in worktreeDir — cwd routing works
|
|
1343
|
+
assert.equal(parsed.result, 'PASS', `Expected PASS but got: ${JSON.stringify(parsed)}`);
|
|
1344
|
+
});
|
|
1345
|
+
});
|
|
@@ -0,0 +1,127 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
/**
|
|
3
|
+
* deepflow worktree-deps
|
|
4
|
+
* Symlinks node_modules from the main repo into a worktree so that
|
|
5
|
+
* TypeScript / LSP / builds resolve dependencies without a full install.
|
|
6
|
+
*
|
|
7
|
+
* Usage: node bin/worktree-deps.js --source /path/to/repo --worktree /path/to/worktree
|
|
8
|
+
*
|
|
9
|
+
* Walks the source repo looking for node_modules directories (max depth 2)
|
|
10
|
+
* and creates corresponding symlinks in the worktree.
|
|
11
|
+
*
|
|
12
|
+
* Exit codes: 0=OK, 1=ERROR
|
|
13
|
+
*/
|
|
14
|
+
|
|
15
|
+
'use strict';
|
|
16
|
+
|
|
17
|
+
const fs = require('fs');
|
|
18
|
+
const path = require('path');
|
|
19
|
+
|
|
20
|
+
// ---------------------------------------------------------------------------
|
|
21
|
+
// Args
|
|
22
|
+
// ---------------------------------------------------------------------------
|
|
23
|
+
|
|
24
|
+
/**
 * Parse CLI arguments for --source and --worktree.
 * Prints a usage message and exits with code 1 when either flag is missing.
 *
 * @returns {{source: string, worktree: string}} Resolved option values.
 */
function parseArgs() {
  const argv = process.argv.slice(2);
  const opts = {};
  let i = 0;
  while (i < argv.length) {
    const flag = argv[i];
    const value = argv[i + 1];
    if (flag === '--source' && value) {
      opts.source = value;
      i += 2;
    } else if (flag === '--worktree' && value) {
      opts.worktree = value;
      i += 2;
    } else {
      // Unknown token, or a flag with no following value — skip it.
      i += 1;
    }
  }
  if (!opts.source || !opts.worktree) {
    console.error('Usage: node bin/worktree-deps.js --source <repo> --worktree <worktree>');
    process.exit(1);
  }
  return opts;
}
|
|
37
|
+
|
|
38
|
+
// ---------------------------------------------------------------------------
|
|
39
|
+
// Find node_modules directories (depth 0 and 1 level of nesting)
|
|
40
|
+
// ---------------------------------------------------------------------------
|
|
41
|
+
|
|
42
|
+
/**
 * Find node_modules directories under `root`: the repo-root node_modules
 * plus any one level deep inside common monorepo workspace directories.
 *
 * @param {string} root - Path of the source repository.
 * @returns {string[]} Relative paths (from `root`) of each node_modules found.
 */
function findNodeModules(root) {
  const results = [];

  // Root node_modules
  if (fs.existsSync(path.join(root, 'node_modules'))) {
    results.push('node_modules');
  }

  // Scan common monorepo directory patterns for nested node_modules
  const monorepoPatterns = ['packages', 'apps', 'libs', 'services', 'modules', 'plugins'];

  for (const dir of monorepoPatterns) {
    const dirPath = path.join(root, dir);

    let entries;
    try {
      // statSync throws when dirPath does not exist; readdirSync can fail on
      // permission errors — either way there is nothing to scan here.
      if (!fs.statSync(dirPath).isDirectory()) continue;
      entries = fs.readdirSync(dirPath);
    } catch (_) {
      continue;
    }

    for (const entry of entries) {
      const entryPath = path.join(dirPath, entry);
      try {
        // Guarded: an unhandled statSync throw on a dangling symlink (or a
        // file removed mid-scan) would otherwise abort the entire walk.
        if (!fs.statSync(entryPath).isDirectory()) continue;
      } catch (_) {
        continue;
      }

      if (fs.existsSync(path.join(entryPath, 'node_modules'))) {
        results.push(path.join(dir, entry, 'node_modules'));
      }
    }
  }

  return results;
}
|
|
78
|
+
|
|
79
|
+
// ---------------------------------------------------------------------------
|
|
80
|
+
// Create symlinks
|
|
81
|
+
// ---------------------------------------------------------------------------
|
|
82
|
+
|
|
83
|
+
/**
 * Symlink every node_modules directory found in `source` into the matching
 * location under `worktree`, then print a JSON summary
 * (`{linked, total, errors?}` or `{linked: 0, message}`) to stdout.
 *
 * Existing destinations — directories, live symlinks, and dangling symlinks —
 * are left untouched.
 *
 * @param {string} source - Path to the main repository.
 * @param {string} worktree - Path to the worktree checkout.
 */
function symlinkDeps(source, worktree) {
  const nodeModulesPaths = findNodeModules(source);

  if (nodeModulesPaths.length === 0) {
    // Same bytes as the previous hand-written literal, but kept in sync with
    // the JSON.stringify call below by construction.
    console.log(JSON.stringify({ linked: 0, message: 'no node_modules found in source' }));
    return;
  }

  let linked = 0;
  const errors = [];

  for (const relPath of nodeModulesPaths) {
    const srcAbs = path.join(source, relPath);
    const dstAbs = path.join(worktree, relPath);

    // Skip if destination already exists. lstatSync (not existsSync) is used
    // because existsSync reports a dangling symlink as absent, which would
    // make symlinkSync fail with EEXIST instead of skipping.
    try {
      fs.lstatSync(dstAbs);
      continue;
    } catch (_) {
      // ENOENT — destination slot is free; fall through and create the link.
    }

    // Ensure parent directory exists in worktree ({recursive: true} is a
    // no-op when it already does).
    fs.mkdirSync(path.dirname(dstAbs), { recursive: true });

    try {
      fs.symlinkSync(srcAbs, dstAbs, 'dir');
      linked++;
    } catch (err) {
      errors.push({ path: relPath, error: err.message });
    }
  }

  const result = { linked, total: nodeModulesPaths.length };
  if (errors.length > 0) result.errors = errors;
  console.log(JSON.stringify(result));
}
|
|
121
|
+
|
|
122
|
+
// ---------------------------------------------------------------------------
|
|
123
|
+
// Main
|
|
124
|
+
// ---------------------------------------------------------------------------
|
|
125
|
+
|
|
126
|
+
// Entry point: parse --source/--worktree from the CLI, then mirror the source
// repo's node_modules directories into the worktree as symlinks.
const opts = parseArgs();
symlinkDeps(opts.source, opts.worktree);
package/hooks/df-spec-lint.js
CHANGED
|
@@ -123,12 +123,23 @@ function computeLayer(content) {
|
|
|
123
123
|
* @param {string} content - The raw markdown content of the spec file.
|
|
124
124
|
* @param {object} opts
|
|
125
125
|
* @param {'interactive'|'auto'} opts.mode
|
|
126
|
+
* @param {string|null} opts.filename - Optional filename (basename) used for stem validation.
|
|
126
127
|
* @returns {{ hard: string[], advisory: string[] }}
|
|
127
128
|
*/
|
|
128
|
-
function validateSpec(content, { mode = 'interactive', specsDir = null } = {}) {
|
|
129
|
+
function validateSpec(content, { mode = 'interactive', specsDir = null, filename = null } = {}) {
|
|
129
130
|
const hard = [];
|
|
130
131
|
const advisory = [];
|
|
131
132
|
|
|
133
|
+
// ── Spec filename stem validation ────────────────────────────────────
|
|
134
|
+
if (filename !== null) {
|
|
135
|
+
let stem = path.basename(filename, '.md');
|
|
136
|
+
stem = stem.replace(/^(doing-|done-)/, '');
|
|
137
|
+
const SAFE_STEM = /^[a-z0-9]([a-z0-9-]*[a-z0-9])?$/;
|
|
138
|
+
if (!SAFE_STEM.test(stem)) {
|
|
139
|
+
hard.push(`Spec filename stem contains unsafe characters: "${stem}"`);
|
|
140
|
+
}
|
|
141
|
+
}
|
|
142
|
+
|
|
132
143
|
// ── Frontmatter: parse and validate derives-from ─────────────────────
|
|
133
144
|
const { frontmatter } = parseFrontmatter(content);
|
|
134
145
|
if (frontmatter['derives-from'] !== undefined) {
|
|
@@ -339,7 +350,7 @@ if (require.main === module) {
|
|
|
339
350
|
const content = fs.readFileSync(filePath, 'utf8');
|
|
340
351
|
const mode = process.argv.includes('--auto') ? 'auto' : 'interactive';
|
|
341
352
|
const specsDir = path.resolve(path.dirname(filePath));
|
|
342
|
-
const result = validateSpec(content, { mode, specsDir });
|
|
353
|
+
const result = validateSpec(content, { mode, specsDir, filename: path.basename(filePath) });
|
|
343
354
|
|
|
344
355
|
if (result.hard.length > 0) {
|
|
345
356
|
console.error('HARD invariant failures:');
|
|
@@ -410,3 +410,136 @@ describe('derives-from validation', () => {
|
|
|
410
410
|
assert.deepEqual(resultWith.hard, resultWithout.hard);
|
|
411
411
|
});
|
|
412
412
|
});
|
|
413
|
+
|
|
414
|
+
// ---------------------------------------------------------------------------
|
|
415
|
+
// validateSpec — spec filename stem validation
|
|
416
|
+
// ---------------------------------------------------------------------------
|
|
417
|
+
|
|
418
|
+
describe('validateSpec stem validation', () => {
|
|
419
|
+
test('valid plain name passes', () => {
|
|
420
|
+
const result = validateSpec(fullSpec(), { filename: 'my-spec.md' });
|
|
421
|
+
const stemErrors = result.hard.filter((m) => m.includes('unsafe characters'));
|
|
422
|
+
assert.equal(stemErrors.length, 0);
|
|
423
|
+
});
|
|
424
|
+
|
|
425
|
+
test('valid name with numbers passes', () => {
|
|
426
|
+
const result = validateSpec(fullSpec(), { filename: 'spec-v2-fix.md' });
|
|
427
|
+
const stemErrors = result.hard.filter((m) => m.includes('unsafe characters'));
|
|
428
|
+
assert.equal(stemErrors.length, 0);
|
|
429
|
+
});
|
|
430
|
+
|
|
431
|
+
test('single character name passes', () => {
|
|
432
|
+
const result = validateSpec(fullSpec(), { filename: 'a.md' });
|
|
433
|
+
const stemErrors = result.hard.filter((m) => m.includes('unsafe characters'));
|
|
434
|
+
assert.equal(stemErrors.length, 0);
|
|
435
|
+
});
|
|
436
|
+
|
|
437
|
+
test('doing- prefix is stripped before validation', () => {
|
|
438
|
+
const result = validateSpec(fullSpec(), { filename: 'doing-my-spec.md' });
|
|
439
|
+
const stemErrors = result.hard.filter((m) => m.includes('unsafe characters'));
|
|
440
|
+
assert.equal(stemErrors.length, 0);
|
|
441
|
+
});
|
|
442
|
+
|
|
443
|
+
test('done- prefix is stripped before validation', () => {
|
|
444
|
+
const result = validateSpec(fullSpec(), { filename: 'done-my-spec.md' });
|
|
445
|
+
const stemErrors = result.hard.filter((m) => m.includes('unsafe characters'));
|
|
446
|
+
assert.equal(stemErrors.length, 0);
|
|
447
|
+
});
|
|
448
|
+
|
|
449
|
+
test('filename with dollar sign is rejected as hard failure', () => {
|
|
450
|
+
const result = validateSpec(fullSpec(), { filename: 'spec-$bad.md' });
|
|
451
|
+
const stemErrors = result.hard.filter((m) => m.includes('unsafe characters'));
|
|
452
|
+
assert.equal(stemErrors.length, 1);
|
|
453
|
+
});
|
|
454
|
+
|
|
455
|
+
test('filename with backtick is rejected as hard failure', () => {
|
|
456
|
+
const result = validateSpec(fullSpec(), { filename: 'spec-`bad.md' });
|
|
457
|
+
const stemErrors = result.hard.filter((m) => m.includes('unsafe characters'));
|
|
458
|
+
assert.equal(stemErrors.length, 1);
|
|
459
|
+
});
|
|
460
|
+
|
|
461
|
+
test('filename with pipe character is rejected as hard failure', () => {
|
|
462
|
+
const result = validateSpec(fullSpec(), { filename: 'spec|bad.md' });
|
|
463
|
+
const stemErrors = result.hard.filter((m) => m.includes('unsafe characters'));
|
|
464
|
+
assert.equal(stemErrors.length, 1);
|
|
465
|
+
});
|
|
466
|
+
|
|
467
|
+
test('filename with semicolon is rejected as hard failure', () => {
|
|
468
|
+
const result = validateSpec(fullSpec(), { filename: 'spec;bad.md' });
|
|
469
|
+
const stemErrors = result.hard.filter((m) => m.includes('unsafe characters'));
|
|
470
|
+
assert.equal(stemErrors.length, 1);
|
|
471
|
+
});
|
|
472
|
+
|
|
473
|
+
test('filename with ampersand is rejected as hard failure', () => {
|
|
474
|
+
const result = validateSpec(fullSpec(), { filename: 'spec&bad.md' });
|
|
475
|
+
const stemErrors = result.hard.filter((m) => m.includes('unsafe characters'));
|
|
476
|
+
assert.equal(stemErrors.length, 1);
|
|
477
|
+
});
|
|
478
|
+
|
|
479
|
+
test('filename with space is rejected as hard failure', () => {
|
|
480
|
+
const result = validateSpec(fullSpec(), { filename: 'spec bad.md' });
|
|
481
|
+
const stemErrors = result.hard.filter((m) => m.includes('unsafe characters'));
|
|
482
|
+
assert.equal(stemErrors.length, 1);
|
|
483
|
+
});
|
|
484
|
+
|
|
485
|
+
test('filename with path traversal (..) is rejected as hard failure', () => {
|
|
486
|
+
const result = validateSpec(fullSpec(), { filename: '..evil.md' });
|
|
487
|
+
const stemErrors = result.hard.filter((m) => m.includes('unsafe characters'));
|
|
488
|
+
assert.equal(stemErrors.length, 1);
|
|
489
|
+
});
|
|
490
|
+
|
|
491
|
+
test('filename with leading hyphen is rejected as hard failure', () => {
|
|
492
|
+
const result = validateSpec(fullSpec(), { filename: '-leading.md' });
|
|
493
|
+
const stemErrors = result.hard.filter((m) => m.includes('unsafe characters'));
|
|
494
|
+
assert.equal(stemErrors.length, 1);
|
|
495
|
+
});
|
|
496
|
+
|
|
497
|
+
test('filename with trailing hyphen is rejected as hard failure', () => {
|
|
498
|
+
const result = validateSpec(fullSpec(), { filename: 'trailing-.md' });
|
|
499
|
+
const stemErrors = result.hard.filter((m) => m.includes('unsafe characters'));
|
|
500
|
+
assert.equal(stemErrors.length, 1);
|
|
501
|
+
});
|
|
502
|
+
|
|
503
|
+
test('empty stem (only prefix) is rejected as hard failure', () => {
|
|
504
|
+
// A filename of just "doing-.md" strips to empty string
|
|
505
|
+
const result = validateSpec(fullSpec(), { filename: 'doing-.md' });
|
|
506
|
+
const stemErrors = result.hard.filter((m) => m.includes('unsafe characters'));
|
|
507
|
+
assert.equal(stemErrors.length, 1);
|
|
508
|
+
});
|
|
509
|
+
|
|
510
|
+
test('empty filename stem (.md only) is rejected as hard failure', () => {
|
|
511
|
+
const result = validateSpec(fullSpec(), { filename: '.md' });
|
|
512
|
+
const stemErrors = result.hard.filter((m) => m.includes('unsafe characters'));
|
|
513
|
+
assert.equal(stemErrors.length, 1);
|
|
514
|
+
});
|
|
515
|
+
|
|
516
|
+
test('stem validation failure is in hard array, not advisory', () => {
|
|
517
|
+
const result = validateSpec(fullSpec(), { filename: 'spec$bad.md' });
|
|
518
|
+
const hardErrors = result.hard.filter((m) => m.includes('unsafe characters'));
|
|
519
|
+
const advisoryErrors = result.advisory.filter((m) => m.includes('unsafe characters'));
|
|
520
|
+
assert.equal(hardErrors.length, 1);
|
|
521
|
+
assert.equal(advisoryErrors.length, 0);
|
|
522
|
+
});
|
|
523
|
+
|
|
524
|
+
test('no filename passed (null) skips stem validation', () => {
|
|
525
|
+
// No filename option — stem check should not run
|
|
526
|
+
const result = validateSpec(fullSpec());
|
|
527
|
+
const stemErrors = result.hard.filter((m) => m.includes('unsafe characters'));
|
|
528
|
+
assert.equal(stemErrors.length, 0);
|
|
529
|
+
});
|
|
530
|
+
|
|
531
|
+
test('all existing repo spec names pass validation', () => {
|
|
532
|
+
const existingNames = [
|
|
533
|
+
'done-dashboard-model-cost-fixes.md',
|
|
534
|
+
'done-orchestrator-v2.md',
|
|
535
|
+
'done-plan-cleanup.md',
|
|
536
|
+
'done-plan-fanout.md',
|
|
537
|
+
'done-quality-gates.md',
|
|
538
|
+
];
|
|
539
|
+
for (const filename of existingNames) {
|
|
540
|
+
const result = validateSpec(fullSpec(), { filename });
|
|
541
|
+
const stemErrors = result.hard.filter((m) => m.includes('unsafe characters'));
|
|
542
|
+
assert.equal(stemErrors.length, 0, `Expected ${filename} to pass but got stem errors`);
|
|
543
|
+
}
|
|
544
|
+
});
|
|
545
|
+
});
|