deepflow 0.1.107 → 0.1.108

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/bin/install.js CHANGED
@@ -35,6 +35,23 @@ const GLOBAL_DIR = path.join(os.homedir(), '.claude');
35
35
  const PROJECT_DIR = path.join(process.cwd(), '.claude');
36
36
  const PACKAGE_DIR = path.resolve(__dirname, '..');
37
37
 
38
+ /**
39
+ * Atomically write data to targetPath using a write-to-temp + rename pattern.
40
+ * If the write fails, the original file is left untouched and the temp file is
41
+ * cleaned up. Temp file is created in the same directory as the target so the
42
+ * rename is within the same filesystem (atomic on POSIX).
43
+ */
44
+ function atomicWriteFileSync(targetPath, data) {
45
+ const tmpPath = targetPath + '.tmp';
46
+ try {
47
+ fs.writeFileSync(tmpPath, data);
48
+ fs.renameSync(tmpPath, targetPath);
49
+ } catch (err) {
50
+ try { fs.unlinkSync(tmpPath); } catch (_) {}
51
+ throw err;
52
+ }
53
+ }
54
+
38
55
  function updateGlobalPackage() {
39
56
  const currentVersion = require(path.join(PACKAGE_DIR, 'package.json')).version;
40
57
  try {
@@ -144,7 +161,7 @@ async function main() {
144
161
  // Copy bin utilities (plan-consolidator, wave-runner, ratchet)
145
162
  const binDest = path.join(CLAUDE_DIR, 'bin');
146
163
  fs.mkdirSync(binDest, { recursive: true });
147
- for (const script of ['plan-consolidator.js', 'wave-runner.js', 'ratchet.js']) {
164
+ for (const script of ['plan-consolidator.js', 'wave-runner.js', 'ratchet.js', 'worktree-deps.js']) {
148
165
  const src = path.join(PACKAGE_DIR, 'bin', script);
149
166
  if (fs.existsSync(src)) {
150
167
  fs.copyFileSync(src, path.join(binDest, script));
@@ -211,7 +228,7 @@ async function main() {
211
228
  console.log(' commands/df/ — /df:discover, /df:debate, /df:spec, /df:plan, /df:execute, /df:verify, /df:auto, /df:update');
212
229
  console.log(' skills/ — gap-discovery, atomic-commits, code-completeness, browse-fetch, browse-verify, auto-cycle');
213
230
  console.log(' agents/ — reasoner (/df:auto — autonomous execution via /loop)');
214
- console.log(' bin/ — plan-consolidator, wave-runner, ratchet');
231
+ console.log(' bin/ — plan-consolidator, wave-runner, ratchet, worktree-deps');
215
232
  console.log(' templates/ — explore-protocol (auto-injected into Explore agents via hook)');
216
233
  if (level === 'global') {
217
234
  console.log(' hooks/ — statusline, update checker, invariant checker, worktree guard, explore protocol');
@@ -434,7 +451,7 @@ async function configureHooks(claudeDir) {
434
451
  console.log(` ${c.dim}${file} copied (no @hook-event tag — not wired)${c.reset}`);
435
452
  }
436
453
 
437
- fs.writeFileSync(settingsPath, JSON.stringify(settings, null, 2));
454
+ atomicWriteFileSync(settingsPath, JSON.stringify(settings, null, 2));
438
455
  }
439
456
 
440
457
  function configureProjectSettings(claudeDir) {
@@ -457,7 +474,7 @@ function configureProjectSettings(claudeDir) {
457
474
  // Configure permissions for background agents
458
475
  configurePermissions(settings);
459
476
 
460
- fs.writeFileSync(settingsPath, JSON.stringify(settings, null, 2));
477
+ atomicWriteFileSync(settingsPath, JSON.stringify(settings, null, 2));
461
478
  log('LSP tool enabled + agent permissions configured (project)');
462
479
  }
463
480
 
@@ -614,6 +631,7 @@ async function uninstall() {
614
631
  'bin/plan-consolidator.js',
615
632
  'bin/wave-runner.js',
616
633
  'bin/ratchet.js',
634
+ 'bin/worktree-deps.js',
617
635
  'templates'
618
636
  ];
619
637
 
@@ -675,7 +693,7 @@ async function uninstall() {
675
693
  console.log(` ${c.green}✓${c.reset} Removed deepflow permissions from settings`);
676
694
  }
677
695
 
678
- fs.writeFileSync(settingsPath, JSON.stringify(settings, null, 2));
696
+ atomicWriteFileSync(settingsPath, JSON.stringify(settings, null, 2));
679
697
  } catch (e) {
680
698
  // Fail silently
681
699
  }
@@ -702,7 +720,7 @@ async function uninstall() {
702
720
  fs.unlinkSync(localSettingsPath);
703
721
  console.log(` ${c.green}✓${c.reset} Removed settings.local.json (empty after cleanup)`);
704
722
  } else {
705
- fs.writeFileSync(localSettingsPath, JSON.stringify(localSettings, null, 2));
723
+ atomicWriteFileSync(localSettingsPath, JSON.stringify(localSettings, null, 2));
706
724
  console.log(` ${c.green}✓${c.reset} Removed deepflow settings from settings.local.json`);
707
725
  }
708
726
  } catch (e) {
@@ -717,7 +735,7 @@ async function uninstall() {
717
735
  }
718
736
 
719
737
  // Export for testing
720
- module.exports = { scanHookEvents, removeDeepflowHooks };
738
+ module.exports = { scanHookEvents, removeDeepflowHooks, atomicWriteFileSync };
721
739
 
722
740
  // Only run main when executed directly (not when required by tests)
723
741
  if (require.main === module) {
@@ -1040,3 +1040,116 @@ describe('copyDir security hardening (symlink & path traversal)', () => {
1040
1040
  );
1041
1041
  });
1042
1042
  });
1043
+
1044
+ // ---------------------------------------------------------------------------
1045
+ // 7. atomicWriteFileSync — write-to-temp + rename pattern
1046
+ // ---------------------------------------------------------------------------
1047
+
1048
+ describe('atomicWriteFileSync', () => {
1049
+ const { atomicWriteFileSync } = require('./install.js');
1050
+ let tmpDir;
1051
+
1052
+ beforeEach(() => {
1053
+ tmpDir = makeTmpDir();
1054
+ });
1055
+
1056
+ afterEach(() => {
1057
+ rmrf(tmpDir);
1058
+ });
1059
+
1060
+ test('writes data to target file', () => {
1061
+ const target = path.join(tmpDir, 'settings.json');
1062
+ atomicWriteFileSync(target, '{"key":"value"}');
1063
+ assert.equal(fs.readFileSync(target, 'utf8'), '{"key":"value"}');
1064
+ });
1065
+
1066
+ test('leaves no .tmp artifact on success', () => {
1067
+ const target = path.join(tmpDir, 'settings.json');
1068
+ atomicWriteFileSync(target, 'data');
1069
+ assert.ok(!fs.existsSync(target + '.tmp'), 'No .tmp file should remain after successful write');
1070
+ });
1071
+
1072
+ test('overwrites existing target with new content', () => {
1073
+ const target = path.join(tmpDir, 'settings.json');
1074
+ fs.writeFileSync(target, 'original');
1075
+ atomicWriteFileSync(target, 'updated');
1076
+ assert.equal(fs.readFileSync(target, 'utf8'), 'updated');
1077
+ });
1078
+
1079
+ test('leaves original untouched when write to temp fails', () => {
1080
+ const target = path.join(tmpDir, 'settings.json');
1081
+ fs.writeFileSync(target, 'safe-original');
1082
+
1083
+ // Force writeFileSync to fail by passing a directory path as the tmpPath target
1084
+ // We do this by making the .tmp path a directory so writeFileSync throws EISDIR
1085
+ const tmpPath = target + '.tmp';
1086
+ fs.mkdirSync(tmpPath);
1087
+
1088
+ let threw = false;
1089
+ try {
1090
+ atomicWriteFileSync(target, 'should-not-overwrite');
1091
+ } catch (_) {
1092
+ threw = true;
1093
+ }
1094
+
1095
+ assert.ok(threw, 'atomicWriteFileSync should rethrow write errors');
1096
+ assert.equal(
1097
+ fs.readFileSync(target, 'utf8'),
1098
+ 'safe-original',
1099
+ 'Original file must be untouched when temp write fails'
1100
+ );
1101
+ });
1102
+
1103
+ test('cleans up .tmp artifact when write fails', () => {
1104
+ const target = path.join(tmpDir, 'settings.json');
1105
+ const tmpPath = target + '.tmp';
1106
+
1107
+ // Intercept: write succeeds but rename fails
1108
+ // We simulate this by making the target's parent dir read-only after the temp write
1109
+ // Instead, test cleanup via the EISDIR approach (tmpPath is a dir — can't write into it)
1110
+ // After EISDIR on writeFileSync(tmpPath), unlinkSync should clean it up.
1111
+ // Since tmpPath was created as a dir in this test, unlinkSync would fail silently,
1112
+ // but the dir itself was pre-existing. Let's use a simpler approach:
1113
+ // patch by making target a directory, which causes renameSync to fail after temp write.
1114
+
1115
+ // Create a target that is a directory so renameSync(tmp, target) fails
1116
+ fs.mkdirSync(target);
1117
+ fs.writeFileSync(path.join(target, 'dummy'), 'x'); // non-empty so unlinkSync fails cleanly
1118
+
1119
+ let threw = false;
1120
+ try {
1121
+ atomicWriteFileSync(target, 'data');
1122
+ } catch (_) {
1123
+ threw = true;
1124
+ }
1125
+
1126
+ assert.ok(threw, 'Should throw when rename fails');
1127
+ // .tmp should be cleaned up
1128
+ assert.ok(!fs.existsSync(tmpPath), '.tmp file should be cleaned up after rename failure');
1129
+ });
1130
+
1131
+ test('source uses atomicWriteFileSync for all 4 settings writes', () => {
1132
+ const src = fs.readFileSync(path.resolve(__dirname, 'install.js'), 'utf8');
1133
+ // Count occurrences of atomicWriteFileSync calls (excluding the definition)
1134
+ const calls = src.match(/atomicWriteFileSync\(/g) || [];
1135
+ // 1 definition + 4 call sites = 5 total occurrences minimum
1136
+ assert.ok(
1137
+ calls.length >= 5,
1138
+ `Expected at least 5 occurrences of atomicWriteFileSync (1 def + 4 calls), found ${calls.length}`
1139
+ );
1140
+ });
1141
+
1142
+ test('source exports atomicWriteFileSync for testing', () => {
1143
+ const src = fs.readFileSync(path.resolve(__dirname, 'install.js'), 'utf8');
1144
+ assert.ok(
1145
+ src.includes('atomicWriteFileSync') && src.includes('module.exports'),
1146
+ 'install.js should export atomicWriteFileSync'
1147
+ );
1148
+ const exportLine = src.match(/module\.exports\s*=\s*\{([^}]+)\}/);
1149
+ assert.ok(exportLine, 'module.exports should be a plain object');
1150
+ assert.ok(
1151
+ exportLine[1].includes('atomicWriteFileSync'),
1152
+ 'module.exports should include atomicWriteFileSync'
1153
+ );
1154
+ });
1155
+ });
@@ -23,12 +23,14 @@ const path = require('path');
23
23
  // ---------------------------------------------------------------------------
24
24
 
25
25
  function parseArgs(argv) {
26
- const args = { plansDir: null };
26
+ const args = { plansDir: null, specsDir: null };
27
27
  let i = 2;
28
28
  while (i < argv.length) {
29
29
  const arg = argv[i];
30
30
  if (arg === '--plans-dir' && argv[i + 1]) {
31
31
  args.plansDir = argv[++i];
32
+ } else if (arg === '--specs-dir' && argv[i + 1]) {
33
+ args.specsDir = argv[++i];
32
34
  }
33
35
  i++;
34
36
  }
@@ -282,6 +284,22 @@ function main() {
282
284
  process.exit(1);
283
285
  }
284
286
 
287
+ // Stale-filter: when --specs-dir is set, remove mini-plans whose corresponding
288
+ // spec file does not exist in specsDir
289
+ if (args.specsDir) {
290
+ const specsDir = path.resolve(process.cwd(), args.specsDir);
291
+ entries = entries.filter(filename => {
292
+ const specPath = path.join(specsDir, filename);
293
+ if (!fs.existsSync(specPath)) {
294
+ process.stderr.write(
295
+ `plan-consolidator: skipping stale mini-plan ${filename} (no matching spec in ${args.specsDir})\n`
296
+ );
297
+ return false;
298
+ }
299
+ return true;
300
+ });
301
+ }
302
+
285
303
  if (entries.length === 0) {
286
304
  process.stdout.write('## Tasks\n\n(no mini-plan files found in ' + plansDir + ')\n');
287
305
  process.exit(0);
@@ -880,3 +880,153 @@ describe('formatConsolidated — additional edge cases', () => {
880
880
  assert.ok(output.includes('Blocked by: T1, T2'));
881
881
  });
882
882
  });
883
+
884
+ // ---------------------------------------------------------------------------
885
+ // 10. parseArgs — --specs-dir (AC-1)
886
+ // ---------------------------------------------------------------------------
887
+
888
+ describe('parseArgs — --specs-dir', () => {
889
+ test('recognizes --specs-dir and stores it in args.specsDir', () => {
890
+ const args = parseArgs(['node', 'plan-consolidator.js', '--plans-dir', '/tmp/plans', '--specs-dir', 'specs/']);
891
+ assert.equal(args.specsDir, 'specs/');
892
+ });
893
+
894
+ test('specsDir is null when --specs-dir is not provided', () => {
895
+ const args = parseArgs(['node', 'plan-consolidator.js', '--plans-dir', '/tmp/plans']);
896
+ assert.equal(args.specsDir, null);
897
+ });
898
+
899
+ test('specsDir is null when --specs-dir has no following value', () => {
900
+ const args = parseArgs(['node', 'plan-consolidator.js', '--plans-dir', '/tmp/plans', '--specs-dir']);
901
+ assert.equal(args.specsDir, null);
902
+ });
903
+
904
+ test('--specs-dir works without --plans-dir (args are independent)', () => {
905
+ const args = parseArgs(['node', 'plan-consolidator.js', '--specs-dir', 'my-specs/']);
906
+ assert.equal(args.specsDir, 'my-specs/');
907
+ assert.equal(args.plansDir, null);
908
+ });
909
+ });
910
+
911
+ // ---------------------------------------------------------------------------
912
+ // 11. CLI integration — stale-filter with --specs-dir (AC-2, AC-3, AC-4)
913
+ // ---------------------------------------------------------------------------
914
+
915
+ describe('CLI integration — stale-filter', () => {
916
+ test('AC-2: mini-plan is included when matching spec exists in --specs-dir', () => {
917
+ const plansDir = makeTmpDir();
918
+ const specsDir = makeTmpDir();
919
+ try {
920
+ // Create a mini-plan doing-foo.md
921
+ fs.writeFileSync(path.join(plansDir, 'doing-foo.md'), `## Tasks
922
+ - [ ] **T1**: Foo task
923
+ `);
924
+ // Create matching spec in specsDir
925
+ fs.writeFileSync(path.join(specsDir, 'doing-foo.md'), '# Spec for foo\n');
926
+
927
+ const result = runConsolidator(['--plans-dir', plansDir, '--specs-dir', specsDir]);
928
+ assert.equal(result.code, 0);
929
+ assert.ok(result.stdout.includes('**T1**: Foo task'), 'task from matched mini-plan should appear in output');
930
+ assert.ok(!result.stderr.includes('skipping stale'), 'no stale warning should appear when spec exists');
931
+ } finally {
932
+ rmrf(plansDir);
933
+ rmrf(specsDir);
934
+ }
935
+ });
936
+
937
+ test('AC-3: mini-plan is excluded with stderr warning when spec is absent from --specs-dir', () => {
938
+ const plansDir = makeTmpDir();
939
+ const specsDir = makeTmpDir();
940
+ try {
941
+ // Create a mini-plan doing-bar.md
942
+ fs.writeFileSync(path.join(plansDir, 'doing-bar.md'), `## Tasks
943
+ - [ ] **T1**: Bar task
944
+ `);
945
+ // No matching spec in specsDir — specsDir is empty
946
+
947
+ const result = runConsolidator(['--plans-dir', plansDir, '--specs-dir', specsDir]);
948
+ assert.equal(result.code, 0);
949
+ assert.ok(!result.stdout.includes('**T1**: Bar task'), 'stale mini-plan task should be excluded from output');
950
+ assert.ok(result.stderr.includes('skipping stale mini-plan doing-bar.md'), 'stderr should warn about stale mini-plan');
951
+ } finally {
952
+ rmrf(plansDir);
953
+ rmrf(specsDir);
954
+ }
955
+ });
956
+
957
+ test('AC-3: stderr warning includes the specsDir path', () => {
958
+ const plansDir = makeTmpDir();
959
+ const specsDir = makeTmpDir();
960
+ try {
961
+ fs.writeFileSync(path.join(plansDir, 'doing-bar.md'), `## Tasks
962
+ - [ ] **T1**: Bar task
963
+ `);
964
+
965
+ const result = runConsolidator(['--plans-dir', plansDir, '--specs-dir', specsDir]);
966
+ // Warning format: "plan-consolidator: skipping stale mini-plan <filename> (no matching spec in <specsDir>)"
967
+ assert.ok(result.stderr.includes('no matching spec in'), 'stderr should mention no matching spec');
968
+ } finally {
969
+ rmrf(plansDir);
970
+ rmrf(specsDir);
971
+ }
972
+ });
973
+
974
+ test('AC-2+AC-3: only matched mini-plans are included when some specs exist and some do not', () => {
975
+ const plansDir = makeTmpDir();
976
+ const specsDir = makeTmpDir();
977
+ try {
978
+ // Two mini-plans: doing-present.md (spec exists) and doing-stale.md (spec absent)
979
+ fs.writeFileSync(path.join(plansDir, 'doing-present.md'), `## Tasks
980
+ - [ ] **T1**: Present task
981
+ `);
982
+ fs.writeFileSync(path.join(plansDir, 'doing-stale.md'), `## Tasks
983
+ - [ ] **T1**: Stale task
984
+ `);
985
+ // Only doing-present.md has a spec
986
+ fs.writeFileSync(path.join(specsDir, 'doing-present.md'), '# Spec for present\n');
987
+
988
+ const result = runConsolidator(['--plans-dir', plansDir, '--specs-dir', specsDir]);
989
+ assert.equal(result.code, 0);
990
+ assert.ok(result.stdout.includes('Present task'), 'matched mini-plan task should be included');
991
+ assert.ok(!result.stdout.includes('Stale task'), 'stale mini-plan task should be excluded');
992
+ assert.ok(result.stderr.includes('skipping stale mini-plan doing-stale.md'), 'stderr should warn about stale entry');
993
+ } finally {
994
+ rmrf(plansDir);
995
+ rmrf(specsDir);
996
+ }
997
+ });
998
+
999
+ test('AC-4: without --specs-dir, all doing-* mini-plans are included (backward compatible)', () => {
1000
+ const plansDir = makeTmpDir();
1001
+ try {
1002
+ fs.writeFileSync(path.join(plansDir, 'doing-alpha.md'), `## Tasks
1003
+ - [ ] **T1**: Alpha task
1004
+ `);
1005
+ fs.writeFileSync(path.join(plansDir, 'doing-beta.md'), `## Tasks
1006
+ - [ ] **T1**: Beta task
1007
+ `);
1008
+
1009
+ const result = runConsolidator(['--plans-dir', plansDir]);
1010
+ assert.equal(result.code, 0);
1011
+ assert.ok(result.stdout.includes('Alpha task'), 'alpha mini-plan should be included without --specs-dir');
1012
+ assert.ok(result.stdout.includes('Beta task'), 'beta mini-plan should be included without --specs-dir');
1013
+ } finally {
1014
+ rmrf(plansDir);
1015
+ }
1016
+ });
1017
+
1018
+ test('AC-4: backward-compat — no stale warnings emitted without --specs-dir', () => {
1019
+ const plansDir = makeTmpDir();
1020
+ try {
1021
+ fs.writeFileSync(path.join(plansDir, 'doing-orphan.md'), `## Tasks
1022
+ - [ ] **T1**: Orphan task (no spec file anywhere)
1023
+ `);
1024
+
1025
+ const result = runConsolidator(['--plans-dir', plansDir]);
1026
+ assert.equal(result.code, 0);
1027
+ assert.ok(!result.stderr.includes('skipping stale'), 'no stale warnings without --specs-dir');
1028
+ } finally {
1029
+ rmrf(plansDir);
1030
+ }
1031
+ });
1032
+ });
package/bin/ratchet.js CHANGED
@@ -74,7 +74,7 @@ function loadConfig(repoRoot) {
74
74
  // Snapshot: read auto-snapshot.txt and absolutize paths
75
75
  // ---------------------------------------------------------------------------
76
76
 
77
- function loadSnapshotFiles(repoRoot) {
77
+ function loadSnapshotFiles(repoRoot, resolveBase = repoRoot) {
78
78
  const snapshotPath = path.join(repoRoot, '.deepflow', 'auto-snapshot.txt');
79
79
  if (!fs.existsSync(snapshotPath)) return [];
80
80
 
@@ -82,7 +82,7 @@ function loadSnapshotFiles(repoRoot) {
82
82
  .split('\n')
83
83
  .map(l => l.trim())
84
84
  .filter(l => l.length > 0)
85
- .map(rel => path.join(repoRoot, rel));
85
+ .map(rel => path.join(resolveBase, rel));
86
86
  }
87
87
 
88
88
  // ---------------------------------------------------------------------------
@@ -340,13 +340,13 @@ function main() {
340
340
 
341
341
  const cfg = loadConfig(repoRoot);
342
342
  const projectType = detectProjectType(repoRoot);
343
- const snapshotFiles = loadSnapshotFiles(repoRoot);
343
+ const snapshotFiles = loadSnapshotFiles(repoRoot, cwd);
344
344
  const cmds = buildCommands(repoRoot, projectType, snapshotFiles, cfg);
345
345
  // --snapshot flag overrides the snapshot-derived test command
346
346
  if (cliArgs.snapshot && fs.existsSync(cliArgs.snapshot)) {
347
347
  const snapFiles = fs.readFileSync(cliArgs.snapshot, 'utf8')
348
348
  .split('\n').map(l => l.trim()).filter(l => l.length > 0)
349
- .map(rel => path.isAbsolute(rel) ? rel : path.join(repoRoot, rel));
349
+ .map(rel => path.isAbsolute(rel) ? rel : path.join(cwd, rel));
350
350
  if (snapFiles.length > 0 && projectType === 'node' && !cfg.test_command) {
351
351
  cmds.test = ['node', '--test', ...snapFiles];
352
352
  }
@@ -363,7 +363,7 @@ function main() {
363
363
  if (exe !== 'npx' && !commandExists(exe)) continue;
364
364
  }
365
365
 
366
- const { ok, log } = runCommand(cmd, repoRoot);
366
+ const { ok, log } = runCommand(cmd, cwd);
367
367
 
368
368
  if (ok === null) {
369
369
  // executable spawning error — skip stage
@@ -1171,3 +1171,175 @@ describe('Subprocess integration — --task flag updates PLAN.md on PASS', () =>
1171
1171
  assert.ok(plan.includes('- [ ] **T42**'), 'PLAN.md should remain unchecked without --task');
1172
1172
  });
1173
1173
  });
1174
+
1175
+ // ---------------------------------------------------------------------------
1176
+ // 19. Worktree cwd routing — commands execute in worktree path
1177
+ // ---------------------------------------------------------------------------
1178
+
1179
+ describe('loadSnapshotFiles — resolveBase parameter resolves paths against cwd not repoRoot', () => {
1180
+ let repoRoot;
1181
+ let worktreeDir;
1182
+
1183
+ beforeEach(() => {
1184
+ repoRoot = makeTmpDir();
1185
+ worktreeDir = makeTmpDir();
1186
+ });
1187
+
1188
+ afterEach(() => {
1189
+ rmrf(repoRoot);
1190
+ rmrf(worktreeDir);
1191
+ });
1192
+
1193
+ test('resolveBase defaults to repoRoot when not provided', () => {
1194
+ const deepflowDir = path.join(repoRoot, '.deepflow');
1195
+ fs.mkdirSync(deepflowDir, { recursive: true });
1196
+ fs.writeFileSync(
1197
+ path.join(deepflowDir, 'auto-snapshot.txt'),
1198
+ 'bin/ratchet.test.js\n'
1199
+ );
1200
+
1201
+ const files = loadSnapshotFiles(repoRoot);
1202
+ assert.equal(files.length, 1);
1203
+ assert.equal(files[0], path.join(repoRoot, 'bin/ratchet.test.js'));
1204
+ });
1205
+
1206
+ test('resolveBase overrides path resolution when cwd differs from repoRoot', () => {
1207
+ // Snapshot lives in repoRoot's .deepflow dir, but paths should resolve against worktreeDir
1208
+ const deepflowDir = path.join(repoRoot, '.deepflow');
1209
+ fs.mkdirSync(deepflowDir, { recursive: true });
1210
+ fs.writeFileSync(
1211
+ path.join(deepflowDir, 'auto-snapshot.txt'),
1212
+ 'bin/ratchet.test.js\ntest/integration.test.js\n'
1213
+ );
1214
+
1215
+ // Pass worktreeDir as resolveBase — paths should resolve against it, not repoRoot
1216
+ const files = loadSnapshotFiles(repoRoot, worktreeDir);
1217
+ assert.equal(files.length, 2);
1218
+ assert.equal(files[0], path.join(worktreeDir, 'bin/ratchet.test.js'));
1219
+ assert.equal(files[1], path.join(worktreeDir, 'test/integration.test.js'));
1220
+ // Confirm the paths do NOT point into repoRoot
1221
+ assert.ok(!files[0].startsWith(repoRoot), 'resolveBase should override repoRoot for path resolution');
1222
+ });
1223
+
1224
+ test('resolveBase changes where test files are expected to live', () => {
1225
+ const deepflowDir = path.join(repoRoot, '.deepflow');
1226
+ fs.mkdirSync(deepflowDir, { recursive: true });
1227
+ fs.writeFileSync(
1228
+ path.join(deepflowDir, 'auto-snapshot.txt'),
1229
+ 'spec/my.test.js\n'
1230
+ );
1231
+
1232
+ const filesFromRepo = loadSnapshotFiles(repoRoot, repoRoot);
1233
+ const filesFromWorktree = loadSnapshotFiles(repoRoot, worktreeDir);
1234
+
1235
+ assert.equal(filesFromRepo[0], path.join(repoRoot, 'spec/my.test.js'));
1236
+ assert.equal(filesFromWorktree[0], path.join(worktreeDir, 'spec/my.test.js'));
1237
+ assert.notEqual(filesFromRepo[0], filesFromWorktree[0]);
1238
+ });
1239
+ });
1240
+
1241
+ describe('Subprocess integration — --worktree flag routes commands to worktree cwd', () => {
1242
+ let repoDir;
1243
+ let worktreeDir;
1244
+
1245
+ beforeEach(() => {
1246
+ // Set up main repo
1247
+ repoDir = makeTmpDir();
1248
+ execFileSync('git', ['init'], { cwd: repoDir, stdio: 'ignore' });
1249
+ execFileSync('git', ['config', 'user.email', 'test@test.com'], { cwd: repoDir, stdio: 'ignore' });
1250
+ execFileSync('git', ['config', 'user.name', 'Test'], { cwd: repoDir, stdio: 'ignore' });
1251
+ fs.writeFileSync(path.join(repoDir, 'dummy.txt'), 'hello');
1252
+ execFileSync('git', ['add', '.'], { cwd: repoDir, stdio: 'ignore' });
1253
+ execFileSync('git', ['commit', '-m', 'init'], { cwd: repoDir, stdio: 'ignore' });
1254
+
1255
+ // Set up worktree directory as a separate git repo (simulating a worktree checkout)
1256
+ worktreeDir = makeTmpDir();
1257
+ execFileSync('git', ['init'], { cwd: worktreeDir, stdio: 'ignore' });
1258
+ execFileSync('git', ['config', 'user.email', 'test@test.com'], { cwd: worktreeDir, stdio: 'ignore' });
1259
+ execFileSync('git', ['config', 'user.name', 'Test'], { cwd: worktreeDir, stdio: 'ignore' });
1260
+ fs.writeFileSync(path.join(worktreeDir, 'dummy.txt'), 'hello');
1261
+ execFileSync('git', ['add', '.'], { cwd: worktreeDir, stdio: 'ignore' });
1262
+ execFileSync('git', ['commit', '-m', 'init'], { cwd: worktreeDir, stdio: 'ignore' });
1263
+ });
1264
+
1265
+ afterEach(() => {
1266
+ rmrf(repoDir);
1267
+ rmrf(worktreeDir);
1268
+ });
1269
+
1270
+ test('--worktree flag causes test command to execute in worktree path', () => {
1271
+ // Write a test file in the WORKTREE dir that prints its cwd via process.cwd()
1272
+ const testFile = path.join(worktreeDir, 'cwd-check.test.js');
1273
+ fs.writeFileSync(testFile, [
1274
+ "'use strict';",
1275
+ "const { test } = require('node:test');",
1276
+ "const assert = require('node:assert/strict');",
1277
+ "const path = require('node:path');",
1278
+ "test('cwd is worktree path', () => {",
1279
+ " // This file lives in the worktree dir — if cwd is correct, __dirname matches cwd prefix",
1280
+ " assert.ok(process.cwd().startsWith(path.dirname(__dirname) || '/'), 'cwd should be set');",
1281
+ "});",
1282
+ ].join('\n'));
1283
+
1284
+ // Write snapshot pointing to the test file (relative path from worktreeDir)
1285
+ const deepflowDir = path.join(worktreeDir, '.deepflow');
1286
+ fs.mkdirSync(deepflowDir, { recursive: true });
1287
+ fs.writeFileSync(
1288
+ path.join(deepflowDir, 'auto-snapshot.txt'),
1289
+ 'cwd-check.test.js\n'
1290
+ );
1291
+
1292
+ // Write package.json in worktreeDir so it's detected as node project
1293
+ fs.writeFileSync(path.join(worktreeDir, 'package.json'), JSON.stringify({ name: 'test-worktree' }));
1294
+
1295
+ const result = spawnSync(process.execPath, [RATCHET_PATH, '--worktree', worktreeDir], {
1296
+ cwd: repoDir, // invoked from a different cwd (repoDir)
1297
+ encoding: 'utf8',
1298
+ stdio: ['ignore', 'pipe', 'pipe'],
1299
+ });
1300
+
1301
+ const output = (result.stdout || '').trim();
1302
+ assert.ok(output.length > 0, 'ratchet should produce output');
1303
+ const parsed = JSON.parse(output);
1304
+ // The test should pass because the test file exists in worktreeDir and runs correctly
1305
+ assert.equal(parsed.result, 'PASS', `Expected PASS but got: ${JSON.stringify(parsed)}`);
1306
+ });
1307
+
1308
+ test('--worktree flag: snapshot paths resolve against worktreeDir, not process.cwd()', () => {
1309
+ // Place test file only in worktreeDir (NOT in repoDir)
1310
+ const testFile = path.join(worktreeDir, 'only-in-worktree.test.js');
1311
+ fs.writeFileSync(testFile, [
1312
+ "'use strict';",
1313
+ "const { test } = require('node:test');",
1314
+ "const assert = require('node:assert/strict');",
1315
+ "test('exists only in worktree', () => { assert.ok(true); });",
1316
+ ].join('\n'));
1317
+
1318
+ // Snapshot is in worktreeDir's .deepflow
1319
+ const deepflowDir = path.join(worktreeDir, '.deepflow');
1320
+ fs.mkdirSync(deepflowDir, { recursive: true });
1321
+ fs.writeFileSync(
1322
+ path.join(deepflowDir, 'auto-snapshot.txt'),
1323
+ 'only-in-worktree.test.js\n'
1324
+ );
1325
+ fs.writeFileSync(path.join(worktreeDir, 'package.json'), JSON.stringify({ name: 'wt' }));
1326
+
1327
+ // Verify the test file does NOT exist in repoDir (to confirm routing works)
1328
+ assert.ok(
1329
+ !fs.existsSync(path.join(repoDir, 'only-in-worktree.test.js')),
1330
+ 'Test file should not exist in repoDir'
1331
+ );
1332
+
1333
+ const result = spawnSync(process.execPath, [RATCHET_PATH, '--worktree', worktreeDir], {
1334
+ cwd: repoDir,
1335
+ encoding: 'utf8',
1336
+ stdio: ['ignore', 'pipe', 'pipe'],
1337
+ });
1338
+
1339
+ const output = (result.stdout || '').trim();
1340
+ assert.ok(output.length > 0, 'ratchet should produce output');
1341
+ const parsed = JSON.parse(output);
1342
+ // PASS means the test file was found in worktreeDir — cwd routing works
1343
+ assert.equal(parsed.result, 'PASS', `Expected PASS but got: ${JSON.stringify(parsed)}`);
1344
+ });
1345
+ });