claude-remote-cli 3.9.4 → 3.10.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/frontend/assets/index-BTOnhJQN.css +32 -0
- package/dist/frontend/assets/index-Dgf6cKGu.js +52 -0
- package/dist/frontend/index.html +2 -2
- package/dist/server/branch-linker.js +136 -0
- package/dist/server/config.js +31 -1
- package/dist/server/index.js +260 -6
- package/dist/server/integration-github.js +117 -0
- package/dist/server/integration-jira.js +177 -0
- package/dist/server/integration-linear.js +176 -0
- package/dist/server/org-dashboard.js +222 -0
- package/dist/server/review-poller.js +241 -0
- package/dist/server/sessions.js +43 -3
- package/dist/server/ticket-transitions.js +265 -0
- package/dist/server/watcher.js +124 -0
- package/dist/test/branch-linker.test.js +231 -0
- package/dist/test/branch-watcher.test.js +73 -0
- package/dist/test/config.test.js +56 -0
- package/dist/test/integration-github.test.js +203 -0
- package/dist/test/integration-jira.test.js +302 -0
- package/dist/test/integration-linear.test.js +293 -0
- package/dist/test/org-dashboard.test.js +240 -0
- package/dist/test/review-poller.test.js +344 -0
- package/dist/test/ticket-transitions.test.js +470 -0
- package/package.json +1 -1
- package/dist/frontend/assets/index-BYv7-2w9.css +0 -32
- package/dist/frontend/assets/index-CO9tRKXI.js +0 -52
package/dist/server/watcher.js
CHANGED
|
@@ -1,6 +1,9 @@
|
|
|
1
1
|
import fs from 'node:fs';
|
|
2
2
|
import path from 'node:path';
|
|
3
|
+
import { execFile } from 'node:child_process';
|
|
4
|
+
import { promisify } from 'node:util';
|
|
3
5
|
import { EventEmitter } from 'node:events';
|
|
6
|
+
const execFileAsync = promisify(execFile);
|
|
4
7
|
export const WORKTREE_DIRS = ['.worktrees', '.claude/worktrees'];
|
|
5
8
|
export function isValidWorktreePath(worktreePath) {
|
|
6
9
|
const resolved = path.resolve(worktreePath);
|
|
@@ -137,3 +140,124 @@ export class WorktreeWatcher extends EventEmitter {
|
|
|
137
140
|
this._closeAll();
|
|
138
141
|
}
|
|
139
142
|
}
|
|
143
|
+
// Watches git HEAD files — for each repo under the given root dirs, both the
// main checkout's .git/HEAD and every linked worktree's HEAD — and invokes a
// callback whenever the checked-out branch changes. HEAD-file events are
// debounced (git rewrites HEAD via lock+rename, producing bursts), and the
// actual branch is confirmed via `git rev-parse` so touch-only writes do not
// produce false positives.
export class BranchWatcher {
    // Open fs.FSWatcher instances; torn down and recreated by rebuild().
    _watchers = [];
    // cwdPath -> pending debounce timer handle (300ms window).
    _debounceTimers = new Map();
    // cwdPath -> last observed branch name; used to suppress no-op events.
    _lastBranch = new Map();
    // (cwdPath, newBranch) => void — fired on each confirmed branch change.
    _callback;
    constructor(callback) {
        this._callback = callback;
    }
    // Replace all current watchers with a fresh scan: each immediate,
    // non-hidden child directory of each root that contains a `.git` entry
    // is treated as a repo and its HEAD files are watched.
    rebuild(rootDirs) {
        this._closeAll();
        for (const rootDir of rootDirs) {
            let entries;
            try {
                entries = fs.readdirSync(rootDir, { withFileTypes: true });
            }
            catch (_) {
                continue; // root dir missing or unreadable — skip silently
            }
            for (const entry of entries) {
                if (!entry.isDirectory() || entry.name.startsWith('.'))
                    continue;
                const repoPath = path.join(rootDir, entry.name);
                if (!fs.existsSync(path.join(repoPath, '.git')))
                    continue;
                this._watchRepoHeads(repoPath);
            }
        }
    }
    // Watch the main HEAD of `repoPath` plus the HEAD of every linked
    // worktree registered under .git/worktrees/.
    _watchRepoHeads(repoPath) {
        // Watch main repo HEAD
        const mainHead = path.join(repoPath, '.git', 'HEAD');
        this._watchHeadFile(mainHead, repoPath);
        // Watch worktree HEADs: <repoPath>/.git/worktrees/*/HEAD
        const worktreesGitDir = path.join(repoPath, '.git', 'worktrees');
        let wtEntries;
        try {
            wtEntries = fs.readdirSync(worktreesGitDir, { withFileTypes: true });
        }
        catch (_) {
            return; // No worktrees
        }
        for (const entry of wtEntries) {
            if (!entry.isDirectory())
                continue;
            const wtGitDir = path.join(worktreesGitDir, entry.name);
            const headFile = path.join(wtGitDir, 'HEAD');
            if (!fs.existsSync(headFile))
                continue;
            // Map worktree git dir back to checkout path via gitdir file
            const gitdirFile = path.join(wtGitDir, 'gitdir');
            let checkoutPath;
            try {
                const gitdirContent = fs.readFileSync(gitdirFile, 'utf-8').trim();
                // gitdir contains <checkoutPath>/.git — strip the /.git suffix
                // NOTE(review): forward-slash-only regex — presumably POSIX
                // paths are assumed here; verify if Windows support matters.
                checkoutPath = gitdirContent.replace(/\/\.git\/?$/, '');
            }
            catch (_) {
                continue;
            }
            this._watchHeadFile(headFile, checkoutPath);
        }
    }
    // Start an fs.watch on one HEAD file; `cwdPath` is the checkout dir used
    // later for `git rev-parse` and reported to the callback.
    _watchHeadFile(headPath, cwdPath) {
        // Seed initial branch to avoid false-positive on first change detection
        try {
            const content = fs.readFileSync(headPath, 'utf-8').trim();
            const match = content.match(/^ref: refs\/heads\/(.+)$/);
            if (match)
                this._lastBranch.set(cwdPath, match[1]);
        }
        catch (_) { }
        try {
            // persistent: false — do not keep the process alive for this watch
            const watcher = fs.watch(headPath, { persistent: false }, () => {
                this._debouncedCheck(cwdPath);
            });
            watcher.on('error', () => { });
            this._watchers.push(watcher);
        }
        catch (_) { }
    }
    // Coalesce bursts of HEAD-file events into a single check per 300ms.
    _debouncedCheck(cwdPath) {
        const existing = this._debounceTimers.get(cwdPath);
        if (existing)
            clearTimeout(existing);
        this._debounceTimers.set(cwdPath, setTimeout(() => {
            this._debounceTimers.delete(cwdPath);
            this._readAndEmit(cwdPath);
        }, 300));
    }
    // Ask git for the current branch; fire the callback only when it differs
    // from the last branch recorded for this checkout.
    async _readAndEmit(cwdPath) {
        try {
            const { stdout } = await execFileAsync('git', ['rev-parse', '--abbrev-ref', 'HEAD'], { cwd: cwdPath });
            const newBranch = stdout.trim();
            const lastBranch = this._lastBranch.get(cwdPath);
            if (newBranch && newBranch !== lastBranch) {
                this._lastBranch.set(cwdPath, newBranch);
                this._callback(cwdPath, newBranch);
            }
        }
        catch (_) {
            // Non-fatal — repo may be in detached HEAD or mid-rebase
        }
    }
    // Close every watcher, cancel pending debounce timers, and drop all
    // per-checkout state.
    _closeAll() {
        for (const w of this._watchers) {
            try {
                w.close();
            }
            catch (_) { }
        }
        this._watchers = [];
        for (const timer of this._debounceTimers.values()) {
            clearTimeout(timer);
        }
        this._debounceTimers.clear();
        this._lastBranch.clear();
    }
    // Public shutdown — idempotent.
    close() {
        this._closeAll();
    }
}
|
|
@@ -0,0 +1,231 @@
|
|
|
1
|
+
import { test, before, after } from 'node:test';
|
|
2
|
+
import assert from 'node:assert/strict';
|
|
3
|
+
import fs from 'node:fs';
|
|
4
|
+
import path from 'node:path';
|
|
5
|
+
import os from 'node:os';
|
|
6
|
+
import express from 'express';
|
|
7
|
+
import { createBranchLinkerRouter, invalidateBranchLinkerCache, } from '../server/branch-linker.js';
|
|
8
|
+
import { saveConfig, DEFAULTS } from '../server/config.js';
|
|
9
|
+
let tmpDir;
|
|
10
|
+
let configPath;
|
|
11
|
+
let server;
|
|
12
|
+
let baseUrl;
|
|
13
|
+
const WORKSPACE_PATH_A = '/fake/workspace/repo-a';
|
|
14
|
+
const WORKSPACE_PATH_B = '/fake/workspace/repo-b';
|
|
15
|
+
/**
 * Builds a fake execAsync implementation for the branch-linker router.
 * Only `git branch ...` invocations are understood; every other command
 * rejects so unexpected subprocess calls surface as test failures.
 *
 * @param {{branchesByPath?: Object<string, string[]>, errorByPath?: Object<string, Error>}} opts
 * @returns {(cmd: string, args: string[], options: {cwd?: string}) => Promise<{stdout: string, stderr: string}>}
 */
function makeMockExec(opts) {
    return async (cmd, args, options) => {
        const workingDir = options.cwd ?? '';
        const isGitBranch = cmd === 'git' && args[0] === 'branch';
        if (!isGitBranch) {
            throw new Error(`Unexpected exec call: ${cmd} ${args.join(' ')}`);
        }
        const configuredError = opts.errorByPath?.[workingDir];
        if (configuredError) {
            throw configuredError;
        }
        const branchList = opts.branchesByPath?.[workingDir] ?? [];
        return { stdout: `${branchList.join('\n')}\n`, stderr: '' };
    };
}
|
|
33
|
+
/**
 * Boots an express app exposing the branch-linker router on an ephemeral
 * 127.0.0.1 port, recording the server and its base URL in module state.
 * Resolves once the server is listening.
 *
 * @param {Function} execAsyncFn - mock execAsync injected into the router
 * @param {Function} [getActiveBranchNames] - optional active-session lookup
 * @returns {Promise<void>}
 */
function startServer(execAsyncFn, getActiveBranchNames) {
    return new Promise((resolve) => {
        const app = express();
        app.use(express.json());
        const deps = { configPath, execAsync: execAsyncFn };
        if (getActiveBranchNames) {
            deps.getActiveBranchNames = getActiveBranchNames;
        }
        app.use('/branch-linker', createBranchLinkerRouter(deps));
        server = app.listen(0, '127.0.0.1', () => {
            const addr = server.address();
            if (addr && typeof addr === 'object') {
                baseUrl = `http://127.0.0.1:${addr.port}`;
            }
            resolve();
        });
    });
}
|
|
52
|
+
/**
 * Gracefully shuts down the test server, if one is running.
 * @returns {Promise<void>} resolves once closed (immediately when no server)
 */
function stopServer() {
    return new Promise((resolve) => {
        if (!server) {
            resolve();
            return;
        }
        server.close(() => resolve());
    });
}
|
|
60
|
+
/**
 * Fetches the current ticket-ID → BranchLink[] map from the running server.
 * @returns {Promise<Object>} parsed JSON body of GET /branch-linker/links
 */
async function getLinks() {
    const response = await fetch(`${baseUrl}/branch-linker/links`);
    return response.json();
}
|
|
64
|
+
// Suite setup: isolate each run in a fresh temp dir so config writes never
// touch a real user config file, and reset module-level router state.
before(() => {
    tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'branch-linker-test-'));
    configPath = path.join(tmpDir, 'config.json');
    // Clear any module-level cache before test suite runs
    invalidateBranchLinkerCache();
});
// Suite teardown: stop any server still listening and remove the temp dir.
after(async () => {
    await stopServer();
    fs.rmSync(tmpDir, { recursive: true, force: true });
});
|
|
74
|
+
// A Jira-style key (PROJECT-123) embedded anywhere in a branch name should
// become a ticket entry with branch/repo metadata derived from the workspace.
test('extracts Jira ticket IDs from branch names', async () => {
    await stopServer();
    invalidateBranchLinkerCache();
    saveConfig(configPath, {
        ...DEFAULTS,
        workspaces: [WORKSPACE_PATH_A],
    });
    const exec = makeMockExec({
        branchesByPath: {
            [WORKSPACE_PATH_A]: ['dy/fix/ACME-123-auth', 'main'],
        },
    });
    await startServer(exec);
    const data = await getLinks();
    assert.ok('ACME-123' in data, 'Should extract ACME-123 from branch name');
    const links = data['ACME-123'];
    assert.equal(links.length, 1);
    assert.equal(links[0].branchName, 'dy/fix/ACME-123-auth');
    assert.equal(links[0].repoPath, WORKSPACE_PATH_A);
    // repoName is the basename of the workspace path.
    assert.equal(links[0].repoName, 'repo-a');
});
// Branches named gh-N should be reported under an uppercase GH-N key.
test('extracts GH issue IDs from gh-N branches', async () => {
    await stopServer();
    invalidateBranchLinkerCache();
    saveConfig(configPath, {
        ...DEFAULTS,
        workspaces: [WORKSPACE_PATH_A],
    });
    const exec = makeMockExec({
        branchesByPath: {
            // Use a branch that only the GH regex matches (no embedded uppercase-only letters)
            // to get a single clean GH-42 link. The Jira regex (/[A-Z]{2,}-\d+/gi) would also
            // match 'gh-42' since the flag is case-insensitive, so we use a prefix that isolates
            // the GH regex match by starting with 'gh-' at the very start of the branch name.
            [WORKSPACE_PATH_A]: ['gh-42-login-fix'],
        },
    });
    await startServer(exec);
    const data = await getLinks();
    assert.ok('GH-42' in data, 'Should extract GH-42 from branch name');
    const links = data['GH-42'];
    // Only the GH regex matches this branch — the Jira regex explicitly excludes 'GH'
    // to avoid double-matching. Verify all links point to the correct branch and repo.
    assert.ok(links.length >= 1, 'Should have at least one GH-42 link');
    assert.ok(links.every((l) => l.branchName === 'gh-42-login-fix'), 'All links should reference the correct branch');
    assert.ok(links.every((l) => l.repoPath === WORKSPACE_PATH_A), 'All links should reference the correct repo');
});
// The same ticket key appearing in multiple workspaces should aggregate into
// one entry with a link per repo.
test('same ticket in two repos yields array of 2 BranchLinks', async () => {
    await stopServer();
    invalidateBranchLinkerCache();
    saveConfig(configPath, {
        ...DEFAULTS,
        workspaces: [WORKSPACE_PATH_A, WORKSPACE_PATH_B],
    });
    const exec = makeMockExec({
        branchesByPath: {
            [WORKSPACE_PATH_A]: ['feature/PROJ-99-payment'],
            [WORKSPACE_PATH_B]: ['bugfix/PROJ-99-payment-fix'],
        },
    });
    await startServer(exec);
    const data = await getLinks();
    assert.ok('PROJ-99' in data, 'Should have PROJ-99 key');
    const links = data['PROJ-99'];
    assert.equal(links.length, 2, 'Should have 2 BranchLinks for the same ticket across 2 repos');
    const repoPaths = links.map((l) => l.repoPath).sort();
    assert.deepEqual(repoPaths, [WORKSPACE_PATH_A, WORKSPACE_PATH_B].sort());
});
// Branch names carrying no recognizable ticket pattern produce no entries.
test('ignores branches without ticket IDs', async () => {
    await stopServer();
    invalidateBranchLinkerCache();
    saveConfig(configPath, {
        ...DEFAULTS,
        workspaces: [WORKSPACE_PATH_A],
    });
    const exec = makeMockExec({
        branchesByPath: {
            [WORKSPACE_PATH_A]: ['main', 'develop', 'chore/cleanup', 'feature/new-ui'],
        },
    });
    await startServer(exec);
    const data = await getLinks();
    assert.equal(Object.keys(data).length, 0, 'Plain branches should produce no ticket links');
});
// The optional getActiveBranchNames dependency flags links whose branch is in
// the active set; branches outside the set get hasActiveSession === false.
test('hasActiveSession true when branch is in active set', async () => {
    await stopServer();
    invalidateBranchLinkerCache();
    saveConfig(configPath, {
        ...DEFAULTS,
        workspaces: [WORKSPACE_PATH_A],
    });
    const activeBranch = 'feature/ACTIVE-1-work';
    const exec = makeMockExec({
        branchesByPath: {
            [WORKSPACE_PATH_A]: [activeBranch, 'feature/INACTIVE-2-other'],
        },
    });
    const getActiveBranchNames = () => {
        return new Map([[WORKSPACE_PATH_A, new Set([activeBranch])]]);
    };
    await startServer(exec, getActiveBranchNames);
    const data = await getLinks();
    const activeLinks = data['ACTIVE-1'];
    assert.ok(activeLinks, 'Should have ACTIVE-1 ticket');
    assert.equal(activeLinks.length, 1);
    assert.equal(activeLinks[0].hasActiveSession, true, 'Active branch should have hasActiveSession true');
    const inactiveLinks = data['INACTIVE-2'];
    assert.ok(inactiveLinks, 'Should have INACTIVE-2 ticket');
    assert.equal(inactiveLinks[0].hasActiveSession, false, 'Inactive branch should have hasActiveSession false');
});
// Counts git invocations across three requests to prove the TTL cache serves
// the second request and invalidateBranchLinkerCache() forces a rescan.
test('invalidateBranchLinkerCache forces fresh scan', async () => {
    await stopServer();
    invalidateBranchLinkerCache();
    saveConfig(configPath, {
        ...DEFAULTS,
        workspaces: [WORKSPACE_PATH_A],
    });
    let gitCallCount = 0;
    const baseExec = makeMockExec({
        branchesByPath: {
            [WORKSPACE_PATH_A]: ['feature/SCAN-1-fresh'],
        },
    });
    const countingExec = async (...args) => {
        const [cmd] = args;
        if (cmd === 'git')
            gitCallCount++;
        return baseExec(...args);
    };
    await startServer(countingExec);
    // First request — populates module-level cache
    const first = await getLinks();
    assert.ok('SCAN-1' in first, 'Should have SCAN-1 after first request');
    assert.equal(gitCallCount, 1, 'git should be called once on first request');
    // Second request — served from cache
    const second = await getLinks();
    assert.ok('SCAN-1' in second);
    assert.equal(gitCallCount, 1, 'git should not be called again within TTL');
    // Invalidate cache
    invalidateBranchLinkerCache();
    // Third request — cache is cleared, should fetch fresh
    const third = await getLinks();
    assert.ok('SCAN-1' in third);
    assert.equal(gitCallCount, 2, 'git should be called again after cache invalidation');
});
// With no workspaces configured, the endpoint returns {} and the mock exec is
// never exercised (any call would throw its "Unexpected exec call" error).
test('returns empty object when no workspaces', async () => {
    await stopServer();
    invalidateBranchLinkerCache();
    saveConfig(configPath, {
        ...DEFAULTS,
        workspaces: [],
    });
    // execAsync should never be called here
    const exec = makeMockExec({});
    await startServer(exec);
    const data = await getLinks();
    assert.equal(Object.keys(data).length, 0, 'Should return empty object when no workspaces configured');
});
|
|
@@ -0,0 +1,73 @@
|
|
|
1
|
+
import { describe, it, afterEach } from 'node:test';
|
|
2
|
+
import assert from 'node:assert/strict';
|
|
3
|
+
import fs from 'node:fs';
|
|
4
|
+
import path from 'node:path';
|
|
5
|
+
import os from 'node:os';
|
|
6
|
+
import { execFileSync } from 'node:child_process';
|
|
7
|
+
import { BranchWatcher } from '../server/watcher.js';
|
|
8
|
+
/**
 * Creates a throwaway git repository with a single empty commit on `main`.
 * The caller is responsible for removing the directory afterwards.
 * @returns {string} absolute, symlink-resolved path to the repo
 */
function makeTempGitRepo() {
    const tempRoot = fs.mkdtempSync(path.join(os.tmpdir(), 'branch-watcher-test-'));
    // Resolve symlinks (macOS /var → /private/var) so paths match git output
    const repoDir = fs.realpathSync(tempRoot);
    const git = (args) => execFileSync('git', args, { cwd: repoDir });
    git(['init', '-b', 'main']);
    git(['-c', 'user.name=Test', '-c', 'user.email=test@test.com', 'commit', '--allow-empty', '-m', 'init']);
    return repoDir;
}
|
|
15
|
+
// Integration tests for BranchWatcher against a real on-disk git repo.
// Timings: fs.watch needs a short settle period after rebuild(), and the
// watcher itself debounces HEAD events for 300ms before checking the branch.
describe('BranchWatcher', () => {
    // Per-test teardown actions (watcher.close, temp-dir removal), run LIFO-
    // agnostically; failures are ignored so cleanup never masks a test result.
    const cleanups = [];
    afterEach(() => {
        for (const fn of cleanups) {
            try {
                fn();
            }
            catch { /* ignore */ }
        }
        cleanups.length = 0;
    });
    // Happy path: rewriting .git/HEAD to point at another branch must emit a
    // (cwdPath, newBranch) event after the debounce window.
    it('detects branch change via HEAD file write', async () => {
        const repoDir = makeTempGitRepo();
        // rebuild() scans immediate children, so watch the repo's parent dir.
        const parentDir = path.dirname(repoDir);
        cleanups.push(() => fs.rmSync(repoDir, { recursive: true, force: true }));
        const events = [];
        const watcher = new BranchWatcher((cwdPath, newBranch) => {
            events.push({ cwdPath, newBranch });
        });
        cleanups.push(() => watcher.close());
        watcher.rebuild([parentDir]);
        // Let fs.watch initialize
        await new Promise(resolve => setTimeout(resolve, 200));
        // Create the branch first, then simulate checkout by writing HEAD directly
        // (more deterministic than git checkout which uses lock+rename)
        execFileSync('git', ['branch', 'feature-test'], { cwd: repoDir });
        const headPath = path.join(repoDir, '.git', 'HEAD');
        fs.writeFileSync(headPath, 'ref: refs/heads/feature-test\n');
        // Wait for debounce (300ms) + processing
        await new Promise(resolve => setTimeout(resolve, 800));
        assert.ok(events.length > 0, 'Expected at least one branch change event');
        const lastEvent = events[events.length - 1];
        assert.equal(lastEvent.cwdPath, repoDir);
        assert.equal(lastEvent.newBranch, 'feature-test');
    });
    // A write that leaves HEAD's content unchanged must be suppressed by the
    // watcher's last-branch tracking — no callback should fire.
    it('does not fire callback if branch did not change', async () => {
        const repoDir = makeTempGitRepo();
        const parentDir = path.dirname(repoDir);
        cleanups.push(() => fs.rmSync(repoDir, { recursive: true, force: true }));
        const events = [];
        const watcher = new BranchWatcher((cwdPath, newBranch) => {
            events.push({ cwdPath, newBranch });
        });
        cleanups.push(() => watcher.close());
        watcher.rebuild([parentDir]);
        await new Promise(resolve => setTimeout(resolve, 200));
        // Touch the HEAD file without changing the branch content
        const headPath = path.join(repoDir, '.git', 'HEAD');
        const content = fs.readFileSync(headPath, 'utf-8');
        fs.writeFileSync(headPath, content);
        await new Promise(resolve => setTimeout(resolve, 800));
        assert.equal(events.length, 0, 'Should not fire callback when branch is unchanged');
    });
    // close() on a watcher that never watched anything must be a no-op.
    it('closes cleanly', () => {
        const watcher = new BranchWatcher(() => { });
        watcher.close();
        // No error means success
    });
});
|
package/dist/test/config.test.js
CHANGED
|
@@ -223,3 +223,59 @@ test('deleteWorkspaceSettingKeys is no-op for nonexistent workspace', () => {
|
|
|
223
223
|
fs.writeFileSync(configPath, JSON.stringify(config), 'utf8');
|
|
224
224
|
assert.doesNotThrow(() => deleteWorkspaceSettingKeys(configPath, config, '/no/such/repo', ['defaultYolo']));
|
|
225
225
|
});
|
|
226
|
+
// Groups whose members all exist in `workspaces` should survive loadConfig
// untouched.
test('workspaceGroups with valid paths loads cleanly', () => {
    const configPath = path.join(tmpDir, 'config.json');
    fs.writeFileSync(configPath, JSON.stringify({
        workspaces: ['/a/repo', '/b/repo'],
        workspaceGroups: {
            'Group A': ['/a/repo'],
            'Group B': ['/b/repo'],
        },
    }), 'utf8');
    const config = loadConfig(configPath);
    assert.deepEqual(config.workspaceGroups['Group A'], ['/a/repo']);
    assert.deepEqual(config.workspaceGroups['Group B'], ['/b/repo']);
});
// Paths not present in `workspaces` are dropped from a group on load.
test('workspaceGroups with invalid path filters it out', () => {
    const configPath = path.join(tmpDir, 'config.json');
    fs.writeFileSync(configPath, JSON.stringify({
        workspaces: ['/valid/repo'],
        workspaceGroups: {
            'My Group': ['/valid/repo', '/not/in/workspaces'],
        },
    }), 'utf8');
    const config = loadConfig(configPath);
    assert.deepEqual(config.workspaceGroups['My Group'], ['/valid/repo']);
});
// A path may belong to only one group; the first group listed wins and the
// later group loses the (now-duplicate) member — leaving it empty/removed.
test('workspaceGroups with duplicate path keeps first-group winner', () => {
    const configPath = path.join(tmpDir, 'config.json');
    fs.writeFileSync(configPath, JSON.stringify({
        workspaces: ['/shared/repo'],
        workspaceGroups: {
            'First': ['/shared/repo'],
            'Second': ['/shared/repo'],
        },
    }), 'utf8');
    const config = loadConfig(configPath);
    assert.deepEqual(config.workspaceGroups['First'], ['/shared/repo']);
    assert.equal(config.workspaceGroups['Second'], undefined);
});
// Absence of the workspaceGroups key is valid — loadConfig leaves it unset.
test('workspaceGroups undefined produces no errors', () => {
    const configPath = path.join(tmpDir, 'config.json');
    fs.writeFileSync(configPath, JSON.stringify({
        workspaces: ['/some/repo'],
    }), 'utf8');
    const config = loadConfig(configPath);
    assert.equal(config.workspaceGroups, undefined);
});
// A group whose every member is filtered out is removed entirely rather than
// kept as an empty array.
test('workspaceGroups with all-invalid paths removes empty group', () => {
    const configPath = path.join(tmpDir, 'config.json');
    fs.writeFileSync(configPath, JSON.stringify({
        workspaces: ['/valid/repo'],
        workspaceGroups: {
            'Ghost Group': ['/not/here', '/also/not/here'],
        },
    }), 'utf8');
    const config = loadConfig(configPath);
    assert.equal(config.workspaceGroups['Ghost Group'], undefined);
});
|