claude-remote-cli 3.9.5 → 3.11.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/frontend/assets/index-B7wmLeyf.js +52 -0
- package/dist/frontend/assets/index-BTOnhJQN.css +32 -0
- package/dist/frontend/index.html +2 -2
- package/dist/server/branch-linker.js +134 -0
- package/dist/server/config.js +31 -1
- package/dist/server/index.js +186 -2
- package/dist/server/integration-github.js +117 -0
- package/dist/server/integration-jira.js +172 -0
- package/dist/server/org-dashboard.js +222 -0
- package/dist/server/review-poller.js +241 -0
- package/dist/server/sessions.js +43 -3
- package/dist/server/ticket-transitions.js +153 -0
- package/dist/test/branch-linker.test.js +231 -0
- package/dist/test/config.test.js +56 -0
- package/dist/test/integration-github.test.js +203 -0
- package/dist/test/integration-jira.test.js +221 -0
- package/dist/test/org-dashboard.test.js +240 -0
- package/dist/test/review-poller.test.js +344 -0
- package/dist/test/ticket-transitions.test.js +265 -0
- package/package.json +1 -1
- package/dist/frontend/assets/index-BYv7-2w9.css +0 -32
- package/dist/frontend/assets/index-CO9tRKXI.js +0 -52
|
@@ -0,0 +1,231 @@
|
|
|
1
|
+
import { test, before, after } from 'node:test';
|
|
2
|
+
import assert from 'node:assert/strict';
|
|
3
|
+
import fs from 'node:fs';
|
|
4
|
+
import path from 'node:path';
|
|
5
|
+
import os from 'node:os';
|
|
6
|
+
import express from 'express';
|
|
7
|
+
import { createBranchLinkerRouter, invalidateBranchLinkerCache, } from '../server/branch-linker.js';
|
|
8
|
+
import { saveConfig, DEFAULTS } from '../server/config.js';
|
|
9
|
+
let tmpDir;
|
|
10
|
+
let configPath;
|
|
11
|
+
let server;
|
|
12
|
+
let baseUrl;
|
|
13
|
+
const WORKSPACE_PATH_A = '/fake/workspace/repo-a';
|
|
14
|
+
const WORKSPACE_PATH_B = '/fake/workspace/repo-b';
|
|
15
|
+
/**
|
|
16
|
+
* Creates a mock execAsync that returns configured branch lists per cwd.
|
|
17
|
+
* - `git branch -a` → returns newline-joined branch names or throws
|
|
18
|
+
*/
|
|
19
|
+
/**
 * Creates a mock execAsync that returns configured branch lists per cwd.
 * - `git branch -a` → returns newline-joined branch names or throws
 *
 * @param {{branchesByPath?: Record<string, string[]>, errorByPath?: Record<string, Error>}} [opts]
 * @returns {(cmd: string, args: string[], options?: {cwd?: string}) => Promise<{stdout: string, stderr: string}>}
 */
function makeMockExec(opts = {}) {
    return async (cmd, args, options) => {
        // Tolerate callers that omit the options bag entirely.
        const cwd = options?.cwd ?? '';
        if (cmd === 'git' && args[0] === 'branch') {
            if (opts.errorByPath?.[cwd])
                throw opts.errorByPath[cwd];
            const branches = opts.branchesByPath?.[cwd] ?? [];
            return { stdout: branches.join('\n') + '\n', stderr: '' };
        }
        // Any other command is a test bug — fail loudly so it surfaces.
        throw new Error(`Unexpected exec call: ${cmd} ${args.join(' ')}`);
    };
}
|
|
33
|
+
/**
 * Boots an express app with the branch-linker router mounted, listening on
 * an ephemeral localhost port. Stores the server handle and base URL in
 * module state for the helpers and tests to use.
 *
 * @param {Function} execAsyncFn - mock execAsync injected into the router
 * @param {Function} [getActiveBranchNames] - optional active-session lookup
 * @returns {Promise<void>} resolves once the server is accepting connections
 */
function startServer(execAsyncFn, getActiveBranchNames) {
    return new Promise((resolve) => {
        const app = express();
        app.use(express.json());
        const deps = { configPath, execAsync: execAsyncFn };
        if (getActiveBranchNames) {
            deps.getActiveBranchNames = getActiveBranchNames;
        }
        app.use('/branch-linker', createBranchLinkerRouter(deps));
        server = app.listen(0, '127.0.0.1', () => {
            const addr = server.address();
            if (addr && typeof addr === 'object') {
                baseUrl = `http://127.0.0.1:${addr.port}`;
            }
            resolve();
        });
    });
}
|
|
52
|
+
/**
 * Gracefully shuts down the current test server, if one is running.
 * Clears the module-level handle first so repeated calls are safe no-ops
 * rather than double-closing an already-stopped server (which would invoke
 * the close callback with ERR_SERVER_NOT_RUNNING).
 *
 * @returns {Promise<void>}
 */
function stopServer() {
    return new Promise((resolve) => {
        if (!server) {
            resolve();
            return;
        }
        const handle = server;
        server = undefined;
        handle.close(() => resolve());
    });
}
|
|
60
|
+
/** Fetches the ticket-ID → BranchLink[] map from the running test server. */
async function getLinks() {
    const response = await fetch(`${baseUrl}/branch-linker/links`);
    return response.json();
}
|
|
64
|
+
before(() => {
    // Fresh scratch directory per suite run; the config file lives inside it.
    tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'branch-linker-test-'));
    configPath = path.join(tmpDir, 'config.json');
    // Start from a clean module-level cache.
    invalidateBranchLinkerCache();
});
|
|
70
|
+
after(async () => {
    await stopServer();
    // Remove the scratch directory and everything beneath it.
    fs.rmSync(tmpDir, { recursive: true, force: true });
});
|
|
74
|
+
test('extracts Jira ticket IDs from branch names', async () => {
    await stopServer();
    invalidateBranchLinkerCache();
    saveConfig(configPath, { ...DEFAULTS, workspaces: [WORKSPACE_PATH_A] });
    const mockExec = makeMockExec({
        branchesByPath: { [WORKSPACE_PATH_A]: ['dy/fix/ACME-123-auth', 'main'] },
    });
    await startServer(mockExec);
    const body = await getLinks();
    assert.ok('ACME-123' in body, 'Should extract ACME-123 from branch name');
    const ticketLinks = body['ACME-123'];
    assert.equal(ticketLinks.length, 1);
    assert.equal(ticketLinks[0].branchName, 'dy/fix/ACME-123-auth');
    assert.equal(ticketLinks[0].repoPath, WORKSPACE_PATH_A);
    assert.equal(ticketLinks[0].repoName, 'repo-a');
});
|
|
95
|
+
test('extracts GH issue IDs from gh-N branches', async () => {
    await stopServer();
    invalidateBranchLinkerCache();
    saveConfig(configPath, { ...DEFAULTS, workspaces: [WORKSPACE_PATH_A] });
    const mockExec = makeMockExec({
        branchesByPath: {
            // Use a branch that only the GH regex matches (no embedded uppercase-only letters)
            // to get a single clean GH-42 link. The Jira regex (/[A-Z]{2,}-\d+/gi) would also
            // match 'gh-42' since the flag is case-insensitive, so we use a prefix that isolates
            // the GH regex match by starting with 'gh-' at the very start of the branch name.
            [WORKSPACE_PATH_A]: ['gh-42-login-fix'],
        },
    });
    await startServer(mockExec);
    const body = await getLinks();
    assert.ok('GH-42' in body, 'Should extract GH-42 from branch name');
    const ghLinks = body['GH-42'];
    // Only the GH regex matches this branch — the Jira regex explicitly excludes 'GH'
    // to avoid double-matching. Verify all links point to the correct branch and repo.
    assert.ok(ghLinks.length >= 1, 'Should have at least one GH-42 link');
    assert.ok(ghLinks.every((link) => link.branchName === 'gh-42-login-fix'), 'All links should reference the correct branch');
    assert.ok(ghLinks.every((link) => link.repoPath === WORKSPACE_PATH_A), 'All links should reference the correct repo');
});
|
|
121
|
+
test('same ticket in two repos yields array of 2 BranchLinks', async () => {
    await stopServer();
    invalidateBranchLinkerCache();
    saveConfig(configPath, { ...DEFAULTS, workspaces: [WORKSPACE_PATH_A, WORKSPACE_PATH_B] });
    const mockExec = makeMockExec({
        branchesByPath: {
            [WORKSPACE_PATH_A]: ['feature/PROJ-99-payment'],
            [WORKSPACE_PATH_B]: ['bugfix/PROJ-99-payment-fix'],
        },
    });
    await startServer(mockExec);
    const body = await getLinks();
    assert.ok('PROJ-99' in body, 'Should have PROJ-99 key');
    const ticketLinks = body['PROJ-99'];
    assert.equal(ticketLinks.length, 2, 'Should have 2 BranchLinks for the same ticket across 2 repos');
    // Order is not guaranteed — compare the sorted repo paths instead.
    const repoPaths = ticketLinks.map((link) => link.repoPath).sort();
    assert.deepEqual(repoPaths, [WORKSPACE_PATH_A, WORKSPACE_PATH_B].sort());
});
|
|
142
|
+
test('ignores branches without ticket IDs', async () => {
    await stopServer();
    invalidateBranchLinkerCache();
    saveConfig(configPath, { ...DEFAULTS, workspaces: [WORKSPACE_PATH_A] });
    const mockExec = makeMockExec({
        branchesByPath: {
            [WORKSPACE_PATH_A]: ['main', 'develop', 'chore/cleanup', 'feature/new-ui'],
        },
    });
    await startServer(mockExec);
    const body = await getLinks();
    assert.equal(Object.keys(body).length, 0, 'Plain branches should produce no ticket links');
});
|
|
158
|
+
test('hasActiveSession true when branch is in active set', async () => {
    await stopServer();
    invalidateBranchLinkerCache();
    saveConfig(configPath, { ...DEFAULTS, workspaces: [WORKSPACE_PATH_A] });
    const activeBranch = 'feature/ACTIVE-1-work';
    const mockExec = makeMockExec({
        branchesByPath: {
            [WORKSPACE_PATH_A]: [activeBranch, 'feature/INACTIVE-2-other'],
        },
    });
    // Only `activeBranch` is reported as having a live session.
    const getActiveBranchNames = () => new Map([[WORKSPACE_PATH_A, new Set([activeBranch])]]);
    await startServer(mockExec, getActiveBranchNames);
    const body = await getLinks();
    const activeLinks = body['ACTIVE-1'];
    assert.ok(activeLinks, 'Should have ACTIVE-1 ticket');
    assert.equal(activeLinks.length, 1);
    assert.equal(activeLinks[0].hasActiveSession, true, 'Active branch should have hasActiveSession true');
    const inactiveLinks = body['INACTIVE-2'];
    assert.ok(inactiveLinks, 'Should have INACTIVE-2 ticket');
    assert.equal(inactiveLinks[0].hasActiveSession, false, 'Inactive branch should have hasActiveSession false');
});
|
|
184
|
+
test('invalidateBranchLinkerCache forces fresh scan', async () => {
    await stopServer();
    invalidateBranchLinkerCache();
    saveConfig(configPath, { ...DEFAULTS, workspaces: [WORKSPACE_PATH_A] });
    let gitCallCount = 0;
    const inner = makeMockExec({
        branchesByPath: {
            [WORKSPACE_PATH_A]: ['feature/SCAN-1-fresh'],
        },
    });
    // Wrap the mock so we can observe how often git is actually invoked.
    const countingExec = (cmd, args, options) => {
        if (cmd === 'git') {
            gitCallCount += 1;
        }
        return inner(cmd, args, options);
    };
    await startServer(countingExec);
    // Request 1 primes the module-level cache.
    const first = await getLinks();
    assert.ok('SCAN-1' in first, 'Should have SCAN-1 after first request');
    assert.equal(gitCallCount, 1, 'git should be called once on first request');
    // Request 2 is a cache hit — no extra git work.
    const second = await getLinks();
    assert.ok('SCAN-1' in second);
    assert.equal(gitCallCount, 1, 'git should not be called again within TTL');
    invalidateBranchLinkerCache();
    // Request 3 must hit git again now that the cache is empty.
    const third = await getLinks();
    assert.ok('SCAN-1' in third);
    assert.equal(gitCallCount, 2, 'git should be called again after cache invalidation');
});
|
|
219
|
+
test('returns empty object when no workspaces', async () => {
    await stopServer();
    invalidateBranchLinkerCache();
    saveConfig(configPath, { ...DEFAULTS, workspaces: [] });
    // With zero workspaces the router short-circuits, so execAsync is never hit.
    const mockExec = makeMockExec({});
    await startServer(mockExec);
    const body = await getLinks();
    assert.equal(Object.keys(body).length, 0, 'Should return empty object when no workspaces configured');
});
|
package/dist/test/config.test.js
CHANGED
|
@@ -223,3 +223,59 @@ test('deleteWorkspaceSettingKeys is no-op for nonexistent workspace', () => {
|
|
|
223
223
|
fs.writeFileSync(configPath, JSON.stringify(config), 'utf8');
|
|
224
224
|
assert.doesNotThrow(() => deleteWorkspaceSettingKeys(configPath, config, '/no/such/repo', ['defaultYolo']));
|
|
225
225
|
});
|
|
226
|
+
test('workspaceGroups with valid paths loads cleanly', () => {
    const cfgFile = path.join(tmpDir, 'config.json');
    const raw = {
        workspaces: ['/a/repo', '/b/repo'],
        workspaceGroups: {
            'Group A': ['/a/repo'],
            'Group B': ['/b/repo'],
        },
    };
    fs.writeFileSync(cfgFile, JSON.stringify(raw), 'utf8');
    const loaded = loadConfig(cfgFile);
    assert.deepEqual(loaded.workspaceGroups['Group A'], ['/a/repo']);
    assert.deepEqual(loaded.workspaceGroups['Group B'], ['/b/repo']);
});
|
|
239
|
+
test('workspaceGroups with invalid path filters it out', () => {
    const cfgFile = path.join(tmpDir, 'config.json');
    // '/not/in/workspaces' is not a registered workspace and should be dropped.
    const raw = {
        workspaces: ['/valid/repo'],
        workspaceGroups: {
            'My Group': ['/valid/repo', '/not/in/workspaces'],
        },
    };
    fs.writeFileSync(cfgFile, JSON.stringify(raw), 'utf8');
    const loaded = loadConfig(cfgFile);
    assert.deepEqual(loaded.workspaceGroups['My Group'], ['/valid/repo']);
});
|
|
250
|
+
test('workspaceGroups with duplicate path keeps first-group winner', () => {
    const cfgFile = path.join(tmpDir, 'config.json');
    // The same path appears in two groups; only the first group survives.
    const raw = {
        workspaces: ['/shared/repo'],
        workspaceGroups: {
            'First': ['/shared/repo'],
            'Second': ['/shared/repo'],
        },
    };
    fs.writeFileSync(cfgFile, JSON.stringify(raw), 'utf8');
    const loaded = loadConfig(cfgFile);
    assert.deepEqual(loaded.workspaceGroups['First'], ['/shared/repo']);
    assert.equal(loaded.workspaceGroups['Second'], undefined);
});
|
|
263
|
+
test('workspaceGroups undefined produces no errors', () => {
    const cfgFile = path.join(tmpDir, 'config.json');
    fs.writeFileSync(cfgFile, JSON.stringify({ workspaces: ['/some/repo'] }), 'utf8');
    const loaded = loadConfig(cfgFile);
    // A config with no workspaceGroups key loads without injecting one.
    assert.equal(loaded.workspaceGroups, undefined);
});
|
|
271
|
+
test('workspaceGroups with all-invalid paths removes empty group', () => {
    const cfgFile = path.join(tmpDir, 'config.json');
    // Every member path is unknown, so the whole group should vanish.
    const raw = {
        workspaces: ['/valid/repo'],
        workspaceGroups: {
            'Ghost Group': ['/not/here', '/also/not/here'],
        },
    };
    fs.writeFileSync(cfgFile, JSON.stringify(raw), 'utf8');
    const loaded = loadConfig(cfgFile);
    assert.equal(loaded.workspaceGroups['Ghost Group'], undefined);
});
|
|
@@ -0,0 +1,203 @@
|
|
|
1
|
+
import { test, before, after } from 'node:test';
|
|
2
|
+
import assert from 'node:assert/strict';
|
|
3
|
+
import fs from 'node:fs';
|
|
4
|
+
import path from 'node:path';
|
|
5
|
+
import os from 'node:os';
|
|
6
|
+
import express from 'express';
|
|
7
|
+
import { createIntegrationGitHubRouter } from '../server/integration-github.js';
|
|
8
|
+
import { saveConfig, DEFAULTS } from '../server/config.js';
|
|
9
|
+
let tmpDir;
|
|
10
|
+
let configPath;
|
|
11
|
+
let server;
|
|
12
|
+
let baseUrl;
|
|
13
|
+
const WORKSPACE_PATH_A = '/fake/workspace/repo-a';
|
|
14
|
+
const WORKSPACE_PATH_B = '/fake/workspace/repo-b';
|
|
15
|
+
/**
|
|
16
|
+
* Builds a minimal GhIssueItem for use in mock stdout payloads.
|
|
17
|
+
*/
|
|
18
|
+
/**
 * Builds a minimal GhIssueItem for use in mock stdout payloads.
 *
 * @param {{number?: number, title?: string, url?: string, updatedAt?: string, createdAt?: string}} [overrides]
 * @returns {object} issue item in the shape `gh issue list --json` emits
 */
function makeIssueItem(overrides = {}) {
    const {
        number = 1,
        title = 'Test Issue',
        // Destructuring defaults evaluate left-to-right, so `number` is
        // already bound here and the URL tracks the effective issue number.
        url = `https://github.com/fake/repo/issues/${number}`,
        updatedAt = '2026-03-21T00:00:00Z',
        createdAt = '2026-03-20T00:00:00Z',
    } = overrides;
    return {
        number,
        title,
        url,
        state: 'OPEN',
        labels: [],
        assignees: [],
        createdAt,
        updatedAt,
    };
}
|
|
31
|
+
/**
|
|
32
|
+
* Creates a mock execAsync that routes calls based on the command and cwd.
|
|
33
|
+
* - `gh issue list` in a given cwd → returns configured issues list or throws
|
|
34
|
+
*/
|
|
35
|
+
/**
 * Creates a mock execAsync that routes calls based on the command and cwd.
 * - `gh issue list` in a given cwd → returns configured issues list or throws
 *
 * @param {{issuesByPath?: Record<string, object[]>, errorByPath?: Record<string, Error>, globalError?: Error}} [opts]
 * @returns {(cmd: string, args: string[], options?: {cwd?: string}) => Promise<{stdout: string, stderr: string}>}
 */
function makeMockExec(opts = {}) {
    return async (cmd, args, options) => {
        // Tolerate callers that omit the options bag entirely.
        const cwd = options?.cwd ?? '';
        if (cmd === 'gh' && args[0] === 'issue' && args[1] === 'list') {
            if (opts.globalError)
                throw opts.globalError;
            if (opts.errorByPath?.[cwd])
                throw opts.errorByPath[cwd];
            const items = opts.issuesByPath?.[cwd] ?? [];
            return { stdout: JSON.stringify(items), stderr: '' };
        }
        // Any other command is a test bug — fail loudly so it surfaces.
        throw new Error(`Unexpected exec call: ${cmd} ${args.join(' ')}`);
    };
}
|
|
51
|
+
/**
 * Boots an express app with the GitHub integration router mounted, listening
 * on an ephemeral localhost port. Stores the server handle and base URL in
 * module state for the helpers and tests to use.
 *
 * @param {Function} execAsyncFn - mock execAsync injected into the router
 * @returns {Promise<void>} resolves once the server is accepting connections
 */
function startServer(execAsyncFn) {
    return new Promise((resolve) => {
        const app = express();
        app.use(express.json());
        app.use('/integration-github', createIntegrationGitHubRouter({ configPath, execAsync: execAsyncFn }));
        server = app.listen(0, '127.0.0.1', () => {
            const addr = server.address();
            if (addr && typeof addr === 'object') {
                baseUrl = `http://127.0.0.1:${addr.port}`;
            }
            resolve();
        });
    });
}
|
|
66
|
+
/**
 * Gracefully shuts down the current test server, if one is running.
 * Clears the module-level handle first so repeated calls are safe no-ops
 * rather than double-closing an already-stopped server (which would invoke
 * the close callback with ERR_SERVER_NOT_RUNNING).
 *
 * @returns {Promise<void>}
 */
function stopServer() {
    return new Promise((resolve) => {
        if (!server) {
            resolve();
            return;
        }
        const handle = server;
        server = undefined;
        handle.close(() => resolve());
    });
}
|
|
74
|
+
/** Fetches the merged issues payload from the running test server. */
async function getIssues() {
    const response = await fetch(`${baseUrl}/integration-github/issues`);
    return response.json();
}
|
|
78
|
+
before(() => {
    // Fresh scratch directory per suite run; the config file lives inside it.
    tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'integration-github-test-'));
    configPath = path.join(tmpDir, 'config.json');
});
|
|
82
|
+
after(async () => {
    await stopServer();
    // Remove the scratch directory and everything beneath it.
    fs.rmSync(tmpDir, { recursive: true, force: true });
});
|
|
86
|
+
test('returns issues from all workspace repos merged and sorted', async () => {
    await stopServer();
    saveConfig(configPath, { ...DEFAULTS, workspaces: [WORKSPACE_PATH_A, WORKSPACE_PATH_B] });
    const mockExec = makeMockExec({
        issuesByPath: {
            [WORKSPACE_PATH_A]: [
                makeIssueItem({ number: 10, title: 'Issue A', updatedAt: '2026-03-21T10:00:00Z' }),
                makeIssueItem({ number: 11, title: 'Issue A2', updatedAt: '2026-03-19T10:00:00Z' }),
            ],
            [WORKSPACE_PATH_B]: [
                makeIssueItem({ number: 20, title: 'Issue B', updatedAt: '2026-03-20T10:00:00Z' }),
            ],
        },
    });
    await startServer(mockExec);
    const body = await getIssues();
    assert.equal(body.error, undefined, `Unexpected error: ${body.error}`);
    assert.equal(body.issues.length, 3, 'Should return all 3 issues from both repos');
    // Newest-first ordering by updatedAt.
    const updatedAts = body.issues.map((issue) => issue.updatedAt);
    assert.deepEqual(updatedAts, [
        '2026-03-21T10:00:00Z',
        '2026-03-20T10:00:00Z',
        '2026-03-19T10:00:00Z',
    ]);
    // Each issue should carry its originating repo metadata.
    const issue10 = body.issues.find((issue) => issue.number === 10);
    assert.equal(issue10?.repoPath, WORKSPACE_PATH_A);
    assert.equal(issue10?.repoName, 'repo-a');
    const issue20 = body.issues.find((issue) => issue.number === 20);
    assert.equal(issue20?.repoPath, WORKSPACE_PATH_B);
    assert.equal(issue20?.repoName, 'repo-b');
});
|
|
122
|
+
test('returns no_workspaces error when empty', async () => {
    await stopServer();
    saveConfig(configPath, { ...DEFAULTS, workspaces: [] });
    // The router should early-return before touching execAsync; the mock
    // would throw on any unexpected call, proving it was never invoked.
    const mockExec = makeMockExec({});
    await startServer(mockExec);
    const body = await getIssues();
    assert.equal(body.error, 'no_workspaces');
    assert.equal(body.issues.length, 0);
});
|
|
135
|
+
test('returns gh_not_in_path when gh not found', async () => {
    await stopServer();
    saveConfig(configPath, { ...DEFAULTS, workspaces: [WORKSPACE_PATH_A] });
    // Simulate the gh binary missing from PATH (spawn ENOENT).
    const err = new Error('spawn gh ENOENT');
    err.code = 'ENOENT';
    const mockExec = makeMockExec({ globalError: err });
    await startServer(mockExec);
    const body = await getIssues();
    assert.equal(body.error, 'gh_not_in_path');
    assert.equal(body.issues.length, 0);
});
|
|
149
|
+
test('caches per-repo within TTL — gh called once per repo for two requests', async () => {
    await stopServer();
    saveConfig(configPath, { ...DEFAULTS, workspaces: [WORKSPACE_PATH_A, WORKSPACE_PATH_B] });
    let ghCallCount = 0;
    const inner = makeMockExec({
        issuesByPath: {
            [WORKSPACE_PATH_A]: [makeIssueItem({ number: 1 })],
            [WORKSPACE_PATH_B]: [makeIssueItem({ number: 2 })],
        },
    });
    // Wrap the mock so we can observe how often gh is actually invoked.
    const countingExec = (cmd, args, options) => {
        if (cmd === 'gh' && args[0] === 'issue') {
            ghCallCount += 1;
        }
        return inner(cmd, args, options);
    };
    await startServer(countingExec);
    // Request 1 primes the per-repo cache for both workspaces.
    const first = await getIssues();
    assert.equal(first.error, undefined);
    assert.equal(first.issues.length, 2);
    assert.equal(ghCallCount, 2, 'gh should be called once per repo on first request');
    // Request 2 is a cache hit — no extra gh work.
    const second = await getIssues();
    assert.equal(second.error, undefined);
    assert.equal(second.issues.length, 2);
    assert.equal(ghCallCount, 2, 'gh should not be called again within TTL (cache hit)');
});
|
|
181
|
+
test('partial failure: repo that throws still returns others', async () => {
    await stopServer();
    saveConfig(configPath, { ...DEFAULTS, workspaces: [WORKSPACE_PATH_A, WORKSPACE_PATH_B] });
    // repo-b throws a generic (non-ENOENT) error, which is treated as non-fatal.
    const mockExec = makeMockExec({
        issuesByPath: {
            [WORKSPACE_PATH_A]: [makeIssueItem({ number: 99, title: 'Surviving issue' })],
        },
        errorByPath: {
            [WORKSPACE_PATH_B]: new Error('git command failed'),
        },
    });
    await startServer(mockExec);
    const body = await getIssues();
    // No top-level error — per-repo failures are swallowed silently.
    assert.equal(body.error, undefined, `Unexpected error: ${body.error}`);
    assert.equal(body.issues.length, 1, 'Should return the one issue from the succeeding repo');
    assert.equal(body.issues[0]?.number, 99);
    assert.equal(body.issues[0]?.repoPath, WORKSPACE_PATH_A);
});
|