@indigoai-us/hq-cli 5.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/__tests__/credentials.test.d.ts +5 -0
- package/dist/__tests__/credentials.test.d.ts.map +1 -0
- package/dist/__tests__/credentials.test.js +169 -0
- package/dist/__tests__/credentials.test.js.map +1 -0
- package/dist/commands/add.d.ts +6 -0
- package/dist/commands/add.d.ts.map +1 -0
- package/dist/commands/add.js +60 -0
- package/dist/commands/add.js.map +1 -0
- package/dist/commands/auth.d.ts +17 -0
- package/dist/commands/auth.d.ts.map +1 -0
- package/dist/commands/auth.js +269 -0
- package/dist/commands/auth.js.map +1 -0
- package/dist/commands/cloud-setup.d.ts +19 -0
- package/dist/commands/cloud-setup.d.ts.map +1 -0
- package/dist/commands/cloud-setup.js +206 -0
- package/dist/commands/cloud-setup.js.map +1 -0
- package/dist/commands/cloud.d.ts +16 -0
- package/dist/commands/cloud.d.ts.map +1 -0
- package/dist/commands/cloud.js +263 -0
- package/dist/commands/cloud.js.map +1 -0
- package/dist/commands/initial-upload.d.ts +67 -0
- package/dist/commands/initial-upload.d.ts.map +1 -0
- package/dist/commands/initial-upload.js +205 -0
- package/dist/commands/initial-upload.js.map +1 -0
- package/dist/commands/list.d.ts +6 -0
- package/dist/commands/list.d.ts.map +1 -0
- package/dist/commands/list.js +55 -0
- package/dist/commands/list.js.map +1 -0
- package/dist/commands/sync.d.ts +6 -0
- package/dist/commands/sync.d.ts.map +1 -0
- package/dist/commands/sync.js +104 -0
- package/dist/commands/sync.js.map +1 -0
- package/dist/commands/update.d.ts +7 -0
- package/dist/commands/update.d.ts.map +1 -0
- package/dist/commands/update.js +60 -0
- package/dist/commands/update.js.map +1 -0
- package/dist/index.d.ts +6 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +36 -0
- package/dist/index.js.map +1 -0
- package/dist/strategies/link.d.ts +7 -0
- package/dist/strategies/link.d.ts.map +1 -0
- package/dist/strategies/link.js +51 -0
- package/dist/strategies/link.js.map +1 -0
- package/dist/strategies/merge.d.ts +7 -0
- package/dist/strategies/merge.d.ts.map +1 -0
- package/dist/strategies/merge.js +110 -0
- package/dist/strategies/merge.js.map +1 -0
- package/dist/sync-worker.d.ts +11 -0
- package/dist/sync-worker.d.ts.map +1 -0
- package/dist/sync-worker.js +77 -0
- package/dist/sync-worker.js.map +1 -0
- package/dist/types.d.ts +41 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/types.js +5 -0
- package/dist/types.js.map +1 -0
- package/dist/utils/api-client.d.ts +26 -0
- package/dist/utils/api-client.d.ts.map +1 -0
- package/dist/utils/api-client.js +87 -0
- package/dist/utils/api-client.js.map +1 -0
- package/dist/utils/credentials.d.ts +44 -0
- package/dist/utils/credentials.d.ts.map +1 -0
- package/dist/utils/credentials.js +101 -0
- package/dist/utils/credentials.js.map +1 -0
- package/dist/utils/git.d.ts +13 -0
- package/dist/utils/git.d.ts.map +1 -0
- package/dist/utils/git.js +70 -0
- package/dist/utils/git.js.map +1 -0
- package/dist/utils/manifest.d.ts +16 -0
- package/dist/utils/manifest.d.ts.map +1 -0
- package/dist/utils/manifest.js +95 -0
- package/dist/utils/manifest.js.map +1 -0
- package/dist/utils/sync.d.ts +125 -0
- package/dist/utils/sync.d.ts.map +1 -0
- package/dist/utils/sync.js +291 -0
- package/dist/utils/sync.js.map +1 -0
- package/package.json +36 -0
- package/src/__tests__/cloud-setup.test.ts +117 -0
- package/src/__tests__/credentials.test.ts +203 -0
- package/src/__tests__/initial-upload.test.ts +414 -0
- package/src/__tests__/sync.test.ts +627 -0
- package/src/commands/add.ts +74 -0
- package/src/commands/auth.ts +303 -0
- package/src/commands/cloud-setup.ts +251 -0
- package/src/commands/cloud.ts +300 -0
- package/src/commands/initial-upload.ts +263 -0
- package/src/commands/list.ts +66 -0
- package/src/commands/sync.ts +149 -0
- package/src/commands/update.ts +71 -0
- package/src/hq-cloud.d.ts +19 -0
- package/src/index.ts +46 -0
- package/src/strategies/link.ts +62 -0
- package/src/strategies/merge.ts +142 -0
- package/src/sync-worker.ts +82 -0
- package/src/types.ts +47 -0
- package/src/utils/api-client.ts +111 -0
- package/src/utils/credentials.ts +124 -0
- package/src/utils/git.ts +74 -0
- package/src/utils/manifest.ts +111 -0
- package/src/utils/sync.ts +381 -0
- package/tsconfig.json +9 -0
- package/vitest.config.ts +8 -0
|
@@ -0,0 +1,627 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Tests for sync utilities (utils/sync.ts)
|
|
3
|
+
*
|
|
4
|
+
* Covers:
|
|
5
|
+
* - File hashing (hashFile, hashBuffer)
|
|
6
|
+
* - Ignore patterns (shouldIgnore)
|
|
7
|
+
* - Directory walking (walkDir)
|
|
8
|
+
* - Local manifest computation (computeLocalManifest)
|
|
9
|
+
* - Sync state persistence (readSyncState, writeSyncState)
|
|
10
|
+
* - API operations (syncDiff, uploadFile, downloadFile) — mocked
|
|
11
|
+
*/
|
|
12
|
+
|
|
13
|
+
import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest';
|
|
14
|
+
import * as fs from 'fs';
|
|
15
|
+
import * as path from 'path';
|
|
16
|
+
import * as os from 'os';
|
|
17
|
+
import * as crypto from 'crypto';
|
|
18
|
+
import {
|
|
19
|
+
hashFile,
|
|
20
|
+
hashBuffer,
|
|
21
|
+
shouldIgnore,
|
|
22
|
+
walkDir,
|
|
23
|
+
computeLocalManifest,
|
|
24
|
+
readSyncState,
|
|
25
|
+
writeSyncState,
|
|
26
|
+
getSyncStatePath,
|
|
27
|
+
type CloudSyncState,
|
|
28
|
+
type ManifestEntry,
|
|
29
|
+
} from '../utils/sync.js';
|
|
30
|
+
|
|
31
|
+
// ── Test helpers ─────────────────────────────────────────────────────────────
|
|
32
|
+
|
|
33
|
+
let tmpDir: string;
|
|
34
|
+
|
|
35
|
+
beforeEach(() => {
|
|
36
|
+
tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'hq-sync-test-'));
|
|
37
|
+
});
|
|
38
|
+
|
|
39
|
+
afterEach(() => {
|
|
40
|
+
try {
|
|
41
|
+
fs.rmSync(tmpDir, { recursive: true, force: true });
|
|
42
|
+
} catch {
|
|
43
|
+
// Ignore cleanup errors on Windows
|
|
44
|
+
}
|
|
45
|
+
});
|
|
46
|
+
|
|
47
|
+
/** Create a file in tmpDir with given relative path and content. */
|
|
48
|
+
function createFile(relativePath: string, content: string): string {
|
|
49
|
+
const absPath = path.join(tmpDir, relativePath);
|
|
50
|
+
const dir = path.dirname(absPath);
|
|
51
|
+
if (!fs.existsSync(dir)) {
|
|
52
|
+
fs.mkdirSync(dir, { recursive: true });
|
|
53
|
+
}
|
|
54
|
+
fs.writeFileSync(absPath, content);
|
|
55
|
+
return absPath;
|
|
56
|
+
}
|
|
57
|
+
|
|
58
|
+
// ── hashFile ─────────────────────────────────────────────────────────────────
|
|
59
|
+
|
|
60
|
+
describe('hashFile', () => {
|
|
61
|
+
it('returns SHA-256 hex digest of file contents', () => {
|
|
62
|
+
const content = 'Hello, HQ Cloud!';
|
|
63
|
+
const filePath = createFile('test.txt', content);
|
|
64
|
+
const expected = crypto.createHash('sha256').update(content).digest('hex');
|
|
65
|
+
expect(hashFile(filePath)).toBe(expected);
|
|
66
|
+
});
|
|
67
|
+
|
|
68
|
+
it('returns different hashes for different content', () => {
|
|
69
|
+
const file1 = createFile('a.txt', 'content A');
|
|
70
|
+
const file2 = createFile('b.txt', 'content B');
|
|
71
|
+
expect(hashFile(file1)).not.toBe(hashFile(file2));
|
|
72
|
+
});
|
|
73
|
+
|
|
74
|
+
it('returns same hash for identical content in different files', () => {
|
|
75
|
+
const file1 = createFile('copy1.txt', 'identical');
|
|
76
|
+
const file2 = createFile('copy2.txt', 'identical');
|
|
77
|
+
expect(hashFile(file1)).toBe(hashFile(file2));
|
|
78
|
+
});
|
|
79
|
+
|
|
80
|
+
it('handles empty files', () => {
|
|
81
|
+
const filePath = createFile('empty.txt', '');
|
|
82
|
+
const expected = crypto.createHash('sha256').update('').digest('hex');
|
|
83
|
+
expect(hashFile(filePath)).toBe(expected);
|
|
84
|
+
});
|
|
85
|
+
|
|
86
|
+
it('handles binary content', () => {
|
|
87
|
+
const absPath = path.join(tmpDir, 'binary.bin');
|
|
88
|
+
const buffer = Buffer.from([0x00, 0xff, 0x42, 0x13, 0x37]);
|
|
89
|
+
fs.writeFileSync(absPath, buffer);
|
|
90
|
+
const expected = crypto.createHash('sha256').update(buffer).digest('hex');
|
|
91
|
+
expect(hashFile(absPath)).toBe(expected);
|
|
92
|
+
});
|
|
93
|
+
});
|
|
94
|
+
|
|
95
|
+
// ── hashBuffer ───────────────────────────────────────────────────────────────
|
|
96
|
+
|
|
97
|
+
describe('hashBuffer', () => {
|
|
98
|
+
it('returns SHA-256 hex digest of a buffer', () => {
|
|
99
|
+
const buf = Buffer.from('test data');
|
|
100
|
+
const expected = crypto.createHash('sha256').update(buf).digest('hex');
|
|
101
|
+
expect(hashBuffer(buf)).toBe(expected);
|
|
102
|
+
});
|
|
103
|
+
|
|
104
|
+
it('returns same hash as hashFile for same content', () => {
|
|
105
|
+
const content = 'matching content';
|
|
106
|
+
const filePath = createFile('match.txt', content);
|
|
107
|
+
expect(hashBuffer(Buffer.from(content))).toBe(hashFile(filePath));
|
|
108
|
+
});
|
|
109
|
+
});
|
|
110
|
+
|
|
111
|
+
// ── shouldIgnore ─────────────────────────────────────────────────────────────
|
|
112
|
+
|
|
113
|
+
describe('shouldIgnore', () => {
|
|
114
|
+
it('ignores .git directory paths', () => {
|
|
115
|
+
expect(shouldIgnore('.git/config')).toBe(true);
|
|
116
|
+
expect(shouldIgnore('sub/.git/HEAD')).toBe(true);
|
|
117
|
+
});
|
|
118
|
+
|
|
119
|
+
it('ignores node_modules', () => {
|
|
120
|
+
expect(shouldIgnore('node_modules/package/index.js')).toBe(true);
|
|
121
|
+
expect(shouldIgnore('packages/cli/node_modules/dep/lib.js')).toBe(true);
|
|
122
|
+
});
|
|
123
|
+
|
|
124
|
+
it('ignores .claude directory', () => {
|
|
125
|
+
expect(shouldIgnore('.claude/config.json')).toBe(true);
|
|
126
|
+
});
|
|
127
|
+
|
|
128
|
+
it('ignores dist directory', () => {
|
|
129
|
+
expect(shouldIgnore('dist/index.js')).toBe(true);
|
|
130
|
+
expect(shouldIgnore('packages/lib/dist/types.d.ts')).toBe(true);
|
|
131
|
+
});
|
|
132
|
+
|
|
133
|
+
it('ignores .log files', () => {
|
|
134
|
+
expect(shouldIgnore('debug.log')).toBe(true);
|
|
135
|
+
expect(shouldIgnore('logs/app.log')).toBe(true);
|
|
136
|
+
});
|
|
137
|
+
|
|
138
|
+
it('ignores .DS_Store', () => {
|
|
139
|
+
expect(shouldIgnore('.DS_Store')).toBe(true);
|
|
140
|
+
expect(shouldIgnore('sub/.DS_Store')).toBe(true);
|
|
141
|
+
});
|
|
142
|
+
|
|
143
|
+
it('ignores .env files', () => {
|
|
144
|
+
expect(shouldIgnore('.env')).toBe(true);
|
|
145
|
+
expect(shouldIgnore('.env.local')).toBe(true);
|
|
146
|
+
});
|
|
147
|
+
|
|
148
|
+
it('does not ignore normal files', () => {
|
|
149
|
+
expect(shouldIgnore('src/index.ts')).toBe(false);
|
|
150
|
+
expect(shouldIgnore('README.md')).toBe(false);
|
|
151
|
+
expect(shouldIgnore('workers/dev/worker.yaml')).toBe(false);
|
|
152
|
+
expect(shouldIgnore('package.json')).toBe(false);
|
|
153
|
+
});
|
|
154
|
+
|
|
155
|
+
it('handles forward and back slashes', () => {
|
|
156
|
+
expect(shouldIgnore('node_modules\\dep\\index.js')).toBe(true);
|
|
157
|
+
expect(shouldIgnore('.git\\config')).toBe(true);
|
|
158
|
+
});
|
|
159
|
+
|
|
160
|
+
it('ignores cdk.out directory', () => {
|
|
161
|
+
expect(shouldIgnore('cdk.out/manifest.json')).toBe(true);
|
|
162
|
+
});
|
|
163
|
+
|
|
164
|
+
it('ignores __pycache__ directory', () => {
|
|
165
|
+
expect(shouldIgnore('__pycache__/module.cpython-311.pyc')).toBe(true);
|
|
166
|
+
});
|
|
167
|
+
});
|
|
168
|
+
|
|
169
|
+
// ── walkDir ──────────────────────────────────────────────────────────────────
|
|
170
|
+
|
|
171
|
+
describe('walkDir', () => {
|
|
172
|
+
it('returns empty array for empty directory', () => {
|
|
173
|
+
expect(walkDir(tmpDir)).toEqual([]);
|
|
174
|
+
});
|
|
175
|
+
|
|
176
|
+
it('finds files in the root directory', () => {
|
|
177
|
+
createFile('a.txt', 'a');
|
|
178
|
+
createFile('b.txt', 'b');
|
|
179
|
+
const files = walkDir(tmpDir);
|
|
180
|
+
expect(files.sort()).toEqual(['a.txt', 'b.txt']);
|
|
181
|
+
});
|
|
182
|
+
|
|
183
|
+
it('finds files in subdirectories with forward-slash paths', () => {
|
|
184
|
+
createFile('src/index.ts', 'code');
|
|
185
|
+
createFile('src/utils/helper.ts', 'helper');
|
|
186
|
+
const files = walkDir(tmpDir);
|
|
187
|
+
expect(files.sort()).toEqual(['src/index.ts', 'src/utils/helper.ts']);
|
|
188
|
+
});
|
|
189
|
+
|
|
190
|
+
it('skips .git directories', () => {
|
|
191
|
+
createFile('.git/config', 'gitconfig');
|
|
192
|
+
createFile('src/main.ts', 'code');
|
|
193
|
+
const files = walkDir(tmpDir);
|
|
194
|
+
expect(files).toEqual(['src/main.ts']);
|
|
195
|
+
});
|
|
196
|
+
|
|
197
|
+
it('skips node_modules', () => {
|
|
198
|
+
createFile('node_modules/dep/index.js', 'code');
|
|
199
|
+
createFile('package.json', '{}');
|
|
200
|
+
const files = walkDir(tmpDir);
|
|
201
|
+
expect(files).toEqual(['package.json']);
|
|
202
|
+
});
|
|
203
|
+
|
|
204
|
+
it('skips .log files', () => {
|
|
205
|
+
createFile('app.log', 'log data');
|
|
206
|
+
createFile('src/app.ts', 'code');
|
|
207
|
+
const files = walkDir(tmpDir);
|
|
208
|
+
expect(files).toEqual(['src/app.ts']);
|
|
209
|
+
});
|
|
210
|
+
|
|
211
|
+
it('skips .DS_Store', () => {
|
|
212
|
+
createFile('.DS_Store', '');
|
|
213
|
+
createFile('readme.md', 'hi');
|
|
214
|
+
const files = walkDir(tmpDir);
|
|
215
|
+
expect(files).toEqual(['readme.md']);
|
|
216
|
+
});
|
|
217
|
+
|
|
218
|
+
it('handles deeply nested structures', () => {
|
|
219
|
+
createFile('a/b/c/d/e/file.txt', 'deep');
|
|
220
|
+
const files = walkDir(tmpDir);
|
|
221
|
+
expect(files).toEqual(['a/b/c/d/e/file.txt']);
|
|
222
|
+
});
|
|
223
|
+
|
|
224
|
+
it('returns empty for non-existent directory', () => {
|
|
225
|
+
const files = walkDir(path.join(tmpDir, 'does-not-exist'));
|
|
226
|
+
expect(files).toEqual([]);
|
|
227
|
+
});
|
|
228
|
+
});
|
|
229
|
+
|
|
230
|
+
// ── computeLocalManifest ─────────────────────────────────────────────────────
|
|
231
|
+
|
|
232
|
+
describe('computeLocalManifest', () => {
|
|
233
|
+
it('returns empty manifest for empty directory', () => {
|
|
234
|
+
expect(computeLocalManifest(tmpDir)).toEqual([]);
|
|
235
|
+
});
|
|
236
|
+
|
|
237
|
+
it('computes manifest with correct hash, size, and path', () => {
|
|
238
|
+
const content = 'manifest test content';
|
|
239
|
+
createFile('test.txt', content);
|
|
240
|
+
const manifest = computeLocalManifest(tmpDir);
|
|
241
|
+
|
|
242
|
+
expect(manifest).toHaveLength(1);
|
|
243
|
+
const entry = manifest[0];
|
|
244
|
+
expect(entry.path).toBe('test.txt');
|
|
245
|
+
expect(entry.hash).toBe(
|
|
246
|
+
crypto.createHash('sha256').update(content).digest('hex')
|
|
247
|
+
);
|
|
248
|
+
expect(entry.size).toBe(Buffer.byteLength(content));
|
|
249
|
+
expect(entry.lastModified).toBeTruthy();
|
|
250
|
+
// Verify it's a valid ISO date
|
|
251
|
+
expect(new Date(entry.lastModified).toISOString()).toBe(entry.lastModified);
|
|
252
|
+
});
|
|
253
|
+
|
|
254
|
+
it('computes manifest for multiple files', () => {
|
|
255
|
+
createFile('a.txt', 'aaa');
|
|
256
|
+
createFile('src/b.ts', 'bbb');
|
|
257
|
+
createFile('deep/nested/c.md', 'ccc');
|
|
258
|
+
|
|
259
|
+
const manifest = computeLocalManifest(tmpDir);
|
|
260
|
+
expect(manifest).toHaveLength(3);
|
|
261
|
+
|
|
262
|
+
const paths = manifest.map((e) => e.path).sort();
|
|
263
|
+
expect(paths).toEqual(['a.txt', 'deep/nested/c.md', 'src/b.ts']);
|
|
264
|
+
});
|
|
265
|
+
|
|
266
|
+
it('excludes ignored files from manifest', () => {
|
|
267
|
+
createFile('.git/HEAD', 'ref: refs/heads/main');
|
|
268
|
+
createFile('node_modules/dep/lib.js', 'code');
|
|
269
|
+
createFile('debug.log', 'logs');
|
|
270
|
+
createFile('src/index.ts', 'real code');
|
|
271
|
+
|
|
272
|
+
const manifest = computeLocalManifest(tmpDir);
|
|
273
|
+
expect(manifest).toHaveLength(1);
|
|
274
|
+
expect(manifest[0].path).toBe('src/index.ts');
|
|
275
|
+
});
|
|
276
|
+
|
|
277
|
+
it('includes correct file sizes', () => {
|
|
278
|
+
createFile('small.txt', 'hi');
|
|
279
|
+
createFile('bigger.txt', 'a'.repeat(1000));
|
|
280
|
+
|
|
281
|
+
const manifest = computeLocalManifest(tmpDir);
|
|
282
|
+
const small = manifest.find((e) => e.path === 'small.txt');
|
|
283
|
+
const bigger = manifest.find((e) => e.path === 'bigger.txt');
|
|
284
|
+
|
|
285
|
+
expect(small!.size).toBe(2);
|
|
286
|
+
expect(bigger!.size).toBe(1000);
|
|
287
|
+
});
|
|
288
|
+
});
|
|
289
|
+
|
|
290
|
+
// ── Sync state persistence ───────────────────────────────────────────────────
|
|
291
|
+
|
|
292
|
+
describe('readSyncState', () => {
  it('returns default state when no file exists', () => {
    const state = readSyncState(tmpDir);
    // A missing state file must not throw — callers rely on this safe default.
    expect(state).toEqual({ running: false, errors: [] });
  });

  it('reads persisted state from disk', () => {
    // Fixture is written as raw JSON, bypassing writeSyncState, so this test
    // only exercises the read path.
    const saved: CloudSyncState = {
      running: true,
      pid: 12345,
      lastSync: '2026-02-13T10:00:00.000Z',
      fileCount: 42,
      errors: [],
    };
    fs.writeFileSync(
      getSyncStatePath(tmpDir),
      JSON.stringify(saved)
    );

    const state = readSyncState(tmpDir);
    expect(state.running).toBe(true);
    expect(state.pid).toBe(12345);
    expect(state.lastSync).toBe('2026-02-13T10:00:00.000Z');
    expect(state.fileCount).toBe(42);
    expect(state.errors).toEqual([]);
  });

  it('returns default state when file is corrupted', () => {
    // Invalid JSON must be treated like a missing file, not bubble a parse error.
    fs.writeFileSync(getSyncStatePath(tmpDir), 'not valid json');
    const state = readSyncState(tmpDir);
    expect(state).toEqual({ running: false, errors: [] });
  });
});

describe('writeSyncState', () => {
  it('writes state to disk and can be read back', () => {
    const state: CloudSyncState = {
      running: false,
      lastSync: '2026-02-13T12:00:00.000Z',
      fileCount: 100,
      errors: ['some error'],
    };
    writeSyncState(tmpDir, state);

    // Inspect the raw JSON on disk rather than going through readSyncState,
    // so a write-path bug can't be masked by a matching read-path bug.
    const raw = fs.readFileSync(getSyncStatePath(tmpDir), 'utf-8');
    const parsed = JSON.parse(raw);
    expect(parsed.running).toBe(false);
    expect(parsed.lastSync).toBe('2026-02-13T12:00:00.000Z');
    expect(parsed.fileCount).toBe(100);
    expect(parsed.errors).toEqual(['some error']);
  });

  it('overwrites existing state', () => {
    writeSyncState(tmpDir, { running: true, pid: 111, errors: [] });
    writeSyncState(tmpDir, { running: false, errors: ['updated'] });

    // The second write fully replaces the first: pid must not survive.
    const state = readSyncState(tmpDir);
    expect(state.running).toBe(false);
    expect(state.pid).toBeUndefined();
    expect(state.errors).toEqual(['updated']);
  });
});

describe('getSyncStatePath', () => {
  it('returns path ending with .hq-cloud-sync.json', () => {
    const p = getSyncStatePath(tmpDir);
    expect(p).toContain('.hq-cloud-sync.json');
    // The state file lives inside the synced directory itself.
    expect(p.startsWith(tmpDir)).toBe(true);
  });
});
|
|
362
|
+
|
|
363
|
+
// ── API operation tests (mocked) ─────────────────────────────────────────────
|
|
364
|
+
|
|
365
|
+
// We mock the api-client module to test the sync operations without a real server.
// NOTE(review): vitest hoists vi.mock() calls to the top of the module, so this
// mock also applies to the imports declared at the top of the file.
vi.mock('../utils/api-client.js', () => ({
  apiRequest: vi.fn(),
  getApiUrl: vi.fn(() => 'https://api.test.local'),
}));

// These imports resolve through the mocked module above (hoisting).
import { apiRequest } from '../utils/api-client.js';
import { syncDiff, uploadFile, downloadFile, pushChanges, pullChanges } from '../utils/sync.js';

// Typed handle used by the tests below to program per-call responses.
const mockApiRequest = vi.mocked(apiRequest);
|
|
375
|
+
|
|
376
|
+
describe('syncDiff (mocked API)', () => {
  beforeEach(() => {
    // Reset call history and queued responses between tests; the
    // mockResolvedValueOnce queues below are consumed in strict order.
    vi.clearAllMocks();
  });

  it('sends local manifest to POST /api/files/sync and returns diff', async () => {
    createFile('src/index.ts', 'code');

    mockApiRequest.mockResolvedValueOnce({
      ok: true,
      status: 200,
      data: { toUpload: ['src/index.ts'], toDownload: [] },
    });

    const diff = await syncDiff(tmpDir);
    expect(diff.toUpload).toEqual(['src/index.ts']);
    expect(diff.toDownload).toEqual([]);

    // Verify API was called correctly
    expect(mockApiRequest).toHaveBeenCalledWith(
      'POST',
      '/api/files/sync',
      expect.objectContaining({
        manifest: expect.arrayContaining([
          expect.objectContaining({ path: 'src/index.ts' }),
        ]),
      })
    );
  });

  it('throws on API error', async () => {
    // Non-ok response from the API must surface as a thrown error, not a result.
    mockApiRequest.mockResolvedValueOnce({
      ok: false,
      status: 500,
      error: 'Internal server error',
    });

    await expect(syncDiff(tmpDir)).rejects.toThrow('Sync diff failed');
  });
});
|
|
416
|
+
|
|
417
|
+
describe('uploadFile (mocked API)', () => {
  beforeEach(() => {
    vi.clearAllMocks();
  });

  it('sends file content as base64 to POST /api/files/upload', async () => {
    const content = 'file content to upload';
    createFile('upload.txt', content);

    mockApiRequest.mockResolvedValueOnce({ ok: true, status: 200 });

    await uploadFile('upload.txt', tmpDir);

    // Payload contract: relative path, base64-encoded body, byte size.
    expect(mockApiRequest).toHaveBeenCalledWith(
      'POST',
      '/api/files/upload',
      expect.objectContaining({
        path: 'upload.txt',
        content: Buffer.from(content).toString('base64'),
        size: Buffer.byteLength(content),
      })
    );
  });

  it('throws on upload failure', async () => {
    createFile('fail.txt', 'content');

    // API-level rejection (e.g. payload too large) must become a thrown error.
    mockApiRequest.mockResolvedValueOnce({
      ok: false,
      status: 413,
      error: 'File too large',
    });

    await expect(uploadFile('fail.txt', tmpDir)).rejects.toThrow('Upload failed');
  });
});
|
|
453
|
+
|
|
454
|
+
describe('downloadFile (mocked API)', () => {
  beforeEach(() => {
    vi.clearAllMocks();
  });

  it('downloads and writes file content from API', async () => {
    const content = 'downloaded content';
    const base64 = Buffer.from(content).toString('base64');

    mockApiRequest.mockResolvedValueOnce({
      ok: true,
      status: 200,
      data: { content: base64, size: Buffer.byteLength(content) },
    });

    await downloadFile('new-file.txt', tmpDir);

    // The decoded body must land at the relative path under the sync root.
    const written = fs.readFileSync(path.join(tmpDir, 'new-file.txt'), 'utf-8');
    expect(written).toBe(content);
  });

  it('creates subdirectories as needed', async () => {
    const content = 'nested';
    const base64 = Buffer.from(content).toString('base64');

    mockApiRequest.mockResolvedValueOnce({
      ok: true,
      status: 200,
      data: { content: base64, size: content.length },
    });

    // None of deep/nested/dir exists yet; downloadFile must mkdir them.
    await downloadFile('deep/nested/dir/file.txt', tmpDir);

    const absPath = path.join(tmpDir, 'deep', 'nested', 'dir', 'file.txt');
    expect(fs.existsSync(absPath)).toBe(true);
    expect(fs.readFileSync(absPath, 'utf-8')).toBe(content);
  });

  it('throws on download failure', async () => {
    mockApiRequest.mockResolvedValueOnce({
      ok: false,
      status: 404,
      error: 'Not found',
    });

    await expect(downloadFile('missing.txt', tmpDir)).rejects.toThrow('Download failed');
  });
});
|
|
502
|
+
|
|
503
|
+
describe('pushChanges (mocked API)', () => {
  // The mockResolvedValueOnce queue below is consumed in call order:
  // the first apiRequest call is always syncDiff, then one per upload.
  beforeEach(() => {
    vi.clearAllMocks();
  });

  it('uploads files identified by syncDiff', async () => {
    createFile('a.txt', 'aaa');
    createFile('b.txt', 'bbb');

    // First call: syncDiff
    mockApiRequest.mockResolvedValueOnce({
      ok: true,
      status: 200,
      data: { toUpload: ['a.txt'], toDownload: [] },
    });
    // Second call: uploadFile for a.txt
    mockApiRequest.mockResolvedValueOnce({ ok: true, status: 200 });

    const result = await pushChanges(tmpDir);
    expect(result.uploaded).toBe(1);
    expect(result.errors).toEqual([]);
  });

  it('collects errors without stopping when file is missing locally', async () => {
    createFile('ok.txt', 'ok');
    // Note: missing.txt does NOT exist on disk, so uploadFile will throw
    // at fs.readFileSync before reaching apiRequest — no mock needed for it.

    // syncDiff returns two files to upload
    mockApiRequest.mockResolvedValueOnce({
      ok: true,
      status: 200,
      data: { toUpload: ['ok.txt', 'missing.txt'], toDownload: [] },
    });
    // Upload for ok.txt succeeds
    mockApiRequest.mockResolvedValueOnce({ ok: true, status: 200 });
    // No mock for missing.txt — it throws before calling apiRequest

    const result = await pushChanges(tmpDir);
    // ok.txt succeeded
    expect(result.uploaded).toBe(1);
    // missing.txt failed (fs.readFileSync throws)
    expect(result.errors.length).toBe(1);
  });

  it('collects errors when API rejects upload', async () => {
    createFile('ok.txt', 'ok');
    createFile('rejected.txt', 'too big');

    // syncDiff returns two files to upload
    mockApiRequest.mockResolvedValueOnce({
      ok: true,
      status: 200,
      data: { toUpload: ['ok.txt', 'rejected.txt'], toDownload: [] },
    });
    // First upload succeeds
    mockApiRequest.mockResolvedValueOnce({ ok: true, status: 200 });
    // Second upload fails via API error
    mockApiRequest.mockResolvedValueOnce({
      ok: false,
      status: 413,
      error: 'File too large',
    });

    const result = await pushChanges(tmpDir);
    expect(result.uploaded).toBe(1);
    expect(result.errors.length).toBe(1);
    expect(result.errors[0]).toContain('Upload failed');
  });
});
|
|
573
|
+
|
|
574
|
+
describe('pullChanges (mocked API)', () => {
  // Mock queue order mirrors call order: syncDiff first, then one
  // download per entry in toDownload.
  beforeEach(() => {
    vi.clearAllMocks();
  });

  it('downloads files identified by syncDiff', async () => {
    // syncDiff
    mockApiRequest.mockResolvedValueOnce({
      ok: true,
      status: 200,
      data: { toUpload: [], toDownload: ['remote.txt'] },
    });
    // downloadFile
    mockApiRequest.mockResolvedValueOnce({
      ok: true,
      status: 200,
      data: { content: Buffer.from('remote content').toString('base64'), size: 14 },
    });

    const result = await pullChanges(tmpDir);
    expect(result.downloaded).toBe(1);
    expect(result.errors).toEqual([]);

    // Verify file was written
    const content = fs.readFileSync(path.join(tmpDir, 'remote.txt'), 'utf-8');
    expect(content).toBe('remote content');
  });

  it('collects errors without stopping', async () => {
    // syncDiff returns two files to download
    mockApiRequest.mockResolvedValueOnce({
      ok: true,
      status: 200,
      data: { toUpload: [], toDownload: ['ok.txt', 'fail.txt'] },
    });
    // First download succeeds
    mockApiRequest.mockResolvedValueOnce({
      ok: true,
      status: 200,
      data: { content: Buffer.from('ok').toString('base64'), size: 2 },
    });
    // Second download fails
    mockApiRequest.mockResolvedValueOnce({
      ok: false,
      status: 500,
      error: 'Download error',
    });

    const result = await pullChanges(tmpDir);
    // One success, one recorded failure — the loop must not abort early.
    expect(result.downloaded).toBe(1);
    expect(result.errors.length).toBe(1);
    expect(result.errors[0]).toContain('Download failed');
  });
});
|
|
@@ -0,0 +1,74 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* hq modules add command (US-003)
|
|
3
|
+
*/
|
|
4
|
+
|
|
5
|
+
import { Command } from 'commander';
|
|
6
|
+
import { findHqRoot, addModule, parseRepoName, isValidRepoUrl } from '../utils/manifest.js';
|
|
7
|
+
import type { ModuleDefinition, SyncStrategy } from '../types.js';
|
|
8
|
+
|
|
9
|
+
export function registerAddCommand(program: Command): void {
|
|
10
|
+
program
|
|
11
|
+
.command('add <repo-url>')
|
|
12
|
+
.description('Add a module to the manifest')
|
|
13
|
+
.option('--as <name>', 'Module name (defaults to repo name)')
|
|
14
|
+
.option('--branch <branch>', 'Git branch to track', 'main')
|
|
15
|
+
.option('--strategy <strategy>', 'Sync strategy: link | merge | copy', 'link')
|
|
16
|
+
.option('--path <mapping>', 'Path mapping src:dest (can repeat)', (val, prev: string[]) => [...prev, val], [])
|
|
17
|
+
.action(async (repoUrl: string, options: {
|
|
18
|
+
as?: string;
|
|
19
|
+
branch: string;
|
|
20
|
+
strategy: string;
|
|
21
|
+
path: string[];
|
|
22
|
+
}) => {
|
|
23
|
+
try {
|
|
24
|
+
// Validate repo URL
|
|
25
|
+
if (!isValidRepoUrl(repoUrl)) {
|
|
26
|
+
console.error('Error: Invalid repo URL. Must start with https:// or git@');
|
|
27
|
+
process.exit(1);
|
|
28
|
+
}
|
|
29
|
+
|
|
30
|
+
// Parse name
|
|
31
|
+
const name = options.as || parseRepoName(repoUrl);
|
|
32
|
+
|
|
33
|
+
// Validate strategy
|
|
34
|
+
const validStrategies: SyncStrategy[] = ['link', 'merge', 'copy'];
|
|
35
|
+
if (!validStrategies.includes(options.strategy as SyncStrategy)) {
|
|
36
|
+
console.error(`Error: Invalid strategy "${options.strategy}". Use: ${validStrategies.join(', ')}`);
|
|
37
|
+
process.exit(1);
|
|
38
|
+
}
|
|
39
|
+
|
|
40
|
+
// Parse path mappings
|
|
41
|
+
const paths = options.path.length > 0
|
|
42
|
+
? options.path.map(p => {
|
|
43
|
+
const [src, dest] = p.split(':');
|
|
44
|
+
if (!src || !dest) {
|
|
45
|
+
throw new Error(`Invalid path mapping: ${p}. Format: src:dest`);
|
|
46
|
+
}
|
|
47
|
+
return { src, dest };
|
|
48
|
+
})
|
|
49
|
+
: [{ src: '.', dest: `workers/${name}` }]; // Default: entire repo to workers/
|
|
50
|
+
|
|
51
|
+
const module: ModuleDefinition = {
|
|
52
|
+
name,
|
|
53
|
+
repo: repoUrl,
|
|
54
|
+
branch: options.branch,
|
|
55
|
+
strategy: options.strategy as SyncStrategy,
|
|
56
|
+
paths,
|
|
57
|
+
};
|
|
58
|
+
|
|
59
|
+
const hqRoot = findHqRoot();
|
|
60
|
+
addModule(hqRoot, module);
|
|
61
|
+
|
|
62
|
+
console.log(`Added module "${name}"`);
|
|
63
|
+
console.log(` Repo: ${repoUrl}`);
|
|
64
|
+
console.log(` Branch: ${options.branch}`);
|
|
65
|
+
console.log(` Strategy: ${options.strategy}`);
|
|
66
|
+
console.log(` Paths: ${paths.map(p => `${p.src} -> ${p.dest}`).join(', ')}`);
|
|
67
|
+
console.log('\nRun "hq modules sync" to fetch and sync the module.');
|
|
68
|
+
|
|
69
|
+
} catch (error) {
|
|
70
|
+
console.error('Error:', error instanceof Error ? error.message : error);
|
|
71
|
+
process.exit(1);
|
|
72
|
+
}
|
|
73
|
+
});
|
|
74
|
+
}
|