@uvrn/cli 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +340 -0
- package/dist/cli.d.ts +11 -0
- package/dist/cli.d.ts.map +1 -0
- package/dist/cli.js +297 -0
- package/dist/cli.js.map +1 -0
- package/dist/index.d.ts +6 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +10 -0
- package/dist/index.js.map +1 -0
- package/docs/CLI_GUIDE.md +618 -0
- package/jest.config.js +12 -0
- package/package.json +42 -0
- package/src/cli.ts +294 -0
- package/src/index.ts +6 -0
- package/test-bundle.json +25 -0
- package/test-receipt.json +1 -0
- package/tests/cli.test.ts +393 -0
- package/tsconfig.json +24 -0
|
@@ -0,0 +1,393 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Unit tests for CLI argument parsing and basic functionality
|
|
3
|
+
*/
|
|
4
|
+
|
|
5
|
+
import { execSync } from 'child_process';
|
|
6
|
+
import * as fs from 'fs';
|
|
7
|
+
import * as path from 'path';
|
|
8
|
+
|
|
9
|
+
// Absolute path to the compiled CLI entry point; the suite exercises the built
// artifact (dist/cli.js), so `npm run build` must run before these tests.
const CLI_PATH = path.resolve(__dirname, '../dist/cli.js');
|
|
10
|
+
|
|
11
|
+
describe('Delta Engine CLI', () => {
|
|
12
|
+
describe('Version and Help', () => {
|
|
13
|
+
test('should display version', () => {
|
|
14
|
+
const output = execSync(`node ${CLI_PATH} --version`, { encoding: 'utf-8' });
|
|
15
|
+
expect(output.trim()).toBe('1.0.0');
|
|
16
|
+
});
|
|
17
|
+
|
|
18
|
+
test('should display help', () => {
|
|
19
|
+
const output = execSync(`node ${CLI_PATH} --help`, { encoding: 'utf-8' });
|
|
20
|
+
expect(output).toContain('CLI for Loosechain Delta Engine');
|
|
21
|
+
expect(output).toContain('run');
|
|
22
|
+
expect(output).toContain('validate');
|
|
23
|
+
expect(output).toContain('verify');
|
|
24
|
+
});
|
|
25
|
+
|
|
26
|
+
test('should display command help for run', () => {
|
|
27
|
+
const output = execSync(`node ${CLI_PATH} run --help`, { encoding: 'utf-8' });
|
|
28
|
+
expect(output).toContain('Execute delta engine');
|
|
29
|
+
expect(output).toContain('--output');
|
|
30
|
+
expect(output).toContain('--quiet');
|
|
31
|
+
expect(output).toContain('--pretty');
|
|
32
|
+
});
|
|
33
|
+
});
|
|
34
|
+
|
|
35
|
+
describe('Input Validation', () => {
|
|
36
|
+
test('should reject invalid JSON', () => {
|
|
37
|
+
const invalidJson = '{ invalid json }';
|
|
38
|
+
const tempFile = path.join(__dirname, 'temp-invalid.json');
|
|
39
|
+
fs.writeFileSync(tempFile, invalidJson);
|
|
40
|
+
|
|
41
|
+
try {
|
|
42
|
+
execSync(`node ${CLI_PATH} run ${tempFile}`, { encoding: 'utf-8' });
|
|
43
|
+
fail('Should have thrown an error');
|
|
44
|
+
} catch (error: any) {
|
|
45
|
+
// Invalid JSON is caught during parsing, which is treated as ENGINE_ERROR (2)
|
|
46
|
+
expect(error.status).toBeGreaterThan(0);
|
|
47
|
+
expect(error.stderr.toString()).toContain('Invalid JSON');
|
|
48
|
+
} finally {
|
|
49
|
+
fs.unlinkSync(tempFile);
|
|
50
|
+
}
|
|
51
|
+
});
|
|
52
|
+
|
|
53
|
+
test('should reject bundle with missing required fields', () => {
|
|
54
|
+
const invalidBundle = {
|
|
55
|
+
bundleId: 'test',
|
|
56
|
+
// Missing claim, thresholdPct, dataSpecs
|
|
57
|
+
};
|
|
58
|
+
const tempFile = path.join(__dirname, 'temp-incomplete.json');
|
|
59
|
+
fs.writeFileSync(tempFile, JSON.stringify(invalidBundle));
|
|
60
|
+
|
|
61
|
+
try {
|
|
62
|
+
execSync(`node ${CLI_PATH} validate ${tempFile}`, { encoding: 'utf-8' });
|
|
63
|
+
fail('Should have thrown an error');
|
|
64
|
+
} catch (error: any) {
|
|
65
|
+
expect(error.status).toBe(1); // INVALID_BUNDLE
|
|
66
|
+
} finally {
|
|
67
|
+
fs.unlinkSync(tempFile);
|
|
68
|
+
}
|
|
69
|
+
});
|
|
70
|
+
|
|
71
|
+
test('should accept valid bundle structure', () => {
|
|
72
|
+
const validBundle = {
|
|
73
|
+
bundleId: 'test-001',
|
|
74
|
+
claim: 'Test claim',
|
|
75
|
+
thresholdPct: 0.05,
|
|
76
|
+
dataSpecs: [
|
|
77
|
+
{
|
|
78
|
+
id: 'source-a',
|
|
79
|
+
label: 'Source A',
|
|
80
|
+
sourceKind: 'metric',
|
|
81
|
+
originDocIds: ['doc-a'],
|
|
82
|
+
metrics: [{ key: 'value', value: 100 }]
|
|
83
|
+
},
|
|
84
|
+
{
|
|
85
|
+
id: 'source-b',
|
|
86
|
+
label: 'Source B',
|
|
87
|
+
sourceKind: 'metric',
|
|
88
|
+
originDocIds: ['doc-b'],
|
|
89
|
+
metrics: [{ key: 'value', value: 102 }]
|
|
90
|
+
}
|
|
91
|
+
]
|
|
92
|
+
};
|
|
93
|
+
const tempFile = path.join(__dirname, 'temp-valid.json');
|
|
94
|
+
fs.writeFileSync(tempFile, JSON.stringify(validBundle));
|
|
95
|
+
|
|
96
|
+
try {
|
|
97
|
+
const output = execSync(`node ${CLI_PATH} validate ${tempFile} --quiet`, {
|
|
98
|
+
encoding: 'utf-8'
|
|
99
|
+
});
|
|
100
|
+
const result = JSON.parse(output);
|
|
101
|
+
expect(result.valid).toBe(true);
|
|
102
|
+
} finally {
|
|
103
|
+
fs.unlinkSync(tempFile);
|
|
104
|
+
}
|
|
105
|
+
});
|
|
106
|
+
});
|
|
107
|
+
|
|
108
|
+
describe('Run Command', () => {
|
|
109
|
+
const validBundle = {
|
|
110
|
+
bundleId: 'test-run-001',
|
|
111
|
+
claim: 'Test run command',
|
|
112
|
+
thresholdPct: 0.05,
|
|
113
|
+
dataSpecs: [
|
|
114
|
+
{
|
|
115
|
+
id: 'source-a',
|
|
116
|
+
label: 'Source A',
|
|
117
|
+
sourceKind: 'metric',
|
|
118
|
+
originDocIds: ['doc-a'],
|
|
119
|
+
metrics: [{ key: 'value', value: 100 }]
|
|
120
|
+
},
|
|
121
|
+
{
|
|
122
|
+
id: 'source-b',
|
|
123
|
+
label: 'Source B',
|
|
124
|
+
sourceKind: 'metric',
|
|
125
|
+
originDocIds: ['doc-b'],
|
|
126
|
+
metrics: [{ key: 'value', value: 102 }]
|
|
127
|
+
}
|
|
128
|
+
]
|
|
129
|
+
};
|
|
130
|
+
|
|
131
|
+
test('should generate receipt from valid bundle', () => {
|
|
132
|
+
const tempFile = path.join(__dirname, 'temp-run.json');
|
|
133
|
+
fs.writeFileSync(tempFile, JSON.stringify(validBundle));
|
|
134
|
+
|
|
135
|
+
try {
|
|
136
|
+
const output = execSync(`node ${CLI_PATH} run ${tempFile} --quiet`, {
|
|
137
|
+
encoding: 'utf-8'
|
|
138
|
+
});
|
|
139
|
+
const receipt = JSON.parse(output);
|
|
140
|
+
|
|
141
|
+
expect(receipt).toHaveProperty('bundleId', 'test-run-001');
|
|
142
|
+
expect(receipt).toHaveProperty('deltaFinal');
|
|
143
|
+
expect(receipt).toHaveProperty('sources');
|
|
144
|
+
expect(receipt).toHaveProperty('rounds');
|
|
145
|
+
expect(receipt).toHaveProperty('outcome');
|
|
146
|
+
expect(receipt).toHaveProperty('hash');
|
|
147
|
+
expect(receipt.sources).toEqual(['Source A', 'Source B']);
|
|
148
|
+
} finally {
|
|
149
|
+
fs.unlinkSync(tempFile);
|
|
150
|
+
}
|
|
151
|
+
});
|
|
152
|
+
|
|
153
|
+
test('should write receipt to output file', () => {
|
|
154
|
+
const bundleFile = path.join(__dirname, 'temp-bundle-output.json');
|
|
155
|
+
const receiptFile = path.join(__dirname, 'temp-receipt-output.json');
|
|
156
|
+
fs.writeFileSync(bundleFile, JSON.stringify(validBundle));
|
|
157
|
+
|
|
158
|
+
try {
|
|
159
|
+
execSync(`node ${CLI_PATH} run ${bundleFile} --output ${receiptFile} --quiet`);
|
|
160
|
+
|
|
161
|
+
expect(fs.existsSync(receiptFile)).toBe(true);
|
|
162
|
+
|
|
163
|
+
const receipt = JSON.parse(fs.readFileSync(receiptFile, 'utf-8'));
|
|
164
|
+
expect(receipt).toHaveProperty('hash');
|
|
165
|
+
} finally {
|
|
166
|
+
fs.unlinkSync(bundleFile);
|
|
167
|
+
if (fs.existsSync(receiptFile)) {
|
|
168
|
+
fs.unlinkSync(receiptFile);
|
|
169
|
+
}
|
|
170
|
+
}
|
|
171
|
+
});
|
|
172
|
+
|
|
173
|
+
test('should support pretty-print option', () => {
|
|
174
|
+
const tempFile = path.join(__dirname, 'temp-pretty.json');
|
|
175
|
+
fs.writeFileSync(tempFile, JSON.stringify(validBundle));
|
|
176
|
+
|
|
177
|
+
try {
|
|
178
|
+
const output = execSync(`node ${CLI_PATH} run ${tempFile} --pretty --quiet`, {
|
|
179
|
+
encoding: 'utf-8'
|
|
180
|
+
});
|
|
181
|
+
|
|
182
|
+
// Pretty-printed JSON should have indentation
|
|
183
|
+
expect(output).toContain('\n ');
|
|
184
|
+
expect(output).toContain('"bundleId"');
|
|
185
|
+
} finally {
|
|
186
|
+
fs.unlinkSync(tempFile);
|
|
187
|
+
}
|
|
188
|
+
});
|
|
189
|
+
});
|
|
190
|
+
|
|
191
|
+
describe('Verify Command', () => {
|
|
192
|
+
test('should verify valid receipt', () => {
|
|
193
|
+
// First generate a receipt
|
|
194
|
+
const bundle = {
|
|
195
|
+
bundleId: 'verify-test-001',
|
|
196
|
+
claim: 'Test verify',
|
|
197
|
+
thresholdPct: 0.05,
|
|
198
|
+
dataSpecs: [
|
|
199
|
+
{
|
|
200
|
+
id: 'source-a',
|
|
201
|
+
label: 'Source A',
|
|
202
|
+
sourceKind: 'metric',
|
|
203
|
+
originDocIds: ['doc-a'],
|
|
204
|
+
metrics: [{ key: 'value', value: 100 }]
|
|
205
|
+
},
|
|
206
|
+
{
|
|
207
|
+
id: 'source-b',
|
|
208
|
+
label: 'Source B',
|
|
209
|
+
sourceKind: 'metric',
|
|
210
|
+
originDocIds: ['doc-b'],
|
|
211
|
+
metrics: [{ key: 'value', value: 102 }]
|
|
212
|
+
}
|
|
213
|
+
]
|
|
214
|
+
};
|
|
215
|
+
|
|
216
|
+
const bundleFile = path.join(__dirname, 'temp-verify-bundle.json');
|
|
217
|
+
const receiptFile = path.join(__dirname, 'temp-verify-receipt.json');
|
|
218
|
+
fs.writeFileSync(bundleFile, JSON.stringify(bundle));
|
|
219
|
+
|
|
220
|
+
try {
|
|
221
|
+
// Generate receipt
|
|
222
|
+
execSync(`node ${CLI_PATH} run ${bundleFile} --output ${receiptFile} --quiet`);
|
|
223
|
+
|
|
224
|
+
// Verify receipt
|
|
225
|
+
const output = execSync(`node ${CLI_PATH} verify ${receiptFile} --quiet`, {
|
|
226
|
+
encoding: 'utf-8'
|
|
227
|
+
});
|
|
228
|
+
const result = JSON.parse(output);
|
|
229
|
+
|
|
230
|
+
expect(result.verified).toBe(true);
|
|
231
|
+
expect(result.hash).toBeDefined();
|
|
232
|
+
} finally {
|
|
233
|
+
fs.unlinkSync(bundleFile);
|
|
234
|
+
if (fs.existsSync(receiptFile)) {
|
|
235
|
+
fs.unlinkSync(receiptFile);
|
|
236
|
+
}
|
|
237
|
+
}
|
|
238
|
+
});
|
|
239
|
+
|
|
240
|
+
test('should detect tampered receipt', () => {
|
|
241
|
+
// First generate a valid receipt
|
|
242
|
+
const bundle = {
|
|
243
|
+
bundleId: 'tamper-test-001',
|
|
244
|
+
claim: 'Test tampering',
|
|
245
|
+
thresholdPct: 0.05,
|
|
246
|
+
dataSpecs: [
|
|
247
|
+
{
|
|
248
|
+
id: 'source-a',
|
|
249
|
+
label: 'Source A',
|
|
250
|
+
sourceKind: 'metric',
|
|
251
|
+
originDocIds: ['doc-a'],
|
|
252
|
+
metrics: [{ key: 'value', value: 100 }]
|
|
253
|
+
},
|
|
254
|
+
{
|
|
255
|
+
id: 'source-b',
|
|
256
|
+
label: 'Source B',
|
|
257
|
+
sourceKind: 'metric',
|
|
258
|
+
originDocIds: ['doc-b'],
|
|
259
|
+
metrics: [{ key: 'value', value: 102 }]
|
|
260
|
+
}
|
|
261
|
+
]
|
|
262
|
+
};
|
|
263
|
+
|
|
264
|
+
const bundleFile = path.join(__dirname, 'temp-tamper-bundle.json');
|
|
265
|
+
const receiptFile = path.join(__dirname, 'temp-tamper-receipt.json');
|
|
266
|
+
fs.writeFileSync(bundleFile, JSON.stringify(bundle));
|
|
267
|
+
|
|
268
|
+
try {
|
|
269
|
+
// Generate receipt
|
|
270
|
+
execSync(`node ${CLI_PATH} run ${bundleFile} --output ${receiptFile} --quiet`);
|
|
271
|
+
|
|
272
|
+
// Tamper with receipt
|
|
273
|
+
const receipt = JSON.parse(fs.readFileSync(receiptFile, 'utf-8'));
|
|
274
|
+
receipt.outcome = 'tampered'; // Change outcome but keep hash
|
|
275
|
+
fs.writeFileSync(receiptFile, JSON.stringify(receipt));
|
|
276
|
+
|
|
277
|
+
// Verify should fail
|
|
278
|
+
try {
|
|
279
|
+
execSync(`node ${CLI_PATH} verify ${receiptFile} --quiet`);
|
|
280
|
+
fail('Should have detected tampering');
|
|
281
|
+
} catch (error: any) {
|
|
282
|
+
expect(error.status).toBe(2); // ENGINE_ERROR
|
|
283
|
+
}
|
|
284
|
+
} finally {
|
|
285
|
+
fs.unlinkSync(bundleFile);
|
|
286
|
+
if (fs.existsSync(receiptFile)) {
|
|
287
|
+
fs.unlinkSync(receiptFile);
|
|
288
|
+
}
|
|
289
|
+
}
|
|
290
|
+
});
|
|
291
|
+
});
|
|
292
|
+
|
|
293
|
+
describe('Stdin Input', () => {
|
|
294
|
+
test('should accept bundle from stdin', () => {
|
|
295
|
+
const bundle = {
|
|
296
|
+
bundleId: 'stdin-test-001',
|
|
297
|
+
claim: 'Test stdin',
|
|
298
|
+
thresholdPct: 0.05,
|
|
299
|
+
dataSpecs: [
|
|
300
|
+
{
|
|
301
|
+
id: 'source-a',
|
|
302
|
+
label: 'Source A',
|
|
303
|
+
sourceKind: 'metric',
|
|
304
|
+
originDocIds: ['doc-a'],
|
|
305
|
+
metrics: [{ key: 'value', value: 100 }]
|
|
306
|
+
},
|
|
307
|
+
{
|
|
308
|
+
id: 'source-b',
|
|
309
|
+
label: 'Source B',
|
|
310
|
+
sourceKind: 'metric',
|
|
311
|
+
originDocIds: ['doc-b'],
|
|
312
|
+
metrics: [{ key: 'value', value: 102 }]
|
|
313
|
+
}
|
|
314
|
+
]
|
|
315
|
+
};
|
|
316
|
+
|
|
317
|
+
const output = execSync(`echo '${JSON.stringify(bundle)}' | node ${CLI_PATH} run --quiet`, {
|
|
318
|
+
encoding: 'utf-8',
|
|
319
|
+
shell: '/bin/bash'
|
|
320
|
+
});
|
|
321
|
+
|
|
322
|
+
const receipt = JSON.parse(output);
|
|
323
|
+
expect(receipt).toHaveProperty('bundleId', 'stdin-test-001');
|
|
324
|
+
expect(receipt).toHaveProperty('hash');
|
|
325
|
+
});
|
|
326
|
+
});
|
|
327
|
+
|
|
328
|
+
describe('Exit Codes', () => {
|
|
329
|
+
test('should return 0 on success', () => {
|
|
330
|
+
const bundle = {
|
|
331
|
+
bundleId: 'exit-test-001',
|
|
332
|
+
claim: 'Test exit code',
|
|
333
|
+
thresholdPct: 0.05,
|
|
334
|
+
dataSpecs: [
|
|
335
|
+
{
|
|
336
|
+
id: 'source-a',
|
|
337
|
+
label: 'Source A',
|
|
338
|
+
sourceKind: 'metric',
|
|
339
|
+
originDocIds: ['doc-a'],
|
|
340
|
+
metrics: [{ key: 'value', value: 100 }]
|
|
341
|
+
},
|
|
342
|
+
{
|
|
343
|
+
id: 'source-b',
|
|
344
|
+
label: 'Source B',
|
|
345
|
+
sourceKind: 'metric',
|
|
346
|
+
originDocIds: ['doc-b'],
|
|
347
|
+
metrics: [{ key: 'value', value: 102 }]
|
|
348
|
+
}
|
|
349
|
+
]
|
|
350
|
+
};
|
|
351
|
+
|
|
352
|
+
const tempFile = path.join(__dirname, 'temp-exit-success.json');
|
|
353
|
+
fs.writeFileSync(tempFile, JSON.stringify(bundle));
|
|
354
|
+
|
|
355
|
+
try {
|
|
356
|
+
const result = execSync(`node ${CLI_PATH} run ${tempFile} --quiet; echo $?`, {
|
|
357
|
+
encoding: 'utf-8',
|
|
358
|
+
shell: '/bin/bash'
|
|
359
|
+
});
|
|
360
|
+
|
|
361
|
+
// Last line should be exit code 0
|
|
362
|
+
expect(result.trim().endsWith('0')).toBe(true);
|
|
363
|
+
} finally {
|
|
364
|
+
fs.unlinkSync(tempFile);
|
|
365
|
+
}
|
|
366
|
+
});
|
|
367
|
+
|
|
368
|
+
test('should return 1 for invalid bundle', () => {
|
|
369
|
+
const invalidBundle = { bundleId: 'test' }; // Missing required fields
|
|
370
|
+
|
|
371
|
+
const tempFile = path.join(__dirname, 'temp-exit-invalid.json');
|
|
372
|
+
fs.writeFileSync(tempFile, JSON.stringify(invalidBundle));
|
|
373
|
+
|
|
374
|
+
try {
|
|
375
|
+
execSync(`node ${CLI_PATH} validate ${tempFile}`, { encoding: 'utf-8' });
|
|
376
|
+
fail('Should have exited with code 1');
|
|
377
|
+
} catch (error: any) {
|
|
378
|
+
expect(error.status).toBe(1);
|
|
379
|
+
} finally {
|
|
380
|
+
fs.unlinkSync(tempFile);
|
|
381
|
+
}
|
|
382
|
+
});
|
|
383
|
+
|
|
384
|
+
test('should return 3 for file not found', () => {
|
|
385
|
+
try {
|
|
386
|
+
execSync(`node ${CLI_PATH} run nonexistent-file.json`, { encoding: 'utf-8' });
|
|
387
|
+
fail('Should have exited with code 3');
|
|
388
|
+
} catch (error: any) {
|
|
389
|
+
expect(error.status).toBe(3);
|
|
390
|
+
}
|
|
391
|
+
});
|
|
392
|
+
});
|
|
393
|
+
});
|
package/tsconfig.json
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
{
|
|
2
|
+
"compilerOptions": {
|
|
3
|
+
"target": "ES2022",
|
|
4
|
+
"module": "commonjs",
|
|
5
|
+
"lib": ["ES2022"],
|
|
6
|
+
"outDir": "./dist",
|
|
7
|
+
"rootDir": "./src",
|
|
8
|
+
"declaration": true,
|
|
9
|
+
"declarationMap": true,
|
|
10
|
+
"sourceMap": true,
|
|
11
|
+
"strict": true,
|
|
12
|
+
"esModuleInterop": true,
|
|
13
|
+
"skipLibCheck": true,
|
|
14
|
+
"forceConsistentCasingInFileNames": true,
|
|
15
|
+
"moduleResolution": "node",
|
|
16
|
+
"resolveJsonModule": true,
|
|
17
|
+
"noUnusedLocals": true,
|
|
18
|
+
"noUnusedParameters": true,
|
|
19
|
+
"noImplicitReturns": true,
|
|
20
|
+
"noFallthroughCasesInSwitch": true
|
|
21
|
+
},
|
|
22
|
+
"include": ["src/**/*"],
|
|
23
|
+
"exclude": ["node_modules", "dist", "tests", "**/*.test.ts"]
|
|
24
|
+
}
|