@zoebuildsai/trace 1.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (130)
  1. package/.gitignore +115 -0
  2. package/.trace/progress.json +22 -0
  3. package/README.md +466 -0
  4. package/RELEASE-NOTES-1.5.0.md +410 -0
  5. package/STATUS.md +245 -0
  6. package/dist/auto-commit.d.ts +66 -0
  7. package/dist/auto-commit.d.ts.map +1 -0
  8. package/dist/auto-commit.js +180 -0
  9. package/dist/auto-commit.js.map +1 -0
  10. package/dist/cli.d.ts +7 -0
  11. package/dist/cli.d.ts.map +1 -0
  12. package/dist/cli.js +246 -0
  13. package/dist/cli.js.map +1 -0
  14. package/dist/commands.d.ts +46 -0
  15. package/dist/commands.d.ts.map +1 -0
  16. package/dist/commands.js +256 -0
  17. package/dist/commands.js.map +1 -0
  18. package/dist/diff.d.ts +23 -0
  19. package/dist/diff.d.ts.map +1 -0
  20. package/dist/diff.js +106 -0
  21. package/dist/diff.js.map +1 -0
  22. package/dist/github.d.ts.map +1 -0
  23. package/dist/github.js.map +1 -0
  24. package/dist/index-cache.d.ts +35 -0
  25. package/dist/index-cache.d.ts.map +1 -0
  26. package/dist/index-cache.js +114 -0
  27. package/dist/index-cache.js.map +1 -0
  28. package/dist/index.d.ts +15 -0
  29. package/dist/index.d.ts.map +1 -0
  30. package/dist/index.js +25 -0
  31. package/dist/index.js.map +1 -0
  32. package/dist/storage.d.ts +45 -0
  33. package/dist/storage.d.ts.map +1 -0
  34. package/dist/storage.js +151 -0
  35. package/dist/storage.js.map +1 -0
  36. package/dist/sync.d.ts +60 -0
  37. package/dist/sync.js +184 -0
  38. package/dist/tags.d.ts +85 -0
  39. package/dist/tags.d.ts.map +1 -0
  40. package/dist/tags.js +219 -0
  41. package/dist/tags.js.map +1 -0
  42. package/dist/types.d.ts +102 -0
  43. package/dist/types.d.ts.map +1 -0
  44. package/dist/types.js +6 -0
  45. package/dist/types.js.map +1 -0
  46. package/docs/.nojekyll +0 -0
  47. package/docs/README.md +73 -0
  48. package/docs/_config.yml +2 -0
  49. package/docs/index.html +960 -0
  50. package/docs-website/package.json +20 -0
  51. package/jest.config.js +21 -0
  52. package/package.json +50 -0
  53. package/scripts/init.ts +290 -0
  54. package/src/agent-audit.ts +270 -0
  55. package/src/agent-checkout.ts +227 -0
  56. package/src/agent-coordination.ts +318 -0
  57. package/src/async-queue.ts +203 -0
  58. package/src/auto-branching.ts +279 -0
  59. package/src/auto-commit.ts +166 -0
  60. package/src/cherry-pick.ts +252 -0
  61. package/src/chunked-upload.ts +224 -0
  62. package/src/cli-v2.ts +335 -0
  63. package/src/cli.ts +318 -0
  64. package/src/cliff-detection.ts +232 -0
  65. package/src/commands.ts +267 -0
  66. package/src/commit-hash-system.ts +351 -0
  67. package/src/compression.ts +176 -0
  68. package/src/conflict-resolution-ui.ts +277 -0
  69. package/src/conflict-visualization.ts +238 -0
  70. package/src/diff-formatter.ts +184 -0
  71. package/src/diff.ts +124 -0
  72. package/src/distributed-coordination.ts +273 -0
  73. package/src/git-interop.ts +316 -0
  74. package/src/index-cache.ts +88 -0
  75. package/src/index.ts +38 -0
  76. package/src/merge-engine.ts +143 -0
  77. package/src/message-search.ts +370 -0
  78. package/src/performance-monitoring.ts +236 -0
  79. package/src/rebase.ts +327 -0
  80. package/src/rollback.ts +215 -0
  81. package/src/semantic-grouping.ts +245 -0
  82. package/src/stage-area.ts +324 -0
  83. package/src/stash.ts +278 -0
  84. package/src/storage.ts +131 -0
  85. package/src/sync.ts +205 -0
  86. package/src/tags.ts +244 -0
  87. package/src/types.ts +119 -0
  88. package/src/webhooks.ts +119 -0
  89. package/src/workspace-isolation.ts +298 -0
  90. package/tests/auto-commit.test.ts +308 -0
  91. package/tests/checkout.test.ts +136 -0
  92. package/tests/commit.test.ts +118 -0
  93. package/tests/diff.test.ts +191 -0
  94. package/tests/github.test.ts +94 -0
  95. package/tests/integration.test.ts +267 -0
  96. package/tests/log.test.ts +125 -0
  97. package/tests/phase2-integration.test.ts +370 -0
  98. package/tests/storage.test.ts +167 -0
  99. package/tests/tags.test.ts +477 -0
  100. package/tests/types.test.ts +75 -0
  101. package/tests/v1.1/agent-audit.test.ts +472 -0
  102. package/tests/v1.1/agent-coordination.test.ts +308 -0
  103. package/tests/v1.1/async-queue.test.ts +253 -0
  104. package/tests/v1.1/comprehensive.test.ts +521 -0
  105. package/tests/v1.1/diff-formatter.test.ts +238 -0
  106. package/tests/v1.1/integration.test.ts +389 -0
  107. package/tests/v1.1/onboarding.test.ts +365 -0
  108. package/tests/v1.1/rollback.test.ts +370 -0
  109. package/tests/v1.1/semantic-grouping.test.ts +230 -0
  110. package/tests/v1.2/chunked-upload.test.ts +301 -0
  111. package/tests/v1.2/cliff-detection.test.ts +272 -0
  112. package/tests/v1.2/commit-hash-system.test.ts +288 -0
  113. package/tests/v1.2/compression.test.ts +220 -0
  114. package/tests/v1.2/conflict-visualization.test.ts +263 -0
  115. package/tests/v1.2/distributed.test.ts +261 -0
  116. package/tests/v1.2/performance-monitoring.test.ts +328 -0
  117. package/tests/v1.3/auto-branching.test.ts +270 -0
  118. package/tests/v1.3/message-search.test.ts +264 -0
  119. package/tests/v1.3/stage-area.test.ts +330 -0
  120. package/tests/v1.3/stash-rebase-cherry-pick.test.ts +361 -0
  121. package/tests/v1.4/cli.test.ts +171 -0
  122. package/tests/v1.4/conflict-resolution-advanced.test.ts +429 -0
  123. package/tests/v1.4/conflict-resolution-ui.test.ts +286 -0
  124. package/tests/v1.4/workspace-isolation-advanced.test.ts +382 -0
  125. package/tests/v1.4/workspace-isolation.test.ts +268 -0
  126. package/tests/v1.5/agent-coordination.real.test.ts +401 -0
  127. package/tests/v1.5/cli-v2.test.ts +354 -0
  128. package/tests/v1.5/git-interop.real.test.ts +358 -0
  129. package/tests/v1.5/integration-testing.real.test.ts +440 -0
  130. package/tsconfig.json +26 -0
@@ -0,0 +1,301 @@
1
+ /**
2
+ * Chunked Upload Tests
3
+ * Large file handling with resumable transfers
4
+ */
5
+
6
+ import { ChunkedUploadEngine, ChunkedFile } from '../../src/chunked-upload';
7
+
8
+ describe('ChunkedUploadEngine', () => {
9
+ let engine: ChunkedUploadEngine;
10
+
11
+ beforeEach(() => {
12
+ engine = new ChunkedUploadEngine();
13
+ });
14
+
15
+ describe('Upload Initialization', () => {
16
+ test('initializes upload for file', () => {
17
+ const file = engine.initUpload('file-1', 'document.pdf', 50 * 1024 * 1024);
18
+ expect(file.fileId).toBe('file-1');
19
+ expect(file.fileName).toBe('document.pdf');
20
+ expect(file.totalSize).toBe(50 * 1024 * 1024);
21
+ });
22
+
23
+ test('calculates correct chunk count', () => {
24
+ const file = engine.initUpload('f1', 'test.bin', 15 * 1024 * 1024);
25
+ // Default chunk size 5MB, so 15MB = 3 chunks
26
+ expect(file.totalChunks).toBe(3);
27
+ });
28
+
29
+ test('supports custom chunk size', () => {
30
+ const file = engine.initUpload('f1', 'test.bin', 10 * 1024 * 1024, 2 * 1024 * 1024);
31
+ expect(file.chunkSize).toBe(2 * 1024 * 1024);
32
+ expect(file.totalChunks).toBe(5);
33
+ });
34
+
35
+ test('starts with uploading status', () => {
36
+ const file = engine.initUpload('f1', 'test', 1000);
37
+ expect(file.status).toBe('uploading');
38
+ });
39
+
40
+ test('initializes progress to 0', () => {
41
+ const file = engine.initUpload('f1', 'test', 1000);
42
+ expect(file.progress).toBe(0);
43
+ });
44
+ });
45
+
46
+ describe('Chunk Upload', () => {
47
+ test('uploads single chunk', () => {
48
+ engine.initUpload('f1', 'test.bin', 10 * 1024 * 1024);
49
+ const chunk = engine.uploadChunk('f1', 0, Buffer.from('chunk data'), 'hash1');
50
+ expect(chunk.index).toBe(0);
51
+ expect(chunk.size).toBe('chunk data'.length);
52
+ });
53
+
54
+ test('tracks multiple chunks', () => {
55
+ engine.initUpload('f1', 'test.bin', 15 * 1024 * 1024);
56
+ engine.uploadChunk('f1', 0, Buffer.from('chunk0'), 'h0');
57
+ engine.uploadChunk('f1', 1, Buffer.from('chunk1'), 'h1');
58
+ engine.uploadChunk('f1', 2, Buffer.from('chunk2'), 'h2');
59
+ const file = engine.getUploadStatus('f1');
60
+ expect(file?.chunks).toHaveLength(3);
61
+ });
62
+
63
+ test('updates progress as chunks upload', () => {
64
+ engine.initUpload('f1', 'test.bin', 15 * 1024 * 1024);
65
+ engine.uploadChunk('f1', 0, Buffer.from('c0'), 'h0');
66
+ let file = engine.getUploadStatus('f1');
67
+ expect(file?.progress).toBe(Math.round((1 / 3) * 100));
68
+
69
+ engine.uploadChunk('f1', 1, Buffer.from('c1'), 'h1');
70
+ file = engine.getUploadStatus('f1');
71
+ expect(file?.progress).toBe(Math.round((2 / 3) * 100));
72
+ });
73
+
74
+ test('marks complete when all chunks received', () => {
75
+ engine.initUpload('f1', 'test.bin', 10 * 1024 * 1024);
76
+ engine.uploadChunk('f1', 0, Buffer.from('c0'), 'h0');
77
+ engine.uploadChunk('f1', 1, Buffer.from('c1'), 'h1');
78
+ const file = engine.getUploadStatus('f1');
79
+ expect(file?.status).toBe('completed');
80
+ expect(file?.progress).toBe(100);
81
+ });
82
+
83
+ test('throws error for unknown upload', () => {
84
+ expect(() => {
85
+ engine.uploadChunk('unknown', 0, Buffer.from('data'), 'hash');
86
+ }).toThrow();
87
+ });
88
+ });
89
+
90
+ describe('Chunk Verification', () => {
91
+ test('verifies chunk hash', () => {
92
+ engine.initUpload('f1', 'test', 1000);
93
+ const data = Buffer.from('test data');
94
+ const chunk = engine.uploadChunk('f1', 0, data, 'hash123');
95
+ const valid = engine.verifyChunk(chunk.id, 'hash123');
96
+ expect(valid).toBe(true);
97
+ });
98
+
99
+ test('rejects mismatched hash', () => {
100
+ engine.initUpload('f1', 'test', 1000);
101
+ const chunk = engine.uploadChunk('f1', 0, Buffer.from('test'), 'hash1');
102
+ const valid = engine.verifyChunk(chunk.id, 'wrong-hash');
103
+ expect(valid).toBe(false);
104
+ });
105
+
106
+ test('returns false for missing chunk', () => {
107
+ const valid = engine.verifyChunk('unknown', 'hash');
108
+ expect(valid).toBe(false);
109
+ });
110
+ });
111
+
112
+ describe('File Reconstruction', () => {
113
+ test('reconstructs complete file', () => {
114
+ engine.initUpload('f1', 'test.bin', 100);
115
+ engine.uploadChunk('f1', 0, Buffer.from('part1'), 'h1');
116
+ engine.uploadChunk('f1', 1, Buffer.from('part2'), 'h2');
117
+
118
+ const reconstructed = engine.reconstructFile('f1');
119
+ expect(reconstructed.toString()).toBe('part1part2');
120
+ });
121
+
122
+ test('maintains chunk order', () => {
123
+ engine.initUpload('f1', 'test', 100);
124
+ engine.uploadChunk('f1', 2, Buffer.from('3'), 'h3');
125
+ engine.uploadChunk('f1', 0, Buffer.from('1'), 'h1');
126
+ engine.uploadChunk('f1', 1, Buffer.from('2'), 'h2');
127
+
128
+ const file = engine.reconstructFile('f1');
129
+ expect(file.toString()).toBe('123');
130
+ });
131
+
132
+ test('throws error if incomplete', () => {
133
+ engine.initUpload('f1', 'test.bin', 100);
134
+ engine.uploadChunk('f1', 0, Buffer.from('part1'), 'h1');
135
+
136
+ expect(() => {
137
+ engine.reconstructFile('f1');
138
+ }).toThrow();
139
+ });
140
+ });
141
+
142
+ describe('Resume Upload', () => {
143
+ test('identifies missing chunks', () => {
144
+ engine.initUpload('f1', 'test.bin', 20 * 1024 * 1024);
145
+ engine.uploadChunk('f1', 0, Buffer.from('c0'), 'h0');
146
+ engine.uploadChunk('f1', 2, Buffer.from('c2'), 'h2');
147
+
148
+ const missing = engine.getMissingChunks('f1');
149
+ expect(missing).toContain(1);
150
+ expect(missing).toContain(3);
151
+ expect(missing).not.toContain(0);
152
+ expect(missing).not.toContain(2);
153
+ });
154
+
155
+ test('returns empty for complete upload', () => {
156
+ engine.initUpload('f1', 'test.bin', 10 * 1024 * 1024);
157
+ engine.uploadChunk('f1', 0, Buffer.from('c0'), 'h0');
158
+ engine.uploadChunk('f1', 1, Buffer.from('c1'), 'h1');
159
+
160
+ const missing = engine.getMissingChunks('f1');
161
+ expect(missing).toHaveLength(0);
162
+ });
163
+ });
164
+
165
+ describe('Cancel Upload', () => {
166
+ test('cancels active upload', () => {
167
+ engine.initUpload('f1', 'test.bin', 100);
168
+ const result = engine.cancelUpload('f1');
169
+ expect(result).toBe(true);
170
+ expect(engine.getUploadStatus('f1')).toBeUndefined();
171
+ });
172
+
173
+ test('returns false for unknown upload', () => {
174
+ const result = engine.cancelUpload('unknown');
175
+ expect(result).toBe(false);
176
+ });
177
+
178
+ test('cleans up chunks on cancel', () => {
179
+ engine.initUpload('f1', 'test.bin', 100);
180
+ engine.uploadChunk('f1', 0, Buffer.from('data'), 'hash');
181
+ engine.cancelUpload('f1');
182
+
183
+ expect(() => {
184
+ engine.reconstructFile('f1');
185
+ }).toThrow();
186
+ });
187
+ });
188
+
189
+ describe('Progress Tracking', () => {
190
+ test('reports upload progress', () => {
191
+ engine.initUpload('f1', 'test.bin', 20 * 1024 * 1024);
192
+ engine.uploadChunk('f1', 0, Buffer.from('c0'), 'h0');
193
+
194
+ const progress = engine.getProgress('f1');
195
+ expect(progress).toBeGreaterThan(0);
196
+ expect(progress).toBeLessThan(100);
197
+ });
198
+
199
+ test('returns 0 for unknown upload', () => {
200
+ const progress = engine.getProgress('unknown');
201
+ expect(progress).toBe(0);
202
+ });
203
+ });
204
+
205
+ describe('Time Estimation', () => {
206
+ test('estimates upload time', () => {
207
+ const estimate = engine.estimateUploadTime(50 * 1024 * 1024, 5);
208
+ expect(estimate.chunks).toBe(10); // 50MB / 5MB chunks
209
+ expect(estimate.timeSeconds).toBeGreaterThan(0);
210
+ });
211
+
212
+ test('accounts for network speed', () => {
213
+ const fast = engine.estimateUploadTime(100 * 1024 * 1024, 50);
214
+ const slow = engine.estimateUploadTime(100 * 1024 * 1024, 5);
215
+ expect(slow.timeSeconds).toBeGreaterThan(fast.timeSeconds);
216
+ });
217
+ });
218
+
219
+ describe('Active Uploads', () => {
220
+ test('lists active uploads', () => {
221
+ engine.initUpload('f1', 'test1.bin', 100);
222
+ engine.initUpload('f2', 'test2.bin', 100);
223
+
224
+ const active = engine.getActiveUploads();
225
+ expect(active).toHaveLength(2);
226
+ });
227
+
228
+ test('excludes completed uploads', () => {
229
+ engine.initUpload('f1', 'test1.bin', 10 * 1024 * 1024);
230
+ engine.uploadChunk('f1', 0, Buffer.from('c0'), 'h0');
231
+ engine.uploadChunk('f1', 1, Buffer.from('c1'), 'h1');
232
+
233
+ const active = engine.getActiveUploads();
234
+ expect(active).toHaveLength(0);
235
+ });
236
+ });
237
+
238
+ describe('Cleanup', () => {
239
+ test('cleans up specific upload', () => {
240
+ engine.initUpload('f1', 'test1', 100);
241
+ engine.uploadChunk('f1', 0, Buffer.from('c0'), 'h0');
242
+
243
+ engine.cleanup('f1');
244
+ expect(engine.getUploadStatus('f1')).toBeUndefined();
245
+ });
246
+
247
+ test('cleans up all completed uploads', () => {
248
+ engine.initUpload('f1', 'test1.bin', 10 * 1024 * 1024);
249
+ engine.uploadChunk('f1', 0, Buffer.from('c0'), 'h0');
250
+ engine.uploadChunk('f1', 1, Buffer.from('c1'), 'h1');
251
+
252
+ engine.cleanup();
253
+ expect(engine.getActiveUploads()).toHaveLength(0);
254
+ });
255
+ });
256
+
257
+ describe('Performance', () => {
258
+ test('handles 100 concurrent uploads', () => {
259
+ const start = Date.now();
260
+ for (let i = 0; i < 100; i++) {
261
+ engine.initUpload(`f${i}`, `file${i}.bin`, 10 * 1024 * 1024);
262
+ }
263
+ const elapsed = Date.now() - start;
264
+ expect(elapsed).toBeLessThan(100);
265
+ });
266
+
267
+ test('uploads 1000 chunks quickly', () => {
268
+ engine.initUpload('f1', 'test.bin', 5 * 1024 * 1024 * 1000);
269
+ const start = Date.now();
270
+ for (let i = 0; i < 1000; i++) {
271
+ engine.uploadChunk('f1', i, Buffer.from(`c${i}`), `h${i}`);
272
+ }
273
+ const elapsed = Date.now() - start;
274
+ expect(elapsed).toBeLessThan(500);
275
+ });
276
+ });
277
+
278
+ describe('Large File Scenarios', () => {
279
+ test('handles 100MB file', () => {
280
+ const file = engine.initUpload('f1', 'large.bin', 100 * 1024 * 1024);
281
+ expect(file.totalChunks).toBe(20); // 100MB / 5MB
282
+ });
283
+
284
+ test('handles 1GB file', () => {
285
+ const file = engine.initUpload('f1', 'huge.bin', 1024 * 1024 * 1024);
286
+ expect(file.totalChunks).toBe(205); // ~1GB / 5MB
287
+ });
288
+
289
+ test('resumes partial 100MB upload', () => {
290
+ engine.initUpload('f1', 'large.bin', 100 * 1024 * 1024);
291
+
292
+ // Upload first 10 chunks
293
+ for (let i = 0; i < 10; i++) {
294
+ engine.uploadChunk('f1', i, Buffer.from(`chunk${i}`), `hash${i}`);
295
+ }
296
+
297
+ const missing = engine.getMissingChunks('f1');
298
+ expect(missing).toHaveLength(10); // 20 total - 10 uploaded
299
+ });
300
+ });
301
+ });
@@ -0,0 +1,272 @@
1
+ /**
2
+ * Cliff Detection Tests
3
+ * Breaking change detection before checkout/merge
4
+ */
5
+
6
+ import { CliffDetection, CliffAnalysis } from '../../src/cliff-detection';
7
+
8
+ describe('CliffDetection', () => {
9
+ describe('Basic Cliff Analysis', () => {
10
+ test('detects file removal', () => {
11
+ const old = new Map([['src/core.ts', 'export class Core {}']]);
12
+ const newFiles = new Map();
13
+
14
+ const analysis = CliffDetection.analyzeCliff(old, newFiles);
15
+ expect(analysis.hasCliffs).toBe(true);
16
+ expect(analysis.warnings.some(w => w.type === 'breaking-change')).toBe(true);
17
+ });
18
+
19
+ test('detects multiple file removals', () => {
20
+ const old = new Map([
21
+ ['src/core.ts', 'content'],
22
+ ['src/api.ts', 'content'],
23
+ ['src/utils.ts', 'content'],
24
+ ]);
25
+ const newFiles = new Map();
26
+
27
+ const analysis = CliffDetection.analyzeCliff(old, newFiles);
28
+ expect(analysis.warnings.filter(w => w.type === 'breaking-change')).toHaveLength(3);
29
+ });
30
+
31
+ test('ignores new files', () => {
32
+ const old = new Map();
33
+ const newFiles = new Map([
34
+ ['src/new.ts', 'export class New {}'],
35
+ ['src/another.ts', 'export class Another {}'],
36
+ ]);
37
+
38
+ const analysis = CliffDetection.analyzeCliff(old, newFiles);
39
+ expect(analysis.warnings.length).toBe(0);
40
+ });
41
+
42
+ test('calculates risk score', () => {
43
+ const old = new Map([['src/core.ts', 'content']]);
44
+ const newFiles = new Map();
45
+
46
+ const analysis = CliffDetection.analyzeCliff(old, newFiles);
47
+ expect(analysis.riskScore).toBeGreaterThan(0);
48
+ expect(analysis.riskScore).toBeLessThanOrEqual(100);
49
+ });
50
+ });
51
+
52
+ describe('API Removal Detection', () => {
53
+ test('detects removed export', () => {
54
+ const old = new Map([
55
+ ['src/api.ts', 'export class UserAPI {}'],
56
+ ]);
57
+ const newFiles = new Map([
58
+ ['src/api.ts', '// API removed'],
59
+ ]);
60
+
61
+ const analysis = CliffDetection.analyzeCliff(old, newFiles);
62
+ expect(analysis.warnings.length).toBeGreaterThan(0);
63
+ });
64
+
65
+ test('detects removed class definition', () => {
66
+ const old = new Map([
67
+ ['src/models.ts', 'export class User {}'],
68
+ ]);
69
+ const newFiles = new Map([
70
+ ['src/models.ts', '// Use new UserV2'],
71
+ ]);
72
+
73
+ const analysis = CliffDetection.analyzeCliff(old, newFiles);
74
+ expect(analysis.hasCliffs).toBe(true);
75
+ });
76
+ });
77
+
78
+ describe('Security Issue Detection', () => {
79
+ test('detects security TODOs', () => {
80
+ const old = new Map();
81
+ const newFiles = new Map([
82
+ ['src/auth.ts', 'TODO: Fix security vulnerability'],
83
+ ]);
84
+
85
+ const analysis = CliffDetection.analyzeCliff(old, newFiles);
86
+ expect(analysis.warnings.some(w => w.type === 'security-issue')).toBe(true);
87
+ expect(analysis.warnings.some(w => w.severity === 'critical')).toBe(true);
88
+ });
89
+
90
+ test('detects auth issues', () => {
91
+ const old = new Map();
92
+ const newFiles = new Map([
93
+ ['src/auth.ts', 'FIXME: Auth bypass vulnerability found'],
94
+ ]);
95
+
96
+ const analysis = CliffDetection.analyzeCliff(old, newFiles);
97
+ expect(analysis.warnings.some(w => w.type === 'security-issue')).toBe(true);
98
+ });
99
+
100
+ test('detects vulnerability mentions', () => {
101
+ const old = new Map();
102
+ const newFiles = new Map([
103
+ ['src/crypto.ts', 'this is vulnerable to timing attacks'],
104
+ ]);
105
+
106
+ const analysis = CliffDetection.analyzeCliff(old, newFiles);
107
+ expect(analysis.warnings.some(w => w.type === 'security-issue')).toBe(true);
108
+ });
109
+ });
110
+
111
+ describe('Performance Regression Detection', () => {
112
+ test('detects increased loops', () => {
113
+ const old = new Map([
114
+ ['src/process.ts', 'for (let i = 0; i < 10; i++) {}'],
115
+ ]);
116
+ const newFiles = new Map([
117
+ ['src/process.ts', 'for (let i = 0; i < 10; i++) { for (let j = 0; j < 10; j++) { for (let k = 0; k < 10; k++) {} } }'],
118
+ ]);
119
+
120
+ const analysis = CliffDetection.analyzeCliff(old, newFiles);
121
+ expect(analysis.warnings.some(w => w.type === 'performance-regression')).toBe(true);
122
+ });
123
+
124
+ test('detects sync operations', () => {
125
+ const old = new Map([
126
+ ['src/io.ts', 'await readFile(path)'],
127
+ ]);
128
+ const newFiles = new Map([
129
+ ['src/io.ts', 'readFileSync(path)'],
130
+ ]);
131
+
132
+ const analysis = CliffDetection.analyzeCliff(old, newFiles);
133
+ expect(analysis.warnings.some(w => w.type === 'performance-regression')).toBe(true);
134
+ });
135
+ });
136
+
137
+ describe('Risk Level Classification', () => {
138
+ test('classifies safe risk level', () => {
139
+ const level = CliffDetection.getRiskLevel(10);
140
+ expect(level).toBe('safe');
141
+ });
142
+
143
+ test('classifies caution risk level', () => {
144
+ const level = CliffDetection.getRiskLevel(35);
145
+ expect(level).toBe('caution');
146
+ });
147
+
148
+ test('classifies warning risk level', () => {
149
+ const level = CliffDetection.getRiskLevel(65);
150
+ expect(level).toBe('warning');
151
+ });
152
+
153
+ test('classifies dangerous risk level', () => {
154
+ const level = CliffDetection.getRiskLevel(85);
155
+ expect(level).toBe('dangerous');
156
+ });
157
+ });
158
+
159
+ describe('Pre-Checkout Warnings', () => {
160
+ test('warns before unsafe checkout', () => {
161
+ const current = new Map([
162
+ ['src/core.ts', 'current content'],
163
+ ]);
164
+ const target = new Map(); // File removed
165
+
166
+ const analysis = CliffDetection.checkBeforeCheckout(current, target);
167
+ expect(analysis.safeToUpdate).toBe(true); // No critical issues
168
+ expect(analysis.hasCliffs).toBe(true);
169
+ });
170
+
171
+ test('allows safe checkout', () => {
172
+ const current = new Map([
173
+ ['src/core.ts', 'v1 content'],
174
+ ]);
175
+ const target = new Map([
176
+ ['src/core.ts', 'v1.1 content (minor update)'],
177
+ ]);
178
+
179
+ const analysis = CliffDetection.checkBeforeCheckout(current, target);
180
+ expect(analysis.hasCliffs).toBe(false);
181
+ });
182
+ });
183
+
184
+ describe('Pre-Merge Warnings', () => {
185
+ test('warns before unsafe merge', () => {
186
+ const base = new Map([
187
+ ['src/core.ts', 'original'],
188
+ ]);
189
+ const merge = new Map(); // File removed
190
+
191
+ const analysis = CliffDetection.checkBeforeMerge(base, merge);
192
+ expect(analysis.hasCliffs).toBe(true);
193
+ });
194
+ });
195
+
196
+ describe('Report Formatting', () => {
197
+ test('formats clean report', () => {
198
+ const report = {
199
+ timestamp: Date.now(),
200
+ totalOperations: 100,
201
+ averageLatency: 45,
202
+ bottlenecks: [],
203
+ recommendations: [],
204
+ };
205
+
206
+ const formatted = CliffDetection.formatWarnings({
207
+ hasCliffs: false,
208
+ warnings: [],
209
+ safeToUpdate: true,
210
+ riskScore: 0,
211
+ });
212
+
213
+ expect(formatted).toContain('No breaking changes');
214
+ expect(formatted).toContain('Safe to update');
215
+ });
216
+
217
+ test('formats risk warning', () => {
218
+ const formatted = CliffDetection.formatWarnings({
219
+ hasCliffs: true,
220
+ warnings: [
221
+ {
222
+ type: 'breaking-change',
223
+ severity: 'high',
224
+ description: 'Major API change',
225
+ affectedFiles: ['src/api.ts'],
226
+ suggestion: 'Update consumers of this API',
227
+ },
228
+ ],
229
+ safeToUpdate: false,
230
+ riskScore: 75,
231
+ });
232
+
233
+ expect(formatted).toContain('HIGH');
234
+ expect(formatted).toContain('breaking-change');
235
+ expect(formatted).toContain('Major API change');
236
+ });
237
+ });
238
+
239
+ describe('Complex Scenarios', () => {
240
+ test('handles multiple simultaneous issues', () => {
241
+ const old = new Map([
242
+ ['src/api.ts', 'export class API { userGet() {} }'],
243
+ ['src/auth.ts', 'export const authenticate = () => {}'],
244
+ ['src/utils.ts', 'export const helper = () => {}'],
245
+ ]);
246
+
247
+ const newFiles = new Map([
248
+ ['src/api.ts', 'TODO: Fix security vulnerability'],
249
+ ['src/auth.ts', '// Removed (use OAuth instead)'],
250
+ ['src/utils.ts', 'FIXME: Auth bypass'],
251
+ ]);
252
+
253
+ const analysis = CliffDetection.analyzeCliff(old, newFiles);
254
+ expect(analysis.warnings.length).toBeGreaterThan(1);
255
+ expect(analysis.riskScore).toBeGreaterThan(50);
256
+ });
257
+
258
+ test('prioritizes critical issues', () => {
259
+ const old = new Map([
260
+ ['src/core.ts', 'original'],
261
+ ]);
262
+
263
+ const newFiles = new Map([
264
+ ['src/core.ts', 'TODO: Security TODO, performance issue, and removed API'],
265
+ ]);
266
+
267
+ const analysis = CliffDetection.analyzeCliff(old, newFiles);
268
+ const criticalWarnings = analysis.warnings.filter(w => w.severity === 'critical');
269
+ expect(criticalWarnings.length).toBeGreaterThan(0);
270
+ });
271
+ });
272
+ });