@jamesaphoenix/tx-test-utils 0.4.2 → 0.4.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (56)
  1. package/README.md +480 -0
  2. package/dist/adapters/better-sqlite3-adapter.d.ts +36 -0
  3. package/dist/adapters/better-sqlite3-adapter.d.ts.map +1 -0
  4. package/dist/adapters/better-sqlite3-adapter.js +78 -0
  5. package/dist/adapters/better-sqlite3-adapter.js.map +1 -0
  6. package/dist/chaos/chaos-utilities.d.ts +465 -0
  7. package/dist/chaos/chaos-utilities.d.ts.map +1 -0
  8. package/dist/chaos/chaos-utilities.js +793 -0
  9. package/dist/chaos/chaos-utilities.js.map +1 -0
  10. package/dist/chaos/chaos.test.d.ts +9 -0
  11. package/dist/chaos/chaos.test.d.ts.map +1 -0
  12. package/dist/chaos/chaos.test.js +498 -0
  13. package/dist/chaos/chaos.test.js.map +1 -0
  14. package/dist/chaos/index.d.ts +20 -0
  15. package/dist/chaos/index.d.ts.map +1 -0
  16. package/dist/chaos/index.js +39 -0
  17. package/dist/chaos/index.js.map +1 -0
  18. package/dist/database/index.d.ts +1 -1
  19. package/dist/database/index.d.ts.map +1 -1
  20. package/dist/database/index.js +1 -1
  21. package/dist/database/index.js.map +1 -1
  22. package/dist/database/test-database.d.ts +25 -5
  23. package/dist/database/test-database.d.ts.map +1 -1
  24. package/dist/database/test-database.js +142 -11
  25. package/dist/database/test-database.js.map +1 -1
  26. package/dist/factories/index.d.ts +0 -3
  27. package/dist/factories/index.d.ts.map +1 -1
  28. package/dist/factories/index.js +0 -6
  29. package/dist/factories/index.js.map +1 -1
  30. package/dist/factories/learning.factory.js +1 -1
  31. package/dist/factories/learning.factory.js.map +1 -1
  32. package/dist/helpers/index.d.ts +2 -0
  33. package/dist/helpers/index.d.ts.map +1 -1
  34. package/dist/helpers/index.js +4 -0
  35. package/dist/helpers/index.js.map +1 -1
  36. package/dist/helpers/shared-test-layer.d.ts +74 -0
  37. package/dist/helpers/shared-test-layer.d.ts.map +1 -0
  38. package/dist/helpers/shared-test-layer.js +104 -0
  39. package/dist/helpers/shared-test-layer.js.map +1 -0
  40. package/dist/helpers/sqlite-factory.d.ts +49 -0
  41. package/dist/helpers/sqlite-factory.d.ts.map +1 -0
  42. package/dist/helpers/sqlite-factory.js +74 -0
  43. package/dist/helpers/sqlite-factory.js.map +1 -0
  44. package/dist/index.d.ts +6 -2
  45. package/dist/index.d.ts.map +1 -1
  46. package/dist/index.js +30 -8
  47. package/dist/index.js.map +1 -1
  48. package/dist/singleton.d.ts +49 -0
  49. package/dist/singleton.d.ts.map +1 -0
  50. package/dist/singleton.js +66 -0
  51. package/dist/singleton.js.map +1 -0
  52. package/package.json +11 -10
  53. package/dist/factories/factories.test.d.ts +0 -8
  54. package/dist/factories/factories.test.d.ts.map +0 -1
  55. package/dist/factories/factories.test.js +0 -419
  56. package/dist/factories/factories.test.js.map +0 -1
@@ -0,0 +1,793 @@
1
+ /**
2
+ * Chaos Engineering Utilities Implementation
3
+ *
4
+ * Provides controlled failure injection for testing tx resilience.
5
+ *
6
+ * @module @tx/test-utils/chaos/chaos-utilities
7
+ */
8
+ import { fixtureId } from "../fixtures/index.js";
9
+ /**
10
+ * Simulate process death mid-operation.
11
+ *
12
+ * Wraps an async operation and "crashes" after specified time.
13
+ * Useful for testing transaction rollback and partial state handling.
14
+ *
15
+ * @example
16
+ * ```typescript
17
+ * // Test that a transaction rolls back on crash
18
+ * const result = await crashAfter({ ms: 100 }, async () => {
19
+ * await startLongOperation()
20
+ * await sleep(200) // This won't complete
21
+ * })
22
+ * expect(result.completed).toBe(false)
23
+ * ```
24
+ */
25
+ export const crashAfter = async (options, operation) => {
26
+ const startTime = Date.now();
27
+ let completed = false;
28
+ let value;
29
+ let error;
30
+ const crashPromise = sleep(options.ms).then(async () => {
31
+ if (!completed) {
32
+ if (options.beforeCrash) {
33
+ await options.beforeCrash();
34
+ }
35
+ if (options.throwOnCrash) {
36
+ error = new CrashSimulationError(options.ms);
37
+ }
38
+ }
39
+ });
40
+ const operationPromise = (async () => {
41
+ try {
42
+ value = await operation();
43
+ completed = true;
44
+ }
45
+ catch (e) {
46
+ error = e instanceof Error ? e : new Error(String(e));
47
+ }
48
+ })();
49
+ await Promise.race([crashPromise, operationPromise]);
50
+ const elapsedMs = Date.now() - startTime;
51
+ if (options.throwOnCrash && error) {
52
+ throw error;
53
+ }
54
+ return {
55
+ completed,
56
+ elapsedMs,
57
+ value,
58
+ error
59
+ };
60
+ };
61
/**
 * Error raised (when `throwOnCrash` is set) to mark a simulated crash.
 * Carries the configured crash delay in `crashAfterMs`.
 */
export class CrashSimulationError extends Error {
    constructor(ms) {
        super(`Simulated crash after ${ms}ms`);
        this.name = "CrashSimulationError";
        this.crashAfterMs = ms;
    }
}
72
/**
 * Drives a worker row's `last_heartbeat_at` column so tests can simulate a
 * worker going silent (triggering dead-worker detection) and coming back.
 */
export class WorkerHeartbeatController {
    workerId;
    db;
    originalHeartbeat = null;
    killed = false;
    constructor(options) {
        this.workerId = options.workerId;
        this.db = options.db;
    }
    /**
     * Rewind last_heartbeat_at into the past so the worker looks dead.
     * The previous timestamp is remembered so restore() can undo this.
     *
     * @param minutesInPast How many minutes in the past to set the heartbeat (default: 60)
     */
    kill(minutesInPast = 60) {
        if (this.killed) {
            return;
        }
        // Remember the current heartbeat so restore() can put it back.
        const [row] = this.db.query("SELECT last_heartbeat_at FROM workers WHERE id = ?", [this.workerId]);
        if (row) {
            this.originalHeartbeat = new Date(row.last_heartbeat_at);
        }
        const rewound = new Date(Date.now() - minutesInPast * 60 * 1000);
        this.db.run("UPDATE workers SET last_heartbeat_at = ? WHERE id = ?", [rewound.toISOString(), this.workerId]);
        this.killed = true;
    }
    /**
     * Put back the heartbeat timestamp captured by kill().
     */
    restore() {
        if (!this.killed || !this.originalHeartbeat) {
            return;
        }
        this.db.run("UPDATE workers SET last_heartbeat_at = ? WHERE id = ?", [this.originalHeartbeat.toISOString(), this.workerId]);
        this.killed = false;
    }
    /**
     * Stamp the heartbeat with the current time (worker looks alive again).
     */
    revive() {
        this.db.run("UPDATE workers SET last_heartbeat_at = ? WHERE id = ?", [new Date().toISOString(), this.workerId]);
        this.killed = false;
    }
    /**
     * Whether kill() is currently in effect.
     */
    isKilled() {
        return this.killed;
    }
}
127
/**
 * Factory: build a WorkerHeartbeatController for a worker so a test can
 * kill/restore its heartbeat and drive dead-worker detection.
 *
 * @example
 * ```typescript
 * const controller = killHeartbeat({ workerId: 'worker-123', db: testDb })
 * controller.kill(30) // Set heartbeat to 30 minutes ago
 *
 * // Worker should now be detected as dead
 * const deadWorkers = await findDeadWorkers()
 * expect(deadWorkers).toContain('worker-123')
 *
 * controller.restore() // Restore original heartbeat
 * ```
 */
export const killHeartbeat = (options) => new WorkerHeartbeatController(options);
145
/**
 * Spawn n workers that all attempt to claim the same task.
 * Tests claim atomicity and race condition handling.
 *
 * Every worker issues the same atomic INSERT ... WHERE NOT EXISTS used by
 * the production ClaimService, so exactly one insert should succeed.
 * `successfulClaims` counts every insert that reported changes, so a broken
 * atomic insert surfaces directly in the count (not only in `errors`).
 *
 * @example
 * ```typescript
 * const result = await raceWorkers({
 *   count: 5,
 *   taskId: 'tx-abc123',
 *   db: testDb
 * })
 *
 * expect(result.successfulClaims).toBe(1) // Only one winner
 * expect(result.losers.length).toBe(4)
 * ```
 */
export const raceWorkers = async (options) => {
    const { count, taskId, db, delayBetweenMs = 0, leaseDurationMinutes = 30 } = options;
    const workers = [];
    const losers = [];
    const errors = [];
    let winner = null;
    let successfulClaims = 0;
    // Create worker IDs
    for (let i = 0; i < count; i++) {
        workers.push(fixtureId(`race-worker-${i}`));
    }
    // Register all workers first
    const now = new Date();
    for (const workerId of workers) {
        db.run(`INSERT INTO workers (id, name, hostname, pid, status, registered_at, last_heartbeat_at, capabilities, metadata)
             VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`, [workerId, `Worker ${workerId}`, "test-host", process.pid, "idle", now.toISOString(), now.toISOString(), "[]", "{}"]);
    }
    // Race to claim using atomic INSERT ... WHERE NOT EXISTS
    // This matches the production ClaimService.claim implementation
    const claimPromises = workers.map(async (workerId, index) => {
        if (delayBetweenMs > 0 && index > 0) {
            await sleep(delayBetweenMs * index);
        }
        try {
            // Use atomic INSERT ... WHERE NOT EXISTS to prevent race conditions
            // This is the same pattern used in ClaimRepository.tryInsertAtomic
            const leaseExpiresAt = new Date(Date.now() + leaseDurationMinutes * 60 * 1000);
            const result = db.run(`INSERT INTO task_claims (task_id, worker_id, claimed_at, lease_expires_at, renewed_count, status)
             SELECT ?, ?, ?, ?, ?, ?
             WHERE NOT EXISTS (
               SELECT 1 FROM task_claims
               WHERE task_id = ? AND status = 'active'
             )`, [taskId, workerId, now.toISOString(), leaseExpiresAt.toISOString(), 0, "active", taskId]);
            if (result.changes > 0) {
                // BUGFIX: count every successful insert. Previously only the
                // first success was counted, so successfulClaims stayed at 1
                // even when atomicity was violated, defeating the assertion
                // `expect(result.successfulClaims).toBe(1)`.
                successfulClaims++;
                if (winner === null) {
                    winner = workerId;
                }
                else {
                    // This should never happen with atomic inserts
                    errors.push({ workerId, error: "Duplicate successful claim - atomic insert failed!" });
                }
            }
            else {
                // Another worker already claimed
                losers.push(workerId);
            }
        }
        catch (e) {
            losers.push(workerId);
            errors.push({ workerId, error: e instanceof Error ? e.message : String(e) });
        }
    });
    await Promise.all(claimPromises);
    return {
        winner,
        workers,
        losers,
        successfulClaims,
        errors
    };
};
224
/**
 * Inject invalid data into the database for testing validation and recovery.
 *
 * Returns `{ rowId, corrupted }`; `corrupted` is true only when a write was
 * actually issued for the requested corruption type.
 *
 * @example
 * ```typescript
 * // Inject an invalid status
 * const result = corruptState({
 *   table: 'tasks',
 *   type: 'invalid_status',
 *   db: testDb,
 *   rowId: 'tx-abc123'
 * })
 *
 * // Test that validation catches it
 * await expect(taskService.get('tx-abc123')).rejects.toThrow()
 * ```
 */
export const corruptState = (options) => {
    const { table, type, db, rowId, field } = options;
    // `??` keeps fixtureId lazy: it is only invoked when no rowId is supplied.
    const targetId = rowId ?? fixtureId(`corrupted-${table}-${type}`);
    let corrupted = false;
    switch (type) {
        case "null_required_field":
            if (table === "tasks") {
                if (rowId) {
                    db.run(`UPDATE tasks SET title = NULL WHERE id = ?`, [rowId]);
                }
                else {
                    // Insert with null title (bypassing constraint)
                    db.exec(`PRAGMA foreign_keys = OFF`);
                    db.run(`INSERT INTO tasks (id, title, description, status, score, created_at, updated_at, metadata)
               VALUES (?, NULL, '', 'backlog', 0, datetime('now'), datetime('now'), '{}')`, [targetId]);
                    db.exec(`PRAGMA foreign_keys = ON`);
                }
                corrupted = true;
            }
            break;
        case "invalid_status":
            if (table === "tasks") {
                // Use PRAGMA ignore_check_constraints to bypass CHECK constraint on status
                // The pragma is connection-level so it applies to parameterized queries too
                db.exec(`PRAGMA foreign_keys = OFF`);
                db.exec(`PRAGMA ignore_check_constraints = ON`);
                if (rowId) {
                    // Get existing row data, then rewrite it with a bogus status.
                    const existing = db.query("SELECT * FROM tasks WHERE id = ?", [rowId])[0];
                    if (existing) {
                        db.run(`DELETE FROM tasks WHERE id = ?`, [rowId]);
                        db.run(`INSERT INTO tasks (id, title, description, status, score, parent_id, created_at, updated_at, completed_at, metadata)
                 VALUES (?, ?, ?, 'INVALID_STATUS', ?, ?, ?, datetime('now'), ?, ?)`, [rowId, existing.title, existing.description, existing.score, existing.parent_id, existing.created_at, existing.completed_at, existing.metadata]);
                    }
                }
                else {
                    db.run(`INSERT INTO tasks (id, title, description, status, score, created_at, updated_at, metadata)
               VALUES (?, 'Corrupted Task', '', 'INVALID_STATUS', 0, datetime('now'), datetime('now'), '{}')`, [targetId]);
                }
                db.exec(`PRAGMA ignore_check_constraints = OFF`);
                db.exec(`PRAGMA foreign_keys = ON`);
                corrupted = true;
            }
            break;
        case "invalid_json":
            if (table === "tasks") {
                if (rowId) {
                    db.run(`UPDATE tasks SET metadata = 'not valid json {' WHERE id = ?`, [rowId]);
                }
                else {
                    db.run(`INSERT INTO tasks (id, title, description, status, score, created_at, updated_at, metadata)
               VALUES (?, 'Task with bad JSON', '', 'backlog', 0, datetime('now'), datetime('now'), 'not valid json {')`, [targetId]);
                }
                corrupted = true;
            }
            break;
        case "truncated_string":
            if (table === "tasks" && field === "title") {
                const truncated = "A"; // Single character truncated title
                if (rowId) {
                    db.run(`UPDATE tasks SET title = ? WHERE id = ?`, [truncated, rowId]);
                    // BUGFIX: only report corruption when a row was actually
                    // updated. Previously `corrupted` was set to true even
                    // when no rowId was given and nothing was written.
                    corrupted = true;
                }
            }
            break;
        case "future_timestamp":
            if (table === "tasks") {
                const futureDate = new Date(Date.now() + 365 * 24 * 60 * 60 * 1000).toISOString(); // 1 year in future
                if (rowId) {
                    db.run(`UPDATE tasks SET created_at = ? WHERE id = ?`, [futureDate, rowId]);
                }
                else {
                    db.run(`INSERT INTO tasks (id, title, description, status, score, created_at, updated_at, metadata)
               VALUES (?, 'Future Task', '', 'backlog', 0, ?, ?, '{}')`, [targetId, futureDate, futureDate]);
                }
                corrupted = true;
            }
            break;
        case "negative_score":
            if (table === "tasks") {
                if (rowId) {
                    db.run(`UPDATE tasks SET score = -1000 WHERE id = ?`, [rowId]);
                }
                else {
                    db.run(`INSERT INTO tasks (id, title, description, status, score, created_at, updated_at, metadata)
               VALUES (?, 'Negative Score Task', '', 'backlog', -1000, datetime('now'), datetime('now'), '{}')`, [targetId]);
                }
                corrupted = true;
            }
            break;
        case "orphaned_dependency":
            if (table === "task_dependencies") {
                // Point a dependency at a blocker that does not exist.
                const nonExistentId = fixtureId("non-existent-task");
                db.exec(`PRAGMA foreign_keys = OFF`);
                db.run(`INSERT INTO task_dependencies (blocker_id, blocked_id, created_at)
             VALUES (?, ?, datetime('now'))`, [nonExistentId, targetId]);
                db.exec(`PRAGMA foreign_keys = ON`);
                corrupted = true;
            }
            break;
        case "self_reference":
            if (table === "tasks") {
                if (rowId) {
                    db.exec(`PRAGMA foreign_keys = OFF`);
                    db.run(`UPDATE tasks SET parent_id = ? WHERE id = ?`, [rowId, rowId]);
                    db.exec(`PRAGMA foreign_keys = ON`);
                }
                else {
                    db.exec(`PRAGMA foreign_keys = OFF`);
                    db.run(`INSERT INTO tasks (id, title, description, status, score, parent_id, created_at, updated_at, metadata)
               VALUES (?, 'Self-referencing Task', '', 'backlog', 0, ?, datetime('now'), datetime('now'), '{}')`, [targetId, targetId]);
                    db.exec(`PRAGMA foreign_keys = ON`);
                }
                corrupted = true;
            }
            break;
    }
    return { rowId: targetId, corrupted };
};
360
/**
 * Deterministically replay a JSONL sync log against the database.
 * Useful for testing sync conflict resolution and data migration.
 *
 * Operations are sorted by timestamp before replay so the same log always
 * produces the same final state. Counter fields only count rows the
 * database actually changed.
 *
 * @example
 * ```typescript
 * const jsonl = `
 * {"v":1,"op":"upsert","ts":"2024-01-01T00:00:00Z","id":"tx-abc123","data":{"title":"Task 1","status":"backlog","score":500}}
 * {"v":1,"op":"upsert","ts":"2024-01-02T00:00:00Z","id":"tx-abc123","data":{"title":"Task 1 Updated","status":"active","score":600}}
 * `
 *
 * const result = await replayJSONL({ db: testDb, content: jsonl })
 * expect(result.tasksUpdated).toBe(1)
 * ```
 */
export const replayJSONL = (options) => {
    const { db, content, clearFirst = false } = options;
    const result = {
        opsReplayed: 0,
        tasksCreated: 0,
        tasksUpdated: 0,
        tasksDeleted: 0,
        depsAdded: 0,
        depsRemoved: 0,
        errors: []
    };
    // Clear if requested
    if (clearFirst) {
        db.exec("DELETE FROM task_dependencies");
        db.exec("DELETE FROM tasks");
    }
    // Parse operations: a string is treated as JSONL, anything else as an
    // iterable of already-parsed operation objects.
    let ops;
    if (typeof content === "string") {
        const lines = content.trim().split("\n").filter(Boolean);
        ops = lines.map((line, idx) => {
            try {
                return JSON.parse(line);
            }
            catch {
                result.errors.push(`Line ${idx + 1}: Invalid JSON`);
                return null;
            }
        }).filter((op) => op !== null);
    }
    else {
        ops = [...content];
    }
    // Sort by timestamp for deterministic replay
    ops.sort((a, b) => a.ts.localeCompare(b.ts));
    // Replay each operation
    for (const op of ops) {
        try {
            switch (op.op) {
                case "upsert":
                    if (op.id && op.data) {
                        const existing = db.query("SELECT id FROM tasks WHERE id = ?", [op.id])[0];
                        if (existing) {
                            db.run(`UPDATE tasks SET title = ?, description = ?, status = ?, score = ?, parent_id = ?, updated_at = ?, metadata = ? WHERE id = ?`, [
                                op.data.title ?? "",
                                op.data.description ?? "",
                                op.data.status ?? "backlog",
                                op.data.score ?? 0,
                                op.data.parentId ?? null,
                                op.ts,
                                JSON.stringify(op.data.metadata ?? {}),
                                op.id
                            ]);
                            result.tasksUpdated++;
                        }
                        else {
                            db.run(`INSERT INTO tasks (id, title, description, status, score, parent_id, created_at, updated_at, metadata)
                   VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`, [
                                op.id,
                                op.data.title ?? "",
                                op.data.description ?? "",
                                op.data.status ?? "backlog",
                                op.data.score ?? 0,
                                op.data.parentId ?? null,
                                op.ts,
                                op.ts,
                                JSON.stringify(op.data.metadata ?? {})
                            ]);
                            result.tasksCreated++;
                        }
                    }
                    break;
                case "delete":
                    if (op.id) {
                        const deleteResult = db.run("DELETE FROM tasks WHERE id = ?", [op.id]);
                        if (deleteResult.changes > 0) {
                            result.tasksDeleted++;
                        }
                    }
                    break;
                case "dep_add":
                    if (op.blockerId && op.blockedId) {
                        try {
                            const addResult = db.run(`INSERT OR IGNORE INTO task_dependencies (blocker_id, blocked_id, created_at)
                     VALUES (?, ?, ?)`, [op.blockerId, op.blockedId, op.ts]);
                            // BUGFIX: only count rows actually inserted.
                            // INSERT OR IGNORE reports 0 changes for a
                            // duplicate, mirroring the changes check used by
                            // dep_remove below.
                            if (addResult.changes > 0) {
                                result.depsAdded++;
                            }
                        }
                        catch {
                            // Ignore constraint violations
                        }
                    }
                    break;
                case "dep_remove":
                    if (op.blockerId && op.blockedId) {
                        const removeResult = db.run("DELETE FROM task_dependencies WHERE blocker_id = ? AND blocked_id = ?", [op.blockerId, op.blockedId]);
                        if (removeResult.changes > 0) {
                            result.depsRemoved++;
                        }
                    }
                    break;
            }
            result.opsReplayed++;
        }
        catch (e) {
            result.errors.push(`Op ${op.op} ${op.id ?? op.blockerId}: ${e instanceof Error ? e.message : String(e)}`);
        }
    }
    return result;
};
484
/**
 * Attempt to complete an already-completed task.
 * Tests idempotency and double-completion handling.
 *
 * @example
 * ```typescript
 * const result = await doubleComplete({
 *   taskId: 'tx-abc123',
 *   db: testDb
 * })
 *
 * expect(result.firstCompleted).toBe(true)
 * expect(result.secondCompleted).toBe(false) // Should be idempotent
 * ```
 */
export const doubleComplete = (options) => {
    const { taskId, db } = options;
    const [task] = db.query("SELECT status FROM tasks WHERE id = ?", [taskId]);
    if (!task) {
        return {
            firstCompleted: false,
            secondCompleted: false,
            secondError: "Task not found",
            originalStatus: "unknown",
            finalStatus: "unknown"
        };
    }
    const originalStatus = task.status;
    let secondCompleted = false;
    let secondError;
    // First completion: mark done unless it already is; either way the first
    // completion is considered successful.
    if (originalStatus !== "done") {
        db.run("UPDATE tasks SET status = 'done', completed_at = datetime('now'), updated_at = datetime('now') WHERE id = ?", [taskId]);
    }
    const firstCompleted = true;
    // Second completion attempt: redo the update and see whether completed_at
    // moved. A moved timestamp means the second completion "worked", which may
    // be a bug depending on the intended idempotency semantics.
    const [afterFirst] = db.query("SELECT status, completed_at FROM tasks WHERE id = ?", [taskId]);
    if (afterFirst?.status === "done") {
        const previousCompletedAt = afterFirst.completed_at;
        db.run("UPDATE tasks SET status = 'done', completed_at = datetime('now'), updated_at = datetime('now') WHERE id = ?", [taskId]);
        const [afterSecond] = db.query("SELECT completed_at FROM tasks WHERE id = ?", [taskId]);
        if (afterSecond?.completed_at !== previousCompletedAt) {
            secondCompleted = true;
            secondError = "Warning: completed_at was updated on second completion";
        }
    }
    // Report the task's final status after both attempts.
    const [finalRow] = db.query("SELECT status FROM tasks WHERE id = ?", [taskId]);
    return {
        firstCompleted,
        secondCompleted,
        secondError,
        originalStatus,
        finalStatus: finalRow?.status ?? "unknown"
    };
};
550
/**
 * Simulate an interrupted database write operation.
 * Tests transaction handling and partial failure recovery.
 *
 * @example
 * ```typescript
 * const result = partialWrite({
 *   table: 'tasks',
 *   db: testDb,
 *   rowCount: 10,
 *   failAtRow: 5,
 *   useTransaction: true
 * })
 *
 * // With transaction, should rollback all
 * expect(result.rolledBack).toBe(true)
 * expect(result.rowsWritten).toBe(0)
 * ```
 */
export const partialWrite = (options) => {
    const { table, db, rowCount, failAtRow, useTransaction = false } = options;
    const outcome = {
        rowsWritten: 0,
        rowsFailed: 0,
        rolledBack: false,
        writtenIds: [],
        error: undefined
    };
    // Insert a single row, or throw when we reach the designated failure row.
    const insertRow = (rowIndex) => {
        const rowId = fixtureId(`partial-write-${table}-${rowIndex}`);
        if (rowIndex === failAtRow) {
            throw new Error(`Simulated failure at row ${rowIndex}`);
        }
        if (table === "tasks") {
            db.run(`INSERT INTO tasks (id, title, description, status, score, created_at, updated_at, metadata)
               VALUES (?, ?, '', 'backlog', 0, datetime('now'), datetime('now'), '{}')`, [rowId, `Partial Write Task ${rowIndex}`]);
        }
        else if (table === "learnings") {
            db.run(`INSERT INTO learnings (id, content, source, confidence, category, created_at, updated_at)
               VALUES (?, ?, 'test', 0.5, 'test', datetime('now'), datetime('now'))`, [rowId, `Learning ${rowIndex}`]);
        }
        return rowId;
    };
    if (useTransaction) {
        try {
            db.transaction(() => {
                for (let row = 1; row <= rowCount; row++) {
                    outcome.writtenIds.push(insertRow(row));
                    outcome.rowsWritten++;
                }
            });
        }
        catch (e) {
            // The transaction rolled everything back: record the failure and
            // reset the written counters to reflect the empty table.
            outcome.error = e instanceof Error ? e.message : String(e);
            outcome.rowsFailed = rowCount - outcome.rowsWritten;
            outcome.rolledBack = true;
            outcome.writtenIds = [];
            outcome.rowsWritten = 0;
        }
        return outcome;
    }
    // Without a transaction, rows written before the failure stay committed.
    for (let row = 1; row <= rowCount; row++) {
        try {
            outcome.writtenIds.push(insertRow(row));
            outcome.rowsWritten++;
        }
        catch (e) {
            outcome.error = e instanceof Error ? e.message : String(e);
            outcome.rowsFailed = rowCount - row + 1;
            break;
        }
    }
    return outcome;
};
627
/**
 * Perform a claim with artificial delay to test race conditions.
 * Useful for testing claim conflict resolution.
 *
 * @example
 * ```typescript
 * // Start a delayed claim
 * const delayedPromise = delayedClaim({
 *   taskId: 'tx-abc123',
 *   workerId: 'slow-worker',
 *   db: testDb,
 *   delayMs: 100,
 *   checkRace: true
 * })
 *
 * // Another worker claims immediately
 * await claimTask('tx-abc123', 'fast-worker')
 *
 * // Delayed claim should detect the race
 * const result = await delayedPromise
 * expect(result.raceDetected).toBe(true)
 * expect(result.claimed).toBe(false)
 * ```
 */
export const delayedClaim = async (options) => {
    const { taskId, workerId, db, delayMs, checkRace = true } = options;
    const startedAt = Date.now();
    // Snapshot whether an active claim already existed before we waited.
    let claimExistedBefore = false;
    if (checkRace) {
        claimExistedBefore = db.query("SELECT worker_id FROM task_claims WHERE task_id = ? AND status = 'active'", [taskId]).length > 0;
    }
    // Artificial delay during which another worker may steal the claim.
    await new Promise((resolve) => setTimeout(resolve, delayMs));
    let raceDetected = false;
    let claimedBy = null;
    let claimed = false;
    const [current] = db.query("SELECT worker_id FROM task_claims WHERE task_id = ? AND status = 'active'", [taskId]);
    if (current) {
        // Someone holds the claim; it's a race only if it appeared during our
        // delay and belongs to a different worker.
        claimedBy = current.worker_id;
        raceDetected = !claimExistedBefore && current.worker_id !== workerId;
    }
    else {
        // Attempt to claim using atomic INSERT ... WHERE NOT EXISTS
        try {
            const leaseExpiresAt = new Date(Date.now() + 30 * 60 * 1000);
            const insert = db.run(`INSERT INTO task_claims (task_id, worker_id, claimed_at, lease_expires_at, renewed_count, status)
             SELECT ?, ?, datetime('now'), ?, 0, 'active'
             WHERE NOT EXISTS (
               SELECT 1 FROM task_claims
               WHERE task_id = ? AND status = 'active'
             )`, [taskId, workerId, leaseExpiresAt.toISOString(), taskId]);
            if (insert.changes > 0) {
                claimed = true;
                claimedBy = workerId;
            }
            else {
                // Another worker slipped in between our check and the insert.
                const [finalRow] = db.query("SELECT worker_id FROM task_claims WHERE task_id = ? AND status = 'active'", [taskId]);
                if (finalRow) {
                    claimedBy = finalRow.worker_id;
                    raceDetected = finalRow.worker_id !== workerId;
                }
            }
        }
        catch {
            // The insert itself failed; see who holds the claim now.
            const [finalRow] = db.query("SELECT worker_id FROM task_claims WHERE task_id = ? AND status = 'active'", [taskId]);
            if (finalRow) {
                claimedBy = finalRow.worker_id;
                raceDetected = finalRow.worker_id !== workerId;
            }
        }
    }
    return {
        claimed,
        raceDetected,
        claimedBy,
        waitedMs: Date.now() - startedAt
    };
};
711
/**
 * Create a large number of tasks quickly for stress testing.
 * Tests bulk operations and performance under load.
 *
 * @example
 * ```typescript
 * const result = stressLoad({
 *   taskCount: 10000,
 *   db: testDb,
 *   withDependencies: true,
 *   dependencyRatio: 0.3
 * })
 *
 * console.log(`Created ${result.tasksCreated} tasks in ${result.elapsedMs}ms`)
 * console.log(`Rate: ${result.tasksPerSecond.toFixed(0)} tasks/sec`)
 * ```
 */
export const stressLoad = (options) => {
    const { taskCount, db, withDependencies = false, dependencyRatio = 0.2, batchSize = 1000, mixedStatuses = false } = options;
    const begunAt = Date.now();
    const taskIds = [];
    let depsCreated = 0;
    const statusPool = mixedStatuses
        ? ["backlog", "ready", "planning", "active", "blocked", "review", "done"]
        : ["backlog"];
    // Insert tasks batch-by-batch, one transaction per batch.
    const totalBatches = Math.ceil(taskCount / batchSize);
    for (let batchIndex = 0; batchIndex < totalBatches; batchIndex++) {
        const first = batchIndex * batchSize;
        const last = Math.min(first + batchSize, taskCount);
        db.transaction(() => {
            for (let i = first; i < last; i++) {
                const id = fixtureId(`stress-task-${i}`);
                const status = statusPool[i % statusPool.length];
                const score = Math.floor(Math.random() * 1000);
                db.run(`INSERT INTO tasks (id, title, description, status, score, created_at, updated_at, metadata)
                 VALUES (?, ?, '', ?, ?, datetime('now'), datetime('now'), '{}')`, [id, `Stress Task ${i}`, status, score]);
                taskIds.push(id);
            }
        });
    }
    // Optionally wire up random dependencies between distinct tasks.
    if (withDependencies && taskIds.length > 1) {
        const depTarget = Math.floor(taskIds.length * dependencyRatio);
        db.transaction(() => {
            for (let d = 0; d < depTarget; d++) {
                const blockerIdx = Math.floor(Math.random() * taskIds.length);
                let blockedIdx = Math.floor(Math.random() * taskIds.length);
                while (blockedIdx === blockerIdx) {
                    blockedIdx = Math.floor(Math.random() * taskIds.length);
                }
                try {
                    const inserted = db.run(`INSERT OR IGNORE INTO task_dependencies (blocker_id, blocked_id, created_at)
                     VALUES (?, ?, datetime('now'))`, [taskIds[blockerIdx], taskIds[blockedIdx]]);
                    if (inserted.changes > 0) {
                        depsCreated++;
                    }
                }
                catch {
                    // Ignore constraint violations (cycles, duplicates)
                }
            }
        });
    }
    const elapsedMs = Date.now() - begunAt;
    return {
        tasksCreated: taskIds.length,
        depsCreated,
        elapsedMs,
        tasksPerSecond: taskIds.length / (elapsedMs / 1000),
        taskIds
    };
};
784
// =============================================================================
// Helper Functions
// =============================================================================
/**
 * Resolve after roughly `ms` milliseconds (setTimeout-based delay).
 */
const sleep = (ms) => new Promise((resolve) => {
    setTimeout(resolve, ms);
});
793
+ //# sourceMappingURL=chaos-utilities.js.map