@dotdo/postgres 0.1.0 → 0.1.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35)
  1. package/dist/backup/backup-manager.d.ts +244 -0
  2. package/dist/backup/backup-manager.d.ts.map +1 -0
  3. package/dist/backup/backup-manager.js +726 -0
  4. package/dist/backup/backup-manager.js.map +1 -0
  5. package/dist/observability/production-metrics.d.ts +318 -0
  6. package/dist/observability/production-metrics.d.ts.map +1 -0
  7. package/dist/observability/production-metrics.js +747 -0
  8. package/dist/observability/production-metrics.js.map +1 -0
  9. package/dist/pglite-assets/pglite.data +0 -0
  10. package/dist/pglite-assets/pglite.wasm +0 -0
  11. package/dist/pitr/pitr-manager.d.ts +240 -0
  12. package/dist/pitr/pitr-manager.d.ts.map +1 -0
  13. package/dist/pitr/pitr-manager.js +837 -0
  14. package/dist/pitr/pitr-manager.js.map +1 -0
  15. package/dist/streaming/cdc-iceberg-connector.d.ts +1 -1
  16. package/dist/streaming/cdc-iceberg-connector.js +1 -1
  17. package/dist/streaming/live-cdc-stream.d.ts +1 -1
  18. package/dist/streaming/live-cdc-stream.js +1 -1
  19. package/dist/worker/auth.d.ts.map +1 -1
  20. package/dist/worker/auth.js +16 -6
  21. package/dist/worker/auth.js.map +1 -1
  22. package/dist/worker/entry.d.ts.map +1 -1
  23. package/dist/worker/entry.js +108 -26
  24. package/dist/worker/entry.js.map +1 -1
  25. package/package.json +7 -6
  26. package/src/__tests__/backup.test.ts +944 -0
  27. package/src/__tests__/observability.test.ts +1089 -0
  28. package/src/__tests__/pitr.test.ts +1240 -0
  29. package/src/backup/backup-manager.ts +1006 -0
  30. package/src/observability/production-metrics.ts +1054 -0
  31. package/src/pglite-assets/pglite.data +0 -0
  32. package/src/pglite-assets/pglite.wasm +0 -0
  33. package/src/pitr/pitr-manager.ts +1136 -0
  34. package/src/worker/auth.ts +17 -6
  35. package/src/worker/entry.ts +112 -30
package/src/__tests__/backup.test.ts (new file)
@@ -0,0 +1,944 @@
+ /**
+  * Tests for Automated Backup to R2
+  * Task: postgres-7yr6.1 - Production readiness: Automated backup to R2
+  *
+  * RED phase TDD - These tests define the expected API surface for:
+  * - Scheduled backup creation
+  * - Incremental backup support
+  * - Backup manifest management
+  * - Restore from backup
+  * - Backup lifecycle (retention, pruning)
+  * - Error handling and edge cases
+  */
+
+ import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest'
+
+ // These imports define the expected API surface - they will fail until implemented
+ import {
+   BackupManager,
+   createBackupManager,
+   type BackupConfig,
+   type BackupManifest,
+   type BackupEntry,
+   type BackupSchedule,
+   type BackupResult,
+   type RestoreResult,
+   type BackupStats,
+   type IncrementalBackupState,
+   type BackupRetentionPolicy,
+ } from '../backup/backup-manager'
+
+ // =============================================================================
+ // Mock Setup
+ // =============================================================================
+
+ const createMockR2Bucket = () => ({
+   get: vi.fn(),
+   put: vi.fn().mockResolvedValue(undefined),
+   delete: vi.fn().mockResolvedValue(undefined),
+   list: vi.fn().mockResolvedValue({ objects: [], truncated: false }),
+   head: vi.fn(),
+ })
+
+ const createMockDOStorage = () => ({
+   get: vi.fn(),
+   put: vi.fn().mockResolvedValue(undefined),
+   delete: vi.fn().mockResolvedValue(undefined),
+   list: vi.fn().mockResolvedValue(new Map()),
+ })
+
+ const createMockPGLite = () => ({
+   query: vi.fn().mockResolvedValue({ rows: [] }),
+   exec: vi.fn().mockResolvedValue(undefined),
+   close: vi.fn().mockResolvedValue(undefined),
+ })
+
+ // =============================================================================
+ // Tests: BackupManager Creation
+ // =============================================================================
+
+ describe('BackupManager', () => {
+   let mockBucket: ReturnType<typeof createMockR2Bucket>
+   let mockStorage: ReturnType<typeof createMockDOStorage>
+   let mockPGLite: ReturnType<typeof createMockPGLite>
+
+   beforeEach(() => {
+     vi.useFakeTimers()
+     mockBucket = createMockR2Bucket()
+     mockStorage = createMockDOStorage()
+     mockPGLite = createMockPGLite()
+   })
+
+   afterEach(() => {
+     vi.useRealTimers()
+     vi.clearAllMocks()
+   })
+
+   describe('createBackupManager()', () => {
+     it('should create a BackupManager instance with minimal config', () => {
+       const manager = createBackupManager({
+         bucket: mockBucket as unknown as R2Bucket,
+         doId: 'test-do-123',
+         prefix: 'backups/',
+       })
+       expect(manager).toBeInstanceOf(BackupManager)
+     })
+
+     it('should create a BackupManager with full configuration', () => {
+       const config: BackupConfig = {
+         bucket: mockBucket as unknown as R2Bucket,
+         doId: 'test-do-123',
+         prefix: 'backups/',
+         schedule: {
+           intervalMs: 3600000, // 1 hour
+           type: 'incremental',
+           fullBackupIntervalMs: 86400000, // 24 hours
+         },
+         retention: {
+           maxBackups: 30,
+           maxAgeDays: 7,
+           keepMinFullBackups: 3,
+         },
+         compression: true,
+         checksumAlgorithm: 'sha-256',
+         maxConcurrentUploads: 3,
+       }
+       const manager = createBackupManager(config)
+       expect(manager).toBeInstanceOf(BackupManager)
+     })
+
+     it('should throw if bucket is not provided', () => {
+       expect(() =>
+         createBackupManager({
+           bucket: undefined as unknown as R2Bucket,
+           doId: 'test-do-123',
+           prefix: 'backups/',
+         })
+       ).toThrow()
+     })
+
+     it('should throw if doId is empty', () => {
+       expect(() =>
+         createBackupManager({
+           bucket: mockBucket as unknown as R2Bucket,
+           doId: '',
+           prefix: 'backups/',
+         })
+       ).toThrow()
+     })
+   })
+
+   // ===========================================================================
+   // Tests: Full Backup
+   // ===========================================================================
+
+   describe('Full Backup', () => {
+     let manager: InstanceType<typeof BackupManager>
+
+     beforeEach(() => {
+       manager = createBackupManager({
+         bucket: mockBucket as unknown as R2Bucket,
+         doId: 'test-do-123',
+         prefix: 'backups/',
+       })
+     })
+
+     it('should create a full backup of the database', async () => {
+       mockPGLite.query.mockResolvedValueOnce({
+         rows: [{ datname: 'test' }],
+       })
+
+       const result = await manager.createFullBackup(mockPGLite as any)
+
+       expect(result.success).toBe(true)
+       expect(result.type).toBe('full')
+       expect(result.backupId).toBeDefined()
+       expect(result.sizeBytes).toBeGreaterThan(0)
+       expect(result.timestamp).toBeDefined()
+     })
+
+     it('should store backup data in R2 with correct key structure', async () => {
+       await manager.createFullBackup(mockPGLite as any)
+
+       expect(mockBucket.put).toHaveBeenCalled()
+       const putCall = mockBucket.put.mock.calls[0]
+       expect(putCall[0]).toMatch(/^backups\/test-do-123\/full\//)
+     })
+
+     it('should include checksum in backup metadata', async () => {
+       const result = await manager.createFullBackup(mockPGLite as any)
+
+       expect(result.checksum).toBeDefined()
+       expect(result.checksumAlgorithm).toBe('sha-256')
+     })
+
+     it('should compress backup data when compression is enabled', async () => {
+       const compressedManager = createBackupManager({
+         bucket: mockBucket as unknown as R2Bucket,
+         doId: 'test-do-123',
+         prefix: 'backups/',
+         compression: true,
+       })
+
+       const result = await compressedManager.createFullBackup(mockPGLite as any)
+
+       expect(result.compressed).toBe(true)
+       expect(result.uncompressedSizeBytes).toBeGreaterThanOrEqual(result.sizeBytes)
+     })
+
+     it('should handle backup failure gracefully', async () => {
+       mockBucket.put.mockRejectedValueOnce(new Error('R2 write failed'))
+
+       const result = await manager.createFullBackup(mockPGLite as any)
+
+       expect(result.success).toBe(false)
+       expect(result.error).toContain('R2 write failed')
+     })
+
+     it('should capture all tables in the full backup', async () => {
+       mockPGLite.query
+         .mockResolvedValueOnce({
+           rows: [
+             { tablename: 'users', schemaname: 'public' },
+             { tablename: 'orders', schemaname: 'public' },
+             { tablename: 'products', schemaname: 'public' },
+           ],
+         })
+
+       const result = await manager.createFullBackup(mockPGLite as any)
+
+       expect(result.tables).toContain('users')
+       expect(result.tables).toContain('orders')
+       expect(result.tables).toContain('products')
+     })
+
+     it('should generate a unique backup ID for each backup', async () => {
+       const result1 = await manager.createFullBackup(mockPGLite as any)
+       const result2 = await manager.createFullBackup(mockPGLite as any)
+
+       expect(result1.backupId).not.toBe(result2.backupId)
+     })
+
+     it('should record backup duration', async () => {
+       const result = await manager.createFullBackup(mockPGLite as any)
+
+       expect(result.durationMs).toBeDefined()
+       expect(result.durationMs).toBeGreaterThanOrEqual(0)
+     })
+   })
+
+   // ===========================================================================
+   // Tests: Incremental Backup
+   // ===========================================================================
+
+   describe('Incremental Backup', () => {
+     let manager: InstanceType<typeof BackupManager>
+
+     beforeEach(() => {
+       manager = createBackupManager({
+         bucket: mockBucket as unknown as R2Bucket,
+         doId: 'test-do-123',
+         prefix: 'backups/',
+       })
+     })
+
+     it('should create an incremental backup based on last full backup', async () => {
+       // First create a full backup
+       await manager.createFullBackup(mockPGLite as any)
+
+       // Then create an incremental backup
+       const result = await manager.createIncrementalBackup(mockPGLite as any)
+
+       expect(result.success).toBe(true)
+       expect(result.type).toBe('incremental')
+       expect(result.baseBackupId).toBeDefined()
+     })
+
+     it('should only include changed data since last backup', async () => {
+       await manager.createFullBackup(mockPGLite as any)
+
+       // Simulate some changes
+       mockPGLite.query.mockResolvedValueOnce({
+         rows: [{ lsn: '0/1000' }], // Current WAL position
+       })
+
+       const result = await manager.createIncrementalBackup(mockPGLite as any)
+
+       expect(result.sizeBytes).toBeDefined()
+       // Incremental should be smaller than full backup
+       expect(result.changedPages).toBeDefined()
+     })
+
+     it('should fail if no base backup exists', async () => {
+       const result = await manager.createIncrementalBackup(mockPGLite as any)
+
+       expect(result.success).toBe(false)
+       expect(result.error).toContain('No base backup found')
+     })
+
+     it('should track incremental state between backups', async () => {
+       await manager.createFullBackup(mockPGLite as any)
+
+       const state = await manager.getIncrementalState()
+
+       expect(state).toBeDefined()
+       expect(state.lastBackupLsn).toBeDefined()
+       expect(state.lastBackupTimestamp).toBeDefined()
+       expect(state.baseBackupId).toBeDefined()
+     })
+
+     it('should chain incremental backups correctly', async () => {
+       await manager.createFullBackup(mockPGLite as any)
+
+       const inc1 = await manager.createIncrementalBackup(mockPGLite as any)
+       const inc2 = await manager.createIncrementalBackup(mockPGLite as any)
+
+       expect(inc1.backupId).not.toBe(inc2.backupId)
+       expect(inc2.baseBackupId).toBe(inc1.backupId)
+     })
+
+     it('should store incremental backup with correct key structure', async () => {
+       await manager.createFullBackup(mockPGLite as any)
+       await manager.createIncrementalBackup(mockPGLite as any)
+
+       const putCalls = mockBucket.put.mock.calls
+       const incrementalCall = putCalls.find((call: any) =>
+         (call[0] as string).includes('incremental')
+       )
+       expect(incrementalCall).toBeDefined()
+       expect(incrementalCall![0]).toMatch(/^backups\/test-do-123\/incremental\//)
+     })
+
+     it('should include parent chain in incremental backup metadata', async () => {
+       await manager.createFullBackup(mockPGLite as any)
+       const inc1 = await manager.createIncrementalBackup(mockPGLite as any)
+       const inc2 = await manager.createIncrementalBackup(mockPGLite as any)
+
+       expect(inc2.parentChain).toBeDefined()
+       expect(inc2.parentChain!.length).toBeGreaterThanOrEqual(2)
+     })
+   })
+
+   // ===========================================================================
+   // Tests: Backup Manifest
+   // ===========================================================================
+
+   describe('Backup Manifest', () => {
+     let manager: InstanceType<typeof BackupManager>
+
+     beforeEach(() => {
+       manager = createBackupManager({
+         bucket: mockBucket as unknown as R2Bucket,
+         doId: 'test-do-123',
+         prefix: 'backups/',
+       })
+     })
+
+     it('should create and update the backup manifest after each backup', async () => {
+       await manager.createFullBackup(mockPGLite as any)
+
+       const manifest = await manager.getManifest()
+
+       expect(manifest).toBeDefined()
+       expect(manifest.doId).toBe('test-do-123')
+       expect(manifest.entries.length).toBe(1)
+       expect(manifest.version).toBeDefined()
+     })
+
+     it('should store manifest in R2 at a known location', async () => {
+       await manager.createFullBackup(mockPGLite as any)
+
+       const manifestKey = 'backups/test-do-123/manifest.json'
+       const putCalls = mockBucket.put.mock.calls
+       const manifestPut = putCalls.find((call: any) =>
+         (call[0] as string).includes('manifest.json')
+       )
+       expect(manifestPut).toBeDefined()
+     })
+
+     it('should include backup chain information in manifest', async () => {
+       await manager.createFullBackup(mockPGLite as any)
+       await manager.createIncrementalBackup(mockPGLite as any)
+       await manager.createIncrementalBackup(mockPGLite as any)
+
+       const manifest = await manager.getManifest()
+
+       expect(manifest.entries.length).toBe(3)
+       expect(manifest.entries[0].type).toBe('full')
+       expect(manifest.entries[1].type).toBe('incremental')
+       expect(manifest.entries[2].type).toBe('incremental')
+     })
+
+     it('should track total backup size in manifest', async () => {
+       await manager.createFullBackup(mockPGLite as any)
+       await manager.createIncrementalBackup(mockPGLite as any)
+
+       const manifest = await manager.getManifest()
+
+       expect(manifest.totalSizeBytes).toBeGreaterThan(0)
+     })
+
+     it('should validate manifest integrity', async () => {
+       await manager.createFullBackup(mockPGLite as any)
+
+       const isValid = await manager.validateManifest()
+
+       expect(isValid).toBe(true)
+     })
+
+     it('should detect corrupted manifest', async () => {
+       // Simulate corrupted manifest in R2
+       mockBucket.get.mockResolvedValueOnce({
+         text: () => Promise.resolve('invalid json {{{'),
+         bytes: () => Promise.resolve(new Uint8Array()),
+       })
+
+       const isValid = await manager.validateManifest()
+
+       expect(isValid).toBe(false)
+     })
+
+     it('should list all backups from manifest', async () => {
+       await manager.createFullBackup(mockPGLite as any)
+       await manager.createIncrementalBackup(mockPGLite as any)
+
+       const backups = await manager.listBackups()
+
+       expect(backups.length).toBe(2)
+       expect(backups[0].backupId).toBeDefined()
+       expect(backups[0].timestamp).toBeDefined()
+       expect(backups[0].type).toBeDefined()
+       expect(backups[0].sizeBytes).toBeDefined()
+     })
+
+     it('should include manifest checksum for integrity verification', async () => {
+       await manager.createFullBackup(mockPGLite as any)
+
+       const manifest = await manager.getManifest()
+
+       expect(manifest.checksum).toBeDefined()
+     })
+   })
+
+   // ===========================================================================
+   // Tests: Restore from Backup
+   // ===========================================================================
+
+   describe('Restore from Backup', () => {
+     let manager: InstanceType<typeof BackupManager>
+
+     beforeEach(() => {
+       manager = createBackupManager({
+         bucket: mockBucket as unknown as R2Bucket,
+         doId: 'test-do-123',
+         prefix: 'backups/',
+       })
+     })
+
+     it('should restore from a full backup by ID', async () => {
+       const backup = await manager.createFullBackup(mockPGLite as any)
+
+       mockBucket.get.mockResolvedValueOnce({
+         bytes: () => Promise.resolve(new Uint8Array([1, 2, 3])),
+         text: () => Promise.resolve(''),
+       })
+
+       const result = await manager.restoreFromBackup(
+         backup.backupId,
+         mockPGLite as any
+       )
+
+       expect(result.success).toBe(true)
+       expect(result.restoredFromBackupId).toBe(backup.backupId)
+       expect(result.tablesRestored).toBeDefined()
+     })
+
+     it('should restore from latest backup when no ID specified', async () => {
+       await manager.createFullBackup(mockPGLite as any)
+       const latest = await manager.createFullBackup(mockPGLite as any)
+
+       mockBucket.get.mockResolvedValueOnce({
+         bytes: () => Promise.resolve(new Uint8Array([1, 2, 3])),
+         text: () => Promise.resolve(''),
+       })
+
+       const result = await manager.restoreFromLatest(mockPGLite as any)
+
+       expect(result.success).toBe(true)
+       expect(result.restoredFromBackupId).toBe(latest.backupId)
+     })
+
+     it('should restore incrementals on top of full backup', async () => {
+       const full = await manager.createFullBackup(mockPGLite as any)
+       await manager.createIncrementalBackup(mockPGLite as any)
+       const inc2 = await manager.createIncrementalBackup(mockPGLite as any)
+
+       mockBucket.get.mockResolvedValue({
+         bytes: () => Promise.resolve(new Uint8Array([1, 2, 3])),
+         text: () => Promise.resolve(''),
+       })
+
+       const result = await manager.restoreFromBackup(
+         inc2.backupId,
+         mockPGLite as any
+       )
+
+       expect(result.success).toBe(true)
+       expect(result.backupsApplied).toBeGreaterThan(1) // Full + incrementals
+       expect(result.restoredFromBackupId).toBe(inc2.backupId)
+     })
+
+     it('should validate checksum during restore', async () => {
+       const backup = await manager.createFullBackup(mockPGLite as any)
+
+       // Return data with wrong checksum
+       mockBucket.get.mockResolvedValueOnce({
+         bytes: () => Promise.resolve(new Uint8Array([99, 99, 99])), // Corrupted
+         text: () => Promise.resolve(''),
+         customMetadata: { checksum: 'wrong-checksum' },
+       })
+
+       const result = await manager.restoreFromBackup(
+         backup.backupId,
+         mockPGLite as any,
+         { validateChecksum: true }
+       )
+
+       expect(result.success).toBe(false)
+       expect(result.error).toContain('checksum')
+     })
+
+     it('should fail if backup not found in R2', async () => {
+       mockBucket.get.mockResolvedValueOnce(null)
+
+       const result = await manager.restoreFromBackup(
+         'non-existent-backup-id',
+         mockPGLite as any
+       )
+
+       expect(result.success).toBe(false)
+       expect(result.error).toContain('not found')
+     })
+
+     it('should decompress backup data during restore', async () => {
+       const compressedManager = createBackupManager({
+         bucket: mockBucket as unknown as R2Bucket,
+         doId: 'test-do-123',
+         prefix: 'backups/',
+         compression: true,
+       })
+
+       const backup = await compressedManager.createFullBackup(mockPGLite as any)
+
+       mockBucket.get.mockResolvedValueOnce({
+         bytes: () => Promise.resolve(new Uint8Array([1, 2, 3])),
+         text: () => Promise.resolve(''),
+       })
+
+       const result = await compressedManager.restoreFromBackup(
+         backup.backupId,
+         mockPGLite as any
+       )
+
+       expect(result.success).toBe(true)
+     })
+
+     it('should track restore progress', async () => {
+       const backup = await manager.createFullBackup(mockPGLite as any)
+
+       const progressEvents: any[] = []
+       mockBucket.get.mockResolvedValueOnce({
+         bytes: () => Promise.resolve(new Uint8Array([1, 2, 3])),
+         text: () => Promise.resolve(''),
+       })
+
+       const result = await manager.restoreFromBackup(
+         backup.backupId,
+         mockPGLite as any,
+         { onProgress: (event: any) => progressEvents.push(event) }
+       )
+
+       expect(result.success).toBe(true)
+       expect(progressEvents.length).toBeGreaterThan(0)
+       expect(progressEvents[progressEvents.length - 1].progress).toBe(100)
+     })
+
+     it('should record restore duration', async () => {
+       const backup = await manager.createFullBackup(mockPGLite as any)
+
+       mockBucket.get.mockResolvedValueOnce({
+         bytes: () => Promise.resolve(new Uint8Array([1, 2, 3])),
+         text: () => Promise.resolve(''),
+       })
+
+       const result = await manager.restoreFromBackup(
+         backup.backupId,
+         mockPGLite as any
+       )
+
+       expect(result.durationMs).toBeDefined()
+       expect(result.durationMs).toBeGreaterThanOrEqual(0)
+     })
+   })
+
+   // ===========================================================================
+   // Tests: Backup Schedule
+   // ===========================================================================
+
+   describe('Backup Schedule', () => {
+     let manager: InstanceType<typeof BackupManager>
+
+     beforeEach(() => {
+       manager = createBackupManager({
+         bucket: mockBucket as unknown as R2Bucket,
+         doId: 'test-do-123',
+         prefix: 'backups/',
+         schedule: {
+           intervalMs: 3600000, // 1 hour
+           type: 'incremental',
+           fullBackupIntervalMs: 86400000, // 24 hours
+         },
+       })
+     })
+
+     it('should determine next backup time based on schedule', () => {
+       const nextBackup = manager.getNextBackupTime()
+
+       expect(nextBackup).toBeDefined()
+       expect(nextBackup).toBeGreaterThan(Date.now())
+     })
+
+     it('should determine if full backup is needed based on interval', () => {
+       // No backups yet - should need a full backup
+       const needsFull = manager.needsFullBackup()
+
+       expect(needsFull).toBe(true)
+     })
+
+     it('should not need full backup if recent full backup exists', async () => {
+       await manager.createFullBackup(mockPGLite as any)
+
+       const needsFull = manager.needsFullBackup()
+
+       expect(needsFull).toBe(false)
+     })
+
+     it('should request full backup after fullBackupInterval expires', async () => {
+       await manager.createFullBackup(mockPGLite as any)
+
+       // Advance time past the full backup interval
+       vi.advanceTimersByTime(86400001)
+
+       const needsFull = manager.needsFullBackup()
+
+       expect(needsFull).toBe(true)
+     })
+
+     it('should return scheduled backup type (full or incremental)', () => {
+       const backupType = manager.getScheduledBackupType()
+
+       // First backup should be full
+       expect(backupType).toBe('full')
+     })
+
+     it('should execute scheduled backup via handleAlarm', async () => {
+       const result = await manager.handleAlarm(mockPGLite as any)
+
+       expect(result.backupCreated).toBe(true)
+       expect(result.type).toBeDefined()
+       expect(result.nextAlarmMs).toBeDefined()
+     })
+
+     it('should return the configured schedule', () => {
+       const schedule = manager.getSchedule()
+
+       expect(schedule.intervalMs).toBe(3600000)
+       expect(schedule.type).toBe('incremental')
+       expect(schedule.fullBackupIntervalMs).toBe(86400000)
+     })
+   })
+
+   // ===========================================================================
+   // Tests: Backup Retention
+   // ===========================================================================
+
+   describe('Backup Retention', () => {
+     let manager: InstanceType<typeof BackupManager>
+
+     beforeEach(() => {
+       manager = createBackupManager({
+         bucket: mockBucket as unknown as R2Bucket,
+         doId: 'test-do-123',
+         prefix: 'backups/',
+         retention: {
+           maxBackups: 5,
+           maxAgeDays: 7,
+           keepMinFullBackups: 2,
+         },
+       })
+     })
+
+     it('should prune backups exceeding maxBackups', async () => {
+       // Create more backups than allowed
+       for (let i = 0; i < 7; i++) {
+         await manager.createFullBackup(mockPGLite as any)
+         vi.advanceTimersByTime(1000)
+       }
+
+       const pruneResult = await manager.pruneBackups()
+
+       expect(pruneResult.pruned).toBeGreaterThan(0)
+       expect(pruneResult.remaining).toBeLessThanOrEqual(5)
+     })
+
+     it('should prune backups older than maxAgeDays', async () => {
+       await manager.createFullBackup(mockPGLite as any)
+
+       // Advance past retention period
+       vi.advanceTimersByTime(8 * 24 * 60 * 60 * 1000) // 8 days
+
+       await manager.createFullBackup(mockPGLite as any)
+
+       const pruneResult = await manager.pruneBackups()
+
+       expect(pruneResult.pruned).toBeGreaterThan(0)
+     })
+
+     it('should always keep minimum number of full backups', async () => {
+       // Create 3 full backups with time between them
+       for (let i = 0; i < 3; i++) {
+         await manager.createFullBackup(mockPGLite as any)
+         vi.advanceTimersByTime(8 * 24 * 60 * 60 * 1000) // 8 days each
+       }
+
+       const pruneResult = await manager.pruneBackups()
+
+       // Should keep at least 2 full backups regardless of age
+       expect(pruneResult.remaining).toBeGreaterThanOrEqual(2)
+     })
+
+     it('should delete pruned backups from R2', async () => {
+       for (let i = 0; i < 7; i++) {
+         await manager.createFullBackup(mockPGLite as any)
+         vi.advanceTimersByTime(1000)
+       }
+
+       await manager.pruneBackups()
+
+       expect(mockBucket.delete).toHaveBeenCalled()
+     })
+
+     it('should update manifest after pruning', async () => {
+       for (let i = 0; i < 7; i++) {
+         await manager.createFullBackup(mockPGLite as any)
+         vi.advanceTimersByTime(1000)
+       }
+
+       await manager.pruneBackups()
+
+       const manifest = await manager.getManifest()
+       expect(manifest.entries.length).toBeLessThanOrEqual(5)
+     })
+
+     it('should not prune if no backups exceed retention limits', async () => {
+       await manager.createFullBackup(mockPGLite as any)
+
+       const pruneResult = await manager.pruneBackups()
+
+       expect(pruneResult.pruned).toBe(0)
+     })
+
+     it('should report deleted backup IDs in prune result', async () => {
+       for (let i = 0; i < 7; i++) {
+         await manager.createFullBackup(mockPGLite as any)
+         vi.advanceTimersByTime(1000)
+       }
+
+       const pruneResult = await manager.pruneBackups()
+
+       expect(pruneResult.deletedBackupIds).toBeDefined()
+       expect(pruneResult.deletedBackupIds.length).toBe(pruneResult.pruned)
+     })
+   })
+
+   // ===========================================================================
+   // Tests: Backup Statistics
+   // ===========================================================================
+
+   describe('Backup Statistics', () => {
+     let manager: InstanceType<typeof BackupManager>
+
+     beforeEach(() => {
+       manager = createBackupManager({
+         bucket: mockBucket as unknown as R2Bucket,
+         doId: 'test-do-123',
+         prefix: 'backups/',
+       })
+     })
+
+     it('should track total number of backups created', async () => {
+       await manager.createFullBackup(mockPGLite as any)
+       await manager.createFullBackup(mockPGLite as any)
+
+       const stats = manager.getStats()
+
+       expect(stats.totalBackups).toBe(2)
+     })
+
+     it('should track total backup size', async () => {
+       await manager.createFullBackup(mockPGLite as any)
+
+       const stats = manager.getStats()
+
+       expect(stats.totalSizeBytes).toBeGreaterThan(0)
+     })
+
+     it('should track last backup timestamp', async () => {
+       const beforeBackup = Date.now()
+       await manager.createFullBackup(mockPGLite as any)
+
+       const stats = manager.getStats()
+
+       expect(stats.lastBackupTimestamp).toBeGreaterThanOrEqual(beforeBackup)
+     })
+
+     it('should track backup success and failure counts', async () => {
+       await manager.createFullBackup(mockPGLite as any) // success
+
+       mockBucket.put.mockRejectedValueOnce(new Error('failed'))
+       await manager.createFullBackup(mockPGLite as any) // failure
+
+       const stats = manager.getStats()
+
+       expect(stats.successCount).toBe(1)
+       expect(stats.failureCount).toBe(1)
+     })
+
+     it('should track average backup duration', async () => {
+       await manager.createFullBackup(mockPGLite as any)
+       await manager.createFullBackup(mockPGLite as any)
+
+       const stats = manager.getStats()
+
+       expect(stats.avgDurationMs).toBeGreaterThanOrEqual(0)
+     })
+
+     it('should reset stats', () => {
+       manager.resetStats()
+
+       const stats = manager.getStats()
+
+       expect(stats.totalBackups).toBe(0)
+       expect(stats.totalSizeBytes).toBe(0)
+     })
+
+     it('should track full vs incremental backup counts separately', async () => {
+       await manager.createFullBackup(mockPGLite as any)
+       await manager.createIncrementalBackup(mockPGLite as any)
+       await manager.createIncrementalBackup(mockPGLite as any)
+
+       const stats = manager.getStats()
+
+       expect(stats.fullBackupCount).toBe(1)
+       expect(stats.incrementalBackupCount).toBe(2)
+     })
+   })
+
+   // ===========================================================================
+   // Tests: Edge Cases and Error Handling
+   // ===========================================================================
+
+   describe('Edge Cases', () => {
+     let manager: InstanceType<typeof BackupManager>
+
+     beforeEach(() => {
+       manager = createBackupManager({
+         bucket: mockBucket as unknown as R2Bucket,
+         doId: 'test-do-123',
+         prefix: 'backups/',
+       })
+     })
+
+     it('should handle empty database backup', async () => {
+       mockPGLite.query.mockResolvedValueOnce({ rows: [] }) // No tables
+
+       const result = await manager.createFullBackup(mockPGLite as any)
+
+       expect(result.success).toBe(true)
+       expect(result.tables).toEqual([])
+     })
+
+     it('should handle concurrent backup requests', async () => {
+       const backup1 = manager.createFullBackup(mockPGLite as any)
+       const backup2 = manager.createFullBackup(mockPGLite as any)
+
+       const results = await Promise.all([backup1, backup2])
+
+       // One should succeed, one should be rejected or queued
+       const successes = results.filter((r) => r.success)
+       expect(successes.length).toBeGreaterThanOrEqual(1)
+     })
+
+     it('should handle R2 timeout during backup', async () => {
+       mockBucket.put.mockImplementationOnce(
+         () => new Promise((_, reject) => setTimeout(() => reject(new Error('timeout')), 30000))
+       )
+
+       vi.advanceTimersByTime(30000)
+
+       const result = await manager.createFullBackup(mockPGLite as any)
+
+       expect(result.success).toBe(false)
+     })
+
+     it('should handle very large databases', async () => {
+       // Simulate a large database with many tables
+       const tables = Array.from({ length: 100 }, (_, i) => ({
+         tablename: `table_${i}`,
+         schemaname: 'public',
+       }))
+       mockPGLite.query.mockResolvedValueOnce({ rows: tables })
+
+       const result = await manager.createFullBackup(mockPGLite as any)
+
+       expect(result.tables!.length).toBe(100)
+     })
+
+     it('should handle PGLite query errors during backup', async () => {
+       mockPGLite.query.mockRejectedValueOnce(new Error('PGLite internal error'))
+
+       const result = await manager.createFullBackup(mockPGLite as any)
+
+       expect(result.success).toBe(false)
+       expect(result.error).toContain('PGLite')
+     })
+
+     it('should handle backup of database with special characters in names', async () => {
+       mockPGLite.query.mockResolvedValueOnce({
+         rows: [{ tablename: 'table with spaces', schemaname: 'my-schema' }],
+       })
+
+       const result = await manager.createFullBackup(mockPGLite as any)
+
+       expect(result.success).toBe(true)
+     })
+
+     it('should verify backup exists in R2 after creation', async () => {
+       const backup = await manager.createFullBackup(mockPGLite as any)
+
+       mockBucket.head.mockResolvedValueOnce({ size: 100, etag: 'abc' })
+
+       const exists = await manager.verifyBackup(backup.backupId)
+
+       expect(exists).toBe(true)
+     })
+
+     it('should handle manifest not found in R2 gracefully', async () => {
+       mockBucket.get.mockResolvedValueOnce(null)
+
+       const manifest = await manager.getManifest()
+
+       expect(manifest.entries).toEqual([])
+     })
+   })
+ })
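
Note on the API above: the sketch below shows one way the BackupManager surface exercised by these tests could be driven from scheduled code. It is inferred from the test expectations only and is not code shipped in the package; the import path (the public entry point may differ), the BACKUP_BUCKET binding name, the runScheduledBackup wrapper, and the untyped db handle are assumptions.

// Illustrative sketch inferred from the test expectations above; not code from
// @dotdo/postgres. Assumed: the import path, the BACKUP_BUCKET R2 binding, and
// `db` being a PGlite-compatible handle (the tests pass it as `any`).
import { createBackupManager } from '../backup/backup-manager'

export async function runScheduledBackup(
  env: { BACKUP_BUCKET: R2Bucket },
  doId: string,
  db: unknown
) {
  const manager = createBackupManager({
    bucket: env.BACKUP_BUCKET,
    doId,
    prefix: 'backups/',
    schedule: { intervalMs: 3_600_000, type: 'incremental', fullBackupIntervalMs: 86_400_000 },
    retention: { maxBackups: 30, maxAgeDays: 7, keepMinFullBackups: 3 },
    compression: true,
  })

  // Take a full backup when none exists or the 24h window has elapsed; otherwise incremental.
  const result = manager.needsFullBackup()
    ? await manager.createFullBackup(db as any)
    : await manager.createIncrementalBackup(db as any)

  if (!result.success) {
    console.error('backup failed:', result.error)
  }

  // Enforce the retention policy, then report when the next run is due.
  await manager.pruneBackups()
  return manager.getNextBackupTime()
}

The "Backup Schedule" tests suggest handleAlarm() bundles this full-vs-incremental decision for Durable Object alarms, returning backupCreated, type, and nextAlarmMs.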