@geekmidas/testkit 0.0.8 → 0.0.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. package/dist/PostgresObjectionMigrator-B88aTT0m.cjs +122 -0
  2. package/dist/PostgresObjectionMigrator-DydSgYFv.mjs +117 -0
  3. package/dist/PostgresObjectionMigrator.cjs +4 -0
  4. package/dist/PostgresObjectionMigrator.mjs +4 -0
  5. package/dist/{VitestKyselyTransactionIsolator-AfxPJEwR.mjs → VitestKyselyTransactionIsolator-BKGT9nEG.mjs} +1 -1
  6. package/dist/{VitestKyselyTransactionIsolator-YWnSJiIH.cjs → VitestKyselyTransactionIsolator-CIlpIO78.cjs} +1 -1
  7. package/dist/VitestKyselyTransactionIsolator.cjs +2 -2
  8. package/dist/VitestKyselyTransactionIsolator.mjs +2 -2
  9. package/dist/{VitestObjectionTransactionIsolator-BZRYy8iW.mjs → VitestObjectionTransactionIsolator-BPoLUFop.mjs} +2 -2
  10. package/dist/{VitestObjectionTransactionIsolator-0uX6DW5G.cjs → VitestObjectionTransactionIsolator-DyqLp_in.cjs} +2 -2
  11. package/dist/VitestObjectionTransactionIsolator.cjs +2 -2
  12. package/dist/VitestObjectionTransactionIsolator.mjs +2 -2
  13. package/dist/VitestTransactionIsolator.cjs +1 -1
  14. package/dist/VitestTransactionIsolator.mjs +1 -1
  15. package/dist/__tests__/KyselyFactory.spec.cjs +5 -5
  16. package/dist/__tests__/KyselyFactory.spec.mjs +5 -5
  17. package/dist/__tests__/ObjectionFactory.spec.cjs +1 -1
  18. package/dist/__tests__/ObjectionFactory.spec.mjs +1 -1
  19. package/dist/__tests__/PostgresMigrator.spec.cjs +1 -1
  20. package/dist/__tests__/PostgresMigrator.spec.mjs +1 -1
  21. package/dist/__tests__/PostgresObjectionMigrator.spec.cjs +432 -0
  22. package/dist/__tests__/PostgresObjectionMigrator.spec.mjs +431 -0
  23. package/dist/__tests__/VitestObjectionTransactionIsolator.spec.cjs +130 -0
  24. package/dist/__tests__/VitestObjectionTransactionIsolator.spec.mjs +129 -0
  25. package/dist/__tests__/integration.spec.cjs +5 -5
  26. package/dist/__tests__/integration.spec.mjs +5 -5
  27. package/dist/{helpers-CukcFAU9.mjs → helpers-BEmjyUVE.mjs} +1 -1
  28. package/dist/{helpers-Bnm3Jy9X.cjs → helpers-CNMBePuj.cjs} +12 -0
  29. package/dist/helpers.cjs +1 -1
  30. package/dist/helpers.mjs +1 -1
  31. package/dist/{kysely-B-GOhABm.cjs → kysely-CBfCXxUn.cjs} +2 -2
  32. package/dist/{kysely-CqfoKVXs.mjs → kysely-Cx_1pZYc.mjs} +2 -2
  33. package/dist/kysely.cjs +3 -3
  34. package/dist/kysely.mjs +3 -3
  35. package/dist/objection-CCD8fMLj.cjs +87 -0
  36. package/dist/objection-lsMgM5gP.mjs +82 -0
  37. package/dist/objection.cjs +7 -81
  38. package/dist/objection.mjs +6 -81
  39. package/package.json +1 -1
  40. package/src/PostgresObjectionMigrator.ts +138 -0
  41. package/src/__tests__/PostgresObjectionMigrator.spec.ts +634 -0
  42. package/src/objection.ts +1 -0
  43. package/dist/{VitestTransactionIsolator-kFL36T8x.mjs → VitestTransactionIsolator-BWwK-ca6.mjs} +0 -0
  44. package/dist/{VitestTransactionIsolator-DcOz0LZF.cjs → VitestTransactionIsolator-CruLTRRi.cjs} +0 -0
  45. package/dist/{helpers-CKMlwSYT.mjs → helpers-BuPmgzyQ.mjs} +0 -0
  46. package/dist/{helpers-H4hO5SZR.cjs → helpers-nEUtQ7eo.cjs} +0 -0
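The headline addition in this release is PostgresObjectionMigrator (files 1-4, 21-22, and 40-42 above), which is also re-exported from the package's Objection entry point (file 42). As orientation before the full spec diff below, here is a minimal usage sketch inferred from that spec; the '@geekmidas/testkit/objection' import path, the connection string, and the migrations directory are illustrative assumptions, not taken from package documentation.

// Sketch only: API shape inferred from the new spec below, not from package docs.
// The subpath import is an assumption based on the dist/objection.* entry points.
import knex from 'knex';
import { PostgresObjectionMigrator } from '@geekmidas/testkit/objection';

const uri = 'postgresql://user:pass@localhost:5432/my_test_db'; // placeholder

const migrator = new PostgresObjectionMigrator({
  uri,
  knex: knex({
    client: 'pg',
    connection: uri,
    migrations: { directory: './migrations' }, // placeholder
  }),
});

// start() applies pending migrations, tears down the knex connection, and
// resolves to a cleanup callback; migrate(), rollback(), and status() are
// also exercised by the new tests.
const cleanup = await migrator.start();

// ... run tests against the migrated database ...

await cleanup();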
package/src/__tests__/PostgresObjectionMigrator.spec.ts ADDED
@@ -0,0 +1,634 @@
+ import { promises as fs } from 'node:fs';
+ import path from 'node:path';
+ import knex from 'knex';
+ import { Client } from 'pg';
+ import {
+   afterAll,
+   afterEach,
+   beforeAll,
+   beforeEach,
+   describe,
+   expect,
+   it,
+   vi,
+ } from 'vitest';
+ import { createTestDatabase } from '../../test/helpers';
+ import { PostgresObjectionMigrator } from '../PostgresObjectionMigrator';
+
+ describe('PostgresObjectionMigrator', () => {
+   let testDbName: string;
+   let cleanupDb: () => Promise<void>;
+   let consoleSpy: any;
+   let consoleErrorSpy: any;
+   let testMigrationsDir: string;
+
+   beforeAll(async () => {
+     // Create a unique test database for each test run
+     testDbName = `test_postgres_objection_migrator_${Date.now()}`;
+     cleanupDb = await createTestDatabase(testDbName);
+
+     // Create test migrations directory
+     testMigrationsDir = path.join(
+       process.cwd(),
+       'test-migrations',
+       Date.now().toString(),
+     );
+     await fs.mkdir(testMigrationsDir, { recursive: true });
+
+     // Create test migration files
+     await fs.writeFile(
+       path.join(testMigrationsDir, '001_create_users.js'),
+       `
+ exports.up = function(knex) {
+   return knex.schema.createTable('users', function(table) {
+     table.increments('id').primary();
+     table.string('name');
+     table.string('email').unique();
+     table.timestamps(true, true);
+   });
+ };
+
+ exports.down = function(knex) {
+   return knex.schema.dropTable('users');
+ };
+ `,
+     );
+
+     await fs.writeFile(
+       path.join(testMigrationsDir, '002_create_posts.js'),
+       `
+ exports.up = function(knex) {
+   return knex.schema.createTable('posts', function(table) {
+     table.increments('id').primary();
+     table.string('title');
+     table.text('content');
+     table.integer('user_id').unsigned().references('id').inTable('users');
+     table.timestamps(true, true);
+   });
+ };
+
+ exports.down = function(knex) {
+   return knex.schema.dropTable('posts');
+ };
+ `,
+     );
+   });
+
+   beforeEach(() => {
+     consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {});
+     consoleErrorSpy = vi.spyOn(console, 'error').mockImplementation(() => {});
+   });
+
+   afterEach(() => {
+     consoleSpy.mockRestore();
+     consoleErrorSpy.mockRestore();
+   });
+
+   afterAll(async () => {
+     await cleanupDb();
+     // Cleanup test migrations directory
+     await fs.rm(testMigrationsDir, { recursive: true, force: true });
+   });
+
+   describe('constructor', () => {
+     it('should create a PostgresObjectionMigrator instance', () => {
+       const knexInstance = knex({
+         client: 'pg',
+         connection: `postgresql://geekmidas:geekmidas@localhost:5432/${testDbName}`,
+       });
+
+       const migrator = new PostgresObjectionMigrator({
+         uri: `postgresql://geekmidas:geekmidas@localhost:5432/${testDbName}`,
+         knex: knexInstance,
+       });
+
+       expect(migrator).toBeInstanceOf(PostgresObjectionMigrator);
+       knexInstance.destroy();
+     });
+   });
+
+   describe('migrate method', () => {
+     it('should run migrations to latest', async () => {
+       const newDbName = `test_migrate_${Date.now()}`;
+       const uri = `postgresql://geekmidas:geekmidas@localhost:5432/${newDbName}`;
+
+       const knexInstance = knex({
+         client: 'pg',
+         connection: uri,
+         migrations: {
+           directory: testMigrationsDir,
+         },
+       });
+
+       const migrator = new PostgresObjectionMigrator({
+         uri,
+         knex: knexInstance,
+       });
+
+       const cleanup = await migrator.start();
+
+       // Verify console output
+       expect(consoleSpy).toHaveBeenCalledWith(
+         `Migrating database: ${newDbName}`,
+       );
+       expect(consoleSpy).toHaveBeenCalledWith(
+         expect.stringContaining('Applied batch'),
+       );
+
+       // Verify tables were created
+       const verifyClient = new Client({
+         host: 'localhost',
+         port: 5432,
+         user: 'geekmidas',
+         password: 'geekmidas',
+         database: newDbName,
+       });
+       await verifyClient.connect();
+
+       const tablesResult = await verifyClient.query(`
+         SELECT table_name FROM information_schema.tables
+         WHERE table_schema = 'public'
+         AND table_name IN ('users', 'posts')
+         ORDER BY table_name
+       `);
+       expect(tablesResult.rows).toEqual([
+         { table_name: 'posts' },
+         { table_name: 'users' },
+       ]);
+
+       await verifyClient.end();
+       await cleanup();
+     });
+
+     it('should handle no pending migrations', async () => {
+       const newDbName = `test_no_pending_${Date.now()}`;
+       const uri = `postgresql://geekmidas:geekmidas@localhost:5432/${newDbName}`;
+
+       // First, create and migrate the database
+       const knexInstance1 = knex({
+         client: 'pg',
+         connection: uri,
+         migrations: {
+           directory: testMigrationsDir,
+         },
+       });
+
+       const migrator1 = new PostgresObjectionMigrator({
+         uri,
+         knex: knexInstance1,
+       });
+
+       const cleanup = await migrator1.start();
+
+       // Clear console spy calls
+       consoleSpy.mockClear();
+
+       // Now try to migrate again - should have no pending migrations
+       const knexInstance2 = knex({
+         client: 'pg',
+         connection: uri,
+         migrations: {
+           directory: testMigrationsDir,
+         },
+       });
+
+       const migrator2 = new PostgresObjectionMigrator({
+         uri,
+         knex: knexInstance2,
+       });
+
+       await migrator2.migrate();
+
+       expect(consoleSpy).toHaveBeenCalledWith('No pending migrations to apply');
+
+       await cleanup();
+     });
+
+     it('should handle migration errors', async () => {
+       const newDbName = `test_migration_error_${Date.now()}`;
+       const uri = `postgresql://geekmidas:geekmidas@localhost:5432/${newDbName}`;
+
+       // Create a bad migration file
+       const badMigrationsDir = path.join(
+         process.cwd(),
+         'bad-migrations',
+         Date.now().toString(),
+       );
+       await fs.mkdir(badMigrationsDir, { recursive: true });
+       await fs.writeFile(
+         path.join(badMigrationsDir, '001_bad_migration.js'),
+         `
+ exports.up = function(knex) {
+   throw new Error('Migration failed on purpose');
+ };
+
+ exports.down = function(knex) {
+   return Promise.resolve();
+ };
+ `,
+       );
+
+       const knexInstance = knex({
+         client: 'pg',
+         connection: uri,
+         migrations: {
+           directory: badMigrationsDir,
+         },
+       });
+
+       const migrator = new PostgresObjectionMigrator({
+         uri,
+         knex: knexInstance,
+       });
+
+       await expect(migrator.start()).rejects.toThrow(
+         'Migration failed on purpose',
+       );
+
+       expect(consoleErrorSpy).toHaveBeenCalledWith(
+         'Failed to apply migrations:',
+         expect.any(Error),
+       );
+
+       // Cleanup
+       await fs.rm(badMigrationsDir, { recursive: true, force: true });
+       const cleanupClient = new Client({
+         host: 'localhost',
+         port: 5432,
+         user: 'geekmidas',
+         password: 'geekmidas',
+         database: 'postgres',
+       });
+       await cleanupClient.connect();
+       await cleanupClient.query(`DROP DATABASE IF EXISTS "${newDbName}"`);
+       await cleanupClient.end();
+     });
+
+     it('should destroy knex connection after migration', async () => {
+       const newDbName = `test_destroy_${Date.now()}`;
+       const uri = `postgresql://geekmidas:geekmidas@localhost:5432/${newDbName}`;
+
+       const knexInstance = knex({
+         client: 'pg',
+         connection: uri,
+         migrations: {
+           directory: testMigrationsDir,
+         },
+       });
+
+       const destroySpy = vi.spyOn(knexInstance, 'destroy');
+
+       const migrator = new PostgresObjectionMigrator({
+         uri,
+         knex: knexInstance,
+       });
+
+       const cleanup = await migrator.start();
+
+       expect(destroySpy).toHaveBeenCalled();
+
+       await cleanup();
+     });
+   });
+
+   describe('rollback method', () => {
+     it('should rollback last migration batch', async () => {
+       const newDbName = `test_rollback_${Date.now()}`;
+       const uri = `postgresql://geekmidas:geekmidas@localhost:5432/${newDbName}`;
+
+       // First, create and migrate the database
+       const knexInstance1 = knex({
+         client: 'pg',
+         connection: uri,
+         migrations: {
+           directory: testMigrationsDir,
+         },
+       });
+
+       const migrator1 = new PostgresObjectionMigrator({
+         uri,
+         knex: knexInstance1,
+       });
+
+       const cleanup = await migrator1.start();
+
+       // Clear console spy
+       consoleSpy.mockClear();
+
+       // Now rollback
+       const knexInstance2 = knex({
+         client: 'pg',
+         connection: uri,
+         migrations: {
+           directory: testMigrationsDir,
+         },
+       });
+
+       const migrator2 = new PostgresObjectionMigrator({
+         uri,
+         knex: knexInstance2,
+       });
+
+       await migrator2.rollback();
+
+       expect(consoleSpy).toHaveBeenCalledWith(
+         expect.stringContaining('Rolled back batch'),
+       );
+
+       // Verify tables were dropped
+       const verifyClient = new Client({
+         host: 'localhost',
+         port: 5432,
+         user: 'geekmidas',
+         password: 'geekmidas',
+         database: newDbName,
+       });
+       await verifyClient.connect();
+
+       const tablesResult = await verifyClient.query(`
+         SELECT table_name FROM information_schema.tables
+         WHERE table_schema = 'public'
+         AND table_name IN ('users', 'posts')
+       `);
+       expect(tablesResult.rows).toEqual([]);
+
+       await verifyClient.end();
+       await cleanup();
+     });
+
+     it('should handle no migrations to rollback', async () => {
+       const newDbName = `test_no_rollback_${Date.now()}`;
+       const uri = `postgresql://geekmidas:geekmidas@localhost:5432/${newDbName}`;
+
+       // Create database without running migrations
+       const createClient = new Client({
+         host: 'localhost',
+         port: 5432,
+         user: 'geekmidas',
+         password: 'geekmidas',
+         database: 'postgres',
+       });
+       await createClient.connect();
+       await createClient.query(`CREATE DATABASE "${newDbName}"`);
+       await createClient.end();
+
+       const knexInstance = knex({
+         client: 'pg',
+         connection: uri,
+         migrations: {
+           directory: testMigrationsDir,
+         },
+       });
+
+       const migrator = new PostgresObjectionMigrator({
+         uri,
+         knex: knexInstance,
+       });
+
+       await migrator.rollback();
+
+       expect(consoleSpy).toHaveBeenCalledWith('No migrations to rollback');
+
+       // Cleanup
+       const cleanupClient = new Client({
+         host: 'localhost',
+         port: 5432,
+         user: 'geekmidas',
+         password: 'geekmidas',
+         database: 'postgres',
+       });
+       await cleanupClient.connect();
+       await cleanupClient.query(`DROP DATABASE IF EXISTS "${newDbName}"`);
+       await cleanupClient.end();
+     });
+
+     it('should handle rollback errors', async () => {
+       const newDbName = `test_rollback_error_${Date.now()}`;
+       const uri = `postgresql://geekmidas:geekmidas@localhost:5432/${newDbName}`;
+
+       const knexInstance = knex({
+         client: 'pg',
+         connection: uri,
+         migrations: {
+           directory: testMigrationsDir,
+         },
+       });
+
+       const migrator = new PostgresObjectionMigrator({
+         uri,
+         knex: knexInstance,
+       });
+
+       await expect(migrator.rollback()).rejects.toThrow();
+
+       expect(consoleErrorSpy).toHaveBeenCalledWith(
+         'Failed to rollback migrations:',
+         expect.any(Error),
+       );
+
+       await knexInstance.destroy();
+     });
+   });
+
+   describe('status method', () => {
+     it('should return migration status', async () => {
+       const newDbName = `test_status_${Date.now()}`;
+       const uri = `postgresql://geekmidas:geekmidas@localhost:5432/${newDbName}`;
+
+       // First, create and partially migrate the database
+       const knexInstance1 = knex({
+         client: 'pg',
+         connection: uri,
+         migrations: {
+           directory: testMigrationsDir,
+         },
+       });
+
+       const migrator1 = new PostgresObjectionMigrator({
+         uri,
+         knex: knexInstance1,
+       });
+
+       const cleanup = await migrator1.start();
+
+       // Get status
+       const knexInstance2 = knex({
+         client: 'pg',
+         connection: uri,
+         migrations: {
+           directory: testMigrationsDir,
+         },
+       });
+
+       const migrator2 = new PostgresObjectionMigrator({
+         uri,
+         knex: knexInstance2,
+       });
+
+       const status = await migrator2.status();
+
+       expect(status).toHaveProperty('completed');
+       expect(status).toHaveProperty('pending');
+       expect(Array.isArray(status.completed)).toBe(true);
+       expect(Array.isArray(status.pending)).toBe(true);
+
+       await cleanup();
+     });
+
+     it('should destroy connection after getting status', async () => {
+       const newDbName = `test_status_destroy_${Date.now()}`;
+       const uri = `postgresql://geekmidas:geekmidas@localhost:5432/${newDbName}`;
+
+       const knexInstance = knex({
+         client: 'pg',
+         connection: uri,
+         migrations: {
+           directory: testMigrationsDir,
+         },
+       });
+
+       const destroySpy = vi.spyOn(knexInstance, 'destroy');
+
+       const migrator = new PostgresObjectionMigrator({
+         uri,
+         knex: knexInstance,
+       });
+
+       // Create database first
+       const createClient = new Client({
+         host: 'localhost',
+         port: 5432,
+         user: 'geekmidas',
+         password: 'geekmidas',
+         database: 'postgres',
+       });
+       await createClient.connect();
+       await createClient.query(`CREATE DATABASE "${newDbName}"`);
+       await createClient.end();
+
+       await migrator.status();
+
+       expect(destroySpy).toHaveBeenCalled();
+
+       // Cleanup
+       const cleanupClient = new Client({
+         host: 'localhost',
+         port: 5432,
+         user: 'geekmidas',
+         password: 'geekmidas',
+         database: 'postgres',
+       });
+       await cleanupClient.connect();
+       await cleanupClient.query(`DROP DATABASE IF EXISTS "${newDbName}"`);
+       await cleanupClient.end();
+     });
+   });
+
+   describe('integration scenarios', () => {
+     it('should handle complete workflow with complex migrations', async () => {
+       const integrationDbName = `test_integration_${Date.now()}`;
+       const uri = `postgresql://geekmidas:geekmidas@localhost:5432/${integrationDbName}`;
+
+       const knexInstance = knex({
+         client: 'pg',
+         connection: uri,
+         migrations: {
+           directory: testMigrationsDir,
+         },
+       });
+
+       const migrator = new PostgresObjectionMigrator({
+         uri,
+         knex: knexInstance,
+       });
+
+       // Start migration
+       const cleanup = await migrator.start();
+
+       // Verify we can insert data into the migrated tables
+       const testClient = new Client({
+         host: 'localhost',
+         port: 5432,
+         user: 'geekmidas',
+         password: 'geekmidas',
+         database: integrationDbName,
+       });
+       await testClient.connect();
+
+       // Insert a user
+       const userResult = await testClient.query(
+         `INSERT INTO users (name, email) VALUES ($1, $2) RETURNING id`,
+         ['Test User', 'test@example.com'],
+       );
+       const userId = userResult.rows[0].id;
+
+       // Insert a post
+       await testClient.query(
+         `INSERT INTO posts (title, content, user_id) VALUES ($1, $2, $3)`,
+         ['Test Post', 'This is a test post', userId],
+       );
+
+       // Verify foreign key constraint works
+       const postResult = await testClient.query(
+         `SELECT * FROM posts WHERE user_id = $1`,
+         [userId],
+       );
+       expect(postResult.rowCount).toBe(1);
+       expect(postResult.rows[0].title).toBe('Test Post');
+
+       await testClient.end();
+
+       // Cleanup
+       await cleanup();
+     });
+
+     it('should work with transaction-based tests', async () => {
+       const transactionDbName = `test_transaction_${Date.now()}`;
+       const uri = `postgresql://geekmidas:geekmidas@localhost:5432/${transactionDbName}`;
+
+       const knexInstance = knex({
+         client: 'pg',
+         connection: uri,
+         migrations: {
+           directory: testMigrationsDir,
+         },
+       });
+
+       const migrator = new PostgresObjectionMigrator({
+         uri,
+         knex: knexInstance,
+       });
+
+       const cleanup = await migrator.start();
+
+       // Create a new knex instance for transaction testing
+       const testKnex = knex({
+         client: 'pg',
+         connection: uri,
+       });
+
+       // Test with transaction
+       await testKnex
+         .transaction(async (trx) => {
+           await trx('users').insert({
+             name: 'Transaction User',
+             email: 'trx@example.com',
+           });
+           const users = await trx('users').select('*');
+           expect(users.length).toBeGreaterThan(0);
+           // Transaction will be rolled back automatically
+           throw new Error('Rollback transaction');
+         })
+         .catch(() => {
+           // Expected error
+         });
+
+       // Verify data was rolled back
+       const users = await testKnex('users').select('*');
+       expect(users.length).toBe(0);
+
+       await testKnex.destroy();
+       await cleanup();
+     });
+   });
+ });
package/src/objection.ts CHANGED
@@ -12,6 +12,7 @@ import { IsolationLevel } from './VitestTransactionIsolator';
  export { ObjectionFactory } from './ObjectionFactory';
  export { VitestObjectionTransactionIsolator } from './VitestObjectionTransactionIsolator';
  export { IsolationLevel } from './VitestTransactionIsolator';
+ export { PostgresObjectionMigrator } from './PostgresObjectionMigrator';

  /**
   * Creates a wrapped Vitest test API with automatic transaction rollback for Objection.js.
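The one-line export above is what surfaces the new migrator from the Objection entry point. For completeness, a plausible Vitest globalSetup wiring is sketched below; it is not taken from the package docs, and the '@geekmidas/testkit/objection' import path, DATABASE_URL fallback, and migrations directory are assumptions.

// globalSetup.ts -- hypothetical wiring, not part of this package
import knex from 'knex';
import { PostgresObjectionMigrator } from '@geekmidas/testkit/objection';

export default async function setup() {
  // Placeholder connection string; adjust for your environment.
  const uri =
    process.env.DATABASE_URL ??
    'postgresql://user:pass@localhost:5432/test_db';

  const migrator = new PostgresObjectionMigrator({
    uri,
    knex: knex({
      client: 'pg',
      connection: uri,
      migrations: { directory: './migrations' }, // placeholder path
    }),
  });

  // start() migrates the database and resolves to a teardown callback,
  // which Vitest runs after the test suite finishes.
  return migrator.start();
}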