@geekmidas/testkit 0.0.12 → 0.0.13

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (64)
  1. package/README.md +1 -1
  2. package/dist/{Factory-Bm44VKa-.d.cts → Factory-D7P3bKKb.d.mts} +2 -2
  3. package/dist/{Factory-tjCDNgUK.d.mts → Factory-pNV7ZQ7-.d.cts} +2 -2
  4. package/dist/Factory.d.cts +2 -2
  5. package/dist/Factory.d.mts +2 -2
  6. package/dist/{KyselyFactory-C3Bc3p4L.mjs → KyselyFactory-BcYkC0t2.mjs} +1 -1
  7. package/dist/{KyselyFactory-BoPDDitt.d.cts → KyselyFactory-BrWADI77.d.mts} +3 -3
  8. package/dist/{KyselyFactory-CXtfmMfK.cjs → KyselyFactory-Cf0o2YxO.cjs} +1 -1
  9. package/dist/{KyselyFactory-D82j74t9.d.mts → KyselyFactory-DM2dnUXU.d.cts} +3 -3
  10. package/dist/KyselyFactory.cjs +1 -1
  11. package/dist/KyselyFactory.d.cts +3 -3
  12. package/dist/KyselyFactory.d.mts +3 -3
  13. package/dist/KyselyFactory.mjs +1 -1
  14. package/dist/{ObjectionFactory-qIICOph3.mjs → ObjectionFactory-8hebmnai.mjs} +20 -4
  15. package/dist/{ObjectionFactory-BWMTXsxH.d.cts → ObjectionFactory-B40NQWSe.d.mts} +2 -2
  16. package/dist/{ObjectionFactory-DxIxJagq.cjs → ObjectionFactory-CDriunkS.cjs} +20 -4
  17. package/dist/{ObjectionFactory-CEG5qUrm.d.mts → ObjectionFactory-D3l1VuyX.d.cts} +2 -2
  18. package/dist/ObjectionFactory.cjs +1 -1
  19. package/dist/ObjectionFactory.d.cts +3 -3
  20. package/dist/ObjectionFactory.d.mts +3 -3
  21. package/dist/ObjectionFactory.mjs +1 -1
  22. package/dist/__tests__/KyselyFactory.spec.cjs +2 -2
  23. package/dist/__tests__/KyselyFactory.spec.mjs +2 -2
  24. package/dist/__tests__/ObjectionFactory.spec.cjs +288 -450
  25. package/dist/__tests__/ObjectionFactory.spec.mjs +289 -451
  26. package/dist/__tests__/PostgresKyselyMigrator.spec.cjs +397 -0
  27. package/dist/__tests__/PostgresKyselyMigrator.spec.d.cts +1 -0
  28. package/dist/__tests__/PostgresKyselyMigrator.spec.d.mts +1 -0
  29. package/dist/__tests__/PostgresKyselyMigrator.spec.mjs +396 -0
  30. package/dist/__tests__/PostgresMigrator.spec.cjs +1 -1
  31. package/dist/__tests__/PostgresMigrator.spec.mjs +1 -1
  32. package/dist/__tests__/PostgresObjectionMigrator.spec.cjs +1 -1
  33. package/dist/__tests__/PostgresObjectionMigrator.spec.mjs +1 -1
  34. package/dist/__tests__/VitestObjectionTransactionIsolator.spec.cjs +3 -11
  35. package/dist/__tests__/VitestObjectionTransactionIsolator.spec.mjs +3 -11
  36. package/dist/__tests__/integration.spec.cjs +2 -2
  37. package/dist/__tests__/integration.spec.mjs +2 -2
  38. package/dist/{faker-km9UhOS6.d.cts → faker-BSH1EMtg.d.cts} +2 -2
  39. package/dist/{faker-ChuHaYMR.d.mts → faker-C-Iuk_R1.d.mts} +2 -2
  40. package/dist/faker.d.cts +1 -1
  41. package/dist/faker.d.mts +1 -1
  42. package/dist/{helpers-BEmjyUVE.mjs → helpers-B4TXg3Wp.mjs} +11 -36
  43. package/dist/{helpers-CNMBePuj.cjs → helpers-Bf0nXhbu.cjs} +10 -41
  44. package/dist/kysely.cjs +1 -1
  45. package/dist/kysely.d.cts +3 -3
  46. package/dist/kysely.d.mts +3 -3
  47. package/dist/kysely.mjs +1 -1
  48. package/dist/objection.cjs +1 -1
  49. package/dist/objection.d.cts +3 -3
  50. package/dist/objection.d.mts +3 -3
  51. package/dist/objection.mjs +1 -1
  52. package/package.json +2 -2
  53. package/src/Factory.ts +4 -1
  54. package/src/KyselyFactory.ts +6 -2
  55. package/src/ObjectionFactory.ts +31 -4
  56. package/src/__tests__/ObjectionFactory.spec.ts +423 -542
  57. package/src/__tests__/PostgresKyselyMigrator.spec.ts +690 -0
  58. package/src/__tests__/VitestObjectionTransactionIsolator.spec.ts +0 -8
  59. package/test/helpers.ts +13 -21
  60. package/dist/example.cjs +0 -22
  61. package/dist/example.d.cts +0 -26
  62. package/dist/example.d.mts +0 -26
  63. package/dist/example.mjs +0 -22
  64. package/src/example.ts +0 -45
@@ -0,0 +1,690 @@
1
+ import { Kysely, type MigrationProvider, PostgresDialect, sql } from 'kysely';
2
+ import { Client, Pool } from 'pg';
3
+ import {
4
+ afterAll,
5
+ afterEach,
6
+ beforeAll,
7
+ beforeEach,
8
+ describe,
9
+ expect,
10
+ it,
11
+ vi,
12
+ } from 'vitest';
13
+ import { createTestDatabase } from '../../test/helpers';
14
+ import { PostgresKyselyMigrator } from '../PostgresKyselyMigrator';
15
+ import { PostgresMigrator } from '../PostgresMigrator';
16
+
17
+ // Test database schema
18
+ interface TestSchema {
19
+ users: {
20
+ id: number;
21
+ name: string;
22
+ email: string;
23
+ created_at: Date;
24
+ };
25
+ posts: {
26
+ id: number;
27
+ title: string;
28
+ content: string;
29
+ user_id: number;
30
+ created_at: Date;
31
+ };
32
+ }
33
+
34
+ // Test migration provider
35
+ class TestMigrationProvider implements MigrationProvider {
36
+ private migrations: Record<string, any> = {};
37
+ private shouldError = false;
38
+
39
+ addMigration(
40
+ name: string,
41
+ migration: {
42
+ up: (db: Kysely<any>) => Promise<void>;
43
+ down?: (db: Kysely<any>) => Promise<void>;
44
+ },
45
+ ) {
46
+ this.migrations[name] = migration;
47
+ }
48
+
49
+ setError(shouldError: boolean) {
50
+ this.shouldError = shouldError;
51
+ }
52
+
53
+ async getMigrations() {
54
+ if (this.shouldError) {
55
+ throw new Error('Failed to load migrations');
56
+ }
57
+ return this.migrations;
58
+ }
59
+ }
60
+
61
+ describe('PostgresKyselyMigrator', () => {
62
+ let testDbName: string;
63
+ let cleanupDb: () => Promise<void>;
64
+ let consoleSpy: any;
65
+ let consoleErrorSpy: any;
66
+
67
+ beforeAll(async () => {
68
+ // Create a unique test database for each test run
69
+ testDbName = `test_kysely_migrator_${Date.now()}`;
70
+ cleanupDb = await createTestDatabase(testDbName);
71
+ });
72
+
73
+ beforeEach(() => {
74
+ consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {});
75
+ consoleErrorSpy = vi.spyOn(console, 'error').mockImplementation(() => {});
76
+ });
77
+
78
+ afterEach(() => {
79
+ consoleSpy.mockRestore();
80
+ consoleErrorSpy.mockRestore();
81
+ });
82
+
83
+ afterAll(async () => {
84
+ await cleanupDb();
85
+ });
86
+
87
+ describe('constructor', () => {
88
+ it('should create a PostgresKyselyMigrator instance', () => {
89
+ const db = new Kysely<TestSchema>({
90
+ dialect: new PostgresDialect({
91
+ pool: new Pool({
92
+ host: 'localhost',
93
+ port: 5432,
94
+ user: 'geekmidas',
95
+ password: 'geekmidas',
96
+ database: testDbName,
97
+ }),
98
+ }),
99
+ });
100
+
101
+ const provider = new TestMigrationProvider();
102
+ const migrator = new PostgresKyselyMigrator({
103
+ uri: `postgresql://geekmidas:geekmidas@localhost:5432/${testDbName}`,
104
+ db,
105
+ provider,
106
+ });
107
+
108
+ expect(migrator).toBeInstanceOf(PostgresKyselyMigrator);
109
+ expect(migrator).toBeInstanceOf(PostgresMigrator);
110
+ });
111
+ });
112
+
113
+ describe('migrate method', () => {
114
+ it('should apply migrations successfully', async () => {
115
+ const newDbName = `test_migrate_${Date.now()}`;
116
+ const uri = `postgresql://geekmidas:geekmidas@localhost:5432/${newDbName}`;
117
+
118
+ const db = new Kysely<TestSchema>({
119
+ dialect: new PostgresDialect({
120
+ pool: new Pool({
121
+ host: 'localhost',
122
+ port: 5432,
123
+ user: 'geekmidas',
124
+ password: 'geekmidas',
125
+ database: newDbName,
126
+ }),
127
+ }),
128
+ });
129
+
130
+ const provider = new TestMigrationProvider();
131
+
132
+ // Add test migrations
133
+ provider.addMigration('001_create_users', {
134
+ up: async (db) => {
135
+ await db.schema
136
+ .createTable('users')
137
+ .addColumn('id', 'serial', (col) => col.primaryKey())
138
+ .addColumn('name', 'varchar', (col) => col.notNull())
139
+ .addColumn('email', 'varchar', (col) => col.notNull().unique())
140
+ .addColumn('created_at', 'timestamp', (col) =>
141
+ col.defaultTo(sql`now()`).notNull(),
142
+ )
143
+ .execute();
144
+ },
145
+ down: async (db) => {
146
+ await db.schema.dropTable('users').execute();
147
+ },
148
+ });
149
+
150
+ provider.addMigration('002_create_posts', {
151
+ up: async (db) => {
152
+ await db.schema
153
+ .createTable('posts')
154
+ .addColumn('id', 'serial', (col) => col.primaryKey())
155
+ .addColumn('title', 'varchar', (col) => col.notNull())
156
+ .addColumn('content', 'text', (col) => col.notNull())
157
+ .addColumn('user_id', 'integer', (col) =>
158
+ col.notNull().references('users.id').onDelete('cascade'),
159
+ )
160
+ .addColumn('created_at', 'timestamp', (col) =>
161
+ col.defaultTo(sql`now()`).notNull(),
162
+ )
163
+ .execute();
164
+ },
165
+ down: async (db) => {
166
+ await db.schema.dropTable('posts').execute();
167
+ },
168
+ });
169
+
170
+ const migrator = new PostgresKyselyMigrator({
171
+ uri,
172
+ db,
173
+ provider,
174
+ });
175
+
176
+ // Start the migrator (creates database and runs migrations)
177
+ const cleanup = await migrator.start();
178
+
179
+ expect(consoleSpy).toHaveBeenCalledWith(
180
+ expect.stringContaining('Applied 2 migrations successfully'),
181
+ );
182
+
183
+ // Verify tables were created
184
+ const client = new Client({
185
+ host: 'localhost',
186
+ port: 5432,
187
+ user: 'geekmidas',
188
+ password: 'geekmidas',
189
+ database: newDbName,
190
+ });
191
+
192
+ try {
193
+ await client.connect();
194
+ const tablesResult = await client.query(`
195
+ SELECT table_name FROM information_schema.tables
196
+ WHERE table_schema = 'public'
197
+ AND table_name IN ('users', 'posts')
198
+ ORDER BY table_name
199
+ `);
200
+
201
+ expect(tablesResult.rowCount).toBe(2);
202
+ expect(tablesResult.rows).toEqual([
203
+ { table_name: 'posts' },
204
+ { table_name: 'users' },
205
+ ]);
206
+ } finally {
207
+ await client.end();
208
+ }
209
+
210
+ // Cleanup
211
+ await cleanup();
212
+ });
213
+
214
+ it('should handle migration errors', async () => {
215
+ const errorDbName = `test_migrate_error_${Date.now()}`;
216
+ const uri = `postgresql://geekmidas:geekmidas@localhost:5432/${errorDbName}`;
217
+
218
+ const db = new Kysely<TestSchema>({
219
+ dialect: new PostgresDialect({
220
+ pool: new Pool({
221
+ host: 'localhost',
222
+ port: 5432,
223
+ user: 'geekmidas',
224
+ password: 'geekmidas',
225
+ database: errorDbName,
226
+ }),
227
+ }),
228
+ });
229
+
230
+ const provider = new TestMigrationProvider();
231
+
232
+ // Add a migration that will fail
233
+ provider.addMigration('001_failing_migration', {
234
+ up: async () => {
235
+ throw new Error('Migration failed intentionally');
236
+ },
237
+ });
238
+
239
+ const migrator = new PostgresKyselyMigrator({
240
+ uri,
241
+ db,
242
+ provider,
243
+ });
244
+
245
+ // Expect the start method to throw
246
+ await expect(migrator.start()).rejects.toThrow(
247
+ 'Migration failed intentionally',
248
+ );
249
+
250
+ expect(consoleErrorSpy).toHaveBeenCalledWith(
251
+ expect.any(Error),
252
+ 'Failed to apply migrations',
253
+ );
254
+
255
+ // Ensure db is closed before cleanup
256
+ await db.destroy();
257
+
258
+ // Cleanup the created database
259
+ const cleanupClient = new Client({
260
+ host: 'localhost',
261
+ port: 5432,
262
+ user: 'geekmidas',
263
+ password: 'geekmidas',
264
+ database: 'postgres',
265
+ });
266
+ try {
267
+ await cleanupClient.connect();
268
+ // Force disconnect any existing connections
269
+ await cleanupClient.query(`
270
+ SELECT pg_terminate_backend(pg_stat_activity.pid)
271
+ FROM pg_stat_activity
272
+ WHERE pg_stat_activity.datname = '${errorDbName}'
273
+ AND pid <> pg_backend_pid()
274
+ `);
275
+ await cleanupClient.query(`DROP DATABASE IF EXISTS "${errorDbName}"`);
276
+ } finally {
277
+ await cleanupClient.end();
278
+ }
279
+ });
280
+
281
+ it('should destroy database connection after migrations', async () => {
282
+ const destroyDbName = `test_destroy_${Date.now()}`;
283
+ const uri = `postgresql://geekmidas:geekmidas@localhost:5432/${destroyDbName}`;
284
+
285
+ const db = new Kysely<TestSchema>({
286
+ dialect: new PostgresDialect({
287
+ pool: new Pool({
288
+ host: 'localhost',
289
+ port: 5432,
290
+ user: 'geekmidas',
291
+ password: 'geekmidas',
292
+ database: destroyDbName,
293
+ }),
294
+ }),
295
+ });
296
+
297
+ const destroySpy = vi.spyOn(db, 'destroy');
298
+ const provider = new TestMigrationProvider();
299
+
300
+ provider.addMigration('001_simple', {
301
+ up: async (db) => {
302
+ await db.schema
303
+ .createTable('test')
304
+ .addColumn('id', 'serial', (col) => col.primaryKey())
305
+ .execute();
306
+ },
307
+ });
308
+
309
+ const migrator = new PostgresKyselyMigrator({
310
+ uri,
311
+ db,
312
+ provider,
313
+ });
314
+
315
+ const cleanup = await migrator.start();
316
+
317
+ // Verify destroy was called after migrations
318
+ expect(destroySpy).toHaveBeenCalled();
319
+
320
+ // Cleanup
321
+ await cleanup();
322
+ });
323
+ });
324
+
325
+ describe('integration with PostgresMigrator', () => {
326
+ it('should work with complete workflow', async () => {
327
+ const integrationDbName = `test_integration_${Date.now()}`;
328
+ const uri = `postgresql://geekmidas:geekmidas@localhost:5432/${integrationDbName}`;
329
+
330
+ const db = new Kysely<TestSchema>({
331
+ dialect: new PostgresDialect({
332
+ pool: new Pool({
333
+ host: 'localhost',
334
+ port: 5432,
335
+ user: 'geekmidas',
336
+ password: 'geekmidas',
337
+ database: integrationDbName,
338
+ }),
339
+ }),
340
+ });
341
+
342
+ const provider = new TestMigrationProvider();
343
+
344
+ // Add comprehensive migrations
345
+ provider.addMigration('001_initial_schema', {
346
+ up: async (db) => {
347
+ // Users table
348
+ await db.schema
349
+ .createTable('users')
350
+ .addColumn('id', 'serial', (col) => col.primaryKey())
351
+ .addColumn('name', 'varchar', (col) => col.notNull())
352
+ .addColumn('email', 'varchar', (col) => col.notNull().unique())
353
+ .addColumn('created_at', 'timestamp', (col) =>
354
+ col.defaultTo(sql`now()`).notNull(),
355
+ )
356
+ .execute();
357
+
358
+ // Posts table
359
+ await db.schema
360
+ .createTable('posts')
361
+ .addColumn('id', 'serial', (col) => col.primaryKey())
362
+ .addColumn('title', 'varchar', (col) => col.notNull())
363
+ .addColumn('content', 'text', (col) => col.notNull())
364
+ .addColumn('user_id', 'integer', (col) =>
365
+ col.notNull().references('users.id').onDelete('cascade'),
366
+ )
367
+ .addColumn('created_at', 'timestamp', (col) =>
368
+ col.defaultTo(sql`now()`).notNull(),
369
+ )
370
+ .execute();
371
+
372
+ // Add index
373
+ await db.schema
374
+ .createIndex('idx_posts_user_id')
375
+ .on('posts')
376
+ .column('user_id')
377
+ .execute();
378
+ },
379
+ });
380
+
381
+ provider.addMigration('002_add_updated_at', {
382
+ up: async (db) => {
383
+ await db.schema
384
+ .alterTable('users')
385
+ .addColumn('updated_at', 'timestamp', (col) =>
386
+ col.defaultTo(sql`now()`).notNull(),
387
+ )
388
+ .execute();
389
+
390
+ await db.schema
391
+ .alterTable('posts')
392
+ .addColumn('updated_at', 'timestamp', (col) =>
393
+ col.defaultTo(sql`now()`).notNull(),
394
+ )
395
+ .execute();
396
+ },
397
+ });
398
+
399
+ const migrator = new PostgresKyselyMigrator({
400
+ uri,
401
+ db,
402
+ provider,
403
+ });
404
+
405
+ const cleanup = await migrator.start();
406
+
407
+ expect(consoleSpy).toHaveBeenCalledWith(
408
+ `Migrating database: ${integrationDbName}`,
409
+ );
410
+ expect(consoleSpy).toHaveBeenCalledWith(
411
+ 'Applied 2 migrations successfully',
412
+ );
413
+
414
+ // Verify final schema
415
+ const verifyClient = new Client({
416
+ host: 'localhost',
417
+ port: 5432,
418
+ user: 'geekmidas',
419
+ password: 'geekmidas',
420
+ database: integrationDbName,
421
+ });
422
+
423
+ try {
424
+ await verifyClient.connect();
425
+
426
+ // Check columns exist
427
+ const columnsResult = await verifyClient.query(`
428
+ SELECT table_name, column_name
429
+ FROM information_schema.columns
430
+ WHERE table_schema = 'public'
431
+ AND table_name IN ('users', 'posts')
432
+ AND column_name = 'updated_at'
433
+ ORDER BY table_name
434
+ `);
435
+
436
+ expect(columnsResult.rowCount).toBe(2);
437
+
438
+ // Check index exists
439
+ const indexResult = await verifyClient.query(`
440
+ SELECT indexname FROM pg_indexes
441
+ WHERE schemaname = 'public'
442
+ AND indexname = 'idx_posts_user_id'
443
+ `);
444
+
445
+ expect(indexResult.rowCount).toBe(1);
446
+ } finally {
447
+ await verifyClient.end();
448
+ }
449
+
450
+ // Cleanup
451
+ await cleanup();
452
+ });
453
+
454
+ it('should handle empty migrations', async () => {
455
+ const emptyDbName = `test_empty_${Date.now()}`;
456
+ const uri = `postgresql://geekmidas:geekmidas@localhost:5432/${emptyDbName}`;
457
+
458
+ const db = new Kysely<TestSchema>({
459
+ dialect: new PostgresDialect({
460
+ pool: new Pool({
461
+ host: 'localhost',
462
+ port: 5432,
463
+ user: 'geekmidas',
464
+ password: 'geekmidas',
465
+ database: emptyDbName,
466
+ }),
467
+ }),
468
+ });
469
+
470
+ const provider = new TestMigrationProvider();
471
+ // No migrations added
472
+
473
+ const migrator = new PostgresKyselyMigrator({
474
+ uri,
475
+ db,
476
+ provider,
477
+ });
478
+
479
+ const cleanup = await migrator.start();
480
+
481
+ expect(consoleSpy).toHaveBeenCalledWith(
482
+ 'Applied 0 migrations successfully',
483
+ );
484
+
485
+ await cleanup();
486
+ });
487
+
488
+ it('should work with FileMigrationProvider pattern', async () => {
489
+ const fileProviderDbName = `test_file_provider_${Date.now()}`;
490
+ const uri = `postgresql://geekmidas:geekmidas@localhost:5432/${fileProviderDbName}`;
491
+
492
+ const db = new Kysely<TestSchema>({
493
+ dialect: new PostgresDialect({
494
+ pool: new Pool({
495
+ host: 'localhost',
496
+ port: 5432,
497
+ user: 'geekmidas',
498
+ password: 'geekmidas',
499
+ database: fileProviderDbName,
500
+ }),
501
+ }),
502
+ });
503
+
504
+ // Simulate a file-based migration provider
505
+ const migrations = {
506
+ '2024_01_01_000001_create_users': {
507
+ up: async (db: Kysely<any>) => {
508
+ await db.schema
509
+ .createTable('users')
510
+ .addColumn('id', 'serial', (col) => col.primaryKey())
511
+ .addColumn('username', 'varchar', (col) => col.notNull().unique())
512
+ .execute();
513
+ },
514
+ down: async (db: Kysely<any>) => {
515
+ await db.schema.dropTable('users').execute();
516
+ },
517
+ },
518
+ '2024_01_02_000001_create_sessions': {
519
+ up: async (db: Kysely<any>) => {
520
+ await db.schema
521
+ .createTable('sessions')
522
+ .addColumn('id', 'serial', (col) => col.primaryKey())
523
+ .addColumn('user_id', 'integer', (col) =>
524
+ col.notNull().references('users.id'),
525
+ )
526
+ .addColumn('token', 'varchar', (col) => col.notNull())
527
+ .execute();
528
+ },
529
+ down: async (db: Kysely<any>) => {
530
+ await db.schema.dropTable('sessions').execute();
531
+ },
532
+ },
533
+ };
534
+
535
+ const provider: MigrationProvider = {
536
+ async getMigrations() {
537
+ return migrations;
538
+ },
539
+ };
540
+
541
+ const migrator = new PostgresKyselyMigrator({
542
+ uri,
543
+ db,
544
+ provider,
545
+ });
546
+
547
+ const cleanup = await migrator.start();
548
+
549
+ expect(consoleSpy).toHaveBeenCalledWith(
550
+ 'Applied 2 migrations successfully',
551
+ );
552
+
553
+ // Verify both tables exist
554
+ const verifyClient = new Client({
555
+ host: 'localhost',
556
+ port: 5432,
557
+ user: 'geekmidas',
558
+ password: 'geekmidas',
559
+ database: fileProviderDbName,
560
+ });
561
+
562
+ try {
563
+ await verifyClient.connect();
564
+ const tablesResult = await verifyClient.query(`
565
+ SELECT table_name FROM information_schema.tables
566
+ WHERE table_schema = 'public'
567
+ AND table_name IN ('users', 'sessions')
568
+ ORDER BY table_name
569
+ `);
570
+
571
+ expect(tablesResult.rowCount).toBe(2);
572
+ } finally {
573
+ await verifyClient.end();
574
+ }
575
+
576
+ await cleanup();
577
+ });
578
+ });
579
+
580
+ describe('error scenarios', () => {
581
+ it('should handle provider errors', async () => {
582
+ const providerErrorDbName = `test_provider_error_${Date.now()}`;
583
+ const uri = `postgresql://geekmidas:geekmidas@localhost:5432/${providerErrorDbName}`;
584
+
585
+ const db = new Kysely<TestSchema>({
586
+ dialect: new PostgresDialect({
587
+ pool: new Pool({
588
+ host: 'localhost',
589
+ port: 5432,
590
+ user: 'geekmidas',
591
+ password: 'geekmidas',
592
+ database: providerErrorDbName,
593
+ }),
594
+ }),
595
+ });
596
+
597
+ const provider = new TestMigrationProvider();
598
+ provider.setError(true);
599
+
600
+ const migrator = new PostgresKyselyMigrator({
601
+ uri,
602
+ db,
603
+ provider,
604
+ });
605
+
606
+ await expect(migrator.start()).rejects.toThrow(
607
+ 'Failed to load migrations',
608
+ );
609
+
610
+ // Ensure db is closed
611
+ await db.destroy();
612
+
613
+ // Cleanup
614
+ const cleanupClient = new Client({
615
+ host: 'localhost',
616
+ port: 5432,
617
+ user: 'geekmidas',
618
+ password: 'geekmidas',
619
+ database: 'postgres',
620
+ });
621
+ try {
622
+ await cleanupClient.connect();
623
+ await cleanupClient.query(
624
+ `DROP DATABASE IF EXISTS "${providerErrorDbName}"`,
625
+ );
626
+ } finally {
627
+ await cleanupClient.end();
628
+ }
629
+ }, 10000);
630
+
631
+ it('should handle invalid SQL in migrations', async () => {
632
+ const invalidSqlDbName = `test_invalid_sql_${Date.now()}`;
633
+ const uri = `postgresql://geekmidas:geekmidas@localhost:5432/${invalidSqlDbName}`;
634
+
635
+ const db = new Kysely<TestSchema>({
636
+ dialect: new PostgresDialect({
637
+ pool: new Pool({
638
+ host: 'localhost',
639
+ port: 5432,
640
+ user: 'geekmidas',
641
+ password: 'geekmidas',
642
+ database: invalidSqlDbName,
643
+ }),
644
+ }),
645
+ });
646
+
647
+ const provider = new TestMigrationProvider();
648
+ provider.addMigration('001_invalid_sql', {
649
+ up: async (db) => {
650
+ // Try to reference non-existent table
651
+ await db.schema
652
+ .createTable('posts')
653
+ .addColumn('id', 'serial', (col) => col.primaryKey())
654
+ .addColumn('user_id', 'integer', (col) =>
655
+ col.notNull().references('non_existent_table.id'),
656
+ )
657
+ .execute();
658
+ },
659
+ });
660
+
661
+ const migrator = new PostgresKyselyMigrator({
662
+ uri,
663
+ db,
664
+ provider,
665
+ });
666
+
667
+ await expect(migrator.start()).rejects.toThrow();
668
+
669
+ // Ensure db is closed
670
+ await db.destroy();
671
+
672
+ // Cleanup
673
+ const cleanupClient = new Client({
674
+ host: 'localhost',
675
+ port: 5432,
676
+ user: 'geekmidas',
677
+ password: 'geekmidas',
678
+ database: 'postgres',
679
+ });
680
+ try {
681
+ await cleanupClient.connect();
682
+ await cleanupClient.query(
683
+ `DROP DATABASE IF EXISTS "${invalidSqlDbName}"`,
684
+ );
685
+ } finally {
686
+ await cleanupClient.end();
687
+ }
688
+ }, 10000);
689
+ });
690
+ });