@geekmidas/testkit 0.4.0 → 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (209)
  1. package/CHANGELOG.md +13 -0
  2. package/dist/Factory-BFVnMMCC.mjs.map +1 -1
  3. package/dist/{Factory-c16c27Y6.d.cts → Factory-BOX312yd.d.cts} +3 -2
  4. package/dist/Factory-BOX312yd.d.cts.map +1 -0
  5. package/dist/Factory-BhjUOBWN.cjs.map +1 -1
  6. package/dist/{Factory-BcGJjLc8.d.mts → Factory-SFupxRC2.d.mts} +2 -1
  7. package/dist/Factory-SFupxRC2.d.mts.map +1 -0
  8. package/dist/Factory.d.cts +2 -2
  9. package/dist/Factory.d.mts +1 -1
  10. package/dist/KyselyFactory-BFqVIn_0.cjs.map +1 -1
  11. package/dist/KyselyFactory-DMswpwji.mjs.map +1 -1
  12. package/dist/{KyselyFactory-uZ45h7YU.d.cts → KyselyFactory-Dy5zzV4B.d.cts} +4 -3
  13. package/dist/KyselyFactory-Dy5zzV4B.d.cts.map +1 -0
  14. package/dist/{KyselyFactory-Cj-EultY.d.mts → KyselyFactory-vAxYodck.d.mts} +3 -2
  15. package/dist/KyselyFactory-vAxYodck.d.mts.map +1 -0
  16. package/dist/KyselyFactory.d.cts +3 -3
  17. package/dist/KyselyFactory.d.mts +2 -2
  18. package/dist/{ObjectionFactory-DL4qkuF1.d.mts → ObjectionFactory-BWjB49-i.d.mts} +3 -2
  19. package/dist/ObjectionFactory-BWjB49-i.d.mts.map +1 -0
  20. package/dist/ObjectionFactory-BeFBYcan.cjs.map +1 -1
  21. package/dist/{ObjectionFactory-CdhzKs4f.d.cts → ObjectionFactory-CD-WFuMJ.d.cts} +4 -3
  22. package/dist/ObjectionFactory-CD-WFuMJ.d.cts.map +1 -0
  23. package/dist/ObjectionFactory-QCJ7u0Ql.mjs.map +1 -1
  24. package/dist/ObjectionFactory.d.cts +3 -3
  25. package/dist/ObjectionFactory.d.mts +2 -2
  26. package/dist/{PostgresKyselyMigrator-upT-hmrz.mjs → PostgresKyselyMigrator-6sE1KOni.mjs} +2 -2
  27. package/dist/PostgresKyselyMigrator-6sE1KOni.mjs.map +1 -0
  28. package/dist/{PostgresKyselyMigrator-CIx3AFSR.d.mts → PostgresKyselyMigrator-CBltSOq5.d.cts} +3 -2
  29. package/dist/PostgresKyselyMigrator-CBltSOq5.d.cts.map +1 -0
  30. package/dist/{PostgresKyselyMigrator-CfytARcA.cjs → PostgresKyselyMigrator-D6IbPq8t.cjs} +2 -2
  31. package/dist/PostgresKyselyMigrator-D6IbPq8t.cjs.map +1 -0
  32. package/dist/{PostgresKyselyMigrator-CQ3aUoy_.d.cts → PostgresKyselyMigrator-DrVWncqd.d.mts} +3 -2
  33. package/dist/PostgresKyselyMigrator-DrVWncqd.d.mts.map +1 -0
  34. package/dist/PostgresKyselyMigrator.cjs +2 -2
  35. package/dist/PostgresKyselyMigrator.d.cts +2 -2
  36. package/dist/PostgresKyselyMigrator.d.mts +2 -2
  37. package/dist/PostgresKyselyMigrator.mjs +2 -2
  38. package/dist/{PostgresMigrator-DbuJGAVy.mjs → PostgresMigrator-BjjenqSd.mjs} +2 -2
  39. package/dist/PostgresMigrator-BjjenqSd.mjs.map +1 -0
  40. package/dist/{PostgresMigrator-D5UkK1_K.d.cts → PostgresMigrator-Bres0U6E.d.cts} +2 -1
  41. package/dist/PostgresMigrator-Bres0U6E.d.cts.map +1 -0
  42. package/dist/{PostgresMigrator-DFcNdCvD.cjs → PostgresMigrator-D6dQn0x2.cjs} +2 -2
  43. package/dist/PostgresMigrator-D6dQn0x2.cjs.map +1 -0
  44. package/dist/{PostgresMigrator-DQaRxoaY.d.mts → PostgresMigrator-S-YYosAC.d.mts} +2 -1
  45. package/dist/PostgresMigrator-S-YYosAC.d.mts.map +1 -0
  46. package/dist/PostgresMigrator.cjs +1 -1
  47. package/dist/PostgresMigrator.d.cts +1 -1
  48. package/dist/PostgresMigrator.d.mts +1 -1
  49. package/dist/PostgresMigrator.mjs +1 -1
  50. package/dist/{PostgresObjectionMigrator-CZHHcCOv.d.cts → PostgresObjectionMigrator-CPfBAP7r.d.cts} +3 -2
  51. package/dist/PostgresObjectionMigrator-CPfBAP7r.d.cts.map +1 -0
  52. package/dist/{PostgresObjectionMigrator-BG6ymgnt.cjs → PostgresObjectionMigrator-DK8ODIHQ.cjs} +2 -2
  53. package/dist/PostgresObjectionMigrator-DK8ODIHQ.cjs.map +1 -0
  54. package/dist/{PostgresObjectionMigrator-D_hCcrQu.d.mts → PostgresObjectionMigrator-DVEqB5tp.d.mts} +3 -2
  55. package/dist/PostgresObjectionMigrator-DVEqB5tp.d.mts.map +1 -0
  56. package/dist/{PostgresObjectionMigrator-DPj2pOpX.mjs → PostgresObjectionMigrator-D_QxXbIN.mjs} +2 -2
  57. package/dist/PostgresObjectionMigrator-D_QxXbIN.mjs.map +1 -0
  58. package/dist/PostgresObjectionMigrator.cjs +2 -2
  59. package/dist/PostgresObjectionMigrator.d.cts +2 -2
  60. package/dist/PostgresObjectionMigrator.d.mts +2 -2
  61. package/dist/PostgresObjectionMigrator.mjs +2 -2
  62. package/dist/{VitestKyselyTransactionIsolator-D3EZZhjZ.d.cts → VitestKyselyTransactionIsolator-CduJlHoT.d.cts} +4 -3
  63. package/dist/VitestKyselyTransactionIsolator-CduJlHoT.d.cts.map +1 -0
  64. package/dist/{VitestKyselyTransactionIsolator-Dxlp1u0f.d.mts → VitestKyselyTransactionIsolator-Cswnnj0k.d.mts} +4 -3
  65. package/dist/VitestKyselyTransactionIsolator-Cswnnj0k.d.mts.map +1 -0
  66. package/dist/{VitestKyselyTransactionIsolator-EvDLk5zg.cjs → VitestKyselyTransactionIsolator-D7RRXOBa.cjs} +2 -2
  67. package/dist/VitestKyselyTransactionIsolator-D7RRXOBa.cjs.map +1 -0
  68. package/dist/{VitestKyselyTransactionIsolator-CNURW8y6.mjs → VitestKyselyTransactionIsolator-DceyIqr4.mjs} +2 -2
  69. package/dist/VitestKyselyTransactionIsolator-DceyIqr4.mjs.map +1 -0
  70. package/dist/VitestKyselyTransactionIsolator.cjs +1 -1
  71. package/dist/VitestKyselyTransactionIsolator.d.cts +2 -2
  72. package/dist/VitestKyselyTransactionIsolator.d.mts +2 -2
  73. package/dist/VitestKyselyTransactionIsolator.mjs +1 -1
  74. package/dist/{VitestObjectionTransactionIsolator-1TpsPqfG.d.cts → VitestObjectionTransactionIsolator-BXoR6xdG.d.cts} +4 -3
  75. package/dist/VitestObjectionTransactionIsolator-BXoR6xdG.d.cts.map +1 -0
  76. package/dist/{VitestObjectionTransactionIsolator-CM5KTAFA.cjs → VitestObjectionTransactionIsolator-CdLRrzNf.cjs} +2 -2
  77. package/dist/VitestObjectionTransactionIsolator-CdLRrzNf.cjs.map +1 -0
  78. package/dist/{VitestObjectionTransactionIsolator-jQFaCz0u.mjs → VitestObjectionTransactionIsolator-OF2osYY5.mjs} +2 -2
  79. package/dist/VitestObjectionTransactionIsolator-OF2osYY5.mjs.map +1 -0
  80. package/dist/{VitestObjectionTransactionIsolator-i9jIgU8Q.d.mts → VitestObjectionTransactionIsolator-x6hY5j4u.d.mts} +4 -3
  81. package/dist/VitestObjectionTransactionIsolator-x6hY5j4u.d.mts.map +1 -0
  82. package/dist/VitestObjectionTransactionIsolator.cjs +1 -1
  83. package/dist/VitestObjectionTransactionIsolator.d.cts +2 -2
  84. package/dist/VitestObjectionTransactionIsolator.d.mts +2 -2
  85. package/dist/VitestObjectionTransactionIsolator.mjs +1 -1
  86. package/dist/{VitestTransactionIsolator-BvR19bYn.d.mts → VitestTransactionIsolator-BNWJqh9f.d.mts} +3 -2
  87. package/dist/VitestTransactionIsolator-BNWJqh9f.d.mts.map +1 -0
  88. package/dist/VitestTransactionIsolator-CMfJXZP8.cjs.map +1 -1
  89. package/dist/{VitestTransactionIsolator-CwQaxZLP.d.cts → VitestTransactionIsolator-CSroc7Df.d.cts} +3 -2
  90. package/dist/VitestTransactionIsolator-CSroc7Df.d.cts.map +1 -0
  91. package/dist/VitestTransactionIsolator-DQ7tLqgV.mjs.map +1 -1
  92. package/dist/VitestTransactionIsolator.d.cts +1 -1
  93. package/dist/VitestTransactionIsolator.d.mts +1 -1
  94. package/dist/aws.cjs.map +1 -1
  95. package/dist/aws.d.cts +2 -0
  96. package/dist/aws.d.cts.map +1 -0
  97. package/dist/aws.d.mts +2 -0
  98. package/dist/aws.d.mts.map +1 -0
  99. package/dist/aws.mjs.map +1 -1
  100. package/dist/benchmark.cjs.map +1 -1
  101. package/dist/benchmark.d.cts +1 -0
  102. package/dist/benchmark.d.cts.map +1 -0
  103. package/dist/benchmark.d.mts +1 -0
  104. package/dist/benchmark.d.mts.map +1 -0
  105. package/dist/benchmark.mjs.map +1 -1
  106. package/dist/better-auth.cjs +29 -30
  107. package/dist/better-auth.cjs.map +1 -1
  108. package/dist/better-auth.d.cts +2 -2
  109. package/dist/better-auth.d.cts.map +1 -0
  110. package/dist/better-auth.d.mts.map +1 -0
  111. package/dist/better-auth.mjs +29 -30
  112. package/dist/better-auth.mjs.map +1 -1
  113. package/dist/directory-B-Ozljzk.mjs.map +1 -1
  114. package/dist/directory-BVC8g7cX.cjs.map +1 -1
  115. package/dist/{directory-BXavAeJZ.d.mts → directory-CVrfTq1I.d.mts} +2 -1
  116. package/dist/directory-CVrfTq1I.d.mts.map +1 -0
  117. package/dist/{directory-Mi7tdOuD.d.cts → directory-DAnMWi50.d.cts} +2 -1
  118. package/dist/directory-DAnMWi50.d.cts.map +1 -0
  119. package/dist/faker-B14IEMIN.cjs.map +1 -1
  120. package/dist/faker-BGKYFoCT.mjs.map +1 -1
  121. package/dist/{faker-DvxiCtxc.d.cts → faker-Cg76aFNO.d.cts} +3 -3
  122. package/dist/faker-Cg76aFNO.d.cts.map +1 -0
  123. package/dist/faker-DHh7xs4u.d.mts.map +1 -0
  124. package/dist/faker.d.cts +1 -1
  125. package/dist/helpers.cjs.map +1 -1
  126. package/dist/helpers.d.cts +1 -0
  127. package/dist/helpers.d.cts.map +1 -0
  128. package/dist/helpers.d.mts +1 -0
  129. package/dist/helpers.d.mts.map +1 -0
  130. package/dist/helpers.mjs.map +1 -1
  131. package/dist/kysely.cjs +3 -3
  132. package/dist/kysely.cjs.map +1 -1
  133. package/dist/kysely.d.cts +8 -7
  134. package/dist/kysely.d.cts.map +1 -0
  135. package/dist/kysely.d.mts +7 -6
  136. package/dist/kysely.d.mts.map +1 -0
  137. package/dist/kysely.mjs +3 -3
  138. package/dist/kysely.mjs.map +1 -1
  139. package/dist/logger.cjs.map +1 -1
  140. package/dist/logger.d.cts +1 -0
  141. package/dist/logger.d.cts.map +1 -0
  142. package/dist/logger.d.mts +1 -0
  143. package/dist/logger.d.mts.map +1 -0
  144. package/dist/logger.mjs.map +1 -1
  145. package/dist/objection.cjs +3 -3
  146. package/dist/objection.cjs.map +1 -1
  147. package/dist/objection.d.cts +8 -7
  148. package/dist/objection.d.cts.map +1 -0
  149. package/dist/objection.d.mts +7 -6
  150. package/dist/objection.d.mts.map +1 -0
  151. package/dist/objection.mjs +3 -3
  152. package/dist/objection.mjs.map +1 -1
  153. package/dist/os/directory.d.cts +1 -1
  154. package/dist/os/directory.d.mts +1 -1
  155. package/dist/os/index.d.cts +1 -1
  156. package/dist/os/index.d.mts +1 -1
  157. package/dist/timer.cjs.map +1 -1
  158. package/dist/timer.d.cts +2 -0
  159. package/dist/timer.d.cts.map +1 -0
  160. package/dist/timer.d.mts +2 -0
  161. package/dist/timer.d.mts.map +1 -0
  162. package/dist/timer.mjs.map +1 -1
  163. package/package.json +5 -5
  164. package/src/Factory.ts +72 -72
  165. package/src/KyselyFactory.ts +330 -330
  166. package/src/ObjectionFactory.ts +354 -355
  167. package/src/PostgresKyselyMigrator.ts +37 -37
  168. package/src/PostgresMigrator.ts +107 -107
  169. package/src/PostgresObjectionMigrator.ts +91 -91
  170. package/src/VitestKyselyTransactionIsolator.ts +27 -27
  171. package/src/VitestObjectionTransactionIsolator.ts +39 -39
  172. package/src/VitestTransactionIsolator.ts +196 -195
  173. package/src/__tests__/Factory.spec.ts +163 -155
  174. package/src/__tests__/KyselyFactory.spec.ts +443 -439
  175. package/src/__tests__/ObjectionFactory.spec.ts +563 -557
  176. package/src/__tests__/PostgresKyselyMigrator.spec.ts +641 -641
  177. package/src/__tests__/PostgresMigrator.spec.ts +341 -341
  178. package/src/__tests__/PostgresObjectionMigrator.spec.ts +578 -578
  179. package/src/__tests__/VitestObjectionTransactionIsolator.spec.ts +114 -114
  180. package/src/__tests__/benchmark.spec.ts +140 -0
  181. package/src/__tests__/better-auth.spec.ts +15 -15
  182. package/src/__tests__/faker.spec.ts +226 -137
  183. package/src/__tests__/integration.spec.ts +597 -597
  184. package/src/__tests__/utilities.spec.ts +211 -0
  185. package/src/aws.ts +104 -104
  186. package/src/benchmark.ts +12 -12
  187. package/src/better-auth.ts +286 -301
  188. package/src/faker.ts +153 -153
  189. package/src/helpers.ts +6 -6
  190. package/src/kysely.ts +33 -33
  191. package/src/logger.ts +10 -10
  192. package/src/objection.ts +31 -31
  193. package/src/os/directory.ts +11 -10
  194. package/src/timer.ts +1 -1
  195. package/test/globalSetup.ts +45 -45
  196. package/test/helpers.ts +189 -189
  197. package/test/migrations/1749664623372_user.ts +13 -13
  198. package/tsconfig.json +9 -0
  199. package/vitest.config.ts +4 -4
  200. package/dist/PostgresKyselyMigrator-CfytARcA.cjs.map +0 -1
  201. package/dist/PostgresKyselyMigrator-upT-hmrz.mjs.map +0 -1
  202. package/dist/PostgresMigrator-DFcNdCvD.cjs.map +0 -1
  203. package/dist/PostgresMigrator-DbuJGAVy.mjs.map +0 -1
  204. package/dist/PostgresObjectionMigrator-BG6ymgnt.cjs.map +0 -1
  205. package/dist/PostgresObjectionMigrator-DPj2pOpX.mjs.map +0 -1
  206. package/dist/VitestKyselyTransactionIsolator-CNURW8y6.mjs.map +0 -1
  207. package/dist/VitestKyselyTransactionIsolator-EvDLk5zg.cjs.map +0 -1
  208. package/dist/VitestObjectionTransactionIsolator-CM5KTAFA.cjs.map +0 -1
  209. package/dist/VitestObjectionTransactionIsolator-jQFaCz0u.mjs.map +0 -1
package/test/globalSetup.ts CHANGED
@@ -3,52 +3,52 @@ import { Client } from 'pg';
  const TEST_DATABASE_NAME = 'geekmidas_test';

  export const TEST_DATABASE_CONFIG = {
-   host: 'localhost',
-   port: 5432,
-   user: 'geekmidas',
-   password: 'geekmidas',
-   database: TEST_DATABASE_NAME,
+   host: 'localhost',
+   port: 5432,
+   user: 'geekmidas',
+   password: 'geekmidas',
+   database: TEST_DATABASE_NAME,
  };

  export default async function globalSetup() {
-   const adminConfig = {
-     host: TEST_DATABASE_CONFIG.host,
-     port: TEST_DATABASE_CONFIG.port,
-     user: TEST_DATABASE_CONFIG.user,
-     password: TEST_DATABASE_CONFIG.password,
-     database: 'postgres', // Connect to default postgres database
-   };
-
-   const client = new Client(adminConfig);
-
-   try {
-     await client.connect();
-
-     // Check if test database exists
-     const result = await client.query(
-       `SELECT * FROM pg_catalog.pg_database WHERE datname = $1`,
-       [TEST_DATABASE_NAME],
-     );
-
-     // Create test database if it doesn't exist
-     if (result.rowCount === 0) {
-       await client.query(`CREATE DATABASE "${TEST_DATABASE_NAME}"`);
-     } else {
-     }
-   } finally {
-     await client.end();
-   }
-
-   // Return cleanup function that drops the database
-   return async () => {
-     const cleanupClient = new Client(adminConfig);
-     try {
-       await cleanupClient.connect();
-       await cleanupClient.query(
-         `DROP DATABASE IF EXISTS "${TEST_DATABASE_NAME}"`,
-       );
-     } finally {
-       await cleanupClient.end();
-     }
-   };
+   const adminConfig = {
+     host: TEST_DATABASE_CONFIG.host,
+     port: TEST_DATABASE_CONFIG.port,
+     user: TEST_DATABASE_CONFIG.user,
+     password: TEST_DATABASE_CONFIG.password,
+     database: 'postgres', // Connect to default postgres database
+   };
+
+   const client = new Client(adminConfig);
+
+   try {
+     await client.connect();
+
+     // Check if test database exists
+     const result = await client.query(
+       `SELECT * FROM pg_catalog.pg_database WHERE datname = $1`,
+       [TEST_DATABASE_NAME],
+     );
+
+     // Create test database if it doesn't exist
+     if (result.rowCount === 0) {
+       await client.query(`CREATE DATABASE "${TEST_DATABASE_NAME}"`);
+     } else {
+     }
+   } finally {
+     await client.end();
+   }
+
+   // Return cleanup function that drops the database
+   return async () => {
+     const cleanupClient = new Client(adminConfig);
+     try {
+       await cleanupClient.connect();
+       await cleanupClient.query(
+         `DROP DATABASE IF EXISTS "${TEST_DATABASE_NAME}"`,
+       );
+     } finally {
+       await cleanupClient.end();
+     }
+   };
  }
package/test/helpers.ts CHANGED
@@ -1,267 +1,267 @@
  import type { Knex } from 'knex';
  import knex from 'knex';
  import {
-   CamelCasePlugin,
-   type ControlledTransaction,
-   Kysely,
-   type Migrator,
-   PostgresDialect,
-   sql,
+   CamelCasePlugin,
+   type ControlledTransaction,
+   Kysely,
+   type Migrator,
+   PostgresDialect,
+   sql,
  } from 'kysely';
  import pg from 'pg';
  import { TEST_DATABASE_CONFIG } from './globalSetup';

  export interface TestDatabase {
-   users: {
-     id: number;
-     name: string;
-     email: string;
-     role?: 'admin' | 'user';
-     createdAt: Date;
-     updatedAt?: Date;
-   };
-   posts: {
-     id: number;
-     title: string;
-     content: string;
-     userId: number;
-     published?: boolean;
-     createdAt: Date;
-     updatedAt?: Date;
-   };
-   comments: {
-     id: number;
-     content: string;
-     postId: number;
-     userId: number;
-     createdAt: Date;
-   };
+   users: {
+     id: number;
+     name: string;
+     email: string;
+     role?: 'admin' | 'user';
+     createdAt: Date;
+     updatedAt?: Date;
+   };
+   posts: {
+     id: number;
+     title: string;
+     content: string;
+     userId: number;
+     published?: boolean;
+     createdAt: Date;
+     updatedAt?: Date;
+   };
+   comments: {
+     id: number;
+     content: string;
+     postId: number;
+     userId: number;
+     createdAt: Date;
+   };
  }

  /**
   * Creates a Kysely database instance for testing
   */
  export function createKyselyDb(): Kysely<TestDatabase> {
-   return new Kysely({
-     dialect: new PostgresDialect({
-       pool: new pg.Pool(TEST_DATABASE_CONFIG),
-     }),
-     plugins: [new CamelCasePlugin()],
-   });
+   return new Kysely({
+     dialect: new PostgresDialect({
+       pool: new pg.Pool(TEST_DATABASE_CONFIG),
+     }),
+     plugins: [new CamelCasePlugin()],
+   });
  }

  /**
   * Creates a Knex database instance for testing
   */
  export function createKnexDb(): Knex {
-   return knex({
-     client: 'pg',
-     connection: TEST_DATABASE_CONFIG,
-   });
+   return knex({
+     client: 'pg',
+     connection: TEST_DATABASE_CONFIG,
+   });
  }

  /**
   * Test setup helper that creates tables within a transaction and returns cleanup
   */
  export async function setupKyselyTest(db: Kysely<TestDatabase>): Promise<{
-   db: Kysely<TestDatabase>;
-   trx: ControlledTransaction<TestDatabase, []>;
-   cleanup: () => Promise<void>;
+   db: Kysely<TestDatabase>;
+   trx: ControlledTransaction<TestDatabase, []>;
+   cleanup: () => Promise<void>;
  }> {
-   const trx = await db.startTransaction().execute();
+   const trx = await db.startTransaction().execute();

-   // Create tables within the transaction
-   await createTestTables(db);
+   // Create tables within the transaction
+   await createTestTables(db);

-   const cleanup = async () => {
-     await trx.rollback().execute();
-     await db.destroy();
-   };
+   const cleanup = async () => {
+     await trx.rollback().execute();
+     await db.destroy();
+   };

-   return { db, trx, cleanup };
+   return { db, trx, cleanup };
  }

  /**
   * Test setup helper for Knex/Objection that creates tables within a transaction
   */
  export async function setupKnexTest(): Promise<{
-   db: Knex;
-   trx: Knex.Transaction;
-   cleanup: () => Promise<void>;
+   db: Knex;
+   trx: Knex.Transaction;
+   cleanup: () => Promise<void>;
  }> {
-   const db = createKnexDb();
-   const trx = await db.transaction();
+   const db = createKnexDb();
+   const trx = await db.transaction();

-   // Create tables within the transaction
-   await createTestTablesKnex(trx);
+   // Create tables within the transaction
+   await createTestTablesKnex(trx);

-   const cleanup = async () => {
-     await trx.rollback();
-     await db.destroy();
-   };
+   const cleanup = async () => {
+     await trx.rollback();
+     await db.destroy();
+   };

-   return { db, trx, cleanup };
+   return { db, trx, cleanup };
  }

  /**
   * Creates test tables using Kysely
   */
  export async function createTestTables(
-   db: Kysely<TestDatabase> | ControlledTransaction<TestDatabase, []>,
+   db: Kysely<TestDatabase> | ControlledTransaction<TestDatabase, []>,
  ): Promise<void> {
-   // Create users table
-   await db.schema
-     .createTable('users')
-     .addColumn('id', 'bigserial', (col) => col.primaryKey())
-     .addColumn('name', 'varchar', (col) => col.notNull())
-     .addColumn('email', 'varchar', (col) => col.notNull().unique())
-     .addColumn('role', 'varchar', (col) => col.defaultTo('user'))
-     .addColumn('created_at', 'timestamp', (col) =>
-       col.defaultTo(sql`now()`).notNull(),
-     )
-     .addColumn('updated_at', 'timestamp', (col) =>
-       col.defaultTo(sql`now()`).notNull(),
-     )
-     .execute();
+   // Create users table
+   await db.schema
+     .createTable('users')
+     .addColumn('id', 'bigserial', (col) => col.primaryKey())
+     .addColumn('name', 'varchar', (col) => col.notNull())
+     .addColumn('email', 'varchar', (col) => col.notNull().unique())
+     .addColumn('role', 'varchar', (col) => col.defaultTo('user'))
+     .addColumn('created_at', 'timestamp', (col) =>
+       col.defaultTo(sql`now()`).notNull(),
+     )
+     .addColumn('updated_at', 'timestamp', (col) =>
+       col.defaultTo(sql`now()`).notNull(),
+     )
+     .execute();

-   // Create posts table
-   await db.schema
-     .createTable('posts')
-     .addColumn('id', 'bigserial', (col) => col.primaryKey())
-     .addColumn('title', 'varchar', (col) => col.notNull())
-     .addColumn('content', 'text', (col) => col.notNull())
-     .addColumn('user_id', 'bigint', (col) =>
-       col.notNull().references('users.id').onDelete('cascade'),
-     )
-     .addColumn('published', 'boolean', (col) => col.defaultTo(false))
-     .addColumn('created_at', 'timestamp', (col) =>
-       col.defaultTo(sql`now()`).notNull(),
-     )
-     .addColumn('updated_at', 'timestamp', (col) =>
-       col.defaultTo(sql`now()`).notNull(),
-     )
-     .execute();
+   // Create posts table
+   await db.schema
+     .createTable('posts')
+     .addColumn('id', 'bigserial', (col) => col.primaryKey())
+     .addColumn('title', 'varchar', (col) => col.notNull())
+     .addColumn('content', 'text', (col) => col.notNull())
+     .addColumn('user_id', 'bigint', (col) =>
+       col.notNull().references('users.id').onDelete('cascade'),
+     )
+     .addColumn('published', 'boolean', (col) => col.defaultTo(false))
+     .addColumn('created_at', 'timestamp', (col) =>
+       col.defaultTo(sql`now()`).notNull(),
+     )
+     .addColumn('updated_at', 'timestamp', (col) =>
+       col.defaultTo(sql`now()`).notNull(),
+     )
+     .execute();

-   // Create comments table
-   await db.schema
-     .createTable('comments')
-     .addColumn('id', 'bigserial', (col) => col.primaryKey())
-     .addColumn('content', 'text', (col) => col.notNull())
-     .addColumn('post_id', 'bigint', (col) =>
-       col.notNull().references('posts.id').onDelete('cascade'),
-     )
-     .addColumn('user_id', 'bigint', (col) =>
-       col.notNull().references('users.id').onDelete('cascade'),
-     )
-     .addColumn('created_at', 'timestamp', (col) =>
-       col.defaultTo(sql`now()`).notNull(),
-     )
-     .execute();
+   // Create comments table
+   await db.schema
+     .createTable('comments')
+     .addColumn('id', 'bigserial', (col) => col.primaryKey())
+     .addColumn('content', 'text', (col) => col.notNull())
+     .addColumn('post_id', 'bigint', (col) =>
+       col.notNull().references('posts.id').onDelete('cascade'),
+     )
+     .addColumn('user_id', 'bigint', (col) =>
+       col.notNull().references('users.id').onDelete('cascade'),
+     )
+     .addColumn('created_at', 'timestamp', (col) =>
+       col.defaultTo(sql`now()`).notNull(),
+     )
+     .execute();
  }

  /**
   * Creates test tables using Knex
   */
  export async function createTestTablesKnex(
-   trx: Knex.Transaction,
+   trx: Knex.Transaction,
  ): Promise<void> {
-   // Create simple users table for testing factory
-   await trx.raw(`CREATE EXTENSION IF NOT EXISTS "uuid-ossp"`);
-   await trx.schema.createTableIfNotExists('users', (table) => {
-     table.uuid('id').primary().defaultTo(trx.raw('uuid_generate_v4()'));
-     table.string('name').notNullable();
-   });
+   // Create simple users table for testing factory
+   await trx.raw(`CREATE EXTENSION IF NOT EXISTS "uuid-ossp"`);
+   await trx.schema.createTableIfNotExists('users', (table) => {
+     table.uuid('id').primary().defaultTo(trx.raw('uuid_generate_v4()'));
+     table.string('name').notNullable();
+   });

-   // Create simple posts table for testing factory
-   await trx.schema.createTableIfNotExists('posts', (table) => {
-     table.uuid('id').primary().defaultTo(trx.raw('uuid_generate_v4()'));
-     table.string('title').notNullable();
-     table
-       .uuid('user_id')
-       .notNullable()
-       .references('id')
-       .inTable('users')
-       .onDelete('cascade');
-   });
+   // Create simple posts table for testing factory
+   await trx.schema.createTableIfNotExists('posts', (table) => {
+     table.uuid('id').primary().defaultTo(trx.raw('uuid_generate_v4()'));
+     table.string('title').notNullable();
+     table
+       .uuid('user_id')
+       .notNullable()
+       .references('id')
+       .inTable('users')
+       .onDelete('cascade');
+   });

-   // Create simple comments table for testing factory
-   await trx.schema.createTableIfNotExists('comments', (table) => {
-     table.uuid('id').primary().defaultTo(trx.raw('uuid_generate_v4()'));
-     table.text('content').notNullable();
-     table
-       .uuid('post_id')
-       .notNullable()
-       .references('id')
-       .inTable('posts')
-       .onDelete('cascade');
-     table
-       .uuid('user_id')
-       .notNullable()
-       .references('id')
-       .inTable('users')
-       .onDelete('cascade');
-   });
+   // Create simple comments table for testing factory
+   await trx.schema.createTableIfNotExists('comments', (table) => {
+     table.uuid('id').primary().defaultTo(trx.raw('uuid_generate_v4()'));
+     table.text('content').notNullable();
+     table
+       .uuid('post_id')
+       .notNullable()
+       .references('id')
+       .inTable('posts')
+       .onDelete('cascade');
+     table
+       .uuid('user_id')
+       .notNullable()
+       .references('id')
+       .inTable('users')
+       .onDelete('cascade');
+   });
  }

  /**
   * Helper for PostgresMigrator tests - creates a separate test database
   */
  export async function createTestDatabase(
-   dbName: string,
+   dbName: string,
  ): Promise<() => Promise<void>> {
-   const adminConfig = {
-     host: TEST_DATABASE_CONFIG.host,
-     port: TEST_DATABASE_CONFIG.port,
-     user: TEST_DATABASE_CONFIG.user,
-     password: TEST_DATABASE_CONFIG.password,
-     database: 'postgres',
-   };
+   const adminConfig = {
+     host: TEST_DATABASE_CONFIG.host,
+     port: TEST_DATABASE_CONFIG.port,
+     user: TEST_DATABASE_CONFIG.user,
+     password: TEST_DATABASE_CONFIG.password,
+     database: 'postgres',
+   };

-   const client = new pg.Client(adminConfig);
+   const client = new pg.Client(adminConfig);

-   try {
-     await client.connect();
+   try {
+     await client.connect();

-     // Drop database if it exists, then create it
-     await client.query(`DROP DATABASE IF EXISTS "${dbName}"`);
-     await client.query(`CREATE DATABASE "${dbName}"`);
-   } finally {
-     await client.end();
-   }
+     // Drop database if it exists, then create it
+     await client.query(`DROP DATABASE IF EXISTS "${dbName}"`);
+     await client.query(`CREATE DATABASE "${dbName}"`);
+   } finally {
+     await client.end();
+   }

-   // Return cleanup function
-   return async () => {
-     const cleanupClient = new pg.Client(adminConfig);
-     try {
-       await cleanupClient.connect();
-       await cleanupClient.query(`DROP DATABASE IF EXISTS "${dbName}"`);
-     } finally {
-       await cleanupClient.end();
-     }
-   };
+   // Return cleanup function
+   return async () => {
+     const cleanupClient = new pg.Client(adminConfig);
+     try {
+       await cleanupClient.connect();
+       await cleanupClient.query(`DROP DATABASE IF EXISTS "${dbName}"`);
+     } finally {
+       await cleanupClient.end();
+     }
+   };
  }

  /**
   * Creates a Kysely migrator for testing
   */
  export function createTestMigrator(
-   db: Kysely<any>,
-   migrations: Record<
-     string,
-     {
-       up: (db: Kysely<any>) => Promise<void>;
-       down: (db: Kysely<any>) => Promise<void>;
-     }
-   >,
+   db: Kysely<any>,
+   migrations: Record<
+     string,
+     {
+       up: (db: Kysely<any>) => Promise<void>;
+       down: (db: Kysely<any>) => Promise<void>;
+     }
+   >,
  ): Migrator {
-   const { Migrator } = require('kysely');
+   const { Migrator } = require('kysely');

-   return new Migrator({
-     db,
-     provider: {
-       async getMigrations() {
-         return migrations;
-       },
-     },
-   });
+   return new Migrator({
+     db,
+     provider: {
+       async getMigrations() {
+         return migrations;
+       },
+     },
+   });
  }
package/test/migrations/1749664623372_user.ts CHANGED
@@ -2,21 +2,21 @@ import { type Kysely, sql } from 'kysely';

  // `any` is required here since migrations should be frozen in time. alternatively, keep a "snapshot" db interface.
  export async function up(db: Kysely<any>): Promise<void> {
-   await db.schema
-     .createTable('users')
-     .addColumn('id', 'bigserial', (col) => col.primaryKey())
-     .addColumn('email', 'varchar', (col) => col.notNull().unique())
-     .addColumn('name', 'varchar', (col) => col.notNull())
-     .addColumn('created_at', 'timestamp', (col) =>
-       col.defaultTo(sql`now()`).notNull(),
-     )
-     .addColumn('updated_at', 'timestamp', (col) =>
-       col.defaultTo(sql`now()`).notNull(),
-     )
-     .execute();
+   await db.schema
+     .createTable('users')
+     .addColumn('id', 'bigserial', (col) => col.primaryKey())
+     .addColumn('email', 'varchar', (col) => col.notNull().unique())
+     .addColumn('name', 'varchar', (col) => col.notNull())
+     .addColumn('created_at', 'timestamp', (col) =>
+       col.defaultTo(sql`now()`).notNull(),
+     )
+     .addColumn('updated_at', 'timestamp', (col) =>
+       col.defaultTo(sql`now()`).notNull(),
+     )
+     .execute();
  }

  // `any` is required here since migrations should be frozen in time. alternatively, keep a "snapshot" db interface.
  export async function down(db: Kysely<any>): Promise<void> {
-   await db.schema.dropTable('users').execute();
+   await db.schema.dropTable('users').execute();
  }
package/tsconfig.json ADDED
@@ -0,0 +1,9 @@
+ {
+   "extends": "../../tsconfig.base.json",
+   "compilerOptions": {
+     "outDir": "./dist",
+     "rootDir": "./src",
+     "composite": true
+   },
+   "include": ["src/**/*"]
+ }
package/vitest.config.ts CHANGED
@@ -1,8 +1,8 @@
  import { defineProject } from 'vitest/config';

  export default defineProject({
-   test: {
-     globalSetup: ['test/globalSetup.ts'],
-     pool: 'forks',
-   },
+   test: {
+     globalSetup: ['test/globalSetup.ts'],
+     pool: 'forks',
+   },
  });
package/dist/PostgresKyselyMigrator-CfytARcA.cjs.map DELETED
@@ -1 +0,0 @@
- {"version":3,"file":"PostgresKyselyMigrator-CfytARcA.cjs","names":["PostgresMigrator","options: {\n uri: string;\n db: Kysely<any>;\n provider: MigrationProvider;\n }","Migrator"],"sources":["../src/PostgresKyselyMigrator.ts"],"sourcesContent":["import { type Kysely, type MigrationProvider, Migrator } from 'kysely';\nimport { PostgresMigrator } from './PostgresMigrator';\n\n/**\n * Default logger instance for migration operations.\n */\nconst logger = console;\n\n/**\n * PostgreSQL migrator implementation for Kysely ORM.\n * Extends PostgresMigrator to provide Kysely-specific migration functionality.\n * Automatically creates test databases and applies migrations for testing environments.\n *\n * @example\n * ```typescript\n * import { FileMigrationProvider } from 'kysely';\n * import { PostgresKyselyMigrator } from '@geekmidas/testkit';\n *\n * // Create migration provider\n * const provider = new FileMigrationProvider({\n * fs: require('fs'),\n * path: require('path'),\n * migrationFolder: path.join(__dirname, 'migrations')\n * });\n *\n * // Create Kysely instance\n * const db = new Kysely<Database>({\n * dialect: new PostgresDialect({\n * pool: new Pool({ connectionString: uri })\n * })\n * });\n *\n * // Create and use migrator\n * const migrator = new PostgresKyselyMigrator({\n * uri: 'postgresql://localhost:5432/test_db',\n * db,\n * provider\n * });\n *\n * const cleanup = await migrator.start();\n * // Run tests...\n * await cleanup();\n * ```\n */\nexport class PostgresKyselyMigrator extends PostgresMigrator {\n /**\n * Creates a new PostgresKyselyMigrator instance.\n *\n * @param options - Configuration options\n * @param options.uri - PostgreSQL connection URI\n * @param options.db - Kysely database instance\n * @param options.provider - Migration provider for locating migration files\n */\n constructor(\n private options: {\n uri: string;\n db: Kysely<any>;\n provider: MigrationProvider;\n },\n ) {\n super(options.uri);\n }\n\n /**\n * Executes Kysely migrations to the latest version.\n * Implements the abstract migrate() method from PostgresMigrator.\n *\n * @throws Error if migrations fail to apply\n * @returns Promise that resolves when all migrations are applied\n */\n async migrate(): Promise<void> {\n const migrator = new Migrator({\n db: this.options.db,\n provider: this.options.provider,\n });\n const migrations = await migrator.migrateToLatest();\n\n if (migrations.error) {\n logger.error(migrations.error, `Failed to apply migrations`);\n throw migrations.error;\n }\n\n await this.options.db.destroy();\n\n logger.log(`Applied ${migrations.results?.length} migrations successfully`);\n }\n}\n"],"mappings":";;;;;;;;AAMA,MAAM,SAAS;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAsCf,IAAa,yBAAb,cAA4CA,0CAAiB;;;;;;;;;CAS3D,YACUC,SAKR;AACA,QAAM,QAAQ,IAAI;EANV;CAOT;;;;;;;;CASD,MAAM,UAAyB;EAC7B,MAAM,WAAW,IAAIC,gBAAS;GAC5B,IAAI,KAAK,QAAQ;GACjB,UAAU,KAAK,QAAQ;EACxB;EACD,MAAM,aAAa,MAAM,SAAS,iBAAiB;AAEnD,MAAI,WAAW,OAAO;AACpB,UAAO,MAAM,WAAW,QAAQ,4BAA4B;AAC5D,SAAM,WAAW;EAClB;AAED,QAAM,KAAK,QAAQ,GAAG,SAAS;AAE/B,SAAO,KAAK,UAAU,WAAW,SAAS,OAAO,0BAA0B;CAC5E;AACF"}
package/dist/PostgresKyselyMigrator-upT-hmrz.mjs.map DELETED
@@ -1 +0,0 @@
- {"version":3,"file":"PostgresKyselyMigrator-upT-hmrz.mjs","names":["options: {\n uri: string;\n db: Kysely<any>;\n provider: MigrationProvider;\n }"],"sources":["../src/PostgresKyselyMigrator.ts"],"sourcesContent":["import { type Kysely, type MigrationProvider, Migrator } from 'kysely';\nimport { PostgresMigrator } from './PostgresMigrator';\n\n/**\n * Default logger instance for migration operations.\n */\nconst logger = console;\n\n/**\n * PostgreSQL migrator implementation for Kysely ORM.\n * Extends PostgresMigrator to provide Kysely-specific migration functionality.\n * Automatically creates test databases and applies migrations for testing environments.\n *\n * @example\n * ```typescript\n * import { FileMigrationProvider } from 'kysely';\n * import { PostgresKyselyMigrator } from '@geekmidas/testkit';\n *\n * // Create migration provider\n * const provider = new FileMigrationProvider({\n * fs: require('fs'),\n * path: require('path'),\n * migrationFolder: path.join(__dirname, 'migrations')\n * });\n *\n * // Create Kysely instance\n * const db = new Kysely<Database>({\n * dialect: new PostgresDialect({\n * pool: new Pool({ connectionString: uri })\n * })\n * });\n *\n * // Create and use migrator\n * const migrator = new PostgresKyselyMigrator({\n * uri: 'postgresql://localhost:5432/test_db',\n * db,\n * provider\n * });\n *\n * const cleanup = await migrator.start();\n * // Run tests...\n * await cleanup();\n * ```\n */\nexport class PostgresKyselyMigrator extends PostgresMigrator {\n /**\n * Creates a new PostgresKyselyMigrator instance.\n *\n * @param options - Configuration options\n * @param options.uri - PostgreSQL connection URI\n * @param options.db - Kysely database instance\n * @param options.provider - Migration provider for locating migration files\n */\n constructor(\n private options: {\n uri: string;\n db: Kysely<any>;\n provider: MigrationProvider;\n },\n ) {\n super(options.uri);\n }\n\n /**\n * Executes Kysely migrations to the latest version.\n * Implements the abstract migrate() method from PostgresMigrator.\n *\n * @throws Error if migrations fail to apply\n * @returns Promise that resolves when all migrations are applied\n */\n async migrate(): Promise<void> {\n const migrator = new Migrator({\n db: this.options.db,\n provider: this.options.provider,\n });\n const migrations = await migrator.migrateToLatest();\n\n if (migrations.error) {\n logger.error(migrations.error, `Failed to apply migrations`);\n throw migrations.error;\n }\n\n await this.options.db.destroy();\n\n logger.log(`Applied ${migrations.results?.length} migrations successfully`);\n }\n}\n"],"mappings":";;;;;;;AAMA,MAAM,SAAS;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAsCf,IAAa,yBAAb,cAA4C,iBAAiB;;;;;;;;;CAS3D,YACUA,SAKR;AACA,QAAM,QAAQ,IAAI;EANV;CAOT;;;;;;;;CASD,MAAM,UAAyB;EAC7B,MAAM,WAAW,IAAI,SAAS;GAC5B,IAAI,KAAK,QAAQ;GACjB,UAAU,KAAK,QAAQ;EACxB;EACD,MAAM,aAAa,MAAM,SAAS,iBAAiB;AAEnD,MAAI,WAAW,OAAO;AACpB,UAAO,MAAM,WAAW,QAAQ,4BAA4B;AAC5D,SAAM,WAAW;EAClB;AAED,QAAM,KAAK,QAAQ,GAAG,SAAS;AAE/B,SAAO,KAAK,UAAU,WAAW,SAAS,OAAO,0BAA0B;CAC5E;AACF"}
package/dist/PostgresMigrator-DFcNdCvD.cjs.map DELETED
@@ -1 +0,0 @@
- {"version":3,"file":"PostgresMigrator-DFcNdCvD.cjs","names":["uri: string"],"sources":["../src/PostgresMigrator.ts"],"sourcesContent":["import pg from 'pg';\n\nconst { Client } = pg;\n\n/**\n * Creates a PostgreSQL client connected to the 'postgres' database.\n * Extracts connection details from the provided URI.\n *\n * @param uri - PostgreSQL connection URI\n * @returns Object containing the target database name and client instance\n *\n * @example\n * ```typescript\n * const { database, db } = await setupClient('postgresql://user:pass@localhost:5432/mydb');\n * // database = 'mydb'\n * // db = Client instance connected to 'postgres' database\n * ```\n */\nasync function setupClient(uri: string) {\n const url = new URL(uri);\n\n const db = new Client({\n user: url.username,\n password: url.password,\n host: url.hostname,\n port: parseInt(url.port),\n database: 'postgres',\n });\n\n let database = url.pathname.slice(1);\n if (database.includes('?')) {\n database = database.substring(0, database.indexOf('?'));\n }\n return { database, db };\n}\n\n/**\n * Default logger instance for migration operations.\n */\nconst logger = console;\n\n/**\n * Abstract base class for PostgreSQL database migration utilities.\n * Provides database creation, migration, and cleanup functionality for testing.\n * Subclasses must implement the migrate() method to define migration logic.\n *\n * @example\n * ```typescript\n * class MyMigrator extends PostgresMigrator {\n * async migrate(): Promise<void> {\n * // Run your migrations here\n * await this.runMigrations();\n * }\n * }\n *\n * // Use in tests\n * const migrator = new MyMigrator('postgresql://localhost:5432/test_db');\n * const cleanup = await migrator.start();\n *\n * // Run tests...\n *\n * // Clean up\n * await cleanup();\n * ```\n */\nexport abstract class PostgresMigrator {\n /**\n * Creates a new PostgresMigrator instance.\n *\n * @param uri - PostgreSQL connection URI\n */\n constructor(private uri: string) {}\n\n /**\n * Abstract method to be implemented by subclasses.\n * Should contain the migration logic for setting up database schema.\n *\n * @returns Promise that resolves when migrations are complete\n */\n abstract migrate(): Promise<void>;\n\n /**\n * Creates a PostgreSQL database if it doesn't already exist.\n * Connects to the 'postgres' database to check and create the target database.\n *\n * @param uri - PostgreSQL connection URI\n * @returns Object indicating whether the database already existed\n * @private\n */\n private static async create(\n uri: string,\n ): Promise<{ alreadyExisted: boolean }> {\n const { database, db } = await setupClient(uri);\n try {\n await db.connect();\n const result = await db.query(\n `SELECT * FROM pg_catalog.pg_database WHERE datname = '${database}'`,\n );\n\n if (result.rowCount === 0) {\n await db.query(`CREATE DATABASE \"${database}\"`);\n }\n\n return {\n alreadyExisted: result.rowCount ? 
result.rowCount > 0 : false,\n };\n } finally {\n await db.end();\n }\n }\n\n /**\n * Drops a PostgreSQL database.\n * Used for cleanup after tests are complete.\n *\n * @param uri - PostgreSQL connection URI\n * @throws Error if database cannot be dropped\n * @private\n */\n private static async drop(uri: string): Promise<void> {\n const { database, db } = await setupClient(uri);\n try {\n await db.connect();\n await db.query(`DROP DATABASE \"${database}\"`);\n } finally {\n await db.end();\n }\n }\n\n /**\n * Starts the migration process by creating the database and running migrations.\n * Returns a cleanup function that will drop the database when called.\n *\n * @returns Async cleanup function that drops the created database\n *\n * @example\n * ```typescript\n * const migrator = new MyMigrator('postgresql://localhost:5432/test_db');\n *\n * // Start migrations and get cleanup function\n * const cleanup = await migrator.start();\n *\n * try {\n * // Run your tests here\n * await runTests();\n * } finally {\n * // Always clean up\n * await cleanup();\n * }\n * ```\n */\n async start() {\n const { database, db } = await setupClient(this.uri);\n try {\n await PostgresMigrator.create(this.uri);\n // Implement migration logic here\n await this.migrate();\n logger.log(`Migrating database: ${database}`);\n // Example: await db.query('CREATE TABLE example (id SERIAL PRIMARY KEY)');\n } finally {\n await db.end();\n }\n\n return async () => {\n await PostgresMigrator.drop(this.uri);\n };\n }\n}\n"],"mappings":";;;;AAEA,MAAM,EAAE,QAAQ,GAAG;;;;;;;;;;;;;;;AAgBnB,eAAe,YAAYA,KAAa;CACtC,MAAM,MAAM,IAAI,IAAI;CAEpB,MAAM,KAAK,IAAI,OAAO;EACpB,MAAM,IAAI;EACV,UAAU,IAAI;EACd,MAAM,IAAI;EACV,MAAM,SAAS,IAAI,KAAK;EACxB,UAAU;CACX;CAED,IAAI,WAAW,IAAI,SAAS,MAAM,EAAE;AACpC,KAAI,SAAS,SAAS,IAAI,CACxB,YAAW,SAAS,UAAU,GAAG,SAAS,QAAQ,IAAI,CAAC;AAEzD,QAAO;EAAE;EAAU;CAAI;AACxB;;;;AAKD,MAAM,SAAS;;;;;;;;;;;;;;;;;;;;;;;;;AA0Bf,IAAsB,mBAAtB,MAAsB,iBAAiB;;;;;;CAMrC,YAAoBA,KAAa;EAAb;CAAe;;;;;;;;;CAkBnC,aAAqB,OACnBA,KACsC;EACtC,MAAM,EAAE,UAAU,IAAI,GAAG,MAAM,YAAY,IAAI;AAC/C,MAAI;AACF,SAAM,GAAG,SAAS;GAClB,MAAM,SAAS,MAAM,GAAG,OACrB,wDAAwD,SAAS,GACnE;AAED,OAAI,OAAO,aAAa,EACtB,OAAM,GAAG,OAAO,mBAAmB,SAAS,GAAG;AAGjD,UAAO,EACL,gBAAgB,OAAO,WAAW,OAAO,WAAW,IAAI,MACzD;EACF,UAAS;AACR,SAAM,GAAG,KAAK;EACf;CACF;;;;;;;;;CAUD,aAAqB,KAAKA,KAA4B;EACpD,MAAM,EAAE,UAAU,IAAI,GAAG,MAAM,YAAY,IAAI;AAC/C,MAAI;AACF,SAAM,GAAG,SAAS;AAClB,SAAM,GAAG,OAAO,iBAAiB,SAAS,GAAG;EAC9C,UAAS;AACR,SAAM,GAAG,KAAK;EACf;CACF;;;;;;;;;;;;;;;;;;;;;;;CAwBD,MAAM,QAAQ;EACZ,MAAM,EAAE,UAAU,IAAI,GAAG,MAAM,YAAY,KAAK,IAAI;AACpD,MAAI;AACF,SAAM,iBAAiB,OAAO,KAAK,IAAI;AAEvC,SAAM,KAAK,SAAS;AACpB,UAAO,KAAK,sBAAsB,SAAS,EAAE;EAE9C,UAAS;AACR,SAAM,GAAG,KAAK;EACf;AAED,SAAO,YAAY;AACjB,SAAM,iBAAiB,KAAK,KAAK,IAAI;EACtC;CACF;AACF"}