@geekmidas/testkit 0.0.11 → 0.0.13

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (71)
  1. package/README.md +1 -1
  2. package/dist/{Factory-B9LP1L18.d.cts → Factory-D7P3bKKb.d.mts} +2 -2
  3. package/dist/{Factory-DiZSNxC0.d.mts → Factory-pNV7ZQ7-.d.cts} +2 -2
  4. package/dist/Factory.d.cts +2 -2
  5. package/dist/Factory.d.mts +2 -2
  6. package/dist/{KyselyFactory-ELiHgHVv.mjs → KyselyFactory-BcYkC0t2.mjs} +2 -2
  7. package/dist/{KyselyFactory-B-zlLbov.d.mts → KyselyFactory-BrWADI77.d.mts} +3 -3
  8. package/dist/{KyselyFactory-Bdq1s1Go.cjs → KyselyFactory-Cf0o2YxO.cjs} +2 -2
  9. package/dist/{KyselyFactory-CrLWxJW4.d.cts → KyselyFactory-DM2dnUXU.d.cts} +3 -3
  10. package/dist/KyselyFactory.cjs +2 -2
  11. package/dist/KyselyFactory.d.cts +3 -3
  12. package/dist/KyselyFactory.d.mts +3 -3
  13. package/dist/KyselyFactory.mjs +2 -2
  14. package/dist/{ObjectionFactory-aqM0dDW7.mjs → ObjectionFactory-8hebmnai.mjs} +21 -5
  15. package/dist/{ObjectionFactory-CJCpvwts.d.mts → ObjectionFactory-B40NQWSe.d.mts} +2 -2
  16. package/dist/{ObjectionFactory-Wq80ypMM.cjs → ObjectionFactory-CDriunkS.cjs} +21 -5
  17. package/dist/{ObjectionFactory-vME-wNlq.d.cts → ObjectionFactory-D3l1VuyX.d.cts} +2 -2
  18. package/dist/ObjectionFactory.cjs +2 -2
  19. package/dist/ObjectionFactory.d.cts +3 -3
  20. package/dist/ObjectionFactory.d.mts +3 -3
  21. package/dist/ObjectionFactory.mjs +2 -2
  22. package/dist/__tests__/KyselyFactory.spec.cjs +3 -3
  23. package/dist/__tests__/KyselyFactory.spec.mjs +3 -3
  24. package/dist/__tests__/ObjectionFactory.spec.cjs +289 -451
  25. package/dist/__tests__/ObjectionFactory.spec.mjs +290 -452
  26. package/dist/__tests__/PostgresKyselyMigrator.spec.cjs +397 -0
  27. package/dist/__tests__/PostgresKyselyMigrator.spec.d.cts +1 -0
  28. package/dist/__tests__/PostgresKyselyMigrator.spec.d.mts +1 -0
  29. package/dist/__tests__/PostgresKyselyMigrator.spec.mjs +396 -0
  30. package/dist/__tests__/PostgresMigrator.spec.cjs +1 -1
  31. package/dist/__tests__/PostgresMigrator.spec.mjs +1 -1
  32. package/dist/__tests__/PostgresObjectionMigrator.spec.cjs +1 -1
  33. package/dist/__tests__/PostgresObjectionMigrator.spec.mjs +1 -1
  34. package/dist/__tests__/VitestObjectionTransactionIsolator.spec.cjs +4 -12
  35. package/dist/__tests__/VitestObjectionTransactionIsolator.spec.mjs +4 -12
  36. package/dist/__tests__/faker.spec.cjs +1 -1
  37. package/dist/__tests__/faker.spec.mjs +1 -1
  38. package/dist/__tests__/integration.spec.cjs +3 -3
  39. package/dist/__tests__/integration.spec.mjs +3 -3
  40. package/dist/{faker-SMN4ira4.cjs → faker-B14IEMIN.cjs} +41 -1
  41. package/dist/{faker-CxKkEeYi.mjs → faker-BGKYFoCT.mjs} +36 -2
  42. package/dist/{faker-DAiFK3T3.d.cts → faker-BSH1EMtg.d.cts} +13 -3
  43. package/dist/{faker-nN9Ki6fn.d.mts → faker-C-Iuk_R1.d.mts} +13 -3
  44. package/dist/faker.cjs +2 -1
  45. package/dist/faker.d.cts +2 -2
  46. package/dist/faker.d.mts +2 -2
  47. package/dist/faker.mjs +2 -2
  48. package/dist/{helpers-BEmjyUVE.mjs → helpers-B4TXg3Wp.mjs} +11 -36
  49. package/dist/{helpers-CNMBePuj.cjs → helpers-Bf0nXhbu.cjs} +10 -41
  50. package/dist/kysely.cjs +2 -2
  51. package/dist/kysely.d.cts +3 -3
  52. package/dist/kysely.d.mts +3 -3
  53. package/dist/kysely.mjs +2 -2
  54. package/dist/objection.cjs +2 -2
  55. package/dist/objection.d.cts +3 -3
  56. package/dist/objection.d.mts +3 -3
  57. package/dist/objection.mjs +2 -2
  58. package/package.json +2 -2
  59. package/src/Factory.ts +4 -1
  60. package/src/KyselyFactory.ts +6 -2
  61. package/src/ObjectionFactory.ts +31 -4
  62. package/src/__tests__/ObjectionFactory.spec.ts +423 -542
  63. package/src/__tests__/PostgresKyselyMigrator.spec.ts +690 -0
  64. package/src/__tests__/VitestObjectionTransactionIsolator.spec.ts +0 -8
  65. package/src/faker.ts +86 -0
  66. package/test/helpers.ts +13 -21
  67. package/dist/example.cjs +0 -22
  68. package/dist/example.d.cts +0 -26
  69. package/dist/example.d.mts +0 -26
  70. package/dist/example.mjs +0 -22
  71. package/src/example.ts +0 -45
@@ -0,0 +1,396 @@
1
+ import { PostgresMigrator } from "../PostgresMigrator-DxPC_gGu.mjs";
2
+ import { PostgresKyselyMigrator } from "../PostgresKyselyMigrator-Bdhl251C.mjs";
3
+ import { createTestDatabase } from "../helpers-B4TXg3Wp.mjs";
4
+ import { Kysely, PostgresDialect, sql } from "kysely";
5
+ import { Client, Pool } from "pg";
6
+ import { afterAll, afterEach, beforeAll, beforeEach, describe, expect, it, vi } from "vitest";
7
+
8
+ //#region src/__tests__/PostgresKyselyMigrator.spec.ts
9
+ var TestMigrationProvider = class {
10
+ migrations = {};
11
+ shouldError = false;
12
+ addMigration(name, migration) {
13
+ this.migrations[name] = migration;
14
+ }
15
+ setError(shouldError) {
16
+ this.shouldError = shouldError;
17
+ }
18
+ async getMigrations() {
19
+ if (this.shouldError) throw new Error("Failed to load migrations");
20
+ return this.migrations;
21
+ }
22
+ };
23
+ describe("PostgresKyselyMigrator", () => {
24
+ let testDbName;
25
+ let cleanupDb;
26
+ let consoleSpy;
27
+ let consoleErrorSpy;
28
+ beforeAll(async () => {
29
+ testDbName = `test_kysely_migrator_${Date.now()}`;
30
+ cleanupDb = await createTestDatabase(testDbName);
31
+ });
32
+ beforeEach(() => {
33
+ consoleSpy = vi.spyOn(console, "log").mockImplementation(() => {});
34
+ consoleErrorSpy = vi.spyOn(console, "error").mockImplementation(() => {});
35
+ });
36
+ afterEach(() => {
37
+ consoleSpy.mockRestore();
38
+ consoleErrorSpy.mockRestore();
39
+ });
40
+ afterAll(async () => {
41
+ await cleanupDb();
42
+ });
43
+ describe("constructor", () => {
44
+ it("should create a PostgresKyselyMigrator instance", () => {
45
+ const db = new Kysely({ dialect: new PostgresDialect({ pool: new Pool({
46
+ host: "localhost",
47
+ port: 5432,
48
+ user: "geekmidas",
49
+ password: "geekmidas",
50
+ database: testDbName
51
+ }) }) });
52
+ const provider = new TestMigrationProvider();
53
+ const migrator = new PostgresKyselyMigrator({
54
+ uri: `postgresql://geekmidas:geekmidas@localhost:5432/${testDbName}`,
55
+ db,
56
+ provider
57
+ });
58
+ expect(migrator).toBeInstanceOf(PostgresKyselyMigrator);
59
+ expect(migrator).toBeInstanceOf(PostgresMigrator);
60
+ });
61
+ });
62
+ describe("migrate method", () => {
63
+ it("should apply migrations successfully", async () => {
64
+ const newDbName = `test_migrate_${Date.now()}`;
65
+ const uri = `postgresql://geekmidas:geekmidas@localhost:5432/${newDbName}`;
66
+ const db = new Kysely({ dialect: new PostgresDialect({ pool: new Pool({
67
+ host: "localhost",
68
+ port: 5432,
69
+ user: "geekmidas",
70
+ password: "geekmidas",
71
+ database: newDbName
72
+ }) }) });
73
+ const provider = new TestMigrationProvider();
74
+ provider.addMigration("001_create_users", {
75
+ up: async (db$1) => {
76
+ await db$1.schema.createTable("users").addColumn("id", "serial", (col) => col.primaryKey()).addColumn("name", "varchar", (col) => col.notNull()).addColumn("email", "varchar", (col) => col.notNull().unique()).addColumn("created_at", "timestamp", (col) => col.defaultTo(sql`now()`).notNull()).execute();
77
+ },
78
+ down: async (db$1) => {
79
+ await db$1.schema.dropTable("users").execute();
80
+ }
81
+ });
82
+ provider.addMigration("002_create_posts", {
83
+ up: async (db$1) => {
84
+ await db$1.schema.createTable("posts").addColumn("id", "serial", (col) => col.primaryKey()).addColumn("title", "varchar", (col) => col.notNull()).addColumn("content", "text", (col) => col.notNull()).addColumn("user_id", "integer", (col) => col.notNull().references("users.id").onDelete("cascade")).addColumn("created_at", "timestamp", (col) => col.defaultTo(sql`now()`).notNull()).execute();
85
+ },
86
+ down: async (db$1) => {
87
+ await db$1.schema.dropTable("posts").execute();
88
+ }
89
+ });
90
+ const migrator = new PostgresKyselyMigrator({
91
+ uri,
92
+ db,
93
+ provider
94
+ });
95
+ const cleanup = await migrator.start();
96
+ expect(consoleSpy).toHaveBeenCalledWith(expect.stringContaining("Applied 2 migrations successfully"));
97
+ const client = new Client({
98
+ host: "localhost",
99
+ port: 5432,
100
+ user: "geekmidas",
101
+ password: "geekmidas",
102
+ database: newDbName
103
+ });
104
+ try {
105
+ await client.connect();
106
+ const tablesResult = await client.query(`
107
+ SELECT table_name FROM information_schema.tables
108
+ WHERE table_schema = 'public'
109
+ AND table_name IN ('users', 'posts')
110
+ ORDER BY table_name
111
+ `);
112
+ expect(tablesResult.rowCount).toBe(2);
113
+ expect(tablesResult.rows).toEqual([{ table_name: "posts" }, { table_name: "users" }]);
114
+ } finally {
115
+ await client.end();
116
+ }
117
+ await cleanup();
118
+ });
119
+ it("should handle migration errors", async () => {
120
+ const errorDbName = `test_migrate_error_${Date.now()}`;
121
+ const uri = `postgresql://geekmidas:geekmidas@localhost:5432/${errorDbName}`;
122
+ const db = new Kysely({ dialect: new PostgresDialect({ pool: new Pool({
123
+ host: "localhost",
124
+ port: 5432,
125
+ user: "geekmidas",
126
+ password: "geekmidas",
127
+ database: errorDbName
128
+ }) }) });
129
+ const provider = new TestMigrationProvider();
130
+ provider.addMigration("001_failing_migration", { up: async () => {
131
+ throw new Error("Migration failed intentionally");
132
+ } });
133
+ const migrator = new PostgresKyselyMigrator({
134
+ uri,
135
+ db,
136
+ provider
137
+ });
138
+ await expect(migrator.start()).rejects.toThrow("Migration failed intentionally");
139
+ expect(consoleErrorSpy).toHaveBeenCalledWith(expect.any(Error), "Failed to apply migrations");
140
+ await db.destroy();
141
+ const cleanupClient = new Client({
142
+ host: "localhost",
143
+ port: 5432,
144
+ user: "geekmidas",
145
+ password: "geekmidas",
146
+ database: "postgres"
147
+ });
148
+ try {
149
+ await cleanupClient.connect();
150
+ await cleanupClient.query(`
151
+ SELECT pg_terminate_backend(pg_stat_activity.pid)
152
+ FROM pg_stat_activity
153
+ WHERE pg_stat_activity.datname = '${errorDbName}'
154
+ AND pid <> pg_backend_pid()
155
+ `);
156
+ await cleanupClient.query(`DROP DATABASE IF EXISTS "${errorDbName}"`);
157
+ } finally {
158
+ await cleanupClient.end();
159
+ }
160
+ });
161
+ it("should destroy database connection after migrations", async () => {
162
+ const destroyDbName = `test_destroy_${Date.now()}`;
163
+ const uri = `postgresql://geekmidas:geekmidas@localhost:5432/${destroyDbName}`;
164
+ const db = new Kysely({ dialect: new PostgresDialect({ pool: new Pool({
165
+ host: "localhost",
166
+ port: 5432,
167
+ user: "geekmidas",
168
+ password: "geekmidas",
169
+ database: destroyDbName
170
+ }) }) });
171
+ const destroySpy = vi.spyOn(db, "destroy");
172
+ const provider = new TestMigrationProvider();
173
+ provider.addMigration("001_simple", { up: async (db$1) => {
174
+ await db$1.schema.createTable("test").addColumn("id", "serial", (col) => col.primaryKey()).execute();
175
+ } });
176
+ const migrator = new PostgresKyselyMigrator({
177
+ uri,
178
+ db,
179
+ provider
180
+ });
181
+ const cleanup = await migrator.start();
182
+ expect(destroySpy).toHaveBeenCalled();
183
+ await cleanup();
184
+ });
185
+ });
186
+ describe("integration with PostgresMigrator", () => {
187
+ it("should work with complete workflow", async () => {
188
+ const integrationDbName = `test_integration_${Date.now()}`;
189
+ const uri = `postgresql://geekmidas:geekmidas@localhost:5432/${integrationDbName}`;
190
+ const db = new Kysely({ dialect: new PostgresDialect({ pool: new Pool({
191
+ host: "localhost",
192
+ port: 5432,
193
+ user: "geekmidas",
194
+ password: "geekmidas",
195
+ database: integrationDbName
196
+ }) }) });
197
+ const provider = new TestMigrationProvider();
198
+ provider.addMigration("001_initial_schema", { up: async (db$1) => {
199
+ await db$1.schema.createTable("users").addColumn("id", "serial", (col) => col.primaryKey()).addColumn("name", "varchar", (col) => col.notNull()).addColumn("email", "varchar", (col) => col.notNull().unique()).addColumn("created_at", "timestamp", (col) => col.defaultTo(sql`now()`).notNull()).execute();
200
+ await db$1.schema.createTable("posts").addColumn("id", "serial", (col) => col.primaryKey()).addColumn("title", "varchar", (col) => col.notNull()).addColumn("content", "text", (col) => col.notNull()).addColumn("user_id", "integer", (col) => col.notNull().references("users.id").onDelete("cascade")).addColumn("created_at", "timestamp", (col) => col.defaultTo(sql`now()`).notNull()).execute();
201
+ await db$1.schema.createIndex("idx_posts_user_id").on("posts").column("user_id").execute();
202
+ } });
203
+ provider.addMigration("002_add_updated_at", { up: async (db$1) => {
204
+ await db$1.schema.alterTable("users").addColumn("updated_at", "timestamp", (col) => col.defaultTo(sql`now()`).notNull()).execute();
205
+ await db$1.schema.alterTable("posts").addColumn("updated_at", "timestamp", (col) => col.defaultTo(sql`now()`).notNull()).execute();
206
+ } });
207
+ const migrator = new PostgresKyselyMigrator({
208
+ uri,
209
+ db,
210
+ provider
211
+ });
212
+ const cleanup = await migrator.start();
213
+ expect(consoleSpy).toHaveBeenCalledWith(`Migrating database: ${integrationDbName}`);
214
+ expect(consoleSpy).toHaveBeenCalledWith("Applied 2 migrations successfully");
215
+ const verifyClient = new Client({
216
+ host: "localhost",
217
+ port: 5432,
218
+ user: "geekmidas",
219
+ password: "geekmidas",
220
+ database: integrationDbName
221
+ });
222
+ try {
223
+ await verifyClient.connect();
224
+ const columnsResult = await verifyClient.query(`
225
+ SELECT table_name, column_name
226
+ FROM information_schema.columns
227
+ WHERE table_schema = 'public'
228
+ AND table_name IN ('users', 'posts')
229
+ AND column_name = 'updated_at'
230
+ ORDER BY table_name
231
+ `);
232
+ expect(columnsResult.rowCount).toBe(2);
233
+ const indexResult = await verifyClient.query(`
234
+ SELECT indexname FROM pg_indexes
235
+ WHERE schemaname = 'public'
236
+ AND indexname = 'idx_posts_user_id'
237
+ `);
238
+ expect(indexResult.rowCount).toBe(1);
239
+ } finally {
240
+ await verifyClient.end();
241
+ }
242
+ await cleanup();
243
+ });
244
+ it("should handle empty migrations", async () => {
245
+ const emptyDbName = `test_empty_${Date.now()}`;
246
+ const uri = `postgresql://geekmidas:geekmidas@localhost:5432/${emptyDbName}`;
247
+ const db = new Kysely({ dialect: new PostgresDialect({ pool: new Pool({
248
+ host: "localhost",
249
+ port: 5432,
250
+ user: "geekmidas",
251
+ password: "geekmidas",
252
+ database: emptyDbName
253
+ }) }) });
254
+ const provider = new TestMigrationProvider();
255
+ const migrator = new PostgresKyselyMigrator({
256
+ uri,
257
+ db,
258
+ provider
259
+ });
260
+ const cleanup = await migrator.start();
261
+ expect(consoleSpy).toHaveBeenCalledWith("Applied 0 migrations successfully");
262
+ await cleanup();
263
+ });
264
+ it("should work with FileMigrationProvider pattern", async () => {
265
+ const fileProviderDbName = `test_file_provider_${Date.now()}`;
266
+ const uri = `postgresql://geekmidas:geekmidas@localhost:5432/${fileProviderDbName}`;
267
+ const db = new Kysely({ dialect: new PostgresDialect({ pool: new Pool({
268
+ host: "localhost",
269
+ port: 5432,
270
+ user: "geekmidas",
271
+ password: "geekmidas",
272
+ database: fileProviderDbName
273
+ }) }) });
274
+ const migrations = {
275
+ "2024_01_01_000001_create_users": {
276
+ up: async (db$1) => {
277
+ await db$1.schema.createTable("users").addColumn("id", "serial", (col) => col.primaryKey()).addColumn("username", "varchar", (col) => col.notNull().unique()).execute();
278
+ },
279
+ down: async (db$1) => {
280
+ await db$1.schema.dropTable("users").execute();
281
+ }
282
+ },
283
+ "2024_01_02_000001_create_sessions": {
284
+ up: async (db$1) => {
285
+ await db$1.schema.createTable("sessions").addColumn("id", "serial", (col) => col.primaryKey()).addColumn("user_id", "integer", (col) => col.notNull().references("users.id")).addColumn("token", "varchar", (col) => col.notNull()).execute();
286
+ },
287
+ down: async (db$1) => {
288
+ await db$1.schema.dropTable("sessions").execute();
289
+ }
290
+ }
291
+ };
292
+ const provider = { async getMigrations() {
293
+ return migrations;
294
+ } };
295
+ const migrator = new PostgresKyselyMigrator({
296
+ uri,
297
+ db,
298
+ provider
299
+ });
300
+ const cleanup = await migrator.start();
301
+ expect(consoleSpy).toHaveBeenCalledWith("Applied 2 migrations successfully");
302
+ const verifyClient = new Client({
303
+ host: "localhost",
304
+ port: 5432,
305
+ user: "geekmidas",
306
+ password: "geekmidas",
307
+ database: fileProviderDbName
308
+ });
309
+ try {
310
+ await verifyClient.connect();
311
+ const tablesResult = await verifyClient.query(`
312
+ SELECT table_name FROM information_schema.tables
313
+ WHERE table_schema = 'public'
314
+ AND table_name IN ('users', 'sessions')
315
+ ORDER BY table_name
316
+ `);
317
+ expect(tablesResult.rowCount).toBe(2);
318
+ } finally {
319
+ await verifyClient.end();
320
+ }
321
+ await cleanup();
322
+ });
323
+ });
324
+ describe("error scenarios", () => {
325
+ it("should handle provider errors", async () => {
326
+ const providerErrorDbName = `test_provider_error_${Date.now()}`;
327
+ const uri = `postgresql://geekmidas:geekmidas@localhost:5432/${providerErrorDbName}`;
328
+ const db = new Kysely({ dialect: new PostgresDialect({ pool: new Pool({
329
+ host: "localhost",
330
+ port: 5432,
331
+ user: "geekmidas",
332
+ password: "geekmidas",
333
+ database: providerErrorDbName
334
+ }) }) });
335
+ const provider = new TestMigrationProvider();
336
+ provider.setError(true);
337
+ const migrator = new PostgresKyselyMigrator({
338
+ uri,
339
+ db,
340
+ provider
341
+ });
342
+ await expect(migrator.start()).rejects.toThrow("Failed to load migrations");
343
+ await db.destroy();
344
+ const cleanupClient = new Client({
345
+ host: "localhost",
346
+ port: 5432,
347
+ user: "geekmidas",
348
+ password: "geekmidas",
349
+ database: "postgres"
350
+ });
351
+ try {
352
+ await cleanupClient.connect();
353
+ await cleanupClient.query(`DROP DATABASE IF EXISTS "${providerErrorDbName}"`);
354
+ } finally {
355
+ await cleanupClient.end();
356
+ }
357
+ }, 1e4);
358
+ it("should handle invalid SQL in migrations", async () => {
359
+ const invalidSqlDbName = `test_invalid_sql_${Date.now()}`;
360
+ const uri = `postgresql://geekmidas:geekmidas@localhost:5432/${invalidSqlDbName}`;
361
+ const db = new Kysely({ dialect: new PostgresDialect({ pool: new Pool({
362
+ host: "localhost",
363
+ port: 5432,
364
+ user: "geekmidas",
365
+ password: "geekmidas",
366
+ database: invalidSqlDbName
367
+ }) }) });
368
+ const provider = new TestMigrationProvider();
369
+ provider.addMigration("001_invalid_sql", { up: async (db$1) => {
370
+ await db$1.schema.createTable("posts").addColumn("id", "serial", (col) => col.primaryKey()).addColumn("user_id", "integer", (col) => col.notNull().references("non_existent_table.id")).execute();
371
+ } });
372
+ const migrator = new PostgresKyselyMigrator({
373
+ uri,
374
+ db,
375
+ provider
376
+ });
377
+ await expect(migrator.start()).rejects.toThrow();
378
+ await db.destroy();
379
+ const cleanupClient = new Client({
380
+ host: "localhost",
381
+ port: 5432,
382
+ user: "geekmidas",
383
+ password: "geekmidas",
384
+ database: "postgres"
385
+ });
386
+ try {
387
+ await cleanupClient.connect();
388
+ await cleanupClient.query(`DROP DATABASE IF EXISTS "${invalidSqlDbName}"`);
389
+ } finally {
390
+ await cleanupClient.end();
391
+ }
392
+ }, 1e4);
393
+ });
394
+ });
395
+
396
+ //#endregion
@@ -1,6 +1,6 @@
1
1
  const require_chunk = require('../chunk-CUT6urMc.cjs');
2
2
  const require_PostgresMigrator = require('../PostgresMigrator-DFcNdCvD.cjs');
3
- const require_helpers = require('../helpers-CNMBePuj.cjs');
3
+ const require_helpers = require('../helpers-Bf0nXhbu.cjs');
4
4
  const pg = require_chunk.__toESM(require("pg"));
5
5
  const vitest = require_chunk.__toESM(require("vitest"));
6
6
 
@@ -1,5 +1,5 @@
1
1
  import { PostgresMigrator } from "../PostgresMigrator-DxPC_gGu.mjs";
2
- import { createTestDatabase } from "../helpers-BEmjyUVE.mjs";
2
+ import { createTestDatabase } from "../helpers-B4TXg3Wp.mjs";
3
3
  import { Client } from "pg";
4
4
  import { afterAll, afterEach, beforeAll, beforeEach, describe, expect, it, vi } from "vitest";
5
5
 
@@ -1,7 +1,7 @@
1
1
  const require_chunk = require('../chunk-CUT6urMc.cjs');
2
2
  require('../PostgresMigrator-DFcNdCvD.cjs');
3
3
  const require_PostgresObjectionMigrator = require('../PostgresObjectionMigrator-BG6ymgnt.cjs');
4
- const require_helpers = require('../helpers-CNMBePuj.cjs');
4
+ const require_helpers = require('../helpers-Bf0nXhbu.cjs');
5
5
  const pg = require_chunk.__toESM(require("pg"));
6
6
  const vitest = require_chunk.__toESM(require("vitest"));
7
7
  const knex = require_chunk.__toESM(require("knex"));
@@ -1,6 +1,6 @@
1
1
  import "../PostgresMigrator-DxPC_gGu.mjs";
2
2
  import { PostgresObjectionMigrator } from "../PostgresObjectionMigrator-G4h5FLvU.mjs";
3
- import { createTestDatabase } from "../helpers-BEmjyUVE.mjs";
3
+ import { createTestDatabase } from "../helpers-B4TXg3Wp.mjs";
4
4
  import { Client } from "pg";
5
5
  import { afterAll, afterEach, beforeAll, beforeEach, describe, expect, it, vi } from "vitest";
6
6
  import knex from "knex";
@@ -1,13 +1,13 @@
1
1
  const require_chunk = require('../chunk-CUT6urMc.cjs');
2
2
  require('../Factory-WMhTNZ9S.cjs');
3
- require('../faker-SMN4ira4.cjs');
4
- require('../ObjectionFactory-Wq80ypMM.cjs');
3
+ require('../faker-B14IEMIN.cjs');
4
+ require('../ObjectionFactory-CDriunkS.cjs');
5
5
  require('../PostgresMigrator-DFcNdCvD.cjs');
6
6
  require('../PostgresObjectionMigrator-BG6ymgnt.cjs');
7
7
  require('../VitestTransactionIsolator-CruLTRRi.cjs');
8
8
  require('../VitestObjectionTransactionIsolator-DyqLp_in.cjs');
9
9
  const require_objection = require('../objection-CCD8fMLj.cjs');
10
- const require_helpers = require('../helpers-CNMBePuj.cjs');
10
+ const require_helpers = require('../helpers-Bf0nXhbu.cjs');
11
11
  const vitest = require_chunk.__toESM(require("vitest"));
12
12
  const objection = require_chunk.__toESM(require("objection"));
13
13
 
@@ -18,8 +18,6 @@ var User = class extends objection.Model {
18
18
  }
19
19
  id;
20
20
  name;
21
- email;
22
- role;
23
21
  createdAt;
24
22
  updatedAt;
25
23
  static get relationMappings() {
@@ -106,24 +104,18 @@ var Comment = class extends objection.Model {
106
104
  }
107
105
  };
108
106
  const knex = require_helpers.createKnexDb();
109
- objection.Model.knex(knex);
110
107
  const it = require_objection.wrapVitestObjectionTransaction(vitest.it, knex, async (trx) => {
111
108
  await require_helpers.createTestTablesKnex(trx);
112
109
  });
113
110
  (0, vitest.describe)("VitestObjectionTransactionIsolator", () => {
114
111
  (0, vitest.describe)("Transaction Isolation", () => {
115
112
  it("should rollback data after test completes", async ({ trx }) => {
116
- const user = await User.query(trx).insert({
117
- name: "Test User",
118
- email: "test@example.com",
119
- role: "user"
120
- });
113
+ const user = await User.query(trx).insert({ name: "Test User" });
121
114
  (0, vitest.expect)(user).toBeDefined();
122
115
  (0, vitest.expect)(user.id).toBeDefined();
123
116
  (0, vitest.expect)(user.name).toBe("Test User");
124
117
  const foundUser = await User.query(trx).findById(user.id);
125
118
  (0, vitest.expect)(foundUser).toBeDefined();
126
- (0, vitest.expect)(foundUser?.email).toBe(user.email);
127
119
  });
128
120
  });
129
121
  });
@@ -1,12 +1,12 @@
1
1
  import "../Factory-z2m01hMj.mjs";
2
- import "../faker-CxKkEeYi.mjs";
3
- import "../ObjectionFactory-aqM0dDW7.mjs";
2
+ import "../faker-BGKYFoCT.mjs";
3
+ import "../ObjectionFactory-8hebmnai.mjs";
4
4
  import "../PostgresMigrator-DxPC_gGu.mjs";
5
5
  import "../PostgresObjectionMigrator-G4h5FLvU.mjs";
6
6
  import "../VitestTransactionIsolator-BWwK-ca6.mjs";
7
7
  import "../VitestObjectionTransactionIsolator-BPoLUFop.mjs";
8
8
  import { wrapVitestObjectionTransaction } from "../objection-lsMgM5gP.mjs";
9
- import { createKnexDb, createTestTablesKnex } from "../helpers-BEmjyUVE.mjs";
9
+ import { createKnexDb, createTestTablesKnex } from "../helpers-B4TXg3Wp.mjs";
10
10
  import { describe, expect, it } from "vitest";
11
11
  import { Model } from "objection";
12
12
 
@@ -17,8 +17,6 @@ var User = class extends Model {
17
17
  }
18
18
  id;
19
19
  name;
20
- email;
21
- role;
22
20
  createdAt;
23
21
  updatedAt;
24
22
  static get relationMappings() {
@@ -105,24 +103,18 @@ var Comment = class extends Model {
105
103
  }
106
104
  };
107
105
  const knex = createKnexDb();
108
- Model.knex(knex);
109
106
  const it$1 = wrapVitestObjectionTransaction(it, knex, async (trx) => {
110
107
  await createTestTablesKnex(trx);
111
108
  });
112
109
  describe("VitestObjectionTransactionIsolator", () => {
113
110
  describe("Transaction Isolation", () => {
114
111
  it$1("should rollback data after test completes", async ({ trx }) => {
115
- const user = await User.query(trx).insert({
116
- name: "Test User",
117
- email: "test@example.com",
118
- role: "user"
119
- });
112
+ const user = await User.query(trx).insert({ name: "Test User" });
120
113
  expect(user).toBeDefined();
121
114
  expect(user.id).toBeDefined();
122
115
  expect(user.name).toBe("Test User");
123
116
  const foundUser = await User.query(trx).findById(user.id);
124
117
  expect(foundUser).toBeDefined();
125
- expect(foundUser?.email).toBe(user.email);
126
118
  });
127
119
  });
128
120
  });
@@ -1,5 +1,5 @@
1
1
  const require_chunk = require('../chunk-CUT6urMc.cjs');
2
- const require_faker = require('../faker-SMN4ira4.cjs');
2
+ const require_faker = require('../faker-B14IEMIN.cjs');
3
3
  const vitest = require_chunk.__toESM(require("vitest"));
4
4
 
5
5
  //#region src/__tests__/faker.spec.ts
@@ -1,4 +1,4 @@
1
- import { faker } from "../faker-CxKkEeYi.mjs";
1
+ import { faker } from "../faker-BGKYFoCT.mjs";
2
2
  import { beforeEach, describe, expect, it } from "vitest";
3
3
 
4
4
  //#region src/__tests__/faker.spec.ts
@@ -1,14 +1,14 @@
1
1
  const require_chunk = require('../chunk-CUT6urMc.cjs');
2
2
  require('../Factory-WMhTNZ9S.cjs');
3
- require('../faker-SMN4ira4.cjs');
4
- const require_KyselyFactory = require('../KyselyFactory-Bdq1s1Go.cjs');
3
+ require('../faker-B14IEMIN.cjs');
4
+ const require_KyselyFactory = require('../KyselyFactory-Cf0o2YxO.cjs');
5
5
  require('../PostgresMigrator-DFcNdCvD.cjs');
6
6
  require('../PostgresKyselyMigrator-CfytARcA.cjs');
7
7
  require('../VitestTransactionIsolator-CruLTRRi.cjs');
8
8
  require('../VitestKyselyTransactionIsolator-CIlpIO78.cjs');
9
9
  const require_helpers = require('../helpers-nEUtQ7eo.cjs');
10
10
  const require_kysely = require('../kysely-CBfCXxUn.cjs');
11
- const require_helpers$1 = require('../helpers-CNMBePuj.cjs');
11
+ const require_helpers$1 = require('../helpers-Bf0nXhbu.cjs');
12
12
  const vitest = require_chunk.__toESM(require("vitest"));
13
13
 
14
14
  //#region src/__tests__/integration.spec.ts
@@ -1,13 +1,13 @@
1
1
  import "../Factory-z2m01hMj.mjs";
2
- import "../faker-CxKkEeYi.mjs";
3
- import { KyselyFactory } from "../KyselyFactory-ELiHgHVv.mjs";
2
+ import "../faker-BGKYFoCT.mjs";
3
+ import { KyselyFactory } from "../KyselyFactory-BcYkC0t2.mjs";
4
4
  import "../PostgresMigrator-DxPC_gGu.mjs";
5
5
  import "../PostgresKyselyMigrator-Bdhl251C.mjs";
6
6
  import "../VitestTransactionIsolator-BWwK-ca6.mjs";
7
7
  import "../VitestKyselyTransactionIsolator-BKGT9nEG.mjs";
8
8
  import { createKyselyDb } from "../helpers-BuPmgzyQ.mjs";
9
9
  import { wrapVitestKyselyTransaction } from "../kysely-Cx_1pZYc.mjs";
10
- import { TEST_DATABASE_CONFIG, createTestTables } from "../helpers-BEmjyUVE.mjs";
10
+ import { TEST_DATABASE_CONFIG, createTestTables } from "../helpers-B4TXg3Wp.mjs";
11
11
  import { beforeAll, describe, expect, it } from "vitest";
12
12
 
13
13
  //#region src/__tests__/integration.spec.ts
@@ -196,6 +196,36 @@ function resetAllSequences() {
196
196
  function price() {
197
197
  return +faker.commerce.price();
198
198
  }
199
+ function coordinateInRadius(center, radius) {
200
+ const earth = 6378137;
201
+ const d = radius / earth;
202
+ const theta = 2 * Math.PI * Math.random();
203
+ const r = d * Math.sqrt(Math.random());
204
+ const lat1 = center.lat * Math.PI / 180;
205
+ const lng1 = center.lng * Math.PI / 180;
206
+ const lat2 = Math.asin(Math.sin(lat1) * Math.cos(r) + Math.cos(lat1) * Math.sin(r) * Math.cos(theta));
207
+ const lng2 = lng1 + Math.atan2(Math.sin(theta) * Math.sin(r) * Math.cos(lat1), Math.cos(r) - Math.sin(lat1) * Math.sin(lat2));
208
+ return {
209
+ lat: lat2 * 180 / Math.PI,
210
+ lng: lng2 * 180 / Math.PI
211
+ };
212
+ }
213
+ function coordinateOutsideRadius(center, minRadiusMeters, maxRadiusMeters) {
214
+ const earth = 6378137;
215
+ const minD = minRadiusMeters / earth;
216
+ const maxD = maxRadiusMeters / earth;
217
+ const theta = 2 * Math.PI * Math.random();
218
+ const r = Math.sqrt(minD * minD + (maxD * maxD - minD * minD) * Math.random());
219
+ const lat1 = center.lat * Math.PI / 180;
220
+ const lng1 = center.lng * Math.PI / 180;
221
+ const lat2 = Math.asin(Math.sin(lat1) * Math.cos(r) + Math.cos(lat1) * Math.sin(r) * Math.cos(theta));
222
+ const lng2 = lng1 + Math.atan2(Math.sin(theta) * Math.sin(r) * Math.cos(lat1), Math.cos(r) - Math.sin(lat1) * Math.sin(lat2));
223
+ const normalizedLng = (lng2 * 180 / Math.PI + 540) % 360 - 180;
224
+ return {
225
+ lat: lat2 * 180 / Math.PI,
226
+ lng: normalizedLng
227
+ };
228
+ }
199
229
  /**
200
230
  * Enhanced faker instance with additional utility methods for testing.
201
231
  * Extends @faker-js/faker with custom methods for common test data generation patterns.
@@ -221,10 +251,20 @@ const faker = Object.freeze(Object.assign({}, __faker_js_faker.faker, {
221
251
  sequence,
222
252
  resetSequence,
223
253
  resetAllSequences,
224
- price
254
+ price,
255
+ coordinates: {
256
+ within: coordinateInRadius,
257
+ outside: coordinateOutsideRadius
258
+ }
225
259
  }));
226
260
 
227
261
  //#endregion
262
+ Object.defineProperty(exports, 'coordinateInRadius', {
263
+ enumerable: true,
264
+ get: function () {
265
+ return coordinateInRadius;
266
+ }
267
+ });
228
268
  Object.defineProperty(exports, 'faker', {
229
269
  enumerable: true,
230
270
  get: function () {