@geekmidas/testkit 0.0.12 → 0.0.13
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -1
- package/dist/{Factory-Bm44VKa-.d.cts → Factory-D7P3bKKb.d.mts} +2 -2
- package/dist/{Factory-tjCDNgUK.d.mts → Factory-pNV7ZQ7-.d.cts} +2 -2
- package/dist/Factory.d.cts +2 -2
- package/dist/Factory.d.mts +2 -2
- package/dist/{KyselyFactory-C3Bc3p4L.mjs → KyselyFactory-BcYkC0t2.mjs} +1 -1
- package/dist/{KyselyFactory-BoPDDitt.d.cts → KyselyFactory-BrWADI77.d.mts} +3 -3
- package/dist/{KyselyFactory-CXtfmMfK.cjs → KyselyFactory-Cf0o2YxO.cjs} +1 -1
- package/dist/{KyselyFactory-D82j74t9.d.mts → KyselyFactory-DM2dnUXU.d.cts} +3 -3
- package/dist/KyselyFactory.cjs +1 -1
- package/dist/KyselyFactory.d.cts +3 -3
- package/dist/KyselyFactory.d.mts +3 -3
- package/dist/KyselyFactory.mjs +1 -1
- package/dist/{ObjectionFactory-qIICOph3.mjs → ObjectionFactory-8hebmnai.mjs} +20 -4
- package/dist/{ObjectionFactory-BWMTXsxH.d.cts → ObjectionFactory-B40NQWSe.d.mts} +2 -2
- package/dist/{ObjectionFactory-DxIxJagq.cjs → ObjectionFactory-CDriunkS.cjs} +20 -4
- package/dist/{ObjectionFactory-CEG5qUrm.d.mts → ObjectionFactory-D3l1VuyX.d.cts} +2 -2
- package/dist/ObjectionFactory.cjs +1 -1
- package/dist/ObjectionFactory.d.cts +3 -3
- package/dist/ObjectionFactory.d.mts +3 -3
- package/dist/ObjectionFactory.mjs +1 -1
- package/dist/__tests__/KyselyFactory.spec.cjs +2 -2
- package/dist/__tests__/KyselyFactory.spec.mjs +2 -2
- package/dist/__tests__/ObjectionFactory.spec.cjs +288 -450
- package/dist/__tests__/ObjectionFactory.spec.mjs +289 -451
- package/dist/__tests__/PostgresKyselyMigrator.spec.cjs +397 -0
- package/dist/__tests__/PostgresKyselyMigrator.spec.d.cts +1 -0
- package/dist/__tests__/PostgresKyselyMigrator.spec.d.mts +1 -0
- package/dist/__tests__/PostgresKyselyMigrator.spec.mjs +396 -0
- package/dist/__tests__/PostgresMigrator.spec.cjs +1 -1
- package/dist/__tests__/PostgresMigrator.spec.mjs +1 -1
- package/dist/__tests__/PostgresObjectionMigrator.spec.cjs +1 -1
- package/dist/__tests__/PostgresObjectionMigrator.spec.mjs +1 -1
- package/dist/__tests__/VitestObjectionTransactionIsolator.spec.cjs +3 -11
- package/dist/__tests__/VitestObjectionTransactionIsolator.spec.mjs +3 -11
- package/dist/__tests__/integration.spec.cjs +2 -2
- package/dist/__tests__/integration.spec.mjs +2 -2
- package/dist/{faker-km9UhOS6.d.cts → faker-BSH1EMtg.d.cts} +2 -2
- package/dist/{faker-ChuHaYMR.d.mts → faker-C-Iuk_R1.d.mts} +2 -2
- package/dist/faker.d.cts +1 -1
- package/dist/faker.d.mts +1 -1
- package/dist/{helpers-BEmjyUVE.mjs → helpers-B4TXg3Wp.mjs} +11 -36
- package/dist/{helpers-CNMBePuj.cjs → helpers-Bf0nXhbu.cjs} +10 -41
- package/dist/kysely.cjs +1 -1
- package/dist/kysely.d.cts +3 -3
- package/dist/kysely.d.mts +3 -3
- package/dist/kysely.mjs +1 -1
- package/dist/objection.cjs +1 -1
- package/dist/objection.d.cts +3 -3
- package/dist/objection.d.mts +3 -3
- package/dist/objection.mjs +1 -1
- package/package.json +2 -2
- package/src/Factory.ts +4 -1
- package/src/KyselyFactory.ts +6 -2
- package/src/ObjectionFactory.ts +31 -4
- package/src/__tests__/ObjectionFactory.spec.ts +423 -542
- package/src/__tests__/PostgresKyselyMigrator.spec.ts +690 -0
- package/src/__tests__/VitestObjectionTransactionIsolator.spec.ts +0 -8
- package/test/helpers.ts +13 -21
- package/dist/example.cjs +0 -22
- package/dist/example.d.cts +0 -26
- package/dist/example.d.mts +0 -26
- package/dist/example.mjs +0 -22
- package/src/example.ts +0 -45
package/dist/__tests__/PostgresKyselyMigrator.spec.mjs
ADDED
@@ -0,0 +1,396 @@
+import { PostgresMigrator } from "../PostgresMigrator-DxPC_gGu.mjs";
+import { PostgresKyselyMigrator } from "../PostgresKyselyMigrator-Bdhl251C.mjs";
+import { createTestDatabase } from "../helpers-B4TXg3Wp.mjs";
+import { Kysely, PostgresDialect, sql } from "kysely";
+import { Client, Pool } from "pg";
+import { afterAll, afterEach, beforeAll, beforeEach, describe, expect, it, vi } from "vitest";
+
+//#region src/__tests__/PostgresKyselyMigrator.spec.ts
+var TestMigrationProvider = class {
+  migrations = {};
+  shouldError = false;
+  addMigration(name, migration) {
+    this.migrations[name] = migration;
+  }
+  setError(shouldError) {
+    this.shouldError = shouldError;
+  }
+  async getMigrations() {
+    if (this.shouldError) throw new Error("Failed to load migrations");
+    return this.migrations;
+  }
+};
+describe("PostgresKyselyMigrator", () => {
+  let testDbName;
+  let cleanupDb;
+  let consoleSpy;
+  let consoleErrorSpy;
+  beforeAll(async () => {
+    testDbName = `test_kysely_migrator_${Date.now()}`;
+    cleanupDb = await createTestDatabase(testDbName);
+  });
+  beforeEach(() => {
+    consoleSpy = vi.spyOn(console, "log").mockImplementation(() => {});
+    consoleErrorSpy = vi.spyOn(console, "error").mockImplementation(() => {});
+  });
+  afterEach(() => {
+    consoleSpy.mockRestore();
+    consoleErrorSpy.mockRestore();
+  });
+  afterAll(async () => {
+    await cleanupDb();
+  });
+  describe("constructor", () => {
+    it("should create a PostgresKyselyMigrator instance", () => {
+      const db = new Kysely({ dialect: new PostgresDialect({ pool: new Pool({
+        host: "localhost",
+        port: 5432,
+        user: "geekmidas",
+        password: "geekmidas",
+        database: testDbName
+      }) }) });
+      const provider = new TestMigrationProvider();
+      const migrator = new PostgresKyselyMigrator({
+        uri: `postgresql://geekmidas:geekmidas@localhost:5432/${testDbName}`,
+        db,
+        provider
+      });
+      expect(migrator).toBeInstanceOf(PostgresKyselyMigrator);
+      expect(migrator).toBeInstanceOf(PostgresMigrator);
+    });
+  });
+  describe("migrate method", () => {
+    it("should apply migrations successfully", async () => {
+      const newDbName = `test_migrate_${Date.now()}`;
+      const uri = `postgresql://geekmidas:geekmidas@localhost:5432/${newDbName}`;
+      const db = new Kysely({ dialect: new PostgresDialect({ pool: new Pool({
+        host: "localhost",
+        port: 5432,
+        user: "geekmidas",
+        password: "geekmidas",
+        database: newDbName
+      }) }) });
+      const provider = new TestMigrationProvider();
+      provider.addMigration("001_create_users", {
+        up: async (db$1) => {
+          await db$1.schema.createTable("users").addColumn("id", "serial", (col) => col.primaryKey()).addColumn("name", "varchar", (col) => col.notNull()).addColumn("email", "varchar", (col) => col.notNull().unique()).addColumn("created_at", "timestamp", (col) => col.defaultTo(sql`now()`).notNull()).execute();
+        },
+        down: async (db$1) => {
+          await db$1.schema.dropTable("users").execute();
+        }
+      });
+      provider.addMigration("002_create_posts", {
+        up: async (db$1) => {
+          await db$1.schema.createTable("posts").addColumn("id", "serial", (col) => col.primaryKey()).addColumn("title", "varchar", (col) => col.notNull()).addColumn("content", "text", (col) => col.notNull()).addColumn("user_id", "integer", (col) => col.notNull().references("users.id").onDelete("cascade")).addColumn("created_at", "timestamp", (col) => col.defaultTo(sql`now()`).notNull()).execute();
+        },
+        down: async (db$1) => {
+          await db$1.schema.dropTable("posts").execute();
+        }
+      });
+      const migrator = new PostgresKyselyMigrator({
+        uri,
+        db,
+        provider
+      });
+      const cleanup = await migrator.start();
+      expect(consoleSpy).toHaveBeenCalledWith(expect.stringContaining("Applied 2 migrations successfully"));
+      const client = new Client({
+        host: "localhost",
+        port: 5432,
+        user: "geekmidas",
+        password: "geekmidas",
+        database: newDbName
+      });
+      try {
+        await client.connect();
+        const tablesResult = await client.query(`
+          SELECT table_name FROM information_schema.tables
+          WHERE table_schema = 'public'
+          AND table_name IN ('users', 'posts')
+          ORDER BY table_name
+        `);
+        expect(tablesResult.rowCount).toBe(2);
+        expect(tablesResult.rows).toEqual([{ table_name: "posts" }, { table_name: "users" }]);
+      } finally {
+        await client.end();
+      }
+      await cleanup();
+    });
+    it("should handle migration errors", async () => {
+      const errorDbName = `test_migrate_error_${Date.now()}`;
+      const uri = `postgresql://geekmidas:geekmidas@localhost:5432/${errorDbName}`;
+      const db = new Kysely({ dialect: new PostgresDialect({ pool: new Pool({
+        host: "localhost",
+        port: 5432,
+        user: "geekmidas",
+        password: "geekmidas",
+        database: errorDbName
+      }) }) });
+      const provider = new TestMigrationProvider();
+      provider.addMigration("001_failing_migration", { up: async () => {
+        throw new Error("Migration failed intentionally");
+      } });
+      const migrator = new PostgresKyselyMigrator({
+        uri,
+        db,
+        provider
+      });
+      await expect(migrator.start()).rejects.toThrow("Migration failed intentionally");
+      expect(consoleErrorSpy).toHaveBeenCalledWith(expect.any(Error), "Failed to apply migrations");
+      await db.destroy();
+      const cleanupClient = new Client({
+        host: "localhost",
+        port: 5432,
+        user: "geekmidas",
+        password: "geekmidas",
+        database: "postgres"
+      });
+      try {
+        await cleanupClient.connect();
+        await cleanupClient.query(`
+          SELECT pg_terminate_backend(pg_stat_activity.pid)
+          FROM pg_stat_activity
+          WHERE pg_stat_activity.datname = '${errorDbName}'
+          AND pid <> pg_backend_pid()
+        `);
+        await cleanupClient.query(`DROP DATABASE IF EXISTS "${errorDbName}"`);
+      } finally {
+        await cleanupClient.end();
+      }
+    });
+    it("should destroy database connection after migrations", async () => {
+      const destroyDbName = `test_destroy_${Date.now()}`;
+      const uri = `postgresql://geekmidas:geekmidas@localhost:5432/${destroyDbName}`;
+      const db = new Kysely({ dialect: new PostgresDialect({ pool: new Pool({
+        host: "localhost",
+        port: 5432,
+        user: "geekmidas",
+        password: "geekmidas",
+        database: destroyDbName
+      }) }) });
+      const destroySpy = vi.spyOn(db, "destroy");
+      const provider = new TestMigrationProvider();
+      provider.addMigration("001_simple", { up: async (db$1) => {
+        await db$1.schema.createTable("test").addColumn("id", "serial", (col) => col.primaryKey()).execute();
+      } });
+      const migrator = new PostgresKyselyMigrator({
+        uri,
+        db,
+        provider
+      });
+      const cleanup = await migrator.start();
+      expect(destroySpy).toHaveBeenCalled();
+      await cleanup();
+    });
+  });
+  describe("integration with PostgresMigrator", () => {
+    it("should work with complete workflow", async () => {
+      const integrationDbName = `test_integration_${Date.now()}`;
+      const uri = `postgresql://geekmidas:geekmidas@localhost:5432/${integrationDbName}`;
+      const db = new Kysely({ dialect: new PostgresDialect({ pool: new Pool({
+        host: "localhost",
+        port: 5432,
+        user: "geekmidas",
+        password: "geekmidas",
+        database: integrationDbName
+      }) }) });
+      const provider = new TestMigrationProvider();
+      provider.addMigration("001_initial_schema", { up: async (db$1) => {
+        await db$1.schema.createTable("users").addColumn("id", "serial", (col) => col.primaryKey()).addColumn("name", "varchar", (col) => col.notNull()).addColumn("email", "varchar", (col) => col.notNull().unique()).addColumn("created_at", "timestamp", (col) => col.defaultTo(sql`now()`).notNull()).execute();
+        await db$1.schema.createTable("posts").addColumn("id", "serial", (col) => col.primaryKey()).addColumn("title", "varchar", (col) => col.notNull()).addColumn("content", "text", (col) => col.notNull()).addColumn("user_id", "integer", (col) => col.notNull().references("users.id").onDelete("cascade")).addColumn("created_at", "timestamp", (col) => col.defaultTo(sql`now()`).notNull()).execute();
+        await db$1.schema.createIndex("idx_posts_user_id").on("posts").column("user_id").execute();
+      } });
+      provider.addMigration("002_add_updated_at", { up: async (db$1) => {
+        await db$1.schema.alterTable("users").addColumn("updated_at", "timestamp", (col) => col.defaultTo(sql`now()`).notNull()).execute();
+        await db$1.schema.alterTable("posts").addColumn("updated_at", "timestamp", (col) => col.defaultTo(sql`now()`).notNull()).execute();
+      } });
+      const migrator = new PostgresKyselyMigrator({
+        uri,
+        db,
+        provider
+      });
+      const cleanup = await migrator.start();
+      expect(consoleSpy).toHaveBeenCalledWith(`Migrating database: ${integrationDbName}`);
+      expect(consoleSpy).toHaveBeenCalledWith("Applied 2 migrations successfully");
+      const verifyClient = new Client({
+        host: "localhost",
+        port: 5432,
+        user: "geekmidas",
+        password: "geekmidas",
+        database: integrationDbName
+      });
+      try {
+        await verifyClient.connect();
+        const columnsResult = await verifyClient.query(`
+          SELECT table_name, column_name
+          FROM information_schema.columns
+          WHERE table_schema = 'public'
+          AND table_name IN ('users', 'posts')
+          AND column_name = 'updated_at'
+          ORDER BY table_name
+        `);
+        expect(columnsResult.rowCount).toBe(2);
+        const indexResult = await verifyClient.query(`
+          SELECT indexname FROM pg_indexes
+          WHERE schemaname = 'public'
+          AND indexname = 'idx_posts_user_id'
+        `);
+        expect(indexResult.rowCount).toBe(1);
+      } finally {
+        await verifyClient.end();
+      }
+      await cleanup();
+    });
+    it("should handle empty migrations", async () => {
+      const emptyDbName = `test_empty_${Date.now()}`;
+      const uri = `postgresql://geekmidas:geekmidas@localhost:5432/${emptyDbName}`;
+      const db = new Kysely({ dialect: new PostgresDialect({ pool: new Pool({
+        host: "localhost",
+        port: 5432,
+        user: "geekmidas",
+        password: "geekmidas",
+        database: emptyDbName
+      }) }) });
+      const provider = new TestMigrationProvider();
+      const migrator = new PostgresKyselyMigrator({
+        uri,
+        db,
+        provider
+      });
+      const cleanup = await migrator.start();
+      expect(consoleSpy).toHaveBeenCalledWith("Applied 0 migrations successfully");
+      await cleanup();
+    });
+    it("should work with FileMigrationProvider pattern", async () => {
+      const fileProviderDbName = `test_file_provider_${Date.now()}`;
+      const uri = `postgresql://geekmidas:geekmidas@localhost:5432/${fileProviderDbName}`;
+      const db = new Kysely({ dialect: new PostgresDialect({ pool: new Pool({
+        host: "localhost",
+        port: 5432,
+        user: "geekmidas",
+        password: "geekmidas",
+        database: fileProviderDbName
+      }) }) });
+      const migrations = {
+        "2024_01_01_000001_create_users": {
+          up: async (db$1) => {
+            await db$1.schema.createTable("users").addColumn("id", "serial", (col) => col.primaryKey()).addColumn("username", "varchar", (col) => col.notNull().unique()).execute();
+          },
+          down: async (db$1) => {
+            await db$1.schema.dropTable("users").execute();
+          }
+        },
+        "2024_01_02_000001_create_sessions": {
+          up: async (db$1) => {
+            await db$1.schema.createTable("sessions").addColumn("id", "serial", (col) => col.primaryKey()).addColumn("user_id", "integer", (col) => col.notNull().references("users.id")).addColumn("token", "varchar", (col) => col.notNull()).execute();
+          },
+          down: async (db$1) => {
+            await db$1.schema.dropTable("sessions").execute();
+          }
+        }
+      };
+      const provider = { async getMigrations() {
+        return migrations;
+      } };
+      const migrator = new PostgresKyselyMigrator({
+        uri,
+        db,
+        provider
+      });
+      const cleanup = await migrator.start();
+      expect(consoleSpy).toHaveBeenCalledWith("Applied 2 migrations successfully");
+      const verifyClient = new Client({
+        host: "localhost",
+        port: 5432,
+        user: "geekmidas",
+        password: "geekmidas",
+        database: fileProviderDbName
+      });
+      try {
+        await verifyClient.connect();
+        const tablesResult = await verifyClient.query(`
+          SELECT table_name FROM information_schema.tables
+          WHERE table_schema = 'public'
+          AND table_name IN ('users', 'sessions')
+          ORDER BY table_name
+        `);
+        expect(tablesResult.rowCount).toBe(2);
+      } finally {
+        await verifyClient.end();
+      }
+      await cleanup();
+    });
+  });
+  describe("error scenarios", () => {
+    it("should handle provider errors", async () => {
+      const providerErrorDbName = `test_provider_error_${Date.now()}`;
+      const uri = `postgresql://geekmidas:geekmidas@localhost:5432/${providerErrorDbName}`;
+      const db = new Kysely({ dialect: new PostgresDialect({ pool: new Pool({
+        host: "localhost",
+        port: 5432,
+        user: "geekmidas",
+        password: "geekmidas",
+        database: providerErrorDbName
+      }) }) });
+      const provider = new TestMigrationProvider();
+      provider.setError(true);
+      const migrator = new PostgresKyselyMigrator({
+        uri,
+        db,
+        provider
+      });
+      await expect(migrator.start()).rejects.toThrow("Failed to load migrations");
+      await db.destroy();
+      const cleanupClient = new Client({
+        host: "localhost",
+        port: 5432,
+        user: "geekmidas",
+        password: "geekmidas",
+        database: "postgres"
+      });
+      try {
+        await cleanupClient.connect();
+        await cleanupClient.query(`DROP DATABASE IF EXISTS "${providerErrorDbName}"`);
+      } finally {
+        await cleanupClient.end();
+      }
+    }, 1e4);
+    it("should handle invalid SQL in migrations", async () => {
+      const invalidSqlDbName = `test_invalid_sql_${Date.now()}`;
+      const uri = `postgresql://geekmidas:geekmidas@localhost:5432/${invalidSqlDbName}`;
+      const db = new Kysely({ dialect: new PostgresDialect({ pool: new Pool({
+        host: "localhost",
+        port: 5432,
+        user: "geekmidas",
+        password: "geekmidas",
+        database: invalidSqlDbName
+      }) }) });
+      const provider = new TestMigrationProvider();
+      provider.addMigration("001_invalid_sql", { up: async (db$1) => {
+        await db$1.schema.createTable("posts").addColumn("id", "serial", (col) => col.primaryKey()).addColumn("user_id", "integer", (col) => col.notNull().references("non_existent_table.id")).execute();
+      } });
+      const migrator = new PostgresKyselyMigrator({
+        uri,
+        db,
+        provider
+      });
+      await expect(migrator.start()).rejects.toThrow();
+      await db.destroy();
+      const cleanupClient = new Client({
+        host: "localhost",
+        port: 5432,
+        user: "geekmidas",
+        password: "geekmidas",
+        database: "postgres"
+      });
+      try {
+        await cleanupClient.connect();
+        await cleanupClient.query(`DROP DATABASE IF EXISTS "${invalidSqlDbName}"`);
+      } finally {
+        await cleanupClient.end();
+      }
+    }, 1e4);
+  });
+});
+
+//#endregion
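The new spec pins down the PostgresKyselyMigrator contract: the constructor takes { uri, db, provider }, and per the tests start() provisions the target database, applies the provider's migrations, logs the result, destroys the Kysely connection, and resolves to a cleanup callback (the happy-path tests connect to a freshly named database right after start() and call cleanup() when done). A minimal usage sketch along those lines — the @geekmidas/testkit/kysely subpath and the connection details are illustrative assumptions, not confirmed by this diff:

// Sketch of the migrator API as exercised by the spec above; the subpath
// import below is an assumption (the diff only shows dist file names).
import { Kysely, PostgresDialect, type MigrationProvider } from "kysely";
import { Pool } from "pg";
import { PostgresKyselyMigrator } from "@geekmidas/testkit/kysely";

const uri = "postgresql://user:password@localhost:5432/my_test_db";
const db = new Kysely<any>({
  dialect: new PostgresDialect({ pool: new Pool({ connectionString: uri }) }),
});

// Any Kysely MigrationProvider works; the spec exercises both an in-memory
// class and a plain object exposing getMigrations().
const provider: MigrationProvider = {
  async getMigrations() {
    return {
      "001_create_users": {
        async up(db) {
          await db.schema
            .createTable("users")
            .addColumn("id", "serial", (col) => col.primaryKey())
            .addColumn("name", "varchar", (col) => col.notNull())
            .execute();
        },
      },
    };
  },
};

const migrator = new PostgresKyselyMigrator({ uri, db, provider });
const cleanup = await migrator.start(); // applies migrations, destroys db connection
// ...exercise the migrated schema...
await cleanup();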
package/dist/__tests__/PostgresMigrator.spec.cjs
CHANGED
@@ -1,6 +1,6 @@
 const require_chunk = require('../chunk-CUT6urMc.cjs');
 const require_PostgresMigrator = require('../PostgresMigrator-DFcNdCvD.cjs');
-const require_helpers = require('../helpers-CNMBePuj.cjs');
+const require_helpers = require('../helpers-Bf0nXhbu.cjs');
 const pg = require_chunk.__toESM(require("pg"));
 const vitest = require_chunk.__toESM(require("vitest"));
 
package/dist/__tests__/PostgresMigrator.spec.mjs
CHANGED
@@ -1,5 +1,5 @@
 import { PostgresMigrator } from "../PostgresMigrator-DxPC_gGu.mjs";
-import { createTestDatabase } from "../helpers-BEmjyUVE.mjs";
+import { createTestDatabase } from "../helpers-B4TXg3Wp.mjs";
 import { Client } from "pg";
 import { afterAll, afterEach, beforeAll, beforeEach, describe, expect, it, vi } from "vitest";
 
package/dist/__tests__/PostgresObjectionMigrator.spec.cjs
CHANGED
@@ -1,7 +1,7 @@
 const require_chunk = require('../chunk-CUT6urMc.cjs');
 require('../PostgresMigrator-DFcNdCvD.cjs');
 const require_PostgresObjectionMigrator = require('../PostgresObjectionMigrator-BG6ymgnt.cjs');
-const require_helpers = require('../helpers-CNMBePuj.cjs');
+const require_helpers = require('../helpers-Bf0nXhbu.cjs');
 const pg = require_chunk.__toESM(require("pg"));
 const vitest = require_chunk.__toESM(require("vitest"));
 const knex = require_chunk.__toESM(require("knex"));
package/dist/__tests__/PostgresObjectionMigrator.spec.mjs
CHANGED
@@ -1,6 +1,6 @@
 import "../PostgresMigrator-DxPC_gGu.mjs";
 import { PostgresObjectionMigrator } from "../PostgresObjectionMigrator-G4h5FLvU.mjs";
-import { createTestDatabase } from "../helpers-BEmjyUVE.mjs";
+import { createTestDatabase } from "../helpers-B4TXg3Wp.mjs";
 import { Client } from "pg";
 import { afterAll, afterEach, beforeAll, beforeEach, describe, expect, it, vi } from "vitest";
 import knex from "knex";
package/dist/__tests__/VitestObjectionTransactionIsolator.spec.cjs
CHANGED
@@ -1,13 +1,13 @@
 const require_chunk = require('../chunk-CUT6urMc.cjs');
 require('../Factory-WMhTNZ9S.cjs');
 require('../faker-B14IEMIN.cjs');
-require('../ObjectionFactory-DxIxJagq.cjs');
+require('../ObjectionFactory-CDriunkS.cjs');
 require('../PostgresMigrator-DFcNdCvD.cjs');
 require('../PostgresObjectionMigrator-BG6ymgnt.cjs');
 require('../VitestTransactionIsolator-CruLTRRi.cjs');
 require('../VitestObjectionTransactionIsolator-DyqLp_in.cjs');
 const require_objection = require('../objection-CCD8fMLj.cjs');
-const require_helpers = require('../helpers-CNMBePuj.cjs');
+const require_helpers = require('../helpers-Bf0nXhbu.cjs');
 const vitest = require_chunk.__toESM(require("vitest"));
 const objection = require_chunk.__toESM(require("objection"));
 
@@ -18,8 +18,6 @@ var User = class extends objection.Model {
   }
   id;
   name;
-  email;
-  role;
   createdAt;
   updatedAt;
   static get relationMappings() {
@@ -106,24 +104,18 @@ var Comment = class extends objection.Model {
   }
 };
 const knex = require_helpers.createKnexDb();
-objection.Model.knex(knex);
 const it = require_objection.wrapVitestObjectionTransaction(vitest.it, knex, async (trx) => {
   await require_helpers.createTestTablesKnex(trx);
 });
 (0, vitest.describe)("VitestObjectionTransactionIsolator", () => {
   (0, vitest.describe)("Transaction Isolation", () => {
     it("should rollback data after test completes", async ({ trx }) => {
-      const user = await User.query(trx).insert({
-        name: "Test User",
-        email: "test@example.com",
-        role: "user"
-      });
+      const user = await User.query(trx).insert({ name: "Test User" });
       (0, vitest.expect)(user).toBeDefined();
       (0, vitest.expect)(user.id).toBeDefined();
       (0, vitest.expect)(user.name).toBe("Test User");
       const foundUser = await User.query(trx).findById(user.id);
       (0, vitest.expect)(foundUser).toBeDefined();
-      (0, vitest.expect)(foundUser?.email).toBe(user.email);
     });
   });
 });
package/dist/__tests__/VitestObjectionTransactionIsolator.spec.mjs
CHANGED
@@ -1,12 +1,12 @@
 import "../Factory-z2m01hMj.mjs";
 import "../faker-BGKYFoCT.mjs";
-import "../ObjectionFactory-qIICOph3.mjs";
+import "../ObjectionFactory-8hebmnai.mjs";
 import "../PostgresMigrator-DxPC_gGu.mjs";
 import "../PostgresObjectionMigrator-G4h5FLvU.mjs";
 import "../VitestTransactionIsolator-BWwK-ca6.mjs";
 import "../VitestObjectionTransactionIsolator-BPoLUFop.mjs";
 import { wrapVitestObjectionTransaction } from "../objection-lsMgM5gP.mjs";
-import { createKnexDb, createTestTablesKnex } from "../helpers-BEmjyUVE.mjs";
+import { createKnexDb, createTestTablesKnex } from "../helpers-B4TXg3Wp.mjs";
 import { describe, expect, it } from "vitest";
 import { Model } from "objection";
 
@@ -17,8 +17,6 @@ var User = class extends Model {
   }
   id;
   name;
-  email;
-  role;
   createdAt;
   updatedAt;
   static get relationMappings() {
@@ -105,24 +103,18 @@ var Comment = class extends Model {
   }
 };
 const knex = createKnexDb();
-Model.knex(knex);
 const it$1 = wrapVitestObjectionTransaction(it, knex, async (trx) => {
   await createTestTablesKnex(trx);
 });
 describe("VitestObjectionTransactionIsolator", () => {
   describe("Transaction Isolation", () => {
     it$1("should rollback data after test completes", async ({ trx }) => {
-      const user = await User.query(trx).insert({
-        name: "Test User",
-        email: "test@example.com",
-        role: "user"
-      });
+      const user = await User.query(trx).insert({ name: "Test User" });
       expect(user).toBeDefined();
       expect(user.id).toBeDefined();
       expect(user.name).toBe("Test User");
       const foundUser = await User.query(trx).findById(user.id);
       expect(foundUser).toBeDefined();
-      expect(foundUser?.email).toBe(user.email);
     });
   });
 });
package/dist/__tests__/integration.spec.cjs
CHANGED
@@ -1,14 +1,14 @@
 const require_chunk = require('../chunk-CUT6urMc.cjs');
 require('../Factory-WMhTNZ9S.cjs');
 require('../faker-B14IEMIN.cjs');
-const require_KyselyFactory = require('../KyselyFactory-CXtfmMfK.cjs');
+const require_KyselyFactory = require('../KyselyFactory-Cf0o2YxO.cjs');
 require('../PostgresMigrator-DFcNdCvD.cjs');
 require('../PostgresKyselyMigrator-CfytARcA.cjs');
 require('../VitestTransactionIsolator-CruLTRRi.cjs');
 require('../VitestKyselyTransactionIsolator-CIlpIO78.cjs');
 const require_helpers = require('../helpers-nEUtQ7eo.cjs');
 const require_kysely = require('../kysely-CBfCXxUn.cjs');
-const require_helpers$1 = require('../helpers-CNMBePuj.cjs');
+const require_helpers$1 = require('../helpers-Bf0nXhbu.cjs');
 const vitest = require_chunk.__toESM(require("vitest"));
 
 //#region src/__tests__/integration.spec.ts
package/dist/__tests__/integration.spec.mjs
CHANGED
@@ -1,13 +1,13 @@
 import "../Factory-z2m01hMj.mjs";
 import "../faker-BGKYFoCT.mjs";
-import { KyselyFactory } from "../KyselyFactory-C3Bc3p4L.mjs";
+import { KyselyFactory } from "../KyselyFactory-BcYkC0t2.mjs";
 import "../PostgresMigrator-DxPC_gGu.mjs";
 import "../PostgresKyselyMigrator-Bdhl251C.mjs";
 import "../VitestTransactionIsolator-BWwK-ca6.mjs";
 import "../VitestKyselyTransactionIsolator-BKGT9nEG.mjs";
 import { createKyselyDb } from "../helpers-BuPmgzyQ.mjs";
 import { wrapVitestKyselyTransaction } from "../kysely-Cx_1pZYc.mjs";
-import { TEST_DATABASE_CONFIG, createTestTables } from "../helpers-BEmjyUVE.mjs";
+import { TEST_DATABASE_CONFIG, createTestTables } from "../helpers-B4TXg3Wp.mjs";
 import { beforeAll, describe, expect, it } from "vitest";
 
 //#region src/__tests__/integration.spec.ts
package/dist/{faker-km9UhOS6.d.cts → faker-BSH1EMtg.d.cts}
RENAMED
@@ -1,4 +1,4 @@
-import * as 
+import * as _faker_js_faker0 from "@faker-js/faker";
 
 //#region src/faker.d.ts
 
@@ -134,7 +134,7 @@ declare function coordinateOutsideRadius(center: Coordinate, minRadiusMeters: nu
  * const productPrice = faker.price();
  * ```
  */
-declare const faker: Readonly<
+declare const faker: Readonly<_faker_js_faker0.Faker & {
   timestamps: typeof timestamps;
   identifier: typeof identifier;
   sequence: typeof sequence;
package/dist/{faker-ChuHaYMR.d.mts → faker-C-Iuk_R1.d.mts}
RENAMED
@@ -1,4 +1,4 @@
-import * as 
+import * as _faker_js_faker0 from "@faker-js/faker";
 
 //#region src/faker.d.ts
 
@@ -134,7 +134,7 @@ declare function coordinateOutsideRadius(center: Coordinate, minRadiusMeters: nu
  * const productPrice = faker.price();
  * ```
  */
-declare const faker$1: Readonly<
+declare const faker$1: Readonly<_faker_js_faker0.Faker & {
   timestamps: typeof timestamps;
   identifier: typeof identifier;
   sequence: typeof sequence;
package/dist/faker.d.cts
CHANGED
@@ -1,2 +1,2 @@
-import { FakerFactory, Timestamps, coordinateInRadius, faker, identifier, resetAllSequences, resetSequence, sequence, timestamps } from "./faker-km9UhOS6.cjs";
+import { FakerFactory, Timestamps, coordinateInRadius, faker, identifier, resetAllSequences, resetSequence, sequence, timestamps } from "./faker-BSH1EMtg.cjs";
 export { FakerFactory, Timestamps, coordinateInRadius, faker, identifier, resetAllSequences, resetSequence, sequence, timestamps };
package/dist/faker.d.mts
CHANGED
@@ -1,2 +1,2 @@
-import { FakerFactory, Timestamps, coordinateInRadius, faker, identifier, resetAllSequences, resetSequence, sequence, timestamps } from "./faker-ChuHaYMR.mjs";
+import { FakerFactory, Timestamps, coordinateInRadius, faker, identifier, resetAllSequences, resetSequence, sequence, timestamps } from "./faker-C-Iuk_R1.mjs";
 export { FakerFactory, Timestamps, coordinateInRadius, faker, identifier, resetAllSequences, resetSequence, sequence, timestamps };
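The faker typings above describe an augmented instance — Readonly<Faker & { timestamps; identifier; sequence; ... }> — re-exported through faker.d.cts and faker.d.mts. A short consumption sketch; the @geekmidas/testkit/faker subpath is an assumption, and of the custom helpers only faker.price() is evidenced (in the typings' JSDoc example):

// Assumed subpath export; this diff only shows the generated dist typings.
import { faker, resetAllSequences } from "@geekmidas/testkit/faker";

const name = faker.person.fullName(); // plain @faker-js/faker surface
const productPrice = faker.price();   // helper from the typings' JSDoc example

// resetSequence/resetAllSequences are exported alongside sequence(); their
// exact signatures are not shown in this diff.
resetAllSequences();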