@geekmidas/testkit 0.0.12 → 0.0.13

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (64)
  1. package/README.md +1 -1
  2. package/dist/{Factory-Bm44VKa-.d.cts → Factory-D7P3bKKb.d.mts} +2 -2
  3. package/dist/{Factory-tjCDNgUK.d.mts → Factory-pNV7ZQ7-.d.cts} +2 -2
  4. package/dist/Factory.d.cts +2 -2
  5. package/dist/Factory.d.mts +2 -2
  6. package/dist/{KyselyFactory-C3Bc3p4L.mjs → KyselyFactory-BcYkC0t2.mjs} +1 -1
  7. package/dist/{KyselyFactory-BoPDDitt.d.cts → KyselyFactory-BrWADI77.d.mts} +3 -3
  8. package/dist/{KyselyFactory-CXtfmMfK.cjs → KyselyFactory-Cf0o2YxO.cjs} +1 -1
  9. package/dist/{KyselyFactory-D82j74t9.d.mts → KyselyFactory-DM2dnUXU.d.cts} +3 -3
  10. package/dist/KyselyFactory.cjs +1 -1
  11. package/dist/KyselyFactory.d.cts +3 -3
  12. package/dist/KyselyFactory.d.mts +3 -3
  13. package/dist/KyselyFactory.mjs +1 -1
  14. package/dist/{ObjectionFactory-qIICOph3.mjs → ObjectionFactory-8hebmnai.mjs} +20 -4
  15. package/dist/{ObjectionFactory-BWMTXsxH.d.cts → ObjectionFactory-B40NQWSe.d.mts} +2 -2
  16. package/dist/{ObjectionFactory-DxIxJagq.cjs → ObjectionFactory-CDriunkS.cjs} +20 -4
  17. package/dist/{ObjectionFactory-CEG5qUrm.d.mts → ObjectionFactory-D3l1VuyX.d.cts} +2 -2
  18. package/dist/ObjectionFactory.cjs +1 -1
  19. package/dist/ObjectionFactory.d.cts +3 -3
  20. package/dist/ObjectionFactory.d.mts +3 -3
  21. package/dist/ObjectionFactory.mjs +1 -1
  22. package/dist/__tests__/KyselyFactory.spec.cjs +2 -2
  23. package/dist/__tests__/KyselyFactory.spec.mjs +2 -2
  24. package/dist/__tests__/ObjectionFactory.spec.cjs +288 -450
  25. package/dist/__tests__/ObjectionFactory.spec.mjs +289 -451
  26. package/dist/__tests__/PostgresKyselyMigrator.spec.cjs +397 -0
  27. package/dist/__tests__/PostgresKyselyMigrator.spec.d.cts +1 -0
  28. package/dist/__tests__/PostgresKyselyMigrator.spec.d.mts +1 -0
  29. package/dist/__tests__/PostgresKyselyMigrator.spec.mjs +396 -0
  30. package/dist/__tests__/PostgresMigrator.spec.cjs +1 -1
  31. package/dist/__tests__/PostgresMigrator.spec.mjs +1 -1
  32. package/dist/__tests__/PostgresObjectionMigrator.spec.cjs +1 -1
  33. package/dist/__tests__/PostgresObjectionMigrator.spec.mjs +1 -1
  34. package/dist/__tests__/VitestObjectionTransactionIsolator.spec.cjs +3 -11
  35. package/dist/__tests__/VitestObjectionTransactionIsolator.spec.mjs +3 -11
  36. package/dist/__tests__/integration.spec.cjs +2 -2
  37. package/dist/__tests__/integration.spec.mjs +2 -2
  38. package/dist/{faker-km9UhOS6.d.cts → faker-BSH1EMtg.d.cts} +2 -2
  39. package/dist/{faker-ChuHaYMR.d.mts → faker-C-Iuk_R1.d.mts} +2 -2
  40. package/dist/faker.d.cts +1 -1
  41. package/dist/faker.d.mts +1 -1
  42. package/dist/{helpers-BEmjyUVE.mjs → helpers-B4TXg3Wp.mjs} +11 -36
  43. package/dist/{helpers-CNMBePuj.cjs → helpers-Bf0nXhbu.cjs} +10 -41
  44. package/dist/kysely.cjs +1 -1
  45. package/dist/kysely.d.cts +3 -3
  46. package/dist/kysely.d.mts +3 -3
  47. package/dist/kysely.mjs +1 -1
  48. package/dist/objection.cjs +1 -1
  49. package/dist/objection.d.cts +3 -3
  50. package/dist/objection.d.mts +3 -3
  51. package/dist/objection.mjs +1 -1
  52. package/package.json +2 -2
  53. package/src/Factory.ts +4 -1
  54. package/src/KyselyFactory.ts +6 -2
  55. package/src/ObjectionFactory.ts +31 -4
  56. package/src/__tests__/ObjectionFactory.spec.ts +423 -542
  57. package/src/__tests__/PostgresKyselyMigrator.spec.ts +690 -0
  58. package/src/__tests__/VitestObjectionTransactionIsolator.spec.ts +0 -8
  59. package/test/helpers.ts +13 -21
  60. package/dist/example.cjs +0 -22
  61. package/dist/example.d.cts +0 -26
  62. package/dist/example.d.mts +0 -26
  63. package/dist/example.mjs +0 -22
  64. package/src/example.ts +0 -45
package/dist/__tests__/PostgresKyselyMigrator.spec.cjs
@@ -0,0 +1,397 @@
+ const require_chunk = require('../chunk-CUT6urMc.cjs');
+ const require_PostgresMigrator = require('../PostgresMigrator-DFcNdCvD.cjs');
+ const require_PostgresKyselyMigrator = require('../PostgresKyselyMigrator-CfytARcA.cjs');
+ const require_helpers = require('../helpers-Bf0nXhbu.cjs');
+ const kysely = require_chunk.__toESM(require("kysely"));
+ const pg = require_chunk.__toESM(require("pg"));
+ const vitest = require_chunk.__toESM(require("vitest"));
+
+ //#region src/__tests__/PostgresKyselyMigrator.spec.ts
+ var TestMigrationProvider = class {
+ migrations = {};
+ shouldError = false;
+ addMigration(name, migration) {
+ this.migrations[name] = migration;
+ }
+ setError(shouldError) {
+ this.shouldError = shouldError;
+ }
+ async getMigrations() {
+ if (this.shouldError) throw new Error("Failed to load migrations");
+ return this.migrations;
+ }
+ };
+ (0, vitest.describe)("PostgresKyselyMigrator", () => {
+ let testDbName;
+ let cleanupDb;
+ let consoleSpy;
+ let consoleErrorSpy;
+ (0, vitest.beforeAll)(async () => {
+ testDbName = `test_kysely_migrator_${Date.now()}`;
+ cleanupDb = await require_helpers.createTestDatabase(testDbName);
+ });
+ (0, vitest.beforeEach)(() => {
+ consoleSpy = vitest.vi.spyOn(console, "log").mockImplementation(() => {});
+ consoleErrorSpy = vitest.vi.spyOn(console, "error").mockImplementation(() => {});
+ });
+ (0, vitest.afterEach)(() => {
+ consoleSpy.mockRestore();
+ consoleErrorSpy.mockRestore();
+ });
+ (0, vitest.afterAll)(async () => {
+ await cleanupDb();
+ });
+ (0, vitest.describe)("constructor", () => {
+ (0, vitest.it)("should create a PostgresKyselyMigrator instance", () => {
+ const db = new kysely.Kysely({ dialect: new kysely.PostgresDialect({ pool: new pg.Pool({
+ host: "localhost",
+ port: 5432,
+ user: "geekmidas",
+ password: "geekmidas",
+ database: testDbName
+ }) }) });
+ const provider = new TestMigrationProvider();
+ const migrator = new require_PostgresKyselyMigrator.PostgresKyselyMigrator({
+ uri: `postgresql://geekmidas:geekmidas@localhost:5432/${testDbName}`,
+ db,
+ provider
+ });
+ (0, vitest.expect)(migrator).toBeInstanceOf(require_PostgresKyselyMigrator.PostgresKyselyMigrator);
+ (0, vitest.expect)(migrator).toBeInstanceOf(require_PostgresMigrator.PostgresMigrator);
+ });
+ });
+ (0, vitest.describe)("migrate method", () => {
+ (0, vitest.it)("should apply migrations successfully", async () => {
+ const newDbName = `test_migrate_${Date.now()}`;
+ const uri = `postgresql://geekmidas:geekmidas@localhost:5432/${newDbName}`;
+ const db = new kysely.Kysely({ dialect: new kysely.PostgresDialect({ pool: new pg.Pool({
+ host: "localhost",
+ port: 5432,
+ user: "geekmidas",
+ password: "geekmidas",
+ database: newDbName
+ }) }) });
+ const provider = new TestMigrationProvider();
+ provider.addMigration("001_create_users", {
+ up: async (db$1) => {
+ await db$1.schema.createTable("users").addColumn("id", "serial", (col) => col.primaryKey()).addColumn("name", "varchar", (col) => col.notNull()).addColumn("email", "varchar", (col) => col.notNull().unique()).addColumn("created_at", "timestamp", (col) => col.defaultTo(kysely.sql`now()`).notNull()).execute();
+ },
+ down: async (db$1) => {
+ await db$1.schema.dropTable("users").execute();
+ }
+ });
+ provider.addMigration("002_create_posts", {
+ up: async (db$1) => {
+ await db$1.schema.createTable("posts").addColumn("id", "serial", (col) => col.primaryKey()).addColumn("title", "varchar", (col) => col.notNull()).addColumn("content", "text", (col) => col.notNull()).addColumn("user_id", "integer", (col) => col.notNull().references("users.id").onDelete("cascade")).addColumn("created_at", "timestamp", (col) => col.defaultTo(kysely.sql`now()`).notNull()).execute();
+ },
+ down: async (db$1) => {
+ await db$1.schema.dropTable("posts").execute();
+ }
+ });
+ const migrator = new require_PostgresKyselyMigrator.PostgresKyselyMigrator({
+ uri,
+ db,
+ provider
+ });
+ const cleanup = await migrator.start();
+ (0, vitest.expect)(consoleSpy).toHaveBeenCalledWith(vitest.expect.stringContaining("Applied 2 migrations successfully"));
+ const client = new pg.Client({
+ host: "localhost",
+ port: 5432,
+ user: "geekmidas",
+ password: "geekmidas",
+ database: newDbName
+ });
+ try {
+ await client.connect();
+ const tablesResult = await client.query(`
+ SELECT table_name FROM information_schema.tables
+ WHERE table_schema = 'public'
+ AND table_name IN ('users', 'posts')
+ ORDER BY table_name
+ `);
+ (0, vitest.expect)(tablesResult.rowCount).toBe(2);
+ (0, vitest.expect)(tablesResult.rows).toEqual([{ table_name: "posts" }, { table_name: "users" }]);
+ } finally {
+ await client.end();
+ }
+ await cleanup();
+ });
+ (0, vitest.it)("should handle migration errors", async () => {
+ const errorDbName = `test_migrate_error_${Date.now()}`;
+ const uri = `postgresql://geekmidas:geekmidas@localhost:5432/${errorDbName}`;
+ const db = new kysely.Kysely({ dialect: new kysely.PostgresDialect({ pool: new pg.Pool({
+ host: "localhost",
+ port: 5432,
+ user: "geekmidas",
+ password: "geekmidas",
+ database: errorDbName
+ }) }) });
+ const provider = new TestMigrationProvider();
+ provider.addMigration("001_failing_migration", { up: async () => {
+ throw new Error("Migration failed intentionally");
+ } });
+ const migrator = new require_PostgresKyselyMigrator.PostgresKyselyMigrator({
+ uri,
+ db,
+ provider
+ });
+ await (0, vitest.expect)(migrator.start()).rejects.toThrow("Migration failed intentionally");
+ (0, vitest.expect)(consoleErrorSpy).toHaveBeenCalledWith(vitest.expect.any(Error), "Failed to apply migrations");
+ await db.destroy();
+ const cleanupClient = new pg.Client({
+ host: "localhost",
+ port: 5432,
+ user: "geekmidas",
+ password: "geekmidas",
+ database: "postgres"
+ });
+ try {
+ await cleanupClient.connect();
+ await cleanupClient.query(`
+ SELECT pg_terminate_backend(pg_stat_activity.pid)
+ FROM pg_stat_activity
+ WHERE pg_stat_activity.datname = '${errorDbName}'
+ AND pid <> pg_backend_pid()
+ `);
+ await cleanupClient.query(`DROP DATABASE IF EXISTS "${errorDbName}"`);
+ } finally {
+ await cleanupClient.end();
+ }
+ });
+ (0, vitest.it)("should destroy database connection after migrations", async () => {
+ const destroyDbName = `test_destroy_${Date.now()}`;
+ const uri = `postgresql://geekmidas:geekmidas@localhost:5432/${destroyDbName}`;
+ const db = new kysely.Kysely({ dialect: new kysely.PostgresDialect({ pool: new pg.Pool({
+ host: "localhost",
+ port: 5432,
+ user: "geekmidas",
+ password: "geekmidas",
+ database: destroyDbName
+ }) }) });
+ const destroySpy = vitest.vi.spyOn(db, "destroy");
+ const provider = new TestMigrationProvider();
+ provider.addMigration("001_simple", { up: async (db$1) => {
+ await db$1.schema.createTable("test").addColumn("id", "serial", (col) => col.primaryKey()).execute();
+ } });
+ const migrator = new require_PostgresKyselyMigrator.PostgresKyselyMigrator({
+ uri,
+ db,
+ provider
+ });
+ const cleanup = await migrator.start();
+ (0, vitest.expect)(destroySpy).toHaveBeenCalled();
+ await cleanup();
+ });
+ });
+ (0, vitest.describe)("integration with PostgresMigrator", () => {
+ (0, vitest.it)("should work with complete workflow", async () => {
+ const integrationDbName = `test_integration_${Date.now()}`;
+ const uri = `postgresql://geekmidas:geekmidas@localhost:5432/${integrationDbName}`;
+ const db = new kysely.Kysely({ dialect: new kysely.PostgresDialect({ pool: new pg.Pool({
+ host: "localhost",
+ port: 5432,
+ user: "geekmidas",
+ password: "geekmidas",
+ database: integrationDbName
+ }) }) });
+ const provider = new TestMigrationProvider();
+ provider.addMigration("001_initial_schema", { up: async (db$1) => {
+ await db$1.schema.createTable("users").addColumn("id", "serial", (col) => col.primaryKey()).addColumn("name", "varchar", (col) => col.notNull()).addColumn("email", "varchar", (col) => col.notNull().unique()).addColumn("created_at", "timestamp", (col) => col.defaultTo(kysely.sql`now()`).notNull()).execute();
+ await db$1.schema.createTable("posts").addColumn("id", "serial", (col) => col.primaryKey()).addColumn("title", "varchar", (col) => col.notNull()).addColumn("content", "text", (col) => col.notNull()).addColumn("user_id", "integer", (col) => col.notNull().references("users.id").onDelete("cascade")).addColumn("created_at", "timestamp", (col) => col.defaultTo(kysely.sql`now()`).notNull()).execute();
+ await db$1.schema.createIndex("idx_posts_user_id").on("posts").column("user_id").execute();
+ } });
+ provider.addMigration("002_add_updated_at", { up: async (db$1) => {
+ await db$1.schema.alterTable("users").addColumn("updated_at", "timestamp", (col) => col.defaultTo(kysely.sql`now()`).notNull()).execute();
+ await db$1.schema.alterTable("posts").addColumn("updated_at", "timestamp", (col) => col.defaultTo(kysely.sql`now()`).notNull()).execute();
+ } });
+ const migrator = new require_PostgresKyselyMigrator.PostgresKyselyMigrator({
+ uri,
+ db,
+ provider
+ });
+ const cleanup = await migrator.start();
+ (0, vitest.expect)(consoleSpy).toHaveBeenCalledWith(`Migrating database: ${integrationDbName}`);
+ (0, vitest.expect)(consoleSpy).toHaveBeenCalledWith("Applied 2 migrations successfully");
+ const verifyClient = new pg.Client({
+ host: "localhost",
+ port: 5432,
+ user: "geekmidas",
+ password: "geekmidas",
+ database: integrationDbName
+ });
+ try {
+ await verifyClient.connect();
+ const columnsResult = await verifyClient.query(`
+ SELECT table_name, column_name
+ FROM information_schema.columns
+ WHERE table_schema = 'public'
+ AND table_name IN ('users', 'posts')
+ AND column_name = 'updated_at'
+ ORDER BY table_name
+ `);
+ (0, vitest.expect)(columnsResult.rowCount).toBe(2);
+ const indexResult = await verifyClient.query(`
+ SELECT indexname FROM pg_indexes
+ WHERE schemaname = 'public'
+ AND indexname = 'idx_posts_user_id'
+ `);
+ (0, vitest.expect)(indexResult.rowCount).toBe(1);
+ } finally {
+ await verifyClient.end();
+ }
+ await cleanup();
+ });
+ (0, vitest.it)("should handle empty migrations", async () => {
+ const emptyDbName = `test_empty_${Date.now()}`;
+ const uri = `postgresql://geekmidas:geekmidas@localhost:5432/${emptyDbName}`;
+ const db = new kysely.Kysely({ dialect: new kysely.PostgresDialect({ pool: new pg.Pool({
+ host: "localhost",
+ port: 5432,
+ user: "geekmidas",
+ password: "geekmidas",
+ database: emptyDbName
+ }) }) });
+ const provider = new TestMigrationProvider();
+ const migrator = new require_PostgresKyselyMigrator.PostgresKyselyMigrator({
+ uri,
+ db,
+ provider
+ });
+ const cleanup = await migrator.start();
+ (0, vitest.expect)(consoleSpy).toHaveBeenCalledWith("Applied 0 migrations successfully");
+ await cleanup();
+ });
+ (0, vitest.it)("should work with FileMigrationProvider pattern", async () => {
+ const fileProviderDbName = `test_file_provider_${Date.now()}`;
+ const uri = `postgresql://geekmidas:geekmidas@localhost:5432/${fileProviderDbName}`;
+ const db = new kysely.Kysely({ dialect: new kysely.PostgresDialect({ pool: new pg.Pool({
+ host: "localhost",
+ port: 5432,
+ user: "geekmidas",
+ password: "geekmidas",
+ database: fileProviderDbName
+ }) }) });
+ const migrations = {
+ "2024_01_01_000001_create_users": {
+ up: async (db$1) => {
+ await db$1.schema.createTable("users").addColumn("id", "serial", (col) => col.primaryKey()).addColumn("username", "varchar", (col) => col.notNull().unique()).execute();
+ },
+ down: async (db$1) => {
+ await db$1.schema.dropTable("users").execute();
+ }
+ },
+ "2024_01_02_000001_create_sessions": {
+ up: async (db$1) => {
+ await db$1.schema.createTable("sessions").addColumn("id", "serial", (col) => col.primaryKey()).addColumn("user_id", "integer", (col) => col.notNull().references("users.id")).addColumn("token", "varchar", (col) => col.notNull()).execute();
+ },
+ down: async (db$1) => {
+ await db$1.schema.dropTable("sessions").execute();
+ }
+ }
+ };
+ const provider = { async getMigrations() {
+ return migrations;
+ } };
+ const migrator = new require_PostgresKyselyMigrator.PostgresKyselyMigrator({
+ uri,
+ db,
+ provider
+ });
+ const cleanup = await migrator.start();
+ (0, vitest.expect)(consoleSpy).toHaveBeenCalledWith("Applied 2 migrations successfully");
+ const verifyClient = new pg.Client({
+ host: "localhost",
+ port: 5432,
+ user: "geekmidas",
+ password: "geekmidas",
+ database: fileProviderDbName
+ });
+ try {
+ await verifyClient.connect();
+ const tablesResult = await verifyClient.query(`
+ SELECT table_name FROM information_schema.tables
+ WHERE table_schema = 'public'
+ AND table_name IN ('users', 'sessions')
+ ORDER BY table_name
+ `);
+ (0, vitest.expect)(tablesResult.rowCount).toBe(2);
+ } finally {
+ await verifyClient.end();
+ }
+ await cleanup();
+ });
+ });
+ (0, vitest.describe)("error scenarios", () => {
+ (0, vitest.it)("should handle provider errors", async () => {
+ const providerErrorDbName = `test_provider_error_${Date.now()}`;
+ const uri = `postgresql://geekmidas:geekmidas@localhost:5432/${providerErrorDbName}`;
+ const db = new kysely.Kysely({ dialect: new kysely.PostgresDialect({ pool: new pg.Pool({
+ host: "localhost",
+ port: 5432,
+ user: "geekmidas",
+ password: "geekmidas",
+ database: providerErrorDbName
+ }) }) });
+ const provider = new TestMigrationProvider();
+ provider.setError(true);
+ const migrator = new require_PostgresKyselyMigrator.PostgresKyselyMigrator({
+ uri,
+ db,
+ provider
+ });
+ await (0, vitest.expect)(migrator.start()).rejects.toThrow("Failed to load migrations");
+ await db.destroy();
+ const cleanupClient = new pg.Client({
+ host: "localhost",
+ port: 5432,
+ user: "geekmidas",
+ password: "geekmidas",
+ database: "postgres"
+ });
+ try {
+ await cleanupClient.connect();
+ await cleanupClient.query(`DROP DATABASE IF EXISTS "${providerErrorDbName}"`);
+ } finally {
+ await cleanupClient.end();
+ }
+ }, 1e4);
+ (0, vitest.it)("should handle invalid SQL in migrations", async () => {
+ const invalidSqlDbName = `test_invalid_sql_${Date.now()}`;
+ const uri = `postgresql://geekmidas:geekmidas@localhost:5432/${invalidSqlDbName}`;
+ const db = new kysely.Kysely({ dialect: new kysely.PostgresDialect({ pool: new pg.Pool({
+ host: "localhost",
+ port: 5432,
+ user: "geekmidas",
+ password: "geekmidas",
+ database: invalidSqlDbName
+ }) }) });
+ const provider = new TestMigrationProvider();
+ provider.addMigration("001_invalid_sql", { up: async (db$1) => {
+ await db$1.schema.createTable("posts").addColumn("id", "serial", (col) => col.primaryKey()).addColumn("user_id", "integer", (col) => col.notNull().references("non_existent_table.id")).execute();
+ } });
+ const migrator = new require_PostgresKyselyMigrator.PostgresKyselyMigrator({
+ uri,
+ db,
+ provider
+ });
+ await (0, vitest.expect)(migrator.start()).rejects.toThrow();
+ await db.destroy();
+ const cleanupClient = new pg.Client({
+ host: "localhost",
+ port: 5432,
+ user: "geekmidas",
+ password: "geekmidas",
+ database: "postgres"
+ });
+ try {
+ await cleanupClient.connect();
+ await cleanupClient.query(`DROP DATABASE IF EXISTS "${invalidSqlDbName}"`);
+ } finally {
+ await cleanupClient.end();
+ }
+ }, 1e4);
+ });
+ });
+
+ //#endregion
package/dist/__tests__/PostgresKyselyMigrator.spec.d.cts
@@ -0,0 +1 @@
+ export { };
package/dist/__tests__/PostgresKyselyMigrator.spec.d.mts
@@ -0,0 +1 @@
+ export { };