@geekmidas/testkit 0.0.2 → 0.0.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/PostgresKyselyMigrator.spec +471 -0
- package/dist/Factory.mjs +1 -1
- package/dist/{KyselyFactory-DiiWtMYe.cjs → KyselyFactory-BGvSMLtd.cjs} +11 -12
- package/dist/{KyselyFactory-DZewtWtJ.mjs → KyselyFactory-ionH4gvk.mjs} +12 -13
- package/dist/KyselyFactory.cjs +2 -1
- package/dist/KyselyFactory.mjs +3 -2
- package/dist/{ObjectionFactory-MAf2m8LI.mjs → ObjectionFactory-CFrtXe7i.mjs} +1 -1
- package/dist/ObjectionFactory.cjs +1 -1
- package/dist/ObjectionFactory.mjs +2 -2
- package/dist/{PostgresKyselyMigrator-ChMJpPrQ.mjs → PostgresKyselyMigrator-CbtiZgfI.mjs} +1 -1
- package/dist/{PostgresKyselyMigrator-rY3hO_-1.cjs → PostgresKyselyMigrator-Cxf2Dp9y.cjs} +3 -2
- package/dist/PostgresKyselyMigrator.cjs +2 -2
- package/dist/PostgresKyselyMigrator.mjs +2 -2
- package/dist/{PostgresMigrator-BJ2-5A_b.cjs → PostgresMigrator-eqyAFSf-.cjs} +2 -39
- package/dist/PostgresMigrator.cjs +1 -1
- package/dist/PostgresMigrator.mjs +1 -1
- package/dist/VitestKyselyTransactionIsolator-DXjWQtDN.mjs +12 -0
- package/dist/VitestKyselyTransactionIsolator-Dh2AgJDd.cjs +17 -0
- package/dist/VitestKyselyTransactionIsolator.cjs +5 -0
- package/dist/VitestKyselyTransactionIsolator.mjs +5 -0
- package/dist/VitestTransactionIsolator-pLwsDo_A.mjs +40 -0
- package/dist/VitestTransactionIsolator-zK5NJ7DQ.cjs +51 -0
- package/dist/VitestTransactionIsolator.cjs +5 -0
- package/dist/VitestTransactionIsolator.mjs +4 -0
- package/dist/__tests__/Factory.spec.cjs +139 -0
- package/dist/__tests__/Factory.spec.mjs +139 -0
- package/dist/__tests__/KyselyFactory.spec.cjs +221 -15008
- package/dist/__tests__/KyselyFactory.spec.mjs +220 -15034
- package/dist/__tests__/ObjectionFactory.spec.cjs +387 -0
- package/dist/__tests__/ObjectionFactory.spec.mjs +386 -0
- package/dist/__tests__/PostgresMigrator.spec.cjs +257 -0
- package/dist/__tests__/PostgresMigrator.spec.mjs +256 -0
- package/dist/__tests__/faker.spec.cjs +115 -0
- package/dist/__tests__/faker.spec.mjs +115 -0
- package/dist/__tests__/integration.spec.cjs +279 -0
- package/dist/__tests__/integration.spec.mjs +279 -0
- package/dist/chunk-DWy1uDak.cjs +39 -0
- package/dist/dist-BM2KvLG1.mjs +5618 -0
- package/dist/dist-DE3gAxQI.cjs +5736 -0
- package/dist/example.cjs +2 -1
- package/dist/example.mjs +3 -2
- package/dist/faker-cGCFcrj2.mjs +85 -0
- package/dist/faker-h6CkRloU.cjs +121 -0
- package/dist/faker.cjs +8 -0
- package/dist/faker.mjs +3 -0
- package/dist/helpers-BnARb5Ap.mjs +19 -0
- package/dist/helpers-C2NH7xcz.cjs +135 -0
- package/dist/helpers-C_RZk04R.cjs +31 -0
- package/dist/helpers-CukcFAU9.mjs +111 -0
- package/dist/helpers.cjs +7 -0
- package/dist/helpers.mjs +6 -0
- package/dist/kysely.cjs +16 -4
- package/dist/kysely.mjs +16 -5
- package/dist/objection.cjs +1 -1
- package/dist/objection.mjs +2 -2
- package/dist/vi.bdSIJ99Y-BgRxGeO2.mjs +9382 -0
- package/dist/vi.bdSIJ99Y-CFuzUeY6.cjs +9393 -0
- package/package.json +11 -6
- package/src/Factory.ts +3 -1
- package/src/KyselyFactory.ts +30 -36
- package/src/VitestKyselyTransactionIsolator.ts +23 -0
- package/src/VitestTransactionIsolator.ts +70 -0
- package/src/__tests__/Factory.spec.ts +164 -0
- package/src/__tests__/KyselyFactory.spec.ts +432 -64
- package/src/__tests__/ObjectionFactory.spec.ts +532 -0
- package/src/__tests__/PostgresMigrator.spec.ts +366 -0
- package/src/__tests__/faker.spec.ts +142 -0
- package/src/__tests__/integration.spec.ts +442 -0
- package/src/faker.ts +112 -0
- package/src/helpers.ts +28 -0
- package/src/kysely.ts +14 -0
- package/test/globalSetup.ts +41 -40
- package/test/helpers.ts +273 -0
- /package/dist/{Factory-DlzMkMzb.mjs → Factory-D52Lsc6Z.mjs} +0 -0
- /package/dist/{ObjectionFactory-DeFYWbzt.cjs → ObjectionFactory-BlkzSEqo.cjs} +0 -0
- /package/dist/{PostgresMigrator-BKaNTth5.mjs → PostgresMigrator-DqeuPy-e.mjs} +0 -0
- /package/dist/{magic-string.es-CxbtJGk_.mjs → magic-string.es-C6yzoryu.mjs} +0 -0
- /package/dist/{magic-string.es-KiPEzMtt.cjs → magic-string.es-jdtJrR0A.cjs} +0 -0
package/src/__tests__/PostgresMigrator.spec.ts
@@ -0,0 +1,366 @@
+import { Client } from 'pg';
+import {
+  afterAll,
+  afterEach,
+  beforeAll,
+  beforeEach,
+  describe,
+  expect,
+  it,
+  vi,
+} from 'vitest';
+import { createTestDatabase } from '../../test/helpers';
+import { PostgresMigrator } from '../PostgresMigrator';
+
+// Create a concrete implementation for testing
+class TestPostgresMigrator extends PostgresMigrator {
+  public migrateCalled = false;
+  public migrateError?: Error;
+  public customMigrations: Array<() => Promise<void>> = [];
+
+  async migrate(): Promise<void> {
+    this.migrateCalled = true;
+    if (this.migrateError) {
+      throw this.migrateError;
+    }
+
+    // Run any custom migrations
+    for (const migration of this.customMigrations) {
+      await migration();
+    }
+  }
+
+  addMigration(migration: () => Promise<void>) {
+    this.customMigrations.push(migration);
+  }
+}
+
+describe('PostgresMigrator', () => {
+  let testDbName: string;
+  let cleanupDb: () => Promise<void>;
+  let consoleSpy: any;
+
+  beforeAll(async () => {
+    // Create a unique test database for each test run
+    testDbName = `test_postgres_migrator_${Date.now()}`;
+    cleanupDb = await createTestDatabase(testDbName);
+  });
+
+  beforeEach(() => {
+    consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {});
+  });
+
+  afterEach(() => {
+    consoleSpy.mockRestore();
+  });
+
+  afterAll(async () => {
+    await cleanupDb();
+  });
+
+  describe('constructor', () => {
+    it('should create a PostgresMigrator instance', () => {
+      const migrator = new TestPostgresMigrator(
+        `postgresql://geekmidas:geekmidas@localhost:5432/${testDbName}`,
+      );
+      expect(migrator).toBeInstanceOf(PostgresMigrator);
+    });
+  });
+
+  describe('start method', () => {
+    it('should create database, migrate, and return cleanup function', async () => {
+      const newDbName = `test_start_${Date.now()}`;
+      const uri = `postgresql://geekmidas:geekmidas@localhost:5432/${newDbName}`;
+      const migrator = new TestPostgresMigrator(uri);
+
+      // Add a simple migration to verify it runs
+      let migrationRan = false;
+      migrator.addMigration(async () => {
+        migrationRan = true;
+      });
+
+      const cleanup = await migrator.start();
+
+      expect(migrator.migrateCalled).toBe(true);
+      expect(migrationRan).toBe(true);
+      expect(consoleSpy).toHaveBeenCalledWith(
+        `Migrating database: ${newDbName}`,
+      );
+      expect(typeof cleanup).toBe('function');
+
+      // Test cleanup
+      await cleanup();
+    });
+
+    it('should handle existing database', async () => {
+      // Use the already created test database
+      const uri = `postgresql://geekmidas:geekmidas@localhost:5432/${testDbName}`;
+      const migrator = new TestPostgresMigrator(uri);
+
+      let migrationRan = false;
+      migrator.addMigration(async () => {
+        migrationRan = true;
+      });
+
+      const cleanup = await migrator.start();
+
+      expect(migrator.migrateCalled).toBe(true);
+      expect(migrationRan).toBe(true);
+      expect(typeof cleanup).toBe('function');
+
+      // Test cleanup (but don't actually run it since we need the db for other tests)
+    });
+
+    it('should handle URI with query parameters', async () => {
+      const queryDbName = `test_query_params_${Date.now()}`;
+      const uri = `postgresql://geekmidas:geekmidas@localhost:5432/${queryDbName}?ssl=false&timeout=30`;
+      const migrator = new TestPostgresMigrator(uri);
+
+      const cleanup = await migrator.start();
+
+      expect(migrator.migrateCalled).toBe(true);
+      expect(typeof cleanup).toBe('function');
+
+      await cleanup();
+    });
+
+    it('should clean up connections even if migration fails', async () => {
+      const failDbName = `test_fail_${Date.now()}`;
+      const uri = `postgresql://geekmidas:geekmidas@localhost:5432/${failDbName}`;
+      const migrator = new TestPostgresMigrator(uri);
+      migrator.migrateError = new Error('Migration failed');
+
+      await expect(migrator.start()).rejects.toThrow('Migration failed');
+
+      // Verify the database was still created but migration failed
+      expect(migrator.migrateCalled).toBe(true);
+
+      // Cleanup the failed database
+      const cleanupClient = new Client({
+        host: 'localhost',
+        port: 5432,
+        user: 'geekmidas',
+        password: 'geekmidas',
+        database: 'postgres',
+      });
+      try {
+        await cleanupClient.connect();
+        await cleanupClient.query(`DROP DATABASE IF EXISTS "${failDbName}"`);
+      } finally {
+        await cleanupClient.end();
+      }
+    });
+
+    it('should return cleanup function that drops database', async () => {
+      const cleanupDbName = `test_cleanup_${Date.now()}`;
+      const uri = `postgresql://geekmidas:geekmidas@localhost:5432/${cleanupDbName}`;
+      const migrator = new TestPostgresMigrator(uri);
+
+      const cleanup = await migrator.start();
+
+      expect(migrator.migrateCalled).toBe(true);
+      expect(typeof cleanup).toBe('function');
+
+      // Verify database exists before cleanup
+      const checkClient = new Client({
+        host: 'localhost',
+        port: 5432,
+        user: 'geekmidas',
+        password: 'geekmidas',
+        database: 'postgres',
+      });
+      await checkClient.connect();
+      const beforeResult = await checkClient.query(
+        `SELECT * FROM pg_catalog.pg_database WHERE datname = $1`,
+        [cleanupDbName],
+      );
+      expect(beforeResult.rowCount).toBe(1);
+      await checkClient.end();
+
+      // Call cleanup
+      await cleanup();
+
+      // Verify database was dropped
+      const checkClient2 = new Client({
+        host: 'localhost',
+        port: 5432,
+        user: 'geekmidas',
+        password: 'geekmidas',
+        database: 'postgres',
+      });
+      await checkClient2.connect();
+      const afterResult = await checkClient2.query(
+        `SELECT * FROM pg_catalog.pg_database WHERE datname = $1`,
+        [cleanupDbName],
+      );
+      expect(afterResult.rowCount).toBe(0);
+      await checkClient2.end();
+    });
+  });
+
+  describe('database creation', () => {
+    it('should handle connection errors gracefully', async () => {
+      // Use invalid credentials to test connection error
+      const badDbName = `test_bad_connection_${Date.now()}`;
+      const uri = `postgresql://invalid_user:invalid_pass@localhost:5432/${badDbName}`;
+      const migrator = new TestPostgresMigrator(uri);
+
+      await expect(migrator.start()).rejects.toThrow();
+    });
+
+    it('should handle invalid database names', async () => {
+      // Use a database name with invalid characters
+      const invalidDbName = 'test-invalid-db-name!';
+      const uri = `postgresql://geekmidas:geekmidas@localhost:5432/${invalidDbName}`;
+      const migrator = new TestPostgresMigrator(uri);
+
+      await expect(migrator.start).rejects.toThrow();
+    });
+  });
+
+  describe('URI parsing', () => {
+    it('should parse different URI formats correctly', async () => {
+      const testDbName = `test_uri_parsing_${Date.now()}`;
+      const uri = `postgresql://geekmidas:geekmidas@localhost:5432/${testDbName}`;
+      const migrator = new TestPostgresMigrator(uri);
+
+      const cleanup = await migrator.start();
+
+      expect(migrator.migrateCalled).toBe(true);
+      expect(typeof cleanup).toBe('function');
+
+      await cleanup();
+    });
+  });
+
+  describe('error handling', () => {
+    it('should propagate migration errors', async () => {
+      const errorDbName = `test_migration_error_${Date.now()}`;
+      const uri = `postgresql://geekmidas:geekmidas@localhost:5432/${errorDbName}`;
+      const migrator = new TestPostgresMigrator(uri);
+      const migrationError = new Error('Custom migration error');
+      migrator.migrateError = migrationError;
+
+      await expect(migrator.start()).rejects.toThrow('Custom migration error');
+
+      // Cleanup the created database
+      const cleanupClient = new Client({
+        host: 'localhost',
+        port: 5432,
+        user: 'geekmidas',
+        password: 'geekmidas',
+        database: 'postgres',
+      });
+      try {
+        await cleanupClient.connect();
+        await cleanupClient.query(`DROP DATABASE IF EXISTS "${errorDbName}"`);
+      } finally {
+        await cleanupClient.end();
+      }
+    });
+
+    it('should handle cleanup errors gracefully', async () => {
+      const cleanupErrorDbName = `test_cleanup_error_${Date.now()}`;
+      const uri = `postgresql://geekmidas:geekmidas@localhost:5432/${cleanupErrorDbName}`;
+      const migrator = new TestPostgresMigrator(uri);
+
+      const cleanup = await migrator.start();
+
+      // Manually drop the database to cause a cleanup error
+      const adminClient = new Client({
+        host: 'localhost',
+        port: 5432,
+        user: 'geekmidas',
+        password: 'geekmidas',
+        database: 'postgres',
+      });
+      await adminClient.connect();
+      await adminClient.query(
+        `DROP DATABASE IF EXISTS "${cleanupErrorDbName}"`,
+      );
+      await adminClient.end();
+
+      // Now cleanup should fail because database doesn't exist
+      await expect(cleanup()).rejects.toThrow();
+    });
+  });
+
+  describe('abstract method', () => {
+    it('should require concrete implementation of migrate method', () => {
+      // TypeScript ensures abstract methods are implemented
+      // This test verifies the TestPostgresMigrator implements migrate
+      const migrator = new TestPostgresMigrator(
+        `postgresql://geekmidas:geekmidas@localhost:5432/${testDbName}`,
+      );
+      expect(typeof migrator.migrate).toBe('function');
+    });
+  });
+
+  describe('integration scenarios', () => {
+    it('should handle complete workflow', async () => {
+      const integrationDbName = `test_integration_${Date.now()}`;
+      const uri = `postgresql://geekmidas:geekmidas@localhost:5432/${integrationDbName}`;
+      const migrator = new TestPostgresMigrator(uri);
+
+      // Add a migration that creates a table
+      migrator.addMigration(async () => {
+        const client = new Client({
+          host: 'localhost',
+          port: 5432,
+          user: 'geekmidas',
+          password: 'geekmidas',
+          database: integrationDbName,
+        });
+        await client.connect();
+        await client.query(`
+          CREATE TABLE IF NOT EXISTS test_table (
+            id SERIAL PRIMARY KEY,
+            name VARCHAR(255) NOT NULL
+          )
+        `);
+        await client.end();
+      });
+
+      // Start migration
+      const cleanup = await migrator.start();
+
+      expect(migrator.migrateCalled).toBe(true);
+      expect(consoleSpy).toHaveBeenCalledWith(
+        `Migrating database: ${integrationDbName}`,
+      );
+
+      // Verify the table was created
+      const verifyClient = new Client({
+        host: 'localhost',
+        port: 5432,
+        user: 'geekmidas',
+        password: 'geekmidas',
+        database: integrationDbName,
+      });
+      await verifyClient.connect();
+      const tableResult = await verifyClient.query(`
+        SELECT table_name FROM information_schema.tables
+        WHERE table_schema = 'public' AND table_name = 'test_table'
+      `);
+      expect(tableResult.rowCount).toBe(1);
+      await verifyClient.end();
+
+      // Cleanup
+      await cleanup();
+    });
+
+    it('should handle database that already exists and cleanup', async () => {
+      // Use the existing test database
+      const uri = `postgresql://geekmidas:geekmidas@localhost:5432/${testDbName}`;
+      const migrator = new TestPostgresMigrator(uri);
+
+      // Start migration (database already exists)
+      const cleanup = await migrator.start();
+
+      expect(migrator.migrateCalled).toBe(true);
+
+      // Don't call cleanup as we need the database for other tests
+      expect(typeof cleanup).toBe('function');
+    });
+  });
+});
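The spec above pins down the contract of the abstract `PostgresMigrator`: a subclass implements `migrate()`, the constructor takes a Postgres connection URI, and `start()` creates the database, logs `Migrating database: <name>`, runs the migration, and returns a cleanup function that drops the database. A minimal consumer-side sketch follows; the import path and the body of `migrate()` are assumptions, not the package's documented API.

```ts
// Sketch only: entry point and migration body are assumed, not confirmed by the diff.
import { PostgresMigrator } from '@geekmidas/testkit'; // hypothetical import path

class MyTestMigrator extends PostgresMigrator {
  async migrate(): Promise<void> {
    // Apply the schema to the freshly created test database here
    // (raw SQL, Kysely migrations, etc. -- whatever the project uses).
  }
}

async function withTestDatabase() {
  const migrator = new MyTestMigrator(
    'postgresql://user:pass@localhost:5432/my_test_db',
  );
  // start() creates the database if needed, runs migrate(), and returns
  // a cleanup function that drops the database again.
  const cleanup = await migrator.start();
  try {
    // ...run tests against my_test_db...
  } finally {
    await cleanup();
  }
}
```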
package/src/__tests__/faker.spec.ts
@@ -0,0 +1,142 @@
+import { beforeEach, describe, expect, it } from 'vitest';
+import { faker } from '../faker';
+
+describe('faker', () => {
+  describe('sequence', () => {
+    beforeEach(() => {
+      faker.resetAllSequences();
+    });
+
+    it('should start from 1 for a new sequence', () => {
+      expect(faker.sequence()).toBe(1);
+      expect(faker.sequence('custom')).toBe(1);
+    });
+
+    it('should increment on each call', () => {
+      expect(faker.sequence()).toBe(1);
+      expect(faker.sequence()).toBe(2);
+      expect(faker.sequence()).toBe(3);
+    });
+
+    it('should maintain separate counters for different names', () => {
+      expect(faker.sequence('users')).toBe(1);
+      expect(faker.sequence('posts')).toBe(1);
+      expect(faker.sequence('users')).toBe(2);
+      expect(faker.sequence('posts')).toBe(2);
+      expect(faker.sequence('users')).toBe(3);
+      expect(faker.sequence('posts')).toBe(3);
+    });
+
+    it('should handle concurrent-like sequential calls', () => {
+      const results: number[] = [];
+      for (let i = 0; i < 100; i++) {
+        results.push(faker.sequence('concurrent'));
+      }
+
+      // Check that all values are unique and sequential
+      expect(results).toEqual(Array.from({ length: 100 }, (_, i) => i + 1));
+    });
+  });
+
+  describe('resetSequence', () => {
+    beforeEach(() => {
+      faker.resetAllSequences();
+    });
+
+    it('should reset a specific sequence to 0', () => {
+      faker.sequence('test');
+      faker.sequence('test');
+      expect(faker.sequence('test')).toBe(3);
+
+      faker.resetSequence('test');
+      expect(faker.sequence('test')).toBe(1);
+    });
+
+    it('should reset a specific sequence to a custom value', () => {
+      faker.sequence('test');
+      faker.resetSequence('test', 10);
+      expect(faker.sequence('test')).toBe(11);
+    });
+
+    it('should create a new sequence if it does not exist', () => {
+      faker.resetSequence('new', 5);
+      expect(faker.sequence('new')).toBe(6);
+    });
+
+    it('should not affect other sequences', () => {
+      faker.sequence('test1');
+      faker.sequence('test1');
+      faker.sequence('test2');
+
+      faker.resetSequence('test1');
+
+      expect(faker.sequence('test1')).toBe(1);
+      expect(faker.sequence('test2')).toBe(2);
+    });
+  });
+
+  describe('resetAllSequences', () => {
+    it('should reset all sequences', () => {
+      faker.sequence('test1');
+      faker.sequence('test1');
+      faker.sequence('test2');
+      faker.sequence('test2');
+      faker.sequence('test2');
+
+      faker.resetAllSequences();
+
+      expect(faker.sequence('test1')).toBe(1);
+      expect(faker.sequence('test2')).toBe(1);
+      expect(faker.sequence()).toBe(1);
+    });
+  });
+
+  describe('identifier', () => {
+    beforeEach(() => {
+      faker.resetAllSequences();
+    });
+
+    it('should include sequence number in identifier', () => {
+      const id1 = faker.identifier();
+      const id2 = faker.identifier();
+
+      // Both should be different because of the sequence
+      expect(id1).not.toBe(id2);
+
+      // Should end with sequence numbers
+      expect(id1).toMatch(/1$/);
+      expect(id2).toMatch(/2$/);
+    });
+
+    it('should use custom suffix when provided', () => {
+      const id = faker.identifier('customSuffix');
+      expect(id).toMatch(/\.customSuffix$/);
+    });
+  });
+
+  describe('timestamps', () => {
+    it('should return createdAt and updatedAt dates', () => {
+      const { createdAt, updatedAt } = faker.timestamps();
+
+      expect(createdAt).toBeInstanceOf(Date);
+      expect(updatedAt).toBeInstanceOf(Date);
+      expect(createdAt.getTime()).toBeLessThanOrEqual(updatedAt.getTime());
+      expect(updatedAt.getTime()).toBeLessThanOrEqual(new Date().getTime());
+    });
+
+    it('should have milliseconds set to 0', () => {
+      const { createdAt, updatedAt } = faker.timestamps();
+
+      expect(createdAt.getMilliseconds()).toBe(0);
+      expect(updatedAt.getMilliseconds()).toBe(0);
+    });
+  });
+
+  describe('price', () => {
+    it('should return a number', () => {
+      const result = faker.price();
+      expect(typeof result).toBe('number');
+      expect(result).toBeGreaterThan(0);
+    });
+  });
+});
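The faker spec above documents the new helpers added in this release by example: named sequences, per-sequence resets, `identifier()`, `timestamps()`, and `price()`. A minimal usage sketch follows; the `@geekmidas/testkit/faker` import path is an assumption based on the `dist/faker.*` entries in the file list, not a confirmed export.

```ts
// Sketch only: import path assumed; behavior mirrors the tests above.
import { faker } from '@geekmidas/testkit/faker';

// Named sequences are independent, monotonically increasing counters.
faker.sequence('users'); // 1
faker.sequence('users'); // 2
faker.sequence('posts'); // 1

// Counters can be reset individually (optionally to a starting value) or all at once.
faker.resetSequence('users', 100); // next faker.sequence('users') returns 101
faker.resetAllSequences();

// Convenience generators for building factory data.
const id = faker.identifier();                        // unique, sequence-suffixed string
const { createdAt, updatedAt } = faker.timestamps();  // Dates with milliseconds zeroed
const amount = faker.price();                         // positive number
```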