@donkeylabs/server 2.0.18 → 2.0.20
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/docs/caching-strategies.md +677 -0
- package/docs/dev-experience.md +656 -0
- package/docs/hot-reload-limitations.md +166 -0
- package/docs/load-testing.md +974 -0
- package/docs/plugin-registry-design.md +1064 -0
- package/docs/production.md +1229 -0
- package/docs/workflows.md +90 -3
- package/package.json +18 -2
- package/src/admin/routes.ts +153 -0
- package/src/core/cron.ts +184 -15
- package/src/core/index.ts +25 -0
- package/src/core/job-adapter-kysely.ts +176 -73
- package/src/core/job-adapter-sqlite.ts +10 -0
- package/src/core/jobs.ts +112 -17
- package/src/core/migrations/workflows/002_add_metadata_column.ts +28 -0
- package/src/core/process-adapter-kysely.ts +62 -21
- package/src/core/storage-adapter-local.test.ts +199 -0
- package/src/core/storage.test.ts +197 -0
- package/src/core/workflow-adapter-kysely.ts +66 -19
- package/src/core/workflow-executor.ts +469 -0
- package/src/core/workflow-proxy.ts +238 -0
- package/src/core/workflow-socket.ts +447 -0
- package/src/core/workflows.test.ts +415 -0
- package/src/core/workflows.ts +782 -9
- package/src/core.ts +17 -6
- package/src/index.ts +14 -0
- package/src/server.ts +40 -26
- package/src/testing/database.test.ts +263 -0
- package/src/testing/database.ts +173 -0
- package/src/testing/e2e.test.ts +189 -0
- package/src/testing/e2e.ts +272 -0
- package/src/testing/index.ts +18 -0
package/src/core.ts
CHANGED
|
@@ -10,7 +10,7 @@ import type { Cron } from "./core/cron";
|
|
|
10
10
|
import type { Jobs } from "./core/jobs";
|
|
11
11
|
import type { SSE } from "./core/sse";
|
|
12
12
|
import type { RateLimiter } from "./core/rate-limiter";
|
|
13
|
-
import type { Errors, CustomErrorRegistry } from "./core/errors";
|
|
13
|
+
import type { Errors, CustomErrorRegistry, ErrorFactory } from "./core/errors";
|
|
14
14
|
import type { Workflows } from "./core/workflows";
|
|
15
15
|
import type { Processes } from "./core/processes";
|
|
16
16
|
import type { Audit } from "./core/audit";
|
|
@@ -408,7 +408,7 @@ export type InferEvents<T> = UnwrapPluginFactory<T> extends { events?: infer E }
|
|
|
408
408
|
export type InferClientConfig<T> = UnwrapPluginFactory<T> extends { client?: infer C } ? C : undefined;
|
|
409
409
|
export type InferCustomErrors<T> = UnwrapPluginFactory<T> extends { customErrors?: infer E } ? E : {};
|
|
410
410
|
|
|
411
|
-
export type { ExtractServices, ExtractSchemas };
|
|
411
|
+
export type { ExtractServices, ExtractSchemas, ErrorFactory };
|
|
412
412
|
|
|
413
413
|
export type Plugin = {
|
|
414
414
|
name: string;
|
|
@@ -494,10 +494,21 @@ export class PluginManager {
|
|
|
494
494
|
* Records that a migration has been applied for a specific plugin.
|
|
495
495
|
*/
|
|
496
496
|
private async recordMigration(pluginName: string, migrationName: string): Promise<void> {
|
|
497
|
-
|
|
498
|
-
|
|
499
|
-
|
|
500
|
-
|
|
497
|
+
try {
|
|
498
|
+
// Use ON CONFLICT DO NOTHING to handle race conditions on hot reload
|
|
499
|
+
await sql`
|
|
500
|
+
INSERT INTO __donkeylabs_migrations__ (plugin_name, migration_name)
|
|
501
|
+
VALUES (${pluginName}, ${migrationName})
|
|
502
|
+
ON CONFLICT (plugin_name, migration_name) DO NOTHING
|
|
503
|
+
`.execute(this.core.db);
|
|
504
|
+
} catch (e: any) {
|
|
505
|
+
// Fallback: ignore UNIQUE constraint errors (migration already recorded)
|
|
506
|
+
if (e?.code === "SQLITE_CONSTRAINT_UNIQUE" || e?.message?.includes("UNIQUE constraint failed")) {
|
|
507
|
+
// Already recorded, ignore
|
|
508
|
+
return;
|
|
509
|
+
}
|
|
510
|
+
throw e;
|
|
511
|
+
}
|
|
501
512
|
}
|
|
502
513
|
|
|
503
514
|
/**
|
package/src/index.ts
CHANGED
|
@@ -91,6 +91,14 @@ export function defineConfig(config: DonkeylabsConfig): DonkeylabsConfig {
|
|
|
91
91
|
// Re-export HttpError for custom error creation
|
|
92
92
|
export { HttpError } from "./core/errors";
|
|
93
93
|
|
|
94
|
+
// Core services types
|
|
95
|
+
export {
|
|
96
|
+
type Logger,
|
|
97
|
+
type LogLevel,
|
|
98
|
+
type ErrorFactory,
|
|
99
|
+
type ErrorFactories,
|
|
100
|
+
} from "./core/index";
|
|
101
|
+
|
|
94
102
|
// Workflows (step functions)
|
|
95
103
|
export {
|
|
96
104
|
workflow,
|
|
@@ -129,3 +137,9 @@ export {
|
|
|
129
137
|
type IntegrationHarnessOptions,
|
|
130
138
|
type IntegrationHarnessResult,
|
|
131
139
|
} from "./harness";
|
|
140
|
+
|
|
141
|
+
// E2E Testing - moved to separate subpath export: "@donkeylabs/server/testing"
|
|
142
|
+
// import { createE2EFixtures, defineE2EConfig } from "@donkeylabs/server/testing";
|
|
143
|
+
|
|
144
|
+
// Database Testing Utilities - moved to separate subpath export: "@donkeylabs/server/testing"
|
|
145
|
+
// import { createTestDatabase, resetTestDatabase } from "@donkeylabs/server/testing";
|
package/src/server.ts
CHANGED
|
@@ -205,6 +205,8 @@ export class AppServer {
|
|
|
205
205
|
private shutdownHandlers: OnShutdownHandler[] = [];
|
|
206
206
|
private errorHandlers: OnErrorHandler[] = [];
|
|
207
207
|
private isShuttingDown = false;
|
|
208
|
+
private isInitialized = false;
|
|
209
|
+
private initializationPromise: Promise<void> | null = null;
|
|
208
210
|
private generateModeSetup = false;
|
|
209
211
|
|
|
210
212
|
// Custom services registry
|
|
@@ -955,6 +957,27 @@ ${factoryFunction}
|
|
|
955
957
|
process.exit(0);
|
|
956
958
|
}
|
|
957
959
|
|
|
960
|
+
// Guard against multiple initializations using promise-based mutex
|
|
961
|
+
// This prevents race conditions when multiple requests arrive concurrently
|
|
962
|
+
if (this.isInitialized) {
|
|
963
|
+
this.coreServices.logger.debug("Server already initialized, skipping");
|
|
964
|
+
return;
|
|
965
|
+
}
|
|
966
|
+
if (this.initializationPromise) {
|
|
967
|
+
this.coreServices.logger.debug("Server initialization in progress, waiting...");
|
|
968
|
+
await this.initializationPromise;
|
|
969
|
+
return;
|
|
970
|
+
}
|
|
971
|
+
|
|
972
|
+
// Create the initialization promise - all concurrent callers will await this same promise
|
|
973
|
+
this.initializationPromise = this.doInitialize();
|
|
974
|
+
await this.initializationPromise;
|
|
975
|
+
}
|
|
976
|
+
|
|
977
|
+
/**
|
|
978
|
+
* Internal initialization logic - only called once via the promise mutex
|
|
979
|
+
*/
|
|
980
|
+
private async doInitialize(): Promise<void> {
|
|
958
981
|
const { logger } = this.coreServices;
|
|
959
982
|
|
|
960
983
|
// Auto-generate types in dev mode if configured
|
|
@@ -963,6 +986,11 @@ ${factoryFunction}
|
|
|
963
986
|
await this.manager.migrate();
|
|
964
987
|
await this.manager.init();
|
|
965
988
|
|
|
989
|
+
// Pass plugins to workflows so handlers can access ctx.plugins
|
|
990
|
+
this.coreServices.workflows.setPlugins(this.manager.getServices());
|
|
991
|
+
|
|
992
|
+
this.isInitialized = true;
|
|
993
|
+
|
|
966
994
|
this.coreServices.cron.start();
|
|
967
995
|
this.coreServices.jobs.start();
|
|
968
996
|
await this.coreServices.workflows.resume();
|
|
@@ -1185,34 +1213,20 @@ ${factoryFunction}
|
|
|
1185
1213
|
process.exit(0);
|
|
1186
1214
|
}
|
|
1187
1215
|
|
|
1188
|
-
|
|
1189
|
-
|
|
1190
|
-
|
|
1191
|
-
|
|
1192
|
-
|
|
1193
|
-
|
|
1194
|
-
|
|
1195
|
-
|
|
1196
|
-
|
|
1197
|
-
|
|
1198
|
-
|
|
1199
|
-
// 3. Start background services
|
|
1200
|
-
this.coreServices.cron.start();
|
|
1201
|
-
this.coreServices.jobs.start();
|
|
1202
|
-
await this.coreServices.workflows.resume();
|
|
1203
|
-
this.coreServices.processes.start();
|
|
1204
|
-
logger.info("Background services started (cron, jobs, workflows, processes)");
|
|
1205
|
-
|
|
1206
|
-
// 4. Build route map
|
|
1207
|
-
for (const router of this.routers) {
|
|
1208
|
-
for (const route of router.getRoutes()) {
|
|
1209
|
-
if (this.routeMap.has(route.name)) {
|
|
1210
|
-
logger.warn(`Duplicate route detected`, { route: route.name });
|
|
1211
|
-
}
|
|
1212
|
-
this.routeMap.set(route.name, route);
|
|
1216
|
+
// Guard against multiple initializations using promise-based mutex
|
|
1217
|
+
// This prevents race conditions when multiple requests arrive concurrently
|
|
1218
|
+
if (!this.isInitialized) {
|
|
1219
|
+
if (this.initializationPromise) {
|
|
1220
|
+
this.coreServices.logger.debug("Server initialization in progress, waiting...");
|
|
1221
|
+
await this.initializationPromise;
|
|
1222
|
+
} else {
|
|
1223
|
+
// Create the initialization promise - all concurrent callers will await this same promise
|
|
1224
|
+
this.initializationPromise = this.doInitialize();
|
|
1225
|
+
await this.initializationPromise;
|
|
1213
1226
|
}
|
|
1214
1227
|
}
|
|
1215
|
-
|
|
1228
|
+
|
|
1229
|
+
const { logger } = this.coreServices;
|
|
1216
1230
|
|
|
1217
1231
|
// 5. Start HTTP server with port retry logic
|
|
1218
1232
|
const fetchHandler = async (req: Request, server: ReturnType<typeof Bun.serve>) => {
|
|
@@ -0,0 +1,263 @@
|
|
|
1
|
+
// packages/server/src/testing/database.test.ts
/**
 * Test suite for the database testing utilities (createTestDatabase,
 * resetTestDatabase, seedTestData). Uses bun:test with real SQLite databases
 * (isolated temp files or in-memory) so no external services are required.
 */
import { describe, it, expect, beforeEach, afterEach } from "bun:test";
import {
  createTestDatabase,
  resetTestDatabase,
  seedTestData,
} from "./database";
import type { Kysely } from "kysely";

describe("Database Testing Utilities", () => {
  // Covers the sqlite happy paths plus the error branches for
  // postgres/mysql without a connection string and unsupported types.
  describe("createTestDatabase", () => {
    let db: Kysely<any>;

    afterEach(async () => {
      // Each test assigns `db`; destroy it to release the connection and
      // (for isolated databases) remove the temp directory.
      if (db) {
        await db.destroy();
      }
    });

    it("should create an isolated SQLite database", async () => {
      db = await createTestDatabase({
        type: "sqlite",
        isolated: true,
      });

      expect(db).toBeDefined();
      // Verify it's functional by creating a table
      await db.schema
        .createTable("test_table")
        .addColumn("id", "text", (col) => col.primaryKey())
        .addColumn("name", "text")
        .execute();

      // Insert and query
      await db
        .insertInto("test_table" as any)
        .values({ id: "1", name: "Test" })
        .execute();

      const result = await db
        .selectFrom("test_table" as any)
        .selectAll()
        .executeTakeFirst();

      expect(result).toEqual({ id: "1", name: "Test" });
    });

    it("should create an in-memory SQLite database", async () => {
      // isolated: false + ":memory:" skips the temp-dir path entirely.
      db = await createTestDatabase({
        type: "sqlite",
        isolated: false,
        connectionString: ":memory:",
      });

      expect(db).toBeDefined();

      // Create and use table
      await db.schema
        .createTable("memory_test")
        .addColumn("id", "integer", (col) => col.primaryKey())
        .execute();

      await db
        .insertInto("memory_test" as any)
        .values({ id: 1 })
        .execute();

      const count = await db
        .selectFrom("memory_test" as any)
        .select(db.fn.count("id").as("count"))
        .executeTakeFirst();

      // count() may come back as string or bigint depending on driver; coerce.
      expect(Number(count?.count)).toBe(1);
    });

    it("should cleanup isolated database on destroy", async () => {
      db = await createTestDatabase({
        type: "sqlite",
        isolated: true,
      });

      await db.schema
        .createTable("cleanup_test")
        .addColumn("id", "text", (col) => col.primaryKey())
        .execute();

      // Destroy should not throw and should cleanup
      await db.destroy();

      // Clear reference so afterEach doesn't try to destroy again
      db = null as any;
    });

    it("should throw for postgres without connection string", async () => {
      // Temporarily remove the env fallback so the missing-connection-string
      // branch is exercised even on machines that have it set.
      const originalEnv = process.env.TEST_DATABASE_URL;
      delete process.env.TEST_DATABASE_URL;

      try {
        await expect(
          createTestDatabase({ type: "postgres" })
        ).rejects.toThrow("TEST_DATABASE_URL not set for postgres");
      } finally {
        if (originalEnv) {
          process.env.TEST_DATABASE_URL = originalEnv;
        }
      }
    });

    it("should throw for mysql without connection string", async () => {
      const originalEnv = process.env.TEST_DATABASE_URL;
      delete process.env.TEST_DATABASE_URL;

      try {
        await expect(
          createTestDatabase({ type: "mysql" })
        ).rejects.toThrow("TEST_DATABASE_URL not set for mysql");
      } finally {
        if (originalEnv) {
          process.env.TEST_DATABASE_URL = originalEnv;
        }
      }
    });

    it("should throw for unsupported database type", async () => {
      await expect(
        createTestDatabase({ type: "mongo" as any })
      ).rejects.toThrow("Unsupported database type: mongo");
    });
  });

  // resetTestDatabase should clear every row from every table.
  describe("resetTestDatabase", () => {
    let db: Kysely<any>;

    beforeEach(async () => {
      db = await createTestDatabase({
        type: "sqlite",
        isolated: true,
      });

      // Create test tables
      await db.schema
        .createTable("users")
        .addColumn("id", "text", (col) => col.primaryKey())
        .addColumn("email", "text")
        .execute();

      await db.schema
        .createTable("posts")
        .addColumn("id", "text", (col) => col.primaryKey())
        .addColumn("title", "text")
        .execute();
    });

    afterEach(async () => {
      if (db) {
        await db.destroy();
      }
    });

    it("should truncate all tables", async () => {
      // Insert data
      await db.insertInto("users" as any).values({ id: "1", email: "a@b.com" }).execute();
      await db.insertInto("users" as any).values({ id: "2", email: "c@d.com" }).execute();
      await db.insertInto("posts" as any).values({ id: "1", title: "Post 1" }).execute();

      // Verify data exists
      const usersBefore = await db.selectFrom("users" as any).selectAll().execute();
      const postsBefore = await db.selectFrom("posts" as any).selectAll().execute();
      expect(usersBefore.length).toBe(2);
      expect(postsBefore.length).toBe(1);

      // Reset
      await resetTestDatabase(db);

      // Verify all tables are empty
      const usersAfter = await db.selectFrom("users" as any).selectAll().execute();
      const postsAfter = await db.selectFrom("posts" as any).selectAll().execute();
      expect(usersAfter.length).toBe(0);
      expect(postsAfter.length).toBe(0);
    });

    it("should handle empty tables", async () => {
      // No data inserted, should not throw
      await resetTestDatabase(db);

      const users = await db.selectFrom("users" as any).selectAll().execute();
      expect(users.length).toBe(0);
    });
  });

  // seedTestData inserts the given rows table-by-table.
  describe("seedTestData", () => {
    let db: Kysely<any>;

    beforeEach(async () => {
      db = await createTestDatabase({
        type: "sqlite",
        isolated: true,
      });

      await db.schema
        .createTable("users")
        .addColumn("id", "text", (col) => col.primaryKey())
        .addColumn("email", "text")
        .addColumn("name", "text")
        .execute();

      await db.schema
        .createTable("orders")
        .addColumn("id", "text", (col) => col.primaryKey())
        .addColumn("user_id", "text")
        .addColumn("total", "integer")
        .execute();
    });

    afterEach(async () => {
      if (db) {
        await db.destroy();
      }
    });

    it("should seed data into multiple tables", async () => {
      await seedTestData(db, {
        users: [
          { id: "u1", email: "user1@test.com", name: "User 1" },
          { id: "u2", email: "user2@test.com", name: "User 2" },
        ],
        orders: [
          { id: "o1", user_id: "u1", total: 100 },
          { id: "o2", user_id: "u1", total: 200 },
          { id: "o3", user_id: "u2", total: 50 },
        ],
      });

      const users = await db.selectFrom("users" as any).selectAll().execute();
      const orders = await db.selectFrom("orders" as any).selectAll().execute();

      expect(users.length).toBe(2);
      expect(orders.length).toBe(3);

      expect(users[0].email).toBe("user1@test.com");
      expect(orders[0].total).toBe(100);
    });

    it("should handle empty arrays", async () => {
      await seedTestData(db, {
        users: [],
      });

      const users = await db.selectFrom("users" as any).selectAll().execute();
      expect(users.length).toBe(0);
    });

    it("should handle single table", async () => {
      await seedTestData(db, {
        users: [{ id: "1", email: "solo@test.com", name: "Solo" }],
      });

      const users = await db.selectFrom("users" as any).selectAll().execute();
      expect(users.length).toBe(1);
      expect(users[0].name).toBe("Solo");
    });
  });
});
|
|
@@ -0,0 +1,173 @@
|
|
|
1
|
+
// packages/server/src/testing/database.ts
|
|
2
|
+
/**
|
|
3
|
+
* Database Testing Utilities
|
|
4
|
+
*
|
|
5
|
+
* Helper functions for setting up and managing test databases.
|
|
6
|
+
*
|
|
7
|
+
* Usage:
|
|
8
|
+
* const db = await createTestDatabase({ type: "sqlite" });
|
|
9
|
+
* await seedTestData(db, { users: [{ email: "test@example.com" }] });
|
|
10
|
+
* // Run tests...
|
|
11
|
+
* await db.destroy();
|
|
12
|
+
*/
|
|
13
|
+
|
|
14
|
+
import { Kysely, PostgresDialect, MysqlDialect } from "kysely";
|
|
15
|
+
import { BunSqliteDialect } from "kysely-bun-sqlite";
|
|
16
|
+
import Database from "bun:sqlite";
|
|
17
|
+
import { mkdtempSync, rmSync } from "fs";
|
|
18
|
+
import { tmpdir } from "os";
|
|
19
|
+
import { join } from "path";
|
|
20
|
+
|
|
21
|
+
// pg and mysql2 are optional - dynamically imported when needed
|
|
22
|
+
|
|
23
|
+
export interface TestDatabaseOptions {
  /** Database type */
  type: "sqlite" | "postgres" | "mysql";

  /** Path to migrations (glob pattern); only used when `runMigrations` is true */
  migrations?: string;

  /** Run migrations automatically (NOTE: the runner currently throws — not yet implemented) */
  runMigrations?: boolean;

  /** Isolated database (deleted after tests). Defaults to true for sqlite. */
  isolated?: boolean;

  /** Connection string (for postgres/mysql, or a sqlite path when `isolated` is false) */
  connectionString?: string;
}
|
|
39
|
+
|
|
40
|
+
/**
|
|
41
|
+
* Create a test database instance
|
|
42
|
+
*/
|
|
43
|
+
export async function createTestDatabase(options: TestDatabaseOptions): Promise<Kysely<any>> {
|
|
44
|
+
let db: Kysely<any>;
|
|
45
|
+
let cleanupPath: string | null = null;
|
|
46
|
+
|
|
47
|
+
switch (options.type) {
|
|
48
|
+
case "sqlite": {
|
|
49
|
+
let dbPath: string;
|
|
50
|
+
|
|
51
|
+
if (options.isolated !== false) {
|
|
52
|
+
// Create isolated temp database
|
|
53
|
+
const tmpDir = mkdtempSync(join(tmpdir(), "donkeylabs-test-"));
|
|
54
|
+
dbPath = join(tmpDir, "test.db");
|
|
55
|
+
cleanupPath = tmpDir;
|
|
56
|
+
} else {
|
|
57
|
+
dbPath = options.connectionString || ":memory:";
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
db = new Kysely({
|
|
61
|
+
dialect: new BunSqliteDialect({
|
|
62
|
+
database: new Database(dbPath),
|
|
63
|
+
}),
|
|
64
|
+
});
|
|
65
|
+
break;
|
|
66
|
+
}
|
|
67
|
+
|
|
68
|
+
case "postgres": {
|
|
69
|
+
const connectionString = options.connectionString || process.env.TEST_DATABASE_URL;
|
|
70
|
+
if (!connectionString) {
|
|
71
|
+
throw new Error("TEST_DATABASE_URL not set for postgres");
|
|
72
|
+
}
|
|
73
|
+
|
|
74
|
+
// Dynamic import - pg is optional peer dependency
|
|
75
|
+
// @ts-ignore - pg may not be installed
|
|
76
|
+
const { Pool: PGPool } = await import("pg");
|
|
77
|
+
|
|
78
|
+
db = new Kysely({
|
|
79
|
+
dialect: new PostgresDialect({
|
|
80
|
+
pool: new PGPool({
|
|
81
|
+
connectionString,
|
|
82
|
+
max: 5,
|
|
83
|
+
}),
|
|
84
|
+
}),
|
|
85
|
+
});
|
|
86
|
+
break;
|
|
87
|
+
}
|
|
88
|
+
|
|
89
|
+
case "mysql": {
|
|
90
|
+
const connectionString = options.connectionString || process.env.TEST_DATABASE_URL;
|
|
91
|
+
if (!connectionString) {
|
|
92
|
+
throw new Error("TEST_DATABASE_URL not set for mysql");
|
|
93
|
+
}
|
|
94
|
+
|
|
95
|
+
// Dynamic import - mysql2 is optional peer dependency
|
|
96
|
+
// @ts-ignore - mysql2 may not be installed
|
|
97
|
+
const { createPool: createMySQLPool } = await import("mysql2");
|
|
98
|
+
|
|
99
|
+
db = new Kysely({
|
|
100
|
+
dialect: new MysqlDialect({
|
|
101
|
+
pool: createMySQLPool({
|
|
102
|
+
uri: connectionString,
|
|
103
|
+
connectionLimit: 5,
|
|
104
|
+
}),
|
|
105
|
+
}),
|
|
106
|
+
});
|
|
107
|
+
break;
|
|
108
|
+
}
|
|
109
|
+
|
|
110
|
+
default:
|
|
111
|
+
throw new Error(`Unsupported database type: ${options.type}`);
|
|
112
|
+
}
|
|
113
|
+
|
|
114
|
+
// Run migrations if requested
|
|
115
|
+
if (options.runMigrations && options.migrations) {
|
|
116
|
+
await runMigrations(db, options.migrations);
|
|
117
|
+
}
|
|
118
|
+
|
|
119
|
+
// Attach cleanup handler
|
|
120
|
+
if (cleanupPath) {
|
|
121
|
+
const originalDestroy = db.destroy.bind(db);
|
|
122
|
+
db.destroy = async () => {
|
|
123
|
+
await originalDestroy();
|
|
124
|
+
if (cleanupPath) {
|
|
125
|
+
rmSync(cleanupPath, { recursive: true, force: true });
|
|
126
|
+
}
|
|
127
|
+
};
|
|
128
|
+
}
|
|
129
|
+
|
|
130
|
+
return db;
|
|
131
|
+
}
|
|
132
|
+
|
|
133
|
+
/**
|
|
134
|
+
* Reset test database (truncate all tables)
|
|
135
|
+
*/
|
|
136
|
+
export async function resetTestDatabase(db: Kysely<any>): Promise<void> {
|
|
137
|
+
// Get all tables
|
|
138
|
+
const tables = await db.introspection.getTables();
|
|
139
|
+
|
|
140
|
+
// Truncate each table
|
|
141
|
+
for (const table of tables) {
|
|
142
|
+
await db.deleteFrom(table.name as any).execute();
|
|
143
|
+
}
|
|
144
|
+
}
|
|
145
|
+
|
|
146
|
+
/**
|
|
147
|
+
* Seed test data into database
|
|
148
|
+
*/
|
|
149
|
+
export async function seedTestData(
|
|
150
|
+
db: Kysely<any>,
|
|
151
|
+
data: Record<string, any[]>
|
|
152
|
+
): Promise<void> {
|
|
153
|
+
for (const [table, rows] of Object.entries(data)) {
|
|
154
|
+
if (rows.length === 0) continue;
|
|
155
|
+
|
|
156
|
+
for (const row of rows) {
|
|
157
|
+
await db.insertInto(table as any).values(row).execute();
|
|
158
|
+
}
|
|
159
|
+
}
|
|
160
|
+
}
|
|
161
|
+
|
|
162
|
+
/**
|
|
163
|
+
* Run migrations on test database (not yet implemented)
|
|
164
|
+
*/
|
|
165
|
+
async function runMigrations(_db: Kysely<any>, migrationsPath: string): Promise<void> {
|
|
166
|
+
// TODO: Implement migration runner integration
|
|
167
|
+
// This would scan the migrations path and run pending migrations
|
|
168
|
+
throw new Error(
|
|
169
|
+
`Migration runner not yet implemented. ` +
|
|
170
|
+
`Please run migrations manually or create tables directly in your tests. ` +
|
|
171
|
+
`Attempted path: ${migrationsPath}`
|
|
172
|
+
);
|
|
173
|
+
}
|