@donkeylabs/server 2.0.19 → 2.0.21
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/docs/caching-strategies.md +677 -0
- package/docs/dev-experience.md +656 -0
- package/docs/hot-reload-limitations.md +166 -0
- package/docs/load-testing.md +974 -0
- package/docs/plugin-registry-design.md +1064 -0
- package/docs/production.md +1229 -0
- package/docs/workflows.md +90 -3
- package/package.json +1 -1
- package/src/admin/routes.ts +153 -0
- package/src/core/cron.ts +90 -9
- package/src/core/index.ts +31 -0
- package/src/core/job-adapter-kysely.ts +176 -73
- package/src/core/job-adapter-sqlite.ts +10 -0
- package/src/core/jobs.ts +112 -17
- package/src/core/migrations/workflows/002_add_metadata_column.ts +28 -0
- package/src/core/process-adapter-kysely.ts +62 -21
- package/src/core/storage-adapter-local.test.ts +199 -0
- package/src/core/storage.test.ts +197 -0
- package/src/core/workflow-adapter-kysely.ts +66 -19
- package/src/core/workflow-executor.ts +239 -0
- package/src/core/workflow-proxy.ts +238 -0
- package/src/core/workflow-socket.ts +449 -0
- package/src/core/workflow-state-machine.ts +593 -0
- package/src/core/workflows.test.ts +758 -0
- package/src/core/workflows.ts +705 -595
- package/src/core.ts +17 -6
- package/src/index.ts +14 -0
- package/src/testing/database.test.ts +263 -0
- package/src/testing/database.ts +173 -0
- package/src/testing/e2e.test.ts +189 -0
- package/src/testing/e2e.ts +272 -0
- package/src/testing/index.ts +18 -0
package/src/core.ts
CHANGED
@@ -10,7 +10,7 @@ import type { Cron } from "./core/cron";
 import type { Jobs } from "./core/jobs";
 import type { SSE } from "./core/sse";
 import type { RateLimiter } from "./core/rate-limiter";
-import type { Errors, CustomErrorRegistry } from "./core/errors";
+import type { Errors, CustomErrorRegistry, ErrorFactory } from "./core/errors";
 import type { Workflows } from "./core/workflows";
 import type { Processes } from "./core/processes";
 import type { Audit } from "./core/audit";
@@ -408,7 +408,7 @@ export type InferEvents<T> = UnwrapPluginFactory<T> extends { events?: infer E }
 export type InferClientConfig<T> = UnwrapPluginFactory<T> extends { client?: infer C } ? C : undefined;
 export type InferCustomErrors<T> = UnwrapPluginFactory<T> extends { customErrors?: infer E } ? E : {};

-export type { ExtractServices, ExtractSchemas };
+export type { ExtractServices, ExtractSchemas, ErrorFactory };

 export type Plugin = {
   name: string;
@@ -494,10 +494,21 @@ export class PluginManager {
    * Records that a migration has been applied for a specific plugin.
    */
   private async recordMigration(pluginName: string, migrationName: string): Promise<void> {
-
-
-
-
+    try {
+      // Use ON CONFLICT DO NOTHING to handle race conditions on hot reload
+      await sql`
+        INSERT INTO __donkeylabs_migrations__ (plugin_name, migration_name)
+        VALUES (${pluginName}, ${migrationName})
+        ON CONFLICT (plugin_name, migration_name) DO NOTHING
+      `.execute(this.core.db);
+    } catch (e: any) {
+      // Fallback: ignore UNIQUE constraint errors (migration already recorded)
+      if (e?.code === "SQLITE_CONSTRAINT_UNIQUE" || e?.message?.includes("UNIQUE constraint failed")) {
+        // Already recorded, ignore
+        return;
+      }
+      throw e;
+    }
   }

   /**
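The rewritten recordMigration body treats the insert as idempotent: ON CONFLICT (plugin_name, migration_name) DO NOTHING lets two hot-reload passes record the same migration without either failing, and the catch block still swallows drivers that surface a UNIQUE-constraint error instead. A minimal standalone sketch of that pattern with bun:sqlite — the CREATE TABLE below is an illustrative assumption, not the package's actual __donkeylabs_migrations__ schema:

// idempotent-insert sketch (hypothetical table DDL, for illustration only)
import Database from "bun:sqlite";

const db = new Database(":memory:");
db.run(`
  CREATE TABLE __donkeylabs_migrations__ (
    plugin_name TEXT NOT NULL,
    migration_name TEXT NOT NULL,
    UNIQUE (plugin_name, migration_name)
  )
`);

const record = db.prepare(`
  INSERT INTO __donkeylabs_migrations__ (plugin_name, migration_name)
  VALUES (?, ?)
  ON CONFLICT (plugin_name, migration_name) DO NOTHING
`);

// Two "hot reload" passes record the same migration; the second insert is a no-op.
record.run("billing", "001_create_invoices");
record.run("billing", "001_create_invoices");

const { n } = db
  .query("SELECT COUNT(*) AS n FROM __donkeylabs_migrations__")
  .get() as { n: number };
console.log(n); // 1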
package/src/index.ts
CHANGED
@@ -91,6 +91,14 @@ export function defineConfig(config: DonkeylabsConfig): DonkeylabsConfig {
 // Re-export HttpError for custom error creation
 export { HttpError } from "./core/errors";

+// Core services types
+export {
+  type Logger,
+  type LogLevel,
+  type ErrorFactory,
+  type ErrorFactories,
+} from "./core/index";
+
 // Workflows (step functions)
 export {
   workflow,
@@ -129,3 +137,9 @@
   type IntegrationHarnessOptions,
   type IntegrationHarnessResult,
 } from "./harness";
+
+// E2E Testing - moved to separate subpath export: "@donkeylabs/server/testing"
+// import { createE2EFixtures, defineE2EConfig } from "@donkeylabs/server/testing";
+
+// Database Testing Utilities - moved to separate subpath export: "@donkeylabs/server/testing"
+// import { createTestDatabase, resetTestDatabase } from "@donkeylabs/server/testing";
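The commented-out imports above point at the new home of the testing helpers. A sketch of consuming them through that subpath — this assumes the package.json change in this release declares an "exports" entry for "./testing", which is not visible in this diff:

// example.test.ts — hypothetical consumer of the "@donkeylabs/server/testing" subpath
import { defineE2EConfig, createE2EFixtures } from "@donkeylabs/server/testing";
import { createTestDatabase, resetTestDatabase } from "@donkeylabs/server/testing";

// Playwright-style config and fixtures for browser tests
export const e2eConfig = defineE2EConfig({ baseURL: "http://localhost:3000" });
export const fixtures = createE2EFixtures("http://localhost:3000");

// Isolated throwaway database for unit/integration tests
const db = await createTestDatabase({ type: "sqlite", isolated: true });
// ...create tables and run assertions against db...
await resetTestDatabase(db);
await db.destroy();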
package/src/testing/database.test.ts
ADDED
@@ -0,0 +1,263 @@
// packages/server/src/testing/database.test.ts
import { describe, it, expect, beforeEach, afterEach } from "bun:test";
import {
  createTestDatabase,
  resetTestDatabase,
  seedTestData,
} from "./database";
import type { Kysely } from "kysely";

describe("Database Testing Utilities", () => {
  describe("createTestDatabase", () => {
    let db: Kysely<any>;

    afterEach(async () => {
      if (db) {
        await db.destroy();
      }
    });

    it("should create an isolated SQLite database", async () => {
      db = await createTestDatabase({
        type: "sqlite",
        isolated: true,
      });

      expect(db).toBeDefined();
      // Verify it's functional by creating a table
      await db.schema
        .createTable("test_table")
        .addColumn("id", "text", (col) => col.primaryKey())
        .addColumn("name", "text")
        .execute();

      // Insert and query
      await db
        .insertInto("test_table" as any)
        .values({ id: "1", name: "Test" })
        .execute();

      const result = await db
        .selectFrom("test_table" as any)
        .selectAll()
        .executeTakeFirst();

      expect(result).toEqual({ id: "1", name: "Test" });
    });

    it("should create an in-memory SQLite database", async () => {
      db = await createTestDatabase({
        type: "sqlite",
        isolated: false,
        connectionString: ":memory:",
      });

      expect(db).toBeDefined();

      // Create and use table
      await db.schema
        .createTable("memory_test")
        .addColumn("id", "integer", (col) => col.primaryKey())
        .execute();

      await db
        .insertInto("memory_test" as any)
        .values({ id: 1 })
        .execute();

      const count = await db
        .selectFrom("memory_test" as any)
        .select(db.fn.count("id").as("count"))
        .executeTakeFirst();

      expect(Number(count?.count)).toBe(1);
    });

    it("should cleanup isolated database on destroy", async () => {
      db = await createTestDatabase({
        type: "sqlite",
        isolated: true,
      });

      await db.schema
        .createTable("cleanup_test")
        .addColumn("id", "text", (col) => col.primaryKey())
        .execute();

      // Destroy should not throw and should cleanup
      await db.destroy();

      // Clear reference so afterEach doesn't try to destroy again
      db = null as any;
    });

    it("should throw for postgres without connection string", async () => {
      const originalEnv = process.env.TEST_DATABASE_URL;
      delete process.env.TEST_DATABASE_URL;

      try {
        await expect(
          createTestDatabase({ type: "postgres" })
        ).rejects.toThrow("TEST_DATABASE_URL not set for postgres");
      } finally {
        if (originalEnv) {
          process.env.TEST_DATABASE_URL = originalEnv;
        }
      }
    });

    it("should throw for mysql without connection string", async () => {
      const originalEnv = process.env.TEST_DATABASE_URL;
      delete process.env.TEST_DATABASE_URL;

      try {
        await expect(
          createTestDatabase({ type: "mysql" })
        ).rejects.toThrow("TEST_DATABASE_URL not set for mysql");
      } finally {
        if (originalEnv) {
          process.env.TEST_DATABASE_URL = originalEnv;
        }
      }
    });

    it("should throw for unsupported database type", async () => {
      await expect(
        createTestDatabase({ type: "mongo" as any })
      ).rejects.toThrow("Unsupported database type: mongo");
    });
  });

  describe("resetTestDatabase", () => {
    let db: Kysely<any>;

    beforeEach(async () => {
      db = await createTestDatabase({
        type: "sqlite",
        isolated: true,
      });

      // Create test tables
      await db.schema
        .createTable("users")
        .addColumn("id", "text", (col) => col.primaryKey())
        .addColumn("email", "text")
        .execute();

      await db.schema
        .createTable("posts")
        .addColumn("id", "text", (col) => col.primaryKey())
        .addColumn("title", "text")
        .execute();
    });

    afterEach(async () => {
      if (db) {
        await db.destroy();
      }
    });

    it("should truncate all tables", async () => {
      // Insert data
      await db.insertInto("users" as any).values({ id: "1", email: "a@b.com" }).execute();
      await db.insertInto("users" as any).values({ id: "2", email: "c@d.com" }).execute();
      await db.insertInto("posts" as any).values({ id: "1", title: "Post 1" }).execute();

      // Verify data exists
      const usersBefore = await db.selectFrom("users" as any).selectAll().execute();
      const postsBefore = await db.selectFrom("posts" as any).selectAll().execute();
      expect(usersBefore.length).toBe(2);
      expect(postsBefore.length).toBe(1);

      // Reset
      await resetTestDatabase(db);

      // Verify all tables are empty
      const usersAfter = await db.selectFrom("users" as any).selectAll().execute();
      const postsAfter = await db.selectFrom("posts" as any).selectAll().execute();
      expect(usersAfter.length).toBe(0);
      expect(postsAfter.length).toBe(0);
    });

    it("should handle empty tables", async () => {
      // No data inserted, should not throw
      await resetTestDatabase(db);

      const users = await db.selectFrom("users" as any).selectAll().execute();
      expect(users.length).toBe(0);
    });
  });

  describe("seedTestData", () => {
    let db: Kysely<any>;

    beforeEach(async () => {
      db = await createTestDatabase({
        type: "sqlite",
        isolated: true,
      });

      await db.schema
        .createTable("users")
        .addColumn("id", "text", (col) => col.primaryKey())
        .addColumn("email", "text")
        .addColumn("name", "text")
        .execute();

      await db.schema
        .createTable("orders")
        .addColumn("id", "text", (col) => col.primaryKey())
        .addColumn("user_id", "text")
        .addColumn("total", "integer")
        .execute();
    });

    afterEach(async () => {
      if (db) {
        await db.destroy();
      }
    });

    it("should seed data into multiple tables", async () => {
      await seedTestData(db, {
        users: [
          { id: "u1", email: "user1@test.com", name: "User 1" },
          { id: "u2", email: "user2@test.com", name: "User 2" },
        ],
        orders: [
          { id: "o1", user_id: "u1", total: 100 },
          { id: "o2", user_id: "u1", total: 200 },
          { id: "o3", user_id: "u2", total: 50 },
        ],
      });

      const users = await db.selectFrom("users" as any).selectAll().execute();
      const orders = await db.selectFrom("orders" as any).selectAll().execute();

      expect(users.length).toBe(2);
      expect(orders.length).toBe(3);

      expect(users[0].email).toBe("user1@test.com");
      expect(orders[0].total).toBe(100);
    });

    it("should handle empty arrays", async () => {
      await seedTestData(db, {
        users: [],
      });

      const users = await db.selectFrom("users" as any).selectAll().execute();
      expect(users.length).toBe(0);
    });

    it("should handle single table", async () => {
      await seedTestData(db, {
        users: [{ id: "1", email: "solo@test.com", name: "Solo" }],
      });

      const users = await db.selectFrom("users" as any).selectAll().execute();
      expect(users.length).toBe(1);
      expect(users[0].name).toBe("Solo");
    });
  });
});
package/src/testing/database.ts
ADDED
@@ -0,0 +1,173 @@
// packages/server/src/testing/database.ts
/**
 * Database Testing Utilities
 *
 * Helper functions for setting up and managing test databases.
 *
 * Usage:
 * const db = await createTestDatabase({ type: "sqlite" });
 * await seedTestData(db, { users: [{ email: "test@example.com" }] });
 * // Run tests...
 * await db.destroy();
 */

import { Kysely, PostgresDialect, MysqlDialect } from "kysely";
import { BunSqliteDialect } from "kysely-bun-sqlite";
import Database from "bun:sqlite";
import { mkdtempSync, rmSync } from "fs";
import { tmpdir } from "os";
import { join } from "path";

// pg and mysql2 are optional - dynamically imported when needed

export interface TestDatabaseOptions {
  /** Database type */
  type: "sqlite" | "postgres" | "mysql";

  /** Path to migrations (glob pattern) */
  migrations?: string;

  /** Run migrations automatically */
  runMigrations?: boolean;

  /** Isolated database (deleted after tests) */
  isolated?: boolean;

  /** Connection string (for postgres/mysql) */
  connectionString?: string;
}

/**
 * Create a test database instance
 */
export async function createTestDatabase(options: TestDatabaseOptions): Promise<Kysely<any>> {
  let db: Kysely<any>;
  let cleanupPath: string | null = null;

  switch (options.type) {
    case "sqlite": {
      let dbPath: string;

      if (options.isolated !== false) {
        // Create isolated temp database
        const tmpDir = mkdtempSync(join(tmpdir(), "donkeylabs-test-"));
        dbPath = join(tmpDir, "test.db");
        cleanupPath = tmpDir;
      } else {
        dbPath = options.connectionString || ":memory:";
      }

      db = new Kysely({
        dialect: new BunSqliteDialect({
          database: new Database(dbPath),
        }),
      });
      break;
    }

    case "postgres": {
      const connectionString = options.connectionString || process.env.TEST_DATABASE_URL;
      if (!connectionString) {
        throw new Error("TEST_DATABASE_URL not set for postgres");
      }

      // Dynamic import - pg is optional peer dependency
      // @ts-ignore - pg may not be installed
      const { Pool: PGPool } = await import("pg");

      db = new Kysely({
        dialect: new PostgresDialect({
          pool: new PGPool({
            connectionString,
            max: 5,
          }),
        }),
      });
      break;
    }

    case "mysql": {
      const connectionString = options.connectionString || process.env.TEST_DATABASE_URL;
      if (!connectionString) {
        throw new Error("TEST_DATABASE_URL not set for mysql");
      }

      // Dynamic import - mysql2 is optional peer dependency
      // @ts-ignore - mysql2 may not be installed
      const { createPool: createMySQLPool } = await import("mysql2");

      db = new Kysely({
        dialect: new MysqlDialect({
          pool: createMySQLPool({
            uri: connectionString,
            connectionLimit: 5,
          }),
        }),
      });
      break;
    }

    default:
      throw new Error(`Unsupported database type: ${options.type}`);
  }

  // Run migrations if requested
  if (options.runMigrations && options.migrations) {
    await runMigrations(db, options.migrations);
  }

  // Attach cleanup handler
  if (cleanupPath) {
    const originalDestroy = db.destroy.bind(db);
    db.destroy = async () => {
      await originalDestroy();
      if (cleanupPath) {
        rmSync(cleanupPath, { recursive: true, force: true });
      }
    };
  }

  return db;
}

/**
 * Reset test database (truncate all tables)
 */
export async function resetTestDatabase(db: Kysely<any>): Promise<void> {
  // Get all tables
  const tables = await db.introspection.getTables();

  // Truncate each table
  for (const table of tables) {
    await db.deleteFrom(table.name as any).execute();
  }
}

/**
 * Seed test data into database
 */
export async function seedTestData(
  db: Kysely<any>,
  data: Record<string, any[]>
): Promise<void> {
  for (const [table, rows] of Object.entries(data)) {
    if (rows.length === 0) continue;

    for (const row of rows) {
      await db.insertInto(table as any).values(row).execute();
    }
  }
}

/**
 * Run migrations on test database (not yet implemented)
 */
async function runMigrations(_db: Kysely<any>, migrationsPath: string): Promise<void> {
  // TODO: Implement migration runner integration
  // This would scan the migrations path and run pending migrations
  throw new Error(
    `Migration runner not yet implemented. ` +
    `Please run migrations manually or create tables directly in your tests. ` +
    `Attempted path: ${migrationsPath}`
  );
}
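Taken together, the three exported helpers are meant to be wired into a suite's lifecycle hooks: one isolated database per suite, seed per test, truncate between tests. A short sketch under that assumption (the users table DDL here is illustrative, not part of the package):

// users.service.test.ts — sketch of typical wiring for the helpers above
import { describe, it, expect, beforeAll, afterAll, beforeEach } from "bun:test";
import type { Kysely } from "kysely";
import { createTestDatabase, resetTestDatabase, seedTestData } from "./database";

describe("users service", () => {
  let db: Kysely<any>;

  beforeAll(async () => {
    db = await createTestDatabase({ type: "sqlite", isolated: true });
    await db.schema
      .createTable("users")
      .addColumn("id", "text", (col) => col.primaryKey())
      .addColumn("email", "text")
      .execute();
  });

  beforeEach(async () => {
    // Start every test from a clean, freshly seeded state
    await resetTestDatabase(db);
    await seedTestData(db, { users: [{ id: "u1", email: "user1@test.com" }] });
  });

  afterAll(async () => {
    await db.destroy();
  });

  it("finds the seeded user", async () => {
    const user = await db
      .selectFrom("users" as any)
      .selectAll()
      .where("id", "=", "u1")
      .executeTakeFirst();
    expect(user?.email).toBe("user1@test.com");
  });
});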
package/src/testing/e2e.test.ts
ADDED
@@ -0,0 +1,189 @@
// packages/server/src/testing/e2e.test.ts
import { describe, it, expect } from "bun:test";
import { defineE2EConfig, createE2EFixtures } from "./e2e";

describe("E2E Testing Utilities", () => {
  describe("defineE2EConfig", () => {
    it("should return default configuration", () => {
      const config = defineE2EConfig({
        baseURL: "http://localhost:3000",
      });

      expect(config.testDir).toBe("./e2e");
      expect(config.timeout).toBe(30000);
      expect(config.expect?.timeout).toBe(5000);
      expect(config.fullyParallel).toBe(true);
      expect(config.use?.baseURL).toBe("http://localhost:3000");
      expect(config.use?.trace).toBe("on-first-retry");
      expect(config.use?.screenshot).toBe("only-on-failure");
    });

    it("should use custom port", () => {
      const config = defineE2EConfig({
        baseURL: "http://localhost:4000",
        port: 4000,
      });

      expect(config.use?.baseURL).toBe("http://localhost:4000");
    });

    it("should use custom timeout", () => {
      const config = defineE2EConfig({
        baseURL: "http://localhost:3000",
        timeout: 60000,
      });

      expect(config.timeout).toBe(60000);
    });

    it("should configure chromium by default", () => {
      const config = defineE2EConfig({
        baseURL: "http://localhost:3000",
      });

      expect(config.projects?.length).toBe(1);
      expect(config.projects?.[0].name).toBe("chromium");
    });

    it("should add firefox when specified", () => {
      const config = defineE2EConfig({
        baseURL: "http://localhost:3000",
        browsers: ["firefox"],
      });

      // Chromium + Firefox
      expect(config.projects?.length).toBe(2);
      expect(config.projects?.some((p) => p.name === "firefox")).toBe(true);
    });

    it("should add webkit when specified", () => {
      const config = defineE2EConfig({
        baseURL: "http://localhost:3000",
        browsers: ["webkit"],
      });

      expect(config.projects?.some((p) => p.name === "webkit")).toBe(true);
    });

    it("should add mobile viewports when testMobile is true", () => {
      const config = defineE2EConfig({
        baseURL: "http://localhost:3000",
        testMobile: true,
      });

      const mobileProjects = config.projects?.filter(
        (p) => p.name.includes("Mobile")
      );
      expect(mobileProjects?.length).toBe(2);
      expect(mobileProjects?.some((p) => p.name === "Mobile Chrome")).toBe(true);
      expect(mobileProjects?.some((p) => p.name === "Mobile Safari")).toBe(true);
    });

    it("should configure webServer for auto-start", () => {
      const config = defineE2EConfig({
        baseURL: "http://localhost:3000",
        serverEntry: "./src/server.ts",
        autoStart: true,
      });

      expect(config.webServer).toBeDefined();
      expect(config.webServer?.command).toBe("bun ./src/server.ts");
      expect(config.webServer?.url).toBe("http://localhost:3000");
    });

    it("should use default dev command when no serverEntry", () => {
      const config = defineE2EConfig({
        baseURL: "http://localhost:3000",
      });

      expect(config.webServer?.command).toBe("bun run dev");
    });

    it("should disable webServer when autoStart is false", () => {
      const config = defineE2EConfig({
        baseURL: "http://localhost:3000",
        autoStart: false,
      });

      expect(config.webServer).toBeUndefined();
    });

    it("should configure CI settings based on environment", () => {
      const originalCI = process.env.CI;

      // Test non-CI
      delete process.env.CI;
      const nonCIConfig = defineE2EConfig({
        baseURL: "http://localhost:3000",
      });
      expect(nonCIConfig.forbidOnly).toBe(false);
      expect(nonCIConfig.retries).toBe(0);

      // Test CI
      process.env.CI = "true";
      const ciConfig = defineE2EConfig({
        baseURL: "http://localhost:3000",
      });
      expect(ciConfig.forbidOnly).toBe(true);
      expect(ciConfig.retries).toBe(2);
      expect(ciConfig.workers).toBe(1);

      // Restore
      if (originalCI) {
        process.env.CI = originalCI;
      } else {
        delete process.env.CI;
      }
    });

    it("should configure reporters", () => {
      const config = defineE2EConfig({
        baseURL: "http://localhost:3000",
      });

      expect(config.reporter).toEqual([["html"], ["list"]]);
    });
  });

  describe("createE2EFixtures", () => {
    const baseURL = "http://localhost:9999";

    it("should create api fixture with all HTTP methods", () => {
      const fixtures = createE2EFixtures(baseURL);

      expect(fixtures.api).toBeDefined();
      expect(typeof fixtures.api).toBe("function");
    });

    it("should create seed fixture", () => {
      const fixtures = createE2EFixtures(baseURL);

      expect(fixtures.seed).toBeDefined();
      expect(typeof fixtures.seed).toBe("function");
    });

    it("should create cleanup fixture", () => {
      const fixtures = createE2EFixtures(baseURL);

      expect(fixtures.cleanup).toBeDefined();
      expect(typeof fixtures.cleanup).toBe("function");
    });

    // Integration tests for fixtures would require a running server
    // These tests verify the fixture structure
  });

  describe("E2EFixtures API client", () => {
    // These tests require a mock server
    // For now we just verify the structure

    it("should have correct fixture structure", () => {
      const fixtures = createE2EFixtures("http://test:3000");

      // Verify all expected fixtures exist
      expect(Object.keys(fixtures)).toContain("api");
      expect(Object.keys(fixtures)).toContain("seed");
      expect(Object.keys(fixtures)).toContain("cleanup");
    });
  });
});
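These assertions pin down defineE2EConfig's option surface (baseURL, port, timeout, browsers, testMobile, serverEntry, autoStart) and the { api, seed, cleanup } shape returned by createE2EFixtures. A sketch of how a consuming project might use them in its Playwright config, assuming the returned object is compatible with Playwright's defineConfig input and that the "./testing" subpath export is wired up:

// playwright.config.ts — hypothetical consumer sketch
import { defineE2EConfig, createE2EFixtures } from "@donkeylabs/server/testing";

export default defineE2EConfig({
  baseURL: "http://localhost:3000",
  serverEntry: "./src/server.ts", // webServer command becomes "bun ./src/server.ts"
  autoStart: true,
  browsers: ["firefox", "webkit"], // added alongside the default chromium project
  testMobile: true, // adds "Mobile Chrome" and "Mobile Safari" projects
  timeout: 60000,
});

// Fixture surface, per the tests above: each entry is a function whose exact
// call signature lives in src/testing/e2e.ts (not shown in this section).
const { api, seed, cleanup } = createE2EFixtures("http://localhost:3000");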