bunsane 0.2.4 → 0.2.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/core/ArcheType.ts +67 -34
- package/core/BatchLoader.ts +215 -30
- package/core/Entity.ts +2 -2
- package/core/RequestContext.ts +15 -10
- package/core/RequestLoaders.ts +4 -2
- package/core/cache/CacheProvider.ts +1 -0
- package/core/cache/MemoryCache.ts +10 -1
- package/core/cache/RedisCache.ts +16 -2
- package/core/validateEnv.ts +8 -0
- package/database/DatabaseHelper.ts +113 -1
- package/database/index.ts +78 -45
- package/docs/SCALABILITY_PLAN.md +175 -0
- package/package.json +13 -2
- package/query/CTENode.ts +44 -24
- package/query/ComponentInclusionNode.ts +181 -91
- package/query/Query.ts +9 -9
- package/tests/benchmark/BENCHMARK_DATABASES_PLAN.md +338 -0
- package/tests/benchmark/bunfig.toml +9 -0
- package/tests/benchmark/fixtures/EcommerceComponents.ts +283 -0
- package/tests/benchmark/fixtures/EcommerceDataGenerators.ts +301 -0
- package/tests/benchmark/fixtures/RelationTracker.ts +159 -0
- package/tests/benchmark/fixtures/index.ts +6 -0
- package/tests/benchmark/index.ts +22 -0
- package/tests/benchmark/noop-preload.ts +3 -0
- package/tests/benchmark/runners/BenchmarkLoader.ts +132 -0
- package/tests/benchmark/runners/index.ts +4 -0
- package/tests/benchmark/scenarios/query-benchmarks.test.ts +465 -0
- package/tests/benchmark/scripts/generate-db.ts +344 -0
- package/tests/benchmark/scripts/run-benchmarks.ts +97 -0
- package/tests/integration/query/Query.complexAnalysis.test.ts +557 -0
- package/tests/integration/query/Query.explainAnalyze.test.ts +233 -0
- package/tests/stress/fixtures/RealisticComponents.ts +235 -0
- package/tests/stress/scenarios/realistic-scenarios.test.ts +1081 -0
- package/tests/stress/scenarios/timeout-investigation.test.ts +522 -0
- package/tests/unit/BatchLoader.test.ts +139 -25
|
@@ -0,0 +1,344 @@
|
|
|
1
|
+
#!/usr/bin/env bun
|
|
2
|
+
/**
|
|
3
|
+
* CLI script to generate persistent PGlite benchmark databases.
|
|
4
|
+
*
|
|
5
|
+
* This script is self-contained and does not depend on the framework's
|
|
6
|
+
* database connection - it writes directly to PGlite.
|
|
7
|
+
*
|
|
8
|
+
* Usage:
|
|
9
|
+
* bun tests/benchmark/scripts/generate-db.ts [tier] [--force] [--all]
|
|
10
|
+
*
|
|
11
|
+
* Examples:
|
|
12
|
+
* bun tests/benchmark/scripts/generate-db.ts xs
|
|
13
|
+
* bun tests/benchmark/scripts/generate-db.ts md --force
|
|
14
|
+
* bun tests/benchmark/scripts/generate-db.ts --all
|
|
15
|
+
*/
|
|
16
|
+
import { PGlite } from '@electric-sql/pglite';
|
|
17
|
+
import { existsSync, rmSync, mkdirSync } from 'node:fs';
|
|
18
|
+
import { join, dirname } from 'node:path';
|
|
19
|
+
import { fileURLToPath } from 'node:url';
|
|
20
|
+
import { createHash } from 'node:crypto';
|
|
21
|
+
|
|
22
|
+
import {
|
|
23
|
+
SeededRandom,
|
|
24
|
+
generateUserData,
|
|
25
|
+
generateProductData,
|
|
26
|
+
generateOrderData,
|
|
27
|
+
generateOrderItemData,
|
|
28
|
+
generateReviewData
|
|
29
|
+
} from '../fixtures/EcommerceDataGenerators';
|
|
30
|
+
import { RelationTracker } from '../fixtures/RelationTracker';
|
|
31
|
+
|
|
32
|
+
// Resolve paths relative to this script file (ESM modules have no built-in __dirname).
const __dirname = dirname(fileURLToPath(import.meta.url));
// Generated tier databases are stored under tests/benchmark/databases/<tier>.
const DATABASES_DIR = join(__dirname, '..', 'databases');

// Database tier configurations
// Row counts per component type for each benchmark size tier (xs..xl).
const TIERS = {
  xs: { users: 1000, products: 2000, orders: 3000, orderItems: 3000, reviews: 1000 },
  sm: { users: 5000, products: 10000, orders: 15000, orderItems: 15000, reviews: 5000 },
  md: { users: 10000, products: 20000, orders: 30000, orderItems: 30000, reviews: 10000 },
  lg: { users: 50000, products: 100000, orders: 150000, orderItems: 150000, reviews: 50000 },
  xl: { users: 100000, products: 200000, orders: 300000, orderItems: 300000, reviews: 100000 }
} as const;

type Tier = keyof typeof TIERS;

// Fixed RNG seed so repeated generation runs produce identical data.
const DEFAULT_SEED = 42;
// Number of rows per multi-row INSERT issued by seedComponent.
const BATCH_SIZE = 1000;
|
|
48
|
+
|
|
49
|
+
// Component names and their type IDs (generated deterministically)
|
|
50
|
+
const COMPONENT_TYPE_IDS = new Map<string, string>();
|
|
51
|
+
|
|
52
|
+
function generateTypeId(name: string): string {
|
|
53
|
+
if (COMPONENT_TYPE_IDS.has(name)) {
|
|
54
|
+
return COMPONENT_TYPE_IDS.get(name)!;
|
|
55
|
+
}
|
|
56
|
+
// Generate a SHA256 hash (64 hex chars, matches framework's metadata-storage.ts)
|
|
57
|
+
const typeId = createHash('sha256').update(name).digest('hex');
|
|
58
|
+
COMPONENT_TYPE_IDS.set(name, typeId);
|
|
59
|
+
return typeId;
|
|
60
|
+
}
|
|
61
|
+
|
|
62
|
+
// Simple UUID v7 implementation (time-ordered)
|
|
63
|
+
function uuidv7(): string {
|
|
64
|
+
const now = Date.now();
|
|
65
|
+
const timeHex = now.toString(16).padStart(12, '0');
|
|
66
|
+
const randomBytes = crypto.getRandomValues(new Uint8Array(10));
|
|
67
|
+
const randomHex = Array.from(randomBytes).map(b => b.toString(16).padStart(2, '0')).join('');
|
|
68
|
+
return `${timeHex.slice(0, 8)}-${timeHex.slice(8, 12)}-7${randomHex.slice(0, 3)}-${(0x80 | (randomBytes[4]! & 0x3f)).toString(16)}${randomHex.slice(5, 7)}-${randomHex.slice(7, 19)}`;
|
|
69
|
+
}
|
|
70
|
+
|
|
71
|
+
/** Summary of one tier's database generation run. */
interface GenerationResult {
  /** Tier that was generated. */
  tier: Tier;
  /** Total number of entities inserted (sum of all component counts). */
  totalEntities: number;
  /** Wall-clock generation time, in seconds. */
  totalTime: number;
  /** Throughput: totalEntities / totalTime, rounded. */
  recordsPerSecond: number;
  /** Filesystem path of the generated PGlite database directory. */
  path: string;
}
|
|
78
|
+
|
|
79
|
+
/**
 * Creates the entity/component tables (entities, components,
 * entity_components) plus the indexes the benchmarks rely on.
 * Idempotent: every statement uses IF NOT EXISTS, so rerunning is safe.
 *
 * @param pg open PGlite instance to create the schema in
 */
async function initializeSchema(pg: PGlite): Promise<void> {
  await pg.exec(`
    CREATE TABLE IF NOT EXISTS entities (
      id UUID PRIMARY KEY,
      created_at TIMESTAMPTZ DEFAULT NOW(),
      updated_at TIMESTAMPTZ DEFAULT NOW(),
      deleted_at TIMESTAMPTZ DEFAULT NULL
    );

    CREATE TABLE IF NOT EXISTS components (
      id UUID PRIMARY KEY,
      entity_id UUID NOT NULL REFERENCES entities(id) ON DELETE CASCADE,
      type_id VARCHAR(64) NOT NULL,
      name VARCHAR(128),
      data JSONB NOT NULL DEFAULT '{}',
      created_at TIMESTAMPTZ DEFAULT NOW(),
      updated_at TIMESTAMPTZ DEFAULT NOW(),
      deleted_at TIMESTAMPTZ DEFAULT NULL
    );

    CREATE TABLE IF NOT EXISTS entity_components (
      entity_id UUID NOT NULL REFERENCES entities(id) ON DELETE CASCADE,
      type_id VARCHAR(64) NOT NULL,
      component_id UUID NOT NULL REFERENCES components(id) ON DELETE CASCADE,
      created_at TIMESTAMPTZ DEFAULT NOW(),
      updated_at TIMESTAMPTZ DEFAULT NOW(),
      deleted_at TIMESTAMPTZ DEFAULT NULL,
      PRIMARY KEY (entity_id, type_id)
    );

    CREATE INDEX IF NOT EXISTS idx_components_entity_id ON components(entity_id);
    CREATE INDEX IF NOT EXISTS idx_components_type_id ON components(type_id);
    CREATE INDEX IF NOT EXISTS idx_components_name ON components(name);
    CREATE INDEX IF NOT EXISTS idx_entity_components_type_id ON entity_components(type_id);
    CREATE INDEX IF NOT EXISTS idx_entities_deleted_null ON entities(id) WHERE deleted_at IS NULL;
  `);
}
|
|
116
|
+
|
|
117
|
+
async function seedComponent(
|
|
118
|
+
pg: PGlite,
|
|
119
|
+
componentName: string,
|
|
120
|
+
count: number,
|
|
121
|
+
dataGenerator: (index: number) => Record<string, any>,
|
|
122
|
+
tracker: RelationTracker,
|
|
123
|
+
trackFn?: (entityId: string, data: Record<string, any>) => void,
|
|
124
|
+
onProgress?: (current: number) => void
|
|
125
|
+
): Promise<string[]> {
|
|
126
|
+
const typeId = generateTypeId(componentName);
|
|
127
|
+
const entityIds: string[] = [];
|
|
128
|
+
|
|
129
|
+
for (let i = 0; i < count; i += BATCH_SIZE) {
|
|
130
|
+
const batchSize = Math.min(BATCH_SIZE, count - i);
|
|
131
|
+
const now = new Date().toISOString();
|
|
132
|
+
|
|
133
|
+
let entitiesValues = '';
|
|
134
|
+
let componentsValues = '';
|
|
135
|
+
let entityComponentsValues = '';
|
|
136
|
+
|
|
137
|
+
for (let j = 0; j < batchSize; j++) {
|
|
138
|
+
const entityId = uuidv7();
|
|
139
|
+
const componentId = uuidv7();
|
|
140
|
+
const data = dataGenerator(i + j);
|
|
141
|
+
|
|
142
|
+
entityIds.push(entityId);
|
|
143
|
+
|
|
144
|
+
if (trackFn) {
|
|
145
|
+
trackFn(entityId, data);
|
|
146
|
+
}
|
|
147
|
+
|
|
148
|
+
const sep = j > 0 ? ',' : '';
|
|
149
|
+
entitiesValues += `${sep}('${entityId}', '${now}', '${now}')`;
|
|
150
|
+
componentsValues += `${sep}('${componentId}', '${entityId}', '${typeId}', '${componentName}', '${JSON.stringify(data).replace(/'/g, "''")}', '${now}', '${now}')`;
|
|
151
|
+
entityComponentsValues += `${sep}('${entityId}', '${typeId}', '${componentId}', '${now}', '${now}')`;
|
|
152
|
+
}
|
|
153
|
+
|
|
154
|
+
await pg.exec(`INSERT INTO entities (id, created_at, updated_at) VALUES ${entitiesValues}`);
|
|
155
|
+
await pg.exec(`INSERT INTO components (id, entity_id, type_id, name, data, created_at, updated_at) VALUES ${componentsValues}`);
|
|
156
|
+
await pg.exec(`INSERT INTO entity_components (entity_id, type_id, component_id, created_at, updated_at) VALUES ${entityComponentsValues} ON CONFLICT (entity_id, type_id) DO NOTHING`);
|
|
157
|
+
|
|
158
|
+
if (onProgress) {
|
|
159
|
+
onProgress(i + batchSize);
|
|
160
|
+
}
|
|
161
|
+
}
|
|
162
|
+
|
|
163
|
+
return entityIds;
|
|
164
|
+
}
|
|
165
|
+
|
|
166
|
+
async function generateDatabase(tier: Tier, force: boolean): Promise<GenerationResult> {
|
|
167
|
+
const config = TIERS[tier];
|
|
168
|
+
const dbPath = join(DATABASES_DIR, tier);
|
|
169
|
+
|
|
170
|
+
if (existsSync(dbPath)) {
|
|
171
|
+
if (!force) {
|
|
172
|
+
console.log(`Database for tier '${tier}' already exists at ${dbPath}`);
|
|
173
|
+
console.log('Use --force to regenerate');
|
|
174
|
+
process.exit(0);
|
|
175
|
+
}
|
|
176
|
+
console.log(`Removing existing database at ${dbPath}...`);
|
|
177
|
+
rmSync(dbPath, { recursive: true, force: true });
|
|
178
|
+
}
|
|
179
|
+
|
|
180
|
+
mkdirSync(dbPath, { recursive: true });
|
|
181
|
+
|
|
182
|
+
console.log(`\n=== Generating ${tier.toUpperCase()} tier database ===`);
|
|
183
|
+
console.log(`Path: ${dbPath}`);
|
|
184
|
+
console.log(`Configuration:`);
|
|
185
|
+
console.log(` Users: ${config.users.toLocaleString()}`);
|
|
186
|
+
console.log(` Products: ${config.products.toLocaleString()}`);
|
|
187
|
+
console.log(` Orders: ${config.orders.toLocaleString()}`);
|
|
188
|
+
console.log(` Order Items: ${config.orderItems.toLocaleString()}`);
|
|
189
|
+
console.log(` Reviews: ${config.reviews.toLocaleString()}`);
|
|
190
|
+
|
|
191
|
+
const totalEntities = config.users + config.products + config.orders + config.orderItems + config.reviews;
|
|
192
|
+
console.log(` Total: ${totalEntities.toLocaleString()}`);
|
|
193
|
+
console.log('');
|
|
194
|
+
|
|
195
|
+
const startTime = performance.now();
|
|
196
|
+
|
|
197
|
+
console.log('Initializing PGlite...');
|
|
198
|
+
const pg = new PGlite(dbPath, { relaxedDurability: true });
|
|
199
|
+
await pg.waitReady;
|
|
200
|
+
|
|
201
|
+
console.log('Creating schema...');
|
|
202
|
+
await initializeSchema(pg);
|
|
203
|
+
|
|
204
|
+
const tracker = new RelationTracker();
|
|
205
|
+
const rng = new SeededRandom(DEFAULT_SEED);
|
|
206
|
+
|
|
207
|
+
// Seed Users
|
|
208
|
+
console.log('\nSeeding Users...');
|
|
209
|
+
const userStart = performance.now();
|
|
210
|
+
await seedComponent(
|
|
211
|
+
pg,
|
|
212
|
+
'BenchUser',
|
|
213
|
+
config.users,
|
|
214
|
+
(idx) => generateUserData(idx, rng),
|
|
215
|
+
tracker,
|
|
216
|
+
(entityId) => tracker.addUser(entityId),
|
|
217
|
+
(current) => process.stdout.write(`\r Progress: ${current.toLocaleString()}/${config.users.toLocaleString()}`)
|
|
218
|
+
);
|
|
219
|
+
console.log(`\n Done in ${((performance.now() - userStart) / 1000).toFixed(1)}s`);
|
|
220
|
+
|
|
221
|
+
// Seed Products
|
|
222
|
+
console.log('\nSeeding Products...');
|
|
223
|
+
const productStart = performance.now();
|
|
224
|
+
await seedComponent(
|
|
225
|
+
pg,
|
|
226
|
+
'BenchProduct',
|
|
227
|
+
config.products,
|
|
228
|
+
(idx) => generateProductData(idx, rng),
|
|
229
|
+
tracker,
|
|
230
|
+
(entityId) => tracker.addProduct(entityId),
|
|
231
|
+
(current) => process.stdout.write(`\r Progress: ${current.toLocaleString()}/${config.products.toLocaleString()}`)
|
|
232
|
+
);
|
|
233
|
+
console.log(`\n Done in ${((performance.now() - productStart) / 1000).toFixed(1)}s`);
|
|
234
|
+
|
|
235
|
+
// Seed Orders
|
|
236
|
+
console.log('\nSeeding Orders...');
|
|
237
|
+
const orderStart = performance.now();
|
|
238
|
+
await seedComponent(
|
|
239
|
+
pg,
|
|
240
|
+
'BenchOrder',
|
|
241
|
+
config.orders,
|
|
242
|
+
(idx) => generateOrderData(idx, rng, tracker),
|
|
243
|
+
tracker,
|
|
244
|
+
(entityId, data) => tracker.addOrder(entityId, data.userId),
|
|
245
|
+
(current) => process.stdout.write(`\r Progress: ${current.toLocaleString()}/${config.orders.toLocaleString()}`)
|
|
246
|
+
);
|
|
247
|
+
console.log(`\n Done in ${((performance.now() - orderStart) / 1000).toFixed(1)}s`);
|
|
248
|
+
|
|
249
|
+
// Seed Order Items
|
|
250
|
+
console.log('\nSeeding Order Items...');
|
|
251
|
+
const itemStart = performance.now();
|
|
252
|
+
await seedComponent(
|
|
253
|
+
pg,
|
|
254
|
+
'BenchOrderItem',
|
|
255
|
+
config.orderItems,
|
|
256
|
+
(idx) => generateOrderItemData(idx, rng, tracker),
|
|
257
|
+
tracker,
|
|
258
|
+
undefined,
|
|
259
|
+
(current) => process.stdout.write(`\r Progress: ${current.toLocaleString()}/${config.orderItems.toLocaleString()}`)
|
|
260
|
+
);
|
|
261
|
+
console.log(`\n Done in ${((performance.now() - itemStart) / 1000).toFixed(1)}s`);
|
|
262
|
+
|
|
263
|
+
// Seed Reviews
|
|
264
|
+
console.log('\nSeeding Reviews...');
|
|
265
|
+
const reviewStart = performance.now();
|
|
266
|
+
await seedComponent(
|
|
267
|
+
pg,
|
|
268
|
+
'BenchReview',
|
|
269
|
+
config.reviews,
|
|
270
|
+
(idx) => generateReviewData(idx, rng, tracker),
|
|
271
|
+
tracker,
|
|
272
|
+
undefined,
|
|
273
|
+
(current) => process.stdout.write(`\r Progress: ${current.toLocaleString()}/${config.reviews.toLocaleString()}`)
|
|
274
|
+
);
|
|
275
|
+
console.log(`\n Done in ${((performance.now() - reviewStart) / 1000).toFixed(1)}s`);
|
|
276
|
+
|
|
277
|
+
// Run VACUUM ANALYZE
|
|
278
|
+
console.log('\nRunning VACUUM ANALYZE...');
|
|
279
|
+
await pg.exec('VACUUM ANALYZE entities');
|
|
280
|
+
await pg.exec('VACUUM ANALYZE components');
|
|
281
|
+
await pg.exec('VACUUM ANALYZE entity_components');
|
|
282
|
+
|
|
283
|
+
console.log('Syncing to disk...');
|
|
284
|
+
await pg.close();
|
|
285
|
+
|
|
286
|
+
const totalTime = (performance.now() - startTime) / 1000;
|
|
287
|
+
const recordsPerSecond = Math.round(totalEntities / totalTime);
|
|
288
|
+
|
|
289
|
+
console.log('\n=== Generation Complete ===');
|
|
290
|
+
console.log(`Total time: ${totalTime.toFixed(1)}s`);
|
|
291
|
+
console.log(`Records/second: ${recordsPerSecond.toLocaleString()}`);
|
|
292
|
+
console.log(`Database path: ${dbPath}`);
|
|
293
|
+
|
|
294
|
+
return {
|
|
295
|
+
tier,
|
|
296
|
+
totalEntities,
|
|
297
|
+
totalTime,
|
|
298
|
+
recordsPerSecond,
|
|
299
|
+
path: dbPath
|
|
300
|
+
};
|
|
301
|
+
}
|
|
302
|
+
|
|
303
|
+
// Parse CLI arguments
|
|
304
|
+
const args = process.argv.slice(2);
|
|
305
|
+
const force = args.includes('--force');
|
|
306
|
+
const all = args.includes('--all');
|
|
307
|
+
const tierArg = args.find(a => !a.startsWith('--'));
|
|
308
|
+
|
|
309
|
+
if (!all && !tierArg) {
|
|
310
|
+
console.log('Usage: bun tests/benchmark/scripts/generate-db.ts [tier] [--force] [--all]');
|
|
311
|
+
console.log('\nTiers: xs, sm, md, lg, xl');
|
|
312
|
+
console.log('\nOptions:');
|
|
313
|
+
console.log(' --force Regenerate even if database exists');
|
|
314
|
+
console.log(' --all Generate all tiers');
|
|
315
|
+
console.log('\nExamples:');
|
|
316
|
+
console.log(' bun tests/benchmark/scripts/generate-db.ts xs');
|
|
317
|
+
console.log(' bun tests/benchmark/scripts/generate-db.ts md --force');
|
|
318
|
+
console.log(' bun tests/benchmark/scripts/generate-db.ts --all');
|
|
319
|
+
process.exit(1);
|
|
320
|
+
}
|
|
321
|
+
|
|
322
|
+
if (all) {
|
|
323
|
+
console.log('Generating all database tiers...\n');
|
|
324
|
+
const results: GenerationResult[] = [];
|
|
325
|
+
|
|
326
|
+
for (const tier of Object.keys(TIERS) as Tier[]) {
|
|
327
|
+
results.push(await generateDatabase(tier, force));
|
|
328
|
+
console.log('');
|
|
329
|
+
}
|
|
330
|
+
|
|
331
|
+
console.log('\n=== Summary ===');
|
|
332
|
+
for (const r of results) {
|
|
333
|
+
console.log(`${r.tier.toUpperCase().padEnd(3)} | ${r.totalEntities.toLocaleString().padStart(10)} entities | ${r.totalTime.toFixed(1).padStart(6)}s | ${r.recordsPerSecond.toLocaleString().padStart(8)} rec/s`);
|
|
334
|
+
}
|
|
335
|
+
} else {
|
|
336
|
+
const tier = tierArg as Tier;
|
|
337
|
+
if (!TIERS[tier]) {
|
|
338
|
+
console.error(`Unknown tier: ${tier}`);
|
|
339
|
+
console.error('Valid tiers: xs, sm, md, lg, xl');
|
|
340
|
+
process.exit(1);
|
|
341
|
+
}
|
|
342
|
+
|
|
343
|
+
await generateDatabase(tier, force);
|
|
344
|
+
}
|
|
@@ -0,0 +1,97 @@
|
|
|
1
|
+
#!/usr/bin/env bun
|
|
2
|
+
/**
|
|
3
|
+
* Benchmark runner script.
|
|
4
|
+
*
|
|
5
|
+
* Loads a pre-generated PGlite database and runs benchmarks against it.
|
|
6
|
+
* Sets up the correct environment variables before spawning the test process.
|
|
7
|
+
*
|
|
8
|
+
* Usage:
|
|
9
|
+
* bun tests/benchmark/scripts/run-benchmarks.ts [tier]
|
|
10
|
+
* bun tests/benchmark/scripts/run-benchmarks.ts xs
|
|
11
|
+
* bun tests/benchmark/scripts/run-benchmarks.ts md
|
|
12
|
+
*/
|
|
13
|
+
import { PGlite } from '@electric-sql/pglite';
|
|
14
|
+
import { PGLiteSocketServer } from '@electric-sql/pglite-socket';
|
|
15
|
+
import { existsSync } from 'node:fs';
|
|
16
|
+
import { join, dirname } from 'node:path';
|
|
17
|
+
import { fileURLToPath } from 'node:url';
|
|
18
|
+
import { spawn } from 'child_process';
|
|
19
|
+
|
|
20
|
+
const __dirname = dirname(fileURLToPath(import.meta.url));
|
|
21
|
+
const DATABASES_DIR = join(__dirname, '..', 'databases');
|
|
22
|
+
const PORT = 54322;
|
|
23
|
+
|
|
24
|
+
type Tier = 'xs' | 'sm' | 'md' | 'lg' | 'xl';
|
|
25
|
+
|
|
26
|
+
const tier = (process.argv[2] || 'xs') as Tier;
|
|
27
|
+
const dbPath = join(DATABASES_DIR, tier);
|
|
28
|
+
|
|
29
|
+
if (!existsSync(dbPath)) {
|
|
30
|
+
console.error(`Benchmark database for tier '${tier}' not found at ${dbPath}`);
|
|
31
|
+
console.error('\nGenerate it first with:');
|
|
32
|
+
console.error(` bun tests/benchmark/scripts/generate-db.ts ${tier}`);
|
|
33
|
+
process.exit(1);
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
console.log(`[benchmark] Loading ${tier.toUpperCase()} tier database from ${dbPath}...`);
|
|
37
|
+
|
|
38
|
+
const pg = new PGlite(dbPath);
|
|
39
|
+
await pg.waitReady;
|
|
40
|
+
|
|
41
|
+
// Verify database has data
|
|
42
|
+
const countResult = await pg.query<{ count: string }>('SELECT COUNT(*) as count FROM entities');
|
|
43
|
+
const entityCount = parseInt(countResult.rows[0]?.count || '0');
|
|
44
|
+
|
|
45
|
+
if (entityCount === 0) {
|
|
46
|
+
await pg.close();
|
|
47
|
+
console.error(`Benchmark database for tier '${tier}' is empty.`);
|
|
48
|
+
console.error('Regenerate with:');
|
|
49
|
+
console.error(` bun tests/benchmark/scripts/generate-db.ts ${tier} --force`);
|
|
50
|
+
process.exit(1);
|
|
51
|
+
}
|
|
52
|
+
|
|
53
|
+
console.log(`[benchmark] Loaded ${entityCount.toLocaleString()} entities`);
|
|
54
|
+
|
|
55
|
+
const server = new PGLiteSocketServer({ db: pg, port: PORT });
|
|
56
|
+
await server.start();
|
|
57
|
+
console.log(`[benchmark] Socket server running on port ${PORT}`);
|
|
58
|
+
|
|
59
|
+
// Spawn the test process with correct env vars set before import
|
|
60
|
+
// Use --config to specify benchmark-specific bunfig without the standard preload
|
|
61
|
+
const proc = spawn('bun', ['test', '--config', 'tests/benchmark/bunfig.toml', 'tests/benchmark/scenarios/', '--timeout', '300000'], {
|
|
62
|
+
env: {
|
|
63
|
+
...process.env,
|
|
64
|
+
SKIP_TEST_DB_SETUP: 'true',
|
|
65
|
+
USE_PGLITE: 'true',
|
|
66
|
+
BENCHMARK_TIER: tier,
|
|
67
|
+
// Clear DB_CONNECTION_URL so individual POSTGRES_* vars take precedence
|
|
68
|
+
DB_CONNECTION_URL: '',
|
|
69
|
+
POSTGRES_HOST: 'localhost',
|
|
70
|
+
POSTGRES_PORT: String(PORT),
|
|
71
|
+
POSTGRES_USER: 'postgres',
|
|
72
|
+
POSTGRES_PASSWORD: 'postgres',
|
|
73
|
+
POSTGRES_DB: 'postgres',
|
|
74
|
+
POSTGRES_MAX_CONNECTIONS: '10',
|
|
75
|
+
LOG_LEVEL: 'info',
|
|
76
|
+
// Disable direct partition access since PGlite uses a single components table
|
|
77
|
+
BUNSANE_USE_DIRECT_PARTITION: 'false',
|
|
78
|
+
// Disable LATERAL joins - they don't work correctly with INTERSECT queries
|
|
79
|
+
BUNSANE_USE_LATERAL_JOINS: 'false',
|
|
80
|
+
},
|
|
81
|
+
stdio: 'inherit',
|
|
82
|
+
cwd: join(__dirname, '..', '..', '..'),
|
|
83
|
+
});
|
|
84
|
+
|
|
85
|
+
proc.on('exit', async (code) => {
|
|
86
|
+
console.log('[benchmark] Stopping server...');
|
|
87
|
+
try { await server.stop(); } catch {}
|
|
88
|
+
try { await pg.close(); } catch {}
|
|
89
|
+
process.exit(code ?? 1);
|
|
90
|
+
});
|
|
91
|
+
|
|
92
|
+
proc.on('error', async (err) => {
|
|
93
|
+
console.error('[benchmark] Failed to spawn bun test:', err);
|
|
94
|
+
try { await server.stop(); } catch {}
|
|
95
|
+
try { await pg.close(); } catch {}
|
|
96
|
+
process.exit(1);
|
|
97
|
+
});
|