bunsane 0.2.3 → 0.2.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38):
  1. package/config/cache.config.ts +2 -0
  2. package/core/ArcheType.ts +67 -34
  3. package/core/BatchLoader.ts +215 -30
  4. package/core/Entity.ts +2 -2
  5. package/core/RequestContext.ts +15 -10
  6. package/core/RequestLoaders.ts +4 -2
  7. package/core/cache/CacheFactory.ts +3 -1
  8. package/core/cache/CacheProvider.ts +1 -0
  9. package/core/cache/CacheWarmer.ts +45 -23
  10. package/core/cache/MemoryCache.ts +10 -1
  11. package/core/cache/RedisCache.ts +26 -7
  12. package/core/validateEnv.ts +8 -0
  13. package/database/DatabaseHelper.ts +113 -1
  14. package/database/index.ts +78 -45
  15. package/docs/SCALABILITY_PLAN.md +175 -0
  16. package/package.json +13 -2
  17. package/query/CTENode.ts +44 -24
  18. package/query/ComponentInclusionNode.ts +181 -91
  19. package/query/Query.ts +9 -9
  20. package/tests/benchmark/BENCHMARK_DATABASES_PLAN.md +338 -0
  21. package/tests/benchmark/bunfig.toml +9 -0
  22. package/tests/benchmark/fixtures/EcommerceComponents.ts +283 -0
  23. package/tests/benchmark/fixtures/EcommerceDataGenerators.ts +301 -0
  24. package/tests/benchmark/fixtures/RelationTracker.ts +159 -0
  25. package/tests/benchmark/fixtures/index.ts +6 -0
  26. package/tests/benchmark/index.ts +22 -0
  27. package/tests/benchmark/noop-preload.ts +3 -0
  28. package/tests/benchmark/runners/BenchmarkLoader.ts +132 -0
  29. package/tests/benchmark/runners/index.ts +4 -0
  30. package/tests/benchmark/scenarios/query-benchmarks.test.ts +465 -0
  31. package/tests/benchmark/scripts/generate-db.ts +344 -0
  32. package/tests/benchmark/scripts/run-benchmarks.ts +97 -0
  33. package/tests/integration/query/Query.complexAnalysis.test.ts +557 -0
  34. package/tests/integration/query/Query.explainAnalyze.test.ts +233 -0
  35. package/tests/stress/fixtures/RealisticComponents.ts +235 -0
  36. package/tests/stress/scenarios/realistic-scenarios.test.ts +1081 -0
  37. package/tests/stress/scenarios/timeout-investigation.test.ts +522 -0
  38. package/tests/unit/BatchLoader.test.ts +139 -25
@@ -1,5 +1,7 @@
1
1
  import { CacheManager } from './CacheManager.js';
2
2
  import { SchedulerManager } from '../SchedulerManager.js';
3
+ import { Entity } from '../Entity.js';
4
+ import { logger } from '../Logger.js';
3
5
 
4
6
  /**
5
7
  * CacheWarmer preloads frequently accessed data into the cache to improve
@@ -34,7 +36,7 @@ export class CacheWarmer {
34
36
  let warmed = 0;
35
37
  let failed = 0;
36
38
 
37
- console.log(`Starting entity cache warming for ${entityIds.length} ${entityType} entities`);
39
+ logger.info({ msg: `Starting entity cache warming`, count: entityIds.length, entityType });
38
40
 
39
41
  // Process entities in batches to avoid overwhelming the database
40
42
  const batchSize = 10;
@@ -46,7 +48,7 @@ export class CacheWarmer {
46
48
  const entities = await this.loadEntitiesBatch(batch, entityType);
47
49
  warmed += entities.length;
48
50
  } catch (error) {
49
- console.warn(`Failed to warm batch of entities:`, error);
51
+ logger.warn({ msg: 'Failed to warm batch of entities', error });
50
52
  failed += batch.length;
51
53
  }
52
54
 
@@ -55,7 +57,7 @@ export class CacheWarmer {
55
57
  }
56
58
 
57
59
  const duration = Date.now() - startTime;
58
- console.log(`Entity cache warming completed: ${warmed} warmed, ${failed} failed in ${duration}ms`);
60
+ logger.info({ msg: 'Entity cache warming completed', warmed, failed, duration });
59
61
 
60
62
  return { success: failed === 0, warmed, failed, duration };
61
63
  }
@@ -71,7 +73,7 @@ export class CacheWarmer {
71
73
  enabled?: boolean;
72
74
  }): void {
73
75
  if (!config.enabled) {
74
- console.log(`Cache warming job "${config.name}" is disabled`);
76
+ logger.debug({ msg: 'Cache warming job disabled', name: config.name });
75
77
  return;
76
78
  }
77
79
 
@@ -80,18 +82,18 @@ export class CacheWarmer {
80
82
 
81
83
  const job = this.scheduler.scheduleJob(config.name, config.cronExpression, async () => {
82
84
  try {
83
- console.log(`Running scheduled cache warming: ${config.name}`);
85
+ logger.info({ msg: 'Running scheduled cache warming', name: config.name });
84
86
 
85
87
  if (config.type === 'entity') {
86
88
  await this.warmEntityCache(config.config.entityIds, config.config.entityType);
87
89
  }
88
90
  } catch (error) {
89
- console.error(`Scheduled cache warming failed for "${config.name}":`, error);
91
+ logger.error({ msg: 'Scheduled cache warming failed', name: config.name, error });
90
92
  }
91
93
  });
92
94
 
93
95
  this.warmingJobs.set(config.name, job);
94
- console.log(`Scheduled cache warming job "${config.name}" with cron: ${config.cronExpression}`);
96
+ logger.info({ msg: 'Scheduled cache warming job', name: config.name, cron: config.cronExpression });
95
97
  }
96
98
 
97
99
  /**
@@ -102,7 +104,7 @@ export class CacheWarmer {
102
104
  if (job) {
103
105
  job.cancel();
104
106
  this.warmingJobs.delete(name);
105
- console.log(`Cancelled cache warming job: ${name}`);
107
+ logger.info({ msg: 'Cancelled cache warming job', name });
106
108
  return true;
107
109
  }
108
110
  return false;
@@ -126,11 +128,17 @@ export class CacheWarmer {
126
128
  }> {
127
129
  const startTime = Date.now();
128
130
 
129
- // Warm entities
130
- const firstEntity = config.entities?.[0];
131
- const entityResults = firstEntity
132
- ? await this.warmEntityCache(firstEntity.entityIds, firstEntity.entityType)
133
- : { success: true, warmed: 0, failed: 0, duration: 0 };
131
+ // Warm all entity groups
132
+ let entityResults = { success: true, warmed: 0, failed: 0, duration: 0 };
133
+ if (config.entities) {
134
+ for (const entry of config.entities) {
135
+ const result = await this.warmEntityCache(entry.entityIds, entry.entityType);
136
+ entityResults.warmed += result.warmed;
137
+ entityResults.failed += result.failed;
138
+ entityResults.duration += result.duration;
139
+ if (!result.success) entityResults.success = false;
140
+ }
141
+ }
134
142
 
135
143
  const totalDuration = Date.now() - startTime;
136
144
 
@@ -141,17 +149,31 @@ export class CacheWarmer {
141
149
  }
142
150
 
143
151
  /**
144
- * Loads a batch of entities (placeholder - would need actual entity loading logic)
152
+ * Loads a batch of entities from the database and populates the cache.
153
+ * Uses Entity.FindById to load each entity with all its components,
154
+ * then writes the entity and its components into cache via CacheManager.
145
155
  */
146
- private async loadEntitiesBatch(entityIds: string[], entityType: string): Promise<any[]> {
147
- // This is a placeholder - in a real implementation, this would load entities
148
- // from the database using the appropriate entity manager or query system
149
- console.log(`Loading batch of ${entityIds.length} ${entityType} entities: ${entityIds.slice(0, 3).join(', ')}...`);
150
-
151
- // Simulate loading delay
152
- await new Promise(resolve => setTimeout(resolve, 10));
156
+ private async loadEntitiesBatch(entityIds: string[], entityType: string): Promise<Entity[]> {
157
+ const loaded: Entity[] = [];
158
+
159
+ const results = await Promise.allSettled(
160
+ entityIds.map(id => Entity.FindById(id))
161
+ );
162
+
163
+ for (const result of results) {
164
+ if (result.status === 'fulfilled' && result.value) {
165
+ const entity = result.value;
166
+ loaded.push(entity);
167
+
168
+ await this.cacheManager.setEntityWriteThrough(entity);
169
+ const components = entity.componentList();
170
+ if (components.length > 0) {
171
+ await this.cacheManager.setComponentWriteThrough(entity.id, components);
172
+ }
173
+ }
174
+ }
153
175
 
154
- // Return mock entities - in real implementation, this would be actual entity data
155
- return entityIds.map(id => ({ id, type: entityType, loaded: true }));
176
+ logger.debug({ msg: 'Loaded entity batch', entityType, requested: entityIds.length, loaded: loaded.length });
177
+ return loaded;
156
178
  }
157
179
  }
@@ -1,6 +1,14 @@
1
1
  import { type CacheProvider, type CacheStats } from './CacheProvider';
2
2
  import { logger } from '../Logger';
3
3
 
4
+ function formatBytes(bytes: number): string {
5
+ if (bytes === 0) return '0 B';
6
+ const units = ['B', 'KB', 'MB', 'GB', 'TB'];
7
+ const i = Math.floor(Math.log(bytes) / Math.log(1024));
8
+ const value = bytes / Math.pow(1024, i);
9
+ return `${value.toFixed(2)} ${units[i]}`;
10
+ }
11
+
4
12
  interface CacheEntry<T> {
5
13
  value: T;
6
14
  expiresAt?: number;
@@ -158,7 +166,8 @@ export class MemoryCache implements CacheProvider {
158
166
  misses: this.stats.misses,
159
167
  hitRate,
160
168
  size: this.stats.size,
161
- memoryUsage: this.stats.memoryUsage
169
+ memoryUsage: this.stats.memoryUsage,
170
+ memoryUsageHuman: formatBytes(this.stats.memoryUsage)
162
171
  };
163
172
  }
164
173
 
@@ -13,10 +13,22 @@ export interface HealthStatus {
13
13
  connected: boolean;
14
14
  latency: number;
15
15
  memoryUsage?: number;
16
+ memoryUsageHuman?: string;
16
17
  connections?: number;
17
18
  version?: string;
18
19
  }
19
20
 
21
+ /**
22
+ * Format bytes into human-readable string
23
+ */
24
+ function formatBytes(bytes: number): string {
25
+ if (bytes === 0) return '0 B';
26
+ const units = ['B', 'KB', 'MB', 'GB', 'TB'];
27
+ const i = Math.floor(Math.log(bytes) / Math.log(1024));
28
+ const value = bytes / Math.pow(1024, i);
29
+ return `${value.toFixed(2)} ${units[i]}`;
30
+ }
31
+
20
32
  export interface RedisCacheConfig {
21
33
  host: string;
22
34
  port: number;
@@ -27,6 +39,8 @@ export interface RedisCacheConfig {
27
39
  maxRetriesPerRequest?: number;
28
40
  lazyConnect?: boolean;
29
41
  enableReadyCheck?: boolean;
42
+ connectTimeout?: number;
43
+ commandTimeout?: number;
30
44
  }
31
45
 
32
46
  /**
@@ -60,7 +74,8 @@ export class RedisCache implements CacheProvider {
60
74
  maxRetriesPerRequest: config.maxRetriesPerRequest || 3,
61
75
  lazyConnect: config.lazyConnect || false,
62
76
  enableReadyCheck: config.enableReadyCheck || false,
63
- // Connection pooling settings
77
+ connectTimeout: config.connectTimeout ?? 5000,
78
+ commandTimeout: config.commandTimeout ?? 3000,
64
79
  enableOfflineQueue: true,
65
80
  };
66
81
 
@@ -105,7 +120,7 @@ export class RedisCache implements CacheProvider {
105
120
  const memoryMatch = info.match(/used_memory:(\d+)/);
106
121
  if (memoryMatch && memoryMatch[1]) {
107
122
  const memoryUsage = parseInt(memoryMatch[1], 10);
108
- logger.debug({ msg: 'Redis memory usage', memoryUsage });
123
+ logger.debug({ msg: 'Redis memory usage', memoryUsage: formatBytes(memoryUsage), memoryUsageBytes: memoryUsage });
109
124
  }
110
125
  } catch (error) {
111
126
  logger.error({ error, msg: 'Failed to get Redis memory info' });
@@ -194,19 +209,20 @@ export class RedisCache implements CacheProvider {
194
209
  const prefixedKeys = keys.map(k => this.prefixKey(k));
195
210
  const values = await this.client.mget(...prefixedKeys);
196
211
 
197
- return values.map((value, index) => {
212
+ return await Promise.all(values.map(async (value, index) => {
198
213
  if (value === null) {
199
214
  this.stats.misses++;
200
215
  return null;
201
216
  }
202
217
  this.stats.hits++;
203
218
  try {
204
- return JSON.parse(value) as T;
219
+ const parsed = JSON.parse(value);
220
+ return await CompressionUtils.decompress(parsed) as T;
205
221
  } catch (parseError) {
206
222
  logger.error({ error: parseError, key: keys[index], msg: 'Failed to parse cached value' });
207
223
  return null;
208
224
  }
209
- });
225
+ }));
210
226
  } catch (error) {
211
227
  logger.error({ error, msg: 'Redis getMany error' });
212
228
  return new Array(keys.length).fill(null);
@@ -222,7 +238,8 @@ export class RedisCache implements CacheProvider {
222
238
 
223
239
  for (const entry of entries) {
224
240
  const prefixedKey = this.prefixKey(entry.key);
225
- const serializedValue = JSON.stringify(entry.value);
241
+ const compressedValue = await CompressionUtils.compress(entry.value);
242
+ const serializedValue = JSON.stringify(compressedValue);
226
243
 
227
244
  if (entry.ttl) {
228
245
  pipeline.setex(prefixedKey, Math.floor(entry.ttl / 1000), serializedValue);
@@ -306,7 +323,8 @@ export class RedisCache implements CacheProvider {
306
323
  misses: this.stats.misses,
307
324
  hitRate: this.stats.hits / (this.stats.hits + this.stats.misses) || 0,
308
325
  size,
309
- memoryUsage
326
+ memoryUsage,
327
+ memoryUsageHuman: memoryUsage !== undefined ? formatBytes(memoryUsage) : undefined
310
328
  };
311
329
  } catch (error) {
312
330
  logger.error({ error, msg: 'Redis getStats error' });
@@ -351,6 +369,7 @@ export class RedisCache implements CacheProvider {
351
369
  connected: true,
352
370
  latency,
353
371
  memoryUsage,
372
+ memoryUsageHuman: memoryUsage !== undefined ? formatBytes(memoryUsage) : undefined,
354
373
  connections,
355
374
  version
356
375
  };
@@ -52,6 +52,14 @@ const envSchema = z
52
52
  .string()
53
53
  .regex(/^\d+$/, "DB_STATEMENT_TIMEOUT must be numeric")
54
54
  .optional(),
55
+ DB_QUERY_TIMEOUT: z
56
+ .string()
57
+ .regex(/^\d+$/, "DB_QUERY_TIMEOUT must be numeric (milliseconds)")
58
+ .optional(),
59
+ DB_CONNECTION_TIMEOUT: z
60
+ .string()
61
+ .regex(/^\d+$/, "DB_CONNECTION_TIMEOUT must be numeric (seconds)")
62
+ .optional(),
55
63
  })
56
64
  .refine(
57
65
  (env) => {
@@ -108,6 +108,14 @@ export const CreateEntityTable = async () => {
108
108
  updated_at TIMESTAMP DEFAULT NOW(),
109
109
  deleted_at TIMESTAMP
110
110
  );`;
111
+
112
+ // Add partial index for soft-delete queries - critical for 1M+ scale
113
+ // This allows efficient filtering of non-deleted entities
114
+ await db.unsafe(`
115
+ CREATE INDEX IF NOT EXISTS idx_entities_deleted_null
116
+ ON entities (id)
117
+ WHERE deleted_at IS NULL
118
+ `);
111
119
  }
112
120
 
113
121
  export const CreateComponentTable = async () => {
@@ -638,4 +646,108 @@ export const BenchmarkPartitionCounts = async (partitionCounts: number[] = [8, 1
638
646
  return results;
639
647
  }
640
648
 
641
- export const GenerateTableName = (name: string) => `components_${name.toLowerCase().replace(/\s+/g, '_')}`;
649
+ export const GenerateTableName = (name: string) => `components_${name.toLowerCase().replace(/\s+/g, '_')}`;
650
+
651
+ /**
652
+ * Creates a GIN index on a JSONB foreign key field for optimized relation queries.
653
+ * This significantly improves @HasMany and @BelongsTo relation resolution performance.
654
+ *
655
+ * @param tableName The component table name (e.g., 'components_userprofile')
656
+ * @param foreignKeyField The JSONB field name that holds the foreign key (e.g., 'user_id')
657
+ * @returns Promise<boolean> - true if index was created, false if it already exists
658
+ *
659
+ * @example
660
+ * // Create index for user_id foreign key
661
+ * await CreateForeignKeyIndex('components_userprofile', 'user_id');
662
+ */
663
+ export const CreateForeignKeyIndex = async (tableName: string, foreignKeyField: string): Promise<boolean> => {
664
+ tableName = validateIdentifier(tableName);
665
+ foreignKeyField = validateIdentifier(foreignKeyField);
666
+
667
+ const indexName = `idx_${tableName}_fk_${foreignKeyField}`;
668
+
669
+ // Check if index already exists
670
+ const existingIndex = await db.unsafe(`
671
+ SELECT 1 FROM pg_indexes
672
+ WHERE tablename = '${tableName}' AND indexname = '${indexName}'
673
+ `);
674
+
675
+ if (existingIndex.length > 0) {
676
+ logger.trace(`Foreign key index ${indexName} already exists`);
677
+ return false;
678
+ }
679
+
680
+ // Check partition strategy
681
+ const partitionStrategy = await GetPartitionStrategy();
682
+ const useConcurrently = partitionStrategy !== 'hash' && !process.env.USE_PGLITE;
683
+
684
+ try {
685
+ await retryWithBackoff(async () => {
686
+ // Use btree index on the extracted text value for equality lookups (faster than GIN for FK)
687
+ await db.unsafe(`
688
+ CREATE INDEX${useConcurrently ? ' CONCURRENTLY' : ''} IF NOT EXISTS ${indexName}
689
+ ON ${tableName} ((data->>'${foreignKeyField}'))
690
+ WHERE deleted_at IS NULL
691
+ `);
692
+ });
693
+ logger.info(`Created foreign key index ${indexName} on ${tableName}.data->>'${foreignKeyField}'`);
694
+ return true;
695
+ } catch (error: any) {
696
+ if (error.message?.includes('duplicate key value violates unique constraint')) {
697
+ logger.trace(`Foreign key index ${indexName} already exists (concurrent creation)`);
698
+ return false;
699
+ }
700
+ throw error;
701
+ }
702
+ };
703
+
704
+ /**
705
+ * Creates foreign key indexes for all relation fields defined in archetypes.
706
+ * Should be called during database initialization for optimal relation query performance.
707
+ */
708
+ export const CreateRelationIndexes = async (): Promise<void> => {
709
+ const storage = getMetadataStorage();
710
+ const createdIndexes: string[] = [];
711
+
712
+ for (const [archetypeId, relations] of storage.archetypes_relations_map) {
713
+ for (const relation of relations) {
714
+ if (!relation.options?.foreignKey) continue;
715
+
716
+ const foreignKey = relation.options.foreignKey;
717
+ // Skip nested foreign keys (handled differently)
718
+ if (foreignKey.includes('.')) continue;
719
+
720
+ // Find the component that has this foreign key
721
+ const archetypeMetadata = storage.archetypes.find(a =>
722
+ storage.getComponentId(a.name) === archetypeId || a.typeId === archetypeId
723
+ );
724
+
725
+ if (!archetypeMetadata) continue;
726
+
727
+ // Get the component fields for this archetype
728
+ const archetypeFields = storage.archetypes_field_map.get(archetypeId) || [];
729
+
730
+ for (const field of archetypeFields) {
731
+ const componentId = storage.getComponentId(field.component.name);
732
+ const componentProps = storage.getComponentProperties(componentId);
733
+ const hasForeignKey = componentProps.some(prop => prop.propertyKey === foreignKey);
734
+
735
+ if (hasForeignKey) {
736
+ const tableName = GenerateTableName(field.component.name);
737
+ try {
738
+ const created = await CreateForeignKeyIndex(tableName, foreignKey);
739
+ if (created) {
740
+ createdIndexes.push(`${tableName}.${foreignKey}`);
741
+ }
742
+ } catch (error) {
743
+ logger.warn(`Failed to create FK index for ${tableName}.${foreignKey}: ${error}`);
744
+ }
745
+ }
746
+ }
747
+ }
748
+ }
749
+
750
+ if (createdIndexes.length > 0) {
751
+ logger.info(`Created ${createdIndexes.length} relation foreign key indexes`);
752
+ }
753
+ };
package/database/index.ts CHANGED
@@ -1,56 +1,89 @@
1
1
  import {SQL} from "bun";
2
2
  import { logger } from "../core/Logger";
3
3
 
4
- let connectionUrl = `postgres://${process.env.POSTGRES_USER}:${process.env.POSTGRES_PASSWORD}@${process.env.POSTGRES_HOST}:${process.env.POSTGRES_PORT ?? "5432"}/${process.env.POSTGRES_DB}`;
5
- if(process.env.DB_CONNECTION_URL) {
6
- connectionUrl = process.env.DB_CONNECTION_URL;
7
- }
4
+ // Query timeout in milliseconds (default 30s, configurable via env)
5
+ // This is used by Query.exec(), Entity.save(), etc.
6
+ export const QUERY_TIMEOUT_MS = parseInt(process.env.DB_QUERY_TIMEOUT ?? '30000', 10);
7
+
8
+ // Module-level state for the database connection
9
+ let _db: SQL | null = null;
8
10
 
9
- // Add statement_timeout only when explicitly configured (opt-in)
10
- // Note: PgBouncer rejects statement_timeout as a startup parameter — use PostgreSQL config or connect_query instead
11
- if (process.env.USE_PGLITE !== 'true' && process.env.DB_STATEMENT_TIMEOUT) {
12
- try {
13
- const urlObj = new URL(connectionUrl);
14
- urlObj.searchParams.set('options', `-c statement_timeout=${process.env.DB_STATEMENT_TIMEOUT}`);
15
- connectionUrl = urlObj.toString();
16
- } catch {
17
- // Non-standard URL format, skip statement_timeout
11
+ function createDatabase(): SQL {
12
+ let url = `postgres://${process.env.POSTGRES_USER}:${process.env.POSTGRES_PASSWORD}@${process.env.POSTGRES_HOST}:${process.env.POSTGRES_PORT ?? "5432"}/${process.env.POSTGRES_DB}`;
13
+ if(process.env.DB_CONNECTION_URL) {
14
+ url = process.env.DB_CONNECTION_URL;
18
15
  }
19
- }
20
16
 
21
- // Log connection URL with credentials redacted
22
- const redactedUrl = connectionUrl.replace(/:\/\/([^:]+):([^@]+)@/, '://$1:****@');
23
- logger.info(`Database connection URL: ${redactedUrl}`);
24
-
25
- // OPTIMIZED: Reduced from 20 to 10 to prevent overwhelming PGBouncer
26
- // With 5 app instances: 5 × 10 = 50 connections (well under PGBouncer's limit)
27
- const maxConnections = parseInt(process.env.POSTGRES_MAX_CONNECTIONS ?? '10', 10);
28
- logger.info(`Connection pool size: ${maxConnections} connections`);
29
-
30
- const db = new SQL({
31
- url: connectionUrl,
32
- // Connection pool settings - OPTIMIZED for PGBouncer
33
- max: maxConnections,
34
- idleTimeout: 30000, // Close idle connections after 30s
35
- maxLifetime: 600000, // Connection lifetime 10 minutes
36
- connectionTimeout: 30, // Timeout when establishing new connections
37
- onclose: (err) => {
38
- if (err) {
39
- if((err as unknown as { code: string }).code === "ERR_POSTGRES_IDLE_TIMEOUT") {
40
- logger.trace("Closing connection. Idle");
17
+ // Add statement_timeout only when explicitly configured (opt-in)
18
+ // Note: PgBouncer rejects statement_timeout as a startup parameter
19
+ if (process.env.USE_PGLITE !== 'true' && process.env.DB_STATEMENT_TIMEOUT) {
20
+ try {
21
+ const urlObj = new URL(url);
22
+ urlObj.searchParams.set('options', `-c statement_timeout=${process.env.DB_STATEMENT_TIMEOUT}`);
23
+ url = urlObj.toString();
24
+ } catch {
25
+ // Non-standard URL format, skip statement_timeout
26
+ }
27
+ }
28
+
29
+ const redactedUrl = url.replace(/:\/\/([^:]+):([^@]+)@/, '://$1:****@');
30
+ logger.info(`Database connection URL: ${redactedUrl}`);
31
+
32
+ const max = parseInt(process.env.POSTGRES_MAX_CONNECTIONS ?? '10', 10);
33
+ logger.info(`Connection pool size: ${max} connections`);
34
+ logger.info(`Query timeout: ${QUERY_TIMEOUT_MS}ms`);
35
+
36
+ const connTimeout = parseInt(process.env.DB_CONNECTION_TIMEOUT ?? '30', 10);
37
+
38
+ return new SQL({
39
+ url,
40
+ max,
41
+ idleTimeout: 30000,
42
+ maxLifetime: 600000,
43
+ connectionTimeout: connTimeout,
44
+ onclose: (err) => {
45
+ if (err) {
46
+ const errCode = (err as unknown as { code: string }).code;
47
+ if(errCode === "ERR_POSTGRES_IDLE_TIMEOUT") {
48
+ logger.trace("Closing connection. Idle");
49
+ } else if (errCode === "ERR_POSTGRES_CONNECTION_CLOSED") {
50
+ logger.warn("Database connection closed unexpectedly");
51
+ } else {
52
+ logger.error("Database connection closed with error:");
53
+ logger.error(err);
54
+ }
41
55
  } else {
42
- logger.error("Database connection closed with error:");
43
- logger.error(err);
56
+ logger.trace("Database connection closed gracefully.");
44
57
  }
45
- } else {
46
- logger.trace("Database connection closed gracefully.");
47
- }
48
- },
49
- onconnect: () => {
50
- // Log when new connections are created
51
- logger.trace("New database connection established");
58
+ },
59
+ onconnect: () => {
60
+ logger.trace("New database connection established");
61
+ }
62
+ });
63
+ }
64
+
65
+ /**
66
+ * Get the database connection. Lazily initializes on first access.
67
+ * This allows env vars to be set before the first database usage.
68
+ */
69
+ export function getDb(): SQL {
70
+ if (!_db) {
71
+ _db = createDatabase();
52
72
  }
53
- });
73
+ return _db;
74
+ }
75
+
76
+ /**
77
+ * Reinitialize the database connection with current env vars.
78
+ * Used by benchmark tests that set env vars after module load.
79
+ */
80
+ export function resetDatabase(): void {
81
+ _db = createDatabase();
82
+ }
54
83
 
84
+ // For backward compatibility, initialize eagerly on import
85
+ // This ensures existing code using `import db from './database'` continues to work
86
+ // Note: For benchmarks that need delayed initialization, use getDb() or resetDatabase()
87
+ const db = getDb();
55
88
 
56
- export default db;
89
+ export default db;