@spfn/core 0.1.0-alpha.7 → 0.1.0-alpha.72

This diff shows the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions exactly as they appear in their public registries.
Files changed (59)
  1. package/README.md +168 -195
  2. package/dist/auto-loader-JFaZ9gON.d.ts +80 -0
  3. package/dist/cache/index.d.ts +211 -0
  4. package/dist/cache/index.js +992 -0
  5. package/dist/cache/index.js.map +1 -0
  6. package/dist/client/index.d.ts +92 -92
  7. package/dist/client/index.js +80 -85
  8. package/dist/client/index.js.map +1 -1
  9. package/dist/codegen/generators/index.d.ts +19 -0
  10. package/dist/codegen/generators/index.js +1491 -0
  11. package/dist/codegen/generators/index.js.map +1 -0
  12. package/dist/codegen/index.d.ts +76 -60
  13. package/dist/codegen/index.js +1479 -736
  14. package/dist/codegen/index.js.map +1 -1
  15. package/dist/database-errors-BNNmLTJE.d.ts +86 -0
  16. package/dist/db/index.d.ts +844 -44
  17. package/dist/db/index.js +1262 -1309
  18. package/dist/db/index.js.map +1 -1
  19. package/dist/env/index.d.ts +508 -0
  20. package/dist/env/index.js +1106 -0
  21. package/dist/env/index.js.map +1 -0
  22. package/dist/error-handler-wjLL3v-a.d.ts +44 -0
  23. package/dist/errors/index.d.ts +136 -0
  24. package/dist/errors/index.js +172 -0
  25. package/dist/errors/index.js.map +1 -0
  26. package/dist/index-DHiAqhKv.d.ts +101 -0
  27. package/dist/index.d.ts +3 -374
  28. package/dist/index.js +2394 -2176
  29. package/dist/index.js.map +1 -1
  30. package/dist/logger/index.d.ts +94 -0
  31. package/dist/logger/index.js +774 -0
  32. package/dist/logger/index.js.map +1 -0
  33. package/dist/middleware/index.d.ts +33 -0
  34. package/dist/middleware/index.js +890 -0
  35. package/dist/middleware/index.js.map +1 -0
  36. package/dist/route/index.d.ts +21 -53
  37. package/dist/route/index.js +1234 -219
  38. package/dist/route/index.js.map +1 -1
  39. package/dist/server/index.d.ts +18 -0
  40. package/dist/server/index.js +2390 -2058
  41. package/dist/server/index.js.map +1 -1
  42. package/dist/types-Dzggq1Yb.d.ts +170 -0
  43. package/package.json +59 -15
  44. package/dist/auto-loader-C44TcLmM.d.ts +0 -125
  45. package/dist/bind-pssq1NRT.d.ts +0 -34
  46. package/dist/postgres-errors-CY_Es8EJ.d.ts +0 -1703
  47. package/dist/scripts/index.d.ts +0 -24
  48. package/dist/scripts/index.js +0 -1201
  49. package/dist/scripts/index.js.map +0 -1
  50. package/dist/scripts/templates/api-index.template.txt +0 -10
  51. package/dist/scripts/templates/api-tag.template.txt +0 -11
  52. package/dist/scripts/templates/contract.template.txt +0 -87
  53. package/dist/scripts/templates/entity-type.template.txt +0 -31
  54. package/dist/scripts/templates/entity.template.txt +0 -19
  55. package/dist/scripts/templates/index.template.txt +0 -10
  56. package/dist/scripts/templates/repository.template.txt +0 -37
  57. package/dist/scripts/templates/routes-id.template.txt +0 -59
  58. package/dist/scripts/templates/routes-index.template.txt +0 -44
  59. package/dist/types-SlzTr8ZO.d.ts +0 -143
package/dist/db/index.js CHANGED
@@ -1,55 +1,23 @@
1
- import { config } from 'dotenv';
2
1
  import { drizzle } from 'drizzle-orm/postgres-js';
3
- import postgres from 'postgres';
4
2
  import pino from 'pino';
5
- import { existsSync, mkdirSync, createWriteStream } from 'fs';
6
- import { join } from 'path';
3
+ import { existsSync, mkdirSync, accessSync, constants, writeFileSync, unlinkSync, createWriteStream, statSync, readdirSync, renameSync, readFileSync } from 'fs';
4
+ import { join, dirname, basename } from 'path';
5
+ import { config } from 'dotenv';
6
+ import postgres from 'postgres';
7
+ import { bigserial, timestamp, pgSchema } from 'drizzle-orm/pg-core';
7
8
  import { AsyncLocalStorage } from 'async_hooks';
9
+ import { randomUUID } from 'crypto';
8
10
  import { createMiddleware } from 'hono/factory';
9
- import { and, desc, asc, sql, isNull, isNotNull, notInArray, inArray, like, lte, lt, gte, gt, ne, eq } from 'drizzle-orm';
10
- import { bigserial, timestamp } from 'drizzle-orm/pg-core';
11
+ import { eq, and } from 'drizzle-orm';
11
12
 
12
13
  // src/db/manager/factory.ts
13
14
  var PinoAdapter = class _PinoAdapter {
14
15
  logger;
15
- constructor(config2) {
16
- const isProduction = process.env.NODE_ENV === "production";
17
- const isDevelopment = process.env.NODE_ENV === "development";
18
- const fileLoggingEnabled = process.env.LOGGER_FILE_ENABLED === "true";
19
- const targets = [];
20
- if (!isProduction && isDevelopment) {
21
- targets.push({
22
- target: "pino-pretty",
23
- level: "debug",
24
- options: {
25
- colorize: true,
26
- translateTime: "SYS:yyyy-mm-dd HH:MM:ss.l",
27
- ignore: "pid,hostname"
28
- }
29
- });
30
- }
31
- if (fileLoggingEnabled && isProduction) {
32
- const logDir = process.env.LOG_DIR || "./logs";
33
- const maxFileSize = process.env.LOG_MAX_FILE_SIZE || "10M";
34
- const maxFiles = parseInt(process.env.LOG_MAX_FILES || "10", 10);
35
- targets.push({
36
- target: "pino-roll",
37
- level: "info",
38
- options: {
39
- file: `${logDir}/app.log`,
40
- frequency: "daily",
41
- size: maxFileSize,
42
- limit: { count: maxFiles },
43
- mkdir: true
44
- }
45
- });
46
- }
16
+ constructor(config) {
47
17
  this.logger = pino({
48
- level: config2.level,
49
- // Transport configuration (use targets if any are set, otherwise default to stdout)
50
- transport: targets.length > 0 ? { targets } : void 0,
18
+ level: config.level,
51
19
  // Base fields
52
- base: config2.module ? { module: config2.module } : void 0
20
+ base: config.module ? { module: config.module } : void 0
53
21
  });
54
22
  }
55
23
  child(module) {
@@ -88,13 +56,183 @@ var PinoAdapter = class _PinoAdapter {
88
56
  }
89
57
  };
90
58
 
59
+ // src/logger/types.ts
60
+ var LOG_LEVEL_PRIORITY = {
61
+ debug: 0,
62
+ info: 1,
63
+ warn: 2,
64
+ error: 3,
65
+ fatal: 4
66
+ };
67
+
68
+ // src/logger/formatters.ts
69
+ var SENSITIVE_KEYS = [
70
+ "password",
71
+ "passwd",
72
+ "pwd",
73
+ "secret",
74
+ "token",
75
+ "apikey",
76
+ "api_key",
77
+ "accesstoken",
78
+ "access_token",
79
+ "refreshtoken",
80
+ "refresh_token",
81
+ "authorization",
82
+ "auth",
83
+ "cookie",
84
+ "session",
85
+ "sessionid",
86
+ "session_id",
87
+ "privatekey",
88
+ "private_key",
89
+ "creditcard",
90
+ "credit_card",
91
+ "cardnumber",
92
+ "card_number",
93
+ "cvv",
94
+ "ssn",
95
+ "pin"
96
+ ];
97
+ var MASKED_VALUE = "***MASKED***";
98
+ function isSensitiveKey(key) {
99
+ const lowerKey = key.toLowerCase();
100
+ return SENSITIVE_KEYS.some((sensitive) => lowerKey.includes(sensitive));
101
+ }
102
+ function maskSensitiveData(data) {
103
+ if (data === null || data === void 0) {
104
+ return data;
105
+ }
106
+ if (Array.isArray(data)) {
107
+ return data.map((item) => maskSensitiveData(item));
108
+ }
109
+ if (typeof data === "object") {
110
+ const masked = {};
111
+ for (const [key, value] of Object.entries(data)) {
112
+ if (isSensitiveKey(key)) {
113
+ masked[key] = MASKED_VALUE;
114
+ } else if (typeof value === "object" && value !== null) {
115
+ masked[key] = maskSensitiveData(value);
116
+ } else {
117
+ masked[key] = value;
118
+ }
119
+ }
120
+ return masked;
121
+ }
122
+ return data;
123
+ }
124
+ var COLORS = {
125
+ reset: "\x1B[0m",
126
+ bright: "\x1B[1m",
127
+ dim: "\x1B[2m",
128
+ // Log level colors
129
+ debug: "\x1B[36m",
130
+ // cyan
131
+ info: "\x1B[32m",
132
+ // green
133
+ warn: "\x1B[33m",
134
+ // yellow
135
+ error: "\x1B[31m",
136
+ // red
137
+ fatal: "\x1B[35m",
138
+ // magenta
139
+ // Additional colors
140
+ gray: "\x1B[90m"
141
+ };
142
+ function formatTimestamp(date) {
143
+ return date.toISOString();
144
+ }
145
+ function formatTimestampHuman(date) {
146
+ const year = date.getFullYear();
147
+ const month = String(date.getMonth() + 1).padStart(2, "0");
148
+ const day = String(date.getDate()).padStart(2, "0");
149
+ const hours = String(date.getHours()).padStart(2, "0");
150
+ const minutes = String(date.getMinutes()).padStart(2, "0");
151
+ const seconds = String(date.getSeconds()).padStart(2, "0");
152
+ const ms = String(date.getMilliseconds()).padStart(3, "0");
153
+ return `${year}-${month}-${day} ${hours}:${minutes}:${seconds}.${ms}`;
154
+ }
155
+ function formatError(error) {
156
+ const lines = [];
157
+ lines.push(`${error.name}: ${error.message}`);
158
+ if (error.stack) {
159
+ const stackLines = error.stack.split("\n").slice(1);
160
+ lines.push(...stackLines);
161
+ }
162
+ return lines.join("\n");
163
+ }
164
+ function formatConsole(metadata, colorize = true) {
165
+ const parts = [];
166
+ const timestamp2 = formatTimestampHuman(metadata.timestamp);
167
+ if (colorize) {
168
+ parts.push(`${COLORS.gray}[${timestamp2}]${COLORS.reset}`);
169
+ } else {
170
+ parts.push(`[${timestamp2}]`);
171
+ }
172
+ if (metadata.module) {
173
+ if (colorize) {
174
+ parts.push(`${COLORS.dim}[module=${metadata.module}]${COLORS.reset}`);
175
+ } else {
176
+ parts.push(`[module=${metadata.module}]`);
177
+ }
178
+ }
179
+ if (metadata.context && Object.keys(metadata.context).length > 0) {
180
+ Object.entries(metadata.context).forEach(([key, value]) => {
181
+ const valueStr = typeof value === "string" ? value : String(value);
182
+ if (colorize) {
183
+ parts.push(`${COLORS.dim}[${key}=${valueStr}]${COLORS.reset}`);
184
+ } else {
185
+ parts.push(`[${key}=${valueStr}]`);
186
+ }
187
+ });
188
+ }
189
+ const levelStr = metadata.level.toUpperCase();
190
+ if (colorize) {
191
+ const color = COLORS[metadata.level];
192
+ parts.push(`${color}(${levelStr})${COLORS.reset}:`);
193
+ } else {
194
+ parts.push(`(${levelStr}):`);
195
+ }
196
+ if (colorize) {
197
+ parts.push(`${COLORS.bright}${metadata.message}${COLORS.reset}`);
198
+ } else {
199
+ parts.push(metadata.message);
200
+ }
201
+ let output = parts.join(" ");
202
+ if (metadata.error) {
203
+ output += "\n" + formatError(metadata.error);
204
+ }
205
+ return output;
206
+ }
207
+ function formatJSON(metadata) {
208
+ const obj = {
209
+ timestamp: formatTimestamp(metadata.timestamp),
210
+ level: metadata.level,
211
+ message: metadata.message
212
+ };
213
+ if (metadata.module) {
214
+ obj.module = metadata.module;
215
+ }
216
+ if (metadata.context) {
217
+ obj.context = metadata.context;
218
+ }
219
+ if (metadata.error) {
220
+ obj.error = {
221
+ name: metadata.error.name,
222
+ message: metadata.error.message,
223
+ stack: metadata.error.stack
224
+ };
225
+ }
226
+ return JSON.stringify(obj);
227
+ }
228
+
91
229
  // src/logger/logger.ts
92
230
  var Logger = class _Logger {
93
231
  config;
94
232
  module;
95
- constructor(config2) {
96
- this.config = config2;
97
- this.module = config2.module;
233
+ constructor(config) {
234
+ this.config = config;
235
+ this.module = config.module;
98
236
  }
99
237
  /**
100
238
  * Get current log level
@@ -148,13 +286,17 @@ var Logger = class _Logger {
148
286
  * Log processing (internal)
149
287
  */
150
288
  log(level, message, error, context) {
289
+ if (LOG_LEVEL_PRIORITY[level] < LOG_LEVEL_PRIORITY[this.config.level]) {
290
+ return;
291
+ }
151
292
  const metadata = {
152
293
  timestamp: /* @__PURE__ */ new Date(),
153
294
  level,
154
295
  message,
155
296
  module: this.module,
156
297
  error,
157
- context
298
+ // Mask sensitive information in context to prevent credential leaks
299
+ context: context ? maskSensitiveData(context) : void 0
158
300
  };
159
301
  this.processTransports(metadata);
160
302
  }
@@ -190,130 +332,16 @@ var Logger = class _Logger {
190
332
  }
191
333
  };
192
334
 
193
- // src/logger/types.ts
194
- var LOG_LEVEL_PRIORITY = {
195
- debug: 0,
196
- info: 1,
197
- warn: 2,
198
- error: 3,
199
- fatal: 4
200
- };
201
-
202
- // src/logger/formatters.ts
203
- var COLORS = {
204
- reset: "\x1B[0m",
205
- bright: "\x1B[1m",
206
- dim: "\x1B[2m",
207
- // Log level colors
208
- debug: "\x1B[36m",
209
- // cyan
210
- info: "\x1B[32m",
211
- // green
212
- warn: "\x1B[33m",
213
- // yellow
214
- error: "\x1B[31m",
215
- // red
216
- fatal: "\x1B[35m",
217
- // magenta
218
- // Additional colors
219
- gray: "\x1B[90m"
220
- };
221
- function colorizeLevel(level) {
222
- const color = COLORS[level];
223
- const levelStr = level.toUpperCase().padEnd(5);
224
- return `${color}${levelStr}${COLORS.reset}`;
225
- }
226
- function formatTimestamp(date) {
227
- return date.toISOString();
228
- }
229
- function formatTimestampHuman(date) {
230
- const year = date.getFullYear();
231
- const month = String(date.getMonth() + 1).padStart(2, "0");
232
- const day = String(date.getDate()).padStart(2, "0");
233
- const hours = String(date.getHours()).padStart(2, "0");
234
- const minutes = String(date.getMinutes()).padStart(2, "0");
235
- const seconds = String(date.getSeconds()).padStart(2, "0");
236
- const ms = String(date.getMilliseconds()).padStart(3, "0");
237
- return `${year}-${month}-${day} ${hours}:${minutes}:${seconds}.${ms}`;
238
- }
239
- function formatError(error) {
240
- const lines = [];
241
- lines.push(`${error.name}: ${error.message}`);
242
- if (error.stack) {
243
- const stackLines = error.stack.split("\n").slice(1);
244
- lines.push(...stackLines);
245
- }
246
- return lines.join("\n");
247
- }
248
- function formatContext(context) {
249
- try {
250
- return JSON.stringify(context, null, 2);
251
- } catch (error) {
252
- return "[Context serialization failed]";
253
- }
254
- }
255
- function formatConsole(metadata, colorize = true) {
256
- const parts = [];
257
- const timestamp2 = formatTimestampHuman(metadata.timestamp);
258
- if (colorize) {
259
- parts.push(`${COLORS.gray}${timestamp2}${COLORS.reset}`);
260
- } else {
261
- parts.push(timestamp2);
262
- }
263
- if (colorize) {
264
- parts.push(colorizeLevel(metadata.level));
265
- } else {
266
- parts.push(metadata.level.toUpperCase().padEnd(5));
267
- }
268
- if (metadata.module) {
269
- if (colorize) {
270
- parts.push(`${COLORS.dim}[${metadata.module}]${COLORS.reset}`);
271
- } else {
272
- parts.push(`[${metadata.module}]`);
273
- }
274
- }
275
- parts.push(metadata.message);
276
- let output = parts.join(" ");
277
- if (metadata.context && Object.keys(metadata.context).length > 0) {
278
- output += "\n" + formatContext(metadata.context);
279
- }
280
- if (metadata.error) {
281
- output += "\n" + formatError(metadata.error);
282
- }
283
- return output;
284
- }
285
- function formatJSON(metadata) {
286
- const obj = {
287
- timestamp: formatTimestamp(metadata.timestamp),
288
- level: metadata.level,
289
- message: metadata.message
290
- };
291
- if (metadata.module) {
292
- obj.module = metadata.module;
293
- }
294
- if (metadata.context) {
295
- obj.context = metadata.context;
296
- }
297
- if (metadata.error) {
298
- obj.error = {
299
- name: metadata.error.name,
300
- message: metadata.error.message,
301
- stack: metadata.error.stack
302
- };
303
- }
304
- return JSON.stringify(obj);
305
- }
306
-
307
335
  // src/logger/transports/console.ts
308
336
  var ConsoleTransport = class {
309
337
  name = "console";
310
338
  level;
311
339
  enabled;
312
340
  colorize;
313
- constructor(config2) {
314
- this.level = config2.level;
315
- this.enabled = config2.enabled;
316
- this.colorize = config2.colorize ?? true;
341
+ constructor(config) {
342
+ this.level = config.level;
343
+ this.enabled = config.enabled;
344
+ this.colorize = config.colorize ?? true;
317
345
  }
318
346
  async log(metadata) {
319
347
  if (!this.enabled) {
@@ -335,12 +363,16 @@ var FileTransport = class {
335
363
  level;
336
364
  enabled;
337
365
  logDir;
366
+ maxFileSize;
367
+ maxFiles;
338
368
  currentStream = null;
339
369
  currentFilename = null;
340
- constructor(config2) {
341
- this.level = config2.level;
342
- this.enabled = config2.enabled;
343
- this.logDir = config2.logDir;
370
+ constructor(config) {
371
+ this.level = config.level;
372
+ this.enabled = config.enabled;
373
+ this.logDir = config.logDir;
374
+ this.maxFileSize = config.maxFileSize ?? 10 * 1024 * 1024;
375
+ this.maxFiles = config.maxFiles ?? 10;
344
376
  if (!existsSync(this.logDir)) {
345
377
  mkdirSync(this.logDir, { recursive: true });
346
378
  }
@@ -356,6 +388,9 @@ var FileTransport = class {
356
388
  const filename = this.getLogFilename(metadata.timestamp);
357
389
  if (this.currentFilename !== filename) {
358
390
  await this.rotateStream(filename);
391
+ await this.cleanOldFiles();
392
+ } else if (this.currentFilename) {
393
+ await this.checkAndRotateBySize();
359
394
  }
360
395
  if (this.currentStream) {
361
396
  return new Promise((resolve, reject) => {
@@ -412,26 +447,124 @@ var FileTransport = class {
412
447
  });
413
448
  }
414
449
  /**
415
- * Generate a log filename for the given date
450
+ * Check file size and perform size-based rotation when the limit is reached
416
451
  */
417
- getLogFilename(date) {
418
- const year = date.getFullYear();
419
- const month = String(date.getMonth() + 1).padStart(2, "0");
420
- const day = String(date.getDate()).padStart(2, "0");
421
- return `${year}-${month}-${day}.log`;
452
+ async checkAndRotateBySize() {
453
+ if (!this.currentFilename) {
454
+ return;
455
+ }
456
+ const filepath = join(this.logDir, this.currentFilename);
457
+ if (!existsSync(filepath)) {
458
+ return;
459
+ }
460
+ try {
461
+ const stats = statSync(filepath);
462
+ if (stats.size >= this.maxFileSize) {
463
+ await this.rotateBySize();
464
+ }
465
+ } catch (error) {
466
+ const errorMessage = error instanceof Error ? error.message : String(error);
467
+ process.stderr.write(`[FileTransport] Failed to check file size: ${errorMessage}
468
+ `);
469
+ }
422
470
  }
423
- async close() {
471
+ /**
472
+ * Perform size-based rotation
473
+ * e.g. 2025-01-01.log -> 2025-01-01.1.log, 2025-01-01.1.log -> 2025-01-01.2.log
474
+ */
475
+ async rotateBySize() {
476
+ if (!this.currentFilename) {
477
+ return;
478
+ }
424
479
  await this.closeStream();
480
+ const baseName = this.currentFilename.replace(/\.log$/, "");
481
+ const files = readdirSync(this.logDir);
482
+ const relatedFiles = files.filter((file) => file.startsWith(baseName) && file.endsWith(".log")).sort().reverse();
483
+ for (const file of relatedFiles) {
484
+ const match = file.match(/\.(\d+)\.log$/);
485
+ if (match) {
486
+ const oldNum = parseInt(match[1], 10);
487
+ const newNum = oldNum + 1;
488
+ const oldPath = join(this.logDir, file);
489
+ const newPath2 = join(this.logDir, `${baseName}.${newNum}.log`);
490
+ try {
491
+ renameSync(oldPath, newPath2);
492
+ } catch (error) {
493
+ const errorMessage = error instanceof Error ? error.message : String(error);
494
+ process.stderr.write(`[FileTransport] Failed to rotate file: ${errorMessage}
495
+ `);
496
+ }
497
+ }
498
+ }
499
+ const currentPath = join(this.logDir, this.currentFilename);
500
+ const newPath = join(this.logDir, `${baseName}.1.log`);
501
+ try {
502
+ if (existsSync(currentPath)) {
503
+ renameSync(currentPath, newPath);
504
+ }
505
+ } catch (error) {
506
+ const errorMessage = error instanceof Error ? error.message : String(error);
507
+ process.stderr.write(`[FileTransport] Failed to rotate current file: ${errorMessage}
508
+ `);
509
+ }
510
+ await this.rotateStream(this.currentFilename);
511
+ }
512
+ /**
513
+ * Clean up old log files
514
+ * Delete log files that exceed the maxFiles limit
515
+ */
516
+ async cleanOldFiles() {
517
+ try {
518
+ if (!existsSync(this.logDir)) {
519
+ return;
520
+ }
521
+ const files = readdirSync(this.logDir);
522
+ const logFiles = files.filter((file) => file.endsWith(".log")).map((file) => {
523
+ const filepath = join(this.logDir, file);
524
+ const stats = statSync(filepath);
525
+ return { file, mtime: stats.mtime };
526
+ }).sort((a, b) => b.mtime.getTime() - a.mtime.getTime());
527
+ if (logFiles.length > this.maxFiles) {
528
+ const filesToDelete = logFiles.slice(this.maxFiles);
529
+ for (const { file } of filesToDelete) {
530
+ const filepath = join(this.logDir, file);
531
+ try {
532
+ unlinkSync(filepath);
533
+ } catch (error) {
534
+ const errorMessage = error instanceof Error ? error.message : String(error);
535
+ process.stderr.write(`[FileTransport] Failed to delete old file "${file}": ${errorMessage}
536
+ `);
537
+ }
538
+ }
539
+ }
540
+ } catch (error) {
541
+ const errorMessage = error instanceof Error ? error.message : String(error);
542
+ process.stderr.write(`[FileTransport] Failed to clean old files: ${errorMessage}
543
+ `);
544
+ }
545
+ }
546
+ /**
547
+ * Generate a log filename for the given date
548
+ */
549
+ getLogFilename(date) {
550
+ const year = date.getFullYear();
551
+ const month = String(date.getMonth() + 1).padStart(2, "0");
552
+ const day = String(date.getDate()).padStart(2, "0");
553
+ return `${year}-${month}-${day}.log`;
554
+ }
555
+ async close() {
556
+ await this.closeStream();
557
+ }
558
+ };
559
+ function isFileLoggingEnabled() {
560
+ return process.env.LOGGER_FILE_ENABLED === "true";
561
+ }
562
+ function getDefaultLogLevel() {
563
+ const isProduction = process.env.NODE_ENV === "production";
564
+ const isDevelopment = process.env.NODE_ENV === "development";
565
+ if (isDevelopment) {
566
+ return "debug";
425
567
  }
426
- };
427
-
428
- // src/logger/config.ts
429
- function getDefaultLogLevel() {
430
- const isProduction = process.env.NODE_ENV === "production";
431
- const isDevelopment = process.env.NODE_ENV === "development";
432
- if (isDevelopment) {
433
- return "debug";
434
- }
435
568
  if (isProduction) {
436
569
  return "info";
437
570
  }
@@ -458,6 +591,109 @@ function getFileConfig() {
458
591
  maxFiles: 10
459
592
  };
460
593
  }
594
+ function validateDirectoryWritable(dirPath) {
595
+ if (!existsSync(dirPath)) {
596
+ try {
597
+ mkdirSync(dirPath, { recursive: true });
598
+ } catch (error) {
599
+ const errorMessage = error instanceof Error ? error.message : String(error);
600
+ throw new Error(`Failed to create log directory "${dirPath}": ${errorMessage}`);
601
+ }
602
+ }
603
+ try {
604
+ accessSync(dirPath, constants.W_OK);
605
+ } catch {
606
+ throw new Error(`Log directory "${dirPath}" is not writable. Please check permissions.`);
607
+ }
608
+ const testFile = join(dirPath, ".logger-write-test");
609
+ try {
610
+ writeFileSync(testFile, "test", "utf-8");
611
+ unlinkSync(testFile);
612
+ } catch (error) {
613
+ const errorMessage = error instanceof Error ? error.message : String(error);
614
+ throw new Error(`Cannot write to log directory "${dirPath}": ${errorMessage}`);
615
+ }
616
+ }
617
+ function validateFileConfig() {
618
+ if (!isFileLoggingEnabled()) {
619
+ return;
620
+ }
621
+ const logDir = process.env.LOG_DIR;
622
+ if (!logDir) {
623
+ throw new Error(
624
+ "LOG_DIR environment variable is required when LOGGER_FILE_ENABLED=true. Example: LOG_DIR=/var/log/myapp"
625
+ );
626
+ }
627
+ validateDirectoryWritable(logDir);
628
+ }
629
+ function validateSlackConfig() {
630
+ const webhookUrl = process.env.SLACK_WEBHOOK_URL;
631
+ if (!webhookUrl) {
632
+ return;
633
+ }
634
+ if (!webhookUrl.startsWith("https://hooks.slack.com/")) {
635
+ throw new Error(
636
+ `Invalid SLACK_WEBHOOK_URL: "${webhookUrl}". Slack webhook URLs must start with "https://hooks.slack.com/"`
637
+ );
638
+ }
639
+ }
640
+ function validateEmailConfig() {
641
+ const smtpHost = process.env.SMTP_HOST;
642
+ const smtpPort = process.env.SMTP_PORT;
643
+ const emailFrom = process.env.EMAIL_FROM;
644
+ const emailTo = process.env.EMAIL_TO;
645
+ const hasAnyEmailConfig = smtpHost || smtpPort || emailFrom || emailTo;
646
+ if (!hasAnyEmailConfig) {
647
+ return;
648
+ }
649
+ const missingFields = [];
650
+ if (!smtpHost) missingFields.push("SMTP_HOST");
651
+ if (!smtpPort) missingFields.push("SMTP_PORT");
652
+ if (!emailFrom) missingFields.push("EMAIL_FROM");
653
+ if (!emailTo) missingFields.push("EMAIL_TO");
654
+ if (missingFields.length > 0) {
655
+ throw new Error(
656
+ `Email transport configuration incomplete. Missing: ${missingFields.join(", ")}. Either set all required fields or remove all email configuration.`
657
+ );
658
+ }
659
+ const port = parseInt(smtpPort, 10);
660
+ if (isNaN(port) || port < 1 || port > 65535) {
661
+ throw new Error(
662
+ `Invalid SMTP_PORT: "${smtpPort}". Must be a number between 1 and 65535.`
663
+ );
664
+ }
665
+ const emailRegex = /^[^\s@]+@[^\s@]+\.[^\s@]+$/;
666
+ if (!emailRegex.test(emailFrom)) {
667
+ throw new Error(`Invalid EMAIL_FROM format: "${emailFrom}"`);
668
+ }
669
+ const recipients = emailTo.split(",").map((e) => e.trim());
670
+ for (const email of recipients) {
671
+ if (!emailRegex.test(email)) {
672
+ throw new Error(`Invalid email address in EMAIL_TO: "${email}"`);
673
+ }
674
+ }
675
+ }
676
+ function validateEnvironment() {
677
+ const nodeEnv = process.env.NODE_ENV;
678
+ if (!nodeEnv) {
679
+ process.stderr.write(
680
+ "[Logger] Warning: NODE_ENV is not set. Defaulting to test environment.\n"
681
+ );
682
+ }
683
+ }
684
+ function validateConfig() {
685
+ try {
686
+ validateEnvironment();
687
+ validateFileConfig();
688
+ validateSlackConfig();
689
+ validateEmailConfig();
690
+ } catch (error) {
691
+ if (error instanceof Error) {
692
+ throw new Error(`[Logger] Configuration validation failed: ${error.message}`);
693
+ }
694
+ throw error;
695
+ }
696
+ }
461
697
 
462
698
  // src/logger/adapters/custom.ts
463
699
  function initializeTransports() {
@@ -472,10 +708,10 @@ function initializeTransports() {
472
708
  }
473
709
  var CustomAdapter = class _CustomAdapter {
474
710
  logger;
475
- constructor(config2) {
711
+ constructor(config) {
476
712
  this.logger = new Logger({
477
- level: config2.level,
478
- module: config2.module,
713
+ level: config.level,
714
+ module: config.module,
479
715
  transports: initializeTransports()
480
716
  });
481
717
  }
@@ -535,7 +771,205 @@ function getAdapterType() {
535
771
  }
536
772
  return "pino";
537
773
  }
538
- var logger = createAdapter(getAdapterType());
774
+ function initializeLogger() {
775
+ validateConfig();
776
+ return createAdapter(getAdapterType());
777
+ }
778
+ var logger = initializeLogger();
779
+
780
+ // src/env/config.ts
781
+ var ENV_FILE_PRIORITY = [
782
+ ".env",
783
+ // Base configuration (lowest priority)
784
+ ".env.{NODE_ENV}",
785
+ // Environment-specific
786
+ ".env.local",
787
+ // Local overrides (excluded in test)
788
+ ".env.{NODE_ENV}.local"
789
+ // Local environment-specific (highest priority)
790
+ ];
791
+ var TEST_ONLY_FILES = [
792
+ ".env.test",
793
+ ".env.test.local"
794
+ ];
795
+
796
+ // src/env/loader.ts
797
+ var envLogger = logger.child("environment");
798
+ var environmentLoaded = false;
799
+ var cachedLoadResult;
800
+ function buildFileList(basePath, nodeEnv) {
801
+ const files = [];
802
+ if (!nodeEnv) {
803
+ files.push(join(basePath, ".env"));
804
+ files.push(join(basePath, ".env.local"));
805
+ return files;
806
+ }
807
+ for (const pattern of ENV_FILE_PRIORITY) {
808
+ const fileName = pattern.replace("{NODE_ENV}", nodeEnv);
809
+ if (nodeEnv === "test" && fileName === ".env.local") {
810
+ continue;
811
+ }
812
+ if (nodeEnv === "local" && pattern === ".env.local") {
813
+ continue;
814
+ }
815
+ if (nodeEnv !== "test" && TEST_ONLY_FILES.includes(fileName)) {
816
+ continue;
817
+ }
818
+ files.push(join(basePath, fileName));
819
+ }
820
+ return files;
821
+ }
822
+ function loadSingleFile(filePath, debug) {
823
+ if (!existsSync(filePath)) {
824
+ if (debug) {
825
+ envLogger.debug("Environment file not found (optional)", {
826
+ path: filePath
827
+ });
828
+ }
829
+ return { success: false, parsed: {}, error: "File not found" };
830
+ }
831
+ try {
832
+ const result = config({ path: filePath });
833
+ if (result.error) {
834
+ envLogger.warn("Failed to parse environment file", {
835
+ path: filePath,
836
+ error: result.error.message
837
+ });
838
+ return {
839
+ success: false,
840
+ parsed: {},
841
+ error: result.error.message
842
+ };
843
+ }
844
+ const parsed = result.parsed || {};
845
+ if (debug) {
846
+ envLogger.debug("Environment file loaded successfully", {
847
+ path: filePath,
848
+ variables: Object.keys(parsed),
849
+ count: Object.keys(parsed).length
850
+ });
851
+ }
852
+ return { success: true, parsed };
853
+ } catch (error) {
854
+ const message = error instanceof Error ? error.message : "Unknown error";
855
+ envLogger.error("Error loading environment file", {
856
+ path: filePath,
857
+ error: message
858
+ });
859
+ return { success: false, parsed: {}, error: message };
860
+ }
861
+ }
862
+ function validateRequiredVars(required, debug) {
863
+ const missing = [];
864
+ for (const varName of required) {
865
+ if (!process.env[varName]) {
866
+ missing.push(varName);
867
+ }
868
+ }
869
+ if (missing.length > 0) {
870
+ const error = `Required environment variables missing: ${missing.join(", ")}`;
871
+ envLogger.error("Environment validation failed", {
872
+ missing,
873
+ required
874
+ });
875
+ throw new Error(error);
876
+ }
877
+ if (debug) {
878
+ envLogger.debug("Required environment variables validated", {
879
+ required,
880
+ allPresent: true
881
+ });
882
+ }
883
+ }
884
+ function loadEnvironment(options = {}) {
885
+ const {
886
+ basePath = process.cwd(),
887
+ customPaths = [],
888
+ debug = false,
889
+ nodeEnv = process.env.NODE_ENV || "",
890
+ required = [],
891
+ useCache = true
892
+ } = options;
893
+ if (useCache && environmentLoaded && cachedLoadResult) {
894
+ if (debug) {
895
+ envLogger.debug("Returning cached environment", {
896
+ loaded: cachedLoadResult.loaded.length,
897
+ variables: Object.keys(cachedLoadResult.parsed).length
898
+ });
899
+ }
900
+ return cachedLoadResult;
901
+ }
902
+ if (debug) {
903
+ envLogger.debug("Loading environment variables", {
904
+ basePath,
905
+ nodeEnv,
906
+ customPaths,
907
+ required
908
+ });
909
+ }
910
+ const result = {
911
+ success: true,
912
+ loaded: [],
913
+ failed: [],
914
+ parsed: {},
915
+ warnings: []
916
+ };
917
+ const standardFiles = buildFileList(basePath, nodeEnv);
918
+ const allFiles = [...standardFiles, ...customPaths];
919
+ if (debug) {
920
+ envLogger.debug("Environment files to load", {
921
+ standardFiles,
922
+ customPaths,
923
+ total: allFiles.length
924
+ });
925
+ }
926
+ const reversedFiles = [...allFiles].reverse();
927
+ for (const filePath of reversedFiles) {
928
+ const fileResult = loadSingleFile(filePath, debug);
929
+ if (fileResult.success) {
930
+ result.loaded.push(filePath);
931
+ Object.assign(result.parsed, fileResult.parsed);
932
+ if (fileResult.parsed["NODE_ENV"]) {
933
+ const fileName = filePath.split("/").pop() || filePath;
934
+ result.warnings.push(
935
+ `NODE_ENV found in ${fileName}. It's recommended to set NODE_ENV via CLI (e.g., 'spfn dev', 'spfn build') instead of .env files for consistent environment behavior.`
936
+ );
937
+ }
938
+ } else if (fileResult.error) {
939
+ result.failed.push({
940
+ path: filePath,
941
+ reason: fileResult.error
942
+ });
943
+ }
944
+ }
945
+ if (debug || result.loaded.length > 0) {
946
+ envLogger.info("Environment loading complete", {
947
+ loaded: result.loaded.length,
948
+ failed: result.failed.length,
949
+ variables: Object.keys(result.parsed).length,
950
+ files: result.loaded
951
+ });
952
+ }
953
+ if (required.length > 0) {
954
+ try {
955
+ validateRequiredVars(required, debug);
956
+ } catch (error) {
957
+ result.success = false;
958
+ result.errors = [
959
+ error instanceof Error ? error.message : "Validation failed"
960
+ ];
961
+ throw error;
962
+ }
963
+ }
964
+ if (result.warnings.length > 0) {
965
+ for (const warning of result.warnings) {
966
+ envLogger.warn(warning);
967
+ }
968
+ }
969
+ environmentLoaded = true;
970
+ cachedLoadResult = result;
971
+ return result;
972
+ }
539
973
 
540
974
  // src/errors/database-errors.ts
541
975
  var DatabaseError = class extends Error {
@@ -575,10 +1009,10 @@ var QueryError = class extends DatabaseError {
575
1009
  this.name = "QueryError";
576
1010
  }
577
1011
  };
578
- var ValidationError = class extends QueryError {
1012
+ var ConstraintViolationError = class extends QueryError {
579
1013
  constructor(message, details) {
580
1014
  super(message, 400, details);
581
- this.name = "ValidationError";
1015
+ this.name = "ConstraintViolationError";
582
1016
  }
583
1017
  };
584
1018
  var TransactionError = class extends DatabaseError {
@@ -643,11 +1077,11 @@ function fromPostgresError(error) {
643
1077
  case "23000":
644
1078
  // integrity_constraint_violation
645
1079
  case "23001":
646
- return new ValidationError(message, { code, constraint: "integrity" });
1080
+ return new ConstraintViolationError(message, { code, constraint: "integrity" });
647
1081
  case "23502":
648
- return new ValidationError(message, { code, constraint: "not_null" });
1082
+ return new ConstraintViolationError(message, { code, constraint: "not_null" });
649
1083
  case "23503":
650
- return new ValidationError(message, { code, constraint: "foreign_key" });
1084
+ return new ConstraintViolationError(message, { code, constraint: "foreign_key" });
651
1085
  case "23505":
652
1086
  const parsed = parseUniqueViolation(message);
653
1087
  if (parsed) {
@@ -655,7 +1089,7 @@ function fromPostgresError(error) {
655
1089
  }
656
1090
  return new DuplicateEntryError("field", "value");
657
1091
  case "23514":
658
- return new ValidationError(message, { code, constraint: "check" });
1092
+ return new ConstraintViolationError(message, { code, constraint: "check" });
659
1093
  // Class 40 — Transaction Rollback
660
1094
  case "40000":
661
1095
  // transaction_rollback
@@ -770,23 +1204,45 @@ async function checkConnection(client) {
770
1204
  }
771
1205
 
772
1206
  // src/db/manager/config.ts
773
- function getPoolConfig(options) {
1207
+ function parseEnvNumber(key, prodDefault, devDefault) {
774
1208
  const isProduction = process.env.NODE_ENV === "production";
775
- const max = options?.max ?? (parseInt(process.env.DB_POOL_MAX || "", 10) || (isProduction ? 20 : 10));
776
- const idleTimeout = options?.idleTimeout ?? (parseInt(process.env.DB_POOL_IDLE_TIMEOUT || "", 10) || (isProduction ? 30 : 20));
777
- return { max, idleTimeout };
1209
+ const envValue = parseInt(process.env[key] || "", 10);
1210
+ return isNaN(envValue) ? isProduction ? prodDefault : devDefault : envValue;
1211
+ }
1212
+ function parseEnvBoolean(key, defaultValue) {
1213
+ const value = process.env[key];
1214
+ if (value === void 0) return defaultValue;
1215
+ return value.toLowerCase() === "true";
1216
+ }
1217
+ function getPoolConfig(options) {
1218
+ return {
1219
+ max: options?.max ?? parseEnvNumber("DB_POOL_MAX", 20, 10),
1220
+ idleTimeout: options?.idleTimeout ?? parseEnvNumber("DB_POOL_IDLE_TIMEOUT", 30, 20)
1221
+ };
778
1222
  }
779
1223
  function getRetryConfig() {
780
- const isProduction = process.env.NODE_ENV === "production";
781
1224
  return {
782
- maxRetries: isProduction ? 5 : 3,
783
- // production: 5 retries, development: 3
784
- initialDelay: 1e3,
785
- // 1 second
786
- maxDelay: 16e3,
787
- // 16 seconds
788
- factor: 2
789
- // doubles each time (1s → 2s → 4s → 8s → 16s)
1225
+ maxRetries: parseEnvNumber("DB_RETRY_MAX", 5, 3),
1226
+ initialDelay: parseEnvNumber("DB_RETRY_INITIAL_DELAY", 100, 50),
1227
+ maxDelay: parseEnvNumber("DB_RETRY_MAX_DELAY", 1e4, 5e3),
1228
+ factor: parseEnvNumber("DB_RETRY_FACTOR", 2, 2)
1229
+ };
1230
+ }
1231
+ function buildHealthCheckConfig(options) {
1232
+ return {
1233
+ enabled: options?.enabled ?? parseEnvBoolean("DB_HEALTH_CHECK_ENABLED", true),
1234
+ interval: options?.interval ?? parseEnvNumber("DB_HEALTH_CHECK_INTERVAL", 6e4, 6e4),
1235
+ reconnect: options?.reconnect ?? parseEnvBoolean("DB_HEALTH_CHECK_RECONNECT", true),
1236
+ maxRetries: options?.maxRetries ?? parseEnvNumber("DB_HEALTH_CHECK_MAX_RETRIES", 3, 3),
1237
+ retryInterval: options?.retryInterval ?? parseEnvNumber("DB_HEALTH_CHECK_RETRY_INTERVAL", 5e3, 5e3)
1238
+ };
1239
+ }
1240
+ function buildMonitoringConfig(options) {
1241
+ const isDevelopment = process.env.NODE_ENV !== "production";
1242
+ return {
1243
+ enabled: options?.enabled ?? parseEnvBoolean("DB_MONITORING_ENABLED", isDevelopment),
1244
+ slowThreshold: options?.slowThreshold ?? parseEnvNumber("DB_MONITORING_SLOW_THRESHOLD", 1e3, 1e3),
1245
+ logQueries: options?.logQueries ?? parseEnvBoolean("DB_MONITORING_LOG_QUERIES", false)
790
1246
  };
791
1247
  }
792
1248
 
@@ -795,81 +1251,113 @@ var dbLogger2 = logger.child("database");
795
1251
  function hasDatabaseConfig() {
796
1252
  return !!(process.env.DATABASE_URL || process.env.DATABASE_WRITE_URL || process.env.DATABASE_READ_URL);
797
1253
  }
1254
+ function detectDatabasePattern() {
1255
+ if (process.env.DATABASE_WRITE_URL && process.env.DATABASE_READ_URL) {
1256
+ return {
1257
+ type: "write-read",
1258
+ write: process.env.DATABASE_WRITE_URL,
1259
+ read: process.env.DATABASE_READ_URL
1260
+ };
1261
+ }
1262
+ if (process.env.DATABASE_URL && process.env.DATABASE_REPLICA_URL) {
1263
+ return {
1264
+ type: "legacy",
1265
+ primary: process.env.DATABASE_URL,
1266
+ replica: process.env.DATABASE_REPLICA_URL
1267
+ };
1268
+ }
1269
+ if (process.env.DATABASE_URL) {
1270
+ return {
1271
+ type: "single",
1272
+ url: process.env.DATABASE_URL
1273
+ };
1274
+ }
1275
+ if (process.env.DATABASE_WRITE_URL) {
1276
+ return {
1277
+ type: "single",
1278
+ url: process.env.DATABASE_WRITE_URL
1279
+ };
1280
+ }
1281
+ return { type: "none" };
1282
+ }
1283
+ async function createWriteReadClients(writeUrl, readUrl, poolConfig, retryConfig) {
1284
+ const writeClient = await createDatabaseConnection(writeUrl, poolConfig, retryConfig);
1285
+ const readClient = await createDatabaseConnection(readUrl, poolConfig, retryConfig);
1286
+ return {
1287
+ write: drizzle(writeClient),
1288
+ read: drizzle(readClient),
1289
+ writeClient,
1290
+ readClient
1291
+ };
1292
+ }
1293
+ async function createSingleClient(url, poolConfig, retryConfig) {
1294
+ const client = await createDatabaseConnection(url, poolConfig, retryConfig);
1295
+ const db = drizzle(client);
1296
+ return {
1297
+ write: db,
1298
+ read: db,
1299
+ writeClient: client,
1300
+ readClient: client
1301
+ };
1302
+ }
798
1303
  async function createDatabaseFromEnv(options) {
799
1304
  if (!hasDatabaseConfig()) {
800
- config({ path: ".env.local" });
1305
+ dbLogger2.debug("No DATABASE_URL found, loading environment variables");
1306
+ const result = loadEnvironment({
1307
+ debug: true
1308
+ });
1309
+ dbLogger2.debug("Environment variables loaded", {
1310
+ success: result.success,
1311
+ loaded: result.loaded.length,
1312
+ hasDatabaseUrl: !!process.env.DATABASE_URL,
1313
+ hasWriteUrl: !!process.env.DATABASE_WRITE_URL,
1314
+ hasReadUrl: !!process.env.DATABASE_READ_URL
1315
+ });
801
1316
  }
802
1317
  if (!hasDatabaseConfig()) {
1318
+ dbLogger2.warn("No database configuration found", {
1319
+ cwd: process.cwd(),
1320
+ nodeEnv: process.env.NODE_ENV,
1321
+ checkedVars: ["DATABASE_URL", "DATABASE_WRITE_URL", "DATABASE_READ_URL"]
1322
+ });
803
1323
  return { write: void 0, read: void 0 };
804
1324
  }
805
1325
  try {
806
1326
  const poolConfig = getPoolConfig(options?.pool);
807
1327
  const retryConfig = getRetryConfig();
808
- if (process.env.DATABASE_WRITE_URL && process.env.DATABASE_READ_URL) {
809
- const writeClient2 = await createDatabaseConnection(
810
- process.env.DATABASE_WRITE_URL,
811
- poolConfig,
812
- retryConfig
813
- );
814
- const readClient2 = await createDatabaseConnection(
815
- process.env.DATABASE_READ_URL,
816
- poolConfig,
817
- retryConfig
818
- );
819
- return {
820
- write: drizzle(writeClient2),
821
- read: drizzle(readClient2),
822
- writeClient: writeClient2,
823
- readClient: readClient2
824
- };
825
- }
826
- if (process.env.DATABASE_URL && process.env.DATABASE_REPLICA_URL) {
827
- const writeClient2 = await createDatabaseConnection(
828
- process.env.DATABASE_URL,
829
- poolConfig,
830
- retryConfig
831
- );
832
- const readClient2 = await createDatabaseConnection(
833
- process.env.DATABASE_REPLICA_URL,
834
- poolConfig,
835
- retryConfig
836
- );
837
- return {
838
- write: drizzle(writeClient2),
839
- read: drizzle(readClient2),
840
- writeClient: writeClient2,
841
- readClient: readClient2
842
- };
843
- }
844
- if (process.env.DATABASE_URL) {
845
- const client = await createDatabaseConnection(
846
- process.env.DATABASE_URL,
847
- poolConfig,
848
- retryConfig
849
- );
850
- const db2 = drizzle(client);
851
- return {
852
- write: db2,
853
- read: db2,
854
- writeClient: client,
855
- readClient: client
856
- };
857
- }
858
- if (process.env.DATABASE_WRITE_URL) {
859
- const client = await createDatabaseConnection(
860
- process.env.DATABASE_WRITE_URL,
861
- poolConfig,
862
- retryConfig
863
- );
864
- const db2 = drizzle(client);
865
- return {
866
- write: db2,
867
- read: db2,
868
- writeClient: client,
869
- readClient: client
870
- };
1328
+ const pattern = detectDatabasePattern();
1329
+ switch (pattern.type) {
1330
+ case "write-read":
1331
+ dbLogger2.debug("Using write-read pattern", {
1332
+ write: pattern.write.replace(/:[^:@]+@/, ":***@"),
1333
+ read: pattern.read.replace(/:[^:@]+@/, ":***@")
1334
+ });
1335
+ return await createWriteReadClients(
1336
+ pattern.write,
1337
+ pattern.read,
1338
+ poolConfig,
1339
+ retryConfig
1340
+ );
1341
+ case "legacy":
1342
+ dbLogger2.debug("Using legacy replica pattern", {
1343
+ primary: pattern.primary.replace(/:[^:@]+@/, ":***@"),
1344
+ replica: pattern.replica.replace(/:[^:@]+@/, ":***@")
1345
+ });
1346
+ return await createWriteReadClients(
1347
+ pattern.primary,
1348
+ pattern.replica,
1349
+ poolConfig,
1350
+ retryConfig
1351
+ );
1352
+ case "single":
1353
+ dbLogger2.debug("Using single database pattern", {
1354
+ url: pattern.url.replace(/:[^:@]+@/, ":***@")
1355
+ });
1356
+ return await createSingleClient(pattern.url, poolConfig, retryConfig);
1357
+ case "none":
1358
+ dbLogger2.warn("No database pattern detected");
1359
+ return { write: void 0, read: void 0 };
871
1360
  }
872
- return { write: void 0, read: void 0 };
873
1361
  } catch (error) {
874
1362
  const message = error instanceof Error ? error.message : "Unknown error";
875
1363
  dbLogger2.error("Failed to create database connection", {
@@ -880,57 +1368,129 @@ async function createDatabaseFromEnv(options) {
880
1368
  hasUrl: !!process.env.DATABASE_URL,
881
1369
  hasReplicaUrl: !!process.env.DATABASE_REPLICA_URL
882
1370
  });
883
- return { write: void 0, read: void 0 };
1371
+ throw new Error(`Database connection failed: ${message}`, { cause: error });
884
1372
  }
885
1373
  }
886
1374
 
887
- // src/db/manager/manager.ts
1375
+ // src/db/manager/global-state.ts
1376
+ var getWriteInstance = () => globalThis.__SPFN_DB_WRITE__;
1377
+ var setWriteInstance = (instance) => {
1378
+ globalThis.__SPFN_DB_WRITE__ = instance;
1379
+ };
1380
+ var getReadInstance = () => globalThis.__SPFN_DB_READ__;
1381
+ var setReadInstance = (instance) => {
1382
+ globalThis.__SPFN_DB_READ__ = instance;
1383
+ };
1384
+ var getWriteClient = () => globalThis.__SPFN_DB_WRITE_CLIENT__;
1385
+ var setWriteClient = (client) => {
1386
+ globalThis.__SPFN_DB_WRITE_CLIENT__ = client;
1387
+ };
1388
+ var getReadClient = () => globalThis.__SPFN_DB_READ_CLIENT__;
1389
+ var setReadClient = (client) => {
1390
+ globalThis.__SPFN_DB_READ_CLIENT__ = client;
1391
+ };
1392
+ var getHealthCheckInterval = () => globalThis.__SPFN_DB_HEALTH_CHECK__;
1393
+ var setHealthCheckInterval = (interval) => {
1394
+ globalThis.__SPFN_DB_HEALTH_CHECK__ = interval;
1395
+ };
1396
+ var setMonitoringConfig = (config) => {
1397
+ globalThis.__SPFN_DB_MONITORING__ = config;
1398
+ };
1399
+
1400
+ // src/db/manager/health-check.ts
888
1401
  var dbLogger3 = logger.child("database");
889
- var writeInstance;
890
- var readInstance;
891
- var writeClient;
892
- var readClient;
893
- var healthCheckInterval;
894
- var monitoringConfig;
1402
+ function startHealthCheck(config, options, getDatabase2, closeDatabase2) {
1403
+ const healthCheck = getHealthCheckInterval();
1404
+ if (healthCheck) {
1405
+ dbLogger3.debug("Health check already running");
1406
+ return;
1407
+ }
1408
+ dbLogger3.info("Starting database health check", {
1409
+ interval: `${config.interval}ms`,
1410
+ reconnect: config.reconnect
1411
+ });
1412
+ const interval = setInterval(async () => {
1413
+ try {
1414
+ const write = getDatabase2("write");
1415
+ const read = getDatabase2("read");
1416
+ if (write) {
1417
+ await write.execute("SELECT 1");
1418
+ }
1419
+ if (read && read !== write) {
1420
+ await read.execute("SELECT 1");
1421
+ }
1422
+ } catch (error) {
1423
+ const message = error instanceof Error ? error.message : "Unknown error";
1424
+ dbLogger3.error("Database health check failed", { error: message });
1425
+ if (config.reconnect) {
1426
+ await attemptReconnection(config, options, closeDatabase2);
1427
+ }
1428
+ }
1429
+ }, config.interval);
1430
+ setHealthCheckInterval(interval);
1431
+ }
1432
+ async function attemptReconnection(config, options, closeDatabase2) {
1433
+ dbLogger3.warn("Attempting database reconnection", {
1434
+ maxRetries: config.maxRetries,
1435
+ retryInterval: `${config.retryInterval}ms`
1436
+ });
1437
+ for (let attempt = 1; attempt <= config.maxRetries; attempt++) {
1438
+ try {
1439
+ dbLogger3.debug(`Reconnection attempt ${attempt}/${config.maxRetries}`);
1440
+ await closeDatabase2();
1441
+ await new Promise((resolve) => setTimeout(resolve, config.retryInterval));
1442
+ const result = await createDatabaseFromEnv(options);
1443
+ if (result.write) {
1444
+ await result.write.execute("SELECT 1");
1445
+ setWriteInstance(result.write);
1446
+ setReadInstance(result.read);
1447
+ setWriteClient(result.writeClient);
1448
+ setReadClient(result.readClient);
1449
+ dbLogger3.info("Database reconnection successful", { attempt });
1450
+ return;
1451
+ }
1452
+ } catch (error) {
1453
+ const message = error instanceof Error ? error.message : "Unknown error";
1454
+ dbLogger3.error(`Reconnection attempt ${attempt} failed`, {
1455
+ error: message,
1456
+ attempt,
1457
+ maxRetries: config.maxRetries
1458
+ });
1459
+ if (attempt === config.maxRetries) {
1460
+ dbLogger3.error("Max reconnection attempts reached, giving up");
1461
+ }
1462
+ }
1463
+ }
1464
+ }
1465
+ function stopHealthCheck() {
1466
+ const healthCheck = getHealthCheckInterval();
1467
+ if (healthCheck) {
1468
+ clearInterval(healthCheck);
1469
+ setHealthCheckInterval(void 0);
1470
+ dbLogger3.info("Database health check stopped");
1471
+ }
1472
+ }
1473
+
1474
+ // src/db/manager/manager.ts
1475
+ var dbLogger4 = logger.child("database");
895
1476
  function getDatabase(type) {
1477
+ const writeInst = getWriteInstance();
1478
+ const readInst = getReadInstance();
1479
+ dbLogger4.debug(`getDatabase() called with type=${type}, writeInstance=${!!writeInst}, readInstance=${!!readInst}`);
896
1480
  if (type === "read") {
897
- return readInstance ?? writeInstance;
1481
+ return readInst ?? writeInst;
898
1482
  }
899
- return writeInstance;
1483
+ return writeInst;
900
1484
  }
901
1485
  function setDatabase(write, read) {
902
- writeInstance = write;
903
- readInstance = read ?? write;
904
- }
905
- function getHealthCheckConfig(options) {
906
- const parseBoolean = (value, defaultValue) => {
907
- if (value === void 0) return defaultValue;
908
- return value.toLowerCase() === "true";
909
- };
910
- return {
911
- enabled: options?.enabled ?? parseBoolean(process.env.DB_HEALTH_CHECK_ENABLED, true),
912
- interval: options?.interval ?? (parseInt(process.env.DB_HEALTH_CHECK_INTERVAL || "", 10) || 6e4),
913
- reconnect: options?.reconnect ?? parseBoolean(process.env.DB_HEALTH_CHECK_RECONNECT, true),
914
- maxRetries: options?.maxRetries ?? (parseInt(process.env.DB_HEALTH_CHECK_MAX_RETRIES || "", 10) || 3),
915
- retryInterval: options?.retryInterval ?? (parseInt(process.env.DB_HEALTH_CHECK_RETRY_INTERVAL || "", 10) || 5e3)
916
- };
917
- }
918
- function getMonitoringConfig(options) {
919
- const isDevelopment = process.env.NODE_ENV !== "production";
920
- const parseBoolean = (value, defaultValue) => {
921
- if (value === void 0) return defaultValue;
922
- return value.toLowerCase() === "true";
923
- };
924
- return {
925
- enabled: options?.enabled ?? parseBoolean(process.env.DB_MONITORING_ENABLED, isDevelopment),
926
- slowThreshold: options?.slowThreshold ?? (parseInt(process.env.DB_MONITORING_SLOW_THRESHOLD || "", 10) || 1e3),
927
- logQueries: options?.logQueries ?? parseBoolean(process.env.DB_MONITORING_LOG_QUERIES, false)
928
- };
1486
+ setWriteInstance(write);
1487
+ setReadInstance(read ?? write);
929
1488
  }
930
1489
  async function initDatabase(options) {
931
- if (writeInstance) {
932
- dbLogger3.debug("Database already initialized");
933
- return { write: writeInstance, read: readInstance };
1490
+ const writeInst = getWriteInstance();
1491
+ if (writeInst) {
1492
+ dbLogger4.debug("Database already initialized");
1493
+ return { write: writeInst, read: getReadInstance() };
934
1494
  }
935
1495
  const result = await createDatabaseFromEnv(options);
936
1496
  if (result.write) {
@@ -939,178 +1499,329 @@ async function initDatabase(options) {
939
1499
  if (result.read && result.read !== result.write) {
940
1500
  await result.read.execute("SELECT 1");
941
1501
  }
942
- writeInstance = result.write;
943
- readInstance = result.read;
944
- writeClient = result.writeClient;
945
- readClient = result.readClient;
1502
+ setWriteInstance(result.write);
1503
+ setReadInstance(result.read);
1504
+ setWriteClient(result.writeClient);
1505
+ setReadClient(result.readClient);
946
1506
  const hasReplica = result.read && result.read !== result.write;
947
- dbLogger3.info(
1507
+ dbLogger4.info(
948
1508
  hasReplica ? "Database connected (Primary + Replica)" : "Database connected"
949
1509
  );
950
- const healthCheckConfig = getHealthCheckConfig(options?.healthCheck);
1510
+ const healthCheckConfig = buildHealthCheckConfig(options?.healthCheck);
951
1511
  if (healthCheckConfig.enabled) {
952
- startHealthCheck(healthCheckConfig);
1512
+ startHealthCheck(healthCheckConfig, options, getDatabase, closeDatabase);
953
1513
  }
954
- monitoringConfig = getMonitoringConfig(options?.monitoring);
955
- if (monitoringConfig.enabled) {
956
- dbLogger3.info("Database query monitoring enabled", {
957
- slowThreshold: `${monitoringConfig.slowThreshold}ms`,
958
- logQueries: monitoringConfig.logQueries
1514
+ const monConfig = buildMonitoringConfig(options?.monitoring);
1515
+ setMonitoringConfig(monConfig);
1516
+ if (monConfig.enabled) {
1517
+ dbLogger4.info("Database query monitoring enabled", {
1518
+ slowThreshold: `${monConfig.slowThreshold}ms`,
1519
+ logQueries: monConfig.logQueries
959
1520
  });
960
1521
  }
961
1522
  } catch (error) {
962
1523
  const message = error instanceof Error ? error.message : "Unknown error";
963
- dbLogger3.error("Database connection failed", { error: message });
1524
+ dbLogger4.error("Database connection failed", { error: message });
964
1525
  await closeDatabase();
965
- return { write: void 0, read: void 0 };
1526
+ throw new Error(`Database connection test failed: ${message}`, { cause: error });
966
1527
  }
967
1528
  } else {
968
- dbLogger3.warn("No database configuration found");
969
- dbLogger3.warn("Set DATABASE_URL environment variable to enable database");
1529
+ dbLogger4.warn("No database configuration found");
1530
+ dbLogger4.warn("Set DATABASE_URL environment variable to enable database");
970
1531
  }
971
- return { write: writeInstance, read: readInstance };
1532
+ return { write: getWriteInstance(), read: getReadInstance() };
972
1533
  }
973
1534
  async function closeDatabase() {
974
- if (!writeInstance && !readInstance) {
975
- dbLogger3.debug("No database connections to close");
1535
+ const writeInst = getWriteInstance();
1536
+ const readInst = getReadInstance();
1537
+ if (!writeInst && !readInst) {
1538
+ dbLogger4.debug("No database connections to close");
976
1539
  return;
977
1540
  }
978
1541
  stopHealthCheck();
979
1542
  try {
980
1543
  const closePromises = [];
981
- if (writeClient) {
982
- dbLogger3.debug("Closing write connection...");
1544
+ const writeC = getWriteClient();
1545
+ if (writeC) {
1546
+ dbLogger4.debug("Closing write connection...");
983
1547
  closePromises.push(
984
- writeClient.end({ timeout: 5 }).then(() => dbLogger3.debug("Write connection closed")).catch((err) => dbLogger3.error("Error closing write connection", err))
1548
+ writeC.end({ timeout: 5 }).then(() => dbLogger4.debug("Write connection closed")).catch((err) => dbLogger4.error("Error closing write connection", err))
985
1549
  );
986
1550
  }
987
- if (readClient && readClient !== writeClient) {
988
- dbLogger3.debug("Closing read connection...");
1551
+ const readC = getReadClient();
1552
+ if (readC && readC !== writeC) {
1553
+ dbLogger4.debug("Closing read connection...");
989
1554
  closePromises.push(
990
- readClient.end({ timeout: 5 }).then(() => dbLogger3.debug("Read connection closed")).catch((err) => dbLogger3.error("Error closing read connection", err))
1555
+ readC.end({ timeout: 5 }).then(() => dbLogger4.debug("Read connection closed")).catch((err) => dbLogger4.error("Error closing read connection", err))
991
1556
  );
992
1557
  }
993
1558
  await Promise.all(closePromises);
994
- dbLogger3.info("All database connections closed");
1559
+ dbLogger4.info("All database connections closed");
995
1560
  } catch (error) {
996
- dbLogger3.error("Error during database cleanup", error);
1561
+ dbLogger4.error("Error during database cleanup", error);
997
1562
  throw error;
998
1563
  } finally {
999
- writeInstance = void 0;
1000
- readInstance = void 0;
1001
- writeClient = void 0;
1002
- readClient = void 0;
1003
- monitoringConfig = void 0;
1564
+ setWriteInstance(void 0);
1565
+ setReadInstance(void 0);
1566
+ setWriteClient(void 0);
1567
+ setReadClient(void 0);
1568
+ setMonitoringConfig(void 0);
1004
1569
  }
1005
1570
  }
1006
1571
  function getDatabaseInfo() {
1572
+ const writeInst = getWriteInstance();
1573
+ const readInst = getReadInstance();
1007
1574
  return {
1008
- hasWrite: !!writeInstance,
1009
- hasRead: !!readInstance,
1010
- isReplica: !!(readInstance && readInstance !== writeInstance)
1575
+ hasWrite: !!writeInst,
1576
+ hasRead: !!readInst,
1577
+ isReplica: !!(readInst && readInst !== writeInst)
1011
1578
  };
1012
1579
  }
1013
- function startHealthCheck(config2) {
1014
- if (healthCheckInterval) {
1015
- dbLogger3.debug("Health check already running");
1016
- return;
1017
- }
1018
- dbLogger3.info("Starting database health check", {
1019
- interval: `${config2.interval}ms`,
1020
- reconnect: config2.reconnect
1021
- });
1022
- healthCheckInterval = setInterval(async () => {
1023
- try {
1024
- const write = getDatabase("write");
1025
- const read = getDatabase("read");
1026
- if (write) {
1027
- await write.execute("SELECT 1");
1028
- }
1029
- if (read && read !== write) {
1030
- await read.execute("SELECT 1");
1580
+ function expandGlobPattern(pattern) {
1581
+ if (!pattern.includes("*")) {
1582
+ return existsSync(pattern) ? [pattern] : [];
1583
+ }
1584
+ const files = [];
1585
+ if (pattern.includes("**")) {
1586
+ const [baseDir, ...rest] = pattern.split("**");
1587
+ const extension = rest.join("").replace(/[\/\\]\*\./g, "").trim();
1588
+ const scanRecursive = (dir) => {
1589
+ if (!existsSync(dir)) return;
1590
+ try {
1591
+ const entries = readdirSync(dir);
1592
+ for (const entry of entries) {
1593
+ const fullPath = join(dir, entry);
1594
+ try {
1595
+ const stat = statSync(fullPath);
1596
+ if (stat.isDirectory()) {
1597
+ scanRecursive(fullPath);
1598
+ } else if (stat.isFile()) {
1599
+ if (!extension || fullPath.endsWith(extension)) {
1600
+ files.push(fullPath);
1601
+ }
1602
+ }
1603
+ } catch {
1604
+ }
1605
+ }
1606
+ } catch {
1031
1607
  }
1032
- dbLogger3.debug("Database health check passed");
1033
- } catch (error) {
1034
- const message = error instanceof Error ? error.message : "Unknown error";
1035
- dbLogger3.error("Database health check failed", { error: message });
1036
- if (config2.reconnect) {
1037
- await attemptReconnection(config2);
1608
+ };
1609
+ scanRecursive(baseDir.trim() || ".");
1610
+ } else if (pattern.includes("*")) {
1611
+ const dir = dirname(pattern);
1612
+ const filePattern = basename(pattern);
1613
+ if (!existsSync(dir)) return [];
1614
+ try {
1615
+ const entries = readdirSync(dir);
1616
+ for (const entry of entries) {
1617
+ const fullPath = join(dir, entry);
1618
+ try {
1619
+ const stat = statSync(fullPath);
1620
+ if (stat.isFile()) {
1621
+ if (filePattern === "*" || filePattern.startsWith("*.") && entry.endsWith(filePattern.slice(1))) {
1622
+ files.push(fullPath);
1623
+ }
1624
+ }
1625
+ } catch {
1626
+ }
1038
1627
  }
1628
+ } catch {
1039
1629
  }
1040
- }, config2.interval);
1630
+ }
1631
+ return files;
1041
1632
  }
1042
- async function attemptReconnection(config2) {
1043
- dbLogger3.warn("Attempting database reconnection", {
1044
- maxRetries: config2.maxRetries,
1045
- retryInterval: `${config2.retryInterval}ms`
1046
- });
1047
- for (let attempt = 1; attempt <= config2.maxRetries; attempt++) {
1633
+ function discoverPackageSchemas(cwd) {
1634
+ const schemas = [];
1635
+ const nodeModulesPath = join(cwd, "node_modules");
1636
+ if (!existsSync(nodeModulesPath)) {
1637
+ return schemas;
1638
+ }
1639
+ const projectPkgPath = join(cwd, "package.json");
1640
+ let directDeps = /* @__PURE__ */ new Set();
1641
+ if (existsSync(projectPkgPath)) {
1048
1642
  try {
1049
- dbLogger3.debug(`Reconnection attempt ${attempt}/${config2.maxRetries}`);
1050
- await closeDatabase();
1051
- await new Promise((resolve) => setTimeout(resolve, config2.retryInterval));
1052
- const result = await createDatabaseFromEnv();
1053
- if (result.write) {
1054
- await result.write.execute("SELECT 1");
1055
- writeInstance = result.write;
1056
- readInstance = result.read;
1057
- writeClient = result.writeClient;
1058
- readClient = result.readClient;
1059
- dbLogger3.info("Database reconnection successful", { attempt });
1060
- return;
1643
+ const projectPkg = JSON.parse(readFileSync(projectPkgPath, "utf-8"));
1644
+ directDeps = /* @__PURE__ */ new Set([
1645
+ ...Object.keys(projectPkg.dependencies || {}),
1646
+ ...Object.keys(projectPkg.devDependencies || {})
1647
+ ]);
1648
+ } catch (error) {
1649
+ }
1650
+ }
1651
+ const checkPackage = (_pkgName, pkgPath) => {
1652
+ const pkgJsonPath = join(pkgPath, "package.json");
1653
+ if (!existsSync(pkgJsonPath)) return;
1654
+ try {
1655
+ const pkgJson = JSON.parse(readFileSync(pkgJsonPath, "utf-8"));
1656
+ if (pkgJson.spfn?.schemas) {
1657
+ const packageSchemas = Array.isArray(pkgJson.spfn.schemas) ? pkgJson.spfn.schemas : [pkgJson.spfn.schemas];
1658
+ for (const schema of packageSchemas) {
1659
+ const absolutePath = join(pkgPath, schema);
1660
+ const expandedFiles = expandGlobPattern(absolutePath);
1661
+ const schemaFiles = expandedFiles.filter(
1662
+ (file) => !file.endsWith("/index.js") && !file.endsWith("/index.ts") && !file.endsWith("/index.mjs") && !file.endsWith("\\index.js") && !file.endsWith("\\index.ts") && !file.endsWith("\\index.mjs")
1663
+ );
1664
+ schemas.push(...schemaFiles);
1665
+ }
1061
1666
  }
1062
1667
  } catch (error) {
1063
- const message = error instanceof Error ? error.message : "Unknown error";
1064
- dbLogger3.error(`Reconnection attempt ${attempt} failed`, {
1065
- error: message,
1066
- attempt,
1067
- maxRetries: config2.maxRetries
1068
- });
1069
- if (attempt === config2.maxRetries) {
1070
- dbLogger3.error("Max reconnection attempts reached, giving up");
1668
+ }
1669
+ };
1670
+ const spfnDir = join(nodeModulesPath, "@spfn");
1671
+ if (existsSync(spfnDir)) {
1672
+ try {
1673
+ const spfnPackages = readdirSync(spfnDir);
1674
+ for (const pkg of spfnPackages) {
1675
+ checkPackage(`@spfn/${pkg}`, join(spfnDir, pkg));
1071
1676
  }
1677
+ } catch (error) {
1072
1678
  }
1073
1679
  }
1074
- }
1075
- function stopHealthCheck() {
1076
- if (healthCheckInterval) {
1077
- clearInterval(healthCheckInterval);
1078
- healthCheckInterval = void 0;
1079
- dbLogger3.info("Database health check stopped");
1680
+ for (const depName of directDeps) {
1681
+ if (depName.startsWith("@spfn/")) continue;
1682
+ const pkgPath = depName.startsWith("@") ? join(nodeModulesPath, ...depName.split("/")) : join(nodeModulesPath, depName);
1683
+ checkPackage(depName, pkgPath);
1080
1684
  }
1685
+ return schemas;
1081
1686
  }
1082
- function getDatabaseMonitoringConfig() {
1083
- return monitoringConfig;
1687
+ function detectDialect(url) {
1688
+ if (url.startsWith("postgres://") || url.startsWith("postgresql://")) {
1689
+ return "postgresql";
1690
+ }
1691
+ if (url.startsWith("mysql://")) {
1692
+ return "mysql";
1693
+ }
1694
+ if (url.startsWith("sqlite://") || url.includes(".db") || url.includes(".sqlite")) {
1695
+ return "sqlite";
1696
+ }
1697
+ throw new Error(
1698
+ `Unsupported database URL format: ${url}. Supported: postgresql://, mysql://, sqlite://`
1699
+ );
1084
1700
  }
1085
-
1086
- // src/db/manager/instance.ts
1087
- var db = new Proxy({}, {
1088
- get(_target, prop) {
1089
- const instance = getDatabase("write");
1090
- if (!instance) {
1701
+ function getDrizzleConfig(options = {}) {
1702
+ const databaseUrl = options.databaseUrl ?? process.env.DATABASE_URL;
1703
+ if (!databaseUrl) {
1704
+ throw new Error(
1705
+ "DATABASE_URL is required. Set it in .env or pass it to getDrizzleConfig()"
1706
+ );
1707
+ }
1708
+ const dialect = options.dialect ?? detectDialect(databaseUrl);
1709
+ const out = options.out ?? "./src/server/drizzle";
1710
+ if (options.packageFilter) {
1711
+ const packageSchemas2 = options.disablePackageDiscovery ? [] : discoverPackageSchemas(options.cwd ?? process.cwd());
1712
+ const filteredSchemas = packageSchemas2.filter(
1713
+ (schemaPath) => schemaPath.includes(`node_modules/${options.packageFilter}/`)
1714
+ );
1715
+ if (filteredSchemas.length === 0) {
1091
1716
  throw new Error(
1092
- "Database not initialized. Set DATABASE_URL environment variable or call initDatabase() first."
1717
+ `No schemas found for package ${options.packageFilter}. Make sure the package is installed and has "spfn.schemas" in package.json.`
1093
1718
  );
1094
1719
  }
1095
- return instance[prop];
1720
+ const schema2 = filteredSchemas.length === 1 ? filteredSchemas[0] : filteredSchemas;
1721
+ return {
1722
+ schema: schema2,
1723
+ out,
1724
+ dialect,
1725
+ dbCredentials: getDbCredentials(dialect, databaseUrl)
1726
+ };
1096
1727
  }
1728
+ const userSchema = options.schema ?? "./src/server/entities/**/*.ts";
1729
+ const userSchemas = Array.isArray(userSchema) ? userSchema : [userSchema];
1730
+ const packageSchemas = options.disablePackageDiscovery ? [] : discoverPackageSchemas(options.cwd ?? process.cwd());
1731
+ const allSchemas = [...userSchemas, ...packageSchemas];
1732
+ const schema = allSchemas.length === 1 ? allSchemas[0] : allSchemas;
1733
+ return {
1734
+ schema,
1735
+ out,
1736
+ dialect,
1737
+ dbCredentials: getDbCredentials(dialect, databaseUrl)
1738
+ };
1739
+ }
1740
+ function getDbCredentials(dialect, url) {
1741
+ switch (dialect) {
1742
+ case "postgresql":
1743
+ case "mysql":
1744
+ return { url };
1745
+ case "sqlite":
1746
+ const dbPath = url.replace("sqlite://", "").replace("sqlite:", "");
1747
+ return { url: dbPath };
1748
+ default:
1749
+ throw new Error(`Unsupported dialect: ${dialect}`);
1750
+ }
1751
+ }
1752
+ function generateDrizzleConfigFile(options = {}) {
1753
+ const config = getDrizzleConfig(options);
1754
+ const schemaValue = Array.isArray(config.schema) ? `[
1755
+ ${config.schema.map((s) => `'${s}'`).join(",\n ")}
1756
+ ]` : `'${config.schema}'`;
1757
+ return `import { defineConfig } from 'drizzle-kit';
1758
+
1759
+ export default defineConfig({
1760
+ schema: ${schemaValue},
1761
+ out: '${config.out}',
1762
+ dialect: '${config.dialect}',
1763
+ dbCredentials: ${JSON.stringify(config.dbCredentials, null, 4)},
1097
1764
  });
1098
- function getRawDb(type = "write") {
1099
- const instance = getDatabase(type);
1100
- if (!instance) {
1101
- throw new Error(
1102
- "Database not initialized. Set DATABASE_URL environment variable or call initDatabase() first."
1103
- );
1765
+ `;
1766
+ }
1767
+ function id() {
1768
+ return bigserial("id", { mode: "number" }).primaryKey();
1769
+ }
1770
+ function timestamps(options) {
1771
+ const updatedAtColumn = timestamp("updated_at", { withTimezone: true, mode: "date" }).defaultNow().notNull();
1772
+ if (options?.autoUpdate) {
1773
+ updatedAtColumn.__autoUpdate = true;
1104
1774
  }
1105
- return instance;
1775
+ return {
1776
+ createdAt: timestamp("created_at", { withTimezone: true, mode: "date" }).defaultNow().notNull(),
1777
+ updatedAt: updatedAtColumn
1778
+ };
1779
+ }
1780
+ function foreignKey(name, reference, options) {
1781
+ return bigserial(`${name}_id`, { mode: "number" }).notNull().references(reference, { onDelete: options?.onDelete ?? "cascade" });
1106
1782
  }
1783
+ function optionalForeignKey(name, reference, options) {
1784
+ return bigserial(`${name}_id`, { mode: "number" }).references(reference, { onDelete: options?.onDelete ?? "set null" });
1785
+ }
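id(), timestamps(), foreignKey() and optionalForeignKey() above wrap drizzle-orm's pg-core column builders. A sketch of an entity file using them; the tables, columns and import path are illustrative only:

// Sketch: assumes the helpers are exported from '@spfn/core/db'.
import { pgTable, varchar } from 'drizzle-orm/pg-core';
import { id, timestamps, foreignKey, optionalForeignKey } from '@spfn/core/db';

export const users = pgTable('users', {
    id: id(),                             // bigserial primary key
    email: varchar('email', { length: 255 }).notNull(),
    ...timestamps({ autoUpdate: true }),  // created_at / updated_at; updated_at flagged for auto-update
});

export const posts = pgTable('posts', {
    id: id(),
    title: varchar('title', { length: 255 }).notNull(),
    authorId: foreignKey('author', () => users.id),          // author_id, NOT NULL, ON DELETE CASCADE
    editorId: optionalForeignKey('editor', () => users.id),  // editor_id, nullable, ON DELETE SET NULL
    ...timestamps(),
});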
1786
+ function createFunctionSchema(packageName) {
1787
+ const schemaName = packageNameToSchema(packageName);
1788
+ return pgSchema(schemaName);
1789
+ }
1790
+ function packageNameToSchema(packageName) {
1791
+ return packageName.replace("@", "").replace("/", "_").replace(/-/g, "_");
1792
+ }
1793
+ function getSchemaInfo(packageName) {
1794
+ const isScoped = packageName.startsWith("@");
1795
+ const scope = isScoped ? packageName.split("/")[0].substring(1) : null;
1796
+ const schemaName = packageNameToSchema(packageName);
1797
+ return {
1798
+ schemaName,
1799
+ isScoped,
1800
+ scope
1801
+ };
1802
+ }
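createFunctionSchema(), packageNameToSchema() and getSchemaInfo() give each package a deterministic Postgres schema name. A short sketch of the mapping; the package name is an example only:

// Sketch: '@spfn/auth' is an example name, not a dependency this diff introduces.
import { packageNameToSchema, getSchemaInfo, createFunctionSchema } from '@spfn/core/db';

const schemaName = packageNameToSchema('@spfn/auth');  // 'spfn_auth' - '@' dropped, '/' and '-' become '_'
const info = getSchemaInfo('@spfn/auth');              // { schemaName: 'spfn_auth', isScoped: true, scope: 'spfn' }
console.log(schemaName, info);

// A drizzle pgSchema('spfn_auth') for package-owned tables:
const authSchema = createFunctionSchema('@spfn/auth');
export const sessions = authSchema.table('sessions', { /* columns */ });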
1803
+ var txLogger = logger.child("transaction");
1107
1804
  var asyncContext = new AsyncLocalStorage();
1805
+ function getTransactionContext() {
1806
+ return asyncContext.getStore() ?? null;
1807
+ }
1108
1808
  function getTransaction() {
1109
- const context = asyncContext.getStore();
1809
+ const context = getTransactionContext();
1110
1810
  return context?.tx ?? null;
1111
1811
  }
1112
- function runWithTransaction(tx, callback) {
1113
- return asyncContext.run({ tx }, callback);
1812
+ function runWithTransaction(tx, txId, callback) {
1813
+ const existingContext = getTransactionContext();
1814
+ const newLevel = existingContext ? existingContext.level + 1 : 1;
1815
+ if (existingContext) {
1816
+ txLogger.info("Nested transaction started (SAVEPOINT)", {
1817
+ outerTxId: existingContext.txId,
1818
+ innerTxId: txId,
1819
+ level: newLevel
1820
+ });
1821
+ } else {
1822
+ txLogger.debug("Root transaction context set", { txId, level: newLevel });
1823
+ }
1824
+ return asyncContext.run({ tx, txId, level: newLevel }, callback);
1114
1825
  }
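runWithTransaction() now takes an explicit txId and tracks nesting depth for SAVEPOINT-style logging. Outside the Transactional middleware it can be driven by hand; a sketch, assuming getDatabase and runWithTransaction are imported from the package's db entry point:

// Sketch only - the Transactional middleware in the hunks below does this wiring per HTTP request.
import { randomUUID } from 'node:crypto';
import { getDatabase, runWithTransaction } from '@spfn/core/db';

export async function withManualTransaction<T>(work: () => Promise<T>): Promise<T> {
    const db = getDatabase('write');
    if (!db) throw new Error('Database not initialized');
    return db.transaction(async (tx) => {
        // Code inside `work` that calls getTransaction() will see `tx` via AsyncLocalStorage.
        return runWithTransaction(tx, `tx_${randomUUID()}`, work);
    });
}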
1115
1826
  function Transactional(options = {}) {
1116
1827
  const defaultTimeout = parseInt(process.env.TRANSACTION_TIMEOUT || "30000", 10);
@@ -1119,17 +1830,25 @@ function Transactional(options = {}) {
1119
1830
  enableLogging = true,
1120
1831
  timeout = defaultTimeout
1121
1832
  } = options;
1122
- const txLogger = logger.child("transaction");
1833
+ const txLogger2 = logger.child("transaction");
1123
1834
  return createMiddleware(async (c, next) => {
1124
- const txId = `tx_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;
1835
+ const txId = `tx_${randomUUID()}`;
1125
1836
  const startTime = Date.now();
1126
1837
  const route = `${c.req.method} ${c.req.path}`;
1127
1838
  if (enableLogging) {
1128
- txLogger.debug("Transaction started", { txId, route });
1839
+ txLogger2.debug("Transaction started", { txId, route });
1129
1840
  }
1130
1841
  try {
1131
- const transactionPromise = db.transaction(async (tx) => {
1132
- await runWithTransaction(tx, async () => {
1842
+ const writeDb = getDatabase("write");
1843
+ if (!writeDb) {
1844
+ throw new TransactionError(
1845
+ "Database not initialized. Cannot start transaction.",
1846
+ 500,
1847
+ { txId, route }
1848
+ );
1849
+ }
1850
+ const transactionPromise = writeDb.transaction(async (tx) => {
1851
+ await runWithTransaction(tx, txId, async () => {
1133
1852
  await next();
1134
1853
  const contextWithError = c;
1135
1854
  if (contextWithError.error) {
@@ -1160,14 +1879,14 @@ function Transactional(options = {}) {
1160
1879
  const duration = Date.now() - startTime;
1161
1880
  if (enableLogging) {
1162
1881
  if (duration >= slowThreshold) {
1163
- txLogger.warn("Slow transaction committed", {
1882
+ txLogger2.warn("Slow transaction committed", {
1164
1883
  txId,
1165
1884
  route,
1166
1885
  duration: `${duration}ms`,
1167
1886
  threshold: `${slowThreshold}ms`
1168
1887
  });
1169
1888
  } else {
1170
- txLogger.debug("Transaction committed", {
1889
+ txLogger2.debug("Transaction committed", {
1171
1890
  txId,
1172
1891
  route,
1173
1892
  duration: `${duration}ms`
@@ -1178,7 +1897,7 @@ function Transactional(options = {}) {
1178
1897
  const duration = Date.now() - startTime;
1179
1898
  const customError = error instanceof TransactionError ? error : fromPostgresError(error);
1180
1899
  if (enableLogging) {
1181
- txLogger.error("Transaction rolled back", {
1900
+ txLogger2.error("Transaction rolled back", {
1182
1901
  txId,
1183
1902
  route,
1184
1903
  duration: `${duration}ms`,
@@ -1190,910 +1909,144 @@ function Transactional(options = {}) {
1190
1909
  }
1191
1910
  });
1192
1911
  }
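Transactional() now resolves the write connection per request via getDatabase('write') instead of the removed db proxy, and fails fast with a TransactionError when nothing is initialised. A usage sketch, assuming a Hono-style app (the middleware is built with createMiddleware and reads c.req, but the host framework is not shown in this file):

// Sketch only; the route, options and import paths are assumptions.
import { Hono } from 'hono';
import { Transactional, getTransaction } from '@spfn/core/db';

const app = new Hono();

app.post('/users', Transactional({ enableLogging: true }), async (c) => {
    const tx = getTransaction();  // the drizzle transaction opened by the middleware
    // ... run inserts/updates on `tx`; a thrown error rolls the whole request back ...
    return c.json({ ok: true }, 201);
});

export default app;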
1193
- function buildFilters(filters, table) {
1194
- const conditions = [];
1195
- for (const [field, filterCondition] of Object.entries(filters)) {
1196
- const column = table[field];
1197
- if (!column) {
1198
- console.warn(`[buildFilters] Unknown field: ${field}`);
1199
- continue;
1200
- }
1201
- for (const [operator, value] of Object.entries(filterCondition)) {
1202
- const condition = buildCondition(column, operator, value);
1203
- if (condition) {
1204
- conditions.push(condition);
1205
- }
1206
- }
1207
- }
1208
- return conditions.length > 0 ? and(...conditions) : void 0;
1209
- }
1210
- function buildCondition(column, operator, value) {
1211
- switch (operator) {
1212
- case "eq":
1213
- return eq(column, value);
1214
- case "ne":
1215
- return ne(column, value);
1216
- case "gt":
1217
- return gt(column, value);
1218
- case "gte":
1219
- return gte(column, value);
1220
- case "lt":
1221
- return lt(column, value);
1222
- case "lte":
1223
- return lte(column, value);
1224
- case "like":
1225
- return like(column, `%${value}%`);
1226
- case "in":
1227
- if (Array.isArray(value)) {
1228
- return inArray(column, value);
1229
- }
1230
- console.warn(`[buildCondition] 'in' operator requires array value`);
1231
- return void 0;
1232
- case "nin":
1233
- if (Array.isArray(value)) {
1234
- return notInArray(column, value);
1235
- }
1236
- console.warn(`[buildCondition] 'nin' operator requires array value`);
1237
- return void 0;
1238
- case "is":
1239
- if (value === "null") return isNull(column);
1240
- if (value === "notnull") return isNotNull(column);
1241
- console.warn(`[buildCondition] 'is' operator requires 'null' or 'notnull'`);
1242
- return void 0;
1243
- default:
1244
- console.warn(`[buildCondition] Unknown operator: ${operator}`);
1245
- return void 0;
1246
- }
1247
- }
1248
- function buildSort(sortConditions, table) {
1249
- const orderByClauses = [];
1250
- for (const { field, direction } of sortConditions) {
1251
- const column = table[field];
1252
- if (!column) {
1253
- console.warn(`[buildSort] Unknown field: ${field}`);
1254
- continue;
1255
- }
1256
- const clause = direction === "desc" ? desc(column) : asc(column);
1257
- orderByClauses.push(clause);
1258
- }
1259
- return orderByClauses;
1260
- }
1261
- function applyPagination(pagination) {
1262
- const { page, limit } = pagination;
1263
- const offset = (page - 1) * limit;
1264
- return { offset, limit };
1265
- }
1266
- function createPaginationMeta(pagination, total) {
1267
- const { page, limit } = pagination;
1268
- const totalPages = Math.ceil(total / limit);
1269
- return {
1270
- page,
1271
- limit,
1272
- total,
1273
- totalPages,
1274
- hasNext: page < totalPages,
1275
- hasPrev: page > 1
1276
- };
1912
+ function isSQLWrapper(value) {
1913
+ return value && typeof value === "object" && "queryChunks" in value;
1277
1914
  }
1278
- async function countTotal(db2, table, whereCondition) {
1279
- const query = db2.select({ count: sql`count(*)::int` }).from(table);
1280
- if (whereCondition) {
1281
- query.where(whereCondition);
1282
- }
1283
- const [result] = await query;
1284
- return result?.count || 0;
1915
+ function buildWhereFromObject(table, where) {
1916
+ const entries = Object.entries(where).filter(([_, value]) => value !== void 0);
1917
+ if (entries.length === 0) return void 0;
1918
+ const conditions = entries.map(
1919
+ ([key, value]) => eq(table[key], value)
1920
+ );
1921
+ return conditions.length === 1 ? conditions[0] : and(...conditions);
1285
1922
  }
1286
-
1287
- // src/db/repository/query-builder.ts
1288
- var QueryBuilder = class {
1289
- db;
1290
- table;
1291
- filterConditions = [];
1292
- sortConditions = [];
1293
- limitValue;
1294
- offsetValue;
1295
- constructor(db2, table) {
1296
- this.db = db2;
1297
- this.table = table;
1298
- }
1299
- /**
1300
- * Add WHERE conditions
1301
- *
1302
- * Multiple where() calls are combined with AND logic.
1303
- *
1304
- * @param filters - Filter conditions
1305
- * @returns QueryBuilder for chaining
1306
- *
1307
- * @example
1308
- * ```typescript
1309
- * query
1310
- * .where({ status: 'active' })
1311
- * .where({ role: 'admin' }) // AND condition
1312
- * ```
1313
- */
1314
- where(filters) {
1315
- this.filterConditions.push(filters);
1316
- return this;
1317
- }
1318
- /**
1319
- * Add ORDER BY clause
1320
- *
1321
- * Multiple orderBy() calls create multi-column sorting.
1322
- *
1323
- * @param field - Field name to sort by
1324
- * @param direction - Sort direction ('asc' or 'desc')
1325
- * @returns QueryBuilder for chaining
1326
- *
1327
- * @example
1328
- * ```typescript
1329
- * query
1330
- * .orderBy('isPremium', 'desc')
1331
- * .orderBy('createdAt', 'desc')
1332
- * ```
1333
- */
1334
- orderBy(field, direction = "asc") {
1335
- this.sortConditions.push({ field, direction });
1336
- return this;
1337
- }
1338
- /**
1339
- * Set LIMIT clause
1340
- *
1341
- * @param limit - Maximum number of records to return
1342
- * @returns QueryBuilder for chaining
1343
- *
1344
- * @example
1345
- * ```typescript
1346
- * query.limit(10)
1347
- * ```
1348
- */
1349
- limit(limit) {
1350
- this.limitValue = limit;
1351
- return this;
1352
- }
1353
- /**
1354
- * Set OFFSET clause
1355
- *
1356
- * @param offset - Number of records to skip
1357
- * @returns QueryBuilder for chaining
1358
- *
1359
- * @example
1360
- * ```typescript
1361
- * query.offset(20)
1362
- * ```
1363
- */
1364
- offset(offset) {
1365
- this.offsetValue = offset;
1366
- return this;
1367
- }
1368
- /**
1369
- * Execute query and return multiple records
1370
- *
1371
- * @returns Array of records
1372
- *
1373
- * @example
1374
- * ```typescript
1375
- * const users = await query
1376
- * .where({ status: 'active' })
1377
- * .orderBy('createdAt', 'desc')
1378
- * .limit(10)
1379
- * .findMany();
1380
- * ```
1381
- */
1382
- async findMany() {
1383
- const mergedFilters = this.mergeFilters();
1384
- const whereCondition = buildFilters(mergedFilters, this.table);
1385
- const orderBy = buildSort(this.sortConditions, this.table);
1386
- let query = this.db.select().from(this.table).where(whereCondition).orderBy(...orderBy);
1387
- if (this.limitValue !== void 0) {
1388
- query = query.limit(this.limitValue);
1389
- }
1390
- if (this.offsetValue !== void 0) {
1391
- query = query.offset(this.offsetValue);
1392
- }
1393
- return query;
1394
- }
1395
- /**
1396
- * Execute query and return first record
1397
- *
1398
- * @returns First matching record or null
1399
- *
1400
- * @example
1401
- * ```typescript
1402
- * const user = await query
1403
- * .where({ email: 'john@example.com' })
1404
- * .findOne();
1405
- * ```
1406
- */
1407
- async findOne() {
1408
- const results = await this.limit(1).findMany();
1409
- return results[0] ?? null;
1410
- }
1411
- /**
1412
- * Execute query and return count
1413
- *
1414
- * @returns Number of matching records
1415
- *
1416
- * @example
1417
- * ```typescript
1418
- * const count = await query
1419
- * .where({ status: 'active' })
1420
- * .count();
1421
- * ```
1422
- */
1423
- async count() {
1424
- const mergedFilters = this.mergeFilters();
1425
- const whereCondition = buildFilters(mergedFilters, this.table);
1426
- const { count } = await import('drizzle-orm');
1427
- const result = await this.db.select({ count: count() }).from(this.table).where(whereCondition);
1428
- return Number(result[0]?.count ?? 0);
1429
- }
1430
- /**
1431
- * Merge multiple filter conditions into single object
1432
- *
1433
- * Combines all where() calls into one filter object.
1434
- */
1435
- mergeFilters() {
1436
- if (this.filterConditions.length === 0) {
1437
- return {};
1438
- }
1439
- return this.filterConditions.reduce((merged, current) => {
1440
- return { ...merged, ...current };
1441
- }, {});
1442
- }
1443
- };
1444
-
1445
- // src/db/repository/repository.ts
1446
- var Repository = class {
1447
- db;
1448
- table;
1449
- useReplica;
1450
- explicitDb;
1451
- // Track if db was explicitly provided
1452
- autoUpdateField;
1453
- // Field name to auto-update (e.g., 'updatedAt', 'modifiedAt')
1454
- constructor(dbOrTable, tableOrUseReplica, useReplica = true) {
1455
- if ("name" in dbOrTable && typeof dbOrTable.name === "string") {
1456
- this.db = getRawDb("write");
1457
- this.table = dbOrTable;
1458
- this.useReplica = typeof tableOrUseReplica === "boolean" ? tableOrUseReplica : true;
1459
- this.explicitDb = void 0;
1460
- } else {
1461
- this.db = dbOrTable;
1462
- this.table = tableOrUseReplica;
1463
- this.useReplica = useReplica;
1464
- this.explicitDb = this.db;
1465
- }
1466
- this.autoUpdateField = this.detectAutoUpdateField();
1467
- }
1468
- /**
1469
- * Detect which field (if any) should be auto-updated
1470
- *
1471
- * Checks all table columns for __autoUpdate metadata flag.
1472
- * Set by autoUpdateTimestamp() or timestamps({ autoUpdate: true }) helpers.
1473
- *
1474
- * @returns Field name to auto-update, or undefined if none found
1475
- */
1476
- detectAutoUpdateField() {
1477
- if (!this.table || typeof this.table !== "object") {
1478
- return void 0;
1479
- }
1480
- const tableColumns = this.table;
1481
- for (const [fieldName, column] of Object.entries(tableColumns)) {
1482
- if (fieldName.startsWith("_") || fieldName.startsWith("$")) {
1483
- continue;
1484
- }
1485
- if (column && typeof column === "object" && column.__autoUpdate === true) {
1486
- return fieldName;
1487
- }
1488
- }
1489
- return void 0;
1490
- }
1491
- /**
1492
- * Inject auto-update timestamp if configured
1493
- *
1494
- * Only injects if:
1495
- * 1. Table has an auto-update field configured (via autoUpdateTimestamp() or timestamps({ autoUpdate: true }))
1496
- * 2. The field is not already explicitly provided in the data
1497
- *
1498
- * @param data - Update data object
1499
- * @returns Data with auto-update timestamp injected (if applicable)
1500
- */
1501
- injectAutoUpdateTimestamp(data) {
1502
- if (!this.autoUpdateField) {
1503
- return data;
1504
- }
1505
- if (data && this.autoUpdateField in data) {
1506
- return data;
1507
- }
1508
- return {
1509
- ...data,
1510
- [this.autoUpdateField]: /* @__PURE__ */ new Date()
1511
- };
1512
- }
1513
- /**
1514
- * Get id column from table
1515
- *
1516
- * Helper method to reduce code duplication across methods that need id column.
1517
- *
1518
- * @returns The id column object
1519
- * @throws {QueryError} If table does not have an id column
1520
- */
1521
- getIdColumn() {
1522
- const idColumn = this.table.id;
1523
- if (!idColumn) {
1524
- throw new QueryError("Table does not have an id column");
1525
- }
1526
- return idColumn;
1527
- }
1528
- /**
1529
- * Get read-only DB
1530
- *
1531
- * Automatically detects and uses transaction context if available.
1532
- * When in transaction, uses transaction DB to ensure read consistency.
1533
- * Priority: explicitDb > transaction > replica/primary DB
1534
- */
1535
- getReadDb() {
1536
- if (this.explicitDb) {
1537
- return this.explicitDb;
1538
- }
1539
- const tx = getTransaction();
1540
- if (tx) {
1541
- return tx;
1542
- }
1543
- return this.useReplica ? getRawDb("read") : this.db;
1923
+ async function findOne(table, where) {
1924
+ const db = getDatabase("read");
1925
+ if (!db) {
1926
+ throw new Error("Database not initialized. Call initDatabase() first.");
1544
1927
  }
1545
- /**
1546
- * Get write-only DB
1547
- *
1548
- * Automatically detects and uses transaction context if available.
1549
- * Priority: explicitDb > transaction > primary DB
1550
- */
1551
- getWriteDb() {
1552
- if (this.explicitDb) {
1553
- return this.explicitDb;
1554
- }
1555
- const tx = getTransaction();
1556
- if (tx) {
1557
- return tx;
1558
- }
1559
- return getRawDb("write");
1928
+ const whereClause = isSQLWrapper(where) ? where : where ? buildWhereFromObject(table, where) : void 0;
1929
+ if (!whereClause) {
1930
+ throw new Error("findOne requires at least one where condition");
1560
1931
  }
1561
- /**
1562
- * Execute operation with performance monitoring
1563
- *
1564
- * Wraps database operations with timing and logging for slow queries.
1565
- * Only logs if monitoring is enabled and query exceeds threshold.
1566
- *
1567
- * @param operation - Name of the operation (for logging)
1568
- * @param fn - Async function to execute
1569
- * @returns Result of the operation
1570
- */
1571
- async executeWithMonitoring(operation, fn) {
1572
- const config2 = getDatabaseMonitoringConfig();
1573
- if (!config2?.enabled) {
1574
- return fn();
1575
- }
1576
- const startTime = performance.now();
1577
- try {
1578
- const result = await fn();
1579
- const duration = performance.now() - startTime;
1580
- if (duration >= config2.slowThreshold) {
1581
- const dbLogger4 = logger.child("database");
1582
- const logData = {
1583
- operation,
1584
- table: this.table._.name,
1585
- duration: `${duration.toFixed(2)}ms`,
1586
- threshold: `${config2.slowThreshold}ms`
1587
- };
1588
- dbLogger4.warn("Slow query detected", logData);
1589
- }
1590
- return result;
1591
- } catch (error) {
1592
- const duration = performance.now() - startTime;
1593
- const dbLogger4 = logger.child("database");
1594
- const message = error instanceof Error ? error.message : "Unknown error";
1595
- dbLogger4.error("Query failed", {
1596
- operation,
1597
- table: this.table._.name,
1598
- duration: `${duration.toFixed(2)}ms`,
1599
- error: message
1600
- });
1601
- throw error;
1932
+ const results = await db.select().from(table).where(whereClause).limit(1);
1933
+ return results[0] ?? null;
1934
+ }
1935
+ async function findMany(table, options) {
1936
+ const db = getDatabase("read");
1937
+ if (!db) {
1938
+ throw new Error("Database not initialized. Call initDatabase() first.");
1939
+ }
1940
+ let query = db.select().from(table);
1941
+ if (options?.where) {
1942
+ const whereClause = isSQLWrapper(options.where) ? options.where : options.where ? buildWhereFromObject(table, options.where) : void 0;
1943
+ if (whereClause) {
1944
+ query = query.where(whereClause);
1602
1945
  }
1603
1946
  }
1604
- /**
1605
- * Find all records (uses Replica)
1606
- *
1607
- * @example
1608
- * const users = await userRepo.findAll();
1609
- */
1610
- async findAll() {
1611
- return this.executeWithMonitoring("findAll", async () => {
1612
- const readDb = this.getReadDb();
1613
- return readDb.select().from(this.table);
1614
- });
1615
- }
1616
- /**
1617
- * Find with pagination (uses Replica)
1618
- *
1619
- * @example
1620
- * const result = await userRepo.findPage({
1621
- * filters: { email: { like: 'john' } },
1622
- * sort: [{ field: 'createdAt', direction: 'desc' }],
1623
- * pagination: { page: 1, limit: 20 }
1624
- * });
1625
- */
1626
- async findPage(pageable) {
1627
- return this.executeWithMonitoring("findPage", async () => {
1628
- const { filters = {}, sort = [], pagination = { page: 1, limit: 20 } } = pageable;
1629
- const whereCondition = buildFilters(filters, this.table);
1630
- const orderBy = buildSort(sort, this.table);
1631
- const { offset, limit } = applyPagination(pagination);
1632
- const readDb = this.getReadDb();
1633
- const data = await readDb.select().from(this.table).where(whereCondition).orderBy(...orderBy).limit(limit).offset(offset);
1634
- const total = await countTotal(readDb, this.table, whereCondition);
1635
- const meta = createPaginationMeta(pagination, total);
1636
- return { data, meta };
1637
- });
1638
- }
1639
- /**
1640
- * Find one record by ID (uses Replica)
1641
- *
1642
- * @example
1643
- * const user = await userRepo.findById(1);
1644
- */
1645
- async findById(id2) {
1646
- return this.executeWithMonitoring("findById", async () => {
1647
- const idColumn = this.getIdColumn();
1648
- const { eq: eq2 } = await import('drizzle-orm');
1649
- const readDb = this.getReadDb();
1650
- const [result] = await readDb.select().from(this.table).where(eq2(idColumn, id2));
1651
- return result ?? null;
1652
- });
1653
- }
1654
- /**
1655
- * Find one record by condition (uses Replica)
1656
- *
1657
- * @example
1658
- * const user = await userRepo.findOne(eq(users.email, 'john@example.com'));
1659
- */
1660
- async findOne(where) {
1661
- return this.executeWithMonitoring("findOne", async () => {
1662
- const readDb = this.getReadDb();
1663
- const [result] = await readDb.select().from(this.table).where(where);
1664
- return result ?? null;
1665
- });
1666
- }
1667
- /**
1668
- * Create a new record (uses Primary)
1669
- *
1670
- * @example
1671
- * const user = await userRepo.save({ email: 'john@example.com', name: 'John' });
1672
- */
1673
- async save(data) {
1674
- return this.executeWithMonitoring("save", async () => {
1675
- const writeDb = this.getWriteDb();
1676
- const [result] = await writeDb.insert(this.table).values(data).returning();
1677
- return result;
1678
- });
1679
- }
1680
- /**
1681
- * Update a record (uses Primary)
1682
- *
1683
- * Automatically injects current timestamp if table has auto-update field configured.
1684
- *
1685
- * @example
1686
- * const user = await userRepo.update(1, { name: 'Jane' });
1687
- */
1688
- async update(id2, data) {
1689
- return this.executeWithMonitoring("update", async () => {
1690
- const idColumn = this.getIdColumn();
1691
- const updateData = this.injectAutoUpdateTimestamp(data);
1692
- const { eq: eq2 } = await import('drizzle-orm');
1693
- const writeDb = this.getWriteDb();
1694
- const [result] = await writeDb.update(this.table).set(updateData).where(eq2(idColumn, id2)).returning();
1695
- return result ?? null;
1696
- });
1697
- }
1698
- /**
1699
- * Delete a record (uses Primary)
1700
- *
1701
- * @example
1702
- * const deleted = await userRepo.delete(1);
1703
- */
1704
- async delete(id2) {
1705
- return this.executeWithMonitoring("delete", async () => {
1706
- const idColumn = this.getIdColumn();
1707
- const { eq: eq2 } = await import('drizzle-orm');
1708
- const writeDb = this.getWriteDb();
1709
- const [result] = await writeDb.delete(this.table).where(eq2(idColumn, id2)).returning();
1710
- return result ?? null;
1711
- });
1712
- }
1713
- /**
1714
- * Count records (uses Replica)
1715
- *
1716
- * @example
1717
- * const count = await userRepo.count();
1718
- */
1719
- async count(where) {
1720
- return this.executeWithMonitoring("count", async () => {
1721
- const readDb = this.getReadDb();
1722
- return countTotal(readDb, this.table, where);
1723
- });
1724
- }
1725
- /**
1726
- * Find records by filters (uses Replica)
1727
- *
1728
- * @example
1729
- * const users = await userRepo.findWhere({ email: { like: '@gmail.com' }, status: 'active' });
1730
- */
1731
- async findWhere(filters) {
1732
- return this.executeWithMonitoring("findWhere", async () => {
1733
- const whereCondition = buildFilters(filters, this.table);
1734
- const readDb = this.getReadDb();
1735
- return readDb.select().from(this.table).where(whereCondition);
1736
- });
1737
- }
1738
- /**
1739
- * Find one record by filters (uses Replica)
1740
- *
1741
- * @example
1742
- * const user = await userRepo.findOneWhere({ email: 'john@example.com' });
1743
- */
1744
- async findOneWhere(filters) {
1745
- return this.executeWithMonitoring("findOneWhere", async () => {
1746
- const whereCondition = buildFilters(filters, this.table);
1747
- const readDb = this.getReadDb();
1748
- const [result] = await readDb.select().from(this.table).where(whereCondition);
1749
- return result ?? null;
1750
- });
1947
+ if (options?.orderBy) {
1948
+ const orderByArray = Array.isArray(options.orderBy) ? options.orderBy : [options.orderBy];
1949
+ query = query.orderBy(...orderByArray);
1751
1950
  }
1752
- /**
1753
- * Check if record exists by ID (uses Replica)
1754
- *
1755
- * @example
1756
- * const exists = await userRepo.exists(1);
1757
- */
1758
- async exists(id2) {
1759
- return this.executeWithMonitoring("exists", async () => {
1760
- const idColumn = this.getIdColumn();
1761
- const { eq: eq2 } = await import('drizzle-orm');
1762
- const readDb = this.getReadDb();
1763
- const [result] = await readDb.select().from(this.table).where(eq2(idColumn, id2)).limit(1);
1764
- return !!result;
1765
- });
1951
+ if (options?.limit) {
1952
+ query = query.limit(options.limit);
1766
1953
  }
1767
- /**
1768
- * Check if record exists by filters (uses Replica)
1769
- *
1770
- * @example
1771
- * const exists = await userRepo.existsBy({ email: 'john@example.com' });
1772
- */
1773
- async existsBy(filters) {
1774
- return this.executeWithMonitoring("existsBy", async () => {
1775
- const whereCondition = buildFilters(filters, this.table);
1776
- const readDb = this.getReadDb();
1777
- const [result] = await readDb.select().from(this.table).where(whereCondition).limit(1);
1778
- return !!result;
1779
- });
1780
- }
1781
- /**
1782
- * Count records by filters (uses Replica)
1783
- *
1784
- * @example
1785
- * const count = await userRepo.countBy({ status: 'active' });
1786
- */
1787
- async countBy(filters) {
1788
- return this.executeWithMonitoring("countBy", async () => {
1789
- const whereCondition = buildFilters(filters, this.table);
1790
- const readDb = this.getReadDb();
1791
- return countTotal(readDb, this.table, whereCondition);
1792
- });
1793
- }
1794
- /**
1795
- * Create multiple records (uses Primary)
1796
- *
1797
- * @example
1798
- * const users = await userRepo.saveMany([
1799
- * { email: 'user1@example.com', name: 'User 1' },
1800
- * { email: 'user2@example.com', name: 'User 2' }
1801
- * ]);
1802
- */
1803
- async saveMany(data) {
1804
- return this.executeWithMonitoring("saveMany", async () => {
1805
- const writeDb = this.getWriteDb();
1806
- return writeDb.insert(this.table).values(data).returning();
1807
- });
1808
- }
1809
- /**
1810
- * Update multiple records by filters (uses Primary)
1811
- *
1812
- * Automatically injects current timestamp if table has auto-update field configured.
1813
- *
1814
- * @example
1815
- * const count = await userRepo.updateWhere({ status: 'inactive' }, { status: 'archived' });
1816
- */
1817
- async updateWhere(filters, data) {
1818
- return this.executeWithMonitoring("updateWhere", async () => {
1819
- const updateData = this.injectAutoUpdateTimestamp(data);
1820
- const whereCondition = buildFilters(filters, this.table);
1821
- const writeDb = this.getWriteDb();
1822
- const results = await writeDb.update(this.table).set(updateData).where(whereCondition).returning();
1823
- return results.length;
1824
- });
1825
- }
1826
- /**
1827
- * Delete multiple records by filters (uses Primary)
1828
- *
1829
- * @example
1830
- * const count = await userRepo.deleteWhere({ status: 'banned' });
1831
- */
1832
- async deleteWhere(filters) {
1833
- return this.executeWithMonitoring("deleteWhere", async () => {
1834
- const whereCondition = buildFilters(filters, this.table);
1835
- const writeDb = this.getWriteDb();
1836
- const results = await writeDb.delete(this.table).where(whereCondition).returning();
1837
- return results.length;
1838
- });
1839
- }
1840
- // ============================================================
1841
- // Query Builder (Fluent Interface)
1842
- // ============================================================
1843
- /**
1844
- * Start a chainable query builder (uses Replica)
1845
- *
1846
- * Returns a QueryBuilder instance for building complex queries with method chaining.
1847
- *
1848
- * @returns QueryBuilder instance for chaining
1849
- *
1850
- * @example
1851
- * ```typescript
1852
- * // Simple chaining
1853
- * const users = await userRepo
1854
- * .query()
1855
- * .where({ status: 'active' })
1856
- * .orderBy('createdAt', 'desc')
1857
- * .limit(10)
1858
- * .findMany();
1859
- *
1860
- * // Multiple conditions
1861
- * const admins = await userRepo
1862
- * .query()
1863
- * .where({ role: 'admin' })
1864
- * .where({ status: 'active' }) // AND condition
1865
- * .findMany();
1866
- *
1867
- * // Reusable query
1868
- * const activeQuery = userRepo.query().where({ status: 'active' });
1869
- * const users = await activeQuery.findMany();
1870
- * const count = await activeQuery.count();
1871
- * ```
1872
- */
1873
- query() {
1874
- const readDb = this.getReadDb();
1875
- return new QueryBuilder(readDb, this.table);
1876
- }
1877
- };
1878
-
1879
- // src/db/repository/factory.ts
1880
- var repositoryCache = /* @__PURE__ */ new Map();
1881
- function getCacheKey(table, RepositoryClass) {
1882
- const tableName = table[Symbol.for("drizzle:Name")] || table.name || table.toString();
1883
- const className = RepositoryClass?.name || "Repository";
1884
- return `${tableName}:${className}`;
1885
- }
1886
- function getRepository(table, RepositoryClass) {
1887
- const cacheKey = getCacheKey(table, RepositoryClass);
1888
- let repo = repositoryCache.get(cacheKey);
1889
- if (!repo) {
1890
- if (RepositoryClass) {
1891
- repo = new RepositoryClass(table);
1892
- } else {
1893
- repo = new Repository(table);
1894
- }
1895
- repositoryCache.set(cacheKey, repo);
1954
+ if (options?.offset) {
1955
+ query = query.offset(options.offset);
1896
1956
  }
1897
- return repo;
1898
- }
1899
- function clearRepositoryCache() {
1900
- repositoryCache.clear();
1901
- }
1902
- function getRepositoryCacheSize() {
1903
- return repositoryCache.size;
1904
- }
1905
- var repositoryStorage = new AsyncLocalStorage();
1906
- function getCacheKey2(table, RepositoryClass) {
1907
- const tableName = table[Symbol.for("drizzle:Name")] || table.name || table.toString();
1908
- const className = RepositoryClass?.name || "Repository";
1909
- return `${tableName}:${className}`;
1910
- }
1911
- function withRepositoryScope(fn) {
1912
- const cache = /* @__PURE__ */ new Map();
1913
- return repositoryStorage.run(cache, fn);
1957
+ return query;
1914
1958
  }
1915
- function getScopedRepository(table, RepositoryClass) {
1916
- const cache = repositoryStorage.getStore();
1917
- if (!cache) {
1918
- return RepositoryClass ? new RepositoryClass(table) : new Repository(table);
1959
+ async function create(table, data) {
1960
+ const db = getDatabase("write");
1961
+ if (!db) {
1962
+ throw new Error("Database not initialized. Call initDatabase() first.");
1919
1963
  }
1920
- const key = getCacheKey2(table, RepositoryClass);
1921
- let repo = cache.get(key);
1922
- if (!repo) {
1923
- repo = RepositoryClass ? new RepositoryClass(table) : new Repository(table);
1924
- cache.set(key, repo);
1925
- }
1926
- return repo;
1927
- }
1928
- function RepositoryScope() {
1929
- return async (_c, next) => {
1930
- return withRepositoryScope(() => next());
1931
- };
1964
+ const [result] = await db.insert(table).values(data).returning();
1965
+ return result;
1932
1966
  }
1933
- function getScopedCacheSize() {
1934
- const cache = repositoryStorage.getStore();
1935
- return cache?.size ?? 0;
1936
- }
1937
- function isInRepositoryScope() {
1938
- return repositoryStorage.getStore() !== void 0;
1939
- }
1940
-
1941
- // src/db/repository/relation-registry.ts
1942
- var tableNameCache = /* @__PURE__ */ new WeakMap();
1943
- function getTableName(table) {
1944
- const cached = tableNameCache.get(table);
1945
- if (cached) {
1946
- return cached;
1967
+ async function createMany(table, data) {
1968
+ const db = getDatabase("write");
1969
+ if (!db) {
1970
+ throw new Error("Database not initialized. Call initDatabase() first.");
1947
1971
  }
1948
- const name = table[Symbol.for("drizzle:Name")] || table.constructor.name;
1949
- tableNameCache.set(table, name);
1950
- return name;
1972
+ const results = await db.insert(table).values(data).returning();
1973
+ return results;
1951
1974
  }
1952
-
1953
- // src/db/manager/wrapped-db.ts
1954
- var WrappedDb = class {
1955
- constructor(db2) {
1956
- this.db = db2;
1957
- }
1958
- /**
1959
- * Access a table through the Repository pattern
1960
- *
1961
- * @example
1962
- * const db = getDb();
1963
- * const userRepo = db.for(users);
1964
- * const result = await userRepo.findPage(pageable);
1965
- */
1966
- for(table) {
1967
- return new Repository(this.db, table);
1968
- }
1969
- /**
1970
- * Proxies all Drizzle methods
1971
- *
1972
- * All Drizzle methods are available: select, insert, update, delete, transaction, etc.
1973
- */
1974
- get select() {
1975
- return this.db.select.bind(this.db);
1976
- }
1977
- get insert() {
1978
- return this.db.insert.bind(this.db);
1979
- }
1980
- get update() {
1981
- return this.db.update.bind(this.db);
1982
- }
1983
- get delete() {
1984
- return this.db.delete.bind(this.db);
1985
- }
1986
- get execute() {
1987
- return this.db.execute.bind(this.db);
1988
- }
1989
- get transaction() {
1990
- return this.db.transaction.bind(this.db);
1991
- }
1992
- get query() {
1993
- return this.db.query;
1975
+ async function upsert(table, data, options) {
1976
+ const db = getDatabase("write");
1977
+ if (!db) {
1978
+ throw new Error("Database not initialized. Call initDatabase() first.");
1979
+ }
1980
+ const [result] = await db.insert(table).values(data).onConflictDoUpdate({
1981
+ target: options.target,
1982
+ set: options.set || data
1983
+ }).returning();
1984
+ return result;
1985
+ }
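create(), createMany() and upsert() are the write-side counterparts; upsert() maps to INSERT ... ON CONFLICT DO UPDATE, with set defaulting to the inserted data. A sketch against the same assumed users table:

// Sketch: table shape and import paths are assumptions.
import { create, createMany, upsert } from '@spfn/core/db';
import { users } from './entities/users';

export async function seedUsers() {
    const one = await create(users, { email: 'a@example.com', name: 'A' });

    const many = await createMany(users, [
        { email: 'b@example.com', name: 'B' },
        { email: 'c@example.com', name: 'C' },
    ]);

    // ON CONFLICT (email) DO UPDATE SET name = 'A (renamed)'
    const upserted = await upsert(users, { email: 'a@example.com', name: 'A (renamed)' }, {
        target: users.email,
        set: { name: 'A (renamed)' },
    });

    return { one, many, upserted };
}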
1986
+ async function updateOne(table, where, data) {
1987
+ const db = getDatabase("write");
1988
+ if (!db) {
1989
+ throw new Error("Database not initialized. Call initDatabase() first.");
1994
1990
  }
1995
- get $with() {
1996
- return this.db.$with.bind(this.db);
1991
+ const whereClause = isSQLWrapper(where) ? where : where ? buildWhereFromObject(table, where) : void 0;
1992
+ if (!whereClause) {
1993
+ throw new Error("updateOne requires at least one where condition");
1997
1994
  }
1998
- /**
1999
- * Access the raw Drizzle DB (when needed)
2000
- */
2001
- get raw() {
2002
- return this.db;
1995
+ const [result] = await db.update(table).set(data).where(whereClause).returning();
1996
+ return result ?? null;
1997
+ }
1998
+ async function updateMany(table, where, data) {
1999
+ const db = getDatabase("write");
2000
+ if (!db) {
2001
+ throw new Error("Database not initialized. Call initDatabase() first.");
2003
2002
  }
2004
- };
2005
-
2006
- // src/db/manager/context.ts
2007
- function getDb(type) {
2008
- const tx = getTransaction();
2009
- if (tx) {
2010
- return new WrappedDb(tx);
2011
- }
2012
- const rawDb = getDatabase(type);
2013
- if (!rawDb) {
2014
- throw new Error(
2015
- "Database not initialized. Set DATABASE_URL environment variable or call initDatabase() first."
2016
- );
2003
+ const whereClause = isSQLWrapper(where) ? where : where ? buildWhereFromObject(table, where) : void 0;
2004
+ if (!whereClause) {
2005
+ throw new Error("updateMany requires at least one where condition");
2017
2006
  }
2018
- return new WrappedDb(rawDb);
2007
+ const results = await db.update(table).set(data).where(whereClause).returning();
2008
+ return results;
2019
2009
  }
2020
-
2021
- // src/db/manager/config-generator.ts
2022
- function detectDialect(url) {
2023
- if (url.startsWith("postgres://") || url.startsWith("postgresql://")) {
2024
- return "postgresql";
2025
- }
2026
- if (url.startsWith("mysql://")) {
2027
- return "mysql";
2010
+ async function deleteOne(table, where) {
2011
+ const db = getDatabase("write");
2012
+ if (!db) {
2013
+ throw new Error("Database not initialized. Call initDatabase() first.");
2028
2014
  }
2029
- if (url.startsWith("sqlite://") || url.includes(".db") || url.includes(".sqlite")) {
2030
- return "sqlite";
2015
+ const whereClause = isSQLWrapper(where) ? where : where ? buildWhereFromObject(table, where) : void 0;
2016
+ if (!whereClause) {
2017
+ throw new Error("deleteOne requires at least one where condition");
2031
2018
  }
2032
- throw new Error(
2033
- `Unsupported database URL format: ${url}. Supported: postgresql://, mysql://, sqlite://`
2034
- );
2019
+ const [result] = await db.delete(table).where(whereClause).returning();
2020
+ return result ?? null;
2035
2021
  }
2036
- function getDrizzleConfig(options = {}) {
2037
- const databaseUrl = options.databaseUrl ?? process.env.DATABASE_URL;
2038
- if (!databaseUrl) {
2039
- throw new Error(
2040
- "DATABASE_URL is required. Set it in .env or pass it to getDrizzleConfig()"
2041
- );
2022
+ async function deleteMany(table, where) {
2023
+ const db = getDatabase("write");
2024
+ if (!db) {
2025
+ throw new Error("Database not initialized. Call initDatabase() first.");
2042
2026
  }
2043
- const dialect = options.dialect ?? detectDialect(databaseUrl);
2044
- const schema = options.schema ?? "./src/server/entities/*.ts";
2045
- const out = options.out ?? "./drizzle/migrations";
2046
- return {
2047
- schema,
2048
- out,
2049
- dialect,
2050
- dbCredentials: getDbCredentials(dialect, databaseUrl)
2051
- };
2052
- }
2053
- function getDbCredentials(dialect, url) {
2054
- switch (dialect) {
2055
- case "postgresql":
2056
- case "mysql":
2057
- return { url };
2058
- case "sqlite":
2059
- const dbPath = url.replace("sqlite://", "").replace("sqlite:", "");
2060
- return { url: dbPath };
2061
- default:
2062
- throw new Error(`Unsupported dialect: ${dialect}`);
2027
+ const whereClause = isSQLWrapper(where) ? where : where ? buildWhereFromObject(table, where) : void 0;
2028
+ if (!whereClause) {
2029
+ throw new Error("deleteMany requires at least one where condition");
2063
2030
  }
2031
+ const results = await db.delete(table).where(whereClause).returning();
2032
+ return results;
2064
2033
  }
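updateOne(), updateMany(), deleteOne() and deleteMany() all insist on at least one where condition, so an accidental unfiltered UPDATE or DELETE throws instead of touching every row. A sketch with assumed users and sessions tables:

// Sketch: tables, columns and import paths are assumptions.
import { eq, lt } from 'drizzle-orm';
import { updateOne, updateMany, deleteOne, deleteMany } from '@spfn/core/db';
import { users, sessions } from './entities';

export async function housekeeping(userId: number) {
    await updateOne(users, { id: userId }, { status: 'inactive' });             // updated row or null
    await updateMany(users, eq(users.status, 'pending'), { status: 'active' }); // all updated rows
    await deleteOne(sessions, { userId });
    return deleteMany(sessions, lt(sessions.expiresAt, new Date()));            // deleted rows
}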
2065
- function generateDrizzleConfigFile(options = {}) {
2066
- const config2 = getDrizzleConfig(options);
2067
- return `import { defineConfig } from 'drizzle-kit';
2068
-
2069
- export default defineConfig({
2070
- schema: '${config2.schema}',
2071
- out: '${config2.out}',
2072
- dialect: '${config2.dialect}',
2073
- dbCredentials: ${JSON.stringify(config2.dbCredentials, null, 4)},
2074
- });
2075
- `;
2076
- }
2077
- function id() {
2078
- return bigserial("id", { mode: "number" }).primaryKey();
2079
- }
2080
- function timestamps(options) {
2081
- const updatedAtColumn = timestamp("updated_at", { withTimezone: true, mode: "date" }).defaultNow().notNull();
2082
- if (options?.autoUpdate) {
2083
- updatedAtColumn.__autoUpdate = true;
2034
+ async function count(table, where) {
2035
+ const db = getDatabase("read");
2036
+ if (!db) {
2037
+ throw new Error("Database not initialized. Call initDatabase() first.");
2038
+ }
2039
+ let query = db.select().from(table);
2040
+ if (where) {
2041
+ const whereClause = isSQLWrapper(where) ? where : where ? buildWhereFromObject(table, where) : void 0;
2042
+ if (whereClause) {
2043
+ query = query.where(whereClause);
2044
+ }
2084
2045
  }
2085
- return {
2086
- createdAt: timestamp("created_at", { withTimezone: true, mode: "date" }).defaultNow().notNull(),
2087
- updatedAt: updatedAtColumn
2088
- };
2089
- }
2090
- function foreignKey(name, reference, options) {
2091
- return bigserial(`${name}_id`, { mode: "number" }).notNull().references(reference, { onDelete: options?.onDelete ?? "cascade" });
2092
- }
2093
- function optionalForeignKey(name, reference, options) {
2094
- return bigserial(`${name}_id`, { mode: "number" }).references(reference, { onDelete: options?.onDelete ?? "set null" });
2046
+ const results = await query;
2047
+ return results.length;
2095
2048
  }
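count() reuses the same where handling as the other helpers: it selects the matching rows and returns results.length. A short sketch:

// Sketch: assumes the same illustrative users table as above.
import { count } from '@spfn/core/db';
import { users } from './entities/users';

export async function userStats() {
    const active = await count(users, { status: 'active' });
    const total = await count(users);  // no condition counts every row
    return { active, total };
}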
2096
2049
 
2097
- export { QueryBuilder, Repository, RepositoryScope, Transactional, WrappedDb, checkConnection, clearRepositoryCache, closeDatabase, createDatabaseConnection, createDatabaseFromEnv, db, detectDialect, foreignKey, fromPostgresError, generateDrizzleConfigFile, getDatabase, getDatabaseInfo, getDb, getDrizzleConfig, getRawDb, getRepository, getRepositoryCacheSize, getScopedCacheSize, getScopedRepository, getTableName, getTransaction, id, initDatabase, isInRepositoryScope, optionalForeignKey, runWithTransaction, setDatabase, timestamps, withRepositoryScope };
2050
+ export { Transactional, checkConnection, closeDatabase, count, create, createDatabaseConnection, createDatabaseFromEnv, createFunctionSchema, createMany, deleteMany, deleteOne, detectDialect, findMany, findOne, foreignKey, fromPostgresError, generateDrizzleConfigFile, getDatabase, getDatabaseInfo, getDrizzleConfig, getSchemaInfo, getTransaction, id, initDatabase, optionalForeignKey, packageNameToSchema, runWithTransaction, setDatabase, timestamps, updateMany, updateOne, upsert };
2098
2051
  //# sourceMappingURL=index.js.map
2099
2052
  //# sourceMappingURL=index.js.map