@spfn/core 0.1.0-alpha.8 → 0.1.0-alpha.82
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +169 -195
- package/dist/auto-loader-JFaZ9gON.d.ts +80 -0
- package/dist/cache/index.d.ts +211 -0
- package/dist/cache/index.js +1013 -0
- package/dist/cache/index.js.map +1 -0
- package/dist/client/index.d.ts +131 -92
- package/dist/client/index.js +93 -85
- package/dist/client/index.js.map +1 -1
- package/dist/codegen/generators/index.d.ts +19 -0
- package/dist/codegen/generators/index.js +1521 -0
- package/dist/codegen/generators/index.js.map +1 -0
- package/dist/codegen/index.d.ts +76 -60
- package/dist/codegen/index.js +1506 -735
- package/dist/codegen/index.js.map +1 -1
- package/dist/database-errors-BNNmLTJE.d.ts +86 -0
- package/dist/db/index.d.ts +844 -44
- package/dist/db/index.js +1281 -1307
- package/dist/db/index.js.map +1 -1
- package/dist/env/index.d.ts +508 -0
- package/dist/env/index.js +1127 -0
- package/dist/env/index.js.map +1 -0
- package/dist/errors/index.d.ts +136 -0
- package/dist/errors/index.js +172 -0
- package/dist/errors/index.js.map +1 -0
- package/dist/index-DHiAqhKv.d.ts +101 -0
- package/dist/index.d.ts +3 -374
- package/dist/index.js +2424 -2178
- package/dist/index.js.map +1 -1
- package/dist/logger/index.d.ts +94 -0
- package/dist/logger/index.js +795 -0
- package/dist/logger/index.js.map +1 -0
- package/dist/middleware/index.d.ts +60 -0
- package/dist/middleware/index.js +918 -0
- package/dist/middleware/index.js.map +1 -0
- package/dist/route/index.d.ts +21 -53
- package/dist/route/index.js +1259 -219
- package/dist/route/index.js.map +1 -1
- package/dist/server/index.d.ts +18 -0
- package/dist/server/index.js +2419 -2059
- package/dist/server/index.js.map +1 -1
- package/dist/types/index.d.ts +121 -0
- package/dist/types/index.js +38 -0
- package/dist/types/index.js.map +1 -0
- package/dist/types-BXibIEyj.d.ts +60 -0
- package/package.json +67 -17
- package/dist/auto-loader-C44TcLmM.d.ts +0 -125
- package/dist/bind-pssq1NRT.d.ts +0 -34
- package/dist/postgres-errors-CY_Es8EJ.d.ts +0 -1703
- package/dist/scripts/index.d.ts +0 -24
- package/dist/scripts/index.js +0 -1201
- package/dist/scripts/index.js.map +0 -1
- package/dist/scripts/templates/api-index.template.txt +0 -10
- package/dist/scripts/templates/api-tag.template.txt +0 -11
- package/dist/scripts/templates/contract.template.txt +0 -87
- package/dist/scripts/templates/entity-type.template.txt +0 -31
- package/dist/scripts/templates/entity.template.txt +0 -19
- package/dist/scripts/templates/index.template.txt +0 -10
- package/dist/scripts/templates/repository.template.txt +0 -37
- package/dist/scripts/templates/routes-id.template.txt +0 -59
- package/dist/scripts/templates/routes-index.template.txt +0 -44
- package/dist/types-SlzTr8ZO.d.ts +0 -143
package/dist/db/index.js
CHANGED
|
@@ -1,56 +1,45 @@
|
|
|
1
|
-
import { config } from 'dotenv';
|
|
2
1
|
import { drizzle } from 'drizzle-orm/postgres-js';
|
|
3
|
-
import postgres from 'postgres';
|
|
4
2
|
import pino from 'pino';
|
|
5
|
-
import { existsSync, mkdirSync, createWriteStream } from 'fs';
|
|
6
|
-
import { join } from 'path';
|
|
3
|
+
import { existsSync, mkdirSync, accessSync, constants, writeFileSync, unlinkSync, createWriteStream, statSync, readdirSync, renameSync, readFileSync } from 'fs';
|
|
4
|
+
import { join, dirname, basename } from 'path';
|
|
5
|
+
import { config } from 'dotenv';
|
|
6
|
+
import postgres from 'postgres';
|
|
7
|
+
import { bigserial, timestamp, pgSchema } from 'drizzle-orm/pg-core';
|
|
7
8
|
import { AsyncLocalStorage } from 'async_hooks';
|
|
9
|
+
import { randomUUID } from 'crypto';
|
|
8
10
|
import { createMiddleware } from 'hono/factory';
|
|
9
|
-
import {
|
|
10
|
-
import { bigserial, timestamp } from 'drizzle-orm/pg-core';
|
|
11
|
+
import { eq, and } from 'drizzle-orm';
|
|
11
12
|
|
|
12
13
|
// src/db/manager/factory.ts
|
|
13
14
|
var PinoAdapter = class _PinoAdapter {
|
|
14
15
|
logger;
|
|
15
|
-
constructor(
|
|
16
|
-
const isProduction = process.env.NODE_ENV === "production";
|
|
16
|
+
constructor(config) {
|
|
17
17
|
const isDevelopment = process.env.NODE_ENV === "development";
|
|
18
|
-
const
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
18
|
+
const transport = isDevelopment ? {
|
|
19
|
+
target: "pino-pretty",
|
|
20
|
+
options: {
|
|
21
|
+
colorize: true,
|
|
22
|
+
translateTime: "HH:MM:ss.l",
|
|
23
|
+
ignore: "pid,hostname",
|
|
24
|
+
singleLine: false,
|
|
25
|
+
messageFormat: "{module} {msg}",
|
|
26
|
+
errorLikeObjectKeys: ["err", "error"]
|
|
27
|
+
}
|
|
28
|
+
} : void 0;
|
|
29
|
+
try {
|
|
30
|
+
this.logger = pino({
|
|
31
|
+
level: config.level,
|
|
32
|
+
// 기본 필드
|
|
33
|
+
base: config.module ? { module: config.module } : void 0,
|
|
34
|
+
// Transport (pretty print in development if available)
|
|
35
|
+
transport
|
|
29
36
|
});
|
|
30
|
-
}
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
const maxFiles = parseInt(process.env.LOG_MAX_FILES || "10", 10);
|
|
35
|
-
targets.push({
|
|
36
|
-
target: "pino-roll",
|
|
37
|
-
level: "info",
|
|
38
|
-
options: {
|
|
39
|
-
file: `${logDir}/app.log`,
|
|
40
|
-
frequency: "daily",
|
|
41
|
-
size: maxFileSize,
|
|
42
|
-
limit: { count: maxFiles },
|
|
43
|
-
mkdir: true
|
|
44
|
-
}
|
|
37
|
+
} catch (error) {
|
|
38
|
+
this.logger = pino({
|
|
39
|
+
level: config.level,
|
|
40
|
+
base: config.module ? { module: config.module } : void 0
|
|
45
41
|
});
|
|
46
42
|
}
|
|
47
|
-
this.logger = pino({
|
|
48
|
-
level: config2.level,
|
|
49
|
-
// Transport 설정 (targets가 있으면 사용, 없으면 기본 stdout)
|
|
50
|
-
transport: targets.length > 0 ? { targets } : void 0,
|
|
51
|
-
// 기본 필드
|
|
52
|
-
base: config2.module ? { module: config2.module } : void 0
|
|
53
|
-
});
|
|
54
43
|
}
|
|
55
44
|
child(module) {
|
|
56
45
|
const childLogger = new _PinoAdapter({ level: this.logger.level, module });
|
|
@@ -88,13 +77,183 @@ var PinoAdapter = class _PinoAdapter {
|
|
|
88
77
|
}
|
|
89
78
|
};
|
|
90
79
|
|
|
80
|
+
// src/logger/types.ts
|
|
81
|
+
var LOG_LEVEL_PRIORITY = {
|
|
82
|
+
debug: 0,
|
|
83
|
+
info: 1,
|
|
84
|
+
warn: 2,
|
|
85
|
+
error: 3,
|
|
86
|
+
fatal: 4
|
|
87
|
+
};
|
|
88
|
+
|
|
89
|
+
// src/logger/formatters.ts
|
|
90
|
+
var SENSITIVE_KEYS = [
|
|
91
|
+
"password",
|
|
92
|
+
"passwd",
|
|
93
|
+
"pwd",
|
|
94
|
+
"secret",
|
|
95
|
+
"token",
|
|
96
|
+
"apikey",
|
|
97
|
+
"api_key",
|
|
98
|
+
"accesstoken",
|
|
99
|
+
"access_token",
|
|
100
|
+
"refreshtoken",
|
|
101
|
+
"refresh_token",
|
|
102
|
+
"authorization",
|
|
103
|
+
"auth",
|
|
104
|
+
"cookie",
|
|
105
|
+
"session",
|
|
106
|
+
"sessionid",
|
|
107
|
+
"session_id",
|
|
108
|
+
"privatekey",
|
|
109
|
+
"private_key",
|
|
110
|
+
"creditcard",
|
|
111
|
+
"credit_card",
|
|
112
|
+
"cardnumber",
|
|
113
|
+
"card_number",
|
|
114
|
+
"cvv",
|
|
115
|
+
"ssn",
|
|
116
|
+
"pin"
|
|
117
|
+
];
|
|
118
|
+
var MASKED_VALUE = "***MASKED***";
|
|
119
|
+
function isSensitiveKey(key) {
|
|
120
|
+
const lowerKey = key.toLowerCase();
|
|
121
|
+
return SENSITIVE_KEYS.some((sensitive) => lowerKey.includes(sensitive));
|
|
122
|
+
}
|
|
123
|
+
function maskSensitiveData(data) {
|
|
124
|
+
if (data === null || data === void 0) {
|
|
125
|
+
return data;
|
|
126
|
+
}
|
|
127
|
+
if (Array.isArray(data)) {
|
|
128
|
+
return data.map((item) => maskSensitiveData(item));
|
|
129
|
+
}
|
|
130
|
+
if (typeof data === "object") {
|
|
131
|
+
const masked = {};
|
|
132
|
+
for (const [key, value] of Object.entries(data)) {
|
|
133
|
+
if (isSensitiveKey(key)) {
|
|
134
|
+
masked[key] = MASKED_VALUE;
|
|
135
|
+
} else if (typeof value === "object" && value !== null) {
|
|
136
|
+
masked[key] = maskSensitiveData(value);
|
|
137
|
+
} else {
|
|
138
|
+
masked[key] = value;
|
|
139
|
+
}
|
|
140
|
+
}
|
|
141
|
+
return masked;
|
|
142
|
+
}
|
|
143
|
+
return data;
|
|
144
|
+
}
|
|
145
|
+
var COLORS = {
|
|
146
|
+
reset: "\x1B[0m",
|
|
147
|
+
bright: "\x1B[1m",
|
|
148
|
+
dim: "\x1B[2m",
|
|
149
|
+
// 로그 레벨 컬러
|
|
150
|
+
debug: "\x1B[36m",
|
|
151
|
+
// cyan
|
|
152
|
+
info: "\x1B[32m",
|
|
153
|
+
// green
|
|
154
|
+
warn: "\x1B[33m",
|
|
155
|
+
// yellow
|
|
156
|
+
error: "\x1B[31m",
|
|
157
|
+
// red
|
|
158
|
+
fatal: "\x1B[35m",
|
|
159
|
+
// magenta
|
|
160
|
+
// 추가 컬러
|
|
161
|
+
gray: "\x1B[90m"
|
|
162
|
+
};
|
|
163
|
+
function formatTimestamp(date) {
|
|
164
|
+
return date.toISOString();
|
|
165
|
+
}
|
|
166
|
+
function formatTimestampHuman(date) {
|
|
167
|
+
const year = date.getFullYear();
|
|
168
|
+
const month = String(date.getMonth() + 1).padStart(2, "0");
|
|
169
|
+
const day = String(date.getDate()).padStart(2, "0");
|
|
170
|
+
const hours = String(date.getHours()).padStart(2, "0");
|
|
171
|
+
const minutes = String(date.getMinutes()).padStart(2, "0");
|
|
172
|
+
const seconds = String(date.getSeconds()).padStart(2, "0");
|
|
173
|
+
const ms = String(date.getMilliseconds()).padStart(3, "0");
|
|
174
|
+
return `${year}-${month}-${day} ${hours}:${minutes}:${seconds}.${ms}`;
|
|
175
|
+
}
|
|
176
|
+
function formatError(error) {
|
|
177
|
+
const lines = [];
|
|
178
|
+
lines.push(`${error.name}: ${error.message}`);
|
|
179
|
+
if (error.stack) {
|
|
180
|
+
const stackLines = error.stack.split("\n").slice(1);
|
|
181
|
+
lines.push(...stackLines);
|
|
182
|
+
}
|
|
183
|
+
return lines.join("\n");
|
|
184
|
+
}
|
|
185
|
+
function formatConsole(metadata, colorize = true) {
|
|
186
|
+
const parts = [];
|
|
187
|
+
const timestamp2 = formatTimestampHuman(metadata.timestamp);
|
|
188
|
+
if (colorize) {
|
|
189
|
+
parts.push(`${COLORS.gray}[${timestamp2}]${COLORS.reset}`);
|
|
190
|
+
} else {
|
|
191
|
+
parts.push(`[${timestamp2}]`);
|
|
192
|
+
}
|
|
193
|
+
if (metadata.module) {
|
|
194
|
+
if (colorize) {
|
|
195
|
+
parts.push(`${COLORS.dim}[module=${metadata.module}]${COLORS.reset}`);
|
|
196
|
+
} else {
|
|
197
|
+
parts.push(`[module=${metadata.module}]`);
|
|
198
|
+
}
|
|
199
|
+
}
|
|
200
|
+
if (metadata.context && Object.keys(metadata.context).length > 0) {
|
|
201
|
+
Object.entries(metadata.context).forEach(([key, value]) => {
|
|
202
|
+
const valueStr = typeof value === "string" ? value : String(value);
|
|
203
|
+
if (colorize) {
|
|
204
|
+
parts.push(`${COLORS.dim}[${key}=${valueStr}]${COLORS.reset}`);
|
|
205
|
+
} else {
|
|
206
|
+
parts.push(`[${key}=${valueStr}]`);
|
|
207
|
+
}
|
|
208
|
+
});
|
|
209
|
+
}
|
|
210
|
+
const levelStr = metadata.level.toUpperCase();
|
|
211
|
+
if (colorize) {
|
|
212
|
+
const color = COLORS[metadata.level];
|
|
213
|
+
parts.push(`${color}(${levelStr})${COLORS.reset}:`);
|
|
214
|
+
} else {
|
|
215
|
+
parts.push(`(${levelStr}):`);
|
|
216
|
+
}
|
|
217
|
+
if (colorize) {
|
|
218
|
+
parts.push(`${COLORS.bright}${metadata.message}${COLORS.reset}`);
|
|
219
|
+
} else {
|
|
220
|
+
parts.push(metadata.message);
|
|
221
|
+
}
|
|
222
|
+
let output = parts.join(" ");
|
|
223
|
+
if (metadata.error) {
|
|
224
|
+
output += "\n" + formatError(metadata.error);
|
|
225
|
+
}
|
|
226
|
+
return output;
|
|
227
|
+
}
|
|
228
|
+
function formatJSON(metadata) {
|
|
229
|
+
const obj = {
|
|
230
|
+
timestamp: formatTimestamp(metadata.timestamp),
|
|
231
|
+
level: metadata.level,
|
|
232
|
+
message: metadata.message
|
|
233
|
+
};
|
|
234
|
+
if (metadata.module) {
|
|
235
|
+
obj.module = metadata.module;
|
|
236
|
+
}
|
|
237
|
+
if (metadata.context) {
|
|
238
|
+
obj.context = metadata.context;
|
|
239
|
+
}
|
|
240
|
+
if (metadata.error) {
|
|
241
|
+
obj.error = {
|
|
242
|
+
name: metadata.error.name,
|
|
243
|
+
message: metadata.error.message,
|
|
244
|
+
stack: metadata.error.stack
|
|
245
|
+
};
|
|
246
|
+
}
|
|
247
|
+
return JSON.stringify(obj);
|
|
248
|
+
}
|
|
249
|
+
|
|
91
250
|
// src/logger/logger.ts
|
|
92
251
|
var Logger = class _Logger {
|
|
93
252
|
config;
|
|
94
253
|
module;
|
|
95
|
-
constructor(
|
|
96
|
-
this.config =
|
|
97
|
-
this.module =
|
|
254
|
+
constructor(config) {
|
|
255
|
+
this.config = config;
|
|
256
|
+
this.module = config.module;
|
|
98
257
|
}
|
|
99
258
|
/**
|
|
100
259
|
* Get current log level
|
|
@@ -148,13 +307,17 @@ var Logger = class _Logger {
|
|
|
148
307
|
* Log processing (internal)
|
|
149
308
|
*/
|
|
150
309
|
log(level, message, error, context) {
|
|
310
|
+
if (LOG_LEVEL_PRIORITY[level] < LOG_LEVEL_PRIORITY[this.config.level]) {
|
|
311
|
+
return;
|
|
312
|
+
}
|
|
151
313
|
const metadata = {
|
|
152
314
|
timestamp: /* @__PURE__ */ new Date(),
|
|
153
315
|
level,
|
|
154
316
|
message,
|
|
155
317
|
module: this.module,
|
|
156
318
|
error,
|
|
157
|
-
context
|
|
319
|
+
// Mask sensitive information in context to prevent credential leaks
|
|
320
|
+
context: context ? maskSensitiveData(context) : void 0
|
|
158
321
|
};
|
|
159
322
|
this.processTransports(metadata);
|
|
160
323
|
}
|
|
@@ -190,130 +353,16 @@ var Logger = class _Logger {
|
|
|
190
353
|
}
|
|
191
354
|
};
|
|
192
355
|
|
|
193
|
-
// src/logger/types.ts
|
|
194
|
-
var LOG_LEVEL_PRIORITY = {
|
|
195
|
-
debug: 0,
|
|
196
|
-
info: 1,
|
|
197
|
-
warn: 2,
|
|
198
|
-
error: 3,
|
|
199
|
-
fatal: 4
|
|
200
|
-
};
|
|
201
|
-
|
|
202
|
-
// src/logger/formatters.ts
|
|
203
|
-
var COLORS = {
|
|
204
|
-
reset: "\x1B[0m",
|
|
205
|
-
bright: "\x1B[1m",
|
|
206
|
-
dim: "\x1B[2m",
|
|
207
|
-
// 로그 레벨 컬러
|
|
208
|
-
debug: "\x1B[36m",
|
|
209
|
-
// cyan
|
|
210
|
-
info: "\x1B[32m",
|
|
211
|
-
// green
|
|
212
|
-
warn: "\x1B[33m",
|
|
213
|
-
// yellow
|
|
214
|
-
error: "\x1B[31m",
|
|
215
|
-
// red
|
|
216
|
-
fatal: "\x1B[35m",
|
|
217
|
-
// magenta
|
|
218
|
-
// 추가 컬러
|
|
219
|
-
gray: "\x1B[90m"
|
|
220
|
-
};
|
|
221
|
-
function colorizeLevel(level) {
|
|
222
|
-
const color = COLORS[level];
|
|
223
|
-
const levelStr = level.toUpperCase().padEnd(5);
|
|
224
|
-
return `${color}${levelStr}${COLORS.reset}`;
|
|
225
|
-
}
|
|
226
|
-
function formatTimestamp(date) {
|
|
227
|
-
return date.toISOString();
|
|
228
|
-
}
|
|
229
|
-
function formatTimestampHuman(date) {
|
|
230
|
-
const year = date.getFullYear();
|
|
231
|
-
const month = String(date.getMonth() + 1).padStart(2, "0");
|
|
232
|
-
const day = String(date.getDate()).padStart(2, "0");
|
|
233
|
-
const hours = String(date.getHours()).padStart(2, "0");
|
|
234
|
-
const minutes = String(date.getMinutes()).padStart(2, "0");
|
|
235
|
-
const seconds = String(date.getSeconds()).padStart(2, "0");
|
|
236
|
-
const ms = String(date.getMilliseconds()).padStart(3, "0");
|
|
237
|
-
return `${year}-${month}-${day} ${hours}:${minutes}:${seconds}.${ms}`;
|
|
238
|
-
}
|
|
239
|
-
function formatError(error) {
|
|
240
|
-
const lines = [];
|
|
241
|
-
lines.push(`${error.name}: ${error.message}`);
|
|
242
|
-
if (error.stack) {
|
|
243
|
-
const stackLines = error.stack.split("\n").slice(1);
|
|
244
|
-
lines.push(...stackLines);
|
|
245
|
-
}
|
|
246
|
-
return lines.join("\n");
|
|
247
|
-
}
|
|
248
|
-
function formatContext(context) {
|
|
249
|
-
try {
|
|
250
|
-
return JSON.stringify(context, null, 2);
|
|
251
|
-
} catch (error) {
|
|
252
|
-
return "[Context serialization failed]";
|
|
253
|
-
}
|
|
254
|
-
}
|
|
255
|
-
function formatConsole(metadata, colorize = true) {
|
|
256
|
-
const parts = [];
|
|
257
|
-
const timestamp2 = formatTimestampHuman(metadata.timestamp);
|
|
258
|
-
if (colorize) {
|
|
259
|
-
parts.push(`${COLORS.gray}${timestamp2}${COLORS.reset}`);
|
|
260
|
-
} else {
|
|
261
|
-
parts.push(timestamp2);
|
|
262
|
-
}
|
|
263
|
-
if (colorize) {
|
|
264
|
-
parts.push(colorizeLevel(metadata.level));
|
|
265
|
-
} else {
|
|
266
|
-
parts.push(metadata.level.toUpperCase().padEnd(5));
|
|
267
|
-
}
|
|
268
|
-
if (metadata.module) {
|
|
269
|
-
if (colorize) {
|
|
270
|
-
parts.push(`${COLORS.dim}[${metadata.module}]${COLORS.reset}`);
|
|
271
|
-
} else {
|
|
272
|
-
parts.push(`[${metadata.module}]`);
|
|
273
|
-
}
|
|
274
|
-
}
|
|
275
|
-
parts.push(metadata.message);
|
|
276
|
-
let output = parts.join(" ");
|
|
277
|
-
if (metadata.context && Object.keys(metadata.context).length > 0) {
|
|
278
|
-
output += "\n" + formatContext(metadata.context);
|
|
279
|
-
}
|
|
280
|
-
if (metadata.error) {
|
|
281
|
-
output += "\n" + formatError(metadata.error);
|
|
282
|
-
}
|
|
283
|
-
return output;
|
|
284
|
-
}
|
|
285
|
-
function formatJSON(metadata) {
|
|
286
|
-
const obj = {
|
|
287
|
-
timestamp: formatTimestamp(metadata.timestamp),
|
|
288
|
-
level: metadata.level,
|
|
289
|
-
message: metadata.message
|
|
290
|
-
};
|
|
291
|
-
if (metadata.module) {
|
|
292
|
-
obj.module = metadata.module;
|
|
293
|
-
}
|
|
294
|
-
if (metadata.context) {
|
|
295
|
-
obj.context = metadata.context;
|
|
296
|
-
}
|
|
297
|
-
if (metadata.error) {
|
|
298
|
-
obj.error = {
|
|
299
|
-
name: metadata.error.name,
|
|
300
|
-
message: metadata.error.message,
|
|
301
|
-
stack: metadata.error.stack
|
|
302
|
-
};
|
|
303
|
-
}
|
|
304
|
-
return JSON.stringify(obj);
|
|
305
|
-
}
|
|
306
|
-
|
|
307
356
|
// src/logger/transports/console.ts
|
|
308
357
|
var ConsoleTransport = class {
|
|
309
358
|
name = "console";
|
|
310
359
|
level;
|
|
311
360
|
enabled;
|
|
312
361
|
colorize;
|
|
313
|
-
constructor(
|
|
314
|
-
this.level =
|
|
315
|
-
this.enabled =
|
|
316
|
-
this.colorize =
|
|
362
|
+
constructor(config) {
|
|
363
|
+
this.level = config.level;
|
|
364
|
+
this.enabled = config.enabled;
|
|
365
|
+
this.colorize = config.colorize ?? true;
|
|
317
366
|
}
|
|
318
367
|
async log(metadata) {
|
|
319
368
|
if (!this.enabled) {
|
|
@@ -335,12 +384,16 @@ var FileTransport = class {
|
|
|
335
384
|
level;
|
|
336
385
|
enabled;
|
|
337
386
|
logDir;
|
|
387
|
+
maxFileSize;
|
|
388
|
+
maxFiles;
|
|
338
389
|
currentStream = null;
|
|
339
390
|
currentFilename = null;
|
|
340
|
-
constructor(
|
|
341
|
-
this.level =
|
|
342
|
-
this.enabled =
|
|
343
|
-
this.logDir =
|
|
391
|
+
constructor(config) {
|
|
392
|
+
this.level = config.level;
|
|
393
|
+
this.enabled = config.enabled;
|
|
394
|
+
this.logDir = config.logDir;
|
|
395
|
+
this.maxFileSize = config.maxFileSize ?? 10 * 1024 * 1024;
|
|
396
|
+
this.maxFiles = config.maxFiles ?? 10;
|
|
344
397
|
if (!existsSync(this.logDir)) {
|
|
345
398
|
mkdirSync(this.logDir, { recursive: true });
|
|
346
399
|
}
|
|
@@ -356,6 +409,9 @@ var FileTransport = class {
|
|
|
356
409
|
const filename = this.getLogFilename(metadata.timestamp);
|
|
357
410
|
if (this.currentFilename !== filename) {
|
|
358
411
|
await this.rotateStream(filename);
|
|
412
|
+
await this.cleanOldFiles();
|
|
413
|
+
} else if (this.currentFilename) {
|
|
414
|
+
await this.checkAndRotateBySize();
|
|
359
415
|
}
|
|
360
416
|
if (this.currentStream) {
|
|
361
417
|
return new Promise((resolve, reject) => {
|
|
@@ -412,25 +468,123 @@ var FileTransport = class {
|
|
|
412
468
|
});
|
|
413
469
|
}
|
|
414
470
|
/**
|
|
415
|
-
*
|
|
471
|
+
* 파일 크기 체크 및 크기 기반 로테이션
|
|
416
472
|
*/
|
|
417
|
-
|
|
418
|
-
|
|
419
|
-
|
|
420
|
-
|
|
421
|
-
|
|
473
|
+
async checkAndRotateBySize() {
|
|
474
|
+
if (!this.currentFilename) {
|
|
475
|
+
return;
|
|
476
|
+
}
|
|
477
|
+
const filepath = join(this.logDir, this.currentFilename);
|
|
478
|
+
if (!existsSync(filepath)) {
|
|
479
|
+
return;
|
|
480
|
+
}
|
|
481
|
+
try {
|
|
482
|
+
const stats = statSync(filepath);
|
|
483
|
+
if (stats.size >= this.maxFileSize) {
|
|
484
|
+
await this.rotateBySize();
|
|
485
|
+
}
|
|
486
|
+
} catch (error) {
|
|
487
|
+
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
488
|
+
process.stderr.write(`[FileTransport] Failed to check file size: ${errorMessage}
|
|
489
|
+
`);
|
|
490
|
+
}
|
|
422
491
|
}
|
|
423
|
-
|
|
492
|
+
/**
|
|
493
|
+
* 크기 기반 로테이션 수행
|
|
494
|
+
* 예: 2025-01-01.log -> 2025-01-01.1.log, 2025-01-01.1.log -> 2025-01-01.2.log
|
|
495
|
+
*/
|
|
496
|
+
async rotateBySize() {
|
|
497
|
+
if (!this.currentFilename) {
|
|
498
|
+
return;
|
|
499
|
+
}
|
|
424
500
|
await this.closeStream();
|
|
501
|
+
const baseName = this.currentFilename.replace(/\.log$/, "");
|
|
502
|
+
const files = readdirSync(this.logDir);
|
|
503
|
+
const relatedFiles = files.filter((file) => file.startsWith(baseName) && file.endsWith(".log")).sort().reverse();
|
|
504
|
+
for (const file of relatedFiles) {
|
|
505
|
+
const match = file.match(/\.(\d+)\.log$/);
|
|
506
|
+
if (match) {
|
|
507
|
+
const oldNum = parseInt(match[1], 10);
|
|
508
|
+
const newNum = oldNum + 1;
|
|
509
|
+
const oldPath = join(this.logDir, file);
|
|
510
|
+
const newPath2 = join(this.logDir, `${baseName}.${newNum}.log`);
|
|
511
|
+
try {
|
|
512
|
+
renameSync(oldPath, newPath2);
|
|
513
|
+
} catch (error) {
|
|
514
|
+
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
515
|
+
process.stderr.write(`[FileTransport] Failed to rotate file: ${errorMessage}
|
|
516
|
+
`);
|
|
517
|
+
}
|
|
518
|
+
}
|
|
519
|
+
}
|
|
520
|
+
const currentPath = join(this.logDir, this.currentFilename);
|
|
521
|
+
const newPath = join(this.logDir, `${baseName}.1.log`);
|
|
522
|
+
try {
|
|
523
|
+
if (existsSync(currentPath)) {
|
|
524
|
+
renameSync(currentPath, newPath);
|
|
525
|
+
}
|
|
526
|
+
} catch (error) {
|
|
527
|
+
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
528
|
+
process.stderr.write(`[FileTransport] Failed to rotate current file: ${errorMessage}
|
|
529
|
+
`);
|
|
530
|
+
}
|
|
531
|
+
await this.rotateStream(this.currentFilename);
|
|
425
532
|
}
|
|
426
|
-
|
|
427
|
-
|
|
428
|
-
|
|
429
|
-
|
|
430
|
-
|
|
431
|
-
|
|
432
|
-
|
|
433
|
-
|
|
533
|
+
/**
|
|
534
|
+
* 오래된 로그 파일 정리
|
|
535
|
+
* maxFiles 개수를 초과하는 로그 파일 삭제
|
|
536
|
+
*/
|
|
537
|
+
async cleanOldFiles() {
|
|
538
|
+
try {
|
|
539
|
+
if (!existsSync(this.logDir)) {
|
|
540
|
+
return;
|
|
541
|
+
}
|
|
542
|
+
const files = readdirSync(this.logDir);
|
|
543
|
+
const logFiles = files.filter((file) => file.endsWith(".log")).map((file) => {
|
|
544
|
+
const filepath = join(this.logDir, file);
|
|
545
|
+
const stats = statSync(filepath);
|
|
546
|
+
return { file, mtime: stats.mtime };
|
|
547
|
+
}).sort((a, b) => b.mtime.getTime() - a.mtime.getTime());
|
|
548
|
+
if (logFiles.length > this.maxFiles) {
|
|
549
|
+
const filesToDelete = logFiles.slice(this.maxFiles);
|
|
550
|
+
for (const { file } of filesToDelete) {
|
|
551
|
+
const filepath = join(this.logDir, file);
|
|
552
|
+
try {
|
|
553
|
+
unlinkSync(filepath);
|
|
554
|
+
} catch (error) {
|
|
555
|
+
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
556
|
+
process.stderr.write(`[FileTransport] Failed to delete old file "${file}": ${errorMessage}
|
|
557
|
+
`);
|
|
558
|
+
}
|
|
559
|
+
}
|
|
560
|
+
}
|
|
561
|
+
} catch (error) {
|
|
562
|
+
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
563
|
+
process.stderr.write(`[FileTransport] Failed to clean old files: ${errorMessage}
|
|
564
|
+
`);
|
|
565
|
+
}
|
|
566
|
+
}
|
|
567
|
+
/**
|
|
568
|
+
* 날짜별 로그 파일명 생성
|
|
569
|
+
*/
|
|
570
|
+
getLogFilename(date) {
|
|
571
|
+
const year = date.getFullYear();
|
|
572
|
+
const month = String(date.getMonth() + 1).padStart(2, "0");
|
|
573
|
+
const day = String(date.getDate()).padStart(2, "0");
|
|
574
|
+
return `${year}-${month}-${day}.log`;
|
|
575
|
+
}
|
|
576
|
+
async close() {
|
|
577
|
+
await this.closeStream();
|
|
578
|
+
}
|
|
579
|
+
};
|
|
580
|
+
function isFileLoggingEnabled() {
|
|
581
|
+
return process.env.LOGGER_FILE_ENABLED === "true";
|
|
582
|
+
}
|
|
583
|
+
function getDefaultLogLevel() {
|
|
584
|
+
const isProduction = process.env.NODE_ENV === "production";
|
|
585
|
+
const isDevelopment = process.env.NODE_ENV === "development";
|
|
586
|
+
if (isDevelopment) {
|
|
587
|
+
return "debug";
|
|
434
588
|
}
|
|
435
589
|
if (isProduction) {
|
|
436
590
|
return "info";
|
|
@@ -458,6 +612,109 @@ function getFileConfig() {
|
|
|
458
612
|
maxFiles: 10
|
|
459
613
|
};
|
|
460
614
|
}
|
|
615
|
+
function validateDirectoryWritable(dirPath) {
|
|
616
|
+
if (!existsSync(dirPath)) {
|
|
617
|
+
try {
|
|
618
|
+
mkdirSync(dirPath, { recursive: true });
|
|
619
|
+
} catch (error) {
|
|
620
|
+
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
621
|
+
throw new Error(`Failed to create log directory "${dirPath}": ${errorMessage}`);
|
|
622
|
+
}
|
|
623
|
+
}
|
|
624
|
+
try {
|
|
625
|
+
accessSync(dirPath, constants.W_OK);
|
|
626
|
+
} catch {
|
|
627
|
+
throw new Error(`Log directory "${dirPath}" is not writable. Please check permissions.`);
|
|
628
|
+
}
|
|
629
|
+
const testFile = join(dirPath, ".logger-write-test");
|
|
630
|
+
try {
|
|
631
|
+
writeFileSync(testFile, "test", "utf-8");
|
|
632
|
+
unlinkSync(testFile);
|
|
633
|
+
} catch (error) {
|
|
634
|
+
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
635
|
+
throw new Error(`Cannot write to log directory "${dirPath}": ${errorMessage}`);
|
|
636
|
+
}
|
|
637
|
+
}
|
|
638
|
+
function validateFileConfig() {
|
|
639
|
+
if (!isFileLoggingEnabled()) {
|
|
640
|
+
return;
|
|
641
|
+
}
|
|
642
|
+
const logDir = process.env.LOG_DIR;
|
|
643
|
+
if (!logDir) {
|
|
644
|
+
throw new Error(
|
|
645
|
+
"LOG_DIR environment variable is required when LOGGER_FILE_ENABLED=true. Example: LOG_DIR=/var/log/myapp"
|
|
646
|
+
);
|
|
647
|
+
}
|
|
648
|
+
validateDirectoryWritable(logDir);
|
|
649
|
+
}
|
|
650
|
+
function validateSlackConfig() {
|
|
651
|
+
const webhookUrl = process.env.SLACK_WEBHOOK_URL;
|
|
652
|
+
if (!webhookUrl) {
|
|
653
|
+
return;
|
|
654
|
+
}
|
|
655
|
+
if (!webhookUrl.startsWith("https://hooks.slack.com/")) {
|
|
656
|
+
throw new Error(
|
|
657
|
+
`Invalid SLACK_WEBHOOK_URL: "${webhookUrl}". Slack webhook URLs must start with "https://hooks.slack.com/"`
|
|
658
|
+
);
|
|
659
|
+
}
|
|
660
|
+
}
|
|
661
|
+
function validateEmailConfig() {
|
|
662
|
+
const smtpHost = process.env.SMTP_HOST;
|
|
663
|
+
const smtpPort = process.env.SMTP_PORT;
|
|
664
|
+
const emailFrom = process.env.EMAIL_FROM;
|
|
665
|
+
const emailTo = process.env.EMAIL_TO;
|
|
666
|
+
const hasAnyEmailConfig = smtpHost || smtpPort || emailFrom || emailTo;
|
|
667
|
+
if (!hasAnyEmailConfig) {
|
|
668
|
+
return;
|
|
669
|
+
}
|
|
670
|
+
const missingFields = [];
|
|
671
|
+
if (!smtpHost) missingFields.push("SMTP_HOST");
|
|
672
|
+
if (!smtpPort) missingFields.push("SMTP_PORT");
|
|
673
|
+
if (!emailFrom) missingFields.push("EMAIL_FROM");
|
|
674
|
+
if (!emailTo) missingFields.push("EMAIL_TO");
|
|
675
|
+
if (missingFields.length > 0) {
|
|
676
|
+
throw new Error(
|
|
677
|
+
`Email transport configuration incomplete. Missing: ${missingFields.join(", ")}. Either set all required fields or remove all email configuration.`
|
|
678
|
+
);
|
|
679
|
+
}
|
|
680
|
+
const port = parseInt(smtpPort, 10);
|
|
681
|
+
if (isNaN(port) || port < 1 || port > 65535) {
|
|
682
|
+
throw new Error(
|
|
683
|
+
`Invalid SMTP_PORT: "${smtpPort}". Must be a number between 1 and 65535.`
|
|
684
|
+
);
|
|
685
|
+
}
|
|
686
|
+
const emailRegex = /^[^\s@]+@[^\s@]+\.[^\s@]+$/;
|
|
687
|
+
if (!emailRegex.test(emailFrom)) {
|
|
688
|
+
throw new Error(`Invalid EMAIL_FROM format: "${emailFrom}"`);
|
|
689
|
+
}
|
|
690
|
+
const recipients = emailTo.split(",").map((e) => e.trim());
|
|
691
|
+
for (const email of recipients) {
|
|
692
|
+
if (!emailRegex.test(email)) {
|
|
693
|
+
throw new Error(`Invalid email address in EMAIL_TO: "${email}"`);
|
|
694
|
+
}
|
|
695
|
+
}
|
|
696
|
+
}
|
|
697
|
+
function validateEnvironment() {
|
|
698
|
+
const nodeEnv = process.env.NODE_ENV;
|
|
699
|
+
if (!nodeEnv) {
|
|
700
|
+
process.stderr.write(
|
|
701
|
+
"[Logger] Warning: NODE_ENV is not set. Defaulting to test environment.\n"
|
|
702
|
+
);
|
|
703
|
+
}
|
|
704
|
+
}
|
|
705
|
+
function validateConfig() {
|
|
706
|
+
try {
|
|
707
|
+
validateEnvironment();
|
|
708
|
+
validateFileConfig();
|
|
709
|
+
validateSlackConfig();
|
|
710
|
+
validateEmailConfig();
|
|
711
|
+
} catch (error) {
|
|
712
|
+
if (error instanceof Error) {
|
|
713
|
+
throw new Error(`[Logger] Configuration validation failed: ${error.message}`);
|
|
714
|
+
}
|
|
715
|
+
throw error;
|
|
716
|
+
}
|
|
717
|
+
}
|
|
461
718
|
|
|
462
719
|
// src/logger/adapters/custom.ts
|
|
463
720
|
function initializeTransports() {
|
|
@@ -472,10 +729,10 @@ function initializeTransports() {
|
|
|
472
729
|
}
|
|
473
730
|
var CustomAdapter = class _CustomAdapter {
|
|
474
731
|
logger;
|
|
475
|
-
constructor(
|
|
732
|
+
constructor(config) {
|
|
476
733
|
this.logger = new Logger({
|
|
477
|
-
level:
|
|
478
|
-
module:
|
|
734
|
+
level: config.level,
|
|
735
|
+
module: config.module,
|
|
479
736
|
transports: initializeTransports()
|
|
480
737
|
});
|
|
481
738
|
}
|
|
@@ -535,7 +792,205 @@ function getAdapterType() {
|
|
|
535
792
|
}
|
|
536
793
|
return "pino";
|
|
537
794
|
}
|
|
538
|
-
|
|
795
|
+
function initializeLogger() {
|
|
796
|
+
validateConfig();
|
|
797
|
+
return createAdapter(getAdapterType());
|
|
798
|
+
}
|
|
799
|
+
var logger = initializeLogger();
|
|
800
|
+
|
|
801
|
+
// src/env/config.ts
|
|
802
|
+
var ENV_FILE_PRIORITY = [
|
|
803
|
+
".env",
|
|
804
|
+
// Base configuration (lowest priority)
|
|
805
|
+
".env.{NODE_ENV}",
|
|
806
|
+
// Environment-specific
|
|
807
|
+
".env.local",
|
|
808
|
+
// Local overrides (excluded in test)
|
|
809
|
+
".env.{NODE_ENV}.local"
|
|
810
|
+
// Local environment-specific (highest priority)
|
|
811
|
+
];
|
|
812
|
+
var TEST_ONLY_FILES = [
|
|
813
|
+
".env.test",
|
|
814
|
+
".env.test.local"
|
|
815
|
+
];
|
|
816
|
+
|
|
817
|
+
// src/env/loader.ts
|
|
818
|
+
var envLogger = logger.child("environment");
|
|
819
|
+
var environmentLoaded = false;
|
|
820
|
+
var cachedLoadResult;
|
|
821
|
+
function buildFileList(basePath, nodeEnv) {
|
|
822
|
+
const files = [];
|
|
823
|
+
if (!nodeEnv) {
|
|
824
|
+
files.push(join(basePath, ".env"));
|
|
825
|
+
files.push(join(basePath, ".env.local"));
|
|
826
|
+
return files;
|
|
827
|
+
}
|
|
828
|
+
for (const pattern of ENV_FILE_PRIORITY) {
|
|
829
|
+
const fileName = pattern.replace("{NODE_ENV}", nodeEnv);
|
|
830
|
+
if (nodeEnv === "test" && fileName === ".env.local") {
|
|
831
|
+
continue;
|
|
832
|
+
}
|
|
833
|
+
if (nodeEnv === "local" && pattern === ".env.local") {
|
|
834
|
+
continue;
|
|
835
|
+
}
|
|
836
|
+
if (nodeEnv !== "test" && TEST_ONLY_FILES.includes(fileName)) {
|
|
837
|
+
continue;
|
|
838
|
+
}
|
|
839
|
+
files.push(join(basePath, fileName));
|
|
840
|
+
}
|
|
841
|
+
return files;
|
|
842
|
+
}
|
|
843
|
+
/**
 * Load and parse one .env file through dotenv's config().
 *
 * A missing file is treated as optional: it yields `{ success: false,
 * error: "File not found" }` without a warning. Parse failures and thrown
 * errors are logged and reported in the same shape.
 *
 * @param {string} filePath - Path of the .env file to load.
 * @param {boolean} debug - Emit verbose debug logs when true.
 * @returns {{success: boolean, parsed: object, error?: string}}
 */
function loadSingleFile(filePath, debug) {
  if (!existsSync(filePath)) {
    if (debug) {
      envLogger.debug("Environment file not found (optional)", { path: filePath });
    }
    return { success: false, parsed: {}, error: "File not found" };
  }
  try {
    const result = config({ path: filePath });
    if (result.error) {
      const reason = result.error.message;
      envLogger.warn("Failed to parse environment file", { path: filePath, error: reason });
      return { success: false, parsed: {}, error: reason };
    }
    const parsed = result.parsed || {};
    if (debug) {
      const names = Object.keys(parsed);
      envLogger.debug("Environment file loaded successfully", {
        path: filePath,
        variables: names,
        count: names.length
      });
    }
    return { success: true, parsed };
  } catch (error) {
    const message = error instanceof Error ? error.message : "Unknown error";
    envLogger.error("Error loading environment file", { path: filePath, error: message });
    return { success: false, parsed: {}, error: message };
  }
}
|
|
883
|
+
/**
 * Ensure every listed environment variable is present in process.env.
 *
 * @param {string[]} required - Variable names that must be set (non-empty).
 * @param {boolean} debug - Emit a debug log when validation succeeds.
 * @throws {Error} Listing all missing variable names.
 */
function validateRequiredVars(required, debug) {
  // Note: a variable set to "" counts as missing (falsy check).
  const missing = required.filter((varName) => !process.env[varName]);
  if (missing.length > 0) {
    const error = `Required environment variables missing: ${missing.join(", ")}`;
    envLogger.error("Environment validation failed", { missing, required });
    throw new Error(error);
  }
  if (debug) {
    envLogger.debug("Required environment variables validated", { required, allPresent: true });
  }
}
|
|
905
|
+
/**
 * Load environment variables from the standard .env file set plus any
 * custom paths, with an in-process cache.
 *
 * Files are applied in reverse priority order so that higher-priority
 * files (earlier in the list) overwrite values from lower-priority ones
 * via Object.assign. A NODE_ENV key found inside a file produces a
 * warning, since NODE_ENV should come from the CLI.
 *
 * Fix: file basename extraction now splits on both "/" and "\\" so the
 * NODE_ENV warning shows the bare file name on Windows paths too
 * (paths are built with path.join and are platform-separated).
 *
 * @param {object} [options]
 * @param {string} [options.basePath=process.cwd()] - Directory for standard files.
 * @param {string[]} [options.customPaths=[]] - Extra .env paths (lowest priority).
 * @param {boolean} [options.debug=false] - Verbose logging.
 * @param {string} [options.nodeEnv=process.env.NODE_ENV||""] - Environment name.
 * @param {string[]} [options.required=[]] - Variables that must end up set.
 * @param {boolean} [options.useCache=true] - Return the cached result if present.
 * @returns {{success: boolean, loaded: string[], failed: {path: string, reason: string}[], parsed: object, warnings: string[], errors?: string[]}}
 * @throws {Error} When a required variable is missing after loading.
 */
function loadEnvironment(options = {}) {
  const {
    basePath = process.cwd(),
    customPaths = [],
    debug = false,
    nodeEnv = process.env.NODE_ENV || "",
    required = [],
    useCache = true
  } = options;
  // Serve the module-level cache when a previous call already loaded.
  if (useCache && environmentLoaded && cachedLoadResult) {
    if (debug) {
      envLogger.debug("Returning cached environment", {
        loaded: cachedLoadResult.loaded.length,
        variables: Object.keys(cachedLoadResult.parsed).length
      });
    }
    return cachedLoadResult;
  }
  if (debug) {
    envLogger.debug("Loading environment variables", {
      basePath,
      nodeEnv,
      customPaths,
      required
    });
  }
  const result = {
    success: true,
    loaded: [],
    failed: [],
    parsed: {},
    warnings: []
  };
  const standardFiles = buildFileList(basePath, nodeEnv);
  const allFiles = [...standardFiles, ...customPaths];
  if (debug) {
    envLogger.debug("Environment files to load", {
      standardFiles,
      customPaths,
      total: allFiles.length
    });
  }
  // Load lowest-priority first so later (higher-priority) files win.
  const reversedFiles = [...allFiles].reverse();
  for (const filePath of reversedFiles) {
    const fileResult = loadSingleFile(filePath, debug);
    if (fileResult.success) {
      result.loaded.push(filePath);
      Object.assign(result.parsed, fileResult.parsed);
      if (fileResult.parsed["NODE_ENV"]) {
        // Split on either separator so Windows paths reduce to a basename.
        const fileName = filePath.split(/[\\/]/).pop() || filePath;
        result.warnings.push(
          `NODE_ENV found in ${fileName}. It's recommended to set NODE_ENV via CLI (e.g., 'spfn dev', 'spfn build') instead of .env files for consistent environment behavior.`
        );
      }
    } else if (fileResult.error) {
      result.failed.push({
        path: filePath,
        reason: fileResult.error
      });
    }
  }
  if (debug || result.loaded.length > 0) {
    envLogger.info("Environment loading complete", {
      loaded: result.loaded.length,
      failed: result.failed.length,
      variables: Object.keys(result.parsed).length,
      files: result.loaded
    });
  }
  if (required.length > 0) {
    try {
      validateRequiredVars(required, debug);
    } catch (error) {
      // Record the failure on the result, then propagate to the caller.
      result.success = false;
      result.errors = [
        error instanceof Error ? error.message : "Validation failed"
      ];
      throw error;
    }
  }
  if (result.warnings.length > 0) {
    for (const warning of result.warnings) {
      envLogger.warn(warning);
    }
  }
  environmentLoaded = true;
  cachedLoadResult = result;
  return result;
}
|
|
539
994
|
|
|
540
995
|
// src/errors/database-errors.ts
|
|
541
996
|
var DatabaseError = class extends Error {
|
|
@@ -575,10 +1030,10 @@ var QueryError = class extends DatabaseError {
|
|
|
575
1030
|
this.name = "QueryError";
|
|
576
1031
|
}
|
|
577
1032
|
};
|
|
578
|
-
var
|
|
1033
|
+
// Query error (HTTP status 400) raised for database constraint violations;
// fromPostgresError constructs these for integrity, not-null, foreign-key
// and check constraint SQLSTATE codes (23000/23001/23502/23503/23514).
var ConstraintViolationError = class extends QueryError {
  /**
   * @param {string} message - Human-readable violation description.
   * @param {object} [details] - Extra context, e.g. { code, constraint }.
   */
  constructor(message, details) {
    super(message, 400, details);
    this.name = "ConstraintViolationError";
  }
};
|
|
584
1039
|
var TransactionError = class extends DatabaseError {
|
|
@@ -643,11 +1098,11 @@ function fromPostgresError(error) {
|
|
|
643
1098
|
case "23000":
|
|
644
1099
|
// integrity_constraint_violation
|
|
645
1100
|
case "23001":
|
|
646
|
-
return new
|
|
1101
|
+
return new ConstraintViolationError(message, { code, constraint: "integrity" });
|
|
647
1102
|
case "23502":
|
|
648
|
-
return new
|
|
1103
|
+
return new ConstraintViolationError(message, { code, constraint: "not_null" });
|
|
649
1104
|
case "23503":
|
|
650
|
-
return new
|
|
1105
|
+
return new ConstraintViolationError(message, { code, constraint: "foreign_key" });
|
|
651
1106
|
case "23505":
|
|
652
1107
|
const parsed = parseUniqueViolation(message);
|
|
653
1108
|
if (parsed) {
|
|
@@ -655,7 +1110,7 @@ function fromPostgresError(error) {
|
|
|
655
1110
|
}
|
|
656
1111
|
return new DuplicateEntryError("field", "value");
|
|
657
1112
|
case "23514":
|
|
658
|
-
return new
|
|
1113
|
+
return new ConstraintViolationError(message, { code, constraint: "check" });
|
|
659
1114
|
// Class 40 — Transaction Rollback
|
|
660
1115
|
case "40000":
|
|
661
1116
|
// transaction_rollback
|
|
@@ -770,23 +1225,45 @@ async function checkConnection(client) {
|
|
|
770
1225
|
}
|
|
771
1226
|
|
|
772
1227
|
// src/db/manager/config.ts
|
|
773
|
-
function
|
|
1228
|
+
/**
 * Read an integer from process.env[key], falling back to an
 * environment-specific default when unset or non-numeric.
 *
 * Improvements: uses Number.parseInt/Number.isNaN (non-coercing) and
 * flattens the nested ternary for readability; behavior is unchanged.
 *
 * @param {string} key - Environment variable name.
 * @param {number} prodDefault - Fallback when NODE_ENV === "production".
 * @param {number} devDefault - Fallback otherwise.
 * @returns {number}
 */
function parseEnvNumber(key, prodDefault, devDefault) {
  const isProduction = process.env.NODE_ENV === "production";
  const fallback = isProduction ? prodDefault : devDefault;
  const envValue = Number.parseInt(process.env[key] || "", 10);
  return Number.isNaN(envValue) ? fallback : envValue;
}
|
|
1233
|
+
/**
 * Read a boolean from process.env[key].
 * Only the case-insensitive string "true" counts as true; any other set
 * value is false; an unset variable yields defaultValue.
 *
 * @param {string} key - Environment variable name.
 * @param {boolean} defaultValue - Returned when the variable is unset.
 * @returns {boolean}
 */
function parseEnvBoolean(key, defaultValue) {
  const raw = process.env[key];
  return raw === void 0 ? defaultValue : raw.toLowerCase() === "true";
}
|
|
1238
|
+
/**
 * Resolve connection pool settings: explicit option wins, then the
 * DB_POOL_* environment variables, then the built-in defaults
 * (prod: max 20 / idle 30, dev: max 10 / idle 20).
 *
 * @param {{max?: number, idleTimeout?: number}} [options]
 * @returns {{max: number, idleTimeout: number}}
 */
function getPoolConfig(options) {
  const max = options?.max ?? parseEnvNumber("DB_POOL_MAX", 20, 10);
  const idleTimeout = options?.idleTimeout ?? parseEnvNumber("DB_POOL_IDLE_TIMEOUT", 30, 20);
  return { max, idleTimeout };
}
|
|
779
1244
|
/**
 * Resolve connection retry settings from DB_RETRY_* environment
 * variables with prod/dev defaults (retries 5/3, initial delay 100/50ms,
 * max delay 10000/5000ms, exponential factor 2).
 *
 * @returns {{maxRetries: number, initialDelay: number, maxDelay: number, factor: number}}
 */
function getRetryConfig() {
  const maxRetries = parseEnvNumber("DB_RETRY_MAX", 5, 3);
  const initialDelay = parseEnvNumber("DB_RETRY_INITIAL_DELAY", 100, 50);
  const maxDelay = parseEnvNumber("DB_RETRY_MAX_DELAY", 1e4, 5e3);
  const factor = parseEnvNumber("DB_RETRY_FACTOR", 2, 2);
  return { maxRetries, initialDelay, maxDelay, factor };
}
|
|
1252
|
+
/**
 * Resolve health-check settings: explicit options override the
 * DB_HEALTH_CHECK_* environment variables, which override defaults
 * (enabled, 60s interval, reconnect on, 3 retries, 5s retry interval).
 *
 * @param {object} [options] - Partial overrides.
 * @returns {{enabled: boolean, interval: number, reconnect: boolean, maxRetries: number, retryInterval: number}}
 */
function buildHealthCheckConfig(options) {
  const opts = options ?? {};
  return {
    enabled: opts.enabled ?? parseEnvBoolean("DB_HEALTH_CHECK_ENABLED", true),
    interval: opts.interval ?? parseEnvNumber("DB_HEALTH_CHECK_INTERVAL", 6e4, 6e4),
    reconnect: opts.reconnect ?? parseEnvBoolean("DB_HEALTH_CHECK_RECONNECT", true),
    maxRetries: opts.maxRetries ?? parseEnvNumber("DB_HEALTH_CHECK_MAX_RETRIES", 3, 3),
    retryInterval: opts.retryInterval ?? parseEnvNumber("DB_HEALTH_CHECK_RETRY_INTERVAL", 5e3, 5e3)
  };
}
|
|
1261
|
+
/**
 * Resolve query-monitoring settings: explicit options override the
 * DB_MONITORING_* environment variables. Monitoring defaults to ON in
 * non-production, slow-query threshold 1000ms, query logging off.
 *
 * @param {object} [options] - Partial overrides.
 * @returns {{enabled: boolean, slowThreshold: number, logQueries: boolean}}
 */
function buildMonitoringConfig(options) {
  const isDevelopment = process.env.NODE_ENV !== "production";
  const opts = options ?? {};
  return {
    enabled: opts.enabled ?? parseEnvBoolean("DB_MONITORING_ENABLED", isDevelopment),
    slowThreshold: opts.slowThreshold ?? parseEnvNumber("DB_MONITORING_SLOW_THRESHOLD", 1e3, 1e3),
    logQueries: opts.logQueries ?? parseEnvBoolean("DB_MONITORING_LOG_QUERIES", false)
  };
}
|
|
792
1269
|
|
|
@@ -795,81 +1272,113 @@ var dbLogger2 = logger.child("database");
|
|
|
795
1272
|
/**
 * True when any database connection URL is configured via
 * DATABASE_URL, DATABASE_WRITE_URL or DATABASE_READ_URL.
 *
 * @returns {boolean}
 */
function hasDatabaseConfig() {
  const candidates = [
    process.env.DATABASE_URL,
    process.env.DATABASE_WRITE_URL,
    process.env.DATABASE_READ_URL
  ];
  return candidates.some((value) => Boolean(value));
}
|
|
1275
|
+
/**
 * Classify the configured database topology from environment variables.
 *
 * Precedence: write+read pair, then DATABASE_URL+replica (legacy),
 * then a single DATABASE_URL, then a lone DATABASE_WRITE_URL, else none.
 *
 * @returns {{type: "write-read", write: string, read: string}
 *         | {type: "legacy", primary: string, replica: string}
 *         | {type: "single", url: string}
 *         | {type: "none"}}
 */
function detectDatabasePattern() {
  const {
    DATABASE_URL: url,
    DATABASE_WRITE_URL: writeUrl,
    DATABASE_READ_URL: readUrl,
    DATABASE_REPLICA_URL: replicaUrl
  } = process.env;
  if (writeUrl && readUrl) {
    return { type: "write-read", write: writeUrl, read: readUrl };
  }
  if (url && replicaUrl) {
    return { type: "legacy", primary: url, replica: replicaUrl };
  }
  if (url) {
    return { type: "single", url };
  }
  if (writeUrl) {
    return { type: "single", url: writeUrl };
  }
  return { type: "none" };
}
|
|
1304
|
+
/**
 * Open separate write and read database connections and wrap each in a
 * drizzle instance.
 *
 * Improvement: the two connections are independent, so they are now
 * opened in parallel with Promise.all instead of sequentially.
 *
 * @param {string} writeUrl - Primary (write) connection URL.
 * @param {string} readUrl - Replica (read) connection URL.
 * @param {object} poolConfig - Pool settings from getPoolConfig().
 * @param {object} retryConfig - Retry settings from getRetryConfig().
 * @returns {Promise<{write: object, read: object, writeClient: object, readClient: object}>}
 */
async function createWriteReadClients(writeUrl, readUrl, poolConfig, retryConfig) {
  const [writeClient, readClient] = await Promise.all([
    createDatabaseConnection(writeUrl, poolConfig, retryConfig),
    createDatabaseConnection(readUrl, poolConfig, retryConfig)
  ]);
  return {
    write: drizzle(writeClient),
    read: drizzle(readClient),
    writeClient,
    readClient
  };
}
|
|
1314
|
+
/**
 * Open one database connection and use it for both read and write roles.
 *
 * @param {string} url - Connection URL.
 * @param {object} poolConfig - Pool settings from getPoolConfig().
 * @param {object} retryConfig - Retry settings from getRetryConfig().
 * @returns {Promise<{write: object, read: object, writeClient: object, readClient: object}>}
 */
async function createSingleClient(url, poolConfig, retryConfig) {
  const client = await createDatabaseConnection(url, poolConfig, retryConfig);
  const instance = drizzle(client);
  // A single URL means the same connection backs both roles.
  return {
    write: instance,
    read: instance,
    writeClient: client,
    readClient: client
  };
}
|
|
798
1324
|
async function createDatabaseFromEnv(options) {
|
|
799
1325
|
if (!hasDatabaseConfig()) {
|
|
800
|
-
|
|
1326
|
+
dbLogger2.debug("No DATABASE_URL found, loading environment variables");
|
|
1327
|
+
const result = loadEnvironment({
|
|
1328
|
+
debug: true
|
|
1329
|
+
});
|
|
1330
|
+
dbLogger2.debug("Environment variables loaded", {
|
|
1331
|
+
success: result.success,
|
|
1332
|
+
loaded: result.loaded.length,
|
|
1333
|
+
hasDatabaseUrl: !!process.env.DATABASE_URL,
|
|
1334
|
+
hasWriteUrl: !!process.env.DATABASE_WRITE_URL,
|
|
1335
|
+
hasReadUrl: !!process.env.DATABASE_READ_URL
|
|
1336
|
+
});
|
|
801
1337
|
}
|
|
802
1338
|
if (!hasDatabaseConfig()) {
|
|
1339
|
+
dbLogger2.warn("No database configuration found", {
|
|
1340
|
+
cwd: process.cwd(),
|
|
1341
|
+
nodeEnv: process.env.NODE_ENV,
|
|
1342
|
+
checkedVars: ["DATABASE_URL", "DATABASE_WRITE_URL", "DATABASE_READ_URL"]
|
|
1343
|
+
});
|
|
803
1344
|
return { write: void 0, read: void 0 };
|
|
804
1345
|
}
|
|
805
1346
|
try {
|
|
806
1347
|
const poolConfig = getPoolConfig(options?.pool);
|
|
807
1348
|
const retryConfig = getRetryConfig();
|
|
808
|
-
|
|
809
|
-
|
|
810
|
-
|
|
811
|
-
|
|
812
|
-
|
|
813
|
-
|
|
814
|
-
|
|
815
|
-
|
|
816
|
-
|
|
817
|
-
|
|
818
|
-
|
|
819
|
-
|
|
820
|
-
|
|
821
|
-
|
|
822
|
-
|
|
823
|
-
|
|
824
|
-
|
|
825
|
-
|
|
826
|
-
|
|
827
|
-
|
|
828
|
-
|
|
829
|
-
|
|
830
|
-
|
|
831
|
-
|
|
832
|
-
|
|
833
|
-
|
|
834
|
-
|
|
835
|
-
|
|
836
|
-
|
|
837
|
-
|
|
838
|
-
|
|
839
|
-
read:
|
|
840
|
-
writeClient: writeClient2,
|
|
841
|
-
readClient: readClient2
|
|
842
|
-
};
|
|
843
|
-
}
|
|
844
|
-
if (process.env.DATABASE_URL) {
|
|
845
|
-
const client = await createDatabaseConnection(
|
|
846
|
-
process.env.DATABASE_URL,
|
|
847
|
-
poolConfig,
|
|
848
|
-
retryConfig
|
|
849
|
-
);
|
|
850
|
-
const db2 = drizzle(client);
|
|
851
|
-
return {
|
|
852
|
-
write: db2,
|
|
853
|
-
read: db2,
|
|
854
|
-
writeClient: client,
|
|
855
|
-
readClient: client
|
|
856
|
-
};
|
|
857
|
-
}
|
|
858
|
-
if (process.env.DATABASE_WRITE_URL) {
|
|
859
|
-
const client = await createDatabaseConnection(
|
|
860
|
-
process.env.DATABASE_WRITE_URL,
|
|
861
|
-
poolConfig,
|
|
862
|
-
retryConfig
|
|
863
|
-
);
|
|
864
|
-
const db2 = drizzle(client);
|
|
865
|
-
return {
|
|
866
|
-
write: db2,
|
|
867
|
-
read: db2,
|
|
868
|
-
writeClient: client,
|
|
869
|
-
readClient: client
|
|
870
|
-
};
|
|
1349
|
+
const pattern = detectDatabasePattern();
|
|
1350
|
+
switch (pattern.type) {
|
|
1351
|
+
case "write-read":
|
|
1352
|
+
dbLogger2.debug("Using write-read pattern", {
|
|
1353
|
+
write: pattern.write.replace(/:[^:@]+@/, ":***@"),
|
|
1354
|
+
read: pattern.read.replace(/:[^:@]+@/, ":***@")
|
|
1355
|
+
});
|
|
1356
|
+
return await createWriteReadClients(
|
|
1357
|
+
pattern.write,
|
|
1358
|
+
pattern.read,
|
|
1359
|
+
poolConfig,
|
|
1360
|
+
retryConfig
|
|
1361
|
+
);
|
|
1362
|
+
case "legacy":
|
|
1363
|
+
dbLogger2.debug("Using legacy replica pattern", {
|
|
1364
|
+
primary: pattern.primary.replace(/:[^:@]+@/, ":***@"),
|
|
1365
|
+
replica: pattern.replica.replace(/:[^:@]+@/, ":***@")
|
|
1366
|
+
});
|
|
1367
|
+
return await createWriteReadClients(
|
|
1368
|
+
pattern.primary,
|
|
1369
|
+
pattern.replica,
|
|
1370
|
+
poolConfig,
|
|
1371
|
+
retryConfig
|
|
1372
|
+
);
|
|
1373
|
+
case "single":
|
|
1374
|
+
dbLogger2.debug("Using single database pattern", {
|
|
1375
|
+
url: pattern.url.replace(/:[^:@]+@/, ":***@")
|
|
1376
|
+
});
|
|
1377
|
+
return await createSingleClient(pattern.url, poolConfig, retryConfig);
|
|
1378
|
+
case "none":
|
|
1379
|
+
dbLogger2.warn("No database pattern detected");
|
|
1380
|
+
return { write: void 0, read: void 0 };
|
|
871
1381
|
}
|
|
872
|
-
return { write: void 0, read: void 0 };
|
|
873
1382
|
} catch (error) {
|
|
874
1383
|
const message = error instanceof Error ? error.message : "Unknown error";
|
|
875
1384
|
dbLogger2.error("Failed to create database connection", {
|
|
@@ -880,57 +1389,129 @@ async function createDatabaseFromEnv(options) {
|
|
|
880
1389
|
hasUrl: !!process.env.DATABASE_URL,
|
|
881
1390
|
hasReplicaUrl: !!process.env.DATABASE_REPLICA_URL
|
|
882
1391
|
});
|
|
883
|
-
|
|
1392
|
+
throw new Error(`Database connection failed: ${message}`, { cause: error });
|
|
884
1393
|
}
|
|
885
1394
|
}
|
|
886
1395
|
|
|
887
|
-
// src/db/manager/
|
|
1396
|
+
// src/db/manager/global-state.ts
// Database singletons are kept on globalThis (under __SPFN_DB_*__ keys)
// rather than in module scope, so every loaded copy of this module sees
// the same connection state.
var getWriteInstance = () => {
  return globalThis.__SPFN_DB_WRITE__;
};
var setWriteInstance = (instance) => {
  globalThis.__SPFN_DB_WRITE__ = instance;
};
var getReadInstance = () => {
  return globalThis.__SPFN_DB_READ__;
};
var setReadInstance = (instance) => {
  globalThis.__SPFN_DB_READ__ = instance;
};
var getWriteClient = () => {
  return globalThis.__SPFN_DB_WRITE_CLIENT__;
};
var setWriteClient = (client) => {
  globalThis.__SPFN_DB_WRITE_CLIENT__ = client;
};
var getReadClient = () => {
  return globalThis.__SPFN_DB_READ_CLIENT__;
};
var setReadClient = (client) => {
  globalThis.__SPFN_DB_READ_CLIENT__ = client;
};
var getHealthCheckInterval = () => {
  return globalThis.__SPFN_DB_HEALTH_CHECK__;
};
var setHealthCheckInterval = (interval) => {
  globalThis.__SPFN_DB_HEALTH_CHECK__ = interval;
};
// Write-only: the monitoring config is read elsewhere via the global key.
var setMonitoringConfig = (config) => {
  globalThis.__SPFN_DB_MONITORING__ = config;
};
|
|
1420
|
+
|
|
1421
|
+
// src/db/manager/health-check.ts
var dbLogger3 = logger.child("database");
/**
 * Start a periodic database health check (idempotent: a second call while
 * one is running is a no-op).
 *
 * Every config.interval ms it runs `SELECT 1` on the write instance and,
 * when distinct, on the read instance. On failure it logs the error and,
 * if config.reconnect is set, delegates to attemptReconnection. The timer
 * handle is stored via setHealthCheckInterval (globalThis-backed).
 *
 * @param config - Health-check settings (interval, reconnect, retries).
 * @param options - Options forwarded to createDatabaseFromEnv on reconnect.
 * @param getDatabase2 - Accessor returning the "write"/"read" instance.
 * @param closeDatabase2 - Callback that tears down current connections.
 */
function startHealthCheck(config, options, getDatabase2, closeDatabase2) {
  const healthCheck = getHealthCheckInterval();
  if (healthCheck) {
    dbLogger3.debug("Health check already running");
    return;
  }
  dbLogger3.info("Starting database health check", {
    interval: `${config.interval}ms`,
    reconnect: config.reconnect
  });
  const interval = setInterval(async () => {
    try {
      const write = getDatabase2("write");
      const read = getDatabase2("read");
      if (write) {
        await write.execute("SELECT 1");
      }
      // Only ping the read instance separately when it is a real replica.
      if (read && read !== write) {
        await read.execute("SELECT 1");
      }
    } catch (error) {
      const message = error instanceof Error ? error.message : "Unknown error";
      dbLogger3.error("Database health check failed", { error: message });
      if (config.reconnect) {
        await attemptReconnection(config, options, closeDatabase2);
      }
    }
  }, config.interval);
  setHealthCheckInterval(interval);
}
|
|
1453
|
+
/**
 * Try to re-establish the database connections after a failed health check.
 *
 * Up to config.maxRetries attempts: each one closes existing connections,
 * waits config.retryInterval ms, recreates connections from the
 * environment, verifies the write instance with `SELECT 1`, and on
 * success stores the new instances/clients in global state and returns.
 * A failed attempt is logged; after the last attempt it gives up without
 * throwing (errors never escape this function).
 *
 * @param config - Health-check settings (maxRetries, retryInterval).
 * @param options - Options forwarded to createDatabaseFromEnv.
 * @param closeDatabase2 - Callback that tears down current connections.
 */
async function attemptReconnection(config, options, closeDatabase2) {
  dbLogger3.warn("Attempting database reconnection", {
    maxRetries: config.maxRetries,
    retryInterval: `${config.retryInterval}ms`
  });
  for (let attempt = 1; attempt <= config.maxRetries; attempt++) {
    try {
      dbLogger3.debug(`Reconnection attempt ${attempt}/${config.maxRetries}`);
      await closeDatabase2();
      // Fixed backoff between attempts.
      await new Promise((resolve) => setTimeout(resolve, config.retryInterval));
      const result = await createDatabaseFromEnv(options);
      if (result.write) {
        // Verify the new connection actually answers before publishing it.
        await result.write.execute("SELECT 1");
        setWriteInstance(result.write);
        setReadInstance(result.read);
        setWriteClient(result.writeClient);
        setReadClient(result.readClient);
        dbLogger3.info("Database reconnection successful", { attempt });
        return;
      }
    } catch (error) {
      const message = error instanceof Error ? error.message : "Unknown error";
      dbLogger3.error(`Reconnection attempt ${attempt} failed`, {
        error: message,
        attempt,
        maxRetries: config.maxRetries
      });
      if (attempt === config.maxRetries) {
        dbLogger3.error("Max reconnection attempts reached, giving up");
      }
    }
  }
}
|
|
1486
|
+
/**
 * Stop the periodic health check, if one is running, and clear the
 * stored timer handle. Safe to call when no check is active.
 */
function stopHealthCheck() {
  const handle = getHealthCheckInterval();
  if (!handle) {
    return;
  }
  clearInterval(handle);
  setHealthCheckInterval(void 0);
  dbLogger3.info("Database health check stopped");
}
|
|
1494
|
+
|
|
1495
|
+
// src/db/manager/manager.ts
var dbLogger4 = logger.child("database");
/**
 * Return the requested database instance from global state.
 * "read" falls back to the write instance when no replica is set;
 * any other type returns the write instance.
 *
 * @param {string} type - "read" or "write".
 * @returns {object|undefined} The drizzle instance, if initialized.
 */
function getDatabase(type) {
  const writeInst = getWriteInstance();
  const readInst = getReadInstance();
  dbLogger4.debug(`getDatabase() called with type=${type}, writeInstance=${!!writeInst}, readInstance=${!!readInst}`);
  return type === "read" ? (readInst ?? writeInst) : writeInst;
}
|
|
901
1506
|
/**
 * Install database instances into global state. When no read instance is
 * supplied, the write instance serves both roles.
 *
 * @param {object} write - Drizzle instance used for writes.
 * @param {object} [read] - Optional drizzle instance used for reads.
 */
function setDatabase(write, read) {
  const effectiveRead = read ?? write;
  setWriteInstance(write);
  setReadInstance(effectiveRead);
}
|
|
930
1510
|
async function initDatabase(options) {
|
|
931
|
-
|
|
932
|
-
|
|
933
|
-
|
|
1511
|
+
const writeInst = getWriteInstance();
|
|
1512
|
+
if (writeInst) {
|
|
1513
|
+
dbLogger4.debug("Database already initialized");
|
|
1514
|
+
return { write: writeInst, read: getReadInstance() };
|
|
934
1515
|
}
|
|
935
1516
|
const result = await createDatabaseFromEnv(options);
|
|
936
1517
|
if (result.write) {
|
|
@@ -939,178 +1520,329 @@ async function initDatabase(options) {
|
|
|
939
1520
|
if (result.read && result.read !== result.write) {
|
|
940
1521
|
await result.read.execute("SELECT 1");
|
|
941
1522
|
}
|
|
942
|
-
|
|
943
|
-
|
|
944
|
-
|
|
945
|
-
|
|
1523
|
+
setWriteInstance(result.write);
|
|
1524
|
+
setReadInstance(result.read);
|
|
1525
|
+
setWriteClient(result.writeClient);
|
|
1526
|
+
setReadClient(result.readClient);
|
|
946
1527
|
const hasReplica = result.read && result.read !== result.write;
|
|
947
|
-
|
|
1528
|
+
dbLogger4.info(
|
|
948
1529
|
hasReplica ? "Database connected (Primary + Replica)" : "Database connected"
|
|
949
1530
|
);
|
|
950
|
-
const healthCheckConfig =
|
|
1531
|
+
const healthCheckConfig = buildHealthCheckConfig(options?.healthCheck);
|
|
951
1532
|
if (healthCheckConfig.enabled) {
|
|
952
|
-
startHealthCheck(healthCheckConfig);
|
|
1533
|
+
startHealthCheck(healthCheckConfig, options, getDatabase, closeDatabase);
|
|
953
1534
|
}
|
|
954
|
-
|
|
955
|
-
|
|
956
|
-
|
|
957
|
-
|
|
958
|
-
|
|
1535
|
+
const monConfig = buildMonitoringConfig(options?.monitoring);
|
|
1536
|
+
setMonitoringConfig(monConfig);
|
|
1537
|
+
if (monConfig.enabled) {
|
|
1538
|
+
dbLogger4.info("Database query monitoring enabled", {
|
|
1539
|
+
slowThreshold: `${monConfig.slowThreshold}ms`,
|
|
1540
|
+
logQueries: monConfig.logQueries
|
|
959
1541
|
});
|
|
960
1542
|
}
|
|
961
1543
|
} catch (error) {
|
|
962
1544
|
const message = error instanceof Error ? error.message : "Unknown error";
|
|
963
|
-
|
|
1545
|
+
dbLogger4.error("Database connection failed", { error: message });
|
|
964
1546
|
await closeDatabase();
|
|
965
|
-
|
|
1547
|
+
throw new Error(`Database connection test failed: ${message}`, { cause: error });
|
|
966
1548
|
}
|
|
967
1549
|
} else {
|
|
968
|
-
|
|
969
|
-
|
|
1550
|
+
dbLogger4.warn("No database configuration found");
|
|
1551
|
+
dbLogger4.warn("Set DATABASE_URL environment variable to enable database");
|
|
970
1552
|
}
|
|
971
|
-
return { write:
|
|
1553
|
+
return { write: getWriteInstance(), read: getReadInstance() };
|
|
972
1554
|
}
|
|
973
1555
|
/**
 * Close all database connections and reset global state.
 *
 * No-op when nothing is connected. Stops the health check first, then
 * closes the write client and (when distinct) the read client in
 * parallel with a 5s timeout each; individual close failures are logged
 * but do not reject thanks to the per-promise .catch. Global state is
 * always cleared in the finally block, even if cleanup throws.
 *
 * @throws Re-throws any error raised outside the per-connection handlers.
 */
async function closeDatabase() {
  const writeInst = getWriteInstance();
  const readInst = getReadInstance();
  if (!writeInst && !readInst) {
    dbLogger4.debug("No database connections to close");
    return;
  }
  stopHealthCheck();
  try {
    const closePromises = [];
    const writeC = getWriteClient();
    if (writeC) {
      dbLogger4.debug("Closing write connection...");
      closePromises.push(
        writeC.end({ timeout: 5 }).then(() => dbLogger4.debug("Write connection closed")).catch((err) => dbLogger4.error("Error closing write connection", err))
      );
    }
    const readC = getReadClient();
    // Skip when read shares the write client (single-URL setups).
    if (readC && readC !== writeC) {
      dbLogger4.debug("Closing read connection...");
      closePromises.push(
        readC.end({ timeout: 5 }).then(() => dbLogger4.debug("Read connection closed")).catch((err) => dbLogger4.error("Error closing read connection", err))
      );
    }
    await Promise.all(closePromises);
    dbLogger4.info("All database connections closed");
  } catch (error) {
    dbLogger4.error("Error during database cleanup", error);
    throw error;
  } finally {
    // Always drop references so a later initDatabase starts clean.
    setWriteInstance(void 0);
    setReadInstance(void 0);
    setWriteClient(void 0);
    setReadClient(void 0);
    setMonitoringConfig(void 0);
  }
}
|
|
1006
1592
|
/**
 * Summarize current database connection state.
 *
 * @returns {{hasWrite: boolean, hasRead: boolean, isReplica: boolean}}
 *   isReplica is true only when a read instance exists and differs from
 *   the write instance.
 */
function getDatabaseInfo() {
  const write = getWriteInstance();
  const read = getReadInstance();
  const isReplica = Boolean(read && read !== write);
  return {
    hasWrite: Boolean(write),
    hasRead: Boolean(read),
    isReplica
  };
}
|
|
1013
|
-
function
|
|
1014
|
-
if (
|
|
1015
|
-
|
|
1016
|
-
|
|
1017
|
-
|
|
1018
|
-
|
|
1019
|
-
|
|
1020
|
-
|
|
1021
|
-
|
|
1022
|
-
|
|
1023
|
-
|
|
1024
|
-
|
|
1025
|
-
|
|
1026
|
-
|
|
1027
|
-
|
|
1028
|
-
|
|
1029
|
-
|
|
1030
|
-
|
|
1601
|
+
/**
 * Expand a limited glob pattern into matching file paths, stdlib-only.
 *
 * Supported forms:
 *  - no "*": returns [pattern] if it exists, else [].
 *  - "**": recursive scan under the text before "**"; the text after it
 *    (with "/*." or "\*." stripped) is used as a suffix filter.
 *  - single "*": non-recursive scan of dirname(pattern); matches "*" or
 *    "*.ext"-style basename patterns.
 * All fs errors (unreadable dirs/entries) are silently skipped so
 * discovery stays best-effort.
 *
 * NOTE(review): suffix matching uses endsWith on the raw remainder, so a
 * pattern like "dir/**" + "/*.ts" filters on "ts" and would also match
 * names merely ending in "ts" — confirm this looseness is acceptable.
 */
function expandGlobPattern(pattern) {
  if (!pattern.includes("*")) {
    return existsSync(pattern) ? [pattern] : [];
  }
  const files = [];
  if (pattern.includes("**")) {
    // Everything before the first "**" is the scan root.
    const [baseDir, ...rest] = pattern.split("**");
    const extension = rest.join("").replace(/[\/\\]\*\./g, "").trim();
    const scanRecursive = (dir) => {
      if (!existsSync(dir)) return;
      try {
        const entries = readdirSync(dir);
        for (const entry of entries) {
          const fullPath = join(dir, entry);
          try {
            const stat = statSync(fullPath);
            if (stat.isDirectory()) {
              scanRecursive(fullPath);
            } else if (stat.isFile()) {
              // Empty extension means "match every file".
              if (!extension || fullPath.endsWith(extension)) {
                files.push(fullPath);
              }
            }
          } catch {
          }
        }
      } catch {
      }
    };
    scanRecursive(baseDir.trim() || ".");
  } else if (pattern.includes("*")) {
    const dir = dirname(pattern);
    const filePattern = basename(pattern);
    if (!existsSync(dir)) return [];
    try {
      const entries = readdirSync(dir);
      for (const entry of entries) {
        const fullPath = join(dir, entry);
        try {
          const stat = statSync(fullPath);
          if (stat.isFile()) {
            // "*" matches all; "*.ext" matches entries ending in ".ext".
            if (filePattern === "*" || filePattern.startsWith("*.") && entry.endsWith(filePattern.slice(1))) {
              files.push(fullPath);
            }
          }
        } catch {
        }
      }
    } catch {
    }
  }
  return files;
}
|
|
1042
|
-
|
|
1043
|
-
|
|
1044
|
-
|
|
1045
|
-
|
|
1046
|
-
|
|
1047
|
-
|
|
1654
|
+
/**
 * Discover drizzle schema files exposed by installed packages.
 *
 * A package advertises schemas via an `spfn.schemas` entry (string or
 * array of path/glob strings) in its package.json. Scanned locations:
 *  1. every package under node_modules/@spfn,
 *  2. every direct (dev)dependency of the project, except @spfn/* which
 *     was already covered.
 * Glob entries are expanded with expandGlobPattern; index.{js,ts,mjs}
 * barrel files are excluded (both "/" and "\\" separators handled).
 * All JSON/fs errors are swallowed so discovery stays best-effort.
 *
 * @param cwd - Project root containing package.json and node_modules.
 * @returns Absolute paths of discovered schema files.
 */
function discoverPackageSchemas(cwd) {
  const schemas = [];
  const nodeModulesPath = join(cwd, "node_modules");
  if (!existsSync(nodeModulesPath)) {
    return schemas;
  }
  const projectPkgPath = join(cwd, "package.json");
  let directDeps = /* @__PURE__ */ new Set();
  if (existsSync(projectPkgPath)) {
    try {
      const projectPkg = JSON.parse(readFileSync(projectPkgPath, "utf-8"));
      directDeps = /* @__PURE__ */ new Set([
        ...Object.keys(projectPkg.dependencies || {}),
        ...Object.keys(projectPkg.devDependencies || {})
      ]);
    } catch (error) {
    }
  }
  // Inspect one package directory and collect its declared schema files.
  const checkPackage = (_pkgName, pkgPath) => {
    const pkgJsonPath = join(pkgPath, "package.json");
    if (!existsSync(pkgJsonPath)) return;
    try {
      const pkgJson = JSON.parse(readFileSync(pkgJsonPath, "utf-8"));
      if (pkgJson.spfn?.schemas) {
        const packageSchemas = Array.isArray(pkgJson.spfn.schemas) ? pkgJson.spfn.schemas : [pkgJson.spfn.schemas];
        for (const schema of packageSchemas) {
          const absolutePath = join(pkgPath, schema);
          const expandedFiles = expandGlobPattern(absolutePath);
          // Drop barrel index files; they re-export rather than define.
          const schemaFiles = expandedFiles.filter(
            (file) => !file.endsWith("/index.js") && !file.endsWith("/index.ts") && !file.endsWith("/index.mjs") && !file.endsWith("\\index.js") && !file.endsWith("\\index.ts") && !file.endsWith("\\index.mjs")
          );
          schemas.push(...schemaFiles);
        }
      }
    } catch (error) {
    }
  };
  const spfnDir = join(nodeModulesPath, "@spfn");
  if (existsSync(spfnDir)) {
    try {
      const spfnPackages = readdirSync(spfnDir);
      for (const pkg of spfnPackages) {
        checkPackage(`@spfn/${pkg}`, join(spfnDir, pkg));
      }
    } catch (error) {
    }
  }
  for (const depName of directDeps) {
    // @spfn/* packages were handled in the dedicated scan above.
    if (depName.startsWith("@spfn/")) continue;
    const pkgPath = depName.startsWith("@") ? join(nodeModulesPath, ...depName.split("/")) : join(nodeModulesPath, depName);
    checkPackage(depName, pkgPath);
  }
  return schemas;
}
|
|
1082
|
-
function
|
|
1083
|
-
|
|
function detectDialect(url) {
    // Infer the SQL dialect from a connection string. Schemes are checked in
    // order: PostgreSQL, MySQL, then SQLite (which may also be identified by a
    // bare database-file path containing ".db" or ".sqlite").
    const isPostgres = url.startsWith("postgres://") || url.startsWith("postgresql://");
    if (isPostgres) {
        return "postgresql";
    }
    if (url.startsWith("mysql://")) {
        return "mysql";
    }
    const looksLikeSqlite = url.startsWith("sqlite://") || url.includes(".db") || url.includes(".sqlite");
    if (looksLikeSqlite) {
        return "sqlite";
    }
    throw new Error(
        `Unsupported database URL format: ${url}. Supported: postgresql://, mysql://, sqlite://`
    );
}
|
|
1085
|
-
|
|
1086
|
-
|
|
1087
|
-
|
|
1088
|
-
|
|
1089
|
-
|
|
1090
|
-
|
|
function getDrizzleConfig(options = {}) {
    // Build a drizzle-kit configuration object.
    //
    // Connection string resolution: options.databaseUrl > process.env.DATABASE_URL.
    // Dialect resolution: options.dialect > detectDialect(url).
    // Schema resolution: either schemas of a single package (options.packageFilter)
    // or the user's schema glob(s) merged with auto-discovered package schemas.
    const databaseUrl = options.databaseUrl ?? process.env.DATABASE_URL;
    if (!databaseUrl) {
        throw new Error(
            "DATABASE_URL is required. Set it in .env or pass it to getDrizzleConfig()"
        );
    }
    const dialect = options.dialect ?? detectDialect(databaseUrl);
    const out = options.out ?? "./src/server/drizzle";
    if (options.packageFilter) {
        const discovered = options.disablePackageDiscovery ? [] : discoverPackageSchemas(options.cwd ?? process.cwd());
        // FIX: normalize Windows backslashes before matching, so the filter also
        // works on paths produced by path.join() on win32 (sibling code already
        // handles "\\index.ts" endings, so backslash paths do occur).
        const needle = `node_modules/${options.packageFilter}/`;
        const filteredSchemas = discovered.filter(
            (schemaPath) => schemaPath.replace(/\\/g, "/").includes(needle)
        );
        if (filteredSchemas.length === 0) {
            throw new Error(
                `No schemas found for package ${options.packageFilter}. Make sure the package is installed and has "spfn.schemas" in package.json.`
            );
        }
        return {
            // drizzle-kit accepts a single path or an array; collapse singletons.
            schema: filteredSchemas.length === 1 ? filteredSchemas[0] : filteredSchemas,
            out,
            dialect,
            dbCredentials: getDbCredentials(dialect, databaseUrl)
        };
    }
    // Default path: merge user schema glob(s) with discovered package schemas.
    const userSchema = options.schema ?? "./src/server/entities/**/*.ts";
    const userSchemas = Array.isArray(userSchema) ? userSchema : [userSchema];
    const packageSchemas = options.disablePackageDiscovery ? [] : discoverPackageSchemas(options.cwd ?? process.cwd());
    const allSchemas = [...userSchemas, ...packageSchemas];
    return {
        schema: allSchemas.length === 1 ? allSchemas[0] : allSchemas,
        out,
        dialect,
        dbCredentials: getDbCredentials(dialect, databaseUrl)
    };
}
|
|
function getDbCredentials(dialect, url) {
    // Translate a connection URL into drizzle-kit `dbCredentials` for the
    // given dialect. Throws for dialects this helper does not know about.
    switch (dialect) {
        case "postgresql":
        case "mysql":
            // Both drivers consume the raw connection string.
            return { url };
        case "sqlite": {
            // FIX: braces scope the `dbPath` binding to this case (previously a
            // lexical declaration leaked into the whole switch), and the scheme
            // is stripped only at the start of the string so a path that merely
            // contains "sqlite:" elsewhere is not mangled.
            let dbPath = url;
            if (dbPath.startsWith("sqlite://")) {
                dbPath = dbPath.slice("sqlite://".length);
            } else if (dbPath.startsWith("sqlite:")) {
                dbPath = dbPath.slice("sqlite:".length);
            }
            return { url: dbPath };
        }
        default:
            throw new Error(`Unsupported dialect: ${dialect}`);
    }
}
|
|
function generateDrizzleConfigFile(options = {}) {
    // Render the source text of a standalone drizzle.config.ts for the
    // resolved configuration. Array schemas are emitted as a quoted list;
    // a single schema path as a quoted string.
    const config = getDrizzleConfig(options);
    const schemaValue = Array.isArray(config.schema) ? `[
    ${config.schema.map((s) => `'${s}'`).join(",\n    ")}
]` : `'${config.schema}'`;
    return `import { defineConfig } from 'drizzle-kit';

export default defineConfig({
    schema: ${schemaValue},
    out: '${config.out}',
    dialect: '${config.dialect}',
    dbCredentials: ${JSON.stringify(config.dbCredentials, null, 4)},
});
`;
}
|
|
function id() {
    // Standard auto-incrementing BIGSERIAL primary-key column named "id",
    // surfaced to JS as a number.
    const column = bigserial("id", { mode: "number" });
    return column.primaryKey();
}
|
|
function timestamps(options) {
    // Standard created_at / updated_at timestamptz columns, both defaulting to
    // now() and NOT NULL. When options.autoUpdate is set, the updated_at column
    // is tagged with __autoUpdate so higher layers refresh it on writes.
    const updatedAt = timestamp("updated_at", { withTimezone: true, mode: "date" }).defaultNow().notNull();
    if (options?.autoUpdate) {
        updatedAt.__autoUpdate = true;
    }
    const createdAt = timestamp("created_at", { withTimezone: true, mode: "date" }).defaultNow().notNull();
    return { createdAt, updatedAt };
}
|
|
function foreignKey(name, reference, options) {
    // Required foreign-key column named `<name>_id`, with ON DELETE CASCADE
    // unless overridden via options.onDelete.
    // NOTE(review): the column builder is bigserial, presumably to match the
    // id() helper's type — confirm a plain bigint would not be more appropriate.
    const onDelete = options?.onDelete ?? "cascade";
    const column = bigserial(`${name}_id`, { mode: "number" });
    return column.notNull().references(reference, { onDelete });
}
|
|
function optionalForeignKey(name, reference, options) {
    // Nullable foreign-key column named `<name>_id`, with ON DELETE SET NULL
    // unless overridden via options.onDelete. Unlike foreignKey(), no NOT NULL.
    const onDelete = options?.onDelete ?? "set null";
    const column = bigserial(`${name}_id`, { mode: "number" });
    return column.references(reference, { onDelete });
}
|
|
function createFunctionSchema(packageName) {
    // Dedicated Postgres schema for a function package, named by sanitizing
    // the npm package name (see packageNameToSchema).
    return pgSchema(packageNameToSchema(packageName));
}
|
|
function packageNameToSchema(packageName) {
    // Convert an npm package name to a valid Postgres schema name:
    // "@spfn/auth-core" -> "spfn_auth_core".
    // Global replacements generalize the previous first-occurrence-only
    // .replace("@")/.replace("/") calls; identical output for well-formed npm
    // names (at most one "@" and one "/"), robust for degenerate inputs.
    return packageName.replace(/@/g, "").replace(/\//g, "_").replace(/-/g, "_");
}
|
|
function getSchemaInfo(packageName) {
    // Describe how a package maps onto a Postgres schema.
    // Returns { schemaName, isScoped, scope }, where scope is the npm scope
    // without its leading "@" (null for unscoped packages).
    const isScoped = packageName.startsWith("@");
    let scope = null;
    if (isScoped) {
        scope = packageName.split("/")[0].substring(1);
    }
    return {
        schemaName: packageNameToSchema(packageName),
        isScoped,
        scope
    };
}
|
|
// Module-level logger for the transaction subsystem, and the
// AsyncLocalStorage slot that carries the active transaction context across
// awaits. `const` replaces `var`: both are assigned exactly once at module
// evaluation and never reassigned.
const txLogger = logger.child("transaction");
const asyncContext = new AsyncLocalStorage();
|
|
function getTransactionContext() {
    // Transaction context ({ tx, txId, level }) for the current async
    // execution, or null when no transaction is active.
    const store = asyncContext.getStore();
    if (store == null) {
        return null;
    }
    return store;
}
|
|
function getTransaction() {
    // Drizzle transaction handle bound to the current async context, or null
    // when not running inside a transaction.
    const ctx = getTransactionContext();
    if (!ctx) {
        return null;
    }
    return ctx.tx ?? null;
}
|
|
function runWithTransaction(tx, txId, callback) {
    // Execute `callback` with { tx, txId, level } bound to the async context.
    // Nesting inside an existing context increments `level` and logs the
    // outer/inner transaction relationship (SAVEPOINT semantics).
    const parent = getTransactionContext();
    const level = parent ? parent.level + 1 : 1;
    if (parent) {
        txLogger.info("Nested transaction started (SAVEPOINT)", {
            outerTxId: parent.txId,
            innerTxId: txId,
            level
        });
    } else {
        txLogger.debug("Root transaction context set", { txId, level });
    }
    return asyncContext.run({ tx, txId, level }, callback);
}
|
|
1115
1847
|
function Transactional(options = {}) {
|
|
1116
1848
|
const defaultTimeout = parseInt(process.env.TRANSACTION_TIMEOUT || "30000", 10);
|
|
@@ -1119,17 +1851,25 @@ function Transactional(options = {}) {
|
|
|
1119
1851
|
enableLogging = true,
|
|
1120
1852
|
timeout = defaultTimeout
|
|
1121
1853
|
} = options;
|
|
1122
|
-
const
|
|
1854
|
+
const txLogger2 = logger.child("transaction");
|
|
1123
1855
|
return createMiddleware(async (c, next) => {
|
|
1124
|
-
const txId = `tx_${
|
|
1856
|
+
const txId = `tx_${randomUUID()}`;
|
|
1125
1857
|
const startTime = Date.now();
|
|
1126
1858
|
const route = `${c.req.method} ${c.req.path}`;
|
|
1127
1859
|
if (enableLogging) {
|
|
1128
|
-
|
|
1860
|
+
txLogger2.debug("Transaction started", { txId, route });
|
|
1129
1861
|
}
|
|
1130
1862
|
try {
|
|
1131
|
-
const
|
|
1132
|
-
|
|
1863
|
+
const writeDb = getDatabase("write");
|
|
1864
|
+
if (!writeDb) {
|
|
1865
|
+
throw new TransactionError(
|
|
1866
|
+
"Database not initialized. Cannot start transaction.",
|
|
1867
|
+
500,
|
|
1868
|
+
{ txId, route }
|
|
1869
|
+
);
|
|
1870
|
+
}
|
|
1871
|
+
const transactionPromise = writeDb.transaction(async (tx) => {
|
|
1872
|
+
await runWithTransaction(tx, txId, async () => {
|
|
1133
1873
|
await next();
|
|
1134
1874
|
const contextWithError = c;
|
|
1135
1875
|
if (contextWithError.error) {
|
|
@@ -1160,14 +1900,14 @@ function Transactional(options = {}) {
|
|
|
1160
1900
|
const duration = Date.now() - startTime;
|
|
1161
1901
|
if (enableLogging) {
|
|
1162
1902
|
if (duration >= slowThreshold) {
|
|
1163
|
-
|
|
1903
|
+
txLogger2.warn("Slow transaction committed", {
|
|
1164
1904
|
txId,
|
|
1165
1905
|
route,
|
|
1166
1906
|
duration: `${duration}ms`,
|
|
1167
1907
|
threshold: `${slowThreshold}ms`
|
|
1168
1908
|
});
|
|
1169
1909
|
} else {
|
|
1170
|
-
|
|
1910
|
+
txLogger2.debug("Transaction committed", {
|
|
1171
1911
|
txId,
|
|
1172
1912
|
route,
|
|
1173
1913
|
duration: `${duration}ms`
|
|
@@ -1178,7 +1918,7 @@ function Transactional(options = {}) {
|
|
|
1178
1918
|
const duration = Date.now() - startTime;
|
|
1179
1919
|
const customError = error instanceof TransactionError ? error : fromPostgresError(error);
|
|
1180
1920
|
if (enableLogging) {
|
|
1181
|
-
|
|
1921
|
+
txLogger2.error("Transaction rolled back", {
|
|
1182
1922
|
txId,
|
|
1183
1923
|
route,
|
|
1184
1924
|
duration: `${duration}ms`,
|
|
@@ -1190,910 +1930,144 @@ function Transactional(options = {}) {
|
|
|
1190
1930
|
}
|
|
1191
1931
|
});
|
|
1192
1932
|
}
|
|
1193
|
-
function
|
|
1194
|
-
|
|
1195
|
-
for (const [field, filterCondition] of Object.entries(filters)) {
|
|
1196
|
-
const column = table[field];
|
|
1197
|
-
if (!column) {
|
|
1198
|
-
console.warn(`[buildFilters] Unknown field: ${field}`);
|
|
1199
|
-
continue;
|
|
1200
|
-
}
|
|
1201
|
-
for (const [operator, value] of Object.entries(filterCondition)) {
|
|
1202
|
-
const condition = buildCondition(column, operator, value);
|
|
1203
|
-
if (condition) {
|
|
1204
|
-
conditions.push(condition);
|
|
1205
|
-
}
|
|
1206
|
-
}
|
|
1207
|
-
}
|
|
1208
|
-
return conditions.length > 0 ? and(...conditions) : void 0;
|
|
1209
|
-
}
|
|
1210
|
-
function buildCondition(column, operator, value) {
|
|
1211
|
-
switch (operator) {
|
|
1212
|
-
case "eq":
|
|
1213
|
-
return eq(column, value);
|
|
1214
|
-
case "ne":
|
|
1215
|
-
return ne(column, value);
|
|
1216
|
-
case "gt":
|
|
1217
|
-
return gt(column, value);
|
|
1218
|
-
case "gte":
|
|
1219
|
-
return gte(column, value);
|
|
1220
|
-
case "lt":
|
|
1221
|
-
return lt(column, value);
|
|
1222
|
-
case "lte":
|
|
1223
|
-
return lte(column, value);
|
|
1224
|
-
case "like":
|
|
1225
|
-
return like(column, `%${value}%`);
|
|
1226
|
-
case "in":
|
|
1227
|
-
if (Array.isArray(value)) {
|
|
1228
|
-
return inArray(column, value);
|
|
1229
|
-
}
|
|
1230
|
-
console.warn(`[buildCondition] 'in' operator requires array value`);
|
|
1231
|
-
return void 0;
|
|
1232
|
-
case "nin":
|
|
1233
|
-
if (Array.isArray(value)) {
|
|
1234
|
-
return notInArray(column, value);
|
|
1235
|
-
}
|
|
1236
|
-
console.warn(`[buildCondition] 'nin' operator requires array value`);
|
|
1237
|
-
return void 0;
|
|
1238
|
-
case "is":
|
|
1239
|
-
if (value === "null") return isNull(column);
|
|
1240
|
-
if (value === "notnull") return isNotNull(column);
|
|
1241
|
-
console.warn(`[buildCondition] 'is' operator requires 'null' or 'notnull'`);
|
|
1242
|
-
return void 0;
|
|
1243
|
-
default:
|
|
1244
|
-
console.warn(`[buildCondition] Unknown operator: ${operator}`);
|
|
1245
|
-
return void 0;
|
|
1246
|
-
}
|
|
1247
|
-
}
|
|
1248
|
-
function buildSort(sortConditions, table) {
|
|
1249
|
-
const orderByClauses = [];
|
|
1250
|
-
for (const { field, direction } of sortConditions) {
|
|
1251
|
-
const column = table[field];
|
|
1252
|
-
if (!column) {
|
|
1253
|
-
console.warn(`[buildSort] Unknown field: ${field}`);
|
|
1254
|
-
continue;
|
|
1255
|
-
}
|
|
1256
|
-
const clause = direction === "desc" ? desc(column) : asc(column);
|
|
1257
|
-
orderByClauses.push(clause);
|
|
1258
|
-
}
|
|
1259
|
-
return orderByClauses;
|
|
1260
|
-
}
|
|
1261
|
-
function applyPagination(pagination) {
|
|
1262
|
-
const { page, limit } = pagination;
|
|
1263
|
-
const offset = (page - 1) * limit;
|
|
1264
|
-
return { offset, limit };
|
|
1265
|
-
}
|
|
1266
|
-
function createPaginationMeta(pagination, total) {
|
|
1267
|
-
const { page, limit } = pagination;
|
|
1268
|
-
const totalPages = Math.ceil(total / limit);
|
|
1269
|
-
return {
|
|
1270
|
-
page,
|
|
1271
|
-
limit,
|
|
1272
|
-
total,
|
|
1273
|
-
totalPages,
|
|
1274
|
-
hasNext: page < totalPages,
|
|
1275
|
-
hasPrev: page > 1
|
|
1276
|
-
};
|
|
function isSQLWrapper(value) {
    // True when `value` looks like a drizzle SQL expression, duck-typed on the
    // internal `queryChunks` property. Returns a strict boolean: the previous
    // `value && ...` form leaked falsy non-boolean operands (null, 0, "").
    return typeof value === "object" && value !== null && "queryChunks" in value;
}
|
|
1278
|
-
|
|
1279
|
-
const
|
|
1280
|
-
if (
|
|
1281
|
-
|
|
1282
|
-
|
|
1283
|
-
|
|
1284
|
-
return
|
|
function buildWhereFromObject(table, where) {
    // Turn a plain { column: value } object into a drizzle WHERE condition:
    // each defined entry becomes eq(table[key], value), all combined with AND.
    // Returns undefined when no usable (non-undefined) entries remain.
    const conditions = [];
    for (const [key, value] of Object.entries(where)) {
        if (value === void 0) {
            continue;
        }
        conditions.push(eq(table[key], value));
    }
    if (conditions.length === 0) {
        return void 0;
    }
    if (conditions.length === 1) {
        return conditions[0];
    }
    return and(...conditions);
}
|
|
1286
|
-
|
|
1287
|
-
|
|
1288
|
-
|
|
1289
|
-
|
|
1290
|
-
table;
|
|
1291
|
-
filterConditions = [];
|
|
1292
|
-
sortConditions = [];
|
|
1293
|
-
limitValue;
|
|
1294
|
-
offsetValue;
|
|
1295
|
-
constructor(db2, table) {
|
|
1296
|
-
this.db = db2;
|
|
1297
|
-
this.table = table;
|
|
1298
|
-
}
|
|
1299
|
-
/**
|
|
1300
|
-
* Add WHERE conditions
|
|
1301
|
-
*
|
|
1302
|
-
* Multiple where() calls are combined with AND logic.
|
|
1303
|
-
*
|
|
1304
|
-
* @param filters - Filter conditions
|
|
1305
|
-
* @returns QueryBuilder for chaining
|
|
1306
|
-
*
|
|
1307
|
-
* @example
|
|
1308
|
-
* ```typescript
|
|
1309
|
-
* query
|
|
1310
|
-
* .where({ status: 'active' })
|
|
1311
|
-
* .where({ role: 'admin' }) // AND condition
|
|
1312
|
-
* ```
|
|
1313
|
-
*/
|
|
1314
|
-
where(filters) {
|
|
1315
|
-
this.filterConditions.push(filters);
|
|
1316
|
-
return this;
|
|
1317
|
-
}
|
|
1318
|
-
/**
|
|
1319
|
-
* Add ORDER BY clause
|
|
1320
|
-
*
|
|
1321
|
-
* Multiple orderBy() calls create multi-column sorting.
|
|
1322
|
-
*
|
|
1323
|
-
* @param field - Field name to sort by
|
|
1324
|
-
* @param direction - Sort direction ('asc' or 'desc')
|
|
1325
|
-
* @returns QueryBuilder for chaining
|
|
1326
|
-
*
|
|
1327
|
-
* @example
|
|
1328
|
-
* ```typescript
|
|
1329
|
-
* query
|
|
1330
|
-
* .orderBy('isPremium', 'desc')
|
|
1331
|
-
* .orderBy('createdAt', 'desc')
|
|
1332
|
-
* ```
|
|
1333
|
-
*/
|
|
1334
|
-
orderBy(field, direction = "asc") {
|
|
1335
|
-
this.sortConditions.push({ field, direction });
|
|
1336
|
-
return this;
|
|
1337
|
-
}
|
|
1338
|
-
/**
|
|
1339
|
-
* Set LIMIT clause
|
|
1340
|
-
*
|
|
1341
|
-
* @param limit - Maximum number of records to return
|
|
1342
|
-
* @returns QueryBuilder for chaining
|
|
1343
|
-
*
|
|
1344
|
-
* @example
|
|
1345
|
-
* ```typescript
|
|
1346
|
-
* query.limit(10)
|
|
1347
|
-
* ```
|
|
1348
|
-
*/
|
|
1349
|
-
limit(limit) {
|
|
1350
|
-
this.limitValue = limit;
|
|
1351
|
-
return this;
|
|
1352
|
-
}
|
|
1353
|
-
/**
|
|
1354
|
-
* Set OFFSET clause
|
|
1355
|
-
*
|
|
1356
|
-
* @param offset - Number of records to skip
|
|
1357
|
-
* @returns QueryBuilder for chaining
|
|
1358
|
-
*
|
|
1359
|
-
* @example
|
|
1360
|
-
* ```typescript
|
|
1361
|
-
* query.offset(20)
|
|
1362
|
-
* ```
|
|
1363
|
-
*/
|
|
1364
|
-
offset(offset) {
|
|
1365
|
-
this.offsetValue = offset;
|
|
1366
|
-
return this;
|
|
1367
|
-
}
|
|
1368
|
-
/**
|
|
1369
|
-
* Execute query and return multiple records
|
|
1370
|
-
*
|
|
1371
|
-
* @returns Array of records
|
|
1372
|
-
*
|
|
1373
|
-
* @example
|
|
1374
|
-
* ```typescript
|
|
1375
|
-
* const users = await query
|
|
1376
|
-
* .where({ status: 'active' })
|
|
1377
|
-
* .orderBy('createdAt', 'desc')
|
|
1378
|
-
* .limit(10)
|
|
1379
|
-
* .findMany();
|
|
1380
|
-
* ```
|
|
1381
|
-
*/
|
|
1382
|
-
async findMany() {
|
|
1383
|
-
const mergedFilters = this.mergeFilters();
|
|
1384
|
-
const whereCondition = buildFilters(mergedFilters, this.table);
|
|
1385
|
-
const orderBy = buildSort(this.sortConditions, this.table);
|
|
1386
|
-
let query = this.db.select().from(this.table).where(whereCondition).orderBy(...orderBy);
|
|
1387
|
-
if (this.limitValue !== void 0) {
|
|
1388
|
-
query = query.limit(this.limitValue);
|
|
1389
|
-
}
|
|
1390
|
-
if (this.offsetValue !== void 0) {
|
|
1391
|
-
query = query.offset(this.offsetValue);
|
|
1392
|
-
}
|
|
1393
|
-
return query;
|
|
async function findOne(table, where) {
    // Fetch a single row of `table` matching `where`, or null when nothing
    // matches. `where` may be a drizzle SQL expression or a plain
    // { column: value } object; at least one condition is required.
    // Reads are routed to the "read" database handle.
    const db = getDatabase("read");
    if (!db) {
        throw new Error("Database not initialized. Call initDatabase() first.");
    }
    let whereClause;
    if (isSQLWrapper(where)) {
        whereClause = where;
    } else if (where) {
        whereClause = buildWhereFromObject(table, where);
    }
    if (!whereClause) {
        throw new Error("findOne requires at least one where condition");
    }
    const rows = await db.select().from(table).where(whereClause).limit(1);
    return rows[0] ?? null;
}
|
|
1956
|
+
async function findMany(table, options) {
|
|
1957
|
+
const db = getDatabase("read");
|
|
1958
|
+
if (!db) {
|
|
1959
|
+
throw new Error("Database not initialized. Call initDatabase() first.");
|
|
1960
|
+
}
|
|
1961
|
+
let query = db.select().from(table);
|
|
1962
|
+
if (options?.where) {
|
|
1963
|
+
const whereClause = isSQLWrapper(options.where) ? options.where : options.where ? buildWhereFromObject(table, options.where) : void 0;
|
|
1964
|
+
if (whereClause) {
|
|
1965
|
+
query = query.where(whereClause);
|
|
1602
1966
|
}
|
|
1603
1967
|
}
|
|
1604
|
-
|
|
1605
|
-
|
|
1606
|
-
|
|
1607
|
-
* @example
|
|
1608
|
-
* const users = await userRepo.findAll();
|
|
1609
|
-
*/
|
|
1610
|
-
async findAll() {
|
|
1611
|
-
return this.executeWithMonitoring("findAll", async () => {
|
|
1612
|
-
const readDb = this.getReadDb();
|
|
1613
|
-
return readDb.select().from(this.table);
|
|
1614
|
-
});
|
|
1615
|
-
}
|
|
1616
|
-
/**
|
|
1617
|
-
* Find with pagination (uses Replica)
|
|
1618
|
-
*
|
|
1619
|
-
* @example
|
|
1620
|
-
* const result = await userRepo.findPage({
|
|
1621
|
-
* filters: { email: { like: 'john' } },
|
|
1622
|
-
* sort: [{ field: 'createdAt', direction: 'desc' }],
|
|
1623
|
-
* pagination: { page: 1, limit: 20 }
|
|
1624
|
-
* });
|
|
1625
|
-
*/
|
|
1626
|
-
async findPage(pageable) {
|
|
1627
|
-
return this.executeWithMonitoring("findPage", async () => {
|
|
1628
|
-
const { filters = {}, sort = [], pagination = { page: 1, limit: 20 } } = pageable;
|
|
1629
|
-
const whereCondition = buildFilters(filters, this.table);
|
|
1630
|
-
const orderBy = buildSort(sort, this.table);
|
|
1631
|
-
const { offset, limit } = applyPagination(pagination);
|
|
1632
|
-
const readDb = this.getReadDb();
|
|
1633
|
-
const data = await readDb.select().from(this.table).where(whereCondition).orderBy(...orderBy).limit(limit).offset(offset);
|
|
1634
|
-
const total = await countTotal(readDb, this.table, whereCondition);
|
|
1635
|
-
const meta = createPaginationMeta(pagination, total);
|
|
1636
|
-
return { data, meta };
|
|
1637
|
-
});
|
|
1638
|
-
}
|
|
1639
|
-
/**
|
|
1640
|
-
* Find one record by ID (uses Replica)
|
|
1641
|
-
*
|
|
1642
|
-
* @example
|
|
1643
|
-
* const user = await userRepo.findById(1);
|
|
1644
|
-
*/
|
|
1645
|
-
async findById(id2) {
|
|
1646
|
-
return this.executeWithMonitoring("findById", async () => {
|
|
1647
|
-
const idColumn = this.getIdColumn();
|
|
1648
|
-
const { eq: eq2 } = await import('drizzle-orm');
|
|
1649
|
-
const readDb = this.getReadDb();
|
|
1650
|
-
const [result] = await readDb.select().from(this.table).where(eq2(idColumn, id2));
|
|
1651
|
-
return result ?? null;
|
|
1652
|
-
});
|
|
1653
|
-
}
|
|
1654
|
-
/**
|
|
1655
|
-
* Find one record by condition (uses Replica)
|
|
1656
|
-
*
|
|
1657
|
-
* @example
|
|
1658
|
-
* const user = await userRepo.findOne(eq(users.email, 'john@example.com'));
|
|
1659
|
-
*/
|
|
1660
|
-
async findOne(where) {
|
|
1661
|
-
return this.executeWithMonitoring("findOne", async () => {
|
|
1662
|
-
const readDb = this.getReadDb();
|
|
1663
|
-
const [result] = await readDb.select().from(this.table).where(where);
|
|
1664
|
-
return result ?? null;
|
|
1665
|
-
});
|
|
1666
|
-
}
|
|
1667
|
-
/**
|
|
1668
|
-
* Create a new record (uses Primary)
|
|
1669
|
-
*
|
|
1670
|
-
* @example
|
|
1671
|
-
* const user = await userRepo.save({ email: 'john@example.com', name: 'John' });
|
|
1672
|
-
*/
|
|
1673
|
-
async save(data) {
|
|
1674
|
-
return this.executeWithMonitoring("save", async () => {
|
|
1675
|
-
const writeDb = this.getWriteDb();
|
|
1676
|
-
const [result] = await writeDb.insert(this.table).values(data).returning();
|
|
1677
|
-
return result;
|
|
1678
|
-
});
|
|
1679
|
-
}
|
|
1680
|
-
/**
|
|
1681
|
-
* Update a record (uses Primary)
|
|
1682
|
-
*
|
|
1683
|
-
* Automatically injects current timestamp if table has auto-update field configured.
|
|
1684
|
-
*
|
|
1685
|
-
* @example
|
|
1686
|
-
* const user = await userRepo.update(1, { name: 'Jane' });
|
|
1687
|
-
*/
|
|
1688
|
-
async update(id2, data) {
|
|
1689
|
-
return this.executeWithMonitoring("update", async () => {
|
|
1690
|
-
const idColumn = this.getIdColumn();
|
|
1691
|
-
const updateData = this.injectAutoUpdateTimestamp(data);
|
|
1692
|
-
const { eq: eq2 } = await import('drizzle-orm');
|
|
1693
|
-
const writeDb = this.getWriteDb();
|
|
1694
|
-
const [result] = await writeDb.update(this.table).set(updateData).where(eq2(idColumn, id2)).returning();
|
|
1695
|
-
return result ?? null;
|
|
1696
|
-
});
|
|
1697
|
-
}
|
|
1698
|
-
/**
|
|
1699
|
-
* Delete a record (uses Primary)
|
|
1700
|
-
*
|
|
1701
|
-
* @example
|
|
1702
|
-
* const deleted = await userRepo.delete(1);
|
|
1703
|
-
*/
|
|
1704
|
-
async delete(id2) {
|
|
1705
|
-
return this.executeWithMonitoring("delete", async () => {
|
|
1706
|
-
const idColumn = this.getIdColumn();
|
|
1707
|
-
const { eq: eq2 } = await import('drizzle-orm');
|
|
1708
|
-
const writeDb = this.getWriteDb();
|
|
1709
|
-
const [result] = await writeDb.delete(this.table).where(eq2(idColumn, id2)).returning();
|
|
1710
|
-
return result ?? null;
|
|
1711
|
-
});
|
|
1712
|
-
}
|
|
1713
|
-
/**
|
|
1714
|
-
* Count records (uses Replica)
|
|
1715
|
-
*
|
|
1716
|
-
* @example
|
|
1717
|
-
* const count = await userRepo.count();
|
|
1718
|
-
*/
|
|
1719
|
-
async count(where) {
|
|
1720
|
-
return this.executeWithMonitoring("count", async () => {
|
|
1721
|
-
const readDb = this.getReadDb();
|
|
1722
|
-
return countTotal(readDb, this.table, where);
|
|
1723
|
-
});
|
|
1724
|
-
}
|
|
1725
|
-
/**
|
|
1726
|
-
* Find records by filters (uses Replica)
|
|
1727
|
-
*
|
|
1728
|
-
* @example
|
|
1729
|
-
* const users = await userRepo.findWhere({ email: { like: '@gmail.com' }, status: 'active' });
|
|
1730
|
-
*/
|
|
1731
|
-
async findWhere(filters) {
|
|
1732
|
-
return this.executeWithMonitoring("findWhere", async () => {
|
|
1733
|
-
const whereCondition = buildFilters(filters, this.table);
|
|
1734
|
-
const readDb = this.getReadDb();
|
|
1735
|
-
return readDb.select().from(this.table).where(whereCondition);
|
|
1736
|
-
});
|
|
1737
|
-
}
|
|
1738
|
-
/**
|
|
1739
|
-
* Find one record by filters (uses Replica)
|
|
1740
|
-
*
|
|
1741
|
-
* @example
|
|
1742
|
-
* const user = await userRepo.findOneWhere({ email: 'john@example.com' });
|
|
1743
|
-
*/
|
|
1744
|
-
async findOneWhere(filters) {
|
|
1745
|
-
return this.executeWithMonitoring("findOneWhere", async () => {
|
|
1746
|
-
const whereCondition = buildFilters(filters, this.table);
|
|
1747
|
-
const readDb = this.getReadDb();
|
|
1748
|
-
const [result] = await readDb.select().from(this.table).where(whereCondition);
|
|
1749
|
-
return result ?? null;
|
|
1750
|
-
});
|
|
1968
|
+
if (options?.orderBy) {
|
|
1969
|
+
const orderByArray = Array.isArray(options.orderBy) ? options.orderBy : [options.orderBy];
|
|
1970
|
+
query = query.orderBy(...orderByArray);
|
|
1751
1971
|
}
|
|
1752
|
-
|
|
1753
|
-
|
|
1754
|
-
*
|
|
1755
|
-
* @example
|
|
1756
|
-
* const exists = await userRepo.exists(1);
|
|
1757
|
-
*/
|
|
1758
|
-
async exists(id2) {
|
|
1759
|
-
return this.executeWithMonitoring("exists", async () => {
|
|
1760
|
-
const idColumn = this.getIdColumn();
|
|
1761
|
-
const { eq: eq2 } = await import('drizzle-orm');
|
|
1762
|
-
const readDb = this.getReadDb();
|
|
1763
|
-
const [result] = await readDb.select().from(this.table).where(eq2(idColumn, id2)).limit(1);
|
|
1764
|
-
return !!result;
|
|
1765
|
-
});
|
|
1972
|
+
if (options?.limit) {
|
|
1973
|
+
query = query.limit(options.limit);
|
|
1766
1974
|
}
|
|
1767
|
-
|
|
1768
|
-
|
|
1769
|
-
*
|
|
1770
|
-
* @example
|
|
1771
|
-
* const exists = await userRepo.existsBy({ email: 'john@example.com' });
|
|
1772
|
-
*/
|
|
1773
|
-
async existsBy(filters) {
|
|
1774
|
-
return this.executeWithMonitoring("existsBy", async () => {
|
|
1775
|
-
const whereCondition = buildFilters(filters, this.table);
|
|
1776
|
-
const readDb = this.getReadDb();
|
|
1777
|
-
const [result] = await readDb.select().from(this.table).where(whereCondition).limit(1);
|
|
1778
|
-
return !!result;
|
|
1779
|
-
});
|
|
1975
|
+
if (options?.offset) {
|
|
1976
|
+
query = query.offset(options.offset);
|
|
1780
1977
|
}
|
|
1781
|
-
|
|
1782
|
-
* Count records by filters (uses Replica)
|
|
1783
|
-
*
|
|
1784
|
-
* @example
|
|
1785
|
-
* const count = await userRepo.countBy({ status: 'active' });
|
|
1786
|
-
*/
|
|
1787
|
-
async countBy(filters) {
|
|
1788
|
-
return this.executeWithMonitoring("countBy", async () => {
|
|
1789
|
-
const whereCondition = buildFilters(filters, this.table);
|
|
1790
|
-
const readDb = this.getReadDb();
|
|
1791
|
-
return countTotal(readDb, this.table, whereCondition);
|
|
1792
|
-
});
|
|
1793
|
-
}
|
|
1794
|
-
/**
|
|
1795
|
-
* Create multiple records (uses Primary)
|
|
1796
|
-
*
|
|
1797
|
-
* @example
|
|
1798
|
-
* const users = await userRepo.saveMany([
|
|
1799
|
-
* { email: 'user1@example.com', name: 'User 1' },
|
|
1800
|
-
* { email: 'user2@example.com', name: 'User 2' }
|
|
1801
|
-
* ]);
|
|
1802
|
-
*/
|
|
1803
|
-
async saveMany(data) {
|
|
1804
|
-
return this.executeWithMonitoring("saveMany", async () => {
|
|
1805
|
-
const writeDb = this.getWriteDb();
|
|
1806
|
-
return writeDb.insert(this.table).values(data).returning();
|
|
1807
|
-
});
|
|
1808
|
-
}
|
|
1809
|
-
/**
|
|
1810
|
-
* Update multiple records by filters (uses Primary)
|
|
1811
|
-
*
|
|
1812
|
-
* Automatically injects current timestamp if table has auto-update field configured.
|
|
1813
|
-
*
|
|
1814
|
-
* @example
|
|
1815
|
-
* const count = await userRepo.updateWhere({ status: 'inactive' }, { status: 'archived' });
|
|
1816
|
-
*/
|
|
1817
|
-
async updateWhere(filters, data) {
|
|
1818
|
-
return this.executeWithMonitoring("updateWhere", async () => {
|
|
1819
|
-
const updateData = this.injectAutoUpdateTimestamp(data);
|
|
1820
|
-
const whereCondition = buildFilters(filters, this.table);
|
|
1821
|
-
const writeDb = this.getWriteDb();
|
|
1822
|
-
const results = await writeDb.update(this.table).set(updateData).where(whereCondition).returning();
|
|
1823
|
-
return results.length;
|
|
1824
|
-
});
|
|
1825
|
-
}
|
|
1826
|
-
/**
|
|
1827
|
-
* Delete multiple records by filters (uses Primary)
|
|
1828
|
-
*
|
|
1829
|
-
* @example
|
|
1830
|
-
* const count = await userRepo.deleteWhere({ status: 'banned' });
|
|
1831
|
-
*/
|
|
1832
|
-
async deleteWhere(filters) {
|
|
1833
|
-
return this.executeWithMonitoring("deleteWhere", async () => {
|
|
1834
|
-
const whereCondition = buildFilters(filters, this.table);
|
|
1835
|
-
const writeDb = this.getWriteDb();
|
|
1836
|
-
const results = await writeDb.delete(this.table).where(whereCondition).returning();
|
|
1837
|
-
return results.length;
|
|
1838
|
-
});
|
|
1839
|
-
}
|
|
1840
|
-
// ============================================================
|
|
1841
|
-
// Query Builder (Fluent Interface)
|
|
1842
|
-
// ============================================================
|
|
1843
|
-
/**
|
|
1844
|
-
* Start a chainable query builder (uses Replica)
|
|
1845
|
-
*
|
|
1846
|
-
* Returns a QueryBuilder instance for building complex queries with method chaining.
|
|
1847
|
-
*
|
|
1848
|
-
* @returns QueryBuilder instance for chaining
|
|
1849
|
-
*
|
|
1850
|
-
* @example
|
|
1851
|
-
* ```typescript
|
|
1852
|
-
* // Simple chaining
|
|
1853
|
-
* const users = await userRepo
|
|
1854
|
-
* .query()
|
|
1855
|
-
* .where({ status: 'active' })
|
|
1856
|
-
* .orderBy('createdAt', 'desc')
|
|
1857
|
-
* .limit(10)
|
|
1858
|
-
* .findMany();
|
|
1859
|
-
*
|
|
1860
|
-
* // Multiple conditions
|
|
1861
|
-
* const admins = await userRepo
|
|
1862
|
-
* .query()
|
|
1863
|
-
* .where({ role: 'admin' })
|
|
1864
|
-
* .where({ status: 'active' }) // AND condition
|
|
1865
|
-
* .findMany();
|
|
1866
|
-
*
|
|
1867
|
-
* // Reusable query
|
|
1868
|
-
* const activeQuery = userRepo.query().where({ status: 'active' });
|
|
1869
|
-
* const users = await activeQuery.findMany();
|
|
1870
|
-
* const count = await activeQuery.count();
|
|
1871
|
-
* ```
|
|
1872
|
-
*/
|
|
1873
|
-
query() {
|
|
1874
|
-
const readDb = this.getReadDb();
|
|
1875
|
-
return new QueryBuilder(readDb, this.table);
|
|
1876
|
-
}
|
|
1877
|
-
};
|
|
1878
|
-
|
|
1879
|
-
// src/db/repository/factory.ts
|
|
1880
|
-
var repositoryCache = /* @__PURE__ */ new Map();
|
|
1881
|
-
function getCacheKey(table, RepositoryClass) {
|
|
1882
|
-
const tableName = table[Symbol.for("drizzle:Name")] || table.name || table.toString();
|
|
1883
|
-
const className = RepositoryClass?.name || "Repository";
|
|
1884
|
-
return `${tableName}:${className}`;
|
|
1885
|
-
}
|
|
1886
|
-
function getRepository(table, RepositoryClass) {
|
|
1887
|
-
const cacheKey = getCacheKey(table, RepositoryClass);
|
|
1888
|
-
let repo = repositoryCache.get(cacheKey);
|
|
1889
|
-
if (!repo) {
|
|
1890
|
-
if (RepositoryClass) {
|
|
1891
|
-
repo = new RepositoryClass(table);
|
|
1892
|
-
} else {
|
|
1893
|
-
repo = new Repository(table);
|
|
1894
|
-
}
|
|
1895
|
-
repositoryCache.set(cacheKey, repo);
|
|
1896
|
-
}
|
|
1897
|
-
return repo;
|
|
1898
|
-
}
|
|
1899
|
-
function clearRepositoryCache() {
|
|
1900
|
-
repositoryCache.clear();
|
|
1901
|
-
}
|
|
1902
|
-
function getRepositoryCacheSize() {
|
|
1903
|
-
return repositoryCache.size;
|
|
1978
|
+
return query;
|
|
1904
1979
|
}
|
|
1905
|
-
|
|
1906
|
-
|
|
1907
|
-
|
|
1908
|
-
|
|
1909
|
-
return `${tableName}:${className}`;
|
|
1910
|
-
}
|
|
1911
|
-
function withRepositoryScope(fn) {
|
|
1912
|
-
const cache = /* @__PURE__ */ new Map();
|
|
1913
|
-
return repositoryStorage.run(cache, fn);
|
|
1914
|
-
}
|
|
1915
|
-
function getScopedRepository(table, RepositoryClass) {
|
|
1916
|
-
const cache = repositoryStorage.getStore();
|
|
1917
|
-
if (!cache) {
|
|
1918
|
-
return RepositoryClass ? new RepositoryClass(table) : new Repository(table);
|
|
1919
|
-
}
|
|
1920
|
-
const key = getCacheKey2(table, RepositoryClass);
|
|
1921
|
-
let repo = cache.get(key);
|
|
1922
|
-
if (!repo) {
|
|
1923
|
-
repo = RepositoryClass ? new RepositoryClass(table) : new Repository(table);
|
|
1924
|
-
cache.set(key, repo);
|
|
1980
|
+
async function create(table, data) {
|
|
1981
|
+
const db = getDatabase("write");
|
|
1982
|
+
if (!db) {
|
|
1983
|
+
throw new Error("Database not initialized. Call initDatabase() first.");
|
|
1925
1984
|
}
|
|
1926
|
-
|
|
1985
|
+
const [result] = await db.insert(table).values(data).returning();
|
|
1986
|
+
return result;
|
|
1927
1987
|
}
|
|
1928
|
-
function
|
|
1929
|
-
|
|
1930
|
-
|
|
1931
|
-
|
|
1932
|
-
}
|
|
1933
|
-
function getScopedCacheSize() {
|
|
1934
|
-
const cache = repositoryStorage.getStore();
|
|
1935
|
-
return cache?.size ?? 0;
|
|
1936
|
-
}
|
|
1937
|
-
function isInRepositoryScope() {
|
|
1938
|
-
return repositoryStorage.getStore() !== void 0;
|
|
1939
|
-
}
|
|
1940
|
-
|
|
1941
|
-
// src/db/repository/relation-registry.ts
|
|
1942
|
-
var tableNameCache = /* @__PURE__ */ new WeakMap();
|
|
1943
|
-
function getTableName(table) {
|
|
1944
|
-
const cached = tableNameCache.get(table);
|
|
1945
|
-
if (cached) {
|
|
1946
|
-
return cached;
|
|
1988
|
+
async function createMany(table, data) {
|
|
1989
|
+
const db = getDatabase("write");
|
|
1990
|
+
if (!db) {
|
|
1991
|
+
throw new Error("Database not initialized. Call initDatabase() first.");
|
|
1947
1992
|
}
|
|
1948
|
-
const
|
|
1949
|
-
|
|
1950
|
-
return name;
|
|
1993
|
+
const results = await db.insert(table).values(data).returning();
|
|
1994
|
+
return results;
|
|
1951
1995
|
}
|
|
1952
|
-
|
|
1953
|
-
|
|
1954
|
-
|
|
1955
|
-
|
|
1956
|
-
|
|
1957
|
-
|
|
1958
|
-
|
|
1959
|
-
|
|
1960
|
-
|
|
1961
|
-
|
|
1962
|
-
|
|
1963
|
-
|
|
1964
|
-
|
|
1965
|
-
|
|
1966
|
-
|
|
1967
|
-
return new Repository(this.db, table);
|
|
1968
|
-
}
|
|
1969
|
-
/**
|
|
1970
|
-
* Drizzle의 모든 메서드를 프록시
|
|
1971
|
-
*
|
|
1972
|
-
* select, insert, update, delete, transaction 등 모든 Drizzle 메서드 사용 가능
|
|
1973
|
-
*/
|
|
1974
|
-
get select() {
|
|
1975
|
-
return this.db.select.bind(this.db);
|
|
1976
|
-
}
|
|
1977
|
-
get insert() {
|
|
1978
|
-
return this.db.insert.bind(this.db);
|
|
1979
|
-
}
|
|
1980
|
-
get update() {
|
|
1981
|
-
return this.db.update.bind(this.db);
|
|
1982
|
-
}
|
|
1983
|
-
get delete() {
|
|
1984
|
-
return this.db.delete.bind(this.db);
|
|
1985
|
-
}
|
|
1986
|
-
get execute() {
|
|
1987
|
-
return this.db.execute.bind(this.db);
|
|
1988
|
-
}
|
|
1989
|
-
get transaction() {
|
|
1990
|
-
return this.db.transaction.bind(this.db);
|
|
1991
|
-
}
|
|
1992
|
-
get query() {
|
|
1993
|
-
return this.db.query;
|
|
1996
|
+
async function upsert(table, data, options) {
|
|
1997
|
+
const db = getDatabase("write");
|
|
1998
|
+
if (!db) {
|
|
1999
|
+
throw new Error("Database not initialized. Call initDatabase() first.");
|
|
2000
|
+
}
|
|
2001
|
+
const [result] = await db.insert(table).values(data).onConflictDoUpdate({
|
|
2002
|
+
target: options.target,
|
|
2003
|
+
set: options.set || data
|
|
2004
|
+
}).returning();
|
|
2005
|
+
return result;
|
|
2006
|
+
}
|
|
2007
|
+
async function updateOne(table, where, data) {
|
|
2008
|
+
const db = getDatabase("write");
|
|
2009
|
+
if (!db) {
|
|
2010
|
+
throw new Error("Database not initialized. Call initDatabase() first.");
|
|
1994
2011
|
}
|
|
1995
|
-
|
|
1996
|
-
|
|
2012
|
+
const whereClause = isSQLWrapper(where) ? where : where ? buildWhereFromObject(table, where) : void 0;
|
|
2013
|
+
if (!whereClause) {
|
|
2014
|
+
throw new Error("updateOne requires at least one where condition");
|
|
1997
2015
|
}
|
|
1998
|
-
|
|
1999
|
-
|
|
2000
|
-
|
|
2001
|
-
|
|
2002
|
-
|
|
2016
|
+
const [result] = await db.update(table).set(data).where(whereClause).returning();
|
|
2017
|
+
return result ?? null;
|
|
2018
|
+
}
|
|
2019
|
+
async function updateMany(table, where, data) {
|
|
2020
|
+
const db = getDatabase("write");
|
|
2021
|
+
if (!db) {
|
|
2022
|
+
throw new Error("Database not initialized. Call initDatabase() first.");
|
|
2003
2023
|
}
|
|
2004
|
-
|
|
2005
|
-
|
|
2006
|
-
|
|
2007
|
-
function getDb(type) {
|
|
2008
|
-
const tx = getTransaction();
|
|
2009
|
-
if (tx) {
|
|
2010
|
-
return new WrappedDb(tx);
|
|
2011
|
-
}
|
|
2012
|
-
const rawDb = getDatabase(type);
|
|
2013
|
-
if (!rawDb) {
|
|
2014
|
-
throw new Error(
|
|
2015
|
-
"Database not initialized. Set DATABASE_URL environment variable or call initDatabase() first."
|
|
2016
|
-
);
|
|
2024
|
+
const whereClause = isSQLWrapper(where) ? where : where ? buildWhereFromObject(table, where) : void 0;
|
|
2025
|
+
if (!whereClause) {
|
|
2026
|
+
throw new Error("updateMany requires at least one where condition");
|
|
2017
2027
|
}
|
|
2018
|
-
|
|
2028
|
+
const results = await db.update(table).set(data).where(whereClause).returning();
|
|
2029
|
+
return results;
|
|
2019
2030
|
}
|
|
2020
|
-
|
|
2021
|
-
|
|
2022
|
-
|
|
2023
|
-
|
|
2024
|
-
return "postgresql";
|
|
2031
|
+
async function deleteOne(table, where) {
|
|
2032
|
+
const db = getDatabase("write");
|
|
2033
|
+
if (!db) {
|
|
2034
|
+
throw new Error("Database not initialized. Call initDatabase() first.");
|
|
2025
2035
|
}
|
|
2026
|
-
|
|
2027
|
-
|
|
2028
|
-
|
|
2029
|
-
if (url.startsWith("sqlite://") || url.includes(".db") || url.includes(".sqlite")) {
|
|
2030
|
-
return "sqlite";
|
|
2036
|
+
const whereClause = isSQLWrapper(where) ? where : where ? buildWhereFromObject(table, where) : void 0;
|
|
2037
|
+
if (!whereClause) {
|
|
2038
|
+
throw new Error("deleteOne requires at least one where condition");
|
|
2031
2039
|
}
|
|
2032
|
-
|
|
2033
|
-
|
|
2034
|
-
);
|
|
2040
|
+
const [result] = await db.delete(table).where(whereClause).returning();
|
|
2041
|
+
return result ?? null;
|
|
2035
2042
|
}
|
|
2036
|
-
function
|
|
2037
|
-
const
|
|
2038
|
-
if (!
|
|
2039
|
-
throw new Error(
|
|
2040
|
-
"DATABASE_URL is required. Set it in .env or pass it to getDrizzleConfig()"
|
|
2041
|
-
);
|
|
2043
|
+
async function deleteMany(table, where) {
|
|
2044
|
+
const db = getDatabase("write");
|
|
2045
|
+
if (!db) {
|
|
2046
|
+
throw new Error("Database not initialized. Call initDatabase() first.");
|
|
2042
2047
|
}
|
|
2043
|
-
const
|
|
2044
|
-
|
|
2045
|
-
|
|
2046
|
-
return {
|
|
2047
|
-
schema,
|
|
2048
|
-
out,
|
|
2049
|
-
dialect,
|
|
2050
|
-
dbCredentials: getDbCredentials(dialect, databaseUrl)
|
|
2051
|
-
};
|
|
2052
|
-
}
|
|
2053
|
-
function getDbCredentials(dialect, url) {
|
|
2054
|
-
switch (dialect) {
|
|
2055
|
-
case "postgresql":
|
|
2056
|
-
case "mysql":
|
|
2057
|
-
return { url };
|
|
2058
|
-
case "sqlite":
|
|
2059
|
-
const dbPath = url.replace("sqlite://", "").replace("sqlite:", "");
|
|
2060
|
-
return { url: dbPath };
|
|
2061
|
-
default:
|
|
2062
|
-
throw new Error(`Unsupported dialect: ${dialect}`);
|
|
2048
|
+
const whereClause = isSQLWrapper(where) ? where : where ? buildWhereFromObject(table, where) : void 0;
|
|
2049
|
+
if (!whereClause) {
|
|
2050
|
+
throw new Error("deleteMany requires at least one where condition");
|
|
2063
2051
|
}
|
|
2052
|
+
const results = await db.delete(table).where(whereClause).returning();
|
|
2053
|
+
return results;
|
|
2064
2054
|
}
|
|
2065
|
-
function
|
|
2066
|
-
const
|
|
2067
|
-
|
|
2068
|
-
|
|
2069
|
-
|
|
2070
|
-
|
|
2071
|
-
|
|
2072
|
-
|
|
2073
|
-
|
|
2074
|
-
|
|
2075
|
-
|
|
2076
|
-
}
|
|
2077
|
-
function id() {
|
|
2078
|
-
return bigserial("id", { mode: "number" }).primaryKey();
|
|
2079
|
-
}
|
|
2080
|
-
function timestamps(options) {
|
|
2081
|
-
const updatedAtColumn = timestamp("updated_at", { withTimezone: true, mode: "date" }).defaultNow().notNull();
|
|
2082
|
-
if (options?.autoUpdate) {
|
|
2083
|
-
updatedAtColumn.__autoUpdate = true;
|
|
2055
|
+
async function count(table, where) {
|
|
2056
|
+
const db = getDatabase("read");
|
|
2057
|
+
if (!db) {
|
|
2058
|
+
throw new Error("Database not initialized. Call initDatabase() first.");
|
|
2059
|
+
}
|
|
2060
|
+
let query = db.select().from(table);
|
|
2061
|
+
if (where) {
|
|
2062
|
+
const whereClause = isSQLWrapper(where) ? where : where ? buildWhereFromObject(table, where) : void 0;
|
|
2063
|
+
if (whereClause) {
|
|
2064
|
+
query = query.where(whereClause);
|
|
2065
|
+
}
|
|
2084
2066
|
}
|
|
2085
|
-
|
|
2086
|
-
|
|
2087
|
-
updatedAt: updatedAtColumn
|
|
2088
|
-
};
|
|
2089
|
-
}
|
|
2090
|
-
function foreignKey(name, reference, options) {
|
|
2091
|
-
return bigserial(`${name}_id`, { mode: "number" }).notNull().references(reference, { onDelete: options?.onDelete ?? "cascade" });
|
|
2092
|
-
}
|
|
2093
|
-
function optionalForeignKey(name, reference, options) {
|
|
2094
|
-
return bigserial(`${name}_id`, { mode: "number" }).references(reference, { onDelete: options?.onDelete ?? "set null" });
|
|
2067
|
+
const results = await query;
|
|
2068
|
+
return results.length;
|
|
2095
2069
|
}
|
|
2096
2070
|
|
|
2097
|
-
export {
|
|
2071
|
+
export { Transactional, checkConnection, closeDatabase, count, create, createDatabaseConnection, createDatabaseFromEnv, createFunctionSchema, createMany, deleteMany, deleteOne, detectDialect, findMany, findOne, foreignKey, fromPostgresError, generateDrizzleConfigFile, getDatabase, getDatabaseInfo, getDrizzleConfig, getSchemaInfo, getTransaction, id, initDatabase, optionalForeignKey, packageNameToSchema, runWithTransaction, setDatabase, timestamps, updateMany, updateOne, upsert };
|
|
2098
2072
|
//# sourceMappingURL=index.js.map
|
|
2099
2073
|
//# sourceMappingURL=index.js.map
|