@spfn/core 0.1.0-alpha.88 → 0.2.0-beta.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +298 -466
- package/dist/boss-DI1r4kTS.d.ts +244 -0
- package/dist/cache/index.d.ts +13 -33
- package/dist/cache/index.js +14 -703
- package/dist/cache/index.js.map +1 -1
- package/dist/codegen/index.d.ts +214 -17
- package/dist/codegen/index.js +231 -1420
- package/dist/codegen/index.js.map +1 -1
- package/dist/config/index.d.ts +1227 -0
- package/dist/config/index.js +273 -0
- package/dist/config/index.js.map +1 -0
- package/dist/db/index.d.ts +741 -59
- package/dist/db/index.js +1063 -1226
- package/dist/db/index.js.map +1 -1
- package/dist/env/index.d.ts +658 -308
- package/dist/env/index.js +503 -928
- package/dist/env/index.js.map +1 -1
- package/dist/env/loader.d.ts +87 -0
- package/dist/env/loader.js +70 -0
- package/dist/env/loader.js.map +1 -0
- package/dist/errors/index.d.ts +417 -29
- package/dist/errors/index.js +359 -98
- package/dist/errors/index.js.map +1 -1
- package/dist/event/index.d.ts +41 -0
- package/dist/event/index.js +131 -0
- package/dist/event/index.js.map +1 -0
- package/dist/event/sse/client.d.ts +82 -0
- package/dist/event/sse/client.js +115 -0
- package/dist/event/sse/client.js.map +1 -0
- package/dist/event/sse/index.d.ts +40 -0
- package/dist/event/sse/index.js +92 -0
- package/dist/event/sse/index.js.map +1 -0
- package/dist/job/index.d.ts +218 -0
- package/dist/job/index.js +410 -0
- package/dist/job/index.js.map +1 -0
- package/dist/logger/index.d.ts +20 -79
- package/dist/logger/index.js +82 -387
- package/dist/logger/index.js.map +1 -1
- package/dist/middleware/index.d.ts +102 -20
- package/dist/middleware/index.js +51 -705
- package/dist/middleware/index.js.map +1 -1
- package/dist/nextjs/index.d.ts +120 -0
- package/dist/nextjs/index.js +448 -0
- package/dist/nextjs/index.js.map +1 -0
- package/dist/{client/nextjs/index.d.ts → nextjs/server.d.ts} +335 -262
- package/dist/nextjs/server.js +637 -0
- package/dist/nextjs/server.js.map +1 -0
- package/dist/route/index.d.ts +879 -25
- package/dist/route/index.js +697 -1271
- package/dist/route/index.js.map +1 -1
- package/dist/route/types.d.ts +9 -0
- package/dist/route/types.js +3 -0
- package/dist/route/types.js.map +1 -0
- package/dist/router-Di7ENoah.d.ts +151 -0
- package/dist/server/index.d.ts +345 -64
- package/dist/server/index.js +1174 -3233
- package/dist/server/index.js.map +1 -1
- package/dist/types-B-e_f2dQ.d.ts +121 -0
- package/dist/types-BGl4QL1w.d.ts +77 -0
- package/dist/types-BOPTApC2.d.ts +245 -0
- package/docs/cache.md +133 -0
- package/docs/codegen.md +74 -0
- package/docs/database.md +346 -0
- package/docs/entity.md +539 -0
- package/docs/env.md +477 -0
- package/docs/errors.md +319 -0
- package/docs/event.md +116 -0
- package/docs/file-upload.md +717 -0
- package/docs/job.md +131 -0
- package/docs/logger.md +108 -0
- package/docs/middleware.md +337 -0
- package/docs/nextjs.md +241 -0
- package/docs/repository.md +496 -0
- package/docs/route.md +497 -0
- package/docs/server.md +307 -0
- package/package.json +68 -48
- package/dist/auto-loader-JFaZ9gON.d.ts +0 -80
- package/dist/client/index.d.ts +0 -358
- package/dist/client/index.js +0 -357
- package/dist/client/index.js.map +0 -1
- package/dist/client/nextjs/index.js +0 -371
- package/dist/client/nextjs/index.js.map +0 -1
- package/dist/codegen/generators/index.d.ts +0 -19
- package/dist/codegen/generators/index.js +0 -1404
- package/dist/codegen/generators/index.js.map +0 -1
- package/dist/database-errors-BNNmLTJE.d.ts +0 -86
- package/dist/events/index.d.ts +0 -183
- package/dist/events/index.js +0 -77
- package/dist/events/index.js.map +0 -1
- package/dist/index-DHiAqhKv.d.ts +0 -101
- package/dist/index.d.ts +0 -8
- package/dist/index.js +0 -3674
- package/dist/index.js.map +0 -1
- package/dist/types/index.d.ts +0 -121
- package/dist/types/index.js +0 -38
- package/dist/types/index.js.map +0 -1
- package/dist/types-BXibIEyj.d.ts +0 -60
package/dist/db/index.js
CHANGED
|
@@ -1,944 +1,18 @@
|
|
|
1
1
|
import { drizzle } from 'drizzle-orm/postgres-js';
|
|
2
|
-
import {
|
|
3
|
-
import {
|
|
4
|
-
import { config } from 'dotenv';
|
|
2
|
+
import { env } from '@spfn/core/config';
|
|
3
|
+
import { logger } from '@spfn/core/logger';
|
|
5
4
|
import postgres from 'postgres';
|
|
6
|
-
import {
|
|
5
|
+
import { QueryError, ConnectionError, DeadlockError, TransactionError, ConstraintViolationError, DuplicateEntryError, DatabaseError } from '@spfn/core/errors';
|
|
6
|
+
import { parseNumber, parseBoolean } from '@spfn/core/env';
|
|
7
|
+
import { existsSync, readFileSync, readdirSync, statSync } from 'fs';
|
|
8
|
+
import { join, dirname, basename } from 'path';
|
|
9
|
+
import { bigserial, timestamp, uuid as uuid$1, text, jsonb, pgSchema } from 'drizzle-orm/pg-core';
|
|
7
10
|
import { AsyncLocalStorage } from 'async_hooks';
|
|
8
|
-
import { randomUUID } from 'crypto';
|
|
9
11
|
import { createMiddleware } from 'hono/factory';
|
|
10
|
-
import {
|
|
12
|
+
import { randomUUID } from 'crypto';
|
|
13
|
+
import { count as count$1, sql, eq, and } from 'drizzle-orm';
|
|
11
14
|
|
|
12
15
|
// src/db/manager/factory.ts
|
|
13
|
-
|
|
14
|
-
// src/logger/types.ts
|
|
15
|
-
var LOG_LEVEL_PRIORITY = {
|
|
16
|
-
debug: 0,
|
|
17
|
-
info: 1,
|
|
18
|
-
warn: 2,
|
|
19
|
-
error: 3,
|
|
20
|
-
fatal: 4
|
|
21
|
-
};
|
|
22
|
-
|
|
23
|
-
// src/logger/formatters.ts
|
|
24
|
-
var SENSITIVE_KEYS = [
|
|
25
|
-
"password",
|
|
26
|
-
"passwd",
|
|
27
|
-
"pwd",
|
|
28
|
-
"secret",
|
|
29
|
-
"token",
|
|
30
|
-
"apikey",
|
|
31
|
-
"api_key",
|
|
32
|
-
"accesstoken",
|
|
33
|
-
"access_token",
|
|
34
|
-
"refreshtoken",
|
|
35
|
-
"refresh_token",
|
|
36
|
-
"authorization",
|
|
37
|
-
"auth",
|
|
38
|
-
"cookie",
|
|
39
|
-
"session",
|
|
40
|
-
"sessionid",
|
|
41
|
-
"session_id",
|
|
42
|
-
"privatekey",
|
|
43
|
-
"private_key",
|
|
44
|
-
"creditcard",
|
|
45
|
-
"credit_card",
|
|
46
|
-
"cardnumber",
|
|
47
|
-
"card_number",
|
|
48
|
-
"cvv",
|
|
49
|
-
"ssn",
|
|
50
|
-
"pin"
|
|
51
|
-
];
|
|
52
|
-
var MASKED_VALUE = "***MASKED***";
|
|
53
|
-
function isSensitiveKey(key) {
|
|
54
|
-
const lowerKey = key.toLowerCase();
|
|
55
|
-
return SENSITIVE_KEYS.some((sensitive) => lowerKey.includes(sensitive));
|
|
56
|
-
}
|
|
57
|
-
function maskSensitiveData(data) {
|
|
58
|
-
if (data === null || data === void 0) {
|
|
59
|
-
return data;
|
|
60
|
-
}
|
|
61
|
-
if (Array.isArray(data)) {
|
|
62
|
-
return data.map((item) => maskSensitiveData(item));
|
|
63
|
-
}
|
|
64
|
-
if (typeof data === "object") {
|
|
65
|
-
const masked = {};
|
|
66
|
-
for (const [key, value] of Object.entries(data)) {
|
|
67
|
-
if (isSensitiveKey(key)) {
|
|
68
|
-
masked[key] = MASKED_VALUE;
|
|
69
|
-
} else if (typeof value === "object" && value !== null) {
|
|
70
|
-
masked[key] = maskSensitiveData(value);
|
|
71
|
-
} else {
|
|
72
|
-
masked[key] = value;
|
|
73
|
-
}
|
|
74
|
-
}
|
|
75
|
-
return masked;
|
|
76
|
-
}
|
|
77
|
-
return data;
|
|
78
|
-
}
|
|
79
|
-
var COLORS = {
|
|
80
|
-
reset: "\x1B[0m",
|
|
81
|
-
bright: "\x1B[1m",
|
|
82
|
-
dim: "\x1B[2m",
|
|
83
|
-
// 로그 레벨 컬러
|
|
84
|
-
debug: "\x1B[36m",
|
|
85
|
-
// cyan
|
|
86
|
-
info: "\x1B[32m",
|
|
87
|
-
// green
|
|
88
|
-
warn: "\x1B[33m",
|
|
89
|
-
// yellow
|
|
90
|
-
error: "\x1B[31m",
|
|
91
|
-
// red
|
|
92
|
-
fatal: "\x1B[35m",
|
|
93
|
-
// magenta
|
|
94
|
-
// 추가 컬러
|
|
95
|
-
gray: "\x1B[90m"
|
|
96
|
-
};
|
|
97
|
-
function formatTimestamp(date) {
|
|
98
|
-
return date.toISOString();
|
|
99
|
-
}
|
|
100
|
-
function formatTimestampHuman(date) {
|
|
101
|
-
const year = date.getFullYear();
|
|
102
|
-
const month = String(date.getMonth() + 1).padStart(2, "0");
|
|
103
|
-
const day = String(date.getDate()).padStart(2, "0");
|
|
104
|
-
const hours = String(date.getHours()).padStart(2, "0");
|
|
105
|
-
const minutes = String(date.getMinutes()).padStart(2, "0");
|
|
106
|
-
const seconds = String(date.getSeconds()).padStart(2, "0");
|
|
107
|
-
const ms = String(date.getMilliseconds()).padStart(3, "0");
|
|
108
|
-
return `${year}-${month}-${day} ${hours}:${minutes}:${seconds}.${ms}`;
|
|
109
|
-
}
|
|
110
|
-
function formatError(error) {
|
|
111
|
-
const lines = [];
|
|
112
|
-
lines.push(`${error.name}: ${error.message}`);
|
|
113
|
-
if (error.stack) {
|
|
114
|
-
const stackLines = error.stack.split("\n").slice(1);
|
|
115
|
-
lines.push(...stackLines);
|
|
116
|
-
}
|
|
117
|
-
return lines.join("\n");
|
|
118
|
-
}
|
|
119
|
-
function formatConsole(metadata, colorize = true) {
|
|
120
|
-
const parts = [];
|
|
121
|
-
const timestamp2 = formatTimestampHuman(metadata.timestamp);
|
|
122
|
-
if (colorize) {
|
|
123
|
-
parts.push(`${COLORS.gray}[${timestamp2}]${COLORS.reset}`);
|
|
124
|
-
} else {
|
|
125
|
-
parts.push(`[${timestamp2}]`);
|
|
126
|
-
}
|
|
127
|
-
if (metadata.module) {
|
|
128
|
-
if (colorize) {
|
|
129
|
-
parts.push(`${COLORS.dim}[module=${metadata.module}]${COLORS.reset}`);
|
|
130
|
-
} else {
|
|
131
|
-
parts.push(`[module=${metadata.module}]`);
|
|
132
|
-
}
|
|
133
|
-
}
|
|
134
|
-
if (metadata.context && Object.keys(metadata.context).length > 0) {
|
|
135
|
-
Object.entries(metadata.context).forEach(([key, value]) => {
|
|
136
|
-
let valueStr;
|
|
137
|
-
if (typeof value === "string") {
|
|
138
|
-
valueStr = value;
|
|
139
|
-
} else if (typeof value === "object" && value !== null) {
|
|
140
|
-
try {
|
|
141
|
-
valueStr = JSON.stringify(value);
|
|
142
|
-
} catch (error) {
|
|
143
|
-
valueStr = "[circular]";
|
|
144
|
-
}
|
|
145
|
-
} else {
|
|
146
|
-
valueStr = String(value);
|
|
147
|
-
}
|
|
148
|
-
if (colorize) {
|
|
149
|
-
parts.push(`${COLORS.dim}[${key}=${valueStr}]${COLORS.reset}`);
|
|
150
|
-
} else {
|
|
151
|
-
parts.push(`[${key}=${valueStr}]`);
|
|
152
|
-
}
|
|
153
|
-
});
|
|
154
|
-
}
|
|
155
|
-
const levelStr = metadata.level.toUpperCase();
|
|
156
|
-
if (colorize) {
|
|
157
|
-
const color = COLORS[metadata.level];
|
|
158
|
-
parts.push(`${color}(${levelStr})${COLORS.reset}:`);
|
|
159
|
-
} else {
|
|
160
|
-
parts.push(`(${levelStr}):`);
|
|
161
|
-
}
|
|
162
|
-
if (colorize) {
|
|
163
|
-
parts.push(`${COLORS.bright}${metadata.message}${COLORS.reset}`);
|
|
164
|
-
} else {
|
|
165
|
-
parts.push(metadata.message);
|
|
166
|
-
}
|
|
167
|
-
let output = parts.join(" ");
|
|
168
|
-
if (metadata.error) {
|
|
169
|
-
output += "\n" + formatError(metadata.error);
|
|
170
|
-
}
|
|
171
|
-
return output;
|
|
172
|
-
}
|
|
173
|
-
function formatJSON(metadata) {
|
|
174
|
-
const obj = {
|
|
175
|
-
timestamp: formatTimestamp(metadata.timestamp),
|
|
176
|
-
level: metadata.level,
|
|
177
|
-
message: metadata.message
|
|
178
|
-
};
|
|
179
|
-
if (metadata.module) {
|
|
180
|
-
obj.module = metadata.module;
|
|
181
|
-
}
|
|
182
|
-
if (metadata.context) {
|
|
183
|
-
obj.context = metadata.context;
|
|
184
|
-
}
|
|
185
|
-
if (metadata.error) {
|
|
186
|
-
obj.error = {
|
|
187
|
-
name: metadata.error.name,
|
|
188
|
-
message: metadata.error.message,
|
|
189
|
-
stack: metadata.error.stack
|
|
190
|
-
};
|
|
191
|
-
}
|
|
192
|
-
return JSON.stringify(obj);
|
|
193
|
-
}
|
|
194
|
-
|
|
195
|
-
// src/logger/logger.ts
|
|
196
|
-
var Logger = class _Logger {
|
|
197
|
-
config;
|
|
198
|
-
module;
|
|
199
|
-
constructor(config) {
|
|
200
|
-
this.config = config;
|
|
201
|
-
this.module = config.module;
|
|
202
|
-
}
|
|
203
|
-
/**
|
|
204
|
-
* Get current log level
|
|
205
|
-
*/
|
|
206
|
-
get level() {
|
|
207
|
-
return this.config.level;
|
|
208
|
-
}
|
|
209
|
-
/**
|
|
210
|
-
* Create child logger (per module)
|
|
211
|
-
*/
|
|
212
|
-
child(module) {
|
|
213
|
-
return new _Logger({
|
|
214
|
-
...this.config,
|
|
215
|
-
module
|
|
216
|
-
});
|
|
217
|
-
}
|
|
218
|
-
/**
|
|
219
|
-
* Debug log
|
|
220
|
-
*/
|
|
221
|
-
debug(message, context) {
|
|
222
|
-
this.log("debug", message, void 0, context);
|
|
223
|
-
}
|
|
224
|
-
/**
|
|
225
|
-
* Info log
|
|
226
|
-
*/
|
|
227
|
-
info(message, context) {
|
|
228
|
-
this.log("info", message, void 0, context);
|
|
229
|
-
}
|
|
230
|
-
warn(message, errorOrContext, context) {
|
|
231
|
-
if (errorOrContext instanceof Error) {
|
|
232
|
-
this.log("warn", message, errorOrContext, context);
|
|
233
|
-
} else {
|
|
234
|
-
this.log("warn", message, void 0, errorOrContext);
|
|
235
|
-
}
|
|
236
|
-
}
|
|
237
|
-
error(message, errorOrContext, context) {
|
|
238
|
-
if (errorOrContext instanceof Error) {
|
|
239
|
-
this.log("error", message, errorOrContext, context);
|
|
240
|
-
} else {
|
|
241
|
-
this.log("error", message, void 0, errorOrContext);
|
|
242
|
-
}
|
|
243
|
-
}
|
|
244
|
-
fatal(message, errorOrContext, context) {
|
|
245
|
-
if (errorOrContext instanceof Error) {
|
|
246
|
-
this.log("fatal", message, errorOrContext, context);
|
|
247
|
-
} else {
|
|
248
|
-
this.log("fatal", message, void 0, errorOrContext);
|
|
249
|
-
}
|
|
250
|
-
}
|
|
251
|
-
/**
|
|
252
|
-
* Log processing (internal)
|
|
253
|
-
*/
|
|
254
|
-
log(level, message, error, context) {
|
|
255
|
-
if (LOG_LEVEL_PRIORITY[level] < LOG_LEVEL_PRIORITY[this.config.level]) {
|
|
256
|
-
return;
|
|
257
|
-
}
|
|
258
|
-
const metadata = {
|
|
259
|
-
timestamp: /* @__PURE__ */ new Date(),
|
|
260
|
-
level,
|
|
261
|
-
message,
|
|
262
|
-
module: this.module,
|
|
263
|
-
error,
|
|
264
|
-
// Mask sensitive information in context to prevent credential leaks
|
|
265
|
-
context: context ? maskSensitiveData(context) : void 0
|
|
266
|
-
};
|
|
267
|
-
this.processTransports(metadata);
|
|
268
|
-
}
|
|
269
|
-
/**
|
|
270
|
-
* Process Transports
|
|
271
|
-
*/
|
|
272
|
-
processTransports(metadata) {
|
|
273
|
-
const promises = this.config.transports.filter((transport) => transport.enabled).map((transport) => this.safeTransportLog(transport, metadata));
|
|
274
|
-
Promise.all(promises).catch((error) => {
|
|
275
|
-
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
276
|
-
process.stderr.write(`[Logger] Transport error: ${errorMessage}
|
|
277
|
-
`);
|
|
278
|
-
});
|
|
279
|
-
}
|
|
280
|
-
/**
|
|
281
|
-
* Transport log (error-safe)
|
|
282
|
-
*/
|
|
283
|
-
async safeTransportLog(transport, metadata) {
|
|
284
|
-
try {
|
|
285
|
-
await transport.log(metadata);
|
|
286
|
-
} catch (error) {
|
|
287
|
-
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
288
|
-
process.stderr.write(`[Logger] Transport "${transport.name}" failed: ${errorMessage}
|
|
289
|
-
`);
|
|
290
|
-
}
|
|
291
|
-
}
|
|
292
|
-
/**
|
|
293
|
-
* Close all Transports
|
|
294
|
-
*/
|
|
295
|
-
async close() {
|
|
296
|
-
const closePromises = this.config.transports.filter((transport) => transport.close).map((transport) => transport.close());
|
|
297
|
-
await Promise.all(closePromises);
|
|
298
|
-
}
|
|
299
|
-
};
|
|
300
|
-
|
|
301
|
-
// src/logger/transports/console.ts
|
|
302
|
-
var ConsoleTransport = class {
|
|
303
|
-
name = "console";
|
|
304
|
-
level;
|
|
305
|
-
enabled;
|
|
306
|
-
colorize;
|
|
307
|
-
constructor(config) {
|
|
308
|
-
this.level = config.level;
|
|
309
|
-
this.enabled = config.enabled;
|
|
310
|
-
this.colorize = config.colorize ?? true;
|
|
311
|
-
}
|
|
312
|
-
async log(metadata) {
|
|
313
|
-
if (!this.enabled) {
|
|
314
|
-
return;
|
|
315
|
-
}
|
|
316
|
-
if (LOG_LEVEL_PRIORITY[metadata.level] < LOG_LEVEL_PRIORITY[this.level]) {
|
|
317
|
-
return;
|
|
318
|
-
}
|
|
319
|
-
const message = formatConsole(metadata, this.colorize);
|
|
320
|
-
if (metadata.level === "warn" || metadata.level === "error" || metadata.level === "fatal") {
|
|
321
|
-
console.error(message);
|
|
322
|
-
} else {
|
|
323
|
-
console.log(message);
|
|
324
|
-
}
|
|
325
|
-
}
|
|
326
|
-
};
|
|
327
|
-
var FileTransport = class {
|
|
328
|
-
name = "file";
|
|
329
|
-
level;
|
|
330
|
-
enabled;
|
|
331
|
-
logDir;
|
|
332
|
-
maxFileSize;
|
|
333
|
-
maxFiles;
|
|
334
|
-
currentStream = null;
|
|
335
|
-
currentFilename = null;
|
|
336
|
-
constructor(config) {
|
|
337
|
-
this.level = config.level;
|
|
338
|
-
this.enabled = config.enabled;
|
|
339
|
-
this.logDir = config.logDir;
|
|
340
|
-
this.maxFileSize = config.maxFileSize ?? 10 * 1024 * 1024;
|
|
341
|
-
this.maxFiles = config.maxFiles ?? 10;
|
|
342
|
-
if (!existsSync(this.logDir)) {
|
|
343
|
-
mkdirSync(this.logDir, { recursive: true });
|
|
344
|
-
}
|
|
345
|
-
}
|
|
346
|
-
async log(metadata) {
|
|
347
|
-
if (!this.enabled) {
|
|
348
|
-
return;
|
|
349
|
-
}
|
|
350
|
-
if (LOG_LEVEL_PRIORITY[metadata.level] < LOG_LEVEL_PRIORITY[this.level]) {
|
|
351
|
-
return;
|
|
352
|
-
}
|
|
353
|
-
const message = formatJSON(metadata);
|
|
354
|
-
const filename = this.getLogFilename(metadata.timestamp);
|
|
355
|
-
if (this.currentFilename !== filename) {
|
|
356
|
-
await this.rotateStream(filename);
|
|
357
|
-
await this.cleanOldFiles();
|
|
358
|
-
} else if (this.currentFilename) {
|
|
359
|
-
await this.checkAndRotateBySize();
|
|
360
|
-
}
|
|
361
|
-
if (this.currentStream) {
|
|
362
|
-
return new Promise((resolve, reject) => {
|
|
363
|
-
this.currentStream.write(message + "\n", "utf-8", (error) => {
|
|
364
|
-
if (error) {
|
|
365
|
-
process.stderr.write(`[FileTransport] Failed to write log: ${error.message}
|
|
366
|
-
`);
|
|
367
|
-
reject(error);
|
|
368
|
-
} else {
|
|
369
|
-
resolve();
|
|
370
|
-
}
|
|
371
|
-
});
|
|
372
|
-
});
|
|
373
|
-
}
|
|
374
|
-
}
|
|
375
|
-
/**
|
|
376
|
-
* 스트림 교체 (날짜 변경 시)
|
|
377
|
-
*/
|
|
378
|
-
async rotateStream(filename) {
|
|
379
|
-
if (this.currentStream) {
|
|
380
|
-
await this.closeStream();
|
|
381
|
-
}
|
|
382
|
-
const filepath = join(this.logDir, filename);
|
|
383
|
-
this.currentStream = createWriteStream(filepath, {
|
|
384
|
-
flags: "a",
|
|
385
|
-
// append mode
|
|
386
|
-
encoding: "utf-8"
|
|
387
|
-
});
|
|
388
|
-
this.currentFilename = filename;
|
|
389
|
-
this.currentStream.on("error", (error) => {
|
|
390
|
-
process.stderr.write(`[FileTransport] Stream error: ${error.message}
|
|
391
|
-
`);
|
|
392
|
-
this.currentStream = null;
|
|
393
|
-
this.currentFilename = null;
|
|
394
|
-
});
|
|
395
|
-
}
|
|
396
|
-
/**
|
|
397
|
-
* 현재 스트림 닫기
|
|
398
|
-
*/
|
|
399
|
-
async closeStream() {
|
|
400
|
-
if (!this.currentStream) {
|
|
401
|
-
return;
|
|
402
|
-
}
|
|
403
|
-
return new Promise((resolve, reject) => {
|
|
404
|
-
this.currentStream.end((error) => {
|
|
405
|
-
if (error) {
|
|
406
|
-
reject(error);
|
|
407
|
-
} else {
|
|
408
|
-
this.currentStream = null;
|
|
409
|
-
this.currentFilename = null;
|
|
410
|
-
resolve();
|
|
411
|
-
}
|
|
412
|
-
});
|
|
413
|
-
});
|
|
414
|
-
}
|
|
415
|
-
/**
|
|
416
|
-
* 파일 크기 체크 및 크기 기반 로테이션
|
|
417
|
-
*/
|
|
418
|
-
async checkAndRotateBySize() {
|
|
419
|
-
if (!this.currentFilename) {
|
|
420
|
-
return;
|
|
421
|
-
}
|
|
422
|
-
const filepath = join(this.logDir, this.currentFilename);
|
|
423
|
-
if (!existsSync(filepath)) {
|
|
424
|
-
return;
|
|
425
|
-
}
|
|
426
|
-
try {
|
|
427
|
-
const stats = statSync(filepath);
|
|
428
|
-
if (stats.size >= this.maxFileSize) {
|
|
429
|
-
await this.rotateBySize();
|
|
430
|
-
}
|
|
431
|
-
} catch (error) {
|
|
432
|
-
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
433
|
-
process.stderr.write(`[FileTransport] Failed to check file size: ${errorMessage}
|
|
434
|
-
`);
|
|
435
|
-
}
|
|
436
|
-
}
|
|
437
|
-
/**
|
|
438
|
-
* 크기 기반 로테이션 수행
|
|
439
|
-
* 예: 2025-01-01.log -> 2025-01-01.1.log, 2025-01-01.1.log -> 2025-01-01.2.log
|
|
440
|
-
*/
|
|
441
|
-
async rotateBySize() {
|
|
442
|
-
if (!this.currentFilename) {
|
|
443
|
-
return;
|
|
444
|
-
}
|
|
445
|
-
await this.closeStream();
|
|
446
|
-
const baseName = this.currentFilename.replace(/\.log$/, "");
|
|
447
|
-
const files = readdirSync(this.logDir);
|
|
448
|
-
const relatedFiles = files.filter((file) => file.startsWith(baseName) && file.endsWith(".log")).sort().reverse();
|
|
449
|
-
for (const file of relatedFiles) {
|
|
450
|
-
const match = file.match(/\.(\d+)\.log$/);
|
|
451
|
-
if (match) {
|
|
452
|
-
const oldNum = parseInt(match[1], 10);
|
|
453
|
-
const newNum = oldNum + 1;
|
|
454
|
-
const oldPath = join(this.logDir, file);
|
|
455
|
-
const newPath2 = join(this.logDir, `${baseName}.${newNum}.log`);
|
|
456
|
-
try {
|
|
457
|
-
renameSync(oldPath, newPath2);
|
|
458
|
-
} catch (error) {
|
|
459
|
-
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
460
|
-
process.stderr.write(`[FileTransport] Failed to rotate file: ${errorMessage}
|
|
461
|
-
`);
|
|
462
|
-
}
|
|
463
|
-
}
|
|
464
|
-
}
|
|
465
|
-
const currentPath = join(this.logDir, this.currentFilename);
|
|
466
|
-
const newPath = join(this.logDir, `${baseName}.1.log`);
|
|
467
|
-
try {
|
|
468
|
-
if (existsSync(currentPath)) {
|
|
469
|
-
renameSync(currentPath, newPath);
|
|
470
|
-
}
|
|
471
|
-
} catch (error) {
|
|
472
|
-
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
473
|
-
process.stderr.write(`[FileTransport] Failed to rotate current file: ${errorMessage}
|
|
474
|
-
`);
|
|
475
|
-
}
|
|
476
|
-
await this.rotateStream(this.currentFilename);
|
|
477
|
-
}
|
|
478
|
-
/**
|
|
479
|
-
* 오래된 로그 파일 정리
|
|
480
|
-
* maxFiles 개수를 초과하는 로그 파일 삭제
|
|
481
|
-
*/
|
|
482
|
-
async cleanOldFiles() {
|
|
483
|
-
try {
|
|
484
|
-
if (!existsSync(this.logDir)) {
|
|
485
|
-
return;
|
|
486
|
-
}
|
|
487
|
-
const files = readdirSync(this.logDir);
|
|
488
|
-
const logFiles = files.filter((file) => file.endsWith(".log")).map((file) => {
|
|
489
|
-
const filepath = join(this.logDir, file);
|
|
490
|
-
const stats = statSync(filepath);
|
|
491
|
-
return { file, mtime: stats.mtime };
|
|
492
|
-
}).sort((a, b) => b.mtime.getTime() - a.mtime.getTime());
|
|
493
|
-
if (logFiles.length > this.maxFiles) {
|
|
494
|
-
const filesToDelete = logFiles.slice(this.maxFiles);
|
|
495
|
-
for (const { file } of filesToDelete) {
|
|
496
|
-
const filepath = join(this.logDir, file);
|
|
497
|
-
try {
|
|
498
|
-
unlinkSync(filepath);
|
|
499
|
-
} catch (error) {
|
|
500
|
-
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
501
|
-
process.stderr.write(`[FileTransport] Failed to delete old file "${file}": ${errorMessage}
|
|
502
|
-
`);
|
|
503
|
-
}
|
|
504
|
-
}
|
|
505
|
-
}
|
|
506
|
-
} catch (error) {
|
|
507
|
-
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
508
|
-
process.stderr.write(`[FileTransport] Failed to clean old files: ${errorMessage}
|
|
509
|
-
`);
|
|
510
|
-
}
|
|
511
|
-
}
|
|
512
|
-
/**
|
|
513
|
-
* 날짜별 로그 파일명 생성
|
|
514
|
-
*/
|
|
515
|
-
getLogFilename(date) {
|
|
516
|
-
const year = date.getFullYear();
|
|
517
|
-
const month = String(date.getMonth() + 1).padStart(2, "0");
|
|
518
|
-
const day = String(date.getDate()).padStart(2, "0");
|
|
519
|
-
return `${year}-${month}-${day}.log`;
|
|
520
|
-
}
|
|
521
|
-
async close() {
|
|
522
|
-
await this.closeStream();
|
|
523
|
-
}
|
|
524
|
-
};
|
|
525
|
-
function isFileLoggingEnabled() {
|
|
526
|
-
return process.env.LOGGER_FILE_ENABLED === "true";
|
|
527
|
-
}
|
|
528
|
-
function getDefaultLogLevel() {
|
|
529
|
-
const isProduction = process.env.NODE_ENV === "production";
|
|
530
|
-
const isDevelopment = process.env.NODE_ENV === "development";
|
|
531
|
-
if (isDevelopment) {
|
|
532
|
-
return "debug";
|
|
533
|
-
}
|
|
534
|
-
if (isProduction) {
|
|
535
|
-
return "info";
|
|
536
|
-
}
|
|
537
|
-
return "warn";
|
|
538
|
-
}
|
|
539
|
-
function getConsoleConfig() {
|
|
540
|
-
const isProduction = process.env.NODE_ENV === "production";
|
|
541
|
-
return {
|
|
542
|
-
level: "debug",
|
|
543
|
-
enabled: true,
|
|
544
|
-
colorize: !isProduction
|
|
545
|
-
// Dev: colored output, Production: plain text
|
|
546
|
-
};
|
|
547
|
-
}
|
|
548
|
-
function getFileConfig() {
|
|
549
|
-
const isProduction = process.env.NODE_ENV === "production";
|
|
550
|
-
return {
|
|
551
|
-
level: "info",
|
|
552
|
-
enabled: isProduction,
|
|
553
|
-
// File logging in production only
|
|
554
|
-
logDir: process.env.LOG_DIR || "./logs",
|
|
555
|
-
maxFileSize: 10 * 1024 * 1024,
|
|
556
|
-
// 10MB
|
|
557
|
-
maxFiles: 10
|
|
558
|
-
};
|
|
559
|
-
}
|
|
560
|
-
function validateDirectoryWritable(dirPath) {
|
|
561
|
-
if (!existsSync(dirPath)) {
|
|
562
|
-
try {
|
|
563
|
-
mkdirSync(dirPath, { recursive: true });
|
|
564
|
-
} catch (error) {
|
|
565
|
-
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
566
|
-
throw new Error(`Failed to create log directory "${dirPath}": ${errorMessage}`);
|
|
567
|
-
}
|
|
568
|
-
}
|
|
569
|
-
try {
|
|
570
|
-
accessSync(dirPath, constants.W_OK);
|
|
571
|
-
} catch {
|
|
572
|
-
throw new Error(`Log directory "${dirPath}" is not writable. Please check permissions.`);
|
|
573
|
-
}
|
|
574
|
-
const testFile = join(dirPath, ".logger-write-test");
|
|
575
|
-
try {
|
|
576
|
-
writeFileSync(testFile, "test", "utf-8");
|
|
577
|
-
unlinkSync(testFile);
|
|
578
|
-
} catch (error) {
|
|
579
|
-
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
580
|
-
throw new Error(`Cannot write to log directory "${dirPath}": ${errorMessage}`);
|
|
581
|
-
}
|
|
582
|
-
}
|
|
583
|
-
function validateFileConfig() {
|
|
584
|
-
if (!isFileLoggingEnabled()) {
|
|
585
|
-
return;
|
|
586
|
-
}
|
|
587
|
-
const logDir = process.env.LOG_DIR;
|
|
588
|
-
if (!logDir) {
|
|
589
|
-
throw new Error(
|
|
590
|
-
"LOG_DIR environment variable is required when LOGGER_FILE_ENABLED=true. Example: LOG_DIR=/var/log/myapp"
|
|
591
|
-
);
|
|
592
|
-
}
|
|
593
|
-
validateDirectoryWritable(logDir);
|
|
594
|
-
}
|
|
595
|
-
function validateSlackConfig() {
|
|
596
|
-
const webhookUrl = process.env.SLACK_WEBHOOK_URL;
|
|
597
|
-
if (!webhookUrl) {
|
|
598
|
-
return;
|
|
599
|
-
}
|
|
600
|
-
if (!webhookUrl.startsWith("https://hooks.slack.com/")) {
|
|
601
|
-
throw new Error(
|
|
602
|
-
`Invalid SLACK_WEBHOOK_URL: "${webhookUrl}". Slack webhook URLs must start with "https://hooks.slack.com/"`
|
|
603
|
-
);
|
|
604
|
-
}
|
|
605
|
-
}
|
|
606
|
-
function validateEmailConfig() {
|
|
607
|
-
const smtpHost = process.env.SMTP_HOST;
|
|
608
|
-
const smtpPort = process.env.SMTP_PORT;
|
|
609
|
-
const emailFrom = process.env.EMAIL_FROM;
|
|
610
|
-
const emailTo = process.env.EMAIL_TO;
|
|
611
|
-
const hasAnyEmailConfig = smtpHost || smtpPort || emailFrom || emailTo;
|
|
612
|
-
if (!hasAnyEmailConfig) {
|
|
613
|
-
return;
|
|
614
|
-
}
|
|
615
|
-
const missingFields = [];
|
|
616
|
-
if (!smtpHost) missingFields.push("SMTP_HOST");
|
|
617
|
-
if (!smtpPort) missingFields.push("SMTP_PORT");
|
|
618
|
-
if (!emailFrom) missingFields.push("EMAIL_FROM");
|
|
619
|
-
if (!emailTo) missingFields.push("EMAIL_TO");
|
|
620
|
-
if (missingFields.length > 0) {
|
|
621
|
-
throw new Error(
|
|
622
|
-
`Email transport configuration incomplete. Missing: ${missingFields.join(", ")}. Either set all required fields or remove all email configuration.`
|
|
623
|
-
);
|
|
624
|
-
}
|
|
625
|
-
const port = parseInt(smtpPort, 10);
|
|
626
|
-
if (isNaN(port) || port < 1 || port > 65535) {
|
|
627
|
-
throw new Error(
|
|
628
|
-
`Invalid SMTP_PORT: "${smtpPort}". Must be a number between 1 and 65535.`
|
|
629
|
-
);
|
|
630
|
-
}
|
|
631
|
-
const emailRegex = /^[^\s@]+@[^\s@]+\.[^\s@]+$/;
|
|
632
|
-
if (!emailRegex.test(emailFrom)) {
|
|
633
|
-
throw new Error(`Invalid EMAIL_FROM format: "${emailFrom}"`);
|
|
634
|
-
}
|
|
635
|
-
const recipients = emailTo.split(",").map((e) => e.trim());
|
|
636
|
-
for (const email of recipients) {
|
|
637
|
-
if (!emailRegex.test(email)) {
|
|
638
|
-
throw new Error(`Invalid email address in EMAIL_TO: "${email}"`);
|
|
639
|
-
}
|
|
640
|
-
}
|
|
641
|
-
}
|
|
642
|
-
function validateEnvironment() {
|
|
643
|
-
const nodeEnv = process.env.NODE_ENV;
|
|
644
|
-
if (!nodeEnv) {
|
|
645
|
-
process.stderr.write(
|
|
646
|
-
"[Logger] Warning: NODE_ENV is not set. Defaulting to test environment.\n"
|
|
647
|
-
);
|
|
648
|
-
}
|
|
649
|
-
}
|
|
650
|
-
function validateConfig() {
|
|
651
|
-
try {
|
|
652
|
-
validateEnvironment();
|
|
653
|
-
validateFileConfig();
|
|
654
|
-
validateSlackConfig();
|
|
655
|
-
validateEmailConfig();
|
|
656
|
-
} catch (error) {
|
|
657
|
-
if (error instanceof Error) {
|
|
658
|
-
throw new Error(`[Logger] Configuration validation failed: ${error.message}`);
|
|
659
|
-
}
|
|
660
|
-
throw error;
|
|
661
|
-
}
|
|
662
|
-
}
|
|
663
|
-
|
|
664
|
-
// src/logger/factory.ts
|
|
665
|
-
function initializeTransports() {
|
|
666
|
-
const transports = [];
|
|
667
|
-
const consoleConfig = getConsoleConfig();
|
|
668
|
-
transports.push(new ConsoleTransport(consoleConfig));
|
|
669
|
-
const fileConfig = getFileConfig();
|
|
670
|
-
if (fileConfig.enabled) {
|
|
671
|
-
transports.push(new FileTransport(fileConfig));
|
|
672
|
-
}
|
|
673
|
-
return transports;
|
|
674
|
-
}
|
|
675
|
-
function initializeLogger() {
|
|
676
|
-
validateConfig();
|
|
677
|
-
return new Logger({
|
|
678
|
-
level: getDefaultLogLevel(),
|
|
679
|
-
transports: initializeTransports()
|
|
680
|
-
});
|
|
681
|
-
}
|
|
682
|
-
var logger = initializeLogger();
|
|
683
|
-
|
|
684
|
-
// src/env/config.ts
|
|
685
|
-
var ENV_FILE_PRIORITY = [
|
|
686
|
-
".env",
|
|
687
|
-
// Base configuration (lowest priority)
|
|
688
|
-
".env.{NODE_ENV}",
|
|
689
|
-
// Environment-specific
|
|
690
|
-
".env.local",
|
|
691
|
-
// Local overrides (excluded in test)
|
|
692
|
-
".env.{NODE_ENV}.local"
|
|
693
|
-
// Local environment-specific (highest priority)
|
|
694
|
-
];
|
|
695
|
-
var TEST_ONLY_FILES = [
|
|
696
|
-
".env.test",
|
|
697
|
-
".env.test.local"
|
|
698
|
-
];
|
|
699
|
-
|
|
700
|
-
// src/env/loader.ts
|
|
701
|
-
var envLogger = logger.child("environment");
|
|
702
|
-
var environmentLoaded = false;
|
|
703
|
-
var cachedLoadResult;
|
|
704
|
-
function buildFileList(basePath, nodeEnv) {
|
|
705
|
-
const files = [];
|
|
706
|
-
if (!nodeEnv) {
|
|
707
|
-
files.push(join(basePath, ".env"));
|
|
708
|
-
files.push(join(basePath, ".env.local"));
|
|
709
|
-
return files;
|
|
710
|
-
}
|
|
711
|
-
for (const pattern of ENV_FILE_PRIORITY) {
|
|
712
|
-
const fileName = pattern.replace("{NODE_ENV}", nodeEnv);
|
|
713
|
-
if (nodeEnv === "test" && fileName === ".env.local") {
|
|
714
|
-
continue;
|
|
715
|
-
}
|
|
716
|
-
if (nodeEnv === "local" && pattern === ".env.local") {
|
|
717
|
-
continue;
|
|
718
|
-
}
|
|
719
|
-
if (nodeEnv !== "test" && TEST_ONLY_FILES.includes(fileName)) {
|
|
720
|
-
continue;
|
|
721
|
-
}
|
|
722
|
-
files.push(join(basePath, fileName));
|
|
723
|
-
}
|
|
724
|
-
return files;
|
|
725
|
-
}
|
|
726
|
-
function loadSingleFile(filePath, debug) {
|
|
727
|
-
if (!existsSync(filePath)) {
|
|
728
|
-
if (debug) {
|
|
729
|
-
envLogger.debug("Environment file not found (optional)", {
|
|
730
|
-
path: filePath
|
|
731
|
-
});
|
|
732
|
-
}
|
|
733
|
-
return { success: false, parsed: {}, error: "File not found" };
|
|
734
|
-
}
|
|
735
|
-
try {
|
|
736
|
-
const result = config({ path: filePath });
|
|
737
|
-
if (result.error) {
|
|
738
|
-
envLogger.warn("Failed to parse environment file", {
|
|
739
|
-
path: filePath,
|
|
740
|
-
error: result.error.message
|
|
741
|
-
});
|
|
742
|
-
return {
|
|
743
|
-
success: false,
|
|
744
|
-
parsed: {},
|
|
745
|
-
error: result.error.message
|
|
746
|
-
};
|
|
747
|
-
}
|
|
748
|
-
const parsed = result.parsed || {};
|
|
749
|
-
if (debug) {
|
|
750
|
-
envLogger.debug("Environment file loaded successfully", {
|
|
751
|
-
path: filePath,
|
|
752
|
-
variables: Object.keys(parsed),
|
|
753
|
-
count: Object.keys(parsed).length
|
|
754
|
-
});
|
|
755
|
-
}
|
|
756
|
-
return { success: true, parsed };
|
|
757
|
-
} catch (error) {
|
|
758
|
-
const message = error instanceof Error ? error.message : "Unknown error";
|
|
759
|
-
envLogger.error("Error loading environment file", {
|
|
760
|
-
path: filePath,
|
|
761
|
-
error: message
|
|
762
|
-
});
|
|
763
|
-
return { success: false, parsed: {}, error: message };
|
|
764
|
-
}
|
|
765
|
-
}
|
|
766
|
-
function validateRequiredVars(required, debug) {
|
|
767
|
-
const missing = [];
|
|
768
|
-
for (const varName of required) {
|
|
769
|
-
if (!process.env[varName]) {
|
|
770
|
-
missing.push(varName);
|
|
771
|
-
}
|
|
772
|
-
}
|
|
773
|
-
if (missing.length > 0) {
|
|
774
|
-
const error = `Required environment variables missing: ${missing.join(", ")}`;
|
|
775
|
-
envLogger.error("Environment validation failed", {
|
|
776
|
-
missing,
|
|
777
|
-
required
|
|
778
|
-
});
|
|
779
|
-
throw new Error(error);
|
|
780
|
-
}
|
|
781
|
-
if (debug) {
|
|
782
|
-
envLogger.debug("Required environment variables validated", {
|
|
783
|
-
required,
|
|
784
|
-
allPresent: true
|
|
785
|
-
});
|
|
786
|
-
}
|
|
787
|
-
}
|
|
788
|
-
function loadEnvironment(options = {}) {
|
|
789
|
-
const {
|
|
790
|
-
basePath = process.cwd(),
|
|
791
|
-
customPaths = [],
|
|
792
|
-
debug = false,
|
|
793
|
-
nodeEnv = process.env.NODE_ENV || "",
|
|
794
|
-
required = [],
|
|
795
|
-
useCache = true
|
|
796
|
-
} = options;
|
|
797
|
-
if (useCache && environmentLoaded && cachedLoadResult) {
|
|
798
|
-
if (debug) {
|
|
799
|
-
envLogger.debug("Returning cached environment", {
|
|
800
|
-
loaded: cachedLoadResult.loaded.length,
|
|
801
|
-
variables: Object.keys(cachedLoadResult.parsed).length
|
|
802
|
-
});
|
|
803
|
-
}
|
|
804
|
-
return cachedLoadResult;
|
|
805
|
-
}
|
|
806
|
-
if (debug) {
|
|
807
|
-
envLogger.debug("Loading environment variables", {
|
|
808
|
-
basePath,
|
|
809
|
-
nodeEnv,
|
|
810
|
-
customPaths,
|
|
811
|
-
required
|
|
812
|
-
});
|
|
813
|
-
}
|
|
814
|
-
const result = {
|
|
815
|
-
success: true,
|
|
816
|
-
loaded: [],
|
|
817
|
-
failed: [],
|
|
818
|
-
parsed: {},
|
|
819
|
-
warnings: []
|
|
820
|
-
};
|
|
821
|
-
const standardFiles = buildFileList(basePath, nodeEnv);
|
|
822
|
-
const allFiles = [...standardFiles, ...customPaths];
|
|
823
|
-
if (debug) {
|
|
824
|
-
envLogger.debug("Environment files to load", {
|
|
825
|
-
standardFiles,
|
|
826
|
-
customPaths,
|
|
827
|
-
total: allFiles.length
|
|
828
|
-
});
|
|
829
|
-
}
|
|
830
|
-
const reversedFiles = [...allFiles].reverse();
|
|
831
|
-
for (const filePath of reversedFiles) {
|
|
832
|
-
const fileResult = loadSingleFile(filePath, debug);
|
|
833
|
-
if (fileResult.success) {
|
|
834
|
-
result.loaded.push(filePath);
|
|
835
|
-
Object.assign(result.parsed, fileResult.parsed);
|
|
836
|
-
if (fileResult.parsed["NODE_ENV"]) {
|
|
837
|
-
const fileName = filePath.split("/").pop() || filePath;
|
|
838
|
-
result.warnings.push(
|
|
839
|
-
`NODE_ENV found in ${fileName}. It's recommended to set NODE_ENV via CLI (e.g., 'spfn dev', 'spfn build') instead of .env files for consistent environment behavior.`
|
|
840
|
-
);
|
|
841
|
-
}
|
|
842
|
-
} else if (fileResult.error) {
|
|
843
|
-
result.failed.push({
|
|
844
|
-
path: filePath,
|
|
845
|
-
reason: fileResult.error
|
|
846
|
-
});
|
|
847
|
-
}
|
|
848
|
-
}
|
|
849
|
-
if (debug || result.loaded.length > 0) {
|
|
850
|
-
envLogger.info("Environment loading complete", {
|
|
851
|
-
loaded: result.loaded.length,
|
|
852
|
-
failed: result.failed.length,
|
|
853
|
-
variables: Object.keys(result.parsed).length,
|
|
854
|
-
files: result.loaded
|
|
855
|
-
});
|
|
856
|
-
}
|
|
857
|
-
if (required.length > 0) {
|
|
858
|
-
try {
|
|
859
|
-
validateRequiredVars(required, debug);
|
|
860
|
-
} catch (error) {
|
|
861
|
-
result.success = false;
|
|
862
|
-
result.errors = [
|
|
863
|
-
error instanceof Error ? error.message : "Validation failed"
|
|
864
|
-
];
|
|
865
|
-
throw error;
|
|
866
|
-
}
|
|
867
|
-
}
|
|
868
|
-
if (result.warnings.length > 0) {
|
|
869
|
-
for (const warning of result.warnings) {
|
|
870
|
-
envLogger.warn(warning);
|
|
871
|
-
}
|
|
872
|
-
}
|
|
873
|
-
environmentLoaded = true;
|
|
874
|
-
cachedLoadResult = result;
|
|
875
|
-
return result;
|
|
876
|
-
}
|
|
877
|
-
|
|
878
|
-
// src/errors/database-errors.ts
|
|
879
|
-
var DatabaseError = class extends Error {
|
|
880
|
-
statusCode;
|
|
881
|
-
details;
|
|
882
|
-
timestamp;
|
|
883
|
-
constructor(message, statusCode = 500, details) {
|
|
884
|
-
super(message);
|
|
885
|
-
this.name = "DatabaseError";
|
|
886
|
-
this.statusCode = statusCode;
|
|
887
|
-
this.details = details;
|
|
888
|
-
this.timestamp = /* @__PURE__ */ new Date();
|
|
889
|
-
Error.captureStackTrace(this, this.constructor);
|
|
890
|
-
}
|
|
891
|
-
/**
|
|
892
|
-
* Serialize error for API response
|
|
893
|
-
*/
|
|
894
|
-
toJSON() {
|
|
895
|
-
return {
|
|
896
|
-
name: this.name,
|
|
897
|
-
message: this.message,
|
|
898
|
-
statusCode: this.statusCode,
|
|
899
|
-
details: this.details,
|
|
900
|
-
timestamp: this.timestamp.toISOString()
|
|
901
|
-
};
|
|
902
|
-
}
|
|
903
|
-
};
|
|
904
|
-
var ConnectionError = class extends DatabaseError {
|
|
905
|
-
constructor(message, details) {
|
|
906
|
-
super(message, 503, details);
|
|
907
|
-
this.name = "ConnectionError";
|
|
908
|
-
}
|
|
909
|
-
};
|
|
910
|
-
var QueryError = class extends DatabaseError {
|
|
911
|
-
constructor(message, statusCode = 500, details) {
|
|
912
|
-
super(message, statusCode, details);
|
|
913
|
-
this.name = "QueryError";
|
|
914
|
-
}
|
|
915
|
-
};
|
|
916
|
-
var ConstraintViolationError = class extends QueryError {
|
|
917
|
-
constructor(message, details) {
|
|
918
|
-
super(message, 400, details);
|
|
919
|
-
this.name = "ConstraintViolationError";
|
|
920
|
-
}
|
|
921
|
-
};
|
|
922
|
-
var TransactionError = class extends DatabaseError {
|
|
923
|
-
constructor(message, statusCode = 500, details) {
|
|
924
|
-
super(message, statusCode, details);
|
|
925
|
-
this.name = "TransactionError";
|
|
926
|
-
}
|
|
927
|
-
};
|
|
928
|
-
var DeadlockError = class extends TransactionError {
|
|
929
|
-
constructor(message, details) {
|
|
930
|
-
super(message, 409, details);
|
|
931
|
-
this.name = "DeadlockError";
|
|
932
|
-
}
|
|
933
|
-
};
|
|
934
|
-
var DuplicateEntryError = class extends QueryError {
|
|
935
|
-
constructor(field, value) {
|
|
936
|
-
super(`${field} '${value}' already exists`, 409, { field, value });
|
|
937
|
-
this.name = "DuplicateEntryError";
|
|
938
|
-
}
|
|
939
|
-
};
|
|
940
|
-
|
|
941
|
-
// src/db/postgres-errors.ts
|
|
942
16
|
function parseUniqueViolation(message) {
|
|
943
17
|
const patterns = [
|
|
944
18
|
// Standard format: Key (field)=(value)
|
|
@@ -976,24 +50,24 @@ function fromPostgresError(error) {
|
|
|
976
50
|
case "08007":
|
|
977
51
|
// transaction_resolution_unknown
|
|
978
52
|
case "08P01":
|
|
979
|
-
return new ConnectionError(message, { code });
|
|
53
|
+
return new ConnectionError({ message, details: { code } });
|
|
980
54
|
// Class 23 — Integrity Constraint Violation
|
|
981
55
|
case "23000":
|
|
982
56
|
// integrity_constraint_violation
|
|
983
57
|
case "23001":
|
|
984
|
-
return new ConstraintViolationError(message, { code, constraint: "integrity" });
|
|
58
|
+
return new ConstraintViolationError({ message, details: { code, constraint: "integrity" } });
|
|
985
59
|
case "23502":
|
|
986
|
-
return new ConstraintViolationError(message, { code, constraint: "not_null" });
|
|
60
|
+
return new ConstraintViolationError({ message, details: { code, constraint: "not_null" } });
|
|
987
61
|
case "23503":
|
|
988
|
-
return new ConstraintViolationError(message, { code, constraint: "foreign_key" });
|
|
62
|
+
return new ConstraintViolationError({ message, details: { code, constraint: "foreign_key" } });
|
|
989
63
|
case "23505":
|
|
990
64
|
const parsed = parseUniqueViolation(message);
|
|
991
65
|
if (parsed) {
|
|
992
|
-
return new DuplicateEntryError(parsed.field, parsed.value);
|
|
66
|
+
return new DuplicateEntryError({ field: parsed.field, value: parsed.value });
|
|
993
67
|
}
|
|
994
|
-
return new DuplicateEntryError("field", "value");
|
|
68
|
+
return new DuplicateEntryError({ field: "field", value: "value" });
|
|
995
69
|
case "23514":
|
|
996
|
-
return new ConstraintViolationError(message, { code, constraint: "check" });
|
|
70
|
+
return new ConstraintViolationError({ message, details: { code, constraint: "check" } });
|
|
997
71
|
// Class 40 — Transaction Rollback
|
|
998
72
|
case "40000":
|
|
999
73
|
// transaction_rollback
|
|
@@ -1002,9 +76,9 @@ function fromPostgresError(error) {
|
|
|
1002
76
|
case "40002":
|
|
1003
77
|
// transaction_integrity_constraint_violation
|
|
1004
78
|
case "40003":
|
|
1005
|
-
return new TransactionError(message, 500, { code });
|
|
79
|
+
return new TransactionError({ message, statusCode: 500, details: { code } });
|
|
1006
80
|
case "40P01":
|
|
1007
|
-
return new DeadlockError(message, { code });
|
|
81
|
+
return new DeadlockError({ message, details: { code } });
|
|
1008
82
|
// Class 42 — Syntax Error or Access Rule Violation
|
|
1009
83
|
case "42000":
|
|
1010
84
|
// syntax_error_or_access_rule_violation
|
|
@@ -1027,7 +101,7 @@ function fromPostgresError(error) {
|
|
|
1027
101
|
case "42P01":
|
|
1028
102
|
// undefined_table
|
|
1029
103
|
case "42P02":
|
|
1030
|
-
return new QueryError(message, 400, { code });
|
|
104
|
+
return new QueryError({ message, statusCode: 400, details: { code } });
|
|
1031
105
|
// Class 53 — Insufficient Resources
|
|
1032
106
|
case "53000":
|
|
1033
107
|
// insufficient_resources
|
|
@@ -1036,7 +110,7 @@ function fromPostgresError(error) {
|
|
|
1036
110
|
case "53200":
|
|
1037
111
|
// out_of_memory
|
|
1038
112
|
case "53300":
|
|
1039
|
-
return new ConnectionError(message, { code });
|
|
113
|
+
return new ConnectionError({ message, details: { code } });
|
|
1040
114
|
// Class 57 — Operator Intervention
|
|
1041
115
|
case "57000":
|
|
1042
116
|
// operator_intervention
|
|
@@ -1047,76 +121,209 @@ function fromPostgresError(error) {
|
|
|
1047
121
|
case "57P02":
|
|
1048
122
|
// crash_shutdown
|
|
1049
123
|
case "57P03":
|
|
1050
|
-
return new ConnectionError(message, { code });
|
|
124
|
+
return new ConnectionError({ message, details: { code } });
|
|
1051
125
|
// Default: Unknown error
|
|
1052
126
|
default:
|
|
1053
|
-
return new QueryError(message, 500, { code });
|
|
127
|
+
return new QueryError({ message, statusCode: 500, details: { code } });
|
|
1054
128
|
}
|
|
1055
129
|
}
|
|
1056
130
|
|
|
1057
131
|
// src/db/manager/connection.ts
|
|
1058
|
-
var dbLogger = logger.child("database");
|
|
132
|
+
var dbLogger = logger.child("@spfn/core:database");
|
|
133
|
+
var DEFAULT_CONNECT_TIMEOUT = 10;
|
|
1059
134
|
function delay(ms) {
|
|
1060
135
|
return new Promise((resolve) => setTimeout(resolve, ms));
|
|
1061
136
|
}
|
|
137
|
+
function maskConnectionString(connectionString) {
|
|
138
|
+
try {
|
|
139
|
+
const url = new URL(connectionString);
|
|
140
|
+
if (url.password) {
|
|
141
|
+
return connectionString.replace(`:${url.password}@`, ":***@");
|
|
142
|
+
}
|
|
143
|
+
return connectionString;
|
|
144
|
+
} catch {
|
|
145
|
+
return connectionString.replace(/(:\/\/[^:/@]+:)([^@]+)(@)/, "$1***$3");
|
|
146
|
+
}
|
|
147
|
+
}
|
|
148
|
+
function isAuthenticationError(error) {
|
|
149
|
+
const message = error.message.toLowerCase();
|
|
150
|
+
return message.includes("password authentication failed") || message.includes("no password supplied") || message.includes("authentication failed") || message.includes("invalid authorization");
|
|
151
|
+
}
|
|
152
|
+
function isDatabaseNotFoundError(error) {
|
|
153
|
+
const message = error.message.toLowerCase();
|
|
154
|
+
return message.includes("database") && message.includes("does not exist");
|
|
155
|
+
}
|
|
156
|
+
function isSSLError(error) {
|
|
157
|
+
const message = error.message.toLowerCase();
|
|
158
|
+
return message.includes("ssl") || message.includes("tls") || message.includes("certificate") || message.includes("self signed certificate") || message.includes("unable to verify");
|
|
159
|
+
}
|
|
160
|
+
function isNonRetryableError(error) {
|
|
161
|
+
return isAuthenticationError(error) || isDatabaseNotFoundError(error) || isSSLError(error);
|
|
162
|
+
}
|
|
163
|
+
function validateRetryConfig(retryConfig) {
|
|
164
|
+
if (retryConfig.maxRetries < 0) {
|
|
165
|
+
throw new ConnectionError({ message: `maxRetries must be non-negative, got ${retryConfig.maxRetries}` });
|
|
166
|
+
}
|
|
167
|
+
if (retryConfig.initialDelay <= 0) {
|
|
168
|
+
throw new ConnectionError({ message: `initialDelay must be positive, got ${retryConfig.initialDelay}` });
|
|
169
|
+
}
|
|
170
|
+
if (retryConfig.factor <= 0) {
|
|
171
|
+
throw new ConnectionError({ message: `factor must be positive, got ${retryConfig.factor}` });
|
|
172
|
+
}
|
|
173
|
+
if (retryConfig.maxDelay <= 0) {
|
|
174
|
+
throw new ConnectionError({ message: `maxDelay must be positive, got ${retryConfig.maxDelay}` });
|
|
175
|
+
}
|
|
176
|
+
}
|
|
177
|
+
function validatePoolConfig(poolConfig) {
|
|
178
|
+
if (poolConfig.max <= 0) {
|
|
179
|
+
throw new ConnectionError({ message: `pool max must be positive, got ${poolConfig.max}` });
|
|
180
|
+
}
|
|
181
|
+
}
|
|
1062
182
|
async function createDatabaseConnection(connectionString, poolConfig, retryConfig) {
|
|
183
|
+
if (!connectionString) {
|
|
184
|
+
throw new ConnectionError({ message: "Connection string must be a non-empty string" });
|
|
185
|
+
}
|
|
186
|
+
validateRetryConfig(retryConfig);
|
|
187
|
+
validatePoolConfig(poolConfig);
|
|
1063
188
|
let lastError;
|
|
189
|
+
let client;
|
|
1064
190
|
for (let attempt = 0; attempt <= retryConfig.maxRetries; attempt++) {
|
|
1065
191
|
try {
|
|
1066
|
-
|
|
192
|
+
client = postgres(connectionString, {
|
|
1067
193
|
max: poolConfig.max,
|
|
1068
|
-
idle_timeout: poolConfig.idleTimeout
|
|
194
|
+
idle_timeout: poolConfig.idleTimeout,
|
|
195
|
+
connect_timeout: DEFAULT_CONNECT_TIMEOUT
|
|
1069
196
|
});
|
|
1070
197
|
await client`SELECT 1 as test`;
|
|
1071
198
|
if (attempt > 0) {
|
|
1072
|
-
dbLogger.info(
|
|
199
|
+
dbLogger.info(
|
|
200
|
+
"Database connected successfully",
|
|
201
|
+
{ retriesNeeded: attempt }
|
|
202
|
+
);
|
|
1073
203
|
} else {
|
|
1074
204
|
dbLogger.info("Database connected successfully");
|
|
1075
205
|
}
|
|
1076
206
|
return client;
|
|
1077
207
|
} catch (error) {
|
|
208
|
+
if (client) {
|
|
209
|
+
try {
|
|
210
|
+
await client.end();
|
|
211
|
+
} catch {
|
|
212
|
+
}
|
|
213
|
+
client = void 0;
|
|
214
|
+
}
|
|
1078
215
|
lastError = fromPostgresError(error);
|
|
216
|
+
if (isNonRetryableError(lastError)) {
|
|
217
|
+
dbLogger.error(
|
|
218
|
+
"Cannot connect to database (non-retryable error)",
|
|
219
|
+
lastError,
|
|
220
|
+
{
|
|
221
|
+
connectionString: maskConnectionString(connectionString),
|
|
222
|
+
poolConfig: {
|
|
223
|
+
max: poolConfig.max,
|
|
224
|
+
idleTimeout: poolConfig.idleTimeout,
|
|
225
|
+
connectTimeout: DEFAULT_CONNECT_TIMEOUT
|
|
226
|
+
},
|
|
227
|
+
reason: isAuthenticationError(lastError) ? "authentication_failed" : isDatabaseNotFoundError(lastError) ? "database_not_found" : "ssl_error"
|
|
228
|
+
}
|
|
229
|
+
);
|
|
230
|
+
throw new ConnectionError({
|
|
231
|
+
message: `Cannot connect to database: ${lastError.message}`
|
|
232
|
+
});
|
|
233
|
+
}
|
|
1079
234
|
if (attempt < retryConfig.maxRetries) {
|
|
1080
|
-
const
|
|
235
|
+
const baseDelay = Math.min(
|
|
1081
236
|
retryConfig.initialDelay * Math.pow(retryConfig.factor, attempt),
|
|
1082
237
|
retryConfig.maxDelay
|
|
1083
238
|
);
|
|
239
|
+
const jitter = 0.5 + Math.random() * 0.5;
|
|
240
|
+
const delayMs = Math.floor(baseDelay * jitter);
|
|
1084
241
|
dbLogger.warn(
|
|
1085
|
-
|
|
242
|
+
"Database connection failed, retrying...",
|
|
1086
243
|
lastError,
|
|
1087
244
|
{
|
|
1088
245
|
attempt: attempt + 1,
|
|
1089
|
-
|
|
1090
|
-
delayMs
|
|
246
|
+
totalAttempts: retryConfig.maxRetries + 1,
|
|
247
|
+
nextRetryIn: delayMs,
|
|
248
|
+
connectionString: maskConnectionString(connectionString),
|
|
249
|
+
poolConfig: {
|
|
250
|
+
max: poolConfig.max,
|
|
251
|
+
idleTimeout: poolConfig.idleTimeout,
|
|
252
|
+
connectTimeout: DEFAULT_CONNECT_TIMEOUT
|
|
253
|
+
}
|
|
1091
254
|
}
|
|
1092
255
|
);
|
|
1093
256
|
await delay(delayMs);
|
|
1094
257
|
}
|
|
1095
258
|
}
|
|
1096
259
|
}
|
|
1097
|
-
|
|
1098
|
-
|
|
260
|
+
if (!lastError) {
|
|
261
|
+
throw new ConnectionError({
|
|
262
|
+
message: "Unexpected error: no error recorded after failed connection attempts"
|
|
263
|
+
});
|
|
264
|
+
}
|
|
265
|
+
dbLogger.error(
|
|
266
|
+
"Failed to connect to database after all retries",
|
|
267
|
+
lastError,
|
|
268
|
+
{
|
|
269
|
+
totalAttempts: retryConfig.maxRetries + 1,
|
|
270
|
+
connectionString: maskConnectionString(connectionString),
|
|
271
|
+
poolConfig: {
|
|
272
|
+
max: poolConfig.max,
|
|
273
|
+
idleTimeout: poolConfig.idleTimeout,
|
|
274
|
+
connectTimeout: DEFAULT_CONNECT_TIMEOUT
|
|
275
|
+
},
|
|
276
|
+
retryConfig: {
|
|
277
|
+
maxRetries: retryConfig.maxRetries,
|
|
278
|
+
initialDelay: retryConfig.initialDelay,
|
|
279
|
+
factor: retryConfig.factor,
|
|
280
|
+
maxDelay: retryConfig.maxDelay
|
|
281
|
+
}
|
|
282
|
+
}
|
|
283
|
+
);
|
|
284
|
+
throw new ConnectionError({
|
|
285
|
+
message: `Failed to connect to database after ${retryConfig.maxRetries + 1} attempts: ${lastError.message}`
|
|
286
|
+
});
|
|
1099
287
|
}
|
|
1100
288
|
async function checkConnection(client) {
|
|
1101
289
|
try {
|
|
1102
290
|
await client`SELECT 1 as health_check`;
|
|
1103
291
|
return true;
|
|
1104
292
|
} catch (error) {
|
|
1105
|
-
|
|
293
|
+
const errorObj = fromPostgresError(error);
|
|
294
|
+
dbLogger.error(
|
|
295
|
+
"Database health check failed",
|
|
296
|
+
errorObj,
|
|
297
|
+
{ errorType: errorObj.name }
|
|
298
|
+
);
|
|
1106
299
|
return false;
|
|
1107
300
|
}
|
|
1108
301
|
}
|
|
1109
|
-
|
|
1110
|
-
// src/db/manager/config.ts
|
|
1111
302
|
function parseEnvNumber(key, prodDefault, devDefault) {
|
|
1112
303
|
const isProduction = process.env.NODE_ENV === "production";
|
|
1113
|
-
const
|
|
1114
|
-
|
|
304
|
+
const defaultValue = isProduction ? prodDefault : devDefault;
|
|
305
|
+
const value = process.env[key];
|
|
306
|
+
if (value === void 0) {
|
|
307
|
+
return defaultValue;
|
|
308
|
+
}
|
|
309
|
+
try {
|
|
310
|
+
return parseNumber(value, { min: 0, integer: true });
|
|
311
|
+
} catch (error) {
|
|
312
|
+
const message = error instanceof Error ? error.message : String(error);
|
|
313
|
+
throw new Error(`${key}: ${message}`);
|
|
314
|
+
}
|
|
1115
315
|
}
|
|
1116
316
|
function parseEnvBoolean(key, defaultValue) {
|
|
1117
317
|
const value = process.env[key];
|
|
1118
|
-
if (value === void 0)
|
|
1119
|
-
|
|
318
|
+
if (value === void 0) {
|
|
319
|
+
return defaultValue;
|
|
320
|
+
}
|
|
321
|
+
try {
|
|
322
|
+
return parseBoolean(value);
|
|
323
|
+
} catch (error) {
|
|
324
|
+
const message = error instanceof Error ? error.message : String(error);
|
|
325
|
+
throw new Error(`${key}: ${message}`);
|
|
326
|
+
}
|
|
1120
327
|
}
|
|
1121
328
|
function getPoolConfig(options) {
|
|
1122
329
|
return {
|
|
@@ -1151,48 +358,71 @@ function buildMonitoringConfig(options) {
|
|
|
1151
358
|
}
|
|
1152
359
|
|
|
1153
360
|
// src/db/manager/factory.ts
|
|
1154
|
-
var dbLogger2 = logger.child("database");
|
|
361
|
+
var dbLogger2 = logger.child("@spfn/core:database");
|
|
1155
362
|
function hasDatabaseConfig() {
|
|
1156
|
-
return
|
|
363
|
+
return env.DATABASE_URL !== void 0 || env.DATABASE_WRITE_URL !== void 0 || env.DATABASE_READ_URL !== void 0;
|
|
1157
364
|
}
|
|
1158
365
|
function detectDatabasePattern() {
|
|
1159
|
-
|
|
366
|
+
const DATABASE_WRITE_URL = env.DATABASE_WRITE_URL;
|
|
367
|
+
const DATABASE_READ_URL = env.DATABASE_READ_URL;
|
|
368
|
+
const DATABASE_URL = env.DATABASE_URL;
|
|
369
|
+
if (DATABASE_WRITE_URL && DATABASE_READ_URL) {
|
|
1160
370
|
return {
|
|
1161
371
|
type: "write-read",
|
|
1162
|
-
write:
|
|
1163
|
-
read:
|
|
1164
|
-
};
|
|
1165
|
-
}
|
|
1166
|
-
if (process.env.DATABASE_URL && process.env.DATABASE_REPLICA_URL) {
|
|
1167
|
-
return {
|
|
1168
|
-
type: "legacy",
|
|
1169
|
-
primary: process.env.DATABASE_URL,
|
|
1170
|
-
replica: process.env.DATABASE_REPLICA_URL
|
|
372
|
+
write: DATABASE_WRITE_URL,
|
|
373
|
+
read: DATABASE_READ_URL
|
|
1171
374
|
};
|
|
1172
375
|
}
|
|
1173
|
-
if (
|
|
376
|
+
if (DATABASE_URL) {
|
|
1174
377
|
return {
|
|
1175
378
|
type: "single",
|
|
1176
|
-
url:
|
|
379
|
+
url: DATABASE_URL
|
|
1177
380
|
};
|
|
1178
381
|
}
|
|
1179
|
-
if (
|
|
382
|
+
if (DATABASE_WRITE_URL) {
|
|
1180
383
|
return {
|
|
1181
384
|
type: "single",
|
|
1182
|
-
url:
|
|
385
|
+
url: DATABASE_WRITE_URL
|
|
1183
386
|
};
|
|
1184
387
|
}
|
|
1185
388
|
return { type: "none" };
|
|
1186
389
|
}
|
|
1187
390
|
async function createWriteReadClients(writeUrl, readUrl, poolConfig, retryConfig) {
|
|
1188
|
-
|
|
1189
|
-
|
|
1190
|
-
|
|
1191
|
-
|
|
1192
|
-
|
|
1193
|
-
|
|
1194
|
-
|
|
1195
|
-
|
|
391
|
+
let writeClient;
|
|
392
|
+
let readClient;
|
|
393
|
+
try {
|
|
394
|
+
writeClient = await createDatabaseConnection(writeUrl, poolConfig, retryConfig);
|
|
395
|
+
} catch (error) {
|
|
396
|
+
const errorObj = error instanceof Error ? error : new Error(String(error));
|
|
397
|
+
dbLogger2.error("Failed to connect to write database", errorObj);
|
|
398
|
+
throw new Error(`Write database connection failed: ${errorObj.message}`, { cause: error });
|
|
399
|
+
}
|
|
400
|
+
try {
|
|
401
|
+
readClient = await createDatabaseConnection(readUrl, poolConfig, retryConfig);
|
|
402
|
+
return {
|
|
403
|
+
write: drizzle(writeClient),
|
|
404
|
+
read: drizzle(readClient),
|
|
405
|
+
writeClient,
|
|
406
|
+
readClient
|
|
407
|
+
};
|
|
408
|
+
} catch (error) {
|
|
409
|
+
const errorObj = error instanceof Error ? error : new Error(String(error));
|
|
410
|
+
dbLogger2.warn(
|
|
411
|
+
"Failed to connect to read database (replica). Falling back to write database for read operations.",
|
|
412
|
+
{
|
|
413
|
+
error: errorObj.message,
|
|
414
|
+
readUrl: readUrl.replace(/:[^:@]+@/, ":***@"),
|
|
415
|
+
// Mask password in logs
|
|
416
|
+
fallbackBehavior: "Using write connection for both read and write operations"
|
|
417
|
+
}
|
|
418
|
+
);
|
|
419
|
+
return {
|
|
420
|
+
write: drizzle(writeClient),
|
|
421
|
+
read: drizzle(writeClient),
|
|
422
|
+
writeClient,
|
|
423
|
+
readClient: writeClient
|
|
424
|
+
};
|
|
425
|
+
}
|
|
1196
426
|
}
|
|
1197
427
|
async function createSingleClient(url, poolConfig, retryConfig) {
|
|
1198
428
|
const client = await createDatabaseConnection(url, poolConfig, retryConfig);
|
|
@@ -1206,25 +436,15 @@ async function createSingleClient(url, poolConfig, retryConfig) {
|
|
|
1206
436
|
}
|
|
1207
437
|
async function createDatabaseFromEnv(options) {
|
|
1208
438
|
if (!hasDatabaseConfig()) {
|
|
1209
|
-
|
|
1210
|
-
|
|
1211
|
-
|
|
1212
|
-
|
|
1213
|
-
dbLogger2.debug("Environment variables loaded", {
|
|
1214
|
-
success: result.success,
|
|
1215
|
-
loaded: result.loaded.length,
|
|
1216
|
-
hasDatabaseUrl: !!process.env.DATABASE_URL,
|
|
1217
|
-
hasWriteUrl: !!process.env.DATABASE_WRITE_URL,
|
|
1218
|
-
hasReadUrl: !!process.env.DATABASE_READ_URL
|
|
1219
|
-
});
|
|
1220
|
-
}
|
|
1221
|
-
if (!hasDatabaseConfig()) {
|
|
1222
|
-
dbLogger2.warn("No database configuration found", {
|
|
439
|
+
const error = new Error(
|
|
440
|
+
"No database configuration found. Please set DATABASE_URL, DATABASE_WRITE_URL, or DATABASE_READ_URL environment variable."
|
|
441
|
+
);
|
|
442
|
+
dbLogger2.error("No database configuration found", {
|
|
1223
443
|
cwd: process.cwd(),
|
|
1224
|
-
nodeEnv:
|
|
444
|
+
nodeEnv: env.NODE_ENV,
|
|
1225
445
|
checkedVars: ["DATABASE_URL", "DATABASE_WRITE_URL", "DATABASE_READ_URL"]
|
|
1226
446
|
});
|
|
1227
|
-
|
|
447
|
+
throw error;
|
|
1228
448
|
}
|
|
1229
449
|
try {
|
|
1230
450
|
const poolConfig = getPoolConfig(options?.pool);
|
|
@@ -1232,48 +452,30 @@ async function createDatabaseFromEnv(options) {
|
|
|
1232
452
|
const pattern = detectDatabasePattern();
|
|
1233
453
|
switch (pattern.type) {
|
|
1234
454
|
case "write-read":
|
|
1235
|
-
dbLogger2.debug("Using write-read pattern", {
|
|
1236
|
-
write: pattern.write.replace(/:[^:@]+@/, ":***@"),
|
|
1237
|
-
read: pattern.read.replace(/:[^:@]+@/, ":***@")
|
|
1238
|
-
});
|
|
1239
455
|
return await createWriteReadClients(
|
|
1240
456
|
pattern.write,
|
|
1241
457
|
pattern.read,
|
|
1242
458
|
poolConfig,
|
|
1243
459
|
retryConfig
|
|
1244
460
|
);
|
|
1245
|
-
case "legacy":
|
|
1246
|
-
dbLogger2.debug("Using legacy replica pattern", {
|
|
1247
|
-
primary: pattern.primary.replace(/:[^:@]+@/, ":***@"),
|
|
1248
|
-
replica: pattern.replica.replace(/:[^:@]+@/, ":***@")
|
|
1249
|
-
});
|
|
1250
|
-
return await createWriteReadClients(
|
|
1251
|
-
pattern.primary,
|
|
1252
|
-
pattern.replica,
|
|
1253
|
-
poolConfig,
|
|
1254
|
-
retryConfig
|
|
1255
|
-
);
|
|
1256
461
|
case "single":
|
|
1257
|
-
dbLogger2.debug("Using single database pattern", {
|
|
1258
|
-
url: pattern.url.replace(/:[^:@]+@/, ":***@")
|
|
1259
|
-
});
|
|
1260
462
|
return await createSingleClient(pattern.url, poolConfig, retryConfig);
|
|
1261
|
-
case "none":
|
|
1262
|
-
dbLogger2.warn("No database pattern detected");
|
|
1263
|
-
return { write: void 0, read: void 0 };
|
|
1264
463
|
}
|
|
1265
464
|
} catch (error) {
|
|
1266
|
-
const
|
|
1267
|
-
dbLogger2.error(
|
|
1268
|
-
|
|
1269
|
-
|
|
1270
|
-
|
|
1271
|
-
|
|
1272
|
-
|
|
1273
|
-
|
|
1274
|
-
|
|
1275
|
-
|
|
465
|
+
const errorObj = error instanceof Error ? error : new Error(String(error));
|
|
466
|
+
dbLogger2.error(
|
|
467
|
+
"Failed to create database connection",
|
|
468
|
+
errorObj,
|
|
469
|
+
{
|
|
470
|
+
stage: "initialization",
|
|
471
|
+
hasWriteUrl: process.env.DATABASE_WRITE_URL !== void 0,
|
|
472
|
+
hasReadUrl: process.env.DATABASE_READ_URL !== void 0,
|
|
473
|
+
hasUrl: process.env.DATABASE_URL !== void 0
|
|
474
|
+
}
|
|
475
|
+
);
|
|
476
|
+
throw new Error(`Database connection failed: ${errorObj.message}`, { cause: error });
|
|
1276
477
|
}
|
|
478
|
+
throw new Error("No database pattern detected despite passing config check");
|
|
1277
479
|
}
|
|
1278
480
|
|
|
1279
481
|
// src/db/manager/global-state.ts
|
|
@@ -1300,9 +502,36 @@ var setHealthCheckInterval = (interval) => {
|
|
|
1300
502
|
var setMonitoringConfig = (config) => {
|
|
1301
503
|
globalThis.__SPFN_DB_MONITORING__ = config;
|
|
1302
504
|
};
|
|
1303
|
-
|
|
1304
|
-
|
|
1305
|
-
|
|
505
|
+
var dbLogger3 = logger.child("@spfn/core:database");
|
|
506
|
+
async function testDatabaseConnection(db) {
|
|
507
|
+
await db.execute("SELECT 1");
|
|
508
|
+
}
|
|
509
|
+
async function performHealthCheck(getDatabase2) {
|
|
510
|
+
const write = getDatabase2("write");
|
|
511
|
+
const read = getDatabase2("read");
|
|
512
|
+
await testDatabaseConnection(write);
|
|
513
|
+
if (read !== write) {
|
|
514
|
+
await testDatabaseConnection(read);
|
|
515
|
+
}
|
|
516
|
+
}
|
|
517
|
+
async function reconnectAndRestore(options, closeDatabase2) {
|
|
518
|
+
await closeDatabase2();
|
|
519
|
+
const result = await createDatabaseFromEnv(options);
|
|
520
|
+
if (!result.write) {
|
|
521
|
+
return false;
|
|
522
|
+
}
|
|
523
|
+
await testDatabaseConnection(result.write);
|
|
524
|
+
if (result.read && result.read !== result.write) {
|
|
525
|
+
await testDatabaseConnection(result.read);
|
|
526
|
+
}
|
|
527
|
+
setWriteInstance(result.write);
|
|
528
|
+
setReadInstance(result.read);
|
|
529
|
+
setWriteClient(result.writeClient);
|
|
530
|
+
setReadClient(result.readClient);
|
|
531
|
+
const monConfig = buildMonitoringConfig(options?.monitoring);
|
|
532
|
+
setMonitoringConfig(monConfig);
|
|
533
|
+
return true;
|
|
534
|
+
}
|
|
1306
535
|
function startHealthCheck(config, options, getDatabase2, closeDatabase2) {
|
|
1307
536
|
const healthCheck = getHealthCheckInterval();
|
|
1308
537
|
if (healthCheck) {
|
|
@@ -1315,14 +544,7 @@ function startHealthCheck(config, options, getDatabase2, closeDatabase2) {
|
|
|
1315
544
|
});
|
|
1316
545
|
const interval = setInterval(async () => {
|
|
1317
546
|
try {
|
|
1318
|
-
|
|
1319
|
-
const read = getDatabase2("read");
|
|
1320
|
-
if (write) {
|
|
1321
|
-
await write.execute("SELECT 1");
|
|
1322
|
-
}
|
|
1323
|
-
if (read && read !== write) {
|
|
1324
|
-
await read.execute("SELECT 1");
|
|
1325
|
-
}
|
|
547
|
+
await performHealthCheck(getDatabase2);
|
|
1326
548
|
} catch (error) {
|
|
1327
549
|
const message = error instanceof Error ? error.message : "Unknown error";
|
|
1328
550
|
dbLogger3.error("Database health check failed", { error: message });
|
|
@@ -1341,17 +563,15 @@ async function attemptReconnection(config, options, closeDatabase2) {
|
|
|
1341
563
|
for (let attempt = 1; attempt <= config.maxRetries; attempt++) {
|
|
1342
564
|
try {
|
|
1343
565
|
dbLogger3.debug(`Reconnection attempt ${attempt}/${config.maxRetries}`);
|
|
1344
|
-
|
|
1345
|
-
|
|
1346
|
-
|
|
1347
|
-
|
|
1348
|
-
|
|
1349
|
-
setWriteInstance(result.write);
|
|
1350
|
-
setReadInstance(result.read);
|
|
1351
|
-
setWriteClient(result.writeClient);
|
|
1352
|
-
setReadClient(result.readClient);
|
|
566
|
+
if (attempt > 1) {
|
|
567
|
+
await new Promise((resolve) => setTimeout(resolve, config.retryInterval));
|
|
568
|
+
}
|
|
569
|
+
const success = await reconnectAndRestore(options, closeDatabase2);
|
|
570
|
+
if (success) {
|
|
1353
571
|
dbLogger3.info("Database reconnection successful", { attempt });
|
|
1354
572
|
return;
|
|
573
|
+
} else {
|
|
574
|
+
dbLogger3.error(`Reconnection attempt ${attempt} failed: No write database instance created`);
|
|
1355
575
|
}
|
|
1356
576
|
} catch (error) {
|
|
1357
577
|
const message = error instanceof Error ? error.message : "Unknown error";
|
|
@@ -1360,9 +580,9 @@ async function attemptReconnection(config, options, closeDatabase2) {
|
|
|
1360
580
|
attempt,
|
|
1361
581
|
maxRetries: config.maxRetries
|
|
1362
582
|
});
|
|
1363
|
-
|
|
1364
|
-
|
|
1365
|
-
|
|
583
|
+
}
|
|
584
|
+
if (attempt === config.maxRetries) {
|
|
585
|
+
dbLogger3.error("Max reconnection attempts reached, giving up");
|
|
1366
586
|
}
|
|
1367
587
|
}
|
|
1368
588
|
}
|
|
@@ -1376,16 +596,61 @@ function stopHealthCheck() {
|
|
|
1376
596
|
}
|
|
1377
597
|
|
|
1378
598
|
// src/db/manager/manager.ts
|
|
1379
|
-
var dbLogger4 = logger.child("database");
|
|
599
|
+
var dbLogger4 = logger.child("@spfn/core:database");
|
|
600
|
+
var DB_CONNECTION_CLOSE_TIMEOUT = 5;
|
|
601
|
+
var STACK_TRACE_SKIP_LINES = 3;
|
|
602
|
+
var STACK_TRACE_PATTERNS = {
|
|
603
|
+
withParens: /\((.+):(\d+):(\d+)\)/,
|
|
604
|
+
withoutParens: /at (.+):(\d+):(\d+)/
|
|
605
|
+
};
|
|
606
|
+
var initPromise = null;
|
|
607
|
+
var isClosing = false;
|
|
608
|
+
async function cleanupDatabaseConnections(writeClient, readClient) {
|
|
609
|
+
const cleanupPromises = [];
|
|
610
|
+
if (writeClient) {
|
|
611
|
+
cleanupPromises.push(
|
|
612
|
+
writeClient.end({ timeout: DB_CONNECTION_CLOSE_TIMEOUT }).catch((err) => {
|
|
613
|
+
dbLogger4.debug("Write client cleanup failed", { error: err });
|
|
614
|
+
})
|
|
615
|
+
);
|
|
616
|
+
}
|
|
617
|
+
if (readClient && readClient !== writeClient) {
|
|
618
|
+
cleanupPromises.push(
|
|
619
|
+
readClient.end({ timeout: DB_CONNECTION_CLOSE_TIMEOUT }).catch((err) => {
|
|
620
|
+
dbLogger4.debug("Read client cleanup failed", { error: err });
|
|
621
|
+
})
|
|
622
|
+
);
|
|
623
|
+
}
|
|
624
|
+
await Promise.allSettled(cleanupPromises);
|
|
625
|
+
}
|
|
626
|
+
/**
 * Close a single postgres client with a bounded timeout, logging the
 * outcome. Errors are logged and swallowed so shutdown always proceeds.
 *
 * @param {object} client - postgres.js client exposing `end({ timeout })`.
 * @param {string} type - Connection role, e.g. "write" or "read".
 */
async function closeDatabaseClient(client, type) {
  // Capitalized role name, used only in the success log line.
  const label = `${type.charAt(0).toUpperCase()}${type.slice(1)}`;
  dbLogger4.debug(`Closing ${type} connection...`);
  try {
    await client.end({ timeout: DB_CONNECTION_CLOSE_TIMEOUT });
    dbLogger4.debug(`${label} connection closed`);
  } catch (err) {
    // Normalize non-Error throwables before logging; never rethrow.
    const failure = err instanceof Error ? err : new Error(String(err));
    dbLogger4.error(`Error closing ${type} connection`, failure);
  }
}
|
|
637
|
+
/**
 * Smoke-test freshly created database handles by issuing `SELECT 1`.
 * Rejects (propagates the driver error) when a probe fails.
 *
 * @param {object|undefined} write - Primary instance; nothing is probed when absent.
 * @param {object|undefined} read - Replica instance; probed only when distinct
 *   from the write instance.
 */
async function testDatabaseConnections(write, read) {
  // No write instance means no database was configured — nothing to verify.
  if (!write) {
    return;
  }
  await write.execute("SELECT 1");
  // Probe the replica only when it is a separate connection.
  if (read && read !== write) {
    await read.execute("SELECT 1");
  }
}
|
|
1380
645
|
function getCallerInfo() {
|
|
1381
646
|
try {
|
|
1382
647
|
const stack = new Error().stack;
|
|
1383
648
|
if (!stack) return void 0;
|
|
1384
649
|
const lines = stack.split("\n");
|
|
1385
|
-
for (let i =
|
|
650
|
+
for (let i = STACK_TRACE_SKIP_LINES; i < lines.length; i++) {
|
|
1386
651
|
const line = lines[i];
|
|
1387
652
|
if (!line.includes("node_modules") && !line.includes("/db/manager/")) {
|
|
1388
|
-
const match = line.match(
|
|
653
|
+
const match = line.match(STACK_TRACE_PATTERNS.withParens) || line.match(STACK_TRACE_PATTERNS.withoutParens);
|
|
1389
654
|
if (match) {
|
|
1390
655
|
const fullPath = match[1];
|
|
1391
656
|
const parts = fullPath.split("/");
|
|
@@ -1399,24 +664,39 @@ function getCallerInfo() {
|
|
|
1399
664
|
break;
|
|
1400
665
|
}
|
|
1401
666
|
}
|
|
1402
|
-
} catch {
|
|
667
|
+
} catch (error) {
|
|
668
|
+
dbLogger4.debug("Failed to extract caller info from stack trace", {
|
|
669
|
+
error: error instanceof Error ? error.message : String(error)
|
|
670
|
+
});
|
|
1403
671
|
}
|
|
1404
672
|
return void 0;
|
|
1405
673
|
}
|
|
674
|
+
/**
 * Build the standard "database not initialized" error for getDatabase().
 *
 * @param {string} type - Requested instance type ("read" or "write"),
 *   embedded in the message for easier debugging.
 * @returns {Error}
 */
function createNotInitializedError(type) {
  const message = `Database not initialized (type: ${type}). Call initDatabase() first or set DATABASE_URL environment variable.`;
  return new Error(message);
}
|
|
1406
679
|
/**
 * Resolve the active Drizzle instance for the requested role.
 *
 * "read" falls back to the write (primary) instance when no replica is
 * configured; any other value (including undefined) resolves to the write
 * instance. Throws when the requested instance is not initialized.
 *
 * @param {"read"|"write"|undefined} type - Desired connection role.
 * @returns {object} Drizzle database instance.
 * @throws {Error} When no matching instance has been initialized.
 */
function getDatabase(type) {
  const primary = getWriteInstance();
  const replica = getReadInstance();
  // Opt-in caller tracing; disabled entirely in production builds.
  if (env.DB_DEBUG_TRACE && env.NODE_ENV !== "production") {
    dbLogger4.debug("getDatabase() called", {
      type: type ?? "write",
      hasWrite: !!primary,
      hasRead: !!replica,
      caller: getCallerInfo()
    });
  }
  if (type === "read") {
    // Replica preferred; primary is an acceptable stand-in.
    const candidate = replica ?? primary;
    if (!candidate) {
      throw createNotInitializedError("read");
    }
    return candidate;
  }
  if (!primary) {
    throw createNotInitializedError("write");
  }
  return primary;
}
|
|
@@ -1425,17 +705,32 @@ function setDatabase(write, read) {
|
|
|
1425
705
|
setReadInstance(read ?? write);
|
|
1426
706
|
}
|
|
1427
707
|
async function initDatabase(options) {
|
|
708
|
+
if (isClosing) {
|
|
709
|
+
throw new Error("Cannot initialize database while closing");
|
|
710
|
+
}
|
|
1428
711
|
const writeInst = getWriteInstance();
|
|
1429
712
|
if (writeInst) {
|
|
1430
713
|
dbLogger4.debug("Database already initialized");
|
|
1431
714
|
return { write: writeInst, read: getReadInstance() };
|
|
1432
715
|
}
|
|
1433
|
-
|
|
1434
|
-
|
|
716
|
+
if (initPromise) {
|
|
717
|
+
dbLogger4.debug("Database initialization in progress, waiting...");
|
|
718
|
+
return await initPromise;
|
|
719
|
+
}
|
|
720
|
+
initPromise = (async () => {
|
|
1435
721
|
try {
|
|
1436
|
-
await
|
|
1437
|
-
|
|
1438
|
-
await result.read
|
|
722
|
+
const result = await createDatabaseFromEnv(options);
|
|
723
|
+
try {
|
|
724
|
+
await testDatabaseConnections(result.write, result.read);
|
|
725
|
+
} catch (error) {
|
|
726
|
+
await cleanupDatabaseConnections(result.writeClient, result.readClient);
|
|
727
|
+
const message = error instanceof Error ? error.message : "Unknown error";
|
|
728
|
+
throw new Error(`Database connection test failed: ${message}`);
|
|
729
|
+
}
|
|
730
|
+
if (isClosing) {
|
|
731
|
+
dbLogger4.warn("Database closed during initialization, cleaning up...");
|
|
732
|
+
await cleanupDatabaseConnections(result.writeClient, result.readClient);
|
|
733
|
+
throw new Error("Database closed during initialization");
|
|
1439
734
|
}
|
|
1440
735
|
setWriteInstance(result.write);
|
|
1441
736
|
setReadInstance(result.read);
|
|
@@ -1457,53 +752,54 @@ async function initDatabase(options) {
|
|
|
1457
752
|
logQueries: monConfig.logQueries
|
|
1458
753
|
});
|
|
1459
754
|
}
|
|
1460
|
-
|
|
1461
|
-
|
|
1462
|
-
|
|
1463
|
-
await closeDatabase();
|
|
1464
|
-
throw new Error(`Database connection test failed: ${message}`, { cause: error });
|
|
755
|
+
return { write: getWriteInstance(), read: getReadInstance() };
|
|
756
|
+
} finally {
|
|
757
|
+
initPromise = null;
|
|
1465
758
|
}
|
|
1466
|
-
}
|
|
1467
|
-
|
|
1468
|
-
dbLogger4.warn("Set DATABASE_URL environment variable to enable database");
|
|
1469
|
-
}
|
|
1470
|
-
return { write: getWriteInstance(), read: getReadInstance() };
|
|
759
|
+
})();
|
|
760
|
+
return await initPromise;
|
|
1471
761
|
}
|
|
1472
762
|
/**
 * Close all database connections and reset module-level state.
 *
 * Idempotent and re-entrancy guarded via the `isClosing` flag: a second
 * concurrent call returns immediately. If an initDatabase() call is in
 * flight, it is awaited first so its clients are not leaked; an init
 * failure is tolerated and cleanup proceeds regardless.
 */
async function closeDatabase() {
  // Re-entrancy guard: only one close runs at a time.
  if (isClosing) {
    dbLogger4.debug("Database close already in progress");
    return;
  }
  isClosing = true;
  // Let a pending initialization finish (or fail) before tearing down,
  // otherwise its clients could be created after we reset state.
  if (initPromise) {
    dbLogger4.debug("Waiting for database initialization to complete before closing...");
    try {
      await initPromise;
    } catch (_error) {
      dbLogger4.debug("Initialization failed during close, proceeding with cleanup");
    }
  }
  const writeInst = getWriteInstance();
  const readInst = getReadInstance();
  if (!writeInst && !readInst) {
    dbLogger4.debug("No database connections to close");
    // Must clear the guard on the early-return path too.
    isClosing = false;
    return;
  }
  try {
    stopHealthCheck();
    const closePromises = [];
    const writeC = getWriteClient();
    if (writeC) {
      closePromises.push(closeDatabaseClient(writeC, "write"));
    }
    const readC = getReadClient();
    // Skip the read client when it aliases the write client.
    if (readC && readC !== writeC) {
      closePromises.push(closeDatabaseClient(readC, "read"));
    }
    // closeDatabaseClient never rejects, but allSettled keeps this robust.
    await Promise.allSettled(closePromises);
    dbLogger4.info("All database connections closed");
  } finally {
    // Always reset module state, even if closing threw.
    setWriteInstance(void 0);
    setReadInstance(void 0);
    setWriteClient(void 0);
    setReadClient(void 0);
    setMonitoringConfig(void 0);
    isClosing = false;
  }
}
|
|
1509
805
|
function getDatabaseInfo() {
|
|
@@ -1515,59 +811,110 @@ function getDatabaseInfo() {
|
|
|
1515
811
|
isReplica: !!(readInst && readInst !== writeInst)
|
|
1516
812
|
};
|
|
1517
813
|
}
|
|
1518
|
-
|
|
1519
|
-
|
|
1520
|
-
|
|
1521
|
-
|
|
814
|
+
// Barrel (index.*) file suffixes, covering both POSIX and Windows path
// separators; barrel files re-export entities and must not be fed to
// drizzle-kit as schema sources.
var INDEX_FILE_PATTERNS = [
  "/index",
  "/index.ts",
  "/index.js",
  "/index.mjs",
  "\\index",
  "\\index.ts",
  "\\index.js",
  "\\index.mjs"
];
// Source extensions accepted as schema modules.
var SUPPORTED_EXTENSIONS = [".ts", ".js", ".mjs"];
/**
 * True when the path points at a barrel (index) file.
 * @param {string} filePath
 * @returns {boolean}
 */
function isIndexFile(filePath) {
  for (const pattern of INDEX_FILE_PATTERNS) {
    if (filePath.endsWith(pattern)) {
      return true;
    }
  }
  return false;
}
/**
 * Detect POSIX absolute paths and Windows drive-letter paths (C:\ or C:/).
 * @param {string} path
 * @returns {boolean}
 */
function isAbsolutePath(path) {
  if (path.startsWith("/")) {
    return true;
  }
  return /^[A-Za-z]:[\/\\]/.test(path);
}
/**
 * True for .ts/.js/.mjs sources, explicitly excluding TypeScript
 * declaration files (.d.ts).
 * @param {string} filePath
 * @returns {boolean}
 */
function hasSupportedExtension(filePath) {
  if (filePath.endsWith(".d.ts")) {
    return false;
  }
  return SUPPORTED_EXTENSIONS.some((ext) => filePath.endsWith(ext));
}
/**
 * Drop barrel files from a list of schema file candidates.
 * @param {string[]} files
 * @returns {string[]}
 */
function filterIndexFiles(files) {
  return files.filter((file) => !isIndexFile(file));
}
|
|
839
|
+
/**
 * Recursively collect schema source files under a directory.
 *
 * Best-effort: a missing root yields [], and unreadable entries or
 * directories (permissions, races) are skipped without failing the scan.
 *
 * @param {string} dir - Root directory to walk.
 * @param {string|undefined} extension - Optional suffix filter (e.g. ".ts");
 *   files must also pass hasSupportedExtension().
 * @returns {string[]} Absolute/joined paths of matching files.
 */
function scanDirectoryRecursive(dir, extension) {
  if (!existsSync(dir)) {
    return [];
  }
  const collected = [];
  try {
    for (const entry of readdirSync(dir)) {
      const fullPath = join(dir, entry);
      try {
        const info = statSync(fullPath);
        if (info.isDirectory()) {
          // Depth-first recursion; merge child matches into our result.
          collected.push(...scanDirectoryRecursive(fullPath, extension));
        } else if (info.isFile() && (!extension || fullPath.endsWith(extension)) && hasSupportedExtension(fullPath)) {
          collected.push(fullPath);
        }
      } catch (error) {
        // Unreadable entry — skip it and keep scanning.
      }
    }
  } catch (error) {
    // Unreadable directory — return whatever was collected so far.
  }
  return collected;
}
|
|
862
|
+
/**
 * Collect matching schema files directly inside one directory (no recursion).
 *
 * @param {string} dir - Directory to scan; a missing directory yields [].
 * @param {string} filePattern - Either "*" (everything) or "*.<suffix>"
 *   (suffix match on the entry name); files must also pass
 *   hasSupportedExtension().
 * @returns {string[]} Joined paths of matching files.
 */
function scanDirectorySingleLevel(dir, filePattern) {
  if (!existsSync(dir)) {
    return [];
  }
  const matches = [];
  // "*" matches every entry; "*.ext" compares the suffix including the dot.
  const nameMatches = (entry) => filePattern === "*" || filePattern.startsWith("*.") && entry.endsWith(filePattern.slice(1));
  try {
    for (const entry of readdirSync(dir)) {
      const fullPath = join(dir, entry);
      try {
        const info = statSync(fullPath);
        if (info.isFile() && nameMatches(entry) && hasSupportedExtension(fullPath)) {
          matches.push(fullPath);
        }
      } catch (error) {
        // Unreadable entry — skip it (best-effort scan).
      }
    }
  } catch (error) {
    // Unreadable directory — return whatever was collected.
  }
  return matches;
}
|
|
883
|
+
/**
 * Scan schema source files for PostgreSQL schema names.
 *
 * Recognizes explicit `pgSchema('name')` calls and `createSchema('@scope/pkg')`
 * calls (whose schema name is derived from the package name the same way as
 * packageNameToSchema). Always includes "public". Unreadable files are skipped.
 *
 * @param {string[]} files - Paths of source files to inspect.
 * @returns {string[]} Unique schema names, "public" first.
 */
function detectSchemasFromFiles(files) {
  const found = new Set(["public"]);
  for (const filePath of files) {
    let source;
    try {
      source = readFileSync(filePath, "utf-8");
    } catch {
      // Best-effort: unreadable files contribute no schemas.
      continue;
    }
    // Explicit pgSchema('name') declarations.
    for (const m of source.matchAll(/pgSchema\s*\(\s*['"]([^'"]+)['"]\s*\)/g)) {
      found.add(m[1]);
    }
    // createSchema('@scope/pkg') — sanitize the package name into a schema name.
    for (const m of source.matchAll(/createSchema\s*\(\s*['"]([^'"]+)['"]\s*\)/g)) {
      found.add(m[1].replace(/@/g, "").replace(/\//g, "_").replace(/-/g, "_"));
    }
  }
  return Array.from(found);
}
|
|
904
|
+
/**
 * Expand a minimal glob pattern into a list of existing files.
 *
 * Supported forms:
 * - literal path (no "*"): returned as-is iff it exists;
 * - "base/**" + suffix: recursive scan rooted at the prefix before "**";
 * - "dir/*.ext" or "dir/*": single-level scan of dir.
 *
 * @param {string} pattern - Path or restricted glob.
 * @returns {string[]} Matching file paths (empty when nothing matches).
 */
function expandGlobPattern(pattern) {
  // Literal path: only return it when it exists on disk.
  if (!pattern.includes("*")) {
    return existsSync(pattern) ? [pattern] : [];
  }
  if (pattern.includes("**")) {
    // Everything before "**" is the scan root; the remainder (minus any
    // "/*." or "\*." fragment) is treated as an extension suffix.
    const [baseDir, ...rest] = pattern.split("**");
    const extension = rest.join("").replace(/[\/\\]\*\./g, "").trim();
    const root = baseDir.trim() || ".";
    return scanDirectoryRecursive(root, extension || void 0);
  }
  // Single "*": match files directly inside one directory.
  return scanDirectorySingleLevel(dirname(pattern), basename(pattern));
}
|
|
1571
918
|
function discoverPackageSchemas(cwd) {
|
|
1572
919
|
const schemas = [];
|
|
1573
920
|
const nodeModulesPath = join(cwd, "node_modules");
|
|
@@ -1596,9 +943,7 @@ function discoverPackageSchemas(cwd) {
|
|
|
1596
943
|
for (const schema of packageSchemas) {
|
|
1597
944
|
const absolutePath = join(pkgPath, schema);
|
|
1598
945
|
const expandedFiles = expandGlobPattern(absolutePath);
|
|
1599
|
-
const schemaFiles = expandedFiles
|
|
1600
|
-
(file) => !file.endsWith("/index.js") && !file.endsWith("/index.ts") && !file.endsWith("/index.mjs") && !file.endsWith("\\index.js") && !file.endsWith("\\index.ts") && !file.endsWith("\\index.mjs")
|
|
1601
|
-
);
|
|
946
|
+
const schemaFiles = filterIndexFiles(expandedFiles);
|
|
1602
947
|
schemas.push(...schemaFiles);
|
|
1603
948
|
}
|
|
1604
949
|
}
|
|
@@ -1637,7 +982,7 @@ function detectDialect(url) {
|
|
|
1637
982
|
);
|
|
1638
983
|
}
|
|
1639
984
|
function getDrizzleConfig(options = {}) {
|
|
1640
|
-
const databaseUrl = options.databaseUrl ??
|
|
985
|
+
const databaseUrl = options.databaseUrl ?? env.DATABASE_URL;
|
|
1641
986
|
if (!databaseUrl) {
|
|
1642
987
|
throw new Error(
|
|
1643
988
|
"DATABASE_URL is required. Set it in .env or pass it to getDrizzleConfig()"
|
|
@@ -1666,13 +1011,33 @@ function getDrizzleConfig(options = {}) {
|
|
|
1666
1011
|
const userSchema = options.schema ?? "./src/server/entities/**/*.ts";
|
|
1667
1012
|
const userSchemas = Array.isArray(userSchema) ? userSchema : [userSchema];
|
|
1668
1013
|
const packageSchemas = options.disablePackageDiscovery ? [] : discoverPackageSchemas(options.cwd ?? process.cwd());
|
|
1669
|
-
|
|
1014
|
+
let allSchemas = [...userSchemas, ...packageSchemas];
|
|
1015
|
+
const cwd = options.cwd ?? process.cwd();
|
|
1016
|
+
let expandedFiles = [];
|
|
1017
|
+
if (options.expandGlobs) {
|
|
1018
|
+
for (const schema2 of allSchemas) {
|
|
1019
|
+
const absoluteSchema = isAbsolutePath(schema2) ? schema2 : join(cwd, schema2);
|
|
1020
|
+
const expanded = expandGlobPattern(absoluteSchema);
|
|
1021
|
+
const filtered = filterIndexFiles(expanded);
|
|
1022
|
+
expandedFiles.push(...filtered);
|
|
1023
|
+
}
|
|
1024
|
+
allSchemas = expandedFiles;
|
|
1025
|
+
}
|
|
1670
1026
|
const schema = allSchemas.length === 1 ? allSchemas[0] : allSchemas;
|
|
1027
|
+
let schemaFilter;
|
|
1028
|
+
if (dialect === "postgresql") {
|
|
1029
|
+
if (options.schemaFilter) {
|
|
1030
|
+
schemaFilter = options.schemaFilter;
|
|
1031
|
+
} else if (options.autoDetectSchemas && expandedFiles.length > 0) {
|
|
1032
|
+
schemaFilter = detectSchemasFromFiles(expandedFiles);
|
|
1033
|
+
}
|
|
1034
|
+
}
|
|
1671
1035
|
return {
|
|
1672
1036
|
schema,
|
|
1673
1037
|
out,
|
|
1674
1038
|
dialect,
|
|
1675
|
-
dbCredentials: getDbCredentials(dialect, databaseUrl)
|
|
1039
|
+
dbCredentials: getDbCredentials(dialect, databaseUrl),
|
|
1040
|
+
schemaFilter
|
|
1676
1041
|
};
|
|
1677
1042
|
}
|
|
1678
1043
|
function getDbCredentials(dialect, url) {
|
|
@@ -1689,30 +1054,35 @@ function getDbCredentials(dialect, url) {
|
|
|
1689
1054
|
}
|
|
1690
1055
|
/**
 * Render a drizzle.config.ts source string from the resolved config.
 *
 * Schema paths are normalized to absolute paths (relative ones are joined
 * onto options.cwd / process.cwd()) so the generated file works regardless
 * of where drizzle-kit is invoked. A `schemaFilter` line is emitted only
 * when the resolved config carries a non-empty filter.
 *
 * @param {object} [options] - Same options accepted by getDrizzleConfig();
 *   `cwd` additionally anchors relative schema paths.
 * @returns {string} TypeScript source for a drizzle-kit defineConfig file.
 */
function generateDrizzleConfigFile(options = {}) {
  const config = getDrizzleConfig(options);
  const cwd = options.cwd ?? process.cwd();
  // Leave absolute paths untouched; anchor relative ones at cwd.
  const normalizeSchemaPath = (schemaPath) => {
    if (isAbsolutePath(schemaPath)) {
      return schemaPath;
    }
    return join(cwd, schemaPath);
  };
  // Array schemas render as a multi-line TS array literal.
  const schemaValue = Array.isArray(config.schema) ? `[
    ${config.schema.map((s) => `'${normalizeSchemaPath(s)}'`).join(",\n    ")}
]` : `'${normalizeSchemaPath(config.schema)}'`;
  // Optional trailing property; empty string keeps the template untouched.
  const schemaFilterLine = config.schemaFilter && config.schemaFilter.length > 0 ? `
    schemaFilter: ${JSON.stringify(config.schemaFilter)},` : "";
  return `import { defineConfig } from 'drizzle-kit';

export default defineConfig({
    schema: ${schemaValue},
    out: '${config.out}',
    dialect: '${config.dialect}',
    dbCredentials: ${JSON.stringify(config.dbCredentials, null, 4)},${schemaFilterLine}
});
`;
}
|
|
1705
1079
|
/**
 * Auto-incrementing bigserial primary-key column named "id",
 * surfaced to JavaScript as a number.
 */
function id() {
  const column = bigserial("id", { mode: "number" });
  return column.primaryKey();
}
|
|
1708
|
-
function timestamps(
|
|
1709
|
-
const updatedAtColumn = timestamp("updated_at", { withTimezone: true, mode: "date" }).defaultNow().notNull();
|
|
1710
|
-
if (options?.autoUpdate) {
|
|
1711
|
-
updatedAtColumn.__autoUpdate = true;
|
|
1712
|
-
}
|
|
1082
|
+
/**
 * Standard created_at / updated_at column pair: timezone-aware Date
 * columns, NOT NULL, defaulting to the insertion time.
 */
function timestamps() {
  const makeColumn = (name) => timestamp(name, { withTimezone: true, mode: "date" }).defaultNow().notNull();
  return {
    createdAt: makeColumn("created_at"),
    updatedAt: makeColumn("updated_at")
  };
}
|
|
1718
1088
|
function foreignKey(name, reference, options) {
|
|
@@ -1721,12 +1091,51 @@ function foreignKey(name, reference, options) {
|
|
|
1721
1091
|
// Nullable foreign-key column named `${name}_id`; when the referenced row is
// deleted the link is nulled out by default (onDelete: "set null"), keeping
// the owning row. Pass options.onDelete to override.
// NOTE(review): the column is built with bigserial (an auto-increment type);
// presumably a plain bigint matching id() was intended — confirm.
function optionalForeignKey(name, reference, options) {
  return bigserial(`${name}_id`, { mode: "number" }).references(reference, { onDelete: options?.onDelete ?? "set null" });
}
|
|
1724
|
-
function
|
|
1094
|
+
/**
 * UUID primary-key column named "id" with a server-generated random default.
 */
function uuid() {
  const column = uuid$1("id").defaultRandom();
  return column.primaryKey();
}
|
|
1097
|
+
/**
 * Audit columns recording which principal created and last modified a row.
 * Both are nullable text columns.
 */
function auditFields() {
  const createdBy = text("created_by");
  const updatedBy = text("updated_by");
  return { createdBy, updatedBy };
}
|
|
1103
|
+
/**
 * Publication metadata columns: when a row was published (timezone-aware
 * Date, nullable) and by whom (nullable text).
 */
function publishingFields() {
  const publishedAt = timestamp("published_at", { withTimezone: true, mode: "date" });
  const publishedBy = text("published_by");
  return { publishedAt, publishedBy };
}
|
|
1109
|
+
/**
 * Single nullable timestamp column keyed `${fieldName}At`, stored as the
 * snake_case column `${field_name}_at` (e.g. "emailVerified" ->
 * key "emailVerifiedAt", column "email_verified_at").
 *
 * @param {string} fieldName - camelCase base name of the verification field.
 */
function verificationTimestamp(fieldName) {
  // camelCase -> snake_case, then append the "_at" suffix.
  const snake = fieldName.replace(/([A-Z])/g, "_$1").toLowerCase().replace(/^_/, "");
  const columnName = `${snake}_at`;
  return {
    [`${fieldName}At`]: timestamp(columnName, { withTimezone: true, mode: "date" })
  };
}
|
|
1115
|
+
/**
 * Soft-delete marker columns: deletion time (timezone-aware Date, nullable)
 * and the deleting principal (nullable text). Rows are filtered by these
 * instead of being physically removed.
 */
function softDelete() {
  const deletedAt = timestamp("deleted_at", { withTimezone: true, mode: "date" });
  const deletedBy = text("deleted_by");
  return { deletedAt, deletedBy };
}
|
|
1121
|
+
/**
 * Timezone-aware timestamp column.
 *
 * @param {string} fieldName - Column name.
 * @param {"date"|"string"} [mode="date"] - How Drizzle surfaces the value.
 */
function utcTimestamp(fieldName, mode = "date") {
  const columnOptions = { withTimezone: true, mode };
  return timestamp(fieldName, columnOptions);
}
|
|
1127
|
+
/**
 * Text column constrained (at the type level) to the given enum values.
 *
 * @param {string} fieldName - Column name.
 * @param {string[]} values - Allowed literal values.
 */
function enumText(fieldName, values) {
  const config = { enum: values };
  return text(fieldName, config);
}
|
|
1130
|
+
/**
 * JSONB column whose TypeScript value type is narrowed via Drizzle's
 * `.$type()` (a compile-time-only annotation in the original source).
 *
 * @param {string} fieldName - Column name.
 */
function typedJsonb(fieldName) {
  const column = jsonb(fieldName);
  return column.$type();
}
|
|
1133
|
+
/**
 * Dedicated PostgreSQL schema for a package, named by sanitizing the
 * package name (see packageNameToSchema).
 *
 * @param {string} packageName - e.g. "@spfn/core".
 */
function createSchema(packageName) {
  return pgSchema(packageNameToSchema(packageName));
}
|
|
1728
1137
|
/**
 * Sanitize an npm package name into a PostgreSQL schema name:
 * "@" is stripped, "/" and "-" become "_" (e.g. "@spfn/core" -> "spfn_core").
 *
 * @param {string} packageName
 * @returns {string}
 */
function packageNameToSchema(packageName) {
  const unscoped = packageName.replace(/@/g, "");
  return unscoped.replace(/[/-]/g, "_");
}
|
|
1731
1140
|
function getSchemaInfo(packageName) {
|
|
1732
1141
|
const isScoped = packageName.startsWith("@");
|
|
@@ -1738,7 +1147,7 @@ function getSchemaInfo(packageName) {
|
|
|
1738
1147
|
scope
|
|
1739
1148
|
};
|
|
1740
1149
|
}
|
|
1741
|
-
var txLogger = logger.child("transaction");
|
|
1150
|
+
var txLogger = logger.child("@spfn/core:transaction");
|
|
1742
1151
|
var asyncContext = new AsyncLocalStorage();
|
|
1743
1152
|
function getTransactionContext() {
|
|
1744
1153
|
return asyncContext.getStore() ?? null;
|
|
@@ -1761,103 +1170,187 @@ function runWithTransaction(tx, txId, callback) {
|
|
|
1761
1170
|
}
|
|
1762
1171
|
return asyncContext.run({ tx, txId, level: newLevel }, callback);
|
|
1763
1172
|
}
|
|
1764
|
-
|
|
1765
|
-
|
|
1173
|
+
// Largest value accepted by setTimeout / statement_timeout (2^31 - 1 ms).
var MAX_TIMEOUT_MS = 2147483647;
var txLogger2 = logger.child("@spfn/core:transaction");
/**
 * Run a callback inside a database transaction on the write (primary)
 * instance, with validation, timing and structured logging.
 *
 * The transaction is registered in AsyncLocalStorage (runWithTransaction)
 * so nested runInTransaction calls join the outer transaction. A positive
 * timeout is applied via PostgreSQL `SET LOCAL statement_timeout`, but only
 * for the outermost transaction — in a nested call it is ignored (with a
 * warning), because SET LOCAL would affect the whole outer transaction.
 *
 * @param {Function} callback - Receives the transaction handle `tx`.
 * @param {object} [options]
 * @param {number} [options.slowThreshold=1000] - ms above which commit/rollback logs escalate to warn.
 * @param {boolean} [options.enableLogging=true] - Disable all tx logging when false.
 * @param {string} [options.context="transaction"] - Label attached to every log line.
 * @param {number} [options.timeout] - statement_timeout in ms (0 disables); defaults to env.TRANSACTION_TIMEOUT.
 * @returns {Promise<*>} The callback's resolved value.
 * @throws {TransactionError} On invalid arguments or uninitialized database.
 */
async function runInTransaction(callback, options = {}) {
  const defaultTimeout = env.TRANSACTION_TIMEOUT;
  const {
    slowThreshold = 1e3,
    enableLogging = true,
    context = "transaction"
  } = options;
  const timeout = options.timeout ?? defaultTimeout;
  // Unique id correlating every log line of this transaction.
  const txId = `tx_${randomUUID()}`;
  // Shared validation helper: log (when enabled) and throw a 400 TransactionError.
  const validateAndThrow = (condition, message, logMessage, metadata) => {
    if (condition) {
      const error = new TransactionError({ message, statusCode: 400, details: metadata });
      if (enableLogging) {
        txLogger2.error(logMessage, { ...metadata, error: error.message });
      }
      throw error;
    }
  };
  validateAndThrow(
    typeof callback !== "function",
    "Callback must be a function",
    "Invalid callback type",
    { txId, context, callbackType: typeof callback }
  );
  validateAndThrow(
    !Number.isInteger(slowThreshold) || slowThreshold < 0,
    `Invalid slowThreshold value: ${slowThreshold}. Must be a non-negative integer.`,
    "Invalid slowThreshold",
    { txId, context, slowThreshold }
  );
  validateAndThrow(
    !Number.isInteger(timeout),
    `Invalid timeout value: ${timeout}. Must be an integer.`,
    "Invalid timeout type",
    { txId, context, timeout }
  );
  validateAndThrow(
    timeout < 0,
    `Invalid timeout value: ${timeout}. Timeout must be non-negative (0 to disable, or 1-${MAX_TIMEOUT_MS}ms).`,
    "Invalid timeout range",
    { txId, context, timeout }
  );
  validateAndThrow(
    timeout > MAX_TIMEOUT_MS,
    `Invalid timeout value: ${timeout}. Maximum timeout is ${MAX_TIMEOUT_MS}ms.`,
    "Timeout exceeds maximum",
    { txId, context, timeout, maxTimeout: MAX_TIMEOUT_MS }
  );
  const writeDb = getDatabase("write");
  if (!writeDb) {
    const error = new TransactionError({
      message: "Database not initialized. Cannot start transaction.",
      statusCode: 500,
      details: { txId, context }
    });
    if (enableLogging) {
      txLogger2.error("Database not initialized", {
        txId,
        context,
        error: error.message
      });
    }
    throw error;
  }
  // A non-null ALS context means we are already inside a transaction.
  const existingContext = getTransactionContext();
  const isNested = existingContext !== null;
  if (isNested && timeout > 0 && enableLogging) {
    txLogger2.warn("Timeout ignored in nested transaction", {
      txId,
      context,
      outerTxId: existingContext.txId,
      requestedTimeout: `${timeout}ms`,
      reason: "SET LOCAL statement_timeout affects the entire outer transaction"
    });
  }
  if (enableLogging) {
    txLogger2.debug("Transaction started", { txId, context });
  }
  const startTime = Date.now();
  try {
    const result = await writeDb.transaction(async (tx) => {
      // Apply the timeout only for the outermost transaction.
      if (timeout > 0 && !isNested) {
        await tx.execute(sql.raw(`SET LOCAL statement_timeout = ${timeout}`));
      }
      // Publish tx via AsyncLocalStorage so repositories/nested calls pick it up.
      return await runWithTransaction(tx, txId, async () => {
        return await callback(tx);
      });
    });
    const duration = Date.now() - startTime;
    if (enableLogging) {
      // Slow commits escalate to warn; normal ones stay at debug.
      if (duration >= slowThreshold) {
        txLogger2.warn("Slow transaction committed", {
          txId,
          context,
          duration: `${duration}ms`,
          threshold: `${slowThreshold}ms`
        });
      } else {
        txLogger2.debug("Transaction committed", {
          txId,
          context,
          duration: `${duration}ms`
        });
      }
    }
    return result;
  } catch (error) {
    const duration = Date.now() - startTime;
    if (enableLogging) {
      // Rollbacks log at warn (slow) or error (normal), then rethrow as-is.
      if (duration >= slowThreshold) {
        txLogger2.warn("Slow transaction rolled back", {
          txId,
          context,
          duration: `${duration}ms`,
          threshold: `${slowThreshold}ms`,
          error: error instanceof Error ? error.message : String(error),
          errorType: error instanceof Error ? error.name : "Unknown"
        });
      } else {
        txLogger2.error("Transaction rolled back", {
          txId,
          context,
          duration: `${duration}ms`,
          error: error instanceof Error ? error.message : String(error),
          errorType: error instanceof Error ? error.name : "Unknown"
        });
      }
    }
    throw error;
  }
}
|
|
1306
|
+
|
|
1307
|
+
// src/db/transaction/middleware.ts
|
|
1308
|
+
/**
 * Hono middleware that wraps the downstream handler chain in a database
 * transaction (runInTransaction), labeled with "METHOD /path" as context.
 *
 * If a downstream handler stored an error on the context (`c.error`), it is
 * rethrown inside the transaction callback so the transaction rolls back.
 * Known database errors pass through untouched; raw postgres errors (objects
 * with a string `code`) are translated via fromPostgresError; anything else
 * is rethrown unchanged.
 *
 * @param {object} [options] - Forwarded to runInTransaction (timeout,
 *   slowThreshold, enableLogging, ...); `context` defaults to the route.
 */
function Transactional(options = {}) {
  return createMiddleware(async (c, next) => {
    const route = `${c.req.method} ${c.req.path}`;
    try {
      await runInTransaction(
        async () => {
          await next();
          // Hono swallows handler errors into c.error; rethrow to roll back.
          const contextWithError = c;
          if (contextWithError.error) {
            throw contextWithError.error;
          }
        },
        {
          context: route,
          // Caller options win over the derived context label.
          ...options
        }
      );
    } catch (error) {
      if (error instanceof DatabaseError) {
        throw error;
      }
      if (error instanceof TransactionError) {
        throw error;
      }
      // Raw postgres driver errors carry a string `code` (SQLSTATE).
      if (error && typeof error === "object" && "code" in error && typeof error.code === "string") {
        throw fromPostgresError(error);
      }
      throw error;
    }
  });
}
|
|
1850
1339
|
/**
 * True when the value looks like a Drizzle SQL fragment: a non-null object
 * carrying a `queryChunks` property.
 *
 * @param {*} value
 * @returns {boolean}
 */
function isSQLWrapper(value) {
  if (value === null || typeof value !== "object") {
    return false;
  }
  return "queryChunks" in value;
}
|
|
1853
1342
|
/**
 * Build a Drizzle WHERE clause from a plain { column: value } object.
 *
 * Keys whose value is `undefined` are ignored (no constraint). Returns
 * undefined when nothing remains, a single eq() for one entry, and an
 * and(...) conjunction for several.
 *
 * @param {object} table - Drizzle table whose columns are referenced by key.
 * @param {object} where - Column/value pairs to match with equality.
 */
function buildWhereFromObject(table, where) {
  const active = Object.entries(where).filter(([, value]) => value !== void 0);
  if (active.length === 0) {
    return void 0;
  }
  const conditions = active.map(([column, value]) => eq(table[column], value));
  if (conditions.length === 1) {
    return conditions[0];
  }
  return and(...conditions);
}
|
|
1352
|
+
|
|
1353
|
+
// src/db/helpers.ts
|
|
1861
1354
|
async function findOne(table, where) {
|
|
1862
1355
|
const db = getDatabase("read");
|
|
1863
1356
|
if (!db) {
|
|
@@ -1974,17 +1467,361 @@ async function count(table, where) {
|
|
|
1974
1467
|
if (!db) {
|
|
1975
1468
|
throw new Error("Database not initialized. Call initDatabase() first.");
|
|
1976
1469
|
}
|
|
1977
|
-
let query = db.select().from(table);
|
|
1470
|
+
let query = db.select({ count: count$1() }).from(table);
|
|
1978
1471
|
if (where) {
|
|
1979
1472
|
const whereClause = isSQLWrapper(where) ? where : where ? buildWhereFromObject(table, where) : void 0;
|
|
1980
1473
|
if (whereClause) {
|
|
1981
1474
|
query = query.where(whereClause);
|
|
1982
1475
|
}
|
|
1983
1476
|
}
|
|
1984
|
-
const
|
|
1985
|
-
return
|
|
1477
|
+
const [result] = await query;
|
|
1478
|
+
return Number(result?.count ?? 0);
|
|
1986
1479
|
}
|
|
1480
|
+
var RepositoryError = class extends Error {
  /**
   * Error enriched with repository context for easier debugging.
   *
   * @param {string} message - Human-readable description of the failure.
   * @param {string} repository - Repository class name where it occurred.
   * @param {string} method - Repository method that failed.
   * @param {string} [table] - Database table involved, when known.
   * @param {Error} [originalError] - Underlying error; its stack trace is
   *   adopted so traces point at the real cause.
   */
  constructor(message, repository, method, table, originalError) {
    super(message);
    this.name = "RepositoryError";
    this.repository = repository;
    this.method = method;
    this.table = table;
    this.originalError = originalError;
    const inheritedStack = originalError?.stack;
    if (inheritedStack) {
      this.stack = inheritedStack;
    }
  }
};
|
|
var BaseRepository = class {
  /**
   * Write database instance.
   *
   * Automatically resolves to:
   * - Transaction DB if running within transaction context
   * - Global write (primary) instance otherwise
   *
   * Use this for INSERT, UPDATE, DELETE operations.
   *
   * @example
   * ```typescript
   * async create(data: NewUser) {
   *   return await this.db.insert(users).values(data).returning();
   * }
   * ```
   */
  get db() {
    const txDb = getTransaction();
    if (txDb) {
      return txDb;
    }
    return getDatabase("write");
  }
  /**
   * Read database instance.
   *
   * Automatically resolves to:
   * - Transaction DB if running within transaction context
   * - Global read (replica) instance otherwise
   *
   * Use this for SELECT operations to leverage read replicas
   * and reduce load on the primary database.
   *
   * @example
   * ```typescript
   * async findById(id: string) {
   *   return await this.readDb.select().from(users).where(eq(users.id, id));
   * }
   * ```
   */
  get readDb() {
    const txDb = getTransaction();
    if (txDb) {
      return txDb;
    }
    return getDatabase("read");
  }
  /**
   * Normalize a `where` argument into a Drizzle where clause.
   *
   * Accepts either a ready-made SQL condition or a plain object of
   * column/value pairs; returns undefined when no condition was given.
   * Centralizes the conversion logic previously duplicated across every
   * CRUD helper.
   *
   * @param table - Drizzle table schema
   * @param where - Object condition, SQL condition, or undefined
   * @returns SQL where clause or undefined
   */
  _resolveWhere(table, where) {
    if (!where) {
      return void 0;
    }
    return isSQLWrapper(where) ? where : buildWhereFromObject(table, where);
  }
  /**
   * Wrap query execution with repository context.
   *
   * Enhances error messages with repository information to make debugging
   * easier. When an error occurs, the thrown RepositoryError includes the
   * repository class name, method name, table name (if provided), and the
   * original error (whose stack is preserved).
   *
   * @param queryFn - Query function to execute
   * @param context - Context information (operation name, table name, etc.)
   * @returns Query result
   * @throws RepositoryError with enhanced context
   *
   * @example
   * ```typescript
   * async findById(id: number) {
   *   return await this.withContext(
   *     () => this.readDb.select().from(users).where(eq(users.id, id)),
   *     { method: 'findById', table: 'users' }
   *   );
   * }
   * ```
   */
  async withContext(queryFn, context = {}) {
    try {
      return await queryFn();
    } catch (error) {
      // Non-Error throwables are coerced so downstream consumers can rely
      // on Error semantics (message, stack).
      const err = error instanceof Error ? error : new Error(String(error));
      const repositoryName = this.constructor.name;
      throw new RepositoryError(
        err.message,
        repositoryName,
        context.method,
        context.table,
        err
      );
    }
  }
  // ============================================================================
  // CRUD Methods
  // ============================================================================
  /**
   * Find a single record.
   *
   * @param table - Drizzle table schema
   * @param where - Object or SQL condition (required — a missing condition throws)
   * @returns Single record or null
   * @throws Error when no usable where condition is supplied
   *
   * @example
   * ```typescript
   * // Object-based
   * const user = await this._findOne(users, { id: 1 });
   *
   * // SQL-based
   * const user = await this._findOne(users, eq(users.id, 1));
   * ```
   */
  async _findOne(table, where) {
    const whereClause = this._resolveWhere(table, where);
    if (!whereClause) {
      throw new Error("findOne requires at least one where condition");
    }
    const results = await this.readDb.select().from(table).where(whereClause).limit(1);
    return results[0] ?? null;
  }
  /**
   * Find multiple records.
   *
   * @param table - Drizzle table schema
   * @param options - Query options: where, orderBy (single or array), limit, offset
   * @returns Array of records
   *
   * @example
   * ```typescript
   * const users = await this._findMany(users, {
   *   where: { active: true },
   *   orderBy: desc(users.createdAt),
   *   limit: 10
   * });
   * ```
   */
  async _findMany(table, options) {
    let query = this.readDb.select().from(table);
    const whereClause = this._resolveWhere(table, options?.where);
    if (whereClause) {
      query = query.where(whereClause);
    }
    if (options?.orderBy) {
      const orderByArray = Array.isArray(options.orderBy) ? options.orderBy : [options.orderBy];
      query = query.orderBy(...orderByArray);
    }
    // Explicit null checks so `limit: 0` / `offset: 0` are honored instead
    // of being silently dropped by truthiness.
    if (options?.limit != null) {
      query = query.limit(options.limit);
    }
    if (options?.offset != null) {
      query = query.offset(options.offset);
    }
    return query;
  }
  /**
   * Create a new record.
   *
   * @param table - Drizzle table schema
   * @param data - Insert data
   * @returns Created record
   *
   * @example
   * ```typescript
   * const user = await this._create(users, {
   *   email: 'test@example.com',
   *   name: 'Test User'
   * });
   * ```
   */
  async _create(table, data) {
    const [result] = await this.db.insert(table).values(data).returning();
    return result;
  }
  /**
   * Create multiple records.
   *
   * @param table - Drizzle table schema
   * @param data - Array of insert data; an empty array is a no-op
   * @returns Array of created records
   *
   * @example
   * ```typescript
   * const users = await this._createMany(users, [
   *   { email: 'user1@example.com', name: 'User 1' },
   *   { email: 'user2@example.com', name: 'User 2' }
   * ]);
   * ```
   */
  async _createMany(table, data) {
    // Drizzle rejects `.values([])`; treat an empty batch as a no-op.
    if (data.length === 0) {
      return [];
    }
    const results = await this.db.insert(table).values(data).returning();
    return results;
  }
  /**
   * Upsert a record (INSERT or UPDATE on conflict).
   *
   * @param table - Drizzle table schema
   * @param data - Insert data
   * @param options - Conflict resolution options; `set` defaults to `data`
   * @returns Upserted record
   *
   * @example
   * ```typescript
   * const cache = await this._upsert(cache, {
   *   key: 'config',
   *   value: {...}
   * }, {
   *   target: [cache.key],
   *   set: { value: data.value, updatedAt: new Date() }
   * });
   * ```
   */
  async _upsert(table, data, options) {
    const [result] = await this.db.insert(table).values(data).onConflictDoUpdate({
      target: options.target,
      set: options.set || data
    }).returning();
    return result;
  }
  /**
   * Update a single record.
   *
   * @param table - Drizzle table schema
   * @param where - Object or SQL condition (required — a missing condition throws)
   * @param data - Update data
   * @returns Updated record or null
   * @throws Error when no usable where condition is supplied
   *
   * @example
   * ```typescript
   * const user = await this._updateOne(users,
   *   { id: 1 },
   *   { name: 'Updated Name' }
   * );
   * ```
   */
  async _updateOne(table, where, data) {
    const whereClause = this._resolveWhere(table, where);
    if (!whereClause) {
      throw new Error("updateOne requires at least one where condition");
    }
    const [result] = await this.db.update(table).set(data).where(whereClause).returning();
    return result ?? null;
  }
  /**
   * Update multiple records.
   *
   * @param table - Drizzle table schema
   * @param where - Object or SQL condition (required — a missing condition throws)
   * @param data - Update data
   * @returns Array of updated records
   * @throws Error when no usable where condition is supplied
   *
   * @example
   * ```typescript
   * const users = await this._updateMany(users,
   *   { role: 'user' },
   *   { verified: true }
   * );
   * ```
   */
  async _updateMany(table, where, data) {
    const whereClause = this._resolveWhere(table, where);
    if (!whereClause) {
      throw new Error("updateMany requires at least one where condition");
    }
    const results = await this.db.update(table).set(data).where(whereClause).returning();
    return results;
  }
  /**
   * Delete a single record.
   *
   * @param table - Drizzle table schema
   * @param where - Object or SQL condition (required — a missing condition throws)
   * @returns Deleted record or null
   * @throws Error when no usable where condition is supplied
   *
   * @example
   * ```typescript
   * const user = await this._deleteOne(users, { id: 1 });
   * ```
   */
  async _deleteOne(table, where) {
    const whereClause = this._resolveWhere(table, where);
    if (!whereClause) {
      throw new Error("deleteOne requires at least one where condition");
    }
    const [result] = await this.db.delete(table).where(whereClause).returning();
    return result ?? null;
  }
  /**
   * Delete multiple records.
   *
   * @param table - Drizzle table schema
   * @param where - Object or SQL condition (required — a missing condition throws)
   * @returns Array of deleted records
   * @throws Error when no usable where condition is supplied
   *
   * @example
   * ```typescript
   * const users = await this._deleteMany(users, { verified: false });
   * ```
   */
  async _deleteMany(table, where) {
    const whereClause = this._resolveWhere(table, where);
    if (!whereClause) {
      throw new Error("deleteMany requires at least one where condition");
    }
    const results = await this.db.delete(table).where(whereClause).returning();
    return results;
  }
  /**
   * Count records.
   *
   * @param table - Drizzle table schema
   * @param where - Optional object or SQL condition
   * @returns Number of records
   *
   * @example
   * ```typescript
   * const total = await this._count(users);
   * const activeUsers = await this._count(users, { active: true });
   * ```
   */
  async _count(table, where) {
    let query = this.readDb.select({ count: count$1() }).from(table);
    const whereClause = this._resolveWhere(table, where);
    if (whereClause) {
      query = query.where(whereClause);
    }
    const [result] = await query;
    return Number(result?.count ?? 0);
  }
};
1987
1824
|
|
|
1988
|
-
export { Transactional, checkConnection, closeDatabase, count, create, createDatabaseConnection, createDatabaseFromEnv,
|
|
1825
|
+
export { BaseRepository, RepositoryError, Transactional, auditFields, checkConnection, closeDatabase, count, create, createDatabaseConnection, createDatabaseFromEnv, createMany, createSchema, deleteMany, deleteOne, detectDialect, enumText, findMany, findOne, foreignKey, fromPostgresError, generateDrizzleConfigFile, getDatabase, getDatabaseInfo, getDrizzleConfig, getSchemaInfo, getTransaction, id, initDatabase, optionalForeignKey, packageNameToSchema, publishingFields, runWithTransaction, setDatabase, softDelete, timestamps, typedJsonb, updateMany, updateOne, upsert, utcTimestamp, uuid, verificationTimestamp };
|
|
1989
1826
|
//# sourceMappingURL=index.js.map
|
|
1990
1827
|
//# sourceMappingURL=index.js.map
|