@spfn/core 0.1.0-alpha.88 → 0.2.0-beta.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1046 -384
- package/dist/boss-D-fGtVgM.d.ts +187 -0
- package/dist/cache/index.d.ts +13 -33
- package/dist/cache/index.js +14 -703
- package/dist/cache/index.js.map +1 -1
- package/dist/codegen/index.d.ts +167 -17
- package/dist/codegen/index.js +76 -1419
- package/dist/codegen/index.js.map +1 -1
- package/dist/config/index.d.ts +1191 -0
- package/dist/config/index.js +264 -0
- package/dist/config/index.js.map +1 -0
- package/dist/db/index.d.ts +728 -59
- package/dist/db/index.js +1028 -1225
- package/dist/db/index.js.map +1 -1
- package/dist/env/index.d.ts +579 -308
- package/dist/env/index.js +438 -930
- package/dist/env/index.js.map +1 -1
- package/dist/errors/index.d.ts +417 -29
- package/dist/errors/index.js +359 -98
- package/dist/errors/index.js.map +1 -1
- package/dist/event/index.d.ts +108 -0
- package/dist/event/index.js +122 -0
- package/dist/event/index.js.map +1 -0
- package/dist/job/index.d.ts +172 -0
- package/dist/job/index.js +361 -0
- package/dist/job/index.js.map +1 -0
- package/dist/logger/index.d.ts +20 -79
- package/dist/logger/index.js +82 -387
- package/dist/logger/index.js.map +1 -1
- package/dist/middleware/index.d.ts +2 -11
- package/dist/middleware/index.js +49 -703
- package/dist/middleware/index.js.map +1 -1
- package/dist/nextjs/index.d.ts +120 -0
- package/dist/nextjs/index.js +416 -0
- package/dist/nextjs/index.js.map +1 -0
- package/dist/{client/nextjs/index.d.ts → nextjs/server.d.ts} +288 -262
- package/dist/nextjs/server.js +568 -0
- package/dist/nextjs/server.js.map +1 -0
- package/dist/route/index.d.ts +686 -25
- package/dist/route/index.js +440 -1287
- package/dist/route/index.js.map +1 -1
- package/dist/route/types.d.ts +38 -0
- package/dist/route/types.js +3 -0
- package/dist/route/types.js.map +1 -0
- package/dist/server/index.d.ts +201 -67
- package/dist/server/index.js +921 -3182
- package/dist/server/index.js.map +1 -1
- package/dist/types-BGl4QL1w.d.ts +77 -0
- package/dist/types-DRG2XMTR.d.ts +157 -0
- package/package.json +52 -47
- package/dist/auto-loader-JFaZ9gON.d.ts +0 -80
- package/dist/client/index.d.ts +0 -358
- package/dist/client/index.js +0 -357
- package/dist/client/index.js.map +0 -1
- package/dist/client/nextjs/index.js +0 -371
- package/dist/client/nextjs/index.js.map +0 -1
- package/dist/codegen/generators/index.d.ts +0 -19
- package/dist/codegen/generators/index.js +0 -1404
- package/dist/codegen/generators/index.js.map +0 -1
- package/dist/database-errors-BNNmLTJE.d.ts +0 -86
- package/dist/events/index.d.ts +0 -183
- package/dist/events/index.js +0 -77
- package/dist/events/index.js.map +0 -1
- package/dist/index-DHiAqhKv.d.ts +0 -101
- package/dist/index.d.ts +0 -8
- package/dist/index.js +0 -3674
- package/dist/index.js.map +0 -1
- package/dist/types/index.d.ts +0 -121
- package/dist/types/index.js +0 -38
- package/dist/types/index.js.map +0 -1
- package/dist/types-BXibIEyj.d.ts +0 -60
package/dist/server/index.js
CHANGED
|
@@ -1,17 +1,16 @@
|
|
|
1
|
-
import {
|
|
2
|
-
import { join, dirname, relative, basename } from 'path';
|
|
1
|
+
import { env } from '@spfn/core/config';
|
|
3
2
|
import { config } from 'dotenv';
|
|
4
|
-
import
|
|
5
|
-
import {
|
|
6
|
-
import { timestamp, bigserial, pgSchema } from 'drizzle-orm/pg-core';
|
|
7
|
-
import { AsyncLocalStorage } from 'async_hooks';
|
|
8
|
-
import { randomUUID, randomBytes } from 'crypto';
|
|
9
|
-
import { createMiddleware } from 'hono/factory';
|
|
10
|
-
import { eq, and } from 'drizzle-orm';
|
|
3
|
+
import { existsSync } from 'fs';
|
|
4
|
+
import { resolve, join } from 'path';
|
|
11
5
|
import { Hono } from 'hono';
|
|
12
6
|
import { cors } from 'hono/cors';
|
|
13
|
-
import {
|
|
7
|
+
import { registerRoutes } from '@spfn/core/route';
|
|
8
|
+
import { ErrorHandler, RequestLogger } from '@spfn/core/middleware';
|
|
9
|
+
import { initDatabase, getDatabase, closeDatabase } from '@spfn/core/db';
|
|
10
|
+
import { initCache, getCache, closeCache } from '@spfn/core/cache';
|
|
11
|
+
import { logger } from '@spfn/core/logger';
|
|
14
12
|
import { serve } from '@hono/node-server';
|
|
13
|
+
import PgBoss from 'pg-boss';
|
|
15
14
|
import { networkInterfaces } from 'os';
|
|
16
15
|
|
|
17
16
|
var __defProp = Object.defineProperty;
|
|
@@ -24,46 +23,55 @@ var __export = (target, all) => {
|
|
|
24
23
|
__defProp(target, name, { get: all[name], enumerable: true });
|
|
25
24
|
};
|
|
26
25
|
|
|
27
|
-
// src/logger/types.ts
|
|
28
|
-
var LOG_LEVEL_PRIORITY;
|
|
29
|
-
var init_types = __esm({
|
|
30
|
-
"src/logger/types.ts"() {
|
|
31
|
-
LOG_LEVEL_PRIORITY = {
|
|
32
|
-
debug: 0,
|
|
33
|
-
info: 1,
|
|
34
|
-
warn: 2,
|
|
35
|
-
error: 3,
|
|
36
|
-
fatal: 4
|
|
37
|
-
};
|
|
38
|
-
}
|
|
39
|
-
});
|
|
40
|
-
|
|
41
26
|
// src/logger/formatters.ts
|
|
27
|
+
var formatters_exports = {};
|
|
28
|
+
__export(formatters_exports, {
|
|
29
|
+
colorizeLevel: () => colorizeLevel,
|
|
30
|
+
extractPromiseContext: () => extractPromiseContext,
|
|
31
|
+
extractQueryInfo: () => extractQueryInfo,
|
|
32
|
+
formatConsole: () => formatConsole,
|
|
33
|
+
formatContext: () => formatContext,
|
|
34
|
+
formatError: () => formatError,
|
|
35
|
+
formatJSON: () => formatJSON,
|
|
36
|
+
formatTimestamp: () => formatTimestamp,
|
|
37
|
+
formatTimestampHuman: () => formatTimestampHuman,
|
|
38
|
+
formatUnhandledRejection: () => formatUnhandledRejection,
|
|
39
|
+
maskSensitiveData: () => maskSensitiveData
|
|
40
|
+
});
|
|
42
41
|
function isSensitiveKey(key) {
|
|
43
42
|
const lowerKey = key.toLowerCase();
|
|
44
43
|
return SENSITIVE_KEYS.some((sensitive) => lowerKey.includes(sensitive));
|
|
45
44
|
}
|
|
46
|
-
function maskSensitiveData(data) {
|
|
45
|
+
function maskSensitiveData(data, seen = /* @__PURE__ */ new WeakSet()) {
|
|
47
46
|
if (data === null || data === void 0) {
|
|
48
47
|
return data;
|
|
49
48
|
}
|
|
50
|
-
if (
|
|
51
|
-
return data
|
|
49
|
+
if (typeof data !== "object") {
|
|
50
|
+
return data;
|
|
52
51
|
}
|
|
53
|
-
if (
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
|
|
62
|
-
|
|
52
|
+
if (seen.has(data)) {
|
|
53
|
+
return "[Circular]";
|
|
54
|
+
}
|
|
55
|
+
seen.add(data);
|
|
56
|
+
if (Array.isArray(data)) {
|
|
57
|
+
return data.map((item) => maskSensitiveData(item, seen));
|
|
58
|
+
}
|
|
59
|
+
const masked = {};
|
|
60
|
+
for (const [key, value] of Object.entries(data)) {
|
|
61
|
+
if (isSensitiveKey(key)) {
|
|
62
|
+
masked[key] = MASKED_VALUE;
|
|
63
|
+
} else if (typeof value === "object" && value !== null) {
|
|
64
|
+
masked[key] = maskSensitiveData(value, seen);
|
|
65
|
+
} else {
|
|
66
|
+
masked[key] = value;
|
|
63
67
|
}
|
|
64
|
-
return masked;
|
|
65
68
|
}
|
|
66
|
-
return
|
|
69
|
+
return masked;
|
|
70
|
+
}
|
|
71
|
+
function colorizeLevel(level) {
|
|
72
|
+
const color = COLORS[level];
|
|
73
|
+
const levelStr = level.toUpperCase().padEnd(5);
|
|
74
|
+
return `${color}${levelStr}${COLORS.reset}`;
|
|
67
75
|
}
|
|
68
76
|
function formatTimestamp(date) {
|
|
69
77
|
return date.toISOString();
|
|
@@ -87,13 +95,26 @@ function formatError(error) {
|
|
|
87
95
|
}
|
|
88
96
|
return lines.join("\n");
|
|
89
97
|
}
|
|
98
|
+
function formatContext(context) {
|
|
99
|
+
try {
|
|
100
|
+
return JSON.stringify(context, null, 2);
|
|
101
|
+
} catch (error) {
|
|
102
|
+
return "[Context serialization failed]";
|
|
103
|
+
}
|
|
104
|
+
}
|
|
90
105
|
function formatConsole(metadata, colorize = true) {
|
|
91
106
|
const parts = [];
|
|
92
|
-
const
|
|
107
|
+
const timestamp = formatTimestampHuman(metadata.timestamp);
|
|
108
|
+
if (colorize) {
|
|
109
|
+
parts.push(`${COLORS.gray}[${timestamp}]${COLORS.reset}`);
|
|
110
|
+
} else {
|
|
111
|
+
parts.push(`[${timestamp}]`);
|
|
112
|
+
}
|
|
113
|
+
const pid = process.pid;
|
|
93
114
|
if (colorize) {
|
|
94
|
-
parts.push(`${COLORS.
|
|
115
|
+
parts.push(`${COLORS.dim}[pid=${pid}]${COLORS.reset}`);
|
|
95
116
|
} else {
|
|
96
|
-
parts.push(`[
|
|
117
|
+
parts.push(`[pid=${pid}]`);
|
|
97
118
|
}
|
|
98
119
|
if (metadata.module) {
|
|
99
120
|
if (colorize) {
|
|
@@ -162,6 +183,84 @@ function formatJSON(metadata) {
|
|
|
162
183
|
}
|
|
163
184
|
return JSON.stringify(obj);
|
|
164
185
|
}
|
|
186
|
+
function extractQueryInfo(error) {
|
|
187
|
+
const message = error.message;
|
|
188
|
+
if (!message) return null;
|
|
189
|
+
const result = {};
|
|
190
|
+
const queryMatch = message.match(/(?:Failed query:|Query:)\s*([^\n]+)/);
|
|
191
|
+
if (queryMatch) {
|
|
192
|
+
result.query = queryMatch[1].trim();
|
|
193
|
+
const tableMatch = result.query.match(/(?:UPDATE|INSERT INTO|DELETE FROM|FROM)\s+"?([a-zA-Z_][a-zA-Z0-9_]*)"?\."?([a-zA-Z_][a-zA-Z0-9_]*)"?|(?:UPDATE|INSERT INTO|DELETE FROM|FROM)\s+"?([a-zA-Z_][a-zA-Z0-9_]*)"?/i);
|
|
194
|
+
if (tableMatch) {
|
|
195
|
+
result.table = tableMatch[2] || tableMatch[3] || tableMatch[1];
|
|
196
|
+
}
|
|
197
|
+
}
|
|
198
|
+
const paramsMatch = message.match(/params:\s*(.+?)(?:\n|$)/);
|
|
199
|
+
if (paramsMatch) {
|
|
200
|
+
const paramsStr = paramsMatch[1].trim();
|
|
201
|
+
try {
|
|
202
|
+
result.params = paramsStr.split(",").map((p) => p.trim());
|
|
203
|
+
} catch (e) {
|
|
204
|
+
result.params = paramsStr;
|
|
205
|
+
}
|
|
206
|
+
}
|
|
207
|
+
return Object.keys(result).length > 0 ? result : null;
|
|
208
|
+
}
|
|
209
|
+
function extractPromiseContext(error) {
|
|
210
|
+
const context = {};
|
|
211
|
+
if (!error.stack) return context;
|
|
212
|
+
const stackLines = error.stack.split("\n");
|
|
213
|
+
for (let i = 1; i < stackLines.length; i++) {
|
|
214
|
+
const line = stackLines[i].trim();
|
|
215
|
+
if (line.includes("node_modules") || line.includes("node:internal")) continue;
|
|
216
|
+
const match = line.match(/at\s+(?:([a-zA-Z_$][\w$]*(?:\.[a-zA-Z_$][\w$]*)*)\s+)?\(?([^)]+):(\d+):(\d+)\)?/);
|
|
217
|
+
if (match) {
|
|
218
|
+
const [, functionName, filePath, lineNumber, columnNumber] = match;
|
|
219
|
+
const fileNameMatch = filePath.match(/([^/\\]+)$/);
|
|
220
|
+
const fileName = fileNameMatch ? fileNameMatch[1] : filePath;
|
|
221
|
+
context.file = fileName;
|
|
222
|
+
context.line = parseInt(lineNumber, 10);
|
|
223
|
+
context.column = parseInt(columnNumber, 10);
|
|
224
|
+
if (functionName) {
|
|
225
|
+
const methodMatch = functionName.match(/^(.+)\.([^.]+)$/);
|
|
226
|
+
if (methodMatch) {
|
|
227
|
+
const [, className, methodName] = methodMatch;
|
|
228
|
+
context.class = className;
|
|
229
|
+
context.method = methodName;
|
|
230
|
+
if (className.includes("Repository")) {
|
|
231
|
+
context.repository = className;
|
|
232
|
+
}
|
|
233
|
+
} else {
|
|
234
|
+
context.function = functionName;
|
|
235
|
+
}
|
|
236
|
+
}
|
|
237
|
+
break;
|
|
238
|
+
}
|
|
239
|
+
}
|
|
240
|
+
return context;
|
|
241
|
+
}
|
|
242
|
+
function formatUnhandledRejection(reason, promise) {
|
|
243
|
+
let error;
|
|
244
|
+
if (reason instanceof Error) {
|
|
245
|
+
error = reason;
|
|
246
|
+
} else if (typeof reason === "string") {
|
|
247
|
+
error = new Error(reason);
|
|
248
|
+
} else {
|
|
249
|
+
error = new Error(JSON.stringify(reason));
|
|
250
|
+
}
|
|
251
|
+
const context = {
|
|
252
|
+
promise: String(promise)
|
|
253
|
+
};
|
|
254
|
+
const promiseContext = extractPromiseContext(error);
|
|
255
|
+
if (Object.keys(promiseContext).length > 0) {
|
|
256
|
+
context.promiseContext = promiseContext;
|
|
257
|
+
}
|
|
258
|
+
const queryInfo = extractQueryInfo(error);
|
|
259
|
+
if (queryInfo) {
|
|
260
|
+
context.queryInfo = queryInfo;
|
|
261
|
+
}
|
|
262
|
+
return { error, context };
|
|
263
|
+
}
|
|
165
264
|
var SENSITIVE_KEYS, MASKED_VALUE, COLORS;
|
|
166
265
|
var init_formatters = __esm({
|
|
167
266
|
"src/logger/formatters.ts"() {
|
|
@@ -201,2813 +300,35 @@ var init_formatters = __esm({
|
|
|
201
300
|
// 로그 레벨 컬러
|
|
202
301
|
debug: "\x1B[36m",
|
|
203
302
|
// cyan
|
|
204
|
-
info: "\x1B[32m",
|
|
205
|
-
// green
|
|
206
|
-
warn: "\x1B[33m",
|
|
207
|
-
// yellow
|
|
208
|
-
error: "\x1B[31m",
|
|
209
|
-
// red
|
|
210
|
-
fatal: "\x1B[35m",
|
|
211
|
-
// magenta
|
|
212
|
-
// 추가 컬러
|
|
213
|
-
gray: "\x1B[90m"
|
|
214
|
-
};
|
|
215
|
-
}
|
|
216
|
-
});
|
|
217
|
-
|
|
218
|
-
|
|
219
|
-
|
|
220
|
-
|
|
221
|
-
|
|
222
|
-
|
|
223
|
-
|
|
224
|
-
|
|
225
|
-
|
|
226
|
-
|
|
227
|
-
|
|
228
|
-
|
|
229
|
-
|
|
230
|
-
}
|
|
231
|
-
/**
|
|
232
|
-
* Get current log level
|
|
233
|
-
*/
|
|
234
|
-
get level() {
|
|
235
|
-
return this.config.level;
|
|
236
|
-
}
|
|
237
|
-
/**
|
|
238
|
-
* Create child logger (per module)
|
|
239
|
-
*/
|
|
240
|
-
child(module) {
|
|
241
|
-
return new _Logger({
|
|
242
|
-
...this.config,
|
|
243
|
-
module
|
|
244
|
-
});
|
|
245
|
-
}
|
|
246
|
-
/**
|
|
247
|
-
* Debug log
|
|
248
|
-
*/
|
|
249
|
-
debug(message, context) {
|
|
250
|
-
this.log("debug", message, void 0, context);
|
|
251
|
-
}
|
|
252
|
-
/**
|
|
253
|
-
* Info log
|
|
254
|
-
*/
|
|
255
|
-
info(message, context) {
|
|
256
|
-
this.log("info", message, void 0, context);
|
|
257
|
-
}
|
|
258
|
-
warn(message, errorOrContext, context) {
|
|
259
|
-
if (errorOrContext instanceof Error) {
|
|
260
|
-
this.log("warn", message, errorOrContext, context);
|
|
261
|
-
} else {
|
|
262
|
-
this.log("warn", message, void 0, errorOrContext);
|
|
263
|
-
}
|
|
264
|
-
}
|
|
265
|
-
error(message, errorOrContext, context) {
|
|
266
|
-
if (errorOrContext instanceof Error) {
|
|
267
|
-
this.log("error", message, errorOrContext, context);
|
|
268
|
-
} else {
|
|
269
|
-
this.log("error", message, void 0, errorOrContext);
|
|
270
|
-
}
|
|
271
|
-
}
|
|
272
|
-
fatal(message, errorOrContext, context) {
|
|
273
|
-
if (errorOrContext instanceof Error) {
|
|
274
|
-
this.log("fatal", message, errorOrContext, context);
|
|
275
|
-
} else {
|
|
276
|
-
this.log("fatal", message, void 0, errorOrContext);
|
|
277
|
-
}
|
|
278
|
-
}
|
|
279
|
-
/**
|
|
280
|
-
* Log processing (internal)
|
|
281
|
-
*/
|
|
282
|
-
log(level, message, error, context) {
|
|
283
|
-
if (LOG_LEVEL_PRIORITY[level] < LOG_LEVEL_PRIORITY[this.config.level]) {
|
|
284
|
-
return;
|
|
285
|
-
}
|
|
286
|
-
const metadata = {
|
|
287
|
-
timestamp: /* @__PURE__ */ new Date(),
|
|
288
|
-
level,
|
|
289
|
-
message,
|
|
290
|
-
module: this.module,
|
|
291
|
-
error,
|
|
292
|
-
// Mask sensitive information in context to prevent credential leaks
|
|
293
|
-
context: context ? maskSensitiveData(context) : void 0
|
|
294
|
-
};
|
|
295
|
-
this.processTransports(metadata);
|
|
296
|
-
}
|
|
297
|
-
/**
|
|
298
|
-
* Process Transports
|
|
299
|
-
*/
|
|
300
|
-
processTransports(metadata) {
|
|
301
|
-
const promises = this.config.transports.filter((transport) => transport.enabled).map((transport) => this.safeTransportLog(transport, metadata));
|
|
302
|
-
Promise.all(promises).catch((error) => {
|
|
303
|
-
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
304
|
-
process.stderr.write(`[Logger] Transport error: ${errorMessage}
|
|
305
|
-
`);
|
|
306
|
-
});
|
|
307
|
-
}
|
|
308
|
-
/**
|
|
309
|
-
* Transport log (error-safe)
|
|
310
|
-
*/
|
|
311
|
-
async safeTransportLog(transport, metadata) {
|
|
312
|
-
try {
|
|
313
|
-
await transport.log(metadata);
|
|
314
|
-
} catch (error) {
|
|
315
|
-
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
316
|
-
process.stderr.write(`[Logger] Transport "${transport.name}" failed: ${errorMessage}
|
|
317
|
-
`);
|
|
318
|
-
}
|
|
319
|
-
}
|
|
320
|
-
/**
|
|
321
|
-
* Close all Transports
|
|
322
|
-
*/
|
|
323
|
-
async close() {
|
|
324
|
-
const closePromises = this.config.transports.filter((transport) => transport.close).map((transport) => transport.close());
|
|
325
|
-
await Promise.all(closePromises);
|
|
326
|
-
}
|
|
327
|
-
};
|
|
328
|
-
}
|
|
329
|
-
});
|
|
330
|
-
|
|
331
|
-
// src/logger/transports/console.ts
|
|
332
|
-
var ConsoleTransport;
|
|
333
|
-
var init_console = __esm({
|
|
334
|
-
"src/logger/transports/console.ts"() {
|
|
335
|
-
init_types();
|
|
336
|
-
init_formatters();
|
|
337
|
-
ConsoleTransport = class {
|
|
338
|
-
name = "console";
|
|
339
|
-
level;
|
|
340
|
-
enabled;
|
|
341
|
-
colorize;
|
|
342
|
-
constructor(config) {
|
|
343
|
-
this.level = config.level;
|
|
344
|
-
this.enabled = config.enabled;
|
|
345
|
-
this.colorize = config.colorize ?? true;
|
|
346
|
-
}
|
|
347
|
-
async log(metadata) {
|
|
348
|
-
if (!this.enabled) {
|
|
349
|
-
return;
|
|
350
|
-
}
|
|
351
|
-
if (LOG_LEVEL_PRIORITY[metadata.level] < LOG_LEVEL_PRIORITY[this.level]) {
|
|
352
|
-
return;
|
|
353
|
-
}
|
|
354
|
-
const message = formatConsole(metadata, this.colorize);
|
|
355
|
-
if (metadata.level === "warn" || metadata.level === "error" || metadata.level === "fatal") {
|
|
356
|
-
console.error(message);
|
|
357
|
-
} else {
|
|
358
|
-
console.log(message);
|
|
359
|
-
}
|
|
360
|
-
}
|
|
361
|
-
};
|
|
362
|
-
}
|
|
363
|
-
});
|
|
364
|
-
var FileTransport;
|
|
365
|
-
var init_file = __esm({
|
|
366
|
-
"src/logger/transports/file.ts"() {
|
|
367
|
-
init_types();
|
|
368
|
-
init_formatters();
|
|
369
|
-
FileTransport = class {
|
|
370
|
-
name = "file";
|
|
371
|
-
level;
|
|
372
|
-
enabled;
|
|
373
|
-
logDir;
|
|
374
|
-
maxFileSize;
|
|
375
|
-
maxFiles;
|
|
376
|
-
currentStream = null;
|
|
377
|
-
currentFilename = null;
|
|
378
|
-
constructor(config) {
|
|
379
|
-
this.level = config.level;
|
|
380
|
-
this.enabled = config.enabled;
|
|
381
|
-
this.logDir = config.logDir;
|
|
382
|
-
this.maxFileSize = config.maxFileSize ?? 10 * 1024 * 1024;
|
|
383
|
-
this.maxFiles = config.maxFiles ?? 10;
|
|
384
|
-
if (!existsSync(this.logDir)) {
|
|
385
|
-
mkdirSync(this.logDir, { recursive: true });
|
|
386
|
-
}
|
|
387
|
-
}
|
|
388
|
-
async log(metadata) {
|
|
389
|
-
if (!this.enabled) {
|
|
390
|
-
return;
|
|
391
|
-
}
|
|
392
|
-
if (LOG_LEVEL_PRIORITY[metadata.level] < LOG_LEVEL_PRIORITY[this.level]) {
|
|
393
|
-
return;
|
|
394
|
-
}
|
|
395
|
-
const message = formatJSON(metadata);
|
|
396
|
-
const filename = this.getLogFilename(metadata.timestamp);
|
|
397
|
-
if (this.currentFilename !== filename) {
|
|
398
|
-
await this.rotateStream(filename);
|
|
399
|
-
await this.cleanOldFiles();
|
|
400
|
-
} else if (this.currentFilename) {
|
|
401
|
-
await this.checkAndRotateBySize();
|
|
402
|
-
}
|
|
403
|
-
if (this.currentStream) {
|
|
404
|
-
return new Promise((resolve, reject) => {
|
|
405
|
-
this.currentStream.write(message + "\n", "utf-8", (error) => {
|
|
406
|
-
if (error) {
|
|
407
|
-
process.stderr.write(`[FileTransport] Failed to write log: ${error.message}
|
|
408
|
-
`);
|
|
409
|
-
reject(error);
|
|
410
|
-
} else {
|
|
411
|
-
resolve();
|
|
412
|
-
}
|
|
413
|
-
});
|
|
414
|
-
});
|
|
415
|
-
}
|
|
416
|
-
}
|
|
417
|
-
/**
|
|
418
|
-
* 스트림 교체 (날짜 변경 시)
|
|
419
|
-
*/
|
|
420
|
-
async rotateStream(filename) {
|
|
421
|
-
if (this.currentStream) {
|
|
422
|
-
await this.closeStream();
|
|
423
|
-
}
|
|
424
|
-
const filepath = join(this.logDir, filename);
|
|
425
|
-
this.currentStream = createWriteStream(filepath, {
|
|
426
|
-
flags: "a",
|
|
427
|
-
// append mode
|
|
428
|
-
encoding: "utf-8"
|
|
429
|
-
});
|
|
430
|
-
this.currentFilename = filename;
|
|
431
|
-
this.currentStream.on("error", (error) => {
|
|
432
|
-
process.stderr.write(`[FileTransport] Stream error: ${error.message}
|
|
433
|
-
`);
|
|
434
|
-
this.currentStream = null;
|
|
435
|
-
this.currentFilename = null;
|
|
436
|
-
});
|
|
437
|
-
}
|
|
438
|
-
/**
|
|
439
|
-
* 현재 스트림 닫기
|
|
440
|
-
*/
|
|
441
|
-
async closeStream() {
|
|
442
|
-
if (!this.currentStream) {
|
|
443
|
-
return;
|
|
444
|
-
}
|
|
445
|
-
return new Promise((resolve, reject) => {
|
|
446
|
-
this.currentStream.end((error) => {
|
|
447
|
-
if (error) {
|
|
448
|
-
reject(error);
|
|
449
|
-
} else {
|
|
450
|
-
this.currentStream = null;
|
|
451
|
-
this.currentFilename = null;
|
|
452
|
-
resolve();
|
|
453
|
-
}
|
|
454
|
-
});
|
|
455
|
-
});
|
|
456
|
-
}
|
|
457
|
-
/**
|
|
458
|
-
* 파일 크기 체크 및 크기 기반 로테이션
|
|
459
|
-
*/
|
|
460
|
-
async checkAndRotateBySize() {
|
|
461
|
-
if (!this.currentFilename) {
|
|
462
|
-
return;
|
|
463
|
-
}
|
|
464
|
-
const filepath = join(this.logDir, this.currentFilename);
|
|
465
|
-
if (!existsSync(filepath)) {
|
|
466
|
-
return;
|
|
467
|
-
}
|
|
468
|
-
try {
|
|
469
|
-
const stats = statSync(filepath);
|
|
470
|
-
if (stats.size >= this.maxFileSize) {
|
|
471
|
-
await this.rotateBySize();
|
|
472
|
-
}
|
|
473
|
-
} catch (error) {
|
|
474
|
-
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
475
|
-
process.stderr.write(`[FileTransport] Failed to check file size: ${errorMessage}
|
|
476
|
-
`);
|
|
477
|
-
}
|
|
478
|
-
}
|
|
479
|
-
/**
|
|
480
|
-
* 크기 기반 로테이션 수행
|
|
481
|
-
* 예: 2025-01-01.log -> 2025-01-01.1.log, 2025-01-01.1.log -> 2025-01-01.2.log
|
|
482
|
-
*/
|
|
483
|
-
async rotateBySize() {
|
|
484
|
-
if (!this.currentFilename) {
|
|
485
|
-
return;
|
|
486
|
-
}
|
|
487
|
-
await this.closeStream();
|
|
488
|
-
const baseName = this.currentFilename.replace(/\.log$/, "");
|
|
489
|
-
const files = readdirSync(this.logDir);
|
|
490
|
-
const relatedFiles = files.filter((file) => file.startsWith(baseName) && file.endsWith(".log")).sort().reverse();
|
|
491
|
-
for (const file of relatedFiles) {
|
|
492
|
-
const match = file.match(/\.(\d+)\.log$/);
|
|
493
|
-
if (match) {
|
|
494
|
-
const oldNum = parseInt(match[1], 10);
|
|
495
|
-
const newNum = oldNum + 1;
|
|
496
|
-
const oldPath = join(this.logDir, file);
|
|
497
|
-
const newPath2 = join(this.logDir, `${baseName}.${newNum}.log`);
|
|
498
|
-
try {
|
|
499
|
-
renameSync(oldPath, newPath2);
|
|
500
|
-
} catch (error) {
|
|
501
|
-
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
502
|
-
process.stderr.write(`[FileTransport] Failed to rotate file: ${errorMessage}
|
|
503
|
-
`);
|
|
504
|
-
}
|
|
505
|
-
}
|
|
506
|
-
}
|
|
507
|
-
const currentPath = join(this.logDir, this.currentFilename);
|
|
508
|
-
const newPath = join(this.logDir, `${baseName}.1.log`);
|
|
509
|
-
try {
|
|
510
|
-
if (existsSync(currentPath)) {
|
|
511
|
-
renameSync(currentPath, newPath);
|
|
512
|
-
}
|
|
513
|
-
} catch (error) {
|
|
514
|
-
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
515
|
-
process.stderr.write(`[FileTransport] Failed to rotate current file: ${errorMessage}
|
|
516
|
-
`);
|
|
517
|
-
}
|
|
518
|
-
await this.rotateStream(this.currentFilename);
|
|
519
|
-
}
|
|
520
|
-
/**
|
|
521
|
-
* 오래된 로그 파일 정리
|
|
522
|
-
* maxFiles 개수를 초과하는 로그 파일 삭제
|
|
523
|
-
*/
|
|
524
|
-
async cleanOldFiles() {
|
|
525
|
-
try {
|
|
526
|
-
if (!existsSync(this.logDir)) {
|
|
527
|
-
return;
|
|
528
|
-
}
|
|
529
|
-
const files = readdirSync(this.logDir);
|
|
530
|
-
const logFiles = files.filter((file) => file.endsWith(".log")).map((file) => {
|
|
531
|
-
const filepath = join(this.logDir, file);
|
|
532
|
-
const stats = statSync(filepath);
|
|
533
|
-
return { file, mtime: stats.mtime };
|
|
534
|
-
}).sort((a, b) => b.mtime.getTime() - a.mtime.getTime());
|
|
535
|
-
if (logFiles.length > this.maxFiles) {
|
|
536
|
-
const filesToDelete = logFiles.slice(this.maxFiles);
|
|
537
|
-
for (const { file } of filesToDelete) {
|
|
538
|
-
const filepath = join(this.logDir, file);
|
|
539
|
-
try {
|
|
540
|
-
unlinkSync(filepath);
|
|
541
|
-
} catch (error) {
|
|
542
|
-
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
543
|
-
process.stderr.write(`[FileTransport] Failed to delete old file "${file}": ${errorMessage}
|
|
544
|
-
`);
|
|
545
|
-
}
|
|
546
|
-
}
|
|
547
|
-
}
|
|
548
|
-
} catch (error) {
|
|
549
|
-
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
550
|
-
process.stderr.write(`[FileTransport] Failed to clean old files: ${errorMessage}
|
|
551
|
-
`);
|
|
552
|
-
}
|
|
553
|
-
}
|
|
554
|
-
/**
|
|
555
|
-
* 날짜별 로그 파일명 생성
|
|
556
|
-
*/
|
|
557
|
-
getLogFilename(date) {
|
|
558
|
-
const year = date.getFullYear();
|
|
559
|
-
const month = String(date.getMonth() + 1).padStart(2, "0");
|
|
560
|
-
const day = String(date.getDate()).padStart(2, "0");
|
|
561
|
-
return `${year}-${month}-${day}.log`;
|
|
562
|
-
}
|
|
563
|
-
async close() {
|
|
564
|
-
await this.closeStream();
|
|
565
|
-
}
|
|
566
|
-
};
|
|
567
|
-
}
|
|
568
|
-
});
|
|
569
|
-
function isFileLoggingEnabled() {
|
|
570
|
-
return process.env.LOGGER_FILE_ENABLED === "true";
|
|
571
|
-
}
|
|
572
|
-
function getDefaultLogLevel() {
|
|
573
|
-
const isProduction = process.env.NODE_ENV === "production";
|
|
574
|
-
const isDevelopment = process.env.NODE_ENV === "development";
|
|
575
|
-
if (isDevelopment) {
|
|
576
|
-
return "debug";
|
|
577
|
-
}
|
|
578
|
-
if (isProduction) {
|
|
579
|
-
return "info";
|
|
580
|
-
}
|
|
581
|
-
return "warn";
|
|
582
|
-
}
|
|
583
|
-
function getConsoleConfig() {
|
|
584
|
-
const isProduction = process.env.NODE_ENV === "production";
|
|
585
|
-
return {
|
|
586
|
-
level: "debug",
|
|
587
|
-
enabled: true,
|
|
588
|
-
colorize: !isProduction
|
|
589
|
-
// Dev: colored output, Production: plain text
|
|
590
|
-
};
|
|
591
|
-
}
|
|
592
|
-
function getFileConfig() {
|
|
593
|
-
const isProduction = process.env.NODE_ENV === "production";
|
|
594
|
-
return {
|
|
595
|
-
level: "info",
|
|
596
|
-
enabled: isProduction,
|
|
597
|
-
// File logging in production only
|
|
598
|
-
logDir: process.env.LOG_DIR || "./logs",
|
|
599
|
-
maxFileSize: 10 * 1024 * 1024,
|
|
600
|
-
// 10MB
|
|
601
|
-
maxFiles: 10
|
|
602
|
-
};
|
|
603
|
-
}
|
|
604
|
-
function validateDirectoryWritable(dirPath) {
|
|
605
|
-
if (!existsSync(dirPath)) {
|
|
606
|
-
try {
|
|
607
|
-
mkdirSync(dirPath, { recursive: true });
|
|
608
|
-
} catch (error) {
|
|
609
|
-
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
610
|
-
throw new Error(`Failed to create log directory "${dirPath}": ${errorMessage}`);
|
|
611
|
-
}
|
|
612
|
-
}
|
|
613
|
-
try {
|
|
614
|
-
accessSync(dirPath, constants.W_OK);
|
|
615
|
-
} catch {
|
|
616
|
-
throw new Error(`Log directory "${dirPath}" is not writable. Please check permissions.`);
|
|
617
|
-
}
|
|
618
|
-
const testFile = join(dirPath, ".logger-write-test");
|
|
619
|
-
try {
|
|
620
|
-
writeFileSync(testFile, "test", "utf-8");
|
|
621
|
-
unlinkSync(testFile);
|
|
622
|
-
} catch (error) {
|
|
623
|
-
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
624
|
-
throw new Error(`Cannot write to log directory "${dirPath}": ${errorMessage}`);
|
|
625
|
-
}
|
|
626
|
-
}
|
|
627
|
-
function validateFileConfig() {
|
|
628
|
-
if (!isFileLoggingEnabled()) {
|
|
629
|
-
return;
|
|
630
|
-
}
|
|
631
|
-
const logDir = process.env.LOG_DIR;
|
|
632
|
-
if (!logDir) {
|
|
633
|
-
throw new Error(
|
|
634
|
-
"LOG_DIR environment variable is required when LOGGER_FILE_ENABLED=true. Example: LOG_DIR=/var/log/myapp"
|
|
635
|
-
);
|
|
636
|
-
}
|
|
637
|
-
validateDirectoryWritable(logDir);
|
|
638
|
-
}
|
|
639
|
-
function validateSlackConfig() {
|
|
640
|
-
const webhookUrl = process.env.SLACK_WEBHOOK_URL;
|
|
641
|
-
if (!webhookUrl) {
|
|
642
|
-
return;
|
|
643
|
-
}
|
|
644
|
-
if (!webhookUrl.startsWith("https://hooks.slack.com/")) {
|
|
645
|
-
throw new Error(
|
|
646
|
-
`Invalid SLACK_WEBHOOK_URL: "${webhookUrl}". Slack webhook URLs must start with "https://hooks.slack.com/"`
|
|
647
|
-
);
|
|
648
|
-
}
|
|
649
|
-
}
|
|
650
|
-
function validateEmailConfig() {
|
|
651
|
-
const smtpHost = process.env.SMTP_HOST;
|
|
652
|
-
const smtpPort = process.env.SMTP_PORT;
|
|
653
|
-
const emailFrom = process.env.EMAIL_FROM;
|
|
654
|
-
const emailTo = process.env.EMAIL_TO;
|
|
655
|
-
const hasAnyEmailConfig = smtpHost || smtpPort || emailFrom || emailTo;
|
|
656
|
-
if (!hasAnyEmailConfig) {
|
|
657
|
-
return;
|
|
658
|
-
}
|
|
659
|
-
const missingFields = [];
|
|
660
|
-
if (!smtpHost) missingFields.push("SMTP_HOST");
|
|
661
|
-
if (!smtpPort) missingFields.push("SMTP_PORT");
|
|
662
|
-
if (!emailFrom) missingFields.push("EMAIL_FROM");
|
|
663
|
-
if (!emailTo) missingFields.push("EMAIL_TO");
|
|
664
|
-
if (missingFields.length > 0) {
|
|
665
|
-
throw new Error(
|
|
666
|
-
`Email transport configuration incomplete. Missing: ${missingFields.join(", ")}. Either set all required fields or remove all email configuration.`
|
|
667
|
-
);
|
|
668
|
-
}
|
|
669
|
-
const port = parseInt(smtpPort, 10);
|
|
670
|
-
if (isNaN(port) || port < 1 || port > 65535) {
|
|
671
|
-
throw new Error(
|
|
672
|
-
`Invalid SMTP_PORT: "${smtpPort}". Must be a number between 1 and 65535.`
|
|
673
|
-
);
|
|
674
|
-
}
|
|
675
|
-
const emailRegex = /^[^\s@]+@[^\s@]+\.[^\s@]+$/;
|
|
676
|
-
if (!emailRegex.test(emailFrom)) {
|
|
677
|
-
throw new Error(`Invalid EMAIL_FROM format: "${emailFrom}"`);
|
|
678
|
-
}
|
|
679
|
-
const recipients = emailTo.split(",").map((e) => e.trim());
|
|
680
|
-
for (const email of recipients) {
|
|
681
|
-
if (!emailRegex.test(email)) {
|
|
682
|
-
throw new Error(`Invalid email address in EMAIL_TO: "${email}"`);
|
|
683
|
-
}
|
|
684
|
-
}
|
|
685
|
-
}
|
|
686
|
-
function validateEnvironment() {
|
|
687
|
-
const nodeEnv = process.env.NODE_ENV;
|
|
688
|
-
if (!nodeEnv) {
|
|
689
|
-
process.stderr.write(
|
|
690
|
-
"[Logger] Warning: NODE_ENV is not set. Defaulting to test environment.\n"
|
|
691
|
-
);
|
|
692
|
-
}
|
|
693
|
-
}
|
|
694
|
-
function validateConfig() {
|
|
695
|
-
try {
|
|
696
|
-
validateEnvironment();
|
|
697
|
-
validateFileConfig();
|
|
698
|
-
validateSlackConfig();
|
|
699
|
-
validateEmailConfig();
|
|
700
|
-
} catch (error) {
|
|
701
|
-
if (error instanceof Error) {
|
|
702
|
-
throw new Error(`[Logger] Configuration validation failed: ${error.message}`);
|
|
703
|
-
}
|
|
704
|
-
throw error;
|
|
705
|
-
}
|
|
706
|
-
}
|
|
707
|
-
var init_config = __esm({
|
|
708
|
-
"src/logger/config.ts"() {
|
|
709
|
-
}
|
|
710
|
-
});
|
|
711
|
-
|
|
712
|
-
// src/logger/factory.ts
|
|
713
|
-
function initializeTransports() {
|
|
714
|
-
const transports = [];
|
|
715
|
-
const consoleConfig = getConsoleConfig();
|
|
716
|
-
transports.push(new ConsoleTransport(consoleConfig));
|
|
717
|
-
const fileConfig = getFileConfig();
|
|
718
|
-
if (fileConfig.enabled) {
|
|
719
|
-
transports.push(new FileTransport(fileConfig));
|
|
720
|
-
}
|
|
721
|
-
return transports;
|
|
722
|
-
}
|
|
723
|
-
function initializeLogger() {
|
|
724
|
-
validateConfig();
|
|
725
|
-
return new Logger({
|
|
726
|
-
level: getDefaultLogLevel(),
|
|
727
|
-
transports: initializeTransports()
|
|
728
|
-
});
|
|
729
|
-
}
|
|
730
|
-
var logger;
|
|
731
|
-
var init_factory = __esm({
|
|
732
|
-
"src/logger/factory.ts"() {
|
|
733
|
-
init_logger();
|
|
734
|
-
init_console();
|
|
735
|
-
init_file();
|
|
736
|
-
init_config();
|
|
737
|
-
logger = initializeLogger();
|
|
738
|
-
}
|
|
739
|
-
});
|
|
740
|
-
|
|
741
|
-
// src/logger/index.ts
|
|
742
|
-
var init_logger2 = __esm({
|
|
743
|
-
"src/logger/index.ts"() {
|
|
744
|
-
init_factory();
|
|
745
|
-
init_logger();
|
|
746
|
-
}
|
|
747
|
-
});
|
|
748
|
-
|
|
749
|
-
// src/route/function-routes.ts
|
|
750
|
-
var function_routes_exports = {};
|
|
751
|
-
__export(function_routes_exports, {
|
|
752
|
-
discoverFunctionRoutes: () => discoverFunctionRoutes
|
|
753
|
-
});
|
|
754
|
-
function discoverFunctionRoutes(cwd = process.cwd()) {
|
|
755
|
-
const functions = [];
|
|
756
|
-
const nodeModulesPath = join(cwd, "node_modules");
|
|
757
|
-
try {
|
|
758
|
-
const projectPkgPath = join(cwd, "package.json");
|
|
759
|
-
const projectPkg = JSON.parse(readFileSync(projectPkgPath, "utf-8"));
|
|
760
|
-
const dependencies = {
|
|
761
|
-
...projectPkg.dependencies,
|
|
762
|
-
...projectPkg.devDependencies
|
|
763
|
-
};
|
|
764
|
-
for (const [packageName] of Object.entries(dependencies)) {
|
|
765
|
-
if (!packageName.startsWith("@spfn/") && !packageName.startsWith("spfn-")) {
|
|
766
|
-
continue;
|
|
767
|
-
}
|
|
768
|
-
try {
|
|
769
|
-
const pkgPath = join(nodeModulesPath, ...packageName.split("/"), "package.json");
|
|
770
|
-
const pkg = JSON.parse(readFileSync(pkgPath, "utf-8"));
|
|
771
|
-
if (pkg.spfn?.routes?.dir) {
|
|
772
|
-
const { dir } = pkg.spfn.routes;
|
|
773
|
-
const prefix = pkg.spfn.prefix;
|
|
774
|
-
const packagePath = dirname(pkgPath);
|
|
775
|
-
const routesDir = join(packagePath, dir);
|
|
776
|
-
functions.push({
|
|
777
|
-
packageName,
|
|
778
|
-
routesDir,
|
|
779
|
-
packagePath,
|
|
780
|
-
prefix
|
|
781
|
-
// Include prefix in function info
|
|
782
|
-
});
|
|
783
|
-
routeLogger.debug("Discovered function routes", {
|
|
784
|
-
package: packageName,
|
|
785
|
-
dir,
|
|
786
|
-
prefix: prefix || "(none)"
|
|
787
|
-
});
|
|
788
|
-
}
|
|
789
|
-
} catch (error) {
|
|
790
|
-
}
|
|
791
|
-
}
|
|
792
|
-
} catch (error) {
|
|
793
|
-
routeLogger.warn("Failed to discover function routes", {
|
|
794
|
-
error: error instanceof Error ? error.message : "Unknown error"
|
|
795
|
-
});
|
|
796
|
-
}
|
|
797
|
-
return functions;
|
|
798
|
-
}
|
|
799
|
-
var routeLogger;
|
|
800
|
-
var init_function_routes = __esm({
|
|
801
|
-
"src/route/function-routes.ts"() {
|
|
802
|
-
init_logger2();
|
|
803
|
-
routeLogger = logger.child("function-routes");
|
|
804
|
-
}
|
|
805
|
-
});
|
|
806
|
-
|
|
807
|
-
// src/errors/database-errors.ts
|
|
808
|
-
var DatabaseError, ConnectionError, QueryError, ConstraintViolationError, TransactionError, DeadlockError, DuplicateEntryError;
|
|
809
|
-
var init_database_errors = __esm({
|
|
810
|
-
"src/errors/database-errors.ts"() {
|
|
811
|
-
DatabaseError = class extends Error {
|
|
812
|
-
statusCode;
|
|
813
|
-
details;
|
|
814
|
-
timestamp;
|
|
815
|
-
constructor(message, statusCode = 500, details) {
|
|
816
|
-
super(message);
|
|
817
|
-
this.name = "DatabaseError";
|
|
818
|
-
this.statusCode = statusCode;
|
|
819
|
-
this.details = details;
|
|
820
|
-
this.timestamp = /* @__PURE__ */ new Date();
|
|
821
|
-
Error.captureStackTrace(this, this.constructor);
|
|
822
|
-
}
|
|
823
|
-
/**
|
|
824
|
-
* Serialize error for API response
|
|
825
|
-
*/
|
|
826
|
-
toJSON() {
|
|
827
|
-
return {
|
|
828
|
-
name: this.name,
|
|
829
|
-
message: this.message,
|
|
830
|
-
statusCode: this.statusCode,
|
|
831
|
-
details: this.details,
|
|
832
|
-
timestamp: this.timestamp.toISOString()
|
|
833
|
-
};
|
|
834
|
-
}
|
|
835
|
-
};
|
|
836
|
-
ConnectionError = class extends DatabaseError {
|
|
837
|
-
constructor(message, details) {
|
|
838
|
-
super(message, 503, details);
|
|
839
|
-
this.name = "ConnectionError";
|
|
840
|
-
}
|
|
841
|
-
};
|
|
842
|
-
QueryError = class extends DatabaseError {
|
|
843
|
-
constructor(message, statusCode = 500, details) {
|
|
844
|
-
super(message, statusCode, details);
|
|
845
|
-
this.name = "QueryError";
|
|
846
|
-
}
|
|
847
|
-
};
|
|
848
|
-
ConstraintViolationError = class extends QueryError {
|
|
849
|
-
constructor(message, details) {
|
|
850
|
-
super(message, 400, details);
|
|
851
|
-
this.name = "ConstraintViolationError";
|
|
852
|
-
}
|
|
853
|
-
};
|
|
854
|
-
TransactionError = class extends DatabaseError {
|
|
855
|
-
constructor(message, statusCode = 500, details) {
|
|
856
|
-
super(message, statusCode, details);
|
|
857
|
-
this.name = "TransactionError";
|
|
858
|
-
}
|
|
859
|
-
};
|
|
860
|
-
DeadlockError = class extends TransactionError {
|
|
861
|
-
constructor(message, details) {
|
|
862
|
-
super(message, 409, details);
|
|
863
|
-
this.name = "DeadlockError";
|
|
864
|
-
}
|
|
865
|
-
};
|
|
866
|
-
DuplicateEntryError = class extends QueryError {
|
|
867
|
-
constructor(field, value) {
|
|
868
|
-
super(`${field} '${value}' already exists`, 409, { field, value });
|
|
869
|
-
this.name = "DuplicateEntryError";
|
|
870
|
-
}
|
|
871
|
-
};
|
|
872
|
-
}
|
|
873
|
-
});
|
|
874
|
-
|
|
875
|
-
// src/errors/http-errors.ts
|
|
876
|
-
var init_http_errors = __esm({
|
|
877
|
-
"src/errors/http-errors.ts"() {
|
|
878
|
-
}
|
|
879
|
-
});
|
|
880
|
-
|
|
881
|
-
// src/errors/error-utils.ts
|
|
882
|
-
var init_error_utils = __esm({
|
|
883
|
-
"src/errors/error-utils.ts"() {
|
|
884
|
-
init_database_errors();
|
|
885
|
-
init_http_errors();
|
|
886
|
-
}
|
|
887
|
-
});
|
|
888
|
-
|
|
889
|
-
// src/errors/index.ts
|
|
890
|
-
var init_errors = __esm({
|
|
891
|
-
"src/errors/index.ts"() {
|
|
892
|
-
init_database_errors();
|
|
893
|
-
init_http_errors();
|
|
894
|
-
init_error_utils();
|
|
895
|
-
}
|
|
896
|
-
});
|
|
897
|
-
|
|
898
|
-
// src/env/config.ts
|
|
899
|
-
var ENV_FILE_PRIORITY, TEST_ONLY_FILES;
|
|
900
|
-
var init_config2 = __esm({
|
|
901
|
-
"src/env/config.ts"() {
|
|
902
|
-
ENV_FILE_PRIORITY = [
|
|
903
|
-
".env",
|
|
904
|
-
// Base configuration (lowest priority)
|
|
905
|
-
".env.{NODE_ENV}",
|
|
906
|
-
// Environment-specific
|
|
907
|
-
".env.local",
|
|
908
|
-
// Local overrides (excluded in test)
|
|
909
|
-
".env.{NODE_ENV}.local"
|
|
910
|
-
// Local environment-specific (highest priority)
|
|
911
|
-
];
|
|
912
|
-
TEST_ONLY_FILES = [
|
|
913
|
-
".env.test",
|
|
914
|
-
".env.test.local"
|
|
915
|
-
];
|
|
916
|
-
}
|
|
917
|
-
});
|
|
918
|
-
function buildFileList(basePath, nodeEnv) {
|
|
919
|
-
const files = [];
|
|
920
|
-
if (!nodeEnv) {
|
|
921
|
-
files.push(join(basePath, ".env"));
|
|
922
|
-
files.push(join(basePath, ".env.local"));
|
|
923
|
-
return files;
|
|
924
|
-
}
|
|
925
|
-
for (const pattern of ENV_FILE_PRIORITY) {
|
|
926
|
-
const fileName = pattern.replace("{NODE_ENV}", nodeEnv);
|
|
927
|
-
if (nodeEnv === "test" && fileName === ".env.local") {
|
|
928
|
-
continue;
|
|
929
|
-
}
|
|
930
|
-
if (nodeEnv === "local" && pattern === ".env.local") {
|
|
931
|
-
continue;
|
|
932
|
-
}
|
|
933
|
-
if (nodeEnv !== "test" && TEST_ONLY_FILES.includes(fileName)) {
|
|
934
|
-
continue;
|
|
935
|
-
}
|
|
936
|
-
files.push(join(basePath, fileName));
|
|
937
|
-
}
|
|
938
|
-
return files;
|
|
939
|
-
}
|
|
940
|
-
function loadSingleFile(filePath, debug) {
|
|
941
|
-
if (!existsSync(filePath)) {
|
|
942
|
-
if (debug) {
|
|
943
|
-
envLogger.debug("Environment file not found (optional)", {
|
|
944
|
-
path: filePath
|
|
945
|
-
});
|
|
946
|
-
}
|
|
947
|
-
return { success: false, parsed: {}, error: "File not found" };
|
|
948
|
-
}
|
|
949
|
-
try {
|
|
950
|
-
const result = config({ path: filePath });
|
|
951
|
-
if (result.error) {
|
|
952
|
-
envLogger.warn("Failed to parse environment file", {
|
|
953
|
-
path: filePath,
|
|
954
|
-
error: result.error.message
|
|
955
|
-
});
|
|
956
|
-
return {
|
|
957
|
-
success: false,
|
|
958
|
-
parsed: {},
|
|
959
|
-
error: result.error.message
|
|
960
|
-
};
|
|
961
|
-
}
|
|
962
|
-
const parsed = result.parsed || {};
|
|
963
|
-
if (debug) {
|
|
964
|
-
envLogger.debug("Environment file loaded successfully", {
|
|
965
|
-
path: filePath,
|
|
966
|
-
variables: Object.keys(parsed),
|
|
967
|
-
count: Object.keys(parsed).length
|
|
968
|
-
});
|
|
969
|
-
}
|
|
970
|
-
return { success: true, parsed };
|
|
971
|
-
} catch (error) {
|
|
972
|
-
const message = error instanceof Error ? error.message : "Unknown error";
|
|
973
|
-
envLogger.error("Error loading environment file", {
|
|
974
|
-
path: filePath,
|
|
975
|
-
error: message
|
|
976
|
-
});
|
|
977
|
-
return { success: false, parsed: {}, error: message };
|
|
978
|
-
}
|
|
979
|
-
}
|
|
980
|
-
function validateRequiredVars(required, debug) {
|
|
981
|
-
const missing = [];
|
|
982
|
-
for (const varName of required) {
|
|
983
|
-
if (!process.env[varName]) {
|
|
984
|
-
missing.push(varName);
|
|
985
|
-
}
|
|
986
|
-
}
|
|
987
|
-
if (missing.length > 0) {
|
|
988
|
-
const error = `Required environment variables missing: ${missing.join(", ")}`;
|
|
989
|
-
envLogger.error("Environment validation failed", {
|
|
990
|
-
missing,
|
|
991
|
-
required
|
|
992
|
-
});
|
|
993
|
-
throw new Error(error);
|
|
994
|
-
}
|
|
995
|
-
if (debug) {
|
|
996
|
-
envLogger.debug("Required environment variables validated", {
|
|
997
|
-
required,
|
|
998
|
-
allPresent: true
|
|
999
|
-
});
|
|
1000
|
-
}
|
|
1001
|
-
}
|
|
1002
|
-
function loadEnvironment(options = {}) {
|
|
1003
|
-
const {
|
|
1004
|
-
basePath = process.cwd(),
|
|
1005
|
-
customPaths = [],
|
|
1006
|
-
debug = false,
|
|
1007
|
-
nodeEnv = process.env.NODE_ENV || "",
|
|
1008
|
-
required = [],
|
|
1009
|
-
useCache = true
|
|
1010
|
-
} = options;
|
|
1011
|
-
if (useCache && environmentLoaded && cachedLoadResult) {
|
|
1012
|
-
if (debug) {
|
|
1013
|
-
envLogger.debug("Returning cached environment", {
|
|
1014
|
-
loaded: cachedLoadResult.loaded.length,
|
|
1015
|
-
variables: Object.keys(cachedLoadResult.parsed).length
|
|
1016
|
-
});
|
|
1017
|
-
}
|
|
1018
|
-
return cachedLoadResult;
|
|
1019
|
-
}
|
|
1020
|
-
if (debug) {
|
|
1021
|
-
envLogger.debug("Loading environment variables", {
|
|
1022
|
-
basePath,
|
|
1023
|
-
nodeEnv,
|
|
1024
|
-
customPaths,
|
|
1025
|
-
required
|
|
1026
|
-
});
|
|
1027
|
-
}
|
|
1028
|
-
const result = {
|
|
1029
|
-
success: true,
|
|
1030
|
-
loaded: [],
|
|
1031
|
-
failed: [],
|
|
1032
|
-
parsed: {},
|
|
1033
|
-
warnings: []
|
|
1034
|
-
};
|
|
1035
|
-
const standardFiles = buildFileList(basePath, nodeEnv);
|
|
1036
|
-
const allFiles = [...standardFiles, ...customPaths];
|
|
1037
|
-
if (debug) {
|
|
1038
|
-
envLogger.debug("Environment files to load", {
|
|
1039
|
-
standardFiles,
|
|
1040
|
-
customPaths,
|
|
1041
|
-
total: allFiles.length
|
|
1042
|
-
});
|
|
1043
|
-
}
|
|
1044
|
-
const reversedFiles = [...allFiles].reverse();
|
|
1045
|
-
for (const filePath of reversedFiles) {
|
|
1046
|
-
const fileResult = loadSingleFile(filePath, debug);
|
|
1047
|
-
if (fileResult.success) {
|
|
1048
|
-
result.loaded.push(filePath);
|
|
1049
|
-
Object.assign(result.parsed, fileResult.parsed);
|
|
1050
|
-
if (fileResult.parsed["NODE_ENV"]) {
|
|
1051
|
-
const fileName = filePath.split("/").pop() || filePath;
|
|
1052
|
-
result.warnings.push(
|
|
1053
|
-
`NODE_ENV found in ${fileName}. It's recommended to set NODE_ENV via CLI (e.g., 'spfn dev', 'spfn build') instead of .env files for consistent environment behavior.`
|
|
1054
|
-
);
|
|
1055
|
-
}
|
|
1056
|
-
} else if (fileResult.error) {
|
|
1057
|
-
result.failed.push({
|
|
1058
|
-
path: filePath,
|
|
1059
|
-
reason: fileResult.error
|
|
1060
|
-
});
|
|
1061
|
-
}
|
|
1062
|
-
}
|
|
1063
|
-
if (debug || result.loaded.length > 0) {
|
|
1064
|
-
envLogger.info("Environment loading complete", {
|
|
1065
|
-
loaded: result.loaded.length,
|
|
1066
|
-
failed: result.failed.length,
|
|
1067
|
-
variables: Object.keys(result.parsed).length,
|
|
1068
|
-
files: result.loaded
|
|
1069
|
-
});
|
|
1070
|
-
}
|
|
1071
|
-
if (required.length > 0) {
|
|
1072
|
-
try {
|
|
1073
|
-
validateRequiredVars(required, debug);
|
|
1074
|
-
} catch (error) {
|
|
1075
|
-
result.success = false;
|
|
1076
|
-
result.errors = [
|
|
1077
|
-
error instanceof Error ? error.message : "Validation failed"
|
|
1078
|
-
];
|
|
1079
|
-
throw error;
|
|
1080
|
-
}
|
|
1081
|
-
}
|
|
1082
|
-
if (result.warnings.length > 0) {
|
|
1083
|
-
for (const warning of result.warnings) {
|
|
1084
|
-
envLogger.warn(warning);
|
|
1085
|
-
}
|
|
1086
|
-
}
|
|
1087
|
-
environmentLoaded = true;
|
|
1088
|
-
cachedLoadResult = result;
|
|
1089
|
-
return result;
|
|
1090
|
-
}
|
|
1091
|
-
var envLogger, environmentLoaded, cachedLoadResult;
|
|
1092
|
-
var init_loader = __esm({
|
|
1093
|
-
"src/env/loader.ts"() {
|
|
1094
|
-
init_logger2();
|
|
1095
|
-
init_config2();
|
|
1096
|
-
envLogger = logger.child("environment");
|
|
1097
|
-
environmentLoaded = false;
|
|
1098
|
-
}
|
|
1099
|
-
});
|
|
1100
|
-
|
|
1101
|
-
// src/env/validator.ts
|
|
1102
|
-
var init_validator = __esm({
|
|
1103
|
-
"src/env/validator.ts"() {
|
|
1104
|
-
}
|
|
1105
|
-
});
|
|
1106
|
-
|
|
1107
|
-
// src/env/index.ts
|
|
1108
|
-
var init_env = __esm({
|
|
1109
|
-
"src/env/index.ts"() {
|
|
1110
|
-
init_loader();
|
|
1111
|
-
init_config2();
|
|
1112
|
-
init_validator();
|
|
1113
|
-
}
|
|
1114
|
-
});
|
|
1115
|
-
|
|
1116
|
-
// src/db/postgres-errors.ts
|
|
1117
|
-
function parseUniqueViolation(message) {
|
|
1118
|
-
const patterns = [
|
|
1119
|
-
// Standard format: Key (field)=(value)
|
|
1120
|
-
/Key \(([^)]+)\)=\(([^)]+)\)/i,
|
|
1121
|
-
// With quotes: Key ("field")=('value')
|
|
1122
|
-
/Key \(["']?([^)"']+)["']?\)=\(["']?([^)"']+)["']?\)/i,
|
|
1123
|
-
// Alternative format
|
|
1124
|
-
/Key `([^`]+)`=`([^`]+)`/i
|
|
1125
|
-
];
|
|
1126
|
-
for (const pattern of patterns) {
|
|
1127
|
-
const match = message.match(pattern);
|
|
1128
|
-
if (match) {
|
|
1129
|
-
const field = match[1].trim().replace(/["'`]/g, "");
|
|
1130
|
-
const value = match[2].trim().replace(/["'`]/g, "");
|
|
1131
|
-
return { field, value };
|
|
1132
|
-
}
|
|
1133
|
-
}
|
|
1134
|
-
return null;
|
|
1135
|
-
}
|
|
1136
|
-
function fromPostgresError(error) {
|
|
1137
|
-
const code = error?.code;
|
|
1138
|
-
const message = error?.message || "Database error occurred";
|
|
1139
|
-
switch (code) {
|
|
1140
|
-
// Class 08 — Connection Exception
|
|
1141
|
-
case "08000":
|
|
1142
|
-
// connection_exception
|
|
1143
|
-
case "08001":
|
|
1144
|
-
// sqlclient_unable_to_establish_sqlconnection
|
|
1145
|
-
case "08003":
|
|
1146
|
-
// connection_does_not_exist
|
|
1147
|
-
case "08004":
|
|
1148
|
-
// sqlserver_rejected_establishment_of_sqlconnection
|
|
1149
|
-
case "08006":
|
|
1150
|
-
// connection_failure
|
|
1151
|
-
case "08007":
|
|
1152
|
-
// transaction_resolution_unknown
|
|
1153
|
-
case "08P01":
|
|
1154
|
-
return new ConnectionError(message, { code });
|
|
1155
|
-
// Class 23 — Integrity Constraint Violation
|
|
1156
|
-
case "23000":
|
|
1157
|
-
// integrity_constraint_violation
|
|
1158
|
-
case "23001":
|
|
1159
|
-
return new ConstraintViolationError(message, { code, constraint: "integrity" });
|
|
1160
|
-
case "23502":
|
|
1161
|
-
return new ConstraintViolationError(message, { code, constraint: "not_null" });
|
|
1162
|
-
case "23503":
|
|
1163
|
-
return new ConstraintViolationError(message, { code, constraint: "foreign_key" });
|
|
1164
|
-
case "23505":
|
|
1165
|
-
const parsed = parseUniqueViolation(message);
|
|
1166
|
-
if (parsed) {
|
|
1167
|
-
return new DuplicateEntryError(parsed.field, parsed.value);
|
|
1168
|
-
}
|
|
1169
|
-
return new DuplicateEntryError("field", "value");
|
|
1170
|
-
case "23514":
|
|
1171
|
-
return new ConstraintViolationError(message, { code, constraint: "check" });
|
|
1172
|
-
// Class 40 — Transaction Rollback
|
|
1173
|
-
case "40000":
|
|
1174
|
-
// transaction_rollback
|
|
1175
|
-
case "40001":
|
|
1176
|
-
// serialization_failure
|
|
1177
|
-
case "40002":
|
|
1178
|
-
// transaction_integrity_constraint_violation
|
|
1179
|
-
case "40003":
|
|
1180
|
-
return new TransactionError(message, 500, { code });
|
|
1181
|
-
case "40P01":
|
|
1182
|
-
return new DeadlockError(message, { code });
|
|
1183
|
-
// Class 42 — Syntax Error or Access Rule Violation
|
|
1184
|
-
case "42000":
|
|
1185
|
-
// syntax_error_or_access_rule_violation
|
|
1186
|
-
case "42601":
|
|
1187
|
-
// syntax_error
|
|
1188
|
-
case "42501":
|
|
1189
|
-
// insufficient_privilege
|
|
1190
|
-
case "42602":
|
|
1191
|
-
// invalid_name
|
|
1192
|
-
case "42622":
|
|
1193
|
-
// name_too_long
|
|
1194
|
-
case "42701":
|
|
1195
|
-
// duplicate_column
|
|
1196
|
-
case "42702":
|
|
1197
|
-
// ambiguous_column
|
|
1198
|
-
case "42703":
|
|
1199
|
-
// undefined_column
|
|
1200
|
-
case "42704":
|
|
1201
|
-
// undefined_object
|
|
1202
|
-
case "42P01":
|
|
1203
|
-
// undefined_table
|
|
1204
|
-
case "42P02":
|
|
1205
|
-
return new QueryError(message, 400, { code });
|
|
1206
|
-
// Class 53 — Insufficient Resources
|
|
1207
|
-
case "53000":
|
|
1208
|
-
// insufficient_resources
|
|
1209
|
-
case "53100":
|
|
1210
|
-
// disk_full
|
|
1211
|
-
case "53200":
|
|
1212
|
-
// out_of_memory
|
|
1213
|
-
case "53300":
|
|
1214
|
-
return new ConnectionError(message, { code });
|
|
1215
|
-
// Class 57 — Operator Intervention
|
|
1216
|
-
case "57000":
|
|
1217
|
-
// operator_intervention
|
|
1218
|
-
case "57014":
|
|
1219
|
-
// query_canceled
|
|
1220
|
-
case "57P01":
|
|
1221
|
-
// admin_shutdown
|
|
1222
|
-
case "57P02":
|
|
1223
|
-
// crash_shutdown
|
|
1224
|
-
case "57P03":
|
|
1225
|
-
return new ConnectionError(message, { code });
|
|
1226
|
-
// Default: Unknown error
|
|
1227
|
-
default:
|
|
1228
|
-
return new QueryError(message, 500, { code });
|
|
1229
|
-
}
|
|
1230
|
-
}
|
|
1231
|
-
var init_postgres_errors = __esm({
|
|
1232
|
-
"src/db/postgres-errors.ts"() {
|
|
1233
|
-
init_errors();
|
|
1234
|
-
}
|
|
1235
|
-
});
|
|
1236
|
-
function delay(ms) {
|
|
1237
|
-
return new Promise((resolve) => setTimeout(resolve, ms));
|
|
1238
|
-
}
|
|
1239
|
-
async function createDatabaseConnection(connectionString, poolConfig, retryConfig) {
|
|
1240
|
-
let lastError;
|
|
1241
|
-
for (let attempt = 0; attempt <= retryConfig.maxRetries; attempt++) {
|
|
1242
|
-
try {
|
|
1243
|
-
const client = postgres(connectionString, {
|
|
1244
|
-
max: poolConfig.max,
|
|
1245
|
-
idle_timeout: poolConfig.idleTimeout
|
|
1246
|
-
});
|
|
1247
|
-
await client`SELECT 1 as test`;
|
|
1248
|
-
if (attempt > 0) {
|
|
1249
|
-
dbLogger.info(`Database connected successfully after ${attempt} retries`);
|
|
1250
|
-
} else {
|
|
1251
|
-
dbLogger.info("Database connected successfully");
|
|
1252
|
-
}
|
|
1253
|
-
return client;
|
|
1254
|
-
} catch (error) {
|
|
1255
|
-
lastError = fromPostgresError(error);
|
|
1256
|
-
if (attempt < retryConfig.maxRetries) {
|
|
1257
|
-
const delayMs = Math.min(
|
|
1258
|
-
retryConfig.initialDelay * Math.pow(retryConfig.factor, attempt),
|
|
1259
|
-
retryConfig.maxDelay
|
|
1260
|
-
);
|
|
1261
|
-
dbLogger.warn(
|
|
1262
|
-
`Connection failed (attempt ${attempt + 1}/${retryConfig.maxRetries + 1}), retrying in ${delayMs}ms...`,
|
|
1263
|
-
lastError,
|
|
1264
|
-
{
|
|
1265
|
-
attempt: attempt + 1,
|
|
1266
|
-
maxRetries: retryConfig.maxRetries + 1,
|
|
1267
|
-
delayMs
|
|
1268
|
-
}
|
|
1269
|
-
);
|
|
1270
|
-
await delay(delayMs);
|
|
1271
|
-
}
|
|
1272
|
-
}
|
|
1273
|
-
}
|
|
1274
|
-
const errorMessage = `Failed to connect to database after ${retryConfig.maxRetries + 1} attempts: ${lastError?.message || "Unknown error"}`;
|
|
1275
|
-
throw new ConnectionError(errorMessage);
|
|
1276
|
-
}
|
|
1277
|
-
async function checkConnection(client) {
|
|
1278
|
-
try {
|
|
1279
|
-
await client`SELECT 1 as health_check`;
|
|
1280
|
-
return true;
|
|
1281
|
-
} catch (error) {
|
|
1282
|
-
dbLogger.error("Database health check failed", error);
|
|
1283
|
-
return false;
|
|
1284
|
-
}
|
|
1285
|
-
}
|
|
1286
|
-
var dbLogger;
|
|
1287
|
-
var init_connection = __esm({
|
|
1288
|
-
"src/db/manager/connection.ts"() {
|
|
1289
|
-
init_logger2();
|
|
1290
|
-
init_errors();
|
|
1291
|
-
init_postgres_errors();
|
|
1292
|
-
dbLogger = logger.child("database");
|
|
1293
|
-
}
|
|
1294
|
-
});
|
|
1295
|
-
|
|
1296
|
-
// src/db/manager/config.ts
|
|
1297
|
-
function parseEnvNumber(key, prodDefault, devDefault) {
|
|
1298
|
-
const isProduction = process.env.NODE_ENV === "production";
|
|
1299
|
-
const envValue = parseInt(process.env[key] || "", 10);
|
|
1300
|
-
return isNaN(envValue) ? isProduction ? prodDefault : devDefault : envValue;
|
|
1301
|
-
}
|
|
1302
|
-
function parseEnvBoolean(key, defaultValue) {
|
|
1303
|
-
const value = process.env[key];
|
|
1304
|
-
if (value === void 0) return defaultValue;
|
|
1305
|
-
return value.toLowerCase() === "true";
|
|
1306
|
-
}
|
|
1307
|
-
function getPoolConfig(options) {
|
|
1308
|
-
return {
|
|
1309
|
-
max: options?.max ?? parseEnvNumber("DB_POOL_MAX", 20, 10),
|
|
1310
|
-
idleTimeout: options?.idleTimeout ?? parseEnvNumber("DB_POOL_IDLE_TIMEOUT", 30, 20)
|
|
1311
|
-
};
|
|
1312
|
-
}
|
|
1313
|
-
function getRetryConfig() {
|
|
1314
|
-
return {
|
|
1315
|
-
maxRetries: parseEnvNumber("DB_RETRY_MAX", 5, 3),
|
|
1316
|
-
initialDelay: parseEnvNumber("DB_RETRY_INITIAL_DELAY", 100, 50),
|
|
1317
|
-
maxDelay: parseEnvNumber("DB_RETRY_MAX_DELAY", 1e4, 5e3),
|
|
1318
|
-
factor: parseEnvNumber("DB_RETRY_FACTOR", 2, 2)
|
|
1319
|
-
};
|
|
1320
|
-
}
|
|
1321
|
-
function buildHealthCheckConfig(options) {
|
|
1322
|
-
return {
|
|
1323
|
-
enabled: options?.enabled ?? parseEnvBoolean("DB_HEALTH_CHECK_ENABLED", true),
|
|
1324
|
-
interval: options?.interval ?? parseEnvNumber("DB_HEALTH_CHECK_INTERVAL", 6e4, 6e4),
|
|
1325
|
-
reconnect: options?.reconnect ?? parseEnvBoolean("DB_HEALTH_CHECK_RECONNECT", true),
|
|
1326
|
-
maxRetries: options?.maxRetries ?? parseEnvNumber("DB_HEALTH_CHECK_MAX_RETRIES", 3, 3),
|
|
1327
|
-
retryInterval: options?.retryInterval ?? parseEnvNumber("DB_HEALTH_CHECK_RETRY_INTERVAL", 5e3, 5e3)
|
|
1328
|
-
};
|
|
1329
|
-
}
|
|
1330
|
-
function buildMonitoringConfig(options) {
|
|
1331
|
-
const isDevelopment = process.env.NODE_ENV !== "production";
|
|
1332
|
-
return {
|
|
1333
|
-
enabled: options?.enabled ?? parseEnvBoolean("DB_MONITORING_ENABLED", isDevelopment),
|
|
1334
|
-
slowThreshold: options?.slowThreshold ?? parseEnvNumber("DB_MONITORING_SLOW_THRESHOLD", 1e3, 1e3),
|
|
1335
|
-
logQueries: options?.logQueries ?? parseEnvBoolean("DB_MONITORING_LOG_QUERIES", false)
|
|
1336
|
-
};
|
|
1337
|
-
}
|
|
1338
|
-
var init_config3 = __esm({
|
|
1339
|
-
"src/db/manager/config.ts"() {
|
|
1340
|
-
}
|
|
1341
|
-
});
|
|
1342
|
-
function hasDatabaseConfig() {
|
|
1343
|
-
return !!(process.env.DATABASE_URL || process.env.DATABASE_WRITE_URL || process.env.DATABASE_READ_URL);
|
|
1344
|
-
}
|
|
1345
|
-
function detectDatabasePattern() {
|
|
1346
|
-
if (process.env.DATABASE_WRITE_URL && process.env.DATABASE_READ_URL) {
|
|
1347
|
-
return {
|
|
1348
|
-
type: "write-read",
|
|
1349
|
-
write: process.env.DATABASE_WRITE_URL,
|
|
1350
|
-
read: process.env.DATABASE_READ_URL
|
|
1351
|
-
};
|
|
1352
|
-
}
|
|
1353
|
-
if (process.env.DATABASE_URL && process.env.DATABASE_REPLICA_URL) {
|
|
1354
|
-
return {
|
|
1355
|
-
type: "legacy",
|
|
1356
|
-
primary: process.env.DATABASE_URL,
|
|
1357
|
-
replica: process.env.DATABASE_REPLICA_URL
|
|
1358
|
-
};
|
|
1359
|
-
}
|
|
1360
|
-
if (process.env.DATABASE_URL) {
|
|
1361
|
-
return {
|
|
1362
|
-
type: "single",
|
|
1363
|
-
url: process.env.DATABASE_URL
|
|
1364
|
-
};
|
|
1365
|
-
}
|
|
1366
|
-
if (process.env.DATABASE_WRITE_URL) {
|
|
1367
|
-
return {
|
|
1368
|
-
type: "single",
|
|
1369
|
-
url: process.env.DATABASE_WRITE_URL
|
|
1370
|
-
};
|
|
1371
|
-
}
|
|
1372
|
-
return { type: "none" };
|
|
1373
|
-
}
|
|
1374
|
-
async function createWriteReadClients(writeUrl, readUrl, poolConfig, retryConfig) {
|
|
1375
|
-
const writeClient = await createDatabaseConnection(writeUrl, poolConfig, retryConfig);
|
|
1376
|
-
const readClient = await createDatabaseConnection(readUrl, poolConfig, retryConfig);
|
|
1377
|
-
return {
|
|
1378
|
-
write: drizzle(writeClient),
|
|
1379
|
-
read: drizzle(readClient),
|
|
1380
|
-
writeClient,
|
|
1381
|
-
readClient
|
|
1382
|
-
};
|
|
1383
|
-
}
|
|
1384
|
-
async function createSingleClient(url, poolConfig, retryConfig) {
|
|
1385
|
-
const client = await createDatabaseConnection(url, poolConfig, retryConfig);
|
|
1386
|
-
const db = drizzle(client);
|
|
1387
|
-
return {
|
|
1388
|
-
write: db,
|
|
1389
|
-
read: db,
|
|
1390
|
-
writeClient: client,
|
|
1391
|
-
readClient: client
|
|
1392
|
-
};
|
|
1393
|
-
}
|
|
1394
|
-
async function createDatabaseFromEnv(options) {
if (!hasDatabaseConfig()) {
dbLogger2.debug("No DATABASE_URL found, loading environment variables");
const result = loadEnvironment({
debug: true
});
dbLogger2.debug("Environment variables loaded", {
success: result.success,
loaded: result.loaded.length,
hasDatabaseUrl: !!process.env.DATABASE_URL,
hasWriteUrl: !!process.env.DATABASE_WRITE_URL,
hasReadUrl: !!process.env.DATABASE_READ_URL
});
}
if (!hasDatabaseConfig()) {
dbLogger2.warn("No database configuration found", {
cwd: process.cwd(),
nodeEnv: process.env.NODE_ENV,
checkedVars: ["DATABASE_URL", "DATABASE_WRITE_URL", "DATABASE_READ_URL"]
});
return { write: void 0, read: void 0 };
}
try {
const poolConfig = getPoolConfig(options?.pool);
const retryConfig = getRetryConfig();
const pattern = detectDatabasePattern();
switch (pattern.type) {
case "write-read":
dbLogger2.debug("Using write-read pattern", {
write: pattern.write.replace(/:[^:@]+@/, ":***@"),
read: pattern.read.replace(/:[^:@]+@/, ":***@")
});
return await createWriteReadClients(
pattern.write,
pattern.read,
poolConfig,
retryConfig
);
case "legacy":
dbLogger2.debug("Using legacy replica pattern", {
primary: pattern.primary.replace(/:[^:@]+@/, ":***@"),
replica: pattern.replica.replace(/:[^:@]+@/, ":***@")
});
return await createWriteReadClients(
pattern.primary,
pattern.replica,
poolConfig,
retryConfig
);
case "single":
dbLogger2.debug("Using single database pattern", {
url: pattern.url.replace(/:[^:@]+@/, ":***@")
});
return await createSingleClient(pattern.url, poolConfig, retryConfig);
case "none":
dbLogger2.warn("No database pattern detected");
return { write: void 0, read: void 0 };
}
} catch (error) {
const message = error instanceof Error ? error.message : "Unknown error";
dbLogger2.error("Failed to create database connection", {
error: message,
stage: "initialization",
hasWriteUrl: !!process.env.DATABASE_WRITE_URL,
hasReadUrl: !!process.env.DATABASE_READ_URL,
hasUrl: !!process.env.DATABASE_URL,
hasReplicaUrl: !!process.env.DATABASE_REPLICA_URL
});
throw new Error(`Database connection failed: ${message}`, { cause: error });
}
}
var dbLogger2;
var init_factory2 = __esm({
"src/db/manager/factory.ts"() {
init_logger2();
init_env();
init_connection();
init_config3();
dbLogger2 = logger.child("database");
}
});
|
|
1476
|
-
// src/db/manager/global-state.ts
|
|
1477
|
-
var getWriteInstance, setWriteInstance, getReadInstance, setReadInstance, getWriteClient, setWriteClient, getReadClient, setReadClient, getHealthCheckInterval, setHealthCheckInterval, setMonitoringConfig;
|
|
1478
|
-
var init_global_state = __esm({
|
|
1479
|
-
"src/db/manager/global-state.ts"() {
|
|
1480
|
-
getWriteInstance = () => globalThis.__SPFN_DB_WRITE__;
|
|
1481
|
-
setWriteInstance = (instance) => {
|
|
1482
|
-
globalThis.__SPFN_DB_WRITE__ = instance;
|
|
1483
|
-
};
|
|
1484
|
-
getReadInstance = () => globalThis.__SPFN_DB_READ__;
|
|
1485
|
-
setReadInstance = (instance) => {
|
|
1486
|
-
globalThis.__SPFN_DB_READ__ = instance;
|
|
1487
|
-
};
|
|
1488
|
-
getWriteClient = () => globalThis.__SPFN_DB_WRITE_CLIENT__;
|
|
1489
|
-
setWriteClient = (client) => {
|
|
1490
|
-
globalThis.__SPFN_DB_WRITE_CLIENT__ = client;
|
|
1491
|
-
};
|
|
1492
|
-
getReadClient = () => globalThis.__SPFN_DB_READ_CLIENT__;
|
|
1493
|
-
setReadClient = (client) => {
|
|
1494
|
-
globalThis.__SPFN_DB_READ_CLIENT__ = client;
|
|
1495
|
-
};
|
|
1496
|
-
getHealthCheckInterval = () => globalThis.__SPFN_DB_HEALTH_CHECK__;
|
|
1497
|
-
setHealthCheckInterval = (interval) => {
|
|
1498
|
-
globalThis.__SPFN_DB_HEALTH_CHECK__ = interval;
|
|
1499
|
-
};
|
|
1500
|
-
setMonitoringConfig = (config) => {
|
|
1501
|
-
globalThis.__SPFN_DB_MONITORING__ = config;
|
|
1502
|
-
};
|
|
1503
|
-
}
|
|
1504
|
-
});
|
|
1505
|
-
|
|
1506
|
-
// src/db/manager/health-check.ts
|
|
1507
|
-
function startHealthCheck(config, options, getDatabase2, closeDatabase2) {
|
|
1508
|
-
const healthCheck = getHealthCheckInterval();
|
|
1509
|
-
if (healthCheck) {
|
|
1510
|
-
dbLogger3.debug("Health check already running");
|
|
1511
|
-
return;
|
|
1512
|
-
}
|
|
1513
|
-
dbLogger3.info("Starting database health check", {
|
|
1514
|
-
interval: `${config.interval}ms`,
|
|
1515
|
-
reconnect: config.reconnect
|
|
1516
|
-
});
|
|
1517
|
-
const interval = setInterval(async () => {
|
|
1518
|
-
try {
|
|
1519
|
-
const write = getDatabase2("write");
|
|
1520
|
-
const read = getDatabase2("read");
|
|
1521
|
-
if (write) {
|
|
1522
|
-
await write.execute("SELECT 1");
|
|
1523
|
-
}
|
|
1524
|
-
if (read && read !== write) {
|
|
1525
|
-
await read.execute("SELECT 1");
|
|
1526
|
-
}
|
|
1527
|
-
} catch (error) {
|
|
1528
|
-
const message = error instanceof Error ? error.message : "Unknown error";
|
|
1529
|
-
dbLogger3.error("Database health check failed", { error: message });
|
|
1530
|
-
if (config.reconnect) {
|
|
1531
|
-
await attemptReconnection(config, options, closeDatabase2);
|
|
1532
|
-
}
|
|
1533
|
-
}
|
|
1534
|
-
}, config.interval);
|
|
1535
|
-
setHealthCheckInterval(interval);
|
|
1536
|
-
}
|
|
1537
|
-
async function attemptReconnection(config, options, closeDatabase2) {
|
|
1538
|
-
dbLogger3.warn("Attempting database reconnection", {
|
|
1539
|
-
maxRetries: config.maxRetries,
|
|
1540
|
-
retryInterval: `${config.retryInterval}ms`
|
|
1541
|
-
});
|
|
1542
|
-
for (let attempt = 1; attempt <= config.maxRetries; attempt++) {
|
|
1543
|
-
try {
|
|
1544
|
-
dbLogger3.debug(`Reconnection attempt ${attempt}/${config.maxRetries}`);
|
|
1545
|
-
await closeDatabase2();
|
|
1546
|
-
await new Promise((resolve) => setTimeout(resolve, config.retryInterval));
|
|
1547
|
-
const result = await createDatabaseFromEnv(options);
|
|
1548
|
-
if (result.write) {
|
|
1549
|
-
await result.write.execute("SELECT 1");
|
|
1550
|
-
setWriteInstance(result.write);
|
|
1551
|
-
setReadInstance(result.read);
|
|
1552
|
-
setWriteClient(result.writeClient);
|
|
1553
|
-
setReadClient(result.readClient);
|
|
1554
|
-
dbLogger3.info("Database reconnection successful", { attempt });
|
|
1555
|
-
return;
|
|
1556
|
-
}
|
|
1557
|
-
} catch (error) {
|
|
1558
|
-
const message = error instanceof Error ? error.message : "Unknown error";
|
|
1559
|
-
dbLogger3.error(`Reconnection attempt ${attempt} failed`, {
|
|
1560
|
-
error: message,
|
|
1561
|
-
attempt,
|
|
1562
|
-
maxRetries: config.maxRetries
|
|
1563
|
-
});
|
|
1564
|
-
if (attempt === config.maxRetries) {
|
|
1565
|
-
dbLogger3.error("Max reconnection attempts reached, giving up");
|
|
1566
|
-
}
|
|
1567
|
-
}
|
|
1568
|
-
}
|
|
1569
|
-
}
|
|
1570
|
-
function stopHealthCheck() {
|
|
1571
|
-
const healthCheck = getHealthCheckInterval();
|
|
1572
|
-
if (healthCheck) {
|
|
1573
|
-
clearInterval(healthCheck);
|
|
1574
|
-
setHealthCheckInterval(void 0);
|
|
1575
|
-
dbLogger3.info("Database health check stopped");
|
|
1576
|
-
}
|
|
1577
|
-
}
|
|
1578
|
-
var dbLogger3;
|
|
1579
|
-
var init_health_check = __esm({
|
|
1580
|
-
"src/db/manager/health-check.ts"() {
|
|
1581
|
-
init_logger2();
|
|
1582
|
-
init_factory2();
|
|
1583
|
-
init_global_state();
|
|
1584
|
-
dbLogger3 = logger.child("database");
|
|
1585
|
-
}
|
|
1586
|
-
});
|
|
1587
|
-
|
|
1588
|
-
// src/db/manager/manager.ts
|
|
1589
|
-
function getCallerInfo() {
|
|
1590
|
-
try {
|
|
1591
|
-
const stack = new Error().stack;
|
|
1592
|
-
if (!stack) return void 0;
|
|
1593
|
-
const lines = stack.split("\n");
|
|
1594
|
-
for (let i = 3; i < lines.length; i++) {
|
|
1595
|
-
const line = lines[i];
|
|
1596
|
-
if (!line.includes("node_modules") && !line.includes("/db/manager/")) {
|
|
1597
|
-
const match = line.match(/\((.+):(\d+):(\d+)\)/) || line.match(/at (.+):(\d+):(\d+)/);
|
|
1598
|
-
if (match) {
|
|
1599
|
-
const fullPath = match[1];
|
|
1600
|
-
const parts = fullPath.split("/");
|
|
1601
|
-
const srcIndex = parts.lastIndexOf("src");
|
|
1602
|
-
if (srcIndex !== -1) {
|
|
1603
|
-
const relativePath = parts.slice(srcIndex).join("/");
|
|
1604
|
-
return `${relativePath}:${match[2]}`;
|
|
1605
|
-
}
|
|
1606
|
-
return `${fullPath}:${match[2]}`;
|
|
1607
|
-
}
|
|
1608
|
-
break;
|
|
1609
|
-
}
|
|
1610
|
-
}
|
|
1611
|
-
} catch {
|
|
1612
|
-
}
|
|
1613
|
-
return void 0;
|
|
1614
|
-
}
|
|
1615
|
-
function getDatabase(type) {
|
|
1616
|
-
const writeInst = getWriteInstance();
|
|
1617
|
-
const readInst = getReadInstance();
|
|
1618
|
-
if (process.env.DB_DEBUG_TRACE === "true") {
|
|
1619
|
-
const caller = getCallerInfo();
|
|
1620
|
-
dbLogger4.debug("getDatabase() called", {
|
|
1621
|
-
type: type || "write",
|
|
1622
|
-
hasWrite: !!writeInst,
|
|
1623
|
-
hasRead: !!readInst,
|
|
1624
|
-
caller
|
|
1625
|
-
});
|
|
1626
|
-
}
|
|
1627
|
-
if (type === "read") {
|
|
1628
|
-
return readInst ?? writeInst;
|
|
1629
|
-
}
|
|
1630
|
-
return writeInst;
|
|
1631
|
-
}
|
|
1632
|
-
function setDatabase(write, read) {
|
|
1633
|
-
setWriteInstance(write);
|
|
1634
|
-
setReadInstance(read ?? write);
|
|
1635
|
-
}
|
|
1636
|
-
async function initDatabase(options) {
|
|
1637
|
-
const writeInst = getWriteInstance();
|
|
1638
|
-
if (writeInst) {
|
|
1639
|
-
dbLogger4.debug("Database already initialized");
|
|
1640
|
-
return { write: writeInst, read: getReadInstance() };
|
|
1641
|
-
}
|
|
1642
|
-
const result = await createDatabaseFromEnv(options);
|
|
1643
|
-
if (result.write) {
|
|
1644
|
-
try {
|
|
1645
|
-
await result.write.execute("SELECT 1");
|
|
1646
|
-
if (result.read && result.read !== result.write) {
|
|
1647
|
-
await result.read.execute("SELECT 1");
|
|
1648
|
-
}
|
|
1649
|
-
setWriteInstance(result.write);
|
|
1650
|
-
setReadInstance(result.read);
|
|
1651
|
-
setWriteClient(result.writeClient);
|
|
1652
|
-
setReadClient(result.readClient);
|
|
1653
|
-
const hasReplica = result.read && result.read !== result.write;
|
|
1654
|
-
dbLogger4.info(
|
|
1655
|
-
hasReplica ? "Database connected (Primary + Replica)" : "Database connected"
|
|
1656
|
-
);
|
|
1657
|
-
const healthCheckConfig = buildHealthCheckConfig(options?.healthCheck);
|
|
1658
|
-
if (healthCheckConfig.enabled) {
|
|
1659
|
-
startHealthCheck(healthCheckConfig, options, getDatabase, closeDatabase);
|
|
1660
|
-
}
|
|
1661
|
-
const monConfig = buildMonitoringConfig(options?.monitoring);
|
|
1662
|
-
setMonitoringConfig(monConfig);
|
|
1663
|
-
if (monConfig.enabled) {
|
|
1664
|
-
dbLogger4.info("Database query monitoring enabled", {
|
|
1665
|
-
slowThreshold: `${monConfig.slowThreshold}ms`,
|
|
1666
|
-
logQueries: monConfig.logQueries
|
|
1667
|
-
});
|
|
1668
|
-
}
|
|
1669
|
-
} catch (error) {
|
|
1670
|
-
const message = error instanceof Error ? error.message : "Unknown error";
|
|
1671
|
-
dbLogger4.error("Database connection failed", { error: message });
|
|
1672
|
-
await closeDatabase();
|
|
1673
|
-
throw new Error(`Database connection test failed: ${message}`, { cause: error });
|
|
1674
|
-
}
|
|
1675
|
-
} else {
|
|
1676
|
-
dbLogger4.warn("No database configuration found");
|
|
1677
|
-
dbLogger4.warn("Set DATABASE_URL environment variable to enable database");
|
|
1678
|
-
}
|
|
1679
|
-
return { write: getWriteInstance(), read: getReadInstance() };
|
|
1680
|
-
}
|
|
1681
|
-
async function closeDatabase() {
|
|
1682
|
-
const writeInst = getWriteInstance();
|
|
1683
|
-
const readInst = getReadInstance();
|
|
1684
|
-
if (!writeInst && !readInst) {
|
|
1685
|
-
dbLogger4.debug("No database connections to close");
|
|
1686
|
-
return;
|
|
1687
|
-
}
|
|
1688
|
-
stopHealthCheck();
|
|
1689
|
-
try {
|
|
1690
|
-
const closePromises = [];
|
|
1691
|
-
const writeC = getWriteClient();
|
|
1692
|
-
if (writeC) {
|
|
1693
|
-
dbLogger4.debug("Closing write connection...");
|
|
1694
|
-
closePromises.push(
|
|
1695
|
-
writeC.end({ timeout: 5 }).then(() => dbLogger4.debug("Write connection closed")).catch((err) => dbLogger4.error("Error closing write connection", err))
|
|
1696
|
-
);
|
|
1697
|
-
}
|
|
1698
|
-
const readC = getReadClient();
|
|
1699
|
-
if (readC && readC !== writeC) {
|
|
1700
|
-
dbLogger4.debug("Closing read connection...");
|
|
1701
|
-
closePromises.push(
|
|
1702
|
-
readC.end({ timeout: 5 }).then(() => dbLogger4.debug("Read connection closed")).catch((err) => dbLogger4.error("Error closing read connection", err))
|
|
1703
|
-
);
|
|
1704
|
-
}
|
|
1705
|
-
await Promise.all(closePromises);
|
|
1706
|
-
dbLogger4.info("All database connections closed");
|
|
1707
|
-
} catch (error) {
|
|
1708
|
-
dbLogger4.error("Error during database cleanup", error);
|
|
1709
|
-
throw error;
|
|
1710
|
-
} finally {
|
|
1711
|
-
setWriteInstance(void 0);
|
|
1712
|
-
setReadInstance(void 0);
|
|
1713
|
-
setWriteClient(void 0);
|
|
1714
|
-
setReadClient(void 0);
|
|
1715
|
-
setMonitoringConfig(void 0);
|
|
1716
|
-
}
|
|
1717
|
-
}
|
|
1718
|
-
function getDatabaseInfo() {
|
|
1719
|
-
const writeInst = getWriteInstance();
|
|
1720
|
-
const readInst = getReadInstance();
|
|
1721
|
-
return {
|
|
1722
|
-
hasWrite: !!writeInst,
|
|
1723
|
-
hasRead: !!readInst,
|
|
1724
|
-
isReplica: !!(readInst && readInst !== writeInst)
|
|
1725
|
-
};
|
|
1726
|
-
}
|
|
1727
|
-
var dbLogger4;
|
|
1728
|
-
var init_manager = __esm({
|
|
1729
|
-
"src/db/manager/manager.ts"() {
|
|
1730
|
-
init_logger2();
|
|
1731
|
-
init_factory2();
|
|
1732
|
-
init_config3();
|
|
1733
|
-
init_global_state();
|
|
1734
|
-
init_health_check();
|
|
1735
|
-
dbLogger4 = logger.child("database");
|
|
1736
|
-
}
|
|
1737
|
-
});
|
|
1738
|
-
|
|
1739
|
-
// src/db/manager/index.ts
|
|
1740
|
-
var init_manager2 = __esm({
|
|
1741
|
-
"src/db/manager/index.ts"() {
|
|
1742
|
-
init_factory2();
|
|
1743
|
-
init_manager();
|
|
1744
|
-
init_connection();
|
|
1745
|
-
}
|
|
1746
|
-
});
|
|
1747
|
-
function expandGlobPattern(pattern) {
|
|
1748
|
-
if (!pattern.includes("*")) {
|
|
1749
|
-
return existsSync(pattern) ? [pattern] : [];
|
|
1750
|
-
}
|
|
1751
|
-
const files = [];
|
|
1752
|
-
if (pattern.includes("**")) {
|
|
1753
|
-
const [baseDir, ...rest] = pattern.split("**");
|
|
1754
|
-
const extension = rest.join("").replace(/[\/\\]\*\./g, "").trim();
|
|
1755
|
-
const scanRecursive = (dir) => {
|
|
1756
|
-
if (!existsSync(dir)) return;
|
|
1757
|
-
try {
|
|
1758
|
-
const entries = readdirSync(dir);
|
|
1759
|
-
for (const entry of entries) {
|
|
1760
|
-
const fullPath = join(dir, entry);
|
|
1761
|
-
try {
|
|
1762
|
-
const stat2 = statSync(fullPath);
|
|
1763
|
-
if (stat2.isDirectory()) {
|
|
1764
|
-
scanRecursive(fullPath);
|
|
1765
|
-
} else if (stat2.isFile()) {
|
|
1766
|
-
if (!extension || fullPath.endsWith(extension)) {
|
|
1767
|
-
files.push(fullPath);
|
|
1768
|
-
}
|
|
1769
|
-
}
|
|
1770
|
-
} catch {
|
|
1771
|
-
}
|
|
1772
|
-
}
|
|
1773
|
-
} catch {
|
|
1774
|
-
}
|
|
1775
|
-
};
|
|
1776
|
-
scanRecursive(baseDir.trim() || ".");
|
|
1777
|
-
} else if (pattern.includes("*")) {
|
|
1778
|
-
const dir = dirname(pattern);
|
|
1779
|
-
const filePattern = basename(pattern);
|
|
1780
|
-
if (!existsSync(dir)) return [];
|
|
1781
|
-
try {
|
|
1782
|
-
const entries = readdirSync(dir);
|
|
1783
|
-
for (const entry of entries) {
|
|
1784
|
-
const fullPath = join(dir, entry);
|
|
1785
|
-
try {
|
|
1786
|
-
const stat2 = statSync(fullPath);
|
|
1787
|
-
if (stat2.isFile()) {
|
|
1788
|
-
if (filePattern === "*" || filePattern.startsWith("*.") && entry.endsWith(filePattern.slice(1))) {
|
|
1789
|
-
files.push(fullPath);
|
|
1790
|
-
}
|
|
1791
|
-
}
|
|
1792
|
-
} catch {
|
|
1793
|
-
}
|
|
1794
|
-
}
|
|
1795
|
-
} catch {
|
|
1796
|
-
}
|
|
1797
|
-
}
|
|
1798
|
-
return files;
|
|
1799
|
-
}
|
|
1800
|
-
function discoverPackageSchemas(cwd) {
|
|
1801
|
-
const schemas = [];
|
|
1802
|
-
const nodeModulesPath = join(cwd, "node_modules");
|
|
1803
|
-
if (!existsSync(nodeModulesPath)) {
|
|
1804
|
-
return schemas;
|
|
1805
|
-
}
|
|
1806
|
-
const projectPkgPath = join(cwd, "package.json");
|
|
1807
|
-
let directDeps = /* @__PURE__ */ new Set();
|
|
1808
|
-
if (existsSync(projectPkgPath)) {
|
|
1809
|
-
try {
|
|
1810
|
-
const projectPkg = JSON.parse(readFileSync(projectPkgPath, "utf-8"));
|
|
1811
|
-
directDeps = /* @__PURE__ */ new Set([
|
|
1812
|
-
...Object.keys(projectPkg.dependencies || {}),
|
|
1813
|
-
...Object.keys(projectPkg.devDependencies || {})
|
|
1814
|
-
]);
|
|
1815
|
-
} catch (error) {
|
|
1816
|
-
}
|
|
1817
|
-
}
|
|
1818
|
-
const checkPackage = (_pkgName, pkgPath) => {
|
|
1819
|
-
const pkgJsonPath = join(pkgPath, "package.json");
|
|
1820
|
-
if (!existsSync(pkgJsonPath)) return;
|
|
1821
|
-
try {
|
|
1822
|
-
const pkgJson = JSON.parse(readFileSync(pkgJsonPath, "utf-8"));
|
|
1823
|
-
if (pkgJson.spfn?.schemas) {
|
|
1824
|
-
const packageSchemas = Array.isArray(pkgJson.spfn.schemas) ? pkgJson.spfn.schemas : [pkgJson.spfn.schemas];
|
|
1825
|
-
for (const schema of packageSchemas) {
|
|
1826
|
-
const absolutePath = join(pkgPath, schema);
|
|
1827
|
-
const expandedFiles = expandGlobPattern(absolutePath);
|
|
1828
|
-
const schemaFiles = expandedFiles.filter(
|
|
1829
|
-
(file) => !file.endsWith("/index.js") && !file.endsWith("/index.ts") && !file.endsWith("/index.mjs") && !file.endsWith("\\index.js") && !file.endsWith("\\index.ts") && !file.endsWith("\\index.mjs")
|
|
1830
|
-
);
|
|
1831
|
-
schemas.push(...schemaFiles);
|
|
1832
|
-
}
|
|
1833
|
-
}
|
|
1834
|
-
} catch (error) {
|
|
1835
|
-
}
|
|
1836
|
-
};
|
|
1837
|
-
const spfnDir = join(nodeModulesPath, "@spfn");
|
|
1838
|
-
if (existsSync(spfnDir)) {
|
|
1839
|
-
try {
|
|
1840
|
-
const spfnPackages = readdirSync(spfnDir);
|
|
1841
|
-
for (const pkg of spfnPackages) {
|
|
1842
|
-
checkPackage(`@spfn/${pkg}`, join(spfnDir, pkg));
|
|
1843
|
-
}
|
|
1844
|
-
} catch (error) {
|
|
1845
|
-
}
|
|
1846
|
-
}
|
|
1847
|
-
for (const depName of directDeps) {
|
|
1848
|
-
if (depName.startsWith("@spfn/")) continue;
|
|
1849
|
-
const pkgPath = depName.startsWith("@") ? join(nodeModulesPath, ...depName.split("/")) : join(nodeModulesPath, depName);
|
|
1850
|
-
checkPackage(depName, pkgPath);
|
|
1851
|
-
}
|
|
1852
|
-
return schemas;
|
|
1853
|
-
}
|
|
1854
|
-
function detectDialect(url) {
if (url.startsWith("postgres://") || url.startsWith("postgresql://")) {
return "postgresql";
}
if (url.startsWith("mysql://")) {
return "mysql";
}
if (url.startsWith("sqlite://") || url.includes(".db") || url.includes(".sqlite")) {
return "sqlite";
}
throw new Error(
`Unsupported database URL format: ${url}. Supported: postgresql://, mysql://, sqlite://`
);
}
function getDrizzleConfig(options = {}) {
|
|
1869
|
-
const databaseUrl = options.databaseUrl ?? process.env.DATABASE_URL;
|
|
1870
|
-
if (!databaseUrl) {
|
|
1871
|
-
throw new Error(
|
|
1872
|
-
"DATABASE_URL is required. Set it in .env or pass it to getDrizzleConfig()"
|
|
1873
|
-
);
|
|
1874
|
-
}
|
|
1875
|
-
const dialect = options.dialect ?? detectDialect(databaseUrl);
|
|
1876
|
-
const out = options.out ?? "./src/server/drizzle";
|
|
1877
|
-
if (options.packageFilter) {
|
|
1878
|
-
const packageSchemas2 = options.disablePackageDiscovery ? [] : discoverPackageSchemas(options.cwd ?? process.cwd());
|
|
1879
|
-
const filteredSchemas = packageSchemas2.filter(
|
|
1880
|
-
(schemaPath) => schemaPath.includes(`node_modules/${options.packageFilter}/`)
|
|
1881
|
-
);
|
|
1882
|
-
if (filteredSchemas.length === 0) {
|
|
1883
|
-
throw new Error(
|
|
1884
|
-
`No schemas found for package ${options.packageFilter}. Make sure the package is installed and has "spfn.schemas" in package.json.`
|
|
1885
|
-
);
|
|
1886
|
-
}
|
|
1887
|
-
const schema2 = filteredSchemas.length === 1 ? filteredSchemas[0] : filteredSchemas;
|
|
1888
|
-
return {
|
|
1889
|
-
schema: schema2,
|
|
1890
|
-
out,
|
|
1891
|
-
dialect,
|
|
1892
|
-
dbCredentials: getDbCredentials(dialect, databaseUrl)
|
|
1893
|
-
};
|
|
1894
|
-
}
|
|
1895
|
-
const userSchema = options.schema ?? "./src/server/entities/**/*.ts";
|
|
1896
|
-
const userSchemas = Array.isArray(userSchema) ? userSchema : [userSchema];
|
|
1897
|
-
const packageSchemas = options.disablePackageDiscovery ? [] : discoverPackageSchemas(options.cwd ?? process.cwd());
|
|
1898
|
-
const allSchemas = [...userSchemas, ...packageSchemas];
|
|
1899
|
-
const schema = allSchemas.length === 1 ? allSchemas[0] : allSchemas;
|
|
1900
|
-
return {
|
|
1901
|
-
schema,
|
|
1902
|
-
out,
|
|
1903
|
-
dialect,
|
|
1904
|
-
dbCredentials: getDbCredentials(dialect, databaseUrl)
|
|
1905
|
-
};
|
|
1906
|
-
}
|
|
1907
|
-
function getDbCredentials(dialect, url) {
|
|
1908
|
-
switch (dialect) {
|
|
1909
|
-
case "postgresql":
|
|
1910
|
-
case "mysql":
|
|
1911
|
-
return { url };
|
|
1912
|
-
case "sqlite":
|
|
1913
|
-
const dbPath = url.replace("sqlite://", "").replace("sqlite:", "");
|
|
1914
|
-
return { url: dbPath };
|
|
1915
|
-
default:
|
|
1916
|
-
throw new Error(`Unsupported dialect: ${dialect}`);
|
|
1917
|
-
}
|
|
1918
|
-
}
|
|
1919
|
-
function generateDrizzleConfigFile(options = {}) {
|
|
1920
|
-
const config = getDrizzleConfig(options);
|
|
1921
|
-
const schemaValue = Array.isArray(config.schema) ? `[
|
|
1922
|
-
${config.schema.map((s) => `'${s}'`).join(",\n ")}
|
|
1923
|
-
]` : `'${config.schema}'`;
|
|
1924
|
-
return `import { defineConfig } from 'drizzle-kit';
|
|
1925
|
-
|
|
1926
|
-
export default defineConfig({
|
|
1927
|
-
schema: ${schemaValue},
|
|
1928
|
-
out: '${config.out}',
|
|
1929
|
-
dialect: '${config.dialect}',
|
|
1930
|
-
dbCredentials: ${JSON.stringify(config.dbCredentials, null, 4)},
|
|
1931
|
-
});
|
|
1932
|
-
`;
|
|
1933
|
-
}
|
|
1934
|
-
var init_config_generator = __esm({
|
|
1935
|
-
"src/db/manager/config-generator.ts"() {
|
|
1936
|
-
}
|
|
1937
|
-
});
|
|
1938
|
-
function id() {
|
|
1939
|
-
return bigserial("id", { mode: "number" }).primaryKey();
|
|
1940
|
-
}
|
|
1941
|
-
function timestamps(options) {
|
|
1942
|
-
const updatedAtColumn = timestamp("updated_at", { withTimezone: true, mode: "date" }).defaultNow().notNull();
|
|
1943
|
-
if (options?.autoUpdate) {
|
|
1944
|
-
updatedAtColumn.__autoUpdate = true;
|
|
1945
|
-
}
|
|
1946
|
-
return {
|
|
1947
|
-
createdAt: timestamp("created_at", { withTimezone: true, mode: "date" }).defaultNow().notNull(),
|
|
1948
|
-
updatedAt: updatedAtColumn
|
|
1949
|
-
};
|
|
1950
|
-
}
|
|
1951
|
-
function foreignKey(name, reference, options) {
|
|
1952
|
-
return bigserial(`${name}_id`, { mode: "number" }).notNull().references(reference, { onDelete: options?.onDelete ?? "cascade" });
|
|
1953
|
-
}
|
|
1954
|
-
function optionalForeignKey(name, reference, options) {
|
|
1955
|
-
return bigserial(`${name}_id`, { mode: "number" }).references(reference, { onDelete: options?.onDelete ?? "set null" });
|
|
1956
|
-
}
|
|
1957
|
-
var init_helpers = __esm({
|
|
1958
|
-
"src/db/schema/helpers.ts"() {
|
|
1959
|
-
}
|
|
1960
|
-
});
|
|
1961
|
-
|
|
1962
|
-
// src/db/schema/index.ts
|
|
1963
|
-
var init_schema = __esm({
|
|
1964
|
-
"src/db/schema/index.ts"() {
|
|
1965
|
-
init_helpers();
|
|
1966
|
-
}
|
|
1967
|
-
});
|
|
1968
|
-
function createFunctionSchema(packageName) {
|
|
1969
|
-
const schemaName = packageNameToSchema(packageName);
|
|
1970
|
-
return pgSchema(schemaName);
|
|
1971
|
-
}
|
|
1972
|
-
function packageNameToSchema(packageName) {
|
|
1973
|
-
return packageName.replace("@", "").replace("/", "_").replace(/-/g, "_");
|
|
1974
|
-
}
|
|
1975
|
-
function getSchemaInfo(packageName) {
|
|
1976
|
-
const isScoped = packageName.startsWith("@");
|
|
1977
|
-
const scope = isScoped ? packageName.split("/")[0].substring(1) : null;
|
|
1978
|
-
const schemaName = packageNameToSchema(packageName);
|
|
1979
|
-
return {
|
|
1980
|
-
schemaName,
|
|
1981
|
-
isScoped,
|
|
1982
|
-
scope
|
|
1983
|
-
};
|
|
1984
|
-
}
|
|
1985
|
-
var init_schema_helper = __esm({
|
|
1986
|
-
"src/db/schema-helper.ts"() {
|
|
1987
|
-
}
|
|
1988
|
-
});
|
|
1989
|
-
function getTransactionContext() {
|
|
1990
|
-
return asyncContext.getStore() ?? null;
|
|
1991
|
-
}
|
|
1992
|
-
function getTransaction() {
|
|
1993
|
-
const context = getTransactionContext();
|
|
1994
|
-
return context?.tx ?? null;
|
|
1995
|
-
}
|
|
1996
|
-
function runWithTransaction(tx, txId, callback) {
|
|
1997
|
-
const existingContext = getTransactionContext();
|
|
1998
|
-
const newLevel = existingContext ? existingContext.level + 1 : 1;
|
|
1999
|
-
if (existingContext) {
|
|
2000
|
-
txLogger.info("Nested transaction started (SAVEPOINT)", {
|
|
2001
|
-
outerTxId: existingContext.txId,
|
|
2002
|
-
innerTxId: txId,
|
|
2003
|
-
level: newLevel
|
|
2004
|
-
});
|
|
2005
|
-
} else {
|
|
2006
|
-
txLogger.debug("Root transaction context set", { txId, level: newLevel });
|
|
2007
|
-
}
|
|
2008
|
-
return asyncContext.run({ tx, txId, level: newLevel }, callback);
|
|
2009
|
-
}
|
|
2010
|
-
var txLogger, asyncContext;
|
|
2011
|
-
var init_context = __esm({
|
|
2012
|
-
"src/db/transaction/context.ts"() {
|
|
2013
|
-
init_logger2();
|
|
2014
|
-
txLogger = logger.child("transaction");
|
|
2015
|
-
asyncContext = new AsyncLocalStorage();
|
|
2016
|
-
}
|
|
2017
|
-
});
|
|
2018
|
-
function Transactional(options = {}) {
|
|
2019
|
-
const defaultTimeout = parseInt(process.env.TRANSACTION_TIMEOUT || "30000", 10);
|
|
2020
|
-
const {
|
|
2021
|
-
slowThreshold = 1e3,
|
|
2022
|
-
enableLogging = true,
|
|
2023
|
-
timeout = defaultTimeout
|
|
2024
|
-
} = options;
|
|
2025
|
-
const txLogger2 = logger.child("transaction");
|
|
2026
|
-
return createMiddleware(async (c, next) => {
|
|
2027
|
-
const txId = `tx_${randomUUID()}`;
|
|
2028
|
-
const startTime = Date.now();
|
|
2029
|
-
const route = `${c.req.method} ${c.req.path}`;
|
|
2030
|
-
if (enableLogging) {
|
|
2031
|
-
txLogger2.debug("Transaction started", { txId, route });
|
|
2032
|
-
}
|
|
2033
|
-
try {
|
|
2034
|
-
const writeDb = getDatabase("write");
|
|
2035
|
-
if (!writeDb) {
|
|
2036
|
-
throw new TransactionError(
|
|
2037
|
-
"Database not initialized. Cannot start transaction.",
|
|
2038
|
-
500,
|
|
2039
|
-
{ txId, route }
|
|
2040
|
-
);
|
|
2041
|
-
}
|
|
2042
|
-
const transactionPromise = writeDb.transaction(async (tx) => {
|
|
2043
|
-
await runWithTransaction(tx, txId, async () => {
|
|
2044
|
-
await next();
|
|
2045
|
-
const contextWithError = c;
|
|
2046
|
-
if (contextWithError.error) {
|
|
2047
|
-
throw contextWithError.error;
|
|
2048
|
-
}
|
|
2049
|
-
});
|
|
2050
|
-
});
|
|
2051
|
-
if (timeout > 0) {
|
|
2052
|
-
const timeoutPromise = new Promise((_, reject) => {
|
|
2053
|
-
setTimeout(() => {
|
|
2054
|
-
reject(
|
|
2055
|
-
new TransactionError(
|
|
2056
|
-
`Transaction timeout after ${timeout}ms`,
|
|
2057
|
-
500,
|
|
2058
|
-
{
|
|
2059
|
-
txId,
|
|
2060
|
-
route,
|
|
2061
|
-
timeout: `${timeout}ms`
|
|
2062
|
-
}
|
|
2063
|
-
)
|
|
2064
|
-
);
|
|
2065
|
-
}, timeout);
|
|
2066
|
-
});
|
|
2067
|
-
await Promise.race([transactionPromise, timeoutPromise]);
|
|
2068
|
-
} else {
|
|
2069
|
-
await transactionPromise;
|
|
2070
|
-
}
|
|
2071
|
-
const duration = Date.now() - startTime;
|
|
2072
|
-
if (enableLogging) {
|
|
2073
|
-
if (duration >= slowThreshold) {
|
|
2074
|
-
txLogger2.warn("Slow transaction committed", {
|
|
2075
|
-
txId,
|
|
2076
|
-
route,
|
|
2077
|
-
duration: `${duration}ms`,
|
|
2078
|
-
threshold: `${slowThreshold}ms`
|
|
2079
|
-
});
|
|
2080
|
-
} else {
|
|
2081
|
-
txLogger2.debug("Transaction committed", {
|
|
2082
|
-
txId,
|
|
2083
|
-
route,
|
|
2084
|
-
duration: `${duration}ms`
|
|
2085
|
-
});
|
|
2086
|
-
}
|
|
2087
|
-
}
|
|
2088
|
-
} catch (error) {
|
|
2089
|
-
const duration = Date.now() - startTime;
|
|
2090
|
-
const customError = error instanceof TransactionError ? error : fromPostgresError(error);
|
|
2091
|
-
if (enableLogging) {
|
|
2092
|
-
txLogger2.error("Transaction rolled back", {
|
|
2093
|
-
txId,
|
|
2094
|
-
route,
|
|
2095
|
-
duration: `${duration}ms`,
|
|
2096
|
-
error: customError.message,
|
|
2097
|
-
errorType: customError.name
|
|
2098
|
-
});
|
|
2099
|
-
}
|
|
2100
|
-
throw customError;
|
|
2101
|
-
}
|
|
2102
|
-
});
|
|
2103
|
-
}
|
|
2104
|
-
var init_middleware = __esm({
|
|
2105
|
-
"src/db/transaction/middleware.ts"() {
|
|
2106
|
-
init_logger2();
|
|
2107
|
-
init_manager2();
|
|
2108
|
-
init_context();
|
|
2109
|
-
init_errors();
|
|
2110
|
-
init_postgres_errors();
|
|
2111
|
-
}
|
|
2112
|
-
});
|
|
2113
|
-
|
|
2114
|
-
// src/db/transaction/index.ts
|
|
2115
|
-
var init_transaction = __esm({
|
|
2116
|
-
"src/db/transaction/index.ts"() {
|
|
2117
|
-
init_context();
|
|
2118
|
-
init_middleware();
|
|
2119
|
-
}
|
|
2120
|
-
});
|
|
2121
|
-
function isSQLWrapper(value) {
|
|
2122
|
-
return value && typeof value === "object" && "queryChunks" in value;
|
|
2123
|
-
}
|
|
2124
|
-
function buildWhereFromObject(table, where) {
|
|
2125
|
-
const entries = Object.entries(where).filter(([_, value]) => value !== void 0);
|
|
2126
|
-
if (entries.length === 0) return void 0;
|
|
2127
|
-
const conditions = entries.map(
|
|
2128
|
-
([key, value]) => eq(table[key], value)
|
|
2129
|
-
);
|
|
2130
|
-
return conditions.length === 1 ? conditions[0] : and(...conditions);
|
|
2131
|
-
}
|
|
2132
|
-
async function findOne(table, where) {
|
|
2133
|
-
const db = getDatabase("read");
|
|
2134
|
-
if (!db) {
|
|
2135
|
-
throw new Error("Database not initialized. Call initDatabase() first.");
|
|
2136
|
-
}
|
|
2137
|
-
const whereClause = isSQLWrapper(where) ? where : where ? buildWhereFromObject(table, where) : void 0;
|
|
2138
|
-
if (!whereClause) {
|
|
2139
|
-
throw new Error("findOne requires at least one where condition");
|
|
2140
|
-
}
|
|
2141
|
-
const results = await db.select().from(table).where(whereClause).limit(1);
|
|
2142
|
-
return results[0] ?? null;
|
|
2143
|
-
}
|
|
2144
|
-
async function findMany(table, options) {
|
|
2145
|
-
const db = getDatabase("read");
|
|
2146
|
-
if (!db) {
|
|
2147
|
-
throw new Error("Database not initialized. Call initDatabase() first.");
|
|
2148
|
-
}
|
|
2149
|
-
let query = db.select().from(table);
|
|
2150
|
-
if (options?.where) {
|
|
2151
|
-
const whereClause = isSQLWrapper(options.where) ? options.where : options.where ? buildWhereFromObject(table, options.where) : void 0;
|
|
2152
|
-
if (whereClause) {
|
|
2153
|
-
query = query.where(whereClause);
|
|
2154
|
-
}
|
|
2155
|
-
}
|
|
2156
|
-
if (options?.orderBy) {
|
|
2157
|
-
const orderByArray = Array.isArray(options.orderBy) ? options.orderBy : [options.orderBy];
|
|
2158
|
-
query = query.orderBy(...orderByArray);
|
|
2159
|
-
}
|
|
2160
|
-
if (options?.limit) {
|
|
2161
|
-
query = query.limit(options.limit);
|
|
2162
|
-
}
|
|
2163
|
-
if (options?.offset) {
|
|
2164
|
-
query = query.offset(options.offset);
|
|
2165
|
-
}
|
|
2166
|
-
return query;
|
|
2167
|
-
}
|
|
2168
|
-
async function create(table, data) {
|
|
2169
|
-
const db = getDatabase("write");
|
|
2170
|
-
if (!db) {
|
|
2171
|
-
throw new Error("Database not initialized. Call initDatabase() first.");
|
|
2172
|
-
}
|
|
2173
|
-
const [result] = await db.insert(table).values(data).returning();
|
|
2174
|
-
return result;
|
|
2175
|
-
}
|
|
2176
|
-
async function createMany(table, data) {
|
|
2177
|
-
const db = getDatabase("write");
|
|
2178
|
-
if (!db) {
|
|
2179
|
-
throw new Error("Database not initialized. Call initDatabase() first.");
|
|
2180
|
-
}
|
|
2181
|
-
const results = await db.insert(table).values(data).returning();
|
|
2182
|
-
return results;
|
|
2183
|
-
}
|
|
2184
|
-
async function upsert(table, data, options) {
|
|
2185
|
-
const db = getDatabase("write");
|
|
2186
|
-
if (!db) {
|
|
2187
|
-
throw new Error("Database not initialized. Call initDatabase() first.");
|
|
2188
|
-
}
|
|
2189
|
-
const [result] = await db.insert(table).values(data).onConflictDoUpdate({
|
|
2190
|
-
target: options.target,
|
|
2191
|
-
set: options.set || data
|
|
2192
|
-
}).returning();
|
|
2193
|
-
return result;
|
|
2194
|
-
}
|
|
2195
|
-
async function updateOne(table, where, data) {
|
|
2196
|
-
const db = getDatabase("write");
|
|
2197
|
-
if (!db) {
|
|
2198
|
-
throw new Error("Database not initialized. Call initDatabase() first.");
|
|
2199
|
-
}
|
|
2200
|
-
const whereClause = isSQLWrapper(where) ? where : where ? buildWhereFromObject(table, where) : void 0;
|
|
2201
|
-
if (!whereClause) {
|
|
2202
|
-
throw new Error("updateOne requires at least one where condition");
|
|
2203
|
-
}
|
|
2204
|
-
const [result] = await db.update(table).set(data).where(whereClause).returning();
|
|
2205
|
-
return result ?? null;
|
|
2206
|
-
}
|
|
2207
|
-
async function updateMany(table, where, data) {
|
|
2208
|
-
const db = getDatabase("write");
|
|
2209
|
-
if (!db) {
|
|
2210
|
-
throw new Error("Database not initialized. Call initDatabase() first.");
|
|
2211
|
-
}
|
|
2212
|
-
const whereClause = isSQLWrapper(where) ? where : where ? buildWhereFromObject(table, where) : void 0;
|
|
2213
|
-
if (!whereClause) {
|
|
2214
|
-
throw new Error("updateMany requires at least one where condition");
|
|
2215
|
-
}
|
|
2216
|
-
const results = await db.update(table).set(data).where(whereClause).returning();
|
|
2217
|
-
return results;
|
|
2218
|
-
}
|
|
2219
|
-
async function deleteOne(table, where) {
|
|
2220
|
-
const db = getDatabase("write");
|
|
2221
|
-
if (!db) {
|
|
2222
|
-
throw new Error("Database not initialized. Call initDatabase() first.");
|
|
2223
|
-
}
|
|
2224
|
-
const whereClause = isSQLWrapper(where) ? where : where ? buildWhereFromObject(table, where) : void 0;
|
|
2225
|
-
if (!whereClause) {
|
|
2226
|
-
throw new Error("deleteOne requires at least one where condition");
|
|
2227
|
-
}
|
|
2228
|
-
const [result] = await db.delete(table).where(whereClause).returning();
|
|
2229
|
-
return result ?? null;
|
|
2230
|
-
}
|
|
2231
|
-
async function deleteMany(table, where) {
|
|
2232
|
-
const db = getDatabase("write");
|
|
2233
|
-
if (!db) {
|
|
2234
|
-
throw new Error("Database not initialized. Call initDatabase() first.");
|
|
2235
|
-
}
|
|
2236
|
-
const whereClause = isSQLWrapper(where) ? where : where ? buildWhereFromObject(table, where) : void 0;
|
|
2237
|
-
if (!whereClause) {
|
|
2238
|
-
throw new Error("deleteMany requires at least one where condition");
|
|
2239
|
-
}
|
|
2240
|
-
const results = await db.delete(table).where(whereClause).returning();
|
|
2241
|
-
return results;
|
|
2242
|
-
}
|
|
2243
|
-
async function count(table, where) {
|
|
2244
|
-
const db = getDatabase("read");
|
|
2245
|
-
if (!db) {
|
|
2246
|
-
throw new Error("Database not initialized. Call initDatabase() first.");
|
|
2247
|
-
}
|
|
2248
|
-
let query = db.select().from(table);
|
|
2249
|
-
if (where) {
|
|
2250
|
-
const whereClause = isSQLWrapper(where) ? where : where ? buildWhereFromObject(table, where) : void 0;
|
|
2251
|
-
if (whereClause) {
|
|
2252
|
-
query = query.where(whereClause);
|
|
2253
|
-
}
|
|
2254
|
-
}
|
|
2255
|
-
const results = await query;
|
|
2256
|
-
return results.length;
|
|
2257
|
-
}
|
|
2258
|
-
var init_helpers2 = __esm({
|
|
2259
|
-
"src/db/helpers.ts"() {
|
|
2260
|
-
init_manager2();
|
|
2261
|
-
}
|
|
2262
|
-
});
|
|
2263
|
-
|
|
2264
|
-
// src/db/index.ts
|
|
2265
|
-
var db_exports = {};
|
|
2266
|
-
__export(db_exports, {
|
|
2267
|
-
Transactional: () => Transactional,
|
|
2268
|
-
checkConnection: () => checkConnection,
|
|
2269
|
-
closeDatabase: () => closeDatabase,
|
|
2270
|
-
count: () => count,
|
|
2271
|
-
create: () => create,
|
|
2272
|
-
createDatabaseConnection: () => createDatabaseConnection,
|
|
2273
|
-
createDatabaseFromEnv: () => createDatabaseFromEnv,
|
|
2274
|
-
createFunctionSchema: () => createFunctionSchema,
|
|
2275
|
-
createMany: () => createMany,
|
|
2276
|
-
deleteMany: () => deleteMany,
|
|
2277
|
-
deleteOne: () => deleteOne,
|
|
2278
|
-
detectDialect: () => detectDialect,
|
|
2279
|
-
findMany: () => findMany,
|
|
2280
|
-
findOne: () => findOne,
|
|
2281
|
-
foreignKey: () => foreignKey,
|
|
2282
|
-
fromPostgresError: () => fromPostgresError,
|
|
2283
|
-
generateDrizzleConfigFile: () => generateDrizzleConfigFile,
|
|
2284
|
-
getDatabase: () => getDatabase,
|
|
2285
|
-
getDatabaseInfo: () => getDatabaseInfo,
|
|
2286
|
-
getDrizzleConfig: () => getDrizzleConfig,
|
|
2287
|
-
getSchemaInfo: () => getSchemaInfo,
|
|
2288
|
-
getTransaction: () => getTransaction,
|
|
2289
|
-
id: () => id,
|
|
2290
|
-
initDatabase: () => initDatabase,
|
|
2291
|
-
optionalForeignKey: () => optionalForeignKey,
|
|
2292
|
-
packageNameToSchema: () => packageNameToSchema,
|
|
2293
|
-
runWithTransaction: () => runWithTransaction,
|
|
2294
|
-
setDatabase: () => setDatabase,
|
|
2295
|
-
timestamps: () => timestamps,
|
|
2296
|
-
updateMany: () => updateMany,
|
|
2297
|
-
updateOne: () => updateOne,
|
|
2298
|
-
upsert: () => upsert
|
|
2299
|
-
});
|
|
2300
|
-
var init_db = __esm({
|
|
2301
|
-
"src/db/index.ts"() {
|
|
2302
|
-
init_manager2();
|
|
2303
|
-
init_config_generator();
|
|
2304
|
-
init_schema();
|
|
2305
|
-
init_schema_helper();
|
|
2306
|
-
init_transaction();
|
|
2307
|
-
init_postgres_errors();
|
|
2308
|
-
init_helpers2();
|
|
2309
|
-
}
|
|
2310
|
-
});
|
|
2311
|
-
|
|
2312
|
-
// src/cache/cache-factory.ts
|
|
2313
|
-
function hasCacheConfig() {
|
|
2314
|
-
return !!// Modern (Valkey/Cache)
|
|
2315
|
-
(process.env.VALKEY_URL || process.env.CACHE_URL || process.env.VALKEY_WRITE_URL || process.env.VALKEY_READ_URL || process.env.CACHE_WRITE_URL || process.env.CACHE_READ_URL || process.env.VALKEY_SENTINEL_HOSTS || process.env.VALKEY_CLUSTER_NODES || // Legacy (Redis - backward compatibility)
|
|
2316
|
-
process.env.REDIS_URL || process.env.REDIS_WRITE_URL || process.env.REDIS_READ_URL || process.env.REDIS_SENTINEL_HOSTS || process.env.REDIS_CLUSTER_NODES);
|
|
2317
|
-
}
|
|
2318
|
-
function getEnv(valkeyKey, cacheKey, redisKey) {
|
|
2319
|
-
return process.env[valkeyKey] || process.env[cacheKey] || process.env[redisKey];
|
|
2320
|
-
}
|
|
2321
|
-
function createClient(RedisClient, url) {
|
|
2322
|
-
const options = {};
|
|
2323
|
-
if (url.startsWith("rediss://") || url.startsWith("valkeys://")) {
|
|
2324
|
-
const rejectUnauthorized = getEnv(
|
|
2325
|
-
"VALKEY_TLS_REJECT_UNAUTHORIZED",
|
|
2326
|
-
"CACHE_TLS_REJECT_UNAUTHORIZED",
|
|
2327
|
-
"REDIS_TLS_REJECT_UNAUTHORIZED"
|
|
2328
|
-
);
|
|
2329
|
-
options.tls = {
|
|
2330
|
-
rejectUnauthorized: rejectUnauthorized !== "false"
|
|
2331
|
-
};
|
|
2332
|
-
}
|
|
2333
|
-
return new RedisClient(url, options);
|
|
2334
|
-
}
|
|
2335
|
-
async function createCacheFromEnv() {
|
|
2336
|
-
if (!hasCacheConfig()) {
|
|
2337
|
-
cacheLogger.info("No cache configuration found - running without cache");
|
|
2338
|
-
return { write: void 0, read: void 0 };
|
|
2339
|
-
}
|
|
2340
|
-
try {
|
|
2341
|
-
const ioredis = await import('ioredis');
|
|
2342
|
-
const RedisClient = ioredis.default;
|
|
2343
|
-
const singleUrl = getEnv("VALKEY_URL", "CACHE_URL", "REDIS_URL");
|
|
2344
|
-
const writeUrl = getEnv("VALKEY_WRITE_URL", "CACHE_WRITE_URL", "REDIS_WRITE_URL");
|
|
2345
|
-
const readUrl = getEnv("VALKEY_READ_URL", "CACHE_READ_URL", "REDIS_READ_URL");
|
|
2346
|
-
const clusterNodes = getEnv("VALKEY_CLUSTER_NODES", "CACHE_CLUSTER_NODES", "REDIS_CLUSTER_NODES");
|
|
2347
|
-
const sentinelHosts = getEnv("VALKEY_SENTINEL_HOSTS", "CACHE_SENTINEL_HOSTS", "REDIS_SENTINEL_HOSTS");
|
|
2348
|
-
const masterName = getEnv("VALKEY_MASTER_NAME", "CACHE_MASTER_NAME", "REDIS_MASTER_NAME");
|
|
2349
|
-
const password = getEnv("VALKEY_PASSWORD", "CACHE_PASSWORD", "REDIS_PASSWORD");
|
|
2350
|
-
if (singleUrl && !writeUrl && !readUrl && !clusterNodes) {
|
|
2351
|
-
const client = createClient(RedisClient, singleUrl);
|
|
2352
|
-
cacheLogger.debug("Created single cache instance", { url: singleUrl.replace(/:[^:@]+@/, ":***@") });
|
|
2353
|
-
return { write: client, read: client };
|
|
2354
|
-
}
|
|
2355
|
-
if (writeUrl && readUrl) {
|
|
2356
|
-
const write = createClient(RedisClient, writeUrl);
|
|
2357
|
-
const read = createClient(RedisClient, readUrl);
|
|
2358
|
-
cacheLogger.debug("Created master-replica cache instances");
|
|
2359
|
-
return { write, read };
|
|
2360
|
-
}
|
|
2361
|
-
if (sentinelHosts && masterName) {
|
|
2362
|
-
const sentinels = sentinelHosts.split(",").map((host) => {
|
|
2363
|
-
const [hostname, port] = host.trim().split(":");
|
|
2364
|
-
return { host: hostname, port: Number(port) || 26379 };
|
|
2365
|
-
});
|
|
2366
|
-
const options = {
|
|
2367
|
-
sentinels,
|
|
2368
|
-
name: masterName,
|
|
2369
|
-
password
|
|
2370
|
-
};
|
|
2371
|
-
const client = new RedisClient(options);
|
|
2372
|
-
cacheLogger.debug("Created sentinel cache instance", { masterName, sentinels: sentinels.length });
|
|
2373
|
-
return { write: client, read: client };
|
|
2374
|
-
}
|
|
2375
|
-
if (clusterNodes) {
|
|
2376
|
-
const nodes = clusterNodes.split(",").map((node) => {
|
|
2377
|
-
const [host, port] = node.trim().split(":");
|
|
2378
|
-
return { host, port: Number(port) || 6379 };
|
|
2379
|
-
});
|
|
2380
|
-
const clusterOptions = {
|
|
2381
|
-
redisOptions: {
|
|
2382
|
-
password
|
|
2383
|
-
}
|
|
2384
|
-
};
|
|
2385
|
-
const cluster = new RedisClient.Cluster(nodes, clusterOptions);
|
|
2386
|
-
cacheLogger.debug("Created cluster cache instance", { nodes: nodes.length });
|
|
2387
|
-
return { write: cluster, read: cluster };
|
|
2388
|
-
}
|
|
2389
|
-
if (singleUrl) {
|
|
2390
|
-
const client = createClient(RedisClient, singleUrl);
|
|
2391
|
-
cacheLogger.debug("Created cache instance (fallback)", { url: singleUrl.replace(/:[^:@]+@/, ":***@") });
|
|
2392
|
-
return { write: client, read: client };
|
|
2393
|
-
}
|
|
2394
|
-
cacheLogger.info("No valid cache configuration found - running without cache");
|
|
2395
|
-
return { write: void 0, read: void 0 };
|
|
2396
|
-
} catch (error) {
|
|
2397
|
-
if (error instanceof Error) {
|
|
2398
|
-
if (error.message.includes("Cannot find module")) {
|
|
2399
|
-
cacheLogger.warn(
|
|
2400
|
-
"Cache client library not installed",
|
|
2401
|
-
error,
|
|
2402
|
-
{
|
|
2403
|
-
suggestion: "Install ioredis to enable cache: pnpm install ioredis",
|
|
2404
|
-
mode: "disabled"
|
|
2405
|
-
}
|
|
2406
|
-
);
|
|
2407
|
-
} else {
|
|
2408
|
-
cacheLogger.warn(
|
|
2409
|
-
"Failed to create cache client",
|
|
2410
|
-
error,
|
|
2411
|
-
{ mode: "disabled" }
|
|
2412
|
-
);
|
|
2413
|
-
}
|
|
2414
|
-
} else {
|
|
2415
|
-
cacheLogger.warn(
|
|
2416
|
-
"Failed to create cache client",
|
|
2417
|
-
{ error: String(error), mode: "disabled" }
|
|
2418
|
-
);
|
|
2419
|
-
}
|
|
2420
|
-
return { write: void 0, read: void 0 };
|
|
2421
|
-
}
|
|
2422
|
-
}
|
|
2423
|
-
async function createSingleCacheFromEnv() {
|
|
2424
|
-
const { write } = await createCacheFromEnv();
|
|
2425
|
-
return write;
|
|
2426
|
-
}
|
|
2427
|
-
var cacheLogger;
|
|
2428
|
-
var init_cache_factory = __esm({
|
|
2429
|
-
"src/cache/cache-factory.ts"() {
|
|
2430
|
-
init_logger2();
|
|
2431
|
-
cacheLogger = logger.child("cache");
|
|
2432
|
-
}
|
|
2433
|
-
});
|
|
2434
|
-
|
|
2435
|
-
// src/cache/cache-manager.ts
|
|
2436
|
-
function getCache() {
|
|
2437
|
-
return writeInstance;
|
|
2438
|
-
}
|
|
2439
|
-
function getCacheRead() {
|
|
2440
|
-
return readInstance ?? writeInstance;
|
|
2441
|
-
}
|
|
2442
|
-
function isCacheDisabled() {
|
|
2443
|
-
return isDisabled;
|
|
2444
|
-
}
|
|
2445
|
-
function setCache(write, read) {
|
|
2446
|
-
writeInstance = write;
|
|
2447
|
-
readInstance = read ?? write;
|
|
2448
|
-
isDisabled = !write;
|
|
2449
|
-
}
|
|
2450
|
-
async function initCache() {
|
|
2451
|
-
if (writeInstance) {
|
|
2452
|
-
return { write: writeInstance, read: readInstance, disabled: isDisabled };
|
|
2453
|
-
}
|
|
2454
|
-
const { write, read } = await createCacheFromEnv();
|
|
2455
|
-
if (write) {
|
|
2456
|
-
try {
|
|
2457
|
-
await write.ping();
|
|
2458
|
-
if (read && read !== write) {
|
|
2459
|
-
await read.ping();
|
|
2460
|
-
}
|
|
2461
|
-
writeInstance = write;
|
|
2462
|
-
readInstance = read;
|
|
2463
|
-
isDisabled = false;
|
|
2464
|
-
const hasReplica = read && read !== write;
|
|
2465
|
-
cacheLogger2.info(
|
|
2466
|
-
hasReplica ? "Cache connected (Master-Replica)" : "Cache connected",
|
|
2467
|
-
{ mode: "enabled" }
|
|
2468
|
-
);
|
|
2469
|
-
return { write: writeInstance, read: readInstance, disabled: false };
|
|
2470
|
-
} catch (error) {
|
|
2471
|
-
cacheLogger2.error(
|
|
2472
|
-
"Cache connection failed - running in disabled mode",
|
|
2473
|
-
error instanceof Error ? error : new Error(String(error)),
|
|
2474
|
-
{ mode: "disabled" }
|
|
2475
|
-
);
|
|
2476
|
-
try {
|
|
2477
|
-
await write.quit();
|
|
2478
|
-
if (read && read !== write) {
|
|
2479
|
-
await read.quit();
|
|
2480
|
-
}
|
|
2481
|
-
} catch {
|
|
2482
|
-
}
|
|
2483
|
-
isDisabled = true;
|
|
2484
|
-
return { write: void 0, read: void 0, disabled: true };
|
|
2485
|
-
}
|
|
2486
|
-
}
|
|
2487
|
-
isDisabled = true;
|
|
2488
|
-
cacheLogger2.info("Cache disabled - no configuration or library not installed", { mode: "disabled" });
|
|
2489
|
-
return { write: void 0, read: void 0, disabled: true };
|
|
2490
|
-
}
|
|
2491
|
-
async function closeCache() {
|
|
2492
|
-
if (isDisabled) {
|
|
2493
|
-
cacheLogger2.debug("Cache already disabled, nothing to close");
|
|
2494
|
-
return;
|
|
2495
|
-
}
|
|
2496
|
-
const closePromises = [];
|
|
2497
|
-
if (writeInstance) {
|
|
2498
|
-
closePromises.push(
|
|
2499
|
-
writeInstance.quit().catch((err) => {
|
|
2500
|
-
cacheLogger2.error("Error closing cache write instance", err);
|
|
2501
|
-
})
|
|
2502
|
-
);
|
|
2503
|
-
}
|
|
2504
|
-
if (readInstance && readInstance !== writeInstance) {
|
|
2505
|
-
closePromises.push(
|
|
2506
|
-
readInstance.quit().catch((err) => {
|
|
2507
|
-
cacheLogger2.error("Error closing cache read instance", err);
|
|
2508
|
-
})
|
|
2509
|
-
);
|
|
2510
|
-
}
|
|
2511
|
-
await Promise.all(closePromises);
|
|
2512
|
-
writeInstance = void 0;
|
|
2513
|
-
readInstance = void 0;
|
|
2514
|
-
isDisabled = true;
|
|
2515
|
-
cacheLogger2.info("Cache connections closed", { mode: "disabled" });
|
|
2516
|
-
}
|
|
2517
|
-
function getCacheInfo() {
|
|
2518
|
-
return {
|
|
2519
|
-
hasWrite: !!writeInstance,
|
|
2520
|
-
hasRead: !!readInstance,
|
|
2521
|
-
isReplica: !!(readInstance && readInstance !== writeInstance),
|
|
2522
|
-
disabled: isDisabled
|
|
2523
|
-
};
|
|
2524
|
-
}
|
|
2525
|
-
var cacheLogger2, writeInstance, readInstance, isDisabled, getRedis, getRedisRead, setRedis, initRedis, closeRedis, getRedisInfo;
|
|
2526
|
-
var init_cache_manager = __esm({
|
|
2527
|
-
"src/cache/cache-manager.ts"() {
|
|
2528
|
-
init_cache_factory();
|
|
2529
|
-
init_logger2();
|
|
2530
|
-
cacheLogger2 = logger.child("cache");
|
|
2531
|
-
isDisabled = false;
|
|
2532
|
-
getRedis = getCache;
|
|
2533
|
-
getRedisRead = getCacheRead;
|
|
2534
|
-
setRedis = setCache;
|
|
2535
|
-
initRedis = initCache;
|
|
2536
|
-
closeRedis = closeCache;
|
|
2537
|
-
getRedisInfo = getCacheInfo;
|
|
2538
|
-
}
|
|
2539
|
-
});
|
|
2540
|
-
|
|
2541
|
-
// src/cache/index.ts
|
|
2542
|
-
var cache_exports = {};
|
|
2543
|
-
__export(cache_exports, {
|
|
2544
|
-
closeCache: () => closeCache,
|
|
2545
|
-
closeRedis: () => closeRedis,
|
|
2546
|
-
createCacheFromEnv: () => createCacheFromEnv,
|
|
2547
|
-
createRedisFromEnv: () => createCacheFromEnv,
|
|
2548
|
-
createSingleCacheFromEnv: () => createSingleCacheFromEnv,
|
|
2549
|
-
createSingleRedisFromEnv: () => createSingleCacheFromEnv,
|
|
2550
|
-
getCache: () => getCache,
|
|
2551
|
-
getCacheInfo: () => getCacheInfo,
|
|
2552
|
-
getCacheRead: () => getCacheRead,
|
|
2553
|
-
getRedis: () => getRedis,
|
|
2554
|
-
getRedisInfo: () => getRedisInfo,
|
|
2555
|
-
getRedisRead: () => getRedisRead,
|
|
2556
|
-
initCache: () => initCache,
|
|
2557
|
-
initRedis: () => initRedis,
|
|
2558
|
-
isCacheDisabled: () => isCacheDisabled,
|
|
2559
|
-
setCache: () => setCache,
|
|
2560
|
-
setRedis: () => setRedis
|
|
2561
|
-
});
|
|
2562
|
-
var init_cache = __esm({
|
|
2563
|
-
"src/cache/index.ts"() {
|
|
2564
|
-
init_cache_factory();
|
|
2565
|
-
init_cache_manager();
|
|
2566
|
-
init_cache_manager();
|
|
2567
|
-
init_cache_factory();
|
|
2568
|
-
}
|
|
2569
|
-
});
|
|
2570
|
-
|
|
2571
|
-
// src/route/auto-loader.ts
|
|
2572
|
-
init_logger2();
|
|
2573
|
-
var routeLogger2 = logger.child("route");
|
|
2574
|
-
var AutoRouteLoader = class {
|
|
2575
|
-
constructor(routesDir, debug = false, middlewares = []) {
|
|
2576
|
-
this.routesDir = routesDir;
|
|
2577
|
-
this.debug = debug;
|
|
2578
|
-
this.middlewares = middlewares;
|
|
2579
|
-
}
|
|
2580
|
-
routes = [];
|
|
2581
|
-
debug;
|
|
2582
|
-
middlewares;
|
|
2583
|
-
async load(app) {
|
|
2584
|
-
const startTime = Date.now();
|
|
2585
|
-
const files = await this.scanFiles(this.routesDir);
|
|
2586
|
-
if (files.length === 0) {
|
|
2587
|
-
routeLogger2.warn("No route files found");
|
|
2588
|
-
return this.getStats();
|
|
2589
|
-
}
|
|
2590
|
-
let failureCount = 0;
|
|
2591
|
-
for (const file of files) {
|
|
2592
|
-
const success = await this.loadRoute(app, file);
|
|
2593
|
-
if (success) ; else {
|
|
2594
|
-
failureCount++;
|
|
2595
|
-
}
|
|
2596
|
-
}
|
|
2597
|
-
const elapsed = Date.now() - startTime;
|
|
2598
|
-
const stats = this.getStats();
|
|
2599
|
-
if (this.debug) {
|
|
2600
|
-
this.logStats(stats, elapsed);
|
|
2601
|
-
}
|
|
2602
|
-
if (failureCount > 0) {
|
|
2603
|
-
routeLogger2.warn("Some routes failed to load", { failureCount });
|
|
2604
|
-
}
|
|
2605
|
-
return stats;
|
|
2606
|
-
}
|
|
2607
|
-
/**
|
|
2608
|
-
* Load routes from an external directory (e.g., from SPFN function packages)
|
|
2609
|
-
* Reads package.json spfn.prefix and mounts routes under that prefix
|
|
2610
|
-
*
|
|
2611
|
-
* @param app - Hono app instance
|
|
2612
|
-
* @param routesDir - Directory containing route handlers
|
|
2613
|
-
* @param packageName - Name of the package (for logging)
|
|
2614
|
-
* @param prefix - Optional prefix to mount routes under (from package.json spfn.prefix)
|
|
2615
|
-
* @returns Route statistics
|
|
2616
|
-
*/
|
|
2617
|
-
async loadExternalRoutes(app, routesDir, packageName, prefix) {
|
|
2618
|
-
const startTime = Date.now();
|
|
2619
|
-
const tempRoutesDir = this.routesDir;
|
|
2620
|
-
this.routesDir = routesDir;
|
|
2621
|
-
const files = await this.scanFiles(routesDir);
|
|
2622
|
-
if (files.length === 0) {
|
|
2623
|
-
routeLogger2.warn("No route files found", { dir: routesDir, package: packageName });
|
|
2624
|
-
this.routesDir = tempRoutesDir;
|
|
2625
|
-
return this.getStats();
|
|
2626
|
-
}
|
|
2627
|
-
let successCount = 0;
|
|
2628
|
-
let failureCount = 0;
|
|
2629
|
-
for (const file of files) {
|
|
2630
|
-
const success = await this.loadRoute(app, file, prefix);
|
|
2631
|
-
if (success) {
|
|
2632
|
-
successCount++;
|
|
2633
|
-
} else {
|
|
2634
|
-
failureCount++;
|
|
2635
|
-
}
|
|
2636
|
-
}
|
|
2637
|
-
const elapsed = Date.now() - startTime;
|
|
2638
|
-
if (this.debug) {
|
|
2639
|
-
routeLogger2.info("External routes loaded", {
|
|
2640
|
-
package: packageName,
|
|
2641
|
-
prefix: prefix || "/",
|
|
2642
|
-
total: successCount,
|
|
2643
|
-
failed: failureCount,
|
|
2644
|
-
elapsed: `${elapsed}ms`
|
|
2645
|
-
});
|
|
2646
|
-
}
|
|
2647
|
-
this.routesDir = tempRoutesDir;
|
|
2648
|
-
return this.getStats();
|
|
2649
|
-
}
|
|
2650
|
-
getStats() {
|
|
2651
|
-
const stats = {
|
|
2652
|
-
total: this.routes.length,
|
|
2653
|
-
byPriority: { static: 0, dynamic: 0, catchAll: 0 },
|
|
2654
|
-
byTag: {},
|
|
2655
|
-
routes: this.routes
|
|
2656
|
-
};
|
|
2657
|
-
for (const route of this.routes) {
|
|
2658
|
-
if (route.priority === 1) stats.byPriority.static++;
|
|
2659
|
-
else if (route.priority === 2) stats.byPriority.dynamic++;
|
|
2660
|
-
else if (route.priority === 3) stats.byPriority.catchAll++;
|
|
2661
|
-
if (route.meta?.tags) {
|
|
2662
|
-
for (const tag of route.meta.tags) {
|
|
2663
|
-
stats.byTag[tag] = (stats.byTag[tag] || 0) + 1;
|
|
2664
|
-
}
|
|
2665
|
-
}
|
|
2666
|
-
}
|
|
2667
|
-
return stats;
|
|
2668
|
-
}
|
|
2669
|
-
async scanFiles(dir, files = []) {
|
|
2670
|
-
const entries = await readdir(dir);
|
|
2671
|
-
for (const entry of entries) {
|
|
2672
|
-
const fullPath = join(dir, entry);
|
|
2673
|
-
const fileStat = await stat(fullPath);
|
|
2674
|
-
if (fileStat.isDirectory()) {
|
|
2675
|
-
await this.scanFiles(fullPath, files);
|
|
2676
|
-
} else if (this.isValidRouteFile(entry)) {
|
|
2677
|
-
files.push(fullPath);
|
|
2678
|
-
}
|
|
2679
|
-
}
|
|
2680
|
-
return files;
|
|
2681
|
-
}
|
|
2682
|
-
isValidRouteFile(fileName) {
|
|
2683
|
-
return fileName === "index.ts" || fileName === "index.js" || fileName === "index.mjs";
|
|
2684
|
-
}
|
|
2685
|
-
async loadRoute(app, absolutePath, prefix) {
|
|
2686
|
-
const relativePath = relative(this.routesDir, absolutePath);
|
|
2687
|
-
try {
|
|
2688
|
-
const module = await import(absolutePath);
|
|
2689
|
-
if (!this.validateModule(module, relativePath)) {
|
|
2690
|
-
return false;
|
|
2691
|
-
}
|
|
2692
|
-
const hasContractMetas = module.default._contractMetas && module.default._contractMetas.size > 0;
|
|
2693
|
-
if (!hasContractMetas) {
|
|
2694
|
-
routeLogger2.error("Route must use contract-based routing", {
|
|
2695
|
-
file: relativePath,
|
|
2696
|
-
hint: "Export contracts using satisfies RouteContract and use app.bind()"
|
|
2697
|
-
});
|
|
2698
|
-
return false;
|
|
2699
|
-
}
|
|
2700
|
-
const contractPaths = this.extractContractPaths(module);
|
|
2701
|
-
if (prefix) {
|
|
2702
|
-
const invalidPaths = contractPaths.filter((path) => !path.startsWith(prefix));
|
|
2703
|
-
if (invalidPaths.length > 0) {
|
|
2704
|
-
routeLogger2.error("Contract paths must include the package prefix", {
|
|
2705
|
-
file: relativePath,
|
|
2706
|
-
prefix,
|
|
2707
|
-
invalidPaths,
|
|
2708
|
-
hint: `Contract paths should start with "${prefix}". Example: path: "${prefix}/labels"`
|
|
2709
|
-
});
|
|
2710
|
-
return false;
|
|
2711
|
-
}
|
|
2712
|
-
}
|
|
2713
|
-
this.registerContractBasedMiddlewares(app, contractPaths, module);
|
|
2714
|
-
app.route("/", module.default);
|
|
2715
|
-
contractPaths.forEach((path) => {
|
|
2716
|
-
this.routes.push({
|
|
2717
|
-
path,
|
|
2718
|
-
// Use contract path as-is (already includes prefix)
|
|
2719
|
-
file: relativePath,
|
|
2720
|
-
meta: module.meta,
|
|
2721
|
-
priority: this.calculateContractPriority(path)
|
|
2722
|
-
});
|
|
2723
|
-
if (this.debug) {
|
|
2724
|
-
const icon = path.includes("*") ? "\u2B50" : path.includes(":") ? "\u{1F538}" : "\u{1F539}";
|
|
2725
|
-
routeLogger2.debug(`Registered route: ${path}`, { icon, file: relativePath });
|
|
2726
|
-
}
|
|
2727
|
-
});
|
|
2728
|
-
return true;
|
|
2729
|
-
} catch (error) {
|
|
2730
|
-
this.categorizeAndLogError(error, relativePath);
|
|
2731
|
-
return false;
|
|
2732
|
-
}
|
|
2733
|
-
}
|
|
2734
|
-
extractContractPaths(module) {
|
|
2735
|
-
const paths = /* @__PURE__ */ new Set();
|
|
2736
|
-
if (module.default._contractMetas) {
|
|
2737
|
-
for (const key of module.default._contractMetas.keys()) {
|
|
2738
|
-
const path = key.split(" ")[1];
|
|
2739
|
-
if (path) {
|
|
2740
|
-
paths.add(path);
|
|
2741
|
-
}
|
|
2742
|
-
}
|
|
2743
|
-
}
|
|
2744
|
-
return Array.from(paths);
|
|
2745
|
-
}
|
|
2746
|
-
calculateContractPriority(path) {
|
|
2747
|
-
if (path.includes("*")) return 3;
|
|
2748
|
-
if (path.includes(":")) return 2;
|
|
2749
|
-
return 1;
|
|
2750
|
-
}
|
|
2751
|
-
validateModule(module, relativePath) {
|
|
2752
|
-
if (!module.default) {
|
|
2753
|
-
routeLogger2.error("Route must export Hono instance as default", { file: relativePath });
|
|
2754
|
-
return false;
|
|
2755
|
-
}
|
|
2756
|
-
if (typeof module.default.route !== "function") {
|
|
2757
|
-
routeLogger2.error("Default export is not a Hono instance", { file: relativePath });
|
|
2758
|
-
return false;
|
|
2759
|
-
}
|
|
2760
|
-
return true;
|
|
2761
|
-
}
|
|
2762
|
-
registerContractBasedMiddlewares(app, contractPaths, module) {
|
|
2763
|
-
app.use("*", (c, next) => {
|
|
2764
|
-
const method = c.req.method;
|
|
2765
|
-
const requestPath = new URL(c.req.url).pathname;
|
|
2766
|
-
const key = `${method} ${requestPath}`;
|
|
2767
|
-
const meta = module.default._contractMetas?.get(key);
|
|
2768
|
-
if (meta?.skipMiddlewares) {
|
|
2769
|
-
c.set("_skipMiddlewares", meta.skipMiddlewares);
|
|
2770
|
-
}
|
|
2771
|
-
return next();
|
|
2772
|
-
});
|
|
2773
|
-
for (const contractPath of contractPaths) {
|
|
2774
|
-
const middlewarePath = contractPath === "/" ? "/*" : `${contractPath}/*`;
|
|
2775
|
-
for (const middleware of this.middlewares) {
|
|
2776
|
-
app.use(middlewarePath, async (c, next) => {
|
|
2777
|
-
const skipList = c.get("_skipMiddlewares") || [];
|
|
2778
|
-
if (skipList.includes(middleware.name)) {
|
|
2779
|
-
return next();
|
|
2780
|
-
}
|
|
2781
|
-
return middleware.handler(c, next);
|
|
2782
|
-
});
|
|
2783
|
-
}
|
|
2784
|
-
}
|
|
2785
|
-
}
|
|
2786
|
-
categorizeAndLogError(error, relativePath) {
|
|
2787
|
-
const message = error.message;
|
|
2788
|
-
const stack = error.stack;
|
|
2789
|
-
if (message.includes("Cannot find module") || message.includes("MODULE_NOT_FOUND")) {
|
|
2790
|
-
routeLogger2.error("Missing dependency", {
|
|
2791
|
-
file: relativePath,
|
|
2792
|
-
error: message,
|
|
2793
|
-
hint: "Run: npm install"
|
|
2794
|
-
});
|
|
2795
|
-
} else if (message.includes("SyntaxError") || stack?.includes("SyntaxError")) {
|
|
2796
|
-
routeLogger2.error("Syntax error", {
|
|
2797
|
-
file: relativePath,
|
|
2798
|
-
error: message,
|
|
2799
|
-
...this.debug && stack && {
|
|
2800
|
-
stack: stack.split("\n").slice(0, 5).join("\n")
|
|
2801
|
-
}
|
|
2802
|
-
});
|
|
2803
|
-
} else if (message.includes("Unexpected token")) {
|
|
2804
|
-
routeLogger2.error("Parse error", {
|
|
2805
|
-
file: relativePath,
|
|
2806
|
-
error: message,
|
|
2807
|
-
hint: "Check for syntax errors or invalid TypeScript"
|
|
2808
|
-
});
|
|
2809
|
-
} else {
|
|
2810
|
-
routeLogger2.error("Route loading failed", {
|
|
2811
|
-
file: relativePath,
|
|
2812
|
-
error: message,
|
|
2813
|
-
...this.debug && stack && { stack }
|
|
2814
|
-
});
|
|
2815
|
-
}
|
|
2816
|
-
}
|
|
2817
|
-
logStats(stats, elapsed) {
|
|
2818
|
-
const tagCounts = Object.entries(stats.byTag).map(([tag, count2]) => `${tag}(${count2})`).join(", ");
|
|
2819
|
-
routeLogger2.info("Routes loaded successfully", {
|
|
2820
|
-
total: stats.total,
|
|
2821
|
-
priority: {
|
|
2822
|
-
static: stats.byPriority.static,
|
|
2823
|
-
dynamic: stats.byPriority.dynamic,
|
|
2824
|
-
catchAll: stats.byPriority.catchAll
|
|
2825
|
-
},
|
|
2826
|
-
...tagCounts && { tags: tagCounts },
|
|
2827
|
-
elapsed: `${elapsed}ms`
|
|
2828
|
-
});
|
|
2829
|
-
}
|
|
2830
|
-
};
|
|
2831
|
-
async function loadRoutes(app, options) {
|
|
2832
|
-
const routesDir = options?.routesDir ?? join(process.cwd(), "src", "server", "routes");
|
|
2833
|
-
const debug = options?.debug ?? false;
|
|
2834
|
-
const middlewares = options?.middlewares ?? [];
|
|
2835
|
-
const includeFunctionRoutes = options?.includeFunctionRoutes ?? true;
|
|
2836
|
-
const loader = new AutoRouteLoader(routesDir, debug, middlewares);
|
|
2837
|
-
const stats = await loader.load(app);
|
|
2838
|
-
if (includeFunctionRoutes) {
|
|
2839
|
-
const { discoverFunctionRoutes: discoverFunctionRoutes2 } = await Promise.resolve().then(() => (init_function_routes(), function_routes_exports));
|
|
2840
|
-
const functionRoutes = discoverFunctionRoutes2();
|
|
2841
|
-
if (functionRoutes.length > 0) {
|
|
2842
|
-
routeLogger2.info("Loading function routes", { count: functionRoutes.length });
|
|
2843
|
-
for (const func of functionRoutes) {
|
|
2844
|
-
try {
|
|
2845
|
-
await loader.loadExternalRoutes(app, func.routesDir, func.packageName, func.prefix);
|
|
2846
|
-
routeLogger2.info("Function routes loaded", {
|
|
2847
|
-
package: func.packageName,
|
|
2848
|
-
routesDir: func.routesDir,
|
|
2849
|
-
prefix: func.prefix || "/"
|
|
2850
|
-
});
|
|
2851
|
-
} catch (error) {
|
|
2852
|
-
routeLogger2.error("Failed to load function routes", {
|
|
2853
|
-
package: func.packageName,
|
|
2854
|
-
error: error instanceof Error ? error.message : "Unknown error"
|
|
2855
|
-
});
|
|
2856
|
-
}
|
|
2857
|
-
}
|
|
2858
|
-
}
|
|
2859
|
-
}
|
|
2860
|
-
return stats;
|
|
2861
|
-
}
|
|
2862
|
-
|
|
2863
|
-
// src/route/bind.ts
|
|
2864
|
-
init_errors();
|
|
2865
|
-
init_logger2();
|
|
2866
|
-
|
|
2867
|
-
// src/middleware/error-handler.ts
|
|
2868
|
-
init_logger2();
|
|
2869
|
-
var errorLogger = logger.child("error-handler");
|
|
2870
|
-
function ErrorHandler(options = {}) {
|
|
2871
|
-
const {
|
|
2872
|
-
includeStack = process.env.NODE_ENV !== "production",
|
|
2873
|
-
enableLogging = true
|
|
2874
|
-
} = options;
|
|
2875
|
-
return (err, c) => {
|
|
2876
|
-
const errorWithCode = err;
|
|
2877
|
-
const statusCode = errorWithCode.statusCode || 500;
|
|
2878
|
-
const errorType = err.name || "Error";
|
|
2879
|
-
if (enableLogging) {
|
|
2880
|
-
const logLevel = statusCode >= 500 ? "error" : "warn";
|
|
2881
|
-
const logData = {
|
|
2882
|
-
type: errorType,
|
|
2883
|
-
message: err.message,
|
|
2884
|
-
statusCode,
|
|
2885
|
-
path: c.req.path,
|
|
2886
|
-
method: c.req.method
|
|
2887
|
-
};
|
|
2888
|
-
if (errorWithCode.details) {
|
|
2889
|
-
logData.details = errorWithCode.details;
|
|
2890
|
-
}
|
|
2891
|
-
if (statusCode >= 500 && includeStack) {
|
|
2892
|
-
logData.stack = err.stack;
|
|
2893
|
-
}
|
|
2894
|
-
errorLogger[logLevel]("Error occurred", logData);
|
|
2895
|
-
}
|
|
2896
|
-
const response = {
|
|
2897
|
-
success: false,
|
|
2898
|
-
error: {
|
|
2899
|
-
message: err.message || "Internal Server Error",
|
|
2900
|
-
type: errorType,
|
|
2901
|
-
statusCode
|
|
2902
|
-
}
|
|
2903
|
-
};
|
|
2904
|
-
if (errorWithCode.details) {
|
|
2905
|
-
response.error.details = errorWithCode.details;
|
|
2906
|
-
}
|
|
2907
|
-
if (includeStack) {
|
|
2908
|
-
response.error.stack = err.stack;
|
|
2909
|
-
}
|
|
2910
|
-
return c.json(response, statusCode);
|
|
2911
|
-
};
|
|
2912
|
-
}
|
|
2913
|
-
|
|
2914
|
-
// src/middleware/request-logger.ts
|
|
2915
|
-
init_logger2();
|
|
2916
|
-
var DEFAULT_CONFIG = {
|
|
2917
|
-
excludePaths: ["/health", "/ping", "/favicon.ico"],
|
|
2918
|
-
sensitiveFields: ["password", "token", "apiKey", "secret", "authorization"],
|
|
2919
|
-
slowRequestThreshold: 1e3
|
|
2920
|
-
};
|
|
2921
|
-
function generateRequestId() {
|
|
2922
|
-
const timestamp2 = Date.now();
|
|
2923
|
-
const randomPart = randomBytes(6).toString("hex");
|
|
2924
|
-
return `req_${timestamp2}_${randomPart}`;
|
|
2925
|
-
}
|
|
2926
|
-
function maskSensitiveData2(obj, sensitiveFields, seen = /* @__PURE__ */ new WeakSet()) {
|
|
2927
|
-
if (!obj || typeof obj !== "object") return obj;
|
|
2928
|
-
if (seen.has(obj)) return "[Circular]";
|
|
2929
|
-
seen.add(obj);
|
|
2930
|
-
const lowerFields = sensitiveFields.map((f) => f.toLowerCase());
|
|
2931
|
-
const masked = Array.isArray(obj) ? [...obj] : { ...obj };
|
|
2932
|
-
for (const key in masked) {
|
|
2933
|
-
const lowerKey = key.toLowerCase();
|
|
2934
|
-
if (lowerFields.some((field) => lowerKey.includes(field))) {
|
|
2935
|
-
masked[key] = "***MASKED***";
|
|
2936
|
-
} else if (typeof masked[key] === "object" && masked[key] !== null) {
|
|
2937
|
-
masked[key] = maskSensitiveData2(masked[key], sensitiveFields, seen);
|
|
2938
|
-
}
|
|
2939
|
-
}
|
|
2940
|
-
return masked;
|
|
2941
|
-
}
|
|
2942
|
-
function RequestLogger(config) {
|
|
2943
|
-
const cfg = { ...DEFAULT_CONFIG, ...config };
|
|
2944
|
-
const apiLogger = logger.child("api");
|
|
2945
|
-
return async (c, next) => {
|
|
2946
|
-
const path = new URL(c.req.url).pathname;
|
|
2947
|
-
if (cfg.excludePaths.includes(path)) {
|
|
2948
|
-
return next();
|
|
2949
|
-
}
|
|
2950
|
-
const requestId = generateRequestId();
|
|
2951
|
-
c.set("requestId", requestId);
|
|
2952
|
-
const method = c.req.method;
|
|
2953
|
-
const userAgent = c.req.header("user-agent");
|
|
2954
|
-
const ip = c.req.header("x-forwarded-for") || c.req.header("x-real-ip") || "unknown";
|
|
2955
|
-
const startTime = Date.now();
|
|
2956
|
-
apiLogger.info("Request received", {
|
|
2957
|
-
requestId,
|
|
2958
|
-
method,
|
|
2959
|
-
path,
|
|
2960
|
-
ip,
|
|
2961
|
-
userAgent
|
|
2962
|
-
});
|
|
2963
|
-
try {
|
|
2964
|
-
await next();
|
|
2965
|
-
const duration = Date.now() - startTime;
|
|
2966
|
-
const status = c.res.status;
|
|
2967
|
-
const logData = {
|
|
2968
|
-
requestId,
|
|
2969
|
-
method,
|
|
2970
|
-
path,
|
|
2971
|
-
status,
|
|
2972
|
-
duration
|
|
2973
|
-
};
|
|
2974
|
-
const isSlowRequest = duration >= cfg.slowRequestThreshold;
|
|
2975
|
-
if (isSlowRequest) {
|
|
2976
|
-
logData.slow = true;
|
|
2977
|
-
}
|
|
2978
|
-
if (status >= 400) {
|
|
2979
|
-
try {
|
|
2980
|
-
const responseBody = await c.res.clone().json();
|
|
2981
|
-
logData.response = responseBody;
|
|
2982
|
-
} catch {
|
|
2983
|
-
}
|
|
2984
|
-
if (["POST", "PUT", "PATCH"].includes(method)) {
|
|
2985
|
-
try {
|
|
2986
|
-
const requestBody = await c.req.json();
|
|
2987
|
-
logData.request = maskSensitiveData2(requestBody, cfg.sensitiveFields);
|
|
2988
|
-
} catch {
|
|
2989
|
-
}
|
|
2990
|
-
}
|
|
2991
|
-
}
|
|
2992
|
-
const logLevel = status >= 500 ? "error" : status >= 400 ? "warn" : "info";
|
|
2993
|
-
apiLogger[logLevel]("Request completed", logData);
|
|
2994
|
-
} catch (error) {
|
|
2995
|
-
const duration = Date.now() - startTime;
|
|
2996
|
-
apiLogger.error("Request failed", error, {
|
|
2997
|
-
requestId,
|
|
2998
|
-
method,
|
|
2999
|
-
path,
|
|
3000
|
-
duration
|
|
3001
|
-
});
|
|
3002
|
-
throw error;
+      info: "\x1B[32m",
+      // green
+      warn: "\x1B[33m",
+      // yellow
+      error: "\x1B[31m",
+      // red
+      fatal: "\x1B[35m",
+      // magenta
+      // additional colors
+      gray: "\x1B[90m"
+    };
+  }
+});
+function loadEnvFiles() {
+  const cwd = process.cwd();
+  const nodeEnv = process.env.NODE_ENV || "development";
+  const envFiles = [
+    `.env.${nodeEnv}.local`,
+    nodeEnv !== "test" ? ".env.local" : null,
+    `.env.${nodeEnv}`,
+    ".env"
+  ].filter((file) => file !== null);
+  for (const file of envFiles) {
+    const filePath = resolve(cwd, file);
+    if (existsSync(filePath)) {
+      config({ path: filePath });
     }
-  }
+  }
 }
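The env-file loader added above tries the most specific file first; since dotenv's `config()` does not overwrite variables that are already set, earlier files take precedence. A hedged sketch of the effective order (illustrative only, assuming default dotenv behaviour):

```typescript
import { config } from 'dotenv';

// With NODE_ENV=production, loadEnvFiles() resolves these paths in order.
// Because config() keeps values that are already defined, the first file
// that defines a key wins. The real loader also checks existsSync(filePath)
// before calling config(); this sketch skips that guard.
const order = ['.env.production.local', '.env.local', '.env.production', '.env'];
for (const file of order) {
  config({ path: file });
}
```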
-
-// src/server/create-server.ts
-init_logger2();
-
-// src/server/helpers.ts
 function createHealthCheckHandler(detailed) {
   return async (c) => {
     const response = {
@@ -3015,12 +336,10 @@ function createHealthCheckHandler(detailed) {
       timestamp: (/* @__PURE__ */ new Date()).toISOString()
     };
     if (detailed) {
-
-      const { getRedis: getRedis2 } = await Promise.resolve().then(() => (init_cache(), cache_exports));
-      const db = getDatabase2();
-      let dbStatus = "disconnected";
+      let dbStatus = "unknown";
       let dbError;
-
+      try {
+        const db = getDatabase();
         try {
           await db.execute("SELECT 1");
           dbStatus = "connected";
@@ -3028,9 +347,12 @@ function createHealthCheckHandler(detailed) {
           dbStatus = "error";
           dbError = error instanceof Error ? error.message : String(error);
         }
+      } catch (error) {
+        dbStatus = "not_initialized";
+        dbError = "Database not available";
       }
-      const redis =
-      let redisStatus = "
+      const redis = getCache();
+      let redisStatus = redis ? "unknown" : "not_initialized";
       let redisError;
       if (redis) {
         try {
@@ -3051,7 +373,7 @@ function createHealthCheckHandler(detailed) {
           ...redisError && { error: redisError }
         }
       };
-      const hasErrors = dbStatus === "error" || redisStatus === "error";
+      const hasErrors = dbStatus === "error" || dbStatus === "not_initialized" || redisStatus === "error";
       response.status = hasErrors ? "degraded" : "ok";
     }
     const statusCode = response.status === "ok" ? 200 : 503;
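In the detailed health check above, a missing database handle now reports `not_initialized` instead of throwing, and that state is treated like an error when computing the overall status. A small sketch of the decision, using only values visible in this hunk (the type and function names are illustrative, not part of the package API):

```typescript
type ComponentStatus = "unknown" | "connected" | "error" | "not_initialized";

// Mirrors the hasErrors / status / statusCode logic shown in the diff above.
function summarize(dbStatus: ComponentStatus, redisStatus: ComponentStatus) {
  const hasErrors =
    dbStatus === "error" || dbStatus === "not_initialized" || redisStatus === "error";
  const status = hasErrors ? "degraded" : "ok";
  return { status, statusCode: status === "ok" ? 200 : 503 };
}
```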
@@ -3065,43 +387,43 @@ function applyServerTimeouts(server, timeouts) {
     server.headersTimeout = timeouts.headers;
   }
 }
-function getTimeoutConfig(
+function getTimeoutConfig(config2) {
   return {
-    request:
-    keepAlive:
-    headers:
+    request: config2?.request ?? env.SERVER_TIMEOUT,
+    keepAlive: config2?.keepAlive ?? env.SERVER_KEEPALIVE_TIMEOUT,
+    headers: config2?.headers ?? env.SERVER_HEADERS_TIMEOUT
   };
 }
-function getShutdownTimeout(
-  return
+function getShutdownTimeout(config2) {
+  return config2?.timeout ?? env.SHUTDOWN_TIMEOUT;
 }
-function buildMiddlewareOrder(
+function buildMiddlewareOrder(config2) {
   const order = [];
-  const middlewareConfig =
+  const middlewareConfig = config2.middleware ?? {};
   const enableLogger = middlewareConfig.logger !== false;
   const enableCors = middlewareConfig.cors !== false;
   const enableErrorHandler = middlewareConfig.errorHandler !== false;
   if (enableLogger) order.push("RequestLogger");
   if (enableCors) order.push("CORS");
-
-  if (
+  config2.use?.forEach((_, i) => order.push(`Custom[${i}]`));
+  if (config2.beforeRoutes) order.push("beforeRoutes hook");
   order.push("Routes");
-  if (
+  if (config2.afterRoutes) order.push("afterRoutes hook");
   if (enableErrorHandler) order.push("ErrorHandler");
   return order;
 }
-function buildStartupConfig(
-  const middlewareConfig =
-  const healthCheckConfig =
+function buildStartupConfig(config2, timeouts) {
+  const middlewareConfig = config2.middleware ?? {};
+  const healthCheckConfig = config2.healthCheck ?? {};
   const healthCheckEnabled = healthCheckConfig.enabled !== false;
   const healthCheckPath = healthCheckConfig.path ?? "/health";
-  const healthCheckDetailed = healthCheckConfig.detailed ??
+  const healthCheckDetailed = healthCheckConfig.detailed ?? env.NODE_ENV === "development";
   return {
     middleware: {
       logger: middlewareConfig.logger !== false,
       cors: middlewareConfig.cors !== false,
       errorHandler: middlewareConfig.errorHandler !== false,
-      custom:
+      custom: config2.use?.length ?? 0
     },
     healthCheck: healthCheckEnabled ? {
       enabled: true,
@@ -3109,8 +431,8 @@ function buildStartupConfig(config, timeouts) {
       detailed: healthCheckDetailed
     } : { enabled: false },
     hooks: {
-      beforeRoutes: !!
-      afterRoutes: !!
+      beforeRoutes: !!config2.beforeRoutes,
+      afterRoutes: !!config2.afterRoutes
     },
     timeout: {
       request: `${timeouts.request}ms`,
@@ -3118,140 +440,38 @@ function buildStartupConfig(config, timeouts) {
       headers: `${timeouts.headers}ms`
     },
     shutdown: {
-      timeout: `${
+      timeout: `${config2.shutdown?.timeout ?? env.SHUTDOWN_TIMEOUT}ms`
     }
   };
 }
|
|
3125
|
-
|
|
3126
|
-
// src/server/plugin-discovery.ts
|
|
3127
|
-
init_logger2();
|
|
3128
|
-
var pluginLogger = logger.child("plugin");
|
|
3129
|
-
async function discoverPlugins(cwd = process.cwd()) {
|
|
3130
|
-
const plugins = [];
|
|
3131
|
-
const nodeModulesPath = join(cwd, "node_modules");
|
|
3132
|
-
try {
|
|
3133
|
-
const projectPkgPath = join(cwd, "package.json");
|
|
3134
|
-
if (!existsSync(projectPkgPath)) {
|
|
3135
|
-
pluginLogger.debug("No package.json found, skipping plugin discovery");
|
|
3136
|
-
return plugins;
|
|
3137
|
-
}
|
|
3138
|
-
const projectPkg = JSON.parse(readFileSync(projectPkgPath, "utf-8"));
|
|
3139
|
-
const dependencies = {
|
|
3140
|
-
...projectPkg.dependencies,
|
|
3141
|
-
...projectPkg.devDependencies
|
|
3142
|
-
};
|
|
3143
|
-
for (const [packageName] of Object.entries(dependencies)) {
|
|
3144
|
-
if (!packageName.startsWith("@spfn/")) {
|
|
3145
|
-
continue;
|
|
3146
|
-
}
|
|
3147
|
-
try {
|
|
3148
|
-
const plugin = await loadPluginFromPackage(packageName, nodeModulesPath);
|
|
3149
|
-
if (plugin) {
|
|
3150
|
-
plugins.push(plugin);
|
|
3151
|
-
pluginLogger.info("Plugin discovered", {
|
|
3152
|
-
name: plugin.name,
|
|
3153
|
-
hooks: getPluginHookNames(plugin)
|
|
3154
|
-
});
|
|
3155
|
-
}
|
|
3156
|
-
} catch (error) {
|
|
3157
|
-
pluginLogger.debug("Failed to load plugin", {
|
|
3158
|
-
package: packageName,
|
|
3159
|
-
error: error instanceof Error ? error.message : "Unknown error"
|
|
3160
|
-
});
|
|
3161
|
-
}
|
|
3162
|
-
}
|
|
3163
|
-
} catch (error) {
|
|
3164
|
-
pluginLogger.warn("Plugin discovery failed", {
|
|
3165
|
-
error: error instanceof Error ? error.message : "Unknown error"
|
|
3166
|
-
});
|
|
3167
|
-
}
|
|
3168
|
-
return plugins;
|
|
3169
|
-
}
|
|
3170
|
-
async function loadPluginFromPackage(packageName, nodeModulesPath) {
|
|
3171
|
-
const pkgPath = join(nodeModulesPath, ...packageName.split("/"), "package.json");
|
|
3172
|
-
if (!existsSync(pkgPath)) {
|
|
3173
|
-
return null;
|
|
3174
|
-
}
|
|
3175
|
-
const pkg = JSON.parse(readFileSync(pkgPath, "utf-8"));
|
|
3176
|
-
const packageDir = dirname(pkgPath);
|
|
3177
|
-
const mainEntry = pkg.main || "dist/index.js";
|
|
3178
|
-
const mainPath = join(packageDir, mainEntry);
|
|
3179
|
-
if (!existsSync(mainPath)) {
|
|
3180
|
-
return null;
|
|
3181
|
-
}
|
|
3182
|
-
try {
|
|
3183
|
-
const module = await import(mainPath);
|
|
3184
|
-
if (module.spfnPlugin && isValidPlugin(module.spfnPlugin)) {
|
|
3185
|
-
return module.spfnPlugin;
|
|
3186
|
-
}
|
|
3187
|
-
return null;
|
|
3188
|
-
} catch (error) {
|
|
3189
|
-
return null;
|
|
3190
|
-
}
|
|
3191
|
-
}
|
|
3192
|
-
function isValidPlugin(plugin) {
|
|
3193
|
-
return plugin && typeof plugin === "object" && typeof plugin.name === "string" && (typeof plugin.afterInfrastructure === "function" || typeof plugin.beforeRoutes === "function" || typeof plugin.afterRoutes === "function" || typeof plugin.afterStart === "function" || typeof plugin.beforeShutdown === "function");
|
|
3194
|
-
}
|
|
3195
|
-
function getPluginHookNames(plugin) {
|
|
3196
|
-
const hooks = [];
|
|
3197
|
-
if (plugin.afterInfrastructure) hooks.push("afterInfrastructure");
|
|
3198
|
-
if (plugin.beforeRoutes) hooks.push("beforeRoutes");
|
|
3199
|
-
if (plugin.afterRoutes) hooks.push("afterRoutes");
|
|
3200
|
-
if (plugin.afterStart) hooks.push("afterStart");
|
|
3201
|
-
if (plugin.beforeShutdown) hooks.push("beforeShutdown");
|
|
3202
|
-
return hooks;
|
|
3203
|
-
}
|
|
3204
|
-
async function executePluginHooks(plugins, hookName, ...args) {
|
|
3205
|
-
for (const plugin of plugins) {
|
|
3206
|
-
const hook = plugin[hookName];
|
|
3207
|
-
if (typeof hook === "function") {
|
|
3208
|
-
try {
|
|
3209
|
-
pluginLogger.debug("Executing plugin hook", {
|
|
3210
|
-
plugin: plugin.name,
|
|
3211
|
-
hook: hookName
|
|
3212
|
-
});
|
|
3213
|
-
await hook(...args);
|
|
3214
|
-
} catch (error) {
|
|
3215
|
-
pluginLogger.error("Plugin hook failed", {
|
|
3216
|
-
plugin: plugin.name,
|
|
3217
|
-
hook: hookName,
|
|
3218
|
-
error: error instanceof Error ? error.message : "Unknown error"
|
|
3219
|
-
});
|
|
3220
|
-
throw new Error(
|
|
3221
|
-
`Plugin ${plugin.name} failed in ${hookName} hook: ${error instanceof Error ? error.message : "Unknown error"}`
|
|
3222
|
-
);
|
|
3223
|
-
}
|
|
3224
|
-
}
|
|
3225
|
-
}
|
|
3226
|
-
}
|
|
447
|
+
var serverLogger = logger.child("@spfn/core:server");
|
|
3227
448
|
|
|
3228
449
|
// src/server/create-server.ts
|
|
3229
|
-
|
|
3230
|
-
async function createServer(config, plugins = []) {
|
|
450
|
+
async function createServer(config2) {
|
|
3231
451
|
const cwd = process.cwd();
|
|
3232
452
|
const appPath = join(cwd, "src", "server", "app.ts");
|
|
3233
|
-
const appJsPath = join(cwd, "src", "server", "app
|
|
453
|
+
const appJsPath = join(cwd, "src", "server", "app");
|
|
3234
454
|
if (existsSync(appPath) || existsSync(appJsPath)) {
|
|
3235
|
-
return await loadCustomApp(appPath, appJsPath,
|
|
455
|
+
return await loadCustomApp(appPath, appJsPath, config2);
|
|
3236
456
|
}
|
|
3237
|
-
return await createAutoConfiguredApp(
|
|
457
|
+
return await createAutoConfiguredApp(config2);
|
|
3238
458
|
}
|
|
3239
|
-
async function loadCustomApp(appPath, appJsPath,
|
|
3240
|
-
const
|
|
459
|
+
async function loadCustomApp(appPath, appJsPath, config2) {
|
|
460
|
+
const actualPath = existsSync(appPath) ? appPath : appJsPath;
|
|
461
|
+
const appModule = await import(actualPath);
|
|
3241
462
|
const appFactory = appModule.default;
|
|
3242
463
|
if (!appFactory) {
|
|
3243
464
|
throw new Error("app.ts must export a default function that returns a Hono app");
|
|
3244
465
|
}
|
|
3245
466
|
const app = await appFactory();
|
|
3246
|
-
|
|
3247
|
-
|
|
3248
|
-
|
|
3249
|
-
await executePluginHooks(plugins, "afterRoutes", app);
|
|
467
|
+
if (config2?.routes) {
|
|
468
|
+
registerRoutes(app, config2.routes, config2.middlewares);
|
|
469
|
+
}
|
|
3250
470
|
return app;
|
|
3251
471
|
}
|
|
3252
|
-
async function createAutoConfiguredApp(
|
|
472
|
+
async function createAutoConfiguredApp(config2) {
|
|
3253
473
|
const app = new Hono();
|
|
3254
|
-
const middlewareConfig =
|
|
474
|
+
const middlewareConfig = config2?.middleware ?? {};
|
|
3255
475
|
const enableLogger = middlewareConfig.logger !== false;
|
|
3256
476
|
const enableCors = middlewareConfig.cors !== false;
|
|
3257
477
|
const enableErrorHandler = middlewareConfig.errorHandler !== false;
|
|
@@ -3261,29 +481,30 @@ async function createAutoConfiguredApp(config, plugins = []) {
|
|
|
3261
481
|
await next();
|
|
3262
482
|
});
|
|
3263
483
|
}
|
|
3264
|
-
applyDefaultMiddleware(app,
|
|
3265
|
-
|
|
3266
|
-
|
|
3267
|
-
|
|
3268
|
-
|
|
3269
|
-
await
|
|
3270
|
-
await
|
|
3271
|
-
await
|
|
484
|
+
applyDefaultMiddleware(app, config2, enableLogger, enableCors);
|
|
485
|
+
if (Array.isArray(config2?.use)) {
|
|
486
|
+
config2.use.forEach((mw) => app.use("*", mw));
|
|
487
|
+
}
|
|
488
|
+
registerHealthCheckEndpoint(app, config2);
|
|
489
|
+
await executeBeforeRoutesHook(app, config2);
|
|
490
|
+
await loadAppRoutes(app, config2);
|
|
491
|
+
await executeAfterRoutesHook(app, config2);
|
|
3272
492
|
if (enableErrorHandler) {
|
|
3273
493
|
app.onError(ErrorHandler());
|
|
3274
494
|
}
|
|
3275
495
|
return app;
|
|
3276
496
|
}
|
|
3277
|
-
function applyDefaultMiddleware(app,
|
|
497
|
+
function applyDefaultMiddleware(app, config2, enableLogger, enableCors) {
|
|
3278
498
|
if (enableLogger) {
|
|
3279
499
|
app.use("*", RequestLogger());
|
|
3280
500
|
}
|
|
3281
|
-
if (enableCors
|
|
3282
|
-
|
|
501
|
+
if (enableCors) {
|
|
502
|
+
const corsOptions = config2?.cors !== false ? config2?.cors : void 0;
|
|
503
|
+
app.use("*", cors(corsOptions));
|
|
3283
504
|
}
|
|
3284
505
|
}
|
|
3285
|
-
function registerHealthCheckEndpoint(app,
|
|
3286
|
-
const healthCheckConfig =
|
|
506
|
+
function registerHealthCheckEndpoint(app, config2) {
|
|
507
|
+
const healthCheckConfig = config2?.healthCheck ?? {};
|
|
3287
508
|
const healthCheckEnabled = healthCheckConfig.enabled !== false;
|
|
3288
509
|
const healthCheckPath = healthCheckConfig.path ?? "/health";
|
|
3289
510
|
const healthCheckDetailed = healthCheckConfig.detailed ?? process.env.NODE_ENV === "development";
|
|
@@ -3292,41 +513,218 @@ function registerHealthCheckEndpoint(app, config) {
|
|
|
3292
513
|
serverLogger.debug(`Health check endpoint enabled at ${healthCheckPath}`);
|
|
3293
514
|
}
|
|
3294
515
|
}
|
|
3295
|
-
async function executeBeforeRoutesHook(app,
|
|
3296
|
-
if (
|
|
3297
|
-
|
|
516
|
+
async function executeBeforeRoutesHook(app, config2) {
|
|
517
|
+
if (config2?.lifecycle?.beforeRoutes) {
|
|
518
|
+
await config2.lifecycle.beforeRoutes(app);
|
|
3298
519
|
}
|
|
3299
|
-
|
|
3300
|
-
|
|
3301
|
-
|
|
3302
|
-
|
|
3303
|
-
|
|
520
|
+
}
|
|
521
|
+
async function loadAppRoutes(app, config2) {
|
|
522
|
+
const debug = isDebugMode(config2);
|
|
523
|
+
if (config2?.routes) {
|
|
524
|
+
registerRoutes(app, config2.routes, config2.middlewares);
|
|
525
|
+
if (debug) {
|
|
526
|
+
serverLogger.info("\u2713 Routes registered");
|
|
527
|
+
}
|
|
528
|
+
} else if (debug) {
|
|
529
|
+
serverLogger.warn("\u26A0\uFE0F No routes configured. Use defineServerConfig().routes() to register routes.");
|
|
530
|
+
}
|
|
531
|
+
}
|
|
532
|
+
async function executeAfterRoutesHook(app, config2) {
|
|
533
|
+
if (config2?.lifecycle?.afterRoutes) {
|
|
534
|
+
await config2.lifecycle.afterRoutes(app);
|
|
3304
535
|
}
|
|
3305
536
|
}
|
|
3306
|
-
|
|
3307
|
-
|
|
3308
|
-
|
|
3309
|
-
|
|
3310
|
-
|
|
3311
|
-
|
|
537
|
+
function isDebugMode(config2) {
|
|
538
|
+
return config2?.debug ?? process.env.NODE_ENV === "development";
|
|
539
|
+
}
|
|
540
|
+
var jobLogger = logger.child("@spfn/core:job");
|
|
541
|
+
var bossInstance = null;
|
|
542
|
+
var bossConfig = null;
|
|
543
|
+
async function initBoss(config2) {
|
|
544
|
+
if (bossInstance) {
|
|
545
|
+
jobLogger.warn("pg-boss already initialized, returning existing instance");
|
|
546
|
+
return bossInstance;
|
|
547
|
+
}
|
|
548
|
+
jobLogger.info("Initializing pg-boss...");
|
|
549
|
+
bossConfig = config2;
|
|
550
|
+
bossInstance = new PgBoss({
|
|
551
|
+
connectionString: config2.connectionString,
|
|
552
|
+
schema: config2.schema ?? "spfn_queue",
|
|
553
|
+
maintenanceIntervalSeconds: config2.maintenanceIntervalSeconds ?? 120,
|
|
554
|
+
monitorIntervalSeconds: config2.monitorIntervalSeconds
|
|
3312
555
|
});
|
|
556
|
+
bossInstance.on("error", (error) => {
|
|
557
|
+
jobLogger.error("pg-boss error:", error);
|
|
558
|
+
});
|
|
559
|
+
await bossInstance.start();
|
|
560
|
+
jobLogger.info("pg-boss started successfully");
|
|
561
|
+
return bossInstance;
|
|
562
|
+
}
|
|
563
|
+
function getBoss() {
|
|
564
|
+
return bossInstance;
|
|
3313
565
|
}
|
|
3314
|
-
async function
|
|
3315
|
-
if (!
|
|
566
|
+
async function stopBoss() {
|
|
567
|
+
if (!bossInstance) {
|
|
3316
568
|
return;
|
|
3317
569
|
}
|
|
570
|
+
jobLogger.info("Stopping pg-boss...");
|
|
3318
571
|
try {
|
|
3319
|
-
await
|
|
572
|
+
await bossInstance.stop({ graceful: true, timeout: 3e4 });
|
|
573
|
+
jobLogger.info("pg-boss stopped gracefully");
|
|
3320
574
|
} catch (error) {
|
|
3321
|
-
|
|
3322
|
-
throw
|
|
575
|
+
jobLogger.error("Error stopping pg-boss:", error);
|
|
576
|
+
throw error;
|
|
577
|
+
} finally {
|
|
578
|
+
bossInstance = null;
|
|
579
|
+
bossConfig = null;
|
|
3323
580
|
}
|
|
3324
581
|
}
|
|
582
|
+
function shouldClearOnStart() {
|
|
583
|
+
return bossConfig?.clearOnStart ?? false;
|
|
584
|
+
}
|
|
3325
585
|
|
|
3326
|
-
// src/
|
|
3327
|
-
|
|
3328
|
-
|
|
3329
|
-
|
|
586
|
+
// src/job/job-router.ts
|
|
587
|
+
function isJobDef(value) {
|
|
588
|
+
return value !== null && typeof value === "object" && "name" in value && "handler" in value && "send" in value && "run" in value;
|
|
589
|
+
}
|
|
590
|
+
function isJobRouter(value) {
|
|
591
|
+
return value !== null && typeof value === "object" && "jobs" in value && "_jobs" in value;
|
|
592
|
+
}
|
|
593
|
+
function collectJobs(router, prefix = "") {
|
|
594
|
+
const jobs = [];
|
|
595
|
+
for (const [key, value] of Object.entries(router.jobs)) {
|
|
596
|
+
const name = prefix ? `${prefix}.${key}` : key;
|
|
597
|
+
if (isJobRouter(value)) {
|
|
598
|
+
jobs.push(...collectJobs(value, name));
|
|
599
|
+
} else if (isJobDef(value)) {
|
|
600
|
+
jobs.push(value);
|
|
601
|
+
}
|
|
602
|
+
}
|
|
603
|
+
return jobs;
|
|
604
|
+
}
|
|
605
|
+
var jobLogger2 = logger.child("@spfn/core:job");
|
|
606
|
+
function getEventQueueName(eventName) {
|
|
607
|
+
return `event:${eventName}`;
|
|
608
|
+
}
|
|
609
|
+
function getDefaultJobOptions(options) {
|
|
610
|
+
return {
|
|
611
|
+
retryLimit: options?.retryLimit ?? 3,
|
|
612
|
+
retryDelay: options?.retryDelay ?? 1e3,
|
|
613
|
+
expireInSeconds: options?.expireInSeconds ?? 300
|
|
614
|
+
};
|
|
615
|
+
}
|
|
616
|
+
async function registerJobs(router) {
|
|
617
|
+
const boss = getBoss();
|
|
618
|
+
if (!boss) {
|
|
619
|
+
throw new Error(
|
|
620
|
+
"pg-boss not initialized. Call initBoss() before registerJobs()"
|
|
621
|
+
);
|
|
622
|
+
}
|
|
623
|
+
const jobs = collectJobs(router);
|
|
624
|
+
const clearOnStart = shouldClearOnStart();
|
|
625
|
+
jobLogger2.info(`Registering ${jobs.length} job(s)...`);
|
|
626
|
+
if (clearOnStart) {
|
|
627
|
+
jobLogger2.info("Clearing existing jobs before registration...");
|
|
628
|
+
for (const job2 of jobs) {
|
|
629
|
+
await boss.deleteAllJobs(job2.name);
|
|
630
|
+
if (job2.subscribedEvent) {
|
|
631
|
+
const eventQueue = getEventQueueName(job2.subscribedEvent);
|
|
632
|
+
await boss.deleteAllJobs(eventQueue);
|
|
633
|
+
}
|
|
634
|
+
}
|
|
635
|
+
jobLogger2.info("Existing jobs cleared");
|
|
636
|
+
}
|
|
637
|
+
for (const job2 of jobs) {
|
|
638
|
+
await registerJob(job2);
|
|
639
|
+
}
|
|
640
|
+
jobLogger2.info("All jobs registered successfully");
|
|
641
|
+
}
|
|
642
|
+
async function registerWorker(boss, job2, queueName) {
|
|
643
|
+
await boss.work(
|
|
644
|
+
queueName,
|
|
645
|
+
{ batchSize: 1 },
|
|
646
|
+
async (jobs) => {
|
|
647
|
+
for (const pgBossJob of jobs) {
|
|
648
|
+
jobLogger2.debug(`[Job:${job2.name}] Executing...`, { jobId: pgBossJob.id });
|
|
649
|
+
const startTime = Date.now();
|
|
650
|
+
try {
|
|
651
|
+
if (job2.inputSchema) {
|
|
652
|
+
await job2.handler(pgBossJob.data);
|
|
653
|
+
} else {
|
|
654
|
+
await job2.handler();
|
|
655
|
+
}
|
|
656
|
+
const duration = Date.now() - startTime;
|
|
657
|
+
jobLogger2.info(`[Job:${job2.name}] Completed in ${duration}ms`, {
|
|
658
|
+
jobId: pgBossJob.id,
|
|
659
|
+
duration
|
|
660
|
+
});
|
|
661
|
+
} catch (error) {
|
|
662
|
+
const duration = Date.now() - startTime;
|
|
663
|
+
jobLogger2.error(`[Job:${job2.name}] Failed after ${duration}ms`, {
|
|
664
|
+
jobId: pgBossJob.id,
|
|
665
|
+
duration,
|
|
666
|
+
error: error instanceof Error ? error.message : String(error)
|
|
667
|
+
});
|
|
668
|
+
throw error;
|
|
669
|
+
}
|
|
670
|
+
}
|
|
671
|
+
}
|
|
672
|
+
);
|
|
673
|
+
}
|
|
674
|
+
function connectEventToQueue(boss, job2, queueName) {
|
|
675
|
+
if (!job2._subscribedEventDef) {
|
|
676
|
+
return;
|
|
677
|
+
}
|
|
678
|
+
const eventDef = job2._subscribedEventDef;
|
|
679
|
+
eventDef._registerJobQueue(queueName, async (queue, payload) => {
|
|
680
|
+
await boss.send(queue, payload, getDefaultJobOptions(job2.options));
|
|
681
|
+
});
|
|
682
|
+
jobLogger2.debug(`[Job:${job2.name}] Connected to event: ${job2.subscribedEvent}`);
|
|
683
|
+
}
|
|
684
|
+
async function registerCronSchedule(boss, job2) {
|
|
685
|
+
if (!job2.cronExpression) {
|
|
686
|
+
return;
|
|
687
|
+
}
|
|
688
|
+
jobLogger2.debug(`[Job:${job2.name}] Scheduling cron: ${job2.cronExpression}`);
|
|
689
|
+
await boss.schedule(
|
|
690
|
+
job2.name,
|
|
691
|
+
job2.cronExpression,
|
|
692
|
+
{},
|
|
693
|
+
getDefaultJobOptions(job2.options)
|
|
694
|
+
);
|
|
695
|
+
jobLogger2.info(`[Job:${job2.name}] Cron scheduled: ${job2.cronExpression}`);
|
|
696
|
+
}
|
|
697
|
+
async function queueRunOnceJob(boss, job2) {
|
|
698
|
+
if (!job2.runOnce) {
|
|
699
|
+
return;
|
|
700
|
+
}
|
|
701
|
+
jobLogger2.debug(`[Job:${job2.name}] Queuing runOnce job`);
|
|
702
|
+
await boss.send(
|
|
703
|
+
job2.name,
|
|
704
|
+
{},
|
|
705
|
+
{
|
|
706
|
+
...getDefaultJobOptions(job2.options),
|
|
707
|
+
singletonKey: `runOnce:${job2.name}`
|
|
708
|
+
}
|
|
709
|
+
);
|
|
710
|
+
jobLogger2.info(`[Job:${job2.name}] runOnce job queued`);
|
|
711
|
+
}
|
|
712
|
+
async function registerJob(job2) {
|
|
713
|
+
const boss = getBoss();
|
|
714
|
+
if (!boss) {
|
|
715
|
+
throw new Error("pg-boss not initialized");
|
|
716
|
+
}
|
|
717
|
+
const queueName = job2.subscribedEvent ? getEventQueueName(job2.subscribedEvent) : job2.name;
|
|
718
|
+
jobLogger2.debug(`Registering job: ${job2.name}`, {
|
|
719
|
+
queueName,
|
|
720
|
+
subscribedEvent: job2.subscribedEvent
|
|
721
|
+
});
|
|
722
|
+
await registerWorker(boss, job2, queueName);
|
|
723
|
+
connectEventToQueue(boss, job2, queueName);
|
|
724
|
+
await registerCronSchedule(boss, job2);
|
|
725
|
+
await queueRunOnceJob(boss, job2);
|
|
726
|
+
jobLogger2.debug(`Job registered: ${job2.name}`);
|
|
727
|
+
}
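Taken together, `initBoss()`, `registerJobs()`, and `registerJob()` mean a job defined through the builder gets a pg-boss worker plus an optional event subscription, cron schedule, or run-once send. A hedged usage sketch, reusing only the identifiers shown in the builder JSDoc later in this file (`job`, `defineJobRouter`, `defineServerConfig`); everything else is illustrative:

```typescript
import { Type } from '@sinclair/typebox'; // assumed source of the Type helper used in the doc comments
import { job, defineJobRouter } from '@spfn/core/job';

// A job with a validated input payload; registerJobs() will call boss.work()
// for its queue and apply the default retry/expiry options shown above.
const sendEmail = job('send-email')
  .input(Type.Object({ to: Type.String() }))
  .handler(async (input) => { /* deliver the email */ });

const jobRouter = defineJobRouter({ sendEmail });

// Passing the router to the server config triggers initBoss() + registerJobs()
// during infrastructure initialization (defineServerConfig import omitted;
// see the config builder further down in this file).
export default defineServerConfig()
  .jobs(jobRouter)
  .build();
```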
|
|
3330
728
|
function getNetworkAddress() {
|
|
3331
729
|
const nets = networkInterfaces();
|
|
3332
730
|
for (const name of Object.keys(nets)) {
|
|
@@ -3364,16 +762,16 @@ function printBanner(options) {
|
|
|
3364
762
|
}
|
|
3365
763
|
|
|
3366
764
|
// src/server/validation.ts
|
|
3367
|
-
function validateServerConfig(
|
|
3368
|
-
if (
|
|
3369
|
-
if (!Number.isInteger(
|
|
765
|
+
function validateServerConfig(config2) {
|
|
766
|
+
if (config2.port !== void 0) {
|
|
767
|
+
if (!Number.isInteger(config2.port) || config2.port < 0 || config2.port > 65535) {
|
|
3370
768
|
throw new Error(
|
|
3371
|
-
`Invalid port: ${
|
|
769
|
+
`Invalid port: ${config2.port}. Port must be an integer between 0 and 65535.`
|
|
3372
770
|
);
|
|
3373
771
|
}
|
|
3374
772
|
}
|
|
3375
|
-
if (
|
|
3376
|
-
const { request, keepAlive, headers } =
|
|
773
|
+
if (config2.timeout) {
|
|
774
|
+
const { request, keepAlive, headers } = config2.timeout;
|
|
3377
775
|
if (request !== void 0 && (request < 0 || !Number.isFinite(request))) {
|
|
3378
776
|
throw new Error(`Invalid timeout.request: ${request}. Must be a positive number.`);
|
|
3379
777
|
}
|
|
@@ -3389,41 +787,51 @@ function validateServerConfig(config) {
|
|
|
3389
787
|
);
|
|
3390
788
|
}
|
|
3391
789
|
}
|
|
3392
|
-
if (
|
|
3393
|
-
const timeout =
|
|
790
|
+
if (config2.shutdown?.timeout !== void 0) {
|
|
791
|
+
const timeout = config2.shutdown.timeout;
|
|
3394
792
|
if (timeout < 0 || !Number.isFinite(timeout)) {
|
|
3395
793
|
throw new Error(`Invalid shutdown.timeout: ${timeout}. Must be a positive number.`);
|
|
3396
794
|
}
|
|
3397
795
|
}
|
|
3398
|
-
if (
|
|
3399
|
-
if (!
|
|
796
|
+
if (config2.healthCheck?.path) {
|
|
797
|
+
if (!config2.healthCheck.path.startsWith("/")) {
|
|
3400
798
|
throw new Error(
|
|
3401
|
-
`Invalid healthCheck.path: "${
|
|
799
|
+
`Invalid healthCheck.path: "${config2.healthCheck.path}". Must start with "/".`
|
|
3402
800
|
);
|
|
3403
801
|
}
|
|
3404
802
|
}
|
|
3405
803
|
}
|
|
3406
|
-
|
|
3407
|
-
|
|
3408
|
-
|
|
3409
|
-
|
|
3410
|
-
|
|
804
|
+
var DEFAULT_MAX_LISTENERS = 15;
|
|
805
|
+
var TIMEOUTS = {
|
|
806
|
+
SERVER_CLOSE: 5e3,
|
|
807
|
+
DATABASE_CLOSE: 5e3,
|
|
808
|
+
REDIS_CLOSE: 5e3,
|
|
809
|
+
PRODUCTION_ERROR_SHUTDOWN: 1e4
|
|
810
|
+
};
|
|
811
|
+
var CONFIG_FILE_PATHS = [
|
|
812
|
+
".spfn/server/server.config.mjs",
|
|
813
|
+
".spfn/server/server.config",
|
|
814
|
+
"src/server/server.config",
|
|
815
|
+
"src/server/server.config.ts"
|
|
816
|
+
];
|
|
817
|
+
var processHandlersRegistered = false;
|
|
818
|
+
async function startServer(config2) {
|
|
819
|
+
loadEnvFiles();
|
|
820
|
+
const finalConfig = await loadAndMergeConfig(config2);
|
|
3411
821
|
const { host, port, debug } = finalConfig;
|
|
3412
822
|
validateServerConfig(finalConfig);
|
|
823
|
+
if (!host || !port) {
|
|
824
|
+
throw new Error("Server host and port are required");
|
|
825
|
+
}
|
|
3413
826
|
if (debug) {
|
|
3414
827
|
logMiddlewareOrder(finalConfig);
|
|
3415
828
|
}
|
|
3416
|
-
|
|
3417
|
-
|
|
3418
|
-
|
|
3419
|
-
serverLogger2.info("Plugins discovered", {
|
|
3420
|
-
count: plugins.length,
|
|
3421
|
-
plugins: plugins.map((p) => p.name)
|
|
3422
|
-
});
|
|
3423
|
-
}
|
|
829
|
+
const shutdownState = {
|
|
830
|
+
isShuttingDown: false
|
|
831
|
+
};
|
|
3424
832
|
try {
|
|
3425
|
-
await initializeInfrastructure(finalConfig
|
|
3426
|
-
const app = await createServer(finalConfig
|
|
833
|
+
await initializeInfrastructure(finalConfig);
|
|
834
|
+
const app = await createServer(finalConfig);
|
|
3427
835
|
const server = startHttpServer(app, host, port);
|
|
3428
836
|
const timeouts = getTimeoutConfig(finalConfig.timeout);
|
|
3429
837
|
applyServerTimeouts(server, timeouts);
|
|
@@ -3434,236 +842,567 @@ async function startServer(config) {
|
|
|
3434
842
|
port
|
|
3435
843
|
});
|
|
3436
844
|
logServerStarted(debug, host, port, finalConfig, timeouts);
|
|
3437
|
-
const shutdownServer = createShutdownHandler(server, finalConfig,
|
|
3438
|
-
const shutdown = createGracefulShutdown(shutdownServer, finalConfig);
|
|
3439
|
-
|
|
845
|
+
const shutdownServer = createShutdownHandler(server, finalConfig, shutdownState);
|
|
846
|
+
const shutdown = createGracefulShutdown(shutdownServer, finalConfig, shutdownState);
|
|
847
|
+
registerProcessHandlers(shutdown);
|
|
3440
848
|
const serverInstance = {
|
|
3441
849
|
server,
|
|
3442
850
|
app,
|
|
3443
851
|
config: finalConfig,
|
|
3444
852
|
close: async () => {
|
|
3445
|
-
|
|
853
|
+
serverLogger.info("Manual server shutdown requested");
|
|
854
|
+
if (shutdownState.isShuttingDown) {
|
|
855
|
+
serverLogger.warn("Shutdown already in progress, ignoring manual close request");
|
|
856
|
+
return;
|
|
857
|
+
}
|
|
858
|
+
shutdownState.isShuttingDown = true;
|
|
3446
859
|
await shutdownServer();
|
|
3447
860
|
}
|
|
3448
861
|
};
|
|
3449
862
|
if (finalConfig.lifecycle?.afterStart) {
|
|
3450
|
-
|
|
863
|
+
serverLogger.debug("Executing afterStart hook...");
|
|
3451
864
|
try {
|
|
3452
865
|
await finalConfig.lifecycle.afterStart(serverInstance);
|
|
3453
866
|
} catch (error) {
|
|
3454
|
-
|
|
867
|
+
serverLogger.error("afterStart hook failed", error);
|
|
3455
868
|
}
|
|
3456
869
|
}
|
|
3457
|
-
await executePluginHooks(plugins, "afterStart", serverInstance);
|
|
3458
870
|
return serverInstance;
|
|
3459
871
|
} catch (error) {
|
|
3460
872
|
const err = error;
|
|
3461
|
-
|
|
873
|
+
serverLogger.error("Server initialization failed", err);
|
|
3462
874
|
await cleanupOnFailure(finalConfig);
|
|
3463
875
|
throw error;
|
|
3464
876
|
}
|
|
3465
877
|
}
|
|
3466
|
-
async function loadAndMergeConfig(
|
|
878
|
+
async function loadAndMergeConfig(config2) {
|
|
3467
879
|
const cwd = process.cwd();
|
|
3468
|
-
const configPath = join(cwd, "src", "server", "server.config.ts");
|
|
3469
|
-
const configJsPath = join(cwd, "src", "server", "server.config.js");
|
|
3470
|
-
const builtConfigMjsPath = join(cwd, ".spfn", "server", "server.config.mjs");
|
|
3471
|
-
const builtConfigPath = join(cwd, ".spfn", "server", "server.config.js");
|
|
3472
880
|
let fileConfig = {};
|
|
3473
|
-
|
|
3474
|
-
|
|
3475
|
-
|
|
3476
|
-
|
|
3477
|
-
|
|
3478
|
-
|
|
3479
|
-
|
|
3480
|
-
|
|
3481
|
-
|
|
3482
|
-
|
|
3483
|
-
|
|
3484
|
-
|
|
881
|
+
let loadedConfigPath = null;
|
|
882
|
+
for (const configPath of CONFIG_FILE_PATHS) {
|
|
883
|
+
const fullPath = join(cwd, configPath);
|
|
884
|
+
if (existsSync(fullPath)) {
|
|
885
|
+
try {
|
|
886
|
+
const configModule = await import(fullPath);
|
|
887
|
+
fileConfig = configModule.default ?? {};
|
|
888
|
+
loadedConfigPath = configPath;
|
|
889
|
+
break;
|
|
890
|
+
} catch (error) {
|
|
891
|
+
serverLogger.error(`Failed to load config from ${configPath} - file exists but import failed`, error);
|
|
892
|
+
}
|
|
893
|
+
}
|
|
894
|
+
}
|
|
895
|
+
if (loadedConfigPath) {
|
|
896
|
+
serverLogger.debug(`Loaded configuration from ${loadedConfigPath}`);
|
|
897
|
+
} else {
|
|
898
|
+
serverLogger.debug("No configuration file found, using defaults");
|
|
3485
899
|
}
|
|
3486
900
|
return {
|
|
3487
901
|
...fileConfig,
|
|
3488
|
-
...
|
|
3489
|
-
port:
|
|
3490
|
-
host:
|
|
902
|
+
...config2,
|
|
903
|
+
port: config2?.port ?? fileConfig?.port ?? env.PORT,
|
|
904
|
+
host: config2?.host ?? fileConfig?.host ?? env.HOST
|
|
3491
905
|
};
|
|
3492
906
|
}
|
|
3493
|
-
function
|
|
3494
|
-
|
|
3495
|
-
|
|
3496
|
-
|
|
3497
|
-
}
|
|
907
|
+
function getInfrastructureConfig(config2) {
|
|
908
|
+
return {
|
|
909
|
+
database: config2.infrastructure?.database !== false,
|
|
910
|
+
redis: config2.infrastructure?.redis !== false
|
|
911
|
+
};
|
|
3498
912
|
}
|
|
3499
|
-
async function initializeInfrastructure(
|
|
3500
|
-
if (
|
|
3501
|
-
|
|
3502
|
-
|
|
3503
|
-
await config.lifecycle.beforeInfrastructure(config);
|
|
3504
|
-
} catch (error) {
|
|
3505
|
-
serverLogger2.error("beforeInfrastructure hook failed", error);
|
|
3506
|
-
throw new Error("Server initialization failed in beforeInfrastructure hook");
|
|
3507
|
-
}
|
|
913
|
+
async function initializeInfrastructure(config2) {
|
|
914
|
+
if (config2.lifecycle?.beforeInfrastructure) {
|
|
915
|
+
serverLogger.debug("Executing beforeInfrastructure hook...");
|
|
916
|
+
await config2.lifecycle.beforeInfrastructure(config2);
|
|
3508
917
|
}
|
|
3509
|
-
const
|
|
3510
|
-
if (
|
|
3511
|
-
|
|
3512
|
-
await initDatabase(
|
|
918
|
+
const infraConfig = getInfrastructureConfig(config2);
|
|
919
|
+
if (infraConfig.database) {
|
|
920
|
+
serverLogger.debug("Initializing database...");
|
|
921
|
+
await initDatabase(config2.database);
|
|
3513
922
|
} else {
|
|
3514
|
-
|
|
923
|
+
serverLogger.debug("Database initialization disabled");
|
|
3515
924
|
}
|
|
3516
|
-
|
|
3517
|
-
|
|
3518
|
-
|
|
3519
|
-
await initRedis();
|
|
925
|
+
if (infraConfig.redis) {
|
|
926
|
+
serverLogger.debug("Initializing Redis...");
|
|
927
|
+
await initCache();
|
|
3520
928
|
} else {
|
|
3521
|
-
|
|
929
|
+
serverLogger.debug("Redis initialization disabled");
|
|
3522
930
|
}
|
|
3523
|
-
if (
|
|
3524
|
-
|
|
3525
|
-
|
|
3526
|
-
|
|
3527
|
-
|
|
3528
|
-
|
|
3529
|
-
|
|
931
|
+
if (config2.lifecycle?.afterInfrastructure) {
|
|
932
|
+
serverLogger.debug("Executing afterInfrastructure hook...");
|
|
933
|
+
await config2.lifecycle.afterInfrastructure();
|
|
934
|
+
}
|
|
935
|
+
if (config2.jobs) {
|
|
936
|
+
const dbUrl = env.DATABASE_URL;
|
|
937
|
+
if (!dbUrl) {
|
|
938
|
+
throw new Error(
|
|
939
|
+
"Jobs require database connection. Ensure DATABASE_URL is set or database is enabled."
|
|
940
|
+
);
|
|
3530
941
|
}
|
|
942
|
+
serverLogger.debug("Initializing pg-boss...");
|
|
943
|
+
await initBoss({
|
|
944
|
+
connectionString: dbUrl,
|
|
945
|
+
...config2.jobsConfig
|
|
946
|
+
});
|
|
947
|
+
serverLogger.debug("Registering jobs...");
|
|
948
|
+
await registerJobs(config2.jobs);
|
|
3531
949
|
}
|
|
3532
|
-
await executePluginHooks(plugins, "afterInfrastructure");
|
|
3533
950
|
}
|
|
3534
951
|
function startHttpServer(app, host, port) {
|
|
3535
|
-
|
|
3536
|
-
|
|
952
|
+
serverLogger.debug(`Starting server on ${host}:${port}...`);
|
|
953
|
+
return serve({
|
|
3537
954
|
fetch: app.fetch,
|
|
3538
955
|
port,
|
|
3539
956
|
hostname: host
|
|
3540
957
|
});
|
|
3541
|
-
|
|
958
|
+
}
|
|
959
|
+
function logMiddlewareOrder(config2) {
|
|
960
|
+
const middlewareOrder = buildMiddlewareOrder(config2);
|
|
961
|
+
serverLogger.debug("Middleware execution order", {
|
|
962
|
+
order: middlewareOrder
|
|
963
|
+
});
|
|
3542
964
|
}
|
|
3543
965
|
function logServerTimeouts(timeouts) {
|
|
3544
|
-
|
|
966
|
+
serverLogger.info("Server timeouts configured", {
|
|
3545
967
|
request: `${timeouts.request}ms`,
|
|
3546
968
|
keepAlive: `${timeouts.keepAlive}ms`,
|
|
3547
969
|
headers: `${timeouts.headers}ms`
|
|
3548
970
|
});
|
|
3549
971
|
}
|
|
3550
|
-
function logServerStarted(debug, host, port,
|
|
3551
|
-
const startupConfig = buildStartupConfig(
|
|
3552
|
-
|
|
972
|
+
function logServerStarted(debug, host, port, config2, timeouts) {
|
|
973
|
+
const startupConfig = buildStartupConfig(config2, timeouts);
|
|
974
|
+
serverLogger.info("Server started successfully", {
|
|
3553
975
|
mode: debug ? "development" : "production",
|
|
3554
976
|
host,
|
|
3555
977
|
port,
|
|
3556
978
|
config: startupConfig
|
|
3557
979
|
});
|
|
3558
980
|
}
|
|
3559
|
-
function createShutdownHandler(server,
|
|
981
|
+
function createShutdownHandler(server, config2, shutdownState) {
|
|
3560
982
|
return async () => {
|
|
3561
|
-
|
|
3562
|
-
|
|
3563
|
-
|
|
3564
|
-
|
|
3565
|
-
|
|
3566
|
-
|
|
983
|
+
if (shutdownState.isShuttingDown) {
|
|
984
|
+
serverLogger.debug("Shutdown already in progress for this instance, skipping");
|
|
985
|
+
return;
|
|
986
|
+
}
|
|
987
|
+
shutdownState.isShuttingDown = true;
|
|
988
|
+
serverLogger.debug("Closing HTTP server...");
|
|
989
|
+
let timeoutId;
|
|
990
|
+
await Promise.race([
|
|
991
|
+
new Promise((resolve2, reject) => {
|
|
992
|
+
server.close((err) => {
|
|
993
|
+
if (timeoutId) clearTimeout(timeoutId);
|
|
994
|
+
if (err) {
|
|
995
|
+
serverLogger.error("HTTP server close error", err);
|
|
996
|
+
reject(err);
|
|
997
|
+
} else {
|
|
998
|
+
serverLogger.info("HTTP server closed");
|
|
999
|
+
resolve2();
|
|
1000
|
+
}
|
|
1001
|
+
});
|
|
1002
|
+
}),
|
|
1003
|
+
new Promise((_, reject) => {
|
|
1004
|
+
timeoutId = setTimeout(() => {
|
|
1005
|
+
reject(new Error(`HTTP server close timeout after ${TIMEOUTS.SERVER_CLOSE}ms`));
|
|
1006
|
+
}, TIMEOUTS.SERVER_CLOSE);
|
|
1007
|
+
})
|
|
1008
|
+
]).catch((error) => {
|
|
1009
|
+
if (timeoutId) clearTimeout(timeoutId);
|
|
1010
|
+
serverLogger.warn("HTTP server close timeout, forcing shutdown", error);
|
|
3567
1011
|
});
|
|
3568
|
-
if (
|
|
3569
|
-
|
|
1012
|
+
if (config2.jobs) {
|
|
1013
|
+
serverLogger.debug("Stopping pg-boss...");
|
|
3570
1014
|
try {
|
|
3571
|
-
await
|
|
1015
|
+
await stopBoss();
|
|
3572
1016
|
} catch (error) {
|
|
3573
|
-
|
|
1017
|
+
serverLogger.error("pg-boss stop failed", error);
|
|
3574
1018
|
}
|
|
3575
1019
|
}
|
|
3576
|
-
|
|
3577
|
-
|
|
3578
|
-
|
|
3579
|
-
|
|
1020
|
+
if (config2.lifecycle?.beforeShutdown) {
|
|
1021
|
+
serverLogger.debug("Executing beforeShutdown hook...");
|
|
1022
|
+
try {
|
|
1023
|
+
await config2.lifecycle.beforeShutdown();
|
|
1024
|
+
} catch (error) {
|
|
1025
|
+
serverLogger.error("beforeShutdown hook failed", error);
|
|
1026
|
+
}
|
|
3580
1027
|
}
|
|
3581
|
-
const
|
|
3582
|
-
|
|
3583
|
-
|
|
3584
|
-
|
|
3585
|
-
await closeDatabase();
|
|
1028
|
+
const infraConfig = getInfrastructureConfig(config2);
|
|
1029
|
+
if (infraConfig.database) {
|
|
1030
|
+
serverLogger.debug("Closing database connections...");
|
|
1031
|
+
await closeInfrastructure(closeDatabase, "Database", TIMEOUTS.DATABASE_CLOSE);
|
|
3586
1032
|
}
|
|
3587
|
-
if (
|
|
3588
|
-
|
|
3589
|
-
await
|
|
1033
|
+
if (infraConfig.redis) {
|
|
1034
|
+
serverLogger.debug("Closing Redis connections...");
|
|
1035
|
+
await closeInfrastructure(closeCache, "Redis", TIMEOUTS.REDIS_CLOSE);
|
|
3590
1036
|
}
|
|
3591
|
-
|
|
1037
|
+
serverLogger.info("Server shutdown completed");
|
|
3592
1038
|
};
|
|
3593
1039
|
}
|
|
3594
|
-
function
|
|
1040
|
+
async function closeInfrastructure(closeFn, name, timeout) {
|
|
1041
|
+
let timeoutId;
|
|
1042
|
+
try {
|
|
1043
|
+
await Promise.race([
|
|
1044
|
+
closeFn().then(() => {
|
|
1045
|
+
if (timeoutId) clearTimeout(timeoutId);
|
|
1046
|
+
}),
|
|
1047
|
+
new Promise((_, reject) => {
|
|
1048
|
+
timeoutId = setTimeout(() => {
|
|
1049
|
+
reject(new Error(`${name} close timeout after ${timeout}ms`));
|
|
1050
|
+
}, timeout);
|
|
1051
|
+
})
|
|
1052
|
+
]);
|
|
1053
|
+
serverLogger.info(`${name} connections closed successfully`);
|
|
1054
|
+
} catch (error) {
|
|
1055
|
+
if (timeoutId) clearTimeout(timeoutId);
|
|
1056
|
+
serverLogger.error(`${name} close failed or timed out`, error);
|
|
1057
|
+
}
|
|
1058
|
+
}
|
|
1059
|
+
function createGracefulShutdown(shutdownServer, config2, shutdownState) {
|
|
3595
1060
|
return async (signal) => {
|
|
3596
|
-
|
|
3597
|
-
|
|
3598
|
-
|
|
3599
|
-
|
|
3600
|
-
|
|
3601
|
-
|
|
3602
|
-
|
|
1061
|
+
if (shutdownState.isShuttingDown) {
|
|
1062
|
+
serverLogger.warn(`${signal} received but shutdown already in progress, ignoring`);
|
|
1063
|
+
return;
|
|
1064
|
+
}
|
|
1065
|
+
serverLogger.info(`${signal} received, starting graceful shutdown...`);
|
|
1066
|
+
const shutdownTimeout = getShutdownTimeout(config2.shutdown);
|
|
1067
|
+
let timeoutId;
|
|
3603
1068
|
try {
|
|
3604
1069
|
await Promise.race([
|
|
3605
|
-
shutdownServer()
|
|
3606
|
-
|
|
1070
|
+
shutdownServer().then(() => {
|
|
1071
|
+
if (timeoutId) clearTimeout(timeoutId);
|
|
1072
|
+
}),
|
|
1073
|
+
new Promise((_, reject) => {
|
|
1074
|
+
timeoutId = setTimeout(() => {
|
|
1075
|
+
reject(new Error(`Graceful shutdown timeout after ${shutdownTimeout}ms`));
|
|
1076
|
+
}, shutdownTimeout);
|
|
1077
|
+
})
|
|
3607
1078
|
]);
|
|
3608
|
-
|
|
1079
|
+
if (timeoutId) clearTimeout(timeoutId);
|
|
1080
|
+
serverLogger.info("Graceful shutdown completed successfully");
|
|
3609
1081
|
process.exit(0);
|
|
3610
1082
|
} catch (error) {
|
|
1083
|
+
if (timeoutId) clearTimeout(timeoutId);
|
|
3611
1084
|
const err = error;
|
|
3612
1085
|
if (err.message && err.message.includes("timeout")) {
|
|
3613
|
-
|
|
1086
|
+
serverLogger.error("Graceful shutdown timeout, forcing exit", err);
|
|
3614
1087
|
} else {
|
|
3615
|
-
|
|
1088
|
+
serverLogger.error("Error during graceful shutdown", err);
|
|
3616
1089
|
}
|
|
3617
1090
|
process.exit(1);
|
|
3618
1091
|
}
|
|
3619
1092
|
};
|
|
3620
1093
|
}
|
|
3621
|
-
function
|
|
3622
|
-
|
|
3623
|
-
|
|
3624
|
-
process.
|
|
1094
|
+
function handleProcessError(errorType, shutdown) {
|
|
1095
|
+
const isProduction = env.NODE_ENV === "production";
|
|
1096
|
+
const isDevelopment = env.NODE_ENV === "development";
|
|
1097
|
+
if (isDevelopment || process.env.WATCH_MODE === "true") {
|
|
1098
|
+
serverLogger.info("Exiting immediately for clean restart");
|
|
1099
|
+
process.exit(1);
|
|
1100
|
+
} else if (isProduction) {
|
|
1101
|
+
serverLogger.info(`Attempting graceful shutdown after ${errorType}`);
|
|
1102
|
+
const forceExitTimer = setTimeout(() => {
|
|
1103
|
+
serverLogger.error(`Forced exit after ${TIMEOUTS.PRODUCTION_ERROR_SHUTDOWN}ms - graceful shutdown did not complete`);
|
|
1104
|
+
process.exit(1);
|
|
1105
|
+
}, TIMEOUTS.PRODUCTION_ERROR_SHUTDOWN);
|
|
1106
|
+
shutdown(errorType).then(() => {
|
|
1107
|
+
clearTimeout(forceExitTimer);
|
|
1108
|
+
serverLogger.info("Graceful shutdown completed, exiting");
|
|
1109
|
+
process.exit(0);
|
|
1110
|
+
}).catch((shutdownError) => {
|
|
1111
|
+
clearTimeout(forceExitTimer);
|
|
1112
|
+
serverLogger.error("Graceful shutdown failed", shutdownError);
|
|
1113
|
+
process.exit(1);
|
|
1114
|
+
});
|
|
1115
|
+
} else {
|
|
1116
|
+
serverLogger.info("Exiting immediately");
|
|
1117
|
+
process.exit(1);
|
|
1118
|
+
}
|
|
1119
|
+
}
|
|
1120
|
+
function registerProcessHandlers(shutdown) {
|
|
1121
|
+
if (processHandlersRegistered) {
|
|
1122
|
+
serverLogger.debug("Process handlers already registered, skipping");
|
|
1123
|
+
return;
|
|
1124
|
+
}
|
|
1125
|
+
processHandlersRegistered = true;
|
|
1126
|
+
const currentMax = process.getMaxListeners();
|
|
1127
|
+
if (currentMax < DEFAULT_MAX_LISTENERS) {
|
|
1128
|
+
process.setMaxListeners(DEFAULT_MAX_LISTENERS);
|
|
1129
|
+
}
|
|
1130
|
+
process.on("SIGTERM", () => {
|
|
1131
|
+
shutdown("SIGTERM").catch((error) => {
|
|
1132
|
+
serverLogger.error("SIGTERM handler failed", error);
|
|
1133
|
+
process.exit(1);
|
|
1134
|
+
});
|
|
1135
|
+
});
|
|
1136
|
+
process.on("SIGINT", () => {
|
|
1137
|
+
shutdown("SIGINT").catch((error) => {
|
|
1138
|
+
serverLogger.error("SIGINT handler failed", error);
|
|
1139
|
+
process.exit(1);
|
|
1140
|
+
});
|
|
1141
|
+
});
|
|
3625
1142
|
process.on("uncaughtException", (error) => {
|
|
3626
1143
|
if (error.message?.includes("EADDRINUSE")) {
|
|
3627
|
-
|
|
3628
|
-
error: error.message,
|
|
3629
|
-
stack: error.stack,
|
|
1144
|
+
serverLogger.error("Port conflict detected - detailed trace:", error, {
|
|
3630
1145
|
code: error.code,
|
|
3631
1146
|
port: error.port,
|
|
3632
1147
|
address: error.address,
|
|
3633
1148
|
syscall: error.syscall
|
|
3634
1149
|
});
|
|
3635
1150
|
} else {
|
|
3636
|
-
|
|
1151
|
+
serverLogger.error("Uncaught exception", error);
|
|
3637
1152
|
}
|
|
3638
|
-
|
|
3639
|
-
process.exit(1);
|
|
1153
|
+
handleProcessError("UNCAUGHT_EXCEPTION", shutdown);
|
|
3640
1154
|
});
|
|
3641
1155
|
process.on("unhandledRejection", (reason, promise) => {
|
|
3642
|
-
|
|
3643
|
-
|
3644        -
3645        -
3646        -
3647        -
      1156  +      if (reason instanceof Error) {
      1157  +        Promise.resolve().then(() => (init_formatters(), formatters_exports)).then(({ formatUnhandledRejection: formatUnhandledRejection2 }) => {
      1158  +          const { error, context } = formatUnhandledRejection2(reason, promise);
      1159  +          serverLogger.error("Unhandled promise rejection", error, context);
      1160  +        }).catch(() => {
      1161  +          serverLogger.error("Unhandled promise rejection", reason, {
      1162  +            promise
      1163  +          });
      1164  +        });
      1165  +      } else {
      1166  +        serverLogger.error("Unhandled promise rejection", {
      1167  +          reason,
      1168  +          promise
      1169  +        });
      1170  +      }
      1171  +      handleProcessError("UNHANDLED_REJECTION", shutdown);
3648  1172       });
      1173  +    serverLogger.debug("Process-level shutdown handlers registered successfully");
3649  1174     }
3650        -  async function cleanupOnFailure(
      1175  +  async function cleanupOnFailure(config2) {
3651  1176       try {
3652        -
3653        -      const
3654        -
3655        -
3656        -      await closeDatabase();
      1177  +      serverLogger.debug("Cleaning up after initialization failure...");
      1178  +      const infraConfig = getInfrastructureConfig(config2);
      1179  +      if (infraConfig.database) {
      1180  +        await closeInfrastructure(closeDatabase, "Database", TIMEOUTS.DATABASE_CLOSE);
3657  1181         }
3658        -      if (
3659        -      await
      1182  +      if (infraConfig.redis) {
      1183  +        await closeInfrastructure(closeCache, "Redis", TIMEOUTS.REDIS_CLOSE);
3660  1184         }
3661        -
      1185  +      serverLogger.debug("Cleanup completed");
3662  1186       } catch (cleanupError) {
3663        -
      1187  +      serverLogger.error("Cleanup failed", cleanupError);
      1188  +    }
      1189  +  }
      1190  +
      1191  +  // src/server/config-builder.ts
      1192  +  function collectHooks(lifecycles, key) {
      1193  +    return lifecycles.map((lc) => lc[key]).filter((hook) => hook !== void 0);
      1194  +  }
      1195  +  function createMergedHook(hooks) {
      1196  +    if (hooks.length === 0) {
      1197  +      return void 0;
      1198  +    }
      1199  +    return (async (...args) => {
      1200  +      for (const hook of hooks) {
      1201  +        await hook(...args);
      1202  +      }
      1203  +    });
      1204  +  }
      1205  +  var ServerConfigBuilder = class {
      1206  +    config = {};
      1207  +    lifecycles = [];
      1208  +    /**
      1209  +     * Set server port
      1210  +     */
      1211  +    port(port) {
      1212  +      this.config.port = port;
      1213  +      return this;
3664  1214       }
      1215  +    /**
      1216  +     * Set server hostname
      1217  +     */
      1218  +    host(host) {
      1219  +      this.config.host = host;
      1220  +      return this;
      1221  +    }
      1222  +    /**
      1223  +     * Set CORS configuration
      1224  +     */
      1225  +    cors(cors2) {
      1226  +      this.config.cors = cors2;
      1227  +      return this;
      1228  +    }
      1229  +    /**
      1230  +     * Configure built-in middleware
      1231  +     */
      1232  +    middleware(middleware) {
      1233  +      this.config.middleware = middleware;
      1234  +      return this;
      1235  +    }
      1236  +    /**
      1237  +     * Add custom middleware
      1238  +     */
      1239  +    use(handlers) {
      1240  +      this.config.use = handlers;
      1241  +      return this;
      1242  +    }
      1243  +    /**
      1244  +     * Add named middlewares for route-level skip control
      1245  +     */
      1246  +    middlewares(middlewares) {
      1247  +      this.config.middlewares = middlewares;
      1248  +      return this;
      1249  +    }
      1250  +    /**
      1251  +     * Register define-route based router
      1252  +     *
      1253  +     * Automatically applies:
      1254  +     * - Global middlewares from router._globalMiddlewares (via .use())
      1255  +     * - Package routers from router._packageRouters (via .packages())
      1256  +     *
      1257  +     * @example
      1258  +     * ```typescript
      1259  +     * const appRouter = defineRouter({
      1260  +     *   getUser: route.get('/users/:id')...
      1261  +     * })
      1262  +     *   .packages([authRouter, cmsAppRouter])
      1263  +     *   .use([authMiddleware]);
      1264  +     *
      1265  +     * export default defineServerConfig()
      1266  +     *   .routes(appRouter) // middlewares auto-applied
      1267  +     *   .build();
      1268  +     * ```
      1269  +     */
      1270  +    routes(router) {
      1271  +      this.config.routes = router;
      1272  +      const allGlobalMiddlewares = [];
      1273  +      if (router._globalMiddlewares?.length > 0) {
      1274  +        allGlobalMiddlewares.push(...router._globalMiddlewares);
      1275  +      }
      1276  +      if (router._packageRouters?.length > 0) {
      1277  +        for (const pkgRouter of router._packageRouters) {
      1278  +          if (pkgRouter._globalMiddlewares?.length > 0) {
      1279  +            allGlobalMiddlewares.push(...pkgRouter._globalMiddlewares);
      1280  +          }
      1281  +        }
      1282  +      }
      1283  +      if (allGlobalMiddlewares.length > 0) {
      1284  +        this.config.middlewares = [
      1285  +          ...this.config.middlewares || [],
      1286  +          ...allGlobalMiddlewares
      1287  +        ];
      1288  +      }
      1289  +      return this;
      1290  +    }
      1291  +    /**
      1292  +     * Register background jobs router
      1293  +     *
      1294  +     * @example
      1295  +     * ```typescript
      1296  +     * import { job, defineJobRouter } from '@spfn/core/job';
      1297  +     *
      1298  +     * const sendEmail = job('send-email')
      1299  +     *   .input(Type.Object({ to: Type.String() }))
      1300  +     *   .handler(async (input) => { ... });
      1301  +     *
      1302  +     * const jobRouter = defineJobRouter({ sendEmail });
      1303  +     *
      1304  +     * export default defineServerConfig()
      1305  +     *   .routes(appRouter)
      1306  +     *   .jobs(jobRouter)
      1307  +     *   .build();
      1308  +     * ```
      1309  +     */
      1310  +    jobs(router, config2) {
      1311  +      this.config.jobs = router;
      1312  +      if (config2) {
      1313  +        this.config.jobsConfig = config2;
      1314  +      }
      1315  +      return this;
      1316  +    }
      1317  +    /**
      1318  +     * Enable/disable debug mode
      1319  +     */
      1320  +    debug(enabled) {
      1321  +      this.config.debug = enabled;
      1322  +      return this;
      1323  +    }
      1324  +    /**
      1325  +     * Configure database settings
      1326  +     */
      1327  +    database(database) {
      1328  +      this.config.database = database;
      1329  +      return this;
      1330  +    }
      1331  +    /**
      1332  +     * Configure server timeout settings
      1333  +     */
      1334  +    timeout(timeout) {
      1335  +      this.config.timeout = timeout;
      1336  +      return this;
      1337  +    }
      1338  +    /**
      1339  +     * Configure graceful shutdown settings
      1340  +     */
      1341  +    shutdown(shutdown) {
      1342  +      this.config.shutdown = shutdown;
      1343  +      return this;
      1344  +    }
      1345  +    /**
      1346  +     * Configure health check endpoint
      1347  +     */
      1348  +    healthCheck(healthCheck) {
      1349  +      this.config.healthCheck = healthCheck;
      1350  +      return this;
      1351  +    }
      1352  +    /**
      1353  +     * Configure infrastructure initialization
      1354  +     */
      1355  +    infrastructure(infrastructure) {
      1356  +      this.config.infrastructure = infrastructure;
      1357  +      return this;
      1358  +    }
      1359  +    /**
      1360  +     * Configure lifecycle hooks
      1361  +     * Can be called multiple times - hooks will be executed in registration order
      1362  +     */
      1363  +    lifecycle(lifecycle) {
      1364  +      if (lifecycle) {
      1365  +        this.lifecycles.push(lifecycle);
      1366  +      }
      1367  +      return this;
      1368  +    }
      1369  +    /**
      1370  +     * Build and return the final configuration
      1371  +     */
      1372  +    build() {
      1373  +      if (this.lifecycles.length > 0) {
      1374  +        serverLogger.info("Merging lifecycles", { count: this.lifecycles.length });
      1375  +        this.config.lifecycle = this.mergeLifecycles();
      1376  +      }
      1377  +      return this.config;
      1378  +    }
      1379  +    mergeLifecycles() {
      1380  +      return {
      1381  +        beforeInfrastructure: createMergedHook(
      1382  +          collectHooks(this.lifecycles, "beforeInfrastructure")
      1383  +        ),
      1384  +        afterInfrastructure: createMergedHook(
      1385  +          collectHooks(this.lifecycles, "afterInfrastructure")
      1386  +        ),
      1387  +        beforeRoutes: createMergedHook(
      1388  +          collectHooks(this.lifecycles, "beforeRoutes")
      1389  +        ),
      1390  +        afterRoutes: createMergedHook(
      1391  +          collectHooks(this.lifecycles, "afterRoutes")
      1392  +        ),
      1393  +        afterStart: createMergedHook(
      1394  +          collectHooks(this.lifecycles, "afterStart")
      1395  +        ),
      1396  +        beforeShutdown: createMergedHook(
      1397  +          collectHooks(this.lifecycles, "beforeShutdown")
      1398  +        )
      1399  +      };
      1400  +    }
      1401  +  };
      1402  +  function defineServerConfig() {
      1403  +    return new ServerConfigBuilder();
3665  1404     }
3666  1405
3667        -  export { createServer, startServer };
      1406  +  export { createServer, defineServerConfig, loadEnvFiles, startServer };
3668  1407     //# sourceMappingURL=index.js.map
3669  1408     //# sourceMappingURL=index.js.map
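
The newly exported `defineServerConfig` is a fluent builder around the server configuration. The sketch below pieces together its intended usage from the JSDoc @example blocks in the diff above; the `@spfn/core/server` import path, the `appRouter`/`jobRouter` placeholders, and the hook body are illustrative assumptions, not verified against the published package.

```typescript
// Minimal usage sketch of the new builder API (assumptions noted above).
import { defineServerConfig } from '@spfn/core/server'; // assumed subpath export

// Placeholders: in a real app these come from defineRouter() / defineJobRouter().
declare const appRouter: any;
declare const jobRouter: any;

export default defineServerConfig()
  .port(3000)
  .routes(appRouter)   // the router's global middlewares are merged into config.middlewares
  .jobs(jobRouter)     // optional background-jobs router
  .lifecycle({
    // hooks from repeated .lifecycle() calls are merged and run in registration order
    afterStart: async () => {
      console.log('server started');
    },
  })
  .build();            // merges lifecycles and returns the final config object
```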