@spfn/core 0.1.0-alpha.88 → 0.2.0-beta.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +298 -466
- package/dist/boss-DI1r4kTS.d.ts +244 -0
- package/dist/cache/index.d.ts +13 -33
- package/dist/cache/index.js +14 -703
- package/dist/cache/index.js.map +1 -1
- package/dist/codegen/index.d.ts +214 -17
- package/dist/codegen/index.js +231 -1420
- package/dist/codegen/index.js.map +1 -1
- package/dist/config/index.d.ts +1227 -0
- package/dist/config/index.js +273 -0
- package/dist/config/index.js.map +1 -0
- package/dist/db/index.d.ts +741 -59
- package/dist/db/index.js +1063 -1226
- package/dist/db/index.js.map +1 -1
- package/dist/env/index.d.ts +658 -308
- package/dist/env/index.js +503 -928
- package/dist/env/index.js.map +1 -1
- package/dist/env/loader.d.ts +87 -0
- package/dist/env/loader.js +70 -0
- package/dist/env/loader.js.map +1 -0
- package/dist/errors/index.d.ts +417 -29
- package/dist/errors/index.js +359 -98
- package/dist/errors/index.js.map +1 -1
- package/dist/event/index.d.ts +41 -0
- package/dist/event/index.js +131 -0
- package/dist/event/index.js.map +1 -0
- package/dist/event/sse/client.d.ts +82 -0
- package/dist/event/sse/client.js +115 -0
- package/dist/event/sse/client.js.map +1 -0
- package/dist/event/sse/index.d.ts +40 -0
- package/dist/event/sse/index.js +92 -0
- package/dist/event/sse/index.js.map +1 -0
- package/dist/job/index.d.ts +218 -0
- package/dist/job/index.js +410 -0
- package/dist/job/index.js.map +1 -0
- package/dist/logger/index.d.ts +20 -79
- package/dist/logger/index.js +82 -387
- package/dist/logger/index.js.map +1 -1
- package/dist/middleware/index.d.ts +102 -20
- package/dist/middleware/index.js +51 -705
- package/dist/middleware/index.js.map +1 -1
- package/dist/nextjs/index.d.ts +120 -0
- package/dist/nextjs/index.js +448 -0
- package/dist/nextjs/index.js.map +1 -0
- package/dist/{client/nextjs/index.d.ts → nextjs/server.d.ts} +335 -262
- package/dist/nextjs/server.js +637 -0
- package/dist/nextjs/server.js.map +1 -0
- package/dist/route/index.d.ts +879 -25
- package/dist/route/index.js +697 -1271
- package/dist/route/index.js.map +1 -1
- package/dist/route/types.d.ts +9 -0
- package/dist/route/types.js +3 -0
- package/dist/route/types.js.map +1 -0
- package/dist/router-Di7ENoah.d.ts +151 -0
- package/dist/server/index.d.ts +345 -64
- package/dist/server/index.js +1174 -3233
- package/dist/server/index.js.map +1 -1
- package/dist/types-B-e_f2dQ.d.ts +121 -0
- package/dist/types-BGl4QL1w.d.ts +77 -0
- package/dist/types-BOPTApC2.d.ts +245 -0
- package/docs/cache.md +133 -0
- package/docs/codegen.md +74 -0
- package/docs/database.md +346 -0
- package/docs/entity.md +539 -0
- package/docs/env.md +477 -0
- package/docs/errors.md +319 -0
- package/docs/event.md +116 -0
- package/docs/file-upload.md +717 -0
- package/docs/job.md +131 -0
- package/docs/logger.md +108 -0
- package/docs/middleware.md +337 -0
- package/docs/nextjs.md +241 -0
- package/docs/repository.md +496 -0
- package/docs/route.md +497 -0
- package/docs/server.md +307 -0
- package/package.json +68 -48
- package/dist/auto-loader-JFaZ9gON.d.ts +0 -80
- package/dist/client/index.d.ts +0 -358
- package/dist/client/index.js +0 -357
- package/dist/client/index.js.map +0 -1
- package/dist/client/nextjs/index.js +0 -371
- package/dist/client/nextjs/index.js.map +0 -1
- package/dist/codegen/generators/index.d.ts +0 -19
- package/dist/codegen/generators/index.js +0 -1404
- package/dist/codegen/generators/index.js.map +0 -1
- package/dist/database-errors-BNNmLTJE.d.ts +0 -86
- package/dist/events/index.d.ts +0 -183
- package/dist/events/index.js +0 -77
- package/dist/events/index.js.map +0 -1
- package/dist/index-DHiAqhKv.d.ts +0 -101
- package/dist/index.d.ts +0 -8
- package/dist/index.js +0 -3674
- package/dist/index.js.map +0 -1
- package/dist/types/index.d.ts +0 -121
- package/dist/types/index.js +0 -38
- package/dist/types/index.js.map +0 -1
- package/dist/types-BXibIEyj.d.ts +0 -60
package/dist/server/index.js
CHANGED
|
@@ -1,17 +1,17 @@
|
|
|
1
|
-
import {
|
|
2
|
-
import { join, dirname, relative, basename } from 'path';
|
|
1
|
+
import { env } from '@spfn/core/config';
|
|
3
2
|
import { config } from 'dotenv';
|
|
4
|
-
import
|
|
5
|
-
import {
|
|
6
|
-
import { timestamp, bigserial, pgSchema } from 'drizzle-orm/pg-core';
|
|
7
|
-
import { AsyncLocalStorage } from 'async_hooks';
|
|
8
|
-
import { randomUUID, randomBytes } from 'crypto';
|
|
9
|
-
import { createMiddleware } from 'hono/factory';
|
|
10
|
-
import { eq, and } from 'drizzle-orm';
|
|
3
|
+
import { existsSync } from 'fs';
|
|
4
|
+
import { resolve, join } from 'path';
|
|
11
5
|
import { Hono } from 'hono';
|
|
12
6
|
import { cors } from 'hono/cors';
|
|
13
|
-
import {
|
|
7
|
+
import { registerRoutes } from '@spfn/core/route';
|
|
8
|
+
import { ErrorHandler, RequestLogger } from '@spfn/core/middleware';
|
|
9
|
+
import { streamSSE } from 'hono/streaming';
|
|
10
|
+
import { logger } from '@spfn/core/logger';
|
|
11
|
+
import { initDatabase, getDatabase, closeDatabase } from '@spfn/core/db';
|
|
12
|
+
import { initCache, getCache, closeCache } from '@spfn/core/cache';
|
|
14
13
|
import { serve } from '@hono/node-server';
|
|
14
|
+
import PgBoss from 'pg-boss';
|
|
15
15
|
import { networkInterfaces } from 'os';
|
|
16
16
|
|
|
17
17
|
var __defProp = Object.defineProperty;
|
|
@@ -24,46 +24,55 @@ var __export = (target, all) => {
|
|
|
24
24
|
__defProp(target, name, { get: all[name], enumerable: true });
|
|
25
25
|
};
|
|
26
26
|
|
|
27
|
-
// src/logger/types.ts
|
|
28
|
-
var LOG_LEVEL_PRIORITY;
|
|
29
|
-
var init_types = __esm({
|
|
30
|
-
"src/logger/types.ts"() {
|
|
31
|
-
LOG_LEVEL_PRIORITY = {
|
|
32
|
-
debug: 0,
|
|
33
|
-
info: 1,
|
|
34
|
-
warn: 2,
|
|
35
|
-
error: 3,
|
|
36
|
-
fatal: 4
|
|
37
|
-
};
|
|
38
|
-
}
|
|
39
|
-
});
|
|
40
|
-
|
|
41
27
|
// src/logger/formatters.ts
|
|
28
|
+
var formatters_exports = {};
|
|
29
|
+
__export(formatters_exports, {
|
|
30
|
+
colorizeLevel: () => colorizeLevel,
|
|
31
|
+
extractPromiseContext: () => extractPromiseContext,
|
|
32
|
+
extractQueryInfo: () => extractQueryInfo,
|
|
33
|
+
formatConsole: () => formatConsole,
|
|
34
|
+
formatContext: () => formatContext,
|
|
35
|
+
formatError: () => formatError,
|
|
36
|
+
formatJSON: () => formatJSON,
|
|
37
|
+
formatTimestamp: () => formatTimestamp,
|
|
38
|
+
formatTimestampHuman: () => formatTimestampHuman,
|
|
39
|
+
formatUnhandledRejection: () => formatUnhandledRejection,
|
|
40
|
+
maskSensitiveData: () => maskSensitiveData
|
|
41
|
+
});
|
|
42
42
|
function isSensitiveKey(key) {
|
|
43
43
|
const lowerKey = key.toLowerCase();
|
|
44
44
|
return SENSITIVE_KEYS.some((sensitive) => lowerKey.includes(sensitive));
|
|
45
45
|
}
|
|
46
|
-
function maskSensitiveData(data) {
|
|
46
|
+
function maskSensitiveData(data, seen = /* @__PURE__ */ new WeakSet()) {
|
|
47
47
|
if (data === null || data === void 0) {
|
|
48
48
|
return data;
|
|
49
49
|
}
|
|
50
|
-
if (
|
|
51
|
-
return data
|
|
50
|
+
if (typeof data !== "object") {
|
|
51
|
+
return data;
|
|
52
52
|
}
|
|
53
|
-
if (
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
|
|
62
|
-
|
|
53
|
+
if (seen.has(data)) {
|
|
54
|
+
return "[Circular]";
|
|
55
|
+
}
|
|
56
|
+
seen.add(data);
|
|
57
|
+
if (Array.isArray(data)) {
|
|
58
|
+
return data.map((item) => maskSensitiveData(item, seen));
|
|
59
|
+
}
|
|
60
|
+
const masked = {};
|
|
61
|
+
for (const [key, value] of Object.entries(data)) {
|
|
62
|
+
if (isSensitiveKey(key)) {
|
|
63
|
+
masked[key] = MASKED_VALUE;
|
|
64
|
+
} else if (typeof value === "object" && value !== null) {
|
|
65
|
+
masked[key] = maskSensitiveData(value, seen);
|
|
66
|
+
} else {
|
|
67
|
+
masked[key] = value;
|
|
63
68
|
}
|
|
64
|
-
return masked;
|
|
65
69
|
}
|
|
66
|
-
return
|
|
70
|
+
return masked;
|
|
71
|
+
}
|
|
72
|
+
function colorizeLevel(level) {
|
|
73
|
+
const color = COLORS[level];
|
|
74
|
+
const levelStr = level.toUpperCase().padEnd(5);
|
|
75
|
+
return `${color}${levelStr}${COLORS.reset}`;
|
|
67
76
|
}
|
|
68
77
|
function formatTimestamp(date) {
|
|
69
78
|
return date.toISOString();
|
|
@@ -87,13 +96,26 @@ function formatError(error) {
|
|
|
87
96
|
}
|
|
88
97
|
return lines.join("\n");
|
|
89
98
|
}
|
|
99
|
+
function formatContext(context) {
|
|
100
|
+
try {
|
|
101
|
+
return JSON.stringify(context, null, 2);
|
|
102
|
+
} catch (error) {
|
|
103
|
+
return "[Context serialization failed]";
|
|
104
|
+
}
|
|
105
|
+
}
|
|
90
106
|
function formatConsole(metadata, colorize = true) {
|
|
91
107
|
const parts = [];
|
|
92
|
-
const
|
|
108
|
+
const timestamp = formatTimestampHuman(metadata.timestamp);
|
|
109
|
+
if (colorize) {
|
|
110
|
+
parts.push(`${COLORS.gray}[${timestamp}]${COLORS.reset}`);
|
|
111
|
+
} else {
|
|
112
|
+
parts.push(`[${timestamp}]`);
|
|
113
|
+
}
|
|
114
|
+
const pid = process.pid;
|
|
93
115
|
if (colorize) {
|
|
94
|
-
parts.push(`${COLORS.
|
|
116
|
+
parts.push(`${COLORS.dim}[pid=${pid}]${COLORS.reset}`);
|
|
95
117
|
} else {
|
|
96
|
-
parts.push(`[
|
|
118
|
+
parts.push(`[pid=${pid}]`);
|
|
97
119
|
}
|
|
98
120
|
if (metadata.module) {
|
|
99
121
|
if (colorize) {
|
|
@@ -162,6 +184,84 @@ function formatJSON(metadata) {
|
|
|
162
184
|
}
|
|
163
185
|
return JSON.stringify(obj);
|
|
164
186
|
}
|
|
187
|
+
function extractQueryInfo(error) {
|
|
188
|
+
const message = error.message;
|
|
189
|
+
if (!message) return null;
|
|
190
|
+
const result = {};
|
|
191
|
+
const queryMatch = message.match(/(?:Failed query:|Query:)\s*([^\n]+)/);
|
|
192
|
+
if (queryMatch) {
|
|
193
|
+
result.query = queryMatch[1].trim();
|
|
194
|
+
const tableMatch = result.query.match(/(?:UPDATE|INSERT INTO|DELETE FROM|FROM)\s+"?([a-zA-Z_][a-zA-Z0-9_]*)"?\."?([a-zA-Z_][a-zA-Z0-9_]*)"?|(?:UPDATE|INSERT INTO|DELETE FROM|FROM)\s+"?([a-zA-Z_][a-zA-Z0-9_]*)"?/i);
|
|
195
|
+
if (tableMatch) {
|
|
196
|
+
result.table = tableMatch[2] || tableMatch[3] || tableMatch[1];
|
|
197
|
+
}
|
|
198
|
+
}
|
|
199
|
+
const paramsMatch = message.match(/params:\s*(.+?)(?:\n|$)/);
|
|
200
|
+
if (paramsMatch) {
|
|
201
|
+
const paramsStr = paramsMatch[1].trim();
|
|
202
|
+
try {
|
|
203
|
+
result.params = paramsStr.split(",").map((p) => p.trim());
|
|
204
|
+
} catch (e) {
|
|
205
|
+
result.params = paramsStr;
|
|
206
|
+
}
|
|
207
|
+
}
|
|
208
|
+
return Object.keys(result).length > 0 ? result : null;
|
|
209
|
+
}
|
|
210
|
+
function extractPromiseContext(error) {
|
|
211
|
+
const context = {};
|
|
212
|
+
if (!error.stack) return context;
|
|
213
|
+
const stackLines = error.stack.split("\n");
|
|
214
|
+
for (let i = 1; i < stackLines.length; i++) {
|
|
215
|
+
const line = stackLines[i].trim();
|
|
216
|
+
if (line.includes("node_modules") || line.includes("node:internal")) continue;
|
|
217
|
+
const match = line.match(/at\s+(?:([a-zA-Z_$][\w$]*(?:\.[a-zA-Z_$][\w$]*)*)\s+)?\(?([^)]+):(\d+):(\d+)\)?/);
|
|
218
|
+
if (match) {
|
|
219
|
+
const [, functionName, filePath, lineNumber, columnNumber] = match;
|
|
220
|
+
const fileNameMatch = filePath.match(/([^/\\]+)$/);
|
|
221
|
+
const fileName = fileNameMatch ? fileNameMatch[1] : filePath;
|
|
222
|
+
context.file = fileName;
|
|
223
|
+
context.line = parseInt(lineNumber, 10);
|
|
224
|
+
context.column = parseInt(columnNumber, 10);
|
|
225
|
+
if (functionName) {
|
|
226
|
+
const methodMatch = functionName.match(/^(.+)\.([^.]+)$/);
|
|
227
|
+
if (methodMatch) {
|
|
228
|
+
const [, className, methodName] = methodMatch;
|
|
229
|
+
context.class = className;
|
|
230
|
+
context.method = methodName;
|
|
231
|
+
if (className.includes("Repository")) {
|
|
232
|
+
context.repository = className;
|
|
233
|
+
}
|
|
234
|
+
} else {
|
|
235
|
+
context.function = functionName;
|
|
236
|
+
}
|
|
237
|
+
}
|
|
238
|
+
break;
|
|
239
|
+
}
|
|
240
|
+
}
|
|
241
|
+
return context;
|
|
242
|
+
}
|
|
243
|
+
function formatUnhandledRejection(reason, promise) {
|
|
244
|
+
let error;
|
|
245
|
+
if (reason instanceof Error) {
|
|
246
|
+
error = reason;
|
|
247
|
+
} else if (typeof reason === "string") {
|
|
248
|
+
error = new Error(reason);
|
|
249
|
+
} else {
|
|
250
|
+
error = new Error(JSON.stringify(reason));
|
|
251
|
+
}
|
|
252
|
+
const context = {
|
|
253
|
+
promise: String(promise)
|
|
254
|
+
};
|
|
255
|
+
const promiseContext = extractPromiseContext(error);
|
|
256
|
+
if (Object.keys(promiseContext).length > 0) {
|
|
257
|
+
context.promiseContext = promiseContext;
|
|
258
|
+
}
|
|
259
|
+
const queryInfo = extractQueryInfo(error);
|
|
260
|
+
if (queryInfo) {
|
|
261
|
+
context.queryInfo = queryInfo;
|
|
262
|
+
}
|
|
263
|
+
return { error, context };
|
|
264
|
+
}
|
|
165
265
|
var SENSITIVE_KEYS, MASKED_VALUE, COLORS;
|
|
166
266
|
var init_formatters = __esm({
|
|
167
267
|
"src/logger/formatters.ts"() {
|
|
@@ -214,3044 +314,252 @@ var init_formatters = __esm({
|
|
|
214
314
|
};
|
|
215
315
|
}
|
|
216
316
|
});
|
|
217
|
-
|
|
218
|
-
|
|
219
|
-
|
|
220
|
-
|
|
221
|
-
|
|
222
|
-
|
|
223
|
-
|
|
224
|
-
|
|
225
|
-
|
|
226
|
-
|
|
227
|
-
|
|
228
|
-
|
|
229
|
-
|
|
230
|
-
|
|
231
|
-
|
|
232
|
-
|
|
233
|
-
|
|
234
|
-
|
|
235
|
-
|
|
236
|
-
|
|
237
|
-
|
|
238
|
-
|
|
239
|
-
|
|
240
|
-
|
|
241
|
-
|
|
242
|
-
|
|
243
|
-
|
|
244
|
-
|
|
245
|
-
|
|
246
|
-
|
|
247
|
-
|
|
248
|
-
|
|
249
|
-
|
|
250
|
-
|
|
251
|
-
|
|
252
|
-
|
|
253
|
-
|
|
254
|
-
|
|
255
|
-
|
|
256
|
-
|
|
257
|
-
|
|
258
|
-
|
|
259
|
-
|
|
260
|
-
|
|
261
|
-
|
|
262
|
-
|
|
263
|
-
|
|
264
|
-
|
|
265
|
-
|
|
266
|
-
|
|
267
|
-
this.log("error", message, errorOrContext, context);
|
|
268
|
-
} else {
|
|
269
|
-
this.log("error", message, void 0, errorOrContext);
|
|
270
|
-
}
|
|
271
|
-
}
|
|
272
|
-
fatal(message, errorOrContext, context) {
|
|
273
|
-
if (errorOrContext instanceof Error) {
|
|
274
|
-
this.log("fatal", message, errorOrContext, context);
|
|
275
|
-
} else {
|
|
276
|
-
this.log("fatal", message, void 0, errorOrContext);
|
|
277
|
-
}
|
|
278
|
-
}
|
|
279
|
-
/**
|
|
280
|
-
* Log processing (internal)
|
|
281
|
-
*/
|
|
282
|
-
log(level, message, error, context) {
|
|
283
|
-
if (LOG_LEVEL_PRIORITY[level] < LOG_LEVEL_PRIORITY[this.config.level]) {
|
|
284
|
-
return;
|
|
285
|
-
}
|
|
286
|
-
const metadata = {
|
|
287
|
-
timestamp: /* @__PURE__ */ new Date(),
|
|
288
|
-
level,
|
|
289
|
-
message,
|
|
290
|
-
module: this.module,
|
|
291
|
-
error,
|
|
292
|
-
// Mask sensitive information in context to prevent credential leaks
|
|
293
|
-
context: context ? maskSensitiveData(context) : void 0
|
|
294
|
-
};
|
|
295
|
-
this.processTransports(metadata);
|
|
296
|
-
}
|
|
297
|
-
/**
|
|
298
|
-
* Process Transports
|
|
299
|
-
*/
|
|
300
|
-
processTransports(metadata) {
|
|
301
|
-
const promises = this.config.transports.filter((transport) => transport.enabled).map((transport) => this.safeTransportLog(transport, metadata));
|
|
302
|
-
Promise.all(promises).catch((error) => {
|
|
303
|
-
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
304
|
-
process.stderr.write(`[Logger] Transport error: ${errorMessage}
|
|
305
|
-
`);
|
|
306
|
-
});
|
|
307
|
-
}
|
|
308
|
-
/**
|
|
309
|
-
* Transport log (error-safe)
|
|
310
|
-
*/
|
|
311
|
-
async safeTransportLog(transport, metadata) {
|
|
312
|
-
try {
|
|
313
|
-
await transport.log(metadata);
|
|
314
|
-
} catch (error) {
|
|
315
|
-
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
316
|
-
process.stderr.write(`[Logger] Transport "${transport.name}" failed: ${errorMessage}
|
|
317
|
-
`);
|
|
318
|
-
}
|
|
319
|
-
}
|
|
320
|
-
/**
|
|
321
|
-
* Close all Transports
|
|
322
|
-
*/
|
|
323
|
-
async close() {
|
|
324
|
-
const closePromises = this.config.transports.filter((transport) => transport.close).map((transport) => transport.close());
|
|
325
|
-
await Promise.all(closePromises);
|
|
326
|
-
}
|
|
327
|
-
};
|
|
328
|
-
}
|
|
329
|
-
});
|
|
330
|
-
|
|
331
|
-
// src/logger/transports/console.ts
|
|
332
|
-
var ConsoleTransport;
|
|
333
|
-
var init_console = __esm({
|
|
334
|
-
"src/logger/transports/console.ts"() {
|
|
335
|
-
init_types();
|
|
336
|
-
init_formatters();
|
|
337
|
-
ConsoleTransport = class {
|
|
338
|
-
name = "console";
|
|
339
|
-
level;
|
|
340
|
-
enabled;
|
|
341
|
-
colorize;
|
|
342
|
-
constructor(config) {
|
|
343
|
-
this.level = config.level;
|
|
344
|
-
this.enabled = config.enabled;
|
|
345
|
-
this.colorize = config.colorize ?? true;
|
|
346
|
-
}
|
|
347
|
-
async log(metadata) {
|
|
348
|
-
if (!this.enabled) {
|
|
349
|
-
return;
|
|
350
|
-
}
|
|
351
|
-
if (LOG_LEVEL_PRIORITY[metadata.level] < LOG_LEVEL_PRIORITY[this.level]) {
|
|
352
|
-
return;
|
|
353
|
-
}
|
|
354
|
-
const message = formatConsole(metadata, this.colorize);
|
|
355
|
-
if (metadata.level === "warn" || metadata.level === "error" || metadata.level === "fatal") {
|
|
356
|
-
console.error(message);
|
|
357
|
-
} else {
|
|
358
|
-
console.log(message);
|
|
359
|
-
}
|
|
360
|
-
}
|
|
361
|
-
};
|
|
362
|
-
}
|
|
363
|
-
});
|
|
364
|
-
var FileTransport;
|
|
365
|
-
var init_file = __esm({
|
|
366
|
-
"src/logger/transports/file.ts"() {
|
|
367
|
-
init_types();
|
|
368
|
-
init_formatters();
|
|
369
|
-
FileTransport = class {
|
|
370
|
-
name = "file";
|
|
371
|
-
level;
|
|
372
|
-
enabled;
|
|
373
|
-
logDir;
|
|
374
|
-
maxFileSize;
|
|
375
|
-
maxFiles;
|
|
376
|
-
currentStream = null;
|
|
377
|
-
currentFilename = null;
|
|
378
|
-
constructor(config) {
|
|
379
|
-
this.level = config.level;
|
|
380
|
-
this.enabled = config.enabled;
|
|
381
|
-
this.logDir = config.logDir;
|
|
382
|
-
this.maxFileSize = config.maxFileSize ?? 10 * 1024 * 1024;
|
|
383
|
-
this.maxFiles = config.maxFiles ?? 10;
|
|
384
|
-
if (!existsSync(this.logDir)) {
|
|
385
|
-
mkdirSync(this.logDir, { recursive: true });
|
|
386
|
-
}
|
|
387
|
-
}
|
|
388
|
-
async log(metadata) {
|
|
389
|
-
if (!this.enabled) {
|
|
390
|
-
return;
|
|
391
|
-
}
|
|
392
|
-
if (LOG_LEVEL_PRIORITY[metadata.level] < LOG_LEVEL_PRIORITY[this.level]) {
|
|
393
|
-
return;
|
|
394
|
-
}
|
|
395
|
-
const message = formatJSON(metadata);
|
|
396
|
-
const filename = this.getLogFilename(metadata.timestamp);
|
|
397
|
-
if (this.currentFilename !== filename) {
|
|
398
|
-
await this.rotateStream(filename);
|
|
399
|
-
await this.cleanOldFiles();
|
|
400
|
-
} else if (this.currentFilename) {
|
|
401
|
-
await this.checkAndRotateBySize();
|
|
317
|
+
function loadEnvFiles() {
|
|
318
|
+
const cwd = process.cwd();
|
|
319
|
+
const nodeEnv = process.env.NODE_ENV || "development";
|
|
320
|
+
const envFiles = [
|
|
321
|
+
".env.server.local",
|
|
322
|
+
".env.server",
|
|
323
|
+
`.env.${nodeEnv}.local`,
|
|
324
|
+
nodeEnv !== "test" ? ".env.local" : null,
|
|
325
|
+
`.env.${nodeEnv}`,
|
|
326
|
+
".env"
|
|
327
|
+
].filter((file) => file !== null);
|
|
328
|
+
for (const file of envFiles) {
|
|
329
|
+
const filePath = resolve(cwd, file);
|
|
330
|
+
if (existsSync(filePath)) {
|
|
331
|
+
config({ path: filePath });
|
|
332
|
+
}
|
|
333
|
+
}
|
|
334
|
+
}
|
|
335
|
+
var sseLogger = logger.child("@spfn/core:sse");
|
|
336
|
+
function createSSEHandler(router, config2 = {}) {
|
|
337
|
+
const {
|
|
338
|
+
pingInterval = 3e4
|
|
339
|
+
// headers: customHeaders = {}, // Reserved for future use
|
|
340
|
+
} = config2;
|
|
341
|
+
return async (c) => {
|
|
342
|
+
const eventsParam = c.req.query("events");
|
|
343
|
+
if (!eventsParam) {
|
|
344
|
+
return c.json({ error: "Missing events parameter" }, 400);
|
|
345
|
+
}
|
|
346
|
+
const requestedEvents = eventsParam.split(",").map((e) => e.trim());
|
|
347
|
+
const validEventNames = router.eventNames;
|
|
348
|
+
const invalidEvents = requestedEvents.filter((e) => !validEventNames.includes(e));
|
|
349
|
+
if (invalidEvents.length > 0) {
|
|
350
|
+
return c.json({
|
|
351
|
+
error: "Invalid event names",
|
|
352
|
+
invalidEvents,
|
|
353
|
+
validEvents: validEventNames
|
|
354
|
+
}, 400);
|
|
355
|
+
}
|
|
356
|
+
sseLogger.debug("SSE connection requested", {
|
|
357
|
+
events: requestedEvents,
|
|
358
|
+
clientIp: c.req.header("x-forwarded-for") || c.req.header("x-real-ip")
|
|
359
|
+
});
|
|
360
|
+
return streamSSE(c, async (stream) => {
|
|
361
|
+
const unsubscribes = [];
|
|
362
|
+
let messageId = 0;
|
|
363
|
+
for (const eventName of requestedEvents) {
|
|
364
|
+
const eventDef = router.events[eventName];
|
|
365
|
+
if (!eventDef) {
|
|
366
|
+
continue;
|
|
402
367
|
}
|
|
403
|
-
|
|
404
|
-
|
|
405
|
-
|
|
406
|
-
|
|
407
|
-
|
|
408
|
-
|
|
409
|
-
|
|
410
|
-
|
|
411
|
-
|
|
412
|
-
}
|
|
413
|
-
});
|
|
368
|
+
const unsubscribe = eventDef.subscribe((payload) => {
|
|
369
|
+
messageId++;
|
|
370
|
+
const message = {
|
|
371
|
+
event: eventName,
|
|
372
|
+
data: payload
|
|
373
|
+
};
|
|
374
|
+
sseLogger.debug("SSE sending event", {
|
|
375
|
+
event: eventName,
|
|
376
|
+
messageId
|
|
414
377
|
});
|
|
415
|
-
|
|
416
|
-
|
|
417
|
-
|
|
418
|
-
|
|
419
|
-
*/
|
|
420
|
-
async rotateStream(filename) {
|
|
421
|
-
if (this.currentStream) {
|
|
422
|
-
await this.closeStream();
|
|
423
|
-
}
|
|
424
|
-
const filepath = join(this.logDir, filename);
|
|
425
|
-
this.currentStream = createWriteStream(filepath, {
|
|
426
|
-
flags: "a",
|
|
427
|
-
// append mode
|
|
428
|
-
encoding: "utf-8"
|
|
429
|
-
});
|
|
430
|
-
this.currentFilename = filename;
|
|
431
|
-
this.currentStream.on("error", (error) => {
|
|
432
|
-
process.stderr.write(`[FileTransport] Stream error: ${error.message}
|
|
433
|
-
`);
|
|
434
|
-
this.currentStream = null;
|
|
435
|
-
this.currentFilename = null;
|
|
436
|
-
});
|
|
437
|
-
}
|
|
438
|
-
/**
|
|
439
|
-
* 현재 스트림 닫기
|
|
440
|
-
*/
|
|
441
|
-
async closeStream() {
|
|
442
|
-
if (!this.currentStream) {
|
|
443
|
-
return;
|
|
444
|
-
}
|
|
445
|
-
return new Promise((resolve, reject) => {
|
|
446
|
-
this.currentStream.end((error) => {
|
|
447
|
-
if (error) {
|
|
448
|
-
reject(error);
|
|
449
|
-
} else {
|
|
450
|
-
this.currentStream = null;
|
|
451
|
-
this.currentFilename = null;
|
|
452
|
-
resolve();
|
|
453
|
-
}
|
|
378
|
+
void stream.writeSSE({
|
|
379
|
+
id: String(messageId),
|
|
380
|
+
event: eventName,
|
|
381
|
+
data: JSON.stringify(message)
|
|
454
382
|
});
|
|
455
383
|
});
|
|
384
|
+
unsubscribes.push(unsubscribe);
|
|
456
385
|
}
|
|
457
|
-
|
|
458
|
-
|
|
459
|
-
|
|
460
|
-
|
|
461
|
-
|
|
462
|
-
|
|
463
|
-
|
|
464
|
-
|
|
465
|
-
|
|
466
|
-
|
|
467
|
-
|
|
386
|
+
sseLogger.info("SSE connection established", {
|
|
387
|
+
events: requestedEvents,
|
|
388
|
+
subscriptionCount: unsubscribes.length
|
|
389
|
+
});
|
|
390
|
+
await stream.writeSSE({
|
|
391
|
+
event: "connected",
|
|
392
|
+
data: JSON.stringify({
|
|
393
|
+
subscribedEvents: requestedEvents,
|
|
394
|
+
timestamp: Date.now()
|
|
395
|
+
})
|
|
396
|
+
});
|
|
397
|
+
const pingTimer = setInterval(() => {
|
|
398
|
+
void stream.writeSSE({
|
|
399
|
+
event: "ping",
|
|
400
|
+
data: JSON.stringify({ timestamp: Date.now() })
|
|
401
|
+
});
|
|
402
|
+
}, pingInterval);
|
|
403
|
+
const abortSignal = c.req.raw.signal;
|
|
404
|
+
while (!abortSignal.aborted) {
|
|
405
|
+
await stream.sleep(pingInterval);
|
|
406
|
+
}
|
|
407
|
+
clearInterval(pingTimer);
|
|
408
|
+
unsubscribes.forEach((fn) => fn());
|
|
409
|
+
sseLogger.info("SSE connection closed", {
|
|
410
|
+
events: requestedEvents
|
|
411
|
+
});
|
|
412
|
+
}, async (err) => {
|
|
413
|
+
sseLogger.error("SSE stream error", {
|
|
414
|
+
error: err.message
|
|
415
|
+
});
|
|
416
|
+
});
|
|
417
|
+
};
|
|
418
|
+
}
|
|
419
|
+
function createHealthCheckHandler(detailed) {
|
|
420
|
+
return async (c) => {
|
|
421
|
+
const response = {
|
|
422
|
+
status: "ok",
|
|
423
|
+
timestamp: (/* @__PURE__ */ new Date()).toISOString()
|
|
424
|
+
};
|
|
425
|
+
if (detailed) {
|
|
426
|
+
let dbStatus = "unknown";
|
|
427
|
+
let dbError;
|
|
428
|
+
try {
|
|
429
|
+
const db = getDatabase();
|
|
468
430
|
try {
|
|
469
|
-
|
|
470
|
-
|
|
471
|
-
await this.rotateBySize();
|
|
472
|
-
}
|
|
431
|
+
await db.execute("SELECT 1");
|
|
432
|
+
dbStatus = "connected";
|
|
473
433
|
} catch (error) {
|
|
474
|
-
|
|
475
|
-
|
|
476
|
-
`);
|
|
434
|
+
dbStatus = "error";
|
|
435
|
+
dbError = error instanceof Error ? error.message : String(error);
|
|
477
436
|
}
|
|
437
|
+
} catch (error) {
|
|
438
|
+
dbStatus = "not_initialized";
|
|
439
|
+
dbError = "Database not available";
|
|
478
440
|
}
|
|
479
|
-
|
|
480
|
-
|
|
481
|
-
|
|
482
|
-
|
|
483
|
-
async rotateBySize() {
|
|
484
|
-
if (!this.currentFilename) {
|
|
485
|
-
return;
|
|
486
|
-
}
|
|
487
|
-
await this.closeStream();
|
|
488
|
-
const baseName = this.currentFilename.replace(/\.log$/, "");
|
|
489
|
-
const files = readdirSync(this.logDir);
|
|
490
|
-
const relatedFiles = files.filter((file) => file.startsWith(baseName) && file.endsWith(".log")).sort().reverse();
|
|
491
|
-
for (const file of relatedFiles) {
|
|
492
|
-
const match = file.match(/\.(\d+)\.log$/);
|
|
493
|
-
if (match) {
|
|
494
|
-
const oldNum = parseInt(match[1], 10);
|
|
495
|
-
const newNum = oldNum + 1;
|
|
496
|
-
const oldPath = join(this.logDir, file);
|
|
497
|
-
const newPath2 = join(this.logDir, `${baseName}.${newNum}.log`);
|
|
498
|
-
try {
|
|
499
|
-
renameSync(oldPath, newPath2);
|
|
500
|
-
} catch (error) {
|
|
501
|
-
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
502
|
-
process.stderr.write(`[FileTransport] Failed to rotate file: ${errorMessage}
|
|
503
|
-
`);
|
|
504
|
-
}
|
|
505
|
-
}
|
|
506
|
-
}
|
|
507
|
-
const currentPath = join(this.logDir, this.currentFilename);
|
|
508
|
-
const newPath = join(this.logDir, `${baseName}.1.log`);
|
|
441
|
+
const redis = getCache();
|
|
442
|
+
let redisStatus = redis ? "unknown" : "not_initialized";
|
|
443
|
+
let redisError;
|
|
444
|
+
if (redis) {
|
|
509
445
|
try {
|
|
510
|
-
|
|
511
|
-
|
|
512
|
-
}
|
|
446
|
+
await redis.ping();
|
|
447
|
+
redisStatus = "connected";
|
|
513
448
|
} catch (error) {
|
|
514
|
-
|
|
515
|
-
|
|
516
|
-
`);
|
|
449
|
+
redisStatus = "error";
|
|
450
|
+
redisError = error instanceof Error ? error.message : String(error);
|
|
517
451
|
}
|
|
518
|
-
await this.rotateStream(this.currentFilename);
|
|
519
452
|
}
|
|
520
|
-
|
|
521
|
-
|
|
522
|
-
|
|
523
|
-
|
|
524
|
-
|
|
525
|
-
|
|
526
|
-
|
|
527
|
-
|
|
528
|
-
}
|
|
529
|
-
const files = readdirSync(this.logDir);
|
|
530
|
-
const logFiles = files.filter((file) => file.endsWith(".log")).map((file) => {
|
|
531
|
-
const filepath = join(this.logDir, file);
|
|
532
|
-
const stats = statSync(filepath);
|
|
533
|
-
return { file, mtime: stats.mtime };
|
|
534
|
-
}).sort((a, b) => b.mtime.getTime() - a.mtime.getTime());
|
|
535
|
-
if (logFiles.length > this.maxFiles) {
|
|
536
|
-
const filesToDelete = logFiles.slice(this.maxFiles);
|
|
537
|
-
for (const { file } of filesToDelete) {
|
|
538
|
-
const filepath = join(this.logDir, file);
|
|
539
|
-
try {
|
|
540
|
-
unlinkSync(filepath);
|
|
541
|
-
} catch (error) {
|
|
542
|
-
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
543
|
-
process.stderr.write(`[FileTransport] Failed to delete old file "${file}": ${errorMessage}
|
|
544
|
-
`);
|
|
545
|
-
}
|
|
546
|
-
}
|
|
547
|
-
}
|
|
548
|
-
} catch (error) {
|
|
549
|
-
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
550
|
-
process.stderr.write(`[FileTransport] Failed to clean old files: ${errorMessage}
|
|
551
|
-
`);
|
|
453
|
+
response.services = {
|
|
454
|
+
database: {
|
|
455
|
+
status: dbStatus,
|
|
456
|
+
...dbError && { error: dbError }
|
|
457
|
+
},
|
|
458
|
+
redis: {
|
|
459
|
+
status: redisStatus,
|
|
460
|
+
...redisError && { error: redisError }
|
|
552
461
|
}
|
|
553
|
-
}
|
|
554
|
-
|
|
555
|
-
|
|
556
|
-
|
|
557
|
-
|
|
558
|
-
|
|
559
|
-
|
|
560
|
-
const day = String(date.getDate()).padStart(2, "0");
|
|
561
|
-
return `${year}-${month}-${day}.log`;
|
|
562
|
-
}
|
|
563
|
-
async close() {
|
|
564
|
-
await this.closeStream();
|
|
565
|
-
}
|
|
566
|
-
};
|
|
567
|
-
}
|
|
568
|
-
});
|
|
569
|
-
function isFileLoggingEnabled() {
|
|
570
|
-
return process.env.LOGGER_FILE_ENABLED === "true";
|
|
462
|
+
};
|
|
463
|
+
const hasErrors = dbStatus === "error" || dbStatus === "not_initialized" || redisStatus === "error";
|
|
464
|
+
response.status = hasErrors ? "degraded" : "ok";
|
|
465
|
+
}
|
|
466
|
+
const statusCode = response.status === "ok" ? 200 : 503;
|
|
467
|
+
return c.json(response, statusCode);
|
|
468
|
+
};
|
|
571
469
|
}
|
|
572
|
-
function
|
|
573
|
-
|
|
574
|
-
|
|
575
|
-
|
|
576
|
-
|
|
577
|
-
}
|
|
578
|
-
if (isProduction) {
|
|
579
|
-
return "info";
|
|
470
|
+
function applyServerTimeouts(server, timeouts) {
|
|
471
|
+
if ("timeout" in server) {
|
|
472
|
+
server.timeout = timeouts.request;
|
|
473
|
+
server.keepAliveTimeout = timeouts.keepAlive;
|
|
474
|
+
server.headersTimeout = timeouts.headers;
|
|
580
475
|
}
|
|
581
|
-
return "warn";
|
|
582
|
-
}
|
|
583
|
-
function getConsoleConfig() {
|
|
584
|
-
const isProduction = process.env.NODE_ENV === "production";
|
|
585
|
-
return {
|
|
586
|
-
level: "debug",
|
|
587
|
-
enabled: true,
|
|
588
|
-
colorize: !isProduction
|
|
589
|
-
// Dev: colored output, Production: plain text
|
|
590
|
-
};
|
|
591
476
|
}
|
|
592
|
-
function
|
|
593
|
-
const isProduction = process.env.NODE_ENV === "production";
|
|
477
|
+
function getTimeoutConfig(config2) {
|
|
594
478
|
return {
|
|
595
|
-
|
|
596
|
-
|
|
597
|
-
|
|
598
|
-
logDir: process.env.LOG_DIR || "./logs",
|
|
599
|
-
maxFileSize: 10 * 1024 * 1024,
|
|
600
|
-
// 10MB
|
|
601
|
-
maxFiles: 10
|
|
479
|
+
request: config2?.request ?? env.SERVER_TIMEOUT,
|
|
480
|
+
keepAlive: config2?.keepAlive ?? env.SERVER_KEEPALIVE_TIMEOUT,
|
|
481
|
+
headers: config2?.headers ?? env.SERVER_HEADERS_TIMEOUT
|
|
602
482
|
};
|
|
603
483
|
}
|
|
604
|
-
function
|
|
605
|
-
|
|
606
|
-
try {
|
|
607
|
-
mkdirSync(dirPath, { recursive: true });
|
|
608
|
-
} catch (error) {
|
|
609
|
-
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
610
|
-
throw new Error(`Failed to create log directory "${dirPath}": ${errorMessage}`);
|
|
611
|
-
}
|
|
612
|
-
}
|
|
613
|
-
try {
|
|
614
|
-
accessSync(dirPath, constants.W_OK);
|
|
615
|
-
} catch {
|
|
616
|
-
throw new Error(`Log directory "${dirPath}" is not writable. Please check permissions.`);
|
|
617
|
-
}
|
|
618
|
-
const testFile = join(dirPath, ".logger-write-test");
|
|
619
|
-
try {
|
|
620
|
-
writeFileSync(testFile, "test", "utf-8");
|
|
621
|
-
unlinkSync(testFile);
|
|
622
|
-
} catch (error) {
|
|
623
|
-
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
624
|
-
throw new Error(`Cannot write to log directory "${dirPath}": ${errorMessage}`);
|
|
625
|
-
}
|
|
626
|
-
}
|
|
627
|
-
function validateFileConfig() {
|
|
628
|
-
if (!isFileLoggingEnabled()) {
|
|
629
|
-
return;
|
|
630
|
-
}
|
|
631
|
-
const logDir = process.env.LOG_DIR;
|
|
632
|
-
if (!logDir) {
|
|
633
|
-
throw new Error(
|
|
634
|
-
"LOG_DIR environment variable is required when LOGGER_FILE_ENABLED=true. Example: LOG_DIR=/var/log/myapp"
|
|
635
|
-
);
|
|
636
|
-
}
|
|
637
|
-
validateDirectoryWritable(logDir);
|
|
638
|
-
}
|
|
639
|
-
function validateSlackConfig() {
|
|
640
|
-
const webhookUrl = process.env.SLACK_WEBHOOK_URL;
|
|
641
|
-
if (!webhookUrl) {
|
|
642
|
-
return;
|
|
643
|
-
}
|
|
644
|
-
if (!webhookUrl.startsWith("https://hooks.slack.com/")) {
|
|
645
|
-
throw new Error(
|
|
646
|
-
`Invalid SLACK_WEBHOOK_URL: "${webhookUrl}". Slack webhook URLs must start with "https://hooks.slack.com/"`
|
|
647
|
-
);
|
|
648
|
-
}
|
|
649
|
-
}
|
|
650
|
-
function validateEmailConfig() {
|
|
651
|
-
const smtpHost = process.env.SMTP_HOST;
|
|
652
|
-
const smtpPort = process.env.SMTP_PORT;
|
|
653
|
-
const emailFrom = process.env.EMAIL_FROM;
|
|
654
|
-
const emailTo = process.env.EMAIL_TO;
|
|
655
|
-
const hasAnyEmailConfig = smtpHost || smtpPort || emailFrom || emailTo;
|
|
656
|
-
if (!hasAnyEmailConfig) {
|
|
657
|
-
return;
|
|
658
|
-
}
|
|
659
|
-
const missingFields = [];
|
|
660
|
-
if (!smtpHost) missingFields.push("SMTP_HOST");
|
|
661
|
-
if (!smtpPort) missingFields.push("SMTP_PORT");
|
|
662
|
-
if (!emailFrom) missingFields.push("EMAIL_FROM");
|
|
663
|
-
if (!emailTo) missingFields.push("EMAIL_TO");
|
|
664
|
-
if (missingFields.length > 0) {
|
|
665
|
-
throw new Error(
|
|
666
|
-
`Email transport configuration incomplete. Missing: ${missingFields.join(", ")}. Either set all required fields or remove all email configuration.`
|
|
667
|
-
);
|
|
668
|
-
}
|
|
669
|
-
const port = parseInt(smtpPort, 10);
|
|
670
|
-
if (isNaN(port) || port < 1 || port > 65535) {
|
|
671
|
-
throw new Error(
|
|
672
|
-
`Invalid SMTP_PORT: "${smtpPort}". Must be a number between 1 and 65535.`
|
|
673
|
-
);
|
|
674
|
-
}
|
|
675
|
-
const emailRegex = /^[^\s@]+@[^\s@]+\.[^\s@]+$/;
|
|
676
|
-
if (!emailRegex.test(emailFrom)) {
|
|
677
|
-
throw new Error(`Invalid EMAIL_FROM format: "${emailFrom}"`);
|
|
678
|
-
}
|
|
679
|
-
const recipients = emailTo.split(",").map((e) => e.trim());
|
|
680
|
-
for (const email of recipients) {
|
|
681
|
-
if (!emailRegex.test(email)) {
|
|
682
|
-
throw new Error(`Invalid email address in EMAIL_TO: "${email}"`);
|
|
683
|
-
}
|
|
684
|
-
}
|
|
685
|
-
}
|
|
686
|
-
function validateEnvironment() {
|
|
687
|
-
const nodeEnv = process.env.NODE_ENV;
|
|
688
|
-
if (!nodeEnv) {
|
|
689
|
-
process.stderr.write(
|
|
690
|
-
"[Logger] Warning: NODE_ENV is not set. Defaulting to test environment.\n"
|
|
691
|
-
);
|
|
692
|
-
}
|
|
693
|
-
}
|
|
694
|
-
function validateConfig() {
|
|
695
|
-
try {
|
|
696
|
-
validateEnvironment();
|
|
697
|
-
validateFileConfig();
|
|
698
|
-
validateSlackConfig();
|
|
699
|
-
validateEmailConfig();
|
|
700
|
-
} catch (error) {
|
|
701
|
-
if (error instanceof Error) {
|
|
702
|
-
throw new Error(`[Logger] Configuration validation failed: ${error.message}`);
|
|
703
|
-
}
|
|
704
|
-
throw error;
|
|
705
|
-
}
|
|
706
|
-
}
|
|
707
|
-
var init_config = __esm({
|
|
708
|
-
"src/logger/config.ts"() {
|
|
709
|
-
}
|
|
710
|
-
});
|
|
711
|
-
|
|
712
|
-
// src/logger/factory.ts
|
|
713
|
-
function initializeTransports() {
|
|
714
|
-
const transports = [];
|
|
715
|
-
const consoleConfig = getConsoleConfig();
|
|
716
|
-
transports.push(new ConsoleTransport(consoleConfig));
|
|
717
|
-
const fileConfig = getFileConfig();
|
|
718
|
-
if (fileConfig.enabled) {
|
|
719
|
-
transports.push(new FileTransport(fileConfig));
|
|
720
|
-
}
|
|
721
|
-
return transports;
|
|
722
|
-
}
|
|
723
|
-
function initializeLogger() {
|
|
724
|
-
validateConfig();
|
|
725
|
-
return new Logger({
|
|
726
|
-
level: getDefaultLogLevel(),
|
|
727
|
-
transports: initializeTransports()
|
|
728
|
-
});
|
|
484
|
+
function getShutdownTimeout(config2) {
|
|
485
|
+
return config2?.timeout ?? env.SHUTDOWN_TIMEOUT;
|
|
729
486
|
}
|
|
730
|
-
|
|
731
|
-
|
|
732
|
-
|
|
733
|
-
|
|
734
|
-
|
|
735
|
-
|
|
736
|
-
|
|
737
|
-
|
|
738
|
-
}
|
|
739
|
-
|
|
740
|
-
|
|
741
|
-
|
|
742
|
-
|
|
743
|
-
|
|
744
|
-
init_factory();
|
|
745
|
-
init_logger();
|
|
746
|
-
}
|
|
747
|
-
});
|
|
748
|
-
|
|
749
|
-
// src/route/function-routes.ts
|
|
750
|
-
var function_routes_exports = {};
|
|
751
|
-
__export(function_routes_exports, {
|
|
752
|
-
discoverFunctionRoutes: () => discoverFunctionRoutes
|
|
753
|
-
});
|
|
754
|
-
function discoverFunctionRoutes(cwd = process.cwd()) {
|
|
755
|
-
const functions = [];
|
|
756
|
-
const nodeModulesPath = join(cwd, "node_modules");
|
|
757
|
-
try {
|
|
758
|
-
const projectPkgPath = join(cwd, "package.json");
|
|
759
|
-
const projectPkg = JSON.parse(readFileSync(projectPkgPath, "utf-8"));
|
|
760
|
-
const dependencies = {
|
|
761
|
-
...projectPkg.dependencies,
|
|
762
|
-
...projectPkg.devDependencies
|
|
763
|
-
};
|
|
764
|
-
for (const [packageName] of Object.entries(dependencies)) {
|
|
765
|
-
if (!packageName.startsWith("@spfn/") && !packageName.startsWith("spfn-")) {
|
|
766
|
-
continue;
|
|
767
|
-
}
|
|
768
|
-
try {
|
|
769
|
-
const pkgPath = join(nodeModulesPath, ...packageName.split("/"), "package.json");
|
|
770
|
-
const pkg = JSON.parse(readFileSync(pkgPath, "utf-8"));
|
|
771
|
-
if (pkg.spfn?.routes?.dir) {
|
|
772
|
-
const { dir } = pkg.spfn.routes;
|
|
773
|
-
const prefix = pkg.spfn.prefix;
|
|
774
|
-
const packagePath = dirname(pkgPath);
|
|
775
|
-
const routesDir = join(packagePath, dir);
|
|
776
|
-
functions.push({
|
|
777
|
-
packageName,
|
|
778
|
-
routesDir,
|
|
779
|
-
packagePath,
|
|
780
|
-
prefix
|
|
781
|
-
// Include prefix in function info
|
|
782
|
-
});
|
|
783
|
-
routeLogger.debug("Discovered function routes", {
|
|
784
|
-
package: packageName,
|
|
785
|
-
dir,
|
|
786
|
-
prefix: prefix || "(none)"
|
|
787
|
-
});
|
|
788
|
-
}
|
|
789
|
-
} catch (error) {
|
|
790
|
-
}
|
|
791
|
-
}
|
|
792
|
-
} catch (error) {
|
|
793
|
-
routeLogger.warn("Failed to discover function routes", {
|
|
794
|
-
error: error instanceof Error ? error.message : "Unknown error"
|
|
795
|
-
});
|
|
796
|
-
}
|
|
797
|
-
return functions;
|
|
487
|
+
function buildMiddlewareOrder(config2) {
|
|
488
|
+
const order = [];
|
|
489
|
+
const middlewareConfig = config2.middleware ?? {};
|
|
490
|
+
const enableLogger = middlewareConfig.logger !== false;
|
|
491
|
+
const enableCors = middlewareConfig.cors !== false;
|
|
492
|
+
const enableErrorHandler = middlewareConfig.errorHandler !== false;
|
|
493
|
+
if (enableLogger) order.push("RequestLogger");
|
|
494
|
+
if (enableCors) order.push("CORS");
|
|
495
|
+
config2.use?.forEach((_, i) => order.push(`Custom[${i}]`));
|
|
496
|
+
if (config2.beforeRoutes) order.push("beforeRoutes hook");
|
|
497
|
+
order.push("Routes");
|
|
498
|
+
if (config2.afterRoutes) order.push("afterRoutes hook");
|
|
499
|
+
if (enableErrorHandler) order.push("ErrorHandler");
|
|
500
|
+
return order;
|
|
798
501
|
}
|
|
799
|
-
|
|
800
|
-
|
|
801
|
-
|
|
802
|
-
|
|
803
|
-
|
|
804
|
-
|
|
805
|
-
|
|
806
|
-
|
|
807
|
-
|
|
808
|
-
|
|
809
|
-
|
|
810
|
-
|
|
811
|
-
|
|
812
|
-
|
|
813
|
-
|
|
814
|
-
|
|
815
|
-
|
|
816
|
-
|
|
817
|
-
|
|
818
|
-
|
|
819
|
-
|
|
820
|
-
|
|
821
|
-
|
|
822
|
-
}
|
|
823
|
-
|
|
824
|
-
|
|
825
|
-
|
|
826
|
-
|
|
827
|
-
|
|
828
|
-
name: this.name,
|
|
829
|
-
message: this.message,
|
|
830
|
-
statusCode: this.statusCode,
|
|
831
|
-
details: this.details,
|
|
832
|
-
timestamp: this.timestamp.toISOString()
|
|
833
|
-
};
|
|
834
|
-
}
|
|
835
|
-
};
|
|
836
|
-
ConnectionError = class extends DatabaseError {
|
|
837
|
-
constructor(message, details) {
|
|
838
|
-
super(message, 503, details);
|
|
839
|
-
this.name = "ConnectionError";
|
|
840
|
-
}
|
|
841
|
-
};
|
|
842
|
-
QueryError = class extends DatabaseError {
|
|
843
|
-
constructor(message, statusCode = 500, details) {
|
|
844
|
-
super(message, statusCode, details);
|
|
845
|
-
this.name = "QueryError";
|
|
846
|
-
}
|
|
847
|
-
};
|
|
848
|
-
ConstraintViolationError = class extends QueryError {
|
|
849
|
-
constructor(message, details) {
|
|
850
|
-
super(message, 400, details);
|
|
851
|
-
this.name = "ConstraintViolationError";
|
|
852
|
-
}
|
|
853
|
-
};
|
|
854
|
-
TransactionError = class extends DatabaseError {
|
|
855
|
-
constructor(message, statusCode = 500, details) {
|
|
856
|
-
super(message, statusCode, details);
|
|
857
|
-
this.name = "TransactionError";
|
|
858
|
-
}
|
|
859
|
-
};
|
|
860
|
-
DeadlockError = class extends TransactionError {
|
|
861
|
-
constructor(message, details) {
|
|
862
|
-
super(message, 409, details);
|
|
863
|
-
this.name = "DeadlockError";
|
|
864
|
-
}
|
|
865
|
-
};
|
|
866
|
-
DuplicateEntryError = class extends QueryError {
|
|
867
|
-
constructor(field, value) {
|
|
868
|
-
super(`${field} '${value}' already exists`, 409, { field, value });
|
|
869
|
-
this.name = "DuplicateEntryError";
|
|
870
|
-
}
|
|
871
|
-
};
|
|
872
|
-
}
|
|
873
|
-
});
|
|
874
|
-
|
|
875
|
-
// src/errors/http-errors.ts
|
|
876
|
-
var init_http_errors = __esm({
|
|
877
|
-
"src/errors/http-errors.ts"() {
|
|
878
|
-
}
|
|
879
|
-
});
|
|
880
|
-
|
|
881
|
-
// src/errors/error-utils.ts
|
|
882
|
-
var init_error_utils = __esm({
|
|
883
|
-
"src/errors/error-utils.ts"() {
|
|
884
|
-
init_database_errors();
|
|
885
|
-
init_http_errors();
|
|
886
|
-
}
|
|
887
|
-
});
|
|
888
|
-
|
|
889
|
-
// src/errors/index.ts
|
|
890
|
-
var init_errors = __esm({
|
|
891
|
-
"src/errors/index.ts"() {
|
|
892
|
-
init_database_errors();
|
|
893
|
-
init_http_errors();
|
|
894
|
-
init_error_utils();
|
|
895
|
-
}
|
|
896
|
-
});
|
|
897
|
-
|
|
898
|
-
// src/env/config.ts
|
|
899
|
-
var ENV_FILE_PRIORITY, TEST_ONLY_FILES;
|
|
900
|
-
var init_config2 = __esm({
|
|
901
|
-
"src/env/config.ts"() {
|
|
902
|
-
ENV_FILE_PRIORITY = [
|
|
903
|
-
".env",
|
|
904
|
-
// Base configuration (lowest priority)
|
|
905
|
-
".env.{NODE_ENV}",
|
|
906
|
-
// Environment-specific
|
|
907
|
-
".env.local",
|
|
908
|
-
// Local overrides (excluded in test)
|
|
909
|
-
".env.{NODE_ENV}.local"
|
|
910
|
-
// Local environment-specific (highest priority)
|
|
911
|
-
];
|
|
912
|
-
TEST_ONLY_FILES = [
|
|
913
|
-
".env.test",
|
|
914
|
-
".env.test.local"
|
|
915
|
-
];
|
|
916
|
-
}
|
|
917
|
-
});
|
|
918
|
-
function buildFileList(basePath, nodeEnv) {
|
|
919
|
-
const files = [];
|
|
920
|
-
if (!nodeEnv) {
|
|
921
|
-
files.push(join(basePath, ".env"));
|
|
922
|
-
files.push(join(basePath, ".env.local"));
|
|
923
|
-
return files;
|
|
924
|
-
}
|
|
925
|
-
for (const pattern of ENV_FILE_PRIORITY) {
|
|
926
|
-
const fileName = pattern.replace("{NODE_ENV}", nodeEnv);
|
|
927
|
-
if (nodeEnv === "test" && fileName === ".env.local") {
|
|
928
|
-
continue;
|
|
929
|
-
}
|
|
930
|
-
if (nodeEnv === "local" && pattern === ".env.local") {
|
|
931
|
-
continue;
|
|
932
|
-
}
|
|
933
|
-
if (nodeEnv !== "test" && TEST_ONLY_FILES.includes(fileName)) {
|
|
934
|
-
continue;
|
|
935
|
-
}
|
|
936
|
-
files.push(join(basePath, fileName));
|
|
937
|
-
}
|
|
938
|
-
return files;
|
|
939
|
-
}
|
|
940
|
-
function loadSingleFile(filePath, debug) {
|
|
941
|
-
if (!existsSync(filePath)) {
|
|
942
|
-
if (debug) {
|
|
943
|
-
envLogger.debug("Environment file not found (optional)", {
|
|
944
|
-
path: filePath
|
|
945
|
-
});
|
|
946
|
-
}
|
|
947
|
-
return { success: false, parsed: {}, error: "File not found" };
|
|
948
|
-
}
|
|
949
|
-
try {
|
|
950
|
-
const result = config({ path: filePath });
|
|
951
|
-
if (result.error) {
|
|
952
|
-
envLogger.warn("Failed to parse environment file", {
|
|
953
|
-
path: filePath,
|
|
954
|
-
error: result.error.message
|
|
955
|
-
});
|
|
956
|
-
return {
|
|
957
|
-
success: false,
|
|
958
|
-
parsed: {},
|
|
959
|
-
error: result.error.message
|
|
960
|
-
};
|
|
961
|
-
}
|
|
962
|
-
const parsed = result.parsed || {};
|
|
963
|
-
if (debug) {
|
|
964
|
-
envLogger.debug("Environment file loaded successfully", {
|
|
965
|
-
path: filePath,
|
|
966
|
-
variables: Object.keys(parsed),
|
|
967
|
-
count: Object.keys(parsed).length
|
|
968
|
-
});
|
|
969
|
-
}
|
|
970
|
-
return { success: true, parsed };
|
|
971
|
-
} catch (error) {
|
|
972
|
-
const message = error instanceof Error ? error.message : "Unknown error";
|
|
973
|
-
envLogger.error("Error loading environment file", {
|
|
974
|
-
path: filePath,
|
|
975
|
-
error: message
|
|
976
|
-
});
|
|
977
|
-
return { success: false, parsed: {}, error: message };
|
|
978
|
-
}
|
|
979
|
-
}
|
|
980
|
-
function validateRequiredVars(required, debug) {
|
|
981
|
-
const missing = [];
|
|
982
|
-
for (const varName of required) {
|
|
983
|
-
if (!process.env[varName]) {
|
|
984
|
-
missing.push(varName);
|
|
985
|
-
}
|
|
986
|
-
}
|
|
987
|
-
if (missing.length > 0) {
|
|
988
|
-
const error = `Required environment variables missing: ${missing.join(", ")}`;
|
|
989
|
-
envLogger.error("Environment validation failed", {
|
|
990
|
-
missing,
|
|
991
|
-
required
|
|
992
|
-
});
|
|
993
|
-
throw new Error(error);
|
|
994
|
-
}
|
|
995
|
-
if (debug) {
|
|
996
|
-
envLogger.debug("Required environment variables validated", {
|
|
997
|
-
required,
|
|
998
|
-
allPresent: true
|
|
999
|
-
});
|
|
1000
|
-
}
|
|
1001
|
-
}
|
|
1002
|
-
function loadEnvironment(options = {}) {
|
|
1003
|
-
const {
|
|
1004
|
-
basePath = process.cwd(),
|
|
1005
|
-
customPaths = [],
|
|
1006
|
-
debug = false,
|
|
1007
|
-
nodeEnv = process.env.NODE_ENV || "",
|
|
1008
|
-
required = [],
|
|
1009
|
-
useCache = true
|
|
1010
|
-
} = options;
|
|
1011
|
-
if (useCache && environmentLoaded && cachedLoadResult) {
|
|
1012
|
-
if (debug) {
|
|
1013
|
-
envLogger.debug("Returning cached environment", {
|
|
1014
|
-
loaded: cachedLoadResult.loaded.length,
|
|
1015
|
-
variables: Object.keys(cachedLoadResult.parsed).length
|
|
1016
|
-
});
|
|
1017
|
-
}
|
|
1018
|
-
return cachedLoadResult;
|
|
1019
|
-
}
|
|
1020
|
-
if (debug) {
|
|
1021
|
-
envLogger.debug("Loading environment variables", {
|
|
1022
|
-
basePath,
|
|
1023
|
-
nodeEnv,
|
|
1024
|
-
customPaths,
|
|
1025
|
-
required
|
|
1026
|
-
});
|
|
1027
|
-
}
|
|
1028
|
-
const result = {
|
|
1029
|
-
success: true,
|
|
1030
|
-
loaded: [],
|
|
1031
|
-
failed: [],
|
|
1032
|
-
parsed: {},
|
|
1033
|
-
warnings: []
|
|
1034
|
-
};
|
|
1035
|
-
const standardFiles = buildFileList(basePath, nodeEnv);
|
|
1036
|
-
const allFiles = [...standardFiles, ...customPaths];
|
|
1037
|
-
if (debug) {
|
|
1038
|
-
envLogger.debug("Environment files to load", {
|
|
1039
|
-
standardFiles,
|
|
1040
|
-
customPaths,
|
|
1041
|
-
total: allFiles.length
|
|
1042
|
-
});
|
|
1043
|
-
}
|
|
1044
|
-
const reversedFiles = [...allFiles].reverse();
|
|
1045
|
-
for (const filePath of reversedFiles) {
|
|
1046
|
-
const fileResult = loadSingleFile(filePath, debug);
|
|
1047
|
-
if (fileResult.success) {
|
|
1048
|
-
result.loaded.push(filePath);
|
|
1049
|
-
Object.assign(result.parsed, fileResult.parsed);
|
|
1050
|
-
if (fileResult.parsed["NODE_ENV"]) {
|
|
1051
|
-
const fileName = filePath.split("/").pop() || filePath;
|
|
1052
|
-
result.warnings.push(
|
|
1053
|
-
`NODE_ENV found in ${fileName}. It's recommended to set NODE_ENV via CLI (e.g., 'spfn dev', 'spfn build') instead of .env files for consistent environment behavior.`
|
|
1054
|
-
);
|
|
1055
|
-
}
|
|
1056
|
-
} else if (fileResult.error) {
|
|
1057
|
-
result.failed.push({
|
|
1058
|
-
path: filePath,
|
|
1059
|
-
reason: fileResult.error
|
|
1060
|
-
});
|
|
1061
|
-
}
|
|
1062
|
-
}
|
|
1063
|
-
if (debug || result.loaded.length > 0) {
|
|
1064
|
-
envLogger.info("Environment loading complete", {
|
|
1065
|
-
loaded: result.loaded.length,
|
|
1066
|
-
failed: result.failed.length,
|
|
1067
|
-
variables: Object.keys(result.parsed).length,
|
|
1068
|
-
files: result.loaded
|
|
1069
|
-
});
|
|
1070
|
-
}
|
|
1071
|
-
if (required.length > 0) {
|
|
1072
|
-
try {
|
|
1073
|
-
validateRequiredVars(required, debug);
|
|
1074
|
-
} catch (error) {
|
|
1075
|
-
result.success = false;
|
|
1076
|
-
result.errors = [
|
|
1077
|
-
error instanceof Error ? error.message : "Validation failed"
|
|
1078
|
-
];
|
|
1079
|
-
throw error;
|
|
1080
|
-
}
|
|
1081
|
-
}
|
|
1082
|
-
if (result.warnings.length > 0) {
|
|
1083
|
-
for (const warning of result.warnings) {
|
|
1084
|
-
envLogger.warn(warning);
|
|
1085
|
-
}
|
|
1086
|
-
}
|
|
1087
|
-
environmentLoaded = true;
|
|
1088
|
-
cachedLoadResult = result;
|
|
1089
|
-
return result;
|
|
1090
|
-
}
|
|
1091
|
-
var envLogger, environmentLoaded, cachedLoadResult;
|
|
1092
|
-
var init_loader = __esm({
|
|
1093
|
-
"src/env/loader.ts"() {
|
|
1094
|
-
init_logger2();
|
|
1095
|
-
init_config2();
|
|
1096
|
-
envLogger = logger.child("environment");
|
|
1097
|
-
environmentLoaded = false;
|
|
1098
|
-
}
|
|
1099
|
-
});
|
|
1100
|
-
|
|
1101
|
-
// src/env/validator.ts
|
|
1102
|
-
var init_validator = __esm({
|
|
1103
|
-
"src/env/validator.ts"() {
|
|
1104
|
-
}
|
|
1105
|
-
});
|
|
1106
|
-
|
|
1107
|
-
// src/env/index.ts
|
|
1108
|
-
var init_env = __esm({
|
|
1109
|
-
"src/env/index.ts"() {
|
|
1110
|
-
init_loader();
|
|
1111
|
-
init_config2();
|
|
1112
|
-
init_validator();
|
|
1113
|
-
}
|
|
1114
|
-
});
|
|
1115
|
-
|
|
1116
|
-
// src/db/postgres-errors.ts
|
|
1117
|
-
function parseUniqueViolation(message) {
|
|
1118
|
-
const patterns = [
|
|
1119
|
-
// Standard format: Key (field)=(value)
|
|
1120
|
-
/Key \(([^)]+)\)=\(([^)]+)\)/i,
|
|
1121
|
-
// With quotes: Key ("field")=('value')
|
|
1122
|
-
/Key \(["']?([^)"']+)["']?\)=\(["']?([^)"']+)["']?\)/i,
|
|
1123
|
-
// Alternative format
|
|
1124
|
-
/Key `([^`]+)`=`([^`]+)`/i
|
|
1125
|
-
];
|
|
1126
|
-
for (const pattern of patterns) {
|
|
1127
|
-
const match = message.match(pattern);
|
|
1128
|
-
if (match) {
|
|
1129
|
-
const field = match[1].trim().replace(/["'`]/g, "");
|
|
1130
|
-
const value = match[2].trim().replace(/["'`]/g, "");
|
|
1131
|
-
return { field, value };
|
|
1132
|
-
}
|
|
1133
|
-
}
|
|
1134
|
-
return null;
|
|
1135
|
-
}
|
|
1136
|
-
function fromPostgresError(error) {
|
|
1137
|
-
const code = error?.code;
|
|
1138
|
-
const message = error?.message || "Database error occurred";
|
|
1139
|
-
switch (code) {
|
|
1140
|
-
// Class 08 — Connection Exception
|
|
1141
|
-
case "08000":
|
|
1142
|
-
// connection_exception
|
|
1143
|
-
case "08001":
|
|
1144
|
-
// sqlclient_unable_to_establish_sqlconnection
|
|
1145
|
-
case "08003":
|
|
1146
|
-
// connection_does_not_exist
|
|
1147
|
-
case "08004":
|
|
1148
|
-
// sqlserver_rejected_establishment_of_sqlconnection
|
|
1149
|
-
case "08006":
|
|
1150
|
-
// connection_failure
|
|
1151
|
-
case "08007":
|
|
1152
|
-
// transaction_resolution_unknown
|
|
1153
|
-
case "08P01":
|
|
1154
|
-
return new ConnectionError(message, { code });
|
|
1155
|
-
// Class 23 — Integrity Constraint Violation
|
|
1156
|
-
case "23000":
|
|
1157
|
-
// integrity_constraint_violation
|
|
1158
|
-
case "23001":
|
|
1159
|
-
return new ConstraintViolationError(message, { code, constraint: "integrity" });
|
|
1160
|
-
case "23502":
|
|
1161
|
-
return new ConstraintViolationError(message, { code, constraint: "not_null" });
|
|
1162
|
-
case "23503":
|
|
1163
|
-
return new ConstraintViolationError(message, { code, constraint: "foreign_key" });
|
|
1164
|
-
case "23505":
|
|
1165
|
-
const parsed = parseUniqueViolation(message);
|
|
1166
|
-
if (parsed) {
|
|
1167
|
-
return new DuplicateEntryError(parsed.field, parsed.value);
|
|
1168
|
-
}
|
|
1169
|
-
return new DuplicateEntryError("field", "value");
|
|
1170
|
-
case "23514":
|
|
1171
|
-
return new ConstraintViolationError(message, { code, constraint: "check" });
|
|
1172
|
-
// Class 40 — Transaction Rollback
|
|
1173
|
-
case "40000":
|
|
1174
|
-
// transaction_rollback
|
|
1175
|
-
case "40001":
|
|
1176
|
-
// serialization_failure
|
|
1177
|
-
case "40002":
|
|
1178
|
-
// transaction_integrity_constraint_violation
|
|
1179
|
-
case "40003":
|
|
1180
|
-
return new TransactionError(message, 500, { code });
|
|
1181
|
-
case "40P01":
|
|
1182
|
-
return new DeadlockError(message, { code });
|
|
1183
|
-
// Class 42 — Syntax Error or Access Rule Violation
|
|
1184
|
-
case "42000":
|
|
1185
|
-
// syntax_error_or_access_rule_violation
|
|
1186
|
-
case "42601":
|
|
1187
|
-
// syntax_error
|
|
1188
|
-
case "42501":
|
|
1189
|
-
// insufficient_privilege
|
|
1190
|
-
case "42602":
|
|
1191
|
-
// invalid_name
|
|
1192
|
-
case "42622":
|
|
1193
|
-
// name_too_long
|
|
1194
|
-
case "42701":
|
|
1195
|
-
// duplicate_column
|
|
1196
|
-
case "42702":
|
|
1197
|
-
// ambiguous_column
|
|
1198
|
-
case "42703":
|
|
1199
|
-
// undefined_column
|
|
1200
|
-
case "42704":
|
|
1201
|
-
// undefined_object
|
|
1202
|
-
case "42P01":
|
|
1203
|
-
// undefined_table
|
|
1204
|
-
case "42P02":
|
|
1205
|
-
return new QueryError(message, 400, { code });
|
|
1206
|
-
// Class 53 — Insufficient Resources
|
|
1207
|
-
case "53000":
|
|
1208
|
-
// insufficient_resources
|
|
1209
|
-
case "53100":
|
|
1210
|
-
// disk_full
|
|
1211
|
-
case "53200":
|
|
1212
|
-
// out_of_memory
|
|
1213
|
-
case "53300":
|
|
1214
|
-
return new ConnectionError(message, { code });
|
|
1215
|
-
// Class 57 — Operator Intervention
|
|
1216
|
-
case "57000":
|
|
1217
|
-
// operator_intervention
|
|
1218
|
-
case "57014":
|
|
1219
|
-
// query_canceled
|
|
1220
|
-
case "57P01":
|
|
1221
|
-
// admin_shutdown
|
|
1222
|
-
case "57P02":
|
|
1223
|
-
// crash_shutdown
|
|
1224
|
-
case "57P03":
|
|
1225
|
-
return new ConnectionError(message, { code });
|
|
1226
|
-
// Default: Unknown error
|
|
1227
|
-
default:
|
|
1228
|
-
return new QueryError(message, 500, { code });
|
|
1229
|
-
}
|
|
1230
|
-
}
|
|
1231
|
-
var init_postgres_errors = __esm({
|
|
1232
|
-
"src/db/postgres-errors.ts"() {
|
|
1233
|
-
init_errors();
|
|
1234
|
-
}
|
|
1235
|
-
});
|
|
1236
|
-
function delay(ms) {
|
|
1237
|
-
return new Promise((resolve) => setTimeout(resolve, ms));
|
|
1238
|
-
}
|
|
1239
|
-
async function createDatabaseConnection(connectionString, poolConfig, retryConfig) {
|
|
1240
|
-
let lastError;
|
|
1241
|
-
for (let attempt = 0; attempt <= retryConfig.maxRetries; attempt++) {
|
|
1242
|
-
try {
|
|
1243
|
-
const client = postgres(connectionString, {
|
|
1244
|
-
max: poolConfig.max,
|
|
1245
|
-
idle_timeout: poolConfig.idleTimeout
|
|
1246
|
-
});
|
|
1247
|
-
await client`SELECT 1 as test`;
|
|
1248
|
-
if (attempt > 0) {
|
|
1249
|
-
dbLogger.info(`Database connected successfully after ${attempt} retries`);
|
|
1250
|
-
} else {
|
|
1251
|
-
dbLogger.info("Database connected successfully");
|
|
1252
|
-
}
|
|
1253
|
-
return client;
|
|
1254
|
-
} catch (error) {
|
|
1255
|
-
lastError = fromPostgresError(error);
|
|
1256
|
-
if (attempt < retryConfig.maxRetries) {
|
|
1257
|
-
const delayMs = Math.min(
|
|
1258
|
-
retryConfig.initialDelay * Math.pow(retryConfig.factor, attempt),
|
|
1259
|
-
retryConfig.maxDelay
|
|
1260
|
-
);
|
|
1261
|
-
dbLogger.warn(
|
|
1262
|
-
`Connection failed (attempt ${attempt + 1}/${retryConfig.maxRetries + 1}), retrying in ${delayMs}ms...`,
|
|
1263
|
-
lastError,
|
|
1264
|
-
{
|
|
1265
|
-
attempt: attempt + 1,
|
|
1266
|
-
maxRetries: retryConfig.maxRetries + 1,
|
|
1267
|
-
delayMs
|
|
1268
|
-
}
|
|
1269
|
-
);
|
|
1270
|
-
await delay(delayMs);
|
|
1271
|
-
}
|
|
1272
|
-
}
|
|
1273
|
-
}
|
|
1274
|
-
const errorMessage = `Failed to connect to database after ${retryConfig.maxRetries + 1} attempts: ${lastError?.message || "Unknown error"}`;
|
|
1275
|
-
throw new ConnectionError(errorMessage);
|
|
1276
|
-
}
|
|
1277
|
-
async function checkConnection(client) {
|
|
1278
|
-
try {
|
|
1279
|
-
await client`SELECT 1 as health_check`;
|
|
1280
|
-
return true;
|
|
1281
|
-
} catch (error) {
|
|
1282
|
-
dbLogger.error("Database health check failed", error);
|
|
1283
|
-
return false;
|
|
1284
|
-
}
|
|
1285
|
-
}
|
|
1286
|
-
var dbLogger;
|
|
1287
|
-
var init_connection = __esm({
|
|
1288
|
-
"src/db/manager/connection.ts"() {
|
|
1289
|
-
init_logger2();
|
|
1290
|
-
init_errors();
|
|
1291
|
-
init_postgres_errors();
|
|
1292
|
-
dbLogger = logger.child("database");
|
|
1293
|
-
}
|
|
1294
|
-
});
|
|
1295
|
-
|
|
1296
|
-
// src/db/manager/config.ts
|
|
1297
|
-
function parseEnvNumber(key, prodDefault, devDefault) {
|
|
1298
|
-
const isProduction = process.env.NODE_ENV === "production";
|
|
1299
|
-
const envValue = parseInt(process.env[key] || "", 10);
|
|
1300
|
-
return isNaN(envValue) ? isProduction ? prodDefault : devDefault : envValue;
|
|
1301
|
-
}
|
|
1302
|
-
function parseEnvBoolean(key, defaultValue) {
|
|
1303
|
-
const value = process.env[key];
|
|
1304
|
-
if (value === void 0) return defaultValue;
|
|
1305
|
-
return value.toLowerCase() === "true";
|
|
1306
|
-
}
|
|
1307
|
-
function getPoolConfig(options) {
|
|
1308
|
-
return {
|
|
1309
|
-
max: options?.max ?? parseEnvNumber("DB_POOL_MAX", 20, 10),
|
|
1310
|
-
idleTimeout: options?.idleTimeout ?? parseEnvNumber("DB_POOL_IDLE_TIMEOUT", 30, 20)
|
|
1311
|
-
};
|
|
1312
|
-
}
|
|
1313
|
-
function getRetryConfig() {
|
|
1314
|
-
return {
|
|
1315
|
-
maxRetries: parseEnvNumber("DB_RETRY_MAX", 5, 3),
|
|
1316
|
-
initialDelay: parseEnvNumber("DB_RETRY_INITIAL_DELAY", 100, 50),
|
|
1317
|
-
maxDelay: parseEnvNumber("DB_RETRY_MAX_DELAY", 1e4, 5e3),
|
|
1318
|
-
factor: parseEnvNumber("DB_RETRY_FACTOR", 2, 2)
|
|
1319
|
-
};
|
|
1320
|
-
}
|
|
1321
|
-
function buildHealthCheckConfig(options) {
|
|
1322
|
-
return {
|
|
1323
|
-
enabled: options?.enabled ?? parseEnvBoolean("DB_HEALTH_CHECK_ENABLED", true),
|
|
1324
|
-
interval: options?.interval ?? parseEnvNumber("DB_HEALTH_CHECK_INTERVAL", 6e4, 6e4),
|
|
1325
|
-
reconnect: options?.reconnect ?? parseEnvBoolean("DB_HEALTH_CHECK_RECONNECT", true),
|
|
1326
|
-
maxRetries: options?.maxRetries ?? parseEnvNumber("DB_HEALTH_CHECK_MAX_RETRIES", 3, 3),
|
|
1327
|
-
retryInterval: options?.retryInterval ?? parseEnvNumber("DB_HEALTH_CHECK_RETRY_INTERVAL", 5e3, 5e3)
|
|
1328
|
-
};
|
|
1329
|
-
}
|
|
1330
|
-
function buildMonitoringConfig(options) {
|
|
1331
|
-
const isDevelopment = process.env.NODE_ENV !== "production";
|
|
1332
|
-
return {
|
|
1333
|
-
enabled: options?.enabled ?? parseEnvBoolean("DB_MONITORING_ENABLED", isDevelopment),
|
|
1334
|
-
slowThreshold: options?.slowThreshold ?? parseEnvNumber("DB_MONITORING_SLOW_THRESHOLD", 1e3, 1e3),
|
|
1335
|
-
logQueries: options?.logQueries ?? parseEnvBoolean("DB_MONITORING_LOG_QUERIES", false)
|
|
1336
|
-
};
|
|
1337
|
-
}
|
|
1338
|
-
var init_config3 = __esm({
|
|
1339
|
-
"src/db/manager/config.ts"() {
|
|
1340
|
-
}
|
|
1341
|
-
});
|
|
1342
|
-
function hasDatabaseConfig() {
|
|
1343
|
-
return !!(process.env.DATABASE_URL || process.env.DATABASE_WRITE_URL || process.env.DATABASE_READ_URL);
|
|
1344
|
-
}
|
|
1345
|
-
function detectDatabasePattern() {
|
|
1346
|
-
if (process.env.DATABASE_WRITE_URL && process.env.DATABASE_READ_URL) {
|
|
1347
|
-
return {
|
|
1348
|
-
type: "write-read",
|
|
1349
|
-
write: process.env.DATABASE_WRITE_URL,
|
|
1350
|
-
read: process.env.DATABASE_READ_URL
|
|
1351
|
-
};
|
|
1352
|
-
}
|
|
1353
|
-
if (process.env.DATABASE_URL && process.env.DATABASE_REPLICA_URL) {
|
|
1354
|
-
return {
|
|
1355
|
-
type: "legacy",
|
|
1356
|
-
primary: process.env.DATABASE_URL,
|
|
1357
|
-
replica: process.env.DATABASE_REPLICA_URL
|
|
1358
|
-
};
|
|
1359
|
-
}
|
|
1360
|
-
if (process.env.DATABASE_URL) {
|
|
1361
|
-
return {
|
|
1362
|
-
type: "single",
|
|
1363
|
-
url: process.env.DATABASE_URL
|
|
1364
|
-
};
|
|
1365
|
-
}
|
|
1366
|
-
if (process.env.DATABASE_WRITE_URL) {
|
|
1367
|
-
return {
|
|
1368
|
-
type: "single",
|
|
1369
|
-
url: process.env.DATABASE_WRITE_URL
|
|
1370
|
-
};
|
|
1371
|
-
}
|
|
1372
|
-
return { type: "none" };
|
|
1373
|
-
}
|
|
1374
|
-
async function createWriteReadClients(writeUrl, readUrl, poolConfig, retryConfig) {
|
|
1375
|
-
const writeClient = await createDatabaseConnection(writeUrl, poolConfig, retryConfig);
|
|
1376
|
-
const readClient = await createDatabaseConnection(readUrl, poolConfig, retryConfig);
|
|
1377
|
-
return {
|
|
1378
|
-
write: drizzle(writeClient),
|
|
1379
|
-
read: drizzle(readClient),
|
|
1380
|
-
writeClient,
|
|
1381
|
-
readClient
|
|
1382
|
-
};
|
|
1383
|
-
}
|
|
1384
|
-
async function createSingleClient(url, poolConfig, retryConfig) {
|
|
1385
|
-
const client = await createDatabaseConnection(url, poolConfig, retryConfig);
|
|
1386
|
-
const db = drizzle(client);
|
|
1387
|
-
return {
|
|
1388
|
-
write: db,
|
|
1389
|
-
read: db,
|
|
1390
|
-
writeClient: client,
|
|
1391
|
-
readClient: client
|
|
1392
|
-
};
|
|
1393
|
-
}
|
|
1394
|
-
async function createDatabaseFromEnv(options) {
|
|
1395
|
-
if (!hasDatabaseConfig()) {
|
|
1396
|
-
dbLogger2.debug("No DATABASE_URL found, loading environment variables");
|
|
1397
|
-
const result = loadEnvironment({
|
|
1398
|
-
debug: true
|
|
1399
|
-
});
|
|
1400
|
-
dbLogger2.debug("Environment variables loaded", {
|
|
1401
|
-
success: result.success,
|
|
1402
|
-
loaded: result.loaded.length,
|
|
1403
|
-
hasDatabaseUrl: !!process.env.DATABASE_URL,
|
|
1404
|
-
hasWriteUrl: !!process.env.DATABASE_WRITE_URL,
|
|
1405
|
-
hasReadUrl: !!process.env.DATABASE_READ_URL
|
|
1406
|
-
});
|
|
1407
|
-
}
|
|
1408
|
-
if (!hasDatabaseConfig()) {
|
|
1409
|
-
dbLogger2.warn("No database configuration found", {
|
|
1410
|
-
cwd: process.cwd(),
|
|
1411
|
-
nodeEnv: process.env.NODE_ENV,
|
|
1412
|
-
checkedVars: ["DATABASE_URL", "DATABASE_WRITE_URL", "DATABASE_READ_URL"]
|
|
1413
|
-
});
|
|
1414
|
-
return { write: void 0, read: void 0 };
|
|
1415
|
-
}
|
|
1416
|
-
try {
|
|
1417
|
-
const poolConfig = getPoolConfig(options?.pool);
|
|
1418
|
-
const retryConfig = getRetryConfig();
|
|
1419
|
-
const pattern = detectDatabasePattern();
|
|
1420
|
-
switch (pattern.type) {
|
|
1421
|
-
case "write-read":
|
|
1422
|
-
dbLogger2.debug("Using write-read pattern", {
|
|
1423
|
-
write: pattern.write.replace(/:[^:@]+@/, ":***@"),
|
|
1424
|
-
read: pattern.read.replace(/:[^:@]+@/, ":***@")
|
|
1425
|
-
});
|
|
1426
|
-
return await createWriteReadClients(
|
|
1427
|
-
pattern.write,
|
|
1428
|
-
pattern.read,
|
|
1429
|
-
poolConfig,
|
|
1430
|
-
retryConfig
|
|
1431
|
-
);
|
|
1432
|
-
case "legacy":
|
|
1433
|
-
dbLogger2.debug("Using legacy replica pattern", {
|
|
1434
|
-
primary: pattern.primary.replace(/:[^:@]+@/, ":***@"),
|
|
1435
|
-
replica: pattern.replica.replace(/:[^:@]+@/, ":***@")
|
|
1436
|
-
});
|
|
1437
|
-
return await createWriteReadClients(
|
|
1438
|
-
pattern.primary,
|
|
1439
|
-
pattern.replica,
|
|
1440
|
-
poolConfig,
|
|
1441
|
-
retryConfig
|
|
1442
|
-
);
|
|
1443
|
-
case "single":
|
|
1444
|
-
dbLogger2.debug("Using single database pattern", {
|
|
1445
|
-
url: pattern.url.replace(/:[^:@]+@/, ":***@")
|
|
1446
|
-
});
|
|
1447
|
-
return await createSingleClient(pattern.url, poolConfig, retryConfig);
|
|
1448
|
-
case "none":
|
|
1449
|
-
dbLogger2.warn("No database pattern detected");
|
|
1450
|
-
return { write: void 0, read: void 0 };
|
|
1451
|
-
}
|
|
1452
|
-
} catch (error) {
|
|
1453
|
-
const message = error instanceof Error ? error.message : "Unknown error";
|
|
1454
|
-
dbLogger2.error("Failed to create database connection", {
|
|
1455
|
-
error: message,
|
|
1456
|
-
stage: "initialization",
|
|
1457
|
-
hasWriteUrl: !!process.env.DATABASE_WRITE_URL,
|
|
1458
|
-
hasReadUrl: !!process.env.DATABASE_READ_URL,
|
|
1459
|
-
hasUrl: !!process.env.DATABASE_URL,
|
|
1460
|
-
hasReplicaUrl: !!process.env.DATABASE_REPLICA_URL
|
|
1461
|
-
});
|
|
1462
|
-
throw new Error(`Database connection failed: ${message}`, { cause: error });
|
|
1463
|
-
}
|
|
1464
|
-
}
|
|
1465
|
-
var dbLogger2;
|
|
1466
|
-
var init_factory2 = __esm({
|
|
1467
|
-
"src/db/manager/factory.ts"() {
|
|
1468
|
-
init_logger2();
|
|
1469
|
-
init_env();
|
|
1470
|
-
init_connection();
|
|
1471
|
-
init_config3();
|
|
1472
|
-
dbLogger2 = logger.child("database");
|
|
1473
|
-
}
|
|
1474
|
-
});
|
|
1475
|
-
|
|
1476
|
-
// src/db/manager/global-state.ts
|
|
1477
|
-
var getWriteInstance, setWriteInstance, getReadInstance, setReadInstance, getWriteClient, setWriteClient, getReadClient, setReadClient, getHealthCheckInterval, setHealthCheckInterval, setMonitoringConfig;
|
|
1478
|
-
var init_global_state = __esm({
|
|
1479
|
-
"src/db/manager/global-state.ts"() {
|
|
1480
|
-
getWriteInstance = () => globalThis.__SPFN_DB_WRITE__;
|
|
1481
|
-
setWriteInstance = (instance) => {
|
|
1482
|
-
globalThis.__SPFN_DB_WRITE__ = instance;
|
|
1483
|
-
};
|
|
1484
|
-
getReadInstance = () => globalThis.__SPFN_DB_READ__;
|
|
1485
|
-
setReadInstance = (instance) => {
|
|
1486
|
-
globalThis.__SPFN_DB_READ__ = instance;
|
|
1487
|
-
};
|
|
1488
|
-
getWriteClient = () => globalThis.__SPFN_DB_WRITE_CLIENT__;
|
|
1489
|
-
setWriteClient = (client) => {
|
|
1490
|
-
globalThis.__SPFN_DB_WRITE_CLIENT__ = client;
|
|
1491
|
-
};
|
|
1492
|
-
getReadClient = () => globalThis.__SPFN_DB_READ_CLIENT__;
|
|
1493
|
-
setReadClient = (client) => {
|
|
1494
|
-
globalThis.__SPFN_DB_READ_CLIENT__ = client;
|
|
1495
|
-
};
|
|
1496
|
-
getHealthCheckInterval = () => globalThis.__SPFN_DB_HEALTH_CHECK__;
|
|
1497
|
-
setHealthCheckInterval = (interval) => {
|
|
1498
|
-
globalThis.__SPFN_DB_HEALTH_CHECK__ = interval;
|
|
1499
|
-
};
|
|
1500
|
-
setMonitoringConfig = (config) => {
|
|
1501
|
-
globalThis.__SPFN_DB_MONITORING__ = config;
|
|
1502
|
-
};
|
|
1503
|
-
}
|
|
1504
|
-
});
|
|
1505
|
-
|
|
1506
|
-
// src/db/manager/health-check.ts
|
|
1507
|
-
function startHealthCheck(config, options, getDatabase2, closeDatabase2) {
|
|
1508
|
-
const healthCheck = getHealthCheckInterval();
|
|
1509
|
-
if (healthCheck) {
|
|
1510
|
-
dbLogger3.debug("Health check already running");
|
|
1511
|
-
return;
|
|
1512
|
-
}
|
|
1513
|
-
dbLogger3.info("Starting database health check", {
|
|
1514
|
-
interval: `${config.interval}ms`,
|
|
1515
|
-
reconnect: config.reconnect
|
|
1516
|
-
});
|
|
1517
|
-
const interval = setInterval(async () => {
|
|
1518
|
-
try {
|
|
1519
|
-
const write = getDatabase2("write");
|
|
1520
|
-
const read = getDatabase2("read");
|
|
1521
|
-
if (write) {
|
|
1522
|
-
await write.execute("SELECT 1");
|
|
1523
|
-
}
|
|
1524
|
-
if (read && read !== write) {
|
|
1525
|
-
await read.execute("SELECT 1");
|
|
1526
|
-
}
|
|
1527
|
-
} catch (error) {
|
|
1528
|
-
const message = error instanceof Error ? error.message : "Unknown error";
|
|
1529
|
-
dbLogger3.error("Database health check failed", { error: message });
|
|
1530
|
-
if (config.reconnect) {
|
|
1531
|
-
await attemptReconnection(config, options, closeDatabase2);
|
|
1532
|
-
}
|
|
1533
|
-
}
|
|
1534
|
-
}, config.interval);
|
|
1535
|
-
setHealthCheckInterval(interval);
|
|
1536
|
-
}
|
|
1537
|
-
async function attemptReconnection(config, options, closeDatabase2) {
|
|
1538
|
-
dbLogger3.warn("Attempting database reconnection", {
|
|
1539
|
-
maxRetries: config.maxRetries,
|
|
1540
|
-
retryInterval: `${config.retryInterval}ms`
|
|
1541
|
-
});
|
|
1542
|
-
for (let attempt = 1; attempt <= config.maxRetries; attempt++) {
|
|
1543
|
-
try {
|
|
1544
|
-
dbLogger3.debug(`Reconnection attempt ${attempt}/${config.maxRetries}`);
|
|
1545
|
-
await closeDatabase2();
|
|
1546
|
-
await new Promise((resolve) => setTimeout(resolve, config.retryInterval));
|
|
1547
|
-
const result = await createDatabaseFromEnv(options);
|
|
1548
|
-
if (result.write) {
|
|
1549
|
-
await result.write.execute("SELECT 1");
|
|
1550
|
-
setWriteInstance(result.write);
|
|
1551
|
-
setReadInstance(result.read);
|
|
1552
|
-
setWriteClient(result.writeClient);
|
|
1553
|
-
setReadClient(result.readClient);
|
|
1554
|
-
dbLogger3.info("Database reconnection successful", { attempt });
|
|
1555
|
-
return;
|
|
1556
|
-
}
|
|
1557
|
-
} catch (error) {
|
|
1558
|
-
const message = error instanceof Error ? error.message : "Unknown error";
|
|
1559
|
-
dbLogger3.error(`Reconnection attempt ${attempt} failed`, {
|
|
1560
|
-
error: message,
|
|
1561
|
-
attempt,
|
|
1562
|
-
maxRetries: config.maxRetries
|
|
1563
|
-
});
|
|
1564
|
-
if (attempt === config.maxRetries) {
|
|
1565
|
-
dbLogger3.error("Max reconnection attempts reached, giving up");
|
|
1566
|
-
}
|
|
1567
|
-
}
|
|
1568
|
-
}
|
|
1569
|
-
}
|
|
1570
|
-
function stopHealthCheck() {
|
|
1571
|
-
const healthCheck = getHealthCheckInterval();
|
|
1572
|
-
if (healthCheck) {
|
|
1573
|
-
clearInterval(healthCheck);
|
|
1574
|
-
setHealthCheckInterval(void 0);
|
|
1575
|
-
dbLogger3.info("Database health check stopped");
|
|
1576
|
-
}
|
|
1577
|
-
}
|
|
1578
|
-
var dbLogger3;
|
|
1579
|
-
var init_health_check = __esm({
|
|
1580
|
-
"src/db/manager/health-check.ts"() {
|
|
1581
|
-
init_logger2();
|
|
1582
|
-
init_factory2();
|
|
1583
|
-
init_global_state();
|
|
1584
|
-
dbLogger3 = logger.child("database");
|
|
1585
|
-
}
|
|
1586
|
-
});
|
|
1587
|
-
|
|
1588
|
-
// src/db/manager/manager.ts
|
|
1589
|
-
function getCallerInfo() {
|
|
1590
|
-
try {
|
|
1591
|
-
const stack = new Error().stack;
|
|
1592
|
-
if (!stack) return void 0;
|
|
1593
|
-
const lines = stack.split("\n");
|
|
1594
|
-
for (let i = 3; i < lines.length; i++) {
|
|
1595
|
-
const line = lines[i];
|
|
1596
|
-
if (!line.includes("node_modules") && !line.includes("/db/manager/")) {
|
|
1597
|
-
const match = line.match(/\((.+):(\d+):(\d+)\)/) || line.match(/at (.+):(\d+):(\d+)/);
|
|
1598
|
-
if (match) {
|
|
1599
|
-
const fullPath = match[1];
|
|
1600
|
-
const parts = fullPath.split("/");
|
|
1601
|
-
const srcIndex = parts.lastIndexOf("src");
|
|
1602
|
-
if (srcIndex !== -1) {
|
|
1603
|
-
const relativePath = parts.slice(srcIndex).join("/");
|
|
1604
|
-
return `${relativePath}:${match[2]}`;
|
|
1605
|
-
}
|
|
1606
|
-
return `${fullPath}:${match[2]}`;
|
|
1607
|
-
}
|
|
1608
|
-
break;
|
|
1609
|
-
}
|
|
1610
|
-
}
|
|
1611
|
-
} catch {
|
|
1612
|
-
}
|
|
1613
|
-
return void 0;
|
|
1614
|
-
}
|
|
1615
|
-
function getDatabase(type) {
|
|
1616
|
-
const writeInst = getWriteInstance();
|
|
1617
|
-
const readInst = getReadInstance();
|
|
1618
|
-
if (process.env.DB_DEBUG_TRACE === "true") {
|
|
1619
|
-
const caller = getCallerInfo();
|
|
1620
|
-
dbLogger4.debug("getDatabase() called", {
|
|
1621
|
-
type: type || "write",
|
|
1622
|
-
hasWrite: !!writeInst,
|
|
1623
|
-
hasRead: !!readInst,
|
|
1624
|
-
caller
|
|
1625
|
-
});
|
|
1626
|
-
}
|
|
1627
|
-
if (type === "read") {
|
|
1628
|
-
return readInst ?? writeInst;
|
|
1629
|
-
}
|
|
1630
|
-
return writeInst;
|
|
1631
|
-
}
|
|
1632
|
-
function setDatabase(write, read) {
|
|
1633
|
-
setWriteInstance(write);
|
|
1634
|
-
setReadInstance(read ?? write);
|
|
1635
|
-
}
|
|
1636
|
-
async function initDatabase(options) {
|
|
1637
|
-
const writeInst = getWriteInstance();
|
|
1638
|
-
if (writeInst) {
|
|
1639
|
-
dbLogger4.debug("Database already initialized");
|
|
1640
|
-
return { write: writeInst, read: getReadInstance() };
|
|
1641
|
-
}
|
|
1642
|
-
const result = await createDatabaseFromEnv(options);
|
|
1643
|
-
if (result.write) {
|
|
1644
|
-
try {
|
|
1645
|
-
await result.write.execute("SELECT 1");
|
|
1646
|
-
if (result.read && result.read !== result.write) {
|
|
1647
|
-
await result.read.execute("SELECT 1");
|
|
1648
|
-
}
|
|
1649
|
-
setWriteInstance(result.write);
|
|
1650
|
-
setReadInstance(result.read);
|
|
1651
|
-
setWriteClient(result.writeClient);
|
|
1652
|
-
setReadClient(result.readClient);
|
|
1653
|
-
const hasReplica = result.read && result.read !== result.write;
|
|
1654
|
-
dbLogger4.info(
|
|
1655
|
-
hasReplica ? "Database connected (Primary + Replica)" : "Database connected"
|
|
1656
|
-
);
|
|
1657
|
-
const healthCheckConfig = buildHealthCheckConfig(options?.healthCheck);
|
|
1658
|
-
if (healthCheckConfig.enabled) {
|
|
1659
|
-
startHealthCheck(healthCheckConfig, options, getDatabase, closeDatabase);
|
|
1660
|
-
}
|
|
1661
|
-
const monConfig = buildMonitoringConfig(options?.monitoring);
|
|
1662
|
-
setMonitoringConfig(monConfig);
|
|
1663
|
-
if (monConfig.enabled) {
|
|
1664
|
-
dbLogger4.info("Database query monitoring enabled", {
|
|
1665
|
-
slowThreshold: `${monConfig.slowThreshold}ms`,
|
|
1666
|
-
logQueries: monConfig.logQueries
|
|
1667
|
-
});
|
|
1668
|
-
}
|
|
1669
|
-
} catch (error) {
|
|
1670
|
-
const message = error instanceof Error ? error.message : "Unknown error";
|
|
1671
|
-
dbLogger4.error("Database connection failed", { error: message });
|
|
1672
|
-
await closeDatabase();
|
|
1673
|
-
throw new Error(`Database connection test failed: ${message}`, { cause: error });
|
|
1674
|
-
}
|
|
1675
|
-
} else {
|
|
1676
|
-
dbLogger4.warn("No database configuration found");
|
|
1677
|
-
dbLogger4.warn("Set DATABASE_URL environment variable to enable database");
|
|
1678
|
-
}
|
|
1679
|
-
return { write: getWriteInstance(), read: getReadInstance() };
|
|
1680
|
-
}
|
|
1681
|
-
async function closeDatabase() {
|
|
1682
|
-
const writeInst = getWriteInstance();
|
|
1683
|
-
const readInst = getReadInstance();
|
|
1684
|
-
if (!writeInst && !readInst) {
|
|
1685
|
-
dbLogger4.debug("No database connections to close");
|
|
1686
|
-
return;
|
|
1687
|
-
}
|
|
1688
|
-
stopHealthCheck();
|
|
1689
|
-
try {
|
|
1690
|
-
const closePromises = [];
|
|
1691
|
-
const writeC = getWriteClient();
|
|
1692
|
-
if (writeC) {
|
|
1693
|
-
dbLogger4.debug("Closing write connection...");
|
|
1694
|
-
closePromises.push(
|
|
1695
|
-
writeC.end({ timeout: 5 }).then(() => dbLogger4.debug("Write connection closed")).catch((err) => dbLogger4.error("Error closing write connection", err))
|
|
1696
|
-
);
|
|
1697
|
-
}
|
|
1698
|
-
const readC = getReadClient();
|
|
1699
|
-
if (readC && readC !== writeC) {
|
|
1700
|
-
dbLogger4.debug("Closing read connection...");
|
|
1701
|
-
closePromises.push(
|
|
1702
|
-
readC.end({ timeout: 5 }).then(() => dbLogger4.debug("Read connection closed")).catch((err) => dbLogger4.error("Error closing read connection", err))
|
|
1703
|
-
);
|
|
1704
|
-
}
|
|
1705
|
-
await Promise.all(closePromises);
|
|
1706
|
-
dbLogger4.info("All database connections closed");
|
|
1707
|
-
} catch (error) {
|
|
1708
|
-
dbLogger4.error("Error during database cleanup", error);
|
|
1709
|
-
throw error;
|
|
1710
|
-
} finally {
|
|
1711
|
-
setWriteInstance(void 0);
|
|
1712
|
-
setReadInstance(void 0);
|
|
1713
|
-
setWriteClient(void 0);
|
|
1714
|
-
setReadClient(void 0);
|
|
1715
|
-
setMonitoringConfig(void 0);
|
|
1716
|
-
}
|
|
1717
|
-
}
|
|
1718
|
-
function getDatabaseInfo() {
|
|
1719
|
-
const writeInst = getWriteInstance();
|
|
1720
|
-
const readInst = getReadInstance();
|
|
1721
|
-
return {
|
|
1722
|
-
hasWrite: !!writeInst,
|
|
1723
|
-
hasRead: !!readInst,
|
|
1724
|
-
isReplica: !!(readInst && readInst !== writeInst)
|
|
1725
|
-
};
|
|
1726
|
-
}
|
|
1727
|
-
var dbLogger4;
|
|
1728
|
-
var init_manager = __esm({
|
|
1729
|
-
"src/db/manager/manager.ts"() {
|
|
1730
|
-
init_logger2();
|
|
1731
|
-
init_factory2();
|
|
1732
|
-
init_config3();
|
|
1733
|
-
init_global_state();
|
|
1734
|
-
init_health_check();
|
|
1735
|
-
dbLogger4 = logger.child("database");
|
|
1736
|
-
}
|
|
1737
|
-
});
|
|
1738
|
-
|
|
1739
|
-
// src/db/manager/index.ts
|
|
1740
|
-
var init_manager2 = __esm({
|
|
1741
|
-
"src/db/manager/index.ts"() {
|
|
1742
|
-
init_factory2();
|
|
1743
|
-
init_manager();
|
|
1744
|
-
init_connection();
|
|
1745
|
-
}
|
|
1746
|
-
});
|
|
1747
|
-
function expandGlobPattern(pattern) {
|
|
1748
|
-
if (!pattern.includes("*")) {
|
|
1749
|
-
return existsSync(pattern) ? [pattern] : [];
|
|
1750
|
-
}
|
|
1751
|
-
const files = [];
|
|
1752
|
-
if (pattern.includes("**")) {
|
|
1753
|
-
const [baseDir, ...rest] = pattern.split("**");
|
|
1754
|
-
const extension = rest.join("").replace(/[\/\\]\*\./g, "").trim();
|
|
1755
|
-
const scanRecursive = (dir) => {
|
|
1756
|
-
if (!existsSync(dir)) return;
|
|
1757
|
-
try {
|
|
1758
|
-
const entries = readdirSync(dir);
|
|
1759
|
-
for (const entry of entries) {
|
|
1760
|
-
const fullPath = join(dir, entry);
|
|
1761
|
-
try {
|
|
1762
|
-
const stat2 = statSync(fullPath);
|
|
1763
|
-
if (stat2.isDirectory()) {
|
|
1764
|
-
scanRecursive(fullPath);
|
|
1765
|
-
} else if (stat2.isFile()) {
|
|
1766
|
-
if (!extension || fullPath.endsWith(extension)) {
|
|
1767
|
-
files.push(fullPath);
|
|
1768
|
-
}
|
|
1769
|
-
}
|
|
1770
|
-
} catch {
|
|
1771
|
-
}
|
|
1772
|
-
}
|
|
1773
|
-
} catch {
|
|
1774
|
-
}
|
|
1775
|
-
};
|
|
1776
|
-
scanRecursive(baseDir.trim() || ".");
|
|
1777
|
-
} else if (pattern.includes("*")) {
|
|
1778
|
-
const dir = dirname(pattern);
|
|
1779
|
-
const filePattern = basename(pattern);
|
|
1780
|
-
if (!existsSync(dir)) return [];
|
|
1781
|
-
try {
|
|
1782
|
-
const entries = readdirSync(dir);
|
|
1783
|
-
for (const entry of entries) {
|
|
1784
|
-
const fullPath = join(dir, entry);
|
|
1785
|
-
try {
|
|
1786
|
-
const stat2 = statSync(fullPath);
|
|
1787
|
-
if (stat2.isFile()) {
|
|
1788
|
-
if (filePattern === "*" || filePattern.startsWith("*.") && entry.endsWith(filePattern.slice(1))) {
|
|
1789
|
-
files.push(fullPath);
|
|
1790
|
-
}
|
|
1791
|
-
}
|
|
1792
|
-
} catch {
|
|
1793
|
-
}
|
|
1794
|
-
}
|
|
1795
|
-
} catch {
|
|
1796
|
-
}
|
|
1797
|
-
}
|
|
1798
|
-
return files;
|
|
1799
|
-
}
|
|
1800
|
-
function discoverPackageSchemas(cwd) {
|
|
1801
|
-
const schemas = [];
|
|
1802
|
-
const nodeModulesPath = join(cwd, "node_modules");
|
|
1803
|
-
if (!existsSync(nodeModulesPath)) {
|
|
1804
|
-
return schemas;
|
|
1805
|
-
}
|
|
1806
|
-
const projectPkgPath = join(cwd, "package.json");
|
|
1807
|
-
let directDeps = /* @__PURE__ */ new Set();
|
|
1808
|
-
if (existsSync(projectPkgPath)) {
|
|
1809
|
-
try {
|
|
1810
|
-
const projectPkg = JSON.parse(readFileSync(projectPkgPath, "utf-8"));
|
|
1811
|
-
directDeps = /* @__PURE__ */ new Set([
|
|
1812
|
-
...Object.keys(projectPkg.dependencies || {}),
|
|
1813
|
-
...Object.keys(projectPkg.devDependencies || {})
|
|
1814
|
-
]);
|
|
1815
|
-
} catch (error) {
|
|
1816
|
-
}
|
|
1817
|
-
}
|
|
1818
|
-
const checkPackage = (_pkgName, pkgPath) => {
|
|
1819
|
-
const pkgJsonPath = join(pkgPath, "package.json");
|
|
1820
|
-
if (!existsSync(pkgJsonPath)) return;
|
|
1821
|
-
try {
|
|
1822
|
-
const pkgJson = JSON.parse(readFileSync(pkgJsonPath, "utf-8"));
|
|
1823
|
-
if (pkgJson.spfn?.schemas) {
|
|
1824
|
-
const packageSchemas = Array.isArray(pkgJson.spfn.schemas) ? pkgJson.spfn.schemas : [pkgJson.spfn.schemas];
|
|
1825
|
-
for (const schema of packageSchemas) {
|
|
1826
|
-
const absolutePath = join(pkgPath, schema);
|
|
1827
|
-
const expandedFiles = expandGlobPattern(absolutePath);
|
|
1828
|
-
const schemaFiles = expandedFiles.filter(
|
|
1829
|
-
(file) => !file.endsWith("/index.js") && !file.endsWith("/index.ts") && !file.endsWith("/index.mjs") && !file.endsWith("\\index.js") && !file.endsWith("\\index.ts") && !file.endsWith("\\index.mjs")
|
|
1830
|
-
);
|
|
1831
|
-
schemas.push(...schemaFiles);
|
|
1832
|
-
}
|
|
1833
|
-
}
|
|
1834
|
-
} catch (error) {
|
|
1835
|
-
}
|
|
1836
|
-
};
|
|
1837
|
-
const spfnDir = join(nodeModulesPath, "@spfn");
|
|
1838
|
-
if (existsSync(spfnDir)) {
|
|
1839
|
-
try {
|
|
1840
|
-
const spfnPackages = readdirSync(spfnDir);
|
|
1841
|
-
for (const pkg of spfnPackages) {
|
|
1842
|
-
checkPackage(`@spfn/${pkg}`, join(spfnDir, pkg));
|
|
1843
|
-
}
|
|
1844
|
-
} catch (error) {
|
|
1845
|
-
}
|
|
1846
|
-
}
|
|
1847
|
-
for (const depName of directDeps) {
|
|
1848
|
-
if (depName.startsWith("@spfn/")) continue;
|
|
1849
|
-
const pkgPath = depName.startsWith("@") ? join(nodeModulesPath, ...depName.split("/")) : join(nodeModulesPath, depName);
|
|
1850
|
-
checkPackage(depName, pkgPath);
|
|
1851
|
-
}
|
|
1852
|
-
return schemas;
|
|
1853
|
-
}
|
|
1854
|
-
function detectDialect(url) {
|
|
1855
|
-
if (url.startsWith("postgres://") || url.startsWith("postgresql://")) {
|
|
1856
|
-
return "postgresql";
|
|
1857
|
-
}
|
|
1858
|
-
if (url.startsWith("mysql://")) {
|
|
1859
|
-
return "mysql";
|
|
1860
|
-
}
|
|
1861
|
-
if (url.startsWith("sqlite://") || url.includes(".db") || url.includes(".sqlite")) {
|
|
1862
|
-
return "sqlite";
|
|
1863
|
-
}
|
|
1864
|
-
throw new Error(
|
|
1865
|
-
`Unsupported database URL format: ${url}. Supported: postgresql://, mysql://, sqlite://`
|
|
1866
|
-
);
|
|
1867
|
-
}
|
|
1868
|
-
function getDrizzleConfig(options = {}) {
|
|
1869
|
-
const databaseUrl = options.databaseUrl ?? process.env.DATABASE_URL;
|
|
1870
|
-
if (!databaseUrl) {
|
|
1871
|
-
throw new Error(
|
|
1872
|
-
"DATABASE_URL is required. Set it in .env or pass it to getDrizzleConfig()"
|
|
1873
|
-
);
|
|
1874
|
-
}
|
|
1875
|
-
const dialect = options.dialect ?? detectDialect(databaseUrl);
|
|
1876
|
-
const out = options.out ?? "./src/server/drizzle";
|
|
1877
|
-
if (options.packageFilter) {
|
|
1878
|
-
const packageSchemas2 = options.disablePackageDiscovery ? [] : discoverPackageSchemas(options.cwd ?? process.cwd());
|
|
1879
|
-
const filteredSchemas = packageSchemas2.filter(
|
|
1880
|
-
(schemaPath) => schemaPath.includes(`node_modules/${options.packageFilter}/`)
|
|
1881
|
-
);
|
|
1882
|
-
if (filteredSchemas.length === 0) {
|
|
1883
|
-
throw new Error(
|
|
1884
|
-
`No schemas found for package ${options.packageFilter}. Make sure the package is installed and has "spfn.schemas" in package.json.`
|
|
1885
|
-
);
|
|
1886
|
-
}
|
|
1887
|
-
const schema2 = filteredSchemas.length === 1 ? filteredSchemas[0] : filteredSchemas;
|
|
1888
|
-
return {
|
|
1889
|
-
schema: schema2,
|
|
1890
|
-
out,
|
|
1891
|
-
dialect,
|
|
1892
|
-
dbCredentials: getDbCredentials(dialect, databaseUrl)
|
|
1893
|
-
};
|
|
1894
|
-
}
|
|
1895
|
-
const userSchema = options.schema ?? "./src/server/entities/**/*.ts";
|
|
1896
|
-
const userSchemas = Array.isArray(userSchema) ? userSchema : [userSchema];
|
|
1897
|
-
const packageSchemas = options.disablePackageDiscovery ? [] : discoverPackageSchemas(options.cwd ?? process.cwd());
|
|
1898
|
-
const allSchemas = [...userSchemas, ...packageSchemas];
|
|
1899
|
-
const schema = allSchemas.length === 1 ? allSchemas[0] : allSchemas;
|
|
1900
|
-
return {
|
|
1901
|
-
schema,
|
|
1902
|
-
out,
|
|
1903
|
-
dialect,
|
|
1904
|
-
dbCredentials: getDbCredentials(dialect, databaseUrl)
|
|
1905
|
-
};
|
|
1906
|
-
}
|
|
1907
|
-
function getDbCredentials(dialect, url) {
|
|
1908
|
-
switch (dialect) {
|
|
1909
|
-
case "postgresql":
|
|
1910
|
-
case "mysql":
|
|
1911
|
-
return { url };
|
|
1912
|
-
case "sqlite":
|
|
1913
|
-
const dbPath = url.replace("sqlite://", "").replace("sqlite:", "");
|
|
1914
|
-
return { url: dbPath };
|
|
1915
|
-
default:
|
|
1916
|
-
throw new Error(`Unsupported dialect: ${dialect}`);
|
|
1917
|
-
}
|
|
1918
|
-
}
|
|
1919
|
-
function generateDrizzleConfigFile(options = {}) {
|
|
1920
|
-
const config = getDrizzleConfig(options);
|
|
1921
|
-
const schemaValue = Array.isArray(config.schema) ? `[
|
|
1922
|
-
${config.schema.map((s) => `'${s}'`).join(",\n ")}
|
|
1923
|
-
]` : `'${config.schema}'`;
|
|
1924
|
-
return `import { defineConfig } from 'drizzle-kit';
|
|
1925
|
-
|
|
1926
|
-
export default defineConfig({
|
|
1927
|
-
schema: ${schemaValue},
|
|
1928
|
-
out: '${config.out}',
|
|
1929
|
-
dialect: '${config.dialect}',
|
|
1930
|
-
dbCredentials: ${JSON.stringify(config.dbCredentials, null, 4)},
|
|
1931
|
-
});
|
|
1932
|
-
`;
|
|
1933
|
-
}
|
|
1934
|
-
var init_config_generator = __esm({
|
|
1935
|
-
"src/db/manager/config-generator.ts"() {
|
|
1936
|
-
}
|
|
1937
|
-
});
|
|
1938
|
-
function id() {
|
|
1939
|
-
return bigserial("id", { mode: "number" }).primaryKey();
|
|
1940
|
-
}
|
|
1941
|
-
function timestamps(options) {
|
|
1942
|
-
const updatedAtColumn = timestamp("updated_at", { withTimezone: true, mode: "date" }).defaultNow().notNull();
|
|
1943
|
-
if (options?.autoUpdate) {
|
|
1944
|
-
updatedAtColumn.__autoUpdate = true;
|
|
1945
|
-
}
|
|
1946
|
-
return {
|
|
1947
|
-
createdAt: timestamp("created_at", { withTimezone: true, mode: "date" }).defaultNow().notNull(),
|
|
1948
|
-
updatedAt: updatedAtColumn
|
|
1949
|
-
};
|
|
1950
|
-
}
|
|
1951
|
-
function foreignKey(name, reference, options) {
|
|
1952
|
-
return bigserial(`${name}_id`, { mode: "number" }).notNull().references(reference, { onDelete: options?.onDelete ?? "cascade" });
|
|
1953
|
-
}
|
|
1954
|
-
function optionalForeignKey(name, reference, options) {
|
|
1955
|
-
return bigserial(`${name}_id`, { mode: "number" }).references(reference, { onDelete: options?.onDelete ?? "set null" });
|
|
1956
|
-
}
|
|
1957
|
-
var init_helpers = __esm({
|
|
1958
|
-
"src/db/schema/helpers.ts"() {
|
|
1959
|
-
}
|
|
1960
|
-
});
|
|
1961
|
-
|
|
1962
|
-
// src/db/schema/index.ts
|
|
1963
|
-
var init_schema = __esm({
|
|
1964
|
-
"src/db/schema/index.ts"() {
|
|
1965
|
-
init_helpers();
|
|
1966
|
-
}
|
|
1967
|
-
});
|
|
1968
|
-
function createFunctionSchema(packageName) {
|
|
1969
|
-
const schemaName = packageNameToSchema(packageName);
|
|
1970
|
-
return pgSchema(schemaName);
|
|
1971
|
-
}
|
|
1972
|
-
function packageNameToSchema(packageName) {
|
|
1973
|
-
return packageName.replace("@", "").replace("/", "_").replace(/-/g, "_");
|
|
1974
|
-
}
|
|
1975
|
-
function getSchemaInfo(packageName) {
|
|
1976
|
-
const isScoped = packageName.startsWith("@");
|
|
1977
|
-
const scope = isScoped ? packageName.split("/")[0].substring(1) : null;
|
|
1978
|
-
const schemaName = packageNameToSchema(packageName);
|
|
1979
|
-
return {
|
|
1980
|
-
schemaName,
|
|
1981
|
-
isScoped,
|
|
1982
|
-
scope
|
|
1983
|
-
};
|
|
1984
|
-
}
|
|
1985
|
-
var init_schema_helper = __esm({
|
|
1986
|
-
"src/db/schema-helper.ts"() {
|
|
1987
|
-
}
|
|
1988
|
-
});
|
|
1989
|
-
function getTransactionContext() {
|
|
1990
|
-
return asyncContext.getStore() ?? null;
|
|
1991
|
-
}
|
|
1992
|
-
function getTransaction() {
|
|
1993
|
-
const context = getTransactionContext();
|
|
1994
|
-
return context?.tx ?? null;
|
|
1995
|
-
}
|
|
1996
|
-
function runWithTransaction(tx, txId, callback) {
|
|
1997
|
-
const existingContext = getTransactionContext();
|
|
1998
|
-
const newLevel = existingContext ? existingContext.level + 1 : 1;
|
|
1999
|
-
if (existingContext) {
|
|
2000
|
-
txLogger.info("Nested transaction started (SAVEPOINT)", {
|
|
2001
|
-
outerTxId: existingContext.txId,
|
|
2002
|
-
innerTxId: txId,
|
|
2003
|
-
level: newLevel
|
|
2004
|
-
});
|
|
2005
|
-
} else {
|
|
2006
|
-
txLogger.debug("Root transaction context set", { txId, level: newLevel });
|
|
2007
|
-
}
|
|
2008
|
-
return asyncContext.run({ tx, txId, level: newLevel }, callback);
|
|
2009
|
-
}
|
|
2010
|
-
var txLogger, asyncContext;
|
|
2011
|
-
var init_context = __esm({
|
|
2012
|
-
"src/db/transaction/context.ts"() {
|
|
2013
|
-
init_logger2();
|
|
2014
|
-
txLogger = logger.child("transaction");
|
|
2015
|
-
asyncContext = new AsyncLocalStorage();
|
|
2016
|
-
}
|
|
2017
|
-
});
|
|
2018
|
-
function Transactional(options = {}) {
|
|
2019
|
-
const defaultTimeout = parseInt(process.env.TRANSACTION_TIMEOUT || "30000", 10);
|
|
2020
|
-
const {
|
|
2021
|
-
slowThreshold = 1e3,
|
|
2022
|
-
enableLogging = true,
|
|
2023
|
-
timeout = defaultTimeout
|
|
2024
|
-
} = options;
|
|
2025
|
-
const txLogger2 = logger.child("transaction");
|
|
2026
|
-
return createMiddleware(async (c, next) => {
|
|
2027
|
-
const txId = `tx_${randomUUID()}`;
|
|
2028
|
-
const startTime = Date.now();
|
|
2029
|
-
const route = `${c.req.method} ${c.req.path}`;
|
|
2030
|
-
if (enableLogging) {
|
|
2031
|
-
txLogger2.debug("Transaction started", { txId, route });
|
|
2032
|
-
}
|
|
2033
|
-
try {
|
|
2034
|
-
const writeDb = getDatabase("write");
|
|
2035
|
-
if (!writeDb) {
|
|
2036
|
-
throw new TransactionError(
|
|
2037
|
-
"Database not initialized. Cannot start transaction.",
|
|
2038
|
-
500,
|
|
2039
|
-
{ txId, route }
|
|
2040
|
-
);
|
|
2041
|
-
}
|
|
2042
|
-
const transactionPromise = writeDb.transaction(async (tx) => {
|
|
2043
|
-
await runWithTransaction(tx, txId, async () => {
|
|
2044
|
-
await next();
|
|
2045
|
-
const contextWithError = c;
|
|
2046
|
-
if (contextWithError.error) {
|
|
2047
|
-
throw contextWithError.error;
|
|
2048
|
-
}
|
|
2049
|
-
});
|
|
2050
|
-
});
|
|
2051
|
-
if (timeout > 0) {
|
|
2052
|
-
const timeoutPromise = new Promise((_, reject) => {
|
|
2053
|
-
setTimeout(() => {
|
|
2054
|
-
reject(
|
|
2055
|
-
new TransactionError(
|
|
2056
|
-
`Transaction timeout after ${timeout}ms`,
|
|
2057
|
-
500,
|
|
2058
|
-
{
|
|
2059
|
-
txId,
|
|
2060
|
-
route,
|
|
2061
|
-
timeout: `${timeout}ms`
|
|
2062
|
-
}
|
|
2063
|
-
)
|
|
2064
|
-
);
|
|
2065
|
-
}, timeout);
|
|
2066
|
-
});
|
|
2067
|
-
await Promise.race([transactionPromise, timeoutPromise]);
|
|
2068
|
-
} else {
|
|
2069
|
-
await transactionPromise;
|
|
2070
|
-
}
|
|
2071
|
-
const duration = Date.now() - startTime;
|
|
2072
|
-
if (enableLogging) {
|
|
2073
|
-
if (duration >= slowThreshold) {
|
|
2074
|
-
txLogger2.warn("Slow transaction committed", {
|
|
2075
|
-
txId,
|
|
2076
|
-
route,
|
|
2077
|
-
duration: `${duration}ms`,
|
|
2078
|
-
threshold: `${slowThreshold}ms`
|
|
2079
|
-
});
|
|
2080
|
-
} else {
|
|
2081
|
-
txLogger2.debug("Transaction committed", {
|
|
2082
|
-
txId,
|
|
2083
|
-
route,
|
|
2084
|
-
duration: `${duration}ms`
|
|
2085
|
-
});
|
|
2086
|
-
}
|
|
2087
|
-
}
|
|
2088
|
-
} catch (error) {
|
|
2089
|
-
const duration = Date.now() - startTime;
|
|
2090
|
-
const customError = error instanceof TransactionError ? error : fromPostgresError(error);
|
|
2091
|
-
if (enableLogging) {
|
|
2092
|
-
txLogger2.error("Transaction rolled back", {
|
|
2093
|
-
txId,
|
|
2094
|
-
route,
|
|
2095
|
-
duration: `${duration}ms`,
|
|
2096
|
-
error: customError.message,
|
|
2097
|
-
errorType: customError.name
|
|
2098
|
-
});
|
|
2099
|
-
}
|
|
2100
|
-
throw customError;
|
|
2101
|
-
}
|
|
2102
|
-
});
|
|
2103
|
-
}
|
|
2104
|
-
var init_middleware = __esm({
|
|
2105
|
-
"src/db/transaction/middleware.ts"() {
|
|
2106
|
-
init_logger2();
|
|
2107
|
-
init_manager2();
|
|
2108
|
-
init_context();
|
|
2109
|
-
init_errors();
|
|
2110
|
-
init_postgres_errors();
|
|
2111
|
-
}
|
|
2112
|
-
});
|
|
2113
|
-
|
|
2114
|
-
// src/db/transaction/index.ts
|
|
2115
|
-
var init_transaction = __esm({
|
|
2116
|
-
"src/db/transaction/index.ts"() {
|
|
2117
|
-
init_context();
|
|
2118
|
-
init_middleware();
|
|
2119
|
-
}
|
|
2120
|
-
});
|
|
2121
|
-
function isSQLWrapper(value) {
|
|
2122
|
-
return value && typeof value === "object" && "queryChunks" in value;
|
|
2123
|
-
}
|
|
2124
|
-
function buildWhereFromObject(table, where) {
|
|
2125
|
-
const entries = Object.entries(where).filter(([_, value]) => value !== void 0);
|
|
2126
|
-
if (entries.length === 0) return void 0;
|
|
2127
|
-
const conditions = entries.map(
|
|
2128
|
-
([key, value]) => eq(table[key], value)
|
|
2129
|
-
);
|
|
2130
|
-
return conditions.length === 1 ? conditions[0] : and(...conditions);
|
|
2131
|
-
}
|
|
2132
|
-
async function findOne(table, where) {
|
|
2133
|
-
const db = getDatabase("read");
|
|
2134
|
-
if (!db) {
|
|
2135
|
-
throw new Error("Database not initialized. Call initDatabase() first.");
|
|
2136
|
-
}
|
|
2137
|
-
const whereClause = isSQLWrapper(where) ? where : where ? buildWhereFromObject(table, where) : void 0;
|
|
2138
|
-
if (!whereClause) {
|
|
2139
|
-
throw new Error("findOne requires at least one where condition");
|
|
2140
|
-
}
|
|
2141
|
-
const results = await db.select().from(table).where(whereClause).limit(1);
|
|
2142
|
-
return results[0] ?? null;
|
|
2143
|
-
}
|
|
2144
|
-
async function findMany(table, options) {
|
|
2145
|
-
const db = getDatabase("read");
|
|
2146
|
-
if (!db) {
|
|
2147
|
-
throw new Error("Database not initialized. Call initDatabase() first.");
|
|
2148
|
-
}
|
|
2149
|
-
let query = db.select().from(table);
|
|
2150
|
-
if (options?.where) {
|
|
2151
|
-
const whereClause = isSQLWrapper(options.where) ? options.where : options.where ? buildWhereFromObject(table, options.where) : void 0;
|
|
2152
|
-
if (whereClause) {
|
|
2153
|
-
query = query.where(whereClause);
|
|
2154
|
-
}
|
|
2155
|
-
}
|
|
2156
|
-
if (options?.orderBy) {
|
|
2157
|
-
const orderByArray = Array.isArray(options.orderBy) ? options.orderBy : [options.orderBy];
|
|
2158
|
-
query = query.orderBy(...orderByArray);
|
|
2159
|
-
}
|
|
2160
|
-
if (options?.limit) {
|
|
2161
|
-
query = query.limit(options.limit);
|
|
2162
|
-
}
|
|
2163
|
-
if (options?.offset) {
|
|
2164
|
-
query = query.offset(options.offset);
|
|
2165
|
-
}
|
|
2166
|
-
return query;
|
|
2167
|
-
}
|
|
2168
|
-
async function create(table, data) {
|
|
2169
|
-
const db = getDatabase("write");
|
|
2170
|
-
if (!db) {
|
|
2171
|
-
throw new Error("Database not initialized. Call initDatabase() first.");
|
|
2172
|
-
}
|
|
2173
|
-
const [result] = await db.insert(table).values(data).returning();
|
|
2174
|
-
return result;
|
|
2175
|
-
}
|
|
2176
|
-
async function createMany(table, data) {
|
|
2177
|
-
const db = getDatabase("write");
|
|
2178
|
-
if (!db) {
|
|
2179
|
-
throw new Error("Database not initialized. Call initDatabase() first.");
|
|
2180
|
-
}
|
|
2181
|
-
const results = await db.insert(table).values(data).returning();
|
|
2182
|
-
return results;
|
|
2183
|
-
}
|
|
2184
|
-
async function upsert(table, data, options) {
|
|
2185
|
-
const db = getDatabase("write");
|
|
2186
|
-
if (!db) {
|
|
2187
|
-
throw new Error("Database not initialized. Call initDatabase() first.");
|
|
2188
|
-
}
|
|
2189
|
-
const [result] = await db.insert(table).values(data).onConflictDoUpdate({
|
|
2190
|
-
target: options.target,
|
|
2191
|
-
set: options.set || data
|
|
2192
|
-
}).returning();
|
|
2193
|
-
return result;
|
|
2194
|
-
}
|
|
2195
|
-
async function updateOne(table, where, data) {
|
|
2196
|
-
const db = getDatabase("write");
|
|
2197
|
-
if (!db) {
|
|
2198
|
-
throw new Error("Database not initialized. Call initDatabase() first.");
|
|
2199
|
-
}
|
|
2200
|
-
const whereClause = isSQLWrapper(where) ? where : where ? buildWhereFromObject(table, where) : void 0;
|
|
2201
|
-
if (!whereClause) {
|
|
2202
|
-
throw new Error("updateOne requires at least one where condition");
|
|
2203
|
-
}
|
|
2204
|
-
const [result] = await db.update(table).set(data).where(whereClause).returning();
|
|
2205
|
-
return result ?? null;
|
|
2206
|
-
}
|
|
2207
|
-
async function updateMany(table, where, data) {
|
|
2208
|
-
const db = getDatabase("write");
|
|
2209
|
-
if (!db) {
|
|
2210
|
-
throw new Error("Database not initialized. Call initDatabase() first.");
|
|
2211
|
-
}
|
|
2212
|
-
const whereClause = isSQLWrapper(where) ? where : where ? buildWhereFromObject(table, where) : void 0;
|
|
2213
|
-
if (!whereClause) {
|
|
2214
|
-
throw new Error("updateMany requires at least one where condition");
|
|
2215
|
-
}
|
|
2216
|
-
const results = await db.update(table).set(data).where(whereClause).returning();
|
|
2217
|
-
return results;
|
|
2218
|
-
}
|
|
2219
|
-
async function deleteOne(table, where) {
|
|
2220
|
-
const db = getDatabase("write");
|
|
2221
|
-
if (!db) {
|
|
2222
|
-
throw new Error("Database not initialized. Call initDatabase() first.");
|
|
2223
|
-
}
|
|
2224
|
-
const whereClause = isSQLWrapper(where) ? where : where ? buildWhereFromObject(table, where) : void 0;
|
|
2225
|
-
if (!whereClause) {
|
|
2226
|
-
throw new Error("deleteOne requires at least one where condition");
|
|
2227
|
-
}
|
|
2228
|
-
const [result] = await db.delete(table).where(whereClause).returning();
|
|
2229
|
-
return result ?? null;
|
|
2230
|
-
}
|
|
2231
|
-
/**
 * Deletes every row of `table` matching `where` and returns the deleted
 * rows.
 *
 * @param table - drizzle table object
 * @param where - drizzle SQL wrapper, or a plain object of conditions
 * @returns array of deleted rows (from RETURNING)
 * @throws Error when the database is not initialized or `where` is empty
 */
async function deleteMany(table, where) {
  const db = getDatabase("write");
  if (!db) {
    throw new Error("Database not initialized. Call initDatabase() first.");
  }
  // Accept either a ready-made SQL wrapper or a plain condition object.
  let condition;
  if (isSQLWrapper(where)) {
    condition = where;
  } else if (where) {
    condition = buildWhereFromObject(table, where);
  }
  // Refuse to run an unbounded DELETE.
  if (!condition) {
    throw new Error("deleteMany requires at least one where condition");
  }
  return db.delete(table).where(condition).returning();
}
|
|
2243
|
-
/**
 * Counts rows in `table` matching the optional `where` condition, using
 * the read connection.
 *
 * NOTE(review): this selects every matching row and returns
 * `results.length`, which is O(rows) in transfer and memory; a
 * `SELECT count(*)` would be cheaper. Left unchanged because the fix
 * requires drizzle's `sql`/count helper — confirm before changing.
 *
 * @param table - drizzle table object
 * @param where - optional drizzle SQL wrapper or plain condition object
 * @returns number of matching rows
 * @throws Error when the database is not initialized
 */
async function count(table, where) {
  const db = getDatabase("read");
  if (!db) {
    throw new Error("Database not initialized. Call initDatabase() first.");
  }
  let query = db.select().from(table);
  if (where) {
    // `where` may already be a SQL wrapper; otherwise it is presumably a
    // plain object converted by buildWhereFromObject — see that helper.
    const whereClause = isSQLWrapper(where) ? where : where ? buildWhereFromObject(table, where) : void 0;
    if (whereClause) {
      query = query.where(whereClause);
    }
  }
  const results = await query;
  return results.length;
}
|
|
2258
|
-
// Bundler-generated run-once initializer for src/db/helpers.ts.
// Ensures the db manager module is initialized before the helpers run.
var init_helpers2 = __esm({
  "src/db/helpers.ts"() {
    init_manager2();
  }
});
|
|
2263
|
-
|
|
2264
|
-
// src/db/index.ts
|
|
2265
|
-
// src/db/index.ts
// Bundler-generated export map for the db entry point: lazily exposes all
// public db symbols (CRUD helpers, connection management, schema helpers,
// transactions, postgres error mapping) on `db_exports`.
var db_exports = {};
__export(db_exports, {
  Transactional: () => Transactional,
  checkConnection: () => checkConnection,
  closeDatabase: () => closeDatabase,
  count: () => count,
  create: () => create,
  createDatabaseConnection: () => createDatabaseConnection,
  createDatabaseFromEnv: () => createDatabaseFromEnv,
  createFunctionSchema: () => createFunctionSchema,
  createMany: () => createMany,
  deleteMany: () => deleteMany,
  deleteOne: () => deleteOne,
  detectDialect: () => detectDialect,
  findMany: () => findMany,
  findOne: () => findOne,
  foreignKey: () => foreignKey,
  fromPostgresError: () => fromPostgresError,
  generateDrizzleConfigFile: () => generateDrizzleConfigFile,
  getDatabase: () => getDatabase,
  getDatabaseInfo: () => getDatabaseInfo,
  getDrizzleConfig: () => getDrizzleConfig,
  getSchemaInfo: () => getSchemaInfo,
  getTransaction: () => getTransaction,
  id: () => id,
  initDatabase: () => initDatabase,
  optionalForeignKey: () => optionalForeignKey,
  packageNameToSchema: () => packageNameToSchema,
  runWithTransaction: () => runWithTransaction,
  setDatabase: () => setDatabase,
  timestamps: () => timestamps,
  updateMany: () => updateMany,
  updateOne: () => updateOne,
  upsert: () => upsert
});
|
|
2300
|
-
// Bundler-generated run-once initializer for src/db/index.ts.
// Initializes every sub-module the db entry point re-exports from.
var init_db = __esm({
  "src/db/index.ts"() {
    init_manager2();
    init_config_generator();
    init_schema();
    init_schema_helper();
    init_transaction();
    init_postgres_errors();
    init_helpers2();
  }
});
|
|
2311
|
-
|
|
2312
|
-
// src/cache/cache-factory.ts
|
|
2313
|
-
/**
 * Returns true when any cache-related environment variable is set.
 *
 * Kept in sync with the variables actually read by createCacheFromEnv()
 * via getEnv(): that path accepts the CACHE_SENTINEL_HOSTS and
 * CACHE_CLUSTER_NODES aliases, so they must also count as "configured"
 * here (previously missing, which silently disabled cache for users who
 * configured only those aliases).
 *
 * @returns {boolean} true when at least one cache env var is non-empty
 */
function hasCacheConfig() {
  const configKeys = [
    // Modern (Valkey/Cache)
    "VALKEY_URL",
    "CACHE_URL",
    "VALKEY_WRITE_URL",
    "VALKEY_READ_URL",
    "CACHE_WRITE_URL",
    "CACHE_READ_URL",
    "VALKEY_SENTINEL_HOSTS",
    "CACHE_SENTINEL_HOSTS",
    "VALKEY_CLUSTER_NODES",
    "CACHE_CLUSTER_NODES",
    // Legacy (Redis - backward compatibility)
    "REDIS_URL",
    "REDIS_WRITE_URL",
    "REDIS_READ_URL",
    "REDIS_SENTINEL_HOSTS",
    "REDIS_CLUSTER_NODES"
  ];
  return configKeys.some((key) => Boolean(process.env[key]));
}
|
|
2318
|
-
/**
 * Resolves a cache setting from the environment, preferring the modern
 * Valkey name, then the Cache alias, then the legacy Redis name.
 *
 * @param valkeyKey - preferred env var name
 * @param cacheKey - neutral alias env var name
 * @param redisKey - legacy env var name (returned as-is, even if empty)
 * @returns the first truthy value, else whatever the redis key holds
 */
function getEnv(valkeyKey, cacheKey, redisKey) {
  const fromValkey = process.env[valkeyKey];
  if (fromValkey) {
    return fromValkey;
  }
  const fromCache = process.env[cacheKey];
  if (fromCache) {
    return fromCache;
  }
  return process.env[redisKey];
}
|
|
2321
|
-
/**
 * Instantiates an ioredis-style client for `url`.
 *
 * For TLS URLs (rediss:// or valkeys://) a tls option block is attached;
 * certificate verification stays on unless the corresponding
 * *_TLS_REJECT_UNAUTHORIZED env var is the literal string "false".
 *
 * @param RedisClient - client constructor (ioredis default export)
 * @param url - connection URL
 * @returns new client instance
 */
function createClient(RedisClient, url) {
  const usesTls = url.startsWith("rediss://") || url.startsWith("valkeys://");
  const options = {};
  if (usesTls) {
    const rejectSetting = getEnv(
      "VALKEY_TLS_REJECT_UNAUTHORIZED",
      "CACHE_TLS_REJECT_UNAUTHORIZED",
      "REDIS_TLS_REJECT_UNAUTHORIZED"
    );
    // Only the exact string "false" disables certificate verification.
    options.tls = { rejectUnauthorized: rejectSetting !== "false" };
  }
  return new RedisClient(url, options);
}
|
|
2335
|
-
/**
 * Builds cache client(s) from environment configuration.
 *
 * Topologies are tried in this order: single URL (when no write/read split
 * or cluster is configured), master-replica (write + read URLs), sentinel
 * (hosts + master name), cluster (node list), then single URL as a final
 * fallback. Returns `{ write, read }`; both are undefined when no config
 * exists or client creation fails. ioredis is imported lazily so the
 * dependency stays optional.
 */
async function createCacheFromEnv() {
  if (!hasCacheConfig()) {
    cacheLogger.info("No cache configuration found - running without cache");
    return { write: void 0, read: void 0 };
  }
  try {
    // Lazy import: ioredis is an optional dependency.
    const ioredis = await import('ioredis');
    const RedisClient = ioredis.default;
    const singleUrl = getEnv("VALKEY_URL", "CACHE_URL", "REDIS_URL");
    const writeUrl = getEnv("VALKEY_WRITE_URL", "CACHE_WRITE_URL", "REDIS_WRITE_URL");
    const readUrl = getEnv("VALKEY_READ_URL", "CACHE_READ_URL", "REDIS_READ_URL");
    const clusterNodes = getEnv("VALKEY_CLUSTER_NODES", "CACHE_CLUSTER_NODES", "REDIS_CLUSTER_NODES");
    const sentinelHosts = getEnv("VALKEY_SENTINEL_HOSTS", "CACHE_SENTINEL_HOSTS", "REDIS_SENTINEL_HOSTS");
    const masterName = getEnv("VALKEY_MASTER_NAME", "CACHE_MASTER_NAME", "REDIS_MASTER_NAME");
    const password = getEnv("VALKEY_PASSWORD", "CACHE_PASSWORD", "REDIS_PASSWORD");
    // Single instance: one URL and no conflicting split/cluster config.
    if (singleUrl && !writeUrl && !readUrl && !clusterNodes) {
      const client = createClient(RedisClient, singleUrl);
      // Mask credentials in the logged URL.
      cacheLogger.debug("Created single cache instance", { url: singleUrl.replace(/:[^:@]+@/, ":***@") });
      return { write: client, read: client };
    }
    // Master-replica: separate write and read connections.
    if (writeUrl && readUrl) {
      const write = createClient(RedisClient, writeUrl);
      const read = createClient(RedisClient, readUrl);
      cacheLogger.debug("Created master-replica cache instances");
      return { write, read };
    }
    // Sentinel: comma-separated host[:port] list, default port 26379.
    if (sentinelHosts && masterName) {
      const sentinels = sentinelHosts.split(",").map((host) => {
        const [hostname, port] = host.trim().split(":");
        return { host: hostname, port: Number(port) || 26379 };
      });
      const options = {
        sentinels,
        name: masterName,
        password
      };
      const client = new RedisClient(options);
      cacheLogger.debug("Created sentinel cache instance", { masterName, sentinels: sentinels.length });
      return { write: client, read: client };
    }
    // Cluster: comma-separated host[:port] list, default port 6379.
    if (clusterNodes) {
      const nodes = clusterNodes.split(",").map((node) => {
        const [host, port] = node.trim().split(":");
        return { host, port: Number(port) || 6379 };
      });
      const clusterOptions = {
        redisOptions: {
          password
        }
      };
      const cluster = new RedisClient.Cluster(nodes, clusterOptions);
      cacheLogger.debug("Created cluster cache instance", { nodes: nodes.length });
      return { write: cluster, read: cluster };
    }
    // Fallback: a single URL combined with partial split/cluster config.
    if (singleUrl) {
      const client = createClient(RedisClient, singleUrl);
      cacheLogger.debug("Created cache instance (fallback)", { url: singleUrl.replace(/:[^:@]+@/, ":***@") });
      return { write: client, read: client };
    }
    cacheLogger.info("No valid cache configuration found - running without cache");
    return { write: void 0, read: void 0 };
  } catch (error) {
    // Any failure degrades to "no cache" rather than crashing the app.
    if (error instanceof Error) {
      if (error.message.includes("Cannot find module")) {
        cacheLogger.warn(
          "Cache client library not installed",
          error,
          {
            suggestion: "Install ioredis to enable cache: pnpm install ioredis",
            mode: "disabled"
          }
        );
      } else {
        cacheLogger.warn(
          "Failed to create cache client",
          error,
          { mode: "disabled" }
        );
      }
    } else {
      cacheLogger.warn(
        "Failed to create cache client",
        { error: String(error), mode: "disabled" }
      );
    }
    return { write: void 0, read: void 0 };
  }
}
|
|
2423
|
-
/**
 * Convenience wrapper around createCacheFromEnv() that returns only the
 * write-capable client (undefined when cache is unconfigured/unavailable).
 */
async function createSingleCacheFromEnv() {
  const instances = await createCacheFromEnv();
  return instances.write;
}
|
|
2427
|
-
// Child logger for the cache subsystem; assigned when the factory module
// initializer below runs.
var cacheLogger;
// Bundler-generated run-once initializer for src/cache/cache-factory.ts.
var init_cache_factory = __esm({
  "src/cache/cache-factory.ts"() {
    init_logger2();
    cacheLogger = logger.child("cache");
  }
});
|
|
2434
|
-
|
|
2435
|
-
// src/cache/cache-manager.ts
|
|
2436
|
-
// Returns the write-capable cache client, or undefined when the cache is
// disabled or not yet initialized.
function getCache() {
  return writeInstance;
}
|
|
2439
|
-
// Returns the read client, falling back to the write client when no
// dedicated replica connection was configured.
function getCacheRead() {
  return readInstance ?? writeInstance;
}
|
|
2442
|
-
// True when the cache subsystem is running in disabled (no-op) mode.
function isCacheDisabled() {
  return isDisabled;
}
|
|
2445
|
-
// Injects cache clients directly (manual wiring / tests).
// When no read client is given, the write client serves reads too.
// A falsy `write` marks the cache as disabled.
function setCache(write, read) {
  writeInstance = write;
  readInstance = read ?? write;
  isDisabled = !write;
}
|
|
2450
|
-
/**
 * Initializes the module-level cache connections from the environment.
 *
 * Idempotent: returns the existing instances when already initialized.
 * Both clients are PINGed to verify connectivity; on failure the clients
 * are quit (best-effort) and the cache runs in disabled mode. Returns
 * `{ write, read, disabled }`.
 */
async function initCache() {
  // Already initialized — return current state unchanged.
  if (writeInstance) {
    return { write: writeInstance, read: readInstance, disabled: isDisabled };
  }
  const { write, read } = await createCacheFromEnv();
  if (write) {
    try {
      // Verify connectivity before committing the instances.
      await write.ping();
      if (read && read !== write) {
        await read.ping();
      }
      writeInstance = write;
      readInstance = read;
      isDisabled = false;
      const hasReplica = read && read !== write;
      cacheLogger2.info(
        hasReplica ? "Cache connected (Master-Replica)" : "Cache connected",
        { mode: "enabled" }
      );
      return { write: writeInstance, read: readInstance, disabled: false };
    } catch (error) {
      cacheLogger2.error(
        "Cache connection failed - running in disabled mode",
        error instanceof Error ? error : new Error(String(error)),
        { mode: "disabled" }
      );
      // Best-effort cleanup of the half-open connections; errors here are
      // deliberately ignored.
      try {
        await write.quit();
        if (read && read !== write) {
          await read.quit();
        }
      } catch {
      }
      isDisabled = true;
      return { write: void 0, read: void 0, disabled: true };
    }
  }
  // No client could be created (no config, or ioredis missing).
  isDisabled = true;
  cacheLogger2.info("Cache disabled - no configuration or library not installed", { mode: "disabled" });
  return { write: void 0, read: void 0, disabled: true };
}
|
|
2491
|
-
/**
 * Closes the cache connections (write, and read when it is a distinct
 * replica), then resets the module state to disabled. Quit errors are
 * logged but never thrown. No-op when the cache is already disabled.
 */
async function closeCache() {
  if (isDisabled) {
    cacheLogger2.debug("Cache already disabled, nothing to close");
    return;
  }
  const pending = [];
  if (writeInstance) {
    const closingWrite = writeInstance.quit().catch((err) => {
      cacheLogger2.error("Error closing cache write instance", err);
    });
    pending.push(closingWrite);
  }
  const hasSeparateRead = readInstance && readInstance !== writeInstance;
  if (hasSeparateRead) {
    const closingRead = readInstance.quit().catch((err) => {
      cacheLogger2.error("Error closing cache read instance", err);
    });
    pending.push(closingRead);
  }
  await Promise.all(pending);
  writeInstance = void 0;
  readInstance = void 0;
  isDisabled = true;
  cacheLogger2.info("Cache connections closed", { mode: "disabled" });
}
|
|
2517
|
-
/**
 * Reports the current cache wiring: whether write/read clients exist,
 * whether reads go to a distinct replica, and whether the cache is
 * disabled.
 */
function getCacheInfo() {
  const replica = Boolean(readInstance && readInstance !== writeInstance);
  return {
    hasWrite: Boolean(writeInstance),
    hasRead: Boolean(readInstance),
    isReplica: replica,
    disabled: isDisabled
  };
}
|
|
2525
|
-
// Module-level cache state (write/read clients, disabled flag) plus the
// legacy Redis-named aliases assigned by the initializer below.
var cacheLogger2, writeInstance, readInstance, isDisabled, getRedis, getRedisRead, setRedis, initRedis, closeRedis, getRedisInfo;
// Bundler-generated run-once initializer for src/cache/cache-manager.ts.
var init_cache_manager = __esm({
  "src/cache/cache-manager.ts"() {
    init_cache_factory();
    init_logger2();
    cacheLogger2 = logger.child("cache");
    isDisabled = false;
    // Backward-compatible aliases: the old Redis-named API maps 1:1 onto
    // the Cache API.
    getRedis = getCache;
    getRedisRead = getCacheRead;
    setRedis = setCache;
    initRedis = initCache;
    closeRedis = closeCache;
    getRedisInfo = getCacheInfo;
  }
});
|
|
2540
|
-
|
|
2541
|
-
// src/cache/index.ts
|
|
2542
|
-
// src/cache/index.ts
// Bundler-generated export map for the cache entry point. Both the modern
// Cache-named API and the legacy Redis-named aliases are exported.
var cache_exports = {};
__export(cache_exports, {
  closeCache: () => closeCache,
  closeRedis: () => closeRedis,
  createCacheFromEnv: () => createCacheFromEnv,
  createRedisFromEnv: () => createCacheFromEnv,
  createSingleCacheFromEnv: () => createSingleCacheFromEnv,
  createSingleRedisFromEnv: () => createSingleCacheFromEnv,
  getCache: () => getCache,
  getCacheInfo: () => getCacheInfo,
  getCacheRead: () => getCacheRead,
  getRedis: () => getRedis,
  getRedisInfo: () => getRedisInfo,
  getRedisRead: () => getRedisRead,
  initCache: () => initCache,
  initRedis: () => initRedis,
  isCacheDisabled: () => isCacheDisabled,
  setCache: () => setCache,
  setRedis: () => setRedis
});
|
|
2562
|
-
// Bundler-generated run-once initializer for src/cache/index.ts.
// Each __esm-wrapped initializer already guards against re-entry, so the
// previous duplicate calls to init_cache_factory()/init_cache_manager()
// were redundant and have been removed.
var init_cache = __esm({
  "src/cache/index.ts"() {
    init_cache_factory();
    init_cache_manager();
  }
});
|
|
2570
|
-
|
|
2571
|
-
// src/route/auto-loader.ts
// Eager section: the route auto-loader initializes logging immediately and
// creates its child logger.
init_logger2();
var routeLogger2 = logger.child("route");
|
|
2574
|
-
/**
 * Scans a directory tree for index.{ts,js,mjs} route modules, validates
 * that each exports a contract-based Hono instance, mounts them on the
 * app, and records per-route statistics.
 */
var AutoRouteLoader = class {
  constructor(routesDir, debug = false, middlewares = []) {
    this.routesDir = routesDir;
    this.debug = debug;
    this.middlewares = middlewares;
  }
  // Registered routes accumulated across load() / loadExternalRoutes().
  routes = [];
  debug;
  middlewares;
  /**
   * Loads every route module under this.routesDir onto `app`.
   * Failures are logged and counted; loading continues past them.
   * @returns accumulated route statistics
   */
  async load(app) {
    const startTime = Date.now();
    const files = await this.scanFiles(this.routesDir);
    if (files.length === 0) {
      routeLogger2.warn("No route files found");
      return this.getStats();
    }
    let failureCount = 0;
    for (const file of files) {
      const success = await this.loadRoute(app, file);
      // Minifier artifact: empty then-branch; only failures are counted.
      if (success) ; else {
        failureCount++;
      }
    }
    const elapsed = Date.now() - startTime;
    const stats = this.getStats();
    if (this.debug) {
      this.logStats(stats, elapsed);
    }
    if (failureCount > 0) {
      routeLogger2.warn("Some routes failed to load", { failureCount });
    }
    return stats;
  }
  /**
   * Load routes from an external directory (e.g., from SPFN function packages)
   * Reads package.json spfn.prefix and mounts routes under that prefix
   *
   * NOTE: temporarily swaps this.routesDir while loading and restores it
   * afterwards, so concurrent calls on the same instance would interfere.
   *
   * @param app - Hono app instance
   * @param routesDir - Directory containing route handlers
   * @param packageName - Name of the package (for logging)
   * @param prefix - Optional prefix to mount routes under (from package.json spfn.prefix)
   * @returns Route statistics
   */
  async loadExternalRoutes(app, routesDir, packageName, prefix) {
    const startTime = Date.now();
    const tempRoutesDir = this.routesDir;
    this.routesDir = routesDir;
    const files = await this.scanFiles(routesDir);
    if (files.length === 0) {
      routeLogger2.warn("No route files found", { dir: routesDir, package: packageName });
      this.routesDir = tempRoutesDir;
      return this.getStats();
    }
    let successCount = 0;
    let failureCount = 0;
    for (const file of files) {
      const success = await this.loadRoute(app, file, prefix);
      if (success) {
        successCount++;
      } else {
        failureCount++;
      }
    }
    const elapsed = Date.now() - startTime;
    if (this.debug) {
      routeLogger2.info("External routes loaded", {
        package: packageName,
        prefix: prefix || "/",
        total: successCount,
        failed: failureCount,
        elapsed: `${elapsed}ms`
      });
    }
    this.routesDir = tempRoutesDir;
    return this.getStats();
  }
  // Aggregates the accumulated routes by priority class and meta tags.
  getStats() {
    const stats = {
      total: this.routes.length,
      byPriority: { static: 0, dynamic: 0, catchAll: 0 },
      byTag: {},
      routes: this.routes
    };
    for (const route of this.routes) {
      if (route.priority === 1) stats.byPriority.static++;
      else if (route.priority === 2) stats.byPriority.dynamic++;
      else if (route.priority === 3) stats.byPriority.catchAll++;
      if (route.meta?.tags) {
        for (const tag of route.meta.tags) {
          stats.byTag[tag] = (stats.byTag[tag] || 0) + 1;
        }
      }
    }
    return stats;
  }
  // Recursively collects valid route files under `dir` into `files`.
  async scanFiles(dir, files = []) {
    const entries = await readdir(dir);
    for (const entry of entries) {
      const fullPath = join(dir, entry);
      const fileStat = await stat(fullPath);
      if (fileStat.isDirectory()) {
        await this.scanFiles(fullPath, files);
      } else if (this.isValidRouteFile(entry)) {
        files.push(fullPath);
      }
    }
    return files;
  }
  // Only index files are treated as route modules.
  isValidRouteFile(fileName) {
    return fileName === "index.ts" || fileName === "index.js" || fileName === "index.mjs";
  }
  /**
   * Imports one route module, validates it, registers its contract-scoped
   * middlewares, mounts it, and records its contract paths.
   * @returns true on success, false on any validation/import failure
   */
  async loadRoute(app, absolutePath, prefix) {
    const relativePath = relative(this.routesDir, absolutePath);
    try {
      const module = await import(absolutePath);
      if (!this.validateModule(module, relativePath)) {
        return false;
      }
      // Contract-based routing is mandatory: the mounted Hono instance
      // must carry a non-empty _contractMetas map.
      const hasContractMetas = module.default._contractMetas && module.default._contractMetas.size > 0;
      if (!hasContractMetas) {
        routeLogger2.error("Route must use contract-based routing", {
          file: relativePath,
          hint: "Export contracts using satisfies RouteContract and use app.bind()"
        });
        return false;
      }
      const contractPaths = this.extractContractPaths(module);
      // External packages must declare paths under their own prefix.
      if (prefix) {
        const invalidPaths = contractPaths.filter((path) => !path.startsWith(prefix));
        if (invalidPaths.length > 0) {
          routeLogger2.error("Contract paths must include the package prefix", {
            file: relativePath,
            prefix,
            invalidPaths,
            hint: `Contract paths should start with "${prefix}". Example: path: "${prefix}/labels"`
          });
          return false;
        }
      }
      this.registerContractBasedMiddlewares(app, contractPaths, module);
      app.route("/", module.default);
      contractPaths.forEach((path) => {
        this.routes.push({
          path,
          // Use contract path as-is (already includes prefix)
          file: relativePath,
          meta: module.meta,
          priority: this.calculateContractPriority(path)
        });
        if (this.debug) {
          const icon = path.includes("*") ? "\u2B50" : path.includes(":") ? "\u{1F538}" : "\u{1F539}";
          routeLogger2.debug(`Registered route: ${path}`, { icon, file: relativePath });
        }
      });
      return true;
    } catch (error) {
      this.categorizeAndLogError(error, relativePath);
      return false;
    }
  }
  // Extracts the unique set of paths from the module's contract metas,
  // whose keys have the form "METHOD /path".
  extractContractPaths(module) {
    const paths = /* @__PURE__ */ new Set();
    if (module.default._contractMetas) {
      for (const key of module.default._contractMetas.keys()) {
        const path = key.split(" ")[1];
        if (path) {
          paths.add(path);
        }
      }
    }
    return Array.from(paths);
  }
  // Priority classes: 3 = catch-all (*), 2 = dynamic (:param), 1 = static.
  calculateContractPriority(path) {
    if (path.includes("*")) return 3;
    if (path.includes(":")) return 2;
    return 1;
  }
  // A route module must default-export something Hono-like (has .route()).
  validateModule(module, relativePath) {
    if (!module.default) {
      routeLogger2.error("Route must export Hono instance as default", { file: relativePath });
      return false;
    }
    if (typeof module.default.route !== "function") {
      routeLogger2.error("Default export is not a Hono instance", { file: relativePath });
      return false;
    }
    return true;
  }
  /**
   * Wires the loader-level middlewares to each contract path, honoring
   * per-contract skip lists: a first catch-all middleware stashes the
   * contract's skipMiddlewares into the context, then each middleware
   * checks that list by name before running.
   */
  registerContractBasedMiddlewares(app, contractPaths, module) {
    app.use("*", (c, next) => {
      const method = c.req.method;
      const requestPath = new URL(c.req.url).pathname;
      const key = `${method} ${requestPath}`;
      const meta = module.default._contractMetas?.get(key);
      if (meta?.skipMiddlewares) {
        c.set("_skipMiddlewares", meta.skipMiddlewares);
      }
      return next();
    });
    for (const contractPath of contractPaths) {
      const middlewarePath = contractPath === "/" ? "/*" : `${contractPath}/*`;
      for (const middleware of this.middlewares) {
        app.use(middlewarePath, async (c, next) => {
          const skipList = c.get("_skipMiddlewares") || [];
          if (skipList.includes(middleware.name)) {
            return next();
          }
          return middleware.handler(c, next);
        });
      }
    }
  }
  // Maps an import failure to a categorized, hint-carrying log entry.
  categorizeAndLogError(error, relativePath) {
    const message = error.message;
    const stack = error.stack;
    if (message.includes("Cannot find module") || message.includes("MODULE_NOT_FOUND")) {
      routeLogger2.error("Missing dependency", {
        file: relativePath,
        error: message,
        hint: "Run: npm install"
      });
    } else if (message.includes("SyntaxError") || stack?.includes("SyntaxError")) {
      routeLogger2.error("Syntax error", {
        file: relativePath,
        error: message,
        ...this.debug && stack && {
          stack: stack.split("\n").slice(0, 5).join("\n")
        }
      });
    } else if (message.includes("Unexpected token")) {
      routeLogger2.error("Parse error", {
        file: relativePath,
        error: message,
        hint: "Check for syntax errors or invalid TypeScript"
      });
    } else {
      routeLogger2.error("Route loading failed", {
        file: relativePath,
        error: message,
        ...this.debug && stack && { stack }
      });
    }
  }
  // Debug-mode summary of the load run.
  logStats(stats, elapsed) {
    const tagCounts = Object.entries(stats.byTag).map(([tag, count2]) => `${tag}(${count2})`).join(", ");
    routeLogger2.info("Routes loaded successfully", {
      total: stats.total,
      priority: {
        static: stats.byPriority.static,
        dynamic: stats.byPriority.dynamic,
        catchAll: stats.byPriority.catchAll
      },
      ...tagCounts && { tags: tagCounts },
      elapsed: `${elapsed}ms`
    });
  }
};
|
|
2831
|
-
/**
 * Top-level entry point: loads application routes (default directory
 * src/server/routes) and, unless disabled, routes discovered from SPFN
 * function packages. Per-package failures are logged and skipped so one
 * bad package cannot prevent the rest from loading.
 *
 * @param app - Hono app instance
 * @param options - { routesDir?, debug?, middlewares?, includeFunctionRoutes? }
 * @returns accumulated route statistics
 */
async function loadRoutes(app, options) {
  const routesDir = options?.routesDir ?? join(process.cwd(), "src", "server", "routes");
  const debug = options?.debug ?? false;
  const middlewares = options?.middlewares ?? [];
  const includeFunctionRoutes = options?.includeFunctionRoutes ?? true;
  const loader = new AutoRouteLoader(routesDir, debug, middlewares);
  const stats = await loader.load(app);
  if (includeFunctionRoutes) {
    // Bundler idiom for a lazy in-bundle import of the function-routes module.
    const { discoverFunctionRoutes: discoverFunctionRoutes2 } = await Promise.resolve().then(() => (init_function_routes(), function_routes_exports));
    const functionRoutes = discoverFunctionRoutes2();
    if (functionRoutes.length > 0) {
      routeLogger2.info("Loading function routes", { count: functionRoutes.length });
      for (const func of functionRoutes) {
        try {
          await loader.loadExternalRoutes(app, func.routesDir, func.packageName, func.prefix);
          routeLogger2.info("Function routes loaded", {
            package: func.packageName,
            routesDir: func.routesDir,
            prefix: func.prefix || "/"
          });
        } catch (error) {
          routeLogger2.error("Failed to load function routes", {
            package: func.packageName,
            error: error instanceof Error ? error.message : "Unknown error"
          });
        }
      }
    }
  }
  return stats;
}
|
|
2862
|
-
|
|
2863
|
-
// src/route/bind.ts
// Eager section: route binding needs errors and logging initialized.
init_errors();
init_logger2();

// src/middleware/error-handler.ts
// Eager section: child logger used by the ErrorHandler middleware below.
init_logger2();
var errorLogger = logger.child("error-handler");
|
|
2870
|
-
/**
 * Builds a Hono-style error handler.
 *
 * The handler reads `statusCode` and `details` off the thrown error (when
 * present), optionally logs it (level `error` for 5xx, `warn` otherwise),
 * and responds with a `{ success: false, error: {...} }` JSON body.
 *
 * @param options.includeStack - include stack traces in the response/logs;
 *   defaults to true outside production (NODE_ENV check)
 * @param options.enableLogging - log handled errors; defaults to true
 * @returns (err, c) => JSON response
 */
function ErrorHandler(options = {}) {
  const {
    includeStack = process.env.NODE_ENV !== "production",
    enableLogging = true
  } = options;
  return (err, c) => {
    const typed = err;
    const status = typed.statusCode || 500;
    const kind = err.name || "Error";
    if (enableLogging) {
      const entry = {
        type: kind,
        message: err.message,
        statusCode: status,
        path: c.req.path,
        method: c.req.method
      };
      if (typed.details) {
        entry.details = typed.details;
      }
      // Stack traces are only logged for server-side (5xx) failures.
      if (status >= 500 && includeStack) {
        entry.stack = err.stack;
      }
      const level = status >= 500 ? "error" : "warn";
      errorLogger[level]("Error occurred", entry);
    }
    const payload = {
      success: false,
      error: {
        message: err.message || "Internal Server Error",
        type: kind,
        statusCode: status
      }
    };
    if (typed.details) {
      payload.error.details = typed.details;
    }
    if (includeStack) {
      payload.error.stack = err.stack;
    }
    return c.json(payload, status);
  };
}
|
|
2913
|
-
|
|
2914
|
-
// src/middleware/request-logger.ts
// Eager section: logging must be initialized before RequestLogger is built.
init_logger2();
// Defaults merged under any user-supplied RequestLogger config:
// paths excluded from logging, field names masked in logged request
// bodies (matched case-insensitively by substring), and the duration in
// milliseconds above which a request is flagged slow.
var DEFAULT_CONFIG = {
  excludePaths: ["/health", "/ping", "/favicon.ico"],
  sensitiveFields: ["password", "token", "apiKey", "secret", "authorization"],
  slowRequestThreshold: 1e3
};
|
|
2921
|
-
/**
 * Produces a request identifier of the form `req_<epoch-ms>_<12 hex chars>`
 * (6 random bytes from crypto).
 */
function generateRequestId() {
  const suffix = randomBytes(6).toString("hex");
  return ["req", Date.now(), suffix].join("_");
}
|
|
2926
|
-
/**
 * Returns a copy of `obj` with sensitive values replaced by "***MASKED***".
 *
 * A key is sensitive when its lowercase form contains any of the
 * lowercase `sensitiveFields` entries. Nested objects/arrays are masked
 * recursively (shallow copies at each level); already-visited objects are
 * replaced with the string "[Circular]". Non-objects are returned as-is.
 *
 * @param obj - value to mask
 * @param sensitiveFields - field-name substrings to mask
 * @param seen - internal cycle guard
 */
function maskSensitiveData2(obj, sensitiveFields, seen = /* @__PURE__ */ new WeakSet()) {
  if (!obj || typeof obj !== "object") return obj;
  if (seen.has(obj)) return "[Circular]";
  seen.add(obj);
  const needles = sensitiveFields.map((field) => field.toLowerCase());
  const copy = Array.isArray(obj) ? [...obj] : { ...obj };
  for (const key in copy) {
    const lowered = key.toLowerCase();
    const sensitive = needles.some((needle) => lowered.includes(needle));
    if (sensitive) {
      copy[key] = "***MASKED***";
    } else {
      const value = copy[key];
      if (value !== null && typeof value === "object") {
        copy[key] = maskSensitiveData2(value, sensitiveFields, seen);
      }
    }
  }
  return copy;
}
|
|
2942
|
-
/**
 * Builds the request-logging middleware.
 *
 * Logs "Request received" on entry and "Request completed" on exit
 * (info/warn/error keyed off the response status), flags slow requests,
 * and for 4xx/5xx responses attaches the (cloned) response body plus the
 * masked request body for body-carrying methods. Paths in
 * cfg.excludePaths are passed through untouched. A requestId is generated
 * and stored in the context for downstream handlers.
 *
 * @param config - partial overrides merged over DEFAULT_CONFIG
 */
function RequestLogger(config) {
  const cfg = { ...DEFAULT_CONFIG, ...config };
  const apiLogger = logger.child("api");
  return async (c, next) => {
    const path = new URL(c.req.url).pathname;
    // Health-check style paths are not logged at all.
    if (cfg.excludePaths.includes(path)) {
      return next();
    }
    const requestId = generateRequestId();
    c.set("requestId", requestId);
    const method = c.req.method;
    const userAgent = c.req.header("user-agent");
    // Prefer proxy-forwarded client IPs; header values are trusted as-is.
    const ip = c.req.header("x-forwarded-for") || c.req.header("x-real-ip") || "unknown";
    const startTime = Date.now();
    apiLogger.info("Request received", {
      requestId,
      method,
      path,
      ip,
      userAgent
    });
    try {
      await next();
      const duration = Date.now() - startTime;
      const status = c.res.status;
      const logData = {
        requestId,
        method,
        path,
        status,
        duration
      };
      const isSlowRequest = duration >= cfg.slowRequestThreshold;
      if (isSlowRequest) {
        logData.slow = true;
      }
      if (status >= 400) {
        // Clone before reading so the real response body stays consumable;
        // non-JSON bodies are silently skipped.
        try {
          const responseBody = await c.res.clone().json();
          logData.response = responseBody;
        } catch {
        }
        if (["POST", "PUT", "PATCH"].includes(method)) {
          // Best-effort: reading the request body here may fail if it was
          // already consumed — errors are deliberately ignored.
          try {
            const requestBody = await c.req.json();
            logData.request = maskSensitiveData2(requestBody, cfg.sensitiveFields);
          } catch {
          }
        }
      }
      const logLevel = status >= 500 ? "error" : status >= 400 ? "warn" : "info";
      apiLogger[logLevel]("Request completed", logData);
    } catch (error) {
      const duration = Date.now() - startTime;
      apiLogger.error("Request failed", error, {
        requestId,
        method,
        path,
        duration
      });
      // Rethrow so the app-level error handler still runs.
      throw error;
    }
  };
}
|
|
3006
|
-
|
|
3007
|
-
// src/server/create-server.ts
|
|
3008
|
-
init_logger2();
|
|
3009
|
-
|
|
3010
|
-
// src/server/helpers.ts
|
|
3011
|
-
function createHealthCheckHandler(detailed) {
|
|
3012
|
-
return async (c) => {
|
|
3013
|
-
const response = {
|
|
3014
|
-
status: "ok",
|
|
3015
|
-
timestamp: (/* @__PURE__ */ new Date()).toISOString()
|
|
3016
|
-
};
|
|
3017
|
-
if (detailed) {
|
|
3018
|
-
const { getDatabase: getDatabase2 } = await Promise.resolve().then(() => (init_db(), db_exports));
|
|
3019
|
-
const { getRedis: getRedis2 } = await Promise.resolve().then(() => (init_cache(), cache_exports));
|
|
3020
|
-
const db = getDatabase2();
|
|
3021
|
-
let dbStatus = "disconnected";
|
|
3022
|
-
let dbError;
|
|
3023
|
-
if (db) {
|
|
3024
|
-
try {
|
|
3025
|
-
await db.execute("SELECT 1");
|
|
3026
|
-
dbStatus = "connected";
|
|
3027
|
-
} catch (error) {
|
|
3028
|
-
dbStatus = "error";
|
|
3029
|
-
dbError = error instanceof Error ? error.message : String(error);
|
|
3030
|
-
}
|
|
3031
|
-
}
|
|
3032
|
-
const redis = getRedis2();
|
|
3033
|
-
let redisStatus = "disconnected";
|
|
3034
|
-
let redisError;
|
|
3035
|
-
if (redis) {
|
|
3036
|
-
try {
|
|
3037
|
-
await redis.ping();
|
|
3038
|
-
redisStatus = "connected";
|
|
3039
|
-
} catch (error) {
|
|
3040
|
-
redisStatus = "error";
|
|
3041
|
-
redisError = error instanceof Error ? error.message : String(error);
|
|
3042
|
-
}
|
|
3043
|
-
}
|
|
3044
|
-
response.services = {
|
|
3045
|
-
database: {
|
|
3046
|
-
status: dbStatus,
|
|
3047
|
-
...dbError && { error: dbError }
|
|
3048
|
-
},
|
|
3049
|
-
redis: {
|
|
3050
|
-
status: redisStatus,
|
|
3051
|
-
...redisError && { error: redisError }
|
|
3052
|
-
}
|
|
3053
|
-
};
|
|
3054
|
-
const hasErrors = dbStatus === "error" || redisStatus === "error";
|
|
3055
|
-
response.status = hasErrors ? "degraded" : "ok";
|
|
3056
|
-
}
|
|
3057
|
-
const statusCode = response.status === "ok" ? 200 : 503;
|
|
3058
|
-
return c.json(response, statusCode);
|
|
3059
|
-
};
|
|
3060
|
-
}
|
|
3061
|
-
function applyServerTimeouts(server, timeouts) {
|
|
3062
|
-
if ("timeout" in server) {
|
|
3063
|
-
server.timeout = timeouts.request;
|
|
3064
|
-
server.keepAliveTimeout = timeouts.keepAlive;
|
|
3065
|
-
server.headersTimeout = timeouts.headers;
|
|
3066
|
-
}
|
|
3067
|
-
}
|
|
3068
|
-
function getTimeoutConfig(config) {
|
|
3069
|
-
return {
|
|
3070
|
-
request: config?.request ?? (parseInt(process.env.SERVER_TIMEOUT || "", 10) || 12e4),
|
|
3071
|
-
keepAlive: config?.keepAlive ?? (parseInt(process.env.SERVER_KEEPALIVE_TIMEOUT || "", 10) || 65e3),
|
|
3072
|
-
headers: config?.headers ?? (parseInt(process.env.SERVER_HEADERS_TIMEOUT || "", 10) || 6e4)
|
|
3073
|
-
};
|
|
3074
|
-
}
|
|
3075
|
-
function getShutdownTimeout(config) {
|
|
3076
|
-
return config?.timeout ?? (parseInt(process.env.SHUTDOWN_TIMEOUT || "", 10) || 3e4);
|
|
3077
|
-
}
|
|
3078
|
-
function buildMiddlewareOrder(config) {
|
|
3079
|
-
const order = [];
|
|
3080
|
-
const middlewareConfig = config.middleware ?? {};
|
|
3081
|
-
const enableLogger = middlewareConfig.logger !== false;
|
|
3082
|
-
const enableCors = middlewareConfig.cors !== false;
|
|
3083
|
-
const enableErrorHandler = middlewareConfig.errorHandler !== false;
|
|
3084
|
-
if (enableLogger) order.push("RequestLogger");
|
|
3085
|
-
if (enableCors) order.push("CORS");
|
|
3086
|
-
config.use?.forEach((_, i) => order.push(`Custom[${i}]`));
|
|
3087
|
-
if (config.beforeRoutes) order.push("beforeRoutes hook");
|
|
3088
|
-
order.push("Routes");
|
|
3089
|
-
if (config.afterRoutes) order.push("afterRoutes hook");
|
|
3090
|
-
if (enableErrorHandler) order.push("ErrorHandler");
|
|
3091
|
-
return order;
|
|
3092
|
-
}
|
|
3093
|
-
function buildStartupConfig(config, timeouts) {
|
|
3094
|
-
const middlewareConfig = config.middleware ?? {};
|
|
3095
|
-
const healthCheckConfig = config.healthCheck ?? {};
|
|
3096
|
-
const healthCheckEnabled = healthCheckConfig.enabled !== false;
|
|
3097
|
-
const healthCheckPath = healthCheckConfig.path ?? "/health";
|
|
3098
|
-
const healthCheckDetailed = healthCheckConfig.detailed ?? process.env.NODE_ENV === "development";
|
|
3099
|
-
return {
|
|
3100
|
-
middleware: {
|
|
3101
|
-
logger: middlewareConfig.logger !== false,
|
|
3102
|
-
cors: middlewareConfig.cors !== false,
|
|
3103
|
-
errorHandler: middlewareConfig.errorHandler !== false,
|
|
3104
|
-
custom: config.use?.length ?? 0
|
|
3105
|
-
},
|
|
3106
|
-
healthCheck: healthCheckEnabled ? {
|
|
3107
|
-
enabled: true,
|
|
3108
|
-
path: healthCheckPath,
|
|
3109
|
-
detailed: healthCheckDetailed
|
|
3110
|
-
} : { enabled: false },
|
|
3111
|
-
hooks: {
|
|
3112
|
-
beforeRoutes: !!config.beforeRoutes,
|
|
3113
|
-
afterRoutes: !!config.afterRoutes
|
|
3114
|
-
},
|
|
3115
|
-
timeout: {
|
|
3116
|
-
request: `${timeouts.request}ms`,
|
|
3117
|
-
keepAlive: `${timeouts.keepAlive}ms`,
|
|
3118
|
-
headers: `${timeouts.headers}ms`
|
|
3119
|
-
},
|
|
3120
|
-
shutdown: {
|
|
3121
|
-
timeout: `${config.shutdown?.timeout ?? 3e4}ms`
|
|
3122
|
-
}
|
|
3123
|
-
};
|
|
3124
|
-
}
|
|
3125
|
-
|
|
3126
|
-
// src/server/plugin-discovery.ts
|
|
3127
|
-
init_logger2();
|
|
3128
|
-
var pluginLogger = logger.child("plugin");
|
|
3129
|
-
async function discoverPlugins(cwd = process.cwd()) {
|
|
3130
|
-
const plugins = [];
|
|
3131
|
-
const nodeModulesPath = join(cwd, "node_modules");
|
|
3132
|
-
try {
|
|
3133
|
-
const projectPkgPath = join(cwd, "package.json");
|
|
3134
|
-
if (!existsSync(projectPkgPath)) {
|
|
3135
|
-
pluginLogger.debug("No package.json found, skipping plugin discovery");
|
|
3136
|
-
return plugins;
|
|
3137
|
-
}
|
|
3138
|
-
const projectPkg = JSON.parse(readFileSync(projectPkgPath, "utf-8"));
|
|
3139
|
-
const dependencies = {
|
|
3140
|
-
...projectPkg.dependencies,
|
|
3141
|
-
...projectPkg.devDependencies
|
|
3142
|
-
};
|
|
3143
|
-
for (const [packageName] of Object.entries(dependencies)) {
|
|
3144
|
-
if (!packageName.startsWith("@spfn/")) {
|
|
3145
|
-
continue;
|
|
3146
|
-
}
|
|
3147
|
-
try {
|
|
3148
|
-
const plugin = await loadPluginFromPackage(packageName, nodeModulesPath);
|
|
3149
|
-
if (plugin) {
|
|
3150
|
-
plugins.push(plugin);
|
|
3151
|
-
pluginLogger.info("Plugin discovered", {
|
|
3152
|
-
name: plugin.name,
|
|
3153
|
-
hooks: getPluginHookNames(plugin)
|
|
3154
|
-
});
|
|
3155
|
-
}
|
|
3156
|
-
} catch (error) {
|
|
3157
|
-
pluginLogger.debug("Failed to load plugin", {
|
|
3158
|
-
package: packageName,
|
|
3159
|
-
error: error instanceof Error ? error.message : "Unknown error"
|
|
3160
|
-
});
|
|
3161
|
-
}
|
|
3162
|
-
}
|
|
3163
|
-
} catch (error) {
|
|
3164
|
-
pluginLogger.warn("Plugin discovery failed", {
|
|
3165
|
-
error: error instanceof Error ? error.message : "Unknown error"
|
|
3166
|
-
});
|
|
3167
|
-
}
|
|
3168
|
-
return plugins;
|
|
3169
|
-
}
|
|
3170
|
-
async function loadPluginFromPackage(packageName, nodeModulesPath) {
|
|
3171
|
-
const pkgPath = join(nodeModulesPath, ...packageName.split("/"), "package.json");
|
|
3172
|
-
if (!existsSync(pkgPath)) {
|
|
3173
|
-
return null;
|
|
3174
|
-
}
|
|
3175
|
-
const pkg = JSON.parse(readFileSync(pkgPath, "utf-8"));
|
|
3176
|
-
const packageDir = dirname(pkgPath);
|
|
3177
|
-
const mainEntry = pkg.main || "dist/index.js";
|
|
3178
|
-
const mainPath = join(packageDir, mainEntry);
|
|
3179
|
-
if (!existsSync(mainPath)) {
|
|
3180
|
-
return null;
|
|
3181
|
-
}
|
|
3182
|
-
try {
|
|
3183
|
-
const module = await import(mainPath);
|
|
3184
|
-
if (module.spfnPlugin && isValidPlugin(module.spfnPlugin)) {
|
|
3185
|
-
return module.spfnPlugin;
|
|
3186
|
-
}
|
|
3187
|
-
return null;
|
|
3188
|
-
} catch (error) {
|
|
3189
|
-
return null;
|
|
3190
|
-
}
|
|
3191
|
-
}
|
|
3192
|
-
function isValidPlugin(plugin) {
|
|
3193
|
-
return plugin && typeof plugin === "object" && typeof plugin.name === "string" && (typeof plugin.afterInfrastructure === "function" || typeof plugin.beforeRoutes === "function" || typeof plugin.afterRoutes === "function" || typeof plugin.afterStart === "function" || typeof plugin.beforeShutdown === "function");
|
|
3194
|
-
}
|
|
3195
|
-
function getPluginHookNames(plugin) {
|
|
3196
|
-
const hooks = [];
|
|
3197
|
-
if (plugin.afterInfrastructure) hooks.push("afterInfrastructure");
|
|
3198
|
-
if (plugin.beforeRoutes) hooks.push("beforeRoutes");
|
|
3199
|
-
if (plugin.afterRoutes) hooks.push("afterRoutes");
|
|
3200
|
-
if (plugin.afterStart) hooks.push("afterStart");
|
|
3201
|
-
if (plugin.beforeShutdown) hooks.push("beforeShutdown");
|
|
3202
|
-
return hooks;
|
|
3203
|
-
}
|
|
3204
|
-
async function executePluginHooks(plugins, hookName, ...args) {
|
|
3205
|
-
for (const plugin of plugins) {
|
|
3206
|
-
const hook = plugin[hookName];
|
|
3207
|
-
if (typeof hook === "function") {
|
|
3208
|
-
try {
|
|
3209
|
-
pluginLogger.debug("Executing plugin hook", {
|
|
3210
|
-
plugin: plugin.name,
|
|
3211
|
-
hook: hookName
|
|
3212
|
-
});
|
|
3213
|
-
await hook(...args);
|
|
3214
|
-
} catch (error) {
|
|
3215
|
-
pluginLogger.error("Plugin hook failed", {
|
|
3216
|
-
plugin: plugin.name,
|
|
3217
|
-
hook: hookName,
|
|
3218
|
-
error: error instanceof Error ? error.message : "Unknown error"
|
|
3219
|
-
});
|
|
3220
|
-
throw new Error(
|
|
3221
|
-
`Plugin ${plugin.name} failed in ${hookName} hook: ${error instanceof Error ? error.message : "Unknown error"}`
|
|
3222
|
-
);
|
|
3223
|
-
}
|
|
502
|
+
function buildStartupConfig(config2, timeouts) {
|
|
503
|
+
const middlewareConfig = config2.middleware ?? {};
|
|
504
|
+
const healthCheckConfig = config2.healthCheck ?? {};
|
|
505
|
+
const healthCheckEnabled = healthCheckConfig.enabled !== false;
|
|
506
|
+
const healthCheckPath = healthCheckConfig.path ?? "/health";
|
|
507
|
+
const healthCheckDetailed = healthCheckConfig.detailed ?? env.NODE_ENV === "development";
|
|
508
|
+
return {
|
|
509
|
+
middleware: {
|
|
510
|
+
logger: middlewareConfig.logger !== false,
|
|
511
|
+
cors: middlewareConfig.cors !== false,
|
|
512
|
+
errorHandler: middlewareConfig.errorHandler !== false,
|
|
513
|
+
custom: config2.use?.length ?? 0
|
|
514
|
+
},
|
|
515
|
+
healthCheck: healthCheckEnabled ? {
|
|
516
|
+
enabled: true,
|
|
517
|
+
path: healthCheckPath,
|
|
518
|
+
detailed: healthCheckDetailed
|
|
519
|
+
} : { enabled: false },
|
|
520
|
+
hooks: {
|
|
521
|
+
beforeRoutes: !!config2.beforeRoutes,
|
|
522
|
+
afterRoutes: !!config2.afterRoutes
|
|
523
|
+
},
|
|
524
|
+
timeout: {
|
|
525
|
+
request: `${timeouts.request}ms`,
|
|
526
|
+
keepAlive: `${timeouts.keepAlive}ms`,
|
|
527
|
+
headers: `${timeouts.headers}ms`
|
|
528
|
+
},
|
|
529
|
+
shutdown: {
|
|
530
|
+
timeout: `${config2.shutdown?.timeout ?? env.SHUTDOWN_TIMEOUT}ms`
|
|
3224
531
|
}
|
|
3225
|
-
}
|
|
532
|
+
};
|
|
3226
533
|
}
|
|
534
|
+
var serverLogger = logger.child("@spfn/core:server");
|
|
3227
535
|
|
|
3228
536
|
// src/server/create-server.ts
|
|
3229
|
-
|
|
3230
|
-
async function createServer(config, plugins = []) {
|
|
537
|
+
async function createServer(config2) {
|
|
3231
538
|
const cwd = process.cwd();
|
|
3232
539
|
const appPath = join(cwd, "src", "server", "app.ts");
|
|
3233
|
-
const appJsPath = join(cwd, "src", "server", "app
|
|
540
|
+
const appJsPath = join(cwd, "src", "server", "app");
|
|
3234
541
|
if (existsSync(appPath) || existsSync(appJsPath)) {
|
|
3235
|
-
return await loadCustomApp(appPath, appJsPath,
|
|
542
|
+
return await loadCustomApp(appPath, appJsPath, config2);
|
|
3236
543
|
}
|
|
3237
|
-
return await createAutoConfiguredApp(
|
|
544
|
+
return await createAutoConfiguredApp(config2);
|
|
3238
545
|
}
|
|
3239
|
-
async function loadCustomApp(appPath, appJsPath,
|
|
3240
|
-
const
|
|
546
|
+
async function loadCustomApp(appPath, appJsPath, config2) {
|
|
547
|
+
const actualPath = existsSync(appPath) ? appPath : appJsPath;
|
|
548
|
+
const appModule = await import(actualPath);
|
|
3241
549
|
const appFactory = appModule.default;
|
|
3242
550
|
if (!appFactory) {
|
|
3243
551
|
throw new Error("app.ts must export a default function that returns a Hono app");
|
|
3244
552
|
}
|
|
3245
553
|
const app = await appFactory();
|
|
3246
|
-
|
|
3247
|
-
|
|
3248
|
-
|
|
3249
|
-
|
|
554
|
+
if (config2?.routes) {
|
|
555
|
+
const routes = registerRoutes(app, config2.routes, config2.middlewares);
|
|
556
|
+
logRegisteredRoutes(routes, config2?.debug ?? false);
|
|
557
|
+
}
|
|
3250
558
|
return app;
|
|
3251
559
|
}
|
|
3252
|
-
async function createAutoConfiguredApp(
|
|
560
|
+
async function createAutoConfiguredApp(config2) {
|
|
3253
561
|
const app = new Hono();
|
|
3254
|
-
const middlewareConfig =
|
|
562
|
+
const middlewareConfig = config2?.middleware ?? {};
|
|
3255
563
|
const enableLogger = middlewareConfig.logger !== false;
|
|
3256
564
|
const enableCors = middlewareConfig.cors !== false;
|
|
3257
565
|
const enableErrorHandler = middlewareConfig.errorHandler !== false;
|
|
@@ -3261,29 +569,31 @@ async function createAutoConfiguredApp(config, plugins = []) {
|
|
|
3261
569
|
await next();
|
|
3262
570
|
});
|
|
3263
571
|
}
|
|
3264
|
-
applyDefaultMiddleware(app,
|
|
3265
|
-
|
|
3266
|
-
|
|
3267
|
-
|
|
3268
|
-
|
|
3269
|
-
await
|
|
3270
|
-
await
|
|
3271
|
-
|
|
572
|
+
applyDefaultMiddleware(app, config2, enableLogger, enableCors);
|
|
573
|
+
if (Array.isArray(config2?.use)) {
|
|
574
|
+
config2.use.forEach((mw) => app.use("*", mw));
|
|
575
|
+
}
|
|
576
|
+
registerHealthCheckEndpoint(app, config2);
|
|
577
|
+
await executeBeforeRoutesHook(app, config2);
|
|
578
|
+
await loadAppRoutes(app, config2);
|
|
579
|
+
registerSSEEndpoint(app, config2);
|
|
580
|
+
await executeAfterRoutesHook(app, config2);
|
|
3272
581
|
if (enableErrorHandler) {
|
|
3273
582
|
app.onError(ErrorHandler());
|
|
3274
583
|
}
|
|
3275
584
|
return app;
|
|
3276
585
|
}
|
|
3277
|
-
function applyDefaultMiddleware(app,
|
|
586
|
+
function applyDefaultMiddleware(app, config2, enableLogger, enableCors) {
|
|
3278
587
|
if (enableLogger) {
|
|
3279
588
|
app.use("*", RequestLogger());
|
|
3280
589
|
}
|
|
3281
|
-
if (enableCors
|
|
3282
|
-
|
|
590
|
+
if (enableCors) {
|
|
591
|
+
const corsOptions = config2?.cors !== false ? config2?.cors : void 0;
|
|
592
|
+
app.use("*", cors(corsOptions));
|
|
3283
593
|
}
|
|
3284
594
|
}
|
|
3285
|
-
function registerHealthCheckEndpoint(app,
|
|
3286
|
-
const healthCheckConfig =
|
|
595
|
+
function registerHealthCheckEndpoint(app, config2) {
|
|
596
|
+
const healthCheckConfig = config2?.healthCheck ?? {};
|
|
3287
597
|
const healthCheckEnabled = healthCheckConfig.enabled !== false;
|
|
3288
598
|
const healthCheckPath = healthCheckConfig.path ?? "/health";
|
|
3289
599
|
const healthCheckDetailed = healthCheckConfig.detailed ?? process.env.NODE_ENV === "development";
|
|
@@ -3292,41 +602,255 @@ function registerHealthCheckEndpoint(app, config) {
|
|
|
3292
602
|
serverLogger.debug(`Health check endpoint enabled at ${healthCheckPath}`);
|
|
3293
603
|
}
|
|
3294
604
|
}
|
|
3295
|
-
async function executeBeforeRoutesHook(app,
|
|
3296
|
-
if (
|
|
605
|
+
async function executeBeforeRoutesHook(app, config2) {
|
|
606
|
+
if (config2?.lifecycle?.beforeRoutes) {
|
|
607
|
+
await config2.lifecycle.beforeRoutes(app);
|
|
608
|
+
}
|
|
609
|
+
}
|
|
610
|
+
async function loadAppRoutes(app, config2) {
|
|
611
|
+
const debug = isDebugMode(config2);
|
|
612
|
+
if (config2?.routes) {
|
|
613
|
+
const routes = registerRoutes(app, config2.routes, config2.middlewares);
|
|
614
|
+
logRegisteredRoutes(routes, debug);
|
|
615
|
+
} else if (debug) {
|
|
616
|
+
serverLogger.warn("\u26A0\uFE0F No routes configured. Use defineServerConfig().routes() to register routes.");
|
|
617
|
+
}
|
|
618
|
+
}
|
|
619
|
+
function logRegisteredRoutes(routes, debug) {
|
|
620
|
+
if (routes.length === 0) {
|
|
621
|
+
if (debug) {
|
|
622
|
+
serverLogger.warn("\u26A0\uFE0F No routes registered");
|
|
623
|
+
}
|
|
3297
624
|
return;
|
|
3298
625
|
}
|
|
3299
|
-
|
|
3300
|
-
|
|
3301
|
-
|
|
3302
|
-
|
|
3303
|
-
|
|
626
|
+
const sortedRoutes = [...routes].sort((a, b) => a.path.localeCompare(b.path));
|
|
627
|
+
const maxMethodLen = Math.max(...sortedRoutes.map((r) => r.method.length));
|
|
628
|
+
const routeLines = sortedRoutes.map(
|
|
629
|
+
(r) => ` ${r.method.padEnd(maxMethodLen)} ${r.path}`
|
|
630
|
+
).join("\n");
|
|
631
|
+
serverLogger.info(`\u2713 Routes registered (${routes.length}):
|
|
632
|
+
${routeLines}`);
|
|
633
|
+
}
|
|
634
|
+
async function executeAfterRoutesHook(app, config2) {
|
|
635
|
+
if (config2?.lifecycle?.afterRoutes) {
|
|
636
|
+
await config2.lifecycle.afterRoutes(app);
|
|
637
|
+
}
|
|
638
|
+
}
|
|
639
|
+
function registerSSEEndpoint(app, config2) {
|
|
640
|
+
if (!config2?.events) {
|
|
641
|
+
return;
|
|
642
|
+
}
|
|
643
|
+
const eventsConfig = config2.eventsConfig ?? {};
|
|
644
|
+
const path = eventsConfig.path ?? "/events/stream";
|
|
645
|
+
const debug = isDebugMode(config2);
|
|
646
|
+
app.get(path, createSSEHandler(config2.events, eventsConfig));
|
|
647
|
+
if (debug) {
|
|
648
|
+
const eventNames = config2.events.eventNames;
|
|
649
|
+
serverLogger.info(`\u2713 SSE endpoint registered at ${path}`, {
|
|
650
|
+
events: eventNames
|
|
651
|
+
});
|
|
3304
652
|
}
|
|
3305
653
|
}
|
|
3306
|
-
|
|
3307
|
-
|
|
3308
|
-
|
|
3309
|
-
|
|
3310
|
-
|
|
3311
|
-
|
|
654
|
+
function isDebugMode(config2) {
|
|
655
|
+
return config2?.debug ?? process.env.NODE_ENV === "development";
|
|
656
|
+
}
|
|
657
|
+
var jobLogger = logger.child("@spfn/core:job");
|
|
658
|
+
var bossInstance = null;
|
|
659
|
+
var bossConfig = null;
|
|
660
|
+
async function initBoss(options) {
|
|
661
|
+
if (bossInstance) {
|
|
662
|
+
jobLogger.warn("pg-boss already initialized, returning existing instance");
|
|
663
|
+
return bossInstance;
|
|
664
|
+
}
|
|
665
|
+
jobLogger.info("Initializing pg-boss...");
|
|
666
|
+
bossConfig = options;
|
|
667
|
+
const pgBossOptions = {
|
|
668
|
+
connectionString: options.connectionString,
|
|
669
|
+
schema: options.schema ?? "spfn_queue",
|
|
670
|
+
maintenanceIntervalSeconds: options.maintenanceIntervalSeconds ?? 120
|
|
671
|
+
};
|
|
672
|
+
if (options.monitorIntervalSeconds !== void 0 && options.monitorIntervalSeconds >= 1) {
|
|
673
|
+
pgBossOptions.monitorIntervalSeconds = options.monitorIntervalSeconds;
|
|
674
|
+
}
|
|
675
|
+
bossInstance = new PgBoss(pgBossOptions);
|
|
676
|
+
bossInstance.on("error", (error) => {
|
|
677
|
+
jobLogger.error("pg-boss error:", error);
|
|
3312
678
|
});
|
|
679
|
+
await bossInstance.start();
|
|
680
|
+
jobLogger.info("pg-boss started successfully");
|
|
681
|
+
return bossInstance;
|
|
3313
682
|
}
|
|
3314
|
-
|
|
3315
|
-
|
|
683
|
+
function getBoss() {
|
|
684
|
+
return bossInstance;
|
|
685
|
+
}
|
|
686
|
+
async function stopBoss() {
|
|
687
|
+
if (!bossInstance) {
|
|
3316
688
|
return;
|
|
3317
689
|
}
|
|
690
|
+
jobLogger.info("Stopping pg-boss...");
|
|
3318
691
|
try {
|
|
3319
|
-
await
|
|
692
|
+
await bossInstance.stop({ graceful: true, timeout: 3e4 });
|
|
693
|
+
jobLogger.info("pg-boss stopped gracefully");
|
|
3320
694
|
} catch (error) {
|
|
3321
|
-
|
|
3322
|
-
throw
|
|
695
|
+
jobLogger.error("Error stopping pg-boss:", error);
|
|
696
|
+
throw error;
|
|
697
|
+
} finally {
|
|
698
|
+
bossInstance = null;
|
|
699
|
+
bossConfig = null;
|
|
3323
700
|
}
|
|
3324
701
|
}
|
|
702
|
+
function shouldClearOnStart() {
|
|
703
|
+
return bossConfig?.clearOnStart ?? false;
|
|
704
|
+
}
|
|
3325
705
|
|
|
3326
|
-
// src/
|
|
3327
|
-
|
|
3328
|
-
|
|
3329
|
-
|
|
706
|
+
// src/job/job-router.ts
|
|
707
|
+
function isJobDef(value) {
|
|
708
|
+
return value !== null && typeof value === "object" && "name" in value && "handler" in value && "send" in value && "run" in value;
|
|
709
|
+
}
|
|
710
|
+
function isJobRouter(value) {
|
|
711
|
+
return value !== null && typeof value === "object" && "jobs" in value && "_jobs" in value;
|
|
712
|
+
}
|
|
713
|
+
function collectJobs(router, prefix = "") {
|
|
714
|
+
const jobs = [];
|
|
715
|
+
for (const [key, value] of Object.entries(router.jobs)) {
|
|
716
|
+
const name = prefix ? `${prefix}.${key}` : key;
|
|
717
|
+
if (isJobRouter(value)) {
|
|
718
|
+
jobs.push(...collectJobs(value, name));
|
|
719
|
+
} else if (isJobDef(value)) {
|
|
720
|
+
jobs.push(value);
|
|
721
|
+
}
|
|
722
|
+
}
|
|
723
|
+
return jobs;
|
|
724
|
+
}
|
|
725
|
+
var jobLogger2 = logger.child("@spfn/core:job");
|
|
726
|
+
function getEventQueueName(eventName) {
|
|
727
|
+
return `event:${eventName}`;
|
|
728
|
+
}
|
|
729
|
+
function getDefaultJobOptions(options) {
|
|
730
|
+
return {
|
|
731
|
+
retryLimit: options?.retryLimit ?? 3,
|
|
732
|
+
retryDelay: options?.retryDelay ?? 1e3,
|
|
733
|
+
expireInSeconds: options?.expireInSeconds ?? 300
|
|
734
|
+
};
|
|
735
|
+
}
|
|
736
|
+
async function registerJobs(router) {
|
|
737
|
+
const boss = getBoss();
|
|
738
|
+
if (!boss) {
|
|
739
|
+
throw new Error(
|
|
740
|
+
"pg-boss not initialized. Call initBoss() before registerJobs()"
|
|
741
|
+
);
|
|
742
|
+
}
|
|
743
|
+
const jobs = collectJobs(router);
|
|
744
|
+
const clearOnStart = shouldClearOnStart();
|
|
745
|
+
jobLogger2.info(`Registering ${jobs.length} job(s)...`);
|
|
746
|
+
if (clearOnStart) {
|
|
747
|
+
jobLogger2.info("Clearing existing jobs before registration...");
|
|
748
|
+
for (const job2 of jobs) {
|
|
749
|
+
await boss.deleteAllJobs(job2.name);
|
|
750
|
+
if (job2.subscribedEvent) {
|
|
751
|
+
const eventQueue = getEventQueueName(job2.subscribedEvent);
|
|
752
|
+
await boss.deleteAllJobs(eventQueue);
|
|
753
|
+
}
|
|
754
|
+
}
|
|
755
|
+
jobLogger2.info("Existing jobs cleared");
|
|
756
|
+
}
|
|
757
|
+
for (const job2 of jobs) {
|
|
758
|
+
await registerJob(job2);
|
|
759
|
+
}
|
|
760
|
+
jobLogger2.info("All jobs registered successfully");
|
|
761
|
+
}
|
|
762
|
+
async function ensureQueue(boss, queueName) {
|
|
763
|
+
await boss.createQueue(queueName);
|
|
764
|
+
}
|
|
765
|
+
async function registerWorker(boss, job2, queueName) {
|
|
766
|
+
await ensureQueue(boss, queueName);
|
|
767
|
+
await boss.work(
|
|
768
|
+
queueName,
|
|
769
|
+
{ batchSize: 1 },
|
|
770
|
+
async (jobs) => {
|
|
771
|
+
for (const pgBossJob of jobs) {
|
|
772
|
+
jobLogger2.debug(`[Job:${job2.name}] Executing...`, { jobId: pgBossJob.id });
|
|
773
|
+
const startTime = Date.now();
|
|
774
|
+
try {
|
|
775
|
+
if (job2.inputSchema) {
|
|
776
|
+
await job2.handler(pgBossJob.data);
|
|
777
|
+
} else {
|
|
778
|
+
await job2.handler();
|
|
779
|
+
}
|
|
780
|
+
const duration = Date.now() - startTime;
|
|
781
|
+
jobLogger2.info(`[Job:${job2.name}] Completed in ${duration}ms`, {
|
|
782
|
+
jobId: pgBossJob.id,
|
|
783
|
+
duration
|
|
784
|
+
});
|
|
785
|
+
} catch (error) {
|
|
786
|
+
const duration = Date.now() - startTime;
|
|
787
|
+
jobLogger2.error(`[Job:${job2.name}] Failed after ${duration}ms`, {
|
|
788
|
+
jobId: pgBossJob.id,
|
|
789
|
+
duration,
|
|
790
|
+
error: error instanceof Error ? error.message : String(error)
|
|
791
|
+
});
|
|
792
|
+
throw error;
|
|
793
|
+
}
|
|
794
|
+
}
|
|
795
|
+
}
|
|
796
|
+
);
|
|
797
|
+
}
|
|
798
|
+
function connectEventToQueue(boss, job2, queueName) {
|
|
799
|
+
if (!job2._subscribedEventDef) {
|
|
800
|
+
return;
|
|
801
|
+
}
|
|
802
|
+
const eventDef = job2._subscribedEventDef;
|
|
803
|
+
eventDef._registerJobQueue(queueName, async (queue, payload) => {
|
|
804
|
+
await boss.send(queue, payload, getDefaultJobOptions(job2.options));
|
|
805
|
+
});
|
|
806
|
+
jobLogger2.debug(`[Job:${job2.name}] Connected to event: ${job2.subscribedEvent}`);
|
|
807
|
+
}
|
|
808
|
+
async function registerCronSchedule(boss, job2) {
|
|
809
|
+
if (!job2.cronExpression) {
|
|
810
|
+
return;
|
|
811
|
+
}
|
|
812
|
+
jobLogger2.debug(`[Job:${job2.name}] Scheduling cron: ${job2.cronExpression}`);
|
|
813
|
+
await ensureQueue(boss, job2.name);
|
|
814
|
+
await boss.schedule(
|
|
815
|
+
job2.name,
|
|
816
|
+
job2.cronExpression,
|
|
817
|
+
{},
|
|
818
|
+
getDefaultJobOptions(job2.options)
|
|
819
|
+
);
|
|
820
|
+
jobLogger2.info(`[Job:${job2.name}] Cron scheduled: ${job2.cronExpression}`);
|
|
821
|
+
}
|
|
822
|
+
async function queueRunOnceJob(boss, job2) {
|
|
823
|
+
if (!job2.runOnce) {
|
|
824
|
+
return;
|
|
825
|
+
}
|
|
826
|
+
jobLogger2.debug(`[Job:${job2.name}] Queuing runOnce job`);
|
|
827
|
+
await ensureQueue(boss, job2.name);
|
|
828
|
+
await boss.send(
|
|
829
|
+
job2.name,
|
|
830
|
+
{},
|
|
831
|
+
{
|
|
832
|
+
...getDefaultJobOptions(job2.options),
|
|
833
|
+
singletonKey: `runOnce:${job2.name}`
|
|
834
|
+
}
|
|
835
|
+
);
|
|
836
|
+
jobLogger2.info(`[Job:${job2.name}] runOnce job queued`);
|
|
837
|
+
}
|
|
838
|
+
async function registerJob(job2) {
|
|
839
|
+
const boss = getBoss();
|
|
840
|
+
if (!boss) {
|
|
841
|
+
throw new Error("pg-boss not initialized");
|
|
842
|
+
}
|
|
843
|
+
const queueName = job2.subscribedEvent ? getEventQueueName(job2.subscribedEvent) : job2.name;
|
|
844
|
+
jobLogger2.debug(`Registering job: ${job2.name}`, {
|
|
845
|
+
queueName,
|
|
846
|
+
subscribedEvent: job2.subscribedEvent
|
|
847
|
+
});
|
|
848
|
+
await registerWorker(boss, job2, queueName);
|
|
849
|
+
connectEventToQueue(boss, job2, queueName);
|
|
850
|
+
await registerCronSchedule(boss, job2);
|
|
851
|
+
await queueRunOnceJob(boss, job2);
|
|
852
|
+
jobLogger2.debug(`Job registered: ${job2.name}`);
|
|
853
|
+
}
|
|
3330
854
|
function getNetworkAddress() {
|
|
3331
855
|
const nets = networkInterfaces();
|
|
3332
856
|
for (const name of Object.keys(nets)) {
|
|
@@ -3364,16 +888,16 @@ function printBanner(options) {
|
|
|
3364
888
|
}
|
|
3365
889
|
|
|
3366
890
|
// src/server/validation.ts
|
|
3367
|
-
function validateServerConfig(
|
|
3368
|
-
if (
|
|
3369
|
-
if (!Number.isInteger(
|
|
891
|
+
function validateServerConfig(config2) {
|
|
892
|
+
if (config2.port !== void 0) {
|
|
893
|
+
if (!Number.isInteger(config2.port) || config2.port < 0 || config2.port > 65535) {
|
|
3370
894
|
throw new Error(
|
|
3371
|
-
`Invalid port: ${
|
|
895
|
+
`Invalid port: ${config2.port}. Port must be an integer between 0 and 65535.`
|
|
3372
896
|
);
|
|
3373
897
|
}
|
|
3374
898
|
}
|
|
3375
|
-
if (
|
|
3376
|
-
const { request, keepAlive, headers } =
|
|
899
|
+
if (config2.timeout) {
|
|
900
|
+
const { request, keepAlive, headers } = config2.timeout;
|
|
3377
901
|
if (request !== void 0 && (request < 0 || !Number.isFinite(request))) {
|
|
3378
902
|
throw new Error(`Invalid timeout.request: ${request}. Must be a positive number.`);
|
|
3379
903
|
}
|
|
@@ -3389,41 +913,51 @@ function validateServerConfig(config) {
|
|
|
3389
913
|
);
|
|
3390
914
|
}
|
|
3391
915
|
}
|
|
3392
|
-
if (
|
|
3393
|
-
const timeout =
|
|
916
|
+
if (config2.shutdown?.timeout !== void 0) {
|
|
917
|
+
const timeout = config2.shutdown.timeout;
|
|
3394
918
|
if (timeout < 0 || !Number.isFinite(timeout)) {
|
|
3395
919
|
throw new Error(`Invalid shutdown.timeout: ${timeout}. Must be a positive number.`);
|
|
3396
920
|
}
|
|
3397
921
|
}
|
|
3398
|
-
if (
|
|
3399
|
-
if (!
|
|
922
|
+
if (config2.healthCheck?.path) {
|
|
923
|
+
if (!config2.healthCheck.path.startsWith("/")) {
|
|
3400
924
|
throw new Error(
|
|
3401
|
-
`Invalid healthCheck.path: "${
|
|
925
|
+
`Invalid healthCheck.path: "${config2.healthCheck.path}". Must start with "/".`
|
|
3402
926
|
);
|
|
3403
927
|
}
|
|
3404
928
|
}
|
|
3405
929
|
}
|
|
3406
|
-
|
|
3407
|
-
|
|
3408
|
-
|
|
3409
|
-
|
|
3410
|
-
|
|
930
|
+
var DEFAULT_MAX_LISTENERS = 15;
|
|
931
|
+
var TIMEOUTS = {
|
|
932
|
+
SERVER_CLOSE: 5e3,
|
|
933
|
+
DATABASE_CLOSE: 5e3,
|
|
934
|
+
REDIS_CLOSE: 5e3,
|
|
935
|
+
PRODUCTION_ERROR_SHUTDOWN: 1e4
|
|
936
|
+
};
|
|
937
|
+
var CONFIG_FILE_PATHS = [
|
|
938
|
+
".spfn/server/server.config.mjs",
|
|
939
|
+
".spfn/server/server.config",
|
|
940
|
+
"src/server/server.config",
|
|
941
|
+
"src/server/server.config.ts"
|
|
942
|
+
];
|
|
943
|
+
var processHandlersRegistered = false;
|
|
944
|
+
async function startServer(config2) {
|
|
945
|
+
loadEnvFiles();
|
|
946
|
+
const finalConfig = await loadAndMergeConfig(config2);
|
|
3411
947
|
const { host, port, debug } = finalConfig;
|
|
3412
948
|
validateServerConfig(finalConfig);
|
|
949
|
+
if (!host || !port) {
|
|
950
|
+
throw new Error("Server host and port are required");
|
|
951
|
+
}
|
|
3413
952
|
if (debug) {
|
|
3414
953
|
logMiddlewareOrder(finalConfig);
|
|
3415
954
|
}
|
|
3416
|
-
|
|
3417
|
-
|
|
3418
|
-
|
|
3419
|
-
serverLogger2.info("Plugins discovered", {
|
|
3420
|
-
count: plugins.length,
|
|
3421
|
-
plugins: plugins.map((p) => p.name)
|
|
3422
|
-
});
|
|
3423
|
-
}
|
|
955
|
+
const shutdownState = {
|
|
956
|
+
isShuttingDown: false
|
|
957
|
+
};
|
|
3424
958
|
try {
|
|
3425
|
-
await initializeInfrastructure(finalConfig
|
|
3426
|
-
const app = await createServer(finalConfig
|
|
959
|
+
await initializeInfrastructure(finalConfig);
|
|
960
|
+
const app = await createServer(finalConfig);
|
|
3427
961
|
const server = startHttpServer(app, host, port);
|
|
3428
962
|
const timeouts = getTimeoutConfig(finalConfig.timeout);
|
|
3429
963
|
applyServerTimeouts(server, timeouts);
|
|
@@ -3434,236 +968,643 @@ async function startServer(config) {
|
|
|
3434
968
|
port
|
|
3435
969
|
});
|
|
3436
970
|
logServerStarted(debug, host, port, finalConfig, timeouts);
|
|
3437
|
-
const shutdownServer = createShutdownHandler(server, finalConfig,
|
|
3438
|
-
const shutdown = createGracefulShutdown(shutdownServer, finalConfig);
|
|
3439
|
-
|
|
971
|
+
const shutdownServer = createShutdownHandler(server, finalConfig, shutdownState);
|
|
972
|
+
const shutdown = createGracefulShutdown(shutdownServer, finalConfig, shutdownState);
|
|
973
|
+
registerProcessHandlers(shutdown);
|
|
3440
974
|
const serverInstance = {
|
|
3441
975
|
server,
|
|
3442
976
|
app,
|
|
3443
977
|
config: finalConfig,
|
|
3444
978
|
close: async () => {
|
|
3445
|
-
|
|
979
|
+
serverLogger.info("Manual server shutdown requested");
|
|
980
|
+
if (shutdownState.isShuttingDown) {
|
|
981
|
+
serverLogger.warn("Shutdown already in progress, ignoring manual close request");
|
|
982
|
+
return;
|
|
983
|
+
}
|
|
984
|
+
shutdownState.isShuttingDown = true;
|
|
3446
985
|
await shutdownServer();
|
|
3447
986
|
}
|
|
3448
987
|
};
|
|
3449
988
|
if (finalConfig.lifecycle?.afterStart) {
|
|
3450
|
-
|
|
989
|
+
serverLogger.debug("Executing afterStart hook...");
|
|
3451
990
|
try {
|
|
3452
991
|
await finalConfig.lifecycle.afterStart(serverInstance);
|
|
3453
992
|
} catch (error) {
|
|
3454
|
-
|
|
993
|
+
serverLogger.error("afterStart hook failed", error);
|
|
3455
994
|
}
|
|
3456
995
|
}
|
|
3457
|
-
await executePluginHooks(plugins, "afterStart", serverInstance);
|
|
3458
996
|
return serverInstance;
|
|
3459
997
|
} catch (error) {
|
|
3460
998
|
const err = error;
|
|
3461
|
-
|
|
999
|
+
serverLogger.error("Server initialization failed", err);
|
|
3462
1000
|
await cleanupOnFailure(finalConfig);
|
|
3463
1001
|
throw error;
|
|
3464
1002
|
}
|
|
3465
1003
|
}
|
|
3466
|
-
async function loadAndMergeConfig(
|
|
1004
|
+
async function loadAndMergeConfig(config2) {
|
|
3467
1005
|
const cwd = process.cwd();
|
|
3468
|
-
const configPath = join(cwd, "src", "server", "server.config.ts");
|
|
3469
|
-
const configJsPath = join(cwd, "src", "server", "server.config.js");
|
|
3470
|
-
const builtConfigMjsPath = join(cwd, ".spfn", "server", "server.config.mjs");
|
|
3471
|
-
const builtConfigPath = join(cwd, ".spfn", "server", "server.config.js");
|
|
3472
1006
|
let fileConfig = {};
|
|
3473
|
-
|
|
3474
|
-
|
|
3475
|
-
|
|
3476
|
-
|
|
3477
|
-
|
|
3478
|
-
|
|
3479
|
-
|
|
3480
|
-
|
|
3481
|
-
|
|
3482
|
-
|
|
3483
|
-
|
|
3484
|
-
|
|
1007
|
+
let loadedConfigPath = null;
|
|
1008
|
+
for (const configPath of CONFIG_FILE_PATHS) {
|
|
1009
|
+
const fullPath = join(cwd, configPath);
|
|
1010
|
+
if (existsSync(fullPath)) {
|
|
1011
|
+
try {
|
|
1012
|
+
const configModule = await import(fullPath);
|
|
1013
|
+
fileConfig = configModule.default ?? {};
|
|
1014
|
+
loadedConfigPath = configPath;
|
|
1015
|
+
break;
|
|
1016
|
+
} catch (error) {
|
|
1017
|
+
serverLogger.error(`Failed to load config from ${configPath} - file exists but import failed`, error);
|
|
1018
|
+
}
|
|
1019
|
+
}
|
|
1020
|
+
}
|
|
1021
|
+
if (loadedConfigPath) {
|
|
1022
|
+
serverLogger.debug(`Loaded configuration from ${loadedConfigPath}`);
|
|
1023
|
+
} else {
|
|
1024
|
+
serverLogger.debug("No configuration file found, using defaults");
|
|
3485
1025
|
}
|
|
3486
1026
|
return {
|
|
3487
1027
|
...fileConfig,
|
|
3488
|
-
...
|
|
3489
|
-
port:
|
|
3490
|
-
host:
|
|
1028
|
+
...config2,
|
|
1029
|
+
port: config2?.port ?? fileConfig?.port ?? env.PORT,
|
|
1030
|
+
host: config2?.host ?? fileConfig?.host ?? env.HOST
|
|
3491
1031
|
};
|
|
3492
1032
|
}
|
|
3493
|
-
function
|
|
3494
|
-
|
|
3495
|
-
|
|
3496
|
-
|
|
3497
|
-
}
|
|
1033
|
+
function getInfrastructureConfig(config2) {
|
|
1034
|
+
return {
|
|
1035
|
+
database: config2.infrastructure?.database !== false,
|
|
1036
|
+
redis: config2.infrastructure?.redis !== false
|
|
1037
|
+
};
|
|
3498
1038
|
}
|
|
3499
|
-
async function initializeInfrastructure(
|
|
3500
|
-
if (
|
|
3501
|
-
|
|
3502
|
-
|
|
3503
|
-
await config.lifecycle.beforeInfrastructure(config);
|
|
3504
|
-
} catch (error) {
|
|
3505
|
-
serverLogger2.error("beforeInfrastructure hook failed", error);
|
|
3506
|
-
throw new Error("Server initialization failed in beforeInfrastructure hook");
|
|
3507
|
-
}
|
|
1039
|
+
async function initializeInfrastructure(config2) {
|
|
1040
|
+
if (config2.lifecycle?.beforeInfrastructure) {
|
|
1041
|
+
serverLogger.debug("Executing beforeInfrastructure hook...");
|
|
1042
|
+
await config2.lifecycle.beforeInfrastructure(config2);
|
|
3508
1043
|
}
|
|
3509
|
-
const
|
|
3510
|
-
if (
|
|
3511
|
-
|
|
3512
|
-
await initDatabase(
|
|
1044
|
+
const infraConfig = getInfrastructureConfig(config2);
|
|
1045
|
+
if (infraConfig.database) {
|
|
1046
|
+
serverLogger.debug("Initializing database...");
|
|
1047
|
+
await initDatabase(config2.database);
|
|
3513
1048
|
} else {
|
|
3514
|
-
|
|
1049
|
+
serverLogger.debug("Database initialization disabled");
|
|
3515
1050
|
}
|
|
3516
|
-
|
|
3517
|
-
|
|
3518
|
-
|
|
3519
|
-
await initRedis();
|
|
1051
|
+
if (infraConfig.redis) {
|
|
1052
|
+
serverLogger.debug("Initializing Redis...");
|
|
1053
|
+
await initCache();
|
|
3520
1054
|
} else {
|
|
3521
|
-
|
|
1055
|
+
serverLogger.debug("Redis initialization disabled");
|
|
3522
1056
|
}
|
|
3523
|
-
if (
|
|
3524
|
-
|
|
3525
|
-
|
|
3526
|
-
|
|
3527
|
-
|
|
3528
|
-
|
|
3529
|
-
|
|
1057
|
+
if (config2.lifecycle?.afterInfrastructure) {
|
|
1058
|
+
serverLogger.debug("Executing afterInfrastructure hook...");
|
|
1059
|
+
await config2.lifecycle.afterInfrastructure();
|
|
1060
|
+
}
|
|
1061
|
+
if (config2.jobs) {
|
|
1062
|
+
const dbUrl = env.DATABASE_URL;
|
|
1063
|
+
if (!dbUrl) {
|
|
1064
|
+
throw new Error(
|
|
1065
|
+
"Jobs require database connection. Ensure DATABASE_URL is set or database is enabled."
|
|
1066
|
+
);
|
|
1067
|
+
}
|
|
1068
|
+
serverLogger.debug("Initializing pg-boss...");
|
|
1069
|
+
await initBoss({
|
|
1070
|
+
connectionString: dbUrl,
|
|
1071
|
+
...config2.jobsConfig
|
|
1072
|
+
});
|
|
1073
|
+
serverLogger.debug("Registering jobs...");
|
|
1074
|
+
await registerJobs(config2.jobs);
|
|
1075
|
+
}
|
|
1076
|
+
if (config2.workflows) {
|
|
1077
|
+
const infraConfig2 = getInfrastructureConfig(config2);
|
|
1078
|
+
if (!infraConfig2.database) {
|
|
1079
|
+
throw new Error(
|
|
1080
|
+
"Workflows require database connection. Ensure database is enabled in infrastructure config."
|
|
1081
|
+
);
|
|
3530
1082
|
}
|
|
1083
|
+
serverLogger.debug("Initializing workflow engine...");
|
|
1084
|
+
config2.workflows._init(
|
|
1085
|
+
getDatabase(),
|
|
1086
|
+
config2.workflowsConfig
|
|
1087
|
+
);
|
|
1088
|
+
serverLogger.info("Workflow engine initialized");
|
|
3531
1089
|
}
|
|
3532
|
-
await executePluginHooks(plugins, "afterInfrastructure");
|
|
3533
1090
|
}
|
|
3534
1091
|
function startHttpServer(app, host, port) {
|
|
3535
|
-
|
|
3536
|
-
|
|
1092
|
+
serverLogger.debug(`Starting server on ${host}:${port}...`);
|
|
1093
|
+
return serve({
|
|
3537
1094
|
fetch: app.fetch,
|
|
3538
1095
|
port,
|
|
3539
1096
|
hostname: host
|
|
3540
1097
|
});
|
|
3541
|
-
|
|
1098
|
+
}
|
|
1099
|
+
function logMiddlewareOrder(config2) {
|
|
1100
|
+
const middlewareOrder = buildMiddlewareOrder(config2);
|
|
1101
|
+
serverLogger.debug("Middleware execution order", {
|
|
1102
|
+
order: middlewareOrder
|
|
1103
|
+
});
|
|
3542
1104
|
}
|
|
3543
1105
|
function logServerTimeouts(timeouts) {
|
|
3544
|
-
|
|
1106
|
+
serverLogger.info("Server timeouts configured", {
|
|
3545
1107
|
request: `${timeouts.request}ms`,
|
|
3546
1108
|
keepAlive: `${timeouts.keepAlive}ms`,
|
|
3547
1109
|
headers: `${timeouts.headers}ms`
|
|
3548
1110
|
});
|
|
3549
1111
|
}
|
|
3550
|
-
function logServerStarted(debug, host, port,
|
|
3551
|
-
const startupConfig = buildStartupConfig(
|
|
3552
|
-
|
|
1112
|
+
function logServerStarted(debug, host, port, config2, timeouts) {
|
|
1113
|
+
const startupConfig = buildStartupConfig(config2, timeouts);
|
|
1114
|
+
serverLogger.info("Server started successfully", {
|
|
3553
1115
|
mode: debug ? "development" : "production",
|
|
3554
1116
|
host,
|
|
3555
1117
|
port,
|
|
3556
1118
|
config: startupConfig
|
|
3557
1119
|
});
|
|
3558
1120
|
}
|
|
3559
|
-
function createShutdownHandler(server,
|
|
1121
|
+
function createShutdownHandler(server, config2, shutdownState) {
|
|
3560
1122
|
return async () => {
|
|
3561
|
-
|
|
3562
|
-
|
|
3563
|
-
|
|
3564
|
-
|
|
3565
|
-
|
|
3566
|
-
|
|
1123
|
+
if (shutdownState.isShuttingDown) {
|
|
1124
|
+
serverLogger.debug("Shutdown already in progress for this instance, skipping");
|
|
1125
|
+
return;
|
|
1126
|
+
}
|
|
1127
|
+
shutdownState.isShuttingDown = true;
|
|
1128
|
+
serverLogger.debug("Closing HTTP server...");
|
|
1129
|
+
let timeoutId;
|
|
1130
|
+
await Promise.race([
|
|
1131
|
+
new Promise((resolve2, reject) => {
|
|
1132
|
+
server.close((err) => {
|
|
1133
|
+
if (timeoutId) clearTimeout(timeoutId);
|
|
1134
|
+
if (err) {
|
|
1135
|
+
serverLogger.error("HTTP server close error", err);
|
|
1136
|
+
reject(err);
|
|
1137
|
+
} else {
|
|
1138
|
+
serverLogger.info("HTTP server closed");
|
|
1139
|
+
resolve2();
|
|
1140
|
+
}
|
|
1141
|
+
});
|
|
1142
|
+
}),
|
|
1143
|
+
new Promise((_, reject) => {
|
|
1144
|
+
timeoutId = setTimeout(() => {
|
|
1145
|
+
reject(new Error(`HTTP server close timeout after ${TIMEOUTS.SERVER_CLOSE}ms`));
|
|
1146
|
+
}, TIMEOUTS.SERVER_CLOSE);
|
|
1147
|
+
})
|
|
1148
|
+
]).catch((error) => {
|
|
1149
|
+
if (timeoutId) clearTimeout(timeoutId);
|
|
1150
|
+
serverLogger.warn("HTTP server close timeout, forcing shutdown", error);
|
|
3567
1151
|
});
|
|
3568
|
-
if (
|
|
3569
|
-
|
|
1152
|
+
if (config2.jobs) {
|
|
1153
|
+
serverLogger.debug("Stopping pg-boss...");
|
|
3570
1154
|
try {
|
|
3571
|
-
await
|
|
1155
|
+
await stopBoss();
|
|
3572
1156
|
} catch (error) {
|
|
3573
|
-
|
|
1157
|
+
serverLogger.error("pg-boss stop failed", error);
|
|
3574
1158
|
}
|
|
3575
1159
|
}
|
|
3576
|
-
|
|
3577
|
-
|
|
3578
|
-
|
|
3579
|
-
|
|
1160
|
+
if (config2.lifecycle?.beforeShutdown) {
|
|
1161
|
+
serverLogger.debug("Executing beforeShutdown hook...");
|
|
1162
|
+
try {
|
|
1163
|
+
await config2.lifecycle.beforeShutdown();
|
|
1164
|
+
} catch (error) {
|
|
1165
|
+
serverLogger.error("beforeShutdown hook failed", error);
|
|
1166
|
+
}
|
|
3580
1167
|
}
|
|
3581
|
-
const
|
|
3582
|
-
|
|
3583
|
-
|
|
3584
|
-
|
|
3585
|
-
await closeDatabase();
|
|
1168
|
+
const infraConfig = getInfrastructureConfig(config2);
|
|
1169
|
+
if (infraConfig.database) {
|
|
1170
|
+
serverLogger.debug("Closing database connections...");
|
|
1171
|
+
await closeInfrastructure(closeDatabase, "Database", TIMEOUTS.DATABASE_CLOSE);
|
|
3586
1172
|
}
|
|
3587
|
-
if (
|
|
3588
|
-
|
|
3589
|
-
await
|
|
1173
|
+
if (infraConfig.redis) {
|
|
1174
|
+
serverLogger.debug("Closing Redis connections...");
|
|
1175
|
+
await closeInfrastructure(closeCache, "Redis", TIMEOUTS.REDIS_CLOSE);
|
|
3590
1176
|
}
|
|
3591
|
-
|
|
1177
|
+
serverLogger.info("Server shutdown completed");
|
|
3592
1178
|
};
|
|
3593
1179
|
}
|
|
3594
|
-
function
|
|
1180
|
+
async function closeInfrastructure(closeFn, name, timeout) {
|
|
1181
|
+
let timeoutId;
|
|
1182
|
+
try {
|
|
1183
|
+
await Promise.race([
|
|
1184
|
+
closeFn().then(() => {
|
|
1185
|
+
if (timeoutId) clearTimeout(timeoutId);
|
|
1186
|
+
}),
|
|
1187
|
+
new Promise((_, reject) => {
|
|
1188
|
+
timeoutId = setTimeout(() => {
|
|
1189
|
+
reject(new Error(`${name} close timeout after ${timeout}ms`));
|
|
1190
|
+
}, timeout);
|
|
1191
|
+
})
|
|
1192
|
+
]);
|
|
1193
|
+
serverLogger.info(`${name} connections closed successfully`);
|
|
1194
|
+
} catch (error) {
|
|
1195
|
+
if (timeoutId) clearTimeout(timeoutId);
|
|
1196
|
+
serverLogger.error(`${name} close failed or timed out`, error);
|
|
1197
|
+
}
|
|
1198
|
+
}
|
|
1199
|
+
function createGracefulShutdown(shutdownServer, config2, shutdownState) {
|
|
3595
1200
|
return async (signal) => {
|
|
3596
|
-
|
|
3597
|
-
|
|
3598
|
-
|
|
3599
|
-
|
|
3600
|
-
|
|
3601
|
-
|
|
3602
|
-
|
|
1201
|
+
if (shutdownState.isShuttingDown) {
|
|
1202
|
+
serverLogger.warn(`${signal} received but shutdown already in progress, ignoring`);
|
|
1203
|
+
return;
|
|
1204
|
+
}
|
|
1205
|
+
serverLogger.info(`${signal} received, starting graceful shutdown...`);
|
|
1206
|
+
const shutdownTimeout = getShutdownTimeout(config2.shutdown);
|
|
1207
|
+
let timeoutId;
|
|
3603
1208
|
try {
|
|
3604
1209
|
await Promise.race([
|
|
3605
|
-
shutdownServer()
|
|
3606
|
-
|
|
1210
|
+
shutdownServer().then(() => {
|
|
1211
|
+
if (timeoutId) clearTimeout(timeoutId);
|
|
1212
|
+
}),
|
|
1213
|
+
new Promise((_, reject) => {
|
|
1214
|
+
timeoutId = setTimeout(() => {
|
|
1215
|
+
reject(new Error(`Graceful shutdown timeout after ${shutdownTimeout}ms`));
|
|
1216
|
+
}, shutdownTimeout);
|
|
1217
|
+
})
|
|
3607
1218
|
]);
|
|
3608
|
-
|
|
1219
|
+
if (timeoutId) clearTimeout(timeoutId);
|
|
1220
|
+
serverLogger.info("Graceful shutdown completed successfully");
|
|
3609
1221
|
process.exit(0);
|
|
3610
1222
|
} catch (error) {
|
|
1223
|
+
if (timeoutId) clearTimeout(timeoutId);
|
|
3611
1224
|
const err = error;
|
|
3612
1225
|
if (err.message && err.message.includes("timeout")) {
|
|
3613
|
-
|
|
1226
|
+
serverLogger.error("Graceful shutdown timeout, forcing exit", err);
|
|
3614
1227
|
} else {
|
|
3615
|
-
|
|
1228
|
+
serverLogger.error("Error during graceful shutdown", err);
|
|
3616
1229
|
}
|
|
3617
1230
|
process.exit(1);
|
|
3618
1231
|
}
|
|
3619
1232
|
};
|
|
3620
1233
|
}
|
|
3621
|
-
function
|
|
3622
|
-
|
|
3623
|
-
|
|
3624
|
-
process.
|
|
1234
|
+
function handleProcessError(errorType, shutdown) {
|
|
1235
|
+
const isProduction = env.NODE_ENV === "production";
|
|
1236
|
+
const isDevelopment = env.NODE_ENV === "development";
|
|
1237
|
+
if (isDevelopment || process.env.WATCH_MODE === "true") {
|
|
1238
|
+
serverLogger.info("Exiting immediately for clean restart");
|
|
1239
|
+
process.exit(1);
|
|
1240
|
+
} else if (isProduction) {
|
|
1241
|
+
serverLogger.info(`Attempting graceful shutdown after ${errorType}`);
|
|
1242
|
+
const forceExitTimer = setTimeout(() => {
|
|
1243
|
+
serverLogger.error(`Forced exit after ${TIMEOUTS.PRODUCTION_ERROR_SHUTDOWN}ms - graceful shutdown did not complete`);
|
|
1244
|
+
process.exit(1);
|
|
1245
|
+
}, TIMEOUTS.PRODUCTION_ERROR_SHUTDOWN);
|
|
1246
|
+
shutdown(errorType).then(() => {
|
|
1247
|
+
clearTimeout(forceExitTimer);
|
|
1248
|
+
serverLogger.info("Graceful shutdown completed, exiting");
|
|
1249
|
+
process.exit(0);
|
|
1250
|
+
}).catch((shutdownError) => {
|
|
1251
|
+
clearTimeout(forceExitTimer);
|
|
1252
|
+
serverLogger.error("Graceful shutdown failed", shutdownError);
|
|
1253
|
+
process.exit(1);
|
|
1254
|
+
});
|
|
1255
|
+
} else {
|
|
1256
|
+
serverLogger.info("Exiting immediately");
|
|
1257
|
+
process.exit(1);
|
|
1258
|
+
}
|
|
1259
|
+
}
|
|
1260
|
+
function registerProcessHandlers(shutdown) {
|
|
1261
|
+
if (processHandlersRegistered) {
|
|
1262
|
+
serverLogger.debug("Process handlers already registered, skipping");
|
|
1263
|
+
return;
|
|
1264
|
+
}
|
|
1265
|
+
processHandlersRegistered = true;
|
|
1266
|
+
const currentMax = process.getMaxListeners();
|
|
1267
|
+
if (currentMax < DEFAULT_MAX_LISTENERS) {
|
|
1268
|
+
process.setMaxListeners(DEFAULT_MAX_LISTENERS);
|
|
1269
|
+
}
|
|
1270
|
+
process.on("SIGTERM", () => {
|
|
1271
|
+
shutdown("SIGTERM").catch((error) => {
|
|
1272
|
+
serverLogger.error("SIGTERM handler failed", error);
|
|
1273
|
+
process.exit(1);
|
|
1274
|
+
});
|
|
1275
|
+
});
|
|
1276
|
+
process.on("SIGINT", () => {
|
|
1277
|
+
shutdown("SIGINT").catch((error) => {
|
|
1278
|
+
serverLogger.error("SIGINT handler failed", error);
|
|
1279
|
+
process.exit(1);
|
|
1280
|
+
});
|
|
1281
|
+
});
|
|
3625
1282
|
process.on("uncaughtException", (error) => {
|
|
3626
1283
|
if (error.message?.includes("EADDRINUSE")) {
|
|
3627
|
-
|
|
3628
|
-
error: error.message,
|
|
3629
|
-
stack: error.stack,
|
|
1284
|
+
serverLogger.error("Port conflict detected - detailed trace:", error, {
|
|
3630
1285
|
code: error.code,
|
|
3631
1286
|
port: error.port,
|
|
3632
1287
|
address: error.address,
|
|
3633
1288
|
syscall: error.syscall
|
|
3634
1289
|
});
|
|
3635
1290
|
} else {
|
|
3636
|
-
|
|
1291
|
+
serverLogger.error("Uncaught exception", error);
|
|
3637
1292
|
}
|
|
3638
|
-
|
|
3639
|
-
process.exit(1);
|
|
1293
|
+
handleProcessError("UNCAUGHT_EXCEPTION", shutdown);
|
|
3640
1294
|
});
|
|
3641
1295
|
process.on("unhandledRejection", (reason, promise) => {
|
|
3642
|
-
|
|
3643
|
-
|
|
3644
|
-
|
|
3645
|
-
|
|
3646
|
-
|
|
3647
|
-
|
|
1296
|
+
if (reason instanceof Error) {
|
|
1297
|
+
Promise.resolve().then(() => (init_formatters(), formatters_exports)).then(({ formatUnhandledRejection: formatUnhandledRejection2 }) => {
|
|
1298
|
+
const { error, context } = formatUnhandledRejection2(reason, promise);
|
|
1299
|
+
serverLogger.error("Unhandled promise rejection", error, context);
|
|
1300
|
+
}).catch(() => {
|
|
1301
|
+
serverLogger.error("Unhandled promise rejection", reason, {
|
|
1302
|
+
promise
|
|
1303
|
+
});
|
|
1304
|
+
});
|
|
1305
|
+
} else {
|
|
1306
|
+
serverLogger.error("Unhandled promise rejection", {
|
|
1307
|
+
reason,
|
|
1308
|
+
promise
|
|
1309
|
+
});
|
|
1310
|
+
}
|
|
1311
|
+
handleProcessError("UNHANDLED_REJECTION", shutdown);
|
|
3648
1312
|
});
|
|
1313
|
+
serverLogger.debug("Process-level shutdown handlers registered successfully");
|
|
3649
1314
|
}
|
|
3650
|
-
async function cleanupOnFailure(
|
|
1315
|
+
async function cleanupOnFailure(config2) {
|
|
3651
1316
|
try {
|
|
3652
|
-
|
|
3653
|
-
const
|
|
3654
|
-
|
|
3655
|
-
|
|
3656
|
-
await closeDatabase();
|
|
1317
|
+
serverLogger.debug("Cleaning up after initialization failure...");
|
|
1318
|
+
const infraConfig = getInfrastructureConfig(config2);
|
|
1319
|
+
if (infraConfig.database) {
|
|
1320
|
+
await closeInfrastructure(closeDatabase, "Database", TIMEOUTS.DATABASE_CLOSE);
|
|
3657
1321
|
}
|
|
3658
|
-
if (
|
|
3659
|
-
await
|
|
1322
|
+
if (infraConfig.redis) {
|
|
1323
|
+
await closeInfrastructure(closeCache, "Redis", TIMEOUTS.REDIS_CLOSE);
|
|
3660
1324
|
}
|
|
3661
|
-
|
|
1325
|
+
serverLogger.debug("Cleanup completed");
|
|
3662
1326
|
} catch (cleanupError) {
|
|
3663
|
-
|
|
1327
|
+
serverLogger.error("Cleanup failed", cleanupError);
|
|
1328
|
+
}
|
|
1329
|
+
}
|
|
1330
|
+
|
|
1331
|
+
// src/server/config-builder.ts
|
|
1332
|
+
function collectHooks(lifecycles, key) {
|
|
1333
|
+
return lifecycles.map((lc) => lc[key]).filter((hook) => hook !== void 0);
|
|
1334
|
+
}
|
|
1335
|
+
function createMergedHook(hooks) {
|
|
1336
|
+
if (hooks.length === 0) {
|
|
1337
|
+
return void 0;
|
|
1338
|
+
}
|
|
1339
|
+
return (async (...args) => {
|
|
1340
|
+
for (const hook of hooks) {
|
|
1341
|
+
await hook(...args);
|
|
1342
|
+
}
|
|
1343
|
+
});
|
|
1344
|
+
}
|
|
1345
|
+
var ServerConfigBuilder = class {
|
|
1346
|
+
config = {};
|
|
1347
|
+
lifecycles = [];
|
|
1348
|
+
/**
|
|
1349
|
+
* Set server port
|
|
1350
|
+
*/
|
|
1351
|
+
port(port) {
|
|
1352
|
+
this.config.port = port;
|
|
1353
|
+
return this;
|
|
1354
|
+
}
|
|
1355
|
+
/**
|
|
1356
|
+
* Set server hostname
|
|
1357
|
+
*/
|
|
1358
|
+
host(host) {
|
|
1359
|
+
this.config.host = host;
|
|
1360
|
+
return this;
|
|
1361
|
+
}
|
|
1362
|
+
/**
|
|
1363
|
+
* Set CORS configuration
|
|
1364
|
+
*/
|
|
1365
|
+
cors(cors2) {
|
|
1366
|
+
this.config.cors = cors2;
|
|
1367
|
+
return this;
|
|
1368
|
+
}
|
|
1369
|
+
/**
|
|
1370
|
+
* Configure built-in middleware
|
|
1371
|
+
*/
|
|
1372
|
+
middleware(middleware) {
|
|
1373
|
+
this.config.middleware = middleware;
|
|
1374
|
+
return this;
|
|
1375
|
+
}
|
|
1376
|
+
/**
|
|
1377
|
+
* Add custom middleware
|
|
1378
|
+
*/
|
|
1379
|
+
use(handlers) {
|
|
1380
|
+
this.config.use = handlers;
|
|
1381
|
+
return this;
|
|
1382
|
+
}
|
|
1383
|
+
/**
|
|
1384
|
+
* Add named middlewares for route-level skip control
|
|
1385
|
+
*/
|
|
1386
|
+
middlewares(middlewares) {
|
|
1387
|
+
this.config.middlewares = middlewares;
|
|
1388
|
+
return this;
|
|
1389
|
+
}
|
|
1390
|
+
/**
|
|
1391
|
+
* Register define-route based router
|
|
1392
|
+
*
|
|
1393
|
+
* Automatically applies:
|
|
1394
|
+
* - Global middlewares from router._globalMiddlewares (via .use())
|
|
1395
|
+
* - Package routers from router._packageRouters (via .packages())
|
|
1396
|
+
*
|
|
1397
|
+
* @example
|
|
1398
|
+
* ```typescript
|
|
1399
|
+
* const appRouter = defineRouter({
|
|
1400
|
+
* getUser: route.get('/users/:id')...
|
|
1401
|
+
* })
|
|
1402
|
+
* .packages([authRouter, cmsAppRouter])
|
|
1403
|
+
* .use([authMiddleware]);
|
|
1404
|
+
*
|
|
1405
|
+
* export default defineServerConfig()
|
|
1406
|
+
* .routes(appRouter) // middlewares auto-applied
|
|
1407
|
+
* .build();
|
|
1408
|
+
* ```
|
|
1409
|
+
*/
|
|
1410
|
+
routes(router) {
|
|
1411
|
+
this.config.routes = router;
|
|
1412
|
+
const allGlobalMiddlewares = [];
|
|
1413
|
+
if (router._globalMiddlewares?.length > 0) {
|
|
1414
|
+
allGlobalMiddlewares.push(...router._globalMiddlewares);
|
|
1415
|
+
}
|
|
1416
|
+
if (router._packageRouters?.length > 0) {
|
|
1417
|
+
for (const pkgRouter of router._packageRouters) {
|
|
1418
|
+
if (pkgRouter._globalMiddlewares?.length > 0) {
|
|
1419
|
+
allGlobalMiddlewares.push(...pkgRouter._globalMiddlewares);
|
|
1420
|
+
}
|
|
1421
|
+
}
|
|
1422
|
+
}
|
|
1423
|
+
if (allGlobalMiddlewares.length > 0) {
|
|
1424
|
+
this.config.middlewares = [
|
|
1425
|
+
...this.config.middlewares || [],
|
|
1426
|
+
...allGlobalMiddlewares
|
|
1427
|
+
];
|
|
1428
|
+
}
|
|
1429
|
+
return this;
|
|
1430
|
+
}
|
|
1431
|
+
/**
|
|
1432
|
+
* Register background jobs router
|
|
1433
|
+
*
|
|
1434
|
+
* @example
|
|
1435
|
+
* ```typescript
|
|
1436
|
+
* import { job, defineJobRouter } from '@spfn/core/job';
|
|
1437
|
+
*
|
|
1438
|
+
* const sendEmail = job('send-email')
|
|
1439
|
+
* .input(Type.Object({ to: Type.String() }))
|
|
1440
|
+
* .handler(async (input) => { ... });
|
|
1441
|
+
*
|
|
1442
|
+
* const jobRouter = defineJobRouter({ sendEmail });
|
|
1443
|
+
*
|
|
1444
|
+
* export default defineServerConfig()
|
|
1445
|
+
* .routes(appRouter)
|
|
1446
|
+
* .jobs(jobRouter)
|
|
1447
|
+
* .build();
|
|
1448
|
+
* ```
|
|
1449
|
+
*/
|
|
1450
|
+
jobs(router, config2) {
|
|
1451
|
+
this.config.jobs = router;
|
|
1452
|
+
if (config2) {
|
|
1453
|
+
this.config.jobsConfig = config2;
|
|
1454
|
+
}
|
|
1455
|
+
return this;
|
|
1456
|
+
}
|
|
1457
|
+
/**
|
|
1458
|
+
* Register event router for SSE (Server-Sent Events)
|
|
1459
|
+
*
|
|
1460
|
+
* Enables real-time event streaming to frontend clients.
|
|
1461
|
+
* Events defined with defineEvent() can be subscribed by:
|
|
1462
|
+
* - Backend: .subscribe() for internal handlers
|
|
1463
|
+
* - Jobs: .on(event) for background processing
|
|
1464
|
+
* - Frontend: SSE stream for real-time updates
|
|
1465
|
+
*
|
|
1466
|
+
* @example
|
|
1467
|
+
* ```typescript
|
|
1468
|
+
* import { defineEvent, defineEventRouter } from '@spfn/core/event';
|
|
1469
|
+
*
|
|
1470
|
+
* const userCreated = defineEvent('user.created', Type.Object({
|
|
1471
|
+
* userId: Type.String(),
|
|
1472
|
+
* }));
|
|
1473
|
+
*
|
|
1474
|
+
* const eventRouter = defineEventRouter({ userCreated });
|
|
1475
|
+
*
|
|
1476
|
+
* export default defineServerConfig()
|
|
1477
|
+
* .routes(appRouter)
|
|
1478
|
+
* .events(eventRouter) // → GET /events/stream
|
|
1479
|
+
* .build();
|
|
1480
|
+
*
|
|
1481
|
+
* // Custom path
|
|
1482
|
+
* .events(eventRouter, { path: '/sse' })
|
|
1483
|
+
* ```
|
|
1484
|
+
*/
|
|
1485
|
+
events(router, config2) {
|
|
1486
|
+
this.config.events = router;
|
|
1487
|
+
if (config2) {
|
|
1488
|
+
this.config.eventsConfig = config2;
|
|
1489
|
+
}
|
|
1490
|
+
return this;
|
|
1491
|
+
}
|
|
1492
|
+
/**
|
|
1493
|
+
* Enable/disable debug mode
|
|
1494
|
+
*/
|
|
1495
|
+
debug(enabled) {
|
|
1496
|
+
this.config.debug = enabled;
|
|
1497
|
+
return this;
|
|
1498
|
+
}
|
|
1499
|
+
/**
|
|
1500
|
+
* Configure database settings
|
|
1501
|
+
*/
|
|
1502
|
+
database(database) {
|
|
1503
|
+
this.config.database = database;
|
|
1504
|
+
return this;
|
|
3664
1505
|
}
|
|
1506
|
+
/**
|
|
1507
|
+
* Configure server timeout settings
|
|
1508
|
+
*/
|
|
1509
|
+
timeout(timeout) {
|
|
1510
|
+
this.config.timeout = timeout;
|
|
1511
|
+
return this;
|
|
1512
|
+
}
|
|
1513
|
+
/**
|
|
1514
|
+
* Configure graceful shutdown settings
|
|
1515
|
+
*/
|
|
1516
|
+
shutdown(shutdown) {
|
|
1517
|
+
this.config.shutdown = shutdown;
|
|
1518
|
+
return this;
|
|
1519
|
+
}
|
|
1520
|
+
/**
|
|
1521
|
+
* Configure health check endpoint
|
|
1522
|
+
*/
|
|
1523
|
+
healthCheck(healthCheck) {
|
|
1524
|
+
this.config.healthCheck = healthCheck;
|
|
1525
|
+
return this;
|
|
1526
|
+
}
|
|
1527
|
+
/**
|
|
1528
|
+
* Configure infrastructure initialization
|
|
1529
|
+
*/
|
|
1530
|
+
infrastructure(infrastructure) {
|
|
1531
|
+
this.config.infrastructure = infrastructure;
|
|
1532
|
+
return this;
|
|
1533
|
+
}
|
|
1534
|
+
/**
|
|
1535
|
+
* Register workflow router for workflow orchestration
|
|
1536
|
+
*
|
|
1537
|
+
* Automatically initializes the workflow engine after database is ready.
|
|
1538
|
+
*
|
|
1539
|
+
* @example
|
|
1540
|
+
* ```typescript
|
|
1541
|
+
* import { defineWorkflowRouter } from '@spfn/workflow';
|
|
1542
|
+
*
|
|
1543
|
+
* const workflowRouter = defineWorkflowRouter([
|
|
1544
|
+
* provisionTenant,
|
|
1545
|
+
* deprovisionTenant,
|
|
1546
|
+
* ]);
|
|
1547
|
+
*
|
|
1548
|
+
* export default defineServerConfig()
|
|
1549
|
+
* .routes(appRouter)
|
|
1550
|
+
* .workflows(workflowRouter)
|
|
1551
|
+
* .build();
|
|
1552
|
+
* ```
|
|
1553
|
+
*/
|
|
1554
|
+
workflows(router, config2) {
|
|
1555
|
+
this.config.workflows = router;
|
|
1556
|
+
if (config2) {
|
|
1557
|
+
this.config.workflowsConfig = config2;
|
|
1558
|
+
}
|
|
1559
|
+
return this;
|
|
1560
|
+
}
|
|
1561
|
+
/**
|
|
1562
|
+
* Configure lifecycle hooks
|
|
1563
|
+
* Can be called multiple times - hooks will be executed in registration order
|
|
1564
|
+
*/
|
|
1565
|
+
lifecycle(lifecycle) {
|
|
1566
|
+
if (lifecycle) {
|
|
1567
|
+
this.lifecycles.push(lifecycle);
|
|
1568
|
+
}
|
|
1569
|
+
return this;
|
|
1570
|
+
}
|
|
1571
|
+
/**
|
|
1572
|
+
* Build and return the final configuration
|
|
1573
|
+
*/
|
|
1574
|
+
build() {
|
|
1575
|
+
if (this.lifecycles.length > 0) {
|
|
1576
|
+
serverLogger.info("Merging lifecycles", { count: this.lifecycles.length });
|
|
1577
|
+
this.config.lifecycle = this.mergeLifecycles();
|
|
1578
|
+
}
|
|
1579
|
+
return this.config;
|
|
1580
|
+
}
|
|
1581
|
+
mergeLifecycles() {
|
|
1582
|
+
return {
|
|
1583
|
+
beforeInfrastructure: createMergedHook(
|
|
1584
|
+
collectHooks(this.lifecycles, "beforeInfrastructure")
|
|
1585
|
+
),
|
|
1586
|
+
afterInfrastructure: createMergedHook(
|
|
1587
|
+
collectHooks(this.lifecycles, "afterInfrastructure")
|
|
1588
|
+
),
|
|
1589
|
+
beforeRoutes: createMergedHook(
|
|
1590
|
+
collectHooks(this.lifecycles, "beforeRoutes")
|
|
1591
|
+
),
|
|
1592
|
+
afterRoutes: createMergedHook(
|
|
1593
|
+
collectHooks(this.lifecycles, "afterRoutes")
|
|
1594
|
+
),
|
|
1595
|
+
afterStart: createMergedHook(
|
|
1596
|
+
collectHooks(this.lifecycles, "afterStart")
|
|
1597
|
+
),
|
|
1598
|
+
beforeShutdown: createMergedHook(
|
|
1599
|
+
collectHooks(this.lifecycles, "beforeShutdown")
|
|
1600
|
+
)
|
|
1601
|
+
};
|
|
1602
|
+
}
|
|
1603
|
+
};
|
|
1604
|
+
function defineServerConfig() {
|
|
1605
|
+
return new ServerConfigBuilder();
|
|
3665
1606
|
}
|
|
3666
1607
|
|
|
3667
|
-
export { createServer, startServer };
|
|
1608
|
+
export { createServer, defineServerConfig, loadEnvFiles, startServer };
|
|
3668
1609
|
//# sourceMappingURL=index.js.map
|
|
3669
1610
|
//# sourceMappingURL=index.js.map
|