@spfn/core 0.1.0-alpha.88 → 0.2.0-beta.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1046 -384
- package/dist/boss-D-fGtVgM.d.ts +187 -0
- package/dist/cache/index.d.ts +13 -33
- package/dist/cache/index.js +14 -703
- package/dist/cache/index.js.map +1 -1
- package/dist/codegen/index.d.ts +167 -17
- package/dist/codegen/index.js +76 -1419
- package/dist/codegen/index.js.map +1 -1
- package/dist/config/index.d.ts +1191 -0
- package/dist/config/index.js +264 -0
- package/dist/config/index.js.map +1 -0
- package/dist/db/index.d.ts +728 -59
- package/dist/db/index.js +1028 -1225
- package/dist/db/index.js.map +1 -1
- package/dist/env/index.d.ts +579 -308
- package/dist/env/index.js +438 -930
- package/dist/env/index.js.map +1 -1
- package/dist/errors/index.d.ts +417 -29
- package/dist/errors/index.js +359 -98
- package/dist/errors/index.js.map +1 -1
- package/dist/event/index.d.ts +108 -0
- package/dist/event/index.js +122 -0
- package/dist/event/index.js.map +1 -0
- package/dist/job/index.d.ts +172 -0
- package/dist/job/index.js +361 -0
- package/dist/job/index.js.map +1 -0
- package/dist/logger/index.d.ts +20 -79
- package/dist/logger/index.js +82 -387
- package/dist/logger/index.js.map +1 -1
- package/dist/middleware/index.d.ts +2 -11
- package/dist/middleware/index.js +49 -703
- package/dist/middleware/index.js.map +1 -1
- package/dist/nextjs/index.d.ts +120 -0
- package/dist/nextjs/index.js +416 -0
- package/dist/nextjs/index.js.map +1 -0
- package/dist/{client/nextjs/index.d.ts → nextjs/server.d.ts} +288 -262
- package/dist/nextjs/server.js +568 -0
- package/dist/nextjs/server.js.map +1 -0
- package/dist/route/index.d.ts +686 -25
- package/dist/route/index.js +440 -1287
- package/dist/route/index.js.map +1 -1
- package/dist/route/types.d.ts +38 -0
- package/dist/route/types.js +3 -0
- package/dist/route/types.js.map +1 -0
- package/dist/server/index.d.ts +201 -67
- package/dist/server/index.js +921 -3182
- package/dist/server/index.js.map +1 -1
- package/dist/types-BGl4QL1w.d.ts +77 -0
- package/dist/types-DRG2XMTR.d.ts +157 -0
- package/package.json +52 -47
- package/dist/auto-loader-JFaZ9gON.d.ts +0 -80
- package/dist/client/index.d.ts +0 -358
- package/dist/client/index.js +0 -357
- package/dist/client/index.js.map +0 -1
- package/dist/client/nextjs/index.js +0 -371
- package/dist/client/nextjs/index.js.map +0 -1
- package/dist/codegen/generators/index.d.ts +0 -19
- package/dist/codegen/generators/index.js +0 -1404
- package/dist/codegen/generators/index.js.map +0 -1
- package/dist/database-errors-BNNmLTJE.d.ts +0 -86
- package/dist/events/index.d.ts +0 -183
- package/dist/events/index.js +0 -77
- package/dist/events/index.js.map +0 -1
- package/dist/index-DHiAqhKv.d.ts +0 -101
- package/dist/index.d.ts +0 -8
- package/dist/index.js +0 -3674
- package/dist/index.js.map +0 -1
- package/dist/types/index.d.ts +0 -121
- package/dist/types/index.js +0 -38
- package/dist/types/index.js.map +0 -1
- package/dist/types-BXibIEyj.d.ts +0 -60
package/dist/index.js
DELETED
@@ -1,3674 +0,0 @@
import { readFileSync, existsSync, readdirSync, statSync, mkdirSync, accessSync, constants, writeFileSync, unlinkSync, createWriteStream, renameSync } from 'fs';
import { join, dirname, relative, basename } from 'path';
import { config } from 'dotenv';
import postgres from 'postgres';
import { drizzle } from 'drizzle-orm/postgres-js';
import { timestamp, bigserial, pgSchema } from 'drizzle-orm/pg-core';
import { AsyncLocalStorage } from 'async_hooks';
import { randomUUID, randomBytes } from 'crypto';
import { createMiddleware } from 'hono/factory';
import { eq, and } from 'drizzle-orm';
import { Hono } from 'hono';
import { cors } from 'hono/cors';
import { readdir, stat } from 'fs/promises';
import { serve } from '@hono/node-server';
import { networkInterfaces } from 'os';

var __defProp = Object.defineProperty;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __esm = (fn, res) => function __init() {
  return fn && (res = (0, fn[__getOwnPropNames(fn)[0]])(fn = 0)), res;
};
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};

// src/logger/types.ts
var LOG_LEVEL_PRIORITY;
var init_types = __esm({
  "src/logger/types.ts"() {
    LOG_LEVEL_PRIORITY = {
      debug: 0,
      info: 1,
      warn: 2,
      error: 3,
      fatal: 4
    };
  }
});

// src/logger/formatters.ts
function isSensitiveKey(key) {
  const lowerKey = key.toLowerCase();
  return SENSITIVE_KEYS.some((sensitive) => lowerKey.includes(sensitive));
}
function maskSensitiveData(data) {
  if (data === null || data === void 0) {
    return data;
  }
  if (Array.isArray(data)) {
    return data.map((item) => maskSensitiveData(item));
  }
  if (typeof data === "object") {
    const masked = {};
    for (const [key, value] of Object.entries(data)) {
      if (isSensitiveKey(key)) {
        masked[key] = MASKED_VALUE;
      } else if (typeof value === "object" && value !== null) {
        masked[key] = maskSensitiveData(value);
      } else {
        masked[key] = value;
      }
    }
    return masked;
  }
  return data;
}
function formatTimestamp(date) {
  return date.toISOString();
}
function formatTimestampHuman(date) {
  const year = date.getFullYear();
  const month = String(date.getMonth() + 1).padStart(2, "0");
  const day = String(date.getDate()).padStart(2, "0");
  const hours = String(date.getHours()).padStart(2, "0");
  const minutes = String(date.getMinutes()).padStart(2, "0");
  const seconds = String(date.getSeconds()).padStart(2, "0");
  const ms = String(date.getMilliseconds()).padStart(3, "0");
  return `${year}-${month}-${day} ${hours}:${minutes}:${seconds}.${ms}`;
}
function formatError(error) {
  const lines = [];
  lines.push(`${error.name}: ${error.message}`);
  if (error.stack) {
    const stackLines = error.stack.split("\n").slice(1);
    lines.push(...stackLines);
  }
  return lines.join("\n");
}
function formatConsole(metadata, colorize = true) {
  const parts = [];
  const timestamp2 = formatTimestampHuman(metadata.timestamp);
  if (colorize) {
    parts.push(`${COLORS.gray}[${timestamp2}]${COLORS.reset}`);
  } else {
    parts.push(`[${timestamp2}]`);
  }
  if (metadata.module) {
    if (colorize) {
      parts.push(`${COLORS.dim}[module=${metadata.module}]${COLORS.reset}`);
    } else {
      parts.push(`[module=${metadata.module}]`);
    }
  }
  if (metadata.context && Object.keys(metadata.context).length > 0) {
    Object.entries(metadata.context).forEach(([key, value]) => {
      let valueStr;
      if (typeof value === "string") {
        valueStr = value;
      } else if (typeof value === "object" && value !== null) {
        try {
          valueStr = JSON.stringify(value);
        } catch (error) {
          valueStr = "[circular]";
        }
      } else {
        valueStr = String(value);
      }
      if (colorize) {
        parts.push(`${COLORS.dim}[${key}=${valueStr}]${COLORS.reset}`);
      } else {
        parts.push(`[${key}=${valueStr}]`);
      }
    });
  }
  const levelStr = metadata.level.toUpperCase();
  if (colorize) {
    const color = COLORS[metadata.level];
    parts.push(`${color}(${levelStr})${COLORS.reset}:`);
  } else {
    parts.push(`(${levelStr}):`);
  }
  if (colorize) {
    parts.push(`${COLORS.bright}${metadata.message}${COLORS.reset}`);
  } else {
    parts.push(metadata.message);
  }
  let output = parts.join(" ");
  if (metadata.error) {
    output += "\n" + formatError(metadata.error);
  }
  return output;
}
function formatJSON(metadata) {
  const obj = {
    timestamp: formatTimestamp(metadata.timestamp),
    level: metadata.level,
    message: metadata.message
  };
  if (metadata.module) {
    obj.module = metadata.module;
  }
  if (metadata.context) {
    obj.context = metadata.context;
  }
  if (metadata.error) {
    obj.error = {
      name: metadata.error.name,
      message: metadata.error.message,
      stack: metadata.error.stack
    };
  }
  return JSON.stringify(obj);
}
var SENSITIVE_KEYS, MASKED_VALUE, COLORS;
var init_formatters = __esm({
  "src/logger/formatters.ts"() {
    SENSITIVE_KEYS = [
      "password",
      "passwd",
      "pwd",
      "secret",
      "token",
      "apikey",
      "api_key",
      "accesstoken",
      "access_token",
      "refreshtoken",
      "refresh_token",
      "authorization",
      "auth",
      "cookie",
      "session",
      "sessionid",
      "session_id",
      "privatekey",
      "private_key",
      "creditcard",
      "credit_card",
      "cardnumber",
      "card_number",
      "cvv",
      "ssn",
      "pin"
    ];
    MASKED_VALUE = "***MASKED***";
    COLORS = {
      reset: "\x1B[0m",
      bright: "\x1B[1m",
      dim: "\x1B[2m",
      // Log level colors
      debug: "\x1B[36m",
      // cyan
      info: "\x1B[32m",
      // green
      warn: "\x1B[33m",
      // yellow
      error: "\x1B[31m",
      // red
      fatal: "\x1B[35m",
      // magenta
      // Additional colors
      gray: "\x1B[90m"
    };
  }
});

// src/logger/logger.ts
var Logger;
var init_logger = __esm({
  "src/logger/logger.ts"() {
    init_types();
    init_formatters();
    Logger = class _Logger {
      config;
      module;
      constructor(config) {
        this.config = config;
        this.module = config.module;
      }
      /**
       * Get current log level
       */
      get level() {
        return this.config.level;
      }
      /**
       * Create child logger (per module)
       */
      child(module) {
        return new _Logger({
          ...this.config,
          module
        });
      }
      /**
       * Debug log
       */
      debug(message, context) {
        this.log("debug", message, void 0, context);
      }
      /**
       * Info log
       */
      info(message, context) {
        this.log("info", message, void 0, context);
      }
      warn(message, errorOrContext, context) {
        if (errorOrContext instanceof Error) {
          this.log("warn", message, errorOrContext, context);
        } else {
          this.log("warn", message, void 0, errorOrContext);
        }
      }
      error(message, errorOrContext, context) {
        if (errorOrContext instanceof Error) {
          this.log("error", message, errorOrContext, context);
        } else {
          this.log("error", message, void 0, errorOrContext);
        }
      }
      fatal(message, errorOrContext, context) {
        if (errorOrContext instanceof Error) {
          this.log("fatal", message, errorOrContext, context);
        } else {
          this.log("fatal", message, void 0, errorOrContext);
        }
      }
      /**
       * Log processing (internal)
       */
      log(level, message, error, context) {
        if (LOG_LEVEL_PRIORITY[level] < LOG_LEVEL_PRIORITY[this.config.level]) {
          return;
        }
        const metadata = {
          timestamp: /* @__PURE__ */ new Date(),
          level,
          message,
          module: this.module,
          error,
          // Mask sensitive information in context to prevent credential leaks
          context: context ? maskSensitiveData(context) : void 0
        };
        this.processTransports(metadata);
      }
      /**
       * Process Transports
       */
      processTransports(metadata) {
        const promises = this.config.transports.filter((transport) => transport.enabled).map((transport) => this.safeTransportLog(transport, metadata));
        Promise.all(promises).catch((error) => {
          const errorMessage = error instanceof Error ? error.message : String(error);
          process.stderr.write(`[Logger] Transport error: ${errorMessage}
`);
        });
      }
      /**
       * Transport log (error-safe)
       */
      async safeTransportLog(transport, metadata) {
        try {
          await transport.log(metadata);
        } catch (error) {
          const errorMessage = error instanceof Error ? error.message : String(error);
          process.stderr.write(`[Logger] Transport "${transport.name}" failed: ${errorMessage}
`);
        }
      }
      /**
       * Close all Transports
       */
      async close() {
        const closePromises = this.config.transports.filter((transport) => transport.close).map((transport) => transport.close());
        await Promise.all(closePromises);
      }
    };
  }
});

// src/logger/transports/console.ts
var ConsoleTransport;
var init_console = __esm({
  "src/logger/transports/console.ts"() {
    init_types();
    init_formatters();
    ConsoleTransport = class {
      name = "console";
      level;
      enabled;
      colorize;
      constructor(config) {
        this.level = config.level;
        this.enabled = config.enabled;
        this.colorize = config.colorize ?? true;
      }
      async log(metadata) {
        if (!this.enabled) {
          return;
        }
        if (LOG_LEVEL_PRIORITY[metadata.level] < LOG_LEVEL_PRIORITY[this.level]) {
          return;
        }
        const message = formatConsole(metadata, this.colorize);
        if (metadata.level === "warn" || metadata.level === "error" || metadata.level === "fatal") {
          console.error(message);
        } else {
          console.log(message);
        }
      }
    };
  }
});
var FileTransport;
var init_file = __esm({
  "src/logger/transports/file.ts"() {
    init_types();
    init_formatters();
    FileTransport = class {
      name = "file";
      level;
      enabled;
      logDir;
      maxFileSize;
      maxFiles;
      currentStream = null;
      currentFilename = null;
      constructor(config) {
        this.level = config.level;
        this.enabled = config.enabled;
        this.logDir = config.logDir;
        this.maxFileSize = config.maxFileSize ?? 10 * 1024 * 1024;
        this.maxFiles = config.maxFiles ?? 10;
        if (!existsSync(this.logDir)) {
          mkdirSync(this.logDir, { recursive: true });
        }
      }
      async log(metadata) {
        if (!this.enabled) {
          return;
        }
        if (LOG_LEVEL_PRIORITY[metadata.level] < LOG_LEVEL_PRIORITY[this.level]) {
          return;
        }
        const message = formatJSON(metadata);
        const filename = this.getLogFilename(metadata.timestamp);
        if (this.currentFilename !== filename) {
          await this.rotateStream(filename);
          await this.cleanOldFiles();
        } else if (this.currentFilename) {
          await this.checkAndRotateBySize();
        }
        if (this.currentStream) {
          return new Promise((resolve, reject) => {
            this.currentStream.write(message + "\n", "utf-8", (error) => {
              if (error) {
                process.stderr.write(`[FileTransport] Failed to write log: ${error.message}
`);
                reject(error);
              } else {
                resolve();
              }
            });
          });
        }
      }
      /**
       * Rotate the stream (when the date changes)
       */
      async rotateStream(filename) {
        if (this.currentStream) {
          await this.closeStream();
        }
        const filepath = join(this.logDir, filename);
        this.currentStream = createWriteStream(filepath, {
          flags: "a",
          // append mode
          encoding: "utf-8"
        });
        this.currentFilename = filename;
        this.currentStream.on("error", (error) => {
          process.stderr.write(`[FileTransport] Stream error: ${error.message}
`);
          this.currentStream = null;
          this.currentFilename = null;
        });
      }
      /**
       * Close the current stream
       */
      async closeStream() {
        if (!this.currentStream) {
          return;
        }
        return new Promise((resolve, reject) => {
          this.currentStream.end((error) => {
            if (error) {
              reject(error);
            } else {
              this.currentStream = null;
              this.currentFilename = null;
              resolve();
            }
          });
        });
      }
      /**
       * Check file size and rotate when the size limit is reached
       */
      async checkAndRotateBySize() {
        if (!this.currentFilename) {
          return;
        }
        const filepath = join(this.logDir, this.currentFilename);
        if (!existsSync(filepath)) {
          return;
        }
        try {
          const stats = statSync(filepath);
          if (stats.size >= this.maxFileSize) {
            await this.rotateBySize();
          }
        } catch (error) {
          const errorMessage = error instanceof Error ? error.message : String(error);
          process.stderr.write(`[FileTransport] Failed to check file size: ${errorMessage}
`);
        }
      }
      /**
       * Perform size-based rotation
       * e.g. 2025-01-01.log -> 2025-01-01.1.log, 2025-01-01.1.log -> 2025-01-01.2.log
       */
      async rotateBySize() {
        if (!this.currentFilename) {
          return;
        }
        await this.closeStream();
        const baseName = this.currentFilename.replace(/\.log$/, "");
        const files = readdirSync(this.logDir);
        const relatedFiles = files.filter((file) => file.startsWith(baseName) && file.endsWith(".log")).sort().reverse();
        for (const file of relatedFiles) {
          const match = file.match(/\.(\d+)\.log$/);
          if (match) {
            const oldNum = parseInt(match[1], 10);
            const newNum = oldNum + 1;
            const oldPath = join(this.logDir, file);
            const newPath2 = join(this.logDir, `${baseName}.${newNum}.log`);
            try {
              renameSync(oldPath, newPath2);
            } catch (error) {
              const errorMessage = error instanceof Error ? error.message : String(error);
              process.stderr.write(`[FileTransport] Failed to rotate file: ${errorMessage}
`);
            }
          }
        }
        const currentPath = join(this.logDir, this.currentFilename);
        const newPath = join(this.logDir, `${baseName}.1.log`);
        try {
          if (existsSync(currentPath)) {
            renameSync(currentPath, newPath);
          }
        } catch (error) {
          const errorMessage = error instanceof Error ? error.message : String(error);
          process.stderr.write(`[FileTransport] Failed to rotate current file: ${errorMessage}
`);
        }
        await this.rotateStream(this.currentFilename);
      }
      /**
       * Clean up old log files
       * Deletes log files that exceed the maxFiles limit
       */
      async cleanOldFiles() {
        try {
          if (!existsSync(this.logDir)) {
            return;
          }
          const files = readdirSync(this.logDir);
          const logFiles = files.filter((file) => file.endsWith(".log")).map((file) => {
            const filepath = join(this.logDir, file);
            const stats = statSync(filepath);
            return { file, mtime: stats.mtime };
          }).sort((a, b) => b.mtime.getTime() - a.mtime.getTime());
          if (logFiles.length > this.maxFiles) {
            const filesToDelete = logFiles.slice(this.maxFiles);
            for (const { file } of filesToDelete) {
              const filepath = join(this.logDir, file);
              try {
                unlinkSync(filepath);
              } catch (error) {
                const errorMessage = error instanceof Error ? error.message : String(error);
                process.stderr.write(`[FileTransport] Failed to delete old file "${file}": ${errorMessage}
`);
              }
            }
          }
        } catch (error) {
          const errorMessage = error instanceof Error ? error.message : String(error);
          process.stderr.write(`[FileTransport] Failed to clean old files: ${errorMessage}
`);
        }
      }
      /**
       * Build the log filename for a given date
       */
      getLogFilename(date) {
        const year = date.getFullYear();
        const month = String(date.getMonth() + 1).padStart(2, "0");
        const day = String(date.getDate()).padStart(2, "0");
        return `${year}-${month}-${day}.log`;
      }
      async close() {
        await this.closeStream();
      }
    };
  }
});
function isFileLoggingEnabled() {
  return process.env.LOGGER_FILE_ENABLED === "true";
}
function getDefaultLogLevel() {
  const isProduction = process.env.NODE_ENV === "production";
  const isDevelopment = process.env.NODE_ENV === "development";
  if (isDevelopment) {
    return "debug";
  }
  if (isProduction) {
    return "info";
  }
  return "warn";
}
function getConsoleConfig() {
  const isProduction = process.env.NODE_ENV === "production";
  return {
    level: "debug",
    enabled: true,
    colorize: !isProduction
    // Dev: colored output, Production: plain text
  };
}
function getFileConfig() {
  const isProduction = process.env.NODE_ENV === "production";
  return {
    level: "info",
    enabled: isProduction,
    // File logging in production only
    logDir: process.env.LOG_DIR || "./logs",
    maxFileSize: 10 * 1024 * 1024,
    // 10MB
    maxFiles: 10
  };
}
function validateDirectoryWritable(dirPath) {
  if (!existsSync(dirPath)) {
    try {
      mkdirSync(dirPath, { recursive: true });
    } catch (error) {
      const errorMessage = error instanceof Error ? error.message : String(error);
      throw new Error(`Failed to create log directory "${dirPath}": ${errorMessage}`);
    }
  }
  try {
    accessSync(dirPath, constants.W_OK);
  } catch {
    throw new Error(`Log directory "${dirPath}" is not writable. Please check permissions.`);
  }
  const testFile = join(dirPath, ".logger-write-test");
  try {
    writeFileSync(testFile, "test", "utf-8");
    unlinkSync(testFile);
  } catch (error) {
    const errorMessage = error instanceof Error ? error.message : String(error);
    throw new Error(`Cannot write to log directory "${dirPath}": ${errorMessage}`);
  }
}
function validateFileConfig() {
  if (!isFileLoggingEnabled()) {
    return;
  }
  const logDir = process.env.LOG_DIR;
  if (!logDir) {
    throw new Error(
      "LOG_DIR environment variable is required when LOGGER_FILE_ENABLED=true. Example: LOG_DIR=/var/log/myapp"
    );
  }
  validateDirectoryWritable(logDir);
}
function validateSlackConfig() {
  const webhookUrl = process.env.SLACK_WEBHOOK_URL;
  if (!webhookUrl) {
    return;
  }
  if (!webhookUrl.startsWith("https://hooks.slack.com/")) {
    throw new Error(
      `Invalid SLACK_WEBHOOK_URL: "${webhookUrl}". Slack webhook URLs must start with "https://hooks.slack.com/"`
    );
  }
}
function validateEmailConfig() {
  const smtpHost = process.env.SMTP_HOST;
  const smtpPort = process.env.SMTP_PORT;
  const emailFrom = process.env.EMAIL_FROM;
  const emailTo = process.env.EMAIL_TO;
  const hasAnyEmailConfig = smtpHost || smtpPort || emailFrom || emailTo;
  if (!hasAnyEmailConfig) {
    return;
  }
  const missingFields = [];
  if (!smtpHost) missingFields.push("SMTP_HOST");
  if (!smtpPort) missingFields.push("SMTP_PORT");
  if (!emailFrom) missingFields.push("EMAIL_FROM");
  if (!emailTo) missingFields.push("EMAIL_TO");
  if (missingFields.length > 0) {
    throw new Error(
      `Email transport configuration incomplete. Missing: ${missingFields.join(", ")}. Either set all required fields or remove all email configuration.`
    );
  }
  const port = parseInt(smtpPort, 10);
  if (isNaN(port) || port < 1 || port > 65535) {
    throw new Error(
      `Invalid SMTP_PORT: "${smtpPort}". Must be a number between 1 and 65535.`
    );
  }
  const emailRegex = /^[^\s@]+@[^\s@]+\.[^\s@]+$/;
  if (!emailRegex.test(emailFrom)) {
    throw new Error(`Invalid EMAIL_FROM format: "${emailFrom}"`);
  }
  const recipients = emailTo.split(",").map((e) => e.trim());
  for (const email of recipients) {
    if (!emailRegex.test(email)) {
      throw new Error(`Invalid email address in EMAIL_TO: "${email}"`);
    }
  }
}
function validateEnvironment() {
  const nodeEnv = process.env.NODE_ENV;
  if (!nodeEnv) {
    process.stderr.write(
      "[Logger] Warning: NODE_ENV is not set. Defaulting to test environment.\n"
    );
  }
}
function validateConfig() {
  try {
    validateEnvironment();
    validateFileConfig();
    validateSlackConfig();
    validateEmailConfig();
  } catch (error) {
    if (error instanceof Error) {
      throw new Error(`[Logger] Configuration validation failed: ${error.message}`);
    }
    throw error;
  }
}
var init_config = __esm({
  "src/logger/config.ts"() {
  }
});

// src/logger/factory.ts
function initializeTransports() {
  const transports = [];
  const consoleConfig = getConsoleConfig();
  transports.push(new ConsoleTransport(consoleConfig));
  const fileConfig = getFileConfig();
  if (fileConfig.enabled) {
    transports.push(new FileTransport(fileConfig));
  }
  return transports;
}
function initializeLogger() {
  validateConfig();
  return new Logger({
    level: getDefaultLogLevel(),
    transports: initializeTransports()
  });
}
var logger;
var init_factory = __esm({
  "src/logger/factory.ts"() {
    init_logger();
    init_console();
    init_file();
    init_config();
    logger = initializeLogger();
  }
});

// src/logger/index.ts
var init_logger2 = __esm({
  "src/logger/index.ts"() {
    init_factory();
    init_logger();
  }
});

// src/route/function-routes.ts
var function_routes_exports = {};
__export(function_routes_exports, {
  discoverFunctionRoutes: () => discoverFunctionRoutes
});
function discoverFunctionRoutes(cwd = process.cwd()) {
  const functions = [];
  const nodeModulesPath = join(cwd, "node_modules");
  try {
    const projectPkgPath = join(cwd, "package.json");
    const projectPkg = JSON.parse(readFileSync(projectPkgPath, "utf-8"));
    const dependencies = {
      ...projectPkg.dependencies,
      ...projectPkg.devDependencies
    };
    for (const [packageName] of Object.entries(dependencies)) {
      if (!packageName.startsWith("@spfn/") && !packageName.startsWith("spfn-")) {
        continue;
      }
      try {
        const pkgPath = join(nodeModulesPath, ...packageName.split("/"), "package.json");
        const pkg = JSON.parse(readFileSync(pkgPath, "utf-8"));
        if (pkg.spfn?.routes?.dir) {
          const { dir } = pkg.spfn.routes;
          const prefix = pkg.spfn.prefix;
          const packagePath = dirname(pkgPath);
          const routesDir = join(packagePath, dir);
          functions.push({
            packageName,
            routesDir,
            packagePath,
            prefix
            // Include prefix in function info
          });
          routeLogger.debug("Discovered function routes", {
            package: packageName,
            dir,
            prefix: prefix || "(none)"
          });
        }
      } catch (error) {
      }
    }
  } catch (error) {
    routeLogger.warn("Failed to discover function routes", {
      error: error instanceof Error ? error.message : "Unknown error"
    });
  }
  return functions;
}
var routeLogger;
var init_function_routes = __esm({
  "src/route/function-routes.ts"() {
    init_logger2();
    routeLogger = logger.child("function-routes");
  }
});

// src/errors/database-errors.ts
var DatabaseError, ConnectionError, QueryError, ConstraintViolationError, TransactionError, DeadlockError, DuplicateEntryError;
var init_database_errors = __esm({
  "src/errors/database-errors.ts"() {
    DatabaseError = class extends Error {
      statusCode;
      details;
      timestamp;
      constructor(message, statusCode = 500, details) {
        super(message);
        this.name = "DatabaseError";
        this.statusCode = statusCode;
        this.details = details;
        this.timestamp = /* @__PURE__ */ new Date();
        Error.captureStackTrace(this, this.constructor);
      }
      /**
       * Serialize error for API response
       */
      toJSON() {
        return {
          name: this.name,
          message: this.message,
          statusCode: this.statusCode,
          details: this.details,
          timestamp: this.timestamp.toISOString()
        };
      }
    };
    ConnectionError = class extends DatabaseError {
      constructor(message, details) {
        super(message, 503, details);
        this.name = "ConnectionError";
      }
    };
    QueryError = class extends DatabaseError {
      constructor(message, statusCode = 500, details) {
        super(message, statusCode, details);
        this.name = "QueryError";
      }
    };
    ConstraintViolationError = class extends QueryError {
      constructor(message, details) {
        super(message, 400, details);
        this.name = "ConstraintViolationError";
      }
    };
    TransactionError = class extends DatabaseError {
      constructor(message, statusCode = 500, details) {
        super(message, statusCode, details);
        this.name = "TransactionError";
      }
    };
    DeadlockError = class extends TransactionError {
      constructor(message, details) {
        super(message, 409, details);
        this.name = "DeadlockError";
      }
    };
    DuplicateEntryError = class extends QueryError {
      constructor(field, value) {
        super(`${field} '${value}' already exists`, 409, { field, value });
        this.name = "DuplicateEntryError";
      }
    };
  }
});

// src/errors/http-errors.ts
var init_http_errors = __esm({
  "src/errors/http-errors.ts"() {
  }
});

// src/errors/error-utils.ts
var init_error_utils = __esm({
  "src/errors/error-utils.ts"() {
    init_database_errors();
    init_http_errors();
  }
});

// src/errors/index.ts
var init_errors = __esm({
  "src/errors/index.ts"() {
    init_database_errors();
    init_http_errors();
    init_error_utils();
  }
});

// src/env/config.ts
var ENV_FILE_PRIORITY, TEST_ONLY_FILES;
var init_config2 = __esm({
  "src/env/config.ts"() {
    ENV_FILE_PRIORITY = [
      ".env",
      // Base configuration (lowest priority)
      ".env.{NODE_ENV}",
      // Environment-specific
      ".env.local",
      // Local overrides (excluded in test)
      ".env.{NODE_ENV}.local"
      // Local environment-specific (highest priority)
    ];
    TEST_ONLY_FILES = [
      ".env.test",
      ".env.test.local"
    ];
  }
});
function buildFileList(basePath, nodeEnv) {
  const files = [];
  if (!nodeEnv) {
    files.push(join(basePath, ".env"));
    files.push(join(basePath, ".env.local"));
    return files;
  }
  for (const pattern of ENV_FILE_PRIORITY) {
    const fileName = pattern.replace("{NODE_ENV}", nodeEnv);
    if (nodeEnv === "test" && fileName === ".env.local") {
      continue;
    }
    if (nodeEnv === "local" && pattern === ".env.local") {
      continue;
    }
    if (nodeEnv !== "test" && TEST_ONLY_FILES.includes(fileName)) {
      continue;
    }
    files.push(join(basePath, fileName));
  }
  return files;
}
function loadSingleFile(filePath, debug) {
  if (!existsSync(filePath)) {
    if (debug) {
      envLogger.debug("Environment file not found (optional)", {
        path: filePath
      });
    }
    return { success: false, parsed: {}, error: "File not found" };
  }
  try {
    const result = config({ path: filePath });
    if (result.error) {
      envLogger.warn("Failed to parse environment file", {
        path: filePath,
        error: result.error.message
      });
      return {
        success: false,
        parsed: {},
        error: result.error.message
      };
    }
    const parsed = result.parsed || {};
    if (debug) {
      envLogger.debug("Environment file loaded successfully", {
        path: filePath,
        variables: Object.keys(parsed),
        count: Object.keys(parsed).length
      });
    }
    return { success: true, parsed };
  } catch (error) {
    const message = error instanceof Error ? error.message : "Unknown error";
    envLogger.error("Error loading environment file", {
      path: filePath,
      error: message
    });
    return { success: false, parsed: {}, error: message };
  }
}
function validateRequiredVars(required, debug) {
  const missing = [];
  for (const varName of required) {
    if (!process.env[varName]) {
      missing.push(varName);
    }
  }
  if (missing.length > 0) {
    const error = `Required environment variables missing: ${missing.join(", ")}`;
    envLogger.error("Environment validation failed", {
      missing,
      required
    });
    throw new Error(error);
  }
  if (debug) {
    envLogger.debug("Required environment variables validated", {
      required,
      allPresent: true
    });
  }
}
function loadEnvironment(options = {}) {
  const {
    basePath = process.cwd(),
    customPaths = [],
    debug = false,
    nodeEnv = process.env.NODE_ENV || "",
    required = [],
    useCache = true
  } = options;
  if (useCache && environmentLoaded && cachedLoadResult) {
    if (debug) {
      envLogger.debug("Returning cached environment", {
        loaded: cachedLoadResult.loaded.length,
        variables: Object.keys(cachedLoadResult.parsed).length
      });
    }
    return cachedLoadResult;
  }
  if (debug) {
    envLogger.debug("Loading environment variables", {
      basePath,
      nodeEnv,
      customPaths,
      required
    });
  }
  const result = {
    success: true,
    loaded: [],
    failed: [],
    parsed: {},
    warnings: []
  };
  const standardFiles = buildFileList(basePath, nodeEnv);
  const allFiles = [...standardFiles, ...customPaths];
  if (debug) {
    envLogger.debug("Environment files to load", {
      standardFiles,
      customPaths,
      total: allFiles.length
    });
  }
  const reversedFiles = [...allFiles].reverse();
  for (const filePath of reversedFiles) {
    const fileResult = loadSingleFile(filePath, debug);
    if (fileResult.success) {
      result.loaded.push(filePath);
      Object.assign(result.parsed, fileResult.parsed);
      if (fileResult.parsed["NODE_ENV"]) {
        const fileName = filePath.split("/").pop() || filePath;
        result.warnings.push(
          `NODE_ENV found in ${fileName}. It's recommended to set NODE_ENV via CLI (e.g., 'spfn dev', 'spfn build') instead of .env files for consistent environment behavior.`
        );
      }
    } else if (fileResult.error) {
      result.failed.push({
        path: filePath,
        reason: fileResult.error
      });
    }
  }
  if (debug || result.loaded.length > 0) {
    envLogger.info("Environment loading complete", {
      loaded: result.loaded.length,
      failed: result.failed.length,
      variables: Object.keys(result.parsed).length,
      files: result.loaded
    });
  }
  if (required.length > 0) {
    try {
      validateRequiredVars(required, debug);
    } catch (error) {
      result.success = false;
      result.errors = [
        error instanceof Error ? error.message : "Validation failed"
      ];
      throw error;
    }
  }
  if (result.warnings.length > 0) {
    for (const warning of result.warnings) {
      envLogger.warn(warning);
    }
  }
  environmentLoaded = true;
  cachedLoadResult = result;
  return result;
}
var envLogger, environmentLoaded, cachedLoadResult;
var init_loader = __esm({
  "src/env/loader.ts"() {
    init_logger2();
    init_config2();
    envLogger = logger.child("environment");
    environmentLoaded = false;
  }
});

// src/env/validator.ts
var init_validator = __esm({
  "src/env/validator.ts"() {
  }
});

// src/env/index.ts
var init_env = __esm({
  "src/env/index.ts"() {
    init_loader();
    init_config2();
    init_validator();
  }
});

// src/db/postgres-errors.ts
function parseUniqueViolation(message) {
  const patterns = [
    // Standard format: Key (field)=(value)
    /Key \(([^)]+)\)=\(([^)]+)\)/i,
    // With quotes: Key ("field")=('value')
    /Key \(["']?([^)"']+)["']?\)=\(["']?([^)"']+)["']?\)/i,
    // Alternative format
    /Key `([^`]+)`=`([^`]+)`/i
  ];
  for (const pattern of patterns) {
    const match = message.match(pattern);
    if (match) {
      const field = match[1].trim().replace(/["'`]/g, "");
      const value = match[2].trim().replace(/["'`]/g, "");
      return { field, value };
    }
  }
  return null;
}
function fromPostgresError(error) {
  const code = error?.code;
  const message = error?.message || "Database error occurred";
  switch (code) {
    // Class 08 — Connection Exception
    case "08000":
    // connection_exception
    case "08001":
    // sqlclient_unable_to_establish_sqlconnection
    case "08003":
    // connection_does_not_exist
    case "08004":
    // sqlserver_rejected_establishment_of_sqlconnection
    case "08006":
    // connection_failure
    case "08007":
    // transaction_resolution_unknown
    case "08P01":
      return new ConnectionError(message, { code });
    // Class 23 — Integrity Constraint Violation
    case "23000":
    // integrity_constraint_violation
    case "23001":
      return new ConstraintViolationError(message, { code, constraint: "integrity" });
    case "23502":
      return new ConstraintViolationError(message, { code, constraint: "not_null" });
    case "23503":
      return new ConstraintViolationError(message, { code, constraint: "foreign_key" });
    case "23505":
      const parsed = parseUniqueViolation(message);
      if (parsed) {
        return new DuplicateEntryError(parsed.field, parsed.value);
      }
      return new DuplicateEntryError("field", "value");
    case "23514":
      return new ConstraintViolationError(message, { code, constraint: "check" });
    // Class 40 — Transaction Rollback
    case "40000":
    // transaction_rollback
    case "40001":
    // serialization_failure
    case "40002":
    // transaction_integrity_constraint_violation
    case "40003":
      return new TransactionError(message, 500, { code });
    case "40P01":
      return new DeadlockError(message, { code });
    // Class 42 — Syntax Error or Access Rule Violation
    case "42000":
    // syntax_error_or_access_rule_violation
    case "42601":
    // syntax_error
    case "42501":
    // insufficient_privilege
    case "42602":
    // invalid_name
    case "42622":
    // name_too_long
    case "42701":
    // duplicate_column
    case "42702":
    // ambiguous_column
    case "42703":
    // undefined_column
    case "42704":
    // undefined_object
    case "42P01":
    // undefined_table
    case "42P02":
      return new QueryError(message, 400, { code });
    // Class 53 — Insufficient Resources
    case "53000":
    // insufficient_resources
    case "53100":
    // disk_full
    case "53200":
    // out_of_memory
    case "53300":
      return new ConnectionError(message, { code });
    // Class 57 — Operator Intervention
    case "57000":
    // operator_intervention
    case "57014":
    // query_canceled
    case "57P01":
    // admin_shutdown
    case "57P02":
    // crash_shutdown
    case "57P03":
      return new ConnectionError(message, { code });
    // Default: Unknown error
    default:
      return new QueryError(message, 500, { code });
  }
}
var init_postgres_errors = __esm({
  "src/db/postgres-errors.ts"() {
    init_errors();
  }
});
function delay(ms) {
  return new Promise((resolve) => setTimeout(resolve, ms));
}
async function createDatabaseConnection(connectionString, poolConfig, retryConfig) {
  let lastError;
  for (let attempt = 0; attempt <= retryConfig.maxRetries; attempt++) {
    try {
      const client = postgres(connectionString, {
        max: poolConfig.max,
        idle_timeout: poolConfig.idleTimeout
      });
      await client`SELECT 1 as test`;
      if (attempt > 0) {
        dbLogger.info(`Database connected successfully after ${attempt} retries`);
      } else {
        dbLogger.info("Database connected successfully");
      }
      return client;
    } catch (error) {
      lastError = fromPostgresError(error);
      if (attempt < retryConfig.maxRetries) {
        const delayMs = Math.min(
          retryConfig.initialDelay * Math.pow(retryConfig.factor, attempt),
          retryConfig.maxDelay
        );
        dbLogger.warn(
          `Connection failed (attempt ${attempt + 1}/${retryConfig.maxRetries + 1}), retrying in ${delayMs}ms...`,
          lastError,
          {
            attempt: attempt + 1,
            maxRetries: retryConfig.maxRetries + 1,
            delayMs
          }
        );
        await delay(delayMs);
      }
    }
  }
  const errorMessage = `Failed to connect to database after ${retryConfig.maxRetries + 1} attempts: ${lastError?.message || "Unknown error"}`;
  throw new ConnectionError(errorMessage);
}
async function checkConnection(client) {
  try {
    await client`SELECT 1 as health_check`;
    return true;
  } catch (error) {
    dbLogger.error("Database health check failed", error);
    return false;
  }
}
var dbLogger;
var init_connection = __esm({
  "src/db/manager/connection.ts"() {
    init_logger2();
    init_errors();
    init_postgres_errors();
    dbLogger = logger.child("database");
  }
});

// src/db/manager/config.ts
function parseEnvNumber(key, prodDefault, devDefault) {
  const isProduction = process.env.NODE_ENV === "production";
  const envValue = parseInt(process.env[key] || "", 10);
  return isNaN(envValue) ? isProduction ? prodDefault : devDefault : envValue;
}
function parseEnvBoolean(key, defaultValue) {
  const value = process.env[key];
  if (value === void 0) return defaultValue;
  return value.toLowerCase() === "true";
}
function getPoolConfig(options) {
  return {
    max: options?.max ?? parseEnvNumber("DB_POOL_MAX", 20, 10),
    idleTimeout: options?.idleTimeout ?? parseEnvNumber("DB_POOL_IDLE_TIMEOUT", 30, 20)
  };
}
function getRetryConfig() {
  return {
    maxRetries: parseEnvNumber("DB_RETRY_MAX", 5, 3),
    initialDelay: parseEnvNumber("DB_RETRY_INITIAL_DELAY", 100, 50),
    maxDelay: parseEnvNumber("DB_RETRY_MAX_DELAY", 1e4, 5e3),
    factor: parseEnvNumber("DB_RETRY_FACTOR", 2, 2)
  };
}
function buildHealthCheckConfig(options) {
  return {
    enabled: options?.enabled ?? parseEnvBoolean("DB_HEALTH_CHECK_ENABLED", true),
    interval: options?.interval ?? parseEnvNumber("DB_HEALTH_CHECK_INTERVAL", 6e4, 6e4),
    reconnect: options?.reconnect ?? parseEnvBoolean("DB_HEALTH_CHECK_RECONNECT", true),
    maxRetries: options?.maxRetries ?? parseEnvNumber("DB_HEALTH_CHECK_MAX_RETRIES", 3, 3),
    retryInterval: options?.retryInterval ?? parseEnvNumber("DB_HEALTH_CHECK_RETRY_INTERVAL", 5e3, 5e3)
  };
}
function buildMonitoringConfig(options) {
  const isDevelopment = process.env.NODE_ENV !== "production";
  return {
    enabled: options?.enabled ?? parseEnvBoolean("DB_MONITORING_ENABLED", isDevelopment),
    slowThreshold: options?.slowThreshold ?? parseEnvNumber("DB_MONITORING_SLOW_THRESHOLD", 1e3, 1e3),
    logQueries: options?.logQueries ?? parseEnvBoolean("DB_MONITORING_LOG_QUERIES", false)
  };
}
var init_config3 = __esm({
  "src/db/manager/config.ts"() {
  }
});
function hasDatabaseConfig() {
  return !!(process.env.DATABASE_URL || process.env.DATABASE_WRITE_URL || process.env.DATABASE_READ_URL);
}
function detectDatabasePattern() {
  if (process.env.DATABASE_WRITE_URL && process.env.DATABASE_READ_URL) {
    return {
      type: "write-read",
      write: process.env.DATABASE_WRITE_URL,
      read: process.env.DATABASE_READ_URL
    };
  }
  if (process.env.DATABASE_URL && process.env.DATABASE_REPLICA_URL) {
    return {
      type: "legacy",
      primary: process.env.DATABASE_URL,
      replica: process.env.DATABASE_REPLICA_URL
    };
  }
  if (process.env.DATABASE_URL) {
    return {
      type: "single",
      url: process.env.DATABASE_URL
    };
  }
  if (process.env.DATABASE_WRITE_URL) {
    return {
      type: "single",
      url: process.env.DATABASE_WRITE_URL
    };
  }
  return { type: "none" };
}
async function createWriteReadClients(writeUrl, readUrl, poolConfig, retryConfig) {
  const writeClient = await createDatabaseConnection(writeUrl, poolConfig, retryConfig);
  const readClient = await createDatabaseConnection(readUrl, poolConfig, retryConfig);
  return {
    write: drizzle(writeClient),
    read: drizzle(readClient),
    writeClient,
    readClient
  };
}
async function createSingleClient(url, poolConfig, retryConfig) {
  const client = await createDatabaseConnection(url, poolConfig, retryConfig);
  const db = drizzle(client);
  return {
    write: db,
    read: db,
    writeClient: client,
    readClient: client
  };
}
async function createDatabaseFromEnv(options) {
  if (!hasDatabaseConfig()) {
    dbLogger2.debug("No DATABASE_URL found, loading environment variables");
    const result = loadEnvironment({
      debug: true
    });
    dbLogger2.debug("Environment variables loaded", {
      success: result.success,
      loaded: result.loaded.length,
      hasDatabaseUrl: !!process.env.DATABASE_URL,
      hasWriteUrl: !!process.env.DATABASE_WRITE_URL,
      hasReadUrl: !!process.env.DATABASE_READ_URL
    });
  }
  if (!hasDatabaseConfig()) {
    dbLogger2.warn("No database configuration found", {
      cwd: process.cwd(),
      nodeEnv: process.env.NODE_ENV,
      checkedVars: ["DATABASE_URL", "DATABASE_WRITE_URL", "DATABASE_READ_URL"]
    });
    return { write: void 0, read: void 0 };
  }
  try {
    const poolConfig = getPoolConfig(options?.pool);
    const retryConfig = getRetryConfig();
    const pattern = detectDatabasePattern();
    switch (pattern.type) {
      case "write-read":
        dbLogger2.debug("Using write-read pattern", {
          write: pattern.write.replace(/:[^:@]+@/, ":***@"),
          read: pattern.read.replace(/:[^:@]+@/, ":***@")
        });
        return await createWriteReadClients(
          pattern.write,
          pattern.read,
          poolConfig,
          retryConfig
        );
      case "legacy":
        dbLogger2.debug("Using legacy replica pattern", {
          primary: pattern.primary.replace(/:[^:@]+@/, ":***@"),
          replica: pattern.replica.replace(/:[^:@]+@/, ":***@")
        });
        return await createWriteReadClients(
          pattern.primary,
          pattern.replica,
          poolConfig,
          retryConfig
        );
      case "single":
        dbLogger2.debug("Using single database pattern", {
          url: pattern.url.replace(/:[^:@]+@/, ":***@")
        });
        return await createSingleClient(pattern.url, poolConfig, retryConfig);
      case "none":
        dbLogger2.warn("No database pattern detected");
        return { write: void 0, read: void 0 };
    }
  } catch (error) {
    const message = error instanceof Error ? error.message : "Unknown error";
    dbLogger2.error("Failed to create database connection", {
      error: message,
      stage: "initialization",
      hasWriteUrl: !!process.env.DATABASE_WRITE_URL,
      hasReadUrl: !!process.env.DATABASE_READ_URL,
      hasUrl: !!process.env.DATABASE_URL,
      hasReplicaUrl: !!process.env.DATABASE_REPLICA_URL
    });
    throw new Error(`Database connection failed: ${message}`, { cause: error });
  }
}
var dbLogger2;
var init_factory2 = __esm({
  "src/db/manager/factory.ts"() {
    init_logger2();
    init_env();
    init_connection();
    init_config3();
    dbLogger2 = logger.child("database");
  }
});

// src/db/manager/global-state.ts
var getWriteInstance, setWriteInstance, getReadInstance, setReadInstance, getWriteClient, setWriteClient, getReadClient, setReadClient, getHealthCheckInterval, setHealthCheckInterval, setMonitoringConfig;
var init_global_state = __esm({
  "src/db/manager/global-state.ts"() {
    getWriteInstance = () => globalThis.__SPFN_DB_WRITE__;
    setWriteInstance = (instance) => {
      globalThis.__SPFN_DB_WRITE__ = instance;
    };
    getReadInstance = () => globalThis.__SPFN_DB_READ__;
    setReadInstance = (instance) => {
      globalThis.__SPFN_DB_READ__ = instance;
    };
    getWriteClient = () => globalThis.__SPFN_DB_WRITE_CLIENT__;
    setWriteClient = (client) => {
      globalThis.__SPFN_DB_WRITE_CLIENT__ = client;
    };
    getReadClient = () => globalThis.__SPFN_DB_READ_CLIENT__;
    setReadClient = (client) => {
      globalThis.__SPFN_DB_READ_CLIENT__ = client;
    };
    getHealthCheckInterval = () => globalThis.__SPFN_DB_HEALTH_CHECK__;
    setHealthCheckInterval = (interval) => {
      globalThis.__SPFN_DB_HEALTH_CHECK__ = interval;
    };
    setMonitoringConfig = (config) => {
      globalThis.__SPFN_DB_MONITORING__ = config;
    };
  }
});

// src/db/manager/health-check.ts
function startHealthCheck(config, options, getDatabase2, closeDatabase2) {
  const healthCheck = getHealthCheckInterval();
  if (healthCheck) {
    dbLogger3.debug("Health check already running");
    return;
  }
  dbLogger3.info("Starting database health check", {
    interval: `${config.interval}ms`,
    reconnect: config.reconnect
  });
  const interval = setInterval(async () => {
    try {
      const write = getDatabase2("write");
      const read = getDatabase2("read");
      if (write) {
        await write.execute("SELECT 1");
}
|
|
1524
|
-
if (read && read !== write) {
|
|
1525
|
-
await read.execute("SELECT 1");
|
|
1526
|
-
}
|
|
1527
|
-
} catch (error) {
|
|
1528
|
-
const message = error instanceof Error ? error.message : "Unknown error";
|
|
1529
|
-
dbLogger3.error("Database health check failed", { error: message });
|
|
1530
|
-
if (config.reconnect) {
|
|
1531
|
-
await attemptReconnection(config, options, closeDatabase2);
|
|
1532
|
-
}
|
|
1533
|
-
}
|
|
1534
|
-
}, config.interval);
|
|
1535
|
-
setHealthCheckInterval(interval);
|
|
1536
|
-
}
|
|
1537
|
-
async function attemptReconnection(config, options, closeDatabase2) {
|
|
1538
|
-
dbLogger3.warn("Attempting database reconnection", {
|
|
1539
|
-
maxRetries: config.maxRetries,
|
|
1540
|
-
retryInterval: `${config.retryInterval}ms`
|
|
1541
|
-
});
|
|
1542
|
-
for (let attempt = 1; attempt <= config.maxRetries; attempt++) {
|
|
1543
|
-
try {
|
|
1544
|
-
dbLogger3.debug(`Reconnection attempt ${attempt}/${config.maxRetries}`);
|
|
1545
|
-
await closeDatabase2();
|
|
1546
|
-
await new Promise((resolve) => setTimeout(resolve, config.retryInterval));
|
|
1547
|
-
const result = await createDatabaseFromEnv(options);
|
|
1548
|
-
if (result.write) {
|
|
1549
|
-
await result.write.execute("SELECT 1");
|
|
1550
|
-
setWriteInstance(result.write);
|
|
1551
|
-
setReadInstance(result.read);
|
|
1552
|
-
setWriteClient(result.writeClient);
|
|
1553
|
-
setReadClient(result.readClient);
|
|
1554
|
-
dbLogger3.info("Database reconnection successful", { attempt });
|
|
1555
|
-
return;
|
|
1556
|
-
}
|
|
1557
|
-
} catch (error) {
|
|
1558
|
-
const message = error instanceof Error ? error.message : "Unknown error";
|
|
1559
|
-
dbLogger3.error(`Reconnection attempt ${attempt} failed`, {
|
|
1560
|
-
error: message,
|
|
1561
|
-
attempt,
|
|
1562
|
-
maxRetries: config.maxRetries
|
|
1563
|
-
});
|
|
1564
|
-
if (attempt === config.maxRetries) {
|
|
1565
|
-
dbLogger3.error("Max reconnection attempts reached, giving up");
|
|
1566
|
-
}
|
|
1567
|
-
}
|
|
1568
|
-
}
|
|
1569
|
-
}
|
|
1570
|
-
function stopHealthCheck() {
|
|
1571
|
-
const healthCheck = getHealthCheckInterval();
|
|
1572
|
-
if (healthCheck) {
|
|
1573
|
-
clearInterval(healthCheck);
|
|
1574
|
-
setHealthCheckInterval(void 0);
|
|
1575
|
-
dbLogger3.info("Database health check stopped");
|
|
1576
|
-
}
|
|
1577
|
-
}
|
|
1578
|
-
var dbLogger3;
|
|
1579
|
-
var init_health_check = __esm({
|
|
1580
|
-
"src/db/manager/health-check.ts"() {
|
|
1581
|
-
init_logger2();
|
|
1582
|
-
init_factory2();
|
|
1583
|
-
init_global_state();
|
|
1584
|
-
dbLogger3 = logger.child("database");
|
|
1585
|
-
}
|
|
1586
|
-
});
|
|
1587
|
-
|
|
1588
|
-
// src/db/manager/manager.ts
|
|
1589
|
-
function getCallerInfo() {
|
|
1590
|
-
try {
|
|
1591
|
-
const stack = new Error().stack;
|
|
1592
|
-
if (!stack) return void 0;
|
|
1593
|
-
const lines = stack.split("\n");
|
|
1594
|
-
for (let i = 3; i < lines.length; i++) {
|
|
1595
|
-
const line = lines[i];
|
|
1596
|
-
if (!line.includes("node_modules") && !line.includes("/db/manager/")) {
|
|
1597
|
-
const match = line.match(/\((.+):(\d+):(\d+)\)/) || line.match(/at (.+):(\d+):(\d+)/);
|
|
1598
|
-
if (match) {
|
|
1599
|
-
const fullPath = match[1];
|
|
1600
|
-
const parts = fullPath.split("/");
|
|
1601
|
-
const srcIndex = parts.lastIndexOf("src");
|
|
1602
|
-
if (srcIndex !== -1) {
|
|
1603
|
-
const relativePath = parts.slice(srcIndex).join("/");
|
|
1604
|
-
return `${relativePath}:${match[2]}`;
|
|
1605
|
-
}
|
|
1606
|
-
return `${fullPath}:${match[2]}`;
|
|
1607
|
-
}
|
|
1608
|
-
break;
|
|
1609
|
-
}
|
|
1610
|
-
}
|
|
1611
|
-
} catch {
|
|
1612
|
-
}
|
|
1613
|
-
return void 0;
|
|
1614
|
-
}
|
|
1615
|
-
function getDatabase(type) {
|
|
1616
|
-
const writeInst = getWriteInstance();
|
|
1617
|
-
const readInst = getReadInstance();
|
|
1618
|
-
if (process.env.DB_DEBUG_TRACE === "true") {
|
|
1619
|
-
const caller = getCallerInfo();
|
|
1620
|
-
dbLogger4.debug("getDatabase() called", {
|
|
1621
|
-
type: type || "write",
|
|
1622
|
-
hasWrite: !!writeInst,
|
|
1623
|
-
hasRead: !!readInst,
|
|
1624
|
-
caller
|
|
1625
|
-
});
|
|
1626
|
-
}
|
|
1627
|
-
if (type === "read") {
|
|
1628
|
-
return readInst ?? writeInst;
|
|
1629
|
-
}
|
|
1630
|
-
return writeInst;
|
|
1631
|
-
}
|
|
1632
|
-
function setDatabase(write, read) {
|
|
1633
|
-
setWriteInstance(write);
|
|
1634
|
-
setReadInstance(read ?? write);
|
|
1635
|
-
}
|
|
1636
|
-
async function initDatabase(options) {
|
|
1637
|
-
const writeInst = getWriteInstance();
|
|
1638
|
-
if (writeInst) {
|
|
1639
|
-
dbLogger4.debug("Database already initialized");
|
|
1640
|
-
return { write: writeInst, read: getReadInstance() };
|
|
1641
|
-
}
|
|
1642
|
-
const result = await createDatabaseFromEnv(options);
|
|
1643
|
-
if (result.write) {
|
|
1644
|
-
try {
|
|
1645
|
-
await result.write.execute("SELECT 1");
|
|
1646
|
-
if (result.read && result.read !== result.write) {
|
|
1647
|
-
await result.read.execute("SELECT 1");
|
|
1648
|
-
}
|
|
1649
|
-
setWriteInstance(result.write);
|
|
1650
|
-
setReadInstance(result.read);
|
|
1651
|
-
setWriteClient(result.writeClient);
|
|
1652
|
-
setReadClient(result.readClient);
|
|
1653
|
-
const hasReplica = result.read && result.read !== result.write;
|
|
1654
|
-
dbLogger4.info(
|
|
1655
|
-
hasReplica ? "Database connected (Primary + Replica)" : "Database connected"
|
|
1656
|
-
);
|
|
1657
|
-
const healthCheckConfig = buildHealthCheckConfig(options?.healthCheck);
|
|
1658
|
-
if (healthCheckConfig.enabled) {
|
|
1659
|
-
startHealthCheck(healthCheckConfig, options, getDatabase, closeDatabase);
|
|
1660
|
-
}
|
|
1661
|
-
const monConfig = buildMonitoringConfig(options?.monitoring);
|
|
1662
|
-
setMonitoringConfig(monConfig);
|
|
1663
|
-
if (monConfig.enabled) {
|
|
1664
|
-
dbLogger4.info("Database query monitoring enabled", {
|
|
1665
|
-
slowThreshold: `${monConfig.slowThreshold}ms`,
|
|
1666
|
-
logQueries: monConfig.logQueries
|
|
1667
|
-
});
|
|
1668
|
-
}
|
|
1669
|
-
} catch (error) {
|
|
1670
|
-
const message = error instanceof Error ? error.message : "Unknown error";
|
|
1671
|
-
dbLogger4.error("Database connection failed", { error: message });
|
|
1672
|
-
await closeDatabase();
|
|
1673
|
-
throw new Error(`Database connection test failed: ${message}`, { cause: error });
|
|
1674
|
-
}
|
|
1675
|
-
} else {
|
|
1676
|
-
dbLogger4.warn("No database configuration found");
|
|
1677
|
-
dbLogger4.warn("Set DATABASE_URL environment variable to enable database");
|
|
1678
|
-
}
|
|
1679
|
-
return { write: getWriteInstance(), read: getReadInstance() };
|
|
1680
|
-
}
|
|
1681
|
-
async function closeDatabase() {
|
|
1682
|
-
const writeInst = getWriteInstance();
|
|
1683
|
-
const readInst = getReadInstance();
|
|
1684
|
-
if (!writeInst && !readInst) {
|
|
1685
|
-
dbLogger4.debug("No database connections to close");
|
|
1686
|
-
return;
|
|
1687
|
-
}
|
|
1688
|
-
stopHealthCheck();
|
|
1689
|
-
try {
|
|
1690
|
-
const closePromises = [];
|
|
1691
|
-
const writeC = getWriteClient();
|
|
1692
|
-
if (writeC) {
|
|
1693
|
-
dbLogger4.debug("Closing write connection...");
|
|
1694
|
-
closePromises.push(
|
|
1695
|
-
writeC.end({ timeout: 5 }).then(() => dbLogger4.debug("Write connection closed")).catch((err) => dbLogger4.error("Error closing write connection", err))
|
|
1696
|
-
);
|
|
1697
|
-
}
|
|
1698
|
-
const readC = getReadClient();
|
|
1699
|
-
if (readC && readC !== writeC) {
|
|
1700
|
-
dbLogger4.debug("Closing read connection...");
|
|
1701
|
-
closePromises.push(
|
|
1702
|
-
readC.end({ timeout: 5 }).then(() => dbLogger4.debug("Read connection closed")).catch((err) => dbLogger4.error("Error closing read connection", err))
|
|
1703
|
-
);
|
|
1704
|
-
}
|
|
1705
|
-
await Promise.all(closePromises);
|
|
1706
|
-
dbLogger4.info("All database connections closed");
|
|
1707
|
-
} catch (error) {
|
|
1708
|
-
dbLogger4.error("Error during database cleanup", error);
|
|
1709
|
-
throw error;
|
|
1710
|
-
} finally {
|
|
1711
|
-
setWriteInstance(void 0);
|
|
1712
|
-
setReadInstance(void 0);
|
|
1713
|
-
setWriteClient(void 0);
|
|
1714
|
-
setReadClient(void 0);
|
|
1715
|
-
setMonitoringConfig(void 0);
|
|
1716
|
-
}
|
|
1717
|
-
}
|
|
1718
|
-
function getDatabaseInfo() {
|
|
1719
|
-
const writeInst = getWriteInstance();
|
|
1720
|
-
const readInst = getReadInstance();
|
|
1721
|
-
return {
|
|
1722
|
-
hasWrite: !!writeInst,
|
|
1723
|
-
hasRead: !!readInst,
|
|
1724
|
-
isReplica: !!(readInst && readInst !== writeInst)
|
|
1725
|
-
};
|
|
1726
|
-
}
|
|
1727
|
-
var dbLogger4;
|
|
1728
|
-
var init_manager = __esm({
|
|
1729
|
-
"src/db/manager/manager.ts"() {
|
|
1730
|
-
init_logger2();
|
|
1731
|
-
init_factory2();
|
|
1732
|
-
init_config3();
|
|
1733
|
-
init_global_state();
|
|
1734
|
-
init_health_check();
|
|
1735
|
-
dbLogger4 = logger.child("database");
|
|
1736
|
-
}
|
|
1737
|
-
});
|
|
1738
|
-
|
|
1739
|
-
// src/db/manager/index.ts
|
|
1740
|
-
var init_manager2 = __esm({
|
|
1741
|
-
"src/db/manager/index.ts"() {
|
|
1742
|
-
init_factory2();
|
|
1743
|
-
init_manager();
|
|
1744
|
-
init_connection();
|
|
1745
|
-
}
|
|
1746
|
-
});
|
|
1747
|
-
function expandGlobPattern(pattern) {
|
|
1748
|
-
if (!pattern.includes("*")) {
|
|
1749
|
-
return existsSync(pattern) ? [pattern] : [];
|
|
1750
|
-
}
|
|
1751
|
-
const files = [];
|
|
1752
|
-
if (pattern.includes("**")) {
|
|
1753
|
-
const [baseDir, ...rest] = pattern.split("**");
|
|
1754
|
-
const extension = rest.join("").replace(/[\/\\]\*\./g, "").trim();
|
|
1755
|
-
const scanRecursive = (dir) => {
|
|
1756
|
-
if (!existsSync(dir)) return;
|
|
1757
|
-
try {
|
|
1758
|
-
const entries = readdirSync(dir);
|
|
1759
|
-
for (const entry of entries) {
|
|
1760
|
-
const fullPath = join(dir, entry);
|
|
1761
|
-
try {
|
|
1762
|
-
const stat2 = statSync(fullPath);
|
|
1763
|
-
if (stat2.isDirectory()) {
|
|
1764
|
-
scanRecursive(fullPath);
|
|
1765
|
-
} else if (stat2.isFile()) {
|
|
1766
|
-
if (!extension || fullPath.endsWith(extension)) {
|
|
1767
|
-
files.push(fullPath);
|
|
1768
|
-
}
|
|
1769
|
-
}
|
|
1770
|
-
} catch {
|
|
1771
|
-
}
|
|
1772
|
-
}
|
|
1773
|
-
} catch {
|
|
1774
|
-
}
|
|
1775
|
-
};
|
|
1776
|
-
scanRecursive(baseDir.trim() || ".");
|
|
1777
|
-
} else if (pattern.includes("*")) {
|
|
1778
|
-
const dir = dirname(pattern);
|
|
1779
|
-
const filePattern = basename(pattern);
|
|
1780
|
-
if (!existsSync(dir)) return [];
|
|
1781
|
-
try {
|
|
1782
|
-
const entries = readdirSync(dir);
|
|
1783
|
-
for (const entry of entries) {
|
|
1784
|
-
const fullPath = join(dir, entry);
|
|
1785
|
-
try {
|
|
1786
|
-
const stat2 = statSync(fullPath);
|
|
1787
|
-
if (stat2.isFile()) {
|
|
1788
|
-
if (filePattern === "*" || filePattern.startsWith("*.") && entry.endsWith(filePattern.slice(1))) {
|
|
1789
|
-
files.push(fullPath);
|
|
1790
|
-
}
|
|
1791
|
-
}
|
|
1792
|
-
} catch {
|
|
1793
|
-
}
|
|
1794
|
-
}
|
|
1795
|
-
} catch {
|
|
1796
|
-
}
|
|
1797
|
-
}
|
|
1798
|
-
return files;
|
|
1799
|
-
}
|
|
1800
|
-
function discoverPackageSchemas(cwd) {
|
|
1801
|
-
const schemas = [];
|
|
1802
|
-
const nodeModulesPath = join(cwd, "node_modules");
|
|
1803
|
-
if (!existsSync(nodeModulesPath)) {
|
|
1804
|
-
return schemas;
|
|
1805
|
-
}
|
|
1806
|
-
const projectPkgPath = join(cwd, "package.json");
|
|
1807
|
-
let directDeps = /* @__PURE__ */ new Set();
|
|
1808
|
-
if (existsSync(projectPkgPath)) {
|
|
1809
|
-
try {
|
|
1810
|
-
const projectPkg = JSON.parse(readFileSync(projectPkgPath, "utf-8"));
|
|
1811
|
-
directDeps = /* @__PURE__ */ new Set([
|
|
1812
|
-
...Object.keys(projectPkg.dependencies || {}),
|
|
1813
|
-
...Object.keys(projectPkg.devDependencies || {})
|
|
1814
|
-
]);
|
|
1815
|
-
} catch (error) {
|
|
1816
|
-
}
|
|
1817
|
-
}
|
|
1818
|
-
const checkPackage = (_pkgName, pkgPath) => {
|
|
1819
|
-
const pkgJsonPath = join(pkgPath, "package.json");
|
|
1820
|
-
if (!existsSync(pkgJsonPath)) return;
|
|
1821
|
-
try {
|
|
1822
|
-
const pkgJson = JSON.parse(readFileSync(pkgJsonPath, "utf-8"));
|
|
1823
|
-
if (pkgJson.spfn?.schemas) {
|
|
1824
|
-
const packageSchemas = Array.isArray(pkgJson.spfn.schemas) ? pkgJson.spfn.schemas : [pkgJson.spfn.schemas];
|
|
1825
|
-
for (const schema of packageSchemas) {
|
|
1826
|
-
const absolutePath = join(pkgPath, schema);
|
|
1827
|
-
const expandedFiles = expandGlobPattern(absolutePath);
|
|
1828
|
-
const schemaFiles = expandedFiles.filter(
|
|
1829
|
-
(file) => !file.endsWith("/index.js") && !file.endsWith("/index.ts") && !file.endsWith("/index.mjs") && !file.endsWith("\\index.js") && !file.endsWith("\\index.ts") && !file.endsWith("\\index.mjs")
|
|
1830
|
-
);
|
|
1831
|
-
schemas.push(...schemaFiles);
|
|
1832
|
-
}
|
|
1833
|
-
}
|
|
1834
|
-
} catch (error) {
|
|
1835
|
-
}
|
|
1836
|
-
};
|
|
1837
|
-
const spfnDir = join(nodeModulesPath, "@spfn");
|
|
1838
|
-
if (existsSync(spfnDir)) {
|
|
1839
|
-
try {
|
|
1840
|
-
const spfnPackages = readdirSync(spfnDir);
|
|
1841
|
-
for (const pkg of spfnPackages) {
|
|
1842
|
-
checkPackage(`@spfn/${pkg}`, join(spfnDir, pkg));
|
|
1843
|
-
}
|
|
1844
|
-
} catch (error) {
|
|
1845
|
-
}
|
|
1846
|
-
}
|
|
1847
|
-
for (const depName of directDeps) {
|
|
1848
|
-
if (depName.startsWith("@spfn/")) continue;
|
|
1849
|
-
const pkgPath = depName.startsWith("@") ? join(nodeModulesPath, ...depName.split("/")) : join(nodeModulesPath, depName);
|
|
1850
|
-
checkPackage(depName, pkgPath);
|
|
1851
|
-
}
|
|
1852
|
-
return schemas;
|
|
1853
|
-
}
|
|
1854
|
-
function detectDialect(url) {
|
|
1855
|
-
if (url.startsWith("postgres://") || url.startsWith("postgresql://")) {
|
|
1856
|
-
return "postgresql";
|
|
1857
|
-
}
|
|
1858
|
-
if (url.startsWith("mysql://")) {
|
|
1859
|
-
return "mysql";
|
|
1860
|
-
}
|
|
1861
|
-
if (url.startsWith("sqlite://") || url.includes(".db") || url.includes(".sqlite")) {
|
|
1862
|
-
return "sqlite";
|
|
1863
|
-
}
|
|
1864
|
-
throw new Error(
|
|
1865
|
-
`Unsupported database URL format: ${url}. Supported: postgresql://, mysql://, sqlite://`
|
|
1866
|
-
);
|
|
1867
|
-
}
|
|
1868
|
-
function getDrizzleConfig(options = {}) {
|
|
1869
|
-
const databaseUrl = options.databaseUrl ?? process.env.DATABASE_URL;
|
|
1870
|
-
if (!databaseUrl) {
|
|
1871
|
-
throw new Error(
|
|
1872
|
-
"DATABASE_URL is required. Set it in .env or pass it to getDrizzleConfig()"
|
|
1873
|
-
);
|
|
1874
|
-
}
|
|
1875
|
-
const dialect = options.dialect ?? detectDialect(databaseUrl);
|
|
1876
|
-
const out = options.out ?? "./src/server/drizzle";
|
|
1877
|
-
if (options.packageFilter) {
|
|
1878
|
-
const packageSchemas2 = options.disablePackageDiscovery ? [] : discoverPackageSchemas(options.cwd ?? process.cwd());
|
|
1879
|
-
const filteredSchemas = packageSchemas2.filter(
|
|
1880
|
-
(schemaPath) => schemaPath.includes(`node_modules/${options.packageFilter}/`)
|
|
1881
|
-
);
|
|
1882
|
-
if (filteredSchemas.length === 0) {
|
|
1883
|
-
throw new Error(
|
|
1884
|
-
`No schemas found for package ${options.packageFilter}. Make sure the package is installed and has "spfn.schemas" in package.json.`
|
|
1885
|
-
);
|
|
1886
|
-
}
|
|
1887
|
-
const schema2 = filteredSchemas.length === 1 ? filteredSchemas[0] : filteredSchemas;
|
|
1888
|
-
return {
|
|
1889
|
-
schema: schema2,
|
|
1890
|
-
out,
|
|
1891
|
-
dialect,
|
|
1892
|
-
dbCredentials: getDbCredentials(dialect, databaseUrl)
|
|
1893
|
-
};
|
|
1894
|
-
}
|
|
1895
|
-
const userSchema = options.schema ?? "./src/server/entities/**/*.ts";
|
|
1896
|
-
const userSchemas = Array.isArray(userSchema) ? userSchema : [userSchema];
|
|
1897
|
-
const packageSchemas = options.disablePackageDiscovery ? [] : discoverPackageSchemas(options.cwd ?? process.cwd());
|
|
1898
|
-
const allSchemas = [...userSchemas, ...packageSchemas];
|
|
1899
|
-
const schema = allSchemas.length === 1 ? allSchemas[0] : allSchemas;
|
|
1900
|
-
return {
|
|
1901
|
-
schema,
|
|
1902
|
-
out,
|
|
1903
|
-
dialect,
|
|
1904
|
-
dbCredentials: getDbCredentials(dialect, databaseUrl)
|
|
1905
|
-
};
|
|
1906
|
-
}
|
|
1907
|
-
function getDbCredentials(dialect, url) {
|
|
1908
|
-
switch (dialect) {
|
|
1909
|
-
case "postgresql":
|
|
1910
|
-
case "mysql":
|
|
1911
|
-
return { url };
|
|
1912
|
-
case "sqlite":
|
|
1913
|
-
const dbPath = url.replace("sqlite://", "").replace("sqlite:", "");
|
|
1914
|
-
return { url: dbPath };
|
|
1915
|
-
default:
|
|
1916
|
-
throw new Error(`Unsupported dialect: ${dialect}`);
|
|
1917
|
-
}
|
|
1918
|
-
}
|
|
1919
|
-
function generateDrizzleConfigFile(options = {}) {
|
|
1920
|
-
const config = getDrizzleConfig(options);
|
|
1921
|
-
const schemaValue = Array.isArray(config.schema) ? `[
|
|
1922
|
-
${config.schema.map((s) => `'${s}'`).join(",\n ")}
|
|
1923
|
-
]` : `'${config.schema}'`;
|
|
1924
|
-
return `import { defineConfig } from 'drizzle-kit';
|
|
1925
|
-
|
|
1926
|
-
export default defineConfig({
|
|
1927
|
-
schema: ${schemaValue},
|
|
1928
|
-
out: '${config.out}',
|
|
1929
|
-
dialect: '${config.dialect}',
|
|
1930
|
-
dbCredentials: ${JSON.stringify(config.dbCredentials, null, 4)},
|
|
1931
|
-
});
|
|
1932
|
-
`;
|
|
1933
|
-
}
|
|
1934
|
-
var init_config_generator = __esm({
|
|
1935
|
-
"src/db/manager/config-generator.ts"() {
|
|
1936
|
-
}
|
|
1937
|
-
});
|
|
1938
|
-
function id() {
|
|
1939
|
-
return bigserial("id", { mode: "number" }).primaryKey();
|
|
1940
|
-
}
|
|
1941
|
-
function timestamps(options) {
|
|
1942
|
-
const updatedAtColumn = timestamp("updated_at", { withTimezone: true, mode: "date" }).defaultNow().notNull();
|
|
1943
|
-
if (options?.autoUpdate) {
|
|
1944
|
-
updatedAtColumn.__autoUpdate = true;
|
|
1945
|
-
}
|
|
1946
|
-
return {
|
|
1947
|
-
createdAt: timestamp("created_at", { withTimezone: true, mode: "date" }).defaultNow().notNull(),
|
|
1948
|
-
updatedAt: updatedAtColumn
|
|
1949
|
-
};
|
|
1950
|
-
}
|
|
1951
|
-
function foreignKey(name, reference, options) {
|
|
1952
|
-
return bigserial(`${name}_id`, { mode: "number" }).notNull().references(reference, { onDelete: options?.onDelete ?? "cascade" });
|
|
1953
|
-
}
|
|
1954
|
-
function optionalForeignKey(name, reference, options) {
|
|
1955
|
-
return bigserial(`${name}_id`, { mode: "number" }).references(reference, { onDelete: options?.onDelete ?? "set null" });
|
|
1956
|
-
}
|
|
1957
|
-
var init_helpers = __esm({
|
|
1958
|
-
"src/db/schema/helpers.ts"() {
|
|
1959
|
-
}
|
|
1960
|
-
});
|
|
1961
|
-
|
|
1962
|
-
// src/db/schema/index.ts
|
|
1963
|
-
var init_schema = __esm({
|
|
1964
|
-
"src/db/schema/index.ts"() {
|
|
1965
|
-
init_helpers();
|
|
1966
|
-
}
|
|
1967
|
-
});
|
|
1968
|
-
function createFunctionSchema(packageName) {
|
|
1969
|
-
const schemaName = packageNameToSchema(packageName);
|
|
1970
|
-
return pgSchema(schemaName);
|
|
1971
|
-
}
|
|
1972
|
-
function packageNameToSchema(packageName) {
|
|
1973
|
-
return packageName.replace("@", "").replace("/", "_").replace(/-/g, "_");
|
|
1974
|
-
}
|
|
1975
|
-
function getSchemaInfo(packageName) {
|
|
1976
|
-
const isScoped = packageName.startsWith("@");
|
|
1977
|
-
const scope = isScoped ? packageName.split("/")[0].substring(1) : null;
|
|
1978
|
-
const schemaName = packageNameToSchema(packageName);
|
|
1979
|
-
return {
|
|
1980
|
-
schemaName,
|
|
1981
|
-
isScoped,
|
|
1982
|
-
scope
|
|
1983
|
-
};
|
|
1984
|
-
}
|
|
1985
|
-
var init_schema_helper = __esm({
|
|
1986
|
-
"src/db/schema-helper.ts"() {
|
|
1987
|
-
}
|
|
1988
|
-
});
|
|
1989
|
-
function getTransactionContext() {
|
|
1990
|
-
return asyncContext.getStore() ?? null;
|
|
1991
|
-
}
|
|
1992
|
-
function getTransaction() {
|
|
1993
|
-
const context = getTransactionContext();
|
|
1994
|
-
return context?.tx ?? null;
|
|
1995
|
-
}
|
|
1996
|
-
function runWithTransaction(tx, txId, callback) {
|
|
1997
|
-
const existingContext = getTransactionContext();
|
|
1998
|
-
const newLevel = existingContext ? existingContext.level + 1 : 1;
|
|
1999
|
-
if (existingContext) {
|
|
2000
|
-
txLogger.info("Nested transaction started (SAVEPOINT)", {
|
|
2001
|
-
outerTxId: existingContext.txId,
|
|
2002
|
-
innerTxId: txId,
|
|
2003
|
-
level: newLevel
|
|
2004
|
-
});
|
|
2005
|
-
} else {
|
|
2006
|
-
txLogger.debug("Root transaction context set", { txId, level: newLevel });
|
|
2007
|
-
}
|
|
2008
|
-
return asyncContext.run({ tx, txId, level: newLevel }, callback);
|
|
2009
|
-
}
|
|
2010
|
-
var txLogger, asyncContext;
|
|
2011
|
-
var init_context = __esm({
|
|
2012
|
-
"src/db/transaction/context.ts"() {
|
|
2013
|
-
init_logger2();
|
|
2014
|
-
txLogger = logger.child("transaction");
|
|
2015
|
-
asyncContext = new AsyncLocalStorage();
|
|
2016
|
-
}
|
|
2017
|
-
});
|
|
2018
|
-
function Transactional(options = {}) {
|
|
2019
|
-
const defaultTimeout = parseInt(process.env.TRANSACTION_TIMEOUT || "30000", 10);
|
|
2020
|
-
const {
|
|
2021
|
-
slowThreshold = 1e3,
|
|
2022
|
-
enableLogging = true,
|
|
2023
|
-
timeout = defaultTimeout
|
|
2024
|
-
} = options;
|
|
2025
|
-
const txLogger2 = logger.child("transaction");
|
|
2026
|
-
return createMiddleware(async (c, next) => {
|
|
2027
|
-
const txId = `tx_${randomUUID()}`;
|
|
2028
|
-
const startTime = Date.now();
|
|
2029
|
-
const route = `${c.req.method} ${c.req.path}`;
|
|
2030
|
-
if (enableLogging) {
|
|
2031
|
-
txLogger2.debug("Transaction started", { txId, route });
|
|
2032
|
-
}
|
|
2033
|
-
try {
|
|
2034
|
-
const writeDb = getDatabase("write");
|
|
2035
|
-
if (!writeDb) {
|
|
2036
|
-
throw new TransactionError(
|
|
2037
|
-
"Database not initialized. Cannot start transaction.",
|
|
2038
|
-
500,
|
|
2039
|
-
{ txId, route }
|
|
2040
|
-
);
|
|
2041
|
-
}
|
|
2042
|
-
const transactionPromise = writeDb.transaction(async (tx) => {
|
|
2043
|
-
await runWithTransaction(tx, txId, async () => {
|
|
2044
|
-
await next();
|
|
2045
|
-
const contextWithError = c;
|
|
2046
|
-
if (contextWithError.error) {
|
|
2047
|
-
throw contextWithError.error;
|
|
2048
|
-
}
|
|
2049
|
-
});
|
|
2050
|
-
});
|
|
2051
|
-
if (timeout > 0) {
|
|
2052
|
-
const timeoutPromise = new Promise((_, reject) => {
|
|
2053
|
-
setTimeout(() => {
|
|
2054
|
-
reject(
|
|
2055
|
-
new TransactionError(
|
|
2056
|
-
`Transaction timeout after ${timeout}ms`,
|
|
2057
|
-
500,
|
|
2058
|
-
{
|
|
2059
|
-
txId,
|
|
2060
|
-
route,
|
|
2061
|
-
timeout: `${timeout}ms`
|
|
2062
|
-
}
|
|
2063
|
-
)
|
|
2064
|
-
);
|
|
2065
|
-
}, timeout);
|
|
2066
|
-
});
|
|
2067
|
-
await Promise.race([transactionPromise, timeoutPromise]);
|
|
2068
|
-
} else {
|
|
2069
|
-
await transactionPromise;
|
|
2070
|
-
}
|
|
2071
|
-
const duration = Date.now() - startTime;
|
|
2072
|
-
if (enableLogging) {
|
|
2073
|
-
if (duration >= slowThreshold) {
|
|
2074
|
-
txLogger2.warn("Slow transaction committed", {
|
|
2075
|
-
txId,
|
|
2076
|
-
route,
|
|
2077
|
-
duration: `${duration}ms`,
|
|
2078
|
-
threshold: `${slowThreshold}ms`
|
|
2079
|
-
});
|
|
2080
|
-
} else {
|
|
2081
|
-
txLogger2.debug("Transaction committed", {
|
|
2082
|
-
txId,
|
|
2083
|
-
route,
|
|
2084
|
-
duration: `${duration}ms`
|
|
2085
|
-
});
|
|
2086
|
-
}
|
|
2087
|
-
}
|
|
2088
|
-
} catch (error) {
|
|
2089
|
-
const duration = Date.now() - startTime;
|
|
2090
|
-
const customError = error instanceof TransactionError ? error : fromPostgresError(error);
|
|
2091
|
-
if (enableLogging) {
|
|
2092
|
-
txLogger2.error("Transaction rolled back", {
|
|
2093
|
-
txId,
|
|
2094
|
-
route,
|
|
2095
|
-
duration: `${duration}ms`,
|
|
2096
|
-
error: customError.message,
|
|
2097
|
-
errorType: customError.name
|
|
2098
|
-
});
|
|
2099
|
-
}
|
|
2100
|
-
throw customError;
|
|
2101
|
-
}
|
|
2102
|
-
});
|
|
2103
|
-
}
|
|
2104
|
-
var init_middleware = __esm({
|
|
2105
|
-
"src/db/transaction/middleware.ts"() {
|
|
2106
|
-
init_logger2();
|
|
2107
|
-
init_manager2();
|
|
2108
|
-
init_context();
|
|
2109
|
-
init_errors();
|
|
2110
|
-
init_postgres_errors();
|
|
2111
|
-
}
|
|
2112
|
-
});
|
|
2113
|
-
|
|
2114
|
-
// src/db/transaction/index.ts
|
|
2115
|
-
var init_transaction = __esm({
|
|
2116
|
-
"src/db/transaction/index.ts"() {
|
|
2117
|
-
init_context();
|
|
2118
|
-
init_middleware();
|
|
2119
|
-
}
|
|
2120
|
-
});
|
|
2121
|
-
function isSQLWrapper(value) {
|
|
2122
|
-
return value && typeof value === "object" && "queryChunks" in value;
|
|
2123
|
-
}
|
|
2124
|
-
function buildWhereFromObject(table, where) {
|
|
2125
|
-
const entries = Object.entries(where).filter(([_, value]) => value !== void 0);
|
|
2126
|
-
if (entries.length === 0) return void 0;
|
|
2127
|
-
const conditions = entries.map(
|
|
2128
|
-
([key, value]) => eq(table[key], value)
|
|
2129
|
-
);
|
|
2130
|
-
return conditions.length === 1 ? conditions[0] : and(...conditions);
|
|
2131
|
-
}
|
|
2132
|
-
async function findOne(table, where) {
|
|
2133
|
-
const db = getDatabase("read");
|
|
2134
|
-
if (!db) {
|
|
2135
|
-
throw new Error("Database not initialized. Call initDatabase() first.");
|
|
2136
|
-
}
|
|
2137
|
-
const whereClause = isSQLWrapper(where) ? where : where ? buildWhereFromObject(table, where) : void 0;
|
|
2138
|
-
if (!whereClause) {
|
|
2139
|
-
throw new Error("findOne requires at least one where condition");
|
|
2140
|
-
}
|
|
2141
|
-
const results = await db.select().from(table).where(whereClause).limit(1);
|
|
2142
|
-
return results[0] ?? null;
|
|
2143
|
-
}
|
|
2144
|
-
async function findMany(table, options) {
|
|
2145
|
-
const db = getDatabase("read");
|
|
2146
|
-
if (!db) {
|
|
2147
|
-
throw new Error("Database not initialized. Call initDatabase() first.");
|
|
2148
|
-
}
|
|
2149
|
-
let query = db.select().from(table);
|
|
2150
|
-
if (options?.where) {
|
|
2151
|
-
const whereClause = isSQLWrapper(options.where) ? options.where : options.where ? buildWhereFromObject(table, options.where) : void 0;
|
|
2152
|
-
if (whereClause) {
|
|
2153
|
-
query = query.where(whereClause);
|
|
2154
|
-
}
|
|
2155
|
-
}
|
|
2156
|
-
if (options?.orderBy) {
|
|
2157
|
-
const orderByArray = Array.isArray(options.orderBy) ? options.orderBy : [options.orderBy];
|
|
2158
|
-
query = query.orderBy(...orderByArray);
|
|
2159
|
-
}
|
|
2160
|
-
if (options?.limit) {
|
|
2161
|
-
query = query.limit(options.limit);
|
|
2162
|
-
}
|
|
2163
|
-
if (options?.offset) {
|
|
2164
|
-
query = query.offset(options.offset);
|
|
2165
|
-
}
|
|
2166
|
-
return query;
|
|
2167
|
-
}
|
|
2168
|
-
async function create(table, data) {
|
|
2169
|
-
const db = getDatabase("write");
|
|
2170
|
-
if (!db) {
|
|
2171
|
-
throw new Error("Database not initialized. Call initDatabase() first.");
|
|
2172
|
-
}
|
|
2173
|
-
const [result] = await db.insert(table).values(data).returning();
|
|
2174
|
-
return result;
|
|
2175
|
-
}
|
|
2176
|
-
async function createMany(table, data) {
|
|
2177
|
-
const db = getDatabase("write");
|
|
2178
|
-
if (!db) {
|
|
2179
|
-
throw new Error("Database not initialized. Call initDatabase() first.");
|
|
2180
|
-
}
|
|
2181
|
-
const results = await db.insert(table).values(data).returning();
|
|
2182
|
-
return results;
|
|
2183
|
-
}
|
|
2184
|
-
async function upsert(table, data, options) {
|
|
2185
|
-
const db = getDatabase("write");
|
|
2186
|
-
if (!db) {
|
|
2187
|
-
throw new Error("Database not initialized. Call initDatabase() first.");
|
|
2188
|
-
}
|
|
2189
|
-
const [result] = await db.insert(table).values(data).onConflictDoUpdate({
|
|
2190
|
-
target: options.target,
|
|
2191
|
-
set: options.set || data
|
|
2192
|
-
}).returning();
|
|
2193
|
-
return result;
|
|
2194
|
-
}
|
|
2195
|
-
async function updateOne(table, where, data) {
|
|
2196
|
-
const db = getDatabase("write");
|
|
2197
|
-
if (!db) {
|
|
2198
|
-
throw new Error("Database not initialized. Call initDatabase() first.");
|
|
2199
|
-
}
|
|
2200
|
-
const whereClause = isSQLWrapper(where) ? where : where ? buildWhereFromObject(table, where) : void 0;
|
|
2201
|
-
if (!whereClause) {
|
|
2202
|
-
throw new Error("updateOne requires at least one where condition");
|
|
2203
|
-
}
|
|
2204
|
-
const [result] = await db.update(table).set(data).where(whereClause).returning();
|
|
2205
|
-
return result ?? null;
|
|
2206
|
-
}
|
|
2207
|
-
async function updateMany(table, where, data) {
|
|
2208
|
-
const db = getDatabase("write");
|
|
2209
|
-
if (!db) {
|
|
2210
|
-
throw new Error("Database not initialized. Call initDatabase() first.");
|
|
2211
|
-
}
|
|
2212
|
-
const whereClause = isSQLWrapper(where) ? where : where ? buildWhereFromObject(table, where) : void 0;
|
|
2213
|
-
if (!whereClause) {
|
|
2214
|
-
throw new Error("updateMany requires at least one where condition");
|
|
2215
|
-
}
|
|
2216
|
-
const results = await db.update(table).set(data).where(whereClause).returning();
|
|
2217
|
-
return results;
|
|
2218
|
-
}
|
|
2219
|
-
async function deleteOne(table, where) {
|
|
2220
|
-
const db = getDatabase("write");
|
|
2221
|
-
if (!db) {
|
|
2222
|
-
throw new Error("Database not initialized. Call initDatabase() first.");
|
|
2223
|
-
}
|
|
2224
|
-
const whereClause = isSQLWrapper(where) ? where : where ? buildWhereFromObject(table, where) : void 0;
|
|
2225
|
-
if (!whereClause) {
|
|
2226
|
-
throw new Error("deleteOne requires at least one where condition");
|
|
2227
|
-
}
|
|
2228
|
-
const [result] = await db.delete(table).where(whereClause).returning();
|
|
2229
|
-
return result ?? null;
|
|
2230
|
-
}
|
|
2231
|
-
async function deleteMany(table, where) {
|
|
2232
|
-
const db = getDatabase("write");
|
|
2233
|
-
if (!db) {
|
|
2234
|
-
throw new Error("Database not initialized. Call initDatabase() first.");
|
|
2235
|
-
}
|
|
2236
|
-
const whereClause = isSQLWrapper(where) ? where : where ? buildWhereFromObject(table, where) : void 0;
|
|
2237
|
-
if (!whereClause) {
|
|
2238
|
-
throw new Error("deleteMany requires at least one where condition");
|
|
2239
|
-
}
|
|
2240
|
-
const results = await db.delete(table).where(whereClause).returning();
|
|
2241
|
-
return results;
|
|
2242
|
-
}
|
|
2243
|
-
async function count(table, where) {
|
|
2244
|
-
const db = getDatabase("read");
|
|
2245
|
-
if (!db) {
|
|
2246
|
-
throw new Error("Database not initialized. Call initDatabase() first.");
|
|
2247
|
-
}
|
|
2248
|
-
let query = db.select().from(table);
|
|
2249
|
-
if (where) {
|
|
2250
|
-
const whereClause = isSQLWrapper(where) ? where : where ? buildWhereFromObject(table, where) : void 0;
|
|
2251
|
-
if (whereClause) {
|
|
2252
|
-
query = query.where(whereClause);
|
|
2253
|
-
}
|
|
2254
|
-
}
|
|
2255
|
-
const results = await query;
|
|
2256
|
-
return results.length;
|
|
2257
|
-
}
|
|
2258
|
-
var init_helpers2 = __esm({
|
|
2259
|
-
"src/db/helpers.ts"() {
|
|
2260
|
-
init_manager2();
|
|
2261
|
-
}
|
|
2262
|
-
});
|
|
2263
|
-
|
|
2264
|
-
// src/db/index.ts
|
|
2265
|
-
var db_exports = {};
|
|
2266
|
-
__export(db_exports, {
|
|
2267
|
-
Transactional: () => Transactional,
|
|
2268
|
-
checkConnection: () => checkConnection,
|
|
2269
|
-
closeDatabase: () => closeDatabase,
|
|
2270
|
-
count: () => count,
|
|
2271
|
-
create: () => create,
|
|
2272
|
-
createDatabaseConnection: () => createDatabaseConnection,
|
|
2273
|
-
createDatabaseFromEnv: () => createDatabaseFromEnv,
|
|
2274
|
-
createFunctionSchema: () => createFunctionSchema,
|
|
2275
|
-
createMany: () => createMany,
|
|
2276
|
-
deleteMany: () => deleteMany,
|
|
2277
|
-
deleteOne: () => deleteOne,
|
|
2278
|
-
detectDialect: () => detectDialect,
|
|
2279
|
-
findMany: () => findMany,
|
|
2280
|
-
findOne: () => findOne,
|
|
2281
|
-
foreignKey: () => foreignKey,
|
|
2282
|
-
fromPostgresError: () => fromPostgresError,
|
|
2283
|
-
generateDrizzleConfigFile: () => generateDrizzleConfigFile,
|
|
2284
|
-
getDatabase: () => getDatabase,
|
|
2285
|
-
getDatabaseInfo: () => getDatabaseInfo,
|
|
2286
|
-
getDrizzleConfig: () => getDrizzleConfig,
|
|
2287
|
-
getSchemaInfo: () => getSchemaInfo,
|
|
2288
|
-
getTransaction: () => getTransaction,
|
|
2289
|
-
id: () => id,
|
|
2290
|
-
initDatabase: () => initDatabase,
|
|
2291
|
-
optionalForeignKey: () => optionalForeignKey,
|
|
2292
|
-
packageNameToSchema: () => packageNameToSchema,
|
|
2293
|
-
runWithTransaction: () => runWithTransaction,
|
|
2294
|
-
setDatabase: () => setDatabase,
|
|
2295
|
-
timestamps: () => timestamps,
|
|
2296
|
-
updateMany: () => updateMany,
|
|
2297
|
-
updateOne: () => updateOne,
|
|
2298
|
-
upsert: () => upsert
|
|
2299
|
-
});
|
|
2300
|
-
var init_db = __esm({
|
|
2301
|
-
"src/db/index.ts"() {
|
|
2302
|
-
init_manager2();
|
|
2303
|
-
init_config_generator();
|
|
2304
|
-
init_schema();
|
|
2305
|
-
init_schema_helper();
|
|
2306
|
-
init_transaction();
|
|
2307
|
-
init_postgres_errors();
|
|
2308
|
-
init_helpers2();
|
|
2309
|
-
}
|
|
2310
|
-
});
|
|
2311
|
-
|
|
2312
|
-
// src/cache/cache-factory.ts
|
|
2313
|
-
function hasCacheConfig() {
|
|
2314
|
-
return !!// Modern (Valkey/Cache)
|
|
2315
|
-
(process.env.VALKEY_URL || process.env.CACHE_URL || process.env.VALKEY_WRITE_URL || process.env.VALKEY_READ_URL || process.env.CACHE_WRITE_URL || process.env.CACHE_READ_URL || process.env.VALKEY_SENTINEL_HOSTS || process.env.VALKEY_CLUSTER_NODES || // Legacy (Redis - backward compatibility)
|
|
2316
|
-
process.env.REDIS_URL || process.env.REDIS_WRITE_URL || process.env.REDIS_READ_URL || process.env.REDIS_SENTINEL_HOSTS || process.env.REDIS_CLUSTER_NODES);
|
|
2317
|
-
}
|
|
2318
|
-
function getEnv(valkeyKey, cacheKey, redisKey) {
|
|
2319
|
-
return process.env[valkeyKey] || process.env[cacheKey] || process.env[redisKey];
|
|
2320
|
-
}
|
|
2321
|
-
function createClient(RedisClient, url) {
|
|
2322
|
-
const options = {};
|
|
2323
|
-
if (url.startsWith("rediss://") || url.startsWith("valkeys://")) {
|
|
2324
|
-
const rejectUnauthorized = getEnv(
|
|
2325
|
-
"VALKEY_TLS_REJECT_UNAUTHORIZED",
|
|
2326
|
-
"CACHE_TLS_REJECT_UNAUTHORIZED",
|
|
2327
|
-
"REDIS_TLS_REJECT_UNAUTHORIZED"
|
|
2328
|
-
);
|
|
2329
|
-
options.tls = {
|
|
2330
|
-
rejectUnauthorized: rejectUnauthorized !== "false"
|
|
2331
|
-
};
|
|
2332
|
-
}
|
|
2333
|
-
return new RedisClient(url, options);
|
|
2334
|
-
}
|
|
2335
|
-
async function createCacheFromEnv() {
|
|
2336
|
-
if (!hasCacheConfig()) {
|
|
2337
|
-
cacheLogger.info("No cache configuration found - running without cache");
|
|
2338
|
-
return { write: void 0, read: void 0 };
|
|
2339
|
-
}
|
|
2340
|
-
try {
|
|
2341
|
-
const ioredis = await import('ioredis');
|
|
2342
|
-
const RedisClient = ioredis.default;
|
|
2343
|
-
const singleUrl = getEnv("VALKEY_URL", "CACHE_URL", "REDIS_URL");
|
|
2344
|
-
const writeUrl = getEnv("VALKEY_WRITE_URL", "CACHE_WRITE_URL", "REDIS_WRITE_URL");
|
|
2345
|
-
const readUrl = getEnv("VALKEY_READ_URL", "CACHE_READ_URL", "REDIS_READ_URL");
|
|
2346
|
-
const clusterNodes = getEnv("VALKEY_CLUSTER_NODES", "CACHE_CLUSTER_NODES", "REDIS_CLUSTER_NODES");
|
|
2347
|
-
const sentinelHosts = getEnv("VALKEY_SENTINEL_HOSTS", "CACHE_SENTINEL_HOSTS", "REDIS_SENTINEL_HOSTS");
|
|
2348
|
-
const masterName = getEnv("VALKEY_MASTER_NAME", "CACHE_MASTER_NAME", "REDIS_MASTER_NAME");
|
|
2349
|
-
const password = getEnv("VALKEY_PASSWORD", "CACHE_PASSWORD", "REDIS_PASSWORD");
|
|
2350
|
-
if (singleUrl && !writeUrl && !readUrl && !clusterNodes) {
|
|
2351
|
-
const client = createClient(RedisClient, singleUrl);
|
|
2352
|
-
cacheLogger.debug("Created single cache instance", { url: singleUrl.replace(/:[^:@]+@/, ":***@") });
|
|
2353
|
-
return { write: client, read: client };
|
|
2354
|
-
}
|
|
2355
|
-
if (writeUrl && readUrl) {
|
|
2356
|
-
const write = createClient(RedisClient, writeUrl);
|
|
2357
|
-
const read = createClient(RedisClient, readUrl);
|
|
2358
|
-
cacheLogger.debug("Created master-replica cache instances");
|
|
2359
|
-
return { write, read };
|
|
2360
|
-
}
|
|
2361
|
-
if (sentinelHosts && masterName) {
|
|
2362
|
-
const sentinels = sentinelHosts.split(",").map((host) => {
|
|
2363
|
-
const [hostname, port] = host.trim().split(":");
|
|
2364
|
-
return { host: hostname, port: Number(port) || 26379 };
|
|
2365
|
-
});
|
|
2366
|
-
const options = {
|
|
2367
|
-
sentinels,
|
|
2368
|
-
name: masterName,
|
|
2369
|
-
password
|
|
2370
|
-
};
|
|
2371
|
-
const client = new RedisClient(options);
|
|
2372
|
-
cacheLogger.debug("Created sentinel cache instance", { masterName, sentinels: sentinels.length });
|
|
2373
|
-
return { write: client, read: client };
|
|
2374
|
-
}
|
|
2375
|
-
if (clusterNodes) {
|
|
2376
|
-
const nodes = clusterNodes.split(",").map((node) => {
|
|
2377
|
-
const [host, port] = node.trim().split(":");
|
|
2378
|
-
return { host, port: Number(port) || 6379 };
|
|
2379
|
-
});
|
|
2380
|
-
const clusterOptions = {
|
|
2381
|
-
redisOptions: {
|
|
2382
|
-
password
|
|
2383
|
-
}
|
|
2384
|
-
};
|
|
2385
|
-
const cluster = new RedisClient.Cluster(nodes, clusterOptions);
|
|
2386
|
-
cacheLogger.debug("Created cluster cache instance", { nodes: nodes.length });
|
|
2387
|
-
return { write: cluster, read: cluster };
|
|
2388
|
-
}
|
|
2389
|
-
if (singleUrl) {
|
|
2390
|
-
const client = createClient(RedisClient, singleUrl);
|
|
2391
|
-
cacheLogger.debug("Created cache instance (fallback)", { url: singleUrl.replace(/:[^:@]+@/, ":***@") });
|
|
2392
|
-
return { write: client, read: client };
|
|
2393
|
-
}
|
|
2394
|
-
cacheLogger.info("No valid cache configuration found - running without cache");
|
|
2395
|
-
return { write: void 0, read: void 0 };
|
|
2396
|
-
} catch (error) {
|
|
2397
|
-
if (error instanceof Error) {
|
|
2398
|
-
if (error.message.includes("Cannot find module")) {
|
|
2399
|
-
cacheLogger.warn(
|
|
2400
|
-
"Cache client library not installed",
|
|
2401
|
-
error,
|
|
2402
|
-
{
|
|
2403
|
-
suggestion: "Install ioredis to enable cache: pnpm install ioredis",
|
|
2404
|
-
mode: "disabled"
|
|
2405
|
-
}
|
|
2406
|
-
);
|
|
2407
|
-
} else {
|
|
2408
|
-
cacheLogger.warn(
|
|
2409
|
-
"Failed to create cache client",
|
|
2410
|
-
error,
|
|
2411
|
-
{ mode: "disabled" }
|
|
2412
|
-
);
|
|
2413
|
-
}
|
|
2414
|
-
} else {
|
|
2415
|
-
cacheLogger.warn(
|
|
2416
|
-
"Failed to create cache client",
|
|
2417
|
-
{ error: String(error), mode: "disabled" }
|
|
2418
|
-
);
|
|
2419
|
-
}
|
|
2420
|
-
return { write: void 0, read: void 0 };
|
|
2421
|
-
}
|
|
2422
|
-
}
|
|
2423
|
-
async function createSingleCacheFromEnv() {
|
|
2424
|
-
const { write } = await createCacheFromEnv();
|
|
2425
|
-
return write;
|
|
2426
|
-
}
|
|
2427
|
-
var cacheLogger;
|
|
2428
|
-
var init_cache_factory = __esm({
|
|
2429
|
-
"src/cache/cache-factory.ts"() {
|
|
2430
|
-
init_logger2();
|
|
2431
|
-
cacheLogger = logger.child("cache");
|
|
2432
|
-
}
|
|
2433
|
-
});
|
|
2434
|
-
|
|
2435
|
-
// src/cache/cache-manager.ts
|
|
2436
|
-
function getCache() {
|
|
2437
|
-
return writeInstance;
|
|
2438
|
-
}
|
|
2439
|
-
function getCacheRead() {
|
|
2440
|
-
return readInstance ?? writeInstance;
|
|
2441
|
-
}
|
|
2442
|
-
function isCacheDisabled() {
|
|
2443
|
-
return isDisabled;
|
|
2444
|
-
}
|
|
2445
|
-
function setCache(write, read) {
|
|
2446
|
-
writeInstance = write;
|
|
2447
|
-
readInstance = read ?? write;
|
|
2448
|
-
isDisabled = !write;
|
|
2449
|
-
}
|
|
2450
|
-
async function initCache() {
|
|
2451
|
-
if (writeInstance) {
|
|
2452
|
-
return { write: writeInstance, read: readInstance, disabled: isDisabled };
|
|
2453
|
-
}
|
|
2454
|
-
const { write, read } = await createCacheFromEnv();
|
|
2455
|
-
if (write) {
|
|
2456
|
-
try {
|
|
2457
|
-
await write.ping();
|
|
2458
|
-
if (read && read !== write) {
|
|
2459
|
-
await read.ping();
|
|
2460
|
-
}
|
|
2461
|
-
writeInstance = write;
|
|
2462
|
-
readInstance = read;
|
|
2463
|
-
isDisabled = false;
|
|
2464
|
-
const hasReplica = read && read !== write;
|
|
2465
|
-
cacheLogger2.info(
|
|
2466
|
-
hasReplica ? "Cache connected (Master-Replica)" : "Cache connected",
|
|
2467
|
-
{ mode: "enabled" }
|
|
2468
|
-
);
|
|
2469
|
-
return { write: writeInstance, read: readInstance, disabled: false };
|
|
2470
|
-
} catch (error) {
|
|
2471
|
-
cacheLogger2.error(
|
|
2472
|
-
"Cache connection failed - running in disabled mode",
|
|
2473
|
-
error instanceof Error ? error : new Error(String(error)),
|
|
2474
|
-
{ mode: "disabled" }
|
|
2475
|
-
);
|
|
2476
|
-
try {
|
|
2477
|
-
await write.quit();
|
|
2478
|
-
if (read && read !== write) {
|
|
2479
|
-
await read.quit();
|
|
2480
|
-
}
|
|
2481
|
-
} catch {
|
|
2482
|
-
}
|
|
2483
|
-
isDisabled = true;
|
|
2484
|
-
return { write: void 0, read: void 0, disabled: true };
|
|
2485
|
-
}
|
|
2486
|
-
}
|
|
2487
|
-
isDisabled = true;
|
|
2488
|
-
cacheLogger2.info("Cache disabled - no configuration or library not installed", { mode: "disabled" });
|
|
2489
|
-
return { write: void 0, read: void 0, disabled: true };
|
|
2490
|
-
}
|
|
2491
|
-
async function closeCache() {
|
|
2492
|
-
if (isDisabled) {
|
|
2493
|
-
cacheLogger2.debug("Cache already disabled, nothing to close");
|
|
2494
|
-
return;
|
|
2495
|
-
}
|
|
2496
|
-
const closePromises = [];
|
|
2497
|
-
if (writeInstance) {
|
|
2498
|
-
closePromises.push(
|
|
2499
|
-
writeInstance.quit().catch((err) => {
|
|
2500
|
-
cacheLogger2.error("Error closing cache write instance", err);
|
|
2501
|
-
})
|
|
2502
|
-
);
|
|
2503
|
-
}
|
|
2504
|
-
if (readInstance && readInstance !== writeInstance) {
|
|
2505
|
-
closePromises.push(
|
|
2506
|
-
readInstance.quit().catch((err) => {
|
|
2507
|
-
cacheLogger2.error("Error closing cache read instance", err);
|
|
2508
|
-
})
|
|
2509
|
-
);
|
|
2510
|
-
}
|
|
2511
|
-
await Promise.all(closePromises);
|
|
2512
|
-
writeInstance = void 0;
|
|
2513
|
-
readInstance = void 0;
|
|
2514
|
-
isDisabled = true;
|
|
2515
|
-
cacheLogger2.info("Cache connections closed", { mode: "disabled" });
|
|
2516
|
-
}
|
|
2517
|
-
function getCacheInfo() {
|
|
2518
|
-
return {
|
|
2519
|
-
hasWrite: !!writeInstance,
|
|
2520
|
-
hasRead: !!readInstance,
|
|
2521
|
-
isReplica: !!(readInstance && readInstance !== writeInstance),
|
|
2522
|
-
disabled: isDisabled
|
|
2523
|
-
};
|
|
2524
|
-
}
|
|
2525
|
-
var cacheLogger2, writeInstance, readInstance, isDisabled, getRedis, getRedisRead, setRedis, initRedis, closeRedis, getRedisInfo;
|
|
2526
|
-
var init_cache_manager = __esm({
|
|
2527
|
-
"src/cache/cache-manager.ts"() {
|
|
2528
|
-
init_cache_factory();
|
|
2529
|
-
init_logger2();
|
|
2530
|
-
cacheLogger2 = logger.child("cache");
|
|
2531
|
-
isDisabled = false;
|
|
2532
|
-
getRedis = getCache;
|
|
2533
|
-
getRedisRead = getCacheRead;
|
|
2534
|
-
setRedis = setCache;
|
|
2535
|
-
initRedis = initCache;
|
|
2536
|
-
closeRedis = closeCache;
|
|
2537
|
-
getRedisInfo = getCacheInfo;
|
|
2538
|
-
}
|
|
2539
|
-
});
|
|
2540
|
-
|
|
2541
|
-
// src/cache/index.ts
|
|
2542
|
-
var cache_exports = {};
|
|
2543
|
-
__export(cache_exports, {
|
|
2544
|
-
closeCache: () => closeCache,
|
|
2545
|
-
closeRedis: () => closeRedis,
|
|
2546
|
-
createCacheFromEnv: () => createCacheFromEnv,
|
|
2547
|
-
createRedisFromEnv: () => createCacheFromEnv,
|
|
2548
|
-
createSingleCacheFromEnv: () => createSingleCacheFromEnv,
|
|
2549
|
-
createSingleRedisFromEnv: () => createSingleCacheFromEnv,
|
|
2550
|
-
getCache: () => getCache,
|
|
2551
|
-
getCacheInfo: () => getCacheInfo,
|
|
2552
|
-
getCacheRead: () => getCacheRead,
|
|
2553
|
-
getRedis: () => getRedis,
|
|
2554
|
-
getRedisInfo: () => getRedisInfo,
|
|
2555
|
-
getRedisRead: () => getRedisRead,
|
|
2556
|
-
initCache: () => initCache,
|
|
2557
|
-
initRedis: () => initRedis,
|
|
2558
|
-
isCacheDisabled: () => isCacheDisabled,
|
|
2559
|
-
setCache: () => setCache,
|
|
2560
|
-
setRedis: () => setRedis
|
|
2561
|
-
});
|
|
2562
|
-
var init_cache = __esm({
|
|
2563
|
-
"src/cache/index.ts"() {
|
|
2564
|
-
init_cache_factory();
|
|
2565
|
-
init_cache_manager();
|
|
2566
|
-
init_cache_manager();
|
|
2567
|
-
init_cache_factory();
|
|
2568
|
-
}
|
|
2569
|
-
});
|
|
2570
|
-
|
|
2571
|
-
// src/route/auto-loader.ts
|
|
2572
|
-
init_logger2();
|
|
2573
|
-
var routeLogger2 = logger.child("route");
|
|
2574
|
-
var AutoRouteLoader = class {
|
|
2575
|
-
constructor(routesDir, debug = false, middlewares = []) {
|
|
2576
|
-
this.routesDir = routesDir;
|
|
2577
|
-
this.debug = debug;
|
|
2578
|
-
this.middlewares = middlewares;
|
|
2579
|
-
}
|
|
2580
|
-
routes = [];
|
|
2581
|
-
debug;
|
|
2582
|
-
middlewares;
|
|
2583
|
-
async load(app) {
|
|
2584
|
-
const startTime = Date.now();
|
|
2585
|
-
const files = await this.scanFiles(this.routesDir);
|
|
2586
|
-
if (files.length === 0) {
|
|
2587
|
-
routeLogger2.warn("No route files found");
|
|
2588
|
-
return this.getStats();
|
|
2589
|
-
}
|
|
2590
|
-
let failureCount = 0;
|
|
2591
|
-
for (const file of files) {
|
|
2592
|
-
const success = await this.loadRoute(app, file);
|
|
2593
|
-
if (success) ; else {
|
|
2594
|
-
failureCount++;
|
|
2595
|
-
}
|
|
2596
|
-
}
|
|
2597
|
-
const elapsed = Date.now() - startTime;
|
|
2598
|
-
const stats = this.getStats();
|
|
2599
|
-
if (this.debug) {
|
|
2600
|
-
this.logStats(stats, elapsed);
|
|
2601
|
-
}
|
|
2602
|
-
if (failureCount > 0) {
|
|
2603
|
-
routeLogger2.warn("Some routes failed to load", { failureCount });
|
|
2604
|
-
}
|
|
2605
|
-
return stats;
|
|
2606
|
-
}
|
|
2607
|
-
/**
|
|
2608
|
-
* Load routes from an external directory (e.g., from SPFN function packages)
|
|
2609
|
-
* Reads package.json spfn.prefix and mounts routes under that prefix
|
|
2610
|
-
*
|
|
2611
|
-
* @param app - Hono app instance
|
|
2612
|
-
* @param routesDir - Directory containing route handlers
|
|
2613
|
-
* @param packageName - Name of the package (for logging)
|
|
2614
|
-
* @param prefix - Optional prefix to mount routes under (from package.json spfn.prefix)
|
|
2615
|
-
* @returns Route statistics
|
|
2616
|
-
*/
|
|
2617
|
-
async loadExternalRoutes(app, routesDir, packageName, prefix) {
|
|
2618
|
-
const startTime = Date.now();
|
|
2619
|
-
const tempRoutesDir = this.routesDir;
|
|
2620
|
-
this.routesDir = routesDir;
|
|
2621
|
-
const files = await this.scanFiles(routesDir);
|
|
2622
|
-
if (files.length === 0) {
|
|
2623
|
-
routeLogger2.warn("No route files found", { dir: routesDir, package: packageName });
|
|
2624
|
-
this.routesDir = tempRoutesDir;
|
|
2625
|
-
return this.getStats();
|
|
2626
|
-
}
|
|
2627
|
-
let successCount = 0;
|
|
2628
|
-
let failureCount = 0;
|
|
2629
|
-
for (const file of files) {
|
|
2630
|
-
const success = await this.loadRoute(app, file, prefix);
|
|
2631
|
-
if (success) {
|
|
2632
|
-
successCount++;
|
|
2633
|
-
} else {
|
|
2634
|
-
failureCount++;
|
|
2635
|
-
}
|
|
2636
|
-
}
|
|
2637
|
-
const elapsed = Date.now() - startTime;
|
|
2638
|
-
if (this.debug) {
|
|
2639
|
-
routeLogger2.info("External routes loaded", {
|
|
2640
|
-
package: packageName,
|
|
2641
|
-
prefix: prefix || "/",
|
|
2642
|
-
total: successCount,
|
|
2643
|
-
failed: failureCount,
|
|
2644
|
-
elapsed: `${elapsed}ms`
|
|
2645
|
-
});
|
|
2646
|
-
}
|
|
2647
|
-
this.routesDir = tempRoutesDir;
|
|
2648
|
-
return this.getStats();
|
|
2649
|
-
}
|
|
2650
|
-
getStats() {
|
|
2651
|
-
const stats = {
|
|
2652
|
-
total: this.routes.length,
|
|
2653
|
-
byPriority: { static: 0, dynamic: 0, catchAll: 0 },
|
|
2654
|
-
byTag: {},
|
|
2655
|
-
routes: this.routes
|
|
2656
|
-
};
|
|
2657
|
-
for (const route of this.routes) {
|
|
2658
|
-
if (route.priority === 1) stats.byPriority.static++;
|
|
2659
|
-
else if (route.priority === 2) stats.byPriority.dynamic++;
|
|
2660
|
-
else if (route.priority === 3) stats.byPriority.catchAll++;
|
|
2661
|
-
if (route.meta?.tags) {
|
|
2662
|
-
for (const tag of route.meta.tags) {
|
|
2663
|
-
stats.byTag[tag] = (stats.byTag[tag] || 0) + 1;
|
|
2664
|
-
}
|
|
2665
|
-
}
|
|
2666
|
-
}
|
|
2667
|
-
return stats;
|
|
2668
|
-
}
|
|
2669
|
-
async scanFiles(dir, files = []) {
|
|
2670
|
-
const entries = await readdir(dir);
|
|
2671
|
-
for (const entry of entries) {
|
|
2672
|
-
const fullPath = join(dir, entry);
|
|
2673
|
-
const fileStat = await stat(fullPath);
|
|
2674
|
-
if (fileStat.isDirectory()) {
|
|
2675
|
-
await this.scanFiles(fullPath, files);
|
|
2676
|
-
} else if (this.isValidRouteFile(entry)) {
|
|
2677
|
-
files.push(fullPath);
|
|
2678
|
-
}
|
|
2679
|
-
}
|
|
2680
|
-
return files;
|
|
2681
|
-
}
|
|
2682
|
-
isValidRouteFile(fileName) {
|
|
2683
|
-
return fileName === "index.ts" || fileName === "index.js" || fileName === "index.mjs";
|
|
2684
|
-
}
|
|
2685
|
-
async loadRoute(app, absolutePath, prefix) {
|
|
2686
|
-
const relativePath = relative(this.routesDir, absolutePath);
|
|
2687
|
-
try {
|
|
2688
|
-
const module = await import(absolutePath);
|
|
2689
|
-
if (!this.validateModule(module, relativePath)) {
|
|
2690
|
-
return false;
|
|
2691
|
-
}
|
|
2692
|
-
const hasContractMetas = module.default._contractMetas && module.default._contractMetas.size > 0;
|
|
2693
|
-
if (!hasContractMetas) {
|
|
2694
|
-
routeLogger2.error("Route must use contract-based routing", {
|
|
2695
|
-
file: relativePath,
|
|
2696
|
-
hint: "Export contracts using satisfies RouteContract and use app.bind()"
|
|
2697
|
-
});
|
|
2698
|
-
return false;
|
|
2699
|
-
}
|
|
2700
|
-
const contractPaths = this.extractContractPaths(module);
|
|
2701
|
-
if (prefix) {
|
|
2702
|
-
const invalidPaths = contractPaths.filter((path) => !path.startsWith(prefix));
|
|
2703
|
-
if (invalidPaths.length > 0) {
|
|
2704
|
-
routeLogger2.error("Contract paths must include the package prefix", {
|
|
2705
|
-
file: relativePath,
|
|
2706
|
-
prefix,
|
|
2707
|
-
invalidPaths,
|
|
2708
|
-
hint: `Contract paths should start with "${prefix}". Example: path: "${prefix}/labels"`
|
|
2709
|
-
});
|
|
2710
|
-
return false;
|
|
2711
|
-        }
-      }
-      this.registerContractBasedMiddlewares(app, contractPaths, module);
-      app.route("/", module.default);
-      contractPaths.forEach((path) => {
-        this.routes.push({
-          path,
-          // Use contract path as-is (already includes prefix)
-          file: relativePath,
-          meta: module.meta,
-          priority: this.calculateContractPriority(path)
-        });
-        if (this.debug) {
-          const icon = path.includes("*") ? "\u2B50" : path.includes(":") ? "\u{1F538}" : "\u{1F539}";
-          routeLogger2.debug(`Registered route: ${path}`, { icon, file: relativePath });
-        }
-      });
-      return true;
-    } catch (error) {
-      this.categorizeAndLogError(error, relativePath);
-      return false;
-    }
-  }
-  extractContractPaths(module) {
-    const paths = /* @__PURE__ */ new Set();
-    if (module.default._contractMetas) {
-      for (const key of module.default._contractMetas.keys()) {
-        const path = key.split(" ")[1];
-        if (path) {
-          paths.add(path);
-        }
-      }
-    }
-    return Array.from(paths);
-  }
-  calculateContractPriority(path) {
-    if (path.includes("*")) return 3;
-    if (path.includes(":")) return 2;
-    return 1;
-  }
-  validateModule(module, relativePath) {
-    if (!module.default) {
-      routeLogger2.error("Route must export Hono instance as default", { file: relativePath });
-      return false;
-    }
-    if (typeof module.default.route !== "function") {
-      routeLogger2.error("Default export is not a Hono instance", { file: relativePath });
-      return false;
-    }
-    return true;
-  }
-  registerContractBasedMiddlewares(app, contractPaths, module) {
-    app.use("*", (c, next) => {
-      const method = c.req.method;
-      const requestPath = new URL(c.req.url).pathname;
-      const key = `${method} ${requestPath}`;
-      const meta = module.default._contractMetas?.get(key);
-      if (meta?.skipMiddlewares) {
-        c.set("_skipMiddlewares", meta.skipMiddlewares);
-      }
-      return next();
-    });
-    for (const contractPath of contractPaths) {
-      const middlewarePath = contractPath === "/" ? "/*" : `${contractPath}/*`;
-      for (const middleware of this.middlewares) {
-        app.use(middlewarePath, async (c, next) => {
-          const skipList = c.get("_skipMiddlewares") || [];
-          if (skipList.includes(middleware.name)) {
-            return next();
-          }
-          return middleware.handler(c, next);
-        });
-      }
-    }
-  }
-  categorizeAndLogError(error, relativePath) {
-    const message = error.message;
-    const stack = error.stack;
-    if (message.includes("Cannot find module") || message.includes("MODULE_NOT_FOUND")) {
-      routeLogger2.error("Missing dependency", {
-        file: relativePath,
-        error: message,
-        hint: "Run: npm install"
-      });
-    } else if (message.includes("SyntaxError") || stack?.includes("SyntaxError")) {
-      routeLogger2.error("Syntax error", {
-        file: relativePath,
-        error: message,
-        ...this.debug && stack && {
-          stack: stack.split("\n").slice(0, 5).join("\n")
-        }
-      });
-    } else if (message.includes("Unexpected token")) {
-      routeLogger2.error("Parse error", {
-        file: relativePath,
-        error: message,
-        hint: "Check for syntax errors or invalid TypeScript"
-      });
-    } else {
-      routeLogger2.error("Route loading failed", {
-        file: relativePath,
-        error: message,
-        ...this.debug && stack && { stack }
-      });
-    }
-  }
-  logStats(stats, elapsed) {
-    const tagCounts = Object.entries(stats.byTag).map(([tag, count2]) => `${tag}(${count2})`).join(", ");
-    routeLogger2.info("Routes loaded successfully", {
-      total: stats.total,
-      priority: {
-        static: stats.byPriority.static,
-        dynamic: stats.byPriority.dynamic,
-        catchAll: stats.byPriority.catchAll
-      },
-      ...tagCounts && { tags: tagCounts },
-      elapsed: `${elapsed}ms`
-    });
-  }
-};
-async function loadRoutes(app, options) {
-  const routesDir = options?.routesDir ?? join(process.cwd(), "src", "server", "routes");
-  const debug = options?.debug ?? false;
-  const middlewares = options?.middlewares ?? [];
-  const includeFunctionRoutes = options?.includeFunctionRoutes ?? true;
-  const loader = new AutoRouteLoader(routesDir, debug, middlewares);
-  const stats = await loader.load(app);
-  if (includeFunctionRoutes) {
-    const { discoverFunctionRoutes: discoverFunctionRoutes2 } = await Promise.resolve().then(() => (init_function_routes(), function_routes_exports));
-    const functionRoutes = discoverFunctionRoutes2();
-    if (functionRoutes.length > 0) {
-      routeLogger2.info("Loading function routes", { count: functionRoutes.length });
-      for (const func of functionRoutes) {
-        try {
-          await loader.loadExternalRoutes(app, func.routesDir, func.packageName, func.prefix);
-          routeLogger2.info("Function routes loaded", {
-            package: func.packageName,
-            routesDir: func.routesDir,
-            prefix: func.prefix || "/"
-          });
-        } catch (error) {
-          routeLogger2.error("Failed to load function routes", {
-            package: func.packageName,
-            error: error instanceof Error ? error.message : "Unknown error"
-          });
-        }
-      }
-    }
-  }
-  return stats;
-}
-
-// src/route/bind.ts
-init_errors();
-init_logger2();
-
-// src/middleware/error-handler.ts
-init_logger2();
-var errorLogger = logger.child("error-handler");
-function ErrorHandler(options = {}) {
-  const {
-    includeStack = process.env.NODE_ENV !== "production",
-    enableLogging = true
-  } = options;
-  return (err, c) => {
-    const errorWithCode = err;
-    const statusCode = errorWithCode.statusCode || 500;
-    const errorType = err.name || "Error";
-    if (enableLogging) {
-      const logLevel = statusCode >= 500 ? "error" : "warn";
-      const logData = {
-        type: errorType,
-        message: err.message,
-        statusCode,
-        path: c.req.path,
-        method: c.req.method
-      };
-      if (errorWithCode.details) {
-        logData.details = errorWithCode.details;
-      }
-      if (statusCode >= 500 && includeStack) {
-        logData.stack = err.stack;
-      }
-      errorLogger[logLevel]("Error occurred", logData);
-    }
-    const response = {
-      success: false,
-      error: {
-        message: err.message || "Internal Server Error",
-        type: errorType,
-        statusCode
-      }
-    };
-    if (errorWithCode.details) {
-      response.error.details = errorWithCode.details;
-    }
-    if (includeStack) {
-      response.error.stack = err.stack;
-    }
-    return c.json(response, statusCode);
-  };
-}
-
-// src/middleware/request-logger.ts
-init_logger2();
-var DEFAULT_CONFIG = {
-  excludePaths: ["/health", "/ping", "/favicon.ico"],
-  sensitiveFields: ["password", "token", "apiKey", "secret", "authorization"],
-  slowRequestThreshold: 1e3
-};
-function generateRequestId() {
-  const timestamp2 = Date.now();
-  const randomPart = randomBytes(6).toString("hex");
-  return `req_${timestamp2}_${randomPart}`;
-}
-function maskSensitiveData2(obj, sensitiveFields, seen = /* @__PURE__ */ new WeakSet()) {
-  if (!obj || typeof obj !== "object") return obj;
-  if (seen.has(obj)) return "[Circular]";
-  seen.add(obj);
-  const lowerFields = sensitiveFields.map((f) => f.toLowerCase());
-  const masked = Array.isArray(obj) ? [...obj] : { ...obj };
-  for (const key in masked) {
-    const lowerKey = key.toLowerCase();
-    if (lowerFields.some((field) => lowerKey.includes(field))) {
-      masked[key] = "***MASKED***";
-    } else if (typeof masked[key] === "object" && masked[key] !== null) {
-      masked[key] = maskSensitiveData2(masked[key], sensitiveFields, seen);
-    }
-  }
-  return masked;
-}
-function RequestLogger(config) {
-  const cfg = { ...DEFAULT_CONFIG, ...config };
-  const apiLogger = logger.child("api");
-  return async (c, next) => {
-    const path = new URL(c.req.url).pathname;
-    if (cfg.excludePaths.includes(path)) {
-      return next();
-    }
-    const requestId = generateRequestId();
-    c.set("requestId", requestId);
-    const method = c.req.method;
-    const userAgent = c.req.header("user-agent");
-    const ip = c.req.header("x-forwarded-for") || c.req.header("x-real-ip") || "unknown";
-    const startTime = Date.now();
-    apiLogger.info("Request received", {
-      requestId,
-      method,
-      path,
-      ip,
-      userAgent
-    });
-    try {
-      await next();
-      const duration = Date.now() - startTime;
-      const status = c.res.status;
-      const logData = {
-        requestId,
-        method,
-        path,
-        status,
-        duration
-      };
-      const isSlowRequest = duration >= cfg.slowRequestThreshold;
-      if (isSlowRequest) {
-        logData.slow = true;
-      }
-      if (status >= 400) {
-        try {
-          const responseBody = await c.res.clone().json();
-          logData.response = responseBody;
-        } catch {
-        }
-        if (["POST", "PUT", "PATCH"].includes(method)) {
-          try {
-            const requestBody = await c.req.json();
-            logData.request = maskSensitiveData2(requestBody, cfg.sensitiveFields);
-          } catch {
-          }
-        }
-      }
-      const logLevel = status >= 500 ? "error" : status >= 400 ? "warn" : "info";
-      apiLogger[logLevel]("Request completed", logData);
-    } catch (error) {
-      const duration = Date.now() - startTime;
-      apiLogger.error("Request failed", error, {
-        requestId,
-        method,
-        path,
-        duration
-      });
-      throw error;
-    }
-  };
-}
-
-// src/route/types.ts
-function isHttpMethod(value) {
-  return typeof value === "string" && ["GET", "POST", "PUT", "PATCH", "DELETE"].includes(value);
-}
-
-// src/server/create-server.ts
-init_logger2();
-
-// src/server/helpers.ts
-function createHealthCheckHandler(detailed) {
-  return async (c) => {
-    const response = {
-      status: "ok",
-      timestamp: (/* @__PURE__ */ new Date()).toISOString()
-    };
-    if (detailed) {
-      const { getDatabase: getDatabase2 } = await Promise.resolve().then(() => (init_db(), db_exports));
-      const { getRedis: getRedis2 } = await Promise.resolve().then(() => (init_cache(), cache_exports));
-      const db = getDatabase2();
-      let dbStatus = "disconnected";
-      let dbError;
-      if (db) {
-        try {
-          await db.execute("SELECT 1");
-          dbStatus = "connected";
-        } catch (error) {
-          dbStatus = "error";
-          dbError = error instanceof Error ? error.message : String(error);
-        }
-      }
-      const redis = getRedis2();
-      let redisStatus = "disconnected";
-      let redisError;
-      if (redis) {
-        try {
-          await redis.ping();
-          redisStatus = "connected";
-        } catch (error) {
-          redisStatus = "error";
-          redisError = error instanceof Error ? error.message : String(error);
-        }
-      }
-      response.services = {
-        database: {
-          status: dbStatus,
-          ...dbError && { error: dbError }
-        },
-        redis: {
-          status: redisStatus,
-          ...redisError && { error: redisError }
-        }
-      };
-      const hasErrors = dbStatus === "error" || redisStatus === "error";
-      response.status = hasErrors ? "degraded" : "ok";
-    }
-    const statusCode = response.status === "ok" ? 200 : 503;
-    return c.json(response, statusCode);
-  };
-}
-function applyServerTimeouts(server, timeouts) {
-  if ("timeout" in server) {
-    server.timeout = timeouts.request;
-    server.keepAliveTimeout = timeouts.keepAlive;
-    server.headersTimeout = timeouts.headers;
-  }
-}
-function getTimeoutConfig(config) {
-  return {
-    request: config?.request ?? (parseInt(process.env.SERVER_TIMEOUT || "", 10) || 12e4),
-    keepAlive: config?.keepAlive ?? (parseInt(process.env.SERVER_KEEPALIVE_TIMEOUT || "", 10) || 65e3),
-    headers: config?.headers ?? (parseInt(process.env.SERVER_HEADERS_TIMEOUT || "", 10) || 6e4)
-  };
-}
-function getShutdownTimeout(config) {
-  return config?.timeout ?? (parseInt(process.env.SHUTDOWN_TIMEOUT || "", 10) || 3e4);
-}
-function buildMiddlewareOrder(config) {
-  const order = [];
-  const middlewareConfig = config.middleware ?? {};
-  const enableLogger = middlewareConfig.logger !== false;
-  const enableCors = middlewareConfig.cors !== false;
-  const enableErrorHandler = middlewareConfig.errorHandler !== false;
-  if (enableLogger) order.push("RequestLogger");
-  if (enableCors) order.push("CORS");
-  config.use?.forEach((_, i) => order.push(`Custom[${i}]`));
-  if (config.beforeRoutes) order.push("beforeRoutes hook");
-  order.push("Routes");
-  if (config.afterRoutes) order.push("afterRoutes hook");
-  if (enableErrorHandler) order.push("ErrorHandler");
-  return order;
-}
-function buildStartupConfig(config, timeouts) {
-  const middlewareConfig = config.middleware ?? {};
-  const healthCheckConfig = config.healthCheck ?? {};
-  const healthCheckEnabled = healthCheckConfig.enabled !== false;
-  const healthCheckPath = healthCheckConfig.path ?? "/health";
-  const healthCheckDetailed = healthCheckConfig.detailed ?? process.env.NODE_ENV === "development";
-  return {
-    middleware: {
-      logger: middlewareConfig.logger !== false,
-      cors: middlewareConfig.cors !== false,
-      errorHandler: middlewareConfig.errorHandler !== false,
-      custom: config.use?.length ?? 0
-    },
-    healthCheck: healthCheckEnabled ? {
-      enabled: true,
-      path: healthCheckPath,
-      detailed: healthCheckDetailed
-    } : { enabled: false },
-    hooks: {
-      beforeRoutes: !!config.beforeRoutes,
-      afterRoutes: !!config.afterRoutes
-    },
-    timeout: {
-      request: `${timeouts.request}ms`,
-      keepAlive: `${timeouts.keepAlive}ms`,
-      headers: `${timeouts.headers}ms`
-    },
-    shutdown: {
-      timeout: `${config.shutdown?.timeout ?? 3e4}ms`
-    }
-  };
-}
-
-// src/server/plugin-discovery.ts
-init_logger2();
-var pluginLogger = logger.child("plugin");
-async function discoverPlugins(cwd = process.cwd()) {
-  const plugins = [];
-  const nodeModulesPath = join(cwd, "node_modules");
-  try {
-    const projectPkgPath = join(cwd, "package.json");
-    if (!existsSync(projectPkgPath)) {
-      pluginLogger.debug("No package.json found, skipping plugin discovery");
-      return plugins;
-    }
-    const projectPkg = JSON.parse(readFileSync(projectPkgPath, "utf-8"));
-    const dependencies = {
-      ...projectPkg.dependencies,
-      ...projectPkg.devDependencies
-    };
-    for (const [packageName] of Object.entries(dependencies)) {
-      if (!packageName.startsWith("@spfn/")) {
-        continue;
-      }
-      try {
-        const plugin = await loadPluginFromPackage(packageName, nodeModulesPath);
-        if (plugin) {
-          plugins.push(plugin);
-          pluginLogger.info("Plugin discovered", {
-            name: plugin.name,
-            hooks: getPluginHookNames(plugin)
-          });
-        }
-      } catch (error) {
-        pluginLogger.debug("Failed to load plugin", {
-          package: packageName,
-          error: error instanceof Error ? error.message : "Unknown error"
-        });
-      }
-    }
-  } catch (error) {
-    pluginLogger.warn("Plugin discovery failed", {
-      error: error instanceof Error ? error.message : "Unknown error"
-    });
-  }
-  return plugins;
-}
-async function loadPluginFromPackage(packageName, nodeModulesPath) {
-  const pkgPath = join(nodeModulesPath, ...packageName.split("/"), "package.json");
-  if (!existsSync(pkgPath)) {
-    return null;
-  }
-  const pkg = JSON.parse(readFileSync(pkgPath, "utf-8"));
-  const packageDir = dirname(pkgPath);
-  const mainEntry = pkg.main || "dist/index.js";
-  const mainPath = join(packageDir, mainEntry);
-  if (!existsSync(mainPath)) {
-    return null;
-  }
-  try {
-    const module = await import(mainPath);
-    if (module.spfnPlugin && isValidPlugin(module.spfnPlugin)) {
-      return module.spfnPlugin;
-    }
-    return null;
-  } catch (error) {
-    return null;
-  }
-}
-function isValidPlugin(plugin) {
-  return plugin && typeof plugin === "object" && typeof plugin.name === "string" && (typeof plugin.afterInfrastructure === "function" || typeof plugin.beforeRoutes === "function" || typeof plugin.afterRoutes === "function" || typeof plugin.afterStart === "function" || typeof plugin.beforeShutdown === "function");
-}
-function getPluginHookNames(plugin) {
-  const hooks = [];
-  if (plugin.afterInfrastructure) hooks.push("afterInfrastructure");
-  if (plugin.beforeRoutes) hooks.push("beforeRoutes");
-  if (plugin.afterRoutes) hooks.push("afterRoutes");
-  if (plugin.afterStart) hooks.push("afterStart");
-  if (plugin.beforeShutdown) hooks.push("beforeShutdown");
-  return hooks;
-}
-async function executePluginHooks(plugins, hookName, ...args) {
-  for (const plugin of plugins) {
-    const hook = plugin[hookName];
-    if (typeof hook === "function") {
-      try {
-        pluginLogger.debug("Executing plugin hook", {
-          plugin: plugin.name,
-          hook: hookName
-        });
-        await hook(...args);
-      } catch (error) {
-        pluginLogger.error("Plugin hook failed", {
-          plugin: plugin.name,
-          hook: hookName,
-          error: error instanceof Error ? error.message : "Unknown error"
-        });
-        throw new Error(
-          `Plugin ${plugin.name} failed in ${hookName} hook: ${error instanceof Error ? error.message : "Unknown error"}`
-        );
-      }
-    }
-  }
-}
-
-// src/server/create-server.ts
-var serverLogger = logger.child("server");
-async function createServer(config, plugins = []) {
-  const cwd = process.cwd();
-  const appPath = join(cwd, "src", "server", "app.ts");
-  const appJsPath = join(cwd, "src", "server", "app.js");
-  if (existsSync(appPath) || existsSync(appJsPath)) {
-    return await loadCustomApp(appPath, appJsPath, config, plugins);
-  }
-  return await createAutoConfiguredApp(config, plugins);
-}
-async function loadCustomApp(appPath, appJsPath, config, plugins = []) {
-  const appModule = await (existsSync(appPath) ? import(appPath) : import(appJsPath));
-  const appFactory = appModule.default;
-  if (!appFactory) {
-    throw new Error("app.ts must export a default function that returns a Hono app");
-  }
-  const app = await appFactory();
-  await executePluginHooks(plugins, "beforeRoutes", app);
-  const debug = config?.debug ?? process.env.NODE_ENV === "development";
-  await loadRoutes(app, { routesDir: config?.routesPath, debug });
-  await executePluginHooks(plugins, "afterRoutes", app);
-  return app;
-}
-async function createAutoConfiguredApp(config, plugins = []) {
-  const app = new Hono();
-  const middlewareConfig = config?.middleware ?? {};
-  const enableLogger = middlewareConfig.logger !== false;
-  const enableCors = middlewareConfig.cors !== false;
-  const enableErrorHandler = middlewareConfig.errorHandler !== false;
-  if (enableErrorHandler) {
-    app.use("*", async (c, next) => {
-      c.set("errorHandlerEnabled", true);
-      await next();
-    });
-  }
-  applyDefaultMiddleware(app, config, enableLogger, enableCors);
-  config?.use?.forEach((mw) => app.use("*", mw));
-  registerHealthCheckEndpoint(app, config);
-  await executeBeforeRoutesHook(app, config);
-  await executePluginHooks(plugins, "beforeRoutes", app);
-  await loadAppRoutes(app, config);
-  await executeAfterRoutesHook(app, config);
-  await executePluginHooks(plugins, "afterRoutes", app);
-  if (enableErrorHandler) {
-    app.onError(ErrorHandler());
-  }
-  return app;
-}
-function applyDefaultMiddleware(app, config, enableLogger, enableCors) {
-  if (enableLogger) {
-    app.use("*", RequestLogger());
-  }
-  if (enableCors && config?.cors !== false) {
-    app.use("*", cors(config?.cors));
-  }
-}
-function registerHealthCheckEndpoint(app, config) {
-  const healthCheckConfig = config?.healthCheck ?? {};
-  const healthCheckEnabled = healthCheckConfig.enabled !== false;
-  const healthCheckPath = healthCheckConfig.path ?? "/health";
-  const healthCheckDetailed = healthCheckConfig.detailed ?? process.env.NODE_ENV === "development";
-  if (healthCheckEnabled) {
-    app.get(healthCheckPath, createHealthCheckHandler(healthCheckDetailed));
-    serverLogger.debug(`Health check endpoint enabled at ${healthCheckPath}`);
-  }
-}
-async function executeBeforeRoutesHook(app, config) {
-  if (!config?.lifecycle?.beforeRoutes) {
-    return;
-  }
-  try {
-    await config.lifecycle.beforeRoutes(app);
-  } catch (error) {
-    serverLogger.error("beforeRoutes hook failed", error);
-    throw new Error("Server initialization failed in beforeRoutes hook");
-  }
-}
-async function loadAppRoutes(app, config) {
-  const debug = config?.debug ?? process.env.NODE_ENV === "development";
-  await loadRoutes(app, {
-    routesDir: config?.routesPath,
-    debug,
-    middlewares: config?.middlewares
-  });
-}
-async function executeAfterRoutesHook(app, config) {
-  if (!config?.lifecycle?.afterRoutes) {
-    return;
-  }
-  try {
-    await config.lifecycle.afterRoutes(app);
-  } catch (error) {
-    serverLogger.error("afterRoutes hook failed", error);
-    throw new Error("Server initialization failed in afterRoutes hook");
-  }
-}
-
-// src/server/start-server.ts
-init_cache();
-init_db();
-init_logger2();
-function getNetworkAddress() {
-  const nets = networkInterfaces();
-  for (const name of Object.keys(nets)) {
-    const netGroup = nets[name];
-    if (!netGroup) continue;
-    for (const net of netGroup) {
-      if (net.family === "IPv4" && !net.internal) {
-        return net.address;
-      }
-    }
-  }
-  return null;
-}
-function printBanner(options) {
-  const { mode, host, port } = options;
-  console.log("");
-  console.log(" _____ ____ ______ _ _");
-  console.log(" / ____| _ \\| ____| \\ | |");
-  console.log(" | (___ | |_) | |__ | \\| |");
-  console.log(" \\___ \\| __/| __| | . ` |");
-  console.log(" ____) | | | | | |\\ |");
-  console.log(" |_____/|_| |_| |_| \\_|");
-  console.log("");
-  console.log(` Mode: ${mode}`);
-  if (host === "0.0.0.0") {
-    const networkIP = getNetworkAddress();
-    console.log(` \u25B2 Local: http://localhost:${port}`);
-    if (networkIP) {
-      console.log(` \u25B2 Network: http://${networkIP}:${port}`);
-    }
-  } else {
-    console.log(` \u25B2 Local: http://${host}:${port}`);
-  }
-  console.log("");
-}
-
-// src/server/validation.ts
-function validateServerConfig(config) {
-  if (config.port !== void 0) {
-    if (!Number.isInteger(config.port) || config.port < 0 || config.port > 65535) {
-      throw new Error(
-        `Invalid port: ${config.port}. Port must be an integer between 0 and 65535.`
-      );
-    }
-  }
-  if (config.timeout) {
-    const { request, keepAlive, headers } = config.timeout;
-    if (request !== void 0 && (request < 0 || !Number.isFinite(request))) {
-      throw new Error(`Invalid timeout.request: ${request}. Must be a positive number.`);
-    }
-    if (keepAlive !== void 0 && (keepAlive < 0 || !Number.isFinite(keepAlive))) {
-      throw new Error(`Invalid timeout.keepAlive: ${keepAlive}. Must be a positive number.`);
-    }
-    if (headers !== void 0 && (headers < 0 || !Number.isFinite(headers))) {
-      throw new Error(`Invalid timeout.headers: ${headers}. Must be a positive number.`);
-    }
-    if (headers && request && headers > request) {
-      throw new Error(
-        `Invalid timeout configuration: headers timeout (${headers}ms) cannot exceed request timeout (${request}ms).`
-      );
-    }
-  }
-  if (config.shutdown?.timeout !== void 0) {
-    const timeout = config.shutdown.timeout;
-    if (timeout < 0 || !Number.isFinite(timeout)) {
-      throw new Error(`Invalid shutdown.timeout: ${timeout}. Must be a positive number.`);
-    }
-  }
-  if (config.healthCheck?.path) {
-    if (!config.healthCheck.path.startsWith("/")) {
-      throw new Error(
-        `Invalid healthCheck.path: "${config.healthCheck.path}". Must start with "/".`
-      );
-    }
-  }
-}
-
-// src/server/start-server.ts
-var serverLogger2 = logger.child("server");
-async function startServer(config) {
-  const finalConfig = await loadAndMergeConfig(config);
-  const { host, port, debug } = finalConfig;
-  validateServerConfig(finalConfig);
-  if (debug) {
-    logMiddlewareOrder(finalConfig);
-  }
-  serverLogger2.debug("Discovering plugins...");
-  const plugins = await discoverPlugins();
-  if (plugins.length > 0) {
-    serverLogger2.info("Plugins discovered", {
-      count: plugins.length,
-      plugins: plugins.map((p) => p.name)
-    });
-  }
-  try {
-    await initializeInfrastructure(finalConfig, plugins);
-    const app = await createServer(finalConfig, plugins);
-    const server = startHttpServer(app, host, port);
-    const timeouts = getTimeoutConfig(finalConfig.timeout);
-    applyServerTimeouts(server, timeouts);
-    logServerTimeouts(timeouts);
-    printBanner({
-      mode: debug ? "Development" : "Production",
-      host,
-      port
-    });
-    logServerStarted(debug, host, port, finalConfig, timeouts);
-    const shutdownServer = createShutdownHandler(server, finalConfig, plugins);
-    const shutdown = createGracefulShutdown(shutdownServer, finalConfig);
-    registerShutdownHandlers(shutdown);
-    const serverInstance = {
-      server,
-      app,
-      config: finalConfig,
-      close: async () => {
-        serverLogger2.info("Manual server shutdown requested");
-        await shutdownServer();
-      }
-    };
-    if (finalConfig.lifecycle?.afterStart) {
-      serverLogger2.debug("Executing afterStart hook...");
-      try {
-        await finalConfig.lifecycle.afterStart(serverInstance);
-      } catch (error) {
-        serverLogger2.error("afterStart hook failed", error);
-      }
-    }
-    await executePluginHooks(plugins, "afterStart", serverInstance);
-    return serverInstance;
-  } catch (error) {
-    const err = error;
-    serverLogger2.error("Server initialization failed", err);
-    await cleanupOnFailure(finalConfig);
-    throw error;
-  }
-}
-async function loadAndMergeConfig(config) {
-  const cwd = process.cwd();
-  const configPath = join(cwd, "src", "server", "server.config.ts");
-  const configJsPath = join(cwd, "src", "server", "server.config.js");
-  const builtConfigMjsPath = join(cwd, ".spfn", "server", "server.config.mjs");
-  const builtConfigPath = join(cwd, ".spfn", "server", "server.config.js");
-  let fileConfig = {};
-  if (existsSync(builtConfigMjsPath)) {
-    const configModule = await import(builtConfigMjsPath);
-    fileConfig = configModule.default ?? {};
-  } else if (existsSync(builtConfigPath)) {
-    const configModule = await import(builtConfigPath);
-    fileConfig = configModule.default ?? {};
-  } else if (existsSync(configJsPath)) {
-    const configModule = await import(configJsPath);
-    fileConfig = configModule.default ?? {};
-  } else if (existsSync(configPath)) {
-    const configModule = await import(configPath);
-    fileConfig = configModule.default ?? {};
-  }
-  return {
-    ...fileConfig,
-    ...config,
-    port: config?.port ?? fileConfig?.port ?? (parseInt(process.env.PORT || "", 10) || 4e3),
-    host: config?.host ?? fileConfig?.host ?? (process.env.HOST || "localhost")
-  };
-}
-function logMiddlewareOrder(config) {
-  const middlewareOrder = buildMiddlewareOrder(config);
-  serverLogger2.debug("Middleware execution order", {
-    order: middlewareOrder
-  });
-}
-async function initializeInfrastructure(config, plugins) {
-  if (config.lifecycle?.beforeInfrastructure) {
-    serverLogger2.debug("Executing beforeInfrastructure hook...");
-    try {
-      await config.lifecycle.beforeInfrastructure(config);
-    } catch (error) {
-      serverLogger2.error("beforeInfrastructure hook failed", error);
-      throw new Error("Server initialization failed in beforeInfrastructure hook");
-    }
-  }
-  const shouldInitDatabase = config.infrastructure?.database !== false;
-  if (shouldInitDatabase) {
-    serverLogger2.debug("Initializing database...");
-    await initDatabase(config.database);
-  } else {
-    serverLogger2.debug("Database initialization disabled");
-  }
-  const shouldInitRedis = config.infrastructure?.redis !== false;
-  if (shouldInitRedis) {
-    serverLogger2.debug("Initializing Redis...");
-    await initRedis();
-  } else {
-    serverLogger2.debug("Redis initialization disabled");
-  }
-  if (config.lifecycle?.afterInfrastructure) {
-    serverLogger2.debug("Executing afterInfrastructure hook...");
-    try {
-      await config.lifecycle.afterInfrastructure();
-    } catch (error) {
-      serverLogger2.error("afterInfrastructure hook failed", error);
-      throw new Error("Server initialization failed in afterInfrastructure hook");
-    }
-  }
-  await executePluginHooks(plugins, "afterInfrastructure");
-}
-function startHttpServer(app, host, port) {
-  serverLogger2.debug(`Starting server on ${host}:${port}...`);
-  const server = serve({
-    fetch: app.fetch,
-    port,
-    hostname: host
-  });
-  return server;
-}
-function logServerTimeouts(timeouts) {
-  serverLogger2.info("Server timeouts configured", {
-    request: `${timeouts.request}ms`,
-    keepAlive: `${timeouts.keepAlive}ms`,
-    headers: `${timeouts.headers}ms`
-  });
-}
-function logServerStarted(debug, host, port, config, timeouts) {
-  const startupConfig = buildStartupConfig(config, timeouts);
-  serverLogger2.info("Server started successfully", {
-    mode: debug ? "development" : "production",
-    host,
-    port,
-    config: startupConfig
-  });
-}
-function createShutdownHandler(server, config, plugins) {
-  return async () => {
-    serverLogger2.debug("Closing HTTP server...");
-    await new Promise((resolve) => {
-      server.close(() => {
-        serverLogger2.info("HTTP server closed");
-        resolve();
-      });
-    });
-    if (config.lifecycle?.beforeShutdown) {
-      serverLogger2.debug("Executing beforeShutdown hook...");
-      try {
-        await config.lifecycle.beforeShutdown();
-      } catch (error) {
-        serverLogger2.error("beforeShutdown hook failed", error);
-      }
-    }
-    try {
-      await executePluginHooks(plugins, "beforeShutdown");
-    } catch (error) {
-      serverLogger2.error("Plugin beforeShutdown hooks failed", error);
-    }
-    const shouldCloseDatabase = config.infrastructure?.database !== false;
-    const shouldCloseRedis = config.infrastructure?.redis !== false;
-    if (shouldCloseDatabase) {
-      serverLogger2.debug("Closing database connections...");
-      await closeDatabase();
-    }
-    if (shouldCloseRedis) {
-      serverLogger2.debug("Closing Redis connections...");
-      await closeRedis();
-    }
-    serverLogger2.info("Server shutdown completed");
-  };
-}
-function createGracefulShutdown(shutdownServer, config) {
-  return async (signal) => {
-    serverLogger2.info(`${signal} received, starting graceful shutdown...`);
-    const shutdownTimeout = getShutdownTimeout(config.shutdown);
-    const timeoutPromise = new Promise((_, reject) => {
-      setTimeout(() => {
-        reject(new Error(`Graceful shutdown timeout after ${shutdownTimeout}ms`));
-      }, shutdownTimeout);
-    });
-    try {
-      await Promise.race([
-        shutdownServer(),
-        timeoutPromise
-      ]);
-      serverLogger2.info("Graceful shutdown completed successfully");
-      process.exit(0);
-    } catch (error) {
-      const err = error;
-      if (err.message && err.message.includes("timeout")) {
-        serverLogger2.error("Graceful shutdown timeout, forcing exit", err);
-      } else {
-        serverLogger2.error("Error during graceful shutdown", err);
-      }
-      process.exit(1);
-    }
-  };
-}
-function registerShutdownHandlers(shutdown) {
-  process.setMaxListeners(15);
-  process.on("SIGTERM", () => shutdown("SIGTERM"));
-  process.on("SIGINT", () => shutdown("SIGINT"));
-  process.on("uncaughtException", (error) => {
-    if (error.message?.includes("EADDRINUSE")) {
-      serverLogger2.error("Port conflict detected - detailed trace:", {
-        error: error.message,
-        stack: error.stack,
-        code: error.code,
-        port: error.port,
-        address: error.address,
-        syscall: error.syscall
-      });
-    } else {
-      serverLogger2.error("Uncaught exception", error);
-    }
-    serverLogger2.info("Exiting immediately for clean restart");
-    process.exit(1);
-  });
-  process.on("unhandledRejection", (reason, promise) => {
-    serverLogger2.error("Unhandled promise rejection", {
-      reason,
-      promise
-    });
-    serverLogger2.info("Exiting immediately for clean restart");
-    process.exit(1);
-  });
-}
-async function cleanupOnFailure(config) {
-  try {
-    serverLogger2.debug("Cleaning up after initialization failure...");
-    const shouldCleanupDatabase = config.infrastructure?.database !== false;
-    const shouldCleanupRedis = config.infrastructure?.redis !== false;
-    if (shouldCleanupDatabase) {
-      await closeDatabase();
-    }
-    if (shouldCleanupRedis) {
-      await closeRedis();
-    }
-    serverLogger2.debug("Cleanup completed");
-  } catch (cleanupError) {
-    serverLogger2.error("Cleanup failed", cleanupError);
-  }
-}
-
-export { createServer, isHttpMethod, startServer };
-//# sourceMappingURL=index.js.map
-//# sourceMappingURL=index.js.map