@spfn/core 0.1.0-alpha.8 → 0.1.0-alpha.81
This diff shows the contents of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their respective public registries.
- package/README.md +169 -195
- package/dist/auto-loader-JFaZ9gON.d.ts +80 -0
- package/dist/cache/index.d.ts +211 -0
- package/dist/cache/index.js +992 -0
- package/dist/cache/index.js.map +1 -0
- package/dist/client/index.d.ts +131 -92
- package/dist/client/index.js +93 -85
- package/dist/client/index.js.map +1 -1
- package/dist/codegen/generators/index.d.ts +19 -0
- package/dist/codegen/generators/index.js +1500 -0
- package/dist/codegen/generators/index.js.map +1 -0
- package/dist/codegen/index.d.ts +76 -60
- package/dist/codegen/index.js +1486 -736
- package/dist/codegen/index.js.map +1 -1
- package/dist/database-errors-BNNmLTJE.d.ts +86 -0
- package/dist/db/index.d.ts +844 -44
- package/dist/db/index.js +1262 -1309
- package/dist/db/index.js.map +1 -1
- package/dist/env/index.d.ts +508 -0
- package/dist/env/index.js +1106 -0
- package/dist/env/index.js.map +1 -0
- package/dist/error-handler-wjLL3v-a.d.ts +44 -0
- package/dist/errors/index.d.ts +136 -0
- package/dist/errors/index.js +172 -0
- package/dist/errors/index.js.map +1 -0
- package/dist/index-DHiAqhKv.d.ts +101 -0
- package/dist/index.d.ts +3 -374
- package/dist/index.js +2404 -2179
- package/dist/index.js.map +1 -1
- package/dist/logger/index.d.ts +94 -0
- package/dist/logger/index.js +774 -0
- package/dist/logger/index.js.map +1 -0
- package/dist/middleware/index.d.ts +33 -0
- package/dist/middleware/index.js +897 -0
- package/dist/middleware/index.js.map +1 -0
- package/dist/route/index.d.ts +21 -53
- package/dist/route/index.js +1238 -219
- package/dist/route/index.js.map +1 -1
- package/dist/server/index.d.ts +18 -0
- package/dist/server/index.js +2400 -2061
- package/dist/server/index.js.map +1 -1
- package/dist/types-DYueuoD6.d.ts +162 -0
- package/package.json +59 -15
- package/dist/auto-loader-C44TcLmM.d.ts +0 -125
- package/dist/bind-pssq1NRT.d.ts +0 -34
- package/dist/postgres-errors-CY_Es8EJ.d.ts +0 -1703
- package/dist/scripts/index.d.ts +0 -24
- package/dist/scripts/index.js +0 -1201
- package/dist/scripts/index.js.map +0 -1
- package/dist/scripts/templates/api-index.template.txt +0 -10
- package/dist/scripts/templates/api-tag.template.txt +0 -11
- package/dist/scripts/templates/contract.template.txt +0 -87
- package/dist/scripts/templates/entity-type.template.txt +0 -31
- package/dist/scripts/templates/entity.template.txt +0 -19
- package/dist/scripts/templates/index.template.txt +0 -10
- package/dist/scripts/templates/repository.template.txt +0 -37
- package/dist/scripts/templates/routes-id.template.txt +0 -59
- package/dist/scripts/templates/routes-index.template.txt +0 -44
- package/dist/types-SlzTr8ZO.d.ts +0 -143
package/dist/server/index.js
CHANGED
@@ -1,18 +1,19 @@
 import pino from 'pino';
-import { existsSync, mkdirSync, createWriteStream } from 'fs';
-import { join, relative } from 'path';
-import postgres from 'postgres';
+import { readFileSync, existsSync, readdirSync, statSync, mkdirSync, accessSync, constants, writeFileSync, unlinkSync, createWriteStream, renameSync } from 'fs';
+import { join, dirname, relative, basename } from 'path';
 import { config } from 'dotenv';
+import postgres from 'postgres';
 import { drizzle } from 'drizzle-orm/postgres-js';
+import { timestamp, bigserial, pgSchema } from 'drizzle-orm/pg-core';
 import { AsyncLocalStorage } from 'async_hooks';
+import { randomUUID, randomBytes } from 'crypto';
 import { createMiddleware } from 'hono/factory';
-import {
-import { timestamp, bigserial } from 'drizzle-orm/pg-core';
+import { eq, and } from 'drizzle-orm';
 import { Hono } from 'hono';
-import { serve } from '@hono/node-server';
 import { cors } from 'hono/cors';
 import { readdir, stat } from 'fs/promises';
-import {
+import { serve } from '@hono/node-server';
+import { networkInterfaces } from 'os';

 var __defProp = Object.defineProperty;
 var __getOwnPropNames = Object.getOwnPropertyNames;
@@ -23,117 +24,16 @@ var __export = (target, all) => {
 for (var name in all)
 __defProp(target, name, { get: all[name], enumerable: true });
 };
-
-// src/errors/database-errors.ts
-var DatabaseError, ConnectionError, QueryError, ValidationError, TransactionError, DeadlockError, DuplicateEntryError;
-var init_database_errors = __esm({
-"src/errors/database-errors.ts"() {
-DatabaseError = class extends Error {
-statusCode;
-details;
-timestamp;
-constructor(message, statusCode = 500, details) {
-super(message);
-this.name = "DatabaseError";
-this.statusCode = statusCode;
-this.details = details;
-this.timestamp = /* @__PURE__ */ new Date();
-Error.captureStackTrace(this, this.constructor);
-}
-/**
-* Serialize error for API response
-*/
-toJSON() {
-return {
-name: this.name,
-message: this.message,
-statusCode: this.statusCode,
-details: this.details,
-timestamp: this.timestamp.toISOString()
-};
-}
-};
-ConnectionError = class extends DatabaseError {
-constructor(message, details) {
-super(message, 503, details);
-this.name = "ConnectionError";
-}
-};
-QueryError = class extends DatabaseError {
-constructor(message, statusCode = 500, details) {
-super(message, statusCode, details);
-this.name = "QueryError";
-}
-};
-ValidationError = class extends QueryError {
-constructor(message, details) {
-super(message, 400, details);
-this.name = "ValidationError";
-}
-};
-TransactionError = class extends DatabaseError {
-constructor(message, statusCode = 500, details) {
-super(message, statusCode, details);
-this.name = "TransactionError";
-}
-};
-DeadlockError = class extends TransactionError {
-constructor(message, details) {
-super(message, 409, details);
-this.name = "DeadlockError";
-}
-};
-DuplicateEntryError = class extends QueryError {
-constructor(field, value) {
-super(`${field} '${value}' already exists`, 409, { field, value });
-this.name = "DuplicateEntryError";
-}
-};
-}
-});
 var PinoAdapter;
 var init_pino = __esm({
 "src/logger/adapters/pino.ts"() {
 PinoAdapter = class _PinoAdapter {
 logger;
-constructor(
-const isProduction = process.env.NODE_ENV === "production";
-const isDevelopment = process.env.NODE_ENV === "development";
-const fileLoggingEnabled = process.env.LOGGER_FILE_ENABLED === "true";
-const targets = [];
-if (!isProduction && isDevelopment) {
-targets.push({
-target: "pino-pretty",
-level: "debug",
-options: {
-colorize: true,
-translateTime: "SYS:yyyy-mm-dd HH:MM:ss.l",
-ignore: "pid,hostname"
-}
-});
-}
-if (fileLoggingEnabled && isProduction) {
-const logDir = process.env.LOG_DIR || "./logs";
-const maxFileSize = process.env.LOG_MAX_FILE_SIZE || "10M";
-const maxFiles = parseInt(process.env.LOG_MAX_FILES || "10", 10);
-targets.push({
-target: "pino-roll",
-level: "info",
-options: {
-file: `${logDir}/app.log`,
-frequency: "daily",
-size: maxFileSize,
-limit: { count: maxFiles },
-mkdir: true
-}
-});
-}
+constructor(config) {
 this.logger = pino({
-level:
-// Transport configuration (use targets if present, otherwise default stdout)
-transport: targets.length > 0 ? { targets } : void 0,
+level: config.level,
 // base fields
-base:
+base: config.module ? { module: config.module } : void 0
 });
 }
 child(module) {
@@ -174,16 +74,198 @@ var init_pino = __esm({
 }
 });

+// src/logger/types.ts
+var LOG_LEVEL_PRIORITY;
+var init_types = __esm({
+"src/logger/types.ts"() {
+LOG_LEVEL_PRIORITY = {
+debug: 0,
+info: 1,
+warn: 2,
+error: 3,
+fatal: 4
+};
+}
+});
+
+// src/logger/formatters.ts
+function isSensitiveKey(key) {
+const lowerKey = key.toLowerCase();
+return SENSITIVE_KEYS.some((sensitive) => lowerKey.includes(sensitive));
+}
+function maskSensitiveData(data) {
+if (data === null || data === void 0) {
+return data;
+}
+if (Array.isArray(data)) {
+return data.map((item) => maskSensitiveData(item));
+}
+if (typeof data === "object") {
+const masked = {};
+for (const [key, value] of Object.entries(data)) {
+if (isSensitiveKey(key)) {
+masked[key] = MASKED_VALUE;
+} else if (typeof value === "object" && value !== null) {
+masked[key] = maskSensitiveData(value);
+} else {
+masked[key] = value;
+}
+}
+return masked;
+}
+return data;
+}
+function formatTimestamp(date) {
+return date.toISOString();
+}
+function formatTimestampHuman(date) {
+const year = date.getFullYear();
+const month = String(date.getMonth() + 1).padStart(2, "0");
+const day = String(date.getDate()).padStart(2, "0");
+const hours = String(date.getHours()).padStart(2, "0");
+const minutes = String(date.getMinutes()).padStart(2, "0");
+const seconds = String(date.getSeconds()).padStart(2, "0");
+const ms = String(date.getMilliseconds()).padStart(3, "0");
+return `${year}-${month}-${day} ${hours}:${minutes}:${seconds}.${ms}`;
+}
+function formatError(error) {
+const lines = [];
+lines.push(`${error.name}: ${error.message}`);
+if (error.stack) {
+const stackLines = error.stack.split("\n").slice(1);
+lines.push(...stackLines);
+}
+return lines.join("\n");
+}
+function formatConsole(metadata, colorize = true) {
+const parts = [];
+const timestamp2 = formatTimestampHuman(metadata.timestamp);
+if (colorize) {
+parts.push(`${COLORS.gray}[${timestamp2}]${COLORS.reset}`);
+} else {
+parts.push(`[${timestamp2}]`);
+}
+if (metadata.module) {
+if (colorize) {
+parts.push(`${COLORS.dim}[module=${metadata.module}]${COLORS.reset}`);
+} else {
+parts.push(`[module=${metadata.module}]`);
+}
+}
+if (metadata.context && Object.keys(metadata.context).length > 0) {
+Object.entries(metadata.context).forEach(([key, value]) => {
+const valueStr = typeof value === "string" ? value : String(value);
+if (colorize) {
+parts.push(`${COLORS.dim}[${key}=${valueStr}]${COLORS.reset}`);
+} else {
+parts.push(`[${key}=${valueStr}]`);
+}
+});
+}
+const levelStr = metadata.level.toUpperCase();
+if (colorize) {
+const color = COLORS[metadata.level];
+parts.push(`${color}(${levelStr})${COLORS.reset}:`);
+} else {
+parts.push(`(${levelStr}):`);
+}
+if (colorize) {
+parts.push(`${COLORS.bright}${metadata.message}${COLORS.reset}`);
+} else {
+parts.push(metadata.message);
+}
+let output = parts.join(" ");
+if (metadata.error) {
+output += "\n" + formatError(metadata.error);
+}
+return output;
+}
+function formatJSON(metadata) {
+const obj = {
+timestamp: formatTimestamp(metadata.timestamp),
+level: metadata.level,
+message: metadata.message
+};
+if (metadata.module) {
+obj.module = metadata.module;
+}
+if (metadata.context) {
+obj.context = metadata.context;
+}
+if (metadata.error) {
+obj.error = {
+name: metadata.error.name,
+message: metadata.error.message,
+stack: metadata.error.stack
+};
+}
+return JSON.stringify(obj);
+}
+var SENSITIVE_KEYS, MASKED_VALUE, COLORS;
+var init_formatters = __esm({
+"src/logger/formatters.ts"() {
+SENSITIVE_KEYS = [
+"password",
+"passwd",
+"pwd",
+"secret",
+"token",
+"apikey",
+"api_key",
+"accesstoken",
+"access_token",
+"refreshtoken",
+"refresh_token",
+"authorization",
+"auth",
+"cookie",
+"session",
+"sessionid",
+"session_id",
+"privatekey",
+"private_key",
+"creditcard",
+"credit_card",
+"cardnumber",
+"card_number",
+"cvv",
+"ssn",
+"pin"
+];
+MASKED_VALUE = "***MASKED***";
+COLORS = {
+reset: "\x1B[0m",
+bright: "\x1B[1m",
+dim: "\x1B[2m",
+// log level colors
+debug: "\x1B[36m",
+// cyan
+info: "\x1B[32m",
+// green
+warn: "\x1B[33m",
+// yellow
+error: "\x1B[31m",
+// red
+fatal: "\x1B[35m",
+// magenta
+// additional colors
+gray: "\x1B[90m"
+};
+}
+});
+
 // src/logger/logger.ts
 var Logger;
 var init_logger = __esm({
 "src/logger/logger.ts"() {
+init_types();
+init_formatters();
 Logger = class _Logger {
 config;
 module;
-constructor(
-this.config =
-this.module =
+constructor(config) {
+this.config = config;
+this.module = config.module;
 }
 /**
 * Get current log level
@@ -237,13 +319,17 @@ var init_logger = __esm({
 * Log processing (internal)
 */
 log(level, message, error, context) {
+if (LOG_LEVEL_PRIORITY[level] < LOG_LEVEL_PRIORITY[this.config.level]) {
+return;
+}
 const metadata = {
 timestamp: /* @__PURE__ */ new Date(),
 level,
 message,
 module: this.module,
 error,
-context
+// Mask sensitive information in context to prevent credential leaks
+context: context ? maskSensitiveData(context) : void 0
 };
 this.processTransports(metadata);
 }
@@ -281,160 +367,36 @@ var init_logger = __esm({
 }
 });

-// src/logger/
-var
-var
-"src/logger/
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-}
-
-
-}
-
-
-
-
-
-
-
-const ms = String(date.getMilliseconds()).padStart(3, "0");
-return `${year}-${month}-${day} ${hours}:${minutes}:${seconds}.${ms}`;
-}
-function formatError(error) {
-const lines = [];
-lines.push(`${error.name}: ${error.message}`);
-if (error.stack) {
-const stackLines = error.stack.split("\n").slice(1);
-lines.push(...stackLines);
-}
-return lines.join("\n");
-}
-function formatContext(context) {
-try {
-return JSON.stringify(context, null, 2);
-} catch (error) {
-return "[Context serialization failed]";
-}
-}
-function formatConsole(metadata, colorize = true) {
-const parts = [];
-const timestamp2 = formatTimestampHuman(metadata.timestamp);
-if (colorize) {
-parts.push(`${COLORS.gray}${timestamp2}${COLORS.reset}`);
-} else {
-parts.push(timestamp2);
-}
-if (colorize) {
-parts.push(colorizeLevel(metadata.level));
-} else {
-parts.push(metadata.level.toUpperCase().padEnd(5));
-}
-if (metadata.module) {
-if (colorize) {
-parts.push(`${COLORS.dim}[${metadata.module}]${COLORS.reset}`);
-} else {
-parts.push(`[${metadata.module}]`);
-}
-}
-parts.push(metadata.message);
-let output = parts.join(" ");
-if (metadata.context && Object.keys(metadata.context).length > 0) {
-output += "\n" + formatContext(metadata.context);
-}
-if (metadata.error) {
-output += "\n" + formatError(metadata.error);
-}
-return output;
-}
-function formatJSON(metadata) {
-const obj = {
-timestamp: formatTimestamp(metadata.timestamp),
-level: metadata.level,
-message: metadata.message
-};
-if (metadata.module) {
-obj.module = metadata.module;
-}
-if (metadata.context) {
-obj.context = metadata.context;
-}
-if (metadata.error) {
-obj.error = {
-name: metadata.error.name,
-message: metadata.error.message,
-stack: metadata.error.stack
-};
-}
-return JSON.stringify(obj);
-}
-var COLORS;
-var init_formatters = __esm({
-"src/logger/formatters.ts"() {
-COLORS = {
-reset: "\x1B[0m",
-bright: "\x1B[1m",
-dim: "\x1B[2m",
-// log level colors
-debug: "\x1B[36m",
-// cyan
-info: "\x1B[32m",
-// green
-warn: "\x1B[33m",
-// yellow
-error: "\x1B[31m",
-// red
-fatal: "\x1B[35m",
-// magenta
-// additional colors
-gray: "\x1B[90m"
-};
-}
-});
-
-// src/logger/transports/console.ts
-var ConsoleTransport;
-var init_console = __esm({
-"src/logger/transports/console.ts"() {
-init_types();
-init_formatters();
-ConsoleTransport = class {
-name = "console";
-level;
-enabled;
-colorize;
-constructor(config2) {
-this.level = config2.level;
-this.enabled = config2.enabled;
-this.colorize = config2.colorize ?? true;
-}
-async log(metadata) {
-if (!this.enabled) {
-return;
-}
-if (LOG_LEVEL_PRIORITY[metadata.level] < LOG_LEVEL_PRIORITY[this.level]) {
-return;
-}
-const message = formatConsole(metadata, this.colorize);
-if (metadata.level === "warn" || metadata.level === "error" || metadata.level === "fatal") {
-console.error(message);
-} else {
-console.log(message);
-}
-}
+// src/logger/transports/console.ts
+var ConsoleTransport;
+var init_console = __esm({
+"src/logger/transports/console.ts"() {
+init_types();
+init_formatters();
+ConsoleTransport = class {
+name = "console";
+level;
+enabled;
+colorize;
+constructor(config) {
+this.level = config.level;
+this.enabled = config.enabled;
+this.colorize = config.colorize ?? true;
+}
+async log(metadata) {
+if (!this.enabled) {
+return;
+}
+if (LOG_LEVEL_PRIORITY[metadata.level] < LOG_LEVEL_PRIORITY[this.level]) {
+return;
+}
+const message = formatConsole(metadata, this.colorize);
+if (metadata.level === "warn" || metadata.level === "error" || metadata.level === "fatal") {
+console.error(message);
+} else {
+console.log(message);
+}
+}
 };
 }
 });
@@ -448,12 +410,16 @@ var init_file = __esm({
 level;
 enabled;
 logDir;
+maxFileSize;
+maxFiles;
 currentStream = null;
 currentFilename = null;
-constructor(
-this.level =
-this.enabled =
-this.logDir =
+constructor(config) {
+this.level = config.level;
+this.enabled = config.enabled;
+this.logDir = config.logDir;
+this.maxFileSize = config.maxFileSize ?? 10 * 1024 * 1024;
+this.maxFiles = config.maxFiles ?? 10;
 if (!existsSync(this.logDir)) {
 mkdirSync(this.logDir, { recursive: true });
 }
@@ -469,6 +435,9 @@ var init_file = __esm({
 const filename = this.getLogFilename(metadata.timestamp);
 if (this.currentFilename !== filename) {
 await this.rotateStream(filename);
+await this.cleanOldFiles();
+} else if (this.currentFilename) {
+await this.checkAndRotateBySize();
 }
 if (this.currentStream) {
 return new Promise((resolve, reject) => {
@@ -524,6 +493,103 @@ var init_file = __esm({
 });
 });
 }
+/**
+* Check file size and perform size-based rotation
+*/
+async checkAndRotateBySize() {
+if (!this.currentFilename) {
+return;
+}
+const filepath = join(this.logDir, this.currentFilename);
+if (!existsSync(filepath)) {
+return;
+}
+try {
+const stats = statSync(filepath);
+if (stats.size >= this.maxFileSize) {
+await this.rotateBySize();
+}
+} catch (error) {
+const errorMessage = error instanceof Error ? error.message : String(error);
+process.stderr.write(`[FileTransport] Failed to check file size: ${errorMessage}
+`);
+}
+}
+/**
+* Perform size-based rotation
+* e.g. 2025-01-01.log -> 2025-01-01.1.log, 2025-01-01.1.log -> 2025-01-01.2.log
+*/
+async rotateBySize() {
+if (!this.currentFilename) {
+return;
+}
+await this.closeStream();
+const baseName = this.currentFilename.replace(/\.log$/, "");
+const files = readdirSync(this.logDir);
+const relatedFiles = files.filter((file) => file.startsWith(baseName) && file.endsWith(".log")).sort().reverse();
+for (const file of relatedFiles) {
+const match = file.match(/\.(\d+)\.log$/);
+if (match) {
+const oldNum = parseInt(match[1], 10);
+const newNum = oldNum + 1;
+const oldPath = join(this.logDir, file);
+const newPath2 = join(this.logDir, `${baseName}.${newNum}.log`);
+try {
+renameSync(oldPath, newPath2);
+} catch (error) {
+const errorMessage = error instanceof Error ? error.message : String(error);
+process.stderr.write(`[FileTransport] Failed to rotate file: ${errorMessage}
+`);
+}
+}
+}
+const currentPath = join(this.logDir, this.currentFilename);
+const newPath = join(this.logDir, `${baseName}.1.log`);
+try {
+if (existsSync(currentPath)) {
+renameSync(currentPath, newPath);
+}
+} catch (error) {
+const errorMessage = error instanceof Error ? error.message : String(error);
+process.stderr.write(`[FileTransport] Failed to rotate current file: ${errorMessage}
+`);
+}
+await this.rotateStream(this.currentFilename);
+}
+/**
+* Clean up old log files
+* Delete log files that exceed the maxFiles count
+*/
+async cleanOldFiles() {
+try {
+if (!existsSync(this.logDir)) {
+return;
+}
+const files = readdirSync(this.logDir);
+const logFiles = files.filter((file) => file.endsWith(".log")).map((file) => {
+const filepath = join(this.logDir, file);
+const stats = statSync(filepath);
+return { file, mtime: stats.mtime };
+}).sort((a, b) => b.mtime.getTime() - a.mtime.getTime());
+if (logFiles.length > this.maxFiles) {
+const filesToDelete = logFiles.slice(this.maxFiles);
+for (const { file } of filesToDelete) {
+const filepath = join(this.logDir, file);
+try {
+unlinkSync(filepath);
+} catch (error) {
+const errorMessage = error instanceof Error ? error.message : String(error);
+process.stderr.write(`[FileTransport] Failed to delete old file "${file}": ${errorMessage}
+`);
+}
+}
+}
+} catch (error) {
+const errorMessage = error instanceof Error ? error.message : String(error);
+process.stderr.write(`[FileTransport] Failed to clean old files: ${errorMessage}
+`);
+}
+}
 /**
 * Generate date-based log filename
 */
@@ -539,8 +605,9 @@ var init_file = __esm({
 };
 }
 });
-
-
+function isFileLoggingEnabled() {
+return process.env.LOGGER_FILE_ENABLED === "true";
+}
 function getDefaultLogLevel() {
 const isProduction = process.env.NODE_ENV === "production";
 const isDevelopment = process.env.NODE_ENV === "development";
@@ -573,6 +640,109 @@ function getFileConfig() {
 maxFiles: 10
 };
 }
+function validateDirectoryWritable(dirPath) {
+if (!existsSync(dirPath)) {
+try {
+mkdirSync(dirPath, { recursive: true });
+} catch (error) {
+const errorMessage = error instanceof Error ? error.message : String(error);
+throw new Error(`Failed to create log directory "${dirPath}": ${errorMessage}`);
+}
+}
+try {
+accessSync(dirPath, constants.W_OK);
+} catch {
+throw new Error(`Log directory "${dirPath}" is not writable. Please check permissions.`);
+}
+const testFile = join(dirPath, ".logger-write-test");
+try {
+writeFileSync(testFile, "test", "utf-8");
+unlinkSync(testFile);
+} catch (error) {
+const errorMessage = error instanceof Error ? error.message : String(error);
+throw new Error(`Cannot write to log directory "${dirPath}": ${errorMessage}`);
+}
+}
+function validateFileConfig() {
+if (!isFileLoggingEnabled()) {
+return;
+}
+const logDir = process.env.LOG_DIR;
+if (!logDir) {
+throw new Error(
+"LOG_DIR environment variable is required when LOGGER_FILE_ENABLED=true. Example: LOG_DIR=/var/log/myapp"
+);
+}
+validateDirectoryWritable(logDir);
+}
+function validateSlackConfig() {
+const webhookUrl = process.env.SLACK_WEBHOOK_URL;
+if (!webhookUrl) {
+return;
+}
+if (!webhookUrl.startsWith("https://hooks.slack.com/")) {
+throw new Error(
+`Invalid SLACK_WEBHOOK_URL: "${webhookUrl}". Slack webhook URLs must start with "https://hooks.slack.com/"`
+);
+}
+}
+function validateEmailConfig() {
+const smtpHost = process.env.SMTP_HOST;
+const smtpPort = process.env.SMTP_PORT;
+const emailFrom = process.env.EMAIL_FROM;
+const emailTo = process.env.EMAIL_TO;
+const hasAnyEmailConfig = smtpHost || smtpPort || emailFrom || emailTo;
+if (!hasAnyEmailConfig) {
+return;
+}
+const missingFields = [];
+if (!smtpHost) missingFields.push("SMTP_HOST");
+if (!smtpPort) missingFields.push("SMTP_PORT");
+if (!emailFrom) missingFields.push("EMAIL_FROM");
+if (!emailTo) missingFields.push("EMAIL_TO");
+if (missingFields.length > 0) {
+throw new Error(
+`Email transport configuration incomplete. Missing: ${missingFields.join(", ")}. Either set all required fields or remove all email configuration.`
+);
+}
+const port = parseInt(smtpPort, 10);
+if (isNaN(port) || port < 1 || port > 65535) {
+throw new Error(
+`Invalid SMTP_PORT: "${smtpPort}". Must be a number between 1 and 65535.`
+);
+}
+const emailRegex = /^[^\s@]+@[^\s@]+\.[^\s@]+$/;
+if (!emailRegex.test(emailFrom)) {
+throw new Error(`Invalid EMAIL_FROM format: "${emailFrom}"`);
+}
+const recipients = emailTo.split(",").map((e) => e.trim());
+for (const email of recipients) {
+if (!emailRegex.test(email)) {
+throw new Error(`Invalid email address in EMAIL_TO: "${email}"`);
+}
+}
+}
+function validateEnvironment() {
+const nodeEnv = process.env.NODE_ENV;
+if (!nodeEnv) {
+process.stderr.write(
+"[Logger] Warning: NODE_ENV is not set. Defaulting to test environment.\n"
+);
+}
+}
+function validateConfig() {
+try {
+validateEnvironment();
+validateFileConfig();
+validateSlackConfig();
+validateEmailConfig();
+} catch (error) {
+if (error instanceof Error) {
+throw new Error(`[Logger] Configuration validation failed: ${error.message}`);
+}
+throw error;
+}
+}
 var init_config = __esm({
 "src/logger/config.ts"() {
 }
@@ -598,10 +768,10 @@ var init_custom = __esm({
 init_config();
 CustomAdapter = class _CustomAdapter {
 logger;
-constructor(
+constructor(config) {
 this.logger = new Logger({
-level:
-module:
+level: config.level,
+module: config.module,
 transports: initializeTransports()
 });
 }
@@ -663,13 +833,17 @@ function getAdapterType() {
 }
 return "pino";
 }
+function initializeLogger() {
+validateConfig();
+return createAdapter(getAdapterType());
+}
 var logger;
 var init_adapter_factory = __esm({
 "src/logger/adapter-factory.ts"() {
 init_pino();
 init_custom();
 init_config();
-logger =
+logger = initializeLogger();
 }
 });

@@ -680,192 +854,129 @@ var init_logger2 = __esm({
 }
 });

-// src/
-
-
-
-
-
-
-
-rejectUnauthorized: process.env.REDIS_TLS_REJECT_UNAUTHORIZED !== "false"
-};
-}
-return new RedisClient(url, options);
-}
-async function createRedisFromEnv() {
-if (!hasRedisConfig()) {
-return { write: void 0, read: void 0 };
-}
+// src/route/function-routes.ts
+var function_routes_exports = {};
+__export(function_routes_exports, {
+discoverFunctionRoutes: () => discoverFunctionRoutes
+});
+function discoverFunctionRoutes(cwd = process.cwd()) {
+const functions = [];
+const nodeModulesPath = join(cwd, "node_modules");
 try {
-const
-const
-
-
-
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-redisOptions: {
-password: process.env.REDIS_PASSWORD
+const projectPkgPath = join(cwd, "package.json");
+const projectPkg = JSON.parse(readFileSync(projectPkgPath, "utf-8"));
+const dependencies = {
+...projectPkg.dependencies,
+...projectPkg.devDependencies
+};
+for (const [packageName] of Object.entries(dependencies)) {
+if (!packageName.startsWith("@spfn/") && !packageName.startsWith("spfn-")) {
+continue;
+}
+try {
+const pkgPath = join(nodeModulesPath, ...packageName.split("/"), "package.json");
+const pkg = JSON.parse(readFileSync(pkgPath, "utf-8"));
+if (pkg.spfn?.routes?.dir) {
+const { dir } = pkg.spfn.routes;
+const prefix = pkg.spfn.prefix;
+const packagePath = dirname(pkgPath);
+const routesDir = join(packagePath, dir);
+functions.push({
+packageName,
+routesDir,
+packagePath,
+prefix
+// Include prefix in function info
+});
+routeLogger.debug("Discovered function routes", {
+package: packageName,
+dir,
+prefix: prefix || "(none)"
+});
 }
-}
-
-return { write: cluster, read: cluster };
-}
-if (process.env.REDIS_URL) {
-const client = createClient(RedisClient, process.env.REDIS_URL);
-return { write: client, read: client };
+} catch (error) {
+}
 }
-return { write: void 0, read: void 0 };
 } catch (error) {
-
-
-
-error,
-{ suggestion: "Using memory-only cache. Install ioredis: npm install ioredis" }
-);
-} else {
-cacheLogger.warn(
-"Failed to create Redis client",
-{ error: String(error), suggestion: "Using memory-only cache. Install ioredis: npm install ioredis" }
-);
-}
-return { write: void 0, read: void 0 };
+routeLogger.warn("Failed to discover function routes", {
+error: error instanceof Error ? error.message : "Unknown error"
+});
 }
+return functions;
 }
-
-
-
-}
-var cacheLogger;
-var init_redis_factory = __esm({
-"src/cache/redis-factory.ts"() {
+var routeLogger;
+var init_function_routes = __esm({
+"src/route/function-routes.ts"() {
 init_logger2();
-
+routeLogger = logger.child("function-routes");
 }
 });

-// src/
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-const { write, read } = await createRedisFromEnv();
-if (write) {
-try {
-await write.ping();
-if (read && read !== write) {
-await read.ping();
+// src/errors/database-errors.ts
+var DatabaseError, ConnectionError, QueryError, ConstraintViolationError, TransactionError, DeadlockError, DuplicateEntryError;
+var init_database_errors = __esm({
+"src/errors/database-errors.ts"() {
+DatabaseError = class extends Error {
+statusCode;
+details;
+timestamp;
+constructor(message, statusCode = 500, details) {
+super(message);
+this.name = "DatabaseError";
+this.statusCode = statusCode;
+this.details = details;
+this.timestamp = /* @__PURE__ */ new Date();
+Error.captureStackTrace(this, this.constructor);
 }
-
-
-
-
-
-
-
-
-
-
-
-try {
-await write.quit();
-if (read && read !== write) {
-await read.quit();
-}
-} catch {
+/**
+* Serialize error for API response
+*/
+toJSON() {
+return {
+name: this.name,
+message: this.message,
+statusCode: this.statusCode,
+details: this.details,
+timestamp: this.timestamp.toISOString()
+};
 }
-
-
-
-
-
-
-
-
-
-
-
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-init_logger2();
-cacheLogger2 = logger.child("cache");
-}
-});
-
-// src/cache/index.ts
-var cache_exports = {};
-__export(cache_exports, {
-closeRedis: () => closeRedis,
-createRedisFromEnv: () => createRedisFromEnv,
-createSingleRedisFromEnv: () => createSingleRedisFromEnv,
-getRedis: () => getRedis,
-getRedisInfo: () => getRedisInfo,
-getRedisRead: () => getRedisRead,
-initRedis: () => initRedis,
-setRedis: () => setRedis
-});
-var init_cache = __esm({
-"src/cache/index.ts"() {
-init_redis_factory();
-init_redis_manager();
+};
+ConnectionError = class extends DatabaseError {
+constructor(message, details) {
+super(message, 503, details);
+this.name = "ConnectionError";
+}
+};
+QueryError = class extends DatabaseError {
+constructor(message, statusCode = 500, details) {
+super(message, statusCode, details);
+this.name = "QueryError";
+}
+};
+ConstraintViolationError = class extends QueryError {
+constructor(message, details) {
+super(message, 400, details);
+this.name = "ConstraintViolationError";
+}
+};
+TransactionError = class extends DatabaseError {
+constructor(message, statusCode = 500, details) {
+super(message, statusCode, details);
+this.name = "TransactionError";
+}
+};
+DeadlockError = class extends TransactionError {
+constructor(message, details) {
+super(message, 409, details);
+this.name = "DeadlockError";
+}
+};
+DuplicateEntryError = class extends QueryError {
+constructor(field, value) {
+super(`${field} '${value}' already exists`, 409, { field, value });
+this.name = "DuplicateEntryError";
+}
+};
 }
 });

@@ -892,6 +1003,224 @@ var init_errors = __esm({
 }
 });

+// src/env/config.ts
+var ENV_FILE_PRIORITY, TEST_ONLY_FILES;
+var init_config2 = __esm({
+"src/env/config.ts"() {
+ENV_FILE_PRIORITY = [
+".env",
+// Base configuration (lowest priority)
+".env.{NODE_ENV}",
+// Environment-specific
+".env.local",
+// Local overrides (excluded in test)
+".env.{NODE_ENV}.local"
+// Local environment-specific (highest priority)
+];
+TEST_ONLY_FILES = [
+".env.test",
+".env.test.local"
+];
+}
+});
+function buildFileList(basePath, nodeEnv) {
+const files = [];
+if (!nodeEnv) {
+files.push(join(basePath, ".env"));
+files.push(join(basePath, ".env.local"));
+return files;
+}
+for (const pattern of ENV_FILE_PRIORITY) {
+const fileName = pattern.replace("{NODE_ENV}", nodeEnv);
+if (nodeEnv === "test" && fileName === ".env.local") {
+continue;
+}
+if (nodeEnv === "local" && pattern === ".env.local") {
+continue;
+}
+if (nodeEnv !== "test" && TEST_ONLY_FILES.includes(fileName)) {
+continue;
+}
+files.push(join(basePath, fileName));
+}
+return files;
+}
+function loadSingleFile(filePath, debug) {
+if (!existsSync(filePath)) {
+if (debug) {
+envLogger.debug("Environment file not found (optional)", {
+path: filePath
+});
+}
+return { success: false, parsed: {}, error: "File not found" };
+}
+try {
+const result = config({ path: filePath });
+if (result.error) {
+envLogger.warn("Failed to parse environment file", {
+path: filePath,
+error: result.error.message
+});
+return {
+success: false,
+parsed: {},
+error: result.error.message
+};
+}
+const parsed = result.parsed || {};
+if (debug) {
+envLogger.debug("Environment file loaded successfully", {
+path: filePath,
+variables: Object.keys(parsed),
+count: Object.keys(parsed).length
+});
+}
+return { success: true, parsed };
+} catch (error) {
+const message = error instanceof Error ? error.message : "Unknown error";
+envLogger.error("Error loading environment file", {
+path: filePath,
+error: message
+});
+return { success: false, parsed: {}, error: message };
+}
+}
+function validateRequiredVars(required, debug) {
+const missing = [];
+for (const varName of required) {
+if (!process.env[varName]) {
+missing.push(varName);
+}
+}
+if (missing.length > 0) {
+const error = `Required environment variables missing: ${missing.join(", ")}`;
+envLogger.error("Environment validation failed", {
+missing,
+required
+});
+throw new Error(error);
+}
+if (debug) {
+envLogger.debug("Required environment variables validated", {
+required,
+allPresent: true
+});
+}
+}
+function loadEnvironment(options = {}) {
+const {
+basePath = process.cwd(),
+customPaths = [],
+debug = false,
+nodeEnv = process.env.NODE_ENV || "",
+required = [],
+useCache = true
+} = options;
+if (useCache && environmentLoaded && cachedLoadResult) {
+if (debug) {
+envLogger.debug("Returning cached environment", {
+loaded: cachedLoadResult.loaded.length,
+variables: Object.keys(cachedLoadResult.parsed).length
+});
+}
+return cachedLoadResult;
+}
+if (debug) {
+envLogger.debug("Loading environment variables", {
+basePath,
+nodeEnv,
+customPaths,
+required
+});
+}
+const result = {
+success: true,
+loaded: [],
+failed: [],
+parsed: {},
+warnings: []
+};
+const standardFiles = buildFileList(basePath, nodeEnv);
+const allFiles = [...standardFiles, ...customPaths];
+if (debug) {
+envLogger.debug("Environment files to load", {
+standardFiles,
+customPaths,
+total: allFiles.length
+});
+}
+const reversedFiles = [...allFiles].reverse();
+for (const filePath of reversedFiles) {
+const fileResult = loadSingleFile(filePath, debug);
+if (fileResult.success) {
+result.loaded.push(filePath);
+Object.assign(result.parsed, fileResult.parsed);
+if (fileResult.parsed["NODE_ENV"]) {
+const fileName = filePath.split("/").pop() || filePath;
+result.warnings.push(
+`NODE_ENV found in ${fileName}. It's recommended to set NODE_ENV via CLI (e.g., 'spfn dev', 'spfn build') instead of .env files for consistent environment behavior.`
+);
+}
+} else if (fileResult.error) {
+result.failed.push({
+path: filePath,
+reason: fileResult.error
+});
+}
+}
+if (debug || result.loaded.length > 0) {
+envLogger.info("Environment loading complete", {
+loaded: result.loaded.length,
+failed: result.failed.length,
+variables: Object.keys(result.parsed).length,
+files: result.loaded
+});
+}
+if (required.length > 0) {
+try {
+validateRequiredVars(required, debug);
+} catch (error) {
+result.success = false;
+result.errors = [
+error instanceof Error ? error.message : "Validation failed"
+];
+throw error;
+}
+}
+if (result.warnings.length > 0) {
+for (const warning of result.warnings) {
+envLogger.warn(warning);
+}
+}
+environmentLoaded = true;
+cachedLoadResult = result;
+return result;
+}
+var envLogger, environmentLoaded, cachedLoadResult;
+var init_loader = __esm({
+"src/env/loader.ts"() {
+init_logger2();
+init_config2();
+envLogger = logger.child("environment");
+environmentLoaded = false;
+}
+});
+
+// src/env/validator.ts
+var init_validator = __esm({
+"src/env/validator.ts"() {
+}
+});
+
+// src/env/index.ts
+var init_env = __esm({
+"src/env/index.ts"() {
+init_loader();
+init_config2();
+init_validator();
+}
+});
+
 // src/db/postgres-errors.ts
 function parseUniqueViolation(message) {
 const patterns = [
@@ -935,11 +1264,11 @@ function fromPostgresError(error) {
 case "23000":
 // integrity_constraint_violation
 case "23001":
-return new
+return new ConstraintViolationError(message, { code, constraint: "integrity" });
 case "23502":
-return new
+return new ConstraintViolationError(message, { code, constraint: "not_null" });
 case "23503":
-return new
+return new ConstraintViolationError(message, { code, constraint: "foreign_key" });
 case "23505":
 const parsed = parseUniqueViolation(message);
 if (parsed) {
@@ -947,7 +1276,7 @@ function fromPostgresError(error) {
 }
 return new DuplicateEntryError("field", "value");
 case "23514":
-return new
+return new ConstraintViolationError(message, { code, constraint: "check" });
 // Class 40 — Transaction Rollback
 case "40000":
 // transaction_rollback
@@ -1009,7 +1338,7 @@ function fromPostgresError(error) {
 }
 var init_postgres_errors = __esm({
 "src/db/postgres-errors.ts"() {
-
+init_errors();
 }
 });
 function delay(ms) {
@@ -1073,107 +1402,161 @@ var init_connection = __esm({
|
|
|
1073
1402
|
});
|
|
1074
1403
|
|
|
1075
1404
|
// src/db/manager/config.ts
|
|
1076
|
-
function
|
|
1405
|
+
function parseEnvNumber(key, prodDefault, devDefault) {
|
|
1077
1406
|
const isProduction = process.env.NODE_ENV === "production";
|
|
1078
|
-
const
|
|
1079
|
-
|
|
1080
|
-
|
|
1407
|
+
const envValue = parseInt(process.env[key] || "", 10);
|
|
1408
|
+
return isNaN(envValue) ? isProduction ? prodDefault : devDefault : envValue;
|
|
1409
|
+
}
|
|
1410
|
+
function parseEnvBoolean(key, defaultValue) {
|
|
1411
|
+
const value = process.env[key];
|
|
1412
|
+
if (value === void 0) return defaultValue;
|
|
1413
|
+
return value.toLowerCase() === "true";
|
|
1414
|
+
}
|
|
1415
|
+
function getPoolConfig(options) {
|
|
1416
|
+
return {
|
|
1417
|
+
max: options?.max ?? parseEnvNumber("DB_POOL_MAX", 20, 10),
|
|
1418
|
+
idleTimeout: options?.idleTimeout ?? parseEnvNumber("DB_POOL_IDLE_TIMEOUT", 30, 20)
|
|
1419
|
+
};
|
|
1081
1420
|
}
|
|
1082
1421
|
function getRetryConfig() {
|
|
1083
|
-
const isProduction = process.env.NODE_ENV === "production";
|
|
1084
1422
|
return {
|
|
1085
|
-
maxRetries:
|
|
1086
|
-
|
|
1087
|
-
|
|
1088
|
-
|
|
1089
|
-
maxDelay: 16e3,
|
|
1090
|
-
// 16초
|
|
1091
|
-
factor: 2
|
|
1092
|
-
// 2배씩 증가 (1s → 2s → 4s → 8s → 16s)
|
|
1423
|
+
maxRetries: parseEnvNumber("DB_RETRY_MAX", 5, 3),
|
|
1424
|
+
initialDelay: parseEnvNumber("DB_RETRY_INITIAL_DELAY", 100, 50),
|
|
1425
|
+
maxDelay: parseEnvNumber("DB_RETRY_MAX_DELAY", 1e4, 5e3),
|
|
1426
|
+
factor: parseEnvNumber("DB_RETRY_FACTOR", 2, 2)
|
|
1093
1427
|
};
|
|
1094
1428
|
}
|
|
1095
|
-
|
|
1429
|
+
function buildHealthCheckConfig(options) {
|
|
1430
|
+
return {
|
|
1431
|
+
enabled: options?.enabled ?? parseEnvBoolean("DB_HEALTH_CHECK_ENABLED", true),
|
|
1432
|
+
interval: options?.interval ?? parseEnvNumber("DB_HEALTH_CHECK_INTERVAL", 6e4, 6e4),
|
|
1433
|
+
reconnect: options?.reconnect ?? parseEnvBoolean("DB_HEALTH_CHECK_RECONNECT", true),
|
|
1434
|
+
maxRetries: options?.maxRetries ?? parseEnvNumber("DB_HEALTH_CHECK_MAX_RETRIES", 3, 3),
|
|
1435
|
+
retryInterval: options?.retryInterval ?? parseEnvNumber("DB_HEALTH_CHECK_RETRY_INTERVAL", 5e3, 5e3)
|
|
1436
|
+
};
|
|
1437
|
+
}
|
|
1438
|
+
function buildMonitoringConfig(options) {
|
|
1439
|
+
const isDevelopment = process.env.NODE_ENV !== "production";
|
|
1440
|
+
return {
|
|
1441
|
+
enabled: options?.enabled ?? parseEnvBoolean("DB_MONITORING_ENABLED", isDevelopment),
|
|
1442
|
+
slowThreshold: options?.slowThreshold ?? parseEnvNumber("DB_MONITORING_SLOW_THRESHOLD", 1e3, 1e3),
|
|
1443
|
+
logQueries: options?.logQueries ?? parseEnvBoolean("DB_MONITORING_LOG_QUERIES", false)
|
|
1444
|
+
};
|
|
1445
|
+
}
|
|
1446
|
+
var init_config3 = __esm({
|
|
1096
1447
|
"src/db/manager/config.ts"() {
|
|
1097
1448
|
}
|
|
1098
1449
|
});
|
|
1099
1450
|
function hasDatabaseConfig() {
|
|
1100
1451
|
return !!(process.env.DATABASE_URL || process.env.DATABASE_WRITE_URL || process.env.DATABASE_READ_URL);
|
|
1101
1452
|
}
|
|
1453
|
+
function detectDatabasePattern() {
|
|
1454
|
+
if (process.env.DATABASE_WRITE_URL && process.env.DATABASE_READ_URL) {
|
|
1455
|
+
return {
|
|
1456
|
+
type: "write-read",
|
|
1457
|
+
write: process.env.DATABASE_WRITE_URL,
|
|
1458
|
+
read: process.env.DATABASE_READ_URL
|
|
1459
|
+
};
|
|
1460
|
+
}
|
|
1461
|
+
if (process.env.DATABASE_URL && process.env.DATABASE_REPLICA_URL) {
|
|
1462
|
+
return {
|
|
1463
|
+
type: "legacy",
|
|
1464
|
+
primary: process.env.DATABASE_URL,
|
|
1465
|
+
replica: process.env.DATABASE_REPLICA_URL
|
|
1466
|
+
};
|
|
1467
|
+
}
|
|
1468
|
+
if (process.env.DATABASE_URL) {
|
|
1469
|
+
return {
|
|
1470
|
+
type: "single",
|
|
1471
|
+
url: process.env.DATABASE_URL
|
|
1472
|
+
};
|
|
1473
|
+
}
|
|
1474
|
+
if (process.env.DATABASE_WRITE_URL) {
|
|
1475
|
+
return {
|
|
1476
|
+
type: "single",
|
|
1477
|
+
url: process.env.DATABASE_WRITE_URL
|
|
1478
|
+
};
|
|
1479
|
+
}
|
|
1480
|
+
return { type: "none" };
|
|
1481
|
+
}
|
|
1482
|
+
async function createWriteReadClients(writeUrl, readUrl, poolConfig, retryConfig) {
|
|
1483
|
+
const writeClient = await createDatabaseConnection(writeUrl, poolConfig, retryConfig);
|
|
1484
|
+
const readClient = await createDatabaseConnection(readUrl, poolConfig, retryConfig);
|
|
1485
|
+
return {
|
|
1486
|
+
write: drizzle(writeClient),
|
|
1487
|
+
read: drizzle(readClient),
|
|
1488
|
+
writeClient,
|
|
1489
|
+
readClient
|
|
1490
|
+
};
|
|
1491
|
+
}
|
|
1492
|
+
async function createSingleClient(url, poolConfig, retryConfig) {
|
|
1493
|
+
const client = await createDatabaseConnection(url, poolConfig, retryConfig);
|
|
1494
|
+
const db = drizzle(client);
|
|
1495
|
+
return {
|
|
1496
|
+
write: db,
|
|
1497
|
+
read: db,
|
|
1498
|
+
writeClient: client,
|
|
1499
|
+
readClient: client
|
|
1500
|
+
};
|
|
1501
|
+
}
|
|
1102
1502
|
 async function createDatabaseFromEnv(options) {
   if (!hasDatabaseConfig()) {
-    [old line 1104 removed; its content is not rendered in this diff view]
+    dbLogger2.debug("No DATABASE_URL found, loading environment variables");
+    const result = loadEnvironment({
+      debug: true
+    });
+    dbLogger2.debug("Environment variables loaded", {
+      success: result.success,
+      loaded: result.loaded.length,
+      hasDatabaseUrl: !!process.env.DATABASE_URL,
+      hasWriteUrl: !!process.env.DATABASE_WRITE_URL,
+      hasReadUrl: !!process.env.DATABASE_READ_URL
+    });
   }
   if (!hasDatabaseConfig()) {
+    dbLogger2.warn("No database configuration found", {
+      cwd: process.cwd(),
+      nodeEnv: process.env.NODE_ENV,
+      checkedVars: ["DATABASE_URL", "DATABASE_WRITE_URL", "DATABASE_READ_URL"]
+    });
     return { write: void 0, read: void 0 };
   }
   try {
     const poolConfig = getPoolConfig(options?.pool);
     const retryConfig = getRetryConfig();
-    [old lines 1112-1142 removed; their content is not rendered in this diff view]
-        read:
-        writeClient: writeClient2,
-        readClient: readClient2
-      };
-    }
-    if (process.env.DATABASE_URL) {
-      const client = await createDatabaseConnection(
-        process.env.DATABASE_URL,
-        poolConfig,
-        retryConfig
-      );
-      const db2 = drizzle(client);
-      return {
-        write: db2,
-        read: db2,
-        writeClient: client,
-        readClient: client
-      };
-    }
-    if (process.env.DATABASE_WRITE_URL) {
-      const client = await createDatabaseConnection(
-        process.env.DATABASE_WRITE_URL,
-        poolConfig,
-        retryConfig
-      );
-      const db2 = drizzle(client);
-      return {
-        write: db2,
-        read: db2,
-        writeClient: client,
-        readClient: client
-      };
+    const pattern = detectDatabasePattern();
+    switch (pattern.type) {
+      case "write-read":
+        dbLogger2.debug("Using write-read pattern", {
+          write: pattern.write.replace(/:[^:@]+@/, ":***@"),
+          read: pattern.read.replace(/:[^:@]+@/, ":***@")
+        });
+        return await createWriteReadClients(
+          pattern.write,
+          pattern.read,
+          poolConfig,
+          retryConfig
+        );
+      case "legacy":
+        dbLogger2.debug("Using legacy replica pattern", {
+          primary: pattern.primary.replace(/:[^:@]+@/, ":***@"),
+          replica: pattern.replica.replace(/:[^:@]+@/, ":***@")
+        });
+        return await createWriteReadClients(
+          pattern.primary,
+          pattern.replica,
+          poolConfig,
+          retryConfig
+        );
+      case "single":
+        dbLogger2.debug("Using single database pattern", {
+          url: pattern.url.replace(/:[^:@]+@/, ":***@")
+        });
+        return await createSingleClient(pattern.url, poolConfig, retryConfig);
+      case "none":
+        dbLogger2.warn("No database pattern detected");
+        return { write: void 0, read: void 0 };
     }
-    return { write: void 0, read: void 0 };
   } catch (error) {
     const message = error instanceof Error ? error.message : "Unknown error";
     dbLogger2.error("Failed to create database connection", {
@@ -1184,59 +1567,151 @@ async function createDatabaseFromEnv(options)
       hasUrl: !!process.env.DATABASE_URL,
       hasReplicaUrl: !!process.env.DATABASE_REPLICA_URL
     });
-    [old line 1187 removed; its content is not rendered in this diff view]
+    throw new Error(`Database connection failed: ${message}`, { cause: error });
   }
 }
 var dbLogger2;
 var init_factory = __esm({
   "src/db/manager/factory.ts"() {
-    init_connection();
-    init_config2();
     init_logger2();
+    init_env();
+    init_connection();
+    init_config3();
     dbLogger2 = logger.child("database");
   }
 });
 
-// src/db/manager/
+// src/db/manager/global-state.ts
+var getWriteInstance, setWriteInstance, getReadInstance, setReadInstance, getWriteClient, setWriteClient, getReadClient, setReadClient, getHealthCheckInterval, setHealthCheckInterval, setMonitoringConfig;
+var init_global_state = __esm({
+  "src/db/manager/global-state.ts"() {
+    getWriteInstance = () => globalThis.__SPFN_DB_WRITE__;
+    setWriteInstance = (instance) => {
+      globalThis.__SPFN_DB_WRITE__ = instance;
+    };
+    getReadInstance = () => globalThis.__SPFN_DB_READ__;
+    setReadInstance = (instance) => {
+      globalThis.__SPFN_DB_READ__ = instance;
+    };
+    getWriteClient = () => globalThis.__SPFN_DB_WRITE_CLIENT__;
+    setWriteClient = (client) => {
+      globalThis.__SPFN_DB_WRITE_CLIENT__ = client;
+    };
+    getReadClient = () => globalThis.__SPFN_DB_READ_CLIENT__;
+    setReadClient = (client) => {
+      globalThis.__SPFN_DB_READ_CLIENT__ = client;
+    };
+    getHealthCheckInterval = () => globalThis.__SPFN_DB_HEALTH_CHECK__;
+    setHealthCheckInterval = (interval) => {
+      globalThis.__SPFN_DB_HEALTH_CHECK__ = interval;
+    };
+    setMonitoringConfig = (config) => {
+      globalThis.__SPFN_DB_MONITORING__ = config;
+    };
+  }
+});
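The new global-state module keeps the drizzle instances, raw postgres clients, health-check timer, and monitoring config on `globalThis`, so a re-evaluated copy of the module (hot reload, duplicated bundle) still sees the same connections. A minimal sketch of the pattern with a shortened placeholder key:

```ts
// Minimal sketch of the globalThis-backed accessors above; key name and type are placeholders.
declare global {
  // eslint-disable-next-line no-var
  var __EXAMPLE_DB__: unknown | undefined;
}

export const getDb = (): unknown | undefined => globalThis.__EXAMPLE_DB__;
export const setDb = (db: unknown | undefined): void => {
  globalThis.__EXAMPLE_DB__ = db;
};
```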
|
|
1613
|
+
|
|
1614
|
+
// src/db/manager/health-check.ts
|
|
1615
|
+
function startHealthCheck(config, options, getDatabase2, closeDatabase2) {
|
|
1616
|
+
const healthCheck = getHealthCheckInterval();
|
|
1617
|
+
if (healthCheck) {
|
|
1618
|
+
dbLogger3.debug("Health check already running");
|
|
1619
|
+
return;
|
|
1620
|
+
}
|
|
1621
|
+
dbLogger3.info("Starting database health check", {
|
|
1622
|
+
interval: `${config.interval}ms`,
|
|
1623
|
+
reconnect: config.reconnect
|
|
1624
|
+
});
|
|
1625
|
+
const interval = setInterval(async () => {
|
|
1626
|
+
try {
|
|
1627
|
+
const write = getDatabase2("write");
|
|
1628
|
+
const read = getDatabase2("read");
|
|
1629
|
+
if (write) {
|
|
1630
|
+
await write.execute("SELECT 1");
|
|
1631
|
+
}
|
|
1632
|
+
if (read && read !== write) {
|
|
1633
|
+
await read.execute("SELECT 1");
|
|
1634
|
+
}
|
|
1635
|
+
} catch (error) {
|
|
1636
|
+
const message = error instanceof Error ? error.message : "Unknown error";
|
|
1637
|
+
dbLogger3.error("Database health check failed", { error: message });
|
|
1638
|
+
if (config.reconnect) {
|
|
1639
|
+
await attemptReconnection(config, options, closeDatabase2);
|
|
1640
|
+
}
|
|
1641
|
+
}
|
|
1642
|
+
}, config.interval);
|
|
1643
|
+
setHealthCheckInterval(interval);
|
|
1644
|
+
}
|
|
1645
|
+
async function attemptReconnection(config, options, closeDatabase2) {
|
|
1646
|
+
dbLogger3.warn("Attempting database reconnection", {
|
|
1647
|
+
maxRetries: config.maxRetries,
|
|
1648
|
+
retryInterval: `${config.retryInterval}ms`
|
|
1649
|
+
});
|
|
1650
|
+
for (let attempt = 1; attempt <= config.maxRetries; attempt++) {
|
|
1651
|
+
try {
|
|
1652
|
+
dbLogger3.debug(`Reconnection attempt ${attempt}/${config.maxRetries}`);
|
|
1653
|
+
await closeDatabase2();
|
|
1654
|
+
await new Promise((resolve) => setTimeout(resolve, config.retryInterval));
|
|
1655
|
+
const result = await createDatabaseFromEnv(options);
|
|
1656
|
+
if (result.write) {
|
|
1657
|
+
await result.write.execute("SELECT 1");
|
|
1658
|
+
setWriteInstance(result.write);
|
|
1659
|
+
setReadInstance(result.read);
|
|
1660
|
+
setWriteClient(result.writeClient);
|
|
1661
|
+
setReadClient(result.readClient);
|
|
1662
|
+
dbLogger3.info("Database reconnection successful", { attempt });
|
|
1663
|
+
return;
|
|
1664
|
+
}
|
|
1665
|
+
} catch (error) {
|
|
1666
|
+
const message = error instanceof Error ? error.message : "Unknown error";
|
|
1667
|
+
dbLogger3.error(`Reconnection attempt ${attempt} failed`, {
|
|
1668
|
+
error: message,
|
|
1669
|
+
attempt,
|
|
1670
|
+
maxRetries: config.maxRetries
|
|
1671
|
+
});
|
|
1672
|
+
if (attempt === config.maxRetries) {
|
|
1673
|
+
dbLogger3.error("Max reconnection attempts reached, giving up");
|
|
1674
|
+
}
|
|
1675
|
+
}
|
|
1676
|
+
}
|
|
1677
|
+
}
|
|
1678
|
+
function stopHealthCheck() {
|
|
1679
|
+
const healthCheck = getHealthCheckInterval();
|
|
1680
|
+
if (healthCheck) {
|
|
1681
|
+
clearInterval(healthCheck);
|
|
1682
|
+
setHealthCheckInterval(void 0);
|
|
1683
|
+
dbLogger3.info("Database health check stopped");
|
|
1684
|
+
}
|
|
1685
|
+
}
|
|
1686
|
+
var dbLogger3;
|
|
1687
|
+
var init_health_check = __esm({
|
|
1688
|
+
"src/db/manager/health-check.ts"() {
|
|
1689
|
+
init_logger2();
|
|
1690
|
+
init_factory();
|
|
1691
|
+
init_global_state();
|
|
1692
|
+
dbLogger3 = logger.child("database");
|
|
1693
|
+
}
|
|
1694
|
+
});
|
|
1695
|
+
|
|
1696
|
+
// src/db/manager/manager.ts
|
|
1201
1697
|
function getDatabase(type) {
|
|
1698
|
+
const writeInst = getWriteInstance();
|
|
1699
|
+
const readInst = getReadInstance();
|
|
1700
|
+
dbLogger4.debug(`getDatabase() called with type=${type}, writeInstance=${!!writeInst}, readInstance=${!!readInst}`);
|
|
1202
1701
|
if (type === "read") {
|
|
1203
|
-
return
|
|
1702
|
+
return readInst ?? writeInst;
|
|
1204
1703
|
}
|
|
1205
|
-
return
|
|
1704
|
+
return writeInst;
|
|
1206
1705
|
}
|
|
1207
1706
|
function setDatabase(write, read) {
|
|
1208
|
-
|
|
1209
|
-
|
|
1210
|
-
}
|
|
1211
|
-
function getHealthCheckConfig(options) {
|
|
1212
|
-
const parseBoolean = (value, defaultValue) => {
|
|
1213
|
-
if (value === void 0) return defaultValue;
|
|
1214
|
-
return value.toLowerCase() === "true";
|
|
1215
|
-
};
|
|
1216
|
-
return {
|
|
1217
|
-
enabled: options?.enabled ?? parseBoolean(process.env.DB_HEALTH_CHECK_ENABLED, true),
|
|
1218
|
-
interval: options?.interval ?? (parseInt(process.env.DB_HEALTH_CHECK_INTERVAL || "", 10) || 6e4),
|
|
1219
|
-
reconnect: options?.reconnect ?? parseBoolean(process.env.DB_HEALTH_CHECK_RECONNECT, true),
|
|
1220
|
-
maxRetries: options?.maxRetries ?? (parseInt(process.env.DB_HEALTH_CHECK_MAX_RETRIES || "", 10) || 3),
|
|
1221
|
-
retryInterval: options?.retryInterval ?? (parseInt(process.env.DB_HEALTH_CHECK_RETRY_INTERVAL || "", 10) || 5e3)
|
|
1222
|
-
};
|
|
1223
|
-
}
|
|
1224
|
-
function getMonitoringConfig(options) {
|
|
1225
|
-
const isDevelopment = process.env.NODE_ENV !== "production";
|
|
1226
|
-
const parseBoolean = (value, defaultValue) => {
|
|
1227
|
-
if (value === void 0) return defaultValue;
|
|
1228
|
-
return value.toLowerCase() === "true";
|
|
1229
|
-
};
|
|
1230
|
-
return {
|
|
1231
|
-
enabled: options?.enabled ?? parseBoolean(process.env.DB_MONITORING_ENABLED, isDevelopment),
|
|
1232
|
-
slowThreshold: options?.slowThreshold ?? (parseInt(process.env.DB_MONITORING_SLOW_THRESHOLD || "", 10) || 1e3),
|
|
1233
|
-
logQueries: options?.logQueries ?? parseBoolean(process.env.DB_MONITORING_LOG_QUERIES, false)
|
|
1234
|
-
};
|
|
1707
|
+
setWriteInstance(write);
|
|
1708
|
+
setReadInstance(read ?? write);
|
|
1235
1709
|
}
|
|
1236
1710
|
async function initDatabase(options) {
|
|
1237
|
-
|
|
1238
|
-
|
|
1239
|
-
|
|
1711
|
+
const writeInst = getWriteInstance();
|
|
1712
|
+
if (writeInst) {
|
|
1713
|
+
dbLogger4.debug("Database already initialized");
|
|
1714
|
+
return { write: writeInst, read: getReadInstance() };
|
|
1240
1715
|
}
|
|
1241
1716
|
const result = await createDatabaseFromEnv(options);
|
|
1242
1717
|
if (result.write) {
|
|
@@ -1245,195 +1720,372 @@ async function initDatabase(options) {
|
|
|
1245
1720
|
if (result.read && result.read !== result.write) {
|
|
1246
1721
|
await result.read.execute("SELECT 1");
|
|
1247
1722
|
}
|
|
1248
|
-
|
|
1249
|
-
|
|
1250
|
-
|
|
1251
|
-
|
|
1723
|
+
setWriteInstance(result.write);
|
|
1724
|
+
setReadInstance(result.read);
|
|
1725
|
+
setWriteClient(result.writeClient);
|
|
1726
|
+
setReadClient(result.readClient);
|
|
1252
1727
|
const hasReplica = result.read && result.read !== result.write;
|
|
1253
|
-
|
|
1728
|
+
dbLogger4.info(
|
|
1254
1729
|
hasReplica ? "Database connected (Primary + Replica)" : "Database connected"
|
|
1255
1730
|
);
|
|
1256
|
-
const healthCheckConfig =
|
|
1731
|
+
const healthCheckConfig = buildHealthCheckConfig(options?.healthCheck);
|
|
1257
1732
|
if (healthCheckConfig.enabled) {
|
|
1258
|
-
startHealthCheck(healthCheckConfig);
|
|
1259
|
-
}
|
|
1260
|
-
|
|
1261
|
-
|
|
1262
|
-
|
|
1263
|
-
|
|
1264
|
-
|
|
1733
|
+
startHealthCheck(healthCheckConfig, options, getDatabase, closeDatabase);
|
|
1734
|
+
}
|
|
1735
|
+
const monConfig = buildMonitoringConfig(options?.monitoring);
|
|
1736
|
+
setMonitoringConfig(monConfig);
|
|
1737
|
+
if (monConfig.enabled) {
|
|
1738
|
+
dbLogger4.info("Database query monitoring enabled", {
|
|
1739
|
+
slowThreshold: `${monConfig.slowThreshold}ms`,
|
|
1740
|
+
logQueries: monConfig.logQueries
|
|
1265
1741
|
});
|
|
1266
1742
|
}
|
|
1267
1743
|
} catch (error) {
|
|
1268
1744
|
const message = error instanceof Error ? error.message : "Unknown error";
|
|
1269
|
-
|
|
1745
|
+
dbLogger4.error("Database connection failed", { error: message });
|
|
1270
1746
|
await closeDatabase();
|
|
1271
|
-
|
|
1747
|
+
throw new Error(`Database connection test failed: ${message}`, { cause: error });
|
|
1272
1748
|
}
|
|
1273
1749
|
} else {
|
|
1274
|
-
|
|
1275
|
-
|
|
1750
|
+
dbLogger4.warn("No database configuration found");
|
|
1751
|
+
dbLogger4.warn("Set DATABASE_URL environment variable to enable database");
|
|
1276
1752
|
}
|
|
1277
|
-
return { write:
|
|
1753
|
+
return { write: getWriteInstance(), read: getReadInstance() };
|
|
1278
1754
|
}
|
|
1279
1755
|
async function closeDatabase() {
|
|
1280
|
-
|
|
1281
|
-
|
|
1756
|
+
const writeInst = getWriteInstance();
|
|
1757
|
+
const readInst = getReadInstance();
|
|
1758
|
+
if (!writeInst && !readInst) {
|
|
1759
|
+
dbLogger4.debug("No database connections to close");
|
|
1282
1760
|
return;
|
|
1283
1761
|
}
|
|
1284
1762
|
stopHealthCheck();
|
|
1285
1763
|
try {
|
|
1286
1764
|
const closePromises = [];
|
|
1287
|
-
|
|
1288
|
-
|
|
1765
|
+
const writeC = getWriteClient();
|
|
1766
|
+
if (writeC) {
|
|
1767
|
+
dbLogger4.debug("Closing write connection...");
|
|
1289
1768
|
closePromises.push(
|
|
1290
|
-
|
|
1769
|
+
writeC.end({ timeout: 5 }).then(() => dbLogger4.debug("Write connection closed")).catch((err) => dbLogger4.error("Error closing write connection", err))
|
|
1291
1770
|
);
|
|
1292
1771
|
}
|
|
1293
|
-
|
|
1294
|
-
|
|
1772
|
+
const readC = getReadClient();
|
|
1773
|
+
if (readC && readC !== writeC) {
|
|
1774
|
+
dbLogger4.debug("Closing read connection...");
|
|
1295
1775
|
closePromises.push(
|
|
1296
|
-
|
|
1776
|
+
readC.end({ timeout: 5 }).then(() => dbLogger4.debug("Read connection closed")).catch((err) => dbLogger4.error("Error closing read connection", err))
|
|
1297
1777
|
);
|
|
1298
1778
|
}
|
|
1299
1779
|
await Promise.all(closePromises);
|
|
1300
|
-
|
|
1780
|
+
dbLogger4.info("All database connections closed");
|
|
1301
1781
|
} catch (error) {
|
|
1302
|
-
|
|
1782
|
+
dbLogger4.error("Error during database cleanup", error);
|
|
1303
1783
|
throw error;
|
|
1304
1784
|
} finally {
|
|
1305
|
-
|
|
1306
|
-
|
|
1307
|
-
|
|
1308
|
-
|
|
1309
|
-
|
|
1785
|
+
setWriteInstance(void 0);
|
|
1786
|
+
setReadInstance(void 0);
|
|
1787
|
+
setWriteClient(void 0);
|
|
1788
|
+
setReadClient(void 0);
|
|
1789
|
+
setMonitoringConfig(void 0);
|
|
1310
1790
|
}
|
|
1311
1791
|
}
|
|
1312
1792
|
function getDatabaseInfo() {
|
|
1793
|
+
const writeInst = getWriteInstance();
|
|
1794
|
+
const readInst = getReadInstance();
|
|
1313
1795
|
return {
|
|
1314
|
-
hasWrite: !!
|
|
1315
|
-
hasRead: !!
|
|
1316
|
-
isReplica: !!(
|
|
1796
|
+
hasWrite: !!writeInst,
|
|
1797
|
+
hasRead: !!readInst,
|
|
1798
|
+
isReplica: !!(readInst && readInst !== writeInst)
|
|
1317
1799
|
};
|
|
1318
1800
|
}
|
|
1319
|
-
|
|
1320
|
-
|
|
1321
|
-
|
|
1322
|
-
|
|
1801
|
+
var dbLogger4;
|
|
1802
|
+
var init_manager = __esm({
|
|
1803
|
+
"src/db/manager/manager.ts"() {
|
|
1804
|
+
init_logger2();
|
|
1805
|
+
init_factory();
|
|
1806
|
+
init_config3();
|
|
1807
|
+
init_global_state();
|
|
1808
|
+
init_health_check();
|
|
1809
|
+
dbLogger4 = logger.child("database");
|
|
1323
1810
|
}
|
|
1324
|
-
|
|
1325
|
-
|
|
1326
|
-
|
|
1327
|
-
|
|
1328
|
-
|
|
1329
|
-
|
|
1330
|
-
|
|
1331
|
-
|
|
1332
|
-
|
|
1333
|
-
|
|
1334
|
-
|
|
1335
|
-
|
|
1336
|
-
|
|
1811
|
+
});
|
|
1812
|
+
|
|
1813
|
+
// src/db/manager/index.ts
|
|
1814
|
+
var init_manager2 = __esm({
|
|
1815
|
+
"src/db/manager/index.ts"() {
|
|
1816
|
+
init_factory();
|
|
1817
|
+
init_manager();
|
|
1818
|
+
init_connection();
|
|
1819
|
+
}
|
|
1820
|
+
});
|
|
1821
|
+
function expandGlobPattern(pattern) {
|
|
1822
|
+
if (!pattern.includes("*")) {
|
|
1823
|
+
return existsSync(pattern) ? [pattern] : [];
|
|
1824
|
+
}
|
|
1825
|
+
const files = [];
|
|
1826
|
+
if (pattern.includes("**")) {
|
|
1827
|
+
const [baseDir, ...rest] = pattern.split("**");
|
|
1828
|
+
const extension = rest.join("").replace(/[\/\\]\*\./g, "").trim();
|
|
1829
|
+
const scanRecursive = (dir) => {
|
|
1830
|
+
if (!existsSync(dir)) return;
|
|
1831
|
+
try {
|
|
1832
|
+
const entries = readdirSync(dir);
|
|
1833
|
+
for (const entry of entries) {
|
|
1834
|
+
const fullPath = join(dir, entry);
|
|
1835
|
+
try {
|
|
1836
|
+
const stat2 = statSync(fullPath);
|
|
1837
|
+
if (stat2.isDirectory()) {
|
|
1838
|
+
scanRecursive(fullPath);
|
|
1839
|
+
} else if (stat2.isFile()) {
|
|
1840
|
+
if (!extension || fullPath.endsWith(extension)) {
|
|
1841
|
+
files.push(fullPath);
|
|
1842
|
+
}
|
|
1843
|
+
}
|
|
1844
|
+
} catch {
|
|
1845
|
+
}
|
|
1846
|
+
}
|
|
1847
|
+
} catch {
|
|
1337
1848
|
}
|
|
1338
|
-
|
|
1339
|
-
|
|
1340
|
-
|
|
1341
|
-
|
|
1342
|
-
|
|
1343
|
-
|
|
1849
|
+
};
|
|
1850
|
+
scanRecursive(baseDir.trim() || ".");
|
|
1851
|
+
} else if (pattern.includes("*")) {
|
|
1852
|
+
const dir = dirname(pattern);
|
|
1853
|
+
const filePattern = basename(pattern);
|
|
1854
|
+
if (!existsSync(dir)) return [];
|
|
1855
|
+
try {
|
|
1856
|
+
const entries = readdirSync(dir);
|
|
1857
|
+
for (const entry of entries) {
|
|
1858
|
+
const fullPath = join(dir, entry);
|
|
1859
|
+
try {
|
|
1860
|
+
const stat2 = statSync(fullPath);
|
|
1861
|
+
if (stat2.isFile()) {
|
|
1862
|
+
if (filePattern === "*" || filePattern.startsWith("*.") && entry.endsWith(filePattern.slice(1))) {
|
|
1863
|
+
files.push(fullPath);
|
|
1864
|
+
}
|
|
1865
|
+
}
|
|
1866
|
+
} catch {
|
|
1867
|
+
}
|
|
1344
1868
|
}
|
|
1869
|
+
} catch {
|
|
1345
1870
|
}
|
|
1346
|
-
}
|
|
1871
|
+
}
|
|
1872
|
+
return files;
|
|
1347
1873
|
}
|
|
1348
|
-
|
|
1349
|
-
|
|
1350
|
-
|
|
1351
|
-
|
|
1352
|
-
|
|
1353
|
-
|
|
1874
|
+
function discoverPackageSchemas(cwd) {
|
|
1875
|
+
const schemas = [];
|
|
1876
|
+
const nodeModulesPath = join(cwd, "node_modules");
|
|
1877
|
+
if (!existsSync(nodeModulesPath)) {
|
|
1878
|
+
return schemas;
|
|
1879
|
+
}
|
|
1880
|
+
const projectPkgPath = join(cwd, "package.json");
|
|
1881
|
+
let directDeps = /* @__PURE__ */ new Set();
|
|
1882
|
+
if (existsSync(projectPkgPath)) {
|
|
1354
1883
|
try {
|
|
1355
|
-
|
|
1356
|
-
|
|
1357
|
-
|
|
1358
|
-
|
|
1359
|
-
|
|
1360
|
-
|
|
1361
|
-
|
|
1362
|
-
|
|
1363
|
-
|
|
1364
|
-
|
|
1365
|
-
|
|
1366
|
-
|
|
1884
|
+
const projectPkg = JSON.parse(readFileSync(projectPkgPath, "utf-8"));
|
|
1885
|
+
directDeps = /* @__PURE__ */ new Set([
|
|
1886
|
+
...Object.keys(projectPkg.dependencies || {}),
|
|
1887
|
+
...Object.keys(projectPkg.devDependencies || {})
|
|
1888
|
+
]);
|
|
1889
|
+
} catch (error) {
|
|
1890
|
+
}
|
|
1891
|
+
}
|
|
1892
|
+
const checkPackage = (_pkgName, pkgPath) => {
|
|
1893
|
+
const pkgJsonPath = join(pkgPath, "package.json");
|
|
1894
|
+
if (!existsSync(pkgJsonPath)) return;
|
|
1895
|
+
try {
|
|
1896
|
+
const pkgJson = JSON.parse(readFileSync(pkgJsonPath, "utf-8"));
|
|
1897
|
+
if (pkgJson.spfn?.schemas) {
|
|
1898
|
+
const packageSchemas = Array.isArray(pkgJson.spfn.schemas) ? pkgJson.spfn.schemas : [pkgJson.spfn.schemas];
|
|
1899
|
+
for (const schema of packageSchemas) {
|
|
1900
|
+
const absolutePath = join(pkgPath, schema);
|
|
1901
|
+
const expandedFiles = expandGlobPattern(absolutePath);
|
|
1902
|
+
const schemaFiles = expandedFiles.filter(
|
|
1903
|
+
(file) => !file.endsWith("/index.js") && !file.endsWith("/index.ts") && !file.endsWith("/index.mjs") && !file.endsWith("\\index.js") && !file.endsWith("\\index.ts") && !file.endsWith("\\index.mjs")
|
|
1904
|
+
);
|
|
1905
|
+
schemas.push(...schemaFiles);
|
|
1906
|
+
}
|
|
1367
1907
|
}
|
|
1368
1908
|
} catch (error) {
|
|
1369
|
-
|
|
1370
|
-
|
|
1371
|
-
|
|
1372
|
-
|
|
1373
|
-
|
|
1374
|
-
|
|
1375
|
-
|
|
1376
|
-
|
|
1909
|
+
}
|
|
1910
|
+
};
|
|
1911
|
+
const spfnDir = join(nodeModulesPath, "@spfn");
|
|
1912
|
+
if (existsSync(spfnDir)) {
|
|
1913
|
+
try {
|
|
1914
|
+
const spfnPackages = readdirSync(spfnDir);
|
|
1915
|
+
for (const pkg of spfnPackages) {
|
|
1916
|
+
checkPackage(`@spfn/${pkg}`, join(spfnDir, pkg));
|
|
1377
1917
|
}
|
|
1918
|
+
} catch (error) {
|
|
1378
1919
|
}
|
|
1379
1920
|
}
|
|
1921
|
+
for (const depName of directDeps) {
|
|
1922
|
+
if (depName.startsWith("@spfn/")) continue;
|
|
1923
|
+
const pkgPath = depName.startsWith("@") ? join(nodeModulesPath, ...depName.split("/")) : join(nodeModulesPath, depName);
|
|
1924
|
+
checkPackage(depName, pkgPath);
|
|
1925
|
+
}
|
|
1926
|
+
return schemas;
|
|
1380
1927
|
}
|
|
1381
|
-
function
|
|
1382
|
-
if (
|
|
1383
|
-
|
|
1384
|
-
|
|
1385
|
-
|
|
1928
|
+
+function detectDialect(url) {
+  if (url.startsWith("postgres://") || url.startsWith("postgresql://")) {
+    return "postgresql";
+  }
+  if (url.startsWith("mysql://")) {
+    return "mysql";
+  }
+  if (url.startsWith("sqlite://") || url.includes(".db") || url.includes(".sqlite")) {
+    return "sqlite";
+  }
+  throw new Error(
+    `Unsupported database URL format: ${url}. Supported: postgresql://, mysql://, sqlite://`
+  );
+}
+function getDrizzleConfig(options = {}) {
+  const databaseUrl = options.databaseUrl ?? process.env.DATABASE_URL;
+  if (!databaseUrl) {
+    throw new Error(
+      "DATABASE_URL is required. Set it in .env or pass it to getDrizzleConfig()"
+    );
+  }
+  const dialect = options.dialect ?? detectDialect(databaseUrl);
+  const out = options.out ?? "./src/server/drizzle";
+  if (options.packageFilter) {
+    const packageSchemas2 = options.disablePackageDiscovery ? [] : discoverPackageSchemas(options.cwd ?? process.cwd());
+    const filteredSchemas = packageSchemas2.filter(
+      (schemaPath) => schemaPath.includes(`node_modules/${options.packageFilter}/`)
+    );
+    if (filteredSchemas.length === 0) {
+      throw new Error(
+        `No schemas found for package ${options.packageFilter}. Make sure the package is installed and has "spfn.schemas" in package.json.`
+      );
+    }
+    const schema2 = filteredSchemas.length === 1 ? filteredSchemas[0] : filteredSchemas;
+    return {
+      schema: schema2,
+      out,
+      dialect,
+      dbCredentials: getDbCredentials(dialect, databaseUrl)
+    };
+  }
+  const userSchema = options.schema ?? "./src/server/entities/**/*.ts";
+  const userSchemas = Array.isArray(userSchema) ? userSchema : [userSchema];
+  const packageSchemas = options.disablePackageDiscovery ? [] : discoverPackageSchemas(options.cwd ?? process.cwd());
+  const allSchemas = [...userSchemas, ...packageSchemas];
+  const schema = allSchemas.length === 1 ? allSchemas[0] : allSchemas;
+  return {
+    schema,
+    out,
+    dialect,
+    dbCredentials: getDbCredentials(dialect, databaseUrl)
+  };
+}
+function getDbCredentials(dialect, url) {
+  switch (dialect) {
+    case "postgresql":
+    case "mysql":
+      return { url };
+    case "sqlite":
+      const dbPath = url.replace("sqlite://", "").replace("sqlite:", "");
+      return { url: dbPath };
+    default:
+      throw new Error(`Unsupported dialect: ${dialect}`);
   }
 }
-function
-[old line 1389 removed; its content is not rendered in this diff view]
+function generateDrizzleConfigFile(options = {}) {
+  const config = getDrizzleConfig(options);
+  const schemaValue = Array.isArray(config.schema) ? `[
+    ${config.schema.map((s) => `'${s}'`).join(",\n ")}
+  ]` : `'${config.schema}'`;
+  return `import { defineConfig } from 'drizzle-kit';
+
+export default defineConfig({
+    schema: ${schemaValue},
+    out: '${config.out}',
+    dialect: '${config.dialect}',
+    dbCredentials: ${JSON.stringify(config.dbCredentials, null, 4)},
+});
+`;
 }
-var
-[old lines 1392-1396 removed; their content is not rendered in this diff view]
+var init_config_generator = __esm({
+  "src/db/manager/config-generator.ts"() {
+  }
+});
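Taken together, `getDrizzleConfig`/`generateDrizzleConfigFile` above assemble a drizzle-kit config from `DATABASE_URL`, the default `./src/server/entities/**/*.ts` glob, and any package-declared schemas. A hedged usage sketch; the import path and re-export are assumptions, not confirmed by this diff:

```ts
// drizzle.config.ts — hypothetical wiring; "@spfn/core/db" as the import path is an assumption.
import { defineConfig } from "drizzle-kit";
import { getDrizzleConfig } from "@spfn/core/db";

// Reads DATABASE_URL (or an explicit databaseUrl option), infers the dialect from the URL scheme,
// and merges user entity globs with schemas declared under "spfn.schemas" in installed packages.
export default defineConfig(getDrizzleConfig({ out: "./src/server/drizzle" }));
```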
|
|
2012
|
+
+function id() {
+  return bigserial("id", { mode: "number" }).primaryKey();
+}
+function timestamps(options) {
+  const updatedAtColumn = timestamp("updated_at", { withTimezone: true, mode: "date" }).defaultNow().notNull();
+  if (options?.autoUpdate) {
+    updatedAtColumn.__autoUpdate = true;
+  }
+  return {
+    createdAt: timestamp("created_at", { withTimezone: true, mode: "date" }).defaultNow().notNull(),
+    updatedAt: updatedAtColumn
+  };
+}
+function foreignKey(name, reference, options) {
+  return bigserial(`${name}_id`, { mode: "number" }).notNull().references(reference, { onDelete: options?.onDelete ?? "cascade" });
+}
+function optionalForeignKey(name, reference, options) {
+  return bigserial(`${name}_id`, { mode: "number" }).references(reference, { onDelete: options?.onDelete ?? "set null" });
+}
+var init_helpers = __esm({
+  "src/db/schema/helpers.ts"() {
   }
 });
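The schema helpers above wrap drizzle-orm's pg-core column builders (`bigserial`, `timestamp`). A hedged example of how a consumer might define tables with them; the import paths and table names are placeholders:

```ts
// Hypothetical entity definitions; "@spfn/core/db" and the table shapes are assumptions for illustration.
import { pgTable, text } from "drizzle-orm/pg-core";
import { id, timestamps, foreignKey } from "@spfn/core/db";

export const users = pgTable("users", {
  id: id(),                               // bigserial("id").primaryKey()
  email: text("email").notNull(),
  ...timestamps({ autoUpdate: true })     // created_at / updated_at, updated_at flagged for auto-update
});

export const posts = pgTable("posts", {
  id: id(),
  userId: foreignKey("user", () => users.id),  // "user_id", NOT NULL, ON DELETE CASCADE
  title: text("title").notNull(),
  ...timestamps()
});
```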
|
|
1399
2035
|
|
|
1400
|
-
-// src/db/
-[old lines 1401-1403 removed; their content is not rendered in this diff view]
-      throw new Error(
-        "Database not initialized. Set DATABASE_URL environment variable or call initDatabase() first."
-      );
+// src/db/schema/index.ts
+var init_schema = __esm({
+  "src/db/schema/index.ts"() {
+    init_helpers();
   }
-[old line 1408 removed; its content is not rendered in this diff view]
+});
+function createFunctionSchema(packageName) {
+  const schemaName = packageNameToSchema(packageName);
+  return pgSchema(schemaName);
 }
-[old lines 1410-1424 removed; their content is not rendered in this diff view]
+function packageNameToSchema(packageName) {
+  return packageName.replace("@", "").replace("/", "_").replace(/-/g, "_");
+}
+function getSchemaInfo(packageName) {
+  const isScoped = packageName.startsWith("@");
+  const scope = isScoped ? packageName.split("/")[0].substring(1) : null;
+  const schemaName = packageNameToSchema(packageName);
+  return {
+    schemaName,
+    isScoped,
+    scope
+  };
+}
+var init_schema_helper = __esm({
+  "src/db/schema-helper.ts"() {
   }
 });
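`packageNameToSchema` strips the `@` scope marker and turns `/` and `-` into underscores, so each function package gets its own PostgreSQL schema via `pgSchema`. A quick hedged illustration; the import path and package names are examples only:

```ts
// Assumed import path; inputs are example package names, not real dependencies.
import { packageNameToSchema, getSchemaInfo } from "@spfn/core/db";

packageNameToSchema("@spfn/auth-core"); // -> "spfn_auth_core"
packageNameToSchema("my-plugin");       // -> "my_plugin"

getSchemaInfo("@spfn/auth-core");
// -> { schemaName: "spfn_auth_core", isScoped: true, scope: "spfn" }
```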
|
|
2063
|
+
+function getTransactionContext() {
+  return asyncContext.getStore() ?? null;
+}
 function getTransaction() {
-  const context =
+  const context = getTransactionContext();
   return context?.tx ?? null;
 }
-function runWithTransaction(tx, callback) {
-[old line 1432 removed; its content is not rendered in this diff view]
+function runWithTransaction(tx, txId, callback) {
+  const existingContext = getTransactionContext();
+  const newLevel = existingContext ? existingContext.level + 1 : 1;
+  if (existingContext) {
+    txLogger.info("Nested transaction started (SAVEPOINT)", {
+      outerTxId: existingContext.txId,
+      innerTxId: txId,
+      level: newLevel
+    });
+  } else {
+    txLogger.debug("Root transaction context set", { txId, level: newLevel });
+  }
+  return asyncContext.run({ tx, txId, level: newLevel }, callback);
 }
-var asyncContext;
+var txLogger, asyncContext;
 var init_context = __esm({
   "src/db/transaction/context.ts"() {
+    init_logger2();
+    txLogger = logger.child("transaction");
     asyncContext = new AsyncLocalStorage();
   }
 });
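The reworked context module stores `{ tx, txId, level }` in `AsyncLocalStorage`, so anything awaited inside the callback can pick up the ambient transaction through `getTransaction()`, and nested calls are logged as SAVEPOINT-level transactions. A rough, hedged sketch of how the `Transactional` middleware further down drives it; names and shapes here are placeholders, not the package's exact exports:

```ts
// Sketch only: runWithTransaction stands in for the helper defined above, and `writeDb` for the
// drizzle write instance returned by getDatabase("write").
import { randomUUID } from "node:crypto";

declare function runWithTransaction<T>(tx: unknown, txId: string, cb: () => Promise<T>): Promise<T>;

async function withRequestTransaction<T>(
  writeDb: { transaction: <R>(fn: (tx: unknown) => Promise<R>) => Promise<R> },
  handler: () => Promise<T>
): Promise<T> {
  const txId = `tx_${randomUUID()}`;
  return writeDb.transaction(async (tx) =>
    // Everything awaited inside `handler` sees this tx via getTransaction().
    runWithTransaction(tx, txId, handler)
  );
}
```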
|
|
@@ -1444,17 +2096,25 @@ function Transactional(options = {}) {
|
|
|
1444
2096
|
enableLogging = true,
|
|
1445
2097
|
timeout = defaultTimeout
|
|
1446
2098
|
} = options;
|
|
1447
|
-
const
|
|
2099
|
+
const txLogger2 = logger.child("transaction");
|
|
1448
2100
|
return createMiddleware(async (c, next) => {
|
|
1449
|
-
const txId = `tx_${
|
|
2101
|
+
const txId = `tx_${randomUUID()}`;
|
|
1450
2102
|
const startTime = Date.now();
|
|
1451
2103
|
const route = `${c.req.method} ${c.req.path}`;
|
|
1452
2104
|
if (enableLogging) {
|
|
1453
|
-
|
|
2105
|
+
txLogger2.debug("Transaction started", { txId, route });
|
|
1454
2106
|
}
|
|
1455
2107
|
try {
|
|
1456
|
-
const
|
|
1457
|
-
|
|
2108
|
+
const writeDb = getDatabase("write");
|
|
2109
|
+
if (!writeDb) {
|
|
2110
|
+
throw new TransactionError(
|
|
2111
|
+
"Database not initialized. Cannot start transaction.",
|
|
2112
|
+
500,
|
|
2113
|
+
{ txId, route }
|
|
2114
|
+
);
|
|
2115
|
+
}
|
|
2116
|
+
const transactionPromise = writeDb.transaction(async (tx) => {
|
|
2117
|
+
await runWithTransaction(tx, txId, async () => {
|
|
1458
2118
|
await next();
|
|
1459
2119
|
const contextWithError = c;
|
|
1460
2120
|
if (contextWithError.error) {
|
|
@@ -1485,14 +2145,14 @@ function Transactional(options = {}) {
|
|
|
1485
2145
|
const duration = Date.now() - startTime;
|
|
1486
2146
|
if (enableLogging) {
|
|
1487
2147
|
if (duration >= slowThreshold) {
|
|
1488
|
-
|
|
2148
|
+
txLogger2.warn("Slow transaction committed", {
|
|
1489
2149
|
txId,
|
|
1490
2150
|
route,
|
|
1491
2151
|
duration: `${duration}ms`,
|
|
1492
2152
|
threshold: `${slowThreshold}ms`
|
|
1493
2153
|
});
|
|
1494
2154
|
} else {
|
|
1495
|
-
|
|
2155
|
+
txLogger2.debug("Transaction committed", {
|
|
1496
2156
|
txId,
|
|
1497
2157
|
route,
|
|
1498
2158
|
duration: `${duration}ms`
|
|
@@ -1503,7 +2163,7 @@ function Transactional(options = {}) {
|
|
|
1503
2163
|
const duration = Date.now() - startTime;
|
|
1504
2164
|
const customError = error instanceof TransactionError ? error : fromPostgresError(error);
|
|
1505
2165
|
if (enableLogging) {
|
|
1506
|
-
|
|
2166
|
+
txLogger2.error("Transaction rolled back", {
|
|
1507
2167
|
txId,
|
|
1508
2168
|
route,
|
|
1509
2169
|
duration: `${duration}ms`,
|
|
@@ -1517,9 +2177,9 @@ function Transactional(options = {}) {
|
|
|
1517
2177
|
}
|
|
1518
2178
|
var init_middleware = __esm({
|
|
1519
2179
|
"src/db/transaction/middleware.ts"() {
|
|
1520
|
-
init_db();
|
|
1521
|
-
init_context();
|
|
1522
2180
|
init_logger2();
|
|
2181
|
+
init_manager2();
|
|
2182
|
+
init_context();
|
|
1523
2183
|
init_errors();
|
|
1524
2184
|
init_postgres_errors();
|
|
1525
2185
|
}
|
|
@@ -1532,1048 +2192,459 @@ var init_transaction = __esm({
|
|
|
1532
2192
|
init_middleware();
|
|
1533
2193
|
}
|
|
1534
2194
|
});
|
|
1535
|
-
function
|
|
1536
|
-
|
|
1537
|
-
|
|
1538
|
-
|
|
1539
|
-
|
|
1540
|
-
|
|
1541
|
-
|
|
2195
|
+
function isSQLWrapper(value) {
|
|
2196
|
+
return value && typeof value === "object" && "queryChunks" in value;
|
|
2197
|
+
}
|
|
2198
|
+
function buildWhereFromObject(table, where) {
|
|
2199
|
+
const entries = Object.entries(where).filter(([_, value]) => value !== void 0);
|
|
2200
|
+
if (entries.length === 0) return void 0;
|
|
2201
|
+
const conditions = entries.map(
|
|
2202
|
+
([key, value]) => eq(table[key], value)
|
|
2203
|
+
);
|
|
2204
|
+
return conditions.length === 1 ? conditions[0] : and(...conditions);
|
|
2205
|
+
}
|
|
2206
|
+
async function findOne(table, where) {
|
|
2207
|
+
const db = getDatabase("read");
|
|
2208
|
+
if (!db) {
|
|
2209
|
+
throw new Error("Database not initialized. Call initDatabase() first.");
|
|
2210
|
+
}
|
|
2211
|
+
const whereClause = isSQLWrapper(where) ? where : where ? buildWhereFromObject(table, where) : void 0;
|
|
2212
|
+
if (!whereClause) {
|
|
2213
|
+
throw new Error("findOne requires at least one where condition");
|
|
2214
|
+
}
|
|
2215
|
+
const results = await db.select().from(table).where(whereClause).limit(1);
|
|
2216
|
+
return results[0] ?? null;
|
|
2217
|
+
}
|
|
2218
|
+
async function findMany(table, options) {
|
|
2219
|
+
const db = getDatabase("read");
|
|
2220
|
+
if (!db) {
|
|
2221
|
+
throw new Error("Database not initialized. Call initDatabase() first.");
|
|
2222
|
+
}
|
|
2223
|
+
let query = db.select().from(table);
|
|
2224
|
+
if (options?.where) {
|
|
2225
|
+
const whereClause = isSQLWrapper(options.where) ? options.where : options.where ? buildWhereFromObject(table, options.where) : void 0;
|
|
2226
|
+
if (whereClause) {
|
|
2227
|
+
query = query.where(whereClause);
|
|
1542
2228
|
}
|
|
1543
|
-
for (const [operator, value] of Object.entries(filterCondition)) {
|
|
1544
|
-
const condition = buildCondition(column, operator, value);
|
|
1545
|
-
if (condition) {
|
|
1546
|
-
conditions.push(condition);
|
|
1547
|
-
}
|
|
1548
|
-
}
|
|
1549
|
-
}
|
|
1550
|
-
return conditions.length > 0 ? and(...conditions) : void 0;
|
|
1551
|
-
}
|
|
1552
|
-
function buildCondition(column, operator, value) {
|
|
1553
|
-
switch (operator) {
|
|
1554
|
-
case "eq":
|
|
1555
|
-
return eq(column, value);
|
|
1556
|
-
case "ne":
|
|
1557
|
-
return ne(column, value);
|
|
1558
|
-
case "gt":
|
|
1559
|
-
return gt(column, value);
|
|
1560
|
-
case "gte":
|
|
1561
|
-
return gte(column, value);
|
|
1562
|
-
case "lt":
|
|
1563
|
-
return lt(column, value);
|
|
1564
|
-
case "lte":
|
|
1565
|
-
return lte(column, value);
|
|
1566
|
-
case "like":
|
|
1567
|
-
return like(column, `%${value}%`);
|
|
1568
|
-
case "in":
|
|
1569
|
-
if (Array.isArray(value)) {
|
|
1570
|
-
return inArray(column, value);
|
|
1571
|
-
}
|
|
1572
|
-
console.warn(`[buildCondition] 'in' operator requires array value`);
|
|
1573
|
-
return void 0;
|
|
1574
|
-
case "nin":
|
|
1575
|
-
if (Array.isArray(value)) {
|
|
1576
|
-
return notInArray(column, value);
|
|
1577
|
-
}
|
|
1578
|
-
console.warn(`[buildCondition] 'nin' operator requires array value`);
|
|
1579
|
-
return void 0;
|
|
1580
|
-
case "is":
|
|
1581
|
-
if (value === "null") return isNull(column);
|
|
1582
|
-
if (value === "notnull") return isNotNull(column);
|
|
1583
|
-
console.warn(`[buildCondition] 'is' operator requires 'null' or 'notnull'`);
|
|
1584
|
-
return void 0;
|
|
1585
|
-
default:
|
|
1586
|
-
console.warn(`[buildCondition] Unknown operator: ${operator}`);
|
|
1587
|
-
return void 0;
|
|
1588
2229
|
}
|
|
2230
|
+
if (options?.orderBy) {
|
|
2231
|
+
const orderByArray = Array.isArray(options.orderBy) ? options.orderBy : [options.orderBy];
|
|
2232
|
+
query = query.orderBy(...orderByArray);
|
|
2233
|
+
}
|
|
2234
|
+
if (options?.limit) {
|
|
2235
|
+
query = query.limit(options.limit);
|
|
2236
|
+
}
|
|
2237
|
+
if (options?.offset) {
|
|
2238
|
+
query = query.offset(options.offset);
|
|
2239
|
+
}
|
|
2240
|
+
return query;
|
|
1589
2241
|
}
|
|
1590
|
-
function
|
|
1591
|
-
const
|
|
1592
|
-
|
|
1593
|
-
|
|
1594
|
-
if (!column) {
|
|
1595
|
-
console.warn(`[buildSort] Unknown field: ${field}`);
|
|
1596
|
-
continue;
|
|
1597
|
-
}
|
|
1598
|
-
const clause = direction === "desc" ? desc(column) : asc(column);
|
|
1599
|
-
orderByClauses.push(clause);
|
|
2242
|
+
async function create(table, data) {
|
|
2243
|
+
const db = getDatabase("write");
|
|
2244
|
+
if (!db) {
|
|
2245
|
+
throw new Error("Database not initialized. Call initDatabase() first.");
|
|
1600
2246
|
}
|
|
1601
|
-
|
|
2247
|
+
const [result] = await db.insert(table).values(data).returning();
|
|
2248
|
+
return result;
|
|
1602
2249
|
}
|
|
1603
|
-
function
|
|
1604
|
-
const
|
|
1605
|
-
|
|
1606
|
-
|
|
2250
|
+
async function createMany(table, data) {
|
|
2251
|
+
const db = getDatabase("write");
|
|
2252
|
+
if (!db) {
|
|
2253
|
+
throw new Error("Database not initialized. Call initDatabase() first.");
|
|
2254
|
+
}
|
|
2255
|
+
const results = await db.insert(table).values(data).returning();
|
|
2256
|
+
return results;
|
|
1607
2257
|
}
|
|
1608
|
-
function
|
|
1609
|
-
const
|
|
1610
|
-
|
|
1611
|
-
|
|
1612
|
-
|
|
1613
|
-
|
|
1614
|
-
|
|
1615
|
-
|
|
1616
|
-
|
|
1617
|
-
|
|
1618
|
-
};
|
|
2258
|
+
async function upsert(table, data, options) {
|
|
2259
|
+
const db = getDatabase("write");
|
|
2260
|
+
if (!db) {
|
|
2261
|
+
throw new Error("Database not initialized. Call initDatabase() first.");
|
|
2262
|
+
}
|
|
2263
|
+
const [result] = await db.insert(table).values(data).onConflictDoUpdate({
|
|
2264
|
+
target: options.target,
|
|
2265
|
+
set: options.set || data
|
|
2266
|
+
}).returning();
|
|
2267
|
+
return result;
|
|
1619
2268
|
}
|
|
1620
|
-
async function
|
|
1621
|
-
const
|
|
1622
|
-
if (
|
|
1623
|
-
|
|
2269
|
+
async function updateOne(table, where, data) {
|
|
2270
|
+
const db = getDatabase("write");
|
|
2271
|
+
if (!db) {
|
|
2272
|
+
throw new Error("Database not initialized. Call initDatabase() first.");
|
|
2273
|
+
}
|
|
2274
|
+
const whereClause = isSQLWrapper(where) ? where : where ? buildWhereFromObject(table, where) : void 0;
|
|
2275
|
+
if (!whereClause) {
|
|
2276
|
+
throw new Error("updateOne requires at least one where condition");
|
|
1624
2277
|
}
|
|
1625
|
-
const [result] = await
|
|
1626
|
-
return result
|
|
2278
|
+
const [result] = await db.update(table).set(data).where(whereClause).returning();
|
|
2279
|
+
return result ?? null;
|
|
1627
2280
|
}
|
|
1628
|
-
|
|
1629
|
-
|
|
2281
|
+
async function updateMany(table, where, data) {
|
|
2282
|
+
const db = getDatabase("write");
|
|
2283
|
+
if (!db) {
|
|
2284
|
+
throw new Error("Database not initialized. Call initDatabase() first.");
|
|
1630
2285
|
}
|
|
1631
|
-
|
|
1632
|
-
|
|
1633
|
-
|
|
1634
|
-
var QueryBuilder;
|
|
1635
|
-
var init_query_builder = __esm({
|
|
1636
|
-
"src/db/repository/query-builder.ts"() {
|
|
1637
|
-
init_filters();
|
|
1638
|
-
QueryBuilder = class {
|
|
1639
|
-
db;
|
|
1640
|
-
table;
|
|
1641
|
-
filterConditions = [];
|
|
1642
|
-
sortConditions = [];
|
|
1643
|
-
limitValue;
|
|
1644
|
-
offsetValue;
|
|
1645
|
-
constructor(db2, table) {
|
|
1646
|
-
this.db = db2;
|
|
1647
|
-
this.table = table;
|
|
1648
|
-
}
|
|
1649
|
-
/**
|
|
1650
|
-
* Add WHERE conditions
|
|
1651
|
-
*
|
|
1652
|
-
* Multiple where() calls are combined with AND logic.
|
|
1653
|
-
*
|
|
1654
|
-
* @param filters - Filter conditions
|
|
1655
|
-
* @returns QueryBuilder for chaining
|
|
1656
|
-
*
|
|
1657
|
-
* @example
|
|
1658
|
-
* ```typescript
|
|
1659
|
-
* query
|
|
1660
|
-
* .where({ status: 'active' })
|
|
1661
|
-
* .where({ role: 'admin' }) // AND condition
|
|
1662
|
-
* ```
|
|
1663
|
-
*/
|
|
1664
|
-
where(filters) {
|
|
1665
|
-
this.filterConditions.push(filters);
|
|
1666
|
-
return this;
|
|
1667
|
-
}
|
|
1668
|
-
/**
|
|
1669
|
-
* Add ORDER BY clause
|
|
1670
|
-
*
|
|
1671
|
-
* Multiple orderBy() calls create multi-column sorting.
|
|
1672
|
-
*
|
|
1673
|
-
* @param field - Field name to sort by
|
|
1674
|
-
* @param direction - Sort direction ('asc' or 'desc')
|
|
1675
|
-
* @returns QueryBuilder for chaining
|
|
1676
|
-
*
|
|
1677
|
-
* @example
|
|
1678
|
-
* ```typescript
|
|
1679
|
-
* query
|
|
1680
|
-
* .orderBy('isPremium', 'desc')
|
|
1681
|
-
* .orderBy('createdAt', 'desc')
|
|
1682
|
-
* ```
|
|
1683
|
-
*/
|
|
1684
|
-
orderBy(field, direction = "asc") {
|
|
1685
|
-
this.sortConditions.push({ field, direction });
|
|
1686
|
-
return this;
|
|
1687
|
-
}
|
|
1688
|
-
/**
|
|
1689
|
-
* Set LIMIT clause
|
|
1690
|
-
*
|
|
1691
|
-
* @param limit - Maximum number of records to return
|
|
1692
|
-
* @returns QueryBuilder for chaining
|
|
1693
|
-
*
|
|
1694
|
-
* @example
|
|
1695
|
-
* ```typescript
|
|
1696
|
-
* query.limit(10)
|
|
1697
|
-
* ```
|
|
1698
|
-
*/
|
|
1699
|
-
limit(limit) {
|
|
1700
|
-
this.limitValue = limit;
|
|
1701
|
-
return this;
|
|
1702
|
-
}
|
|
1703
|
-
/**
|
|
1704
|
-
* Set OFFSET clause
|
|
1705
|
-
*
|
|
1706
|
-
* @param offset - Number of records to skip
|
|
1707
|
-
* @returns QueryBuilder for chaining
|
|
1708
|
-
*
|
|
1709
|
-
* @example
|
|
1710
|
-
* ```typescript
|
|
1711
|
-
* query.offset(20)
|
|
1712
|
-
* ```
|
|
1713
|
-
*/
|
|
1714
|
-
offset(offset) {
|
|
1715
|
-
this.offsetValue = offset;
|
|
1716
|
-
return this;
|
|
1717
|
-
}
|
|
1718
|
-
/**
|
|
1719
|
-
* Execute query and return multiple records
|
|
1720
|
-
*
|
|
1721
|
-
* @returns Array of records
|
|
1722
|
-
*
|
|
1723
|
-
* @example
|
|
1724
|
-
* ```typescript
|
|
1725
|
-
* const users = await query
|
|
1726
|
-
* .where({ status: 'active' })
|
|
1727
|
-
* .orderBy('createdAt', 'desc')
|
|
1728
|
-
* .limit(10)
|
|
1729
|
-
* .findMany();
|
|
1730
|
-
* ```
|
|
1731
|
-
*/
|
|
1732
|
-
async findMany() {
|
|
1733
|
-
const mergedFilters = this.mergeFilters();
|
|
1734
|
-
const whereCondition = buildFilters(mergedFilters, this.table);
|
|
1735
|
-
const orderBy = buildSort(this.sortConditions, this.table);
|
|
1736
|
-
let query = this.db.select().from(this.table).where(whereCondition).orderBy(...orderBy);
|
|
1737
|
-
if (this.limitValue !== void 0) {
|
|
1738
|
-
query = query.limit(this.limitValue);
|
|
1739
|
-
}
|
|
1740
|
-
if (this.offsetValue !== void 0) {
|
|
1741
|
-
query = query.offset(this.offsetValue);
|
|
1742
|
-
}
|
|
1743
|
-
return query;
|
|
1744
|
-
}
|
|
1745
|
-
/**
|
|
1746
|
-
* Execute query and return first record
|
|
1747
|
-
*
|
|
1748
|
-
* @returns First matching record or null
|
|
1749
|
-
*
|
|
1750
|
-
* @example
|
|
1751
|
-
* ```typescript
|
|
1752
|
-
* const user = await query
|
|
1753
|
-
* .where({ email: 'john@example.com' })
|
|
1754
|
-
* .findOne();
|
|
1755
|
-
* ```
|
|
1756
|
-
*/
|
|
1757
|
-
async findOne() {
|
|
1758
|
-
const results = await this.limit(1).findMany();
|
|
1759
|
-
return results[0] ?? null;
|
|
1760
|
-
}
|
|
1761
|
-
/**
|
|
1762
|
-
* Execute query and return count
|
|
1763
|
-
*
|
|
1764
|
-
* @returns Number of matching records
|
|
1765
|
-
*
|
|
1766
|
-
* @example
|
|
1767
|
-
* ```typescript
|
|
1768
|
-
* const count = await query
|
|
1769
|
-
* .where({ status: 'active' })
|
|
1770
|
-
* .count();
|
|
1771
|
-
* ```
|
|
1772
|
-
*/
|
|
1773
|
-
async count() {
|
|
1774
|
-
const mergedFilters = this.mergeFilters();
|
|
1775
|
-
const whereCondition = buildFilters(mergedFilters, this.table);
|
|
1776
|
-
const { count } = await import('drizzle-orm');
|
|
1777
|
-
const result = await this.db.select({ count: count() }).from(this.table).where(whereCondition);
|
|
1778
|
-
return Number(result[0]?.count ?? 0);
|
|
1779
|
-
}
|
|
1780
|
-
/**
|
|
1781
|
-
* Merge multiple filter conditions into single object
|
|
1782
|
-
*
|
|
1783
|
-
* Combines all where() calls into one filter object.
|
|
1784
|
-
*/
|
|
1785
|
-
mergeFilters() {
|
|
1786
|
-
if (this.filterConditions.length === 0) {
|
|
1787
|
-
return {};
|
|
1788
|
-
}
|
|
1789
|
-
return this.filterConditions.reduce((merged, current) => {
|
|
1790
|
-
return { ...merged, ...current };
|
|
1791
|
-
}, {});
|
|
1792
|
-
}
|
|
1793
|
-
};
|
|
1794
|
-
}
|
|
1795
|
-
});
|
|
1796
|
-
|
|
1797
|
-
// src/db/repository/repository.ts
|
|
1798
|
-
var Repository;
|
|
1799
|
-
var init_repository = __esm({
|
|
1800
|
-
"src/db/repository/repository.ts"() {
|
|
1801
|
-
init_filters();
|
|
1802
|
-
init_manager2();
|
|
1803
|
-
init_transaction();
|
|
1804
|
-
init_errors();
|
|
1805
|
-
init_query_builder();
|
|
1806
|
-
init_logger2();
|
|
1807
|
-
Repository = class {
|
|
1808
|
-
db;
|
|
1809
|
-
table;
|
|
1810
|
-
useReplica;
|
|
1811
|
-
explicitDb;
|
|
1812
|
-
// Track if db was explicitly provided
|
|
1813
|
-
autoUpdateField;
|
|
1814
|
-
// Field name to auto-update (e.g., 'updatedAt', 'modifiedAt')
|
|
1815
|
-
constructor(dbOrTable, tableOrUseReplica, useReplica = true) {
|
|
1816
|
-
if ("name" in dbOrTable && typeof dbOrTable.name === "string") {
|
|
1817
|
-
this.db = getRawDb("write");
|
|
1818
|
-
this.table = dbOrTable;
|
|
1819
|
-
this.useReplica = typeof tableOrUseReplica === "boolean" ? tableOrUseReplica : true;
|
|
1820
|
-
this.explicitDb = void 0;
|
|
1821
|
-
} else {
|
|
1822
|
-
this.db = dbOrTable;
|
|
1823
|
-
this.table = tableOrUseReplica;
|
|
1824
|
-
this.useReplica = useReplica;
|
|
1825
|
-
this.explicitDb = this.db;
|
|
1826
|
-
}
|
|
1827
|
-
this.autoUpdateField = this.detectAutoUpdateField();
|
|
1828
|
-
}
|
|
1829
|
-
/**
|
|
1830
|
-
* Detect which field (if any) should be auto-updated
|
|
1831
|
-
*
|
|
1832
|
-
* Checks all table columns for __autoUpdate metadata flag.
|
|
1833
|
-
* Set by autoUpdateTimestamp() or timestamps({ autoUpdate: true }) helpers.
|
|
1834
|
-
*
|
|
1835
|
-
* @returns Field name to auto-update, or undefined if none found
|
|
1836
|
-
*/
|
|
1837
|
-
detectAutoUpdateField() {
|
|
1838
|
-
if (!this.table || typeof this.table !== "object") {
|
|
1839
|
-
return void 0;
|
|
1840
|
-
}
|
|
1841
|
-
const tableColumns = this.table;
|
|
1842
|
-
for (const [fieldName, column] of Object.entries(tableColumns)) {
|
|
1843
|
-
if (fieldName.startsWith("_") || fieldName.startsWith("$")) {
|
|
1844
|
-
continue;
|
|
1845
|
-
}
|
|
1846
|
-
if (column && typeof column === "object" && column.__autoUpdate === true) {
|
|
1847
|
-
return fieldName;
|
|
1848
|
-
}
|
|
1849
|
-
}
|
|
1850
|
-
return void 0;
|
|
1851
|
-
}
|
|
1852
|
-
/**
|
|
1853
|
-
* Inject auto-update timestamp if configured
|
|
1854
|
-
*
|
|
1855
|
-
* Only injects if:
|
|
1856
|
-
* 1. Table has an auto-update field configured (via autoUpdateTimestamp() or timestamps({ autoUpdate: true }))
|
|
1857
|
-
* 2. The field is not already explicitly provided in the data
|
|
1858
|
-
*
|
|
1859
|
-
* @param data - Update data object
|
|
1860
|
-
* @returns Data with auto-update timestamp injected (if applicable)
|
|
1861
|
-
*/
|
|
1862
|
-
injectAutoUpdateTimestamp(data) {
|
|
1863
|
-
if (!this.autoUpdateField) {
|
|
1864
|
-
return data;
|
|
1865
|
-
}
|
|
1866
|
-
if (data && this.autoUpdateField in data) {
|
|
1867
|
-
return data;
|
|
1868
|
-
}
|
|
1869
|
-
return {
|
|
1870
|
-
...data,
|
|
1871
|
-
[this.autoUpdateField]: /* @__PURE__ */ new Date()
|
|
1872
|
-
};
|
|
1873
|
-
}
|
|
1874
|
-
/**
|
|
1875
|
-
* Get id column from table
|
|
1876
|
-
*
|
|
1877
|
-
* Helper method to reduce code duplication across methods that need id column.
|
|
1878
|
-
*
|
|
1879
|
-
* @returns The id column object
|
|
1880
|
-
* @throws {QueryError} If table does not have an id column
|
|
1881
|
-
*/
|
|
1882
|
-
getIdColumn() {
|
|
1883
|
-
const idColumn = this.table.id;
|
|
1884
|
-
if (!idColumn) {
|
|
1885
|
-
throw new QueryError("Table does not have an id column");
|
|
1886
|
-
}
|
|
1887
|
-
return idColumn;
|
|
1888
|
-
}
|
|
1889
|
-
/**
|
|
1890
|
-
* Get read-only DB
|
|
1891
|
-
*
|
|
1892
|
-
* Automatically detects and uses transaction context if available.
|
|
1893
|
-
* When in transaction, uses transaction DB to ensure read consistency.
|
|
1894
|
-
* Priority: explicitDb > transaction > replica/primary DB
|
|
1895
|
-
*/
|
|
1896
|
-
getReadDb() {
|
|
1897
|
-
if (this.explicitDb) {
|
|
1898
|
-
return this.explicitDb;
|
|
1899
|
-
}
|
|
1900
|
-
const tx = getTransaction();
|
|
1901
|
-
if (tx) {
|
|
1902
|
-
return tx;
|
|
1903
|
-
}
|
|
1904
|
-
return this.useReplica ? getRawDb("read") : this.db;
|
|
1905
|
-
}
|
|
1906
|
-
/**
|
|
1907
|
-
* Get write-only DB
|
|
1908
|
-
*
|
|
1909
|
-
* Automatically detects and uses transaction context if available.
|
|
1910
|
-
* Priority: explicitDb > transaction > primary DB
|
|
1911
|
-
*/
|
|
1912
|
-
getWriteDb() {
|
|
1913
|
-
if (this.explicitDb) {
|
|
1914
|
-
return this.explicitDb;
|
|
1915
|
-
}
|
|
1916
|
-
const tx = getTransaction();
|
|
1917
|
-
if (tx) {
|
|
1918
|
-
return tx;
|
|
1919
|
-
}
|
|
1920
|
-
return getRawDb("write");
|
|
1921
|
-
}
|
|
1922
|
-
/**
|
|
1923
|
-
* Execute operation with performance monitoring
|
|
1924
|
-
*
|
|
1925
|
-
* Wraps database operations with timing and logging for slow queries.
|
|
1926
|
-
* Only logs if monitoring is enabled and query exceeds threshold.
|
|
1927
|
-
*
|
|
1928
|
-
* @param operation - Name of the operation (for logging)
|
|
1929
|
-
* @param fn - Async function to execute
|
|
1930
|
-
* @returns Result of the operation
|
|
1931
|
-
*/
|
|
1932
|
-
async executeWithMonitoring(operation, fn) {
|
|
1933
|
-
const config2 = getDatabaseMonitoringConfig();
|
|
1934
|
-
if (!config2?.enabled) {
|
|
1935
|
-
return fn();
|
|
1936
|
-
}
|
|
1937
|
-
const startTime = performance.now();
|
|
1938
|
-
try {
|
|
1939
|
-
const result = await fn();
|
|
1940
|
-
const duration = performance.now() - startTime;
|
|
1941
|
-
if (duration >= config2.slowThreshold) {
|
|
1942
|
-
const dbLogger4 = logger.child("database");
|
|
1943
|
-
const logData = {
|
|
1944
|
-
operation,
|
|
1945
|
-
table: this.table._.name,
|
|
1946
|
-
duration: `${duration.toFixed(2)}ms`,
|
|
1947
|
-
threshold: `${config2.slowThreshold}ms`
|
|
1948
|
-
};
|
|
1949
|
-
dbLogger4.warn("Slow query detected", logData);
|
|
1950
|
-
}
|
|
1951
|
-
return result;
|
|
1952
|
-
} catch (error) {
|
|
1953
|
-
const duration = performance.now() - startTime;
|
|
1954
|
-
const dbLogger4 = logger.child("database");
|
|
1955
|
-
const message = error instanceof Error ? error.message : "Unknown error";
|
|
1956
|
-
dbLogger4.error("Query failed", {
|
|
1957
|
-
operation,
|
|
1958
|
-
table: this.table._.name,
|
|
1959
|
-
duration: `${duration.toFixed(2)}ms`,
|
|
1960
|
-
error: message
|
|
1961
|
-
});
|
|
1962
|
-
throw error;
|
|
1963
|
-
}
|
|
1964
|
-
}
|
|
1965
|
-
/**
|
|
1966
|
-
* Find all records (uses Replica)
|
|
1967
|
-
*
|
|
1968
|
-
* @example
|
|
1969
|
-
* const users = await userRepo.findAll();
|
|
1970
|
-
*/
|
|
1971
|
-
async findAll() {
|
|
1972
|
-
return this.executeWithMonitoring("findAll", async () => {
|
|
1973
|
-
const readDb = this.getReadDb();
|
|
1974
|
-
return readDb.select().from(this.table);
|
|
1975
|
-
});
|
|
1976
|
-
}
|
|
1977
|
-
/**
|
|
1978
|
-
* Find with pagination (uses Replica)
|
|
1979
|
-
*
|
|
1980
|
-
* @example
|
|
1981
|
-
* const result = await userRepo.findPage({
|
|
1982
|
-
* filters: { email: { like: 'john' } },
|
|
1983
|
-
* sort: [{ field: 'createdAt', direction: 'desc' }],
|
|
1984
|
-
* pagination: { page: 1, limit: 20 }
|
|
1985
|
-
* });
|
|
1986
|
-
*/
|
|
1987
|
-
async findPage(pageable) {
|
|
1988
|
-
return this.executeWithMonitoring("findPage", async () => {
|
|
1989
|
-
const { filters = {}, sort = [], pagination = { page: 1, limit: 20 } } = pageable;
|
|
1990
|
-
const whereCondition = buildFilters(filters, this.table);
|
|
1991
|
-
const orderBy = buildSort(sort, this.table);
|
|
1992
|
-
const { offset, limit } = applyPagination(pagination);
|
|
1993
|
-
const readDb = this.getReadDb();
|
|
1994
|
-
const data = await readDb.select().from(this.table).where(whereCondition).orderBy(...orderBy).limit(limit).offset(offset);
|
|
1995
|
-
const total = await countTotal(readDb, this.table, whereCondition);
|
|
1996
|
-
const meta = createPaginationMeta(pagination, total);
|
|
1997
|
-
return { data, meta };
|
|
1998
|
-
});
|
|
1999
|
-
}
|
|
2000
|
-
/**
|
|
2001
|
-
* Find one record by ID (uses Replica)
|
|
2002
|
-
*
|
|
2003
|
-
* @example
|
|
2004
|
-
- * const user = await userRepo.findById(1);
- */
- async findById(id2) {
- return this.executeWithMonitoring("findById", async () => {
- const idColumn = this.getIdColumn();
- const { eq: eq2 } = await import('drizzle-orm');
- const readDb = this.getReadDb();
- const [result] = await readDb.select().from(this.table).where(eq2(idColumn, id2));
- return result ?? null;
- });
- }
- /**
- * Find one record by condition (uses Replica)
- *
- * @example
- * const user = await userRepo.findOne(eq(users.email, 'john@example.com'));
- */
- async findOne(where) {
- return this.executeWithMonitoring("findOne", async () => {
- const readDb = this.getReadDb();
- const [result] = await readDb.select().from(this.table).where(where);
- return result ?? null;
- });
- }
- /**
- * Create a new record (uses Primary)
- *
- * @example
- * const user = await userRepo.save({ email: 'john@example.com', name: 'John' });
- */
- async save(data) {
- return this.executeWithMonitoring("save", async () => {
- const writeDb = this.getWriteDb();
- const [result] = await writeDb.insert(this.table).values(data).returning();
- return result;
- });
- }
- /**
- * Update a record (uses Primary)
- *
- * Automatically injects current timestamp if table has auto-update field configured.
- *
- * @example
- * const user = await userRepo.update(1, { name: 'Jane' });
- */
- async update(id2, data) {
- return this.executeWithMonitoring("update", async () => {
- const idColumn = this.getIdColumn();
- const updateData = this.injectAutoUpdateTimestamp(data);
- const { eq: eq2 } = await import('drizzle-orm');
- const writeDb = this.getWriteDb();
- const [result] = await writeDb.update(this.table).set(updateData).where(eq2(idColumn, id2)).returning();
- return result ?? null;
- });
- }
- /**
- * Delete a record (uses Primary)
- *
- * @example
- * const deleted = await userRepo.delete(1);
- */
- async delete(id2) {
- return this.executeWithMonitoring("delete", async () => {
- const idColumn = this.getIdColumn();
- const { eq: eq2 } = await import('drizzle-orm');
- const writeDb = this.getWriteDb();
- const [result] = await writeDb.delete(this.table).where(eq2(idColumn, id2)).returning();
- return result ?? null;
- });
- }
- /**
- * Count records (uses Replica)
- *
- * @example
- * const count = await userRepo.count();
- */
- async count(where) {
- return this.executeWithMonitoring("count", async () => {
- const readDb = this.getReadDb();
- return countTotal(readDb, this.table, where);
- });
- }
- /**
- * Find records by filters (uses Replica)
- *
- * @example
- * const users = await userRepo.findWhere({ email: { like: '@gmail.com' }, status: 'active' });
- */
- async findWhere(filters) {
- return this.executeWithMonitoring("findWhere", async () => {
- const whereCondition = buildFilters(filters, this.table);
- const readDb = this.getReadDb();
- return readDb.select().from(this.table).where(whereCondition);
- });
- }
- /**
- * Find one record by filters (uses Replica)
- *
- * @example
- * const user = await userRepo.findOneWhere({ email: 'john@example.com' });
- */
- async findOneWhere(filters) {
- return this.executeWithMonitoring("findOneWhere", async () => {
- const whereCondition = buildFilters(filters, this.table);
- const readDb = this.getReadDb();
- const [result] = await readDb.select().from(this.table).where(whereCondition);
- return result ?? null;
- });
- }
- /**
- * Check if record exists by ID (uses Replica)
- *
- * @example
- * const exists = await userRepo.exists(1);
- */
- async exists(id2) {
- return this.executeWithMonitoring("exists", async () => {
- const idColumn = this.getIdColumn();
- const { eq: eq2 } = await import('drizzle-orm');
- const readDb = this.getReadDb();
- const [result] = await readDb.select().from(this.table).where(eq2(idColumn, id2)).limit(1);
- return !!result;
- });
- }
- /**
- * Check if record exists by filters (uses Replica)
- *
- * @example
- * const exists = await userRepo.existsBy({ email: 'john@example.com' });
- */
- async existsBy(filters) {
- return this.executeWithMonitoring("existsBy", async () => {
- const whereCondition = buildFilters(filters, this.table);
- const readDb = this.getReadDb();
- const [result] = await readDb.select().from(this.table).where(whereCondition).limit(1);
- return !!result;
- });
- }
- /**
- * Count records by filters (uses Replica)
- *
- * @example
- * const count = await userRepo.countBy({ status: 'active' });
- */
- async countBy(filters) {
- return this.executeWithMonitoring("countBy", async () => {
- const whereCondition = buildFilters(filters, this.table);
- const readDb = this.getReadDb();
- return countTotal(readDb, this.table, whereCondition);
- });
- }
- /**
- * Create multiple records (uses Primary)
- *
- * @example
- * const users = await userRepo.saveMany([
- * { email: 'user1@example.com', name: 'User 1' },
- * { email: 'user2@example.com', name: 'User 2' }
- * ]);
- */
- async saveMany(data) {
- return this.executeWithMonitoring("saveMany", async () => {
- const writeDb = this.getWriteDb();
- return writeDb.insert(this.table).values(data).returning();
- });
- }
- /**
- * Update multiple records by filters (uses Primary)
- *
- * Automatically injects current timestamp if table has auto-update field configured.
- *
- * @example
- * const count = await userRepo.updateWhere({ status: 'inactive' }, { status: 'archived' });
- */
- async updateWhere(filters, data) {
- return this.executeWithMonitoring("updateWhere", async () => {
- const updateData = this.injectAutoUpdateTimestamp(data);
- const whereCondition = buildFilters(filters, this.table);
- const writeDb = this.getWriteDb();
- const results = await writeDb.update(this.table).set(updateData).where(whereCondition).returning();
- return results.length;
- });
- }
- /**
- * Delete multiple records by filters (uses Primary)
- *
- * @example
- * const count = await userRepo.deleteWhere({ status: 'banned' });
- */
- async deleteWhere(filters) {
- return this.executeWithMonitoring("deleteWhere", async () => {
- const whereCondition = buildFilters(filters, this.table);
- const writeDb = this.getWriteDb();
- const results = await writeDb.delete(this.table).where(whereCondition).returning();
- return results.length;
- });
- }
- // ============================================================
- // Query Builder (Fluent Interface)
- // ============================================================
- /**
- * Start a chainable query builder (uses Replica)
- *
- * Returns a QueryBuilder instance for building complex queries with method chaining.
- *
- * @returns QueryBuilder instance for chaining
- *
- * @example
- * ```typescript
- * // Simple chaining
- * const users = await userRepo
- * .query()
- * .where({ status: 'active' })
- * .orderBy('createdAt', 'desc')
- * .limit(10)
- * .findMany();
- *
- * // Multiple conditions
- * const admins = await userRepo
- * .query()
- * .where({ role: 'admin' })
- * .where({ status: 'active' }) // AND condition
- * .findMany();
- *
- * // Reusable query
- * const activeQuery = userRepo.query().where({ status: 'active' });
- * const users = await activeQuery.findMany();
- * const count = await activeQuery.count();
- * ```
- */
- query() {
- const readDb = this.getReadDb();
- return new QueryBuilder(readDb, this.table);
- }
- };
- }
- });
-
- // src/db/repository/factory.ts
- function getCacheKey(table, RepositoryClass) {
- const tableName = table[Symbol.for("drizzle:Name")] || table.name || table.toString();
- const className = RepositoryClass?.name || "Repository";
- return `${tableName}:${className}`;
- }
- function getRepository(table, RepositoryClass) {
- const cacheKey = getCacheKey(table, RepositoryClass);
- let repo = repositoryCache.get(cacheKey);
- if (!repo) {
- if (RepositoryClass) {
- repo = new RepositoryClass(table);
- } else {
- repo = new Repository(table);
- }
- repositoryCache.set(cacheKey, repo);
+ const whereClause = isSQLWrapper(where) ? where : where ? buildWhereFromObject(table, where) : void 0;
+ if (!whereClause) {
+ throw new Error("updateMany requires at least one where condition");
  }
-
-
- function clearRepositoryCache() {
- repositoryCache.clear();
+ const results = await db.update(table).set(data).where(whereClause).returning();
+ return results;
  }
- function
-
-
-
- var init_factory2 = __esm({
- "src/db/repository/factory.ts"() {
- init_repository();
- repositoryCache = /* @__PURE__ */ new Map();
+ async function deleteOne(table, where) {
+ const db = getDatabase("write");
+ if (!db) {
+ throw new Error("Database not initialized. Call initDatabase() first.");
  }
-
-
-
- const className = RepositoryClass?.name || "Repository";
- return `${tableName}:${className}`;
- }
- function withRepositoryScope(fn) {
- const cache = /* @__PURE__ */ new Map();
- return repositoryStorage.run(cache, fn);
- }
- function getScopedRepository(table, RepositoryClass) {
- const cache = repositoryStorage.getStore();
- if (!cache) {
- return RepositoryClass ? new RepositoryClass(table) : new Repository(table);
- }
- const key = getCacheKey2(table, RepositoryClass);
- let repo = cache.get(key);
- if (!repo) {
- repo = RepositoryClass ? new RepositoryClass(table) : new Repository(table);
- cache.set(key, repo);
- }
- return repo;
- }
- function RepositoryScope() {
- return async (_c, next) => {
- return withRepositoryScope(() => next());
- };
- }
- function getScopedCacheSize() {
- const cache = repositoryStorage.getStore();
- return cache?.size ?? 0;
- }
- function isInRepositoryScope() {
- return repositoryStorage.getStore() !== void 0;
- }
- var repositoryStorage;
- var init_request_scope = __esm({
- "src/db/repository/request-scope.ts"() {
- init_repository();
- repositoryStorage = new AsyncLocalStorage();
+ const whereClause = isSQLWrapper(where) ? where : where ? buildWhereFromObject(table, where) : void 0;
+ if (!whereClause) {
+ throw new Error("deleteOne requires at least one where condition");
  }
-
-
- // src/db/repository/relation-registry.ts
- function getTableName(table) {
- const cached = tableNameCache.get(table);
- if (cached) {
- return cached;
- }
- const name = table[Symbol.for("drizzle:Name")] || table.constructor.name;
- tableNameCache.set(table, name);
- return name;
+ const [result] = await db.delete(table).where(whereClause).returning();
+ return result ?? null;
  }
-
-
-
-
+ async function deleteMany(table, where) {
+ const db = getDatabase("write");
+ if (!db) {
+ throw new Error("Database not initialized. Call initDatabase() first.");
  }
-
-
-
- var init_repository2 = __esm({
- "src/db/repository/index.ts"() {
- init_repository();
- init_factory2();
- init_request_scope();
- init_query_builder();
- init_relation_registry();
- init_filters();
- }
- });
-
- // src/db/manager/wrapped-db.ts
- var WrappedDb;
- var init_wrapped_db = __esm({
- "src/db/manager/wrapped-db.ts"() {
- init_repository2();
- WrappedDb = class {
- constructor(db2) {
- this.db = db2;
- }
|
|
2355
|
-
* Repository 패턴으로 테이블 접근
|
|
2356
|
-
*
|
|
2357
|
-
* @example
|
|
2358
|
-
* const db = getDb();
|
|
2359
|
-
* const userRepo = db.for(users);
|
|
2360
|
-
* const result = await userRepo.findPage(pageable);
|
|
2361
|
-
*/
|
|
2362
|
-
for(table) {
|
|
2363
|
-
return new Repository(this.db, table);
|
|
2364
|
-
}
|
|
2365
|
-
/**
|
|
2366
|
-
* Drizzle의 모든 메서드를 프록시
|
|
2367
|
-
*
|
|
2368
|
-
* select, insert, update, delete, transaction 등 모든 Drizzle 메서드 사용 가능
|
|
2369
|
-
*/
|
|
2370
|
-
get select() {
|
|
2371
|
-
return this.db.select.bind(this.db);
|
|
2372
|
-
}
|
|
2373
|
-
get insert() {
|
|
2374
|
-
return this.db.insert.bind(this.db);
|
|
2375
|
-
}
|
|
2376
|
-
get update() {
|
|
2377
|
-
return this.db.update.bind(this.db);
|
|
2378
|
-
}
|
|
2379
|
-
get delete() {
|
|
2380
|
-
return this.db.delete.bind(this.db);
|
|
2381
|
-
}
|
|
2382
|
-
get execute() {
|
|
2383
|
-
return this.db.execute.bind(this.db);
|
|
2384
|
-
}
|
|
2385
|
-
get transaction() {
|
|
2386
|
-
return this.db.transaction.bind(this.db);
|
|
2387
|
-
}
|
|
2388
|
-
get query() {
|
|
2389
|
-
return this.db.query;
|
|
2390
|
-
}
|
|
2391
|
-
get $with() {
|
|
2392
|
-
return this.db.$with.bind(this.db);
|
|
2393
|
-
}
|
|
2394
|
-
/**
|
|
2395
|
-
* Raw Drizzle DB 접근 (필요시)
|
|
2396
|
-
*/
|
|
2397
|
-
get raw() {
|
|
2398
|
-
return this.db;
|
|
2399
|
-
}
|
|
2400
|
-
};
|
|
2401
|
-
}
|
|
2402
|
-
});
|
-
- // src/db/manager/context.ts
- function getDb(type) {
- const tx = getTransaction();
- if (tx) {
- return new WrappedDb(tx);
- }
- const rawDb = getDatabase(type);
- if (!rawDb) {
- throw new Error(
- "Database not initialized. Set DATABASE_URL environment variable or call initDatabase() first."
- );
- }
- return new WrappedDb(rawDb);
- }
- var init_context2 = __esm({
- "src/db/manager/context.ts"() {
- init_transaction();
- init_manager();
- init_wrapped_db();
- }
- });
-
- // src/db/manager/index.ts
- var init_manager2 = __esm({
- "src/db/manager/index.ts"() {
- init_instance();
- init_context2();
- init_factory();
- init_manager();
- init_connection();
- init_wrapped_db();
- }
- });
-
- // src/db/manager/config-generator.ts
- function detectDialect(url) {
- if (url.startsWith("postgres://") || url.startsWith("postgresql://")) {
- return "postgresql";
- }
- if (url.startsWith("mysql://")) {
- return "mysql";
- }
- if (url.startsWith("sqlite://") || url.includes(".db") || url.includes(".sqlite")) {
- return "sqlite";
- }
- throw new Error(
- `Unsupported database URL format: ${url}. Supported: postgresql://, mysql://, sqlite://`
- );
- }
- function getDrizzleConfig(options = {}) {
- const databaseUrl = options.databaseUrl ?? process.env.DATABASE_URL;
- if (!databaseUrl) {
- throw new Error(
- "DATABASE_URL is required. Set it in .env or pass it to getDrizzleConfig()"
- );
- }
- const dialect = options.dialect ?? detectDialect(databaseUrl);
- const schema = options.schema ?? "./src/server/entities/*.ts";
- const out = options.out ?? "./drizzle/migrations";
- return {
- schema,
- out,
- dialect,
- dbCredentials: getDbCredentials(dialect, databaseUrl)
- };
- }
- function getDbCredentials(dialect, url) {
- switch (dialect) {
- case "postgresql":
- case "mysql":
- return { url };
- case "sqlite":
- const dbPath = url.replace("sqlite://", "").replace("sqlite:", "");
- return { url: dbPath };
- default:
- throw new Error(`Unsupported dialect: ${dialect}`);
+ const whereClause = isSQLWrapper(where) ? where : where ? buildWhereFromObject(table, where) : void 0;
+ if (!whereClause) {
+ throw new Error("deleteMany requires at least one where condition");
  }
+ const results = await db.delete(table).where(whereClause).returning();
+ return results;
  }
- function
- const
-
-
- export default defineConfig({
- schema: '${config2.schema}',
- out: '${config2.out}',
- dialect: '${config2.dialect}',
- dbCredentials: ${JSON.stringify(config2.dbCredentials, null, 4)},
- });
- `;
- }
- var init_config_generator = __esm({
- "src/db/manager/config-generator.ts"() {
+ async function count(table, where) {
+ const db = getDatabase("read");
+ if (!db) {
+ throw new Error("Database not initialized. Call initDatabase() first.");
  }
-
-
-
-
-
-
- if (options?.autoUpdate) {
- updatedAtColumn.__autoUpdate = true;
+ let query = db.select().from(table);
+ if (where) {
+ const whereClause = isSQLWrapper(where) ? where : where ? buildWhereFromObject(table, where) : void 0;
+ if (whereClause) {
+ query = query.where(whereClause);
+ }
  }
-
-
- updatedAt: updatedAtColumn
- };
+ const results = await query;
+ return results.length;
  }
-
-
-
- function optionalForeignKey(name, reference, options) {
- return bigserial(`${name}_id`, { mode: "number" }).references(reference, { onDelete: options?.onDelete ?? "set null" });
- }
- var init_helpers = __esm({
- "src/db/schema/helpers.ts"() {
- }
- });
-
- // src/db/schema/index.ts
- var init_schema = __esm({
- "src/db/schema/index.ts"() {
- init_helpers();
+ var init_helpers2 = __esm({
+ "src/db/helpers.ts"() {
+ init_manager2();
  }
  });
 
  // src/db/index.ts
  var db_exports = {};
  __export(db_exports, {
- QueryBuilder: () => QueryBuilder,
- Repository: () => Repository,
- RepositoryScope: () => RepositoryScope,
  Transactional: () => Transactional,
- WrappedDb: () => WrappedDb,
  checkConnection: () => checkConnection,
- clearRepositoryCache: () => clearRepositoryCache,
  closeDatabase: () => closeDatabase,
+ count: () => count,
+ create: () => create,
  createDatabaseConnection: () => createDatabaseConnection,
  createDatabaseFromEnv: () => createDatabaseFromEnv,
-
+ createFunctionSchema: () => createFunctionSchema,
+ createMany: () => createMany,
+ deleteMany: () => deleteMany,
+ deleteOne: () => deleteOne,
  detectDialect: () => detectDialect,
+ findMany: () => findMany,
+ findOne: () => findOne,
  foreignKey: () => foreignKey,
  fromPostgresError: () => fromPostgresError,
  generateDrizzleConfigFile: () => generateDrizzleConfigFile,
  getDatabase: () => getDatabase,
  getDatabaseInfo: () => getDatabaseInfo,
- getDb: () => getDb,
  getDrizzleConfig: () => getDrizzleConfig,
-
- getRepository: () => getRepository,
- getRepositoryCacheSize: () => getRepositoryCacheSize,
- getScopedCacheSize: () => getScopedCacheSize,
- getScopedRepository: () => getScopedRepository,
- getTableName: () => getTableName,
+ getSchemaInfo: () => getSchemaInfo,
  getTransaction: () => getTransaction,
  id: () => id,
  initDatabase: () => initDatabase,
- isInRepositoryScope: () => isInRepositoryScope,
  optionalForeignKey: () => optionalForeignKey,
+ packageNameToSchema: () => packageNameToSchema,
  runWithTransaction: () => runWithTransaction,
  setDatabase: () => setDatabase,
  timestamps: () => timestamps,
-
+ updateMany: () => updateMany,
+ updateOne: () => updateOne,
+ upsert: () => upsert
  });
  var init_db = __esm({
  "src/db/index.ts"() {
  init_manager2();
  init_config_generator();
- init_repository2();
  init_schema();
+ init_schema_helper();
  init_transaction();
  init_postgres_errors();
+ init_helpers2();
+ }
+ });
+
+ // src/cache/cache-factory.ts
+ function hasCacheConfig() {
+ return !!// Modern (Valkey/Cache)
+ (process.env.VALKEY_URL || process.env.CACHE_URL || process.env.VALKEY_WRITE_URL || process.env.VALKEY_READ_URL || process.env.CACHE_WRITE_URL || process.env.CACHE_READ_URL || process.env.VALKEY_SENTINEL_HOSTS || process.env.VALKEY_CLUSTER_NODES || // Legacy (Redis - backward compatibility)
+ process.env.REDIS_URL || process.env.REDIS_WRITE_URL || process.env.REDIS_READ_URL || process.env.REDIS_SENTINEL_HOSTS || process.env.REDIS_CLUSTER_NODES);
+ }
+ function getEnv(valkeyKey, cacheKey, redisKey) {
+ return process.env[valkeyKey] || process.env[cacheKey] || process.env[redisKey];
+ }
+ function createClient(RedisClient, url) {
+ const options = {};
+ if (url.startsWith("rediss://") || url.startsWith("valkeys://")) {
+ const rejectUnauthorized = getEnv(
+ "VALKEY_TLS_REJECT_UNAUTHORIZED",
+ "CACHE_TLS_REJECT_UNAUTHORIZED",
+ "REDIS_TLS_REJECT_UNAUTHORIZED"
+ );
+ options.tls = {
+ rejectUnauthorized: rejectUnauthorized !== "false"
+ };
+ }
+ return new RedisClient(url, options);
+ }
+ async function createCacheFromEnv() {
+ if (!hasCacheConfig()) {
+ cacheLogger.info("No cache configuration found - running without cache");
+ return { write: void 0, read: void 0 };
+ }
+ try {
+ const ioredis = await import('ioredis');
+ const RedisClient = ioredis.default;
+ const singleUrl = getEnv("VALKEY_URL", "CACHE_URL", "REDIS_URL");
+ const writeUrl = getEnv("VALKEY_WRITE_URL", "CACHE_WRITE_URL", "REDIS_WRITE_URL");
+ const readUrl = getEnv("VALKEY_READ_URL", "CACHE_READ_URL", "REDIS_READ_URL");
+ const clusterNodes = getEnv("VALKEY_CLUSTER_NODES", "CACHE_CLUSTER_NODES", "REDIS_CLUSTER_NODES");
+ const sentinelHosts = getEnv("VALKEY_SENTINEL_HOSTS", "CACHE_SENTINEL_HOSTS", "REDIS_SENTINEL_HOSTS");
+ const masterName = getEnv("VALKEY_MASTER_NAME", "CACHE_MASTER_NAME", "REDIS_MASTER_NAME");
+ const password = getEnv("VALKEY_PASSWORD", "CACHE_PASSWORD", "REDIS_PASSWORD");
+ if (singleUrl && !writeUrl && !readUrl && !clusterNodes) {
+ const client = createClient(RedisClient, singleUrl);
+ cacheLogger.debug("Created single cache instance", { url: singleUrl.replace(/:[^:@]+@/, ":***@") });
+ return { write: client, read: client };
+ }
+ if (writeUrl && readUrl) {
+ const write = createClient(RedisClient, writeUrl);
+ const read = createClient(RedisClient, readUrl);
+ cacheLogger.debug("Created master-replica cache instances");
+ return { write, read };
+ }
+ if (sentinelHosts && masterName) {
+ const sentinels = sentinelHosts.split(",").map((host) => {
+ const [hostname, port] = host.trim().split(":");
+ return { host: hostname, port: Number(port) || 26379 };
+ });
+ const options = {
+ sentinels,
+ name: masterName,
+ password
+ };
+ const client = new RedisClient(options);
+ cacheLogger.debug("Created sentinel cache instance", { masterName, sentinels: sentinels.length });
+ return { write: client, read: client };
+ }
+ if (clusterNodes) {
+ const nodes = clusterNodes.split(",").map((node) => {
+ const [host, port] = node.trim().split(":");
+ return { host, port: Number(port) || 6379 };
+ });
+ const clusterOptions = {
+ redisOptions: {
+ password
+ }
+ };
+ const cluster = new RedisClient.Cluster(nodes, clusterOptions);
+ cacheLogger.debug("Created cluster cache instance", { nodes: nodes.length });
+ return { write: cluster, read: cluster };
+ }
+ if (singleUrl) {
+ const client = createClient(RedisClient, singleUrl);
+ cacheLogger.debug("Created cache instance (fallback)", { url: singleUrl.replace(/:[^:@]+@/, ":***@") });
+ return { write: client, read: client };
+ }
+ cacheLogger.info("No valid cache configuration found - running without cache");
+ return { write: void 0, read: void 0 };
+ } catch (error) {
+ if (error instanceof Error) {
+ if (error.message.includes("Cannot find module")) {
+ cacheLogger.warn(
+ "Cache client library not installed",
+ error,
+ {
+ suggestion: "Install ioredis to enable cache: pnpm install ioredis",
+ mode: "disabled"
+ }
+ );
+ } else {
+ cacheLogger.warn(
+ "Failed to create cache client",
+ error,
+ { mode: "disabled" }
+ );
+ }
+ } else {
+ cacheLogger.warn(
+ "Failed to create cache client",
+ { error: String(error), mode: "disabled" }
+ );
+ }
+ return { write: void 0, read: void 0 };
+ }
+ }
+ async function createSingleCacheFromEnv() {
+ const { write } = await createCacheFromEnv();
+ return write;
+ }
+ var cacheLogger;
+ var init_cache_factory = __esm({
+ "src/cache/cache-factory.ts"() {
+ init_logger2();
+ cacheLogger = logger.child("cache");
  }
  });
+
+ // src/cache/cache-manager.ts
+ function getCache() {
+ return writeInstance;
+ }
+ function getCacheRead() {
+ return readInstance ?? writeInstance;
+ }
+ function isCacheDisabled() {
+ return isDisabled;
+ }
+ function setCache(write, read) {
+ writeInstance = write;
+ readInstance = read ?? write;
+ isDisabled = !write;
+ }
+ async function initCache() {
+ if (writeInstance) {
+ return { write: writeInstance, read: readInstance, disabled: isDisabled };
+ }
+ const { write, read } = await createCacheFromEnv();
+ if (write) {
+ try {
+ await write.ping();
+ if (read && read !== write) {
+ await read.ping();
+ }
+ writeInstance = write;
+ readInstance = read;
+ isDisabled = false;
+ const hasReplica = read && read !== write;
+ cacheLogger2.info(
+ hasReplica ? "Cache connected (Master-Replica)" : "Cache connected",
+ { mode: "enabled" }
+ );
+ return { write: writeInstance, read: readInstance, disabled: false };
+ } catch (error) {
+ cacheLogger2.error(
+ "Cache connection failed - running in disabled mode",
+ error instanceof Error ? error : new Error(String(error)),
+ { mode: "disabled" }
+ );
+ try {
+ await write.quit();
+ if (read && read !== write) {
+ await read.quit();
+ }
+ } catch {
+ }
+ isDisabled = true;
+ return { write: void 0, read: void 0, disabled: true };
+ }
+ }
+ isDisabled = true;
+ cacheLogger2.info("Cache disabled - no configuration or library not installed", { mode: "disabled" });
+ return { write: void 0, read: void 0, disabled: true };
+ }
+ async function closeCache() {
+ if (isDisabled) {
+ cacheLogger2.debug("Cache already disabled, nothing to close");
+ return;
+ }
+ const closePromises = [];
+ if (writeInstance) {
+ closePromises.push(
+ writeInstance.quit().catch((err) => {
+ cacheLogger2.error("Error closing cache write instance", err);
+ })
+ );
+ }
+ if (readInstance && readInstance !== writeInstance) {
+ closePromises.push(
+ readInstance.quit().catch((err) => {
+ cacheLogger2.error("Error closing cache read instance", err);
+ })
+ );
+ }
+ await Promise.all(closePromises);
+ writeInstance = void 0;
+ readInstance = void 0;
+ isDisabled = true;
+ cacheLogger2.info("Cache connections closed", { mode: "disabled" });
+ }
+ function getCacheInfo() {
+ return {
+ hasWrite: !!writeInstance,
+ hasRead: !!readInstance,
+ isReplica: !!(readInstance && readInstance !== writeInstance),
+ disabled: isDisabled
+ };
+ }
+ var cacheLogger2, writeInstance, readInstance, isDisabled, getRedis, getRedisRead, setRedis, initRedis, closeRedis, getRedisInfo;
+ var init_cache_manager = __esm({
+ "src/cache/cache-manager.ts"() {
+ init_cache_factory();
+ init_logger2();
+ cacheLogger2 = logger.child("cache");
+ isDisabled = false;
+ getRedis = getCache;
+ getRedisRead = getCacheRead;
+ setRedis = setCache;
+ initRedis = initCache;
+ closeRedis = closeCache;
+ getRedisInfo = getCacheInfo;
+ }
+ });
+
+ // src/cache/index.ts
+ var cache_exports = {};
+ __export(cache_exports, {
+ closeCache: () => closeCache,
+ closeRedis: () => closeRedis,
+ createCacheFromEnv: () => createCacheFromEnv,
+ createRedisFromEnv: () => createCacheFromEnv,
+ createSingleCacheFromEnv: () => createSingleCacheFromEnv,
+ createSingleRedisFromEnv: () => createSingleCacheFromEnv,
+ getCache: () => getCache,
+ getCacheInfo: () => getCacheInfo,
+ getCacheRead: () => getCacheRead,
+ getRedis: () => getRedis,
+ getRedisInfo: () => getRedisInfo,
+ getRedisRead: () => getRedisRead,
+ initCache: () => initCache,
+ initRedis: () => initRedis,
+ isCacheDisabled: () => isCacheDisabled,
+ setCache: () => setCache,
+ setRedis: () => setRedis
+ });
+ var init_cache = __esm({
+ "src/cache/index.ts"() {
+ init_cache_factory();
+ init_cache_manager();
+ init_cache_manager();
+ init_cache_factory();
+ }
+ });
+
+ // src/route/auto-loader.ts
+ init_logger2();
+ var routeLogger2 = logger.child("route");
2577
2648
|
var AutoRouteLoader = class {
|
|
2578
2649
|
constructor(routesDir, debug = false, middlewares = []) {
|
|
2579
2650
|
this.routesDir = routesDir;
|
|
@@ -2581,36 +2652,18 @@ var AutoRouteLoader = class {
|
|
|
2581
2652
|
this.middlewares = middlewares;
|
|
2582
2653
|
}
|
|
2583
2654
|
routes = [];
|
|
2584
|
-
registeredRoutes = /* @__PURE__ */ new Map();
|
|
2585
|
-
// normalized path → file
|
|
2586
2655
|
debug;
|
|
2587
2656
|
middlewares;
|
|
2588
|
-
/**
|
|
2589
|
-
* Load all routes from directory
|
|
2590
|
-
*/
|
|
2591
2657
|
async load(app) {
|
|
2592
2658
|
const startTime = Date.now();
|
|
2593
2659
|
const files = await this.scanFiles(this.routesDir);
|
|
2594
2660
|
if (files.length === 0) {
|
|
2595
|
-
|
|
2661
|
+
routeLogger2.warn("No route files found");
|
|
2596
2662
|
return this.getStats();
|
|
2597
2663
|
}
|
|
2598
|
-
const filesWithPriority = files.map((file) => ({
|
|
2599
|
-
path: file,
|
|
2600
|
-
priority: this.calculatePriority(relative(this.routesDir, file))
|
|
2601
|
-
}));
|
|
2602
|
-
filesWithPriority.sort((a, b) => a.priority - b.priority);
|
|
2603
|
-
if (this.debug) {
|
|
2604
|
-
console.log(`
|
|
2605
|
-
\u{1F4CB} Route Registration Order:`);
|
|
2606
|
-
console.log(` Priority 1 (Static): ${filesWithPriority.filter((f) => f.priority === 1).length} routes`);
|
|
2607
|
-
console.log(` Priority 2 (Dynamic): ${filesWithPriority.filter((f) => f.priority === 2).length} routes`);
|
|
2608
|
-
console.log(` Priority 3 (Catch-all): ${filesWithPriority.filter((f) => f.priority === 3).length} routes
|
|
2609
|
-
`);
|
|
2610
|
-
}
|
|
2611
2664
|
let failureCount = 0;
|
|
2612
|
-
for (const
|
|
2613
|
-
const success = await this.loadRoute(app,
|
|
2665
|
+
for (const file of files) {
|
|
2666
|
+
const success = await this.loadRoute(app, file);
|
|
2614
2667
|
if (success) ; else {
|
|
2615
2668
|
failureCount++;
|
|
2616
2669
|
}
|
|
@@ -2621,13 +2674,53 @@ var AutoRouteLoader = class {
|
|
|
2621
2674
|
this.logStats(stats, elapsed);
|
|
2622
2675
|
}
|
|
2623
2676
|
if (failureCount > 0) {
|
|
2624
|
-
|
|
2677
|
+
routeLogger2.warn("Some routes failed to load", { failureCount });
|
|
2625
2678
|
}
|
|
2626
2679
|
return stats;
|
|
2627
2680
|
}
|
|
2628
2681
|
/**
|
|
2629
|
-
*
|
|
2682
|
+
* Load routes from an external directory (e.g., from SPFN function packages)
|
|
2683
|
+
* Reads package.json spfn.prefix and mounts routes under that prefix
|
|
2684
|
+
*
|
|
2685
|
+
* @param app - Hono app instance
|
|
2686
|
+
* @param routesDir - Directory containing route handlers
|
|
2687
|
+
* @param packageName - Name of the package (for logging)
|
|
2688
|
+
* @param prefix - Optional prefix to mount routes under (from package.json spfn.prefix)
|
|
2689
|
+
* @returns Route statistics
|
|
2630
2690
|
*/
|
|
2691
|
+
async loadExternalRoutes(app, routesDir, packageName, prefix) {
|
|
2692
|
+
const startTime = Date.now();
|
|
2693
|
+
const tempRoutesDir = this.routesDir;
|
|
2694
|
+
this.routesDir = routesDir;
|
|
2695
|
+
const files = await this.scanFiles(routesDir);
|
|
2696
|
+
if (files.length === 0) {
|
|
2697
|
+
routeLogger2.warn("No route files found", { dir: routesDir, package: packageName });
|
|
2698
|
+
this.routesDir = tempRoutesDir;
|
|
2699
|
+
return this.getStats();
|
|
2700
|
+
}
|
|
2701
|
+
let successCount = 0;
|
|
2702
|
+
let failureCount = 0;
|
|
2703
|
+
for (const file of files) {
|
|
2704
|
+
const success = await this.loadRoute(app, file, prefix);
|
|
2705
|
+
if (success) {
|
|
2706
|
+
successCount++;
|
|
2707
|
+
} else {
|
|
2708
|
+
failureCount++;
|
|
2709
|
+
}
|
|
2710
|
+
}
|
|
2711
|
+
const elapsed = Date.now() - startTime;
|
|
2712
|
+
if (this.debug) {
|
|
2713
|
+
routeLogger2.info("External routes loaded", {
|
|
2714
|
+
package: packageName,
|
|
2715
|
+
prefix: prefix || "/",
|
|
2716
|
+
total: successCount,
|
|
2717
|
+
failed: failureCount,
|
|
2718
|
+
elapsed: `${elapsed}ms`
|
|
2719
|
+
});
|
|
2720
|
+
}
|
|
2721
|
+
this.routesDir = tempRoutesDir;
|
|
2722
|
+
return this.getStats();
|
|
2723
|
+
}
|
|
2631
2724
|
getStats() {
|
|
2632
2725
|
const stats = {
|
|
2633
2726
|
total: this.routes.length,
|
|
@@ -2647,12 +2740,6 @@ var AutoRouteLoader = class {
|
|
|
2647
2740
|
}
|
|
2648
2741
|
return stats;
|
|
2649
2742
|
}
|
|
2650
|
-
// ========================================================================
|
|
2651
|
-
// Private Methods
|
|
2652
|
-
// ========================================================================
|
|
2653
|
-
/**
|
|
2654
|
-
* Recursively scan directory for .ts files
|
|
2655
|
-
*/
|
|
2656
2743
|
async scanFiles(dir, files = []) {
|
|
2657
2744
|
const entries = await readdir(dir);
|
|
2658
2745
|
for (const entry of entries) {
|
|
@@ -2666,191 +2753,189 @@ var AutoRouteLoader = class {
|
|
|
2666
2753
|
}
|
|
2667
2754
|
return files;
|
|
2668
2755
|
}
|
|
2669
|
-
/**
|
|
2670
|
-
* Check if file is a valid route file
|
|
2671
|
-
*/
|
|
2672
2756
|
isValidRouteFile(fileName) {
|
|
2673
|
-
return fileName
|
|
2757
|
+
return fileName === "index.ts" || fileName === "index.js" || fileName === "index.mjs";
|
|
2674
2758
|
}
|
|
2675
|
-
|
|
2676
|
-
* Load and register a single route
|
|
2677
|
-
* Returns true if successful, false if failed
|
|
2678
|
-
*/
|
|
2679
|
-
async loadRoute(app, absolutePath) {
|
|
2759
|
+
async loadRoute(app, absolutePath, prefix) {
|
|
2680
2760
|
const relativePath = relative(this.routesDir, absolutePath);
|
|
2681
2761
|
try {
|
|
2682
|
-
const module = await import(absolutePath);
|
|
2683
|
-
if (!module
|
|
2684
|
-
console.error(`\u274C ${relativePath}: Must export Hono instance as default`);
|
|
2685
|
-
return false;
|
|
2686
|
-
}
|
|
2687
|
-
if (typeof module.default.route !== "function") {
|
|
2688
|
-
console.error(`\u274C ${relativePath}: Default export is not a Hono instance`);
|
|
2689
|
-
return false;
|
|
2690
|
-
}
|
|
2691
|
-
const urlPath = this.fileToPath(relativePath);
|
|
2692
|
-
const priority = this.calculatePriority(relativePath);
|
|
2693
|
-
const normalizedPath = this.normalizePath(urlPath);
|
|
2694
|
-
const existingFile = this.registeredRoutes.get(normalizedPath);
|
|
2695
|
-
if (existingFile) {
|
|
2696
|
-
console.warn(`\u26A0\uFE0F Route conflict detected:`);
|
|
2697
|
-
console.warn(` Path: ${urlPath} (normalized: ${normalizedPath})`);
|
|
2698
|
-
console.warn(` Already registered by: ${existingFile}`);
|
|
2699
|
-
console.warn(` Attempted by: ${relativePath}`);
|
|
2700
|
-
console.warn(` \u2192 Skipping duplicate registration`);
|
|
2762
|
+
const module = await import(absolutePath);
|
|
2763
|
+
if (!this.validateModule(module, relativePath)) {
|
|
2701
2764
|
return false;
|
|
2702
2765
|
}
|
|
2703
|
-
this.registeredRoutes.set(normalizedPath, relativePath);
|
|
2704
2766
|
const hasContractMetas = module.default._contractMetas && module.default._contractMetas.size > 0;
|
|
2705
|
-
if (hasContractMetas) {
|
|
2706
|
-
|
|
2707
|
-
|
|
2708
|
-
|
|
2709
|
-
const requestPath = new URL(c.req.url).pathname;
|
|
2710
|
-
const relativePath2 = requestPath.startsWith(urlPath) ? requestPath.slice(urlPath.length) || "/" : requestPath;
|
|
2711
|
-
const key = `${method} ${relativePath2}`;
|
|
2712
|
-
const meta = module.default._contractMetas?.get(key);
|
|
2713
|
-
if (meta?.skipMiddlewares) {
|
|
2714
|
-
c.set("_skipMiddlewares", meta.skipMiddlewares);
|
|
2715
|
-
}
|
|
2716
|
-
return next();
|
|
2767
|
+
if (!hasContractMetas) {
|
|
2768
|
+
routeLogger2.error("Route must use contract-based routing", {
|
|
2769
|
+
file: relativePath,
|
|
2770
|
+
hint: "Export contracts using satisfies RouteContract and use app.bind()"
|
|
2717
2771
|
});
|
|
2718
|
-
|
|
2719
|
-
|
|
2720
|
-
|
|
2721
|
-
|
|
2722
|
-
|
|
2723
|
-
|
|
2724
|
-
|
|
2772
|
+
return false;
|
|
2773
|
+
}
|
|
2774
|
+
const contractPaths = this.extractContractPaths(module);
|
|
2775
|
+
if (prefix) {
|
|
2776
|
+
const invalidPaths = contractPaths.filter((path) => !path.startsWith(prefix));
|
|
2777
|
+
if (invalidPaths.length > 0) {
|
|
2778
|
+
routeLogger2.error("Contract paths must include the package prefix", {
|
|
2779
|
+
file: relativePath,
|
|
2780
|
+
prefix,
|
|
2781
|
+
invalidPaths,
|
|
2782
|
+
hint: `Contract paths should start with "${prefix}". Example: path: "${prefix}/labels"`
|
|
2725
2783
|
});
|
|
2726
|
-
|
|
2727
|
-
} else {
|
|
2728
|
-
const skipList = module.meta?.skipMiddlewares || [];
|
|
2729
|
-
const activeMiddlewares = this.middlewares.filter((m) => !skipList.includes(m.name));
|
|
2730
|
-
for (const middleware of activeMiddlewares) {
|
|
2731
|
-
app.use(urlPath, middleware.handler);
|
|
2784
|
+
return false;
|
|
2732
2785
|
}
|
|
2733
2786
|
}
|
|
2734
|
-
|
|
2735
|
-
|
|
2736
|
-
|
|
2737
|
-
|
|
2738
|
-
|
|
2739
|
-
|
|
2787
|
+
this.registerContractBasedMiddlewares(app, contractPaths, module);
|
|
2788
|
+
app.route("/", module.default);
|
|
2789
|
+
contractPaths.forEach((path) => {
|
|
2790
|
+
this.routes.push({
|
|
2791
|
+
path,
|
|
2792
|
+
// Use contract path as-is (already includes prefix)
|
|
2793
|
+
file: relativePath,
|
|
2794
|
+
meta: module.meta,
|
|
2795
|
+
priority: this.calculateContractPriority(path)
|
|
2796
|
+
});
|
|
2797
|
+
if (this.debug) {
|
|
2798
|
+
const icon = path.includes("*") ? "\u2B50" : path.includes(":") ? "\u{1F538}" : "\u{1F539}";
|
|
2799
|
+
routeLogger2.debug(`Registered route: ${path}`, { icon, file: relativePath });
|
|
2800
|
+
}
|
|
2740
2801
|
});
|
|
2741
|
-
if (this.debug) {
|
|
2742
|
-
const icon = priority === 1 ? "\u{1F539}" : priority === 2 ? "\u{1F538}" : "\u2B50";
|
|
2743
|
-
console.log(` ${icon} ${urlPath.padEnd(40)} \u2192 ${relativePath}`);
|
|
2744
|
-
}
|
|
2745
2802
|
return true;
|
|
2746
2803
|
} catch (error) {
|
|
2747
|
-
|
|
2748
|
-
|
|
2749
|
-
|
|
2750
|
-
|
|
2751
|
-
|
|
2752
|
-
|
|
2753
|
-
|
|
2754
|
-
|
|
2755
|
-
|
|
2756
|
-
|
|
2757
|
-
|
|
2758
|
-
stackLines.forEach((line) => console.error(` ${line}`));
|
|
2759
|
-
}
|
|
2760
|
-
} else if (err.message.includes("Unexpected token")) {
|
|
2761
|
-
console.error(`\u274C ${relativePath}: Parse error`);
|
|
2762
|
-
console.error(` ${err.message}`);
|
|
2763
|
-
console.error(` \u2192 Check for syntax errors or invalid TypeScript`);
|
|
2764
|
-
} else {
|
|
2765
|
-
console.error(`\u274C ${relativePath}: ${err.message}`);
|
|
2766
|
-
if (this.debug && err.stack) {
|
|
2767
|
-
console.error(` Stack: ${err.stack}`);
|
|
2804
|
+
this.categorizeAndLogError(error, relativePath);
|
|
2805
|
+
return false;
|
|
2806
|
+
}
|
|
2807
|
+
}
|
|
2808
|
+
extractContractPaths(module) {
|
|
2809
|
+
const paths = /* @__PURE__ */ new Set();
|
|
2810
|
+
if (module.default._contractMetas) {
|
|
2811
|
+
for (const key of module.default._contractMetas.keys()) {
|
|
2812
|
+
const path = key.split(" ")[1];
|
|
2813
|
+
if (path) {
|
|
2814
|
+
paths.add(path);
|
|
2768
2815
|
}
|
|
2769
2816
|
}
|
|
2770
|
-
return false;
|
|
2771
2817
|
}
|
|
2818
|
+
return Array.from(paths);
|
|
2772
2819
|
}
|
|
2773
|
-
|
|
2774
|
-
|
|
2775
|
-
|
|
2776
|
-
|
|
2777
|
-
|
|
2778
|
-
|
|
2779
|
-
|
|
2780
|
-
|
|
2781
|
-
|
|
2782
|
-
let path = filePath.replace(/\.ts$/, "");
|
|
2783
|
-
const segments = path.split("/");
|
|
2784
|
-
if (segments[segments.length - 1] === "index") {
|
|
2785
|
-
segments.pop();
|
|
2820
|
+
calculateContractPriority(path) {
|
|
2821
|
+
if (path.includes("*")) return 3;
|
|
2822
|
+
if (path.includes(":")) return 2;
|
|
2823
|
+
return 1;
|
|
2824
|
+
}
|
|
2825
|
+
validateModule(module, relativePath) {
|
|
2826
|
+
if (!module.default) {
|
|
2827
|
+
routeLogger2.error("Route must export Hono instance as default", { file: relativePath });
|
|
2828
|
+
return false;
|
|
2786
2829
|
}
|
|
2787
|
-
|
|
2788
|
-
|
|
2789
|
-
|
|
2790
|
-
|
|
2791
|
-
|
|
2792
|
-
|
|
2830
|
+
if (typeof module.default.route !== "function") {
|
|
2831
|
+
routeLogger2.error("Default export is not a Hono instance", { file: relativePath });
|
|
2832
|
+
return false;
|
|
2833
|
+
}
|
|
2834
|
+
return true;
|
|
2835
|
+
}
|
|
2836
|
+
registerContractBasedMiddlewares(app, contractPaths, module) {
|
|
2837
|
+
app.use("*", (c, next) => {
|
|
2838
|
+
const method = c.req.method;
|
|
2839
|
+
const requestPath = new URL(c.req.url).pathname;
|
|
2840
|
+
const key = `${method} ${requestPath}`;
|
|
2841
|
+
const meta = module.default._contractMetas?.get(key);
|
|
2842
|
+
if (meta?.skipMiddlewares) {
|
|
2843
|
+
c.set("_skipMiddlewares", meta.skipMiddlewares);
|
|
2793
2844
|
}
|
|
2794
|
-
|
|
2795
|
-
|
|
2845
|
+
return next();
|
|
2846
|
+
});
|
|
2847
|
+
for (const contractPath of contractPaths) {
|
|
2848
|
+
const middlewarePath = contractPath === "/" ? "/*" : `${contractPath}/*`;
|
|
2849
|
+
for (const middleware of this.middlewares) {
|
|
2850
|
+
app.use(middlewarePath, async (c, next) => {
|
|
2851
|
+
const skipList = c.get("_skipMiddlewares") || [];
|
|
2852
|
+
if (skipList.includes(middleware.name)) {
|
|
2853
|
+
return next();
|
|
2854
|
+
}
|
|
2855
|
+
return middleware.handler(c, next);
|
|
2856
|
+
});
|
|
2796
2857
|
}
|
|
2797
|
-
|
|
2798
|
-
}).filter((seg) => seg !== null);
|
|
2799
|
-
const result = "/" + transformed.join("/");
|
|
2800
|
-
return result.replace(/\/+/g, "/").replace(/\/$/, "") || "/";
|
|
2801
|
-
}
|
|
2802
|
-
/**
|
|
2803
|
-
* Calculate route priority
|
|
2804
|
-
* 1 = static, 2 = dynamic, 3 = catch-all
|
|
2805
|
-
*/
|
|
2806
|
-
calculatePriority(path) {
|
|
2807
|
-
if (/\[\.\.\.[\w-]+]/.test(path)) return 3;
|
|
2808
|
-
if (/\[[\w-]+]/.test(path)) return 2;
|
|
2809
|
-
return 1;
|
|
2858
|
+
}
|
|
2810
2859
|
}
|
|
2811
|
-
|
|
2812
|
-
|
|
2813
|
-
|
|
2814
|
-
|
|
2815
|
-
|
|
2816
|
-
|
|
2817
|
-
|
|
2818
|
-
|
|
2819
|
-
|
|
2820
|
-
|
|
2821
|
-
|
|
2822
|
-
|
|
2823
|
-
|
|
2860
|
+
categorizeAndLogError(error, relativePath) {
|
|
2861
|
+
const message = error.message;
|
|
2862
|
+
const stack = error.stack;
|
|
2863
|
+
if (message.includes("Cannot find module") || message.includes("MODULE_NOT_FOUND")) {
|
|
2864
|
+
routeLogger2.error("Missing dependency", {
|
|
2865
|
+
file: relativePath,
|
|
2866
|
+
error: message,
|
|
2867
|
+
hint: "Run: npm install"
|
|
2868
|
+
});
|
|
2869
|
+
} else if (message.includes("SyntaxError") || stack?.includes("SyntaxError")) {
|
|
2870
|
+
routeLogger2.error("Syntax error", {
|
|
2871
|
+
file: relativePath,
|
|
2872
|
+
error: message,
|
|
2873
|
+
...this.debug && stack && {
|
|
2874
|
+
stack: stack.split("\n").slice(0, 5).join("\n")
|
|
2875
|
+
}
|
|
2876
|
+
});
|
|
2877
|
+
} else if (message.includes("Unexpected token")) {
|
|
2878
|
+
routeLogger2.error("Parse error", {
|
|
2879
|
+
file: relativePath,
|
|
2880
|
+
error: message,
|
|
2881
|
+
hint: "Check for syntax errors or invalid TypeScript"
|
|
2882
|
+
});
|
|
2883
|
+
} else {
|
|
2884
|
+
routeLogger2.error("Route loading failed", {
|
|
2885
|
+
file: relativePath,
|
|
2886
|
+
error: message,
|
|
2887
|
+
...this.debug && stack && { stack }
|
|
2888
|
+
});
|
|
2889
|
+
}
|
|
2824
2890
|
}
|
|
2825
|
-
/**
|
|
2826
|
-
* Log statistics
|
|
2827
|
-
*/
|
|
2828
2891
|
logStats(stats, elapsed) {
|
|
2829
|
-
|
|
2830
|
-
|
|
2831
|
-
|
|
2832
|
-
|
|
2833
|
-
|
|
2834
|
-
|
|
2835
|
-
|
|
2836
|
-
|
|
2837
|
-
|
|
2838
|
-
|
|
2839
|
-
|
|
2840
|
-
\u2705 Routes loaded in ${elapsed}ms
|
|
2841
|
-
`);
|
|
2892
|
+
const tagCounts = Object.entries(stats.byTag).map(([tag, count2]) => `${tag}(${count2})`).join(", ");
|
|
2893
|
+
routeLogger2.info("Routes loaded successfully", {
|
|
2894
|
+
total: stats.total,
|
|
2895
|
+
priority: {
|
|
2896
|
+
static: stats.byPriority.static,
|
|
2897
|
+
dynamic: stats.byPriority.dynamic,
|
|
2898
|
+
catchAll: stats.byPriority.catchAll
|
|
2899
|
+
},
|
|
2900
|
+
...tagCounts && { tags: tagCounts },
|
|
2901
|
+
elapsed: `${elapsed}ms`
|
|
2902
|
+
});
|
|
2842
2903
|
}
|
|
2843
2904
|
};
|
|
2844
2905
|
async function loadRoutes(app, options) {
|
|
2845
2906
|
const routesDir = options?.routesDir ?? join(process.cwd(), "src", "server", "routes");
|
|
2846
2907
|
const debug = options?.debug ?? false;
|
|
2847
2908
|
const middlewares = options?.middlewares ?? [];
|
|
2909
|
+
const includeFunctionRoutes = options?.includeFunctionRoutes ?? true;
|
|
2848
2910
|
const loader = new AutoRouteLoader(routesDir, debug, middlewares);
|
|
2849
|
-
|
|
2911
|
+
const stats = await loader.load(app);
|
|
2912
|
+
if (includeFunctionRoutes) {
|
|
2913
|
+
const { discoverFunctionRoutes: discoverFunctionRoutes2 } = await Promise.resolve().then(() => (init_function_routes(), function_routes_exports));
|
|
2914
|
+
const functionRoutes = discoverFunctionRoutes2();
|
|
2915
|
+
if (functionRoutes.length > 0) {
|
|
2916
|
+
routeLogger2.info("Loading function routes", { count: functionRoutes.length });
|
|
2917
|
+
for (const func of functionRoutes) {
|
|
2918
|
+
try {
|
|
2919
|
+
await loader.loadExternalRoutes(app, func.routesDir, func.packageName, func.prefix);
|
|
2920
|
+
routeLogger2.info("Function routes loaded", {
|
|
2921
|
+
package: func.packageName,
|
|
2922
|
+
routesDir: func.routesDir,
|
|
2923
|
+
prefix: func.prefix || "/"
|
|
2924
|
+
});
|
|
2925
|
+
} catch (error) {
|
|
2926
|
+
routeLogger2.error("Failed to load function routes", {
|
|
2927
|
+
package: func.packageName,
|
|
2928
|
+
error: error instanceof Error ? error.message : "Unknown error"
|
|
2929
|
+
});
|
|
2930
|
+
}
|
|
2931
|
+
}
|
|
2932
|
+
}
|
|
2933
|
+
}
|
|
2934
|
+
return stats;
|
|
2850
2935
|
}
|
|
2851
2936
|
|
|
2852
2937
|
// src/route/bind.ts
|
|
2853
|
-
|
|
2938
|
+
init_errors();
|
|
2854
2939
|
|
|
2855
2940
|
// src/middleware/error-handler.ts
|
|
2856
2941
|
init_logger2();
|
|
@@ -2861,27 +2946,36 @@ function ErrorHandler(options = {}) {
|
|
|
2861
2946
|
enableLogging = true
|
|
2862
2947
|
} = options;
|
|
2863
2948
|
return (err, c) => {
|
|
2864
|
-
const
|
|
2949
|
+
const errorWithCode = err;
|
|
2950
|
+
const statusCode = errorWithCode.statusCode || 500;
|
|
2865
2951
|
const errorType = err.name || "Error";
|
|
2866
2952
|
if (enableLogging) {
|
|
2867
2953
|
const logLevel = statusCode >= 500 ? "error" : "warn";
|
|
2868
|
-
|
|
2954
|
+
const logData = {
|
|
2869
2955
|
type: errorType,
|
|
2870
2956
|
message: err.message,
|
|
2871
2957
|
statusCode,
|
|
2872
2958
|
path: c.req.path,
|
|
2873
2959
|
method: c.req.method
|
|
2874
|
-
}
|
|
2960
|
+
};
|
|
2961
|
+
if (errorWithCode.details) {
|
|
2962
|
+
logData.details = errorWithCode.details;
|
|
2963
|
+
}
|
|
2964
|
+
if (statusCode >= 500 && includeStack) {
|
|
2965
|
+
logData.stack = err.stack;
|
|
2966
|
+
}
|
|
2967
|
+
errorLogger[logLevel]("Error occurred", logData);
|
|
2875
2968
|
}
|
|
2876
2969
|
const response = {
|
|
2970
|
+
success: false,
|
|
2877
2971
|
error: {
|
|
2878
2972
|
message: err.message || "Internal Server Error",
|
|
2879
2973
|
type: errorType,
|
|
2880
2974
|
statusCode
|
|
2881
2975
|
}
|
|
2882
2976
|
};
|
|
2883
|
-
if (
|
|
2884
|
-
response.error.details =
|
|
2977
|
+
if (errorWithCode.details) {
|
|
2978
|
+
response.error.details = errorWithCode.details;
|
|
2885
2979
|
}
|
|
2886
2980
|
if (includeStack) {
|
|
2887
2981
|
response.error.stack = err.stack;
|
|
@@ -2896,15 +2990,14 @@ var DEFAULT_CONFIG = {
|
|
|
2896
2990
|
excludePaths: ["/health", "/ping", "/favicon.ico"],
|
|
2897
2991
|
sensitiveFields: ["password", "token", "apiKey", "secret", "authorization"],
|
|
2898
2992
|
slowRequestThreshold: 1e3
|
|
2899
|
-
// 1 second
|
|
2900
2993
|
};
|
|
2901
2994
|
function generateRequestId() {
|
|
2902
2995
|
const timestamp2 = Date.now();
|
|
2903
2996
|
const randomPart = randomBytes(6).toString("hex");
|
|
2904
2997
|
return `req_${timestamp2}_${randomPart}`;
|
|
2905
2998
|
}
|
|
2906
|
-
function RequestLogger(
|
|
2907
|
-
const cfg = { ...DEFAULT_CONFIG, ...
|
|
2999
|
+
function RequestLogger(config) {
|
|
3000
|
+
const cfg = { ...DEFAULT_CONFIG, ...config };
|
|
2908
3001
|
const apiLogger = logger.child("api");
|
|
2909
3002
|
return async (c, next) => {
|
|
2910
3003
|
const path = new URL(c.req.url).pathname;
|
|
@@ -2941,15 +3034,6 @@ function RequestLogger(config2) {
|
|
|
2941
3034
|
logData.slow = true;
|
|
2942
3035
|
}
|
|
2943
3036
|
apiLogger[logLevel]("Request completed", logData);
|
|
2944
|
-
if (isSlowRequest) {
|
|
2945
|
-
apiLogger.warn("Slow request detected", {
|
|
2946
|
-
requestId,
|
|
2947
|
-
method,
|
|
2948
|
-
path,
|
|
2949
|
-
duration,
|
|
2950
|
-
threshold: cfg.slowRequestThreshold
|
|
2951
|
-
});
|
|
2952
|
-
}
|
|
2953
3037
|
} catch (error) {
|
|
2954
3038
|
const duration = Date.now() - startTime;
|
|
2955
3039
|
apiLogger.error("Request failed", error, {
|
|
@@ -2963,215 +3047,470 @@ function RequestLogger(config2) {
|
|
|
2963
3047
|
};
|
|
2964
3048
|
}
|
|
2965
3049
|
|
|
2966
|
-
// src/server/server.ts
|
|
2967
|
-
init_cache();
|
|
2968
|
-
init_db();
|
|
3050
|
+
// src/server/create-server.ts
|
|
2969
3051
|
init_logger2();
|
|
3052
|
+
|
|
3053
|
+
// src/server/helpers.ts
|
|
3054
|
+
function createHealthCheckHandler(detailed) {
|
|
3055
|
+
return async (c) => {
|
|
3056
|
+
const response = {
|
|
3057
|
+
status: "ok",
|
|
3058
|
+
timestamp: (/* @__PURE__ */ new Date()).toISOString()
|
|
3059
|
+
};
|
|
3060
|
+
if (detailed) {
|
|
3061
|
+
const { getDatabase: getDatabase2 } = await Promise.resolve().then(() => (init_db(), db_exports));
|
|
3062
|
+
const { getRedis: getRedis2 } = await Promise.resolve().then(() => (init_cache(), cache_exports));
|
|
3063
|
+
const db = getDatabase2();
|
|
3064
|
+
let dbStatus = "disconnected";
|
|
3065
|
+
let dbError;
|
|
3066
|
+
if (db) {
|
|
3067
|
+
try {
|
|
3068
|
+
await db.execute("SELECT 1");
|
|
3069
|
+
dbStatus = "connected";
|
|
3070
|
+
} catch (error) {
|
|
3071
|
+
dbStatus = "error";
|
|
3072
|
+
dbError = error instanceof Error ? error.message : String(error);
|
|
3073
|
+
}
|
|
3074
|
+
}
|
|
3075
|
+
const redis = getRedis2();
|
|
3076
|
+
let redisStatus = "disconnected";
|
|
3077
|
+
let redisError;
|
|
3078
|
+
if (redis) {
|
|
3079
|
+
try {
|
|
3080
|
+
await redis.ping();
|
|
3081
|
+
redisStatus = "connected";
|
|
3082
|
+
} catch (error) {
|
|
3083
|
+
redisStatus = "error";
|
|
3084
|
+
redisError = error instanceof Error ? error.message : String(error);
|
|
3085
|
+
}
|
|
3086
|
+
}
|
|
3087
|
+
response.services = {
|
|
3088
|
+
database: {
|
|
3089
|
+
status: dbStatus,
|
|
3090
|
+
...dbError && { error: dbError }
|
|
3091
|
+
},
|
|
3092
|
+
redis: {
|
|
3093
|
+
status: redisStatus,
|
|
3094
|
+
...redisError && { error: redisError }
|
|
3095
|
+
}
|
|
3096
|
+
};
|
|
3097
|
+
const hasErrors = dbStatus === "error" || redisStatus === "error";
|
|
3098
|
+
response.status = hasErrors ? "degraded" : "ok";
|
|
3099
|
+
}
|
|
3100
|
+
const statusCode = response.status === "ok" ? 200 : 503;
|
|
3101
|
+
return c.json(response, statusCode);
|
|
3102
|
+
};
|
|
3103
|
+
}
|
|
3104
|
+
function applyServerTimeouts(server, timeouts) {
|
|
3105
|
+
if ("timeout" in server) {
|
|
3106
|
+
server.timeout = timeouts.request;
|
|
3107
|
+
server.keepAliveTimeout = timeouts.keepAlive;
|
|
3108
|
+
server.headersTimeout = timeouts.headers;
|
|
3109
|
+
}
|
|
3110
|
+
}
|
|
3111
|
+
function getTimeoutConfig(config) {
|
|
3112
|
+
return {
|
|
3113
|
+
request: config?.request ?? (parseInt(process.env.SERVER_TIMEOUT || "", 10) || 12e4),
|
|
3114
|
+
keepAlive: config?.keepAlive ?? (parseInt(process.env.SERVER_KEEPALIVE_TIMEOUT || "", 10) || 65e3),
|
|
3115
|
+
headers: config?.headers ?? (parseInt(process.env.SERVER_HEADERS_TIMEOUT || "", 10) || 6e4)
|
|
3116
|
+
};
|
|
3117
|
+
}
|
|
3118
|
+
function getShutdownTimeout(config) {
|
|
3119
|
+
return config?.timeout ?? (parseInt(process.env.SHUTDOWN_TIMEOUT || "", 10) || 3e4);
|
|
3120
|
+
}
|
|
3121
|
+
function buildMiddlewareOrder(config) {
|
|
3122
|
+
const order = [];
|
|
3123
|
+
const middlewareConfig = config.middleware ?? {};
|
|
3124
|
+
const enableLogger = middlewareConfig.logger !== false;
|
|
3125
|
+
const enableCors = middlewareConfig.cors !== false;
|
|
3126
|
+
const enableErrorHandler = middlewareConfig.errorHandler !== false;
|
|
3127
|
+
if (enableLogger) order.push("RequestLogger");
|
|
3128
|
+
if (enableCors) order.push("CORS");
|
|
3129
|
+
config.use?.forEach((_, i) => order.push(`Custom[${i}]`));
|
|
3130
|
+
if (config.beforeRoutes) order.push("beforeRoutes hook");
|
|
3131
|
+
order.push("Routes");
|
|
3132
|
+
if (config.afterRoutes) order.push("afterRoutes hook");
|
|
3133
|
+
if (enableErrorHandler) order.push("ErrorHandler");
|
|
3134
|
+
return order;
|
|
3135
|
+
}
|
|
+ function buildStartupConfig(config, timeouts) {
+ const middlewareConfig = config.middleware ?? {};
+ const healthCheckConfig = config.healthCheck ?? {};
+ const healthCheckEnabled = healthCheckConfig.enabled !== false;
+ const healthCheckPath = healthCheckConfig.path ?? "/health";
+ const healthCheckDetailed = healthCheckConfig.detailed ?? process.env.NODE_ENV === "development";
+ return {
+ middleware: {
+ logger: middlewareConfig.logger !== false,
+ cors: middlewareConfig.cors !== false,
+ errorHandler: middlewareConfig.errorHandler !== false,
+ custom: config.use?.length ?? 0
+ },
+ healthCheck: healthCheckEnabled ? {
+ enabled: true,
+ path: healthCheckPath,
+ detailed: healthCheckDetailed
+ } : { enabled: false },
+ hooks: {
+ beforeRoutes: !!config.beforeRoutes,
+ afterRoutes: !!config.afterRoutes
+ },
+ timeout: {
+ request: `${timeouts.request}ms`,
+ keepAlive: `${timeouts.keepAlive}ms`,
+ headers: `${timeouts.headers}ms`
+ },
+ shutdown: {
+ timeout: `${config.shutdown?.timeout ?? 3e4}ms`
+ }
+ };
+ }
+
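buildStartupConfig assembles the object attached to the "Server started successfully" log entry. For an empty config it resolves to roughly the following; the values shown are the defaults above, and healthCheck.detailed depends on NODE_ENV === "development":

    const startupConfig = {
      middleware: { logger: true, cors: true, errorHandler: true, custom: 0 },
      healthCheck: { enabled: true, path: "/health", detailed: false },
      hooks: { beforeRoutes: false, afterRoutes: false },
      timeout: { request: "120000ms", keepAlive: "65000ms", headers: "60000ms" },
      shutdown: { timeout: "30000ms" },
    };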
+ // src/server/create-server.ts
var serverLogger = logger.child("server");
- async function createServer(
+ async function createServer(config) {
const cwd = process.cwd();
const appPath = join(cwd, "src", "server", "app.ts");
const appJsPath = join(cwd, "src", "server", "app.js");
if (existsSync(appPath) || existsSync(appJsPath)) {
-
-
-
-
-
-
-
-
-
+ return await loadCustomApp(appPath, appJsPath, config);
+ }
+ return await createAutoConfiguredApp(config);
+ }
+ async function loadCustomApp(appPath, appJsPath, config) {
+ const appModule = await (existsSync(appPath) ? import(appPath) : import(appJsPath));
+ const appFactory = appModule.default;
+ if (!appFactory) {
+ throw new Error("app.ts must export a default function that returns a Hono app");
}
+ const app = await appFactory();
+ const debug = config?.debug ?? process.env.NODE_ENV === "development";
+ await loadRoutes(app, { routesDir: config?.routesPath, debug });
+ return app;
+ }
+ async function createAutoConfiguredApp(config) {
const app = new Hono();
- const middlewareConfig =
+ const middlewareConfig = config?.middleware ?? {};
const enableLogger = middlewareConfig.logger !== false;
const enableCors = middlewareConfig.cors !== false;
const enableErrorHandler = middlewareConfig.errorHandler !== false;
+ if (enableErrorHandler) {
+ app.use("*", async (c, next) => {
+ c.set("errorHandlerEnabled", true);
+ await next();
+ });
+ }
+ applyDefaultMiddleware(app, config, enableLogger, enableCors);
+ config?.use?.forEach((mw) => app.use("*", mw));
+ registerHealthCheckEndpoint(app, config);
+ await executeBeforeRoutesHook(app, config);
+ await loadAppRoutes(app, config);
+ await executeAfterRoutesHook(app, config);
+ if (enableErrorHandler) {
+ app.onError(ErrorHandler());
+ }
+ return app;
+ }
+ function applyDefaultMiddleware(app, config, enableLogger, enableCors) {
if (enableLogger) {
app.use("*", RequestLogger());
}
- if (enableCors &&
- app.use("*", cors(
+ if (enableCors && config?.cors !== false) {
+ app.use("*", cors(config?.cors));
}
-
-
+ }
+ function registerHealthCheckEndpoint(app, config) {
+ const healthCheckConfig = config?.healthCheck ?? {};
const healthCheckEnabled = healthCheckConfig.enabled !== false;
const healthCheckPath = healthCheckConfig.path ?? "/health";
const healthCheckDetailed = healthCheckConfig.detailed ?? process.env.NODE_ENV === "development";
if (healthCheckEnabled) {
- app.get(healthCheckPath,
- const response = {
- status: "ok",
- timestamp: (/* @__PURE__ */ new Date()).toISOString()
- };
- if (healthCheckDetailed) {
- const { getDatabase: getDatabase2 } = await Promise.resolve().then(() => (init_db(), db_exports));
- const { getRedis: getRedis2 } = await Promise.resolve().then(() => (init_cache(), cache_exports));
- const db2 = getDatabase2();
- let dbStatus = "disconnected";
- if (db2) {
- try {
- await db2.execute("SELECT 1");
- dbStatus = "connected";
- } catch {
- dbStatus = "error";
- }
- }
- const redis = getRedis2();
- let redisStatus = "disconnected";
- if (redis) {
- try {
- await redis.ping();
- redisStatus = "connected";
- } catch {
- redisStatus = "error";
- }
- }
- response.services = {
- database: dbStatus,
- redis: redisStatus
- };
- }
- return c.json(response);
- });
+ app.get(healthCheckPath, createHealthCheckHandler(healthCheckDetailed));
serverLogger.debug(`Health check endpoint enabled at ${healthCheckPath}`);
}
-
-
+ }
+ async function executeBeforeRoutesHook(app, config) {
+ if (!config?.beforeRoutes) {
+ return;
+ }
+ try {
+ await config.beforeRoutes(app);
+ } catch (error) {
+ serverLogger.error("beforeRoutes hook failed", error);
+ throw new Error("Server initialization failed in beforeRoutes hook");
+ }
+ }
+ async function loadAppRoutes(app, config) {
+ const debug = config?.debug ?? process.env.NODE_ENV === "development";
await loadRoutes(app, {
- routesDir:
+ routesDir: config?.routesPath,
debug,
- middlewares:
+ middlewares: config?.middlewares
});
-
-
-
+ }
+ async function executeAfterRoutesHook(app, config) {
+ if (!config?.afterRoutes) {
+ return;
+ }
+ try {
+ await config.afterRoutes(app);
+ } catch (error) {
+ serverLogger.error("afterRoutes hook failed", error);
+ throw new Error("Server initialization failed in afterRoutes hook");
}
- return app;
}
-
-
-
-
-
-
-
-
+
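createServer now branches: if src/server/app.ts (or .js) exists, loadCustomApp imports its default export, calls it, and still runs loadRoutes against the returned app; otherwise createAutoConfiguredApp builds the default middleware stack. A minimal custom factory that satisfies loadCustomApp; the route shown is purely illustrative:

    // src/server/app.ts
    import { Hono } from "hono";

    export default async function createApp() {
      const app = new Hono();
      app.get("/custom-ping", (c) => c.text("pong")); // illustrative; file-based routes are still auto-loaded
      return app;
    }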
+ // src/server/start-server.ts
+ init_cache();
+ init_db();
+ init_logger2();
+ function getNetworkAddress() {
+ const nets = networkInterfaces();
+ for (const name of Object.keys(nets)) {
+ const netGroup = nets[name];
+ if (!netGroup) continue;
+ for (const net of netGroup) {
+ if (net.family === "IPv4" && !net.internal) {
+ return net.address;
+ }
+ }
}
-
-
-
-
-
-
+ return null;
+ }
+ function printBanner(options) {
+ const { mode, host, port } = options;
+ console.log("");
+ console.log(" _____ ____ ______ _ _");
+ console.log(" / ____| _ \\| ____| \\ | |");
+ console.log(" | (___ | |_) | |__ | \\| |");
+ console.log(" \\___ \\| __/| __| | . ` |");
+ console.log(" ____) | | | | | |\\ |");
+ console.log(" |_____/|_| |_| |_| \\_|");
+ console.log("");
+ console.log(` Mode: ${mode}`);
+ if (host === "0.0.0.0") {
+ const networkIP = getNetworkAddress();
+ console.log(` \u25B2 Local: http://localhost:${port}`);
+ if (networkIP) {
+ console.log(` \u25B2 Network: http://${networkIP}:${port}`);
+ }
+ } else {
+ console.log(` \u25B2 Local: http://${host}:${port}`);
+ }
+ console.log("");
+ }
+
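printBanner replaces the old two-line console output; when the server binds to 0.0.0.0 it also prints the first non-internal IPv4 address found by getNetworkAddress. The same selection logic, as a standalone sketch:

    import { networkInterfaces } from "node:os";

    // Pick the first non-internal IPv4 address, or null when none is available.
    function firstLanAddress(): string | null {
      for (const group of Object.values(networkInterfaces())) {
        for (const net of group ?? []) {
          if (net.family === "IPv4" && !net.internal) return net.address;
        }
      }
      return null;
    }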
+ // src/server/validation.ts
+ function validateServerConfig(config) {
+ if (config.port !== void 0) {
+ if (!Number.isInteger(config.port) || config.port < 0 || config.port > 65535) {
+ throw new Error(
+ `Invalid port: ${config.port}. Port must be an integer between 0 and 65535.`
+ );
+ }
+ }
+ if (config.timeout) {
+ const { request, keepAlive, headers } = config.timeout;
+ if (request !== void 0 && (request < 0 || !Number.isFinite(request))) {
+ throw new Error(`Invalid timeout.request: ${request}. Must be a positive number.`);
+ }
+ if (keepAlive !== void 0 && (keepAlive < 0 || !Number.isFinite(keepAlive))) {
+ throw new Error(`Invalid timeout.keepAlive: ${keepAlive}. Must be a positive number.`);
+ }
+ if (headers !== void 0 && (headers < 0 || !Number.isFinite(headers))) {
+ throw new Error(`Invalid timeout.headers: ${headers}. Must be a positive number.`);
+ }
+ if (headers && request && headers > request) {
+ throw new Error(
+ `Invalid timeout configuration: headers timeout (${headers}ms) cannot exceed request timeout (${request}ms).`
+ );
+ }
+ }
+ if (config.shutdown?.timeout !== void 0) {
+ const timeout = config.shutdown.timeout;
+ if (timeout < 0 || !Number.isFinite(timeout)) {
+ throw new Error(`Invalid shutdown.timeout: ${timeout}. Must be a positive number.`);
+ }
+ }
+ if (config.healthCheck?.path) {
+ if (!config.healthCheck.path.startsWith("/")) {
+ throw new Error(
+ `Invalid healthCheck.path: "${config.healthCheck.path}". Must start with "/".`
+ );
+ }
+ }
+ }
+
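validateServerConfig rejects a config before anything is initialized: ports outside 0-65535, negative or non-finite timeouts, a headers timeout larger than the request timeout, and health-check paths that do not start with "/". Illustrative rejections; the import path is assumed and should be adjusted to the package's actual export:

    import { startServer } from "@spfn/core"; // assumed entry point

    // Each of these, on its own, is rejected by validateServerConfig before the server starts:
    //   { port: 70_000 }                                 -> invalid port
    //   { timeout: { request: 5_000, headers: 10_000 } } -> headers timeout cannot exceed request timeout
    //   { healthCheck: { path: "health" } }              -> path must start with "/"
    await startServer({ healthCheck: { path: "health" } }).catch((err) => console.error(err.message));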
+ // src/server/start-server.ts
+ var serverLogger2 = logger.child("server");
+ async function startServer(config) {
+ const finalConfig = await loadAndMergeConfig(config);
const { host, port, debug } = finalConfig;
+ validateServerConfig(finalConfig);
+ if (debug) {
+ logMiddlewareOrder(finalConfig);
+ }
try {
-
- await initDatabase(finalConfig.database);
- serverLogger.debug("Initializing Redis...");
- await initRedis();
- serverLogger.debug("Creating Hono app...");
+ await initializeInfrastructure(finalConfig);
const app = await createServer(finalConfig);
-
- const
-
-
-
-
-
-
- const timeoutConfig = finalConfig.timeout ?? {};
- const requestTimeout = timeoutConfig.request ?? (parseInt(process.env.SERVER_TIMEOUT || "", 10) || 12e4);
- const keepAliveTimeout = timeoutConfig.keepAlive ?? (parseInt(process.env.SERVER_KEEPALIVE_TIMEOUT || "", 10) || 65e3);
- const headersTimeout = timeoutConfig.headers ?? (parseInt(process.env.SERVER_HEADERS_TIMEOUT || "", 10) || 6e4);
- if ("timeout" in server) {
- server.timeout = requestTimeout;
- server.keepAliveTimeout = keepAliveTimeout;
- server.headersTimeout = headersTimeout;
- }
- serverLogger.info("Server timeouts configured", {
- request: `${requestTimeout}ms`,
- keepAlive: `${keepAliveTimeout}ms`,
- headers: `${headersTimeout}ms`
- });
- console.log(` \u25B2 SPFN ${debug ? "dev" : "production"}`);
- console.log(` - Local: http://${host}:${port}`);
- console.log("");
- const shutdownServer = async () => {
- serverLogger.debug("Closing HTTP server...");
- await new Promise((resolve) => {
- server.close(() => {
- serverLogger.info("HTTP server closed");
- resolve();
- });
- });
- serverLogger.debug("Closing database connections...");
- await closeDatabase();
- serverLogger.debug("Closing Redis connections...");
- await closeRedis();
- serverLogger.info("Server shutdown completed");
- };
- const shutdown = async (signal) => {
- serverLogger.info(`${signal} received, starting graceful shutdown...`);
- const shutdownTimeout = finalConfig.shutdown?.timeout ?? (parseInt(process.env.SHUTDOWN_TIMEOUT || "", 10) || 3e4);
- const timeoutPromise = new Promise((_, reject) => {
- setTimeout(() => {
- reject(new Error(`Graceful shutdown timeout after ${shutdownTimeout}ms`));
- }, shutdownTimeout);
- });
- try {
- await Promise.race([
- shutdownServer(),
- timeoutPromise
- ]);
- serverLogger.info("Graceful shutdown completed successfully");
- process.exit(0);
- } catch (error) {
- const err = error;
- if (err.message && err.message.includes("timeout")) {
- serverLogger.error("Graceful shutdown timeout, forcing exit", err);
- } else {
- serverLogger.error("Error during graceful shutdown", err);
- }
- process.exit(1);
- }
- };
- const close = async () => {
- serverLogger.info("Manual server shutdown requested");
- await shutdownServer();
- };
- process.on("SIGTERM", () => shutdown("SIGTERM"));
- process.on("SIGINT", () => shutdown("SIGINT"));
- process.on("uncaughtException", (error) => {
- serverLogger.error("Uncaught exception", error);
- shutdown("UNCAUGHT_EXCEPTION");
- });
- process.on("unhandledRejection", (reason, promise) => {
- serverLogger.error("Unhandled promise rejection", {
- reason,
- promise
- });
- shutdown("UNHANDLED_REJECTION");
+ const server = startHttpServer(app, host, port);
+ const timeouts = getTimeoutConfig(finalConfig.timeout);
+ applyServerTimeouts(server, timeouts);
+ logServerTimeouts(timeouts);
+ printBanner({
+ mode: debug ? "Development" : "Production",
+ host,
+ port
});
+ logServerStarted(debug, host, port, finalConfig, timeouts);
+ const shutdownServer = createShutdownHandler(server);
+ const shutdown = createGracefulShutdown(shutdownServer, finalConfig);
+ registerShutdownHandlers(shutdown);
return {
server,
app,
config: finalConfig,
- close
+ close: async () => {
+ serverLogger2.info("Manual server shutdown requested");
+ await shutdownServer();
+ }
};
} catch (error) {
const err = error;
-
+ serverLogger2.error("Server initialization failed", err);
+ await cleanupOnFailure();
+ throw error;
+ }
+ }
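startServer now validates the merged config, initializes the database and Redis, builds the app, applies timeouts, prints the banner, wires graceful shutdown, and returns a handle. A minimal usage sketch; the import path is assumed, and the options shown map directly to the code above:

    import { startServer } from "@spfn/core"; // assumed entry point

    const { app, server, config, close } = await startServer({
      port: 4000,                      // falls back to PORT, then 4000
      healthCheck: { detailed: true }, // include database/redis status in /health
    });

    // e.g. in tests or scripts, shut everything down manually:
    await close();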
+ async function loadAndMergeConfig(config) {
+ const cwd = process.cwd();
+ const configPath = join(cwd, "src", "server", "server.config.ts");
+ const configJsPath = join(cwd, "src", "server", "server.config.js");
+ const builtConfigMjsPath = join(cwd, ".spfn", "server", "server.config.mjs");
+ const builtConfigPath = join(cwd, ".spfn", "server", "server.config.js");
+ let fileConfig = {};
+ if (existsSync(builtConfigMjsPath)) {
+ const configModule = await import(builtConfigMjsPath);
+ fileConfig = configModule.default ?? {};
+ } else if (existsSync(builtConfigPath)) {
+ const configModule = await import(builtConfigPath);
+ fileConfig = configModule.default ?? {};
+ } else if (existsSync(configJsPath)) {
+ const configModule = await import(configJsPath);
+ fileConfig = configModule.default ?? {};
+ } else if (existsSync(configPath)) {
+ const configModule = await import(configPath);
+ fileConfig = configModule.default ?? {};
+ }
+ return {
+ ...fileConfig,
+ ...config,
+ port: config?.port ?? fileConfig?.port ?? (parseInt(process.env.PORT || "", 10) || 4e3),
+ host: config?.host ?? fileConfig?.host ?? (process.env.HOST || "localhost")
+ };
+ }
+ function logMiddlewareOrder(config) {
+ const middlewareOrder = buildMiddlewareOrder(config);
+ serverLogger2.debug("Middleware execution order", {
+ order: middlewareOrder
+ });
+ }
+ async function initializeInfrastructure(config) {
+ serverLogger2.debug("Initializing database...");
+ await initDatabase(config.database);
+ serverLogger2.debug("Initializing Redis...");
+ await initRedis();
+ }
+ function startHttpServer(app, host, port) {
+ serverLogger2.debug(`Starting server on ${host}:${port}...`);
+ return serve({
+ fetch: app.fetch,
+ port,
+ hostname: host
+ });
+ }
+ function logServerTimeouts(timeouts) {
+ serverLogger2.info("Server timeouts configured", {
+ request: `${timeouts.request}ms`,
+ keepAlive: `${timeouts.keepAlive}ms`,
+ headers: `${timeouts.headers}ms`
+ });
+ }
+ function logServerStarted(debug, host, port, config, timeouts) {
+ const startupConfig = buildStartupConfig(config, timeouts);
+ serverLogger2.info("Server started successfully", {
+ mode: debug ? "development" : "production",
+ host,
+ port,
+ config: startupConfig
+ });
+ }
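loadAndMergeConfig looks for a config file in this order: .spfn/server/server.config.mjs, .spfn/server/server.config.js, src/server/server.config.js, src/server/server.config.ts. Options passed to startServer() override the file config, and port/host additionally fall back to the PORT and HOST environment variables. An illustrative file config; the option names are taken from this diff:

    // src/server/server.config.ts -- picked up when no built config exists under .spfn/server/
    export default {
      host: "0.0.0.0",             // also triggers the Network line in the banner
      middleware: { cors: false },
    };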
+ function createShutdownHandler(server) {
+ return async () => {
+ serverLogger2.debug("Closing HTTP server...");
+ await new Promise((resolve) => {
+ server.close(() => {
+ serverLogger2.info("HTTP server closed");
+ resolve();
+ });
+ });
+ serverLogger2.debug("Closing database connections...");
+ await closeDatabase();
+ serverLogger2.debug("Closing Redis connections...");
+ await closeRedis();
+ serverLogger2.info("Server shutdown completed");
+ };
+ }
+ function createGracefulShutdown(shutdownServer, config) {
+ return async (signal) => {
+ serverLogger2.info(`${signal} received, starting graceful shutdown...`);
+ const shutdownTimeout = getShutdownTimeout(config.shutdown);
+ const timeoutPromise = new Promise((_, reject) => {
+ setTimeout(() => {
+ reject(new Error(`Graceful shutdown timeout after ${shutdownTimeout}ms`));
+ }, shutdownTimeout);
+ });
try {
-
-
-
-
-
-
+ await Promise.race([
+ shutdownServer(),
+ timeoutPromise
+ ]);
+ serverLogger2.info("Graceful shutdown completed successfully");
+ process.exit(0);
+ } catch (error) {
+ const err = error;
+ if (err.message && err.message.includes("timeout")) {
+ serverLogger2.error("Graceful shutdown timeout, forcing exit", err);
+ } else {
+ serverLogger2.error("Error during graceful shutdown", err);
+ }
+ process.exit(1);
}
-
+ };
+ }
+ function registerShutdownHandlers(shutdown) {
+ process.on("SIGTERM", () => shutdown("SIGTERM"));
+ process.on("SIGINT", () => shutdown("SIGINT"));
+ process.on("uncaughtException", (error) => {
+ serverLogger2.error("Uncaught exception", error);
+ shutdown("UNCAUGHT_EXCEPTION");
+ });
+ process.on("unhandledRejection", (reason, promise) => {
+ serverLogger2.error("Unhandled promise rejection", {
+ reason,
+ promise
+ });
+ shutdown("UNHANDLED_REJECTION");
+ });
+ }
+ async function cleanupOnFailure() {
+ try {
+ serverLogger2.debug("Cleaning up after initialization failure...");
+ await closeDatabase();
+ await closeRedis();
+ serverLogger2.debug("Cleanup completed");
+ } catch (cleanupError) {
+ serverLogger2.error("Cleanup failed", cleanupError);
}
}
