nodecore-kit 0.3.0 → 0.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +530 -42
- package/dist/index.cjs +1534 -206
- package/dist/index.d.ts +919 -36
- package/dist/index.js +1510 -207
- package/package.json +9 -2
package/dist/index.cjs
CHANGED
|
@@ -34,58 +34,241 @@ __export(src_exports, {
|
|
|
34
34
|
AuthenticationError: () => AuthenticationError,
|
|
35
35
|
AuthorizationError: () => AuthorizationError,
|
|
36
36
|
BadRequestError: () => BadRequestError,
|
|
37
|
+
Cron: () => Cron,
|
|
37
38
|
ExistingError: () => ExistingError,
|
|
38
39
|
HTTP_STATUS: () => HTTP_STATUS,
|
|
39
40
|
HTTP_STATUS_CODE_ERROR: () => HTTP_STATUS_CODE_ERROR,
|
|
40
41
|
NoContent: () => NoContent,
|
|
41
42
|
NotFoundError: () => NotFoundError,
|
|
42
43
|
Redis: () => Redis,
|
|
44
|
+
S3: () => S3,
|
|
43
45
|
SQS: () => SQS,
|
|
44
46
|
ServerError: () => ServerError,
|
|
45
47
|
TokenExpiredError: () => TokenExpiredError,
|
|
46
48
|
TokenInvalidError: () => TokenInvalidError,
|
|
47
49
|
ValidationError: () => ValidationError,
|
|
48
50
|
WinstonLogger: () => WinstonLogger,
|
|
51
|
+
camelCase: () => camelCase,
|
|
49
52
|
capitalize: () => capitalize,
|
|
53
|
+
countOccurrences: () => countOccurrences,
|
|
54
|
+
debounce: () => debounce,
|
|
50
55
|
errorHandler: () => errorHandler,
|
|
51
56
|
expressErrorMiddleware: () => expressErrorMiddleware,
|
|
57
|
+
flattenObject: () => flattenObject,
|
|
52
58
|
formatDate: () => formatDate,
|
|
59
|
+
hashService: () => hashService,
|
|
60
|
+
isArray: () => isArray,
|
|
61
|
+
isBlank: () => isBlank,
|
|
62
|
+
isBoolean: () => isBoolean,
|
|
63
|
+
isDate: () => isDate,
|
|
64
|
+
isEmail: () => isEmail,
|
|
53
65
|
isEmpty: () => isEmpty,
|
|
66
|
+
isInteger: () => isInteger,
|
|
67
|
+
isJSON: () => isJSON,
|
|
68
|
+
isNegative: () => isNegative,
|
|
69
|
+
isNil: () => isNil,
|
|
70
|
+
isNumber: () => isNumber,
|
|
54
71
|
isObject: () => isObject,
|
|
55
|
-
|
|
72
|
+
isPositive: () => isPositive,
|
|
73
|
+
isString: () => isString,
|
|
74
|
+
isURL: () => isURL,
|
|
75
|
+
isUUID: () => isUUID,
|
|
76
|
+
joiMiddleware: () => joiMiddleware,
|
|
77
|
+
joiValidate: () => joiValidate,
|
|
56
78
|
jwtService: () => jwtService,
|
|
79
|
+
kebabCase: () => kebabCase,
|
|
57
80
|
makeRequest: () => makeRequest,
|
|
81
|
+
maskString: () => maskString,
|
|
82
|
+
memoize: () => memoize,
|
|
83
|
+
normalizeWhitespace: () => normalizeWhitespace,
|
|
84
|
+
once: () => once,
|
|
58
85
|
paginate: () => paginate,
|
|
59
86
|
parseJSON: () => parseJSON,
|
|
87
|
+
pascalCase: () => pascalCase,
|
|
88
|
+
retry: () => retry,
|
|
89
|
+
reverse: () => reverse,
|
|
60
90
|
sleep: () => sleep,
|
|
91
|
+
snakeCase: () => snakeCase,
|
|
92
|
+
splitWords: () => splitWords,
|
|
61
93
|
stringifyJSON: () => stringifyJSON,
|
|
94
|
+
throttle: () => throttle,
|
|
95
|
+
timeout: () => timeout,
|
|
96
|
+
toLowerCase: () => toLowerCase,
|
|
97
|
+
toUpperCase: () => toUpperCase,
|
|
98
|
+
truncate: () => truncate,
|
|
99
|
+
unflattenObject: () => unflattenObject,
|
|
62
100
|
uuid: () => uuid
|
|
63
101
|
});
|
|
64
102
|
module.exports = __toCommonJS(src_exports);
|
|
65
103
|
|
|
66
104
|
// src/transport/http.ts
|
|
67
105
|
var import_axios = __toESM(require("axios"), 1);
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
|
|
106
|
+
|
|
107
|
+
// src/core/async.ts
/** Promise-based setTimeout: resolves (with undefined) after `ms` milliseconds. */
var sleep = (ms) => {
  return new Promise((resolve) => {
    setTimeout(resolve, ms);
  });
};
/**
 * Runs `fn`, retrying on failure.
 *
 * Options:
 * - retries (default 3): extra attempts after the first call.
 * - delay (default 500): ms to wait before the next attempt.
 * - exponential (default true): double the wait after each failure.
 * - onError(err, attemptNumber): invoked for every failed attempt that will
 *   be retried (attemptNumber starts at 1).
 *
 * Rethrows the final error once all attempts are exhausted.
 */
var retry = async (fn, options = {}) => {
  const { retries = 3, delay = 500, exponential = true, onError } = options;
  let wait = delay;
  for (let attemptNo = 0; ; attemptNo++) {
    try {
      return await fn();
    } catch (err) {
      if (attemptNo >= retries)
        throw err;
      onError?.(err, attemptNo + 1);
      await sleep(wait);
      if (exponential)
        wait *= 2;
    }
  }
};
|
|
125
|
+
/**
 * Races `promise` against a timer: rejects with
 * "Operation timed out after {ms}ms" if the promise has not settled in time.
 * The internal timer is always cleared once the race settles, so the process
 * is not kept alive by a stale timeout.
 */
var timeout = (promise, ms) => {
  let handle;
  const deadline = new Promise((_, reject) => {
    handle = setTimeout(() => {
      reject(new Error(`Operation timed out after ${ms}ms`));
    }, ms);
  });
  return Promise.race([promise, deadline]).finally(() => clearTimeout(handle));
};
|
|
138
|
+
/**
 * Returns a debounced wrapper around `fn` that postpones invocation until
 * `delay` ms have elapsed since the most recent call. The wrapper itself
 * always returns undefined.
 *
 * - debounced.cancel(): drops any pending invocation and forgets stored args.
 * - debounced.flush(...args): fires immediately with the explicit args, or
 *   with the most recently stored args when none are given; returns fn's
 *   result (undefined when there is nothing to call).
 */
var debounce = (fn, delay) => {
  let pending;
  let storedArgs;
  function debounced(...args) {
    storedArgs = args;
    clearTimeout(pending);
    pending = setTimeout(() => {
      pending = undefined;
      fn(...args);
    }, delay);
  }
  debounced.cancel = () => {
    clearTimeout(pending);
    pending = undefined;
    storedArgs = undefined;
  };
  debounced.flush = (...args) => {
    clearTimeout(pending);
    pending = undefined;
    const effective = args.length > 0 ? args : storedArgs;
    return effective ? fn(...effective) : undefined;
  };
  return debounced;
}
|
|
165
|
+
/**
 * Returns a throttled wrapper that invokes `fn` at most once per `limit` ms
 * (leading edge), returning fn's result on the leading call and undefined on
 * suppressed calls. With `trailing: true`, calls that arrive while throttled
 * cause exactly one extra invocation — with the latest args — when the
 * window closes.
 *
 * throttled.cancel() ends the current window and drops any pending trailing
 * invocation.
 */
var throttle = (fn, limit, { trailing = false } = {}) => {
  let inThrottle = false;
  let trailingTimer;
  let lastArgs;
  const throttled = (...args) => {
    if (!inThrottle) {
      const result = fn(...args);
      inThrottle = true;
      // FIX: keep the timer handle — the original discarded it, so cancel()
      // cleared an always-undefined `trailingTimer` and could never stop a
      // pending trailing invocation.
      trailingTimer = setTimeout(() => {
        inThrottle = false;
        if (trailing && lastArgs) {
          fn(...lastArgs);
          lastArgs = void 0;
        }
      }, limit);
      return result;
    }
    // FIX: record args for the trailing call only while throttled. The
    // original set `lastArgs` on the leading call too, so a single call with
    // trailing enabled invoked `fn` twice.
    lastArgs = args;
    return void 0;
  };
  throttled.cancel = () => {
    clearTimeout(trailingTimer);
    inThrottle = false;
    lastArgs = void 0;
  };
  return throttled;
};
|
|
192
|
+
/**
 * Memoizes `fn` by a cache key derived from its arguments —
 * JSON.stringify(args) by default, or a caller-supplied `keyFn`.
 * Promise results are cached only once resolved; a rejected promise evicts
 * its key so the call can be retried.
 *
 * Exposes `memoized.cache` (the backing Map) and `memoized.clear()`.
 */
var memoize = (fn, keyFn) => {
  const cache = new Map();
  const memoized = (...args) => {
    const cacheKey = keyFn ? keyFn(...args) : JSON.stringify(args);
    if (cache.has(cacheKey))
      return cache.get(cacheKey);
    const outcome = fn(...args);
    if (!(outcome instanceof Promise)) {
      cache.set(cacheKey, outcome);
      return outcome;
    }
    return outcome.then(
      (resolved) => {
        cache.set(cacheKey, resolved);
        return resolved;
      },
      (err) => {
        cache.delete(cacheKey);
        throw err;
      }
    );
  };
  memoized.cache = cache;
  memoized.clear = () => cache.clear();
  return memoized;
};
|
|
215
|
+
/**
 * Wraps `fn` so only the first call actually runs; every later call returns
 * the result of that first invocation (later arguments are ignored).
 */
var once = (fn) => {
  let invoked = false;
  let value;
  return (...args) => {
    if (invoked)
      return value;
    invoked = true;
    value = fn(...args);
    return value;
  };
};
|
|
226
|
+
|
|
227
|
+
// src/transport/http.ts
/**
 * Thin axios wrapper with bearer-token support, progress callbacks and
 * exponential-backoff retries.
 *
 * options: url, method ("GET" default), headers, token (sent as
 * `Authorization: Bearer <token>` on top of caller headers), data, params,
 * timeout (ms, default 1e4), retries (default 0), onProgress (wired to both
 * axios upload and download progress hooks).
 *
 * Retries only when there was no response at all (network error) or the
 * response status is >= 500; waits 300ms * 2^attempt between attempts.
 * `_retryCount` is internal recursion state — callers should not pass it.
 *
 * Resolves with `response.data`. On final failure throws a plain object
 * (NOTE: not an Error instance) shaped
 * { isHttpError, message, httpStatusCode, data }.
 */
var makeRequest = async (options, _retryCount = 0) => {
  const {
    url,
    method = "GET",
    headers = {},
    token,
    data,
    params,
    timeout: timeout2 = 1e4,
    retries = 0,
    onProgress
  } = options;
  // Caller headers win over the default; the Authorization header wins over both.
  const resolvedHeaders = {
    "X-Requested-With": "XMLHttpRequest",
    ...headers,
    ...token ? { Authorization: `Bearer ${token}` } : {}
  };
  try {
    // (0, import_axios.default)(...) — esbuild interop call of axios' default export.
    const result = await (0, import_axios.default)({
      method,
      url,
      headers: resolvedHeaders,
      data,
      params,
      timeout: timeout2,
      ...onProgress && {
        onUploadProgress: onProgress,
        onDownloadProgress: onProgress
      }
    });
    return result.data;
  } catch (err) {
    // Retry transient failures: no response (network) or server-side 5xx.
    const shouldRetry = _retryCount < retries && (!err.response || err.response.status >= 500);
    if (shouldRetry) {
      await sleep(2 ** _retryCount * 300);
      return makeRequest(options, _retryCount + 1);
    }
    // Normalize axios errors into a stable shape for callers.
    const error = {
      isHttpError: true,
      message: err.response?.data?.message ?? err.message ?? "Request failed",
      httpStatusCode: err.response?.status ?? null,
      data: err.response?.data ?? null
    };
    throw error;
  }
};
|
|
91
274
|
|
|
@@ -129,22 +312,12 @@ var AppError = class extends Error {
|
|
|
129
312
|
};
|
|
130
313
|
/**
 * Domain error for failed input validation.
 * Maps to HTTP_STATUS.UNPROCESSABLE_ENTITY with error code "VALIDATION_ERROR";
 * `meta` is passed through to AppError for structured context.
 */
var ValidationError = class extends AppError {
  constructor(message, meta) {
    super(HTTP_STATUS.UNPROCESSABLE_ENTITY, message, "VALIDATION_ERROR", meta);
  }
};
|
|
140
318
|
/**
 * Domain error for failed authentication (missing/invalid credentials).
 * Maps to HTTP_STATUS.UNAUTHORIZED with error code "AUTHENTICATION_ERROR";
 * `meta` is passed through to AppError for structured context.
 */
var AuthenticationError = class extends AppError {
  constructor(message, meta) {
    super(HTTP_STATUS.UNAUTHORIZED, message, "AUTHENTICATION_ERROR", meta);
  }
};
|
|
150
323
|
var AuthorizationError = class extends AppError {
|
|
@@ -265,20 +438,20 @@ var stringifyJSON = (value) => {
|
|
|
265
438
|
return value;
|
|
266
439
|
}
|
|
267
440
|
};
|
|
268
|
-
var isObject = (val) => val && typeof val === "object" && !Array.isArray(val);
|
|
269
|
-
var sleep = (ms) => new Promise((res) => setTimeout(res, ms));
|
|
270
|
-
var capitalize = (str) => str.charAt(0).toUpperCase() + str.slice(1);
|
|
271
|
-
var isEmpty = (val) => val === null || val === void 0 || typeof val === "object" && Object.keys(val).length === 0 || typeof val === "string" && val.trim() === "";
|
|
272
441
|
|
|
273
442
|
// src/core/uuid.ts
|
|
274
443
|
var import_uuid = require("uuid");
|
|
275
444
|
var uuid = {
|
|
276
|
-
|
|
277
|
-
|
|
278
|
-
|
|
279
|
-
|
|
280
|
-
|
|
281
|
-
|
|
445
|
+
/**
|
|
446
|
+
* Converts a UUID string to its optimized binary representation (Buffer).
|
|
447
|
+
* Reorders bytes for better index performance in databases like MySQL.
|
|
448
|
+
* If no UUID is provided, generates a new v1 UUID.
|
|
449
|
+
*/
|
|
450
|
+
toBinary: (value) => {
|
|
451
|
+
if (Buffer.isBuffer(value))
|
|
452
|
+
return value;
|
|
453
|
+
const id = value ?? (0, import_uuid.v1)();
|
|
454
|
+
const buf = Buffer.from(id.replace(/-/g, ""), "hex");
|
|
282
455
|
return Buffer.concat([
|
|
283
456
|
buf.subarray(6, 8),
|
|
284
457
|
buf.subarray(4, 6),
|
|
@@ -286,9 +459,12 @@ var uuid = {
|
|
|
286
459
|
buf.subarray(8, 16)
|
|
287
460
|
]);
|
|
288
461
|
},
|
|
462
|
+
/**
|
|
463
|
+
* Converts a binary UUID Buffer back to its string representation.
|
|
464
|
+
*/
|
|
289
465
|
toString: (binary) => {
|
|
290
466
|
if (!binary)
|
|
291
|
-
throw new Error("
|
|
467
|
+
throw new Error("A binary UUID value is required");
|
|
292
468
|
if (typeof binary === "string")
|
|
293
469
|
return binary;
|
|
294
470
|
return [
|
|
@@ -299,90 +475,287 @@ var uuid = {
|
|
|
299
475
|
binary.toString("hex", 10, 16)
|
|
300
476
|
].join("-");
|
|
301
477
|
},
|
|
302
|
-
|
|
303
|
-
|
|
304
|
-
|
|
305
|
-
|
|
306
|
-
|
|
307
|
-
|
|
478
|
+
/**
|
|
479
|
+
* Generates a new UUID string.
|
|
480
|
+
* Defaults to v4 (random). Pass "v1" for time-based UUIDs.
|
|
481
|
+
*
|
|
482
|
+
* @example
|
|
483
|
+
* uuid.get() // v4 UUID
|
|
484
|
+
* uuid.get("v1") // v1 UUID
|
|
485
|
+
*/
|
|
486
|
+
get: (version = "v4") => {
|
|
487
|
+
return version === "v1" ? (0, import_uuid.v1)() : (0, import_uuid.v4)();
|
|
308
488
|
},
|
|
309
|
-
|
|
489
|
+
/**
|
|
490
|
+
* Returns true if the given string is a valid UUID.
|
|
491
|
+
*/
|
|
492
|
+
isValid: (value) => (0, import_uuid.validate)(value),
|
|
493
|
+
/** The nil UUID — all zeros. Useful as a default/placeholder. */
|
|
494
|
+
nil: "00000000-0000-0000-0000-000000000000",
|
|
495
|
+
/**
|
|
496
|
+
* Converts specified keys of an object from binary UUIDs to strings.
|
|
497
|
+
* Returns a shallow copy — does NOT mutate the original.
|
|
498
|
+
*
|
|
499
|
+
* @example
|
|
500
|
+
* uuid.manyToString({ id: <Buffer>, name: "foo" }, ["id"])
|
|
501
|
+
* // { id: "xxxxxxxx-...", name: "foo" }
|
|
502
|
+
*/
|
|
310
503
|
manyToString: (data, keys = []) => {
|
|
311
504
|
if (!data)
|
|
312
|
-
return;
|
|
505
|
+
return data;
|
|
506
|
+
const result = { ...data };
|
|
313
507
|
keys.forEach((key) => {
|
|
314
|
-
if (
|
|
315
|
-
|
|
508
|
+
if (result[key] != null)
|
|
509
|
+
result[key] = uuid.toString(result[key]);
|
|
316
510
|
});
|
|
317
|
-
return
|
|
511
|
+
return result;
|
|
318
512
|
},
|
|
513
|
+
/**
|
|
514
|
+
* Converts specified keys of an object from UUID strings to binary Buffers.
|
|
515
|
+
* Returns a shallow copy — does NOT mutate the original.
|
|
516
|
+
*
|
|
517
|
+
* @example
|
|
518
|
+
* uuid.manyToBinary({ id: "xxxxxxxx-...", name: "foo" }, ["id"])
|
|
519
|
+
* // { id: <Buffer>, name: "foo" }
|
|
520
|
+
*/
|
|
319
521
|
manyToBinary: (data, keys = []) => {
|
|
320
522
|
if (!data)
|
|
321
|
-
return;
|
|
523
|
+
return data;
|
|
524
|
+
const result = { ...data };
|
|
322
525
|
keys.forEach((key) => {
|
|
323
|
-
if (
|
|
324
|
-
|
|
526
|
+
if (result[key] != null)
|
|
527
|
+
result[key] = uuid.toBinary(result[key]);
|
|
325
528
|
});
|
|
326
|
-
return
|
|
529
|
+
return result;
|
|
530
|
+
}
|
|
531
|
+
};
|
|
532
|
+
|
|
533
|
+
// src/core/object.ts
/**
 * Flattens nested plain objects into a single level, joining key paths with
 * `separator` (default "."). Arrays, Dates and RegExps are treated as leaf
 * values and never recursed into. Non-object / falsy input yields {}.
 */
var flattenObject = (obj, { separator = ".", prefix = "" } = {}) => {
  if (!obj || typeof obj !== "object")
    return {};
  // Only genuine plain objects are descended into.
  const isPlainObject = (val) => val !== null && typeof val === "object" && !Array.isArray(val) && !(val instanceof Date) && !(val instanceof RegExp);
  const flattened = {};
  for (const [key, value] of Object.entries(obj)) {
    const path = prefix ? `${prefix}${separator}${key}` : key;
    if (isPlainObject(value)) {
      Object.assign(flattened, flattenObject(value, { separator, prefix: path }));
    } else {
      flattened[path] = value;
    }
  }
  return flattened;
};
|
|
551
|
+
var unflattenObject = (obj, separator = ".") => {
|
|
552
|
+
if (!obj || typeof obj !== "object")
|
|
553
|
+
return {};
|
|
554
|
+
const result = {};
|
|
555
|
+
for (const key in obj) {
|
|
556
|
+
if (!Object.prototype.hasOwnProperty.call(obj, key))
|
|
557
|
+
continue;
|
|
558
|
+
const keys = key.split(separator);
|
|
559
|
+
keys.reduce((acc, part, index) => {
|
|
560
|
+
if (index === keys.length - 1) {
|
|
561
|
+
acc[part] = obj[key];
|
|
562
|
+
return acc;
|
|
563
|
+
}
|
|
564
|
+
acc[part] = acc[part] && typeof acc[part] === "object" ? acc[part] : {};
|
|
565
|
+
return acc[part];
|
|
566
|
+
}, result);
|
|
567
|
+
}
|
|
568
|
+
return result;
|
|
569
|
+
};
|
|
570
|
+
|
|
571
|
+
// src/core/string.ts
/** Splits a string into lowercase words on non-word runs and camelCase humps. */
var splitWords = (str) => str.replace(/\W+/g, " ").split(/ |\B(?=[A-Z])/).map((w) => w.toLowerCase()).filter(Boolean);
/** Uppercases the first character; returns "" for falsy input. */
var capitalize = (str) => {
  if (!str)
    return "";
  return str.charAt(0).toUpperCase() + str.slice(1);
};
// FIX: guard falsy input for consistency with every sibling helper — the
// originals threw a TypeError on null/undefined.
var toUpperCase = (str) => str ? str.toUpperCase() : "";
var toLowerCase = (str) => str ? str.toLowerCase() : "";
/** "foo-bar_baz qux" -> "fooBarBazQux". Input is lowercased first. */
var camelCase = (str) => {
  if (!str)
    return "";
  return str.trim().toLowerCase().replace(/[-_\s]+(.)?/g, (_, c) => c ? c.toUpperCase() : "");
};
/** camelCase with the first character uppercased: "hello world" -> "HelloWorld". */
var pascalCase = (str) => {
  if (!str)
    return "";
  return camelCase(str).replace(/^(.)/, (c) => c.toUpperCase());
};
/** "helloWorld" -> "hello_world". */
var snakeCase = (str) => {
  if (!str)
    return "";
  return splitWords(str).join("_");
};
/** "Hello World" -> "hello-world". */
var kebabCase = (str) => {
  if (!str)
    return "";
  return splitWords(str).join("-");
};
/**
 * Truncates to at most `length` chars, appending `suffix` (default "...").
 * FIX: the cut index is clamped at 0 — previously a suffix longer than
 * `length` produced a negative slice index and returned nearly the whole
 * string instead of truncating.
 */
var truncate = (str, length = 50, suffix = "...") => {
  if (!str)
    return "";
  if (str.length <= length)
    return str;
  return str.slice(0, Math.max(0, length - suffix.length)).trimEnd() + suffix;
};
/** Masks all but the last `visible` characters with "*". */
var maskString = (str, visible = 4) => {
  if (!str)
    return "";
  const visibleCount = Math.min(visible, str.length);
  const maskedLength = str.length - visibleCount;
  return "*".repeat(maskedLength) + str.slice(maskedLength);
};
/** True for falsy or whitespace-only strings. */
var isBlank = (str) => !str || str.trim().length === 0;
/**
 * Reverses a string.
 * FIX: iterates by code point so surrogate pairs (emoji, astral chars)
 * survive — the original's split("") tore them into broken halves.
 */
var reverse = (str) => {
  if (!str)
    return "";
  return [...str].reverse().join("");
};
/** Counts non-overlapping occurrences of `substr` in `str`. */
var countOccurrences = (str, substr) => {
  if (!str || !substr)
    return 0;
  return str.split(substr).length - 1;
};
/** Trims and collapses internal whitespace runs to single spaces. */
var normalizeWhitespace = (str) => {
  if (!str)
    return "";
  return str.trim().replace(/\s+/g, " ");
};
|
|
630
|
+
|
|
631
|
+
// src/core/validation.ts
/** True for non-null objects that are not arrays. */
var isObject = (val) => {
  if (val === null)
    return false;
  return typeof val === "object" && !Array.isArray(val);
};
/** Loose email shape check: local@domain.tld with no whitespace. */
var isEmail = (value) => /^[^\s@]+@[^\s@]+\.[^\s@]+$/.test(value);
/** RFC-4122-style UUID (versions 1-5, variant nibble 8/9/a/b), case-insensitive. */
var isUUID = (value) => /^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i.test(value);
/** Finite numbers only — rejects NaN and ±Infinity. */
var isNumber = (value) => typeof value === "number" && isFinite(value);
/** True when `value` is a string that JSON.parse accepts. */
var isJSON = (value) => {
  if (!value || typeof value !== "string")
    return false;
  try {
    JSON.parse(value);
    return true;
  } catch {
    return false;
  }
};
/** Valid Date instances only — rejects `new Date("garbage")`. */
var isDate = (value) => value instanceof Date && !isNaN(value.getTime());
/** True for strings parseable as http(s) URLs. */
var isURL = (value) => {
  try {
    const { protocol } = new URL(value);
    return protocol === "http:" || protocol === "https:";
  } catch {
    return false;
  }
};
var isBoolean = (value) => typeof value === "boolean";
var isString = (value) => typeof value === "string";
var isArray = (value) => Array.isArray(value);
/** Whole numbers only (also excludes NaN/Infinity). */
var isInteger = (value) => typeof value === "number" && Number.isInteger(value);
var isPositive = (value) => isNumber(value) && value > 0;
var isNegative = (value) => isNumber(value) && value < 0;
/** null or undefined. */
var isNil = (value) => value === null || value === void 0;
/**
 * "Empty" means: nil, whitespace-only string, zero-length array, or a plain
 * object with no own keys. Everything else (numbers, booleans, ...) is false.
 */
var isEmpty = (val) => {
  if (isNil(val))
    return true;
  if (typeof val === "string")
    return val.trim().length === 0;
  if (Array.isArray(val))
    return val.length === 0;
  return isObject(val) ? Object.keys(val).length === 0 : false;
};
|
|
329
673
|
|
|
330
674
|
// src/transport/express/joiValidator.ts
|
|
331
|
-
var
|
|
332
|
-
|
|
333
|
-
|
|
334
|
-
|
|
675
|
+
// Joi validation defaults: collect every error, reject unknown keys, and
// strip anything the schema does not describe from the returned value.
var DEFAULT_OPTIONS = {
  abortEarly: false,
  allowUnknown: false,
  stripUnknown: true
};
/**
 * Validates `data` against a Joi `schema`, layering caller options over the
 * defaults. Returns the validated (stripped/coerced) value, or throws a
 * ValidationError whose message joins all Joi detail messages with "; ".
 */
var validateField = (schema, data, options = DEFAULT_OPTIONS) => {
  const { error, value } = schema.validate(data, { ...DEFAULT_OPTIONS, ...options });
  if (!error)
    return value;
  const message = error.details.map((d) => d.message).join("; ");
  throw new ValidationError(message);
};
|
|
338
|
-
|
|
339
|
-
if (!
|
|
340
|
-
throw new ValidationError(
|
|
341
|
-
"Kindly supply validation schema to joiValidator"
|
|
342
|
-
);
|
|
343
|
-
if (!isMiddleware) {
|
|
344
|
-
return validate(constraint.schema, constraint.data, constraint.option);
|
|
689
|
+
/**
 * Express middleware factory: validates req.body / params / query / headers /
 * files against the Joi schemas given in `constraints` (each entry shaped
 * { schema, options }), replacing the request field with the validated
 * value so downstream handlers see stripped/coerced data.
 *
 * Throws synchronously at setup time when `constraints` is missing or empty;
 * runtime validation failures are forwarded to `next(err)` for the error
 * middleware to handle.
 */
var joiMiddleware = (constraints) => {
  if (!constraints || !Object.keys(constraints).length) {
    throw new ValidationError("joiMiddleware requires at least one constraint");
  }
  return async (req, res, next) => {
    try {
      if (constraints.body) {
        req.body = validateField(
          constraints.body.schema,
          req.body,
          constraints.body.options
        );
      }
      if (constraints.params) {
        req.params = validateField(
          constraints.params.schema,
          req.params,
          constraints.params.options
        );
      }
      if (constraints.query) {
        req.query = validateField(
          constraints.query.schema,
          req.query,
          constraints.query.options
        );
      }
      if (constraints.headers) {
        req.headers = validateField(
          constraints.headers.schema,
          req.headers,
          constraints.headers.options
        );
      }
      if (constraints.files) {
        req.files = validateField(
          constraints.files.schema,
          req.files,
          constraints.files.options
        );
      }
      next();
    } catch (err) {
      // Validation errors reach the registered express error middleware.
      next(err);
    }
  };
};
|
|
736
|
+
/**
 * One-off (non-middleware) Joi validation.
 * Returns the validated value, or throws a ValidationError when the schema
 * is missing or the data violates it.
 */
var joiValidate = ({
  schema,
  data,
  options
}) => {
  if (!schema)
    throw new ValidationError("joiValidate requires a schema");
  return validateField(schema, data, options);
};
|
|
379
745
|
|
|
380
746
|
// src/adapters/redis.ts
|
|
381
747
|
var import_ioredis = __toESM(require("ioredis"), 1);
|
|
748
|
+
// Console-backed fallback logger, used when no logger is injected into Redis.
var defaultLogger = {
  info(msg, meta) {
    console.info(msg, meta);
  },
  error(msg, meta) {
    console.error(msg, meta);
  },
  warn(msg, meta) {
    console.warn(msg, meta);
  },
  debug(msg, meta) {
    console.debug(msg, meta);
  }
};
|
|
382
754
|
var Redis = class {
|
|
383
|
-
constructor(url, options = {}) {
|
|
755
|
+
constructor(url, options = {}, logger) {
|
|
384
756
|
if (!url)
|
|
385
757
|
throw new ValidationError("Redis connection URL is required");
|
|
758
|
+
this.logger = logger ?? defaultLogger;
|
|
386
759
|
this.client = new import_ioredis.default(url, {
|
|
387
760
|
maxRetriesPerRequest: 3,
|
|
388
761
|
enableReadyCheck: true,
|
|
@@ -391,22 +764,13 @@ var Redis = class {
|
|
|
391
764
|
});
|
|
392
765
|
this.registerListeners();
|
|
393
766
|
}
|
|
767
|
+
// ─── Lifecycle ────────────────────────────────────────────────────────────
|
|
394
768
|
registerListeners() {
|
|
395
|
-
this.client.on("connect", () =>
|
|
396
|
-
|
|
397
|
-
|
|
398
|
-
this.client.on("
|
|
399
|
-
|
|
400
|
-
});
|
|
401
|
-
this.client.on("error", (err) => {
|
|
402
|
-
console.error("\u{1F534} Redis error:", err);
|
|
403
|
-
});
|
|
404
|
-
this.client.on("close", () => {
|
|
405
|
-
console.warn("\u{1F7E0} Redis connection closed");
|
|
406
|
-
});
|
|
407
|
-
this.client.on("reconnecting", () => {
|
|
408
|
-
console.warn("\u{1F7E1} Redis reconnecting...");
|
|
409
|
-
});
|
|
769
|
+
this.client.on("connect", () => this.logger.info("Redis connected"));
|
|
770
|
+
this.client.on("ready", () => this.logger.info("Redis ready"));
|
|
771
|
+
this.client.on("close", () => this.logger.warn("Redis connection closed"));
|
|
772
|
+
this.client.on("reconnecting", () => this.logger.warn("Redis reconnecting..."));
|
|
773
|
+
this.client.on("error", (err) => this.logger.error("Redis error", { err }));
|
|
410
774
|
}
|
|
411
775
|
async start() {
|
|
412
776
|
try {
|
|
@@ -419,19 +783,22 @@ var Redis = class {
|
|
|
419
783
|
}
|
|
420
784
|
async disconnect() {
|
|
421
785
|
try {
|
|
422
|
-
if (this.client.status !== "end")
|
|
786
|
+
if (this.client.status !== "end")
|
|
423
787
|
await this.client.quit();
|
|
424
|
-
}
|
|
425
788
|
} catch {
|
|
426
789
|
await this.client.disconnect();
|
|
427
790
|
}
|
|
428
791
|
}
|
|
429
|
-
|
|
430
|
-
|
|
431
|
-
|
|
792
|
+
// ─── Key Helpers ──────────────────────────────────────────────────────────
|
|
793
|
+
validateKey(key) {
|
|
794
|
+
if (!key || typeof key !== "string") {
|
|
795
|
+
throw new ValidationError("Redis key must be a non-empty string");
|
|
432
796
|
}
|
|
433
|
-
return this.client.keys(pattern);
|
|
434
797
|
}
|
|
798
|
+
buildKey(...parts) {
|
|
799
|
+
return parts.join(":");
|
|
800
|
+
}
|
|
801
|
+
// ─── Serialization ────────────────────────────────────────────────────────
|
|
435
802
|
serialize(data) {
|
|
436
803
|
if (typeof data === "string")
|
|
437
804
|
return data;
|
|
@@ -444,38 +811,25 @@ var Redis = class {
|
|
|
444
811
|
return data;
|
|
445
812
|
return parseJSON(data);
|
|
446
813
|
}
|
|
814
|
+
// ─── Core Operations ─────────────────────────────────────────────────────
|
|
447
815
|
async set(key, data) {
|
|
448
|
-
|
|
449
|
-
throw new ValidationError("Redis key must be a string");
|
|
450
|
-
}
|
|
816
|
+
this.validateKey(key);
|
|
451
817
|
return this.client.set(key, this.serialize(data));
|
|
452
818
|
}
|
|
453
819
|
async setEx(key, data, duration) {
|
|
454
|
-
|
|
455
|
-
throw new ValidationError("Redis key must be a string");
|
|
456
|
-
}
|
|
820
|
+
this.validateKey(key);
|
|
457
821
|
const ttl = this.parseDuration(duration);
|
|
458
822
|
return this.client.setex(key, ttl, this.serialize(data));
|
|
459
823
|
}
|
|
460
824
|
async get(key, parse = true) {
|
|
461
|
-
|
|
462
|
-
throw new ValidationError("Redis key must be a string");
|
|
463
|
-
}
|
|
825
|
+
this.validateKey(key);
|
|
464
826
|
const data = await this.client.get(key);
|
|
465
827
|
return this.deserialize(data, parse);
|
|
466
828
|
}
|
|
467
829
|
async delete(key) {
|
|
468
|
-
|
|
469
|
-
throw new ValidationError("Redis key must be a string");
|
|
470
|
-
}
|
|
830
|
+
this.validateKey(key);
|
|
471
831
|
return Boolean(await this.client.del(key));
|
|
472
832
|
}
|
|
473
|
-
async deleteAll(prefix) {
|
|
474
|
-
const keys = await this.keys(prefix);
|
|
475
|
-
if (!keys.length)
|
|
476
|
-
return 0;
|
|
477
|
-
return this.client.del(...keys);
|
|
478
|
-
}
|
|
479
833
|
async exists(key) {
|
|
480
834
|
return Boolean(await this.client.exists(key));
|
|
481
835
|
}
|
|
@@ -486,54 +840,199 @@ var Redis = class {
|
|
|
486
840
|
const ttl = this.parseDuration(duration);
|
|
487
841
|
return Boolean(await this.client.expire(key, ttl));
|
|
488
842
|
}
|
|
489
|
-
|
|
843
|
+
// ─── Increment / Decrement ────────────────────────────────────────────────
|
|
844
|
+
/**
|
|
845
|
+
* Atomically increments a counter. Creates it at 1 if it doesn't exist.
|
|
846
|
+
* Optionally sets a TTL on first creation.
|
|
847
|
+
*
|
|
848
|
+
* @example
|
|
849
|
+
* await redis.increment("rate:user:123"); // 1, 2, 3...
|
|
850
|
+
* await redis.increment("rate:user:123", "1 hour"); // resets TTL each time
|
|
851
|
+
*/
|
|
852
|
+
async increment(key, ttl) {
|
|
853
|
+
this.validateKey(key);
|
|
854
|
+
const value = await this.client.incr(key);
|
|
855
|
+
if (ttl && value === 1)
|
|
856
|
+
await this.expire(key, ttl);
|
|
857
|
+
return value;
|
|
858
|
+
}
|
|
859
|
+
/**
|
|
860
|
+
* Atomically decrements a counter.
|
|
861
|
+
*/
|
|
862
|
+
async decrement(key) {
|
|
863
|
+
this.validateKey(key);
|
|
864
|
+
return this.client.decr(key);
|
|
865
|
+
}
|
|
866
|
+
// ─── Hash Operations ──────────────────────────────────────────────────────
|
|
867
|
+
/**
|
|
868
|
+
* Sets one or more fields on a Redis hash.
|
|
869
|
+
*
|
|
870
|
+
* @example
|
|
871
|
+
* await redis.hset("user:1", { name: "Alice", role: "admin" });
|
|
872
|
+
*/
|
|
873
|
+
async hset(key, data) {
|
|
874
|
+
this.validateKey(key);
|
|
875
|
+
const serialized = Object.fromEntries(
|
|
876
|
+
Object.entries(data).map(([k, v]) => [k, this.serialize(v)])
|
|
877
|
+
);
|
|
878
|
+
return this.client.hset(key, serialized);
|
|
879
|
+
}
|
|
880
|
+
/**
|
|
881
|
+
* Gets a single field from a Redis hash.
|
|
882
|
+
*/
|
|
883
|
+
async hget(key, field) {
|
|
884
|
+
this.validateKey(key);
|
|
885
|
+
const data = await this.client.hget(key, field);
|
|
886
|
+
return this.deserialize(data);
|
|
887
|
+
}
|
|
888
|
+
/**
|
|
889
|
+
* Gets all fields from a Redis hash as a typed object.
|
|
890
|
+
*/
|
|
891
|
+
async hgetAll(key) {
|
|
892
|
+
this.validateKey(key);
|
|
893
|
+
const data = await this.client.hgetall(key);
|
|
894
|
+
if (!data || Object.keys(data).length === 0)
|
|
895
|
+
return null;
|
|
896
|
+
return Object.fromEntries(
|
|
897
|
+
Object.entries(data).map(([k, v]) => [k, this.deserialize(v)])
|
|
898
|
+
);
|
|
899
|
+
}
|
|
900
|
+
/**
|
|
901
|
+
* Deletes one or more fields from a Redis hash.
|
|
902
|
+
*/
|
|
903
|
+
async hdel(key, ...fields) {
|
|
904
|
+
this.validateKey(key);
|
|
905
|
+
return this.client.hdel(key, ...fields);
|
|
906
|
+
}
|
|
907
|
+
// ─── Scan-based Key Operations ────────────────────────────────────────────
|
|
908
|
+
/**
|
|
909
|
+
* Safely scans for keys matching a pattern using SCAN (non-blocking).
|
|
910
|
+
* Prefer this over KEYS in production — KEYS blocks the event loop.
|
|
911
|
+
*
|
|
912
|
+
* @example
|
|
913
|
+
* await redis.scan("user:*") // ["user:1", "user:2", ...]
|
|
914
|
+
*/
|
|
915
|
+
async scan(pattern) {
|
|
916
|
+
if (!pattern || typeof pattern !== "string") {
|
|
917
|
+
throw new ValidationError("Redis scan pattern must be a string");
|
|
918
|
+
}
|
|
919
|
+
const keys = [];
|
|
920
|
+
let cursor = "0";
|
|
921
|
+
do {
|
|
922
|
+
const [nextCursor, batch] = await this.client.scan(
|
|
923
|
+
cursor,
|
|
924
|
+
"MATCH",
|
|
925
|
+
pattern,
|
|
926
|
+
"COUNT",
|
|
927
|
+
100
|
|
928
|
+
);
|
|
929
|
+
cursor = nextCursor;
|
|
930
|
+
keys.push(...batch);
|
|
931
|
+
} while (cursor !== "0");
|
|
932
|
+
return keys;
|
|
933
|
+
}
|
|
934
|
+
/**
|
|
935
|
+
* Deletes all keys matching a pattern using SCAN + batched DEL.
|
|
936
|
+
* Safe for large keyspaces.
|
|
937
|
+
*
|
|
938
|
+
* @example
|
|
939
|
+
* await redis.deleteByPattern("session:*") // clears all sessions
|
|
940
|
+
*/
|
|
941
|
+
async deleteByPattern(pattern) {
|
|
942
|
+
const keys = await this.scan(pattern);
|
|
943
|
+
if (!keys.length)
|
|
944
|
+
return 0;
|
|
945
|
+
const batchSize = 100;
|
|
946
|
+
let deleted = 0;
|
|
947
|
+
for (let i = 0; i < keys.length; i += batchSize) {
|
|
948
|
+
const batch = keys.slice(i, i + batchSize);
|
|
949
|
+
deleted += await this.client.del(...batch);
|
|
950
|
+
}
|
|
951
|
+
return deleted;
|
|
952
|
+
}
|
|
953
|
+
/**
|
|
954
|
+
* @deprecated Use `scan()` instead — KEYS blocks the Redis event loop.
|
|
955
|
+
*/
|
|
956
|
+
async keys(pattern) {
|
|
957
|
+
if (!pattern || typeof pattern !== "string") {
|
|
958
|
+
throw new ValidationError("Redis key pattern must be a string");
|
|
959
|
+
}
|
|
960
|
+
this.logger.warn("redis.keys() uses KEYS command \u2014 use redis.scan() in production");
|
|
961
|
+
return this.client.keys(pattern);
|
|
962
|
+
}
|
|
963
|
+
/**
|
|
964
|
+
* @deprecated Use `deleteByPattern()` instead.
|
|
965
|
+
*/
|
|
966
|
+
async deleteAll(prefix) {
|
|
967
|
+
this.logger.warn("redis.deleteAll() uses KEYS \u2014 use redis.deleteByPattern() in production");
|
|
968
|
+
const keys = await this.keys(prefix);
|
|
969
|
+
if (!keys.length)
|
|
970
|
+
return 0;
|
|
971
|
+
return this.client.del(...keys);
|
|
972
|
+
}
|
|
973
|
+
/**
|
|
974
|
+
* Flushes the current database. Intended for testing only.
|
|
975
|
+
* Throws in production unless `force: true` is passed.
|
|
976
|
+
*/
|
|
977
|
+
async flush(force = false) {
|
|
978
|
+
if (process.env.NODE_ENV === "production" && !force) {
|
|
979
|
+
throw new ServerError("redis.flush() is disabled in production. Pass force=true to override.");
|
|
980
|
+
}
|
|
490
981
|
await this.client.flushdb();
|
|
982
|
+
this.logger.warn("Redis database flushed", { env: process.env.NODE_ENV });
|
|
983
|
+
}
|
|
984
|
+
// ─── Auth Cache Helpers ───────────────────────────────────────────────────
|
|
985
|
+
authKey(id) {
|
|
986
|
+
return this.buildKey("auth", id, "token");
|
|
987
|
+
}
|
|
988
|
+
tokenKey(ref) {
|
|
989
|
+
return this.buildKey("auth", "token", ref);
|
|
491
990
|
}
|
|
492
|
-
// ───────────────────────────────
|
|
493
|
-
// Auth Cache Helpers
|
|
494
|
-
// ───────────────────────────────
|
|
495
991
|
async getCachedUser(id, throwError = true) {
|
|
496
|
-
const
|
|
497
|
-
const user = await this.get(userToken);
|
|
992
|
+
const user = await this.get(this.authKey(id));
|
|
498
993
|
if (!user && throwError) {
|
|
499
|
-
throw new AuthenticationError("
|
|
994
|
+
throw new AuthenticationError("Session not found, please log in again");
|
|
500
995
|
}
|
|
501
996
|
return user;
|
|
502
997
|
}
|
|
503
998
|
async cacheUser(user, ttl = "1 day") {
|
|
504
999
|
if (!user?.id || !user?.tokenRef) {
|
|
505
|
-
throw new ValidationError("
|
|
1000
|
+
throw new ValidationError("User object must have `id` and `tokenRef` fields");
|
|
506
1001
|
}
|
|
507
1002
|
await Promise.all([
|
|
508
|
-
this.setEx(user.tokenRef, user, ttl),
|
|
509
|
-
this.setEx(
|
|
1003
|
+
this.setEx(this.tokenKey(user.tokenRef), user, ttl),
|
|
1004
|
+
this.setEx(this.authKey(user.id), user, ttl)
|
|
510
1005
|
]);
|
|
511
1006
|
}
|
|
1007
|
+
/**
|
|
1008
|
+
* Atomically updates an array field on a cached user.
|
|
1009
|
+
* Operates on a fresh copy to avoid mutating the cached object before re-save.
|
|
1010
|
+
*/
|
|
512
1011
|
async updateAuthData(userId, key, value, action = "ADD") {
|
|
513
1012
|
const user = await this.getCachedUser(userId, false);
|
|
514
1013
|
if (!user)
|
|
515
1014
|
return null;
|
|
516
1015
|
if (!Array.isArray(user[key]))
|
|
517
1016
|
return user;
|
|
518
|
-
|
|
519
|
-
user
|
|
520
|
-
|
|
521
|
-
|
|
522
|
-
|
|
523
|
-
|
|
524
|
-
await this.cacheUser(user);
|
|
525
|
-
return user;
|
|
1017
|
+
const updated = {
|
|
1018
|
+
...user,
|
|
1019
|
+
[key]: action === "ADD" ? user[key].includes(value) ? user[key] : [...user[key], value] : user[key].filter((v) => v !== value)
|
|
1020
|
+
};
|
|
1021
|
+
await this.cacheUser(updated);
|
|
1022
|
+
return updated;
|
|
526
1023
|
}
|
|
527
|
-
//
|
|
528
|
-
// Helpers
|
|
529
|
-
// ───────────────────────────────
|
|
1024
|
+
// ─── Duration Parser ──────────────────────────────────────────────────────
|
|
530
1025
|
parseDuration(duration) {
|
|
531
1026
|
if (typeof duration === "number")
|
|
532
1027
|
return duration;
|
|
533
|
-
const
|
|
1028
|
+
const parts = duration.trim().split(/\s+/);
|
|
1029
|
+
if (parts.length !== 2) {
|
|
1030
|
+
throw new ValidationError(`Invalid duration format: "${duration}". Expected e.g. "1 hour"`);
|
|
1031
|
+
}
|
|
1032
|
+
const [valueStr, unit] = parts;
|
|
534
1033
|
const value = Number(valueStr);
|
|
535
|
-
if (Number.isNaN(value)) {
|
|
536
|
-
throw new ValidationError(`
|
|
1034
|
+
if (Number.isNaN(value) || value <= 0) {
|
|
1035
|
+
throw new ValidationError(`Duration value must be a positive number, got: "${valueStr}"`);
|
|
537
1036
|
}
|
|
538
1037
|
switch (unit) {
|
|
539
1038
|
case "days":
|
|
@@ -549,34 +1048,58 @@ var Redis = class {
|
|
|
549
1048
|
case "second":
|
|
550
1049
|
return value;
|
|
551
1050
|
default:
|
|
552
|
-
throw new ValidationError(`Invalid duration unit: ${unit}`);
|
|
1051
|
+
throw new ValidationError(`Invalid duration unit: "${unit}". Use seconds, minutes, hours, or days`);
|
|
553
1052
|
}
|
|
554
1053
|
}
|
|
555
1054
|
};
|
|
556
1055
|
|
|
557
1056
|
// src/adapters/sqs.ts
|
|
558
|
-
var
|
|
1057
|
+
var import_client_sqs = require("@aws-sdk/client-sqs");
|
|
1058
|
+
var defaultLogger2 = {
|
|
1059
|
+
info: (msg, meta) => console.info(msg, meta),
|
|
1060
|
+
error: (msg, meta) => console.error(msg, meta),
|
|
1061
|
+
warn: (msg, meta) => console.warn(msg, meta),
|
|
1062
|
+
debug: (msg, meta) => console.debug(msg, meta)
|
|
1063
|
+
};
|
|
559
1064
|
var SQS = class {
|
|
560
1065
|
constructor(config, logger) {
|
|
561
|
-
this.
|
|
562
|
-
|
|
563
|
-
|
|
564
|
-
warn: (msg, meta) => console.warn(msg, meta),
|
|
565
|
-
debug: (msg, meta) => console.debug(msg, meta)
|
|
566
|
-
};
|
|
567
|
-
this.client = new import_aws_sdk.default.SQS({
|
|
1066
|
+
this.polling = false;
|
|
1067
|
+
this.logger = logger ?? defaultLogger2;
|
|
1068
|
+
this.client = new import_client_sqs.SQSClient({
|
|
568
1069
|
region: config.region,
|
|
569
|
-
|
|
570
|
-
|
|
1070
|
+
credentials: {
|
|
1071
|
+
accessKeyId: config.accessKeyId,
|
|
1072
|
+
secretAccessKey: config.secretAccessKey
|
|
1073
|
+
}
|
|
571
1074
|
});
|
|
572
1075
|
this.logger.info("SQS client initialized", { region: config.region });
|
|
573
1076
|
}
|
|
574
|
-
|
|
1077
|
+
// ─── Enqueue ───────────────────────────────────────────────────────────────
|
|
1078
|
+
/**
|
|
1079
|
+
* Sends a message to an SQS queue.
|
|
1080
|
+
* Automatically serializes objects to JSON.
|
|
1081
|
+
*
|
|
1082
|
+
* @example
|
|
1083
|
+
* await sqs.enqueue({ queueUrl, message: { event: "user.created", userId: 1 } });
|
|
1084
|
+
*/
|
|
1085
|
+
async enqueue({
|
|
1086
|
+
queueUrl,
|
|
1087
|
+
message,
|
|
1088
|
+
messageGroupId,
|
|
1089
|
+
messageDeduplicationId,
|
|
1090
|
+
delaySeconds,
|
|
1091
|
+
attributes
|
|
1092
|
+
}) {
|
|
575
1093
|
try {
|
|
576
|
-
|
|
1094
|
+
const input = {
|
|
577
1095
|
QueueUrl: queueUrl,
|
|
578
|
-
MessageBody: typeof message === "string" ? message : JSON.stringify(message)
|
|
579
|
-
|
|
1096
|
+
MessageBody: typeof message === "string" ? message : JSON.stringify(message),
|
|
1097
|
+
...messageGroupId && { MessageGroupId: messageGroupId },
|
|
1098
|
+
...messageDeduplicationId && { MessageDeduplicationId: messageDeduplicationId },
|
|
1099
|
+
...delaySeconds !== void 0 && { DelaySeconds: delaySeconds },
|
|
1100
|
+
...attributes && { MessageAttributes: attributes }
|
|
1101
|
+
};
|
|
1102
|
+
await this.client.send(new import_client_sqs.SendMessageCommand(input));
|
|
580
1103
|
this.logger.info("Message enqueued", { queueUrl });
|
|
581
1104
|
return true;
|
|
582
1105
|
} catch (err) {
|
|
@@ -584,43 +1107,99 @@ var SQS = class {
|
|
|
584
1107
|
throw new ServerError("Failed to enqueue SQS message", { cause: err });
|
|
585
1108
|
}
|
|
586
1109
|
}
|
|
587
|
-
|
|
588
|
-
|
|
589
|
-
|
|
590
|
-
|
|
591
|
-
|
|
592
|
-
|
|
593
|
-
|
|
594
|
-
|
|
595
|
-
|
|
1110
|
+
// ─── Dequeue ───────────────────────────────────────────────────────────────
|
|
1111
|
+
/**
|
|
1112
|
+
* Starts long-polling a queue and passes each message to `consumerFunction`.
|
|
1113
|
+
* Runs until `stop()` is called.
|
|
1114
|
+
*
|
|
1115
|
+
* Delete behaviour:
|
|
1116
|
+
* - On success → always deletes
|
|
1117
|
+
* - On failure + DLQ → moves to DLQ, then deletes
|
|
1118
|
+
* - On failure + useRedrivePolicy → does NOT delete (lets SQS retry)
|
|
1119
|
+
* - On failure + no DLQ + no redrive → logs and deletes to avoid poison pill loop
|
|
1120
|
+
*/
|
|
1121
|
+
async dequeue({
|
|
1122
|
+
queueUrl,
|
|
1123
|
+
consumerFunction,
|
|
1124
|
+
dlqUrl,
|
|
1125
|
+
maxNumberOfMessages = 10,
|
|
1126
|
+
waitTimeSeconds = 20,
|
|
1127
|
+
visibilityTimeout,
|
|
1128
|
+
useRedrivePolicy = false
|
|
1129
|
+
}) {
|
|
1130
|
+
this.polling = true;
|
|
1131
|
+
let consecutiveErrors = 0;
|
|
1132
|
+
this.logger.info("SQS polling started", { queueUrl });
|
|
1133
|
+
while (this.polling) {
|
|
596
1134
|
try {
|
|
597
|
-
const { Messages } = await this.client.
|
|
598
|
-
|
|
599
|
-
|
|
600
|
-
|
|
601
|
-
|
|
602
|
-
|
|
603
|
-
|
|
604
|
-
|
|
605
|
-
|
|
606
|
-
|
|
607
|
-
|
|
608
|
-
|
|
609
|
-
|
|
610
|
-
|
|
611
|
-
|
|
612
|
-
|
|
613
|
-
|
|
614
|
-
|
|
615
|
-
|
|
616
|
-
|
|
617
|
-
|
|
618
|
-
|
|
619
|
-
|
|
620
|
-
}
|
|
621
|
-
}
|
|
1135
|
+
const { Messages } = await this.client.send(
|
|
1136
|
+
new import_client_sqs.ReceiveMessageCommand({
|
|
1137
|
+
QueueUrl: queueUrl,
|
|
1138
|
+
MaxNumberOfMessages: maxNumberOfMessages,
|
|
1139
|
+
WaitTimeSeconds: waitTimeSeconds,
|
|
1140
|
+
...visibilityTimeout && { VisibilityTimeout: visibilityTimeout }
|
|
1141
|
+
})
|
|
1142
|
+
);
|
|
1143
|
+
consecutiveErrors = 0;
|
|
1144
|
+
if (!Messages?.length)
|
|
1145
|
+
continue;
|
|
1146
|
+
await Promise.allSettled(
|
|
1147
|
+
Messages.map(
|
|
1148
|
+
({ Body, ReceiptHandle }) => this.processMessage({
|
|
1149
|
+
Body,
|
|
1150
|
+
ReceiptHandle,
|
|
1151
|
+
queueUrl,
|
|
1152
|
+
dlqUrl,
|
|
1153
|
+
useRedrivePolicy,
|
|
1154
|
+
consumerFunction
|
|
1155
|
+
})
|
|
1156
|
+
)
|
|
1157
|
+
);
|
|
622
1158
|
} catch (err) {
|
|
623
|
-
|
|
1159
|
+
consecutiveErrors++;
|
|
1160
|
+
this.logger.error("SQSPollingError", { err, queueUrl, consecutiveErrors });
|
|
1161
|
+
const backoff = Math.min(1e3 * 2 ** consecutiveErrors, 3e4);
|
|
1162
|
+
await new Promise((resolve) => setTimeout(resolve, backoff));
|
|
1163
|
+
}
|
|
1164
|
+
}
|
|
1165
|
+
this.logger.info("SQS polling stopped", { queueUrl });
|
|
1166
|
+
}
|
|
1167
|
+
/**
|
|
1168
|
+
* Gracefully stops the polling loop after the current batch completes.
|
|
1169
|
+
*/
|
|
1170
|
+
stop() {
|
|
1171
|
+
this.polling = false;
|
|
1172
|
+
this.logger.info("SQS stop signal received");
|
|
1173
|
+
}
|
|
1174
|
+
// ─── Private ───────────────────────────────────────────────────────────────
|
|
1175
|
+
async processMessage({
|
|
1176
|
+
Body,
|
|
1177
|
+
ReceiptHandle,
|
|
1178
|
+
queueUrl,
|
|
1179
|
+
dlqUrl,
|
|
1180
|
+
useRedrivePolicy,
|
|
1181
|
+
consumerFunction
|
|
1182
|
+
}) {
|
|
1183
|
+
if (!Body || !ReceiptHandle)
|
|
1184
|
+
return;
|
|
1185
|
+
let shouldDelete = true;
|
|
1186
|
+
try {
|
|
1187
|
+
const message = parseJSON(Body);
|
|
1188
|
+
await consumerFunction(message);
|
|
1189
|
+
} catch (err) {
|
|
1190
|
+
this.logger.error("SQSConsumerError", { err, queueUrl });
|
|
1191
|
+
if (dlqUrl) {
|
|
1192
|
+
await this.enqueue({ queueUrl: dlqUrl, message: Body });
|
|
1193
|
+
} else if (useRedrivePolicy) {
|
|
1194
|
+
shouldDelete = false;
|
|
1195
|
+
} else {
|
|
1196
|
+
this.logger.warn("SQSMessageDropped \u2014 no DLQ or redrive configured", { queueUrl });
|
|
1197
|
+
}
|
|
1198
|
+
} finally {
|
|
1199
|
+
if (shouldDelete) {
|
|
1200
|
+
await this.client.send(
|
|
1201
|
+
new import_client_sqs.DeleteMessageCommand({ QueueUrl: queueUrl, ReceiptHandle })
|
|
1202
|
+
);
|
|
624
1203
|
}
|
|
625
1204
|
}
|
|
626
1205
|
}
|
|
@@ -628,29 +1207,587 @@ var SQS = class {
|
|
|
628
1207
|
|
|
629
1208
|
// src/adapters/loggers/winston.ts
|
|
630
1209
|
var import_winston = __toESM(require("winston"), 1);
|
|
1210
|
+
var serializeErrors = (0, import_winston.format)((info) => {
|
|
1211
|
+
if (info.meta instanceof Error) {
|
|
1212
|
+
info.meta = {
|
|
1213
|
+
message: info.meta.message,
|
|
1214
|
+
stack: info.meta.stack,
|
|
1215
|
+
name: info.meta.name
|
|
1216
|
+
};
|
|
1217
|
+
}
|
|
1218
|
+
if (info instanceof Error) {
|
|
1219
|
+
info.stack = info.stack;
|
|
1220
|
+
info.message = info.message;
|
|
1221
|
+
}
|
|
1222
|
+
return info;
|
|
1223
|
+
});
|
|
1224
|
+
var prettyFormat = import_winston.format.combine(
|
|
1225
|
+
import_winston.format.colorize(),
|
|
1226
|
+
import_winston.format.timestamp({ format: "YYYY-MM-DD HH:mm:ss" }),
|
|
1227
|
+
import_winston.format.printf(({ timestamp, level, message, service, ...meta }) => {
|
|
1228
|
+
const svc = service ? `[${service}] ` : "";
|
|
1229
|
+
const metaStr = Object.keys(meta).length ? `
|
|
1230
|
+
${JSON.stringify(meta, null, 2)}` : "";
|
|
1231
|
+
return `${timestamp} ${level}: ${svc}${message}${metaStr}`;
|
|
1232
|
+
})
|
|
1233
|
+
);
|
|
1234
|
+
var jsonFormat = import_winston.format.combine(
|
|
1235
|
+
serializeErrors(),
|
|
1236
|
+
import_winston.format.timestamp(),
|
|
1237
|
+
import_winston.format.json()
|
|
1238
|
+
);
|
|
631
1239
|
var WinstonLogger = class {
|
|
632
|
-
constructor() {
|
|
1240
|
+
constructor(options = {}) {
|
|
1241
|
+
const {
|
|
1242
|
+
level = process.env.NODE_ENV === "development" ? "debug" : "info",
|
|
1243
|
+
service,
|
|
1244
|
+
file,
|
|
1245
|
+
pretty = process.env.NODE_ENV === "development",
|
|
1246
|
+
defaultMeta = {}
|
|
1247
|
+
} = options;
|
|
1248
|
+
const transports = [
|
|
1249
|
+
new import_winston.default.transports.Console({
|
|
1250
|
+
format: pretty ? prettyFormat : jsonFormat
|
|
1251
|
+
})
|
|
1252
|
+
];
|
|
1253
|
+
if (file?.path) {
|
|
1254
|
+
transports.push(
|
|
1255
|
+
new import_winston.default.transports.File({
|
|
1256
|
+
filename: file.path,
|
|
1257
|
+
format: jsonFormat
|
|
1258
|
+
})
|
|
1259
|
+
);
|
|
1260
|
+
}
|
|
1261
|
+
if (file?.errorPath) {
|
|
1262
|
+
transports.push(
|
|
1263
|
+
new import_winston.default.transports.File({
|
|
1264
|
+
filename: file.errorPath,
|
|
1265
|
+
level: "error",
|
|
1266
|
+
format: jsonFormat
|
|
1267
|
+
})
|
|
1268
|
+
);
|
|
1269
|
+
}
|
|
633
1270
|
this.logger = import_winston.default.createLogger({
|
|
634
|
-
|
|
1271
|
+
level,
|
|
1272
|
+
defaultMeta: { service, ...defaultMeta },
|
|
1273
|
+
transports,
|
|
1274
|
+
// Prevent winston from exiting on uncaught exceptions in logger itself
|
|
1275
|
+
exitOnError: false
|
|
635
1276
|
});
|
|
636
1277
|
}
|
|
1278
|
+
// ─── Logger Interface ─────────────────────────────────────────────────────
|
|
637
1279
|
info(message, meta) {
|
|
638
|
-
this.logger.info(message, meta);
|
|
1280
|
+
this.logger.info(message, { meta });
|
|
639
1281
|
}
|
|
640
1282
|
error(message, meta) {
|
|
641
|
-
this.logger.error(message, meta);
|
|
1283
|
+
this.logger.error(message, { meta });
|
|
642
1284
|
}
|
|
643
1285
|
warn(message, meta) {
|
|
644
|
-
this.logger.warn(message, meta);
|
|
1286
|
+
this.logger.warn(message, { meta });
|
|
645
1287
|
}
|
|
646
1288
|
debug(message, meta) {
|
|
647
|
-
this.logger.debug(message, meta);
|
|
1289
|
+
this.logger.debug(message, { meta });
|
|
1290
|
+
}
|
|
1291
|
+
http(message, meta) {
|
|
1292
|
+
this.logger.http(message, { meta });
|
|
1293
|
+
}
|
|
1294
|
+
// ─── Extended API ─────────────────────────────────────────────────────────
|
|
1295
|
+
/**
|
|
1296
|
+
* Returns a child logger with additional metadata attached to every entry.
|
|
1297
|
+
* Useful for scoping logs to a request, service, or job.
|
|
1298
|
+
*
|
|
1299
|
+
* @example
|
|
1300
|
+
* const log = logger.child({ requestId: "abc-123", userId: "u-1" });
|
|
1301
|
+
* log.info("User fetched"); // → { requestId: "abc-123", userId: "u-1", message: "User fetched" }
|
|
1302
|
+
*/
|
|
1303
|
+
child(meta) {
|
|
1304
|
+
const child = Object.create(this);
|
|
1305
|
+
child.logger = this.logger.child(meta);
|
|
1306
|
+
return child;
|
|
1307
|
+
}
|
|
1308
|
+
/**
|
|
1309
|
+
* Dynamically changes the log level at runtime.
|
|
1310
|
+
* Useful for temporarily enabling debug logs in production.
|
|
1311
|
+
*
|
|
1312
|
+
* @example
|
|
1313
|
+
* logger.setLevel("debug");
|
|
1314
|
+
*/
|
|
1315
|
+
setLevel(level) {
|
|
1316
|
+
this.logger.level = level;
|
|
1317
|
+
}
|
|
1318
|
+
/**
|
|
1319
|
+
* Returns true if the given level would currently be logged.
|
|
1320
|
+
*
|
|
1321
|
+
* @example
|
|
1322
|
+
* if (logger.isLevelEnabled("debug")) { ... }
|
|
1323
|
+
*/
|
|
1324
|
+
isLevelEnabled(level) {
|
|
1325
|
+
return this.logger.isLevelEnabled(level);
|
|
1326
|
+
}
|
|
1327
|
+
};
|
|
1328
|
+
|
|
1329
|
+
// src/adapters/s3.ts
|
|
1330
|
+
var import_client_s3 = require("@aws-sdk/client-s3");
|
|
1331
|
+
var import_s3_request_presigner = require("@aws-sdk/s3-request-presigner");
|
|
1332
|
+
var defaultLogger3 = {
|
|
1333
|
+
info: (msg, meta) => console.info(msg, meta),
|
|
1334
|
+
error: (msg, meta) => console.error(msg, meta),
|
|
1335
|
+
warn: (msg, meta) => console.warn(msg, meta),
|
|
1336
|
+
debug: (msg, meta) => console.debug(msg, meta)
|
|
1337
|
+
};
|
|
1338
|
+
var S3 = class {
|
|
1339
|
+
constructor(config, logger) {
|
|
1340
|
+
this.logger = logger ?? defaultLogger3;
|
|
1341
|
+
this.defaultBucket = config.defaultBucket;
|
|
1342
|
+
this.region = config.region;
|
|
1343
|
+
this.client = new import_client_s3.S3Client({
|
|
1344
|
+
region: config.region,
|
|
1345
|
+
credentials: {
|
|
1346
|
+
accessKeyId: config.accessKeyId,
|
|
1347
|
+
secretAccessKey: config.secretAccessKey
|
|
1348
|
+
}
|
|
1349
|
+
});
|
|
1350
|
+
this.logger.info("S3 client initialized", { region: config.region });
|
|
1351
|
+
}
|
|
1352
|
+
// ─── Private Helpers ─────────────────────────────────────────────────────
|
|
1353
|
+
getBucket(bucket) {
|
|
1354
|
+
const target = bucket ?? this.defaultBucket;
|
|
1355
|
+
if (!target)
|
|
1356
|
+
throw new ServerError("S3 bucket not provided");
|
|
1357
|
+
return target;
|
|
1358
|
+
}
|
|
1359
|
+
getObjectUrl(bucket, key) {
|
|
1360
|
+
return `https://${bucket}.s3.${this.region}.amazonaws.com/${key}`;
|
|
1361
|
+
}
|
|
1362
|
+
async streamToBuffer(stream) {
|
|
1363
|
+
const chunks = [];
|
|
1364
|
+
for await (const chunk of stream)
|
|
1365
|
+
chunks.push(chunk);
|
|
1366
|
+
return Buffer.concat(chunks);
|
|
1367
|
+
}
|
|
1368
|
+
// ─── Upload ──────────────────────────────────────────────────────────────
|
|
1369
|
+
/**
|
|
1370
|
+
* Uploads a file to S3. Returns the bucket, key, and public URL.
|
|
1371
|
+
*
|
|
1372
|
+
* @example
|
|
1373
|
+
* const result = await s3.upload({ key: "avatars/user-1.png", body: buffer, contentType: "image/png" });
|
|
1374
|
+
* result.url // "https://my-bucket.s3.us-east-1.amazonaws.com/avatars/user-1.png"
|
|
1375
|
+
*/
|
|
1376
|
+
async upload({
|
|
1377
|
+
bucket,
|
|
1378
|
+
key,
|
|
1379
|
+
body,
|
|
1380
|
+
contentType,
|
|
1381
|
+
metadata,
|
|
1382
|
+
acl
|
|
1383
|
+
}) {
|
|
1384
|
+
const targetBucket = this.getBucket(bucket);
|
|
1385
|
+
try {
|
|
1386
|
+
const input = {
|
|
1387
|
+
Bucket: targetBucket,
|
|
1388
|
+
Key: key,
|
|
1389
|
+
Body: body,
|
|
1390
|
+
...contentType && { ContentType: contentType },
|
|
1391
|
+
...metadata && { Metadata: metadata },
|
|
1392
|
+
...acl && { ACL: acl }
|
|
1393
|
+
};
|
|
1394
|
+
await this.client.send(new import_client_s3.PutObjectCommand(input));
|
|
1395
|
+
this.logger.info("S3 upload successful", { bucket: targetBucket, key });
|
|
1396
|
+
return {
|
|
1397
|
+
bucket: targetBucket,
|
|
1398
|
+
key,
|
|
1399
|
+
url: this.getObjectUrl(targetBucket, key)
|
|
1400
|
+
};
|
|
1401
|
+
} catch (err) {
|
|
1402
|
+
this.logger.error("S3UploadError", { err, bucket: targetBucket, key });
|
|
1403
|
+
throw new ServerError("Failed to upload to S3", { cause: err });
|
|
1404
|
+
}
|
|
1405
|
+
}
|
|
1406
|
+
// ─── Download ─────────────────────────────────────────────────────────────
|
|
1407
|
+
/**
|
|
1408
|
+
* Downloads an S3 object and returns it as a Buffer.
|
|
1409
|
+
*/
|
|
1410
|
+
async download({ bucket, key }) {
|
|
1411
|
+
const targetBucket = this.getBucket(bucket);
|
|
1412
|
+
try {
|
|
1413
|
+
const response = await this.client.send(
|
|
1414
|
+
new import_client_s3.GetObjectCommand({ Bucket: targetBucket, Key: key })
|
|
1415
|
+
);
|
|
1416
|
+
if (!response.Body)
|
|
1417
|
+
throw new ServerError("Empty S3 response body");
|
|
1418
|
+
const buffer = await this.streamToBuffer(response.Body);
|
|
1419
|
+
this.logger.info("S3 download successful", { bucket: targetBucket, key });
|
|
1420
|
+
return buffer;
|
|
1421
|
+
} catch (err) {
|
|
1422
|
+
this.logger.error("S3DownloadError", { err, bucket: targetBucket, key });
|
|
1423
|
+
throw new ServerError("Failed to download from S3", { cause: err });
|
|
1424
|
+
}
|
|
1425
|
+
}
|
|
1426
|
+
/**
|
|
1427
|
+
* Returns the raw readable stream for an S3 object.
|
|
1428
|
+
* Prefer this over `download` for large files.
|
|
1429
|
+
*/
|
|
1430
|
+
async stream({ bucket, key }) {
|
|
1431
|
+
const targetBucket = this.getBucket(bucket);
|
|
1432
|
+
try {
|
|
1433
|
+
const response = await this.client.send(
|
|
1434
|
+
new import_client_s3.GetObjectCommand({ Bucket: targetBucket, Key: key })
|
|
1435
|
+
);
|
|
1436
|
+
if (!response.Body)
|
|
1437
|
+
throw new ServerError("Empty S3 response body");
|
|
1438
|
+
this.logger.info("S3 stream ready", { bucket: targetBucket, key });
|
|
1439
|
+
return response.Body;
|
|
1440
|
+
} catch (err) {
|
|
1441
|
+
this.logger.error("S3StreamError", { err, bucket: targetBucket, key });
|
|
1442
|
+
throw new ServerError("Failed to stream from S3", { cause: err });
|
|
1443
|
+
}
|
|
1444
|
+
}
|
|
1445
|
+
// ─── Delete ───────────────────────────────────────────────────────────────
|
|
1446
|
+
async delete({ bucket, key }) {
|
|
1447
|
+
const targetBucket = this.getBucket(bucket);
|
|
1448
|
+
try {
|
|
1449
|
+
await this.client.send(
|
|
1450
|
+
new import_client_s3.DeleteObjectCommand({ Bucket: targetBucket, Key: key })
|
|
1451
|
+
);
|
|
1452
|
+
this.logger.info("S3 object deleted", { bucket: targetBucket, key });
|
|
1453
|
+
return true;
|
|
1454
|
+
} catch (err) {
|
|
1455
|
+
this.logger.error("S3DeleteError", { err, bucket: targetBucket, key });
|
|
1456
|
+
throw new ServerError("Failed to delete S3 object", { cause: err });
|
|
1457
|
+
}
|
|
1458
|
+
}
|
|
1459
|
+
// ─── Copy ─────────────────────────────────────────────────────────────────
|
|
1460
|
+
/**
|
|
1461
|
+
* Copies an object within S3 — within the same bucket or across buckets.
|
|
1462
|
+
*
|
|
1463
|
+
* @example
|
|
1464
|
+
* await s3.copy({ sourceKey: "uploads/tmp.png", destinationKey: "avatars/user-1.png" });
|
|
1465
|
+
*/
|
|
1466
|
+
async copy({
|
|
1467
|
+
sourceBucket,
|
|
1468
|
+
sourceKey,
|
|
1469
|
+
destinationBucket,
|
|
1470
|
+
destinationKey
|
|
1471
|
+
}) {
|
|
1472
|
+
const srcBucket = this.getBucket(sourceBucket);
|
|
1473
|
+
const dstBucket = this.getBucket(destinationBucket);
|
|
1474
|
+
try {
|
|
1475
|
+
await this.client.send(
|
|
1476
|
+
new import_client_s3.CopyObjectCommand({
|
|
1477
|
+
CopySource: `${srcBucket}/${sourceKey}`,
|
|
1478
|
+
Bucket: dstBucket,
|
|
1479
|
+
Key: destinationKey
|
|
1480
|
+
})
|
|
1481
|
+
);
|
|
1482
|
+
this.logger.info("S3 object copied", { srcBucket, sourceKey, dstBucket, destinationKey });
|
|
1483
|
+
return {
|
|
1484
|
+
bucket: dstBucket,
|
|
1485
|
+
key: destinationKey,
|
|
1486
|
+
url: this.getObjectUrl(dstBucket, destinationKey)
|
|
1487
|
+
};
|
|
1488
|
+
} catch (err) {
|
|
1489
|
+
this.logger.error("S3CopyError", { err, sourceKey, destinationKey });
|
|
1490
|
+
throw new ServerError("Failed to copy S3 object", { cause: err });
|
|
1491
|
+
}
|
|
1492
|
+
}
|
|
1493
|
+
// ─── Exists ───────────────────────────────────────────────────────────────
|
|
1494
|
+
/**
|
|
1495
|
+
* Returns true if the object exists.
|
|
1496
|
+
* Throws on non-404 errors (permissions, network) rather than silently returning false.
|
|
1497
|
+
*/
|
|
1498
|
+
async exists({ bucket, key }) {
|
|
1499
|
+
const targetBucket = this.getBucket(bucket);
|
|
1500
|
+
try {
|
|
1501
|
+
await this.client.send(
|
|
1502
|
+
new import_client_s3.HeadObjectCommand({ Bucket: targetBucket, Key: key })
|
|
1503
|
+
);
|
|
1504
|
+
return true;
|
|
1505
|
+
} catch (err) {
|
|
1506
|
+
if (err instanceof import_client_s3.NotFound || err?.name === "NotFound" || err?.$metadata?.httpStatusCode === 404) {
|
|
1507
|
+
return false;
|
|
1508
|
+
}
|
|
1509
|
+
this.logger.error("S3ExistsError", { err, bucket: targetBucket, key });
|
|
1510
|
+
throw new ServerError("Failed to check S3 object existence", { cause: err });
|
|
1511
|
+
}
|
|
1512
|
+
}
|
|
1513
|
+
// ─── Signed URLs ──────────────────────────────────────────────────────────
|
|
1514
|
+
/**
|
|
1515
|
+
* Generates a pre-signed URL for downloading an object (GET).
|
|
1516
|
+
* Default expiry: 1 hour.
|
|
1517
|
+
*/
|
|
1518
|
+
async getSignedDownloadUrl({ bucket, key, expiresIn = 3600 }) {
|
|
1519
|
+
const targetBucket = this.getBucket(bucket);
|
|
1520
|
+
try {
|
|
1521
|
+
const url = await (0, import_s3_request_presigner.getSignedUrl)(
|
|
1522
|
+
this.client,
|
|
1523
|
+
new import_client_s3.GetObjectCommand({ Bucket: targetBucket, Key: key }),
|
|
1524
|
+
{ expiresIn }
|
|
1525
|
+
);
|
|
1526
|
+
this.logger.info("S3 signed download URL generated", { bucket: targetBucket, key });
|
|
1527
|
+
return url;
|
|
1528
|
+
} catch (err) {
|
|
1529
|
+
this.logger.error("S3SignedDownloadUrlError", { err, bucket: targetBucket, key });
|
|
1530
|
+
throw new ServerError("Failed to generate signed download URL", { cause: err });
|
|
1531
|
+
}
|
|
1532
|
+
}
|
|
1533
|
+
/**
|
|
1534
|
+
* Generates a pre-signed URL for uploading an object directly (PUT).
|
|
1535
|
+
* Use this for browser → S3 direct uploads without proxying through your server.
|
|
1536
|
+
*
|
|
1537
|
+
* @example
|
|
1538
|
+
* const url = await s3.getSignedUploadUrl({ key: "avatars/user-1.png", contentType: "image/png" });
|
|
1539
|
+
* // Client does: fetch(url, { method: "PUT", body: file })
|
|
1540
|
+
*/
|
|
1541
|
+
async getSignedUploadUrl({
|
|
1542
|
+
bucket,
|
|
1543
|
+
key,
|
|
1544
|
+
expiresIn = 3600,
|
|
1545
|
+
contentType
|
|
1546
|
+
}) {
|
|
1547
|
+
const targetBucket = this.getBucket(bucket);
|
|
1548
|
+
try {
|
|
1549
|
+
const url = await (0, import_s3_request_presigner.getSignedUrl)(
|
|
1550
|
+
this.client,
|
|
1551
|
+
new import_client_s3.PutObjectCommand({
|
|
1552
|
+
Bucket: targetBucket,
|
|
1553
|
+
Key: key,
|
|
1554
|
+
...contentType && { ContentType: contentType }
|
|
1555
|
+
}),
|
|
1556
|
+
{ expiresIn }
|
|
1557
|
+
);
|
|
1558
|
+
this.logger.info("S3 signed upload URL generated", { bucket: targetBucket, key });
|
|
1559
|
+
return url;
|
|
1560
|
+
} catch (err) {
|
|
1561
|
+
this.logger.error("S3SignedUploadUrlError", { err, bucket: targetBucket, key });
|
|
1562
|
+
throw new ServerError("Failed to generate signed upload URL", { cause: err });
|
|
1563
|
+
}
|
|
1564
|
+
}
|
|
1565
|
+
// ─── Bucket Preset ────────────────────────────────────────────────────────
|
|
1566
|
+
/**
|
|
1567
|
+
* Returns a scoped helper with the bucket pre-filled.
|
|
1568
|
+
*
|
|
1569
|
+
* @example
|
|
1570
|
+
* const avatars = s3.bucket("my-avatars-bucket");
|
|
1571
|
+
* await avatars.upload({ key: "user-1.png", body: buffer });
|
|
1572
|
+
*/
|
|
1573
|
+
bucket(bucketName) {
|
|
1574
|
+
return {
|
|
1575
|
+
upload: (opts) => this.upload({ ...opts, bucket: bucketName }),
|
|
1576
|
+
download: (opts) => this.download({ ...opts, bucket: bucketName }),
|
|
1577
|
+
stream: (opts) => this.stream({ ...opts, bucket: bucketName }),
|
|
1578
|
+
delete: (opts) => this.delete({ ...opts, bucket: bucketName }),
|
|
1579
|
+
exists: (opts) => this.exists({ ...opts, bucket: bucketName }),
|
|
1580
|
+
copy: (opts) => this.copy({ ...opts, destinationBucket: bucketName }),
|
|
1581
|
+
getSignedDownloadUrl: (opts) => this.getSignedDownloadUrl({ ...opts, bucket: bucketName }),
|
|
1582
|
+
getSignedUploadUrl: (opts) => this.getSignedUploadUrl({ ...opts, bucket: bucketName })
|
|
1583
|
+
};
|
|
1584
|
+
}
|
|
1585
|
+
};
|
|
1586
|
+
|
|
1587
|
+
// src/adapters/cron.ts
|
|
1588
|
+
var import_node_cron = __toESM(require("node-cron"), 1);
|
|
1589
|
+
var defaultLogger4 = {
|
|
1590
|
+
info: (msg, meta) => console.info(msg, meta),
|
|
1591
|
+
error: (msg, meta) => console.error(msg, meta),
|
|
1592
|
+
warn: (msg, meta) => console.warn(msg, meta),
|
|
1593
|
+
debug: (msg, meta) => console.debug(msg, meta)
|
|
1594
|
+
};
|
|
1595
|
+
var SHORTHANDS = {
|
|
1596
|
+
"every minute": "* * * * *",
|
|
1597
|
+
"every 5 minutes": "*/5 * * * *",
|
|
1598
|
+
"every 10 minutes": "*/10 * * * *",
|
|
1599
|
+
"every 15 minutes": "*/15 * * * *",
|
|
1600
|
+
"every 30 minutes": "*/30 * * * *",
|
|
1601
|
+
"every hour": "0 * * * *",
|
|
1602
|
+
"every 6 hours": "0 */6 * * *",
|
|
1603
|
+
"every 12 hours": "0 */12 * * *",
|
|
1604
|
+
"every day": "0 0 * * *",
|
|
1605
|
+
"every day at noon": "0 12 * * *",
|
|
1606
|
+
"every week": "0 0 * * 0",
|
|
1607
|
+
"every month": "0 0 1 * *"
|
|
1608
|
+
};
|
|
1609
|
+
var Cron = class {
|
|
1610
|
+
constructor(logger) {
|
|
1611
|
+
this.jobs = /* @__PURE__ */ new Map();
|
|
1612
|
+
this.logger = logger ?? defaultLogger4;
|
|
1613
|
+
}
|
|
1614
|
+
// ─── Register ─────────────────────────────────────────────────────────────
|
|
1615
|
+
/**
|
|
1616
|
+
* Registers and starts a cron job.
|
|
1617
|
+
*
|
|
1618
|
+
* @example
|
|
1619
|
+
* cron.register({
|
|
1620
|
+
* name: "send-digest",
|
|
1621
|
+
* schedule: "every day at noon",
|
|
1622
|
+
* handler: async () => { await sendDigestEmails(); },
|
|
1623
|
+
* timezone: "America/New_York",
|
|
1624
|
+
* });
|
|
1625
|
+
*/
|
|
1626
|
+
register(options) {
|
|
1627
|
+
const { name, schedule, handler, runOnInit = false, timezone, preventOverlap = true } = options;
|
|
1628
|
+
if (!name)
|
|
1629
|
+
throw new ValidationError("Cron job name is required");
|
|
1630
|
+
if (!handler)
|
|
1631
|
+
throw new ValidationError("Cron job handler is required");
|
|
1632
|
+
if (this.jobs.has(name)) {
|
|
1633
|
+
throw new ValidationError(`Cron job "${name}" is already registered. Use replace() to update it.`);
|
|
1634
|
+
}
|
|
1635
|
+
const expression = SHORTHANDS[schedule] ?? schedule;
|
|
1636
|
+
if (!import_node_cron.default.validate(expression)) {
|
|
1637
|
+
throw new ValidationError(`Invalid cron expression for job "${name}": "${schedule}"`);
|
|
1638
|
+
}
|
|
1639
|
+
const status = {
|
|
1640
|
+
name,
|
|
1641
|
+
schedule: expression,
|
|
1642
|
+
running: true,
|
|
1643
|
+
lastRun: null,
|
|
1644
|
+
lastError: null,
|
|
1645
|
+
executionCount: 0,
|
|
1646
|
+
errorCount: 0
|
|
1647
|
+
};
|
|
1648
|
+
const record = {
|
|
1649
|
+
options,
|
|
1650
|
+
status,
|
|
1651
|
+
executing: false,
|
|
1652
|
+
task: null
|
|
1653
|
+
// assigned below
|
|
1654
|
+
};
|
|
1655
|
+
const task = import_node_cron.default.schedule(
|
|
1656
|
+
expression,
|
|
1657
|
+
() => this.execute(name),
|
|
1658
|
+
{ timezone }
|
|
1659
|
+
);
|
|
1660
|
+
record.task = task;
|
|
1661
|
+
this.jobs.set(name, record);
|
|
1662
|
+
this.logger.info(`Cron job registered`, { name, schedule: expression, timezone });
|
|
1663
|
+
if (runOnInit) {
|
|
1664
|
+
this.execute(name);
|
|
1665
|
+
}
|
|
1666
|
+
}
|
|
1667
|
+
// ─── Execute ──────────────────────────────────────────────────────────────
|
|
1668
|
+
async execute(name) {
|
|
1669
|
+
const record = this.jobs.get(name);
|
|
1670
|
+
if (!record)
|
|
1671
|
+
return;
|
|
1672
|
+
const { preventOverlap = true, handler } = record.options;
|
|
1673
|
+
if (preventOverlap && record.executing) {
|
|
1674
|
+
this.logger.warn(`Cron job "${name}" skipped \u2014 previous execution still running`);
|
|
1675
|
+
return;
|
|
1676
|
+
}
|
|
1677
|
+
record.executing = true;
|
|
1678
|
+
record.status.lastRun = /* @__PURE__ */ new Date();
|
|
1679
|
+
record.status.executionCount++;
|
|
1680
|
+
this.logger.debug?.(`Cron job started`, { name, executionCount: record.status.executionCount });
|
|
1681
|
+
try {
|
|
1682
|
+
await handler();
|
|
1683
|
+
this.logger.debug?.(`Cron job completed`, { name });
|
|
1684
|
+
} catch (err) {
|
|
1685
|
+
record.status.errorCount++;
|
|
1686
|
+
record.status.lastError = err;
|
|
1687
|
+
this.logger.error(`Cron job failed`, { name, err });
|
|
1688
|
+
} finally {
|
|
1689
|
+
record.executing = false;
|
|
1690
|
+
}
|
|
1691
|
+
}
|
|
1692
|
+
// ─── Control ──────────────────────────────────────────────────────────────
|
|
1693
|
+
/**
|
|
1694
|
+
* Stops a running job without removing it.
|
|
1695
|
+
* Can be resumed with start().
|
|
1696
|
+
*/
|
|
1697
|
+
stop(name) {
|
|
1698
|
+
const record = this.getJob(name);
|
|
1699
|
+
record.task.stop();
|
|
1700
|
+
record.status.running = false;
|
|
1701
|
+
this.logger.info(`Cron job stopped`, { name });
|
|
1702
|
+
}
|
|
1703
|
+
/**
|
|
1704
|
+
* Resumes a stopped job.
|
|
1705
|
+
*/
|
|
1706
|
+
start(name) {
|
|
1707
|
+
const record = this.getJob(name);
|
|
1708
|
+
record.task.start();
|
|
1709
|
+
record.status.running = true;
|
|
1710
|
+
this.logger.info(`Cron job started`, { name });
|
|
1711
|
+
}
|
|
1712
|
+
/**
|
|
1713
|
+
* Stops and removes a job entirely.
|
|
1714
|
+
*/
|
|
1715
|
+
remove(name) {
|
|
1716
|
+
const record = this.getJob(name);
|
|
1717
|
+
record.task.stop();
|
|
1718
|
+
this.jobs.delete(name);
|
|
1719
|
+
this.logger.info(`Cron job removed`, { name });
|
|
1720
|
+
}
|
|
1721
|
+
/**
|
|
1722
|
+
* Replaces an existing job with a new configuration.
|
|
1723
|
+
* Useful for updating schedules at runtime.
|
|
1724
|
+
*/
|
|
1725
|
+
replace(options) {
|
|
1726
|
+
if (this.jobs.has(options.name))
|
|
1727
|
+
this.remove(options.name);
|
|
1728
|
+
this.register(options);
|
|
1729
|
+
}
|
|
1730
|
+
/**
|
|
1731
|
+
* Manually triggers a job outside its schedule.
|
|
1732
|
+
* Respects preventOverlap.
|
|
1733
|
+
*
|
|
1734
|
+
* @example
|
|
1735
|
+
* await cron.run("send-digest");
|
|
1736
|
+
*/
|
|
1737
|
+
async run(name) {
|
|
1738
|
+
this.getJob(name);
|
|
1739
|
+
await this.execute(name);
|
|
1740
|
+
}
|
|
1741
|
+
/**
|
|
1742
|
+
* Stops all registered jobs. Call this on process shutdown.
|
|
1743
|
+
*
|
|
1744
|
+
* @example
|
|
1745
|
+
* process.on("SIGTERM", () => cron.stopAll());
|
|
1746
|
+
*/
|
|
1747
|
+
stopAll() {
|
|
1748
|
+
for (const [name, record] of this.jobs) {
|
|
1749
|
+
record.task.stop();
|
|
1750
|
+
record.status.running = false;
|
|
1751
|
+
}
|
|
1752
|
+
this.logger.info(`All cron jobs stopped`, { count: this.jobs.size });
|
|
1753
|
+
}
|
|
1754
|
+
// ─── Introspection ────────────────────────────────────────────────────────
|
|
1755
|
+
/**
|
|
1756
|
+
* Returns the status of a single job.
|
|
1757
|
+
*/
|
|
1758
|
+
status(name) {
|
|
1759
|
+
return { ...this.getJob(name).status };
|
|
1760
|
+
}
|
|
1761
|
+
/**
|
|
1762
|
+
* Returns the status of all registered jobs.
|
|
1763
|
+
*/
|
|
1764
|
+
statusAll() {
|
|
1765
|
+
return Array.from(this.jobs.values()).map((r) => ({ ...r.status }));
|
|
1766
|
+
}
|
|
1767
|
+
/**
|
|
1768
|
+
* Returns true if a job with the given name is registered.
|
|
1769
|
+
*/
|
|
1770
|
+
has(name) {
|
|
1771
|
+
return this.jobs.has(name);
|
|
1772
|
+
}
|
|
1773
|
+
// ─── Private ──────────────────────────────────────────────────────────────
|
|
1774
|
+
getJob(name) {
|
|
1775
|
+
const record = this.jobs.get(name);
|
|
1776
|
+
if (!record)
|
|
1777
|
+
throw new ServerError(`Cron job "${name}" not found`);
|
|
1778
|
+
return record;
|
|
648
1779
|
}
|
|
649
1780
|
};
|
|
650
1781
|
|
|
651
1782
|
// src/security/jwt.ts
|
|
652
1783
|
var import_jsonwebtoken = __toESM(require("jsonwebtoken"), 1);
|
|
653
1784
|
var jwtService = {
|
|
1785
|
+
/**
|
|
1786
|
+
* Signs a payload and returns a JWT string.
|
|
1787
|
+
*
|
|
1788
|
+
* @example
|
|
1789
|
+
* const token = await jwtService.encode({ data: { userId: 1 }, secretKey: "secret" });
|
|
1790
|
+
*/
|
|
654
1791
|
async encode({
|
|
655
1792
|
data,
|
|
656
1793
|
secretKey,
|
|
@@ -672,6 +1809,13 @@ var jwtService = {
|
|
|
672
1809
|
});
|
|
673
1810
|
});
|
|
674
1811
|
},
|
|
1812
|
+
/**
|
|
1813
|
+
* Verifies and decodes a JWT string.
|
|
1814
|
+
* Throws a typed `JwtError` on expiry, invalid signature, or not-yet-valid tokens.
|
|
1815
|
+
*
|
|
1816
|
+
* @example
|
|
1817
|
+
* const payload = await jwtService.decode<{ userId: number }>({ token, secretKey: "secret" });
|
|
1818
|
+
*/
|
|
675
1819
|
async decode({
|
|
676
1820
|
token,
|
|
677
1821
|
secretKey,
|
|
@@ -694,6 +1838,152 @@ var jwtService = {
|
|
|
694
1838
|
resolve(decoded);
|
|
695
1839
|
});
|
|
696
1840
|
});
|
|
1841
|
+
},
|
|
1842
|
+
/**
|
|
1843
|
+
* Returns the expiry date of a token without verifying it.
|
|
1844
|
+
* Returns null if the token has no expiry or cannot be decoded.
|
|
1845
|
+
*
|
|
1846
|
+
* @example
|
|
1847
|
+
* jwtService.getExpiry(token) // Date | null
|
|
1848
|
+
*/
|
|
1849
|
+
getExpiry(token) {
|
|
1850
|
+
const decoded = import_jsonwebtoken.default.decode(token);
|
|
1851
|
+
if (!decoded?.exp)
|
|
1852
|
+
return null;
|
|
1853
|
+
return new Date(decoded.exp * 1e3);
|
|
1854
|
+
},
|
|
1855
|
+
/**
|
|
1856
|
+
* Returns true if the token is expired, without verifying the signature.
|
|
1857
|
+
* Useful for checking whether to refresh a token before making a request.
|
|
1858
|
+
*
|
|
1859
|
+
* @example
|
|
1860
|
+
* if (jwtService.isExpired(token)) { ... }
|
|
1861
|
+
*/
|
|
1862
|
+
isExpired(token) {
|
|
1863
|
+
const expiry = this.getExpiry(token);
|
|
1864
|
+
if (!expiry)
|
|
1865
|
+
return false;
|
|
1866
|
+
return expiry < /* @__PURE__ */ new Date();
|
|
1867
|
+
}
|
|
1868
|
+
};
|
|
1869
|
+
|
|
1870
|
+
// src/security/hash.ts
|
|
1871
|
+
var import_bcrypt = __toESM(require("bcrypt"), 1);
|
|
1872
|
+
var import_crypto = __toESM(require("crypto"), 1);
|
|
1873
|
+
var hashService = {
  // ─── bcrypt ───────────────────────────────────────────────────────────────
  /**
   * Hashes a plain text value with bcrypt (intentionally slow and salted).
   * Use for passwords.
   * @throws {Error} When the value to hash is falsy.
   * @example
   * const hashed = await hashService.hash("myPassword123");
   */
  async hash(plain, { rounds = 12 } = {}) {
    if (!plain)
      throw new Error("Value to hash is required");
    return import_bcrypt.default.hash(plain, rounds);
  },
  /**
   * Compares a plain text value against a bcrypt hash.
   * Returns false (never throws) when either argument is falsy.
   * @example
   * const match = await hashService.compare("myPassword123", storedHash);
   * if (!match) throw new AuthenticationError("Invalid credentials");
   */
  async compare(plain, hashed) {
    const bothPresent = Boolean(plain) && Boolean(hashed);
    return bothPresent ? import_bcrypt.default.compare(plain, hashed) : false;
  },
  /**
   * Returns true if the string matches the bcrypt hash prefix format.
   * Useful for detecting already-hashed values before double-hashing.
   * @example
   * hashService.isBcryptHash("$2b$12$...") // true
   */
  isBcryptHash(value) {
    return /^\$2[abxy]\$\d{2}\$/.test(value);
  },
  // ─── HMAC ─────────────────────────────────────────────────────────────────
  /**
   * Creates an HMAC signature for a value using a secret key.
   * Use for signing data (webhooks, tokens, URLs) — NOT for passwords.
   * @throws {Error} When value or secret is falsy.
   * @example
   * const sig = hashService.hmac("payload body", process.env.WEBHOOK_SECRET);
   */
  hmac(value, secret, { algorithm = "sha256", encoding = "hex" } = {}) {
    if (!value)
      throw new Error("Value is required for HMAC");
    if (!secret)
      throw new Error("Secret key is required for HMAC");
    const signer = import_crypto.default.createHmac(algorithm, secret);
    return signer.update(value).digest(encoding);
  },
  /**
   * Verifies an HMAC signature with a timing-safe comparison.
   * Always use this instead of `===` to avoid timing attacks.
   * Returns false on any error (bad input, length mismatch).
   * @example
   * const valid = hashService.verifyHmac(payload, secret, incomingSignature);
   * if (!valid) throw new Error("Invalid webhook signature");
   */
  verifyHmac(value, secret, signature, options) {
    try {
      const expected = Buffer.from(this.hmac(value, secret, options));
      const received = Buffer.from(signature);
      // timingSafeEqual throws on length mismatch; the catch maps that to false.
      return import_crypto.default.timingSafeEqual(expected, received);
    } catch {
      return false;
    }
  },
  // ─── SHA ──────────────────────────────────────────────────────────────────
  /**
   * One-way SHA-256 hash of a value (no secret). Use for content
   * fingerprinting, cache keys, or deduplication — NOT for passwords.
   * @throws {Error} When the value is falsy.
   * @example
   * const fingerprint = hashService.sha256("file contents here");
   */
  sha256(value, encoding = "hex") {
    if (!value)
      throw new Error("Value is required for sha256");
    return import_crypto.default.createHash("sha256").update(value).digest(encoding);
  },
  /** One-way SHA-512 hash; same contract as sha256. */
  sha512(value, encoding = "hex") {
    if (!value)
      throw new Error("Value is required for sha512");
    return import_crypto.default.createHash("sha512").update(value).digest(encoding);
  },
  // ─── Random Tokens ────────────────────────────────────────────────────────
  /**
   * Generates a cryptographically secure random token.
   * Use for password reset tokens, email verification, API keys, etc.
   * @example
   * const token = hashService.generateToken(); // 64-char hex string
   * const token = hashService.generateToken({ bytes: 16, encoding: "base64url" });
   */
  generateToken({ bytes = 32, encoding = "hex" } = {}) {
    const raw = import_crypto.default.randomBytes(bytes);
    return raw.toString(encoding);
  },
  /**
   * Generates a token and returns both the raw value (to send to the user)
   * and its SHA-256 hash (to store in the database).
   * @example
   * const { token, hashed } = hashService.generateHashedToken();
   * await db.user.update({ resetToken: hashed, resetTokenExpiry: ... });
   * await email.send({ to: user.email, token }); // send raw token to user
   */
  generateHashedToken(options) {
    const token = this.generateToken(options);
    return { token, hashed: this.sha256(token) };
  }
};
|
|
699
1989
|
// Annotate the CommonJS export names for ESM import in node:
|
|
@@ -702,30 +1992,68 @@ var jwtService = {
|
|
|
702
1992
|
AuthenticationError,
|
|
703
1993
|
AuthorizationError,
|
|
704
1994
|
BadRequestError,
|
|
1995
|
+
Cron,
|
|
705
1996
|
ExistingError,
|
|
706
1997
|
HTTP_STATUS,
|
|
707
1998
|
HTTP_STATUS_CODE_ERROR,
|
|
708
1999
|
NoContent,
|
|
709
2000
|
NotFoundError,
|
|
710
2001
|
Redis,
|
|
2002
|
+
S3,
|
|
711
2003
|
SQS,
|
|
712
2004
|
ServerError,
|
|
713
2005
|
TokenExpiredError,
|
|
714
2006
|
TokenInvalidError,
|
|
715
2007
|
ValidationError,
|
|
716
2008
|
WinstonLogger,
|
|
2009
|
+
camelCase,
|
|
717
2010
|
capitalize,
|
|
2011
|
+
countOccurrences,
|
|
2012
|
+
debounce,
|
|
718
2013
|
errorHandler,
|
|
719
2014
|
expressErrorMiddleware,
|
|
2015
|
+
flattenObject,
|
|
720
2016
|
formatDate,
|
|
2017
|
+
hashService,
|
|
2018
|
+
isArray,
|
|
2019
|
+
isBlank,
|
|
2020
|
+
isBoolean,
|
|
2021
|
+
isDate,
|
|
2022
|
+
isEmail,
|
|
721
2023
|
isEmpty,
|
|
2024
|
+
isInteger,
|
|
2025
|
+
isJSON,
|
|
2026
|
+
isNegative,
|
|
2027
|
+
isNil,
|
|
2028
|
+
isNumber,
|
|
722
2029
|
isObject,
|
|
723
|
-
|
|
2030
|
+
isPositive,
|
|
2031
|
+
isString,
|
|
2032
|
+
isURL,
|
|
2033
|
+
isUUID,
|
|
2034
|
+
joiMiddleware,
|
|
2035
|
+
joiValidate,
|
|
724
2036
|
jwtService,
|
|
2037
|
+
kebabCase,
|
|
725
2038
|
makeRequest,
|
|
2039
|
+
maskString,
|
|
2040
|
+
memoize,
|
|
2041
|
+
normalizeWhitespace,
|
|
2042
|
+
once,
|
|
726
2043
|
paginate,
|
|
727
2044
|
parseJSON,
|
|
2045
|
+
pascalCase,
|
|
2046
|
+
retry,
|
|
2047
|
+
reverse,
|
|
728
2048
|
sleep,
|
|
2049
|
+
snakeCase,
|
|
2050
|
+
splitWords,
|
|
729
2051
|
stringifyJSON,
|
|
2052
|
+
throttle,
|
|
2053
|
+
timeout,
|
|
2054
|
+
toLowerCase,
|
|
2055
|
+
toUpperCase,
|
|
2056
|
+
truncate,
|
|
2057
|
+
unflattenObject,
|
|
730
2058
|
uuid
|
|
731
2059
|
});
|