nodecore-kit 0.3.0 → 0.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +530 -42
- package/dist/index.cjs +1534 -206
- package/dist/index.d.ts +919 -36
- package/dist/index.js +1510 -207
- package/package.json +9 -2
package/dist/index.js
CHANGED
|
@@ -1,26 +1,171 @@
|
|
|
1
1
|
// src/transport/http.ts
|
|
2
2
|
import Axios from "axios";
|
|
3
|
-
|
|
4
|
-
|
|
5
|
-
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
|
|
9
|
-
|
|
3
|
+
|
|
4
|
+
// src/core/async.ts
|
|
5
|
+
var sleep = (ms) => new Promise((res) => setTimeout(res, ms));
|
|
6
|
+
var retry = async (fn, options = {}) => {
|
|
7
|
+
const { retries = 3, delay = 500, exponential = true, onError } = options;
|
|
8
|
+
const attempt = async (remaining, currentDelay) => {
|
|
9
|
+
try {
|
|
10
|
+
return await fn();
|
|
11
|
+
} catch (err) {
|
|
12
|
+
if (remaining <= 0)
|
|
13
|
+
throw err;
|
|
14
|
+
onError?.(err, retries - remaining + 1);
|
|
15
|
+
await sleep(currentDelay);
|
|
16
|
+
const nextDelay = exponential ? currentDelay * 2 : currentDelay;
|
|
17
|
+
return attempt(remaining - 1, nextDelay);
|
|
18
|
+
}
|
|
19
|
+
};
|
|
20
|
+
return attempt(retries, delay);
|
|
21
|
+
};
|
|
22
|
+
var timeout = (promise, ms) => {
|
|
23
|
+
let timer;
|
|
24
|
+
const race = Promise.race([
|
|
25
|
+
promise,
|
|
26
|
+
new Promise((_, reject) => {
|
|
27
|
+
timer = setTimeout(
|
|
28
|
+
() => reject(new Error(`Operation timed out after ${ms}ms`)),
|
|
29
|
+
ms
|
|
30
|
+
);
|
|
31
|
+
})
|
|
32
|
+
]);
|
|
33
|
+
return race.finally(() => clearTimeout(timer));
|
|
34
|
+
};
|
|
35
|
+
var debounce = (fn, delay) => {
|
|
36
|
+
let timer;
|
|
37
|
+
let lastArgs;
|
|
38
|
+
const debounced = (...args) => {
|
|
39
|
+
lastArgs = args;
|
|
40
|
+
clearTimeout(timer);
|
|
41
|
+
timer = setTimeout(() => {
|
|
42
|
+
timer = void 0;
|
|
43
|
+
fn(...args);
|
|
44
|
+
}, delay);
|
|
45
|
+
return void 0;
|
|
46
|
+
};
|
|
47
|
+
debounced.cancel = () => {
|
|
48
|
+
clearTimeout(timer);
|
|
49
|
+
timer = void 0;
|
|
50
|
+
lastArgs = void 0;
|
|
51
|
+
};
|
|
52
|
+
debounced.flush = (...args) => {
|
|
53
|
+
clearTimeout(timer);
|
|
54
|
+
timer = void 0;
|
|
55
|
+
const callArgs = args.length ? args : lastArgs;
|
|
56
|
+
if (callArgs)
|
|
57
|
+
return fn(...callArgs);
|
|
58
|
+
return void 0;
|
|
59
|
+
};
|
|
60
|
+
return debounced;
|
|
61
|
+
};
|
|
62
|
+
var throttle = (fn, limit, { trailing = false } = {}) => {
|
|
63
|
+
let inThrottle = false;
|
|
64
|
+
let trailingTimer;
|
|
65
|
+
let lastArgs;
|
|
66
|
+
const throttled = (...args) => {
|
|
67
|
+
lastArgs = args;
|
|
68
|
+
if (!inThrottle) {
|
|
69
|
+
const result = fn(...args);
|
|
70
|
+
inThrottle = true;
|
|
71
|
+
setTimeout(() => {
|
|
72
|
+
inThrottle = false;
|
|
73
|
+
if (trailing && lastArgs) {
|
|
74
|
+
fn(...lastArgs);
|
|
75
|
+
lastArgs = void 0;
|
|
76
|
+
}
|
|
77
|
+
}, limit);
|
|
78
|
+
return result;
|
|
79
|
+
}
|
|
80
|
+
return void 0;
|
|
81
|
+
};
|
|
82
|
+
throttled.cancel = () => {
|
|
83
|
+
clearTimeout(trailingTimer);
|
|
84
|
+
inThrottle = false;
|
|
85
|
+
lastArgs = void 0;
|
|
86
|
+
};
|
|
87
|
+
return throttled;
|
|
88
|
+
};
|
|
89
|
+
var memoize = (fn, keyFn) => {
|
|
90
|
+
const cache = /* @__PURE__ */ new Map();
|
|
91
|
+
const memoized = (...args) => {
|
|
92
|
+
const key = keyFn ? keyFn(...args) : JSON.stringify(args);
|
|
93
|
+
if (cache.has(key))
|
|
94
|
+
return cache.get(key);
|
|
95
|
+
const result = fn(...args);
|
|
96
|
+
if (result instanceof Promise) {
|
|
97
|
+
return result.then((val) => {
|
|
98
|
+
cache.set(key, val);
|
|
99
|
+
return val;
|
|
100
|
+
}).catch((err) => {
|
|
101
|
+
cache.delete(key);
|
|
102
|
+
throw err;
|
|
103
|
+
});
|
|
104
|
+
}
|
|
105
|
+
cache.set(key, result);
|
|
106
|
+
return result;
|
|
107
|
+
};
|
|
108
|
+
memoized.cache = cache;
|
|
109
|
+
memoized.clear = () => cache.clear();
|
|
110
|
+
return memoized;
|
|
111
|
+
};
|
|
112
|
+
var once = (fn) => {
|
|
113
|
+
let called = false;
|
|
114
|
+
let result;
|
|
115
|
+
return (...args) => {
|
|
116
|
+
if (!called) {
|
|
117
|
+
called = true;
|
|
118
|
+
result = fn(...args);
|
|
119
|
+
}
|
|
120
|
+
return result;
|
|
121
|
+
};
|
|
122
|
+
};
|
|
123
|
+
|
|
124
|
+
// src/transport/http.ts
|
|
125
|
+
var makeRequest = async (options, _retryCount = 0) => {
|
|
126
|
+
const {
|
|
127
|
+
url,
|
|
128
|
+
method = "GET",
|
|
129
|
+
headers = {},
|
|
130
|
+
token,
|
|
131
|
+
data,
|
|
132
|
+
params,
|
|
133
|
+
timeout: timeout2 = 1e4,
|
|
134
|
+
retries = 0,
|
|
135
|
+
onProgress
|
|
136
|
+
} = options;
|
|
137
|
+
const resolvedHeaders = {
|
|
138
|
+
"X-Requested-With": "XMLHttpRequest",
|
|
139
|
+
...headers,
|
|
140
|
+
...token ? { Authorization: `Bearer ${token}` } : {}
|
|
141
|
+
};
|
|
10
142
|
try {
|
|
11
|
-
|
|
12
|
-
token && (headers["Authorization"] = token);
|
|
13
|
-
const payload = {
|
|
143
|
+
const result = await Axios({
|
|
14
144
|
method,
|
|
15
145
|
url,
|
|
16
|
-
headers
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
146
|
+
headers: resolvedHeaders,
|
|
147
|
+
data,
|
|
148
|
+
params,
|
|
149
|
+
timeout: timeout2,
|
|
150
|
+
...onProgress && {
|
|
151
|
+
onUploadProgress: onProgress,
|
|
152
|
+
onDownloadProgress: onProgress
|
|
153
|
+
}
|
|
154
|
+
});
|
|
21
155
|
return result.data;
|
|
22
156
|
} catch (err) {
|
|
23
|
-
|
|
157
|
+
const shouldRetry = _retryCount < retries && (!err.response || err.response.status >= 500);
|
|
158
|
+
if (shouldRetry) {
|
|
159
|
+
await sleep(2 ** _retryCount * 300);
|
|
160
|
+
return makeRequest(options, _retryCount + 1);
|
|
161
|
+
}
|
|
162
|
+
const error = {
|
|
163
|
+
isHttpError: true,
|
|
164
|
+
message: err.response?.data?.message ?? err.message ?? "Request failed",
|
|
165
|
+
httpStatusCode: err.response?.status ?? null,
|
|
166
|
+
data: err.response?.data ?? null
|
|
167
|
+
};
|
|
168
|
+
throw error;
|
|
24
169
|
}
|
|
25
170
|
};
|
|
26
171
|
|
|
@@ -64,22 +209,12 @@ var AppError = class extends Error {
|
|
|
64
209
|
};
|
|
65
210
|
var ValidationError = class extends AppError {
|
|
66
211
|
constructor(message, meta) {
|
|
67
|
-
super(
|
|
68
|
-
HTTP_STATUS.UNPROCESSABLE_ENTITY,
|
|
69
|
-
message,
|
|
70
|
-
"VALIDATION_ERROR",
|
|
71
|
-
meta
|
|
72
|
-
);
|
|
212
|
+
super(HTTP_STATUS.UNPROCESSABLE_ENTITY, message, "VALIDATION_ERROR", meta);
|
|
73
213
|
}
|
|
74
214
|
};
|
|
75
215
|
var AuthenticationError = class extends AppError {
|
|
76
216
|
constructor(message, meta) {
|
|
77
|
-
super(
|
|
78
|
-
HTTP_STATUS.UNAUTHORIZED,
|
|
79
|
-
message,
|
|
80
|
-
"AUTHENTICATION_ERROR",
|
|
81
|
-
meta
|
|
82
|
-
);
|
|
217
|
+
super(HTTP_STATUS.UNAUTHORIZED, message, "AUTHENTICATION_ERROR", meta);
|
|
83
218
|
}
|
|
84
219
|
};
|
|
85
220
|
var AuthorizationError = class extends AppError {
|
|
@@ -200,20 +335,20 @@ var stringifyJSON = (value) => {
|
|
|
200
335
|
return value;
|
|
201
336
|
}
|
|
202
337
|
};
|
|
203
|
-
var isObject = (val) => val && typeof val === "object" && !Array.isArray(val);
|
|
204
|
-
var sleep = (ms) => new Promise((res) => setTimeout(res, ms));
|
|
205
|
-
var capitalize = (str) => str.charAt(0).toUpperCase() + str.slice(1);
|
|
206
|
-
var isEmpty = (val) => val === null || val === void 0 || typeof val === "object" && Object.keys(val).length === 0 || typeof val === "string" && val.trim() === "";
|
|
207
338
|
|
|
208
339
|
// src/core/uuid.ts
|
|
209
|
-
import { v1 as uuidV1, v4 as uuidV4, validate as
|
|
340
|
+
import { v1 as uuidV1, v4 as uuidV4, validate as uuidValidate } from "uuid";
|
|
210
341
|
var uuid = {
|
|
211
|
-
|
|
212
|
-
|
|
213
|
-
|
|
214
|
-
|
|
215
|
-
|
|
216
|
-
|
|
342
|
+
/**
|
|
343
|
+
* Converts a UUID string to its optimized binary representation (Buffer).
|
|
344
|
+
* Reorders bytes for better index performance in databases like MySQL.
|
|
345
|
+
* If no UUID is provided, generates a new v1 UUID.
|
|
346
|
+
*/
|
|
347
|
+
toBinary: (value) => {
|
|
348
|
+
if (Buffer.isBuffer(value))
|
|
349
|
+
return value;
|
|
350
|
+
const id = value ?? uuidV1();
|
|
351
|
+
const buf = Buffer.from(id.replace(/-/g, ""), "hex");
|
|
217
352
|
return Buffer.concat([
|
|
218
353
|
buf.subarray(6, 8),
|
|
219
354
|
buf.subarray(4, 6),
|
|
@@ -221,9 +356,12 @@ var uuid = {
|
|
|
221
356
|
buf.subarray(8, 16)
|
|
222
357
|
]);
|
|
223
358
|
},
|
|
359
|
+
/**
|
|
360
|
+
* Converts a binary UUID Buffer back to its string representation.
|
|
361
|
+
*/
|
|
224
362
|
toString: (binary) => {
|
|
225
363
|
if (!binary)
|
|
226
|
-
throw new Error("
|
|
364
|
+
throw new Error("A binary UUID value is required");
|
|
227
365
|
if (typeof binary === "string")
|
|
228
366
|
return binary;
|
|
229
367
|
return [
|
|
@@ -234,90 +372,287 @@ var uuid = {
|
|
|
234
372
|
binary.toString("hex", 10, 16)
|
|
235
373
|
].join("-");
|
|
236
374
|
},
|
|
237
|
-
|
|
238
|
-
|
|
239
|
-
|
|
240
|
-
|
|
241
|
-
|
|
242
|
-
|
|
375
|
+
/**
|
|
376
|
+
* Generates a new UUID string.
|
|
377
|
+
* Defaults to v4 (random). Pass "v1" for time-based UUIDs.
|
|
378
|
+
*
|
|
379
|
+
* @example
|
|
380
|
+
* uuid.get() // v4 UUID
|
|
381
|
+
* uuid.get("v1") // v1 UUID
|
|
382
|
+
*/
|
|
383
|
+
get: (version = "v4") => {
|
|
384
|
+
return version === "v1" ? uuidV1() : uuidV4();
|
|
243
385
|
},
|
|
244
|
-
|
|
386
|
+
/**
|
|
387
|
+
* Returns true if the given string is a valid UUID.
|
|
388
|
+
*/
|
|
389
|
+
isValid: (value) => uuidValidate(value),
|
|
390
|
+
/** The nil UUID — all zeros. Useful as a default/placeholder. */
|
|
391
|
+
nil: "00000000-0000-0000-0000-000000000000",
|
|
392
|
+
/**
|
|
393
|
+
* Converts specified keys of an object from binary UUIDs to strings.
|
|
394
|
+
* Returns a shallow copy — does NOT mutate the original.
|
|
395
|
+
*
|
|
396
|
+
* @example
|
|
397
|
+
* uuid.manyToString({ id: <Buffer>, name: "foo" }, ["id"])
|
|
398
|
+
* // { id: "xxxxxxxx-...", name: "foo" }
|
|
399
|
+
*/
|
|
245
400
|
manyToString: (data, keys = []) => {
|
|
246
401
|
if (!data)
|
|
247
|
-
return;
|
|
402
|
+
return data;
|
|
403
|
+
const result = { ...data };
|
|
248
404
|
keys.forEach((key) => {
|
|
249
|
-
if (
|
|
250
|
-
|
|
405
|
+
if (result[key] != null)
|
|
406
|
+
result[key] = uuid.toString(result[key]);
|
|
251
407
|
});
|
|
252
|
-
return
|
|
408
|
+
return result;
|
|
253
409
|
},
|
|
410
|
+
/**
|
|
411
|
+
* Converts specified keys of an object from UUID strings to binary Buffers.
|
|
412
|
+
* Returns a shallow copy — does NOT mutate the original.
|
|
413
|
+
*
|
|
414
|
+
* @example
|
|
415
|
+
* uuid.manyToBinary({ id: "xxxxxxxx-...", name: "foo" }, ["id"])
|
|
416
|
+
* // { id: <Buffer>, name: "foo" }
|
|
417
|
+
*/
|
|
254
418
|
manyToBinary: (data, keys = []) => {
|
|
255
419
|
if (!data)
|
|
256
|
-
return;
|
|
420
|
+
return data;
|
|
421
|
+
const result = { ...data };
|
|
257
422
|
keys.forEach((key) => {
|
|
258
|
-
if (
|
|
259
|
-
|
|
423
|
+
if (result[key] != null)
|
|
424
|
+
result[key] = uuid.toBinary(result[key]);
|
|
260
425
|
});
|
|
261
|
-
return
|
|
426
|
+
return result;
|
|
427
|
+
}
|
|
428
|
+
};
|
|
429
|
+
|
|
430
|
+
// src/core/object.ts
|
|
431
|
+
var flattenObject = (obj, { separator = ".", prefix = "" } = {}) => {
|
|
432
|
+
if (!obj || typeof obj !== "object")
|
|
433
|
+
return {};
|
|
434
|
+
const res = {};
|
|
435
|
+
const isPlainObject = (val) => val !== null && typeof val === "object" && !Array.isArray(val) && !(val instanceof Date) && !(val instanceof RegExp);
|
|
436
|
+
for (const key in obj) {
|
|
437
|
+
if (!Object.prototype.hasOwnProperty.call(obj, key))
|
|
438
|
+
continue;
|
|
439
|
+
const newKey = prefix ? `${prefix}${separator}${key}` : key;
|
|
440
|
+
if (isPlainObject(obj[key])) {
|
|
441
|
+
Object.assign(res, flattenObject(obj[key], { separator, prefix: newKey }));
|
|
442
|
+
} else {
|
|
443
|
+
res[newKey] = obj[key];
|
|
444
|
+
}
|
|
262
445
|
}
|
|
446
|
+
return res;
|
|
447
|
+
};
|
|
448
|
+
var unflattenObject = (obj, separator = ".") => {
|
|
449
|
+
if (!obj || typeof obj !== "object")
|
|
450
|
+
return {};
|
|
451
|
+
const result = {};
|
|
452
|
+
for (const key in obj) {
|
|
453
|
+
if (!Object.prototype.hasOwnProperty.call(obj, key))
|
|
454
|
+
continue;
|
|
455
|
+
const keys = key.split(separator);
|
|
456
|
+
keys.reduce((acc, part, index) => {
|
|
457
|
+
if (index === keys.length - 1) {
|
|
458
|
+
acc[part] = obj[key];
|
|
459
|
+
return acc;
|
|
460
|
+
}
|
|
461
|
+
acc[part] = acc[part] && typeof acc[part] === "object" ? acc[part] : {};
|
|
462
|
+
return acc[part];
|
|
463
|
+
}, result);
|
|
464
|
+
}
|
|
465
|
+
return result;
|
|
466
|
+
};
|
|
467
|
+
|
|
468
|
+
// src/core/string.ts
|
|
469
|
+
var splitWords = (str) => str.replace(/\W+/g, " ").split(/ |\B(?=[A-Z])/).map((w) => w.toLowerCase()).filter(Boolean);
|
|
470
|
+
var capitalize = (str) => {
|
|
471
|
+
if (!str)
|
|
472
|
+
return "";
|
|
473
|
+
return str.charAt(0).toUpperCase() + str.slice(1);
|
|
474
|
+
};
|
|
475
|
+
var toUpperCase = (str) => str.toUpperCase();
|
|
476
|
+
var toLowerCase = (str) => str.toLowerCase();
|
|
477
|
+
var camelCase = (str) => {
|
|
478
|
+
if (!str)
|
|
479
|
+
return "";
|
|
480
|
+
return str.trim().toLowerCase().replace(/[-_\s]+(.)?/g, (_, c) => c ? c.toUpperCase() : "");
|
|
481
|
+
};
|
|
482
|
+
var pascalCase = (str) => {
|
|
483
|
+
if (!str)
|
|
484
|
+
return "";
|
|
485
|
+
return camelCase(str).replace(/^(.)/, (c) => c.toUpperCase());
|
|
486
|
+
};
|
|
487
|
+
var snakeCase = (str) => {
|
|
488
|
+
if (!str)
|
|
489
|
+
return "";
|
|
490
|
+
return splitWords(str).join("_");
|
|
491
|
+
};
|
|
492
|
+
var kebabCase = (str) => {
|
|
493
|
+
if (!str)
|
|
494
|
+
return "";
|
|
495
|
+
return splitWords(str).join("-");
|
|
496
|
+
};
|
|
497
|
+
var truncate = (str, length = 50, suffix = "...") => {
|
|
498
|
+
if (!str)
|
|
499
|
+
return "";
|
|
500
|
+
if (str.length <= length)
|
|
501
|
+
return str;
|
|
502
|
+
return str.slice(0, length - suffix.length).trimEnd() + suffix;
|
|
503
|
+
};
|
|
504
|
+
var maskString = (str, visible = 4) => {
|
|
505
|
+
if (!str)
|
|
506
|
+
return "";
|
|
507
|
+
const visibleCount = Math.min(visible, str.length);
|
|
508
|
+
const maskedLength = str.length - visibleCount;
|
|
509
|
+
return "*".repeat(maskedLength) + str.slice(maskedLength);
|
|
510
|
+
};
|
|
511
|
+
var isBlank = (str) => !str || str.trim().length === 0;
|
|
512
|
+
var reverse = (str) => {
|
|
513
|
+
if (!str)
|
|
514
|
+
return "";
|
|
515
|
+
return str.split("").reverse().join("");
|
|
516
|
+
};
|
|
517
|
+
var countOccurrences = (str, substr) => {
|
|
518
|
+
if (!str || !substr)
|
|
519
|
+
return 0;
|
|
520
|
+
return str.split(substr).length - 1;
|
|
521
|
+
};
|
|
522
|
+
var normalizeWhitespace = (str) => {
|
|
523
|
+
if (!str)
|
|
524
|
+
return "";
|
|
525
|
+
return str.trim().replace(/\s+/g, " ");
|
|
526
|
+
};
|
|
527
|
+
|
|
528
|
+
// src/core/validation.ts
|
|
529
|
+
var isObject = (val) => val !== null && typeof val === "object" && !Array.isArray(val);
|
|
530
|
+
var isEmail = (value) => /^[^\s@]+@[^\s@]+\.[^\s@]+$/.test(value);
|
|
531
|
+
var isUUID = (value) => /^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i.test(value);
|
|
532
|
+
var isNumber = (value) => typeof value === "number" && isFinite(value);
|
|
533
|
+
var isJSON = (value) => {
|
|
534
|
+
if (!value || typeof value !== "string")
|
|
535
|
+
return false;
|
|
536
|
+
try {
|
|
537
|
+
JSON.parse(value);
|
|
538
|
+
return true;
|
|
539
|
+
} catch {
|
|
540
|
+
return false;
|
|
541
|
+
}
|
|
542
|
+
};
|
|
543
|
+
var isDate = (value) => value instanceof Date && !isNaN(value.getTime());
|
|
544
|
+
var isURL = (value) => {
|
|
545
|
+
try {
|
|
546
|
+
const url = new URL(value);
|
|
547
|
+
return url.protocol === "http:" || url.protocol === "https:";
|
|
548
|
+
} catch {
|
|
549
|
+
return false;
|
|
550
|
+
}
|
|
551
|
+
};
|
|
552
|
+
var isBoolean = (value) => typeof value === "boolean";
|
|
553
|
+
var isString = (value) => typeof value === "string";
|
|
554
|
+
var isArray = (value) => Array.isArray(value);
|
|
555
|
+
var isInteger = (value) => typeof value === "number" && Number.isInteger(value);
|
|
556
|
+
var isPositive = (value) => isNumber(value) && value > 0;
|
|
557
|
+
var isNegative = (value) => isNumber(value) && value < 0;
|
|
558
|
+
var isNil = (value) => value === null || value === void 0;
|
|
559
|
+
var isEmpty = (val) => {
|
|
560
|
+
if (isNil(val))
|
|
561
|
+
return true;
|
|
562
|
+
if (typeof val === "string")
|
|
563
|
+
return val.trim().length === 0;
|
|
564
|
+
if (Array.isArray(val))
|
|
565
|
+
return val.length === 0;
|
|
566
|
+
if (isObject(val))
|
|
567
|
+
return Object.keys(val).length === 0;
|
|
568
|
+
return false;
|
|
263
569
|
};
|
|
264
570
|
|
|
265
571
|
// src/transport/express/joiValidator.ts
|
|
266
|
-
var
|
|
267
|
-
|
|
268
|
-
|
|
269
|
-
|
|
572
|
+
var DEFAULT_OPTIONS = {
|
|
573
|
+
abortEarly: false,
|
|
574
|
+
allowUnknown: false,
|
|
575
|
+
stripUnknown: true
|
|
576
|
+
};
|
|
577
|
+
var validateField = (schema, data, options = DEFAULT_OPTIONS) => {
|
|
578
|
+
const mergedOptions = { ...DEFAULT_OPTIONS, ...options };
|
|
579
|
+
const { error, value } = schema.validate(data, mergedOptions);
|
|
580
|
+
if (error) {
|
|
581
|
+
const message = error.details.map((d) => d.message).join("; ");
|
|
582
|
+
throw new ValidationError(message);
|
|
270
583
|
}
|
|
271
|
-
return
|
|
584
|
+
return value;
|
|
272
585
|
};
|
|
273
|
-
|
|
274
|
-
if (!
|
|
275
|
-
throw new ValidationError(
|
|
276
|
-
"Kindly supply validation schema to joiValidator"
|
|
277
|
-
);
|
|
278
|
-
if (!isMiddleware) {
|
|
279
|
-
return validate(constraint.schema, constraint.data, constraint.option);
|
|
586
|
+
var joiMiddleware = (constraints) => {
|
|
587
|
+
if (!constraints || !Object.keys(constraints).length) {
|
|
588
|
+
throw new ValidationError("joiMiddleware requires at least one constraint");
|
|
280
589
|
}
|
|
281
590
|
return async (req, res, next) => {
|
|
282
591
|
try {
|
|
283
|
-
if (
|
|
284
|
-
req.body =
|
|
285
|
-
|
|
592
|
+
if (constraints.body) {
|
|
593
|
+
req.body = validateField(
|
|
594
|
+
constraints.body.schema,
|
|
286
595
|
req.body,
|
|
287
|
-
|
|
596
|
+
constraints.body.options
|
|
288
597
|
);
|
|
289
598
|
}
|
|
290
|
-
if (
|
|
291
|
-
req.params =
|
|
292
|
-
|
|
599
|
+
if (constraints.params) {
|
|
600
|
+
req.params = validateField(
|
|
601
|
+
constraints.params.schema,
|
|
293
602
|
req.params,
|
|
294
|
-
|
|
603
|
+
constraints.params.options
|
|
295
604
|
);
|
|
296
|
-
|
|
297
|
-
|
|
298
|
-
|
|
605
|
+
}
|
|
606
|
+
if (constraints.query) {
|
|
607
|
+
req.query = validateField(
|
|
608
|
+
constraints.query.schema,
|
|
299
609
|
req.query,
|
|
300
|
-
|
|
610
|
+
constraints.query.options
|
|
301
611
|
);
|
|
302
|
-
|
|
303
|
-
|
|
304
|
-
|
|
612
|
+
}
|
|
613
|
+
if (constraints.headers) {
|
|
614
|
+
req.headers = validateField(
|
|
615
|
+
constraints.headers.schema,
|
|
305
616
|
req.headers,
|
|
306
|
-
|
|
617
|
+
constraints.headers.options
|
|
307
618
|
);
|
|
308
|
-
|
|
619
|
+
}
|
|
620
|
+
if (constraints.files) {
|
|
621
|
+
req.files = validateField(
|
|
622
|
+
constraints.files.schema,
|
|
623
|
+
req.files,
|
|
624
|
+
constraints.files.options
|
|
625
|
+
);
|
|
626
|
+
}
|
|
627
|
+
next();
|
|
309
628
|
} catch (err) {
|
|
310
629
|
next(err);
|
|
311
630
|
}
|
|
312
631
|
};
|
|
313
|
-
}
|
|
632
|
+
};
|
|
633
|
+
var joiValidate = ({
|
|
634
|
+
schema,
|
|
635
|
+
data,
|
|
636
|
+
options
|
|
637
|
+
}) => {
|
|
638
|
+
if (!schema)
|
|
639
|
+
throw new ValidationError("joiValidate requires a schema");
|
|
640
|
+
return validateField(schema, data, options);
|
|
641
|
+
};
|
|
314
642
|
|
|
315
643
|
// src/adapters/redis.ts
|
|
316
644
|
import RedisClient from "ioredis";
|
|
645
|
+
var defaultLogger = {
|
|
646
|
+
info: (msg, meta) => console.info(msg, meta),
|
|
647
|
+
error: (msg, meta) => console.error(msg, meta),
|
|
648
|
+
warn: (msg, meta) => console.warn(msg, meta),
|
|
649
|
+
debug: (msg, meta) => console.debug(msg, meta)
|
|
650
|
+
};
|
|
317
651
|
var Redis = class {
|
|
318
|
-
constructor(url, options = {}) {
|
|
652
|
+
constructor(url, options = {}, logger) {
|
|
319
653
|
if (!url)
|
|
320
654
|
throw new ValidationError("Redis connection URL is required");
|
|
655
|
+
this.logger = logger ?? defaultLogger;
|
|
321
656
|
this.client = new RedisClient(url, {
|
|
322
657
|
maxRetriesPerRequest: 3,
|
|
323
658
|
enableReadyCheck: true,
|
|
@@ -326,22 +661,13 @@ var Redis = class {
|
|
|
326
661
|
});
|
|
327
662
|
this.registerListeners();
|
|
328
663
|
}
|
|
664
|
+
// ─── Lifecycle ────────────────────────────────────────────────────────────
|
|
329
665
|
registerListeners() {
|
|
330
|
-
this.client.on("connect", () =>
|
|
331
|
-
|
|
332
|
-
|
|
333
|
-
this.client.on("
|
|
334
|
-
|
|
335
|
-
});
|
|
336
|
-
this.client.on("error", (err) => {
|
|
337
|
-
console.error("\u{1F534} Redis error:", err);
|
|
338
|
-
});
|
|
339
|
-
this.client.on("close", () => {
|
|
340
|
-
console.warn("\u{1F7E0} Redis connection closed");
|
|
341
|
-
});
|
|
342
|
-
this.client.on("reconnecting", () => {
|
|
343
|
-
console.warn("\u{1F7E1} Redis reconnecting...");
|
|
344
|
-
});
|
|
666
|
+
this.client.on("connect", () => this.logger.info("Redis connected"));
|
|
667
|
+
this.client.on("ready", () => this.logger.info("Redis ready"));
|
|
668
|
+
this.client.on("close", () => this.logger.warn("Redis connection closed"));
|
|
669
|
+
this.client.on("reconnecting", () => this.logger.warn("Redis reconnecting..."));
|
|
670
|
+
this.client.on("error", (err) => this.logger.error("Redis error", { err }));
|
|
345
671
|
}
|
|
346
672
|
async start() {
|
|
347
673
|
try {
|
|
@@ -354,19 +680,22 @@ var Redis = class {
|
|
|
354
680
|
}
|
|
355
681
|
async disconnect() {
|
|
356
682
|
try {
|
|
357
|
-
if (this.client.status !== "end")
|
|
683
|
+
if (this.client.status !== "end")
|
|
358
684
|
await this.client.quit();
|
|
359
|
-
}
|
|
360
685
|
} catch {
|
|
361
686
|
await this.client.disconnect();
|
|
362
687
|
}
|
|
363
688
|
}
|
|
364
|
-
|
|
365
|
-
|
|
366
|
-
|
|
689
|
+
// ─── Key Helpers ──────────────────────────────────────────────────────────
|
|
690
|
+
validateKey(key) {
|
|
691
|
+
if (!key || typeof key !== "string") {
|
|
692
|
+
throw new ValidationError("Redis key must be a non-empty string");
|
|
367
693
|
}
|
|
368
|
-
return this.client.keys(pattern);
|
|
369
694
|
}
|
|
695
|
+
buildKey(...parts) {
|
|
696
|
+
return parts.join(":");
|
|
697
|
+
}
|
|
698
|
+
// ─── Serialization ────────────────────────────────────────────────────────
|
|
370
699
|
serialize(data) {
|
|
371
700
|
if (typeof data === "string")
|
|
372
701
|
return data;
|
|
@@ -379,38 +708,25 @@ var Redis = class {
|
|
|
379
708
|
return data;
|
|
380
709
|
return parseJSON(data);
|
|
381
710
|
}
|
|
711
|
+
// ─── Core Operations ─────────────────────────────────────────────────────
|
|
382
712
|
async set(key, data) {
|
|
383
|
-
|
|
384
|
-
throw new ValidationError("Redis key must be a string");
|
|
385
|
-
}
|
|
713
|
+
this.validateKey(key);
|
|
386
714
|
return this.client.set(key, this.serialize(data));
|
|
387
715
|
}
|
|
388
716
|
async setEx(key, data, duration) {
|
|
389
|
-
|
|
390
|
-
throw new ValidationError("Redis key must be a string");
|
|
391
|
-
}
|
|
717
|
+
this.validateKey(key);
|
|
392
718
|
const ttl = this.parseDuration(duration);
|
|
393
719
|
return this.client.setex(key, ttl, this.serialize(data));
|
|
394
720
|
}
|
|
395
721
|
async get(key, parse = true) {
|
|
396
|
-
|
|
397
|
-
throw new ValidationError("Redis key must be a string");
|
|
398
|
-
}
|
|
722
|
+
this.validateKey(key);
|
|
399
723
|
const data = await this.client.get(key);
|
|
400
724
|
return this.deserialize(data, parse);
|
|
401
725
|
}
|
|
402
726
|
async delete(key) {
|
|
403
|
-
|
|
404
|
-
throw new ValidationError("Redis key must be a string");
|
|
405
|
-
}
|
|
727
|
+
this.validateKey(key);
|
|
406
728
|
return Boolean(await this.client.del(key));
|
|
407
729
|
}
|
|
408
|
-
async deleteAll(prefix) {
|
|
409
|
-
const keys = await this.keys(prefix);
|
|
410
|
-
if (!keys.length)
|
|
411
|
-
return 0;
|
|
412
|
-
return this.client.del(...keys);
|
|
413
|
-
}
|
|
414
730
|
async exists(key) {
|
|
415
731
|
return Boolean(await this.client.exists(key));
|
|
416
732
|
}
|
|
@@ -421,54 +737,199 @@ var Redis = class {
|
|
|
421
737
|
const ttl = this.parseDuration(duration);
|
|
422
738
|
return Boolean(await this.client.expire(key, ttl));
|
|
423
739
|
}
|
|
424
|
-
|
|
740
|
+
// ─── Increment / Decrement ────────────────────────────────────────────────
|
|
741
|
+
/**
|
|
742
|
+
* Atomically increments a counter. Creates it at 1 if it doesn't exist.
|
|
743
|
+
* Optionally sets a TTL on first creation.
|
|
744
|
+
*
|
|
745
|
+
* @example
|
|
746
|
+
* await redis.increment("rate:user:123"); // 1, 2, 3...
|
|
747
|
+
* await redis.increment("rate:user:123", "1 hour"); // resets TTL each time
|
|
748
|
+
*/
|
|
749
|
+
async increment(key, ttl) {
|
|
750
|
+
this.validateKey(key);
|
|
751
|
+
const value = await this.client.incr(key);
|
|
752
|
+
if (ttl && value === 1)
|
|
753
|
+
await this.expire(key, ttl);
|
|
754
|
+
return value;
|
|
755
|
+
}
|
|
756
|
+
/**
|
|
757
|
+
* Atomically decrements a counter.
|
|
758
|
+
*/
|
|
759
|
+
async decrement(key) {
|
|
760
|
+
this.validateKey(key);
|
|
761
|
+
return this.client.decr(key);
|
|
762
|
+
}
|
|
763
|
+
// ─── Hash Operations ──────────────────────────────────────────────────────
|
|
764
|
+
/**
|
|
765
|
+
* Sets one or more fields on a Redis hash.
|
|
766
|
+
*
|
|
767
|
+
* @example
|
|
768
|
+
* await redis.hset("user:1", { name: "Alice", role: "admin" });
|
|
769
|
+
*/
|
|
770
|
+
async hset(key, data) {
|
|
771
|
+
this.validateKey(key);
|
|
772
|
+
const serialized = Object.fromEntries(
|
|
773
|
+
Object.entries(data).map(([k, v]) => [k, this.serialize(v)])
|
|
774
|
+
);
|
|
775
|
+
return this.client.hset(key, serialized);
|
|
776
|
+
}
|
|
777
|
+
/**
|
|
778
|
+
* Gets a single field from a Redis hash.
|
|
779
|
+
*/
|
|
780
|
+
async hget(key, field) {
|
|
781
|
+
this.validateKey(key);
|
|
782
|
+
const data = await this.client.hget(key, field);
|
|
783
|
+
return this.deserialize(data);
|
|
784
|
+
}
|
|
785
|
+
/**
|
|
786
|
+
* Gets all fields from a Redis hash as a typed object.
|
|
787
|
+
*/
|
|
788
|
+
async hgetAll(key) {
|
|
789
|
+
this.validateKey(key);
|
|
790
|
+
const data = await this.client.hgetall(key);
|
|
791
|
+
if (!data || Object.keys(data).length === 0)
|
|
792
|
+
return null;
|
|
793
|
+
return Object.fromEntries(
|
|
794
|
+
Object.entries(data).map(([k, v]) => [k, this.deserialize(v)])
|
|
795
|
+
);
|
|
796
|
+
}
|
|
797
|
+
/**
|
|
798
|
+
* Deletes one or more fields from a Redis hash.
|
|
799
|
+
*/
|
|
800
|
+
async hdel(key, ...fields) {
|
|
801
|
+
this.validateKey(key);
|
|
802
|
+
return this.client.hdel(key, ...fields);
|
|
803
|
+
}
|
|
804
|
+
// ─── Scan-based Key Operations ────────────────────────────────────────────
|
|
805
|
+
/**
|
|
806
|
+
* Safely scans for keys matching a pattern using SCAN (non-blocking).
|
|
807
|
+
* Prefer this over KEYS in production — KEYS blocks the event loop.
|
|
808
|
+
*
|
|
809
|
+
* @example
|
|
810
|
+
* await redis.scan("user:*") // ["user:1", "user:2", ...]
|
|
811
|
+
*/
|
|
812
|
+
async scan(pattern) {
|
|
813
|
+
if (!pattern || typeof pattern !== "string") {
|
|
814
|
+
throw new ValidationError("Redis scan pattern must be a string");
|
|
815
|
+
}
|
|
816
|
+
const keys = [];
|
|
817
|
+
let cursor = "0";
|
|
818
|
+
do {
|
|
819
|
+
const [nextCursor, batch] = await this.client.scan(
|
|
820
|
+
cursor,
|
|
821
|
+
"MATCH",
|
|
822
|
+
pattern,
|
|
823
|
+
"COUNT",
|
|
824
|
+
100
|
|
825
|
+
);
|
|
826
|
+
cursor = nextCursor;
|
|
827
|
+
keys.push(...batch);
|
|
828
|
+
} while (cursor !== "0");
|
|
829
|
+
return keys;
|
|
830
|
+
}
|
|
831
|
+
/**
|
|
832
|
+
* Deletes all keys matching a pattern using SCAN + batched DEL.
|
|
833
|
+
* Safe for large keyspaces.
|
|
834
|
+
*
|
|
835
|
+
* @example
|
|
836
|
+
* await redis.deleteByPattern("session:*") // clears all sessions
|
|
837
|
+
*/
|
|
838
|
+
async deleteByPattern(pattern) {
|
|
839
|
+
const keys = await this.scan(pattern);
|
|
840
|
+
if (!keys.length)
|
|
841
|
+
return 0;
|
|
842
|
+
const batchSize = 100;
|
|
843
|
+
let deleted = 0;
|
|
844
|
+
for (let i = 0; i < keys.length; i += batchSize) {
|
|
845
|
+
const batch = keys.slice(i, i + batchSize);
|
|
846
|
+
deleted += await this.client.del(...batch);
|
|
847
|
+
}
|
|
848
|
+
return deleted;
|
|
849
|
+
}
|
|
850
|
+
/**
|
|
851
|
+
* @deprecated Use `scan()` instead — KEYS blocks the Redis event loop.
|
|
852
|
+
*/
|
|
853
|
+
async keys(pattern) {
|
|
854
|
+
if (!pattern || typeof pattern !== "string") {
|
|
855
|
+
throw new ValidationError("Redis key pattern must be a string");
|
|
856
|
+
}
|
|
857
|
+
this.logger.warn("redis.keys() uses KEYS command \u2014 use redis.scan() in production");
|
|
858
|
+
return this.client.keys(pattern);
|
|
859
|
+
}
|
|
860
|
+
/**
|
|
861
|
+
* @deprecated Use `deleteByPattern()` instead.
|
|
862
|
+
*/
|
|
863
|
+
async deleteAll(prefix) {
|
|
864
|
+
this.logger.warn("redis.deleteAll() uses KEYS \u2014 use redis.deleteByPattern() in production");
|
|
865
|
+
const keys = await this.keys(prefix);
|
|
866
|
+
if (!keys.length)
|
|
867
|
+
return 0;
|
|
868
|
+
return this.client.del(...keys);
|
|
869
|
+
}
|
|
870
|
+
/**
|
|
871
|
+
* Flushes the current database. Intended for testing only.
|
|
872
|
+
* Throws in production unless `force: true` is passed.
|
|
873
|
+
*/
|
|
874
|
+
async flush(force = false) {
|
|
875
|
+
if (process.env.NODE_ENV === "production" && !force) {
|
|
876
|
+
throw new ServerError("redis.flush() is disabled in production. Pass force=true to override.");
|
|
877
|
+
}
|
|
425
878
|
await this.client.flushdb();
|
|
879
|
+
this.logger.warn("Redis database flushed", { env: process.env.NODE_ENV });
|
|
880
|
+
}
|
|
881
|
+
// ─── Auth Cache Helpers ───────────────────────────────────────────────────
|
|
882
|
+
authKey(id) {
|
|
883
|
+
return this.buildKey("auth", id, "token");
|
|
884
|
+
}
|
|
885
|
+
tokenKey(ref) {
|
|
886
|
+
return this.buildKey("auth", "token", ref);
|
|
426
887
|
}
|
|
427
|
-
// ───────────────────────────────
|
|
428
|
-
// Auth Cache Helpers
|
|
429
|
-
// ───────────────────────────────
|
|
430
888
|
async getCachedUser(id, throwError = true) {
|
|
431
|
-
const
|
|
432
|
-
const user = await this.get(userToken);
|
|
889
|
+
const user = await this.get(this.authKey(id));
|
|
433
890
|
if (!user && throwError) {
|
|
434
|
-
throw new AuthenticationError("
|
|
891
|
+
throw new AuthenticationError("Session not found, please log in again");
|
|
435
892
|
}
|
|
436
893
|
return user;
|
|
437
894
|
}
|
|
438
895
|
async cacheUser(user, ttl = "1 day") {
|
|
439
896
|
if (!user?.id || !user?.tokenRef) {
|
|
440
|
-
throw new ValidationError("
|
|
897
|
+
throw new ValidationError("User object must have `id` and `tokenRef` fields");
|
|
441
898
|
}
|
|
442
899
|
await Promise.all([
|
|
443
|
-
this.setEx(user.tokenRef, user, ttl),
|
|
444
|
-
this.setEx(
|
|
900
|
+
this.setEx(this.tokenKey(user.tokenRef), user, ttl),
|
|
901
|
+
this.setEx(this.authKey(user.id), user, ttl)
|
|
445
902
|
]);
|
|
446
903
|
}
|
|
904
|
+
/**
|
|
905
|
+
* Atomically updates an array field on a cached user.
|
|
906
|
+
* Operates on a fresh copy to avoid mutating the cached object before re-save.
|
|
907
|
+
*/
|
|
447
908
|
async updateAuthData(userId, key, value, action = "ADD") {
|
|
448
909
|
const user = await this.getCachedUser(userId, false);
|
|
449
910
|
if (!user)
|
|
450
911
|
return null;
|
|
451
912
|
if (!Array.isArray(user[key]))
|
|
452
913
|
return user;
|
|
453
|
-
|
|
454
|
-
user
|
|
455
|
-
|
|
456
|
-
|
|
457
|
-
|
|
458
|
-
|
|
459
|
-
await this.cacheUser(user);
|
|
460
|
-
return user;
|
|
914
|
+
const updated = {
|
|
915
|
+
...user,
|
|
916
|
+
[key]: action === "ADD" ? user[key].includes(value) ? user[key] : [...user[key], value] : user[key].filter((v) => v !== value)
|
|
917
|
+
};
|
|
918
|
+
await this.cacheUser(updated);
|
|
919
|
+
return updated;
|
|
461
920
|
}
|
|
462
|
-
//
|
|
463
|
-
// Helpers
|
|
464
|
-
// ───────────────────────────────
|
|
921
|
+
// ─── Duration Parser ──────────────────────────────────────────────────────
|
|
465
922
|
parseDuration(duration) {
|
|
466
923
|
if (typeof duration === "number")
|
|
467
924
|
return duration;
|
|
468
|
-
const
|
|
925
|
+
const parts = duration.trim().split(/\s+/);
|
|
926
|
+
if (parts.length !== 2) {
|
|
927
|
+
throw new ValidationError(`Invalid duration format: "${duration}". Expected e.g. "1 hour"`);
|
|
928
|
+
}
|
|
929
|
+
const [valueStr, unit] = parts;
|
|
469
930
|
const value = Number(valueStr);
|
|
470
|
-
if (Number.isNaN(value)) {
|
|
471
|
-
throw new ValidationError(`
|
|
931
|
+
if (Number.isNaN(value) || value <= 0) {
|
|
932
|
+
throw new ValidationError(`Duration value must be a positive number, got: "${valueStr}"`);
|
|
472
933
|
}
|
|
473
934
|
switch (unit) {
|
|
474
935
|
case "days":
|
|
@@ -484,34 +945,63 @@ var Redis = class {
|
|
|
484
945
|
case "second":
|
|
485
946
|
return value;
|
|
486
947
|
default:
|
|
487
|
-
throw new ValidationError(`Invalid duration unit: ${unit}`);
|
|
948
|
+
throw new ValidationError(`Invalid duration unit: "${unit}". Use seconds, minutes, hours, or days`);
|
|
488
949
|
}
|
|
489
950
|
}
|
|
490
951
|
};
|
|
491
952
|
|
|
492
953
|
// src/adapters/sqs.ts
|
|
493
|
-
import
|
|
954
|
+
import {
|
|
955
|
+
SQSClient,
|
|
956
|
+
SendMessageCommand,
|
|
957
|
+
ReceiveMessageCommand,
|
|
958
|
+
DeleteMessageCommand
|
|
959
|
+
} from "@aws-sdk/client-sqs";
|
|
960
|
+
var defaultLogger2 = {
|
|
961
|
+
info: (msg, meta) => console.info(msg, meta),
|
|
962
|
+
error: (msg, meta) => console.error(msg, meta),
|
|
963
|
+
warn: (msg, meta) => console.warn(msg, meta),
|
|
964
|
+
debug: (msg, meta) => console.debug(msg, meta)
|
|
965
|
+
};
|
|
494
966
|
var SQS = class {
|
|
495
967
|
constructor(config, logger) {
|
|
496
|
-
this.
|
|
497
|
-
|
|
498
|
-
|
|
499
|
-
warn: (msg, meta) => console.warn(msg, meta),
|
|
500
|
-
debug: (msg, meta) => console.debug(msg, meta)
|
|
501
|
-
};
|
|
502
|
-
this.client = new AWS.SQS({
|
|
968
|
+
this.polling = false;
|
|
969
|
+
this.logger = logger ?? defaultLogger2;
|
|
970
|
+
this.client = new SQSClient({
|
|
503
971
|
region: config.region,
|
|
504
|
-
|
|
505
|
-
|
|
972
|
+
credentials: {
|
|
973
|
+
accessKeyId: config.accessKeyId,
|
|
974
|
+
secretAccessKey: config.secretAccessKey
|
|
975
|
+
}
|
|
506
976
|
});
|
|
507
977
|
this.logger.info("SQS client initialized", { region: config.region });
|
|
508
978
|
}
|
|
509
|
-
|
|
979
|
+
// ─── Enqueue ───────────────────────────────────────────────────────────────
|
|
980
|
+
/**
|
|
981
|
+
* Sends a message to an SQS queue.
|
|
982
|
+
* Automatically serializes objects to JSON.
|
|
983
|
+
*
|
|
984
|
+
* @example
|
|
985
|
+
* await sqs.enqueue({ queueUrl, message: { event: "user.created", userId: 1 } });
|
|
986
|
+
*/
|
|
987
|
+
async enqueue({
|
|
988
|
+
queueUrl,
|
|
989
|
+
message,
|
|
990
|
+
messageGroupId,
|
|
991
|
+
messageDeduplicationId,
|
|
992
|
+
delaySeconds,
|
|
993
|
+
attributes
|
|
994
|
+
}) {
|
|
510
995
|
try {
|
|
511
|
-
|
|
996
|
+
const input = {
|
|
512
997
|
QueueUrl: queueUrl,
|
|
513
|
-
MessageBody: typeof message === "string" ? message : JSON.stringify(message)
|
|
514
|
-
|
|
998
|
+
MessageBody: typeof message === "string" ? message : JSON.stringify(message),
|
|
999
|
+
...messageGroupId && { MessageGroupId: messageGroupId },
|
|
1000
|
+
...messageDeduplicationId && { MessageDeduplicationId: messageDeduplicationId },
|
|
1001
|
+
...delaySeconds !== void 0 && { DelaySeconds: delaySeconds },
|
|
1002
|
+
...attributes && { MessageAttributes: attributes }
|
|
1003
|
+
};
|
|
1004
|
+
await this.client.send(new SendMessageCommand(input));
|
|
515
1005
|
this.logger.info("Message enqueued", { queueUrl });
|
|
516
1006
|
return true;
|
|
517
1007
|
} catch (err) {
|
|
@@ -519,73 +1009,695 @@ var SQS = class {
|
|
|
519
1009
|
throw new ServerError("Failed to enqueue SQS message", { cause: err });
|
|
520
1010
|
}
|
|
521
1011
|
}
|
|
522
|
-
|
|
523
|
-
|
|
524
|
-
|
|
525
|
-
|
|
526
|
-
|
|
527
|
-
|
|
528
|
-
|
|
529
|
-
|
|
530
|
-
|
|
1012
|
+
// ─── Dequeue ───────────────────────────────────────────────────────────────
|
|
1013
|
+
/**
|
|
1014
|
+
* Starts long-polling a queue and passes each message to `consumerFunction`.
|
|
1015
|
+
* Runs until `stop()` is called.
|
|
1016
|
+
*
|
|
1017
|
+
* Delete behaviour:
|
|
1018
|
+
* - On success → always deletes
|
|
1019
|
+
* - On failure + DLQ → moves to DLQ, then deletes
|
|
1020
|
+
* - On failure + useRedrivePolicy → does NOT delete (lets SQS retry)
|
|
1021
|
+
* - On failure + no DLQ + no redrive → logs and deletes to avoid poison pill loop
|
|
1022
|
+
*/
|
|
1023
|
+
async dequeue({
|
|
1024
|
+
queueUrl,
|
|
1025
|
+
consumerFunction,
|
|
1026
|
+
dlqUrl,
|
|
1027
|
+
maxNumberOfMessages = 10,
|
|
1028
|
+
waitTimeSeconds = 20,
|
|
1029
|
+
visibilityTimeout,
|
|
1030
|
+
useRedrivePolicy = false
|
|
1031
|
+
}) {
|
|
1032
|
+
this.polling = true;
|
|
1033
|
+
let consecutiveErrors = 0;
|
|
1034
|
+
this.logger.info("SQS polling started", { queueUrl });
|
|
1035
|
+
while (this.polling) {
|
|
531
1036
|
try {
|
|
532
|
-
const { Messages } = await this.client.
|
|
533
|
-
|
|
534
|
-
|
|
535
|
-
|
|
536
|
-
|
|
537
|
-
|
|
538
|
-
|
|
539
|
-
|
|
540
|
-
|
|
541
|
-
|
|
542
|
-
|
|
543
|
-
|
|
544
|
-
|
|
545
|
-
|
|
546
|
-
|
|
547
|
-
|
|
548
|
-
|
|
549
|
-
|
|
550
|
-
|
|
551
|
-
|
|
552
|
-
|
|
553
|
-
|
|
554
|
-
|
|
555
|
-
}
|
|
556
|
-
}
|
|
1037
|
+
const { Messages } = await this.client.send(
|
|
1038
|
+
new ReceiveMessageCommand({
|
|
1039
|
+
QueueUrl: queueUrl,
|
|
1040
|
+
MaxNumberOfMessages: maxNumberOfMessages,
|
|
1041
|
+
WaitTimeSeconds: waitTimeSeconds,
|
|
1042
|
+
...visibilityTimeout && { VisibilityTimeout: visibilityTimeout }
|
|
1043
|
+
})
|
|
1044
|
+
);
|
|
1045
|
+
consecutiveErrors = 0;
|
|
1046
|
+
if (!Messages?.length)
|
|
1047
|
+
continue;
|
|
1048
|
+
await Promise.allSettled(
|
|
1049
|
+
Messages.map(
|
|
1050
|
+
({ Body, ReceiptHandle }) => this.processMessage({
|
|
1051
|
+
Body,
|
|
1052
|
+
ReceiptHandle,
|
|
1053
|
+
queueUrl,
|
|
1054
|
+
dlqUrl,
|
|
1055
|
+
useRedrivePolicy,
|
|
1056
|
+
consumerFunction
|
|
1057
|
+
})
|
|
1058
|
+
)
|
|
1059
|
+
);
|
|
557
1060
|
} catch (err) {
|
|
558
|
-
|
|
1061
|
+
consecutiveErrors++;
|
|
1062
|
+
this.logger.error("SQSPollingError", { err, queueUrl, consecutiveErrors });
|
|
1063
|
+
const backoff = Math.min(1e3 * 2 ** consecutiveErrors, 3e4);
|
|
1064
|
+
await new Promise((resolve) => setTimeout(resolve, backoff));
|
|
1065
|
+
}
|
|
1066
|
+
}
|
|
1067
|
+
this.logger.info("SQS polling stopped", { queueUrl });
|
|
1068
|
+
}
|
|
1069
|
+
/**
|
|
1070
|
+
* Gracefully stops the polling loop after the current batch completes.
|
|
1071
|
+
*/
|
|
1072
|
+
stop() {
|
|
1073
|
+
this.polling = false;
|
|
1074
|
+
this.logger.info("SQS stop signal received");
|
|
1075
|
+
}
|
|
1076
|
+
// ─── Private ───────────────────────────────────────────────────────────────
|
|
1077
|
+
async processMessage({
|
|
1078
|
+
Body,
|
|
1079
|
+
ReceiptHandle,
|
|
1080
|
+
queueUrl,
|
|
1081
|
+
dlqUrl,
|
|
1082
|
+
useRedrivePolicy,
|
|
1083
|
+
consumerFunction
|
|
1084
|
+
}) {
|
|
1085
|
+
if (!Body || !ReceiptHandle)
|
|
1086
|
+
return;
|
|
1087
|
+
let shouldDelete = true;
|
|
1088
|
+
try {
|
|
1089
|
+
const message = parseJSON(Body);
|
|
1090
|
+
await consumerFunction(message);
|
|
1091
|
+
} catch (err) {
|
|
1092
|
+
this.logger.error("SQSConsumerError", { err, queueUrl });
|
|
1093
|
+
if (dlqUrl) {
|
|
1094
|
+
await this.enqueue({ queueUrl: dlqUrl, message: Body });
|
|
1095
|
+
} else if (useRedrivePolicy) {
|
|
1096
|
+
shouldDelete = false;
|
|
1097
|
+
} else {
|
|
1098
|
+
this.logger.warn("SQSMessageDropped \u2014 no DLQ or redrive configured", { queueUrl });
|
|
1099
|
+
}
|
|
1100
|
+
} finally {
|
|
1101
|
+
if (shouldDelete) {
|
|
1102
|
+
await this.client.send(
|
|
1103
|
+
new DeleteMessageCommand({ QueueUrl: queueUrl, ReceiptHandle })
|
|
1104
|
+
);
|
|
559
1105
|
}
|
|
560
1106
|
}
|
|
561
1107
|
}
|
|
562
1108
|
};
|
|
563
1109
|
|
|
564
1110
|
// src/adapters/loggers/winston.ts
|
|
565
|
-
import winston from "winston";
|
|
1111
|
+
import winston, { format } from "winston";
|
|
1112
|
+
var serializeErrors = format((info) => {
|
|
1113
|
+
if (info.meta instanceof Error) {
|
|
1114
|
+
info.meta = {
|
|
1115
|
+
message: info.meta.message,
|
|
1116
|
+
stack: info.meta.stack,
|
|
1117
|
+
name: info.meta.name
|
|
1118
|
+
};
|
|
1119
|
+
}
|
|
1120
|
+
if (info instanceof Error) {
|
|
1121
|
+
info.stack = info.stack;
|
|
1122
|
+
info.message = info.message;
|
|
1123
|
+
}
|
|
1124
|
+
return info;
|
|
1125
|
+
});
|
|
1126
|
+
var prettyFormat = format.combine(
|
|
1127
|
+
format.colorize(),
|
|
1128
|
+
format.timestamp({ format: "YYYY-MM-DD HH:mm:ss" }),
|
|
1129
|
+
format.printf(({ timestamp, level, message, service, ...meta }) => {
|
|
1130
|
+
const svc = service ? `[${service}] ` : "";
|
|
1131
|
+
const metaStr = Object.keys(meta).length ? `
|
|
1132
|
+
${JSON.stringify(meta, null, 2)}` : "";
|
|
1133
|
+
return `${timestamp} ${level}: ${svc}${message}${metaStr}`;
|
|
1134
|
+
})
|
|
1135
|
+
);
|
|
1136
|
+
var jsonFormat = format.combine(
|
|
1137
|
+
serializeErrors(),
|
|
1138
|
+
format.timestamp(),
|
|
1139
|
+
format.json()
|
|
1140
|
+
);
|
|
566
1141
|
var WinstonLogger = class {
|
|
567
|
-
constructor() {
|
|
1142
|
+
constructor(options = {}) {
|
|
1143
|
+
const {
|
|
1144
|
+
level = process.env.NODE_ENV === "development" ? "debug" : "info",
|
|
1145
|
+
service,
|
|
1146
|
+
file,
|
|
1147
|
+
pretty = process.env.NODE_ENV === "development",
|
|
1148
|
+
defaultMeta = {}
|
|
1149
|
+
} = options;
|
|
1150
|
+
const transports = [
|
|
1151
|
+
new winston.transports.Console({
|
|
1152
|
+
format: pretty ? prettyFormat : jsonFormat
|
|
1153
|
+
})
|
|
1154
|
+
];
|
|
1155
|
+
if (file?.path) {
|
|
1156
|
+
transports.push(
|
|
1157
|
+
new winston.transports.File({
|
|
1158
|
+
filename: file.path,
|
|
1159
|
+
format: jsonFormat
|
|
1160
|
+
})
|
|
1161
|
+
);
|
|
1162
|
+
}
|
|
1163
|
+
if (file?.errorPath) {
|
|
1164
|
+
transports.push(
|
|
1165
|
+
new winston.transports.File({
|
|
1166
|
+
filename: file.errorPath,
|
|
1167
|
+
level: "error",
|
|
1168
|
+
format: jsonFormat
|
|
1169
|
+
})
|
|
1170
|
+
);
|
|
1171
|
+
}
|
|
568
1172
|
this.logger = winston.createLogger({
|
|
569
|
-
|
|
1173
|
+
level,
|
|
1174
|
+
defaultMeta: { service, ...defaultMeta },
|
|
1175
|
+
transports,
|
|
1176
|
+
// Prevent winston from exiting on uncaught exceptions in logger itself
|
|
1177
|
+
exitOnError: false
|
|
570
1178
|
});
|
|
571
1179
|
}
|
|
1180
|
+
// ─── Logger Interface ─────────────────────────────────────────────────────
|
|
572
1181
|
info(message, meta) {
|
|
573
|
-
this.logger.info(message, meta);
|
|
1182
|
+
this.logger.info(message, { meta });
|
|
574
1183
|
}
|
|
575
1184
|
error(message, meta) {
|
|
576
|
-
this.logger.error(message, meta);
|
|
1185
|
+
this.logger.error(message, { meta });
|
|
577
1186
|
}
|
|
578
1187
|
warn(message, meta) {
|
|
579
|
-
this.logger.warn(message, meta);
|
|
1188
|
+
this.logger.warn(message, { meta });
|
|
580
1189
|
}
|
|
581
1190
|
debug(message, meta) {
|
|
582
|
-
this.logger.debug(message, meta);
|
|
1191
|
+
this.logger.debug(message, { meta });
|
|
1192
|
+
}
|
|
1193
|
+
http(message, meta) {
|
|
1194
|
+
this.logger.http(message, { meta });
|
|
1195
|
+
}
|
|
1196
|
+
// ─── Extended API ─────────────────────────────────────────────────────────
|
|
1197
|
+
/**
|
|
1198
|
+
* Returns a child logger with additional metadata attached to every entry.
|
|
1199
|
+
* Useful for scoping logs to a request, service, or job.
|
|
1200
|
+
*
|
|
1201
|
+
* @example
|
|
1202
|
+
* const log = logger.child({ requestId: "abc-123", userId: "u-1" });
|
|
1203
|
+
* log.info("User fetched"); // → { requestId: "abc-123", userId: "u-1", message: "User fetched" }
|
|
1204
|
+
*/
|
|
1205
|
+
child(meta) {
|
|
1206
|
+
const child = Object.create(this);
|
|
1207
|
+
child.logger = this.logger.child(meta);
|
|
1208
|
+
return child;
|
|
1209
|
+
}
|
|
1210
|
+
/**
|
|
1211
|
+
* Dynamically changes the log level at runtime.
|
|
1212
|
+
* Useful for temporarily enabling debug logs in production.
|
|
1213
|
+
*
|
|
1214
|
+
* @example
|
|
1215
|
+
* logger.setLevel("debug");
|
|
1216
|
+
*/
|
|
1217
|
+
setLevel(level) {
|
|
1218
|
+
this.logger.level = level;
|
|
1219
|
+
}
|
|
1220
|
+
/**
|
|
1221
|
+
* Returns true if the given level would currently be logged.
|
|
1222
|
+
*
|
|
1223
|
+
* @example
|
|
1224
|
+
* if (logger.isLevelEnabled("debug")) { ... }
|
|
1225
|
+
*/
|
|
1226
|
+
isLevelEnabled(level) {
|
|
1227
|
+
return this.logger.isLevelEnabled(level);
|
|
1228
|
+
}
|
|
1229
|
+
};
|
|
1230
|
+
|
|
1231
|
+
// src/adapters/s3.ts
|
|
1232
|
+
import {
|
|
1233
|
+
S3Client,
|
|
1234
|
+
PutObjectCommand,
|
|
1235
|
+
GetObjectCommand,
|
|
1236
|
+
DeleteObjectCommand,
|
|
1237
|
+
CopyObjectCommand,
|
|
1238
|
+
HeadObjectCommand,
|
|
1239
|
+
NotFound
|
|
1240
|
+
} from "@aws-sdk/client-s3";
|
|
1241
|
+
import { getSignedUrl } from "@aws-sdk/s3-request-presigner";
|
|
1242
|
+
var defaultLogger3 = {
|
|
1243
|
+
info: (msg, meta) => console.info(msg, meta),
|
|
1244
|
+
error: (msg, meta) => console.error(msg, meta),
|
|
1245
|
+
warn: (msg, meta) => console.warn(msg, meta),
|
|
1246
|
+
debug: (msg, meta) => console.debug(msg, meta)
|
|
1247
|
+
};
|
|
1248
|
+
var S3 = class {
|
|
1249
|
+
constructor(config, logger) {
|
|
1250
|
+
this.logger = logger ?? defaultLogger3;
|
|
1251
|
+
this.defaultBucket = config.defaultBucket;
|
|
1252
|
+
this.region = config.region;
|
|
1253
|
+
this.client = new S3Client({
|
|
1254
|
+
region: config.region,
|
|
1255
|
+
credentials: {
|
|
1256
|
+
accessKeyId: config.accessKeyId,
|
|
1257
|
+
secretAccessKey: config.secretAccessKey
|
|
1258
|
+
}
|
|
1259
|
+
});
|
|
1260
|
+
this.logger.info("S3 client initialized", { region: config.region });
|
|
1261
|
+
}
|
|
1262
|
+
// ─── Private Helpers ─────────────────────────────────────────────────────
|
|
1263
|
+
getBucket(bucket) {
|
|
1264
|
+
const target = bucket ?? this.defaultBucket;
|
|
1265
|
+
if (!target)
|
|
1266
|
+
throw new ServerError("S3 bucket not provided");
|
|
1267
|
+
return target;
|
|
1268
|
+
}
|
|
1269
|
+
getObjectUrl(bucket, key) {
|
|
1270
|
+
return `https://${bucket}.s3.${this.region}.amazonaws.com/${key}`;
|
|
1271
|
+
}
|
|
1272
|
+
async streamToBuffer(stream) {
|
|
1273
|
+
const chunks = [];
|
|
1274
|
+
for await (const chunk of stream)
|
|
1275
|
+
chunks.push(chunk);
|
|
1276
|
+
return Buffer.concat(chunks);
|
|
1277
|
+
}
|
|
1278
|
+
// ─── Upload ──────────────────────────────────────────────────────────────
|
|
1279
|
+
/**
|
|
1280
|
+
* Uploads a file to S3. Returns the bucket, key, and public URL.
|
|
1281
|
+
*
|
|
1282
|
+
* @example
|
|
1283
|
+
* const result = await s3.upload({ key: "avatars/user-1.png", body: buffer, contentType: "image/png" });
|
|
1284
|
+
* result.url // "https://my-bucket.s3.us-east-1.amazonaws.com/avatars/user-1.png"
|
|
1285
|
+
*/
|
|
1286
|
+
async upload({
|
|
1287
|
+
bucket,
|
|
1288
|
+
key,
|
|
1289
|
+
body,
|
|
1290
|
+
contentType,
|
|
1291
|
+
metadata,
|
|
1292
|
+
acl
|
|
1293
|
+
}) {
|
|
1294
|
+
const targetBucket = this.getBucket(bucket);
|
|
1295
|
+
try {
|
|
1296
|
+
const input = {
|
|
1297
|
+
Bucket: targetBucket,
|
|
1298
|
+
Key: key,
|
|
1299
|
+
Body: body,
|
|
1300
|
+
...contentType && { ContentType: contentType },
|
|
1301
|
+
...metadata && { Metadata: metadata },
|
|
1302
|
+
...acl && { ACL: acl }
|
|
1303
|
+
};
|
|
1304
|
+
await this.client.send(new PutObjectCommand(input));
|
|
1305
|
+
this.logger.info("S3 upload successful", { bucket: targetBucket, key });
|
|
1306
|
+
return {
|
|
1307
|
+
bucket: targetBucket,
|
|
1308
|
+
key,
|
|
1309
|
+
url: this.getObjectUrl(targetBucket, key)
|
|
1310
|
+
};
|
|
1311
|
+
} catch (err) {
|
|
1312
|
+
this.logger.error("S3UploadError", { err, bucket: targetBucket, key });
|
|
1313
|
+
throw new ServerError("Failed to upload to S3", { cause: err });
|
|
1314
|
+
}
|
|
1315
|
+
}
|
|
1316
|
+
// ─── Download ─────────────────────────────────────────────────────────────
|
|
1317
|
+
/**
|
|
1318
|
+
* Downloads an S3 object and returns it as a Buffer.
|
|
1319
|
+
*/
|
|
1320
|
+
async download({ bucket, key }) {
|
|
1321
|
+
const targetBucket = this.getBucket(bucket);
|
|
1322
|
+
try {
|
|
1323
|
+
const response = await this.client.send(
|
|
1324
|
+
new GetObjectCommand({ Bucket: targetBucket, Key: key })
|
|
1325
|
+
);
|
|
1326
|
+
if (!response.Body)
|
|
1327
|
+
throw new ServerError("Empty S3 response body");
|
|
1328
|
+
const buffer = await this.streamToBuffer(response.Body);
|
|
1329
|
+
this.logger.info("S3 download successful", { bucket: targetBucket, key });
|
|
1330
|
+
return buffer;
|
|
1331
|
+
} catch (err) {
|
|
1332
|
+
this.logger.error("S3DownloadError", { err, bucket: targetBucket, key });
|
|
1333
|
+
throw new ServerError("Failed to download from S3", { cause: err });
|
|
1334
|
+
}
|
|
1335
|
+
}
|
|
1336
|
+
/**
|
|
1337
|
+
* Returns the raw readable stream for an S3 object.
|
|
1338
|
+
* Prefer this over `download` for large files.
|
|
1339
|
+
*/
|
|
1340
|
+
async stream({ bucket, key }) {
|
|
1341
|
+
const targetBucket = this.getBucket(bucket);
|
|
1342
|
+
try {
|
|
1343
|
+
const response = await this.client.send(
|
|
1344
|
+
new GetObjectCommand({ Bucket: targetBucket, Key: key })
|
|
1345
|
+
);
|
|
1346
|
+
if (!response.Body)
|
|
1347
|
+
throw new ServerError("Empty S3 response body");
|
|
1348
|
+
this.logger.info("S3 stream ready", { bucket: targetBucket, key });
|
|
1349
|
+
return response.Body;
|
|
1350
|
+
} catch (err) {
|
|
1351
|
+
this.logger.error("S3StreamError", { err, bucket: targetBucket, key });
|
|
1352
|
+
throw new ServerError("Failed to stream from S3", { cause: err });
|
|
1353
|
+
}
|
|
1354
|
+
}
|
|
1355
|
+
// ─── Delete ───────────────────────────────────────────────────────────────
|
|
1356
|
+
async delete({ bucket, key }) {
|
|
1357
|
+
const targetBucket = this.getBucket(bucket);
|
|
1358
|
+
try {
|
|
1359
|
+
await this.client.send(
|
|
1360
|
+
new DeleteObjectCommand({ Bucket: targetBucket, Key: key })
|
|
1361
|
+
);
|
|
1362
|
+
this.logger.info("S3 object deleted", { bucket: targetBucket, key });
|
|
1363
|
+
return true;
|
|
1364
|
+
} catch (err) {
|
|
1365
|
+
this.logger.error("S3DeleteError", { err, bucket: targetBucket, key });
|
|
1366
|
+
throw new ServerError("Failed to delete S3 object", { cause: err });
|
|
1367
|
+
}
|
|
1368
|
+
}
|
|
1369
|
+
// ─── Copy ─────────────────────────────────────────────────────────────────
|
|
1370
|
+
/**
|
|
1371
|
+
* Copies an object within S3 — within the same bucket or across buckets.
|
|
1372
|
+
*
|
|
1373
|
+
* @example
|
|
1374
|
+
* await s3.copy({ sourceKey: "uploads/tmp.png", destinationKey: "avatars/user-1.png" });
|
|
1375
|
+
*/
|
|
1376
|
+
async copy({
|
|
1377
|
+
sourceBucket,
|
|
1378
|
+
sourceKey,
|
|
1379
|
+
destinationBucket,
|
|
1380
|
+
destinationKey
|
|
1381
|
+
}) {
|
|
1382
|
+
const srcBucket = this.getBucket(sourceBucket);
|
|
1383
|
+
const dstBucket = this.getBucket(destinationBucket);
|
|
1384
|
+
try {
|
|
1385
|
+
await this.client.send(
|
|
1386
|
+
new CopyObjectCommand({
|
|
1387
|
+
CopySource: `${srcBucket}/${sourceKey}`,
|
|
1388
|
+
Bucket: dstBucket,
|
|
1389
|
+
Key: destinationKey
|
|
1390
|
+
})
|
|
1391
|
+
);
|
|
1392
|
+
this.logger.info("S3 object copied", { srcBucket, sourceKey, dstBucket, destinationKey });
|
|
1393
|
+
return {
|
|
1394
|
+
bucket: dstBucket,
|
|
1395
|
+
key: destinationKey,
|
|
1396
|
+
url: this.getObjectUrl(dstBucket, destinationKey)
|
|
1397
|
+
};
|
|
1398
|
+
} catch (err) {
|
|
1399
|
+
this.logger.error("S3CopyError", { err, sourceKey, destinationKey });
|
|
1400
|
+
throw new ServerError("Failed to copy S3 object", { cause: err });
|
|
1401
|
+
}
|
|
1402
|
+
}
|
|
1403
|
+
// ─── Exists ───────────────────────────────────────────────────────────────
|
|
1404
|
+
/**
|
|
1405
|
+
* Returns true if the object exists.
|
|
1406
|
+
* Throws on non-404 errors (permissions, network) rather than silently returning false.
|
|
1407
|
+
*/
|
|
1408
|
+
async exists({ bucket, key }) {
|
|
1409
|
+
const targetBucket = this.getBucket(bucket);
|
|
1410
|
+
try {
|
|
1411
|
+
await this.client.send(
|
|
1412
|
+
new HeadObjectCommand({ Bucket: targetBucket, Key: key })
|
|
1413
|
+
);
|
|
1414
|
+
return true;
|
|
1415
|
+
} catch (err) {
|
|
1416
|
+
if (err instanceof NotFound || err?.name === "NotFound" || err?.$metadata?.httpStatusCode === 404) {
|
|
1417
|
+
return false;
|
|
1418
|
+
}
|
|
1419
|
+
this.logger.error("S3ExistsError", { err, bucket: targetBucket, key });
|
|
1420
|
+
throw new ServerError("Failed to check S3 object existence", { cause: err });
|
|
1421
|
+
}
|
|
1422
|
+
}
|
|
1423
|
+
// ─── Signed URLs ──────────────────────────────────────────────────────────
|
|
1424
|
+
/**
|
|
1425
|
+
* Generates a pre-signed URL for downloading an object (GET).
|
|
1426
|
+
* Default expiry: 1 hour.
|
|
1427
|
+
*/
|
|
1428
|
+
async getSignedDownloadUrl({ bucket, key, expiresIn = 3600 }) {
|
|
1429
|
+
const targetBucket = this.getBucket(bucket);
|
|
1430
|
+
try {
|
|
1431
|
+
const url = await getSignedUrl(
|
|
1432
|
+
this.client,
|
|
1433
|
+
new GetObjectCommand({ Bucket: targetBucket, Key: key }),
|
|
1434
|
+
{ expiresIn }
|
|
1435
|
+
);
|
|
1436
|
+
this.logger.info("S3 signed download URL generated", { bucket: targetBucket, key });
|
|
1437
|
+
return url;
|
|
1438
|
+
} catch (err) {
|
|
1439
|
+
this.logger.error("S3SignedDownloadUrlError", { err, bucket: targetBucket, key });
|
|
1440
|
+
throw new ServerError("Failed to generate signed download URL", { cause: err });
|
|
1441
|
+
}
|
|
1442
|
+
}
|
|
1443
|
+
/**
|
|
1444
|
+
* Generates a pre-signed URL for uploading an object directly (PUT).
|
|
1445
|
+
* Use this for browser → S3 direct uploads without proxying through your server.
|
|
1446
|
+
*
|
|
1447
|
+
* @example
|
|
1448
|
+
* const url = await s3.getSignedUploadUrl({ key: "avatars/user-1.png", contentType: "image/png" });
|
|
1449
|
+
* // Client does: fetch(url, { method: "PUT", body: file })
|
|
1450
|
+
*/
|
|
1451
|
+
async getSignedUploadUrl({
|
|
1452
|
+
bucket,
|
|
1453
|
+
key,
|
|
1454
|
+
expiresIn = 3600,
|
|
1455
|
+
contentType
|
|
1456
|
+
}) {
|
|
1457
|
+
const targetBucket = this.getBucket(bucket);
|
|
1458
|
+
try {
|
|
1459
|
+
const url = await getSignedUrl(
|
|
1460
|
+
this.client,
|
|
1461
|
+
new PutObjectCommand({
|
|
1462
|
+
Bucket: targetBucket,
|
|
1463
|
+
Key: key,
|
|
1464
|
+
...contentType && { ContentType: contentType }
|
|
1465
|
+
}),
|
|
1466
|
+
{ expiresIn }
|
|
1467
|
+
);
|
|
1468
|
+
this.logger.info("S3 signed upload URL generated", { bucket: targetBucket, key });
|
|
1469
|
+
return url;
|
|
1470
|
+
} catch (err) {
|
|
1471
|
+
this.logger.error("S3SignedUploadUrlError", { err, bucket: targetBucket, key });
|
|
1472
|
+
throw new ServerError("Failed to generate signed upload URL", { cause: err });
|
|
1473
|
+
}
|
|
1474
|
+
}
|
|
1475
|
+
// ─── Bucket Preset ────────────────────────────────────────────────────────
|
|
1476
|
+
/**
|
|
1477
|
+
* Returns a scoped helper with the bucket pre-filled.
|
|
1478
|
+
*
|
|
1479
|
+
* @example
|
|
1480
|
+
* const avatars = s3.bucket("my-avatars-bucket");
|
|
1481
|
+
* await avatars.upload({ key: "user-1.png", body: buffer });
|
|
1482
|
+
*/
|
|
1483
|
+
bucket(bucketName) {
|
|
1484
|
+
return {
|
|
1485
|
+
upload: (opts) => this.upload({ ...opts, bucket: bucketName }),
|
|
1486
|
+
download: (opts) => this.download({ ...opts, bucket: bucketName }),
|
|
1487
|
+
stream: (opts) => this.stream({ ...opts, bucket: bucketName }),
|
|
1488
|
+
delete: (opts) => this.delete({ ...opts, bucket: bucketName }),
|
|
1489
|
+
exists: (opts) => this.exists({ ...opts, bucket: bucketName }),
|
|
1490
|
+
copy: (opts) => this.copy({ ...opts, destinationBucket: bucketName }),
|
|
1491
|
+
getSignedDownloadUrl: (opts) => this.getSignedDownloadUrl({ ...opts, bucket: bucketName }),
|
|
1492
|
+
getSignedUploadUrl: (opts) => this.getSignedUploadUrl({ ...opts, bucket: bucketName })
|
|
1493
|
+
};
|
|
1494
|
+
}
|
|
1495
|
+
};
|
|
1496
|
+
|
|
1497
|
+
// src/adapters/cron.ts
|
|
1498
|
+
import cron from "node-cron";
|
|
1499
|
+
var defaultLogger4 = {
|
|
1500
|
+
info: (msg, meta) => console.info(msg, meta),
|
|
1501
|
+
error: (msg, meta) => console.error(msg, meta),
|
|
1502
|
+
warn: (msg, meta) => console.warn(msg, meta),
|
|
1503
|
+
debug: (msg, meta) => console.debug(msg, meta)
|
|
1504
|
+
};
|
|
1505
|
+
var SHORTHANDS = {
|
|
1506
|
+
"every minute": "* * * * *",
|
|
1507
|
+
"every 5 minutes": "*/5 * * * *",
|
|
1508
|
+
"every 10 minutes": "*/10 * * * *",
|
|
1509
|
+
"every 15 minutes": "*/15 * * * *",
|
|
1510
|
+
"every 30 minutes": "*/30 * * * *",
|
|
1511
|
+
"every hour": "0 * * * *",
|
|
1512
|
+
"every 6 hours": "0 */6 * * *",
|
|
1513
|
+
"every 12 hours": "0 */12 * * *",
|
|
1514
|
+
"every day": "0 0 * * *",
|
|
1515
|
+
"every day at noon": "0 12 * * *",
|
|
1516
|
+
"every week": "0 0 * * 0",
|
|
1517
|
+
"every month": "0 0 1 * *"
|
|
1518
|
+
};
|
|
1519
|
+
var Cron = class {
|
|
1520
|
+
constructor(logger) {
|
|
1521
|
+
this.jobs = /* @__PURE__ */ new Map();
|
|
1522
|
+
this.logger = logger ?? defaultLogger4;
|
|
1523
|
+
}
|
|
1524
|
+
// ─── Register ─────────────────────────────────────────────────────────────
|
|
1525
|
+
/**
|
|
1526
|
+
* Registers and starts a cron job.
|
|
1527
|
+
*
|
|
1528
|
+
* @example
|
|
1529
|
+
* cron.register({
|
|
1530
|
+
* name: "send-digest",
|
|
1531
|
+
* schedule: "every day at noon",
|
|
1532
|
+
* handler: async () => { await sendDigestEmails(); },
|
|
1533
|
+
* timezone: "America/New_York",
|
|
1534
|
+
* });
|
|
1535
|
+
*/
|
|
1536
|
+
register(options) {
|
|
1537
|
+
const { name, schedule, handler, runOnInit = false, timezone, preventOverlap = true } = options;
|
|
1538
|
+
if (!name)
|
|
1539
|
+
throw new ValidationError("Cron job name is required");
|
|
1540
|
+
if (!handler)
|
|
1541
|
+
throw new ValidationError("Cron job handler is required");
|
|
1542
|
+
if (this.jobs.has(name)) {
|
|
1543
|
+
throw new ValidationError(`Cron job "${name}" is already registered. Use replace() to update it.`);
|
|
1544
|
+
}
|
|
1545
|
+
const expression = SHORTHANDS[schedule] ?? schedule;
|
|
1546
|
+
if (!cron.validate(expression)) {
|
|
1547
|
+
throw new ValidationError(`Invalid cron expression for job "${name}": "${schedule}"`);
|
|
1548
|
+
}
|
|
1549
|
+
const status = {
|
|
1550
|
+
name,
|
|
1551
|
+
schedule: expression,
|
|
1552
|
+
running: true,
|
|
1553
|
+
lastRun: null,
|
|
1554
|
+
lastError: null,
|
|
1555
|
+
executionCount: 0,
|
|
1556
|
+
errorCount: 0
|
|
1557
|
+
};
|
|
1558
|
+
const record = {
|
|
1559
|
+
options,
|
|
1560
|
+
status,
|
|
1561
|
+
executing: false,
|
|
1562
|
+
task: null
|
|
1563
|
+
// assigned below
|
|
1564
|
+
};
|
|
1565
|
+
const task = cron.schedule(
|
|
1566
|
+
expression,
|
|
1567
|
+
() => this.execute(name),
|
|
1568
|
+
{ timezone }
|
|
1569
|
+
);
|
|
1570
|
+
record.task = task;
|
|
1571
|
+
this.jobs.set(name, record);
|
|
1572
|
+
this.logger.info(`Cron job registered`, { name, schedule: expression, timezone });
|
|
1573
|
+
if (runOnInit) {
|
|
1574
|
+
this.execute(name);
|
|
1575
|
+
}
|
|
1576
|
+
}
|
|
1577
|
+
// ─── Execute ──────────────────────────────────────────────────────────────
|
|
1578
|
+
async execute(name) {
|
|
1579
|
+
const record = this.jobs.get(name);
|
|
1580
|
+
if (!record)
|
|
1581
|
+
return;
|
|
1582
|
+
const { preventOverlap = true, handler } = record.options;
|
|
1583
|
+
if (preventOverlap && record.executing) {
|
|
1584
|
+
this.logger.warn(`Cron job "${name}" skipped \u2014 previous execution still running`);
|
|
1585
|
+
return;
|
|
1586
|
+
}
|
|
1587
|
+
record.executing = true;
|
|
1588
|
+
record.status.lastRun = /* @__PURE__ */ new Date();
|
|
1589
|
+
record.status.executionCount++;
|
|
1590
|
+
this.logger.debug?.(`Cron job started`, { name, executionCount: record.status.executionCount });
|
|
1591
|
+
try {
|
|
1592
|
+
await handler();
|
|
1593
|
+
this.logger.debug?.(`Cron job completed`, { name });
|
|
1594
|
+
} catch (err) {
|
|
1595
|
+
record.status.errorCount++;
|
|
1596
|
+
record.status.lastError = err;
|
|
1597
|
+
this.logger.error(`Cron job failed`, { name, err });
|
|
1598
|
+
} finally {
|
|
1599
|
+
record.executing = false;
|
|
1600
|
+
}
|
|
1601
|
+
}
|
|
1602
|
+
// ─── Control ──────────────────────────────────────────────────────────────
|
|
1603
|
+
/**
|
|
1604
|
+
* Stops a running job without removing it.
|
|
1605
|
+
* Can be resumed with start().
|
|
1606
|
+
*/
|
|
1607
|
+
stop(name) {
|
|
1608
|
+
const record = this.getJob(name);
|
|
1609
|
+
record.task.stop();
|
|
1610
|
+
record.status.running = false;
|
|
1611
|
+
this.logger.info(`Cron job stopped`, { name });
|
|
1612
|
+
}
|
|
1613
|
+
/**
|
|
1614
|
+
* Resumes a stopped job.
|
|
1615
|
+
*/
|
|
1616
|
+
start(name) {
|
|
1617
|
+
const record = this.getJob(name);
|
|
1618
|
+
record.task.start();
|
|
1619
|
+
record.status.running = true;
|
|
1620
|
+
this.logger.info(`Cron job started`, { name });
|
|
1621
|
+
}
|
|
1622
|
+
/**
|
|
1623
|
+
* Stops and removes a job entirely.
|
|
1624
|
+
*/
|
|
1625
|
+
remove(name) {
|
|
1626
|
+
const record = this.getJob(name);
|
|
1627
|
+
record.task.stop();
|
|
1628
|
+
this.jobs.delete(name);
|
|
1629
|
+
this.logger.info(`Cron job removed`, { name });
|
|
1630
|
+
}
|
|
1631
|
+
/**
|
|
1632
|
+
* Replaces an existing job with a new configuration.
|
|
1633
|
+
* Useful for updating schedules at runtime.
|
|
1634
|
+
*/
|
|
1635
|
+
replace(options) {
|
|
1636
|
+
if (this.jobs.has(options.name))
|
|
1637
|
+
this.remove(options.name);
|
|
1638
|
+
this.register(options);
|
|
1639
|
+
}
|
|
1640
|
+
/**
|
|
1641
|
+
* Manually triggers a job outside its schedule.
|
|
1642
|
+
* Respects preventOverlap.
|
|
1643
|
+
*
|
|
1644
|
+
* @example
|
|
1645
|
+
* await cron.run("send-digest");
|
|
1646
|
+
*/
|
|
1647
|
+
async run(name) {
|
|
1648
|
+
this.getJob(name);
|
|
1649
|
+
await this.execute(name);
|
|
1650
|
+
}
|
|
1651
|
+
/**
|
|
1652
|
+
* Stops all registered jobs. Call this on process shutdown.
|
|
1653
|
+
*
|
|
1654
|
+
* @example
|
|
1655
|
+
* process.on("SIGTERM", () => cron.stopAll());
|
|
1656
|
+
*/
|
|
1657
|
+
stopAll() {
|
|
1658
|
+
for (const [name, record] of this.jobs) {
|
|
1659
|
+
record.task.stop();
|
|
1660
|
+
record.status.running = false;
|
|
1661
|
+
}
|
|
1662
|
+
this.logger.info(`All cron jobs stopped`, { count: this.jobs.size });
|
|
1663
|
+
}
|
|
1664
|
+
// ─── Introspection ────────────────────────────────────────────────────────
|
|
1665
|
+
/**
|
|
1666
|
+
* Returns the status of a single job.
|
|
1667
|
+
*/
|
|
1668
|
+
status(name) {
|
|
1669
|
+
return { ...this.getJob(name).status };
|
|
1670
|
+
}
|
|
1671
|
+
/**
|
|
1672
|
+
* Returns the status of all registered jobs.
|
|
1673
|
+
*/
|
|
1674
|
+
statusAll() {
|
|
1675
|
+
return Array.from(this.jobs.values()).map((r) => ({ ...r.status }));
|
|
1676
|
+
}
|
|
1677
|
+
/**
 * Returns true if a job with the given name is registered.
 * Only checks registration; says nothing about whether the job is
 * currently running (use status() for that).
 */
has(name) {
  return this.jobs.has(name);
}
|
|
1683
|
+
// ─── Private ──────────────────────────────────────────────────────────────
|
|
1684
|
+
getJob(name) {
|
|
1685
|
+
const record = this.jobs.get(name);
|
|
1686
|
+
if (!record)
|
|
1687
|
+
throw new ServerError(`Cron job "${name}" not found`);
|
|
1688
|
+
return record;
|
|
583
1689
|
}
|
|
584
1690
|
};
|
|
585
1691
|
|
|
586
1692
|
// src/security/jwt.ts
|
|
587
1693
|
import jwt from "jsonwebtoken";
|
|
588
1694
|
var jwtService = {
|
|
1695
|
+
/**
|
|
1696
|
+
* Signs a payload and returns a JWT string.
|
|
1697
|
+
*
|
|
1698
|
+
* @example
|
|
1699
|
+
* const token = await jwtService.encode({ data: { userId: 1 }, secretKey: "secret" });
|
|
1700
|
+
*/
|
|
589
1701
|
async encode({
|
|
590
1702
|
data,
|
|
591
1703
|
secretKey,
|
|
@@ -607,6 +1719,13 @@ var jwtService = {
|
|
|
607
1719
|
});
|
|
608
1720
|
});
|
|
609
1721
|
},
|
|
1722
|
+
/**
|
|
1723
|
+
* Verifies and decodes a JWT string.
|
|
1724
|
+
* Throws a typed `JwtError` on expiry, invalid signature, or not-yet-valid tokens.
|
|
1725
|
+
*
|
|
1726
|
+
* @example
|
|
1727
|
+
* const payload = await jwtService.decode<{ userId: number }>({ token, secretKey: "secret" });
|
|
1728
|
+
*/
|
|
610
1729
|
async decode({
|
|
611
1730
|
token,
|
|
612
1731
|
secretKey,
|
|
@@ -629,6 +1748,152 @@ var jwtService = {
|
|
|
629
1748
|
resolve(decoded);
|
|
630
1749
|
});
|
|
631
1750
|
});
|
|
1751
|
+
},
|
|
1752
|
+
/**
|
|
1753
|
+
* Returns the expiry date of a token without verifying it.
|
|
1754
|
+
* Returns null if the token has no expiry or cannot be decoded.
|
|
1755
|
+
*
|
|
1756
|
+
* @example
|
|
1757
|
+
* jwtService.getExpiry(token) // Date | null
|
|
1758
|
+
*/
|
|
1759
|
+
getExpiry(token) {
|
|
1760
|
+
const decoded = jwt.decode(token);
|
|
1761
|
+
if (!decoded?.exp)
|
|
1762
|
+
return null;
|
|
1763
|
+
return new Date(decoded.exp * 1e3);
|
|
1764
|
+
},
|
|
1765
|
+
/**
|
|
1766
|
+
* Returns true if the token is expired, without verifying the signature.
|
|
1767
|
+
* Useful for checking whether to refresh a token before making a request.
|
|
1768
|
+
*
|
|
1769
|
+
* @example
|
|
1770
|
+
* if (jwtService.isExpired(token)) { ... }
|
|
1771
|
+
*/
|
|
1772
|
+
isExpired(token) {
|
|
1773
|
+
const expiry = this.getExpiry(token);
|
|
1774
|
+
if (!expiry)
|
|
1775
|
+
return false;
|
|
1776
|
+
return expiry < /* @__PURE__ */ new Date();
|
|
1777
|
+
}
|
|
1778
|
+
};
|
|
1779
|
+
|
|
1780
|
+
// src/security/hash.ts
|
|
1781
|
+
import bcrypt from "bcrypt";
|
|
1782
|
+
import crypto from "crypto";
|
|
1783
|
+
var hashService = {
  // ─── bcrypt ───────────────────────────────────────────────────────────────
  /**
   * bcrypt-hashes a plain-text value (salted, deliberately slow).
   * The right choice for passwords.
   *
   * @example
   * const hashed = await hashService.hash("myPassword123");
   */
  async hash(plain, { rounds = 12 } = {}) {
    if (!plain) {
      throw new Error("Value to hash is required");
    }
    return bcrypt.hash(plain, rounds);
  },
  /**
   * Checks a plain-text value against a stored bcrypt hash.
   * Returns false (never throws) when either argument is missing.
   *
   * @example
   * const match = await hashService.compare("myPassword123", storedHash);
   * if (!match) throw new AuthenticationError("Invalid credentials");
   */
  async compare(plain, hashed) {
    if (!plain || !hashed) {
      return false;
    }
    return bcrypt.compare(plain, hashed);
  },
  /**
   * Heuristic check for a bcrypt-formatted string ("$2a/b/x/y$NN$...").
   * Helps avoid double-hashing an already-hashed value.
   *
   * @example
   * hashService.isBcryptHash("$2b$12$...") // true
   */
  isBcryptHash(value) {
    const bcryptPrefix = /^\$2[abxy]\$\d{2}\$/;
    return bcryptPrefix.test(value);
  },
  // ─── HMAC ─────────────────────────────────────────────────────────────────
  /**
   * Signs a value with a keyed HMAC. For signing data (webhooks, tokens,
   * URLs) — NOT for password storage.
   *
   * @example
   * const sig = hashService.hmac("payload body", process.env.WEBHOOK_SECRET);
   */
  hmac(value, secret, { algorithm = "sha256", encoding = "hex" } = {}) {
    if (!value) {
      throw new Error("Value is required for HMAC");
    }
    if (!secret) {
      throw new Error("Secret key is required for HMAC");
    }
    const mac = crypto.createHmac(algorithm, secret);
    return mac.update(value).digest(encoding);
  },
  /**
   * Constant-time verification of an HMAC signature; always prefer this
   * over `===` so signature checks don't leak timing information.
   * Returns false on any mismatch or error.
   *
   * @example
   * const valid = hashService.verifyHmac(payload, secret, incomingSignature);
   * if (!valid) throw new Error("Invalid webhook signature");
   */
  verifyHmac(value, secret, signature, options) {
    try {
      const computed = this.hmac(value, secret, options);
      const expectedBuf = Buffer.from(computed);
      const receivedBuf = Buffer.from(signature);
      return crypto.timingSafeEqual(expectedBuf, receivedBuf);
    } catch {
      // timingSafeEqual throws on length mismatch; treat any failure as invalid.
      return false;
    }
  },
  // ─── SHA ──────────────────────────────────────────────────────────────────
  /**
   * One-way, unkeyed SHA-256 digest — content fingerprints, cache keys,
   * deduplication. NOT suitable for passwords (fast and unsalted).
   *
   * @example
   * const fingerprint = hashService.sha256("file contents here");
   */
  sha256(value, encoding = "hex") {
    if (!value) {
      throw new Error("Value is required for sha256");
    }
    return crypto.createHash("sha256").update(value).digest(encoding);
  },
  /** One-way, unkeyed SHA-512 digest; same caveats as sha256. */
  sha512(value, encoding = "hex") {
    if (!value) {
      throw new Error("Value is required for sha512");
    }
    return crypto.createHash("sha512").update(value).digest(encoding);
  },
  // ─── Random Tokens ────────────────────────────────────────────────────────
  /**
   * Cryptographically secure random token — password resets, email
   * verification, API keys, etc.
   *
   * @example
   * const token = hashService.generateToken(); // 64-char hex string
   * const token = hashService.generateToken({ bytes: 16, encoding: "base64url" });
   */
  generateToken({ bytes = 32, encoding = "hex" } = {}) {
    const raw = crypto.randomBytes(bytes);
    return raw.toString(encoding);
  },
  /**
   * Random token plus its SHA-256 digest: send `token` to the user,
   * persist only `hashed`.
   *
   * @example
   * const { token, hashed } = hashService.generateHashedToken();
   * await db.user.update({ resetToken: hashed, resetTokenExpiry: ... });
   * await email.send({ to: user.email, token }); // send raw token to user
   */
  generateHashedToken(options) {
    const token = this.generateToken(options);
    return { token, hashed: this.sha256(token) };
  }
};
|
|
634
1899
|
export {
|
|
@@ -636,30 +1901,68 @@ export {
|
|
|
636
1901
|
AuthenticationError,
|
|
637
1902
|
AuthorizationError,
|
|
638
1903
|
BadRequestError,
|
|
1904
|
+
Cron,
|
|
639
1905
|
ExistingError,
|
|
640
1906
|
HTTP_STATUS,
|
|
641
1907
|
HTTP_STATUS_CODE_ERROR,
|
|
642
1908
|
NoContent,
|
|
643
1909
|
NotFoundError,
|
|
644
1910
|
Redis,
|
|
1911
|
+
S3,
|
|
645
1912
|
SQS,
|
|
646
1913
|
ServerError,
|
|
647
1914
|
TokenExpiredError,
|
|
648
1915
|
TokenInvalidError,
|
|
649
1916
|
ValidationError,
|
|
650
1917
|
WinstonLogger,
|
|
1918
|
+
camelCase,
|
|
651
1919
|
capitalize,
|
|
1920
|
+
countOccurrences,
|
|
1921
|
+
debounce,
|
|
652
1922
|
errorHandler,
|
|
653
1923
|
expressErrorMiddleware,
|
|
1924
|
+
flattenObject,
|
|
654
1925
|
formatDate,
|
|
1926
|
+
hashService,
|
|
1927
|
+
isArray,
|
|
1928
|
+
isBlank,
|
|
1929
|
+
isBoolean,
|
|
1930
|
+
isDate,
|
|
1931
|
+
isEmail,
|
|
655
1932
|
isEmpty,
|
|
1933
|
+
isInteger,
|
|
1934
|
+
isJSON,
|
|
1935
|
+
isNegative,
|
|
1936
|
+
isNil,
|
|
1937
|
+
isNumber,
|
|
656
1938
|
isObject,
|
|
657
|
-
|
|
1939
|
+
isPositive,
|
|
1940
|
+
isString,
|
|
1941
|
+
isURL,
|
|
1942
|
+
isUUID,
|
|
1943
|
+
joiMiddleware,
|
|
1944
|
+
joiValidate,
|
|
658
1945
|
jwtService,
|
|
1946
|
+
kebabCase,
|
|
659
1947
|
makeRequest,
|
|
1948
|
+
maskString,
|
|
1949
|
+
memoize,
|
|
1950
|
+
normalizeWhitespace,
|
|
1951
|
+
once,
|
|
660
1952
|
paginate,
|
|
661
1953
|
parseJSON,
|
|
1954
|
+
pascalCase,
|
|
1955
|
+
retry,
|
|
1956
|
+
reverse,
|
|
662
1957
|
sleep,
|
|
1958
|
+
snakeCase,
|
|
1959
|
+
splitWords,
|
|
663
1960
|
stringifyJSON,
|
|
1961
|
+
throttle,
|
|
1962
|
+
timeout,
|
|
1963
|
+
toLowerCase,
|
|
1964
|
+
toUpperCase,
|
|
1965
|
+
truncate,
|
|
1966
|
+
unflattenObject,
|
|
664
1967
|
uuid
|
|
665
1968
|
};
|