codeweaver 3.1.3 → 4.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +56 -73
- package/package.json +23 -1
- package/src/config.ts +17 -15
- package/src/constants.ts +1 -0
- package/src/core/aws/api-gateway.ts +187 -0
- package/src/core/aws/basic-types.ts +147 -0
- package/src/core/aws/dynamodb.ts +187 -0
- package/src/core/aws/index.ts +9 -0
- package/src/core/aws/lambda.ts +199 -0
- package/src/core/aws/message-broker.ts +167 -0
- package/src/core/aws/message.ts +259 -0
- package/src/core/aws/s3.ts +136 -0
- package/src/core/aws/utilities.ts +44 -0
- package/src/core/cache/basic-types.ts +17 -0
- package/src/core/cache/decorator.ts +72 -0
- package/src/core/cache/index.ts +4 -0
- package/src/core/cache/memory-cache.class.ts +119 -0
- package/src/{utilities/cache/redis-cache.ts → core/cache/redis-cache.class.ts} +58 -10
- package/src/core/container/basic-types.ts +10 -0
- package/src/{utilities → core/container}/container.ts +7 -17
- package/src/core/container/index.ts +2 -0
- package/src/{utilities → core/error}/error-handling.ts +1 -65
- package/src/core/error/index.ts +3 -0
- package/src/core/error/response-error.ts +45 -0
- package/src/core/error/send-http-error.ts +15 -0
- package/src/core/file/file-helpers.ts +166 -0
- package/src/core/file/index.ts +1 -0
- package/src/{utilities → core/helpers}/assignment.ts +2 -2
- package/src/core/helpers/comparison.ts +86 -0
- package/src/{utilities → core/helpers}/conversion.ts +2 -2
- package/src/core/helpers/decorators.ts +316 -0
- package/src/core/helpers/format.ts +9 -0
- package/src/core/helpers/index.ts +7 -0
- package/src/core/helpers/range.ts +67 -0
- package/src/core/helpers/types.ts +3 -0
- package/src/core/logger/index.ts +4 -0
- package/src/{utilities/logger/logger.config.ts → core/logger/winston-logger.config.ts} +1 -1
- package/src/{utilities → core}/logger/winston-logger.service.ts +3 -3
- package/src/core/message-broker/bullmq/basic-types.ts +67 -0
- package/src/core/message-broker/bullmq/broker.ts +141 -0
- package/src/core/message-broker/bullmq/index.ts +3 -0
- package/src/core/message-broker/bullmq/queue.ts +58 -0
- package/src/core/message-broker/bullmq/worker.ts +68 -0
- package/src/core/message-broker/kafka/basic-types.ts +45 -0
- package/src/core/message-broker/kafka/consumer.ts +95 -0
- package/src/core/message-broker/kafka/index.ts +3 -0
- package/src/core/message-broker/kafka/producer.ts +113 -0
- package/src/core/message-broker/rabitmq/basic-types.ts +44 -0
- package/src/core/message-broker/rabitmq/channel.ts +95 -0
- package/src/core/message-broker/rabitmq/consumer.ts +94 -0
- package/src/core/message-broker/rabitmq/index.ts +4 -0
- package/src/core/message-broker/rabitmq/producer.ts +100 -0
- package/src/core/message-broker/utilities.ts +50 -0
- package/src/core/middlewares/basic-types.ts +39 -0
- package/src/core/middlewares/decorators.ts +244 -0
- package/src/core/middlewares/index.ts +3 -0
- package/src/core/middlewares/middlewares.ts +246 -0
- package/src/core/parallel/index.ts +3 -0
- package/src/{utilities → core}/parallel/parallel.ts +11 -1
- package/src/core/rate-limit/basic-types.ts +43 -0
- package/src/core/rate-limit/index.ts +4 -0
- package/src/core/rate-limit/memory-store.ts +65 -0
- package/src/core/rate-limit/rate-limit.ts +134 -0
- package/src/core/rate-limit/redis-store.ts +141 -0
- package/src/core/retry/basic-types.ts +21 -0
- package/src/core/retry/decorator.ts +139 -0
- package/src/core/retry/index.ts +2 -0
- package/src/main.ts +6 -8
- package/src/routers/orders/index.router.ts +5 -1
- package/src/routers/orders/order.controller.ts +54 -64
- package/src/routers/products/index.router.ts +2 -1
- package/src/routers/products/product.controller.ts +33 -68
- package/src/routers/users/index.router.ts +1 -1
- package/src/routers/users/user.controller.ts +25 -50
- package/src/utilities/cache/memory-cache.ts +0 -74
- /package/src/{utilities → core}/logger/base-logger.interface.ts +0 -0
- /package/src/{utilities → core}/logger/logger.service.ts +0 -0
- /package/src/{utilities → core}/parallel/chanel.ts +0 -0
- /package/src/{utilities → core}/parallel/worker-pool.ts +0 -0
|
@@ -1,5 +1,15 @@
|
|
|
1
1
|
import { WorkerPool } from "./worker-pool";
|
|
2
2
|
|
|
3
|
+
/**
|
|
4
|
+
* Checks if a given function is an async function.
|
|
5
|
+
*
|
|
6
|
+
* @param {(...args: any[]) => any} fn - The function to check.
|
|
7
|
+
* @returns {boolean} True if the function is an async function, false otherwise.
|
|
8
|
+
*/
|
|
9
|
+
export function isAsync(fn: (...args: any[]) => any) {
|
|
10
|
+
return typeof fn === "function" && fn.constructor.name === "AsyncFunction";
|
|
11
|
+
}
|
|
12
|
+
|
|
3
13
|
/**
|
|
4
14
|
* Creates a setter function that, when invoked, rebinds the captured
|
|
5
15
|
* destination binding to the provided source value.
|
|
@@ -10,7 +20,7 @@ import { WorkerPool } from "./worker-pool";
|
|
|
10
20
|
* @returns {() => void} A function that, when called, rebinds the captured `destination` to `source`.
|
|
11
21
|
*/
|
|
12
22
|
export function set<T>(destination: T, source: T): () => void {
|
|
13
|
-
return () => {
|
|
23
|
+
return async () => {
|
|
14
24
|
destination = source;
|
|
15
25
|
};
|
|
16
26
|
}
|
|
@@ -0,0 +1,43 @@
|
|
|
1
|
+
/**
 * Rate limit bucket.
 * One record holds the token-bucket state for a single bucket id.
 */
export type RateLimitRecord = {
  // unique identifier of the bucket (route/method and, optionally, user)
  bucketId: string;
  // bucket capacity: maximum number of tokens per window
  rateLimitAllowedCalls: number;
  // refill window length in milliseconds
  rateLimitTimeSpan: number;
  // tokens remaining in the current window
  tokens: number;
  // epoch ms at which the current window started (last refill)
  resetTime: number;
  // epoch ms of the most recent getBucket() touch of this record
  lastUpdated: number;
};
|
|
12
|
+
|
|
13
|
+
/**
 * Abstract store interface for rate limit buckets.
 * Implementations can be in-memory (single instance) or Redis-backed
 * (shared across multiple instances).
 */
export interface RateLimitStore {
  /**
   * Retrieve the bucket. If it doesn't exist, initialize it
   * with the provided capacity and window. Implementations also perform the
   * per-call refill/consume step inside this method, so calling it is not a
   * pure read.
   * @param bucketId - identifier for the bucket
   * @param rateLimitAllowedCalls - bucket capacity (max tokens)
   * @param rateLimitTimeSpan - refill window in milliseconds
   * @returns A promise that resolves to the bucket record
   */
  getBucket(
    bucketId: string,
    rateLimitAllowedCalls: number,
    rateLimitTimeSpan: number
  ): Promise<RateLimitRecord>;

  /**
   * Remove a bucket
   * @param bucketId - bucket identifier
   */
  remove(bucketId: string): Promise<void>;

  /**
   * Remove all buckets
   */
  removeAll(): Promise<void>;
}
|
|
@@ -0,0 +1,65 @@
|
|
|
1
|
+
import { RateLimitStore, RateLimitRecord } from "./basic-types";
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* In-memory rate limit store (per-process).
|
|
5
|
+
* Suitable for single-instance deployments or local testing.
|
|
6
|
+
*/
|
|
7
|
+
export class MemoryRateLimitStore implements RateLimitStore {
|
|
8
|
+
private store: Map<string, RateLimitRecord> = new Map();
|
|
9
|
+
|
|
10
|
+
/**
|
|
11
|
+
* Retrieve (or initialize) the bucket.
|
|
12
|
+
* If there is no existing bucket, create one with full capacity.
|
|
13
|
+
* @param userId - user identifier
|
|
14
|
+
* @param rateLimitAllowedCalls - bucket capacity
|
|
15
|
+
* @param rateLimitTimeSpan - refill window
|
|
16
|
+
* @returns Promise resolving to the bucket
|
|
17
|
+
*/
|
|
18
|
+
async getBucket(
|
|
19
|
+
bucketId: string,
|
|
20
|
+
rateLimitAllowedCalls: number,
|
|
21
|
+
rateLimitTimeSpan: number
|
|
22
|
+
): Promise<RateLimitRecord> {
|
|
23
|
+
let bucket = this.store.get(bucketId);
|
|
24
|
+
const now = Date.now();
|
|
25
|
+
if (bucket == null) {
|
|
26
|
+
bucket = {
|
|
27
|
+
bucketId,
|
|
28
|
+
rateLimitAllowedCalls,
|
|
29
|
+
tokens: rateLimitAllowedCalls,
|
|
30
|
+
resetTime: now,
|
|
31
|
+
lastUpdated: now,
|
|
32
|
+
rateLimitTimeSpan,
|
|
33
|
+
};
|
|
34
|
+
this.store.set(bucketId, bucket);
|
|
35
|
+
return bucket;
|
|
36
|
+
}
|
|
37
|
+
|
|
38
|
+
const elapsed = now - bucket.resetTime;
|
|
39
|
+
if (elapsed > bucket.rateLimitTimeSpan) {
|
|
40
|
+
bucket.resetTime = now;
|
|
41
|
+
bucket.tokens = bucket.rateLimitAllowedCalls;
|
|
42
|
+
} else if (bucket.tokens > 0) {
|
|
43
|
+
bucket.tokens--;
|
|
44
|
+
}
|
|
45
|
+
bucket.lastUpdated = now;
|
|
46
|
+
|
|
47
|
+
this.store.set(bucketId, bucket);
|
|
48
|
+
return bucket;
|
|
49
|
+
}
|
|
50
|
+
|
|
51
|
+
/**
|
|
52
|
+
* Remove a bucket
|
|
53
|
+
* @param bucketId - bucket identifier
|
|
54
|
+
*/
|
|
55
|
+
async remove(bucketId: string): Promise<void> {
|
|
56
|
+
this.store.delete(bucketId);
|
|
57
|
+
}
|
|
58
|
+
|
|
59
|
+
/**
|
|
60
|
+
* Remove all buckets
|
|
61
|
+
*/
|
|
62
|
+
async removeAll(): Promise<void> {
|
|
63
|
+
this.store.clear();
|
|
64
|
+
}
|
|
65
|
+
}
|
|
@@ -0,0 +1,134 @@
|
|
|
1
|
+
import { Response, NextFunction } from "express";
|
|
2
|
+
import { RateLimitStore } from "./basic-types";
|
|
3
|
+
import { MemoryRateLimitStore } from "./memory-store";
|
|
4
|
+
import { ResponseError } from "@/core/error";
|
|
5
|
+
import { config } from "@/config";
|
|
6
|
+
import { resolve } from "@/core/container";
|
|
7
|
+
import { UserRequest } from "@/core/middlewares";
|
|
8
|
+
import { createMethodDecorator } from "../helpers";
|
|
9
|
+
|
|
10
|
+
const globalBucketId = "[global]";
|
|
11
|
+
|
|
12
|
+
/**
|
|
13
|
+
* Rate limiter factory for per-user (or global) limits.
|
|
14
|
+
*
|
|
15
|
+
* @param rateLimitTimeSpan - window duration in milliseconds
|
|
16
|
+
* @param rateLimitAllowedCalls - bucket capacity (max tokens)
|
|
17
|
+
* @param bucketName - true for a global bucket, false for per-user buckets
|
|
18
|
+
* @param store - optional RateLimitStore instance; if omitted, defaults to in-memory
|
|
19
|
+
* @returns Express middleware function
|
|
20
|
+
*/
|
|
21
|
+
export function rateLimitMiddleware<UserIdType = string>(
|
|
22
|
+
rateLimitTimeSpan: number = config.rateLimitTimeSpan,
|
|
23
|
+
rateLimitAllowedCalls: number = config.rateLimitAllowedCalls,
|
|
24
|
+
exceedHandler?: (next: NextFunction) => Promise<void>,
|
|
25
|
+
bucketName: string = globalBucketId,
|
|
26
|
+
store: RateLimitStore = resolve(MemoryRateLimitStore)
|
|
27
|
+
) {
|
|
28
|
+
return async (
|
|
29
|
+
req: UserRequest<UserIdType>,
|
|
30
|
+
_res: Response,
|
|
31
|
+
next: NextFunction
|
|
32
|
+
) => {
|
|
33
|
+
let bucketId: string;
|
|
34
|
+
if (bucketName == globalBucketId) {
|
|
35
|
+
bucketId = `${bucketName}."${req.baseUrl}${req.path}:${req.method}`;
|
|
36
|
+
} else {
|
|
37
|
+
const user = req.user?.id ?? "[anonymous]";
|
|
38
|
+
bucketId = `${bucketName}."${req.baseUrl}${req.path}:${req.method}."${user}`; // per-user (anonymous if no user)
|
|
39
|
+
}
|
|
40
|
+
|
|
41
|
+
const now = Date.now();
|
|
42
|
+
|
|
43
|
+
const bucket = await store.getBucket(
|
|
44
|
+
bucketId,
|
|
45
|
+
rateLimitAllowedCalls,
|
|
46
|
+
rateLimitTimeSpan
|
|
47
|
+
);
|
|
48
|
+
|
|
49
|
+
if (bucket.tokens >= 1) {
|
|
50
|
+
next();
|
|
51
|
+
}
|
|
52
|
+
|
|
53
|
+
if (exceedHandler != null) {
|
|
54
|
+
return await exceedHandler(next);
|
|
55
|
+
} else {
|
|
56
|
+
throw new ResponseError(
|
|
57
|
+
"Too many requests, please try again later.",
|
|
58
|
+
429
|
|
59
|
+
);
|
|
60
|
+
}
|
|
61
|
+
};
|
|
62
|
+
}
|
|
63
|
+
|
|
64
|
+
/**
 * Rate limit decorator for controller methods.
 *
 * Consumes one token from a bucket per invocation; when the bucket is empty
 * the call is rejected with HTTP 429 (or delegated to exceedHandler).
 *
 * @param rateLimitTimeSpan - window duration in milliseconds
 * @param rateLimitAllowedCalls - bucket capacity (max tokens)
 * @param exceedHandler - optional callback invoked instead of throwing when the limit is exceeded
 * @param bucketName - bucket namespace; the default is a single global bucket
 *   per class+method. Any other value creates per-user buckets, where the user
 *   id is read from the first method argument's `user.id` ("[anonymous]" when absent).
 * @param store - optional RateLimitStore instance; if omitted, defaults to in-memory
 *
 * Example usage:
 * class UserController {
 *   @RateLimit(60000, 20) // 20 calls per minute (global per class+method)
 *   async create(...) { ... }
 * }
 */
export function RateLimit(
  rateLimitTimeSpan: number = config.rateLimitTimeSpan,
  rateLimitAllowedCalls: number = config.rateLimitAllowedCalls,
  exceedHandler?: () => Promise<any>,
  bucketName: string = globalBucketId,
  store: RateLimitStore = resolve(MemoryRateLimitStore)
) {
  // The decorator options are forwarded through createMethodDecorator as a
  // tuple; the callback receives them back plus the wrapped method, its call
  // arguments and the class instance.
  return createMethodDecorator<
    [number, number, (() => Promise<any>) | undefined, string, RateLimitStore],
    [UserRequest]
  >(
    async (
      [
        rateLimitTimeSpan,
        rateLimitAllowedCalls,
        exceedHandler,
        bucketName,
        store,
      ],
      [],
      method,
      args,
      classInstance
    ) => {
      let bucketId: string;
      if (bucketName == globalBucketId) {
        // Global bucket keyed by class + method name only.
        bucketId = `${bucketName}."${classInstance.constructor.name}:${method.name}"`;
      } else {
        // Try to locate a real Express Request object in arguments
        const reqCandidate = args[0] ?? null;
        const userFromReq: any = reqCandidate?.user;
        const userId = userFromReq?.id ? String(userFromReq.id) : "[anonymous]";

        // Bucket identity
        bucketId = `${bucketName}."${classInstance.constructor.name}:${method.name}".${userId}`; // per-user (anonymous if no user)
      }

      // getBucket also performs the refill/consume step for this call.
      const bucket = await store.getBucket(
        bucketId,
        rateLimitAllowedCalls,
        rateLimitTimeSpan
      );

      if (bucket.tokens >= 1) {
        return await method.apply(classInstance, args);
      }

      if (exceedHandler != null) {
        return await exceedHandler();
      } else {
        throw new ResponseError(
          "Too many requests, please try again later.",
          429
        );
      }
    }
  )(rateLimitTimeSpan, rateLimitAllowedCalls, exceedHandler, bucketName, store);
}
|
|
@@ -0,0 +1,141 @@
|
|
|
1
|
+
import { RateLimitStore, RateLimitRecord } from "./basic-types";
|
|
2
|
+
import { Redis } from "ioredis";
|
|
3
|
+
|
|
4
|
+
/**
|
|
5
|
+
* Redis-backed rate limit store.
|
|
6
|
+
* Uses a Lua script to perform atomic refill + consume operations.
|
|
7
|
+
*/
|
|
8
|
+
export class RedisRateLimitStore implements RateLimitStore {
|
|
9
|
+
private redis: Redis;
|
|
10
|
+
private keyPrefix = "rl:bucket:";
|
|
11
|
+
|
|
12
|
+
// Lua script for atomic refill + consume
|
|
13
|
+
// KEYS[1] = bucketKey
|
|
14
|
+
// ARGV[1] = rateLimitAllowedCalls
|
|
15
|
+
// ARGV[2] = rateLimitTimeSpan
|
|
16
|
+
// ARGV[3] = now (ms)
|
|
17
|
+
private luaScript = `
|
|
18
|
+
local key = KEYS[1]
|
|
19
|
+
local rateLimitAllowedCalls = tonumber(ARGV[1])
|
|
20
|
+
local rateLimitTimeSpan = tonumber(ARGV[2])
|
|
21
|
+
local now = tonumber(ARGV[3])
|
|
22
|
+
|
|
23
|
+
local bucket = redis.call("HMGET", key, "tokens", "resetTime", "lastUpdated", "rateLimitAllowedCalls", "rateLimitTimeSpan")
|
|
24
|
+
local tokens = tonumber(bucket[1] or rateLimitAllowedCalls)
|
|
25
|
+
local resetTime = tonumber(bucket[2] or now)
|
|
26
|
+
local lastUpdated = tonumber(bucket[3] or now)
|
|
27
|
+
rateLimitAllowedCalls = tonumber(bucket[4] or rateLimitAllowedCalls)
|
|
28
|
+
rateLimitTimeSpan = tonumber(bucket[5] or rateLimitTimeSpan)
|
|
29
|
+
|
|
30
|
+
local elapsed = now - resetTime
|
|
31
|
+
if elapsed > rateLimitTimeSpan then
|
|
32
|
+
resetTime = now
|
|
33
|
+
tokens = rateLimitAllowedCalls
|
|
34
|
+
end if tokens > 0 then
|
|
35
|
+
tokens = tokens - 1 // consume
|
|
36
|
+
end
|
|
37
|
+
lastUpdated = now
|
|
38
|
+
|
|
39
|
+
redis.call("HMSET", key, "tokens", tokens, "resetTime", resetTime, "lastUpdated", lastUpdated, "rateLimitAllowedCalls", rateLimitAllowedCalls, "rateLimitTimeSpan", rateLimitTimeSpan)
|
|
40
|
+
return {1, tokens, resetTime, lastUpdated }
|
|
41
|
+
`;
|
|
42
|
+
|
|
43
|
+
constructor(
|
|
44
|
+
options: {
|
|
45
|
+
host?: string;
|
|
46
|
+
port?: number;
|
|
47
|
+
password?: string;
|
|
48
|
+
db?: number;
|
|
49
|
+
} = {}
|
|
50
|
+
) {
|
|
51
|
+
this.redis = new Redis(options);
|
|
52
|
+
}
|
|
53
|
+
|
|
54
|
+
/**
|
|
55
|
+
* Remove a rate limit bucket.
|
|
56
|
+
* @param bucketId - rate limit bucket identifier
|
|
57
|
+
* @returns A promise that resolves when the bucket is removed
|
|
58
|
+
*/
|
|
59
|
+
remove(bucketId: string): Promise<void> {
|
|
60
|
+
this.redis.del(this.bucketKey(bucketId));
|
|
61
|
+
return Promise.resolve();
|
|
62
|
+
}
|
|
63
|
+
|
|
64
|
+
/**
|
|
65
|
+
* Remove all rate limit buckets.
|
|
66
|
+
* Flushes the entire Redis database.
|
|
67
|
+
* Use with caution in a shared Redis instance.
|
|
68
|
+
* @returns A promise that resolves when all buckets are removed
|
|
69
|
+
*/
|
|
70
|
+
removeAll(): Promise<void> {
|
|
71
|
+
this.redis.flushdb();
|
|
72
|
+
return Promise.resolve();
|
|
73
|
+
}
|
|
74
|
+
|
|
75
|
+
/**
|
|
76
|
+
* Generate a Redis key for the given rate limit bucket.
|
|
77
|
+
* @param bucketId - rate limit bucket identifier
|
|
78
|
+
* @returns Redis key for the bucket
|
|
79
|
+
*/
|
|
80
|
+
private bucketKey(bucketId: string) {
|
|
81
|
+
return this.keyPrefix + encodeURIComponent(bucketId);
|
|
82
|
+
}
|
|
83
|
+
|
|
84
|
+
async getBucket(
|
|
85
|
+
bucketId: string,
|
|
86
|
+
rateLimitAllowedCalls: number,
|
|
87
|
+
rateLimitTimeSpan: number
|
|
88
|
+
): Promise<RateLimitRecord> {
|
|
89
|
+
const key = this.bucketKey(bucketId);
|
|
90
|
+
const now = Date.now();
|
|
91
|
+
|
|
92
|
+
const result = await this.redis.eval(
|
|
93
|
+
this.luaScript,
|
|
94
|
+
key,
|
|
95
|
+
rateLimitAllowedCalls,
|
|
96
|
+
rateLimitTimeSpan,
|
|
97
|
+
now
|
|
98
|
+
);
|
|
99
|
+
|
|
100
|
+
const [ok, tokens, resetTime, lastUpdated] = result as [
|
|
101
|
+
number,
|
|
102
|
+
number,
|
|
103
|
+
number,
|
|
104
|
+
number
|
|
105
|
+
];
|
|
106
|
+
if (ok !== 1) {
|
|
107
|
+
// Fallback: reset bucket
|
|
108
|
+
const bucket: RateLimitRecord = {
|
|
109
|
+
bucketId,
|
|
110
|
+
rateLimitAllowedCalls,
|
|
111
|
+
tokens: rateLimitAllowedCalls,
|
|
112
|
+
resetTime: now,
|
|
113
|
+
lastUpdated: now,
|
|
114
|
+
rateLimitTimeSpan,
|
|
115
|
+
};
|
|
116
|
+
await this.redis.hmset(key, {
|
|
117
|
+
tokens: String(bucket.tokens),
|
|
118
|
+
resetTime: String(bucket.resetTime),
|
|
119
|
+
lastUpdated: String(bucket.lastUpdated),
|
|
120
|
+
rateLimitAllowedCalls: String(bucket.rateLimitAllowedCalls),
|
|
121
|
+
rateLimitTimeSpan: String(bucket.rateLimitTimeSpan),
|
|
122
|
+
});
|
|
123
|
+
return bucket;
|
|
124
|
+
}
|
|
125
|
+
return {
|
|
126
|
+
bucketId,
|
|
127
|
+
rateLimitAllowedCalls,
|
|
128
|
+
rateLimitTimeSpan,
|
|
129
|
+
tokens,
|
|
130
|
+
resetTime,
|
|
131
|
+
lastUpdated,
|
|
132
|
+
};
|
|
133
|
+
}
|
|
134
|
+
|
|
135
|
+
/**
|
|
136
|
+
* Optional: cleanly close Redis connection.
|
|
137
|
+
*/
|
|
138
|
+
quit() {
|
|
139
|
+
return this.redis.quit();
|
|
140
|
+
}
|
|
141
|
+
}
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
/**
 * Options for retry decorator
 */
export type RetryOptions = {
  /** number of retries after the initial attempt (total attempts = retries + 1) */
  retries?: number;
  /** delay between attempts in ms. */
  delay?: number;
  /** whether to use exponential backoff. */
  backoff?: boolean;
  /** maximum delay when using backoff (ms). */
  maxDelay?: number;
  /** add randomness to delay. */
  jitter?: boolean;
  /** optional callback invoked before each retry attempt (except for the first). */
  onRetry?: (attempt: number, error: any) => void;
  /** optional AbortSignal to cancel retries */
  signal?: AbortSignal;
  /** optional timeout for the whole operation (in ms). If provided, cancels after timeout */
  timeoutMs?: number;
};
|
|
@@ -0,0 +1,139 @@
|
|
|
1
|
+
import { RetryOptions } from "./basic-types";
|
|
2
|
+
import { createMethodDecorator } from "../helpers";
|
|
3
|
+
|
|
4
|
+
/**
|
|
5
|
+
* Sleep for a given number of milliseconds, aborting if the signal is aborted.
|
|
6
|
+
* @param ms The number of milliseconds to sleep.
|
|
7
|
+
* @param signal An optional AbortSignal to abort the sleep.
|
|
8
|
+
* @returns A Promise that resolves when the sleep is complete.
|
|
9
|
+
* @throws An error if the signal is aborted.
|
|
10
|
+
*/
|
|
11
|
+
export function sleep(ms: number, signal?: AbortSignal): Promise<void> {
|
|
12
|
+
return new Promise((resolve, reject) => {
|
|
13
|
+
if (signal?.aborted) return reject(new Error("Aborted"));
|
|
14
|
+
const t = setTimeout(() => resolve(), ms);
|
|
15
|
+
signal?.addEventListener("abort", () => {
|
|
16
|
+
clearTimeout(t);
|
|
17
|
+
reject(new Error("Aborted"));
|
|
18
|
+
});
|
|
19
|
+
});
|
|
20
|
+
}
|
|
21
|
+
|
|
22
|
+
/**
|
|
23
|
+
* Introduce randomness into a delay to prevent multiple requests from being
|
|
24
|
+
* sent at the same time.
|
|
25
|
+
* @param delay The base delay in milliseconds.
|
|
26
|
+
* @param jitter If true, introduce a random jitter of up to 50% of the delay.
|
|
27
|
+
* @returns The jittered delay in milliseconds, or the original delay if jitter is false.
|
|
28
|
+
*/
|
|
29
|
+
export function jitterDelay(delay: number, jitter = true) {
|
|
30
|
+
if (!jitter) return delay;
|
|
31
|
+
// +/- 50% jitter
|
|
32
|
+
const deviation = Math.floor(Math.random() * delay * 0.5);
|
|
33
|
+
const sign = Math.random() < 0.5 ? -1 : 1;
|
|
34
|
+
return Math.max(0, delay + sign * deviation);
|
|
35
|
+
}
|
|
36
|
+
|
|
37
|
+
/**
|
|
38
|
+
* Retry decorator
|
|
39
|
+
* Retry a method with exponential backoff and jitter.
|
|
40
|
+
* @param options RetryOptions
|
|
41
|
+
* @returns MethodDecorator
|
|
42
|
+
*/
|
|
43
|
+
export const Retry = createMethodDecorator<[RetryOptions], []>(
|
|
44
|
+
async ([options], [], method, methodArgs) => {
|
|
45
|
+
const {
|
|
46
|
+
retries = 2,
|
|
47
|
+
delay = 1000,
|
|
48
|
+
backoff = false,
|
|
49
|
+
maxDelay = 30000,
|
|
50
|
+
jitter: useJitter = true,
|
|
51
|
+
onRetry,
|
|
52
|
+
signal,
|
|
53
|
+
timeoutMs,
|
|
54
|
+
} = options;
|
|
55
|
+
let attempts = 0;
|
|
56
|
+
let currentDelay = delay;
|
|
57
|
+
let lastError: any;
|
|
58
|
+
|
|
59
|
+
// If the user provided a per-call AbortSignal, respect it; otherwise use a local one
|
|
60
|
+
const abortController = new AbortController();
|
|
61
|
+
const combinedSignal = signal as AbortSignal | undefined;
|
|
62
|
+
|
|
63
|
+
// If an external signal is provided, wire it to abort this operation when it aborts
|
|
64
|
+
if (combinedSignal) {
|
|
65
|
+
if (combinedSignal.aborted) {
|
|
66
|
+
throw new Error("Operation aborted");
|
|
67
|
+
}
|
|
68
|
+
|
|
69
|
+
const onAbort = () => abortController.abort();
|
|
70
|
+
combinedSignal.addEventListener("abort", onAbort);
|
|
71
|
+
}
|
|
72
|
+
|
|
73
|
+
// Optional per-call timeout
|
|
74
|
+
let timeoutHandle: any;
|
|
75
|
+
let timeoutReached = false;
|
|
76
|
+
await new Promise<void>((_resolve, reject) => {
|
|
77
|
+
if (timeoutMs != null) {
|
|
78
|
+
timeoutHandle = setTimeout(() => {
|
|
79
|
+
timeoutReached = true;
|
|
80
|
+
reject(new Error("Operation timed out"));
|
|
81
|
+
}, timeoutMs);
|
|
82
|
+
}
|
|
83
|
+
});
|
|
84
|
+
|
|
85
|
+
// Run retries
|
|
86
|
+
while (true) {
|
|
87
|
+
// If timeout configured, race the call with timeout
|
|
88
|
+
const attemptPromise = (async () => {
|
|
89
|
+
attempts++;
|
|
90
|
+
try {
|
|
91
|
+
const result = await method.apply(this, methodArgs);
|
|
92
|
+
if (timeoutHandle) clearTimeout(timeoutHandle);
|
|
93
|
+
return result;
|
|
94
|
+
} catch (err) {
|
|
95
|
+
lastError = err;
|
|
96
|
+
// If this was the last allowed attempt, rethrow
|
|
97
|
+
if (attempts > retries) {
|
|
98
|
+
if (timeoutHandle) clearTimeout(timeoutHandle);
|
|
99
|
+
throw err;
|
|
100
|
+
}
|
|
101
|
+
// notify onRetry
|
|
102
|
+
if (onRetry != null) {
|
|
103
|
+
onRetry(attempts, err);
|
|
104
|
+
}
|
|
105
|
+
// compute next delay
|
|
106
|
+
let nextDelay = currentDelay;
|
|
107
|
+
if (backoff) {
|
|
108
|
+
nextDelay = Math.min(currentDelay * 2, maxDelay);
|
|
109
|
+
currentDelay = nextDelay;
|
|
110
|
+
} else {
|
|
111
|
+
nextDelay = currentDelay;
|
|
112
|
+
}
|
|
113
|
+
// apply jitter
|
|
114
|
+
nextDelay = jitterDelay(nextDelay, useJitter);
|
|
115
|
+
await sleep(nextDelay, combinedSignal);
|
|
116
|
+
// then retry
|
|
117
|
+
throw new Error("Retry"); // control flow not ideal; we re-enter loop
|
|
118
|
+
}
|
|
119
|
+
})();
|
|
120
|
+
|
|
121
|
+
// We need to break out correctly; instead, handle with catch:
|
|
122
|
+
try {
|
|
123
|
+
const result = await attemptPromise;
|
|
124
|
+
if (timeoutHandle) clearTimeout(timeoutHandle);
|
|
125
|
+
return result;
|
|
126
|
+
} catch (err) {
|
|
127
|
+
if (timeoutReached) throw err;
|
|
128
|
+
// if we caught a non-retryable error or exhausted retries, rethrow
|
|
129
|
+
// We signal by checking if lastError and attempts > retries
|
|
130
|
+
if (attempts > retries) {
|
|
131
|
+
if (timeoutHandle) clearTimeout(timeoutHandle);
|
|
132
|
+
throw lastError ?? err;
|
|
133
|
+
}
|
|
134
|
+
// otherwise loop to retry
|
|
135
|
+
continue;
|
|
136
|
+
}
|
|
137
|
+
}
|
|
138
|
+
}
|
|
139
|
+
);
|
package/src/main.ts
CHANGED
|
@@ -1,17 +1,16 @@
|
|
|
1
1
|
import express, { NextFunction, Request, Response } from "express";
|
|
2
2
|
import fs from "fs";
|
|
3
3
|
import path from "path";
|
|
4
|
-
import { config } from "./config";
|
|
5
|
-
import { ResponseError } from "./utilities/error-handling";
|
|
6
|
-
import { resolve } from "./utilities/container";
|
|
7
|
-
import { WinstonLoggerService } from "./utilities/logger/winston-logger.service";
|
|
8
4
|
import "dotenv/config";
|
|
9
5
|
import {
|
|
10
6
|
defaultRouter,
|
|
11
7
|
routerDir,
|
|
12
8
|
routerExtension,
|
|
13
9
|
swaggerPath,
|
|
14
|
-
} from "
|
|
10
|
+
} from "@/constants";
|
|
11
|
+
import { config } from "@/config";
|
|
12
|
+
import { ResponseError } from "@/core/error";
|
|
13
|
+
import { logger } from "@/core/logger";
|
|
15
14
|
//import cors from "cors";
|
|
16
15
|
|
|
17
16
|
/**
|
|
@@ -68,7 +67,7 @@ const app = express();
|
|
|
68
67
|
app.use(express.json());
|
|
69
68
|
app.use(express.urlencoded({ extended: true }));
|
|
70
69
|
|
|
71
|
-
//
|
|
70
|
+
//Automatically import all routers from the /src/routers directory
|
|
72
71
|
const routersPath = path.join(__dirname, routerDir);
|
|
73
72
|
loadRouters(routersPath);
|
|
74
73
|
|
|
@@ -82,8 +81,7 @@ if (config.swagger) {
|
|
|
82
81
|
|
|
83
82
|
// General error handler
|
|
84
83
|
app.use(
|
|
85
|
-
(error: ResponseError,
|
|
86
|
-
const logger = resolve(WinstonLoggerService);
|
|
84
|
+
(error: ResponseError, _req: Request, res: Response, _next: NextFunction) => {
|
|
87
85
|
const status = error.status ?? 500;
|
|
88
86
|
const errorObject = {
|
|
89
87
|
status,
|
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
import { Router, Request, Response } from "express";
|
|
2
2
|
import asyncHandler from "express-async-handler";
|
|
3
3
|
import OrderController from "./order.controller";
|
|
4
|
-
import { resolve } from "@/
|
|
4
|
+
import { resolve } from "@/core/container";
|
|
5
5
|
|
|
6
6
|
const router = Router();
|
|
7
7
|
const orderController = resolve(OrderController);
|
|
@@ -97,6 +97,8 @@ router.get(
|
|
|
97
97
|
* responses:
|
|
98
98
|
* 200:
|
|
99
99
|
* description: Order details
|
|
100
|
+
* 404:
|
|
101
|
+
* description: Order not found
|
|
100
102
|
*/
|
|
101
103
|
router.get(
|
|
102
104
|
"/:id",
|
|
@@ -122,6 +124,8 @@ router.get(
|
|
|
122
124
|
* responses:
|
|
123
125
|
* 200:
|
|
124
126
|
* description: Order canceled successfully
|
|
127
|
+
* 400:
|
|
128
|
+
* description: Cancellation is only available when the order is in processing status.
|
|
125
129
|
*/
|
|
126
130
|
router.patch(
|
|
127
131
|
"/:id/cancel",
|