@ovencord/rest 2.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,153 @@
1
+
2
+ import type { REST } from '../REST.js';
3
+ import type { IHandler } from '../interfaces/Handler.js';
4
+ import { RESTEvents } from '../utils/constants.js';
5
+ import type { ResponseLike, HandlerRequestData, RouteData, RateLimitData } from '../utils/types.js';
6
+ import { normalizeRateLimitOffset, onRateLimit, sleep } from '../utils/utils.js';
7
+ import { handleErrors, incrementInvalidCount, makeNetworkRequest } from './Shared.js';
8
+
9
/**
 * The structure used to handle burst requests for a given bucket.
 * Burst requests have no ratelimit handling but allow for pre- and post-processing
 * of data in the same manner as sequentially queued requests.
 *
 * @remarks
 * This queue may still emit a rate limit error if an unexpected 429 is hit
 */
export class BurstHandler implements IHandler {
	/**
	 * {@inheritDoc IHandler.id}
	 */
	public readonly id: string;

	/**
	 * {@inheritDoc IHandler.inactive}
	 */
	public inactive = false;

	/**
	 * @param manager - The request manager
	 * @param hash - The hash that this RequestHandler handles
	 * @param majorParameter - The major parameter for this handler
	 */
	public constructor(
		private readonly manager: REST,
		private readonly hash: string,
		private readonly majorParameter: string,
	) {
		// One handler per `hash:majorParameter` pair, mirroring SequentialHandler ids
		this.id = `${hash}:${majorParameter}`;
	}

	/**
	 * Emits a debug message
	 *
	 * @param message - The message to debug
	 */
	private debug(message: string) {
		this.manager.emit(RESTEvents.Debug, `[REST ${this.id}] ${message}`);
	}

	/**
	 * {@inheritDoc IHandler.queueRequest}
	 */
	public async queueRequest(
		routeId: RouteData,
		url: string,
		options: RequestInit,
		requestData: HandlerRequestData,
	): Promise<ResponseLike> {
		// Burst requests bypass queueing entirely and run immediately
		return this.runRequest(routeId, url, options, requestData);
	}

	/**
	 * The method that actually makes the request to the API, and updates info about the bucket accordingly
	 *
	 * @param routeId - The generalized API route with literal ids for major parameters
	 * @param url - The fully resolved URL to make the request to
	 * @param options - The fetch options needed to make the request
	 * @param requestData - Extra data from the user's request needed for errors and additional processing
	 * @param retries - The number of retries this request has already attempted (recursion)
	 */
	private async runRequest(
		routeId: RouteData,
		url: string,
		options: RequestInit,
		requestData: HandlerRequestData,
		retries = 0,
	): Promise<ResponseLike> {
		const method = options.method ?? 'get';

		const res = await makeNetworkRequest(this.manager, routeId, url, options, requestData, retries);

		// Retry requested (makeNetworkRequest signals this by returning null)
		if (res === null) {
			// eslint-disable-next-line no-param-reassign
			return this.runRequest(routeId, url, options, requestData, ++retries);
		}

		const status = res.status;
		let retryAfter = 0;
		const retry = res.headers.get('Retry-After');

		// Amount of time in milliseconds until we should retry if rate limited (globally or otherwise)
		// NOTE: Retry-After is in seconds, hence the * 1_000 conversion
		const offset = normalizeRateLimitOffset(this.manager.options.offset, routeId.bucketRoute);
		if (retry) retryAfter = Number(retry) * 1_000 + offset;

		// Count the invalid requests (401/403/429 contribute toward the invalid-request threshold)
		if (status === 401 || status === 403 || status === 429) {
			incrementInvalidCount(this.manager);
		}

		if (status >= 200 && status < 300) {
			return res;
		} else if (status === 429) {
			// Unexpected ratelimit — burst handlers do no pre-emptive limiting, so any 429 is a surprise
			const isGlobal = res.headers.has('X-RateLimit-Global');
			const scope = (res.headers.get('X-RateLimit-Scope') ?? 'user') as RateLimitData['scope'];

			// Determine whether a RateLimitError should be thrown (per the manager's rejectOnRateLimit config)
			await onRateLimit(this.manager, {
				global: isGlobal,
				method,
				url,
				route: routeId.bucketRoute,
				majorParameter: this.majorParameter,
				hash: this.hash,
				limit: Number.POSITIVE_INFINITY,
				timeToReset: retryAfter,
				retryAfter,
				sublimitTimeout: 0,
				scope,
			});

			this.debug(
				[
					'Encountered unexpected 429 rate limit',
					`  Global         : ${isGlobal}`,
					`  Method         : ${method}`,
					`  URL            : ${url}`,
					`  Bucket         : ${routeId.bucketRoute}`,
					`  Major parameter: ${routeId.majorParameter}`,
					`  Hash           : ${this.hash}`,
					`  Limit          : ${Number.POSITIVE_INFINITY}`,
					`  Retry After    : ${retryAfter}ms`,
					`  Sublimit       : None`,
					`  Scope          : ${scope}`,
				].join('\n'),
			);

			// We are bypassing all other limits, but an encountered limit should be respected (it's probably a non-punished rate limit anyways)
			await sleep(retryAfter);

			// Since this is not a server side issue, the next request should pass, so we don't bump the retries counter
			return this.runRequest(routeId, url, options, requestData, retries);
		} else {
			const handled = await handleErrors(this.manager, res, method, url, requestData, retries, routeId);
			// null means "retry"; a ResponseLike means the error was surfaced/handled
			if (handled === null) {
				// eslint-disable-next-line no-param-reassign
				return this.runRequest(routeId, url, options, requestData, ++retries);
			}

			return handled;
		}
	}
}
@@ -0,0 +1,431 @@
1
+ import { AsyncQueue } from '../utils/AsyncQueue.js';
2
+ import type { REST } from '../REST.js';
3
+ import type { IHandler } from '../interfaces/Handler.js';
4
+ import { RESTEvents } from '../utils/constants.js';
5
+ import type { RateLimitData, ResponseLike, HandlerRequestData, RouteData } from '../utils/types.js';
6
+ import { hasSublimit, normalizeRateLimitOffset, onRateLimit, sleep } from '../utils/utils.js';
7
+ import { handleErrors, incrementInvalidCount, makeNetworkRequest } from './Shared.js';
8
+
9
/**
 * The kinds of queues a request may run through in a SequentialHandler.
 */
const enum QueueType {
	// The default queue every request on this bucket passes through
	Standard,
	// A secondary queue used only while a sublimit (e.g. 2 channel name changes/10 minutes) is active
	Sublimit,
}
13
+
14
/**
 * The structure used to handle sequential requests for a given bucket
 */
export class SequentialHandler implements IHandler {
	/**
	 * {@inheritDoc IHandler.id}
	 */
	public readonly id: string;

	/**
	 * The time this rate limit bucket will reset
	 */
	private reset = -1;

	/**
	 * The remaining requests that can be made before we are rate limited
	 */
	private remaining = 1;

	/**
	 * The total number of requests that can be made before we are rate limited
	 */
	private limit = Number.POSITIVE_INFINITY;

	/**
	 * The interface used to sequence async requests sequentially
	 */
	#asyncQueue = new AsyncQueue();

	/**
	 * The interface used to sequence sublimited async requests sequentially
	 */
	#sublimitedQueue: AsyncQueue | null = null;

	/**
	 * A promise wrapper for when the sublimited queue is finished being processed or null when not being processed
	 */
	#sublimitPromise: { promise: Promise<void>; resolve(): void } | null = null;

	/**
	 * Whether the sublimit queue needs to be shifted in the finally block
	 */
	#shiftSublimit = false;

	/**
	 * @param manager - The request manager
	 * @param hash - The hash that this RequestHandler handles
	 * @param majorParameter - The major parameter for this handler
	 */
	public constructor(
		private readonly manager: REST,
		private readonly hash: string,
		private readonly majorParameter: string,
	) {
		this.id = `${hash}:${majorParameter}`;
	}

	/**
	 * {@inheritDoc IHandler.inactive}
	 */
	public get inactive(): boolean {
		// Inactive when both queues are drained and no pre-emptive limit is in effect;
		// the manager uses this to sweep idle handlers
		return (
			this.#asyncQueue.remaining === 0 &&
			(this.#sublimitedQueue === null || this.#sublimitedQueue.remaining === 0) &&
			!this.limited
		);
	}

	/**
	 * If the rate limit bucket is currently limited by the global limit
	 */
	private get globalLimited(): boolean {
		return this.manager.globalRemaining <= 0 && Date.now() < this.manager.globalReset;
	}

	/**
	 * If the rate limit bucket is currently limited by its limit
	 */
	private get localLimited(): boolean {
		return this.remaining <= 0 && Date.now() < this.reset;
	}

	/**
	 * If the rate limit bucket is currently limited
	 */
	private get limited(): boolean {
		return this.globalLimited || this.localLimited;
	}

	/**
	 * The time until queued requests can continue
	 *
	 * @param routeId - The route to look up a per-route offset for
	 */
	private getTimeToReset(routeId: RouteData): number {
		const offset = normalizeRateLimitOffset(this.manager.options.offset, routeId.bucketRoute);
		return this.reset + offset - Date.now();
	}

	/**
	 * Emits a debug message
	 *
	 * @param message - The message to debug
	 */
	private debug(message: string) {
		this.manager.emit(RESTEvents.Debug, `[REST ${this.id}] ${message}`);
	}

	/**
	 * Delay all requests for the specified amount of time, handling global rate limits
	 *
	 * @param time - The amount of time to delay all requests for
	 */
	private async globalDelayFor(time: number): Promise<void> {
		await sleep(time);
		// Clear the shared delay so the next task that hits the global limit creates a fresh one
		this.manager.globalDelay = null;
	}

	/**
	 * {@inheritDoc IHandler.queueRequest}
	 */
	public async queueRequest(
		routeId: RouteData,
		url: string,
		options: RequestInit,
		requestData: HandlerRequestData,
	): Promise<ResponseLike> {
		let queue = this.#asyncQueue;
		let queueType = QueueType.Standard;
		// Separate sublimited requests when already sublimited
		if (this.#sublimitedQueue && hasSublimit(routeId.bucketRoute, requestData.body, options.method)) {
			queue = this.#sublimitedQueue!;
			queueType = QueueType.Sublimit;
		}

		// Wait for any previous requests to be completed before this one is run
		await queue.wait(requestData.signal ? { signal: requestData.signal } : undefined);
		// This set handles retroactively sublimiting requests
		if (queueType === QueueType.Standard) {
			if (this.#sublimitedQueue && hasSublimit(routeId.bucketRoute, requestData.body, options.method)) {
				/**
				 * Remove the request from the standard queue, it should never be possible to get here while processing the
				 * sublimit queue so there is no need to worry about shifting the wrong request
				 */
				queue = this.#sublimitedQueue!;
				const wait = queue.wait();
				this.#asyncQueue.shift();
				await wait;
			} else if (this.#sublimitPromise) {
				// Stall requests while the sublimit queue gets processed
				await this.#sublimitPromise.promise;
			}
		}

		try {
			// Make the request, and return the results
			return await this.runRequest(routeId, url, options, requestData);
		} finally {
			// Allow the next request to fire
			queue.shift();
			if (this.#shiftSublimit) {
				this.#shiftSublimit = false;
				this.#sublimitedQueue?.shift();
			}

			// If this request is the last request in a sublimit
			if (this.#sublimitedQueue?.remaining === 0) {
				this.#sublimitPromise?.resolve();
				this.#sublimitedQueue = null;
			}
		}
	}

	/**
	 * The method that actually makes the request to the api, and updates info about the bucket accordingly
	 *
	 * @param routeId - The generalized api route with literal ids for major parameters
	 * @param url - The fully resolved url to make the request to
	 * @param options - The fetch options needed to make the request
	 * @param requestData - Extra data from the user's request needed for errors and additional processing
	 * @param retries - The number of retries this request has already attempted (recursion)
	 */
	private async runRequest(
		routeId: RouteData,
		url: string,
		options: RequestInit,
		requestData: HandlerRequestData,
		retries = 0,
	): Promise<ResponseLike> {
		/*
		 * After calculations have been done, pre-emptively stop further requests
		 * Potentially loop until this task can run if e.g. the global rate limit is hit twice
		 */
		while (this.limited) {
			const isGlobal = this.globalLimited;
			let limit: number;
			let timeout: number;
			let delay: Promise<void>;

			if (isGlobal) {
				const offset = normalizeRateLimitOffset(this.manager.options.offset, routeId.bucketRoute);

				// Set RateLimitData based on the global limit
				limit = this.manager.options.globalRequestsPerSecond;
				timeout = this.manager.globalReset + offset - Date.now();
				// If this is the first task to reach the global timeout, set the global delay
				// eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing
				if (!this.manager.globalDelay) {
					// The global delay function clears the global delay state when it is resolved
					this.manager.globalDelay = this.globalDelayFor(timeout);
				}

				// All tasks share one delay promise so they wake up together
				delay = this.manager.globalDelay;
			} else {
				// Set RateLimitData based on the route-specific limit
				limit = this.limit;
				timeout = this.getTimeToReset(routeId);
				delay = sleep(timeout);
			}

			const rateLimitData: RateLimitData = {
				global: isGlobal,
				method: options.method ?? 'get',
				url,
				route: routeId.bucketRoute,
				majorParameter: this.majorParameter,
				hash: this.hash,
				limit,
				timeToReset: timeout,
				retryAfter: timeout,
				sublimitTimeout: 0,
				scope: 'user',
			};

			// Let library users know they have hit a rate limit
			this.manager.emit(RESTEvents.RateLimited, rateLimitData);
			// Determine whether a RateLimitError should be thrown
			await onRateLimit(this.manager, rateLimitData);

			// When not erroring, emit debug for what is happening
			if (isGlobal) {
				this.debug(`Global rate limit hit, blocking all requests for ${timeout}ms`);
			} else {
				this.debug(`Waiting ${timeout}ms for rate limit to pass`);
			}

			// Wait the remaining time left before the rate limit resets
			await delay;
		}

		// As the request goes out, update the global usage information
		if (!this.manager.globalReset || this.manager.globalReset < Date.now()) {
			this.manager.globalReset = Date.now() + 1_000;
			this.manager.globalRemaining = this.manager.options.globalRequestsPerSecond;
		}

		this.manager.globalRemaining--;

		const method = options.method ?? 'get';

		const res = await makeNetworkRequest(this.manager, routeId, url, options, requestData, retries);

		// Retry requested (makeNetworkRequest signals this by returning null)
		if (res === null) {
			// eslint-disable-next-line no-param-reassign
			return this.runRequest(routeId, url, options, requestData, ++retries);
		}

		const status = res.status;
		let retryAfter = 0;

		const limit = res.headers.get('X-RateLimit-Limit');
		const remaining = res.headers.get('X-RateLimit-Remaining');
		const reset = res.headers.get('X-RateLimit-Reset-After');
		const hash = res.headers.get('X-RateLimit-Bucket');
		const retry = res.headers.get('Retry-After');
		const scope = (res.headers.get('X-RateLimit-Scope') ?? 'user') as RateLimitData['scope'];

		const offset = normalizeRateLimitOffset(this.manager.options.offset, routeId.bucketRoute);

		// Update the total number of requests that can be made before the rate limit resets
		this.limit = limit ? Number(limit) : Number.POSITIVE_INFINITY;
		// Update the number of remaining requests that can be made before the rate limit resets
		this.remaining = remaining ? Number(remaining) : 1;
		// Update the time when this rate limit resets (reset-after is in seconds)
		this.reset = reset ? Number(reset) * 1_000 + Date.now() + offset : Date.now();

		// Amount of time in milliseconds until we should retry if rate limited (globally or otherwise)
		if (retry) retryAfter = Number(retry) * 1_000 + offset;

		// Handle buckets via the hash header retroactively
		if (hash && hash !== this.hash) {
			// Let library users know when rate limit buckets have been updated
			this.debug(['Received bucket hash update', `  Old Hash  : ${this.hash}`, `  New Hash  : ${hash}`].join('\n'));
			// This queue will eventually be eliminated via attrition
			this.manager.hashes.set(
				`${method}:${routeId.bucketRoute}${typeof requestData.auth === 'string' ? `:${requestData.auth}` : ''}`,
				{ value: hash, lastAccess: Date.now() },
			);
		} else if (hash) {
			// Handle the case where hash value doesn't change
			// Fetch the hash data from the manager
			const hashData = this.manager.hashes.get(
				`${method}:${routeId.bucketRoute}${typeof requestData.auth === 'string' ? `:${requestData.auth}` : ''}`,
			);

			// When fetched, update the last access of the hash (keeps it alive in the sweep)
			if (hashData) {
				hashData.lastAccess = Date.now();
			}
		}

		// Handle retryAfter, which means we have actually hit a rate limit
		let sublimitTimeout: number | null = null;
		if (retryAfter > 0) {
			if (res.headers.has('X-RateLimit-Global')) {
				this.manager.globalRemaining = 0;
				this.manager.globalReset = Date.now() + retryAfter;
			} else if (!this.localLimited) {
				/*
				 * This is a sublimit (e.g. 2 channel name changes/10 minutes) since the headers don't indicate a
				 * route-wide rate limit. Don't update remaining or reset to avoid rate limiting the whole
				 * endpoint, just set a reset time on the request itself to avoid retrying too soon.
				 */
				sublimitTimeout = retryAfter;
			}
		}

		// Count the invalid requests (401/403/429 contribute toward the invalid-request threshold)
		if (status === 401 || status === 403 || status === 429) {
			incrementInvalidCount(this.manager);
		}

		if (res.ok) {
			return res;
		} else if (status === 429) {
			// A rate limit was hit - this may happen if the route isn't associated with an official bucket hash yet, or when first globally rate limited
			const isGlobal = this.globalLimited;
			let limit: number;
			let timeout: number;

			if (isGlobal) {
				const offset = normalizeRateLimitOffset(this.manager.options.offset, routeId.bucketRoute);

				// Set RateLimitData based on the global limit
				limit = this.manager.options.globalRequestsPerSecond;
				timeout = this.manager.globalReset + offset - Date.now();
			} else {
				// Set RateLimitData based on the route-specific limit
				limit = this.limit;
				timeout = this.getTimeToReset(routeId);
			}

			// Determine whether a RateLimitError should be thrown (per the manager's rejectOnRateLimit config)
			await onRateLimit(this.manager, {
				global: isGlobal,
				method,
				url,
				route: routeId.bucketRoute,
				majorParameter: this.majorParameter,
				hash: this.hash,
				limit,
				timeToReset: timeout,
				retryAfter,
				sublimitTimeout: sublimitTimeout ?? 0,
				scope,
			});

			this.debug(
				[
					'Encountered unexpected 429 rate limit',
					`  Global         : ${isGlobal.toString()}`,
					`  Method         : ${method}`,
					`  URL            : ${url}`,
					`  Bucket         : ${routeId.bucketRoute}`,
					`  Major parameter: ${routeId.majorParameter}`,
					`  Hash           : ${this.hash}`,
					`  Limit          : ${limit}`,
					`  Retry After    : ${retryAfter}ms`,
					`  Sublimit       : ${sublimitTimeout ? `${sublimitTimeout}ms` : 'None'}`,
					`  Scope          : ${scope}`,
				].join('\n'),
			);

			// If caused by a sublimit, wait it out here so other requests on the route can be handled
			if (sublimitTimeout) {
				// Normally the sublimit queue will not exist, however, if a sublimit is hit while in the sublimit queue, it will
				const firstSublimit = !this.#sublimitedQueue;
				if (firstSublimit) {
					this.#sublimitedQueue = new AsyncQueue();
					// Occupy the head of the new sublimit queue so followers queue behind this request
					void this.#sublimitedQueue.wait();
					this.#asyncQueue.shift();
				}

				this.#sublimitPromise?.resolve();
				this.#sublimitPromise = null;
				await sleep(sublimitTimeout);
				let resolve: () => void;
				// eslint-disable-next-line no-promise-executor-return
				const promise = new Promise<void>((res) => (resolve = res));
				this.#sublimitPromise = { promise, resolve: resolve! };
				if (firstSublimit) {
					// Re-queue this request so it can be shifted by the finally
					await this.#asyncQueue.wait();
					this.#shiftSublimit = true;
				}
			}

			// Since this is not a server side issue, the next request should pass, so we don't bump the retries counter
			return this.runRequest(routeId, url, options, requestData, retries);
		} else {
			const handled = await handleErrors(this.manager, res, method, url, requestData, retries, routeId);
			// null means "retry"; a ResponseLike means the error was surfaced/handled
			if (handled === null) {
				// eslint-disable-next-line no-param-reassign
				return this.runRequest(routeId, url, options, requestData, ++retries);
			}

			return handled;
		}
	}
}