@clickup/rest-client 2.10.292 → 2.10.294
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.eslintrc.base.js +23 -2
- package/README.md +1 -1
- package/SECURITY.md +39 -0
- package/dist/.eslintcache +1 -0
- package/dist/RestClient.js +1 -1
- package/dist/RestClient.js.map +1 -1
- package/dist/RestOptions.d.ts +5 -7
- package/dist/RestOptions.d.ts.map +1 -1
- package/dist/RestOptions.js +2 -2
- package/dist/RestOptions.js.map +1 -1
- package/dist/RestRequest.d.ts.map +1 -1
- package/dist/RestRequest.js +7 -13
- package/dist/RestRequest.js.map +1 -1
- package/dist/RestResponse.d.ts +4 -1
- package/dist/RestResponse.d.ts.map +1 -1
- package/dist/RestResponse.js +2 -1
- package/dist/RestResponse.js.map +1 -1
- package/dist/RestStream.d.ts +6 -1
- package/dist/RestStream.d.ts.map +1 -1
- package/dist/RestStream.js +5 -0
- package/dist/RestStream.js.map +1 -1
- package/dist/helpers/depaginate.d.ts +1 -1
- package/dist/helpers/depaginate.d.ts.map +1 -1
- package/dist/helpers/depaginate.js.map +1 -1
- package/dist/internal/RestFetchReader.d.ts +9 -2
- package/dist/internal/RestFetchReader.d.ts.map +1 -1
- package/dist/internal/RestFetchReader.js +20 -12
- package/dist/internal/RestFetchReader.js.map +1 -1
- package/dist/internal/RestRangeUploader.js.map +1 -1
- package/dist/internal/calcRetryDelay.js.map +1 -1
- package/dist/internal/inferResBodyEncoding.d.ts +8 -0
- package/dist/internal/inferResBodyEncoding.d.ts.map +1 -0
- package/dist/internal/inferResBodyEncoding.js +34 -0
- package/dist/internal/inferResBodyEncoding.js.map +1 -0
- package/dist/internal/inspectPossibleJSON.js.map +1 -1
- package/dist/internal/substituteParams.js.map +1 -1
- package/dist/internal/throwIfErrorResponse.js.map +1 -1
- package/dist/middlewares/paceRequests.js.map +1 -1
- package/dist/pacers/PacerQPS.js.map +1 -1
- package/docs/README.md +1 -1
- package/docs/classes/RestResponse.md +19 -8
- package/docs/classes/RestStream.md +10 -5
- package/docs/interfaces/RestOptions.md +20 -21
- package/docs/modules.md +1 -1
- package/jest.config.js +3 -0
- package/package.json +36 -7
- package/src/RestClient.ts +490 -0
- package/src/RestOptions.ts +186 -0
- package/src/RestRequest.ts +441 -0
- package/src/RestResponse.ts +49 -0
- package/src/RestStream.ts +89 -0
- package/src/errors/RestContentSizeOverLimitError.ts +3 -0
- package/src/errors/RestError.ts +6 -0
- package/src/errors/RestRateLimitError.ts +8 -0
- package/src/errors/RestResponseError.ts +46 -0
- package/src/errors/RestRetriableError.ts +8 -0
- package/src/errors/RestTimeoutError.ts +3 -0
- package/src/errors/RestTokenInvalidError.ts +8 -0
- package/src/helpers/depaginate.ts +37 -0
- package/src/index.ts +50 -0
- package/src/internal/RestFetchReader.ts +188 -0
- package/src/internal/RestRangeUploader.ts +61 -0
- package/src/internal/calcRetryDelay.ts +59 -0
- package/src/internal/inferResBodyEncoding.ts +33 -0
- package/src/internal/inspectPossibleJSON.ts +71 -0
- package/src/internal/prependNewlineIfMultiline.ts +3 -0
- package/src/internal/substituteParams.ts +25 -0
- package/src/internal/throwIfErrorResponse.ts +89 -0
- package/src/internal/toFloatMs.ts +3 -0
- package/src/middlewares/paceRequests.ts +42 -0
- package/src/pacers/Pacer.ts +22 -0
- package/src/pacers/PacerComposite.ts +29 -0
- package/src/pacers/PacerQPS.ts +147 -0
- package/tsconfig.json +3 -10
- package/typedoc.json +6 -1
- package/dist/tsconfig.tsbuildinfo +0 -1
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Keeps calling a function with an updating cursor, and depaginates all the
|
|
3
|
+
* results until the cursor returned is null or undefined.
|
|
4
|
+
*
|
|
5
|
+
* On each call, the inner function needs to return an array with two elements:
|
|
6
|
+
* 1. Array or results, which could be empty, but not null or undefined.
|
|
7
|
+
* 2. A new cursor.
|
|
8
|
+
*/
|
|
9
|
+
export default async function* depaginate<TItem, TCursor = string>(
|
|
10
|
+
readFunc: (
|
|
11
|
+
cursor: TCursor | undefined
|
|
12
|
+
) => Promise<readonly [TItem[], TCursor | null | undefined]>
|
|
13
|
+
): AsyncGenerator<TItem, void, undefined> {
|
|
14
|
+
let prevCursor: TCursor | null | undefined = undefined;
|
|
15
|
+
let cursor: TCursor | null | undefined = undefined;
|
|
16
|
+
for (;;) {
|
|
17
|
+
let items: TItem[];
|
|
18
|
+
[items, cursor] = await readFunc(cursor === null ? undefined : cursor);
|
|
19
|
+
yield* items;
|
|
20
|
+
|
|
21
|
+
if (cursor === null || cursor === undefined) {
|
|
22
|
+
break;
|
|
23
|
+
}
|
|
24
|
+
|
|
25
|
+
if (JSON.stringify(prevCursor) === JSON.stringify(cursor)) {
|
|
26
|
+
throw Error(
|
|
27
|
+
"Depagination got stuck: prevCursor=" +
|
|
28
|
+
JSON.stringify(prevCursor) +
|
|
29
|
+
", cursor=" +
|
|
30
|
+
JSON.stringify(cursor) +
|
|
31
|
+
" (they must differ)"
|
|
32
|
+
);
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
prevCursor = cursor;
|
|
36
|
+
}
|
|
37
|
+
}
|
package/src/index.ts
ADDED
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
import { Headers } from "node-fetch";
|
|
2
|
+
import RestContentSizeOverLimitError from "./errors/RestContentSizeOverLimitError";
|
|
3
|
+
import RestError from "./errors/RestError";
|
|
4
|
+
import RestRateLimitError from "./errors/RestRateLimitError";
|
|
5
|
+
import RestResponseError from "./errors/RestResponseError";
|
|
6
|
+
import RestRetriableError from "./errors/RestRetriableError";
|
|
7
|
+
import RestTimeoutError from "./errors/RestTimeoutError";
|
|
8
|
+
import RestTokenInvalidError from "./errors/RestTokenInvalidError";
|
|
9
|
+
import depaginate from "./helpers/depaginate";
|
|
10
|
+
import paceRequests from "./middlewares/paceRequests";
|
|
11
|
+
import Pacer from "./pacers/Pacer";
|
|
12
|
+
import PacerComposite from "./pacers/PacerComposite";
|
|
13
|
+
import PacerQPS, { PacerQPSBackend } from "./pacers/PacerQPS";
|
|
14
|
+
import RestClient, { TokenGetter } from "./RestClient";
|
|
15
|
+
import RestOptions, {
|
|
16
|
+
RestLogEvent,
|
|
17
|
+
Middleware,
|
|
18
|
+
Agents,
|
|
19
|
+
DEFAULT_OPTIONS,
|
|
20
|
+
} from "./RestOptions";
|
|
21
|
+
import RestRequest from "./RestRequest";
|
|
22
|
+
import RestResponse from "./RestResponse";
|
|
23
|
+
import RestStream from "./RestStream";
|
|
24
|
+
|
|
25
|
+
export {
|
|
26
|
+
Agents,
|
|
27
|
+
DEFAULT_OPTIONS,
|
|
28
|
+
depaginate,
|
|
29
|
+
Headers,
|
|
30
|
+
Middleware,
|
|
31
|
+
Pacer,
|
|
32
|
+
PacerComposite,
|
|
33
|
+
paceRequests,
|
|
34
|
+
PacerQPS,
|
|
35
|
+
PacerQPSBackend,
|
|
36
|
+
RestClient,
|
|
37
|
+
RestContentSizeOverLimitError,
|
|
38
|
+
RestError,
|
|
39
|
+
RestLogEvent,
|
|
40
|
+
RestOptions,
|
|
41
|
+
RestRateLimitError,
|
|
42
|
+
RestRequest,
|
|
43
|
+
RestResponse,
|
|
44
|
+
RestResponseError,
|
|
45
|
+
RestRetriableError,
|
|
46
|
+
RestStream,
|
|
47
|
+
RestTimeoutError,
|
|
48
|
+
RestTokenInvalidError,
|
|
49
|
+
TokenGetter,
|
|
50
|
+
};
|
|
@@ -0,0 +1,188 @@
|
|
|
1
|
+
import type { Agent as HttpAgent } from "http";
|
|
2
|
+
import AbortControllerPolyfilled from "abort-controller";
|
|
3
|
+
import { Memoize } from "fast-typescript-memoize";
|
|
4
|
+
import type { RequestInit } from "node-fetch";
|
|
5
|
+
import fetch, { Headers, Request } from "node-fetch";
|
|
6
|
+
import inferResBodyEncoding from "./inferResBodyEncoding";
|
|
7
|
+
|
|
8
|
+
export interface RestFetchReaderOptions {
  /** Overall timeout for the ENTIRE fetch (headers + body streaming), ms. */
  timeoutMs?: number;
  /** Awaited once after the response arrives and again before each chunk is read. */
  heartbeat?: () => Promise<void>;
  /** Called (instead of rethrowing) when the request got aborted, which is
   * presumably caused by timeoutMs firing. */
  onTimeout?: (reader: RestFetchReader, e: any) => void;
  /** Called after each chunk has been read and yielded. */
  onAfterRead?: (reader: RestFetchReader) => void;
}
|
|
14
|
+
|
|
15
|
+
/**
 * A low-level stateful reader engine on top of node-fetch which implements
 * "preload first N chars and then leave the rest ready for iteration" pattern,
 * with global timeout for the entire fetching time.
 *
 * Once created, the object MUST be iterated in full to consume the rest of the
 * stream and close the connection. In case you're not interested in its entire
 * content, you must prematurely "return" (close) the iterator.
 *
 * The abstraction is intentionally kept independent on all others, to make it
 * simple and testable separately.
 */
export default class RestFetchReader {
  // HTTP status of the response; 0 until the fetch actually runs.
  private _status = 0;
  // Response headers; empty until the fetch actually runs.
  private _headers = new Headers();
  // Characters accumulated by preload() so far.
  private _textFetched = "";
  // True while the stream may still contain data beyond _textFetched.
  private _textIsPartial = true;
  // Total characters consumed from the stream (preload + iteration combined).
  private _charsRead = 0;

  constructor(
    private _url: string,
    private _reqInit: RequestInit,
    private _options: RestFetchReaderOptions
  ) {}

  /**
   * Returns the number of characters read from the stream so far.
   */
  get charsRead() {
    return this._charsRead;
  }

  /**
   * Returns the Agent instance used for this request. It's implied that
   * RestRequest#agent always points to a http.Agent object.
   */
  get agent() {
    // Duck-typing check: only http.Agent-like objects carry a "sockets" map.
    return (
      this._reqInit.agent &&
      typeof this._reqInit.agent === "object" &&
      "sockets" in this._reqInit.agent
        ? this._reqInit.agent
        : null
    ) as HttpAgent | null;
  }

  /**
   * Returns HTTP status after preload() was called.
   */
  get status() {
    return this._status;
  }

  /**
   * Returns HTTP headers after preload() was called.
   */
  get headers() {
    return this._headers;
  }

  /**
   * Returns the data preloaded so far.
   */
  get textFetched(): string {
    return this._textFetched;
  }

  /**
   * If true, then there is a chance that reading more from the stream will
   * return more data.
   */
  get textIsPartial() {
    return this._textIsPartial;
  }

  /**
   * Reads preloadChars chars or a little bit more from the response and puts
   * them to this.textFetched. Leaves the rest of the data in res.body for
   * future reads if there are more data to fetch (you must consume them or
   * close the stream, otherwise the connection will remain open).
   */
  async preload(preloadChars: number) {
    // Thanks to @Memoize on the iterator method, this is THE shared
    // generator, so preloading and a later iteration never double-read.
    const generator = this[Symbol.asyncIterator]();
    try {
      while (this._charsRead < preloadChars) {
        const { value, done } = await generator.next();
        if (done) {
          // Stream fully drained: the preloaded text is the whole body.
          this._textIsPartial = false;
          await generator.return();
          return;
        }

        this._textFetched += value;
      }
    } catch (e: unknown) {
      // Close the underlying connection before propagating the error.
      await generator.return();
      throw e;
    }
  }

  /**
   * Closes the connection.
   */
  async close() {
    // Memoization guarantees this is the same generator that may already be
    // running; return() triggers its finally block, which aborts the fetch.
    await this[Symbol.asyncIterator]().return();
  }

  /**
   * Returns an async generator for the rest of the data. Must be consumed
   * entirely, otherwise the connection may remain dangling.
   *
   * Memoization is important here, to return the same generator when we call
   * this method multiple times and to not start a new iteration over and over.
   */
  @Memoize()
  async *[Symbol.asyncIterator]() {
    const { timeoutMs, onTimeout, onAfterRead } = this._options;

    // Some rest-client users are still on Node v14, which has no global
    // AbortController, hence the polyfill fallback.
    const controller =
      typeof AbortController === "undefined"
        ? new AbortControllerPolyfilled()
        : new AbortController();

    // The timeout covers the ENTIRE fetch: headers AND body streaming.
    const timeout = timeoutMs
      ? setTimeout(() => controller.abort(), timeoutMs)
      : undefined;

    try {
      // DO NOT use fetch(fetchReq) with one argument! It clones the stream
      // which just doesn't work in practice, even with file streams. I wasted
      // 4h on debugging this: fetch(fetchReq.url, fetchReq) works and
      // fetch(fetchReq) doesn't for e.g. Dropbox API and
      // https://stackoverflow.com/a/44577569
      const res = await fetch(
        this._url,
        new Request(this._url, {
          ...this._reqInit,
          signal: controller.signal as any,
        })
      );
      this._status = res.status;
      this._headers = res.headers;

      // See https://nodejs.org/api/stream.html#readablesetencodingencoding on
      // how Node streams and setEncoding() handle decoding when the returned
      // chunks cross the boundaries of multi-byte characters (TL;DR: it works
      // fine, that's why we work with string and not Buffer here).
      res.body.setEncoding(inferResBodyEncoding(res));

      await this._options.heartbeat?.();
      for await (const chunk of res.body) {
        await this._options.heartbeat?.();
        this._charsRead += chunk.length;
        yield chunk as string;
        onAfterRead?.(this);
      }
    } catch (e: unknown) {
      if (controller.signal.aborted && onTimeout) {
        // During the try block, the only abort source is the timeout above,
        // so let the onTimeout callback decide instead of rethrowing.
        onTimeout(this, e);
      } else {
        throw e;
      }
    } finally {
      timeout && clearTimeout(timeout);
      // If someone stops iterating prematurely, we forcefully close the
      // connection in all cases. Theoretically, stopping the iteration on
      // res.body should've closed the connection, but in practice it doesn't
      // happen; it looks like a bug in node-fetch, and thus, we must always use
      // the AbortController in the end.
      controller.abort();
    }
  }
}
|
|
@@ -0,0 +1,61 @@
|
|
|
1
|
+
import type RestClient from "../RestClient";
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* Sends a series of Content-Range requests to an URL.
|
|
5
|
+
* - The stream size is unknown in advance even theoretically. So we read it
|
|
6
|
+
* with chunkSize+1 bytes chunks (+1 is to know for sure, is there something
|
|
7
|
+
* else left in the stream or not) and then send data with chunkSize bytes
|
|
8
|
+
* chunks.
|
|
9
|
+
* - The last chunk is a terminating one (and we know, which one is the last),
|
|
10
|
+
* so we reflect it in "Content-Range: x-y/S" format setting S to the total
|
|
11
|
+
* number of bytes in the stream.
|
|
12
|
+
*/
|
|
13
|
+
export default class RestRangeUploader {
|
|
14
|
+
private _pos = 0;
|
|
15
|
+
|
|
16
|
+
constructor(
|
|
17
|
+
private _client: RestClient,
|
|
18
|
+
private _chunkSize: number,
|
|
19
|
+
private _method: "POST" | "PUT",
|
|
20
|
+
private _path: string,
|
|
21
|
+
private _mimeType: string
|
|
22
|
+
) {}
|
|
23
|
+
|
|
24
|
+
async upload(stream: AsyncIterable<Buffer>) {
|
|
25
|
+
let buf = Buffer.allocUnsafe(0);
|
|
26
|
+
let res: string | null = null;
|
|
27
|
+
for await (const readData of stream) {
|
|
28
|
+
buf = Buffer.concat([buf, readData]);
|
|
29
|
+
while (buf.length >= this._chunkSize + 1) {
|
|
30
|
+
res = await this._flush(buf.slice(0, this._chunkSize), false);
|
|
31
|
+
buf = Buffer.from(buf.slice(this._chunkSize));
|
|
32
|
+
}
|
|
33
|
+
// After this `while` loop finishes, there is always something left in buf
|
|
34
|
+
// (due to the +1 trick). It guarantees that we have a chance to call
|
|
35
|
+
// flush(..., true) for the very last chunk.
|
|
36
|
+
}
|
|
37
|
+
|
|
38
|
+
if (buf.length > 0) {
|
|
39
|
+
res = await this._flush(buf, true);
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
return res;
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
private async _flush(buf: Buffer, isLast: boolean) {
|
|
46
|
+
if (buf.length === 0) {
|
|
47
|
+
return null;
|
|
48
|
+
}
|
|
49
|
+
|
|
50
|
+
const totalSize = isLast ? this._pos + buf.length : "*";
|
|
51
|
+
const res = await this._client
|
|
52
|
+
.writeRaw(this._path, buf, this._mimeType, this._method, "*/*")
|
|
53
|
+
.setHeader(
|
|
54
|
+
"Content-Range",
|
|
55
|
+
`bytes ${this._pos}-${this._pos + buf.length - 1}/${totalSize}`
|
|
56
|
+
)
|
|
57
|
+
.text();
|
|
58
|
+
this._pos += buf.length;
|
|
59
|
+
return res;
|
|
60
|
+
}
|
|
61
|
+
}
|
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
import RestContentSizeOverLimitError from "../errors/RestContentSizeOverLimitError";
|
|
2
|
+
import RestRateLimitError from "../errors/RestRateLimitError";
|
|
3
|
+
import RestRetriableError from "../errors/RestRetriableError";
|
|
4
|
+
import RestTokenInvalidError from "../errors/RestTokenInvalidError";
|
|
5
|
+
import type RestOptions from "../RestOptions";
|
|
6
|
+
import type RestResponse from "../RestResponse";
|
|
7
|
+
|
|
8
|
+
/**
|
|
9
|
+
* Returns a new retry delay of the error needs to be retried, otherwise
|
|
10
|
+
* "no_retry".
|
|
11
|
+
*/
|
|
12
|
+
export default function calcRetryDelay(
|
|
13
|
+
error: any,
|
|
14
|
+
options: RestOptions,
|
|
15
|
+
res: RestResponse,
|
|
16
|
+
retryDelayMs: number
|
|
17
|
+
): number | "no_retry" {
|
|
18
|
+
if (
|
|
19
|
+
error instanceof RestRateLimitError ||
|
|
20
|
+
error instanceof RestRetriableError
|
|
21
|
+
) {
|
|
22
|
+
// We've already made a decision to retry this error.
|
|
23
|
+
return Math.min(
|
|
24
|
+
options.retryDelayMaxMs,
|
|
25
|
+
Math.max(retryDelayMs, error.delayMs)
|
|
26
|
+
);
|
|
27
|
+
}
|
|
28
|
+
|
|
29
|
+
switch (options.isRetriableError(res, error)) {
|
|
30
|
+
case "RETRY":
|
|
31
|
+
default:
|
|
32
|
+
break; // number returned
|
|
33
|
+
|
|
34
|
+
case "BEST_EFFORT":
|
|
35
|
+
if (error instanceof RestTokenInvalidError) {
|
|
36
|
+
return "no_retry";
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
if (
|
|
40
|
+
!(error instanceof RestRateLimitError) &&
|
|
41
|
+
res.status >= 400 &&
|
|
42
|
+
res.status <= 499
|
|
43
|
+
) {
|
|
44
|
+
return "no_retry";
|
|
45
|
+
}
|
|
46
|
+
|
|
47
|
+
if (error instanceof RestContentSizeOverLimitError) {
|
|
48
|
+
// Content size ... over limit.
|
|
49
|
+
return "no_retry";
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
break;
|
|
53
|
+
|
|
54
|
+
case "NEVER_RETRY":
|
|
55
|
+
return "no_retry";
|
|
56
|
+
}
|
|
57
|
+
|
|
58
|
+
return retryDelayMs;
|
|
59
|
+
}
|
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
import type { Response } from "node-fetch";
|
|
2
|
+
|
|
3
|
+
const CHARSET_RE =
|
|
4
|
+
/(?:charset|encoding)\s{0,10}=\s{0,10}['"]? {0,10}([-\w]{1,100})/i;
|
|
5
|
+
const BUFFER_ENCODINGS = ["ascii", "utf8", "utf-8", "utf16le", "ucs2", "ucs-2"];
|
|
6
|
+
|
|
7
|
+
/**
|
|
8
|
+
* Tries its best to infer the encoding of the Response, falling back to UTF-8
|
|
9
|
+
* as an opinionated default value on failure.
|
|
10
|
+
*/
|
|
11
|
+
export default function inferResBodyEncoding(res: Response): BufferEncoding {
|
|
12
|
+
const contentType = res.headers.get("content-type")?.toLowerCase();
|
|
13
|
+
const charset = contentType?.match(CHARSET_RE)
|
|
14
|
+
? RegExp.$1.toLowerCase()
|
|
15
|
+
: undefined;
|
|
16
|
+
return contentType?.startsWith("application/octet-stream")
|
|
17
|
+
? // It's a binary Content-Type.
|
|
18
|
+
"binary"
|
|
19
|
+
: charset && !BUFFER_ENCODINGS.includes(charset)
|
|
20
|
+
? // The charset is provided in Content-Type, but unknown by Buffer.
|
|
21
|
+
"binary"
|
|
22
|
+
: charset && BUFFER_ENCODINGS.includes(charset)
|
|
23
|
+
? // Charset is provided in Content-Type header, and Buffer knows
|
|
24
|
+
// how to decode it.
|
|
25
|
+
(charset as BufferEncoding)
|
|
26
|
+
: // An opinionated choice is made here to always default-decode the
|
|
27
|
+
// response stream as UTF-8. This is because JSON is by definition a UTF-8
|
|
28
|
+
// stream, and people often time respond with JSONs forgetting to provide
|
|
29
|
+
// "; charset=utf-8" part of the Content-Type header (or they forget
|
|
30
|
+
// Content-Type header at all, or put some wrong value as "text/plain"
|
|
31
|
+
// there; there is an endless list of mistake variations here).
|
|
32
|
+
"utf-8";
|
|
33
|
+
}
|
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
import { inspect } from "util";
|
|
2
|
+
import sortBy from "lodash/sortBy";
|
|
3
|
+
import truncate from "lodash/truncate";
|
|
4
|
+
|
|
5
|
+
/**
 * Renders a response body for logging purposes: pretty-prints JSON payloads
 * (when the Content-Type mentions "json"), truncates the result to
 * maxOutputLen chars, and replaces non-textual bodies with short placeholders.
 */
export default function inspectPossibleJSON(
  headers: { get(name: string): string | null },
  text: string | Buffer | NodeJS.ReadableStream,
  maxOutputLen: number
): string {
  // Parsing huge payloads is expensive, and the output gets truncated anyway,
  // so skip JSON.parse() entirely above this size.
  const MAX_LEN_TO_TRY_PARSE = 1024 * 1024;

  if (typeof text === "string" && text.length > MAX_LEN_TO_TRY_PARSE) {
    // Don't even try to JSON-parse if the text is too long.
    return ellipsis(text, maxOutputLen);
  }

  if (text instanceof Buffer) {
    return `<Buffer: ${text.length} bytes>`;
  }

  if (!text || typeof text === "string") {
    if (!(headers.get("content-type") || "").match(/json/)) {
      // Not declared as JSON: log as plain (truncated) text.
      return ellipsis(text, maxOutputLen);
    }

    try {
      const json = JSON.parse(text);

      if (json && typeof json === "object" && !(json instanceof Array)) {
        // Move error/errors fields on top for better logging. This is a poor
        // man's approach: of course not all APIs return error/errors fields at
        // all, but it's hard to reorder at any other layer of abstraction.
        reorderObjectProps(json, (k) =>
          k === "error" || k === "errors" ? "" : k
        );
      }

      return ellipsis(
        inspect(json, { depth: 20, compact: true }),
        maxOutputLen
      );
    } catch (e: any) {
      // Malformed JSON despite a JSON Content-Type: log it as plain text.
      return ellipsis(text, maxOutputLen);
    }
  }

  return "<Stream>";
}
|
|
49
|
+
|
|
50
|
+
/**
|
|
51
|
+
* In-place-reorders keys in a given object. The important part is to do it
|
|
52
|
+
* in-place to e.g. be able to alter some @Memoized values.
|
|
53
|
+
*/
|
|
54
|
+
function reorderObjectProps(
|
|
55
|
+
obj: Record<string, any>,
|
|
56
|
+
ranker: (k: string, v: any) => string | number
|
|
57
|
+
) {
|
|
58
|
+
const entries = Object.entries(obj);
|
|
59
|
+
for (const k in obj) {
|
|
60
|
+
delete obj[k];
|
|
61
|
+
}
|
|
62
|
+
|
|
63
|
+
Object.assign(
|
|
64
|
+
obj,
|
|
65
|
+
Object.fromEntries(sortBy(entries, ([k, v]) => ranker(k, v)))
|
|
66
|
+
);
|
|
67
|
+
}
|
|
68
|
+
|
|
69
|
+
function ellipsis(text: any, length: number) {
|
|
70
|
+
return truncate("" + text, { length }).trimEnd();
|
|
71
|
+
}
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Allows to use URLs like /some/:abc/other and pass { abc: "xyz" } as one of
|
|
3
|
+
* body parameters. Such body parameters will be excluded from the body before
|
|
4
|
+
* sending the request (so they're "moved" into the URL).
|
|
5
|
+
*/
|
|
6
|
+
export default function substituteParams<TBody>(
|
|
7
|
+
url: string,
|
|
8
|
+
body: TBody
|
|
9
|
+
): [string, TBody] {
|
|
10
|
+
if (!body || typeof body !== "object") {
|
|
11
|
+
return [url, body];
|
|
12
|
+
}
|
|
13
|
+
|
|
14
|
+
url = url.replace(/:([a-z0-9_]+)/gi, (match, param) => {
|
|
15
|
+
const value = (body as any)[param];
|
|
16
|
+
if (typeof value === "string" || typeof value === "number") {
|
|
17
|
+
body = { ...body };
|
|
18
|
+
delete (body as any)[param];
|
|
19
|
+
return encodeURIComponent(value);
|
|
20
|
+
}
|
|
21
|
+
|
|
22
|
+
return match;
|
|
23
|
+
});
|
|
24
|
+
return [url, body];
|
|
25
|
+
}
|
|
@@ -0,0 +1,89 @@
|
|
|
1
|
+
import RestRateLimitError from "../errors/RestRateLimitError";
|
|
2
|
+
import RestResponseError from "../errors/RestResponseError";
|
|
3
|
+
import RestRetriableError from "../errors/RestRetriableError";
|
|
4
|
+
import RestTokenInvalidError from "../errors/RestTokenInvalidError";
|
|
5
|
+
import type RestOptions from "../RestOptions";
|
|
6
|
+
import type RestResponse from "../RestResponse";
|
|
7
|
+
|
|
8
|
+
const STATUS_TOO_MANY_REQUESTS = 429;
|
|
9
|
+
|
|
10
|
+
/**
|
|
11
|
+
* The general idea is that we turn all logical errors into exceptions and then
|
|
12
|
+
* deal with exceptions only. I.e. throwing an exception becomes an internal API
|
|
13
|
+
* convention for errors. This is because fetch() throws its own exceptions, and
|
|
14
|
+
* also there may be some exceptions during validation of the response, or
|
|
15
|
+
* inside a middleware etc.
|
|
16
|
+
*/
|
|
17
|
+
export default function throwIfErrorResponse(
|
|
18
|
+
options: RestOptions,
|
|
19
|
+
res: RestResponse
|
|
20
|
+
) {
|
|
21
|
+
const isSuccessResponse = options.isSuccessResponse(res);
|
|
22
|
+
if (isSuccessResponse === "SUCCESS") {
|
|
23
|
+
return;
|
|
24
|
+
}
|
|
25
|
+
|
|
26
|
+
const rateLimitDelayMs = options.isRateLimitError(res);
|
|
27
|
+
switch (rateLimitDelayMs) {
|
|
28
|
+
case "RATE_LIMIT":
|
|
29
|
+
throw new RestRateLimitError(
|
|
30
|
+
`isRateLimitError() returned ${rateLimitDelayMs}`,
|
|
31
|
+
0,
|
|
32
|
+
res
|
|
33
|
+
);
|
|
34
|
+
case "BEST_EFFORT":
|
|
35
|
+
if (res.status === STATUS_TOO_MANY_REQUESTS) {
|
|
36
|
+
const retryAfterHeader = res.headers.get("Retry-After") || "0";
|
|
37
|
+
throw new RestRateLimitError(
|
|
38
|
+
`Rate limited by HTTP status ${STATUS_TOO_MANY_REQUESTS}`,
|
|
39
|
+
parseInt(retryAfterHeader) || 0,
|
|
40
|
+
res
|
|
41
|
+
);
|
|
42
|
+
}
|
|
43
|
+
|
|
44
|
+
break;
|
|
45
|
+
case "SOMETHING_ELSE":
|
|
46
|
+
break;
|
|
47
|
+
default:
|
|
48
|
+
throw new RestRateLimitError(
|
|
49
|
+
`isRateLimitError() returned retry delay ${rateLimitDelayMs} ms`,
|
|
50
|
+
rateLimitDelayMs,
|
|
51
|
+
res
|
|
52
|
+
);
|
|
53
|
+
}
|
|
54
|
+
|
|
55
|
+
const isTokenInvalidError = options.isTokenInvalidError(res);
|
|
56
|
+
if (isTokenInvalidError) {
|
|
57
|
+
throw new RestTokenInvalidError("Invalid app token", res);
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
const retryDelayMs = options.isRetriableError(res, null);
|
|
61
|
+
switch (retryDelayMs) {
|
|
62
|
+
case "RETRY":
|
|
63
|
+
throw new RestRetriableError(
|
|
64
|
+
`isRetriableError() returned ${retryDelayMs}`,
|
|
65
|
+
0,
|
|
66
|
+
res
|
|
67
|
+
);
|
|
68
|
+
case "BEST_EFFORT":
|
|
69
|
+
case "NEVER_RETRY":
|
|
70
|
+
break;
|
|
71
|
+
default:
|
|
72
|
+
throw new RestRetriableError(
|
|
73
|
+
`"isRetriableError() returned retry delay ${retryDelayMs} ms`,
|
|
74
|
+
retryDelayMs,
|
|
75
|
+
res
|
|
76
|
+
);
|
|
77
|
+
}
|
|
78
|
+
|
|
79
|
+
if (isSuccessResponse === "THROW") {
|
|
80
|
+
throw new RestResponseError(
|
|
81
|
+
`isSuccessResponse() returned ${isSuccessResponse}`,
|
|
82
|
+
res
|
|
83
|
+
);
|
|
84
|
+
}
|
|
85
|
+
|
|
86
|
+
if (res.status >= 300) {
|
|
87
|
+
throw new RestResponseError("", res);
|
|
88
|
+
}
|
|
89
|
+
}
|
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
import type Pacer from "../pacers/Pacer";
|
|
2
|
+
import type { Middleware } from "../RestOptions";
|
|
3
|
+
import type RestRequest from "../RestRequest";
|
|
4
|
+
|
|
5
|
+
const MIN_LOG_DELAY_MS = 10;
|
|
6
|
+
|
|
7
|
+
/**
|
|
8
|
+
* Rest Client middleware that adds some delay between requests using one of
|
|
9
|
+
* Pacer implementations.
|
|
10
|
+
*/
|
|
11
|
+
export default function paceRequests(
|
|
12
|
+
pacer: Pacer | ((req: RestRequest) => Promise<Pacer | null>) | null
|
|
13
|
+
): Middleware {
|
|
14
|
+
return async (req, next) => {
|
|
15
|
+
if (typeof pacer === "function") {
|
|
16
|
+
pacer = await pacer(req);
|
|
17
|
+
}
|
|
18
|
+
|
|
19
|
+
if (pacer) {
|
|
20
|
+
const { delayMs, reason } = await pacer.touch();
|
|
21
|
+
if (delayMs > 0) {
|
|
22
|
+
await req.options.heartbeater.delay(delayMs);
|
|
23
|
+
}
|
|
24
|
+
|
|
25
|
+
if (delayMs > MIN_LOG_DELAY_MS) {
|
|
26
|
+
req.options.logger({
|
|
27
|
+
attempt: 0,
|
|
28
|
+
req,
|
|
29
|
+
res: "backoff_delay",
|
|
30
|
+
exception: null,
|
|
31
|
+
timestamp: Date.now(),
|
|
32
|
+
elapsed: delayMs,
|
|
33
|
+
isFinalAttempt: true,
|
|
34
|
+
privateDataInResponse: false,
|
|
35
|
+
comment: reason,
|
|
36
|
+
});
|
|
37
|
+
}
|
|
38
|
+
}
|
|
39
|
+
|
|
40
|
+
return next(req);
|
|
41
|
+
};
|
|
42
|
+
}
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
/**
 * A result of some Pacer work.
 */
export interface PacerDelay {
  /** How long the caller should wait before sending, in milliseconds. */
  delayMs: number;
  /** Human-readable explanation of where the delay came from (used for
   * logging, e.g. as the log event's comment). */
  reason: string;
}

/**
 * Pacer is a class which allows to pace requests on some resource identified by
 * the instance of this class.
 */
export default interface Pacer {
  /** Human readable name of the pacer, used when composing multiple pacers. */
  readonly name: string;

  /**
   * Signals that we're about to send a request. Returns the delay we need to
   * wait for before actually sending.
   */
  touch(): Promise<PacerDelay>;
}
|