@continuedev/fetch 1.0.13 → 1.0.15
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/certs.d.ts +17 -0
- package/dist/certs.js +105 -0
- package/dist/certs.test.d.ts +1 -0
- package/dist/certs.test.js +139 -0
- package/dist/fetch.js +7 -10
- package/dist/fetch.test.js +0 -1
- package/dist/getAgentOptions.d.ts +2 -2
- package/dist/getAgentOptions.js +13 -40
- package/dist/getAgentOptions.test.js +129 -77
- package/dist/index.d.ts +2 -1
- package/dist/index.js +2 -1
- package/dist/node-fetch-patch.d.ts +16 -0
- package/dist/node-fetch-patch.js +395 -0
- package/dist/node-fetch-patch.test.d.ts +1 -0
- package/dist/node-fetch-patch.test.js +50 -0
- package/dist/stream.js +27 -4
- package/dist/util.d.ts +6 -1
- package/dist/util.js +57 -17
- package/dist/util.test.js +92 -18
- package/package.json +1 -1
- package/release.config.js +3 -0
- package/src/certs.test.ts +187 -0
- package/src/certs.ts +129 -0
- package/src/fetch.ts +7 -11
- package/src/getAgentOptions.test.ts +158 -91
- package/src/getAgentOptions.ts +23 -44
- package/src/index.ts +3 -0
- package/src/node-fetch-patch.js +518 -0
- package/src/node-fetch-patch.test.js +67 -0
- package/src/node_modules/.vite/vitest/d41d8cd98f00b204e9800998ecf8427e/results.json +1 -0
- package/src/node_modules/.vite/vitest/da39a3ee5e6b4b0d3255bfef95601890afd80709/results.json +1 -0
- package/src/stream.ts +29 -4
- package/src/util.test.ts +130 -18
- package/src/util.ts +84 -19
- package/src/fetch.test.ts +0 -173
|
@@ -0,0 +1,395 @@
|
|
|
1
|
+
/*
|
|
2
|
+
This is a patched version of index.js from node-fetch 3.3.2 (see packages/fetch/node_modules/node-fetch/src/index.js)
|
|
3
|
+
This is to fix "Premature close" errors caused by chunked encoding assumptions - see https://github.com/continuedev/continue/issues/5502
|
|
4
|
+
The patch adjusts boundary chunk detection logic (see "PATCH" comment next to patch)
|
|
5
|
+
And is based on https://github.com/node-fetch/node-fetch/issues/1576
|
|
6
|
+
*/
|
|
7
|
+
/**
|
|
8
|
+
* Index.js
|
|
9
|
+
*
|
|
10
|
+
* a request API compatible with window.fetch
|
|
11
|
+
*
|
|
12
|
+
* All spec algorithm step numbers are based on https://fetch.spec.whatwg.org/commit-snapshots/ae716822cb3a61843226cd090eefc6589446c1d2/.
|
|
13
|
+
*/
|
|
14
|
+
import { Buffer } from "node:buffer";
|
|
15
|
+
import http from "node:http";
|
|
16
|
+
import https from "node:https";
|
|
17
|
+
import Stream, { PassThrough, pipeline as pump } from "node:stream";
|
|
18
|
+
import zlib from "node:zlib";
|
|
19
|
+
import dataUriToBuffer from "data-uri-to-buffer";
|
|
20
|
+
import { Blob, blobFrom, blobFromSync, File, fileFrom, fileFromSync, } from "fetch-blob/from.js";
|
|
21
|
+
import { FormData } from "formdata-polyfill/esm.min.js";
|
|
22
|
+
import { clone, writeToStream } from "node-fetch/src/body.js";
|
|
23
|
+
import { AbortError } from "node-fetch/src/errors/abort-error.js";
|
|
24
|
+
import { FetchError } from "node-fetch/src/errors/fetch-error.js";
|
|
25
|
+
import Headers, { fromRawHeaders } from "node-fetch/src/headers.js";
|
|
26
|
+
import Request, { getNodeRequestOptions } from "node-fetch/src/request.js";
|
|
27
|
+
import Response from "node-fetch/src/response.js";
|
|
28
|
+
import { isRedirect } from "node-fetch/src/utils/is-redirect.js";
|
|
29
|
+
import { isDomainOrSubdomain, isSameProtocol, } from "node-fetch/src/utils/is.js";
|
|
30
|
+
import { parseReferrerPolicyFromHeader } from "node-fetch/src/utils/referrer.js";
|
|
31
|
+
export { AbortError, Blob, blobFrom, blobFromSync, FetchError, File, fileFrom, fileFromSync, FormData, Headers, isRedirect, Request, Response, };
|
|
32
|
+
const supportedSchemas = new Set(["data:", "http:", "https:"]);
|
|
33
|
+
/**
|
|
34
|
+
* Fetch function
|
|
35
|
+
*
|
|
36
|
+
* @param {string | URL | import('./request').default} url - Absolute url or Request instance
|
|
37
|
+
* @param {*} [options_] - Fetch options
|
|
38
|
+
* @return {Promise<import('./response').default>}
|
|
39
|
+
*/
|
|
40
|
+
/**
 * window.fetch-compatible fetch over Node's http/https modules.
 *
 * Patched copy of node-fetch 3.3.2's index.js — the only behavioral change
 * lives in fixResponseChunkedTransferBadEnding (see the PATCH comment there).
 *
 * @param {string | URL | import('./request').default} url - Absolute url or Request instance
 * @param {*} [options_] - Fetch options
 * @return {Promise<import('./response').default>}
 */
export default async function fetch(url, options_) {
  return new Promise((resolve, reject) => {
    // Build the Request object and derive the raw node http options from it.
    const request = new Request(url, options_);
    const { parsedURL, options } = getNodeRequestOptions(request);
    if (!supportedSchemas.has(parsedURL.protocol)) {
      throw new TypeError(`node-fetch cannot load ${url}. URL scheme "${parsedURL.protocol.replace(/:$/, "")}" is not supported.`);
    }

    // data: URLs are decoded locally — no network round trip.
    if (parsedURL.protocol === "data:") {
      const data = dataUriToBuffer(request.url);
      const response = new Response(data, {
        headers: { "Content-Type": data.typeFull },
      });
      resolve(response);
      return;
    }

    // Wrap http.request into fetch.
    const send = (parsedURL.protocol === "https:" ? https : http).request;
    const { signal } = request;
    let response = null;

    // Reject the promise and tear down both request and response streams.
    const abort = () => {
      const error = new AbortError("The operation was aborted.");
      reject(error);
      if (request.body && request.body instanceof Stream.Readable) {
        request.body.destroy(error);
      }
      if (!response || !response.body) {
        return;
      }
      response.body.emit("error", error);
    };

    if (signal && signal.aborted) {
      abort();
      return;
    }

    const abortAndFinalize = () => {
      abort();
      finalize();
    };

    // Send the request.
    const request_ = send(parsedURL.toString(), options);

    if (signal) {
      signal.addEventListener("abort", abortAndFinalize);
    }

    // Abort the in-flight request and detach the signal listener.
    const finalize = () => {
      request_.abort();
      if (signal) {
        signal.removeEventListener("abort", abortAndFinalize);
      }
    };

    request_.on("error", (error) => {
      reject(new FetchError(`request to ${request.url} failed, reason: ${error.message}`, "system", error));
      finalize();
    });

    // Destroy the response body when the chunked-transfer guard reports a
    // genuinely premature close (see the patched helper below).
    fixResponseChunkedTransferBadEnding(request_, (error) => {
      if (response && response.body) {
        response.body.destroy(error);
      }
    });

    /* c8 ignore next 18 */
    if (process.version < "v14") {
      // Before Node.js 14, pipeline() does not fully support async iterators
      // and does not always properly handle when the socket close/end events
      // are out of order.
      request_.on("socket", (s) => {
        let endedWithEventsCount;
        s.prependListener("end", () => {
          endedWithEventsCount = s._eventsCount;
        });
        s.prependListener("close", (hadError) => {
          // If end happened before close but the socket didn't emit an error,
          // do it now.
          if (response && endedWithEventsCount < s._eventsCount && !hadError) {
            const error = new Error("Premature close");
            error.code = "ERR_STREAM_PREMATURE_CLOSE";
            response.body.emit("error", error);
          }
        });
      });
    }

    request_.on("response", (response_) => {
      request_.setTimeout(0);
      const headers = fromRawHeaders(response_.rawHeaders);

      // HTTP fetch step 5: handle redirects.
      if (isRedirect(response_.statusCode)) {
        // HTTP fetch step 5.2
        const location = headers.get("Location");

        // HTTP fetch step 5.3
        let locationURL = null;
        try {
          locationURL =
            location === null ? null : new URL(location, request.url);
        } catch {
          // An error here can only mean an invalid URL in the Location header.
          // Do not throw when options.redirect == manual; let the user extract
          // the erroneous redirect URL instead.
          if (request.redirect !== "manual") {
            reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, "invalid-redirect"));
            finalize();
            return;
          }
        }

        // HTTP fetch step 5.5
        switch (request.redirect) {
          case "error":
            reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, "no-redirect"));
            finalize();
            return;
          case "manual":
            // Nothing to do
            break;
          case "follow": {
            // HTTP-redirect fetch step 2
            if (locationURL === null) {
              break;
            }

            // HTTP-redirect fetch step 5
            if (request.counter >= request.follow) {
              reject(new FetchError(`maximum redirect reached at: ${request.url}`, "max-redirect"));
              finalize();
              return;
            }

            // HTTP-redirect fetch step 6: build the follow-up Request.
            const requestOptions = {
              headers: new Headers(request.headers),
              follow: request.follow,
              counter: request.counter + 1,
              agent: request.agent,
              compress: request.compress,
              method: request.method,
              body: clone(request),
              signal: request.signal,
              size: request.size,
              referrer: request.referrer,
              referrerPolicy: request.referrerPolicy,
            };

            // Strip sensitive headers ("Authorization", "WWW-Authenticate",
            // "Cookie", "Cookie2") when the redirect target is not the same
            // domain / a subdomain of the original, or uses a different
            // protocol (e.g. https://foo.com -> http://foo.com).
            if (!isDomainOrSubdomain(request.url, locationURL) ||
                !isSameProtocol(request.url, locationURL)) {
              for (const name of [
                "authorization",
                "www-authenticate",
                "cookie",
                "cookie2",
              ]) {
                requestOptions.headers.delete(name);
              }
            }

            // HTTP-redirect fetch step 9: a readable-stream body cannot be
            // replayed for the redirected request.
            if (response_.statusCode !== 303 &&
                request.body &&
                options_.body instanceof Stream.Readable) {
              reject(new FetchError("Cannot follow redirect with body being a readable stream", "unsupported-redirect"));
              finalize();
              return;
            }

            // HTTP-redirect fetch step 11: 303 (or POST + 301/302) becomes GET.
            if (response_.statusCode === 303 ||
                ((response_.statusCode === 301 || response_.statusCode === 302) &&
                  request.method === "POST")) {
              requestOptions.method = "GET";
              requestOptions.body = undefined;
              requestOptions.headers.delete("content-length");
            }

            // HTTP-redirect fetch step 14
            const responseReferrerPolicy = parseReferrerPolicyFromHeader(headers);
            if (responseReferrerPolicy) {
              requestOptions.referrerPolicy = responseReferrerPolicy;
            }

            // HTTP-redirect fetch step 15
            resolve(fetch(new Request(locationURL, requestOptions)));
            finalize();
            return;
          }
          default:
            return reject(new TypeError(`Redirect option '${request.redirect}' is not a valid value of RequestRedirect`));
        }
      }

      // Prepare response
      if (signal) {
        response_.once("end", () => {
          signal.removeEventListener("abort", abortAndFinalize);
        });
      }

      let body = pump(response_, new PassThrough(), (error) => {
        if (error) {
          reject(error);
        }
      });

      // See https://github.com/nodejs/node/pull/29376
      /* c8 ignore next 3 */
      if (process.version < "v12.10") {
        response_.on("aborted", abortAndFinalize);
      }

      const responseOptions = {
        url: request.url,
        status: response_.statusCode,
        statusText: response_.statusMessage,
        headers,
        size: request.size,
        counter: request.counter,
        highWaterMark: request.highWaterMark,
      };

      // HTTP-network fetch step 12.1.1.3
      const codings = headers.get("Content-Encoding");

      // HTTP-network fetch step 12.1.1.4: handle content codings.
      // Compression support is skipped when:
      //   1. compression support is disabled
      //   2. it is a HEAD request
      //   3. there is no Content-Encoding header
      //   4. the response has no content (204)
      //   5. the content was not modified (304)
      if (!request.compress ||
          request.method === "HEAD" ||
          codings === null ||
          response_.statusCode === 204 ||
          response_.statusCode === 304) {
        response = new Response(body, responseOptions);
        resolve(response);
        return;
      }

      // Be less strict when decoding compressed responses, since sometimes
      // servers send slightly invalid responses that are still accepted by
      // common browsers. Always using Z_SYNC_FLUSH is what cURL does.
      const zlibOptions = {
        flush: zlib.Z_SYNC_FLUSH,
        finishFlush: zlib.Z_SYNC_FLUSH,
      };

      // For gzip
      if (codings === "gzip" || codings === "x-gzip") {
        body = pump(body, zlib.createGunzip(zlibOptions), (error) => {
          if (error) {
            reject(error);
          }
        });
        response = new Response(body, responseOptions);
        resolve(response);
        return;
      }

      // For deflate
      if (codings === "deflate" || codings === "x-deflate") {
        // Handle the infamous raw deflate response from old servers —
        // a hack for old IIS and Apache servers.
        const raw = pump(response_, new PassThrough(), (error) => {
          if (error) {
            reject(error);
          }
        });
        raw.once("data", (chunk) => {
          // See http://stackoverflow.com/questions/37519828
          if ((chunk[0] & 0x0f) === 0x08) {
            body = pump(body, zlib.createInflate(), (error) => {
              if (error) {
                reject(error);
              }
            });
          } else {
            body = pump(body, zlib.createInflateRaw(), (error) => {
              if (error) {
                reject(error);
              }
            });
          }
          response = new Response(body, responseOptions);
          resolve(response);
        });
        raw.once("end", () => {
          // Some old IIS servers return zero-length OK deflate responses, so
          // 'data' is never emitted.
          // See https://github.com/node-fetch/node-fetch/pull/903
          if (!response) {
            response = new Response(body, responseOptions);
            resolve(response);
          }
        });
        return;
      }

      // For br
      if (codings === "br") {
        body = pump(body, zlib.createBrotliDecompress(), (error) => {
          if (error) {
            reject(error);
          }
        });
        response = new Response(body, responseOptions);
        resolve(response);
        return;
      }

      // Otherwise, use the response as-is.
      response = new Response(body, responseOptions);
      resolve(response);
    });

    // eslint-disable-next-line promise/prefer-await-to-then
    writeToStream(request_, request).catch(reject);
  });
}
|
|
346
|
+
/**
 * Guard against bad endings of chunked-transfer responses.
 *
 * Watches the raw socket of a chunked response and, when the socket closes
 * without the HTTP chunked-encoding terminator ("0\r\n\r\n") having been
 * seen, reports a "Premature close" error through errorCallback.
 *
 * PATCH - FIXES BOUNDARY CHUNKING ISSUE
 * Upstream node-fetch only recognized the terminator when it arrived in a
 * single packet or split across exactly two packets as "0\r\n" + "\r\n"
 * (see https://github.com/node-fetch/node-fetch/issues/1576). This version
 * keeps a rolling 5-byte tail of everything received, which recognizes the
 * terminator however it is split across packets (including 3+ packets).
 *
 * @param {import('node:http').ClientRequest} request - the outgoing request
 * @param {(error: Error) => void} errorCallback - invoked with the
 *   ERR_STREAM_PREMATURE_CLOSE error when the close is genuinely premature
 */
function fixResponseChunkedTransferBadEnding(request, errorCallback) {
  // The chunked-encoding terminator: a zero-length chunk plus the final CRLF.
  const LAST_CHUNK = Buffer.from("0\r\n\r\n");
  let isChunkedTransfer = false;
  let properLastChunkReceived = false;
  // Rolling tail: the last (up to) 5 bytes received so far — exactly enough
  // to hold LAST_CHUNK.
  let tail;
  request.on("response", (response) => {
    const { headers } = response;
    // Only police responses that are actually chunked (and not also carrying
    // a Content-Length, which would make chunk accounting meaningless).
    isChunkedTransfer =
      headers["transfer-encoding"] === "chunked" && !headers["content-length"];
  });
  request.on("socket", (socket) => {
    const onSocketClose = () => {
      if (isChunkedTransfer && !properLastChunkReceived) {
        const error = new Error("Premature close");
        error.code = "ERR_STREAM_PREMATURE_CLOSE";
        errorCallback(error);
      }
    };
    const onData = (buf) => {
      // Append the new packet to the rolling tail and keep only the last
      // 5 bytes; the terminator is present iff that tail equals LAST_CHUNK.
      tail = tail ? Buffer.concat([tail, buf]).slice(-5) : buf.slice(-5);
      properLastChunkReceived = Buffer.compare(tail, LAST_CHUNK) === 0;
    };
    socket.prependListener("close", onSocketClose);
    socket.on("data", onData);
    request.on("close", () => {
      socket.removeListener("close", onSocketClose);
      socket.removeListener("data", onData);
    });
  });
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
import { Buffer } from "node:buffer";
|
|
2
|
+
import { describe, expect, it } from "vitest";
|
|
3
|
+
// Tests the boundary chunking patch to node fetch made in node-fetch-patch.js (see that file for details).
|
|
4
|
+
// Exercises the boundary-chunking detection logic patched into
// node-fetch-patch.js (see that file for the rationale behind the patch).
describe("Chunked Transfer Encoding Patch Logic", () => {
  // The chunked-encoding terminator marker "0\r\n\r\n".
  const LAST_CHUNK = Buffer.from("0\r\n\r\n");

  it("should detect complete marker at the end (normal case)", () => {
    const packet = Buffer.from("data data data 0\r\n\r\n");
    const detected = Buffer.compare(packet.slice(-5), LAST_CHUNK) === 0;
    expect(detected).toBe(true);
  });

  it("should handle chunks split across original pattern (0\\r\\n | \\r\\n)", () => {
    const firstPacket = Buffer.from("data data 0\r\n");
    const secondPacket = Buffer.from("\r\n");
    const detected =
      Buffer.compare(firstPacket.slice(-3), LAST_CHUNK.slice(0, 3)) === 0 &&
      Buffer.compare(secondPacket.slice(-2), LAST_CHUNK.slice(3)) === 0;
    expect(detected).toBe(true);
  });

  it("should handle problematic case from issue #1576 with new patch", () => {
    // The problematic case: data ends with "0\r\n0\r" and the next chunk
    // is "\n\r\n".
    const firstPacket = Buffer.from([
      ...Array(10).fill(42), // filler data
      48, // "0"
      13, // "\r"
      10, // "\n"
      48, // "0"
      13, // "\r"
    ]);
    const secondPacket = Buffer.from([10, 13, 10]); // "\n\r\n"

    // Check 1: a direct check of the final packet misses the marker.
    const directCheck =
      Buffer.compare(secondPacket.slice(-5), LAST_CHUNK) === 0;
    expect(directCheck).toBe(false);

    // Check 2: the original two-packet split pattern also misses it.
    const originalSplitCheck =
      Buffer.compare(firstPacket.slice(-3), LAST_CHUNK.slice(0, 3)) === 0 &&
      Buffer.compare(secondPacket.slice(-2), LAST_CHUNK.slice(3)) === 0;
    expect(originalSplitCheck).toBe(false);

    // Check 3: the patched combined-tail check finds it.
    const patchedCheck =
      secondPacket.length < 5 &&
      Buffer.compare(
        Buffer.from([...firstPacket.slice(-5), ...secondPacket]).slice(-5),
        LAST_CHUNK,
      ) === 0;
    expect(patchedCheck).toBe(true);

    // Verify the combined buffer really ends with the marker.
    const combined = Buffer.from([
      ...firstPacket.slice(-5),
      ...secondPacket,
    ]);
    expect(Buffer.compare(combined.slice(-5), LAST_CHUNK) === 0).toBe(true);
  });
});
|
package/dist/stream.js
CHANGED
|
@@ -16,12 +16,14 @@ export async function* streamResponse(response) {
|
|
|
16
16
|
}
|
|
17
17
|
// Get the major version of Node.js
|
|
18
18
|
const nodeMajorVersion = parseInt(process.versions.node.split(".")[0], 10);
|
|
19
|
+
let chunks = 0;
|
|
19
20
|
try {
|
|
20
21
|
if (nodeMajorVersion >= 20) {
|
|
21
22
|
// Use the new API for Node 20 and above
|
|
22
23
|
const stream = ReadableStream.from(response.body);
|
|
23
24
|
for await (const chunk of stream.pipeThrough(new TextDecoderStream("utf-8"))) {
|
|
24
25
|
yield chunk;
|
|
26
|
+
chunks++;
|
|
25
27
|
}
|
|
26
28
|
}
|
|
27
29
|
else {
|
|
@@ -31,12 +33,28 @@ export async function* streamResponse(response) {
|
|
|
31
33
|
const nodeStream = response.body;
|
|
32
34
|
for await (const chunk of toAsyncIterable(nodeStream)) {
|
|
33
35
|
yield decoder.decode(chunk, { stream: true });
|
|
36
|
+
chunks++;
|
|
34
37
|
}
|
|
35
38
|
}
|
|
36
39
|
}
|
|
37
40
|
catch (e) {
|
|
38
|
-
if (e instanceof Error
|
|
39
|
-
|
|
41
|
+
if (e instanceof Error) {
|
|
42
|
+
if (e.name.startsWith("AbortError")) {
|
|
43
|
+
return; // In case of client-side cancellation, just return
|
|
44
|
+
}
|
|
45
|
+
if (e.message.toLowerCase().includes("premature close")) {
|
|
46
|
+
// Premature close can happen for various reasons, including:
|
|
47
|
+
// - Malformed chunks of data received from the server
|
|
48
|
+
// - The server closed the connection before sending the complete response
|
|
49
|
+
// - Long delays from the server during streaming
|
|
50
|
+
// - 'Keep alive' header being used in combination with an http agent and a set, low number of maxSockets
|
|
51
|
+
if (chunks === 0) {
|
|
52
|
+
throw new Error("Stream was closed before any data was received. Try again. (Premature Close)");
|
|
53
|
+
}
|
|
54
|
+
else {
|
|
55
|
+
throw new Error("The response was cancelled mid-stream. Try again. (Premature Close).");
|
|
56
|
+
}
|
|
57
|
+
}
|
|
40
58
|
}
|
|
41
59
|
throw e;
|
|
42
60
|
}
|
|
@@ -106,8 +124,13 @@ export async function* streamJSON(response) {
|
|
|
106
124
|
let position;
|
|
107
125
|
while ((position = buffer.indexOf("\n")) >= 0) {
|
|
108
126
|
const line = buffer.slice(0, position);
|
|
109
|
-
|
|
110
|
-
|
|
127
|
+
try {
|
|
128
|
+
const data = JSON.parse(line);
|
|
129
|
+
yield data;
|
|
130
|
+
}
|
|
131
|
+
catch (e) {
|
|
132
|
+
throw new Error(`Malformed JSON sent from server: ${line}`);
|
|
133
|
+
}
|
|
111
134
|
buffer = buffer.slice(position + 1);
|
|
112
135
|
}
|
|
113
136
|
}
|
package/dist/util.d.ts
CHANGED
|
@@ -1,12 +1,17 @@
|
|
|
1
|
+
import { RequestOptions } from "@continuedev/config-types";
|
|
1
2
|
/**
|
|
2
3
|
* Gets the proxy settings from environment variables
|
|
3
4
|
* @param protocol The URL protocol (http: or https:)
|
|
4
5
|
* @returns The proxy URL if available, otherwise undefined
|
|
5
6
|
*/
|
|
6
7
|
export declare function getProxyFromEnv(protocol: string): string | undefined;
|
|
8
|
+
export declare function getProxy(protocol: string, requestOptions?: RequestOptions): string | undefined;
|
|
9
|
+
export declare function getEnvNoProxyPatterns(): string[];
|
|
10
|
+
export declare function getReqOptionsNoProxyPatterns(options: RequestOptions | undefined): string[];
|
|
11
|
+
export declare function patternMatchesHostname(hostname: string, pattern: string): boolean;
|
|
7
12
|
/**
|
|
8
13
|
* Checks if a hostname should bypass proxy based on NO_PROXY environment variable
|
|
9
14
|
* @param hostname The hostname to check
|
|
10
15
|
* @returns True if the hostname should bypass proxy
|
|
11
16
|
*/
|
|
12
|
-
export declare function shouldBypassProxy(hostname: string): boolean;
|
|
17
|
+
export declare function shouldBypassProxy(hostname: string, requestOptions: RequestOptions | undefined): boolean;
|
package/dist/util.js
CHANGED
|
@@ -14,26 +14,66 @@ export function getProxyFromEnv(protocol) {
|
|
|
14
14
|
return process.env.HTTP_PROXY || process.env.http_proxy;
|
|
15
15
|
}
|
|
16
16
|
}
|
|
17
|
+
/**
 * Resolve the proxy URL for a request.
 *
 * A proxy set on the request options (configured per model) takes precedence
 * over any proxy configured through environment variables.
 *
 * @param {string} protocol - the URL protocol ("http:" or "https:")
 * @param {RequestOptions} [requestOptions] - per-request options
 * @returns {string | undefined} the proxy URL, if one is configured
 */
export function getProxy(protocol, requestOptions) {
  const explicitProxy = requestOptions?.proxy;
  return explicitProxy ? explicitProxy : getProxyFromEnv(protocol);
}
|
|
24
|
+
/**
 * Read proxy-bypass patterns from the NO_PROXY / no_proxy environment
 * variables.
 *
 * @returns {string[]} trimmed, lower-cased, non-empty patterns; empty array
 *   when neither variable is set
 */
export function getEnvNoProxyPatterns() {
  const raw = process.env.NO_PROXY || process.env.no_proxy;
  if (!raw) {
    return [];
  }
  const patterns = [];
  for (const part of raw.split(",")) {
    const normalized = part.trim().toLowerCase();
    if (normalized) {
      patterns.push(normalized);
    }
  }
  return patterns;
}
|
|
36
|
+
/**
 * Extract proxy-bypass patterns from request options.
 *
 * @param {RequestOptions | undefined} options - per-request options
 * @returns {string[]} trimmed, lower-cased, non-empty patterns; empty array
 *   when no noProxy list is present
 */
export function getReqOptionsNoProxyPatterns(options) {
  const entries = options?.noProxy;
  if (!entries) {
    return [];
  }
  const patterns = [];
  for (const entry of entries) {
    const normalized = entry.trim().toLowerCase();
    if (normalized) {
      patterns.push(normalized);
    }
  }
  return patterns;
}
|
|
40
|
+
/**
 * Decide whether a NO_PROXY-style pattern matches a hostname.
 *
 * Supported pattern forms: exact host ("example.com"), wildcard subdomain
 * ("*.example.com"), bare domain suffix (".example.com"), each optionally
 * pinned to a port ("example.com:8080").
 *
 * NOTE(review): splitting on ":" assumes hostnames contain at most one colon
 * (host[:port]) — a raw IPv6 literal like "::1" would be mangled; confirm
 * callers never pass IPv6 addresses here.
 *
 * @param {string} hostname - host (optionally "host:port") to test
 * @param {string} pattern - bypass pattern (optionally "pattern:port")
 * @returns {boolean} true when the pattern matches the hostname
 */
export function patternMatchesHostname(hostname, pattern) {
  // Separate host and port on both sides; comparison is case-insensitive.
  const [host, hostPort] = hostname.toLowerCase().split(":");
  const [patternHost, patternPort] = pattern.toLowerCase().split(":");

  // A pattern that pins a port only matches a hostname carrying that port.
  if (patternPort && hostPort !== patternPort) {
    return false;
  }

  // Exact hostname match.
  if (host === patternHost) {
    return true;
  }

  // Wildcard subdomain pattern: "*.example.com" matches "a.example.com".
  if (patternHost.startsWith("*.") && host.endsWith(patternHost.substring(1))) {
    return true;
  }

  // Bare suffix pattern: ".example.com" matches "a.example.com".
  if (patternHost.startsWith(".") && host.endsWith(patternHost.slice(1))) {
    return true;
  }

  // TODO IP address ranges
  // TODO CIDR notation
  return false;
}
|
|
17
67
|
/**
|
|
18
68
|
* Checks if a hostname should bypass proxy based on NO_PROXY environment variable
|
|
19
69
|
* @param hostname The hostname to check
|
|
20
70
|
* @returns True if the hostname should bypass proxy
|
|
21
71
|
*/
|
|
22
|
-
export function shouldBypassProxy(hostname) {
|
|
23
|
-
const
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
if (item === hostname)
|
|
30
|
-
return true;
|
|
31
|
-
// Wildcard domain match (*.example.com)
|
|
32
|
-
if (item.startsWith("*.") && hostname.endsWith(item.substring(1)))
|
|
33
|
-
return true;
|
|
34
|
-
// Domain suffix match (.example.com)
|
|
35
|
-
if (item.startsWith(".") && hostname.endsWith(item.slice(1)))
|
|
36
|
-
return true;
|
|
37
|
-
return false;
|
|
38
|
-
});
|
|
72
|
+
/**
 * Check whether a hostname should bypass the proxy, combining the NO_PROXY /
 * no_proxy environment variables with any noProxy list from request options.
 *
 * @param {string} hostname - the hostname to check
 * @param {RequestOptions | undefined} requestOptions - per-request options
 * @returns {boolean} true when any bypass pattern matches the hostname
 */
export function shouldBypassProxy(hostname, requestOptions) {
  const host = hostname.toLowerCase();
  const patterns = [
    ...getEnvNoProxyPatterns(),
    ...getReqOptionsNoProxyPatterns(requestOptions),
  ];
  for (const pattern of patterns) {
    if (patternMatchesHostname(host, pattern)) {
      return true;
    }
  }
  return false;
}
|