@whatwg-node/node-fetch 0.7.6-alpha-20250103123055-7d3b49ae3cf2bf03e15b5c69c5a0b590eb1a0232 → 0.7.6-alpha-20250106172915-e3e79162475cede19c6fd63458cb73e68a450518
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/cjs/Blob.js +19 -17
- package/cjs/Body.js +20 -17
- package/cjs/FormData.js +10 -9
- package/cjs/ReadableStream.js +7 -6
- package/cjs/Request.js +4 -4
- package/cjs/Response.js +2 -2
- package/cjs/TextEncoderDecoder.js +6 -5
- package/cjs/URL.js +5 -4
- package/cjs/WritableStream.js +4 -4
- package/cjs/fetch.js +6 -5
- package/cjs/fetchCurl.js +7 -7
- package/cjs/fetchNodeHttp.js +21 -16
- package/esm/Blob.js +4 -2
- package/esm/Body.js +5 -2
- package/esm/FormData.js +1 -0
- package/esm/ReadableStream.js +2 -1
- package/esm/Request.js +2 -2
- package/esm/Response.js +1 -1
- package/esm/TextEncoderDecoder.js +1 -0
- package/esm/URL.js +3 -3
- package/esm/WritableStream.js +1 -1
- package/esm/fetch.js +3 -2
- package/esm/fetchCurl.js +4 -4
- package/esm/fetchNodeHttp.js +13 -8
- package/package.json +1 -1
- package/typings/Blob.d.cts +1 -0
- package/typings/Blob.d.ts +1 -0
- package/typings/Body.d.cts +2 -1
- package/typings/Body.d.ts +2 -1
- package/typings/ReadableStream.d.cts +1 -1
- package/typings/ReadableStream.d.ts +1 -1
- package/typings/Request.d.cts +8 -8
- package/typings/Request.d.ts +8 -8
- package/typings/Response.d.cts +5 -5
- package/typings/Response.d.ts +5 -5
- package/typings/TextEncoderDecoder.d.cts +1 -0
- package/typings/TextEncoderDecoder.d.ts +1 -0
- package/typings/WritableStream.d.cts +1 -1
- package/typings/WritableStream.d.ts +1 -1
- package/typings/utils.d.cts +1 -1
- package/typings/utils.d.ts +1 -1
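Most of what follows is a mechanical migration: imports of Node.js builtins move from bare specifiers ('http', 'stream', 'buffer', ...) to node:-prefixed specifiers, and references to the Buffer global are replaced with the explicitly imported Buffer. A minimal TypeScript sketch of the pattern (illustrative only, not code from the package):

// Before: bare builtin specifiers plus the Buffer global.
// import { Readable } from 'stream';
// const buf = Buffer.from('hello');

// After: node:-prefixed specifiers and an explicit Buffer import,
// which pins resolution to the Node.js builtins.
import { Buffer } from 'node:buffer';
import { Readable } from 'node:stream';

const buf = Buffer.from('hello');
const readable = Readable.from(buf);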
package/cjs/Blob.js
CHANGED
@@ -10,20 +10,22 @@ exports.hasStreamMethod = hasStreamMethod;
 exports.hasBlobSignature = hasBlobSignature;
 exports.isArrayBuffer = isArrayBuffer;
 /* eslint-disable @typescript-eslint/no-unsafe-declaration-merging */
+/* eslint-disable @typescript-eslint/ban-ts-comment */
+const node_buffer_1 = require("node:buffer");
 const ReadableStream_js_1 = require("./ReadableStream.js");
 const utils_js_1 = require("./utils.js");
 function getBlobPartAsBuffer(blobPart) {
 if (typeof blobPart === 'string') {
-return Buffer.from(blobPart);
+return node_buffer_1.Buffer.from(blobPart);
 }
-else if (Buffer.isBuffer(blobPart)) {
+else if (node_buffer_1.Buffer.isBuffer(blobPart)) {
 return blobPart;
 }
 else if ((0, utils_js_1.isArrayBufferView)(blobPart)) {
-return Buffer.from(blobPart.buffer, blobPart.byteOffset, blobPart.byteLength);
+return node_buffer_1.Buffer.from(blobPart.buffer, blobPart.byteOffset, blobPart.byteLength);
 }
 else {
-return Buffer.from(blobPart);
+return node_buffer_1.Buffer.from(blobPart);
 }
 }
 function hasBufferMethod(obj) {
@@ -81,13 +83,13 @@ class PonyfillBlob {
 }
 if (hasBytesMethod(blobPart)) {
 return blobPart.bytes().then(bytes => {
-this._buffer = Buffer.from(bytes);
+this._buffer = node_buffer_1.Buffer.from(bytes);
 return this._buffer;
 });
 }
 if (hasArrayBufferMethod(blobPart)) {
 return blobPart.arrayBuffer().then(arrayBuf => {
-this._buffer = Buffer.from(arrayBuf, undefined, blobPart.size);
+this._buffer = node_buffer_1.Buffer.from(arrayBuf, undefined, blobPart.size);
 return this._buffer;
 });
 }
@@ -104,13 +106,13 @@ class PonyfillBlob {
 }
 else if (hasArrayBufferMethod(blobPart)) {
 jobs.push(blobPart.arrayBuffer().then(arrayBuf => {
-bufferChunks[i] = Buffer.from(arrayBuf, undefined, blobPart.size);
+bufferChunks[i] = node_buffer_1.Buffer.from(arrayBuf, undefined, blobPart.size);
 }));
 return undefined;
 }
 else if (hasBytesMethod(blobPart)) {
 jobs.push(blobPart.bytes().then(bytes => {
-bufferChunks[i] = Buffer.from(bytes);
+bufferChunks[i] = node_buffer_1.Buffer.from(bytes);
 }));
 return undefined;
 }
@@ -119,13 +121,13 @@ class PonyfillBlob {
 }
 });
 if (jobs.length > 0) {
-return Promise.all(jobs).then(() => Buffer.concat(bufferChunks, this._size || undefined));
+return Promise.all(jobs).then(() => node_buffer_1.Buffer.concat(bufferChunks, this._size || undefined));
 }
-return (0, utils_js_1.fakePromise)(Buffer.concat(bufferChunks, this._size || undefined));
+return (0, utils_js_1.fakePromise)(node_buffer_1.Buffer.concat(bufferChunks, this._size || undefined));
 }
 arrayBuffer() {
 if (this._buffer) {
-// @ts-
+// @ts-ignore - Mismatch between Buffer and ArrayBuffer
 return (0, utils_js_1.fakePromise)(this._buffer);
 }
 if (this.blobParts.length === 1) {
@@ -136,7 +138,7 @@ class PonyfillBlob {
 return this.blobParts[0].arrayBuffer();
 }
 }
-// @ts-
+// @ts-ignore - Mismatch between Buffer and ArrayBuffer
 return this.buffer();
 }
 bytes() {
@@ -144,12 +146,12 @@ class PonyfillBlob {
 return (0, utils_js_1.fakePromise)(this._buffer);
 }
 if (this.blobParts.length === 1) {
-if (Buffer.isBuffer(this.blobParts[0])) {
+if (node_buffer_1.Buffer.isBuffer(this.blobParts[0])) {
 this._buffer = this.blobParts[0];
 return (0, utils_js_1.fakePromise)(this.blobParts[0]);
 }
 if (this.blobParts[0] instanceof Uint8Array) {
-this._buffer = Buffer.from(this.blobParts[0]);
+this._buffer = node_buffer_1.Buffer.from(this.blobParts[0]);
 return (0, utils_js_1.fakePromise)(this.blobParts[0]);
 }
 if (hasBytesMethod(this.blobParts[0])) {
@@ -209,7 +211,7 @@ class PonyfillBlob {
 this._size = 0;
 for (const blobPart of this.blobParts) {
 if (typeof blobPart === 'string') {
-this._size += Buffer.byteLength(blobPart);
+this._size += node_buffer_1.Buffer.byteLength(blobPart);
 }
 else if (hasSizeProperty(blobPart)) {
 this._size += blobPart.size;
@@ -266,13 +268,13 @@ class PonyfillBlob {
 }
 if (hasBytesMethod(blobPart)) {
 return blobPart.bytes().then(bytes => {
-const buf = Buffer.from(bytes);
+const buf = node_buffer_1.Buffer.from(bytes);
 controller.enqueue(buf);
 });
 }
 if (hasArrayBufferMethod(blobPart)) {
 return blobPart.arrayBuffer().then(arrayBuffer => {
-const buf = Buffer.from(arrayBuffer, undefined, blobPart.size);
+const buf = node_buffer_1.Buffer.from(arrayBuffer, undefined, blobPart.size);
 controller.enqueue(buf);
 });
 }
package/cjs/Body.js
CHANGED
@@ -2,7 +2,9 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.PonyfillBody = void 0;
 const tslib_1 = require("tslib");
-
+/* eslint-disable @typescript-eslint/ban-ts-comment */
+const node_buffer_1 = require("node:buffer");
+const node_stream_1 = require("node:stream");
 const busboy_1 = tslib_1.__importDefault(require("busboy"));
 const Blob_js_1 = require("./Blob.js");
 const File_js_1 = require("./File.js");
@@ -41,7 +43,7 @@ class PonyfillBody {
 _buffer;
 generateBody() {
 if (this._generatedBody?.readable?.destroyed && this._buffer) {
-this._generatedBody.readable =
+this._generatedBody.readable = node_stream_1.Readable.from(this._buffer);
 }
 if (this._generatedBody) {
 return this._generatedBody;
@@ -227,6 +229,7 @@ class PonyfillBody {
 });
 bb.on('error', (err = 'An error occurred while parsing the form data') => {
 const errMessage = err.message || err.toString();
+// @ts-ignore - `cause` is in `TypeError`in node
 reject(new TypeError(errMessage, err.cause));
 });
 _body?.readable.pipe(bb);
@@ -245,13 +248,13 @@ class PonyfillBody {
 }
 if ((0, Blob_js_1.hasBytesMethod)(this.bodyInit)) {
 return this.bodyInit.bytes().then(bytes => {
-this._buffer = Buffer.from(bytes);
+this._buffer = node_buffer_1.Buffer.from(bytes);
 return this._buffer;
 });
 }
 if ((0, Blob_js_1.hasArrayBufferMethod)(this.bodyInit)) {
 return this.bodyInit.arrayBuffer().then(buf => {
-this._buffer = Buffer.from(buf, undefined, buf.byteLength);
+this._buffer = node_buffer_1.Buffer.from(buf, undefined, buf.byteLength);
 return this._buffer;
 });
 }
@@ -261,7 +264,7 @@ class PonyfillBody {
 this._buffer = chunks[0];
 return this._buffer;
 }
-this._buffer = Buffer.concat(chunks);
+this._buffer = node_buffer_1.Buffer.concat(chunks);
 return this._buffer;
 });
 }
@@ -269,7 +272,7 @@ class PonyfillBody {
 return this.buffer();
 }
 arrayBuffer() {
-// @ts-
+// @ts-ignore - Mismatch between Buffer and ArrayBuffer
 return this.buffer();
 }
 _json = null;
@@ -315,7 +318,7 @@ function processBodyInit(bodyInit) {
 };
 }
 if (typeof bodyInit === 'string') {
-const buffer = Buffer.from(bodyInit);
+const buffer = node_buffer_1.Buffer.from(bodyInit);
 const contentLength = buffer.byteLength;
 return {
 bodyType: BodyInitType.String,
@@ -323,33 +326,33 @@ function processBodyInit(bodyInit) {
 contentLength,
 buffer,
 bodyFactory() {
-const readable =
+const readable = node_stream_1.Readable.from(buffer);
 return new ReadableStream_js_1.PonyfillReadableStream(readable);
 },
 };
 }
-if (Buffer.isBuffer(bodyInit)) {
+if (node_buffer_1.Buffer.isBuffer(bodyInit)) {
 return {
 bodyType: BodyInitType.Buffer,
 contentType: null,
 contentLength: bodyInit.length,
 buffer: bodyInit,
 bodyFactory() {
-const readable =
+const readable = node_stream_1.Readable.from(bodyInit);
 const body = new ReadableStream_js_1.PonyfillReadableStream(readable);
 return body;
 },
 };
 }
 if ((0, utils_js_1.isArrayBufferView)(bodyInit)) {
-const buffer = Buffer.from(bodyInit.buffer, bodyInit.byteOffset, bodyInit.byteLength);
+const buffer = node_buffer_1.Buffer.from(bodyInit.buffer, bodyInit.byteOffset, bodyInit.byteLength);
 return {
 bodyType: BodyInitType.Buffer,
 contentLength: bodyInit.byteLength,
 contentType: null,
 buffer,
 bodyFactory() {
-const readable =
+const readable = node_stream_1.Readable.from(buffer);
 const body = new ReadableStream_js_1.PonyfillReadableStream(readable);
 return body;
 },
@@ -375,20 +378,20 @@ function processBodyInit(bodyInit) {
 }
 if (bodyInit instanceof ArrayBuffer) {
 const contentLength = bodyInit.byteLength;
-const buffer = Buffer.from(bodyInit, undefined, bodyInit.byteLength);
+const buffer = node_buffer_1.Buffer.from(bodyInit, undefined, bodyInit.byteLength);
 return {
 bodyType: BodyInitType.Buffer,
 contentType: null,
 contentLength,
 buffer,
 bodyFactory() {
-const readable =
+const readable = node_stream_1.Readable.from(buffer);
 const body = new ReadableStream_js_1.PonyfillReadableStream(readable);
 return body;
 },
 };
 }
-if (bodyInit instanceof
+if (bodyInit instanceof node_stream_1.Readable) {
 return {
 bodyType: BodyInitType.Readable,
 contentType: null,
@@ -406,7 +409,7 @@ function processBodyInit(bodyInit) {
 contentType,
 contentLength: null,
 bodyFactory() {
-const body = new ReadableStream_js_1.PonyfillReadableStream(
+const body = new ReadableStream_js_1.PonyfillReadableStream(node_stream_1.Readable.from(bodyInit.toString()));
 return body;
 },
 };
@@ -438,7 +441,7 @@ function processBodyInit(bodyInit) {
 contentLength: null,
 bodyType: BodyInitType.AsyncIterable,
 bodyFactory() {
-const readable =
+const readable = node_stream_1.Readable.from(bodyInit);
 return new ReadableStream_js_1.PonyfillReadableStream(readable);
 },
 };
package/cjs/FormData.js
CHANGED
@@ -2,6 +2,7 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.PonyfillFormData = void 0;
 exports.getStreamFromFormData = getStreamFromFormData;
+const node_buffer_1 = require("node:buffer");
 const IteratorObject_js_1 = require("./IteratorObject.js");
 const ReadableStream_js_1 = require("./ReadableStream.js");
 class PonyfillFormData {
@@ -79,13 +80,13 @@ function getStreamFromFormData(formData, boundary = '---') {
 start: controller => {
 formData.forEach((value, key) => {
 if (!sentInitialHeader) {
-controller.enqueue(Buffer.from(`--${boundary}\r\n`));
+controller.enqueue(node_buffer_1.Buffer.from(`--${boundary}\r\n`));
 sentInitialHeader = true;
 }
 entries.push([key, value]);
 });
 if (!sentInitialHeader) {
-controller.enqueue(Buffer.from(`--${boundary}--\r\n`));
+controller.enqueue(node_buffer_1.Buffer.from(`--${boundary}--\r\n`));
 controller.close();
 }
 },
@@ -94,31 +95,31 @@ function getStreamFromFormData(formData, boundary = '---') {
 if (entry) {
 const [key, value] = entry;
 if (typeof value === 'string') {
-controller.enqueue(Buffer.from(`Content-Disposition: form-data; name="${key}"\r\n\r\n`));
-controller.enqueue(Buffer.from(value));
+controller.enqueue(node_buffer_1.Buffer.from(`Content-Disposition: form-data; name="${key}"\r\n\r\n`));
+controller.enqueue(node_buffer_1.Buffer.from(value));
 }
 else {
 let filenamePart = '';
 if (value.name) {
 filenamePart = `; filename="${value.name}"`;
 }
-controller.enqueue(Buffer.from(`Content-Disposition: form-data; name="${key}"${filenamePart}\r\n`));
-controller.enqueue(Buffer.from(`Content-Type: ${value.type || 'application/octet-stream'}\r\n\r\n`));
+controller.enqueue(node_buffer_1.Buffer.from(`Content-Disposition: form-data; name="${key}"${filenamePart}\r\n`));
+controller.enqueue(node_buffer_1.Buffer.from(`Content-Type: ${value.type || 'application/octet-stream'}\r\n\r\n`));
 const entryStream = value.stream();
 for await (const chunk of entryStream) {
 controller.enqueue(chunk);
 }
 }
 if (entries.length === 0) {
-controller.enqueue(Buffer.from(`\r\n--${boundary}--\r\n`));
+controller.enqueue(node_buffer_1.Buffer.from(`\r\n--${boundary}--\r\n`));
 controller.close();
 }
 else {
-controller.enqueue(Buffer.from(`\r\n--${boundary}\r\n`));
+controller.enqueue(node_buffer_1.Buffer.from(`\r\n--${boundary}\r\n`));
 }
 }
 else {
-controller.enqueue(Buffer.from(`\r\n--${boundary}--\r\n`));
+controller.enqueue(node_buffer_1.Buffer.from(`\r\n--${boundary}--\r\n`));
 controller.close();
 }
 },
package/cjs/ReadableStream.js
CHANGED
@@ -1,7 +1,8 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.PonyfillReadableStream = void 0;
-const
+const node_buffer_1 = require("node:buffer");
+const node_stream_1 = require("node:stream");
 const utils_js_1 = require("./utils.js");
 function createController(desiredSize, readable) {
 let chunks = [];
@@ -10,7 +11,7 @@ function createController(desiredSize, readable) {
 return {
 desiredSize,
 enqueue(chunk) {
-const buf = typeof chunk === 'string' ? Buffer.from(chunk) : chunk;
+const buf = typeof chunk === 'string' ? node_buffer_1.Buffer.from(chunk) : chunk;
 if (!flushed) {
 chunks.push(buf);
 }
@@ -37,7 +38,7 @@ function createController(desiredSize, readable) {
 _flush() {
 flushed = true;
 if (chunks.length > 0) {
-const concatenated = chunks.length > 1 ? Buffer.concat(chunks) : chunks[0];
+const concatenated = chunks.length > 1 ? node_buffer_1.Buffer.concat(chunks) : chunks[0];
 readable.push(concatenated);
 chunks = [];
 }
@@ -60,7 +61,7 @@ class PonyfillReadableStream {
 this.readable = underlyingSource;
 }
 else if (isReadableStream(underlyingSource)) {
-this.readable =
+this.readable = node_stream_1.Readable.fromWeb(underlyingSource);
 }
 else {
 let started = false;
@@ -80,7 +81,7 @@ class PonyfillReadableStream {
 controller._flush();
 ongoing = false;
 };
-this.readable = new
+this.readable = new node_stream_1.Readable({
 read(desiredSize) {
 if (ongoing) {
 return;
@@ -212,7 +213,7 @@ class PonyfillReadableStream {
 return isReadableStream(instance);
 }
 static from(iterable) {
-return new PonyfillReadableStream(
+return new PonyfillReadableStream(node_stream_1.Readable.from(iterable));
 }
 }
 exports.PonyfillReadableStream = PonyfillReadableStream;
package/cjs/Request.js
CHANGED
@@ -1,8 +1,8 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.PonyfillRequest = void 0;
-const
-const
+const node_http_1 = require("node:http");
+const node_https_1 = require("node:https");
 const Body_js_1 = require("./Body.js");
 const Headers_js_1 = require("./Headers.js");
 const URL_js_1 = require("./URL.js");
@@ -70,10 +70,10 @@ class PonyfillRequest extends Body_js_1.PonyfillBody {
 if (requestInit.agent === false) {
 this.agent = false;
 }
-else if (protocol.startsWith('http:') && requestInit.agent instanceof
+else if (protocol.startsWith('http:') && requestInit.agent instanceof node_http_1.Agent) {
 this.agent = requestInit.agent;
 }
-else if (protocol.startsWith('https:') && requestInit.agent instanceof
+else if (protocol.startsWith('https:') && requestInit.agent instanceof node_https_1.Agent) {
 this.agent = requestInit.agent;
 }
 }
package/cjs/Response.js
CHANGED
@@ -1,7 +1,7 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.PonyfillResponse = void 0;
-const
+const node_http_1 = require("node:http");
 const Body_js_1 = require("./Body.js");
 const Headers_js_1 = require("./Headers.js");
 const JSON_CONTENT_TYPE = 'application/json; charset=utf-8';
@@ -14,7 +14,7 @@ class PonyfillResponse extends Body_js_1.PonyfillBody {
 ? init.headers
 : new Headers_js_1.PonyfillHeaders(init?.headers);
 this.status = init?.status || 200;
-this.statusText = init?.statusText ||
+this.statusText = init?.statusText || node_http_1.STATUS_CODES[this.status] || 'OK';
 this.url = init?.url || '';
 this.redirected = init?.redirected || false;
 this.type = init?.type || 'default';
package/cjs/TextEncoderDecoder.js
CHANGED
@@ -2,6 +2,7 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.PonyfillTextDecoder = exports.PonyfillTextEncoder = void 0;
 exports.PonyfillBtoa = PonyfillBtoa;
+const node_buffer_1 = require("node:buffer");
 const utils_js_1 = require("./utils.js");
 class PonyfillTextEncoder {
 encoding;
@@ -9,7 +10,7 @@ class PonyfillTextEncoder {
 this.encoding = encoding;
 }
 encode(input) {
-return Buffer.from(input, this.encoding);
+return node_buffer_1.Buffer.from(input, this.encoding);
 }
 encodeInto(source, destination) {
 const buffer = this.encode(source);
@@ -33,16 +34,16 @@ class PonyfillTextDecoder {
 }
 }
 decode(input) {
-if (Buffer.isBuffer(input)) {
+if (node_buffer_1.Buffer.isBuffer(input)) {
 return input.toString(this.encoding);
 }
 if ((0, utils_js_1.isArrayBufferView)(input)) {
-return Buffer.from(input.buffer, input.byteOffset, input.byteLength).toString(this.encoding);
+return node_buffer_1.Buffer.from(input.buffer, input.byteOffset, input.byteLength).toString(this.encoding);
 }
-return Buffer.from(input).toString(this.encoding);
+return node_buffer_1.Buffer.from(input).toString(this.encoding);
 }
 }
 exports.PonyfillTextDecoder = PonyfillTextDecoder;
 function PonyfillBtoa(input) {
-return Buffer.from(input, 'binary').toString('base64');
+return node_buffer_1.Buffer.from(input, 'binary').toString('base64');
 }
package/cjs/URL.js
CHANGED
@@ -1,13 +1,14 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.PonyfillURL = void 0;
-const
-const
+const tslib_1 = require("tslib");
+const node_buffer_1 = tslib_1.__importDefault(require("node:buffer"));
+const node_crypto_1 = require("node:crypto");
 class PonyfillURL extends URL {
 // This part is only needed to handle `PonyfillBlob` objects
 static blobRegistry = new Map();
 static createObjectURL(blob) {
-const blobUrl = `blob:whatwgnode:${(0,
+const blobUrl = `blob:whatwgnode:${(0, node_crypto_1.randomUUID)()}`;
 this.blobRegistry.set(blobUrl, blob);
 return blobUrl;
 }
@@ -20,7 +21,7 @@ class PonyfillURL extends URL {
 }
 }
 static getBlobFromURL(url) {
-return (this.blobRegistry.get(url) ||
+return (this.blobRegistry.get(url) || node_buffer_1.default?.resolveObjectURL?.(url));
 }
 }
 exports.PonyfillURL = PonyfillURL;
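The URL ponyfill now imports the node:buffer module object and guards the lookup with optional chaining, presumably because resolveObjectURL is not available on every runtime. A small sketch of that fallback (the blob URL value is illustrative):

import NodeBuffer from 'node:buffer';

// Falls back to undefined instead of throwing when resolveObjectURL is missing.
const maybeBlob = NodeBuffer?.resolveObjectURL?.('blob:nodedata:example');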
package/cjs/WritableStream.js
CHANGED
@@ -1,16 +1,16 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.PonyfillWritableStream = void 0;
-const
+const node_stream_1 = require("node:stream");
 const utils_js_1 = require("./utils.js");
 class PonyfillWritableStream {
 writable;
 constructor(underlyingSink) {
-if (underlyingSink instanceof
+if (underlyingSink instanceof node_stream_1.Writable) {
 this.writable = underlyingSink;
 }
 else if (underlyingSink) {
-const writable = new
+const writable = new node_stream_1.Writable({
 write(chunk, _encoding, callback) {
 try {
 const result = underlyingSink.write?.(chunk, controller);
@@ -95,7 +95,7 @@ class PonyfillWritableStream {
 });
 }
 else {
-this.writable = new
+this.writable = new node_stream_1.Writable();
 }
 }
 getWriter() {
package/cjs/fetch.js
CHANGED
@@ -1,8 +1,9 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.fetchPonyfill = fetchPonyfill;
-const
-const
+const node_buffer_1 = require("node:buffer");
+const node_fs_1 = require("node:fs");
+const node_url_1 = require("node:url");
 const fetchCurl_js_1 = require("./fetchCurl.js");
 const fetchNodeHttp_js_1 = require("./fetchNodeHttp.js");
 const Request_js_1 = require("./Request.js");
@@ -11,15 +12,15 @@ const URL_js_1 = require("./URL.js");
 const utils_js_1 = require("./utils.js");
 const BASE64_SUFFIX = ';base64';
 function getResponseForFile(url) {
-const path = (0,
-const readable = (0,
+const path = (0, node_url_1.fileURLToPath)(url);
+const readable = (0, node_fs_1.createReadStream)(path);
 return new Response_js_1.PonyfillResponse(readable);
 }
 function getResponseForDataUri(url) {
 const [mimeType = 'text/plain', ...datas] = url.substring(5).split(',');
 const data = decodeURIComponent(datas.join(','));
 if (mimeType.endsWith(BASE64_SUFFIX)) {
-const buffer = Buffer.from(data, 'base64url');
+const buffer = node_buffer_1.Buffer.from(data, 'base64url');
 const realMimeType = mimeType.slice(0, -BASE64_SUFFIX.length);
 return new Response_js_1.PonyfillResponse(buffer, {
 status: 200,
package/cjs/fetchCurl.js
CHANGED
@@ -1,8 +1,9 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.fetchCurl = fetchCurl;
-const
-const
+const node_stream_1 = require("node:stream");
+const promises_1 = require("node:stream/promises");
+const node_tls_1 = require("node:tls");
 const Response_js_1 = require("./Response.js");
 const utils_js_1 = require("./utils.js");
 function fetchCurl(fetchRequest) {
@@ -17,7 +18,7 @@ function fetchCurl(fetchRequest) {
 curlHandle.setOpt('CAINFO', process.env.NODE_EXTRA_CA_CERTS);
 }
 else {
-curlHandle.setOpt('CAINFO_BLOB',
+curlHandle.setOpt('CAINFO_BLOB', node_tls_1.rootCertificates.join('\n'));
 }
 curlHandle.enable(CurlFeature.StreamResponse);
 curlHandle.setStreamProgressCallback(function () {
@@ -34,7 +35,7 @@ function fetchCurl(fetchRequest) {
 const nodeReadable = (fetchRequest.body != null
 ? (0, utils_js_1.isNodeReadable)(fetchRequest.body)
 ? fetchRequest.body
-:
+: node_stream_1.Readable.from(fetchRequest.body)
 : null);
 if (nodeReadable) {
 curlHandle.setOpt('UPLOAD', true);
@@ -100,9 +101,8 @@ function fetchCurl(fetchRequest) {
 }
 });
 curlHandle.once('stream', function streamListener(stream, status, headersBuf) {
-const outputStream = new
-
-.pipeline(stream, outputStream, {
+const outputStream = new node_stream_1.PassThrough();
+(0, promises_1.pipeline)(stream, outputStream, {
 end: true,
 signal: fetchRequest['_signal'] ?? undefined,
 })
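fetchCurl now routes the libcurl response stream through pipeline from node:stream/promises rather than the callback-style stream pipeline, so errors and aborts surface through a promise. A rough sketch of the promise-based form, with stand-in values for the curl stream and the request signal:

import { PassThrough, Readable } from 'node:stream';
import { pipeline } from 'node:stream/promises';

const source = Readable.from(['chunk1', 'chunk2']); // stand-in for the curl stream
const signal = new AbortController().signal;        // stand-in for the request signal
const outputStream = new PassThrough();

// Resolves once the source has been fully piped; rejects on error or abort.
pipeline(source, outputStream, { end: true, signal })
  .catch(err => console.error('pipeline failed', err));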
package/cjs/fetchNodeHttp.js
CHANGED
@@ -1,20 +1,21 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.fetchNodeHttp = fetchNodeHttp;
-const
-const
-const
-const
+const node_http_1 = require("node:http");
+const node_https_1 = require("node:https");
+const node_stream_1 = require("node:stream");
+const promises_1 = require("node:stream/promises");
+const node_zlib_1 = require("node:zlib");
 const Request_js_1 = require("./Request.js");
 const Response_js_1 = require("./Response.js");
 const URL_js_1 = require("./URL.js");
 const utils_js_1 = require("./utils.js");
 function getRequestFnForProtocol(url) {
 if (url.startsWith('http:')) {
-return
+return node_http_1.request;
 }
 else if (url.startsWith('https:')) {
-return
+return node_https_1.request;
 }
 throw new Error(`Unsupported protocol: ${url.split(':')[0] || url}`);
 }
@@ -25,7 +26,7 @@ function fetchNodeHttp(fetchRequest) {
 const nodeReadable = (fetchRequest.body != null
 ? (0, utils_js_1.isNodeReadable)(fetchRequest.body)
 ? fetchRequest.body
-:
+: node_stream_1.Readable.from(fetchRequest.body)
 : null);
 const headersSerializer = fetchRequest.headersSerializer || utils_js_1.getHeadersObj;
 const nodeHeaders = headersSerializer(fetchRequest.headers);
@@ -56,21 +57,21 @@ function fetchNodeHttp(fetchRequest) {
 switch (contentEncoding) {
 case 'x-gzip':
 case 'gzip':
-outputStream = (0,
+outputStream = (0, node_zlib_1.createGunzip)();
 break;
 case 'x-deflate':
 case 'deflate':
-outputStream = (0,
+outputStream = (0, node_zlib_1.createInflate)();
 break;
 case 'x-deflate-raw':
 case 'deflate-raw':
-outputStream = (0,
+outputStream = (0, node_zlib_1.createInflateRaw)();
 break;
 case 'br':
-outputStream = (0,
+outputStream = (0, node_zlib_1.createBrotliDecompress)();
 break;
 default:
-outputStream = new
+outputStream = new node_stream_1.PassThrough();
 }
 if (nodeResponse.headers.location) {
 if (fetchRequest.redirect === 'error') {
@@ -90,8 +91,7 @@ function fetchNodeHttp(fetchRequest) {
 return;
 }
 }
-
-.pipeline(nodeResponse, outputStream, {
+(0, promises_1.pipeline)(nodeResponse, outputStream, {
 signal: fetchRequest['_signal'] ?? undefined,
 end: true,
 })
@@ -101,9 +101,14 @@ function fetchNodeHttp(fetchRequest) {
 }
 })
 .catch(reject);
+const statusCode = nodeResponse.statusCode || 200;
+let statusText = nodeResponse.statusMessage || node_http_1.STATUS_CODES[statusCode];
+if (statusText == null) {
+statusText = '';
+}
 const ponyfillResponse = new Response_js_1.PonyfillResponse(outputStream, {
-status:
-statusText
+status: statusCode,
+statusText,
 headers: nodeResponse.headers,
 url: fetchRequest.url,
 });
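The node-http transport now computes the response status text explicitly: it prefers the statusMessage reported by Node, falls back to the standard reason phrase from http.STATUS_CODES, and defaults to an empty string when neither is available. A small sketch of the same fallback with hypothetical values:

import { STATUS_CODES } from 'node:http';

const statusCode = 204;                              // hypothetical nodeResponse.statusCode
const statusMessage: string | undefined = undefined; // hypothetical nodeResponse.statusMessage

const statusText = statusMessage || STATUS_CODES[statusCode] || '';
console.log(statusText); // "No Content"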
package/esm/Blob.js
CHANGED
@@ -1,4 +1,6 @@
 /* eslint-disable @typescript-eslint/no-unsafe-declaration-merging */
+/* eslint-disable @typescript-eslint/ban-ts-comment */
+import { Buffer } from 'node:buffer';
 import { PonyfillReadableStream } from './ReadableStream.js';
 import { fakePromise, isArrayBufferView } from './utils.js';
 function getBlobPartAsBuffer(blobPart) {
@@ -114,7 +116,7 @@ export class PonyfillBlob {
 }
 arrayBuffer() {
 if (this._buffer) {
-// @ts-
+// @ts-ignore - Mismatch between Buffer and ArrayBuffer
 return fakePromise(this._buffer);
 }
 if (this.blobParts.length === 1) {
@@ -125,7 +127,7 @@ export class PonyfillBlob {
 return this.blobParts[0].arrayBuffer();
 }
 }
-// @ts-
+// @ts-ignore - Mismatch between Buffer and ArrayBuffer
 return this.buffer();
 }
 bytes() {
package/esm/Body.js
CHANGED
@@ -1,4 +1,6 @@
-
+/* eslint-disable @typescript-eslint/ban-ts-comment */
+import { Buffer } from 'node:buffer';
+import { Readable } from 'node:stream';
 import busboy from 'busboy';
 import { hasArrayBufferMethod, hasBufferMethod, hasBytesMethod, PonyfillBlob } from './Blob.js';
 import { PonyfillFile } from './File.js';
@@ -223,6 +225,7 @@ export class PonyfillBody {
 });
 bb.on('error', (err = 'An error occurred while parsing the form data') => {
 const errMessage = err.message || err.toString();
+// @ts-ignore - `cause` is in `TypeError`in node
 reject(new TypeError(errMessage, err.cause));
 });
 _body?.readable.pipe(bb);
@@ -265,7 +268,7 @@ export class PonyfillBody {
 return this.buffer();
 }
 arrayBuffer() {
-// @ts-
+// @ts-ignore - Mismatch between Buffer and ArrayBuffer
 return this.buffer();
 }
 _json = null;
package/esm/FormData.js
CHANGED
package/esm/ReadableStream.js
CHANGED
package/esm/Request.js
CHANGED
@@ -1,5 +1,5 @@
-import { Agent as HTTPAgent } from 'http';
-import { Agent as HTTPSAgent } from 'https';
+import { Agent as HTTPAgent } from 'node:http';
+import { Agent as HTTPSAgent } from 'node:https';
 import { PonyfillBody } from './Body.js';
 import { isHeadersLike, PonyfillHeaders } from './Headers.js';
 import { PonyfillURL } from './URL.js';
package/esm/Response.js
CHANGED
package/esm/URL.js
CHANGED
@@ -1,5 +1,5 @@
-import
-import { randomUUID } from 'crypto';
+import NodeBuffer from 'node:buffer';
+import { randomUUID } from 'node:crypto';
 export class PonyfillURL extends URL {
 // This part is only needed to handle `PonyfillBlob` objects
 static blobRegistry = new Map();
@@ -17,6 +17,6 @@ export class PonyfillURL extends URL {
 }
 }
 static getBlobFromURL(url) {
-return (this.blobRegistry.get(url) || resolveObjectURL(url));
+return (this.blobRegistry.get(url) || NodeBuffer?.resolveObjectURL?.(url));
 }
 }
package/esm/WritableStream.js
CHANGED
package/esm/fetch.js
CHANGED
@@ -1,5 +1,6 @@
-import {
-import {
+import { Buffer } from 'node:buffer';
+import { createReadStream } from 'node:fs';
+import { fileURLToPath } from 'node:url';
 import { fetchCurl } from './fetchCurl.js';
 import { fetchNodeHttp } from './fetchNodeHttp.js';
 import { PonyfillRequest } from './Request.js';
package/esm/fetchCurl.js
CHANGED
@@ -1,5 +1,6 @@
-import { PassThrough, Readable
-import {
+import { PassThrough, Readable } from 'node:stream';
+import { pipeline } from 'node:stream/promises';
+import { rootCertificates } from 'node:tls';
 import { PonyfillResponse } from './Response.js';
 import { createDeferredPromise, defaultHeadersSerializer, isNodeReadable } from './utils.js';
 export function fetchCurl(fetchRequest) {
@@ -98,8 +99,7 @@ export function fetchCurl(fetchRequest) {
 });
 curlHandle.once('stream', function streamListener(stream, status, headersBuf) {
 const outputStream = new PassThrough();
-
-.pipeline(stream, outputStream, {
+pipeline(stream, outputStream, {
 end: true,
 signal: fetchRequest['_signal'] ?? undefined,
 })
package/esm/fetchNodeHttp.js
CHANGED
@@ -1,7 +1,8 @@
-import { request as httpRequest } from 'http';
-import { request as httpsRequest } from 'https';
-import { PassThrough, Readable
-import {
+import { request as httpRequest, STATUS_CODES } from 'node:http';
+import { request as httpsRequest } from 'node:https';
+import { PassThrough, Readable } from 'node:stream';
+import { pipeline } from 'node:stream/promises';
+import { createBrotliDecompress, createGunzip, createInflate, createInflateRaw } from 'node:zlib';
 import { PonyfillRequest } from './Request.js';
 import { PonyfillResponse } from './Response.js';
 import { PonyfillURL } from './URL.js';
@@ -87,8 +88,7 @@ export function fetchNodeHttp(fetchRequest) {
 return;
 }
 }
-
-.pipeline(nodeResponse, outputStream, {
+pipeline(nodeResponse, outputStream, {
 signal: fetchRequest['_signal'] ?? undefined,
 end: true,
 })
@@ -98,9 +98,14 @@ export function fetchNodeHttp(fetchRequest) {
 }
 })
 .catch(reject);
+const statusCode = nodeResponse.statusCode || 200;
+let statusText = nodeResponse.statusMessage || STATUS_CODES[statusCode];
+if (statusText == null) {
+statusText = '';
+}
 const ponyfillResponse = new PonyfillResponse(outputStream, {
-status:
-statusText
+status: statusCode,
+statusText,
 headers: nodeResponse.headers,
 url: fetchRequest.url,
 });
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
 "name": "@whatwg-node/node-fetch",
-"version": "0.7.6-alpha-
+"version": "0.7.6-alpha-20250106172915-e3e79162475cede19c6fd63458cb73e68a450518",
 "description": "Fetch API implementation for Node",
 "sideEffects": false,
 "dependencies": {
package/typings/Blob.d.cts
CHANGED
package/typings/Blob.d.ts
CHANGED
package/typings/Body.d.cts
CHANGED
@@ -1,4 +1,5 @@
-import {
+import { Buffer } from 'node:buffer';
+import { Readable } from 'node:stream';
 import { PonyfillBlob } from './Blob.cjs';
 import { PonyfillFormData } from './FormData.cjs';
 import { PonyfillReadableStream } from './ReadableStream.cjs';
package/typings/Body.d.ts
CHANGED
@@ -1,4 +1,5 @@
-import {
+import { Buffer } from 'node:buffer';
+import { Readable } from 'node:stream';
 import { PonyfillBlob } from './Blob.js';
 import { PonyfillFormData } from './FormData.js';
 import { PonyfillReadableStream } from './ReadableStream.js';
package/typings/ReadableStream.d.cts
CHANGED
@@ -1,4 +1,4 @@
-import { Readable } from 'stream';
+import { Readable } from 'node:stream';
 export declare class PonyfillReadableStream<T> implements ReadableStream<T> {
 readable: Readable;
 constructor(underlyingSource?: UnderlyingSource<T> | Readable | ReadableStream<T> | PonyfillReadableStream<T>);
package/typings/ReadableStream.d.ts
CHANGED
@@ -1,4 +1,4 @@
-import { Readable } from 'stream';
+import { Readable } from 'node:stream';
 export declare class PonyfillReadableStream<T> implements ReadableStream<T> {
 readable: Readable;
 constructor(underlyingSource?: UnderlyingSource<T> | Readable | ReadableStream<T> | PonyfillReadableStream<T>);
package/typings/Request.d.cts
CHANGED
@@ -1,18 +1,18 @@
-import { Agent as HTTPAgent } from 'http';
-import { Agent as HTTPSAgent } from 'https';
+import { Agent as HTTPAgent } from 'node:http';
+import { Agent as HTTPSAgent } from 'node:https';
 import { BodyPonyfillInit, PonyfillBody, PonyfillBodyOptions } from './Body.cjs';
 import { PonyfillHeadersInit } from './Headers.cjs';
 export type RequestPonyfillInit = PonyfillBodyOptions & Omit<RequestInit, 'body' | 'headers'> & {
-body?: BodyPonyfillInit | null;
-duplex?: 'half' | 'full';
-headers?: PonyfillHeadersInit;
-headersSerializer?: HeadersSerializer;
-agent?: HTTPAgent | HTTPSAgent | false;
+body?: BodyPonyfillInit | null | undefined;
+duplex?: 'half' | 'full' | undefined;
+headers?: PonyfillHeadersInit | undefined;
+headersSerializer?: HeadersSerializer | undefined;
+agent?: HTTPAgent | HTTPSAgent | false | undefined;
 };
 type HeadersSerializer = (headers: Headers, onContentLength?: (contentLength: string) => void) => string[];
 export declare class PonyfillRequest<TJSON = any> extends PonyfillBody<TJSON> implements Request {
 constructor(input: RequestInfo | URL, options?: RequestPonyfillInit);
-headersSerializer?: HeadersSerializer;
+headersSerializer?: HeadersSerializer | undefined;
 cache: RequestCache;
 credentials: RequestCredentials;
 destination: RequestDestination;
package/typings/Request.d.ts
CHANGED
@@ -1,18 +1,18 @@
-import { Agent as HTTPAgent } from 'http';
-import { Agent as HTTPSAgent } from 'https';
+import { Agent as HTTPAgent } from 'node:http';
+import { Agent as HTTPSAgent } from 'node:https';
 import { BodyPonyfillInit, PonyfillBody, PonyfillBodyOptions } from './Body.js';
 import { PonyfillHeadersInit } from './Headers.js';
 export type RequestPonyfillInit = PonyfillBodyOptions & Omit<RequestInit, 'body' | 'headers'> & {
-body?: BodyPonyfillInit | null;
-duplex?: 'half' | 'full';
-headers?: PonyfillHeadersInit;
-headersSerializer?: HeadersSerializer;
-agent?: HTTPAgent | HTTPSAgent | false;
+body?: BodyPonyfillInit | null | undefined;
+duplex?: 'half' | 'full' | undefined;
+headers?: PonyfillHeadersInit | undefined;
+headersSerializer?: HeadersSerializer | undefined;
+agent?: HTTPAgent | HTTPSAgent | false | undefined;
 };
 type HeadersSerializer = (headers: Headers, onContentLength?: (contentLength: string) => void) => string[];
 export declare class PonyfillRequest<TJSON = any> extends PonyfillBody<TJSON> implements Request {
 constructor(input: RequestInfo | URL, options?: RequestPonyfillInit);
-headersSerializer?: HeadersSerializer;
+headersSerializer?: HeadersSerializer | undefined;
 cache: RequestCache;
 credentials: RequestCredentials;
 destination: RequestDestination;
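The typings now spell out | undefined on every optional member. A plausible motivation is compatibility with TypeScript's exactOptionalPropertyTypes option, under which prop?: T alone rejects an explicitly assigned undefined; the widened form accepts both omission and an explicit undefined. A minimal sketch (the interfaces are illustrative, not from the package):

// With "exactOptionalPropertyTypes": true in tsconfig.json:
interface Strict {
  duplex?: 'half' | 'full';             // omission only
}
interface Widened {
  duplex?: 'half' | 'full' | undefined; // omission or explicit undefined
}

const ok: Widened = { duplex: undefined };
// const err: Strict = { duplex: undefined }; // rejected under exactOptionalPropertyTypes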
package/typings/Response.d.cts
CHANGED
@@ -1,14 +1,14 @@
 import { BodyPonyfillInit, PonyfillBody, PonyfillBodyOptions } from './Body.cjs';
 import { PonyfillHeadersInit } from './Headers.cjs';
 export type ResponsePonyfilInit = PonyfillBodyOptions & Omit<ResponseInit, 'headers'> & {
-url?: string;
-redirected?: boolean;
-headers?: PonyfillHeadersInit;
-type?: ResponseType;
+url?: string | undefined;
+redirected?: boolean | undefined;
+headers?: PonyfillHeadersInit | undefined;
+type?: ResponseType | undefined;
 };
 export declare class PonyfillResponse<TJSON = any> extends PonyfillBody<TJSON> implements Response {
 headers: Headers;
-constructor(body?: BodyPonyfillInit | null, init?: ResponsePonyfilInit);
+constructor(body?: BodyPonyfillInit | null | undefined, init?: ResponsePonyfilInit);
 get ok(): boolean;
 status: number;
 statusText: string;
package/typings/Response.d.ts
CHANGED
@@ -1,14 +1,14 @@
 import { BodyPonyfillInit, PonyfillBody, PonyfillBodyOptions } from './Body.js';
 import { PonyfillHeadersInit } from './Headers.js';
 export type ResponsePonyfilInit = PonyfillBodyOptions & Omit<ResponseInit, 'headers'> & {
-url?: string;
-redirected?: boolean;
-headers?: PonyfillHeadersInit;
-type?: ResponseType;
+url?: string | undefined;
+redirected?: boolean | undefined;
+headers?: PonyfillHeadersInit | undefined;
+type?: ResponseType | undefined;
 };
 export declare class PonyfillResponse<TJSON = any> extends PonyfillBody<TJSON> implements Response {
 headers: Headers;
-constructor(body?: BodyPonyfillInit | null, init?: ResponsePonyfilInit);
+constructor(body?: BodyPonyfillInit | null | undefined, init?: ResponsePonyfilInit);
 get ok(): boolean;
 status: number;
 statusText: string;
package/typings/utils.d.cts
CHANGED
@@ -1,4 +1,4 @@
-import { Readable } from 'stream';
+import { Readable } from 'node:stream';
 export declare function getHeadersObj(headers: Headers): Record<string, string>;
 export declare function defaultHeadersSerializer(headers: Headers, onContentLength?: (value: string) => void): string[];
 export declare function fakePromise<T>(value: T): Promise<T>;
package/typings/utils.d.ts
CHANGED
@@ -1,4 +1,4 @@
-import { Readable } from 'stream';
+import { Readable } from 'node:stream';
 export declare function getHeadersObj(headers: Headers): Record<string, string>;
 export declare function defaultHeadersSerializer(headers: Headers, onContentLength?: (value: string) => void): string[];
 export declare function fakePromise<T>(value: T): Promise<T>;