@npy/fetch 0.1.3 → 0.1.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/_internal/consts.cjs +4 -0
- package/_internal/consts.d.cts +3 -0
- package/_internal/consts.d.ts +3 -0
- package/_internal/consts.js +4 -0
- package/_internal/decode-stream-error.cjs +18 -0
- package/{src/_internal/decode-stream-error.ts → _internal/decode-stream-error.d.cts} +2 -7
- package/_internal/decode-stream-error.d.ts +11 -0
- package/_internal/decode-stream-error.js +18 -0
- package/_internal/error-mapping.cjs +44 -0
- package/_internal/error-mapping.d.cts +15 -0
- package/_internal/error-mapping.d.ts +15 -0
- package/_internal/error-mapping.js +41 -0
- package/_internal/guards.cjs +23 -0
- package/_internal/guards.d.cts +15 -0
- package/_internal/guards.d.ts +15 -0
- package/_internal/guards.js +15 -0
- package/_internal/net.cjs +95 -0
- package/_internal/net.d.cts +11 -0
- package/_internal/net.d.ts +11 -0
- package/_internal/net.js +92 -0
- package/_internal/promises.cjs +18 -0
- package/_internal/promises.d.cts +1 -0
- package/_internal/promises.d.ts +1 -0
- package/_internal/promises.js +18 -0
- package/_internal/streams.cjs +37 -0
- package/_internal/streams.d.cts +21 -0
- package/_internal/streams.d.ts +21 -0
- package/_internal/streams.js +36 -0
- package/_internal/symbols.cjs +4 -0
- package/_internal/symbols.d.cts +1 -0
- package/_internal/symbols.d.ts +1 -0
- package/_internal/symbols.js +4 -0
- package/_virtual/_rolldown/runtime.cjs +23 -0
- package/agent-pool.cjs +96 -0
- package/agent-pool.d.cts +2 -0
- package/agent-pool.d.ts +2 -0
- package/agent-pool.js +95 -0
- package/agent.cjs +260 -0
- package/agent.d.cts +3 -0
- package/agent.d.ts +3 -0
- package/agent.js +259 -0
- package/body.cjs +105 -0
- package/body.d.cts +12 -0
- package/body.d.ts +12 -0
- package/body.js +102 -0
- package/dialers/index.d.cts +3 -0
- package/dialers/index.d.ts +3 -0
- package/dialers/proxy.cjs +56 -0
- package/dialers/proxy.d.cts +27 -0
- package/dialers/proxy.d.ts +27 -0
- package/dialers/proxy.js +55 -0
- package/dialers/tcp.cjs +92 -0
- package/dialers/tcp.d.cts +57 -0
- package/dialers/tcp.d.ts +57 -0
- package/dialers/tcp.js +89 -0
- package/encoding.cjs +114 -0
- package/encoding.d.cts +35 -0
- package/encoding.d.ts +35 -0
- package/encoding.js +110 -0
- package/errors.cjs +275 -0
- package/errors.d.cts +110 -0
- package/errors.d.ts +110 -0
- package/errors.js +259 -0
- package/fetch.cjs +353 -0
- package/fetch.d.cts +58 -0
- package/fetch.d.ts +58 -0
- package/fetch.js +350 -0
- package/http-client.cjs +75 -0
- package/http-client.d.cts +39 -0
- package/http-client.d.ts +39 -0
- package/http-client.js +75 -0
- package/index.cjs +49 -0
- package/index.d.cts +14 -0
- package/index.d.ts +14 -0
- package/index.js +11 -0
- package/io/_utils.cjs +56 -0
- package/io/_utils.d.cts +10 -0
- package/io/_utils.d.ts +10 -0
- package/io/_utils.js +51 -0
- package/io/buf-writer.cjs +149 -0
- package/io/buf-writer.d.cts +13 -0
- package/io/buf-writer.d.ts +13 -0
- package/io/buf-writer.js +148 -0
- package/io/io.cjs +199 -0
- package/io/io.d.cts +5 -0
- package/io/io.d.ts +5 -0
- package/io/io.js +198 -0
- package/io/readers.cjs +337 -0
- package/io/readers.d.cts +69 -0
- package/io/readers.d.ts +69 -0
- package/io/readers.js +333 -0
- package/io/writers.cjs +196 -0
- package/io/writers.d.cts +22 -0
- package/io/writers.d.ts +22 -0
- package/io/writers.js +195 -0
- package/package.json +30 -25
- package/{src/types/agent.ts → types/agent.d.cts} +21 -47
- package/types/agent.d.ts +72 -0
- package/{src/types/dialer.ts → types/dialer.d.cts} +9 -19
- package/types/dialer.d.ts +30 -0
- package/types/index.d.cts +2 -0
- package/types/index.d.ts +2 -0
- package/bun.lock +0 -68
- package/examples/custom-proxy-client.ts +0 -32
- package/examples/http-client.ts +0 -47
- package/examples/proxy.ts +0 -16
- package/examples/simple.ts +0 -15
- package/src/_internal/consts.ts +0 -3
- package/src/_internal/error-mapping.ts +0 -160
- package/src/_internal/guards.ts +0 -78
- package/src/_internal/net.ts +0 -173
- package/src/_internal/promises.ts +0 -22
- package/src/_internal/streams.ts +0 -52
- package/src/_internal/symbols.ts +0 -1
- package/src/agent-pool.ts +0 -157
- package/src/agent.ts +0 -408
- package/src/body.ts +0 -179
- package/src/dialers/index.ts +0 -3
- package/src/dialers/proxy.ts +0 -102
- package/src/dialers/tcp.ts +0 -162
- package/src/encoding.ts +0 -222
- package/src/errors.ts +0 -357
- package/src/fetch.ts +0 -626
- package/src/http-client.ts +0 -111
- package/src/index.ts +0 -14
- package/src/io/_utils.ts +0 -82
- package/src/io/buf-writer.ts +0 -183
- package/src/io/io.ts +0 -322
- package/src/io/readers.ts +0 -576
- package/src/io/writers.ts +0 -331
- package/src/types/index.ts +0 -2
- package/tests/agent-pool.test.ts +0 -111
- package/tests/agent.test.ts +0 -134
- package/tests/body.test.ts +0 -228
- package/tests/errors.test.ts +0 -152
- package/tests/fetch.test.ts +0 -421
- package/tests/io-options.test.ts +0 -127
- package/tests/multipart.test.ts +0 -348
- package/tests/test-utils.ts +0 -335
- package/tsconfig.json +0 -15
package/io/readers.js
ADDED
|
@@ -0,0 +1,333 @@
|
|
|
1
|
+
import { parseMaxBytes } from "./_utils.js";
|
|
2
|
+
import { CRLF_BYTES } from "../_internal/consts.js";
|
|
3
|
+
import { Bytes, DelimiterCodec, read } from "@fuman/io";
|
|
4
|
+
import { ConnectionClosedError } from "@fuman/net";
|
|
5
|
+
//#region src/io/readers.ts
|
|
6
|
+
// Matches any character that is not legal in an HTTP/1.1 header field value:
// everything except HTAB (\t), visible ASCII (0x20-0x7e) and obs-text (0x80-0xff).
var invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/g;
/**
 * Replace characters that are illegal in a header value with their
 * percent-encoded form (via `encodeURI`), leaving legal bytes untouched.
 * This prevents control bytes (e.g. CR/LF) from splitting the serialized message.
 */
function sanitizeHeaderValue(v) {
	return v.replace(invalidHeaderCharRegex, function (ch) {
		return encodeURI(ch);
	});
}
|
|
10
|
+
var LineReader = class {
	#src;
	#buf;
	// CRLF-delimited framing; the delimiter itself is discarded from frames
	#codec = new DelimiterCodec(CRLF_BYTES, { strategy: "discard" });
	#eof = false;
	#readChunkSize;
	#maxBufferedBytes;
	#maxLineSize;
	#closed = false;
	close;
	static Options;
	constructor(src, opts = {}) {
		this.#src = src;
		this.#buf = Bytes.alloc(opts.bufferSize);
		this.#readChunkSize = opts.readChunkSize ?? 16 * 1024;
		this.#maxBufferedBytes = opts.maxBufferedBytes ?? 256 * 1024;
		this.#maxLineSize = opts.maxLineSize ?? 64 * 1024;
		this.close = this.#close.bind(this);
	}
	/**
	 * Raw read: drains any bytes already buffered for line scanning before
	 * falling through to the underlying source. Returns 0 at EOF/after close.
	 * A ConnectionClosedError from the source is treated as EOF.
	 */
	async read(into) {
		if (this.#closed) return 0;
		if (this.#buf.available > 0) {
			const n = Math.min(into.length, this.#buf.available);
			into.set(this.#buf.readSync(n));
			this.#buf.reclaim();
			return n;
		}
		if (this.#eof) return 0;
		try {
			return await this.#src.read(into);
		} catch (e) {
			if (e instanceof ConnectionClosedError) {
				this.#eof = true;
				return 0;
			}
			throw e;
		}
	}
	/**
	 * Read one CRLF-terminated line (without the terminator), or null at EOF.
	 * Throws when a single line (#maxLineSize) or total buffered data
	 * (#maxBufferedBytes) exceeds its limit.
	 */
	async readLine() {
		if (this.#closed) return null;
		for (;;) {
			const frame = this.#codec.decode(this.#buf, this.#eof);
			if (frame !== null) {
				this.#buf.reclaim();
				return read.rawString(frame, frame.length);
			}
			if (this.#eof) return null;
			if (this.#buf.available > this.#maxLineSize) throw new Error(`line too large (> ${this.#maxLineSize} bytes)`);
			if (this.#buf.available > this.#maxBufferedBytes) throw new Error(`buffer too large while searching for delimiter (> ${this.#maxBufferedBytes} bytes)`);
			await this.#pull();
		}
	}
	/**
	 * Parse an HTTP/1.1 header block up to (and consuming) the empty line.
	 * Supports obs-fold continuation lines (leading SP or HTAB, RFC 9112),
	 * sanitizes values, and silently drops names/values that Headers rejects.
	 */
	async readHeaders(opts = {}) {
		const maxHeaderSize = opts.maxHeaderSize ?? 64 * 1024;
		const acc = /* @__PURE__ */ new Map();
		const validator = new Headers();
		let lastKey = null;
		let firstLine = true;
		let consumed = 0;
		for (;;) {
			const line = await this.readLine();
			if (line === null) throw new Error("Unexpected EOF while reading HTTP headers");
			consumed += line.length + 2;
			if (consumed > maxHeaderSize) throw new Error(`HTTP headers too large (> ${maxHeaderSize} bytes)`);
			if (line === "") break;
			// FIX: continuation prefix is SP *or* HTAB; original compared " " twice,
			// leaving tab-folded lines unrecognized.
			if (firstLine && (line[0] === " " || line[0] === "\t")) throw new Error(`malformed HTTP header initial line: ${line}`);
			firstLine = false;
			if (line[0] === " " || line[0] === "\t") {
				if (!lastKey) throw new Error(`malformed HTTP header continuation line: ${line}`);
				const arr = acc.get(lastKey);
				if (!arr || arr.length === 0) throw new Error(`malformed HTTP header continuation line: ${line}`);
				// fold the continuation onto the previous value with a single space
				const piece = sanitizeHeaderValue(line.trim());
				arr[arr.length - 1] = `${arr[arr.length - 1]} ${piece}`.trim();
				continue;
			}
			const idx = line.indexOf(":");
			if (idx === -1) throw new Error(`malformed HTTP header line: ${line}`);
			const rawName = line.slice(0, idx).trim();
			if (rawName === "") {
				lastKey = null;
				continue;
			}
			const name = rawName.toLowerCase();
			const value = sanitizeHeaderValue(line.slice(idx + 1).trim());
			try {
				// probe with a real Headers instance so invalid names/values are skipped
				validator.append(name, value);
			} catch {
				lastKey = null;
				continue;
			}
			const arr = acc.get(name);
			if (arr) arr.push(value);
			else acc.set(name, [value]);
			lastKey = name;
		}
		const headers = new Headers();
		for (const [k, values] of acc) for (const v of values) try {
			headers.append(k, v);
		} catch {}
		return headers;
	}
	// Fill the scan buffer with up to #readChunkSize bytes from the source.
	// ConnectionClosedError is mapped to EOF; other errors propagate.
	async #pull() {
		const into = this.#buf.writeSync(this.#readChunkSize);
		try {
			const n = await this.#src.read(into);
			this.#buf.disposeWriteSync(n);
			if (n === 0) this.#eof = true;
		} catch (e) {
			this.#buf.disposeWriteSync(0);
			if (e instanceof ConnectionClosedError) {
				this.#eof = true;
				return;
			}
			throw e;
		} finally {
			this.#buf.reclaim();
		}
	}
	// Idempotent close; also closes the underlying source.
	async #close() {
		if (this.#closed) return;
		this.#closed = true;
		await this.#src.close();
	}
};
|
|
134
|
+
/**
 * Reader for a fixed-length (content-length delimited) or read-until-EOF
 * response body, with an optional total-size cap (maxBodySize).
 * `contentLength` may be null, meaning "read until the connection ends".
 */
var BodyReader = class {
	#src;
	#remaining;
	#maxResponseSize;
	#readSoFar = 0;
	#closed = false;
	close;
	static Options;
	constructor(src, contentLength, opts = {}) {
		this.#src = src;
		this.#remaining = contentLength;
		this.#maxResponseSize = parseMaxBytes(opts.maxBodySize);
		this.close = this.#close.bind(this);
	}
	/** Read body bytes into `into`; returns 0 once the body is fully consumed. */
	async read(into) {
		if (this.#closed) return 0;
		if (this.#remaining === 0) return 0;
		let target = into;
		const limit = this.#maxResponseSize;
		if (limit != null) {
			// enforce the overall body-size budget before touching the wire
			const budget = limit - this.#readSoFar;
			if (budget <= 0) throw new Error(`body too large (> ${limit} bytes)`);
			if (target.length > budget) target = target.subarray(0, budget);
		}
		const cap = this.#remaining == null ? target.length : Math.min(target.length, this.#remaining);
		if (cap === 0) return 0;
		const dest = cap === target.length ? target : target.subarray(0, cap);
		let got = 0;
		try {
			got = await this.#src.read(dest);
		} catch (e) {
			// a closed connection is only legal when the length is unknown
			if (!(e instanceof ConnectionClosedError)) throw e;
			got = 0;
		}
		if (got === 0) {
			if (this.#remaining != null) throw new Error("Unexpected EOF while reading fixed-length body");
			return 0;
		}
		this.#readSoFar += got;
		if (this.#remaining != null) this.#remaining -= got;
		return got;
	}
	// Idempotent close; also closes the underlying source.
	async #close() {
		if (this.#closed) return;
		this.#closed = true;
		await this.#src.close();
	}
};
|
|
181
|
+
/**
 * Reader for a `Transfer-Encoding: chunked` response body (RFC 9112 §7.1).
 * State machine: size -> data -> crlf -> size ... until a 0-size chunk,
 * then trailers until the empty line, then done.
 */
var ChunkedBodyReader = class {
	#src;
	#buf;
	// CRLF-delimited framing for size/trailer lines; delimiter discarded
	#codec = new DelimiterCodec(CRLF_BYTES, { strategy: "discard" });
	#readChunkSize;
	#maxLineSize;
	#maxChunkSize;
	#maxResponseSize;
	#readSoFar = 0;
	#eof = false;
	#closed = false;
	#state = { kind: "size" };
	close;
	static Options;
	constructor(src, opts = {}) {
		this.#src = src;
		this.#buf = Bytes.alloc(opts.bufferSize);
		this.#readChunkSize = opts.readChunkSize ?? 16 * 1024;
		this.#maxLineSize = opts.maxLineSize ?? 64 * 1024;
		this.#maxChunkSize = opts.maxChunkSize ?? 16 * 1024 * 1024;
		this.#maxResponseSize = parseMaxBytes(opts.maxBodySize);
		this.close = this.#close.bind(this);
	}
	/** Read decoded body bytes into `into`; returns 0 once the body is done. */
	async read(into) {
		if (this.#closed) return 0;
		for (;;) {
			if (this.#state.kind === "done") return 0;
			let view = into;
			if (this.#maxResponseSize != null) {
				// enforce the overall body-size budget before any wire read
				const remainingLimit = this.#maxResponseSize - this.#readSoFar;
				if (remainingLimit <= 0) throw new Error(`body too large (> ${this.#maxResponseSize} bytes)`);
				if (view.length > remainingLimit) view = view.subarray(0, remainingLimit);
			}
			if (view.length === 0) return 0;
			if (this.#state.kind === "data") {
				if (this.#state.remaining === 0) {
					this.#state = { kind: "crlf" };
					continue;
				}
				if (this.#buf.available > 0) {
					// serve chunk bytes already sitting in the line-scan buffer
					const n = Math.min(view.length, this.#state.remaining, this.#buf.available);
					view.set(this.#buf.readSync(n));
					this.#buf.reclaim();
					this.#readSoFar += n;
					this.#state = {
						kind: "data",
						remaining: this.#state.remaining - n
					};
					return n;
				}
				const max = Math.min(view.length, this.#state.remaining);
				const slice = max === view.length ? view : view.subarray(0, max);
				const n = await this.#readFromSrc(slice);
				if (n === 0) throw new Error("Unexpected EOF while reading chunked body");
				this.#readSoFar += n;
				this.#state = {
					kind: "data",
					remaining: this.#state.remaining - n
				};
				return n;
			}
			if (this.#state.kind === "size") {
				const line = await this.#readLine();
				if (line === null) throw new Error("Unexpected EOF while reading chunk size");
				// drop any chunk extensions after ';'
				const semi = line.indexOf(";");
				const token = (semi === -1 ? line : line.slice(0, semi)).trim();
				if (token === "") throw new Error(`invalid chunk size line: ${line}`);
				// FIX: RFC 9112 requires chunk-size = 1*HEXDIG. Bare parseInt
				// silently tolerates "0x" prefixes, signs and trailing garbage
				// (parseInt("5zz", 16) === 5), a request-smuggling hazard.
				if (!/^[0-9a-fA-F]+$/.test(token)) throw new Error(`invalid chunk size: ${token}`);
				const size = Number.parseInt(token, 16);
				if (!Number.isFinite(size) || Number.isNaN(size) || size < 0) throw new Error(`invalid chunk size: ${token}`);
				if (size > this.#maxChunkSize) throw new Error(`chunk too large (> ${this.#maxChunkSize} bytes)`);
				if (this.#maxResponseSize != null) {
					if (size > this.#maxResponseSize - this.#readSoFar) throw new Error(`body too large (> ${this.#maxResponseSize} bytes)`);
				}
				// a 0-size chunk terminates the body; trailers follow
				this.#state = size === 0 ? { kind: "trailers" } : {
					kind: "data",
					remaining: size
				};
				continue;
			}
			if (this.#state.kind === "crlf") {
				await this.#consumeCrlf();
				this.#state = { kind: "size" };
				continue;
			}
			// trailers are read and discarded up to the terminating empty line
			if (this.#state.kind === "trailers") for (;;) {
				const line = await this.#readLine();
				if (line === null) throw new Error("Unexpected EOF while reading chunked trailers");
				if (line === "") {
					this.#state = { kind: "done" };
					return 0;
				}
			}
		}
	}
	// Read from the source, mapping ConnectionClosedError (and 0-reads) to EOF.
	async #readFromSrc(into) {
		if (this.#eof) return 0;
		try {
			const n = await this.#src.read(into);
			if (n === 0) this.#eof = true;
			return n;
		} catch (e) {
			if (e instanceof ConnectionClosedError) {
				this.#eof = true;
				return 0;
			}
			throw e;
		}
	}
	// Fill the scan buffer with up to #readChunkSize bytes from the source.
	async #pull() {
		const into = this.#buf.writeSync(this.#readChunkSize);
		try {
			const n = await this.#readFromSrc(into);
			this.#buf.disposeWriteSync(n);
		} catch (e) {
			this.#buf.disposeWriteSync(0);
			if (e instanceof ConnectionClosedError) {
				this.#eof = true;
				return;
			}
			throw e;
		} finally {
			this.#buf.reclaim();
		}
	}
	// Read one CRLF-terminated line (without the terminator), or null at EOF.
	async #readLine() {
		for (;;) {
			const frame = this.#codec.decode(this.#buf, this.#eof);
			if (frame !== null) {
				this.#buf.reclaim();
				return read.rawString(frame, frame.length);
			}
			if (this.#eof) return null;
			if (this.#buf.available > this.#maxLineSize) throw new Error(`chunk line too large (> ${this.#maxLineSize} bytes)`);
			await this.#pull();
		}
	}
	// Consume and verify the CRLF that must follow each chunk's data.
	async #consumeCrlf() {
		while (this.#buf.available < 2) {
			if (this.#eof) throw new Error("Unexpected EOF while reading chunk terminator");
			await this.#pull();
		}
		const two = this.#buf.readSync(2);
		this.#buf.reclaim();
		if (two[0] !== CRLF_BYTES[0] || two[1] !== CRLF_BYTES[1]) throw new Error("Invalid chunked encoding: missing CRLF after chunk data");
	}
	// Idempotent close; also closes the underlying source.
	async #close() {
		if (this.#closed) return;
		this.#closed = true;
		await this.#src.close();
	}
};
|
|
332
|
+
//#endregion
|
|
333
|
+
export { BodyReader, ChunkedBodyReader, LineReader, sanitizeHeaderValue };
|
package/io/writers.cjs
ADDED
|
@@ -0,0 +1,196 @@
|
|
|
1
|
+
require("../_virtual/_rolldown/runtime.cjs");
|
|
2
|
+
const require_streams = require("../_internal/streams.cjs");
|
|
3
|
+
const require_encoding = require("../encoding.cjs");
|
|
4
|
+
const require__utils = require("./_utils.cjs");
|
|
5
|
+
const require_readers = require("./readers.cjs");
|
|
6
|
+
const require_guards = require("../_internal/guards.cjs");
|
|
7
|
+
const require_body = require("../body.cjs");
|
|
8
|
+
const require_buf_writer = require("./buf-writer.cjs");
|
|
9
|
+
let _fuman_node = require("@fuman/node");
|
|
10
|
+
let _fuman_io = require("@fuman/io");
|
|
11
|
+
//#region src/io/writers.ts
|
|
12
|
+
/**
 * Build the HTTP/1.1 request-target (origin-form) from a URL: the absolute
 * path (falling back to "/" when absent or not rooted) plus the query string.
 */
function toRequestTarget(url) {
	let path = "/";
	if (url.pathname?.startsWith("/")) path = url.pathname;
	const query = url.search;
	return query ? path + query : path;
}
|
|
15
|
+
/**
 * Serialize the request line and header fields (RFC 9112 format) into `into`,
 * terminating the head with an empty CRLF line. Header values are sanitized
 * so illegal control bytes cannot split the serialized message.
 */
function encodeHead(into, head) {
	const requestLine = head.method.toUpperCase() + " " + head.target + " HTTP/1.1\r\n";
	_fuman_io.write.rawString(into, requestLine);
	for (const [name, value] of head.headers) {
		const safe = require_readers.sanitizeHeaderValue(value);
		_fuman_io.write.rawString(into, `${name}: ${safe}\r\n`);
	}
	_fuman_io.write.rawString(into, "\r\n");
}
|
|
22
|
+
/**
 * Normalize a user-supplied body into one of three wire shapes:
 * "none", "bytes" (fully buffered Uint8Array) or "stream" (web ReadableStream).
 * When the body implies a content type and the caller supplied none, a default
 * content-type header is set (falling back to application/octet-stream).
 */
function prepareBody(headers, init) {
	const state = require_body.extractBody(init);
	if (state.body != null && !headers.has("content-type")) {
		headers.set("content-type", state.contentType ?? "application/octet-stream");
	}
	const body = state.body;
	if (body == null) return { kind: "none" };
	if (body instanceof Uint8Array) {
		return {
			kind: "bytes",
			bytes: body,
			length: body.byteLength
		};
	}
	// everything else is adapted to a web ReadableStream
	let stream;
	if (require_guards.isReadableStream(body)) stream = body;
	else if (require_guards.isFumanReadable(body)) stream = (0, _fuman_io.fumanReadableToWeb)(body);
	else stream = (0, _fuman_node.nodeReadableToWeb)(body);
	return {
		kind: "stream",
		stream,
		length: state.contentLength
	};
}
|
|
48
|
+
/**
 * Decide how the request body is delimited on the wire (RFC 9112 §6): either
 * a content-length header or chunked transfer-encoding. Mutates `headers`
 * to make them consistent with the chosen framing, and throws when an
 * explicit content-length conflicts with the actual body length.
 */
function finalizeDelimitation(headers, body) {
	if (body.kind === "none") return { chunked: false };
	const te = require__utils.parseTransferEncoding(headers);
	if (te.has) {
		// transfer-encoding and content-length must never coexist
		headers.delete("content-length");
		if (!te.chunked) {
			// chunked must be the final applied coding
			const tokens = [...te.codings, "chunked"].filter(Boolean);
			headers.set("transfer-encoding", tokens.join(", "));
		}
		return { chunked: true };
	}
	const knownLength = body.kind === "bytes" ? body.length : body.length ?? null;
	if (knownLength == null) {
		// unknown length: trust an explicit content-length header, else chunk
		if (require__utils.parseContentLength(headers) != null) return { chunked: false };
		headers.set("transfer-encoding", "chunked");
		headers.delete("content-length");
		return { chunked: true };
	}
	const existing = require__utils.parseContentLength(headers);
	if (existing != null && existing !== knownLength) {
		throw new Error(`Conflicting content-length: header=${existing} body=${knownLength}`);
	}
	if (existing == null) headers.set("content-length", String(knownLength));
	return { chunked: false };
}
|
|
71
|
+
/**
 * Wrap a connection in a small write buffer. Small writes are coalesced in a
 * BufWriter; payloads at or above `directWriteThreshold` bypass the buffer
 * (after flushing it first, to preserve byte order) and go straight to `dst`.
 */
function createBufferedConnWriter(dst, opts) {
	const bufferSize = opts.writeBufferSize ?? 16 * 1024;
	const directWriteThreshold = opts.directWriteThreshold ?? 64 * 1024;
	const bufWriter = new require_buf_writer.BufWriter(dst, bufferSize);
	return {
		async flush() {
			await bufWriter.flush();
		},
		async writeBytes(bytes) {
			if (bytes.length === 0) return;
			if (bytes.length < directWriteThreshold) {
				await bufWriter.write(bytes);
				return;
			}
			// large payload: drain buffered bytes first, then write directly
			await bufWriter.flush();
			await dst.write(bytes);
		},
		async writeRawString(str) {
			if (str.length === 0) return;
			_fuman_io.write.rawString(bufWriter, str);
		},
		directWriteThreshold
	};
}
|
|
98
|
+
/**
 * Stream the prepared body to the connection, optionally applying chunked
 * framing. The abort signal is checked before every chunk, and the buffered
 * writer is flushed once the whole body (and the terminal 0-chunk, when
 * chunked) has been emitted.
 */
async function writeBody(dst, body, chunked, opts, signal) {
	const bw = createBufferedConnWriter(dst, opts);
	const emit = async (chunk) => {
		if (signal?.aborted) throw signal.reason ?? new Error("Request aborted");
		if (!chunked) {
			await bw.writeBytes(chunk);
			return;
		}
		// chunked framing: an empty chunk would read as the body terminator
		if (chunk.length === 0) return;
		await bw.writeRawString(chunk.length.toString(16));
		await bw.writeRawString("\r\n");
		await bw.writeBytes(chunk);
		await bw.writeRawString("\r\n");
	};
	if (body.kind === "bytes") {
		await emit(body.bytes);
	} else {
		for await (const chunk of body.stream) await emit(chunk);
	}
	if (chunked) await bw.writeRawString("0\r\n\r\n");
	await bw.flush();
}
|
|
119
|
+
/**
 * Fast path for small byte bodies: encode the head plus the body (and, when
 * chunked, the full chunk framing including the terminal 0-chunk) into one
 * scratch buffer and hand it to the connection as a single write.
 */
async function writeCoalesced(dst, scratch, head, bodyBytes, chunked) {
	scratch.reset();
	encodeHead(scratch, head);
	if (chunked) {
		_fuman_io.write.rawString(scratch, bodyBytes.length.toString(16));
		_fuman_io.write.rawString(scratch, "\r\n");
		_fuman_io.write.bytes(scratch, bodyBytes);
		_fuman_io.write.rawString(scratch, "\r\n0\r\n\r\n");
	} else {
		_fuman_io.write.bytes(scratch, bodyBytes);
	}
	await dst.write(scratch.result());
	scratch.reset();
}
|
|
138
|
+
/**
 * Create a request writer bound to a destination connection.
 * The returned `write(req)` serializes one HTTP/1.1 request:
 *   1. defaults host/date headers from the URL and clock;
 *   2. applies content-encoding, then transfer-encoding codings to the body
 *      (each turns the body into a stream of unknown length);
 *   3. finalizes delimitation (content-length vs chunked);
 *   4. coalesces small byte bodies with the head into one write, otherwise
 *      writes head then body separately.
 * Throws `req.signal.reason` (or a generic Error) when the signal is aborted.
 */
function createRequestWriter(dst, opts = {}) {
	// reusable scratch buffer for serializing the head / coalesced messages
	const scratch = _fuman_io.Bytes.alloc(opts.writeBufferSize ?? 16 * 1024);
	const write = async (req) => {
		if (req.signal?.aborted) throw req.signal.reason ?? /* @__PURE__ */ new Error("Request aborted");
		const method = req.method.toUpperCase();
		// copy caller headers so mutations below do not leak back
		const headers = req.headers ? new Headers(req.headers) : new Headers();
		const url = req.url;
		if (!headers.has("host")) headers.set("host", url.host);
		if (!headers.has("date")) headers.set("date", (/* @__PURE__ */ new Date()).toUTCString());
		const target = toRequestTarget(url);
		let body = prepareBody(headers, req.body ?? null);
		// apply content-encoding codings declared by the caller, if any
		const ceRaw = headers.get("content-encoding") ?? void 0;
		const ceEncoders = require_encoding.createEncoders(ceRaw);
		if (body.kind !== "none" && ceEncoders.length > 0) {
			body = {
				kind: "stream",
				stream: require_encoding.encodeStream(body.kind === "stream" ? body.stream : require_streams.bytesToStream(body.bytes), ceRaw),
				length: null
			};
			// encoded length is unknown; stale content-length would be wrong
			headers.delete("content-length");
		}
		// then apply any non-chunked transfer-encoding codings
		const teInfo = require__utils.parseTransferEncoding(headers);
		if (body.kind !== "none" && teInfo.has && teInfo.codings.length > 0) {
			body = {
				kind: "stream",
				stream: require_encoding.encodeStream(body.kind === "stream" ? body.stream : require_streams.bytesToStream(body.bytes), teInfo.codings),
				length: null
			};
			headers.delete("content-length");
		}
		const { chunked } = finalizeDelimitation(headers, body);
		const head = {
			method,
			target,
			headers
		};
		// small byte bodies: single coalesced write of head + body
		if (body.kind === "bytes") {
			const max = opts.coalesceBodyMaxBytes ?? 64 * 1024;
			if (body.bytes.length <= max) {
				await writeCoalesced(dst, scratch, head, body.bytes, chunked);
				return;
			}
		}
		// otherwise: write the head first ...
		scratch.reset();
		encodeHead(scratch, head);
		await dst.write(scratch.result());
		scratch.reset();
		if (body.kind === "none") return;
		// ... then large non-chunked byte bodies in one direct write ...
		if (body.kind === "bytes" && !chunked) {
			if (req.signal?.aborted) throw req.signal.reason ?? /* @__PURE__ */ new Error("Request aborted");
			await dst.write(body.bytes);
			return;
		}
		// ... and everything else through the buffered body writer
		await writeBody(dst, body, chunked, opts, req.signal);
	};
	return { write };
}
|
|
195
|
+
//#endregion
|
|
196
|
+
exports.createRequestWriter = createRequestWriter;
|
package/io/writers.d.cts
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
import { IWritable } from '@fuman/io';
import { BodyInit } from '../body';
/** Anything the serialized request bytes can be written to (e.g. a connection). */
type Destination = IWritable;
export declare namespace Writers {
	/** Tuning knobs for request serialization. */
	interface Options {
		/** Size of the coalescing write buffer, in bytes. */
		writeBufferSize?: number;
		/** Payloads at/above this size bypass the buffer and write directly. */
		directWriteThreshold?: number;
		/** Max byte-body size that is coalesced with the head into one write. */
		coalesceBodyMaxBytes?: number;
	}
	/** A single HTTP request to serialize. */
	interface Request {
		url: URL;
		method: string;
		headers?: Headers;
		body?: BodyInit | null;
		/** When aborted, serialization throws between writes/chunks. */
		signal?: AbortSignal;
	}
	/** Serializes requests onto the bound destination. */
	interface Writer {
		write(req: Request): Promise<void>;
	}
}
/** Create a Writer that serializes HTTP/1.1 requests onto `dst`. */
export declare function createRequestWriter(dst: Destination, opts?: Writers.Options): Writers.Writer;
export {};
|
package/io/writers.d.ts
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
import { IWritable } from '@fuman/io';
import { BodyInit } from '../body';
/** Anything the serialized request bytes can be written to (e.g. a connection). */
type Destination = IWritable;
export declare namespace Writers {
	/** Tuning knobs for request serialization. */
	interface Options {
		/** Size of the coalescing write buffer, in bytes. */
		writeBufferSize?: number;
		/** Payloads at/above this size bypass the buffer and write directly. */
		directWriteThreshold?: number;
		/** Max byte-body size that is coalesced with the head into one write. */
		coalesceBodyMaxBytes?: number;
	}
	/** A single HTTP request to serialize. */
	interface Request {
		url: URL;
		method: string;
		headers?: Headers;
		body?: BodyInit | null;
		/** When aborted, serialization throws between writes/chunks. */
		signal?: AbortSignal;
	}
	/** Serializes requests onto the bound destination. */
	interface Writer {
		write(req: Request): Promise<void>;
	}
}
/** Create a Writer that serializes HTTP/1.1 requests onto `dst`. */
export declare function createRequestWriter(dst: Destination, opts?: Writers.Options): Writers.Writer;
export {};
|