@trpc/server 11.0.0-next-beta.206 → 11.0.0-next-beta.216

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (56)
  1. package/dist/adapters/aws-lambda/index.js +14 -105
  2. package/dist/adapters/aws-lambda/index.mjs +1 -90
  3. package/dist/adapters/aws-lambda/utils.js +100 -0
  4. package/dist/adapters/aws-lambda/utils.mjs +93 -0
  5. package/dist/adapters/express.js +1 -7
  6. package/dist/adapters/express.mjs +1 -5
  7. package/dist/adapters/fastify/fastifyRequestHandler.js +81 -0
  8. package/dist/adapters/fastify/fastifyRequestHandler.mjs +79 -0
  9. package/dist/adapters/fastify/fastifyTRPCPlugin.js +51 -0
  10. package/dist/adapters/fastify/fastifyTRPCPlugin.mjs +49 -0
  11. package/dist/adapters/fastify/index.js +4 -128
  12. package/dist/adapters/fastify/index.mjs +2 -128
  13. package/dist/adapters/fetch/fetchRequestHandler.js +118 -0
  14. package/dist/adapters/fetch/fetchRequestHandler.mjs +116 -0
  15. package/dist/adapters/fetch/index.js +2 -115
  16. package/dist/adapters/fetch/index.mjs +1 -116
  17. package/dist/adapters/next.js +1 -6
  18. package/dist/adapters/next.mjs +1 -4
  19. package/dist/adapters/node-http/content-type/form-data/fileUploadHandler.js +161 -0
  20. package/dist/adapters/node-http/content-type/form-data/fileUploadHandler.mjs +157 -0
  21. package/dist/adapters/node-http/content-type/form-data/index.js +20 -646
  22. package/dist/adapters/node-http/content-type/form-data/index.mjs +9 -631
  23. package/dist/adapters/node-http/content-type/form-data/memoryUploadHandler.js +29 -0
  24. package/dist/adapters/node-http/content-type/form-data/memoryUploadHandler.mjs +27 -0
  25. package/dist/adapters/node-http/content-type/form-data/streamSlice.js +46 -0
  26. package/dist/adapters/node-http/content-type/form-data/streamSlice.mjs +44 -0
  27. package/dist/adapters/node-http/content-type/form-data/uploadHandler.js +30 -0
  28. package/dist/adapters/node-http/content-type/form-data/uploadHandler.mjs +26 -0
  29. package/dist/adapters/node-http/content-type/json/getPostBody.js +42 -0
  30. package/dist/adapters/node-http/content-type/json/getPostBody.mjs +40 -0
  31. package/dist/adapters/node-http/content-type/json/index.js +3 -42
  32. package/dist/adapters/node-http/content-type/json/index.mjs +2 -39
  33. package/dist/adapters/node-http/index.js +1 -7
  34. package/dist/adapters/node-http/index.mjs +1 -5
  35. package/dist/{contentType-72ed9df5.mjs → adapters/node-http/internals/contentType.mjs} +1 -1
  36. package/dist/{nodeHTTPRequestHandler-83441c73.js → adapters/node-http/nodeHTTPRequestHandler.js} +2 -2
  37. package/dist/{nodeHTTPRequestHandler-0223fac5.mjs → adapters/node-http/nodeHTTPRequestHandler.mjs} +2 -2
  38. package/dist/adapters/standalone.js +2 -12
  39. package/dist/adapters/standalone.mjs +1 -5
  40. package/dist/adapters/ws.js +0 -2
  41. package/dist/bundle-analysis.json +97 -97
  42. package/dist/http.js +1 -3
  43. package/dist/index.js +10 -12
  44. package/dist/node_modules/.pnpm/@web3-storage_multipart-parser@1.0.0/node_modules/@web3-storage/multipart-parser/esm/src/index.js +203 -0
  45. package/dist/node_modules/.pnpm/@web3-storage_multipart-parser@1.0.0/node_modules/@web3-storage/multipart-parser/esm/src/index.mjs +201 -0
  46. package/dist/node_modules/.pnpm/@web3-storage_multipart-parser@1.0.0/node_modules/@web3-storage/multipart-parser/esm/src/search.js +167 -0
  47. package/dist/node_modules/.pnpm/@web3-storage_multipart-parser@1.0.0/node_modules/@web3-storage/multipart-parser/esm/src/search.mjs +163 -0
  48. package/dist/node_modules/.pnpm/@web3-storage_multipart-parser@1.0.0/node_modules/@web3-storage/multipart-parser/esm/src/utils.js +35 -0
  49. package/dist/node_modules/.pnpm/@web3-storage_multipart-parser@1.0.0/node_modules/@web3-storage/multipart-parser/esm/src/utils.mjs +30 -0
  50. package/dist/observable.js +1 -3
  51. package/dist/rpc.js +1 -3
  52. package/dist/shared.js +2 -4
  53. package/package.json +4 -4
  54. package/dist/contentType-24c44bba.js +0 -5
  55. package/dist/nodeHTTPRequestHandler-aa0dce4e.js +0 -105
  56. package/dist/{contentType-d9d22104.js → adapters/node-http/internals/contentType.js} +0 -0
@@ -1,634 +1,12 @@
1
1
  import * as fs from 'node:fs/promises';
2
- import { mkdir, rm, unlink, stat } from 'node:fs/promises';
3
- import { Transform, finished, Readable } from 'node:stream';
4
- import { c as createNodeHTTPContentTypeHandler } from '../../../../contentType-72ed9df5.mjs';
5
- import { randomBytes } from 'node:crypto';
6
- import { createWriteStream, statSync, createReadStream } from 'node:fs';
7
- import { tmpdir } from 'node:os';
8
- import { resolve, dirname, basename, extname } from 'node:path';
9
- import { promisify } from 'node:util';
10
-
11
- function stringToArray(s) {
12
- const utf8 = unescape(encodeURIComponent(s));
13
- return Uint8Array.from(utf8, (_, i) => utf8.charCodeAt(i));
14
- }
15
- function arrayToString(a) {
16
- const utf8 = String.fromCharCode.apply(null, a);
17
- return decodeURIComponent(escape(utf8));
18
- }
19
- function mergeArrays(...arrays) {
20
- const out = new Uint8Array(arrays.reduce((total, arr) => total + arr.length, 0));
21
- let offset = 0;
22
- for (const arr of arrays) {
23
- out.set(arr, offset);
24
- offset += arr.length;
25
- }
26
- return out;
27
- }
28
- function arraysEqual(a, b) {
29
- if (a.length !== b.length) {
30
- return false;
31
- }
32
- for (let i = 0; i < a.length; i++) {
33
- if (a[i] !== b[i]) {
34
- return false;
35
- }
36
- }
37
- return true;
38
- }
39
-
40
- function coerce(a) {
41
- if (a instanceof Uint8Array) {
42
- return index => a[index];
43
- }
44
- return a;
45
- }
46
- function jsmemcmp(buf1, pos1, buf2, pos2, len) {
47
- const fn1 = coerce(buf1);
48
- const fn2 = coerce(buf2);
49
- for (let i = 0; i < len; ++i) {
50
- if (fn1(pos1 + i) !== fn2(pos2 + i)) {
51
- return false;
52
- }
53
- }
54
- return true;
55
- }
56
- function createOccurenceTable(s) {
57
- const table = new Array(256).fill(s.length);
58
- if (s.length > 1) {
59
- for (let i = 0; i < s.length - 1; i++) {
60
- table[s[i]] = s.length - 1 - i;
61
- }
62
- }
63
- return table;
64
- }
65
- const MATCH = Symbol('Match');
66
- class StreamSearch {
67
- constructor(needle) {
68
- this._lookbehind = new Uint8Array();
69
- if (typeof needle === 'string') {
70
- this._needle = needle = stringToArray(needle);
71
- } else {
72
- this._needle = needle;
73
- }
74
- this._lastChar = needle[needle.length - 1];
75
- this._occ = createOccurenceTable(needle);
76
- }
77
- feed(chunk) {
78
- let pos = 0;
79
- let tokens;
80
- const allTokens = [];
81
- while (pos !== chunk.length) {
82
- [pos, ...tokens] = this._feed(chunk, pos);
83
- allTokens.push(...tokens);
84
- }
85
- return allTokens;
86
- }
87
- end() {
88
- const tail = this._lookbehind;
89
- this._lookbehind = new Uint8Array();
90
- return tail;
91
- }
92
- _feed(data, bufPos) {
93
- const tokens = [];
94
- let pos = -this._lookbehind.length;
95
- if (pos < 0) {
96
- while (pos < 0 && pos <= data.length - this._needle.length) {
97
- const ch = this._charAt(data, pos + this._needle.length - 1);
98
- if (ch === this._lastChar && this._memcmp(data, pos, this._needle.length - 1)) {
99
- if (pos > -this._lookbehind.length) {
100
- tokens.push(this._lookbehind.slice(0, this._lookbehind.length + pos));
101
- }
102
- tokens.push(MATCH);
103
- this._lookbehind = new Uint8Array();
104
- return [
105
- pos + this._needle.length,
106
- ...tokens
107
- ];
108
- } else {
109
- pos += this._occ[ch];
110
- }
111
- }
112
- if (pos < 0) {
113
- while (pos < 0 && !this._memcmp(data, pos, data.length - pos)) {
114
- pos++;
115
- }
116
- }
117
- if (pos >= 0) {
118
- tokens.push(this._lookbehind);
119
- this._lookbehind = new Uint8Array();
120
- } else {
121
- const bytesToCutOff = this._lookbehind.length + pos;
122
- if (bytesToCutOff > 0) {
123
- tokens.push(this._lookbehind.slice(0, bytesToCutOff));
124
- this._lookbehind = this._lookbehind.slice(bytesToCutOff);
125
- }
126
- this._lookbehind = Uint8Array.from(new Array(this._lookbehind.length + data.length), (_, i) => this._charAt(data, i - this._lookbehind.length));
127
- return [
128
- data.length,
129
- ...tokens
130
- ];
131
- }
132
- }
133
- pos += bufPos;
134
- while (pos <= data.length - this._needle.length) {
135
- const ch = data[pos + this._needle.length - 1];
136
- if (ch === this._lastChar && data[pos] === this._needle[0] && jsmemcmp(this._needle, 0, data, pos, this._needle.length - 1)) {
137
- if (pos > bufPos) {
138
- tokens.push(data.slice(bufPos, pos));
139
- }
140
- tokens.push(MATCH);
141
- return [
142
- pos + this._needle.length,
143
- ...tokens
144
- ];
145
- } else {
146
- pos += this._occ[ch];
147
- }
148
- }
149
- if (pos < data.length) {
150
- while (pos < data.length && (data[pos] !== this._needle[0] || !jsmemcmp(data, pos, this._needle, 0, data.length - pos))) {
151
- ++pos;
152
- }
153
- if (pos < data.length) {
154
- this._lookbehind = data.slice(pos);
155
- }
156
- }
157
- if (pos > 0) {
158
- tokens.push(data.slice(bufPos, pos < data.length ? pos : data.length));
159
- }
160
- return [
161
- data.length,
162
- ...tokens
163
- ];
164
- }
165
- _charAt(data, pos) {
166
- if (pos < 0) {
167
- return this._lookbehind[this._lookbehind.length + pos];
168
- }
169
- return data[pos];
170
- }
171
- _memcmp(data, pos, len) {
172
- return jsmemcmp(this._charAt.bind(this, data), pos, this._needle, 0, len);
173
- }
174
- }
175
- class ReadableStreamSearch {
176
- constructor(needle, _readableStream) {
177
- this._readableStream = _readableStream;
178
- this._search = new StreamSearch(needle);
179
- }
180
- async *[Symbol.asyncIterator]() {
181
- const reader = this._readableStream.getReader();
182
- try {
183
- while (true) {
184
- const result = await reader.read();
185
- if (result.done) {
186
- break;
187
- }
188
- yield* this._search.feed(result.value);
189
- }
190
- const tail = this._search.end();
191
- if (tail.length) {
192
- yield tail;
193
- }
194
- } finally {
195
- reader.releaseLock();
196
- }
197
- }
198
- }
199
-
200
- const mergeArrays2 = Function.prototype.apply.bind(mergeArrays, undefined);
201
- const dash = stringToArray('--');
202
- const CRLF = stringToArray('\r\n');
203
- function parseContentDisposition(header) {
204
- const parts = header.split(';').map(part => part.trim());
205
- if (parts.shift() !== 'form-data') {
206
- throw new Error('malformed content-disposition header: missing "form-data" in `' + JSON.stringify(parts) + '`');
207
- }
208
- const out = {};
209
- for (const part of parts) {
210
- const kv = part.split('=', 2);
211
- if (kv.length !== 2) {
212
- throw new Error('malformed content-disposition header: key-value pair not found - ' + part + ' in `' + header + '`');
213
- }
214
- const [name, value] = kv;
215
- if (value[0] === '"' && value[value.length - 1] === '"') {
216
- out[name] = value.slice(1, -1).replace(/\\"/g, '"');
217
- } else if (value[0] !== '"' && value[value.length - 1] !== '"') {
218
- out[name] = value;
219
- } else if (value[0] === '"' && value[value.length - 1] !== '"' || value[0] !== '"' && value[value.length - 1] === '"') {
220
- throw new Error('malformed content-disposition header: mismatched quotations in `' + header + '`');
221
- }
222
- }
223
- if (!out.name) {
224
- throw new Error('malformed content-disposition header: missing field name in `' + header + '`');
225
- }
226
- return out;
227
- }
228
- function parsePartHeaders(lines) {
229
- const entries = [];
230
- let disposition = false;
231
- let line;
232
- while (typeof (line = lines.shift()) !== 'undefined') {
233
- const colon = line.indexOf(':');
234
- if (colon === -1) {
235
- throw new Error('malformed multipart-form header: missing colon');
236
- }
237
- const header = line.slice(0, colon).trim().toLowerCase();
238
- const value = line.slice(colon + 1).trim();
239
- switch (header) {
240
- case 'content-disposition':
241
- disposition = true;
242
- entries.push(...Object.entries(parseContentDisposition(value)));
243
- break;
244
- case 'content-type':
245
- entries.push([
246
- 'contentType',
247
- value
248
- ]);
249
- }
250
- }
251
- if (!disposition) {
252
- throw new Error('malformed multipart-form header: missing content-disposition');
253
- }
254
- return Object.fromEntries(entries);
255
- }
256
- async function readHeaderLines(it, needle) {
257
- let firstChunk = true;
258
- let lastTokenWasMatch = false;
259
- const headerLines = [[]];
260
- const crlfSearch = new StreamSearch(CRLF);
261
- for (;;) {
262
- const result = await it.next();
263
- if (result.done) {
264
- throw new Error('malformed multipart-form data: unexpected end of stream');
265
- }
266
- if (firstChunk && result.value !== MATCH && arraysEqual(result.value.slice(0, 2), dash)) {
267
- return [
268
- undefined,
269
- new Uint8Array()
270
- ];
271
- }
272
- let chunk;
273
- if (result.value !== MATCH) {
274
- chunk = result.value;
275
- } else if (!lastTokenWasMatch) {
276
- chunk = needle;
277
- } else {
278
- throw new Error('malformed multipart-form data: unexpected boundary');
279
- }
280
- if (!chunk.length) {
281
- continue;
282
- }
283
- if (firstChunk) {
284
- firstChunk = false;
285
- }
286
- const tokens = crlfSearch.feed(chunk);
287
- for (const [i, token] of tokens.entries()) {
288
- const isMatch = token === MATCH;
289
- if (!isMatch && !token.length) {
290
- continue;
291
- }
292
- if (lastTokenWasMatch && isMatch) {
293
- tokens.push(crlfSearch.end());
294
- return [
295
- headerLines.filter(chunks => chunks.length).map(mergeArrays2).map(arrayToString),
296
- mergeArrays(...tokens.slice(i + 1).map(token => token === MATCH ? CRLF : token))
297
- ];
298
- }
299
- if (lastTokenWasMatch = isMatch) {
300
- headerLines.push([]);
301
- } else {
302
- headerLines[headerLines.length - 1].push(token);
303
- }
304
- }
305
- }
306
- }
307
- async function* streamMultipart(body, boundary) {
308
- const needle = mergeArrays(dash, stringToArray(boundary));
309
- const it = new ReadableStreamSearch(needle, body)[Symbol.asyncIterator]();
310
- for (;;) {
311
- const result = await it.next();
312
- if (result.done) {
313
- return;
314
- }
315
- if (result.value === MATCH) {
316
- break;
317
- }
318
- }
319
- const crlfSearch = new StreamSearch(CRLF);
320
- for (;;) {
321
- const [headerLines, tail] = await readHeaderLines(it, needle);
322
- if (!headerLines) {
323
- return;
324
- }
325
- async function nextToken() {
326
- const result = await it.next();
327
- if (result.done) {
328
- throw new Error('malformed multipart-form data: unexpected end of stream');
329
- }
330
- return result;
331
- }
332
- let trailingCRLF = false;
333
- function feedChunk(chunk) {
334
- const chunks = [];
335
- for (const token of crlfSearch.feed(chunk)) {
336
- if (trailingCRLF) {
337
- chunks.push(CRLF);
338
- }
339
- if (!(trailingCRLF = token === MATCH)) {
340
- chunks.push(token);
341
- }
342
- }
343
- return mergeArrays(...chunks);
344
- }
345
- let done = false;
346
- async function nextChunk() {
347
- const result = await nextToken();
348
- let chunk;
349
- if (result.value !== MATCH) {
350
- chunk = result.value;
351
- } else if (!trailingCRLF) {
352
- chunk = CRLF;
353
- } else {
354
- done = true;
355
- return { value: crlfSearch.end() };
356
- }
357
- return { value: feedChunk(chunk) };
358
- }
359
- const bufferedChunks = [{ value: feedChunk(tail) }];
360
- yield {
361
- ...parsePartHeaders(headerLines),
362
- data: {
363
- [Symbol.asyncIterator]() {
364
- return this;
365
- },
366
- async next() {
367
- for (;;) {
368
- const result = bufferedChunks.shift();
369
- if (!result) {
370
- break;
371
- }
372
- if (result.value.length > 0) {
373
- return result;
374
- }
375
- }
376
- for (;;) {
377
- if (done) {
378
- return {
379
- done,
380
- value: undefined
381
- };
382
- }
383
- const result = await nextChunk();
384
- if (result.value.length > 0) {
385
- return result;
386
- }
387
- }
388
- }
389
- }
390
- };
391
- while (!done) {
392
- bufferedChunks.push(await nextChunk());
393
- }
394
- }
395
- }
396
-
397
- class SliceStream extends Transform {
398
- _transform(chunk, _, done) {
399
- this.indexOffset += chunk.length;
400
- if (!this.emitUp && this.indexOffset >= this.startIndex) {
401
- this.emitUp = true;
402
- const start = chunk.length - (this.indexOffset - this.startIndex);
403
- if (this.indexOffset > this.endIndex) {
404
- const end = chunk.length - (this.indexOffset - this.endIndex);
405
- this.emitDown = true;
406
- this.push(chunk.slice(start, end));
407
- } else {
408
- this.push(chunk.slice(start, chunk.length));
409
- }
410
- done();
411
- return;
412
- }
413
- if (this.emitUp && !this.emitDown) {
414
- if (this.indexOffset >= this.endIndex) {
415
- this.emitDown = true;
416
- this.push(chunk.slice(0, chunk.length - (this.indexOffset - this.endIndex)));
417
- } else {
418
- this.push(chunk);
419
- }
420
- done();
421
- return;
422
- }
423
- done();
424
- }
425
- constructor(startIndex = 0, endIndex = Infinity){
426
- super();
427
- this.startIndex = startIndex;
428
- this.endIndex = endIndex;
429
- this.indexOffset = 0;
430
- this.emitUp = false;
431
- this.emitDown = false;
432
- }
433
- }
434
- function streamSlice(startIndex = 0, endIndex = Infinity) {
435
- return new SliceStream(startIndex, endIndex);
436
- }
437
-
438
- function composeUploadHandlers(...handlers) {
439
- return async (part)=>{
440
- for (const handler of handlers){
441
- const value = await handler(part);
442
- if (typeof value !== 'undefined' && value !== null) {
443
- return value;
444
- }
445
- }
446
- return undefined;
447
- };
448
- }
449
- class MaxPartSizeExceededError extends Error {
450
- constructor(field, maxBytes){
451
- super(`Field "${field}" exceeded upload size of ${maxBytes} bytes.`);
452
- this.field = field;
453
- this.maxBytes = maxBytes;
454
- }
455
- }
456
- class MaxBodySizeExceededError extends Error {
457
- constructor(maxBytes){
458
- super(`Body exceeded upload size of ${maxBytes} bytes.`);
459
- this.maxBytes = maxBytes;
460
- }
461
- }
462
-
463
- async function readableStreamToString(stream, encoding) {
464
- const reader = stream.getReader();
465
- const chunks = [];
466
- async function read() {
467
- const { done , value } = await reader.read();
468
- if (done) {
469
- return;
470
- } else if (value) {
471
- chunks.push(value);
472
- }
473
- await read();
474
- }
475
- await read();
476
- return Buffer.concat(chunks).toString(encoding);
477
- }
478
- const defaultFilePathResolver = ({ filename , })=>{
479
- const ext = filename ? extname(filename) : '';
480
- return 'upload_' + randomBytes(4).readUInt32LE(0) + ext;
481
- };
482
- async function uniqueFile(filepath) {
483
- const ext = extname(filepath);
484
- let uniqueFilepath = filepath;
485
- for(let i = 1; await stat(uniqueFilepath).then(()=>true).catch(()=>false); i++){
486
- uniqueFilepath = (ext ? filepath.slice(0, -ext.length) : filepath) + `-${new Date().getTime()}${ext}`;
487
- }
488
- return uniqueFilepath;
489
- }
490
- function createFileUploadHandler({ directory =tmpdir() , avoidFileConflicts =true , file =defaultFilePathResolver , filter , maxPartSize =3000000 } = {}) {
491
- return async ({ name , filename , contentType , data })=>{
492
- if (!filename || filter && !await filter({
493
- name,
494
- filename,
495
- contentType
496
- })) {
497
- return undefined;
498
- }
499
- const dir = typeof directory === 'string' ? directory : directory({
500
- name,
501
- filename,
502
- contentType
503
- });
504
- if (!dir) {
505
- return undefined;
506
- }
507
- const filedir = resolve(dir);
508
- const path = typeof file === 'string' ? file : file({
509
- name,
510
- filename,
511
- contentType
512
- });
513
- if (!path) {
514
- return undefined;
515
- }
516
- let filepath = resolve(filedir, path);
517
- if (avoidFileConflicts) {
518
- filepath = await uniqueFile(filepath);
519
- }
520
- await mkdir(dirname(filepath), {
521
- recursive: true
522
- }).catch(()=>{});
523
- const writeFileStream = createWriteStream(filepath);
524
- let size = 0;
525
- let deleteFile = false;
526
- try {
527
- for await (const chunk of data){
528
- size += chunk.byteLength;
529
- if (size > maxPartSize) {
530
- deleteFile = true;
531
- throw new MaxPartSizeExceededError(name, maxPartSize);
532
- }
533
- writeFileStream.write(chunk);
534
- }
535
- } finally{
536
- writeFileStream.end();
537
- await promisify(finished)(writeFileStream);
538
- if (deleteFile) {
539
- await rm(filepath).catch(()=>{});
540
- }
541
- }
542
- return new NodeOnDiskFile(filepath, contentType);
543
- };
544
- }
545
- let _toStringTag = Symbol.toStringTag;
546
- class NodeOnDiskFile {
547
- get size() {
548
- const stats = statSync(this.filepath);
549
- if (this.slicer) {
550
- const slice = this.slicer.end - this.slicer.start;
551
- return slice < 0 ? 0 : slice > stats.size ? stats.size : slice;
552
- }
553
- return stats.size;
554
- }
555
- slice(start, end, type) {
556
- if (typeof start === 'number' && start < 0) start = this.size + start;
557
- if (typeof end === 'number' && end < 0) end = this.size + end;
558
- const startOffset = this.slicer?.start ?? 0;
559
- start = startOffset + (start ?? 0);
560
- end = startOffset + (end ?? this.size);
561
- return new NodeOnDiskFile(this.filepath, typeof type === 'string' ? type : this.type, {
562
- start,
563
- end
564
- });
565
- }
566
- async arrayBuffer() {
567
- let stream = createReadStream(this.filepath);
568
- if (this.slicer) {
569
- stream = stream.pipe(streamSlice(this.slicer.start, this.slicer.end));
570
- }
571
- return new Promise((resolve, reject)=>{
572
- const buf = [];
573
- stream.on('data', (chunk)=>buf.push(chunk));
574
- stream.on('end', ()=>{
575
- resolve(Buffer.concat(buf));
576
- });
577
- stream.on('error', (err)=>{
578
- reject(err);
579
- });
580
- });
581
- }
582
- stream() {
583
- let stream = createReadStream(this.filepath);
584
- if (this.slicer) {
585
- stream = stream.pipe(streamSlice(this.slicer.start, this.slicer.end));
586
- }
587
- return Readable.toWeb(stream);
588
- }
589
- async text() {
590
- return readableStreamToString(this.stream());
591
- }
592
- remove() {
593
- return unlink(this.filepath);
594
- }
595
- getFilePath() {
596
- return this.filepath;
597
- }
598
- constructor(filepath, type, slicer){
599
- this.filepath = filepath;
600
- this.type = type;
601
- this.slicer = slicer;
602
- this.lastModified = 0;
603
- this.webkitRelativePath = '';
604
- this[_toStringTag] = 'File';
605
- this.name = basename(filepath);
606
- }
607
- }
608
-
609
- function createMemoryUploadHandler({ filter , maxPartSize =3000000 } = {}) {
610
- return async ({ filename , contentType , name , data })=>{
611
- if (filter && !await filter({
612
- filename,
613
- contentType,
614
- name
615
- })) {
616
- return undefined;
617
- }
618
- let size = 0;
619
- const chunks = [];
620
- for await (const chunk of data){
621
- size += chunk.byteLength;
622
- if (size > maxPartSize) {
623
- throw new MaxPartSizeExceededError(name, maxPartSize);
624
- }
625
- chunks.push(chunk);
626
- }
627
- return new File(chunks, filename, {
628
- type: contentType
629
- });
630
- };
631
- }
2
+ import { Readable } from 'node:stream';
3
+ import { streamMultipart } from '../../../../node_modules/.pnpm/@web3-storage_multipart-parser@1.0.0/node_modules/@web3-storage/multipart-parser/esm/src/index.mjs';
4
+ import { createNodeHTTPContentTypeHandler } from '../../internals/contentType.mjs';
5
+ import { NodeOnDiskFile } from './fileUploadHandler.mjs';
6
+ export { createFileUploadHandler as experimental_createFileUploadHandler } from './fileUploadHandler.mjs';
7
+ import { MaxBodySizeExceededError } from './uploadHandler.mjs';
8
+ export { MaxPartSizeExceededError, composeUploadHandlers as experimental_composeUploadHandlers } from './uploadHandler.mjs';
9
+ export { createMemoryUploadHandler as experimental_createMemoryUploadHandler } from './memoryUploadHandler.mjs';
632
10
 
633
11
  const utfTextDecoder = new TextDecoder('utf-8');
634
12
  /**
@@ -727,4 +105,4 @@ const nodeHTTPFormDataContentTypeHandler = createNodeHTTPContentTypeHandler({
727
105
  }
728
106
  });
729
107
 
730
- export { MaxBodySizeExceededError, MaxPartSizeExceededError, NodeOnDiskFile as experimental_NodeOnDiskFile, composeUploadHandlers as experimental_composeUploadHandlers, createFileUploadHandler as experimental_createFileUploadHandler, createMemoryUploadHandler as experimental_createMemoryUploadHandler, isMultipartFormDataRequest as experimental_isMultipartFormDataRequest, parseMultipartFormData as experimental_parseMultipartFormData, nodeHTTPFormDataContentTypeHandler };
108
+ export { MaxBodySizeExceededError, NodeOnDiskFile as experimental_NodeOnDiskFile, isMultipartFormDataRequest as experimental_isMultipartFormDataRequest, parseMultipartFormData as experimental_parseMultipartFormData, nodeHTTPFormDataContentTypeHandler };
@@ -0,0 +1,29 @@
1
+ 'use strict';
2
+
3
+ var uploadHandler = require('./uploadHandler.js');
4
+
5
+ function createMemoryUploadHandler({ filter , maxPartSize =3000000 } = {}) {
6
+ return async ({ filename , contentType , name , data })=>{
7
+ if (filter && !await filter({
8
+ filename,
9
+ contentType,
10
+ name
11
+ })) {
12
+ return undefined;
13
+ }
14
+ let size = 0;
15
+ const chunks = [];
16
+ for await (const chunk of data){
17
+ size += chunk.byteLength;
18
+ if (size > maxPartSize) {
19
+ throw new uploadHandler.MaxPartSizeExceededError(name, maxPartSize);
20
+ }
21
+ chunks.push(chunk);
22
+ }
23
+ return new File(chunks, filename, {
24
+ type: contentType
25
+ });
26
+ };
27
+ }
28
+
29
+ exports.createMemoryUploadHandler = createMemoryUploadHandler;
@@ -0,0 +1,27 @@
1
+ import { MaxPartSizeExceededError } from './uploadHandler.mjs';
2
+
3
+ function createMemoryUploadHandler({ filter , maxPartSize =3000000 } = {}) {
4
+ return async ({ filename , contentType , name , data })=>{
5
+ if (filter && !await filter({
6
+ filename,
7
+ contentType,
8
+ name
9
+ })) {
10
+ return undefined;
11
+ }
12
+ let size = 0;
13
+ const chunks = [];
14
+ for await (const chunk of data){
15
+ size += chunk.byteLength;
16
+ if (size > maxPartSize) {
17
+ throw new MaxPartSizeExceededError(name, maxPartSize);
18
+ }
19
+ chunks.push(chunk);
20
+ }
21
+ return new File(chunks, filename, {
22
+ type: contentType
23
+ });
24
+ };
25
+ }
26
+
27
+ export { createMemoryUploadHandler };