@trpc/server 11.0.0-rc.370 → 11.0.0-rc.374

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. package/dist/adapters/node-http/incomingMessageToRequest.js +21 -19
  2. package/dist/adapters/node-http/incomingMessageToRequest.mjs +21 -19
  3. package/dist/bundle-analysis.json +122 -103
  4. package/dist/unstable-core-do-not-import/clientish/serialize.d.ts +1 -1
  5. package/dist/unstable-core-do-not-import/clientish/serialize.d.ts.map +1 -1
  6. package/dist/unstable-core-do-not-import/http/resolveResponse.d.ts.map +1 -1
  7. package/dist/unstable-core-do-not-import/http/resolveResponse.js +79 -52
  8. package/dist/unstable-core-do-not-import/http/resolveResponse.mjs +78 -51
  9. package/dist/unstable-core-do-not-import/initTRPC.d.ts.map +1 -1
  10. package/dist/unstable-core-do-not-import/initTRPC.js +2 -1
  11. package/dist/unstable-core-do-not-import/initTRPC.mjs +2 -1
  12. package/dist/unstable-core-do-not-import/rootConfig.d.ts +6 -0
  13. package/dist/unstable-core-do-not-import/rootConfig.d.ts.map +1 -1
  14. package/dist/unstable-core-do-not-import/stream/stream.d.ts +94 -0
  15. package/dist/unstable-core-do-not-import/stream/stream.d.ts.map +1 -0
  16. package/dist/unstable-core-do-not-import/stream/stream.js +466 -0
  17. package/dist/unstable-core-do-not-import/stream/stream.mjs +462 -0
  18. package/dist/unstable-core-do-not-import/utils.d.ts +2 -1
  19. package/dist/unstable-core-do-not-import/utils.d.ts.map +1 -1
  20. package/dist/unstable-core-do-not-import/utils.js +4 -0
  21. package/dist/unstable-core-do-not-import/utils.mjs +4 -1
  22. package/dist/unstable-core-do-not-import.d.ts +1 -0
  23. package/dist/unstable-core-do-not-import.d.ts.map +1 -1
  24. package/dist/unstable-core-do-not-import.js +5 -0
  25. package/dist/unstable-core-do-not-import.mjs +2 -1
  26. package/package.json +2 -2
  27. package/src/adapters/node-http/incomingMessageToRequest.ts +23 -23
  28. package/src/unstable-core-do-not-import/clientish/serialize.ts +1 -0
  29. package/src/unstable-core-do-not-import/http/resolveResponse.ts +83 -52
  30. package/src/unstable-core-do-not-import/initTRPC.ts +1 -0
  31. package/src/unstable-core-do-not-import/rootConfig.ts +7 -0
  32. package/src/unstable-core-do-not-import/stream/stream.ts +580 -0
  33. package/src/unstable-core-do-not-import/utils.ts +7 -1
  34. package/src/unstable-core-do-not-import.ts +1 -0
@@ -0,0 +1,580 @@
1
+ /* eslint-disable @typescript-eslint/no-non-null-assertion */
2
+ import { isAsyncIterable, isFunction, isObject } from '../utils';
3
+
4
+ // ---------- utils
5
+
6
+ function createReadableStream<TValue = unknown>() {
7
+ let controller: ReadableStreamDefaultController<TValue> =
8
+ null as unknown as ReadableStreamDefaultController<TValue>;
9
+ const stream = new ReadableStream<TValue>({
10
+ start(c) {
11
+ controller = c;
12
+ },
13
+ });
14
+
15
+ return [stream, controller] as const;
16
+ }
17
+
18
/**
 * A subset of the standard ReadableStream properties needed by tRPC internally.
 * @see ReadableStream from lib.dom.d.ts
 */
export type WebReadableStreamEsque = {
  getReader: () => ReadableStreamDefaultReader<Uint8Array>;
};

/**
 * Minimal shape of a Node.js readable stream: only the `.on()` subscription
 * method is required (used below for 'data'/'end'/'error' events).
 */
export type NodeJSReadableStreamEsque = {
  on(
    eventName: string | symbol,
    listener: (...args: any[]) => void,
  ): NodeJSReadableStreamEsque;
};

// ---------- types

// Wire-format discriminants. Each const doubles as a same-named literal type
// so the serialized values and the types referencing them cannot drift apart.

// Kind of async value a chunk represents.
const CHUNK_VALUE_TYPE_PROMISE = 0;
type CHUNK_VALUE_TYPE_PROMISE = typeof CHUNK_VALUE_TYPE_PROMISE;
const CHUNK_VALUE_TYPE_ASYNC_ITERABLE = 1;
type CHUNK_VALUE_TYPE_ASYNC_ITERABLE = typeof CHUNK_VALUE_TYPE_ASYNC_ITERABLE;

// Terminal outcomes for a promise chunk.
const PROMISE_STATUS_FULFILLED = 0;
type PROMISE_STATUS_FULFILLED = typeof PROMISE_STATUS_FULFILLED;
const PROMISE_STATUS_REJECTED = 1;
type PROMISE_STATUS_REJECTED = typeof PROMISE_STATUS_REJECTED;

// Per-record states for an async-iterable chunk.
const ASYNC_ITERABLE_STATUS_DONE = 0;
type ASYNC_ITERABLE_STATUS_DONE = typeof ASYNC_ITERABLE_STATUS_DONE;
const ASYNC_ITERABLE_STATUS_VALUE = 1;
type ASYNC_ITERABLE_STATUS_VALUE = typeof ASYNC_ITERABLE_STATUS_VALUE;
const ASYNC_ITERABLE_STATUS_ERROR = 2;
type ASYNC_ITERABLE_STATUS_ERROR = typeof ASYNC_ITERABLE_STATUS_ERROR;

// Where a chunk's placeholder sits inside its parent value.
type ChunkDefinitionKey =
  // root should be replaced
  | null
  // at array path
  | number
  // at key path
  | string;

// Branded number identifying a chunk across the head and the chunk stream.
type ChunkIndex = number & { __chunkIndex: true };
type ChunkValueType =
  | CHUNK_VALUE_TYPE_PROMISE
  | CHUNK_VALUE_TYPE_ASYNC_ITERABLE;
// [where to splice the value in, what kind of async value, which chunk id]
type ChunkDefinition = [
  key: ChunkDefinitionKey,
  type: ChunkValueType,
  chunkId: ChunkIndex,
];
// A value with its async parts replaced by placeholders plus the chunk
// definitions needed to restore them on the consumer side.
type HydratedValue = [
  // data
  [unknown],
  // chunk descriptions
  ...ChunkDefinition[],
];

// First JSON line of the stream: one hydrated value per batch key.
type Head = Record<string, HydratedValue>;
type PromiseChunk =
  | [
      chunkIndex: ChunkIndex,
      status: PROMISE_STATUS_FULFILLED,
      value: HydratedValue,
    ]
  | [
      chunkIndex: ChunkIndex,
      status: PROMISE_STATUS_REJECTED,
      // do we want to serialize errors?
      // , error?: unknown
    ];
type IterableChunk =
  | [chunkIndex: ChunkIndex, status: ASYNC_ITERABLE_STATUS_DONE]
  | [
      chunkIndex: ChunkIndex,
      status: ASYNC_ITERABLE_STATUS_VALUE,
      value: HydratedValue,
    ]
  | [
      chunkIndex: ChunkIndex,
      status: ASYNC_ITERABLE_STATUS_ERROR,
      // do we want to serialize errors?
      // , error?: unknown
    ];
type ChunkData = PromiseChunk | IterableChunk;
// Branded literal written where an async value was removed from the head.
type PlaceholderValue = 0 & { __placeholder: true };
103
+ export function isPromise(value: unknown): value is Promise<unknown> {
104
+ return (
105
+ (isObject(value) || isFunction(value)) &&
106
+ typeof value?.['then'] === 'function' &&
107
+ typeof value?.['catch'] === 'function'
108
+ );
109
+ }
110
+
111
// Transformer hooks (e.g. a data transformer) applied to the head and to
// every chunk record before/after JSON serialization.
type Serialize = (value: any) => any;
type Deserialize = (value: any) => any;

/**
 * Called when a produced promise rejects or an iterable throws;
 * `path` locates the failing value inside the batch.
 */
export type ProducerOnError = (opts: {
  error: unknown;
  path: (string | number)[];
}) => void;
export interface ProducerOptions {
  serialize?: Serialize;
  // Batch values keyed by request index; each value may contain async parts.
  data: Record<number, unknown>;
  onError?: ProducerOnError;
  // Maximum nesting depth of async values before a MaxDepthError is produced.
  maxDepth?: number;
}
124
+
125
+ class MaxDepthError extends Error {
126
+ constructor(public path: (string | number)[]) {
127
+ super('Max depth reached at path: ' + path.join('.'));
128
+ }
129
+ }
130
+
131
/**
 * Walks `opts.data`, replacing every Promise and AsyncIterable with a
 * placeholder plus a chunk reference, and returns the resulting "head"
 * together with a ReadableStream that emits each chunk's records as the
 * underlying async values settle.
 *
 * The stream closes once every registered chunk has finished.
 */
function createBatchStreamProducer(opts: ProducerOptions) {
  const { data } = opts;
  // Monotonically increasing id assigned to each registered async chunk.
  let counter = 0 as ChunkIndex;
  const placeholder = 0 as PlaceholderValue;

  const [stream, controller] = createReadableStream<ChunkData>();
  // Chunk ids that have been registered but not yet settled/terminated.
  const pending = new Set<ChunkIndex>();
  // Close the output stream once the last pending chunk finishes.
  function maybeClose() {
    if (pending.size === 0) {
      controller.close();
    }
  }
  // Registers a promise as a chunk; emits exactly one fulfilled/rejected record.
  function hydratePromise(
    promise: Promise<unknown>,
    path: (string | number)[],
  ) {
    // A depth overflow is surfaced as a rejected chunk rather than a throw.
    const error = checkMaxDepth(path);
    if (error) {
      promise.catch(() => {
        // ignore - prevents an unhandled rejection on the original promise
      });
      promise = Promise.reject(error);
    }
    const idx = counter++ as ChunkIndex;
    pending.add(idx);
    const enqueue = (value: PromiseChunk) => {
      controller.enqueue(value);
    };
    promise
      .then((it) => {
        // Resolved values may themselves contain nested async parts.
        enqueue([idx, PROMISE_STATUS_FULFILLED, hydrate(it, path)]);
      })
      .catch((err) => {
        opts.onError?.({ error: err, path });
        // The error itself is not serialized; consumers get a bare rejection.
        enqueue([idx, PROMISE_STATUS_REJECTED]);
      })
      .finally(() => {
        pending.delete(idx);
        maybeClose();
      });
    return idx;
  }
  // Registers an async iterable as a chunk; emits one VALUE record per item,
  // then a single DONE or ERROR terminator.
  function hydrateAsyncIterable(
    iterable: AsyncIterable<unknown>,
    path: (string | number)[],
  ) {
    const error = checkMaxDepth(path);
    if (error) {
      // Replace the iterable with one that throws on first iteration.
      iterable = {
        [Symbol.asyncIterator]() {
          throw error;
        },
      };
    }
    const idx = counter++ as ChunkIndex;
    pending.add(idx);
    void (async () => {
      try {
        for await (const item of iterable) {
          controller.enqueue([
            idx,
            ASYNC_ITERABLE_STATUS_VALUE,
            hydrate(item, path),
          ]);
        }
        controller.enqueue([idx, ASYNC_ITERABLE_STATUS_DONE]);
      } catch (error) {
        opts.onError?.({ error, path });
        controller.enqueue([idx, ASYNC_ITERABLE_STATUS_ERROR]);
      } finally {
        pending.delete(idx);
        maybeClose();
      }
    })();
    return idx;
  }
  // Returns a MaxDepthError when `path` exceeds opts.maxDepth, else null.
  function checkMaxDepth(path: (string | number)[]) {
    if (opts.maxDepth && path.length > opts.maxDepth) {
      return new MaxDepthError(path);
    }
    return null;
  }
  // Registers `value` as a chunk when it is async; returns null for plain
  // (synchronous) values so the caller can embed them directly.
  function hydrateChunk(
    value: unknown,
    path: (string | number)[],
  ): null | [type: ChunkValueType, chunkId: ChunkIndex] {
    if (isPromise(value)) {
      return [CHUNK_VALUE_TYPE_PROMISE, hydratePromise(value, path)];
    }
    if (isAsyncIterable(value)) {
      if (opts.maxDepth && path.length >= opts.maxDepth) {
        throw new Error('Max depth reached');
      }
      return [
        CHUNK_VALUE_TYPE_ASYNC_ITERABLE,
        hydrateAsyncIterable(value, path),
      ];
    }
    return null;
  }
  // Converts a value into [data, ...chunk definitions], replacing async parts
  // (either the root itself or first-level object properties) with placeholders.
  function hydrate(value: unknown, path: (string | number)[]): HydratedValue {
    const reg = hydrateChunk(value, path);
    if (reg) {
      // The root itself is async: a `null` key means "replace the root".
      return [[placeholder], [null, ...reg]];
    }
    if (!isObject(value)) {
      return [[value]];
    }
    const newObj = {} as Record<string, unknown>;
    const asyncValues: ChunkDefinition[] = [];
    for (const [key, item] of Object.entries(value)) {
      const transformed = hydrateChunk(item, [...path, key]);
      if (!transformed) {
        newObj[key] = item;
        continue;
      }
      newObj[key] = placeholder;
      asyncValues.push([key, ...transformed]);
    }
    return [[newObj], ...asyncValues];
  }

  const newHead: Head = {};
  for (const [key, item] of Object.entries(data)) {
    newHead[key] = hydrate(item, [key]);
  }

  return [newHead, stream] as const;
}
261
+ /**
262
+ * JSON Lines stream producer
263
+ * @see https://jsonlines.org/
264
+ */
265
+ export function jsonlStreamProducer(opts: ProducerOptions) {
266
+ let [head, stream] = createBatchStreamProducer(opts);
267
+
268
+ const { serialize } = opts;
269
+ if (serialize) {
270
+ head = serialize(head);
271
+ stream = stream.pipeThrough(
272
+ new TransformStream({
273
+ transform(chunk, controller) {
274
+ controller.enqueue(serialize(chunk));
275
+ },
276
+ }),
277
+ );
278
+ }
279
+
280
+ return stream
281
+ .pipeThrough(
282
+ new TransformStream({
283
+ start(controller) {
284
+ controller.enqueue(JSON.stringify(head) + '\n');
285
+ },
286
+ transform(chunk, controller) {
287
+ controller.enqueue(JSON.stringify(chunk) + '\n');
288
+ },
289
+ }),
290
+ )
291
+ .pipeThrough(new TextEncoderStream());
292
+ }
293
/**
 * Signals that the underlying transport closed or errored before the stream
 * finished; the consumer forwards it to every pending promise and iterator.
 */
class StreamInterruptedError extends Error {
  constructor(cause?: unknown) {
    // eslint-disable-next-line @typescript-eslint/ban-ts-comment
    // @ts-ignore https://github.com/tc39/proposal-error-cause
    super('Invalid response or stream interrupted', { cause });
  }
}
300
+ class AsyncError extends Error {
301
+ constructor(public readonly data: unknown) {
302
+ super('Received error from server');
303
+ }
304
+ }
305
+ export type ConsumerOnError = (opts: { error: unknown }) => void;
306
+
307
+ const nodeJsStreamToReaderEsque = (source: NodeJSReadableStreamEsque) => {
308
+ return {
309
+ getReader() {
310
+ const [stream, controller] = createReadableStream<Uint8Array>();
311
+ source.on('data', (chunk) => {
312
+ controller.enqueue(chunk);
313
+ });
314
+ source.on('end', () => {
315
+ controller.close();
316
+ });
317
+ source.on('error', (error) => {
318
+ controller.error(error);
319
+ });
320
+ return stream.getReader();
321
+ },
322
+ };
323
+ };
324
+
325
+ function createLineAccumulator(
326
+ from: NodeJSReadableStreamEsque | WebReadableStreamEsque,
327
+ ) {
328
+ const reader =
329
+ 'getReader' in from
330
+ ? from.getReader()
331
+ : nodeJsStreamToReaderEsque(from).getReader();
332
+
333
+ let lineAggregate = '';
334
+
335
+ return new ReadableStream({
336
+ async pull(controller) {
337
+ const { done, value } = await reader.read();
338
+
339
+ if (done) {
340
+ controller.close();
341
+ } else {
342
+ controller.enqueue(value);
343
+ }
344
+ },
345
+ cancel() {
346
+ return reader.cancel();
347
+ },
348
+ })
349
+ .pipeThrough(new TextDecoderStream())
350
+ .pipeThrough(
351
+ new TransformStream<string, string>({
352
+ transform(chunk, controller) {
353
+ lineAggregate += chunk;
354
+ const parts = lineAggregate.split('\n');
355
+ lineAggregate = parts.pop() ?? '';
356
+ for (const part of parts) {
357
+ controller.enqueue(part);
358
+ }
359
+ },
360
+ }),
361
+ );
362
+ }
363
+ function createConsumerStream<THead>(
364
+ from: NodeJSReadableStreamEsque | WebReadableStreamEsque,
365
+ ) {
366
+ const stream = createLineAccumulator(from);
367
+
368
+ let sentHead = false;
369
+ return stream.pipeThrough(
370
+ new TransformStream<string, ChunkData | THead>({
371
+ transform(line, controller) {
372
+ if (!sentHead) {
373
+ const head = JSON.parse(line);
374
+ controller.enqueue(head as THead);
375
+ sentHead = true;
376
+ } else {
377
+ const chunk: ChunkData = JSON.parse(line);
378
+ controller.enqueue(chunk);
379
+ }
380
+ },
381
+ }),
382
+ );
383
+ }
384
+
385
+ function createDeferred<TValue>() {
386
+ let resolve: (value: TValue) => void;
387
+ let reject: (error: unknown) => void;
388
+ const promise = new Promise<TValue>((res, rej) => {
389
+ resolve = res;
390
+ reject = rej;
391
+ });
392
+
393
+ return { promise, resolve: resolve!, reject: reject! };
394
+ }
395
+
396
+ type Deferred<TValue> = ReturnType<typeof createDeferred<TValue>>;
397
+
398
/**
 * JSON Lines stream consumer
 *
 * Reads the head line, reconstructs promises/async-iterables from their
 * chunk definitions, then routes every subsequent chunk record to the
 * matching reconstructed value. Resolves with the dehydrated head once it
 * has arrived; chunk delivery continues in the background.
 * @see https://jsonlines.org/
 */
export async function jsonlStreamConsumer<THead>(opts: {
  from: NodeJSReadableStreamEsque | WebReadableStreamEsque;
  deserialize?: Deserialize;
  onError?: ConsumerOnError;
}) {
  const { deserialize = (v) => v } = opts;

  let source = createConsumerStream<Head>(opts.from);
  // NOTE(review): `deserialize` is always defined here because of the default
  // above, so this guard is redundant (but harmless).
  if (deserialize) {
    source = source.pipeThrough(
      new TransformStream({
        transform(chunk, controller) {
          controller.enqueue(deserialize(chunk));
        },
      }),
    );
  }
  // Set to null once the head has been resolved; doubles as the "are we
  // still waiting for the first line" flag inside write().
  let headDeferred: null | Deferred<THead> = createDeferred();

  type ControllerChunk = ChunkData | StreamInterruptedError;
  type ChunkController = ReadableStreamDefaultController<ControllerChunk>;
  // Chunks whose records arrived before their definition was dehydrated.
  const chunkDeferred = new Map<ChunkIndex, Deferred<ChunkController>>();
  // Live controllers for chunks currently being consumed.
  const controllers = new Map<ChunkIndex, ChunkController>();

  // Turns a chunk definition back into a Promise or AsyncIterable that is
  // fed by records routed to its per-chunk stream.
  function dehydrateChunkDefinition(value: ChunkDefinition) {
    const [_path, type, chunkId] = value;

    const [stream, controller] = createReadableStream<ChunkData>();
    controllers.set(chunkId, controller);

    // resolve chunk deferred if it exists
    const deferred = chunkDeferred.get(chunkId);
    if (deferred) {
      deferred.resolve(controller);
      chunkDeferred.delete(chunkId);
    }

    switch (type) {
      case CHUNK_VALUE_TYPE_PROMISE: {
        return new Promise((resolve, reject) => {
          // listen for next value in the stream
          const reader = stream.getReader();
          reader
            .read()
            .then((it) => {
              if (it.done) {
                reject(new Error('Promise chunk ended without value'));
                return;
              }
              if (it.value instanceof StreamInterruptedError) {
                reject(it.value);
                return;
              }
              const value = it.value;
              const [_chunkId, status, data] = value as PromiseChunk;
              switch (status) {
                case PROMISE_STATUS_FULFILLED:
                  // The fulfilled value may itself contain nested chunks.
                  resolve(dehydrate(data));
                  break;
                case PROMISE_STATUS_REJECTED:
                  reject(new AsyncError(data));
                  break;
              }
            })
            .catch(reject)
            .finally(() => {
              // reader.releaseLock();
              controllers.delete(chunkId);
            });
        });
      }
      case CHUNK_VALUE_TYPE_ASYNC_ITERABLE: {
        return {
          [Symbol.asyncIterator]: async function* () {
            const reader = stream.getReader();
            while (true) {
              const { done, value } = await reader.read();
              if (done) {
                break;
              }
              if (value instanceof StreamInterruptedError) {
                throw value;
              }

              const [_chunkId, status, data] = value as IterableChunk;

              switch (status) {
                case ASYNC_ITERABLE_STATUS_VALUE:
                  yield dehydrate(data);
                  break;
                case ASYNC_ITERABLE_STATUS_DONE:
                  controllers.delete(chunkId);
                  return;
                case ASYNC_ITERABLE_STATUS_ERROR:
                  controllers.delete(chunkId);
                  throw new AsyncError(data);
              }
            }
          },
        };
      }
    }
  }

  // Restores a hydrated value: splices each dehydrated chunk back at its
  // recorded key, or returns it directly when the root itself was async.
  function dehydrate(value: HydratedValue): unknown {
    const [[data], ...asyncProps] = value;

    for (const value of asyncProps) {
      const dehydrated = dehydrateChunkDefinition(value);

      const [path] = value;
      if (path === null) {
        return dehydrated;
      }

      (data as any)[path] = dehydrated;
    }
    return data;
  }

  // Rejects the head (if still pending) and every outstanding chunk with a
  // StreamInterruptedError; used for both orderly close and abort.
  const closeOrAbort = (reason?: unknown) => {
    const error = new StreamInterruptedError(reason);

    headDeferred?.reject(error);
    for (const deferred of chunkDeferred.values()) {
      deferred.reject(error);
    }
    chunkDeferred.clear();
    for (const controller of controllers.values()) {
      controller.enqueue(error);
      controller.close();
    }
    controllers.clear();
  };
  source
    .pipeTo(
      new WritableStream({
        async write(chunkOrHead) {
          if (headDeferred) {
            // First record: the head. Dehydrate each top-level entry in place.
            const head = chunkOrHead as Record<number | string, unknown>;

            for (const [key, value] of Object.entries(chunkOrHead)) {
              const parsed = dehydrate(value);
              head[key] = parsed;
            }
            headDeferred.resolve(head as THead);
            headDeferred = null;
            return;
          }
          // Subsequent records: route to the chunk's controller, waiting for
          // it to be created if the record outran its definition.
          const chunk = chunkOrHead as ChunkData;
          const [idx] = chunk;
          let controller = controllers.get(idx);
          if (!controller) {
            let deferred = chunkDeferred.get(idx);
            if (!deferred) {
              deferred = createDeferred<ChunkController>();
              chunkDeferred.set(idx, deferred);
            }

            controller = await deferred.promise;
          }
          controller.enqueue(chunk);
        },
        close: closeOrAbort,
        abort: closeOrAbort,
      }),
    )
    .catch((error) => {
      opts.onError?.({ error });
      closeOrAbort(error);
    });

  return [
    await headDeferred.promise,
    {
      controllers,
    },
  ] as const;
}
@@ -31,7 +31,7 @@ export function isObject(value: unknown): value is Record<string, unknown> {
31
31
  return !!value && !Array.isArray(value) && typeof value === 'object';
32
32
  }
33
33
 
34
- type AnyFn = (...args: any[]) => unknown;
34
+ type AnyFn = ((...args: any[]) => unknown) & Record<keyof any, unknown>;
35
35
  export function isFunction(fn: unknown): fn is AnyFn {
36
36
  return typeof fn === 'function';
37
37
  }
@@ -45,3 +45,9 @@ export function omitPrototype<TObj extends Record<string, unknown>>(
45
45
  ): TObj {
46
46
  return Object.assign(Object.create(null), obj);
47
47
  }
48
+
49
+ export function isAsyncIterable<TValue>(
50
+ value: unknown,
51
+ ): value is AsyncIterable<TValue> {
52
+ return isObject(value) && Symbol.asyncIterator in value;
53
+ }
@@ -33,3 +33,4 @@ export * from './unstable-core-do-not-import/rpc';
33
33
  export * from './unstable-core-do-not-import/transformer';
34
34
  export * from './unstable-core-do-not-import/types';
35
35
  export * from './unstable-core-do-not-import/utils';
36
+ export * from './unstable-core-do-not-import/stream/stream';