@trpc/server 11.0.0-rc.370 → 11.0.0-rc.374
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/adapters/node-http/incomingMessageToRequest.js +21 -19
- package/dist/adapters/node-http/incomingMessageToRequest.mjs +21 -19
- package/dist/bundle-analysis.json +122 -103
- package/dist/unstable-core-do-not-import/clientish/serialize.d.ts +1 -1
- package/dist/unstable-core-do-not-import/clientish/serialize.d.ts.map +1 -1
- package/dist/unstable-core-do-not-import/http/resolveResponse.d.ts.map +1 -1
- package/dist/unstable-core-do-not-import/http/resolveResponse.js +79 -52
- package/dist/unstable-core-do-not-import/http/resolveResponse.mjs +78 -51
- package/dist/unstable-core-do-not-import/initTRPC.d.ts.map +1 -1
- package/dist/unstable-core-do-not-import/initTRPC.js +2 -1
- package/dist/unstable-core-do-not-import/initTRPC.mjs +2 -1
- package/dist/unstable-core-do-not-import/rootConfig.d.ts +6 -0
- package/dist/unstable-core-do-not-import/rootConfig.d.ts.map +1 -1
- package/dist/unstable-core-do-not-import/stream/stream.d.ts +94 -0
- package/dist/unstable-core-do-not-import/stream/stream.d.ts.map +1 -0
- package/dist/unstable-core-do-not-import/stream/stream.js +466 -0
- package/dist/unstable-core-do-not-import/stream/stream.mjs +462 -0
- package/dist/unstable-core-do-not-import/utils.d.ts +2 -1
- package/dist/unstable-core-do-not-import/utils.d.ts.map +1 -1
- package/dist/unstable-core-do-not-import/utils.js +4 -0
- package/dist/unstable-core-do-not-import/utils.mjs +4 -1
- package/dist/unstable-core-do-not-import.d.ts +1 -0
- package/dist/unstable-core-do-not-import.d.ts.map +1 -1
- package/dist/unstable-core-do-not-import.js +5 -0
- package/dist/unstable-core-do-not-import.mjs +2 -1
- package/package.json +2 -2
- package/src/adapters/node-http/incomingMessageToRequest.ts +23 -23
- package/src/unstable-core-do-not-import/clientish/serialize.ts +1 -0
- package/src/unstable-core-do-not-import/http/resolveResponse.ts +83 -52
- package/src/unstable-core-do-not-import/initTRPC.ts +1 -0
- package/src/unstable-core-do-not-import/rootConfig.ts +7 -0
- package/src/unstable-core-do-not-import/stream/stream.ts +580 -0
- package/src/unstable-core-do-not-import/utils.ts +7 -1
- package/src/unstable-core-do-not-import.ts +1 -0
|
@@ -0,0 +1,466 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
var utils = require('../utils.js');
|
|
4
|
+
|
|
5
|
+
// ---------- utils
/**
 * Build a ReadableStream together with its controller so callers can
 * push values into the stream from outside the underlying source.
 *
 * The `start` callback runs synchronously inside the ReadableStream
 * constructor, so the controller reference is set before we return.
 */
function createReadableStream() {
    let controllerRef = null;
    const underlyingSource = {
        start(ctrl) {
            controllerRef = ctrl;
        }
    };
    const stream = new ReadableStream(underlyingSource);
    return [stream, controllerRef];
}
|
|
18
|
+
// ---------- types
// Wire-protocol enums for the JSON Lines batch stream. These numeric tags
// are embedded in the serialized chunks, so their values must stay stable
// between producer and consumer.
// Kind of async value referenced by a hydrated chunk:
const CHUNK_VALUE_TYPE_PROMISE = 0;
const CHUNK_VALUE_TYPE_ASYNC_ITERABLE = 1;
// Terminal states emitted for a streamed promise chunk:
const PROMISE_STATUS_FULFILLED = 0;
const PROMISE_STATUS_REJECTED = 1;
// Per-item states emitted for a streamed async-iterable chunk:
const ASYNC_ITERABLE_STATUS_DONE = 0;
const ASYNC_ITERABLE_STATUS_VALUE = 1;
const ASYNC_ITERABLE_STATUS_ERROR = 2;
|
|
26
|
+
/**
 * Duck-type check for thenables: an object or function that exposes
 * callable `then` and `catch` members.
 */
function isPromise(value) {
    const isObjectLike = utils.isObject(value) || utils.isFunction(value);
    if (!isObjectLike) {
        return false;
    }
    // Already known to be object/function here, so plain property access
    // is safe; both handlers must be callable.
    return typeof value['then'] === 'function' && typeof value['catch'] === 'function';
}
|
|
29
|
+
/**
 * Thrown (or used as a rejection) when hydration recurses deeper than
 * the configured `maxDepth` while walking nested async values.
 *
 * Fix: set `name` so the error identifies itself as `MaxDepthError`
 * instead of the generic "Error" in logs and stack traces.
 */
class MaxDepthError extends Error {
    constructor(path){
        super('Max depth reached at path: ' + path.join('.'));
        this.name = 'MaxDepthError';
        // Keep the offending key path so callers can report where the
        // depth limit was exceeded.
        this.path = path;
    }
}
|
|
35
|
+
/**
 * Walk `opts.data` (a record of values) and replace every promise and
 * async iterable with a numeric chunk reference, returning:
 *   [head, stream]
 * where `head` is the fully-hydrated synchronous envelope and `stream`
 * emits the async chunk payloads as they settle.
 *
 * Shared state: `counter` hands out chunk ids, `pending` tracks chunks
 * still in flight, and the stream is closed once `pending` drains.
 * NOTE(review): `opts` presumably also carries optional `maxDepth` and
 * `onError` — both are read below; confirm against callers.
 */
function createBatchStreamProducer(opts) {
    const { data } = opts;
    let counter = 0;
    // Sentinel written into the envelope where an async value used to be.
    const placeholder = 0;
    const [stream, controller] = createReadableStream();
    // Chunk ids that have been registered but not yet fully emitted.
    const pending = new Set();
    // Close the output stream only when every async chunk has settled.
    function maybeClose() {
        if (pending.size === 0) {
            controller.close();
        }
    }
    // Register a promise as a chunk: emit [idx, status, payload?] when it
    // settles, then mark it done. Returns the chunk id.
    function hydratePromise(promise, path) {
        //
        const error = checkMaxDepth(path);
        if (error) {
            // Swallow the original promise's rejection (we are replacing
            // it) so it does not surface as an unhandled rejection.
            promise.catch(()=>{
            // ignore
            });
            promise = Promise.reject(error);
        }
        const idx = counter++;
        pending.add(idx);
        const enqueue = (value)=>{
            controller.enqueue(value);
        };
        promise.then((it)=>{
            // Fulfilled values are themselves hydrated, so nested async
            // values inside the result become further chunks.
            enqueue([
                idx,
                PROMISE_STATUS_FULFILLED,
                hydrate(it, path)
            ]);
        }).catch((err)=>{
            opts.onError?.({
                error: err,
                path
            });
            // Rejections are signalled without a payload; the consumer
            // surfaces them as an AsyncError.
            enqueue([
                idx,
                PROMISE_STATUS_REJECTED
            ]);
        }).finally(()=>{
            pending.delete(idx);
            maybeClose();
        });
        return idx;
    }
    // Register an async iterable as a chunk: emit one VALUE record per
    // item, then DONE (or ERROR). Returns the chunk id.
    function hydrateAsyncIterable(iterable, path) {
        const error = checkMaxDepth(path);
        if (error) {
            // Replace the iterable with one that throws the depth error
            // as soon as iteration starts.
            iterable = {
                [Symbol.asyncIterator] () {
                    throw error;
                }
            };
        }
        const idx = counter++;
        pending.add(idx);
        // Fire-and-forget drain loop; errors are reported via onError and
        // the ERROR record, never thrown out of this function.
        void (async ()=>{
            try {
                for await (const item of iterable){
                    controller.enqueue([
                        idx,
                        ASYNC_ITERABLE_STATUS_VALUE,
                        hydrate(item, path)
                    ]);
                }
                controller.enqueue([
                    idx,
                    ASYNC_ITERABLE_STATUS_DONE
                ]);
            } catch (error) {
                opts.onError?.({
                    error,
                    path
                });
                controller.enqueue([
                    idx,
                    ASYNC_ITERABLE_STATUS_ERROR
                ]);
            } finally{
                pending.delete(idx);
                maybeClose();
            }
        })();
        return idx;
    }
    // Returns a MaxDepthError when the key path exceeds opts.maxDepth,
    // otherwise null. A falsy/absent maxDepth disables the check.
    function checkMaxDepth(path) {
        if (opts.maxDepth && path.length > opts.maxDepth) {
            return new MaxDepthError(path);
        }
        return null;
    }
    // If `value` is async (promise / async iterable), register it and
    // return its [type, chunkId] descriptor; otherwise return null.
    function hydrateChunk(value, path) {
        if (isPromise(value)) {
            return [
                CHUNK_VALUE_TYPE_PROMISE,
                hydratePromise(value, path)
            ];
        }
        if (utils.isAsyncIterable(value)) {
            // NOTE(review): iterables are rejected at `>= maxDepth` while
            // promises reject at `> maxDepth` (inside hydratePromise) —
            // looks intentional but worth confirming upstream.
            if (opts.maxDepth && path.length >= opts.maxDepth) {
                throw new Error('Max depth reached');
            }
            return [
                CHUNK_VALUE_TYPE_ASYNC_ITERABLE,
                hydrateAsyncIterable(value, path)
            ];
        }
        return null;
    }
    // Convert a value into its envelope form:
    //   [[data], ...asyncRefs]
    // where asyncRefs are [key|null, type, chunkId] triples. A null key
    // means the value itself was async (not a property of it).
    function hydrate(value, path) {
        const reg = hydrateChunk(value, path);
        if (reg) {
            return [
                [
                    placeholder
                ],
                [
                    null,
                    ...reg
                ]
            ];
        }
        if (!utils.isObject(value)) {
            // Primitives pass through untouched.
            return [
                [
                    value
                ]
            ];
        }
        // Plain object: copy it, replacing async properties with the
        // placeholder and recording a chunk reference per property.
        const newObj = {};
        const asyncValues = [];
        for (const [key, item] of Object.entries(value)){
            const transformed = hydrateChunk(item, [
                ...path,
                key
            ]);
            if (!transformed) {
                newObj[key] = item;
                continue;
            }
            newObj[key] = placeholder;
            asyncValues.push([
                key,
                ...transformed
            ]);
        }
        return [
            [
                newObj
            ],
            ...asyncValues
        ];
    }
    // Hydrate every top-level entry of the batch record.
    const newHead = {};
    for (const [key, item] of Object.entries(data)){
        newHead[key] = hydrate(item, [
            key
        ]);
    }
    return [
        newHead,
        stream
    ];
}
|
|
200
|
+
/**
 * JSON Lines stream producer: serializes the batch head as the first
 * line, then one line per async chunk, and encodes the result as bytes.
 * @see https://jsonlines.org/
 */
function jsonlStreamProducer(opts) {
    let [head, stream] = createBatchStreamProducer(opts);
    const { serialize } = opts;
    if (serialize) {
        // Apply the custom serializer to the head eagerly and to every
        // chunk as it flows through.
        head = serialize(head);
        const serializer = new TransformStream({
            transform(chunk, controller) {
                controller.enqueue(serialize(chunk));
            }
        });
        stream = stream.pipeThrough(serializer);
    }
    const stringifier = new TransformStream({
        start(controller) {
            // The head is always the first emitted line.
            controller.enqueue(JSON.stringify(head) + '\n');
        },
        transform(chunk, controller) {
            controller.enqueue(JSON.stringify(chunk) + '\n');
        }
    });
    return stream.pipeThrough(stringifier).pipeThrough(new TextEncoderStream());
}
|
|
223
|
+
/**
 * Signals that the JSONL stream ended or errored before all pending
 * chunks were delivered; `cause` carries the underlying reason.
 *
 * Fix: set `name` so the error identifies itself in logs/stack traces
 * instead of the generic "Error".
 */
class StreamInterruptedError extends Error {
    constructor(cause){
        // Error `cause` option — https://github.com/tc39/proposal-error-cause
        super('Invalid response or stream interrupted', {
            cause
        });
        this.name = 'StreamInterruptedError';
    }
}
|
|
232
|
+
/**
 * Wraps an error payload received over the wire for a rejected promise
 * chunk or an errored async-iterable chunk; `data` is the serialized
 * error payload from the server.
 *
 * Fix: set `name` so the error identifies itself as `AsyncError`
 * instead of the generic "Error".
 */
class AsyncError extends Error {
    constructor(data){
        super('Received error from server');
        this.name = 'AsyncError';
        this.data = data;
    }
}
|
|
238
|
+
/**
 * Adapt a Node.js EventEmitter-style readable (`data`/`end`/`error`
 * events) to an object exposing `getReader()`, mirroring the WHATWG
 * ReadableStream surface that the line accumulator expects.
 */
const nodeJsStreamToReaderEsque = (source) => ({
    getReader() {
        const [stream, controller] = createReadableStream();
        source.on('data', (chunk) => controller.enqueue(chunk));
        source.on('end', () => controller.close());
        source.on('error', (error) => controller.error(error));
        return stream.getReader();
    }
});
|
|
255
|
+
/**
 * Split an incoming byte stream into complete text lines.
 *
 * Accepts either a WHATWG ReadableStream (anything with `getReader`) or
 * a Node.js event-style readable, decodes it as UTF-8, and emits one
 * string per '\n'-terminated line.
 *
 * Fix: the original TransformStream had no `flush`, so a final line
 * lacking a trailing '\n' was silently dropped when the source closed.
 * Well-formed JSONL input (every record ends in '\n') is unaffected.
 */
function createLineAccumulator(from) {
    const reader = 'getReader' in from ? from.getReader() : nodeJsStreamToReaderEsque(from).getReader();
    // Holds the partial line carried over between chunks.
    let lineAggregate = '';
    return new ReadableStream({
        async pull(controller) {
            const { done, value } = await reader.read();
            if (done) {
                controller.close();
            } else {
                controller.enqueue(value);
            }
        },
        cancel() {
            // Propagate cancellation to the underlying reader.
            return reader.cancel();
        }
    })
        .pipeThrough(new TextDecoderStream())
        .pipeThrough(new TransformStream({
            transform(chunk, controller) {
                lineAggregate += chunk;
                const parts = lineAggregate.split('\n');
                // The final fragment may be an incomplete line; keep it
                // for the next chunk.
                lineAggregate = parts.pop() ?? '';
                for (const part of parts) {
                    controller.enqueue(part);
                }
            },
            flush(controller) {
                // Emit any trailing line that was not '\n'-terminated.
                if (lineAggregate !== '') {
                    controller.enqueue(lineAggregate);
                }
            }
        }));
}
|
|
281
|
+
/**
 * Parse a line-delimited source into JSON values: the first parsed line
 * is the envelope head, every following line is an async chunk record.
 */
function createConsumerStream(from) {
    const lines = createLineAccumulator(from);
    let sentHead = false;
    const parser = new TransformStream({
        transform(line, controller) {
            // Head and chunks are both plain JSON lines; the flag only
            // distinguishes which one this line represents.
            const parsed = JSON.parse(line);
            if (!sentHead) {
                sentHead = true;
            }
            controller.enqueue(parsed);
        }
    });
    return lines.pipeThrough(parser);
}
|
|
297
|
+
/**
 * A promise with its `resolve` / `reject` handles exposed so the
 * outcome can be settled from outside the executor.
 */
function createDeferred() {
    const deferred = {};
    deferred.promise = new Promise((resolve, reject) => {
        deferred.resolve = resolve;
        deferred.reject = reject;
    });
    return deferred;
}
|
|
310
|
+
/**
 * JSON Lines stream consumer: the counterpart of jsonlStreamProducer.
 * Reads the head line, reconstructs promises / async iterables from the
 * chunk records that follow, and resolves with:
 *   [head, { controllers }]
 * where `controllers` maps still-open chunk ids to their stream
 * controllers.
 * @see https://jsonlines.org/
 */ async function jsonlStreamConsumer(opts) {
    const { deserialize =(v)=>v } = opts;
    let source = createConsumerStream(opts.from);
    // NOTE: `deserialize` always has a value here thanks to the default,
    // so this branch is always taken as written.
    if (deserialize) {
        source = source.pipeThrough(new TransformStream({
            transform (chunk, controller) {
                controller.enqueue(deserialize(chunk));
            }
        }));
    }
    // Resolves with the head once the first line arrives; set to null
    // afterwards so later writes are treated as chunks.
    let headDeferred = createDeferred();
    // chunkId -> deferred controller, for chunks whose data arrived
    // before their definition was dehydrated.
    const chunkDeferred = new Map();
    // chunkId -> controller of the per-chunk stream currently open.
    const controllers = new Map();
    // Turn a [path, type, chunkId] reference from the wire back into a
    // live promise or async iterable fed by the per-chunk stream.
    function dehydrateChunkDefinition(value) {
        const [_path, type, chunkId] = value;
        const [stream, controller] = createReadableStream();
        controllers.set(chunkId, controller);
        // resolve chunk deferred if it exists
        const deferred = chunkDeferred.get(chunkId);
        if (deferred) {
            deferred.resolve(controller);
            chunkDeferred.delete(chunkId);
        }
        switch(type){
            case CHUNK_VALUE_TYPE_PROMISE:
                {
                    return new Promise((resolve, reject)=>{
                        // listen for next value in the stream
                        const reader = stream.getReader();
                        reader.read().then((it)=>{
                            if (it.done) {
                                reject(new Error('Promise chunk ended without value'));
                                return;
                            }
                            // An interruption sentinel is enqueued by
                            // closeOrAbort below; surface it as rejection.
                            if (it.value instanceof StreamInterruptedError) {
                                reject(it.value);
                                return;
                            }
                            const value = it.value;
                            const [_chunkId, status, data] = value;
                            switch(status){
                                case PROMISE_STATUS_FULFILLED:
                                    // Payload may itself contain nested
                                    // chunk references; recurse.
                                    resolve(dehydrate(data));
                                    break;
                                case PROMISE_STATUS_REJECTED:
                                    reject(new AsyncError(data));
                                    break;
                            }
                        }).catch(reject).finally(()=>{
                            // reader.releaseLock();
                            controllers.delete(chunkId);
                        });
                    });
                }
            case CHUNK_VALUE_TYPE_ASYNC_ITERABLE:
                {
                    return {
                        [Symbol.asyncIterator]: async function*() {
                            const reader = stream.getReader();
                            while(true){
                                const { done , value } = await reader.read();
                                if (done) {
                                    break;
                                }
                                if (value instanceof StreamInterruptedError) {
                                    throw value;
                                }
                                const [_chunkId, status, data] = value;
                                switch(status){
                                    case ASYNC_ITERABLE_STATUS_VALUE:
                                        yield dehydrate(data);
                                        break;
                                    case ASYNC_ITERABLE_STATUS_DONE:
                                        controllers.delete(chunkId);
                                        return;
                                    case ASYNC_ITERABLE_STATUS_ERROR:
                                        controllers.delete(chunkId);
                                        throw new AsyncError(data);
                                }
                            }
                        }
                    };
                }
        }
    }
    // Rebuild a hydrated envelope [[data], ...asyncRefs]. A null path on
    // a ref means the whole value was async; otherwise the ref replaces
    // the placeholder stored at data[path].
    function dehydrate(value) {
        const [[data], ...asyncProps] = value;
        for (const value1 of asyncProps){
            const dehydrated = dehydrateChunkDefinition(value1);
            const [path] = value1;
            if (path === null) {
                return dehydrated;
            }
            data[path] = dehydrated;
        }
        return data;
    }
    // Shared teardown: reject anything still waiting and push the
    // interruption sentinel into every open chunk stream.
    const closeOrAbort = (reason)=>{
        const error = new StreamInterruptedError(reason);
        headDeferred?.reject(error);
        for (const deferred of chunkDeferred.values()){
            deferred.reject(error);
        }
        chunkDeferred.clear();
        for (const controller of controllers.values()){
            controller.enqueue(error);
            controller.close();
        }
        controllers.clear();
    };
    // Intentionally not awaited: the pump runs for the lifetime of the
    // stream while we return the head as soon as it is available.
    source.pipeTo(new WritableStream({
        async write (chunkOrHead) {
            if (headDeferred) {
                // First record: the head envelope.
                const head = chunkOrHead;
                for (const [key, value] of Object.entries(chunkOrHead)){
                    const parsed = dehydrate(value);
                    head[key] = parsed;
                }
                headDeferred.resolve(head);
                headDeferred = null;
                return;
            }
            // Subsequent records: route to the chunk's controller,
            // waiting for its definition if data arrived first.
            const chunk = chunkOrHead;
            const [idx] = chunk;
            let controller = controllers.get(idx);
            if (!controller) {
                let deferred = chunkDeferred.get(idx);
                if (!deferred) {
                    deferred = createDeferred();
                    chunkDeferred.set(idx, deferred);
                }
                controller = await deferred.promise;
            }
            controller.enqueue(chunk);
        },
        close: closeOrAbort,
        abort: closeOrAbort
    })).catch((error)=>{
        opts.onError?.({
            error
        });
        closeOrAbort(error);
    });
    return [
        await headDeferred.promise,
        {
            controllers
        }
    ];
}
|
|
463
|
+
|
|
464
|
+
// Public API of this module (CommonJS build).
exports.isPromise = isPromise;
exports.jsonlStreamConsumer = jsonlStreamConsumer;
exports.jsonlStreamProducer = jsonlStreamProducer;
|