@replit/river 0.23.16 → 0.200.0-rc.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/chunk-45HIR2AS.js +1993 -0
- package/dist/chunk-45HIR2AS.js.map +1 -0
- package/dist/{chunk-YXDAOVP7.js → chunk-75B5D7MR.js} +2 -2
- package/dist/{chunk-WN77AT67.js → chunk-AJBZUHSI.js} +22 -6
- package/dist/chunk-AJBZUHSI.js.map +1 -0
- package/dist/{chunk-R47IZD67.js → chunk-EMIERCU7.js} +2 -2
- package/dist/{chunk-MQCGG6KL.js → chunk-LH2VHMQM.js} +4 -4
- package/dist/{chunk-6LCL2ZZF.js → chunk-QMM35C3H.js} +1 -1
- package/dist/chunk-QMM35C3H.js.map +1 -0
- package/dist/{chunk-JA7XGTAL.js → chunk-W75HU4F6.js} +4 -4
- package/dist/{chunk-UDXM64QK.js → chunk-WQVOMUNR.js} +2 -2
- package/dist/{chunk-TXSQRTZB.js → chunk-XLXRFSRB.js} +25 -10
- package/dist/chunk-XLXRFSRB.js.map +1 -0
- package/dist/{connection-d738cc08.d.ts → connection-0638316b.d.ts} +1 -1
- package/dist/{connection-99a67d3e.d.ts → connection-c6521735.d.ts} +1 -1
- package/dist/{index-ea74cdbb.d.ts → index-10ebd26a.d.ts} +33 -33
- package/dist/logging/index.cjs.map +1 -1
- package/dist/logging/index.d.cts +1 -1
- package/dist/logging/index.d.ts +1 -1
- package/dist/logging/index.js +1 -1
- package/dist/router/index.cjs +1109 -968
- package/dist/router/index.cjs.map +1 -1
- package/dist/router/index.d.cts +19 -23
- package/dist/router/index.d.ts +19 -23
- package/dist/router/index.js +12 -6
- package/dist/services-34d97070.d.ts +1366 -0
- package/dist/transport/impls/uds/client.cjs +20 -4
- package/dist/transport/impls/uds/client.cjs.map +1 -1
- package/dist/transport/impls/uds/client.d.cts +3 -4
- package/dist/transport/impls/uds/client.d.ts +3 -4
- package/dist/transport/impls/uds/client.js +6 -6
- package/dist/transport/impls/uds/server.cjs +20 -4
- package/dist/transport/impls/uds/server.cjs.map +1 -1
- package/dist/transport/impls/uds/server.d.cts +4 -4
- package/dist/transport/impls/uds/server.d.ts +4 -4
- package/dist/transport/impls/uds/server.js +6 -6
- package/dist/transport/impls/ws/client.cjs +20 -4
- package/dist/transport/impls/ws/client.cjs.map +1 -1
- package/dist/transport/impls/ws/client.d.cts +5 -6
- package/dist/transport/impls/ws/client.d.ts +5 -6
- package/dist/transport/impls/ws/client.js +6 -6
- package/dist/transport/impls/ws/server.cjs +20 -4
- package/dist/transport/impls/ws/server.cjs.map +1 -1
- package/dist/transport/impls/ws/server.d.cts +4 -4
- package/dist/transport/impls/ws/server.d.ts +4 -4
- package/dist/transport/impls/ws/server.js +6 -6
- package/dist/transport/index.cjs +20 -4
- package/dist/transport/index.cjs.map +1 -1
- package/dist/transport/index.d.cts +27 -5
- package/dist/transport/index.d.ts +27 -5
- package/dist/transport/index.js +6 -6
- package/dist/util/testHelpers.cjs +354 -310
- package/dist/util/testHelpers.cjs.map +1 -1
- package/dist/util/testHelpers.d.cts +32 -21
- package/dist/util/testHelpers.d.ts +32 -21
- package/dist/util/testHelpers.js +76 -42
- package/dist/util/testHelpers.js.map +1 -1
- package/package.json +2 -3
- package/dist/chunk-6LCL2ZZF.js.map +0 -1
- package/dist/chunk-LTSLICON.js +0 -1865
- package/dist/chunk-LTSLICON.js.map +0 -1
- package/dist/chunk-TXSQRTZB.js.map +0 -1
- package/dist/chunk-WN77AT67.js.map +0 -1
- package/dist/client-0926d3d6.d.ts +0 -52
- package/dist/handshake-75d0124f.d.ts +0 -516
- package/dist/server-3740c5d9.d.ts +0 -24
- package/dist/services-75e84a9f.d.ts +0 -709
- /package/dist/{chunk-YXDAOVP7.js.map → chunk-75B5D7MR.js.map} +0 -0
- /package/dist/{chunk-R47IZD67.js.map → chunk-EMIERCU7.js.map} +0 -0
- /package/dist/{chunk-MQCGG6KL.js.map → chunk-LH2VHMQM.js.map} +0 -0
- /package/dist/{chunk-JA7XGTAL.js.map → chunk-W75HU4F6.js.map} +0 -0
- /package/dist/{chunk-UDXM64QK.js.map → chunk-WQVOMUNR.js.map} +0 -0
@@ -0,0 +1,1993 @@
import {
  ControlMessageCloseSchema,
  ControlMessagePayloadSchema,
  coerceErrorString,
  createHandlerSpan,
  createProcTelemetryInfo,
  getPropagationContext,
  isStreamAbort,
  isStreamClose,
  isStreamCloseRequest,
  isStreamOpen
} from "./chunk-XLXRFSRB.js";

// router/services.ts
import { Type } from "@sinclair/typebox";
function serializeSchema(services, handshakeSchema) {
  const serializedServiceObject = Object.entries(services).reduce((acc, [name, value]) => {
    acc[name] = value.serialize();
    return acc;
  }, {});
  const schema = {
    services: serializedServiceObject
  };
  if (handshakeSchema) {
    schema.handshakeSchema = Type.Strict(handshakeSchema);
  }
  return schema;
}
var ServiceSchema = class _ServiceSchema {
  /**
   * Factory function for creating a fresh state.
   */
  initializeState;
  /**
   * The procedures for this service.
   */
  procedures;
  /**
   * @param config - The configuration for this service.
   * @param procedures - The procedures for this service.
   */
  constructor(config, procedures) {
    this.initializeState = config.initializeState;
    this.procedures = procedures;
  }
  /**
   * Creates a {@link ServiceScaffold}, which can be used to define procedures
   * that can then be merged into a {@link ServiceSchema}, via the scaffold's
   * `finalize` method.
   *
   * There are two patterns that work well with this method. The first is using
   * it to separate the definition of procedures from the definition of the
   * service's configuration:
   * ```ts
   * const MyServiceScaffold = ServiceSchema.scaffold({
   *   initializeState: () => ({ count: 0 }),
   * });
   *
   * const incrementProcedures = MyServiceScaffold.procedures({
   *   increment: Procedure.rpc({
   *     init: Type.Object({ amount: Type.Number() }),
   *     output: Type.Object({ current: Type.Number() }),
   *     async handler(ctx, init) {
   *       ctx.state.count += init.amount;
   *       return Ok({ current: ctx.state.count });
   *     }
   *   }),
   * })
   *
   * const MyService = MyServiceScaffold.finalize({
   *   ...incrementProcedures,
   *   // you can also directly define procedures here
   * });
   * ```
   * This might be really handy if you have a very large service and you're
   * wanting to split it over multiple files. You can define the scaffold
   * in one file, and then import that scaffold in other files where you
   * define procedures - and then finally import the scaffolds and your
   * procedure objects in a final file where you finalize the scaffold into
   * a service schema.
   *
   * The other way is to use it like in a builder pattern:
   * ```ts
   * const MyService = ServiceSchema
   *   .scaffold({ initializeState: () => ({ count: 0 }) })
   *   .finalize({
   *     increment: Procedure.rpc({
   *       init: Type.Object({ amount: Type.Number() }),
   *       output: Type.Object({ current: Type.Number() }),
   *       async handler(ctx, init) {
   *         ctx.state.count += init.amount;
   *         return Ok({ current: ctx.state.count });
   *       }
   *     }),
   *   })
   * ```
   * Depending on your preferences, this may be a more appealing way to define
   * a schema versus using the {@link ServiceSchema.define} method.
   */
  static scaffold(config) {
    return new ServiceScaffold(config);
  }
  // actual implementation
  static define(configOrProcedures, maybeProcedures) {
    let config;
    let procedures;
    if ("initializeState" in configOrProcedures && typeof configOrProcedures.initializeState === "function") {
      if (!maybeProcedures) {
        throw new Error("Expected procedures to be defined");
      }
      config = configOrProcedures;
      procedures = maybeProcedures;
    } else {
      config = { initializeState: () => ({}) };
      procedures = configOrProcedures;
    }
    return new _ServiceSchema(config, procedures);
  }
  /**
   * Serializes this schema's procedures into a plain object that is JSON compatible.
   */
  serialize() {
    return {
      procedures: Object.fromEntries(
        Object.entries(this.procedures).map(([procName, procDef]) => [
          procName,
          {
            init: Type.Strict(procDef.init),
            output: Type.Strict(procDef.output),
            // Only add `description` field if the type declares it.
            ..."description" in procDef ? { description: procDef.description } : {},
            // Only add the `errors` field if the type declares it.
            ..."errors" in procDef ? {
              errors: Type.Strict(procDef.errors)
            } : {},
            type: procDef.type,
            // Only add the `input` field if the type declares it.
            ..."input" in procDef ? {
              input: Type.Strict(procDef.input)
            } : {}
          }
        ])
      )
    };
  }
  /**
   * Instantiates this schema into a {@link Service} object.
   *
   * You probably don't need this, usually the River server will handle this
   * for you.
   */
  instantiate(extendedContext) {
    return Object.freeze({
      state: this.initializeState(extendedContext),
      procedures: this.procedures
    });
  }
};
var ServiceScaffold = class {
  /**
   * The configuration for this service.
   */
  config;
  /**
   * @param config - The configuration for this service.
   */
  constructor(config) {
    this.config = config;
  }
  /**
   * Define procedures for this service. Use the {@link Procedure} constructors
   * to create them. This returns the procedures object, which can then be
   * passed to {@link ServiceSchema.finalize} to create a {@link ServiceSchema}.
   *
   * @example
   * ```
   * const myProcedures = MyServiceScaffold.procedures({
   *   myRPC: Procedure.rpc({
   *     // ...
   *   }),
   * });
   *
   * const MyService = MyServiceScaffold.finalize({
   *   ...myProcedures,
   * });
   * ```
   *
   * @param procedures - The procedures for this service.
   */
  procedures(procedures) {
    return procedures;
  }
  /**
   * Finalizes the scaffold into a {@link ServiceSchema}. This is where you
   * provide the service's procedures and get a {@link ServiceSchema} in return.
   *
   * You can directly define procedures here, or you can define them separately
   * with the {@link ServiceScaffold.procedures} method, and then pass them here.
   *
   * @example
   * ```
   * const MyService = MyServiceScaffold.finalize({
   *   myRPC: Procedure.rpc({
   *     // ...
   *   }),
   *   // e.g. from the procedures method
   *   ...myOtherProcedures,
   * });
   * ```
   */
  finalize(procedures) {
    return ServiceSchema.define(this.config, procedures);
  }
};

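For readers skimming the diff, the one-step `ServiceSchema.define` overload composes with the `Procedure` constructors defined later in this chunk roughly as follows. This is a minimal sketch assembled from the JSDoc examples above; the `CounterService` name is illustrative and `ServiceSchema`, `Procedure`, and `Ok` are assumed to be in scope (all three are defined in this chunk).

import { Type } from "@sinclair/typebox";

// ServiceSchema, Procedure and Ok come from this chunk's exports.
const CounterService = ServiceSchema.define(
  { initializeState: () => ({ count: 0 }) },
  {
    increment: Procedure.rpc({
      init: Type.Object({ amount: Type.Number() }),
      output: Type.Object({ current: Type.Number() }),
      async handler(ctx, init) {
        ctx.state.count += init.amount;
        return Ok({ current: ctx.state.count });
      },
    }),
  },
);

// serialize() produces the JSON-compatible shape consumed by serializeSchema().
const description = CounterService.serialize();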
|
|
216
|
+
// router/diff.ts
|
|
217
|
+
function diffServerSchema(oldServer, newServer, options) {
|
|
218
|
+
const allServices = /* @__PURE__ */ new Set([
|
|
219
|
+
...Object.keys(oldServer.services),
|
|
220
|
+
...Object.keys(newServer.services)
|
|
221
|
+
]);
|
|
222
|
+
const breakages = {};
|
|
223
|
+
for (const serviceName of allServices) {
|
|
224
|
+
const oldService = oldServer.services[serviceName];
|
|
225
|
+
const newService = newServer.services[serviceName];
|
|
226
|
+
const breakage = diffService(oldService, newService, options);
|
|
227
|
+
if (breakage) {
|
|
228
|
+
breakages[serviceName] = breakage;
|
|
229
|
+
}
|
|
230
|
+
}
|
|
231
|
+
if (Object.keys(breakages).length) {
|
|
232
|
+
return { serviceBreakages: breakages };
|
|
233
|
+
}
|
|
234
|
+
return null;
|
|
235
|
+
}
|
|
236
|
+
function diffService(oldService, newService, options) {
|
|
237
|
+
if (!newService) {
|
|
238
|
+
return options?.allowServiceRemoval ? null : { reason: "removed" };
|
|
239
|
+
}
|
|
240
|
+
if (!oldService) {
|
|
241
|
+
return null;
|
|
242
|
+
}
|
|
243
|
+
const allProcedures = /* @__PURE__ */ new Set([
|
|
244
|
+
...Object.keys(oldService.procedures),
|
|
245
|
+
...Object.keys(newService.procedures)
|
|
246
|
+
]);
|
|
247
|
+
const breakages = {};
|
|
248
|
+
for (const procedureName of allProcedures) {
|
|
249
|
+
const aProcedure = oldService.procedures[procedureName];
|
|
250
|
+
const bProcedure = newService.procedures[procedureName];
|
|
251
|
+
const breakage = diffProcedure(aProcedure, bProcedure, options);
|
|
252
|
+
if (breakage) {
|
|
253
|
+
breakages[procedureName] = breakage;
|
|
254
|
+
}
|
|
255
|
+
}
|
|
256
|
+
if (Object.keys(breakages).length) {
|
|
257
|
+
return { reason: "modified", procedureBreakages: breakages };
|
|
258
|
+
}
|
|
259
|
+
return null;
|
|
260
|
+
}
|
|
261
|
+
function diffProcedure(oldProcedure, newProcedure, options) {
|
|
262
|
+
if (!newProcedure) {
|
|
263
|
+
return options?.allowProcedureRemoval ? null : { reason: "removed" };
|
|
264
|
+
}
|
|
265
|
+
if (!oldProcedure) {
|
|
266
|
+
return null;
|
|
267
|
+
}
|
|
268
|
+
if (oldProcedure.type !== newProcedure.type) {
|
|
269
|
+
return {
|
|
270
|
+
reason: "type-changed",
|
|
271
|
+
oldType: oldProcedure.type,
|
|
272
|
+
newType: newProcedure.type
|
|
273
|
+
};
|
|
274
|
+
}
|
|
275
|
+
const inputBreakage = diffProcedureField(
|
|
276
|
+
oldProcedure.input,
|
|
277
|
+
newProcedure.input,
|
|
278
|
+
"client"
|
|
279
|
+
);
|
|
280
|
+
const initBreakage = diffProcedureField(
|
|
281
|
+
oldProcedure.init,
|
|
282
|
+
newProcedure.init,
|
|
283
|
+
"client"
|
|
284
|
+
);
|
|
285
|
+
const outputBreakage = diffProcedureField(
|
|
286
|
+
oldProcedure.output,
|
|
287
|
+
newProcedure.output,
|
|
288
|
+
"server"
|
|
289
|
+
);
|
|
290
|
+
if (inputBreakage ?? initBreakage ?? outputBreakage) {
|
|
291
|
+
const result = {
|
|
292
|
+
reason: "modified"
|
|
293
|
+
};
|
|
294
|
+
if (inputBreakage) {
|
|
295
|
+
result.input = inputBreakage;
|
|
296
|
+
}
|
|
297
|
+
if (initBreakage) {
|
|
298
|
+
result.init = initBreakage;
|
|
299
|
+
}
|
|
300
|
+
if (outputBreakage) {
|
|
301
|
+
result.output = outputBreakage;
|
|
302
|
+
}
|
|
303
|
+
return result;
|
|
304
|
+
}
|
|
305
|
+
return null;
|
|
306
|
+
}
|
|
307
|
+
function diffProcedureField(oldSchema, newSchema, origin) {
|
|
308
|
+
if (!oldSchema && !newSchema) {
|
|
309
|
+
return null;
|
|
310
|
+
}
|
|
311
|
+
const diffBreakage = diffRequired(oldSchema, newSchema, origin, false, false);
|
|
312
|
+
if (diffBreakage) {
|
|
313
|
+
return diffBreakage;
|
|
314
|
+
}
|
|
315
|
+
if (!oldSchema || !newSchema) {
|
|
316
|
+
throw new Error("Appease typescript, this should never happen");
|
|
317
|
+
}
|
|
318
|
+
return diffJSONSchema(oldSchema, newSchema, origin);
|
|
319
|
+
}
|
|
320
|
+
function diffRequired(oldSchema, newSchema, origin, oldRequired, newRequired) {
|
|
321
|
+
if (!newSchema && !oldSchema) {
|
|
322
|
+
throw new Error("Both old and new schema are undefined");
|
|
323
|
+
}
|
|
324
|
+
if (!newSchema) {
|
|
325
|
+
if (!oldRequired && origin == "server") {
|
|
326
|
+
return null;
|
|
327
|
+
}
|
|
328
|
+
return { reason: "removed-required" };
|
|
329
|
+
}
|
|
330
|
+
if (!oldSchema) {
|
|
331
|
+
if (newRequired && origin === "client") {
|
|
332
|
+
return { reason: "new-required" };
|
|
333
|
+
}
|
|
334
|
+
return null;
|
|
335
|
+
}
|
|
336
|
+
if (origin === "client" && !oldRequired && newRequired) {
|
|
337
|
+
return { reason: "new-required" };
|
|
338
|
+
}
|
|
339
|
+
if (origin === "server" && oldRequired && !newRequired) {
|
|
340
|
+
return { reason: "removed-required" };
|
|
341
|
+
}
|
|
342
|
+
return null;
|
|
343
|
+
}
|
|
344
|
+
function diffJSONSchema(oldSchema, newSchema, origin) {
|
|
345
|
+
if (oldSchema.type !== newSchema.type) {
|
|
346
|
+
return {
|
|
347
|
+
reason: "type-changed",
|
|
348
|
+
oldType: getReportingType(oldSchema),
|
|
349
|
+
newType: getReportingType(newSchema)
|
|
350
|
+
};
|
|
351
|
+
}
|
|
352
|
+
if (getReportingType(oldSchema) !== getReportingType(newSchema)) {
|
|
353
|
+
return {
|
|
354
|
+
reason: "type-changed",
|
|
355
|
+
oldType: getReportingType(oldSchema),
|
|
356
|
+
newType: getReportingType(newSchema)
|
|
357
|
+
};
|
|
358
|
+
}
|
|
359
|
+
if ("const" in oldSchema && "const" in newSchema && oldSchema.const !== newSchema.const) {
|
|
360
|
+
return {
|
|
361
|
+
reason: "type-changed",
|
|
362
|
+
oldType: `${getReportingType(oldSchema)}-const-${oldSchema.const}`,
|
|
363
|
+
newType: `${getReportingType(newSchema)}-const-${newSchema.const}`
|
|
364
|
+
};
|
|
365
|
+
}
|
|
366
|
+
if ("const" in oldSchema && !("const" in newSchema) && origin === "server") {
|
|
367
|
+
return {
|
|
368
|
+
reason: "type-changed",
|
|
369
|
+
oldType: `${getReportingType(oldSchema)}-const-${oldSchema.const}`,
|
|
370
|
+
newType: getReportingType(newSchema)
|
|
371
|
+
};
|
|
372
|
+
}
|
|
373
|
+
if ("const" in newSchema && !("const" in oldSchema) && origin === "client") {
|
|
374
|
+
return {
|
|
375
|
+
reason: "type-changed",
|
|
376
|
+
oldType: getReportingType(oldSchema),
|
|
377
|
+
newType: `${getReportingType(newSchema)}-const-${newSchema.const}`
|
|
378
|
+
};
|
|
379
|
+
}
|
|
380
|
+
const breakages = {};
|
|
381
|
+
if ("$ref" in newSchema) {
|
|
382
|
+
if (newSchema.$ref !== oldSchema.$ref) {
|
|
383
|
+
return {
|
|
384
|
+
reason: "type-changed",
|
|
385
|
+
oldType: getReportingType(oldSchema),
|
|
386
|
+
newType: getReportingType(newSchema)
|
|
387
|
+
};
|
|
388
|
+
}
|
|
389
|
+
} else if ("not" in newSchema) {
|
|
390
|
+
const notBreakage = diffJSONSchema(
|
|
391
|
+
oldSchema.not,
|
|
392
|
+
newSchema.not,
|
|
393
|
+
origin
|
|
394
|
+
);
|
|
395
|
+
if (notBreakage) {
|
|
396
|
+
breakages.not = notBreakage;
|
|
397
|
+
}
|
|
398
|
+
} else if ("anyOf" in newSchema) {
|
|
399
|
+
const oldAnyOfStringified = oldSchema.anyOf.map((el) => JSON.stringify(el)).sort();
|
|
400
|
+
const newAnyOfStringified = newSchema.anyOf.map((el) => JSON.stringify(el)).sort();
|
|
401
|
+
const anyOfBreakages = {};
|
|
402
|
+
for (let i = 0; i < oldAnyOfStringified.length; i++) {
|
|
403
|
+
if (newAnyOfStringified.includes(oldAnyOfStringified[i])) {
|
|
404
|
+
continue;
|
|
405
|
+
}
|
|
406
|
+
if (!newAnyOfStringified[i]) {
|
|
407
|
+
if (origin === "server") {
|
|
408
|
+
continue;
|
|
409
|
+
}
|
|
410
|
+
anyOfBreakages[`old-${i}`] = { reason: "removed-required" };
|
|
411
|
+
} else {
|
|
412
|
+
const breakage = diffJSONSchema(
|
|
413
|
+
JSON.parse(oldAnyOfStringified[i]),
|
|
414
|
+
JSON.parse(newAnyOfStringified[i]),
|
|
415
|
+
origin
|
|
416
|
+
);
|
|
417
|
+
if (breakage) {
|
|
418
|
+
anyOfBreakages[`old-${i}`] = breakage;
|
|
419
|
+
}
|
|
420
|
+
}
|
|
421
|
+
}
|
|
422
|
+
for (let i = 0; i < newAnyOfStringified.length; i++) {
|
|
423
|
+
if (oldAnyOfStringified.includes(newAnyOfStringified[i])) {
|
|
424
|
+
continue;
|
|
425
|
+
}
|
|
426
|
+
if (!oldAnyOfStringified[i]) {
|
|
427
|
+
if (origin === "client") {
|
|
428
|
+
continue;
|
|
429
|
+
}
|
|
430
|
+
anyOfBreakages[`new-${i}`] = { reason: "new-required" };
|
|
431
|
+
} else {
|
|
432
|
+
const breakage = diffJSONSchema(
|
|
433
|
+
JSON.parse(oldAnyOfStringified[i]),
|
|
434
|
+
JSON.parse(newAnyOfStringified[i]),
|
|
435
|
+
origin
|
|
436
|
+
);
|
|
437
|
+
if (breakage) {
|
|
438
|
+
anyOfBreakages[`new-${i}`] = breakage;
|
|
439
|
+
}
|
|
440
|
+
}
|
|
441
|
+
}
|
|
442
|
+
if (Object.keys(anyOfBreakages).length > 0) {
|
|
443
|
+
breakages.anyOf = {
|
|
444
|
+
reason: "field-breakage",
|
|
445
|
+
fieldBreakages: anyOfBreakages
|
|
446
|
+
};
|
|
447
|
+
}
|
|
448
|
+
} else if ("oneOf" in newSchema) {
|
|
449
|
+
throw new Error("oneOf is not supported, typebox does not emit it");
|
|
450
|
+
} else if ("allOf" in newSchema) {
|
|
451
|
+
if (newSchema.allOf.length !== oldSchema.allOf.length) {
|
|
452
|
+
breakages.allOf = {
|
|
453
|
+
reason: "type-changed",
|
|
454
|
+
oldType: `${oldSchema.allOf}`,
|
|
455
|
+
newType: `${newSchema.allOf}`
|
|
456
|
+
};
|
|
457
|
+
} else {
|
|
458
|
+
for (let i = 0; i < newSchema.allOf.length; i++) {
|
|
459
|
+
const breakage = diffJSONSchema(
|
|
460
|
+
oldSchema.allOf[i],
|
|
461
|
+
newSchema.allOf[i],
|
|
462
|
+
origin
|
|
463
|
+
);
|
|
464
|
+
if (breakage) {
|
|
465
|
+
breakages.allOf = breakage;
|
|
466
|
+
break;
|
|
467
|
+
}
|
|
468
|
+
}
|
|
469
|
+
}
|
|
470
|
+
} else if (newSchema.type === "array") {
|
|
471
|
+
const itemsBreakages = diffJSONSchema(
|
|
472
|
+
oldSchema.items,
|
|
473
|
+
newSchema.items,
|
|
474
|
+
origin
|
|
475
|
+
);
|
|
476
|
+
if (itemsBreakages) {
|
|
477
|
+
breakages.items = itemsBreakages;
|
|
478
|
+
}
|
|
479
|
+
if (oldSchema.minItems < newSchema.minItems) {
|
|
480
|
+
if (origin === "client") {
|
|
481
|
+
breakages.minItems = {
|
|
482
|
+
reason: "type-changed",
|
|
483
|
+
oldType: `${oldSchema.minItems}`,
|
|
484
|
+
newType: `${newSchema.minItems}`
|
|
485
|
+
};
|
|
486
|
+
}
|
|
487
|
+
} else if (oldSchema.minItems > newSchema.minItems) {
|
|
488
|
+
if (origin === "server") {
|
|
489
|
+
breakages.minItems = {
|
|
490
|
+
reason: "type-changed",
|
|
491
|
+
oldType: `${oldSchema.minItems}`,
|
|
492
|
+
newType: `${newSchema.minItems}`
|
|
493
|
+
};
|
|
494
|
+
}
|
|
495
|
+
}
|
|
496
|
+
if (oldSchema.maxItems < newSchema.maxItems) {
|
|
497
|
+
if (origin === "server") {
|
|
498
|
+
breakages.maxItems = {
|
|
499
|
+
reason: "type-changed",
|
|
500
|
+
oldType: `${oldSchema.maxItems}`,
|
|
501
|
+
newType: `${newSchema.maxItems}`
|
|
502
|
+
};
|
|
503
|
+
}
|
|
504
|
+
} else if (oldSchema.maxItems > newSchema.maxItems) {
|
|
505
|
+
if (origin === "client") {
|
|
506
|
+
breakages.maxItems = {
|
|
507
|
+
reason: "type-changed",
|
|
508
|
+
oldType: `${oldSchema.maxItems}`,
|
|
509
|
+
newType: `${newSchema.maxItems}`
|
|
510
|
+
};
|
|
511
|
+
}
|
|
512
|
+
}
|
|
513
|
+
if (!oldSchema.uniqueItems && newSchema.uniqueItems && origin === "client") {
|
|
514
|
+
breakages.uniqueItems = {
|
|
515
|
+
reason: "type-changed",
|
|
516
|
+
oldType: `${!!oldSchema.uniqueItems}`,
|
|
517
|
+
newType: `${!!newSchema.uniqueItems}`
|
|
518
|
+
};
|
|
519
|
+
}
|
|
520
|
+
if ("contains" in newSchema !== "contains" in oldSchema) {
|
|
521
|
+
if ("contains" in newSchema && !("contains" in oldSchema) && origin === "client") {
|
|
522
|
+
breakages.contains = {
|
|
523
|
+
reason: "type-changed",
|
|
524
|
+
oldType: "no-contains",
|
|
525
|
+
newType: "contains"
|
|
526
|
+
};
|
|
527
|
+
}
|
|
528
|
+
} else if ("contains" in newSchema) {
|
|
529
|
+
const containsBreakage = diffJSONSchema(
|
|
530
|
+
oldSchema.contains,
|
|
531
|
+
newSchema.contains,
|
|
532
|
+
origin
|
|
533
|
+
);
|
|
534
|
+
if (containsBreakage) {
|
|
535
|
+
breakages.contains = containsBreakage;
|
|
536
|
+
}
|
|
537
|
+
}
|
|
538
|
+
if (oldSchema.minContains < newSchema.minContains) {
|
|
539
|
+
if (origin === "client") {
|
|
540
|
+
breakages.minContains = {
|
|
541
|
+
reason: "type-changed",
|
|
542
|
+
oldType: `${oldSchema.minContains}`,
|
|
543
|
+
newType: `${newSchema.minContains}`
|
|
544
|
+
};
|
|
545
|
+
}
|
|
546
|
+
} else if (oldSchema.minContains > newSchema.minContains) {
|
|
547
|
+
if (origin === "server") {
|
|
548
|
+
breakages.minContains = {
|
|
549
|
+
reason: "type-changed",
|
|
550
|
+
oldType: `${oldSchema.minContains}`,
|
|
551
|
+
newType: `${newSchema.minContains}`
|
|
552
|
+
};
|
|
553
|
+
}
|
|
554
|
+
}
|
|
555
|
+
if (oldSchema.maxContains < newSchema.maxContains) {
|
|
556
|
+
if (origin === "server") {
|
|
557
|
+
breakages.maxContains = {
|
|
558
|
+
reason: "type-changed",
|
|
559
|
+
oldType: `${oldSchema.maxContains}`,
|
|
560
|
+
newType: `${newSchema.maxContains}`
|
|
561
|
+
};
|
|
562
|
+
}
|
|
563
|
+
} else if (oldSchema.maxContains > newSchema.maxContains) {
|
|
564
|
+
if (origin === "client") {
|
|
565
|
+
breakages.maxContains = {
|
|
566
|
+
reason: "type-changed",
|
|
567
|
+
oldType: `${oldSchema.maxContains}`,
|
|
568
|
+
newType: `${newSchema.maxContains}`
|
|
569
|
+
};
|
|
570
|
+
}
|
|
571
|
+
}
|
|
572
|
+
} else if (newSchema.type === "object") {
|
|
573
|
+
if ("properties" in newSchema !== "properties" in oldSchema) {
|
|
574
|
+
return {
|
|
575
|
+
reason: "type-changed",
|
|
576
|
+
oldType: "properties" in oldSchema ? "probably-object" : "probably-record",
|
|
577
|
+
newType: "properties" in newSchema ? "probably-object" : "probably-record"
|
|
578
|
+
};
|
|
579
|
+
}
|
|
580
|
+
if ("properties" in newSchema) {
|
|
581
|
+
const propertiesBreakages = diffObjectProperties(
|
|
582
|
+
oldSchema.properties,
|
|
583
|
+
newSchema.properties,
|
|
584
|
+
origin,
|
|
585
|
+
oldSchema.required,
|
|
586
|
+
newSchema.required
|
|
587
|
+
);
|
|
588
|
+
if (Object.keys(propertiesBreakages).length) {
|
|
589
|
+
breakages.properties = {
|
|
590
|
+
reason: "field-breakage",
|
|
591
|
+
fieldBreakages: propertiesBreakages
|
|
592
|
+
};
|
|
593
|
+
}
|
|
594
|
+
}
|
|
595
|
+
if ("patternProperties" in newSchema) {
|
|
596
|
+
const patternPropertiesBreakages = diffObjectProperties(
|
|
597
|
+
oldSchema.patternProperties,
|
|
598
|
+
newSchema.patternProperties,
|
|
599
|
+
origin,
|
|
600
|
+
oldSchema.required,
|
|
601
|
+
newSchema.required
|
|
602
|
+
);
|
|
603
|
+
if (Object.keys(patternPropertiesBreakages).length) {
|
|
604
|
+
breakages.patternProperties = {
|
|
605
|
+
reason: "field-breakage",
|
|
606
|
+
fieldBreakages: patternPropertiesBreakages
|
|
607
|
+
};
|
|
608
|
+
}
|
|
609
|
+
}
|
|
610
|
+
if ("additionalProperties" in newSchema || "additionalProperties" in oldSchema) {
|
|
611
|
+
throw new Error("additionalProperties is not supported");
|
|
612
|
+
}
|
|
613
|
+
if ("minProperties" in newSchema || "minProperties" in oldSchema) {
|
|
614
|
+
throw new Error("minProperties is not supported");
|
|
615
|
+
}
|
|
616
|
+
if ("maxProperties" in newSchema || "maxProperties" in oldSchema) {
|
|
617
|
+
throw new Error("maxProperties is not supported");
|
|
618
|
+
}
|
|
619
|
+
}
|
|
620
|
+
if (Object.keys(breakages).length) {
|
|
621
|
+
return {
|
|
622
|
+
reason: "field-breakage",
|
|
623
|
+
fieldBreakages: breakages
|
|
624
|
+
};
|
|
625
|
+
}
|
|
626
|
+
return null;
|
|
627
|
+
}
|
|
628
|
+
function diffObjectProperties(oldProperties, newProperties, origin, oldRequiredProperties = [], newRequiredProperties = []) {
|
|
629
|
+
const allProperties = /* @__PURE__ */ new Set([
|
|
630
|
+
...Object.keys(oldProperties),
|
|
631
|
+
...Object.keys(newProperties)
|
|
632
|
+
]);
|
|
633
|
+
const breakages = {};
|
|
634
|
+
for (const propertyName of allProperties) {
|
|
635
|
+
const requiredBreakage = diffRequired(
|
|
636
|
+
oldProperties[propertyName],
|
|
637
|
+
newProperties[propertyName],
|
|
638
|
+
origin,
|
|
639
|
+
oldRequiredProperties.includes(propertyName),
|
|
640
|
+
newRequiredProperties.includes(propertyName)
|
|
641
|
+
);
|
|
642
|
+
if (requiredBreakage) {
|
|
643
|
+
breakages[propertyName] = requiredBreakage;
|
|
644
|
+
} else if (oldProperties[propertyName] && newProperties[propertyName]) {
|
|
645
|
+
const propertyBreakage = diffJSONSchema(
|
|
646
|
+
oldProperties[propertyName],
|
|
647
|
+
newProperties[propertyName],
|
|
648
|
+
origin
|
|
649
|
+
);
|
|
650
|
+
if (propertyBreakage) {
|
|
651
|
+
breakages[propertyName] = propertyBreakage;
|
|
652
|
+
}
|
|
653
|
+
}
|
|
654
|
+
}
|
|
655
|
+
return breakages;
|
|
656
|
+
}
|
|
657
|
+
function getReportingType(schema) {
|
|
658
|
+
if ("not" in schema) {
|
|
659
|
+
return "not";
|
|
660
|
+
}
|
|
661
|
+
if ("anyOf" in schema) {
|
|
662
|
+
return "anyOf";
|
|
663
|
+
}
|
|
664
|
+
if ("allOf" in schema) {
|
|
665
|
+
return "allOf";
|
|
666
|
+
}
|
|
667
|
+
if ("$ref" in schema) {
|
|
668
|
+
return "$ref";
|
|
669
|
+
}
|
|
670
|
+
if (schema.type && typeof schema.type === "string") {
|
|
671
|
+
return schema.type;
|
|
672
|
+
}
|
|
673
|
+
throw new Error(
|
|
674
|
+
"Subschema not supported, probably a conditional subschema. Check logs."
|
|
675
|
+
);
|
|
676
|
+
}
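The `diffServerSchema` helper defined in the `router/diff.ts` portion of this chunk compares two serialized server schemas and reports breaking changes per service and procedure. A minimal sketch of driving it, assuming the helper is re-exported from the package's router entry point (the diff above only shows its definition inside this chunk) and that both schemas were produced by `serializeSchema`:

// Sketch only: the import path is an assumption.
import { diffServerSchema } from "@replit/river/router";

declare const oldSchema: Parameters<typeof diffServerSchema>[0];
declare const newSchema: Parameters<typeof diffServerSchema>[1];

const breakage = diffServerSchema(oldSchema, newSchema, {
  allowServiceRemoval: true,    // a removed service is tolerated
  allowProcedureRemoval: false, // a removed procedure is still reported
});

if (breakage) {
  // serviceBreakages maps service name -> { reason, procedureBreakages? }
  console.error("breaking schema change", breakage.serviceBreakages);
}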

// router/result.ts
import {
  Type as Type2
} from "@sinclair/typebox";
var ErrResultSchema = (t) => Type2.Object({
  ok: Type2.Literal(false),
  payload: t
});
var AnyResultSchema = Type2.Union([
  Type2.Object({
    ok: Type2.Literal(false),
    payload: Type2.Object({
      code: Type2.String(),
      message: Type2.String(),
      extras: Type2.Optional(Type2.Unknown())
    })
  }),
  Type2.Object({
    ok: Type2.Literal(true),
    payload: Type2.Unknown()
  })
]);
function Ok(payload) {
  return {
    ok: true,
    payload
  };
}
function Err(error) {
  return {
    ok: false,
    payload: error
  };
}

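`Ok` and `Err` are the plain result constructors used throughout the procedure handlers; callers branch by narrowing on `ok`. A minimal sketch, assuming the two helpers above are in scope:

const result = Math.random() > 0.5
  ? Ok({ current: 1 })
  : Err({ code: "UNCAUGHT_ERROR", message: "something went wrong" });

if (result.ok) {
  // payload carries the success value
  console.log(result.payload);
} else {
  // payload carries { code, message, extras? }
  console.warn(result.payload.code, result.payload.message);
}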
|
|
713
|
+
// router/streams.ts
|
|
714
|
+
var StreamDrainedError = {
|
|
715
|
+
code: "STREAM_DRAINED",
|
|
716
|
+
message: "Stream was drained"
|
|
717
|
+
};
|
|
718
|
+
var ReadStreamImpl = class {
|
|
719
|
+
/**
|
|
720
|
+
* Whether the stream is closed.
|
|
721
|
+
*/
|
|
722
|
+
closed = false;
|
|
723
|
+
/**
|
|
724
|
+
* A list of listeners that will be called when the stream is closed.
|
|
725
|
+
*/
|
|
726
|
+
onCloseListeners;
|
|
727
|
+
/**
|
|
728
|
+
* Whether the user has requested to close the stream.
|
|
729
|
+
*/
|
|
730
|
+
closeRequested = false;
|
|
731
|
+
/**
|
|
732
|
+
* Used to signal to the outside world that the user has requested to close the stream.
|
|
733
|
+
*/
|
|
734
|
+
closeRequestCallback;
|
|
735
|
+
/**
|
|
736
|
+
* Whether the stream is locked.
|
|
737
|
+
*/
|
|
738
|
+
locked = false;
|
|
739
|
+
/**
|
|
740
|
+
* Whether drain was called.
|
|
741
|
+
*/
|
|
742
|
+
drained = false;
|
|
743
|
+
/**
|
|
744
|
+
* This flag allows us to avoid cases where drain was called,
|
|
745
|
+
* but the stream is fully consumed and closed. We don't need
|
|
746
|
+
* to signal that drain was closed.
|
|
747
|
+
*/
|
|
748
|
+
didDrainDisposeValues = false;
|
|
749
|
+
/**
|
|
750
|
+
* A list of values that have been pushed to the stream but not yet emitted to the user.
|
|
751
|
+
*/
|
|
752
|
+
queue = [];
|
|
753
|
+
/**
|
|
754
|
+
* Used by methods in the class to signal to the iterator that it
|
|
755
|
+
* should check for the next value.
|
|
756
|
+
*/
|
|
757
|
+
nextPromise = null;
|
|
758
|
+
/**
|
|
759
|
+
* Resolves nextPromise
|
|
760
|
+
*/
|
|
761
|
+
resolveNextPromise = null;
|
|
762
|
+
constructor(closeRequestCallback) {
|
|
763
|
+
this.closeRequestCallback = closeRequestCallback;
|
|
764
|
+
this.onCloseListeners = /* @__PURE__ */ new Set();
|
|
765
|
+
}
|
|
766
|
+
[Symbol.asyncIterator]() {
|
|
767
|
+
if (this.isLocked()) {
|
|
768
|
+
throw new TypeError("ReadStream is already locked");
|
|
769
|
+
}
|
|
770
|
+
let didSignalDrain = false;
|
|
771
|
+
this.locked = true;
|
|
772
|
+
return {
|
|
773
|
+
next: async () => {
|
|
774
|
+
if (this.drained && didSignalDrain) {
|
|
775
|
+
return {
|
|
776
|
+
done: true,
|
|
777
|
+
value: void 0
|
|
778
|
+
};
|
|
779
|
+
}
|
|
780
|
+
while (this.queue.length === 0) {
|
|
781
|
+
if (this.isClosed() && !this.didDrainDisposeValues) {
|
|
782
|
+
return {
|
|
783
|
+
done: true,
|
|
784
|
+
value: void 0
|
|
785
|
+
};
|
|
786
|
+
}
|
|
787
|
+
if (this.drained) {
|
|
788
|
+
didSignalDrain = true;
|
|
789
|
+
return {
|
|
790
|
+
done: false,
|
|
791
|
+
value: Err(StreamDrainedError)
|
|
792
|
+
};
|
|
793
|
+
}
|
|
794
|
+
if (!this.nextPromise) {
|
|
795
|
+
this.nextPromise = new Promise((resolve) => {
|
|
796
|
+
this.resolveNextPromise = resolve;
|
|
797
|
+
});
|
|
798
|
+
}
|
|
799
|
+
await this.nextPromise;
|
|
800
|
+
this.nextPromise = null;
|
|
801
|
+
this.resolveNextPromise = null;
|
|
802
|
+
}
|
|
803
|
+
const value = this.queue.shift();
|
|
804
|
+
return { done: false, value };
|
|
805
|
+
},
|
|
806
|
+
return: async () => {
|
|
807
|
+
this.drain();
|
|
808
|
+
return { done: true, value: void 0 };
|
|
809
|
+
}
|
|
810
|
+
};
|
|
811
|
+
}
|
|
812
|
+
async asArray() {
|
|
813
|
+
const array = [];
|
|
814
|
+
for await (const value of this) {
|
|
815
|
+
array.push(value);
|
|
816
|
+
}
|
|
817
|
+
return array;
|
|
818
|
+
}
|
|
819
|
+
drain() {
|
|
820
|
+
if (this.drained) {
|
|
821
|
+
return;
|
|
822
|
+
}
|
|
823
|
+
this.locked = true;
|
|
824
|
+
this.drained = true;
|
|
825
|
+
this.didDrainDisposeValues = this.queue.length > 0;
|
|
826
|
+
this.queue.length = 0;
|
|
827
|
+
this.resolveNextPromise?.();
|
|
828
|
+
}
|
|
829
|
+
isClosed() {
|
|
830
|
+
return this.closed;
|
|
831
|
+
}
|
|
832
|
+
isLocked() {
|
|
833
|
+
return this.locked;
|
|
834
|
+
}
|
|
835
|
+
onClose(cb) {
|
|
836
|
+
if (this.isClosed()) {
|
|
837
|
+
throw new Error("Stream is already closed");
|
|
838
|
+
}
|
|
839
|
+
this.onCloseListeners.add(cb);
|
|
840
|
+
return () => {
|
|
841
|
+
this.onCloseListeners.delete(cb);
|
|
842
|
+
};
|
|
843
|
+
}
|
|
844
|
+
requestClose() {
|
|
845
|
+
if (this.isClosed()) {
|
|
846
|
+
throw new Error("Cannot request close after stream already closed");
|
|
847
|
+
}
|
|
848
|
+
if (!this.closeRequested) {
|
|
849
|
+
this.closeRequested = true;
|
|
850
|
+
this.closeRequestCallback();
|
|
851
|
+
}
|
|
852
|
+
return new Promise((resolve) => {
|
|
853
|
+
this.onClose(() => {
|
|
854
|
+
resolve(void 0);
|
|
855
|
+
});
|
|
856
|
+
});
|
|
857
|
+
}
|
|
858
|
+
isCloseRequested() {
|
|
859
|
+
return this.closeRequested;
|
|
860
|
+
}
|
|
861
|
+
/**
|
|
862
|
+
* @internal meant for use within river, not exposed as a public API
|
|
863
|
+
*
|
|
864
|
+
* Pushes a value to the stream.
|
|
865
|
+
*/
|
|
866
|
+
pushValue(value) {
|
|
867
|
+
if (this.drained) {
|
|
868
|
+
return;
|
|
869
|
+
}
|
|
870
|
+
if (this.closed) {
|
|
871
|
+
throw new Error("Cannot push to closed stream");
|
|
872
|
+
}
|
|
873
|
+
this.queue.push(value);
|
|
874
|
+
this.resolveNextPromise?.();
|
|
875
|
+
}
|
|
876
|
+
/**
|
|
877
|
+
* @internal meant for use within river, not exposed as a public API
|
|
878
|
+
*
|
|
879
|
+
* Triggers the close of the stream. Make sure to push all remaining
|
|
880
|
+
* values before calling this method.
|
|
881
|
+
*/
|
|
882
|
+
triggerClose() {
|
|
883
|
+
if (this.isClosed()) {
|
|
884
|
+
throw new Error("Unexpected closing multiple times");
|
|
885
|
+
}
|
|
886
|
+
this.closed = true;
|
|
887
|
+
this.resolveNextPromise?.();
|
|
888
|
+
this.onCloseListeners.forEach((cb) => cb());
|
|
889
|
+
this.onCloseListeners.clear();
|
|
890
|
+
}
|
|
891
|
+
/**
|
|
892
|
+
* @internal meant for use within river, not exposed as a public API
|
|
893
|
+
*/
|
|
894
|
+
hasValuesInQueue() {
|
|
895
|
+
return this.queue.length > 0;
|
|
896
|
+
}
|
|
897
|
+
};
|
|
898
|
+
var WriteStreamImpl = class {
|
|
899
|
+
/**
|
|
900
|
+
* Passed via constructor to pass on write requests
|
|
901
|
+
*/
|
|
902
|
+
writeCb;
|
|
903
|
+
/**
|
|
904
|
+
* Passed via constructor to pass on close requests
|
|
905
|
+
*/
|
|
906
|
+
closeCb;
|
|
907
|
+
/**
|
|
908
|
+
* Whether the stream is closed.
|
|
909
|
+
*/
|
|
910
|
+
closed = false;
|
|
911
|
+
/**
|
|
912
|
+
* Whether the reader has requested to close the stream.
|
|
913
|
+
*/
|
|
914
|
+
closeRequested = false;
|
|
915
|
+
/**
|
|
916
|
+
* A list of listeners that will be called when the stream is closed.
|
|
917
|
+
*/
|
|
918
|
+
onCloseListeners;
|
|
919
|
+
constructor(writeCb, closeCb) {
|
|
920
|
+
this.writeCb = writeCb;
|
|
921
|
+
this.closeCb = closeCb;
|
|
922
|
+
this.onCloseListeners = /* @__PURE__ */ new Set();
|
|
923
|
+
}
|
|
924
|
+
write(value) {
|
|
925
|
+
if (this.isClosed()) {
|
|
926
|
+
throw new Error("Cannot write to closed stream");
|
|
927
|
+
}
|
|
928
|
+
this.writeCb(value);
|
|
929
|
+
}
|
|
930
|
+
close() {
|
|
931
|
+
if (this.isClosed()) {
|
|
932
|
+
return;
|
|
933
|
+
}
|
|
934
|
+
this.closed = true;
|
|
935
|
+
this.closeCb();
|
|
936
|
+
}
|
|
937
|
+
isCloseRequested() {
|
|
938
|
+
return this.closeRequested;
|
|
939
|
+
}
|
|
940
|
+
onCloseRequest(cb) {
|
|
941
|
+
if (this.isClosed()) {
|
|
942
|
+
throw new Error("Stream is already closed");
|
|
943
|
+
}
|
|
944
|
+
this.onCloseListeners.add(cb);
|
|
945
|
+
return () => this.onCloseListeners.delete(cb);
|
|
946
|
+
}
|
|
947
|
+
isClosed() {
|
|
948
|
+
return this.closed;
|
|
949
|
+
}
|
|
950
|
+
/**
|
|
951
|
+
* @internal meant for use within river, not exposed as a public API
|
|
952
|
+
*
|
|
953
|
+
* Triggers a close request.
|
|
954
|
+
*/
|
|
955
|
+
triggerCloseRequest() {
|
|
956
|
+
if (this.isCloseRequested()) {
|
|
957
|
+
throw new Error("Cannot trigger close request multiple times");
|
|
958
|
+
}
|
|
959
|
+
if (this.isClosed()) {
|
|
960
|
+
throw new Error("Cannot trigger close request on closed stream");
|
|
961
|
+
}
|
|
962
|
+
this.closeRequested = true;
|
|
963
|
+
this.onCloseListeners.forEach((cb) => cb());
|
|
964
|
+
this.onCloseListeners.clear();
|
|
965
|
+
}
|
|
966
|
+
};
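The `ReadStreamImpl`/`WriteStreamImpl` classes above back the reader/writer pairs handed out for stream, upload, and subscription calls: a reader is an async iterable of `Ok`/`Err` results and also exposes `asArray()`. A minimal consumption sketch, where `reader` is a hypothetical output reader returned by such a call:

// Sketch only: `reader` stands in for an output ReadStream from this chunk.
declare const reader: AsyncIterable<{ ok: boolean; payload: unknown }> & {
  asArray(): Promise<unknown[]>;
};

for await (const item of reader) {
  if (!item.ok) {
    console.warn("stream error", item.payload);
    break;
  }
  console.log("received", item.payload);
}
// Or buffer the whole stream (only sensible when the stream is known to close):
// const all = await reader.asArray();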

// router/procedures.ts
import { Type as Type3 } from "@sinclair/typebox";
var INTERNAL_RIVER_ERROR_CODE = "INTERNAL_RIVER_ERROR";
var UNCAUGHT_ERROR_CODE = "UNCAUGHT_ERROR";
var UNEXPECTED_DISCONNECT_CODE = "UNEXPECTED_DISCONNECT";
var INVALID_REQUEST_CODE = "INVALID_REQUEST";
var ABORT_CODE = "ABORT";
var OutputReaderErrorSchema = Type3.Object({
  code: Type3.Union([
    Type3.Literal(INTERNAL_RIVER_ERROR_CODE),
    Type3.Literal(UNCAUGHT_ERROR_CODE),
    Type3.Literal(UNEXPECTED_DISCONNECT_CODE),
    Type3.Literal(INVALID_REQUEST_CODE),
    Type3.Literal(ABORT_CODE)
  ]),
  message: Type3.String()
});
var InputReaderErrorSchema = Type3.Object({
  code: Type3.Union([
    Type3.Literal(UNCAUGHT_ERROR_CODE),
    Type3.Literal(UNEXPECTED_DISCONNECT_CODE),
    Type3.Literal(INVALID_REQUEST_CODE),
    Type3.Literal(ABORT_CODE)
  ]),
  message: Type3.String()
});
function rpc({
  init,
  output,
  errors = Type3.Never(),
  description,
  handler
}) {
  return {
    ...description ? { description } : {},
    type: "rpc",
    init,
    output,
    errors,
    handler
  };
}
function upload({
  init,
  input,
  output,
  errors = Type3.Never(),
  description,
  handler
}) {
  return {
    type: "upload",
    ...description ? { description } : {},
    init,
    input,
    output,
    errors,
    handler
  };
}
function subscription({
  init,
  output,
  errors = Type3.Never(),
  description,
  handler
}) {
  return {
    type: "subscription",
    ...description ? { description } : {},
    init,
    output,
    errors,
    handler
  };
}
function stream({
  init,
  input,
  output,
  errors = Type3.Never(),
  description,
  handler
}) {
  return {
    type: "stream",
    ...description ? { description } : {},
    init,
    input,
    output,
    errors,
    handler
  };
}
var Procedure = {
  rpc,
  upload,
  subscription,
  stream
};

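The four `Procedure` constructors above differ only in which schemas they require (`input` is added for `upload` and `stream`) and in the `type` tag they stamp on the definition. A minimal sketch of an rpc that declares its own error space, assuming `Procedure`, `Ok`, and `Err` from this chunk are in scope; the `divide` procedure and its error code are illustrative:

import { Type } from "@sinclair/typebox";

const divide = Procedure.rpc({
  description: "Divide two numbers",
  init: Type.Object({ a: Type.Number(), b: Type.Number() }),
  output: Type.Object({ quotient: Type.Number() }),
  errors: Type.Object({
    code: Type.Literal("DIV_BY_ZERO"),
    message: Type.String(),
  }),
  async handler(ctx, init) {
    if (init.b === 0) {
      return Err({ code: "DIV_BY_ZERO", message: "b must be non-zero" });
    }
    return Ok({ quotient: init.a / init.b });
  },
});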
|
|
1069
|
+
// router/client.ts
|
|
1070
|
+
import { nanoid } from "nanoid";
|
|
1071
|
+
import { Value } from "@sinclair/typebox/value";
|
|
1072
|
+
var OutputErrResultSchema = ErrResultSchema(OutputReaderErrorSchema);
|
|
1073
|
+
var noop = () => {
|
|
1074
|
+
};
|
|
1075
|
+
function _createRecursiveProxy(callback, path) {
|
|
1076
|
+
const proxy = new Proxy(noop, {
|
|
1077
|
+
// property access, recurse and add field to path
|
|
1078
|
+
get(_obj, key) {
|
|
1079
|
+
if (typeof key !== "string")
|
|
1080
|
+
return void 0;
|
|
1081
|
+
return _createRecursiveProxy(callback, [...path, key]);
|
|
1082
|
+
},
|
|
1083
|
+
// hit the end, let's invoke the handler
|
|
1084
|
+
apply(_target, _this, args) {
|
|
1085
|
+
return callback({
|
|
1086
|
+
path,
|
|
1087
|
+
args
|
|
1088
|
+
});
|
|
1089
|
+
}
|
|
1090
|
+
});
|
|
1091
|
+
return proxy;
|
|
1092
|
+
}
|
|
1093
|
+
var defaultClientOptions = {
|
|
1094
|
+
connectOnInvoke: true,
|
|
1095
|
+
eagerlyConnect: true
|
|
1096
|
+
};
|
|
1097
|
+
function createClient(transport, serverId, providedClientOptions = {}) {
|
|
1098
|
+
if (providedClientOptions.handshakeOptions) {
|
|
1099
|
+
transport.extendHandshake(providedClientOptions.handshakeOptions);
|
|
1100
|
+
}
|
|
1101
|
+
const clientOptions = { ...defaultClientOptions, ...providedClientOptions };
|
|
1102
|
+
if (clientOptions.eagerlyConnect) {
|
|
1103
|
+
void transport.connect(serverId);
|
|
1104
|
+
}
|
|
1105
|
+
return _createRecursiveProxy((opts) => {
|
|
1106
|
+
const [serviceName, procName, procMethod] = [...opts.path];
|
|
1107
|
+
if (!(serviceName && procName && procMethod)) {
|
|
1108
|
+
throw new Error(
|
|
1109
|
+
"invalid river call, ensure the service and procedure you are calling exists"
|
|
1110
|
+
);
|
|
1111
|
+
}
|
|
1112
|
+
const [init, callOptions] = opts.args;
|
|
1113
|
+
if (clientOptions.connectOnInvoke && !transport.connections.has(serverId)) {
|
|
1114
|
+
void transport.connect(serverId);
|
|
1115
|
+
}
|
|
1116
|
+
if (procMethod !== "rpc" && procMethod !== "subscribe" && procMethod !== "stream" && procMethod !== "upload") {
|
|
1117
|
+
throw new Error(
|
|
1118
|
+
`invalid river call, unknown procedure type ${procMethod}`
|
|
1119
|
+
);
|
|
1120
|
+
}
|
|
1121
|
+
return handleProc(
|
|
1122
|
+
procMethod === "subscribe" ? "subscription" : procMethod,
|
|
1123
|
+
transport,
|
|
1124
|
+
serverId,
|
|
1125
|
+
init,
|
|
1126
|
+
serviceName,
|
|
1127
|
+
procName,
|
|
1128
|
+
callOptions ? callOptions.signal : void 0
|
|
1129
|
+
);
|
|
1130
|
+
}, []);
|
|
1131
|
+
}
|
|
1132
|
+
function handleProc(procType, transport, serverId, init, serviceName, procedureName, abortSignal) {
|
|
1133
|
+
const procClosesWithInit = procType === "rpc" || procType === "subscription";
|
|
1134
|
+
const streamId = nanoid();
|
|
1135
|
+
const { span, ctx } = createProcTelemetryInfo(
|
|
1136
|
+
transport,
|
|
1137
|
+
procType,
|
|
1138
|
+
serviceName,
|
|
1139
|
+
procedureName,
|
|
1140
|
+
streamId
|
|
1141
|
+
);
|
|
1142
|
+
let cleanClose = true;
|
|
1143
|
+
const inputWriter = new WriteStreamImpl(
|
|
1144
|
+
(rawIn) => {
|
|
1145
|
+
transport.send(serverId, {
|
|
1146
|
+
streamId,
|
|
1147
|
+
payload: rawIn,
|
|
1148
|
+
controlFlags: 0,
|
|
1149
|
+
tracing: getPropagationContext(ctx)
|
|
1150
|
+
});
|
|
1151
|
+
},
|
|
1152
|
+
() => {
|
|
1153
|
+
span.addEvent("inputWriter closed");
|
|
1154
|
+
if (!procClosesWithInit && cleanClose) {
|
|
1155
|
+
transport.sendCloseControl(serverId, streamId);
|
|
1156
|
+
}
|
|
1157
|
+
if (outputReader.isClosed()) {
|
|
1158
|
+
cleanup();
|
|
1159
|
+
}
|
|
1160
|
+
}
|
|
1161
|
+
);
|
|
1162
|
+
const outputReader = new ReadStreamImpl(() => {
|
|
1163
|
+
transport.sendRequestCloseControl(serverId, streamId);
|
|
1164
|
+
});
|
|
1165
|
+
outputReader.onClose(() => {
|
|
1166
|
+
span.addEvent("outputReader closed");
|
|
1167
|
+
if (inputWriter.isClosed()) {
|
|
1168
|
+
cleanup();
|
|
1169
|
+
}
|
|
1170
|
+
});
|
|
1171
|
+
function cleanup() {
|
|
1172
|
+
transport.removeEventListener("message", onMessage);
|
|
1173
|
+
transport.removeEventListener("sessionStatus", onSessionStatus);
|
|
1174
|
+
abortSignal?.removeEventListener("abort", onClientAbort);
|
|
1175
|
+
span.end();
|
|
1176
|
+
}
|
|
1177
|
+
function onClientAbort() {
|
|
1178
|
+
if (outputReader.isClosed() && inputWriter.isClosed()) {
|
|
1179
|
+
return;
|
|
1180
|
+
}
|
|
1181
|
+
span.addEvent("sending abort");
|
|
1182
|
+
cleanClose = false;
|
|
1183
|
+
if (!outputReader.isClosed()) {
|
|
1184
|
+
outputReader.pushValue(
|
|
1185
|
+
Err({
|
|
1186
|
+
code: ABORT_CODE,
|
|
1187
|
+
message: "Aborted by client"
|
|
1188
|
+
})
|
|
1189
|
+
);
|
|
1190
|
+
outputReader.triggerClose();
|
|
1191
|
+
}
|
|
1192
|
+
inputWriter.close();
|
|
1193
|
+
transport.sendAbort(
|
|
1194
|
+
serverId,
|
|
1195
|
+
streamId,
|
|
1196
|
+
Err({
|
|
1197
|
+
code: ABORT_CODE,
|
|
1198
|
+
message: "Aborted by client"
|
|
1199
|
+
})
|
|
1200
|
+
);
|
|
1201
|
+
}
|
|
1202
|
+
function onMessage(msg) {
|
|
1203
|
+
if (msg.streamId !== streamId)
|
|
1204
|
+
return;
|
|
1205
|
+
if (msg.to !== transport.clientId) {
|
|
1206
|
+
transport.log?.error("Got stream message from unexpected client", {
|
|
1207
|
+
clientId: transport.clientId,
|
|
1208
|
+
transportMessage: msg
|
|
1209
|
+
});
|
|
1210
|
+
return;
|
|
1211
|
+
}
|
|
1212
|
+
if (isStreamCloseRequest(msg.controlFlags)) {
|
|
1213
|
+
inputWriter.triggerCloseRequest();
|
|
1214
|
+
}
|
|
1215
|
+
if (isStreamAbort(msg.controlFlags)) {
|
|
1216
|
+
cleanClose = false;
|
|
1217
|
+
span.addEvent("received abort");
|
|
1218
|
+
let abortResult;
|
|
1219
|
+
if (Value.Check(OutputErrResultSchema, msg.payload)) {
|
|
1220
|
+
abortResult = msg.payload;
|
|
1221
|
+
} else {
|
|
1222
|
+
abortResult = Err({
|
|
1223
|
+
code: ABORT_CODE,
|
|
1224
|
+
message: "Stream aborted with invalid payload"
|
|
1225
|
+
});
|
|
1226
|
+
transport.log?.error(
|
|
1227
|
+
"Got stream abort without a valid protocol error",
|
|
1228
|
+
{
|
|
1229
|
+
clientId: transport.clientId,
|
|
1230
|
+
transportMessage: msg,
|
|
1231
|
+
validationErrors: [
|
|
1232
|
+
...Value.Errors(OutputErrResultSchema, msg.payload)
|
|
1233
|
+
]
|
|
1234
|
+
}
|
|
1235
|
+
);
|
|
1236
|
+
}
|
|
1237
|
+
if (!outputReader.isClosed()) {
|
|
1238
|
+
outputReader.pushValue(abortResult);
|
|
1239
|
+
outputReader.triggerClose();
|
|
1240
|
+
}
|
|
1241
|
+
inputWriter.close();
|
|
1242
|
+
return;
|
|
1243
|
+
}
|
|
1244
|
+
if (outputReader.isClosed()) {
|
|
1245
|
+
span.recordException("Received message after output stream is closed");
|
|
1246
|
+
transport.log?.error("Received message after output stream is closed", {
|
|
1247
|
+
clientId: transport.clientId,
|
|
1248
|
+
transportMessage: msg
|
|
1249
|
+
});
|
|
1250
|
+
return;
|
|
1251
|
+
}
|
|
1252
|
+
if (!Value.Check(ControlMessageCloseSchema, msg.payload)) {
|
|
1253
|
+
if (Value.Check(AnyResultSchema, msg.payload)) {
|
|
1254
|
+
outputReader.pushValue(msg.payload);
|
|
1255
|
+
} else {
|
|
1256
|
+
transport.log?.error(
|
|
1257
|
+
"Got non-control payload, but was not a valid result",
|
|
1258
|
+
{
|
|
1259
|
+
clientId: transport.clientId,
|
|
1260
|
+
transportMessage: msg,
|
|
1261
|
+
validationErrors: [...Value.Errors(AnyResultSchema, msg.payload)]
|
|
1262
|
+
}
|
|
1263
|
+
);
|
|
1264
|
+
}
|
|
1265
|
+
}
|
|
1266
|
+
if (isStreamClose(msg.controlFlags)) {
|
|
1267
|
+
span.addEvent("received output close");
|
|
1268
|
+
outputReader.triggerClose();
|
|
1269
|
+
}
|
|
1270
|
+
}
|
|
1271
|
+
function onSessionStatus(evt) {
|
|
1272
|
+
if (evt.status !== "disconnect") {
|
|
1273
|
+
return;
|
|
1274
|
+
}
|
|
1275
|
+
if (evt.session.to !== serverId) {
|
|
1276
|
+
return;
|
|
1277
|
+
}
|
|
1278
|
+
cleanClose = false;
|
|
1279
|
+
if (!outputReader.isClosed()) {
|
|
1280
|
+
outputReader.pushValue(
|
|
1281
|
+
Err({
|
|
1282
|
+
code: UNEXPECTED_DISCONNECT_CODE,
|
|
1283
|
+
message: `${serverId} unexpectedly disconnected`
|
|
1284
|
+
})
|
|
1285
|
+
);
|
|
1286
|
+
}
|
|
1287
|
+
inputWriter.close();
|
|
1288
|
+
outputReader.triggerClose();
|
|
1289
|
+
}
|
|
1290
|
+
abortSignal?.addEventListener("abort", onClientAbort);
|
|
1291
|
+
transport.addEventListener("message", onMessage);
|
|
1292
|
+
transport.addEventListener("sessionStatus", onSessionStatus);
|
|
1293
|
+
transport.send(serverId, {
|
|
1294
|
+
streamId,
|
|
1295
|
+
serviceName,
|
|
1296
|
+
procedureName,
|
|
1297
|
+
tracing: getPropagationContext(ctx),
|
|
1298
|
+
payload: init,
|
|
1299
|
+
controlFlags: procClosesWithInit ? 2 /* StreamOpenBit */ | 8 /* StreamClosedBit */ : 2 /* StreamOpenBit */
|
|
1300
|
+
});
|
|
1301
|
+
if (procClosesWithInit) {
|
|
1302
|
+
inputWriter.close();
|
|
1303
|
+
}
|
|
1304
|
+
if (procType === "subscription") {
|
|
1305
|
+
return outputReader;
|
|
1306
|
+
}
|
|
1307
|
+
if (procType === "rpc") {
|
|
1308
|
+
return getSingleMessage(outputReader, transport.log);
|
|
1309
|
+
}
|
|
1310
|
+
if (procType === "upload") {
|
|
1311
|
+
let didFinalize = false;
|
|
1312
|
+
return [
|
|
1313
|
+
inputWriter,
|
|
1314
|
+
async () => {
|
|
1315
|
+
if (didFinalize) {
|
|
1316
|
+
throw new Error("upload stream already finalized");
|
|
1317
|
+
}
|
|
1318
|
+
didFinalize = true;
|
|
1319
|
+
if (!inputWriter.isClosed()) {
|
|
1320
|
+
inputWriter.close();
|
|
1321
|
+
}
|
|
1322
|
+
return getSingleMessage(outputReader, transport.log);
|
|
1323
|
+
}
|
|
1324
|
+
];
|
|
1325
|
+
}
|
|
1326
|
+
return [inputWriter, outputReader];
|
|
1327
|
+
}
|
|
1328
|
+
async function getSingleMessage(outputReader, log) {
|
|
1329
|
+
const ret = await outputReader.asArray();
|
|
1330
|
+
if (ret.length > 1) {
|
|
1331
|
+
log?.error("Expected single message from server, got multiple");
|
|
1332
|
+
}
|
|
1333
|
+
return ret[0];
|
|
1334
|
+
}
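`createClient` above builds a recursive proxy, so every call takes the form `client.<service>.<procedure>.<method>` where the method is one of `rpc`, `upload`, `subscribe`, or `stream`; `rpc` resolves to a single result, while `stream` hands back an input writer and an output reader. A minimal sketch, assuming `transport` is an already-constructed client transport and that the `counter`/`echo` service and procedure names exist on the server (both names are illustrative):

// createClient is defined in this chunk; transport setup is out of scope here.
const client = createClient(transport, "SERVER", { eagerlyConnect: true });

// rpc: resolves to a single Ok/Err result.
const res = await client.counter.increment.rpc({ amount: 2 });
if (res.ok) {
  console.log(res.payload);
}

// stream: returns an input writer and an output reader.
const abort = new AbortController();
const [input, output] = client.echo.pipe.stream({}, { signal: abort.signal });
input.write({ msg: "hello" });
input.close();
for await (const item of output) {
  console.log(item);
}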
|
|
1335
|
+
|
|
1336
|
+
// router/server.ts
|
|
1337
|
+
import { Value as Value2 } from "@sinclair/typebox/value";
|
|
1338
|
+
import { SpanStatusCode } from "@opentelemetry/api";
|
|
1339
|
+
var InputErrResultSchema = ErrResultSchema(InputReaderErrorSchema);
|
|
1340
|
+
var RiverServer = class {
|
|
1341
|
+
transport;
|
|
1342
|
+
contextMap;
|
|
1343
|
+
log;
|
|
1344
|
+
/**
|
|
1345
|
+
* We create a tombstones for streams aborted by the server
|
|
1346
|
+
* so that we don't hit errors when the client has inflight
|
|
1347
|
+
* requests it sent before it saw the abort.
|
|
1348
|
+
* We track aborted streams for every session separately, so
|
|
1349
|
+
* that bad clients don't affect good clients.
|
|
1350
|
+
*/
|
|
1351
|
+
serverAbortedStreams;
|
|
1352
|
+
maxAbortedStreamTombstonesPerSession;
|
|
1353
|
+
openStreams;
|
|
1354
|
+
services;
|
|
1355
|
+
constructor(transport, services, handshakeOptions, extendedContext, maxAbortedStreamTombstonesPerSession = 200) {
|
|
1356
|
+
const instances = {};
|
|
1357
|
+
this.services = instances;
|
|
1358
|
+
this.contextMap = /* @__PURE__ */ new Map();
|
|
1359
|
+
for (const [name, service] of Object.entries(services)) {
|
|
1360
|
+
const instance = service.instantiate(extendedContext ?? {});
|
|
1361
|
+
instances[name] = instance;
|
|
1362
|
+
this.contextMap.set(instance, {
|
|
1363
|
+
...extendedContext,
|
|
1364
|
+
state: instance.state
|
|
1365
|
+
});
|
|
1366
|
+
}
|
|
1367
|
+
if (handshakeOptions) {
|
|
1368
|
+
transport.extendHandshake(handshakeOptions);
|
|
1369
|
+
}
|
|
1370
|
+
this.transport = transport;
|
|
1371
|
+
this.openStreams = /* @__PURE__ */ new Set();
|
|
1372
|
+
this.serverAbortedStreams = /* @__PURE__ */ new Map();
|
|
1373
|
+
this.maxAbortedStreamTombstonesPerSession = maxAbortedStreamTombstonesPerSession;
|
|
1374
|
+
this.log = transport.log;
|
|
1375
|
+
const handleMessage = (msg) => {
|
|
1376
|
+
if (msg.to !== this.transport.clientId) {
|
|
1377
|
+
this.log?.info(
|
|
1378
|
+
          `got msg with destination that isn't this server, ignoring`,
          {
            clientId: this.transport.clientId,
            transportMessage: msg
          }
        );
        return;
      }
      if (this.openStreams.has(msg.streamId)) {
        return;
      }
      if (this.serverAbortedStreams.get(msg.from)?.has(msg.streamId)) {
        return;
      }
      const validated = this.validateNewProcStream(msg);
      if (!validated) {
        return;
      }
      this.createNewProcStream(validated);
    };
    this.transport.addEventListener("message", handleMessage);
    const handleSessionStatus = (evt) => {
      if (evt.status !== "disconnect")
        return;
      const disconnectedClientId = evt.session.to;
      this.log?.info(
        `got session disconnect from ${disconnectedClientId}, cleaning up streams`,
        evt.session.loggingMetadata
      );
      this.serverAbortedStreams.delete(disconnectedClientId);
    };
    this.transport.addEventListener("sessionStatus", handleSessionStatus);
    this.transport.addEventListener("transportStatus", (evt) => {
      if (evt.status !== "closed")
        return;
      this.transport.removeEventListener("message", handleMessage);
      this.transport.removeEventListener("sessionStatus", handleSessionStatus);
    });
  }
  createNewProcStream({
    procedure,
    procedureName,
    service,
    serviceName,
    sessionMetadata,
    loggingMetadata,
    streamId,
    controlFlags,
    initPayload,
    from,
    tracingCtx
  }) {
    this.openStreams.add(streamId);
    let cleanClose = true;
    const onServerAbort = (errResult) => {
      if (inputReader.isClosed() && outputWriter.isClosed()) {
        return;
      }
      cleanClose = false;
      if (!inputReader.isClosed()) {
        inputReader.pushValue(errResult);
        inputReader.triggerClose();
      }
      outputWriter.close();
      this.abortStream(from, streamId, errResult);
    };
    const onHandlerAbort = () => {
      onServerAbort(
        Err({
          code: ABORT_CODE,
          message: "Aborted by server procedure handler"
        })
      );
    };
    const handlerAbortController = new AbortController();
    handlerAbortController.signal.addEventListener("abort", onHandlerAbort);
    const clientAbortController = new AbortController();
    const onSessionStatus = (evt) => {
      if (evt.status !== "disconnect") {
        return;
      }
      if (evt.session.to !== from) {
        return;
      }
      cleanClose = false;
      const errPayload = {
        code: UNEXPECTED_DISCONNECT_CODE,
        message: `client unexpectedly disconnected`
      };
      if (!inputReader.isClosed()) {
        inputReader.pushValue(Err(errPayload));
        inputReader.triggerClose();
      }
      clientAbortController.abort(errPayload);
      outputWriter.close();
    };
    this.transport.addEventListener("sessionStatus", onSessionStatus);
    const onMessage = (msg) => {
      if (streamId !== msg.streamId) {
        return;
      }
      if (msg.from !== from) {
        this.log?.error("Got stream message from unexpected client", {
          ...loggingMetadata,
          clientId: this.transport.clientId,
          transportMessage: msg,
          tags: ["invariant-violation"]
        });
        return;
      }
      if (isStreamCloseRequest(msg.controlFlags)) {
        outputWriter.triggerCloseRequest();
      }
      if (isStreamAbort(msg.controlFlags)) {
        let abortResult;
        if (Value2.Check(InputErrResultSchema, msg.payload)) {
          abortResult = msg.payload;
        } else {
          abortResult = Err({
            code: ABORT_CODE,
            message: "Stream aborted, client sent invalid payload"
          });
          this.log?.warn("Got stream abort without a valid protocol error", {
            ...loggingMetadata,
            clientId: this.transport.clientId,
            transportMessage: msg,
            validationErrors: [
              ...Value2.Errors(InputErrResultSchema, msg.payload)
            ],
            tags: ["invalid-request"]
          });
        }
        if (!inputReader.isClosed()) {
          inputReader.pushValue(abortResult);
          inputReader.triggerClose();
        }
        outputWriter.close();
        clientAbortController.abort(abortResult.payload);
        return;
      }
      if (inputReader.isClosed()) {
        this.log?.warn("Received message after input stream is closed", {
          ...loggingMetadata,
          clientId: this.transport.clientId,
          transportMessage: msg,
          tags: ["invalid-request"]
        });
        onServerAbort(
          Err({
            code: INVALID_REQUEST_CODE,
            message: "Received message after input stream is closed"
          })
        );
        return;
      }
      if ("input" in procedure && Value2.Check(procedure.input, msg.payload)) {
        inputReader.pushValue(Ok(msg.payload));
      } else if (!Value2.Check(ControlMessagePayloadSchema, msg.payload)) {
        const validationErrors = [
          ...Value2.Errors(ControlMessagePayloadSchema, msg.payload)
        ];
        let errMessage = "Expected control payload for procedure with no input";
        if ("input" in procedure) {
          errMessage = "Expected either control or input payload, validation failed for both";
          validationErrors.push(...Value2.Errors(procedure.input, msg.payload));
        }
        this.log?.warn(errMessage, {
          ...loggingMetadata,
          clientId: this.transport.clientId,
          transportMessage: msg,
          validationErrors,
          tags: ["invalid-request"]
        });
        onServerAbort(
          Err({
            code: INVALID_REQUEST_CODE,
            message: errMessage
          })
        );
      }
      if (isStreamClose(msg.controlFlags)) {
        inputReader.triggerClose();
      }
    };
    this.transport.addEventListener("message", onMessage);
    const onFinishedCallbacks = [];
    const cleanup = () => {
      this.transport.removeEventListener("message", onMessage);
      this.transport.removeEventListener("sessionStatus", onSessionStatus);
      handlerAbortController.signal.addEventListener("abort", onHandlerAbort);
      this.openStreams.delete(streamId);
      onFinishedCallbacks.forEach((cb) => {
        try {
          cb();
        } catch {
        }
      });
      onFinishedCallbacks.length = 0;
    };
    const inputReader = new ReadStreamImpl(() => {
      this.transport.sendRequestCloseControl(from, streamId);
    });
    inputReader.onClose(() => {
      if (outputWriter.isClosed()) {
        cleanup();
      }
    });
    const procClosesWithResponse = procedure.type === "rpc" || procedure.type === "upload";
    const outputWriter = new WriteStreamImpl(
      (response) => {
        this.transport.send(from, {
          streamId,
          controlFlags: procClosesWithResponse ? 8 /* StreamClosedBit */ : 0,
          payload: response
        });
      },
      () => {
        if (!procClosesWithResponse && cleanClose) {
          this.transport.sendCloseControl(from, streamId);
        }
        if (inputReader.isClosed()) {
          cleanup();
        }
      }
    );
    const onHandlerError = (err, span) => {
      const errorMsg = coerceErrorString(err);
      span.recordException(err instanceof Error ? err : new Error(errorMsg));
      span.setStatus({ code: SpanStatusCode.ERROR });
      onServerAbort(
        Err({
          code: UNCAUGHT_ERROR_CODE,
          message: errorMsg
        })
      );
    };
    if (isStreamClose(controlFlags)) {
      inputReader.triggerClose();
    } else if (procedure.type === "rpc" || procedure.type === "subscription") {
      this.log?.warn(`${procedure.type} sent an init without a stream close`, {
        ...loggingMetadata,
        clientId: this.transport.clientId
      });
    }
    const serviceContextWithTransportInfo = {
      ...this.getContext(service, serviceName),
      from,
      metadata: sessionMetadata,
      abortController: handlerAbortController,
      clientAbortSignal: clientAbortController.signal,
      onRequestFinished: (cb) => {
        if (inputReader.isClosed() && outputWriter.isClosed()) {
          try {
            cb();
          } catch {
          }
          return;
        }
        onFinishedCallbacks.push(cb);
      }
    };
    switch (procedure.type) {
      case "rpc":
        void createHandlerSpan(
          procedure.type,
          serviceName,
          procedureName,
          streamId,
          tracingCtx,
          async (span) => {
            try {
              const outputMessage = await procedure.handler(
                serviceContextWithTransportInfo,
                initPayload
              );
              if (outputWriter.isClosed()) {
                return;
              }
              outputWriter.write(outputMessage);
              outputWriter.close();
            } catch (err) {
              onHandlerError(err, span);
            } finally {
              span.end();
            }
          }
        );
        break;
      case "stream":
        void createHandlerSpan(
          procedure.type,
          serviceName,
          procedureName,
          streamId,
          tracingCtx,
          async (span) => {
            try {
              await procedure.handler(
                serviceContextWithTransportInfo,
                initPayload,
                inputReader,
                outputWriter
              );
            } catch (err) {
              onHandlerError(err, span);
            } finally {
              span.end();
            }
          }
        );
        break;
      case "subscription":
        void createHandlerSpan(
          procedure.type,
          serviceName,
          procedureName,
          streamId,
          tracingCtx,
          async (span) => {
            try {
              await procedure.handler(
                serviceContextWithTransportInfo,
                initPayload,
                outputWriter
              );
            } catch (err) {
              onHandlerError(err, span);
            } finally {
              span.end();
            }
          }
        );
        break;
      case "upload":
        void createHandlerSpan(
          procedure.type,
          serviceName,
          procedureName,
          streamId,
          tracingCtx,
          async (span) => {
            try {
              const outputMessage = await procedure.handler(
                serviceContextWithTransportInfo,
                initPayload,
                inputReader
              );
              if (outputWriter.isClosed()) {
                return;
              }
              outputWriter.write(outputMessage);
              outputWriter.close();
            } catch (err) {
              onHandlerError(err, span);
            } finally {
              span.end();
            }
          }
        );
        break;
      default:
        this.log?.error(
          `got request for invalid procedure type ${procedure.type} at ${serviceName}.${procedureName}`,
          {
            ...loggingMetadata,
            tags: ["invariant-violation"]
          }
        );
        return;
    }
  }
  getContext(service, serviceName) {
    const context = this.contextMap.get(service);
    if (!context) {
      const err = `no context found for ${serviceName}`;
      this.log?.error(err, {
        clientId: this.transport.clientId,
        tags: ["invariant-violation"]
      });
      throw new Error(err);
    }
    return context;
  }
  validateNewProcStream(initMessage) {
    const session = this.transport.sessions.get(initMessage.from);
    if (!session) {
      const errMessage = `couldn't find a session for ${initMessage.from}`;
      this.log?.error(`couldn't find session for ${initMessage.from}`, {
        clientId: this.transport.clientId,
        transportMessage: initMessage,
        tags: ["invariant-violation"]
      });
      this.abortStream(
        initMessage.from,
        initMessage.streamId,
        Err({
          code: INTERNAL_RIVER_ERROR_CODE,
          message: errMessage
        })
      );
      return null;
    }
    const sessionMetadata = this.transport.sessionHandshakeMetadata.get(session);
    if (!sessionMetadata) {
      const errMessage = `session doesn't have handshake metadata`;
      this.log?.error(errMessage, {
        ...session.loggingMetadata,
        tags: ["invariant-violation"]
      });
      this.abortStream(
        initMessage.from,
        initMessage.streamId,
        Err({
          code: INTERNAL_RIVER_ERROR_CODE,
          message: errMessage
        })
      );
      return null;
    }
    if (!isStreamOpen(initMessage.controlFlags)) {
      const errMessage = `can't create a new procedure stream from a message that doesn't have the stream open bit set`;
      this.log?.warn(errMessage, {
        ...session.loggingMetadata,
        clientId: this.transport.clientId,
        transportMessage: initMessage,
        tags: ["invalid-request"]
      });
      this.abortStream(
        initMessage.from,
        initMessage.streamId,
        Err({
          code: INVALID_REQUEST_CODE,
          message: errMessage
        })
      );
      return null;
    }
    if (!initMessage.serviceName) {
      const errMessage = `missing service name in stream open message`;
      this.log?.warn(errMessage, {
        ...session.loggingMetadata,
        clientId: this.transport.clientId,
        transportMessage: initMessage,
        tags: ["invalid-request"]
      });
      this.abortStream(
        initMessage.from,
        initMessage.streamId,
        Err({
          code: INVALID_REQUEST_CODE,
          message: errMessage
        })
      );
      return null;
    }
    if (!initMessage.procedureName) {
      const errMessage = `missing procedure name in stream open message`;
      this.log?.warn(errMessage, {
        ...session.loggingMetadata,
        clientId: this.transport.clientId,
        transportMessage: initMessage,
        tags: ["invalid-request"]
      });
      this.abortStream(
        initMessage.from,
        initMessage.streamId,
        Err({
          code: INVALID_REQUEST_CODE,
          message: errMessage
        })
      );
      return null;
    }
    if (!(initMessage.serviceName in this.services)) {
      const errMessage = `couldn't find service ${initMessage.serviceName}`;
      this.log?.warn(errMessage, {
        ...session.loggingMetadata,
        clientId: this.transport.clientId,
        transportMessage: initMessage,
        tags: ["invalid-request"]
      });
      this.abortStream(
        initMessage.from,
        initMessage.streamId,
        Err({
          code: INVALID_REQUEST_CODE,
          message: errMessage
        })
      );
      return null;
    }
    const service = this.services[initMessage.serviceName];
    if (!(initMessage.procedureName in service.procedures)) {
      const errMessage = `couldn't find a matching procedure for ${initMessage.serviceName}.${initMessage.procedureName}`;
      this.log?.warn(errMessage, {
        ...session.loggingMetadata,
        clientId: this.transport.clientId,
        transportMessage: initMessage,
        tags: ["invalid-request"]
      });
      this.abortStream(
        initMessage.from,
        initMessage.streamId,
        Err({
          code: INVALID_REQUEST_CODE,
          message: errMessage
        })
      );
      return null;
    }
    const procedure = service.procedures[initMessage.procedureName];
    if (!Value2.Check(procedure.init, initMessage.payload)) {
      const errMessage = `procedure init failed validation`;
      this.log?.warn(errMessage, {
        ...session.loggingMetadata,
        clientId: this.transport.clientId,
        transportMessage: initMessage,
        tags: ["invalid-request"]
      });
      this.abortStream(
        initMessage.from,
        initMessage.streamId,
        Err({
          code: INVALID_REQUEST_CODE,
          message: errMessage
        })
      );
      return null;
    }
    return {
      sessionMetadata,
      procedure,
      procedureName: initMessage.procedureName,
      service,
      serviceName: initMessage.serviceName,
      loggingMetadata: {
        ...session.loggingMetadata,
        transportMessage: initMessage
      },
      streamId: initMessage.streamId,
      controlFlags: initMessage.controlFlags,
      tracingCtx: initMessage.tracing,
      initPayload: initMessage.payload,
      from: initMessage.from
    };
  }
  abortStream(to, streamId, payload) {
    let abortedForSession = this.serverAbortedStreams.get(to);
    if (!abortedForSession) {
      abortedForSession = new LRUSet(this.maxAbortedStreamTombstonesPerSession);
      this.serverAbortedStreams.set(to, abortedForSession);
    }
    abortedForSession.add(streamId);
    this.transport.sendAbort(to, streamId, payload);
  }
};
var LRUSet = class {
  items;
  maxItems;
  constructor(maxItems) {
    this.items = /* @__PURE__ */ new Set();
    this.maxItems = maxItems;
  }
  add(item) {
    if (this.items.has(item)) {
      this.items.delete(item);
    } else if (this.items.size >= this.maxItems) {
      const first = this.items.values().next();
      if (!first.done) {
        this.items.delete(first.value);
      }
    }
    this.items.add(item);
  }
  has(item) {
    return this.items.has(item);
  }
};
function createServer(transport, services, providedServerOptions) {
  return new RiverServer(
    transport,
    services,
    providedServerOptions?.handshakeOptions,
    providedServerOptions?.extendedContext,
    providedServerOptions?.maxAbortedStreamTombstonesPerSession
  );
}

// router/handshake.ts
function createClientHandshakeOptions(schema, construct) {
  return { schema, construct };
}
function createServerHandshakeOptions(schema, validate) {
  return { schema, validate };
}

export {
  serializeSchema,
  ServiceSchema,
  diffServerSchema,
  Ok,
  Err,
  ReadStreamImpl,
  WriteStreamImpl,
  UNCAUGHT_ERROR_CODE,
  UNEXPECTED_DISCONNECT_CODE,
  INVALID_REQUEST_CODE,
  OutputReaderErrorSchema,
  InputReaderErrorSchema,
  Procedure,
  createClient,
  createServer,
  createClientHandshakeOptions,
  createServerHandshakeOptions
};
//# sourceMappingURL=chunk-45HIR2AS.js.map