@matter/node 0.16.7 → 0.16.8-alpha.0-20260125-38e62bc3e
- package/dist/cjs/behavior/system/software-update/OtaAnnouncements.d.ts.map +1 -1
- package/dist/cjs/behavior/system/software-update/OtaAnnouncements.js +10 -8
- package/dist/cjs/behavior/system/software-update/OtaAnnouncements.js.map +1 -1
- package/dist/cjs/behaviors/thermostat/ThermostatServer.d.ts.map +1 -1
- package/dist/cjs/behaviors/thermostat/ThermostatServer.js +12 -8
- package/dist/cjs/behaviors/thermostat/ThermostatServer.js.map +1 -1
- package/dist/cjs/node/client/ClientNodeInteraction.d.ts +4 -1
- package/dist/cjs/node/client/ClientNodeInteraction.d.ts.map +1 -1
- package/dist/cjs/node/client/ClientNodeInteraction.js +4 -1
- package/dist/cjs/node/client/ClientNodeInteraction.js.map +1 -1
- package/dist/cjs/node/server/InteractionServer.d.ts +2 -2
- package/dist/cjs/node/server/InteractionServer.d.ts.map +1 -1
- package/dist/cjs/node/server/InteractionServer.js +130 -78
- package/dist/cjs/node/server/InteractionServer.js.map +1 -1
- package/dist/cjs/node/server/OnlineServerInteraction.d.ts +10 -2
- package/dist/cjs/node/server/OnlineServerInteraction.d.ts.map +1 -1
- package/dist/cjs/node/server/OnlineServerInteraction.js +9 -1
- package/dist/cjs/node/server/OnlineServerInteraction.js.map +1 -1
- package/dist/cjs/storage/client/RemoteWriter.d.ts.map +1 -1
- package/dist/cjs/storage/client/RemoteWriter.js +1 -2
- package/dist/cjs/storage/client/RemoteWriter.js.map +1 -1
- package/dist/esm/behavior/system/software-update/OtaAnnouncements.d.ts.map +1 -1
- package/dist/esm/behavior/system/software-update/OtaAnnouncements.js +11 -9
- package/dist/esm/behavior/system/software-update/OtaAnnouncements.js.map +1 -1
- package/dist/esm/behaviors/thermostat/ThermostatServer.d.ts.map +1 -1
- package/dist/esm/behaviors/thermostat/ThermostatServer.js +12 -8
- package/dist/esm/behaviors/thermostat/ThermostatServer.js.map +1 -1
- package/dist/esm/node/client/ClientNodeInteraction.d.ts +4 -1
- package/dist/esm/node/client/ClientNodeInteraction.d.ts.map +1 -1
- package/dist/esm/node/client/ClientNodeInteraction.js +4 -1
- package/dist/esm/node/client/ClientNodeInteraction.js.map +1 -1
- package/dist/esm/node/server/InteractionServer.d.ts +2 -2
- package/dist/esm/node/server/InteractionServer.d.ts.map +1 -1
- package/dist/esm/node/server/InteractionServer.js +130 -78
- package/dist/esm/node/server/InteractionServer.js.map +1 -1
- package/dist/esm/node/server/OnlineServerInteraction.d.ts +10 -2
- package/dist/esm/node/server/OnlineServerInteraction.d.ts.map +1 -1
- package/dist/esm/node/server/OnlineServerInteraction.js +9 -1
- package/dist/esm/node/server/OnlineServerInteraction.js.map +1 -1
- package/dist/esm/storage/client/RemoteWriter.d.ts.map +1 -1
- package/dist/esm/storage/client/RemoteWriter.js +1 -2
- package/dist/esm/storage/client/RemoteWriter.js.map +1 -1
- package/package.json +7 -7
- package/src/behavior/system/software-update/OtaAnnouncements.ts +12 -9
- package/src/behaviors/thermostat/ThermostatServer.ts +15 -9
- package/src/node/client/ClientNodeInteraction.ts +4 -1
- package/src/node/server/InteractionServer.ts +197 -92
- package/src/node/server/OnlineServerInteraction.ts +13 -2
- package/src/storage/client/RemoteWriter.ts +1 -2
package/src/node/server/InteractionServer.ts

@@ -29,6 +29,7 @@ import {
     InteractionRecipient,
     InteractionServerMessenger,
     InvokeRequest,
+    InvokeResponseForSend,
     Mark,
     Message,
     MessageExchange,
@@ -45,10 +46,13 @@ import {
     TimedRequest,
     WriteRequest,
     WriteResponse,
+    WriteResult,
 } from "#protocol";
 import {
+    AttributeData,
     DEFAULT_MAX_PATHS_PER_INVOKE,
     INTERACTION_PROTOCOL_ID,
+    InvokeResponseData,
     ReceivedStatusResponseError,
     Status,
     StatusCode,
@@ -313,10 +317,11 @@ export class InteractionServer implements ProtocolHandler, InteractionRecipient
     async handleWriteRequest(
         exchange: MessageExchange,
         writeRequest: WriteRequest,
+        messenger: InteractionServerMessenger,
         message: Message,
-    ): Promise<
-
-
+    ): Promise<void> {
+        let { suppressResponse, writeRequests, moreChunkedMessages } = writeRequest;
+        const { timedRequest, interactionModelRevision } = writeRequest;
         const sessionType = message.packetHeader.sessionType;

         logger.info(() => [
@@ -350,7 +355,7 @@ export class InteractionServer implements ProtocolHandler, InteractionRecipient
         }

         if (exchange.hasExpiredTimedInteraction()) {
-            exchange.clearTimedInteraction();
+            exchange.clearTimedInteraction();
             throw new StatusResponseError(`Timed request window expired. Decline write request.`, StatusCode.Timeout);
         }

@@ -379,25 +384,120 @@ export class InteractionServer implements ProtocolHandler, InteractionRecipient
             );
         }

-        //
+        // Track the previous processed attribute path for list operations across chunks.
+        // A list ADD (listIndex === null) is only valid if the previous write was to the same attribute.
+        let previousProcessedAttributePath: WriteResult.ConcreteAttributePath | undefined;

-
-
-
-                exchange,
-                message,
-                true, // always fabric filtered
-                receivedWithinTimedInteraction,
-            ),
-        );
+        // Process chunks until moreChunkedMessages is false
+        while (true) {
+            const allResponses = new Array<WriteResult.AttributeStatus>();

-
-
-
-
-
-
-
+            // Separate write requests into batches based on list validity
+            // A list ADD without a prior REPLACE_ALL to the same attribute gets a BUSY response
+            let currentBatch = new Array<AttributeData>();
+
+            const processBatch = async () => {
+                if (currentBatch.length === 0) {
+                    return;
+                }
+
+                const context = this.#prepareOnlineContext(
+                    exchange,
+                    message,
+                    true, // always fabric filtered
+                    receivedWithinTimedInteraction,
+                );
+
+                // Send batch to OnlineServerInteraction
+                const batchRequest = { ...writeRequest, writeRequests: currentBatch, suppressResponse: false };
+                const batchResults = await this.#serverInteraction.write(batchRequest, context);
+                if (batchResults) {
+                    allResponses.push(...batchResults);
+                }
+
+                currentBatch = [];
+            };
+
+            for (const request of writeRequests) {
+                const { path } = request;
+                const listIndex = path.listIndex;
+
+                if (listIndex === null) {
+                    // This is a list ADD - check if a previous path matches
+                    if (
+                        previousProcessedAttributePath?.endpointId !== path.endpointId ||
+                        previousProcessedAttributePath?.clusterId !== path.clusterId ||
+                        previousProcessedAttributePath?.attributeId !== path.attributeId
+                    ) {
+                        // Invalid ADD - process any pending batch first
+                        await processBatch();
+
+                        // According to Specification, ADDs are only allowed with a REPLACE before them
+                        // Chip SDK returns "BUSY" in cases where this rule is not followed, so we do too
+                        allResponses.push({
+                            kind: "attr-status",
+                            path: path as WriteResult.ConcreteAttributePath,
+                            status: Status.Busy,
+                        });
+
+                        // Don't update previousProcessedAttributePath for BUSY responses
+                        continue;
+                    }
+                }
+
+                // Valid write - add to batch and update tracking
+                currentBatch.push(request);
+                if (path.endpointId !== undefined && path.clusterId !== undefined && path.attributeId !== undefined) {
+                    previousProcessedAttributePath = path as WriteResult.ConcreteAttributePath;
+                }
+            }
+
+            // Process any remaining batch
+            await processBatch();
+
+            if (suppressResponse) {
+                // No response to send, we are done
+                break;
+            }
+
+            // Send WriteResponse for this chunk
+            const chunkResponse: WriteResponse = {
+                writeResponses: allResponses.map(({ path, status, clusterStatus }) => ({
+                    path,
+                    status: { status, clusterStatus },
+                })),
+                interactionModelRevision: Specification.INTERACTION_MODEL_REVISION,
+            };
+
+            await messenger.sendWriteResponse(chunkResponse, {
+                logContext: moreChunkedMessages ? "WriteResponse-chunk" : undefined,
+            });
+
+            if (!moreChunkedMessages) {
+                // Was the last message, so we are done
+                break;
+            }
+
+            // Wait for the next chunk
+            const nextChunk = await messenger.readNextWriteRequest();
+            const nextRequest = nextChunk.writeRequest;
+            ({ writeRequests, moreChunkedMessages, suppressResponse } = nextRequest);
+
+            logger.info(() => [
+                "Write",
+                Mark.INBOUND,
+                exchange.via,
+                Diagnostic.asFlags({ suppressResponse, moreChunkedMessages }),
+                Diagnostic.weak(writeRequests.map(req => this.#node.protocol.inspectPath(req.path)).join(", ")),
+            ]);
+
+            if (suppressResponse) {
+                throw new StatusResponseError(
+                    "Multiple chunked messages and SuppressResponse cannot be used together in write messages",
+                    StatusCode.InvalidAction,
+                );
+            }
+        }
     }

     async handleSubscribeRequest(
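The rewritten handleWriteRequest above batches write requests before handing them to the server interaction: a list ADD (listIndex === null) is only accepted when the immediately preceding write targeted the same attribute, otherwise it is answered with Status.Busy and excluded from the batch. A standalone sketch of that grouping rule, using simplified hypothetical types rather than the matter.js ones, could look like this:

```ts
// Illustrative sketch of the batching rule above (simplified, hypothetical types).
interface AttributePath {
    endpointId: number;
    clusterId: number;
    attributeId: number;
    listIndex?: number | null; // null marks a list ADD
}

type BatchOutcome =
    | { kind: "batch"; requests: AttributePath[] }
    | { kind: "busy"; path: AttributePath };

// Split write paths into contiguous batches; a list ADD that does not follow a write
// to the same attribute is rejected with a BUSY outcome instead of being batched.
function splitWrites(paths: AttributePath[]): BatchOutcome[] {
    const outcomes: BatchOutcome[] = [];
    let current: AttributePath[] = [];
    let previous: AttributePath | undefined;

    const flush = () => {
        if (current.length) {
            outcomes.push({ kind: "batch", requests: current });
            current = [];
        }
    };

    for (const path of paths) {
        const isListAdd = path.listIndex === null;
        const samePrevious =
            previous !== undefined &&
            previous.endpointId === path.endpointId &&
            previous.clusterId === path.clusterId &&
            previous.attributeId === path.attributeId;

        if (isListAdd && !samePrevious) {
            flush(); // process everything accepted so far first
            outcomes.push({ kind: "busy", path }); // mirrors the Status.Busy response
            continue; // previous path is not updated for rejected ADDs
        }

        current.push(path);
        previous = path;
    }

    flush();
    return outcomes;
}
```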
@@ -738,7 +838,7 @@ export class InteractionServer implements ProtocolHandler, InteractionRecipient

         const receivedWithinTimedInteraction = exchange.hasActiveTimedInteraction();
         if (exchange.hasExpiredTimedInteraction()) {
-            exchange.clearTimedInteraction();
+            exchange.clearTimedInteraction();
             throw new StatusResponseError(`Timed request window expired. Decline invoke request.`, StatusCode.Timeout);
         }

@@ -767,92 +867,78 @@ export class InteractionServer implements ProtocolHandler, InteractionRecipient
             );
         }

+        const context = this.#prepareOnlineContext(exchange, message, undefined, receivedWithinTimedInteraction);
+
         const isGroupSession = message.packetHeader.sessionType === SessionType.Group;
-
+
+        // Get the invoke-results from the server interaction
+        const results = this.#serverInteraction.invoke(request, context);
+
+        // For suppressResponse or group sessions, just consume the iterator without sending responses
+        if (suppressResponse || isGroupSession) {
+            for await (const _chunk of results);
+            return;
+        }
+
+        // Track accumulated responses for the current message
+        const currentChunkResponses = new Array<InvokeResponseData>();
+        const emptyInvokeResponse: InvokeResponseForSend = {
             suppressResponse: false, // Deprecated but must be present
             interactionModelRevision: Specification.INTERACTION_MODEL_REVISION,
             invokeResponses: [],
-            moreChunkedMessages: invokeRequests.length > 1, // Assume for now we have multiple responses when having multiple invokes
         };
-        const
-        let messageSize =
-        let
-
-
-
-
-        const
-            invokeResponse: TypeFromSchema<typeof TlvInvokeResponseData>,
-        ): Promise<void> => {
-            invokeResultsProcessed++;
-
-            if (isGroupSession) {
-                // We send no responses at all for group sessions
-                return;
-            }
+        const emptyInvokeResponseLength = TlvInvokeResponseForSend.encode(emptyInvokeResponse).byteLength;
+        let messageSize = emptyInvokeResponseLength;
+        let chunkedTransmissionTerminated = false;
+
+        /**
+         * Send a chunk when the message size limit would be exceeded.
+         */
+        const sendChunkIfNeeded = async (invokeResponse: InvokeResponseData): Promise<void> => {
            const encodedInvokeResponse = TlvInvokeResponseData.encodeTlv(invokeResponse);
            const invokeResponseBytes = TlvAny.getEncodedByteLength(encodedInvokeResponse);

-            if
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-                );
-            }
-            const moreChunkedMessages = lastMessageProcessed ? undefined : true;
-            await messenger.send(
-                MessageType.InvokeResponse,
-                TlvInvokeResponseForSend.encode({
-                    ...invokeResponseMessage,
-                    moreChunkedMessages,
-                }),
-                {
-                    logContext: {
-                        invokeMsgFlags: Diagnostic.asFlags({
-                            suppressResponse,
-                            moreChunkedMessages,
-                        }),
-                    },
-                },
-            );
-            invokeResponseMessage.invokeResponses = [];
-            messageSize = emptyInvokeResponseBytes.byteLength;
-        }
-        if (!lastMessageProcessed) {
-            invokeResultsProcessed--; // Correct counter again because we recall the method
-            return processResponseResult(invokeResponse);
+            // Check if adding this response would exceed message size
+            if (messageSize + invokeResponseBytes > exchange.maxPayloadSize && currentChunkResponses.length > 0) {
+                logger.debug(
+                    "Invoke (chunk)",
+                    Mark.OUTBOUND,
+                    exchange.via,
+                    Diagnostic.dict({ commands: currentChunkResponses.length }),
+                );
+
+                const chunkResponse: InvokeResponseForSend = {
+                    ...emptyInvokeResponse,
+                    invokeResponses: currentChunkResponses.map(r => TlvInvokeResponseData.encodeTlv(r)),
+                };
+
+                if (!(await messenger.sendInvokeResponseChunk(chunkResponse))) {
+                    chunkedTransmissionTerminated = true;
+                    return;
                 }
-
-
-
+
+                // Reset for next chunk
+                currentChunkResponses.length = 0;
+                messageSize = emptyInvokeResponseLength;
             }
+
+            // Add to the current chunk
+            currentChunkResponses.push(invokeResponse);
+            messageSize += invokeResponseBytes;
         };

-
-
-
-
-            throw new InternalError("Received response that should be suppressed for invoke");
+        // Process all invoke results
+        for await (const chunk of results) {
+            if (chunkedTransmissionTerminated) {
+                // Client terminated the chunked series, continue consuming but don't send
+                continue;
            }
+
            for (const data of chunk) {
                switch (data.kind) {
                    case "cmd-response": {
                        const { path: commandPath, commandRef, data: commandFields } = data;
-                        await
+                        await sendChunkIfNeeded({
                            command: {
                                commandPath,
                                commandFields,
@@ -864,13 +950,32 @@ export class InteractionServer implements ProtocolHandler, InteractionRecipient

                    case "cmd-status": {
                        const { path, commandRef, status, clusterStatus } = data;
-                        await
+                        await sendChunkIfNeeded({
                            status: { commandPath: path, status: { status, clusterStatus }, commandRef },
                        });
+                        break;
                    }
                }
            }
        }
+
+        // Send the final response if not already terminated
+        if (!chunkedTransmissionTerminated) {
+            if (currentChunkResponses.length > 0) {
+                logger.debug(
+                    "Invoke (final)",
+                    Mark.OUTBOUND,
+                    exchange.via,
+                    Diagnostic.dict({ commands: currentChunkResponses.length }),
+                );
+            }
+
+            const finalResponse: InvokeResponseForSend = {
+                ...emptyInvokeResponse,
+                invokeResponses: currentChunkResponses.map(r => TlvInvokeResponseData.encodeTlv(r)),
+            };
+            await messenger.sendInvokeResponse(finalResponse);
+        }
     }

     handleTimedRequest(exchange: MessageExchange, { timeout, interactionModelRevision }: TimedRequest) {
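The invoke path above now streams results from the server interaction and flushes an InvokeResponse chunk whenever the next encoded response would push the message past the exchange's maximum payload size, then sends whatever remains as the final response. A standalone sketch of that accumulate-and-flush strategy, with hypothetical helper names rather than the real messenger API, might be:

```ts
// Illustrative sketch of size-based response chunking (hypothetical names, not the matter.js API).
type SendChunk = (items: Uint8Array[], moreChunkedMessages: boolean) => Promise<void>;

// Accumulate encoded responses and flush a chunk whenever adding the next item
// would exceed maxPayloadSize; the remainder is sent as the final message.
async function sendChunked(
    encodedResponses: Uint8Array[],
    maxPayloadSize: number,
    overheadBytes: number, // size of an empty response envelope
    send: SendChunk,
): Promise<void> {
    let chunk: Uint8Array[] = [];
    let size = overheadBytes;

    for (const encoded of encodedResponses) {
        if (size + encoded.byteLength > maxPayloadSize && chunk.length > 0) {
            await send(chunk, true); // flush current chunk, more to follow
            chunk = [];
            size = overheadBytes;
        }
        chunk.push(encoded);
        size += encoded.byteLength;
    }

    await send(chunk, false); // final message, possibly empty
}
```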
package/src/node/server/OnlineServerInteraction.ts

@@ -3,6 +3,7 @@ import { NotImplementedError } from "#general";
 import {
     Interactable,
     Invoke,
+    InvokeResult,
     NodeProtocol,
     Read,
     ReadResult,
@@ -35,12 +36,21 @@ export class OnlineServerInteraction implements Interactable<RemoteActorContext.
         throw new NotImplementedError("subscribe not implemented");
     }

+    /**
+     * Process write requests and return results.
+     * The caller is responsible for messaging/chunking and list state tracking.
+     */
     async write<T extends Write>(request: T, context: RemoteActorContext.Options): WriteResult<T> {
         return RemoteActorContext(context).act(session => this.#interaction.write(request, session));
     }

-
+    /**
+     * Process invoke requests and yield results.
+     * The caller is responsible for messaging/chunking.
+     */
+    async *invoke(request: Invoke, context: RemoteActorContext.Options): InvokeResult {
         const session = RemoteActorContext({ ...context, command: true }).open();
+
         try {
             for await (const chunk of this.#interaction.invoke(request, session)) {
                 yield chunk;
@@ -48,6 +58,7 @@ export class OnlineServerInteraction implements Interactable<RemoteActorContext.
         } catch (error) {
             await session.reject(error);
         }
-
+
+        await session.resolve(undefined);
     }
 }
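OnlineServerInteraction.invoke is now an async generator that opens the acting session up front, forwards each result chunk to the caller, rejects the session if iteration throws, and resolves it once iteration finishes. A minimal standalone illustration of that session-around-a-generator shape, with a hypothetical Session interface standing in for RemoteActorContext, could be:

```ts
// Hypothetical session shape, used only for this illustration.
interface Session<T> {
    reject(error: unknown): Promise<void>;
    resolve(value: T): Promise<void>;
}

// Wrap a source iterator in a session: yield every chunk, reject on error,
// and resolve once the consumer has drained the iterator.
async function* withSession<T>(
    session: Session<void>,
    source: AsyncIterable<T>,
): AsyncGenerator<T, void, undefined> {
    try {
        for await (const chunk of source) {
            yield chunk;
        }
    } catch (error) {
        await session.reject(error);
    }
    await session.resolve(undefined);
}
```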
package/src/storage/client/RemoteWriter.ts

@@ -55,8 +55,7 @@ export function RemoteWriter(node: ClientNode, structure: ClientStructure): Remo
         }

         const write = Write(...attrWrites);
-
-        WriteResult.assertSuccess(response);
+        WriteResult.assertSuccess(await node.interaction.write(write));
     };
 }