@dxos/echo-pipeline 0.7.4 → 0.7.5-feature-compute.4d9d99a

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. package/dist/lib/browser/{chunk-LZK5YFYE.mjs → chunk-QBMTPEMY.mjs} +111 -38
  2. package/dist/lib/browser/chunk-QBMTPEMY.mjs.map +7 -0
  3. package/dist/lib/browser/index.mjs +171 -77
  4. package/dist/lib/browser/index.mjs.map +4 -4
  5. package/dist/lib/browser/meta.json +1 -1
  6. package/dist/lib/browser/testing/index.mjs +1 -1
  7. package/dist/lib/node/{chunk-MACQJ2EP.cjs → chunk-NPZ57MV5.cjs} +110 -40
  8. package/dist/lib/node/chunk-NPZ57MV5.cjs.map +7 -0
  9. package/dist/lib/node/index.cjs +184 -94
  10. package/dist/lib/node/index.cjs.map +4 -4
  11. package/dist/lib/node/meta.json +1 -1
  12. package/dist/lib/node/testing/index.cjs +10 -10
  13. package/dist/lib/node-esm/{chunk-JIZPSASG.mjs → chunk-OY5N3ZIV.mjs} +111 -38
  14. package/dist/lib/node-esm/chunk-OY5N3ZIV.mjs.map +7 -0
  15. package/dist/lib/node-esm/index.mjs +171 -77
  16. package/dist/lib/node-esm/index.mjs.map +4 -4
  17. package/dist/lib/node-esm/meta.json +1 -1
  18. package/dist/lib/node-esm/testing/index.mjs +1 -1
  19. package/dist/types/src/automerge/automerge-host.d.ts +5 -1
  20. package/dist/types/src/automerge/automerge-host.d.ts.map +1 -1
  21. package/dist/types/src/automerge/collection-synchronizer.d.ts +1 -0
  22. package/dist/types/src/automerge/collection-synchronizer.d.ts.map +1 -1
  23. package/dist/types/src/automerge/echo-network-adapter.d.ts +1 -0
  24. package/dist/types/src/automerge/echo-network-adapter.d.ts.map +1 -1
  25. package/dist/types/src/automerge/leveldb-storage-adapter.d.ts +1 -1
  26. package/dist/types/src/db-host/echo-host.d.ts +3 -2
  27. package/dist/types/src/db-host/echo-host.d.ts.map +1 -1
  28. package/dist/types/src/edge/echo-edge-replicator.d.ts.map +1 -1
  29. package/dist/types/src/edge/inflight-request-limiter.d.ts +24 -0
  30. package/dist/types/src/edge/inflight-request-limiter.d.ts.map +1 -0
  31. package/dist/types/src/pipeline/pipeline.d.ts +1 -0
  32. package/dist/types/src/pipeline/pipeline.d.ts.map +1 -1
  33. package/dist/types/src/space/control-pipeline.d.ts +9 -0
  34. package/dist/types/src/space/control-pipeline.d.ts.map +1 -1
  35. package/dist/types/src/space/space-manager.d.ts +1 -0
  36. package/dist/types/src/space/space-manager.d.ts.map +1 -1
  37. package/dist/types/tsconfig.tsbuildinfo +1 -0
  38. package/package.json +34 -34
  39. package/src/automerge/automerge-host.ts +49 -14
  40. package/src/automerge/collection-synchronizer.ts +8 -4
  41. package/src/automerge/echo-network-adapter.ts +7 -0
  42. package/src/automerge/mesh-echo-replicator.ts +2 -2
  43. package/src/db-host/echo-host.ts +4 -1
  44. package/src/edge/echo-edge-replicator.ts +34 -18
  45. package/src/edge/inflight-request-limiter.ts +69 -0
  46. package/src/pipeline/pipeline.ts +9 -1
  47. package/src/space/control-pipeline.ts +25 -2
  48. package/src/space/space-manager.ts +17 -1
  49. package/dist/lib/browser/chunk-LZK5YFYE.mjs.map +0 -7
  50. package/dist/lib/node/chunk-MACQJ2EP.cjs.map +0 -7
  51. package/dist/lib/node-esm/chunk-JIZPSASG.mjs.map +0 -7
@@ -28,11 +28,11 @@ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__ge
28
28
  var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
29
29
  var node_exports = {};
30
30
  __export(node_exports, {
31
- AuthExtension: () => import_chunk_MACQJ2EP.AuthExtension,
32
- AuthStatus: () => import_chunk_MACQJ2EP.AuthStatus,
31
+ AuthExtension: () => import_chunk_NPZ57MV5.AuthExtension,
32
+ AuthStatus: () => import_chunk_NPZ57MV5.AuthStatus,
33
33
  AutomergeHost: () => AutomergeHost,
34
- CredentialRetrieverExtension: () => import_chunk_MACQJ2EP.CredentialRetrieverExtension,
35
- CredentialServerExtension: () => import_chunk_MACQJ2EP.CredentialServerExtension,
34
+ CredentialRetrieverExtension: () => import_chunk_NPZ57MV5.CredentialRetrieverExtension,
35
+ CredentialServerExtension: () => import_chunk_NPZ57MV5.CredentialServerExtension,
36
36
  DataServiceImpl: () => DataServiceImpl,
37
37
  DatabaseRoot: () => DatabaseRoot,
38
38
  DocumentsSynchronizer: () => DocumentsSynchronizer,
@@ -40,37 +40,37 @@ __export(node_exports, {
40
40
  EchoEdgeReplicator: () => EchoEdgeReplicator,
41
41
  EchoHost: () => EchoHost,
42
42
  LevelDBStorageAdapter: () => LevelDBStorageAdapter,
43
- MOCK_AUTH_PROVIDER: () => import_chunk_MACQJ2EP.MOCK_AUTH_PROVIDER,
44
- MOCK_AUTH_VERIFIER: () => import_chunk_MACQJ2EP.MOCK_AUTH_VERIFIER,
43
+ MOCK_AUTH_PROVIDER: () => import_chunk_NPZ57MV5.MOCK_AUTH_PROVIDER,
44
+ MOCK_AUTH_VERIFIER: () => import_chunk_NPZ57MV5.MOCK_AUTH_VERIFIER,
45
45
  MeshEchoReplicator: () => MeshEchoReplicator,
46
- MetadataStore: () => import_chunk_MACQJ2EP.MetadataStore,
47
- Pipeline: () => import_chunk_MACQJ2EP.Pipeline,
46
+ MetadataStore: () => import_chunk_NPZ57MV5.MetadataStore,
47
+ Pipeline: () => import_chunk_NPZ57MV5.Pipeline,
48
48
  QueryServiceImpl: () => QueryServiceImpl,
49
49
  QueryState: () => QueryState,
50
- Space: () => import_chunk_MACQJ2EP.Space,
50
+ Space: () => import_chunk_NPZ57MV5.Space,
51
51
  SpaceDocumentListUpdatedEvent: () => SpaceDocumentListUpdatedEvent,
52
- SpaceManager: () => import_chunk_MACQJ2EP.SpaceManager,
53
- SpaceProtocol: () => import_chunk_MACQJ2EP.SpaceProtocol,
54
- SpaceProtocolSession: () => import_chunk_MACQJ2EP.SpaceProtocolSession,
52
+ SpaceManager: () => import_chunk_NPZ57MV5.SpaceManager,
53
+ SpaceProtocol: () => import_chunk_NPZ57MV5.SpaceProtocol,
54
+ SpaceProtocolSession: () => import_chunk_NPZ57MV5.SpaceProtocolSession,
55
55
  SpaceStateManager: () => SpaceStateManager,
56
- TimeframeClock: () => import_chunk_MACQJ2EP.TimeframeClock,
57
- codec: () => import_chunk_MACQJ2EP.codec,
58
- createIdFromSpaceKey: () => import_chunk_MACQJ2EP.createIdFromSpaceKey,
59
- createMappedFeedWriter: () => import_chunk_MACQJ2EP.createMappedFeedWriter,
56
+ TimeframeClock: () => import_chunk_NPZ57MV5.TimeframeClock,
57
+ codec: () => import_chunk_NPZ57MV5.codec,
58
+ createIdFromSpaceKey: () => import_chunk_NPZ57MV5.createIdFromSpaceKey,
59
+ createMappedFeedWriter: () => import_chunk_NPZ57MV5.createMappedFeedWriter,
60
60
  deriveCollectionIdFromSpaceId: () => deriveCollectionIdFromSpaceId,
61
61
  diffCollectionState: () => diffCollectionState,
62
62
  encodingOptions: () => encodingOptions,
63
63
  findInlineObjectOfType: () => findInlineObjectOfType,
64
64
  getSpaceIdFromCollectionId: () => getSpaceIdFromCollectionId,
65
65
  getSpaceKeyFromDoc: () => getSpaceKeyFromDoc,
66
- hasInvitationExpired: () => import_chunk_MACQJ2EP.hasInvitationExpired,
67
- mapFeedIndexesToTimeframe: () => import_chunk_MACQJ2EP.mapFeedIndexesToTimeframe,
68
- mapTimeframeToFeedIndexes: () => import_chunk_MACQJ2EP.mapTimeframeToFeedIndexes,
69
- startAfter: () => import_chunk_MACQJ2EP.startAfter,
70
- valueEncoding: () => import_chunk_MACQJ2EP.valueEncoding
66
+ hasInvitationExpired: () => import_chunk_NPZ57MV5.hasInvitationExpired,
67
+ mapFeedIndexesToTimeframe: () => import_chunk_NPZ57MV5.mapFeedIndexesToTimeframe,
68
+ mapTimeframeToFeedIndexes: () => import_chunk_NPZ57MV5.mapTimeframeToFeedIndexes,
69
+ startAfter: () => import_chunk_NPZ57MV5.startAfter,
70
+ valueEncoding: () => import_chunk_NPZ57MV5.valueEncoding
71
71
  });
72
72
  module.exports = __toCommonJS(node_exports);
73
- var import_chunk_MACQJ2EP = require("./chunk-MACQJ2EP.cjs");
73
+ var import_chunk_NPZ57MV5 = require("./chunk-NPZ57MV5.cjs");
74
74
  var import_async = require("@dxos/async");
75
75
  var import_stream = require("@dxos/codec-protobuf/stream");
76
76
  var import_invariant = require("@dxos/invariant");
@@ -154,7 +154,6 @@ var import_invariant11 = require("@dxos/invariant");
154
154
  var A4 = __toESM(require("@dxos/automerge/automerge"));
155
155
  var import_log10 = require("@dxos/log");
156
156
  var import_async8 = require("@dxos/async");
157
- var A5 = __toESM(require("@dxos/automerge/automerge"));
158
157
  var import_automerge_repo5 = require("@dxos/automerge/automerge-repo");
159
158
  var import_context12 = require("@dxos/context");
160
159
  var import_crypto = require("@dxos/crypto");
@@ -164,6 +163,9 @@ var import_protocols6 = require("@dxos/protocols");
164
163
  var import_buf = require("@dxos/protocols/buf");
165
164
  var import_messenger_pb = require("@dxos/protocols/buf/dxos/edge/messenger_pb");
166
165
  var import_util6 = require("@dxos/util");
166
+ var import_async9 = require("@dxos/async");
167
+ var import_context13 = require("@dxos/context");
168
+ var import_log12 = require("@dxos/log");
167
169
  var import_echo_protocol5 = require("@dxos/echo-protocol");
168
170
  var __dxlog_file = "/home/runner/work/dxos/dxos/packages/core/echo/echo-pipeline/src/db-host/documents-synchronizer.ts";
169
171
  var MAX_UPDATE_FREQ = 10;
@@ -418,7 +420,7 @@ var CollectionSynchronizer = class extends import_context3.Resource {
418
420
  return;
419
421
  }
420
422
  for (const [collectionId, state] of this._perCollectionStates.entries()) {
421
- if (this._shouldSyncCollection(collectionId, peerId)) {
423
+ if (this._activeCollections.has(collectionId) && this._shouldSyncCollection(collectionId, peerId)) {
422
424
  state.interestedPeers.add(peerId);
423
425
  state.lastQueried.set(peerId, Date.now());
424
426
  this._queryCollectionState(collectionId, peerId);
@@ -460,11 +462,18 @@ var CollectionSynchronizer = class extends import_context3.Resource {
460
462
  });
461
463
  validateCollectionState(state);
462
464
  const perCollectionState = this._getOrCreatePerCollectionState(collectionId);
463
- perCollectionState.remoteStates.set(peerId, state);
464
- this.remoteStateUpdated.emit({
465
- peerId,
466
- collectionId
467
- });
465
+ const existingState = perCollectionState.remoteStates.get(peerId) ?? {
466
+ documents: {}
467
+ };
468
+ const diff = diffCollectionState(existingState, state);
469
+ if (diff.missingOnLocal.length > 0 || diff.different.length > 0) {
470
+ perCollectionState.remoteStates.set(peerId, state);
471
+ this.remoteStateUpdated.emit({
472
+ peerId,
473
+ collectionId,
474
+ newDocsAppeared: diff.missingOnLocal.length > 0
475
+ });
476
+ }
468
477
  }
469
478
  _getOrCreatePerCollectionState(collectionId) {
470
479
  return (0, import_util.defaultMap)(this._perCollectionStates, collectionId, () => ({
@@ -616,7 +625,7 @@ var EchoNetworkAdapter = class extends import_automerge_repo2.NetworkAdapter {
616
625
  getContainingSpaceForDocument: this._params.getContainingSpaceForDocument,
617
626
  getContainingSpaceIdForDocument: async (documentId) => {
618
627
  const key = await this._params.getContainingSpaceForDocument(documentId);
619
- return key ? (0, import_chunk_MACQJ2EP.createIdFromSpaceKey)(key) : null;
628
+ return key ? (0, import_chunk_NPZ57MV5.createIdFromSpaceKey)(key) : null;
620
629
  }
621
630
  });
622
631
  }
@@ -772,6 +781,12 @@ var EchoNetworkAdapter = class extends import_automerge_repo2.NetworkAdapter {
772
781
  }
773
782
  this._params.monitor?.recordMessageReceived(message);
774
783
  }
784
+ onConnectionAuthScopeChanged(peer) {
785
+ const entry = this._connections.get(peer);
786
+ if (entry) {
787
+ this._onConnectionAuthScopeChanged(entry.connection);
788
+ }
789
+ }
775
790
  /**
776
791
  * Trigger doc-synchronizer shared documents set recalculation. Happens on peer-candidate.
777
792
  * TODO(y): replace with a proper API call when sharePolicy update becomes supported by automerge-repo
@@ -781,14 +796,14 @@ var EchoNetworkAdapter = class extends import_automerge_repo2.NetworkAdapter {
781
796
  peerId: connection.peerId
782
797
  }, {
783
798
  F: __dxlog_file3,
784
- L: 254,
799
+ L: 261,
785
800
  S: this,
786
801
  C: (f, a) => f(...a)
787
802
  });
788
803
  const entry = this._connections.get(connection.peerId);
789
804
  (0, import_invariant4.invariant)(entry, void 0, {
790
805
  F: __dxlog_file3,
791
- L: 256,
806
+ L: 263,
792
807
  S: this,
793
808
  A: [
794
809
  "entry",
@@ -805,14 +820,14 @@ var EchoNetworkAdapter = class extends import_automerge_repo2.NetworkAdapter {
805
820
  peerId: connection.peerId
806
821
  }, {
807
822
  F: __dxlog_file3,
808
- L: 262,
823
+ L: 269,
809
824
  S: this,
810
825
  C: (f, a) => f(...a)
811
826
  });
812
827
  const entry = this._connections.get(connection.peerId);
813
828
  (0, import_invariant4.invariant)(entry, void 0, {
814
829
  F: __dxlog_file3,
815
- L: 264,
830
+ L: 271,
816
831
  S: this,
817
832
  A: [
818
833
  "entry",
@@ -826,13 +841,13 @@ var EchoNetworkAdapter = class extends import_automerge_repo2.NetworkAdapter {
826
841
  this._params.monitor?.recordPeerDisconnected(connection.peerId);
827
842
  void entry.reader.cancel().catch((err) => import_log5.log.catch(err, void 0, {
828
843
  F: __dxlog_file3,
829
- L: 270,
844
+ L: 277,
830
845
  S: this,
831
846
  C: (f, a) => f(...a)
832
847
  }));
833
848
  void entry.writer.abort().catch((err) => import_log5.log.catch(err, void 0, {
834
849
  F: __dxlog_file3,
835
- L: 271,
850
+ L: 278,
836
851
  S: this,
837
852
  C: (f, a) => f(...a)
838
853
  }));
@@ -992,7 +1007,7 @@ function _ts_decorate2(decorators, target, key, desc) {
992
1007
  }
993
1008
  var __dxlog_file4 = "/home/runner/work/dxos/dxos/packages/core/echo/echo-pipeline/src/automerge/automerge-host.ts";
994
1009
  var AutomergeHost = class extends import_context2.Resource {
995
- constructor({ db, indexMetadataStore, dataMonitor, peerIdProvider }) {
1010
+ constructor({ db, indexMetadataStore, dataMonitor, peerIdProvider, getSpaceKeyByRootDocumentId }) {
996
1011
  super();
997
1012
  this._collectionSynchronizer = new CollectionSynchronizer({
998
1013
  queryCollectionState: this._queryCollectionState.bind(this),
@@ -1021,6 +1036,7 @@ var AutomergeHost = class extends import_context2.Resource {
1021
1036
  });
1022
1037
  this._indexMetadataStore = indexMetadataStore;
1023
1038
  this._peerIdProvider = peerIdProvider;
1039
+ this._getSpaceKeyByRootDocumentId = getSpaceKeyByRootDocumentId;
1024
1040
  }
1025
1041
  async _open() {
1026
1042
  this._peerId = `host-${this._peerIdProvider?.() ?? import_keys2.PublicKey.random().toHex()}`;
@@ -1034,13 +1050,22 @@ var AutomergeHost = class extends import_context2.Resource {
1034
1050
  this._echoNetworkAdapter
1035
1051
  ]
1036
1052
  });
1037
- import_async3.Event.wrap(this._echoNetworkAdapter, "peer-candidate").on(this._ctx, (e) => this._onPeerConnected(e.peerId));
1038
- import_async3.Event.wrap(this._echoNetworkAdapter, "peer-disconnected").on(this._ctx, (e) => this._onPeerDisconnected(e.peerId));
1039
- this._collectionSynchronizer.remoteStateUpdated.on(this._ctx, ({ collectionId, peerId }) => {
1053
+ let updatingAuthScope = false;
1054
+ import_async3.Event.wrap(this._echoNetworkAdapter, "peer-candidate").on(this._ctx, (e) => !updatingAuthScope && this._onPeerConnected(e.peerId));
1055
+ import_async3.Event.wrap(this._echoNetworkAdapter, "peer-disconnected").on(this._ctx, (e) => !updatingAuthScope && this._onPeerDisconnected(e.peerId));
1056
+ this._collectionSynchronizer.remoteStateUpdated.on(this._ctx, ({ collectionId, peerId, newDocsAppeared }) => {
1040
1057
  this._onRemoteCollectionStateUpdated(collectionId, peerId);
1041
1058
  this.collectionStateUpdated.emit({
1042
1059
  collectionId
1043
1060
  });
1061
+ if (newDocsAppeared) {
1062
+ updatingAuthScope = true;
1063
+ try {
1064
+ this._echoNetworkAdapter.onConnectionAuthScopeChanged(peerId);
1065
+ } finally {
1066
+ updatingAuthScope = false;
1067
+ }
1068
+ }
1044
1069
  });
1045
1070
  await this._echoNetworkAdapter.open();
1046
1071
  await this._collectionSynchronizer.open();
@@ -1123,7 +1148,7 @@ var AutomergeHost = class extends import_context2.Resource {
1123
1148
  await Promise.all(headsToWait.map(async (entry, index) => {
1124
1149
  const handle = await this.loadDoc(import_context2.Context.default(void 0, {
1125
1150
  F: __dxlog_file4,
1126
- L: 239
1151
+ L: 264
1127
1152
  }), entry.documentId);
1128
1153
  await waitForHeads(handle, entry.heads);
1129
1154
  }));
@@ -1136,7 +1161,7 @@ var AutomergeHost = class extends import_context2.Resource {
1136
1161
  documentId
1137
1162
  }, {
1138
1163
  F: __dxlog_file4,
1139
- L: 251,
1164
+ L: 276,
1140
1165
  S: this,
1141
1166
  C: (f, a) => f(...a)
1142
1167
  });
@@ -1152,7 +1177,7 @@ var AutomergeHost = class extends import_context2.Resource {
1152
1177
  documentId
1153
1178
  }, {
1154
1179
  F: __dxlog_file4,
1155
- L: 255,
1180
+ L: 280,
1156
1181
  S: this,
1157
1182
  C: (f, a) => f(...a)
1158
1183
  });
@@ -1161,7 +1186,7 @@ var AutomergeHost = class extends import_context2.Resource {
1161
1186
  const doc = handle.docSync();
1162
1187
  (0, import_invariant3.invariant)(doc, void 0, {
1163
1188
  F: __dxlog_file4,
1164
- L: 260,
1189
+ L: 285,
1165
1190
  S: this,
1166
1191
  A: [
1167
1192
  "doc",
@@ -1175,7 +1200,7 @@ var AutomergeHost = class extends import_context2.Resource {
1175
1200
  }
1176
1201
  import_log3.log.info("done re-indexing heads", void 0, {
1177
1202
  F: __dxlog_file4,
1178
- L: 267,
1203
+ L: 292,
1179
1204
  S: this,
1180
1205
  C: (f, a) => f(...a)
1181
1206
  });
@@ -1259,14 +1284,17 @@ var AutomergeHost = class extends import_context2.Resource {
1259
1284
  }
1260
1285
  async _getContainingSpaceForDocument(documentId) {
1261
1286
  const doc = this._repo.handles[documentId]?.docSync();
1262
- if (!doc) {
1263
- return null;
1287
+ if (doc) {
1288
+ const spaceKeyHex = getSpaceKeyFromDoc(doc);
1289
+ if (spaceKeyHex) {
1290
+ return import_keys2.PublicKey.from(spaceKeyHex);
1291
+ }
1264
1292
  }
1265
- const spaceKeyHex = getSpaceKeyFromDoc(doc);
1266
- if (!spaceKeyHex) {
1267
- return null;
1293
+ const rootDocSpaceKey = this._getSpaceKeyByRootDocumentId?.(documentId);
1294
+ if (rootDocSpaceKey) {
1295
+ return rootDocSpaceKey;
1268
1296
  }
1269
- return import_keys2.PublicKey.from(spaceKeyHex);
1297
+ return null;
1270
1298
  }
1271
1299
  /**
1272
1300
  * Flush documents to disk.
@@ -1380,11 +1408,14 @@ var AutomergeHost = class extends import_context2.Resource {
1380
1408
  if (toReplicate.length === 0) {
1381
1409
  return;
1382
1410
  }
1383
- import_log3.log.info("replication documents after collection sync", {
1411
+ import_log3.log.info("replicating documents after collection sync", {
1412
+ collectionId,
1413
+ peerId,
1414
+ toReplicate,
1384
1415
  count: toReplicate.length
1385
1416
  }, {
1386
1417
  F: __dxlog_file4,
1387
- L: 499,
1418
+ L: 531,
1388
1419
  S: this,
1389
1420
  C: (f, a) => f(...a)
1390
1421
  });
@@ -1451,7 +1482,7 @@ var changeIsPresentInDoc = (doc, changeHash) => {
1451
1482
  var decodeCollectionState = (state) => {
1452
1483
  (0, import_invariant3.invariant)(typeof state === "object" && state !== null, "Invalid state", {
1453
1484
  F: __dxlog_file4,
1454
- L: 557,
1485
+ L: 592,
1455
1486
  S: void 0,
1456
1487
  A: [
1457
1488
  "typeof state === 'object' && state !== null",
@@ -1717,10 +1748,10 @@ var MeshEchoReplicator = class {
1717
1748
  documentId: params.documentId,
1718
1749
  peerId: connection.peerId
1719
1750
  });
1720
- (0, import_log6.log)("document not found locally for share policy check, accepting the remote document", {
1751
+ (0, import_log6.log)("document not found locally for share policy check", {
1721
1752
  peerId: connection.peerId,
1722
1753
  documentId: params.documentId,
1723
- remoteDocumentExists
1754
+ acceptDocument: remoteDocumentExists
1724
1755
  }, {
1725
1756
  F: __dxlog_file7,
1726
1757
  L: 91,
@@ -1729,7 +1760,7 @@ var MeshEchoReplicator = class {
1729
1760
  });
1730
1761
  return remoteDocumentExists;
1731
1762
  }
1732
- const spaceId = await (0, import_chunk_MACQJ2EP.createIdFromSpaceKey)(spaceKey);
1763
+ const spaceId = await (0, import_chunk_NPZ57MV5.createIdFromSpaceKey)(spaceKey);
1733
1764
  const authorizedDevices = this._authorizedDevices.get(spaceId);
1734
1765
  if (!connection.remoteDeviceKey) {
1735
1766
  (0, import_log6.log)("device key not found for share policy check", {
@@ -1800,7 +1831,7 @@ var MeshEchoReplicator = class {
1800
1831
  S: this,
1801
1832
  C: (f, a) => f(...a)
1802
1833
  });
1803
- const spaceId = await (0, import_chunk_MACQJ2EP.createIdFromSpaceKey)(spaceKey);
1834
+ const spaceId = await (0, import_chunk_NPZ57MV5.createIdFromSpaceKey)(spaceKey);
1804
1835
  (0, import_util3.defaultMap)(this._authorizedDevices, spaceId, () => new import_util3.ComplexSet(import_keys3.PublicKey.hash)).add(deviceKey);
1805
1836
  for (const connection of this._connections) {
1806
1837
  if (connection.remoteDeviceKey && connection.remoteDeviceKey.equals(deviceKey)) {
@@ -2917,7 +2948,7 @@ var INDEXER_CONFIG = {
2917
2948
  ]
2918
2949
  };
2919
2950
  var EchoHost = class extends import_context7.Resource {
2920
- constructor({ kv, peerIdProvider }) {
2951
+ constructor({ kv, peerIdProvider, getSpaceKeyByRootDocumentId }) {
2921
2952
  super();
2922
2953
  this._spaceStateManager = new SpaceStateManager();
2923
2954
  this._indexMetadataStore = new import_indexing2.IndexMetadataStore({
@@ -2928,7 +2959,8 @@ var EchoHost = class extends import_context7.Resource {
2928
2959
  db: kv,
2929
2960
  dataMonitor: this._echoDataMonitor,
2930
2961
  indexMetadataStore: this._indexMetadataStore,
2931
- peerIdProvider
2962
+ peerIdProvider,
2963
+ getSpaceKeyByRootDocumentId
2932
2964
  });
2933
2965
  this._indexer = new import_indexing2.Indexer({
2934
2966
  db: kv,
@@ -3053,7 +3085,7 @@ var EchoHost = class extends import_context7.Resource {
3053
3085
  async createSpaceRoot(spaceKey) {
3054
3086
  (0, import_invariant8.invariant)(this._lifecycleState === import_context7.LifecycleState.OPEN, void 0, {
3055
3087
  F: __dxlog_file15,
3056
- L: 217,
3088
+ L: 220,
3057
3089
  S: this,
3058
3090
  A: [
3059
3091
  "this._lifecycleState === LifecycleState.OPEN",
@@ -3081,7 +3113,7 @@ var EchoHost = class extends import_context7.Resource {
3081
3113
  async openSpaceRoot(spaceId, automergeUrl) {
3082
3114
  (0, import_invariant8.invariant)(this._lifecycleState === import_context7.LifecycleState.OPEN, void 0, {
3083
3115
  F: __dxlog_file15,
3084
- L: 236,
3116
+ L: 239,
3085
3117
  S: this,
3086
3118
  A: [
3087
3119
  "this._lifecycleState === LifecycleState.OPEN",
@@ -3108,6 +3140,57 @@ var EchoHost = class extends import_context7.Resource {
3108
3140
  await this._automergeHost.removeReplicator(replicator);
3109
3141
  }
3110
3142
  };
3143
+ var __dxlog_file16 = "/home/runner/work/dxos/dxos/packages/core/echo/echo-pipeline/src/edge/inflight-request-limiter.ts";
3144
+ var InflightRequestLimiter = class extends import_context13.Resource {
3145
+ constructor(_config) {
3146
+ super();
3147
+ this._config = _config;
3148
+ this._inflightRequestBalance = 0;
3149
+ this._requestBarrier = new import_async9.Trigger();
3150
+ }
3151
+ async _open() {
3152
+ this._inflightRequestBalance = 0;
3153
+ this._requestBarrier.reset();
3154
+ this._requestBarrier.wake();
3155
+ }
3156
+ async _close() {
3157
+ this._inflightRequestBalance = 0;
3158
+ this._requestBarrier.throw(new Error("Rate limiter closed."));
3159
+ clearTimeout(this._resetBalanceTimeout);
3160
+ }
3161
+ async rateLimit(message) {
3162
+ if (message.type !== "sync") {
3163
+ return;
3164
+ }
3165
+ while (this._inflightRequestBalance >= this._config.maxInflightRequests) {
3166
+ await this._requestBarrier.wait();
3167
+ }
3168
+ this._inflightRequestBalance++;
3169
+ if (this._inflightRequestBalance === this._config.maxInflightRequests) {
3170
+ this._requestBarrier.reset();
3171
+ this._resetBalanceTimeout = setTimeout(() => {
3172
+ import_log12.log.warn("Request balance has not changed during specified timeout, resetting request limiter.", void 0, {
3173
+ F: __dxlog_file16,
3174
+ L: 52,
3175
+ S: this,
3176
+ C: (f, a) => f(...a)
3177
+ });
3178
+ this._inflightRequestBalance = 0;
3179
+ this._requestBarrier.wake();
3180
+ }, this._config.resetBalanceTimeoutMs);
3181
+ }
3182
+ }
3183
+ handleResponse(message) {
3184
+ if (message.type !== "sync") {
3185
+ return;
3186
+ }
3187
+ this._inflightRequestBalance--;
3188
+ if (this._inflightRequestBalance + 1 === this._config.maxInflightRequests) {
3189
+ this._requestBarrier.wake();
3190
+ clearInterval(this._resetBalanceTimeout);
3191
+ }
3192
+ }
3193
+ };
3111
3194
  function _using_ctx() {
3112
3195
  var _disposeSuppressedError = typeof SuppressedError === "function" ? SuppressedError : function(error, suppressed) {
3113
3196
  var err = new Error();
@@ -3170,7 +3253,7 @@ function _using_ctx() {
3170
3253
  }
3171
3254
  };
3172
3255
  }
3173
- var __dxlog_file16 = "/home/runner/work/dxos/dxos/packages/core/echo/echo-pipeline/src/edge/echo-edge-replicator.ts";
3256
+ var __dxlog_file17 = "/home/runner/work/dxos/dxos/packages/core/echo/echo-pipeline/src/edge/echo-edge-replicator.ts";
3174
3257
  var INITIAL_RESTART_DELAY = 500;
3175
3258
  var RESTART_DELAY_JITTER = 250;
3176
3259
  var MAX_RESTART_DELAY = 5e3;
@@ -3190,14 +3273,14 @@ var EchoEdgeReplicator = class {
3190
3273
  peerId: context.peerId,
3191
3274
  connectedSpaces: this._connectedSpaces.size
3192
3275
  }, {
3193
- F: __dxlog_file16,
3276
+ F: __dxlog_file17,
3194
3277
  L: 60,
3195
3278
  S: this,
3196
3279
  C: (f, a) => f(...a)
3197
3280
  });
3198
3281
  this._context = context;
3199
3282
  this._ctx = import_context12.Context.default(void 0, {
3200
- F: __dxlog_file16,
3283
+ F: __dxlog_file17,
3201
3284
  L: 63
3202
3285
  });
3203
3286
  this._ctx.onDispose(this._edgeConnection.onReconnected(() => {
@@ -3276,7 +3359,7 @@ var EchoEdgeReplicator = class {
3276
3359
  }
3277
3360
  async _openConnection(spaceId, reconnects = 0) {
3278
3361
  (0, import_invariant12.invariant)(this._context, void 0, {
3279
- F: __dxlog_file16,
3362
+ F: __dxlog_file17,
3280
3363
  L: 124,
3281
3364
  S: this,
3282
3365
  A: [
@@ -3285,7 +3368,7 @@ var EchoEdgeReplicator = class {
3285
3368
  ]
3286
3369
  });
3287
3370
  (0, import_invariant12.invariant)(!this._connections.has(spaceId), void 0, {
3288
- F: __dxlog_file16,
3371
+ F: __dxlog_file17,
3289
3372
  L: 125,
3290
3373
  S: this,
3291
3374
  A: [
@@ -3315,7 +3398,7 @@ var EchoEdgeReplicator = class {
3315
3398
  reconnects,
3316
3399
  restartDelay
3317
3400
  }, {
3318
- F: __dxlog_file16,
3401
+ F: __dxlog_file17,
3319
3402
  L: 148,
3320
3403
  S: this,
3321
3404
  C: (f, a) => f(...a)
@@ -3347,10 +3430,16 @@ var EchoEdgeReplicator = class {
3347
3430
  await connection.open();
3348
3431
  }
3349
3432
  };
3433
+ var MAX_INFLIGHT_REQUESTS = 5;
3434
+ var MAX_RATE_LIMIT_WAIT_TIME_MS = 3e3;
3350
3435
  var EdgeReplicatorConnection = class extends import_context12.Resource {
3351
3436
  constructor({ edgeConnection, spaceId, context, sharedPolicyEnabled, onRemoteConnected, onRemoteDisconnected, onRestartRequested }) {
3352
3437
  super();
3353
3438
  this._remotePeerId = null;
3439
+ this._requestLimiter = new InflightRequestLimiter({
3440
+ maxInflightRequests: MAX_INFLIGHT_REQUESTS,
3441
+ resetBalanceTimeoutMs: MAX_RATE_LIMIT_WAIT_TIME_MS
3442
+ });
3354
3443
  this._edgeConnection = edgeConnection;
3355
3444
  this._spaceId = spaceId;
3356
3445
  this._context = context;
@@ -3367,17 +3456,19 @@ var EdgeReplicatorConnection = class extends import_context12.Resource {
3367
3456
  });
3368
3457
  this.writable = new WritableStream({
3369
3458
  write: async (message, controller) => {
3459
+ await this._requestLimiter.rateLimit(message);
3370
3460
  await this._sendMessage(message);
3371
3461
  }
3372
3462
  });
3373
3463
  }
3374
3464
  async _open(ctx) {
3375
3465
  (0, import_log11.log)("open", void 0, {
3376
- F: __dxlog_file16,
3377
- L: 242,
3466
+ F: __dxlog_file17,
3467
+ L: 251,
3378
3468
  S: this,
3379
3469
  C: (f, a) => f(...a)
3380
3470
  });
3471
+ await this._requestLimiter.open();
3381
3472
  this._ctx.onDispose(this._edgeConnection.onMessage((msg) => {
3382
3473
  this._onMessage(msg);
3383
3474
  }));
@@ -3385,18 +3476,19 @@ var EdgeReplicatorConnection = class extends import_context12.Resource {
3385
3476
  }
3386
3477
  async _close() {
3387
3478
  (0, import_log11.log)("close", void 0, {
3388
- F: __dxlog_file16,
3389
- L: 254,
3479
+ F: __dxlog_file17,
3480
+ L: 266,
3390
3481
  S: this,
3391
3482
  C: (f, a) => f(...a)
3392
3483
  });
3393
3484
  this._readableStreamController.close();
3485
+ await this._requestLimiter.close();
3394
3486
  await this._onRemoteDisconnected();
3395
3487
  }
3396
3488
  get peerId() {
3397
3489
  (0, import_invariant12.invariant)(this._remotePeerId, "Not connected", {
3398
- F: __dxlog_file16,
3399
- L: 260,
3490
+ F: __dxlog_file17,
3491
+ L: 275,
3400
3492
  S: this,
3401
3493
  A: [
3402
3494
  "this._remotePeerId",
@@ -3415,12 +3507,13 @@ var EdgeReplicatorConnection = class extends import_context12.Resource {
3415
3507
  documentId: params.documentId,
3416
3508
  peerId: this._remotePeerId
3417
3509
  });
3418
- import_log11.log.info("document not found locally for share policy check, accepting the remote document", {
3510
+ import_log11.log.verbose("edge-replicator document not found locally for share policy check", {
3419
3511
  documentId: params.documentId,
3420
- remoteDocumentExists
3512
+ acceptDocument: remoteDocumentExists,
3513
+ remoteId: this._remotePeerId
3421
3514
  }, {
3422
- F: __dxlog_file16,
3423
- L: 275,
3515
+ F: __dxlog_file17,
3516
+ L: 290,
3424
3517
  S: this,
3425
3518
  C: (f, a) => f(...a)
3426
3519
  });
@@ -3433,23 +3526,20 @@ var EdgeReplicatorConnection = class extends import_context12.Resource {
3433
3526
  return true;
3434
3527
  }
3435
3528
  const spaceId = getSpaceIdFromCollectionId(params.collectionId);
3436
- return spaceId === this._spaceId;
3529
+ return spaceId === this._spaceId && params.collectionId.split(":").length === 3;
3437
3530
  }
3438
3531
  _onMessage(message) {
3439
3532
  if (message.serviceId !== this._targetServiceId) {
3440
3533
  return;
3441
3534
  }
3442
3535
  const payload = import_automerge_repo5.cbor.decode(message.payload.value);
3443
- (0, import_log11.log)("recv", () => {
3444
- const decodedData = payload.type === "sync" && payload.data ? A5.decodeSyncMessage(payload.data) : payload.type === "collection-state" ? payload.state : payload;
3445
- return {
3446
- from: message.serviceId,
3447
- type: payload.type,
3448
- decodedData
3449
- };
3536
+ import_log11.log.verbose("edge replicator receive", {
3537
+ type: payload.type,
3538
+ documentId: payload.type === "sync" && payload.documentId,
3539
+ remoteId: this._remotePeerId
3450
3540
  }, {
3451
- F: __dxlog_file16,
3452
- L: 302,
3541
+ F: __dxlog_file17,
3542
+ L: 319,
3453
3543
  S: this,
3454
3544
  C: (f, a) => f(...a)
3455
3545
  });
@@ -3461,18 +3551,18 @@ var EdgeReplicatorConnection = class extends import_context12.Resource {
3461
3551
  this._onRestartRequested();
3462
3552
  return;
3463
3553
  }
3554
+ this._requestLimiter.handleResponse(message);
3464
3555
  this._readableStreamController.enqueue(message);
3465
3556
  }
3466
3557
  async _sendMessage(message) {
3467
3558
  message.targetId = this._targetServiceId;
3468
- (0, import_log11.log)("send", {
3559
+ import_log11.log.verbose("edge replicator send", {
3469
3560
  type: message.type,
3470
- senderId: message.senderId,
3471
- targetId: message.targetId,
3472
- documentId: message.documentId
3561
+ documentId: message.type === "sync" && message.documentId,
3562
+ remoteId: this._remotePeerId
3473
3563
  }, {
3474
- F: __dxlog_file16,
3475
- L: 332,
3564
+ F: __dxlog_file17,
3565
+ L: 348,
3476
3566
  S: this,
3477
3567
  C: (f, a) => f(...a)
3478
3568
  });