@automerge/automerge-repo 2.0.0-alpha.7 → 2.0.0-beta.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (79)
  1. package/README.md +8 -8
  2. package/dist/AutomergeUrl.d.ts +17 -5
  3. package/dist/AutomergeUrl.d.ts.map +1 -1
  4. package/dist/AutomergeUrl.js +71 -24
  5. package/dist/DocHandle.d.ts +68 -45
  6. package/dist/DocHandle.d.ts.map +1 -1
  7. package/dist/DocHandle.js +166 -69
  8. package/dist/FindProgress.d.ts +30 -0
  9. package/dist/FindProgress.d.ts.map +1 -0
  10. package/dist/FindProgress.js +1 -0
  11. package/dist/RemoteHeadsSubscriptions.d.ts +4 -5
  12. package/dist/RemoteHeadsSubscriptions.d.ts.map +1 -1
  13. package/dist/RemoteHeadsSubscriptions.js +4 -1
  14. package/dist/Repo.d.ts +46 -6
  15. package/dist/Repo.d.ts.map +1 -1
  16. package/dist/Repo.js +252 -67
  17. package/dist/helpers/abortable.d.ts +36 -0
  18. package/dist/helpers/abortable.d.ts.map +1 -0
  19. package/dist/helpers/abortable.js +47 -0
  20. package/dist/helpers/arraysAreEqual.d.ts.map +1 -1
  21. package/dist/helpers/bufferFromHex.d.ts +3 -0
  22. package/dist/helpers/bufferFromHex.d.ts.map +1 -0
  23. package/dist/helpers/bufferFromHex.js +13 -0
  24. package/dist/helpers/debounce.d.ts.map +1 -1
  25. package/dist/helpers/eventPromise.d.ts.map +1 -1
  26. package/dist/helpers/headsAreSame.d.ts +2 -2
  27. package/dist/helpers/headsAreSame.d.ts.map +1 -1
  28. package/dist/helpers/mergeArrays.d.ts +1 -1
  29. package/dist/helpers/mergeArrays.d.ts.map +1 -1
  30. package/dist/helpers/pause.d.ts.map +1 -1
  31. package/dist/helpers/tests/network-adapter-tests.d.ts.map +1 -1
  32. package/dist/helpers/tests/network-adapter-tests.js +13 -13
  33. package/dist/helpers/tests/storage-adapter-tests.d.ts +2 -2
  34. package/dist/helpers/tests/storage-adapter-tests.d.ts.map +1 -1
  35. package/dist/helpers/tests/storage-adapter-tests.js +25 -48
  36. package/dist/helpers/throttle.d.ts.map +1 -1
  37. package/dist/helpers/withTimeout.d.ts.map +1 -1
  38. package/dist/index.d.ts +2 -1
  39. package/dist/index.d.ts.map +1 -1
  40. package/dist/index.js +1 -1
  41. package/dist/network/messages.d.ts.map +1 -1
  42. package/dist/storage/StorageSubsystem.d.ts +15 -1
  43. package/dist/storage/StorageSubsystem.d.ts.map +1 -1
  44. package/dist/storage/StorageSubsystem.js +50 -14
  45. package/dist/synchronizer/CollectionSynchronizer.d.ts +4 -3
  46. package/dist/synchronizer/CollectionSynchronizer.d.ts.map +1 -1
  47. package/dist/synchronizer/CollectionSynchronizer.js +34 -15
  48. package/dist/synchronizer/DocSynchronizer.d.ts +3 -2
  49. package/dist/synchronizer/DocSynchronizer.d.ts.map +1 -1
  50. package/dist/synchronizer/DocSynchronizer.js +51 -27
  51. package/dist/synchronizer/Synchronizer.d.ts +11 -0
  52. package/dist/synchronizer/Synchronizer.d.ts.map +1 -1
  53. package/dist/types.d.ts +4 -1
  54. package/dist/types.d.ts.map +1 -1
  55. package/fuzz/fuzz.ts +3 -3
  56. package/package.json +3 -4
  57. package/src/AutomergeUrl.ts +101 -26
  58. package/src/DocHandle.ts +235 -82
  59. package/src/FindProgress.ts +48 -0
  60. package/src/RemoteHeadsSubscriptions.ts +11 -9
  61. package/src/Repo.ts +368 -74
  62. package/src/helpers/abortable.ts +62 -0
  63. package/src/helpers/bufferFromHex.ts +14 -0
  64. package/src/helpers/headsAreSame.ts +2 -2
  65. package/src/helpers/tests/network-adapter-tests.ts +14 -13
  66. package/src/helpers/tests/storage-adapter-tests.ts +44 -86
  67. package/src/index.ts +7 -0
  68. package/src/storage/StorageSubsystem.ts +66 -16
  69. package/src/synchronizer/CollectionSynchronizer.ts +37 -16
  70. package/src/synchronizer/DocSynchronizer.ts +59 -32
  71. package/src/synchronizer/Synchronizer.ts +14 -0
  72. package/src/types.ts +4 -1
  73. package/test/AutomergeUrl.test.ts +130 -0
  74. package/test/CollectionSynchronizer.test.ts +4 -4
  75. package/test/DocHandle.test.ts +181 -38
  76. package/test/DocSynchronizer.test.ts +10 -3
  77. package/test/Repo.test.ts +376 -203
  78. package/test/StorageSubsystem.test.ts +80 -1
  79. package/test/remoteHeads.test.ts +27 -12
@@ -1 +1 @@
1
- {"version":3,"file":"DocSynchronizer.d.ts","sourceRoot":"","sources":["../../src/synchronizer/DocSynchronizer.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,CAAC,MAAM,gCAAgC,CAAA;AAGnD,OAAO,EACL,SAAS,EAKV,MAAM,iBAAiB,CAAA;AACxB,OAAO,EAEL,gBAAgB,EAEhB,WAAW,EACX,cAAc,EACd,WAAW,EAEZ,MAAM,wBAAwB,CAAA;AAC/B,OAAO,EAAE,MAAM,EAAE,MAAM,aAAa,CAAA;AACpC,OAAO,EAAE,YAAY,EAAE,MAAM,mBAAmB,CAAA;AAGhD,KAAK,kBAAkB,GAAG,SAAS,GAAG,KAAK,GAAG,aAAa,GAAG,OAAO,CAAA;AAOrE,UAAU,qBAAqB;IAC7B,MAAM,EAAE,SAAS,CAAC,OAAO,CAAC,CAAA;IAC1B,eAAe,CAAC,EAAE,CAAC,MAAM,EAAE,MAAM,KAAK,OAAO,CAAC,CAAC,CAAC,SAAS,GAAG,SAAS,CAAC,CAAA;CACvE;AAED;;;GAGG;AACH,qBAAa,eAAgB,SAAQ,YAAY;;IAE/C,gBAAgB,SAAM;gBAsBV,EAAE,MAAM,EAAE,eAAe,EAAE,EAAE,qBAAqB;IAyB9D,IAAI,UAAU,uCAEb;IAED,IAAI,UAAU,qCAEb;IAkID,OAAO,CAAC,MAAM,EAAE,MAAM;IAItB,SAAS,CAAC,OAAO,EAAE,MAAM,EAAE;IAmD3B,OAAO,CAAC,MAAM,EAAE,MAAM;IAKtB,cAAc,CAAC,OAAO,EAAE,WAAW;IAkBnC,uBAAuB,CAAC,OAAO,EAAE,gBAAgB;IAuBjD,kBAAkB,CAAC,OAAO,EAAE,WAAW,GAAG,cAAc;IA+ExD,OAAO,IAAI;QAAE,KAAK,EAAE,MAAM,EAAE,CAAC;QAAC,IAAI,EAAE;YAAE,MAAM,EAAE,MAAM,CAAC;YAAC,UAAU,EAAE,MAAM,CAAA;SAAE,CAAA;KAAE;CAM7E"}
1
+ {"version":3,"file":"DocSynchronizer.d.ts","sourceRoot":"","sources":["../../src/synchronizer/DocSynchronizer.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,CAAC,MAAM,gCAAgC,CAAA;AAGnD,OAAO,EACL,SAAS,EAKV,MAAM,iBAAiB,CAAA;AACxB,OAAO,EAEL,gBAAgB,EAEhB,WAAW,EACX,cAAc,EACd,WAAW,EAEZ,MAAM,wBAAwB,CAAA;AAC/B,OAAO,EAAE,MAAM,EAAE,MAAM,aAAa,CAAA;AACpC,OAAO,EAAE,YAAY,EAAE,MAAM,mBAAmB,CAAA;AAGhD,KAAK,kBAAkB,GAAG,SAAS,GAAG,KAAK,GAAG,aAAa,GAAG,OAAO,CAAA;AAOrE,UAAU,qBAAqB;IAC7B,MAAM,EAAE,SAAS,CAAC,OAAO,CAAC,CAAA;IAC1B,MAAM,EAAE,MAAM,CAAA;IACd,eAAe,CAAC,EAAE,CAAC,MAAM,EAAE,MAAM,KAAK,OAAO,CAAC,CAAC,CAAC,SAAS,GAAG,SAAS,CAAC,CAAA;CACvE;AAED;;;GAGG;AACH,qBAAa,eAAgB,SAAQ,YAAY;;IAE/C,gBAAgB,SAAM;gBAyBV,EAAE,MAAM,EAAE,MAAM,EAAE,eAAe,EAAE,EAAE,qBAAqB;IAyBtE,IAAI,UAAU,uCAEb;IAED,IAAI,UAAU,qCAEb;IAqID,OAAO,CAAC,MAAM,EAAE,MAAM;IAIhB,SAAS,CAAC,OAAO,EAAE,MAAM,EAAE;IA8DjC,OAAO,CAAC,MAAM,EAAE,MAAM;IAKtB,cAAc,CAAC,OAAO,EAAE,WAAW;IAkBnC,uBAAuB,CAAC,OAAO,EAAE,gBAAgB;IAuBjD,kBAAkB,CAAC,OAAO,EAAE,WAAW,GAAG,cAAc;IAwFxD,OAAO,IAAI;QAAE,KAAK,EAAE,MAAM,EAAE,CAAC;QAAC,IAAI,EAAE;YAAE,MAAM,EAAE,MAAM,CAAC;YAAC,UAAU,EAAE,MAAM,CAAA;SAAE,CAAA;KAAE;CAM7E"}
@@ -19,11 +19,15 @@ export class DocSynchronizer extends Synchronizer {
19
19
  /** Sync state for each peer we've communicated with (including inactive peers) */
20
20
  #syncStates = {};
21
21
  #pendingSyncMessages = [];
22
+ // We keep this around at least in part for debugging.
23
+ // eslint-disable-next-line no-unused-private-class-members
24
+ #peerId;
22
25
  #syncStarted = false;
23
26
  #handle;
24
27
  #onLoadSyncState;
25
- constructor({ handle, onLoadSyncState }) {
28
+ constructor({ handle, peerId, onLoadSyncState }) {
26
29
  super();
30
+ this.#peerId = peerId;
27
31
  this.#handle = handle;
28
32
  this.#onLoadSyncState =
29
33
  onLoadSyncState ?? (() => Promise.resolve(undefined));
@@ -33,7 +37,6 @@ export class DocSynchronizer extends Synchronizer {
33
37
  handle.on("ephemeral-message-outbound", payload => this.#broadcastToPeers(payload));
34
38
  // Process pending sync messages immediately after the handle becomes ready.
35
39
  void (async () => {
36
- await handle.doc([READY, REQUESTING]);
37
40
  this.#processAllPendingSyncMessages();
38
41
  })();
39
42
  }
@@ -45,11 +48,14 @@ export class DocSynchronizer extends Synchronizer {
45
48
  }
46
49
  /// PRIVATE
47
50
  async #syncWithPeers() {
48
- this.#log(`syncWithPeers`);
49
- const doc = await this.#handle.doc();
50
- if (doc === undefined)
51
- return;
52
- this.#peers.forEach(peerId => this.#sendSyncMessage(peerId, doc));
51
+ try {
52
+ await this.#handle.whenReady();
53
+ const doc = this.#handle.doc(); // XXX THIS ONE IS WEIRD
54
+ this.#peers.forEach(peerId => this.#sendSyncMessage(peerId, doc));
55
+ }
56
+ catch (e) {
57
+ console.log("sync with peers threw an exception");
58
+ }
53
59
  }
54
60
  async #broadcastToPeers({ data, }) {
55
61
  this.#log(`broadcastToPeers`, this.#peers);
@@ -151,25 +157,24 @@ export class DocSynchronizer extends Synchronizer {
151
157
  hasPeer(peerId) {
152
158
  return this.#peers.includes(peerId);
153
159
  }
154
- beginSync(peerIds) {
155
- const noPeersWithDocument = peerIds.every(peerId => this.#peerDocumentStatuses[peerId] in ["unavailable", "wants"]);
156
- // At this point if we don't have anything in our storage, we need to use an empty doc to sync
157
- // with; but we don't want to surface that state to the front end
158
- const docPromise = this.#handle
159
- .doc([READY, REQUESTING, UNAVAILABLE])
160
- .then(doc => {
161
- // we register out peers first, then say that sync has started
160
+ async beginSync(peerIds) {
161
+ void this.#handle
162
+ .whenReady([READY, REQUESTING, UNAVAILABLE])
163
+ .then(() => {
162
164
  this.#syncStarted = true;
163
165
  this.#checkDocUnavailable();
164
- const wasUnavailable = doc === undefined;
165
- if (wasUnavailable && noPeersWithDocument) {
166
- return;
167
- }
168
- // If the doc is unavailable we still need a blank document to generate
169
- // the sync message from
170
- return doc ?? A.init();
166
+ })
167
+ .catch(e => {
168
+ console.log("caught whenready", e);
169
+ this.#syncStarted = true;
170
+ this.#checkDocUnavailable();
171
+ });
172
+ const peersWithDocument = this.#peers.some(peerId => {
173
+ return this.#peerDocumentStatuses[peerId] == "has";
171
174
  });
172
- this.#log(`beginSync: ${peerIds.join(", ")}`);
175
+ if (peersWithDocument) {
176
+ await this.#handle.whenReady();
177
+ }
173
178
  peerIds.forEach(peerId => {
174
179
  this.#withSyncState(peerId, syncState => {
175
180
  // HACK: if we have a sync state already, we round-trip it through the encoding system to make
@@ -178,11 +183,22 @@ export class DocSynchronizer extends Synchronizer {
178
183
  // TODO: cover that case with a test and remove this hack
179
184
  const reparsedSyncState = A.decodeSyncState(A.encodeSyncState(syncState));
180
185
  this.#setSyncState(peerId, reparsedSyncState);
181
- docPromise
182
- .then(doc => {
183
- if (doc) {
184
- this.#sendSyncMessage(peerId, doc);
186
+ // At this point if we don't have anything in our storage, we need to use an empty doc to sync
187
+ // with; but we don't want to surface that state to the front end
188
+ this.#handle
189
+ .whenReady([READY, REQUESTING, UNAVAILABLE])
190
+ .then(() => {
191
+ const doc = this.#handle.isReady()
192
+ ? this.#handle.doc()
193
+ : A.init();
194
+ const noPeersWithDocument = peerIds.every(peerId => this.#peerDocumentStatuses[peerId] in ["unavailable", "wants"]);
195
+ const wasUnavailable = doc === undefined;
196
+ if (wasUnavailable && noPeersWithDocument) {
197
+ return;
185
198
  }
199
+ // If the doc is unavailable we still need a blank document to generate
200
+ // the sync message from
201
+ this.#sendSyncMessage(peerId, doc ?? A.init());
186
202
  })
187
203
  .catch(err => {
188
204
  this.#log(`Error loading doc for ${peerId}: ${err}`);
@@ -252,7 +268,15 @@ export class DocSynchronizer extends Synchronizer {
252
268
  }
253
269
  this.#withSyncState(message.senderId, syncState => {
254
270
  this.#handle.update(doc => {
271
+ const start = performance.now();
255
272
  const [newDoc, newSyncState] = A.receiveSyncMessage(doc, syncState, message.data);
273
+ const end = performance.now();
274
+ this.emit("metrics", {
275
+ type: "receive-sync-message",
276
+ documentId: this.#handle.documentId,
277
+ durationMillis: end - start,
278
+ ...A.stats(doc),
279
+ });
256
280
  this.#setSyncState(message.senderId, newSyncState);
257
281
  // respond to just this peer (as required)
258
282
  this.#sendSyncMessage(message.senderId, doc);
@@ -9,6 +9,7 @@ export interface SynchronizerEvents {
9
9
  message: (payload: MessageContents) => void;
10
10
  "sync-state": (payload: SyncStatePayload) => void;
11
11
  "open-doc": (arg: OpenDocMessage) => void;
12
+ metrics: (arg: DocSyncMetrics) => void;
12
13
  }
13
14
  /** Notify the repo that the sync state has changed */
14
15
  export interface SyncStatePayload {
@@ -16,4 +17,14 @@ export interface SyncStatePayload {
16
17
  documentId: DocumentId;
17
18
  syncState: SyncState;
18
19
  }
20
+ export type DocSyncMetrics = {
21
+ type: "receive-sync-message";
22
+ documentId: DocumentId;
23
+ durationMillis: number;
24
+ numOps: number;
25
+ numChanges: number;
26
+ } | {
27
+ type: "doc-denied";
28
+ documentId: DocumentId;
29
+ };
19
30
  //# sourceMappingURL=Synchronizer.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"Synchronizer.d.ts","sourceRoot":"","sources":["../../src/synchronizer/Synchronizer.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,MAAM,eAAe,CAAA;AAC5C,OAAO,EACL,eAAe,EACf,cAAc,EACd,WAAW,EACZ,MAAM,wBAAwB,CAAA;AAC/B,OAAO,EAAE,SAAS,EAAE,MAAM,2BAA2B,CAAA;AACrD,OAAO,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,aAAa,CAAA;AAEhD,8BAAsB,YAAa,SAAQ,YAAY,CAAC,kBAAkB,CAAC;IACzE,QAAQ,CAAC,cAAc,CAAC,OAAO,EAAE,WAAW,GAAG,IAAI;CACpD;AAED,MAAM,WAAW,kBAAkB;IACjC,OAAO,EAAE,CAAC,OAAO,EAAE,eAAe,KAAK,IAAI,CAAA;IAC3C,YAAY,EAAE,CAAC,OAAO,EAAE,gBAAgB,KAAK,IAAI,CAAA;IACjD,UAAU,EAAE,CAAC,GAAG,EAAE,cAAc,KAAK,IAAI,CAAA;CAC1C;AAED,uDAAuD;AACvD,MAAM,WAAW,gBAAgB;IAC/B,MAAM,EAAE,MAAM,CAAA;IACd,UAAU,EAAE,UAAU,CAAA;IACtB,SAAS,EAAE,SAAS,CAAA;CACrB"}
1
+ {"version":3,"file":"Synchronizer.d.ts","sourceRoot":"","sources":["../../src/synchronizer/Synchronizer.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,MAAM,eAAe,CAAA;AAC5C,OAAO,EACL,eAAe,EACf,cAAc,EACd,WAAW,EACZ,MAAM,wBAAwB,CAAA;AAC/B,OAAO,EAAE,SAAS,EAAE,MAAM,2BAA2B,CAAA;AACrD,OAAO,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,aAAa,CAAA;AAEhD,8BAAsB,YAAa,SAAQ,YAAY,CAAC,kBAAkB,CAAC;IACzE,QAAQ,CAAC,cAAc,CAAC,OAAO,EAAE,WAAW,GAAG,IAAI;CACpD;AAED,MAAM,WAAW,kBAAkB;IACjC,OAAO,EAAE,CAAC,OAAO,EAAE,eAAe,KAAK,IAAI,CAAA;IAC3C,YAAY,EAAE,CAAC,OAAO,EAAE,gBAAgB,KAAK,IAAI,CAAA;IACjD,UAAU,EAAE,CAAC,GAAG,EAAE,cAAc,KAAK,IAAI,CAAA;IACzC,OAAO,EAAE,CAAC,GAAG,EAAE,cAAc,KAAK,IAAI,CAAA;CACvC;AAED,uDAAuD;AACvD,MAAM,WAAW,gBAAgB;IAC/B,MAAM,EAAE,MAAM,CAAA;IACd,UAAU,EAAE,UAAU,CAAA;IACtB,SAAS,EAAE,SAAS,CAAA;CACrB;AAED,MAAM,MAAM,cAAc,GACtB;IACE,IAAI,EAAE,sBAAsB,CAAA;IAC5B,UAAU,EAAE,UAAU,CAAA;IACtB,cAAc,EAAE,MAAM,CAAA;IACtB,MAAM,EAAE,MAAM,CAAA;IACd,UAAU,EAAE,MAAM,CAAA;CACnB,GACD;IACE,IAAI,EAAE,YAAY,CAAA;IAClB,UAAU,EAAE,UAAU,CAAA;CACvB,CAAA"}
package/dist/types.d.ts CHANGED
@@ -26,12 +26,15 @@ export type LegacyDocumentId = string & {
26
26
  __legacyDocumentId: true;
27
27
  };
28
28
  export type AnyDocumentId = AutomergeUrl | DocumentId | BinaryDocumentId | LegacyDocumentId;
29
+ export type UrlHeads = string[] & {
30
+ __automergeUrlHeads: unknown;
31
+ };
29
32
  /** A branded type for peer IDs */
30
33
  export type PeerId = string & {
31
34
  __peerId: true;
32
35
  };
33
36
  /** A randomly generated string created when the {@link Repo} starts up */
34
37
  export type SessionId = string & {
35
- __SessionId: true;
38
+ __sessionId: true;
36
39
  };
37
40
  //# sourceMappingURL=types.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":"AAAA;;;GAGG;AACH,MAAM,MAAM,YAAY,GAAG,MAAM,GAAG;IAAE,aAAa,EAAE,IAAI,CAAA;CAAE,CAAA;AAE3D;;;;;GAKG;AACH,MAAM,MAAM,UAAU,GAAG,MAAM,GAAG;IAAE,YAAY,EAAE,IAAI,CAAA;CAAE,CAAA;AAExD,iGAAiG;AACjG,MAAM,MAAM,gBAAgB,GAAG,UAAU,GAAG;IAAE,kBAAkB,EAAE,IAAI,CAAA;CAAE,CAAA;AAExE;;;GAGG;AACH,MAAM,MAAM,gBAAgB,GAAG,MAAM,GAAG;IAAE,kBAAkB,EAAE,IAAI,CAAA;CAAE,CAAA;AAEpE,MAAM,MAAM,aAAa,GACrB,YAAY,GACZ,UAAU,GACV,gBAAgB,GAChB,gBAAgB,CAAA;AAEpB,kCAAkC;AAClC,MAAM,MAAM,MAAM,GAAG,MAAM,GAAG;IAAE,QAAQ,EAAE,IAAI,CAAA;CAAE,CAAA;AAEhD,0EAA0E;AAC1E,MAAM,MAAM,SAAS,GAAG,MAAM,GAAG;IAAE,WAAW,EAAE,IAAI,CAAA;CAAE,CAAA"}
1
+ {"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":"AAAA;;;GAGG;AACH,MAAM,MAAM,YAAY,GAAG,MAAM,GAAG;IAAE,aAAa,EAAE,IAAI,CAAA;CAAE,CAAA;AAE3D;;;;;GAKG;AACH,MAAM,MAAM,UAAU,GAAG,MAAM,GAAG;IAAE,YAAY,EAAE,IAAI,CAAA;CAAE,CAAA;AAExD,iGAAiG;AACjG,MAAM,MAAM,gBAAgB,GAAG,UAAU,GAAG;IAAE,kBAAkB,EAAE,IAAI,CAAA;CAAE,CAAA;AAExE;;;GAGG;AACH,MAAM,MAAM,gBAAgB,GAAG,MAAM,GAAG;IAAE,kBAAkB,EAAE,IAAI,CAAA;CAAE,CAAA;AAEpE,MAAM,MAAM,aAAa,GACrB,YAAY,GACZ,UAAU,GACV,gBAAgB,GAChB,gBAAgB,CAAA;AAGpB,MAAM,MAAM,QAAQ,GAAG,MAAM,EAAE,GAAG;IAAE,mBAAmB,EAAE,OAAO,CAAA;CAAE,CAAA;AAElE,kCAAkC;AAClC,MAAM,MAAM,MAAM,GAAG,MAAM,GAAG;IAAE,QAAQ,EAAE,IAAI,CAAA;CAAE,CAAA;AAEhD,0EAA0E;AAC1E,MAAM,MAAM,SAAS,GAAG,MAAM,GAAG;IAAE,WAAW,EAAE,IAAI,CAAA;CAAE,CAAA"}
package/fuzz/fuzz.ts CHANGED
@@ -107,9 +107,9 @@ for (let i = 0; i < 100000; i++) {
107
107
  })
108
108
 
109
109
  await pause(0)
110
- const a = await aliceRepo.find(doc.url).doc()
111
- const b = await bobRepo.find(doc.url).doc()
112
- const c = await charlieRepo.find(doc.url).doc()
110
+ const a = (await aliceRepo.find(doc.url)).doc()
111
+ const b = (await bobRepo.find(doc.url)).doc()
112
+ const c = (await charlieRepo.find(doc.url)).doc()
113
113
  assert.deepStrictEqual(a, b, "A and B should be equal")
114
114
  assert.deepStrictEqual(b, c, "B and C should be equal")
115
115
 
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@automerge/automerge-repo",
3
- "version": "2.0.0-alpha.7",
3
+ "version": "2.0.0-beta.2",
4
4
  "description": "A repository object to manage a collection of automerge documents",
5
5
  "repository": "https://github.com/automerge/automerge-repo/tree/master/packages/automerge-repo",
6
6
  "author": "Peter van Hardenberg <pvh@pvh.ca>",
@@ -20,6 +20,7 @@
20
20
  },
21
21
  "devDependencies": {
22
22
  "http-server": "^14.1.0",
23
+ "ts-node": "^10.9.2",
23
24
  "vite": "^5.0.8"
24
25
  },
25
26
  "dependencies": {
@@ -29,8 +30,6 @@
29
30
  "debug": "^4.3.4",
30
31
  "eventemitter3": "^5.0.1",
31
32
  "fast-sha256": "^1.3.0",
32
- "tiny-typed-emitter": "^2.1.0",
33
- "ts-node": "^10.9.1",
34
33
  "uuid": "^9.0.0",
35
34
  "xstate": "^5.9.1"
36
35
  },
@@ -60,5 +59,5 @@
60
59
  "publishConfig": {
61
60
  "access": "public"
62
61
  },
63
- "gitHead": "4279df7dad7ef5f33b1544e3945dab3839fa5ef6"
62
+ "gitHead": "70b1423aad5595782bb1ed6cfe21244e2693e226"
64
63
  }
@@ -4,26 +4,54 @@ import type {
4
4
  BinaryDocumentId,
5
5
  DocumentId,
6
6
  AnyDocumentId,
7
+ UrlHeads,
7
8
  } from "./types.js"
9
+
8
10
  import * as Uuid from "uuid"
9
11
  import bs58check from "bs58check"
12
+ import {
13
+ uint8ArrayFromHexString,
14
+ uint8ArrayToHexString,
15
+ } from "./helpers/bufferFromHex.js"
16
+
17
+ import type { Heads as AutomergeHeads } from "@automerge/automerge/slim"
10
18
 
11
19
  export const urlPrefix = "automerge:"
12
20
 
21
+ interface ParsedAutomergeUrl {
22
+ /** unencoded DocumentId */
23
+ binaryDocumentId: BinaryDocumentId
24
+ /** bs58 encoded DocumentId */
25
+ documentId: DocumentId
26
+ /** Optional array of heads, if specified in URL */
27
+ heads?: UrlHeads
28
+ /** Optional hex array of heads, in Automerge core format */
29
+ hexHeads?: string[] // AKA: heads
30
+ }
31
+
13
32
  /** Given an Automerge URL, returns the DocumentId in both base58check-encoded form and binary form */
14
- export const parseAutomergeUrl = (url: AutomergeUrl) => {
33
+ export const parseAutomergeUrl = (url: AutomergeUrl): ParsedAutomergeUrl => {
34
+ const [baseUrl, headsSection, ...rest] = url.split("#")
35
+ if (rest.length > 0) {
36
+ throw new Error("Invalid URL: contains multiple heads sections")
37
+ }
15
38
  const regex = new RegExp(`^${urlPrefix}(\\w+)$`)
16
- const [, docMatch] = url.match(regex) || []
39
+ const [, docMatch] = baseUrl.match(regex) || []
17
40
  const documentId = docMatch as DocumentId
18
41
  const binaryDocumentId = documentIdToBinary(documentId)
19
42
 
20
43
  if (!binaryDocumentId) throw new Error("Invalid document URL: " + url)
21
- return {
22
- /** unencoded DocumentId */
23
- binaryDocumentId,
24
- /** encoded DocumentId */
25
- documentId,
26
- }
44
+ if (headsSection === undefined) return { binaryDocumentId, documentId }
45
+
46
+ const heads = (headsSection === "" ? [] : headsSection.split("|")) as UrlHeads
47
+ const hexHeads = heads.map(head => {
48
+ try {
49
+ return uint8ArrayToHexString(bs58check.decode(head))
50
+ } catch (e) {
51
+ throw new Error(`Invalid head in URL: ${head}`)
52
+ }
53
+ })
54
+ return { binaryDocumentId, hexHeads, documentId, heads }
27
55
  }
28
56
 
29
57
  /**
@@ -32,38 +60,78 @@ export const parseAutomergeUrl = (url: AutomergeUrl) => {
32
60
  */
33
61
  export const stringifyAutomergeUrl = (
34
62
  arg: UrlOptions | DocumentId | BinaryDocumentId
35
- ) => {
36
- const documentId =
37
- arg instanceof Uint8Array || typeof arg === "string"
38
- ? arg
39
- : "documentId" in arg
40
- ? arg.documentId
41
- : undefined
63
+ ): AutomergeUrl => {
64
+ if (arg instanceof Uint8Array || typeof arg === "string") {
65
+ return (urlPrefix +
66
+ (arg instanceof Uint8Array
67
+ ? binaryToDocumentId(arg)
68
+ : arg)) as AutomergeUrl
69
+ }
70
+
71
+ const { documentId, heads = undefined } = arg
72
+
73
+ if (documentId === undefined)
74
+ throw new Error("Invalid documentId: " + documentId)
42
75
 
43
76
  const encodedDocumentId =
44
77
  documentId instanceof Uint8Array
45
78
  ? binaryToDocumentId(documentId)
46
- : typeof documentId === "string"
47
- ? documentId
48
- : undefined
79
+ : documentId
80
+
81
+ let url = `${urlPrefix}${encodedDocumentId}`
82
+
83
+ if (heads !== undefined) {
84
+ heads.forEach(head => {
85
+ try {
86
+ bs58check.decode(head)
87
+ } catch (e) {
88
+ throw new Error(`Invalid head: ${head}`)
89
+ }
90
+ })
91
+ url += "#" + heads.join("|")
92
+ }
49
93
 
50
- if (encodedDocumentId === undefined)
51
- throw new Error("Invalid documentId: " + documentId)
94
+ return url as AutomergeUrl
95
+ }
52
96
 
53
- return (urlPrefix + encodedDocumentId) as AutomergeUrl
97
+ /** Helper to extract just the heads from a URL if they exist */
98
+ export const getHeadsFromUrl = (url: AutomergeUrl): string[] | undefined => {
99
+ const { heads } = parseAutomergeUrl(url)
100
+ return heads
54
101
  }
55
102
 
103
+ export const anyDocumentIdToAutomergeUrl = (id: AnyDocumentId) =>
104
+ isValidAutomergeUrl(id)
105
+ ? id
106
+ : isValidDocumentId(id)
107
+ ? stringifyAutomergeUrl({ documentId: id })
108
+ : isValidUuid(id)
109
+ ? parseLegacyUUID(id)
110
+ : undefined
111
+
56
112
  /**
57
113
  * Given a string, returns true if it is a valid Automerge URL. This function also acts as a type
58
114
  * discriminator in Typescript.
59
115
  */
60
116
  export const isValidAutomergeUrl = (str: unknown): str is AutomergeUrl => {
61
- if (typeof str !== "string") return false
62
- if (!str || !str.startsWith(urlPrefix)) return false
63
- const automergeUrl = str as AutomergeUrl
117
+ if (typeof str !== "string" || !str || !str.startsWith(urlPrefix))
118
+ return false
64
119
  try {
65
- const { documentId } = parseAutomergeUrl(automergeUrl)
66
- return isValidDocumentId(documentId)
120
+ const { documentId, heads } = parseAutomergeUrl(str as AutomergeUrl)
121
+ if (!isValidDocumentId(documentId)) return false
122
+ if (
123
+ heads &&
124
+ !heads.every(head => {
125
+ try {
126
+ bs58check.decode(head)
127
+ return true
128
+ } catch {
129
+ return false
130
+ }
131
+ })
132
+ )
133
+ return false
134
+ return true
67
135
  } catch {
68
136
  return false
69
137
  }
@@ -97,6 +165,12 @@ export const documentIdToBinary = (docId: DocumentId) =>
97
165
  export const binaryToDocumentId = (docId: BinaryDocumentId) =>
98
166
  bs58check.encode(docId) as DocumentId
99
167
 
168
+ export const encodeHeads = (heads: AutomergeHeads): UrlHeads =>
169
+ heads.map(h => bs58check.encode(uint8ArrayFromHexString(h))) as UrlHeads
170
+
171
+ export const decodeHeads = (heads: UrlHeads): AutomergeHeads =>
172
+ heads.map(h => uint8ArrayToHexString(bs58check.decode(h))) as AutomergeHeads
173
+
100
174
  export const parseLegacyUUID = (str: string) => {
101
175
  if (!Uuid.validate(str)) return undefined
102
176
  const documentId = Uuid.parse(str) as BinaryDocumentId
@@ -141,4 +215,5 @@ export const interpretAsDocumentId = (id: AnyDocumentId) => {
141
215
 
142
216
  type UrlOptions = {
143
217
  documentId: DocumentId | BinaryDocumentId
218
+ heads?: UrlHeads
144
219
  }