@automerge/automerge-repo 2.0.0-alpha.2 → 2.0.0-alpha.22

This diff shows the published contents of the two package versions as they appear in their public registries; it is provided for informational purposes only.
Files changed (76)
  1. package/README.md +5 -6
  2. package/dist/AutomergeUrl.d.ts +17 -5
  3. package/dist/AutomergeUrl.d.ts.map +1 -1
  4. package/dist/AutomergeUrl.js +71 -24
  5. package/dist/DocHandle.d.ts +89 -20
  6. package/dist/DocHandle.d.ts.map +1 -1
  7. package/dist/DocHandle.js +189 -28
  8. package/dist/FindProgress.d.ts +30 -0
  9. package/dist/FindProgress.d.ts.map +1 -0
  10. package/dist/FindProgress.js +1 -0
  11. package/dist/RemoteHeadsSubscriptions.d.ts +4 -5
  12. package/dist/RemoteHeadsSubscriptions.d.ts.map +1 -1
  13. package/dist/RemoteHeadsSubscriptions.js +4 -1
  14. package/dist/Repo.d.ts +44 -6
  15. package/dist/Repo.d.ts.map +1 -1
  16. package/dist/Repo.js +226 -87
  17. package/dist/entrypoints/fullfat.d.ts +1 -0
  18. package/dist/entrypoints/fullfat.d.ts.map +1 -1
  19. package/dist/entrypoints/fullfat.js +1 -2
  20. package/dist/helpers/abortable.d.ts +39 -0
  21. package/dist/helpers/abortable.d.ts.map +1 -0
  22. package/dist/helpers/abortable.js +45 -0
  23. package/dist/helpers/bufferFromHex.d.ts +3 -0
  24. package/dist/helpers/bufferFromHex.d.ts.map +1 -0
  25. package/dist/helpers/bufferFromHex.js +13 -0
  26. package/dist/helpers/headsAreSame.d.ts +2 -2
  27. package/dist/helpers/headsAreSame.d.ts.map +1 -1
  28. package/dist/helpers/mergeArrays.d.ts +1 -1
  29. package/dist/helpers/mergeArrays.d.ts.map +1 -1
  30. package/dist/helpers/tests/network-adapter-tests.d.ts.map +1 -1
  31. package/dist/helpers/tests/network-adapter-tests.js +13 -13
  32. package/dist/helpers/tests/storage-adapter-tests.d.ts +2 -2
  33. package/dist/helpers/tests/storage-adapter-tests.d.ts.map +1 -1
  34. package/dist/helpers/tests/storage-adapter-tests.js +25 -48
  35. package/dist/index.d.ts +1 -1
  36. package/dist/index.d.ts.map +1 -1
  37. package/dist/index.js +1 -1
  38. package/dist/storage/StorageSubsystem.d.ts +11 -1
  39. package/dist/storage/StorageSubsystem.d.ts.map +1 -1
  40. package/dist/storage/StorageSubsystem.js +20 -4
  41. package/dist/synchronizer/CollectionSynchronizer.d.ts +17 -3
  42. package/dist/synchronizer/CollectionSynchronizer.d.ts.map +1 -1
  43. package/dist/synchronizer/CollectionSynchronizer.js +43 -18
  44. package/dist/synchronizer/DocSynchronizer.d.ts +10 -2
  45. package/dist/synchronizer/DocSynchronizer.d.ts.map +1 -1
  46. package/dist/synchronizer/DocSynchronizer.js +30 -8
  47. package/dist/synchronizer/Synchronizer.d.ts +11 -0
  48. package/dist/synchronizer/Synchronizer.d.ts.map +1 -1
  49. package/dist/types.d.ts +4 -1
  50. package/dist/types.d.ts.map +1 -1
  51. package/fuzz/fuzz.ts +3 -3
  52. package/package.json +3 -3
  53. package/src/AutomergeUrl.ts +101 -26
  54. package/src/DocHandle.ts +256 -38
  55. package/src/FindProgress.ts +48 -0
  56. package/src/RemoteHeadsSubscriptions.ts +11 -9
  57. package/src/Repo.ts +310 -95
  58. package/src/entrypoints/fullfat.ts +1 -2
  59. package/src/helpers/abortable.ts +61 -0
  60. package/src/helpers/bufferFromHex.ts +14 -0
  61. package/src/helpers/headsAreSame.ts +2 -2
  62. package/src/helpers/tests/network-adapter-tests.ts +14 -13
  63. package/src/helpers/tests/storage-adapter-tests.ts +44 -86
  64. package/src/index.ts +2 -0
  65. package/src/storage/StorageSubsystem.ts +29 -4
  66. package/src/synchronizer/CollectionSynchronizer.ts +56 -19
  67. package/src/synchronizer/DocSynchronizer.ts +34 -9
  68. package/src/synchronizer/Synchronizer.ts +14 -0
  69. package/src/types.ts +4 -1
  70. package/test/AutomergeUrl.test.ts +130 -0
  71. package/test/CollectionSynchronizer.test.ts +4 -4
  72. package/test/DocHandle.test.ts +189 -29
  73. package/test/DocSynchronizer.test.ts +10 -3
  74. package/test/Repo.test.ts +377 -191
  75. package/test/StorageSubsystem.test.ts +17 -0
  76. package/test/remoteHeads.test.ts +27 -12
package/dist/synchronizer/DocSynchronizer.js CHANGED
@@ -19,11 +19,15 @@ export class DocSynchronizer extends Synchronizer {
     /** Sync state for each peer we've communicated with (including inactive peers) */
     #syncStates = {};
     #pendingSyncMessages = [];
+    // We keep this around at least in part for debugging.
+    // eslint-disable-next-line no-unused-private-class-members
+    #peerId;
     #syncStarted = false;
     #handle;
     #onLoadSyncState;
-    constructor({ handle, onLoadSyncState }) {
+    constructor({ handle, peerId, onLoadSyncState }) {
         super();
+        this.#peerId = peerId;
         this.#handle = handle;
         this.#onLoadSyncState =
             onLoadSyncState ?? (() => Promise.resolve(undefined));
@@ -33,7 +37,7 @@ export class DocSynchronizer extends Synchronizer {
         handle.on("ephemeral-message-outbound", payload => this.#broadcastToPeers(payload));
         // Process pending sync messages immediately after the handle becomes ready.
         void (async () => {
-            await handle.doc([READY, REQUESTING]);
+            await handle.whenReady([READY, REQUESTING]);
             this.#processAllPendingSyncMessages();
         })();
     }
@@ -45,8 +49,7 @@ export class DocSynchronizer extends Synchronizer {
     }
     /// PRIVATE
     async #syncWithPeers() {
-        this.#log(`syncWithPeers`);
-        const doc = await this.#handle.doc();
+        const doc = await this.#handle.legacyAsyncDoc(); // XXX THIS ONE IS WEIRD
         if (doc === undefined)
             return;
         this.#peers.forEach(peerId => this.#sendSyncMessage(peerId, doc));
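
The two hunks above capture the 2.0 handle API split: `whenReady()` waits for the handle to reach a state, while `legacyAsyncDoc()` (flagged `XXX` by the authors) temporarily preserves the old promise-returning `doc()` behavior inside the synchronizer. For downstream code the migration looks roughly like this sketch, which assumes the 2.0 `DocHandle` API where `doc()` is a synchronous read once the handle is ready:

```ts
import { Repo } from "@automerge/automerge-repo"

const repo = new Repo({ network: [] })
const handle = repo.create({ count: 0 })

// 1.x style: doc() was async and could wait on handle states.
//   const doc = await handle.doc()

// 2.0 style: wait explicitly, then read synchronously.
await handle.whenReady()
const doc = handle.doc() // plain value, no await
console.log(doc.count)
```
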
@@ -151,12 +154,12 @@ export class DocSynchronizer extends Synchronizer {
     hasPeer(peerId) {
         return this.#peers.includes(peerId);
     }
-    beginSync(peerIds) {
+    async beginSync(peerIds) {
         const noPeersWithDocument = peerIds.every(peerId => this.#peerDocumentStatuses[peerId] in ["unavailable", "wants"]);
         // At this point if we don't have anything in our storage, we need to use an empty doc to sync
         // with; but we don't want to surface that state to the front end
-        const docPromise = this.#handle
-            .doc([READY, REQUESTING, UNAVAILABLE])
+        const docPromise = this.#handle // TODO THIS IS ALSO WEIRD
+            .legacyAsyncDoc([READY, REQUESTING, UNAVAILABLE])
             .then(doc => {
             // we register out peers first, then say that sync has started
             this.#syncStarted = true;
@@ -169,7 +172,12 @@ export class DocSynchronizer extends Synchronizer {
             // the sync message from
             return doc ?? A.init();
         });
-        this.#log(`beginSync: ${peerIds.join(", ")}`);
+        const peersWithDocument = this.#peers.some(peerId => {
+            return this.#peerDocumentStatuses[peerId] == "has";
+        });
+        if (peersWithDocument) {
+            await this.#handle.whenReady();
+        }
         peerIds.forEach(peerId => {
             this.#withSyncState(peerId, syncState => {
                 // HACK: if we have a sync state already, we round-trip it through the encoding system to make
@@ -252,7 +260,15 @@ export class DocSynchronizer extends Synchronizer {
         }
         this.#withSyncState(message.senderId, syncState => {
             this.#handle.update(doc => {
+                const start = performance.now();
                 const [newDoc, newSyncState] = A.receiveSyncMessage(doc, syncState, message.data);
+                const end = performance.now();
+                this.emit("metrics", {
+                    type: "receive-sync-message",
+                    documentId: this.#handle.documentId,
+                    durationMillis: end - start,
+                    ...A.stats(doc),
+                });
                 this.#setSyncState(message.senderId, newSyncState);
                 // respond to just this peer (as required)
                 this.#sendSyncMessage(message.senderId, doc);
@@ -286,4 +302,10 @@ export class DocSynchronizer extends Synchronizer {
         }
         this.#pendingSyncMessages = [];
     }
+    metrics() {
+        return {
+            peers: this.#peers,
+            size: this.#handle.metrics(),
+        };
+    }
 }
package/dist/synchronizer/Synchronizer.d.ts CHANGED
@@ -9,6 +9,7 @@ export interface SynchronizerEvents {
     message: (payload: MessageContents) => void;
     "sync-state": (payload: SyncStatePayload) => void;
     "open-doc": (arg: OpenDocMessage) => void;
+    metrics: (arg: DocSyncMetrics) => void;
 }
 /** Notify the repo that the sync state has changed */
 export interface SyncStatePayload {
@@ -16,4 +17,14 @@ export interface SyncStatePayload {
     documentId: DocumentId;
     syncState: SyncState;
 }
+export type DocSyncMetrics = {
+    type: "receive-sync-message";
+    documentId: DocumentId;
+    durationMillis: number;
+    numOps: number;
+    numChanges: number;
+} | {
+    type: "doc-denied";
+    documentId: DocumentId;
+};
 //# sourceMappingURL=Synchronizer.d.ts.map
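
`DocSyncMetrics` is a discriminated union, so a listener can narrow on `type`. A minimal consumer sketch; it assumes the `metrics` event is reachable from your code (this diff only shows the synchronizer emitting it) and that the type is importable from the package root:

```ts
import type { DocSyncMetrics } from "@automerge/automerge-repo"

// `emitter` is a stand-in for whatever object surfaces these events.
declare const emitter: {
  on(event: "metrics", fn: (m: DocSyncMetrics) => void): void
}

emitter.on("metrics", m => {
  if (m.type === "receive-sync-message") {
    // numOps / numChanges come from A.stats(doc), spread into the payload
    console.log(
      `${m.documentId}: ${m.durationMillis.toFixed(1)}ms,`,
      `${m.numOps} ops, ${m.numChanges} changes`
    )
  } else {
    // "doc-denied": a request for this document was refused
    console.warn(`${m.documentId}: denied`)
  }
})
```
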
package/dist/synchronizer/Synchronizer.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"Synchronizer.d.ts","sourceRoot":"","sources":["../../src/synchronizer/Synchronizer.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,MAAM,eAAe,CAAA;AAC5C,OAAO,EACL,eAAe,EACf,cAAc,EACd,WAAW,EACZ,MAAM,wBAAwB,CAAA;AAC/B,OAAO,EAAE,SAAS,EAAE,MAAM,2BAA2B,CAAA;AACrD,OAAO,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,aAAa,CAAA;AAEhD,8BAAsB,YAAa,SAAQ,YAAY,CAAC,kBAAkB,CAAC;IACzE,QAAQ,CAAC,cAAc,CAAC,OAAO,EAAE,WAAW,GAAG,IAAI;CACpD;AAED,MAAM,WAAW,kBAAkB;IACjC,OAAO,EAAE,CAAC,OAAO,EAAE,eAAe,KAAK,IAAI,CAAA;IAC3C,YAAY,EAAE,CAAC,OAAO,EAAE,gBAAgB,KAAK,IAAI,CAAA;IACjD,UAAU,EAAE,CAAC,GAAG,EAAE,cAAc,KAAK,IAAI,CAAA;CAC1C;AAED,uDAAuD;AACvD,MAAM,WAAW,gBAAgB;IAC/B,MAAM,EAAE,MAAM,CAAA;IACd,UAAU,EAAE,UAAU,CAAA;IACtB,SAAS,EAAE,SAAS,CAAA;CACrB"}
+ {"version":3,"file":"Synchronizer.d.ts","sourceRoot":"","sources":["../../src/synchronizer/Synchronizer.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,MAAM,eAAe,CAAA;AAC5C,OAAO,EACL,eAAe,EACf,cAAc,EACd,WAAW,EACZ,MAAM,wBAAwB,CAAA;AAC/B,OAAO,EAAE,SAAS,EAAE,MAAM,2BAA2B,CAAA;AACrD,OAAO,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,aAAa,CAAA;AAEhD,8BAAsB,YAAa,SAAQ,YAAY,CAAC,kBAAkB,CAAC;IACzE,QAAQ,CAAC,cAAc,CAAC,OAAO,EAAE,WAAW,GAAG,IAAI;CACpD;AAED,MAAM,WAAW,kBAAkB;IACjC,OAAO,EAAE,CAAC,OAAO,EAAE,eAAe,KAAK,IAAI,CAAA;IAC3C,YAAY,EAAE,CAAC,OAAO,EAAE,gBAAgB,KAAK,IAAI,CAAA;IACjD,UAAU,EAAE,CAAC,GAAG,EAAE,cAAc,KAAK,IAAI,CAAA;IACzC,OAAO,EAAE,CAAC,GAAG,EAAE,cAAc,KAAK,IAAI,CAAA;CACvC;AAED,uDAAuD;AACvD,MAAM,WAAW,gBAAgB;IAC/B,MAAM,EAAE,MAAM,CAAA;IACd,UAAU,EAAE,UAAU,CAAA;IACtB,SAAS,EAAE,SAAS,CAAA;CACrB;AAED,MAAM,MAAM,cAAc,GACtB;IACE,IAAI,EAAE,sBAAsB,CAAA;IAC5B,UAAU,EAAE,UAAU,CAAA;IACtB,cAAc,EAAE,MAAM,CAAA;IACtB,MAAM,EAAE,MAAM,CAAA;IACd,UAAU,EAAE,MAAM,CAAA;CACnB,GACD;IACE,IAAI,EAAE,YAAY,CAAA;IAClB,UAAU,EAAE,UAAU,CAAA;CACvB,CAAA"}
package/dist/types.d.ts CHANGED
@@ -26,12 +26,15 @@ export type LegacyDocumentId = string & {
     __legacyDocumentId: true;
 };
 export type AnyDocumentId = AutomergeUrl | DocumentId | BinaryDocumentId | LegacyDocumentId;
+export type UrlHeads = string[] & {
+    __automergeUrlHeads: unknown;
+};
 /** A branded type for peer IDs */
 export type PeerId = string & {
     __peerId: true;
 };
 /** A randomly generated string created when the {@link Repo} starts up */
 export type SessionId = string & {
-    __SessionId: true;
+    __sessionId: true;
 };
 //# sourceMappingURL=types.d.ts.map
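
`UrlHeads` is a branded type: at runtime it is a plain `string[]`, but the brand keeps bs58check-encoded heads (as they appear in URLs) from being mixed up with Automerge's hex-encoded heads. A small sketch of how the brand behaves at the type level, with a placeholder value:

```ts
import type { UrlHeads } from "@automerge/automerge-repo"

// Only accepts heads already in URL (bs58check) encoding.
const headsFragment = (heads: UrlHeads): string => heads.join("|")

const raw = ["2Tn..."] // placeholder, not a real head hash
// headsFragment(raw)          // type error: plain string[] lacks the brand
headsFragment(raw as UrlHeads) // the cast asserts the encoding is correct
```
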
package/dist/types.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":"AAAA;;;GAGG;AACH,MAAM,MAAM,YAAY,GAAG,MAAM,GAAG;IAAE,aAAa,EAAE,IAAI,CAAA;CAAE,CAAA;AAE3D;;;;;GAKG;AACH,MAAM,MAAM,UAAU,GAAG,MAAM,GAAG;IAAE,YAAY,EAAE,IAAI,CAAA;CAAE,CAAA;AAExD,iGAAiG;AACjG,MAAM,MAAM,gBAAgB,GAAG,UAAU,GAAG;IAAE,kBAAkB,EAAE,IAAI,CAAA;CAAE,CAAA;AAExE;;;GAGG;AACH,MAAM,MAAM,gBAAgB,GAAG,MAAM,GAAG;IAAE,kBAAkB,EAAE,IAAI,CAAA;CAAE,CAAA;AAEpE,MAAM,MAAM,aAAa,GACrB,YAAY,GACZ,UAAU,GACV,gBAAgB,GAChB,gBAAgB,CAAA;AAEpB,kCAAkC;AAClC,MAAM,MAAM,MAAM,GAAG,MAAM,GAAG;IAAE,QAAQ,EAAE,IAAI,CAAA;CAAE,CAAA;AAEhD,0EAA0E;AAC1E,MAAM,MAAM,SAAS,GAAG,MAAM,GAAG;IAAE,WAAW,EAAE,IAAI,CAAA;CAAE,CAAA"}
+ {"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":"AAAA;;;GAGG;AACH,MAAM,MAAM,YAAY,GAAG,MAAM,GAAG;IAAE,aAAa,EAAE,IAAI,CAAA;CAAE,CAAA;AAE3D;;;;;GAKG;AACH,MAAM,MAAM,UAAU,GAAG,MAAM,GAAG;IAAE,YAAY,EAAE,IAAI,CAAA;CAAE,CAAA;AAExD,iGAAiG;AACjG,MAAM,MAAM,gBAAgB,GAAG,UAAU,GAAG;IAAE,kBAAkB,EAAE,IAAI,CAAA;CAAE,CAAA;AAExE;;;GAGG;AACH,MAAM,MAAM,gBAAgB,GAAG,MAAM,GAAG;IAAE,kBAAkB,EAAE,IAAI,CAAA;CAAE,CAAA;AAEpE,MAAM,MAAM,aAAa,GACrB,YAAY,GACZ,UAAU,GACV,gBAAgB,GAChB,gBAAgB,CAAA;AAGpB,MAAM,MAAM,QAAQ,GAAG,MAAM,EAAE,GAAG;IAAE,mBAAmB,EAAE,OAAO,CAAA;CAAE,CAAA;AAElE,kCAAkC;AAClC,MAAM,MAAM,MAAM,GAAG,MAAM,GAAG;IAAE,QAAQ,EAAE,IAAI,CAAA;CAAE,CAAA;AAEhD,0EAA0E;AAC1E,MAAM,MAAM,SAAS,GAAG,MAAM,GAAG;IAAE,WAAW,EAAE,IAAI,CAAA;CAAE,CAAA"}
package/fuzz/fuzz.ts CHANGED
@@ -107,9 +107,9 @@ for (let i = 0; i < 100000; i++) {
   })

   await pause(0)
-  const a = await aliceRepo.find(doc.url).doc()
-  const b = await bobRepo.find(doc.url).doc()
-  const c = await charlieRepo.find(doc.url).doc()
+  const a = (await aliceRepo.find(doc.url)).doc()
+  const b = (await bobRepo.find(doc.url)).doc()
+  const c = (await charlieRepo.find(doc.url)).doc()
   assert.deepStrictEqual(a, b, "A and B should be equal")
   assert.deepStrictEqual(b, c, "B and C should be equal")

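
The fuzz-test change shows the headline 2.0 API move: `Repo.find()` is now async and resolves to a `DocHandle`, and `doc()` on the resolved handle is a synchronous read. A migration sketch, assuming a valid document URL:

```ts
import { Repo, isValidAutomergeUrl } from "@automerge/automerge-repo"

const repo = new Repo({ network: [] })

async function load(url: string) {
  if (!isValidAutomergeUrl(url)) throw new Error("not an Automerge URL")

  // 1.x: const doc = await repo.find(url).doc()
  // 2.0: find() itself is async; doc() is then synchronous.
  const handle = await repo.find(url)
  return handle.doc()
}
```
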
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@automerge/automerge-repo",
-  "version": "2.0.0-alpha.2",
+  "version": "2.0.0-alpha.22",
   "description": "A repository object to manage a collection of automerge documents",
   "repository": "https://github.com/automerge/automerge-repo/tree/master/packages/automerge-repo",
   "author": "Peter van Hardenberg <pvh@pvh.ca>",
@@ -23,7 +23,7 @@
     "vite": "^5.0.8"
   },
   "dependencies": {
-    "@automerge/automerge": "^2.2.7",
+    "@automerge/automerge": "^2.2.8",
     "bs58check": "^3.0.1",
     "cbor-x": "^1.3.0",
     "debug": "^4.3.4",
@@ -60,5 +60,5 @@
   "publishConfig": {
     "access": "public"
   },
-  "gitHead": "373ce97a1c3153b476926290942e8a55cde7874b"
+  "gitHead": "b30af9827bed4615ba3c5e9ee93ca483915e4016"
 }
package/src/AutomergeUrl.ts CHANGED
@@ -4,26 +4,54 @@ import type {
   BinaryDocumentId,
   DocumentId,
   AnyDocumentId,
+  UrlHeads,
 } from "./types.js"
+
 import * as Uuid from "uuid"
 import bs58check from "bs58check"
+import {
+  uint8ArrayFromHexString,
+  uint8ArrayToHexString,
+} from "./helpers/bufferFromHex.js"
+
+import type { Heads as AutomergeHeads } from "@automerge/automerge/slim"

 export const urlPrefix = "automerge:"

+interface ParsedAutomergeUrl {
+  /** unencoded DocumentId */
+  binaryDocumentId: BinaryDocumentId
+  /** bs58 encoded DocumentId */
+  documentId: DocumentId
+  /** Optional array of heads, if specified in URL */
+  heads?: UrlHeads
+  /** Optional hex array of heads, in Automerge core format */
+  hexHeads?: string[] // AKA: heads
+}
+
 /** Given an Automerge URL, returns the DocumentId in both base58check-encoded form and binary form */
-export const parseAutomergeUrl = (url: AutomergeUrl) => {
+export const parseAutomergeUrl = (url: AutomergeUrl): ParsedAutomergeUrl => {
+  const [baseUrl, headsSection, ...rest] = url.split("#")
+  if (rest.length > 0) {
+    throw new Error("Invalid URL: contains multiple heads sections")
+  }
   const regex = new RegExp(`^${urlPrefix}(\\w+)$`)
-  const [, docMatch] = url.match(regex) || []
+  const [, docMatch] = baseUrl.match(regex) || []
   const documentId = docMatch as DocumentId
   const binaryDocumentId = documentIdToBinary(documentId)

   if (!binaryDocumentId) throw new Error("Invalid document URL: " + url)
-  return {
-    /** unencoded DocumentId */
-    binaryDocumentId,
-    /** encoded DocumentId */
-    documentId,
-  }
+  if (headsSection === undefined) return { binaryDocumentId, documentId }
+
+  const heads = (headsSection === "" ? [] : headsSection.split("|")) as UrlHeads
+  const hexHeads = heads.map(head => {
+    try {
+      return uint8ArrayToHexString(bs58check.decode(head))
+    } catch (e) {
+      throw new Error(`Invalid head in URL: ${head}`)
+    }
+  })
+  return { binaryDocumentId, hexHeads, documentId, heads }
 }

 /**
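
After this change an Automerge URL may carry a `#` fragment holding one or more bs58check-encoded heads separated by `|`, pinning the URL to a point in document history. A parsing sketch with placeholder identifiers (substitute a real `handle.url` to run it):

```ts
import {
  parseAutomergeUrl,
  type AutomergeUrl,
} from "@automerge/automerge-repo"

// Placeholders for illustration; real values come from handle.url
const plain = "automerge:4NMNnkMhL8jXrdJ9jamS58PAVdXu" as AutomergeUrl
const pinned = `${plain}#3x9vJeHrsjQFmDFJTdPxsJ6kLYAq` as AutomergeUrl

const a = parseAutomergeUrl(plain)
// a.heads === undefined — no fragment present

const b = parseAutomergeUrl(pinned)
// b.heads: the bs58check head strings exactly as written in the URL
// b.hexHeads: the same heads decoded into Automerge's hex format
```
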
@@ -32,38 +60,78 @@ export const parseAutomergeUrl = (url: AutomergeUrl) => {
  */
 export const stringifyAutomergeUrl = (
   arg: UrlOptions | DocumentId | BinaryDocumentId
-) => {
-  const documentId =
-    arg instanceof Uint8Array || typeof arg === "string"
-      ? arg
-      : "documentId" in arg
-      ? arg.documentId
-      : undefined
+): AutomergeUrl => {
+  if (arg instanceof Uint8Array || typeof arg === "string") {
+    return (urlPrefix +
+      (arg instanceof Uint8Array
+        ? binaryToDocumentId(arg)
+        : arg)) as AutomergeUrl
+  }
+
+  const { documentId, heads = undefined } = arg
+
+  if (documentId === undefined)
+    throw new Error("Invalid documentId: " + documentId)

   const encodedDocumentId =
     documentId instanceof Uint8Array
       ? binaryToDocumentId(documentId)
-      : typeof documentId === "string"
-      ? documentId
-      : undefined
+      : documentId
+
+  let url = `${urlPrefix}${encodedDocumentId}`
+
+  if (heads !== undefined) {
+    heads.forEach(head => {
+      try {
+        bs58check.decode(head)
+      } catch (e) {
+        throw new Error(`Invalid head: ${head}`)
+      }
+    })
+    url += "#" + heads.join("|")
+  }

-  if (encodedDocumentId === undefined)
-    throw new Error("Invalid documentId: " + documentId)
+  return url as AutomergeUrl
+}

-  return (urlPrefix + encodedDocumentId) as AutomergeUrl
+/** Helper to extract just the heads from a URL if they exist */
+export const getHeadsFromUrl = (url: AutomergeUrl): string[] | undefined => {
+  const { heads } = parseAutomergeUrl(url)
+  return heads
 }

+export const anyDocumentIdToAutomergeUrl = (id: AnyDocumentId) =>
+  isValidAutomergeUrl(id)
+    ? id
+    : isValidDocumentId(id)
+    ? stringifyAutomergeUrl({ documentId: id })
+    : isValidUuid(id)
+    ? parseLegacyUUID(id)
+    : undefined
+
 /**
  * Given a string, returns true if it is a valid Automerge URL. This function also acts as a type
  * discriminator in Typescript.
  */
 export const isValidAutomergeUrl = (str: unknown): str is AutomergeUrl => {
-  if (typeof str !== "string") return false
-  if (!str || !str.startsWith(urlPrefix)) return false
-  const automergeUrl = str as AutomergeUrl
+  if (typeof str !== "string" || !str || !str.startsWith(urlPrefix))
+    return false
   try {
-    const { documentId } = parseAutomergeUrl(automergeUrl)
-    return isValidDocumentId(documentId)
+    const { documentId, heads } = parseAutomergeUrl(str as AutomergeUrl)
+    if (!isValidDocumentId(documentId)) return false
+    if (
+      heads &&
+      !heads.every(head => {
+        try {
+          bs58check.decode(head)
+          return true
+        } catch {
+          return false
+        }
+      })
+    )
+      return false
+    return true
   } catch {
     return false
   }
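
`stringifyAutomergeUrl` now returns a typed `AutomergeUrl` and accepts an optional `heads` array (each head is validated as bs58check before being joined into the fragment), and `isValidAutomergeUrl` rejects URLs whose heads fail to decode. A round-trip sketch; it assumes `DocHandle.heads()` returns `UrlHeads` in this alpha, per the `DocHandle` changes listed above:

```ts
import {
  Repo,
  stringifyAutomergeUrl,
  parseAutomergeUrl,
  isValidAutomergeUrl,
} from "@automerge/automerge-repo"

const repo = new Repo({ network: [] })
const handle = repo.create({ title: "hello" })

// Build a URL pinned to the document's current heads.
const url = stringifyAutomergeUrl({
  documentId: handle.documentId,
  heads: handle.heads(),
})

console.log(isValidAutomergeUrl(url))     // true
console.log(parseAutomergeUrl(url).heads) // the same heads, from the fragment
```
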
@@ -97,6 +165,12 @@ export const documentIdToBinary = (docId: DocumentId) =>
 export const binaryToDocumentId = (docId: BinaryDocumentId) =>
   bs58check.encode(docId) as DocumentId

+export const encodeHeads = (heads: AutomergeHeads): UrlHeads =>
+  heads.map(h => bs58check.encode(uint8ArrayFromHexString(h))) as UrlHeads
+
+export const decodeHeads = (heads: UrlHeads): AutomergeHeads =>
+  heads.map(h => uint8ArrayToHexString(bs58check.decode(h))) as AutomergeHeads
+
 export const parseLegacyUUID = (str: string) => {
   if (!Uuid.validate(str)) return undefined
   const documentId = Uuid.parse(str) as BinaryDocumentId
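
`encodeHeads` and `decodeHeads` convert between Automerge's hex-encoded head hashes and the bs58check form used in URL fragments. A small round-trip sketch, assuming both helpers are re-exported from the package root:

```ts
import * as A from "@automerge/automerge"
import { encodeHeads, decodeHeads } from "@automerge/automerge-repo"

let doc = A.from({ count: 0 })
doc = A.change(doc, d => {
  d.count = 1
})

const hexHeads = A.getHeads(doc)       // hex hashes, Automerge core format
const urlHeads = encodeHeads(hexHeads) // bs58check, safe in a URL fragment

// decodeHeads inverts encodeHeads losslessly
console.assert(
  JSON.stringify(decodeHeads(urlHeads)) === JSON.stringify(hexHeads)
)
```
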
@@ -141,4 +215,5 @@ export const interpretAsDocumentId = (id: AnyDocumentId) => {

 type UrlOptions = {
   documentId: DocumentId | BinaryDocumentId
+  heads?: UrlHeads
 }