@automerge/automerge-repo 1.0.12 → 1.0.14

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (58)
  1. package/dist/AutomergeUrl.d.ts +45 -0
  2. package/dist/AutomergeUrl.d.ts.map +1 -0
  3. package/dist/AutomergeUrl.js +108 -0
  4. package/dist/DocHandle.js +1 -1
  5. package/dist/Repo.d.ts +5 -5
  6. package/dist/Repo.d.ts.map +1 -1
  7. package/dist/Repo.js +10 -21
  8. package/dist/helpers/cbor.js +1 -1
  9. package/dist/index.d.ts +3 -3
  10. package/dist/index.d.ts.map +1 -1
  11. package/dist/index.js +1 -1
  12. package/dist/network/NetworkAdapter.d.ts +3 -3
  13. package/dist/network/NetworkAdapter.d.ts.map +1 -1
  14. package/dist/network/messages.d.ts +7 -18
  15. package/dist/network/messages.d.ts.map +1 -1
  16. package/dist/storage/StorageAdapter.d.ts +19 -22
  17. package/dist/storage/StorageAdapter.d.ts.map +1 -1
  18. package/dist/storage/StorageAdapter.js +2 -2
  19. package/dist/storage/StorageSubsystem.d.ts +39 -3
  20. package/dist/storage/StorageSubsystem.d.ts.map +1 -1
  21. package/dist/storage/StorageSubsystem.js +128 -75
  22. package/dist/storage/chunkTypeFromKey.d.ts +13 -0
  23. package/dist/storage/chunkTypeFromKey.d.ts.map +1 -0
  24. package/dist/storage/chunkTypeFromKey.js +18 -0
  25. package/dist/storage/keyHash.d.ts +4 -0
  26. package/dist/storage/keyHash.d.ts.map +1 -0
  27. package/dist/storage/keyHash.js +15 -0
  28. package/dist/storage/types.d.ts +37 -0
  29. package/dist/storage/types.d.ts.map +1 -0
  30. package/dist/storage/types.js +1 -0
  31. package/dist/synchronizer/CollectionSynchronizer.js +1 -1
  32. package/dist/types.d.ts +20 -12
  33. package/dist/types.d.ts.map +1 -1
  34. package/package.json +2 -2
  35. package/src/AutomergeUrl.ts +144 -0
  36. package/src/DocHandle.ts +1 -1
  37. package/src/Repo.ts +14 -26
  38. package/src/helpers/cbor.ts +1 -1
  39. package/src/index.ts +12 -4
  40. package/src/network/NetworkAdapter.ts +3 -3
  41. package/src/network/messages.ts +8 -21
  42. package/src/storage/StorageAdapter.ts +23 -30
  43. package/src/storage/StorageSubsystem.ts +159 -93
  44. package/src/storage/chunkTypeFromKey.ts +22 -0
  45. package/src/storage/keyHash.ts +17 -0
  46. package/src/storage/types.ts +39 -0
  47. package/src/synchronizer/CollectionSynchronizer.ts +1 -1
  48. package/src/types.ts +23 -11
  49. package/test/AutomergeUrl.test.ts +100 -0
  50. package/test/DocHandle.test.ts +1 -1
  51. package/test/DocSynchronizer.test.ts +1 -1
  52. package/test/Repo.test.ts +22 -6
  53. package/test/StorageSubsystem.test.ts +144 -36
  54. package/test/helpers/DummyStorageAdapter.ts +2 -4
  55. package/dist/DocUrl.d.ts +0 -39
  56. package/dist/DocUrl.d.ts.map +0 -1
  57. package/dist/DocUrl.js +0 -74
  58. package/src/DocUrl.ts +0 -96
package/dist/storage/StorageSubsystem.js CHANGED
@@ -1,28 +1,127 @@
  import * as A from "@automerge/automerge/next";
  import debug from "debug";
- import * as sha256 from "fast-sha256";
  import { headsAreSame } from "../helpers/headsAreSame.js";
  import { mergeArrays } from "../helpers/mergeArrays.js";
- function keyHash(binary) {
-     const hash = sha256.hash(binary);
-     const hashArray = Array.from(new Uint8Array(hash)); // convert buffer to byte array
-     const hashHex = hashArray.map(b => ("00" + b.toString(16)).slice(-2)).join(""); // convert bytes to hex string
-     return hashHex;
- }
- function headsHash(heads) {
-     const encoder = new TextEncoder();
-     const headsbinary = mergeArrays(heads.map((h) => encoder.encode(h)));
-     return keyHash(headsbinary);
- }
+ import { keyHash, headsHash } from "./keyHash.js";
+ import { chunkTypeFromKey } from "./chunkTypeFromKey.js";
+ /**
+  * The storage subsystem is responsible for saving and loading Automerge documents to and from a
+  * storage adapter. It also provides a generic key/value storage interface for other uses.
+  */
  export class StorageSubsystem {
+     /** The storage adapter to use for saving and loading documents */
      #storageAdapter;
-     #chunkInfos = new Map();
+     /** Record of the latest heads we've loaded or saved for each document */
      #storedHeads = new Map();
+     /** Metadata on the chunks we've already loaded for each document */
+     #chunkInfos = new Map();
+     /** Flag to avoid compacting when a compaction is already underway */
+     #compacting = false;
      #log = debug(`automerge-repo:storage-subsystem`);
-     #snapshotting = false;
      constructor(storageAdapter) {
          this.#storageAdapter = storageAdapter;
      }
+     // ARBITRARY KEY/VALUE STORAGE
+     // The `load`, `save`, and `remove` methods are for generic key/value storage, as opposed to
+     // Automerge documents. For example, they're used by the LocalFirstAuthProvider to persist the
+     // encrypted team graph that encodes group membership and permissions.
+     //
+     // The namespace parameter is to prevent collisions with other users of the storage subsystem.
+     // Typically this will be the name of the plug-in, adapter, or other system that is using it. For
+     // example, the LocalFirstAuthProvider uses the namespace `LocalFirstAuthProvider`.
+     /** Loads a value from storage. */
+     async load(
+     /** Namespace to prevent collisions with other users of the storage subsystem. */
+     namespace,
+     /** Key to load. Typically a UUID or other unique identifier, but could be any string. */
+     key) {
+         const storageKey = [namespace, key];
+         return await this.#storageAdapter.load(storageKey);
+     }
+     /** Saves a value in storage. */
+     async save(
+     /** Namespace to prevent collisions with other users of the storage subsystem. */
+     namespace,
+     /** Key to save. Typically a UUID or other unique identifier, but could be any string. */
+     key,
+     /** Data to save, as a binary blob. */
+     data) {
+         const storageKey = [namespace, key];
+         await this.#storageAdapter.save(storageKey, data);
+     }
+     /** Removes a value from storage. */
+     async remove(
+     /** Namespace to prevent collisions with other users of the storage subsystem. */
+     namespace,
+     /** Key to remove. Typically a UUID or other unique identifier, but could be any string. */
+     key) {
+         const storageKey = [namespace, key];
+         await this.#storageAdapter.remove(storageKey);
+     }
+     // AUTOMERGE DOCUMENT STORAGE
+     /**
+      * Loads the Automerge document with the given ID from storage.
+      */
+     async loadDoc(documentId) {
+         // Load all the chunks for this document
+         const chunks = await this.#storageAdapter.loadRange([documentId]);
+         const binaries = [];
+         const chunkInfos = [];
+         for (const chunk of chunks) {
+             // chunks might have been deleted in the interim
+             if (chunk.data === undefined)
+                 continue;
+             const chunkType = chunkTypeFromKey(chunk.key);
+             if (chunkType == null)
+                 continue;
+             chunkInfos.push({
+                 key: chunk.key,
+                 type: chunkType,
+                 size: chunk.data.length,
+             });
+             binaries.push(chunk.data);
+         }
+         this.#chunkInfos.set(documentId, chunkInfos);
+         // Merge the chunks into a single binary
+         const binary = mergeArrays(binaries);
+         if (binary.length === 0)
+             return null;
+         // Load into an Automerge document
+         const newDoc = A.loadIncremental(A.init(), binary);
+         // Record the latest heads for the document
+         this.#storedHeads.set(documentId, A.getHeads(newDoc));
+         return newDoc;
+     }
+     /**
+      * Saves the provided Automerge document to storage.
+      *
+      * @remarks
+      * Under the hood this makes incremental saves until the incremental size is greater than the
+      * snapshot size, at which point the document is compacted into a single snapshot.
+      */
+     async saveDoc(documentId, doc) {
+         // Don't bother saving if the document hasn't changed
+         if (!this.#shouldSave(documentId, doc))
+             return;
+         const sourceChunks = this.#chunkInfos.get(documentId) ?? [];
+         if (this.#shouldCompact(sourceChunks)) {
+             await this.#saveTotal(documentId, doc, sourceChunks);
+         }
+         else {
+             await this.#saveIncremental(documentId, doc);
+         }
+         this.#storedHeads.set(documentId, A.getHeads(doc));
+     }
+     /**
+      * Removes the Automerge document with the given ID from storage
+      */
+     async removeDoc(documentId) {
+         await this.#storageAdapter.removeRange([documentId, "snapshot"]);
+         await this.#storageAdapter.removeRange([documentId, "incremental"]);
+     }
+     /**
+      * Saves just the incremental changes since the last save.
+      */
      async #saveIncremental(documentId, doc) {
          const binary = A.saveSince(doc, this.#storedHeads.get(documentId) ?? []);
          if (binary && binary.length > 0) {
@@ -43,8 +142,11 @@ export class StorageSubsystem {
              return Promise.resolve();
          }
      }
+     /**
+      * Compacts the document storage into a single snapshot.
+      */
      async #saveTotal(documentId, doc, sourceChunks) {
-         this.#snapshotting = true;
+         this.#compacting = true;
          const binary = A.save(doc);
          const snapshotHash = headsHash(A.getHeads(doc));
          const key = [documentId, "snapshot", snapshotHash];
@@ -58,66 +160,30 @@ export class StorageSubsystem {
          const newChunkInfos = this.#chunkInfos.get(documentId)?.filter(c => !oldKeys.has(c.key)) ?? [];
          newChunkInfos.push({ key, type: "snapshot", size: binary.length });
          this.#chunkInfos.set(documentId, newChunkInfos);
-         this.#snapshotting = false;
-     }
-     async loadDoc(documentId) {
-         const loaded = await this.#storageAdapter.loadRange([documentId]);
-         const binaries = [];
-         const chunkInfos = [];
-         for (const chunk of loaded) {
-             const chunkType = chunkTypeFromKey(chunk.key);
-             if (chunkType == null) {
-                 continue;
-             }
-             chunkInfos.push({
-                 key: chunk.key,
-                 type: chunkType,
-                 size: chunk.data.length,
-             });
-             binaries.push(chunk.data);
-         }
-         this.#chunkInfos.set(documentId, chunkInfos);
-         const binary = mergeArrays(binaries);
-         if (binary.length === 0) {
-             return null;
-         }
-         const newDoc = A.loadIncremental(A.init(), binary);
-         this.#storedHeads.set(documentId, A.getHeads(newDoc));
-         return newDoc;
-     }
-     async saveDoc(documentId, doc) {
-         if (!this.#shouldSave(documentId, doc)) {
-             return;
-         }
-         const sourceChunks = this.#chunkInfos.get(documentId) ?? [];
-         if (this.#shouldCompact(sourceChunks)) {
-             void this.#saveTotal(documentId, doc, sourceChunks);
-         }
-         else {
-             void this.#saveIncremental(documentId, doc);
-         }
-         this.#storedHeads.set(documentId, A.getHeads(doc));
-     }
-     async remove(documentId) {
-         void this.#storageAdapter.removeRange([documentId, "snapshot"]);
-         void this.#storageAdapter.removeRange([documentId, "incremental"]);
+         this.#compacting = false;
      }
+     /**
+      * Returns true if the document has changed since the last time it was saved.
+      */
      #shouldSave(documentId, doc) {
          const oldHeads = this.#storedHeads.get(documentId);
          if (!oldHeads) {
+             // we haven't saved this document before
              return true;
          }
          const newHeads = A.getHeads(doc);
          if (headsAreSame(newHeads, oldHeads)) {
+             // the document hasn't changed
              return false;
          }
-         return true;
+         return true; // the document has changed
      }
+     /**
+      * We only compact if the incremental size is greater than the snapshot size.
+      */
      #shouldCompact(sourceChunks) {
-         if (this.#snapshotting) {
+         if (this.#compacting)
              return false;
-         }
-         // compact if the incremental size is greater than the snapshot size
          let snapshotSize = 0;
          let incrementalSize = 0;
          for (const chunk of sourceChunks) {
@@ -131,16 +197,3 @@ export class StorageSubsystem {
          return incrementalSize >= snapshotSize;
      }
  }
- function chunkTypeFromKey(key) {
-     if (key.length < 2) {
-         return null;
-     }
-     const chunkTypeStr = key[key.length - 2];
-     if (chunkTypeStr === "snapshot" || chunkTypeStr === "incremental") {
-         const chunkType = chunkTypeStr;
-         return chunkType;
-     }
-     else {
-         return null;
-     }
- }
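Two editorial notes on this file. First, the new namespaced key/value methods give other systems (like auth providers) a place to persist blobs without colliding with document storage. A minimal usage sketch; the namespace and key names here are illustrative, not from the diff:

```ts
import { StorageSubsystem } from "./StorageSubsystem.js"
import type { StorageAdapter } from "./StorageAdapter.js"

async function example(adapter: StorageAdapter) {
  const storage = new StorageSubsystem(adapter)
  // Hypothetical caller persisting a blob under its own namespace
  await storage.save("MyAuthProvider", "team-graph", new Uint8Array([1, 2, 3]))
  const blob = await storage.load("MyAuthProvider", "team-graph") // Uint8Array | undefined
  await storage.remove("MyAuthProvider", "team-graph")
}
```

Second, `saveDoc` now awaits `#saveTotal`/`#saveIncremental` instead of discarding their promises with `void`, so write failures propagate to callers. The compaction decision itself (the tallying loop is elided by the hunk above) reduces to a size comparison; a standalone sketch using the `ChunkInfo` type added in this release:

```ts
import type { ChunkInfo } from "./types.js"

// Sketch of the compaction heuristic: tally bytes held in snapshot chunks
// versus incremental chunks, and compact once the incrementals have grown
// at least as large as the snapshot they would replace.
function shouldCompact(sourceChunks: ChunkInfo[]): boolean {
  let snapshotSize = 0
  let incrementalSize = 0
  for (const chunk of sourceChunks) {
    if (chunk.type === "snapshot") snapshotSize += chunk.size
    else incrementalSize += chunk.size
  }
  return incrementalSize >= snapshotSize
}
```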
package/dist/storage/chunkTypeFromKey.d.ts ADDED
@@ -0,0 +1,13 @@
+ import { StorageKey } from "./types.js";
+ import { ChunkType } from "./types.js";
+ /**
+  * Keys for storing Automerge documents are of the form:
+  * ```ts
+  * [documentId, "snapshot", hash] // OR
+  * [documentId, "incremental", hash]
+  * ```
+  * This function returns the chunk type ("snapshot" or "incremental") if the key is in one of these
+  * forms.
+  */
+ export declare function chunkTypeFromKey(key: StorageKey): ChunkType | null;
+ //# sourceMappingURL=chunkTypeFromKey.d.ts.map
package/dist/storage/chunkTypeFromKey.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"chunkTypeFromKey.d.ts","sourceRoot":"","sources":["../../src/storage/chunkTypeFromKey.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,UAAU,EAAE,MAAM,YAAY,CAAA;AACvC,OAAO,EAAE,SAAS,EAAE,MAAM,YAAY,CAAA;AAEtC;;;;;;;;GAQG;AACH,wBAAgB,gBAAgB,CAAC,GAAG,EAAE,UAAU,GAAG,SAAS,GAAG,IAAI,CASlE"}
package/dist/storage/chunkTypeFromKey.js ADDED
@@ -0,0 +1,18 @@
+ /**
+  * Keys for storing Automerge documents are of the form:
+  * ```ts
+  * [documentId, "snapshot", hash] // OR
+  * [documentId, "incremental", hash]
+  * ```
+  * This function returns the chunk type ("snapshot" or "incremental") if the key is in one of these
+  * forms.
+  */
+ export function chunkTypeFromKey(key) {
+     if (key.length < 2)
+         return null;
+     const chunkTypeStr = key[key.length - 2]; // next-to-last element in key
+     if (chunkTypeStr === "snapshot" || chunkTypeStr === "incremental") {
+         return chunkTypeStr;
+     }
+     return null;
+ }
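For illustration, how the extracted helper behaves on a few keys (document IDs and hash suffixes here are made up):

```ts
import { chunkTypeFromKey } from "./chunkTypeFromKey.js"

chunkTypeFromKey(["4NMNnkMhL8jXrdJ9jamS58PAVdXu", "snapshot", "d1f9ab..."])    // "snapshot"
chunkTypeFromKey(["4NMNnkMhL8jXrdJ9jamS58PAVdXu", "incremental", "0e4c77..."]) // "incremental"
chunkTypeFromKey(["SomeNamespace", "some-key"]) // null: next-to-last element is not a chunk type
chunkTypeFromKey(["4NMNnkMhL8jXrdJ9jamS58PAVdXu"]) // null: key is too short
```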
package/dist/storage/keyHash.d.ts ADDED
@@ -0,0 +1,4 @@
+ import * as A from "@automerge/automerge/next";
+ export declare function keyHash(binary: Uint8Array): string;
+ export declare function headsHash(heads: A.Heads): string;
+ //# sourceMappingURL=keyHash.d.ts.map
package/dist/storage/keyHash.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"keyHash.d.ts","sourceRoot":"","sources":["../../src/storage/keyHash.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,CAAC,MAAM,2BAA2B,CAAA;AAI9C,wBAAgB,OAAO,CAAC,MAAM,EAAE,UAAU,UAIzC;AACD,wBAAgB,SAAS,CAAC,KAAK,EAAE,CAAC,CAAC,KAAK,GAAG,MAAM,CAIhD"}
package/dist/storage/keyHash.js ADDED
@@ -0,0 +1,15 @@
+ import * as sha256 from "fast-sha256";
+ import { mergeArrays } from "../helpers/mergeArrays.js";
+ export function keyHash(binary) {
+     // calculate hash
+     const hash = sha256.hash(binary);
+     return bufferToHexString(hash);
+ }
+ export function headsHash(heads) {
+     const encoder = new TextEncoder();
+     const headsbinary = mergeArrays(heads.map((h) => encoder.encode(h)));
+     return keyHash(headsbinary);
+ }
+ function bufferToHexString(data) {
+     return Array.from(data, byte => byte.toString(16).padStart(2, "0")).join("");
+ }
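The extraction to its own module doesn't change behavior: `keyHash` is still hex-encoded SHA-256 (`bufferToHexString` is just a tidier rewrite of the old map/slice/join), and `headsHash` still hashes the concatenated UTF-8 bytes of the heads. For example (the hex digest shown is the standard SHA-256 of "hello"; the document ID is illustrative):

```ts
import * as A from "@automerge/automerge/next"
import { keyHash, headsHash } from "./keyHash.js"

keyHash(new TextEncoder().encode("hello"))
// => "2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824"

// headsHash is what produces the trailing component of snapshot keys:
const doc = A.from({ count: 0 })
const key = ["someDocumentId", "snapshot", headsHash(A.getHeads(doc))]
```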
package/dist/storage/types.d.ts ADDED
@@ -0,0 +1,37 @@
+ /**
+  * A chunk is a snapshot or incremental change that is stored in a {@link StorageAdapter}.
+  */
+ export type Chunk = {
+     key: StorageKey;
+     data: Uint8Array | undefined;
+ };
+ /**
+  * Metadata about a chunk of data loaded from storage. This is stored on the StorageSubsystem so
+  * when we are compacting we know what chunks we can safely delete.
+  */
+ export type ChunkInfo = {
+     key: StorageKey;
+     type: ChunkType;
+     size: number;
+ };
+ export type ChunkType = "snapshot" | "incremental";
+ /**
+  * A storage key is an array of strings that represents a path to a value in a
+  * {@link StorageAdapter}.
+  *
+  * @remarks
+  * Storage keys are arrays because they are hierarchical and they allow the storage subsystem to do
+  * range queries for all keys that have a particular prefix. For example, incremental changes for a
+  * given document might be stored under `[<documentId>, "incremental", <SHA256>]`.
+  *
+  * automerge-repo mostly uses keys in the following form:
+  * ```ts
+  * [documentId, "snapshot", hash] // OR
+  * [documentId, "incremental", hash]
+  * ```
+  *
+  * However, the storage adapter implementation should be agnostic to the meaning of the key and
+  * should not assume any particular structure.
+  **/
+ export type StorageKey = string[];
+ //# sourceMappingURL=types.d.ts.map
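A short illustration of the hierarchical-key idea the doc comment describes (the document ID and hash fragments are made up):

```ts
import type { StorageKey } from "./types.js"

const documentId = "4NMNnkMhL8jXrdJ9jamS58PAVdXu"
const snapshotKey: StorageKey = [documentId, "snapshot", "d1f9ab..."]
const incrementalKey: StorageKey = [documentId, "incremental", "0e4c77..."]

// Both keys share the prefix [documentId], so an adapter's
// loadRange([documentId]) range query returns the chunks stored at both keys.
```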
package/dist/storage/types.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../../src/storage/types.ts"],"names":[],"mappings":"AAAA;;GAEG;AACH,MAAM,MAAM,KAAK,GAAG;IAClB,GAAG,EAAE,UAAU,CAAA;IACf,IAAI,EAAE,UAAU,GAAG,SAAS,CAAA;CAC7B,CAAA;AAED;;;GAGG;AACH,MAAM,MAAM,SAAS,GAAG;IACtB,GAAG,EAAE,UAAU,CAAA;IACf,IAAI,EAAE,SAAS,CAAA;IACf,IAAI,EAAE,MAAM,CAAA;CACb,CAAA;AAED,MAAM,MAAM,SAAS,GAAG,UAAU,GAAG,aAAa,CAAA;AAElD;;;;;;;;;;;;;;;;;IAiBI;AACJ,MAAM,MAAM,UAAU,GAAG,MAAM,EAAE,CAAA"}
package/dist/storage/types.js ADDED
@@ -0,0 +1 @@
+ export {};
package/dist/synchronizer/CollectionSynchronizer.js CHANGED
@@ -1,5 +1,5 @@
  import debug from "debug";
- import { stringifyAutomergeUrl } from "../DocUrl.js";
+ import { stringifyAutomergeUrl } from "../AutomergeUrl.js";
  import { DocSynchronizer } from "./DocSynchronizer.js";
  import { Synchronizer } from "./Synchronizer.js";
  const log = debug("automerge-repo:collectionsync");
package/dist/types.d.ts CHANGED
@@ -1,23 +1,31 @@
- /** The ID of a document. Typically you should use a {@link AutomergeUrl} instead.
- */
- export type DocumentId = string & {
-     __documentId: true;
- };
- /** A branded string representing a URL for a document
-  *
-  * @remarks
-  * An automerge URL has the form `automerge:<base58 encoded string>`. This
-  * type is returned from various routines which validate a url.
-  *
+ /**
+  * A branded string representing a URL for a document, in the form `automerge:<base58check encoded
+  * string>`; for example, `automerge:4NMNnkMhL8jXrdJ9jamS58PAVdXu`.
   */
  export type AutomergeUrl = string & {
      __documentUrl: true;
  };
- /** A document ID as a Uint8Array instead of a bas58 encoded string. Typically you should use a {@link AutomergeUrl} instead.
+ /**
+  * The base58check-encoded UUID of a document. This is the string following the `automerge:`
+  * protocol prefix in an AutomergeUrl; for example, `4NMNnkMhL8jXrdJ9jamS58PAVdXu`. When recording
+  * links to an Automerge document in another Automerge document, you should store a
+  * {@link AutomergeUrl} instead.
   */
+ export type DocumentId = string & {
+     __documentId: true;
+ };
+ /** The unencoded UUID of a document. Typically you should use a {@link AutomergeUrl} instead. */
  export type BinaryDocumentId = Uint8Array & {
      __binaryDocumentId: true;
  };
+ /**
+  * A UUID encoded as a hex string. As of v1.0, a {@link DocumentId} is stored as a base58-encoded string with a checksum.
+  * Support for this format will be removed in a future version.
+  */
+ export type LegacyDocumentId = string & {
+     __legacyDocumentId: true;
+ };
+ export type AnyDocumentId = AutomergeUrl | DocumentId | BinaryDocumentId | LegacyDocumentId;
  /** A branded type for peer IDs */
  export type PeerId = string & {
      __peerId: true;
package/dist/types.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":"AAAA;GACG;AACH,MAAM,MAAM,UAAU,GAAG,MAAM,GAAG;IAAE,YAAY,EAAE,IAAI,CAAA;CAAE,CAAA;AAExD;;;;;;GAMG;AACH,MAAM,MAAM,YAAY,GAAG,MAAM,GAAG;IAAE,aAAa,EAAE,IAAI,CAAA;CAAE,CAAA;AAE3D;GACG;AACH,MAAM,MAAM,gBAAgB,GAAG,UAAU,GAAG;IAAE,kBAAkB,EAAE,IAAI,CAAA;CAAE,CAAA;AAExE,kCAAkC;AAClC,MAAM,MAAM,MAAM,GAAG,MAAM,GAAG;IAAE,QAAQ,EAAE,IAAI,CAAA;CAAE,CAAA;AAEhD,0EAA0E;AAC1E,MAAM,MAAM,SAAS,GAAG,MAAM,GAAG;IAAE,WAAW,EAAE,IAAI,CAAA;CAAE,CAAA"}
+ {"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":"AAAA;;;GAGG;AACH,MAAM,MAAM,YAAY,GAAG,MAAM,GAAG;IAAE,aAAa,EAAE,IAAI,CAAA;CAAE,CAAA;AAE3D;;;;;GAKG;AACH,MAAM,MAAM,UAAU,GAAG,MAAM,GAAG;IAAE,YAAY,EAAE,IAAI,CAAA;CAAE,CAAA;AAExD,iGAAiG;AACjG,MAAM,MAAM,gBAAgB,GAAG,UAAU,GAAG;IAAE,kBAAkB,EAAE,IAAI,CAAA;CAAE,CAAA;AAExE;;;GAGG;AACH,MAAM,MAAM,gBAAgB,GAAG,MAAM,GAAG;IAAE,kBAAkB,EAAE,IAAI,CAAA;CAAE,CAAA;AAEpE,MAAM,MAAM,aAAa,GACrB,YAAY,GACZ,UAAU,GACV,gBAAgB,GAChB,gBAAgB,CAAA;AAEpB,kCAAkC;AAClC,MAAM,MAAM,MAAM,GAAG,MAAM,GAAG;IAAE,QAAQ,EAAE,IAAI,CAAA;CAAE,CAAA;AAEhD,0EAA0E;AAC1E,MAAM,MAAM,SAAS,GAAG,MAAM,GAAG;IAAE,WAAW,EAAE,IAAI,CAAA;CAAE,CAAA"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
      "name": "@automerge/automerge-repo",
-     "version": "1.0.12",
+     "version": "1.0.14",
      "description": "A repository object to manage a collection of automerge documents",
      "repository": "https://github.com/automerge/automerge-repo/tree/master/packages/automerge-repo",
      "author": "Peter van Hardenberg <pvh@pvh.ca>",
@@ -57,5 +57,5 @@
      "publishConfig": {
          "access": "public"
      },
-     "gitHead": "254bad1c774fa2a881265aaad5283af231bf72eb"
+     "gitHead": "c4a155e56613bbd396d3b764f3a2ec5807ac02db"
  }
package/src/AutomergeUrl.ts ADDED
@@ -0,0 +1,144 @@
+ import type {
+   LegacyDocumentId,
+   AutomergeUrl,
+   BinaryDocumentId,
+   DocumentId,
+   AnyDocumentId,
+ } from "./types.js"
+ import * as Uuid from "uuid"
+ import bs58check from "bs58check"
+
+ export const urlPrefix = "automerge:"
+
+ /** Given an Automerge URL, returns the DocumentId in both base58check-encoded form and binary form */
+ export const parseAutomergeUrl = (url: AutomergeUrl) => {
+   const regex = new RegExp(`^${urlPrefix}(\\w+)$`)
+   const [_, docMatch] = url.match(regex) || []
+   const documentId = docMatch as DocumentId
+   const binaryDocumentId = documentIdToBinary(documentId)
+
+   if (!binaryDocumentId) throw new Error("Invalid document URL: " + url)
+   return {
+     /** unencoded DocumentId */
+     binaryDocumentId,
+     /** encoded DocumentId */
+     documentId,
+   }
+ }
+
+ /**
+  * Given a documentId in either binary or base58check-encoded form, returns an Automerge URL.
+  * Throws on invalid input.
+  */
+ export const stringifyAutomergeUrl = (
+   arg: UrlOptions | DocumentId | BinaryDocumentId
+ ) => {
+   let documentId =
+     arg instanceof Uint8Array || typeof arg === "string"
+       ? arg
+       : "documentId" in arg
+       ? arg.documentId
+       : undefined
+
+   const encodedDocumentId =
+     documentId instanceof Uint8Array
+       ? binaryToDocumentId(documentId)
+       : typeof documentId === "string"
+       ? documentId
+       : undefined
+
+   if (encodedDocumentId === undefined)
+     throw new Error("Invalid documentId: " + documentId)
+
+   return (urlPrefix + encodedDocumentId) as AutomergeUrl
+ }
+
+ /**
+  * Given a string, returns true if it is a valid Automerge URL. This function also acts as a type
+  * discriminator in TypeScript.
+  */
+ export const isValidAutomergeUrl = (
+   str: string | undefined | null
+ ): str is AutomergeUrl => {
+   if (!str || !str.startsWith(urlPrefix)) return false
+   const automergeUrl = str as AutomergeUrl
+   try {
+     const { documentId } = parseAutomergeUrl(automergeUrl)
+     return isValidDocumentId(documentId)
+   } catch {
+     return false
+   }
+ }
+
+ export const isValidDocumentId = (str: string): str is DocumentId => {
+   // try to decode from base58
+   const binaryDocumentID = documentIdToBinary(str as DocumentId)
+   if (binaryDocumentID === undefined) return false // invalid base58check encoding
+
+   // confirm that the document ID is a valid UUID
+   const documentId = Uuid.stringify(binaryDocumentID)
+   return Uuid.validate(documentId)
+ }
+
+ export const isValidUuid = (str: string): str is LegacyDocumentId =>
+   Uuid.validate(str)
+
+ /**
+  * Returns a new Automerge URL with a random UUID documentId. Called by Repo.create(), and also used by tests.
+  */
+ export const generateAutomergeUrl = (): AutomergeUrl => {
+   const documentId = Uuid.v4(null, new Uint8Array(16)) as BinaryDocumentId
+   return stringifyAutomergeUrl({ documentId })
+ }
+
+ export const documentIdToBinary = (docId: DocumentId) =>
+   bs58check.decodeUnsafe(docId) as BinaryDocumentId | undefined
+
+ export const binaryToDocumentId = (docId: BinaryDocumentId) =>
+   bs58check.encode(docId) as DocumentId
+
+ export const parseLegacyUUID = (str: string) => {
+   if (!Uuid.validate(str)) return undefined
+   const documentId = Uuid.parse(str) as BinaryDocumentId
+   return stringifyAutomergeUrl({ documentId })
+ }
+
+ /**
+  * Given any valid expression of a document ID, returns a DocumentId in base58check-encoded form.
+  *
+  * Currently supports:
+  * - base58check-encoded DocumentId
+  * - Automerge URL
+  * - legacy UUID
+  * - binary DocumentId
+  *
+  * Throws on invalid input.
+  */
+ export const interpretAsDocumentId = (id: AnyDocumentId) => {
+   // binary
+   if (id instanceof Uint8Array) return binaryToDocumentId(id)
+
+   // url
+   if (isValidAutomergeUrl(id)) return parseAutomergeUrl(id).documentId
+
+   // base58check
+   if (isValidDocumentId(id)) return id
+
+   // legacy UUID
+   if (isValidUuid(id)) {
+     console.warn(
+       "Future versions will not support UUIDs as document IDs; use Automerge URLs instead."
+     )
+     const binaryDocumentID = Uuid.parse(id) as BinaryDocumentId
+     return binaryToDocumentId(binaryDocumentID)
+   }
+
+   // none of the above
+   throw new Error(`Invalid AutomergeUrl: '${id}'`)
+ }
+
+ // TYPES
+
+ type UrlOptions = {
+   documentId: DocumentId | BinaryDocumentId
+ }
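Taken together, the new module (which replaces the deleted `DocUrl.ts`) provides one conversion surface for every ID shape. A quick tour of the exports; the UUID shown is illustrative:

```ts
import {
  generateAutomergeUrl,
  isValidAutomergeUrl,
  parseAutomergeUrl,
  interpretAsDocumentId,
} from "./AutomergeUrl.js"
import type { LegacyDocumentId } from "./types.js"

const url = generateAutomergeUrl() // e.g. "automerge:4NMNnkMhL8jXrdJ9jamS58PAVdXu"
isValidAutomergeUrl(url) // true
const { documentId } = parseAutomergeUrl(url)

// interpretAsDocumentId normalizes all accepted forms to a DocumentId:
interpretAsDocumentId(url) === documentId // true
interpretAsDocumentId(documentId) === documentId // true
interpretAsDocumentId("83ee2839-a299-4756-951a-ab6ba0b02b51" as LegacyDocumentId) // warns, then converts
```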
package/src/DocHandle.ts CHANGED
@@ -14,7 +14,7 @@ import {
    TypegenDisabled,
  } from "xstate"
  import { waitFor } from "xstate/lib/waitFor.js"
- import { stringifyAutomergeUrl } from "./DocUrl.js"
+ import { stringifyAutomergeUrl } from "./AutomergeUrl.js"
  import { encode } from "./helpers/cbor.js"
  import { headsAreSame } from "./helpers/headsAreSame.js"
  import { withTimeout } from "./helpers/withTimeout.js"
package/src/Repo.ts CHANGED
@@ -1,20 +1,19 @@
  import { next as Automerge } from "@automerge/automerge"
  import debug from "debug"
  import { EventEmitter } from "eventemitter3"
- import { DocHandle, DocHandleEncodedChangePayload } from "./DocHandle.js"
  import {
    generateAutomergeUrl,
-   isValidAutomergeUrl,
+   interpretAsDocumentId,
    parseAutomergeUrl,
-   parseLegacyUUID,
- } from "./DocUrl.js"
+ } from "./AutomergeUrl.js"
+ import { DocHandle, DocHandleEncodedChangePayload } from "./DocHandle.js"
  import { throttle } from "./helpers/throttle.js"
  import { NetworkAdapter } from "./network/NetworkAdapter.js"
  import { NetworkSubsystem } from "./network/NetworkSubsystem.js"
  import { StorageAdapter } from "./storage/StorageAdapter.js"
  import { StorageSubsystem } from "./storage/StorageSubsystem.js"
  import { CollectionSynchronizer } from "./synchronizer/CollectionSynchronizer.js"
- import { DocumentId, PeerId, type AutomergeUrl } from "./types.js"
+ import type { AnyDocumentId, DocumentId, PeerId } from "./types.js"
 
  /** A Repo is a collection of documents with networking, syncing, and storage capabilities. */
  /** The `Repo` is the main entry point of this library
@@ -107,7 +106,7 @@ export class Repo extends EventEmitter<RepoEvents> {
        // synchronizer.removeDocument(documentId)
 
        if (storageSubsystem) {
-         storageSubsystem.remove(documentId).catch(err => {
+         storageSubsystem.removeDoc(documentId).catch(err => {
            this.#log("error deleting document", { documentId, err })
          })
        }
@@ -247,22 +246,11 @@ export class Repo extends EventEmitter<RepoEvents> {
     * event to advertise interest in the document.
     */
    find<T>(
-     /** The documentId of the handle to retrieve */
-     automergeUrl: AutomergeUrl
+     /** The url or documentId of the handle to retrieve */
+     id: AnyDocumentId
    ): DocHandle<T> {
-     if (!isValidAutomergeUrl(automergeUrl)) {
-       const maybeAutomergeUrl = parseLegacyUUID(automergeUrl)
-       if (maybeAutomergeUrl) {
-         console.warn(
-           "Legacy UUID document ID detected, converting to AutomergeUrl. This will be removed in a future version."
-         )
-         automergeUrl = maybeAutomergeUrl
-       } else {
-         throw new Error(`Invalid AutomergeUrl: '${automergeUrl}'`)
-       }
-     }
+     const documentId = interpretAsDocumentId(id)
 
-     const { documentId } = parseAutomergeUrl(automergeUrl)
      // If we have the handle cached, return it
      if (this.#handleCache[documentId]) {
        if (this.#handleCache[documentId].isUnavailable()) {
@@ -282,16 +270,16 @@ export class Repo extends EventEmitter<RepoEvents> {
    }
 
    delete(
-     /** The documentId of the handle to delete */
-     id: DocumentId | AutomergeUrl
+     /** The url or documentId of the handle to delete */
+     id: AnyDocumentId
    ) {
-     if (isValidAutomergeUrl(id)) id = parseAutomergeUrl(id).documentId
+     const documentId = interpretAsDocumentId(id)
 
-     const handle = this.#getHandle(id, false)
+     const handle = this.#getHandle(documentId, false)
      handle.delete()
 
-     delete this.#handleCache[id]
-     this.emit("delete-document", { documentId: id })
+     delete this.#handleCache[documentId]
+     this.emit("delete-document", { documentId })
    }
  }
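The net effect on the public API: `find` and `delete` now take `AnyDocumentId` and funnel everything through `interpretAsDocumentId`, so the legacy-UUID branching is gone from `Repo` itself. A sketch of a hypothetical caller (handle property names as in the 1.x `DocHandle`):

```ts
import { Repo } from "./Repo.js"

const repo = new Repo({ network: [] })
const handle = repo.create<{ count: number }>()

// All of these resolve to the same document:
repo.find(handle.url)        // AutomergeUrl, e.g. "automerge:..."
repo.find(handle.documentId) // base58check DocumentId
// repo.find("83ee2839-a299-4756-951a-ab6ba0b02b51") // legacy UUID: still works, logs a warning

repo.delete(handle.url) // delete() accepts the same forms
```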