@automerge/automerge-repo 0.2.1 → 1.0.0-alpha.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. package/README.md +7 -24
  2. package/dist/DocCollection.d.ts +4 -4
  3. package/dist/DocCollection.d.ts.map +1 -1
  4. package/dist/DocCollection.js +25 -17
  5. package/dist/DocHandle.d.ts +46 -10
  6. package/dist/DocHandle.d.ts.map +1 -1
  7. package/dist/DocHandle.js +101 -36
  8. package/dist/DocUrl.d.ts +38 -18
  9. package/dist/DocUrl.d.ts.map +1 -1
  10. package/dist/DocUrl.js +63 -24
  11. package/dist/Repo.d.ts.map +1 -1
  12. package/dist/Repo.js +4 -6
  13. package/dist/helpers/headsAreSame.d.ts +1 -1
  14. package/dist/helpers/headsAreSame.d.ts.map +1 -1
  15. package/dist/helpers/tests/network-adapter-tests.js +10 -10
  16. package/dist/index.d.ts +3 -2
  17. package/dist/index.d.ts.map +1 -1
  18. package/dist/index.js +1 -0
  19. package/dist/network/NetworkAdapter.d.ts +2 -3
  20. package/dist/network/NetworkAdapter.d.ts.map +1 -1
  21. package/dist/network/NetworkSubsystem.d.ts +2 -3
  22. package/dist/network/NetworkSubsystem.d.ts.map +1 -1
  23. package/dist/network/NetworkSubsystem.js +9 -13
  24. package/dist/storage/StorageAdapter.d.ts +9 -5
  25. package/dist/storage/StorageAdapter.d.ts.map +1 -1
  26. package/dist/storage/StorageSubsystem.d.ts +2 -2
  27. package/dist/storage/StorageSubsystem.d.ts.map +1 -1
  28. package/dist/storage/StorageSubsystem.js +73 -25
  29. package/dist/synchronizer/CollectionSynchronizer.d.ts +1 -1
  30. package/dist/synchronizer/CollectionSynchronizer.d.ts.map +1 -1
  31. package/dist/synchronizer/CollectionSynchronizer.js +5 -1
  32. package/dist/synchronizer/DocSynchronizer.d.ts.map +1 -1
  33. package/dist/synchronizer/DocSynchronizer.js +6 -5
  34. package/dist/types.d.ts +6 -0
  35. package/dist/types.d.ts.map +1 -1
  36. package/package.json +8 -5
  37. package/src/DocCollection.ts +32 -22
  38. package/src/DocHandle.ts +121 -47
  39. package/src/DocUrl.ts +90 -0
  40. package/src/Repo.ts +5 -8
  41. package/src/helpers/tests/network-adapter-tests.ts +10 -10
  42. package/src/index.ts +7 -5
  43. package/src/network/NetworkAdapter.ts +2 -3
  44. package/src/network/NetworkSubsystem.ts +9 -14
  45. package/src/storage/StorageAdapter.ts +7 -5
  46. package/src/storage/StorageSubsystem.ts +95 -34
  47. package/src/synchronizer/CollectionSynchronizer.ts +10 -2
  48. package/src/synchronizer/DocSynchronizer.ts +7 -6
  49. package/src/types.ts +4 -1
  50. package/test/CollectionSynchronizer.test.ts +1 -1
  51. package/test/DocCollection.test.ts +3 -2
  52. package/test/DocHandle.test.ts +32 -26
  53. package/test/DocSynchronizer.test.ts +3 -2
  54. package/test/Repo.test.ts +76 -27
  55. package/test/StorageSubsystem.test.ts +10 -7
  56. package/test/helpers/DummyNetworkAdapter.ts +2 -2
  57. package/test/helpers/DummyStorageAdapter.ts +8 -4
package/dist/DocUrl.js CHANGED
@@ -1,28 +1,67 @@
- import Base58 from "bs58";
- import { crc16 } from "js-crc";
- export const linkForDocumentId = id => withCrc("automerge://" + encode(id));
- export const documentIdFromShareLink = link => {
-   const { key } = parts(link);
-   return key;
+ import { v4 as uuid } from "uuid";
+ import bs58check from "bs58check";
+ export const urlPrefix = "automerge:";
+ /**
+  * given an Automerge URL, return a decoded DocumentId (and the encoded DocumentId)
+  *
+  * @param url
+  * @returns { documentId: Uint8Array(16), encodedDocumentId: bs58check.encode(documentId) }
+  */
+ export const parseAutomergeUrl = (url) => {
+   const { binaryDocumentId: binaryDocumentId, encodedDocumentId } = parts(url);
+   if (!binaryDocumentId)
+     throw new Error("Invalid document URL: " + url);
+   return { binaryDocumentId, encodedDocumentId };
  };
- export const isValidShareLink = str => {
-   const { nonCrc, crc } = parts(str);
-   return Boolean(nonCrc) && Boolean(crc) && crc16(nonCrc) === crc;
+ /**
+  * Given a documentId in either canonical form, return an Automerge URL
+  * Throws on invalid input.
+  * Note: this is an object because we anticipate adding fields in the future.
+  * @param { documentId: EncodedDocumentId | DocumentId }
+  * @returns AutomergeUrl
+  */
+ export const stringifyAutomergeUrl = ({ documentId, }) => {
+   if (documentId instanceof Uint8Array)
+     return (urlPrefix +
+       binaryToDocumentId(documentId));
+   else if (typeof documentId === "string") {
+     return (urlPrefix + documentId);
+   }
+   throw new Error("Invalid documentId: " + documentId);
  };
- export const parts = str => {
-   const p = encodedParts(str);
-   return {
-     key: p.key && decode(p.key),
-     nonCrc: p.nonCrc,
-     crc: p.crc && decode(p.crc),
-   };
+ /**
+  * Given a string, return true if it is a valid Automerge URL
+  * also acts as a type discriminator in Typescript.
+  * @param str: URL candidate
+  * @returns boolean
+  */
+ export const isValidAutomergeUrl = (str) => {
+   if (!str.startsWith(urlPrefix))
+     return false;
+   const { binaryDocumentId: documentId } = parts(str);
+   return documentId ? true : false;
  };
- export const encodedParts = str => {
-   const [m, nonCrc, key, crc] = str.match(/^(pxlpshr:\/\/(\w+))\/(\w{1,4})$/) || [];
-   return { nonCrc, key, crc };
+ /**
+  * generateAutomergeUrl produces a new AutomergeUrl.
+  * generally only called by create(), but used in tests as well.
+  * @returns a new Automerge URL with a random UUID documentId
+  */
+ export const generateAutomergeUrl = () => stringifyAutomergeUrl({
+   documentId: uuid(null, new Uint8Array(16)),
+ });
+ export const documentIdToBinary = (docId) => bs58check.decodeUnsafe(docId);
+ export const binaryToDocumentId = (docId) => bs58check.encode(docId);
+ /**
+  * parts breaks up the URL into constituent pieces,
+  * eventually this could include things like heads, so we use this structure
+  * we return both a binary & string-encoded version of the document ID
+  * @param str
+  * @returns { binaryDocumentId, encodedDocumentId }
+  */
+ const parts = (str) => {
+   const regex = new RegExp(`^${urlPrefix}(\\w+)$`);
+   const [m, docMatch] = str.match(regex) || [];
+   const encodedDocumentId = docMatch;
+   const binaryDocumentId = documentIdToBinary(encodedDocumentId);
+   return { binaryDocumentId, encodedDocumentId };
  };
- export const withCrc = str => str + `/` + encode(crc16(str));
- export const encode = str => Base58.encode(hexToBuffer(str));
- export const decode = str => bufferToHex(Base58.decode(str));
- export const hexToBuffer = key => Buffer.isBuffer(key) ? key : Buffer.from(key, "hex");
- export const bufferToHex = key => Buffer.isBuffer(key) ? key.toString("hex") : key;
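To make the new URL surface concrete, here is a minimal usage sketch (illustrative, not part of the package) built only from the DocUrl.js exports above:

```ts
import {
  generateAutomergeUrl,
  isValidAutomergeUrl,
  parseAutomergeUrl,
} from "./DocUrl.js"

// Mint a fresh URL: "automerge:" followed by a bs58check-encoded random
// 16-byte UUID.
const url = generateAutomergeUrl()

// isValidAutomergeUrl never throws: it checks the prefix and that the payload
// survives bs58check decoding (decodeUnsafe returns undefined on a bad
// checksum). Per its JSDoc it also acts as a TypeScript type discriminator.
if (isValidAutomergeUrl(url)) {
  // parseAutomergeUrl throws on malformed input and returns the id in both
  // forms: the raw 16 bytes and its bs58check string encoding.
  const { binaryDocumentId, encodedDocumentId } = parseAutomergeUrl(url)
  console.log(encodedDocumentId, binaryDocumentId.byteLength) // <base58>, 16
}
```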
package/dist/Repo.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"Repo.d.ts","sourceRoot":"","sources":["../src/Repo.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,aAAa,EAAE,MAAM,oBAAoB,CAAA;AAClD,OAAO,EAAE,aAAa,EAAE,MAAM,oBAAoB,CAAA;AAClD,OAAO,EAAE,cAAc,EAAE,MAAM,6BAA6B,CAAA;AAC5D,OAAO,EAAE,gBAAgB,EAAE,MAAM,+BAA+B,CAAA;AAChE,OAAO,EAAE,cAAc,EAAE,MAAM,6BAA6B,CAAA;AAC5D,OAAO,EAAE,gBAAgB,EAAE,MAAM,+BAA+B,CAAA;AAEhE,OAAO,EAAa,UAAU,EAAE,MAAM,EAAE,MAAM,YAAY,CAAA;AAM1D,oFAAoF;AACpF,qBAAa,IAAK,SAAQ,aAAa;;IAGrC,gBAAgB,EAAE,gBAAgB,CAAA;IAClC,gBAAgB,CAAC,EAAE,gBAAgB,CAAA;IACnC,aAAa,EAAE,aAAa,CAAA;gBAEhB,EAAE,OAAO,EAAE,OAAO,EAAE,MAAM,EAAE,WAAW,EAAE,EAAE,UAAU;CA+GlE;AAED,MAAM,WAAW,UAAU;IACzB,4BAA4B;IAC5B,MAAM,CAAC,EAAE,MAAM,CAAA;IAEf,gDAAgD;IAChD,OAAO,CAAC,EAAE,cAAc,CAAA;IAExB,oDAAoD;IACpD,OAAO,EAAE,cAAc,EAAE,CAAA;IAEzB;;;OAGG;IACH,WAAW,CAAC,EAAE,WAAW,CAAA;CAC1B;AAED,MAAM,MAAM,WAAW,GAAG,CACxB,MAAM,EAAE,MAAM,EACd,UAAU,CAAC,EAAE,UAAU,KACpB,OAAO,CAAC,OAAO,CAAC,CAAA"}
+ {"version":3,"file":"Repo.d.ts","sourceRoot":"","sources":["../src/Repo.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,aAAa,EAAE,MAAM,oBAAoB,CAAA;AAClD,OAAO,EAAE,aAAa,EAAE,MAAM,oBAAoB,CAAA;AAClD,OAAO,EAAE,cAAc,EAAE,MAAM,6BAA6B,CAAA;AAC5D,OAAO,EAAE,gBAAgB,EAAE,MAAM,+BAA+B,CAAA;AAChE,OAAO,EAAE,cAAc,EAAE,MAAM,6BAA6B,CAAA;AAC5D,OAAO,EAAE,gBAAgB,EAAE,MAAM,+BAA+B,CAAA;AAEhE,OAAO,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,YAAY,CAAA;AAI/C,oFAAoF;AACpF,qBAAa,IAAK,SAAQ,aAAa;;IAGrC,gBAAgB,EAAE,gBAAgB,CAAA;IAClC,gBAAgB,CAAC,EAAE,gBAAgB,CAAA;IACnC,aAAa,EAAE,aAAa,CAAA;gBAEhB,EAAE,OAAO,EAAE,OAAO,EAAE,MAAM,EAAE,WAAW,EAAE,EAAE,UAAU;CA8GlE;AAED,MAAM,WAAW,UAAU;IACzB,4BAA4B;IAC5B,MAAM,CAAC,EAAE,MAAM,CAAA;IAEf,gDAAgD;IAChD,OAAO,CAAC,EAAE,cAAc,CAAA;IAExB,oDAAoD;IACpD,OAAO,EAAE,cAAc,EAAE,CAAA;IAEzB;;;OAGG;IACH,WAAW,CAAC,EAAE,WAAW,CAAA;CAC1B;AAED,MAAM,MAAM,WAAW,GAAG,CACxB,MAAM,EAAE,MAAM,EACd,UAAU,CAAC,EAAE,UAAU,KACpB,OAAO,CAAC,OAAO,CAAC,CAAA"}
package/dist/Repo.js CHANGED
@@ -4,7 +4,6 @@ import { NetworkSubsystem } from "./network/NetworkSubsystem.js";
  import { StorageSubsystem } from "./storage/StorageSubsystem.js";
  import { CollectionSynchronizer } from "./synchronizer/CollectionSynchronizer.js";
  import debug from "debug";
- const SYNC_CHANNEL = "sync_channel";
  /** A Repo is a DocCollection with networking, syncing, and storage capabilities. */
  export class Repo extends DocCollection {
    #log;
@@ -21,8 +20,7 @@ export class Repo extends DocCollection {
  this.on("document", async ({ handle }) => {
    if (storageSubsystem) {
      // Save when the document changes
-     handle.on("change", async ({ handle }) => {
-       const doc = await handle.value();
+     handle.on("heads-changed", async ({ handle, doc }) => {
        await storageSubsystem.save(handle.documentId, doc);
      });
      // Try to load from disk
@@ -33,11 +31,11 @@ export class Repo extends DocCollection {
    // Register the document with the synchronizer. This advertises our interest in the document.
    synchronizer.addDocument(handle.documentId);
  });
- this.on("delete-document", ({ documentId }) => {
+ this.on("delete-document", ({ encodedDocumentId }) => {
    // TODO Pass the delete on to the network
    // synchronizer.removeDocument(documentId)
    if (storageSubsystem) {
-     storageSubsystem.remove(documentId);
+     storageSubsystem.remove(encodedDocumentId);
    }
  });
  // SYNCHRONIZER
@@ -82,7 +80,7 @@ export class Repo extends DocCollection {
    }
  });
  // We establish a special channel for sync messages
- networkSubsystem.join(SYNC_CHANNEL);
+ networkSubsystem.join();
  // EPHEMERAL DATA
  // The ephemeral data subsystem uses the network to send and receive messages that are not
  // persisted to storage, e.g. cursor position, presence, etc.
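The persistence rewiring above is worth isolating: saving now listens for "heads-changed", whose payload carries the document directly, instead of awaiting the document inside a "change" handler. A small sketch of the same wiring on its own (hypothetical helper, assuming the exported DocHandle and StorageSubsystem types):

```ts
import type { DocHandle, StorageSubsystem } from "@automerge/automerge-repo"

// Persist a handle's document whenever its heads advance. Because the
// "heads-changed" payload includes `doc`, there is no extra await on
// handle.value() (now handle.doc()) before saving.
function persistOnHeadsChanged(handle: DocHandle<unknown>, storage: StorageSubsystem) {
  handle.on("heads-changed", async ({ handle, doc }) => {
    await storage.save(handle.documentId, doc)
  })
}
```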
package/dist/helpers/headsAreSame.d.ts CHANGED
@@ -1,3 +1,3 @@
  import * as A from "@automerge/automerge";
- export declare const headsAreSame: <T>(a: A.unstable.Doc<T>, b: A.unstable.Doc<T>) => boolean;
+ export declare const headsAreSame: <T>(a: A.next.Doc<T>, b: A.next.Doc<T>) => boolean;
  //# sourceMappingURL=headsAreSame.d.ts.map
package/dist/helpers/headsAreSame.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"headsAreSame.d.ts","sourceRoot":"","sources":["../../src/helpers/headsAreSame.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,CAAC,MAAM,sBAAsB,CAAA;AAGzC,eAAO,MAAM,YAAY,4DAIxB,CAAA"}
+ {"version":3,"file":"headsAreSame.d.ts","sourceRoot":"","sources":["../../src/helpers/headsAreSame.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,CAAC,MAAM,sBAAsB,CAAA;AAGzC,eAAO,MAAM,YAAY,oDAIxB,CAAA"}
package/dist/helpers/tests/network-adapter-tests.js CHANGED
@@ -31,21 +31,21 @@ export function runAdapterTests(_setup, title) {
  const aliceHandle = aliceRepo.create();
  // Bob receives the document
  await eventPromise(bobRepo, "document");
- const bobHandle = bobRepo.find(aliceHandle.documentId);
+ const bobHandle = bobRepo.find(aliceHandle.url);
  // Alice changes the document
  aliceHandle.change(d => {
    d.foo = "bar";
  });
  // Bob receives the change
  await eventPromise(bobHandle, "change");
- assert.equal((await bobHandle.value()).foo, "bar");
+ assert.equal((await bobHandle.doc()).foo, "bar");
  // Bob changes the document
  bobHandle.change(d => {
    d.foo = "baz";
  });
  // Alice receives the change
  await eventPromise(aliceHandle, "change");
- assert.equal((await aliceHandle.value()).foo, "baz");
+ assert.equal((await aliceHandle.doc()).foo, "baz");
  };
  // Run the test in both directions, in case they're different types of adapters
  {
@@ -69,27 +69,27 @@ export function runAdapterTests(_setup, title) {
  const charlieRepo = new Repo({ network: c, peerId: charlie });
  // Alice creates a document
  const aliceHandle = aliceRepo.create();
- const documentId = aliceHandle.documentId;
+ const docUrl = aliceHandle.url;
  // Bob and Charlie receive the document
  await eventPromises([bobRepo, charlieRepo], "document");
- const bobHandle = bobRepo.find(documentId);
- const charlieHandle = charlieRepo.find(documentId);
+ const bobHandle = bobRepo.find(docUrl);
+ const charlieHandle = charlieRepo.find(docUrl);
  // Alice changes the document
  aliceHandle.change(d => {
    d.foo = "bar";
  });
  // Bob and Charlie receive the change
  await eventPromises([bobHandle, charlieHandle], "change");
- assert.equal((await bobHandle.value()).foo, "bar");
- assert.equal((await charlieHandle.value()).foo, "bar");
+ assert.equal((await bobHandle.doc()).foo, "bar");
+ assert.equal((await charlieHandle.doc()).foo, "bar");
  // Charlie changes the document
  charlieHandle.change(d => {
    d.foo = "baz";
  });
  // Alice and Bob receive the change
  await eventPromises([aliceHandle, bobHandle], "change");
- assert.equal((await bobHandle.value()).foo, "baz");
- assert.equal((await charlieHandle.value()).foo, "baz");
+ assert.equal((await bobHandle.doc()).foo, "baz");
+ assert.equal((await charlieHandle.doc()).foo, "baz");
  teardown();
  });
  // TODO: with BroadcastChannel, this test never ends, because it goes into an infinite loop,
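These test updates capture the two renames that recur throughout this release: repo.find() now takes the handle's URL rather than its documentId, and handle.value() is now handle.doc(). A hedged migration sketch (hypothetical function; assumes the exports shown in dist/index.js below):

```ts
import { Repo, isValidAutomergeUrl } from "@automerge/automerge-repo"

// 0.2.1:          repo.find(handle.documentId); await handle.value()
// 1.0.0-alpha.0:  repo.find(handle.url);        await handle.doc()
async function readDoc(repo: Repo, url: string) {
  // isValidAutomergeUrl doubles as a type guard for the URL-typed find().
  if (!isValidAutomergeUrl(url)) throw new Error("not an Automerge URL")
  const handle = repo.find(url)
  return handle.doc()
}
```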
package/dist/index.d.ts CHANGED
@@ -1,12 +1,13 @@
  export { DocCollection } from "./DocCollection.js";
  export { DocHandle, HandleState } from "./DocHandle.js";
- export type { DocHandleChangePayload, DocHandlePatchPayload, } from "./DocHandle.js";
+ export type { DocHandleChangePayload } from "./DocHandle.js";
  export { NetworkAdapter } from "./network/NetworkAdapter.js";
  export type { InboundMessagePayload, MessagePayload, OpenPayload, PeerCandidatePayload, PeerDisconnectedPayload, } from "./network/NetworkAdapter.js";
  export { NetworkSubsystem } from "./network/NetworkSubsystem.js";
  export { Repo, type SharePolicy } from "./Repo.js";
- export { StorageAdapter } from "./storage/StorageAdapter.js";
+ export { StorageAdapter, type StorageKey } from "./storage/StorageAdapter.js";
  export { StorageSubsystem } from "./storage/StorageSubsystem.js";
  export { CollectionSynchronizer } from "./synchronizer/CollectionSynchronizer.js";
+ export { parseAutomergeUrl, isValidAutomergeUrl, stringifyAutomergeUrl as generateAutomergeUrl, } from "./DocUrl.js";
  export * from "./types.js";
  //# sourceMappingURL=index.d.ts.map
package/dist/index.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,aAAa,EAAE,MAAM,oBAAoB,CAAA;AAClD,OAAO,EAAE,SAAS,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAA;AACvD,YAAY,EACV,sBAAsB,EACtB,qBAAqB,GACtB,MAAM,gBAAgB,CAAA;AACvB,OAAO,EAAE,cAAc,EAAE,MAAM,6BAA6B,CAAA;AAC5D,YAAY,EACV,qBAAqB,EACrB,cAAc,EACd,WAAW,EACX,oBAAoB,EACpB,uBAAuB,GACxB,MAAM,6BAA6B,CAAA;AACpC,OAAO,EAAE,gBAAgB,EAAE,MAAM,+BAA+B,CAAA;AAChE,OAAO,EAAE,IAAI,EAAE,KAAK,WAAW,EAAE,MAAM,WAAW,CAAA;AAClD,OAAO,EAAE,cAAc,EAAE,MAAM,6BAA6B,CAAA;AAC5D,OAAO,EAAE,gBAAgB,EAAE,MAAM,+BAA+B,CAAA;AAChE,OAAO,EAAE,sBAAsB,EAAE,MAAM,0CAA0C,CAAA;AACjF,cAAc,YAAY,CAAA"}
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,aAAa,EAAE,MAAM,oBAAoB,CAAA;AAClD,OAAO,EAAE,SAAS,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAA;AACvD,YAAY,EAAE,sBAAsB,EAAE,MAAM,gBAAgB,CAAA;AAC5D,OAAO,EAAE,cAAc,EAAE,MAAM,6BAA6B,CAAA;AAC5D,YAAY,EACV,qBAAqB,EACrB,cAAc,EACd,WAAW,EACX,oBAAoB,EACpB,uBAAuB,GACxB,MAAM,6BAA6B,CAAA;AACpC,OAAO,EAAE,gBAAgB,EAAE,MAAM,+BAA+B,CAAA;AAChE,OAAO,EAAE,IAAI,EAAE,KAAK,WAAW,EAAE,MAAM,WAAW,CAAA;AAClD,OAAO,EAAE,cAAc,EAAE,KAAK,UAAU,EAAE,MAAM,6BAA6B,CAAA;AAC7E,OAAO,EAAE,gBAAgB,EAAE,MAAM,+BAA+B,CAAA;AAChE,OAAO,EAAE,sBAAsB,EAAE,MAAM,0CAA0C,CAAA;AACjF,OAAO,EACL,iBAAiB,EACjB,mBAAmB,EACnB,qBAAqB,IAAI,oBAAoB,GAC9C,MAAM,aAAa,CAAA;AACpB,cAAc,YAAY,CAAA"}
package/dist/index.js CHANGED
@@ -6,4 +6,5 @@ export { Repo } from "./Repo.js";
  export { StorageAdapter } from "./storage/StorageAdapter.js";
  export { StorageSubsystem } from "./storage/StorageSubsystem.js";
  export { CollectionSynchronizer } from "./synchronizer/CollectionSynchronizer.js";
+ export { parseAutomergeUrl, isValidAutomergeUrl, stringifyAutomergeUrl as generateAutomergeUrl, } from "./DocUrl.js";
  export * from "./types.js";
package/dist/network/NetworkAdapter.d.ts CHANGED
@@ -4,8 +4,8 @@ export declare abstract class NetworkAdapter extends EventEmitter<NetworkAdapter
  peerId?: PeerId;
  abstract connect(url?: string): void;
  abstract sendMessage(peerId: PeerId, channelId: ChannelId, message: Uint8Array, broadcast: boolean): void;
- abstract join(channelId: ChannelId): void;
- abstract leave(channelId: ChannelId): void;
+ abstract join(): void;
+ abstract leave(): void;
  }
  export interface NetworkAdapterEvents {
    open: (payload: OpenPayload) => void;
@@ -19,7 +19,6 @@ export interface OpenPayload {
  }
  export interface PeerCandidatePayload {
    peerId: PeerId;
-   channelId: ChannelId;
  }
  export interface MessagePayload {
    targetId: PeerId;
package/dist/network/NetworkAdapter.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"NetworkAdapter.d.ts","sourceRoot":"","sources":["../../src/network/NetworkAdapter.ts"],"names":[],"mappings":"AAAA,OAAO,YAAY,MAAM,eAAe,CAAA;AACxC,OAAO,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,aAAa,CAAA;AAE/C,8BAAsB,cAAe,SAAQ,YAAY,CAAC,oBAAoB,CAAC;IAC7E,MAAM,CAAC,EAAE,MAAM,CAAA;IAEf,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE,MAAM,GAAG,IAAI;IAEpC,QAAQ,CAAC,WAAW,CAClB,MAAM,EAAE,MAAM,EACd,SAAS,EAAE,SAAS,EACpB,OAAO,EAAE,UAAU,EACnB,SAAS,EAAE,OAAO,GACjB,IAAI;IAEP,QAAQ,CAAC,IAAI,CAAC,SAAS,EAAE,SAAS,GAAG,IAAI;IAEzC,QAAQ,CAAC,KAAK,CAAC,SAAS,EAAE,SAAS,GAAG,IAAI;CAC3C;AAID,MAAM,WAAW,oBAAoB;IACnC,IAAI,EAAE,CAAC,OAAO,EAAE,WAAW,KAAK,IAAI,CAAA;IACpC,KAAK,EAAE,MAAM,IAAI,CAAA;IACjB,gBAAgB,EAAE,CAAC,OAAO,EAAE,oBAAoB,KAAK,IAAI,CAAA;IACzD,mBAAmB,EAAE,CAAC,OAAO,EAAE,uBAAuB,KAAK,IAAI,CAAA;IAC/D,OAAO,EAAE,CAAC,OAAO,EAAE,qBAAqB,KAAK,IAAI,CAAA;CAClD;AAED,MAAM,WAAW,WAAW;IAC1B,OAAO,EAAE,cAAc,CAAA;CACxB;AAED,MAAM,WAAW,oBAAoB;IACnC,MAAM,EAAE,MAAM,CAAA;IACd,SAAS,EAAE,SAAS,CAAA;CACrB;AAED,MAAM,WAAW,cAAc;IAC7B,QAAQ,EAAE,MAAM,CAAA;IAChB,SAAS,EAAE,SAAS,CAAA;IACpB,OAAO,EAAE,UAAU,CAAA;IACnB,SAAS,EAAE,OAAO,CAAA;CACnB;AAED,MAAM,WAAW,qBAAsB,SAAQ,cAAc;IAC3D,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,QAAQ,EAAE,MAAM,CAAA;CACjB;AAED,MAAM,WAAW,uBAAuB;IACtC,MAAM,EAAE,MAAM,CAAA;CACf"}
+ {"version":3,"file":"NetworkAdapter.d.ts","sourceRoot":"","sources":["../../src/network/NetworkAdapter.ts"],"names":[],"mappings":"AAAA,OAAO,YAAY,MAAM,eAAe,CAAA;AACxC,OAAO,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,aAAa,CAAA;AAE/C,8BAAsB,cAAe,SAAQ,YAAY,CAAC,oBAAoB,CAAC;IAC7E,MAAM,CAAC,EAAE,MAAM,CAAA;IAEf,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE,MAAM,GAAG,IAAI;IAEpC,QAAQ,CAAC,WAAW,CAClB,MAAM,EAAE,MAAM,EACd,SAAS,EAAE,SAAS,EACpB,OAAO,EAAE,UAAU,EACnB,SAAS,EAAE,OAAO,GACjB,IAAI;IAEP,QAAQ,CAAC,IAAI,IAAI,IAAI;IAErB,QAAQ,CAAC,KAAK,IAAI,IAAI;CACvB;AAID,MAAM,WAAW,oBAAoB;IACnC,IAAI,EAAE,CAAC,OAAO,EAAE,WAAW,KAAK,IAAI,CAAA;IACpC,KAAK,EAAE,MAAM,IAAI,CAAA;IACjB,gBAAgB,EAAE,CAAC,OAAO,EAAE,oBAAoB,KAAK,IAAI,CAAA;IACzD,mBAAmB,EAAE,CAAC,OAAO,EAAE,uBAAuB,KAAK,IAAI,CAAA;IAC/D,OAAO,EAAE,CAAC,OAAO,EAAE,qBAAqB,KAAK,IAAI,CAAA;CAClD;AAED,MAAM,WAAW,WAAW;IAC1B,OAAO,EAAE,cAAc,CAAA;CACxB;AAED,MAAM,WAAW,oBAAoB;IACnC,MAAM,EAAE,MAAM,CAAA;CACf;AAED,MAAM,WAAW,cAAc;IAC7B,QAAQ,EAAE,MAAM,CAAA;IAChB,SAAS,EAAE,SAAS,CAAA;IACpB,OAAO,EAAE,UAAU,CAAA;IACnB,SAAS,EAAE,OAAO,CAAA;CACnB;AAED,MAAM,WAAW,qBAAsB,SAAQ,cAAc;IAC3D,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,QAAQ,EAAE,MAAM,CAAA;CACjB;AAED,MAAM,WAAW,uBAAuB;IACtC,MAAM,EAAE,MAAM,CAAA;CACf"}
package/dist/network/NetworkSubsystem.d.ts CHANGED
@@ -8,8 +8,8 @@ export declare class NetworkSubsystem extends EventEmitter<NetworkSubsystemEvent
  constructor(adapters: NetworkAdapter[], peerId?: PeerId);
  addNetworkAdapter(networkAdapter: NetworkAdapter): void;
  sendMessage(peerId: PeerId, channelId: ChannelId, message: Uint8Array, broadcast: boolean): void;
- join(channelId: ChannelId): void;
- leave(channelId: ChannelId): void;
+ join(): void;
+ leave(): void;
  }
  export interface NetworkSubsystemEvents {
    peer: (payload: PeerPayload) => void;
@@ -18,6 +18,5 @@ export interface NetworkSubsystemEvents {
  }
  export interface PeerPayload {
    peerId: PeerId;
-   channelId: ChannelId;
  }
  //# sourceMappingURL=NetworkSubsystem.d.ts.map
package/dist/network/NetworkSubsystem.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"NetworkSubsystem.d.ts","sourceRoot":"","sources":["../../src/network/NetworkSubsystem.ts"],"names":[],"mappings":"AAAA,OAAO,YAAY,MAAM,eAAe,CAAA;AACxC,OAAO,EACL,qBAAqB,EACrB,cAAc,EACd,uBAAuB,EACxB,MAAM,qBAAqB,CAAA;AAC5B,OAAO,EAAE,SAAS,EAAE,MAAM,EAAE,MAAM,aAAa,CAAA;AAI/C,qBAAa,gBAAiB,SAAQ,YAAY,CAAC,sBAAsB,CAAC;;IAMtE,OAAO,CAAC,QAAQ;IACT,MAAM;gBADL,QAAQ,EAAE,cAAc,EAAE,EAC3B,MAAM,SAAiB;IAQhC,iBAAiB,CAAC,cAAc,EAAE,cAAc;IAsDhD,WAAW,CACT,MAAM,EAAE,MAAM,EACd,SAAS,EAAE,SAAS,EACpB,OAAO,EAAE,UAAU,EACnB,SAAS,EAAE,OAAO;IAkBpB,IAAI,CAAC,SAAS,EAAE,SAAS;IAMzB,KAAK,CAAC,SAAS,EAAE,SAAS;CAK3B;AAQD,MAAM,WAAW,sBAAsB;IACrC,IAAI,EAAE,CAAC,OAAO,EAAE,WAAW,KAAK,IAAI,CAAA;IACpC,mBAAmB,EAAE,CAAC,OAAO,EAAE,uBAAuB,KAAK,IAAI,CAAA;IAC/D,OAAO,EAAE,CAAC,OAAO,EAAE,qBAAqB,KAAK,IAAI,CAAA;CAClD;AAED,MAAM,WAAW,WAAW;IAC1B,MAAM,EAAE,MAAM,CAAA;IACd,SAAS,EAAE,SAAS,CAAA;CACrB"}
+ {"version":3,"file":"NetworkSubsystem.d.ts","sourceRoot":"","sources":["../../src/network/NetworkSubsystem.ts"],"names":[],"mappings":"AAAA,OAAO,YAAY,MAAM,eAAe,CAAA;AACxC,OAAO,EACL,qBAAqB,EACrB,cAAc,EACd,uBAAuB,EACxB,MAAM,qBAAqB,CAAA;AAC5B,OAAO,EAAE,SAAS,EAAE,MAAM,EAAE,MAAM,aAAa,CAAA;AAI/C,qBAAa,gBAAiB,SAAQ,YAAY,CAAC,sBAAsB,CAAC;;IAKtE,OAAO,CAAC,QAAQ;IACT,MAAM;gBADL,QAAQ,EAAE,cAAc,EAAE,EAC3B,MAAM,SAAiB;IAOhC,iBAAiB,CAAC,cAAc,EAAE,cAAc;IAsDhD,WAAW,CACT,MAAM,EAAE,MAAM,EACd,SAAS,EAAE,SAAS,EACpB,OAAO,EAAE,UAAU,EACnB,SAAS,EAAE,OAAO;IAkBpB,IAAI;IAKJ,KAAK;CAIN;AAQD,MAAM,WAAW,sBAAsB;IACrC,IAAI,EAAE,CAAC,OAAO,EAAE,WAAW,KAAK,IAAI,CAAA;IACpC,mBAAmB,EAAE,CAAC,OAAO,EAAE,uBAAuB,KAAK,IAAI,CAAA;IAC/D,OAAO,EAAE,CAAC,OAAO,EAAE,qBAAqB,KAAK,IAAI,CAAA;CAClD;AAED,MAAM,WAAW,WAAW;IAC1B,MAAM,EAAE,MAAM,CAAA;CACf"}
package/dist/network/NetworkSubsystem.js CHANGED
@@ -5,25 +5,23 @@ export class NetworkSubsystem extends EventEmitter {
  peerId;
  #log;
  #adaptersByPeer = {};
- #channels;
  constructor(adapters, peerId = randomPeerId()) {
    super();
    this.adapters = adapters;
    this.peerId = peerId;
    this.#log = debug(`automerge-repo:network:${this.peerId}`);
-   this.#channels = [];
    this.adapters.forEach(a => this.addNetworkAdapter(a));
  }
  addNetworkAdapter(networkAdapter) {
    networkAdapter.connect(this.peerId);
-   networkAdapter.on("peer-candidate", ({ peerId, channelId }) => {
+   networkAdapter.on("peer-candidate", ({ peerId }) => {
      this.#log(`peer candidate: ${peerId} `);
      // TODO: This is where authentication would happen
      if (!this.#adaptersByPeer[peerId]) {
        // TODO: handle losing a server here
        this.#adaptersByPeer[peerId] = networkAdapter;
      }
-     this.emit("peer", { peerId, channelId });
+     this.emit("peer", { peerId });
    });
    networkAdapter.on("peer-disconnected", ({ peerId }) => {
      this.#log(`peer disconnected: ${peerId} `);
@@ -54,7 +52,7 @@ export class NetworkSubsystem extends EventEmitter {
      }
    });
  });
- this.#channels.forEach(c => networkAdapter.join(c));
+ networkAdapter.join();
  }
  sendMessage(peerId, channelId, message, broadcast) {
    if (broadcast) {
@@ -73,15 +71,13 @@ export class NetworkSubsystem extends EventEmitter {
      peer.sendMessage(peerId, channelId, message, false);
    }
  }
- join(channelId) {
-   this.#log(`Joining channel ${channelId}`);
-   this.#channels.push(channelId);
-   this.adapters.forEach(a => a.join(channelId));
+ join() {
+   this.#log(`Joining network`);
+   this.adapters.forEach(a => a.join());
  }
- leave(channelId) {
-   this.#log(`Leaving channel ${channelId}`);
-   this.#channels = this.#channels.filter(c => c !== channelId);
-   this.adapters.forEach(a => a.leave(channelId));
+ leave() {
+   this.#log(`Leaving network`);
+   this.adapters.forEach(a => a.leave());
  }
  }
  function randomPeerId() {
package/dist/storage/StorageAdapter.d.ts CHANGED
@@ -1,8 +1,12 @@
  export declare abstract class StorageAdapter {
- abstract load(key: string[]): Promise<Uint8Array | undefined>;
- abstract save(key: string[], data: Uint8Array): Promise<void>;
- abstract remove(key: string[]): Promise<void>;
- abstract loadRange(keyPrefix: string[]): Promise<Uint8Array[]>;
- abstract removeRange(keyPrefix: string[]): Promise<void>;
+ abstract load(key: StorageKey): Promise<Uint8Array | undefined>;
+ abstract save(key: StorageKey, data: Uint8Array): Promise<void>;
+ abstract remove(key: StorageKey): Promise<void>;
+ abstract loadRange(keyPrefix: StorageKey): Promise<{
+   key: StorageKey;
+   data: Uint8Array;
+ }[]>;
+ abstract removeRange(keyPrefix: StorageKey): Promise<void>;
  }
+ export type StorageKey = string[];
  //# sourceMappingURL=StorageAdapter.d.ts.map
package/dist/storage/StorageAdapter.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"StorageAdapter.d.ts","sourceRoot":"","sources":["../../src/storage/StorageAdapter.ts"],"names":[],"mappings":"AAAA,8BAAsB,cAAc;IAMlC,QAAQ,CAAC,IAAI,CAAC,GAAG,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,UAAU,GAAG,SAAS,CAAC;IAC7D,QAAQ,CAAC,IAAI,CAAC,GAAG,EAAE,MAAM,EAAE,EAAE,IAAI,EAAE,UAAU,GAAG,OAAO,CAAC,IAAI,CAAC;IAC7D,QAAQ,CAAC,MAAM,CAAC,GAAG,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC;IAO7C,QAAQ,CAAC,SAAS,CAAC,SAAS,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,UAAU,EAAE,CAAC;IAC9D,QAAQ,CAAC,WAAW,CAAC,SAAS,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC;CACzD"}
+ {"version":3,"file":"StorageAdapter.d.ts","sourceRoot":"","sources":["../../src/storage/StorageAdapter.ts"],"names":[],"mappings":"AAAA,8BAAsB,cAAc;IAMlC,QAAQ,CAAC,IAAI,CAAC,GAAG,EAAE,UAAU,GAAG,OAAO,CAAC,UAAU,GAAG,SAAS,CAAC;IAC/D,QAAQ,CAAC,IAAI,CAAC,GAAG,EAAE,UAAU,EAAE,IAAI,EAAE,UAAU,GAAG,OAAO,CAAC,IAAI,CAAC;IAC/D,QAAQ,CAAC,MAAM,CAAC,GAAG,EAAE,UAAU,GAAG,OAAO,CAAC,IAAI,CAAC;IAO/C,QAAQ,CAAC,SAAS,CAAC,SAAS,EAAE,UAAU,GAAG,OAAO,CAAC;QAAC,GAAG,EAAE,UAAU,CAAC;QAAC,IAAI,EAAE,UAAU,CAAA;KAAC,EAAE,CAAC;IACzF,QAAQ,CAAC,WAAW,CAAC,SAAS,EAAE,UAAU,GAAG,OAAO,CAAC,IAAI,CAAC;CAC3D;AAED,MAAM,MAAO,UAAU,GAAG,MAAM,EAAE,CAAA"}
package/dist/storage/StorageSubsystem.d.ts CHANGED
@@ -1,11 +1,11 @@
  import * as A from "@automerge/automerge";
- import { DocumentId } from "../types.js";
  import { StorageAdapter } from "./StorageAdapter.js";
+ import { type DocumentId } from "../types.js";
+ export type ChunkType = "snapshot" | "incremental";
  export declare class StorageSubsystem {
    #private;
    constructor(storageAdapter: StorageAdapter);
    loadBinary(documentId: DocumentId): Promise<Uint8Array>;
- load<T>(documentId: DocumentId, prevDoc?: A.Doc<T>): Promise<A.Doc<T>>;
    save(documentId: DocumentId, doc: A.Doc<unknown>): Promise<void>;
    remove(documentId: DocumentId): Promise<void>;
  }
package/dist/storage/StorageSubsystem.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"StorageSubsystem.d.ts","sourceRoot":"","sources":["../../src/storage/StorageSubsystem.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,CAAC,MAAM,sBAAsB,CAAA;AACzC,OAAO,EAAE,UAAU,EAAE,MAAM,aAAa,CAAA;AACxC,OAAO,EAAE,cAAc,EAAE,MAAM,qBAAqB,CAAA;AAWpD,qBAAa,gBAAgB;;gBAGf,cAAc,EAAE,cAAc;IAwBpC,UAAU,CAAC,UAAU,EAAE,UAAU,GAAG,OAAO,CAAC,UAAU,CAAC;IAUvD,IAAI,CAAC,CAAC,EACV,UAAU,EAAE,UAAU,EACtB,OAAO,GAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAe,GAC9B,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;IAMd,IAAI,CAAC,UAAU,EAAE,UAAU,EAAE,GAAG,EAAE,CAAC,CAAC,GAAG,CAAC,OAAO,CAAC;IAQhD,MAAM,CAAC,UAAU,EAAE,UAAU;CAUpC"}
+ {"version":3,"file":"StorageSubsystem.d.ts","sourceRoot":"","sources":["../../src/storage/StorageSubsystem.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,CAAC,MAAM,sBAAsB,CAAA;AACzC,OAAO,EAAE,cAAc,EAAc,MAAM,qBAAqB,CAAA;AAEhE,OAAO,EAAE,KAAK,UAAU,EAAE,MAAM,aAAa,CAAA;AAW7C,MAAM,MAAM,SAAS,GAAG,UAAU,GAAG,aAAa,CAAA;AAelD,qBAAa,gBAAgB;;gBAIf,cAAc,EAAE,cAAc;IA6CpC,UAAU,CAAC,UAAU,EAAE,UAAU,GAAG,OAAO,CAAC,UAAU,CAAC;IAoBvD,IAAI,CAAC,UAAU,EAAE,UAAU,EAAE,GAAG,EAAE,CAAC,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC;IAShE,MAAM,CAAC,UAAU,EAAE,UAAU;CAkBpC"}
package/dist/storage/StorageSubsystem.js CHANGED
@@ -1,14 +1,20 @@
  import * as A from "@automerge/automerge";
- import { mergeArrays } from "../helpers/mergeArrays.js";
  import * as sha256 from "fast-sha256";
+ import { mergeArrays } from "../helpers/mergeArrays.js";
  function keyHash(binary) {
    const hash = sha256.hash(binary);
    const hashArray = Array.from(new Uint8Array(hash)); // convert buffer to byte array
    const hashHex = hashArray.map(b => ("00" + b.toString(16)).slice(-2)).join(""); // convert bytes to hex string
    return hashHex;
  }
+ function headsHash(heads) {
+   let encoder = new TextEncoder();
+   let headsbinary = mergeArrays(heads.map(h => encoder.encode(h)));
+   return keyHash(headsbinary);
+ }
  export class StorageSubsystem {
    #storageAdapter;
+   #chunkInfos = new Map();
    constructor(storageAdapter) {
      this.#storageAdapter = storageAdapter;
    }
@@ -16,47 +22,89 @@ export class StorageSubsystem {
    const binary = A.saveIncremental(doc);
    if (binary && binary.length > 0) {
      const key = [documentId, "incremental", keyHash(binary)];
-     return await this.#storageAdapter.save(key, binary);
+     await this.#storageAdapter.save(key, binary);
+     if (!this.#chunkInfos.has(documentId)) {
+       this.#chunkInfos.set(documentId, []);
+     }
+     this.#chunkInfos.get(documentId).push({
+       key,
+       type: "incremental",
+       size: binary.length,
+     });
    }
    else {
-     Promise.resolve();
+     return Promise.resolve();
    }
  }
- async #saveTotal(documentId, doc) {
+ async #saveTotal(documentId, doc, sourceChunks) {
    const binary = A.save(doc);
-   // TODO: this is still racy if two nodes are both writing to the store
-   await this.#storageAdapter.save([documentId, "snapshot"], binary);
-   // don't start deleting the incremental keys until save is done!
-   return this.#storageAdapter.removeRange([documentId, "incremental"]);
+   const key = [documentId, "snapshot", headsHash(A.getHeads(doc))];
+   const oldKeys = new Set(sourceChunks.map(c => c.key));
+   await this.#storageAdapter.save(key, binary);
+   for (const key of oldKeys) {
+     await this.#storageAdapter.remove(key);
+   }
+   const newChunkInfos = this.#chunkInfos.get(documentId)?.filter(c => !oldKeys.has(c.key)) ?? [];
+   newChunkInfos.push({ key, type: "snapshot", size: binary.length });
+   this.#chunkInfos.set(documentId, newChunkInfos);
  }
  async loadBinary(documentId) {
-   // it would probably be best to ensure .snapshot comes back first
-   // prevent the race condition with saveIncremental
-   const binaries = await this.#storageAdapter.loadRange([
-     documentId,
-   ]);
+   const loaded = await this.#storageAdapter.loadRange([documentId]);
+   const binaries = [];
+   const chunkInfos = [];
+   for (const chunk of loaded) {
+     const chunkType = chunkTypeFromKey(chunk.key);
+     if (chunkType == null) {
+       continue;
+     }
+     chunkInfos.push({
+       key: chunk.key,
+       type: chunkType,
+       size: chunk.data.length,
+     });
+     binaries.push(chunk.data);
+   }
+   this.#chunkInfos.set(documentId, chunkInfos);
    return mergeArrays(binaries);
  }
- async load(documentId, prevDoc = A.init()) {
-   const doc = A.loadIncremental(prevDoc, await this.loadBinary(documentId));
-   A.saveIncremental(doc);
-   return doc;
- }
  async save(documentId, doc) {
-   if (this.#shouldCompact(documentId)) {
-     return this.#saveTotal(documentId, doc);
+   let sourceChunks = this.#chunkInfos.get(documentId) ?? [];
+   if (this.#shouldCompact(sourceChunks)) {
+     this.#saveTotal(documentId, doc, sourceChunks);
    }
    else {
-     return this.#saveIncremental(documentId, doc);
+     this.#saveIncremental(documentId, doc);
    }
  }
  async remove(documentId) {
    this.#storageAdapter.remove([documentId, "snapshot"]);
    this.#storageAdapter.removeRange([documentId, "incremental"]);
  }
- // TODO: make this, you know, good.
- // this is probably fine
- #shouldCompact(documentId) {
-   return Math.random() < 0.05; // this.#changeCount[documentId] >= 20
+ #shouldCompact(sourceChunks) {
+   // compact if the incremental size is greater than the snapshot size
+   let snapshotSize = 0;
+   let incrementalSize = 0;
+   for (const chunk of sourceChunks) {
+     if (chunk.type === "snapshot") {
+       snapshotSize += chunk.size;
+     }
+     else {
+       incrementalSize += chunk.size;
+     }
+   }
+   return incrementalSize > snapshotSize;
+ }
+ }
+ function chunkTypeFromKey(key) {
+   if (key.length < 2) {
+     return null;
+   }
+   const chunkTypeStr = key[key.length - 2];
+   if (chunkTypeStr === "snapshot" || chunkTypeStr === "incremental") {
+     const chunkType = chunkTypeStr;
+     return chunkType;
+   }
+   else {
+     return null;
    }
  }
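The behavioral headline in this file: 0.2.1 compacted storage at random (Math.random() < 0.05), while the alpha compacts deterministically, tracking per-document chunk sizes and snapshotting once incremental chunks outweigh the last snapshot. A standalone restatement of the heuristic (hypothetical types, mirroring the code above):

```ts
// Mirrors StorageSubsystem's internal bookkeeping: one entry per stored chunk.
type ChunkInfo = { key: string[]; type: "snapshot" | "incremental"; size: number }

// Compact when accumulated incremental bytes exceed snapshot bytes.
function shouldCompact(chunks: ChunkInfo[]): boolean {
  let snapshot = 0
  let incremental = 0
  for (const c of chunks) {
    if (c.type === "snapshot") snapshot += c.size
    else incremental += c.size
  }
  return incremental > snapshot
}

// e.g. a 10 kB snapshot with 4 kB of incrementals keeps appending...
console.log(shouldCompact([
  { key: ["doc", "snapshot", "abc"], type: "snapshot", size: 10_000 },
  { key: ["doc", "incremental", "def"], type: "incremental", size: 4_000 },
])) // false
// ...but once incrementals pass 10 kB, the next save() writes a fresh
// snapshot keyed by a hash of the document's heads and deletes the
// superseded chunks.
```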
package/dist/synchronizer/CollectionSynchronizer.d.ts CHANGED
@@ -1,5 +1,5 @@
  import { DocCollection } from "../DocCollection.js";
- import { ChannelId, DocumentId, PeerId } from "../types.js";
+ import { ChannelId, PeerId, DocumentId } from "../types.js";
  import { Synchronizer } from "./Synchronizer.js";
  /** A CollectionSynchronizer is responsible for synchronizing a DocCollection with peers. */
  export declare class CollectionSynchronizer extends Synchronizer {
package/dist/synchronizer/CollectionSynchronizer.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"CollectionSynchronizer.d.ts","sourceRoot":"","sources":["../../src/synchronizer/CollectionSynchronizer.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,aAAa,EAAE,MAAM,qBAAqB,CAAA;AAEnD,OAAO,EAAE,SAAS,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,aAAa,CAAA;AAE3D,OAAO,EAAE,YAAY,EAAE,MAAM,mBAAmB,CAAA;AAKhD,4FAA4F;AAC5F,qBAAa,sBAAuB,SAAQ,YAAY;;IAO1C,OAAO,CAAC,IAAI;gBAAJ,IAAI,EAAE,aAAa;IAiCvC;;;OAGG;IACG,kBAAkB,CACtB,MAAM,EAAE,MAAM,EACd,SAAS,EAAE,SAAS,EACpB,OAAO,EAAE,UAAU;IAgBrB;;OAEG;IACH,WAAW,CAAC,UAAU,EAAE,UAAU;IAUlC,cAAc,CAAC,UAAU,EAAE,UAAU;IAIrC,2DAA2D;IAC3D,OAAO,CAAC,MAAM,EAAE,MAAM;IAWtB,uDAAuD;IACvD,UAAU,CAAC,MAAM,EAAE,MAAM;CAQ1B"}
+ {"version":3,"file":"CollectionSynchronizer.d.ts","sourceRoot":"","sources":["../../src/synchronizer/CollectionSynchronizer.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,aAAa,EAAE,MAAM,qBAAqB,CAAA;AAOnD,OAAO,EAAE,SAAS,EAAoB,MAAM,EAAE,UAAU,EAAE,MAAM,aAAa,CAAA;AAE7E,OAAO,EAAE,YAAY,EAAE,MAAM,mBAAmB,CAAA;AAKhD,4FAA4F;AAC5F,qBAAa,sBAAuB,SAAQ,YAAY;;IAO1C,OAAO,CAAC,IAAI;gBAAJ,IAAI,EAAE,aAAa;IAiCvC;;;OAGG;IACG,kBAAkB,CACtB,MAAM,EAAE,MAAM,EACd,SAAS,EAAE,SAAS,EACpB,OAAO,EAAE,UAAU;IAmBrB;;OAEG;IACH,WAAW,CAAC,UAAU,EAAE,UAAU;IAUlC,cAAc,CAAC,UAAU,EAAE,UAAU;IAIrC,2DAA2D;IAC3D,OAAO,CAAC,MAAM,EAAE,MAAM;IAWtB,uDAAuD;IACvD,UAAU,CAAC,MAAM,EAAE,MAAM;CAQ1B"}
package/dist/synchronizer/CollectionSynchronizer.js CHANGED
@@ -1,3 +1,4 @@
+ import { stringifyAutomergeUrl, } from "../DocUrl.js";
  import { DocSynchronizer } from "./DocSynchronizer.js";
  import { Synchronizer } from "./Synchronizer.js";
  import debug from "debug";
@@ -16,7 +17,7 @@ export class CollectionSynchronizer extends Synchronizer {
  /** Returns a synchronizer for the given document, creating one if it doesn't already exist. */
  #fetchDocSynchronizer(documentId) {
    if (!this.#docSynchronizers[documentId]) {
-     const handle = this.repo.find(documentId);
+     const handle = this.repo.find(stringifyAutomergeUrl({ documentId }));
      this.#docSynchronizers[documentId] = this.#initDocSynchronizer(handle);
    }
    return this.#docSynchronizers[documentId];
@@ -46,6 +47,9 @@ export class CollectionSynchronizer extends Synchronizer {
  async receiveSyncMessage(peerId, channelId, message) {
    log(`onSyncMessage: ${peerId}, ${channelId}, ${message.byteLength}bytes`);
    const documentId = channelId;
+   if (!documentId) {
+     throw new Error("received a message with an invalid documentId");
+   }
    const docSynchronizer = await this.#fetchDocSynchronizer(documentId);
    await docSynchronizer.receiveSyncMessage(peerId, channelId, message);
    // Initiate sync with any new peers
package/dist/synchronizer/DocSynchronizer.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"DocSynchronizer.d.ts","sourceRoot":"","sources":["../../src/synchronizer/DocSynchronizer.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,SAAS,EAAE,MAAM,iBAAiB,CAAA;AAC3C,OAAO,EAAE,SAAS,EAAE,MAAM,EAAE,MAAM,aAAa,CAAA;AAC/C,OAAO,EAAE,YAAY,EAAE,MAAM,mBAAmB,CAAA;AAIhD;;;GAGG;AACH,qBAAa,eAAgB,SAAQ,YAAY;;IAanC,OAAO,CAAC,MAAM;gBAAN,MAAM,EAAE,SAAS,CAAC,GAAG,CAAC;IAgB1C,IAAI,UAAU,qCAEb;IAuED,OAAO,CAAC,MAAM,EAAE,MAAM;IAItB,SAAS,CAAC,MAAM,EAAE,MAAM;IAkBxB,OAAO,CAAC,MAAM,EAAE,MAAM;IAKtB,kBAAkB,CAChB,MAAM,EAAE,MAAM,EACd,SAAS,EAAE,SAAS,EACpB,OAAO,EAAE,UAAU;CAsCtB"}
+ {"version":3,"file":"DocSynchronizer.d.ts","sourceRoot":"","sources":["../../src/synchronizer/DocSynchronizer.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,SAAS,EAAqB,MAAM,iBAAiB,CAAA;AAC9D,OAAO,EAAE,SAAS,EAAE,MAAM,EAAE,MAAM,aAAa,CAAA;AAC/C,OAAO,EAAE,YAAY,EAAE,MAAM,mBAAmB,CAAA;AAIhD;;;GAGG;AACH,qBAAa,eAAgB,SAAQ,YAAY;;IAanC,OAAO,CAAC,MAAM;gBAAN,MAAM,EAAE,SAAS,CAAC,GAAG,CAAC;IAgB1C,IAAI,UAAU,qCAEb;IAwED,OAAO,CAAC,MAAM,EAAE,MAAM;IAItB,SAAS,CAAC,MAAM,EAAE,MAAM;IAkBxB,OAAO,CAAC,MAAM,EAAE,MAAM;IAKtB,kBAAkB,CAChB,MAAM,EAAE,MAAM,EACd,SAAS,EAAE,SAAS,EACpB,OAAO,EAAE,UAAU;CAsCtB"}
package/dist/synchronizer/DocSynchronizer.js CHANGED
@@ -1,4 +1,5 @@
  import * as A from "@automerge/automerge";
+ import { READY, REQUESTING } from "../DocHandle.js";
  import { Synchronizer } from "./Synchronizer.js";
  import debug from "debug";
  /**
@@ -25,7 +26,7 @@ export class DocSynchronizer extends Synchronizer {
  handle.on("change", () => this.#syncWithPeers());
  // Process pending sync messages immediately after the handle becomes ready.
  void (async () => {
-   await handle.loadAttemptedValue();
+   await handle.doc([READY, REQUESTING]);
    this.#processAllPendingSyncMessages();
  })();
  }
@@ -35,7 +36,7 @@ export class DocSynchronizer extends Synchronizer {
  /// PRIVATE
  async #syncWithPeers() {
    this.#log(`syncWithPeers`);
-   const doc = await this.handle.value();
+   const doc = await this.handle.doc();
    this.#peers.forEach(peerId => this.#sendSyncMessage(peerId, doc));
  }
  #getSyncState(peerId) {
@@ -92,7 +93,7 @@ export class DocSynchronizer extends Synchronizer {
    this.#log(`beginSync: ${peerId}`);
    // At this point if we don't have anything in our storage, we need to use an empty doc to sync
    // with; but we don't want to surface that state to the front end
-   void this.handle.loadAttemptedValue().then(doc => {
+   void this.handle.doc([READY, REQUESTING]).then(doc => {
      // HACK: if we have a sync state already, we round-trip it through the encoding system to make
      // sure state is preserved. This prevents an infinite loop caused by failed attempts to send
      // messages during disconnection.
@@ -108,10 +109,10 @@ export class DocSynchronizer extends Synchronizer {
    this.#peers = this.#peers.filter(p => p !== peerId);
  }
  receiveSyncMessage(peerId, channelId, message) {
-   if (channelId !== this.documentId)
+   if (channelId !== this.handle.documentId)
      throw new Error(`channelId doesn't match documentId`);
    // We need to block receiving the syncMessages until we've checked local storage
-   if (!this.handle.isReadyOrRequesting()) {
+   if (!this.handle.inState([READY, REQUESTING])) {
      this.#pendingSyncMessages.push({ peerId, message });
      return;
    }
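Here loadAttemptedValue() and isReadyOrRequesting() give way to the more general doc(states) and inState(states), with READY and REQUESTING exported as constants from DocHandle.js. A hedged sketch of the pattern (hypothetical helper; assumes only those exports, as the imports above show):

```ts
import { DocHandle, READY, REQUESTING } from "./DocHandle.js"

async function docForSync<T>(handle: DocHandle<T>) {
  // inState() is the synchronous probe that replaces isReadyOrRequesting().
  if (!handle.inState([READY, REQUESTING])) {
    console.log("document not yet loaded; doc() below will wait")
  }
  // doc() with no argument resolves once the handle is READY;
  // doc([READY, REQUESTING]) also resolves while the document is still being
  // requested from peers, replacing loadAttemptedValue().
  return handle.doc([READY, REQUESTING])
}
```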