@automerge/automerge-repo 2.0.0-alpha.20 → 2.0.0-alpha.22

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
@@ -0,0 +1,39 @@
+ /**
+  * Creates a promise that rejects when the signal is aborted.
+  *
+  * @remarks
+  * This utility creates a promise that rejects when the provided AbortSignal is aborted.
+  * It's designed to be used with Promise.race() to make operations abortable.
+  *
+  * @example
+  * ```typescript
+  * const controller = new AbortController();
+  *
+  * try {
+  *   const result = await Promise.race([
+  *     fetch('https://api.example.com/data'),
+  *     abortable(controller.signal)
+  *   ]);
+  * } catch (err) {
+  *   if (err.name === 'AbortError') {
+  *     console.log('The operation was aborted');
+  *   }
+  * }
+  *
+  * // Later, to abort:
+  * controller.abort();
+  * ```
+  *
+  * @param signal - An AbortSignal that can be used to abort the operation
+  * @param cleanup - Optional cleanup function that will be called if aborted
+  * @returns A promise that rejects with AbortError when the signal is aborted
+  * @throws {DOMException} With name "AbortError" when aborted
+  */
+ export declare function abortable(signal?: AbortSignal, cleanup?: () => void): Promise<never>;
+ /**
+  * Include this type in an options object to pass an AbortSignal to a function.
+  */
+ export interface AbortOptions {
+   signal?: AbortSignal;
+ }
+ //# sourceMappingURL=abortable.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"abortable.d.ts","sourceRoot":"","sources":["../../src/helpers/abortable.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA8BG;AACH,wBAAgB,SAAS,CACvB,MAAM,CAAC,EAAE,WAAW,EACpB,OAAO,CAAC,EAAE,MAAM,IAAI,GACnB,OAAO,CAAC,KAAK,CAAC,CAmBhB;AAED;;GAEG;AACH,MAAM,WAAW,YAAY;IAC3B,MAAM,CAAC,EAAE,WAAW,CAAA;CACrB"}
@@ -0,0 +1,45 @@
+ /**
+  * Creates a promise that rejects when the signal is aborted.
+  *
+  * @remarks
+  * This utility creates a promise that rejects when the provided AbortSignal is aborted.
+  * It's designed to be used with Promise.race() to make operations abortable.
+  *
+  * @example
+  * ```typescript
+  * const controller = new AbortController();
+  *
+  * try {
+  *   const result = await Promise.race([
+  *     fetch('https://api.example.com/data'),
+  *     abortable(controller.signal)
+  *   ]);
+  * } catch (err) {
+  *   if (err.name === 'AbortError') {
+  *     console.log('The operation was aborted');
+  *   }
+  * }
+  *
+  * // Later, to abort:
+  * controller.abort();
+  * ```
+  *
+  * @param signal - An AbortSignal that can be used to abort the operation
+  * @param cleanup - Optional cleanup function that will be called if aborted
+  * @returns A promise that rejects with AbortError when the signal is aborted
+  * @throws {DOMException} With name "AbortError" when aborted
+  */
+ export function abortable(signal, cleanup) {
+   if (signal?.aborted) {
+     throw new DOMException("Operation aborted", "AbortError");
+   }
+   if (!signal) {
+     return new Promise(() => { }); // Never resolves
+   }
+   return new Promise((_, reject) => {
+     signal.addEventListener("abort", () => {
+       cleanup?.();
+       reject(new DOMException("Operation aborted", "AbortError"));
+     }, { once: true });
+   });
+ }
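The `AbortOptions` interface added above ships without a usage example of its own. As a rough sketch only (the `fetchData` helper and the import path are assumptions for illustration, not code from the package), the two pieces are presumably meant to be combined like this:

```typescript
import { abortable, AbortOptions } from "./helpers/abortable.js"

// Hypothetical helper: accept AbortOptions, then race the real work against
// abortable(signal). If the signal fires first (or was already aborted), the
// returned promise rejects with a DOMException named "AbortError".
async function fetchData(url: string, { signal }: AbortOptions = {}): Promise<Response> {
  return Promise.race([fetch(url), abortable(signal)])
}

const controller = new AbortController()
const result = fetchData("https://api.example.com/data", { signal: controller.signal })
controller.abort() // `result` now rejects with an "AbortError" DOMException
```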
@@ -1 +1 @@
- {"version":3,"file":"network-adapter-tests.d.ts","sourceRoot":"","sources":["../../../src/helpers/tests/network-adapter-tests.ts"],"names":[],"mappings":"AAUA,OAAO,KAAK,EAAE,uBAAuB,EAAE,MAAM,0CAA0C,CAAA;AAIvF;;;;;;;;;;;GAWG;AACH,wBAAgB,sBAAsB,CAAC,MAAM,EAAE,OAAO,EAAE,KAAK,CAAC,EAAE,MAAM,GAAG,IAAI,CA0Q5E;AAID,KAAK,OAAO,GAAG,uBAAuB,GAAG,uBAAuB,EAAE,CAAA;AAElE,MAAM,MAAM,OAAO,GAAG,MAAM,OAAO,CAAC;IAClC,QAAQ,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,OAAO,CAAC,CAAA;IACrC,QAAQ,CAAC,EAAE,MAAM,IAAI,CAAA;CACtB,CAAC,CAAA"}
+ {"version":3,"file":"network-adapter-tests.d.ts","sourceRoot":"","sources":["../../../src/helpers/tests/network-adapter-tests.ts"],"names":[],"mappings":"AAUA,OAAO,KAAK,EAAE,uBAAuB,EAAE,MAAM,0CAA0C,CAAA;AAIvF;;;;;;;;;;;GAWG;AACH,wBAAgB,sBAAsB,CAAC,MAAM,EAAE,OAAO,EAAE,KAAK,CAAC,EAAE,MAAM,GAAG,IAAI,CA2Q5E;AAID,KAAK,OAAO,GAAG,uBAAuB,GAAG,uBAAuB,EAAE,CAAA;AAElE,MAAM,MAAM,OAAO,GAAG,MAAM,OAAO,CAAC;IAClC,QAAQ,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,OAAO,CAAC,CAAA;IACrC,QAAQ,CAAC,EAAE,MAAM,IAAI,CAAA;CACtB,CAAC,CAAA"}
@@ -30,23 +30,23 @@ export function runNetworkAdapterTests(_setup, title) {
  const bobRepo = new Repo({ network: b, peerId: bob });
  // Alice creates a document
  const aliceHandle = aliceRepo.create();
- // Bob receives the document
- await eventPromise(bobRepo, "document");
- const bobHandle = bobRepo.find(aliceHandle.url);
+ // TODO: ... let connections complete. this shouldn't be necessary.
+ await pause(50);
+ const bobHandle = await bobRepo.find(aliceHandle.url);
  // Alice changes the document
  aliceHandle.change(d => {
  d.foo = "bar";
  });
  // Bob receives the change
  await eventPromise(bobHandle, "change");
- assert.equal((await bobHandle.doc())?.foo, "bar");
+ assert.equal((await bobHandle).doc()?.foo, "bar");
  // Bob changes the document
  bobHandle.change(d => {
  d.foo = "baz";
  });
  // Alice receives the change
  await eventPromise(aliceHandle, "change");
- assert.equal((await aliceHandle.doc())?.foo, "baz");
+ assert.equal(aliceHandle.doc().foo, "baz");
  };
  // Run the test in both directions, in case they're different types of adapters
  {
@@ -72,25 +72,25 @@ export function runNetworkAdapterTests(_setup, title) {
  const aliceHandle = aliceRepo.create();
  const docUrl = aliceHandle.url;
  // Bob and Charlie receive the document
- await eventPromises([bobRepo, charlieRepo], "document");
- const bobHandle = bobRepo.find(docUrl);
- const charlieHandle = charlieRepo.find(docUrl);
+ await pause(50);
+ const bobHandle = await bobRepo.find(docUrl);
+ const charlieHandle = await charlieRepo.find(docUrl);
  // Alice changes the document
  aliceHandle.change(d => {
  d.foo = "bar";
  });
  // Bob and Charlie receive the change
  await eventPromises([bobHandle, charlieHandle], "change");
- assert.equal((await bobHandle.doc())?.foo, "bar");
- assert.equal((await charlieHandle.doc())?.foo, "bar");
+ assert.equal(bobHandle.doc().foo, "bar");
+ assert.equal(charlieHandle.doc().foo, "bar");
  // Charlie changes the document
  charlieHandle.change(d => {
  d.foo = "baz";
  });
  // Alice and Bob receive the change
  await eventPromises([aliceHandle, bobHandle], "change");
- assert.equal((await bobHandle.doc())?.foo, "baz");
- assert.equal((await charlieHandle.doc())?.foo, "baz");
+ assert.equal(bobHandle.doc().foo, "baz");
+ assert.equal(charlieHandle.doc().foo, "baz");
  teardown();
  });
  it("can broadcast a message", async () => {
@@ -101,7 +101,7 @@ export function runNetworkAdapterTests(_setup, title) {
  const charlieRepo = new Repo({ network: c, peerId: charlie });
  await eventPromises([aliceRepo, bobRepo, charlieRepo].map(r => r.networkSubsystem), "peer");
  const aliceHandle = aliceRepo.create();
- const charlieHandle = charlieRepo.find(aliceHandle.url);
+ const charlieHandle = await charlieRepo.find(aliceHandle.url);
  // pause to give charlie a chance to let alice know it wants the doc
  await pause(100);
  const alicePresenceData = { presence: "alice" };
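The test changes above all follow from the same API shift in this release: `Repo.find()` now resolves asynchronously to a handle, and `DocHandle.doc()` reads the document synchronously. A condensed sketch of the new calling pattern (the `readFoo` helper and its document shape are illustrative assumptions, not code from the package):

```typescript
import { Repo, type AutomergeUrl } from "@automerge/automerge-repo"

// Before (alpha.20): const handle = repo.find(url); const doc = await handle.doc()
// After (alpha.22): find() is awaited, doc() is synchronous once the handle resolves.
async function readFoo(repo: Repo, url: AutomergeUrl): Promise<string | undefined> {
  const handle = await repo.find<{ foo?: string }>(url)
  return handle.doc()?.foo
}
```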
@@ -1,3 +1,4 @@
+ import { DocHandle } from "../DocHandle.js";
  import { Repo } from "../Repo.js";
  import { DocMessage } from "../network/messages.js";
  import { AutomergeUrl, DocumentId, PeerId } from "../types.js";
@@ -19,7 +20,7 @@ export declare class CollectionSynchronizer extends Synchronizer {
  /**
  * Starts synchronizing the given document with all peers that we share it generously with.
  */
- addDocument(documentId: DocumentId): void;
+ addDocument(handle: DocHandle<unknown>): void;
  removeDocument(documentId: DocumentId): void;
  /** Adds a peer and maybe starts synchronizing with them */
  addPeer(peerId: PeerId): void;
@@ -1 +1 @@
- {"version":3,"file":"CollectionSynchronizer.d.ts","sourceRoot":"","sources":["../../src/synchronizer/CollectionSynchronizer.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,IAAI,EAAE,MAAM,YAAY,CAAA;AACjC,OAAO,EAAE,UAAU,EAAE,MAAM,wBAAwB,CAAA;AACnD,OAAO,EAAE,YAAY,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,aAAa,CAAA;AAC9D,OAAO,EAAE,eAAe,EAAE,MAAM,sBAAsB,CAAA;AACtD,OAAO,EAAE,YAAY,EAAE,MAAM,mBAAmB,CAAA;AAIhD,4FAA4F;AAC5F,qBAAa,sBAAuB,SAAQ,YAAY;;IAa1C,OAAO,CAAC,IAAI;IATxB,kDAAkD;IAClD,cAAc;IACd,gBAAgB,EAAE,MAAM,CAAC,UAAU,EAAE,eAAe,CAAC,CAAK;gBAOtC,IAAI,EAAE,IAAI,EAAE,QAAQ,GAAE,YAAY,EAAO;IAuD7D;;;OAGG;IACG,cAAc,CAAC,OAAO,EAAE,UAAU;IAsCxC;;OAEG;IACH,WAAW,CAAC,UAAU,EAAE,UAAU;IAalC,cAAc,CAAC,UAAU,EAAE,UAAU;IAIrC,2DAA2D;IAC3D,OAAO,CAAC,MAAM,EAAE,MAAM;IAgBtB,uDAAuD;IACvD,UAAU,CAAC,MAAM,EAAE,MAAM;IASzB,+CAA+C;IAC/C,IAAI,KAAK,IAAI,MAAM,EAAE,CAEpB;IAED,OAAO,IAAI;QACT,CAAC,GAAG,EAAE,MAAM,GAAG;YACb,KAAK,EAAE,MAAM,EAAE,CAAA;YACf,IAAI,EAAE;gBAAE,MAAM,EAAE,MAAM,CAAC;gBAAC,UAAU,EAAE,MAAM,CAAA;aAAE,CAAA;SAC7C,CAAA;KACF;CASF"}
+ {"version":3,"file":"CollectionSynchronizer.d.ts","sourceRoot":"","sources":["../../src/synchronizer/CollectionSynchronizer.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,SAAS,EAAE,MAAM,iBAAiB,CAAA;AAE3C,OAAO,EAAE,IAAI,EAAE,MAAM,YAAY,CAAA;AACjC,OAAO,EAAE,UAAU,EAAE,MAAM,wBAAwB,CAAA;AACnD,OAAO,EAAE,YAAY,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,aAAa,CAAA;AAC9D,OAAO,EAAE,eAAe,EAAE,MAAM,sBAAsB,CAAA;AACtD,OAAO,EAAE,YAAY,EAAE,MAAM,mBAAmB,CAAA;AAIhD,4FAA4F;AAC5F,qBAAa,sBAAuB,SAAQ,YAAY;;IAa1C,OAAO,CAAC,IAAI;IATxB,kDAAkD;IAClD,cAAc;IACd,gBAAgB,EAAE,MAAM,CAAC,UAAU,EAAE,eAAe,CAAC,CAAK;gBAOtC,IAAI,EAAE,IAAI,EAAE,QAAQ,GAAE,YAAY,EAAO;IAwD7D;;;OAGG;IACG,cAAc,CAAC,OAAO,EAAE,UAAU;IAyCxC;;OAEG;IACH,WAAW,CAAC,MAAM,EAAE,SAAS,CAAC,OAAO,CAAC;IAatC,cAAc,CAAC,UAAU,EAAE,UAAU;IAIrC,2DAA2D;IAC3D,OAAO,CAAC,MAAM,EAAE,MAAM;IAgBtB,uDAAuD;IACvD,UAAU,CAAC,MAAM,EAAE,MAAM;IASzB,+CAA+C;IAC/C,IAAI,KAAK,IAAI,MAAM,EAAE,CAEpB;IAED,OAAO,IAAI;QACT,CAAC,GAAG,EAAE,MAAM,GAAG;YACb,KAAK,EAAE,MAAM,EAAE,CAAA;YACf,IAAI,EAAE;gBAAE,MAAM,EAAE,MAAM,CAAC;gBAAC,UAAU,EAAE,MAAM,CAAA;aAAE,CAAA;SAC7C,CAAA;KACF;CASF"}
@@ -1,5 +1,5 @@
  import debug from "debug";
- import { parseAutomergeUrl, stringifyAutomergeUrl } from "../AutomergeUrl.js";
+ import { parseAutomergeUrl } from "../AutomergeUrl.js";
  import { DocSynchronizer } from "./DocSynchronizer.js";
  import { Synchronizer } from "./Synchronizer.js";
  const log = debug("automerge-repo:collectionsync");
@@ -20,17 +20,18 @@ export class CollectionSynchronizer extends Synchronizer {
  this.#denylist = denylist.map(url => parseAutomergeUrl(url).documentId);
  }
  /** Returns a synchronizer for the given document, creating one if it doesn't already exist. */
- #fetchDocSynchronizer(documentId) {
- if (!this.docSynchronizers[documentId]) {
- const handle = this.repo.find(stringifyAutomergeUrl({ documentId }));
- this.docSynchronizers[documentId] = this.#initDocSynchronizer(handle);
+ #fetchDocSynchronizer(handle) {
+ if (!this.docSynchronizers[handle.documentId]) {
+ this.docSynchronizers[handle.documentId] =
+ this.#initDocSynchronizer(handle);
  }
- return this.docSynchronizers[documentId];
+ return this.docSynchronizers[handle.documentId];
  }
  /** Creates a new docSynchronizer and sets it up to propagate messages */
  #initDocSynchronizer(handle) {
  const docSynchronizer = new DocSynchronizer({
  handle,
+ peerId: this.repo.networkSubsystem.peerId,
  onLoadSyncState: async (peerId) => {
  if (!this.repo.storageSubsystem) {
  return;
@@ -83,23 +84,26 @@ export class CollectionSynchronizer extends Synchronizer {
  return;
  }
  this.#docSetUp[documentId] = true;
- const docSynchronizer = this.#fetchDocSynchronizer(documentId);
+ const handle = await this.repo.find(documentId, {
+ allowableStates: ["ready", "unavailable", "requesting"],
+ });
+ const docSynchronizer = this.#fetchDocSynchronizer(handle);
  docSynchronizer.receiveMessage(message);
  // Initiate sync with any new peers
  const peers = await this.#documentGenerousPeers(documentId);
- docSynchronizer.beginSync(peers.filter(peerId => !docSynchronizer.hasPeer(peerId)));
+ void docSynchronizer.beginSync(peers.filter(peerId => !docSynchronizer.hasPeer(peerId)));
  }
  /**
  * Starts synchronizing the given document with all peers that we share it generously with.
  */
- addDocument(documentId) {
+ addDocument(handle) {
  // HACK: this is a hack to prevent us from adding the same document twice
- if (this.#docSetUp[documentId]) {
+ if (this.#docSetUp[handle.documentId]) {
  return;
  }
- const docSynchronizer = this.#fetchDocSynchronizer(documentId);
- void this.#documentGenerousPeers(documentId).then(peers => {
- docSynchronizer.beginSync(peers);
+ const docSynchronizer = this.#fetchDocSynchronizer(handle);
+ void this.#documentGenerousPeers(handle.documentId).then(peers => {
+ void docSynchronizer.beginSync(peers);
  });
  }
  // TODO: implement this
@@ -118,7 +122,7 @@ export class CollectionSynchronizer extends Synchronizer {
  const { documentId } = docSynchronizer;
  void this.repo.sharePolicy(peerId, documentId).then(okToShare => {
  if (okToShare)
- docSynchronizer.beginSync([peerId]);
+ void docSynchronizer.beginSync([peerId]);
  });
  }
  }
@@ -6,6 +6,7 @@ import { Synchronizer } from "./Synchronizer.js";
  type PeerDocumentStatus = "unknown" | "has" | "unavailable" | "wants";
  interface DocSynchronizerConfig {
  handle: DocHandle<unknown>;
+ peerId: PeerId;
  onLoadSyncState?: (peerId: PeerId) => Promise<A.SyncState | undefined>;
  }
  /**
@@ -15,11 +16,11 @@ interface DocSynchronizerConfig {
  export declare class DocSynchronizer extends Synchronizer {
  #private;
  syncDebounceRate: number;
- constructor({ handle, onLoadSyncState }: DocSynchronizerConfig);
+ constructor({ handle, peerId, onLoadSyncState }: DocSynchronizerConfig);
  get peerStates(): Record<PeerId, PeerDocumentStatus>;
  get documentId(): import("../types.js").DocumentId;
  hasPeer(peerId: PeerId): boolean;
- beginSync(peerIds: PeerId[]): void;
+ beginSync(peerIds: PeerId[]): Promise<void>;
  endSync(peerId: PeerId): void;
  receiveMessage(message: RepoMessage): void;
  receiveEphemeralMessage(message: EphemeralMessage): void;
@@ -1 +1 @@
- {"version":3,"file":"DocSynchronizer.d.ts","sourceRoot":"","sources":["../../src/synchronizer/DocSynchronizer.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,CAAC,MAAM,gCAAgC,CAAA;AAGnD,OAAO,EACL,SAAS,EAKV,MAAM,iBAAiB,CAAA;AACxB,OAAO,EAEL,gBAAgB,EAEhB,WAAW,EACX,cAAc,EACd,WAAW,EAEZ,MAAM,wBAAwB,CAAA;AAC/B,OAAO,EAAE,MAAM,EAAE,MAAM,aAAa,CAAA;AACpC,OAAO,EAAE,YAAY,EAAE,MAAM,mBAAmB,CAAA;AAGhD,KAAK,kBAAkB,GAAG,SAAS,GAAG,KAAK,GAAG,aAAa,GAAG,OAAO,CAAA;AAOrE,UAAU,qBAAqB;IAC7B,MAAM,EAAE,SAAS,CAAC,OAAO,CAAC,CAAA;IAC1B,eAAe,CAAC,EAAE,CAAC,MAAM,EAAE,MAAM,KAAK,OAAO,CAAC,CAAC,CAAC,SAAS,GAAG,SAAS,CAAC,CAAA;CACvE;AAED;;;GAGG;AACH,qBAAa,eAAgB,SAAQ,YAAY;;IAE/C,gBAAgB,SAAM;gBAsBV,EAAE,MAAM,EAAE,eAAe,EAAE,EAAE,qBAAqB;IAyB9D,IAAI,UAAU,uCAEb;IAED,IAAI,UAAU,qCAEb;IAkID,OAAO,CAAC,MAAM,EAAE,MAAM;IAItB,SAAS,CAAC,OAAO,EAAE,MAAM,EAAE;IAmD3B,OAAO,CAAC,MAAM,EAAE,MAAM;IAKtB,cAAc,CAAC,OAAO,EAAE,WAAW;IAkBnC,uBAAuB,CAAC,OAAO,EAAE,gBAAgB;IAuBjD,kBAAkB,CAAC,OAAO,EAAE,WAAW,GAAG,cAAc;IAuFxD,OAAO,IAAI;QAAE,KAAK,EAAE,MAAM,EAAE,CAAC;QAAC,IAAI,EAAE;YAAE,MAAM,EAAE,MAAM,CAAC;YAAC,UAAU,EAAE,MAAM,CAAA;SAAE,CAAA;KAAE;CAM7E"}
+ {"version":3,"file":"DocSynchronizer.d.ts","sourceRoot":"","sources":["../../src/synchronizer/DocSynchronizer.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,CAAC,MAAM,gCAAgC,CAAA;AAGnD,OAAO,EACL,SAAS,EAKV,MAAM,iBAAiB,CAAA;AACxB,OAAO,EAEL,gBAAgB,EAEhB,WAAW,EACX,cAAc,EACd,WAAW,EAEZ,MAAM,wBAAwB,CAAA;AAC/B,OAAO,EAAE,MAAM,EAAE,MAAM,aAAa,CAAA;AACpC,OAAO,EAAE,YAAY,EAAE,MAAM,mBAAmB,CAAA;AAGhD,KAAK,kBAAkB,GAAG,SAAS,GAAG,KAAK,GAAG,aAAa,GAAG,OAAO,CAAA;AAOrE,UAAU,qBAAqB;IAC7B,MAAM,EAAE,SAAS,CAAC,OAAO,CAAC,CAAA;IAC1B,MAAM,EAAE,MAAM,CAAA;IACd,eAAe,CAAC,EAAE,CAAC,MAAM,EAAE,MAAM,KAAK,OAAO,CAAC,CAAC,CAAC,SAAS,GAAG,SAAS,CAAC,CAAA;CACvE;AAED;;;GAGG;AACH,qBAAa,eAAgB,SAAQ,YAAY;;IAE/C,gBAAgB,SAAM;gBAyBV,EAAE,MAAM,EAAE,MAAM,EAAE,eAAe,EAAE,EAAE,qBAAqB;IA0BtE,IAAI,UAAU,uCAEb;IAED,IAAI,UAAU,qCAEb;IAiID,OAAO,CAAC,MAAM,EAAE,MAAM;IAIhB,SAAS,CAAC,OAAO,EAAE,MAAM,EAAE;IAwDjC,OAAO,CAAC,MAAM,EAAE,MAAM;IAKtB,cAAc,CAAC,OAAO,EAAE,WAAW;IAkBnC,uBAAuB,CAAC,OAAO,EAAE,gBAAgB;IAuBjD,kBAAkB,CAAC,OAAO,EAAE,WAAW,GAAG,cAAc;IAwFxD,OAAO,IAAI;QAAE,KAAK,EAAE,MAAM,EAAE,CAAC;QAAC,IAAI,EAAE;YAAE,MAAM,EAAE,MAAM,CAAC;YAAC,UAAU,EAAE,MAAM,CAAA;SAAE,CAAA;KAAE;CAM7E"}
@@ -19,11 +19,15 @@ export class DocSynchronizer extends Synchronizer {
  /** Sync state for each peer we've communicated with (including inactive peers) */
  #syncStates = {};
  #pendingSyncMessages = [];
+ // We keep this around at least in part for debugging.
+ // eslint-disable-next-line no-unused-private-class-members
+ #peerId;
  #syncStarted = false;
  #handle;
  #onLoadSyncState;
- constructor({ handle, onLoadSyncState }) {
+ constructor({ handle, peerId, onLoadSyncState }) {
  super();
+ this.#peerId = peerId;
  this.#handle = handle;
  this.#onLoadSyncState =
  onLoadSyncState ?? (() => Promise.resolve(undefined));
@@ -33,7 +37,7 @@ export class DocSynchronizer extends Synchronizer {
  handle.on("ephemeral-message-outbound", payload => this.#broadcastToPeers(payload));
  // Process pending sync messages immediately after the handle becomes ready.
  void (async () => {
- await handle.doc([READY, REQUESTING]);
+ await handle.whenReady([READY, REQUESTING]);
  this.#processAllPendingSyncMessages();
  })();
  }
@@ -45,8 +49,7 @@ export class DocSynchronizer extends Synchronizer {
  }
  /// PRIVATE
  async #syncWithPeers() {
- this.#log(`syncWithPeers`);
- const doc = await this.#handle.doc();
+ const doc = await this.#handle.legacyAsyncDoc(); // XXX THIS ONE IS WEIRD
  if (doc === undefined)
  return;
  this.#peers.forEach(peerId => this.#sendSyncMessage(peerId, doc));
@@ -151,12 +154,12 @@ export class DocSynchronizer extends Synchronizer {
  hasPeer(peerId) {
  return this.#peers.includes(peerId);
  }
- beginSync(peerIds) {
+ async beginSync(peerIds) {
  const noPeersWithDocument = peerIds.every(peerId => this.#peerDocumentStatuses[peerId] in ["unavailable", "wants"]);
  // At this point if we don't have anything in our storage, we need to use an empty doc to sync
  // with; but we don't want to surface that state to the front end
- const docPromise = this.#handle
- .doc([READY, REQUESTING, UNAVAILABLE])
+ const docPromise = this.#handle // TODO THIS IS ALSO WEIRD
+ .legacyAsyncDoc([READY, REQUESTING, UNAVAILABLE])
  .then(doc => {
  // we register out peers first, then say that sync has started
  this.#syncStarted = true;
@@ -169,7 +172,12 @@ export class DocSynchronizer extends Synchronizer {
  // the sync message from
  return doc ?? A.init();
  });
- this.#log(`beginSync: ${peerIds.join(", ")}`);
+ const peersWithDocument = this.#peers.some(peerId => {
+ return this.#peerDocumentStatuses[peerId] == "has";
+ });
+ if (peersWithDocument) {
+ await this.#handle.whenReady();
+ }
  peerIds.forEach(peerId => {
  this.#withSyncState(peerId, syncState => {
  // HACK: if we have a sync state already, we round-trip it through the encoding system to make
package/fuzz/fuzz.ts CHANGED
@@ -107,9 +107,9 @@ for (let i = 0; i < 100000; i++) {
  })

  await pause(0)
- const a = await aliceRepo.find(doc.url).doc()
- const b = await bobRepo.find(doc.url).doc()
- const c = await charlieRepo.find(doc.url).doc()
+ const a = (await aliceRepo.find(doc.url)).doc()
+ const b = (await bobRepo.find(doc.url)).doc()
+ const c = (await charlieRepo.find(doc.url)).doc()
  assert.deepStrictEqual(a, b, "A and B should be equal")
  assert.deepStrictEqual(b, c, "B and C should be equal")
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@automerge/automerge-repo",
- "version": "2.0.0-alpha.20",
+ "version": "2.0.0-alpha.22",
  "description": "A repository object to manage a collection of automerge documents",
  "repository": "https://github.com/automerge/automerge-repo/tree/master/packages/automerge-repo",
  "author": "Peter van Hardenberg <pvh@pvh.ca>",
@@ -60,5 +60,5 @@
  "publishConfig": {
  "access": "public"
  },
- "gitHead": "d53bc37be0fd923ff40f3cf7e2bd06a0496ddb73"
+ "gitHead": "b30af9827bed4615ba3c5e9ee93ca483915e4016"
  }
package/src/DocHandle.ts CHANGED
@@ -83,12 +83,12 @@ export class DocHandle<T> extends EventEmitter<DocHandleEvents<T>> {
  this.emit("delete", { handle: this })
  return { doc: A.init() }
  }),
+ onUnavailable: assign(() => {
+ return { doc: A.init() }
+ }),
  onUnload: assign(() => {
  return { doc: A.init() }
  }),
- onUnavailable: () => {
- this.emit("unavailable", { handle: this })
- },
  },
  }).createMachine({
  /** @xstate-layout N4IgpgJg5mDOIC5QAoC2BDAxgCwJYDswBKAYgFUAFAEQEEAVAUQG0AGAXUVAAcB7WXAC64e+TiAAeiAOwAOAKwA6ACxSAzKqks1ATjlTdAGhABPRAFolAJksKN2y1KtKAbFLla5AX09G0WPISkVAwAMgyMrBxIILz8QiJikggAjCzOijKqLEqqybJyLizaRqYIFpbJtro5Uo7J2o5S3r4YOATECrgQADZgJADCAEoM9MzsYrGCwqLRSeoyCtra8pa5adquySXmDjY5ac7JljLJeepKzSB+bYGdPX0AYgCSAHJUkRN8UwmziM7HCgqyVcUnqcmScmcMm2ZV2yiyzkOx1OalUFx8V1aAQ63R46AgBCgJGGAEUyAwAMp0D7RSbxGagJKHFgKOSWJTJGRSCosCpKaEmRCqbQKU5yXINeTaer6LwY67YogKXH4wkkKgAeX6AH1hjQqABNGncL70xKIJQ5RY5BHOJag6wwpRyEWImQVeT1aWrVSXBXtJUqgn4Ik0ADqNCedG1L3CYY1gwA0saYqbpuaEG4pKLksKpFDgcsCjDhTnxTKpTLdH6sQGFOgAO7oKYhl5gAQNngAJwA1iRY3R40ndSNDSm6enfpm5BkWAVkvy7bpuTCKq7ndZnfVeSwuTX-HWu2AAI4AVzgQhD6q12rILxoADVIyEaAAhMLjtM-RmIE4LVSQi4nLLDIGzOCWwLKA0cgyLBoFWNy+43B0R5nheaqajqepjuMtJfgyEh-FoixqMCoKqOyhzgYKCDOq6UIeuCSxHOoSGKgop74OgABuzbdOgABGvTXlho5GrhJpxJOP4pLulT6KoMhpJY2hzsWNF0QobqMV6LG+pc+A8BAcBiP6gSfFJ36EQgKksksKxrHamwwmY7gLKB85QjBzoAWxdZdL0FnfARST8ooLC7qoTnWBU4pyC5ViVMKBQaHUDQuM4fm3EGhJBWaU7-CysEAUp3LpEpWw0WYRw2LmqzgqciIsCxWUdI2zaXlAbYdt2PZ5dJ1n5jY2iJY1ikOIcMJHCyUWHC62hRZkUVNPKta3Kh56wJ1-VWUyzhFc64JWJCtQNBBzhQW4cHwbsrVKpxPF8YJgV4ZZIWIKkiKiiNSkqZYWjzCWaQ5hFh0AcCuR3QoR74qUknBRmzholpv3OkpRQNNRpTzaKTWKbIWR5FDxm9AIkA7e9skUYCWayLILBZGoLkUSKbIyIdpxHPoyTeN4QA */
@@ -281,7 +281,7 @@ export class DocHandle<T> extends EventEmitter<DocHandleEvents<T>> {
  * This is the recommended way to access a handle's document. Note that this waits for the handle
  * to be ready if necessary. If loading (or synchronization) fails, this will never resolve.
  */
- async doc(
+ async legacyAsyncDoc(
  /** states to wait for, such as "LOADING". mostly for internal use. */
  awaitStates: HandleState[] = ["ready", "unavailable"]
  ) {
@@ -292,45 +292,42 @@ export class DocHandle<T> extends EventEmitter<DocHandleEvents<T>> {
  // if we timed out, return undefined
  return undefined
  }
- // If we have fixed heads, return a view at those heads
- if (this.#fixedHeads) {
- const doc = this.#doc
- if (!doc || this.isUnavailable()) return undefined
- return A.view(doc, decodeHeads(this.#fixedHeads))
- }
  // Return the document
  return !this.isUnavailable() ? this.#doc : undefined
  }

  /**
- * Synchronously returns the current state of the Automerge document this handle manages, or
- * undefined. Consider using `await handle.doc()` instead. Check `isReady()`, or use `whenReady()`
- * if you want to make sure loading is complete first.
- *
- * Not to be confused with the SyncState of the document, which describes the state of the
- * synchronization process.
+ * Returns the current state of the Automerge document this handle manages.
  *
- * Note that `undefined` is not a valid Automerge document, so the return from this function is
- * unambigous.
+ * @returns the current document
+ * @throws on deleted and unavailable documents
  *
- * @returns the current document, or undefined if the document is not ready.
  */
- docSync() {
- if (!this.isReady()) return undefined
+ doc() {
+ if (!this.isReady()) throw new Error("DocHandle is not ready")
  if (this.#fixedHeads) {
- const doc = this.#doc
- return doc ? A.view(doc, decodeHeads(this.#fixedHeads)) : undefined
+ return A.view(this.#doc, decodeHeads(this.#fixedHeads))
  }
  return this.#doc
  }

+ /**
+ *
+ * @deprecated */
+ docSync() {
+ console.warn(
+ "docSync is deprecated. Use doc() instead. This function will be removed as part of the 2.0 release."
+ )
+ return this.doc()
+ }
+
  /**
  * Returns the current "heads" of the document, akin to a git commit.
  * This precisely defines the state of a document.
  * @returns the current document's heads, or undefined if the document is not ready
  */
- heads(): UrlHeads | undefined {
- if (!this.isReady()) return undefined
+ heads(): UrlHeads {
+ if (!this.isReady()) throw new Error("DocHandle is not ready")
  if (this.#fixedHeads) {
  return this.#fixedHeads
  }
@@ -365,8 +362,8 @@ export class DocHandle<T> extends EventEmitter<DocHandleEvents<T>> {
  }

  /**
- * Creates a new DocHandle with a fixed "view" at the given point in time represented
- * by the `heads` passed in. The return value is the same type as docSync() and will return
+ * Creates a fixed "view" of an automerge document at the given point in time represented
+ * by the `heads` passed in. The return value is the same type as doc() and will return
  * undefined if the object hasn't finished loading.
  *
  * @remarks
@@ -426,7 +423,7 @@ export class DocHandle<T> extends EventEmitter<DocHandleEvents<T>> {
  if (!otherHeads) throw new Error("Other document's heads not available")

  // Create a temporary merged doc to verify shared history and compute diff
- const mergedDoc = A.merge(A.clone(doc), first.docSync()!)
+ const mergedDoc = A.merge(A.clone(doc), first.doc()!)
  // Use the merged doc to compute the diff
  return A.diff(
  mergedDoc,
@@ -591,10 +588,7 @@ export class DocHandle<T> extends EventEmitter<DocHandleEvents<T>> {
  `DocHandle#${this.documentId} is in view-only mode at specific heads. Use clone() to create a new document from this state.`
  )
  }
- const mergingDoc = otherHandle.docSync()
- if (!mergingDoc) {
- throw new Error("The document to be merged in is falsy, aborting.")
- }
+ const mergingDoc = otherHandle.doc()

  this.update(doc => {
  return A.merge(doc, mergingDoc)
@@ -680,7 +674,6 @@ export interface DocHandleEvents<T> {
  "heads-changed": (payload: DocHandleEncodedChangePayload<T>) => void
  change: (payload: DocHandleChangePayload<T>) => void
  delete: (payload: DocHandleDeletePayload<T>) => void
- unavailable: (payload: DocHandleUnavailablePayload<T>) => void
  "ephemeral-message": (payload: DocHandleEphemeralMessagePayload<T>) => void
  "ephemeral-message-outbound": (
  payload: DocHandleOutboundEphemeralMessagePayload<T>
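As a hedged illustration of the stricter contract in the DocHandle changes above: `doc()` and `heads()` now throw when the handle is not ready instead of returning `undefined`, so callers gate on `whenReady()`/`isReady()` first. The `snapshot` helper below is assumed for illustration, not part of the package:

```typescript
import type { DocHandle } from "@automerge/automerge-repo"

// Assumed usage pattern for the new synchronous accessors.
async function snapshot<T>(handle: DocHandle<T>) {
  await handle.whenReady() // wait for the handle to reach "ready"
  const doc = handle.doc() // synchronous now; throws if the handle isn't ready
  const heads = handle.heads() // likewise throws instead of returning undefined
  return { doc, heads }
}
```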
@@ -0,0 +1,48 @@
+ import { DocHandle } from "./DocHandle.js"
+
+ export type FindProgressState =
+   | "loading"
+   | "ready"
+   | "failed"
+   | "aborted"
+   | "unavailable"
+
+ interface FindProgressBase<T> {
+   state: FindProgressState
+   handle: DocHandle<T>
+ }
+
+ interface FindProgressLoading<T> extends FindProgressBase<T> {
+   state: "loading"
+   progress: number
+ }
+
+ interface FindProgressReady<T> extends FindProgressBase<T> {
+   state: "ready"
+ }
+
+ interface FindProgressFailed<T> extends FindProgressBase<T> {
+   state: "failed"
+   error: Error
+ }
+
+ interface FindProgressUnavailable<T> extends FindProgressBase<T> {
+   state: "unavailable"
+ }
+
+ interface FindProgressAborted<T> extends FindProgressBase<T> {
+   state: "aborted"
+ }
+
+ export type FindProgress<T> =
+   | FindProgressLoading<T>
+   | FindProgressReady<T>
+   | FindProgressFailed<T>
+   | FindProgressUnavailable<T>
+   | FindProgressAborted<T>
+
+ export type FindProgressWithMethods<T> = FindProgress<T> & {
+   next: () => Promise<FindProgressWithMethods<T>>
+   // TODO: i don't like this allowableStates
+   untilReady: (allowableStates: string[]) => Promise<DocHandle<T>>
+ }
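Nothing in this diff shows `FindProgress` being consumed, so the following is only a sketch of narrowing the union on `state`; the import path and the way a `FindProgressWithMethods` value is obtained are assumptions.

```typescript
import type { FindProgressWithMethods } from "./FindProgress.js"

// Drive the progress object until it settles, narrowing on `state`.
async function untilSettled<T>(progress: FindProgressWithMethods<T>) {
  while (progress.state === "loading") {
    console.log("loading, progress:", progress.progress)
    progress = await progress.next()
  }
  if (progress.state === "ready") return progress.handle
  if (progress.state === "failed") throw progress.error
  throw new Error(`find() did not complete: ${progress.state}`)
}
```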