@automerge/automerge-repo 0.2.1 → 1.0.0-alpha.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59)
  1. package/README.md +7 -24
  2. package/dist/DocCollection.d.ts +4 -4
  3. package/dist/DocCollection.d.ts.map +1 -1
  4. package/dist/DocCollection.js +25 -17
  5. package/dist/DocHandle.d.ts +46 -13
  6. package/dist/DocHandle.d.ts.map +1 -1
  7. package/dist/DocHandle.js +104 -53
  8. package/dist/DocUrl.d.ts +38 -18
  9. package/dist/DocUrl.d.ts.map +1 -1
  10. package/dist/DocUrl.js +63 -24
  11. package/dist/Repo.d.ts.map +1 -1
  12. package/dist/Repo.js +9 -9
  13. package/dist/helpers/headsAreSame.d.ts +2 -2
  14. package/dist/helpers/headsAreSame.d.ts.map +1 -1
  15. package/dist/helpers/headsAreSame.js +1 -4
  16. package/dist/helpers/tests/network-adapter-tests.js +10 -10
  17. package/dist/index.d.ts +3 -2
  18. package/dist/index.d.ts.map +1 -1
  19. package/dist/index.js +1 -0
  20. package/dist/network/NetworkAdapter.d.ts +2 -3
  21. package/dist/network/NetworkAdapter.d.ts.map +1 -1
  22. package/dist/network/NetworkSubsystem.d.ts +2 -3
  23. package/dist/network/NetworkSubsystem.d.ts.map +1 -1
  24. package/dist/network/NetworkSubsystem.js +9 -13
  25. package/dist/storage/StorageAdapter.d.ts +9 -5
  26. package/dist/storage/StorageAdapter.d.ts.map +1 -1
  27. package/dist/storage/StorageSubsystem.d.ts +4 -4
  28. package/dist/storage/StorageSubsystem.d.ts.map +1 -1
  29. package/dist/storage/StorageSubsystem.js +109 -31
  30. package/dist/synchronizer/CollectionSynchronizer.d.ts +1 -1
  31. package/dist/synchronizer/CollectionSynchronizer.d.ts.map +1 -1
  32. package/dist/synchronizer/CollectionSynchronizer.js +5 -1
  33. package/dist/synchronizer/DocSynchronizer.d.ts.map +1 -1
  34. package/dist/synchronizer/DocSynchronizer.js +6 -5
  35. package/dist/types.d.ts +6 -0
  36. package/dist/types.d.ts.map +1 -1
  37. package/package.json +8 -5
  38. package/src/DocCollection.ts +32 -22
  39. package/src/DocHandle.ts +119 -77
  40. package/src/DocUrl.ts +90 -0
  41. package/src/Repo.ts +10 -11
  42. package/src/helpers/headsAreSame.ts +3 -5
  43. package/src/helpers/tests/network-adapter-tests.ts +10 -10
  44. package/src/index.ts +7 -5
  45. package/src/network/NetworkAdapter.ts +2 -3
  46. package/src/network/NetworkSubsystem.ts +9 -14
  47. package/src/storage/StorageAdapter.ts +7 -5
  48. package/src/storage/StorageSubsystem.ts +133 -36
  49. package/src/synchronizer/CollectionSynchronizer.ts +10 -2
  50. package/src/synchronizer/DocSynchronizer.ts +7 -6
  51. package/src/types.ts +4 -1
  52. package/test/CollectionSynchronizer.test.ts +1 -1
  53. package/test/DocCollection.test.ts +3 -2
  54. package/test/DocHandle.test.ts +40 -35
  55. package/test/DocSynchronizer.test.ts +3 -2
  56. package/test/Repo.test.ts +134 -27
  57. package/test/StorageSubsystem.test.ts +13 -10
  58. package/test/helpers/DummyNetworkAdapter.ts +2 -2
  59. package/test/helpers/DummyStorageAdapter.ts +8 -4
@@ -1,3 +1,4 @@
1
+ import { stringifyAutomergeUrl, } from "../DocUrl.js";
1
2
  import { DocSynchronizer } from "./DocSynchronizer.js";
2
3
  import { Synchronizer } from "./Synchronizer.js";
3
4
  import debug from "debug";
@@ -16,7 +17,7 @@ export class CollectionSynchronizer extends Synchronizer {
16
17
  /** Returns a synchronizer for the given document, creating one if it doesn't already exist. */
17
18
  #fetchDocSynchronizer(documentId) {
18
19
  if (!this.#docSynchronizers[documentId]) {
19
- const handle = this.repo.find(documentId);
20
+ const handle = this.repo.find(stringifyAutomergeUrl({ documentId }));
20
21
  this.#docSynchronizers[documentId] = this.#initDocSynchronizer(handle);
21
22
  }
22
23
  return this.#docSynchronizers[documentId];
@@ -46,6 +47,9 @@ export class CollectionSynchronizer extends Synchronizer {
46
47
  async receiveSyncMessage(peerId, channelId, message) {
47
48
  log(`onSyncMessage: ${peerId}, ${channelId}, ${message.byteLength}bytes`);
48
49
  const documentId = channelId;
50
+ if (!documentId) {
51
+ throw new Error("received a message with an invalid documentId");
52
+ }
49
53
  const docSynchronizer = await this.#fetchDocSynchronizer(documentId);
50
54
  await docSynchronizer.receiveSyncMessage(peerId, channelId, message);
51
55
  // Initiate sync with any new peers
@@ -1 +1 @@
1
- {"version":3,"file":"DocSynchronizer.d.ts","sourceRoot":"","sources":["../../src/synchronizer/DocSynchronizer.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,SAAS,EAAE,MAAM,iBAAiB,CAAA;AAC3C,OAAO,EAAE,SAAS,EAAE,MAAM,EAAE,MAAM,aAAa,CAAA;AAC/C,OAAO,EAAE,YAAY,EAAE,MAAM,mBAAmB,CAAA;AAIhD;;;GAGG;AACH,qBAAa,eAAgB,SAAQ,YAAY;;IAanC,OAAO,CAAC,MAAM;gBAAN,MAAM,EAAE,SAAS,CAAC,GAAG,CAAC;IAgB1C,IAAI,UAAU,qCAEb;IAuED,OAAO,CAAC,MAAM,EAAE,MAAM;IAItB,SAAS,CAAC,MAAM,EAAE,MAAM;IAkBxB,OAAO,CAAC,MAAM,EAAE,MAAM;IAKtB,kBAAkB,CAChB,MAAM,EAAE,MAAM,EACd,SAAS,EAAE,SAAS,EACpB,OAAO,EAAE,UAAU;CAsCtB"}
1
+ {"version":3,"file":"DocSynchronizer.d.ts","sourceRoot":"","sources":["../../src/synchronizer/DocSynchronizer.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,SAAS,EAAqB,MAAM,iBAAiB,CAAA;AAC9D,OAAO,EAAE,SAAS,EAAE,MAAM,EAAE,MAAM,aAAa,CAAA;AAC/C,OAAO,EAAE,YAAY,EAAE,MAAM,mBAAmB,CAAA;AAIhD;;;GAGG;AACH,qBAAa,eAAgB,SAAQ,YAAY;;IAanC,OAAO,CAAC,MAAM;gBAAN,MAAM,EAAE,SAAS,CAAC,GAAG,CAAC;IAgB1C,IAAI,UAAU,qCAEb;IAwED,OAAO,CAAC,MAAM,EAAE,MAAM;IAItB,SAAS,CAAC,MAAM,EAAE,MAAM;IAkBxB,OAAO,CAAC,MAAM,EAAE,MAAM;IAKtB,kBAAkB,CAChB,MAAM,EAAE,MAAM,EACd,SAAS,EAAE,SAAS,EACpB,OAAO,EAAE,UAAU;CAsCtB"}
@@ -1,4 +1,5 @@
1
1
  import * as A from "@automerge/automerge";
2
+ import { READY, REQUESTING } from "../DocHandle.js";
2
3
  import { Synchronizer } from "./Synchronizer.js";
3
4
  import debug from "debug";
4
5
  /**
@@ -25,7 +26,7 @@ export class DocSynchronizer extends Synchronizer {
25
26
  handle.on("change", () => this.#syncWithPeers());
26
27
  // Process pending sync messages immediately after the handle becomes ready.
27
28
  void (async () => {
28
- await handle.loadAttemptedValue();
29
+ await handle.doc([READY, REQUESTING]);
29
30
  this.#processAllPendingSyncMessages();
30
31
  })();
31
32
  }
@@ -35,7 +36,7 @@ export class DocSynchronizer extends Synchronizer {
35
36
  /// PRIVATE
36
37
  async #syncWithPeers() {
37
38
  this.#log(`syncWithPeers`);
38
- const doc = await this.handle.value();
39
+ const doc = await this.handle.doc();
39
40
  this.#peers.forEach(peerId => this.#sendSyncMessage(peerId, doc));
40
41
  }
41
42
  #getSyncState(peerId) {
@@ -92,7 +93,7 @@ export class DocSynchronizer extends Synchronizer {
92
93
  this.#log(`beginSync: ${peerId}`);
93
94
  // At this point if we don't have anything in our storage, we need to use an empty doc to sync
94
95
  // with; but we don't want to surface that state to the front end
95
- void this.handle.loadAttemptedValue().then(doc => {
96
+ void this.handle.doc([READY, REQUESTING]).then(doc => {
96
97
  // HACK: if we have a sync state already, we round-trip it through the encoding system to make
97
98
  // sure state is preserved. This prevents an infinite loop caused by failed attempts to send
98
99
  // messages during disconnection.
@@ -108,10 +109,10 @@ export class DocSynchronizer extends Synchronizer {
108
109
  this.#peers = this.#peers.filter(p => p !== peerId);
109
110
  }
110
111
  receiveSyncMessage(peerId, channelId, message) {
111
- if (channelId !== this.documentId)
112
+ if (channelId !== this.handle.documentId)
112
113
  throw new Error(`channelId doesn't match documentId`);
113
114
  // We need to block receiving the syncMessages until we've checked local storage
114
- if (!this.handle.isReadyOrRequesting()) {
115
+ if (!this.handle.inState([READY, REQUESTING])) {
115
116
  this.#pendingSyncMessages.push({ peerId, message });
116
117
  return;
117
118
  }
package/dist/types.d.ts CHANGED
@@ -1,6 +1,12 @@
1
1
  export type DocumentId = string & {
2
2
  __documentId: true;
3
3
  };
4
+ export type AutomergeUrl = string & {
5
+ __documentUrl: true;
6
+ };
7
+ export type BinaryDocumentId = Uint8Array & {
8
+ __binaryDocumentId: true;
9
+ };
4
10
  export type PeerId = string & {
5
11
  __peerId: false;
6
12
  };
@@ -1 +1 @@
1
- {"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":"AAAA,MAAM,MAAM,UAAU,GAAG,MAAM,GAAG;IAAE,YAAY,EAAE,IAAI,CAAA;CAAE,CAAA;AACxD,MAAM,MAAM,MAAM,GAAG,MAAM,GAAG;IAAE,QAAQ,EAAE,KAAK,CAAA;CAAE,CAAA;AACjD,MAAM,MAAM,SAAS,GAAG,MAAM,GAAG;IAAE,WAAW,EAAE,KAAK,CAAA;CAAE,CAAA"}
1
+ {"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":"AAAA,MAAM,MAAM,UAAU,GAAG,MAAM,GAAG;IAAE,YAAY,EAAE,IAAI,CAAA;CAAE,CAAA;AACxD,MAAM,MAAM,YAAY,GAAG,MAAM,GAAG;IAAE,aAAa,EAAE,IAAI,CAAA;CAAE,CAAA;AAC3D,MAAM,MAAM,gBAAgB,GAAG,UAAU,GAAG;IAAE,kBAAkB,EAAE,IAAI,CAAA;CAAE,CAAA;AAExE,MAAM,MAAM,MAAM,GAAG,MAAM,GAAG;IAAE,QAAQ,EAAE,KAAK,CAAA;CAAE,CAAA;AACjD,MAAM,MAAM,SAAS,GAAG,MAAM,GAAG;IAAE,WAAW,EAAE,KAAK,CAAA;CAAE,CAAA"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@automerge/automerge-repo",
3
- "version": "0.2.1",
3
+ "version": "1.0.0-alpha.2",
4
4
  "description": "A repository object to manage a collection of automerge documents",
5
5
  "repository": "https://github.com/automerge/automerge-repo",
6
6
  "author": "Peter van Hardenberg <pvh@pvh.ca>",
@@ -22,19 +22,22 @@
22
22
  },
23
23
  "devDependencies": {
24
24
  "@types/debug": "^4.1.7",
25
+ "@types/node": "^20.4.8",
25
26
  "@types/uuid": "^8.3.4",
26
27
  "@types/ws": "^8.5.3",
27
28
  "@typescript-eslint/eslint-plugin": "^5.33.0",
28
29
  "@typescript-eslint/parser": "^5.33.0",
29
- "http-server": "^14.1.0"
30
+ "http-server": "^14.1.0",
31
+ "typescript": "^5.1.6"
30
32
  },
31
33
  "peerDependencies": {
32
- "@automerge/automerge": "^2.1.0-alpha.8"
34
+ "@automerge/automerge": "^2.1.0-alpha.10"
33
35
  },
34
36
  "dependencies": {
37
+ "bs58check": "^3.0.1",
35
38
  "cbor-x": "^1.3.0",
36
39
  "debug": "^4.3.4",
37
- "eventemitter3": "^4.0.7",
40
+ "eventemitter3": "^5.0.1",
38
41
  "fast-sha256": "^1.3.0",
39
42
  "tiny-typed-emitter": "^2.1.0",
40
43
  "ts-node": "^10.9.1",
@@ -62,5 +65,5 @@
62
65
  "publishConfig": {
63
66
  "access": "public"
64
67
  },
65
- "gitHead": "7f048ecaa62eb1246f54773c6b10bada0767497b"
68
+ "gitHead": "b5830dde8f135b694809698aaad2a9fdc79a9898"
66
69
  }
@@ -1,8 +1,14 @@
1
1
  import EventEmitter from "eventemitter3"
2
- import { v4 as uuid } from "uuid"
3
2
  import { DocHandle } from "./DocHandle.js"
4
- import { type DocumentId } from "./types.js"
3
+ import { DocumentId, type BinaryDocumentId, AutomergeUrl } from "./types.js"
5
4
  import { type SharePolicy } from "./Repo.js"
5
+ import {
6
+ documentIdToBinary,
7
+ binaryToDocumentId,
8
+ generateAutomergeUrl,
9
+ isValidAutomergeUrl,
10
+ parseAutomergeUrl,
11
+ } from "./DocUrl.js"
6
12
 
7
13
  /**
8
14
  * A DocCollection is a collection of DocHandles. It supports creating new documents and finding
@@ -30,6 +36,7 @@ export class DocCollection extends EventEmitter<DocCollectionEvents> {
30
36
  if (this.#handleCache[documentId]) return this.#handleCache[documentId]
31
37
 
32
38
  // If not, create a new handle, cache it, and return it
39
+ if (!documentId) throw new Error(`Invalid documentId ${documentId}`)
33
40
  const handle = new DocHandle<T>(documentId, { isNew })
34
41
  this.#handleCache[documentId] = handle
35
42
  return handle
@@ -64,8 +71,9 @@ export class DocCollection extends EventEmitter<DocCollectionEvents> {
64
71
  // or
65
72
  // - pass a "reify" function that takes a `<any>` and returns `<T>`
66
73
 
67
- const documentId = uuid() as DocumentId
68
- const handle = this.#getHandle<T>(documentId, true) as DocHandle<T>
74
+ // Generate a new UUID and store it in the buffer
75
+ const { encodedDocumentId } = parseAutomergeUrl(generateAutomergeUrl())
76
+ const handle = this.#getHandle<T>(encodedDocumentId, true) as DocHandle<T>
69
77
  this.emit("document", { handle })
70
78
  return handle
71
79
  }
@@ -76,35 +84,37 @@ export class DocCollection extends EventEmitter<DocCollectionEvents> {
76
84
  */
77
85
  find<T>(
78
86
  /** The documentId of the handle to retrieve */
79
- documentId: DocumentId
87
+ automergeUrl: AutomergeUrl
80
88
  ): DocHandle<T> {
81
- // TODO: we want a way to make sure we don't yield intermediate document states during initial synchronization
89
+ if (!isValidAutomergeUrl(automergeUrl)) {
90
+ throw new Error(`Invalid AutomergeUrl: '${automergeUrl}'`)
91
+ }
82
92
 
83
- // If we already have a handle, return it
84
- if (this.#handleCache[documentId])
85
- return this.#handleCache[documentId] as DocHandle<T>
86
-
87
- // Otherwise, create a new handle
88
- const handle = this.#getHandle<T>(documentId, false) as DocHandle<T>
89
-
90
- // we don't directly initialize a value here because the StorageSubsystem and Synchronizers go
91
- // and get the data asynchronously and block on read instead of on create
93
+ const { encodedDocumentId } = parseAutomergeUrl(automergeUrl)
94
+ // If we have the handle cached, return it
95
+ if (this.#handleCache[encodedDocumentId])
96
+ return this.#handleCache[encodedDocumentId]
92
97
 
93
- // emit a document event to advertise interest in this document
98
+ const handle = this.#getHandle<T>(encodedDocumentId, false) as DocHandle<T>
94
99
  this.emit("document", { handle })
95
-
96
100
  return handle
97
101
  }
98
102
 
99
103
  delete(
100
104
  /** The documentId of the handle to delete */
101
- documentId: DocumentId
105
+ id: DocumentId | AutomergeUrl
102
106
  ) {
103
- const handle = this.#getHandle(documentId, false)
107
+ if (isValidAutomergeUrl(id)) {
108
+ ;({ encodedDocumentId: id } = parseAutomergeUrl(id))
109
+ }
110
+
111
+ const handle = this.#getHandle(id, false)
104
112
  handle.delete()
105
113
 
106
- delete this.#handleCache[documentId]
107
- this.emit("delete-document", { documentId })
114
+ delete this.#handleCache[id]
115
+ this.emit("delete-document", {
116
+ encodedDocumentId: id,
117
+ })
108
118
  }
109
119
  }
110
120
 
@@ -119,5 +129,5 @@ interface DocumentPayload {
119
129
  }
120
130
 
121
131
  interface DeleteDocumentPayload {
122
- documentId: DocumentId
132
+ encodedDocumentId: DocumentId
123
133
  }
package/src/DocHandle.ts CHANGED
@@ -17,7 +17,8 @@ import { waitFor } from "xstate/lib/waitFor.js"
17
17
  import { headsAreSame } from "./helpers/headsAreSame.js"
18
18
  import { pause } from "./helpers/pause.js"
19
19
  import { TimeoutError, withTimeout } from "./helpers/withTimeout.js"
20
- import type { ChannelId, DocumentId, PeerId } from "./types.js"
20
+ import type { ChannelId, DocumentId, PeerId, AutomergeUrl } from "./types.js"
21
+ import { stringifyAutomergeUrl } from "./DocUrl.js"
21
22
 
22
23
  /** DocHandle is a wrapper around a single Automerge document that lets us listen for changes. */
23
24
  export class DocHandle<T> //
@@ -28,30 +29,32 @@ export class DocHandle<T> //
28
29
  #machine: DocHandleXstateMachine<T>
29
30
  #timeoutDelay: number
30
31
 
32
+ get url(): AutomergeUrl {
33
+ return stringifyAutomergeUrl({ documentId: this.documentId })
34
+ }
35
+
31
36
  constructor(
32
37
  public documentId: DocumentId,
33
- { isNew = false, timeoutDelay = 700000 }: DocHandleOptions = {}
38
+ { isNew = false, timeoutDelay = 60_000 }: DocHandleOptions = {}
34
39
  ) {
35
40
  super()
36
41
  this.#timeoutDelay = timeoutDelay
37
- this.#log = debug(`automerge-repo:dochandle:${documentId.slice(0, 5)}`)
42
+ this.#log = debug(`automerge-repo:dochandle:${this.documentId.slice(0, 5)}`)
38
43
 
39
44
  // initial doc
40
- const doc = A.init<T>({
41
- patchCallback: (patches, patchInfo) =>
42
- this.emit("patch", { handle: this, patches, patchInfo }),
43
- })
45
+ const doc = A.init<T>()
44
46
 
45
47
  /**
46
48
  * Internally we use a state machine to orchestrate document loading and/or syncing, in order to
47
49
  * avoid requesting data we already have, or surfacing intermediate values to the consumer.
48
50
  *
49
- * ┌─────────┐ ┌────────────┐
50
- * ┌───────┐ ┌──FIND──┤ loading ├─REQUEST──►│ requesting ├─UPDATE──┐
51
+ * ┌─────────────────────┬─────────TIMEOUT────►┌────────┐
52
+ * ┌───┴─────┐ ┌───┴────────┐ │ failed
53
+ * ┌───────┐ ┌──FIND──┤ loading ├─REQUEST──►│ requesting ├─UPDATE──┐ └────────┘
51
54
  * │ idle ├──┤ └───┬─────┘ └────────────┘ │
52
- * └───────┘ │ │ └─►┌─────────┐
53
- * │ └───────LOAD───────────────────────────────►│ ready │
54
- * └──CREATE───────────────────────────────────────────────►└─────────┘
55
+ * └───────┘ │ │ └─►┌────────┐
56
+ * │ └───────LOAD───────────────────────────────►│ ready │
57
+ * └──CREATE───────────────────────────────────────────────►└────────┘
55
58
  */
56
59
  this.#machine = interpret(
57
60
  createMachine<DocHandleContext<T>, DocHandleEvent<T>>(
@@ -60,7 +63,7 @@ export class DocHandle<T> //
60
63
 
61
64
  id: "docHandle",
62
65
  initial: IDLE,
63
- context: { documentId, doc },
66
+ context: { documentId: this.documentId, doc },
64
67
  states: {
65
68
  idle: {
66
69
  on: {
@@ -74,12 +77,18 @@ export class DocHandle<T> //
74
77
  },
75
78
  loading: {
76
79
  on: {
77
- // LOAD is called by the Repo if the document is found in storage
78
- LOAD: { actions: "onLoad", target: READY },
80
+ // UPDATE is called by the Repo if the document is found in storage
81
+ UPDATE: { actions: "onUpdate", target: READY },
79
82
  // REQUEST is called by the Repo if the document is not found in storage
80
83
  REQUEST: { target: REQUESTING },
81
84
  DELETE: { actions: "onDelete", target: DELETED },
82
85
  },
86
+ after: [
87
+ {
88
+ delay: this.#timeoutDelay,
89
+ target: FAILED,
90
+ },
91
+ ],
83
92
  },
84
93
  requesting: {
85
94
  on: {
@@ -89,6 +98,12 @@ export class DocHandle<T> //
89
98
  REQUEST_COMPLETE: { target: READY },
90
99
  DELETE: { actions: "onDelete", target: DELETED },
91
100
  },
101
+ after: [
102
+ {
103
+ delay: this.#timeoutDelay,
104
+ target: FAILED,
105
+ },
106
+ ],
92
107
  },
93
108
  ready: {
94
109
  on: {
@@ -97,22 +112,18 @@ export class DocHandle<T> //
97
112
  DELETE: { actions: "onDelete", target: DELETED },
98
113
  },
99
114
  },
100
- error: {},
101
- deleted: {},
115
+ failed: {
116
+ type: "final",
117
+ },
118
+ deleted: {
119
+ type: "final",
120
+ },
102
121
  },
103
122
  },
104
123
 
105
124
  {
106
125
  actions: {
107
- /** Apply the binary changes from storage and put the updated doc on context */
108
- onLoad: assign((context, { payload }: LoadEvent) => {
109
- const { binary } = payload
110
- const { doc } = context
111
- const newDoc = A.loadIncremental(doc, binary)
112
- return { doc: newDoc }
113
- }),
114
-
115
- /** Put the updated doc on context; if it's different, emit a `change` event */
126
+ /** Put the updated doc on context */
116
127
  onUpdate: assign((context, { payload }: UpdateEvent<T>) => {
117
128
  const { doc: oldDoc } = context
118
129
 
@@ -133,33 +144,36 @@ export class DocHandle<T> //
133
144
  const oldDoc = history?.context?.doc
134
145
  const newDoc = context.doc
135
146
 
136
- const docChanged = newDoc && oldDoc && !headsAreSame(newDoc, oldDoc)
147
+ console.log(`${event} ${state}`, newDoc)
148
+
149
+ const docChanged = newDoc && oldDoc && !headsAreSame(A.getHeads(newDoc), A.getHeads(oldDoc))
137
150
  if (docChanged) {
138
- this.emit("change", { handle: this, doc: newDoc })
151
+ this.emit("heads-changed", { handle: this, doc: newDoc })
152
+
153
+ const patches = A.diff(newDoc, A.getHeads(oldDoc), A.getHeads(newDoc))
154
+ if (patches.length > 0) {
155
+ const source = "change" // TODO: pass along the source (load/change/network)
156
+ this.emit("change", {
157
+ handle: this,
158
+ doc: newDoc,
159
+ patches,
160
+ patchInfo: { before: oldDoc, after: newDoc, source },
161
+ })
162
+ }
163
+
139
164
  if (!this.isReady()) {
140
165
  this.#machine.send(REQUEST_COMPLETE)
141
166
  }
142
167
  }
143
- this.#log(`${event} → ${state}`, this.#doc)
144
168
  })
145
169
  .start()
146
170
 
147
171
  this.#machine.send(isNew ? CREATE : FIND)
148
172
  }
149
173
 
150
- get doc() {
151
- if (!this.isReady()) {
152
- throw new Error(
153
- `DocHandle#${this.documentId} is not ready. Check \`handle.isReady()\` before accessing the document.`
154
- )
155
- }
156
-
157
- return this.#doc
158
- }
159
-
160
174
  // PRIVATE
161
175
 
162
- /** Returns the current document */
176
+ /** Returns the current document, regardless of state */
163
177
  get #doc() {
164
178
  return this.#machine?.getSnapshot().context.doc
165
179
  }
@@ -175,7 +189,7 @@ export class DocHandle<T> //
175
189
  return Promise.any(
176
190
  awaitStates.map(state =>
177
191
  waitFor(this.#machine, s => s.matches(state), {
178
- timeout: this.#timeoutDelay, // match the delay above
192
+ timeout: this.#timeoutDelay * 2000, // longer than the delay above for testing
179
193
  })
180
194
  )
181
195
  )
@@ -183,19 +197,48 @@ export class DocHandle<T> //
183
197
 
184
198
  // PUBLIC
185
199
 
186
- isReady = () => this.#state === READY
187
- isReadyOrRequesting = () =>
188
- this.#state === READY || this.#state === REQUESTING
189
- isDeleted = () => this.#state === DELETED
200
+ /**
201
+ * Checks if the document is ready for accessing or changes.
202
+ * Note that for documents already stored locally this occurs before synchronization
203
+ * with any peers. We do not currently have an equivalent `whenSynced()`.
204
+ */
205
+ isReady = () => this.inState([HandleState.READY])
206
+ /**
207
+ * Checks if this document has been marked as deleted.
208
+ * Deleted documents are removed from local storage and the sync process.
209
+ * It's not currently possible at runtime to undelete a document.
210
+ * @returns true if the document has been marked as deleted
211
+ */
212
+ isDeleted = () => this.inState([HandleState.DELETED])
213
+ inState = (states: HandleState[]) =>
214
+ states.some(this.#machine?.getSnapshot().matches)
215
+
216
+ get state() {
217
+ return this.#machine?.getSnapshot().value
218
+ }
190
219
 
191
220
  /**
192
- * Returns the current document, waiting for the handle to be ready if necessary.
221
+ * Use this to block until the document handle has finished loading.
222
+ * The async equivalent to checking `inState()`.
223
+ * @param awaitStates = [READY]
224
+ * @returns
193
225
  */
194
- async value(awaitStates: HandleState[] = [READY]) {
226
+ async whenReady(awaitStates: HandleState[] = [READY]): Promise<void> {
227
+ await withTimeout(this.#statePromise(awaitStates), this.#timeoutDelay)
228
+ }
229
+
230
+ /**
231
+ * Returns the current state of the Automerge document this handle manages.
232
+ * Note that this waits for the handle to be ready if necessary, and currently, if
233
+ * loading (or synchronization) fails, will never resolve.
234
+ *
235
+ * @param {awaitStates=[READY]} optional states to wait for, such as "LOADING". mostly for internal use.
236
+ */
237
+ async doc(awaitStates: HandleState[] = [READY]): Promise<A.Doc<T>> {
195
238
  await pause() // yield one tick because reasons
196
239
  try {
197
240
  // wait for the document to enter one of the desired states
198
- await withTimeout(this.#statePromise(awaitStates), this.#timeoutDelay)
241
+ await this.#statePromise(awaitStates)
199
242
  } catch (error) {
200
243
  if (error instanceof TimeoutError)
201
244
  throw new Error(`DocHandle: timed out loading ${this.documentId}`)
@@ -205,20 +248,29 @@ export class DocHandle<T> //
205
248
  return this.#doc
206
249
  }
207
250
 
208
- async loadAttemptedValue() {
209
- return this.value([READY, REQUESTING])
210
- }
211
-
212
- /** `load` is called by the repo when the document is found in storage */
213
- load(binary: Uint8Array) {
214
- if (binary.length) {
215
- this.#machine.send(LOAD, { payload: { binary } })
251
+ /**
252
+ * Returns the current state of the Automerge document this handle manages, or undefined.
253
+ * Useful in a synchronous context. Consider using `await handle.doc()` instead, check `isReady()`,
254
+ * or use `whenReady()` if you want to make sure loading is complete first.
255
+ *
256
+ * Do not confuse this with the SyncState of the document, which describes the state of the synchronization process.
257
+ *
258
+ * Note that `undefined` is not a valid Automerge document so the return from this function is unambiguous.
259
+ * @returns the current document, or undefined if the document is not ready
260
+ */
261
+ docSync(): A.Doc<T> | undefined {
262
+ if (!this.isReady()) {
263
+ return undefined
216
264
  }
265
+
266
+ return this.#doc
217
267
  }
218
268
 
219
269
  /** `update` is called by the repo when we receive changes from the network */
220
270
  update(callback: (doc: A.Doc<T>) => A.Doc<T>) {
221
- this.#machine.send(UPDATE, { payload: { callback } })
271
+ this.#machine.send(UPDATE, {
272
+ payload: { callback },
273
+ })
222
274
  }
223
275
 
224
276
  /** `change` is called by the repo when the document is changed locally */
@@ -250,7 +302,7 @@ export class DocHandle<T> //
250
302
  this.#machine.send(UPDATE, {
251
303
  payload: {
252
304
  callback: (doc: A.Doc<T>) => {
253
- return A.changeAt(doc, heads, options, callback)
305
+ return A.changeAt(doc, heads, options, callback).newDoc
254
306
  },
255
307
  },
256
308
  })
@@ -280,7 +332,7 @@ export interface DocHandleMessagePayload {
280
332
  data: Uint8Array
281
333
  }
282
334
 
283
- export interface DocHandleChangePayload<T> {
335
+ export interface DocHandleEncodedChangePayload<T> {
284
336
  handle: DocHandle<T>
285
337
  doc: A.Doc<T>
286
338
  }
@@ -289,15 +341,16 @@ export interface DocHandleDeletePayload<T> {
289
341
  handle: DocHandle<T>
290
342
  }
291
343
 
292
- export interface DocHandlePatchPayload<T> {
344
+ export interface DocHandleChangePayload<T> {
293
345
  handle: DocHandle<T>
346
+ doc: A.Doc<T>
294
347
  patches: A.Patch[]
295
348
  patchInfo: A.PatchInfo<T>
296
349
  }
297
350
 
298
351
  export interface DocHandleEvents<T> {
352
+ "heads-changed": (payload: DocHandleEncodedChangePayload<T>) => void
299
353
  change: (payload: DocHandleChangePayload<T>) => void
300
- patch: (payload: DocHandlePatchPayload<T>) => void
301
354
  delete: (payload: DocHandleDeletePayload<T>) => void
302
355
  }
303
356
 
@@ -310,7 +363,7 @@ export const HandleState = {
310
363
  LOADING: "loading",
311
364
  REQUESTING: "requesting",
312
365
  READY: "ready",
313
- ERROR: "error",
366
+ FAILED: "failed",
314
367
  DELETED: "deleted",
315
368
  } as const
316
369
  export type HandleState = (typeof HandleState)[keyof typeof HandleState]
@@ -325,7 +378,7 @@ type DocHandleMachineState = {
325
378
  // context
326
379
 
327
380
  interface DocHandleContext<T> {
328
- documentId: string
381
+ documentId: DocumentId
329
382
  doc: A.Doc<T>
330
383
  }
331
384
 
@@ -333,7 +386,6 @@ interface DocHandleContext<T> {
333
386
 
334
387
  export const Event = {
335
388
  CREATE: "CREATE",
336
- LOAD: "LOAD",
337
389
  FIND: "FIND",
338
390
  REQUEST: "REQUEST",
339
391
  REQUEST_COMPLETE: "REQUEST_COMPLETE",
@@ -344,7 +396,6 @@ export const Event = {
344
396
  type Event = (typeof Event)[keyof typeof Event]
345
397
 
346
398
  type CreateEvent = { type: typeof CREATE; payload: { documentId: string } }
347
- type LoadEvent = { type: typeof LOAD; payload: { binary: Uint8Array } }
348
399
  type FindEvent = { type: typeof FIND; payload: { documentId: string } }
349
400
  type RequestEvent = { type: typeof REQUEST }
350
401
  type RequestCompleteEvent = { type: typeof REQUEST_COMPLETE }
@@ -357,7 +408,6 @@ type TimeoutEvent = { type: typeof TIMEOUT }
357
408
 
358
409
  type DocHandleEvent<T> =
359
410
  | CreateEvent
360
- | LoadEvent
361
411
  | FindEvent
362
412
  | RequestEvent
363
413
  | RequestCompleteEvent
@@ -383,14 +433,6 @@ type DocHandleXstateMachine<T> = Interpreter<
383
433
 
384
434
  // CONSTANTS
385
435
 
386
- const { IDLE, LOADING, REQUESTING, READY, ERROR, DELETED } = HandleState
387
- const {
388
- CREATE,
389
- LOAD,
390
- FIND,
391
- REQUEST,
392
- UPDATE,
393
- TIMEOUT,
394
- DELETE,
395
- REQUEST_COMPLETE,
396
- } = Event
436
+ export const { IDLE, LOADING, REQUESTING, READY, FAILED, DELETED } = HandleState
437
+ const { CREATE, FIND, REQUEST, UPDATE, TIMEOUT, DELETE, REQUEST_COMPLETE } =
438
+ Event