@automerge/automerge-repo 2.0.0-alpha.7 → 2.0.0-collectionsync-alpha.1

This diff shows the contents of publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
Files changed (164)
  1. package/dist/CollectionHandle.d.ts +14 -0
  2. package/dist/CollectionHandle.d.ts.map +1 -0
  3. package/dist/CollectionHandle.js +37 -0
  4. package/dist/DocHandle.d.ts +37 -6
  5. package/dist/DocHandle.d.ts.map +1 -1
  6. package/dist/DocHandle.js +64 -6
  7. package/dist/DocUrl.d.ts +47 -0
  8. package/dist/DocUrl.d.ts.map +1 -0
  9. package/dist/DocUrl.js +72 -0
  10. package/dist/EphemeralData.d.ts +20 -0
  11. package/dist/EphemeralData.d.ts.map +1 -0
  12. package/dist/EphemeralData.js +1 -0
  13. package/dist/Repo.d.ts +28 -7
  14. package/dist/Repo.d.ts.map +1 -1
  15. package/dist/Repo.js +142 -143
  16. package/dist/ferigan.d.ts +51 -0
  17. package/dist/ferigan.d.ts.map +1 -0
  18. package/dist/ferigan.js +98 -0
  19. package/dist/helpers/tests/storage-adapter-tests.d.ts +2 -2
  20. package/dist/helpers/tests/storage-adapter-tests.d.ts.map +1 -1
  21. package/dist/helpers/tests/storage-adapter-tests.js +19 -39
  22. package/dist/index.d.ts +2 -0
  23. package/dist/index.d.ts.map +1 -1
  24. package/dist/index.js +1 -0
  25. package/dist/network/NetworkSubsystem.d.ts +1 -0
  26. package/dist/network/NetworkSubsystem.d.ts.map +1 -1
  27. package/dist/network/NetworkSubsystem.js +3 -0
  28. package/dist/network/messages.d.ts +7 -1
  29. package/dist/network/messages.d.ts.map +1 -1
  30. package/dist/network/messages.js +2 -1
  31. package/dist/src/DocHandle.d.ts +182 -0
  32. package/dist/src/DocHandle.d.ts.map +1 -0
  33. package/dist/src/DocHandle.js +405 -0
  34. package/dist/src/DocUrl.d.ts +49 -0
  35. package/dist/src/DocUrl.d.ts.map +1 -0
  36. package/dist/src/DocUrl.js +72 -0
  37. package/dist/src/EphemeralData.d.ts +19 -0
  38. package/dist/src/EphemeralData.d.ts.map +1 -0
  39. package/dist/src/EphemeralData.js +1 -0
  40. package/dist/src/Repo.d.ts +74 -0
  41. package/dist/src/Repo.d.ts.map +1 -0
  42. package/dist/src/Repo.js +208 -0
  43. package/dist/src/helpers/arraysAreEqual.d.ts +2 -0
  44. package/dist/src/helpers/arraysAreEqual.d.ts.map +1 -0
  45. package/dist/src/helpers/arraysAreEqual.js +2 -0
  46. package/dist/src/helpers/cbor.d.ts +4 -0
  47. package/dist/src/helpers/cbor.d.ts.map +1 -0
  48. package/dist/src/helpers/cbor.js +8 -0
  49. package/dist/src/helpers/eventPromise.d.ts +11 -0
  50. package/dist/src/helpers/eventPromise.d.ts.map +1 -0
  51. package/dist/src/helpers/eventPromise.js +7 -0
  52. package/dist/src/helpers/headsAreSame.d.ts +2 -0
  53. package/dist/src/helpers/headsAreSame.d.ts.map +1 -0
  54. package/dist/src/helpers/headsAreSame.js +4 -0
  55. package/dist/src/helpers/mergeArrays.d.ts +2 -0
  56. package/dist/src/helpers/mergeArrays.d.ts.map +1 -0
  57. package/dist/src/helpers/mergeArrays.js +15 -0
  58. package/dist/src/helpers/pause.d.ts +6 -0
  59. package/dist/src/helpers/pause.d.ts.map +1 -0
  60. package/dist/src/helpers/pause.js +10 -0
  61. package/dist/src/helpers/tests/network-adapter-tests.d.ts +21 -0
  62. package/dist/src/helpers/tests/network-adapter-tests.d.ts.map +1 -0
  63. package/dist/src/helpers/tests/network-adapter-tests.js +122 -0
  64. package/dist/src/helpers/withTimeout.d.ts +12 -0
  65. package/dist/src/helpers/withTimeout.d.ts.map +1 -0
  66. package/dist/src/helpers/withTimeout.js +24 -0
  67. package/dist/src/index.d.ts +53 -0
  68. package/dist/src/index.d.ts.map +1 -0
  69. package/dist/src/index.js +40 -0
  70. package/dist/src/network/NetworkAdapter.d.ts +26 -0
  71. package/dist/src/network/NetworkAdapter.d.ts.map +1 -0
  72. package/dist/src/network/NetworkAdapter.js +4 -0
  73. package/dist/src/network/NetworkSubsystem.d.ts +23 -0
  74. package/dist/src/network/NetworkSubsystem.d.ts.map +1 -0
  75. package/dist/src/network/NetworkSubsystem.js +120 -0
  76. package/dist/src/network/messages.d.ts +85 -0
  77. package/dist/src/network/messages.d.ts.map +1 -0
  78. package/dist/src/network/messages.js +23 -0
  79. package/dist/src/storage/StorageAdapter.d.ts +14 -0
  80. package/dist/src/storage/StorageAdapter.d.ts.map +1 -0
  81. package/dist/src/storage/StorageAdapter.js +1 -0
  82. package/dist/src/storage/StorageSubsystem.d.ts +12 -0
  83. package/dist/src/storage/StorageSubsystem.d.ts.map +1 -0
  84. package/dist/src/storage/StorageSubsystem.js +145 -0
  85. package/dist/src/synchronizer/CollectionSynchronizer.d.ts +25 -0
  86. package/dist/src/synchronizer/CollectionSynchronizer.d.ts.map +1 -0
  87. package/dist/src/synchronizer/CollectionSynchronizer.js +106 -0
  88. package/dist/src/synchronizer/DocSynchronizer.d.ts +29 -0
  89. package/dist/src/synchronizer/DocSynchronizer.d.ts.map +1 -0
  90. package/dist/src/synchronizer/DocSynchronizer.js +263 -0
  91. package/dist/src/synchronizer/Synchronizer.d.ts +9 -0
  92. package/dist/src/synchronizer/Synchronizer.d.ts.map +1 -0
  93. package/dist/src/synchronizer/Synchronizer.js +2 -0
  94. package/dist/src/types.d.ts +16 -0
  95. package/dist/src/types.d.ts.map +1 -0
  96. package/dist/src/types.js +1 -0
  97. package/dist/storage/StorageAdapter.d.ts +9 -0
  98. package/dist/storage/StorageAdapter.d.ts.map +1 -1
  99. package/dist/storage/StorageAdapter.js +33 -0
  100. package/dist/storage/StorageSubsystem.d.ts +12 -2
  101. package/dist/storage/StorageSubsystem.d.ts.map +1 -1
  102. package/dist/storage/StorageSubsystem.js +42 -100
  103. package/dist/synchronizer/CollectionSynchronizer.d.ts +4 -2
  104. package/dist/synchronizer/CollectionSynchronizer.d.ts.map +1 -1
  105. package/dist/synchronizer/CollectionSynchronizer.js +28 -15
  106. package/dist/synchronizer/DocSynchronizer.d.ts +6 -5
  107. package/dist/synchronizer/DocSynchronizer.d.ts.map +1 -1
  108. package/dist/synchronizer/DocSynchronizer.js +76 -178
  109. package/dist/synchronizer/Synchronizer.d.ts +11 -0
  110. package/dist/synchronizer/Synchronizer.d.ts.map +1 -1
  111. package/dist/test/CollectionSynchronizer.test.d.ts +2 -0
  112. package/dist/test/CollectionSynchronizer.test.d.ts.map +1 -0
  113. package/dist/test/CollectionSynchronizer.test.js +57 -0
  114. package/dist/test/DocHandle.test.d.ts +2 -0
  115. package/dist/test/DocHandle.test.d.ts.map +1 -0
  116. package/dist/test/DocHandle.test.js +238 -0
  117. package/dist/test/DocSynchronizer.test.d.ts +2 -0
  118. package/dist/test/DocSynchronizer.test.d.ts.map +1 -0
  119. package/dist/test/DocSynchronizer.test.js +111 -0
  120. package/dist/test/Network.test.d.ts +2 -0
  121. package/dist/test/Network.test.d.ts.map +1 -0
  122. package/dist/test/Network.test.js +11 -0
  123. package/dist/test/Repo.test.d.ts +2 -0
  124. package/dist/test/Repo.test.d.ts.map +1 -0
  125. package/dist/test/Repo.test.js +568 -0
  126. package/dist/test/StorageSubsystem.test.d.ts +2 -0
  127. package/dist/test/StorageSubsystem.test.d.ts.map +1 -0
  128. package/dist/test/StorageSubsystem.test.js +56 -0
  129. package/dist/test/helpers/DummyNetworkAdapter.d.ts +9 -0
  130. package/dist/test/helpers/DummyNetworkAdapter.d.ts.map +1 -0
  131. package/dist/test/helpers/DummyNetworkAdapter.js +15 -0
  132. package/dist/test/helpers/DummyStorageAdapter.d.ts +16 -0
  133. package/dist/test/helpers/DummyStorageAdapter.d.ts.map +1 -0
  134. package/dist/test/helpers/DummyStorageAdapter.js +33 -0
  135. package/dist/test/helpers/generate-large-object.d.ts +5 -0
  136. package/dist/test/helpers/generate-large-object.d.ts.map +1 -0
  137. package/dist/test/helpers/generate-large-object.js +9 -0
  138. package/dist/test/helpers/getRandomItem.d.ts +2 -0
  139. package/dist/test/helpers/getRandomItem.d.ts.map +1 -0
  140. package/dist/test/helpers/getRandomItem.js +4 -0
  141. package/dist/test/types.d.ts +4 -0
  142. package/dist/test/types.d.ts.map +1 -0
  143. package/dist/test/types.js +1 -0
  144. package/package.json +3 -3
  145. package/src/CollectionHandle.ts +54 -0
  146. package/src/DocHandle.ts +80 -8
  147. package/src/Repo.ts +192 -183
  148. package/src/ferigan.ts +184 -0
  149. package/src/helpers/tests/storage-adapter-tests.ts +31 -62
  150. package/src/index.ts +2 -0
  151. package/src/network/NetworkSubsystem.ts +4 -0
  152. package/src/network/messages.ts +11 -2
  153. package/src/storage/StorageAdapter.ts +42 -0
  154. package/src/storage/StorageSubsystem.ts +59 -119
  155. package/src/synchronizer/CollectionSynchronizer.ts +34 -26
  156. package/src/synchronizer/DocSynchronizer.ts +84 -231
  157. package/src/synchronizer/Synchronizer.ts +14 -0
  158. package/test/CollectionSynchronizer.test.ts +4 -2
  159. package/test/DocHandle.test.ts +72 -13
  160. package/test/DocSynchronizer.test.ts +6 -1
  161. package/test/RemoteHeadsSubscriptions.test.ts +1 -1
  162. package/test/Repo.test.ts +225 -117
  163. package/test/StorageSubsystem.test.ts +20 -16
  164. package/test/remoteHeads.test.ts +1 -1
package/dist/src/DocHandle.js
@@ -0,0 +1,405 @@
+ import * as A from "@automerge/automerge/next"
+ import debug from "debug"
+ import { EventEmitter } from "eventemitter3"
+ import { assign, createMachine, interpret } from "xstate"
+ import { waitFor } from "xstate/lib/waitFor.js"
+ import { headsAreSame } from "./helpers/headsAreSame.js"
+ import { pause } from "./helpers/pause.js"
+ import { TimeoutError, withTimeout } from "./helpers/withTimeout.js"
+ import { stringifyAutomergeUrl } from "./DocUrl.js"
+ import { encode } from "./helpers/cbor.js"
+ /** DocHandle is a wrapper around a single Automerge document that lets us
+  * listen for changes and notify the network and storage of new changes.
+  *
+  * @remarks
+  * A `DocHandle` represents a document which is being managed by a {@link Repo}.
+  * To obtain a `DocHandle` use {@link Repo.find} or {@link Repo.create}.
+  *
+  * To modify the underlying document use either {@link DocHandle.change} or
+  * {@link DocHandle.changeAt}. These methods will notify the `Repo` that some
+  * change has occurred and the `Repo` will save any new changes to the
+  * attached {@link StorageAdapter} and send sync messages to connected peers.
+  * */
+ export class DocHandle //
+   extends EventEmitter
+ {
+   documentId
+   #log
+   #machine
+   #timeoutDelay
+   /** The URL of this document
+    *
+    * @remarks
+    * This can be used to request the document from an instance of {@link Repo}
+    */
+   get url() {
+     return stringifyAutomergeUrl({ documentId: this.documentId })
+   }
+   /** @hidden */
+   constructor(documentId, { isNew = false, timeoutDelay = 60_000 } = {}) {
+     super()
+     this.documentId = documentId
+     this.#timeoutDelay = timeoutDelay
+     this.#log = debug(`automerge-repo:dochandle:${this.documentId.slice(0, 5)}`)
+     // initial doc
+     let doc = A.init()
+     // Make an empty change so that we have something to save to disk
+     if (isNew) {
+       doc = A.emptyChange(doc, {})
+     }
+     /**
+      * Internally we use a state machine to orchestrate document loading and/or syncing, in order to
+      * avoid requesting data we already have, or surfacing intermediate values to the consumer.
+      *
+      * ┌─────────────────────┬─────────TIMEOUT────►┌────────┐
+      * ┌───┴─────┐ ┌───┴────────┐ │ failed │
+      * ┌───────┐ ┌──FIND──┤ loading ├─REQUEST──►│ requesting ├─UPDATE──┐ └────────┘
+      * │ idle ├──┤ └───┬─────┘ └────────────┘ │
+      * └───────┘ │ │ └─►┌────────┐
+      * │ └───────LOAD───────────────────────────────►│ ready │
+      * └──CREATE───────────────────────────────────────────────►└────────┘
+      */
+     this.#machine = interpret(
+       createMachine(
+         {
+           predictableActionArguments: true,
+           id: "docHandle",
+           initial: IDLE,
+           context: { documentId: this.documentId, doc },
+           states: {
+             idle: {
+               on: {
+                 // If we're creating a new document, we don't need to load anything
+                 CREATE: { target: READY },
+                 // If we're accessing an existing document, we need to request it from storage
+                 // and/or the network
+                 FIND: { target: LOADING },
+                 DELETE: { actions: "onDelete", target: DELETED },
+               },
+             },
+             loading: {
+               on: {
+                 // UPDATE is called by the Repo if the document is found in storage
+                 UPDATE: { actions: "onUpdate", target: READY },
+                 // REQUEST is called by the Repo if the document is not found in storage
+                 REQUEST: { target: REQUESTING },
+                 // AWAIT_NETWORK is called by the repo if the document is not found in storage but the network is not yet ready
+                 AWAIT_NETWORK: { target: AWAITING_NETWORK },
+                 DELETE: { actions: "onDelete", target: DELETED },
+               },
+               after: [
+                 {
+                   delay: this.#timeoutDelay,
+                   target: FAILED,
+                 },
+               ],
+             },
+             awaitingNetwork: {
+               on: {
+                 NETWORK_READY: { target: REQUESTING },
+               },
+             },
+             requesting: {
+               on: {
+                 MARK_UNAVAILABLE: {
+                   target: UNAVAILABLE,
+                   actions: "onUnavailable",
+                 },
+                 // UPDATE is called by the Repo when we receive changes from the network
+                 UPDATE: { actions: "onUpdate" },
+                 // REQUEST_COMPLETE is called from `onUpdate` when the doc has been fully loaded from the network
+                 REQUEST_COMPLETE: { target: READY },
+                 DELETE: { actions: "onDelete", target: DELETED },
+               },
+               after: [
+                 {
+                   delay: this.#timeoutDelay,
+                   target: FAILED,
+                 },
+               ],
+             },
+             ready: {
+               on: {
+                 // UPDATE is called by the Repo when we receive changes from the network
+                 UPDATE: { actions: "onUpdate", target: READY },
+                 DELETE: { actions: "onDelete", target: DELETED },
+               },
+             },
+             failed: {
+               type: "final",
+             },
+             deleted: {
+               type: "final",
+             },
+             unavailable: {
+               on: {
+                 UPDATE: { actions: "onUpdate" },
+                 // REQUEST_COMPLETE is called from `onUpdate` when the doc has been fully loaded from the network
+                 REQUEST_COMPLETE: { target: READY },
+                 DELETE: { actions: "onDelete", target: DELETED },
+               },
+             },
+           },
+         },
+         {
+           actions: {
+             /** Put the updated doc on context */
+             onUpdate: assign((context, { payload }) => {
+               const { doc: oldDoc } = context
+               const { callback } = payload
+               const newDoc = callback(oldDoc)
+               return { doc: newDoc }
+             }),
+             onDelete: assign(() => {
+               this.emit("delete", { handle: this })
+               return { doc: undefined }
+             }),
+             onUnavailable: assign(context => {
+               const { doc } = context
+               this.emit("unavailable", { handle: this })
+               return { doc }
+             }),
+           },
+         }
+       )
+     )
+       .onTransition(({ value: state, history, context }, event) => {
+         const oldDoc = history?.context?.doc
+         const newDoc = context.doc
+         this.#log(`${history?.value}: ${event.type} → ${state}`, newDoc)
+         const docChanged =
+           newDoc &&
+           oldDoc &&
+           !headsAreSame(A.getHeads(newDoc), A.getHeads(oldDoc))
+         if (docChanged) {
+           this.emit("heads-changed", { handle: this, doc: newDoc })
+           const patches = A.diff(newDoc, A.getHeads(oldDoc), A.getHeads(newDoc))
+           if (patches.length > 0) {
+             const source = "change" // TODO: pass along the source (load/change/network)
+             this.emit("change", {
+               handle: this,
+               doc: newDoc,
+               patches,
+               patchInfo: { before: oldDoc, after: newDoc, source },
+             })
+           }
+           if (!this.isReady()) {
+             this.#machine.send(REQUEST_COMPLETE)
+           }
+         }
+       })
+       .start()
+     this.#machine.send(isNew ? CREATE : FIND)
+   }
+   // PRIVATE
+   /** Returns the current document, regardless of state */
+   get #doc() {
+     return this.#machine?.getSnapshot().context.doc
+   }
+   /** Returns the docHandle's state (READY, etc.) */
+   get #state() {
+     return this.#machine?.getSnapshot().value
+   }
+   /** Returns a promise that resolves when the docHandle is in one of the given states */
+   #statePromise(awaitStates) {
+     if (!Array.isArray(awaitStates)) awaitStates = [awaitStates]
+     return Promise.any(
+       awaitStates.map(state =>
+         waitFor(this.#machine, s => s.matches(state), {
+           timeout: this.#timeoutDelay * 2000, // longer than the delay above for testing
+         })
+       )
+     )
+   }
+   // PUBLIC
+   /**
+    * Checks if the document is ready for accessing or changes.
+    * Note that for documents already stored locally this occurs before synchronization
+    * with any peers. We do not currently have an equivalent `whenSynced()`.
+    */
+   isReady = () => this.inState([HandleState.READY])
+   /**
+    * Checks if this document has been marked as deleted.
+    * Deleted documents are removed from local storage and the sync process.
+    * It's not currently possible at runtime to undelete a document.
+    * @returns true if the document has been marked as deleted
+    */
+   isDeleted = () => this.inState([HandleState.DELETED])
+   isUnavailable = () => this.inState([HandleState.UNAVAILABLE])
+   inState = states => states.some(this.#machine?.getSnapshot().matches)
+   /** @hidden */
+   get state() {
+     return this.#machine?.getSnapshot().value
+   }
+   /**
+    * Use this to block until the document handle has finished loading.
+    * The async equivalent to checking `inState()`.
+    * @param awaitStates = [READY]
+    * @returns
+    */
+   async whenReady(awaitStates = [READY]) {
+     await withTimeout(this.#statePromise(awaitStates), this.#timeoutDelay)
+   }
+   /**
+    * Returns the current state of the Automerge document this handle manages.
+    * Note that this waits for the handle to be ready if necessary, and currently, if
+    * loading (or synchronization) fails, will never resolve.
+    *
+    * @param {awaitStates=[READY]} optional states to wait for, such as "LOADING". mostly for internal use.
+    */
+   async doc(awaitStates = [READY, UNAVAILABLE]) {
+     await pause() // yield one tick because reasons
+     try {
+       // wait for the document to enter one of the desired states
+       await this.#statePromise(awaitStates)
+     } catch (error) {
+       if (error instanceof TimeoutError)
+         throw new Error(`DocHandle: timed out loading ${this.documentId}`)
+       else throw error
+     }
+     // Return the document
+     return !this.isUnavailable() ? this.#doc : undefined
+   }
+   /**
+    * Returns the current state of the Automerge document this handle manages, or undefined.
+    * Useful in a synchronous context. Consider using `await handle.doc()` instead, check `isReady()`,
+    * or use `whenReady()` if you want to make sure loading is complete first.
+    *
+    * Do not confuse this with the SyncState of the document, which describes the state of the synchronization process.
+    *
+    * Note that `undefined` is not a valid Automerge document so the return from this function is unambiguous.
+    * @returns the current document, or undefined if the document is not ready
+    */
+   docSync() {
+     if (!this.isReady()) {
+       return undefined
+     }
+     return this.#doc
+   }
+   /** `update` is called by the repo when we receive changes from the network
+    * @hidden
+    * */
+   update(callback) {
+     this.#machine.send(UPDATE, {
+       payload: { callback },
+     })
+   }
+   /** `change` is called by the repo when the document is changed locally */
+   change(callback, options = {}) {
+     if (!this.isReady()) {
+       throw new Error(
+         `DocHandle#${this.documentId} is not ready. Check \`handle.isReady()\` before accessing the document.`
+       )
+     }
+     this.#machine.send(UPDATE, {
+       payload: {
+         callback: doc => {
+           return A.change(doc, options, callback)
+         },
+       },
+     })
+   }
+   /** Make a change as if the document were at `heads`
+    *
+    * @returns A set of heads representing the concurrent change that was made.
+    */
+   changeAt(heads, callback, options = {}) {
+     if (!this.isReady()) {
+       throw new Error(
+         `DocHandle#${this.documentId} is not ready. Check \`handle.isReady()\` before accessing the document.`
+       )
+     }
+     let resultHeads = undefined
+     this.#machine.send(UPDATE, {
+       payload: {
+         callback: doc => {
+           const result = A.changeAt(doc, heads, options, callback)
+           resultHeads = result.newHeads
+           return result.newDoc
+         },
+       },
+     })
+     return resultHeads
+   }
+   unavailable() {
+     this.#machine.send(MARK_UNAVAILABLE)
+   }
+   /** `request` is called by the repo when the document is not found in storage
+    * @hidden
+    * */
+   request() {
+     if (this.#state === LOADING) this.#machine.send(REQUEST)
+   }
+   /** @hidden */
+   awaitNetwork() {
+     if (this.#state === LOADING) this.#machine.send(AWAIT_NETWORK)
+   }
+   /** @hidden */
+   networkReady() {
+     if (this.#state === AWAITING_NETWORK) this.#machine.send(NETWORK_READY)
+   }
+   /** `delete` is called by the repo when the document is deleted */
+   delete() {
+     this.#machine.send(DELETE)
+   }
+   /** `broadcast` sends an arbitrary ephemeral message out to all reachable peers who would receive sync messages from you
+    * it has no guarantee of delivery, and is not persisted to the underlying automerge doc in any way.
+    * messages will have a sending PeerId but this is *not* a useful user identifier.
+    * a user could have multiple tabs open and would appear as multiple PeerIds.
+    * every message source must have a unique PeerId.
+    */
+   broadcast(message) {
+     this.emit("ephemeral-message-outbound", {
+       handle: this,
+       data: encode(message),
+     })
+   }
+ }
+ // STATE MACHINE TYPES
+ // state
+ export const HandleState = {
+   IDLE: "idle",
+   LOADING: "loading",
+   AWAITING_NETWORK: "awaitingNetwork",
+   REQUESTING: "requesting",
+   READY: "ready",
+   FAILED: "failed",
+   DELETED: "deleted",
+   UNAVAILABLE: "unavailable",
+ }
+ // events
+ export const Event = {
+   CREATE: "CREATE",
+   FIND: "FIND",
+   REQUEST: "REQUEST",
+   REQUEST_COMPLETE: "REQUEST_COMPLETE",
+   AWAIT_NETWORK: "AWAIT_NETWORK",
+   NETWORK_READY: "NETWORK_READY",
+   UPDATE: "UPDATE",
+   TIMEOUT: "TIMEOUT",
+   DELETE: "DELETE",
+   MARK_UNAVAILABLE: "MARK_UNAVAILABLE",
+ }
+ // CONSTANTS
+ export const {
+   IDLE,
+   LOADING,
+   AWAITING_NETWORK,
+   REQUESTING,
+   READY,
+   FAILED,
+   DELETED,
+   UNAVAILABLE,
+ } = HandleState
+ const {
+   CREATE,
+   FIND,
+   REQUEST,
+   UPDATE,
+   TIMEOUT,
+   DELETE,
+   REQUEST_COMPLETE,
+   MARK_UNAVAILABLE,
+   AWAIT_NETWORK,
+   NETWORK_READY,
+ } = Event
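Not part of the diff: a minimal usage sketch of the `DocHandle` API shown above, based on the doc comments and `emit` calls in `DocHandle.js`. It assumes a handle obtained from a `Repo` (declared in `Repo.d.ts` further down) and a hypothetical `TodoDoc` document shape.

```ts
import { Repo } from "@automerge/automerge-repo"

// Hypothetical document shape used for illustration only
type TodoDoc = { todos: string[] }

async function example(repo: Repo) {
  // Create a document; a newly created handle starts in the "ready" state
  const handle = repo.create<TodoDoc>()

  // "change" is emitted when the heads move and patches are produced
  handle.on("change", ({ doc, patches }) => {
    console.log("document changed:", patches.length, "patches", doc)
  })

  // Make a local change; this throws if the handle is not ready yet
  handle.change(doc => {
    doc.todos = ["write docs"]
  })

  // changeAt(heads, callback) makes a change as if the document were at `heads`
  // and returns the heads of the resulting concurrent change (omitted here).

  // Wait for readiness, then read the current contents
  await handle.whenReady()
  const current = await handle.doc() // or handle.docSync() in a synchronous context

  // Send an ephemeral, best-effort message (not persisted to the document)
  handle.broadcast({ cursor: { position: 0 } })

  console.log(handle.url, current)
}
```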
package/dist/src/DocUrl.d.ts
@@ -0,0 +1,49 @@
+ import {
+   type AutomergeUrl,
+   type BinaryDocumentId,
+   type DocumentId,
+ } from "./types.js"
+ export declare const urlPrefix = "automerge:"
+ /**
+  * given an Automerge URL, return a decoded DocumentId (and the encoded DocumentId)
+  *
+  * @param url
+  * @returns { binaryDocumentId: BinaryDocumentId, documentId: DocumentId }
+  */
+ export declare const parseAutomergeUrl: (url: AutomergeUrl) => {
+   binaryDocumentId: BinaryDocumentId
+   documentId: DocumentId
+ }
+ interface StringifyAutomergeUrlOptions {
+   documentId: DocumentId | BinaryDocumentId
+ }
+ /**
+  * Given a documentId in either canonical form, return an Automerge URL
+  * Throws on invalid input.
+  * Note: this is an object because we anticipate adding fields in the future.
+  * @param { documentId: BinaryDocumentId | DocumentId }
+  * @returns AutomergeUrl
+  */
+ export declare const stringifyAutomergeUrl: ({
+   documentId,
+ }: StringifyAutomergeUrlOptions) => AutomergeUrl
+ /**
+  * Given a string, return true if it is a valid Automerge URL
+  * also acts as a type discriminator in Typescript.
+  * @param str: URL candidate
+  * @returns boolean
+  */
+ export declare const isValidAutomergeUrl: (str: string) => str is AutomergeUrl
+ /**
+  * generateAutomergeUrl produces a new AutomergeUrl.
+  * generally only called by create(), but used in tests as well.
+  * @returns a new Automerge URL with a random UUID documentId
+  */
+ export declare const generateAutomergeUrl: () => AutomergeUrl
+ export declare const documentIdToBinary: (
+   docId: DocumentId
+ ) => BinaryDocumentId | undefined
+ export declare const binaryToDocumentId: (docId: BinaryDocumentId) => DocumentId
+ export declare const parseLegacyUUID: (str: string) => AutomergeUrl | undefined
+ export {}
+ //# sourceMappingURL=DocUrl.d.ts.map
package/dist/src/DocUrl.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"DocUrl.d.ts","sourceRoot":"","sources":["../../src/DocUrl.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,KAAK,YAAY,EACjB,KAAK,gBAAgB,EACrB,KAAK,UAAU,EAChB,MAAM,YAAY,CAAA;AAInB,eAAO,MAAM,SAAS,eAAe,CAAA;AAErC;;;;;GAKG;AACH,eAAO,MAAM,iBAAiB,QAAS,YAAY;;;CAIlD,CAAA;AAED,UAAU,4BAA4B;IACpC,UAAU,EAAE,UAAU,GAAG,gBAAgB,CAAA;CAC1C;AAED;;;;;;GAMG;AACH,eAAO,MAAM,qBAAqB,oBAE/B,4BAA4B,KAAG,YAQjC,CAAA;AAED;;;;;GAKG;AACH,eAAO,MAAM,mBAAmB,QAAS,MAAM,wBAK9C,CAAA;AAED;;;;GAIG;AACH,eAAO,MAAM,oBAAoB,QAAO,YAGpC,CAAA;AAEJ,eAAO,MAAM,kBAAkB,UACtB,UAAU,KAChB,gBAAgB,GAAG,SACyC,CAAA;AAE/D,eAAO,MAAM,kBAAkB,UAAW,gBAAgB,KAAG,UACtB,CAAA;AAEvC,eAAO,MAAM,eAAe,QAAS,MAAM,KAAG,YAAY,GAAG,SAM5D,CAAA"}
package/dist/src/DocUrl.js
@@ -0,0 +1,72 @@
+ import * as Uuid from "uuid"
+ import bs58check from "bs58check"
+ export const urlPrefix = "automerge:"
+ /**
+  * given an Automerge URL, return a decoded DocumentId (and the encoded DocumentId)
+  *
+  * @param url
+  * @returns { binaryDocumentId: BinaryDocumentId, documentId: DocumentId }
+  */
+ export const parseAutomergeUrl = url => {
+   const { binaryDocumentId, documentId } = parts(url)
+   if (!binaryDocumentId) throw new Error("Invalid document URL: " + url)
+   return { binaryDocumentId, documentId }
+ }
+ /**
+  * Given a documentId in either canonical form, return an Automerge URL
+  * Throws on invalid input.
+  * Note: this is an object because we anticipate adding fields in the future.
+  * @param { documentId: BinaryDocumentId | DocumentId }
+  * @returns AutomergeUrl
+  */
+ export const stringifyAutomergeUrl = ({ documentId }) => {
+   if (documentId instanceof Uint8Array)
+     return urlPrefix + binaryToDocumentId(documentId)
+   else if (typeof documentId === "string") {
+     return urlPrefix + documentId
+   }
+   throw new Error("Invalid documentId: " + documentId)
+ }
+ /**
+  * Given a string, return true if it is a valid Automerge URL
+  * also acts as a type discriminator in Typescript.
+  * @param str: URL candidate
+  * @returns boolean
+  */
+ export const isValidAutomergeUrl = str => {
+   if (!str.startsWith(urlPrefix)) return false
+   const { binaryDocumentId: documentId } = parts(str)
+   return documentId ? true : false
+ }
+ /**
+  * generateAutomergeUrl produces a new AutomergeUrl.
+  * generally only called by create(), but used in tests as well.
+  * @returns a new Automerge URL with a random UUID documentId
+  */
+ export const generateAutomergeUrl = () =>
+   stringifyAutomergeUrl({
+     documentId: Uuid.v4(null, new Uint8Array(16)),
+   })
+ export const documentIdToBinary = docId => bs58check.decodeUnsafe(docId)
+ export const binaryToDocumentId = docId => bs58check.encode(docId)
+ export const parseLegacyUUID = str => {
+   if (Uuid.validate(str)) {
+     const uuid = Uuid.parse(str)
+     return stringifyAutomergeUrl({ documentId: uuid })
+   }
+   return undefined
+ }
+ /**
+  * parts breaks up the URL into constituent pieces,
+  * eventually this could include things like heads, so we use this structure
+  * we return both a binary & string-encoded version of the document ID
+  * @param str
+  * @returns { binaryDocumentId, documentId }
+  */
+ const parts = str => {
+   const regex = new RegExp(`^${urlPrefix}(\\w+)$`)
+   const [_, docMatch] = str.match(regex) || []
+   const documentId = docMatch
+   const binaryDocumentId = documentIdToBinary(documentId)
+   return { binaryDocumentId, documentId }
+ }
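Not part of the diff: a sketch of how the `DocUrl` helpers above fit together. The import path assumes these functions are re-exported from the package index (they are defined in `dist/src/DocUrl.js`); adjust it if they are not. The UUID passed to `parseLegacyUUID` is an arbitrary illustrative value.

```ts
// Assumption: these helpers are re-exported from the package index
import {
  generateAutomergeUrl,
  isValidAutomergeUrl,
  parseAutomergeUrl,
  stringifyAutomergeUrl,
  parseLegacyUUID,
} from "@automerge/automerge-repo"

// Mint a fresh URL: "automerge:" + bs58check-encoded random UUID
const url = generateAutomergeUrl()

// The validator doubles as a TypeScript type guard for AutomergeUrl
if (isValidAutomergeUrl(url)) {
  // Decode it into both the string and binary documentId forms
  const { documentId, binaryDocumentId } = parseAutomergeUrl(url)

  // Round-trip: a documentId (string or binary) converts back into the same URL
  const sameUrl = stringifyAutomergeUrl({ documentId })
  console.log(url === sameUrl, binaryDocumentId.byteLength)
}

// Pre-URL documents were addressed by plain UUIDs; those can still be converted
const legacy = parseLegacyUUID("00000000-0000-0000-0000-000000000000")
console.log(legacy)
```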
package/dist/src/EphemeralData.d.ts
@@ -0,0 +1,19 @@
+ import { DocumentId, PeerId } from "./index.js"
+ import { EphemeralMessageContents } from "./network/messages.js"
+ export type SessionId = string & {
+   __SessionId: false
+ }
+ export interface EphemeralDataPayload {
+   documentId: DocumentId
+   peerId: PeerId
+   data: {
+     peerId: PeerId
+     documentId: DocumentId
+     data: unknown
+   }
+ }
+ export type EphemeralDataMessageEvents = {
+   message: (event: EphemeralMessageContents) => void
+   data: (event: EphemeralDataPayload) => void
+ }
+ //# sourceMappingURL=EphemeralData.d.ts.map
package/dist/src/EphemeralData.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"EphemeralData.d.ts","sourceRoot":"","sources":["../../src/EphemeralData.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,YAAY,CAAA;AAC/C,OAAO,EAAE,wBAAwB,EAAE,MAAM,uBAAuB,CAAA;AAGhE,MAAM,MAAM,SAAS,GAAG,MAAM,GAAG;IAAE,WAAW,EAAE,KAAK,CAAA;CAAE,CAAA;AAEvD,MAAM,WAAW,oBAAoB;IACnC,UAAU,EAAE,UAAU,CAAA;IACtB,MAAM,EAAE,MAAM,CAAA;IACd,IAAI,EAAE;QAAE,MAAM,EAAE,MAAM,CAAC;QAAC,UAAU,EAAE,UAAU,CAAC;QAAC,IAAI,EAAE,OAAO,CAAA;KAAE,CAAA;CAChE;AAED,MAAM,MAAM,0BAA0B,GAAG;IACvC,OAAO,EAAE,CAAC,KAAK,EAAE,wBAAwB,KAAK,IAAI,CAAA;IAClD,IAAI,EAAE,CAAC,KAAK,EAAE,oBAAoB,KAAK,IAAI,CAAA;CAC5C,CAAA"}
package/dist/src/EphemeralData.js
@@ -0,0 +1 @@
+ export {}
package/dist/src/Repo.d.ts
@@ -0,0 +1,74 @@
+ import { NetworkAdapter } from "./network/NetworkAdapter.js"
+ import { NetworkSubsystem } from "./network/NetworkSubsystem.js"
+ import { StorageAdapter } from "./storage/StorageAdapter.js"
+ import { StorageSubsystem } from "./storage/StorageSubsystem.js"
+ import { type AutomergeUrl, DocumentId, PeerId } from "./types.js"
+ import { DocHandle } from "./DocHandle.js"
+ import { EventEmitter } from "eventemitter3"
+ /** A Repo is a collection of documents with networking, syncing, and storage capabilities. */
+ /** The `Repo` is the main entry point of this library
+  *
+  * @remarks
+  * To construct a `Repo` you will need an {@link StorageAdapter} and one or
+  * more {@link NetworkAdapter}s. Once you have a `Repo` you can use it to
+  * obtain {@link DocHandle}s.
+  */
+ export declare class Repo extends EventEmitter<DocCollectionEvents> {
+   #private
+   networkSubsystem: NetworkSubsystem
+   storageSubsystem?: StorageSubsystem
+   /** By default, we share generously with all peers. */
+   sharePolicy: SharePolicy
+   constructor({ storage, network, peerId, sharePolicy }: RepoConfig)
+   /** Returns all the handles we have cached. */
+   get handles(): Record<DocumentId, DocHandle<any>>
+   /**
+    * Creates a new document and returns a handle to it. The initial value of the document is
+    * an empty object `{}`. Its documentId is generated by the system. we emit a `document` event
+    * to advertise interest in the document.
+    */
+   create<T>(): DocHandle<T>
+   /**
+    * Retrieves a document by id. It gets data from the local system, but also emits a `document`
+    * event to advertise interest in the document.
+    */
+   find<T>(
+     /** The documentId of the handle to retrieve */
+     automergeUrl: AutomergeUrl
+   ): DocHandle<T>
+   delete(
+     /** The documentId of the handle to delete */
+     id: DocumentId | AutomergeUrl
+   ): void
+ }
+ export interface RepoConfig {
+   /** Our unique identifier */
+   peerId?: PeerId
+   /** A storage adapter can be provided, or not */
+   storage?: StorageAdapter
+   /** One or more network adapters must be provided */
+   network: NetworkAdapter[]
+   /**
+    * Normal peers typically share generously with everyone (meaning we sync all our documents with
+    * all peers). A server only syncs documents that a peer explicitly requests by ID.
+    */
+   sharePolicy?: SharePolicy
+ }
+ export type SharePolicy = (
+   peerId: PeerId,
+   documentId?: DocumentId
+ ) => Promise<boolean>
+ interface DocCollectionEvents {
+   document: (arg: DocumentPayload) => void
+   "delete-document": (arg: DeleteDocumentPayload) => void
+   "unavailable-document": (arg: DeleteDocumentPayload) => void
+ }
+ interface DocumentPayload {
+   handle: DocHandle<any>
+   isNew: boolean
+ }
+ interface DeleteDocumentPayload {
+   documentId: DocumentId
+ }
+ export {}
+ //# sourceMappingURL=Repo.d.ts.map
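Not part of the diff: a sketch of constructing a `Repo` against the declarations above. The `BroadcastChannelNetworkAdapter` import is one example adapter (any `NetworkAdapter` implementation works), the `SharePolicy` re-export from the index is an assumption, and the restrictive policy mirrors the server behaviour described in the `RepoConfig` comment.

```ts
import { Repo, type SharePolicy } from "@automerge/automerge-repo"
// Example adapter; substitute whichever NetworkAdapter you actually use
import { BroadcastChannelNetworkAdapter } from "@automerge/automerge-repo-network-broadcastchannel"

// Server-style policy: only sync documents a peer explicitly requests by ID.
// (The default policy shares generously with all peers.)
const sharePolicy: SharePolicy = async (_peerId, _documentId) => false

const repo = new Repo({
  network: [new BroadcastChannelNetworkAdapter()],
  // storage: optional; without a StorageAdapter nothing is persisted locally
  sharePolicy,
})

// Create a document, change it, and look it up again by URL
const handle = repo.create<{ count: number }>()
handle.change(d => {
  d.count = 1
})
const found = repo.find<{ count: number }>(handle.url)

// Remove it from local storage and stop syncing it
repo.delete(handle.documentId)
```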
package/dist/src/Repo.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"Repo.d.ts","sourceRoot":"","sources":["../../src/Repo.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,cAAc,EAAE,MAAM,6BAA6B,CAAA;AAC5D,OAAO,EAAE,gBAAgB,EAAE,MAAM,+BAA+B,CAAA;AAChE,OAAO,EAAE,cAAc,EAAE,MAAM,6BAA6B,CAAA;AAC5D,OAAO,EAAE,gBAAgB,EAAE,MAAM,+BAA+B,CAAA;AAEhE,OAAO,EAAE,KAAK,YAAY,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,YAAY,CAAA;AASlE,OAAO,EAAE,SAAS,EAAE,MAAM,gBAAgB,CAAA;AAC1C,OAAO,EAAE,YAAY,EAAE,MAAM,eAAe,CAAA;AAG5C,8FAA8F;AAC9F;;;;;;GAMG;AACH,qBAAa,IAAK,SAAQ,YAAY,CAAC,mBAAmB,CAAC;;IAGzD,gBAAgB,EAAE,gBAAgB,CAAA;IAClC,gBAAgB,CAAC,EAAE,gBAAgB,CAAA;IAGnC,sDAAsD;IACtD,WAAW,EAAE,WAAW,CAAmB;gBAE/B,EAAE,OAAO,EAAE,OAAO,EAAE,MAAM,EAAE,WAAW,EAAE,EAAE,UAAU;IAuHjE,8CAA8C;IAC9C,IAAI,OAAO,uCAEV;IAED;;;;OAIG;IACH,MAAM,CAAC,CAAC,KAAK,SAAS,CAAC,CAAC,CAAC;IA0BzB;;;OAGG;IACH,IAAI,CAAC,CAAC;IACJ,+CAA+C;IAC/C,YAAY,EAAE,YAAY,GACzB,SAAS,CAAC,CAAC,CAAC;IAgCf,MAAM;IACJ,6CAA6C;IAC7C,EAAE,EAAE,UAAU,GAAG,YAAY;CAchC;AAED,MAAM,WAAW,UAAU;IACzB,4BAA4B;IAC5B,MAAM,CAAC,EAAE,MAAM,CAAA;IAEf,gDAAgD;IAChD,OAAO,CAAC,EAAE,cAAc,CAAA;IAExB,oDAAoD;IACpD,OAAO,EAAE,cAAc,EAAE,CAAA;IAEzB;;;OAGG;IACH,WAAW,CAAC,EAAE,WAAW,CAAA;CAC1B;AAED,MAAM,MAAM,WAAW,GAAG,CACxB,MAAM,EAAE,MAAM,EACd,UAAU,CAAC,EAAE,UAAU,KACpB,OAAO,CAAC,OAAO,CAAC,CAAA;AAGrB,UAAU,mBAAmB;IAC3B,QAAQ,EAAE,CAAC,GAAG,EAAE,eAAe,KAAK,IAAI,CAAA;IACxC,iBAAiB,EAAE,CAAC,GAAG,EAAE,qBAAqB,KAAK,IAAI,CAAA;IACvD,sBAAsB,EAAE,CAAC,GAAG,EAAE,qBAAqB,KAAK,IAAI,CAAA;CAC7D;AAED,UAAU,eAAe;IACvB,MAAM,EAAE,SAAS,CAAC,GAAG,CAAC,CAAA;IACtB,KAAK,EAAE,OAAO,CAAA;CACf;AAED,UAAU,qBAAqB;IAC7B,UAAU,EAAE,UAAU,CAAA;CACvB"}