@automerge/automerge-repo 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (99)
  1. package/.eslintrc +28 -0
  2. package/.mocharc.json +5 -0
  3. package/README.md +298 -0
  4. package/TODO.md +54 -0
  5. package/dist/DocCollection.d.ts +44 -0
  6. package/dist/DocCollection.d.ts.map +1 -0
  7. package/dist/DocCollection.js +85 -0
  8. package/dist/DocHandle.d.ts +78 -0
  9. package/dist/DocHandle.d.ts.map +1 -0
  10. package/dist/DocHandle.js +227 -0
  11. package/dist/EphemeralData.d.ts +27 -0
  12. package/dist/EphemeralData.d.ts.map +1 -0
  13. package/dist/EphemeralData.js +28 -0
  14. package/dist/Repo.d.ts +30 -0
  15. package/dist/Repo.d.ts.map +1 -0
  16. package/dist/Repo.js +97 -0
  17. package/dist/helpers/arraysAreEqual.d.ts +2 -0
  18. package/dist/helpers/arraysAreEqual.d.ts.map +1 -0
  19. package/dist/helpers/arraysAreEqual.js +1 -0
  20. package/dist/helpers/eventPromise.d.ts +5 -0
  21. package/dist/helpers/eventPromise.d.ts.map +1 -0
  22. package/dist/helpers/eventPromise.js +6 -0
  23. package/dist/helpers/headsAreSame.d.ts +3 -0
  24. package/dist/helpers/headsAreSame.d.ts.map +1 -0
  25. package/dist/helpers/headsAreSame.js +7 -0
  26. package/dist/helpers/mergeArrays.d.ts +2 -0
  27. package/dist/helpers/mergeArrays.d.ts.map +1 -0
  28. package/dist/helpers/mergeArrays.js +15 -0
  29. package/dist/helpers/pause.d.ts +3 -0
  30. package/dist/helpers/pause.d.ts.map +1 -0
  31. package/dist/helpers/pause.js +7 -0
  32. package/dist/helpers/withTimeout.d.ts +9 -0
  33. package/dist/helpers/withTimeout.d.ts.map +1 -0
  34. package/dist/helpers/withTimeout.js +22 -0
  35. package/dist/index.d.ts +13 -0
  36. package/dist/index.d.ts.map +1 -0
  37. package/dist/index.js +10 -0
  38. package/dist/network/NetworkAdapter.d.ts +37 -0
  39. package/dist/network/NetworkAdapter.d.ts.map +1 -0
  40. package/dist/network/NetworkAdapter.js +4 -0
  41. package/dist/network/NetworkSubsystem.d.ts +23 -0
  42. package/dist/network/NetworkSubsystem.d.ts.map +1 -0
  43. package/dist/network/NetworkSubsystem.js +89 -0
  44. package/dist/storage/StorageAdapter.d.ts +6 -0
  45. package/dist/storage/StorageAdapter.d.ts.map +1 -0
  46. package/dist/storage/StorageAdapter.js +2 -0
  47. package/dist/storage/StorageSubsystem.d.ts +12 -0
  48. package/dist/storage/StorageSubsystem.d.ts.map +1 -0
  49. package/dist/storage/StorageSubsystem.js +65 -0
  50. package/dist/synchronizer/CollectionSynchronizer.d.ts +24 -0
  51. package/dist/synchronizer/CollectionSynchronizer.d.ts.map +1 -0
  52. package/dist/synchronizer/CollectionSynchronizer.js +92 -0
  53. package/dist/synchronizer/DocSynchronizer.d.ts +18 -0
  54. package/dist/synchronizer/DocSynchronizer.d.ts.map +1 -0
  55. package/dist/synchronizer/DocSynchronizer.js +136 -0
  56. package/dist/synchronizer/Synchronizer.d.ts +10 -0
  57. package/dist/synchronizer/Synchronizer.d.ts.map +1 -0
  58. package/dist/synchronizer/Synchronizer.js +3 -0
  59. package/dist/test-utilities/adapter-tests.d.ts +21 -0
  60. package/dist/test-utilities/adapter-tests.d.ts.map +1 -0
  61. package/dist/test-utilities/adapter-tests.js +117 -0
  62. package/dist/types.d.ts +10 -0
  63. package/dist/types.d.ts.map +1 -0
  64. package/dist/types.js +1 -0
  65. package/fuzz/fuzz.ts +129 -0
  66. package/package.json +65 -0
  67. package/src/DocCollection.ts +123 -0
  68. package/src/DocHandle.ts +386 -0
  69. package/src/EphemeralData.ts +46 -0
  70. package/src/Repo.ts +155 -0
  71. package/src/helpers/arraysAreEqual.ts +2 -0
  72. package/src/helpers/eventPromise.ts +10 -0
  73. package/src/helpers/headsAreSame.ts +8 -0
  74. package/src/helpers/mergeArrays.ts +17 -0
  75. package/src/helpers/pause.ts +9 -0
  76. package/src/helpers/withTimeout.ts +28 -0
  77. package/src/index.ts +22 -0
  78. package/src/network/NetworkAdapter.ts +54 -0
  79. package/src/network/NetworkSubsystem.ts +130 -0
  80. package/src/storage/StorageAdapter.ts +5 -0
  81. package/src/storage/StorageSubsystem.ts +91 -0
  82. package/src/synchronizer/CollectionSynchronizer.ts +112 -0
  83. package/src/synchronizer/DocSynchronizer.ts +182 -0
  84. package/src/synchronizer/Synchronizer.ts +15 -0
  85. package/src/test-utilities/adapter-tests.ts +163 -0
  86. package/src/types.ts +3 -0
  87. package/test/CollectionSynchronizer.test.ts +73 -0
  88. package/test/DocCollection.test.ts +19 -0
  89. package/test/DocHandle.test.ts +281 -0
  90. package/test/DocSynchronizer.test.ts +68 -0
  91. package/test/EphemeralData.test.ts +44 -0
  92. package/test/Network.test.ts +13 -0
  93. package/test/Repo.test.ts +367 -0
  94. package/test/StorageSubsystem.test.ts +78 -0
  95. package/test/helpers/DummyNetworkAdapter.ts +8 -0
  96. package/test/helpers/DummyStorageAdapter.ts +23 -0
  97. package/test/helpers/getRandomItem.ts +4 -0
  98. package/test/types.ts +3 -0
  99. package/tsconfig.json +16 -0
package/src/DocHandle.ts ADDED
@@ -0,0 +1,386 @@
+ import * as A from "@automerge/automerge"
+ import debug from "debug"
+ import EventEmitter from "eventemitter3"
+ import {
+   assign,
+   BaseActionObject,
+   createMachine,
+   interpret,
+   Interpreter,
+   ResolveTypegenMeta,
+   ServiceMap,
+   StateSchema,
+   StateValue,
+   TypegenDisabled,
+ } from "xstate"
+ import { waitFor } from "xstate/lib/waitFor.js"
+ import { headsAreSame } from "./helpers/headsAreSame.js"
+ import { pause } from "./helpers/pause.js"
+ import type { ChannelId, DocumentId, PeerId } from "./types.js"
+ import { withTimeout, TimeoutError } from "./helpers/withTimeout.js"
+
+ /** DocHandle is a wrapper around a single Automerge document that lets us listen for changes. */
+ export class DocHandle<T> //
+   extends EventEmitter<DocHandleEvents<T>>
+ {
+   #log: debug.Debugger
+
+   #machine: DocHandleXstateMachine<T>
+   #timeoutDelay: number
+
+   constructor(
+     public documentId: DocumentId,
+     { isNew = false, timeoutDelay = 700000 }: DocHandleOptions = {}
+   ) {
+     super()
+     this.#timeoutDelay = timeoutDelay
+     this.#log = debug(`automerge-repo:dochandle:${documentId.slice(0, 5)}`)
+
+     // initial doc
+     const doc = A.init<T>({
+       patchCallback: (patches, { before, after }) =>
+         this.emit("patch", { handle: this, patches, before, after }),
+     })
+
+     /**
+      * Internally we use a state machine to orchestrate document loading and/or syncing, in order to
+      * avoid requesting data we already have, or surfacing intermediate values to the consumer.
+      *
+      *                     ┌─────────┐           ┌────────────┐
+      * ┌───────┐  ┌──FIND──┤ loading ├─REQUEST──►│ requesting ├─UPDATE──┐
+      * │ idle  ├──┤        └───┬─────┘           └────────────┘         │
+      * └───────┘  │            │                                        └─►┌─────────┐
+      *            │            └───────LOAD───────────────────────────────►│  ready  │
+      *            └──CREATE───────────────────────────────────────────────►└─────────┘
+      */
+     this.#machine = interpret(
+       createMachine<DocHandleContext<T>, DocHandleEvent<T>>(
+         {
+           predictableActionArguments: true,
+
+           id: "docHandle",
+           initial: IDLE,
+           context: { documentId, doc },
+           states: {
+             idle: {
+               on: {
+                 // If we're creating a new document, we don't need to load anything
+                 CREATE: { target: READY },
+                 // If we're accessing an existing document, we need to request it from storage
+                 // and/or the network
+                 FIND: { target: LOADING },
+                 DELETE: { actions: "onDelete", target: DELETED },
+               },
+             },
+             loading: {
+               on: {
+                 // LOAD is called by the Repo if the document is found in storage
+                 LOAD: { actions: "onLoad", target: READY },
+                 // REQUEST is called by the Repo if the document is not found in storage
+                 REQUEST: { target: REQUESTING },
+                 DELETE: { actions: "onDelete", target: DELETED },
+               },
+             },
+             requesting: {
+               on: {
+                 // UPDATE is called by the Repo when we receive changes from the network
+                 UPDATE: { actions: "onUpdate" },
+                 // REQUEST_COMPLETE is called from `onUpdate` when the doc has been fully loaded from the network
+                 REQUEST_COMPLETE: { target: READY },
+                 DELETE: { actions: "onDelete", target: DELETED },
+               },
+             },
+             ready: {
+               on: {
+                 // UPDATE is called by the Repo when we receive changes from the network
+                 UPDATE: { actions: "onUpdate", target: READY },
+                 DELETE: { actions: "onDelete", target: DELETED },
+               },
+             },
+             error: {},
+             deleted: {},
+           },
+         },
+
+         {
+           actions: {
+             /** Apply the binary changes from storage and put the updated doc on context */
+             onLoad: assign((context, { payload }: LoadEvent) => {
+               const { binary } = payload
+               const { doc } = context
+               const newDoc = A.loadIncremental(doc, binary)
+               return { doc: newDoc }
+             }),
+
+             /** Put the updated doc on context; if it's different, emit a `change` event */
+             onUpdate: assign((context, { payload }: UpdateEvent<T>) => {
+               const { doc: oldDoc } = context
+
+               const { callback } = payload
+               const newDoc = callback(oldDoc)
+
+               const docChanged = !headsAreSame(newDoc, oldDoc)
+               if (docChanged) {
+                 this.emit("change", { handle: this, doc: newDoc })
+                 if (!this.isReady()) {
+                   this.#machine.send(REQUEST_COMPLETE)
+                 }
+               }
+               return { doc: newDoc }
+             }),
+             onDelete: assign(() => {
+               this.emit("delete", { handle: this })
+               return { doc: undefined }
+             }),
+           },
+         }
+       )
+     )
+       .onTransition(({ value: state }, { type: event }) =>
+         this.#log(`${event} → ${state}`, this.#doc)
+       )
+       .start()
+
+     this.#machine.send(isNew ? CREATE : FIND)
+   }
+
+   get doc() {
+     if (!this.isReady()) {
+       throw new Error(
+         `DocHandle#${this.documentId} is not ready. Check \`handle.isReady()\` before accessing the document.`
+       )
+     }
+
+     return this.#doc
+   }
+
+   // PRIVATE
+
+   /** Returns the current document */
+   get #doc() {
+     return this.#machine?.getSnapshot().context.doc
+   }
+
+   /** Returns the docHandle's state (READY, etc.) */
+   get #state() {
+     return this.#machine?.getSnapshot().value
+   }
+
+   /** Returns a promise that resolves when the docHandle is in one of the given states */
+   #statePromise(awaitStates: HandleState | HandleState[]) {
+     if (!Array.isArray(awaitStates)) awaitStates = [awaitStates]
+     return Promise.any(
+       awaitStates.map(state => waitFor(this.#machine, s => s.matches(state)))
+     )
+   }
+
+   // PUBLIC
+
+   isReady = () => this.#state === READY
+   isReadyOrRequesting = () =>
+     this.#state === READY || this.#state === REQUESTING
+   isDeleted = () => this.#state === DELETED
+
+   /**
+    * Returns the current document, waiting for the handle to be ready if necessary.
+    */
+   async value(awaitStates: HandleState[] = [READY]) {
+     await pause() // yield one tick because reasons
+     try {
+       // wait for the document to enter one of the desired states
+       await withTimeout(this.#statePromise(awaitStates), this.#timeoutDelay)
+     } catch (error) {
+       if (error instanceof TimeoutError)
+         throw new Error(`DocHandle: timed out loading ${this.documentId}`)
+       else throw error
+     }
+     // Return the document
+     return this.#doc
+   }
+
+   async loadAttemptedValue() {
+     return this.value([READY, REQUESTING])
+   }
+
+   /** `load` is called by the repo when the document is found in storage */
+   load(binary: Uint8Array) {
+     if (binary.length) {
+       this.#machine.send(LOAD, { payload: { binary } })
+     }
+   }
+
+   /** `update` is called by the repo when we receive changes from the network */
+   update(callback: (doc: A.Doc<T>) => A.Doc<T>) {
+     this.#machine.send(UPDATE, { payload: { callback } })
+   }
+
+   /** `change` is called by the repo when the document is changed locally */
+   change(callback: A.ChangeFn<T>, options: A.ChangeOptions<T> = {}) {
+     if (!this.isReady()) {
+       throw new Error(
+         `DocHandle#${this.documentId} is not ready. Check \`handle.isReady()\` before accessing the document.`
+       )
+     }
+     this.#machine.send(UPDATE, {
+       payload: {
+         callback: (doc: A.Doc<T>) => {
+           return A.change(doc, options, callback)
+         },
+       },
+     })
+   }
+
+   changeAt(heads: A.Heads, callback: A.ChangeFn<T>, options: A.ChangeOptions<T> = {}) {
+     if (!this.isReady()) {
+       throw new Error(
+         `DocHandle#${this.documentId} is not ready. Check \`handle.isReady()\` before accessing the document.`
+       )
+     }
+     this.#machine.send(UPDATE, {
+       payload: {
+         callback: (doc: A.Doc<T>) => {
+           return A.changeAt(doc, heads, options, callback)
+         },
+       },
+     })
+   }
+
+   /** `request` is called by the repo when the document is not found in storage */
+   request() {
+     if (this.#state === LOADING) this.#machine.send(REQUEST)
+   }
+
+   /** `delete` is called by the repo when the document is deleted */
+   delete() {
+     this.#machine.send(DELETE)
+   }
+ }
+
+ // WRAPPER CLASS TYPES
+
+ interface DocHandleOptions {
+   isNew?: boolean
+   timeoutDelay?: number
+ }
+
+ export interface DocHandleMessagePayload {
+   destinationId: PeerId
+   channelId: ChannelId
+   data: Uint8Array
+ }
+
+ export interface DocHandleChangePayload<T> {
+   handle: DocHandle<T>
+   doc: A.Doc<T>
+ }
+
+ export interface DocHandleDeletePayload<T> {
+   handle: DocHandle<T>
+ }
+
+ export interface DocHandlePatchPayload<T> {
+   handle: DocHandle<T>
+   patches: A.Patch[]
+   before: A.Doc<T>
+   after: A.Doc<T>
+ }
+
+ export interface DocHandleEvents<T> {
+   change: (payload: DocHandleChangePayload<T>) => void
+   patch: (payload: DocHandlePatchPayload<T>) => void
+   delete: (payload: DocHandleDeletePayload<T>) => void
+ }
+
+ // STATE MACHINE TYPES
+
+ // state
+
+ export const HandleState = {
+   IDLE: "idle",
+   LOADING: "loading",
+   REQUESTING: "requesting",
+   READY: "ready",
+   ERROR: "error",
+   DELETED: "deleted",
+ } as const
+ export type HandleState = (typeof HandleState)[keyof typeof HandleState]
+
+ type DocHandleMachineState = {
+   states: Record<
+     (typeof HandleState)[keyof typeof HandleState],
+     StateSchema<HandleState>
+   >
+ }
+
+ // context
+
+ interface DocHandleContext<T> {
+   documentId: string
+   doc: A.Doc<T>
+ }
+
+ // events
+
+ export const Event = {
+   CREATE: "CREATE",
+   LOAD: "LOAD",
+   FIND: "FIND",
+   REQUEST: "REQUEST",
+   REQUEST_COMPLETE: "REQUEST_COMPLETE",
+   UPDATE: "UPDATE",
+   TIMEOUT: "TIMEOUT",
+   DELETE: "DELETE",
+ } as const
+ type Event = (typeof Event)[keyof typeof Event]
+
+ type CreateEvent = { type: typeof CREATE; payload: { documentId: string } }
+ type LoadEvent = { type: typeof LOAD; payload: { binary: Uint8Array } }
+ type FindEvent = { type: typeof FIND; payload: { documentId: string } }
+ type RequestEvent = { type: typeof REQUEST }
+ type RequestCompleteEvent = { type: typeof REQUEST_COMPLETE }
+ type DeleteEvent = { type: typeof DELETE }
+ type UpdateEvent<T> = {
+   type: typeof UPDATE
+   payload: { callback: (doc: A.Doc<T>) => A.Doc<T> }
+ }
+ type TimeoutEvent = { type: typeof TIMEOUT }
+
+ type DocHandleEvent<T> =
+   | CreateEvent
+   | LoadEvent
+   | FindEvent
+   | RequestEvent
+   | RequestCompleteEvent
+   | UpdateEvent<T>
+   | TimeoutEvent
+   | DeleteEvent
+
+ type DocHandleXstateMachine<T> = Interpreter<
+   DocHandleContext<T>,
+   DocHandleMachineState,
+   DocHandleEvent<T>,
+   {
+     value: StateValue // Should this be unknown or T?
+     context: DocHandleContext<T>
+   },
+   ResolveTypegenMeta<
+     TypegenDisabled,
+     DocHandleEvent<T>,
+     BaseActionObject,
+     ServiceMap
+   >
+ >
+
+ // CONSTANTS
+
+ const { IDLE, LOADING, REQUESTING, READY, ERROR, DELETED } = HandleState
+ const {
+   CREATE,
+   LOAD,
+   FIND,
+   REQUEST,
+   UPDATE,
+   TIMEOUT,
+   DELETE,
+   REQUEST_COMPLETE,
+ } = Event
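To make the handle lifecycle above concrete, here is a minimal usage sketch (mine, not part of the published diff). It only uses the API shown in this file; the document id and shape are invented for illustration, and in practice handles are normally created by the repo rather than constructed directly.

import { DocHandle, DocumentId } from "@automerge/automerge-repo"

// `isNew: true` sends CREATE, so the handle starts in the `ready` state immediately.
const handle = new DocHandle<{ count?: number }>(
  "my-document-id" as DocumentId, // hypothetical id, for illustration only
  { isNew: true }
)

// Fired whenever an update actually moves the document's heads.
handle.on("change", ({ doc }) => {
  console.log("count is now", doc.count)
})

// Local edits go through `change`; the handle must be ready or this throws.
handle.change(doc => {
  doc.count = (doc.count ?? 0) + 1
})

// `value()` waits (with a timeout) for the handle to reach the ready state.
const doc = await handle.value() // assumes an async context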
package/src/EphemeralData.ts ADDED
@@ -0,0 +1,46 @@
+ import { decode, encode } from "cbor-x"
+ import EventEmitter from "eventemitter3"
+ import { ChannelId, PeerId } from "./index.js"
+ import { MessagePayload } from "./network/NetworkAdapter.js"
+
+ /**
+  * EphemeralData provides a mechanism to broadcast short-lived data — cursor positions, presence,
+  * heartbeats, etc. — that is useful in the moment but not worth persisting.
+  */
+ export class EphemeralData extends EventEmitter<EphemeralDataMessageEvents> {
+   /** Broadcast an ephemeral message */
+   broadcast(channelId: ChannelId, message: unknown) {
+     const messageBytes = encode(message)
+
+     this.emit("message", {
+       targetId: "*" as PeerId, // TODO: we don't really need a targetId for broadcast
+       channelId: ("m/" + channelId) as ChannelId,
+       message: messageBytes,
+       broadcast: true,
+     })
+   }
+
+   /** Receive an ephemeral message */
+   receive(senderId: PeerId, grossChannelId: ChannelId, message: Uint8Array) {
+     const data = decode(message)
+     const channelId = grossChannelId.slice(2) as ChannelId
+     this.emit("data", {
+       peerId: senderId,
+       channelId,
+       data,
+     })
+   }
+ }
+
+ // types
+
+ export interface EphemeralDataPayload {
+   channelId: ChannelId
+   peerId: PeerId
+   data: { peerId: PeerId; channelId: ChannelId; data: unknown }
+ }
+
+ export type EphemeralDataMessageEvents = {
+   message: (event: MessagePayload) => void
+   data: (event: EphemeralDataPayload) => void
+ }
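A short usage sketch (mine, not from the package): the Repo below exposes an EphemeralData instance as repo.ephemeralData, so a client can broadcast presence-style data and listen for it. The channel name and payload shape here are invented for illustration.

import { ChannelId, Repo } from "@automerge/automerge-repo"

declare const repo: Repo // assumed to already exist; see Repo.ts below

const cursors = "cursors" as ChannelId // hypothetical channel name

// Incoming ephemeral messages surface as `data` events with the sender's peerId.
repo.ephemeralData.on("data", ({ peerId, channelId, data }) => {
  console.log(`ephemeral message from ${peerId} on ${channelId}`, data)
})

// Outgoing messages are CBOR-encoded and broadcast on an "m/"-prefixed channel.
repo.ephemeralData.broadcast(cursors, { x: 120, y: 45 })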
package/src/Repo.ts ADDED
@@ -0,0 +1,155 @@
+ import { DocCollection } from "./DocCollection.js"
+ import { EphemeralData } from "./EphemeralData.js"
+ import { NetworkAdapter } from "./network/NetworkAdapter.js"
+ import { NetworkSubsystem } from "./network/NetworkSubsystem.js"
+ import { StorageAdapter } from "./storage/StorageAdapter.js"
+ import { StorageSubsystem } from "./storage/StorageSubsystem.js"
+ import { CollectionSynchronizer } from "./synchronizer/CollectionSynchronizer.js"
+ import { ChannelId, DocumentId, PeerId } from "./types.js"
+
+ import debug from "debug"
+
+ const SYNC_CHANNEL = "sync_channel" as ChannelId
+
+ /** A Repo is a DocCollection with networking, syncing, and storage capabilities. */
+ export class Repo extends DocCollection {
+   #log: debug.Debugger
+
+   networkSubsystem: NetworkSubsystem
+   storageSubsystem?: StorageSubsystem
+   ephemeralData: EphemeralData
+
+   constructor({ storage, network, peerId, sharePolicy }: RepoConfig) {
+     super()
+     this.#log = debug(`automerge-repo:repo`)
+     this.sharePolicy = sharePolicy ?? this.sharePolicy
+
+     // DOC COLLECTION
+
+     // The `document` event is fired by the DocCollection any time we create a new document or look
+     // up a document by ID. We listen for it in order to wire up storage and network synchronization.
+     this.on("document", async ({ handle }) => {
+       if (storageSubsystem) {
+         // Save when the document changes
+         handle.on("change", async ({ handle }) => {
+           const doc = await handle.value()
+           storageSubsystem.save(handle.documentId, doc)
+         })
+
+         // Try to load from disk
+         const binary = await storageSubsystem.loadBinary(handle.documentId)
+         handle.load(binary)
+       }
+
+       handle.request()
+
+       // Register the document with the synchronizer. This advertises our interest in the document.
+       synchronizer.addDocument(handle.documentId)
+     })
+
+     this.on("delete-document", ({ documentId }) => {
+       // TODO Pass the delete on to the network
+       // synchronizer.removeDocument(documentId)
+
+       if (storageSubsystem) {
+         storageSubsystem.remove(documentId)
+       }
+     })
+
+     // SYNCHRONIZER
+     // The synchronizer uses the network subsystem to keep documents in sync with peers.
+
+     const synchronizer = new CollectionSynchronizer(this)
+
+     // When the synchronizer emits sync messages, send them to peers
+     synchronizer.on(
+       "message",
+       ({ targetId, channelId, message, broadcast }) => {
+         this.#log(`sending sync message to ${targetId}`)
+         networkSubsystem.sendMessage(targetId, channelId, message, broadcast)
+       }
+     )
+
+     // STORAGE
+     // The storage subsystem has access to some form of persistence, and deals with saving and loading documents.
+
+     const storageSubsystem = storage ? new StorageSubsystem(storage) : undefined
+     this.storageSubsystem = storageSubsystem
+
+     // NETWORK
+     // The network subsystem deals with sending and receiving messages to and from peers.
+
+     const networkSubsystem = new NetworkSubsystem(network, peerId)
+     this.networkSubsystem = networkSubsystem
+
+     // When we get a new peer, register it with the synchronizer
+     networkSubsystem.on("peer", async ({ peerId }) => {
+       this.#log("peer connected", { peerId })
+       synchronizer.addPeer(peerId)
+     })
+
+     // When a peer disconnects, remove it from the synchronizer
+     networkSubsystem.on("peer-disconnected", ({ peerId }) => {
+       synchronizer.removePeer(peerId)
+     })
+
+     // Handle incoming messages
+     networkSubsystem.on("message", async msg => {
+       const { senderId, channelId, message } = msg
+
+       // TODO: this demands a more principled way of associating channels with recipients
+
+       // Ephemeral channel ids start with "m/"
+       if (channelId.startsWith("m/")) {
+         // Ephemeral message
+         this.#log(`receiving ephemeral message from ${senderId}`)
+         ephemeralData.receive(senderId, channelId, message)
+       } else {
+         // Sync message
+         this.#log(`receiving sync message from ${senderId}`)
+         await synchronizer.receiveSyncMessage(senderId, channelId, message)
+       }
+     })
+
+     // We establish a special channel for sync messages
+     networkSubsystem.join(SYNC_CHANNEL)
+
+     // EPHEMERAL DATA
+     // The ephemeral data subsystem uses the network to send and receive messages that are not
+     // persisted to storage, e.g. cursor position, presence, etc.
+
+     const ephemeralData = new EphemeralData()
+     this.ephemeralData = ephemeralData
+
+     // Send ephemeral messages to peers
+     ephemeralData.on(
+       "message",
+       ({ targetId, channelId, message, broadcast }) => {
+         this.#log(`sending ephemeral message to ${targetId}`)
+         networkSubsystem.sendMessage(targetId, channelId, message, broadcast)
+       }
+     )
+   }
+ }
+
+ export interface RepoConfig {
+   /** Our unique identifier */
+   peerId?: PeerId
+
+   /** A storage adapter can be provided, or not */
+   storage?: StorageAdapter
+
+   /** One or more network adapters must be provided */
+   network: NetworkAdapter[]
+
+   /**
+    * Normal peers typically share generously with everyone (meaning we sync all our documents with
+    * all peers). A server only syncs documents that a peer explicitly requests by ID.
+    */
+   sharePolicy?: SharePolicy
+ }
+
+ export type SharePolicy = (
+   peerId: PeerId,
+   documentId?: DocumentId
+ ) => Promise<boolean>
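For orientation, a minimal construction sketch (mine, not part of the diff). Concrete network and storage adapters are not included in this package, so they are declared as placeholders; the peer id and share policy are invented for illustration.

import {
  Repo,
  NetworkAdapter,
  StorageAdapter,
  PeerId,
  SharePolicy,
} from "@automerge/automerge-repo"

// Placeholder adapter instances; real implementations ship separately.
declare const network: NetworkAdapter[]
declare const storage: StorageAdapter

// A generous policy: offer to sync every document with every peer.
const sharePolicy: SharePolicy = async (_peerId, _documentId) => true

const repo = new Repo({
  network,
  storage, // optional; omit it to run without persistence
  peerId: "alice" as PeerId, // hypothetical peer id
  sharePolicy,
})

Documents are then created and looked up through the DocCollection API (DocCollection.ts, not shown in this excerpt), and the `document` event handler above wires each handle up to storage and sync.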
package/src/helpers/arraysAreEqual.ts ADDED
@@ -0,0 +1,2 @@
+ export const arraysAreEqual = <T>(a: T[], b: T[]) =>
+   a.length === b.length && a.every((element, index) => element === b[index])
package/src/helpers/eventPromise.ts ADDED
@@ -0,0 +1,10 @@
+ import EventEmitter from "eventemitter3"
+
+ /** Returns a promise that resolves when the given event is emitted on the given emitter. */
+ export const eventPromise = (emitter: EventEmitter, event: string) =>
+   new Promise<any>(resolve => emitter.once(event, d => resolve(d)))
+
+ export const eventPromises = (emitters: EventEmitter[], event: string) => {
+   const promises = emitters.map(emitter => eventPromise(emitter, event))
+   return Promise.all(promises)
+ }
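A tiny sketch of how these helpers read in use (my example, with made-up emitters); inside the package they are mostly used by tests:

import EventEmitter from "eventemitter3"
import { eventPromise, eventPromises } from "./helpers/eventPromise.js"

const emitterA = new EventEmitter()
const emitterB = new EventEmitter()

// Resolves with the first payload emitted for "ready" on emitterA.
const ready = eventPromise(emitterA, "ready")

// Resolves once both emitters have emitted "ready".
const allReady = eventPromises([emitterA, emitterB], "ready")

emitterA.emit("ready", { ok: true })
emitterB.emit("ready", { ok: true })

await ready    // assumes an async context
await allReady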
package/src/helpers/headsAreSame.ts ADDED
@@ -0,0 +1,8 @@
+ import * as A from "@automerge/automerge"
+ import { arraysAreEqual } from "./arraysAreEqual.js"
+
+ export const headsAreSame = <T>(a: A.Doc<T>, b: A.Doc<T>) => {
+   const aHeads = A.getHeads(a)
+   const bHeads = A.getHeads(b)
+   return arraysAreEqual(aHeads, bHeads)
+ }
package/src/helpers/mergeArrays.ts ADDED
@@ -0,0 +1,17 @@
+ export function mergeArrays(myArrays: Uint8Array[]) {
+   // Get the total length of all arrays.
+   let length = 0
+   myArrays.forEach(item => {
+     length += item.length
+   })
+
+   // Create a new array with total length and merge all source arrays.
+   const mergedArray = new Uint8Array(length)
+   let offset = 0
+   myArrays.forEach(item => {
+     mergedArray.set(item, offset)
+     offset += item.length
+   })
+
+   return mergedArray
+ }
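A quick sanity-check example of what this helper does (my example; the helper likely exists to combine binary chunks such as incremental saves into one array):

import { mergeArrays } from "./helpers/mergeArrays.js"

// Concatenates the chunks in order into a single Uint8Array.
const merged = mergeArrays([
  new Uint8Array([1, 2]),
  new Uint8Array([3, 4, 5]),
])
// merged is Uint8Array [1, 2, 3, 4, 5], with length 5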
package/src/helpers/pause.ts ADDED
@@ -0,0 +1,9 @@
+ export const pause = (t = 0) =>
+   new Promise<void>(resolve => setTimeout(() => resolve(), t))
+
+ export function rejectOnTimeout<T>(promise: Promise<T>, millis: number): Promise<T> {
+   return Promise.race([
+     promise,
+     pause(millis).then(() => { throw new Error("timeout exceeded") }),
+   ])
+ }
package/src/helpers/withTimeout.ts ADDED
@@ -0,0 +1,28 @@
+ /**
+  * If `promise` is resolved before `t` ms elapse, the timeout is cleared and the result of the
+  * promise is returned. If the timeout ends first, a `TimeoutError` is thrown.
+  */
+ export const withTimeout = async <T>(
+   promise: Promise<T>,
+   t: number
+ ): Promise<T> => {
+   let timeoutId: ReturnType<typeof setTimeout>
+   const timeoutPromise = new Promise<never>((_, reject) => {
+     timeoutId = setTimeout(
+       () => reject(new TimeoutError(`withTimeout: timed out after ${t}ms`)),
+       t
+     )
+   })
+   try {
+     return await Promise.race([promise, timeoutPromise])
+   } finally {
+     clearTimeout(timeoutId!)
+   }
+ }
+
+ export class TimeoutError extends Error {
+   constructor(message: string) {
+     super(message)
+     this.name = "TimeoutError"
+   }
+ }
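DocHandle.value() above uses this helper to bound how long it waits for the ready state; in isolation it reads like this (my sketch, with a made-up promise and an assumed async context):

import { withTimeout, TimeoutError } from "./helpers/withTimeout.js"

declare const slowOperation: Promise<string> // hypothetical

try {
  // Resolves with the operation's result, or rejects with TimeoutError after 5000 ms.
  const result = await withTimeout(slowOperation, 5000)
  console.log(result)
} catch (error) {
  if (error instanceof TimeoutError) {
    // the 5 s budget elapsed before the operation finished
  } else {
    throw error
  }
}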
package/src/index.ts ADDED
@@ -0,0 +1,22 @@
+ export { DocCollection } from "./DocCollection.js"
+ export { DocHandle, HandleState } from "./DocHandle.js"
+ export type {
+   DocHandleChangePayload,
+   DocHandleMessagePayload,
+   DocHandlePatchPayload,
+ } from "./DocHandle.js"
+ export { NetworkAdapter } from "./network/NetworkAdapter.js"
+ export type {
+   InboundMessagePayload,
+   MessagePayload,
+   OpenPayload,
+   PeerCandidatePayload,
+   PeerDisconnectedPayload,
+ } from "./network/NetworkAdapter.js"
+ export { NetworkSubsystem } from "./network/NetworkSubsystem.js"
+ export { Repo, type SharePolicy } from "./Repo.js"
+ export { StorageAdapter } from "./storage/StorageAdapter.js"
+ export { StorageSubsystem } from "./storage/StorageSubsystem.js"
+ export { CollectionSynchronizer } from "./synchronizer/CollectionSynchronizer.js"
+ export * from "./types.js"
+ export * from "./test-utilities/adapter-tests.js"