@automerge/automerge-repo 1.0.0-alpha.0 → 1.0.0-alpha.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (68)
  1. package/dist/DocCollection.d.ts +2 -1
  2. package/dist/DocCollection.d.ts.map +1 -1
  3. package/dist/DocCollection.js +17 -8
  4. package/dist/DocHandle.d.ts +27 -7
  5. package/dist/DocHandle.d.ts.map +1 -1
  6. package/dist/DocHandle.js +47 -23
  7. package/dist/DocUrl.d.ts +3 -3
  8. package/dist/DocUrl.js +9 -9
  9. package/dist/EphemeralData.d.ts +8 -16
  10. package/dist/EphemeralData.d.ts.map +1 -1
  11. package/dist/EphemeralData.js +1 -28
  12. package/dist/Repo.d.ts +0 -2
  13. package/dist/Repo.d.ts.map +1 -1
  14. package/dist/Repo.js +18 -36
  15. package/dist/helpers/headsAreSame.d.ts +2 -2
  16. package/dist/helpers/headsAreSame.d.ts.map +1 -1
  17. package/dist/helpers/headsAreSame.js +1 -4
  18. package/dist/helpers/tests/network-adapter-tests.d.ts.map +1 -1
  19. package/dist/helpers/tests/network-adapter-tests.js +15 -13
  20. package/dist/index.d.ts +2 -1
  21. package/dist/index.d.ts.map +1 -1
  22. package/dist/network/NetworkAdapter.d.ts +4 -13
  23. package/dist/network/NetworkAdapter.d.ts.map +1 -1
  24. package/dist/network/NetworkSubsystem.d.ts +5 -4
  25. package/dist/network/NetworkSubsystem.d.ts.map +1 -1
  26. package/dist/network/NetworkSubsystem.js +39 -25
  27. package/dist/network/messages.d.ts +57 -0
  28. package/dist/network/messages.d.ts.map +1 -0
  29. package/dist/network/messages.js +21 -0
  30. package/dist/storage/StorageSubsystem.d.ts +2 -2
  31. package/dist/storage/StorageSubsystem.d.ts.map +1 -1
  32. package/dist/storage/StorageSubsystem.js +36 -6
  33. package/dist/synchronizer/CollectionSynchronizer.d.ts +3 -2
  34. package/dist/synchronizer/CollectionSynchronizer.d.ts.map +1 -1
  35. package/dist/synchronizer/CollectionSynchronizer.js +19 -13
  36. package/dist/synchronizer/DocSynchronizer.d.ts +9 -3
  37. package/dist/synchronizer/DocSynchronizer.d.ts.map +1 -1
  38. package/dist/synchronizer/DocSynchronizer.js +145 -29
  39. package/dist/synchronizer/Synchronizer.d.ts +3 -4
  40. package/dist/synchronizer/Synchronizer.d.ts.map +1 -1
  41. package/dist/types.d.ts +1 -3
  42. package/dist/types.d.ts.map +1 -1
  43. package/fuzz/fuzz.ts +4 -4
  44. package/package.json +3 -3
  45. package/src/DocCollection.ts +19 -9
  46. package/src/DocHandle.ts +82 -37
  47. package/src/DocUrl.ts +9 -9
  48. package/src/EphemeralData.ts +6 -36
  49. package/src/Repo.ts +20 -52
  50. package/src/helpers/headsAreSame.ts +3 -5
  51. package/src/helpers/tests/network-adapter-tests.ts +18 -14
  52. package/src/index.ts +12 -2
  53. package/src/network/NetworkAdapter.ts +4 -20
  54. package/src/network/NetworkSubsystem.ts +61 -38
  55. package/src/network/messages.ts +123 -0
  56. package/src/storage/StorageSubsystem.ts +42 -6
  57. package/src/synchronizer/CollectionSynchronizer.ts +38 -19
  58. package/src/synchronizer/DocSynchronizer.ts +196 -38
  59. package/src/synchronizer/Synchronizer.ts +3 -8
  60. package/src/types.ts +4 -1
  61. package/test/CollectionSynchronizer.test.ts +6 -7
  62. package/test/DocHandle.test.ts +36 -22
  63. package/test/DocSynchronizer.test.ts +85 -9
  64. package/test/Repo.test.ts +279 -59
  65. package/test/StorageSubsystem.test.ts +9 -9
  66. package/test/helpers/DummyNetworkAdapter.ts +1 -1
  67. package/tsconfig.json +2 -1
  68. package/test/EphemeralData.test.ts +0 -44
package/src/DocHandle.ts CHANGED
@@ -17,14 +17,9 @@ import { waitFor } from "xstate/lib/waitFor.js"
17
17
  import { headsAreSame } from "./helpers/headsAreSame.js"
18
18
  import { pause } from "./helpers/pause.js"
19
19
  import { TimeoutError, withTimeout } from "./helpers/withTimeout.js"
20
- import type {
21
- BinaryDocumentId,
22
- ChannelId,
23
- DocumentId,
24
- PeerId,
25
- AutomergeUrl,
26
- } from "./types.js"
27
- import { binaryToDocumentId, stringifyAutomergeUrl } from "./DocUrl.js"
20
+ import type { DocumentId, PeerId, AutomergeUrl } from "./types.js"
21
+ import { stringifyAutomergeUrl } from "./DocUrl.js"
22
+ import { encode } from "cbor-x"
28
23
 
29
24
  /** DocHandle is a wrapper around a single Automerge document that lets us listen for changes. */
30
25
  export class DocHandle<T> //
@@ -83,8 +78,8 @@ export class DocHandle<T> //
83
78
  },
84
79
  loading: {
85
80
  on: {
86
- // LOAD is called by the Repo if the document is found in storage
87
- LOAD: { actions: "onLoad", target: READY },
81
+ // UPDATE is called by the Repo if the document is found in storage
82
+ UPDATE: { actions: "onUpdate", target: READY },
88
83
  // REQUEST is called by the Repo if the document is not found in storage
89
84
  REQUEST: { target: REQUESTING },
90
85
  DELETE: { actions: "onDelete", target: DELETED },
@@ -98,6 +93,10 @@ export class DocHandle<T> //
98
93
  },
99
94
  requesting: {
100
95
  on: {
96
+ MARK_UNAVAILABLE: {
97
+ target: UNAVAILABLE,
98
+ actions: "onUnavailable",
99
+ },
101
100
  // UPDATE is called by the Repo when we receive changes from the network
102
101
  UPDATE: { actions: "onUpdate" },
103
102
  // REQUEST_COMPLETE is called from `onUpdate` when the doc has been fully loaded from the network
@@ -124,20 +123,20 @@ export class DocHandle<T> //
124
123
  deleted: {
125
124
  type: "final",
126
125
  },
126
+ unavailable: {
127
+ on: {
128
+ UPDATE: { actions: "onUpdate" },
129
+ // REQUEST_COMPLETE is called from `onUpdate` when the doc has been fully loaded from the network
130
+ REQUEST_COMPLETE: { target: READY },
131
+ DELETE: { actions: "onDelete", target: DELETED },
132
+ },
133
+ },
127
134
  },
128
135
  },
129
136
 
130
137
  {
131
138
  actions: {
132
- /** Apply the binary changes from storage and put the updated doc on context */
133
- onLoad: assign((context, { payload }: LoadEvent) => {
134
- const { binary } = payload
135
- const { doc } = context
136
- const newDoc = A.loadIncremental(doc, binary)
137
- return { doc: newDoc }
138
- }),
139
-
140
- /** Put the updated doc on context; if it's different, emit a `change` event */
139
+ /** Put the updated doc on context */
141
140
  onUpdate: assign((context, { payload }: UpdateEvent<T>) => {
142
141
  const { doc: oldDoc } = context
143
142
 
@@ -150,6 +149,12 @@ export class DocHandle<T> //
150
149
  this.emit("delete", { handle: this })
151
150
  return { doc: undefined }
152
151
  }),
152
+ onUnavailable: assign(context => {
153
+ const { doc } = context
154
+
155
+ this.emit("unavailable", { handle: this })
156
+ return { doc }
157
+ }),
153
158
  },
154
159
  }
155
160
  )
@@ -158,9 +163,12 @@ export class DocHandle<T> //
158
163
  const oldDoc = history?.context?.doc
159
164
  const newDoc = context.doc
160
165
 
161
- this.#log(`${event} → ${state}`, newDoc)
166
+ this.#log(`${history?.value}: ${event.type} → ${state}`, newDoc)
162
167
 
163
- const docChanged = newDoc && oldDoc && !headsAreSame(newDoc, oldDoc)
168
+ const docChanged =
169
+ newDoc &&
170
+ oldDoc &&
171
+ !headsAreSame(A.getHeads(newDoc), A.getHeads(oldDoc))
164
172
  if (docChanged) {
165
173
  this.emit("heads-changed", { handle: this, doc: newDoc })
166
174
 
@@ -224,6 +232,7 @@ export class DocHandle<T> //
224
232
  * @returns true if the document has been marked as deleted
225
233
  */
226
234
  isDeleted = () => this.inState([HandleState.DELETED])
235
+ isUnavailable = () => this.inState([HandleState.UNAVAILABLE])
227
236
  inState = (states: HandleState[]) =>
228
237
  states.some(this.#machine?.getSnapshot().matches)
229
238
 
@@ -248,7 +257,9 @@ export class DocHandle<T> //
248
257
  *
249
258
  * @param {awaitStates=[READY]} optional states to wait for, such as "LOADING". mostly for internal use.
250
259
  */
251
- async doc(awaitStates: HandleState[] = [READY]): Promise<A.Doc<T>> {
260
+ async doc(
261
+ awaitStates: HandleState[] = [READY, UNAVAILABLE]
262
+ ): Promise<A.Doc<T> | undefined> {
252
263
  await pause() // yield one tick because reasons
253
264
  try {
254
265
  // wait for the document to enter one of the desired states
@@ -259,7 +270,7 @@ export class DocHandle<T> //
259
270
  else throw error
260
271
  }
261
272
  // Return the document
262
- return this.#doc
273
+ return !this.isUnavailable() ? this.#doc : undefined
263
274
  }
264
275
 
265
276
  /**
@@ -280,13 +291,6 @@ export class DocHandle<T> //
280
291
  return this.#doc
281
292
  }
282
293
 
283
- /** `load` is called by the repo when the document is found in storage */
284
- load(binary: Uint8Array) {
285
- if (binary.length && binary.length > 0) {
286
- this.#machine.send(LOAD, { payload: { binary } })
287
- }
288
- }
289
-
290
294
  /** `update` is called by the repo when we receive changes from the network */
291
295
  update(callback: (doc: A.Doc<T>) => A.Doc<T>) {
292
296
  this.#machine.send(UPDATE, {
@@ -329,6 +333,10 @@ export class DocHandle<T> //
329
333
  })
330
334
  }
331
335
 
336
+ unavailable() {
337
+ this.#machine.send(MARK_UNAVAILABLE)
338
+ }
339
+
332
340
  /** `request` is called by the repo when the document is not found in storage */
333
341
  request() {
334
342
  if (this.#state === LOADING) this.#machine.send(REQUEST)
@@ -338,6 +346,19 @@ export class DocHandle<T> //
338
346
  delete() {
339
347
  this.#machine.send(DELETE)
340
348
  }
349
+
350
+ /** `broadcast` sends an arbitrary ephemeral message out to all reachable peers who would receive sync messages from you
351
+ * it has no guarantee of delivery, and is not persisted to the underlying automerge doc in any way.
352
+ * messages will have a sending PeerId but this is *not* a useful user identifier.
353
+ * a user could have multiple tabs open and would appear as multiple PeerIds.
354
+ * every message source must have a unique PeerId.
355
+ */
356
+ broadcast(message: any) {
357
+ this.emit("ephemeral-message-outbound", {
358
+ handle: this,
359
+ data: encode(message),
360
+ })
361
+ }
341
362
  }
342
363
 
343
364
  // WRAPPER CLASS TYPES
@@ -349,7 +370,7 @@ interface DocHandleOptions {
349
370
 
350
371
  export interface DocHandleMessagePayload {
351
372
  destinationId: PeerId
352
- channelId: ChannelId
373
+ documentId: DocumentId
353
374
  data: Uint8Array
354
375
  }
355
376
 
@@ -369,10 +390,26 @@ export interface DocHandleChangePayload<T> {
369
390
  patchInfo: A.PatchInfo<T>
370
391
  }
371
392
 
393
+ export interface DocHandleEphemeralMessagePayload {
394
+ handle: DocHandle<any>
395
+ senderId: PeerId
396
+ message: unknown
397
+ }
398
+
399
+ export interface DocHandleOutboundEphemeralMessagePayload {
400
+ handle: DocHandle<any>
401
+ data: Uint8Array
402
+ }
403
+
372
404
  export interface DocHandleEvents<T> {
373
405
  "heads-changed": (payload: DocHandleEncodedChangePayload<T>) => void
374
406
  change: (payload: DocHandleChangePayload<T>) => void
375
407
  delete: (payload: DocHandleDeletePayload<T>) => void
408
+ unavailable: (payload: DocHandleDeletePayload<T>) => void
409
+ "ephemeral-message": (payload: DocHandleEphemeralMessagePayload) => void
410
+ "ephemeral-message-outbound": (
411
+ payload: DocHandleOutboundEphemeralMessagePayload
412
+ ) => void
376
413
  }
377
414
 
378
415
  // STATE MACHINE TYPES
@@ -386,6 +423,7 @@ export const HandleState = {
386
423
  READY: "ready",
387
424
  FAILED: "failed",
388
425
  DELETED: "deleted",
426
+ UNAVAILABLE: "unavailable",
389
427
  } as const
390
428
  export type HandleState = (typeof HandleState)[keyof typeof HandleState]
391
429
 
@@ -407,18 +445,17 @@ interface DocHandleContext<T> {
407
445
 
408
446
  export const Event = {
409
447
  CREATE: "CREATE",
410
- LOAD: "LOAD",
411
448
  FIND: "FIND",
412
449
  REQUEST: "REQUEST",
413
450
  REQUEST_COMPLETE: "REQUEST_COMPLETE",
414
451
  UPDATE: "UPDATE",
415
452
  TIMEOUT: "TIMEOUT",
416
453
  DELETE: "DELETE",
454
+ MARK_UNAVAILABLE: "MARK_UNAVAILABLE",
417
455
  } as const
418
456
  type Event = (typeof Event)[keyof typeof Event]
419
457
 
420
458
  type CreateEvent = { type: typeof CREATE; payload: { documentId: string } }
421
- type LoadEvent = { type: typeof LOAD; payload: { binary: Uint8Array } }
422
459
  type FindEvent = { type: typeof FIND; payload: { documentId: string } }
423
460
  type RequestEvent = { type: typeof REQUEST }
424
461
  type RequestCompleteEvent = { type: typeof REQUEST_COMPLETE }
@@ -428,16 +465,17 @@ type UpdateEvent<T> = {
428
465
  payload: { callback: (doc: A.Doc<T>) => A.Doc<T> }
429
466
  }
430
467
  type TimeoutEvent = { type: typeof TIMEOUT }
468
+ type MarkUnavailableEvent = { type: typeof MARK_UNAVAILABLE }
431
469
 
432
470
  type DocHandleEvent<T> =
433
471
  | CreateEvent
434
- | LoadEvent
435
472
  | FindEvent
436
473
  | RequestEvent
437
474
  | RequestCompleteEvent
438
475
  | UpdateEvent<T>
439
476
  | TimeoutEvent
440
477
  | DeleteEvent
478
+ | MarkUnavailableEvent
441
479
 
442
480
  type DocHandleXstateMachine<T> = Interpreter<
443
481
  DocHandleContext<T>,
@@ -456,15 +494,22 @@ type DocHandleXstateMachine<T> = Interpreter<
456
494
  >
457
495
 
458
496
  // CONSTANTS
459
-
460
- export const { IDLE, LOADING, REQUESTING, READY, FAILED, DELETED } = HandleState
497
+ export const {
498
+ IDLE,
499
+ LOADING,
500
+ REQUESTING,
501
+ READY,
502
+ FAILED,
503
+ DELETED,
504
+ UNAVAILABLE,
505
+ } = HandleState
461
506
  const {
462
507
  CREATE,
463
- LOAD,
464
508
  FIND,
465
509
  REQUEST,
466
510
  UPDATE,
467
511
  TIMEOUT,
468
512
  DELETE,
469
513
  REQUEST_COMPLETE,
514
+ MARK_UNAVAILABLE,
470
515
  } = Event
package/src/DocUrl.ts CHANGED
@@ -12,12 +12,12 @@ export const urlPrefix = "automerge:"
12
12
  * given an Automerge URL, return a decoded DocumentId (and the encoded DocumentId)
13
13
  *
14
14
  * @param url
15
- * @returns { documentId: Uint8Array(16), encodedDocumentId: bs58check.encode(documentId) }
15
+ * @returns { binaryDocumentId: BinaryDocumentId, documentId: DocumentId }
16
16
  */
17
17
  export const parseAutomergeUrl = (url: AutomergeUrl) => {
18
- const { binaryDocumentId: binaryDocumentId, encodedDocumentId } = parts(url)
18
+ const { binaryDocumentId, documentId } = parts(url)
19
19
  if (!binaryDocumentId) throw new Error("Invalid document URL: " + url)
20
- return { binaryDocumentId, encodedDocumentId }
20
+ return { binaryDocumentId, documentId }
21
21
  }
22
22
 
23
23
  interface StringifyAutomergeUrlOptions {
@@ -28,7 +28,7 @@ interface StringifyAutomergeUrlOptions {
28
28
  * Given a documentId in either canonical form, return an Automerge URL
29
29
  * Throws on invalid input.
30
30
  * Note: this is an object because we anticipate adding fields in the future.
31
- * @param { documentId: EncodedDocumentId | DocumentId }
31
+ * @param { documentId: BinaryDocumentId | DocumentId }
32
32
  * @returns AutomergeUrl
33
33
  */
34
34
  export const stringifyAutomergeUrl = ({
@@ -79,12 +79,12 @@ export const binaryToDocumentId = (docId: BinaryDocumentId): DocumentId =>
79
79
  * eventually this could include things like heads, so we use this structure
80
80
  * we return both a binary & string-encoded version of the document ID
81
81
  * @param str
82
- * @returns { binaryDocumentId, encodedDocumentId }
82
+ * @returns { binaryDocumentId, documentId }
83
83
  */
84
84
  const parts = (str: string) => {
85
85
  const regex = new RegExp(`^${urlPrefix}(\\w+)$`)
86
- const [m, docMatch] = str.match(regex) || []
87
- const encodedDocumentId = docMatch as DocumentId
88
- const binaryDocumentId = documentIdToBinary(encodedDocumentId)
89
- return { binaryDocumentId, encodedDocumentId }
86
+ const [_, docMatch] = str.match(regex) || []
87
+ const documentId = docMatch as DocumentId
88
+ const binaryDocumentId = documentIdToBinary(documentId)
89
+ return { binaryDocumentId, documentId }
90
90
  }
@@ -1,46 +1,16 @@
1
- import { decode, encode } from "cbor-x"
2
- import EventEmitter from "eventemitter3"
3
- import { ChannelId, PeerId } from "./index.js"
4
- import { MessagePayload } from "./network/NetworkAdapter.js"
5
-
6
- /**
7
- * EphemeralData provides a mechanism to broadcast short-lived data — cursor positions, presence,
8
- * heartbeats, etc. — that is useful in the moment but not worth persisting.
9
- */
10
- export class EphemeralData extends EventEmitter<EphemeralDataMessageEvents> {
11
- /** Broadcast an ephemeral message */
12
- broadcast(channelId: ChannelId, message: unknown) {
13
- const messageBytes = encode(message)
14
-
15
- this.emit("message", {
16
- targetId: "*" as PeerId, // TODO: we don't really need a targetId for broadcast
17
- channelId: ("m/" + channelId) as ChannelId,
18
- message: messageBytes,
19
- broadcast: true,
20
- })
21
- }
22
-
23
- /** Receive an ephemeral message */
24
- receive(senderId: PeerId, grossChannelId: ChannelId, message: Uint8Array) {
25
- const data = decode(message)
26
- const channelId = grossChannelId.slice(2) as ChannelId
27
- this.emit("data", {
28
- peerId: senderId,
29
- channelId,
30
- data,
31
- })
32
- }
33
- }
1
+ import { DocumentId, PeerId } from "./index.js"
2
+ import { EphemeralMessageContents } from "./network/messages.js"
34
3
 
35
4
  // types
5
+ export type SessionId = string & { __SessionId: false }
36
6
 
37
7
  export interface EphemeralDataPayload {
38
- channelId: ChannelId
8
+ documentId: DocumentId
39
9
  peerId: PeerId
40
- data: { peerId: PeerId; channelId: ChannelId; data: unknown }
10
+ data: { peerId: PeerId; documentId: DocumentId; data: unknown }
41
11
  }
42
12
 
43
13
  export type EphemeralDataMessageEvents = {
44
- message: (event: MessagePayload) => void
14
+ message: (event: EphemeralMessageContents) => void
45
15
  data: (event: EphemeralDataPayload) => void
46
16
  }
package/src/Repo.ts CHANGED
@@ -1,5 +1,5 @@
1
+ import debug from "debug"
1
2
  import { DocCollection } from "./DocCollection.js"
2
- import { EphemeralData } from "./EphemeralData.js"
3
3
  import { NetworkAdapter } from "./network/NetworkAdapter.js"
4
4
  import { NetworkSubsystem } from "./network/NetworkSubsystem.js"
5
5
  import { StorageAdapter } from "./storage/StorageAdapter.js"
@@ -7,15 +7,12 @@ import { StorageSubsystem } from "./storage/StorageSubsystem.js"
7
7
  import { CollectionSynchronizer } from "./synchronizer/CollectionSynchronizer.js"
8
8
  import { DocumentId, PeerId } from "./types.js"
9
9
 
10
- import debug from "debug"
11
-
12
10
  /** A Repo is a DocCollection with networking, syncing, and storage capabilities. */
13
11
  export class Repo extends DocCollection {
14
12
  #log: debug.Debugger
15
13
 
16
14
  networkSubsystem: NetworkSubsystem
17
15
  storageSubsystem?: StorageSubsystem
18
- ephemeralData: EphemeralData
19
16
 
20
17
  constructor({ storage, network, peerId, sharePolicy }: RepoConfig) {
21
18
  super()
@@ -30,52 +27,55 @@ export class Repo extends DocCollection {
30
27
  if (storageSubsystem) {
31
28
  // Save when the document changes
32
29
  handle.on("heads-changed", async ({ handle, doc }) => {
33
- await storageSubsystem.save(handle.documentId, doc)
30
+ await storageSubsystem.saveDoc(handle.documentId, doc)
34
31
  })
35
32
 
36
33
  // Try to load from disk
37
- const binary = await storageSubsystem.loadBinary(handle.documentId)
38
- handle.load(binary)
34
+ const loadedDoc = await storageSubsystem.loadDoc(handle.documentId)
35
+ if (loadedDoc) {
36
+ handle.update(() => loadedDoc)
37
+ }
39
38
  }
40
39
 
40
+ handle.on("unavailable", () => {
41
+ this.#log("document unavailable", { documentId: handle.documentId })
42
+ this.emit("unavailable-document", {
43
+ documentId: handle.documentId,
44
+ })
45
+ })
46
+
41
47
  handle.request()
42
48
 
43
49
  // Register the document with the synchronizer. This advertises our interest in the document.
44
50
  synchronizer.addDocument(handle.documentId)
45
51
  })
46
52
 
47
- this.on("delete-document", ({ encodedDocumentId }) => {
53
+ this.on("delete-document", ({ documentId }) => {
48
54
  // TODO Pass the delete on to the network
49
55
  // synchronizer.removeDocument(documentId)
50
56
 
51
57
  if (storageSubsystem) {
52
- storageSubsystem.remove(encodedDocumentId)
58
+ storageSubsystem.remove(documentId)
53
59
  }
54
60
  })
55
61
 
56
62
  // SYNCHRONIZER
57
63
  // The synchronizer uses the network subsystem to keep documents in sync with peers.
58
-
59
64
  const synchronizer = new CollectionSynchronizer(this)
60
65
 
61
66
  // When the synchronizer emits sync messages, send them to peers
62
- synchronizer.on(
63
- "message",
64
- ({ targetId, channelId, message, broadcast }) => {
65
- this.#log(`sending sync message to ${targetId}`)
66
- networkSubsystem.sendMessage(targetId, channelId, message, broadcast)
67
- }
68
- )
67
+ synchronizer.on("message", message => {
68
+ this.#log(`sending sync message to ${message.targetId}`)
69
+ networkSubsystem.send(message)
70
+ })
69
71
 
70
72
  // STORAGE
71
73
  // The storage subsystem has access to some form of persistence, and deals with save and loading documents.
72
-
73
74
  const storageSubsystem = storage ? new StorageSubsystem(storage) : undefined
74
75
  this.storageSubsystem = storageSubsystem
75
76
 
76
77
  // NETWORK
77
78
  // The network subsystem deals with sending and receiving messages to and from peers.
78
-
79
79
  const networkSubsystem = new NetworkSubsystem(network, peerId)
80
80
  this.networkSubsystem = networkSubsystem
81
81
 
@@ -92,40 +92,8 @@ export class Repo extends DocCollection {
92
92
 
93
93
  // Handle incoming messages
94
94
  networkSubsystem.on("message", async msg => {
95
- const { senderId, channelId, message } = msg
96
-
97
- // TODO: this demands a more principled way of associating channels with recipients
98
-
99
- // Ephemeral channel ids start with "m/"
100
- if (channelId.startsWith("m/")) {
101
- // Ephemeral message
102
- this.#log(`receiving ephemeral message from ${senderId}`)
103
- ephemeralData.receive(senderId, channelId, message)
104
- } else {
105
- // Sync message
106
- this.#log(`receiving sync message from ${senderId}`)
107
- await synchronizer.receiveSyncMessage(senderId, channelId, message)
108
- }
95
+ await synchronizer.receiveMessage(msg)
109
96
  })
110
-
111
- // We establish a special channel for sync messages
112
- networkSubsystem.join()
113
-
114
- // EPHEMERAL DATA
115
- // The ephemeral data subsystem uses the network to send and receive messages that are not
116
- // persisted to storage, e.g. cursor position, presence, etc.
117
-
118
- const ephemeralData = new EphemeralData()
119
- this.ephemeralData = ephemeralData
120
-
121
- // Send ephemeral messages to peers
122
- ephemeralData.on(
123
- "message",
124
- ({ targetId, channelId, message, broadcast }) => {
125
- this.#log(`sending ephemeral message to ${targetId}`)
126
- networkSubsystem.sendMessage(targetId, channelId, message, broadcast)
127
- }
128
- )
129
97
  }
130
98
  }
131
99
 
@@ -1,8 +1,6 @@
1
- import * as A from "@automerge/automerge"
1
+ import {Heads} from "@automerge/automerge"
2
2
  import { arraysAreEqual } from "./arraysAreEqual.js"
3
3
 
4
- export const headsAreSame = <T>(a: A.Doc<T>, b: A.Doc<T>) => {
5
- const aHeads = A.getHeads(a)
6
- const bHeads = A.getHeads(b)
7
- return arraysAreEqual(aHeads, bHeads)
4
+ export const headsAreSame = (a: Heads, b: Heads) => {
5
+ return arraysAreEqual(a, b)
8
6
  }
@@ -1,7 +1,8 @@
1
- import { PeerId, Repo, type NetworkAdapter, ChannelId } from "../../index.js"
1
+ import { PeerId, Repo, type NetworkAdapter, DocumentId } from "../../index.js"
2
2
  import { eventPromise, eventPromises } from "../eventPromise.js"
3
3
  import { assert } from "chai"
4
4
  import { describe, it } from "mocha"
5
+ import { pause } from "../pause.js"
5
6
 
6
7
  /**
7
8
  * Runs a series of tests against a set of three peers, each represented by one or more instantiated
@@ -46,7 +47,7 @@ export function runAdapterTests(_setup: SetupFn, title?: string): void {
46
47
 
47
48
  // Bob receives the change
48
49
  await eventPromise(bobHandle, "change")
49
- assert.equal((await bobHandle.doc()).foo, "bar")
50
+ assert.equal((await bobHandle.doc())?.foo, "bar")
50
51
 
51
52
  // Bob changes the document
52
53
  bobHandle.change(d => {
@@ -55,7 +56,7 @@ export function runAdapterTests(_setup: SetupFn, title?: string): void {
55
56
 
56
57
  // Alice receives the change
57
58
  await eventPromise(aliceHandle, "change")
58
- assert.equal((await aliceHandle.doc()).foo, "baz")
59
+ assert.equal((await aliceHandle.doc())?.foo, "baz")
59
60
  }
60
61
 
61
62
  // Run the test in both directions, in case they're different types of adapters
@@ -97,8 +98,8 @@ export function runAdapterTests(_setup: SetupFn, title?: string): void {
97
98
 
98
99
  // Bob and Charlie receive the change
99
100
  await eventPromises([bobHandle, charlieHandle], "change")
100
- assert.equal((await bobHandle.doc()).foo, "bar")
101
- assert.equal((await charlieHandle.doc()).foo, "bar")
101
+ assert.equal((await bobHandle.doc())?.foo, "bar")
102
+ assert.equal((await charlieHandle.doc())?.foo, "bar")
102
103
 
103
104
  // Charlie changes the document
104
105
  charlieHandle.change(d => {
@@ -107,15 +108,13 @@ export function runAdapterTests(_setup: SetupFn, title?: string): void {
107
108
 
108
109
  // Alice and Bob receive the change
109
110
  await eventPromises([aliceHandle, bobHandle], "change")
110
- assert.equal((await bobHandle.doc()).foo, "baz")
111
- assert.equal((await charlieHandle.doc()).foo, "baz")
111
+ assert.equal((await bobHandle.doc())?.foo, "baz")
112
+ assert.equal((await charlieHandle.doc())?.foo, "baz")
112
113
 
113
114
  teardown()
114
115
  })
115
116
 
116
- // TODO: with BroadcastChannel, this test never ends, because it goes into an infinite loop,
117
- // because the network has cycles (see #92)
118
- it.skip("can broadcast a message", async () => {
117
+ it("can broadcast a message", async () => {
119
118
  const { adapters, teardown } = await setup()
120
119
  const [a, b, c] = adapters
121
120
 
@@ -128,13 +127,18 @@ export function runAdapterTests(_setup: SetupFn, title?: string): void {
128
127
  "peer"
129
128
  )
130
129
 
131
- const channelId = "broadcast" as ChannelId
130
+ const aliceHandle = aliceRepo.create<TestDoc>()
131
+ const charlieHandle = charlieRepo.find(aliceHandle.url)
132
+
133
+ // pause to give charlie a chance to let alice know it wants the doc
134
+ await pause(100)
135
+
132
136
  const alicePresenceData = { presence: "alice" }
137
+ aliceHandle.broadcast(alicePresenceData)
133
138
 
134
- aliceRepo.ephemeralData.broadcast(channelId, alicePresenceData)
135
- const { data } = await eventPromise(charlieRepo.ephemeralData, "data")
139
+ const { message } = await eventPromise(charlieHandle, "ephemeral-message")
136
140
 
137
- assert.deepStrictEqual(data, alicePresenceData)
141
+ assert.deepStrictEqual(message, alicePresenceData)
138
142
  teardown()
139
143
  })
140
144
  })
package/src/index.ts CHANGED
@@ -3,12 +3,22 @@ export { DocHandle, HandleState } from "./DocHandle.js"
3
3
  export type { DocHandleChangePayload } from "./DocHandle.js"
4
4
  export { NetworkAdapter } from "./network/NetworkAdapter.js"
5
5
  export type {
6
- InboundMessagePayload,
7
- MessagePayload,
8
6
  OpenPayload,
9
7
  PeerCandidatePayload,
10
8
  PeerDisconnectedPayload,
11
9
  } from "./network/NetworkAdapter.js"
10
+
11
+ // This is a bit confusing right now, but:
12
+ // Message is the type for messages used outside of the network adapters
13
+ // there are some extra internal network adapter-only messages on NetworkAdapterMessage
14
+ // and Message is (as of this writing) a union type for EphmeralMessage and SyncMessage
15
+ export type {
16
+ Message,
17
+ NetworkAdapterMessage,
18
+ EphemeralMessage,
19
+ SyncMessage,
20
+ } from "./network/messages.js"
21
+
12
22
  export { NetworkSubsystem } from "./network/NetworkSubsystem.js"
13
23
  export { Repo, type SharePolicy } from "./Repo.js"
14
24
  export { StorageAdapter, type StorageKey } from "./storage/StorageAdapter.js"
@@ -1,17 +1,13 @@
1
1
  import EventEmitter from "eventemitter3"
2
- import { PeerId, ChannelId } from "../types.js"
2
+ import { PeerId } from "../types.js"
3
+ import { Message } from "./messages.js"
3
4
 
4
5
  export abstract class NetworkAdapter extends EventEmitter<NetworkAdapterEvents> {
5
6
  peerId?: PeerId // hmmm, maybe not
6
7
 
7
8
  abstract connect(url?: string): void
8
9
 
9
- abstract sendMessage(
10
- peerId: PeerId,
11
- channelId: ChannelId,
12
- message: Uint8Array,
13
- broadcast: boolean
14
- ): void
10
+ abstract send(message: Message): void
15
11
 
16
12
  abstract join(): void
17
13
 
@@ -25,7 +21,7 @@ export interface NetworkAdapterEvents {
25
21
  close: () => void
26
22
  "peer-candidate": (payload: PeerCandidatePayload) => void
27
23
  "peer-disconnected": (payload: PeerDisconnectedPayload) => void
28
- message: (payload: InboundMessagePayload) => void
24
+ message: (payload: Message) => void
29
25
  }
30
26
 
31
27
  export interface OpenPayload {
@@ -36,18 +32,6 @@ export interface PeerCandidatePayload {
36
32
  peerId: PeerId
37
33
  }
38
34
 
39
- export interface MessagePayload {
40
- targetId: PeerId
41
- channelId: ChannelId
42
- message: Uint8Array
43
- broadcast: boolean
44
- }
45
-
46
- export interface InboundMessagePayload extends MessagePayload {
47
- type?: string
48
- senderId: PeerId
49
- }
50
-
51
35
  export interface PeerDisconnectedPayload {
52
36
  peerId: PeerId
53
37
  }