@automerge/automerge-repo 1.0.0-alpha.0 → 1.0.0-alpha.3

This diff shows the changes between publicly released versions of the package as they appear in the supported public registries, and is provided for informational purposes only.
Files changed (68)
  1. package/dist/DocCollection.d.ts +2 -1
  2. package/dist/DocCollection.d.ts.map +1 -1
  3. package/dist/DocCollection.js +17 -8
  4. package/dist/DocHandle.d.ts +27 -7
  5. package/dist/DocHandle.d.ts.map +1 -1
  6. package/dist/DocHandle.js +47 -23
  7. package/dist/DocUrl.d.ts +3 -3
  8. package/dist/DocUrl.js +9 -9
  9. package/dist/EphemeralData.d.ts +8 -16
  10. package/dist/EphemeralData.d.ts.map +1 -1
  11. package/dist/EphemeralData.js +1 -28
  12. package/dist/Repo.d.ts +0 -2
  13. package/dist/Repo.d.ts.map +1 -1
  14. package/dist/Repo.js +18 -36
  15. package/dist/helpers/headsAreSame.d.ts +2 -2
  16. package/dist/helpers/headsAreSame.d.ts.map +1 -1
  17. package/dist/helpers/headsAreSame.js +1 -4
  18. package/dist/helpers/tests/network-adapter-tests.d.ts.map +1 -1
  19. package/dist/helpers/tests/network-adapter-tests.js +15 -13
  20. package/dist/index.d.ts +2 -1
  21. package/dist/index.d.ts.map +1 -1
  22. package/dist/network/NetworkAdapter.d.ts +4 -13
  23. package/dist/network/NetworkAdapter.d.ts.map +1 -1
  24. package/dist/network/NetworkSubsystem.d.ts +5 -4
  25. package/dist/network/NetworkSubsystem.d.ts.map +1 -1
  26. package/dist/network/NetworkSubsystem.js +39 -25
  27. package/dist/network/messages.d.ts +57 -0
  28. package/dist/network/messages.d.ts.map +1 -0
  29. package/dist/network/messages.js +21 -0
  30. package/dist/storage/StorageSubsystem.d.ts +2 -2
  31. package/dist/storage/StorageSubsystem.d.ts.map +1 -1
  32. package/dist/storage/StorageSubsystem.js +36 -6
  33. package/dist/synchronizer/CollectionSynchronizer.d.ts +3 -2
  34. package/dist/synchronizer/CollectionSynchronizer.d.ts.map +1 -1
  35. package/dist/synchronizer/CollectionSynchronizer.js +19 -13
  36. package/dist/synchronizer/DocSynchronizer.d.ts +9 -3
  37. package/dist/synchronizer/DocSynchronizer.d.ts.map +1 -1
  38. package/dist/synchronizer/DocSynchronizer.js +145 -29
  39. package/dist/synchronizer/Synchronizer.d.ts +3 -4
  40. package/dist/synchronizer/Synchronizer.d.ts.map +1 -1
  41. package/dist/types.d.ts +1 -3
  42. package/dist/types.d.ts.map +1 -1
  43. package/fuzz/fuzz.ts +4 -4
  44. package/package.json +3 -3
  45. package/src/DocCollection.ts +19 -9
  46. package/src/DocHandle.ts +82 -37
  47. package/src/DocUrl.ts +9 -9
  48. package/src/EphemeralData.ts +6 -36
  49. package/src/Repo.ts +20 -52
  50. package/src/helpers/headsAreSame.ts +3 -5
  51. package/src/helpers/tests/network-adapter-tests.ts +18 -14
  52. package/src/index.ts +12 -2
  53. package/src/network/NetworkAdapter.ts +4 -20
  54. package/src/network/NetworkSubsystem.ts +61 -38
  55. package/src/network/messages.ts +123 -0
  56. package/src/storage/StorageSubsystem.ts +42 -6
  57. package/src/synchronizer/CollectionSynchronizer.ts +38 -19
  58. package/src/synchronizer/DocSynchronizer.ts +196 -38
  59. package/src/synchronizer/Synchronizer.ts +3 -8
  60. package/src/types.ts +4 -1
  61. package/test/CollectionSynchronizer.test.ts +6 -7
  62. package/test/DocHandle.test.ts +36 -22
  63. package/test/DocSynchronizer.test.ts +85 -9
  64. package/test/Repo.test.ts +279 -59
  65. package/test/StorageSubsystem.test.ts +9 -9
  66. package/test/helpers/DummyNetworkAdapter.ts +1 -1
  67. package/tsconfig.json +2 -1
  68. package/test/EphemeralData.test.ts +0 -44
package/src/network/NetworkSubsystem.ts
@@ -1,17 +1,31 @@
 import EventEmitter from "eventemitter3"
+import { PeerId } from "../types.js"
+import { NetworkAdapter, PeerDisconnectedPayload } from "./NetworkAdapter.js"
+
 import {
-  InboundMessagePayload,
-  NetworkAdapter,
-  PeerDisconnectedPayload,
-} from "./NetworkAdapter.js"
-import { ChannelId, PeerId } from "../types.js"
+  EphemeralMessage,
+  isEphemeralMessage,
+  isValidMessage,
+  Message,
+  MessageContents,
+} from "./messages.js"
 
 import debug from "debug"
+import { SessionId } from "../EphemeralData.js"
+
+type EphemeralMessageSource = `${PeerId}:${SessionId}`
+
+const getEphemeralMessageSource = (message: EphemeralMessage) =>
+  `${message.senderId}:${message.sessionId}` as EphemeralMessageSource
 
 export class NetworkSubsystem extends EventEmitter<NetworkSubsystemEvents> {
   #log: debug.Debugger
   #adaptersByPeer: Record<PeerId, NetworkAdapter> = {}
 
+  #count = 0
+  #sessionId: SessionId = Math.random().toString(36).slice(2) as SessionId
+  #ephemeralSessionCounts: Record<EphemeralMessageSource, number> = {}
+
   constructor(
     private adapters: NetworkAdapter[],
     public peerId = randomPeerId()
@@ -44,20 +58,24 @@ export class NetworkSubsystem extends EventEmitter<NetworkSubsystemEvents> {
     })
 
     networkAdapter.on("message", msg => {
-      const { senderId, channelId, broadcast, message } = msg
-      this.#log(`message from ${senderId}`)
-
-      // If we receive a broadcast message from a network adapter we need to re-broadcast it to all
-      // our other peers. This is the world's worst gossip protocol.
-
-      // TODO: This relies on the network forming a tree! If there are cycles, this approach will
-      // loop messages around forever.
-      if (broadcast) {
-        Object.entries(this.#adaptersByPeer)
-          .filter(([id]) => id !== senderId)
-          .forEach(([id, peer]) => {
-            peer.sendMessage(id as PeerId, channelId, message, broadcast)
-          })
+      if (!isValidMessage(msg)) {
+        this.#log(`invalid message: ${JSON.stringify(msg)}`)
+        return
+      }
+
+      this.#log(`message from ${msg.senderId}`)
+
+      if (isEphemeralMessage(msg)) {
+        const source = getEphemeralMessageSource(msg)
+        if (
+          this.#ephemeralSessionCounts[source] === undefined ||
+          msg.count > this.#ephemeralSessionCounts[source]
+        ) {
+          this.#ephemeralSessionCounts[source] = msg.count
+          this.emit("message", msg)
+        }
+
+        return
       }
 
       this.emit("message", msg)
@@ -75,25 +93,30 @@ export class NetworkSubsystem extends EventEmitter<NetworkSubsystemEvents> {
     networkAdapter.join()
   }
 
-  sendMessage(
-    peerId: PeerId,
-    channelId: ChannelId,
-    message: Uint8Array,
-    broadcast: boolean
-  ) {
-    if (broadcast) {
-      Object.entries(this.#adaptersByPeer).forEach(([id, peer]) => {
-        this.#log(`sending broadcast to ${id}`)
-        peer.sendMessage(id as PeerId, channelId, message, true)
-      })
+  send(message: MessageContents) {
+    const peer = this.#adaptersByPeer[message.targetId]
+    if (!peer) {
+      this.#log(`Tried to send message but peer not found: ${message.targetId}`)
+      return
+    }
+    this.#log(`Sending message to ${message.targetId}`)
+
+    if (isEphemeralMessage(message)) {
+      const outbound =
+        "count" in message
+          ? message
+          : {
+              ...message,
+              count: ++this.#count,
+              sessionId: this.#sessionId,
+              senderId: this.peerId,
+            }
+      this.#log("Ephemeral message", outbound)
+      peer.send(outbound)
     } else {
-      const peer = this.#adaptersByPeer[peerId]
-      if (!peer) {
-        this.#log(`Tried to send message but peer not found: ${peerId}`)
-        return
-      }
-      this.#log(`Sending message to ${peerId}`)
-      peer.sendMessage(peerId, channelId, message, false)
+      const outbound = { ...message, senderId: this.peerId }
+      this.#log("Sync message", outbound)
+      peer.send(outbound)
     }
   }
 
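With this change callers no longer pass (peerId, channelId, bytes, broadcast) tuples; they hand send() a typed MessageContents and the subsystem stamps senderId (plus count and sessionId for ephemeral messages) before routing to the adapter registered for targetId. A hedged usage sketch, where sendSync is a hypothetical helper, not package API:

import { NetworkSubsystem } from "./NetworkSubsystem.js"
import { DocumentId, PeerId } from "../types.js"

// Hypothetical helper: push one Automerge sync payload through the new API.
function sendSync(
  net: NetworkSubsystem,
  targetId: PeerId,
  documentId: DocumentId,
  data: Uint8Array
) {
  // senderId is stamped by the subsystem itself; an "ephemeral" message
  // would additionally receive count and sessionId before hitting the wire.
  net.send({ type: "sync", targetId, documentId, data })
}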
@@ -117,7 +140,7 @@ function randomPeerId() {
 export interface NetworkSubsystemEvents {
   peer: (payload: PeerPayload) => void
   "peer-disconnected": (payload: PeerDisconnectedPayload) => void
-  message: (payload: InboundMessagePayload) => void
+  message: (payload: Message) => void
 }
 
 export interface PeerPayload {
package/src/network/messages.ts (new file)
@@ -0,0 +1,123 @@
+// utilities
+import { SessionId } from "../EphemeralData"
+import { DocumentId, PeerId } from "../types"
+
+export function isValidMessage(
+  message: NetworkAdapterMessage
+): message is
+  | SyncMessage
+  | EphemeralMessage
+  | RequestMessage
+  | DocumentUnavailableMessage {
+  return (
+    typeof message === "object" &&
+    typeof message.type === "string" &&
+    typeof message.senderId === "string" &&
+    (isSyncMessage(message) ||
+      isEphemeralMessage(message) ||
+      isRequestMessage(message) ||
+      isDocumentUnavailableMessage(message))
+  )
+}
+
+export function isDocumentUnavailableMessage(
+  message: NetworkAdapterMessage
+): message is DocumentUnavailableMessage {
+  return message.type === "doc-unavailable"
+}
+
+export function isRequestMessage(
+  message: NetworkAdapterMessage
+): message is RequestMessage {
+  return message.type === "request"
+}
+
+export function isSyncMessage(
+  message: NetworkAdapterMessage
+): message is SyncMessage {
+  return message.type === "sync"
+}
+
+export function isEphemeralMessage(
+  message: NetworkAdapterMessage | MessageContents
+): message is EphemeralMessage | EphemeralMessageContents {
+  return message.type === "ephemeral"
+}
+
+export interface SyncMessageEnvelope {
+  senderId: PeerId
+}
+
+export interface SyncMessageContents {
+  type: "sync"
+  data: Uint8Array
+  targetId: PeerId
+  documentId: DocumentId
+}
+
+export type SyncMessage = SyncMessageEnvelope & SyncMessageContents
+
+export interface EphemeralMessageEnvelope {
+  senderId: PeerId
+  count: number
+  sessionId: SessionId
+}
+
+export interface EphemeralMessageContents {
+  type: "ephemeral"
+  targetId: PeerId
+  documentId: DocumentId
+  data: Uint8Array
+}
+
+export type EphemeralMessage = EphemeralMessageEnvelope &
+  EphemeralMessageContents
+
+export interface DocumentUnavailableMessageContents {
+  type: "doc-unavailable"
+  documentId: DocumentId
+  targetId: PeerId
+}
+
+export type DocumentUnavailableMessage = SyncMessageEnvelope &
+  DocumentUnavailableMessageContents
+
+export interface RequestMessageContents {
+  type: "request"
+  data: Uint8Array
+  targetId: PeerId
+  documentId: DocumentId
+}
+
+export type RequestMessage = SyncMessageEnvelope & RequestMessageContents
+
+export type MessageContents =
+  | SyncMessageContents
+  | EphemeralMessageContents
+  | RequestMessageContents
+  | DocumentUnavailableMessageContents
+
+export type Message =
+  | SyncMessage
+  | EphemeralMessage
+  | RequestMessage
+  | DocumentUnavailableMessage
+
+export type SynchronizerMessage =
+  | SyncMessage
+  | RequestMessage
+  | DocumentUnavailableMessage
+  | EphemeralMessage
+
+type ArriveMessage = {
+  senderId: PeerId
+  type: "arrive"
+}
+
+type WelcomeMessage = {
+  senderId: PeerId
+  targetId: PeerId
+  type: "welcome"
+}
+
+export type NetworkAdapterMessage = ArriveMessage | WelcomeMessage | Message
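
This new module replaces the old opaque (channelId, Uint8Array, broadcast) payloads with a discriminated union keyed on the type field, so consumers narrow messages with the exported guards instead of inspecting raw bytes. A small sketch of how a handler might branch (the handler itself is illustrative, not part of the package):

import {
  Message,
  isSyncMessage,
  isEphemeralMessage,
  isDocumentUnavailableMessage,
} from "./messages.js"

function handle(message: Message) {
  if (isSyncMessage(message)) {
    // Narrowed to SyncMessage: senderId, targetId, documentId, data.
    console.log(`sync for ${message.documentId}: ${message.data.byteLength}b`)
  } else if (isEphemeralMessage(message)) {
    // Narrowed to EphemeralMessage: adds count and sessionId.
    console.log(`ephemeral #${message.count} from ${message.senderId}`)
  } else if (isDocumentUnavailableMessage(message)) {
    console.log(`${message.senderId} doesn't have ${message.documentId}`)
  }
  // The remaining case is RequestMessage.
}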
package/src/storage/StorageSubsystem.ts
@@ -3,6 +3,8 @@ import { StorageAdapter, StorageKey } from "./StorageAdapter.js"
 import * as sha256 from "fast-sha256"
 import { type DocumentId } from "../types.js"
 import { mergeArrays } from "../helpers/mergeArrays.js"
+import debug from "debug"
+import { headsAreSame } from "../helpers/headsAreSame.js"
 
 // Metadata about a chunk of data loaded from storage. This is stored on the
 // StorageSubsystem so when we are compacting we know what chunks we can safely delete
@@ -30,6 +32,8 @@ function headsHash(heads: A.Heads): string {
 export class StorageSubsystem {
   #storageAdapter: StorageAdapter
   #chunkInfos: Map<DocumentId, StorageChunkInfo[]> = new Map()
+  #storedHeads: Map<DocumentId, A.Heads> = new Map()
+  #log = debug(`automerge-repo:storage-subsystem`)
 
   constructor(storageAdapter: StorageAdapter) {
     this.#storageAdapter = storageAdapter
@@ -39,9 +43,10 @@ export class StorageSubsystem {
     documentId: DocumentId,
     doc: A.Doc<unknown>
   ): Promise<void> {
-    const binary = A.saveIncremental(doc)
+    const binary = A.saveSince(doc, this.#storedHeads.get(documentId) ?? [])
     if (binary && binary.length > 0) {
       const key = [documentId, "incremental", keyHash(binary)]
+      this.#log(`Saving incremental ${key} for document ${documentId}`)
       await this.#storageAdapter.save(key, binary)
       if (!this.#chunkInfos.has(documentId)) {
         this.#chunkInfos.set(documentId, [])
@@ -51,6 +56,7 @@
         type: "incremental",
         size: binary.length,
       })
+      this.#storedHeads.set(documentId, A.getHeads(doc))
     } else {
       return Promise.resolve()
     }
@@ -62,8 +68,14 @@
     sourceChunks: StorageChunkInfo[]
   ): Promise<void> {
     const binary = A.save(doc)
-    const key = [documentId, "snapshot", headsHash(A.getHeads(doc))]
-    const oldKeys = new Set(sourceChunks.map(c => c.key))
+    const snapshotHash = headsHash(A.getHeads(doc))
+    const key = [documentId, "snapshot", snapshotHash]
+    const oldKeys = new Set(
+      sourceChunks.map(c => c.key).filter(k => k[2] !== snapshotHash)
+    )
+
+    this.#log(`Saving snapshot ${key} for document ${documentId}`)
+    this.#log(`deleting old chunks ${Array.from(oldKeys)}`)
 
     await this.#storageAdapter.save(key, binary)
 
@@ -76,7 +88,7 @@
     this.#chunkInfos.set(documentId, newChunkInfos)
   }
 
-  async loadBinary(documentId: DocumentId): Promise<Uint8Array> {
+  async loadDoc(documentId: DocumentId): Promise<A.Doc<unknown> | null> {
     const loaded = await this.#storageAdapter.loadRange([documentId])
     const binaries = []
     const chunkInfos: StorageChunkInfo[] = []
@@ -93,16 +105,26 @@
       binaries.push(chunk.data)
     }
     this.#chunkInfos.set(documentId, chunkInfos)
-    return mergeArrays(binaries)
+    const binary = mergeArrays(binaries)
+    if (binary.length === 0) {
+      return null
+    }
+    const newDoc = A.loadIncremental(A.init(), binary)
+    this.#storedHeads.set(documentId, A.getHeads(newDoc))
+    return newDoc
   }
 
-  async save(documentId: DocumentId, doc: A.Doc<unknown>): Promise<void> {
+  async saveDoc(documentId: DocumentId, doc: A.Doc<unknown>): Promise<void> {
+    if (!this.#shouldSave(documentId, doc)) {
+      return
+    }
     let sourceChunks = this.#chunkInfos.get(documentId) ?? []
     if (this.#shouldCompact(sourceChunks)) {
       this.#saveTotal(documentId, doc, sourceChunks)
     } else {
       this.#saveIncremental(documentId, doc)
     }
+    this.#storedHeads.set(documentId, A.getHeads(doc))
  }
 
   async remove(documentId: DocumentId) {
@@ -110,6 +132,20 @@
     this.#storageAdapter.removeRange([documentId, "incremental"])
   }
 
+  #shouldSave(documentId: DocumentId, doc: A.Doc<unknown>): boolean {
+    const oldHeads = this.#storedHeads.get(documentId)
+    if (!oldHeads) {
+      return true
+    }
+
+    const newHeads = A.getHeads(doc)
+    if (headsAreSame(newHeads, oldHeads)) {
+      return false
+    }
+
+    return true
+  }
+
   #shouldCompact(sourceChunks: StorageChunkInfo[]) {
     // compact if the incremental size is greater than the snapshot size
     let snapshotSize = 0
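
The storage changes all hang off the new #storedHeads map: saveDoc becomes a no-op when headsAreSame reports no new changes, and #saveIncremental asks Automerge for exactly the operations added since the last write via A.saveSince, rather than relying on saveIncremental's internal bookkeeping. A sketch of the save-skipping rule in isolation, assuming @automerge/automerge's getHeads/saveSince and inlining a stand-in for the package's headsAreSame helper:

import * as A from "@automerge/automerge"

const storedHeads = new Map<string, A.Heads>()

/** Return the bytes to persist, or null if nothing changed since last save. */
function changesToPersist(id: string, doc: A.Doc<unknown>): Uint8Array | null {
  const old = storedHeads.get(id)
  // Identical heads mean no operations were added since the last write.
  if (old && headsAreSame(A.getHeads(doc), old)) return null

  const binary = A.saveSince(doc, old ?? []) // only ops after the stored heads
  storedHeads.set(id, A.getHeads(doc))
  return binary.length > 0 ? binary : null
}

// Stand-in for the package's headsAreSame helper (heads are sorted hashes).
function headsAreSame(a: A.Heads, b: A.Heads): boolean {
  return a.length === b.length && a.every((hash, i) => hash === b[i])
}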
package/src/synchronizer/CollectionSynchronizer.ts
@@ -5,11 +5,17 @@ import {
   binaryToDocumentId,
   stringifyAutomergeUrl,
 } from "../DocUrl.js"
-import { ChannelId, BinaryDocumentId, PeerId, DocumentId } from "../types.js"
+import { PeerId, DocumentId } from "../types.js"
 import { DocSynchronizer } from "./DocSynchronizer.js"
 import { Synchronizer } from "./Synchronizer.js"
 
 import debug from "debug"
+import {
+  DocumentUnavailableMessage,
+  RequestMessage,
+  SynchronizerMessage,
+  SyncMessage,
+} from "../network/messages.js"
 const log = debug("automerge-repo:collectionsync")
 
 /** A CollectionSynchronizer is responsible for synchronizing a DocCollection with peers. */
@@ -20,6 +26,9 @@ export class CollectionSynchronizer extends Synchronizer {
   /** A map of documentIds to their synchronizers */
   #docSynchronizers: Record<DocumentId, DocSynchronizer> = {}
 
+  /** Used to determine if the document is known to the Collection and a synchronizer exists or is being set up */
+  #docSetUp: Record<DocumentId, boolean> = {}
+
   constructor(private repo: DocCollection) {
     super()
   }
@@ -57,37 +66,42 @@ export class CollectionSynchronizer extends Synchronizer {
    * When we receive a sync message for a document we haven't got in memory, we
    * register it with the repo and start synchronizing
    */
-  async receiveSyncMessage(
-    peerId: PeerId,
-    channelId: ChannelId,
-    message: Uint8Array
-  ) {
-    log(`onSyncMessage: ${peerId}, ${channelId}, ${message.byteLength}bytes`)
-
-    const documentId = channelId as unknown as DocumentId
+  async receiveMessage(message: SynchronizerMessage) {
+    log(
+      `onSyncMessage: ${message.senderId}, ${message.documentId}, ${
+        "data" in message ? message.data.byteLength + "bytes" : ""
+      }`
+    )
+
+    const documentId = message.documentId
     if (!documentId) {
       throw new Error("received a message with an invalid documentId")
     }
-    const docSynchronizer = await this.#fetchDocSynchronizer(documentId)
 
-    await docSynchronizer.receiveSyncMessage(peerId, channelId, message)
+    this.#docSetUp[documentId] = true
+
+    const docSynchronizer = this.#fetchDocSynchronizer(documentId)
+
+    docSynchronizer.receiveMessage(message)
 
     // Initiate sync with any new peers
     const peers = await this.#documentGenerousPeers(documentId)
-    peers
-      .filter(peerId => !docSynchronizer.hasPeer(peerId))
-      .forEach(peerId => docSynchronizer.beginSync(peerId))
+    docSynchronizer.beginSync(
+      peers.filter(peerId => !docSynchronizer.hasPeer(peerId))
+    )
   }
 
   /**
    * Starts synchronizing the given document with all peers that we share it generously with.
    */
   addDocument(documentId: DocumentId) {
+    // HACK: this is a hack to prevent us from adding the same document twice
+    if (this.#docSetUp[documentId]) {
+      return
+    }
     const docSynchronizer = this.#fetchDocSynchronizer(documentId)
     void this.#documentGenerousPeers(documentId).then(peers => {
-      peers.forEach(peerId => {
-        docSynchronizer.beginSync(peerId)
-      })
+      docSynchronizer.beginSync(peers)
     })
   }
 
@@ -99,11 +113,16 @@
   /** Adds a peer and maybe starts synchronizing with them */
   addPeer(peerId: PeerId) {
     log(`adding ${peerId} & synchronizing with them`)
+
+    if (this.#peers.has(peerId)) {
+      return
+    }
+
     this.#peers.add(peerId)
     for (const docSynchronizer of Object.values(this.#docSynchronizers)) {
       const { documentId } = docSynchronizer
-      void this.repo.sharePolicy(peerId, documentId).then(okToShare => {
-        if (okToShare) docSynchronizer.beginSync(peerId)
+      this.repo.sharePolicy(peerId, documentId).then(okToShare => {
+        if (okToShare) docSynchronizer.beginSync([peerId])
       })
     }
   }
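
The net effect of these synchronizer changes: receiveMessage replaces receiveSyncMessage and accepts the whole SynchronizerMessage union, beginSync now takes an array of peers, and both addDocument and addPeer are idempotent. A hedged sketch of the batched beginSync pattern (function names here are placeholders, not package API):

import { PeerId } from "../types.js"

// Hypothetical: start sync with every peer we haven't opened sync with yet,
// in a single beginSync call, as the updated synchronizer now does.
function syncWithNewPeers(
  beginSync: (peers: PeerId[]) => void,
  candidates: PeerId[],
  hasPeer: (peerId: PeerId) => boolean
) {
  const newPeers = candidates.filter(peerId => !hasPeer(peerId))
  if (newPeers.length > 0) beginSync(newPeers)
}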