@automerge/automerge-repo 2.0.0-alpha.7 → 2.0.0-collectionsync-alpha.1

This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as published.
Files changed (164)
  1. package/dist/CollectionHandle.d.ts +14 -0
  2. package/dist/CollectionHandle.d.ts.map +1 -0
  3. package/dist/CollectionHandle.js +37 -0
  4. package/dist/DocHandle.d.ts +37 -6
  5. package/dist/DocHandle.d.ts.map +1 -1
  6. package/dist/DocHandle.js +64 -6
  7. package/dist/DocUrl.d.ts +47 -0
  8. package/dist/DocUrl.d.ts.map +1 -0
  9. package/dist/DocUrl.js +72 -0
  10. package/dist/EphemeralData.d.ts +20 -0
  11. package/dist/EphemeralData.d.ts.map +1 -0
  12. package/dist/EphemeralData.js +1 -0
  13. package/dist/Repo.d.ts +28 -7
  14. package/dist/Repo.d.ts.map +1 -1
  15. package/dist/Repo.js +142 -143
  16. package/dist/ferigan.d.ts +51 -0
  17. package/dist/ferigan.d.ts.map +1 -0
  18. package/dist/ferigan.js +98 -0
  19. package/dist/helpers/tests/storage-adapter-tests.d.ts +2 -2
  20. package/dist/helpers/tests/storage-adapter-tests.d.ts.map +1 -1
  21. package/dist/helpers/tests/storage-adapter-tests.js +19 -39
  22. package/dist/index.d.ts +2 -0
  23. package/dist/index.d.ts.map +1 -1
  24. package/dist/index.js +1 -0
  25. package/dist/network/NetworkSubsystem.d.ts +1 -0
  26. package/dist/network/NetworkSubsystem.d.ts.map +1 -1
  27. package/dist/network/NetworkSubsystem.js +3 -0
  28. package/dist/network/messages.d.ts +7 -1
  29. package/dist/network/messages.d.ts.map +1 -1
  30. package/dist/network/messages.js +2 -1
  31. package/dist/src/DocHandle.d.ts +182 -0
  32. package/dist/src/DocHandle.d.ts.map +1 -0
  33. package/dist/src/DocHandle.js +405 -0
  34. package/dist/src/DocUrl.d.ts +49 -0
  35. package/dist/src/DocUrl.d.ts.map +1 -0
  36. package/dist/src/DocUrl.js +72 -0
  37. package/dist/src/EphemeralData.d.ts +19 -0
  38. package/dist/src/EphemeralData.d.ts.map +1 -0
  39. package/dist/src/EphemeralData.js +1 -0
  40. package/dist/src/Repo.d.ts +74 -0
  41. package/dist/src/Repo.d.ts.map +1 -0
  42. package/dist/src/Repo.js +208 -0
  43. package/dist/src/helpers/arraysAreEqual.d.ts +2 -0
  44. package/dist/src/helpers/arraysAreEqual.d.ts.map +1 -0
  45. package/dist/src/helpers/arraysAreEqual.js +2 -0
  46. package/dist/src/helpers/cbor.d.ts +4 -0
  47. package/dist/src/helpers/cbor.d.ts.map +1 -0
  48. package/dist/src/helpers/cbor.js +8 -0
  49. package/dist/src/helpers/eventPromise.d.ts +11 -0
  50. package/dist/src/helpers/eventPromise.d.ts.map +1 -0
  51. package/dist/src/helpers/eventPromise.js +7 -0
  52. package/dist/src/helpers/headsAreSame.d.ts +2 -0
  53. package/dist/src/helpers/headsAreSame.d.ts.map +1 -0
  54. package/dist/src/helpers/headsAreSame.js +4 -0
  55. package/dist/src/helpers/mergeArrays.d.ts +2 -0
  56. package/dist/src/helpers/mergeArrays.d.ts.map +1 -0
  57. package/dist/src/helpers/mergeArrays.js +15 -0
  58. package/dist/src/helpers/pause.d.ts +6 -0
  59. package/dist/src/helpers/pause.d.ts.map +1 -0
  60. package/dist/src/helpers/pause.js +10 -0
  61. package/dist/src/helpers/tests/network-adapter-tests.d.ts +21 -0
  62. package/dist/src/helpers/tests/network-adapter-tests.d.ts.map +1 -0
  63. package/dist/src/helpers/tests/network-adapter-tests.js +122 -0
  64. package/dist/src/helpers/withTimeout.d.ts +12 -0
  65. package/dist/src/helpers/withTimeout.d.ts.map +1 -0
  66. package/dist/src/helpers/withTimeout.js +24 -0
  67. package/dist/src/index.d.ts +53 -0
  68. package/dist/src/index.d.ts.map +1 -0
  69. package/dist/src/index.js +40 -0
  70. package/dist/src/network/NetworkAdapter.d.ts +26 -0
  71. package/dist/src/network/NetworkAdapter.d.ts.map +1 -0
  72. package/dist/src/network/NetworkAdapter.js +4 -0
  73. package/dist/src/network/NetworkSubsystem.d.ts +23 -0
  74. package/dist/src/network/NetworkSubsystem.d.ts.map +1 -0
  75. package/dist/src/network/NetworkSubsystem.js +120 -0
  76. package/dist/src/network/messages.d.ts +85 -0
  77. package/dist/src/network/messages.d.ts.map +1 -0
  78. package/dist/src/network/messages.js +23 -0
  79. package/dist/src/storage/StorageAdapter.d.ts +14 -0
  80. package/dist/src/storage/StorageAdapter.d.ts.map +1 -0
  81. package/dist/src/storage/StorageAdapter.js +1 -0
  82. package/dist/src/storage/StorageSubsystem.d.ts +12 -0
  83. package/dist/src/storage/StorageSubsystem.d.ts.map +1 -0
  84. package/dist/src/storage/StorageSubsystem.js +145 -0
  85. package/dist/src/synchronizer/CollectionSynchronizer.d.ts +25 -0
  86. package/dist/src/synchronizer/CollectionSynchronizer.d.ts.map +1 -0
  87. package/dist/src/synchronizer/CollectionSynchronizer.js +106 -0
  88. package/dist/src/synchronizer/DocSynchronizer.d.ts +29 -0
  89. package/dist/src/synchronizer/DocSynchronizer.d.ts.map +1 -0
  90. package/dist/src/synchronizer/DocSynchronizer.js +263 -0
  91. package/dist/src/synchronizer/Synchronizer.d.ts +9 -0
  92. package/dist/src/synchronizer/Synchronizer.d.ts.map +1 -0
  93. package/dist/src/synchronizer/Synchronizer.js +2 -0
  94. package/dist/src/types.d.ts +16 -0
  95. package/dist/src/types.d.ts.map +1 -0
  96. package/dist/src/types.js +1 -0
  97. package/dist/storage/StorageAdapter.d.ts +9 -0
  98. package/dist/storage/StorageAdapter.d.ts.map +1 -1
  99. package/dist/storage/StorageAdapter.js +33 -0
  100. package/dist/storage/StorageSubsystem.d.ts +12 -2
  101. package/dist/storage/StorageSubsystem.d.ts.map +1 -1
  102. package/dist/storage/StorageSubsystem.js +42 -100
  103. package/dist/synchronizer/CollectionSynchronizer.d.ts +4 -2
  104. package/dist/synchronizer/CollectionSynchronizer.d.ts.map +1 -1
  105. package/dist/synchronizer/CollectionSynchronizer.js +28 -15
  106. package/dist/synchronizer/DocSynchronizer.d.ts +6 -5
  107. package/dist/synchronizer/DocSynchronizer.d.ts.map +1 -1
  108. package/dist/synchronizer/DocSynchronizer.js +76 -178
  109. package/dist/synchronizer/Synchronizer.d.ts +11 -0
  110. package/dist/synchronizer/Synchronizer.d.ts.map +1 -1
  111. package/dist/test/CollectionSynchronizer.test.d.ts +2 -0
  112. package/dist/test/CollectionSynchronizer.test.d.ts.map +1 -0
  113. package/dist/test/CollectionSynchronizer.test.js +57 -0
  114. package/dist/test/DocHandle.test.d.ts +2 -0
  115. package/dist/test/DocHandle.test.d.ts.map +1 -0
  116. package/dist/test/DocHandle.test.js +238 -0
  117. package/dist/test/DocSynchronizer.test.d.ts +2 -0
  118. package/dist/test/DocSynchronizer.test.d.ts.map +1 -0
  119. package/dist/test/DocSynchronizer.test.js +111 -0
  120. package/dist/test/Network.test.d.ts +2 -0
  121. package/dist/test/Network.test.d.ts.map +1 -0
  122. package/dist/test/Network.test.js +11 -0
  123. package/dist/test/Repo.test.d.ts +2 -0
  124. package/dist/test/Repo.test.d.ts.map +1 -0
  125. package/dist/test/Repo.test.js +568 -0
  126. package/dist/test/StorageSubsystem.test.d.ts +2 -0
  127. package/dist/test/StorageSubsystem.test.d.ts.map +1 -0
  128. package/dist/test/StorageSubsystem.test.js +56 -0
  129. package/dist/test/helpers/DummyNetworkAdapter.d.ts +9 -0
  130. package/dist/test/helpers/DummyNetworkAdapter.d.ts.map +1 -0
  131. package/dist/test/helpers/DummyNetworkAdapter.js +15 -0
  132. package/dist/test/helpers/DummyStorageAdapter.d.ts +16 -0
  133. package/dist/test/helpers/DummyStorageAdapter.d.ts.map +1 -0
  134. package/dist/test/helpers/DummyStorageAdapter.js +33 -0
  135. package/dist/test/helpers/generate-large-object.d.ts +5 -0
  136. package/dist/test/helpers/generate-large-object.d.ts.map +1 -0
  137. package/dist/test/helpers/generate-large-object.js +9 -0
  138. package/dist/test/helpers/getRandomItem.d.ts +2 -0
  139. package/dist/test/helpers/getRandomItem.d.ts.map +1 -0
  140. package/dist/test/helpers/getRandomItem.js +4 -0
  141. package/dist/test/types.d.ts +4 -0
  142. package/dist/test/types.d.ts.map +1 -0
  143. package/dist/test/types.js +1 -0
  144. package/package.json +3 -3
  145. package/src/CollectionHandle.ts +54 -0
  146. package/src/DocHandle.ts +80 -8
  147. package/src/Repo.ts +192 -183
  148. package/src/ferigan.ts +184 -0
  149. package/src/helpers/tests/storage-adapter-tests.ts +31 -62
  150. package/src/index.ts +2 -0
  151. package/src/network/NetworkSubsystem.ts +4 -0
  152. package/src/network/messages.ts +11 -2
  153. package/src/storage/StorageAdapter.ts +42 -0
  154. package/src/storage/StorageSubsystem.ts +59 -119
  155. package/src/synchronizer/CollectionSynchronizer.ts +34 -26
  156. package/src/synchronizer/DocSynchronizer.ts +84 -231
  157. package/src/synchronizer/Synchronizer.ts +14 -0
  158. package/test/CollectionSynchronizer.test.ts +4 -2
  159. package/test/DocHandle.test.ts +72 -13
  160. package/test/DocSynchronizer.test.ts +6 -1
  161. package/test/RemoteHeadsSubscriptions.test.ts +1 -1
  162. package/test/Repo.test.ts +225 -117
  163. package/test/StorageSubsystem.test.ts +20 -16
  164. package/test/remoteHeads.test.ts +1 -1
package/src/storage/StorageSubsystem.ts
@@ -8,28 +8,39 @@ import { ChunkInfo, StorageKey, StorageId } from "./types.js"
 import { keyHash, headsHash } from "./keyHash.js"
 import { chunkTypeFromKey } from "./chunkTypeFromKey.js"
 import * as Uuid from "uuid"
+import { EventEmitter } from "eventemitter3"
+
+type StorageSubsystemEvents = {
+  "document-loaded": (arg: {
+    documentId: DocumentId
+    durationMillis: number
+    numOps: number
+    numChanges: number
+  }) => void
+}
 
 /**
  * The storage subsystem is responsible for saving and loading Automerge documents to and from
  * storage adapter. It also provides a generic key/value storage interface for other uses.
  */
-export class StorageSubsystem {
+export class StorageSubsystem extends EventEmitter<StorageSubsystemEvents> {
   /** The storage adapter to use for saving and loading documents */
   #storageAdapter: StorageAdapterInterface
 
   /** Record of the latest heads we've loaded or saved for each document */
   #storedHeads: Map<DocumentId, A.Heads> = new Map()
 
-  /** Metadata on the chunks we've already loaded for each document */
-  #chunkInfos: Map<DocumentId, ChunkInfo[]> = new Map()
-
-  /** Flag to avoid compacting when a compaction is already underway */
-  #compacting = false
-
   #log = debug(`automerge-repo:storage-subsystem`)
 
-  constructor(storageAdapter: StorageAdapterInterface) {
+  #beelay: A.beelay.Beelay
+
+  constructor(
+    beelay: A.beelay.Beelay,
+    storageAdapter: StorageAdapterInterface
+  ) {
+    super()
     this.#storageAdapter = storageAdapter
+    this.#beelay = beelay
   }
 
   async id(): Promise<StorageId> {
@@ -104,37 +115,26 @@ export class StorageSubsystem {
    * Loads the Automerge document with the given ID from storage.
    */
   async loadDoc<T>(documentId: DocumentId): Promise<A.Doc<T> | null> {
-    // Load all the chunks for this document
-    const chunks = await this.#storageAdapter.loadRange([documentId])
-    const binaries = []
-    const chunkInfos: ChunkInfo[] = []
-
-    for (const chunk of chunks) {
-      // chunks might have been deleted in the interim
-      if (chunk.data === undefined) continue
-
-      const chunkType = chunkTypeFromKey(chunk.key)
-      if (chunkType == null) continue
-
-      chunkInfos.push({
-        key: chunk.key,
-        type: chunkType,
-        size: chunk.data.length,
-      })
-      binaries.push(chunk.data)
+    const doc = await this.#beelay.loadDocument(documentId)
+    if (doc == null) {
+      return null
     }
-    this.#chunkInfos.set(documentId, chunkInfos)
-
-    // Merge the chunks into a single binary
+    const binaries = doc.map(c => c.contents)
     const binary = mergeArrays(binaries)
     if (binary.length === 0) return null
 
     // Load into an Automerge document
+    const start = performance.now()
     const newDoc = A.loadIncremental(A.init(), binary) as A.Doc<T>
+    const end = performance.now()
+    this.emit("document-loaded", {
+      documentId,
+      durationMillis: end - start,
+      ...A.stats(newDoc),
+    })
 
     // Record the latest heads for the document
     this.#storedHeads.set(documentId, A.getHeads(newDoc))
-
     return newDoc
   }
 
@@ -149,13 +149,36 @@ export class StorageSubsystem {
     // Don't bother saving if the document hasn't changed
     if (!this.#shouldSave(documentId, doc)) return
 
-    const sourceChunks = this.#chunkInfos.get(documentId) ?? []
-    if (this.#shouldCompact(sourceChunks)) {
-      await this.#saveTotal(documentId, doc, sourceChunks)
-    } else {
-      await this.#saveIncremental(documentId, doc)
-    }
+    const changes = A.getChanges(
+      A.view(doc, this.#storedHeads.get(documentId) ?? []),
+      doc
+    )
+
+    const commits = changes.map(c => {
+      const decoded = A.decodeChange(c)
+      return {
+        parents: decoded.deps,
+        hash: decoded.hash,
+        contents: c,
+      }
+    })
+    let done = this.#beelay
+      .addCommits({
+        docId: documentId,
+        commits: changes.map(c => {
+          const decoded = A.decodeChange(c)
+          return {
+            parents: decoded.deps,
+            hash: decoded.hash,
+            contents: c,
+          }
+        }),
+      })
+      .catch(e => {
+        console.error(`Error saving document ${documentId}: ${e}`)
+      })
     this.#storedHeads.set(documentId, A.getHeads(doc))
+    await done
   }
 
   /**
@@ -167,66 +190,6 @@ export class StorageSubsystem {
     await this.#storageAdapter.removeRange([documentId, "sync-state"])
   }
 
-  /**
-   * Saves just the incremental changes since the last save.
-   */
-  async #saveIncremental(
-    documentId: DocumentId,
-    doc: A.Doc<unknown>
-  ): Promise<void> {
-    const binary = A.saveSince(doc, this.#storedHeads.get(documentId) ?? [])
-    if (binary && binary.length > 0) {
-      const key = [documentId, "incremental", keyHash(binary)]
-      this.#log(`Saving incremental ${key} for document ${documentId}`)
-      await this.#storageAdapter.save(key, binary)
-      if (!this.#chunkInfos.has(documentId)) {
-        this.#chunkInfos.set(documentId, [])
-      }
-      this.#chunkInfos.get(documentId)!.push({
-        key,
-        type: "incremental",
-        size: binary.length,
-      })
-      this.#storedHeads.set(documentId, A.getHeads(doc))
-    } else {
-      return Promise.resolve()
-    }
-  }
-
-  /**
-   * Compacts the document storage into a single shapshot.
-   */
-  async #saveTotal(
-    documentId: DocumentId,
-    doc: A.Doc<unknown>,
-    sourceChunks: ChunkInfo[]
-  ): Promise<void> {
-    this.#compacting = true
-
-    const binary = A.save(doc)
-    const snapshotHash = headsHash(A.getHeads(doc))
-    const key = [documentId, "snapshot", snapshotHash]
-    const oldKeys = new Set(
-      sourceChunks.map(c => c.key).filter(k => k[2] !== snapshotHash)
-    )
-
-    this.#log(`Saving snapshot ${key} for document ${documentId}`)
-    this.#log(`deleting old chunks ${Array.from(oldKeys)}`)
-
-    await this.#storageAdapter.save(key, binary)
-
-    for (const key of oldKeys) {
-      await this.#storageAdapter.remove(key)
-    }
-
-    const newChunkInfos =
-      this.#chunkInfos.get(documentId)?.filter(c => !oldKeys.has(c.key)) ?? []
-    newChunkInfos.push({ key, type: "snapshot", size: binary.length })
-
-    this.#chunkInfos.set(documentId, newChunkInfos)
-    this.#compacting = false
-  }
-
   async loadSyncState(
     documentId: DocumentId,
     storageId: StorageId
@@ -268,27 +231,4 @@ export class StorageSubsystem {
 
     return true // the document has changed
   }
-
-  /**
-   * We only compact if the incremental size is greater than the snapshot size.
-   */
-  #shouldCompact(sourceChunks: ChunkInfo[]) {
-    if (this.#compacting) return false
-
-    let snapshotSize = 0
-    let incrementalSize = 0
-    for (const chunk of sourceChunks) {
-      if (chunk.type === "snapshot") {
-        snapshotSize += chunk.size
-      } else {
-        incrementalSize += chunk.size
-      }
-    }
-    // if the file is currently small, don't worry, just compact
-    // this might seem a bit arbitrary (1k is arbitrary) but is designed to ensure compaction
-    // for documents with only a single large change on top of an empty (or nearly empty) document
-    // for example: imported NPM modules, images, etc.
-    // if we have even more incrementals (so far) than the snapshot, compact
-    return snapshotSize < 1024 || incrementalSize >= snapshotSize
-  }
 }
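
Note: the rewritten saveDoc above no longer writes incremental chunks or compacts snapshots itself; it recomputes the changes made since the last stored heads and submits them to beelay as commits. Below is a minimal standalone sketch of that change-to-commit mapping, using only public Automerge calls (the helper name commitsSince is ours, not the package's):

    import { next as A } from "@automerge/automerge"

    // Sketch: how the new saveDoc path derives beelay-style commits.
    // Assumes `storedHeads` are the heads recorded at the last save.
    function commitsSince(doc: A.Doc<unknown>, storedHeads: A.Heads) {
      // Changes made since the heads we last persisted for this document
      const changes = A.getChanges(A.view(doc, storedHeads), doc)
      // Each raw change becomes a commit: its bytes plus hash/parent metadata
      return changes.map(contents => {
        const { hash, deps } = A.decodeChange(contents)
        return { parents: deps, hash, contents }
      })
    }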
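
The StorageSubsystem changes also surface load-time metrics through the new "document-loaded" event. A usage sketch, assuming StorageSubsystem and StorageAdapterInterface remain exported from the package root (beelay and storageAdapter stand in for instances you already hold):

    import { next as A } from "@automerge/automerge"
    import {
      StorageSubsystem,
      StorageAdapterInterface,
    } from "@automerge/automerge-repo"

    declare const beelay: A.beelay.Beelay
    declare const storageAdapter: StorageAdapterInterface

    // The constructor now takes the beelay instance first (see diff above)
    const storage = new StorageSubsystem(beelay, storageAdapter)

    // New in this version: per-document load metrics
    storage.on(
      "document-loaded",
      ({ documentId, durationMillis, numOps, numChanges }) => {
        console.log(
          `${documentId} loaded in ${durationMillis.toFixed(1)} ms ` +
            `(${numChanges} changes, ${numOps} ops)`
        )
      }
    )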
package/src/synchronizer/CollectionSynchronizer.ts
@@ -1,11 +1,12 @@
 import debug from "debug"
 import { DocHandle } from "../DocHandle.js"
-import { stringifyAutomergeUrl } from "../AutomergeUrl.js"
+import { parseAutomergeUrl, stringifyAutomergeUrl } from "../AutomergeUrl.js"
 import { Repo } from "../Repo.js"
 import { DocMessage } from "../network/messages.js"
-import { DocumentId, PeerId } from "../types.js"
+import { AutomergeUrl, DocumentId, PeerId } from "../types.js"
 import { DocSynchronizer } from "./DocSynchronizer.js"
 import { Synchronizer } from "./Synchronizer.js"
+import { next as A } from "@automerge/automerge"
 
 const log = debug("automerge-repo:collectionsync")
 
@@ -21,8 +22,15 @@ export class CollectionSynchronizer extends Synchronizer {
   /** Used to determine if the document is know to the Collection and a synchronizer exists or is being set up */
   #docSetUp: Record<DocumentId, boolean> = {}
 
-  constructor(private repo: Repo) {
+  #denylist: DocumentId[]
+
+  constructor(
+    private beelay: A.beelay.Beelay,
+    private repo: Repo,
+    denylist: AutomergeUrl[] = []
+  ) {
     super()
+    this.#denylist = denylist.map(url => parseAutomergeUrl(url).documentId)
   }
 
   /** Returns a synchronizer for the given document, creating one if it doesn't already exist. */
@@ -37,27 +45,13 @@ export class CollectionSynchronizer extends Synchronizer {
   /** Creates a new docSynchronizer and sets it up to propagate messages */
   #initDocSynchronizer(handle: DocHandle<unknown>): DocSynchronizer {
     const docSynchronizer = new DocSynchronizer({
+      beelay: this.beelay,
       handle,
-      onLoadSyncState: async peerId => {
-        if (!this.repo.storageSubsystem) {
-          return
-        }
-
-        const { storageId, isEphemeral } =
-          this.repo.peerMetadataByPeerId[peerId] || {}
-        if (!storageId || isEphemeral) {
-          return
-        }
-
-        return this.repo.storageSubsystem.loadSyncState(
-          handle.documentId,
-          storageId
-        )
-      },
     })
     docSynchronizer.on("message", event => this.emit("message", event))
     docSynchronizer.on("open-doc", event => this.emit("open-doc", event))
     docSynchronizer.on("sync-state", event => this.emit("sync-state", event))
+    docSynchronizer.on("metrics", event => this.emit("metrics", event))
     return docSynchronizer
   }
 
@@ -90,6 +84,19 @@ export class CollectionSynchronizer extends Synchronizer {
       throw new Error("received a message with an invalid documentId")
     }
 
+    if (this.#denylist.includes(documentId)) {
+      this.emit("metrics", {
+        type: "doc-denied",
+        documentId,
+      })
+      this.emit("message", {
+        type: "doc-unavailable",
+        documentId,
+        targetId: message.senderId,
+      })
+      return
+    }
+
     this.#docSetUp[documentId] = true
 
     const docSynchronizer = this.#fetchDocSynchronizer(documentId)
@@ -161,12 +168,13 @@ export class CollectionSynchronizer extends Synchronizer {
       size: { numOps: number; numChanges: number }
     }
   } {
-    return Object.fromEntries(
-      Object.entries(this.docSynchronizers).map(
-        ([documentId, synchronizer]) => {
-          return [documentId, synchronizer.metrics()]
-        }
-      )
-    )
+    return {}
+    // return Object.fromEntries(
+    //   Object.entries(this.docSynchronizers).map(
+    //     ([documentId, synchronizer]) => {
+    //       return [documentId, synchronizer.metrics()]
+    //     }
+    //   )
+    // )
   }
 }
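
The CollectionSynchronizer now takes the beelay instance plus an optional denylist of document URLs; a sync message for a denied document is answered with a "doc-unavailable" message and reported as a "doc-denied" metrics event instead of starting synchronization. A sketch of direct construction, assuming CollectionSynchronizer is importable (in the shipped package the Repo wires this up internally; deniedUrl is a placeholder):

    import { next as A } from "@automerge/automerge"
    import {
      Repo,
      AutomergeUrl,
      CollectionSynchronizer,
    } from "@automerge/automerge-repo"

    declare const beelay: A.beelay.Beelay
    declare const repo: Repo
    declare const deniedUrl: AutomergeUrl

    const synchronizer = new CollectionSynchronizer(beelay, repo, [deniedUrl])

    // Denied documents surface as metrics rather than sync traffic
    synchronizer.on("metrics", event => {
      if (event.type === "doc-denied") {
        console.warn(`denied document: ${event.documentId}`)
      }
    })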