@automerge/automerge-repo 2.0.0-alpha.2 → 2.0.0-alpha.22

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (76)
  1. package/README.md +5 -6
  2. package/dist/AutomergeUrl.d.ts +17 -5
  3. package/dist/AutomergeUrl.d.ts.map +1 -1
  4. package/dist/AutomergeUrl.js +71 -24
  5. package/dist/DocHandle.d.ts +89 -20
  6. package/dist/DocHandle.d.ts.map +1 -1
  7. package/dist/DocHandle.js +189 -28
  8. package/dist/FindProgress.d.ts +30 -0
  9. package/dist/FindProgress.d.ts.map +1 -0
  10. package/dist/FindProgress.js +1 -0
  11. package/dist/RemoteHeadsSubscriptions.d.ts +4 -5
  12. package/dist/RemoteHeadsSubscriptions.d.ts.map +1 -1
  13. package/dist/RemoteHeadsSubscriptions.js +4 -1
  14. package/dist/Repo.d.ts +44 -6
  15. package/dist/Repo.d.ts.map +1 -1
  16. package/dist/Repo.js +226 -87
  17. package/dist/entrypoints/fullfat.d.ts +1 -0
  18. package/dist/entrypoints/fullfat.d.ts.map +1 -1
  19. package/dist/entrypoints/fullfat.js +1 -2
  20. package/dist/helpers/abortable.d.ts +39 -0
  21. package/dist/helpers/abortable.d.ts.map +1 -0
  22. package/dist/helpers/abortable.js +45 -0
  23. package/dist/helpers/bufferFromHex.d.ts +3 -0
  24. package/dist/helpers/bufferFromHex.d.ts.map +1 -0
  25. package/dist/helpers/bufferFromHex.js +13 -0
  26. package/dist/helpers/headsAreSame.d.ts +2 -2
  27. package/dist/helpers/headsAreSame.d.ts.map +1 -1
  28. package/dist/helpers/mergeArrays.d.ts +1 -1
  29. package/dist/helpers/mergeArrays.d.ts.map +1 -1
  30. package/dist/helpers/tests/network-adapter-tests.d.ts.map +1 -1
  31. package/dist/helpers/tests/network-adapter-tests.js +13 -13
  32. package/dist/helpers/tests/storage-adapter-tests.d.ts +2 -2
  33. package/dist/helpers/tests/storage-adapter-tests.d.ts.map +1 -1
  34. package/dist/helpers/tests/storage-adapter-tests.js +25 -48
  35. package/dist/index.d.ts +1 -1
  36. package/dist/index.d.ts.map +1 -1
  37. package/dist/index.js +1 -1
  38. package/dist/storage/StorageSubsystem.d.ts +11 -1
  39. package/dist/storage/StorageSubsystem.d.ts.map +1 -1
  40. package/dist/storage/StorageSubsystem.js +20 -4
  41. package/dist/synchronizer/CollectionSynchronizer.d.ts +17 -3
  42. package/dist/synchronizer/CollectionSynchronizer.d.ts.map +1 -1
  43. package/dist/synchronizer/CollectionSynchronizer.js +43 -18
  44. package/dist/synchronizer/DocSynchronizer.d.ts +10 -2
  45. package/dist/synchronizer/DocSynchronizer.d.ts.map +1 -1
  46. package/dist/synchronizer/DocSynchronizer.js +30 -8
  47. package/dist/synchronizer/Synchronizer.d.ts +11 -0
  48. package/dist/synchronizer/Synchronizer.d.ts.map +1 -1
  49. package/dist/types.d.ts +4 -1
  50. package/dist/types.d.ts.map +1 -1
  51. package/fuzz/fuzz.ts +3 -3
  52. package/package.json +3 -3
  53. package/src/AutomergeUrl.ts +101 -26
  54. package/src/DocHandle.ts +256 -38
  55. package/src/FindProgress.ts +48 -0
  56. package/src/RemoteHeadsSubscriptions.ts +11 -9
  57. package/src/Repo.ts +310 -95
  58. package/src/entrypoints/fullfat.ts +1 -2
  59. package/src/helpers/abortable.ts +61 -0
  60. package/src/helpers/bufferFromHex.ts +14 -0
  61. package/src/helpers/headsAreSame.ts +2 -2
  62. package/src/helpers/tests/network-adapter-tests.ts +14 -13
  63. package/src/helpers/tests/storage-adapter-tests.ts +44 -86
  64. package/src/index.ts +2 -0
  65. package/src/storage/StorageSubsystem.ts +29 -4
  66. package/src/synchronizer/CollectionSynchronizer.ts +56 -19
  67. package/src/synchronizer/DocSynchronizer.ts +34 -9
  68. package/src/synchronizer/Synchronizer.ts +14 -0
  69. package/src/types.ts +4 -1
  70. package/test/AutomergeUrl.test.ts +130 -0
  71. package/test/CollectionSynchronizer.test.ts +4 -4
  72. package/test/DocHandle.test.ts +189 -29
  73. package/test/DocSynchronizer.test.ts +10 -3
  74. package/test/Repo.test.ts +377 -191
  75. package/test/StorageSubsystem.test.ts +17 -0
  76. package/test/remoteHeads.test.ts +27 -12
package/src/helpers/tests/network-adapter-tests.ts CHANGED
@@ -49,9 +49,10 @@ export function runNetworkAdapterTests(_setup: SetupFn, title?: string): void {
   // Alice creates a document
   const aliceHandle = aliceRepo.create<TestDoc>()
 
-  // Bob receives the document
-  await eventPromise(bobRepo, "document")
-  const bobHandle = bobRepo.find<TestDoc>(aliceHandle.url)
+  // TODO: ... let connections complete. this shouldn't be necessary.
+  await pause(50)
+
+  const bobHandle = await bobRepo.find<TestDoc>(aliceHandle.url)
 
   // Alice changes the document
   aliceHandle.change(d => {
@@ -60,7 +61,7 @@ export function runNetworkAdapterTests(_setup: SetupFn, title?: string): void {
 
   // Bob receives the change
   await eventPromise(bobHandle, "change")
-  assert.equal((await bobHandle.doc())?.foo, "bar")
+  assert.equal((await bobHandle).doc()?.foo, "bar")
 
   // Bob changes the document
   bobHandle.change(d => {
@@ -69,7 +70,7 @@ export function runNetworkAdapterTests(_setup: SetupFn, title?: string): void {
 
   // Alice receives the change
   await eventPromise(aliceHandle, "change")
-  assert.equal((await aliceHandle.doc())?.foo, "baz")
+  assert.equal(aliceHandle.doc().foo, "baz")
 }
 
 // Run the test in both directions, in case they're different types of adapters
@@ -100,9 +101,9 @@ export function runNetworkAdapterTests(_setup: SetupFn, title?: string): void {
   const docUrl = aliceHandle.url
 
   // Bob and Charlie receive the document
-  await eventPromises([bobRepo, charlieRepo], "document")
-  const bobHandle = bobRepo.find<TestDoc>(docUrl)
-  const charlieHandle = charlieRepo.find<TestDoc>(docUrl)
+  await pause(50)
+  const bobHandle = await bobRepo.find<TestDoc>(docUrl)
+  const charlieHandle = await charlieRepo.find<TestDoc>(docUrl)
 
   // Alice changes the document
   aliceHandle.change(d => {
@@ -111,8 +112,8 @@ export function runNetworkAdapterTests(_setup: SetupFn, title?: string): void {
 
   // Bob and Charlie receive the change
   await eventPromises([bobHandle, charlieHandle], "change")
-  assert.equal((await bobHandle.doc())?.foo, "bar")
-  assert.equal((await charlieHandle.doc())?.foo, "bar")
+  assert.equal(bobHandle.doc().foo, "bar")
+  assert.equal(charlieHandle.doc().foo, "bar")
 
   // Charlie changes the document
   charlieHandle.change(d => {
@@ -121,8 +122,8 @@ export function runNetworkAdapterTests(_setup: SetupFn, title?: string): void {
 
   // Alice and Bob receive the change
   await eventPromises([aliceHandle, bobHandle], "change")
-  assert.equal((await bobHandle.doc())?.foo, "baz")
-  assert.equal((await charlieHandle.doc())?.foo, "baz")
+  assert.equal(bobHandle.doc().foo, "baz")
+  assert.equal(charlieHandle.doc().foo, "baz")
 
   teardown()
 })
@@ -141,7 +142,7 @@ export function runNetworkAdapterTests(_setup: SetupFn, title?: string): void {
   )
 
   const aliceHandle = aliceRepo.create<TestDoc>()
-  const charlieHandle = charlieRepo.find(aliceHandle.url)
+  const charlieHandle = await charlieRepo.find(aliceHandle.url)
 
   // pause to give charlie a chance to let alice know it wants the doc
   await pause(100)
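
The headline API change driving these test edits: in 2.0, `Repo.find()` resolves to a ready handle, and `DocHandle.doc()` becomes a synchronous read (the old promise-returning form survives internally as `legacyAsyncDoc`, as later hunks show). A minimal sketch of the new calling convention; the repo setup here is illustrative, not part of this diff:

```ts
import { Repo } from "@automerge/automerge-repo"

type TestDoc = { foo: string }

// An isolated repo with no network or storage, purely for illustration.
const repo = new Repo({ network: [] })

const created = repo.create<TestDoc>()
created.change(d => {
  d.foo = "bar"
})

// 2.0: find() is async and resolves once the handle is ready
// (previously it returned a handle synchronously).
const handle = await repo.find<TestDoc>(created.url)

// 2.0: doc() is a synchronous read on a ready handle
// (previously you wrote `await handle.doc()`).
console.log(handle.doc().foo) // "bar"
```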
package/src/helpers/tests/storage-adapter-tests.ts CHANGED
@@ -1,4 +1,4 @@
-import { describe, expect, it } from "vitest"
+import { describe, expect, beforeEach, it as _it } from "vitest"
 
 import type { StorageAdapterInterface } from "../../storage/StorageAdapterInterface.js"
 
@@ -8,120 +8,90 @@ const PAYLOAD_C = () => new Uint8Array([2, 111, 74, 131, 236, 96, 142, 193])
 
 const LARGE_PAYLOAD = new Uint8Array(100000).map(() => Math.random() * 256)
 
-export function runStorageAdapterTests(_setup: SetupFn, title?: string): void {
-  const setup = async () => {
-    const { adapter, teardown = NO_OP } = await _setup()
-    return { adapter, teardown }
-  }
+type AdapterTestContext = {
+  adapter: StorageAdapterInterface
+}
+
+const it = _it<AdapterTestContext>
+
+export function runStorageAdapterTests(setup: SetupFn, title?: string): void {
+  beforeEach<AdapterTestContext>(async ctx => {
+    const { adapter, teardown = NO_OP } = await setup()
+    ctx.adapter = adapter
+    return teardown
+  })
 
   describe(`Storage adapter acceptance tests ${
     title ? `(${title})` : ""
   }`, () => {
     describe("load", () => {
-      it("should return undefined if there is no data", async () => {
-        const { adapter, teardown } = await setup()
-
+      it("should return undefined if there is no data", async ({ adapter }) => {
         const actual = await adapter.load(["AAAAA", "sync-state", "xxxxx"])
         expect(actual).toBeUndefined()
-
-        teardown()
       })
     })
 
     describe("save and load", () => {
-      it("should return data that was saved", async () => {
-        const { adapter, teardown } = await setup()
-
+      it("should return data that was saved", async ({ adapter }) => {
        await adapter.save(["storage-adapter-id"], PAYLOAD_A())
        const actual = await adapter.load(["storage-adapter-id"])
        expect(actual).toStrictEqual(PAYLOAD_A())
-
-        teardown()
       })
 
-      it("should work with composite keys", async () => {
-        const { adapter, teardown } = await setup()
-
+      it("should work with composite keys", async ({ adapter }) => {
        await adapter.save(["AAAAA", "sync-state", "xxxxx"], PAYLOAD_A())
        const actual = await adapter.load(["AAAAA", "sync-state", "xxxxx"])
        expect(actual).toStrictEqual(PAYLOAD_A())
-
-        teardown()
       })
 
-      it("should work with a large payload", async () => {
-        const { adapter, teardown } = await setup()
-
+      it("should work with a large payload", async ({ adapter }) => {
        await adapter.save(["AAAAA", "sync-state", "xxxxx"], LARGE_PAYLOAD)
        const actual = await adapter.load(["AAAAA", "sync-state", "xxxxx"])
        expect(actual).toStrictEqual(LARGE_PAYLOAD)
-
-        teardown()
       })
     })
 
     describe("loadRange", () => {
-      it("should return an empty array if there is no data", async () => {
-        const { adapter, teardown } = await setup()
-
+      it("should return an empty array if there is no data", async ({
+        adapter,
+      }) => {
        expect(await adapter.loadRange(["AAAAA"])).toStrictEqual([])
-
-        teardown()
       })
     })
 
     describe("save and loadRange", () => {
-      it("should return all the data that matches the key", async () => {
-        const { adapter, teardown } = await setup()
-
+      it("should return all the data that matches the key", async ({
+        adapter,
+      }) => {
        await adapter.save(["AAAAA", "sync-state", "xxxxx"], PAYLOAD_A())
        await adapter.save(["AAAAA", "snapshot", "yyyyy"], PAYLOAD_B())
        await adapter.save(["AAAAA", "sync-state", "zzzzz"], PAYLOAD_C())
 
-        expect(await adapter.loadRange(["AAAAA"])).toStrictEqual(
-          expect.arrayContaining([
-            { key: ["AAAAA", "sync-state", "xxxxx"], data: PAYLOAD_A() },
-            { key: ["AAAAA", "snapshot", "yyyyy"], data: PAYLOAD_B() },
-            { key: ["AAAAA", "sync-state", "zzzzz"], data: PAYLOAD_C() },
-          ])
-        )
-
-        expect(await adapter.loadRange(["AAAAA", "sync-state"])).toStrictEqual(
-          expect.arrayContaining([
-            { key: ["AAAAA", "sync-state", "xxxxx"], data: PAYLOAD_A() },
-            { key: ["AAAAA", "sync-state", "zzzzz"], data: PAYLOAD_C() },
-          ])
-        )
-
-        teardown()
-      })
+        expect(await adapter.loadRange(["AAAAA"])).toStrictEqual([
+          { key: ["AAAAA", "sync-state", "xxxxx"], data: PAYLOAD_A() },
+          { key: ["AAAAA", "snapshot", "yyyyy"], data: PAYLOAD_B() },
+          { key: ["AAAAA", "sync-state", "zzzzz"], data: PAYLOAD_C() },
+        ])
 
-      it("should only load values that match they key", async () => {
-        const { adapter, teardown } = await setup()
+        expect(await adapter.loadRange(["AAAAA", "sync-state"])).toStrictEqual([
+          { key: ["AAAAA", "sync-state", "xxxxx"], data: PAYLOAD_A() },
+          { key: ["AAAAA", "sync-state", "zzzzz"], data: PAYLOAD_C() },
+        ])
+      })
 
+      it("should only load values that match they key", async ({ adapter }) => {
        await adapter.save(["AAAAA", "sync-state", "xxxxx"], PAYLOAD_A())
        await adapter.save(["BBBBB", "sync-state", "zzzzz"], PAYLOAD_C())
 
        const actual = await adapter.loadRange(["AAAAA"])
-        expect(actual).toStrictEqual(
-          expect.arrayContaining([
-            { key: ["AAAAA", "sync-state", "xxxxx"], data: PAYLOAD_A() },
-          ])
-        )
-        expect(actual).toStrictEqual(
-          expect.not.arrayContaining([
-            { key: ["BBBBB", "sync-state", "zzzzz"], data: PAYLOAD_C() },
-          ])
-        )
-
-        teardown()
+        expect(actual).toStrictEqual([
+          { key: ["AAAAA", "sync-state", "xxxxx"], data: PAYLOAD_A() },
+        ])
       })
     })
 
    describe("save and remove", () => {
-      it("after removing, should be empty", async () => {
-        const { adapter, teardown } = await setup()
-
+      it("after removing, should be empty", async ({ adapter }) => {
        await adapter.save(["AAAAA", "snapshot", "xxxxx"], PAYLOAD_A())
        await adapter.remove(["AAAAA", "snapshot", "xxxxx"])
 
@@ -129,30 +99,24 @@ export function runStorageAdapterTests(_setup: SetupFn, title?: string): void {
        expect(
          await adapter.load(["AAAAA", "snapshot", "xxxxx"])
        ).toBeUndefined()
-
-        teardown()
      })
    })
 
    describe("save and save", () => {
-      it("should overwrite data saved with the same key", async () => {
-        const { adapter, teardown } = await setup()
-
+      it("should overwrite data saved with the same key", async ({
+        adapter,
+      }) => {
        await adapter.save(["AAAAA", "sync-state", "xxxxx"], PAYLOAD_A())
        await adapter.save(["AAAAA", "sync-state", "xxxxx"], PAYLOAD_B())
 
        expect(await adapter.loadRange(["AAAAA", "sync-state"])).toStrictEqual([
          { key: ["AAAAA", "sync-state", "xxxxx"], data: PAYLOAD_B() },
        ])
-
-        teardown()
      })
    })
 
    describe("removeRange", () => {
-      it("should remove a range of records", async () => {
-        const { adapter, teardown } = await setup()
-
+      it("should remove a range of records", async ({ adapter }) => {
        await adapter.save(["AAAAA", "sync-state", "xxxxx"], PAYLOAD_A())
        await adapter.save(["AAAAA", "snapshot", "yyyyy"], PAYLOAD_B())
        await adapter.save(["AAAAA", "sync-state", "zzzzz"], PAYLOAD_C())
@@ -162,13 +126,9 @@ export function runStorageAdapterTests(_setup: SetupFn, title?: string): void {
        expect(await adapter.loadRange(["AAAAA"])).toStrictEqual([
          { key: ["AAAAA", "snapshot", "yyyyy"], data: PAYLOAD_B() },
        ])
-
-        teardown()
      })
 
-      it("should not remove records that don't match", async () => {
-        const { adapter, teardown } = await setup()
-
+      it("should not remove records that don't match", async ({ adapter }) => {
        await adapter.save(["AAAAA", "sync-state", "xxxxx"], PAYLOAD_A())
        await adapter.save(["BBBBB", "sync-state", "zzzzz"], PAYLOAD_B())
 
@@ -178,8 +138,6 @@ export function runStorageAdapterTests(_setup: SetupFn, title?: string): void {
        expect(actual).toStrictEqual([
          { key: ["BBBBB", "sync-state", "zzzzz"], data: PAYLOAD_B() },
        ])
-
-        teardown()
      })
    })
  })
@@ -189,5 +147,5 @@ const NO_OP = () => {}
 
 export type SetupFn = () => Promise<{
   adapter: StorageAdapterInterface
-  teardown?: () => void
+  teardown?: () => void | Promise<void>
 }>
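
Two things changed in the harness itself: tests now receive the adapter through vitest's typed test context (`it as _it`, rebound with `AdapterTestContext`, with one `beforeEach` that returns the teardown), and `SetupFn`'s `teardown` may now be asynchronous. A sketch of running the suite against a custom adapter under the new contract; the in-memory adapter and the import path are illustrative, not part of this diff:

```ts
import type {
  StorageAdapterInterface,
  StorageKey,
} from "@automerge/automerge-repo"
// Path is an assumption; the suite ships under dist/helpers/tests/ in this package.
import { runStorageAdapterTests } from "@automerge/automerge-repo/helpers/tests/storage-adapter-tests.js"

// Minimal in-memory adapter, keyed by the joined StorageKey.
class InMemoryAdapter implements StorageAdapterInterface {
  #data = new Map<string, Uint8Array>()

  async load(key: StorageKey) {
    return this.#data.get(key.join("/"))
  }
  async save(key: StorageKey, blob: Uint8Array) {
    this.#data.set(key.join("/"), blob)
  }
  async remove(key: StorageKey) {
    this.#data.delete(key.join("/"))
  }
  async loadRange(prefix: StorageKey) {
    const p = prefix.join("/")
    return [...this.#data.entries()]
      .filter(([k]) => k === p || k.startsWith(p + "/"))
      .map(([k, data]) => ({ key: k.split("/"), data }))
  }
  async removeRange(prefix: StorageKey) {
    const p = prefix.join("/")
    for (const k of [...this.#data.keys()]) {
      if (k === p || k.startsWith(p + "/")) this.#data.delete(k)
    }
  }
}

runStorageAdapterTests(async () => ({
  adapter: new InMemoryAdapter(),
  // teardown may now be async; vitest awaits what beforeEach returns
  teardown: async () => {
    /* release resources here */
  },
}))
```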
package/src/index.ts CHANGED
@@ -34,6 +34,8 @@ export {
   stringifyAutomergeUrl,
   interpretAsDocumentId,
   generateAutomergeUrl,
+  encodeHeads,
+  decodeHeads,
 } from "./AutomergeUrl.js"
 export { Repo } from "./Repo.js"
 export { NetworkAdapter } from "./network/NetworkAdapter.js"
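
`encodeHeads` and `decodeHeads` are now part of the public surface. Judging from their use in the StorageSubsystem hunk below (`headsAreSame(encodeHeads(newHeads), encodeHeads(oldHeads))`), `encodeHeads` converts automerge's native heads into the string-encoded form the repo's helpers now expect; treating `decodeHeads` as its inverse is an assumption. A round-trip sketch:

```ts
import * as A from "@automerge/automerge"
import { encodeHeads, decodeHeads } from "@automerge/automerge-repo"

const doc = A.from({ foo: "bar" })

// Native heads as automerge reports them...
const rawHeads = A.getHeads(doc)

// ...encoded into the form repo helpers such as headsAreSame now take.
const encoded = encodeHeads(rawHeads)

// Assumed inverse: decoding should recover the original head hashes.
console.log(decodeHeads(encoded), rawHeads)
```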
package/src/storage/StorageSubsystem.ts CHANGED
@@ -8,12 +8,23 @@ import { ChunkInfo, StorageKey, StorageId } from "./types.js"
 import { keyHash, headsHash } from "./keyHash.js"
 import { chunkTypeFromKey } from "./chunkTypeFromKey.js"
 import * as Uuid from "uuid"
+import { EventEmitter } from "eventemitter3"
+import { encodeHeads } from "../AutomergeUrl.js"
+
+type StorageSubsystemEvents = {
+  "document-loaded": (arg: {
+    documentId: DocumentId
+    durationMillis: number
+    numOps: number
+    numChanges: number
+  }) => void
+}
 
 /**
  * The storage subsystem is responsible for saving and loading Automerge documents to and from
  * storage adapter. It also provides a generic key/value storage interface for other uses.
  */
-export class StorageSubsystem {
+export class StorageSubsystem extends EventEmitter<StorageSubsystemEvents> {
   /** The storage adapter to use for saving and loading documents */
   #storageAdapter: StorageAdapterInterface
 
@@ -29,6 +40,7 @@ export class StorageSubsystem {
   #log = debug(`automerge-repo:storage-subsystem`)
 
   constructor(storageAdapter: StorageAdapterInterface) {
+    super()
     this.#storageAdapter = storageAdapter
   }
 
@@ -130,7 +142,14 @@ export class StorageSubsystem {
     if (binary.length === 0) return null
 
     // Load into an Automerge document
+    const start = performance.now()
     const newDoc = A.loadIncremental(A.init(), binary) as A.Doc<T>
+    const end = performance.now()
+    this.emit("document-loaded", {
+      documentId,
+      durationMillis: end - start,
+      ...A.stats(newDoc),
+    })
 
     // Record the latest heads for the document
     this.#storedHeads.set(documentId, A.getHeads(newDoc))
@@ -155,6 +174,7 @@ export class StorageSubsystem {
     } else {
       await this.#saveIncremental(documentId, doc)
     }
+
     this.#storedHeads.set(documentId, A.getHeads(doc))
   }
 
@@ -232,8 +252,13 @@ export class StorageSubsystem {
     storageId: StorageId
   ): Promise<A.SyncState | undefined> {
     const key = [documentId, "sync-state", storageId]
-    const loaded = await this.#storageAdapter.load(key)
-    return loaded ? A.decodeSyncState(loaded) : undefined
+    try {
+      const loaded = await this.#storageAdapter.load(key)
+      return loaded ? A.decodeSyncState(loaded) : undefined
+    } catch (e) {
+      this.#log(`Error loading sync state for ${documentId} from ${storageId}`)
+      return undefined
+    }
   }
 
   async saveSyncState(
@@ -256,7 +281,7 @@ export class StorageSubsystem {
     }
 
     const newHeads = A.getHeads(doc)
-    if (headsAreSame(newHeads, oldHeads)) {
+    if (headsAreSame(encodeHeads(newHeads), encodeHeads(oldHeads))) {
       // the document hasn't changed
       return false
     }
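
StorageSubsystem is now an EventEmitter, so document load timings can be observed. A sketch of subscribing to the new `document-loaded` event; reaching the subsystem via `repo.storageSubsystem` mirrors what the synchronizer code in this diff does, and the payload shape is the `StorageSubsystemEvents` type above:

```ts
import { Repo } from "@automerge/automerge-repo"
import { NodeFSStorageAdapter } from "@automerge/automerge-repo-storage-nodefs"

const repo = new Repo({ storage: new NodeFSStorageAdapter("./data") })

// Fires whenever a document is loaded from storage; durationMillis wraps the
// A.loadIncremental call, and numOps/numChanges come from A.stats().
repo.storageSubsystem?.on(
  "document-loaded",
  ({ documentId, durationMillis, numOps, numChanges }) => {
    console.log(
      `loaded ${documentId} in ${durationMillis.toFixed(1)}ms: ` +
        `${numChanges} changes, ${numOps} ops`
    )
  }
)
```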
package/src/synchronizer/CollectionSynchronizer.ts CHANGED
@@ -1,9 +1,9 @@
 import debug from "debug"
 import { DocHandle } from "../DocHandle.js"
-import { stringifyAutomergeUrl } from "../AutomergeUrl.js"
+import { parseAutomergeUrl } from "../AutomergeUrl.js"
 import { Repo } from "../Repo.js"
 import { DocMessage } from "../network/messages.js"
-import { DocumentId, PeerId } from "../types.js"
+import { AutomergeUrl, DocumentId, PeerId } from "../types.js"
 import { DocSynchronizer } from "./DocSynchronizer.js"
 import { Synchronizer } from "./Synchronizer.js"
 
@@ -15,28 +15,33 @@ export class CollectionSynchronizer extends Synchronizer {
   #peers: Set<PeerId> = new Set()
 
   /** A map of documentIds to their synchronizers */
-  #docSynchronizers: Record<DocumentId, DocSynchronizer> = {}
+  /** @hidden */
+  docSynchronizers: Record<DocumentId, DocSynchronizer> = {}
 
   /** Used to determine if the document is know to the Collection and a synchronizer exists or is being set up */
   #docSetUp: Record<DocumentId, boolean> = {}
 
-  constructor(private repo: Repo) {
+  #denylist: DocumentId[]
+
+  constructor(private repo: Repo, denylist: AutomergeUrl[] = []) {
     super()
+    this.#denylist = denylist.map(url => parseAutomergeUrl(url).documentId)
   }
 
   /** Returns a synchronizer for the given document, creating one if it doesn't already exist. */
-  #fetchDocSynchronizer(documentId: DocumentId) {
-    if (!this.#docSynchronizers[documentId]) {
-      const handle = this.repo.find(stringifyAutomergeUrl({ documentId }))
-      this.#docSynchronizers[documentId] = this.#initDocSynchronizer(handle)
+  #fetchDocSynchronizer(handle: DocHandle<unknown>) {
+    if (!this.docSynchronizers[handle.documentId]) {
+      this.docSynchronizers[handle.documentId] =
+        this.#initDocSynchronizer(handle)
     }
-    return this.#docSynchronizers[documentId]
+    return this.docSynchronizers[handle.documentId]
   }
 
   /** Creates a new docSynchronizer and sets it up to propagate messages */
   #initDocSynchronizer(handle: DocHandle<unknown>): DocSynchronizer {
     const docSynchronizer = new DocSynchronizer({
       handle,
+      peerId: this.repo.networkSubsystem.peerId,
       onLoadSyncState: async peerId => {
         if (!this.repo.storageSubsystem) {
           return
@@ -57,6 +62,7 @@ export class CollectionSynchronizer extends Synchronizer {
     docSynchronizer.on("message", event => this.emit("message", event))
     docSynchronizer.on("open-doc", event => this.emit("open-doc", event))
     docSynchronizer.on("sync-state", event => this.emit("sync-state", event))
+    docSynchronizer.on("metrics", event => this.emit("metrics", event))
     return docSynchronizer
   }
 
@@ -89,15 +95,31 @@ export class CollectionSynchronizer extends Synchronizer {
       throw new Error("received a message with an invalid documentId")
     }
 
+    if (this.#denylist.includes(documentId)) {
+      this.emit("metrics", {
+        type: "doc-denied",
+        documentId,
+      })
+      this.emit("message", {
+        type: "doc-unavailable",
+        documentId,
+        targetId: message.senderId,
+      })
+      return
+    }
+
     this.#docSetUp[documentId] = true
 
-    const docSynchronizer = this.#fetchDocSynchronizer(documentId)
+    const handle = await this.repo.find(documentId, {
+      allowableStates: ["ready", "unavailable", "requesting"],
+    })
+    const docSynchronizer = this.#fetchDocSynchronizer(handle)
 
     docSynchronizer.receiveMessage(message)
 
     // Initiate sync with any new peers
     const peers = await this.#documentGenerousPeers(documentId)
-    docSynchronizer.beginSync(
+    void docSynchronizer.beginSync(
       peers.filter(peerId => !docSynchronizer.hasPeer(peerId))
     )
   }
@@ -105,14 +127,14 @@ export class CollectionSynchronizer extends Synchronizer {
   /**
    * Starts synchronizing the given document with all peers that we share it generously with.
    */
-  addDocument(documentId: DocumentId) {
+  addDocument(handle: DocHandle<unknown>) {
     // HACK: this is a hack to prevent us from adding the same document twice
-    if (this.#docSetUp[documentId]) {
+    if (this.#docSetUp[handle.documentId]) {
       return
     }
-    const docSynchronizer = this.#fetchDocSynchronizer(documentId)
-    void this.#documentGenerousPeers(documentId).then(peers => {
-      docSynchronizer.beginSync(peers)
+    const docSynchronizer = this.#fetchDocSynchronizer(handle)
+    void this.#documentGenerousPeers(handle.documentId).then(peers => {
+      void docSynchronizer.beginSync(peers)
     })
   }
 
@@ -131,10 +153,10 @@ export class CollectionSynchronizer extends Synchronizer {
     }
 
     this.#peers.add(peerId)
-    for (const docSynchronizer of Object.values(this.#docSynchronizers)) {
+    for (const docSynchronizer of Object.values(this.docSynchronizers)) {
      const { documentId } = docSynchronizer
      void this.repo.sharePolicy(peerId, documentId).then(okToShare => {
-        if (okToShare) docSynchronizer.beginSync([peerId])
+        if (okToShare) void docSynchronizer.beginSync([peerId])
      })
    }
  }
@@ -144,7 +166,7 @@ export class CollectionSynchronizer extends Synchronizer {
     log(`removing peer ${peerId}`)
     this.#peers.delete(peerId)
 
-    for (const docSynchronizer of Object.values(this.#docSynchronizers)) {
+    for (const docSynchronizer of Object.values(this.docSynchronizers)) {
       docSynchronizer.endSync(peerId)
     }
   }
@@ -153,4 +175,19 @@ export class CollectionSynchronizer extends Synchronizer {
   get peers(): PeerId[] {
     return Array.from(this.#peers)
   }
+
+  metrics(): {
+    [key: string]: {
+      peers: PeerId[]
+      size: { numOps: number; numChanges: number }
+    }
+  } {
+    return Object.fromEntries(
+      Object.entries(this.docSynchronizers).map(
+        ([documentId, synchronizer]) => {
+          return [documentId, synchronizer.metrics()]
+        }
+      )
+    )
+  }
 }
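
A sketch of the two additions above: the denylist constructor argument and the aggregate `metrics()` accessor. Instantiating CollectionSynchronizer directly is unusual (Repo normally owns it, and how Repo plumbs a denylist through is not shown in this excerpt), so treat the wiring and the import path as assumptions; the constructor signature, the `doc-denied`/`doc-unavailable` behavior, and the `metrics()` shape come from this diff:

```ts
import { Repo, type AutomergeUrl } from "@automerge/automerge-repo"
// Deep import path is an assumption; the class may be internal-only.
import { CollectionSynchronizer } from "@automerge/automerge-repo/dist/synchronizer/CollectionSynchronizer.js"

declare const repo: Repo
declare const deniedUrl: AutomergeUrl

// Sync requests for deniedUrl are answered with "doc-unavailable" and
// surfaced locally as a { type: "doc-denied" } metrics event.
const synchronizer = new CollectionSynchronizer(repo, [deniedUrl])
synchronizer.on("metrics", event => {
  if (event.type === "doc-denied") console.warn(`denied: ${event.documentId}`)
})

// Per-document sync telemetry: connected peers plus doc size stats.
for (const [docId, m] of Object.entries(synchronizer.metrics())) {
  console.log(`${docId}: peers=${m.peers.length} ops=${m.size.numOps}`)
}
```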
package/src/synchronizer/DocSynchronizer.ts CHANGED
@@ -30,6 +30,7 @@ type PendingMessage = {
 
 interface DocSynchronizerConfig {
   handle: DocHandle<unknown>
+  peerId: PeerId
   onLoadSyncState?: (peerId: PeerId) => Promise<A.SyncState | undefined>
 }
 
@@ -56,13 +57,17 @@ export class DocSynchronizer extends Synchronizer {
 
   #pendingSyncMessages: Array<PendingMessage> = []
 
+  // We keep this around at least in part for debugging.
+  // eslint-disable-next-line no-unused-private-class-members
+  #peerId: PeerId
   #syncStarted = false
 
   #handle: DocHandle<unknown>
   #onLoadSyncState: (peerId: PeerId) => Promise<A.SyncState | undefined>
 
-  constructor({ handle, onLoadSyncState }: DocSynchronizerConfig) {
+  constructor({ handle, peerId, onLoadSyncState }: DocSynchronizerConfig) {
     super()
+    this.#peerId = peerId
     this.#handle = handle
     this.#onLoadSyncState =
       onLoadSyncState ?? (() => Promise.resolve(undefined))
@@ -81,7 +86,7 @@ export class DocSynchronizer extends Synchronizer {
 
     // Process pending sync messages immediately after the handle becomes ready.
     void (async () => {
-      await handle.doc([READY, REQUESTING])
+      await handle.whenReady([READY, REQUESTING])
       this.#processAllPendingSyncMessages()
     })()
   }
@@ -97,8 +102,7 @@ export class DocSynchronizer extends Synchronizer {
   /// PRIVATE
 
   async #syncWithPeers() {
-    this.#log(`syncWithPeers`)
-    const doc = await this.#handle.doc()
+    const doc = await this.#handle.legacyAsyncDoc() // XXX THIS ONE IS WEIRD
     if (doc === undefined) return
     this.#peers.forEach(peerId => this.#sendSyncMessage(peerId, doc))
   }
@@ -226,16 +230,15 @@ export class DocSynchronizer extends Synchronizer {
     return this.#peers.includes(peerId)
   }
 
-  beginSync(peerIds: PeerId[]) {
+  async beginSync(peerIds: PeerId[]) {
     const noPeersWithDocument = peerIds.every(
       peerId => this.#peerDocumentStatuses[peerId] in ["unavailable", "wants"]
     )
 
     // At this point if we don't have anything in our storage, we need to use an empty doc to sync
     // with; but we don't want to surface that state to the front end
-
-    const docPromise = this.#handle
-      .doc([READY, REQUESTING, UNAVAILABLE])
+    const docPromise = this.#handle // TODO THIS IS ALSO WEIRD
+      .legacyAsyncDoc([READY, REQUESTING, UNAVAILABLE])
       .then(doc => {
         // we register out peers first, then say that sync has started
         this.#syncStarted = true
@@ -251,7 +254,13 @@ export class DocSynchronizer extends Synchronizer {
         return doc ?? A.init<unknown>()
       })
 
-    this.#log(`beginSync: ${peerIds.join(", ")}`)
+    const peersWithDocument = this.#peers.some(peerId => {
+      return this.#peerDocumentStatuses[peerId] == "has"
+    })
+
+    if (peersWithDocument) {
+      await this.#handle.whenReady()
+    }
 
     peerIds.forEach(peerId => {
       this.#withSyncState(peerId, syncState => {
@@ -351,11 +360,20 @@ export class DocSynchronizer extends Synchronizer {
 
     this.#withSyncState(message.senderId, syncState => {
       this.#handle.update(doc => {
+        const start = performance.now()
+
        const [newDoc, newSyncState] = A.receiveSyncMessage(
          doc,
          syncState,
          message.data
        )
+        const end = performance.now()
+        this.emit("metrics", {
+          type: "receive-sync-message",
+          documentId: this.#handle.documentId,
+          durationMillis: end - start,
+          ...A.stats(doc),
+        })
 
        this.#setSyncState(message.senderId, newSyncState)
 
@@ -401,4 +419,11 @@ export class DocSynchronizer extends Synchronizer {
 
     this.#pendingSyncMessages = []
   }
+
+  metrics(): { peers: PeerId[]; size: { numOps: number; numChanges: number } } {
+    return {
+      peers: this.#peers,
+      size: this.#handle.metrics(),
+    }
+  }
 }
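
A sketch of consuming the per-message telemetry added above. Each applied sync message now emits a `receive-sync-message` metrics event timing `A.receiveSyncMessage` and carrying `A.stats()` output; listening on a DocSynchronizer directly is for illustration only, since these events bubble up through CollectionSynchronizer via the `metrics` re-emit earlier in this diff:

```ts
// Deep import path is an assumption; the class may be internal-only.
import type { DocSynchronizer } from "@automerge/automerge-repo/dist/synchronizer/DocSynchronizer.js"

declare const docSynchronizer: DocSynchronizer

docSynchronizer.on("metrics", event => {
  if (event.type === "receive-sync-message") {
    const { documentId, durationMillis, numOps, numChanges } = event
    // Flag slow applies, e.g. anything over one frame at 60fps.
    if (durationMillis > 16) {
      console.warn(
        `slow sync apply on ${documentId}: ${durationMillis.toFixed(1)}ms ` +
          `(doc now ${numChanges} changes / ${numOps} ops)`
      )
    }
  }
})
```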