@automerge/automerge-repo 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (99)
  1. package/.eslintrc +28 -0
  2. package/.mocharc.json +5 -0
  3. package/README.md +298 -0
  4. package/TODO.md +54 -0
  5. package/dist/DocCollection.d.ts +44 -0
  6. package/dist/DocCollection.d.ts.map +1 -0
  7. package/dist/DocCollection.js +85 -0
  8. package/dist/DocHandle.d.ts +78 -0
  9. package/dist/DocHandle.d.ts.map +1 -0
  10. package/dist/DocHandle.js +227 -0
  11. package/dist/EphemeralData.d.ts +27 -0
  12. package/dist/EphemeralData.d.ts.map +1 -0
  13. package/dist/EphemeralData.js +28 -0
  14. package/dist/Repo.d.ts +30 -0
  15. package/dist/Repo.d.ts.map +1 -0
  16. package/dist/Repo.js +97 -0
  17. package/dist/helpers/arraysAreEqual.d.ts +2 -0
  18. package/dist/helpers/arraysAreEqual.d.ts.map +1 -0
  19. package/dist/helpers/arraysAreEqual.js +1 -0
  20. package/dist/helpers/eventPromise.d.ts +5 -0
  21. package/dist/helpers/eventPromise.d.ts.map +1 -0
  22. package/dist/helpers/eventPromise.js +6 -0
  23. package/dist/helpers/headsAreSame.d.ts +3 -0
  24. package/dist/helpers/headsAreSame.d.ts.map +1 -0
  25. package/dist/helpers/headsAreSame.js +7 -0
  26. package/dist/helpers/mergeArrays.d.ts +2 -0
  27. package/dist/helpers/mergeArrays.d.ts.map +1 -0
  28. package/dist/helpers/mergeArrays.js +15 -0
  29. package/dist/helpers/pause.d.ts +3 -0
  30. package/dist/helpers/pause.d.ts.map +1 -0
  31. package/dist/helpers/pause.js +7 -0
  32. package/dist/helpers/withTimeout.d.ts +9 -0
  33. package/dist/helpers/withTimeout.d.ts.map +1 -0
  34. package/dist/helpers/withTimeout.js +22 -0
  35. package/dist/index.d.ts +13 -0
  36. package/dist/index.d.ts.map +1 -0
  37. package/dist/index.js +10 -0
  38. package/dist/network/NetworkAdapter.d.ts +37 -0
  39. package/dist/network/NetworkAdapter.d.ts.map +1 -0
  40. package/dist/network/NetworkAdapter.js +4 -0
  41. package/dist/network/NetworkSubsystem.d.ts +23 -0
  42. package/dist/network/NetworkSubsystem.d.ts.map +1 -0
  43. package/dist/network/NetworkSubsystem.js +89 -0
  44. package/dist/storage/StorageAdapter.d.ts +6 -0
  45. package/dist/storage/StorageAdapter.d.ts.map +1 -0
  46. package/dist/storage/StorageAdapter.js +2 -0
  47. package/dist/storage/StorageSubsystem.d.ts +12 -0
  48. package/dist/storage/StorageSubsystem.d.ts.map +1 -0
  49. package/dist/storage/StorageSubsystem.js +65 -0
  50. package/dist/synchronizer/CollectionSynchronizer.d.ts +24 -0
  51. package/dist/synchronizer/CollectionSynchronizer.d.ts.map +1 -0
  52. package/dist/synchronizer/CollectionSynchronizer.js +92 -0
  53. package/dist/synchronizer/DocSynchronizer.d.ts +18 -0
  54. package/dist/synchronizer/DocSynchronizer.d.ts.map +1 -0
  55. package/dist/synchronizer/DocSynchronizer.js +136 -0
  56. package/dist/synchronizer/Synchronizer.d.ts +10 -0
  57. package/dist/synchronizer/Synchronizer.d.ts.map +1 -0
  58. package/dist/synchronizer/Synchronizer.js +3 -0
  59. package/dist/test-utilities/adapter-tests.d.ts +21 -0
  60. package/dist/test-utilities/adapter-tests.d.ts.map +1 -0
  61. package/dist/test-utilities/adapter-tests.js +117 -0
  62. package/dist/types.d.ts +10 -0
  63. package/dist/types.d.ts.map +1 -0
  64. package/dist/types.js +1 -0
  65. package/fuzz/fuzz.ts +129 -0
  66. package/package.json +65 -0
  67. package/src/DocCollection.ts +123 -0
  68. package/src/DocHandle.ts +386 -0
  69. package/src/EphemeralData.ts +46 -0
  70. package/src/Repo.ts +155 -0
  71. package/src/helpers/arraysAreEqual.ts +2 -0
  72. package/src/helpers/eventPromise.ts +10 -0
  73. package/src/helpers/headsAreSame.ts +8 -0
  74. package/src/helpers/mergeArrays.ts +17 -0
  75. package/src/helpers/pause.ts +9 -0
  76. package/src/helpers/withTimeout.ts +28 -0
  77. package/src/index.ts +22 -0
  78. package/src/network/NetworkAdapter.ts +54 -0
  79. package/src/network/NetworkSubsystem.ts +130 -0
  80. package/src/storage/StorageAdapter.ts +5 -0
  81. package/src/storage/StorageSubsystem.ts +91 -0
  82. package/src/synchronizer/CollectionSynchronizer.ts +112 -0
  83. package/src/synchronizer/DocSynchronizer.ts +182 -0
  84. package/src/synchronizer/Synchronizer.ts +15 -0
  85. package/src/test-utilities/adapter-tests.ts +163 -0
  86. package/src/types.ts +3 -0
  87. package/test/CollectionSynchronizer.test.ts +73 -0
  88. package/test/DocCollection.test.ts +19 -0
  89. package/test/DocHandle.test.ts +281 -0
  90. package/test/DocSynchronizer.test.ts +68 -0
  91. package/test/EphemeralData.test.ts +44 -0
  92. package/test/Network.test.ts +13 -0
  93. package/test/Repo.test.ts +367 -0
  94. package/test/StorageSubsystem.test.ts +78 -0
  95. package/test/helpers/DummyNetworkAdapter.ts +8 -0
  96. package/test/helpers/DummyStorageAdapter.ts +23 -0
  97. package/test/helpers/getRandomItem.ts +4 -0
  98. package/test/types.ts +3 -0
  99. package/tsconfig.json +16 -0
package/src/test-utilities/adapter-tests.ts ADDED
@@ -0,0 +1,163 @@
+ import { PeerId, Repo, type NetworkAdapter, ChannelId } from "../index.js"
+ import {
+   eventPromise,
+   eventPromises,
+ } from "../helpers/eventPromise.js"
+ import { assert } from "chai"
+ import { describe, it } from "mocha"
+
+ /**
+  * Runs a series of tests against a set of three peers, each represented by one or more instantiated
+  * network adapters.
+  *
+  * The adapter `setup` function should return an object with the following properties:
+  *
+  * - `adapters`: A tuple representing three peers' network configuration. Each element can be either
+  *   a single adapter or an array of adapters. Each will be used to instantiate a Repo for that
+  *   peer.
+  * - `teardown`: An optional function that will be called after the tests have run. This can be used
+  *   to clean up any resources that were created during the test.
+  */
+ export function runAdapterTests(_setup: SetupFn, title?: string): void {
+   // Wrap the provided setup function
+   const setup = async () => {
+     const { adapters, teardown = NO_OP } = await _setup()
+
+     // these might be individual adapters or arrays of adapters; normalize them to arrays
+     const [a, b, c] = adapters.map(toArray)
+
+     return { adapters: [a, b, c], teardown }
+   }
+
+   describe(`Adapter acceptance tests ${title ? `(${title})` : ""}`, () => {
+     it("can sync 2 repos", async () => {
+       const doTest = async (a: NetworkAdapter[], b: NetworkAdapter[]) => {
+         const aliceRepo = new Repo({ network: a, peerId: alice })
+         const bobRepo = new Repo({ network: b, peerId: bob })
+
+         // Alice creates a document
+         const aliceHandle = aliceRepo.create<TestDoc>()
+
+         // Bob receives the document
+         await eventPromise(bobRepo, "document")
+         const bobHandle = bobRepo.find<TestDoc>(aliceHandle.documentId)
+
+         // Alice changes the document
+         aliceHandle.change(d => {
+           d.foo = "bar"
+         })
+
+         // Bob receives the change
+         await eventPromise(bobHandle, "change")
+         assert.equal((await bobHandle.value()).foo, "bar")
+
+         // Bob changes the document
+         bobHandle.change(d => {
+           d.foo = "baz"
+         })
+
+         // Alice receives the change
+         await eventPromise(aliceHandle, "change")
+         assert.equal((await aliceHandle.value()).foo, "baz")
+       }
+
+       // Run the test in both directions, in case they're different types of adapters
+       {
+         const { adapters, teardown } = await setup()
+         const [x, y] = adapters
+         await doTest(x, y) // x is Alice
+         teardown()
+       }
+       {
+         const { adapters, teardown } = await setup()
+         const [x, y] = adapters
+         await doTest(y, x) // y is Alice
+         teardown()
+       }
+     })
+
+     it("can sync 3 repos", async () => {
+       const { adapters, teardown } = await setup()
+       const [a, b, c] = adapters
+
+       const aliceRepo = new Repo({ network: a, peerId: alice })
+       const bobRepo = new Repo({ network: b, peerId: bob })
+       const charlieRepo = new Repo({ network: c, peerId: charlie })
+
+       // Alice creates a document
+       const aliceHandle = aliceRepo.create<TestDoc>()
+       const documentId = aliceHandle.documentId
+
+       // Bob and Charlie receive the document
+       await eventPromises([bobRepo, charlieRepo], "document")
+       const bobHandle = bobRepo.find<TestDoc>(documentId)
+       const charlieHandle = charlieRepo.find<TestDoc>(documentId)
+
+       // Alice changes the document
+       aliceHandle.change(d => {
+         d.foo = "bar"
+       })
+
+       // Bob and Charlie receive the change
+       await eventPromises([bobHandle, charlieHandle], "change")
+       assert.equal((await bobHandle.value()).foo, "bar")
+       assert.equal((await charlieHandle.value()).foo, "bar")
+
+       // Charlie changes the document
+       charlieHandle.change(d => {
+         d.foo = "baz"
+       })
+
+       // Alice and Bob receive the change
+       await eventPromises([aliceHandle, bobHandle], "change")
+       assert.equal((await bobHandle.value()).foo, "baz")
+       assert.equal((await charlieHandle.value()).foo, "baz")
+
+       teardown()
+     })
+
+     // TODO: with BroadcastChannel, this test never ends, because it goes into an infinite loop,
+     // because the network has cycles (see #92)
+     it.skip("can broadcast a message", async () => {
+       const { adapters, teardown } = await setup()
+       const [a, b, c] = adapters
+
+       const aliceRepo = new Repo({ network: a, peerId: alice })
+       const bobRepo = new Repo({ network: b, peerId: bob })
+       const charlieRepo = new Repo({ network: c, peerId: charlie })
+
+       await eventPromises(
+         [aliceRepo, bobRepo, charlieRepo].map(r => r.networkSubsystem),
+         "peer"
+       )
+
+       const channelId = "broadcast" as ChannelId
+       const alicePresenceData = { presence: "alice" }
+
+       aliceRepo.ephemeralData.broadcast(channelId, alicePresenceData)
+       const { data } = await eventPromise(charlieRepo.ephemeralData, "data")
+
+       assert.deepStrictEqual(data, alicePresenceData)
+       teardown()
+     })
+   })
+ }
+
+ const NO_OP = () => {}
+
+ type Network = NetworkAdapter | NetworkAdapter[]
+
+ export type SetupFn = () => Promise<{
+   adapters: [Network, Network, Network]
+   teardown?: () => void
+ }>
+
+ type TestDoc = {
+   foo: string
+ }
+
+ const toArray = <T>(x: T | T[]) => (Array.isArray(x) ? x : [x])
+
+ const alice = "alice" as PeerId
+ const bob = "bob" as PeerId
+ const charlie = "charlie" as PeerId
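
For adapter authors, the `setup` contract documented above can be exercised roughly as follows. This is a minimal sketch, not code from the package: `WebSocketClientAdapter` and `startTestServer` are hypothetical placeholders for whatever adapter class and test fixture a given adapter package provides, and it assumes `runAdapterTests` and `SetupFn` are reachable from the package entry point.

import { runAdapterTests, type SetupFn } from "@automerge/automerge-repo"
import { WebSocketClientAdapter } from "../src/index.js" // hypothetical adapter under test
import { startTestServer } from "./helpers/startTestServer.js" // hypothetical test fixture

const setup: SetupFn = async () => {
  // One shared server, three client peers; each peer gets its own adapter instance.
  const { url, close } = await startTestServer()
  return {
    adapters: [
      new WebSocketClientAdapter(url),
      new WebSocketClientAdapter(url),
      new WebSocketClientAdapter(url),
    ],
    teardown: () => close(),
  }
}

runAdapterTests(setup, "websocket client")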
package/src/types.ts ADDED
@@ -0,0 +1,3 @@
+ export type DocumentId = string & { __documentId: true }
+ export type PeerId = string & { __peerId: false }
+ export type ChannelId = string & { __channelId: false }
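
These are branded (nominal) string types: a plain string will not type-check where a `DocumentId` or `PeerId` is expected, which is why the tests cast with `as` (e.g. `"alice" as PeerId`). A small illustration of the pattern, assuming the types are re-exported from the package index as the tests' `../src` imports suggest; `findDocument` is an illustrative signature only.

import type { DocumentId } from "@automerge/automerge-repo"

declare function findDocument(id: DocumentId): void // illustrative signature only

const raw = "some-document-id"
// findDocument(raw)            // type error: a bare string lacks the __documentId brand
findDocument(raw as DocumentId) // the explicit cast opts the string in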
package/test/CollectionSynchronizer.test.ts ADDED
@@ -0,0 +1,73 @@
+ import { CollectionSynchronizer } from "../src/synchronizer/CollectionSynchronizer.js"
+ import { ChannelId, DocCollection, DocumentId, PeerId } from "../src"
+ import assert from "assert"
+ import { beforeEach } from "mocha"
+ import { MessagePayload } from "../src/network/NetworkAdapter.js"
+
+ describe("CollectionSynchronizer", () => {
+   let collection: DocCollection
+   let synchronizer: CollectionSynchronizer
+
+   beforeEach(() => {
+     collection = new DocCollection()
+     synchronizer = new CollectionSynchronizer(collection)
+   })
+
+   it("is not null", async () => {
+     assert(synchronizer !== null)
+   })
+
+   it("starts synchronizing a document to peers when added", done => {
+     const handle = collection.create()
+     synchronizer.addPeer("peer1" as PeerId)
+
+     synchronizer.once("message", (event: MessagePayload) => {
+       assert(event.targetId === "peer1")
+       assert(event.channelId === (handle.documentId as unknown as ChannelId))
+       done()
+     })
+
+     synchronizer.addDocument(handle.documentId)
+   })
+
+   it("starts synchronizing existing documents when a peer is added", done => {
+     const handle = collection.create()
+     synchronizer.addDocument(handle.documentId)
+     synchronizer.once("message", (event: MessagePayload) => {
+       assert(event.targetId === "peer1")
+       assert(event.channelId === (handle.documentId as unknown as ChannelId))
+       done()
+     })
+     synchronizer.addPeer("peer1" as PeerId)
+   })
+
+   it("should not synchronize to a peer which is excluded from the share policy", done => {
+     const handle = collection.create()
+
+     collection.sharePolicy = async (peerId: PeerId) => peerId !== "peer1"
+
+     synchronizer.addDocument(handle.documentId)
+     synchronizer.once("message", () => {
+       done(new Error("Should not have sent a message"))
+     })
+     synchronizer.addPeer("peer1" as PeerId)
+
+     setTimeout(done)
+   })
+
+   it("should not synchronize a document which is excluded from the share policy", done => {
+     const handle = collection.create()
+     collection.sharePolicy = async (_, documentId) =>
+       documentId !== handle.documentId
+
+     synchronizer.addPeer("peer2" as PeerId)
+
+     synchronizer.once("message", () => {
+       done(new Error("Should not have sent a message"))
+     })
+
+     synchronizer.addDocument(handle.documentId)
+
+     setTimeout(done)
+   })
+ })
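
The `sharePolicy` exercised in these tests is the hook an application uses to decide which peers are offered which documents: it receives a peer id (and, as the last test shows, a document id) and resolves to a boolean. A rough sketch of setting one on a collection, assuming the same exports the tests above import; the predicate and the `privateDocs` bookkeeping are illustrative, not part of the package.

import { DocCollection, DocumentId, PeerId } from "@automerge/automerge-repo"

const collection = new DocCollection()
const privateDocs = new Set<DocumentId>() // hypothetical app-level bookkeeping

// Offer documents only to sync-server peers, and never offer private documents.
collection.sharePolicy = async (peerId: PeerId, documentId?: DocumentId) =>
  peerId.startsWith("sync-server-") &&
  !(documentId && privateDocs.has(documentId))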
package/test/DocCollection.test.ts ADDED
@@ -0,0 +1,19 @@
+ import assert from "assert"
+ import { DocCollection, DocumentId } from "../src"
+ import { TestDoc } from "./types.js"
+
+ const MISSING_DOCID = "non-existent-docID" as DocumentId
+
+ describe("DocCollection", () => {
+   it("can create documents which are ready to go", async () => {
+     const collection = new DocCollection()
+     const handle = collection.create<TestDoc>()
+     assert(handle.isReady() === true)
+   })
+
+   it("can start finding documents and they shouldn't be ready", () => {
+     const collection = new DocCollection()
+     const handle = collection.find<TestDoc>(MISSING_DOCID)
+     assert(handle.isReady() === false)
+   })
+ })
package/test/DocHandle.test.ts ADDED
@@ -0,0 +1,281 @@
+ import * as A from "@automerge/automerge"
+ import assert from "assert"
+ import { it } from "mocha"
+ import { DocHandle, DocHandleChangePayload, DocumentId } from "../src"
+ import { pause } from "../src/helpers/pause"
+ import { TestDoc } from "./types.js"
+
+ describe("DocHandle", () => {
+   const TEST_ID = "test-document-id" as DocumentId
+
+   const binaryFromMockStorage = () => {
+     const doc = A.change<{ foo: string }>(A.init(), d => (d.foo = "bar"))
+     const binary = A.save(doc)
+     return binary
+   }
+
+   it("should take the UUID passed into it", () => {
+     const handle = new DocHandle(TEST_ID)
+     assert.equal(handle.documentId, TEST_ID)
+   })
+
+   it("should become ready when a document is loaded", async () => {
+     const handle = new DocHandle<TestDoc>(TEST_ID)
+     assert.equal(handle.isReady(), false)
+
+     // simulate loading from storage
+     handle.load(binaryFromMockStorage())
+
+     assert.equal(handle.isReady(), true)
+     const doc = await handle.value()
+     assert.equal(doc.foo, "bar")
+   })
+
+   it("should allow sync access to the doc", async () => {
+     const handle = new DocHandle<TestDoc>(TEST_ID)
+     assert.equal(handle.isReady(), false)
+
+     // simulate loading from storage
+     handle.load(binaryFromMockStorage())
+
+     assert.equal(handle.isReady(), true)
+     const doc = await handle.value()
+     assert.deepEqual(doc, handle.doc)
+   })
+
+   it("should throw an error if we access the doc before it is ready", async () => {
+     const handle = new DocHandle<TestDoc>(TEST_ID)
+
+     assert.throws(() => handle.doc)
+   })
+
+   it("should not return a value until ready", async () => {
+     const handle = new DocHandle<TestDoc>(TEST_ID)
+     assert.equal(handle.isReady(), false)
+
+     // simulate loading from storage
+     handle.load(binaryFromMockStorage())
+
+     const doc = await handle.value()
+
+     assert.equal(handle.isReady(), true)
+     assert.equal(doc.foo, "bar")
+   })
+
+   it("should block changes until ready()", async () => {
+     const handle = new DocHandle<TestDoc>(TEST_ID)
+
+     // can't make changes in LOADING state
+     assert.equal(handle.isReady(), false)
+     assert.throws(() => handle.change(d => (d.foo = "baz")))
+
+     // simulate loading from storage
+     handle.load(binaryFromMockStorage())
+
+     // now we're in READY state so we can make changes
+     assert.equal(handle.isReady(), true)
+     handle.change(d => (d.foo = "pizza"))
+
+     const doc = await handle.value()
+     assert.equal(doc.foo, "pizza")
+   })
+
+   it("should not be ready while requesting from the network", async () => {
+     const handle = new DocHandle<TestDoc>(TEST_ID)
+
+     // we don't have it in storage, so we request it from the network
+     handle.request()
+
+     assert.throws(() => handle.doc)
+     assert.equal(handle.isReady(), false)
+     assert.throws(() => handle.change(h => {}))
+   })
+
+   it("should become ready if the document is updated by the network", async () => {
+     const handle = new DocHandle<TestDoc>(TEST_ID)
+
+     // we don't have it in storage, so we request it from the network
+     handle.request()
+
+     // simulate updating from the network
+     handle.update(doc => {
+       return A.change(doc, d => (d.foo = "bar"))
+     })
+
+     const doc = await handle.value()
+     assert.equal(handle.isReady(), true)
+     assert.equal(doc.foo, "bar")
+   })
+
+   it("should emit a change message when changes happen", async () => {
+     const handle = new DocHandle<TestDoc>(TEST_ID, { isNew: true })
+
+     const p = new Promise<DocHandleChangePayload<TestDoc>>(resolve =>
+       handle.once("change", d => resolve(d))
+     )
+
+     handle.change(doc => {
+       doc.foo = "bar"
+     })
+
+     const doc = await handle.value()
+     assert.equal(doc.foo, "bar")
+
+     const changePayload = await p
+     assert.deepStrictEqual(changePayload.doc, doc)
+     assert.deepStrictEqual(changePayload.handle, handle)
+   })
+
+   it("should not emit a change message if no change happens via update", done => {
+     const handle = new DocHandle<TestDoc>(TEST_ID, { isNew: true })
+     handle.once("change", () => {
+       done(new Error("shouldn't have changed"))
+     })
+     handle.update(d => {
+       setTimeout(done, 0)
+       return d
+     })
+   })
+
+   it("should emit distinct change messages when consecutive changes happen", async () => {
+     const handle = new DocHandle<TestDoc>(TEST_ID, { isNew: true })
+
+     let calls = 0
+     const p = new Promise(resolve =>
+       handle.on("change", async ({ doc: d }) => {
+         if (calls === 0) {
+           assert.equal(d.foo, "bar")
+           calls++
+           return
+         }
+         assert.equal(d.foo, "baz")
+         resolve(d)
+       })
+     )
+
+     handle.change(doc => {
+       doc.foo = "bar"
+     })
+
+     handle.change(doc => {
+       doc.foo = "baz"
+     })
+
+     const doc = await handle.value()
+     assert.equal(doc.foo, "baz")
+
+     return p
+   })
+
+   it("should emit a patch message when changes happen", async () => {
+     const handle = new DocHandle<TestDoc>(TEST_ID, { isNew: true })
+     const p = new Promise(resolve => handle.once("patch", d => resolve(d)))
+
+     handle.change(doc => {
+       doc.foo = "bar"
+     })
+
+     await p
+     const doc = await handle.value()
+     assert.equal(doc.foo, "bar")
+   })
+
+   it("should not emit a patch message if no change happens", done => {
+     const handle = new DocHandle<TestDoc>(TEST_ID, { isNew: true })
+     handle.on("patch", () => {
+       done(new Error("shouldn't have patched"))
+     })
+     handle.change(_doc => {
+       // do nothing
+       setTimeout(done, 0)
+     })
+   })
+
+   it("should time out if the document is not loaded", async () => {
+     // set the DocHandle to time out after 5 ms
+     const handle = new DocHandle<TestDoc>(TEST_ID, { timeoutDelay: 5 })
+
+     // we're not going to load
+     await pause(10)
+
+     // so it should time out
+     return assert.rejects(handle.value, "DocHandle timed out")
+   })
+
+   it("should not time out if the document is loaded in time", async () => {
+     // set the DocHandle to time out after 5 ms
+     const handle = new DocHandle<TestDoc>(TEST_ID, { timeoutDelay: 5 })
+
+     // simulate loading from storage before the timeout expires
+     handle.load(binaryFromMockStorage())
+
+     // now it should not time out
+     const doc = await handle.value()
+     assert.equal(doc.foo, "bar")
+   })
+
+   it("should time out if the document is not updated from the network", async () => {
+     // set the DocHandle to time out after 5 ms
+     const handle = new DocHandle<TestDoc>(TEST_ID, { timeoutDelay: 5 })
+
+     // simulate requesting from the network
+     handle.request()
+
+     // there's no update
+     await pause(10)
+
+     // so it should time out
+     return assert.rejects(handle.value, "DocHandle timed out")
+   })
+
+   it("should not time out if the document is updated in time", async () => {
+     // set the DocHandle to time out after 5 ms
+     const handle = new DocHandle<TestDoc>(TEST_ID, { timeoutDelay: 5 })
+
+     // simulate requesting from the network
+     handle.request()
+
+     // simulate updating from the network before the timeout expires
+     handle.update(doc => {
+       return A.change(doc, d => (d.foo = "bar"))
+     })
+
+     // now it should not time out
+     const doc = await handle.value()
+     assert.equal(doc.foo, "bar")
+   })
+
+   it("should emit a delete event when deleted", async () => {
+     const handle = new DocHandle<TestDoc>(TEST_ID, { isNew: true })
+
+     const p = new Promise<void>(resolve =>
+       handle.once("delete", () => resolve())
+     )
+     handle.delete()
+     await p
+
+     assert.equal(handle.isDeleted(), true)
+   })
+
+   it("should allow changing at old heads", async () => {
+     const handle = new DocHandle<TestDoc>(TEST_ID, { isNew: true })
+
+     handle.change(doc => {
+       doc.foo = "bar"
+     })
+
+     const headsBefore = A.getHeads(handle.doc)
+
+     handle.change(doc => {
+       doc.foo = "rab"
+     })
+
+     let wasBar = false
+     handle.changeAt(headsBefore, doc => {
+       wasBar = doc.foo === "bar"
+       doc.foo = "baz"
+     })
+
+     assert(wasBar, "foo should have been bar as we changed at the old heads")
+   })
+ })
package/test/DocSynchronizer.test.ts ADDED
@@ -0,0 +1,68 @@
+ import assert from "assert"
+ import { DocumentId, PeerId } from "../src/types.js"
+ import { DocHandle } from "../src/DocHandle.js"
+ import { DocSynchronizer } from "../src/synchronizer/DocSynchronizer.js"
+ import { eventPromise } from "../src/helpers/eventPromise.js"
+ import { TestDoc } from "./types.js"
+
+ const alice = "alice" as PeerId
+ const bob = "bob" as PeerId
+
+ describe("DocSynchronizer", () => {
+   let handle: DocHandle<TestDoc>
+   let docSynchronizer: DocSynchronizer
+
+   const setup = () => {
+     const docId = "synced-doc" as DocumentId
+     handle = new DocHandle<TestDoc>(docId, { isNew: true })
+     docSynchronizer = new DocSynchronizer(handle)
+     return { handle, docSynchronizer }
+   }
+
+   it("takes the handle passed into it", () => {
+     const { handle, docSynchronizer } = setup()
+     assert(docSynchronizer.documentId === handle.documentId)
+   })
+
+   it("emits a syncMessage when beginSync is called", async () => {
+     const { docSynchronizer } = setup()
+     docSynchronizer.beginSync(alice)
+     const { targetId } = await eventPromise(docSynchronizer, "message")
+     assert.equal(targetId, "alice")
+   })
+
+   it("emits a syncMessage to peers when the handle is updated", async () => {
+     const { handle, docSynchronizer } = setup()
+     docSynchronizer.beginSync(alice)
+     handle.change(doc => {
+       doc.foo = "bar"
+     })
+     const { targetId } = await eventPromise(docSynchronizer, "message")
+     assert.equal(targetId, "alice")
+   })
+
+   it("still syncs with a peer after it disconnects and reconnects", async () => {
+     const { handle, docSynchronizer } = setup()
+
+     // first connection
+     {
+       await docSynchronizer.beginSync(bob)
+       handle.change(doc => {
+         doc.foo = "a change"
+       })
+       const { targetId } = await eventPromise(docSynchronizer, "message")
+       assert.equal(targetId, "bob")
+       docSynchronizer.endSync(bob)
+     }
+
+     // second connection
+     {
+       await docSynchronizer.beginSync(bob)
+       handle.change(doc => {
+         doc.foo = "another change"
+       })
+       const { targetId } = await eventPromise(docSynchronizer, "message")
+       assert.equal(targetId, "bob")
+     }
+   })
+ })
package/test/EphemeralData.test.ts ADDED
@@ -0,0 +1,44 @@
+ import assert from "assert"
+ import * as CBOR from "cbor-x"
+ import { EphemeralData } from "../src/EphemeralData.js"
+ import { ChannelId, PeerId } from "../src/types.js"
+
+ describe("EphemeralData", () => {
+   const ephemeral = new EphemeralData()
+   const otherPeerId = "other_peer" as PeerId
+   const destinationChannelId = "channel_id" as ChannelId
+   const messageData = { foo: "bar" }
+
+   it("should emit a network message on broadcast()", done => {
+     ephemeral.on("message", event => {
+       try {
+         const { targetId, channelId, message, broadcast } = event
+         assert.deepStrictEqual(CBOR.decode(message), messageData)
+         assert.strictEqual(broadcast, true)
+         assert.strictEqual(channelId, channelId)
+         done()
+       } catch (e) {
+         done(e)
+       }
+     })
+     ephemeral.broadcast(destinationChannelId, messageData)
+   })
+
+   it("should emit a data event on receive()", done => {
+     ephemeral.on("data", ({ peerId, channelId, data }) => {
+       try {
+         assert.deepStrictEqual(peerId, otherPeerId)
+         assert.deepStrictEqual(channelId, destinationChannelId)
+         assert.deepStrictEqual(data, messageData)
+         done()
+       } catch (e) {
+         done(e)
+       }
+     })
+     ephemeral.receive(
+       otherPeerId,
+       ("m/" + destinationChannelId) as ChannelId, // TODO: this is nonsense
+       CBOR.encode(messageData)
+     )
+   })
+ })
package/test/Network.test.ts ADDED
@@ -0,0 +1,13 @@
+ import assert from "assert"
+ import { NetworkSubsystem } from "../src/network/NetworkSubsystem.js"
+
+ // Note: The sync tests in `Repo.test.ts` exercise the network subsystem, and the suite in
+ // `automerge-repo` provides test utilities that individual adapters can use to ensure that
+ // they work correctly.
+
+ describe("Network subsystem", () => {
+   it("Can be instantiated", () => {
+     const network = new NetworkSubsystem([])
+     assert(network !== null)
+   })
+ })