@automerge/automerge-repo 1.2.1 → 2.0.0-alpha.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. package/dist/AutomergeUrl.d.ts +3 -3
  2. package/dist/AutomergeUrl.d.ts.map +1 -1
  3. package/dist/AutomergeUrl.js +5 -1
  4. package/dist/DocHandle.d.ts +11 -10
  5. package/dist/DocHandle.d.ts.map +1 -1
  6. package/dist/DocHandle.js +23 -43
  7. package/dist/Repo.d.ts +1 -1
  8. package/dist/Repo.d.ts.map +1 -1
  9. package/dist/Repo.js +53 -36
  10. package/dist/entrypoints/slim.d.ts +1 -0
  11. package/dist/entrypoints/slim.d.ts.map +1 -1
  12. package/dist/entrypoints/slim.js +1 -0
  13. package/dist/helpers/DummyNetworkAdapter.d.ts +3 -0
  14. package/dist/helpers/DummyNetworkAdapter.d.ts.map +1 -1
  15. package/dist/helpers/DummyNetworkAdapter.js +24 -5
  16. package/dist/helpers/tests/network-adapter-tests.d.ts.map +1 -1
  17. package/dist/helpers/tests/network-adapter-tests.js +88 -1
  18. package/dist/helpers/throttle.d.ts +1 -1
  19. package/dist/helpers/throttle.js +1 -1
  20. package/dist/network/NetworkAdapter.d.ts +2 -0
  21. package/dist/network/NetworkAdapter.d.ts.map +1 -1
  22. package/dist/network/NetworkAdapterInterface.d.ts +2 -2
  23. package/dist/network/NetworkAdapterInterface.d.ts.map +1 -1
  24. package/dist/network/NetworkSubsystem.d.ts +5 -2
  25. package/dist/network/NetworkSubsystem.d.ts.map +1 -1
  26. package/dist/network/NetworkSubsystem.js +21 -25
  27. package/package.json +3 -3
  28. package/src/AutomergeUrl.ts +6 -6
  29. package/src/DocHandle.ts +27 -57
  30. package/src/Repo.ts +55 -40
  31. package/src/entrypoints/slim.ts +1 -0
  32. package/src/helpers/DummyNetworkAdapter.ts +29 -5
  33. package/src/helpers/tests/network-adapter-tests.ts +121 -1
  34. package/src/helpers/throttle.ts +1 -1
  35. package/src/network/NetworkAdapter.ts +3 -0
  36. package/src/network/NetworkAdapterInterface.ts +4 -3
  37. package/src/network/NetworkSubsystem.ts +24 -31
  38. package/test/AutomergeUrl.test.ts +4 -0
  39. package/test/DocHandle.test.ts +20 -24
  40. package/test/DocSynchronizer.test.ts +5 -1
  41. package/test/NetworkSubsystem.test.ts +107 -0
  42. package/test/Repo.test.ts +37 -15
  43. package/test/remoteHeads.test.ts +3 -3
  44. package/test/Network.test.ts +0 -14
package/src/Repo.ts CHANGED
@@ -23,6 +23,10 @@ import { CollectionSynchronizer } from "./synchronizer/CollectionSynchronizer.js
  import { SyncStatePayload } from "./synchronizer/Synchronizer.js"
  import type { AnyDocumentId, DocumentId, PeerId } from "./types.js"

+ function randomPeerId() {
+   return ("peer-" + Math.random().toString(36).slice(4)) as PeerId
+ }
+
  /** A Repo is a collection of documents with networking, syncing, and storage capabilities. */
  /** The `Repo` is the main entry point of this library
   *
@@ -61,7 +65,7 @@ export class Repo extends EventEmitter<RepoEvents> {
    constructor({
      storage,
      network = [],
-     peerId,
+     peerId = randomPeerId(),
      sharePolicy,
      isEphemeral = storage === undefined,
      enableRemoteHeadsGossiping = false,
@@ -75,7 +79,7 @@ export class Repo extends EventEmitter<RepoEvents> {

      // The `document` event is fired by the DocCollection any time we create a new document or look
      // up a document by ID. We listen for it in order to wire up storage and network synchronization.
-     this.on("document", async ({ handle, isNew }) => {
+     this.on("document", async ({ handle }) => {
        if (storageSubsystem) {
          // Save when the document changes, but no more often than saveDebounceRate.
          const saveFn = ({
@@ -85,17 +89,6 @@ export class Repo extends EventEmitter<RepoEvents> {
            void storageSubsystem.saveDoc(handle.documentId, doc)
          }
          handle.on("heads-changed", throttle(saveFn, this.saveDebounceRate))
-
-         if (isNew) {
-           // this is a new document, immediately save it
-           await storageSubsystem.saveDoc(handle.documentId, handle.docSync()!)
-         } else {
-           // Try to load from disk
-           const loadedDoc = await storageSubsystem.loadDoc(handle.documentId)
-           if (loadedDoc) {
-             handle.update(() => loadedDoc)
-           }
-         }
        }

        handle.on("unavailable", () => {
@@ -105,20 +98,6 @@ export class Repo extends EventEmitter<RepoEvents> {
        })
      })

-     if (this.networkSubsystem.isReady()) {
-       handle.request()
-     } else {
-       handle.awaitNetwork()
-       this.networkSubsystem
-         .whenReady()
-         .then(() => {
-           handle.networkReady()
-         })
-         .catch(err => {
-           this.#log("error waiting for network", { err })
-         })
-     }
-
      // Register the document with the synchronizer. This advertises our interest in the document.
      this.#synchronizer.addDocument(handle.documentId)
    })
@@ -324,20 +303,16 @@ export class Repo extends EventEmitter<RepoEvents> {
    /** Returns an existing handle if we have it; creates one otherwise. */
    #getHandle<T>({
      documentId,
-     isNew,
-     initialValue,
    }: {
      /** The documentId of the handle to look up or create */
      documentId: DocumentId /** If we know we're creating a new document, specify this so we can have access to it immediately */
-     isNew: boolean
-     initialValue?: T
    }) {
      // If we have the handle cached, return it
      if (this.#handleCache[documentId]) return this.#handleCache[documentId]

      // If not, create a new handle, cache it, and return it
      if (!documentId) throw new Error(`Invalid documentId ${documentId}`)
-     const handle = new DocHandle<T>(documentId, { isNew, initialValue })
+     const handle = new DocHandle<T>(documentId)
      this.#handleCache[documentId] = handle
      return handle
    }
@@ -366,10 +341,21 @@ export class Repo extends EventEmitter<RepoEvents> {
      const { documentId } = parseAutomergeUrl(generateAutomergeUrl())
      const handle = this.#getHandle<T>({
        documentId,
-       isNew: true,
-       initialValue,
      }) as DocHandle<T>
-     this.emit("document", { handle, isNew: true })
+
+     this.emit("document", { handle })
+
+     handle.update(() => {
+       let nextDoc: Automerge.Doc<T>
+       if (initialValue) {
+         nextDoc = Automerge.from(initialValue)
+       } else {
+         nextDoc = Automerge.emptyChange(Automerge.init())
+       }
+       return nextDoc
+     })
+
+     handle.doneLoading()
      return handle
    }

@@ -434,11 +420,34 @@ export class Repo extends EventEmitter<RepoEvents> {
        return this.#handleCache[documentId]
      }

+     // If we don't already have the handle, make an empty one and try loading it
      const handle = this.#getHandle<T>({
        documentId,
-       isNew: false,
      }) as DocHandle<T>
-     this.emit("document", { handle, isNew: false })
+
+     // Try to load from disk before telling anyone else about it
+     if (this.storageSubsystem) {
+       void this.storageSubsystem.loadDoc(handle.documentId).then(loadedDoc => {
+         if (loadedDoc) {
+           // uhhhh, sorry if you're reading this because we were lying to the type system
+           handle.update(() => loadedDoc as Automerge.Doc<T>)
+           handle.doneLoading()
+         } else {
+           this.networkSubsystem
+             .whenReady()
+             .then(() => {
+               handle.request()
+             })
+             .catch(err => {
+               this.#log("error waiting for network", { err })
+             })
+           this.emit("document", { handle })
+         }
+       })
+     } else {
+       handle.request()
+       this.emit("document", { handle })
+     }
      return handle
    }

@@ -448,7 +457,7 @@ export class Repo extends EventEmitter<RepoEvents> {
    ) {
      const documentId = interpretAsDocumentId(id)

-     const handle = this.#getHandle({ documentId, isNew: false })
+     const handle = this.#getHandle({ documentId })
      handle.delete()

      delete this.#handleCache[documentId]
@@ -465,7 +474,7 @@ export class Repo extends EventEmitter<RepoEvents> {
    async export(id: AnyDocumentId): Promise<Uint8Array | undefined> {
      const documentId = interpretAsDocumentId(id)

-     const handle = this.#getHandle({ documentId, isNew: false })
+     const handle = this.#getHandle({ documentId })
      const doc = await handle.doc()
      if (!doc) return undefined
      return Automerge.save(doc)
@@ -529,6 +538,13 @@ export class Repo extends EventEmitter<RepoEvents> {
        })
      )
    }
+
+   shutdown(): Promise<void> {
+     this.networkSubsystem.adapters.forEach(adapter => {
+       adapter.disconnect()
+     })
+     return this.flush()
+   }
  }

  export interface RepoConfig {
@@ -582,7 +598,6 @@ export interface RepoEvents {

  export interface DocumentPayload {
    handle: DocHandle<any>
-   isNew: boolean
  }

  export interface DeleteDocumentPayload {
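Taken together, the Repo.ts changes above mean that peerId is now optional (a random one is generated), create() initializes the document through handle.update() and handle.doneLoading() instead of an isNew flag, find() consults storage before announcing the document, and a shutdown() method has been added. A minimal sketch of the resulting surface from an application's point of view (the document shape here is a placeholder):

    import { Repo } from "@automerge/automerge-repo"

    // peerId may now be omitted; the new randomPeerId() helper supplies one.
    const repo = new Repo({ network: [] })

    // create() still accepts an initial value; internally it now goes through
    // handle.update() + handle.doneLoading() rather than the old isNew path.
    const handle = repo.create<{ count: number }>({ count: 0 })
    handle.change(d => {
      d.count += 1
    })

    // New in this release: disconnect every network adapter, then flush storage.
    await repo.shutdown()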
package/src/entrypoints/slim.ts CHANGED
@@ -1 +1,2 @@
  export * from "../index.js"
+ export { initializeBase64Wasm, initializeWasm } from "@automerge/automerge/slim"
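The slim entrypoint now re-exports the wasm initializers from @automerge/automerge/slim, so applications that ship the wasm themselves can initialize it through automerge-repo directly. A rough sketch, assuming the package exposes this file as an @automerge/automerge-repo/slim subpath and a Vite-style ?url wasm import; adjust both to your setup:

    import { Repo, initializeWasm } from "@automerge/automerge-repo/slim"
    // How the wasm bytes are obtained depends on your bundler; this is the
    // Vite-style URL import shown in the Automerge documentation.
    import wasmUrl from "@automerge/automerge/automerge.wasm?url"

    await initializeWasm(wasmUrl)
    const repo = new Repo({ network: [] })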
package/src/helpers/DummyNetworkAdapter.ts CHANGED
@@ -2,20 +2,44 @@ import { pause } from "../../src/helpers/pause.js"
  import { Message, NetworkAdapter, PeerId } from "../../src/index.js"

  export class DummyNetworkAdapter extends NetworkAdapter {
-   #startReady: boolean
    #sendMessage?: SendMessageFn

+   #ready = false
+   #readyResolver?: () => void
+   #readyPromise: Promise<void> = new Promise<void>(resolve => {
+     this.#readyResolver = resolve
+   })
+
+   isReady() {
+     return this.#ready
+   }
+
+   whenReady() {
+     return this.#readyPromise
+   }
+
+   #forceReady() {
+     if (!this.#ready) {
+       this.#ready = true
+       this.#readyResolver?.()
+     }
+   }
+
+   // A public wrapper for use in tests!
+   forceReady() {
+     this.#forceReady()
+   }
+
    constructor(opts: Options = { startReady: true }) {
      super()
-     this.#startReady = opts.startReady || false
+     if (opts.startReady) {
+       this.#forceReady()
+     }
      this.#sendMessage = opts.sendMessage
    }

    connect(peerId: PeerId) {
      this.peerId = peerId
-     if (this.#startReady) {
-       this.emit("ready", { network: this })
-     }
    }

    disconnect() {}
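DummyNetworkAdapter no longer emits a "ready" event; readiness is now exposed through isReady()/whenReady() and flipped by forceReady(). A small sketch of how a test can drive the new surface (the import path is relative and illustrative):

    import { DummyNetworkAdapter } from "../src/helpers/DummyNetworkAdapter.js"

    const adapter = new DummyNetworkAdapter({ startReady: false })
    console.assert(adapter.isReady() === false)

    const ready = adapter.whenReady() // resolves once forceReady() runs
    adapter.forceReady()
    await ready
    console.assert(adapter.isReady() === true)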
package/src/helpers/tests/network-adapter-tests.ts CHANGED
@@ -1,6 +1,13 @@
  import assert from "assert"
  import { describe, expect, it } from "vitest"
- import { PeerId, PeerMetadata, Repo, StorageId } from "../../index.js"
+ import {
+   generateAutomergeUrl,
+   parseAutomergeUrl,
+   PeerId,
+   PeerMetadata,
+   Repo,
+   StorageId,
+ } from "../../index.js"
  import type { NetworkAdapterInterface } from "../../network/NetworkAdapterInterface.js"
  import { eventPromise, eventPromises } from "../eventPromise.js"
  import { pause } from "../pause.js"
@@ -169,6 +176,119 @@ export function runNetworkAdapterTests(_setup: SetupFn, title?: string): void {

        teardown()
      })
+
+     it("should emit disconnect events on disconnect", async () => {
+       const { adapters, teardown } = await setup()
+       const left = adapters[0][0]
+       const right = adapters[1][0]
+
+       const leftPeerId = "left" as PeerId
+       const rightPeerId = "right" as PeerId
+
+       const leftRepo = new Repo({
+         network: [left],
+         peerId: leftPeerId,
+       })
+
+       const rightRepo = new Repo({
+         network: [right],
+         peerId: rightPeerId,
+       })
+
+       await Promise.all([
+         eventPromise(leftRepo.networkSubsystem, "peer"),
+         eventPromise(rightRepo.networkSubsystem, "peer"),
+       ])
+
+       const disconnectionPromises = Promise.all([
+         eventPromise(leftRepo.networkSubsystem, "peer-disconnected"),
+         eventPromise(rightRepo.networkSubsystem, "peer-disconnected"),
+       ])
+       left.disconnect()
+
+       await disconnectionPromises
+       teardown()
+     })
+
+     it("should not send messages after disconnect", async () => {
+       const { adapters, teardown } = await setup()
+       const left = adapters[0][0]
+       const right = adapters[1][0]
+
+       const leftPeerId = "left" as PeerId
+       const rightPeerId = "right" as PeerId
+
+       const leftRepo = new Repo({
+         network: [left],
+         peerId: leftPeerId,
+       })
+
+       const rightRepo = new Repo({
+         network: [right],
+         peerId: rightPeerId,
+       })
+
+       await Promise.all([
+         eventPromise(rightRepo.networkSubsystem, "peer"),
+         eventPromise(leftRepo.networkSubsystem, "peer"),
+       ])
+
+       const disconnected = eventPromise(right, "peer-disconnected")
+
+       left.disconnect()
+       await disconnected
+
+       const rightReceivedFromLeft = new Promise(resolve => {
+         right.on("message", msg => {
+           if (msg.senderId === leftPeerId) {
+             resolve(null)
+           }
+         })
+       })
+
+       const rightReceived = Promise.race([rightReceivedFromLeft, pause(10)])
+
+       const documentId = parseAutomergeUrl(generateAutomergeUrl()).documentId
+       left.send({
+         type: "foo",
+         data: new Uint8Array([1, 2, 3]),
+         documentId,
+         senderId: leftPeerId,
+         targetId: rightPeerId,
+       })
+
+       assert.equal(await rightReceived, null)
+       teardown()
+     })
+
+     it("should support reconnecting after disconnect", async () => {
+       const { adapters, teardown } = await setup()
+       const left = adapters[0][0]
+       const right = adapters[1][0]
+
+       const leftPeerId = "left" as PeerId
+       const rightPeerId = "right" as PeerId
+
+       const _leftRepo = new Repo({
+         network: [left],
+         peerId: leftPeerId,
+       })
+
+       const rightRepo = new Repo({
+         network: [right],
+         peerId: rightPeerId,
+       })
+
+       await eventPromise(rightRepo.networkSubsystem, "peer")
+
+       left.disconnect()
+
+       await pause(10)
+
+       left.connect(leftPeerId)
+       await eventPromise(left, "peer-candidate")
+       teardown()
+     })
    })
  }

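The new cases assume the harness's setup() resolves to groups of already-connected adapters (indexed as adapters[0][0] and adapters[1][0] above) plus a teardown() callback. A rough sketch of a conforming setup function; makeConnectedPair() is a placeholder for whatever transport-specific wiring your adapter's tests use:

    // Import path is illustrative; the harness lives at
    // src/helpers/tests/network-adapter-tests.ts inside this package.
    import { runNetworkAdapterTests } from "./network-adapter-tests.js"

    runNetworkAdapterTests(async () => {
      // makeConnectedPair() is a hypothetical helper that returns two adapters
      // whose send() delivers messages to the other side.
      const [alice, bob] = makeConnectedPair()
      return {
        adapters: [[alice], [bob]],
        teardown: () => {
          alice.disconnect()
          bob.disconnect()
        },
      }
    })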
package/src/helpers/throttle.ts CHANGED
@@ -20,7 +20,7 @@
   *
   *
   * Example usage:
-  * const callback = debounce((ev) => { doSomethingExpensiveOrOccasional() }, 100)
+  * const callback = throttle((ev) => { doSomethingExpensiveOrOccasional() }, 100)
   * target.addEventListener('frequent-event', callback);
   *
   */
package/src/network/NetworkAdapter.ts CHANGED
@@ -23,6 +23,9 @@ export abstract class NetworkAdapter
    peerId?: PeerId
    peerMetadata?: PeerMetadata

+   abstract isReady(): boolean
+   abstract whenReady(): Promise<void>
+
    /** Called by the {@link Repo} to start the connection process
     *
     * @argument peerId - the peerId of this repo
package/src/network/NetworkAdapterInterface.ts CHANGED
@@ -32,12 +32,16 @@ export interface NetworkAdapterInterface
    peerId?: PeerId
    peerMetadata?: PeerMetadata

+   isReady(): boolean
+   whenReady(): Promise<void>
+
    /** Called by the {@link Repo} to start the connection process
     *
     * @argument peerId - the peerId of this repo
     * @argument peerMetadata - how this adapter should present itself to other peers
     */
    connect(peerId: PeerId, peerMetadata?: PeerMetadata): void
+   // TODO: should this just return a ready promise?

    /** Called by the {@link Repo} to send a message to a peer
     *
@@ -52,9 +56,6 @@ export interface NetworkAdapterInterface
  // events & payloads

  export interface NetworkAdapterEvents {
-   /** Emitted when the network is ready to be used */
-   ready: (payload: OpenPayload) => void
-
    /** Emitted when the network is closed */
    close: () => void

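With the "ready" event removed from NetworkAdapterEvents, every adapter now has to answer isReady() and whenReady() itself. A sketch (not taken from the package) of the pattern a custom adapter might use to satisfy the new members, mirroring the DummyNetworkAdapter change above:

    import { NetworkAdapter } from "@automerge/automerge-repo"
    import type { Message, PeerId, PeerMetadata } from "@automerge/automerge-repo"

    export class MyTransportAdapter extends NetworkAdapter {
      #ready = false
      #readyResolver?: () => void
      #readyPromise = new Promise<void>(resolve => (this.#readyResolver = resolve))

      isReady() {
        return this.#ready
      }

      whenReady() {
        return this.#readyPromise
      }

      connect(peerId: PeerId, peerMetadata?: PeerMetadata) {
        this.peerId = peerId
        this.peerMetadata = peerMetadata
        // ...open the underlying transport here, then mark ourselves ready:
        this.#ready = true
        this.#readyResolver?.()
      }

      send(message: Message) {
        // ...hand the message to the transport
      }

      disconnect() {
        // ...tear the transport down and emit "peer-disconnected" as appropriate
      }
    }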
package/src/network/NetworkSubsystem.ts CHANGED
@@ -26,12 +26,11 @@ export class NetworkSubsystem extends EventEmitter<NetworkSubsystemEvents> {
    #count = 0
    #sessionId: SessionId = Math.random().toString(36).slice(2) as SessionId
    #ephemeralSessionCounts: Record<EphemeralMessageSource, number> = {}
-   #readyAdapterCount = 0
-   #adapters: NetworkAdapterInterface[] = []
+   adapters: NetworkAdapterInterface[] = []

    constructor(
      adapters: NetworkAdapterInterface[],
-     public peerId = randomPeerId(),
+     public peerId: PeerId,
      private peerMetadata: Promise<PeerMetadata>
    ) {
      super()
@@ -39,25 +38,25 @@ export class NetworkSubsystem extends EventEmitter<NetworkSubsystemEvents> {
      adapters.forEach(a => this.addNetworkAdapter(a))
    }

+   disconnect() {
+     this.adapters.forEach(a => a.disconnect())
+   }
+
+   reconnect() {
+     this.adapters.forEach(a => a.connect(this.peerId))
+   }
+
    addNetworkAdapter(networkAdapter: NetworkAdapterInterface) {
-     this.#adapters.push(networkAdapter)
-     networkAdapter.once("ready", () => {
-       this.#readyAdapterCount++
-       this.#log(
-         "Adapters ready: ",
-         this.#readyAdapterCount,
-         "/",
-         this.#adapters.length
-       )
-       if (this.#readyAdapterCount === this.#adapters.length) {
-         this.emit("ready")
-       }
-     })
+     this.adapters.push(networkAdapter)

      networkAdapter.on("peer-candidate", ({ peerId, peerMetadata }) => {
        this.#log(`peer candidate: ${peerId} `)
        // TODO: This is where authentication would happen

+       // TODO: on reconnection, this would create problems!
+       // the server would see a reconnection as a late-arriving channel
+       // for an existing peer and decide to ignore it until the connection
+       // times out: turns out my ICE/SIP emulation laziness did not pay off here
        if (!this.#adaptersByPeer[peerId]) {
          // TODO: handle losing a server here
          this.#adaptersByPeer[peerId] = networkAdapter
@@ -114,6 +113,13 @@ export class NetworkSubsystem extends EventEmitter<NetworkSubsystemEvents> {
      })
    }

+   // TODO: this probably introduces a race condition for the ready event
+   // but I plan to refactor that as part of this branch in another patch
+   removeNetworkAdapter(networkAdapter: NetworkAdapterInterface) {
+     this.adapters = this.adapters.filter(a => a !== networkAdapter)
+     networkAdapter.disconnect()
+   }
+
    send(message: MessageContents) {
      const peer = this.#adaptersByPeer[message.targetId]
      if (!peer) {
@@ -153,33 +159,20 @@ export class NetworkSubsystem extends EventEmitter<NetworkSubsystemEvents> {
    }

    isReady = () => {
-     return this.#readyAdapterCount === this.#adapters.length
+     return this.adapters.every(a => a.isReady())
    }

    whenReady = async () => {
-     if (this.isReady()) {
-       return
-     } else {
-       return new Promise<void>(resolve => {
-         this.once("ready", () => {
-           resolve()
-         })
-       })
-     }
+     return Promise.all(this.adapters.map(a => a.whenReady()))
    }
  }

- function randomPeerId() {
-   return `user-${Math.round(Math.random() * 100000)}` as PeerId
- }
-
  // events & payloads

  export interface NetworkSubsystemEvents {
    peer: (payload: PeerPayload) => void
    "peer-disconnected": (payload: PeerDisconnectedPayload) => void
    message: (payload: RepoMessage) => void
-   ready: () => void
  }

  export interface PeerPayload {
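At the subsystem level, readiness is now derived from the adapters rather than counted via "ready" events, and the new disconnect()/reconnect() methods simply fan out to every adapter. A short sketch of the resulting behavior, reusing the hypothetical MyTransportAdapter from the sketch above:

    import { Repo, type PeerId } from "@automerge/automerge-repo"

    // MyTransportAdapter is the hypothetical adapter sketched earlier.
    const repo = new Repo({
      network: [new MyTransportAdapter()],
      peerId: "alice" as PeerId,
    })

    // whenReady() now just awaits every adapter's own whenReady() promise.
    await repo.networkSubsystem.whenReady()

    // New: drop every adapter connection, then re-establish with the same peerId.
    repo.networkSubsystem.disconnect()
    repo.networkSubsystem.reconnect()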
@@ -96,5 +96,9 @@ describe("AutomergeUrl", () => {
      const url = stringifyAutomergeUrl({ documentId: badUuidDocumentId })
      assert(isValidAutomergeUrl(url) === false)
    })
+
+   it("should return false for a documentId that is just some random type", () => {
+     assert(isValidAutomergeUrl({ foo: "bar" } as unknown) === false)
+   })
  })
  })
package/test/DocHandle.test.ts CHANGED
@@ -10,6 +10,12 @@ import { TestDoc } from "./types.js"
 
  describe("DocHandle", () => {
    const TEST_ID = parseAutomergeUrl(generateAutomergeUrl()).documentId
+   const setup = (options?) => {
+     const handle = new DocHandle<TestDoc>(TEST_ID, options)
+     handle.update(() => A.init())
+     handle.doneLoading()
+     return handle
+   }

    const docFromMockStorage = (doc: A.Doc<{ foo: string }>) => {
      return A.change<{ foo: string }>(doc, d => (d.foo = "bar"))
@@ -20,15 +26,6 @@ describe("DocHandle", () => {
      assert.equal(handle.documentId, TEST_ID)
    })

-   it("should take an initial value", async () => {
-     const handle = new DocHandle(TEST_ID, {
-       isNew: true,
-       initialValue: { foo: "bar" },
-     })
-     const doc = await handle.doc()
-     assert.equal(doc.foo, "bar")
-   })
-
    it("should become ready when a document is loaded", async () => {
      const handle = new DocHandle<TestDoc>(TEST_ID)
      assert.equal(handle.isReady(), false)
@@ -55,7 +52,6 @@ describe("DocHandle", () => {

    it("should return undefined if we access the doc before ready", async () => {
      const handle = new DocHandle<TestDoc>(TEST_ID)
-
      assert.equal(handle.docSync(), undefined)
    })

@@ -73,10 +69,8 @@ describe("DocHandle", () => {
    })

    it("should return the heads when requested", async () => {
-     const handle = new DocHandle<TestDoc>(TEST_ID, {
-       isNew: true,
-       initialValue: { foo: "bar" },
-     })
+     const handle = setup()
+     handle.change(d => (d.foo = "bar"))
      assert.equal(handle.isReady(), true)

      const heads = A.getHeads(handle.docSync())
@@ -94,6 +88,7 @@ describe("DocHandle", () => {
     * Once there's a Repo#stop API this case should be covered in accompanying
     * tests and the following test removed.
     */
+   // TODO as part of future cleanup: move this to Repo
    it("no pending timers after a document is loaded", async () => {
      vi.useFakeTimers()
      const timerCount = vi.getTimerCount()
@@ -159,7 +154,7 @@ describe("DocHandle", () => {
    })

    it("should emit a change message when changes happen", async () => {
-     const handle = new DocHandle<TestDoc>(TEST_ID, { isNew: true })
+     const handle = setup()

      const p = new Promise<DocHandleChangePayload<TestDoc>>(resolve =>
        handle.once("change", d => resolve(d))
@@ -179,7 +174,7 @@ describe("DocHandle", () => {

    it("should not emit a change message if no change happens via update", () =>
      new Promise<void>((done, reject) => {
-       const handle = new DocHandle<TestDoc>(TEST_ID, { isNew: true })
+       const handle = setup()
        handle.once("change", () => {
          reject(new Error("shouldn't have changed"))
        })
@@ -190,7 +185,7 @@ describe("DocHandle", () => {
      }))

    it("should update the internal doc prior to emitting the change message", async () => {
-     const handle = new DocHandle<TestDoc>(TEST_ID, { isNew: true })
+     const handle = setup()

      const p = new Promise<void>(resolve =>
        handle.once("change", ({ handle, doc }) => {
@@ -208,7 +203,7 @@ describe("DocHandle", () => {
    })

    it("should emit distinct change messages when consecutive changes happen", async () => {
-     const handle = new DocHandle<TestDoc>(TEST_ID, { isNew: true })
+     const handle = setup()

      let calls = 0
      const p = new Promise(resolve =>
@@ -238,7 +233,7 @@ describe("DocHandle", () => {
    })

    it("should emit a change message when changes happen", async () => {
-     const handle = new DocHandle<TestDoc>(TEST_ID, { isNew: true })
+     const handle = setup()
      const p = new Promise(resolve => handle.once("change", d => resolve(d)))

      handle.change(doc => {
@@ -252,7 +247,7 @@ describe("DocHandle", () => {

    it("should not emit a patch message if no change happens", () =>
      new Promise<void>((done, reject) => {
-       const handle = new DocHandle<TestDoc>(TEST_ID, { isNew: true })
+       const handle = setup()
        handle.on("change", () => {
          reject(new Error("shouldn't have changed"))
        })
@@ -301,7 +296,7 @@ describe("DocHandle", () => {

    it("should not time out if the document is updated in time", async () => {
      // set docHandle time out after 5 ms
-     const handle = new DocHandle<TestDoc>(TEST_ID, { timeoutDelay: 1 })
+     const handle = setup({ timeoutDelay: 1 })

      // simulate requesting from the network
      handle.request()
@@ -319,7 +314,7 @@ describe("DocHandle", () => {
    })

    it("should emit a delete event when deleted", async () => {
-     const handle = new DocHandle<TestDoc>(TEST_ID, { isNew: true })
+     const handle = setup()

      const p = new Promise<void>(resolve =>
        handle.once("delete", () => resolve())
@@ -331,7 +326,7 @@ describe("DocHandle", () => {
    })

    it("should allow changing at old heads", async () => {
-     const handle = new DocHandle<TestDoc>(TEST_ID, { isNew: true })
+     const handle = setup()

      handle.change(doc => {
        doc.foo = "bar"
@@ -355,7 +350,8 @@ describe("DocHandle", () => {

    describe("ephemeral messaging", () => {
      it("can broadcast a message for the network to send out", async () => {
-       const handle = new DocHandle<TestDoc>(TEST_ID, { isNew: true })
+       const handle = setup()
+
        const message = { foo: "bar" }

        const promise = eventPromise(handle, "ephemeral-message-outbound")