@automerge/automerge-repo 1.1.5 → 1.1.9

This diff shows the published contents of the two package versions as they appear in their public registry. It is provided for informational purposes only.
package/src/Repo.ts CHANGED
@@ -60,12 +60,12 @@ export class Repo extends EventEmitter<RepoEvents> {
 
   constructor({
     storage,
-    network,
+    network = [],
     peerId,
     sharePolicy,
    isEphemeral = storage === undefined,
    enableRemoteHeadsGossiping = false,
-  }: RepoConfig) {
+  }: RepoConfig = {}) {
    super()
    this.#remoteHeadsGossipingEnabled = enableRemoteHeadsGossiping
    this.#log = debug(`automerge-repo:repo`)
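
With both defaults in place, every constructor argument is now optional. A minimal sketch of what this enables (the repo stays ephemeral, since `isEphemeral` already defaults to true when no storage is given):

    import { Repo } from "@automerge/automerge-repo"

    // As of this release, `network` defaults to [] and the whole config
    // object defaults to {}, so a bare constructor call is valid.
    const repo = new Repo()

    // Equivalent to the previous minimal form:
    const explicit = new Repo({ network: [] })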
@@ -507,30 +507,27 @@ export class Repo extends EventEmitter<RepoEvents> {
   }
 
   /**
-   * Waits for Repo to finish write changes to disk.
-   * @hidden this API is experimental and may change
-   * @param documents - if provided, only waits for the specified documents
-   * @param timeout - if provided, the maximum time to wait in milliseconds (rejects on timeout)
+   * Writes Documents to a disk.
+   * @hidden this API is experimental and may change.
+   * @param documents - if provided, only writes the specified documents.
   * @returns Promise<void>
   */
-  async flush(documents?: DocumentId[], timeout?: number): Promise<void> {
+  async flush(documents?: DocumentId[]): Promise<void> {
    if (!this.storageSubsystem) {
-      return Promise.resolve()
+      return
    }
    const handles = documents
      ? documents.map(id => this.#handleCache[id])
      : Object.values(this.#handleCache)
-    return Promise.all(
+    await Promise.all(
      handles.map(async handle => {
        const doc = handle.docSync()
        if (!doc) {
          return
        }
-        return this.storageSubsystem!.flush(handle.documentId, doc, timeout)
+        return this.storageSubsystem!.saveDoc(handle.documentId, doc)
      })
-    ).then(() => {
-      /* No-op. To return `void` and not `void[]` */
-    })
+    )
  }
 }
 
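Note that the `timeout` parameter is gone, along with the storage-side timer that backed it (see the StorageSubsystem hunks below). A sketch of how a caller could reinstate a deadline using only the public API; `flushWithTimeout` is a hypothetical helper, not part of the package:

    import { Repo, DocumentId } from "@automerge/automerge-repo"

    // Hypothetical helper: race flush() against a timer. This only bounds
    // the wait; the underlying writes continue even if the race rejects.
    async function flushWithTimeout(repo: Repo, ids: DocumentId[], ms: number) {
      await Promise.race([
        repo.flush(ids),
        new Promise<never>((_, reject) =>
          setTimeout(() => reject(new Error("flush timed out")), ms)
        ),
      ])
    }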
@@ -545,8 +542,8 @@ export interface RepoConfig {
   /** A storage adapter can be provided, or not */
   storage?: StorageAdapterInterface
 
-  /** One or more network adapters must be provided */
-  network: NetworkAdapterInterface[]
+  /** A list of network adapters (more can be added at runtime). */
+  network?: NetworkAdapterInterface[]
 
   /**
   * Normal peers typically share generously with everyone (meaning we sync all our documents with
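
The new doc comment promises that adapters can be added at runtime. Assuming the 1.x network subsystem API (`addNetworkAdapter`; the websocket adapter and URL here are illustrative), that would look roughly like:

    import { Repo } from "@automerge/automerge-repo"
    import { BrowserWebSocketClientAdapter } from "@automerge/automerge-repo-network-websocket"

    // Start with no adapters, then attach one once a server URL is known.
    const repo = new Repo()
    repo.networkSubsystem.addNetworkAdapter(
      new BrowserWebSocketClientAdapter("wss://sync.example.com") // placeholder URL
    )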
package/src/index.ts CHANGED
@@ -90,3 +90,46 @@ export type {
 } from "./storage/types.js"
 
 export * from "./types.js"
+
+// export commonly used data types
+export { Counter, RawString, Cursor } from "@automerge/automerge/next"
+
+// export some automerge API types
+export type {
+  Doc,
+  Heads,
+  Patch,
+  PatchCallback,
+  Prop,
+  ActorId,
+  Change,
+  ChangeFn,
+  Mark,
+  MarkSet,
+  MarkRange,
+  MarkValue,
+} from "@automerge/automerge/next"
+
+// export a few utility functions that aren't in automerge-repo
+// NB that these should probably all just be available via the dochandle
+export {
+  getChanges,
+  getAllChanges,
+  applyChanges,
+  view,
+  getConflicts,
+} from "@automerge/automerge/next"
+
+// export type-specific utility functions
+// these mostly can't be on the data-type in question because
+// JS strings can't have methods added to them
+export {
+  getCursor,
+  getCursorPosition,
+  splice,
+  updateText,
+  insertAt,
+  deleteAt,
+  mark,
+  unmark,
+} from "@automerge/automerge/next"
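
In practice, application code can now import Automerge's common data types and helpers from automerge-repo itself instead of depending on @automerge/automerge/next directly. A small sketch of the re-exported surface:

    import { Repo, Counter, updateText } from "@automerge/automerge-repo"

    const repo = new Repo()
    const handle = repo.create<{ text: string; visits: Counter }>()
    handle.change(doc => {
      doc.visits = new Counter()
      doc.visits.increment(1)
      doc.text = "hello wrld"
      updateText(doc, ["text"], "hello world") // text edit that merges cleanly
    })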
package/src/storage/StorageSubsystem.ts CHANGED
@@ -8,7 +8,6 @@ import { ChunkInfo, StorageKey, StorageId } from "./types.js"
 import { keyHash, headsHash } from "./keyHash.js"
 import { chunkTypeFromKey } from "./chunkTypeFromKey.js"
 import * as Uuid from "uuid"
-import { EventEmitter } from "eventemitter3"
 
 /**
  * The storage subsystem is responsible for saving and loading Automerge documents to and from
@@ -29,8 +28,6 @@ export class StorageSubsystem {
 
   #log = debug(`automerge-repo:storage-subsystem`)
 
-  #saved = new EventEmitter<{ saved: () => void }>()
-
   constructor(storageAdapter: StorageAdapterInterface) {
     this.#storageAdapter = storageAdapter
   }
@@ -159,7 +156,6 @@ export class StorageSubsystem {
       await this.#saveIncremental(documentId, doc)
     }
     this.#storedHeads.set(documentId, A.getHeads(doc))
-    this.#saved.emit("saved")
   }
 
   /**
@@ -249,34 +245,6 @@ export class StorageSubsystem {
     await this.#storageAdapter.save(key, A.encodeSyncState(syncState))
   }
 
-  /**
-   * Waiting for document state to be written to disk.
-   * @deprecated because it will be changed soon.
-   */
-  async flush(documentId: DocumentId, doc: A.Doc<unknown>, timeout?: number) {
-    return new Promise<void>((resolve, reject) => {
-      let timeoutId: NodeJS.Timeout
-      if (timeout) {
-        timeoutId = setTimeout(() => {
-          this.#saved.off("saved", checkIfSaved)
-          reject(new Error("Timed out waiting for save"))
-        }, timeout)
-      }
-
-      const checkIfSaved = () => {
-        if (!this.#shouldSave(documentId, doc)) {
-          this.#saved.off("saved", checkIfSaved)
-          clearTimeout(timeoutId)
-          resolve()
-        }
-      }
-
-      this.#saved.on("saved", checkIfSaved)
-
-      checkIfSaved()
-    })
-  }
-
   /**
   * Returns true if the document has changed since the last time it was saved.
   */
package/test/CollectionSynchronizer.test.ts CHANGED
@@ -8,9 +8,7 @@ describe("CollectionSynchronizer", () => {
   let synchronizer: CollectionSynchronizer
 
   beforeEach(() => {
-    repo = new Repo({
-      network: [],
-    })
+    repo = new Repo()
     synchronizer = new CollectionSynchronizer(repo)
   })
 
package/test/DocHandle.test.ts CHANGED
@@ -72,6 +72,24 @@ describe("DocHandle", () => {
     assert.equal(doc?.foo, "bar")
   })
 
+  it("should return the heads when requested", async () => {
+    const handle = new DocHandle<TestDoc>(TEST_ID, {
+      isNew: true,
+      initialValue: { foo: "bar" },
+    })
+    assert.equal(handle.isReady(), true)
+
+    const heads = A.getHeads(handle.docSync())
+    assert.notDeepEqual(handle.heads(), [])
+    assert.deepEqual(heads, handle.heads())
+  })
+
+  it("should return undefined if the heads aren't loaded", async () => {
+    const handle = new DocHandle<TestDoc>(TEST_ID)
+    assert.equal(handle.isReady(), false)
+    assert.deepEqual(handle.heads(), undefined)
+  })
+
   /**
   * Once there's a Repo#stop API this case should be covered in accompanying
   * tests and the following test removed.
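
These tests pin down the new `DocHandle.heads()` accessor: once the handle is ready it matches `A.getHeads(handle.docSync())`, and before the document has loaded it returns undefined. A usage sketch:

    import { Repo } from "@automerge/automerge-repo"

    const repo = new Repo()
    const handle = repo.create<{ foo: string }>()
    handle.change(doc => {
      doc.foo = "bar"
    })

    const before = handle.heads()! // defined once the handle is ready
    handle.change(doc => {
      doc.foo = "baz"
    })
    console.log(before, handle.heads()) // heads advance with each change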
@@ -319,7 +337,7 @@ describe("DocHandle", () => {
       doc.foo = "bar"
     })
 
-    const headsBefore = A.getHeads(handle.docSync()!)
+    const headsBefore = handle.heads()!
 
     handle.change(doc => {
       doc.foo = "rab"
package/test/DocSynchronizer.test.ts CHANGED
@@ -68,10 +68,7 @@ describe("DocSynchronizer", () => {
 
     assert.equal(message2.peerId, "alice")
     assert.equal(message2.documentId, handle.documentId)
-    assert.deepEqual(
-      message2.syncState.lastSentHeads,
-      A.getHeads(handle.docSync())
-    )
+    assert.deepEqual(message2.syncState.lastSentHeads, handle.heads())
   })
 
   it("still syncs with a peer after it disconnects and reconnects", async () => {
package/test/Repo.test.ts CHANGED
@@ -3,7 +3,6 @@ import { MessageChannelNetworkAdapter } from "../../automerge-repo-network-messa
 import assert from "assert"
 import * as Uuid from "uuid"
 import { describe, expect, it } from "vitest"
-import { HandleState, READY } from "../src/DocHandle.js"
 import { parseAutomergeUrl } from "../src/AutomergeUrl.js"
 import {
   generateAutomergeUrl,
@@ -29,14 +28,12 @@ import {
 } from "./helpers/generate-large-object.js"
 import { getRandomItem } from "./helpers/getRandomItem.js"
 import { TestDoc } from "./types.js"
-import { StorageId } from "../src/storage/types.js"
+import { StorageId, StorageKey } from "../src/storage/types.js"
 
 describe("Repo", () => {
   describe("constructor", () => {
-    it("can be instantiated without network adapters", () => {
-      const repo = new Repo({
-        network: [],
-      })
+    it("can be instantiated without any configuration", () => {
+      const repo = new Repo()
       expect(repo).toBeInstanceOf(Repo)
     })
   })
@@ -126,8 +123,7 @@ describe("Repo", () => {
     })
     const v = await handle.doc()
     assert.equal(handle.isReady(), true)
-
-    assert.equal(v?.foo, "bar")
+    assert.equal(v.foo, "bar")
   })
 
   it("can clone a document", () => {
@@ -207,12 +203,11 @@ describe("Repo", () => {
     assert.equal(doc, undefined)
   })
 
-  it("fires an 'unavailable' event when you don't have the document locally and network to connect to", async () => {
+  it("emits an unavailable event when you don't have the document locally and are not connected to anyone", async () => {
     const { repo } = setup()
     const url = generateAutomergeUrl()
     const handle = repo.find<TestDoc>(url)
     assert.equal(handle.isReady(), false)
-
     await eventPromise(handle, "unavailable")
   })
 
@@ -255,7 +250,6 @@ describe("Repo", () => {
 
     const repo2 = new Repo({
       storage: storageAdapter,
-      network: [],
     })
 
     const bobHandle = repo2.find<TestDoc>(handle.url)
@@ -277,7 +271,6 @@ describe("Repo", () => {
 
     const repo2 = new Repo({
       storage: storageAdapter,
-      network: [],
     })
 
     const bobHandle = repo2.find<TestDoc>(handle.url)
@@ -364,7 +357,6 @@ describe("Repo", () => {
 
     const repo = new Repo({
       storage,
-      network: [],
     })
 
     const handle = repo.create<{ count: number }>()
@@ -382,7 +374,6 @@ describe("Repo", () => {
 
     const repo2 = new Repo({
       storage,
-      network: [],
     })
     const handle2 = repo2.find(handle.url)
     await handle2.doc()
@@ -395,7 +386,6 @@ describe("Repo", () => {
 
     const repo = new Repo({
       storage,
-      network: [],
     })
 
     const handle = repo.create<{ count: number }>()
@@ -410,7 +400,6 @@ describe("Repo", () => {
     for (let i = 0; i < 3; i++) {
       const repo2 = new Repo({
         storage,
-        network: [],
       })
       const handle2 = repo2.find(handle.url)
       await handle2.doc()
@@ -451,40 +440,66 @@ describe("Repo", () => {
     expect(A.getHistory(v)).toEqual(A.getHistory(updatedDoc))
   })
 
-  it("throws an error if we try to import an invalid document", async () => {
+  it("throws an error if we try to import a nonsensical byte array", async () => {
     const { repo } = setup()
     expect(() => {
-      repo.import<TestDoc>(A.init<TestDoc> as unknown as Uint8Array)
+      repo.import<TestDoc>(new Uint8Array([1, 2, 3]))
     }).toThrow()
   })
+
+  // TODO: not sure if this is the desired behavior from `import`.
+
+  it("makes an empty document if we try to import an automerge doc", async () => {
+    const { repo } = setup()
+    // @ts-ignore - passing something other than UInt8Array
+    const handle = repo.import<TestDoc>(A.from({ foo: 123 }))
+    const doc = await handle.doc()
+    expect(doc).toEqual({})
+  })
+
+  it("makes an empty document if we try to import a plain object", async () => {
+    const { repo } = setup()
+    // @ts-ignore - passing something other than UInt8Array
+    const handle = repo.import<TestDoc>({ foo: 123 })
+    const doc = await handle.doc()
+    expect(doc).toEqual({})
+  })
 })
 
 describe("flush behaviour", () => {
   const setup = () => {
-    let blockedSaves = []
-    let resume = () => {
-      blockedSaves.forEach(resolve => resolve())
-      blockedSaves = []
+    let blockedSaves = new Set<{ path: StorageKey; resolve: () => void }>()
+    let resume = (documentIds?: DocumentId[]) => {
+      const savesToUnblock = documentIds
+        ? Array.from(blockedSaves).filter(({ path }) =>
+            documentIds.some(documentId => path.includes(documentId))
+          )
+        : Array.from(blockedSaves)
+      savesToUnblock.forEach(({ resolve }) => resolve())
     }
     const pausedStorage = new DummyStorageAdapter()
     {
       const originalSave = pausedStorage.save.bind(pausedStorage)
       pausedStorage.save = async (...args) => {
-        await new Promise(resolve => {
-          console.log("made a promise", ...args[0])
-          blockedSaves.push(resolve)
+        await new Promise<void>(resolve => {
+          const blockedSave = {
+            path: args[0],
+            resolve: () => {
+              resolve()
+              blockedSaves.delete(blockedSave)
+            },
+          }
+          blockedSaves.add(blockedSave)
         })
         await pause(0)
         // otherwise all the save promises resolve together
        // which prevents testing flushing a single docID
-        console.log("resuming save", ...args[0])
        return originalSave(...args)
      }
    }
 
    const repo = new Repo({
      storage: pausedStorage,
-      network: [],
    })
 
    // Create a pair of handles
@@ -501,7 +516,6 @@ describe("Repo", () => {
     // Reload repo
     const repo2 = new Repo({
       storage: pausedStorage,
-      network: [],
     })
 
     // Could not find the document that is not yet saved because of slow storage.
@@ -524,7 +538,6 @@ describe("Repo", () => {
     // Reload repo
     const repo = new Repo({
       storage: pausedStorage,
-      network: [],
     })
 
     expect(
@@ -540,7 +553,7 @@ describe("Repo", () => {
     const { resume, pausedStorage, repo, handle, handle2 } = setup()
 
     const flushPromise = repo.flush([handle.documentId])
-    resume()
+    resume([handle.documentId])
     await flushPromise
 
     // Check that the data is now saved.
@@ -550,7 +563,6 @@ describe("Repo", () => {
     // Reload repo
     const repo = new Repo({
       storage: pausedStorage,
-      network: [],
     })
 
     expect(
@@ -564,14 +576,22 @@ describe("Repo", () => {
     }
   })
 
-  it("should time out with failure after a specified delay", async () => {
-    const { resume, pausedStorage, repo, handle, handle2 } = setup()
-
-    const flushPromise = repo.flush([handle.documentId], 10)
-    expect(flushPromise).rejects.toThrowError("Timed out waiting for save")
+  it("flush right before change should resolve correctly", async () => {
+    const repo = new Repo({
+      network: [],
+      storage: new DummyStorageAdapter(),
+    })
+    const handle = repo.create<{ field?: string }>()
 
-    // Check that the data is now saved.
-    expect(pausedStorage.keys().length).toBe(0)
+    for (let i = 0; i < 10; i++) {
+      const flushPromise = repo.flush([handle.documentId])
+      handle.change((doc: any) => {
+        doc.field += Array(1024)
+          .fill(Math.random() * 10)
+          .join("")
+      })
+      await flushPromise
+    }
   })
 })
 
@@ -810,9 +830,7 @@ describe("Repo", () => {
   it("charlieRepo can request a document not initially shared with it", async () => {
     const { charlieRepo, notForCharlie, teardown } = await setup()
 
-    const handle = charlieRepo.find<TestDoc>(
-      stringifyAutomergeUrl({ documentId: notForCharlie })
-    )
+    const handle = charlieRepo.find<TestDoc>(notForCharlie)
 
     await pause(50)
 
@@ -826,9 +844,7 @@ describe("Repo", () => {
   it("charlieRepo can request a document across a network of multiple peers", async () => {
     const { charlieRepo, notForBob, teardown } = await setup()
 
-    const handle = charlieRepo.find<TestDoc>(
-      stringifyAutomergeUrl({ documentId: notForBob })
-    )
+    const handle = charlieRepo.find<TestDoc>(notForBob)
 
     await pause(50)
 
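Both updated tests above pass a bare DocumentId straight to find(), relying on it accepting any form of document identifier rather than requiring a stringifyAutomergeUrl round-trip. A sketch:

    import { Repo } from "@automerge/automerge-repo"

    const repo = new Repo()
    const { url, documentId } = repo.create<{ foo: string }>()

    // Equivalent lookups: find() takes an automerge: URL or a bare id.
    const byUrl = repo.find<{ foo: string }>(url)
    const byId = repo.find<{ foo: string }>(documentId)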
@@ -850,7 +866,16 @@ describe("Repo", () => {
     teardown()
   })
 
-  it("fires an 'unavailable' event when a document is not available on the network", async () => {
+  it("emits an unavailable event when it's not found on the network", async () => {
+    const { aliceRepo, teardown } = await setup()
+    const url = generateAutomergeUrl()
+    const handle = aliceRepo.find(url)
+    assert.equal(handle.isReady(), false)
+    await eventPromise(handle, "unavailable")
+    teardown()
+  })
+
+  it("emits an unavailable event every time an unavailable doc is requested", async () => {
     const { charlieRepo, teardown } = await setup()
     const url = generateAutomergeUrl()
     const handle = charlieRepo.find<TestDoc>(url)
@@ -861,10 +886,13 @@ describe("Repo", () => {
       eventPromise(charlieRepo, "unavailable-document"),
     ])
 
-    // make sure it fires a second time if the doc is still unavailable
+    // make sure it emits a second time if the doc is still unavailable
     const handle2 = charlieRepo.find<TestDoc>(url)
     assert.equal(handle2.isReady(), false)
-    await eventPromise(handle2, "unavailable")
+    await Promise.all([
+      eventPromise(handle, "unavailable"),
+      eventPromise(charlieRepo, "unavailable-document"),
+    ])
 
     teardown()
   })
@@ -888,7 +916,7 @@ describe("Repo", () => {
 
     await eventPromise(aliceRepo.networkSubsystem, "peer")
 
-    const doc = await handle.doc([READY])
+    const doc = await handle.doc(["ready"])
     assert.deepStrictEqual(doc, { foo: "baz" })
 
     // an additional find should also return the correct resolved document
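
With the HandleState import gone, the tests now pass plain state strings to doc(), and whenReady() with no arguments waits for the ready state. A sketch:

    import { Repo } from "@automerge/automerge-repo"

    async function example() {
      const repo = new Repo()
      const handle = repo.create<{ foo: string }>()

      await handle.whenReady() // no argument needed for the "ready" state
      const doc = await handle.doc(["ready"]) // string literal instead of READY
    }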
@@ -912,7 +940,6 @@ describe("Repo", () => {
     // we have a storage containing the document to pass to a new repo later
     const storage = new DummyStorageAdapter()
     const isolatedRepo = new Repo({
-      network: [],
       storage,
     })
     const unsyncedHandle = isolatedRepo.create<TestDoc>()
@@ -970,17 +997,6 @@ describe("Repo", () => {
     teardown()
   })
 
-  it("can emit an 'unavailable' event when it's not found on the network", async () => {
-    const { charlieRepo, teardown } = await setup()
-
-    const url = generateAutomergeUrl()
-    const handle = charlieRepo.find<TestDoc>(url)
-    assert.equal(handle.isReady(), false)
-
-    await eventPromise(handle, "unavailable")
-    teardown()
-  })
-
   it("syncs a bunch of changes", async () => {
     const { aliceRepo, bobRepo, charlieRepo, teardown } = await setup()
 
@@ -1064,8 +1080,7 @@ describe("Repo", () => {
       bobHandle.documentId,
       await charlieRepo!.storageSubsystem.id()
     )
-    const docHeads = A.getHeads(bobHandle.docSync())
-    assert.deepStrictEqual(storedSyncState.sharedHeads, docHeads)
+    assert.deepStrictEqual(storedSyncState.sharedHeads, bobHandle.heads())
 
     teardown()
   })
@@ -1118,7 +1133,6 @@ describe("Repo", () => {
     // setup new repo which uses bob's storage
     const bob2Repo = new Repo({
       storage: bobStorage,
-      network: [],
       peerId: "bob-2" as PeerId,
     })
 
@@ -1182,18 +1196,15 @@ describe("Repo", () => {
     // pause to let the sync happen
     await pause(100)
 
-    const charlieHeads = A.getHeads(charlieHandle.docSync())
-    const bobHeads = A.getHeads(handle.docSync())
-
-    assert.deepStrictEqual(charlieHeads, bobHeads)
+    assert.deepStrictEqual(charlieHandle.heads(), handle.heads())
 
     const nextRemoteHeads = await nextRemoteHeadsPromise
     assert.deepStrictEqual(nextRemoteHeads.storageId, charliedStorageId)
-    assert.deepStrictEqual(nextRemoteHeads.heads, charlieHeads)
+    assert.deepStrictEqual(nextRemoteHeads.heads, charlieHandle.heads())
 
     assert.deepStrictEqual(
       handle.getRemoteHeads(charliedStorageId),
-      A.getHeads(charlieHandle.docSync())
+      charlieHandle.heads()
     )
 
     teardown()
@@ -1230,14 +1241,14 @@ describe("Repo", () => {
 
     const bobDoc = bobRepo.find(aliceDoc.url)
     bobDoc.unavailable()
-    await bobDoc.whenReady([HandleState.UNAVAILABLE])
+    await eventPromise(bobDoc, "unavailable")
 
     aliceAdapter.peerCandidate(bob)
     // Bob isn't yet connected to Alice and can't respond to her sync message
     await pause(100)
     bobAdapter.peerCandidate(alice)
 
-    await bobDoc.whenReady([HandleState.READY])
+    await bobDoc.whenReady()
 
     assert.equal(bobDoc.isReady(), true)
   })
package/test/remoteHeads.test.ts CHANGED
@@ -152,7 +152,7 @@ describe("DocHandle.remoteHeads", () => {
     // wait for alice's service worker to acknowledge the change
     const { heads } = await aliceSeenByBobPromise
 
-    assert.deepStrictEqual(heads, A.getHeads(aliceServiceWorkerDoc.docSync()))
+    assert.deepStrictEqual(heads, aliceServiceWorkerDoc.heads())
   })
 
   it("should report remoteHeads only for documents the subscriber has open", async () => {
package/tsconfig.json CHANGED
@@ -10,6 +10,7 @@
     "esModuleInterop": true,
     "forceConsistentCasingInFileNames": true,
     "strict": true,
+    "strictNullChecks": true,
     "skipLibCheck": true
   },
   "include": ["src/**/*.ts"],