@automerge/automerge-repo 1.0.5 → 1.0.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (65)
  1. package/.eslintrc +1 -1
  2. package/dist/DocHandle.d.ts +20 -7
  3. package/dist/DocHandle.d.ts.map +1 -1
  4. package/dist/DocHandle.js +27 -7
  5. package/dist/EphemeralData.d.ts +2 -2
  6. package/dist/EphemeralData.d.ts.map +1 -1
  7. package/dist/Repo.d.ts +16 -0
  8. package/dist/Repo.d.ts.map +1 -1
  9. package/dist/Repo.js +38 -10
  10. package/dist/helpers/cbor.d.ts +2 -2
  11. package/dist/helpers/cbor.d.ts.map +1 -1
  12. package/dist/helpers/cbor.js +1 -1
  13. package/dist/helpers/pause.d.ts.map +1 -1
  14. package/dist/helpers/pause.js +3 -1
  15. package/dist/helpers/tests/network-adapter-tests.d.ts.map +1 -1
  16. package/dist/helpers/tests/network-adapter-tests.js +2 -2
  17. package/dist/index.d.ts +11 -9
  18. package/dist/index.d.ts.map +1 -1
  19. package/dist/index.js +4 -4
  20. package/dist/network/NetworkAdapter.d.ts +3 -3
  21. package/dist/network/NetworkAdapter.d.ts.map +1 -1
  22. package/dist/network/NetworkSubsystem.d.ts +2 -2
  23. package/dist/network/NetworkSubsystem.d.ts.map +1 -1
  24. package/dist/network/NetworkSubsystem.js +30 -18
  25. package/dist/network/messages.d.ts +38 -68
  26. package/dist/network/messages.d.ts.map +1 -1
  27. package/dist/network/messages.js +13 -21
  28. package/dist/storage/StorageSubsystem.js +7 -7
  29. package/dist/synchronizer/CollectionSynchronizer.d.ts +3 -3
  30. package/dist/synchronizer/CollectionSynchronizer.d.ts.map +1 -1
  31. package/dist/synchronizer/CollectionSynchronizer.js +2 -2
  32. package/dist/synchronizer/DocSynchronizer.d.ts +3 -3
  33. package/dist/synchronizer/DocSynchronizer.d.ts.map +1 -1
  34. package/dist/synchronizer/DocSynchronizer.js +22 -29
  35. package/dist/synchronizer/Synchronizer.d.ts +2 -2
  36. package/dist/synchronizer/Synchronizer.d.ts.map +1 -1
  37. package/dist/types.d.ts +5 -1
  38. package/dist/types.d.ts.map +1 -1
  39. package/package.json +5 -13
  40. package/src/DocHandle.ts +38 -14
  41. package/src/EphemeralData.ts +2 -2
  42. package/src/Repo.ts +46 -12
  43. package/src/helpers/cbor.ts +4 -4
  44. package/src/helpers/pause.ts +7 -2
  45. package/src/helpers/tests/network-adapter-tests.ts +3 -3
  46. package/src/helpers/withTimeout.ts +2 -2
  47. package/src/index.ts +36 -29
  48. package/src/network/NetworkAdapter.ts +7 -3
  49. package/src/network/NetworkSubsystem.ts +31 -23
  50. package/src/network/messages.ts +88 -151
  51. package/src/storage/StorageSubsystem.ts +8 -8
  52. package/src/synchronizer/CollectionSynchronizer.ts +6 -15
  53. package/src/synchronizer/DocSynchronizer.ts +34 -48
  54. package/src/synchronizer/Synchronizer.ts +2 -2
  55. package/src/types.ts +8 -3
  56. package/test/CollectionSynchronizer.test.ts +58 -53
  57. package/test/DocHandle.test.ts +35 -36
  58. package/test/DocSynchronizer.test.ts +9 -8
  59. package/test/Network.test.ts +1 -0
  60. package/test/Repo.test.ts +273 -88
  61. package/test/StorageSubsystem.test.ts +6 -9
  62. package/test/tsconfig.json +8 -0
  63. package/test/types.ts +2 -0
  64. package/typedoc.json +3 -3
  65. package/.mocharc.json +0 -5
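
The most visible API change exercised by the new tests in package/test/Repo.test.ts below is document cloning and merging. A minimal usage sketch, based only on the calls that appear in those tests (Repo.create, Repo.clone, DocHandle.change, DocHandle.merge, DocHandle.docSync); the TestDoc shape mirrors the test fixture in package/test/types.ts and is not part of the library:

    import { Repo } from "@automerge/automerge-repo"

    // illustrative document shape, copied from the test fixture
    interface TestDoc {
      foo: string
      bar?: string
      baz?: string
    }

    // a repo with no network adapters is enough for a purely local sketch
    const repo = new Repo({ network: [] })

    const handle = repo.create<TestDoc>()
    handle.change(d => {
      d.foo = "bar"
    })

    // clone() copies the document into a handle with a new documentId
    const clone = repo.clone(handle)

    // the two handles can now diverge independently
    handle.change(d => {
      d.bar = "bif"
    })
    clone.change(d => {
      d.baz = "baz"
    })

    // merge() folds the clone's changes into the original handle only
    handle.merge(clone)
    console.log(handle.docSync()) // { foo: "bar", bar: "bif", baz: "baz" }
    console.log(clone.docSync())  // { foo: "bar", baz: "baz" }
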
package/test/Repo.test.ts CHANGED
@@ -1,7 +1,13 @@
- import assert from "assert"
  import { MessageChannelNetworkAdapter } from "@automerge/automerge-repo-network-messagechannel"
- import { BroadcastChannelNetworkAdapter } from "@automerge/automerge-repo-network-broadcastchannel"
-
+ import assert from "assert"
+ import * as Uuid from "uuid"
+ import { describe, it } from "vitest"
+ import { parseAutomergeUrl } from "../dist/DocUrl.js"
+ import { READY } from "../src/DocHandle.js"
+ import { generateAutomergeUrl, stringifyAutomergeUrl } from "../src/DocUrl.js"
+ import { Repo } from "../src/Repo.js"
+ import { eventPromise } from "../src/helpers/eventPromise.js"
+ import { pause } from "../src/helpers/pause.js"
  import {
    AutomergeUrl,
    DocHandle,
@@ -9,22 +15,14 @@ import {
    PeerId,
    SharePolicy,
  } from "../src/index.js"
- import { eventPromise } from "../src/helpers/eventPromise.js"
- import { pause, rejectOnTimeout } from "../src/helpers/pause.js"
- import { Repo } from "../src/Repo.js"
  import { DummyNetworkAdapter } from "./helpers/DummyNetworkAdapter.js"
  import { DummyStorageAdapter } from "./helpers/DummyStorageAdapter.js"
- import { getRandomItem } from "./helpers/getRandomItem.js"
- import { TestDoc } from "./types.js"
- import { generateAutomergeUrl, stringifyAutomergeUrl } from "../src/DocUrl.js"
- import { READY, AWAITING_NETWORK } from "../src/DocHandle.js"
  import {
-   generateLargeObject,
    LargeObject,
+   generateLargeObject,
  } from "./helpers/generate-large-object.js"
- import { parseAutomergeUrl } from "../dist/DocUrl.js"
-
- import * as Uuid from "uuid"
+ import { getRandomItem } from "./helpers/getRandomItem.js"
+ import { TestDoc } from "./types.js"

  describe("Repo", () => {
    describe("single repo", () => {
@@ -66,6 +64,8 @@ describe("Repo", () => {
      })

      it("can find a document using a legacy UUID (for now)", () => {
+       disableConsoleWarn()
+
        const { repo } = setup()
        const handle = repo.create<TestDoc>()
        handle.change((d: TestDoc) => {
@@ -79,6 +79,8 @@ describe("Repo", () => {
        const handle2 = repo.find(legacyDocumentId)
        assert.equal(handle, handle2)
        assert.deepEqual(handle2.docSync(), { foo: "bar" })
+
+       reenableConsoleWarn()
      })

      it("can change a document", async () => {
@@ -93,6 +95,65 @@ describe("Repo", () => {
        assert.equal(v?.foo, "bar")
      })

+     it("can clone a document", () => {
+       const { repo } = setup()
+       const handle = repo.create<TestDoc>()
+       handle.change(d => {
+         d.foo = "bar"
+       })
+       const handle2 = repo.clone(handle)
+       assert.equal(handle2.isReady(), true)
+       assert.notEqual(handle.documentId, handle2.documentId)
+       assert.deepStrictEqual(handle.docSync(), handle2.docSync())
+       assert.deepStrictEqual(handle2.docSync(), { foo: "bar" })
+     })
+
+     it("the cloned documents are distinct", () => {
+       const { repo } = setup()
+       const handle = repo.create<TestDoc>()
+       handle.change(d => {
+         d.foo = "bar"
+       })
+       const handle2 = repo.clone(handle)
+
+       handle.change(d => {
+         d.bar = "bif"
+       })
+       handle2.change(d => {
+         d.baz = "baz"
+       })
+
+       assert.notDeepStrictEqual(handle.docSync(), handle2.docSync())
+       assert.deepStrictEqual(handle.docSync(), { foo: "bar", bar: "bif" })
+       assert.deepStrictEqual(handle2.docSync(), { foo: "bar", baz: "baz" })
+     })
+
+     it("the cloned documents can merge", () => {
+       const { repo } = setup()
+       const handle = repo.create<TestDoc>()
+       handle.change(d => {
+         d.foo = "bar"
+       })
+       const handle2 = repo.clone(handle)
+
+       handle.change(d => {
+         d.bar = "bif"
+       })
+       handle2.change(d => {
+         d.baz = "baz"
+       })
+
+       handle.merge(handle2)
+
+       assert.deepStrictEqual(handle.docSync(), {
+         foo: "bar",
+         bar: "bif",
+         baz: "baz",
+       })
+       // only the one handle should be changed
+       assert.deepStrictEqual(handle2.docSync(), { foo: "bar", baz: "baz" })
+     })
+
      it("throws an error if we try to find a handle with an invalid AutomergeUrl", async () => {
        const { repo } = setup()
        try {
@@ -198,19 +259,14 @@ describe("Repo", () => {
        })
        // we now have a snapshot and an incremental change in storage
        assert.equal(handle.isReady(), true)
-       await handle.doc()
+       const foo = await handle.doc()
+       assert.equal(foo?.foo, "bar")
+
+       await pause()
        repo.delete(handle.documentId)

        assert(handle.isDeleted())
        assert.equal(repo.handles[handle.documentId], undefined)
-
-       const bobHandle = repo.find<TestDoc>(handle.url)
-       await assert.rejects(
-         rejectOnTimeout(bobHandle.doc(), 10),
-         "document should have been deleted"
-       )
-
-       assert(!bobHandle.isReady())
      })

      it("can delete an existing document by url", async () => {
@@ -221,36 +277,31 @@ describe("Repo", () => {
        })
        assert.equal(handle.isReady(), true)
        await handle.doc()
+
+       await pause()
        repo.delete(handle.url)

        assert(handle.isDeleted())
        assert.equal(repo.handles[handle.documentId], undefined)
-
-       const bobHandle = repo.find<TestDoc>(handle.url)
-       await assert.rejects(
-         rejectOnTimeout(bobHandle.doc(), 10),
-         "document should have been deleted"
-       )
-
-       assert(!bobHandle.isReady())
      })

-     it("deleting a document emits an event", async done => {
-       const { repo } = setup()
-       const handle = repo.create<TestDoc>()
-       handle.change(d => {
-         d.foo = "bar"
-       })
-       assert.equal(handle.isReady(), true)
+     it("deleting a document emits an event", async () =>
+       new Promise<void>(done => {
+         const { repo } = setup()
+         const handle = repo.create<TestDoc>()
+         handle.change(d => {
+           d.foo = "bar"
+         })
+         assert.equal(handle.isReady(), true)

-       repo.on("delete-document", ({ documentId }) => {
-         assert.equal(documentId, handle.documentId)
+         repo.on("delete-document", ({ documentId }) => {
+           assert.equal(documentId, handle.documentId)

-         done()
-       })
+           done()
+         })

-       repo.delete(handle.documentId)
-     })
+         repo.delete(handle.documentId)
+       }))

      it("storage state doesn't change across reloads when the document hasn't changed", async () => {
        const storage = new DummyStorageAdapter()
@@ -344,16 +395,16 @@ describe("Repo", () => {
        return true
      }

-     const setupRepos = (connectAlice = true) => {
+     const setupLinearNetwork = (connectAlice = true) => {
        // Set up three repos; connect Alice to Bob, and Bob to Charlie

-       const aliceBobChannel = new MessageChannel()
-       const bobCharlieChannel = new MessageChannel()
+       const abChannel = new MessageChannel()
+       const bcChannel = new MessageChannel()

-       const { port1: aliceToBob, port2: bobToAlice } = aliceBobChannel
-       const { port1: bobToCharlie, port2: charlieToBob } = bobCharlieChannel
+       const { port1: ab, port2: ba } = abChannel
+       const { port1: bc, port2: cb } = bcChannel

-       const aliceNetworkAdapter = new MessageChannelNetworkAdapter(aliceToBob)
+       const aliceNetworkAdapter = new MessageChannelNetworkAdapter(ab)

        const aliceRepo = new Repo({
          network: connectAlice ? [aliceNetworkAdapter] : [],
@@ -363,26 +414,26 @@ describe("Repo", () => {

        const bobRepo = new Repo({
          network: [
-           new MessageChannelNetworkAdapter(bobToAlice),
-           new MessageChannelNetworkAdapter(bobToCharlie),
+           new MessageChannelNetworkAdapter(ba),
+           new MessageChannelNetworkAdapter(bc),
          ],
          peerId: "bob" as PeerId,
          sharePolicy,
        })

        const charlieRepo = new Repo({
-         network: [new MessageChannelNetworkAdapter(charlieToBob)],
+         network: [new MessageChannelNetworkAdapter(cb)],
          peerId: "charlie" as PeerId,
        })

        const teardown = () => {
-         aliceBobChannel.port1.close()
-         bobCharlieChannel.port1.close()
+         abChannel.port1.close()
+         bcChannel.port1.close()
        }

        function doConnectAlice() {
          aliceRepo.networkSubsystem.addNetworkAdapter(
-           new MessageChannelNetworkAdapter(aliceToBob)
+           new MessageChannelNetworkAdapter(ab)
          )
          //bobRepo.networkSubsystem.addNetworkAdapter(new MessageChannelNetworkAdapter(bobToAlice))
        }
@@ -400,9 +451,60 @@ describe("Repo", () => {
        }
      }

+     const setupMeshNetwork = () => {
+       // Set up three repos; connect Alice to Bob, Bob to Charlie, and Alice to Charlie
+
+       const abChannel = new MessageChannel()
+       const bcChannel = new MessageChannel()
+       const acChannel = new MessageChannel()
+
+       const { port1: ab, port2: ba } = abChannel
+       const { port1: bc, port2: cb } = bcChannel
+       const { port1: ac, port2: ca } = acChannel
+
+       const aliceRepo = new Repo({
+         network: [
+           new MessageChannelNetworkAdapter(ab),
+           new MessageChannelNetworkAdapter(ac),
+         ],
+         peerId: "alice" as PeerId,
+         sharePolicy,
+       })
+
+       const bobRepo = new Repo({
+         network: [
+           new MessageChannelNetworkAdapter(ba),
+           new MessageChannelNetworkAdapter(bc),
+         ],
+         peerId: "bob" as PeerId,
+         sharePolicy,
+       })
+
+       const charlieRepo = new Repo({
+         network: [
+           new MessageChannelNetworkAdapter(ca),
+           new MessageChannelNetworkAdapter(cb),
+         ],
+         peerId: "charlie" as PeerId,
+       })
+
+       const teardown = () => {
+         abChannel.port1.close()
+         bcChannel.port1.close()
+         acChannel.port1.close()
+       }
+
+       return {
+         teardown,
+         aliceRepo,
+         bobRepo,
+         charlieRepo,
+       }
+     }
+
      const setup = async (connectAlice = true) => {
        const { teardown, aliceRepo, bobRepo, charlieRepo, connectAliceToBob } =
-         setupRepos(connectAlice)
+         setupLinearNetwork(connectAlice)

        const aliceHandle = aliceRepo.create<TestDoc>()
        aliceHandle.change(d => {
@@ -561,6 +663,56 @@ describe("Repo", () => {
        teardown()
      })

+     it("a previously unavailable document becomes available if the network adapter initially has no peers", async () => {
+       // It is possible for a network adapter to be ready without any peer
+       // being announced (e.g. the BroadcastChannelNetworkAdapter). In this
+       // case attempting to `Repo.find` a document which is not in the storage
+       // will result in an unavailable document. If a peer is later announced
+       // on the NetworkAdapter we should attempt to sync with the new peer and
+       // if the new peer has the document, the DocHandle should eventually
+       // transition to "ready"
+
+       // first create a repo with no network adapter and add a document so that
+       // we have a storage containing the document to pass to a new repo later
+       const storage = new DummyStorageAdapter()
+       const isolatedRepo = new Repo({
+         network: [],
+         storage,
+       })
+       const unsyncedHandle = isolatedRepo.create<TestDoc>()
+       const url = unsyncedHandle.url
+
+       // Now create a message channel to connect two repos
+       const abChannel = new MessageChannel()
+       const { port1: ab, port2: ba } = abChannel
+
+       // Create an empty repo to request the document from
+       const a = new Repo({
+         network: [new MessageChannelNetworkAdapter(ab)],
+         peerId: "a" as PeerId,
+         sharePolicy: async () => true
+       })
+
+       const handle = a.find(url)
+
+       // We expect this to be unavailable as there is no connected peer and
+       // the repo has no storage.
+       await eventPromise(handle, "unavailable")
+
+       // Now create a repo pointing at the storage containing the document and
+       // connect it to the other end of the MessageChannel
+       const b = new Repo({
+         storage,
+         peerId: "b" as PeerId,
+         network: [new MessageChannelNetworkAdapter(ba)],
+       })
+
+       // The empty repo should be notified of the new peer, send it a request
+       // and eventually resolve the handle to "READY"
+       await handle.whenReady()
+
+     })
+
      it("a deleted document from charlieRepo can be refetched", async () => {
        const { charlieRepo, aliceHandle, teardown } = await setup()

@@ -583,34 +735,8 @@ describe("Repo", () => {
        teardown()
      })

-     const setupMeshNetwork = async () => {
-       const aliceRepo = new Repo({
-         network: [new BroadcastChannelNetworkAdapter()],
-         peerId: "alice" as PeerId,
-       })
-
-       const bobRepo = new Repo({
-         network: [new BroadcastChannelNetworkAdapter()],
-         peerId: "bob" as PeerId,
-       })
-
-       const charlieRepo = new Repo({
-         network: [new BroadcastChannelNetworkAdapter()],
-         peerId: "charlie" as PeerId,
-       })
-
-       // pause to let the network set up
-       await pause(50)
-
-       return {
-         aliceRepo,
-         bobRepo,
-         charlieRepo,
-       }
-     }
-
      it("can emit an 'unavailable' event when it's not found on the network", async () => {
-       const { charlieRepo } = await setupMeshNetwork()
+       const { charlieRepo } = setupMeshNetwork()

        const url = generateAutomergeUrl()
        const handle = charlieRepo.find<TestDoc>(url)
@@ -689,10 +815,8 @@ describe("Repo", () => {
      })

      it("can broadcast a message without entering into an infinite loop", async () => {
-       const { aliceRepo, bobRepo, charlieRepo } = await setupMeshNetwork()
+       const { aliceRepo, bobRepo, charlieRepo } = setupMeshNetwork()

-       // pause to let the network set up
-       await pause(50)
        const message = { presence: "alex" }

        const aliceHandle = aliceRepo.create<TestDoc>()
@@ -706,7 +830,7 @@ describe("Repo", () => {
          }, 100)

          aliceHandle.on("ephemeral-message", () => {
-           reject("alice got the message")
+           reject(new Error("alice got the message"))
          })
        })

@@ -726,5 +850,66 @@ describe("Repo", () => {
        assert.deepStrictEqual(bob.message, message)
        assert.deepStrictEqual(charlie.message, message)
      })
+
+     it("notifies peers when a document is cloned", async () => {
+       const { bobRepo, charlieRepo } = setupMeshNetwork()
+
+       // pause to let the network set up
+       await pause(50)
+
+       const handle = bobRepo.create<TestDoc>()
+       handle.change(d => {
+         d.foo = "bar"
+       })
+       const handle2 = bobRepo.clone(handle)
+
+       // pause to let the sync happen
+       await pause(50)
+
+       const charlieHandle = charlieRepo.find(handle2.url)
+       await charlieHandle.doc()
+       assert.deepStrictEqual(charlieHandle.docSync(), { foo: "bar" })
+     })
+
+     it("notifies peers when a document is merged", async () => {
+       const { bobRepo, charlieRepo } = setupMeshNetwork()
+
+       // pause to let the network set up
+       await pause(50)
+
+       const handle = bobRepo.create<TestDoc>()
+       handle.change(d => {
+         d.foo = "bar"
+       })
+       const handle2 = bobRepo.clone(handle)
+
+       // pause to let the sync happen
+       await pause(50)
+
+       const charlieHandle = charlieRepo.find(handle2.url)
+       await charlieHandle.doc()
+       assert.deepStrictEqual(charlieHandle.docSync(), { foo: "bar" })
+
+       // now make a change to doc2 on bobs side and merge it into doc1
+       handle2.change(d => {
+         d.foo = "baz"
+       })
+       handle.merge(handle2)
+
+       // wait for the network to do it's thang
+       await pause(50)
+
+       await charlieHandle.doc()
+       assert.deepStrictEqual(charlieHandle.docSync(), { foo: "baz" })
+     })
    })
  })
+
+ const disableConsoleWarn = () => {
+   console["_warn"] = console.warn
+   console.warn = () => {}
+ }
+
+ const reenableConsoleWarn = () => {
+   console.warn = console["_warn"]
+ }
package/test/StorageSubsystem.test.ts CHANGED
@@ -1,16 +1,13 @@
+ import { NodeFSStorageAdapter } from "@automerge/automerge-repo-storage-nodefs"
+ import * as A from "@automerge/automerge/next"
+ import assert from "assert"
  import fs from "fs"
  import os from "os"
  import path from "path"
-
- import assert from "assert"
-
- import * as A from "@automerge/automerge/next"
-
- import { DummyStorageAdapter } from "./helpers/DummyStorageAdapter.js"
- import { NodeFSStorageAdapter } from "@automerge/automerge-repo-storage-nodefs"
-
- import { StorageSubsystem } from "../src/storage/StorageSubsystem.js"
+ import { describe, it } from "vitest"
  import { generateAutomergeUrl, parseAutomergeUrl } from "../src/DocUrl.js"
+ import { StorageSubsystem } from "../src/storage/StorageSubsystem.js"
+ import { DummyStorageAdapter } from "./helpers/DummyStorageAdapter.js"

  const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "automerge-repo-tests"))

package/test/tsconfig.json ADDED
@@ -0,0 +1,8 @@
+ {
+   "compilerOptions": {
+     "module": "NodeNext",
+     "moduleResolution": "Node16",
+     "noEmit": true
+   },
+   "include": ["**/*.ts"]
+ }
package/test/types.ts CHANGED
@@ -1,3 +1,5 @@
  export interface TestDoc {
    foo: string
+   bar?: string
+   baz?: string
  }
package/typedoc.json CHANGED
@@ -1,5 +1,5 @@
  {
-   "extends": "../../typedoc.base.json",
-   "entryPoints": ["src/index.ts"],
-   "readme": "none"
+   "extends": ["../../typedoc.base.json"],
+   "entryPoints": ["src/index.ts"],
+   "readme": "none"
  }
package/.mocharc.json DELETED
@@ -1,5 +0,0 @@
- {
-   "extension": ["ts"],
-   "spec": "test/*.test.ts",
-   "loader": "ts-node/esm"
- }