@automerge/automerge-repo 0.2.1 → 1.0.0-alpha.0

Files changed (57)
  1. package/README.md +7 -24
  2. package/dist/DocCollection.d.ts +4 -4
  3. package/dist/DocCollection.d.ts.map +1 -1
  4. package/dist/DocCollection.js +25 -17
  5. package/dist/DocHandle.d.ts +46 -10
  6. package/dist/DocHandle.d.ts.map +1 -1
  7. package/dist/DocHandle.js +101 -36
  8. package/dist/DocUrl.d.ts +38 -18
  9. package/dist/DocUrl.d.ts.map +1 -1
  10. package/dist/DocUrl.js +63 -24
  11. package/dist/Repo.d.ts.map +1 -1
  12. package/dist/Repo.js +4 -6
  13. package/dist/helpers/headsAreSame.d.ts +1 -1
  14. package/dist/helpers/headsAreSame.d.ts.map +1 -1
  15. package/dist/helpers/tests/network-adapter-tests.js +10 -10
  16. package/dist/index.d.ts +3 -2
  17. package/dist/index.d.ts.map +1 -1
  18. package/dist/index.js +1 -0
  19. package/dist/network/NetworkAdapter.d.ts +2 -3
  20. package/dist/network/NetworkAdapter.d.ts.map +1 -1
  21. package/dist/network/NetworkSubsystem.d.ts +2 -3
  22. package/dist/network/NetworkSubsystem.d.ts.map +1 -1
  23. package/dist/network/NetworkSubsystem.js +9 -13
  24. package/dist/storage/StorageAdapter.d.ts +9 -5
  25. package/dist/storage/StorageAdapter.d.ts.map +1 -1
  26. package/dist/storage/StorageSubsystem.d.ts +2 -2
  27. package/dist/storage/StorageSubsystem.d.ts.map +1 -1
  28. package/dist/storage/StorageSubsystem.js +73 -25
  29. package/dist/synchronizer/CollectionSynchronizer.d.ts +1 -1
  30. package/dist/synchronizer/CollectionSynchronizer.d.ts.map +1 -1
  31. package/dist/synchronizer/CollectionSynchronizer.js +5 -1
  32. package/dist/synchronizer/DocSynchronizer.d.ts.map +1 -1
  33. package/dist/synchronizer/DocSynchronizer.js +6 -5
  34. package/dist/types.d.ts +6 -0
  35. package/dist/types.d.ts.map +1 -1
  36. package/package.json +8 -5
  37. package/src/DocCollection.ts +32 -22
  38. package/src/DocHandle.ts +121 -47
  39. package/src/DocUrl.ts +90 -0
  40. package/src/Repo.ts +5 -8
  41. package/src/helpers/tests/network-adapter-tests.ts +10 -10
  42. package/src/index.ts +7 -5
  43. package/src/network/NetworkAdapter.ts +2 -3
  44. package/src/network/NetworkSubsystem.ts +9 -14
  45. package/src/storage/StorageAdapter.ts +7 -5
  46. package/src/storage/StorageSubsystem.ts +95 -34
  47. package/src/synchronizer/CollectionSynchronizer.ts +10 -2
  48. package/src/synchronizer/DocSynchronizer.ts +7 -6
  49. package/src/types.ts +4 -1
  50. package/test/CollectionSynchronizer.test.ts +1 -1
  51. package/test/DocCollection.test.ts +3 -2
  52. package/test/DocHandle.test.ts +32 -26
  53. package/test/DocSynchronizer.test.ts +3 -2
  54. package/test/Repo.test.ts +76 -27
  55. package/test/StorageSubsystem.test.ts +10 -7
  56. package/test/helpers/DummyNetworkAdapter.ts +2 -2
  57. package/test/helpers/DummyStorageAdapter.ts +8 -4
package/src/helpers/tests/network-adapter-tests.ts CHANGED
@@ -37,7 +37,7 @@ export function runAdapterTests(_setup: SetupFn, title?: string): void {
 
     // Bob receives the document
     await eventPromise(bobRepo, "document")
-    const bobHandle = bobRepo.find<TestDoc>(aliceHandle.documentId)
+    const bobHandle = bobRepo.find<TestDoc>(aliceHandle.url)
 
     // Alice changes the document
     aliceHandle.change(d => {
@@ -46,7 +46,7 @@ export function runAdapterTests(_setup: SetupFn, title?: string): void {
 
     // Bob receives the change
     await eventPromise(bobHandle, "change")
-    assert.equal((await bobHandle.value()).foo, "bar")
+    assert.equal((await bobHandle.doc()).foo, "bar")
 
     // Bob changes the document
     bobHandle.change(d => {
@@ -55,7 +55,7 @@ export function runAdapterTests(_setup: SetupFn, title?: string): void {
 
     // Alice receives the change
     await eventPromise(aliceHandle, "change")
-    assert.equal((await aliceHandle.value()).foo, "baz")
+    assert.equal((await aliceHandle.doc()).foo, "baz")
   }
 
   // Run the test in both directions, in case they're different types of adapters
@@ -83,12 +83,12 @@ export function runAdapterTests(_setup: SetupFn, title?: string): void {
 
     // Alice creates a document
     const aliceHandle = aliceRepo.create<TestDoc>()
-    const documentId = aliceHandle.documentId
+    const docUrl = aliceHandle.url
 
     // Bob and Charlie receive the document
     await eventPromises([bobRepo, charlieRepo], "document")
-    const bobHandle = bobRepo.find<TestDoc>(documentId)
-    const charlieHandle = charlieRepo.find<TestDoc>(documentId)
+    const bobHandle = bobRepo.find<TestDoc>(docUrl)
+    const charlieHandle = charlieRepo.find<TestDoc>(docUrl)
 
     // Alice changes the document
     aliceHandle.change(d => {
@@ -97,8 +97,8 @@ export function runAdapterTests(_setup: SetupFn, title?: string): void {
 
     // Bob and Charlie receive the change
     await eventPromises([bobHandle, charlieHandle], "change")
-    assert.equal((await bobHandle.value()).foo, "bar")
-    assert.equal((await charlieHandle.value()).foo, "bar")
+    assert.equal((await bobHandle.doc()).foo, "bar")
+    assert.equal((await charlieHandle.doc()).foo, "bar")
 
     // Charlie changes the document
     charlieHandle.change(d => {
@@ -107,8 +107,8 @@ export function runAdapterTests(_setup: SetupFn, title?: string): void {
 
     // Alice and Bob receive the change
     await eventPromises([aliceHandle, bobHandle], "change")
-    assert.equal((await bobHandle.value()).foo, "baz")
-    assert.equal((await charlieHandle.value()).foo, "baz")
+    assert.equal((await bobHandle.doc()).foo, "baz")
+    assert.equal((await charlieHandle.doc()).foo, "baz")
 
     teardown()
   })
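
These test updates track two API renames in this release: repo.find() now takes a handle's URL (handle.url) instead of a raw documentId, and handle.value() is now handle.doc(). A minimal sketch of the new calling convention; the bare Repo construction below is illustrative (a real setup would pass network and storage adapters), and the snippet assumes an ESM context where top-level await is available:

import assert from "assert"
import { Repo } from "@automerge/automerge-repo"

// An in-process repo with no network adapters is enough to exercise the API
const repo = new Repo({ network: [] })

const handle = repo.create<{ foo?: string }>()
handle.change(d => {
  d.foo = "bar"
})

// Documents are now addressed by URL rather than by documentId
const sameHandle = repo.find<{ foo?: string }>(handle.url)

// value() is renamed to doc(); it still resolves once the handle is ready
assert.equal((await sameHandle.doc()).foo, "bar")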
package/src/index.ts CHANGED
@@ -1,9 +1,6 @@
 export { DocCollection } from "./DocCollection.js"
 export { DocHandle, HandleState } from "./DocHandle.js"
-export type {
-  DocHandleChangePayload,
-  DocHandlePatchPayload,
-} from "./DocHandle.js"
+export type { DocHandleChangePayload } from "./DocHandle.js"
 export { NetworkAdapter } from "./network/NetworkAdapter.js"
 export type {
   InboundMessagePayload,
@@ -14,7 +11,12 @@ export type {
 } from "./network/NetworkAdapter.js"
 export { NetworkSubsystem } from "./network/NetworkSubsystem.js"
 export { Repo, type SharePolicy } from "./Repo.js"
-export { StorageAdapter } from "./storage/StorageAdapter.js"
+export { StorageAdapter, type StorageKey } from "./storage/StorageAdapter.js"
 export { StorageSubsystem } from "./storage/StorageSubsystem.js"
 export { CollectionSynchronizer } from "./synchronizer/CollectionSynchronizer.js"
+export {
+  parseAutomergeUrl,
+  isValidAutomergeUrl,
+  stringifyAutomergeUrl as generateAutomergeUrl,
+} from "./DocUrl.js"
 export * from "./types.js"
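
The block of URL helpers exported here is the public entry point for the new AutomergeUrl addressing scheme (see DocUrl.ts and types.ts below). A hedged usage sketch; the diff does not show the return shape of parseAutomergeUrl, so the destructuring below is an assumption:

import {
  generateAutomergeUrl,
  isValidAutomergeUrl,
  parseAutomergeUrl,
} from "@automerge/automerge-repo"

// Mint a fresh URL (generateAutomergeUrl is the re-exported alias of
// stringifyAutomergeUrl)
const url = generateAutomergeUrl()

// Validate untrusted input before handing it to repo.find()
if (isValidAutomergeUrl(url)) {
  // Assumed return shape: recovering the documentId from the URL
  const { documentId } = parseAutomergeUrl(url)
  console.log(documentId)
}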
package/src/network/NetworkAdapter.ts CHANGED
@@ -13,9 +13,9 @@ export abstract class NetworkAdapter extends EventEmitter<NetworkAdapterEvents>
     broadcast: boolean
   ): void
 
-  abstract join(channelId: ChannelId): void
+  abstract join(): void
 
-  abstract leave(channelId: ChannelId): void
+  abstract leave(): void
 }
 
 // events & payloads
@@ -34,7 +34,6 @@ export interface OpenPayload {
 
 export interface PeerCandidatePayload {
   peerId: PeerId
-  channelId: ChannelId
 }
 
 export interface MessagePayload {
package/src/network/NetworkSubsystem.ts CHANGED
@@ -11,7 +11,6 @@ import debug from "debug"
 export class NetworkSubsystem extends EventEmitter<NetworkSubsystemEvents> {
   #log: debug.Debugger
   #adaptersByPeer: Record<PeerId, NetworkAdapter> = {}
-  #channels: ChannelId[]
 
   constructor(
     private adapters: NetworkAdapter[],
@@ -19,14 +18,13 @@ export class NetworkSubsystem extends EventEmitter<NetworkSubsystemEvents> {
   ) {
     super()
     this.#log = debug(`automerge-repo:network:${this.peerId}`)
-    this.#channels = []
     this.adapters.forEach(a => this.addNetworkAdapter(a))
   }
 
   addNetworkAdapter(networkAdapter: NetworkAdapter) {
     networkAdapter.connect(this.peerId)
 
-    networkAdapter.on("peer-candidate", ({ peerId, channelId }) => {
+    networkAdapter.on("peer-candidate", ({ peerId }) => {
       this.#log(`peer candidate: ${peerId} `)
 
       // TODO: This is where authentication would happen
@@ -36,7 +34,7 @@ export class NetworkSubsystem extends EventEmitter<NetworkSubsystemEvents> {
         this.#adaptersByPeer[peerId] = networkAdapter
       }
 
-      this.emit("peer", { peerId, channelId })
+      this.emit("peer", { peerId })
     })
 
     networkAdapter.on("peer-disconnected", ({ peerId }) => {
@@ -74,7 +72,7 @@ export class NetworkSubsystem extends EventEmitter<NetworkSubsystemEvents> {
       })
     })
 
-    this.#channels.forEach(c => networkAdapter.join(c))
+    networkAdapter.join()
   }
 
   sendMessage(
@@ -99,16 +97,14 @@ export class NetworkSubsystem extends EventEmitter<NetworkSubsystemEvents> {
     }
   }
 
-  join(channelId: ChannelId) {
-    this.#log(`Joining channel ${channelId}`)
-    this.#channels.push(channelId)
-    this.adapters.forEach(a => a.join(channelId))
+  join() {
+    this.#log(`Joining network`)
+    this.adapters.forEach(a => a.join())
   }
 
-  leave(channelId: ChannelId) {
-    this.#log(`Leaving channel ${channelId}`)
-    this.#channels = this.#channels.filter(c => c !== channelId)
-    this.adapters.forEach(a => a.leave(channelId))
+  leave() {
+    this.#log(`Leaving network`)
+    this.adapters.forEach(a => a.leave())
   }
 }
 
@@ -126,5 +122,4 @@ export interface NetworkSubsystemEvents {
 
 export interface PeerPayload {
   peerId: PeerId
-  channelId: ChannelId
 }
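
With channels removed, joining becomes a single network-wide announcement rather than a per-document subscription, and per-document routing moves into the synchronizer layer. A sketch of the calling-convention change, assuming an already-constructed NetworkSubsystem:

import { NetworkSubsystem } from "@automerge/automerge-repo"

declare const networkSubsystem: NetworkSubsystem

// 0.2.x joined one channel per document, roughly:
//   networkSubsystem.join(documentId as unknown as ChannelId)

// 1.0.0-alpha.0 joins every adapter to the network exactly once...
networkSubsystem.join()

// ...and leaves them all at once
networkSubsystem.leave()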
package/src/storage/StorageAdapter.ts CHANGED
@@ -4,15 +4,17 @@ export abstract class StorageAdapter {
   // [documentId, "snapshot"] or [documentId, "incremental", "0"]
   // but the storage adapter is agnostic to the meaning of the key
   // and we expect to store other data in the future such as syncstates
-  abstract load(key: string[]): Promise<Uint8Array | undefined>
-  abstract save(key: string[], data: Uint8Array): Promise<void>
-  abstract remove(key: string[]): Promise<void>
+  abstract load(key: StorageKey): Promise<Uint8Array | undefined>
+  abstract save(key: StorageKey, data: Uint8Array): Promise<void>
+  abstract remove(key: StorageKey): Promise<void>
 
   // the keyprefix will match any key that starts with the given array
   // for example, [documentId, "incremental"] will match all incremental saves
   // or [documentId] will match all data for a given document
   // be careful! this will also match [documentId, "syncState"]!
   // (we aren't using this yet but keep it in mind.)
-  abstract loadRange(keyPrefix: string[]): Promise<Uint8Array[]>
-  abstract removeRange(keyPrefix: string[]): Promise<void>
+  abstract loadRange(keyPrefix: StorageKey): Promise<{key: StorageKey, data: Uint8Array}[]>
+  abstract removeRange(keyPrefix: StorageKey): Promise<void>
 }
+
+export type StorageKey = string[]
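
Adapter authors now implement against the StorageKey alias, and loadRange returns each chunk's key alongside its bytes, which is what lets the StorageSubsystem below track chunks for compaction. A minimal in-memory adapter sketch against the new signatures; the "\u0000" separator is an assumption of this sketch and only requires that key segments never contain that character:

import { StorageAdapter, StorageKey } from "@automerge/automerge-repo"

class InMemoryStorageAdapter extends StorageAdapter {
  #data = new Map<string, Uint8Array>()

  // keys are string arrays; flatten them so they can be used as Map keys
  #flatten(key: StorageKey): string {
    return key.join("\u0000")
  }

  async load(key: StorageKey): Promise<Uint8Array | undefined> {
    return this.#data.get(this.#flatten(key))
  }

  async save(key: StorageKey, data: Uint8Array): Promise<void> {
    this.#data.set(this.#flatten(key), data)
  }

  async remove(key: StorageKey): Promise<void> {
    this.#data.delete(this.#flatten(key))
  }

  // returns keys as well as data, which the new compaction bookkeeping
  // in StorageSubsystem relies on
  async loadRange(
    keyPrefix: StorageKey
  ): Promise<{ key: StorageKey; data: Uint8Array }[]> {
    const prefix = this.#flatten(keyPrefix) + "\u0000"
    const result: { key: StorageKey; data: Uint8Array }[] = []
    for (const [flat, data] of this.#data) {
      if (flat.startsWith(prefix)) {
        result.push({ key: flat.split("\u0000"), data })
      }
    }
    return result
  }

  async removeRange(keyPrefix: StorageKey): Promise<void> {
    const prefix = this.#flatten(keyPrefix) + "\u0000"
    for (const flat of [...this.#data.keys()]) {
      if (flat.startsWith(prefix)) this.#data.delete(flat)
    }
  }
}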
package/src/storage/StorageSubsystem.ts CHANGED
@@ -1,8 +1,18 @@
 import * as A from "@automerge/automerge"
-import { DocumentId } from "../types.js"
-import { StorageAdapter } from "./StorageAdapter.js"
-import { mergeArrays } from "../helpers/mergeArrays.js"
+import { StorageAdapter, StorageKey } from "./StorageAdapter.js"
 import * as sha256 from "fast-sha256"
+import { type DocumentId } from "../types.js"
+import { mergeArrays } from "../helpers/mergeArrays.js"
+
+// Metadata about a chunk of data loaded from storage. This is stored on the
+// StorageSubsystem so when we are compacting we know what chunks we can safely delete
+type StorageChunkInfo = {
+  key: StorageKey
+  type: ChunkType
+  size: number
+}
+
+export type ChunkType = "snapshot" | "incremental"
 
 function keyHash(binary: Uint8Array) {
   const hash = sha256.hash(binary)
@@ -11,57 +21,87 @@ function keyHash(binary: Uint8Array) {
   return hashHex
 }
 
+function headsHash(heads: A.Heads): string {
+  let encoder = new TextEncoder()
+  let headsbinary = mergeArrays(heads.map(h => encoder.encode(h)))
+  return keyHash(headsbinary)
+}
+
 export class StorageSubsystem {
   #storageAdapter: StorageAdapter
+  #chunkInfos: Map<DocumentId, StorageChunkInfo[]> = new Map()
 
   constructor(storageAdapter: StorageAdapter) {
     this.#storageAdapter = storageAdapter
   }
 
-  async #saveIncremental(documentId: DocumentId, doc: A.Doc<unknown>) {
+  async #saveIncremental(
+    documentId: DocumentId,
+    doc: A.Doc<unknown>
+  ): Promise<void> {
     const binary = A.saveIncremental(doc)
     if (binary && binary.length > 0) {
       const key = [documentId, "incremental", keyHash(binary)]
-      return await this.#storageAdapter.save(key, binary)
+      await this.#storageAdapter.save(key, binary)
+      if (!this.#chunkInfos.has(documentId)) {
+        this.#chunkInfos.set(documentId, [])
+      }
+      this.#chunkInfos.get(documentId)!!.push({
+        key,
+        type: "incremental",
+        size: binary.length,
+      })
     } else {
-      Promise.resolve()
+      return Promise.resolve()
     }
   }
 
-  async #saveTotal(documentId: DocumentId, doc: A.Doc<unknown>) {
+  async #saveTotal(
+    documentId: DocumentId,
+    doc: A.Doc<unknown>,
+    sourceChunks: StorageChunkInfo[]
+  ): Promise<void> {
     const binary = A.save(doc)
+    const key = [documentId, "snapshot", headsHash(A.getHeads(doc))]
+    const oldKeys = new Set(sourceChunks.map(c => c.key))
 
-    // TODO: this is still racy if two nodes are both writing to the store
-    await this.#storageAdapter.save([documentId, "snapshot"], binary)
+    await this.#storageAdapter.save(key, binary)
 
-    // don't start deleting the incremental keys until save is done!
-    return this.#storageAdapter.removeRange([documentId, "incremental"])
+    for (const key of oldKeys) {
+      await this.#storageAdapter.remove(key)
+    }
+    const newChunkInfos =
+      this.#chunkInfos.get(documentId)?.filter(c => !oldKeys.has(c.key)) ?? []
+    newChunkInfos.push({ key, type: "snapshot", size: binary.length })
+    this.#chunkInfos.set(documentId, newChunkInfos)
   }
 
   async loadBinary(documentId: DocumentId): Promise<Uint8Array> {
-    // it would probably be best to ensure .snapshot comes back first
-    // prevent the race condition with saveIncremental
-    const binaries: Uint8Array[] = await this.#storageAdapter.loadRange([
-      documentId,
-    ])
-
+    const loaded = await this.#storageAdapter.loadRange([documentId])
+    const binaries = []
+    const chunkInfos: StorageChunkInfo[] = []
+    for (const chunk of loaded) {
+      const chunkType = chunkTypeFromKey(chunk.key)
+      if (chunkType == null) {
+        continue
+      }
+      chunkInfos.push({
+        key: chunk.key,
+        type: chunkType,
+        size: chunk.data.length,
+      })
+      binaries.push(chunk.data)
+    }
+    this.#chunkInfos.set(documentId, chunkInfos)
     return mergeArrays(binaries)
   }
 
-  async load<T>(
-    documentId: DocumentId,
-    prevDoc: A.Doc<T> = A.init<T>()
-  ): Promise<A.Doc<T>> {
-    const doc = A.loadIncremental(prevDoc, await this.loadBinary(documentId))
-    A.saveIncremental(doc)
-    return doc
-  }
-
-  async save(documentId: DocumentId, doc: A.Doc<unknown>) {
-    if (this.#shouldCompact(documentId)) {
-      return this.#saveTotal(documentId, doc)
+  async save(documentId: DocumentId, doc: A.Doc<unknown>): Promise<void> {
+    let sourceChunks = this.#chunkInfos.get(documentId) ?? []
+    if (this.#shouldCompact(sourceChunks)) {
+      this.#saveTotal(documentId, doc, sourceChunks)
    } else {
-      return this.#saveIncremental(documentId, doc)
+      this.#saveIncremental(documentId, doc)
    }
  }
 
@@ -70,9 +110,30 @@ export class StorageSubsystem {
     this.#storageAdapter.removeRange([documentId, "incremental"])
   }
 
-  // TODO: make this, you know, good.
-  // this is probably fine
-  #shouldCompact(documentId: DocumentId) {
-    return Math.random() < 0.05 // this.#changeCount[documentId] >= 20
+  #shouldCompact(sourceChunks: StorageChunkInfo[]) {
+    // compact if the incremental size is greater than the snapshot size
+    let snapshotSize = 0
+    let incrementalSize = 0
+    for (const chunk of sourceChunks) {
+      if (chunk.type === "snapshot") {
+        snapshotSize += chunk.size
+      } else {
+        incrementalSize += chunk.size
+      }
+    }
+    return incrementalSize > snapshotSize
+  }
+}
+
+function chunkTypeFromKey(key: StorageKey): ChunkType | null {
+  if (key.length < 2) {
+    return null
+  }
+  const chunkTypeStr = key[key.length - 2]
+  if (chunkTypeStr === "snapshot" || chunkTypeStr === "incremental") {
+    const chunkType: ChunkType = chunkTypeStr
+    return chunkType
+  } else {
+    return null
   }
 }
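
The compaction policy changes from a 5% coin flip per save to a deterministic size heuristic: compact once the incremental chunks written since the last snapshot outweigh the snapshot itself, which the new #chunkInfos bookkeeping makes possible. A small worked example mirroring #shouldCompact:

// chunk sizes as tracked in #chunkInfos for one document
const chunks = [
  { type: "snapshot", size: 10_000 }, // the last full save
  { type: "incremental", size: 4_000 }, // incremental saves since then
  { type: "incremental", size: 7_000 },
]

let snapshotSize = 0
let incrementalSize = 0
for (const chunk of chunks) {
  if (chunk.type === "snapshot") snapshotSize += chunk.size
  else incrementalSize += chunk.size
}

// 11_000 > 10_000, so the next save() writes a snapshot keyed by
// headsHash(heads) and deletes the chunks it subsumes
console.log(incrementalSize > snapshotSize) // true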
package/src/synchronizer/CollectionSynchronizer.ts CHANGED
@@ -1,6 +1,11 @@
 import { DocCollection } from "../DocCollection.js"
 import { DocHandle } from "../DocHandle.js"
-import { ChannelId, DocumentId, PeerId } from "../types.js"
+import {
+  documentIdToBinary,
+  binaryToDocumentId,
+  stringifyAutomergeUrl,
+} from "../DocUrl.js"
+import { ChannelId, BinaryDocumentId, PeerId, DocumentId } from "../types.js"
 import { DocSynchronizer } from "./DocSynchronizer.js"
 import { Synchronizer } from "./Synchronizer.js"
 
@@ -22,7 +27,7 @@ export class CollectionSynchronizer extends Synchronizer {
   /** Returns a synchronizer for the given document, creating one if it doesn't already exist. */
   #fetchDocSynchronizer(documentId: DocumentId) {
     if (!this.#docSynchronizers[documentId]) {
-      const handle = this.repo.find(documentId)
+      const handle = this.repo.find(stringifyAutomergeUrl({ documentId }))
       this.#docSynchronizers[documentId] = this.#initDocSynchronizer(handle)
     }
     return this.#docSynchronizers[documentId]
@@ -60,6 +65,9 @@ export class CollectionSynchronizer extends Synchronizer {
     log(`onSyncMessage: ${peerId}, ${channelId}, ${message.byteLength}bytes`)
 
     const documentId = channelId as unknown as DocumentId
+    if (!documentId) {
+      throw new Error("received a message with an invalid documentId")
+    }
     const docSynchronizer = await this.#fetchDocSynchronizer(documentId)
 
     await docSynchronizer.receiveSyncMessage(peerId, channelId, message)
package/src/synchronizer/DocSynchronizer.ts CHANGED
@@ -1,5 +1,5 @@
 import * as A from "@automerge/automerge"
-import { DocHandle } from "../DocHandle.js"
+import { DocHandle, READY, REQUESTING } from "../DocHandle.js"
 import { ChannelId, PeerId } from "../types.js"
 import { Synchronizer } from "./Synchronizer.js"
 
@@ -33,7 +33,7 @@ export class DocSynchronizer extends Synchronizer {
 
     // Process pending sync messages immediately after the handle becomes ready.
     void (async () => {
-      await handle.loadAttemptedValue()
+      await handle.doc([READY, REQUESTING])
      this.#processAllPendingSyncMessages()
    })()
  }
@@ -46,7 +46,7 @@ export class DocSynchronizer extends Synchronizer {
 
   async #syncWithPeers() {
     this.#log(`syncWithPeers`)
-    const doc = await this.handle.value()
+    const doc = await this.handle.doc()
     this.#peers.forEach(peerId => this.#sendSyncMessage(peerId, doc))
   }
 
@@ -78,6 +78,7 @@ export class DocSynchronizer extends Synchronizer {
     this.#logMessage(`sendSyncMessage 🡒 ${peerId}`, message)
 
     const channelId = this.handle.documentId as string as ChannelId
+
     this.emit("message", {
       targetId: peerId,
       channelId,
@@ -120,7 +121,7 @@ export class DocSynchronizer extends Synchronizer {
 
     // At this point if we don't have anything in our storage, we need to use an empty doc to sync
     // with; but we don't want to surface that state to the front end
-    void this.handle.loadAttemptedValue().then(doc => {
+    void this.handle.doc([READY, REQUESTING]).then(doc => {
       // HACK: if we have a sync state already, we round-trip it through the encoding system to make
       // sure state is preserved. This prevents an infinite loop caused by failed attempts to send
       // messages during disconnection.
@@ -143,11 +144,11 @@ export class DocSynchronizer extends Synchronizer {
     channelId: ChannelId,
     message: Uint8Array
   ) {
-    if ((channelId as string) !== (this.documentId as string))
+    if ((channelId as string) !== (this.handle.documentId as string))
       throw new Error(`channelId doesn't match documentId`)
 
     // We need to block receiving the syncMessages until we've checked local storage
-    if (!this.handle.isReadyOrRequesting()) {
+    if (!this.handle.inState([READY, REQUESTING])) {
       this.#pendingSyncMessages.push({ peerId, message })
       return
     }
package/src/types.ts CHANGED
@@ -1,3 +1,6 @@
-export type DocumentId = string & { __documentId: true }
+export type DocumentId = string & { __documentId: true } // for logging
+export type AutomergeUrl = string & { __documentUrl: true } // for opening / linking
+export type BinaryDocumentId = Uint8Array & { __binaryDocumentId: true } // for storing / syncing
+
 export type PeerId = string & { __peerId: false }
 export type ChannelId = string & { __channelId: false }
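
All three identifier types are branded: intersecting with a phantom property makes them nominally distinct at compile time while they remain plain strings (or a Uint8Array) at runtime. A compile-time sketch of what the branding buys; openByUrl is a hypothetical function used only for illustration:

import { AutomergeUrl, DocumentId } from "@automerge/automerge-repo"

// hypothetical consumer that only accepts URLs
declare function openByUrl(url: AutomergeUrl): void

declare const id: DocumentId
declare const url: AutomergeUrl

openByUrl(url) // ok
// openByUrl(id) // type error: DocumentId is not assignable to AutomergeUrl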
package/test/CollectionSynchronizer.test.ts CHANGED
@@ -1,5 +1,5 @@
 import { CollectionSynchronizer } from "../src/synchronizer/CollectionSynchronizer.js"
-import { ChannelId, DocCollection, DocumentId, PeerId } from "../src"
+import { ChannelId, DocCollection, BinaryDocumentId, PeerId } from "../src"
 import assert from "assert"
 import { beforeEach } from "mocha"
 import { MessagePayload } from "../src/network/NetworkAdapter.js"
package/test/DocCollection.test.ts CHANGED
@@ -1,8 +1,9 @@
 import assert from "assert"
-import { DocCollection, DocumentId } from "../src"
+import { DocCollection, BinaryDocumentId } from "../src"
 import { TestDoc } from "./types.js"
+import { generateAutomergeUrl, stringifyAutomergeUrl } from "../src/DocUrl"
 
-const MISSING_DOCID = "non-existent-docID" as DocumentId
+const MISSING_DOCID = generateAutomergeUrl()
 
 describe("DocCollection", () => {
   it("can create documents which are ready to go", async () => {