@automerge/automerge-repo 1.0.0-alpha.4 → 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37)
  1. package/.eslintrc +2 -2
  2. package/dist/DocHandle.d.ts +5 -1
  3. package/dist/DocHandle.d.ts.map +1 -1
  4. package/dist/DocHandle.js +11 -3
  5. package/dist/Repo.d.ts +38 -4
  6. package/dist/Repo.d.ts.map +1 -1
  7. package/dist/Repo.js +95 -3
  8. package/dist/index.d.ts +0 -1
  9. package/dist/index.d.ts.map +1 -1
  10. package/dist/index.js +0 -1
  11. package/dist/network/NetworkAdapter.d.ts +2 -3
  12. package/dist/network/NetworkAdapter.d.ts.map +1 -1
  13. package/dist/network/NetworkSubsystem.d.ts +1 -3
  14. package/dist/network/NetworkSubsystem.d.ts.map +1 -1
  15. package/dist/network/NetworkSubsystem.js +0 -9
  16. package/dist/storage/StorageSubsystem.d.ts.map +1 -1
  17. package/dist/storage/StorageSubsystem.js +8 -2
  18. package/dist/synchronizer/CollectionSynchronizer.d.ts +2 -2
  19. package/dist/synchronizer/CollectionSynchronizer.d.ts.map +1 -1
  20. package/package.json +3 -3
  21. package/src/DocHandle.ts +13 -5
  22. package/src/Repo.ts +130 -4
  23. package/src/index.ts +0 -1
  24. package/src/network/NetworkAdapter.ts +2 -4
  25. package/src/network/NetworkSubsystem.ts +14 -23
  26. package/src/storage/StorageSubsystem.ts +9 -2
  27. package/src/synchronizer/CollectionSynchronizer.ts +2 -2
  28. package/test/CollectionSynchronizer.test.ts +12 -11
  29. package/test/DocHandle.test.ts +2 -1
  30. package/test/Repo.test.ts +26 -5
  31. package/test/helpers/DummyNetworkAdapter.ts +2 -3
  32. package/test/helpers/generate-large-object.ts +13 -0
  33. package/dist/DocCollection.d.ts +0 -46
  34. package/dist/DocCollection.d.ts.map +0 -1
  35. package/dist/DocCollection.js +0 -102
  36. package/src/DocCollection.ts +0 -144
  37. package/test/DocCollection.test.ts +0 -20
package/src/Repo.ts CHANGED
@@ -1,18 +1,26 @@
  import debug from "debug"
- import { DocCollection } from "./DocCollection.js"
  import { NetworkAdapter } from "./network/NetworkAdapter.js"
  import { NetworkSubsystem } from "./network/NetworkSubsystem.js"
  import { StorageAdapter } from "./storage/StorageAdapter.js"
  import { StorageSubsystem } from "./storage/StorageSubsystem.js"
  import { CollectionSynchronizer } from "./synchronizer/CollectionSynchronizer.js"
- import { DocumentId, PeerId } from "./types.js"
+ import { type AutomergeUrl, DocumentId, PeerId } from "./types.js"
+ import { v4 as uuid } from "uuid"
+ import { parseAutomergeUrl, generateAutomergeUrl, isValidAutomergeUrl } from "./DocUrl.js"

- /** A Repo is a DocCollection with networking, syncing, and storage capabilities. */
- export class Repo extends DocCollection {
+ import { DocHandle } from "./DocHandle.js"
+ import { EventEmitter } from "eventemitter3"
+
+ /** A Repo is a collection of documents with networking, syncing, and storage capabilities. */
+ export class Repo extends EventEmitter<DocCollectionEvents> {
    #log: debug.Debugger

    networkSubsystem: NetworkSubsystem
    storageSubsystem?: StorageSubsystem
+   #handleCache: Record<DocumentId, DocHandle<any>> = {}
+
+   /** By default, we share generously with all peers. */
+   sharePolicy: SharePolicy = async () => true

    constructor({ storage, network, peerId, sharePolicy }: RepoConfig) {
      super()
@@ -111,6 +119,108 @@ export class Repo extends DocCollection {
      await synchronizer.receiveMessage(msg)
    })
  }
+
+   /** Returns an existing handle if we have it; creates one otherwise. */
+   #getHandle<T>(
+     /** The documentId of the handle to look up or create */
+     documentId: DocumentId,
+
+     /** If we know we're creating a new document, specify this so we can have access to it immediately */
+     isNew: boolean
+   ) {
+     // If we have the handle cached, return it
+     if (this.#handleCache[documentId]) return this.#handleCache[documentId]
+
+     // If not, create a new handle, cache it, and return it
+     if (!documentId) throw new Error(`Invalid documentId ${documentId}`)
+     const handle = new DocHandle<T>(documentId, { isNew })
+     this.#handleCache[documentId] = handle
+     return handle
+   }
+
+   /** Returns all the handles we have cached. */
+   get handles() {
+     return this.#handleCache
+   }
+
+   /**
+    * Creates a new document and returns a handle to it. The initial value of the document is
+    * an empty object `{}`. Its documentId is generated by the system. we emit a `document` event
+    * to advertise interest in the document.
+    */
+   create<T>(): DocHandle<T> {
+     // TODO:
+     // either
+     // - pass an initial value and do something like this to ensure that you get a valid initial value
+
+     // const myInitialValue = {
+     //   tasks: [],
+     //   filter: "all",
+     //
+     // const guaranteeInitialValue = (doc: any) => {
+     //   if (!doc.tasks) doc.tasks = []
+     //   if (!doc.filter) doc.filter = "all"
+
+     //   return { ...myInitialValue, ...doc }
+     // }
+
+     // or
+     // - pass a "reify" function that takes a `<any>` and returns `<T>`
+
+     // Generate a new UUID and store it in the buffer
+     const { documentId } = parseAutomergeUrl(generateAutomergeUrl())
+     const handle = this.#getHandle<T>(documentId, true) as DocHandle<T>
+     this.emit("document", { handle, isNew: true })
+     return handle
+   }
+
+   /**
+    * Retrieves a document by id. It gets data from the local system, but also emits a `document`
+    * event to advertise interest in the document.
+    */
+   find<T>(
+     /** The documentId of the handle to retrieve */
+     automergeUrl: AutomergeUrl
+   ): DocHandle<T> {
+     if (!isValidAutomergeUrl(automergeUrl)) {
+       throw new Error(`Invalid AutomergeUrl: '${automergeUrl}'`)
+     }
+
+     const { documentId } = parseAutomergeUrl(automergeUrl)
+     // If we have the handle cached, return it
+     if (this.#handleCache[documentId]) {
+       if (this.#handleCache[documentId].isUnavailable()) {
+         // this ensures that the event fires after the handle has been returned
+         setTimeout(() => {
+           this.#handleCache[documentId].emit("unavailable", {
+             handle: this.#handleCache[documentId],
+           })
+         })
+       }
+       return this.#handleCache[documentId]
+     }
+
+     const handle = this.#getHandle<T>(documentId, false) as DocHandle<T>
+     this.emit("document", { handle, isNew: false })
+     return handle
+   }
+
+   delete(
+     /** The documentId of the handle to delete */
+     id: DocumentId | AutomergeUrl
+   ) {
+     if (isValidAutomergeUrl(id)) {
+       ;({ documentId: id } = parseAutomergeUrl(id))
+     }
+
+     const handle = this.#getHandle(id, false)
+     handle.delete()
+
+     delete this.#handleCache[id]
+     this.emit("delete-document", {
+       documentId: id,
+     })
+   }
  }

  export interface RepoConfig {
@@ -134,3 +244,19 @@ export type SharePolicy = (
    peerId: PeerId,
    documentId?: DocumentId
  ) => Promise<boolean>
+
+ // events & payloads
+ interface DocCollectionEvents {
+   document: (arg: DocumentPayload) => void
+   "delete-document": (arg: DeleteDocumentPayload) => void
+   "unavailable-document": (arg: DeleteDocumentPayload) => void
+ }
+
+ interface DocumentPayload {
+   handle: DocHandle<any>
+   isNew: boolean
+ }
+
+ interface DeleteDocumentPayload {
+   documentId: DocumentId
+ }
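Taken together, the changes above fold the old DocCollection surface directly into Repo: create, find, delete, the handles cache, and the sharePolicy hook now live on the repo itself. Below is a minimal usage sketch against the new 1.0.0 surface; it assumes an in-memory repo with no network adapters (the same construction the updated CollectionSynchronizer tests use), and the document shape is illustrative only.

import { Repo } from "@automerge/automerge-repo"

// No storage adapter and no network adapters: documents live in memory only.
const repo = new Repo({ network: [] })

// By default the repo shares generously with all peers; narrow it if needed.
repo.sharePolicy = async (peerId, documentId) => peerId !== "peer1"

// create() returns a ready handle; its documentId is generated by the system.
const handle = repo.create<{ foo: string }>()
handle.change(d => {
  d.foo = "bar"
})

// find() takes an AutomergeUrl (e.g. handle.url) and throws on invalid input.
const sameDoc = repo.find<{ foo: string }>(handle.url)

// delete() accepts either a DocumentId or an AutomergeUrl.
repo.delete(handle.documentId)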
package/src/index.ts CHANGED
@@ -1,4 +1,3 @@
- export { DocCollection } from "./DocCollection.js"
  export { DocHandle, HandleState } from "./DocHandle.js"
  export type { DocHandleChangePayload } from "./DocHandle.js"
  export { NetworkAdapter } from "./network/NetworkAdapter.js"
package/src/network/NetworkAdapter.ts CHANGED
@@ -5,13 +5,11 @@ import { Message } from "./messages.js"
  export abstract class NetworkAdapter extends EventEmitter<NetworkAdapterEvents> {
    peerId?: PeerId // hmmm, maybe not

-   abstract connect(url?: string): void
+   abstract connect(peerId: PeerId): void

    abstract send(message: Message): void

-   abstract join(): void
-
-   abstract leave(): void
+   abstract disconnect(): void
  }

  // events & payloads
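The adapter contract shrinks here: connect() now takes the local PeerId rather than an optional URL, and join()/leave() collapse into a single disconnect(). As a hedged sketch (the import specifiers and class name are assumptions), the smallest adapter satisfying the new abstract class looks like the following, modeled on the DummyNetworkAdapter test helper further down, which emits "ready" from connect():

import { NetworkAdapter } from "@automerge/automerge-repo"
import type { Message, PeerId } from "@automerge/automerge-repo"

// A transport-less adapter that satisfies the 1.0.0 abstract surface.
export class NoopNetworkAdapter extends NetworkAdapter {
  connect(peerId: PeerId) {
    this.peerId = peerId
    // each adapter reports readiness; NetworkSubsystem emits its own "ready"
    // only once every registered adapter has done so
    this.emit("ready", { network: this })
  }

  send(_message: Message) {
    // a real adapter would serialize and transmit the message here
  }

  disconnect() {
    // a real adapter would tear down its transport here
  }
}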
package/src/network/NetworkSubsystem.ts CHANGED
@@ -25,13 +25,10 @@ export class NetworkSubsystem extends EventEmitter<NetworkSubsystemEvents> {
    #count = 0
    #sessionId: SessionId = Math.random().toString(36).slice(2) as SessionId
    #ephemeralSessionCounts: Record<EphemeralMessageSource, number> = {}
-   #readyAdapterCount = 0
+   #readyAdapterCount = 0
    #adapters: NetworkAdapter[] = []

-   constructor(
-     adapters: NetworkAdapter[],
-     public peerId = randomPeerId()
-   ) {
+   constructor(adapters: NetworkAdapter[], public peerId = randomPeerId()) {
      super()
      this.#log = debug(`automerge-repo:network:${this.peerId}`)
      adapters.forEach(a => this.addNetworkAdapter(a))
@@ -41,7 +38,12 @@
      this.#adapters.push(networkAdapter)
      networkAdapter.once("ready", () => {
        this.#readyAdapterCount++
-       this.#log("Adapters ready: ", this.#readyAdapterCount, "/", this.#adapters.length)
+       this.#log(
+         "Adapters ready: ",
+         this.#readyAdapterCount,
+         "/",
+         this.#adapters.length
+       )
        if (this.#readyAdapterCount === this.#adapters.length) {
          this.emit("ready")
        }
@@ -100,7 +102,6 @@
      })

      networkAdapter.connect(this.peerId)
-     networkAdapter.join()
    }

    send(message: MessageContents) {
@@ -116,11 +117,11 @@
        "count" in message
          ? message
          : {
-             ...message,
-             count: ++this.#count,
-             sessionId: this.#sessionId,
-             senderId: this.peerId,
-           }
+             ...message,
+             count: ++this.#count,
+             sessionId: this.#sessionId,
+             senderId: this.peerId,
+           }
      this.#log("Ephemeral message", outbound)
      peer.send(outbound)
    } else {
@@ -130,16 +131,6 @@
      }
    }

-   join() {
-     this.#log(`Joining network`)
-     this.#adapters.forEach(a => a.join())
-   }
-
-   leave() {
-     this.#log(`Leaving network`)
-     this.#adapters.forEach(a => a.leave())
-   }
-
    isReady = () => {
      return this.#readyAdapterCount === this.#adapters.length
    }
@@ -167,7 +158,7 @@ export interface NetworkSubsystemEvents {
    peer: (payload: PeerPayload) => void
    "peer-disconnected": (payload: PeerDisconnectedPayload) => void
    message: (payload: Message) => void
-   "ready": () => void
+   ready: () => void
  }

  export interface PeerPayload {
package/src/storage/StorageSubsystem.ts CHANGED
@@ -35,6 +35,8 @@ export class StorageSubsystem {
    #storedHeads: Map<DocumentId, A.Heads> = new Map()
    #log = debug(`automerge-repo:storage-subsystem`)

+   #snapshotting = false
+
    constructor(storageAdapter: StorageAdapter) {
      this.#storageAdapter = storageAdapter
    }
@@ -67,6 +69,7 @@
      doc: A.Doc<unknown>,
      sourceChunks: StorageChunkInfo[]
    ): Promise<void> {
+     this.#snapshotting = true
      const binary = A.save(doc)
      const snapshotHash = headsHash(A.getHeads(doc))
      const key = [documentId, "snapshot", snapshotHash]
@@ -86,6 +89,7 @@
        this.#chunkInfos.get(documentId)?.filter(c => !oldKeys.has(c.key)) ?? []
      newChunkInfos.push({ key, type: "snapshot", size: binary.length })
      this.#chunkInfos.set(documentId, newChunkInfos)
+     this.#snapshotting = false
    }

    async loadDoc(documentId: DocumentId): Promise<A.Doc<unknown> | null> {
@@ -128,7 +132,7 @@
    }

    async remove(documentId: DocumentId) {
-     this.#storageAdapter.remove([documentId, "snapshot"])
+     this.#storageAdapter.removeRange([documentId, "snapshot"])
      this.#storageAdapter.removeRange([documentId, "incremental"])
    }

@@ -147,6 +151,9 @@
    }

    #shouldCompact(sourceChunks: StorageChunkInfo[]) {
+     if (this.#snapshotting) {
+       return false
+     }
      // compact if the incremental size is greater than the snapshot size
      let snapshotSize = 0
      let incrementalSize = 0
@@ -157,7 +164,7 @@
          incrementalSize += chunk.size
        }
      }
-     return incrementalSize > snapshotSize
+     return incrementalSize >= snapshotSize
    }
  }

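Two behavioral fixes ride along with the remove → removeRange correction: a #snapshotting flag makes #shouldCompact answer false while a snapshot write is in flight (this is what the new "doesn't create multiple snapshots" test in Repo.test.ts exercises), and the size comparison loosens from > to >=, so incremental data that has merely caught up with the last snapshot now triggers compaction. A standalone sketch of the resulting decision, using an assumed simplification of the package's StorageChunkInfo type:

// ChunkInfo is an assumed stand-in for the package's StorageChunkInfo.
type ChunkInfo = { type: "snapshot" | "incremental"; size: number }

function shouldCompact(chunks: ChunkInfo[], snapshotting: boolean): boolean {
  // never start a second snapshot while one is being written
  if (snapshotting) return false

  let snapshotSize = 0
  let incrementalSize = 0
  for (const chunk of chunks) {
    if (chunk.type === "snapshot") snapshotSize += chunk.size
    else incrementalSize += chunk.size
  }

  // ">=" (previously ">"): equal totals now compact as well
  return incrementalSize >= snapshotSize
}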
package/src/synchronizer/CollectionSynchronizer.ts CHANGED
@@ -1,4 +1,4 @@
- import { DocCollection } from "../DocCollection.js"
+ import { Repo } from "../Repo.js"
  import { DocHandle } from "../DocHandle.js"
  import {
    documentIdToBinary,
@@ -29,7 +29,7 @@ export class CollectionSynchronizer extends Synchronizer {
    /** Used to determine if the document is know to the Collection and a synchronizer exists or is being set up */
    #docSetUp: Record<DocumentId, boolean> = {}

-   constructor(private repo: DocCollection) {
+   constructor(private repo: Repo) {
      super()
    }

package/test/CollectionSynchronizer.test.ts CHANGED
@@ -1,15 +1,17 @@
  import assert from "assert"
  import { beforeEach } from "mocha"
- import { DocCollection, PeerId } from "../src/index.js"
+ import { PeerId, Repo } from "../src/index.js"
  import { CollectionSynchronizer } from "../src/synchronizer/CollectionSynchronizer.js"

  describe("CollectionSynchronizer", () => {
-   let collection: DocCollection
+   let repo: Repo
    let synchronizer: CollectionSynchronizer

    beforeEach(() => {
-     collection = new DocCollection()
-     synchronizer = new CollectionSynchronizer(collection)
+     repo = new Repo({
+       network: [],
+     })
+     synchronizer = new CollectionSynchronizer(repo)
    })

    it("is not null", async () => {
@@ -17,7 +19,7 @@ describe("CollectionSynchronizer", () => {
    })

    it("starts synchronizing a document to peers when added", done => {
-     const handle = collection.create()
+     const handle = repo.create()
      synchronizer.addPeer("peer1" as PeerId)

      synchronizer.once("message", event => {
@@ -30,7 +32,7 @@
    })

    it("starts synchronizing existing documents when a peer is added", done => {
-     const handle = collection.create()
+     const handle = repo.create()
      synchronizer.addDocument(handle.documentId)
      synchronizer.once("message", event => {
        assert(event.targetId === "peer1")
@@ -41,9 +43,9 @@
    })

    it("should not synchronize to a peer which is excluded from the share policy", done => {
-     const handle = collection.create()
+     const handle = repo.create()

-     collection.sharePolicy = async (peerId: PeerId) => peerId !== "peer1"
+     repo.sharePolicy = async (peerId: PeerId) => peerId !== "peer1"

      synchronizer.addDocument(handle.documentId)
      synchronizer.once("message", () => {
@@ -55,9 +57,8 @@
    })

    it("should not synchronize a document which is excluded from the share policy", done => {
-     const handle = collection.create()
-     collection.sharePolicy = async (_, documentId) =>
-       documentId !== handle.documentId
+     const handle = repo.create()
+     repo.sharePolicy = async (_, documentId) => documentId !== handle.documentId

      synchronizer.addPeer("peer2" as PeerId)

package/test/DocHandle.test.ts CHANGED
@@ -295,10 +295,11 @@ describe("DocHandle", () => {
      })

      let wasBar = false
-     handle.changeAt(headsBefore, doc => {
+     let newHeads = handle.changeAt(headsBefore, doc => {
        wasBar = doc.foo === "bar"
        doc.foo = "baz"
      })
+     assert(newHeads && newHeads.length > 0, "should have new heads")

      assert(wasBar, "foo should have been bar as we changed at the old heads")
    })
package/test/Repo.test.ts CHANGED
@@ -18,6 +18,10 @@ import { getRandomItem } from "./helpers/getRandomItem.js"
  import { TestDoc } from "./types.js"
  import { generateAutomergeUrl, stringifyAutomergeUrl } from "../src/DocUrl.js"
  import { READY, AWAITING_NETWORK } from "../src/DocHandle.js"
+ import {
+   generateLargeObject,
+   LargeObject,
+ } from "./helpers/generate-large-object.js"

  describe("Repo", () => {
    describe("single repo", () => {
@@ -43,6 +47,7 @@
      const { repo } = setup()
      const handle = repo.create()
      assert.notEqual(handle.documentId, null)
+     assert.equal(handle.isReady(), true)
    })

    it("can change a document", async () => {
@@ -98,7 +103,6 @@

      networkAdapter.emit("ready", { network: networkAdapter })
      await eventPromise(handle, "unavailable")
-
    })

    it("can find a created document", async () => {
@@ -130,7 +134,6 @@
      const bobHandle = repo2.find<TestDoc>(handle.url)
      await bobHandle.whenReady()
      assert.equal(bobHandle.isReady(), true)
-
    })

    it("saves the document when changed and can find it again", async () => {
@@ -162,6 +165,7 @@
      handle.change(d => {
        d.foo = "bar"
      })
+     // we now have a snapshot and an incremental change in storage
      assert.equal(handle.isReady(), true)
      await handle.doc()
      repo.delete(handle.documentId)
@@ -274,6 +278,21 @@
        assert(storage.keys().length !== 0)
      }
    })
+
+   it("doesn't create multiple snapshots in storage when a series of large changes are made in succession", async () => {
+     const { repo, storageAdapter } = setup()
+     const handle = repo.create<{ objects: LargeObject[] }>()
+
+     for (let i = 0; i < 5; i++) {
+       handle.change(d => {
+         d.objects = []
+         d.objects.push(generateLargeObject(100))
+       })
+     }
+
+     const storageKeyTypes = storageAdapter.keys().map(k => k.split(".")[1])
+     assert(storageKeyTypes.filter(k => k === "snapshot").length === 1)
+   })
  })

  describe("sync", async () => {
@@ -331,7 +350,9 @@
    }

    function doConnectAlice() {
-     aliceRepo.networkSubsystem.addNetworkAdapter(new MessageChannelNetworkAdapter(aliceToBob))
+     aliceRepo.networkSubsystem.addNetworkAdapter(
+       new MessageChannelNetworkAdapter(aliceToBob)
+     )
      //bobRepo.networkSubsystem.addNetworkAdapter(new MessageChannelNetworkAdapter(bobToAlice))
    }

@@ -580,9 +601,9 @@
      const doc =
        Math.random() < 0.5
          ? // heads, create a new doc
-           repo.create<TestDoc>()
+           repo.create<TestDoc>()
          : // tails, pick a random doc
-           (getRandomItem(docs) as DocHandle<TestDoc>)
+           (getRandomItem(docs) as DocHandle<TestDoc>)

      // make sure the doc is ready
      if (!doc.isReady()) {
package/test/helpers/DummyNetworkAdapter.ts CHANGED
@@ -6,12 +6,11 @@ export class DummyNetworkAdapter extends NetworkAdapter {
      super()
      this.#startReady = startReady
    }
-   send() {}
+   send() { }
    connect(_: string) {
      if (this.#startReady) {
        this.emit("ready", { network: this })
      }
    }
-   join() {}
-   leave() {}
+   disconnect() { }
  }
package/test/helpers/generate-large-object.ts ADDED
@@ -0,0 +1,13 @@
+ export type LargeObject = { [key: string]: number }
+
+ export function generateLargeObject(size: number): LargeObject {
+   const largeObject: LargeObject = {}
+
+   for (let i = 0; i < size; i++) {
+     const key = `key${i}`
+     const value = Math.random()
+     largeObject[key] = value
+   }
+
+   return largeObject
+ }
package/dist/DocCollection.d.ts DELETED
@@ -1,46 +0,0 @@
- import { EventEmitter } from "eventemitter3";
- import { DocHandle } from "./DocHandle.js";
- import { DocumentId, AutomergeUrl } from "./types.js";
- import { type SharePolicy } from "./Repo.js";
- /**
-  * A DocCollection is a collection of DocHandles. It supports creating new documents and finding
-  * documents by ID.
-  * */
- export declare class DocCollection extends EventEmitter<DocCollectionEvents> {
-     #private;
-     /** By default, we share generously with all peers. */
-     sharePolicy: SharePolicy;
-     constructor();
-     /** Returns all the handles we have cached. */
-     get handles(): Record<DocumentId, DocHandle<any>>;
-     /**
-      * Creates a new document and returns a handle to it. The initial value of the document is
-      * an empty object `{}`. Its documentId is generated by the system. we emit a `document` event
-      * to advertise interest in the document.
-      */
-     create<T>(): DocHandle<T>;
-     /**
-      * Retrieves a document by id. It gets data from the local system, but also emits a `document`
-      * event to advertise interest in the document.
-      */
-     find<T>(
-     /** The documentId of the handle to retrieve */
-     automergeUrl: AutomergeUrl): DocHandle<T>;
-     delete(
-     /** The documentId of the handle to delete */
-     id: DocumentId | AutomergeUrl): void;
- }
- interface DocCollectionEvents {
-     document: (arg: DocumentPayload) => void;
-     "delete-document": (arg: DeleteDocumentPayload) => void;
-     "unavailable-document": (arg: DeleteDocumentPayload) => void;
- }
- interface DocumentPayload {
-     handle: DocHandle<any>;
-     isNew: boolean;
- }
- interface DeleteDocumentPayload {
-     documentId: DocumentId;
- }
- export {};
- //# sourceMappingURL=DocCollection.d.ts.map
package/dist/DocCollection.d.ts.map DELETED
@@ -1 +0,0 @@
- {"version":3,"file":"DocCollection.d.ts","sourceRoot":"","sources":["../src/DocCollection.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,MAAM,eAAe,CAAA;AAC5C,OAAO,EAAE,SAAS,EAAE,MAAM,gBAAgB,CAAA;AAC1C,OAAO,EAAE,UAAU,EAAyB,YAAY,EAAE,MAAM,YAAY,CAAA;AAC5E,OAAO,EAAE,KAAK,WAAW,EAAE,MAAM,WAAW,CAAA;AAS5C;;;KAGK;AACL,qBAAa,aAAc,SAAQ,YAAY,CAAC,mBAAmB,CAAC;;IAGlE,sDAAsD;IACtD,WAAW,EAAE,WAAW,CAAmB;;IAwB3C,8CAA8C;IAC9C,IAAI,OAAO,uCAEV;IAED;;;;OAIG;IACH,MAAM,CAAC,CAAC,KAAK,SAAS,CAAC,CAAC,CAAC;IA0BzB;;;OAGG;IACH,IAAI,CAAC,CAAC;IACJ,+CAA+C;IAC/C,YAAY,EAAE,YAAY,GACzB,SAAS,CAAC,CAAC,CAAC;IAwBf,MAAM;IACJ,6CAA6C;IAC7C,EAAE,EAAE,UAAU,GAAG,YAAY;CAchC;AAGD,UAAU,mBAAmB;IAC3B,QAAQ,EAAE,CAAC,GAAG,EAAE,eAAe,KAAK,IAAI,CAAA;IACxC,iBAAiB,EAAE,CAAC,GAAG,EAAE,qBAAqB,KAAK,IAAI,CAAA;IACvD,sBAAsB,EAAE,CAAC,GAAG,EAAE,qBAAqB,KAAK,IAAI,CAAA;CAC7D;AAED,UAAU,eAAe;IACvB,MAAM,EAAE,SAAS,CAAC,GAAG,CAAC,CAAA;IACtB,KAAK,EAAE,OAAO,CAAA;CACf;AAED,UAAU,qBAAqB;IAC7B,UAAU,EAAE,UAAU,CAAA;CACvB"}
package/dist/DocCollection.js DELETED
@@ -1,102 +0,0 @@
- import { EventEmitter } from "eventemitter3";
- import { DocHandle } from "./DocHandle.js";
- import { generateAutomergeUrl, isValidAutomergeUrl, parseAutomergeUrl, } from "./DocUrl.js";
- /**
-  * A DocCollection is a collection of DocHandles. It supports creating new documents and finding
-  * documents by ID.
-  * */
- export class DocCollection extends EventEmitter {
-     #handleCache = {};
-     /** By default, we share generously with all peers. */
-     sharePolicy = async () => true;
-     constructor() {
-         super();
-     }
-     /** Returns an existing handle if we have it; creates one otherwise. */
-     #getHandle(
-     /** The documentId of the handle to look up or create */
-     documentId,
-     /** If we know we're creating a new document, specify this so we can have access to it immediately */
-     isNew) {
-         // If we have the handle cached, return it
-         if (this.#handleCache[documentId])
-             return this.#handleCache[documentId];
-         // If not, create a new handle, cache it, and return it
-         if (!documentId)
-             throw new Error(`Invalid documentId ${documentId}`);
-         const handle = new DocHandle(documentId, { isNew });
-         this.#handleCache[documentId] = handle;
-         return handle;
-     }
-     /** Returns all the handles we have cached. */
-     get handles() {
-         return this.#handleCache;
-     }
-     /**
-      * Creates a new document and returns a handle to it. The initial value of the document is
-      * an empty object `{}`. Its documentId is generated by the system. we emit a `document` event
-      * to advertise interest in the document.
-      */
-     create() {
-         // TODO:
-         // either
-         // - pass an initial value and do something like this to ensure that you get a valid initial value
-         // const myInitialValue = {
-         //   tasks: [],
-         //   filter: "all",
-         //
-         // const guaranteeInitialValue = (doc: any) => {
-         //   if (!doc.tasks) doc.tasks = []
-         //   if (!doc.filter) doc.filter = "all"
-         //   return { ...myInitialValue, ...doc }
-         // }
-         // or
-         // - pass a "reify" function that takes a `<any>` and returns `<T>`
-         // Generate a new UUID and store it in the buffer
-         const { documentId } = parseAutomergeUrl(generateAutomergeUrl());
-         const handle = this.#getHandle(documentId, true);
-         this.emit("document", { handle, isNew: true });
-         return handle;
-     }
-     /**
-      * Retrieves a document by id. It gets data from the local system, but also emits a `document`
-      * event to advertise interest in the document.
-      */
-     find(
-     /** The documentId of the handle to retrieve */
-     automergeUrl) {
-         if (!isValidAutomergeUrl(automergeUrl)) {
-             throw new Error(`Invalid AutomergeUrl: '${automergeUrl}'`);
-         }
-         const { documentId } = parseAutomergeUrl(automergeUrl);
-         // If we have the handle cached, return it
-         if (this.#handleCache[documentId]) {
-             if (this.#handleCache[documentId].isUnavailable()) {
-                 // this ensures that the event fires after the handle has been returned
-                 setTimeout(() => {
-                     this.#handleCache[documentId].emit("unavailable", {
-                         handle: this.#handleCache[documentId],
-                     });
-                 });
-             }
-             return this.#handleCache[documentId];
-         }
-         const handle = this.#getHandle(documentId, false);
-         this.emit("document", { handle, isNew: false });
-         return handle;
-     }
-     delete(
-     /** The documentId of the handle to delete */
-     id) {
-         if (isValidAutomergeUrl(id)) {
-             ;
-             ({ documentId: id } = parseAutomergeUrl(id));
-         }
-         const handle = this.#getHandle(id, false);
-         handle.delete();
-         delete this.#handleCache[id];
-         this.emit("delete-document", {
-             documentId: id,
-         });
-     }
- }