@automerge/automerge-repo 2.0.0-alpha.6 → 2.0.0-collectionsync-alpha.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/CollectionHandle.d.ts +14 -0
- package/dist/CollectionHandle.d.ts.map +1 -0
- package/dist/CollectionHandle.js +37 -0
- package/dist/DocHandle.d.ts +67 -2
- package/dist/DocHandle.d.ts.map +1 -1
- package/dist/DocHandle.js +113 -2
- package/dist/DocUrl.d.ts +47 -0
- package/dist/DocUrl.d.ts.map +1 -0
- package/dist/DocUrl.js +72 -0
- package/dist/EphemeralData.d.ts +20 -0
- package/dist/EphemeralData.d.ts.map +1 -0
- package/dist/EphemeralData.js +1 -0
- package/dist/Repo.d.ts +28 -7
- package/dist/Repo.d.ts.map +1 -1
- package/dist/Repo.js +142 -143
- package/dist/ferigan.d.ts +51 -0
- package/dist/ferigan.d.ts.map +1 -0
- package/dist/ferigan.js +98 -0
- package/dist/helpers/tests/storage-adapter-tests.d.ts +2 -2
- package/dist/helpers/tests/storage-adapter-tests.d.ts.map +1 -1
- package/dist/helpers/tests/storage-adapter-tests.js +19 -39
- package/dist/index.d.ts +2 -0
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +1 -0
- package/dist/network/NetworkSubsystem.d.ts +1 -0
- package/dist/network/NetworkSubsystem.d.ts.map +1 -1
- package/dist/network/NetworkSubsystem.js +3 -0
- package/dist/network/messages.d.ts +7 -1
- package/dist/network/messages.d.ts.map +1 -1
- package/dist/network/messages.js +2 -1
- package/dist/src/DocHandle.d.ts +182 -0
- package/dist/src/DocHandle.d.ts.map +1 -0
- package/dist/src/DocHandle.js +405 -0
- package/dist/src/DocUrl.d.ts +49 -0
- package/dist/src/DocUrl.d.ts.map +1 -0
- package/dist/src/DocUrl.js +72 -0
- package/dist/src/EphemeralData.d.ts +19 -0
- package/dist/src/EphemeralData.d.ts.map +1 -0
- package/dist/src/EphemeralData.js +1 -0
- package/dist/src/Repo.d.ts +74 -0
- package/dist/src/Repo.d.ts.map +1 -0
- package/dist/src/Repo.js +208 -0
- package/dist/src/helpers/arraysAreEqual.d.ts +2 -0
- package/dist/src/helpers/arraysAreEqual.d.ts.map +1 -0
- package/dist/src/helpers/arraysAreEqual.js +2 -0
- package/dist/src/helpers/cbor.d.ts +4 -0
- package/dist/src/helpers/cbor.d.ts.map +1 -0
- package/dist/src/helpers/cbor.js +8 -0
- package/dist/src/helpers/eventPromise.d.ts +11 -0
- package/dist/src/helpers/eventPromise.d.ts.map +1 -0
- package/dist/src/helpers/eventPromise.js +7 -0
- package/dist/src/helpers/headsAreSame.d.ts +2 -0
- package/dist/src/helpers/headsAreSame.d.ts.map +1 -0
- package/dist/src/helpers/headsAreSame.js +4 -0
- package/dist/src/helpers/mergeArrays.d.ts +2 -0
- package/dist/src/helpers/mergeArrays.d.ts.map +1 -0
- package/dist/src/helpers/mergeArrays.js +15 -0
- package/dist/src/helpers/pause.d.ts +6 -0
- package/dist/src/helpers/pause.d.ts.map +1 -0
- package/dist/src/helpers/pause.js +10 -0
- package/dist/src/helpers/tests/network-adapter-tests.d.ts +21 -0
- package/dist/src/helpers/tests/network-adapter-tests.d.ts.map +1 -0
- package/dist/src/helpers/tests/network-adapter-tests.js +122 -0
- package/dist/src/helpers/withTimeout.d.ts +12 -0
- package/dist/src/helpers/withTimeout.d.ts.map +1 -0
- package/dist/src/helpers/withTimeout.js +24 -0
- package/dist/src/index.d.ts +53 -0
- package/dist/src/index.d.ts.map +1 -0
- package/dist/src/index.js +40 -0
- package/dist/src/network/NetworkAdapter.d.ts +26 -0
- package/dist/src/network/NetworkAdapter.d.ts.map +1 -0
- package/dist/src/network/NetworkAdapter.js +4 -0
- package/dist/src/network/NetworkSubsystem.d.ts +23 -0
- package/dist/src/network/NetworkSubsystem.d.ts.map +1 -0
- package/dist/src/network/NetworkSubsystem.js +120 -0
- package/dist/src/network/messages.d.ts +85 -0
- package/dist/src/network/messages.d.ts.map +1 -0
- package/dist/src/network/messages.js +23 -0
- package/dist/src/storage/StorageAdapter.d.ts +14 -0
- package/dist/src/storage/StorageAdapter.d.ts.map +1 -0
- package/dist/src/storage/StorageAdapter.js +1 -0
- package/dist/src/storage/StorageSubsystem.d.ts +12 -0
- package/dist/src/storage/StorageSubsystem.d.ts.map +1 -0
- package/dist/src/storage/StorageSubsystem.js +145 -0
- package/dist/src/synchronizer/CollectionSynchronizer.d.ts +25 -0
- package/dist/src/synchronizer/CollectionSynchronizer.d.ts.map +1 -0
- package/dist/src/synchronizer/CollectionSynchronizer.js +106 -0
- package/dist/src/synchronizer/DocSynchronizer.d.ts +29 -0
- package/dist/src/synchronizer/DocSynchronizer.d.ts.map +1 -0
- package/dist/src/synchronizer/DocSynchronizer.js +263 -0
- package/dist/src/synchronizer/Synchronizer.d.ts +9 -0
- package/dist/src/synchronizer/Synchronizer.d.ts.map +1 -0
- package/dist/src/synchronizer/Synchronizer.js +2 -0
- package/dist/src/types.d.ts +16 -0
- package/dist/src/types.d.ts.map +1 -0
- package/dist/src/types.js +1 -0
- package/dist/storage/StorageAdapter.d.ts +9 -0
- package/dist/storage/StorageAdapter.d.ts.map +1 -1
- package/dist/storage/StorageAdapter.js +33 -0
- package/dist/storage/StorageSubsystem.d.ts +12 -2
- package/dist/storage/StorageSubsystem.d.ts.map +1 -1
- package/dist/storage/StorageSubsystem.js +42 -100
- package/dist/synchronizer/CollectionSynchronizer.d.ts +4 -2
- package/dist/synchronizer/CollectionSynchronizer.d.ts.map +1 -1
- package/dist/synchronizer/CollectionSynchronizer.js +28 -15
- package/dist/synchronizer/DocSynchronizer.d.ts +6 -5
- package/dist/synchronizer/DocSynchronizer.d.ts.map +1 -1
- package/dist/synchronizer/DocSynchronizer.js +76 -178
- package/dist/synchronizer/Synchronizer.d.ts +11 -0
- package/dist/synchronizer/Synchronizer.d.ts.map +1 -1
- package/dist/test/CollectionSynchronizer.test.d.ts +2 -0
- package/dist/test/CollectionSynchronizer.test.d.ts.map +1 -0
- package/dist/test/CollectionSynchronizer.test.js +57 -0
- package/dist/test/DocHandle.test.d.ts +2 -0
- package/dist/test/DocHandle.test.d.ts.map +1 -0
- package/dist/test/DocHandle.test.js +238 -0
- package/dist/test/DocSynchronizer.test.d.ts +2 -0
- package/dist/test/DocSynchronizer.test.d.ts.map +1 -0
- package/dist/test/DocSynchronizer.test.js +111 -0
- package/dist/test/Network.test.d.ts +2 -0
- package/dist/test/Network.test.d.ts.map +1 -0
- package/dist/test/Network.test.js +11 -0
- package/dist/test/Repo.test.d.ts +2 -0
- package/dist/test/Repo.test.d.ts.map +1 -0
- package/dist/test/Repo.test.js +568 -0
- package/dist/test/StorageSubsystem.test.d.ts +2 -0
- package/dist/test/StorageSubsystem.test.d.ts.map +1 -0
- package/dist/test/StorageSubsystem.test.js +56 -0
- package/dist/test/helpers/DummyNetworkAdapter.d.ts +9 -0
- package/dist/test/helpers/DummyNetworkAdapter.d.ts.map +1 -0
- package/dist/test/helpers/DummyNetworkAdapter.js +15 -0
- package/dist/test/helpers/DummyStorageAdapter.d.ts +16 -0
- package/dist/test/helpers/DummyStorageAdapter.d.ts.map +1 -0
- package/dist/test/helpers/DummyStorageAdapter.js +33 -0
- package/dist/test/helpers/generate-large-object.d.ts +5 -0
- package/dist/test/helpers/generate-large-object.d.ts.map +1 -0
- package/dist/test/helpers/generate-large-object.js +9 -0
- package/dist/test/helpers/getRandomItem.d.ts +2 -0
- package/dist/test/helpers/getRandomItem.d.ts.map +1 -0
- package/dist/test/helpers/getRandomItem.js +4 -0
- package/dist/test/types.d.ts +4 -0
- package/dist/test/types.d.ts.map +1 -0
- package/dist/test/types.js +1 -0
- package/package.json +3 -3
- package/src/CollectionHandle.ts +54 -0
- package/src/DocHandle.ts +133 -4
- package/src/Repo.ts +192 -183
- package/src/ferigan.ts +184 -0
- package/src/helpers/tests/storage-adapter-tests.ts +31 -62
- package/src/index.ts +2 -0
- package/src/network/NetworkSubsystem.ts +4 -0
- package/src/network/messages.ts +11 -2
- package/src/storage/StorageAdapter.ts +42 -0
- package/src/storage/StorageSubsystem.ts +59 -119
- package/src/synchronizer/CollectionSynchronizer.ts +34 -26
- package/src/synchronizer/DocSynchronizer.ts +84 -231
- package/src/synchronizer/Synchronizer.ts +14 -0
- package/test/CollectionSynchronizer.test.ts +4 -2
- package/test/DocHandle.test.ts +141 -0
- package/test/DocSynchronizer.test.ts +6 -1
- package/test/RemoteHeadsSubscriptions.test.ts +1 -1
- package/test/Repo.test.ts +225 -117
- package/test/StorageSubsystem.test.ts +20 -16
- package/test/remoteHeads.test.ts +1 -1

package/dist/src/Repo.js
ADDED
@@ -0,0 +1,208 @@
import debug from "debug"
import { NetworkSubsystem } from "./network/NetworkSubsystem.js"
import { StorageSubsystem } from "./storage/StorageSubsystem.js"
import { CollectionSynchronizer } from "./synchronizer/CollectionSynchronizer.js"
import {
  parseAutomergeUrl,
  generateAutomergeUrl,
  isValidAutomergeUrl,
  parseLegacyUUID,
} from "./DocUrl.js"
import { DocHandle } from "./DocHandle.js"
import { EventEmitter } from "eventemitter3"
/** A Repo is a collection of documents with networking, syncing, and storage capabilities. */
/** The `Repo` is the main entry point of this library
 *
 * @remarks
 * To construct a `Repo` you will need an {@link StorageAdapter} and one or
 * more {@link NetworkAdapter}s. Once you have a `Repo` you can use it to
 * obtain {@link DocHandle}s.
 */
export class Repo extends EventEmitter {
  #log
  networkSubsystem
  storageSubsystem
  #handleCache = {}
  /** By default, we share generously with all peers. */
  sharePolicy = async () => true
  constructor({ storage, network, peerId, sharePolicy }) {
    super()
    this.#log = debug(`automerge-repo:repo`)
    this.sharePolicy = sharePolicy ?? this.sharePolicy
    // DOC COLLECTION
    // The `document` event is fired by the DocCollection any time we create a new document or look
    // up a document by ID. We listen for it in order to wire up storage and network synchronization.
    this.on("document", async ({ handle, isNew }) => {
      if (storageSubsystem) {
        // Save when the document changes
        handle.on("heads-changed", async ({ handle, doc }) => {
          await storageSubsystem.saveDoc(handle.documentId, doc)
        })
        if (isNew) {
          // this is a new document, immediately save it
          await storageSubsystem.saveDoc(handle.documentId, handle.docSync())
        } else {
          // Try to load from disk
          const loadedDoc = await storageSubsystem.loadDoc(handle.documentId)
          if (loadedDoc) {
            handle.update(() => loadedDoc)
          }
        }
      }
      handle.on("unavailable", () => {
        this.#log("document unavailable", { documentId: handle.documentId })
        this.emit("unavailable-document", {
          documentId: handle.documentId,
        })
      })
      if (this.networkSubsystem.isReady()) {
        handle.request()
      } else {
        handle.awaitNetwork()
        this.networkSubsystem
          .whenReady()
          .then(() => {
            handle.networkReady()
          })
          .catch(err => {
            this.#log("error waiting for network", { err })
          })
      }
      // Register the document with the synchronizer. This advertises our interest in the document.
      synchronizer.addDocument(handle.documentId)
    })
    this.on("delete-document", ({ documentId }) => {
      // TODO Pass the delete on to the network
      // synchronizer.removeDocument(documentId)
      if (storageSubsystem) {
        storageSubsystem.remove(documentId).catch(err => {
          this.#log("error deleting document", { documentId, err })
        })
      }
    })
    // SYNCHRONIZER
    // The synchronizer uses the network subsystem to keep documents in sync with peers.
    const synchronizer = new CollectionSynchronizer(this)
    // When the synchronizer emits sync messages, send them to peers
    synchronizer.on("message", message => {
      this.#log(`sending sync message to ${message.targetId}`)
      networkSubsystem.send(message)
    })
    // STORAGE
    // The storage subsystem has access to some form of persistence, and deals with save and loading documents.
    const storageSubsystem = storage ? new StorageSubsystem(storage) : undefined
    this.storageSubsystem = storageSubsystem
    // NETWORK
    // The network subsystem deals with sending and receiving messages to and from peers.
    const networkSubsystem = new NetworkSubsystem(network, peerId)
    this.networkSubsystem = networkSubsystem
    // When we get a new peer, register it with the synchronizer
    networkSubsystem.on("peer", async ({ peerId }) => {
      this.#log("peer connected", { peerId })
      synchronizer.addPeer(peerId)
    })
    // When a peer disconnects, remove it from the synchronizer
    networkSubsystem.on("peer-disconnected", ({ peerId }) => {
      synchronizer.removePeer(peerId)
    })
    // Handle incoming messages
    networkSubsystem.on("message", async msg => {
      await synchronizer.receiveMessage(msg)
    })
  }
  /** Returns an existing handle if we have it; creates one otherwise. */
  #getHandle(
    /** The documentId of the handle to look up or create */
    documentId,
    /** If we know we're creating a new document, specify this so we can have access to it immediately */
    isNew
  ) {
    // If we have the handle cached, return it
    if (this.#handleCache[documentId]) return this.#handleCache[documentId]
    // If not, create a new handle, cache it, and return it
    if (!documentId) throw new Error(`Invalid documentId ${documentId}`)
    const handle = new DocHandle(documentId, { isNew })
    this.#handleCache[documentId] = handle
    return handle
  }
  /** Returns all the handles we have cached. */
  get handles() {
    return this.#handleCache
  }
  /**
   * Creates a new document and returns a handle to it. The initial value of the document is
   * an empty object `{}`. Its documentId is generated by the system. we emit a `document` event
   * to advertise interest in the document.
   */
  create() {
    // TODO:
    // either
    // - pass an initial value and do something like this to ensure that you get a valid initial value
    // const myInitialValue = {
    //   tasks: [],
    //   filter: "all",
    //
    // const guaranteeInitialValue = (doc: any) => {
    //   if (!doc.tasks) doc.tasks = []
    //   if (!doc.filter) doc.filter = "all"
    //   return { ...myInitialValue, ...doc }
    // }
    // or
    // - pass a "reify" function that takes a `<any>` and returns `<T>`
    // Generate a new UUID and store it in the buffer
    const { documentId } = parseAutomergeUrl(generateAutomergeUrl())
    const handle = this.#getHandle(documentId, true)
    this.emit("document", { handle, isNew: true })
    return handle
  }
  /**
   * Retrieves a document by id. It gets data from the local system, but also emits a `document`
   * event to advertise interest in the document.
   */
  find(
    /** The documentId of the handle to retrieve */
    automergeUrl
  ) {
    if (!isValidAutomergeUrl(automergeUrl)) {
      let maybeAutomergeUrl = parseLegacyUUID(automergeUrl)
      if (maybeAutomergeUrl) {
        console.warn(
          "Legacy UUID document ID detected, converting to AutomergeUrl. This will be removed in a future version."
        )
        automergeUrl = maybeAutomergeUrl
      } else {
        throw new Error(`Invalid AutomergeUrl: '${automergeUrl}'`)
      }
    }
    const { documentId } = parseAutomergeUrl(automergeUrl)
    // If we have the handle cached, return it
    if (this.#handleCache[documentId]) {
      if (this.#handleCache[documentId].isUnavailable()) {
        // this ensures that the event fires after the handle has been returned
        setTimeout(() => {
          this.#handleCache[documentId].emit("unavailable", {
            handle: this.#handleCache[documentId],
          })
        })
      }
      return this.#handleCache[documentId]
    }
    const handle = this.#getHandle(documentId, false)
    this.emit("document", { handle, isNew: false })
    return handle
  }
  delete(
    /** The documentId of the handle to delete */
    id
  ) {
    if (isValidAutomergeUrl(id)) {
      ;({ documentId: id } = parseAutomergeUrl(id))
    }
    const handle = this.#getHandle(id, false)
    handle.delete()
    delete this.#handleCache[id]
    this.emit("delete-document", {
      documentId: id,
    })
  }
}
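
The doc comment on `Repo` above describes the intended flow: construct a `Repo` with a storage adapter and one or more network adapters, then obtain `DocHandle`s from it. Below is a minimal sketch of that flow using only the API visible in this diff (`create`, `find`, `change`, `sharePolicy`); the adapter classes and their import paths are placeholders, not real exports of this package.

```typescript
import { Repo, isValidAutomergeUrl, type PeerId } from "@automerge/automerge-repo"
// Placeholder adapters: substitute any concrete StorageAdapter / NetworkAdapter implementation.
import { SomeStorageAdapter } from "some-storage-adapter"
import { SomeNetworkAdapter } from "some-network-adapter"

const repo = new Repo({
  storage: new SomeStorageAdapter(),
  network: [new SomeNetworkAdapter()],
  peerId: "my-peer" as PeerId,
  sharePolicy: async () => true, // matches the generous default above
})

// Create a document: the repo emits "document", saves it, and starts advertising it to peers.
const handle = repo.create()
handle.change((doc: any) => {
  doc.title = "hello"
})

// Later (or on another peer), look the document up by its URL.
if (isValidAutomergeUrl(handle.url)) {
  const sameHandle = repo.find(handle.url)
}
```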

package/dist/src/helpers/arraysAreEqual.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"arraysAreEqual.d.ts","sourceRoot":"","sources":["../../../src/helpers/arraysAreEqual.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,cAAc,gCACiD,CAAA"}

package/dist/src/helpers/cbor.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"cbor.d.ts","sourceRoot":"","sources":["../../../src/helpers/cbor.ts"],"names":[],"mappings":";AAEA,wBAAgB,MAAM,CAAC,GAAG,EAAE,GAAG,GAAG,MAAM,CAGvC;AAED,wBAAgB,MAAM,CAAC,GAAG,EAAE,MAAM,GAAG,UAAU,GAAG,GAAG,CAEpD"}

package/dist/src/helpers/eventPromise.d.ts
ADDED
@@ -0,0 +1,11 @@
import { EventEmitter } from "eventemitter3"
/** Returns a promise that resolves when the given event is emitted on the given emitter. */
export declare const eventPromise: (
  emitter: EventEmitter,
  event: string
) => Promise<any>
export declare const eventPromises: (
  emitters: EventEmitter[],
  event: string
) => Promise<any[]>
//# sourceMappingURL=eventPromise.d.ts.map

package/dist/src/helpers/eventPromise.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"eventPromise.d.ts","sourceRoot":"","sources":["../../../src/helpers/eventPromise.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,MAAM,eAAe,CAAA;AAE5C,4FAA4F;AAC5F,eAAO,MAAM,YAAY,YAAa,YAAY,SAAS,MAAM,iBACE,CAAA;AAEnE,eAAO,MAAM,aAAa,aAAc,YAAY,EAAE,SAAS,MAAM,mBAGpE,CAAA"}

package/dist/src/helpers/eventPromise.js
ADDED
@@ -0,0 +1,7 @@
/** Returns a promise that resolves when the given event is emitted on the given emitter. */
export const eventPromise = (emitter, event) =>
  new Promise(resolve => emitter.once(event, d => resolve(d)))
export const eventPromises = (emitters, event) => {
  const promises = emitters.map(emitter => eventPromise(emitter, event))
  return Promise.all(promises)
}
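
For orientation, a small usage sketch of these helpers (the emitters here are arbitrary; `eventPromise` simply wraps `emitter.once` in a promise, as shown above):

```typescript
import { EventEmitter } from "eventemitter3"
// Path as in this package's source tree; adjust to wherever the helper lives in your build.
import { eventPromise, eventPromises } from "./helpers/eventPromise.js"

const a = new EventEmitter()
const b = new EventEmitter()

// Resolves with the payload of the next "ready" event on `a`.
const ready = eventPromise(a, "ready")
a.emit("ready", { ok: true })
await ready // -> { ok: true }

// Resolves once *each* emitter has emitted "ready".
const allReady = eventPromises([a, b], "ready")
a.emit("ready", {})
b.emit("ready", {})
await allReady
```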

package/dist/src/helpers/headsAreSame.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"headsAreSame.d.ts","sourceRoot":"","sources":["../../../src/helpers/headsAreSame.ts"],"names":[],"mappings":"AAGA,eAAO,MAAM,YAAY,iCAExB,CAAA"}

package/dist/src/helpers/mergeArrays.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"mergeArrays.d.ts","sourceRoot":"","sources":["../../../src/helpers/mergeArrays.ts"],"names":[],"mappings":"AAAA,wBAAgB,WAAW,CAAC,QAAQ,EAAE,UAAU,EAAE,cAgBjD"}

package/dist/src/helpers/mergeArrays.js
ADDED
@@ -0,0 +1,15 @@
export function mergeArrays(myArrays) {
  // Get the total length of all arrays.
  let length = 0
  myArrays.forEach(item => {
    length += item.length
  })
  // Create a new array with total length and merge all source arrays.
  const mergedArray = new Uint8Array(length)
  let offset = 0
  myArrays.forEach(item => {
    mergedArray.set(item, offset)
    offset += item.length
  })
  return mergedArray
}
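
A quick illustrative call, with made-up input chunks, to show what the helper produces:

```typescript
import { mergeArrays } from "./helpers/mergeArrays.js" // path within this package's source tree

const chunks = [new Uint8Array([1, 2]), new Uint8Array([3]), new Uint8Array([4, 5, 6])]
const merged = mergeArrays(chunks)
// merged is Uint8Array [1, 2, 3, 4, 5, 6]; its length is the sum of the chunk lengths
```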

package/dist/src/helpers/pause.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"pause.d.ts","sourceRoot":"","sources":["../../../src/helpers/pause.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,KAAK,+BAC4C,CAAA;AAE9D,wBAAgB,eAAe,CAAC,CAAC,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC,CAAC,EAAE,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,CAAC,CAAC,CAKlF"}

package/dist/src/helpers/tests/network-adapter-tests.d.ts
ADDED
@@ -0,0 +1,21 @@
import { type NetworkAdapter } from "../../index.js"
/**
 * Runs a series of tests against a set of three peers, each represented by one or more instantiated
 * network adapters.
 *
 * The adapter `setup` function should return an object with the following properties:
 *
 * - `adapters`: A tuple representing three peers' network configuration. Each element can be either
 *   a single adapter or an array of adapters. Each will be used to instantiate a Repo for that
 *   peer.
 * - `teardown`: An optional function that will be called after the tests have run. This can be used
 *   to clean up any resources that were created during the test.
 */
export declare function runAdapterTests(_setup: SetupFn, title?: string): void
type Network = NetworkAdapter | NetworkAdapter[]
export type SetupFn = () => Promise<{
  adapters: [Network, Network, Network]
  teardown?: () => void
}>
export {}
//# sourceMappingURL=network-adapter-tests.d.ts.map

package/dist/src/helpers/tests/network-adapter-tests.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"network-adapter-tests.d.ts","sourceRoot":"","sources":["../../../../src/helpers/tests/network-adapter-tests.ts"],"names":[],"mappings":"AAAA,OAAO,EAAgB,KAAK,cAAc,EAAc,MAAM,gBAAgB,CAAA;AAM9E;;;;;;;;;;;GAWG;AACH,wBAAgB,eAAe,CAAC,MAAM,EAAE,OAAO,EAAE,KAAK,CAAC,EAAE,MAAM,GAAG,IAAI,CA8HrE;AAID,KAAK,OAAO,GAAG,cAAc,GAAG,cAAc,EAAE,CAAA;AAEhD,MAAM,MAAM,OAAO,GAAG,MAAM,OAAO,CAAC;IAClC,QAAQ,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,OAAO,CAAC,CAAA;IACrC,QAAQ,CAAC,EAAE,MAAM,IAAI,CAAA;CACtB,CAAC,CAAA"}

package/dist/src/helpers/tests/network-adapter-tests.js
ADDED
@@ -0,0 +1,122 @@
import { Repo } from "../../index.js"
import { eventPromise, eventPromises } from "../eventPromise.js"
import { assert } from "chai"
import { describe, it } from "mocha"
import { pause } from "../pause.js"
/**
 * Runs a series of tests against a set of three peers, each represented by one or more instantiated
 * network adapters.
 *
 * The adapter `setup` function should return an object with the following properties:
 *
 * - `adapters`: A tuple representing three peers' network configuration. Each element can be either
 *   a single adapter or an array of adapters. Each will be used to instantiate a Repo for that
 *   peer.
 * - `teardown`: An optional function that will be called after the tests have run. This can be used
 *   to clean up any resources that were created during the test.
 */
export function runAdapterTests(_setup, title) {
  // Wrap the provided setup function
  const setup = async () => {
    const { adapters, teardown = NO_OP } = await _setup()
    // these might be individual adapters or arrays of adapters; normalize them to arrays
    const [a, b, c] = adapters.map(toArray)
    return { adapters: [a, b, c], teardown }
  }
  describe(`Adapter acceptance tests ${title ? `(${title})` : ""}`, () => {
    it("can sync 2 repos", async () => {
      const doTest = async (a, b) => {
        const aliceRepo = new Repo({ network: a, peerId: alice })
        const bobRepo = new Repo({ network: b, peerId: bob })
        // Alice creates a document
        const aliceHandle = aliceRepo.create()
        // Bob receives the document
        await eventPromise(bobRepo, "document")
        const bobHandle = bobRepo.find(aliceHandle.url)
        // Alice changes the document
        aliceHandle.change(d => {
          d.foo = "bar"
        })
        // Bob receives the change
        await eventPromise(bobHandle, "change")
        assert.equal((await bobHandle.doc())?.foo, "bar")
        // Bob changes the document
        bobHandle.change(d => {
          d.foo = "baz"
        })
        // Alice receives the change
        await eventPromise(aliceHandle, "change")
        assert.equal((await aliceHandle.doc())?.foo, "baz")
      }
      // Run the test in both directions, in case they're different types of adapters
      {
        const { adapters, teardown } = await setup()
        const [x, y] = adapters
        await doTest(x, y) // x is Alice
        teardown()
      }
      {
        const { adapters, teardown } = await setup()
        const [x, y] = adapters
        await doTest(y, x) // y is Alice
        teardown()
      }
    })
    it("can sync 3 repos", async () => {
      const { adapters, teardown } = await setup()
      const [a, b, c] = adapters
      const aliceRepo = new Repo({ network: a, peerId: alice })
      const bobRepo = new Repo({ network: b, peerId: bob })
      const charlieRepo = new Repo({ network: c, peerId: charlie })
      // Alice creates a document
      const aliceHandle = aliceRepo.create()
      const docUrl = aliceHandle.url
      // Bob and Charlie receive the document
      await eventPromises([bobRepo, charlieRepo], "document")
      const bobHandle = bobRepo.find(docUrl)
      const charlieHandle = charlieRepo.find(docUrl)
      // Alice changes the document
      aliceHandle.change(d => {
        d.foo = "bar"
      })
      // Bob and Charlie receive the change
      await eventPromises([bobHandle, charlieHandle], "change")
      assert.equal((await bobHandle.doc())?.foo, "bar")
      assert.equal((await charlieHandle.doc())?.foo, "bar")
      // Charlie changes the document
      charlieHandle.change(d => {
        d.foo = "baz"
      })
      // Alice and Bob receive the change
      await eventPromises([aliceHandle, bobHandle], "change")
      assert.equal((await bobHandle.doc())?.foo, "baz")
      assert.equal((await charlieHandle.doc())?.foo, "baz")
      teardown()
    })
    it("can broadcast a message", async () => {
      const { adapters, teardown } = await setup()
      const [a, b, c] = adapters
      const aliceRepo = new Repo({ network: a, peerId: alice })
      const bobRepo = new Repo({ network: b, peerId: bob })
      const charlieRepo = new Repo({ network: c, peerId: charlie })
      await eventPromises(
        [aliceRepo, bobRepo, charlieRepo].map(r => r.networkSubsystem),
        "peer"
      )
      const aliceHandle = aliceRepo.create()
      const charlieHandle = charlieRepo.find(aliceHandle.url)
      // pause to give charlie a chance to let alice know it wants the doc
      await pause(100)
      const alicePresenceData = { presence: "alice" }
      aliceHandle.broadcast(alicePresenceData)
      const { message } = await eventPromise(charlieHandle, "ephemeral-message")
      assert.deepStrictEqual(message, alicePresenceData)
      teardown()
    })
  })
}
const NO_OP = () => {}
const toArray = x => (Array.isArray(x) ? x : [x])
const alice = "alice"
const bob = "bob"
const charlie = "charlie"
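
The doc comment above defines the `setup` contract for these acceptance tests. A hedged sketch of how a network adapter package might call them; `MyNetworkAdapter`, `startTestBroker`, and the import path are invented for illustration:

```typescript
// The exact import path is an assumption; in this repo the helper lives at
// src/helpers/tests/network-adapter-tests.ts.
import { runAdapterTests, type SetupFn } from "@automerge/automerge-repo/helpers/tests/network-adapter-tests.js"
import { MyNetworkAdapter, startTestBroker } from "./MyNetworkAdapter.js" // hypothetical adapter under test

const setup: SetupFn = async () => {
  const broker = await startTestBroker() // whatever infrastructure the adapter needs
  return {
    // One adapter per peer; an array of adapters per peer is also allowed.
    adapters: [
      new MyNetworkAdapter(broker.url),
      new MyNetworkAdapter(broker.url),
      new MyNetworkAdapter(broker.url),
    ],
    teardown: () => broker.close(),
  }
}

runAdapterTests(setup, "MyNetworkAdapter")
```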

package/dist/src/helpers/withTimeout.d.ts
ADDED
@@ -0,0 +1,12 @@
/**
 * If `promise` is resolved before `t` ms elapse, the timeout is cleared and the result of the
 * promise is returned. If the timeout ends first, a `TimeoutError` is thrown.
 */
export declare const withTimeout: <T>(
  promise: Promise<T>,
  t: number
) => Promise<T>
export declare class TimeoutError extends Error {
  constructor(message: string)
}
//# sourceMappingURL=withTimeout.d.ts.map

package/dist/src/helpers/withTimeout.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"withTimeout.d.ts","sourceRoot":"","sources":["../../../src/helpers/withTimeout.ts"],"names":[],"mappings":"AAAA;;;GAGG;AACH,eAAO,MAAM,WAAW,8BAEnB,MAAM,eAcV,CAAA;AAED,qBAAa,YAAa,SAAQ,KAAK;gBACzB,OAAO,EAAE,MAAM;CAI5B"}

package/dist/src/helpers/withTimeout.js
ADDED
@@ -0,0 +1,24 @@
/**
 * If `promise` is resolved before `t` ms elapse, the timeout is cleared and the result of the
 * promise is returned. If the timeout ends first, a `TimeoutError` is thrown.
 */
export const withTimeout = async (promise, t) => {
  let timeoutId
  const timeoutPromise = new Promise((_, reject) => {
    timeoutId = setTimeout(
      () => reject(new TimeoutError(`withTimeout: timed out after ${t}ms`)),
      t
    )
  })
  try {
    return await Promise.race([promise, timeoutPromise])
  } finally {
    clearTimeout(timeoutId)
  }
}
export class TimeoutError extends Error {
  constructor(message) {
    super(message)
    this.name = "TimeoutError"
  }
}
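
A brief sketch of `withTimeout` in use; `slowOperation` is a stand-in for any promise (for example, waiting on a sync message):

```typescript
import { withTimeout, TimeoutError } from "./helpers/withTimeout.js" // path within this package's source tree

const slowOperation = () =>
  new Promise<string>(resolve => setTimeout(() => resolve("done"), 100))

try {
  const result = await withTimeout(slowOperation(), 5000)
  console.log("resolved in time:", result)
} catch (err) {
  if (err instanceof TimeoutError) {
    console.log("gave up after 5000 ms")
  } else {
    throw err
  }
}
```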

package/dist/src/index.d.ts
ADDED
@@ -0,0 +1,53 @@
/**
 * @packageDocumentation
 *
 * The [`automerge`](https://www.npmjs.com/package/@automerge/automerge) CRDT
 * provides a core CRDT data structure and an implementation of a storage
 * format and sync protocol but doesn't provide the plumbing to use these tools
 * in a JS application. `automerge-repo` provides the plumbing.
 *
 * The main entry point is the {@link Repo} class, which you instantiate with
 * a {@link StorageAdapter} and zero or more {@link NetworkAdapter}s. Once you
 * have a repo you can use it to create {@link DocHandle}s. {@link DocHandle}s
 * are a reference to a document, identified by a {@link AutomergeUrl}, a place to
 * listen for changes to the document, and to make new changes.
 *
 * A typical example of how to use this library then might look like this:
 *
 * ```typescript
 * import { Repo } from "@automerge/automerge-repo";
 *
 * const repo = new Repo({
 *   storage: <storage adapter>,
 *   network: [<network adapter>, <network adapter>]
 * })
 *
 * const handle = repo.create
 * ```
 */
export { DocHandle, HandleState, type DocHandleOptions } from "./DocHandle.js"
export type { DocHandleChangePayload } from "./DocHandle.js"
export { NetworkAdapter } from "./network/NetworkAdapter.js"
export type {
  OpenPayload,
  PeerCandidatePayload,
  PeerDisconnectedPayload,
} from "./network/NetworkAdapter.js"
export type {
  Message,
  NetworkAdapterMessage,
  EphemeralMessage,
  SyncMessage,
} from "./network/messages.js"
export { isValidMessage } from "./network/messages.js"
export { Repo, type SharePolicy, type RepoConfig } from "./Repo.js"
export { StorageAdapter, type StorageKey } from "./storage/StorageAdapter.js"
export {
  parseAutomergeUrl,
  isValidAutomergeUrl,
  stringifyAutomergeUrl as generateAutomergeUrl,
} from "./DocUrl.js"
export * from "./types.js"
/** @hidden **/
export * as cbor from "./helpers/cbor.js"
//# sourceMappingURL=index.d.ts.map

package/dist/src/index.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;;;;;;;;;;;;GA0BG;AAEH,OAAO,EAAE,SAAS,EAAE,WAAW,EAAE,KAAK,gBAAgB,EAAE,MAAM,gBAAgB,CAAA;AAC9E,YAAY,EAAE,sBAAsB,EAAE,MAAM,gBAAgB,CAAA;AAC5D,OAAO,EAAE,cAAc,EAAE,MAAM,6BAA6B,CAAA;AAC5D,YAAY,EACV,WAAW,EACX,oBAAoB,EACpB,uBAAuB,GACxB,MAAM,6BAA6B,CAAA;AAMpC,YAAY,EACV,OAAO,EACP,qBAAqB,EACrB,gBAAgB,EAChB,WAAW,GACZ,MAAM,uBAAuB,CAAA;AAC9B,OAAO,EAAE,cAAc,EAAE,MAAM,uBAAuB,CAAA;AAEtD,OAAO,EAAE,IAAI,EAAE,KAAK,WAAW,EAAE,KAAK,UAAU,EAAE,MAAM,WAAW,CAAA;AACnE,OAAO,EAAE,cAAc,EAAE,KAAK,UAAU,EAAE,MAAM,6BAA6B,CAAA;AAC7E,OAAO,EACL,iBAAiB,EACjB,mBAAmB,EACnB,qBAAqB,IAAI,oBAAoB,GAC9C,MAAM,aAAa,CAAA;AACpB,cAAc,YAAY,CAAA;AAE1B,eAAe;AACf,OAAO,KAAK,IAAI,MAAM,mBAAmB,CAAA"}

package/dist/src/index.js
ADDED
@@ -0,0 +1,40 @@
/**
 * @packageDocumentation
 *
 * The [`automerge`](https://www.npmjs.com/package/@automerge/automerge) CRDT
 * provides a core CRDT data structure and an implementation of a storage
 * format and sync protocol but doesn't provide the plumbing to use these tools
 * in a JS application. `automerge-repo` provides the plumbing.
 *
 * The main entry point is the {@link Repo} class, which you instantiate with
 * a {@link StorageAdapter} and zero or more {@link NetworkAdapter}s. Once you
 * have a repo you can use it to create {@link DocHandle}s. {@link DocHandle}s
 * are a reference to a document, identified by a {@link AutomergeUrl}, a place to
 * listen for changes to the document, and to make new changes.
 *
 * A typical example of how to use this library then might look like this:
 *
 * ```typescript
 * import { Repo } from "@automerge/automerge-repo";
 *
 * const repo = new Repo({
 *   storage: <storage adapter>,
 *   network: [<network adapter>, <network adapter>]
 * })
 *
 * const handle = repo.create
 * ```
 */
export { DocHandle, HandleState } from "./DocHandle.js"
export { NetworkAdapter } from "./network/NetworkAdapter.js"
export { isValidMessage } from "./network/messages.js"
export { Repo } from "./Repo.js"
export { StorageAdapter } from "./storage/StorageAdapter.js"
export {
  parseAutomergeUrl,
  isValidAutomergeUrl,
  stringifyAutomergeUrl as generateAutomergeUrl,
} from "./DocUrl.js"
export * from "./types.js"
/** @hidden **/
export * as cbor from "./helpers/cbor.js"
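
The URL helpers re-exported here are the same ones `Repo.create`, `Repo.find`, and `Repo.delete` use above; a small sketch limited to the fields actually used in this diff:

```typescript
import {
  generateAutomergeUrl,
  isValidAutomergeUrl,
  parseAutomergeUrl,
} from "@automerge/automerge-repo"

// Mint a fresh URL, as Repo.create does internally.
const url = generateAutomergeUrl()

if (isValidAutomergeUrl(url)) {
  // Repo.find and Repo.delete extract the documentId the same way.
  const { documentId } = parseAutomergeUrl(url)
  console.log(documentId)
}
```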

package/dist/src/network/NetworkAdapter.d.ts
ADDED
@@ -0,0 +1,26 @@
import { EventEmitter } from "eventemitter3"
import { PeerId } from "../types.js"
import { Message } from "./messages.js"
export declare abstract class NetworkAdapter extends EventEmitter<NetworkAdapterEvents> {
  peerId?: PeerId
  abstract connect(peerId: PeerId): void
  abstract send(message: Message): void
  abstract disconnect(): void
}
export interface NetworkAdapterEvents {
  ready: (payload: OpenPayload) => void
  close: () => void
  "peer-candidate": (payload: PeerCandidatePayload) => void
  "peer-disconnected": (payload: PeerDisconnectedPayload) => void
  message: (payload: Message) => void
}
export interface OpenPayload {
  network: NetworkAdapter
}
export interface PeerCandidatePayload {
  peerId: PeerId
}
export interface PeerDisconnectedPayload {
  peerId: PeerId
}
//# sourceMappingURL=NetworkAdapter.d.ts.map
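
To make the abstract surface above concrete, here is a hedged skeleton of a custom adapter. The transport hooks (`sendToTransport`, `onTransportMessage`) are imaginary; only members and events declared in this file are used:

```typescript
import { NetworkAdapter, type Message, type PeerId } from "@automerge/automerge-repo"

// Imaginary transport hooks standing in for a real websocket, broadcast channel, etc.
declare function sendToTransport(message: Message): void
declare function onTransportMessage(callback: (message: Message) => void): void

class MyNetworkAdapter extends NetworkAdapter {
  connect(peerId: PeerId) {
    this.peerId = peerId
    // Announce readiness, advertise a (hypothetical) remote peer, and forward incoming messages.
    this.emit("ready", { network: this })
    this.emit("peer-candidate", { peerId: "remote-peer" as PeerId })
    onTransportMessage(message => this.emit("message", message))
  }

  send(message: Message) {
    sendToTransport(message)
  }

  disconnect() {
    this.emit("close")
  }
}
```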

package/dist/src/network/NetworkAdapter.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"NetworkAdapter.d.ts","sourceRoot":"","sources":["../../../src/network/NetworkAdapter.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,MAAM,eAAe,CAAA;AAC5C,OAAO,EAAE,MAAM,EAAE,MAAM,aAAa,CAAA;AACpC,OAAO,EAAE,OAAO,EAAE,MAAM,eAAe,CAAA;AAEvC,8BAAsB,cAAe,SAAQ,YAAY,CAAC,oBAAoB,CAAC;IAC7E,MAAM,CAAC,EAAE,MAAM,CAAA;IAEf,QAAQ,CAAC,OAAO,CAAC,MAAM,EAAE,MAAM,GAAG,IAAI;IAEtC,QAAQ,CAAC,IAAI,CAAC,OAAO,EAAE,OAAO,GAAG,IAAI;IAErC,QAAQ,CAAC,UAAU,IAAI,IAAI;CAC5B;AAID,MAAM,WAAW,oBAAoB;IACnC,KAAK,EAAE,CAAC,OAAO,EAAE,WAAW,KAAK,IAAI,CAAA;IACrC,KAAK,EAAE,MAAM,IAAI,CAAA;IACjB,gBAAgB,EAAE,CAAC,OAAO,EAAE,oBAAoB,KAAK,IAAI,CAAA;IACzD,mBAAmB,EAAE,CAAC,OAAO,EAAE,uBAAuB,KAAK,IAAI,CAAA;IAC/D,OAAO,EAAE,CAAC,OAAO,EAAE,OAAO,KAAK,IAAI,CAAA;CACpC;AAED,MAAM,WAAW,WAAW;IAC1B,OAAO,EAAE,cAAc,CAAA;CACxB;AAED,MAAM,WAAW,oBAAoB;IACnC,MAAM,EAAE,MAAM,CAAA;CACf;AAED,MAAM,WAAW,uBAAuB;IACtC,MAAM,EAAE,MAAM,CAAA;CACf"}