@automerge/automerge-repo 2.0.0-alpha.2 → 2.0.0-alpha.20
This diff compares the contents of two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only.
- package/dist/AutomergeUrl.d.ts +17 -5
- package/dist/AutomergeUrl.d.ts.map +1 -1
- package/dist/AutomergeUrl.js +71 -24
- package/dist/DocHandle.d.ts +80 -8
- package/dist/DocHandle.d.ts.map +1 -1
- package/dist/DocHandle.js +181 -10
- package/dist/RemoteHeadsSubscriptions.d.ts +4 -5
- package/dist/RemoteHeadsSubscriptions.d.ts.map +1 -1
- package/dist/RemoteHeadsSubscriptions.js +4 -1
- package/dist/Repo.d.ts +35 -2
- package/dist/Repo.d.ts.map +1 -1
- package/dist/Repo.js +112 -70
- package/dist/entrypoints/fullfat.d.ts +1 -0
- package/dist/entrypoints/fullfat.d.ts.map +1 -1
- package/dist/entrypoints/fullfat.js +1 -2
- package/dist/helpers/bufferFromHex.d.ts +3 -0
- package/dist/helpers/bufferFromHex.d.ts.map +1 -0
- package/dist/helpers/bufferFromHex.js +13 -0
- package/dist/helpers/headsAreSame.d.ts +2 -2
- package/dist/helpers/headsAreSame.d.ts.map +1 -1
- package/dist/helpers/mergeArrays.d.ts +1 -1
- package/dist/helpers/mergeArrays.d.ts.map +1 -1
- package/dist/helpers/tests/storage-adapter-tests.d.ts +2 -2
- package/dist/helpers/tests/storage-adapter-tests.d.ts.map +1 -1
- package/dist/helpers/tests/storage-adapter-tests.js +25 -48
- package/dist/index.d.ts +1 -1
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +1 -1
- package/dist/storage/StorageSubsystem.d.ts +11 -1
- package/dist/storage/StorageSubsystem.d.ts.map +1 -1
- package/dist/storage/StorageSubsystem.js +20 -4
- package/dist/synchronizer/CollectionSynchronizer.d.ts +15 -2
- package/dist/synchronizer/CollectionSynchronizer.d.ts.map +1 -1
- package/dist/synchronizer/CollectionSynchronizer.js +29 -8
- package/dist/synchronizer/DocSynchronizer.d.ts +7 -0
- package/dist/synchronizer/DocSynchronizer.d.ts.map +1 -1
- package/dist/synchronizer/DocSynchronizer.js +14 -0
- package/dist/synchronizer/Synchronizer.d.ts +11 -0
- package/dist/synchronizer/Synchronizer.d.ts.map +1 -1
- package/dist/types.d.ts +4 -1
- package/dist/types.d.ts.map +1 -1
- package/package.json +3 -3
- package/src/AutomergeUrl.ts +101 -26
- package/src/DocHandle.ts +245 -20
- package/src/RemoteHeadsSubscriptions.ts +11 -9
- package/src/Repo.ts +163 -68
- package/src/entrypoints/fullfat.ts +1 -2
- package/src/helpers/bufferFromHex.ts +14 -0
- package/src/helpers/headsAreSame.ts +2 -2
- package/src/helpers/tests/storage-adapter-tests.ts +44 -86
- package/src/index.ts +2 -0
- package/src/storage/StorageSubsystem.ts +29 -4
- package/src/synchronizer/CollectionSynchronizer.ts +42 -9
- package/src/synchronizer/DocSynchronizer.ts +15 -0
- package/src/synchronizer/Synchronizer.ts +14 -0
- package/src/types.ts +4 -1
- package/test/AutomergeUrl.test.ts +130 -0
- package/test/DocHandle.test.ts +209 -2
- package/test/DocSynchronizer.test.ts +10 -3
- package/test/Repo.test.ts +228 -3
- package/test/StorageSubsystem.test.ts +17 -0
package/dist/Repo.js
CHANGED

@@ -1,8 +1,8 @@
 import { next as Automerge } from "@automerge/automerge/slim";
 import debug from "debug";
 import { EventEmitter } from "eventemitter3";
-import { generateAutomergeUrl, interpretAsDocumentId, parseAutomergeUrl, } from "./AutomergeUrl.js";
-import { DocHandle } from "./DocHandle.js";
+import { encodeHeads, generateAutomergeUrl, interpretAsDocumentId, isValidAutomergeUrl, parseAutomergeUrl, } from "./AutomergeUrl.js";
+import { DELETED, DocHandle, READY, UNAVAILABLE, UNLOADED, } from "./DocHandle.js";
 import { RemoteHeadsSubscriptions } from "./RemoteHeadsSubscriptions.js";
 import { headsAreSame } from "./helpers/headsAreSame.js";
 import { throttle } from "./helpers/throttle.js";
@@ -30,7 +30,8 @@ export class Repo extends EventEmitter {
   /** @hidden */
   saveDebounceRate = 100;
   #handleCache = {};
-  #synchronizer;
+  /** @hidden */
+  synchronizer;
   /** By default, we share generously with all peers. */
   /** @hidden */
   sharePolicy = async () => true;
@@ -39,31 +40,11 @@ export class Repo extends EventEmitter {
   peerMetadataByPeerId = {};
   #remoteHeadsSubscriptions = new RemoteHeadsSubscriptions();
   #remoteHeadsGossipingEnabled = false;
-  constructor({ storage, network = [], peerId = randomPeerId(), sharePolicy, isEphemeral = storage === undefined, enableRemoteHeadsGossiping = false, } = {}) {
+  constructor({ storage, network = [], peerId = randomPeerId(), sharePolicy, isEphemeral = storage === undefined, enableRemoteHeadsGossiping = false, denylist = [], } = {}) {
     super();
     this.#remoteHeadsGossipingEnabled = enableRemoteHeadsGossiping;
     this.#log = debug(`automerge-repo:repo`);
     this.sharePolicy = sharePolicy ?? this.sharePolicy;
-    // DOC COLLECTION
-    // The `document` event is fired by the DocCollection any time we create a new document or look
-    // up a document by ID. We listen for it in order to wire up storage and network synchronization.
-    this.on("document", async ({ handle }) => {
-      if (storageSubsystem) {
-        // Save when the document changes, but no more often than saveDebounceRate.
-        const saveFn = ({ handle, doc, }) => {
-          void storageSubsystem.saveDoc(handle.documentId, doc);
-        };
-        handle.on("heads-changed", throttle(saveFn, this.saveDebounceRate));
-      }
-      handle.on("unavailable", () => {
-        this.#log("document unavailable", { documentId: handle.documentId });
-        this.emit("unavailable-document", {
-          documentId: handle.documentId,
-        });
-      });
-      // Register the document with the synchronizer. This advertises our interest in the document.
-      this.#synchronizer.addDocument(handle.documentId);
-    });
     this.on("delete-document", ({ documentId }) => {
       // TODO Pass the delete on to the network
       // synchronizer.removeDocument(documentId)
@@ -75,20 +56,25 @@
     });
     // SYNCHRONIZER
     // The synchronizer uses the network subsystem to keep documents in sync with peers.
-    this.#synchronizer = new CollectionSynchronizer(this);
+    this.synchronizer = new CollectionSynchronizer(this, denylist);
     // When the synchronizer emits messages, send them to peers
-    this.#synchronizer.on("message", message => {
+    this.synchronizer.on("message", message => {
       this.#log(`sending ${message.type} message to ${message.targetId}`);
       networkSubsystem.send(message);
     });
+    // Forward metrics from doc synchronizers
+    this.synchronizer.on("metrics", event => this.emit("doc-metrics", event));
     if (this.#remoteHeadsGossipingEnabled) {
-      this.#synchronizer.on("open-doc", ({ peerId, documentId }) => {
+      this.synchronizer.on("open-doc", ({ peerId, documentId }) => {
         this.#remoteHeadsSubscriptions.subscribePeerToDoc(peerId, documentId);
       });
     }
     // STORAGE
     // The storage subsystem has access to some form of persistence, and deals with save and loading documents.
     const storageSubsystem = storage ? new StorageSubsystem(storage) : undefined;
+    if (storageSubsystem) {
+      storageSubsystem.on("document-loaded", event => this.emit("doc-metrics", { type: "doc-loaded", ...event }));
+    }
     this.storageSubsystem = storageSubsystem;
     // NETWORK
     // The network subsystem deals with sending and receiving messages to and from peers.
@@ -113,18 +99,18 @@
       .catch(err => {
         console.log("error in share policy", { err });
       });
-      this.#synchronizer.addPeer(peerId);
+      this.synchronizer.addPeer(peerId);
     });
     // When a peer disconnects, remove it from the synchronizer
     networkSubsystem.on("peer-disconnected", ({ peerId }) => {
-      this.#synchronizer.removePeer(peerId);
+      this.synchronizer.removePeer(peerId);
       this.#remoteHeadsSubscriptions.removePeer(peerId);
     });
     // Handle incoming messages
     networkSubsystem.on("message", async (msg) => {
       this.#receiveMessage(msg);
     });
-    this.#synchronizer.on("sync-state", message => {
+    this.synchronizer.on("sync-state", message => {
       this.#saveSyncState(message);
       const handle = this.#handleCache[message.documentId];
       const { storageId } = this.peerMetadataByPeerId[message.peerId] || {};
@@ -133,11 +119,12 @@
       }
       const heads = handle.getRemoteHeads(storageId);
       const haveHeadsChanged = message.syncState.theirHeads &&
-        (!heads || !headsAreSame(heads, message.syncState.theirHeads));
+        (!heads ||
+          !headsAreSame(heads, encodeHeads(message.syncState.theirHeads)));
       if (haveHeadsChanged && message.syncState.theirHeads) {
-        handle.setRemoteHeads(storageId, message.syncState.theirHeads);
+        handle.setRemoteHeads(storageId, encodeHeads(message.syncState.theirHeads));
         if (storageId && this.#remoteHeadsGossipingEnabled) {
-          this.#remoteHeadsSubscriptions.handleImmediateRemoteHeadsChanged(message.documentId, storageId, message.syncState.theirHeads);
+          this.#remoteHeadsSubscriptions.handleImmediateRemoteHeadsChanged(message.documentId, storageId, encodeHeads(message.syncState.theirHeads));
         }
       }
     });
@@ -172,6 +159,28 @@
     });
   }
   }
+  // The `document` event is fired by the DocCollection any time we create a new document or look
+  // up a document by ID. We listen for it in order to wire up storage and network synchronization.
+  #registerHandleWithSubsystems(handle) {
+    const { storageSubsystem } = this;
+    if (storageSubsystem) {
+      // Save when the document changes, but no more often than saveDebounceRate.
+      const saveFn = ({ handle, doc }) => {
+        void storageSubsystem.saveDoc(handle.documentId, doc);
+      };
+      handle.on("heads-changed", throttle(saveFn, this.saveDebounceRate));
+    }
+    handle.on("unavailable", () => {
+      this.#log("document unavailable", { documentId: handle.documentId });
+      this.emit("unavailable-document", {
+        documentId: handle.documentId,
+      });
+    });
+    // Register the document with the synchronizer. This advertises our interest in the document.
+    this.synchronizer.addDocument(handle.documentId);
+    // Preserve the old event in case anyone was using it.
+    this.emit("document", { handle });
+  }
   #receiveMessage(message) {
     switch (message.type) {
       case "remote-subscription-change":
@@ -188,7 +197,7 @@
       case "request":
       case "ephemeral":
       case "doc-unavailable":
-        this.#synchronizer.receiveMessage(message).catch(err => {
+        this.synchronizer.receiveMessage(message).catch(err => {
           console.log("error receiving message", { err });
         });
     }
@@ -229,7 +238,7 @@
   }
   /** Returns a list of all connected peer ids */
   get peers() {
-    return this.#synchronizer.peers;
+    return this.synchronizer.peers;
   }
   getStorageIdOfPeer(peerId) {
     return this.peerMetadataByPeerId[peerId]?.storageId;
@@ -245,7 +254,7 @@
     const handle = this.#getHandle({
       documentId,
     });
-    this.emit("document", { handle });
+    this.#registerHandleWithSubsystems(handle);
     handle.update(() => {
       let nextDoc;
       if (initialValue) {
@@ -277,7 +286,7 @@
   clone(clonedHandle) {
     if (!clonedHandle.isReady()) {
       throw new Error(`Cloned handle is not yet in ready state.
-        (Try await handle.…
+        (Try await handle.whenReady() first.)`);
     }
     const sourceDoc = clonedHandle.docSync();
     if (!sourceDoc) {
@@ -297,49 +306,51 @@
   find(
   /** The url or documentId of the handle to retrieve */
   id) {
-    const documentId = …
+    const { documentId, heads } = isValidAutomergeUrl(id)
+      ? parseAutomergeUrl(id)
+      : { documentId: interpretAsDocumentId(id), heads: undefined };
+    const cachedHandle = this.#handleCache[documentId];
+    if (cachedHandle) {
+      if (cachedHandle.isUnavailable()) {
         // this ensures that the event fires after the handle has been returned
         setTimeout(() => {
-          …
-          handle: …
+          cachedHandle.emit("unavailable", {
+            handle: cachedHandle,
          });
        });
      }
-      return …
+      // If we already have the handle, return it immediately (or a view of the handle if heads are specified)
+      return heads ? cachedHandle.view(heads) : cachedHandle;
    }
    // If we don't already have the handle, make an empty one and try loading it
    const handle = this.#getHandle({
      documentId,
    });
-    // … (old lines 317–341 are lost in this rendering)
-    return handle;
+    // Loading & network is going to be asynchronous no matter what,
+    // but we want to return the handle immediately.
+    const attemptLoad = this.storageSubsystem
+      ? this.storageSubsystem.loadDoc(handle.documentId)
+      : Promise.resolve(null);
+    attemptLoad
+      .then(async (loadedDoc) => {
+        if (loadedDoc) {
+          // uhhhh, sorry if you're reading this because we were lying to the type system
+          handle.update(() => loadedDoc);
+          handle.doneLoading();
+        }
+        else {
+          // we want to wait for the network subsystem to be ready before
+          // we request the document. this prevents entering unavailable during initialization.
+          await this.networkSubsystem.whenReady();
+          handle.request();
+        }
+        this.#registerHandleWithSubsystems(handle);
+      })
+      .catch(err => {
+        this.#log("error waiting for network", { err });
+      });
+    // If we already have the handle, return it immediately (or a view of the handle if heads are specified)
+    return heads ? handle.view(heads) : handle;
   }
   delete(
   /** The url or documentId of the handle to delete */
@@ -415,10 +426,41 @@
     return this.storageSubsystem.saveDoc(handle.documentId, doc);
     }));
   }
+  /**
+   * Removes a DocHandle from the handleCache.
+   * @hidden this API is experimental and may change.
+   * @param documentId - documentId of the DocHandle to remove from handleCache, if present in cache.
+   * @returns Promise<void>
+   */
+  async removeFromCache(documentId) {
+    if (!this.#handleCache[documentId]) {
+      this.#log(`WARN: removeFromCache called but handle not found in handleCache for documentId: ${documentId}`);
+      return;
+    }
+    const handle = this.#getHandle({ documentId });
+    const doc = await handle.doc([READY, UNLOADED, DELETED, UNAVAILABLE]);
+    if (doc) {
+      if (handle.isReady()) {
+        handle.unload();
+      }
+      else {
+        this.#log(`WARN: removeFromCache called but handle for documentId: ${documentId} in unexpected state: ${handle.state}`);
+      }
+      delete this.#handleCache[documentId];
+      // TODO: remove document from synchronizer when removeDocument is implemented
+      // this.synchronizer.removeDocument(documentId)
+    }
+    else {
+      this.#log(`WARN: removeFromCache called but doc undefined for documentId: ${documentId}`);
+    }
+  }
   shutdown() {
     this.networkSubsystem.adapters.forEach(adapter => {
       adapter.disconnect();
     });
     return this.flush();
   }
+  metrics() {
+    return { documents: this.synchronizer.metrics() };
+  }
 }
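Taken together, the Repo.js changes above surface several new public touchpoints: a `denylist` constructor option, a public `synchronizer`, a `doc-metrics` event, a `metrics()` method, an experimental `removeFromCache()`, and `find()` support for URLs that carry heads. A minimal sketch of exercising them; the document shape and the empty adapter list are placeholders, not part of this diff:

```ts
import { Repo } from "@automerge/automerge-repo"

const repo = new Repo({
  network: [],  // add network adapters as needed
  denylist: [], // new in this release: document URLs this repo refuses to sync
})

// New: load/sync metrics, including "doc-loaded" forwarded from storage
repo.on("doc-metrics", event => console.log(event))

const handle = repo.create({ count: 0 }) // placeholder document shape
const found = repo.find(handle.url)      // URLs carrying heads return a view pinned to them
await found.whenReady()

console.log(repo.metrics())                   // { documents: ... } per the new metrics()
await repo.removeFromCache(handle.documentId) // experimental, marked @hidden above
```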
package/dist/entrypoints/fullfat.d.ts.map
CHANGED

@@ -1 +1 @@
-{"version":3,"file":"fullfat.d.ts","sourceRoot":"","sources":["../../src/entrypoints/fullfat.ts"],"names":[],"mappings":"AAAA,cAAc,aAAa,CAAA"}
+{"version":3,"file":"fullfat.d.ts","sourceRoot":"","sources":["../../src/entrypoints/fullfat.ts"],"names":[],"mappings":"AAAA,cAAc,aAAa,CAAA;AAS3B,OAAO,sBAAsB,CAAA"}
package/dist/helpers/bufferFromHex.d.ts.map
ADDED

@@ -0,0 +1 @@
+{"version":3,"file":"bufferFromHex.d.ts","sourceRoot":"","sources":["../../src/helpers/bufferFromHex.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,uBAAuB,cAAe,MAAM,KAAG,UAS3D,CAAA;AAED,eAAO,MAAM,qBAAqB,SAAU,UAAU,KAAG,MAExD,CAAA"}

package/dist/helpers/bufferFromHex.js
ADDED

@@ -0,0 +1,13 @@
+export const uint8ArrayFromHexString = (hexString) => {
+    if (hexString.length % 2 !== 0) {
+        throw new Error("Hex string must have an even length");
+    }
+    const bytes = new Uint8Array(hexString.length / 2);
+    for (let i = 0; i < hexString.length; i += 2) {
+        bytes[i >> 1] = parseInt(hexString.slice(i, i + 2), 16);
+    }
+    return bytes;
+};
+export const uint8ArrayToHexString = (data) => {
+    return Array.from(data, byte => byte.toString(16).padStart(2, "0")).join("");
+};
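A quick round trip through the new hex helpers. The deep-import path is illustrative: the helpers live in `dist/helpers/bufferFromHex.js` and may not be exposed through the package's export map.

```ts
// Illustrative deep import; adjust to however your build resolves the file.
import {
  uint8ArrayFromHexString,
  uint8ArrayToHexString,
} from "@automerge/automerge-repo/dist/helpers/bufferFromHex.js"

const bytes = uint8ArrayFromHexString("00017f63") // Uint8Array [0, 1, 127, 99]
console.log(uint8ArrayToHexString(bytes))         // "00017f63"

// Odd-length input throws, per the guard in the file above:
// uint8ArrayFromHexString("abc") // -> Error: Hex string must have an even length
```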
package/dist/helpers/headsAreSame.d.ts
CHANGED

@@ -1,3 +1,3 @@
-import { …
-export declare const headsAreSame: (a: …
+import type { UrlHeads } from "../types.js";
+export declare const headsAreSame: (a: UrlHeads, b: UrlHeads) => boolean;
 //# sourceMappingURL=headsAreSame.d.ts.map

package/dist/helpers/headsAreSame.d.ts.map
CHANGED

@@ -1 +1 @@
-{"version":3,"file":"headsAreSame.d.ts","sourceRoot":"","sources":["../../src/helpers/headsAreSame.ts"],"names":[],"mappings":"…
+{"version":3,"file":"headsAreSame.d.ts","sourceRoot":"","sources":["../../src/helpers/headsAreSame.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,QAAQ,EAAE,MAAM,aAAa,CAAA;AAE3C,eAAO,MAAM,YAAY,MAAO,QAAQ,KAAK,QAAQ,YAEpD,CAAA"}
package/dist/helpers/mergeArrays.d.ts
CHANGED

@@ -1,2 +1,2 @@
-export declare function mergeArrays(myArrays: Uint8Array[]): Uint8Array
+export declare function mergeArrays(myArrays: Uint8Array[]): Uint8Array<ArrayBuffer>;
 //# sourceMappingURL=mergeArrays.d.ts.map

package/dist/helpers/mergeArrays.d.ts.map
CHANGED

@@ -1 +1 @@
-{"version":3,"file":"mergeArrays.d.ts","sourceRoot":"","sources":["../../src/helpers/mergeArrays.ts"],"names":[],"mappings":"AAAA,wBAAgB,WAAW,CAAC,QAAQ,EAAE,UAAU,EAAE,…
+{"version":3,"file":"mergeArrays.d.ts","sourceRoot":"","sources":["../../src/helpers/mergeArrays.ts"],"names":[],"mappings":"AAAA,wBAAgB,WAAW,CAAC,QAAQ,EAAE,UAAU,EAAE,2BAgBjD"}
package/dist/helpers/tests/storage-adapter-tests.d.ts
CHANGED

@@ -1,7 +1,7 @@
 import type { StorageAdapterInterface } from "../../storage/StorageAdapterInterface.js";
-export declare function runStorageAdapterTests(…
+export declare function runStorageAdapterTests(setup: SetupFn, title?: string): void;
 export type SetupFn = () => Promise<{
     adapter: StorageAdapterInterface;
-    teardown?: () => void
+    teardown?: () => void | Promise<void>;
 }>;
 //# sourceMappingURL=storage-adapter-tests.d.ts.map

package/dist/helpers/tests/storage-adapter-tests.d.ts.map
CHANGED

@@ -1 +1 @@
-{"version":3,"file":"storage-adapter-tests.d.ts","sourceRoot":"","sources":["../../../src/helpers/tests/storage-adapter-tests.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,uBAAuB,EAAE,MAAM,0CAA0C,CAAA;…
+{"version":3,"file":"storage-adapter-tests.d.ts","sourceRoot":"","sources":["../../../src/helpers/tests/storage-adapter-tests.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,uBAAuB,EAAE,MAAM,0CAA0C,CAAA;AAcvF,wBAAgB,sBAAsB,CAAC,KAAK,EAAE,OAAO,EAAE,KAAK,CAAC,EAAE,MAAM,GAAG,IAAI,CA+H3E;AAID,MAAM,MAAM,OAAO,GAAG,MAAM,OAAO,CAAC;IAClC,OAAO,EAAE,uBAAuB,CAAA;IAChC,QAAQ,CAAC,EAAE,MAAM,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;CACtC,CAAC,CAAA"}
package/dist/helpers/tests/storage-adapter-tests.js
CHANGED

@@ -1,107 +1,87 @@
-import { describe, expect, it } from "vitest";
+import { describe, expect, beforeEach, it as _it } from "vitest";
 const PAYLOAD_A = () => new Uint8Array([0, 1, 127, 99, 154, 235]);
 const PAYLOAD_B = () => new Uint8Array([1, 76, 160, 53, 57, 10, 230]);
 const PAYLOAD_C = () => new Uint8Array([2, 111, 74, 131, 236, 96, 142, 193]);
 const LARGE_PAYLOAD = new Uint8Array(100000).map(() => Math.random() * 256);
-… (old lines 6–10 are lost in this rendering)
+const it = (_it);
+export function runStorageAdapterTests(setup, title) {
+  beforeEach(async (ctx) => {
+    const { adapter, teardown = NO_OP } = await setup();
+    ctx.adapter = adapter;
+    return teardown;
+  });
   describe(`Storage adapter acceptance tests ${title ? `(${title})` : ""}`, () => {
     describe("load", () => {
-      it("should return undefined if there is no data", async () => {
-        const { adapter, teardown } = await setup();
+      it("should return undefined if there is no data", async ({ adapter }) => {
         const actual = await adapter.load(["AAAAA", "sync-state", "xxxxx"]);
         expect(actual).toBeUndefined();
-        teardown();
       });
     });
     describe("save and load", () => {
-      it("should return data that was saved", async () => {
-        const { adapter, teardown } = await setup();
+      it("should return data that was saved", async ({ adapter }) => {
         await adapter.save(["storage-adapter-id"], PAYLOAD_A());
         const actual = await adapter.load(["storage-adapter-id"]);
         expect(actual).toStrictEqual(PAYLOAD_A());
-        teardown();
       });
-      it("should work with composite keys", async () => {
-        const { adapter, teardown } = await setup();
+      it("should work with composite keys", async ({ adapter }) => {
         await adapter.save(["AAAAA", "sync-state", "xxxxx"], PAYLOAD_A());
         const actual = await adapter.load(["AAAAA", "sync-state", "xxxxx"]);
         expect(actual).toStrictEqual(PAYLOAD_A());
-        teardown();
       });
-      it("should work with a large payload", async () => {
-        const { adapter, teardown } = await setup();
+      it("should work with a large payload", async ({ adapter }) => {
         await adapter.save(["AAAAA", "sync-state", "xxxxx"], LARGE_PAYLOAD);
         const actual = await adapter.load(["AAAAA", "sync-state", "xxxxx"]);
         expect(actual).toStrictEqual(LARGE_PAYLOAD);
-        teardown();
       });
     });
     describe("loadRange", () => {
-      it("should return an empty array if there is no data", async () => {
-        const { adapter, teardown } = await setup();
+      it("should return an empty array if there is no data", async ({ adapter, }) => {
         expect(await adapter.loadRange(["AAAAA"])).toStrictEqual([]);
-        teardown();
       });
     });
     describe("save and loadRange", () => {
-      it("should return all the data that matches the key", async () => {
-        const { adapter, teardown } = await setup();
+      it("should return all the data that matches the key", async ({ adapter, }) => {
         await adapter.save(["AAAAA", "sync-state", "xxxxx"], PAYLOAD_A());
         await adapter.save(["AAAAA", "snapshot", "yyyyy"], PAYLOAD_B());
         await adapter.save(["AAAAA", "sync-state", "zzzzz"], PAYLOAD_C());
-        expect(await adapter.loadRange(["AAAAA"])).toStrictEqual(…
+        expect(await adapter.loadRange(["AAAAA"])).toStrictEqual([
          { key: ["AAAAA", "sync-state", "xxxxx"], data: PAYLOAD_A() },
          { key: ["AAAAA", "snapshot", "yyyyy"], data: PAYLOAD_B() },
          { key: ["AAAAA", "sync-state", "zzzzz"], data: PAYLOAD_C() },
-        ])
-        expect(await adapter.loadRange(["AAAAA", "sync-state"])).toStrictEqual(…
+        ]);
+        expect(await adapter.loadRange(["AAAAA", "sync-state"])).toStrictEqual([
          { key: ["AAAAA", "sync-state", "xxxxx"], data: PAYLOAD_A() },
          { key: ["AAAAA", "sync-state", "zzzzz"], data: PAYLOAD_C() },
-        ])
-        teardown();
+        ]);
       });
-      it("should only load values that match they key", async () => {
-        const { adapter, teardown } = await setup();
+      it("should only load values that match they key", async ({ adapter }) => {
         await adapter.save(["AAAAA", "sync-state", "xxxxx"], PAYLOAD_A());
         await adapter.save(["BBBBB", "sync-state", "zzzzz"], PAYLOAD_C());
         const actual = await adapter.loadRange(["AAAAA"]);
-        expect(actual).toStrictEqual(…
+        expect(actual).toStrictEqual([
          { key: ["AAAAA", "sync-state", "xxxxx"], data: PAYLOAD_A() },
-        ])
-        expect(actual).toStrictEqual(expect.not.arrayContaining([
-          { key: ["BBBBB", "sync-state", "zzzzz"], data: PAYLOAD_C() },
-        ]));
-        teardown();
+        ]);
       });
     });
     describe("save and remove", () => {
-      it("after removing, should be empty", async () => {
-        const { adapter, teardown } = await setup();
+      it("after removing, should be empty", async ({ adapter }) => {
         await adapter.save(["AAAAA", "snapshot", "xxxxx"], PAYLOAD_A());
         await adapter.remove(["AAAAA", "snapshot", "xxxxx"]);
         expect(await adapter.loadRange(["AAAAA"])).toStrictEqual([]);
         expect(await adapter.load(["AAAAA", "snapshot", "xxxxx"])).toBeUndefined();
-        teardown();
       });
     });
     describe("save and save", () => {
-      it("should overwrite data saved with the same key", async () => {
-        const { adapter, teardown } = await setup();
+      it("should overwrite data saved with the same key", async ({ adapter, }) => {
         await adapter.save(["AAAAA", "sync-state", "xxxxx"], PAYLOAD_A());
         await adapter.save(["AAAAA", "sync-state", "xxxxx"], PAYLOAD_B());
         expect(await adapter.loadRange(["AAAAA", "sync-state"])).toStrictEqual([
          { key: ["AAAAA", "sync-state", "xxxxx"], data: PAYLOAD_B() },
         ]);
-        teardown();
       });
     });
     describe("removeRange", () => {
-      it("should remove a range of records", async () => {
-        const { adapter, teardown } = await setup();
+      it("should remove a range of records", async ({ adapter }) => {
         await adapter.save(["AAAAA", "sync-state", "xxxxx"], PAYLOAD_A());
         await adapter.save(["AAAAA", "snapshot", "yyyyy"], PAYLOAD_B());
         await adapter.save(["AAAAA", "sync-state", "zzzzz"], PAYLOAD_C());
@@ -109,10 +89,8 @@ export function runStorageAdapterTests(_setup, title)
         expect(await adapter.loadRange(["AAAAA"])).toStrictEqual([
          { key: ["AAAAA", "snapshot", "yyyyy"], data: PAYLOAD_B() },
         ]);
-        teardown();
       });
-      it("should not remove records that don't match", async () => {
-        const { adapter, teardown } = await setup();
+      it("should not remove records that don't match", async ({ adapter }) => {
         await adapter.save(["AAAAA", "sync-state", "xxxxx"], PAYLOAD_A());
         await adapter.save(["BBBBB", "sync-state", "zzzzz"], PAYLOAD_B());
         await adapter.removeRange(["AAAAA"]);
@@ -120,7 +98,6 @@ export function runStorageAdapterTests(_setup, title)
         expect(actual).toStrictEqual([
          { key: ["BBBBB", "sync-state", "zzzzz"], data: PAYLOAD_B() },
         ]);
-        teardown();
       });
     });
   });
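With the reworked harness, `setup` now runs in a `beforeEach` and the returned `teardown` may be async. A sketch of plugging a hand-rolled in-memory adapter into the suite; the adapter class is an illustration, not something the package ships, and the import path may differ in your version:

```ts
import { runStorageAdapterTests } from "@automerge/automerge-repo/helpers/tests/storage-adapter-tests.js"

// Toy adapter covering the methods the suite exercises:
// load, save, remove, loadRange, removeRange (keys are string arrays).
class InMemoryStorageAdapter {
  #data = new Map<string, Uint8Array>()

  async load(key: string[]) {
    return this.#data.get(key.join("/"))
  }
  async save(key: string[], data: Uint8Array) {
    this.#data.set(key.join("/"), data)
  }
  async remove(key: string[]) {
    this.#data.delete(key.join("/"))
  }
  async loadRange(prefix: string[]) {
    const p = prefix.join("/")
    return [...this.#data.entries()]
      .filter(([k]) => k === p || k.startsWith(p + "/"))
      .map(([k, data]) => ({ key: k.split("/"), data }))
  }
  async removeRange(prefix: string[]) {
    const p = prefix.join("/")
    for (const k of [...this.#data.keys()]) {
      if (k === p || k.startsWith(p + "/")) this.#data.delete(k)
    }
  }
}

runStorageAdapterTests(
  async () => ({
    adapter: new InMemoryStorageAdapter(),
    teardown: async () => {}, // teardown may now return a Promise
  }),
  "in-memory example"
)
```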
package/dist/index.d.ts
CHANGED
@@ -26,7 +26,7 @@
  * ```
  */
 export { DocHandle } from "./DocHandle.js";
-export { isValidAutomergeUrl, isValidDocumentId, parseAutomergeUrl, stringifyAutomergeUrl, interpretAsDocumentId, generateAutomergeUrl, } from "./AutomergeUrl.js";
+export { isValidAutomergeUrl, isValidDocumentId, parseAutomergeUrl, stringifyAutomergeUrl, interpretAsDocumentId, generateAutomergeUrl, encodeHeads, decodeHeads, } from "./AutomergeUrl.js";
 export { Repo } from "./Repo.js";
 export { NetworkAdapter } from "./network/NetworkAdapter.js";
 export type { NetworkAdapterInterface } from "./network/NetworkAdapterInterface.js";
package/dist/index.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;;;;;;;;;;;;GA0BG;AAEH,OAAO,EAAE,SAAS,EAAE,MAAM,gBAAgB,CAAA;AAC1C,OAAO,EACL,mBAAmB,EACnB,iBAAiB,EACjB,iBAAiB,EACjB,qBAAqB,EACrB,qBAAqB,EACrB,oBAAoB,…
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;;;;;;;;;;;;GA0BG;AAEH,OAAO,EAAE,SAAS,EAAE,MAAM,gBAAgB,CAAA;AAC1C,OAAO,EACL,mBAAmB,EACnB,iBAAiB,EACjB,iBAAiB,EACjB,qBAAqB,EACrB,qBAAqB,EACrB,oBAAoB,EACpB,WAAW,EACX,WAAW,GACZ,MAAM,mBAAmB,CAAA;AAC1B,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAA;AAChC,OAAO,EAAE,cAAc,EAAE,MAAM,6BAA6B,CAAA;AAC5D,YAAY,EAAE,uBAAuB,EAAE,MAAM,sCAAsC,CAAA;AACnF,OAAO,EAAE,aAAa,EAAE,MAAM,uBAAuB,CAAA;AACrD,OAAO,EAAE,cAAc,EAAE,MAAM,6BAA6B,CAAA;AAC5D,YAAY,EAAE,uBAAuB,EAAE,MAAM,sCAAsC,CAAA;AAEnF,eAAe;AACf,OAAO,KAAK,IAAI,MAAM,mBAAmB,CAAA;AAIzC,YAAY,EACV,sBAAsB,EACtB,sBAAsB,EACtB,6BAA6B,EAC7B,gCAAgC,EAChC,2BAA2B,EAC3B,eAAe,EACf,gBAAgB,EAChB,wCAAwC,EACxC,WAAW,GACZ,MAAM,gBAAgB,CAAA;AAEvB,YAAY,EACV,qBAAqB,EACrB,eAAe,EACf,UAAU,EACV,UAAU,EACV,WAAW,GACZ,MAAM,WAAW,CAAA;AAElB,YAAY,EACV,oBAAoB,EACpB,WAAW,EACX,oBAAoB,EACpB,uBAAuB,EACvB,YAAY,GACb,MAAM,sCAAsC,CAAA;AAE7C,YAAY,EACV,0BAA0B,EAC1B,gBAAgB,EAChB,OAAO,EACP,WAAW,EACX,cAAc,EACd,WAAW,GACZ,MAAM,uBAAuB,CAAA;AAE9B,YAAY,EACV,KAAK,EACL,SAAS,EACT,SAAS,EACT,UAAU,EACV,SAAS,GACV,MAAM,oBAAoB,CAAA;AAE3B,cAAc,YAAY,CAAA;AAG1B,OAAO,EAAE,OAAO,EAAE,SAAS,EAAE,MAAM,gCAAgC,CAAA;AAGnE,YAAY,EACV,GAAG,EACH,KAAK,EACL,KAAK,EACL,aAAa,EACb,IAAI,EACJ,OAAO,EACP,MAAM,EACN,QAAQ,EACR,IAAI,EACJ,OAAO,EACP,SAAS,EACT,SAAS,EACT,MAAM,GACP,MAAM,gCAAgC,CAAA;AAIvC,OAAO,EACL,UAAU,EACV,aAAa,EACb,YAAY,EACZ,IAAI,EACJ,YAAY,GACb,MAAM,gCAAgC,CAAA;AAKvC,OAAO,EACL,SAAS,EACT,iBAAiB,EACjB,MAAM,EACN,UAAU,EACV,QAAQ,EACR,QAAQ,EACR,IAAI,EACJ,MAAM,GACP,MAAM,gCAAgC,CAAA"}
package/dist/index.js
CHANGED
@@ -26,7 +26,7 @@
  * ```
  */
 export { DocHandle } from "./DocHandle.js";
-export { isValidAutomergeUrl, isValidDocumentId, parseAutomergeUrl, stringifyAutomergeUrl, interpretAsDocumentId, generateAutomergeUrl, } from "./AutomergeUrl.js";
+export { isValidAutomergeUrl, isValidDocumentId, parseAutomergeUrl, stringifyAutomergeUrl, interpretAsDocumentId, generateAutomergeUrl, encodeHeads, decodeHeads, } from "./AutomergeUrl.js";
 export { Repo } from "./Repo.js";
 export { NetworkAdapter } from "./network/NetworkAdapter.js";
 export { isRepoMessage } from "./network/messages.js";
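The newly exported `encodeHeads`/`decodeHeads` convert between the heads Automerge reports and the URL-safe `UrlHeads` form that repo internals (`headsAreSame`, `setRemoteHeads` in Repo.js above) now expect. A sketch that assumes only that the two functions are inverses:

```ts
import { encodeHeads, decodeHeads } from "@automerge/automerge-repo"
import { next as A } from "@automerge/automerge"

const doc = A.from({ count: 0 })    // placeholder document
const raw = A.getHeads(doc)         // heads as Automerge reports them
const urlHeads = encodeHeads(raw)   // UrlHeads, the form used in heads-bearing URLs
console.log(decodeHeads(urlHeads))  // assumed inverse: back to the raw heads
```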
package/dist/storage/StorageSubsystem.d.ts
CHANGED

@@ -2,11 +2,20 @@ import * as A from "@automerge/automerge/slim/next";
 import { type DocumentId } from "../types.js";
 import { StorageAdapterInterface } from "./StorageAdapterInterface.js";
 import { StorageId } from "./types.js";
+import { EventEmitter } from "eventemitter3";
+type StorageSubsystemEvents = {
+    "document-loaded": (arg: {
+        documentId: DocumentId;
+        durationMillis: number;
+        numOps: number;
+        numChanges: number;
+    }) => void;
+};
 /**
  * The storage subsystem is responsible for saving and loading Automerge documents to and from
  * storage adapter. It also provides a generic key/value storage interface for other uses.
  */
-export declare class StorageSubsystem {
+export declare class StorageSubsystem extends EventEmitter<StorageSubsystemEvents> {
     #private;
     constructor(storageAdapter: StorageAdapterInterface);
     id(): Promise<StorageId>;
@@ -49,4 +58,5 @@ export declare class StorageSubsystem {
     loadSyncState(documentId: DocumentId, storageId: StorageId): Promise<A.SyncState | undefined>;
     saveSyncState(documentId: DocumentId, storageId: StorageId, syncState: A.SyncState): Promise<void>;
 }
+export {};
 //# sourceMappingURL=StorageSubsystem.d.ts.map

package/dist/storage/StorageSubsystem.d.ts.map
CHANGED

@@ -1 +1 @@
-{"version":3,"file":"StorageSubsystem.d.ts","sourceRoot":"","sources":["../../src/storage/StorageSubsystem.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,CAAC,MAAM,gCAAgC,CAAA;AAInD,OAAO,EAAE,KAAK,UAAU,EAAE,MAAM,aAAa,CAAA;AAC7C,OAAO,EAAE,uBAAuB,EAAE,MAAM,8BAA8B,CAAA;AACtE,OAAO,EAAyB,SAAS,EAAE,MAAM,YAAY,CAAA;…
+{"version":3,"file":"StorageSubsystem.d.ts","sourceRoot":"","sources":["../../src/storage/StorageSubsystem.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,CAAC,MAAM,gCAAgC,CAAA;AAInD,OAAO,EAAE,KAAK,UAAU,EAAE,MAAM,aAAa,CAAA;AAC7C,OAAO,EAAE,uBAAuB,EAAE,MAAM,8BAA8B,CAAA;AACtE,OAAO,EAAyB,SAAS,EAAE,MAAM,YAAY,CAAA;AAI7D,OAAO,EAAE,YAAY,EAAE,MAAM,eAAe,CAAA;AAG5C,KAAK,sBAAsB,GAAG;IAC5B,iBAAiB,EAAE,CAAC,GAAG,EAAE;QACvB,UAAU,EAAE,UAAU,CAAA;QACtB,cAAc,EAAE,MAAM,CAAA;QACtB,MAAM,EAAE,MAAM,CAAA;QACd,UAAU,EAAE,MAAM,CAAA;KACnB,KAAK,IAAI,CAAA;CACX,CAAA;AAED;;;GAGG;AACH,qBAAa,gBAAiB,SAAQ,YAAY,CAAC,sBAAsB,CAAC;;gBAe5D,cAAc,EAAE,uBAAuB;IAK7C,EAAE,IAAI,OAAO,CAAC,SAAS,CAAC;IA2B9B,kCAAkC;IAC5B,IAAI;IACR,iFAAiF;IACjF,SAAS,EAAE,MAAM;IAEjB,yFAAyF;IACzF,GAAG,EAAE,MAAM,GACV,OAAO,CAAC,UAAU,GAAG,SAAS,CAAC;IAKlC,gCAAgC;IAC1B,IAAI;IACR,iFAAiF;IACjF,SAAS,EAAE,MAAM;IAEjB,yFAAyF;IACzF,GAAG,EAAE,MAAM;IAEX,sCAAsC;IACtC,IAAI,EAAE,UAAU,GACf,OAAO,CAAC,IAAI,CAAC;IAKhB,oCAAoC;IAC9B,MAAM;IACV,iFAAiF;IACjF,SAAS,EAAE,MAAM;IAEjB,2FAA2F;IAC3F,GAAG,EAAE,MAAM,GACV,OAAO,CAAC,IAAI,CAAC;IAOhB;;OAEG;IACG,OAAO,CAAC,CAAC,EAAE,UAAU,EAAE,UAAU,GAAG,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC;IA0ClE;;;;;;OAMG;IACG,OAAO,CAAC,UAAU,EAAE,UAAU,EAAE,GAAG,EAAE,CAAC,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC;IAczE;;OAEG;IACG,SAAS,CAAC,UAAU,EAAE,UAAU;IAkEhC,aAAa,CACjB,UAAU,EAAE,UAAU,EACtB,SAAS,EAAE,SAAS,GACnB,OAAO,CAAC,CAAC,CAAC,SAAS,GAAG,SAAS,CAAC;IAW7B,aAAa,CACjB,UAAU,EAAE,UAAU,EACtB,SAAS,EAAE,SAAS,EACpB,SAAS,EAAE,CAAC,CAAC,SAAS,GACrB,OAAO,CAAC,IAAI,CAAC;CA8CjB"}
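Since `StorageSubsystem` now extends `EventEmitter`, load metrics can be observed directly as well as through the repo's `doc-metrics` forwarding (see Repo.js above). A sketch, assuming the subsystem module is reachable in your build (the diff does not show it being re-exported from the package root) and given any storage adapter implementation:

```ts
// Illustrative deep import; StorageSubsystem may not be part of the public export map.
import { StorageSubsystem } from "@automerge/automerge-repo/dist/storage/StorageSubsystem.js"
import type { StorageAdapterInterface } from "@automerge/automerge-repo" // type export assumed

declare const adapter: StorageAdapterInterface // any adapter implementation

const storage = new StorageSubsystem(adapter)
storage.on("document-loaded", ({ documentId, durationMillis, numOps, numChanges }) => {
  // payload fields per the StorageSubsystemEvents type above
  console.log(`loaded ${documentId} in ${durationMillis}ms (${numChanges} changes, ${numOps} ops)`)
})
```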