@automerge/automerge-repo 2.0.0-alpha.13 → 2.0.0-alpha.16
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/AutomergeUrl.d.ts +19 -4
- package/dist/AutomergeUrl.d.ts.map +1 -1
- package/dist/AutomergeUrl.js +71 -24
- package/dist/DocHandle.d.ts +21 -17
- package/dist/DocHandle.d.ts.map +1 -1
- package/dist/DocHandle.js +83 -26
- package/dist/RemoteHeadsSubscriptions.d.ts +4 -4
- package/dist/RemoteHeadsSubscriptions.d.ts.map +1 -1
- package/dist/RemoteHeadsSubscriptions.js +4 -1
- package/dist/Repo.d.ts +11 -2
- package/dist/Repo.d.ts.map +1 -1
- package/dist/Repo.js +19 -14
- package/dist/helpers/bufferFromHex.d.ts +3 -0
- package/dist/helpers/bufferFromHex.d.ts.map +1 -0
- package/dist/helpers/bufferFromHex.js +13 -0
- package/dist/helpers/headsAreSame.d.ts +2 -2
- package/dist/helpers/headsAreSame.d.ts.map +1 -1
- package/dist/helpers/mergeArrays.d.ts +1 -1
- package/dist/helpers/mergeArrays.d.ts.map +1 -1
- package/dist/helpers/tests/storage-adapter-tests.d.ts.map +1 -1
- package/dist/helpers/tests/storage-adapter-tests.js +6 -9
- package/dist/storage/StorageSubsystem.d.ts.map +1 -1
- package/dist/storage/StorageSubsystem.js +2 -1
- package/dist/synchronizer/CollectionSynchronizer.d.ts +2 -2
- package/dist/synchronizer/CollectionSynchronizer.d.ts.map +1 -1
- package/dist/synchronizer/CollectionSynchronizer.js +16 -2
- package/dist/synchronizer/Synchronizer.d.ts +3 -0
- package/dist/synchronizer/Synchronizer.d.ts.map +1 -1
- package/package.json +2 -2
- package/src/AutomergeUrl.ts +103 -26
- package/src/DocHandle.ts +130 -37
- package/src/RemoteHeadsSubscriptions.ts +11 -8
- package/src/Repo.ts +41 -13
- package/src/helpers/bufferFromHex.ts +14 -0
- package/src/helpers/headsAreSame.ts +2 -2
- package/src/helpers/tests/storage-adapter-tests.ts +13 -24
- package/src/storage/StorageSubsystem.ts +3 -1
- package/src/synchronizer/CollectionSynchronizer.ts +19 -3
- package/src/synchronizer/Synchronizer.ts +12 -7
- package/test/AutomergeUrl.test.ts +130 -0
- package/test/DocHandle.test.ts +70 -4
- package/test/DocSynchronizer.test.ts +10 -3
- package/test/Repo.test.ts +155 -3
package/dist/Repo.d.ts
CHANGED
```diff
@@ -7,7 +7,7 @@ import { StorageSubsystem } from "./storage/StorageSubsystem.js";
 import { StorageId } from "./storage/types.js";
 import { CollectionSynchronizer } from "./synchronizer/CollectionSynchronizer.js";
 import { DocSyncMetrics } from "./synchronizer/Synchronizer.js";
-import type { AnyDocumentId, DocumentId, PeerId } from "./types.js";
+import type { AnyDocumentId, AutomergeUrl, DocumentId, PeerId } from "./types.js";
 /** A Repo is a collection of documents with networking, syncing, and storage capabilities. */
 /** The `Repo` is the main entry point of this library
  *
@@ -33,7 +33,7 @@ export declare class Repo extends EventEmitter<RepoEvents> {
     /** maps peer id to to persistence information (storageId, isEphemeral), access by collection synchronizer */
     /** @hidden */
     peerMetadataByPeerId: Record<PeerId, PeerMetadata>;
-    constructor({ storage, network, peerId, sharePolicy, isEphemeral, enableRemoteHeadsGossiping, }?: RepoConfig);
+    constructor({ storage, network, peerId, sharePolicy, isEphemeral, enableRemoteHeadsGossiping, denylist, }?: RepoConfig);
     /** Returns all the handles we have cached. */
     get handles(): Record<DocumentId, DocHandle<any>>;
     /** Returns a list of all connected peer ids */
@@ -126,6 +126,12 @@ export interface RepoConfig {
      * Whether to enable the experimental remote heads gossiping feature
      */
     enableRemoteHeadsGossiping?: boolean;
+    /**
+     * A list of automerge URLs which should never be loaded regardless of what
+     * messages are received or what the share policy is. This is useful to avoid
+     * loading documents that are known to be too resource intensive.
+     */
+    denylist?: AutomergeUrl[];
 }
 /** A function that determines whether we should share a document with a peer
  *
@@ -157,5 +163,8 @@ export type DocMetrics = DocSyncMetrics | {
     durationMillis: number;
     numOps: number;
     numChanges: number;
+} | {
+    type: "doc-denied";
+    documentId: DocumentId;
 };
 //# sourceMappingURL=Repo.d.ts.map
```
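The new `denylist` option goes straight into the `Repo` constructor. A minimal sketch of opting a document out of syncing; the URL here is a made-up placeholder:

```ts
import { Repo } from "@automerge/automerge-repo"
import type { AutomergeUrl } from "@automerge/automerge-repo"

// Hypothetical URL of a document known to be too expensive to load.
const hugeDocUrl = "automerge:45NHRvxDfc5cQdqcYARqSTSAWbVs" as AutomergeUrl

const repo = new Repo({
  network: [],
  // Sync messages for this document are answered with "doc-unavailable"
  // (and a "doc-denied" metric) instead of loading it from storage.
  denylist: [hugeDocUrl],
})
```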
package/dist/Repo.d.ts.map
CHANGED
```diff
@@ -1 +1 @@
-{"version":3,"file":"Repo.d.ts","sourceRoot":"","sources":["../src/Repo.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,YAAY,EAAE,MAAM,eAAe,CAAA;
+{"version":3,"file":"Repo.d.ts","sourceRoot":"","sources":["../src/Repo.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,YAAY,EAAE,MAAM,eAAe,CAAA;AAQ5C,OAAO,EAEL,SAAS,EAKV,MAAM,gBAAgB,CAAA;AAIvB,OAAO,EACL,uBAAuB,EACvB,KAAK,YAAY,EAClB,MAAM,sCAAsC,CAAA;AAC7C,OAAO,EAAE,gBAAgB,EAAE,MAAM,+BAA+B,CAAA;AAEhE,OAAO,EAAE,uBAAuB,EAAE,MAAM,sCAAsC,CAAA;AAC9E,OAAO,EAAE,gBAAgB,EAAE,MAAM,+BAA+B,CAAA;AAChE,OAAO,EAAE,SAAS,EAAE,MAAM,oBAAoB,CAAA;AAC9C,OAAO,EAAE,sBAAsB,EAAE,MAAM,0CAA0C,CAAA;AACjF,OAAO,EACL,cAAc,EAEf,MAAM,gCAAgC,CAAA;AACvC,OAAO,KAAK,EACV,aAAa,EACb,YAAY,EACZ,UAAU,EACV,MAAM,EACP,MAAM,YAAY,CAAA;AAMnB,8FAA8F;AAC9F;;;;;;GAMG;AACH,qBAAa,IAAK,SAAQ,YAAY,CAAC,UAAU,CAAC;;IAGhD,cAAc;IACd,gBAAgB,EAAE,gBAAgB,CAAA;IAClC,cAAc;IACd,gBAAgB,CAAC,EAAE,gBAAgB,CAAA;IAEnC,mDAAmD;IACnD,cAAc;IACd,gBAAgB,SAAM;IAItB,cAAc;IACd,YAAY,EAAE,sBAAsB,CAAA;IAEpC,sDAAsD;IACtD,cAAc;IACd,WAAW,EAAE,WAAW,CAAmB;IAE3C,8GAA8G;IAC9G,cAAc;IACd,oBAAoB,EAAE,MAAM,CAAC,MAAM,EAAE,YAAY,CAAC,CAAK;gBAK3C,EACV,OAAO,EACP,OAAY,EACZ,MAAuB,EACvB,WAAW,EACX,WAAmC,EACnC,0BAAkC,EAClC,QAAa,GACd,GAAE,UAAe;IAoQlB,8CAA8C;IAC9C,IAAI,OAAO,uCAEV;IAED,+CAA+C;IAC/C,IAAI,KAAK,IAAI,MAAM,EAAE,CAEpB;IAED,kBAAkB,CAAC,MAAM,EAAE,MAAM,GAAG,SAAS,GAAG,SAAS;IAIzD;;;;OAIG;IACH,MAAM,CAAC,CAAC,EAAE,YAAY,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC,CAAC,CAAC;IAuBzC;;;;;;;;;;;;;;OAcG;IACH,KAAK,CAAC,CAAC,EAAE,YAAY,EAAE,SAAS,CAAC,CAAC,CAAC;IAuBnC;;;OAGG;IACH,IAAI,CAAC,CAAC;IACJ,sDAAsD;IACtD,EAAE,EAAE,aAAa,GAChB,SAAS,CAAC,CAAC,CAAC;IAoDf,MAAM;IACJ,oDAAoD;IACpD,EAAE,EAAE,aAAa;IAWnB;;;;;;OAMG;IACG,MAAM,CAAC,EAAE,EAAE,aAAa,GAAG,OAAO,CAAC,UAAU,GAAG,SAAS,CAAC;IAShE;;;OAGG;IACH,MAAM,CAAC,CAAC,EAAE,MAAM,EAAE,UAAU;IAY5B,kBAAkB,YAAa,SAAS,EAAE,UASzC;IAED,SAAS,QAAa,OAAO,CAAC,SAAS,GAAG,SAAS,CAAC,CAMnD;IAED;;;;;OAKG;IACG,KAAK,CAAC,SAAS,CAAC,EAAE,UAAU,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC;IAkBpD;;;;;OAKG;IACG,eAAe,CAAC,UAAU,EAAE,UAAU;IA2B5C,QAAQ,IAAI,OAAO,CAAC,IAAI,CAAC;IAOzB,OAAO,IAAI;QAAE,SAAS,EAAE;YAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;SAAE,CAAA;KAAE;CAGjD;AAED,MAAM,WAAW,UAAU;IACzB,4BAA4B;IAC5B,MAAM,CAAC,EAAE,MAAM,CAAA;IAEf;8DAC0D;IAC1D,WAAW,CAAC,EAAE,OAAO,CAAA;IAErB,gDAAgD;IAChD,OAAO,CAAC,EAAE,uBAAuB,CAAA;IAEjC,iEAAiE;IACjE,OAAO,CAAC,EAAE,uBAAuB,EAAE,CAAA;IAEnC;;;OAGG;IACH,WAAW,CAAC,EAAE,WAAW,CAAA;IAEzB;;OAEG;IACH,0BAA0B,CAAC,EAAE,OAAO,CAAA;IAEpC;;;;OAIG;IACH,QAAQ,CAAC,EAAE,YAAY,EAAE,CAAA;CAC1B;AAED;;;;;;;KAOK;AACL,MAAM,MAAM,WAAW,GAAG,CACxB,MAAM,EAAE,MAAM,EACd,UAAU,CAAC,EAAE,UAAU,KACpB,OAAO,CAAC,OAAO,CAAC,CAAA;AAGrB,MAAM,WAAW,UAAU;IACzB,+CAA+C;IAC/C,QAAQ,EAAE,CAAC,GAAG,EAAE,eAAe,KAAK,IAAI,CAAA;IACxC,6BAA6B;IAC7B,iBAAiB,EAAE,CAAC,GAAG,EAAE,qBAAqB,KAAK,IAAI,CAAA;IACvD,4FAA4F;IAC5F,sBAAsB,EAAE,CAAC,GAAG,EAAE,qBAAqB,KAAK,IAAI,CAAA;IAC5D,aAAa,EAAE,CAAC,GAAG,EAAE,UAAU,KAAK,IAAI,CAAA;CACzC;AAED,MAAM,WAAW,eAAe;IAC9B,MAAM,EAAE,SAAS,CAAC,GAAG,CAAC,CAAA;CACvB;AAED,MAAM,WAAW,qBAAqB;IACpC,UAAU,EAAE,UAAU,CAAA;CACvB;AAED,MAAM,MAAM,UAAU,GAClB,cAAc,GACd;IACE,IAAI,EAAE,YAAY,CAAA;IAClB,UAAU,EAAE,UAAU,CAAA;IACtB,cAAc,EAAE,MAAM,CAAA;IACtB,MAAM,EAAE,MAAM,CAAA;IACd,UAAU,EAAE,MAAM,CAAA;CACnB,GACD;IACE,IAAI,EAAE,YAAY,CAAA;IAClB,UAAU,EAAE,UAAU,CAAA;CACvB,CAAA"}
```
package/dist/Repo.js
CHANGED
```diff
@@ -1,7 +1,7 @@
 import { next as Automerge } from "@automerge/automerge/slim";
 import debug from "debug";
 import { EventEmitter } from "eventemitter3";
-import { generateAutomergeUrl, interpretAsDocumentId, parseAutomergeUrl, } from "./AutomergeUrl.js";
+import { encodeHeads, generateAutomergeUrl, interpretAsDocumentId, isValidAutomergeUrl, parseAutomergeUrl, } from "./AutomergeUrl.js";
 import { DELETED, DocHandle, READY, UNAVAILABLE, UNLOADED, } from "./DocHandle.js";
 import { RemoteHeadsSubscriptions } from "./RemoteHeadsSubscriptions.js";
 import { headsAreSame } from "./helpers/headsAreSame.js";
@@ -40,7 +40,7 @@ export class Repo extends EventEmitter {
     peerMetadataByPeerId = {};
     #remoteHeadsSubscriptions = new RemoteHeadsSubscriptions();
     #remoteHeadsGossipingEnabled = false;
-    constructor({ storage, network = [], peerId = randomPeerId(), sharePolicy, isEphemeral = storage === undefined, enableRemoteHeadsGossiping = false, } = {}) {
+    constructor({ storage, network = [], peerId = randomPeerId(), sharePolicy, isEphemeral = storage === undefined, enableRemoteHeadsGossiping = false, denylist = [], } = {}) {
         super();
         this.#remoteHeadsGossipingEnabled = enableRemoteHeadsGossiping;
         this.#log = debug(`automerge-repo:repo`);
@@ -56,7 +56,7 @@
         });
         // SYNCHRONIZER
         // The synchronizer uses the network subsystem to keep documents in sync with peers.
-        this.synchronizer = new CollectionSynchronizer(this);
+        this.synchronizer = new CollectionSynchronizer(this, denylist);
         // When the synchronizer emits messages, send them to peers
         this.synchronizer.on("message", message => {
             this.#log(`sending ${message.type} message to ${message.targetId}`);
@@ -119,11 +119,12 @@
             }
             const heads = handle.getRemoteHeads(storageId);
             const haveHeadsChanged = message.syncState.theirHeads &&
-                (!heads ||
+                (!heads ||
+                    !headsAreSame(heads, encodeHeads(message.syncState.theirHeads)));
             if (haveHeadsChanged && message.syncState.theirHeads) {
-                handle.setRemoteHeads(storageId, message.syncState.theirHeads);
+                handle.setRemoteHeads(storageId, encodeHeads(message.syncState.theirHeads));
                 if (storageId && this.#remoteHeadsGossipingEnabled) {
-                    this.#remoteHeadsSubscriptions.handleImmediateRemoteHeadsChanged(message.documentId, storageId, message.syncState.theirHeads);
+                    this.#remoteHeadsSubscriptions.handleImmediateRemoteHeadsChanged(message.documentId, storageId, encodeHeads(message.syncState.theirHeads));
                 }
             }
         });
@@ -305,18 +306,21 @@
     find(
     /** The url or documentId of the handle to retrieve */
    id) {
-        const documentId =
-
-
-
+        const { documentId, heads } = isValidAutomergeUrl(id)
+            ? parseAutomergeUrl(id)
+            : { documentId: interpretAsDocumentId(id), heads: undefined };
+        const cachedHandle = this.#handleCache[documentId];
+        if (cachedHandle) {
+            if (cachedHandle.isUnavailable()) {
                 // this ensures that the event fires after the handle has been returned
                 setTimeout(() => {
-
-                    handle:
+                    cachedHandle.emit("unavailable", {
+                        handle: cachedHandle,
                    });
                });
            }
-            return
+            // If we already have the handle, return it immediately (or a view of the handle if heads are specified)
+            return heads ? cachedHandle.view(heads) : cachedHandle;
        }
        // If we don't already have the handle, make an empty one and try loading it
        const handle = this.#getHandle({
@@ -345,7 +349,8 @@
            .catch(err => {
            this.#log("error waiting for network", { err });
        });
-        return handle
+        // If we already have the handle, return it immediately (or a view of the handle if heads are specified)
+        return heads ? handle.view(heads) : handle;
    }
    delete(
    /** The url or documentId of the handle to delete */
```
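The practical upshot of the `find` changes: a URL may now carry a heads fragment, and `find` resolves it to a fixed view of the document at those heads. A sketch, assuming `heads()` and `view()` on `DocHandle` behave as the diff above implies:

```ts
import { Repo, stringifyAutomergeUrl } from "@automerge/automerge-repo"

const repo = new Repo({ network: [] })
const handle = repo.create<{ count: number }>()
handle.change(d => (d.count = 1))
const before = handle.heads()! // bs58check-encoded UrlHeads in this release
handle.change(d => (d.count = 2))

// URLs now support a fragment: automerge:<documentId>#<head>|<head>
const urlAtBefore = stringifyAutomergeUrl({
  documentId: handle.documentId,
  heads: before,
})
const frozen = repo.find(urlAtBefore) // equivalent to handle.view(before)
```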
package/dist/helpers/bufferFromHex.d.ts.map
ADDED

```diff
@@ -0,0 +1 @@
+{"version":3,"file":"bufferFromHex.d.ts","sourceRoot":"","sources":["../../src/helpers/bufferFromHex.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,uBAAuB,cAAe,MAAM,KAAG,UAS3D,CAAA;AAED,eAAO,MAAM,qBAAqB,SAAU,UAAU,KAAG,MAExD,CAAA"}
```
package/dist/helpers/bufferFromHex.js
ADDED

```diff
@@ -0,0 +1,13 @@
+export const uint8ArrayFromHexString = (hexString) => {
+    if (hexString.length % 2 !== 0) {
+        throw new Error("Hex string must have an even length");
+    }
+    const bytes = new Uint8Array(hexString.length / 2);
+    for (let i = 0; i < hexString.length; i += 2) {
+        bytes[i >> 1] = parseInt(hexString.slice(i, i + 2), 16);
+    }
+    return bytes;
+};
+export const uint8ArrayToHexString = (data) => {
+    return Array.from(data, byte => byte.toString(16).padStart(2, "0")).join("");
+};
```
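These helpers convert between Automerge's hex-encoded change hashes and raw bytes, which `encodeHeads`/`decodeHeads` then run through bs58check. A quick round-trip using only the two functions above (they are internal helpers; the import path is shown for illustration):

```ts
import {
  uint8ArrayFromHexString,
  uint8ArrayToHexString,
} from "./helpers/bufferFromHex.js"

const bytes = uint8ArrayFromHexString("00ff10") // Uint8Array(3) [0, 255, 16]
const hex = uint8ArrayToHexString(bytes)        // "00ff10"
uint8ArrayFromHexString("abc")                  // throws: odd-length hex string
```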
package/dist/helpers/headsAreSame.d.ts
CHANGED

```diff
@@ -1,3 +1,3 @@
-import {
-export declare const headsAreSame: (a:
+import { UrlHeads } from "../AutomergeUrl.js";
+export declare const headsAreSame: (a: UrlHeads, b: UrlHeads) => boolean;
 //# sourceMappingURL=headsAreSame.d.ts.map
```
package/dist/helpers/headsAreSame.d.ts.map
CHANGED

```diff
@@ -1 +1 @@
-{"version":3,"file":"headsAreSame.d.ts","sourceRoot":"","sources":["../../src/helpers/headsAreSame.ts"],"names":[],"mappings":"
+{"version":3,"file":"headsAreSame.d.ts","sourceRoot":"","sources":["../../src/helpers/headsAreSame.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,QAAQ,EAAE,MAAM,oBAAoB,CAAA;AAE7C,eAAO,MAAM,YAAY,MAAO,QAAQ,KAAK,QAAQ,YAEpD,CAAA"}
```
package/dist/helpers/mergeArrays.d.ts
CHANGED

```diff
@@ -1,2 +1,2 @@
-export declare function mergeArrays(myArrays: Uint8Array[]): Uint8Array
+export declare function mergeArrays(myArrays: Uint8Array[]): Uint8Array<ArrayBuffer>;
 //# sourceMappingURL=mergeArrays.d.ts.map
```
package/dist/helpers/mergeArrays.d.ts.map
CHANGED

```diff
@@ -1 +1 @@
-{"version":3,"file":"mergeArrays.d.ts","sourceRoot":"","sources":["../../src/helpers/mergeArrays.ts"],"names":[],"mappings":"AAAA,wBAAgB,WAAW,CAAC,QAAQ,EAAE,UAAU,EAAE,
+{"version":3,"file":"mergeArrays.d.ts","sourceRoot":"","sources":["../../src/helpers/mergeArrays.ts"],"names":[],"mappings":"AAAA,wBAAgB,WAAW,CAAC,QAAQ,EAAE,UAAU,EAAE,2BAgBjD"}
```
package/dist/helpers/tests/storage-adapter-tests.d.ts.map
CHANGED

```diff
@@ -1 +1 @@
-{"version":3,"file":"storage-adapter-tests.d.ts","sourceRoot":"","sources":["../../../src/helpers/tests/storage-adapter-tests.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,uBAAuB,EAAE,MAAM,0CAA0C,CAAA;AAcvF,wBAAgB,sBAAsB,CAAC,KAAK,EAAE,OAAO,EAAE,KAAK,CAAC,EAAE,MAAM,GAAG,IAAI,
+{"version":3,"file":"storage-adapter-tests.d.ts","sourceRoot":"","sources":["../../../src/helpers/tests/storage-adapter-tests.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,uBAAuB,EAAE,MAAM,0CAA0C,CAAA;AAcvF,wBAAgB,sBAAsB,CAAC,KAAK,EAAE,OAAO,EAAE,KAAK,CAAC,EAAE,MAAM,GAAG,IAAI,CA+H3E;AAID,MAAM,MAAM,OAAO,GAAG,MAAM,OAAO,CAAC;IAClC,OAAO,EAAE,uBAAuB,CAAA;IAChC,QAAQ,CAAC,EAAE,MAAM,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;CACtC,CAAC,CAAA"}
```
package/dist/helpers/tests/storage-adapter-tests.js
CHANGED

```diff
@@ -44,26 +44,23 @@ export function runStorageAdapterTests(setup, title) {
            await adapter.save(["AAAAA", "sync-state", "xxxxx"], PAYLOAD_A());
            await adapter.save(["AAAAA", "snapshot", "yyyyy"], PAYLOAD_B());
            await adapter.save(["AAAAA", "sync-state", "zzzzz"], PAYLOAD_C());
-            expect(await adapter.loadRange(["AAAAA"])).toStrictEqual(
+            expect(await adapter.loadRange(["AAAAA"])).toStrictEqual([
                { key: ["AAAAA", "sync-state", "xxxxx"], data: PAYLOAD_A() },
                { key: ["AAAAA", "snapshot", "yyyyy"], data: PAYLOAD_B() },
                { key: ["AAAAA", "sync-state", "zzzzz"], data: PAYLOAD_C() },
-            ])
-            expect(await adapter.loadRange(["AAAAA", "sync-state"])).toStrictEqual(
+            ]);
+            expect(await adapter.loadRange(["AAAAA", "sync-state"])).toStrictEqual([
                { key: ["AAAAA", "sync-state", "xxxxx"], data: PAYLOAD_A() },
                { key: ["AAAAA", "sync-state", "zzzzz"], data: PAYLOAD_C() },
-            ])
+            ]);
        });
        it("should only load values that match they key", async ({ adapter }) => {
            await adapter.save(["AAAAA", "sync-state", "xxxxx"], PAYLOAD_A());
            await adapter.save(["BBBBB", "sync-state", "zzzzz"], PAYLOAD_C());
            const actual = await adapter.loadRange(["AAAAA"]);
-            expect(actual).toStrictEqual(
+            expect(actual).toStrictEqual([
                { key: ["AAAAA", "sync-state", "xxxxx"], data: PAYLOAD_A() },
-            ])
-            expect(actual).toStrictEqual(expect.not.arrayContaining([
-                { key: ["BBBBB", "sync-state", "zzzzz"], data: PAYLOAD_C() },
-            ]));
+            ]);
        });
    });
    describe("save and remove", () => {
```
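The adapter acceptance suite is invoked the same way as before; only its assertions changed shape. A usage sketch, assuming a hypothetical `MyStorageAdapter` and the same `{ adapter, teardown? }` setup contract as earlier releases (the import path is illustrative):

```ts
import { runStorageAdapterTests } from "@automerge/automerge-repo/helpers/tests/storage-adapter-tests.js"
import { MyStorageAdapter } from "./MyStorageAdapter.js" // hypothetical adapter under test

runStorageAdapterTests(async () => {
  const adapter = new MyStorageAdapter()
  return { adapter } // an optional teardown callback may also be returned
}, "MyStorageAdapter")
```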
package/dist/storage/StorageSubsystem.d.ts.map
CHANGED

```diff
@@ -1 +1 @@
-{"version":3,"file":"StorageSubsystem.d.ts","sourceRoot":"","sources":["../../src/storage/StorageSubsystem.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,CAAC,MAAM,gCAAgC,CAAA;AAInD,OAAO,EAAE,KAAK,UAAU,EAAE,MAAM,aAAa,CAAA;AAC7C,OAAO,EAAE,uBAAuB,EAAE,MAAM,8BAA8B,CAAA;AACtE,OAAO,EAAyB,SAAS,EAAE,MAAM,YAAY,CAAA;AAI7D,OAAO,EAAE,YAAY,EAAE,MAAM,eAAe,CAAA;
+{"version":3,"file":"StorageSubsystem.d.ts","sourceRoot":"","sources":["../../src/storage/StorageSubsystem.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,CAAC,MAAM,gCAAgC,CAAA;AAInD,OAAO,EAAE,KAAK,UAAU,EAAE,MAAM,aAAa,CAAA;AAC7C,OAAO,EAAE,uBAAuB,EAAE,MAAM,8BAA8B,CAAA;AACtE,OAAO,EAAyB,SAAS,EAAE,MAAM,YAAY,CAAA;AAI7D,OAAO,EAAE,YAAY,EAAE,MAAM,eAAe,CAAA;AAG5C,KAAK,sBAAsB,GAAG;IAC5B,iBAAiB,EAAE,CAAC,GAAG,EAAE;QACvB,UAAU,EAAE,UAAU,CAAA;QACtB,cAAc,EAAE,MAAM,CAAA;QACtB,MAAM,EAAE,MAAM,CAAA;QACd,UAAU,EAAE,MAAM,CAAA;KACnB,KAAK,IAAI,CAAA;CACX,CAAA;AAED;;;GAGG;AACH,qBAAa,gBAAiB,SAAQ,YAAY,CAAC,sBAAsB,CAAC;;gBAe5D,cAAc,EAAE,uBAAuB;IAK7C,EAAE,IAAI,OAAO,CAAC,SAAS,CAAC;IA2B9B,kCAAkC;IAC5B,IAAI;IACR,iFAAiF;IACjF,SAAS,EAAE,MAAM;IAEjB,yFAAyF;IACzF,GAAG,EAAE,MAAM,GACV,OAAO,CAAC,UAAU,GAAG,SAAS,CAAC;IAKlC,gCAAgC;IAC1B,IAAI;IACR,iFAAiF;IACjF,SAAS,EAAE,MAAM;IAEjB,yFAAyF;IACzF,GAAG,EAAE,MAAM;IAEX,sCAAsC;IACtC,IAAI,EAAE,UAAU,GACf,OAAO,CAAC,IAAI,CAAC;IAKhB,oCAAoC;IAC9B,MAAM;IACV,iFAAiF;IACjF,SAAS,EAAE,MAAM;IAEjB,2FAA2F;IAC3F,GAAG,EAAE,MAAM,GACV,OAAO,CAAC,IAAI,CAAC;IAOhB;;OAEG;IACG,OAAO,CAAC,CAAC,EAAE,UAAU,EAAE,UAAU,GAAG,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC;IA0ClE;;;;;;OAMG;IACG,OAAO,CAAC,UAAU,EAAE,UAAU,EAAE,GAAG,EAAE,CAAC,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC;IAczE;;OAEG;IACG,SAAS,CAAC,UAAU,EAAE,UAAU;IAkEhC,aAAa,CACjB,UAAU,EAAE,UAAU,EACtB,SAAS,EAAE,SAAS,GACnB,OAAO,CAAC,CAAC,CAAC,SAAS,GAAG,SAAS,CAAC;IAW7B,aAAa,CACjB,UAAU,EAAE,UAAU,EACtB,SAAS,EAAE,SAAS,EACpB,SAAS,EAAE,CAAC,CAAC,SAAS,GACrB,OAAO,CAAC,IAAI,CAAC;CA8CjB"}
```
package/dist/storage/StorageSubsystem.js
CHANGED

```diff
@@ -6,6 +6,7 @@ import { keyHash, headsHash } from "./keyHash.js";
 import { chunkTypeFromKey } from "./chunkTypeFromKey.js";
 import * as Uuid from "uuid";
 import { EventEmitter } from "eventemitter3";
+import { encodeHeads } from "../AutomergeUrl.js";
 /**
  * The storage subsystem is responsible for saving and loading Automerge documents to and from
  * storage adapter. It also provides a generic key/value storage interface for other uses.
@@ -210,7 +211,7 @@ export class StorageSubsystem extends EventEmitter {
            return true;
        }
        const newHeads = A.getHeads(doc);
-        if (headsAreSame(newHeads, oldHeads)) {
+        if (headsAreSame(encodeHeads(newHeads), encodeHeads(oldHeads))) {
            // the document hasn't changed
            return false;
        }
```
package/dist/synchronizer/CollectionSynchronizer.d.ts
CHANGED

```diff
@@ -1,6 +1,6 @@
 import { Repo } from "../Repo.js";
 import { DocMessage } from "../network/messages.js";
-import { DocumentId, PeerId } from "../types.js";
+import { AutomergeUrl, DocumentId, PeerId } from "../types.js";
 import { DocSynchronizer } from "./DocSynchronizer.js";
 import { Synchronizer } from "./Synchronizer.js";
 /** A CollectionSynchronizer is responsible for synchronizing a DocCollection with peers. */
@@ -10,7 +10,7 @@ export declare class CollectionSynchronizer extends Synchronizer {
     /** A map of documentIds to their synchronizers */
     /** @hidden */
     docSynchronizers: Record<DocumentId, DocSynchronizer>;
-    constructor(repo: Repo);
+    constructor(repo: Repo, denylist?: AutomergeUrl[]);
     /**
      * When we receive a sync message for a document we haven't got in memory, we
      * register it with the repo and start synchronizing
```
package/dist/synchronizer/CollectionSynchronizer.d.ts.map
CHANGED

```diff
@@ -1 +1 @@
-{"version":3,"file":"CollectionSynchronizer.d.ts","sourceRoot":"","sources":["../../src/synchronizer/CollectionSynchronizer.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,IAAI,EAAE,MAAM,YAAY,CAAA;AACjC,OAAO,EAAE,UAAU,EAAE,MAAM,wBAAwB,CAAA;AACnD,OAAO,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,aAAa,CAAA;
+{"version":3,"file":"CollectionSynchronizer.d.ts","sourceRoot":"","sources":["../../src/synchronizer/CollectionSynchronizer.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,IAAI,EAAE,MAAM,YAAY,CAAA;AACjC,OAAO,EAAE,UAAU,EAAE,MAAM,wBAAwB,CAAA;AACnD,OAAO,EAAE,YAAY,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,aAAa,CAAA;AAC9D,OAAO,EAAE,eAAe,EAAE,MAAM,sBAAsB,CAAA;AACtD,OAAO,EAAE,YAAY,EAAE,MAAM,mBAAmB,CAAA;AAIhD,4FAA4F;AAC5F,qBAAa,sBAAuB,SAAQ,YAAY;;IAa1C,OAAO,CAAC,IAAI;IATxB,kDAAkD;IAClD,cAAc;IACd,gBAAgB,EAAE,MAAM,CAAC,UAAU,EAAE,eAAe,CAAC,CAAK;gBAOtC,IAAI,EAAE,IAAI,EAAE,QAAQ,GAAE,YAAY,EAAO;IAuD7D;;;OAGG;IACG,cAAc,CAAC,OAAO,EAAE,UAAU;IAsCxC;;OAEG;IACH,WAAW,CAAC,UAAU,EAAE,UAAU;IAalC,cAAc,CAAC,UAAU,EAAE,UAAU;IAIrC,2DAA2D;IAC3D,OAAO,CAAC,MAAM,EAAE,MAAM;IAgBtB,uDAAuD;IACvD,UAAU,CAAC,MAAM,EAAE,MAAM;IASzB,+CAA+C;IAC/C,IAAI,KAAK,IAAI,MAAM,EAAE,CAEpB;IAED,OAAO,IAAI;QACT,CAAC,GAAG,EAAE,MAAM,GAAG;YACb,KAAK,EAAE,MAAM,EAAE,CAAA;YACf,IAAI,EAAE;gBAAE,MAAM,EAAE,MAAM,CAAC;gBAAC,UAAU,EAAE,MAAM,CAAA;aAAE,CAAA;SAC7C,CAAA;KACF;CASF"}
```
package/dist/synchronizer/CollectionSynchronizer.js
CHANGED

```diff
@@ -1,5 +1,5 @@
 import debug from "debug";
-import { stringifyAutomergeUrl } from "../AutomergeUrl.js";
+import { parseAutomergeUrl, stringifyAutomergeUrl } from "../AutomergeUrl.js";
 import { DocSynchronizer } from "./DocSynchronizer.js";
 import { Synchronizer } from "./Synchronizer.js";
 const log = debug("automerge-repo:collectionsync");
@@ -13,9 +13,11 @@ export class CollectionSynchronizer extends Synchronizer {
    docSynchronizers = {};
    /** Used to determine if the document is know to the Collection and a synchronizer exists or is being set up */
    #docSetUp = {};
-    constructor(repo) {
+    #denylist;
+    constructor(repo, denylist = []) {
        super();
        this.repo = repo;
+        this.#denylist = denylist.map(url => parseAutomergeUrl(url).documentId);
    }
    /** Returns a synchronizer for the given document, creating one if it doesn't already exist. */
    #fetchDocSynchronizer(documentId) {
@@ -68,6 +70,18 @@ export class CollectionSynchronizer extends Synchronizer {
        if (!documentId) {
            throw new Error("received a message with an invalid documentId");
        }
+        if (this.#denylist.includes(documentId)) {
+            this.emit("metrics", {
+                type: "doc-denied",
+                documentId,
+            });
+            this.emit("message", {
+                type: "doc-unavailable",
+                documentId,
+                targetId: message.senderId,
+            });
+            return;
+        }
        this.#docSetUp[documentId] = true;
        const docSynchronizer = this.#fetchDocSynchronizer(documentId);
        docSynchronizer.receiveMessage(message);
```
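Taken together: the denying repo records a `doc-denied` metric and replies with `doc-unavailable`, so the requesting peer's handle simply reports unavailable. A sketch of observing both sides, with the repos and URL declared as givens:

```ts
import type { Repo } from "@automerge/automerge-repo"
import type { AutomergeUrl } from "@automerge/automerge-repo"

declare const serverRepo: Repo        // constructed with { denylist: [deniedUrl] }
declare const clientRepo: Repo        // a peer connected to serverRepo
declare const deniedUrl: AutomergeUrl // a URL on the server's denylist

// Denying side: the collection synchronizer emits the new metric.
serverRepo.synchronizer.on("metrics", metric => {
  if (metric.type === "doc-denied") console.log("denied:", metric.documentId)
})

// Requesting side: the document just looks unavailable.
const handle = clientRepo.find(deniedUrl)
handle.on("unavailable", () => console.log("no peer would serve this document"))
```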
package/dist/synchronizer/Synchronizer.d.ts.map
CHANGED

```diff
@@ -1 +1 @@
-{"version":3,"file":"Synchronizer.d.ts","sourceRoot":"","sources":["../../src/synchronizer/Synchronizer.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,MAAM,eAAe,CAAA;AAC5C,OAAO,EACL,eAAe,EACf,cAAc,EACd,WAAW,EACZ,MAAM,wBAAwB,CAAA;AAC/B,OAAO,EAAE,SAAS,EAAE,MAAM,2BAA2B,CAAA;AACrD,OAAO,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,aAAa,CAAA;AAEhD,8BAAsB,YAAa,SAAQ,YAAY,CAAC,kBAAkB,CAAC;IACzE,QAAQ,CAAC,cAAc,CAAC,OAAO,EAAE,WAAW,GAAG,IAAI;CACpD;AAED,MAAM,WAAW,kBAAkB;IACjC,OAAO,EAAE,CAAC,OAAO,EAAE,eAAe,KAAK,IAAI,CAAA;IAC3C,YAAY,EAAE,CAAC,OAAO,EAAE,gBAAgB,KAAK,IAAI,CAAA;IACjD,UAAU,EAAE,CAAC,GAAG,EAAE,cAAc,KAAK,IAAI,CAAA;IACzC,OAAO,EAAE,CAAC,GAAG,EAAE,cAAc,KAAK,IAAI,CAAA;CACvC;AAED,uDAAuD;AACvD,MAAM,WAAW,gBAAgB;IAC/B,MAAM,EAAE,MAAM,CAAA;IACd,UAAU,EAAE,UAAU,CAAA;IACtB,SAAS,EAAE,SAAS,CAAA;CACrB;AAED,MAAM,MAAM,cAAc,
+{"version":3,"file":"Synchronizer.d.ts","sourceRoot":"","sources":["../../src/synchronizer/Synchronizer.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,MAAM,eAAe,CAAA;AAC5C,OAAO,EACL,eAAe,EACf,cAAc,EACd,WAAW,EACZ,MAAM,wBAAwB,CAAA;AAC/B,OAAO,EAAE,SAAS,EAAE,MAAM,2BAA2B,CAAA;AACrD,OAAO,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,aAAa,CAAA;AAEhD,8BAAsB,YAAa,SAAQ,YAAY,CAAC,kBAAkB,CAAC;IACzE,QAAQ,CAAC,cAAc,CAAC,OAAO,EAAE,WAAW,GAAG,IAAI;CACpD;AAED,MAAM,WAAW,kBAAkB;IACjC,OAAO,EAAE,CAAC,OAAO,EAAE,eAAe,KAAK,IAAI,CAAA;IAC3C,YAAY,EAAE,CAAC,OAAO,EAAE,gBAAgB,KAAK,IAAI,CAAA;IACjD,UAAU,EAAE,CAAC,GAAG,EAAE,cAAc,KAAK,IAAI,CAAA;IACzC,OAAO,EAAE,CAAC,GAAG,EAAE,cAAc,KAAK,IAAI,CAAA;CACvC;AAED,uDAAuD;AACvD,MAAM,WAAW,gBAAgB;IAC/B,MAAM,EAAE,MAAM,CAAA;IACd,UAAU,EAAE,UAAU,CAAA;IACtB,SAAS,EAAE,SAAS,CAAA;CACrB;AAED,MAAM,MAAM,cAAc,GACtB;IACE,IAAI,EAAE,sBAAsB,CAAA;IAC5B,UAAU,EAAE,UAAU,CAAA;IACtB,cAAc,EAAE,MAAM,CAAA;IACtB,MAAM,EAAE,MAAM,CAAA;IACd,UAAU,EAAE,MAAM,CAAA;CACnB,GACD;IACE,IAAI,EAAE,YAAY,CAAA;IAClB,UAAU,EAAE,UAAU,CAAA;CACvB,CAAA"}
```
package/package.json
CHANGED
```diff
@@ -1,6 +1,6 @@
 {
   "name": "@automerge/automerge-repo",
-  "version": "2.0.0-alpha.13",
+  "version": "2.0.0-alpha.16",
   "description": "A repository object to manage a collection of automerge documents",
   "repository": "https://github.com/automerge/automerge-repo/tree/master/packages/automerge-repo",
   "author": "Peter van Hardenberg <pvh@pvh.ca>",
@@ -60,5 +60,5 @@
   "publishConfig": {
     "access": "public"
   },
-  "gitHead": "
+  "gitHead": "04e79be28aca7dc566ced3b2c32e05ca5d37ce69"
 }
```
package/src/AutomergeUrl.ts
CHANGED
```diff
@@ -5,25 +5,55 @@ import type {
   DocumentId,
   AnyDocumentId,
 } from "./types.js"
+
 import * as Uuid from "uuid"
 import bs58check from "bs58check"
+import {
+  uint8ArrayFromHexString,
+  uint8ArrayToHexString,
+} from "./helpers/bufferFromHex.js"
+
+import type { Heads as AutomergeHeads } from "@automerge/automerge/slim"
 
 export const urlPrefix = "automerge:"
 
+// We need to define our own version of heads because the AutomergeHeads type is not bs58check encoded
+export type UrlHeads = string[] & { __automergeUrlHeadsBrand: unknown }
+
+interface ParsedAutomergeUrl {
+  /** unencoded DocumentId */
+  binaryDocumentId: BinaryDocumentId
+  /** bs58 encoded DocumentId */
+  documentId: DocumentId
+  /** Optional array of heads, if specified in URL */
+  heads?: UrlHeads
+  /** Optional hex array of heads, in Automerge core format */
+  hexHeads?: string[] // AKA: heads
+}
+
 /** Given an Automerge URL, returns the DocumentId in both base58check-encoded form and binary form */
-export const parseAutomergeUrl = (url: AutomergeUrl) => {
+export const parseAutomergeUrl = (url: AutomergeUrl): ParsedAutomergeUrl => {
+  const [baseUrl, headsSection, ...rest] = url.split("#")
+  if (rest.length > 0) {
+    throw new Error("Invalid URL: contains multiple heads sections")
+  }
   const regex = new RegExp(`^${urlPrefix}(\\w+)$`)
-  const [, docMatch] =
+  const [, docMatch] = baseUrl.match(regex) || []
   const documentId = docMatch as DocumentId
   const binaryDocumentId = documentIdToBinary(documentId)
 
   if (!binaryDocumentId) throw new Error("Invalid document URL: " + url)
-  return {
-
-
-
-
-
+  if (headsSection === undefined) return { binaryDocumentId, documentId }
+
+  const heads = (headsSection === "" ? [] : headsSection.split("|")) as UrlHeads
+  const hexHeads = heads.map(head => {
+    try {
+      return uint8ArrayToHexString(bs58check.decode(head))
+    } catch (e) {
+      throw new Error(`Invalid head in URL: ${head}`)
+    }
+  })
+  return { binaryDocumentId, hexHeads, documentId, heads }
 }
 
 /**
@@ -32,38 +62,78 @@ export const parseAutomergeUrl = (url: AutomergeUrl) => {
  */
 export const stringifyAutomergeUrl = (
   arg: UrlOptions | DocumentId | BinaryDocumentId
-) => {
-
-
-
-
-
-
+): AutomergeUrl => {
+  if (arg instanceof Uint8Array || typeof arg === "string") {
+    return (urlPrefix +
+      (arg instanceof Uint8Array
+        ? binaryToDocumentId(arg)
+        : arg)) as AutomergeUrl
+  }
+
+  const { documentId, heads = undefined } = arg
+
+  if (documentId === undefined)
+    throw new Error("Invalid documentId: " + documentId)
 
   const encodedDocumentId =
     documentId instanceof Uint8Array
       ? binaryToDocumentId(documentId)
-      :
-
-
+      : documentId
+
+  let url = `${urlPrefix}${encodedDocumentId}`
+
+  if (heads !== undefined) {
+    heads.forEach(head => {
+      try {
+        bs58check.decode(head)
+      } catch (e) {
+        throw new Error(`Invalid head: ${head}`)
+      }
+    })
+    url += "#" + heads.join("|")
+  }
 
-
-
+  return url as AutomergeUrl
+}
 
-
+/** Helper to extract just the heads from a URL if they exist */
+export const getHeadsFromUrl = (url: AutomergeUrl): string[] | undefined => {
+  const { heads } = parseAutomergeUrl(url)
+  return heads
 }
 
+export const anyDocumentIdToAutomergeUrl = (id: AnyDocumentId) =>
+  isValidAutomergeUrl(id)
+    ? id
+    : isValidDocumentId(id)
+    ? stringifyAutomergeUrl({ documentId: id })
+    : isValidUuid(id)
+    ? parseLegacyUUID(id)
+    : undefined
+
 /**
  * Given a string, returns true if it is a valid Automerge URL. This function also acts as a type
  * discriminator in Typescript.
  */
 export const isValidAutomergeUrl = (str: unknown): str is AutomergeUrl => {
-  if (typeof str !== "string"
-
-  const automergeUrl = str as AutomergeUrl
+  if (typeof str !== "string" || !str || !str.startsWith(urlPrefix))
+    return false
   try {
-    const { documentId } = parseAutomergeUrl(
-
+    const { documentId, heads } = parseAutomergeUrl(str as AutomergeUrl)
+    if (!isValidDocumentId(documentId)) return false
+    if (
+      heads &&
+      !heads.every(head => {
+        try {
+          bs58check.decode(head)
+          return true
+        } catch {
+          return false
+        }
+      })
+    )
+      return false
+    return true
   } catch {
     return false
   }
@@ -97,6 +167,12 @@ export const documentIdToBinary = (docId: DocumentId) =>
 export const binaryToDocumentId = (docId: BinaryDocumentId) =>
   bs58check.encode(docId) as DocumentId
 
+export const encodeHeads = (heads: AutomergeHeads): UrlHeads =>
+  heads.map(h => bs58check.encode(uint8ArrayFromHexString(h))) as UrlHeads
+
+export const decodeHeads = (heads: UrlHeads): AutomergeHeads =>
+  heads.map(h => uint8ArrayToHexString(bs58check.decode(h))) as AutomergeHeads
+
 export const parseLegacyUUID = (str: string) => {
   if (!Uuid.validate(str)) return undefined
   const documentId = Uuid.parse(str) as BinaryDocumentId
@@ -141,4 +217,5 @@ export const interpretAsDocumentId = (id: AnyDocumentId) => {
 
 type UrlOptions = {
   documentId: DocumentId | BinaryDocumentId
+  heads?: UrlHeads
 }
```