@automerge/automerge-repo 2.0.0-alpha.14 → 2.0.0-alpha.16
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/AutomergeUrl.d.ts +19 -4
- package/dist/AutomergeUrl.d.ts.map +1 -1
- package/dist/AutomergeUrl.js +71 -24
- package/dist/DocHandle.d.ts +21 -17
- package/dist/DocHandle.d.ts.map +1 -1
- package/dist/DocHandle.js +83 -26
- package/dist/RemoteHeadsSubscriptions.d.ts +4 -4
- package/dist/RemoteHeadsSubscriptions.d.ts.map +1 -1
- package/dist/RemoteHeadsSubscriptions.js +4 -1
- package/dist/Repo.d.ts.map +1 -1
- package/dist/Repo.js +17 -12
- package/dist/helpers/bufferFromHex.d.ts +3 -0
- package/dist/helpers/bufferFromHex.d.ts.map +1 -0
- package/dist/helpers/bufferFromHex.js +13 -0
- package/dist/helpers/headsAreSame.d.ts +2 -2
- package/dist/helpers/headsAreSame.d.ts.map +1 -1
- package/dist/helpers/mergeArrays.d.ts +1 -1
- package/dist/helpers/mergeArrays.d.ts.map +1 -1
- package/dist/helpers/tests/storage-adapter-tests.d.ts.map +1 -1
- package/dist/helpers/tests/storage-adapter-tests.js +6 -9
- package/dist/storage/StorageSubsystem.d.ts.map +1 -1
- package/dist/storage/StorageSubsystem.js +2 -1
- package/package.json +2 -2
- package/src/AutomergeUrl.ts +103 -26
- package/src/DocHandle.ts +130 -37
- package/src/RemoteHeadsSubscriptions.ts +11 -8
- package/src/Repo.ts +22 -11
- package/src/helpers/bufferFromHex.ts +14 -0
- package/src/helpers/headsAreSame.ts +2 -2
- package/src/helpers/tests/storage-adapter-tests.ts +13 -24
- package/src/storage/StorageSubsystem.ts +3 -1
- package/test/AutomergeUrl.test.ts +130 -0
- package/test/DocHandle.test.ts +70 -4
- package/test/DocSynchronizer.test.ts +10 -3
- package/test/Repo.test.ts +117 -3
package/dist/Repo.js
CHANGED
@@ -1,7 +1,7 @@
 import { next as Automerge } from "@automerge/automerge/slim";
 import debug from "debug";
 import { EventEmitter } from "eventemitter3";
-import { generateAutomergeUrl, interpretAsDocumentId, parseAutomergeUrl, } from "./AutomergeUrl.js";
+import { encodeHeads, generateAutomergeUrl, interpretAsDocumentId, isValidAutomergeUrl, parseAutomergeUrl, } from "./AutomergeUrl.js";
 import { DELETED, DocHandle, READY, UNAVAILABLE, UNLOADED, } from "./DocHandle.js";
 import { RemoteHeadsSubscriptions } from "./RemoteHeadsSubscriptions.js";
 import { headsAreSame } from "./helpers/headsAreSame.js";
@@ -119,11 +119,12 @@ export class Repo extends EventEmitter {
             }
             const heads = handle.getRemoteHeads(storageId);
             const haveHeadsChanged = message.syncState.theirHeads &&
-                (!heads ||
+                (!heads ||
+                    !headsAreSame(heads, encodeHeads(message.syncState.theirHeads)));
             if (haveHeadsChanged && message.syncState.theirHeads) {
-                handle.setRemoteHeads(storageId, message.syncState.theirHeads);
+                handle.setRemoteHeads(storageId, encodeHeads(message.syncState.theirHeads));
                 if (storageId && this.#remoteHeadsGossipingEnabled) {
-                    this.#remoteHeadsSubscriptions.handleImmediateRemoteHeadsChanged(message.documentId, storageId, message.syncState.theirHeads);
+                    this.#remoteHeadsSubscriptions.handleImmediateRemoteHeadsChanged(message.documentId, storageId, encodeHeads(message.syncState.theirHeads));
                 }
             }
         });
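Context for this hunk: syncState.theirHeads arrives from Automerge core as hex-encoded hash strings, while handles now store and compare heads in the bs58check-encoded UrlHeads form introduced in src/AutomergeUrl.ts (later in this diff), so each use must pass through encodeHeads. A minimal TypeScript sketch of that boundary; the package-root export path for encodeHeads is an assumption (internally it comes from ./AutomergeUrl.js):

import { encodeHeads } from "@automerge/automerge-repo" // export path assumed
import type { Heads as AutomergeHeads } from "@automerge/automerge/slim"

// Hex heads as they arrive in a sync message's syncState.theirHeads:
const theirHeads: AutomergeHeads = ["ab".repeat(32)] // stand-in 32-byte hash

// The bs58check form that handles store and that headsAreSame now expects:
const stored = encodeHeads(theirHeads)

// Comparing hex heads against stored heads without encodeHeads would always
// report a change, since the two encodings never produce equal strings.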
@@ -305,18 +306,21 @@ export class Repo extends EventEmitter {
     find(
     /** The url or documentId of the handle to retrieve */
     id) {
-        const documentId =
-
-
-
+        const { documentId, heads } = isValidAutomergeUrl(id)
+            ? parseAutomergeUrl(id)
+            : { documentId: interpretAsDocumentId(id), heads: undefined };
+        const cachedHandle = this.#handleCache[documentId];
+        if (cachedHandle) {
+            if (cachedHandle.isUnavailable()) {
                 // this ensures that the event fires after the handle has been returned
                 setTimeout(() => {
-
-                    handle:
+                    cachedHandle.emit("unavailable", {
+                        handle: cachedHandle,
                     });
                 });
             }
-            return
+            // If we already have the handle, return it immediately (or a view of the handle if heads are specified)
+            return heads ? cachedHandle.view(heads) : cachedHandle;
         }
         // If we don't already have the handle, make an empty one and try loading it
         const handle = this.#getHandle({
@@ -345,7 +349,8 @@ export class Repo extends EventEmitter {
             .catch(err => {
             this.#log("error waiting for network", { err });
         });
-        return handle
+        // If we already have the handle, return it immediately (or a view of the handle if heads are specified)
+        return heads ? handle.view(heads) : handle;
     }
     delete(
     /** The url or documentId of the handle to delete */
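Taken together, the find() hunks mean a URL can now carry heads, and the repo answers with handle.view(heads), a handle pinned at that point in history, instead of the live handle. A usage sketch under stated assumptions: find() stays synchronous as shown in this diff, and handle.heads() returns the new bs58check-encoded UrlHeads (the document shape is illustrative):

import { Repo, isValidAutomergeUrl } from "@automerge/automerge-repo"

const repo = new Repo({ network: [] })
const handle = repo.create({ title: "hello" })

// A bare URL still resolves to the live, mutable handle.
const live = repo.find(handle.url)

// Appending heads after "#" (joined with "|") requests a pinned view;
// per this diff, find() then returns handle.view(heads).
const pinnedUrl = `${handle.url}#${handle.heads().join("|")}`
if (isValidAutomergeUrl(pinnedUrl)) {
  const pinned = repo.find(pinnedUrl)
  // `pinned` reflects the document at those heads even after `live` moves on.
}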
package/dist/helpers/bufferFromHex.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"bufferFromHex.d.ts","sourceRoot":"","sources":["../../src/helpers/bufferFromHex.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,uBAAuB,cAAe,MAAM,KAAG,UAS3D,CAAA;AAED,eAAO,MAAM,qBAAqB,SAAU,UAAU,KAAG,MAExD,CAAA"}
package/dist/helpers/bufferFromHex.js
ADDED
@@ -0,0 +1,13 @@
+export const uint8ArrayFromHexString = (hexString) => {
+    if (hexString.length % 2 !== 0) {
+        throw new Error("Hex string must have an even length");
+    }
+    const bytes = new Uint8Array(hexString.length / 2);
+    for (let i = 0; i < hexString.length; i += 2) {
+        bytes[i >> 1] = parseInt(hexString.slice(i, i + 2), 16);
+    }
+    return bytes;
+};
+export const uint8ArrayToHexString = (data) => {
+    return Array.from(data, byte => byte.toString(16).padStart(2, "0")).join("");
+};
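These helpers are the low-level glue for the new heads encoding: Automerge exposes change hashes as hex strings, and the URL format needs raw bytes to feed bs58check. The same logic in TypeScript, with a round-trip check:

const uint8ArrayFromHexString = (hexString: string): Uint8Array => {
  if (hexString.length % 2 !== 0) {
    throw new Error("Hex string must have an even length")
  }
  const bytes = new Uint8Array(hexString.length / 2)
  for (let i = 0; i < hexString.length; i += 2) {
    // Each pair of hex characters becomes one byte.
    bytes[i >> 1] = parseInt(hexString.slice(i, i + 2), 16)
  }
  return bytes
}
const uint8ArrayToHexString = (data: Uint8Array): string =>
  Array.from(data, byte => byte.toString(16).padStart(2, "0")).join("")

const hex = "000102ff"
console.log(uint8ArrayFromHexString(hex)) // Uint8Array(4) [ 0, 1, 2, 255 ]
console.log(uint8ArrayToHexString(uint8ArrayFromHexString(hex)) === hex) // true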
package/dist/helpers/headsAreSame.d.ts
CHANGED
@@ -1,3 +1,3 @@
-import {
-export declare const headsAreSame: (a:
+import { UrlHeads } from "../AutomergeUrl.js";
+export declare const headsAreSame: (a: UrlHeads, b: UrlHeads) => boolean;
 //# sourceMappingURL=headsAreSame.d.ts.map
package/dist/helpers/headsAreSame.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"headsAreSame.d.ts","sourceRoot":"","sources":["../../src/helpers/headsAreSame.ts"],"names":[],"mappings":"
+{"version":3,"file":"headsAreSame.d.ts","sourceRoot":"","sources":["../../src/helpers/headsAreSame.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,QAAQ,EAAE,MAAM,oBAAoB,CAAA;AAE7C,eAAO,MAAM,YAAY,MAAO,QAAQ,KAAK,QAAQ,YAEpD,CAAA"}
package/dist/helpers/mergeArrays.d.ts
CHANGED
@@ -1,2 +1,2 @@
-export declare function mergeArrays(myArrays: Uint8Array[]): Uint8Array
+export declare function mergeArrays(myArrays: Uint8Array[]): Uint8Array<ArrayBuffer>;
 //# sourceMappingURL=mergeArrays.d.ts.map
package/dist/helpers/mergeArrays.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"mergeArrays.d.ts","sourceRoot":"","sources":["../../src/helpers/mergeArrays.ts"],"names":[],"mappings":"AAAA,wBAAgB,WAAW,CAAC,QAAQ,EAAE,UAAU,EAAE,
+{"version":3,"file":"mergeArrays.d.ts","sourceRoot":"","sources":["../../src/helpers/mergeArrays.ts"],"names":[],"mappings":"AAAA,wBAAgB,WAAW,CAAC,QAAQ,EAAE,UAAU,EAAE,2BAgBjD"}
package/dist/helpers/tests/storage-adapter-tests.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"storage-adapter-tests.d.ts","sourceRoot":"","sources":["../../../src/helpers/tests/storage-adapter-tests.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,uBAAuB,EAAE,MAAM,0CAA0C,CAAA;AAcvF,wBAAgB,sBAAsB,CAAC,KAAK,EAAE,OAAO,EAAE,KAAK,CAAC,EAAE,MAAM,GAAG,IAAI,
+{"version":3,"file":"storage-adapter-tests.d.ts","sourceRoot":"","sources":["../../../src/helpers/tests/storage-adapter-tests.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,uBAAuB,EAAE,MAAM,0CAA0C,CAAA;AAcvF,wBAAgB,sBAAsB,CAAC,KAAK,EAAE,OAAO,EAAE,KAAK,CAAC,EAAE,MAAM,GAAG,IAAI,CA+H3E;AAID,MAAM,MAAM,OAAO,GAAG,MAAM,OAAO,CAAC;IAClC,OAAO,EAAE,uBAAuB,CAAA;IAChC,QAAQ,CAAC,EAAE,MAAM,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;CACtC,CAAC,CAAA"}
package/dist/helpers/tests/storage-adapter-tests.js
CHANGED
@@ -44,26 +44,23 @@ export function runStorageAdapterTests(setup, title) {
         await adapter.save(["AAAAA", "sync-state", "xxxxx"], PAYLOAD_A());
         await adapter.save(["AAAAA", "snapshot", "yyyyy"], PAYLOAD_B());
         await adapter.save(["AAAAA", "sync-state", "zzzzz"], PAYLOAD_C());
-        expect(await adapter.loadRange(["AAAAA"])).toStrictEqual(
+        expect(await adapter.loadRange(["AAAAA"])).toStrictEqual([
            { key: ["AAAAA", "sync-state", "xxxxx"], data: PAYLOAD_A() },
            { key: ["AAAAA", "snapshot", "yyyyy"], data: PAYLOAD_B() },
            { key: ["AAAAA", "sync-state", "zzzzz"], data: PAYLOAD_C() },
-        ])
-        expect(await adapter.loadRange(["AAAAA", "sync-state"])).toStrictEqual(
+        ]);
+        expect(await adapter.loadRange(["AAAAA", "sync-state"])).toStrictEqual([
            { key: ["AAAAA", "sync-state", "xxxxx"], data: PAYLOAD_A() },
            { key: ["AAAAA", "sync-state", "zzzzz"], data: PAYLOAD_C() },
-        ])
+        ]);
     });
     it("should only load values that match they key", async ({ adapter }) => {
         await adapter.save(["AAAAA", "sync-state", "xxxxx"], PAYLOAD_A());
         await adapter.save(["BBBBB", "sync-state", "zzzzz"], PAYLOAD_C());
         const actual = await adapter.loadRange(["AAAAA"]);
-        expect(actual).toStrictEqual(
+        expect(actual).toStrictEqual([
            { key: ["AAAAA", "sync-state", "xxxxx"], data: PAYLOAD_A() },
-        ])
-        expect(actual).toStrictEqual(expect.not.arrayContaining([
-            { key: ["BBBBB", "sync-state", "zzzzz"], data: PAYLOAD_C() },
-        ]));
+        ]);
     });
 });
 describe("save and remove", () => {
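For storage-adapter authors, this file is the shared conformance suite: hand it a setup function that resolves to your adapter (plus an optional teardown, per the SetupFn shape in the declaration map above) and it registers the describe/it blocks against it. A hypothetical wiring sketch; the import path and MyAdapter are placeholders, not confirmed API:

// Hypothetical paths: adjust to wherever your build exposes these modules.
import { runStorageAdapterTests } from "@automerge/automerge-repo/dist/helpers/tests/storage-adapter-tests.js"
import { MyAdapter } from "./MyAdapter.js" // your StorageAdapterInterface implementation

runStorageAdapterTests(async () => {
  const adapter = new MyAdapter()
  return { adapter, teardown: async () => adapter.close() } // teardown is optional
}, "MyAdapter")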
package/dist/storage/StorageSubsystem.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"StorageSubsystem.d.ts","sourceRoot":"","sources":["../../src/storage/StorageSubsystem.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,CAAC,MAAM,gCAAgC,CAAA;AAInD,OAAO,EAAE,KAAK,UAAU,EAAE,MAAM,aAAa,CAAA;AAC7C,OAAO,EAAE,uBAAuB,EAAE,MAAM,8BAA8B,CAAA;AACtE,OAAO,EAAyB,SAAS,EAAE,MAAM,YAAY,CAAA;AAI7D,OAAO,EAAE,YAAY,EAAE,MAAM,eAAe,CAAA;
+{"version":3,"file":"StorageSubsystem.d.ts","sourceRoot":"","sources":["../../src/storage/StorageSubsystem.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,CAAC,MAAM,gCAAgC,CAAA;AAInD,OAAO,EAAE,KAAK,UAAU,EAAE,MAAM,aAAa,CAAA;AAC7C,OAAO,EAAE,uBAAuB,EAAE,MAAM,8BAA8B,CAAA;AACtE,OAAO,EAAyB,SAAS,EAAE,MAAM,YAAY,CAAA;AAI7D,OAAO,EAAE,YAAY,EAAE,MAAM,eAAe,CAAA;AAG5C,KAAK,sBAAsB,GAAG;IAC5B,iBAAiB,EAAE,CAAC,GAAG,EAAE;QACvB,UAAU,EAAE,UAAU,CAAA;QACtB,cAAc,EAAE,MAAM,CAAA;QACtB,MAAM,EAAE,MAAM,CAAA;QACd,UAAU,EAAE,MAAM,CAAA;KACnB,KAAK,IAAI,CAAA;CACX,CAAA;AAED;;;GAGG;AACH,qBAAa,gBAAiB,SAAQ,YAAY,CAAC,sBAAsB,CAAC;;gBAe5D,cAAc,EAAE,uBAAuB;IAK7C,EAAE,IAAI,OAAO,CAAC,SAAS,CAAC;IA2B9B,kCAAkC;IAC5B,IAAI;IACR,iFAAiF;IACjF,SAAS,EAAE,MAAM;IAEjB,yFAAyF;IACzF,GAAG,EAAE,MAAM,GACV,OAAO,CAAC,UAAU,GAAG,SAAS,CAAC;IAKlC,gCAAgC;IAC1B,IAAI;IACR,iFAAiF;IACjF,SAAS,EAAE,MAAM;IAEjB,yFAAyF;IACzF,GAAG,EAAE,MAAM;IAEX,sCAAsC;IACtC,IAAI,EAAE,UAAU,GACf,OAAO,CAAC,IAAI,CAAC;IAKhB,oCAAoC;IAC9B,MAAM;IACV,iFAAiF;IACjF,SAAS,EAAE,MAAM;IAEjB,2FAA2F;IAC3F,GAAG,EAAE,MAAM,GACV,OAAO,CAAC,IAAI,CAAC;IAOhB;;OAEG;IACG,OAAO,CAAC,CAAC,EAAE,UAAU,EAAE,UAAU,GAAG,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC;IA0ClE;;;;;;OAMG;IACG,OAAO,CAAC,UAAU,EAAE,UAAU,EAAE,GAAG,EAAE,CAAC,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC;IAczE;;OAEG;IACG,SAAS,CAAC,UAAU,EAAE,UAAU;IAkEhC,aAAa,CACjB,UAAU,EAAE,UAAU,EACtB,SAAS,EAAE,SAAS,GACnB,OAAO,CAAC,CAAC,CAAC,SAAS,GAAG,SAAS,CAAC;IAW7B,aAAa,CACjB,UAAU,EAAE,UAAU,EACtB,SAAS,EAAE,SAAS,EACpB,SAAS,EAAE,CAAC,CAAC,SAAS,GACrB,OAAO,CAAC,IAAI,CAAC;CA8CjB"}
package/dist/storage/StorageSubsystem.js
CHANGED
@@ -6,6 +6,7 @@ import { keyHash, headsHash } from "./keyHash.js";
 import { chunkTypeFromKey } from "./chunkTypeFromKey.js";
 import * as Uuid from "uuid";
 import { EventEmitter } from "eventemitter3";
+import { encodeHeads } from "../AutomergeUrl.js";
 /**
  * The storage subsystem is responsible for saving and loading Automerge documents to and from
  * storage adapter. It also provides a generic key/value storage interface for other uses.
@@ -210,7 +211,7 @@ export class StorageSubsystem extends EventEmitter {
             return true;
         }
         const newHeads = A.getHeads(doc);
-        if (headsAreSame(newHeads, oldHeads)) {
+        if (headsAreSame(encodeHeads(newHeads), encodeHeads(oldHeads))) {
             // the document hasn't changed
             return false;
         }
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@automerge/automerge-repo",
-  "version": "2.0.0-alpha.14",
+  "version": "2.0.0-alpha.16",
   "description": "A repository object to manage a collection of automerge documents",
   "repository": "https://github.com/automerge/automerge-repo/tree/master/packages/automerge-repo",
   "author": "Peter van Hardenberg <pvh@pvh.ca>",
@@ -60,5 +60,5 @@
   "publishConfig": {
     "access": "public"
   },
-  "gitHead": "
+  "gitHead": "04e79be28aca7dc566ced3b2c32e05ca5d37ce69"
 }
package/src/AutomergeUrl.ts
CHANGED
@@ -5,25 +5,55 @@ import type {
   DocumentId,
   AnyDocumentId,
 } from "./types.js"
+
 import * as Uuid from "uuid"
 import bs58check from "bs58check"
+import {
+  uint8ArrayFromHexString,
+  uint8ArrayToHexString,
+} from "./helpers/bufferFromHex.js"
+
+import type { Heads as AutomergeHeads } from "@automerge/automerge/slim"

 export const urlPrefix = "automerge:"

+// We need to define our own version of heads because the AutomergeHeads type is not bs58check encoded
+export type UrlHeads = string[] & { __automergeUrlHeadsBrand: unknown }
+
+interface ParsedAutomergeUrl {
+  /** unencoded DocumentId */
+  binaryDocumentId: BinaryDocumentId
+  /** bs58 encoded DocumentId */
+  documentId: DocumentId
+  /** Optional array of heads, if specified in URL */
+  heads?: UrlHeads
+  /** Optional hex array of heads, in Automerge core format */
+  hexHeads?: string[] // AKA: heads
+}
+
 /** Given an Automerge URL, returns the DocumentId in both base58check-encoded form and binary form */
-export const parseAutomergeUrl = (url: AutomergeUrl) => {
+export const parseAutomergeUrl = (url: AutomergeUrl): ParsedAutomergeUrl => {
+  const [baseUrl, headsSection, ...rest] = url.split("#")
+  if (rest.length > 0) {
+    throw new Error("Invalid URL: contains multiple heads sections")
+  }
   const regex = new RegExp(`^${urlPrefix}(\\w+)$`)
-  const [, docMatch] =
+  const [, docMatch] = baseUrl.match(regex) || []
   const documentId = docMatch as DocumentId
   const binaryDocumentId = documentIdToBinary(documentId)

   if (!binaryDocumentId) throw new Error("Invalid document URL: " + url)
-  return {
-
-
-
-
+  if (headsSection === undefined) return { binaryDocumentId, documentId }
+
+  const heads = (headsSection === "" ? [] : headsSection.split("|")) as UrlHeads
+  const hexHeads = heads.map(head => {
+    try {
+      return uint8ArrayToHexString(bs58check.decode(head))
+    } catch (e) {
+      throw new Error(`Invalid head in URL: ${head}`)
+    }
+  })
+  return { binaryDocumentId, hexHeads, documentId, heads }
 }

 /**
@@ -32,38 +62,78 @@ export const parseAutomergeUrl = (url: AutomergeUrl) => {
  */
 export const stringifyAutomergeUrl = (
   arg: UrlOptions | DocumentId | BinaryDocumentId
-) => {
-
-
-
-
-
-
+): AutomergeUrl => {
+  if (arg instanceof Uint8Array || typeof arg === "string") {
+    return (urlPrefix +
+      (arg instanceof Uint8Array
+        ? binaryToDocumentId(arg)
+        : arg)) as AutomergeUrl
+  }
+
+  const { documentId, heads = undefined } = arg
+
+  if (documentId === undefined)
+    throw new Error("Invalid documentId: " + documentId)

   const encodedDocumentId =
     documentId instanceof Uint8Array
       ? binaryToDocumentId(documentId)
-      :
-
-
+      : documentId
+
+  let url = `${urlPrefix}${encodedDocumentId}`
+
+  if (heads !== undefined) {
+    heads.forEach(head => {
+      try {
+        bs58check.decode(head)
+      } catch (e) {
+        throw new Error(`Invalid head: ${head}`)
+      }
+    })
+    url += "#" + heads.join("|")
+  }

-
-
+  return url as AutomergeUrl
+}

-
+/** Helper to extract just the heads from a URL if they exist */
+export const getHeadsFromUrl = (url: AutomergeUrl): string[] | undefined => {
+  const { heads } = parseAutomergeUrl(url)
+  return heads
 }

+export const anyDocumentIdToAutomergeUrl = (id: AnyDocumentId) =>
+  isValidAutomergeUrl(id)
+    ? id
+    : isValidDocumentId(id)
+    ? stringifyAutomergeUrl({ documentId: id })
+    : isValidUuid(id)
+    ? parseLegacyUUID(id)
+    : undefined
+
 /**
  * Given a string, returns true if it is a valid Automerge URL. This function also acts as a type
  * discriminator in Typescript.
  */
 export const isValidAutomergeUrl = (str: unknown): str is AutomergeUrl => {
-  if (typeof str !== "string"
-
-  const automergeUrl = str as AutomergeUrl
+  if (typeof str !== "string" || !str || !str.startsWith(urlPrefix))
+    return false
   try {
-    const { documentId } = parseAutomergeUrl(
-
+    const { documentId, heads } = parseAutomergeUrl(str as AutomergeUrl)
+    if (!isValidDocumentId(documentId)) return false
+    if (
+      heads &&
+      !heads.every(head => {
+        try {
+          bs58check.decode(head)
+          return true
+        } catch {
+          return false
+        }
+      })
+    )
+      return false
+    return true
   } catch {
     return false
   }
@@ -97,6 +167,12 @@ export const documentIdToBinary = (docId: DocumentId) =>
 export const binaryToDocumentId = (docId: BinaryDocumentId) =>
   bs58check.encode(docId) as DocumentId

+export const encodeHeads = (heads: AutomergeHeads): UrlHeads =>
+  heads.map(h => bs58check.encode(uint8ArrayFromHexString(h))) as UrlHeads
+
+export const decodeHeads = (heads: UrlHeads): AutomergeHeads =>
+  heads.map(h => uint8ArrayToHexString(bs58check.decode(h))) as AutomergeHeads
+
 export const parseLegacyUUID = (str: string) => {
   if (!Uuid.validate(str)) return undefined
   const documentId = Uuid.parse(str) as BinaryDocumentId
@@ -141,4 +217,5 @@ export const interpretAsDocumentId = (id: AnyDocumentId) => {

 type UrlOptions = {
   documentId: DocumentId | BinaryDocumentId
+  heads?: UrlHeads
 }
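Net effect of the AutomergeUrl.ts changes: heads travel in the URL fragment using the same bs58check alphabet as the document ID, i.e. automerge:<docId>#<head>|<head>. A round-trip sketch against the functions defined above; the identifiers are stand-ins, and the relative import paths assume code living inside the package source:

import bs58check from "bs58check"
import {
  encodeHeads,
  decodeHeads,
  parseAutomergeUrl,
  stringifyAutomergeUrl,
} from "./AutomergeUrl.js"
import type { DocumentId } from "./types.js"

// Stand-ins: a 16-byte document id and a 32-byte change hash in hex.
const documentId = bs58check.encode(new Uint8Array(16).fill(7)) as DocumentId
const hexHead = "ab".repeat(32)

// hex -> bs58check for the URL, and back again.
const heads = encodeHeads([hexHead])
console.log(decodeHeads(heads)[0] === hexHead) // true

// Heads ride in the fragment, "|"-separated.
const url = stringifyAutomergeUrl({ documentId, heads })
const parsed = parseAutomergeUrl(url)
console.log(parsed.heads) // the bs58check heads, exactly as in the URL
console.log(parsed.hexHeads?.[0] === hexHead) // true: hex form for Automerge core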