@automerge/automerge-repo 0.2.1 → 1.0.0-alpha.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +7 -24
- package/dist/DocCollection.d.ts +4 -4
- package/dist/DocCollection.d.ts.map +1 -1
- package/dist/DocCollection.js +25 -17
- package/dist/DocHandle.d.ts +46 -13
- package/dist/DocHandle.d.ts.map +1 -1
- package/dist/DocHandle.js +104 -53
- package/dist/DocUrl.d.ts +38 -18
- package/dist/DocUrl.d.ts.map +1 -1
- package/dist/DocUrl.js +63 -24
- package/dist/Repo.d.ts.map +1 -1
- package/dist/Repo.js +9 -9
- package/dist/helpers/headsAreSame.d.ts +2 -2
- package/dist/helpers/headsAreSame.d.ts.map +1 -1
- package/dist/helpers/headsAreSame.js +1 -4
- package/dist/helpers/tests/network-adapter-tests.js +10 -10
- package/dist/index.d.ts +3 -2
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +1 -0
- package/dist/network/NetworkAdapter.d.ts +2 -3
- package/dist/network/NetworkAdapter.d.ts.map +1 -1
- package/dist/network/NetworkSubsystem.d.ts +2 -3
- package/dist/network/NetworkSubsystem.d.ts.map +1 -1
- package/dist/network/NetworkSubsystem.js +9 -13
- package/dist/storage/StorageAdapter.d.ts +9 -5
- package/dist/storage/StorageAdapter.d.ts.map +1 -1
- package/dist/storage/StorageSubsystem.d.ts +4 -4
- package/dist/storage/StorageSubsystem.d.ts.map +1 -1
- package/dist/storage/StorageSubsystem.js +109 -31
- package/dist/synchronizer/CollectionSynchronizer.d.ts +1 -1
- package/dist/synchronizer/CollectionSynchronizer.d.ts.map +1 -1
- package/dist/synchronizer/CollectionSynchronizer.js +5 -1
- package/dist/synchronizer/DocSynchronizer.d.ts.map +1 -1
- package/dist/synchronizer/DocSynchronizer.js +6 -5
- package/dist/types.d.ts +6 -0
- package/dist/types.d.ts.map +1 -1
- package/package.json +8 -5
- package/src/DocCollection.ts +32 -22
- package/src/DocHandle.ts +119 -77
- package/src/DocUrl.ts +90 -0
- package/src/Repo.ts +10 -11
- package/src/helpers/headsAreSame.ts +3 -5
- package/src/helpers/tests/network-adapter-tests.ts +10 -10
- package/src/index.ts +7 -5
- package/src/network/NetworkAdapter.ts +2 -3
- package/src/network/NetworkSubsystem.ts +9 -14
- package/src/storage/StorageAdapter.ts +7 -5
- package/src/storage/StorageSubsystem.ts +133 -36
- package/src/synchronizer/CollectionSynchronizer.ts +10 -2
- package/src/synchronizer/DocSynchronizer.ts +7 -6
- package/src/types.ts +4 -1
- package/test/CollectionSynchronizer.test.ts +1 -1
- package/test/DocCollection.test.ts +3 -2
- package/test/DocHandle.test.ts +40 -35
- package/test/DocSynchronizer.test.ts +3 -2
- package/test/Repo.test.ts +134 -27
- package/test/StorageSubsystem.test.ts +13 -10
- package/test/helpers/DummyNetworkAdapter.ts +2 -2
- package/test/helpers/DummyStorageAdapter.ts +8 -4
package/dist/DocUrl.js
CHANGED
```diff
@@ -1,28 +1,67 @@
-import
-import
-export const
-
-
-
+import { v4 as uuid } from "uuid";
+import bs58check from "bs58check";
+export const urlPrefix = "automerge:";
+/**
+ * given an Automerge URL, return a decoded DocumentId (and the encoded DocumentId)
+ *
+ * @param url
+ * @returns { documentId: Uint8Array(16), encodedDocumentId: bs58check.encode(documentId) }
+ */
+export const parseAutomergeUrl = (url) => {
+    const { binaryDocumentId: binaryDocumentId, encodedDocumentId } = parts(url);
+    if (!binaryDocumentId)
+        throw new Error("Invalid document URL: " + url);
+    return { binaryDocumentId, encodedDocumentId };
 };
-
-
-
+/**
+ * Given a documentId in either canonical form, return an Automerge URL
+ * Throws on invalid input.
+ * Note: this is an object because we anticipate adding fields in the future.
+ * @param { documentId: EncodedDocumentId | DocumentId }
+ * @returns AutomergeUrl
+ */
+export const stringifyAutomergeUrl = ({ documentId, }) => {
+    if (documentId instanceof Uint8Array)
+        return (urlPrefix +
+            binaryToDocumentId(documentId));
+    else if (typeof documentId === "string") {
+        return (urlPrefix + documentId);
+    }
+    throw new Error("Invalid documentId: " + documentId);
 };
-
-
-
-
-
-
-
+/**
+ * Given a string, return true if it is a valid Automerge URL
+ * also acts as a type discriminator in Typescript.
+ * @param str: URL candidate
+ * @returns boolean
+ */
+export const isValidAutomergeUrl = (str) => {
+    if (!str.startsWith(urlPrefix))
+        return false;
+    const { binaryDocumentId: documentId } = parts(str);
+    return documentId ? true : false;
 };
-
-
-
+/**
+ * generateAutomergeUrl produces a new AutomergeUrl.
+ * generally only called by create(), but used in tests as well.
+ * @returns a new Automerge URL with a random UUID documentId
+ */
+export const generateAutomergeUrl = () => stringifyAutomergeUrl({
+    documentId: uuid(null, new Uint8Array(16)),
+});
+export const documentIdToBinary = (docId) => bs58check.decodeUnsafe(docId);
+export const binaryToDocumentId = (docId) => bs58check.encode(docId);
+/**
+ * parts breaks up the URL into constituent pieces,
+ * eventually this could include things like heads, so we use this structure
+ * we return both a binary & string-encoded version of the document ID
+ * @param str
+ * @returns { binaryDocumentId, encodedDocumentId }
+ */
+const parts = (str) => {
+    const regex = new RegExp(`^${urlPrefix}(\\w+)$`);
+    const [m, docMatch] = str.match(regex) || [];
+    const encodedDocumentId = docMatch;
+    const binaryDocumentId = documentIdToBinary(encodedDocumentId);
+    return { binaryDocumentId, encodedDocumentId };
 };
-export const withCrc = str => str + `/` + encode(crc16(str));
-export const encode = str => Base58.encode(hexToBuffer(str));
-export const decode = str => bufferToHex(Base58.decode(str));
-export const hexToBuffer = key => Buffer.isBuffer(key) ? key : Buffer.from(key, "hex");
-export const bufferToHex = key => Buffer.isBuffer(key) ? key.toString("hex") : key;
```
package/dist/Repo.d.ts.map
CHANGED
```diff
@@ -1 +1 @@
-{"version":3,"file":"Repo.d.ts","sourceRoot":"","sources":["../src/Repo.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,aAAa,EAAE,MAAM,oBAAoB,CAAA;AAClD,OAAO,EAAE,aAAa,EAAE,MAAM,oBAAoB,CAAA;AAClD,OAAO,EAAE,cAAc,EAAE,MAAM,6BAA6B,CAAA;AAC5D,OAAO,EAAE,gBAAgB,EAAE,MAAM,+BAA+B,CAAA;AAChE,OAAO,EAAE,cAAc,EAAE,MAAM,6BAA6B,CAAA;AAC5D,OAAO,EAAE,gBAAgB,EAAE,MAAM,+BAA+B,CAAA;AAEhE,OAAO,
+{"version":3,"file":"Repo.d.ts","sourceRoot":"","sources":["../src/Repo.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,aAAa,EAAE,MAAM,oBAAoB,CAAA;AAClD,OAAO,EAAE,aAAa,EAAE,MAAM,oBAAoB,CAAA;AAClD,OAAO,EAAE,cAAc,EAAE,MAAM,6BAA6B,CAAA;AAC5D,OAAO,EAAE,gBAAgB,EAAE,MAAM,+BAA+B,CAAA;AAChE,OAAO,EAAE,cAAc,EAAE,MAAM,6BAA6B,CAAA;AAC5D,OAAO,EAAE,gBAAgB,EAAE,MAAM,+BAA+B,CAAA;AAEhE,OAAO,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,YAAY,CAAA;AAI/C,oFAAoF;AACpF,qBAAa,IAAK,SAAQ,aAAa;;IAGrC,gBAAgB,EAAE,gBAAgB,CAAA;IAClC,gBAAgB,CAAC,EAAE,gBAAgB,CAAA;IACnC,aAAa,EAAE,aAAa,CAAA;gBAEhB,EAAE,OAAO,EAAE,OAAO,EAAE,MAAM,EAAE,WAAW,EAAE,EAAE,UAAU;CAgHlE;AAED,MAAM,WAAW,UAAU;IACzB,4BAA4B;IAC5B,MAAM,CAAC,EAAE,MAAM,CAAA;IAEf,gDAAgD;IAChD,OAAO,CAAC,EAAE,cAAc,CAAA;IAExB,oDAAoD;IACpD,OAAO,EAAE,cAAc,EAAE,CAAA;IAEzB;;;OAGG;IACH,WAAW,CAAC,EAAE,WAAW,CAAA;CAC1B;AAED,MAAM,MAAM,WAAW,GAAG,CACxB,MAAM,EAAE,MAAM,EACd,UAAU,CAAC,EAAE,UAAU,KACpB,OAAO,CAAC,OAAO,CAAC,CAAA"}
```
package/dist/Repo.js
CHANGED
```diff
@@ -4,7 +4,6 @@ import { NetworkSubsystem } from "./network/NetworkSubsystem.js";
 import { StorageSubsystem } from "./storage/StorageSubsystem.js";
 import { CollectionSynchronizer } from "./synchronizer/CollectionSynchronizer.js";
 import debug from "debug";
-const SYNC_CHANNEL = "sync_channel";
 /** A Repo is a DocCollection with networking, syncing, and storage capabilities. */
 export class Repo extends DocCollection {
     #log;
@@ -21,23 +20,24 @@ export class Repo extends DocCollection {
         this.on("document", async ({ handle }) => {
             if (storageSubsystem) {
                 // Save when the document changes
-                handle.on("
-
-                await storageSubsystem.save(handle.documentId, doc);
+                handle.on("heads-changed", async ({ handle, doc }) => {
+                    await storageSubsystem.saveDoc(handle.documentId, doc);
                 });
                 // Try to load from disk
-                const
-
+                const loadedDoc = await storageSubsystem.loadDoc(handle.documentId);
+                if (loadedDoc) {
+                    handle.update(() => loadedDoc);
+                }
             }
             handle.request();
             // Register the document with the synchronizer. This advertises our interest in the document.
             synchronizer.addDocument(handle.documentId);
         });
-        this.on("delete-document", ({
+        this.on("delete-document", ({ encodedDocumentId }) => {
             // TODO Pass the delete on to the network
             // synchronizer.removeDocument(documentId)
             if (storageSubsystem) {
-                storageSubsystem.remove(
+                storageSubsystem.remove(encodedDocumentId);
             }
         });
         // SYNCHRONIZER
@@ -82,7 +82,7 @@ export class Repo extends DocCollection {
             }
         });
         // We establish a special channel for sync messages
-        networkSubsystem.join(
+        networkSubsystem.join();
         // EPHEMERAL DATA
         // The ephemeral data subsystem uses the network to send and receive messages that are not
         // persisted to storage, e.g. cursor position, presence, etc.
```
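Persistence is now driven by the handle's "heads-changed" event and the renamed saveDoc/loadDoc methods. A rough consumer-side sketch (the adapter instances are placeholders, and the RepoConfig field names follow the shape implied by the Repo.d.ts source map above, so treat them as assumptions):

```ts
// Sketch: a Repo wired with storage under the new event and method names.
import { Repo } from "./Repo.js"
import type { NetworkAdapter } from "./network/NetworkAdapter.js"
import type { StorageAdapter } from "./storage/StorageAdapter.js"

// Placeholders: any concrete adapters (e.g. the Dummy* helpers in test/helpers).
declare const someStorage: StorageAdapter
declare const someNetwork: NetworkAdapter

const repo = new Repo({ storage: someStorage, network: [someNetwork] })

const handle = repo.create<{ count?: number }>()
// Changing the doc advances its heads; the "heads-changed" listener above
// then persists via storageSubsystem.saveDoc(handle.documentId, doc).
handle.change(d => {
  d.count = 1
})
```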
package/dist/helpers/headsAreSame.d.ts
CHANGED
```diff
@@ -1,3 +1,3 @@
-import
-export declare const headsAreSame:
+import { Heads } from "@automerge/automerge";
+export declare const headsAreSame: (a: Heads, b: Heads) => boolean;
 //# sourceMappingURL=headsAreSame.d.ts.map
```
package/dist/helpers/headsAreSame.d.ts.map
CHANGED
```diff
@@ -1 +1 @@
-{"version":3,"file":"headsAreSame.d.ts","sourceRoot":"","sources":["../../src/helpers/headsAreSame.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,
+{"version":3,"file":"headsAreSame.d.ts","sourceRoot":"","sources":["../../src/helpers/headsAreSame.ts"],"names":[],"mappings":"AAAA,OAAO,EAAC,KAAK,EAAC,MAAM,sBAAsB,CAAA;AAG1C,eAAO,MAAM,YAAY,iCAExB,CAAA"}
```
package/dist/helpers/headsAreSame.js
CHANGED
```diff
@@ -1,7 +1,4 @@
-import * as A from "@automerge/automerge";
 import { arraysAreEqual } from "./arraysAreEqual.js";
 export const headsAreSame = (a, b) => {
-
-    const bHeads = A.getHeads(b);
-    return arraysAreEqual(aHeads, bHeads);
+    return arraysAreEqual(a, b);
 };
```
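headsAreSame is now a plain comparison of two Heads arrays; callers extract the heads themselves (as #shouldSave in StorageSubsystem.js does below). A short sketch of the new calling convention:

```ts
import * as A from "@automerge/automerge"
import { headsAreSame } from "./headsAreSame.js"

// docA and docB stand in for any two Automerge documents.
declare const docA: A.Doc<unknown>, docB: A.Doc<unknown>

const unchanged = headsAreSame(A.getHeads(docA), A.getHeads(docB))
```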
package/dist/helpers/tests/network-adapter-tests.js
CHANGED
```diff
@@ -31,21 +31,21 @@ export function runAdapterTests(_setup, title) {
         const aliceHandle = aliceRepo.create();
         // Bob receives the document
         await eventPromise(bobRepo, "document");
-        const bobHandle = bobRepo.find(aliceHandle.
+        const bobHandle = bobRepo.find(aliceHandle.url);
         // Alice changes the document
         aliceHandle.change(d => {
             d.foo = "bar";
         });
         // Bob receives the change
         await eventPromise(bobHandle, "change");
-        assert.equal((await bobHandle.
+        assert.equal((await bobHandle.doc()).foo, "bar");
         // Bob changes the document
         bobHandle.change(d => {
             d.foo = "baz";
         });
         // Alice receives the change
         await eventPromise(aliceHandle, "change");
-        assert.equal((await aliceHandle.
+        assert.equal((await aliceHandle.doc()).foo, "baz");
     };
     // Run the test in both directions, in case they're different types of adapters
     {
@@ -69,27 +69,27 @@ export function runAdapterTests(_setup, title) {
         const charlieRepo = new Repo({ network: c, peerId: charlie });
         // Alice creates a document
         const aliceHandle = aliceRepo.create();
-        const
+        const docUrl = aliceHandle.url;
         // Bob and Charlie receive the document
         await eventPromises([bobRepo, charlieRepo], "document");
-        const bobHandle = bobRepo.find(
-        const charlieHandle = charlieRepo.find(
+        const bobHandle = bobRepo.find(docUrl);
+        const charlieHandle = charlieRepo.find(docUrl);
         // Alice changes the document
         aliceHandle.change(d => {
             d.foo = "bar";
         });
         // Bob and Charlie receive the change
         await eventPromises([bobHandle, charlieHandle], "change");
-        assert.equal((await bobHandle.
-        assert.equal((await charlieHandle.
+        assert.equal((await bobHandle.doc()).foo, "bar");
+        assert.equal((await charlieHandle.doc()).foo, "bar");
         // Charlie changes the document
         charlieHandle.change(d => {
             d.foo = "baz";
         });
         // Alice and Bob receive the change
         await eventPromises([aliceHandle, bobHandle], "change");
-        assert.equal((await bobHandle.
-        assert.equal((await charlieHandle.
+        assert.equal((await bobHandle.doc()).foo, "baz");
+        assert.equal((await charlieHandle.doc()).foo, "baz");
         teardown();
     });
     // TODO: with BroadcastChannel, this test never ends, because it goes into an infinite loop,
```
package/dist/index.d.ts
CHANGED
```diff
@@ -1,12 +1,13 @@
 export { DocCollection } from "./DocCollection.js";
 export { DocHandle, HandleState } from "./DocHandle.js";
-export type { DocHandleChangePayload
+export type { DocHandleChangePayload } from "./DocHandle.js";
 export { NetworkAdapter } from "./network/NetworkAdapter.js";
 export type { InboundMessagePayload, MessagePayload, OpenPayload, PeerCandidatePayload, PeerDisconnectedPayload, } from "./network/NetworkAdapter.js";
 export { NetworkSubsystem } from "./network/NetworkSubsystem.js";
 export { Repo, type SharePolicy } from "./Repo.js";
-export { StorageAdapter } from "./storage/StorageAdapter.js";
+export { StorageAdapter, type StorageKey } from "./storage/StorageAdapter.js";
 export { StorageSubsystem } from "./storage/StorageSubsystem.js";
 export { CollectionSynchronizer } from "./synchronizer/CollectionSynchronizer.js";
+export { parseAutomergeUrl, isValidAutomergeUrl, stringifyAutomergeUrl as generateAutomergeUrl, } from "./DocUrl.js";
 export * from "./types.js";
 //# sourceMappingURL=index.d.ts.map
```
package/dist/index.d.ts.map
CHANGED
```diff
@@ -1 +1 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,aAAa,EAAE,MAAM,oBAAoB,CAAA;AAClD,OAAO,EAAE,SAAS,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAA;AACvD,YAAY,
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,aAAa,EAAE,MAAM,oBAAoB,CAAA;AAClD,OAAO,EAAE,SAAS,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAA;AACvD,YAAY,EAAE,sBAAsB,EAAE,MAAM,gBAAgB,CAAA;AAC5D,OAAO,EAAE,cAAc,EAAE,MAAM,6BAA6B,CAAA;AAC5D,YAAY,EACV,qBAAqB,EACrB,cAAc,EACd,WAAW,EACX,oBAAoB,EACpB,uBAAuB,GACxB,MAAM,6BAA6B,CAAA;AACpC,OAAO,EAAE,gBAAgB,EAAE,MAAM,+BAA+B,CAAA;AAChE,OAAO,EAAE,IAAI,EAAE,KAAK,WAAW,EAAE,MAAM,WAAW,CAAA;AAClD,OAAO,EAAE,cAAc,EAAE,KAAK,UAAU,EAAE,MAAM,6BAA6B,CAAA;AAC7E,OAAO,EAAE,gBAAgB,EAAE,MAAM,+BAA+B,CAAA;AAChE,OAAO,EAAE,sBAAsB,EAAE,MAAM,0CAA0C,CAAA;AACjF,OAAO,EACL,iBAAiB,EACjB,mBAAmB,EACnB,qBAAqB,IAAI,oBAAoB,GAC9C,MAAM,aAAa,CAAA;AACpB,cAAc,YAAY,CAAA"}
```
package/dist/index.js
CHANGED
```diff
@@ -6,4 +6,5 @@ export { Repo } from "./Repo.js";
 export { StorageAdapter } from "./storage/StorageAdapter.js";
 export { StorageSubsystem } from "./storage/StorageSubsystem.js";
 export { CollectionSynchronizer } from "./synchronizer/CollectionSynchronizer.js";
+export { parseAutomergeUrl, isValidAutomergeUrl, stringifyAutomergeUrl as generateAutomergeUrl, } from "./DocUrl.js";
 export * from "./types.js";
```
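Note the aliasing in this re-export: the name generateAutomergeUrl at the package root points at stringifyAutomergeUrl, so the root-level function takes a { documentId } argument instead of minting a fresh ID. A sketch of what consumers actually get (the documentId string below is a made-up, bs58check-looking placeholder):

```ts
import {
  generateAutomergeUrl, // actually stringifyAutomergeUrl, per the re-export above
  isValidAutomergeUrl,
  parseAutomergeUrl,
} from "@automerge/automerge-repo"

// Because of the aliasing, the root-level function expects a documentId:
const url = generateAutomergeUrl({ documentId: "3NEsmz5ZBF8DyjKKpwhjWTWTo1hs" })
isValidAutomergeUrl(url) // true only if the payload bs58check-decodes
```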
package/dist/network/NetworkAdapter.d.ts
CHANGED
```diff
@@ -4,8 +4,8 @@ export declare abstract class NetworkAdapter extends EventEmitter<NetworkAdapter
     peerId?: PeerId;
     abstract connect(url?: string): void;
     abstract sendMessage(peerId: PeerId, channelId: ChannelId, message: Uint8Array, broadcast: boolean): void;
-    abstract join(
-    abstract leave(
+    abstract join(): void;
+    abstract leave(): void;
 }
 export interface NetworkAdapterEvents {
     open: (payload: OpenPayload) => void;
@@ -19,7 +19,6 @@ export interface OpenPayload {
 }
 export interface PeerCandidatePayload {
     peerId: PeerId;
-    channelId: ChannelId;
 }
 export interface MessagePayload {
     targetId: PeerId;
```
package/dist/network/NetworkAdapter.d.ts.map
CHANGED
```diff
@@ -1 +1 @@
-{"version":3,"file":"NetworkAdapter.d.ts","sourceRoot":"","sources":["../../src/network/NetworkAdapter.ts"],"names":[],"mappings":"AAAA,OAAO,YAAY,MAAM,eAAe,CAAA;AACxC,OAAO,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,aAAa,CAAA;AAE/C,8BAAsB,cAAe,SAAQ,YAAY,CAAC,oBAAoB,CAAC;IAC7E,MAAM,CAAC,EAAE,MAAM,CAAA;IAEf,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE,MAAM,GAAG,IAAI;IAEpC,QAAQ,CAAC,WAAW,CAClB,MAAM,EAAE,MAAM,EACd,SAAS,EAAE,SAAS,EACpB,OAAO,EAAE,UAAU,EACnB,SAAS,EAAE,OAAO,GACjB,IAAI;IAEP,QAAQ,CAAC,IAAI,
+{"version":3,"file":"NetworkAdapter.d.ts","sourceRoot":"","sources":["../../src/network/NetworkAdapter.ts"],"names":[],"mappings":"AAAA,OAAO,YAAY,MAAM,eAAe,CAAA;AACxC,OAAO,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,aAAa,CAAA;AAE/C,8BAAsB,cAAe,SAAQ,YAAY,CAAC,oBAAoB,CAAC;IAC7E,MAAM,CAAC,EAAE,MAAM,CAAA;IAEf,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE,MAAM,GAAG,IAAI;IAEpC,QAAQ,CAAC,WAAW,CAClB,MAAM,EAAE,MAAM,EACd,SAAS,EAAE,SAAS,EACpB,OAAO,EAAE,UAAU,EACnB,SAAS,EAAE,OAAO,GACjB,IAAI;IAEP,QAAQ,CAAC,IAAI,IAAI,IAAI;IAErB,QAAQ,CAAC,KAAK,IAAI,IAAI;CACvB;AAID,MAAM,WAAW,oBAAoB;IACnC,IAAI,EAAE,CAAC,OAAO,EAAE,WAAW,KAAK,IAAI,CAAA;IACpC,KAAK,EAAE,MAAM,IAAI,CAAA;IACjB,gBAAgB,EAAE,CAAC,OAAO,EAAE,oBAAoB,KAAK,IAAI,CAAA;IACzD,mBAAmB,EAAE,CAAC,OAAO,EAAE,uBAAuB,KAAK,IAAI,CAAA;IAC/D,OAAO,EAAE,CAAC,OAAO,EAAE,qBAAqB,KAAK,IAAI,CAAA;CAClD;AAED,MAAM,WAAW,WAAW;IAC1B,OAAO,EAAE,cAAc,CAAA;CACxB;AAED,MAAM,WAAW,oBAAoB;IACnC,MAAM,EAAE,MAAM,CAAA;CACf;AAED,MAAM,WAAW,cAAc;IAC7B,QAAQ,EAAE,MAAM,CAAA;IAChB,SAAS,EAAE,SAAS,CAAA;IACpB,OAAO,EAAE,UAAU,CAAA;IACnB,SAAS,EAAE,OAAO,CAAA;CACnB;AAED,MAAM,WAAW,qBAAsB,SAAQ,cAAc;IAC3D,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,QAAQ,EAAE,MAAM,CAAA;CACjB;AAED,MAAM,WAAW,uBAAuB;IACtC,MAAM,EAAE,MAAM,CAAA;CACf"}
```
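With channels gone from the adapter surface, join and leave are now zero-argument and the peer-candidate payload no longer carries a channelId. A skeletal adapter against the new abstract signatures (a stub for illustration, not a reference implementation):

```ts
import { NetworkAdapter } from "./NetworkAdapter.js"
import type { ChannelId, PeerId } from "../types.js"

// Stub showing the 1.0.0-alpha.2 shape; the method bodies are placeholders.
class NullNetworkAdapter extends NetworkAdapter {
  connect(url?: string): void {
    // open the transport, then emit "peer-candidate" with a { peerId } payload
    // (no channelId anymore)
  }
  sendMessage(
    peerId: PeerId,
    channelId: ChannelId,
    message: Uint8Array,
    broadcast: boolean
  ): void {
    // deliver `message` to `peerId` (or to everyone when broadcast is true)
  }
  join(): void {} // was join(channelId) in 0.2.1
  leave(): void {} // was leave(channelId) in 0.2.1
}
```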
package/dist/network/NetworkSubsystem.d.ts
CHANGED
```diff
@@ -8,8 +8,8 @@ export declare class NetworkSubsystem extends EventEmitter<NetworkSubsystemEvent
     constructor(adapters: NetworkAdapter[], peerId?: PeerId);
     addNetworkAdapter(networkAdapter: NetworkAdapter): void;
     sendMessage(peerId: PeerId, channelId: ChannelId, message: Uint8Array, broadcast: boolean): void;
-    join(
-    leave(
+    join(): void;
+    leave(): void;
 }
 export interface NetworkSubsystemEvents {
     peer: (payload: PeerPayload) => void;
@@ -18,6 +18,5 @@ export interface NetworkSubsystemEvents {
 }
 export interface PeerPayload {
     peerId: PeerId;
-    channelId: ChannelId;
 }
 //# sourceMappingURL=NetworkSubsystem.d.ts.map
```
package/dist/network/NetworkSubsystem.d.ts.map
CHANGED
```diff
@@ -1 +1 @@
-{"version":3,"file":"NetworkSubsystem.d.ts","sourceRoot":"","sources":["../../src/network/NetworkSubsystem.ts"],"names":[],"mappings":"AAAA,OAAO,YAAY,MAAM,eAAe,CAAA;AACxC,OAAO,EACL,qBAAqB,EACrB,cAAc,EACd,uBAAuB,EACxB,MAAM,qBAAqB,CAAA;AAC5B,OAAO,EAAE,SAAS,EAAE,MAAM,EAAE,MAAM,aAAa,CAAA;AAI/C,qBAAa,gBAAiB,SAAQ,YAAY,CAAC,sBAAsB,CAAC;;
+{"version":3,"file":"NetworkSubsystem.d.ts","sourceRoot":"","sources":["../../src/network/NetworkSubsystem.ts"],"names":[],"mappings":"AAAA,OAAO,YAAY,MAAM,eAAe,CAAA;AACxC,OAAO,EACL,qBAAqB,EACrB,cAAc,EACd,uBAAuB,EACxB,MAAM,qBAAqB,CAAA;AAC5B,OAAO,EAAE,SAAS,EAAE,MAAM,EAAE,MAAM,aAAa,CAAA;AAI/C,qBAAa,gBAAiB,SAAQ,YAAY,CAAC,sBAAsB,CAAC;;IAKtE,OAAO,CAAC,QAAQ;IACT,MAAM;gBADL,QAAQ,EAAE,cAAc,EAAE,EAC3B,MAAM,SAAiB;IAOhC,iBAAiB,CAAC,cAAc,EAAE,cAAc;IAsDhD,WAAW,CACT,MAAM,EAAE,MAAM,EACd,SAAS,EAAE,SAAS,EACpB,OAAO,EAAE,UAAU,EACnB,SAAS,EAAE,OAAO;IAkBpB,IAAI;IAKJ,KAAK;CAIN;AAQD,MAAM,WAAW,sBAAsB;IACrC,IAAI,EAAE,CAAC,OAAO,EAAE,WAAW,KAAK,IAAI,CAAA;IACpC,mBAAmB,EAAE,CAAC,OAAO,EAAE,uBAAuB,KAAK,IAAI,CAAA;IAC/D,OAAO,EAAE,CAAC,OAAO,EAAE,qBAAqB,KAAK,IAAI,CAAA;CAClD;AAED,MAAM,WAAW,WAAW;IAC1B,MAAM,EAAE,MAAM,CAAA;CACf"}
```
package/dist/network/NetworkSubsystem.js
CHANGED
```diff
@@ -5,25 +5,23 @@ export class NetworkSubsystem extends EventEmitter {
     peerId;
     #log;
     #adaptersByPeer = {};
-    #channels;
     constructor(adapters, peerId = randomPeerId()) {
         super();
         this.adapters = adapters;
         this.peerId = peerId;
         this.#log = debug(`automerge-repo:network:${this.peerId}`);
-        this.#channels = [];
         this.adapters.forEach(a => this.addNetworkAdapter(a));
     }
     addNetworkAdapter(networkAdapter) {
         networkAdapter.connect(this.peerId);
-        networkAdapter.on("peer-candidate", ({ peerId
+        networkAdapter.on("peer-candidate", ({ peerId }) => {
             this.#log(`peer candidate: ${peerId} `);
             // TODO: This is where authentication would happen
             if (!this.#adaptersByPeer[peerId]) {
                 // TODO: handle losing a server here
                 this.#adaptersByPeer[peerId] = networkAdapter;
             }
-            this.emit("peer", { peerId
+            this.emit("peer", { peerId });
         });
         networkAdapter.on("peer-disconnected", ({ peerId }) => {
             this.#log(`peer disconnected: ${peerId} `);
@@ -54,7 +52,7 @@ export class NetworkSubsystem extends EventEmitter {
                 }
             });
         });
-
+        networkAdapter.join();
     }
     sendMessage(peerId, channelId, message, broadcast) {
         if (broadcast) {
@@ -73,15 +71,13 @@ export class NetworkSubsystem extends EventEmitter {
             peer.sendMessage(peerId, channelId, message, false);
         }
     }
-    join(
-    this.#log(`Joining
-    this
-    this.adapters.forEach(a => a.join(channelId));
+    join() {
+        this.#log(`Joining network`);
+        this.adapters.forEach(a => a.join());
     }
-    leave(
-    this.#log(`Leaving
-    this
-    this.adapters.forEach(a => a.leave(channelId));
+    leave() {
+        this.#log(`Leaving network`);
+        this.adapters.forEach(a => a.leave());
     }
 }
 function randomPeerId() {
```
package/dist/storage/StorageAdapter.d.ts
CHANGED
```diff
@@ -1,8 +1,12 @@
 export declare abstract class StorageAdapter {
-    abstract load(key:
-    abstract save(key:
-    abstract remove(key:
-    abstract loadRange(keyPrefix:
-
+    abstract load(key: StorageKey): Promise<Uint8Array | undefined>;
+    abstract save(key: StorageKey, data: Uint8Array): Promise<void>;
+    abstract remove(key: StorageKey): Promise<void>;
+    abstract loadRange(keyPrefix: StorageKey): Promise<{
+        key: StorageKey;
+        data: Uint8Array;
+    }[]>;
+    abstract removeRange(keyPrefix: StorageKey): Promise<void>;
 }
+export type StorageKey = string[];
 //# sourceMappingURL=StorageAdapter.d.ts.map
```
package/dist/storage/StorageAdapter.d.ts.map
CHANGED
```diff
@@ -1 +1 @@
-{"version":3,"file":"StorageAdapter.d.ts","sourceRoot":"","sources":["../../src/storage/StorageAdapter.ts"],"names":[],"mappings":"AAAA,8BAAsB,cAAc;IAMlC,QAAQ,CAAC,IAAI,CAAC,GAAG,EAAE,
+{"version":3,"file":"StorageAdapter.d.ts","sourceRoot":"","sources":["../../src/storage/StorageAdapter.ts"],"names":[],"mappings":"AAAA,8BAAsB,cAAc;IAMlC,QAAQ,CAAC,IAAI,CAAC,GAAG,EAAE,UAAU,GAAG,OAAO,CAAC,UAAU,GAAG,SAAS,CAAC;IAC/D,QAAQ,CAAC,IAAI,CAAC,GAAG,EAAE,UAAU,EAAE,IAAI,EAAE,UAAU,GAAG,OAAO,CAAC,IAAI,CAAC;IAC/D,QAAQ,CAAC,MAAM,CAAC,GAAG,EAAE,UAAU,GAAG,OAAO,CAAC,IAAI,CAAC;IAO/C,QAAQ,CAAC,SAAS,CAAC,SAAS,EAAE,UAAU,GAAG,OAAO,CAAC;QAAC,GAAG,EAAE,UAAU,CAAC;QAAC,IAAI,EAAE,UAAU,CAAA;KAAC,EAAE,CAAC;IACzF,QAAQ,CAAC,WAAW,CAAC,SAAS,EAAE,UAAU,GAAG,OAAO,CAAC,IAAI,CAAC;CAC3D;AAED,MAAM,MAAO,UAAU,GAAG,MAAM,EAAE,CAAA"}
```
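Storage keys are now StorageKey arrays (string[]) rather than flat strings, which is what allows the subsystem's [documentId, "incremental", hash] layout and the prefix-based loadRange/removeRange queries. A minimal in-memory adapter against the new contract, in the spirit of the DummyStorageAdapter in the test helpers (the naive key-joining here is for illustration only; a real adapter would need an unambiguous encoding):

```ts
import { StorageAdapter, type StorageKey } from "./StorageAdapter.js"

// Minimal in-memory adapter; keys are joined naively for illustration.
class InMemoryStorageAdapter extends StorageAdapter {
  #data = new Map<string, Uint8Array>()

  async load(key: StorageKey): Promise<Uint8Array | undefined> {
    return this.#data.get(key.join("."))
  }
  async save(key: StorageKey, data: Uint8Array): Promise<void> {
    this.#data.set(key.join("."), data)
  }
  async remove(key: StorageKey): Promise<void> {
    this.#data.delete(key.join("."))
  }
  async loadRange(keyPrefix: StorageKey): Promise<{ key: StorageKey; data: Uint8Array }[]> {
    const prefix = keyPrefix.join(".")
    return [...this.#data.entries()]
      .filter(([k]) => k.startsWith(prefix))
      .map(([k, data]) => ({ key: k.split("."), data }))
  }
  async removeRange(keyPrefix: StorageKey): Promise<void> {
    const prefix = keyPrefix.join(".")
    for (const k of [...this.#data.keys()]) {
      if (k.startsWith(prefix)) this.#data.delete(k)
    }
  }
}
```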
package/dist/storage/StorageSubsystem.d.ts
CHANGED
```diff
@@ -1,12 +1,12 @@
 import * as A from "@automerge/automerge";
-import { DocumentId } from "../types.js";
 import { StorageAdapter } from "./StorageAdapter.js";
+import { type DocumentId } from "../types.js";
+export type ChunkType = "snapshot" | "incremental";
 export declare class StorageSubsystem {
     #private;
     constructor(storageAdapter: StorageAdapter);
-
-
-    save(documentId: DocumentId, doc: A.Doc<unknown>): Promise<void>;
+    loadDoc(documentId: DocumentId): Promise<A.Doc<unknown> | null>;
+    saveDoc(documentId: DocumentId, doc: A.Doc<unknown>): Promise<void>;
     remove(documentId: DocumentId): Promise<void>;
 }
 //# sourceMappingURL=StorageSubsystem.d.ts.map
```
package/dist/storage/StorageSubsystem.d.ts.map
CHANGED
```diff
@@ -1 +1 @@
-{"version":3,"file":"StorageSubsystem.d.ts","sourceRoot":"","sources":["../../src/storage/StorageSubsystem.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,CAAC,MAAM,sBAAsB,CAAA;AACzC,OAAO,EAAE,
+{"version":3,"file":"StorageSubsystem.d.ts","sourceRoot":"","sources":["../../src/storage/StorageSubsystem.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,CAAC,MAAM,sBAAsB,CAAA;AACzC,OAAO,EAAE,cAAc,EAAc,MAAM,qBAAqB,CAAA;AAEhE,OAAO,EAAE,KAAK,UAAU,EAAE,MAAM,aAAa,CAAA;AAa7C,MAAM,MAAM,SAAS,GAAG,UAAU,GAAG,aAAa,CAAA;AAelD,qBAAa,gBAAgB;;gBAMf,cAAc,EAAE,cAAc;IAqDpC,OAAO,CAAC,UAAU,EAAE,UAAU,GAAG,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,IAAI,CAAC;IA0B/D,OAAO,CAAC,UAAU,EAAE,UAAU,EAAE,GAAG,EAAE,CAAC,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC;IAanE,MAAM,CAAC,UAAU,EAAE,UAAU;CAgCpC"}
```
package/dist/storage/StorageSubsystem.js
CHANGED
```diff
@@ -1,62 +1,140 @@
 import * as A from "@automerge/automerge";
-import { mergeArrays } from "../helpers/mergeArrays.js";
 import * as sha256 from "fast-sha256";
+import { mergeArrays } from "../helpers/mergeArrays.js";
+import debug from "debug";
+import { headsAreSame } from "../helpers/headsAreSame.js";
 function keyHash(binary) {
     const hash = sha256.hash(binary);
     const hashArray = Array.from(new Uint8Array(hash)); // convert buffer to byte array
     const hashHex = hashArray.map(b => ("00" + b.toString(16)).slice(-2)).join(""); // convert bytes to hex string
     return hashHex;
 }
+function headsHash(heads) {
+    let encoder = new TextEncoder();
+    let headsbinary = mergeArrays(heads.map(h => encoder.encode(h)));
+    return keyHash(headsbinary);
+}
 export class StorageSubsystem {
     #storageAdapter;
+    #chunkInfos = new Map();
+    #storedHeads = new Map();
+    #log = debug(`automerge-repo:storage-subsystem`);
     constructor(storageAdapter) {
         this.#storageAdapter = storageAdapter;
     }
     async #saveIncremental(documentId, doc) {
-        const binary = A.
+        const binary = A.saveSince(doc, this.#storedHeads.get(documentId) ?? []);
         if (binary && binary.length > 0) {
             const key = [documentId, "incremental", keyHash(binary)];
-
+            this.#log(`Saving incremental ${key} for document ${documentId}`);
+            await this.#storageAdapter.save(key, binary);
+            if (!this.#chunkInfos.has(documentId)) {
+                this.#chunkInfos.set(documentId, []);
+            }
+            this.#chunkInfos.get(documentId).push({
+                key,
+                type: "incremental",
+                size: binary.length,
+            });
+            this.#storedHeads.set(documentId, A.getHeads(doc));
         }
         else {
-            Promise.resolve();
+            return Promise.resolve();
         }
     }
-    async #saveTotal(documentId, doc) {
+    async #saveTotal(documentId, doc, sourceChunks) {
         const binary = A.save(doc);
-
-
-
-
-
-
-
-
-
-
-
-
-    }
-    async
-    const
-
-
-
-
-
+        const snapshotHash = headsHash(A.getHeads(doc));
+        const key = [documentId, "snapshot", snapshotHash];
+        const oldKeys = new Set(sourceChunks.map(c => c.key).filter(k => k[2] !== snapshotHash));
+        this.#log(`Saving snapshot ${key} for document ${documentId}`);
+        this.#log(`deleting old chunks ${Array.from(oldKeys)}`);
+        await this.#storageAdapter.save(key, binary);
+        for (const key of oldKeys) {
+            await this.#storageAdapter.remove(key);
+        }
+        const newChunkInfos = this.#chunkInfos.get(documentId)?.filter(c => !oldKeys.has(c.key)) ?? [];
+        newChunkInfos.push({ key, type: "snapshot", size: binary.length });
+        this.#chunkInfos.set(documentId, newChunkInfos);
+    }
+    async loadDoc(documentId) {
+        const loaded = await this.#storageAdapter.loadRange([documentId]);
+        const binaries = [];
+        const chunkInfos = [];
+        for (const chunk of loaded) {
+            const chunkType = chunkTypeFromKey(chunk.key);
+            if (chunkType == null) {
+                continue;
+            }
+            chunkInfos.push({
+                key: chunk.key,
+                type: chunkType,
+                size: chunk.data.length,
+            });
+            binaries.push(chunk.data);
+        }
+        this.#chunkInfos.set(documentId, chunkInfos);
+        const binary = mergeArrays(binaries);
+        if (binary.length === 0) {
+            return null;
+        }
+        const newDoc = A.loadIncremental(A.init(), binary);
+        this.#storedHeads.set(documentId, A.getHeads(newDoc));
+        return newDoc;
+    }
+    async saveDoc(documentId, doc) {
+        if (!this.#shouldSave(documentId, doc)) {
+            return;
+        }
+        let sourceChunks = this.#chunkInfos.get(documentId) ?? [];
+        if (this.#shouldCompact(sourceChunks)) {
+            this.#saveTotal(documentId, doc, sourceChunks);
         }
         else {
-
+            this.#saveIncremental(documentId, doc);
         }
+        this.#storedHeads.set(documentId, A.getHeads(doc));
     }
     async remove(documentId) {
         this.#storageAdapter.remove([documentId, "snapshot"]);
         this.#storageAdapter.removeRange([documentId, "incremental"]);
     }
-
-
-
-
+    #shouldSave(documentId, doc) {
+        const oldHeads = this.#storedHeads.get(documentId);
+        if (!oldHeads) {
+            return true;
+        }
+        const newHeads = A.getHeads(doc);
+        if (headsAreSame(newHeads, oldHeads)) {
+            return false;
+        }
+        return true;
+    }
+    #shouldCompact(sourceChunks) {
+        // compact if the incremental size is greater than the snapshot size
+        let snapshotSize = 0;
+        let incrementalSize = 0;
+        for (const chunk of sourceChunks) {
+            if (chunk.type === "snapshot") {
+                snapshotSize += chunk.size;
+            }
+            else {
+                incrementalSize += chunk.size;
+            }
+        }
+        return incrementalSize > snapshotSize;
+    }
+}
+function chunkTypeFromKey(key) {
+    if (key.length < 2) {
+        return null;
+    }
+    const chunkTypeStr = key[key.length - 2];
+    if (chunkTypeStr === "snapshot" || chunkTypeStr === "incremental") {
+        const chunkType = chunkTypeStr;
+        return chunkType;
+    }
+    else {
+        return null;
     }
 }
```
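The compaction policy above is simple: each save appends an incremental chunk until the accumulated incremental bytes exceed the last snapshot's bytes, at which point the next saveDoc routes through #saveTotal, writes one snapshot keyed by headsHash, and deletes the superseded chunks. A usage sketch (the adapter is the in-memory stand-in sketched earlier; the cast exists only to satisfy the branded DocumentId type):

```ts
import * as A from "@automerge/automerge"
import { StorageSubsystem } from "./StorageSubsystem.js"
import type { DocumentId } from "../types.js"

const storage = new StorageSubsystem(new InMemoryStorageAdapter()) // from the sketch above
const documentId = "doc-1" as unknown as DocumentId // placeholder id

let doc = A.init<{ n?: number }>()
for (let i = 0; i < 10; i++) {
  doc = A.change(doc, d => {
    d.n = i
  })
  // Appends a chunk keyed [documentId, "incremental", keyHash(bytes)] until
  // incremental bytes outweigh the snapshot's, then compacts to one snapshot.
  await storage.saveDoc(documentId, doc)
}

// On reload, loadRange([documentId]) gathers every chunk and
// A.loadIncremental merges them back into a single document.
const reloaded = await storage.loadDoc(documentId)
```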
package/dist/synchronizer/CollectionSynchronizer.d.ts
CHANGED
```diff
@@ -1,5 +1,5 @@
 import { DocCollection } from "../DocCollection.js";
-import { ChannelId,
+import { ChannelId, PeerId, DocumentId } from "../types.js";
 import { Synchronizer } from "./Synchronizer.js";
 /** A CollectionSynchronizer is responsible for synchronizing a DocCollection with peers. */
 export declare class CollectionSynchronizer extends Synchronizer {
```
package/dist/synchronizer/CollectionSynchronizer.d.ts.map
CHANGED
```diff
@@ -1 +1 @@
-{"version":3,"file":"CollectionSynchronizer.d.ts","sourceRoot":"","sources":["../../src/synchronizer/CollectionSynchronizer.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,aAAa,EAAE,MAAM,qBAAqB,CAAA;
+{"version":3,"file":"CollectionSynchronizer.d.ts","sourceRoot":"","sources":["../../src/synchronizer/CollectionSynchronizer.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,aAAa,EAAE,MAAM,qBAAqB,CAAA;AAOnD,OAAO,EAAE,SAAS,EAAoB,MAAM,EAAE,UAAU,EAAE,MAAM,aAAa,CAAA;AAE7E,OAAO,EAAE,YAAY,EAAE,MAAM,mBAAmB,CAAA;AAKhD,4FAA4F;AAC5F,qBAAa,sBAAuB,SAAQ,YAAY;;IAO1C,OAAO,CAAC,IAAI;gBAAJ,IAAI,EAAE,aAAa;IAiCvC;;;OAGG;IACG,kBAAkB,CACtB,MAAM,EAAE,MAAM,EACd,SAAS,EAAE,SAAS,EACpB,OAAO,EAAE,UAAU;IAmBrB;;OAEG;IACH,WAAW,CAAC,UAAU,EAAE,UAAU;IAUlC,cAAc,CAAC,UAAU,EAAE,UAAU;IAIrC,2DAA2D;IAC3D,OAAO,CAAC,MAAM,EAAE,MAAM;IAWtB,uDAAuD;IACvD,UAAU,CAAC,MAAM,EAAE,MAAM;CAQ1B"}
```
|