@automerge/automerge-repo 0.1.4 → 0.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -18,7 +18,7 @@ Other packages in this monorepo include:
 
  #### Storage adapters
 
- - [@automerge/automerge-repo-storage-localforage](/packages/automerge-repo-storage-localforage/): A storage
+ - [@automerge/automerge-repo-storage-indexeddb](/packages/automerge-repo-storage-indexeddb/): A storage
  adapter to persist data in a browser
  - [@automerge/automerge-repo-storage-nodefs](/packages/automerge-repo-storage-nodefs/): A storage adapter to
  write changes to the filesystem
@@ -95,13 +95,13 @@ Multiple network adapters (even of the same type) can be added to a repo, even a
 
  A repo currently only supports a single storage adapter, and it must be provided at creation.
 
- Here is an example of creating a repo with a localforage storage adapter and a broadcast channel
+ Here is an example of creating a repo with an indexeddb storage adapter and a broadcast channel
  network adapter:
 
  ```ts
  const repo = new Repo({
  network: [new BroadcastChannelNetworkAdapter()],
- storage: new LocalForageStorageAdapter(),
+ storage: new IndexedDBStorageAdapter(),
  sharePolicy: async (peerId: PeerId, documentId: DocumentId) => true // this is the default
  })
  ```
@@ -136,7 +136,7 @@ yarn create vite
 
  cd hello-automerge-repo
  yarn
- yarn add @automerge/automerge @automerge/automerge-repo-react-hooks @automerge/automerge-repo-network-broadcastchannel @automerge/automerge-repo-storage-localforage vite-plugin-wasm vite-plugin-top-level-await
+ yarn add @automerge/automerge @automerge/automerge-repo-react-hooks @automerge/automerge-repo-network-broadcastchannel @automerge/automerge-repo-storage-indexeddb vite-plugin-wasm vite-plugin-top-level-await
  ```
 
  Edit the `vite.config.ts`. (This is all needed to work around packaging hiccups due to WASM. We look
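The hunk context cuts off before the config itself; for reference, a minimal `vite.config.ts` using the two plugins installed above typically looks like the sketch below (based on the plugins' standard usage, not the exact file from the README):

```ts
import { defineConfig } from "vite"
import wasm from "vite-plugin-wasm"
import topLevelAwait from "vite-plugin-top-level-await"

// Both plugins exist to let the automerge WASM module load under Vite:
// wasm() handles the .wasm import itself, and topLevelAwait() lets the
// module finish initializing before the first render.
export default defineConfig({
  plugins: [wasm(), topLevelAwait()],
})
```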
@@ -187,12 +187,12 @@ import ReactDOM from "react-dom/client"
  import App from "./App.js"
  import { Repo } from "@automerge/automerge-repo"
  import { BroadcastChannelNetworkAdapter } from "@automerge/automerge-repo-network-broadcastchannel"
- import { LocalForageStorageAdapter } from "@automerge/automerge-repo-storage-localforage"
+ import { IndexedDBStorageAdapter } from "@automerge/automerge-repo-storage-indexeddb"
  import { RepoContext } from "@automerge/automerge-repo-react-hooks"
 
  const repo = new Repo({
  network: [new BroadcastChannelNetworkAdapter()],
- storage: new LocalForageStorageAdapter(),
+ storage: new IndexedDBStorageAdapter(),
  })
 
  let rootDocId = localStorage.rootDocId
@@ -274,7 +274,7 @@ const repo = new Repo({
  new BroadcastChannelNetworkAdapter(),
  new BrowserWebSocketClientAdapter("ws://localhost:3030"), // <-- add this line
  ],
- storage: new LocalForageStorageAdapter(),
+ storage: new IndexedDBStorageAdapter(),
  })
 
  // ...
package/dist/DocUrl.d.ts ADDED
@@ -0,0 +1,20 @@
+ /// <reference types="node" />
+ export declare const linkForDocumentId: (id: any) => string;
+ export declare const documentIdFromShareLink: (link: any) => any;
+ export declare const isValidShareLink: (str: any) => boolean;
+ export declare const parts: (str: any) => {
+ key: any;
+ nonCrc: any;
+ crc: any;
+ };
+ export declare const encodedParts: (str: any) => {
+ nonCrc: any;
+ key: any;
+ crc: any;
+ };
+ export declare const withCrc: (str: any) => string;
+ export declare const encode: (str: any) => any;
+ export declare const decode: (str: any) => any;
+ export declare const hexToBuffer: (key: any) => Buffer;
+ export declare const bufferToHex: (key: any) => any;
+ //# sourceMappingURL=DocUrl.d.ts.map
package/dist/DocUrl.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"DocUrl.d.ts","sourceRoot":"","sources":["../src/DocUrl.ts"],"names":[],"mappings":";AAIA,eAAO,MAAM,iBAAiB,qBAA6C,CAAA;AAE3E,eAAO,MAAM,uBAAuB,oBAInC,CAAA;AAED,eAAO,MAAM,gBAAgB,uBAG5B,CAAA;AAED,eAAO,MAAM,KAAK;;;;CAQjB,CAAA;AAED,eAAO,MAAM,YAAY;;;;CAIxB,CAAA;AAED,eAAO,MAAM,OAAO,sBAAwC,CAAA;AAE5D,eAAO,MAAM,MAAM,mBAAyC,CAAA;AAE5D,eAAO,MAAM,MAAM,mBAAyC,CAAA;AAE5D,eAAO,MAAM,WAAW,sBAC8B,CAAA;AAEtD,eAAO,MAAM,WAAW,mBAC0B,CAAA"}
package/dist/DocUrl.js ADDED
@@ -0,0 +1,28 @@
+ import Base58 from "bs58";
+ import { crc16 } from "js-crc";
+ export const linkForDocumentId = id => withCrc("automerge://" + encode(id));
+ export const documentIdFromShareLink = link => {
+ const { key } = parts(link);
+ return key;
+ };
+ export const isValidShareLink = str => {
+ const { nonCrc, crc } = parts(str);
+ return Boolean(nonCrc) && Boolean(crc) && crc16(nonCrc) === crc;
+ };
+ export const parts = str => {
+ const p = encodedParts(str);
+ return {
+ key: p.key && decode(p.key),
+ nonCrc: p.nonCrc,
+ crc: p.crc && decode(p.crc),
+ };
+ };
+ export const encodedParts = str => {
+ const [m, nonCrc, key, crc] = str.match(/^(pxlpshr:\/\/(\w+))\/(\w{1,4})$/) || [];
+ return { nonCrc, key, crc };
+ };
+ export const withCrc = str => str + `/` + encode(crc16(str));
+ export const encode = str => Base58.encode(hexToBuffer(str));
+ export const decode = str => bufferToHex(Base58.decode(str));
+ export const hexToBuffer = key => Buffer.isBuffer(key) ? key : Buffer.from(key, "hex");
+ export const bufferToHex = key => Buffer.isBuffer(key) ? key.toString("hex") : key;
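These helpers build share links of the form `automerge://<Base58(id bytes)>/<Base58(crc16 bytes)>`. Note that as published, `encodedParts` matches a `pxlpshr://` scheme while `linkForDocumentId` emits `automerge://`, so a freshly generated link will not validate or parse back. A quick sketch of the round trip (the import path and the id value are assumptions for illustration):

```ts
import {
  linkForDocumentId,
  isValidShareLink,
  documentIdFromShareLink,
} from "@automerge/automerge-repo/dist/DocUrl.js" // illustrative path

const docId = "0123456789abcdef" // hypothetical hex document id
const link = linkForDocumentId(docId)
// => "automerge://<Base58 of the id bytes>/<Base58 of the crc16>"

// Because encodedParts() only matches "pxlpshr://" links as shipped:
isValidShareLink(link)        // => false
documentIdFromShareLink(link) // => undefined
```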
package/dist/Repo.js CHANGED
@@ -23,7 +23,7 @@ export class Repo extends DocCollection {
  // Save when the document changes
  handle.on("change", async ({ handle }) => {
  const doc = await handle.value();
- storageSubsystem.save(handle.documentId, doc);
+ await storageSubsystem.save(handle.documentId, doc);
  });
  // Try to load from disk
  const binary = await storageSubsystem.loadBinary(handle.documentId);
package/dist/storage/StorageAdapter.d.ts CHANGED
@@ -1,6 +1,8 @@
  export declare abstract class StorageAdapter {
- abstract load(docId: string): Promise<Uint8Array | null>;
- abstract save(docId: string, data: Uint8Array): void;
- abstract remove(docId: string): void;
+ abstract load(key: string[]): Promise<Uint8Array | undefined>;
+ abstract save(key: string[], data: Uint8Array): Promise<void>;
+ abstract remove(key: string[]): Promise<void>;
+ abstract loadRange(keyPrefix: string[]): Promise<Uint8Array[]>;
+ abstract removeRange(keyPrefix: string[]): Promise<void>;
  }
  //# sourceMappingURL=StorageAdapter.d.ts.map
package/dist/storage/StorageAdapter.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"StorageAdapter.d.ts","sourceRoot":"","sources":["../../src/storage/StorageAdapter.ts"],"names":[],"mappings":"AAAA,8BAAsB,cAAc;IAClC,QAAQ,CAAC,IAAI,CAAC,KAAK,EAAE,MAAM,GAAG,OAAO,CAAC,UAAU,GAAG,IAAI,CAAC;IACxD,QAAQ,CAAC,IAAI,CAAC,KAAK,EAAE,MAAM,EAAE,IAAI,EAAE,UAAU,GAAG,IAAI;IACpD,QAAQ,CAAC,MAAM,CAAC,KAAK,EAAE,MAAM,GAAG,IAAI;CACrC"}
+ {"version":3,"file":"StorageAdapter.d.ts","sourceRoot":"","sources":["../../src/storage/StorageAdapter.ts"],"names":[],"mappings":"AAAA,8BAAsB,cAAc;IAMlC,QAAQ,CAAC,IAAI,CAAC,GAAG,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,UAAU,GAAG,SAAS,CAAC;IAC7D,QAAQ,CAAC,IAAI,CAAC,GAAG,EAAE,MAAM,EAAE,EAAE,IAAI,EAAE,UAAU,GAAG,OAAO,CAAC,IAAI,CAAC;IAC7D,QAAQ,CAAC,MAAM,CAAC,GAAG,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC;IAO7C,QAAQ,CAAC,SAAS,CAAC,SAAS,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,UAAU,EAAE,CAAC;IAC9D,QAAQ,CAAC,WAAW,CAAC,SAAS,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC;CACzD"}
package/dist/storage/StorageSubsystem.d.ts CHANGED
@@ -6,7 +6,7 @@ export declare class StorageSubsystem {
  constructor(storageAdapter: StorageAdapter);
  loadBinary(documentId: DocumentId): Promise<Uint8Array>;
  load<T>(documentId: DocumentId, prevDoc?: A.Doc<T>): Promise<A.Doc<T>>;
- save(documentId: DocumentId, doc: A.Doc<unknown>): void;
- remove(documentId: DocumentId): void;
+ save(documentId: DocumentId, doc: A.Doc<unknown>): Promise<void>;
+ remove(documentId: DocumentId): Promise<void>;
  }
  //# sourceMappingURL=StorageSubsystem.d.ts.map
package/dist/storage/StorageSubsystem.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"StorageSubsystem.d.ts","sourceRoot":"","sources":["../../src/storage/StorageSubsystem.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,CAAC,MAAM,sBAAsB,CAAA;AACzC,OAAO,EAAE,UAAU,EAAE,MAAM,aAAa,CAAA;AAExC,OAAO,EAAE,cAAc,EAAE,MAAM,qBAAqB,CAAA;AAEpD,qBAAa,gBAAgB;;gBAIf,cAAc,EAAE,cAAc;IA+BpC,UAAU,CAAC,UAAU,EAAE,UAAU,GAAG,OAAO,CAAC,UAAU,CAAC;IAqBvD,IAAI,CAAC,CAAC,EACV,UAAU,EAAE,UAAU,EACtB,OAAO,GAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAe,GAC9B,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;IAMpB,IAAI,CAAC,UAAU,EAAE,UAAU,EAAE,GAAG,EAAE,CAAC,CAAC,GAAG,CAAC,OAAO,CAAC;IAQhD,MAAM,CAAC,UAAU,EAAE,UAAU;CAY9B"}
+ {"version":3,"file":"StorageSubsystem.d.ts","sourceRoot":"","sources":["../../src/storage/StorageSubsystem.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,CAAC,MAAM,sBAAsB,CAAA;AACzC,OAAO,EAAE,UAAU,EAAE,MAAM,aAAa,CAAA;AACxC,OAAO,EAAE,cAAc,EAAE,MAAM,qBAAqB,CAAA;AAWpD,qBAAa,gBAAgB;;gBAGf,cAAc,EAAE,cAAc;IAwBpC,UAAU,CAAC,UAAU,EAAE,UAAU,GAAG,OAAO,CAAC,UAAU,CAAC;IAUvD,IAAI,CAAC,CAAC,EACV,UAAU,EAAE,UAAU,EACtB,OAAO,GAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAe,GAC9B,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;IAMd,IAAI,CAAC,UAAU,EAAE,UAAU,EAAE,GAAG,EAAE,CAAC,CAAC,GAAG,CAAC,OAAO,CAAC;IAQhD,MAAM,CAAC,UAAU,EAAE,UAAU;CAUpC"}
package/dist/storage/StorageSubsystem.js CHANGED
@@ -1,65 +1,62 @@
  import * as A from "@automerge/automerge";
  import { mergeArrays } from "../helpers/mergeArrays.js";
+ import * as sha256 from "fast-sha256";
+ function keyHash(binary) {
+ const hash = sha256.hash(binary);
+ const hashArray = Array.from(new Uint8Array(hash)); // convert buffer to byte array
+ const hashHex = hashArray.map(b => ("00" + b.toString(16)).slice(-2)).join(""); // convert bytes to hex string
+ return hashHex;
+ }
  export class StorageSubsystem {
  #storageAdapter;
- #changeCount = {};
  constructor(storageAdapter) {
  this.#storageAdapter = storageAdapter;
  }
- #saveIncremental(documentId, doc) {
+ async #saveIncremental(documentId, doc) {
  const binary = A.saveIncremental(doc);
  if (binary && binary.length > 0) {
- if (!this.#changeCount[documentId]) {
- this.#changeCount[documentId] = 0;
- }
- this.#storageAdapter.save(`${documentId}.incremental.${this.#changeCount[documentId]}`, binary);
- this.#changeCount[documentId]++;
+ const key = [documentId, "incremental", keyHash(binary)];
+ return await this.#storageAdapter.save(key, binary);
+ }
+ else {
+ Promise.resolve();
  }
  }
- #saveTotal(documentId, doc) {
+ async #saveTotal(documentId, doc) {
  const binary = A.save(doc);
- this.#storageAdapter.save(`${documentId}.snapshot`, binary);
- for (let i = 0; i < this.#changeCount[documentId]; i++) {
- this.#storageAdapter.remove(`${documentId}.incremental.${i}`);
- }
- this.#changeCount[documentId] = 0;
+ // TODO: this is still racy if two nodes are both writing to the store
+ await this.#storageAdapter.save([documentId, "snapshot"], binary);
+ // don't start deleting the incremental keys until save is done!
+ return this.#storageAdapter.removeRange([documentId, "incremental"]);
  }
  async loadBinary(documentId) {
- const result = [];
- let binary = await this.#storageAdapter.load(`${documentId}.snapshot`);
- if (binary && binary.length > 0) {
- result.push(binary);
- }
- let index = 0;
- while ((binary = await this.#storageAdapter.load(`${documentId}.incremental.${index}`))) {
- this.#changeCount[documentId] = index + 1;
- if (binary && binary.length > 0)
- result.push(binary);
- index += 1;
- }
- return mergeArrays(result);
+ // it would probably be best to ensure .snapshot comes back first
+ // prevent the race condition with saveIncremental
+ const binaries = await this.#storageAdapter.loadRange([
+ documentId,
+ ]);
+ return mergeArrays(binaries);
  }
  async load(documentId, prevDoc = A.init()) {
  const doc = A.loadIncremental(prevDoc, await this.loadBinary(documentId));
  A.saveIncremental(doc);
  return doc;
  }
- save(documentId, doc) {
+ async save(documentId, doc) {
  if (this.#shouldCompact(documentId)) {
- this.#saveTotal(documentId, doc);
+ return this.#saveTotal(documentId, doc);
  }
  else {
- this.#saveIncremental(documentId, doc);
+ return this.#saveIncremental(documentId, doc);
  }
  }
- remove(documentId) {
- this.#storageAdapter.remove(`${documentId}.snapshot`);
- for (let i = 0; i < this.#changeCount[documentId]; i++) {
- this.#storageAdapter.remove(`${documentId}.incremental.${i}`);
- }
+ async remove(documentId) {
+ this.#storageAdapter.remove([documentId, "snapshot"]);
+ this.#storageAdapter.removeRange([documentId, "incremental"]);
  }
  // TODO: make this, you know, good.
+ // this is probably fine
  #shouldCompact(documentId) {
- return this.#changeCount[documentId] >= 20;
+ return Math.random() < 0.05; // this.#changeCount[documentId] >= 20
  }
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@automerge/automerge-repo",
- "version": "0.1.4",
+ "version": "0.2.1",
  "description": "A repository object to manage a collection of automerge documents",
  "repository": "https://github.com/automerge/automerge-repo",
  "author": "Peter van Hardenberg <pvh@pvh.ca>",
@@ -35,6 +35,7 @@
  "cbor-x": "^1.3.0",
  "debug": "^4.3.4",
  "eventemitter3": "^4.0.7",
+ "fast-sha256": "^1.3.0",
  "tiny-typed-emitter": "^2.1.0",
  "ts-node": "^10.9.1",
  "uuid": "^8.3.2",
@@ -61,5 +62,5 @@
  "publishConfig": {
  "access": "public"
  },
- "gitHead": "3c94ba251df7a25722fb7a15646453e58e912dad"
+ "gitHead": "7f048ecaa62eb1246f54773c6b10bada0767497b"
  }
package/src/Repo.ts CHANGED
@@ -33,7 +33,7 @@ export class Repo extends DocCollection {
  // Save when the document changes
  handle.on("change", async ({ handle }) => {
  const doc = await handle.value()
- storageSubsystem.save(handle.documentId, doc)
+ await storageSubsystem.save(handle.documentId, doc)
  })
 
  // Try to load from disk
package/src/storage/StorageAdapter.ts CHANGED
@@ -1,5 +1,18 @@
  export abstract class StorageAdapter {
- abstract load(docId: string): Promise<Uint8Array | null>
- abstract save(docId: string, data: Uint8Array): void
- abstract remove(docId: string): void
+ // load, store, or remove a single binary blob based on an array key
+ // automerge-repo mostly uses keys in the following form:
+ // [documentId, "snapshot"] or [documentId, "incremental", "0"]
+ // but the storage adapter is agnostic to the meaning of the key
+ // and we expect to store other data in the future such as syncstates
+ abstract load(key: string[]): Promise<Uint8Array | undefined>
+ abstract save(key: string[], data: Uint8Array): Promise<void>
+ abstract remove(key: string[]): Promise<void>
+
+ // the keyprefix will match any key that starts with the given array
+ // for example, [documentId, "incremental"] will match all incremental saves
+ // or [documentId] will match all data for a given document
+ // be careful! this will also match [documentId, "syncState"]!
+ // (we aren't using this yet but keep it in mind.)
+ abstract loadRange(keyPrefix: string[]): Promise<Uint8Array[]>
+ abstract removeRange(keyPrefix: string[]): Promise<void>
  }
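For reference, a minimal in-memory implementation of this contract might look like the sketch below (the class name is hypothetical; the package's own `DummyStorageAdapter` test helper, shown at the end of this diff, takes the same key-joining approach):

```ts
import { StorageAdapter } from "@automerge/automerge-repo"

// Hypothetical in-memory adapter, illustrative only. Keys are joined with a
// separator so that string-prefix matching on the joined key mirrors the
// array-prefix matching described in the comments above.
export class InMemoryStorageAdapter implements StorageAdapter {
  #data = new Map<string, Uint8Array>()

  #toString(key: string[]) {
    return key.join(".")
  }

  async load(key: string[]) {
    return this.#data.get(this.#toString(key))
  }

  async save(key: string[], data: Uint8Array) {
    this.#data.set(this.#toString(key), data)
  }

  async remove(key: string[]) {
    this.#data.delete(this.#toString(key))
  }

  async loadRange(keyPrefix: string[]) {
    const prefix = this.#toString(keyPrefix)
    return [...this.#data.entries()]
      .filter(([k]) => k.startsWith(prefix))
      .map(([, v]) => v)
  }

  async removeRange(keyPrefix: string[]) {
    const prefix = this.#toString(keyPrefix)
    for (const k of [...this.#data.keys()]) {
      if (k.startsWith(prefix)) this.#data.delete(k)
    }
  }
}
```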
package/src/storage/StorageSubsystem.ts CHANGED
@@ -1,62 +1,51 @@
  import * as A from "@automerge/automerge"
  import { DocumentId } from "../types.js"
- import { mergeArrays } from "../helpers/mergeArrays.js"
  import { StorageAdapter } from "./StorageAdapter.js"
+ import { mergeArrays } from "../helpers/mergeArrays.js"
+ import * as sha256 from "fast-sha256"
+
+ function keyHash(binary: Uint8Array) {
+ const hash = sha256.hash(binary)
+ const hashArray = Array.from(new Uint8Array(hash)) // convert buffer to byte array
+ const hashHex = hashArray.map(b => ("00" + b.toString(16)).slice(-2)).join("") // convert bytes to hex string
+ return hashHex
+ }
 
  export class StorageSubsystem {
  #storageAdapter: StorageAdapter
- #changeCount: Record<DocumentId, number> = {}
 
  constructor(storageAdapter: StorageAdapter) {
  this.#storageAdapter = storageAdapter
  }
 
- #saveIncremental(documentId: DocumentId, doc: A.Doc<unknown>) {
+ async #saveIncremental(documentId: DocumentId, doc: A.Doc<unknown>) {
  const binary = A.saveIncremental(doc)
  if (binary && binary.length > 0) {
- if (!this.#changeCount[documentId]) {
- this.#changeCount[documentId] = 0
- }
-
- this.#storageAdapter.save(
- `${documentId}.incremental.${this.#changeCount[documentId]}`,
- binary
- )
-
- this.#changeCount[documentId]++
+ const key = [documentId, "incremental", keyHash(binary)]
+ return await this.#storageAdapter.save(key, binary)
+ } else {
+ Promise.resolve()
  }
  }
 
- #saveTotal(documentId: DocumentId, doc: A.Doc<unknown>) {
+ async #saveTotal(documentId: DocumentId, doc: A.Doc<unknown>) {
  const binary = A.save(doc)
- this.#storageAdapter.save(`${documentId}.snapshot`, binary)
 
- for (let i = 0; i < this.#changeCount[documentId]; i++) {
- this.#storageAdapter.remove(`${documentId}.incremental.${i}`)
- }
+ // TODO: this is still racy if two nodes are both writing to the store
+ await this.#storageAdapter.save([documentId, "snapshot"], binary)
 
- this.#changeCount[documentId] = 0
+ // don't start deleting the incremental keys until save is done!
+ return this.#storageAdapter.removeRange([documentId, "incremental"])
  }
 
  async loadBinary(documentId: DocumentId): Promise<Uint8Array> {
- const result = []
- let binary = await this.#storageAdapter.load(`${documentId}.snapshot`)
- if (binary && binary.length > 0) {
- result.push(binary)
- }
+ // it would probably be best to ensure .snapshot comes back first
+ // prevent the race condition with saveIncremental
+ const binaries: Uint8Array[] = await this.#storageAdapter.loadRange([
+ documentId,
+ ])
 
- let index = 0
- while (
- (binary = await this.#storageAdapter.load(
- `${documentId}.incremental.${index}`
- ))
- ) {
- this.#changeCount[documentId] = index + 1
- if (binary && binary.length > 0) result.push(binary)
- index += 1
- }
-
- return mergeArrays(result)
+ return mergeArrays(binaries)
  }
 
  async load<T>(
@@ -68,24 +57,22 @@ export class StorageSubsystem {
  return doc
  }
 
- save(documentId: DocumentId, doc: A.Doc<unknown>) {
+ async save(documentId: DocumentId, doc: A.Doc<unknown>) {
  if (this.#shouldCompact(documentId)) {
- this.#saveTotal(documentId, doc)
+ return this.#saveTotal(documentId, doc)
  } else {
- this.#saveIncremental(documentId, doc)
+ return this.#saveIncremental(documentId, doc)
  }
  }
 
- remove(documentId: DocumentId) {
- this.#storageAdapter.remove(`${documentId}.snapshot`)
-
- for (let i = 0; i < this.#changeCount[documentId]; i++) {
- this.#storageAdapter.remove(`${documentId}.incremental.${i}`)
- }
+ async remove(documentId: DocumentId) {
+ this.#storageAdapter.remove([documentId, "snapshot"])
+ this.#storageAdapter.removeRange([documentId, "incremental"])
  }
 
  // TODO: make this, you know, good.
+ // this is probably fine
  #shouldCompact(documentId: DocumentId) {
- return this.#changeCount[documentId] >= 20
+ return Math.random() < 0.05 // this.#changeCount[documentId] >= 20
  }
  }
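The net effect of this rewrite: incremental saves are now content-addressed (keyed by a SHA-256 of the chunk) rather than counter-indexed, which removes the in-memory `#changeCount` bookkeeping and makes re-saving an identical chunk idempotent, and compaction fires on roughly 5% of saves instead of every 20 changes. A sketch of the resulting key space, reusing the hypothetical `InMemoryStorageAdapter` from above (assumes `StorageSubsystem` and `DocumentId` are importable from the package root, as in the test files):

```ts
import * as A from "@automerge/automerge"
import { DocumentId, StorageSubsystem } from "@automerge/automerge-repo" // assumed exports

const storage = new StorageSubsystem(new InMemoryStorageAdapter())

let doc = A.init<{ count?: number }>()
doc = A.change(doc, d => {
  d.count = 1
})
await storage.save("some-doc-id" as DocumentId, doc)

// ~95% of saves append a content-addressed chunk under
//   ["some-doc-id", "incremental", "<sha256 hex of the chunk>"]
// ~5% of saves compact: the full document is written to
//   ["some-doc-id", "snapshot"], then the "incremental" range is removed.
// loadBinary() later does loadRange(["some-doc-id"]) and merges whatever
// chunks come back.
```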
@@ -31,7 +31,7 @@ describe("StorageSubsystem", () => {
 
  // save it to storage
  const key = "test-key" as DocumentId
- storage.save(key, doc)
+ await storage.save(key, doc)
 
  // reload it from storage
  const reloadedDoc = await storage.load<TestDoc>(key)
@@ -69,10 +69,6 @@ describe("StorageSubsystem", () => {
  storage2.save(key, changedDoc)
 
  // check that the storage adapter contains the correct keys
- assert(adapter.keys().some(k => k.endsWith("1")))
-
- // check that the last incrementalSave is not a full save
- const bin = await adapter.load((key + ".incremental.1") as DocumentId)
- assert.throws(() => A.load(bin!))
+ assert(adapter.keys().some(k => k.startsWith("test-key.incremental.")))
  })
  })
@@ -1,20 +1,36 @@
- import { DocumentId, StorageAdapter } from "../../src"
+ import { StorageAdapter } from "../../src"
 
  export class DummyStorageAdapter implements StorageAdapter {
- #data: Record<DocumentId, Uint8Array> = {}
+ #data: Record<string, Uint8Array> = {}
 
- load(docId: DocumentId) {
- return new Promise<Uint8Array | null>(resolve =>
- resolve(this.#data[docId] || null)
- )
+ #keyToString(key: string[]) {
+ return key.join(".")
  }
 
- save(docId: DocumentId, binary: Uint8Array) {
- this.#data[docId] = binary
+ async loadRange(keyPrefix: string[]): Promise<Uint8Array[]> {
+ const range = Object.entries(this.#data)
+ .filter(([key, _]) => key.startsWith(this.#keyToString(keyPrefix)))
+ .map(([_, value]) => value)
+ return Promise.resolve(range)
  }
 
- remove(docId: DocumentId) {
- delete this.#data[docId]
+ async removeRange(keyPrefix: string[]): Promise<void> {
+ Object.entries(this.#data)
+ .filter(([key, _]) => key.startsWith(this.#keyToString(keyPrefix)))
+ .forEach(([key, _]) => delete this.#data[key])
+ }
+
+ async load(key: string[]): Promise<Uint8Array | undefined> {
+ return new Promise(resolve => resolve(this.#data[this.#keyToString(key)]))
+ }
+
+ async save(key: string[], binary: Uint8Array) {
+ this.#data[this.#keyToString(key)] = binary
+ return Promise.resolve()
+ }
+
+ async remove(key: string[]) {
+ delete this.#data[this.#keyToString(key)]
  }
 
  keys() {