@automerge/automerge-repo 1.0.0-alpha.0 → 1.0.0-alpha.2

This diff shows the changes between these two published package versions as they appear in the public npm registry. It is provided for informational purposes only.
package/dist/DocHandle.d.ts CHANGED
@@ -50,8 +50,6 @@ export declare class DocHandle<T>//
   * @returns the current document, or undefined if the document is not ready
   */
  docSync(): A.Doc<T> | undefined;
- /** `load` is called by the repo when the document is found in storage */
- load(binary: Uint8Array): void;
  /** `update` is called by the repo when we receive changes from the network */
  update(callback: (doc: A.Doc<T>) => A.Doc<T>): void;
  /** `change` is called by the repo when the document is changed locally */
@@ -100,7 +98,6 @@ export declare const HandleState: {
  export type HandleState = (typeof HandleState)[keyof typeof HandleState];
  export declare const Event: {
  readonly CREATE: "CREATE";
- readonly LOAD: "LOAD";
  readonly FIND: "FIND";
  readonly REQUEST: "REQUEST";
  readonly REQUEST_COMPLETE: "REQUEST_COMPLETE";
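The upshot of the two hunks above: `load(binary)` and the `LOAD` event are gone, and storage hydration now flows through the same `update` path as network changes (see `package/dist/Repo.js` below). A minimal migration sketch; `MyDoc` and `hydrateFromStorage` are hypothetical names, not part of the package:

```ts
import * as A from "@automerge/automerge"
import { DocHandle } from "@automerge/automerge-repo"

type MyDoc = { foo: string } // hypothetical document shape

// alpha.0: handle.load(binary) pushed raw storage bytes into the handle.
// alpha.2: materialize the doc first, then swap it in via `update`.
async function hydrateFromStorage(
  handle: DocHandle<MyDoc>,
  loadDoc: () => Promise<A.Doc<unknown> | null> // e.g. StorageSubsystem.loadDoc, bound to a documentId
) {
  const loadedDoc = await loadDoc()
  if (loadedDoc) {
    handle.update(() => loadedDoc as A.Doc<MyDoc>)
  }
}
```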
package/dist/DocHandle.d.ts.map CHANGED
@@ -1 +1 @@
(generated source map for DocHandle.d.ts regenerated; mappings not reproduced here)
package/dist/DocHandle.js CHANGED
@@ -54,8 +54,8 @@ export class DocHandle//
  },
  loading: {
  on: {
- // LOAD is called by the Repo if the document is found in storage
- LOAD: { actions: "onLoad", target: READY },
+ // UPDATE is called by the Repo if the document is found in storage
+ UPDATE: { actions: "onUpdate", target: READY },
  // REQUEST is called by the Repo if the document is not found in storage
  REQUEST: { target: REQUESTING },
  DELETE: { actions: "onDelete", target: DELETED },
@@ -98,14 +98,7 @@ export class DocHandle//
  },
  }, {
  actions: {
- /** Apply the binary changes from storage and put the updated doc on context */
- onLoad: assign((context, { payload }) => {
- const { binary } = payload;
- const { doc } = context;
- const newDoc = A.loadIncremental(doc, binary);
- return { doc: newDoc };
- }),
- /** Put the updated doc on context; if it's different, emit a `change` event */
+ /** Put the updated doc on context */
  onUpdate: assign((context, { payload }) => {
  const { doc: oldDoc } = context;
  const { callback } = payload;
@@ -121,8 +114,8 @@ export class DocHandle//
  .onTransition(({ value: state, history, context }, event) => {
  const oldDoc = history?.context?.doc;
  const newDoc = context.doc;
- this.#log(`${event} → ${state}`, newDoc);
- const docChanged = newDoc && oldDoc && !headsAreSame(newDoc, oldDoc);
+ console.log(`${event} → ${state}`, newDoc);
+ const docChanged = newDoc && oldDoc && !headsAreSame(A.getHeads(newDoc), A.getHeads(oldDoc));
  if (docChanged) {
  this.emit("heads-changed", { handle: this, doc: newDoc });
  const patches = A.diff(newDoc, A.getHeads(oldDoc), A.getHeads(newDoc));
@@ -225,12 +218,6 @@ export class DocHandle//
  }
  return this.#doc;
  }
- /** `load` is called by the repo when the document is found in storage */
- load(binary) {
- if (binary.length && binary.length > 0) {
- this.#machine.send(LOAD, { payload: { binary } });
- }
- }
  /** `update` is called by the repo when we receive changes from the network */
  update(callback) {
  this.#machine.send(UPDATE, {
@@ -285,7 +272,6 @@ export const HandleState = {
  // events
  export const Event = {
  CREATE: "CREATE",
- LOAD: "LOAD",
  FIND: "FIND",
  REQUEST: "REQUEST",
  REQUEST_COMPLETE: "REQUEST_COMPLETE",
@@ -295,4 +281,4 @@ export const Event = {
  };
  // CONSTANTS
  export const { IDLE, LOADING, REQUESTING, READY, FAILED, DELETED } = HandleState;
- const { CREATE, LOAD, FIND, REQUEST, UPDATE, TIMEOUT, DELETE, REQUEST_COMPLETE, } = Event;
+ const { CREATE, FIND, REQUEST, UPDATE, TIMEOUT, DELETE, REQUEST_COMPLETE } = Event;
package/dist/Repo.d.ts.map CHANGED
@@ -1 +1 @@
(generated source map for Repo.d.ts regenerated; mappings not reproduced here)
package/dist/Repo.js CHANGED
@@ -21,11 +21,13 @@ export class Repo extends DocCollection {
  if (storageSubsystem) {
  // Save when the document changes
  handle.on("heads-changed", async ({ handle, doc }) => {
- await storageSubsystem.save(handle.documentId, doc);
+ await storageSubsystem.saveDoc(handle.documentId, doc);
  });
  // Try to load from disk
- const binary = await storageSubsystem.loadBinary(handle.documentId);
- handle.load(binary);
+ const loadedDoc = await storageSubsystem.loadDoc(handle.documentId);
+ if (loadedDoc) {
+ handle.update(() => loadedDoc);
+ }
  }
  handle.request();
  // Register the document with the synchronizer. This advertises our interest in the document.
package/dist/helpers/headsAreSame.d.ts CHANGED
@@ -1,3 +1,3 @@
- import * as A from "@automerge/automerge";
- export declare const headsAreSame: <T>(a: A.next.Doc<T>, b: A.next.Doc<T>) => boolean;
+ import { Heads } from "@automerge/automerge";
+ export declare const headsAreSame: (a: Heads, b: Heads) => boolean;
  //# sourceMappingURL=headsAreSame.d.ts.map
package/dist/helpers/headsAreSame.d.ts.map CHANGED
@@ -1 +1 @@
(generated source map for headsAreSame.d.ts regenerated; mappings not reproduced here)
package/dist/helpers/headsAreSame.js CHANGED
@@ -1,7 +1,4 @@
- import * as A from "@automerge/automerge";
  import { arraysAreEqual } from "./arraysAreEqual.js";
  export const headsAreSame = (a, b) => {
- const aHeads = A.getHeads(a);
- const bHeads = A.getHeads(b);
- return arraysAreEqual(aHeads, bHeads);
+ return arraysAreEqual(a, b);
  };
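`headsAreSame` now compares two `Heads` values instead of two documents, so callers pull the heads out themselves, which is exactly what the `onTransition` hunk in `DocHandle` above does with `A.getHeads`. A small sketch; the `declare` stands in for the internal helper, which this diff does not show as a public export:

```ts
import * as A from "@automerge/automerge"

// Signature per the diff; declared here because the helper is internal.
declare const headsAreSame: (a: A.Heads, b: A.Heads) => boolean

const docA = A.from({ n: 1 })
const docB = A.clone(docA) // same history, therefore same heads
const unchanged = headsAreSame(A.getHeads(docA), A.getHeads(docB)) // true
```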
package/dist/storage/StorageSubsystem.d.ts CHANGED
@@ -5,8 +5,8 @@ export type ChunkType = "snapshot" | "incremental";
  export declare class StorageSubsystem {
  #private;
  constructor(storageAdapter: StorageAdapter);
- loadBinary(documentId: DocumentId): Promise<Uint8Array>;
- save(documentId: DocumentId, doc: A.Doc<unknown>): Promise<void>;
+ loadDoc(documentId: DocumentId): Promise<A.Doc<unknown> | null>;
+ saveDoc(documentId: DocumentId, doc: A.Doc<unknown>): Promise<void>;
  remove(documentId: DocumentId): Promise<void>;
  }
  //# sourceMappingURL=StorageSubsystem.d.ts.map
package/dist/storage/StorageSubsystem.d.ts.map CHANGED
@@ -1 +1 @@
(generated source map for StorageSubsystem.d.ts regenerated; mappings not reproduced here)
package/dist/storage/StorageSubsystem.js CHANGED
@@ -1,6 +1,8 @@
  import * as A from "@automerge/automerge";
  import * as sha256 from "fast-sha256";
  import { mergeArrays } from "../helpers/mergeArrays.js";
+ import debug from "debug";
+ import { headsAreSame } from "../helpers/headsAreSame.js";
  function keyHash(binary) {
  const hash = sha256.hash(binary);
  const hashArray = Array.from(new Uint8Array(hash)); // convert buffer to byte array
@@ -15,13 +17,16 @@ function headsHash(heads) {
  export class StorageSubsystem {
  #storageAdapter;
  #chunkInfos = new Map();
+ #storedHeads = new Map();
+ #log = debug(`automerge-repo:storage-subsystem`);
  constructor(storageAdapter) {
  this.#storageAdapter = storageAdapter;
  }
  async #saveIncremental(documentId, doc) {
- const binary = A.saveIncremental(doc);
+ const binary = A.saveSince(doc, this.#storedHeads.get(documentId) ?? []);
  if (binary && binary.length > 0) {
  const key = [documentId, "incremental", keyHash(binary)];
+ this.#log(`Saving incremental ${key} for document ${documentId}`);
  await this.#storageAdapter.save(key, binary);
  if (!this.#chunkInfos.has(documentId)) {
  this.#chunkInfos.set(documentId, []);
@@ -31,6 +36,7 @@ export class StorageSubsystem {
  type: "incremental",
  size: binary.length,
  });
+ this.#storedHeads.set(documentId, A.getHeads(doc));
  }
  else {
  return Promise.resolve();
@@ -38,8 +44,11 @@
  }
  async #saveTotal(documentId, doc, sourceChunks) {
  const binary = A.save(doc);
- const key = [documentId, "snapshot", headsHash(A.getHeads(doc))];
- const oldKeys = new Set(sourceChunks.map(c => c.key));
+ const snapshotHash = headsHash(A.getHeads(doc));
+ const key = [documentId, "snapshot", snapshotHash];
+ const oldKeys = new Set(sourceChunks.map(c => c.key).filter(k => k[2] !== snapshotHash));
+ this.#log(`Saving snapshot ${key} for document ${documentId}`);
+ this.#log(`deleting old chunks ${Array.from(oldKeys)}`);
  await this.#storageAdapter.save(key, binary);
  for (const key of oldKeys) {
  await this.#storageAdapter.remove(key);
@@ -48,7 +57,7 @@
  newChunkInfos.push({ key, type: "snapshot", size: binary.length });
  this.#chunkInfos.set(documentId, newChunkInfos);
  }
- async loadBinary(documentId) {
+ async loadDoc(documentId) {
  const loaded = await this.#storageAdapter.loadRange([documentId]);
  const binaries = [];
  const chunkInfos = [];
@@ -65,9 +74,18 @@
  binaries.push(chunk.data);
  }
  this.#chunkInfos.set(documentId, chunkInfos);
- return mergeArrays(binaries);
+ const binary = mergeArrays(binaries);
+ if (binary.length === 0) {
+ return null;
+ }
+ const newDoc = A.loadIncremental(A.init(), binary);
+ this.#storedHeads.set(documentId, A.getHeads(newDoc));
+ return newDoc;
  }
- async save(documentId, doc) {
+ async saveDoc(documentId, doc) {
+ if (!this.#shouldSave(documentId, doc)) {
+ return;
+ }
  let sourceChunks = this.#chunkInfos.get(documentId) ?? [];
  if (this.#shouldCompact(sourceChunks)) {
  this.#saveTotal(documentId, doc, sourceChunks);
@@ -75,11 +93,23 @@
  else {
  this.#saveIncremental(documentId, doc);
  }
+ this.#storedHeads.set(documentId, A.getHeads(doc));
  }
  async remove(documentId) {
  this.#storageAdapter.remove([documentId, "snapshot"]);
  this.#storageAdapter.removeRange([documentId, "incremental"]);
  }
+ #shouldSave(documentId, doc) {
+ const oldHeads = this.#storedHeads.get(documentId);
+ if (!oldHeads) {
+ return true;
+ }
+ const newHeads = A.getHeads(doc);
+ if (headsAreSame(newHeads, oldHeads)) {
+ return false;
+ }
+ return true;
+ }
  #shouldCompact(sourceChunks) {
  // compact if the incremental size is greater than the snapshot size
  let snapshotSize = 0;
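Summing up the storage changes: the subsystem now remembers the last-saved heads per document, writes deltas with `A.saveSince`, skips saves entirely when the heads haven't moved, and returns a materialized doc (or `null`) from `loadDoc`. A usage sketch; `adapter` and `documentId` are assumed to exist and are not defined here:

```ts
import * as A from "@automerge/automerge"

const storage = new StorageSubsystem(adapter) // any StorageAdapter implementation

let doc = A.from({ count: 0 })
await storage.saveDoc(documentId, doc) // writes an incremental chunk via A.saveSince
await storage.saveDoc(documentId, doc) // heads unchanged: #shouldSave returns false, no write

doc = A.change(doc, d => (d.count = 1))
await storage.saveDoc(documentId, doc) // new heads: another chunk (or a snapshot when compacting)

const reloaded = await storage.loadDoc(documentId) // A.Doc<unknown> | null; null when storage is empty
```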
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@automerge/automerge-repo",
- "version": "1.0.0-alpha.0",
+ "version": "1.0.0-alpha.2",
  "description": "A repository object to manage a collection of automerge documents",
  "repository": "https://github.com/automerge/automerge-repo",
  "author": "Peter van Hardenberg <pvh@pvh.ca>",
@@ -31,7 +31,7 @@
  "typescript": "^5.1.6"
  },
  "peerDependencies": {
- "@automerge/automerge": "^2.1.0-alpha.9"
+ "@automerge/automerge": "^2.1.0-alpha.10"
  },
  "dependencies": {
  "bs58check": "^3.0.1",
@@ -65,5 +65,5 @@
  "publishConfig": {
  "access": "public"
  },
- "gitHead": "38c0c32796ddca5f86a2e55ab0f1202a2ce107c8"
+ "gitHead": "b5830dde8f135b694809698aaad2a9fdc79a9898"
  }
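The peer-dependency bump to `@automerge/automerge` `^2.1.0-alpha.10` presumably tracks the storage subsystem's switch from `A.saveIncremental` to `A.saveSince` seen above.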
package/src/DocHandle.ts CHANGED
@@ -17,14 +17,8 @@ import { waitFor } from "xstate/lib/waitFor.js"
  import { headsAreSame } from "./helpers/headsAreSame.js"
  import { pause } from "./helpers/pause.js"
  import { TimeoutError, withTimeout } from "./helpers/withTimeout.js"
- import type {
- BinaryDocumentId,
- ChannelId,
- DocumentId,
- PeerId,
- AutomergeUrl,
- } from "./types.js"
- import { binaryToDocumentId, stringifyAutomergeUrl } from "./DocUrl.js"
+ import type { ChannelId, DocumentId, PeerId, AutomergeUrl } from "./types.js"
+ import { stringifyAutomergeUrl } from "./DocUrl.js"

  /** DocHandle is a wrapper around a single Automerge document that lets us listen for changes. */
  export class DocHandle<T> //
@@ -83,8 +77,8 @@ export class DocHandle<T> //
  },
  loading: {
  on: {
- // LOAD is called by the Repo if the document is found in storage
- LOAD: { actions: "onLoad", target: READY },
+ // UPDATE is called by the Repo if the document is found in storage
+ UPDATE: { actions: "onUpdate", target: READY },
  // REQUEST is called by the Repo if the document is not found in storage
  REQUEST: { target: REQUESTING },
  DELETE: { actions: "onDelete", target: DELETED },
@@ -129,15 +123,7 @@ export class DocHandle<T> //

  {
  actions: {
- /** Apply the binary changes from storage and put the updated doc on context */
- onLoad: assign((context, { payload }: LoadEvent) => {
- const { binary } = payload
- const { doc } = context
- const newDoc = A.loadIncremental(doc, binary)
- return { doc: newDoc }
- }),
-
- /** Put the updated doc on context; if it's different, emit a `change` event */
+ /** Put the updated doc on context */
  onUpdate: assign((context, { payload }: UpdateEvent<T>) => {
  const { doc: oldDoc } = context

@@ -158,9 +144,9 @@
  const oldDoc = history?.context?.doc
  const newDoc = context.doc

- this.#log(`${event} → ${state}`, newDoc)
+ console.log(`${event} → ${state}`, newDoc)

- const docChanged = newDoc && oldDoc && !headsAreSame(newDoc, oldDoc)
+ const docChanged = newDoc && oldDoc && !headsAreSame(A.getHeads(newDoc), A.getHeads(oldDoc))
  if (docChanged) {
  this.emit("heads-changed", { handle: this, doc: newDoc })

@@ -280,13 +266,6 @@ export class DocHandle<T> //
  return this.#doc
  }

- /** `load` is called by the repo when the document is found in storage */
- load(binary: Uint8Array) {
- if (binary.length && binary.length > 0) {
- this.#machine.send(LOAD, { payload: { binary } })
- }
- }
-
  /** `update` is called by the repo when we receive changes from the network */
  update(callback: (doc: A.Doc<T>) => A.Doc<T>) {
  this.#machine.send(UPDATE, {
@@ -407,7 +386,6 @@ interface DocHandleContext<T> {

  export const Event = {
  CREATE: "CREATE",
- LOAD: "LOAD",
  FIND: "FIND",
  REQUEST: "REQUEST",
  REQUEST_COMPLETE: "REQUEST_COMPLETE",
@@ -418,7 +396,6 @@ export const Event = {
  type Event = (typeof Event)[keyof typeof Event]

  type CreateEvent = { type: typeof CREATE; payload: { documentId: string } }
- type LoadEvent = { type: typeof LOAD; payload: { binary: Uint8Array } }
  type FindEvent = { type: typeof FIND; payload: { documentId: string } }
  type RequestEvent = { type: typeof REQUEST }
  type RequestCompleteEvent = { type: typeof REQUEST_COMPLETE }
@@ -431,7 +408,6 @@ type TimeoutEvent = { type: typeof TIMEOUT }

  type DocHandleEvent<T> =
  | CreateEvent
- | LoadEvent
  | FindEvent
  | RequestEvent
  | RequestCompleteEvent
@@ -458,13 +434,5 @@ type DocHandleXstateMachine<T> = Interpreter<
  // CONSTANTS

  export const { IDLE, LOADING, REQUESTING, READY, FAILED, DELETED } = HandleState
- const {
- CREATE,
- LOAD,
- FIND,
- REQUEST,
- UPDATE,
- TIMEOUT,
- DELETE,
- REQUEST_COMPLETE,
- } = Event
+ const { CREATE, FIND, REQUEST, UPDATE, TIMEOUT, DELETE, REQUEST_COMPLETE } =
+ Event
package/src/Repo.ts CHANGED
@@ -30,12 +30,14 @@ export class Repo extends DocCollection {
  if (storageSubsystem) {
  // Save when the document changes
  handle.on("heads-changed", async ({ handle, doc }) => {
- await storageSubsystem.save(handle.documentId, doc)
+ await storageSubsystem.saveDoc(handle.documentId, doc)
  })

  // Try to load from disk
- const binary = await storageSubsystem.loadBinary(handle.documentId)
- handle.load(binary)
+ const loadedDoc = await storageSubsystem.loadDoc(handle.documentId)
+ if (loadedDoc) {
+ handle.update(() => loadedDoc)
+ }
  }

  handle.request()
package/src/helpers/headsAreSame.ts CHANGED
@@ -1,8 +1,6 @@
- import * as A from "@automerge/automerge"
+ import {Heads} from "@automerge/automerge"
  import { arraysAreEqual } from "./arraysAreEqual.js"

- export const headsAreSame = <T>(a: A.Doc<T>, b: A.Doc<T>) => {
- const aHeads = A.getHeads(a)
- const bHeads = A.getHeads(b)
- return arraysAreEqual(aHeads, bHeads)
+ export const headsAreSame = (a: Heads, b: Heads) => {
+ return arraysAreEqual(a, b)
  }
package/src/storage/StorageSubsystem.ts CHANGED
@@ -3,6 +3,8 @@ import { StorageAdapter, StorageKey } from "./StorageAdapter.js"
  import * as sha256 from "fast-sha256"
  import { type DocumentId } from "../types.js"
  import { mergeArrays } from "../helpers/mergeArrays.js"
+ import debug from "debug"
+ import { headsAreSame } from "../helpers/headsAreSame.js"

  // Metadata about a chunk of data loaded from storage. This is stored on the
  // StorageSubsystem so when we are compacting we know what chunks we can safely delete
@@ -30,6 +32,8 @@ function headsHash(heads: A.Heads): string {
  export class StorageSubsystem {
  #storageAdapter: StorageAdapter
  #chunkInfos: Map<DocumentId, StorageChunkInfo[]> = new Map()
+ #storedHeads: Map<DocumentId, A.Heads> = new Map()
+ #log = debug(`automerge-repo:storage-subsystem`)

  constructor(storageAdapter: StorageAdapter) {
  this.#storageAdapter = storageAdapter
@@ -39,9 +43,10 @@ export class StorageSubsystem {
  documentId: DocumentId,
  doc: A.Doc<unknown>
  ): Promise<void> {
- const binary = A.saveIncremental(doc)
+ const binary = A.saveSince(doc, this.#storedHeads.get(documentId) ?? [])
  if (binary && binary.length > 0) {
  const key = [documentId, "incremental", keyHash(binary)]
+ this.#log(`Saving incremental ${key} for document ${documentId}`)
  await this.#storageAdapter.save(key, binary)
  if (!this.#chunkInfos.has(documentId)) {
  this.#chunkInfos.set(documentId, [])
@@ -51,6 +56,7 @@ export class StorageSubsystem {
  type: "incremental",
  size: binary.length,
  })
+ this.#storedHeads.set(documentId, A.getHeads(doc))
  } else {
  return Promise.resolve()
  }
@@ -62,8 +68,14 @@
  sourceChunks: StorageChunkInfo[]
  ): Promise<void> {
  const binary = A.save(doc)
- const key = [documentId, "snapshot", headsHash(A.getHeads(doc))]
- const oldKeys = new Set(sourceChunks.map(c => c.key))
+ const snapshotHash = headsHash(A.getHeads(doc))
+ const key = [documentId, "snapshot", snapshotHash]
+ const oldKeys = new Set(
+ sourceChunks.map(c => c.key).filter(k => k[2] !== snapshotHash)
+ )
+
+ this.#log(`Saving snapshot ${key} for document ${documentId}`)
+ this.#log(`deleting old chunks ${Array.from(oldKeys)}`)

  await this.#storageAdapter.save(key, binary)

@@ -76,7 +88,7 @@
  this.#chunkInfos.set(documentId, newChunkInfos)
  }

- async loadBinary(documentId: DocumentId): Promise<Uint8Array> {
+ async loadDoc(documentId: DocumentId): Promise<A.Doc<unknown> | null> {
  const loaded = await this.#storageAdapter.loadRange([documentId])
  const binaries = []
  const chunkInfos: StorageChunkInfo[] = []
@@ -93,16 +105,26 @@
  binaries.push(chunk.data)
  }
  this.#chunkInfos.set(documentId, chunkInfos)
- return mergeArrays(binaries)
+ const binary = mergeArrays(binaries)
+ if (binary.length === 0) {
+ return null
+ }
+ const newDoc = A.loadIncremental(A.init(), binary)
+ this.#storedHeads.set(documentId, A.getHeads(newDoc))
+ return newDoc
  }

- async save(documentId: DocumentId, doc: A.Doc<unknown>): Promise<void> {
+ async saveDoc(documentId: DocumentId, doc: A.Doc<unknown>): Promise<void> {
+ if (!this.#shouldSave(documentId, doc)) {
+ return
+ }
  let sourceChunks = this.#chunkInfos.get(documentId) ?? []
  if (this.#shouldCompact(sourceChunks)) {
  this.#saveTotal(documentId, doc, sourceChunks)
  } else {
  this.#saveIncremental(documentId, doc)
  }
+ this.#storedHeads.set(documentId, A.getHeads(doc))
  }

  async remove(documentId: DocumentId) {
@@ -110,6 +132,20 @@
  this.#storageAdapter.removeRange([documentId, "incremental"])
  }

+ #shouldSave(documentId: DocumentId, doc: A.Doc<unknown>): boolean {
+ const oldHeads = this.#storedHeads.get(documentId)
+ if (!oldHeads) {
+ return true
+ }
+
+ const newHeads = A.getHeads(doc)
+ if (headsAreSame(newHeads, oldHeads)) {
+ return false
+ }
+
+ return true
+ }
+
  #shouldCompact(sourceChunks: StorageChunkInfo[]) {
  // compact if the incremental size is greater than the snapshot size
  let snapshotSize = 0
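`#shouldCompact` itself is unchanged in this release; only its comment is visible here. For orientation, that comment implies a decision roughly along these lines (a sketch under that assumption, not the package's verbatim source):

```ts
type ChunkInfo = { type: "snapshot" | "incremental"; size: number }

// Compact (write a fresh snapshot) once the accumulated incremental
// chunks outweigh the last snapshot on disk.
function shouldCompact(sourceChunks: ChunkInfo[]): boolean {
  let snapshotSize = 0
  let incrementalSize = 0
  for (const chunk of sourceChunks) {
    if (chunk.type === "snapshot") snapshotSize = Math.max(snapshotSize, chunk.size)
    else incrementalSize += chunk.size
  }
  return incrementalSize > snapshotSize
}
```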
package/test/DocHandle.test.ts CHANGED
@@ -1,7 +1,7 @@
  import * as A from "@automerge/automerge"
  import assert from "assert"
  import { it } from "mocha"
- import { DocHandle, DocHandleChangePayload, BinaryDocumentId } from "../src"
+ import { DocHandle, DocHandleChangePayload } from "../src"
  import { pause } from "../src/helpers/pause"
  import { TestDoc } from "./types.js"
  import { generateAutomergeUrl, parseAutomergeUrl } from "../src/DocUrl"
@@ -10,10 +10,8 @@ describe("DocHandle", () => {
  const TEST_ID = parseAutomergeUrl(generateAutomergeUrl()).encodedDocumentId
  const BOGUS_ID = parseAutomergeUrl(generateAutomergeUrl()).encodedDocumentId

- const binaryFromMockStorage = () => {
- const doc = A.change<{ foo: string }>(A.init(), d => (d.foo = "bar"))
- const binary = A.save(doc)
- return binary
+ const docFromMockStorage = (doc: A.Doc<{ foo: string }>) => {
+ return A.change<{ foo: string }>(doc, d => (d.foo = "bar"))
  }

  it("should take the UUID passed into it", () => {
@@ -26,10 +24,11 @@
  assert.equal(handle.isReady(), false)

  // simulate loading from storage
- handle.load(binaryFromMockStorage())
+ handle.update(doc => docFromMockStorage(doc))

  assert.equal(handle.isReady(), true)
  const doc = await handle.doc()
+ console.log("DOC", JSON.stringify(doc))
  assert.equal(doc.foo, "bar")
  })
@@ -38,7 +37,7 @@
  assert.equal(handle.isReady(), false)

  // simulate loading from storage
- handle.load(binaryFromMockStorage())
+ handle.update(doc => docFromMockStorage(doc))

  assert.equal(handle.isReady(), true)
  const doc = await handle.doc()
@@ -56,7 +55,7 @@
  assert.equal(handle.isReady(), false)

  // simulate loading from storage
- handle.load(binaryFromMockStorage())
+ handle.update(doc => docFromMockStorage(doc))

  const doc = await handle.doc()

@@ -72,7 +71,7 @@
  assert.throws(() => handle.change(d => (d.foo = "baz")))

  // simulate loading from storage
- handle.load(binaryFromMockStorage())
+ handle.update(doc => docFromMockStorage(doc))

  // now we're in READY state so we can make changes
  assert.equal(handle.isReady(), true)
@@ -229,7 +228,7 @@
  const handle = new DocHandle<TestDoc>(TEST_ID, { timeoutDelay: 5 })

  // simulate loading from storage before the timeout expires
- handle.load(binaryFromMockStorage())
+ handle.update(doc => docFromMockStorage(doc))

  // now it should not time out
  const doc = await handle.doc()
package/test/Repo.test.ts CHANGED
@@ -55,6 +55,7 @@ describe("Repo", () => {
  d.foo = "bar"
  })
  const v = await handle.doc()
+ console.log("V is ", v)
  assert.equal(handle.isReady(), true)

  assert.equal(v.foo, "bar")
@@ -180,6 +181,64 @@ describe("Repo", () => {

  repo.delete(handle.documentId)
  })
+
+ it("storage state doesn't change across reloads when the document hasn't changed", async () => {
+ const storage = new DummyStorageAdapter()
+
+ const repo = new Repo({
+ storage,
+ network: [],
+ })
+
+ const handle = repo.create<{ count: number }>()
+
+ handle.change(d => {
+ d.count = 0
+ })
+ handle.change(d => {
+ d.count = 1
+ })
+
+ const initialKeys = storage.keys()
+
+ const repo2 = new Repo({
+ storage,
+ network: [],
+ })
+ const handle2 = repo2.find(handle.url)
+ await handle2.doc()
+
+ assert.deepEqual(storage.keys(), initialKeys)
+ })
+
+ it("doesn't delete a document from storage when we refresh", async () => {
+ const storage = new DummyStorageAdapter()
+
+ const repo = new Repo({
+ storage,
+ network: [],
+ })
+
+ const handle = repo.create<{ count: number }>()
+
+ handle.change(d => {
+ d.count = 0
+ })
+ handle.change(d => {
+ d.count = 1
+ })
+
+ for (let i = 0; i < 3; i++) {
+ const repo2 = new Repo({
+ storage,
+ network: [],
+ })
+ const handle2 = repo2.find(handle.url)
+ await handle2.doc()
+
+ assert(storage.keys().length !== 0)
+ }
+ })
  })

  describe("sync", async () => {
@@ -412,5 +471,4 @@
  teardown()
  })
  })
-
  })
package/test/StorageSubsystem.test.ts CHANGED
@@ -32,11 +32,10 @@ describe("StorageSubsystem", () => {

  // save it to storage
  const key = parseAutomergeUrl(generateAutomergeUrl()).encodedDocumentId
- await storage.save(key, doc)
+ await storage.saveDoc(key, doc)

  // reload it from storage
- const reloadedDocBinary = await storage.loadBinary(key)
- const reloadedDoc = A.load<TestDoc>(reloadedDocBinary)
+ const reloadedDoc = await storage.loadDoc(key)

  // check that it's the same doc
  assert.deepStrictEqual(reloadedDoc, doc)
@@ -54,14 +53,15 @@

  // save it to storage
  const key = parseAutomergeUrl(generateAutomergeUrl()).encodedDocumentId
- storage.save(key, doc)
+ storage.saveDoc(key, doc)

  // create new storage subsystem to simulate a new process
  const storage2 = new StorageSubsystem(adapter)

  // reload it from storage
- const reloadedDocBinary = await storage2.loadBinary(key)
- const reloadedDoc = A.load<TestDoc>(reloadedDocBinary)
+ const reloadedDoc = await storage2.loadDoc(key)
+
+ assert(reloadedDoc, "doc should be loaded")

  // make a change
  const changedDoc = A.change<any>(reloadedDoc, "test 2", d => {
@@ -69,7 +69,7 @@
  })

  // save it to storage
- storage2.save(key, changedDoc)
+ storage2.saveDoc(key, changedDoc)

  // check that the storage adapter contains the correct keys
  assert(adapter.keys().some(k => k.startsWith(`${key}.incremental.`)))