@automerge/automerge-repo 2.0.0-alpha.0 → 2.0.0-alpha.11

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/DocHandle.d.ts CHANGED
@@ -86,6 +86,57 @@ export declare class DocHandle<T> extends EventEmitter<DocHandleEvents<T>> {
  * @returns the current document's heads, or undefined if the document is not ready
  */
  heads(): A.Heads | undefined;
+ /**
+ * Creates a fixed "view" of an automerge document at the given point in time represented
+ * by the `heads` passed in. The return value is the same type as docSync() and will return
+ * undefined if the object hasn't finished loading.
+ *
+ * @remarks
+ * A point-in-time in an automerge document is an *array* of heads since there may be
+ * concurrent edits. This API just returns a topologically sorted history of all edits
+ * so every previous entry will be (in some sense) before later ones, but the set of all possible
+ * history views would be quite large under concurrency (every thing in each branch against each other).
+ * There might be a clever way to think about this, but we haven't found it yet, so for now at least
+ * we present a single traversable view which excludes concurrency.
+ * @returns The individual heads for every change in the document.
+ */
+ history(): A.Heads[] | undefined;
+ /**
+ * Creates a fixed "view" of an automerge document at the given point in time represented
+ * by the `heads` passed in. The return value is the same type as docSync() and will return
+ * undefined if the object hasn't finished loading.
+ *
+ * @remarks
+ * Note that our Typescript types do not consider change over time and the current version
+ * of Automerge doesn't check types at runtime, so if you go back to an old set of heads
+ * that doesn't match the heads here, Typescript will not save you.
+ *
+ * @returns An Automerge.Doc<T> at the point in time.
+ */
+ view(heads: A.Heads): A.Doc<T> | undefined;
+ /**
+ * Returns a set of Patch operations that will move a materialized document from one state to another
+ * if applied.
+ *
+ * @remarks
+ * We allow specifying both a from/to heads or just a single comparison point, in which case
+ * the base will be the current document heads.
+ *
+ * @returns Automerge patches that go from one document state to the other. Use view() to get the full state.
+ */
+ diff(first: A.Heads, second?: A.Heads): A.Patch[] | undefined;
+ /**
+ * `metadata(head?)` allows you to look at the metadata for a change
+ * this can be used to build history graphs to find commit messages and edit times.
+ * this interface.
+ *
+ * @remarks
+ * I'm really not convinced this is the right way to surface this information so
+ * I'm leaving this API "hidden".
+ *
+ * @hidden
+ */
+ metadata(change?: string): A.DecodedChange | undefined;
  /**
  * `update` is called any time we have a new document state; could be
  * from a local change, a remote change, or a new document from storage.
@@ -158,6 +209,10 @@ export declare class DocHandle<T> extends EventEmitter<DocHandleEvents<T>> {
  * must have a unique PeerId.
  */
  broadcast(message: unknown): void;
+ metrics(): {
+ numOps: number;
+ numChanges: number;
+ };
  }
  /** @hidden */
  export type DocHandleOptions<T> = {
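For orientation (not part of the published diff): a minimal sketch of how the DocHandle time-travel surface declared above might be used. It assumes an in-memory Repo with no network adapters and a small `{ count: number }` document created on the spot.

```ts
import { Repo } from "@automerge/automerge-repo"

const repo = new Repo({ network: [] })
const handle = repo.create<{ count: number }>({ count: 0 })
handle.change(d => { d.count = 1 })
handle.change(d => { d.count = 2 })

// Topologically sorted heads, one entry per change (undefined until the handle is ready).
const history = handle.history() ?? []

// A read-only Automerge.Doc<T> as of the first recorded change.
const oldDoc = handle.view(history[0])

// Patches from the current document state back to that older point;
// pass two head sets for an explicit from/to pair.
const patches = handle.diff(history[0])

// Rough document size from the new metrics() method.
console.log(handle.metrics(), oldDoc, patches)
```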
@@ -1 +1 @@
1
- {"version":3,"file":"DocHandle.d.ts","sourceRoot":"","sources":["../src/DocHandle.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,CAAC,MAAM,gCAAgC,CAAA;AAEnD,OAAO,EAAE,YAAY,EAAE,MAAM,eAAe,CAAA;AAM5C,OAAO,KAAK,EAAE,YAAY,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,YAAY,CAAA;AAClE,OAAO,EAAE,SAAS,EAAE,MAAM,oBAAoB,CAAA;AAE9C;;;;;;;;;;;;GAYG;AACH,qBAAa,SAAS,CAAC,CAAC,CAAE,SAAQ,YAAY,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC;;IAkBvD,UAAU,EAAE,UAAU;IAF/B,cAAc;gBAEL,UAAU,EAAE,UAAU,EAC7B,OAAO,GAAE,gBAAgB,CAAC,CAAC,CAAM;IAoJnC;OACG;IACH,IAAI,GAAG,IAAI,YAAY,CAEtB;IAED;;;;;OAKG;IACH,OAAO,gBAAgC;IAEvC;;;;;OAKG;IACH,SAAS,gBAAkC;IAE3C;;;;OAIG;IACH,aAAa,gBAAsC;IAEnD;;OAEG;IACH,OAAO,WAAY,WAAW,EAAE,aAC0B;IAE1D,cAAc;IACd,IAAI,KAAK,4EAER;IAED;;;;;;OAMG;IACG,SAAS,CAAC,WAAW,GAAE,WAAW,EAAc;IAItD;;;;;OAKG;IACG,GAAG;IACP,sEAAsE;IACtE,WAAW,GAAE,WAAW,EAA6B;IAavD;;;;;;;;;;;;OAYG;IACH,OAAO;IAKP;;;;OAIG;IACH,KAAK,IAAI,CAAC,CAAC,KAAK,GAAG,SAAS;IAO5B;;;;;OAKG;IACH,MAAM,CAAC,QAAQ,EAAE,CAAC,GAAG,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;IAI5C;;;;OAIG;IACH,WAAW;IAIX;;;OAGG;IACH,cAAc,CAAC,SAAS,EAAE,SAAS,EAAE,KAAK,EAAE,CAAC,CAAC,KAAK;IAKnD,0CAA0C;IAC1C,cAAc,CAAC,SAAS,EAAE,SAAS,GAAG,CAAC,CAAC,KAAK,GAAG,SAAS;IAIzD;;;;;;;;;;;;;;OAcG;IACH,MAAM,CAAC,QAAQ,EAAE,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,OAAO,GAAE,CAAC,CAAC,aAAa,CAAC,CAAC,CAAM;IAWhE;;;;OAIG;IACH,QAAQ,CACN,KAAK,EAAE,CAAC,CAAC,KAAK,EACd,QAAQ,EAAE,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,EACvB,OAAO,GAAE,CAAC,CAAC,aAAa,CAAC,CAAC,CAAM,GAC/B,MAAM,EAAE,GAAG,SAAS;IAsBvB;;;;;;;OAOG;IACH,KAAK;IACH,wDAAwD;IACxD,WAAW,EAAE,SAAS,CAAC,CAAC,CAAC;IAe3B;;;OAGG;IACH,WAAW;IAIX;;SAEK;IACL,OAAO;IAIP,uDAAuD;IACvD,MAAM;IAIN;;;;;;OAMG;IACH,SAAS,CAAC,OAAO,EAAE,OAAO;CAM3B;AAID,cAAc;AACd,MAAM,MAAM,gBAAgB,CAAC,CAAC,IAE1B;IACE,gGAAgG;IAChG,KAAK,EAAE,IAAI,CAAA;IAEX,yCAAyC;IACzC,YAAY,CAAC,EAAE,CAAC,CAAA;CACjB,GAED;IACE,KAAK,CAAC,EAAE,KAAK,CAAA;IAEb,+HAA+H;IAC/H,YAAY,CAAC,EAAE,MAAM,CAAA;CACtB,CAAA;AAIL,2EAA2E;AAC3E,MAAM,WAAW,eAAe,CAAC,CAAC;IAChC,eAAe,EAAE,CAAC,OAAO,EAAE,6BAA6B,CAAC,CAAC,CAAC,KAAK,IAAI,CAAA;IACpE,MAAM,EAAE,CAAC,OAAO,EAAE,sBAAsB,CAAC,CAAC,CAAC,KAAK,IAAI,CAAA;IACpD,MAAM,EAAE,CAAC,OAAO,EAAE,sBAAsB,CAAC,CAAC,CAAC,KAAK,IAAI,CAAA;IACpD,WAAW,EAAE,CAAC,OAAO,EAAE,2BAA2B,CAAC,CAAC,CAAC,KAAK,IAAI,CAAA;IAC9D,mBAAmB,EAAE,CAAC,OAAO,EAAE,gCAAgC,CAAC,CAAC,CAAC,KAAK,IAAI,CAAA;IAC3E,4BAA4B,EAAE,CAC5B,OAAO,EAAE,wCAAwC,CAAC,CAAC,CAAC,KACjD,IAAI,CAAA;IACT,cAAc,EAAE,CAAC,OAAO,EAAE,2BAA2B,KAAK,IAAI,CAAA;CAC/D;AAED,sDAAsD;AACtD,MAAM,WAAW,6BAA6B,CAAC,CAAC;IAC9C,MAAM,EAAE,SAAS,CAAC,CAAC,CAAC,CAAA;IACpB,GAAG,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAA;CACd;AAED,6CAA6C;AAC7C,MAAM,WAAW,sBAAsB,CAAC,CAAC;IACvC,8BAA8B;IAC9B,MAAM,EAAE,SAAS,CAAC,CAAC,CAAC,CAAA;IACpB,iDAAiD;IACjD,GAAG,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAA;IACb,wDAAwD;IACxD,OAAO,EAAE,CAAC,CAAC,KAAK,EAAE,CAAA;IAClB,mCAAmC;IACnC,SAAS,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,CAAA;CAC1B;AAED,4CAA4C;AAC5C,MAAM,WAAW,sBAAsB,CAAC,CAAC;IACvC,MAAM,EAAE,SAAS,CAAC,CAAC,CAAC,CAAA;CACrB;AAED,6DAA6D;AAC7D,MAAM,WAAW,2BAA2B,CAAC,CAAC;IAC5C,MAAM,EAAE,SAAS,CAAC,CAAC,CAAC,CAAA;CACrB;AAED,qEAAqE;AACrE,MAAM,WAAW,gCAAgC,CAAC,CAAC;IACjD,MAAM,EAAE,SAAS,CAAC,CAAC,CAAC,CAAA;IACpB,QAAQ,EAAE,MAAM,CAAA;IAChB,OAAO,EAAE,OAAO,CAAA;CACjB;AAED,kEAAkE;AAClE,MAAM,WAAW,wCAAwC,CAAC,CAAC;IACzD,MAAM,EAAE,SAAS,CAAC,CAAC,CAAC,CAAA;IACpB,IAAI,EAAE,UAAU,CAAA;CACjB;AAED,8DAA8D;AAC9D,MAAM,WAAW,2BAA2B;IAC1C,SAAS,EAAE,SAAS,CAAA;IACpB,KAAK,EAAE,CAAC,CAAC,KAAK,CAAA;CACf;AAMD;;GAEG;AACH,eAAO,MAAM,WAAW;IACtB,kEAAkE;;IAElE,mDAAmD;;IAEnD,6EAA6E;;IAE7E,gCAAgC;;IAEhC,kDAAkD;;IAElD,4EAA4E;;CAEpE,CAAA;AACV,MAAM,MAAM,WAAW,GAAG,CAAC,OAAO,WAAW
,CAAC,CAAC,MAAM,OAAO,WAAW,CAAC,CAAA;AAExE,eAAO,MAAQ,IAAI,UAAE,OAAO,aAAE,UAAU,gBAAE,KAAK,WAAE,OAAO,aAAE,WAAW,eACxD,CAAA"}
1
+ {"version":3,"file":"DocHandle.d.ts","sourceRoot":"","sources":["../src/DocHandle.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,CAAC,MAAM,gCAAgC,CAAA;AAEnD,OAAO,EAAE,YAAY,EAAE,MAAM,eAAe,CAAA;AAM5C,OAAO,KAAK,EAAE,YAAY,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,YAAY,CAAA;AAClE,OAAO,EAAE,SAAS,EAAE,MAAM,oBAAoB,CAAA;AAE9C;;;;;;;;;;;;GAYG;AACH,qBAAa,SAAS,CAAC,CAAC,CAAE,SAAQ,YAAY,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC;;IAkBvD,UAAU,EAAE,UAAU;IAF/B,cAAc;gBAEL,UAAU,EAAE,UAAU,EAC7B,OAAO,GAAE,gBAAgB,CAAC,CAAC,CAAM;IAoJnC;OACG;IACH,IAAI,GAAG,IAAI,YAAY,CAEtB;IAED;;;;;OAKG;IACH,OAAO,gBAAgC;IAEvC;;;;;OAKG;IACH,SAAS,gBAAkC;IAE3C;;;;OAIG;IACH,aAAa,gBAAsC;IAEnD;;OAEG;IACH,OAAO,WAAY,WAAW,EAAE,aAC0B;IAE1D,cAAc;IACd,IAAI,KAAK,4EAER;IAED;;;;;;OAMG;IACG,SAAS,CAAC,WAAW,GAAE,WAAW,EAAc;IAItD;;;;;OAKG;IACG,GAAG;IACP,sEAAsE;IACtE,WAAW,GAAE,WAAW,EAA6B;IAavD;;;;;;;;;;;;OAYG;IACH,OAAO;IAKP;;;;OAIG;IACH,KAAK,IAAI,CAAC,CAAC,KAAK,GAAG,SAAS;IAO5B;;;;;;;;;;;;;OAaG;IACH,OAAO,IAAI,CAAC,CAAC,KAAK,EAAE,GAAG,SAAS;IAShC;;;;;;;;;;;OAWG;IACH,IAAI,CAAC,KAAK,EAAE,CAAC,CAAC,KAAK,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,SAAS;IAO1C;;;;;;;;;OASG;IACH,IAAI,CAAC,KAAK,EAAE,CAAC,CAAC,KAAK,EAAE,MAAM,CAAC,EAAE,CAAC,CAAC,KAAK,GAAG,CAAC,CAAC,KAAK,EAAE,GAAG,SAAS;IAU7D;;;;;;;;;;OAUG;IACH,QAAQ,CAAC,MAAM,CAAC,EAAE,MAAM,GAAG,CAAC,CAAC,aAAa,GAAG,SAAS;IAWtD;;;;;OAKG;IACH,MAAM,CAAC,QAAQ,EAAE,CAAC,GAAG,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;IAI5C;;;;OAIG;IACH,WAAW;IAIX;;;OAGG;IACH,cAAc,CAAC,SAAS,EAAE,SAAS,EAAE,KAAK,EAAE,CAAC,CAAC,KAAK;IAKnD,0CAA0C;IAC1C,cAAc,CAAC,SAAS,EAAE,SAAS,GAAG,CAAC,CAAC,KAAK,GAAG,SAAS;IAIzD;;;;;;;;;;;;;;OAcG;IACH,MAAM,CAAC,QAAQ,EAAE,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,OAAO,GAAE,CAAC,CAAC,aAAa,CAAC,CAAC,CAAM;IAWhE;;;;OAIG;IACH,QAAQ,CACN,KAAK,EAAE,CAAC,CAAC,KAAK,EACd,QAAQ,EAAE,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,EACvB,OAAO,GAAE,CAAC,CAAC,aAAa,CAAC,CAAC,CAAM,GAC/B,MAAM,EAAE,GAAG,SAAS;IAsBvB;;;;;;;OAOG;IACH,KAAK;IACH,wDAAwD;IACxD,WAAW,EAAE,SAAS,CAAC,CAAC,CAAC;IAe3B;;;OAGG;IACH,WAAW;IAIX;;SAEK;IACL,OAAO;IAIP,uDAAuD;IACvD,MAAM;IAIN;;;;;;OAMG;IACH,SAAS,CAAC,OAAO,EAAE,OAAO;IAO1B,OAAO,IAAI;QAAE,MAAM,EAAE,MAAM,CAAC;QAAC,UAAU,EAAE,MAAM,CAAA;KAAE;CAGlD;AAID,cAAc;AACd,MAAM,MAAM,gBAAgB,CAAC,CAAC,IAE1B;IACE,gGAAgG;IAChG,KAAK,EAAE,IAAI,CAAA;IAEX,yCAAyC;IACzC,YAAY,CAAC,EAAE,CAAC,CAAA;CACjB,GAED;IACE,KAAK,CAAC,EAAE,KAAK,CAAA;IAEb,+HAA+H;IAC/H,YAAY,CAAC,EAAE,MAAM,CAAA;CACtB,CAAA;AAIL,2EAA2E;AAC3E,MAAM,WAAW,eAAe,CAAC,CAAC;IAChC,eAAe,EAAE,CAAC,OAAO,EAAE,6BAA6B,CAAC,CAAC,CAAC,KAAK,IAAI,CAAA;IACpE,MAAM,EAAE,CAAC,OAAO,EAAE,sBAAsB,CAAC,CAAC,CAAC,KAAK,IAAI,CAAA;IACpD,MAAM,EAAE,CAAC,OAAO,EAAE,sBAAsB,CAAC,CAAC,CAAC,KAAK,IAAI,CAAA;IACpD,WAAW,EAAE,CAAC,OAAO,EAAE,2BAA2B,CAAC,CAAC,CAAC,KAAK,IAAI,CAAA;IAC9D,mBAAmB,EAAE,CAAC,OAAO,EAAE,gCAAgC,CAAC,CAAC,CAAC,KAAK,IAAI,CAAA;IAC3E,4BAA4B,EAAE,CAC5B,OAAO,EAAE,wCAAwC,CAAC,CAAC,CAAC,KACjD,IAAI,CAAA;IACT,cAAc,EAAE,CAAC,OAAO,EAAE,2BAA2B,KAAK,IAAI,CAAA;CAC/D;AAED,sDAAsD;AACtD,MAAM,WAAW,6BAA6B,CAAC,CAAC;IAC9C,MAAM,EAAE,SAAS,CAAC,CAAC,CAAC,CAAA;IACpB,GAAG,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAA;CACd;AAED,6CAA6C;AAC7C,MAAM,WAAW,sBAAsB,CAAC,CAAC;IACvC,8BAA8B;IAC9B,MAAM,EAAE,SAAS,CAAC,CAAC,CAAC,CAAA;IACpB,iDAAiD;IACjD,GAAG,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAA;IACb,wDAAwD;IACxD,OAAO,EAAE,CAAC,CAAC,KAAK,EAAE,CAAA;IAClB,mCAAmC;IACnC,SAAS,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,CAAA;CAC1B;AAED,4CAA4C;AAC5C,MAAM,WAAW,sBAAsB,CAAC,CAAC;IACvC,MAAM,EAAE,SAAS,CAAC,CAAC,CAAC,CAAA;CACrB;AAED,6DAA6D;AAC7D,MAAM,WAAW,2BAA2B,CAAC,CAAC;IAC5C,MAAM,EAAE,SAAS,CAAC,CAAC,CAAC,CAAA;CACrB;AAED,qEAAqE;AACrE,MAAM,WAAW,gCAAgC,CAAC,CAAC;IACjD,MAAM,EAAE,SAAS,CAAC
,CAAC,CAAC,CAAA;IACpB,QAAQ,EAAE,MAAM,CAAA;IAChB,OAAO,EAAE,OAAO,CAAA;CACjB;AAED,kEAAkE;AAClE,MAAM,WAAW,wCAAwC,CAAC,CAAC;IACzD,MAAM,EAAE,SAAS,CAAC,CAAC,CAAC,CAAA;IACpB,IAAI,EAAE,UAAU,CAAA;CACjB;AAED,8DAA8D;AAC9D,MAAM,WAAW,2BAA2B;IAC1C,SAAS,EAAE,SAAS,CAAA;IACpB,KAAK,EAAE,CAAC,CAAC,KAAK,CAAA;CACf;AAMD;;GAEG;AACH,eAAO,MAAM,WAAW;IACtB,kEAAkE;;IAElE,mDAAmD;;IAEnD,6EAA6E;;IAE7E,gCAAgC;;IAEhC,kDAAkD;;IAElD,4EAA4E;;CAEpE,CAAA;AACV,MAAM,MAAM,WAAW,GAAG,CAAC,OAAO,WAAW,CAAC,CAAC,MAAM,OAAO,WAAW,CAAC,CAAA;AAExE,eAAO,MAAQ,IAAI,UAAE,OAAO,aAAE,UAAU,gBAAE,KAAK,WAAE,OAAO,aAAE,WAAW,eACxD,CAAA"}
package/dist/DocHandle.js CHANGED
@@ -253,6 +253,85 @@ export class DocHandle extends EventEmitter {
  }
  return A.getHeads(this.#doc);
  }
+ /**
+ * Creates a fixed "view" of an automerge document at the given point in time represented
+ * by the `heads` passed in. The return value is the same type as docSync() and will return
+ * undefined if the object hasn't finished loading.
+ *
+ * @remarks
+ * A point-in-time in an automerge document is an *array* of heads since there may be
+ * concurrent edits. This API just returns a topologically sorted history of all edits
+ * so every previous entry will be (in some sense) before later ones, but the set of all possible
+ * history views would be quite large under concurrency (every thing in each branch against each other).
+ * There might be a clever way to think about this, but we haven't found it yet, so for now at least
+ * we present a single traversable view which excludes concurrency.
+ * @returns The individual heads for every change in the document.
+ */
+ history() {
+ if (!this.isReady()) {
+ return undefined;
+ }
+ // This just returns all the heads as individual strings.
+ return A.topoHistoryTraversal(this.#doc).map(h => [h]);
+ }
+ /**
+ * Creates a fixed "view" of an automerge document at the given point in time represented
+ * by the `heads` passed in. The return value is the same type as docSync() and will return
+ * undefined if the object hasn't finished loading.
+ *
+ * @remarks
+ * Note that our Typescript types do not consider change over time and the current version
+ * of Automerge doesn't check types at runtime, so if you go back to an old set of heads
+ * that doesn't match the heads here, Typescript will not save you.
+ *
+ * @returns An Automerge.Doc<T> at the point in time.
+ */
+ view(heads) {
+ if (!this.isReady()) {
+ return undefined;
+ }
+ return A.view(this.#doc, heads);
+ }
+ /**
+ * Returns a set of Patch operations that will move a materialized document from one state to another
+ * if applied.
+ *
+ * @remarks
+ * We allow specifying both a from/to heads or just a single comparison point, in which case
+ * the base will be the current document heads.
+ *
+ * @returns Automerge patches that go from one document state to the other. Use view() to get the full state.
+ */
+ diff(first, second) {
+ if (!this.isReady()) {
+ return undefined;
+ }
+ // We allow only one set of heads to be specified, in which case we use the doc's heads
+ const from = second ? first : this.heads() || []; // because we guard above this should always have useful data
+ const to = second ? second : first;
+ return A.diff(this.#doc, from, to);
+ }
+ /**
+ * `metadata(head?)` allows you to look at the metadata for a change
+ * this can be used to build history graphs to find commit messages and edit times.
+ * this interface.
+ *
+ * @remarks
+ * I'm really not convinced this is the right way to surface this information so
+ * I'm leaving this API "hidden".
+ *
+ * @hidden
+ */
+ metadata(change) {
+ if (!this.isReady()) {
+ return undefined;
+ }
+ if (!change) {
+ change = this.heads()[0];
+ }
+ // we return undefined instead of null by convention in this API
+ return A.inspectChange(this.#doc, change) || undefined;
+ }
  /**
  * `update` is called any time we have a new document state; could be
  * from a local change, a remote change, or a new document from storage.
@@ -382,6 +461,9 @@ export class DocHandle extends EventEmitter {
  data: encode(message),
  });
  }
+ metrics() {
+ return A.stats(this.#doc);
+ }
  }
  // STATE MACHINE TYPES & CONSTANTS
  // state
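Building on the implementations above, a hedged sketch of a minimal change log: walk history() and read per-change metadata. metadata() is marked @hidden in this release and may change, so treat this as exploratory; `docUrl` is a placeholder for a document URL obtained elsewhere.

```ts
import { Repo, type AutomergeUrl } from "@automerge/automerge-repo"

declare const repo: Repo
declare const docUrl: AutomergeUrl

const handle = repo.find(docUrl)
await handle.whenReady()

for (const heads of handle.history() ?? []) {
  // Each history entry is a single-element array of change hashes;
  // metadata() decodes the change behind one of those hashes.
  const change = handle.metadata(heads[0])
  if (change) {
    console.log(change.time, change.message ?? "(no message)")
  }
}
```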
package/dist/Repo.d.ts CHANGED
@@ -5,6 +5,7 @@ import { NetworkSubsystem } from "./network/NetworkSubsystem.js";
  import { StorageAdapterInterface } from "./storage/StorageAdapterInterface.js";
  import { StorageSubsystem } from "./storage/StorageSubsystem.js";
  import { StorageId } from "./storage/types.js";
+ import { CollectionSynchronizer } from "./synchronizer/CollectionSynchronizer.js";
  import type { AnyDocumentId, DocumentId, PeerId } from "./types.js";
  /** A Repo is a collection of documents with networking, syncing, and storage capabilities. */
  /** The `Repo` is the main entry point of this library
@@ -23,6 +24,8 @@ export declare class Repo extends EventEmitter<RepoEvents> {
  /** The debounce rate is adjustable on the repo. */
  /** @hidden */
  saveDebounceRate: number;
+ /** @hidden */
+ synchronizer: CollectionSynchronizer;
  /** By default, we share generously with all peers. */
  /** @hidden */
  sharePolicy: SharePolicy;
@@ -90,6 +93,11 @@ export declare class Repo extends EventEmitter<RepoEvents> {
  */
  flush(documents?: DocumentId[]): Promise<void>;
  shutdown(): Promise<void>;
+ metrics(): {
+ documents: {
+ [key: string]: any;
+ };
+ };
  }
  export interface RepoConfig {
  /** Our unique identifier */
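A small sketch of reading the new Repo.metrics() declared above. The per-document value is typed `any` at this level, but judging by the synchronizer changes later in this diff each entry carries the syncing peers plus the op/change counts from DocHandle.metrics(); the field accesses below rest on that assumption.

```ts
import { Repo } from "@automerge/automerge-repo"

declare const repo: Repo

// Keyed by documentId; one entry per document the synchronizer knows about.
const { documents } = repo.metrics()

for (const [documentId, entry] of Object.entries(documents)) {
  console.log(documentId, entry.peers, entry.size.numOps, entry.size.numChanges)
}
```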
@@ -1 +1 @@
1
- {"version":3,"file":"Repo.d.ts","sourceRoot":"","sources":["../src/Repo.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,YAAY,EAAE,MAAM,eAAe,CAAA;AAM5C,OAAO,EAAE,SAAS,EAAiC,MAAM,gBAAgB,CAAA;AAIzE,OAAO,EACL,uBAAuB,EACvB,KAAK,YAAY,EAClB,MAAM,sCAAsC,CAAA;AAC7C,OAAO,EAAE,gBAAgB,EAAE,MAAM,+BAA+B,CAAA;AAEhE,OAAO,EAAE,uBAAuB,EAAE,MAAM,sCAAsC,CAAA;AAC9E,OAAO,EAAE,gBAAgB,EAAE,MAAM,+BAA+B,CAAA;AAChE,OAAO,EAAE,SAAS,EAAE,MAAM,oBAAoB,CAAA;AAG9C,OAAO,KAAK,EAAE,aAAa,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,YAAY,CAAA;AAMnE,8FAA8F;AAC9F;;;;;;GAMG;AACH,qBAAa,IAAK,SAAQ,YAAY,CAAC,UAAU,CAAC;;IAGhD,cAAc;IACd,gBAAgB,EAAE,gBAAgB,CAAA;IAClC,cAAc;IACd,gBAAgB,CAAC,EAAE,gBAAgB,CAAA;IAEnC,mDAAmD;IACnD,cAAc;IACd,gBAAgB,SAAM;IAMtB,sDAAsD;IACtD,cAAc;IACd,WAAW,EAAE,WAAW,CAAmB;IAE3C,8GAA8G;IAC9G,cAAc;IACd,oBAAoB,EAAE,MAAM,CAAC,MAAM,EAAE,YAAY,CAAC,CAAK;gBAK3C,EACV,OAAO,EACP,OAAY,EACZ,MAAuB,EACvB,WAAW,EACX,WAAmC,EACnC,0BAAkC,GACnC,GAAE,UAAe;IAwPlB,8CAA8C;IAC9C,IAAI,OAAO,uCAEV;IAED,+CAA+C;IAC/C,IAAI,KAAK,IAAI,MAAM,EAAE,CAEpB;IAED,kBAAkB,CAAC,MAAM,EAAE,MAAM,GAAG,SAAS,GAAG,SAAS;IAIzD;;;;OAIG;IACH,MAAM,CAAC,CAAC,EAAE,YAAY,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC,CAAC,CAAC;IAuBzC;;;;;;;;;;;;;;OAcG;IACH,KAAK,CAAC,CAAC,EAAE,YAAY,EAAE,SAAS,CAAC,CAAC,CAAC;IAuBnC;;;OAGG;IACH,IAAI,CAAC,CAAC;IACJ,sDAAsD;IACtD,EAAE,EAAE,aAAa,GAChB,SAAS,CAAC,CAAC,CAAC;IA+Cf,MAAM;IACJ,oDAAoD;IACpD,EAAE,EAAE,aAAa;IAWnB;;;;;;OAMG;IACG,MAAM,CAAC,EAAE,EAAE,aAAa,GAAG,OAAO,CAAC,UAAU,GAAG,SAAS,CAAC;IAShE;;;OAGG;IACH,MAAM,CAAC,CAAC,EAAE,MAAM,EAAE,UAAU;IAY5B,kBAAkB,YAAa,SAAS,EAAE,UASzC;IAED,SAAS,QAAa,OAAO,CAAC,SAAS,GAAG,SAAS,CAAC,CAMnD;IAED;;;;;OAKG;IACG,KAAK,CAAC,SAAS,CAAC,EAAE,UAAU,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC;IAkBpD,QAAQ,IAAI,OAAO,CAAC,IAAI,CAAC;CAM1B;AAED,MAAM,WAAW,UAAU;IACzB,4BAA4B;IAC5B,MAAM,CAAC,EAAE,MAAM,CAAA;IAEf;8DAC0D;IAC1D,WAAW,CAAC,EAAE,OAAO,CAAA;IAErB,gDAAgD;IAChD,OAAO,CAAC,EAAE,uBAAuB,CAAA;IAEjC,iEAAiE;IACjE,OAAO,CAAC,EAAE,uBAAuB,EAAE,CAAA;IAEnC;;;OAGG;IACH,WAAW,CAAC,EAAE,WAAW,CAAA;IAEzB;;OAEG;IACH,0BAA0B,CAAC,EAAE,OAAO,CAAA;CACrC;AAED;;;;;;;KAOK;AACL,MAAM,MAAM,WAAW,GAAG,CACxB,MAAM,EAAE,MAAM,EACd,UAAU,CAAC,EAAE,UAAU,KACpB,OAAO,CAAC,OAAO,CAAC,CAAA;AAGrB,MAAM,WAAW,UAAU;IACzB,+CAA+C;IAC/C,QAAQ,EAAE,CAAC,GAAG,EAAE,eAAe,KAAK,IAAI,CAAA;IACxC,6BAA6B;IAC7B,iBAAiB,EAAE,CAAC,GAAG,EAAE,qBAAqB,KAAK,IAAI,CAAA;IACvD,4FAA4F;IAC5F,sBAAsB,EAAE,CAAC,GAAG,EAAE,qBAAqB,KAAK,IAAI,CAAA;CAC7D;AAED,MAAM,WAAW,eAAe;IAC9B,MAAM,EAAE,SAAS,CAAC,GAAG,CAAC,CAAA;CACvB;AAED,MAAM,WAAW,qBAAqB;IACpC,UAAU,EAAE,UAAU,CAAA;CACvB"}
1
+ {"version":3,"file":"Repo.d.ts","sourceRoot":"","sources":["../src/Repo.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,YAAY,EAAE,MAAM,eAAe,CAAA;AAM5C,OAAO,EAAE,SAAS,EAAiC,MAAM,gBAAgB,CAAA;AAIzE,OAAO,EACL,uBAAuB,EACvB,KAAK,YAAY,EAClB,MAAM,sCAAsC,CAAA;AAC7C,OAAO,EAAE,gBAAgB,EAAE,MAAM,+BAA+B,CAAA;AAEhE,OAAO,EAAE,uBAAuB,EAAE,MAAM,sCAAsC,CAAA;AAC9E,OAAO,EAAE,gBAAgB,EAAE,MAAM,+BAA+B,CAAA;AAChE,OAAO,EAAE,SAAS,EAAE,MAAM,oBAAoB,CAAA;AAC9C,OAAO,EAAE,sBAAsB,EAAE,MAAM,0CAA0C,CAAA;AAEjF,OAAO,KAAK,EAAE,aAAa,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,YAAY,CAAA;AAMnE,8FAA8F;AAC9F;;;;;;GAMG;AACH,qBAAa,IAAK,SAAQ,YAAY,CAAC,UAAU,CAAC;;IAGhD,cAAc;IACd,gBAAgB,EAAE,gBAAgB,CAAA;IAClC,cAAc;IACd,gBAAgB,CAAC,EAAE,gBAAgB,CAAA;IAEnC,mDAAmD;IACnD,cAAc;IACd,gBAAgB,SAAM;IAItB,cAAc;IACd,YAAY,EAAE,sBAAsB,CAAA;IAEpC,sDAAsD;IACtD,cAAc;IACd,WAAW,EAAE,WAAW,CAAmB;IAE3C,8GAA8G;IAC9G,cAAc;IACd,oBAAoB,EAAE,MAAM,CAAC,MAAM,EAAE,YAAY,CAAC,CAAK;gBAK3C,EACV,OAAO,EACP,OAAY,EACZ,MAAuB,EACvB,WAAW,EACX,WAAmC,EACnC,0BAAkC,GACnC,GAAE,UAAe;IAuPlB,8CAA8C;IAC9C,IAAI,OAAO,uCAEV;IAED,+CAA+C;IAC/C,IAAI,KAAK,IAAI,MAAM,EAAE,CAEpB;IAED,kBAAkB,CAAC,MAAM,EAAE,MAAM,GAAG,SAAS,GAAG,SAAS;IAIzD;;;;OAIG;IACH,MAAM,CAAC,CAAC,EAAE,YAAY,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC,CAAC,CAAC;IAuBzC;;;;;;;;;;;;;;OAcG;IACH,KAAK,CAAC,CAAC,EAAE,YAAY,EAAE,SAAS,CAAC,CAAC,CAAC;IAuBnC;;;OAGG;IACH,IAAI,CAAC,CAAC;IACJ,sDAAsD;IACtD,EAAE,EAAE,aAAa,GAChB,SAAS,CAAC,CAAC,CAAC;IA+Cf,MAAM;IACJ,oDAAoD;IACpD,EAAE,EAAE,aAAa;IAWnB;;;;;;OAMG;IACG,MAAM,CAAC,EAAE,EAAE,aAAa,GAAG,OAAO,CAAC,UAAU,GAAG,SAAS,CAAC;IAShE;;;OAGG;IACH,MAAM,CAAC,CAAC,EAAE,MAAM,EAAE,UAAU;IAY5B,kBAAkB,YAAa,SAAS,EAAE,UASzC;IAED,SAAS,QAAa,OAAO,CAAC,SAAS,GAAG,SAAS,CAAC,CAMnD;IAED;;;;;OAKG;IACG,KAAK,CAAC,SAAS,CAAC,EAAE,UAAU,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC;IAkBpD,QAAQ,IAAI,OAAO,CAAC,IAAI,CAAC;IAOzB,OAAO,IAAI;QAAE,SAAS,EAAE;YAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;SAAE,CAAA;KAAE;CAGjD;AAED,MAAM,WAAW,UAAU;IACzB,4BAA4B;IAC5B,MAAM,CAAC,EAAE,MAAM,CAAA;IAEf;8DAC0D;IAC1D,WAAW,CAAC,EAAE,OAAO,CAAA;IAErB,gDAAgD;IAChD,OAAO,CAAC,EAAE,uBAAuB,CAAA;IAEjC,iEAAiE;IACjE,OAAO,CAAC,EAAE,uBAAuB,EAAE,CAAA;IAEnC;;;OAGG;IACH,WAAW,CAAC,EAAE,WAAW,CAAA;IAEzB;;OAEG;IACH,0BAA0B,CAAC,EAAE,OAAO,CAAA;CACrC;AAED;;;;;;;KAOK;AACL,MAAM,MAAM,WAAW,GAAG,CACxB,MAAM,EAAE,MAAM,EACd,UAAU,CAAC,EAAE,UAAU,KACpB,OAAO,CAAC,OAAO,CAAC,CAAA;AAGrB,MAAM,WAAW,UAAU;IACzB,+CAA+C;IAC/C,QAAQ,EAAE,CAAC,GAAG,EAAE,eAAe,KAAK,IAAI,CAAA;IACxC,6BAA6B;IAC7B,iBAAiB,EAAE,CAAC,GAAG,EAAE,qBAAqB,KAAK,IAAI,CAAA;IACvD,4FAA4F;IAC5F,sBAAsB,EAAE,CAAC,GAAG,EAAE,qBAAqB,KAAK,IAAI,CAAA;CAC7D;AAED,MAAM,WAAW,eAAe;IAC9B,MAAM,EAAE,SAAS,CAAC,GAAG,CAAC,CAAA;CACvB;AAED,MAAM,WAAW,qBAAqB;IACpC,UAAU,EAAE,UAAU,CAAA;CACvB"}
package/dist/Repo.js CHANGED
@@ -30,7 +30,8 @@ export class Repo extends EventEmitter {
30
30
  /** @hidden */
31
31
  saveDebounceRate = 100;
32
32
  #handleCache = {};
33
- #synchronizer;
33
+ /** @hidden */
34
+ synchronizer;
34
35
  /** By default, we share generously with all peers. */
35
36
  /** @hidden */
36
37
  sharePolicy = async () => true;
@@ -44,26 +45,6 @@ export class Repo extends EventEmitter {
44
45
  this.#remoteHeadsGossipingEnabled = enableRemoteHeadsGossiping;
45
46
  this.#log = debug(`automerge-repo:repo`);
46
47
  this.sharePolicy = sharePolicy ?? this.sharePolicy;
47
- // DOC COLLECTION
48
- // The `document` event is fired by the DocCollection any time we create a new document or look
49
- // up a document by ID. We listen for it in order to wire up storage and network synchronization.
50
- this.on("document", async ({ handle }) => {
51
- if (storageSubsystem) {
52
- // Save when the document changes, but no more often than saveDebounceRate.
53
- const saveFn = ({ handle, doc, }) => {
54
- void storageSubsystem.saveDoc(handle.documentId, doc);
55
- };
56
- handle.on("heads-changed", throttle(saveFn, this.saveDebounceRate));
57
- }
58
- handle.on("unavailable", () => {
59
- this.#log("document unavailable", { documentId: handle.documentId });
60
- this.emit("unavailable-document", {
61
- documentId: handle.documentId,
62
- });
63
- });
64
- // Register the document with the synchronizer. This advertises our interest in the document.
65
- this.#synchronizer.addDocument(handle.documentId);
66
- });
67
48
  this.on("delete-document", ({ documentId }) => {
68
49
  // TODO Pass the delete on to the network
69
50
  // synchronizer.removeDocument(documentId)
@@ -75,14 +56,14 @@ export class Repo extends EventEmitter {
75
56
  });
76
57
  // SYNCHRONIZER
77
58
  // The synchronizer uses the network subsystem to keep documents in sync with peers.
78
- this.#synchronizer = new CollectionSynchronizer(this);
59
+ this.synchronizer = new CollectionSynchronizer(this);
79
60
  // When the synchronizer emits messages, send them to peers
80
- this.#synchronizer.on("message", message => {
61
+ this.synchronizer.on("message", message => {
81
62
  this.#log(`sending ${message.type} message to ${message.targetId}`);
82
63
  networkSubsystem.send(message);
83
64
  });
84
65
  if (this.#remoteHeadsGossipingEnabled) {
85
- this.#synchronizer.on("open-doc", ({ peerId, documentId }) => {
66
+ this.synchronizer.on("open-doc", ({ peerId, documentId }) => {
86
67
  this.#remoteHeadsSubscriptions.subscribePeerToDoc(peerId, documentId);
87
68
  });
88
69
  }
@@ -113,18 +94,18 @@ export class Repo extends EventEmitter {
113
94
  .catch(err => {
114
95
  console.log("error in share policy", { err });
115
96
  });
116
- this.#synchronizer.addPeer(peerId);
97
+ this.synchronizer.addPeer(peerId);
117
98
  });
118
99
  // When a peer disconnects, remove it from the synchronizer
119
100
  networkSubsystem.on("peer-disconnected", ({ peerId }) => {
120
- this.#synchronizer.removePeer(peerId);
101
+ this.synchronizer.removePeer(peerId);
121
102
  this.#remoteHeadsSubscriptions.removePeer(peerId);
122
103
  });
123
104
  // Handle incoming messages
124
105
  networkSubsystem.on("message", async (msg) => {
125
106
  this.#receiveMessage(msg);
126
107
  });
127
- this.#synchronizer.on("sync-state", message => {
108
+ this.synchronizer.on("sync-state", message => {
128
109
  this.#saveSyncState(message);
129
110
  const handle = this.#handleCache[message.documentId];
130
111
  const { storageId } = this.peerMetadataByPeerId[message.peerId] || {};
@@ -172,6 +153,28 @@ export class Repo extends EventEmitter {
172
153
  });
173
154
  }
174
155
  }
156
+ // The `document` event is fired by the DocCollection any time we create a new document or look
157
+ // up a document by ID. We listen for it in order to wire up storage and network synchronization.
158
+ #registerHandleWithSubsystems(handle) {
159
+ const { storageSubsystem } = this;
160
+ if (storageSubsystem) {
161
+ // Save when the document changes, but no more often than saveDebounceRate.
162
+ const saveFn = ({ handle, doc }) => {
163
+ void storageSubsystem.saveDoc(handle.documentId, doc);
164
+ };
165
+ handle.on("heads-changed", throttle(saveFn, this.saveDebounceRate));
166
+ }
167
+ handle.on("unavailable", () => {
168
+ this.#log("document unavailable", { documentId: handle.documentId });
169
+ this.emit("unavailable-document", {
170
+ documentId: handle.documentId,
171
+ });
172
+ });
173
+ // Register the document with the synchronizer. This advertises our interest in the document.
174
+ this.synchronizer.addDocument(handle.documentId);
175
+ // Preserve the old event in case anyone was using it.
176
+ this.emit("document", { handle });
177
+ }
175
178
  #receiveMessage(message) {
176
179
  switch (message.type) {
177
180
  case "remote-subscription-change":
@@ -188,7 +191,7 @@ export class Repo extends EventEmitter {
188
191
  case "request":
189
192
  case "ephemeral":
190
193
  case "doc-unavailable":
191
- this.#synchronizer.receiveMessage(message).catch(err => {
194
+ this.synchronizer.receiveMessage(message).catch(err => {
192
195
  console.log("error receiving message", { err });
193
196
  });
194
197
  }
@@ -229,7 +232,7 @@ export class Repo extends EventEmitter {
229
232
  }
230
233
  /** Returns a list of all connected peer ids */
231
234
  get peers() {
232
- return this.#synchronizer.peers;
235
+ return this.synchronizer.peers;
233
236
  }
234
237
  getStorageIdOfPeer(peerId) {
235
238
  return this.peerMetadataByPeerId[peerId]?.storageId;
@@ -245,7 +248,7 @@ export class Repo extends EventEmitter {
245
248
  const handle = this.#getHandle({
246
249
  documentId,
247
250
  });
248
- this.emit("document", { handle });
251
+ this.#registerHandleWithSubsystems(handle);
249
252
  handle.update(() => {
250
253
  let nextDoc;
251
254
  if (initialValue) {
@@ -277,7 +280,7 @@ export class Repo extends EventEmitter {
277
280
  clone(clonedHandle) {
278
281
  if (!clonedHandle.isReady()) {
279
282
  throw new Error(`Cloned handle is not yet in ready state.
280
- (Try await handle.waitForReady() first.)`);
283
+ (Try await handle.whenReady() first.)`);
281
284
  }
282
285
  const sourceDoc = clonedHandle.docSync();
283
286
  if (!sourceDoc) {
@@ -314,31 +317,29 @@ export class Repo extends EventEmitter {
314
317
  const handle = this.#getHandle({
315
318
  documentId,
316
319
  });
317
- // Try to load from disk before telling anyone else about it
318
- if (this.storageSubsystem) {
319
- void this.storageSubsystem.loadDoc(handle.documentId).then(loadedDoc => {
320
- if (loadedDoc) {
321
- // uhhhh, sorry if you're reading this because we were lying to the type system
322
- handle.update(() => loadedDoc);
323
- handle.doneLoading();
324
- }
325
- else {
326
- this.networkSubsystem
327
- .whenReady()
328
- .then(() => {
329
- handle.request();
330
- })
331
- .catch(err => {
332
- this.#log("error waiting for network", { err });
333
- });
334
- this.emit("document", { handle });
335
- }
336
- });
337
- }
338
- else {
339
- handle.request();
340
- this.emit("document", { handle });
341
- }
320
+ // Loading & network is going to be asynchronous no matter what,
321
+ // but we want to return the handle immediately.
322
+ const attemptLoad = this.storageSubsystem
323
+ ? this.storageSubsystem.loadDoc(handle.documentId)
324
+ : Promise.resolve(null);
325
+ attemptLoad
326
+ .then(async (loadedDoc) => {
327
+ if (loadedDoc) {
328
+ // uhhhh, sorry if you're reading this because we were lying to the type system
329
+ handle.update(() => loadedDoc);
330
+ handle.doneLoading();
331
+ }
332
+ else {
333
+ // we want to wait for the network subsystem to be ready before
334
+ // we request the document. this prevents entering unavailable during initialization.
335
+ await this.networkSubsystem.whenReady();
336
+ handle.request();
337
+ }
338
+ this.#registerHandleWithSubsystems(handle);
339
+ })
340
+ .catch(err => {
341
+ this.#log("error waiting for network", { err });
342
+ });
342
343
  return handle;
343
344
  }
344
345
  delete(
@@ -421,4 +422,7 @@ export class Repo extends EventEmitter {
421
422
  });
422
423
  return this.flush();
423
424
  }
425
+ metrics() {
426
+ return { documents: this.synchronizer.metrics() };
427
+ }
424
428
  }
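The reworked find() above returns the handle immediately and finishes storage loading (or a network request) in the background, registering the handle with the subsystems once that settles. A caller-side sketch; the URL is a placeholder and the document shape is assumed.

```ts
import { Repo, isValidAutomergeUrl } from "@automerge/automerge-repo"

declare const repo: Repo
declare const url: string // e.g. an "automerge:..." URL received from a peer

if (isValidAutomergeUrl(url)) {
  const handle = repo.find<{ title: string }>(url)
  // find() no longer blocks on storage; wait for readiness explicitly.
  // (The clone() error message was also corrected to point at whenReady().)
  await handle.whenReady()
  console.log(handle.docSync()?.title)
}
```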
package/dist/entrypoints/fullfat.d.ts CHANGED
@@ -1,2 +1,3 @@
  export * from "../index.js";
+ import "@automerge/automerge";
  //# sourceMappingURL=fullfat.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"fullfat.d.ts","sourceRoot":"","sources":["../../src/entrypoints/fullfat.ts"],"names":[],"mappings":"AAAA,cAAc,aAAa,CAAA"}
1
+ {"version":3,"file":"fullfat.d.ts","sourceRoot":"","sources":["../../src/entrypoints/fullfat.ts"],"names":[],"mappings":"AAAA,cAAc,aAAa,CAAA;AAS3B,OAAO,sBAAsB,CAAA"}
package/dist/entrypoints/fullfat.js CHANGED
@@ -6,5 +6,4 @@ export * from "../index.js";
  // disable
  //
  // eslint-disable-next-line automerge-slimport/enforce-automerge-slim-import
- import { next as Am } from "@automerge/automerge";
- Am.init();
+ import "@automerge/automerge";
package/dist/entrypoints/slim.d.ts CHANGED
@@ -1,2 +1,4 @@
  export * from "../index.js";
+ export { initializeBase64Wasm, initializeWasm } from "@automerge/automerge/slim";
+ export * as Automerge from "@automerge/automerge/slim";
  //# sourceMappingURL=slim.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"slim.d.ts","sourceRoot":"","sources":["../../src/entrypoints/slim.ts"],"names":[],"mappings":"AAAA,cAAc,aAAa,CAAA"}
1
+ {"version":3,"file":"slim.d.ts","sourceRoot":"","sources":["../../src/entrypoints/slim.ts"],"names":[],"mappings":"AAAA,cAAc,aAAa,CAAA;AAC3B,OAAO,EAAE,oBAAoB,EAAE,cAAc,EAAE,MAAM,2BAA2B,CAAA;AAEhF,OAAO,KAAK,SAAS,MAAM,2BAA2B,CAAA"}
package/dist/entrypoints/slim.js CHANGED
@@ -1 +1,4 @@
  export * from "../index.js";
+ export { initializeBase64Wasm, initializeWasm } from "@automerge/automerge/slim";
+ // TODO: temporary work-around during alpha.
+ export * as Automerge from "@automerge/automerge/slim";
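Because the slim entrypoint now re-exports the wasm initializers instead of initializing for you, consumers of the slim build have to set up the wasm module themselves before constructing a Repo. A hedged sketch: the "/slim" subpath and the Vite-style `?url` asset import are assumptions about the consumer's setup, not something this diff guarantees.

```ts
// Assumed subpath export for the slim build of automerge-repo.
import { Repo, initializeWasm } from "@automerge/automerge-repo/slim"
// Vite-style asset import; other bundlers expose the wasm URL differently.
import wasmUrl from "@automerge/automerge/automerge.wasm?url"

await initializeWasm(wasmUrl)

// Only construct the Repo once the wasm backend is ready.
const repo = new Repo({ network: [] })
```

The fullfat entrypoint keeps the eager behaviour: it now relies on the side-effect import of "@automerge/automerge" shown above rather than calling init() explicitly.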
@@ -1 +1 @@
1
- {"version":3,"file":"StorageSubsystem.d.ts","sourceRoot":"","sources":["../../src/storage/StorageSubsystem.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,CAAC,MAAM,gCAAgC,CAAA;AAInD,OAAO,EAAE,KAAK,UAAU,EAAE,MAAM,aAAa,CAAA;AAC7C,OAAO,EAAE,uBAAuB,EAAE,MAAM,8BAA8B,CAAA;AACtE,OAAO,EAAyB,SAAS,EAAE,MAAM,YAAY,CAAA;AAK7D;;;GAGG;AACH,qBAAa,gBAAgB;;gBAef,cAAc,EAAE,uBAAuB;IAI7C,EAAE,IAAI,OAAO,CAAC,SAAS,CAAC;IA2B9B,kCAAkC;IAC5B,IAAI;IACR,iFAAiF;IACjF,SAAS,EAAE,MAAM;IAEjB,yFAAyF;IACzF,GAAG,EAAE,MAAM,GACV,OAAO,CAAC,UAAU,GAAG,SAAS,CAAC;IAKlC,gCAAgC;IAC1B,IAAI;IACR,iFAAiF;IACjF,SAAS,EAAE,MAAM;IAEjB,yFAAyF;IACzF,GAAG,EAAE,MAAM;IAEX,sCAAsC;IACtC,IAAI,EAAE,UAAU,GACf,OAAO,CAAC,IAAI,CAAC;IAKhB,oCAAoC;IAC9B,MAAM;IACV,iFAAiF;IACjF,SAAS,EAAE,MAAM;IAEjB,2FAA2F;IAC3F,GAAG,EAAE,MAAM,GACV,OAAO,CAAC,IAAI,CAAC;IAOhB;;OAEG;IACG,OAAO,CAAC,CAAC,EAAE,UAAU,EAAE,UAAU,GAAG,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC;IAmClE;;;;;;OAMG;IACG,OAAO,CAAC,UAAU,EAAE,UAAU,EAAE,GAAG,EAAE,CAAC,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC;IAazE;;OAEG;IACG,SAAS,CAAC,UAAU,EAAE,UAAU;IAkEhC,aAAa,CACjB,UAAU,EAAE,UAAU,EACtB,SAAS,EAAE,SAAS,GACnB,OAAO,CAAC,CAAC,CAAC,SAAS,GAAG,SAAS,CAAC;IAM7B,aAAa,CACjB,UAAU,EAAE,UAAU,EACtB,SAAS,EAAE,SAAS,EACpB,SAAS,EAAE,CAAC,CAAC,SAAS,GACrB,OAAO,CAAC,IAAI,CAAC;CA8CjB"}
1
+ {"version":3,"file":"StorageSubsystem.d.ts","sourceRoot":"","sources":["../../src/storage/StorageSubsystem.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,CAAC,MAAM,gCAAgC,CAAA;AAInD,OAAO,EAAE,KAAK,UAAU,EAAE,MAAM,aAAa,CAAA;AAC7C,OAAO,EAAE,uBAAuB,EAAE,MAAM,8BAA8B,CAAA;AACtE,OAAO,EAAyB,SAAS,EAAE,MAAM,YAAY,CAAA;AAK7D;;;GAGG;AACH,qBAAa,gBAAgB;;gBAef,cAAc,EAAE,uBAAuB;IAI7C,EAAE,IAAI,OAAO,CAAC,SAAS,CAAC;IA2B9B,kCAAkC;IAC5B,IAAI;IACR,iFAAiF;IACjF,SAAS,EAAE,MAAM;IAEjB,yFAAyF;IACzF,GAAG,EAAE,MAAM,GACV,OAAO,CAAC,UAAU,GAAG,SAAS,CAAC;IAKlC,gCAAgC;IAC1B,IAAI;IACR,iFAAiF;IACjF,SAAS,EAAE,MAAM;IAEjB,yFAAyF;IACzF,GAAG,EAAE,MAAM;IAEX,sCAAsC;IACtC,IAAI,EAAE,UAAU,GACf,OAAO,CAAC,IAAI,CAAC;IAKhB,oCAAoC;IAC9B,MAAM;IACV,iFAAiF;IACjF,SAAS,EAAE,MAAM;IAEjB,2FAA2F;IAC3F,GAAG,EAAE,MAAM,GACV,OAAO,CAAC,IAAI,CAAC;IAOhB;;OAEG;IACG,OAAO,CAAC,CAAC,EAAE,UAAU,EAAE,UAAU,GAAG,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC;IAmClE;;;;;;OAMG;IACG,OAAO,CAAC,UAAU,EAAE,UAAU,EAAE,GAAG,EAAE,CAAC,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC;IAazE;;OAEG;IACG,SAAS,CAAC,UAAU,EAAE,UAAU;IAkEhC,aAAa,CACjB,UAAU,EAAE,UAAU,EACtB,SAAS,EAAE,SAAS,GACnB,OAAO,CAAC,CAAC,CAAC,SAAS,GAAG,SAAS,CAAC;IAW7B,aAAa,CACjB,UAAU,EAAE,UAAU,EACtB,SAAS,EAAE,SAAS,EACpB,SAAS,EAAE,CAAC,CAAC,SAAS,GACrB,OAAO,CAAC,IAAI,CAAC;CA8CjB"}
package/dist/storage/StorageSubsystem.js CHANGED
@@ -178,8 +178,14 @@ export class StorageSubsystem {
  }
  async loadSyncState(documentId, storageId) {
  const key = [documentId, "sync-state", storageId];
- const loaded = await this.#storageAdapter.load(key);
- return loaded ? A.decodeSyncState(loaded) : undefined;
+ try {
+ const loaded = await this.#storageAdapter.load(key);
+ return loaded ? A.decodeSyncState(loaded) : undefined;
+ }
+ catch (e) {
+ this.#log(`Error loading sync state for ${documentId} from ${storageId}`);
+ return undefined;
+ }
  }
  async saveSyncState(documentId, storageId, syncState) {
  const key = [documentId, "sync-state", storageId];
package/dist/synchronizer/CollectionSynchronizer.d.ts CHANGED
@@ -1,11 +1,15 @@
  import { Repo } from "../Repo.js";
  import { DocMessage } from "../network/messages.js";
  import { DocumentId, PeerId } from "../types.js";
+ import { DocSynchronizer } from "./DocSynchronizer.js";
  import { Synchronizer } from "./Synchronizer.js";
  /** A CollectionSynchronizer is responsible for synchronizing a DocCollection with peers. */
  export declare class CollectionSynchronizer extends Synchronizer {
  #private;
  private repo;
+ /** A map of documentIds to their synchronizers */
+ /** @hidden */
+ docSynchronizers: Record<DocumentId, DocSynchronizer>;
  constructor(repo: Repo);
  /**
  * When we receive a sync message for a document we haven't got in memory, we
@@ -23,5 +27,14 @@ export declare class CollectionSynchronizer extends Synchronizer {
  removePeer(peerId: PeerId): void;
  /** Returns a list of all connected peer ids */
  get peers(): PeerId[];
+ metrics(): {
+ [key: string]: {
+ peers: PeerId[];
+ size: {
+ numOps: number;
+ numChanges: number;
+ };
+ };
+ };
  }
  //# sourceMappingURL=CollectionSynchronizer.d.ts.map
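These declarations make the per-document synchronizers reachable from the outside (public but @hidden), which is what the new metrics plumbing relies on. A hedged debugging sketch using them directly:

```ts
import { Repo } from "@automerge/automerge-repo"

declare const repo: Repo

// repo.synchronizer and its docSynchronizers map are an escape hatch for
// debugging and metrics tooling, not a stable public API.
for (const [documentId, docSync] of Object.entries(repo.synchronizer.docSynchronizers)) {
  const { peers, size } = docSync.metrics()
  console.log(documentId, peers, size.numChanges)
}
```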
@@ -1 +1 @@
1
- {"version":3,"file":"CollectionSynchronizer.d.ts","sourceRoot":"","sources":["../../src/synchronizer/CollectionSynchronizer.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,IAAI,EAAE,MAAM,YAAY,CAAA;AACjC,OAAO,EAAE,UAAU,EAAE,MAAM,wBAAwB,CAAA;AACnD,OAAO,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,aAAa,CAAA;AAEhD,OAAO,EAAE,YAAY,EAAE,MAAM,mBAAmB,CAAA;AAIhD,4FAA4F;AAC5F,qBAAa,sBAAuB,SAAQ,YAAY;;IAU1C,OAAO,CAAC,IAAI;gBAAJ,IAAI,EAAE,IAAI;IAqD9B;;;OAGG;IACG,cAAc,CAAC,OAAO,EAAE,UAAU;IAyBxC;;OAEG;IACH,WAAW,CAAC,UAAU,EAAE,UAAU;IAalC,cAAc,CAAC,UAAU,EAAE,UAAU;IAIrC,2DAA2D;IAC3D,OAAO,CAAC,MAAM,EAAE,MAAM;IAgBtB,uDAAuD;IACvD,UAAU,CAAC,MAAM,EAAE,MAAM;IASzB,+CAA+C;IAC/C,IAAI,KAAK,IAAI,MAAM,EAAE,CAEpB;CACF"}
1
+ {"version":3,"file":"CollectionSynchronizer.d.ts","sourceRoot":"","sources":["../../src/synchronizer/CollectionSynchronizer.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,IAAI,EAAE,MAAM,YAAY,CAAA;AACjC,OAAO,EAAE,UAAU,EAAE,MAAM,wBAAwB,CAAA;AACnD,OAAO,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,aAAa,CAAA;AAChD,OAAO,EAAE,eAAe,EAAE,MAAM,sBAAsB,CAAA;AACtD,OAAO,EAAE,YAAY,EAAE,MAAM,mBAAmB,CAAA;AAIhD,4FAA4F;AAC5F,qBAAa,sBAAuB,SAAQ,YAAY;;IAW1C,OAAO,CAAC,IAAI;IAPxB,kDAAkD;IAClD,cAAc;IACd,gBAAgB,EAAE,MAAM,CAAC,UAAU,EAAE,eAAe,CAAC,CAAK;gBAKtC,IAAI,EAAE,IAAI;IAqD9B;;;OAGG;IACG,cAAc,CAAC,OAAO,EAAE,UAAU;IAyBxC;;OAEG;IACH,WAAW,CAAC,UAAU,EAAE,UAAU;IAalC,cAAc,CAAC,UAAU,EAAE,UAAU;IAIrC,2DAA2D;IAC3D,OAAO,CAAC,MAAM,EAAE,MAAM;IAgBtB,uDAAuD;IACvD,UAAU,CAAC,MAAM,EAAE,MAAM;IASzB,+CAA+C;IAC/C,IAAI,KAAK,IAAI,MAAM,EAAE,CAEpB;IAED,OAAO,IAAI;QACT,CAAC,GAAG,EAAE,MAAM,GAAG;YACb,KAAK,EAAE,MAAM,EAAE,CAAA;YACf,IAAI,EAAE;gBAAE,MAAM,EAAE,MAAM,CAAC;gBAAC,UAAU,EAAE,MAAM,CAAA;aAAE,CAAA;SAC7C,CAAA;KACF;CASF"}
package/dist/synchronizer/CollectionSynchronizer.js CHANGED
@@ -9,7 +9,8 @@ export class CollectionSynchronizer extends Synchronizer {
  /** The set of peers we are connected with */
  #peers = new Set();
  /** A map of documentIds to their synchronizers */
- #docSynchronizers = {};
+ /** @hidden */
+ docSynchronizers = {};
  /** Used to determine if the document is know to the Collection and a synchronizer exists or is being set up */
  #docSetUp = {};
  constructor(repo) {
@@ -18,11 +19,11 @@ export class CollectionSynchronizer extends Synchronizer {
  }
  /** Returns a synchronizer for the given document, creating one if it doesn't already exist. */
  #fetchDocSynchronizer(documentId) {
- if (!this.#docSynchronizers[documentId]) {
+ if (!this.docSynchronizers[documentId]) {
  const handle = this.repo.find(stringifyAutomergeUrl({ documentId }));
- this.#docSynchronizers[documentId] = this.#initDocSynchronizer(handle);
+ this.docSynchronizers[documentId] = this.#initDocSynchronizer(handle);
  }
- return this.#docSynchronizers[documentId];
+ return this.docSynchronizers[documentId];
  }
  /** Creates a new docSynchronizer and sets it up to propagate messages */
  #initDocSynchronizer(handle) {
@@ -98,7 +99,7 @@ export class CollectionSynchronizer extends Synchronizer {
  return;
  }
  this.#peers.add(peerId);
- for (const docSynchronizer of Object.values(this.#docSynchronizers)) {
+ for (const docSynchronizer of Object.values(this.docSynchronizers)) {
  const { documentId } = docSynchronizer;
  void this.repo.sharePolicy(peerId, documentId).then(okToShare => {
  if (okToShare)
@@ -110,7 +111,7 @@ export class CollectionSynchronizer extends Synchronizer {
  removePeer(peerId) {
  log(`removing peer ${peerId}`);
  this.#peers.delete(peerId);
- for (const docSynchronizer of Object.values(this.#docSynchronizers)) {
+ for (const docSynchronizer of Object.values(this.docSynchronizers)) {
  docSynchronizer.endSync(peerId);
  }
  }
@@ -118,4 +119,9 @@ export class CollectionSynchronizer extends Synchronizer {
  get peers() {
  return Array.from(this.#peers);
  }
+ metrics() {
+ return Object.fromEntries(Object.entries(this.docSynchronizers).map(([documentId, synchronizer]) => {
+ return [documentId, synchronizer.metrics()];
+ }));
+ }
  }
package/dist/synchronizer/DocSynchronizer.d.ts CHANGED
@@ -24,6 +24,13 @@ export declare class DocSynchronizer extends Synchronizer {
  receiveMessage(message: RepoMessage): void;
  receiveEphemeralMessage(message: EphemeralMessage): void;
  receiveSyncMessage(message: SyncMessage | RequestMessage): void;
+ metrics(): {
+ peers: PeerId[];
+ size: {
+ numOps: number;
+ numChanges: number;
+ };
+ };
  }
  export {};
  //# sourceMappingURL=DocSynchronizer.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"DocSynchronizer.d.ts","sourceRoot":"","sources":["../../src/synchronizer/DocSynchronizer.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,CAAC,MAAM,gCAAgC,CAAA;AAGnD,OAAO,EACL,SAAS,EAKV,MAAM,iBAAiB,CAAA;AACxB,OAAO,EAEL,gBAAgB,EAEhB,WAAW,EACX,cAAc,EACd,WAAW,EAEZ,MAAM,wBAAwB,CAAA;AAC/B,OAAO,EAAE,MAAM,EAAE,MAAM,aAAa,CAAA;AACpC,OAAO,EAAE,YAAY,EAAE,MAAM,mBAAmB,CAAA;AAGhD,KAAK,kBAAkB,GAAG,SAAS,GAAG,KAAK,GAAG,aAAa,GAAG,OAAO,CAAA;AAOrE,UAAU,qBAAqB;IAC7B,MAAM,EAAE,SAAS,CAAC,OAAO,CAAC,CAAA;IAC1B,eAAe,CAAC,EAAE,CAAC,MAAM,EAAE,MAAM,KAAK,OAAO,CAAC,CAAC,CAAC,SAAS,GAAG,SAAS,CAAC,CAAA;CACvE;AAED;;;GAGG;AACH,qBAAa,eAAgB,SAAQ,YAAY;;IAE/C,gBAAgB,SAAM;gBAsBV,EAAE,MAAM,EAAE,eAAe,EAAE,EAAE,qBAAqB;IAyB9D,IAAI,UAAU,uCAEb;IAED,IAAI,UAAU,qCAEb;IAkID,OAAO,CAAC,MAAM,EAAE,MAAM;IAItB,SAAS,CAAC,OAAO,EAAE,MAAM,EAAE;IAmD3B,OAAO,CAAC,MAAM,EAAE,MAAM;IAKtB,cAAc,CAAC,OAAO,EAAE,WAAW;IAkBnC,uBAAuB,CAAC,OAAO,EAAE,gBAAgB;IAuBjD,kBAAkB,CAAC,OAAO,EAAE,WAAW,GAAG,cAAc;CA8EzD"}
1
+ {"version":3,"file":"DocSynchronizer.d.ts","sourceRoot":"","sources":["../../src/synchronizer/DocSynchronizer.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,CAAC,MAAM,gCAAgC,CAAA;AAGnD,OAAO,EACL,SAAS,EAKV,MAAM,iBAAiB,CAAA;AACxB,OAAO,EAEL,gBAAgB,EAEhB,WAAW,EACX,cAAc,EACd,WAAW,EAEZ,MAAM,wBAAwB,CAAA;AAC/B,OAAO,EAAE,MAAM,EAAE,MAAM,aAAa,CAAA;AACpC,OAAO,EAAE,YAAY,EAAE,MAAM,mBAAmB,CAAA;AAGhD,KAAK,kBAAkB,GAAG,SAAS,GAAG,KAAK,GAAG,aAAa,GAAG,OAAO,CAAA;AAOrE,UAAU,qBAAqB;IAC7B,MAAM,EAAE,SAAS,CAAC,OAAO,CAAC,CAAA;IAC1B,eAAe,CAAC,EAAE,CAAC,MAAM,EAAE,MAAM,KAAK,OAAO,CAAC,CAAC,CAAC,SAAS,GAAG,SAAS,CAAC,CAAA;CACvE;AAED;;;GAGG;AACH,qBAAa,eAAgB,SAAQ,YAAY;;IAE/C,gBAAgB,SAAM;gBAsBV,EAAE,MAAM,EAAE,eAAe,EAAE,EAAE,qBAAqB;IAyB9D,IAAI,UAAU,uCAEb;IAED,IAAI,UAAU,qCAEb;IAkID,OAAO,CAAC,MAAM,EAAE,MAAM;IAItB,SAAS,CAAC,OAAO,EAAE,MAAM,EAAE;IAmD3B,OAAO,CAAC,MAAM,EAAE,MAAM;IAKtB,cAAc,CAAC,OAAO,EAAE,WAAW;IAkBnC,uBAAuB,CAAC,OAAO,EAAE,gBAAgB;IAuBjD,kBAAkB,CAAC,OAAO,EAAE,WAAW,GAAG,cAAc;IA+ExD,OAAO,IAAI;QAAE,KAAK,EAAE,MAAM,EAAE,CAAC;QAAC,IAAI,EAAE;YAAE,MAAM,EAAE,MAAM,CAAC;YAAC,UAAU,EAAE,MAAM,CAAA;SAAE,CAAA;KAAE;CAM7E"}
package/dist/synchronizer/DocSynchronizer.js CHANGED
@@ -286,4 +286,10 @@ export class DocSynchronizer extends Synchronizer {
  }
  this.#pendingSyncMessages = [];
  }
+ metrics() {
+ return {
+ peers: this.#peers,
+ size: this.#handle.metrics(),
+ };
+ }
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@automerge/automerge-repo",
- "version": "2.0.0-alpha.0",
+ "version": "2.0.0-alpha.11",
  "description": "A repository object to manage a collection of automerge documents",
  "repository": "https://github.com/automerge/automerge-repo/tree/master/packages/automerge-repo",
  "author": "Peter van Hardenberg <pvh@pvh.ca>",
@@ -23,7 +23,7 @@
  "vite": "^5.0.8"
  },
  "dependencies": {
- "@automerge/automerge": "^2.2.7",
+ "@automerge/automerge": "^2.2.8",
  "bs58check": "^3.0.1",
  "cbor-x": "^1.3.0",
  "debug": "^4.3.4",
@@ -60,5 +60,5 @@
  "publishConfig": {
  "access": "public"
  },
- "gitHead": "16356392fb2ed9245565ae04f6e6b49e61195e65"
+ "gitHead": "66b09d6f3662e16e0cd8e2cafef39ef5504104d6"
  }
package/src/DocHandle.ts CHANGED
@@ -291,6 +291,90 @@ export class DocHandle<T> extends EventEmitter<DocHandleEvents<T>> {
  return A.getHeads(this.#doc)
  }

+ /**
+ * Creates a fixed "view" of an automerge document at the given point in time represented
+ * by the `heads` passed in. The return value is the same type as docSync() and will return
+ * undefined if the object hasn't finished loading.
+ *
+ * @remarks
+ * A point-in-time in an automerge document is an *array* of heads since there may be
+ * concurrent edits. This API just returns a topologically sorted history of all edits
+ * so every previous entry will be (in some sense) before later ones, but the set of all possible
+ * history views would be quite large under concurrency (every thing in each branch against each other).
+ * There might be a clever way to think about this, but we haven't found it yet, so for now at least
+ * we present a single traversable view which excludes concurrency.
+ * @returns The individual heads for every change in the document.
+ */
+ history(): A.Heads[] | undefined {
+ if (!this.isReady()) {
+ return undefined
+ }
+ // This just returns all the heads as individual strings.
+
+ return A.topoHistoryTraversal(this.#doc).map(h => [h]) as A.Heads[]
+ }
+
+ /**
+ * Creates a fixed "view" of an automerge document at the given point in time represented
+ * by the `heads` passed in. The return value is the same type as docSync() and will return
+ * undefined if the object hasn't finished loading.
+ *
+ * @remarks
+ * Note that our Typescript types do not consider change over time and the current version
+ * of Automerge doesn't check types at runtime, so if you go back to an old set of heads
+ * that doesn't match the heads here, Typescript will not save you.
+ *
+ * @returns An Automerge.Doc<T> at the point in time.
+ */
+ view(heads: A.Heads): A.Doc<T> | undefined {
+ if (!this.isReady()) {
+ return undefined
+ }
+ return A.view(this.#doc, heads)
+ }
+
+ /**
+ * Returns a set of Patch operations that will move a materialized document from one state to another
+ * if applied.
+ *
+ * @remarks
+ * We allow specifying both a from/to heads or just a single comparison point, in which case
+ * the base will be the current document heads.
+ *
+ * @returns Automerge patches that go from one document state to the other. Use view() to get the full state.
+ */
+ diff(first: A.Heads, second?: A.Heads): A.Patch[] | undefined {
+ if (!this.isReady()) {
+ return undefined
+ }
+ // We allow only one set of heads to be specified, in which case we use the doc's heads
+ const from = second ? first : this.heads() || [] // because we guard above this should always have useful data
+ const to = second ? second : first
+ return A.diff(this.#doc, from, to)
+ }
+
+ /**
+ * `metadata(head?)` allows you to look at the metadata for a change
+ * this can be used to build history graphs to find commit messages and edit times.
+ * this interface.
+ *
+ * @remarks
+ * I'm really not convinced this is the right way to surface this information so
+ * I'm leaving this API "hidden".
+ *
+ * @hidden
+ */
+ metadata(change?: string): A.DecodedChange | undefined {
+ if (!this.isReady()) {
+ return undefined
+ }
+ if (!change) {
+ change = this.heads()![0]
+ }
+ // we return undefined instead of null by convention in this API
+ return A.inspectChange(this.#doc, change) || undefined
+ }
+
  /**
  * `update` is called any time we have a new document state; could be
  * from a local change, a remote change, or a new document from storage.
@@ -439,6 +523,10 @@ export class DocHandle<T> extends EventEmitter<DocHandleEvents<T>> {
  data: encode(message),
  })
  }
+
+ metrics(): { numOps: number; numChanges: number } {
+ return A.stats(this.#doc)
+ }
  }

  // TYPES
package/src/Repo.ts CHANGED
@@ -49,7 +49,8 @@ export class Repo extends EventEmitter<RepoEvents> {
49
49
 
50
50
  #handleCache: Record<DocumentId, DocHandle<any>> = {}
51
51
 
52
- #synchronizer: CollectionSynchronizer
52
+ /** @hidden */
53
+ synchronizer: CollectionSynchronizer
53
54
 
54
55
  /** By default, we share generously with all peers. */
55
56
  /** @hidden */
@@ -75,33 +76,6 @@ export class Repo extends EventEmitter<RepoEvents> {
75
76
  this.#log = debug(`automerge-repo:repo`)
76
77
  this.sharePolicy = sharePolicy ?? this.sharePolicy
77
78
 
78
- // DOC COLLECTION
79
-
80
- // The `document` event is fired by the DocCollection any time we create a new document or look
81
- // up a document by ID. We listen for it in order to wire up storage and network synchronization.
82
- this.on("document", async ({ handle }) => {
83
- if (storageSubsystem) {
84
- // Save when the document changes, but no more often than saveDebounceRate.
85
- const saveFn = ({
86
- handle,
87
- doc,
88
- }: DocHandleEncodedChangePayload<any>) => {
89
- void storageSubsystem.saveDoc(handle.documentId, doc)
90
- }
91
- handle.on("heads-changed", throttle(saveFn, this.saveDebounceRate))
92
- }
93
-
94
- handle.on("unavailable", () => {
95
- this.#log("document unavailable", { documentId: handle.documentId })
96
- this.emit("unavailable-document", {
97
- documentId: handle.documentId,
98
- })
99
- })
100
-
101
- // Register the document with the synchronizer. This advertises our interest in the document.
102
- this.#synchronizer.addDocument(handle.documentId)
103
- })
104
-
105
79
  this.on("delete-document", ({ documentId }) => {
106
80
  // TODO Pass the delete on to the network
107
81
  // synchronizer.removeDocument(documentId)
@@ -115,16 +89,16 @@ export class Repo extends EventEmitter<RepoEvents> {
115
89
 
116
90
  // SYNCHRONIZER
117
91
  // The synchronizer uses the network subsystem to keep documents in sync with peers.
118
- this.#synchronizer = new CollectionSynchronizer(this)
92
+ this.synchronizer = new CollectionSynchronizer(this)
119
93
 
120
94
  // When the synchronizer emits messages, send them to peers
121
- this.#synchronizer.on("message", message => {
95
+ this.synchronizer.on("message", message => {
122
96
  this.#log(`sending ${message.type} message to ${message.targetId}`)
123
97
  networkSubsystem.send(message)
124
98
  })
125
99
 
126
100
  if (this.#remoteHeadsGossipingEnabled) {
127
- this.#synchronizer.on("open-doc", ({ peerId, documentId }) => {
101
+ this.synchronizer.on("open-doc", ({ peerId, documentId }) => {
128
102
  this.#remoteHeadsSubscriptions.subscribePeerToDoc(peerId, documentId)
129
103
  })
130
104
  }
@@ -167,12 +141,12 @@ export class Repo extends EventEmitter<RepoEvents> {
167
141
  console.log("error in share policy", { err })
168
142
  })
169
143
 
170
- this.#synchronizer.addPeer(peerId)
144
+ this.synchronizer.addPeer(peerId)
171
145
  })
172
146
 
173
147
  // When a peer disconnects, remove it from the synchronizer
174
148
  networkSubsystem.on("peer-disconnected", ({ peerId }) => {
175
- this.#synchronizer.removePeer(peerId)
149
+ this.synchronizer.removePeer(peerId)
176
150
  this.#remoteHeadsSubscriptions.removePeer(peerId)
177
151
  })
178
152
 
@@ -181,7 +155,7 @@ export class Repo extends EventEmitter<RepoEvents> {
181
155
  this.#receiveMessage(msg)
182
156
  })
183
157
 
184
- this.#synchronizer.on("sync-state", message => {
158
+ this.synchronizer.on("sync-state", message => {
185
159
  this.#saveSyncState(message)
186
160
 
187
161
  const handle = this.#handleCache[message.documentId]
@@ -243,6 +217,32 @@ export class Repo extends EventEmitter<RepoEvents> {
243
217
  }
244
218
  }
245
219
 
220
+ // The `document` event is fired by the DocCollection any time we create a new document or look
221
+ // up a document by ID. We listen for it in order to wire up storage and network synchronization.
222
+ #registerHandleWithSubsystems(handle: DocHandle<any>) {
223
+ const { storageSubsystem } = this
224
+ if (storageSubsystem) {
225
+ // Save when the document changes, but no more often than saveDebounceRate.
226
+ const saveFn = ({ handle, doc }: DocHandleEncodedChangePayload<any>) => {
227
+ void storageSubsystem.saveDoc(handle.documentId, doc)
228
+ }
229
+ handle.on("heads-changed", throttle(saveFn, this.saveDebounceRate))
230
+ }
231
+
232
+ handle.on("unavailable", () => {
233
+ this.#log("document unavailable", { documentId: handle.documentId })
234
+ this.emit("unavailable-document", {
235
+ documentId: handle.documentId,
236
+ })
237
+ })
238
+
239
+ // Register the document with the synchronizer. This advertises our interest in the document.
240
+ this.synchronizer.addDocument(handle.documentId)
241
+
242
+ // Preserve the old event in case anyone was using it.
243
+ this.emit("document", { handle })
244
+ }
245
+
246
246
  #receiveMessage(message: RepoMessage) {
247
247
  switch (message.type) {
248
248
  case "remote-subscription-change":
@@ -259,7 +259,7 @@ export class Repo extends EventEmitter<RepoEvents> {
259
259
  case "request":
260
260
  case "ephemeral":
261
261
  case "doc-unavailable":
262
- this.#synchronizer.receiveMessage(message).catch(err => {
262
+ this.synchronizer.receiveMessage(message).catch(err => {
263
263
  console.log("error receiving message", { err })
264
264
  })
265
265
  }
@@ -324,7 +324,7 @@ export class Repo extends EventEmitter<RepoEvents> {
324
324
 
325
325
  /** Returns a list of all connected peer ids */
326
326
  get peers(): PeerId[] {
327
- return this.#synchronizer.peers
327
+ return this.synchronizer.peers
328
328
  }
329
329
 
330
330
  getStorageIdOfPeer(peerId: PeerId): StorageId | undefined {
@@ -343,7 +343,7 @@ export class Repo extends EventEmitter<RepoEvents> {
343
343
  documentId,
344
344
  }) as DocHandle<T>
345
345
 
346
- this.emit("document", { handle })
346
+ this.#registerHandleWithSubsystems(handle)
347
347
 
348
348
  handle.update(() => {
349
349
  let nextDoc: Automerge.Doc<T>
@@ -378,7 +378,7 @@ export class Repo extends EventEmitter<RepoEvents> {
378
378
  if (!clonedHandle.isReady()) {
379
379
  throw new Error(
380
380
  `Cloned handle is not yet in ready state.
381
- (Try await handle.waitForReady() first.)`
381
+ (Try await handle.whenReady() first.)`
382
382
  )
383
383
  }
384
384
 
@@ -425,29 +425,29 @@ export class Repo extends EventEmitter<RepoEvents> {
425
425
  documentId,
426
426
  }) as DocHandle<T>
427
427
 
428
- // Try to load from disk before telling anyone else about it
429
- if (this.storageSubsystem) {
430
- void this.storageSubsystem.loadDoc(handle.documentId).then(loadedDoc => {
428
+ // Loading & network is going to be asynchronous no matter what,
429
+ // but we want to return the handle immediately.
430
+ const attemptLoad = this.storageSubsystem
431
+ ? this.storageSubsystem.loadDoc(handle.documentId)
432
+ : Promise.resolve(null)
433
+
434
+ attemptLoad
435
+ .then(async loadedDoc => {
431
436
  if (loadedDoc) {
432
437
  // uhhhh, sorry if you're reading this because we were lying to the type system
433
438
  handle.update(() => loadedDoc as Automerge.Doc<T>)
434
439
  handle.doneLoading()
435
440
  } else {
436
- this.networkSubsystem
437
- .whenReady()
438
- .then(() => {
439
- handle.request()
440
- })
441
- .catch(err => {
442
- this.#log("error waiting for network", { err })
443
- })
444
- this.emit("document", { handle })
441
+ // we want to wait for the network subsystem to be ready before
442
+ // we request the document. this prevents entering unavailable during initialization.
443
+ await this.networkSubsystem.whenReady()
444
+ handle.request()
445
445
  }
446
+ this.#registerHandleWithSubsystems(handle)
447
+ })
448
+ .catch(err => {
449
+ this.#log("error waiting for network", { err })
446
450
  })
447
- } else {
448
- handle.request()
449
- this.emit("document", { handle })
450
- }
451
451
  return handle
452
452
  }
453
453
 
@@ -545,6 +545,10 @@ export class Repo extends EventEmitter<RepoEvents> {
545
545
  })
546
546
  return this.flush()
547
547
  }
548
+
549
+ metrics(): { documents: { [key: string]: any } } {
550
+ return { documents: this.synchronizer.metrics() }
551
+ }
548
552
  }
549
553
 
550
554
  export interface RepoConfig {
package/src/entrypoints/fullfat.ts CHANGED
@@ -7,5 +7,4 @@ export * from "../index.js"
  // disable
  //
  // eslint-disable-next-line automerge-slimport/enforce-automerge-slim-import
- import { next as Am } from "@automerge/automerge"
- Am.init()
+ import "@automerge/automerge"
@@ -1 +1,4 @@
1
1
  export * from "../index.js"
2
+ export { initializeBase64Wasm, initializeWasm } from "@automerge/automerge/slim"
3
+ // TODO: temporary work-around during alpha.
4
+ export * as Automerge from "@automerge/automerge/slim"
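
With the eager `Am.init()` call removed, the slim entry point now leaves WebAssembly initialization to the caller and re-exports `initializeWasm` / `initializeBase64Wasm` for that purpose. A sketch, assuming the package exposes this file as its `/slim` subpath and that your bundler can hand you the wasm as a URL (both are assumptions, not something this diff specifies):

```ts
// Hypothetical wiring for the slim build; the "?url" import is a Vite-style
// bundler convention and the wasm asset path is an assumption.
import { Repo, initializeWasm } from "@automerge/automerge-repo/slim"
// @ts-ignore -- bundler-provided asset URL
import wasmUrl from "@automerge/automerge/automerge.wasm?url"

await initializeWasm(wasmUrl) // or initializeBase64Wasm(someBase64String)

const repo = new Repo({ network: [] })
```
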
@@ -232,8 +232,13 @@ export class StorageSubsystem {
232
232
  storageId: StorageId
233
233
  ): Promise<A.SyncState | undefined> {
234
234
  const key = [documentId, "sync-state", storageId]
235
- const loaded = await this.#storageAdapter.load(key)
236
- return loaded ? A.decodeSyncState(loaded) : undefined
235
+ try {
236
+ const loaded = await this.#storageAdapter.load(key)
237
+ return loaded ? A.decodeSyncState(loaded) : undefined
238
+ } catch (e) {
239
+ this.#log(`Error loading sync state for ${documentId} from ${storageId}`)
240
+ return undefined
241
+ }
237
242
  }
238
243
 
239
244
  async saveSyncState(
@@ -15,7 +15,8 @@ export class CollectionSynchronizer extends Synchronizer {
15
15
  #peers: Set<PeerId> = new Set()
16
16
 
17
17
  /** A map of documentIds to their synchronizers */
18
- #docSynchronizers: Record<DocumentId, DocSynchronizer> = {}
18
+ /** @hidden */
19
+ docSynchronizers: Record<DocumentId, DocSynchronizer> = {}
19
20
 
20
21
  /** Used to determine if the document is know to the Collection and a synchronizer exists or is being set up */
21
22
  #docSetUp: Record<DocumentId, boolean> = {}
@@ -26,11 +27,11 @@ export class CollectionSynchronizer extends Synchronizer {
26
27
 
27
28
  /** Returns a synchronizer for the given document, creating one if it doesn't already exist. */
28
29
  #fetchDocSynchronizer(documentId: DocumentId) {
29
- if (!this.#docSynchronizers[documentId]) {
30
+ if (!this.docSynchronizers[documentId]) {
30
31
  const handle = this.repo.find(stringifyAutomergeUrl({ documentId }))
31
- this.#docSynchronizers[documentId] = this.#initDocSynchronizer(handle)
32
+ this.docSynchronizers[documentId] = this.#initDocSynchronizer(handle)
32
33
  }
33
- return this.#docSynchronizers[documentId]
34
+ return this.docSynchronizers[documentId]
34
35
  }
35
36
 
36
37
  /** Creates a new docSynchronizer and sets it up to propagate messages */
@@ -131,7 +132,7 @@ export class CollectionSynchronizer extends Synchronizer {
131
132
  }
132
133
 
133
134
  this.#peers.add(peerId)
134
- for (const docSynchronizer of Object.values(this.#docSynchronizers)) {
135
+ for (const docSynchronizer of Object.values(this.docSynchronizers)) {
135
136
  const { documentId } = docSynchronizer
136
137
  void this.repo.sharePolicy(peerId, documentId).then(okToShare => {
137
138
  if (okToShare) docSynchronizer.beginSync([peerId])
@@ -144,7 +145,7 @@ export class CollectionSynchronizer extends Synchronizer {
144
145
  log(`removing peer ${peerId}`)
145
146
  this.#peers.delete(peerId)
146
147
 
147
- for (const docSynchronizer of Object.values(this.#docSynchronizers)) {
148
+ for (const docSynchronizer of Object.values(this.docSynchronizers)) {
148
149
  docSynchronizer.endSync(peerId)
149
150
  }
150
151
  }
@@ -153,4 +154,19 @@ export class CollectionSynchronizer extends Synchronizer {
153
154
  get peers(): PeerId[] {
154
155
  return Array.from(this.#peers)
155
156
  }
157
+
158
+ metrics(): {
159
+ [key: string]: {
160
+ peers: PeerId[]
161
+ size: { numOps: number; numChanges: number }
162
+ }
163
+ } {
164
+ return Object.fromEntries(
165
+ Object.entries(this.docSynchronizers).map(
166
+ ([documentId, synchronizer]) => {
167
+ return [documentId, synchronizer.metrics()]
168
+ }
169
+ )
170
+ )
171
+ }
156
172
  }
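
Making `docSynchronizers` a (still `@hidden`) public field is what lets the Repo-level `metrics()` above work, and it also allows tests and tooling to peek at per-document sync state directly. A hedged sketch of that access pattern, mirroring the Repo.test.ts assertions added further down:

```ts
import { Repo, type PeerId } from "@automerge/automerge-repo"

const repo = new Repo({ network: [] })
const handle = repo.create<{ foo: string }>({ foo: "bar" })

// Internal, unstable surface: marked @hidden, so expect it to change.
const docSync = repo.synchronizer.docSynchronizers[handle.documentId]
console.log(docSync?.hasPeer("alice" as PeerId)) // false until that peer connects
console.log(docSync?.metrics())                  // { peers: [...], size: { numOps, numChanges } }
```
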
@@ -401,4 +401,11 @@ export class DocSynchronizer extends Synchronizer {
401
401
 
402
402
  this.#pendingSyncMessages = []
403
403
  }
404
+
405
+ metrics(): { peers: PeerId[]; size: { numOps: number; numChanges: number } } {
406
+ return {
407
+ peers: this.#peers,
408
+ size: this.#handle.metrics(),
409
+ }
410
+ }
404
411
  }
@@ -68,6 +68,15 @@ describe("DocHandle", () => {
68
68
  assert.equal(doc?.foo, "bar")
69
69
  })
70
70
 
71
+ /** HISTORY TRAVERSAL
72
+ * This API is relatively alpha-ish but we're already
73
+ * doing things in our own apps that are fairly ambitious
74
+ * by routing around to a lower-level API.
75
+ * This is an attempt to wrap up the existing practice
76
+ * in a slightly more supportable set of APIs but should be
77
+ * considered provisional: expect further improvements.
78
+ */
79
+
71
80
  it("should return the heads when requested", async () => {
72
81
  const handle = setup()
73
82
  handle.change(d => (d.foo = "bar"))
@@ -84,6 +93,94 @@ describe("DocHandle", () => {
84
93
  assert.deepEqual(handle.heads(), undefined)
85
94
  })
86
95
 
96
+ it("should return the history when requested", async () => {
97
+ const handle = setup()
98
+ handle.change(d => (d.foo = "bar"))
99
+ handle.change(d => (d.foo = "baz"))
100
+ assert.equal(handle.isReady(), true)
101
+
102
+ const history = handle.history()
103
+ assert.deepEqual(handle.history().length, 2)
104
+ })
105
+
106
+ it("should return a commit from the history", async () => {
107
+ const handle = setup()
108
+ handle.change(d => (d.foo = "zero"))
109
+ handle.change(d => (d.foo = "one"))
110
+ handle.change(d => (d.foo = "two"))
111
+ handle.change(d => (d.foo = "three"))
112
+ assert.equal(handle.isReady(), true)
113
+
114
+ const history = handle.history()
115
+ const view = handle.view(history[1])
116
+ assert.deepEqual(view, { foo: "one" })
117
+ })
118
+
119
+ it("should return diffs", async () => {
120
+ const handle = setup()
121
+ handle.change(d => (d.foo = "zero"))
122
+ handle.change(d => (d.foo = "one"))
123
+ handle.change(d => (d.foo = "two"))
124
+ handle.change(d => (d.foo = "three"))
125
+ assert.equal(handle.isReady(), true)
126
+
127
+ const history = handle.history()
128
+ const patches = handle.diff(history[1])
129
+ assert.deepEqual(patches, [
130
+ { action: "put", path: ["foo"], value: "" },
131
+ { action: "splice", path: ["foo", 0], value: "one" },
132
+ ])
133
+ })
134
+
135
+ it("should support arbitrary diffs too", async () => {
136
+ const handle = setup()
137
+ handle.change(d => (d.foo = "zero"))
138
+ handle.change(d => (d.foo = "one"))
139
+ handle.change(d => (d.foo = "two"))
140
+ handle.change(d => (d.foo = "three"))
141
+ assert.equal(handle.isReady(), true)
142
+
143
+ const history = handle.history()
144
+ const patches = handle.diff(history[1], history[3])
145
+ assert.deepEqual(patches, [
146
+ { action: "put", path: ["foo"], value: "" },
147
+ { action: "splice", path: ["foo", 0], value: "three" },
148
+ ])
149
+ const backPatches = handle.diff(history[3], history[1])
150
+ assert.deepEqual(backPatches, [
151
+ { action: "put", path: ["foo"], value: "" },
152
+ { action: "splice", path: ["foo", 0], value: "one" },
153
+ ])
154
+ })
155
+
156
+ it("should allow direct access to decoded changes", async () => {
157
+ const handle = setup()
158
+ const time = Date.now()
159
+ handle.change(d => (d.foo = "foo"), { message: "commitMessage" })
160
+ assert.equal(handle.isReady(), true)
161
+
162
+ const metadata = handle.metadata()
163
+ assert.deepEqual(metadata.message, "commitMessage")
164
+ // NOTE: I'm not testing time because of https://github.com/automerge/automerge/issues/965
165
+ // but it does round-trip successfully!
166
+ })
167
+
168
+ it("should allow direct access to a specific decoded change", async () => {
169
+ const handle = setup()
170
+ const time = Date.now()
171
+ handle.change(d => (d.foo = "foo"), { message: "commitMessage" })
172
+ handle.change(d => (d.foo = "foo"), { message: "commitMessage2" })
173
+ handle.change(d => (d.foo = "foo"), { message: "commitMessage3" })
174
+ handle.change(d => (d.foo = "foo"), { message: "commitMessage4" })
175
+ assert.equal(handle.isReady(), true)
176
+
177
+ const history = handle.history()
178
+ const metadata = handle.metadata(history[0][0])
179
+ assert.deepEqual(metadata.message, "commitMessage")
180
+ // NOTE: I'm not testing time because of https://github.com/automerge/automerge/issues/965
181
+ // but it does round-trip successfully!
182
+ })
183
+
87
184
  /**
88
185
  * Once there's a Repo#stop API this case should be covered in accompanying
89
186
  * tests and the following test removed.
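
Taken together, the new tests sketch the intended pattern: `history()` gives a traversable list of heads, `view()` materializes the document at any of them, `diff()` compares two points, and the still-`@hidden` `metadata()` exposes commit messages and timestamps. A compact changelog-style walk over those APIs (document shape and messages are illustrative):

```ts
import { Repo } from "@automerge/automerge-repo"

const repo = new Repo({ network: [] })
const handle = repo.create<{ foo: string }>()
handle.change(d => (d.foo = "one"), { message: "first edit" })
handle.change(d => (d.foo = "two"), { message: "second edit" })

for (const heads of handle.history() ?? []) {
  const meta = handle.metadata(heads[0]) // decoded change: message, time, actor (hidden API)
  const state = handle.view(heads)       // document state at that point in history
  console.log(meta?.message, state)
}
```
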
package/test/Repo.test.ts CHANGED
@@ -832,6 +832,46 @@ describe("Repo", () => {
832
832
  teardown()
833
833
  })
834
834
 
835
+ it("synchronizes changes from bobRepo to charlieRepo when loading from storage", async () => {
836
+ const { bobRepo, bobStorage, teardown } = await setup()
837
+
838
+ // We create a repo that uses bobStorage to put a document into its imaginary disk
839
+ // without it knowing about it
840
+ const bobRepo2 = new Repo({
841
+ storage: bobStorage,
842
+ })
843
+ const inStorageHandle = bobRepo2.create<TestDoc>({
844
+ foo: "foundOnFakeDisk",
845
+ })
846
+ await bobRepo2.flush()
847
+
848
+ // Now, let's load it on the original bob repo (which shares a "disk")
849
+ const bobFoundIt = bobRepo.find<TestDoc>(inStorageHandle.url)
850
+ await bobFoundIt.whenReady()
851
+
852
+ // Before checking if it syncs, make sure we have it!
853
+ // (This behaviour is mostly test-validation, we are already testing load/save elsewhere.)
854
+ assert.deepStrictEqual(await bobFoundIt.doc(), { foo: "foundOnFakeDisk" })
855
+
856
+ await pause(10)
857
+
858
+ // We should have a docSynchronizer and its peers should be alice and charlie
859
+ assert.strictEqual(
860
+ bobRepo.synchronizer.docSynchronizers[bobFoundIt.documentId]?.hasPeer(
861
+ "alice" as PeerId
862
+ ),
863
+ true
864
+ )
865
+ assert.strictEqual(
866
+ bobRepo.synchronizer.docSynchronizers[bobFoundIt.documentId]?.hasPeer(
867
+ "charlie" as PeerId
868
+ ),
869
+ true
870
+ )
871
+
872
+ teardown()
873
+ })
874
+
835
875
  it("charlieRepo doesn't have a document it's not supposed to have", async () => {
836
876
  const { aliceRepo, bobRepo, charlieRepo, notForCharlie, teardown } =
837
877
  await setup()
@@ -211,6 +211,23 @@ describe("StorageSubsystem", () => {
211
211
  )
212
212
  assert.strictEqual(loadedSyncState, undefined)
213
213
  })
214
+
215
+ it("returns a undefined if loading an existing sync state fails", async () => {
216
+ const storage = new StorageSubsystem(adapter)
217
+
218
+ const { documentId } = parseAutomergeUrl(generateAutomergeUrl())
219
+ const bobStorageId = Uuid.v4() as StorageId
220
+
221
+ const syncStateKey = [documentId, "sync-state", bobStorageId]
222
+ // Save garbage data to simulate a corrupted sync state
223
+ await adapter.save(syncStateKey, Buffer.from("invalid data"))
224
+
225
+ const loadedSyncState = await storage.loadSyncState(
226
+ documentId,
227
+ bobStorageId
228
+ )
229
+ assert.strictEqual(loadedSyncState, undefined)
230
+ })
214
231
  })
215
232
 
216
233
  describe("storage id", () => {