@fluidframework/tree 2.70.0-361788 → 2.70.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +52 -0
- package/dist/feature-libraries/chunked-forest/codec/compressedEncode.js +1 -1
- package/dist/feature-libraries/chunked-forest/codec/compressedEncode.js.map +1 -1
- package/dist/feature-libraries/forest-summary/incrementalSummaryBuilder.js +1 -1
- package/dist/feature-libraries/forest-summary/incrementalSummaryBuilder.js.map +1 -1
- package/dist/packageVersion.d.ts +1 -1
- package/dist/packageVersion.d.ts.map +1 -1
- package/dist/packageVersion.js +1 -1
- package/dist/packageVersion.js.map +1 -1
- package/dist/simple-tree/api/incrementalAllowedTypes.js +1 -1
- package/dist/simple-tree/api/incrementalAllowedTypes.js.map +1 -1
- package/lib/feature-libraries/chunked-forest/codec/compressedEncode.js +1 -1
- package/lib/feature-libraries/chunked-forest/codec/compressedEncode.js.map +1 -1
- package/lib/feature-libraries/forest-summary/incrementalSummaryBuilder.js +1 -1
- package/lib/feature-libraries/forest-summary/incrementalSummaryBuilder.js.map +1 -1
- package/lib/packageVersion.d.ts +1 -1
- package/lib/packageVersion.d.ts.map +1 -1
- package/lib/packageVersion.js +1 -1
- package/lib/packageVersion.js.map +1 -1
- package/lib/simple-tree/api/incrementalAllowedTypes.js +1 -1
- package/lib/simple-tree/api/incrementalAllowedTypes.js.map +1 -1
- package/package.json +20 -20
- package/src/feature-libraries/chunked-forest/codec/compressedEncode.ts +1 -1
- package/src/feature-libraries/forest-summary/incrementalSummaryBuilder.ts +1 -1
- package/src/packageVersion.ts +1 -1
- package/src/simple-tree/api/incrementalAllowedTypes.ts +1 -1
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,57 @@
 # @fluidframework/tree
 
+## 2.70.0
+
+### Minor Changes
+
+- All non-structurally named beta schema factory APIs now support node schema metadata ([#25685](https://github.com/microsoft/FluidFramework/pull/25685)) [6d8c0ca181](https://github.com/microsoft/FluidFramework/commit/6d8c0ca181c7ed7c600e56197ed4bc75cfbba3db)
+
+  The "options" parameter which allows providing metadata for `TreeNodeSchema` is now available consistently on `SchemaFactoryBeta`,
+  not just `SchemaFactoryAlpha` and a subset of `SchemaFactoryBeta`.
+
+- A minimal set of branching APIs has been promoted to beta. ([#25744](https://github.com/microsoft/FluidFramework/pull/25744)) [32cc2c75d8](https://github.com/microsoft/FluidFramework/commit/32cc2c75d82c35403caa91e67e81f71baee5d092)
+
+  The following APIs have been promoted to beta in `@fluidframework/tree`:
+
+  - `TreeBranch.fork()`
+  - `TreeBranch.merge()`
+  - `TreeBranch.rebaseOnto()`
+  - `TreeBranch.dispose()`
+  - `TreeView.fork()`
+
+  These APIs enable applications to implement basic local branching flows.
+
+- Promote FluidSerializableAsTree APIs from alpha to beta ([#25693](https://github.com/microsoft/FluidFramework/pull/25693)) [43fbc54d05](https://github.com/microsoft/FluidFramework/commit/43fbc54d05351d06c4bc20d1e6f5ca732775f605)
+
+  `FluidSerializableAsTree` may now be imported from `/beta`.
+
+- Update TableSchema APIs (alpha) to accept SchemaFactoryBeta in addition to SchemaFactoryAlpha ([#25613](https://github.com/microsoft/FluidFramework/pull/25613)) [1bdf44ac5a](https://github.com/microsoft/FluidFramework/commit/1bdf44ac5a93bcd1956e15e54c46a16ad2d1c005)
+
+  Makes the [TableSchema](https://fluidframework.com/docs/api/fluid-framework/tableschema-namespace) APIs more flexible, and prepares them for future promotion to beta themselves.
+
+- Added `Tree.ensureSchema` ([#25740](https://github.com/microsoft/FluidFramework/pull/25740)) [8213407b3f](https://github.com/microsoft/FluidFramework/commit/8213407b3fe93f0e35925b1dcd7e799501cb0e92)
+
+  This helper function allows content to be tagged with a schema type before being inserted into the tree.
+  This allows content that would otherwise be ambiguous to be well-defined, without having to wrap it in a node constructor.
+
+  Example:
+
+  ```typescript
+  const sf = new SchemaFactory("example");
+  class Dog extends sf.object("Dog", { name: sf.string }) {}
+  class Cat extends sf.object("Cat", { name: sf.string }) {}
+  class Root extends sf.object("Root", { pet: [Dog, Cat] }) {}
+  // ...
+  const pet = { name: "Max" };
+  view.root.pet = pet; // Error: `pet` is ambiguous - is it a Dog or a Cat?
+  view.root.pet = new Dog(pet); // This works, but has the overhead of creating a Dog node before the insertion actually happens.
+  TreeAlpha.ensureSchema(Dog, pet); // Instead, this tags the `pet` object as a Dog...
+  view.root.pet = pet; // So now there is no error for a normal insertion - it's a Dog.
+  ```
+
+  This function works by leveraging the new `schemaSymbol`, which is also available for use.
+  See its documentation for more information.
+
 ## 2.63.0
 
 ### Minor Changes
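For the node schema metadata entry above, here is a minimal sketch of passing the `options` parameter through `SchemaFactoryBeta`. The `/beta` import path and the exact option shape are assumptions mirrored from the existing `SchemaFactoryAlpha` options, not taken from this diff:

```typescript
// Import path assumed; SchemaFactoryBeta is the beta schema factory of @fluidframework/tree.
import { SchemaFactoryBeta } from "@fluidframework/tree/beta";

const sf = new SchemaFactoryBeta("com.example.app");

// Node schema metadata is supplied via the trailing "options" parameter,
// which this release makes available on the non-structurally named beta factory methods.
class Point extends sf.object(
	"Point",
	{ x: sf.number, y: sf.number },
	{ metadata: { description: "A 2D point used by the canvas" } }, // option shape assumed
) {}
```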
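Similarly, for the branching entry above, a sketch of the basic local branching flow these beta APIs enable. It assumes an existing initialized `view` whose `TreeView` implementation also exposes the listed `TreeBranch` methods, and an application schema with a string `title` field on the root:

```typescript
// `view` is an existing, initialized TreeView over an application schema (assumed).
const branch = view.fork(); // TreeView.fork(): an isolated branch of the current view

// Edits made on the branch are not visible on `view` until the branch is merged.
branch.root.title = "Draft title";

// Bring the branch up to date with edits that landed on `view` in the meantime...
branch.rebaseOnto(view);

// ...then either merge the branch back into the main view:
view.merge(branch);

// ...or throw the draft away instead:
// branch.dispose();
```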
package/dist/feature-libraries/chunked-forest/codec/compressedEncode.js
CHANGED
@@ -249,7 +249,7 @@ exports.IncrementalChunkShape = IncrementalChunkShape;
  */
 exports.incrementalFieldEncoder = {
     encodeField(cursor, context, outputBuffer) {
-        (0, internal_1.assert)(context.incrementalEncoder !== undefined, "incremental encoder must be defined to use incrementalFieldEncoder");
+        (0, internal_1.assert)(context.incrementalEncoder !== undefined, 0xc88 /* incremental encoder must be defined to use incrementalFieldEncoder */);
         const chunkReferenceIds = context.incrementalEncoder.encodeIncrementalField(cursor, (chunk) => compressedEncode([chunk.cursor()], context));
         outputBuffer.push(chunkReferenceIds);
     },
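At the TypeScript source level (recoverable from the `sourcesContent` embedded in the source map below), the change above replaces a plain string assertion message with a tagged numeric code, the convention this codebase uses with `assert` from `@fluidframework/core-utils/internal`: the argument becomes a compact code while the original message is preserved in a comment.

```typescript
import { assert } from "@fluidframework/core-utils/internal";

// Before (compressedEncode.ts):
assert(
	context.incrementalEncoder !== undefined,
	"incremental encoder must be defined to use incrementalFieldEncoder",
);

// After: the numeric tag 0xc88 replaces the string; the original text remains as a comment.
assert(
	context.incrementalEncoder !== undefined,
	0xc88 /* incremental encoder must be defined to use incrementalFieldEncoder */,
);
```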
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"compressedEncode.js","sourceRoot":"","sources":["../../../../src/feature-libraries/chunked-forest/codec/compressedEncode.ts"],"names":[],"mappings":";AAAA;;;GAGG;;;AAEH,kEAAoF;AAGpF,qDAUgC;AAChC,qDAAqD;AAIrD,uEAImC;AAEnC,2CAQqB;AAGrB;;;;;;GAMG;AACH,SAAgB,gBAAgB,CAC/B,UAAsB,EACtB,OAAuB;IAEvB,MAAM,WAAW,GAAmB,EAAE,CAAC;IAEvC,6DAA6D;IAC7D,KAAK,MAAM,MAAM,IAAI,UAAU,EAAE,CAAC;QACjC,MAAM,MAAM,GAAiB,EAAE,CAAC;QAChC,uBAAe,CAAC,WAAW,CAAC,MAAM,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;QACrD,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;IAC1B,CAAC;IACD,OAAO,IAAA,4DAAkC,EAAC,mBAAO,EAAE,WAAW,CAAC,CAAC;AACjE,CAAC;AAbD,4CAaC;AAoED;;;GAGG;AACH,SAAgB,cAAc,CAAC,OAAoB;IAClD,OAAO;QACN,WAAW,CACV,MAA8B,EAC9B,OAAuB,EACvB,YAA0B;YAE1B,IAAA,sBAAW,EAAC,MAAM,EAAE,GAAG,EAAE,CAAC,OAAO,CAAC,UAAU,CAAC,MAAM,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC,CAAC;QAC9E,CAAC;QACD,KAAK,EAAE,OAAO,CAAC,KAAK;KACpB,CAAC;AACH,CAAC;AAXD,wCAWC;AAED;;GAEG;AACH,SAAgB,cAAc,CAAC,OAAoB;IAClD,OAAO;QACN,WAAW,CACV,MAA8B,EAC9B,OAAuB,EACvB,YAA0B;YAE1B,OAAO,CAAC,UAAU,CAAC,MAAM,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC;YAClD,MAAM,CAAC,QAAQ,EAAE,CAAC;QACnB,CAAC;QACD,KAAK,EAAE,OAAO,CAAC,KAAK;KACpB,CAAC;AACH,CAAC;AAZD,wCAYC;AAED;;GAEG;AACH,MAAa,QAAS,SAAQ,+BAA+B;IAC5D;QACC,KAAK,EAAE,CAAC;IACT,CAAC;IAGM,WAAW,CACjB,WAAuC,EACvC,MAAiC;QAEjC,MAAM,eAAe,GAAoB,CAAC,CAAC;QAC3C,OAAO,EAAE,CAAC,EAAE,eAAe,EAAE,CAAC;IAC/B,CAAC;IAEM,mCAAmC,CACzC,WAA4B,EAC5B,eAAuC,IAC/B,CAAC;IAEH,MAAM,CAAC,WAAW,CACxB,MAA8B,EAC9B,OAAuB,EACvB,YAA0B,EAC1B,OAAqB;QAErB,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;QACjC,OAAO,CAAC,WAAW,CAAC,MAAM,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC;IACpD,CAAC;IAEM,MAAM,CAAC,UAAU,CACvB,MAA8B,EAC9B,OAAuB,EACvB,YAA0B,EAC1B,OAAoB;QAEpB,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;QACjC,OAAO,CAAC,UAAU,CAAC,MAAM,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC;IACnD,CAAC;IAEM,MAAM,CAAC,WAAW,CACxB,MAA8B,EAC9B,OAAuB,EACvB,YAA0B,EAC1B,OAAqB;QAErB,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;QACjC,OAAO,CAAC,WAAW,CAAC,MAAM,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC;IACpD,CAAC;;AA/CF,4BAgDC;AA5CuB,iBAAQ,GAAG,IAAI,QAAQ,EAAE,CAAC;AA8ClD;;GAEG;AACU,QAAA,cAAc,GAAgB;IAC1C,UAAU,CACT,MAA8B,EAC9B,OAAuB,EACvB,YAA0B;QAE1B,yCAAyC;QACzC,MAAM,WAAW,GAAG,OAAO,CAAC,qBAAqB,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;QAC/D,QAAQ,CAAC,UAAU,CAAC,MAAM,EAAE,OAAO,EAAE,YAAY,EAAE,WAAW,CAAC,CAAC;IACjE,CAAC;IAED,KAAK,EAAE,QAAQ,CAAC,QAAQ;CACxB,CAAC;AAEF;;GAEG;AACU,QAAA,eAAe,GAAiB;IAC5C,WAAW,CACV,MAA8B,EAC9B,OAAuB,EACvB,YAA0B;QAE1B,kCAAkC;QAElC,IAAI,MAAM,CAAC,cAAc,EAAE,KAAK,CAAC,EAAE,CAAC;YACnC,MAAM,KAAK,GAAG,kBAAkB,CAAC,KAAK,CAAC;YACvC,QAAQ,CAAC,WAAW,CAAC,MAAM,EAAE,OAAO,EAAE,YAAY,EAAE,KAAK,CAAC,CAAC;QAC5D,CAAC;aAAM,IAAI,MAAM,CAAC,cAAc,EAAE,KAAK,CAAC,EAAE,CAAC;YAC1C,oEAAoE;YACpE,MAAM,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;YACpB,sBAAc,CAAC,UAAU,CAAC,MAAM,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC;YACzD,MAAM,CAAC,QAAQ,EAAE,CAAC;QACnB,CAAC;aAAM,CAAC;YACP,kDAAkD;YAClD,4FAA4F;YAE5F,MAAM,KAAK,GAAG,OAAO,CAAC,kBAAkB,CAAC,sBAAc,CAAC,CAAC;YACzD,QAAQ,CAAC,WAAW,CAAC,MAAM,EAAE,OAAO,EAAE,YAAY,EAAE,KAAK,CAAC,CAAC;QAC5D,CAAC;IACF,CAAC;IAED,KAAK,EAAE,QAAQ,CAAC,QAAQ;CACxB,CAAC;AAEF;;;;;GAKG;AACH,MAAa,kBACZ,SAAQ,+BAA+B;IAiBvC;;OAEG;IACH,YACiB,MAAc,EACd,KAAmB;QAEnC,KAAK,EAAE,CAAC;QAHQ,WAAM,GAAN,MAAM,CAAQ;QACd,UAAK,GAAL,KAAK,CAAc;IAGpC,CAAC;IAEM,WAAW,CACjB,MAA8B,EAC9B,OAAuB,EACvB,YAA0B;QAE1B,2DAA2D;QAC3D,4DAA4D;QAC5D,KAAK,IAAI,KAAK,GAAG,CAAC,EAAE,KAAK,GAAG,IAAI,CAAC,MAAM,EAAE,KAAK,EAAE,EAAE,CAAC;YAClD,IAAI,CAAC,KAAK,CAAC,WAAW,CAAC,MAAM,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC;QACvD,CAAC;IACF,CAAC;IAEM,WAAW,CACjB,MAA8B,EAC9B,OAAuB,EACvB,YAA0B;QAE1B,iGAAiG;QACjG,IAAA,iBAAM,EACL,MAAM,CAAC,cAAc,EAAE,IAAI,IAAI,CAAC,MAAM,EACtC,KAAK,CAAC,8CAA8
C,CACpD,CAAC;QACF,MAAM,CAAC,SAAS,EAAE,CAAC;QACnB,IAAI,CAAC,WAAW,CAAC,MAAM,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC;QAChD,IAAA,iBAAM,EACL,MAAM,CAAC,IAAI,sCAA8B,EACzC,KAAK,CAAC,yDAAyD,CAC/D,CAAC;IACH,CAAC;IAEM,WAAW,CACjB,WAAuC,EACvC,MAAiC;QAEjC,OAAO;YACN,CAAC,EAAE;gBACF,MAAM,EAAE,IAAI,CAAC,MAAM;gBACnB,KAAK,EAAE,MAAM,CAAC,YAAY,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,IAAI,IAAA,eAAI,EAAC,KAAK,CAAC,mBAAmB,CAAC;aACnF;SACD,CAAC;IACH,CAAC;IAEM,mCAAmC,CACzC,WAA4B,EAC5B,eAAuC;QAEvC,eAAe,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;IACnC,CAAC;IAED,IAAW,KAAK;QACf,OAAO,IAAI,CAAC;IACb,CAAC;;AA/EF,gDAgFC;AA5EuB,wBAAK,GAAuB,IAAI,kBAAkB,CAAC,CAAC,EAAE;IAC5E,IAAI,KAAK;QACR,0EAA0E;QAC1E,OAAO,kBAAkB,CAAC,KAAK,CAAC;IACjC,CAAC;IACD,WAAW,CACV,MAA8B,EAC9B,OAAuB,EACvB,YAA0B;QAE1B,IAAA,eAAI,EAAC,KAAK,CAAC,6CAA6C,CAAC,CAAC;IAC3D,CAAC;CACD,CAAC,CAAC;AAkEJ;;GAEG;AACH,MAAa,gBAAiB,SAAQ,+BAA+B;IACpE;;OAEG;IACH,YAAmC,UAAiB;QACnD,KAAK,EAAE,CAAC;QAD0B,eAAU,GAAV,UAAU,CAAO;IAEpD,CAAC;IAEM,WAAW,CACjB,WAAuC,EACvC,MAAiC;QAEjC,MAAM,KAAK,GACV,MAAM,CAAC,YAAY,CAAC,GAAG,CAAC,IAAI,CAAC,UAAU,CAAC;YACxC,IAAA,eAAI,EAAC,KAAK,CAAC,wCAAwC,CAAC,CAAC;QACtD,OAAO;YACN,CAAC,EAAE,KAAK;SACR,CAAC;IACH,CAAC;IAEM,mCAAmC,CACzC,WAA4B,EAC5B,eAAuC;QAEvC,eAAe,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;IAClC,CAAC;CACD;AA1BD,4CA0BC;AAED;;;;;GAKG;AACH,MAAa,kBAAkB;IAC9B,YACiB,YAAyB,EACzB,QAA0B,IAAI,gBAAgB,CAAC,YAAY,CAAC,KAAK,CAAC;QADlE,iBAAY,GAAZ,YAAY,CAAa;QACzB,UAAK,GAAL,KAAK,CAA6D;IAChF,CAAC;IAEG,WAAW,CACjB,MAA8B,EAC9B,OAAuB,EACvB,YAA0B;QAE1B,MAAM,MAAM,GAAiB,EAAE,CAAC;QAChC,IAAI,cAAc,GAAG,IAAI,CAAC;QAC1B,MAAM,MAAM,GAAG,MAAM,CAAC,cAAc,EAAE,CAAC;QACvC,IAAA,sBAAW,EAAC,MAAM,EAAE,GAAG,EAAE;YACxB,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC;YAC7B,IAAI,CAAC,YAAY,CAAC,UAAU,CAAC,MAAM,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;YACtD,cAAc,KAAK,MAAM,CAAC,MAAM,GAAG,MAAM,KAAK,CAAC,CAAC;QACjD,CAAC,CAAC,CAAC;QACH,IAAI,MAAM,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;YACzB,mFAAmF;YACnF,oFAAoF;YACpF,YAAY,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QAC3B,CAAC;aAAM,CAAC;YACP,IAAA,iBAAM,EACL,cAAc,EACd,KAAK,CAAC,8IAA8I,CACpJ,CAAC;YACF,YAAY,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QAC3B,CAAC;IACF,CAAC;CACD;AA/BD,gDA+BC;AAED;;GAEG;AACH,MAAa,qBAAsB,SAAQ,+BAA+B;IAClE,WAAW,CACjB,WAAuC,EACvC,MAAiC;QAEjC,OAAO;YACN,CAAC,EAAE,CAAC,CAAC,kCAAkC;SACvC,CAAC;IACH,CAAC;IAEM,mCAAmC,CACzC,WAA4B,EAC5B,eAAuC,IAC/B,CAAC;IAEV,IAAW,KAAK;QACf,OAAO,IAAI,CAAC;IACb,CAAC;CACD;AAlBD,sDAkBC;AAED;;;;;GAKG;AACU,QAAA,uBAAuB,GAAiB;IACpD,WAAW,CACV,MAA8B,EAC9B,OAAuB,EACvB,YAA0B;QAE1B,IAAA,iBAAM,EACL,OAAO,CAAC,kBAAkB,KAAK,SAAS,EACxC,oEAAoE,CACpE,CAAC;QAEF,MAAM,iBAAiB,GAAG,OAAO,CAAC,kBAAkB,CAAC,sBAAsB,CAC1E,MAAM,EACN,CAAC,KAAgB,EAAE,EAAE,CAAC,gBAAgB,CAAC,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,OAAO,CAAC,CACjE,CAAC;QACF,YAAY,CAAC,IAAI,CAAC,iBAAiB,CAAC,CAAC;IACtC,CAAC;IAED,KAAK,EAAE,IAAI,gBAAgB,CAAC,IAAI,qBAAqB,EAAE,CAAC,gBAAgB,CAAC;CACzE,CAAC;AAEF;;;;GAIG;AACH,SAAgB,WAAW,CAC1B,KAAY,EACZ,KAAwB,EACxB,YAA0B;IAE1B,IAAI,KAAK,KAAK,SAAS,EAAE,CAAC;QACzB,IAAI,KAAK,KAAK,SAAS,EAAE,CAAC;YACzB,YAAY,CAAC,IAAI,CAAC,IAAI,EAAE,KAAK,CAAC,CAAC;QAChC,CAAC;aAAM,CAAC;YACP,YAAY,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QAC1B,CAAC;IACF,CAAC;SAAM,CAAC;QACP,IAAI,KAAK,KAAK,IAAI,EAAE,CAAC;YACpB,IAAA,iBAAM,EAAC,KAAK,KAAK,SAAS,EAAE,KAAK,CAAC,wCAAwC,CAAC,CAAC;YAC5E,YAAY,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QAC1B,CAAC;aAAM,IAAI,KAAK,KAAK,KAAK,EAAE,CAAC;YAC5B,IAAA,iBAAM,EAAC,KAAK,KAAK,SAAS,EAAE,KAAK,CAAC,iDAAiD,CAAC,CAAC;QACtF,CAAC;aAAM,IAAI,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE,CAAC;YACjC,IAAA,iBAAM,EAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE,KAAK,CAAC,0CAA0C,CAAC,CAAC;QAC9E,CAAC;aAAM,IAAI,KAAK,KAAK,wBAAY,CAAC,UAAU,EAAE,CAAC;YAC9C,0EAA0E;YAC1E,IAAA,iBAAM,EAAC,KAAK,KAAK
,SAAS,EAAE,KAAK,CAAC,wCAAwC,CAAC,CAAC;YAC5E,YAAY,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QAC1B,CAAC;aAAM,CAAC;YACP,uBAAuB;YACvB,IAAA,0BAAe,EAAC,KAAK,EAAE,gDAAgD,CAAC,CAAC;QAC1E,CAAC;IACF,CAAC;AACF,CAAC;AA5BD,kCA4BC;AAED;;;;;;;GAOG;AACH,MAAa,cAAc;IAI1B,YACkB,qBAAwC,EACxC,sBAA0C,EAC3C,WAA4D,EAC5D,YAA2B;IAC3C;;;;OAIG;IACa,kBAAkD;QATjD,0BAAqB,GAArB,qBAAqB,CAAmB;QACxC,2BAAsB,GAAtB,sBAAsB,CAAoB;QAC3C,gBAAW,GAAX,WAAW,CAAiD;QAC5D,iBAAY,GAAZ,YAAY,CAAe;QAM3B,uBAAkB,GAAlB,kBAAkB,CAAgC;QAblD,2BAAsB,GACtC,IAAI,GAAG,EAAE,CAAC;QACM,wBAAmB,GAAyC,IAAI,GAAG,EAAE,CAAC;IAYpF,CAAC;IAEG,qBAAqB,CAAC,UAAoC;QAChE,OAAO,IAAA,sBAAW,EAAC,IAAI,CAAC,sBAAsB,EAAE,UAAU,EAAE,GAAG,EAAE,CAChE,IAAI,CAAC,qBAAqB,CAAC,IAAI,EAAE,UAAU,CAAC,CAC5C,CAAC;IACH,CAAC;IAEM,sBAAsB,CAAC,WAAkC;QAC/D,OAAO,IAAI,gBAAgB,CAAC,IAAI,EAAE,WAAW,EAAE,IAAI,CAAC,sBAAsB,CAAC,CAAC;IAC7E,CAAC;IAEM,kBAAkB,CAAC,KAAkB;QAC3C,OAAO,IAAA,sBAAW,EAAC,IAAI,CAAC,mBAAmB,EAAE,KAAK,EAAE,GAAG,EAAE,CAAC,IAAI,kBAAkB,CAAC,KAAK,CAAC,CAAC,CAAC;IAC1F,CAAC;CACD;AA9BD,wCA8BC;AA0BD,MAAM,gBAAgB;IAGrB,YACiB,WAA8B,EAC9B,WAAkC,EACjC,sBAA0C;QAF3C,gBAAW,GAAX,WAAW,CAAmB;QAC9B,gBAAW,GAAX,WAAW,CAAuB;QACjC,2BAAsB,GAAtB,sBAAsB,CAAoB;IACzD,CAAC;IACG,WAAW,CACjB,MAA8B,EAC9B,OAAuB,EACvB,YAA0B;QAE1B,IAAI,CAAC,OAAO,CAAC,WAAW,CAAC,MAAM,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC;IACzD,CAAC;IAED,IAAY,OAAO;QAClB,IAAI,IAAI,CAAC,WAAW,KAAK,SAAS,EAAE,CAAC;YACpC,IAAI,CAAC,WAAW,GAAG,IAAI,CAAC,sBAAsB,CAAC,IAAI,CAAC,WAAW,EAAE,IAAI,CAAC,WAAW,CAAC,CAAC;QACpF,CAAC;QACD,OAAO,IAAI,CAAC,WAAW,CAAC;IACzB,CAAC;IAED,IAAW,KAAK;QACf,OAAO,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC;IAC3B,CAAC;CACD","sourcesContent":["/*!\n * Copyright (c) Microsoft Corporation and contributors. All rights reserved.\n * Licensed under the MIT License.\n */\n\nimport { assert, unreachableCase, fail } from \"@fluidframework/core-utils/internal\";\nimport type { IIdCompressor } from \"@fluidframework/id-compressor\";\n\nimport {\n\tCursorLocationType,\n\ttype FieldKey,\n\ttype FieldKindIdentifier,\n\ttype ITreeCursorSynchronous,\n\ttype TreeChunk,\n\ttype TreeFieldStoredSchema,\n\ttype TreeNodeSchemaIdentifier,\n\ttype Value,\n\tforEachNode,\n} from \"../../../core/index.js\";\nimport { getOrCreate } from \"../../../util/index.js\";\nimport type { FlexFieldKind } from \"../../modular-schema/index.js\";\n\nimport type { Counter, DeduplicationTable } from \"./chunkCodecUtilities.js\";\nimport {\n\ttype BufferFormat as BufferFormatGeneric,\n\tShape as ShapeGeneric,\n\tupdateShapesAndIdentifiersEncoding,\n} from \"./chunkEncodingGeneric.js\";\nimport type { FieldBatch } from \"./fieldBatch.js\";\nimport {\n\ttype EncodedAnyShape,\n\ttype EncodedChunkShape,\n\ttype EncodedFieldBatch,\n\ttype EncodedNestedArrayShape,\n\ttype EncodedValueShape,\n\tSpecialField,\n\tversion,\n} from \"./format.js\";\nimport type { IncrementalEncoder } from \"./codecs.js\";\n\n/**\n * Encode data from `FieldBatch` into an `EncodedFieldBatch`.\n *\n * Optimized for encoded size and encoding performance.\n *\n * Most of the compression strategy comes from the policy provided via `context`.\n */\nexport function compressedEncode(\n\tfieldBatch: FieldBatch,\n\tcontext: EncoderContext,\n): EncodedFieldBatch {\n\tconst batchBuffer: BufferFormat[] = [];\n\n\t// Populate buffer, including shape and identifier references\n\tfor (const cursor of fieldBatch) {\n\t\tconst buffer: BufferFormat = [];\n\t\tanyFieldEncoder.encodeField(cursor, context, buffer);\n\t\tbatchBuffer.push(buffer);\n\t}\n\treturn updateShapesAndIdentifiersEncoding(version, batchBuffer);\n}\n\nexport type BufferFormat = 
BufferFormatGeneric<EncodedChunkShape>;\nexport type Shape = ShapeGeneric<EncodedChunkShape>;\n\n/**\n * Like {@link FieldEncoder}, except data will be prefixed with the key.\n */\nexport interface KeyedFieldEncoder {\n\treadonly key: FieldKey;\n\treadonly encoder: FieldEncoder;\n}\n\n/**\n * An encoder with an associated shape.\n */\nexport interface Encoder {\n\t/**\n\t * The shape which describes how the encoded data is laid out.\n\t * Used by decoders to interpret the output of `encodeNode`.\n\t */\n\treadonly shape: Shape;\n}\n\n/**\n * An encoder for a specific shape of node.\n *\n * Can only be used with compatible nodes.\n */\nexport interface NodeEncoder extends Encoder {\n\t/**\n\t * @param cursor - in Nodes mode. Does not move cursor.\n\t */\n\tencodeNode(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tcontext: EncoderContext,\n\t\toutputBuffer: BufferFormat,\n\t): void;\n}\n\n/**\n * Like {@link NodeEncoder}, except encodes a run of nodes.\n */\nexport interface NodesEncoder extends Encoder {\n\t/**\n\t * @param cursor - in Nodes mode. Moves cursor however many nodes it encodes.\n\t */\n\tencodeNodes(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tcontext: EncoderContext,\n\t\toutputBuffer: BufferFormat,\n\t): void;\n}\n\n/**\n * Like {@link NodeEncoder}, except encodes a field.\n */\nexport interface FieldEncoder extends Encoder {\n\t/**\n\t * @param cursor - in Fields mode. Encodes entire field.\n\t */\n\tencodeField(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tcontext: EncoderContext,\n\t\toutputBuffer: BufferFormat,\n\t): void;\n}\n\n/**\n * Makes a {@link FieldEncoder} which runs `encoder` on every node in the field.\n * This does not encode the number nodes: the user of this may need to encode that elsewhere.\n */\nexport function asFieldEncoder(encoder: NodeEncoder): FieldEncoder {\n\treturn {\n\t\tencodeField(\n\t\t\tcursor: ITreeCursorSynchronous,\n\t\t\tcontext: EncoderContext,\n\t\t\toutputBuffer: BufferFormat,\n\t\t): void {\n\t\t\tforEachNode(cursor, () => encoder.encodeNode(cursor, context, outputBuffer));\n\t\t},\n\t\tshape: encoder.shape,\n\t};\n}\n\n/**\n * Adapt a {@link NodeEncoder} to a {@link NodesEncoder} which invokes `encoder` once.\n */\nexport function asNodesEncoder(encoder: NodeEncoder): NodesEncoder {\n\treturn {\n\t\tencodeNodes(\n\t\t\tcursor: ITreeCursorSynchronous,\n\t\t\tcontext: EncoderContext,\n\t\t\toutputBuffer: BufferFormat,\n\t\t): void {\n\t\t\tencoder.encodeNode(cursor, context, outputBuffer);\n\t\t\tcursor.nextNode();\n\t\t},\n\t\tshape: encoder.shape,\n\t};\n}\n\n/**\n * Encodes a chunk with {@link EncodedAnyShape} by prefixing the data with its shape.\n */\nexport class AnyShape extends ShapeGeneric<EncodedChunkShape> {\n\tprivate constructor() {\n\t\tsuper();\n\t}\n\tpublic static readonly instance = new AnyShape();\n\n\tpublic encodeShape(\n\t\tidentifiers: DeduplicationTable<string>,\n\t\tshapes: DeduplicationTable<Shape>,\n\t): EncodedChunkShape {\n\t\tconst encodedAnyShape: EncodedAnyShape = 0;\n\t\treturn { d: encodedAnyShape };\n\t}\n\n\tpublic countReferencedShapesAndIdentifiers(\n\t\tidentifiers: Counter<string>,\n\t\tshapeDiscovered: (shape: Shape) => void,\n\t): void {}\n\n\tpublic static encodeField(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tcontext: EncoderContext,\n\t\toutputBuffer: BufferFormat,\n\t\tencoder: FieldEncoder,\n\t): void {\n\t\toutputBuffer.push(encoder.shape);\n\t\tencoder.encodeField(cursor, context, outputBuffer);\n\t}\n\n\tpublic static encodeNode(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tcontext: 
EncoderContext,\n\t\toutputBuffer: BufferFormat,\n\t\tencoder: NodeEncoder,\n\t): void {\n\t\toutputBuffer.push(encoder.shape);\n\t\tencoder.encodeNode(cursor, context, outputBuffer);\n\t}\n\n\tpublic static encodeNodes(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tcontext: EncoderContext,\n\t\toutputBuffer: BufferFormat,\n\t\tencoder: NodesEncoder,\n\t): void {\n\t\toutputBuffer.push(encoder.shape);\n\t\tencoder.encodeNodes(cursor, context, outputBuffer);\n\t}\n}\n\n/**\n * Encodes a single node polymorphically.\n */\nexport const anyNodeEncoder: NodeEncoder = {\n\tencodeNode(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tcontext: EncoderContext,\n\t\toutputBuffer: BufferFormat,\n\t): void {\n\t\t// TODO: Fast path uniform chunk content.\n\t\tconst nodeEncoder = context.nodeEncoderFromSchema(cursor.type);\n\t\tAnyShape.encodeNode(cursor, context, outputBuffer, nodeEncoder);\n\t},\n\n\tshape: AnyShape.instance,\n};\n\n/**\n * Encodes a field polymorphically.\n */\nexport const anyFieldEncoder: FieldEncoder = {\n\tencodeField(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tcontext: EncoderContext,\n\t\toutputBuffer: BufferFormat,\n\t): void {\n\t\t// TODO: Fast path uniform chunks.\n\n\t\tif (cursor.getFieldLength() === 0) {\n\t\t\tconst shape = InlineArrayEncoder.empty;\n\t\t\tAnyShape.encodeField(cursor, context, outputBuffer, shape);\n\t\t} else if (cursor.getFieldLength() === 1) {\n\t\t\t// Fast path chunk of size one size one at least: skip nested array.\n\t\t\tcursor.enterNode(0);\n\t\t\tanyNodeEncoder.encodeNode(cursor, context, outputBuffer);\n\t\t\tcursor.exitNode();\n\t\t} else {\n\t\t\t// TODO: more efficient encoding for common cases.\n\t\t\t// Could try to find more specific shape compatible with all children than `anyNodeEncoder`.\n\n\t\t\tconst shape = context.nestedArrayEncoder(anyNodeEncoder);\n\t\t\tAnyShape.encodeField(cursor, context, outputBuffer, shape);\n\t\t}\n\t},\n\n\tshape: AnyShape.instance,\n};\n\n/**\n * Encodes a chunk using {@link EncodedInlineArrayShape}.\n * @remarks\n * The fact this is also a Shape is an implementation detail of the encoder: that allows the shape it uses to be itself,\n * which is an easy way to keep all the related code together without extra objects.\n */\nexport class InlineArrayEncoder\n\textends ShapeGeneric<EncodedChunkShape>\n\timplements NodesEncoder, FieldEncoder\n{\n\tpublic static readonly empty: InlineArrayEncoder = new InlineArrayEncoder(0, {\n\t\tget shape() {\n\t\t\t// Not actually used, makes count work without adding an additional shape.\n\t\t\treturn InlineArrayEncoder.empty;\n\t\t},\n\t\tencodeNodes(\n\t\t\tcursor: ITreeCursorSynchronous,\n\t\t\tcontext: EncoderContext,\n\t\t\toutputBuffer: BufferFormat,\n\t\t): void {\n\t\t\tfail(0xb4d /* Empty array should not encode any nodes */);\n\t\t},\n\t});\n\n\t/**\n\t * @param length - number of invocations of `inner`.\n\t */\n\tpublic constructor(\n\t\tpublic readonly length: number,\n\t\tpublic readonly inner: NodesEncoder,\n\t) {\n\t\tsuper();\n\t}\n\n\tpublic encodeNodes(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tcontext: EncoderContext,\n\t\toutputBuffer: BufferFormat,\n\t): void {\n\t\t// Linter is wrong about this loop being for-of compatible.\n\t\t// eslint-disable-next-line @typescript-eslint/prefer-for-of\n\t\tfor (let index = 0; index < this.length; index++) {\n\t\t\tthis.inner.encodeNodes(cursor, context, outputBuffer);\n\t\t}\n\t}\n\n\tpublic encodeField(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tcontext: EncoderContext,\n\t\toutputBuffer: BufferFormat,\n\t): void {\n\t\t// 
Its possible individual items from this array encode multiple nodes, so don't assume === here.\n\t\tassert(\n\t\t\tcursor.getFieldLength() >= this.length,\n\t\t\t0x73c /* unexpected length for fixed length array */,\n\t\t);\n\t\tcursor.firstNode();\n\t\tthis.encodeNodes(cursor, context, outputBuffer);\n\t\tassert(\n\t\t\tcursor.mode === CursorLocationType.Fields,\n\t\t\t0x73d /* should return to fields mode when finished encoding */,\n\t\t);\n\t}\n\n\tpublic encodeShape(\n\t\tidentifiers: DeduplicationTable<string>,\n\t\tshapes: DeduplicationTable<Shape>,\n\t): EncodedChunkShape {\n\t\treturn {\n\t\t\tb: {\n\t\t\t\tlength: this.length,\n\t\t\t\tshape: shapes.valueToIndex.get(this.inner.shape) ?? fail(0xb4e /* missing shape */),\n\t\t\t},\n\t\t};\n\t}\n\n\tpublic countReferencedShapesAndIdentifiers(\n\t\tidentifiers: Counter<string>,\n\t\tshapeDiscovered: (shape: Shape) => void,\n\t): void {\n\t\tshapeDiscovered(this.inner.shape);\n\t}\n\n\tpublic get shape(): this {\n\t\treturn this;\n\t}\n}\n\n/**\n * Encodes the shape for a nested array as {@link EncodedNestedArrayShape} shape.\n */\nexport class NestedArrayShape extends ShapeGeneric<EncodedChunkShape> {\n\t/**\n\t * @param innerShape - The shape of each item in this nested array.\n\t */\n\tpublic constructor(public readonly innerShape: Shape) {\n\t\tsuper();\n\t}\n\n\tpublic encodeShape(\n\t\tidentifiers: DeduplicationTable<string>,\n\t\tshapes: DeduplicationTable<Shape>,\n\t): EncodedChunkShape {\n\t\tconst shape: EncodedNestedArrayShape =\n\t\t\tshapes.valueToIndex.get(this.innerShape) ??\n\t\t\tfail(0xb4f /* index for shape not found in table */);\n\t\treturn {\n\t\t\ta: shape,\n\t\t};\n\t}\n\n\tpublic countReferencedShapesAndIdentifiers(\n\t\tidentifiers: Counter<string>,\n\t\tshapeDiscovered: (shape: Shape) => void,\n\t): void {\n\t\tshapeDiscovered(this.innerShape);\n\t}\n}\n\n/**\n * Encodes a field as a nested array with the {@link EncodedNestedArrayShape} shape.\n * @remarks\n * The fact this is also exposes a Shape is an implementation detail: it allows the shape it uses to be itself\n * which is an easy way to keep all the related code together without extra objects.\n */\nexport class NestedArrayEncoder implements FieldEncoder {\n\tpublic constructor(\n\t\tpublic readonly innerEncoder: NodeEncoder,\n\t\tpublic readonly shape: NestedArrayShape = new NestedArrayShape(innerEncoder.shape),\n\t) {}\n\n\tpublic encodeField(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tcontext: EncoderContext,\n\t\toutputBuffer: BufferFormat,\n\t): void {\n\t\tconst buffer: BufferFormat = [];\n\t\tlet allNonZeroSize = true;\n\t\tconst length = cursor.getFieldLength();\n\t\tforEachNode(cursor, () => {\n\t\t\tconst before = buffer.length;\n\t\t\tthis.innerEncoder.encodeNode(cursor, context, buffer);\n\t\t\tallNonZeroSize &&= buffer.length - before !== 0;\n\t\t});\n\t\tif (buffer.length === 0) {\n\t\t\t// This relies on the number of inner chunks being the same as the number of nodes.\n\t\t\t// If making inner a `NodesEncoder`, this code will have to be adjusted accordingly.\n\t\t\toutputBuffer.push(length);\n\t\t} else {\n\t\t\tassert(\n\t\t\t\tallNonZeroSize,\n\t\t\t\t0x73e /* either all or none of the members of a nested array must be 0 sized, or there is no way the decoder could process the content correctly. 
*/,\n\t\t\t);\n\t\t\toutputBuffer.push(buffer);\n\t\t}\n\t}\n}\n\n/**\n * Encodes the shape for an incremental chunk as {@link EncodedIncrementalChunkShape} shape.\n */\nexport class IncrementalChunkShape extends ShapeGeneric<EncodedChunkShape> {\n\tpublic encodeShape(\n\t\tidentifiers: DeduplicationTable<string>,\n\t\tshapes: DeduplicationTable<Shape>,\n\t): EncodedChunkShape {\n\t\treturn {\n\t\t\te: 0 /* EncodedIncrementalChunkShape */,\n\t\t};\n\t}\n\n\tpublic countReferencedShapesAndIdentifiers(\n\t\tidentifiers: Counter<string>,\n\t\tshapeDiscovered: (shape: Shape) => void,\n\t): void {}\n\n\tpublic get shape(): this {\n\t\treturn this;\n\t}\n}\n\n/**\n * Encodes an incremental field whose tree chunks are encoded separately and referenced by their {@link ChunkReferenceId}.\n * The shape of the content of this field is {@link NestedArrayShape}.\n * The inner items of the array have shape {@link IncrementalChunkShape} and are {@link ChunkReferenceId}s\n * of the encoded chunks.\n */\nexport const incrementalFieldEncoder: FieldEncoder = {\n\tencodeField(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tcontext: EncoderContext,\n\t\toutputBuffer: BufferFormat,\n\t): void {\n\t\tassert(\n\t\t\tcontext.incrementalEncoder !== undefined,\n\t\t\t\"incremental encoder must be defined to use incrementalFieldEncoder\",\n\t\t);\n\n\t\tconst chunkReferenceIds = context.incrementalEncoder.encodeIncrementalField(\n\t\t\tcursor,\n\t\t\t(chunk: TreeChunk) => compressedEncode([chunk.cursor()], context),\n\t\t);\n\t\toutputBuffer.push(chunkReferenceIds);\n\t},\n\n\tshape: new NestedArrayShape(new IncrementalChunkShape() /* innerShape */),\n};\n\n/**\n * Encode `value` with `shape` into `outputBuffer`.\n *\n * Requires that `value` is compatible with `shape`.\n */\nexport function encodeValue(\n\tvalue: Value,\n\tshape: EncodedValueShape,\n\toutputBuffer: BufferFormat,\n): void {\n\tif (shape === undefined) {\n\t\tif (value !== undefined) {\n\t\t\toutputBuffer.push(true, value);\n\t\t} else {\n\t\t\toutputBuffer.push(false);\n\t\t}\n\t} else {\n\t\tif (shape === true) {\n\t\t\tassert(value !== undefined, 0x78d /* required value must not be missing */);\n\t\t\toutputBuffer.push(value);\n\t\t} else if (shape === false) {\n\t\t\tassert(value === undefined, 0x73f /* incompatible value shape: expected no value */);\n\t\t} else if (Array.isArray(shape)) {\n\t\t\tassert(shape.length === 1, 0x740 /* expected a single constant for value */);\n\t\t} else if (shape === SpecialField.Identifier) {\n\t\t\t// This case is a special case handling the encoding of identifier fields.\n\t\t\tassert(value !== undefined, 0x998 /* required value must not be missing */);\n\t\t\toutputBuffer.push(value);\n\t\t} else {\n\t\t\t// EncodedCounter case:\n\t\t\tunreachableCase(shape, \"Encoding values as deltas is not yet supported\");\n\t\t}\n\t}\n}\n\n/**\n * Provides common contextual information during encoding, like schema and policy settings.\n * Also, provides a cache to avoid duplicating equivalent shapes during a batch of encode operations.\n * @remarks\n * To avoid Shape duplication, any Shapes used in the encoding should either be:\n * - Singletons defined in a static scope.\n * - Cached in this object for future reuse such that all equivalent Shapes are deduplicated.\n */\nexport class EncoderContext implements NodeEncodeBuilder, FieldEncodeBuilder {\n\tprivate readonly nodeEncodersFromSchema: Map<TreeNodeSchemaIdentifier, NodeEncoder> =\n\t\tnew Map();\n\tprivate readonly nestedArrayEncoders: Map<NodeEncoder, NestedArrayEncoder> 
= new Map();\n\tpublic constructor(\n\t\tprivate readonly nodeEncoderFromPolicy: NodeEncoderPolicy,\n\t\tprivate readonly fieldEncoderFromPolicy: FieldEncoderPolicy,\n\t\tpublic readonly fieldShapes: ReadonlyMap<FieldKindIdentifier, FlexFieldKind>,\n\t\tpublic readonly idCompressor: IIdCompressor,\n\t\t/**\n\t\t * To be used to encode incremental chunks, if any.\n\t\t * @remarks\n\t\t * See {@link IncrementalEncoder} for more information.\n\t\t */\n\t\tpublic readonly incrementalEncoder: IncrementalEncoder | undefined,\n\t) {}\n\n\tpublic nodeEncoderFromSchema(schemaName: TreeNodeSchemaIdentifier): NodeEncoder {\n\t\treturn getOrCreate(this.nodeEncodersFromSchema, schemaName, () =>\n\t\t\tthis.nodeEncoderFromPolicy(this, schemaName),\n\t\t);\n\t}\n\n\tpublic fieldEncoderFromSchema(fieldSchema: TreeFieldStoredSchema): FieldEncoder {\n\t\treturn new LazyFieldEncoder(this, fieldSchema, this.fieldEncoderFromPolicy);\n\t}\n\n\tpublic nestedArrayEncoder(inner: NodeEncoder): NestedArrayEncoder {\n\t\treturn getOrCreate(this.nestedArrayEncoders, inner, () => new NestedArrayEncoder(inner));\n\t}\n}\n\nexport interface NodeEncodeBuilder {\n\tnodeEncoderFromSchema(schemaName: TreeNodeSchemaIdentifier): NodeEncoder;\n}\n\nexport interface FieldEncodeBuilder {\n\tfieldEncoderFromSchema(schema: TreeFieldStoredSchema): FieldEncoder;\n}\n\n/**\n * The policy for building a {@link FieldEncoder} for a field.\n */\nexport type FieldEncoderPolicy = (\n\tnodeBuilder: NodeEncodeBuilder,\n\tschema: TreeFieldStoredSchema,\n) => FieldEncoder;\n\n/**\n * The policy for building a {@link NodeEncoder} for a node.\n */\nexport type NodeEncoderPolicy = (\n\tfieldBuilder: FieldEncodeBuilder,\n\tschemaName: TreeNodeSchemaIdentifier,\n) => NodeEncoder;\n\nclass LazyFieldEncoder implements FieldEncoder {\n\tprivate encoderLazy: FieldEncoder | undefined;\n\n\tpublic constructor(\n\t\tpublic readonly nodeBuilder: NodeEncodeBuilder,\n\t\tpublic readonly fieldSchema: TreeFieldStoredSchema,\n\t\tprivate readonly fieldEncoderFromPolicy: FieldEncoderPolicy,\n\t) {}\n\tpublic encodeField(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tcontext: EncoderContext,\n\t\toutputBuffer: BufferFormat,\n\t): void {\n\t\tthis.encoder.encodeField(cursor, context, outputBuffer);\n\t}\n\n\tprivate get encoder(): FieldEncoder {\n\t\tif (this.encoderLazy === undefined) {\n\t\t\tthis.encoderLazy = this.fieldEncoderFromPolicy(this.nodeBuilder, this.fieldSchema);\n\t\t}\n\t\treturn this.encoderLazy;\n\t}\n\n\tpublic get shape(): Shape {\n\t\treturn this.encoder.shape;\n\t}\n}\n"]}
|
|
1
|
+
{"version":3,"file":"compressedEncode.js","sourceRoot":"","sources":["../../../../src/feature-libraries/chunked-forest/codec/compressedEncode.ts"],"names":[],"mappings":";AAAA;;;GAGG;;;AAEH,kEAAoF;AAGpF,qDAUgC;AAChC,qDAAqD;AAIrD,uEAImC;AAEnC,2CAQqB;AAGrB;;;;;;GAMG;AACH,SAAgB,gBAAgB,CAC/B,UAAsB,EACtB,OAAuB;IAEvB,MAAM,WAAW,GAAmB,EAAE,CAAC;IAEvC,6DAA6D;IAC7D,KAAK,MAAM,MAAM,IAAI,UAAU,EAAE,CAAC;QACjC,MAAM,MAAM,GAAiB,EAAE,CAAC;QAChC,uBAAe,CAAC,WAAW,CAAC,MAAM,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;QACrD,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;IAC1B,CAAC;IACD,OAAO,IAAA,4DAAkC,EAAC,mBAAO,EAAE,WAAW,CAAC,CAAC;AACjE,CAAC;AAbD,4CAaC;AAoED;;;GAGG;AACH,SAAgB,cAAc,CAAC,OAAoB;IAClD,OAAO;QACN,WAAW,CACV,MAA8B,EAC9B,OAAuB,EACvB,YAA0B;YAE1B,IAAA,sBAAW,EAAC,MAAM,EAAE,GAAG,EAAE,CAAC,OAAO,CAAC,UAAU,CAAC,MAAM,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC,CAAC;QAC9E,CAAC;QACD,KAAK,EAAE,OAAO,CAAC,KAAK;KACpB,CAAC;AACH,CAAC;AAXD,wCAWC;AAED;;GAEG;AACH,SAAgB,cAAc,CAAC,OAAoB;IAClD,OAAO;QACN,WAAW,CACV,MAA8B,EAC9B,OAAuB,EACvB,YAA0B;YAE1B,OAAO,CAAC,UAAU,CAAC,MAAM,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC;YAClD,MAAM,CAAC,QAAQ,EAAE,CAAC;QACnB,CAAC;QACD,KAAK,EAAE,OAAO,CAAC,KAAK;KACpB,CAAC;AACH,CAAC;AAZD,wCAYC;AAED;;GAEG;AACH,MAAa,QAAS,SAAQ,+BAA+B;IAC5D;QACC,KAAK,EAAE,CAAC;IACT,CAAC;IAGM,WAAW,CACjB,WAAuC,EACvC,MAAiC;QAEjC,MAAM,eAAe,GAAoB,CAAC,CAAC;QAC3C,OAAO,EAAE,CAAC,EAAE,eAAe,EAAE,CAAC;IAC/B,CAAC;IAEM,mCAAmC,CACzC,WAA4B,EAC5B,eAAuC,IAC/B,CAAC;IAEH,MAAM,CAAC,WAAW,CACxB,MAA8B,EAC9B,OAAuB,EACvB,YAA0B,EAC1B,OAAqB;QAErB,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;QACjC,OAAO,CAAC,WAAW,CAAC,MAAM,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC;IACpD,CAAC;IAEM,MAAM,CAAC,UAAU,CACvB,MAA8B,EAC9B,OAAuB,EACvB,YAA0B,EAC1B,OAAoB;QAEpB,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;QACjC,OAAO,CAAC,UAAU,CAAC,MAAM,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC;IACnD,CAAC;IAEM,MAAM,CAAC,WAAW,CACxB,MAA8B,EAC9B,OAAuB,EACvB,YAA0B,EAC1B,OAAqB;QAErB,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;QACjC,OAAO,CAAC,WAAW,CAAC,MAAM,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC;IACpD,CAAC;;AA/CF,4BAgDC;AA5CuB,iBAAQ,GAAG,IAAI,QAAQ,EAAE,CAAC;AA8ClD;;GAEG;AACU,QAAA,cAAc,GAAgB;IAC1C,UAAU,CACT,MAA8B,EAC9B,OAAuB,EACvB,YAA0B;QAE1B,yCAAyC;QACzC,MAAM,WAAW,GAAG,OAAO,CAAC,qBAAqB,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;QAC/D,QAAQ,CAAC,UAAU,CAAC,MAAM,EAAE,OAAO,EAAE,YAAY,EAAE,WAAW,CAAC,CAAC;IACjE,CAAC;IAED,KAAK,EAAE,QAAQ,CAAC,QAAQ;CACxB,CAAC;AAEF;;GAEG;AACU,QAAA,eAAe,GAAiB;IAC5C,WAAW,CACV,MAA8B,EAC9B,OAAuB,EACvB,YAA0B;QAE1B,kCAAkC;QAElC,IAAI,MAAM,CAAC,cAAc,EAAE,KAAK,CAAC,EAAE,CAAC;YACnC,MAAM,KAAK,GAAG,kBAAkB,CAAC,KAAK,CAAC;YACvC,QAAQ,CAAC,WAAW,CAAC,MAAM,EAAE,OAAO,EAAE,YAAY,EAAE,KAAK,CAAC,CAAC;QAC5D,CAAC;aAAM,IAAI,MAAM,CAAC,cAAc,EAAE,KAAK,CAAC,EAAE,CAAC;YAC1C,oEAAoE;YACpE,MAAM,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;YACpB,sBAAc,CAAC,UAAU,CAAC,MAAM,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC;YACzD,MAAM,CAAC,QAAQ,EAAE,CAAC;QACnB,CAAC;aAAM,CAAC;YACP,kDAAkD;YAClD,4FAA4F;YAE5F,MAAM,KAAK,GAAG,OAAO,CAAC,kBAAkB,CAAC,sBAAc,CAAC,CAAC;YACzD,QAAQ,CAAC,WAAW,CAAC,MAAM,EAAE,OAAO,EAAE,YAAY,EAAE,KAAK,CAAC,CAAC;QAC5D,CAAC;IACF,CAAC;IAED,KAAK,EAAE,QAAQ,CAAC,QAAQ;CACxB,CAAC;AAEF;;;;;GAKG;AACH,MAAa,kBACZ,SAAQ,+BAA+B;IAiBvC;;OAEG;IACH,YACiB,MAAc,EACd,KAAmB;QAEnC,KAAK,EAAE,CAAC;QAHQ,WAAM,GAAN,MAAM,CAAQ;QACd,UAAK,GAAL,KAAK,CAAc;IAGpC,CAAC;IAEM,WAAW,CACjB,MAA8B,EAC9B,OAAuB,EACvB,YAA0B;QAE1B,2DAA2D;QAC3D,4DAA4D;QAC5D,KAAK,IAAI,KAAK,GAAG,CAAC,EAAE,KAAK,GAAG,IAAI,CAAC,MAAM,EAAE,KAAK,EAAE,EAAE,CAAC;YAClD,IAAI,CAAC,KAAK,CAAC,WAAW,CAAC,MAAM,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC;QACvD,CAAC;IACF,CAAC;IAEM,WAAW,CACjB,MAA8B,EAC9B,OAAuB,EACvB,YAA0B;QAE1B,iGAAiG;QACjG,IAAA,iBAAM,EACL,MAAM,CAAC,cAAc,EAAE,IAAI,IAAI,CAAC,MAAM,EACtC,KAAK,CAAC,8CAA8
C,CACpD,CAAC;QACF,MAAM,CAAC,SAAS,EAAE,CAAC;QACnB,IAAI,CAAC,WAAW,CAAC,MAAM,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC;QAChD,IAAA,iBAAM,EACL,MAAM,CAAC,IAAI,sCAA8B,EACzC,KAAK,CAAC,yDAAyD,CAC/D,CAAC;IACH,CAAC;IAEM,WAAW,CACjB,WAAuC,EACvC,MAAiC;QAEjC,OAAO;YACN,CAAC,EAAE;gBACF,MAAM,EAAE,IAAI,CAAC,MAAM;gBACnB,KAAK,EAAE,MAAM,CAAC,YAAY,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,IAAI,IAAA,eAAI,EAAC,KAAK,CAAC,mBAAmB,CAAC;aACnF;SACD,CAAC;IACH,CAAC;IAEM,mCAAmC,CACzC,WAA4B,EAC5B,eAAuC;QAEvC,eAAe,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;IACnC,CAAC;IAED,IAAW,KAAK;QACf,OAAO,IAAI,CAAC;IACb,CAAC;;AA/EF,gDAgFC;AA5EuB,wBAAK,GAAuB,IAAI,kBAAkB,CAAC,CAAC,EAAE;IAC5E,IAAI,KAAK;QACR,0EAA0E;QAC1E,OAAO,kBAAkB,CAAC,KAAK,CAAC;IACjC,CAAC;IACD,WAAW,CACV,MAA8B,EAC9B,OAAuB,EACvB,YAA0B;QAE1B,IAAA,eAAI,EAAC,KAAK,CAAC,6CAA6C,CAAC,CAAC;IAC3D,CAAC;CACD,CAAC,CAAC;AAkEJ;;GAEG;AACH,MAAa,gBAAiB,SAAQ,+BAA+B;IACpE;;OAEG;IACH,YAAmC,UAAiB;QACnD,KAAK,EAAE,CAAC;QAD0B,eAAU,GAAV,UAAU,CAAO;IAEpD,CAAC;IAEM,WAAW,CACjB,WAAuC,EACvC,MAAiC;QAEjC,MAAM,KAAK,GACV,MAAM,CAAC,YAAY,CAAC,GAAG,CAAC,IAAI,CAAC,UAAU,CAAC;YACxC,IAAA,eAAI,EAAC,KAAK,CAAC,wCAAwC,CAAC,CAAC;QACtD,OAAO;YACN,CAAC,EAAE,KAAK;SACR,CAAC;IACH,CAAC;IAEM,mCAAmC,CACzC,WAA4B,EAC5B,eAAuC;QAEvC,eAAe,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;IAClC,CAAC;CACD;AA1BD,4CA0BC;AAED;;;;;GAKG;AACH,MAAa,kBAAkB;IAC9B,YACiB,YAAyB,EACzB,QAA0B,IAAI,gBAAgB,CAAC,YAAY,CAAC,KAAK,CAAC;QADlE,iBAAY,GAAZ,YAAY,CAAa;QACzB,UAAK,GAAL,KAAK,CAA6D;IAChF,CAAC;IAEG,WAAW,CACjB,MAA8B,EAC9B,OAAuB,EACvB,YAA0B;QAE1B,MAAM,MAAM,GAAiB,EAAE,CAAC;QAChC,IAAI,cAAc,GAAG,IAAI,CAAC;QAC1B,MAAM,MAAM,GAAG,MAAM,CAAC,cAAc,EAAE,CAAC;QACvC,IAAA,sBAAW,EAAC,MAAM,EAAE,GAAG,EAAE;YACxB,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC;YAC7B,IAAI,CAAC,YAAY,CAAC,UAAU,CAAC,MAAM,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;YACtD,cAAc,KAAK,MAAM,CAAC,MAAM,GAAG,MAAM,KAAK,CAAC,CAAC;QACjD,CAAC,CAAC,CAAC;QACH,IAAI,MAAM,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;YACzB,mFAAmF;YACnF,oFAAoF;YACpF,YAAY,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QAC3B,CAAC;aAAM,CAAC;YACP,IAAA,iBAAM,EACL,cAAc,EACd,KAAK,CAAC,8IAA8I,CACpJ,CAAC;YACF,YAAY,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QAC3B,CAAC;IACF,CAAC;CACD;AA/BD,gDA+BC;AAED;;GAEG;AACH,MAAa,qBAAsB,SAAQ,+BAA+B;IAClE,WAAW,CACjB,WAAuC,EACvC,MAAiC;QAEjC,OAAO;YACN,CAAC,EAAE,CAAC,CAAC,kCAAkC;SACvC,CAAC;IACH,CAAC;IAEM,mCAAmC,CACzC,WAA4B,EAC5B,eAAuC,IAC/B,CAAC;IAEV,IAAW,KAAK;QACf,OAAO,IAAI,CAAC;IACb,CAAC;CACD;AAlBD,sDAkBC;AAED;;;;;GAKG;AACU,QAAA,uBAAuB,GAAiB;IACpD,WAAW,CACV,MAA8B,EAC9B,OAAuB,EACvB,YAA0B;QAE1B,IAAA,iBAAM,EACL,OAAO,CAAC,kBAAkB,KAAK,SAAS,EACxC,KAAK,CAAC,wEAAwE,CAC9E,CAAC;QAEF,MAAM,iBAAiB,GAAG,OAAO,CAAC,kBAAkB,CAAC,sBAAsB,CAC1E,MAAM,EACN,CAAC,KAAgB,EAAE,EAAE,CAAC,gBAAgB,CAAC,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,OAAO,CAAC,CACjE,CAAC;QACF,YAAY,CAAC,IAAI,CAAC,iBAAiB,CAAC,CAAC;IACtC,CAAC;IAED,KAAK,EAAE,IAAI,gBAAgB,CAAC,IAAI,qBAAqB,EAAE,CAAC,gBAAgB,CAAC;CACzE,CAAC;AAEF;;;;GAIG;AACH,SAAgB,WAAW,CAC1B,KAAY,EACZ,KAAwB,EACxB,YAA0B;IAE1B,IAAI,KAAK,KAAK,SAAS,EAAE,CAAC;QACzB,IAAI,KAAK,KAAK,SAAS,EAAE,CAAC;YACzB,YAAY,CAAC,IAAI,CAAC,IAAI,EAAE,KAAK,CAAC,CAAC;QAChC,CAAC;aAAM,CAAC;YACP,YAAY,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QAC1B,CAAC;IACF,CAAC;SAAM,CAAC;QACP,IAAI,KAAK,KAAK,IAAI,EAAE,CAAC;YACpB,IAAA,iBAAM,EAAC,KAAK,KAAK,SAAS,EAAE,KAAK,CAAC,wCAAwC,CAAC,CAAC;YAC5E,YAAY,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QAC1B,CAAC;aAAM,IAAI,KAAK,KAAK,KAAK,EAAE,CAAC;YAC5B,IAAA,iBAAM,EAAC,KAAK,KAAK,SAAS,EAAE,KAAK,CAAC,iDAAiD,CAAC,CAAC;QACtF,CAAC;aAAM,IAAI,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE,CAAC;YACjC,IAAA,iBAAM,EAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE,KAAK,CAAC,0CAA0C,CAAC,CAAC;QAC9E,CAAC;aAAM,IAAI,KAAK,KAAK,wBAAY,CAAC,UAAU,EAAE,CAAC;YAC9C,0EAA0E;YAC1E,IAAA,iBAAM,EAAC
,KAAK,KAAK,SAAS,EAAE,KAAK,CAAC,wCAAwC,CAAC,CAAC;YAC5E,YAAY,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QAC1B,CAAC;aAAM,CAAC;YACP,uBAAuB;YACvB,IAAA,0BAAe,EAAC,KAAK,EAAE,gDAAgD,CAAC,CAAC;QAC1E,CAAC;IACF,CAAC;AACF,CAAC;AA5BD,kCA4BC;AAED;;;;;;;GAOG;AACH,MAAa,cAAc;IAI1B,YACkB,qBAAwC,EACxC,sBAA0C,EAC3C,WAA4D,EAC5D,YAA2B;IAC3C;;;;OAIG;IACa,kBAAkD;QATjD,0BAAqB,GAArB,qBAAqB,CAAmB;QACxC,2BAAsB,GAAtB,sBAAsB,CAAoB;QAC3C,gBAAW,GAAX,WAAW,CAAiD;QAC5D,iBAAY,GAAZ,YAAY,CAAe;QAM3B,uBAAkB,GAAlB,kBAAkB,CAAgC;QAblD,2BAAsB,GACtC,IAAI,GAAG,EAAE,CAAC;QACM,wBAAmB,GAAyC,IAAI,GAAG,EAAE,CAAC;IAYpF,CAAC;IAEG,qBAAqB,CAAC,UAAoC;QAChE,OAAO,IAAA,sBAAW,EAAC,IAAI,CAAC,sBAAsB,EAAE,UAAU,EAAE,GAAG,EAAE,CAChE,IAAI,CAAC,qBAAqB,CAAC,IAAI,EAAE,UAAU,CAAC,CAC5C,CAAC;IACH,CAAC;IAEM,sBAAsB,CAAC,WAAkC;QAC/D,OAAO,IAAI,gBAAgB,CAAC,IAAI,EAAE,WAAW,EAAE,IAAI,CAAC,sBAAsB,CAAC,CAAC;IAC7E,CAAC;IAEM,kBAAkB,CAAC,KAAkB;QAC3C,OAAO,IAAA,sBAAW,EAAC,IAAI,CAAC,mBAAmB,EAAE,KAAK,EAAE,GAAG,EAAE,CAAC,IAAI,kBAAkB,CAAC,KAAK,CAAC,CAAC,CAAC;IAC1F,CAAC;CACD;AA9BD,wCA8BC;AA0BD,MAAM,gBAAgB;IAGrB,YACiB,WAA8B,EAC9B,WAAkC,EACjC,sBAA0C;QAF3C,gBAAW,GAAX,WAAW,CAAmB;QAC9B,gBAAW,GAAX,WAAW,CAAuB;QACjC,2BAAsB,GAAtB,sBAAsB,CAAoB;IACzD,CAAC;IACG,WAAW,CACjB,MAA8B,EAC9B,OAAuB,EACvB,YAA0B;QAE1B,IAAI,CAAC,OAAO,CAAC,WAAW,CAAC,MAAM,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC;IACzD,CAAC;IAED,IAAY,OAAO;QAClB,IAAI,IAAI,CAAC,WAAW,KAAK,SAAS,EAAE,CAAC;YACpC,IAAI,CAAC,WAAW,GAAG,IAAI,CAAC,sBAAsB,CAAC,IAAI,CAAC,WAAW,EAAE,IAAI,CAAC,WAAW,CAAC,CAAC;QACpF,CAAC;QACD,OAAO,IAAI,CAAC,WAAW,CAAC;IACzB,CAAC;IAED,IAAW,KAAK;QACf,OAAO,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC;IAC3B,CAAC;CACD","sourcesContent":["/*!\n * Copyright (c) Microsoft Corporation and contributors. All rights reserved.\n * Licensed under the MIT License.\n */\n\nimport { assert, unreachableCase, fail } from \"@fluidframework/core-utils/internal\";\nimport type { IIdCompressor } from \"@fluidframework/id-compressor\";\n\nimport {\n\tCursorLocationType,\n\ttype FieldKey,\n\ttype FieldKindIdentifier,\n\ttype ITreeCursorSynchronous,\n\ttype TreeChunk,\n\ttype TreeFieldStoredSchema,\n\ttype TreeNodeSchemaIdentifier,\n\ttype Value,\n\tforEachNode,\n} from \"../../../core/index.js\";\nimport { getOrCreate } from \"../../../util/index.js\";\nimport type { FlexFieldKind } from \"../../modular-schema/index.js\";\n\nimport type { Counter, DeduplicationTable } from \"./chunkCodecUtilities.js\";\nimport {\n\ttype BufferFormat as BufferFormatGeneric,\n\tShape as ShapeGeneric,\n\tupdateShapesAndIdentifiersEncoding,\n} from \"./chunkEncodingGeneric.js\";\nimport type { FieldBatch } from \"./fieldBatch.js\";\nimport {\n\ttype EncodedAnyShape,\n\ttype EncodedChunkShape,\n\ttype EncodedFieldBatch,\n\ttype EncodedNestedArrayShape,\n\ttype EncodedValueShape,\n\tSpecialField,\n\tversion,\n} from \"./format.js\";\nimport type { IncrementalEncoder } from \"./codecs.js\";\n\n/**\n * Encode data from `FieldBatch` into an `EncodedFieldBatch`.\n *\n * Optimized for encoded size and encoding performance.\n *\n * Most of the compression strategy comes from the policy provided via `context`.\n */\nexport function compressedEncode(\n\tfieldBatch: FieldBatch,\n\tcontext: EncoderContext,\n): EncodedFieldBatch {\n\tconst batchBuffer: BufferFormat[] = [];\n\n\t// Populate buffer, including shape and identifier references\n\tfor (const cursor of fieldBatch) {\n\t\tconst buffer: BufferFormat = [];\n\t\tanyFieldEncoder.encodeField(cursor, context, buffer);\n\t\tbatchBuffer.push(buffer);\n\t}\n\treturn updateShapesAndIdentifiersEncoding(version, batchBuffer);\n}\n\nexport type BufferFormat = 
BufferFormatGeneric<EncodedChunkShape>;\nexport type Shape = ShapeGeneric<EncodedChunkShape>;\n\n/**\n * Like {@link FieldEncoder}, except data will be prefixed with the key.\n */\nexport interface KeyedFieldEncoder {\n\treadonly key: FieldKey;\n\treadonly encoder: FieldEncoder;\n}\n\n/**\n * An encoder with an associated shape.\n */\nexport interface Encoder {\n\t/**\n\t * The shape which describes how the encoded data is laid out.\n\t * Used by decoders to interpret the output of `encodeNode`.\n\t */\n\treadonly shape: Shape;\n}\n\n/**\n * An encoder for a specific shape of node.\n *\n * Can only be used with compatible nodes.\n */\nexport interface NodeEncoder extends Encoder {\n\t/**\n\t * @param cursor - in Nodes mode. Does not move cursor.\n\t */\n\tencodeNode(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tcontext: EncoderContext,\n\t\toutputBuffer: BufferFormat,\n\t): void;\n}\n\n/**\n * Like {@link NodeEncoder}, except encodes a run of nodes.\n */\nexport interface NodesEncoder extends Encoder {\n\t/**\n\t * @param cursor - in Nodes mode. Moves cursor however many nodes it encodes.\n\t */\n\tencodeNodes(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tcontext: EncoderContext,\n\t\toutputBuffer: BufferFormat,\n\t): void;\n}\n\n/**\n * Like {@link NodeEncoder}, except encodes a field.\n */\nexport interface FieldEncoder extends Encoder {\n\t/**\n\t * @param cursor - in Fields mode. Encodes entire field.\n\t */\n\tencodeField(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tcontext: EncoderContext,\n\t\toutputBuffer: BufferFormat,\n\t): void;\n}\n\n/**\n * Makes a {@link FieldEncoder} which runs `encoder` on every node in the field.\n * This does not encode the number nodes: the user of this may need to encode that elsewhere.\n */\nexport function asFieldEncoder(encoder: NodeEncoder): FieldEncoder {\n\treturn {\n\t\tencodeField(\n\t\t\tcursor: ITreeCursorSynchronous,\n\t\t\tcontext: EncoderContext,\n\t\t\toutputBuffer: BufferFormat,\n\t\t): void {\n\t\t\tforEachNode(cursor, () => encoder.encodeNode(cursor, context, outputBuffer));\n\t\t},\n\t\tshape: encoder.shape,\n\t};\n}\n\n/**\n * Adapt a {@link NodeEncoder} to a {@link NodesEncoder} which invokes `encoder` once.\n */\nexport function asNodesEncoder(encoder: NodeEncoder): NodesEncoder {\n\treturn {\n\t\tencodeNodes(\n\t\t\tcursor: ITreeCursorSynchronous,\n\t\t\tcontext: EncoderContext,\n\t\t\toutputBuffer: BufferFormat,\n\t\t): void {\n\t\t\tencoder.encodeNode(cursor, context, outputBuffer);\n\t\t\tcursor.nextNode();\n\t\t},\n\t\tshape: encoder.shape,\n\t};\n}\n\n/**\n * Encodes a chunk with {@link EncodedAnyShape} by prefixing the data with its shape.\n */\nexport class AnyShape extends ShapeGeneric<EncodedChunkShape> {\n\tprivate constructor() {\n\t\tsuper();\n\t}\n\tpublic static readonly instance = new AnyShape();\n\n\tpublic encodeShape(\n\t\tidentifiers: DeduplicationTable<string>,\n\t\tshapes: DeduplicationTable<Shape>,\n\t): EncodedChunkShape {\n\t\tconst encodedAnyShape: EncodedAnyShape = 0;\n\t\treturn { d: encodedAnyShape };\n\t}\n\n\tpublic countReferencedShapesAndIdentifiers(\n\t\tidentifiers: Counter<string>,\n\t\tshapeDiscovered: (shape: Shape) => void,\n\t): void {}\n\n\tpublic static encodeField(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tcontext: EncoderContext,\n\t\toutputBuffer: BufferFormat,\n\t\tencoder: FieldEncoder,\n\t): void {\n\t\toutputBuffer.push(encoder.shape);\n\t\tencoder.encodeField(cursor, context, outputBuffer);\n\t}\n\n\tpublic static encodeNode(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tcontext: 
EncoderContext,\n\t\toutputBuffer: BufferFormat,\n\t\tencoder: NodeEncoder,\n\t): void {\n\t\toutputBuffer.push(encoder.shape);\n\t\tencoder.encodeNode(cursor, context, outputBuffer);\n\t}\n\n\tpublic static encodeNodes(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tcontext: EncoderContext,\n\t\toutputBuffer: BufferFormat,\n\t\tencoder: NodesEncoder,\n\t): void {\n\t\toutputBuffer.push(encoder.shape);\n\t\tencoder.encodeNodes(cursor, context, outputBuffer);\n\t}\n}\n\n/**\n * Encodes a single node polymorphically.\n */\nexport const anyNodeEncoder: NodeEncoder = {\n\tencodeNode(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tcontext: EncoderContext,\n\t\toutputBuffer: BufferFormat,\n\t): void {\n\t\t// TODO: Fast path uniform chunk content.\n\t\tconst nodeEncoder = context.nodeEncoderFromSchema(cursor.type);\n\t\tAnyShape.encodeNode(cursor, context, outputBuffer, nodeEncoder);\n\t},\n\n\tshape: AnyShape.instance,\n};\n\n/**\n * Encodes a field polymorphically.\n */\nexport const anyFieldEncoder: FieldEncoder = {\n\tencodeField(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tcontext: EncoderContext,\n\t\toutputBuffer: BufferFormat,\n\t): void {\n\t\t// TODO: Fast path uniform chunks.\n\n\t\tif (cursor.getFieldLength() === 0) {\n\t\t\tconst shape = InlineArrayEncoder.empty;\n\t\t\tAnyShape.encodeField(cursor, context, outputBuffer, shape);\n\t\t} else if (cursor.getFieldLength() === 1) {\n\t\t\t// Fast path chunk of size one size one at least: skip nested array.\n\t\t\tcursor.enterNode(0);\n\t\t\tanyNodeEncoder.encodeNode(cursor, context, outputBuffer);\n\t\t\tcursor.exitNode();\n\t\t} else {\n\t\t\t// TODO: more efficient encoding for common cases.\n\t\t\t// Could try to find more specific shape compatible with all children than `anyNodeEncoder`.\n\n\t\t\tconst shape = context.nestedArrayEncoder(anyNodeEncoder);\n\t\t\tAnyShape.encodeField(cursor, context, outputBuffer, shape);\n\t\t}\n\t},\n\n\tshape: AnyShape.instance,\n};\n\n/**\n * Encodes a chunk using {@link EncodedInlineArrayShape}.\n * @remarks\n * The fact this is also a Shape is an implementation detail of the encoder: that allows the shape it uses to be itself,\n * which is an easy way to keep all the related code together without extra objects.\n */\nexport class InlineArrayEncoder\n\textends ShapeGeneric<EncodedChunkShape>\n\timplements NodesEncoder, FieldEncoder\n{\n\tpublic static readonly empty: InlineArrayEncoder = new InlineArrayEncoder(0, {\n\t\tget shape() {\n\t\t\t// Not actually used, makes count work without adding an additional shape.\n\t\t\treturn InlineArrayEncoder.empty;\n\t\t},\n\t\tencodeNodes(\n\t\t\tcursor: ITreeCursorSynchronous,\n\t\t\tcontext: EncoderContext,\n\t\t\toutputBuffer: BufferFormat,\n\t\t): void {\n\t\t\tfail(0xb4d /* Empty array should not encode any nodes */);\n\t\t},\n\t});\n\n\t/**\n\t * @param length - number of invocations of `inner`.\n\t */\n\tpublic constructor(\n\t\tpublic readonly length: number,\n\t\tpublic readonly inner: NodesEncoder,\n\t) {\n\t\tsuper();\n\t}\n\n\tpublic encodeNodes(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tcontext: EncoderContext,\n\t\toutputBuffer: BufferFormat,\n\t): void {\n\t\t// Linter is wrong about this loop being for-of compatible.\n\t\t// eslint-disable-next-line @typescript-eslint/prefer-for-of\n\t\tfor (let index = 0; index < this.length; index++) {\n\t\t\tthis.inner.encodeNodes(cursor, context, outputBuffer);\n\t\t}\n\t}\n\n\tpublic encodeField(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tcontext: EncoderContext,\n\t\toutputBuffer: BufferFormat,\n\t): void {\n\t\t// 
Its possible individual items from this array encode multiple nodes, so don't assume === here.\n\t\tassert(\n\t\t\tcursor.getFieldLength() >= this.length,\n\t\t\t0x73c /* unexpected length for fixed length array */,\n\t\t);\n\t\tcursor.firstNode();\n\t\tthis.encodeNodes(cursor, context, outputBuffer);\n\t\tassert(\n\t\t\tcursor.mode === CursorLocationType.Fields,\n\t\t\t0x73d /* should return to fields mode when finished encoding */,\n\t\t);\n\t}\n\n\tpublic encodeShape(\n\t\tidentifiers: DeduplicationTable<string>,\n\t\tshapes: DeduplicationTable<Shape>,\n\t): EncodedChunkShape {\n\t\treturn {\n\t\t\tb: {\n\t\t\t\tlength: this.length,\n\t\t\t\tshape: shapes.valueToIndex.get(this.inner.shape) ?? fail(0xb4e /* missing shape */),\n\t\t\t},\n\t\t};\n\t}\n\n\tpublic countReferencedShapesAndIdentifiers(\n\t\tidentifiers: Counter<string>,\n\t\tshapeDiscovered: (shape: Shape) => void,\n\t): void {\n\t\tshapeDiscovered(this.inner.shape);\n\t}\n\n\tpublic get shape(): this {\n\t\treturn this;\n\t}\n}\n\n/**\n * Encodes the shape for a nested array as {@link EncodedNestedArrayShape} shape.\n */\nexport class NestedArrayShape extends ShapeGeneric<EncodedChunkShape> {\n\t/**\n\t * @param innerShape - The shape of each item in this nested array.\n\t */\n\tpublic constructor(public readonly innerShape: Shape) {\n\t\tsuper();\n\t}\n\n\tpublic encodeShape(\n\t\tidentifiers: DeduplicationTable<string>,\n\t\tshapes: DeduplicationTable<Shape>,\n\t): EncodedChunkShape {\n\t\tconst shape: EncodedNestedArrayShape =\n\t\t\tshapes.valueToIndex.get(this.innerShape) ??\n\t\t\tfail(0xb4f /* index for shape not found in table */);\n\t\treturn {\n\t\t\ta: shape,\n\t\t};\n\t}\n\n\tpublic countReferencedShapesAndIdentifiers(\n\t\tidentifiers: Counter<string>,\n\t\tshapeDiscovered: (shape: Shape) => void,\n\t): void {\n\t\tshapeDiscovered(this.innerShape);\n\t}\n}\n\n/**\n * Encodes a field as a nested array with the {@link EncodedNestedArrayShape} shape.\n * @remarks\n * The fact this is also exposes a Shape is an implementation detail: it allows the shape it uses to be itself\n * which is an easy way to keep all the related code together without extra objects.\n */\nexport class NestedArrayEncoder implements FieldEncoder {\n\tpublic constructor(\n\t\tpublic readonly innerEncoder: NodeEncoder,\n\t\tpublic readonly shape: NestedArrayShape = new NestedArrayShape(innerEncoder.shape),\n\t) {}\n\n\tpublic encodeField(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tcontext: EncoderContext,\n\t\toutputBuffer: BufferFormat,\n\t): void {\n\t\tconst buffer: BufferFormat = [];\n\t\tlet allNonZeroSize = true;\n\t\tconst length = cursor.getFieldLength();\n\t\tforEachNode(cursor, () => {\n\t\t\tconst before = buffer.length;\n\t\t\tthis.innerEncoder.encodeNode(cursor, context, buffer);\n\t\t\tallNonZeroSize &&= buffer.length - before !== 0;\n\t\t});\n\t\tif (buffer.length === 0) {\n\t\t\t// This relies on the number of inner chunks being the same as the number of nodes.\n\t\t\t// If making inner a `NodesEncoder`, this code will have to be adjusted accordingly.\n\t\t\toutputBuffer.push(length);\n\t\t} else {\n\t\t\tassert(\n\t\t\t\tallNonZeroSize,\n\t\t\t\t0x73e /* either all or none of the members of a nested array must be 0 sized, or there is no way the decoder could process the content correctly. 
*/,\n\t\t\t);\n\t\t\toutputBuffer.push(buffer);\n\t\t}\n\t}\n}\n\n/**\n * Encodes the shape for an incremental chunk as {@link EncodedIncrementalChunkShape} shape.\n */\nexport class IncrementalChunkShape extends ShapeGeneric<EncodedChunkShape> {\n\tpublic encodeShape(\n\t\tidentifiers: DeduplicationTable<string>,\n\t\tshapes: DeduplicationTable<Shape>,\n\t): EncodedChunkShape {\n\t\treturn {\n\t\t\te: 0 /* EncodedIncrementalChunkShape */,\n\t\t};\n\t}\n\n\tpublic countReferencedShapesAndIdentifiers(\n\t\tidentifiers: Counter<string>,\n\t\tshapeDiscovered: (shape: Shape) => void,\n\t): void {}\n\n\tpublic get shape(): this {\n\t\treturn this;\n\t}\n}\n\n/**\n * Encodes an incremental field whose tree chunks are encoded separately and referenced by their {@link ChunkReferenceId}.\n * The shape of the content of this field is {@link NestedArrayShape}.\n * The inner items of the array have shape {@link IncrementalChunkShape} and are {@link ChunkReferenceId}s\n * of the encoded chunks.\n */\nexport const incrementalFieldEncoder: FieldEncoder = {\n\tencodeField(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tcontext: EncoderContext,\n\t\toutputBuffer: BufferFormat,\n\t): void {\n\t\tassert(\n\t\t\tcontext.incrementalEncoder !== undefined,\n\t\t\t0xc88 /* incremental encoder must be defined to use incrementalFieldEncoder */,\n\t\t);\n\n\t\tconst chunkReferenceIds = context.incrementalEncoder.encodeIncrementalField(\n\t\t\tcursor,\n\t\t\t(chunk: TreeChunk) => compressedEncode([chunk.cursor()], context),\n\t\t);\n\t\toutputBuffer.push(chunkReferenceIds);\n\t},\n\n\tshape: new NestedArrayShape(new IncrementalChunkShape() /* innerShape */),\n};\n\n/**\n * Encode `value` with `shape` into `outputBuffer`.\n *\n * Requires that `value` is compatible with `shape`.\n */\nexport function encodeValue(\n\tvalue: Value,\n\tshape: EncodedValueShape,\n\toutputBuffer: BufferFormat,\n): void {\n\tif (shape === undefined) {\n\t\tif (value !== undefined) {\n\t\t\toutputBuffer.push(true, value);\n\t\t} else {\n\t\t\toutputBuffer.push(false);\n\t\t}\n\t} else {\n\t\tif (shape === true) {\n\t\t\tassert(value !== undefined, 0x78d /* required value must not be missing */);\n\t\t\toutputBuffer.push(value);\n\t\t} else if (shape === false) {\n\t\t\tassert(value === undefined, 0x73f /* incompatible value shape: expected no value */);\n\t\t} else if (Array.isArray(shape)) {\n\t\t\tassert(shape.length === 1, 0x740 /* expected a single constant for value */);\n\t\t} else if (shape === SpecialField.Identifier) {\n\t\t\t// This case is a special case handling the encoding of identifier fields.\n\t\t\tassert(value !== undefined, 0x998 /* required value must not be missing */);\n\t\t\toutputBuffer.push(value);\n\t\t} else {\n\t\t\t// EncodedCounter case:\n\t\t\tunreachableCase(shape, \"Encoding values as deltas is not yet supported\");\n\t\t}\n\t}\n}\n\n/**\n * Provides common contextual information during encoding, like schema and policy settings.\n * Also, provides a cache to avoid duplicating equivalent shapes during a batch of encode operations.\n * @remarks\n * To avoid Shape duplication, any Shapes used in the encoding should either be:\n * - Singletons defined in a static scope.\n * - Cached in this object for future reuse such that all equivalent Shapes are deduplicated.\n */\nexport class EncoderContext implements NodeEncodeBuilder, FieldEncodeBuilder {\n\tprivate readonly nodeEncodersFromSchema: Map<TreeNodeSchemaIdentifier, NodeEncoder> =\n\t\tnew Map();\n\tprivate readonly nestedArrayEncoders: Map<NodeEncoder, 
NestedArrayEncoder> = new Map();\n\tpublic constructor(\n\t\tprivate readonly nodeEncoderFromPolicy: NodeEncoderPolicy,\n\t\tprivate readonly fieldEncoderFromPolicy: FieldEncoderPolicy,\n\t\tpublic readonly fieldShapes: ReadonlyMap<FieldKindIdentifier, FlexFieldKind>,\n\t\tpublic readonly idCompressor: IIdCompressor,\n\t\t/**\n\t\t * To be used to encode incremental chunks, if any.\n\t\t * @remarks\n\t\t * See {@link IncrementalEncoder} for more information.\n\t\t */\n\t\tpublic readonly incrementalEncoder: IncrementalEncoder | undefined,\n\t) {}\n\n\tpublic nodeEncoderFromSchema(schemaName: TreeNodeSchemaIdentifier): NodeEncoder {\n\t\treturn getOrCreate(this.nodeEncodersFromSchema, schemaName, () =>\n\t\t\tthis.nodeEncoderFromPolicy(this, schemaName),\n\t\t);\n\t}\n\n\tpublic fieldEncoderFromSchema(fieldSchema: TreeFieldStoredSchema): FieldEncoder {\n\t\treturn new LazyFieldEncoder(this, fieldSchema, this.fieldEncoderFromPolicy);\n\t}\n\n\tpublic nestedArrayEncoder(inner: NodeEncoder): NestedArrayEncoder {\n\t\treturn getOrCreate(this.nestedArrayEncoders, inner, () => new NestedArrayEncoder(inner));\n\t}\n}\n\nexport interface NodeEncodeBuilder {\n\tnodeEncoderFromSchema(schemaName: TreeNodeSchemaIdentifier): NodeEncoder;\n}\n\nexport interface FieldEncodeBuilder {\n\tfieldEncoderFromSchema(schema: TreeFieldStoredSchema): FieldEncoder;\n}\n\n/**\n * The policy for building a {@link FieldEncoder} for a field.\n */\nexport type FieldEncoderPolicy = (\n\tnodeBuilder: NodeEncodeBuilder,\n\tschema: TreeFieldStoredSchema,\n) => FieldEncoder;\n\n/**\n * The policy for building a {@link NodeEncoder} for a node.\n */\nexport type NodeEncoderPolicy = (\n\tfieldBuilder: FieldEncodeBuilder,\n\tschemaName: TreeNodeSchemaIdentifier,\n) => NodeEncoder;\n\nclass LazyFieldEncoder implements FieldEncoder {\n\tprivate encoderLazy: FieldEncoder | undefined;\n\n\tpublic constructor(\n\t\tpublic readonly nodeBuilder: NodeEncodeBuilder,\n\t\tpublic readonly fieldSchema: TreeFieldStoredSchema,\n\t\tprivate readonly fieldEncoderFromPolicy: FieldEncoderPolicy,\n\t) {}\n\tpublic encodeField(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tcontext: EncoderContext,\n\t\toutputBuffer: BufferFormat,\n\t): void {\n\t\tthis.encoder.encodeField(cursor, context, outputBuffer);\n\t}\n\n\tprivate get encoder(): FieldEncoder {\n\t\tif (this.encoderLazy === undefined) {\n\t\t\tthis.encoderLazy = this.fieldEncoderFromPolicy(this.nodeBuilder, this.fieldSchema);\n\t\t}\n\t\treturn this.encoderLazy;\n\t}\n\n\tpublic get shape(): Shape {\n\t\treturn this.encoder.shape;\n\t}\n}\n"]}
@@ -316,7 +316,7 @@ class ForestIncrementalSummaryBuilder {
      */
     decodeIncrementalChunk(referenceId, chunkDecoder) {
         const ChunkLoadProperties = this.loadedChunksMap.get(`${referenceId}`);
-        (0, internal_1.assert)(ChunkLoadProperties !== undefined,
+        (0, internal_1.assert)(ChunkLoadProperties !== undefined, 0xc86 /* Encoded incremental chunk not found */);
         const chunk = chunkDecoder(ChunkLoadProperties.encodedContents);
         // Account for the reference about to be added in `chunkTrackingPropertiesMap`
         // to ensure that no other users of this chunk think they have unique ownership.
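The only functional change in this hunk is that the assert in `decodeIncrementalChunk` now carries the tagged error code `0xc86`, with the human-readable text preserved as a comment in the compiled output (the embedded source in the old sourcemap below shows the previous form used a plain string message). The following is a minimal sketch of that tagged-assert pattern, not the package's actual module layout; `lookupEncodedChunk`, its parameters, and the simplified map shape are hypothetical stand-ins for illustration.

```typescript
// Sketch of the tagged-assert pattern used in the hunk above.
// `assert` here is the helper from @fluidframework/core-utils/internal, which
// accepts either a string message or a short numeric tag.
import { assert } from "@fluidframework/core-utils/internal";

interface ChunkLoadProperties {
	// Encoded contents as loaded from the summary; the real type is EncodedFieldBatch.
	readonly encodedContents: unknown;
	readonly summaryPath: string;
}

function lookupEncodedChunk(
	loadedChunksMap: ReadonlyMap<string, ChunkLoadProperties>,
	referenceId: number,
): ChunkLoadProperties {
	const properties = loadedChunksMap.get(`${referenceId}`);
	// The numeric tag (0xc86 in this release) stands in for the message at runtime,
	// while the comment keeps the text readable in source and, as the hunk shows,
	// survives into the compiled dist output.
	assert(properties !== undefined, 0xc86 /* Encoded incremental chunk not found */);
	return properties;
}
```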
@@ -1 +1 @@
-
{"version":3,"file":"incrementalSummaryBuilder.js","sourceRoot":"","sources":["../../../src/feature-libraries/forest-summary/incrementalSummaryBuilder.ts"],"names":[],"mappings":";AAAA;;;GAGG;;;AAEH,kEAA6D;AAK7D,qEAA4E;AAC5E,kDAM6B;AAS7B,2EAAiE;AAGjE,uEAAwE;AAIxE;;;;GAIG;AACU,QAAA,uBAAuB,GAAG,YAAY,CAAC;AAEpD;;;;;GAKG;AACH,MAAM,oBAAoB,GAAG,UAAU,CAAC;AAExC;;GAEG;AACU,QAAA,0BAA0B,GAAG;IACzC,4CAA4C;IAC5C,QAAQ,EAAE,UAAU;IACpB,wCAAwC;IACxC,YAAY,EAAE,cAAc;CACnB,CAAC;AAoFX;;GAEG;AACH,IAAY,gCAgBX;AAhBD,WAAY,gCAAgC;IAC3C;;;;OAIG;IACH,qGAAW,CAAA;IACX;;;;;;;OAOG;IACH,mGAAU,CAAA;AACX,CAAC,EAhBW,gCAAgC,gDAAhC,gCAAgC,QAgB3C;AAED;;;;GAIG;AACH,SAAS,uBAAuB,CAC/B,kBAA8C,EAC9C,wBAA8D;IAE9D,IAAA,iBAAM,EACL,kBAAkB,KAAK,kCAA0B,CAAC,QAAQ,EAC1D,KAAK,CAAC,4BAA4B,CAClC,CAAC;IACF,IAAA,iBAAM,EACL,wBAAwB,KAAK,SAAS,EACtC,KAAK,CAAC,0EAA0E,CAChF,CAAC;AACH,CAAC;AAED;;;;GAIG;AACH,SAAS,2BAA2B,CACnC,kBAA8C,EAC9C,wBAA8D;IAE9D,IAAA,iBAAM,EACL,kBAAkB,KAAK,kCAA0B,CAAC,YAAY,EAC9D,KAAK,CAAC,gCAAgC,CACtC,CAAC;IACF,IAAA,iBAAM,EACL,wBAAwB,KAAK,SAAS,EACtC,KAAK,CAAC,0EAA0E,CAChF,CAAC;AACH,CAAC;AAED,4CAA4C;AAC5C;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA2CG;AACH,2CAA2C;AAC3C,MAAa,+BAA+B;IA2C3C,YACkB,wBAAiC,EACjC,gBAAiE,EAClE,yBAAoD,EACnD,qBAA6B;QAH7B,6BAAwB,GAAxB,wBAAwB,CAAS;QACjC,qBAAgB,GAAhB,gBAAgB,CAAiD;QAClE,8BAAyB,GAAzB,yBAAyB,CAA2B;QACnD,0BAAqB,GAArB,qBAAqB,CAAQ;QA9C/C;;WAEG;QACK,oBAAe,GAAqB,IAAA,gBAAK,EAAC,CAAC,CAAC,CAAC;QAErD;;;WAGG;QACc,+BAA0B,GAIvC,IAAI,GAAG,EAAE,CAAC;QAEd;;WAEG;QACI,uBAAkB,GACxB,kCAA0B,CAAC,YAAY,CAAC;QAEzC;;WAEG;QACK,gCAA2B,GAAW,CAAC,CAAC,CAAC;QAQjD;;;WAGG;QACH;;;WAGG;QACc,oBAAe,GAAqC,IAAI,GAAG,EAAE,CAAC;IAO5E,CAAC;IAEJ;;;;;OAKG;IACI,KAAK,CAAC,IAAI,CAChB,QAAgC,EAChC,iBAAqF;QAErF,MAAM,UAAU,GAAG,QAAQ,CAAC,eAAe,EAAE,EAAE,CAAC;QAChD,wGAAwG;QACxG,qDAAqD;QACrD,IAAI,UAAU,KAAK,SAAS,EAAE,CAAC;YAC9B,OAAO;QACR,CAAC;QAED,uGAAuG;QACvG,uDAAuD;QACvD,MAAM,2BAA2B,GAAG,KAAK,EACxC,YAA2B,EAC3B,aAAqB,EACL,EAAE;YAClB,iGAAiG;YACjG,oDAAoD;YACpD,KAAK,MAAM,CAAC,gBAAgB,EAAE,iBAAiB,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,YAAY,CAAC,KAAK,CAAC,EAAE,CAAC;gBACxF,MAAM,gBAAgB,GAAG,GAAG,aAAa,GAAG,gBAAgB,EAAE,CAAC;gBAC/D,MAAM,iBAAiB,GAAG,GAAG,gBAAgB,IAAI,oBAAoB,EAAE,CAAC;gBACxE,IAAI,CAAC,CAAC,MAAM,QAAQ,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,EAAE,CAAC;oBACnD,MAAM,IAAI,uBAAY,CACrB,0DAA0D,iBAAiB,EAAE,CAC7E,CAAC;gBACH,CAAC;gBACD,MAAM,aAAa,GAAG,MAAM,iBAAiB,CAAoB,iBAAiB,CAAC,CAAC;gBACpF,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,gBAAgB,EAAE;oBAC1C,eAAe,EAAE,aAAa;oBAC9B,WAAW,EAAE,gBAAgB;iBAC7B,CAAC,CAAC;gBAEH,MAAM,sBAAsB,GAAG,MAAM,CAAC,gBAAgB,CAAC,CAAC;gBACxD,IAAI,CAAC,eAAe,GAAG,IAAA,gBAAK,EAC3B,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,eAAe,EAAE,sBAAsB,GAAG,CAAC,CAAC,CAC1D,CAAC;gBAEF,wEAAwE;gBACxE,MAAM,2BAA2B,CAAC,iBAAiB,EAAE,GAAG,gBAAgB,GAAG,CAAC,CAAC;YAC9E,CAAC;QACF,CAAC,CAAC;QACF,MAAM,2BAA2B,CAAC,UAAU,EAAE,EAAE,CAAC,CAAC;IACnD,CAAC;IAED;;;;;;;;OAQG;IACI,YAAY,CAAC,IAInB;QACA,MAAM,EAAE,QAAQ,EAAE,yBAAyB,EAAE,SAAS,EAAE,GAAG,IAAI,CAAC;QAChE,6GAA6G;QAC7G,0EAA0E;QAC1E,0GAA0G;QAC1G,IAAI,CAAC,IAAI,CAAC,wBAAwB,IAAI,yBAAyB,KAAK,SAAS,EAAE,CAAC;YAC/E,OAAO,gCAAgC,CAAC,UAAU,CAAC;QACpD,CAAC;QAED,2BAA2B,CAAC,IAAI,CAAC,kBAAkB,EAAE,IAAI,CAAC,wBAAwB,CAAC,CAAC;QAEpF,IAAI,CAAC,kBAAkB,GAAG,kCAA0B,CAAC,QAAQ,CAAC;QAC9D,IAAI,CAAC,2BAA2B,GAAG,yBAAyB,CAAC,2BAA2B,CAAC;QACzF,IAAI,CAAC,wBAAwB,GAAG;YAC/B,qBAAqB,EAAE,yBAAyB,CAAC,qBAAqB;YACtE,qBAAqB,EAAE,yBAAyB,CAAC,WAAW;YAC5D,gBAAgB,EAAE,EAAE;YACpB,oBAAoB,EAAE,IAAI,6BAAkB,EAAE;YAC9C,QAAQ;YACR,SAAS;SACT,CAAC;QACF,OAAO,gCAAgC,CAAC,WAAW,CAAC;IACrD,CAAC;IAED;;;OAGG;IACI,sBAAsB,CAC5B,MAA8B,EAC9B,YAAqD;QAErD,0GAA0G;QAC1G,uBAAuB,CAAC,IAAI,CAAC,kBAAkB,EAAE,IAAI,CAAC,wBAAwB,CAAC,C
AAC;QAEhF,MAAM,iBAAiB,GAAuB,EAAE,CAAC;QACjD,MAAM,MAAM,GAAG,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC,CAAC;QAC7C,KAAK,MAAM,KAAK,IAAI,MAAM,EAAE,CAAC;YAC5B,IAAI,eAAuC,CAAC;YAE5C,8EAA8E;YAC9E,oGAAoG;YACpG,6EAA6E;YAC7E,MAAM,uBAAuB,GAAG,IAAA,8BAAmB,EAClD,IAAI,CAAC,0BAA0B,EAC/B,IAAI,CAAC,2BAA2B,EAChC,KAAK,CACL,CAAC;YACF,IAAI,uBAAuB,KAAK,SAAS,IAAI,CAAC,IAAI,CAAC,wBAAwB,CAAC,QAAQ,EAAE,CAAC;gBACtF,eAAe,GAAG,uBAAuB,CAAC;gBAC1C,IAAI,CAAC,wBAAwB,CAAC,oBAAoB,CAAC,SAAS,CAC3D,GAAG,eAAe,CAAC,WAAW,EAAE,EAChC,gCAAW,CAAC,IAAI,EAChB,GAAG,IAAI,CAAC,wBAAwB,CAAC,qBAAqB,IAAI,eAAe,CAAC,WAAW,EAAE,CACvF,CAAC;YACH,CAAC;iBAAM,CAAC;gBACP,6CAA6C;gBAC7C,MAAM,cAAc,GAAqB,IAAA,gBAAK,EAAC,IAAI,CAAC,eAAe,EAAE,CAAC,CAAC;gBAEvE,oGAAoG;gBACpG,2CAA2C;gBAC3C,qGAAqG;gBACrG,oDAAoD;gBACpD,IAAI,CAAC,wBAAwB,CAAC,gBAAgB,CAAC,IAAI,CAAC,cAAc,CAAC,CAAC;gBAEpE,eAAe,GAAG;oBACjB,WAAW,EAAE,cAAc;oBAC3B,WAAW,EAAE,IAAI,CAAC,wBAAwB,CAAC,gBAAgB,CAAC,IAAI,CAAC,GAAG,CAAC;iBACrE,CAAC;gBAEF,MAAM,oBAAoB,GAAG,IAAI,CAAC,wBAAwB,CAAC,oBAAoB,CAAC;gBAChF,oGAAoG;gBACpG,gDAAgD;gBAChD,oGAAoG;gBACpG,qGAAqG;gBACrG,MAAM,mBAAmB,GAAG,IAAI,6BAAkB,EAAE,CAAC;gBACrD,IAAI,CAAC,wBAAwB,CAAC,oBAAoB,GAAG,mBAAmB,CAAC;gBACzE,mBAAmB,CAAC,OAAO,CAC1B,oBAAoB,EACpB,IAAI,CAAC,wBAAwB,CAAC,SAAS,CAAC,YAAY,CAAC,KAAK,CAAC,CAAC,CAC5D,CAAC;gBAEF,oGAAoG;gBACpG,qEAAqE;gBACrE,oBAAoB,CAAC,YAAY,CAChC,GAAG,cAAc,EAAE,EACnB,mBAAmB,CAAC,cAAc,EAAE,CACpC,CAAC;gBAEF,6DAA6D;gBAC7D,IAAI,CAAC,wBAAwB,CAAC,oBAAoB,GAAG,oBAAoB,CAAC;gBAC1E,IAAI,CAAC,wBAAwB,CAAC,gBAAgB,CAAC,GAAG,EAAE,CAAC;YACtD,CAAC;YAED,IAAA,yBAAc,EACb,IAAI,CAAC,0BAA0B,EAC/B,IAAI,CAAC,wBAAwB,CAAC,qBAAqB,EACnD,KAAK,EACL,eAAe,CACf,CAAC;YACF,iBAAiB,CAAC,IAAI,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC;QACrD,CAAC;QACD,OAAO,iBAAiB,CAAC;IAC1B,CAAC;IAED;;;;;;;;OAQG;IACI,eAAe,CAAC,IAGtB;QACA,MAAM,EAAE,yBAAyB,EAAE,oBAAoB,EAAE,GAAG,IAAI,CAAC;QACjE,IAAI,CAAC,IAAI,CAAC,wBAAwB,IAAI,yBAAyB,KAAK,SAAS,EAAE,CAAC;YAC/E,MAAM,cAAc,GAAG,IAAI,6BAAkB,EAAE,CAAC;YAChD,cAAc,CAAC,OAAO,CAAC,+BAAuB,EAAE,oBAAoB,CAAC,CAAC;YACtE,OAAO,cAAc,CAAC,cAAc,EAAE,CAAC;QACxC,CAAC;QAED,uBAAuB,CAAC,IAAI,CAAC,kBAAkB,EAAE,IAAI,CAAC,wBAAwB,CAAC,CAAC;QAEhF,IAAI,CAAC,wBAAwB,CAAC,oBAAoB,CAAC,OAAO,CACzD,+BAAuB,EACvB,oBAAoB,CACpB,CAAC;QAEF,wEAAwE;QACxE,2GAA2G;QAC3G,2GAA2G;QAC3G,kEAAkE;QAClE,MAAM,wBAAwB,GAAG,IAAI,CAAC,0BAA0B,CAAC,GAAG,CACnE,IAAI,CAAC,2BAA2B,CAChC,CAAC;QACF,MAAM,yBAAyB,GAAG,IAAI,CAAC,0BAA0B,CAAC,GAAG,CACpE,IAAI,CAAC,wBAAwB,CAAC,qBAAqB,CACnD,CAAC;QACF,IAAI,wBAAwB,KAAK,SAAS,IAAI,yBAAyB,KAAK,SAAS,EAAE,CAAC;YACvF,KAAK,MAAM,CAAC,KAAK,EAAE,eAAe,CAAC,IAAI,wBAAwB,CAAC,OAAO,EAAE,EAAE,CAAC;gBAC3E,IAAI,CAAC,yBAAyB,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC;oBAC3C,yBAAyB,CAAC,GAAG,CAAC,KAAK,EAAE,eAAe,CAAC,CAAC;gBACvD,CAAC;YACF,CAAC;QACF,CAAC;QAED,oGAAoG;QACpG,4DAA4D;QAC5D,KAAK,MAAM,cAAc,IAAI,IAAI,CAAC,0BAA0B,CAAC,IAAI,EAAE,EAAE,CAAC;YACrE,IAAI,cAAc,GAAG,IAAI,CAAC,2BAA2B,EAAE,CAAC;gBACvD,IAAI,CAAC,0BAA0B,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;YACxD,CAAC;QACF,CAAC;QAED,IAAI,CAAC,kBAAkB,GAAG,kCAA0B,CAAC,YAAY,CAAC;QAClE,MAAM,WAAW,GAAG,IAAI,CAAC,wBAAwB,CAAC,oBAAoB,CAAC,cAAc,EAAE,CAAC;QACxF,IAAI,CAAC,wBAAwB,GAAG,SAAS,CAAC;QAC1C,OAAO,WAAW,CAAC;IACpB,CAAC;IAED;;OAEG;IACI,sBAAsB,CAC5B,WAA6B,EAC7B,YAAuD;QAEvD,MAAM,mBAAmB,GAAG,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,GAAG,WAAW,EAAE,CAAC,CAAC;QACvE,IAAA,iBAAM,EAAC,mBAAmB,KAAK,SAAS,EAAE,qCAAqC,CAAC,CAAC;QACjF,MAAM,KAAK,GAAG,YAAY,CAAC,mBAAmB,CAAC,eAAe,CAAC,CAAC;QAEhE,8EAA8E;QAC9E,gFAAgF;QAChF,sFAAsF;QACtF,KAAK,CAAC,cAAc,EAAE,CAAC;QACvB,mGAAmG;QACnG,mGAAmG;QACnG,yCAAyC;QACzC,IAAA,yBAAc,EAAC,IAAI,CAAC,0BAA0B,EAAE,IAAI,CAAC,qBAAqB,EAAE,KAAK,EAAE;YAClF,WAAW;YACX,WAAW,EAAE,mBAAmB,CAAC,WAAW;SAC5C,CAAC,CAAC;QACH,OAA
O,KAAK,CAAC;IACd,CAAC;CACD;AA9SD,0EA8SC","sourcesContent":["/*!\n * Copyright (c) Microsoft Corporation and contributors. All rights reserved.\n * Licensed under the MIT License.\n */\n\nimport { assert } from \"@fluidframework/core-utils/internal\";\nimport type {\n\tIExperimentalIncrementalSummaryContext,\n\tISummaryTreeWithStats,\n} from \"@fluidframework/runtime-definitions/internal\";\nimport { SummaryTreeBuilder } from \"@fluidframework/runtime-utils/internal\";\nimport {\n\tbrand,\n\tsetInNestedMap,\n\ttryGetFromNestedMap,\n\ttype JsonCompatible,\n\ttype NestedMap,\n} from \"../../util/index.js\";\nimport type {\n\tChunkReferenceId,\n\tEncodedFieldBatch,\n\tIncrementalEncoderDecoder,\n\tIncrementalEncodingPolicy,\n\tTreeChunk,\n} from \"../chunked-forest/index.js\";\nimport type { ITreeCursorSynchronous } from \"../../core/index.js\";\nimport { SummaryType } from \"@fluidframework/driver-definitions\";\nimport type { IChannelStorageService } from \"@fluidframework/datastore-definitions/internal\";\nimport type { ISnapshotTree } from \"@fluidframework/driver-definitions/internal\";\nimport { LoggingError } from \"@fluidframework/telemetry-utils/internal\";\nimport type { IFluidHandle } from \"@fluidframework/core-interfaces\";\nimport type { SummaryElementStringifier } from \"../../shared-tree-core/index.js\";\n\n/**\n * The key for the blob under ForestSummarizer's root.\n * This blob contains the ForestCodec's output.\n * See {@link ForestIncrementalSummaryBuilder} for details on the summary structure.\n */\nexport const forestSummaryContentKey = \"ForestTree\";\n\n/**\n * The contents of an incremental chunk is under a summary tree node with its {@link ChunkReferenceId} as the key.\n * The inline portion of the chunk content is encoded with the forest codec is stored in a blob with this key.\n * The rest of the chunk contents is stored in the summary tree under the summary tree node.\n * See the summary format in {@link ForestIncrementalSummaryBuilder} for more details.\n */\nconst chunkContentsBlobKey = \"contents\";\n\n/**\n * State that tells whether a summary is currently being tracked.\n */\nexport const ForestSummaryTrackingState = {\n\t/** A summary is currently being tracked. */\n\tTracking: \"Tracking\",\n\t/** A summary is ready to be tracked. 
*/\n\tReadyToTrack: \"ReadyToTrack\",\n} as const;\nexport type ForestSummaryTrackingState =\n\t(typeof ForestSummaryTrackingState)[keyof typeof ForestSummaryTrackingState];\n\n/**\n * The properties of a chunk tracked during the loading process.\n * These are used to identify a chunk when it is decoded and recreate the tracking state\n * as it was when the summary that the client is loading from was generated.\n *\n * An encoded chunk, paired with a location it can be reused / reloaded from.\n * @remarks\n * This identifies a location in a specific summary where `encodedContents` was loaded from.\n *\n * When summarizing, Fluid always ensures the summary that the summary client is allowed to reuse content from\n * is the one it loaded from, so tracking this on load is sufficient for now:\n * there is no need to track the equivalent data when summarizing.\n */\ninterface ChunkLoadProperties {\n\t/**\n\t * The encoded contents of the chunk.\n\t */\n\treadonly encodedContents: EncodedFieldBatch;\n\t/**\n\t * The path for this chunk's contents in the summary tree relative to the forest's summary tree.\n\t * This path is used to generate a summary handle for the chunk if it doesn't change between summaries.\n\t */\n\treadonly summaryPath: string;\n}\n\n/**\n * The properties of a chunk that is tracked for every summary.\n * If a chunk doesn't change between summaries,\n * these properties will be used to generate a summary handle for the chunk.\n */\ninterface ChunkSummaryProperties {\n\t/**\n\t * The reference ID of the chunk which uniquely identifies it under its parent's summary tree.\n\t * The summary for this chunk will be stored against this reference ID as key in the summary tree.\n\t */\n\treadonly referenceId: ChunkReferenceId;\n\t/**\n\t * The path for this chunk's summary in the summary tree relative to the forest's summary tree.\n\t * This path is used to generate a summary handle for the chunk if it doesn't change between summaries.\n\t */\n\treadonly summaryPath: string;\n}\n\n/**\n * The properties of a summary being tracked.\n */\ninterface TrackedSummaryProperties {\n\t/**\n\t * The sequence number of the summary in progress.\n\t */\n\treadonly summarySequenceNumber: number;\n\t/**\n\t * The base path for the latest summary that was successful.\n\t * This is used to generate summary handles.\n\t */\n\treadonly latestSummaryBasePath: string;\n\t/**\n\t * Whether the summary being tracked is a full tree summary.\n\t * If true, the summary will not contain any summary handles. 
All chunks must be summarized in full.\n\t */\n\treadonly fullTree: boolean;\n\t/**\n\t * Represents the path of a chunk in the summary tree relative to the forest's summary tree.\n\t * Each item in the array is the {@link ChunkReferenceId} of a chunk in the summary tree starting\n\t * from the chunk under forest summary tree.\n\t * When a chunk is summarized, this array will be used to generate the path for the chunk's summary in the\n\t * summary tree.\n\t */\n\treadonly chunkSummaryPath: ChunkReferenceId[];\n\t/**\n\t * The parent summary builder to use to build the incremental summary tree.\n\t * When a chunk is being summarized, it will add its summary to this builder against its reference ID.\n\t */\n\tparentSummaryBuilder: SummaryTreeBuilder;\n\t/**\n\t * Serializes content (including {@link (IFluidHandle:interface)}s) for adding to a summary blob.\n\t */\n\tstringify: SummaryElementStringifier;\n}\n\n/**\n * The behavior of the forest's incremental summary - whether the summary should be a single blob or incremental.\n */\nexport enum ForestIncrementalSummaryBehavior {\n\t/**\n\t * The forest can encode chunks incrementally, i.e., chunks that support incremental encoding will be encoded\n\t * separately - they will be added to a separate tree.\n\t * The incremental summary format is described in {@link ForestIncrementalSummaryBuilder}.\n\t */\n\tIncremental,\n\t/**\n\t * The forest should encode all of it's data in a single summary blob.\n\t * @remarks\n\t * The format of the summary will be the same as the old format (pre-incremental summaries) and is fully\n\t * backwards compatible with the old format. The summary will basically look like an incremental summary\n\t * with no incremental fields - it will only contain the \"ForestTree\" blob in the summary format described\n\t * in {@link ForestIncrementalSummaryBuilder}.\n\t */\n\tSingleBlob,\n}\n\n/**\n * Validates that a summary is currently being tracked and that the tracked summary properties are defined.\n * @param forestSummaryState - The current state of the forest summary tracking.\n * @param trackedSummaryProperties - The properties of the tracked summary, which must be available.\n */\nfunction validateTrackingSummary(\n\tforestSummaryState: ForestSummaryTrackingState,\n\ttrackedSummaryProperties: TrackedSummaryProperties | undefined,\n): asserts trackedSummaryProperties is TrackedSummaryProperties {\n\tassert(\n\t\tforestSummaryState === ForestSummaryTrackingState.Tracking,\n\t\t0xc22 /* Not tracking a summary */,\n\t);\n\tassert(\n\t\ttrackedSummaryProperties !== undefined,\n\t\t0xc23 /* Tracked summary properties must be available when tracking a summary */,\n\t);\n}\n\n/**\n * Validates that a summary is ready to be tracked and that the tracked summary properties are undefined.\n * @param forestSummaryState - The current state of the forest summary tracking.\n * @param trackedSummaryProperties - The properties of the tracked summary, which must be undefined.\n */\nfunction validateReadyToTrackSummary(\n\tforestSummaryState: ForestSummaryTrackingState,\n\ttrackedSummaryProperties: TrackedSummaryProperties | undefined,\n): asserts trackedSummaryProperties is undefined {\n\tassert(\n\t\tforestSummaryState === ForestSummaryTrackingState.ReadyToTrack,\n\t\t0xc24 /* Already tracking a summary */,\n\t);\n\tassert(\n\t\ttrackedSummaryProperties === undefined,\n\t\t0xc25 /* Tracked summary properties must not be available when ready to track */,\n\t);\n}\n\n/* eslint-disable jsdoc/check-indentation */\n/**\n * Tracks and 
builds the incremental summary tree for a forest where chunks that support incremental encoding are\n * stored in a separate tree in the summary under its {@link ChunkReferenceId}.\n * The summary tree for a chunk is self-sufficient and can be independently loaded and used to reconstruct the\n * chunk's contents without any additional context from its parent.\n *\n * An example summary tree with incremental summary:\n * Forest\n * ├── ForestTree\n * ├── 0\n * | ├── contents\n * | ├── 1\n * | | ├── contents\n * | | ├── 2\n * | | | ├── contents\n * | ├── 3 - \".../Forest/ForestTree/0/1/3\"\n * ├── 4\n * | ├── contents\n * | ├── ...\n * ├── 5 - \"/.../Forest/ForestTree/5\"\n * - Forest is a summary tree node added by the shared tree and contains the following:\n * - The inline portion of the top-level forest content is stored in a summary blob called \"ForestTree\".\n * It also contains the {@link ChunkReferenceId}s of the incremental chunks under it.\n * - The summary for each incremental chunk under it is stored against its {@link ChunkReferenceId}.\n * - For each chunk, the structure of the summary tree is the same as the Forest. It contains the following:\n * - The inline portion of the chunk content is stored in a blob called \"contents\".\n * It also contains the {@link ChunkReferenceId}s of the incremental chunks under it.\n * - The summary for each incremental chunk under it is stored against its {@link ChunkReferenceId}.\n * - Chunks that do not change between summaries are summarized as handles in the summary tree.\n * @remarks\n * It may seem inconsistent that although the structure for the top-level forest tree is similar to that of\n * an incremental chunk, its content is stored in a summary blob called \"ForestTree\" while the content for\n * the incremental chunks are stored in a summary blob called \"contents\".\n * This is to keep this summary backwards compatible with old format (before incremental summaries were added)\n * where the entire forest content was in a summary blob called \"ForestTree\". So, if incremental summaries were\n * disabled, the forest content will be fully backwards compatible.\n * Note that this limits reusing the root node in a location other than root and a non-root node in the root.\n * We could phase this out by switching to write the top-level contents under \"contents\" if we want to support\n * the above. However, there is no plan to do that for now.\n *\n * TODO: AB#46752\n * Add strong types for the summary structure to document it better. 
It will help make it super clear what the actual\n * format is in a way that can easily be linked to, documented and inspected.\n */\n/* eslint-enable jsdoc/check-indentation */\nexport class ForestIncrementalSummaryBuilder implements IncrementalEncoderDecoder {\n\t/**\n\t * The next reference ID to use for a chunk.\n\t */\n\tprivate nextReferenceId: ChunkReferenceId = brand(0);\n\n\t/**\n\t * For a given summary sequence number, keeps track of a chunk's properties that will be used to generate\n\t * a summary handle for the chunk if it does not change between summaries.\n\t */\n\tprivate readonly chunkTrackingPropertiesMap: NestedMap<\n\t\tnumber,\n\t\tTreeChunk,\n\t\tChunkSummaryProperties\n\t> = new Map();\n\n\t/**\n\t * The state indicating whether a summary is currently being tracked or not.\n\t */\n\tpublic forestSummaryState: ForestSummaryTrackingState =\n\t\tForestSummaryTrackingState.ReadyToTrack;\n\n\t/**\n\t * The sequence number of the latest summary that was successful.\n\t */\n\tprivate latestSummarySequenceNumber: number = -1;\n\n\t/**\n\t * The current state of the summary being tracked.\n\t * This is undefined if no summary is currently being tracked.\n\t */\n\tprivate trackedSummaryProperties: TrackedSummaryProperties | undefined;\n\n\t/**\n\t * A map of chunk reference IDs to their encoded contents. This is typically used during the loading of the\n\t * forest to retrieve the contents of the chunks that were summarized incrementally.\n\t */\n\t/**\n\t * A map of chunk reference IDs to their {@link ChunkLoadProperties}.\n\t * This is used during the loading of the forest to track each chunk that is retrieved and decoded.\n\t */\n\tprivate readonly loadedChunksMap: Map<string, ChunkLoadProperties> = new Map();\n\n\tpublic constructor(\n\t\tprivate readonly enableIncrementalSummary: boolean,\n\t\tprivate readonly getChunkAtCursor: (cursor: ITreeCursorSynchronous) => TreeChunk[],\n\t\tpublic readonly shouldEncodeIncrementally: IncrementalEncodingPolicy,\n\t\tprivate readonly initialSequenceNumber: number,\n\t) {}\n\n\t/**\n\t * Must be called when the forest is loaded to download the encoded contents of incremental chunks.\n\t * @param services - The channel storage service to use to access the snapshot tree and download the\n\t * contents of the chunks.\n\t * @param readAndParse - A function that reads and parses a blob from the storage service.\n\t */\n\tpublic async load(\n\t\tservices: IChannelStorageService,\n\t\treadAndParseChunk: <T extends JsonCompatible<IFluidHandle>>(id: string) => Promise<T>,\n\t): Promise<void> {\n\t\tconst forestTree = services.getSnapshotTree?.();\n\t\t// Snapshot tree should be available when loading forest's contents. However, it is an optional function\n\t\t// and may not be implemented by the storage service.\n\t\tif (forestTree === undefined) {\n\t\t\treturn;\n\t\t}\n\n\t\t// Downloads the contents of incremental chunks in the given snapshot tree. Also, recursively downloads\n\t\t// the contents of incremental chunks in any sub-trees.\n\t\tconst downloadChunkContentsInTree = async (\n\t\t\tsnapshotTree: ISnapshotTree,\n\t\t\tparentTreeKey: string,\n\t\t): Promise<void> => {\n\t\t\t// All trees in the snapshot tree are for incremental chunks. 
The key is the chunk's reference ID\n\t\t\t// and the value is the snapshot tree for the chunk.\n\t\t\tfor (const [chunkReferenceId, chunkSnapshotTree] of Object.entries(snapshotTree.trees)) {\n\t\t\t\tconst chunkSubTreePath = `${parentTreeKey}${chunkReferenceId}`;\n\t\t\t\tconst chunkContentsPath = `${chunkSubTreePath}/${chunkContentsBlobKey}`;\n\t\t\t\tif (!(await services.contains(chunkContentsPath))) {\n\t\t\t\t\tthrow new LoggingError(\n\t\t\t\t\t\t`SharedTree: Cannot find contents for incremental chunk ${chunkContentsPath}`,\n\t\t\t\t\t);\n\t\t\t\t}\n\t\t\t\tconst chunkContents = await readAndParseChunk<EncodedFieldBatch>(chunkContentsPath);\n\t\t\t\tthis.loadedChunksMap.set(chunkReferenceId, {\n\t\t\t\t\tencodedContents: chunkContents,\n\t\t\t\t\tsummaryPath: chunkSubTreePath,\n\t\t\t\t});\n\n\t\t\t\tconst chunkReferenceIdNumber = Number(chunkReferenceId);\n\t\t\t\tthis.nextReferenceId = brand(\n\t\t\t\t\tMath.max(this.nextReferenceId, chunkReferenceIdNumber + 1),\n\t\t\t\t);\n\n\t\t\t\t// Recursively download the contents of chunks in this chunk's sub tree.\n\t\t\t\tawait downloadChunkContentsInTree(chunkSnapshotTree, `${chunkSubTreePath}/`);\n\t\t\t}\n\t\t};\n\t\tawait downloadChunkContentsInTree(forestTree, \"\");\n\t}\n\n\t/**\n\t * Must be called when starting a new forest summary to track it.\n\t * @param fullTree - Whether the summary is a full tree summary. If true, the summary will not contain\n\t * any summary handles. All chunks must be summarized in full.\n\t * @param incrementalSummaryContext - The context for the incremental summary that contains the sequence numbers\n\t * for the current and latest summaries.\n\t * @param stringify - Serializes content (including {@link (IFluidHandle:interface)}s) for adding to a summary blob.\n\t * @returns the behavior of the forest's incremental summary.\n\t */\n\tpublic startSummary(args: {\n\t\tfullTree: boolean;\n\t\tincrementalSummaryContext: IExperimentalIncrementalSummaryContext | undefined;\n\t\tstringify: SummaryElementStringifier;\n\t}): ForestIncrementalSummaryBehavior {\n\t\tconst { fullTree, incrementalSummaryContext, stringify } = args;\n\t\t// If there is no incremental summary context, do not summarize incrementally. This happens in two scenarios:\n\t\t// 1. When summarizing a detached container, i.e., the first ever summary.\n\t\t// 2. 
When running GC, the default behavior is to call summarize on DDS without incrementalSummaryContext.\n\t\tif (!this.enableIncrementalSummary || incrementalSummaryContext === undefined) {\n\t\t\treturn ForestIncrementalSummaryBehavior.SingleBlob;\n\t\t}\n\n\t\tvalidateReadyToTrackSummary(this.forestSummaryState, this.trackedSummaryProperties);\n\n\t\tthis.forestSummaryState = ForestSummaryTrackingState.Tracking;\n\t\tthis.latestSummarySequenceNumber = incrementalSummaryContext.latestSummarySequenceNumber;\n\t\tthis.trackedSummaryProperties = {\n\t\t\tsummarySequenceNumber: incrementalSummaryContext.summarySequenceNumber,\n\t\t\tlatestSummaryBasePath: incrementalSummaryContext.summaryPath,\n\t\t\tchunkSummaryPath: [],\n\t\t\tparentSummaryBuilder: new SummaryTreeBuilder(),\n\t\t\tfullTree,\n\t\t\tstringify,\n\t\t};\n\t\treturn ForestIncrementalSummaryBehavior.Incremental;\n\t}\n\n\t/**\n\t * {@link IncrementalEncoder.encodeIncrementalField}\n\t * @remarks Returns an empty array if the field has no content.\n\t */\n\tpublic encodeIncrementalField(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tchunkEncoder: (chunk: TreeChunk) => EncodedFieldBatch,\n\t): ChunkReferenceId[] {\n\t\t// Validate that a summary is currently being tracked and that the tracked summary properties are defined.\n\t\tvalidateTrackingSummary(this.forestSummaryState, this.trackedSummaryProperties);\n\n\t\tconst chunkReferenceIds: ChunkReferenceId[] = [];\n\t\tconst chunks = this.getChunkAtCursor(cursor);\n\t\tfor (const chunk of chunks) {\n\t\t\tlet chunkProperties: ChunkSummaryProperties;\n\n\t\t\t// Try and get the properties of the chunk from the latest successful summary.\n\t\t\t// If it exists and the summary is not a full tree, use the properties to generate a summary handle.\n\t\t\t// If it does not exist, encode the chunk and generate new properties for it.\n\t\t\tconst previousChunkProperties = tryGetFromNestedMap(\n\t\t\t\tthis.chunkTrackingPropertiesMap,\n\t\t\t\tthis.latestSummarySequenceNumber,\n\t\t\t\tchunk,\n\t\t\t);\n\t\t\tif (previousChunkProperties !== undefined && !this.trackedSummaryProperties.fullTree) {\n\t\t\t\tchunkProperties = previousChunkProperties;\n\t\t\t\tthis.trackedSummaryProperties.parentSummaryBuilder.addHandle(\n\t\t\t\t\t`${chunkProperties.referenceId}`,\n\t\t\t\t\tSummaryType.Tree,\n\t\t\t\t\t`${this.trackedSummaryProperties.latestSummaryBasePath}/${chunkProperties.summaryPath}`,\n\t\t\t\t);\n\t\t\t} else {\n\t\t\t\t// Generate a new reference ID for the chunk.\n\t\t\t\tconst newReferenceId: ChunkReferenceId = brand(this.nextReferenceId++);\n\n\t\t\t\t// Add the reference ID of this chunk to the chunk summary path and use the path as the summary path\n\t\t\t\t// for the chunk in its summary properties.\n\t\t\t\t// This is done before encoding the chunk so that the summary path is updated correctly when encoding\n\t\t\t\t// any incremental chunks that are under this chunk.\n\t\t\t\tthis.trackedSummaryProperties.chunkSummaryPath.push(newReferenceId);\n\n\t\t\t\tchunkProperties = {\n\t\t\t\t\treferenceId: newReferenceId,\n\t\t\t\t\tsummaryPath: this.trackedSummaryProperties.chunkSummaryPath.join(\"/\"),\n\t\t\t\t};\n\n\t\t\t\tconst parentSummaryBuilder = this.trackedSummaryProperties.parentSummaryBuilder;\n\t\t\t\t// Create a new summary builder for this chunk to build its summary tree which will be stored in the\n\t\t\t\t// parent's summary tree under its reference ID.\n\t\t\t\t// Before encoding the chunk, set the parent summary builder to this chunk's summary builder so that\n\t\t\t\t// any 
incremental chunks in the subtree of this chunk will use that as their parent summary builder.\n\t\t\t\tconst chunkSummaryBuilder = new SummaryTreeBuilder();\n\t\t\t\tthis.trackedSummaryProperties.parentSummaryBuilder = chunkSummaryBuilder;\n\t\t\t\tchunkSummaryBuilder.addBlob(\n\t\t\t\t\tchunkContentsBlobKey,\n\t\t\t\t\tthis.trackedSummaryProperties.stringify(chunkEncoder(chunk)),\n\t\t\t\t);\n\n\t\t\t\t// Add this chunk's summary tree to the parent's summary tree. The summary tree contains its encoded\n\t\t\t\t// contents and the summary trees of any incremental chunks under it.\n\t\t\t\tparentSummaryBuilder.addWithStats(\n\t\t\t\t\t`${newReferenceId}`,\n\t\t\t\t\tchunkSummaryBuilder.getSummaryTree(),\n\t\t\t\t);\n\n\t\t\t\t// Restore the parent summary builder and chunk summary path.\n\t\t\t\tthis.trackedSummaryProperties.parentSummaryBuilder = parentSummaryBuilder;\n\t\t\t\tthis.trackedSummaryProperties.chunkSummaryPath.pop();\n\t\t\t}\n\n\t\t\tsetInNestedMap(\n\t\t\t\tthis.chunkTrackingPropertiesMap,\n\t\t\t\tthis.trackedSummaryProperties.summarySequenceNumber,\n\t\t\t\tchunk,\n\t\t\t\tchunkProperties,\n\t\t\t);\n\t\t\tchunkReferenceIds.push(chunkProperties.referenceId);\n\t\t}\n\t\treturn chunkReferenceIds;\n\t}\n\n\t/**\n\t * Must be called after summary generation is complete to finish tracking the summary.\n\t * It clears any tracking state and deletes the tracking properties for summaries that are older than the\n\t * latest successful summary.\n\t * @param incrementalSummaryContext - The context for the incremental summary that contains the sequence numbers.\n\t * If this is undefined, the summary tree will only contain a summary blob for `forestSummaryContent`.\n\t * @param forestSummaryContent - The stringified ForestCodec output of top-level Forest content.\n\t * @returns the Forest's summary tree.\n\t */\n\tpublic completeSummary(args: {\n\t\tincrementalSummaryContext: IExperimentalIncrementalSummaryContext | undefined;\n\t\tforestSummaryContent: string;\n\t}): ISummaryTreeWithStats {\n\t\tconst { incrementalSummaryContext, forestSummaryContent } = args;\n\t\tif (!this.enableIncrementalSummary || incrementalSummaryContext === undefined) {\n\t\t\tconst summaryBuilder = new SummaryTreeBuilder();\n\t\t\tsummaryBuilder.addBlob(forestSummaryContentKey, forestSummaryContent);\n\t\t\treturn summaryBuilder.getSummaryTree();\n\t\t}\n\n\t\tvalidateTrackingSummary(this.forestSummaryState, this.trackedSummaryProperties);\n\n\t\tthis.trackedSummaryProperties.parentSummaryBuilder.addBlob(\n\t\t\tforestSummaryContentKey,\n\t\t\tforestSummaryContent,\n\t\t);\n\n\t\t// Copy over the entries from the latest summary to the current summary.\n\t\t// In the current summary, there can be fields that haven't changed since the latest summary and the chunks\n\t\t// in these fields and in any of its children weren't encoded. 
So, we need get the entries for these chunks\n\t\t// to be able to incrementally summarize them in the next summary.\n\t\tconst latestSummaryTrackingMap = this.chunkTrackingPropertiesMap.get(\n\t\t\tthis.latestSummarySequenceNumber,\n\t\t);\n\t\tconst currentSummaryTrackingMap = this.chunkTrackingPropertiesMap.get(\n\t\t\tthis.trackedSummaryProperties.summarySequenceNumber,\n\t\t);\n\t\tif (latestSummaryTrackingMap !== undefined && currentSummaryTrackingMap !== undefined) {\n\t\t\tfor (const [chunk, chunkProperties] of latestSummaryTrackingMap.entries()) {\n\t\t\t\tif (!currentSummaryTrackingMap.has(chunk)) {\n\t\t\t\t\tcurrentSummaryTrackingMap.set(chunk, chunkProperties);\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\t// Delete tracking for summaries that are older than the latest successful summary because they will\n\t\t// never be referenced again for generating summary handles.\n\t\tfor (const sequenceNumber of this.chunkTrackingPropertiesMap.keys()) {\n\t\t\tif (sequenceNumber < this.latestSummarySequenceNumber) {\n\t\t\t\tthis.chunkTrackingPropertiesMap.delete(sequenceNumber);\n\t\t\t}\n\t\t}\n\n\t\tthis.forestSummaryState = ForestSummaryTrackingState.ReadyToTrack;\n\t\tconst summaryTree = this.trackedSummaryProperties.parentSummaryBuilder.getSummaryTree();\n\t\tthis.trackedSummaryProperties = undefined;\n\t\treturn summaryTree;\n\t}\n\n\t/**\n\t * {@link IncrementalEncoder.decodeIncrementalChunk}\n\t */\n\tpublic decodeIncrementalChunk(\n\t\treferenceId: ChunkReferenceId,\n\t\tchunkDecoder: (encoded: EncodedFieldBatch) => TreeChunk,\n\t): TreeChunk {\n\t\tconst ChunkLoadProperties = this.loadedChunksMap.get(`${referenceId}`);\n\t\tassert(ChunkLoadProperties !== undefined, \"Encoded incremental chunk not found\");\n\t\tconst chunk = chunkDecoder(ChunkLoadProperties.encodedContents);\n\n\t\t// Account for the reference about to be added in `chunkTrackingPropertiesMap`\n\t\t// to ensure that no other users of this chunk think they have unique ownership.\n\t\t// This prevents prevent whoever this chunk is returned to from modifying it in-place.\n\t\tchunk.referenceAdded();\n\t\t// Track the decoded chunk. This will recreate the tracking state when the summary that this client\n\t\t// is loaded from was generated. This is needed to ensure that incremental summaries work correctly\n\t\t// when a new client starts to summarize.\n\t\tsetInNestedMap(this.chunkTrackingPropertiesMap, this.initialSequenceNumber, chunk, {\n\t\t\treferenceId,\n\t\t\tsummaryPath: ChunkLoadProperties.summaryPath,\n\t\t});\n\t\treturn chunk;\n\t}\n}\n"]}
+
{"version":3,"file":"incrementalSummaryBuilder.js","sourceRoot":"","sources":["../../../src/feature-libraries/forest-summary/incrementalSummaryBuilder.ts"],"names":[],"mappings":";AAAA;;;GAGG;;;AAEH,kEAA6D;AAK7D,qEAA4E;AAC5E,kDAM6B;AAS7B,2EAAiE;AAGjE,uEAAwE;AAIxE;;;;GAIG;AACU,QAAA,uBAAuB,GAAG,YAAY,CAAC;AAEpD;;;;;GAKG;AACH,MAAM,oBAAoB,GAAG,UAAU,CAAC;AAExC;;GAEG;AACU,QAAA,0BAA0B,GAAG;IACzC,4CAA4C;IAC5C,QAAQ,EAAE,UAAU;IACpB,wCAAwC;IACxC,YAAY,EAAE,cAAc;CACnB,CAAC;AAoFX;;GAEG;AACH,IAAY,gCAgBX;AAhBD,WAAY,gCAAgC;IAC3C;;;;OAIG;IACH,qGAAW,CAAA;IACX;;;;;;;OAOG;IACH,mGAAU,CAAA;AACX,CAAC,EAhBW,gCAAgC,gDAAhC,gCAAgC,QAgB3C;AAED;;;;GAIG;AACH,SAAS,uBAAuB,CAC/B,kBAA8C,EAC9C,wBAA8D;IAE9D,IAAA,iBAAM,EACL,kBAAkB,KAAK,kCAA0B,CAAC,QAAQ,EAC1D,KAAK,CAAC,4BAA4B,CAClC,CAAC;IACF,IAAA,iBAAM,EACL,wBAAwB,KAAK,SAAS,EACtC,KAAK,CAAC,0EAA0E,CAChF,CAAC;AACH,CAAC;AAED;;;;GAIG;AACH,SAAS,2BAA2B,CACnC,kBAA8C,EAC9C,wBAA8D;IAE9D,IAAA,iBAAM,EACL,kBAAkB,KAAK,kCAA0B,CAAC,YAAY,EAC9D,KAAK,CAAC,gCAAgC,CACtC,CAAC;IACF,IAAA,iBAAM,EACL,wBAAwB,KAAK,SAAS,EACtC,KAAK,CAAC,0EAA0E,CAChF,CAAC;AACH,CAAC;AAED,4CAA4C;AAC5C;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA2CG;AACH,2CAA2C;AAC3C,MAAa,+BAA+B;IA2C3C,YACkB,wBAAiC,EACjC,gBAAiE,EAClE,yBAAoD,EACnD,qBAA6B;QAH7B,6BAAwB,GAAxB,wBAAwB,CAAS;QACjC,qBAAgB,GAAhB,gBAAgB,CAAiD;QAClE,8BAAyB,GAAzB,yBAAyB,CAA2B;QACnD,0BAAqB,GAArB,qBAAqB,CAAQ;QA9C/C;;WAEG;QACK,oBAAe,GAAqB,IAAA,gBAAK,EAAC,CAAC,CAAC,CAAC;QAErD;;;WAGG;QACc,+BAA0B,GAIvC,IAAI,GAAG,EAAE,CAAC;QAEd;;WAEG;QACI,uBAAkB,GACxB,kCAA0B,CAAC,YAAY,CAAC;QAEzC;;WAEG;QACK,gCAA2B,GAAW,CAAC,CAAC,CAAC;QAQjD;;;WAGG;QACH;;;WAGG;QACc,oBAAe,GAAqC,IAAI,GAAG,EAAE,CAAC;IAO5E,CAAC;IAEJ;;;;;OAKG;IACI,KAAK,CAAC,IAAI,CAChB,QAAgC,EAChC,iBAAqF;QAErF,MAAM,UAAU,GAAG,QAAQ,CAAC,eAAe,EAAE,EAAE,CAAC;QAChD,wGAAwG;QACxG,qDAAqD;QACrD,IAAI,UAAU,KAAK,SAAS,EAAE,CAAC;YAC9B,OAAO;QACR,CAAC;QAED,uGAAuG;QACvG,uDAAuD;QACvD,MAAM,2BAA2B,GAAG,KAAK,EACxC,YAA2B,EAC3B,aAAqB,EACL,EAAE;YAClB,iGAAiG;YACjG,oDAAoD;YACpD,KAAK,MAAM,CAAC,gBAAgB,EAAE,iBAAiB,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,YAAY,CAAC,KAAK,CAAC,EAAE,CAAC;gBACxF,MAAM,gBAAgB,GAAG,GAAG,aAAa,GAAG,gBAAgB,EAAE,CAAC;gBAC/D,MAAM,iBAAiB,GAAG,GAAG,gBAAgB,IAAI,oBAAoB,EAAE,CAAC;gBACxE,IAAI,CAAC,CAAC,MAAM,QAAQ,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,EAAE,CAAC;oBACnD,MAAM,IAAI,uBAAY,CACrB,0DAA0D,iBAAiB,EAAE,CAC7E,CAAC;gBACH,CAAC;gBACD,MAAM,aAAa,GAAG,MAAM,iBAAiB,CAAoB,iBAAiB,CAAC,CAAC;gBACpF,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,gBAAgB,EAAE;oBAC1C,eAAe,EAAE,aAAa;oBAC9B,WAAW,EAAE,gBAAgB;iBAC7B,CAAC,CAAC;gBAEH,MAAM,sBAAsB,GAAG,MAAM,CAAC,gBAAgB,CAAC,CAAC;gBACxD,IAAI,CAAC,eAAe,GAAG,IAAA,gBAAK,EAC3B,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,eAAe,EAAE,sBAAsB,GAAG,CAAC,CAAC,CAC1D,CAAC;gBAEF,wEAAwE;gBACxE,MAAM,2BAA2B,CAAC,iBAAiB,EAAE,GAAG,gBAAgB,GAAG,CAAC,CAAC;YAC9E,CAAC;QACF,CAAC,CAAC;QACF,MAAM,2BAA2B,CAAC,UAAU,EAAE,EAAE,CAAC,CAAC;IACnD,CAAC;IAED;;;;;;;;OAQG;IACI,YAAY,CAAC,IAInB;QACA,MAAM,EAAE,QAAQ,EAAE,yBAAyB,EAAE,SAAS,EAAE,GAAG,IAAI,CAAC;QAChE,6GAA6G;QAC7G,0EAA0E;QAC1E,0GAA0G;QAC1G,IAAI,CAAC,IAAI,CAAC,wBAAwB,IAAI,yBAAyB,KAAK,SAAS,EAAE,CAAC;YAC/E,OAAO,gCAAgC,CAAC,UAAU,CAAC;QACpD,CAAC;QAED,2BAA2B,CAAC,IAAI,CAAC,kBAAkB,EAAE,IAAI,CAAC,wBAAwB,CAAC,CAAC;QAEpF,IAAI,CAAC,kBAAkB,GAAG,kCAA0B,CAAC,QAAQ,CAAC;QAC9D,IAAI,CAAC,2BAA2B,GAAG,yBAAyB,CAAC,2BAA2B,CAAC;QACzF,IAAI,CAAC,wBAAwB,GAAG;YAC/B,qBAAqB,EAAE,yBAAyB,CAAC,qBAAqB;YACtE,qBAAqB,EAAE,yBAAyB,CAAC,WAAW;YAC5D,gBAAgB,EAAE,EAAE;YACpB,oBAAoB,EAAE,IAAI,6BAAkB,EAAE;YAC9C,QAAQ;YACR,SAAS;SACT,CAAC;QACF,OAAO,gCAAgC,CAAC,WAAW,CAAC;IACrD,CAAC;IAED;;;OAGG;IACI,sBAAsB,CAC5B,MAA8B,EAC9B,YAAqD;QAErD,0GAA0G;QAC1G,uBAAuB,CAAC,IAAI,CAAC,kBAAkB,EAAE,IAAI,CAAC,wBAAwB,CAAC,C
AAC;QAEhF,MAAM,iBAAiB,GAAuB,EAAE,CAAC;QACjD,MAAM,MAAM,GAAG,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC,CAAC;QAC7C,KAAK,MAAM,KAAK,IAAI,MAAM,EAAE,CAAC;YAC5B,IAAI,eAAuC,CAAC;YAE5C,8EAA8E;YAC9E,oGAAoG;YACpG,6EAA6E;YAC7E,MAAM,uBAAuB,GAAG,IAAA,8BAAmB,EAClD,IAAI,CAAC,0BAA0B,EAC/B,IAAI,CAAC,2BAA2B,EAChC,KAAK,CACL,CAAC;YACF,IAAI,uBAAuB,KAAK,SAAS,IAAI,CAAC,IAAI,CAAC,wBAAwB,CAAC,QAAQ,EAAE,CAAC;gBACtF,eAAe,GAAG,uBAAuB,CAAC;gBAC1C,IAAI,CAAC,wBAAwB,CAAC,oBAAoB,CAAC,SAAS,CAC3D,GAAG,eAAe,CAAC,WAAW,EAAE,EAChC,gCAAW,CAAC,IAAI,EAChB,GAAG,IAAI,CAAC,wBAAwB,CAAC,qBAAqB,IAAI,eAAe,CAAC,WAAW,EAAE,CACvF,CAAC;YACH,CAAC;iBAAM,CAAC;gBACP,6CAA6C;gBAC7C,MAAM,cAAc,GAAqB,IAAA,gBAAK,EAAC,IAAI,CAAC,eAAe,EAAE,CAAC,CAAC;gBAEvE,oGAAoG;gBACpG,2CAA2C;gBAC3C,qGAAqG;gBACrG,oDAAoD;gBACpD,IAAI,CAAC,wBAAwB,CAAC,gBAAgB,CAAC,IAAI,CAAC,cAAc,CAAC,CAAC;gBAEpE,eAAe,GAAG;oBACjB,WAAW,EAAE,cAAc;oBAC3B,WAAW,EAAE,IAAI,CAAC,wBAAwB,CAAC,gBAAgB,CAAC,IAAI,CAAC,GAAG,CAAC;iBACrE,CAAC;gBAEF,MAAM,oBAAoB,GAAG,IAAI,CAAC,wBAAwB,CAAC,oBAAoB,CAAC;gBAChF,oGAAoG;gBACpG,gDAAgD;gBAChD,oGAAoG;gBACpG,qGAAqG;gBACrG,MAAM,mBAAmB,GAAG,IAAI,6BAAkB,EAAE,CAAC;gBACrD,IAAI,CAAC,wBAAwB,CAAC,oBAAoB,GAAG,mBAAmB,CAAC;gBACzE,mBAAmB,CAAC,OAAO,CAC1B,oBAAoB,EACpB,IAAI,CAAC,wBAAwB,CAAC,SAAS,CAAC,YAAY,CAAC,KAAK,CAAC,CAAC,CAC5D,CAAC;gBAEF,oGAAoG;gBACpG,qEAAqE;gBACrE,oBAAoB,CAAC,YAAY,CAChC,GAAG,cAAc,EAAE,EACnB,mBAAmB,CAAC,cAAc,EAAE,CACpC,CAAC;gBAEF,6DAA6D;gBAC7D,IAAI,CAAC,wBAAwB,CAAC,oBAAoB,GAAG,oBAAoB,CAAC;gBAC1E,IAAI,CAAC,wBAAwB,CAAC,gBAAgB,CAAC,GAAG,EAAE,CAAC;YACtD,CAAC;YAED,IAAA,yBAAc,EACb,IAAI,CAAC,0BAA0B,EAC/B,IAAI,CAAC,wBAAwB,CAAC,qBAAqB,EACnD,KAAK,EACL,eAAe,CACf,CAAC;YACF,iBAAiB,CAAC,IAAI,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC;QACrD,CAAC;QACD,OAAO,iBAAiB,CAAC;IAC1B,CAAC;IAED;;;;;;;;OAQG;IACI,eAAe,CAAC,IAGtB;QACA,MAAM,EAAE,yBAAyB,EAAE,oBAAoB,EAAE,GAAG,IAAI,CAAC;QACjE,IAAI,CAAC,IAAI,CAAC,wBAAwB,IAAI,yBAAyB,KAAK,SAAS,EAAE,CAAC;YAC/E,MAAM,cAAc,GAAG,IAAI,6BAAkB,EAAE,CAAC;YAChD,cAAc,CAAC,OAAO,CAAC,+BAAuB,EAAE,oBAAoB,CAAC,CAAC;YACtE,OAAO,cAAc,CAAC,cAAc,EAAE,CAAC;QACxC,CAAC;QAED,uBAAuB,CAAC,IAAI,CAAC,kBAAkB,EAAE,IAAI,CAAC,wBAAwB,CAAC,CAAC;QAEhF,IAAI,CAAC,wBAAwB,CAAC,oBAAoB,CAAC,OAAO,CACzD,+BAAuB,EACvB,oBAAoB,CACpB,CAAC;QAEF,wEAAwE;QACxE,2GAA2G;QAC3G,2GAA2G;QAC3G,kEAAkE;QAClE,MAAM,wBAAwB,GAAG,IAAI,CAAC,0BAA0B,CAAC,GAAG,CACnE,IAAI,CAAC,2BAA2B,CAChC,CAAC;QACF,MAAM,yBAAyB,GAAG,IAAI,CAAC,0BAA0B,CAAC,GAAG,CACpE,IAAI,CAAC,wBAAwB,CAAC,qBAAqB,CACnD,CAAC;QACF,IAAI,wBAAwB,KAAK,SAAS,IAAI,yBAAyB,KAAK,SAAS,EAAE,CAAC;YACvF,KAAK,MAAM,CAAC,KAAK,EAAE,eAAe,CAAC,IAAI,wBAAwB,CAAC,OAAO,EAAE,EAAE,CAAC;gBAC3E,IAAI,CAAC,yBAAyB,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC;oBAC3C,yBAAyB,CAAC,GAAG,CAAC,KAAK,EAAE,eAAe,CAAC,CAAC;gBACvD,CAAC;YACF,CAAC;QACF,CAAC;QAED,oGAAoG;QACpG,4DAA4D;QAC5D,KAAK,MAAM,cAAc,IAAI,IAAI,CAAC,0BAA0B,CAAC,IAAI,EAAE,EAAE,CAAC;YACrE,IAAI,cAAc,GAAG,IAAI,CAAC,2BAA2B,EAAE,CAAC;gBACvD,IAAI,CAAC,0BAA0B,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;YACxD,CAAC;QACF,CAAC;QAED,IAAI,CAAC,kBAAkB,GAAG,kCAA0B,CAAC,YAAY,CAAC;QAClE,MAAM,WAAW,GAAG,IAAI,CAAC,wBAAwB,CAAC,oBAAoB,CAAC,cAAc,EAAE,CAAC;QACxF,IAAI,CAAC,wBAAwB,GAAG,SAAS,CAAC;QAC1C,OAAO,WAAW,CAAC;IACpB,CAAC;IAED;;OAEG;IACI,sBAAsB,CAC5B,WAA6B,EAC7B,YAAuD;QAEvD,MAAM,mBAAmB,GAAG,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,GAAG,WAAW,EAAE,CAAC,CAAC;QACvE,IAAA,iBAAM,EAAC,mBAAmB,KAAK,SAAS,EAAE,KAAK,CAAC,yCAAyC,CAAC,CAAC;QAC3F,MAAM,KAAK,GAAG,YAAY,CAAC,mBAAmB,CAAC,eAAe,CAAC,CAAC;QAEhE,8EAA8E;QAC9E,gFAAgF;QAChF,sFAAsF;QACtF,KAAK,CAAC,cAAc,EAAE,CAAC;QACvB,mGAAmG;QACnG,mGAAmG;QACnG,yCAAyC;QACzC,IAAA,yBAAc,EAAC,IAAI,CAAC,0BAA0B,EAAE,IAAI,CAAC,qBAAqB,EAAE,KAAK,EAAE;YAClF,WAAW;YACX,WAAW,EAAE,mBAAmB,CAAC,WAAW;SAC5C,CAAC,CAA
C;QACH,OAAO,KAAK,CAAC;IACd,CAAC;CACD;AA9SD,0EA8SC","sourcesContent":["/*!\n * Copyright (c) Microsoft Corporation and contributors. All rights reserved.\n * Licensed under the MIT License.\n */\n\nimport { assert } from \"@fluidframework/core-utils/internal\";\nimport type {\n\tIExperimentalIncrementalSummaryContext,\n\tISummaryTreeWithStats,\n} from \"@fluidframework/runtime-definitions/internal\";\nimport { SummaryTreeBuilder } from \"@fluidframework/runtime-utils/internal\";\nimport {\n\tbrand,\n\tsetInNestedMap,\n\ttryGetFromNestedMap,\n\ttype JsonCompatible,\n\ttype NestedMap,\n} from \"../../util/index.js\";\nimport type {\n\tChunkReferenceId,\n\tEncodedFieldBatch,\n\tIncrementalEncoderDecoder,\n\tIncrementalEncodingPolicy,\n\tTreeChunk,\n} from \"../chunked-forest/index.js\";\nimport type { ITreeCursorSynchronous } from \"../../core/index.js\";\nimport { SummaryType } from \"@fluidframework/driver-definitions\";\nimport type { IChannelStorageService } from \"@fluidframework/datastore-definitions/internal\";\nimport type { ISnapshotTree } from \"@fluidframework/driver-definitions/internal\";\nimport { LoggingError } from \"@fluidframework/telemetry-utils/internal\";\nimport type { IFluidHandle } from \"@fluidframework/core-interfaces\";\nimport type { SummaryElementStringifier } from \"../../shared-tree-core/index.js\";\n\n/**\n * The key for the blob under ForestSummarizer's root.\n * This blob contains the ForestCodec's output.\n * See {@link ForestIncrementalSummaryBuilder} for details on the summary structure.\n */\nexport const forestSummaryContentKey = \"ForestTree\";\n\n/**\n * The contents of an incremental chunk is under a summary tree node with its {@link ChunkReferenceId} as the key.\n * The inline portion of the chunk content is encoded with the forest codec is stored in a blob with this key.\n * The rest of the chunk contents is stored in the summary tree under the summary tree node.\n * See the summary format in {@link ForestIncrementalSummaryBuilder} for more details.\n */\nconst chunkContentsBlobKey = \"contents\";\n\n/**\n * State that tells whether a summary is currently being tracked.\n */\nexport const ForestSummaryTrackingState = {\n\t/** A summary is currently being tracked. */\n\tTracking: \"Tracking\",\n\t/** A summary is ready to be tracked. 
*/\n\tReadyToTrack: \"ReadyToTrack\",\n} as const;\nexport type ForestSummaryTrackingState =\n\t(typeof ForestSummaryTrackingState)[keyof typeof ForestSummaryTrackingState];\n\n/**\n * The properties of a chunk tracked during the loading process.\n * These are used to identify a chunk when it is decoded and recreate the tracking state\n * as it was when the summary that the client is loading from was generated.\n *\n * An encoded chunk, paired with a location it can be reused / reloaded from.\n * @remarks\n * This identifies a location in a specific summary where `encodedContents` was loaded from.\n *\n * When summarizing, Fluid always ensures the summary that the summary client is allowed to reuse content from\n * is the one it loaded from, so tracking this on load is sufficient for now:\n * there is no need to track the equivalent data when summarizing.\n */\ninterface ChunkLoadProperties {\n\t/**\n\t * The encoded contents of the chunk.\n\t */\n\treadonly encodedContents: EncodedFieldBatch;\n\t/**\n\t * The path for this chunk's contents in the summary tree relative to the forest's summary tree.\n\t * This path is used to generate a summary handle for the chunk if it doesn't change between summaries.\n\t */\n\treadonly summaryPath: string;\n}\n\n/**\n * The properties of a chunk that is tracked for every summary.\n * If a chunk doesn't change between summaries,\n * these properties will be used to generate a summary handle for the chunk.\n */\ninterface ChunkSummaryProperties {\n\t/**\n\t * The reference ID of the chunk which uniquely identifies it under its parent's summary tree.\n\t * The summary for this chunk will be stored against this reference ID as key in the summary tree.\n\t */\n\treadonly referenceId: ChunkReferenceId;\n\t/**\n\t * The path for this chunk's summary in the summary tree relative to the forest's summary tree.\n\t * This path is used to generate a summary handle for the chunk if it doesn't change between summaries.\n\t */\n\treadonly summaryPath: string;\n}\n\n/**\n * The properties of a summary being tracked.\n */\ninterface TrackedSummaryProperties {\n\t/**\n\t * The sequence number of the summary in progress.\n\t */\n\treadonly summarySequenceNumber: number;\n\t/**\n\t * The base path for the latest summary that was successful.\n\t * This is used to generate summary handles.\n\t */\n\treadonly latestSummaryBasePath: string;\n\t/**\n\t * Whether the summary being tracked is a full tree summary.\n\t * If true, the summary will not contain any summary handles. 
All chunks must be summarized in full.\n\t */\n\treadonly fullTree: boolean;\n\t/**\n\t * Represents the path of a chunk in the summary tree relative to the forest's summary tree.\n\t * Each item in the array is the {@link ChunkReferenceId} of a chunk in the summary tree starting\n\t * from the chunk under forest summary tree.\n\t * When a chunk is summarized, this array will be used to generate the path for the chunk's summary in the\n\t * summary tree.\n\t */\n\treadonly chunkSummaryPath: ChunkReferenceId[];\n\t/**\n\t * The parent summary builder to use to build the incremental summary tree.\n\t * When a chunk is being summarized, it will add its summary to this builder against its reference ID.\n\t */\n\tparentSummaryBuilder: SummaryTreeBuilder;\n\t/**\n\t * Serializes content (including {@link (IFluidHandle:interface)}s) for adding to a summary blob.\n\t */\n\tstringify: SummaryElementStringifier;\n}\n\n/**\n * The behavior of the forest's incremental summary - whether the summary should be a single blob or incremental.\n */\nexport enum ForestIncrementalSummaryBehavior {\n\t/**\n\t * The forest can encode chunks incrementally, i.e., chunks that support incremental encoding will be encoded\n\t * separately - they will be added to a separate tree.\n\t * The incremental summary format is described in {@link ForestIncrementalSummaryBuilder}.\n\t */\n\tIncremental,\n\t/**\n\t * The forest should encode all of it's data in a single summary blob.\n\t * @remarks\n\t * The format of the summary will be the same as the old format (pre-incremental summaries) and is fully\n\t * backwards compatible with the old format. The summary will basically look like an incremental summary\n\t * with no incremental fields - it will only contain the \"ForestTree\" blob in the summary format described\n\t * in {@link ForestIncrementalSummaryBuilder}.\n\t */\n\tSingleBlob,\n}\n\n/**\n * Validates that a summary is currently being tracked and that the tracked summary properties are defined.\n * @param forestSummaryState - The current state of the forest summary tracking.\n * @param trackedSummaryProperties - The properties of the tracked summary, which must be available.\n */\nfunction validateTrackingSummary(\n\tforestSummaryState: ForestSummaryTrackingState,\n\ttrackedSummaryProperties: TrackedSummaryProperties | undefined,\n): asserts trackedSummaryProperties is TrackedSummaryProperties {\n\tassert(\n\t\tforestSummaryState === ForestSummaryTrackingState.Tracking,\n\t\t0xc22 /* Not tracking a summary */,\n\t);\n\tassert(\n\t\ttrackedSummaryProperties !== undefined,\n\t\t0xc23 /* Tracked summary properties must be available when tracking a summary */,\n\t);\n}\n\n/**\n * Validates that a summary is ready to be tracked and that the tracked summary properties are undefined.\n * @param forestSummaryState - The current state of the forest summary tracking.\n * @param trackedSummaryProperties - The properties of the tracked summary, which must be undefined.\n */\nfunction validateReadyToTrackSummary(\n\tforestSummaryState: ForestSummaryTrackingState,\n\ttrackedSummaryProperties: TrackedSummaryProperties | undefined,\n): asserts trackedSummaryProperties is undefined {\n\tassert(\n\t\tforestSummaryState === ForestSummaryTrackingState.ReadyToTrack,\n\t\t0xc24 /* Already tracking a summary */,\n\t);\n\tassert(\n\t\ttrackedSummaryProperties === undefined,\n\t\t0xc25 /* Tracked summary properties must not be available when ready to track */,\n\t);\n}\n\n/* eslint-disable jsdoc/check-indentation */\n/**\n * Tracks and 
builds the incremental summary tree for a forest where chunks that support incremental encoding are\n * stored in a separate tree in the summary under its {@link ChunkReferenceId}.\n * The summary tree for a chunk is self-sufficient and can be independently loaded and used to reconstruct the\n * chunk's contents without any additional context from its parent.\n *\n * An example summary tree with incremental summary:\n * Forest\n * ├── ForestTree\n * ├── 0\n * | ├── contents\n * | ├── 1\n * | | ├── contents\n * | | ├── 2\n * | | | ├── contents\n * | ├── 3 - \".../Forest/ForestTree/0/1/3\"\n * ├── 4\n * | ├── contents\n * | ├── ...\n * ├── 5 - \"/.../Forest/ForestTree/5\"\n * - Forest is a summary tree node added by the shared tree and contains the following:\n * - The inline portion of the top-level forest content is stored in a summary blob called \"ForestTree\".\n * It also contains the {@link ChunkReferenceId}s of the incremental chunks under it.\n * - The summary for each incremental chunk under it is stored against its {@link ChunkReferenceId}.\n * - For each chunk, the structure of the summary tree is the same as the Forest. It contains the following:\n * - The inline portion of the chunk content is stored in a blob called \"contents\".\n * It also contains the {@link ChunkReferenceId}s of the incremental chunks under it.\n * - The summary for each incremental chunk under it is stored against its {@link ChunkReferenceId}.\n * - Chunks that do not change between summaries are summarized as handles in the summary tree.\n * @remarks\n * It may seem inconsistent that although the structure for the top-level forest tree is similar to that of\n * an incremental chunk, its content is stored in a summary blob called \"ForestTree\" while the content for\n * the incremental chunks are stored in a summary blob called \"contents\".\n * This is to keep this summary backwards compatible with old format (before incremental summaries were added)\n * where the entire forest content was in a summary blob called \"ForestTree\". So, if incremental summaries were\n * disabled, the forest content will be fully backwards compatible.\n * Note that this limits reusing the root node in a location other than root and a non-root node in the root.\n * We could phase this out by switching to write the top-level contents under \"contents\" if we want to support\n * the above. However, there is no plan to do that for now.\n *\n * TODO: AB#46752\n * Add strong types for the summary structure to document it better. 
It will help make it super clear what the actual\n * format is in a way that can easily be linked to, documented and inspected.\n */\n/* eslint-enable jsdoc/check-indentation */\nexport class ForestIncrementalSummaryBuilder implements IncrementalEncoderDecoder {\n\t/**\n\t * The next reference ID to use for a chunk.\n\t */\n\tprivate nextReferenceId: ChunkReferenceId = brand(0);\n\n\t/**\n\t * For a given summary sequence number, keeps track of a chunk's properties that will be used to generate\n\t * a summary handle for the chunk if it does not change between summaries.\n\t */\n\tprivate readonly chunkTrackingPropertiesMap: NestedMap<\n\t\tnumber,\n\t\tTreeChunk,\n\t\tChunkSummaryProperties\n\t> = new Map();\n\n\t/**\n\t * The state indicating whether a summary is currently being tracked or not.\n\t */\n\tpublic forestSummaryState: ForestSummaryTrackingState =\n\t\tForestSummaryTrackingState.ReadyToTrack;\n\n\t/**\n\t * The sequence number of the latest summary that was successful.\n\t */\n\tprivate latestSummarySequenceNumber: number = -1;\n\n\t/**\n\t * The current state of the summary being tracked.\n\t * This is undefined if no summary is currently being tracked.\n\t */\n\tprivate trackedSummaryProperties: TrackedSummaryProperties | undefined;\n\n\t/**\n\t * A map of chunk reference IDs to their encoded contents. This is typically used during the loading of the\n\t * forest to retrieve the contents of the chunks that were summarized incrementally.\n\t */\n\t/**\n\t * A map of chunk reference IDs to their {@link ChunkLoadProperties}.\n\t * This is used during the loading of the forest to track each chunk that is retrieved and decoded.\n\t */\n\tprivate readonly loadedChunksMap: Map<string, ChunkLoadProperties> = new Map();\n\n\tpublic constructor(\n\t\tprivate readonly enableIncrementalSummary: boolean,\n\t\tprivate readonly getChunkAtCursor: (cursor: ITreeCursorSynchronous) => TreeChunk[],\n\t\tpublic readonly shouldEncodeIncrementally: IncrementalEncodingPolicy,\n\t\tprivate readonly initialSequenceNumber: number,\n\t) {}\n\n\t/**\n\t * Must be called when the forest is loaded to download the encoded contents of incremental chunks.\n\t * @param services - The channel storage service to use to access the snapshot tree and download the\n\t * contents of the chunks.\n\t * @param readAndParse - A function that reads and parses a blob from the storage service.\n\t */\n\tpublic async load(\n\t\tservices: IChannelStorageService,\n\t\treadAndParseChunk: <T extends JsonCompatible<IFluidHandle>>(id: string) => Promise<T>,\n\t): Promise<void> {\n\t\tconst forestTree = services.getSnapshotTree?.();\n\t\t// Snapshot tree should be available when loading forest's contents. However, it is an optional function\n\t\t// and may not be implemented by the storage service.\n\t\tif (forestTree === undefined) {\n\t\t\treturn;\n\t\t}\n\n\t\t// Downloads the contents of incremental chunks in the given snapshot tree. Also, recursively downloads\n\t\t// the contents of incremental chunks in any sub-trees.\n\t\tconst downloadChunkContentsInTree = async (\n\t\t\tsnapshotTree: ISnapshotTree,\n\t\t\tparentTreeKey: string,\n\t\t): Promise<void> => {\n\t\t\t// All trees in the snapshot tree are for incremental chunks. 
The key is the chunk's reference ID\n\t\t\t// and the value is the snapshot tree for the chunk.\n\t\t\tfor (const [chunkReferenceId, chunkSnapshotTree] of Object.entries(snapshotTree.trees)) {\n\t\t\t\tconst chunkSubTreePath = `${parentTreeKey}${chunkReferenceId}`;\n\t\t\t\tconst chunkContentsPath = `${chunkSubTreePath}/${chunkContentsBlobKey}`;\n\t\t\t\tif (!(await services.contains(chunkContentsPath))) {\n\t\t\t\t\tthrow new LoggingError(\n\t\t\t\t\t\t`SharedTree: Cannot find contents for incremental chunk ${chunkContentsPath}`,\n\t\t\t\t\t);\n\t\t\t\t}\n\t\t\t\tconst chunkContents = await readAndParseChunk<EncodedFieldBatch>(chunkContentsPath);\n\t\t\t\tthis.loadedChunksMap.set(chunkReferenceId, {\n\t\t\t\t\tencodedContents: chunkContents,\n\t\t\t\t\tsummaryPath: chunkSubTreePath,\n\t\t\t\t});\n\n\t\t\t\tconst chunkReferenceIdNumber = Number(chunkReferenceId);\n\t\t\t\tthis.nextReferenceId = brand(\n\t\t\t\t\tMath.max(this.nextReferenceId, chunkReferenceIdNumber + 1),\n\t\t\t\t);\n\n\t\t\t\t// Recursively download the contents of chunks in this chunk's sub tree.\n\t\t\t\tawait downloadChunkContentsInTree(chunkSnapshotTree, `${chunkSubTreePath}/`);\n\t\t\t}\n\t\t};\n\t\tawait downloadChunkContentsInTree(forestTree, \"\");\n\t}\n\n\t/**\n\t * Must be called when starting a new forest summary to track it.\n\t * @param fullTree - Whether the summary is a full tree summary. If true, the summary will not contain\n\t * any summary handles. All chunks must be summarized in full.\n\t * @param incrementalSummaryContext - The context for the incremental summary that contains the sequence numbers\n\t * for the current and latest summaries.\n\t * @param stringify - Serializes content (including {@link (IFluidHandle:interface)}s) for adding to a summary blob.\n\t * @returns the behavior of the forest's incremental summary.\n\t */\n\tpublic startSummary(args: {\n\t\tfullTree: boolean;\n\t\tincrementalSummaryContext: IExperimentalIncrementalSummaryContext | undefined;\n\t\tstringify: SummaryElementStringifier;\n\t}): ForestIncrementalSummaryBehavior {\n\t\tconst { fullTree, incrementalSummaryContext, stringify } = args;\n\t\t// If there is no incremental summary context, do not summarize incrementally. This happens in two scenarios:\n\t\t// 1. When summarizing a detached container, i.e., the first ever summary.\n\t\t// 2. 
When running GC, the default behavior is to call summarize on DDS without incrementalSummaryContext.\n\t\tif (!this.enableIncrementalSummary || incrementalSummaryContext === undefined) {\n\t\t\treturn ForestIncrementalSummaryBehavior.SingleBlob;\n\t\t}\n\n\t\tvalidateReadyToTrackSummary(this.forestSummaryState, this.trackedSummaryProperties);\n\n\t\tthis.forestSummaryState = ForestSummaryTrackingState.Tracking;\n\t\tthis.latestSummarySequenceNumber = incrementalSummaryContext.latestSummarySequenceNumber;\n\t\tthis.trackedSummaryProperties = {\n\t\t\tsummarySequenceNumber: incrementalSummaryContext.summarySequenceNumber,\n\t\t\tlatestSummaryBasePath: incrementalSummaryContext.summaryPath,\n\t\t\tchunkSummaryPath: [],\n\t\t\tparentSummaryBuilder: new SummaryTreeBuilder(),\n\t\t\tfullTree,\n\t\t\tstringify,\n\t\t};\n\t\treturn ForestIncrementalSummaryBehavior.Incremental;\n\t}\n\n\t/**\n\t * {@link IncrementalEncoder.encodeIncrementalField}\n\t * @remarks Returns an empty array if the field has no content.\n\t */\n\tpublic encodeIncrementalField(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tchunkEncoder: (chunk: TreeChunk) => EncodedFieldBatch,\n\t): ChunkReferenceId[] {\n\t\t// Validate that a summary is currently being tracked and that the tracked summary properties are defined.\n\t\tvalidateTrackingSummary(this.forestSummaryState, this.trackedSummaryProperties);\n\n\t\tconst chunkReferenceIds: ChunkReferenceId[] = [];\n\t\tconst chunks = this.getChunkAtCursor(cursor);\n\t\tfor (const chunk of chunks) {\n\t\t\tlet chunkProperties: ChunkSummaryProperties;\n\n\t\t\t// Try and get the properties of the chunk from the latest successful summary.\n\t\t\t// If it exists and the summary is not a full tree, use the properties to generate a summary handle.\n\t\t\t// If it does not exist, encode the chunk and generate new properties for it.\n\t\t\tconst previousChunkProperties = tryGetFromNestedMap(\n\t\t\t\tthis.chunkTrackingPropertiesMap,\n\t\t\t\tthis.latestSummarySequenceNumber,\n\t\t\t\tchunk,\n\t\t\t);\n\t\t\tif (previousChunkProperties !== undefined && !this.trackedSummaryProperties.fullTree) {\n\t\t\t\tchunkProperties = previousChunkProperties;\n\t\t\t\tthis.trackedSummaryProperties.parentSummaryBuilder.addHandle(\n\t\t\t\t\t`${chunkProperties.referenceId}`,\n\t\t\t\t\tSummaryType.Tree,\n\t\t\t\t\t`${this.trackedSummaryProperties.latestSummaryBasePath}/${chunkProperties.summaryPath}`,\n\t\t\t\t);\n\t\t\t} else {\n\t\t\t\t// Generate a new reference ID for the chunk.\n\t\t\t\tconst newReferenceId: ChunkReferenceId = brand(this.nextReferenceId++);\n\n\t\t\t\t// Add the reference ID of this chunk to the chunk summary path and use the path as the summary path\n\t\t\t\t// for the chunk in its summary properties.\n\t\t\t\t// This is done before encoding the chunk so that the summary path is updated correctly when encoding\n\t\t\t\t// any incremental chunks that are under this chunk.\n\t\t\t\tthis.trackedSummaryProperties.chunkSummaryPath.push(newReferenceId);\n\n\t\t\t\tchunkProperties = {\n\t\t\t\t\treferenceId: newReferenceId,\n\t\t\t\t\tsummaryPath: this.trackedSummaryProperties.chunkSummaryPath.join(\"/\"),\n\t\t\t\t};\n\n\t\t\t\tconst parentSummaryBuilder = this.trackedSummaryProperties.parentSummaryBuilder;\n\t\t\t\t// Create a new summary builder for this chunk to build its summary tree which will be stored in the\n\t\t\t\t// parent's summary tree under its reference ID.\n\t\t\t\t// Before encoding the chunk, set the parent summary builder to this chunk's summary builder so that\n\t\t\t\t// any 
incremental chunks in the subtree of this chunk will use that as their parent summary builder.\n\t\t\t\tconst chunkSummaryBuilder = new SummaryTreeBuilder();\n\t\t\t\tthis.trackedSummaryProperties.parentSummaryBuilder = chunkSummaryBuilder;\n\t\t\t\tchunkSummaryBuilder.addBlob(\n\t\t\t\t\tchunkContentsBlobKey,\n\t\t\t\t\tthis.trackedSummaryProperties.stringify(chunkEncoder(chunk)),\n\t\t\t\t);\n\n\t\t\t\t// Add this chunk's summary tree to the parent's summary tree. The summary tree contains its encoded\n\t\t\t\t// contents and the summary trees of any incremental chunks under it.\n\t\t\t\tparentSummaryBuilder.addWithStats(\n\t\t\t\t\t`${newReferenceId}`,\n\t\t\t\t\tchunkSummaryBuilder.getSummaryTree(),\n\t\t\t\t);\n\n\t\t\t\t// Restore the parent summary builder and chunk summary path.\n\t\t\t\tthis.trackedSummaryProperties.parentSummaryBuilder = parentSummaryBuilder;\n\t\t\t\tthis.trackedSummaryProperties.chunkSummaryPath.pop();\n\t\t\t}\n\n\t\t\tsetInNestedMap(\n\t\t\t\tthis.chunkTrackingPropertiesMap,\n\t\t\t\tthis.trackedSummaryProperties.summarySequenceNumber,\n\t\t\t\tchunk,\n\t\t\t\tchunkProperties,\n\t\t\t);\n\t\t\tchunkReferenceIds.push(chunkProperties.referenceId);\n\t\t}\n\t\treturn chunkReferenceIds;\n\t}\n\n\t/**\n\t * Must be called after summary generation is complete to finish tracking the summary.\n\t * It clears any tracking state and deletes the tracking properties for summaries that are older than the\n\t * latest successful summary.\n\t * @param incrementalSummaryContext - The context for the incremental summary that contains the sequence numbers.\n\t * If this is undefined, the summary tree will only contain a summary blob for `forestSummaryContent`.\n\t * @param forestSummaryContent - The stringified ForestCodec output of top-level Forest content.\n\t * @returns the Forest's summary tree.\n\t */\n\tpublic completeSummary(args: {\n\t\tincrementalSummaryContext: IExperimentalIncrementalSummaryContext | undefined;\n\t\tforestSummaryContent: string;\n\t}): ISummaryTreeWithStats {\n\t\tconst { incrementalSummaryContext, forestSummaryContent } = args;\n\t\tif (!this.enableIncrementalSummary || incrementalSummaryContext === undefined) {\n\t\t\tconst summaryBuilder = new SummaryTreeBuilder();\n\t\t\tsummaryBuilder.addBlob(forestSummaryContentKey, forestSummaryContent);\n\t\t\treturn summaryBuilder.getSummaryTree();\n\t\t}\n\n\t\tvalidateTrackingSummary(this.forestSummaryState, this.trackedSummaryProperties);\n\n\t\tthis.trackedSummaryProperties.parentSummaryBuilder.addBlob(\n\t\t\tforestSummaryContentKey,\n\t\t\tforestSummaryContent,\n\t\t);\n\n\t\t// Copy over the entries from the latest summary to the current summary.\n\t\t// In the current summary, there can be fields that haven't changed since the latest summary and the chunks\n\t\t// in these fields and in any of its children weren't encoded. 
So, we need get the entries for these chunks\n\t\t// to be able to incrementally summarize them in the next summary.\n\t\tconst latestSummaryTrackingMap = this.chunkTrackingPropertiesMap.get(\n\t\t\tthis.latestSummarySequenceNumber,\n\t\t);\n\t\tconst currentSummaryTrackingMap = this.chunkTrackingPropertiesMap.get(\n\t\t\tthis.trackedSummaryProperties.summarySequenceNumber,\n\t\t);\n\t\tif (latestSummaryTrackingMap !== undefined && currentSummaryTrackingMap !== undefined) {\n\t\t\tfor (const [chunk, chunkProperties] of latestSummaryTrackingMap.entries()) {\n\t\t\t\tif (!currentSummaryTrackingMap.has(chunk)) {\n\t\t\t\t\tcurrentSummaryTrackingMap.set(chunk, chunkProperties);\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\t// Delete tracking for summaries that are older than the latest successful summary because they will\n\t\t// never be referenced again for generating summary handles.\n\t\tfor (const sequenceNumber of this.chunkTrackingPropertiesMap.keys()) {\n\t\t\tif (sequenceNumber < this.latestSummarySequenceNumber) {\n\t\t\t\tthis.chunkTrackingPropertiesMap.delete(sequenceNumber);\n\t\t\t}\n\t\t}\n\n\t\tthis.forestSummaryState = ForestSummaryTrackingState.ReadyToTrack;\n\t\tconst summaryTree = this.trackedSummaryProperties.parentSummaryBuilder.getSummaryTree();\n\t\tthis.trackedSummaryProperties = undefined;\n\t\treturn summaryTree;\n\t}\n\n\t/**\n\t * {@link IncrementalEncoder.decodeIncrementalChunk}\n\t */\n\tpublic decodeIncrementalChunk(\n\t\treferenceId: ChunkReferenceId,\n\t\tchunkDecoder: (encoded: EncodedFieldBatch) => TreeChunk,\n\t): TreeChunk {\n\t\tconst ChunkLoadProperties = this.loadedChunksMap.get(`${referenceId}`);\n\t\tassert(ChunkLoadProperties !== undefined, 0xc86 /* Encoded incremental chunk not found */);\n\t\tconst chunk = chunkDecoder(ChunkLoadProperties.encodedContents);\n\n\t\t// Account for the reference about to be added in `chunkTrackingPropertiesMap`\n\t\t// to ensure that no other users of this chunk think they have unique ownership.\n\t\t// This prevents prevent whoever this chunk is returned to from modifying it in-place.\n\t\tchunk.referenceAdded();\n\t\t// Track the decoded chunk. This will recreate the tracking state when the summary that this client\n\t\t// is loaded from was generated. This is needed to ensure that incremental summaries work correctly\n\t\t// when a new client starts to summarize.\n\t\tsetInNestedMap(this.chunkTrackingPropertiesMap, this.initialSequenceNumber, chunk, {\n\t\t\treferenceId,\n\t\t\tsummaryPath: ChunkLoadProperties.summaryPath,\n\t\t});\n\t\treturn chunk;\n\t}\n}\n"]}
|
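The `ForestIncrementalSummaryBuilder` doc comment embedded in the source map above describes how each incrementally encoded chunk gets its own summary subtree keyed by its `ChunkReferenceId`, and how a chunk that has not changed since the latest successful summary is written as a summary handle instead of being re-encoded. A minimal, self-contained sketch of that handle-versus-re-encode decision (plain TypeScript; the types and helper below are illustrative stand-ins, not exports of this package):

```typescript
// Illustrative sketch only: mirrors the decision made in encodeIncrementalField above
// (reuse a handle for a chunk already present in the latest successful summary,
// otherwise re-encode it under a freshly allocated reference ID).
type ChunkReferenceId = number;

interface ChunkSummaryProperties {
	referenceId: ChunkReferenceId;
	/** Path of the chunk's subtree inside the forest summary, e.g. "0/1/2". */
	summaryPath: string;
}

type ChunkSummary =
	| { kind: "handle"; path: string }
	| { kind: "tree"; properties: ChunkSummaryProperties };

function summarizeChunk<TChunk>(
	chunk: TChunk,
	latestSummaryChunks: ReadonlyMap<TChunk, ChunkSummaryProperties>,
	latestSummaryBasePath: string,
	fullTree: boolean,
	allocateReferenceId: () => ChunkReferenceId,
	parentPath: readonly ChunkReferenceId[],
): ChunkSummary {
	const previous = latestSummaryChunks.get(chunk);
	if (previous !== undefined && !fullTree) {
		// Unchanged since the latest successful summary: point at its previous location.
		return { kind: "handle", path: `${latestSummaryBasePath}/${previous.summaryPath}` };
	}
	// Changed chunk (or a full-tree summary): encode it under a new reference ID,
	// nested beneath its parent chunk's subtree.
	const referenceId = allocateReferenceId();
	return {
		kind: "tree",
		properties: { referenceId, summaryPath: [...parentPath, referenceId].join("/") },
	};
}
```

In the builder itself the path is a mutable stack: a chunk's reference ID is pushed before its subtree is encoded and popped afterwards, which is how nested incremental chunks end up under their parent chunk's subtree in the summary.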
package/dist/packageVersion.d.ts
CHANGED

@@ -5,5 +5,5 @@
  * THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY
  */
 export declare const pkgName = "@fluidframework/tree";
-export declare const pkgVersion = "2.70.0-361788";
+export declare const pkgVersion = "2.70.0";
 //# sourceMappingURL=packageVersion.d.ts.map

package/dist/packageVersion.d.ts.map
CHANGED

@@ -1 +1 @@
-{"version":3,"file":"packageVersion.d.ts","sourceRoot":"","sources":["../src/packageVersion.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAEH,eAAO,MAAM,OAAO,yBAAyB,CAAC;AAC9C,eAAO,MAAM,UAAU,
+{"version":3,"file":"packageVersion.d.ts","sourceRoot":"","sources":["../src/packageVersion.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAEH,eAAO,MAAM,OAAO,yBAAyB,CAAC;AAC9C,eAAO,MAAM,UAAU,WAAW,CAAC"}

package/dist/packageVersion.js
CHANGED

@@ -8,5 +8,5 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.pkgVersion = exports.pkgName = void 0;
 exports.pkgName = "@fluidframework/tree";
-exports.pkgVersion = "2.70.0-361788";
+exports.pkgVersion = "2.70.0";
 //# sourceMappingURL=packageVersion.js.map

package/dist/packageVersion.js.map
CHANGED

@@ -1 +1 @@
-{"version":3,"file":"packageVersion.js","sourceRoot":"","sources":["../src/packageVersion.ts"],"names":[],"mappings":";AAAA;;;;;GAKG;;;AAEU,QAAA,OAAO,GAAG,sBAAsB,CAAC;AACjC,QAAA,UAAU,GAAG,
+{"version":3,"file":"packageVersion.js","sourceRoot":"","sources":["../src/packageVersion.ts"],"names":[],"mappings":";AAAA;;;;;GAKG;;;AAEU,QAAA,OAAO,GAAG,sBAAsB,CAAC;AACjC,QAAA,UAAU,GAAG,QAAQ,CAAC","sourcesContent":["/*!\n * Copyright (c) Microsoft Corporation and contributors. All rights reserved.\n * Licensed under the MIT License.\n *\n * THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY\n */\n\nexport const pkgName = \"@fluidframework/tree\";\nexport const pkgVersion = \"2.70.0\";\n"]}

package/dist/simple-tree/api/incrementalAllowedTypes.js
CHANGED

@@ -82,7 +82,7 @@ function getShouldIncrementallySummarizeAllowedTypes(rootSchema) {
             return false;
         }
         const allowedTypes = (0, index_js_3.oneFromIterable)((0, index_js_1.getTreeNodeSchemaPrivateData)(targetNode).childAllowedTypes);
-        (0, internal_1.assert)(allowedTypes !== undefined,
+        (0, internal_1.assert)(allowedTypes !== undefined, 0xc87 /* Non object nodes with fields should only have one allowedTypes entry */);
         return isIncrementalSummaryHintInAllowedTypes(allowedTypes);
     };
 }

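The hunk above only replaces the assertion message with the tagged code `0xc87`; the surrounding function is the compiled form of `getShouldIncrementallySummarizeAllowedTypes`, whose embedded source (in the map that follows) explains that allowed types opt into incremental summarization via custom metadata keyed by the `incrementalSummaryHint` symbol. A self-contained sketch of that metadata lookup (the symbol and types below are local stand-ins for the ones defined in the package):

```typescript
// Local re-creation of the hint check performed for each allowed-types entry;
// unknown schema and root fields fall back to non-incremental encoding, as the
// compiled function above shows.
const incrementalSummaryHint: unique symbol = Symbol("IncrementalSummaryHint");

interface AllowedTypesMetadata {
	custom?: Record<symbol, unknown>;
}

/** True when the allowed types' custom metadata carries the incremental-summary hint. */
function hasIncrementalSummaryHint(metadata: AllowedTypesMetadata): boolean {
	return metadata.custom?.[incrementalSummaryHint] === true;
}

console.log(hasIncrementalSummaryHint({ custom: { [incrementalSummaryHint]: true } })); // true
console.log(hasIncrementalSummaryHint({})); // false
```

Per the `@example` in the embedded source, the hint is attached through `SchemaFactoryAlpha` by passing `custom: { [incrementalSummaryHint]: true }` in the allowed-types metadata, and the resulting policy is supplied as `shouldEncodeFieldIncrementally` when creating the tree.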
package/dist/simple-tree/api/incrementalAllowedTypes.js.map
CHANGED

@@ -1 +1 @@
-
{"version":3,"file":"incrementalAllowedTypes.js","sourceRoot":"","sources":["../../../src/simple-tree/api/incrementalAllowedTypes.ts"],"names":[],"mappings":";AAAA;;;GAGG;;;AAGH,+CAAuF;AACvF,qDAA4D;AAG5D,kDAAsD;AACtD,kEAA6D;AAE7D;;;;;;;;;;;;;;;;;;GAkBG;AACU,QAAA,sBAAsB,GAAkB,MAAM,CAAC,wBAAwB,CAAC,CAAC;AAEtF;;GAEG;AACH,SAAS,sCAAsC,CAAC,YAA8B;IAC7E,MAAM,cAAc,GAAG,YAAY,CAAC,QAAQ,CAAC,MAAM,CAAC;IACpD,OAAO,CACN,cAAc,KAAK,SAAS;QAC3B,cAA0C,CAAC,8BAAsB,CAAC,KAAK,IAAI,CAC5E,CAAC;AACH,CAAC;AAED;;;;;;;;;;;;;;;;;;GAkBG;AACH,SAAgB,2CAA2C,CAC1D,UAAsB;IAEtB,OAAO,CACN,oBAA0D,EAC1D,cAAwB,EACvB,EAAE;QACH,IAAI,oBAAoB,KAAK,SAAS,EAAE,CAAC;YACxC,iFAAiF;YACjF,OAAO,KAAK,CAAC;QACd,CAAC;QAED,MAAM,UAAU,GAAG,UAAU,CAAC,WAAW,CAAC,GAAG,CAAC,oBAAoB,CAAC,CAAC;QACpE,IAAI,UAAU,KAAK,SAAS,EAAE,CAAC;YAC9B,gDAAgD;YAChD,mIAAmI;YACnI,sCAAsC;YACtC,kEAAkE;YAClE,oIAAoI;YACpI,OAAO,KAAK,CAAC;QACd,CAAC;QAED,IAAI,IAAA,6BAAkB,EAAC,UAAU,CAAC,EAAE,CAAC;YACpC,MAAM,iBAAiB,GAAG,UAAU,CAAC,sBAAsB,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC;YAChF,IAAI,iBAAiB,KAAK,SAAS,EAAE,CAAC;gBACrC,MAAM,WAAW,GAAG,UAAU,CAAC,MAAM,CAAC,GAAG,CAAC,iBAAiB,CAAC,CAAC;gBAC7D,IAAI,WAAW,KAAK,SAAS,EAAE,CAAC;oBAC/B,OAAO,sCAAsC,CAAC,WAAW,CAAC,gBAAgB,CAAC,CAAC;gBAC7E,CAAC;YACF,CAAC;YACD,OAAO,KAAK,CAAC;QACd,CAAC;QAED,MAAM,YAAY,GAAG,IAAA,0BAAe,EACnC,IAAA,uCAA4B,EAAC,UAAU,CAAC,CAAC,iBAAiB,CAC1D,CAAC;QACF,IAAA,iBAAM,EACL,YAAY,KAAK,SAAS,EAC1B,
+
{"version":3,"file":"incrementalAllowedTypes.js","sourceRoot":"","sources":["../../../src/simple-tree/api/incrementalAllowedTypes.ts"],"names":[],"mappings":";AAAA;;;GAGG;;;AAGH,+CAAuF;AACvF,qDAA4D;AAG5D,kDAAsD;AACtD,kEAA6D;AAE7D;;;;;;;;;;;;;;;;;;GAkBG;AACU,QAAA,sBAAsB,GAAkB,MAAM,CAAC,wBAAwB,CAAC,CAAC;AAEtF;;GAEG;AACH,SAAS,sCAAsC,CAAC,YAA8B;IAC7E,MAAM,cAAc,GAAG,YAAY,CAAC,QAAQ,CAAC,MAAM,CAAC;IACpD,OAAO,CACN,cAAc,KAAK,SAAS;QAC3B,cAA0C,CAAC,8BAAsB,CAAC,KAAK,IAAI,CAC5E,CAAC;AACH,CAAC;AAED;;;;;;;;;;;;;;;;;;GAkBG;AACH,SAAgB,2CAA2C,CAC1D,UAAsB;IAEtB,OAAO,CACN,oBAA0D,EAC1D,cAAwB,EACvB,EAAE;QACH,IAAI,oBAAoB,KAAK,SAAS,EAAE,CAAC;YACxC,iFAAiF;YACjF,OAAO,KAAK,CAAC;QACd,CAAC;QAED,MAAM,UAAU,GAAG,UAAU,CAAC,WAAW,CAAC,GAAG,CAAC,oBAAoB,CAAC,CAAC;QACpE,IAAI,UAAU,KAAK,SAAS,EAAE,CAAC;YAC9B,gDAAgD;YAChD,mIAAmI;YACnI,sCAAsC;YACtC,kEAAkE;YAClE,oIAAoI;YACpI,OAAO,KAAK,CAAC;QACd,CAAC;QAED,IAAI,IAAA,6BAAkB,EAAC,UAAU,CAAC,EAAE,CAAC;YACpC,MAAM,iBAAiB,GAAG,UAAU,CAAC,sBAAsB,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC;YAChF,IAAI,iBAAiB,KAAK,SAAS,EAAE,CAAC;gBACrC,MAAM,WAAW,GAAG,UAAU,CAAC,MAAM,CAAC,GAAG,CAAC,iBAAiB,CAAC,CAAC;gBAC7D,IAAI,WAAW,KAAK,SAAS,EAAE,CAAC;oBAC/B,OAAO,sCAAsC,CAAC,WAAW,CAAC,gBAAgB,CAAC,CAAC;gBAC7E,CAAC;YACF,CAAC;YACD,OAAO,KAAK,CAAC;QACd,CAAC;QAED,MAAM,YAAY,GAAG,IAAA,0BAAe,EACnC,IAAA,uCAA4B,EAAC,UAAU,CAAC,CAAC,iBAAiB,CAC1D,CAAC;QACF,IAAA,iBAAM,EACL,YAAY,KAAK,SAAS,EAC1B,KAAK,CAAC,0EAA0E,CAChF,CAAC;QACF,OAAO,sCAAsC,CAAC,YAAY,CAAC,CAAC;IAC7D,CAAC,CAAC;AACH,CAAC;AA1CD,kGA0CC","sourcesContent":["/*!\n * Copyright (c) Microsoft Corporation and contributors. All rights reserved.\n * Licensed under the MIT License.\n */\n\nimport type { FieldKey, TreeNodeSchemaIdentifier } from \"../../core/index.js\";\nimport { getTreeNodeSchemaPrivateData, type AllowedTypesFull } from \"../core/index.js\";\nimport { isObjectNodeSchema } from \"../node-kinds/index.js\";\nimport type { TreeSchema } from \"./configuration.js\";\nimport type { IncrementalEncodingPolicy } from \"../../feature-libraries/index.js\";\nimport { oneFromIterable } from \"../../util/index.js\";\nimport { assert } from \"@fluidframework/core-utils/internal\";\n\n/**\n * A symbol when present in the {@link AnnotatedAllowedTypes.metadata.custom} property as true, opts in the allowed\n * types to incremental summary optimization.\n * These allowed types will be optimized during summary such that if they don't change across summaries,\n * they will not be encoded and their content will not be included in the summary that is uploaded to the service.\n * @remarks\n * See {@link getShouldIncrementallySummarizeAllowedTypes} for more details.\n *\n * Use {@link SchemaStaticsAlpha.types} to add this metadata to allowed types in a schema.\n * @example\n * ```typescript\n * const sf = new SchemaFactoryAlpha(\"IncrementalSummarization\");\n * class Foo extends sf.objectAlpha(\"foo\", {\n * bar: sf.types([{ type: sf.string, metadata: {} }], {\n * custom: { [incrementalSummaryHint]: true },\n * }),\n * }) {}\n * ```\n */\nexport const incrementalSummaryHint: unique symbol = Symbol(\"IncrementalSummaryHint\");\n\n/**\n * Returns true if the provided allowed types's custom metadata has {@link incrementalSummaryHint} as true.\n */\nfunction isIncrementalSummaryHintInAllowedTypes(allowedTypes: AllowedTypesFull): boolean {\n\tconst customMetadata = allowedTypes.metadata.custom;\n\treturn (\n\t\tcustomMetadata !== undefined &&\n\t\t(customMetadata as Record<symbol, unknown>)[incrementalSummaryHint] === true\n\t);\n}\n\n/**\n * This helper function {@link 
getShouldIncrementallySummarizeAllowedTypes} can be used to generate a callback function\n * of type {@link IncrementalEncodingPolicy}.\n * This callback can be passed as the value for {@link SharedTreeOptionsInternal.shouldEncodeFieldIncrementally} parameter\n * when creating the tree.\n * It will be called for each {@link AllowedTypes} in the schema to determine if it should be incrementally summarized.\n *\n * @param rootSchema - The schema for the root of the tree.\n * @returns A callback function of type {@link IncrementalEncodingPolicy} which can be used to determine if a field\n * should be incrementally summarized based on whether it is an allowed types with the\n * {@link incrementalAllowedTypesMetadata} metadata.\n *\n * @remarks\n * This only works for forest type {@link ForestTypeOptimized} and compression strategy\n * {@link TreeCompressionStrategyExtended.CompressedIncremental}.\n *\n * The {@link incrementalAllowedTypesMetadata} will be replaced with a specialized metadata property once the\n * incremental summary feature and APIs are stabilized.\n */\nexport function getShouldIncrementallySummarizeAllowedTypes(\n\trootSchema: TreeSchema,\n): IncrementalEncodingPolicy {\n\treturn (\n\t\ttargetNodeIdentifier: TreeNodeSchemaIdentifier | undefined,\n\t\ttargetFieldKey: FieldKey,\n\t) => {\n\t\tif (targetNodeIdentifier === undefined) {\n\t\t\t// Root fields cannot be allowed types, so we don't incrementally summarize them.\n\t\t\treturn false;\n\t\t}\n\n\t\tconst targetNode = rootSchema.definitions.get(targetNodeIdentifier);\n\t\tif (targetNode === undefined) {\n\t\t\t// The requested type is unknown to this schema.\n\t\t\t// In this case we have no hints available from the view schema, and fall back to the default behavior of non-incremental encoding.\n\t\t\t// There are two ways this can happen:\n\t\t\t// 1. The view schema being used does not match the stored schema.\n\t\t\t// 2. The view schema is compatible, but there are unknown optional fields which contain new types not described by the view schema.\n\t\t\treturn false;\n\t\t}\n\n\t\tif (isObjectNodeSchema(targetNode)) {\n\t\t\tconst targetPropertyKey = targetNode.storedKeyToPropertyKey.get(targetFieldKey);\n\t\t\tif (targetPropertyKey !== undefined) {\n\t\t\t\tconst fieldSchema = targetNode.fields.get(targetPropertyKey);\n\t\t\t\tif (fieldSchema !== undefined) {\n\t\t\t\t\treturn isIncrementalSummaryHintInAllowedTypes(fieldSchema.allowedTypesFull);\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn false;\n\t\t}\n\n\t\tconst allowedTypes = oneFromIterable(\n\t\t\tgetTreeNodeSchemaPrivateData(targetNode).childAllowedTypes,\n\t\t);\n\t\tassert(\n\t\t\tallowedTypes !== undefined,\n\t\t\t0xc87 /* Non object nodes with fields should only have one allowedTypes entry */,\n\t\t);\n\t\treturn isIncrementalSummaryHintInAllowedTypes(allowedTypes);\n\t};\n}\n"]}
|
|
package/lib/feature-libraries/chunked-forest/codec/compressedEncode.js
CHANGED

@@ -238,7 +238,7 @@ export class IncrementalChunkShape extends ShapeGeneric {
  */
 export const incrementalFieldEncoder = {
     encodeField(cursor, context, outputBuffer) {
-        assert(context.incrementalEncoder !== undefined,
+        assert(context.incrementalEncoder !== undefined, 0xc88 /* incremental encoder must be defined to use incrementalFieldEncoder */);
         const chunkReferenceIds = context.incrementalEncoder.encodeIncrementalField(cursor, (chunk) => compressedEncode([chunk.cursor()], context));
         outputBuffer.push(chunkReferenceIds);
     },

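As the hunk above shows, `incrementalFieldEncoder` never writes chunk contents into the field's output buffer: it pushes only the `ChunkReferenceId`s returned by `encodeIncrementalField`, while each chunk's bytes are produced by the callback (`compressedEncode` over the chunk's cursor) and stored under the chunk's own summary subtree. A toy, dependency-free sketch of that contract (the real encoder operates on tree cursors and `TreeChunk`s rather than the strings used here):

```typescript
type ChunkReferenceId = number;

interface IncrementalEncoderLike<TChunk, TEncoded> {
	encodeIncrementalField(
		chunks: readonly TChunk[],
		encodeChunk: (chunk: TChunk) => TEncoded,
	): ChunkReferenceId[];
}

// Toy implementation: assigns sequential reference IDs and keeps the encoded chunk
// contents on the side, the way the summary builder stores each chunk in its own
// subtree rather than inline in the field.
function makeToyEncoder<TChunk, TEncoded>(): {
	encoder: IncrementalEncoderLike<TChunk, TEncoded>;
	encodedChunks: Map<ChunkReferenceId, TEncoded>;
} {
	const encodedChunks = new Map<ChunkReferenceId, TEncoded>();
	let nextReferenceId: ChunkReferenceId = 0;
	return {
		encodedChunks,
		encoder: {
			encodeIncrementalField(chunks, encodeChunk) {
				return chunks.map((chunk) => {
					const referenceId = nextReferenceId++;
					encodedChunks.set(referenceId, encodeChunk(chunk));
					return referenceId;
				});
			},
		},
	};
}

// The field's output buffer ends up holding only [[0, 1]]; the chunk data lives elsewhere.
const { encoder, encodedChunks } = makeToyEncoder<string, string>();
const outputBuffer: unknown[] = [];
outputBuffer.push(encoder.encodeIncrementalField(["chunkA", "chunkB"], (c) => c.toUpperCase()));
console.log(outputBuffer); // [ [ 0, 1 ] ]
console.log(encodedChunks); // Map(2) { 0 => 'CHUNKA', 1 => 'CHUNKB' }
```

Decoding reverses this: `decodeIncrementalChunk` looks the reference ID up in the loaded-chunks map and decodes the separately stored `EncodedFieldBatch`, as the `ForestIncrementalSummaryBuilder` source earlier in this diff shows.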
package/lib/feature-libraries/chunked-forest/codec/compressedEncode.js.map
CHANGED

@@ -1 +1 @@
-
{"version":3,"file":"compressedEncode.js","sourceRoot":"","sources":["../../../../src/feature-libraries/chunked-forest/codec/compressedEncode.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,EAAE,MAAM,EAAE,eAAe,EAAE,IAAI,EAAE,MAAM,qCAAqC,CAAC;AAGpF,OAAO,EASN,WAAW,GACX,MAAM,wBAAwB,CAAC;AAChC,OAAO,EAAE,WAAW,EAAE,MAAM,wBAAwB,CAAC;AAIrD,OAAO,EAEN,KAAK,IAAI,YAAY,EACrB,kCAAkC,GAClC,MAAM,2BAA2B,CAAC;AAEnC,OAAO,EAMN,YAAY,EACZ,OAAO,GACP,MAAM,aAAa,CAAC;AAGrB;;;;;;GAMG;AACH,MAAM,UAAU,gBAAgB,CAC/B,UAAsB,EACtB,OAAuB;IAEvB,MAAM,WAAW,GAAmB,EAAE,CAAC;IAEvC,6DAA6D;IAC7D,KAAK,MAAM,MAAM,IAAI,UAAU,EAAE,CAAC;QACjC,MAAM,MAAM,GAAiB,EAAE,CAAC;QAChC,eAAe,CAAC,WAAW,CAAC,MAAM,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;QACrD,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;IAC1B,CAAC;IACD,OAAO,kCAAkC,CAAC,OAAO,EAAE,WAAW,CAAC,CAAC;AACjE,CAAC;AAoED;;;GAGG;AACH,MAAM,UAAU,cAAc,CAAC,OAAoB;IAClD,OAAO;QACN,WAAW,CACV,MAA8B,EAC9B,OAAuB,EACvB,YAA0B;YAE1B,WAAW,CAAC,MAAM,EAAE,GAAG,EAAE,CAAC,OAAO,CAAC,UAAU,CAAC,MAAM,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC,CAAC;QAC9E,CAAC;QACD,KAAK,EAAE,OAAO,CAAC,KAAK;KACpB,CAAC;AACH,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,cAAc,CAAC,OAAoB;IAClD,OAAO;QACN,WAAW,CACV,MAA8B,EAC9B,OAAuB,EACvB,YAA0B;YAE1B,OAAO,CAAC,UAAU,CAAC,MAAM,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC;YAClD,MAAM,CAAC,QAAQ,EAAE,CAAC;QACnB,CAAC;QACD,KAAK,EAAE,OAAO,CAAC,KAAK;KACpB,CAAC;AACH,CAAC;AAED;;GAEG;AACH,MAAM,OAAO,QAAS,SAAQ,YAA+B;IAC5D;QACC,KAAK,EAAE,CAAC;IACT,CAAC;IAGM,WAAW,CACjB,WAAuC,EACvC,MAAiC;QAEjC,MAAM,eAAe,GAAoB,CAAC,CAAC;QAC3C,OAAO,EAAE,CAAC,EAAE,eAAe,EAAE,CAAC;IAC/B,CAAC;IAEM,mCAAmC,CACzC,WAA4B,EAC5B,eAAuC,IAC/B,CAAC;IAEH,MAAM,CAAC,WAAW,CACxB,MAA8B,EAC9B,OAAuB,EACvB,YAA0B,EAC1B,OAAqB;QAErB,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;QACjC,OAAO,CAAC,WAAW,CAAC,MAAM,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC;IACpD,CAAC;IAEM,MAAM,CAAC,UAAU,CACvB,MAA8B,EAC9B,OAAuB,EACvB,YAA0B,EAC1B,OAAoB;QAEpB,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;QACjC,OAAO,CAAC,UAAU,CAAC,MAAM,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC;IACnD,CAAC;IAEM,MAAM,CAAC,WAAW,CACxB,MAA8B,EAC9B,OAAuB,EACvB,YAA0B,EAC1B,OAAqB;QAErB,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;QACjC,OAAO,CAAC,WAAW,CAAC,MAAM,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC;IACpD,CAAC;;AA3CsB,iBAAQ,GAAG,IAAI,QAAQ,EAAE,CAAC;AA8ClD;;GAEG;AACH,MAAM,CAAC,MAAM,cAAc,GAAgB;IAC1C,UAAU,CACT,MAA8B,EAC9B,OAAuB,EACvB,YAA0B;QAE1B,yCAAyC;QACzC,MAAM,WAAW,GAAG,OAAO,CAAC,qBAAqB,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;QAC/D,QAAQ,CAAC,UAAU,CAAC,MAAM,EAAE,OAAO,EAAE,YAAY,EAAE,WAAW,CAAC,CAAC;IACjE,CAAC;IAED,KAAK,EAAE,QAAQ,CAAC,QAAQ;CACxB,CAAC;AAEF;;GAEG;AACH,MAAM,CAAC,MAAM,eAAe,GAAiB;IAC5C,WAAW,CACV,MAA8B,EAC9B,OAAuB,EACvB,YAA0B;QAE1B,kCAAkC;QAElC,IAAI,MAAM,CAAC,cAAc,EAAE,KAAK,CAAC,EAAE,CAAC;YACnC,MAAM,KAAK,GAAG,kBAAkB,CAAC,KAAK,CAAC;YACvC,QAAQ,CAAC,WAAW,CAAC,MAAM,EAAE,OAAO,EAAE,YAAY,EAAE,KAAK,CAAC,CAAC;QAC5D,CAAC;aAAM,IAAI,MAAM,CAAC,cAAc,EAAE,KAAK,CAAC,EAAE,CAAC;YAC1C,oEAAoE;YACpE,MAAM,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;YACpB,cAAc,CAAC,UAAU,CAAC,MAAM,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC;YACzD,MAAM,CAAC,QAAQ,EAAE,CAAC;QACnB,CAAC;aAAM,CAAC;YACP,kDAAkD;YAClD,4FAA4F;YAE5F,MAAM,KAAK,GAAG,OAAO,CAAC,kBAAkB,CAAC,cAAc,CAAC,CAAC;YACzD,QAAQ,CAAC,WAAW,CAAC,MAAM,EAAE,OAAO,EAAE,YAAY,EAAE,KAAK,CAAC,CAAC;QAC5D,CAAC;IACF,CAAC;IAED,KAAK,EAAE,QAAQ,CAAC,QAAQ;CACxB,CAAC;AAEF;;;;;GAKG;AACH,MAAM,OAAO,kBACZ,SAAQ,YAA+B;IAiBvC;;OAEG;IACH,YACiB,MAAc,EACd,KAAmB;QAEnC,KAAK,EAAE,CAAC;QAHQ,WAAM,GAAN,MAAM,CAAQ;QACd,UAAK,GAAL,KAAK,CAAc;IAGpC,CAAC;IAEM,WAAW,CACjB,MAA8B,EAC9B,OAAuB,EACvB,YAA0B;QAE1B,2DAA2D;QAC3D,4DAA4D;QAC5D,KAAK,IAAI,KAAK,GAAG,CAAC,EAAE,KAAK,GAAG,IAAI,CAAC,MAAM,EAAE,KAAK,EAAE,EAAE,CAAC;YAClD,IAAI,CAAC,KAAK,CAAC,WAAW,CAAC,MAAM,EAAE,OAAO,EAAE,
YAAY,CAAC,CAAC;QACvD,CAAC;IACF,CAAC;IAEM,WAAW,CACjB,MAA8B,EAC9B,OAAuB,EACvB,YAA0B;QAE1B,iGAAiG;QACjG,MAAM,CACL,MAAM,CAAC,cAAc,EAAE,IAAI,IAAI,CAAC,MAAM,EACtC,KAAK,CAAC,8CAA8C,CACpD,CAAC;QACF,MAAM,CAAC,SAAS,EAAE,CAAC;QACnB,IAAI,CAAC,WAAW,CAAC,MAAM,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC;QAChD,MAAM,CACL,MAAM,CAAC,IAAI,sCAA8B,EACzC,KAAK,CAAC,yDAAyD,CAC/D,CAAC;IACH,CAAC;IAEM,WAAW,CACjB,WAAuC,EACvC,MAAiC;QAEjC,OAAO;YACN,CAAC,EAAE;gBACF,MAAM,EAAE,IAAI,CAAC,MAAM;gBACnB,KAAK,EAAE,MAAM,CAAC,YAAY,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,IAAI,IAAI,CAAC,KAAK,CAAC,mBAAmB,CAAC;aACnF;SACD,CAAC;IACH,CAAC;IAEM,mCAAmC,CACzC,WAA4B,EAC5B,eAAuC;QAEvC,eAAe,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;IACnC,CAAC;IAED,IAAW,KAAK;QACf,OAAO,IAAI,CAAC;IACb,CAAC;;AA3EsB,wBAAK,GAAuB,IAAI,kBAAkB,CAAC,CAAC,EAAE;IAC5E,IAAI,KAAK;QACR,0EAA0E;QAC1E,OAAO,kBAAkB,CAAC,KAAK,CAAC;IACjC,CAAC;IACD,WAAW,CACV,MAA8B,EAC9B,OAAuB,EACvB,YAA0B;QAE1B,IAAI,CAAC,KAAK,CAAC,6CAA6C,CAAC,CAAC;IAC3D,CAAC;CACD,CAAC,CAAC;AAkEJ;;GAEG;AACH,MAAM,OAAO,gBAAiB,SAAQ,YAA+B;IACpE;;OAEG;IACH,YAAmC,UAAiB;QACnD,KAAK,EAAE,CAAC;QAD0B,eAAU,GAAV,UAAU,CAAO;IAEpD,CAAC;IAEM,WAAW,CACjB,WAAuC,EACvC,MAAiC;QAEjC,MAAM,KAAK,GACV,MAAM,CAAC,YAAY,CAAC,GAAG,CAAC,IAAI,CAAC,UAAU,CAAC;YACxC,IAAI,CAAC,KAAK,CAAC,wCAAwC,CAAC,CAAC;QACtD,OAAO;YACN,CAAC,EAAE,KAAK;SACR,CAAC;IACH,CAAC;IAEM,mCAAmC,CACzC,WAA4B,EAC5B,eAAuC;QAEvC,eAAe,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;IAClC,CAAC;CACD;AAED;;;;;GAKG;AACH,MAAM,OAAO,kBAAkB;IAC9B,YACiB,YAAyB,EACzB,QAA0B,IAAI,gBAAgB,CAAC,YAAY,CAAC,KAAK,CAAC;QADlE,iBAAY,GAAZ,YAAY,CAAa;QACzB,UAAK,GAAL,KAAK,CAA6D;IAChF,CAAC;IAEG,WAAW,CACjB,MAA8B,EAC9B,OAAuB,EACvB,YAA0B;QAE1B,MAAM,MAAM,GAAiB,EAAE,CAAC;QAChC,IAAI,cAAc,GAAG,IAAI,CAAC;QAC1B,MAAM,MAAM,GAAG,MAAM,CAAC,cAAc,EAAE,CAAC;QACvC,WAAW,CAAC,MAAM,EAAE,GAAG,EAAE;YACxB,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC;YAC7B,IAAI,CAAC,YAAY,CAAC,UAAU,CAAC,MAAM,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;YACtD,cAAc,KAAK,MAAM,CAAC,MAAM,GAAG,MAAM,KAAK,CAAC,CAAC;QACjD,CAAC,CAAC,CAAC;QACH,IAAI,MAAM,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;YACzB,mFAAmF;YACnF,oFAAoF;YACpF,YAAY,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QAC3B,CAAC;aAAM,CAAC;YACP,MAAM,CACL,cAAc,EACd,KAAK,CAAC,8IAA8I,CACpJ,CAAC;YACF,YAAY,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QAC3B,CAAC;IACF,CAAC;CACD;AAED;;GAEG;AACH,MAAM,OAAO,qBAAsB,SAAQ,YAA+B;IAClE,WAAW,CACjB,WAAuC,EACvC,MAAiC;QAEjC,OAAO;YACN,CAAC,EAAE,CAAC,CAAC,kCAAkC;SACvC,CAAC;IACH,CAAC;IAEM,mCAAmC,CACzC,WAA4B,EAC5B,eAAuC,IAC/B,CAAC;IAEV,IAAW,KAAK;QACf,OAAO,IAAI,CAAC;IACb,CAAC;CACD;AAED;;;;;GAKG;AACH,MAAM,CAAC,MAAM,uBAAuB,GAAiB;IACpD,WAAW,CACV,MAA8B,EAC9B,OAAuB,EACvB,YAA0B;QAE1B,MAAM,CACL,OAAO,CAAC,kBAAkB,KAAK,SAAS,EACxC,oEAAoE,CACpE,CAAC;QAEF,MAAM,iBAAiB,GAAG,OAAO,CAAC,kBAAkB,CAAC,sBAAsB,CAC1E,MAAM,EACN,CAAC,KAAgB,EAAE,EAAE,CAAC,gBAAgB,CAAC,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,OAAO,CAAC,CACjE,CAAC;QACF,YAAY,CAAC,IAAI,CAAC,iBAAiB,CAAC,CAAC;IACtC,CAAC;IAED,KAAK,EAAE,IAAI,gBAAgB,CAAC,IAAI,qBAAqB,EAAE,CAAC,gBAAgB,CAAC;CACzE,CAAC;AAEF;;;;GAIG;AACH,MAAM,UAAU,WAAW,CAC1B,KAAY,EACZ,KAAwB,EACxB,YAA0B;IAE1B,IAAI,KAAK,KAAK,SAAS,EAAE,CAAC;QACzB,IAAI,KAAK,KAAK,SAAS,EAAE,CAAC;YACzB,YAAY,CAAC,IAAI,CAAC,IAAI,EAAE,KAAK,CAAC,CAAC;QAChC,CAAC;aAAM,CAAC;YACP,YAAY,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QAC1B,CAAC;IACF,CAAC;SAAM,CAAC;QACP,IAAI,KAAK,KAAK,IAAI,EAAE,CAAC;YACpB,MAAM,CAAC,KAAK,KAAK,SAAS,EAAE,KAAK,CAAC,wCAAwC,CAAC,CAAC;YAC5E,YAAY,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QAC1B,CAAC;aAAM,IAAI,KAAK,KAAK,KAAK,EAAE,CAAC;YAC5B,MAAM,CAAC,KAAK,KAAK,SAAS,EAAE,KAAK,CAAC,iDAAiD,CAAC,CAAC;QACtF,CAAC;aAAM,IAAI,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE,CAAC;YACjC,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE,KAAK,CAAC,0CAA0C,CAAC,CAAC;QAC9E,CAAC;
aAAM,IAAI,KAAK,KAAK,YAAY,CAAC,UAAU,EAAE,CAAC;YAC9C,0EAA0E;YAC1E,MAAM,CAAC,KAAK,KAAK,SAAS,EAAE,KAAK,CAAC,wCAAwC,CAAC,CAAC;YAC5E,YAAY,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QAC1B,CAAC;aAAM,CAAC;YACP,uBAAuB;YACvB,eAAe,CAAC,KAAK,EAAE,gDAAgD,CAAC,CAAC;QAC1E,CAAC;IACF,CAAC;AACF,CAAC;AAED;;;;;;;GAOG;AACH,MAAM,OAAO,cAAc;IAI1B,YACkB,qBAAwC,EACxC,sBAA0C,EAC3C,WAA4D,EAC5D,YAA2B;IAC3C;;;;OAIG;IACa,kBAAkD;QATjD,0BAAqB,GAArB,qBAAqB,CAAmB;QACxC,2BAAsB,GAAtB,sBAAsB,CAAoB;QAC3C,gBAAW,GAAX,WAAW,CAAiD;QAC5D,iBAAY,GAAZ,YAAY,CAAe;QAM3B,uBAAkB,GAAlB,kBAAkB,CAAgC;QAblD,2BAAsB,GACtC,IAAI,GAAG,EAAE,CAAC;QACM,wBAAmB,GAAyC,IAAI,GAAG,EAAE,CAAC;IAYpF,CAAC;IAEG,qBAAqB,CAAC,UAAoC;QAChE,OAAO,WAAW,CAAC,IAAI,CAAC,sBAAsB,EAAE,UAAU,EAAE,GAAG,EAAE,CAChE,IAAI,CAAC,qBAAqB,CAAC,IAAI,EAAE,UAAU,CAAC,CAC5C,CAAC;IACH,CAAC;IAEM,sBAAsB,CAAC,WAAkC;QAC/D,OAAO,IAAI,gBAAgB,CAAC,IAAI,EAAE,WAAW,EAAE,IAAI,CAAC,sBAAsB,CAAC,CAAC;IAC7E,CAAC;IAEM,kBAAkB,CAAC,KAAkB;QAC3C,OAAO,WAAW,CAAC,IAAI,CAAC,mBAAmB,EAAE,KAAK,EAAE,GAAG,EAAE,CAAC,IAAI,kBAAkB,CAAC,KAAK,CAAC,CAAC,CAAC;IAC1F,CAAC;CACD;AA0BD,MAAM,gBAAgB;IAGrB,YACiB,WAA8B,EAC9B,WAAkC,EACjC,sBAA0C;QAF3C,gBAAW,GAAX,WAAW,CAAmB;QAC9B,gBAAW,GAAX,WAAW,CAAuB;QACjC,2BAAsB,GAAtB,sBAAsB,CAAoB;IACzD,CAAC;IACG,WAAW,CACjB,MAA8B,EAC9B,OAAuB,EACvB,YAA0B;QAE1B,IAAI,CAAC,OAAO,CAAC,WAAW,CAAC,MAAM,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC;IACzD,CAAC;IAED,IAAY,OAAO;QAClB,IAAI,IAAI,CAAC,WAAW,KAAK,SAAS,EAAE,CAAC;YACpC,IAAI,CAAC,WAAW,GAAG,IAAI,CAAC,sBAAsB,CAAC,IAAI,CAAC,WAAW,EAAE,IAAI,CAAC,WAAW,CAAC,CAAC;QACpF,CAAC;QACD,OAAO,IAAI,CAAC,WAAW,CAAC;IACzB,CAAC;IAED,IAAW,KAAK;QACf,OAAO,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC;IAC3B,CAAC;CACD","sourcesContent":["/*!\n * Copyright (c) Microsoft Corporation and contributors. All rights reserved.\n * Licensed under the MIT License.\n */\n\nimport { assert, unreachableCase, fail } from \"@fluidframework/core-utils/internal\";\nimport type { IIdCompressor } from \"@fluidframework/id-compressor\";\n\nimport {\n\tCursorLocationType,\n\ttype FieldKey,\n\ttype FieldKindIdentifier,\n\ttype ITreeCursorSynchronous,\n\ttype TreeChunk,\n\ttype TreeFieldStoredSchema,\n\ttype TreeNodeSchemaIdentifier,\n\ttype Value,\n\tforEachNode,\n} from \"../../../core/index.js\";\nimport { getOrCreate } from \"../../../util/index.js\";\nimport type { FlexFieldKind } from \"../../modular-schema/index.js\";\n\nimport type { Counter, DeduplicationTable } from \"./chunkCodecUtilities.js\";\nimport {\n\ttype BufferFormat as BufferFormatGeneric,\n\tShape as ShapeGeneric,\n\tupdateShapesAndIdentifiersEncoding,\n} from \"./chunkEncodingGeneric.js\";\nimport type { FieldBatch } from \"./fieldBatch.js\";\nimport {\n\ttype EncodedAnyShape,\n\ttype EncodedChunkShape,\n\ttype EncodedFieldBatch,\n\ttype EncodedNestedArrayShape,\n\ttype EncodedValueShape,\n\tSpecialField,\n\tversion,\n} from \"./format.js\";\nimport type { IncrementalEncoder } from \"./codecs.js\";\n\n/**\n * Encode data from `FieldBatch` into an `EncodedFieldBatch`.\n *\n * Optimized for encoded size and encoding performance.\n *\n * Most of the compression strategy comes from the policy provided via `context`.\n */\nexport function compressedEncode(\n\tfieldBatch: FieldBatch,\n\tcontext: EncoderContext,\n): EncodedFieldBatch {\n\tconst batchBuffer: BufferFormat[] = [];\n\n\t// Populate buffer, including shape and identifier references\n\tfor (const cursor of fieldBatch) {\n\t\tconst buffer: BufferFormat = [];\n\t\tanyFieldEncoder.encodeField(cursor, context, buffer);\n\t\tbatchBuffer.push(buffer);\n\t}\n\treturn updateShapesAndIdentifiersEncoding(version, batchBuffer);\n}\n\nexport 
type BufferFormat = BufferFormatGeneric<EncodedChunkShape>;\nexport type Shape = ShapeGeneric<EncodedChunkShape>;\n\n/**\n * Like {@link FieldEncoder}, except data will be prefixed with the key.\n */\nexport interface KeyedFieldEncoder {\n\treadonly key: FieldKey;\n\treadonly encoder: FieldEncoder;\n}\n\n/**\n * An encoder with an associated shape.\n */\nexport interface Encoder {\n\t/**\n\t * The shape which describes how the encoded data is laid out.\n\t * Used by decoders to interpret the output of `encodeNode`.\n\t */\n\treadonly shape: Shape;\n}\n\n/**\n * An encoder for a specific shape of node.\n *\n * Can only be used with compatible nodes.\n */\nexport interface NodeEncoder extends Encoder {\n\t/**\n\t * @param cursor - in Nodes mode. Does not move cursor.\n\t */\n\tencodeNode(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tcontext: EncoderContext,\n\t\toutputBuffer: BufferFormat,\n\t): void;\n}\n\n/**\n * Like {@link NodeEncoder}, except encodes a run of nodes.\n */\nexport interface NodesEncoder extends Encoder {\n\t/**\n\t * @param cursor - in Nodes mode. Moves cursor however many nodes it encodes.\n\t */\n\tencodeNodes(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tcontext: EncoderContext,\n\t\toutputBuffer: BufferFormat,\n\t): void;\n}\n\n/**\n * Like {@link NodeEncoder}, except encodes a field.\n */\nexport interface FieldEncoder extends Encoder {\n\t/**\n\t * @param cursor - in Fields mode. Encodes entire field.\n\t */\n\tencodeField(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tcontext: EncoderContext,\n\t\toutputBuffer: BufferFormat,\n\t): void;\n}\n\n/**\n * Makes a {@link FieldEncoder} which runs `encoder` on every node in the field.\n * This does not encode the number nodes: the user of this may need to encode that elsewhere.\n */\nexport function asFieldEncoder(encoder: NodeEncoder): FieldEncoder {\n\treturn {\n\t\tencodeField(\n\t\t\tcursor: ITreeCursorSynchronous,\n\t\t\tcontext: EncoderContext,\n\t\t\toutputBuffer: BufferFormat,\n\t\t): void {\n\t\t\tforEachNode(cursor, () => encoder.encodeNode(cursor, context, outputBuffer));\n\t\t},\n\t\tshape: encoder.shape,\n\t};\n}\n\n/**\n * Adapt a {@link NodeEncoder} to a {@link NodesEncoder} which invokes `encoder` once.\n */\nexport function asNodesEncoder(encoder: NodeEncoder): NodesEncoder {\n\treturn {\n\t\tencodeNodes(\n\t\t\tcursor: ITreeCursorSynchronous,\n\t\t\tcontext: EncoderContext,\n\t\t\toutputBuffer: BufferFormat,\n\t\t): void {\n\t\t\tencoder.encodeNode(cursor, context, outputBuffer);\n\t\t\tcursor.nextNode();\n\t\t},\n\t\tshape: encoder.shape,\n\t};\n}\n\n/**\n * Encodes a chunk with {@link EncodedAnyShape} by prefixing the data with its shape.\n */\nexport class AnyShape extends ShapeGeneric<EncodedChunkShape> {\n\tprivate constructor() {\n\t\tsuper();\n\t}\n\tpublic static readonly instance = new AnyShape();\n\n\tpublic encodeShape(\n\t\tidentifiers: DeduplicationTable<string>,\n\t\tshapes: DeduplicationTable<Shape>,\n\t): EncodedChunkShape {\n\t\tconst encodedAnyShape: EncodedAnyShape = 0;\n\t\treturn { d: encodedAnyShape };\n\t}\n\n\tpublic countReferencedShapesAndIdentifiers(\n\t\tidentifiers: Counter<string>,\n\t\tshapeDiscovered: (shape: Shape) => void,\n\t): void {}\n\n\tpublic static encodeField(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tcontext: EncoderContext,\n\t\toutputBuffer: BufferFormat,\n\t\tencoder: FieldEncoder,\n\t): void {\n\t\toutputBuffer.push(encoder.shape);\n\t\tencoder.encodeField(cursor, context, outputBuffer);\n\t}\n\n\tpublic static encodeNode(\n\t\tcursor: 
ITreeCursorSynchronous,\n\t\tcontext: EncoderContext,\n\t\toutputBuffer: BufferFormat,\n\t\tencoder: NodeEncoder,\n\t): void {\n\t\toutputBuffer.push(encoder.shape);\n\t\tencoder.encodeNode(cursor, context, outputBuffer);\n\t}\n\n\tpublic static encodeNodes(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tcontext: EncoderContext,\n\t\toutputBuffer: BufferFormat,\n\t\tencoder: NodesEncoder,\n\t): void {\n\t\toutputBuffer.push(encoder.shape);\n\t\tencoder.encodeNodes(cursor, context, outputBuffer);\n\t}\n}\n\n/**\n * Encodes a single node polymorphically.\n */\nexport const anyNodeEncoder: NodeEncoder = {\n\tencodeNode(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tcontext: EncoderContext,\n\t\toutputBuffer: BufferFormat,\n\t): void {\n\t\t// TODO: Fast path uniform chunk content.\n\t\tconst nodeEncoder = context.nodeEncoderFromSchema(cursor.type);\n\t\tAnyShape.encodeNode(cursor, context, outputBuffer, nodeEncoder);\n\t},\n\n\tshape: AnyShape.instance,\n};\n\n/**\n * Encodes a field polymorphically.\n */\nexport const anyFieldEncoder: FieldEncoder = {\n\tencodeField(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tcontext: EncoderContext,\n\t\toutputBuffer: BufferFormat,\n\t): void {\n\t\t// TODO: Fast path uniform chunks.\n\n\t\tif (cursor.getFieldLength() === 0) {\n\t\t\tconst shape = InlineArrayEncoder.empty;\n\t\t\tAnyShape.encodeField(cursor, context, outputBuffer, shape);\n\t\t} else if (cursor.getFieldLength() === 1) {\n\t\t\t// Fast path chunk of size one size one at least: skip nested array.\n\t\t\tcursor.enterNode(0);\n\t\t\tanyNodeEncoder.encodeNode(cursor, context, outputBuffer);\n\t\t\tcursor.exitNode();\n\t\t} else {\n\t\t\t// TODO: more efficient encoding for common cases.\n\t\t\t// Could try to find more specific shape compatible with all children than `anyNodeEncoder`.\n\n\t\t\tconst shape = context.nestedArrayEncoder(anyNodeEncoder);\n\t\t\tAnyShape.encodeField(cursor, context, outputBuffer, shape);\n\t\t}\n\t},\n\n\tshape: AnyShape.instance,\n};\n\n/**\n * Encodes a chunk using {@link EncodedInlineArrayShape}.\n * @remarks\n * The fact this is also a Shape is an implementation detail of the encoder: that allows the shape it uses to be itself,\n * which is an easy way to keep all the related code together without extra objects.\n */\nexport class InlineArrayEncoder\n\textends ShapeGeneric<EncodedChunkShape>\n\timplements NodesEncoder, FieldEncoder\n{\n\tpublic static readonly empty: InlineArrayEncoder = new InlineArrayEncoder(0, {\n\t\tget shape() {\n\t\t\t// Not actually used, makes count work without adding an additional shape.\n\t\t\treturn InlineArrayEncoder.empty;\n\t\t},\n\t\tencodeNodes(\n\t\t\tcursor: ITreeCursorSynchronous,\n\t\t\tcontext: EncoderContext,\n\t\t\toutputBuffer: BufferFormat,\n\t\t): void {\n\t\t\tfail(0xb4d /* Empty array should not encode any nodes */);\n\t\t},\n\t});\n\n\t/**\n\t * @param length - number of invocations of `inner`.\n\t */\n\tpublic constructor(\n\t\tpublic readonly length: number,\n\t\tpublic readonly inner: NodesEncoder,\n\t) {\n\t\tsuper();\n\t}\n\n\tpublic encodeNodes(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tcontext: EncoderContext,\n\t\toutputBuffer: BufferFormat,\n\t): void {\n\t\t// Linter is wrong about this loop being for-of compatible.\n\t\t// eslint-disable-next-line @typescript-eslint/prefer-for-of\n\t\tfor (let index = 0; index < this.length; index++) {\n\t\t\tthis.inner.encodeNodes(cursor, context, outputBuffer);\n\t\t}\n\t}\n\n\tpublic encodeField(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tcontext: 
EncoderContext,\n\t\toutputBuffer: BufferFormat,\n\t): void {\n\t\t// Its possible individual items from this array encode multiple nodes, so don't assume === here.\n\t\tassert(\n\t\t\tcursor.getFieldLength() >= this.length,\n\t\t\t0x73c /* unexpected length for fixed length array */,\n\t\t);\n\t\tcursor.firstNode();\n\t\tthis.encodeNodes(cursor, context, outputBuffer);\n\t\tassert(\n\t\t\tcursor.mode === CursorLocationType.Fields,\n\t\t\t0x73d /* should return to fields mode when finished encoding */,\n\t\t);\n\t}\n\n\tpublic encodeShape(\n\t\tidentifiers: DeduplicationTable<string>,\n\t\tshapes: DeduplicationTable<Shape>,\n\t): EncodedChunkShape {\n\t\treturn {\n\t\t\tb: {\n\t\t\t\tlength: this.length,\n\t\t\t\tshape: shapes.valueToIndex.get(this.inner.shape) ?? fail(0xb4e /* missing shape */),\n\t\t\t},\n\t\t};\n\t}\n\n\tpublic countReferencedShapesAndIdentifiers(\n\t\tidentifiers: Counter<string>,\n\t\tshapeDiscovered: (shape: Shape) => void,\n\t): void {\n\t\tshapeDiscovered(this.inner.shape);\n\t}\n\n\tpublic get shape(): this {\n\t\treturn this;\n\t}\n}\n\n/**\n * Encodes the shape for a nested array as {@link EncodedNestedArrayShape} shape.\n */\nexport class NestedArrayShape extends ShapeGeneric<EncodedChunkShape> {\n\t/**\n\t * @param innerShape - The shape of each item in this nested array.\n\t */\n\tpublic constructor(public readonly innerShape: Shape) {\n\t\tsuper();\n\t}\n\n\tpublic encodeShape(\n\t\tidentifiers: DeduplicationTable<string>,\n\t\tshapes: DeduplicationTable<Shape>,\n\t): EncodedChunkShape {\n\t\tconst shape: EncodedNestedArrayShape =\n\t\t\tshapes.valueToIndex.get(this.innerShape) ??\n\t\t\tfail(0xb4f /* index for shape not found in table */);\n\t\treturn {\n\t\t\ta: shape,\n\t\t};\n\t}\n\n\tpublic countReferencedShapesAndIdentifiers(\n\t\tidentifiers: Counter<string>,\n\t\tshapeDiscovered: (shape: Shape) => void,\n\t): void {\n\t\tshapeDiscovered(this.innerShape);\n\t}\n}\n\n/**\n * Encodes a field as a nested array with the {@link EncodedNestedArrayShape} shape.\n * @remarks\n * The fact this is also exposes a Shape is an implementation detail: it allows the shape it uses to be itself\n * which is an easy way to keep all the related code together without extra objects.\n */\nexport class NestedArrayEncoder implements FieldEncoder {\n\tpublic constructor(\n\t\tpublic readonly innerEncoder: NodeEncoder,\n\t\tpublic readonly shape: NestedArrayShape = new NestedArrayShape(innerEncoder.shape),\n\t) {}\n\n\tpublic encodeField(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tcontext: EncoderContext,\n\t\toutputBuffer: BufferFormat,\n\t): void {\n\t\tconst buffer: BufferFormat = [];\n\t\tlet allNonZeroSize = true;\n\t\tconst length = cursor.getFieldLength();\n\t\tforEachNode(cursor, () => {\n\t\t\tconst before = buffer.length;\n\t\t\tthis.innerEncoder.encodeNode(cursor, context, buffer);\n\t\t\tallNonZeroSize &&= buffer.length - before !== 0;\n\t\t});\n\t\tif (buffer.length === 0) {\n\t\t\t// This relies on the number of inner chunks being the same as the number of nodes.\n\t\t\t// If making inner a `NodesEncoder`, this code will have to be adjusted accordingly.\n\t\t\toutputBuffer.push(length);\n\t\t} else {\n\t\t\tassert(\n\t\t\t\tallNonZeroSize,\n\t\t\t\t0x73e /* either all or none of the members of a nested array must be 0 sized, or there is no way the decoder could process the content correctly. 
*/,\n\t\t\t);\n\t\t\toutputBuffer.push(buffer);\n\t\t}\n\t}\n}\n\n/**\n * Encodes the shape for an incremental chunk as {@link EncodedIncrementalChunkShape} shape.\n */\nexport class IncrementalChunkShape extends ShapeGeneric<EncodedChunkShape> {\n\tpublic encodeShape(\n\t\tidentifiers: DeduplicationTable<string>,\n\t\tshapes: DeduplicationTable<Shape>,\n\t): EncodedChunkShape {\n\t\treturn {\n\t\t\te: 0 /* EncodedIncrementalChunkShape */,\n\t\t};\n\t}\n\n\tpublic countReferencedShapesAndIdentifiers(\n\t\tidentifiers: Counter<string>,\n\t\tshapeDiscovered: (shape: Shape) => void,\n\t): void {}\n\n\tpublic get shape(): this {\n\t\treturn this;\n\t}\n}\n\n/**\n * Encodes an incremental field whose tree chunks are encoded separately and referenced by their {@link ChunkReferenceId}.\n * The shape of the content of this field is {@link NestedArrayShape}.\n * The inner items of the array have shape {@link IncrementalChunkShape} and are {@link ChunkReferenceId}s\n * of the encoded chunks.\n */\nexport const incrementalFieldEncoder: FieldEncoder = {\n\tencodeField(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tcontext: EncoderContext,\n\t\toutputBuffer: BufferFormat,\n\t): void {\n\t\tassert(\n\t\t\tcontext.incrementalEncoder !== undefined,\n\t\t\t\"incremental encoder must be defined to use incrementalFieldEncoder\",\n\t\t);\n\n\t\tconst chunkReferenceIds = context.incrementalEncoder.encodeIncrementalField(\n\t\t\tcursor,\n\t\t\t(chunk: TreeChunk) => compressedEncode([chunk.cursor()], context),\n\t\t);\n\t\toutputBuffer.push(chunkReferenceIds);\n\t},\n\n\tshape: new NestedArrayShape(new IncrementalChunkShape() /* innerShape */),\n};\n\n/**\n * Encode `value` with `shape` into `outputBuffer`.\n *\n * Requires that `value` is compatible with `shape`.\n */\nexport function encodeValue(\n\tvalue: Value,\n\tshape: EncodedValueShape,\n\toutputBuffer: BufferFormat,\n): void {\n\tif (shape === undefined) {\n\t\tif (value !== undefined) {\n\t\t\toutputBuffer.push(true, value);\n\t\t} else {\n\t\t\toutputBuffer.push(false);\n\t\t}\n\t} else {\n\t\tif (shape === true) {\n\t\t\tassert(value !== undefined, 0x78d /* required value must not be missing */);\n\t\t\toutputBuffer.push(value);\n\t\t} else if (shape === false) {\n\t\t\tassert(value === undefined, 0x73f /* incompatible value shape: expected no value */);\n\t\t} else if (Array.isArray(shape)) {\n\t\t\tassert(shape.length === 1, 0x740 /* expected a single constant for value */);\n\t\t} else if (shape === SpecialField.Identifier) {\n\t\t\t// This case is a special case handling the encoding of identifier fields.\n\t\t\tassert(value !== undefined, 0x998 /* required value must not be missing */);\n\t\t\toutputBuffer.push(value);\n\t\t} else {\n\t\t\t// EncodedCounter case:\n\t\t\tunreachableCase(shape, \"Encoding values as deltas is not yet supported\");\n\t\t}\n\t}\n}\n\n/**\n * Provides common contextual information during encoding, like schema and policy settings.\n * Also, provides a cache to avoid duplicating equivalent shapes during a batch of encode operations.\n * @remarks\n * To avoid Shape duplication, any Shapes used in the encoding should either be:\n * - Singletons defined in a static scope.\n * - Cached in this object for future reuse such that all equivalent Shapes are deduplicated.\n */\nexport class EncoderContext implements NodeEncodeBuilder, FieldEncodeBuilder {\n\tprivate readonly nodeEncodersFromSchema: Map<TreeNodeSchemaIdentifier, NodeEncoder> =\n\t\tnew Map();\n\tprivate readonly nestedArrayEncoders: Map<NodeEncoder, NestedArrayEncoder> 
= new Map();\n\tpublic constructor(\n\t\tprivate readonly nodeEncoderFromPolicy: NodeEncoderPolicy,\n\t\tprivate readonly fieldEncoderFromPolicy: FieldEncoderPolicy,\n\t\tpublic readonly fieldShapes: ReadonlyMap<FieldKindIdentifier, FlexFieldKind>,\n\t\tpublic readonly idCompressor: IIdCompressor,\n\t\t/**\n\t\t * To be used to encode incremental chunks, if any.\n\t\t * @remarks\n\t\t * See {@link IncrementalEncoder} for more information.\n\t\t */\n\t\tpublic readonly incrementalEncoder: IncrementalEncoder | undefined,\n\t) {}\n\n\tpublic nodeEncoderFromSchema(schemaName: TreeNodeSchemaIdentifier): NodeEncoder {\n\t\treturn getOrCreate(this.nodeEncodersFromSchema, schemaName, () =>\n\t\t\tthis.nodeEncoderFromPolicy(this, schemaName),\n\t\t);\n\t}\n\n\tpublic fieldEncoderFromSchema(fieldSchema: TreeFieldStoredSchema): FieldEncoder {\n\t\treturn new LazyFieldEncoder(this, fieldSchema, this.fieldEncoderFromPolicy);\n\t}\n\n\tpublic nestedArrayEncoder(inner: NodeEncoder): NestedArrayEncoder {\n\t\treturn getOrCreate(this.nestedArrayEncoders, inner, () => new NestedArrayEncoder(inner));\n\t}\n}\n\nexport interface NodeEncodeBuilder {\n\tnodeEncoderFromSchema(schemaName: TreeNodeSchemaIdentifier): NodeEncoder;\n}\n\nexport interface FieldEncodeBuilder {\n\tfieldEncoderFromSchema(schema: TreeFieldStoredSchema): FieldEncoder;\n}\n\n/**\n * The policy for building a {@link FieldEncoder} for a field.\n */\nexport type FieldEncoderPolicy = (\n\tnodeBuilder: NodeEncodeBuilder,\n\tschema: TreeFieldStoredSchema,\n) => FieldEncoder;\n\n/**\n * The policy for building a {@link NodeEncoder} for a node.\n */\nexport type NodeEncoderPolicy = (\n\tfieldBuilder: FieldEncodeBuilder,\n\tschemaName: TreeNodeSchemaIdentifier,\n) => NodeEncoder;\n\nclass LazyFieldEncoder implements FieldEncoder {\n\tprivate encoderLazy: FieldEncoder | undefined;\n\n\tpublic constructor(\n\t\tpublic readonly nodeBuilder: NodeEncodeBuilder,\n\t\tpublic readonly fieldSchema: TreeFieldStoredSchema,\n\t\tprivate readonly fieldEncoderFromPolicy: FieldEncoderPolicy,\n\t) {}\n\tpublic encodeField(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tcontext: EncoderContext,\n\t\toutputBuffer: BufferFormat,\n\t): void {\n\t\tthis.encoder.encodeField(cursor, context, outputBuffer);\n\t}\n\n\tprivate get encoder(): FieldEncoder {\n\t\tif (this.encoderLazy === undefined) {\n\t\t\tthis.encoderLazy = this.fieldEncoderFromPolicy(this.nodeBuilder, this.fieldSchema);\n\t\t}\n\t\treturn this.encoderLazy;\n\t}\n\n\tpublic get shape(): Shape {\n\t\treturn this.encoder.shape;\n\t}\n}\n"]}
+
{"version":3,"file":"compressedEncode.js","sourceRoot":"","sources":["../../../../src/feature-libraries/chunked-forest/codec/compressedEncode.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,EAAE,MAAM,EAAE,eAAe,EAAE,IAAI,EAAE,MAAM,qCAAqC,CAAC;AAGpF,OAAO,EASN,WAAW,GACX,MAAM,wBAAwB,CAAC;AAChC,OAAO,EAAE,WAAW,EAAE,MAAM,wBAAwB,CAAC;AAIrD,OAAO,EAEN,KAAK,IAAI,YAAY,EACrB,kCAAkC,GAClC,MAAM,2BAA2B,CAAC;AAEnC,OAAO,EAMN,YAAY,EACZ,OAAO,GACP,MAAM,aAAa,CAAC;AAGrB;;;;;;GAMG;AACH,MAAM,UAAU,gBAAgB,CAC/B,UAAsB,EACtB,OAAuB;IAEvB,MAAM,WAAW,GAAmB,EAAE,CAAC;IAEvC,6DAA6D;IAC7D,KAAK,MAAM,MAAM,IAAI,UAAU,EAAE,CAAC;QACjC,MAAM,MAAM,GAAiB,EAAE,CAAC;QAChC,eAAe,CAAC,WAAW,CAAC,MAAM,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;QACrD,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;IAC1B,CAAC;IACD,OAAO,kCAAkC,CAAC,OAAO,EAAE,WAAW,CAAC,CAAC;AACjE,CAAC;AAoED;;;GAGG;AACH,MAAM,UAAU,cAAc,CAAC,OAAoB;IAClD,OAAO;QACN,WAAW,CACV,MAA8B,EAC9B,OAAuB,EACvB,YAA0B;YAE1B,WAAW,CAAC,MAAM,EAAE,GAAG,EAAE,CAAC,OAAO,CAAC,UAAU,CAAC,MAAM,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC,CAAC;QAC9E,CAAC;QACD,KAAK,EAAE,OAAO,CAAC,KAAK;KACpB,CAAC;AACH,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,cAAc,CAAC,OAAoB;IAClD,OAAO;QACN,WAAW,CACV,MAA8B,EAC9B,OAAuB,EACvB,YAA0B;YAE1B,OAAO,CAAC,UAAU,CAAC,MAAM,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC;YAClD,MAAM,CAAC,QAAQ,EAAE,CAAC;QACnB,CAAC;QACD,KAAK,EAAE,OAAO,CAAC,KAAK;KACpB,CAAC;AACH,CAAC;AAED;;GAEG;AACH,MAAM,OAAO,QAAS,SAAQ,YAA+B;IAC5D;QACC,KAAK,EAAE,CAAC;IACT,CAAC;IAGM,WAAW,CACjB,WAAuC,EACvC,MAAiC;QAEjC,MAAM,eAAe,GAAoB,CAAC,CAAC;QAC3C,OAAO,EAAE,CAAC,EAAE,eAAe,EAAE,CAAC;IAC/B,CAAC;IAEM,mCAAmC,CACzC,WAA4B,EAC5B,eAAuC,IAC/B,CAAC;IAEH,MAAM,CAAC,WAAW,CACxB,MAA8B,EAC9B,OAAuB,EACvB,YAA0B,EAC1B,OAAqB;QAErB,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;QACjC,OAAO,CAAC,WAAW,CAAC,MAAM,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC;IACpD,CAAC;IAEM,MAAM,CAAC,UAAU,CACvB,MAA8B,EAC9B,OAAuB,EACvB,YAA0B,EAC1B,OAAoB;QAEpB,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;QACjC,OAAO,CAAC,UAAU,CAAC,MAAM,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC;IACnD,CAAC;IAEM,MAAM,CAAC,WAAW,CACxB,MAA8B,EAC9B,OAAuB,EACvB,YAA0B,EAC1B,OAAqB;QAErB,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;QACjC,OAAO,CAAC,WAAW,CAAC,MAAM,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC;IACpD,CAAC;;AA3CsB,iBAAQ,GAAG,IAAI,QAAQ,EAAE,CAAC;AA8ClD;;GAEG;AACH,MAAM,CAAC,MAAM,cAAc,GAAgB;IAC1C,UAAU,CACT,MAA8B,EAC9B,OAAuB,EACvB,YAA0B;QAE1B,yCAAyC;QACzC,MAAM,WAAW,GAAG,OAAO,CAAC,qBAAqB,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;QAC/D,QAAQ,CAAC,UAAU,CAAC,MAAM,EAAE,OAAO,EAAE,YAAY,EAAE,WAAW,CAAC,CAAC;IACjE,CAAC;IAED,KAAK,EAAE,QAAQ,CAAC,QAAQ;CACxB,CAAC;AAEF;;GAEG;AACH,MAAM,CAAC,MAAM,eAAe,GAAiB;IAC5C,WAAW,CACV,MAA8B,EAC9B,OAAuB,EACvB,YAA0B;QAE1B,kCAAkC;QAElC,IAAI,MAAM,CAAC,cAAc,EAAE,KAAK,CAAC,EAAE,CAAC;YACnC,MAAM,KAAK,GAAG,kBAAkB,CAAC,KAAK,CAAC;YACvC,QAAQ,CAAC,WAAW,CAAC,MAAM,EAAE,OAAO,EAAE,YAAY,EAAE,KAAK,CAAC,CAAC;QAC5D,CAAC;aAAM,IAAI,MAAM,CAAC,cAAc,EAAE,KAAK,CAAC,EAAE,CAAC;YAC1C,oEAAoE;YACpE,MAAM,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;YACpB,cAAc,CAAC,UAAU,CAAC,MAAM,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC;YACzD,MAAM,CAAC,QAAQ,EAAE,CAAC;QACnB,CAAC;aAAM,CAAC;YACP,kDAAkD;YAClD,4FAA4F;YAE5F,MAAM,KAAK,GAAG,OAAO,CAAC,kBAAkB,CAAC,cAAc,CAAC,CAAC;YACzD,QAAQ,CAAC,WAAW,CAAC,MAAM,EAAE,OAAO,EAAE,YAAY,EAAE,KAAK,CAAC,CAAC;QAC5D,CAAC;IACF,CAAC;IAED,KAAK,EAAE,QAAQ,CAAC,QAAQ;CACxB,CAAC;AAEF;;;;;GAKG;AACH,MAAM,OAAO,kBACZ,SAAQ,YAA+B;IAiBvC;;OAEG;IACH,YACiB,MAAc,EACd,KAAmB;QAEnC,KAAK,EAAE,CAAC;QAHQ,WAAM,GAAN,MAAM,CAAQ;QACd,UAAK,GAAL,KAAK,CAAc;IAGpC,CAAC;IAEM,WAAW,CACjB,MAA8B,EAC9B,OAAuB,EACvB,YAA0B;QAE1B,2DAA2D;QAC3D,4DAA4D;QAC5D,KAAK,IAAI,KAAK,GAAG,CAAC,EAAE,KAAK,GAAG,IAAI,CAAC,MAAM,EAAE,KAAK,EAAE,EAAE,CAAC;YAClD,IAAI,CAAC,KAAK,CAAC,WAAW,CAAC,MAAM,EAAE,OAAO,EAAE,
YAAY,CAAC,CAAC;QACvD,CAAC;IACF,CAAC;IAEM,WAAW,CACjB,MAA8B,EAC9B,OAAuB,EACvB,YAA0B;QAE1B,iGAAiG;QACjG,MAAM,CACL,MAAM,CAAC,cAAc,EAAE,IAAI,IAAI,CAAC,MAAM,EACtC,KAAK,CAAC,8CAA8C,CACpD,CAAC;QACF,MAAM,CAAC,SAAS,EAAE,CAAC;QACnB,IAAI,CAAC,WAAW,CAAC,MAAM,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC;QAChD,MAAM,CACL,MAAM,CAAC,IAAI,sCAA8B,EACzC,KAAK,CAAC,yDAAyD,CAC/D,CAAC;IACH,CAAC;IAEM,WAAW,CACjB,WAAuC,EACvC,MAAiC;QAEjC,OAAO;YACN,CAAC,EAAE;gBACF,MAAM,EAAE,IAAI,CAAC,MAAM;gBACnB,KAAK,EAAE,MAAM,CAAC,YAAY,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,IAAI,IAAI,CAAC,KAAK,CAAC,mBAAmB,CAAC;aACnF;SACD,CAAC;IACH,CAAC;IAEM,mCAAmC,CACzC,WAA4B,EAC5B,eAAuC;QAEvC,eAAe,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;IACnC,CAAC;IAED,IAAW,KAAK;QACf,OAAO,IAAI,CAAC;IACb,CAAC;;AA3EsB,wBAAK,GAAuB,IAAI,kBAAkB,CAAC,CAAC,EAAE;IAC5E,IAAI,KAAK;QACR,0EAA0E;QAC1E,OAAO,kBAAkB,CAAC,KAAK,CAAC;IACjC,CAAC;IACD,WAAW,CACV,MAA8B,EAC9B,OAAuB,EACvB,YAA0B;QAE1B,IAAI,CAAC,KAAK,CAAC,6CAA6C,CAAC,CAAC;IAC3D,CAAC;CACD,CAAC,CAAC;AAkEJ;;GAEG;AACH,MAAM,OAAO,gBAAiB,SAAQ,YAA+B;IACpE;;OAEG;IACH,YAAmC,UAAiB;QACnD,KAAK,EAAE,CAAC;QAD0B,eAAU,GAAV,UAAU,CAAO;IAEpD,CAAC;IAEM,WAAW,CACjB,WAAuC,EACvC,MAAiC;QAEjC,MAAM,KAAK,GACV,MAAM,CAAC,YAAY,CAAC,GAAG,CAAC,IAAI,CAAC,UAAU,CAAC;YACxC,IAAI,CAAC,KAAK,CAAC,wCAAwC,CAAC,CAAC;QACtD,OAAO;YACN,CAAC,EAAE,KAAK;SACR,CAAC;IACH,CAAC;IAEM,mCAAmC,CACzC,WAA4B,EAC5B,eAAuC;QAEvC,eAAe,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;IAClC,CAAC;CACD;AAED;;;;;GAKG;AACH,MAAM,OAAO,kBAAkB;IAC9B,YACiB,YAAyB,EACzB,QAA0B,IAAI,gBAAgB,CAAC,YAAY,CAAC,KAAK,CAAC;QADlE,iBAAY,GAAZ,YAAY,CAAa;QACzB,UAAK,GAAL,KAAK,CAA6D;IAChF,CAAC;IAEG,WAAW,CACjB,MAA8B,EAC9B,OAAuB,EACvB,YAA0B;QAE1B,MAAM,MAAM,GAAiB,EAAE,CAAC;QAChC,IAAI,cAAc,GAAG,IAAI,CAAC;QAC1B,MAAM,MAAM,GAAG,MAAM,CAAC,cAAc,EAAE,CAAC;QACvC,WAAW,CAAC,MAAM,EAAE,GAAG,EAAE;YACxB,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC;YAC7B,IAAI,CAAC,YAAY,CAAC,UAAU,CAAC,MAAM,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;YACtD,cAAc,KAAK,MAAM,CAAC,MAAM,GAAG,MAAM,KAAK,CAAC,CAAC;QACjD,CAAC,CAAC,CAAC;QACH,IAAI,MAAM,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;YACzB,mFAAmF;YACnF,oFAAoF;YACpF,YAAY,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QAC3B,CAAC;aAAM,CAAC;YACP,MAAM,CACL,cAAc,EACd,KAAK,CAAC,8IAA8I,CACpJ,CAAC;YACF,YAAY,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QAC3B,CAAC;IACF,CAAC;CACD;AAED;;GAEG;AACH,MAAM,OAAO,qBAAsB,SAAQ,YAA+B;IAClE,WAAW,CACjB,WAAuC,EACvC,MAAiC;QAEjC,OAAO;YACN,CAAC,EAAE,CAAC,CAAC,kCAAkC;SACvC,CAAC;IACH,CAAC;IAEM,mCAAmC,CACzC,WAA4B,EAC5B,eAAuC,IAC/B,CAAC;IAEV,IAAW,KAAK;QACf,OAAO,IAAI,CAAC;IACb,CAAC;CACD;AAED;;;;;GAKG;AACH,MAAM,CAAC,MAAM,uBAAuB,GAAiB;IACpD,WAAW,CACV,MAA8B,EAC9B,OAAuB,EACvB,YAA0B;QAE1B,MAAM,CACL,OAAO,CAAC,kBAAkB,KAAK,SAAS,EACxC,KAAK,CAAC,wEAAwE,CAC9E,CAAC;QAEF,MAAM,iBAAiB,GAAG,OAAO,CAAC,kBAAkB,CAAC,sBAAsB,CAC1E,MAAM,EACN,CAAC,KAAgB,EAAE,EAAE,CAAC,gBAAgB,CAAC,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,OAAO,CAAC,CACjE,CAAC;QACF,YAAY,CAAC,IAAI,CAAC,iBAAiB,CAAC,CAAC;IACtC,CAAC;IAED,KAAK,EAAE,IAAI,gBAAgB,CAAC,IAAI,qBAAqB,EAAE,CAAC,gBAAgB,CAAC;CACzE,CAAC;AAEF;;;;GAIG;AACH,MAAM,UAAU,WAAW,CAC1B,KAAY,EACZ,KAAwB,EACxB,YAA0B;IAE1B,IAAI,KAAK,KAAK,SAAS,EAAE,CAAC;QACzB,IAAI,KAAK,KAAK,SAAS,EAAE,CAAC;YACzB,YAAY,CAAC,IAAI,CAAC,IAAI,EAAE,KAAK,CAAC,CAAC;QAChC,CAAC;aAAM,CAAC;YACP,YAAY,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QAC1B,CAAC;IACF,CAAC;SAAM,CAAC;QACP,IAAI,KAAK,KAAK,IAAI,EAAE,CAAC;YACpB,MAAM,CAAC,KAAK,KAAK,SAAS,EAAE,KAAK,CAAC,wCAAwC,CAAC,CAAC;YAC5E,YAAY,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QAC1B,CAAC;aAAM,IAAI,KAAK,KAAK,KAAK,EAAE,CAAC;YAC5B,MAAM,CAAC,KAAK,KAAK,SAAS,EAAE,KAAK,CAAC,iDAAiD,CAAC,CAAC;QACtF,CAAC;aAAM,IAAI,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE,CAAC;YACjC,MAAM,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE,KAAK,CAAC,0CAA0C,CAAC,CAAC;Q
AC9E,CAAC;aAAM,IAAI,KAAK,KAAK,YAAY,CAAC,UAAU,EAAE,CAAC;YAC9C,0EAA0E;YAC1E,MAAM,CAAC,KAAK,KAAK,SAAS,EAAE,KAAK,CAAC,wCAAwC,CAAC,CAAC;YAC5E,YAAY,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QAC1B,CAAC;aAAM,CAAC;YACP,uBAAuB;YACvB,eAAe,CAAC,KAAK,EAAE,gDAAgD,CAAC,CAAC;QAC1E,CAAC;IACF,CAAC;AACF,CAAC;AAED;;;;;;;GAOG;AACH,MAAM,OAAO,cAAc;IAI1B,YACkB,qBAAwC,EACxC,sBAA0C,EAC3C,WAA4D,EAC5D,YAA2B;IAC3C;;;;OAIG;IACa,kBAAkD;QATjD,0BAAqB,GAArB,qBAAqB,CAAmB;QACxC,2BAAsB,GAAtB,sBAAsB,CAAoB;QAC3C,gBAAW,GAAX,WAAW,CAAiD;QAC5D,iBAAY,GAAZ,YAAY,CAAe;QAM3B,uBAAkB,GAAlB,kBAAkB,CAAgC;QAblD,2BAAsB,GACtC,IAAI,GAAG,EAAE,CAAC;QACM,wBAAmB,GAAyC,IAAI,GAAG,EAAE,CAAC;IAYpF,CAAC;IAEG,qBAAqB,CAAC,UAAoC;QAChE,OAAO,WAAW,CAAC,IAAI,CAAC,sBAAsB,EAAE,UAAU,EAAE,GAAG,EAAE,CAChE,IAAI,CAAC,qBAAqB,CAAC,IAAI,EAAE,UAAU,CAAC,CAC5C,CAAC;IACH,CAAC;IAEM,sBAAsB,CAAC,WAAkC;QAC/D,OAAO,IAAI,gBAAgB,CAAC,IAAI,EAAE,WAAW,EAAE,IAAI,CAAC,sBAAsB,CAAC,CAAC;IAC7E,CAAC;IAEM,kBAAkB,CAAC,KAAkB;QAC3C,OAAO,WAAW,CAAC,IAAI,CAAC,mBAAmB,EAAE,KAAK,EAAE,GAAG,EAAE,CAAC,IAAI,kBAAkB,CAAC,KAAK,CAAC,CAAC,CAAC;IAC1F,CAAC;CACD;AA0BD,MAAM,gBAAgB;IAGrB,YACiB,WAA8B,EAC9B,WAAkC,EACjC,sBAA0C;QAF3C,gBAAW,GAAX,WAAW,CAAmB;QAC9B,gBAAW,GAAX,WAAW,CAAuB;QACjC,2BAAsB,GAAtB,sBAAsB,CAAoB;IACzD,CAAC;IACG,WAAW,CACjB,MAA8B,EAC9B,OAAuB,EACvB,YAA0B;QAE1B,IAAI,CAAC,OAAO,CAAC,WAAW,CAAC,MAAM,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC;IACzD,CAAC;IAED,IAAY,OAAO;QAClB,IAAI,IAAI,CAAC,WAAW,KAAK,SAAS,EAAE,CAAC;YACpC,IAAI,CAAC,WAAW,GAAG,IAAI,CAAC,sBAAsB,CAAC,IAAI,CAAC,WAAW,EAAE,IAAI,CAAC,WAAW,CAAC,CAAC;QACpF,CAAC;QACD,OAAO,IAAI,CAAC,WAAW,CAAC;IACzB,CAAC;IAED,IAAW,KAAK;QACf,OAAO,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC;IAC3B,CAAC;CACD","sourcesContent":["/*!\n * Copyright (c) Microsoft Corporation and contributors. All rights reserved.\n * Licensed under the MIT License.\n */\n\nimport { assert, unreachableCase, fail } from \"@fluidframework/core-utils/internal\";\nimport type { IIdCompressor } from \"@fluidframework/id-compressor\";\n\nimport {\n\tCursorLocationType,\n\ttype FieldKey,\n\ttype FieldKindIdentifier,\n\ttype ITreeCursorSynchronous,\n\ttype TreeChunk,\n\ttype TreeFieldStoredSchema,\n\ttype TreeNodeSchemaIdentifier,\n\ttype Value,\n\tforEachNode,\n} from \"../../../core/index.js\";\nimport { getOrCreate } from \"../../../util/index.js\";\nimport type { FlexFieldKind } from \"../../modular-schema/index.js\";\n\nimport type { Counter, DeduplicationTable } from \"./chunkCodecUtilities.js\";\nimport {\n\ttype BufferFormat as BufferFormatGeneric,\n\tShape as ShapeGeneric,\n\tupdateShapesAndIdentifiersEncoding,\n} from \"./chunkEncodingGeneric.js\";\nimport type { FieldBatch } from \"./fieldBatch.js\";\nimport {\n\ttype EncodedAnyShape,\n\ttype EncodedChunkShape,\n\ttype EncodedFieldBatch,\n\ttype EncodedNestedArrayShape,\n\ttype EncodedValueShape,\n\tSpecialField,\n\tversion,\n} from \"./format.js\";\nimport type { IncrementalEncoder } from \"./codecs.js\";\n\n/**\n * Encode data from `FieldBatch` into an `EncodedFieldBatch`.\n *\n * Optimized for encoded size and encoding performance.\n *\n * Most of the compression strategy comes from the policy provided via `context`.\n */\nexport function compressedEncode(\n\tfieldBatch: FieldBatch,\n\tcontext: EncoderContext,\n): EncodedFieldBatch {\n\tconst batchBuffer: BufferFormat[] = [];\n\n\t// Populate buffer, including shape and identifier references\n\tfor (const cursor of fieldBatch) {\n\t\tconst buffer: BufferFormat = [];\n\t\tanyFieldEncoder.encodeField(cursor, context, buffer);\n\t\tbatchBuffer.push(buffer);\n\t}\n\treturn updateShapesAndIdentifiersEncoding(version, 
batchBuffer);\n}\n\nexport type BufferFormat = BufferFormatGeneric<EncodedChunkShape>;\nexport type Shape = ShapeGeneric<EncodedChunkShape>;\n\n/**\n * Like {@link FieldEncoder}, except data will be prefixed with the key.\n */\nexport interface KeyedFieldEncoder {\n\treadonly key: FieldKey;\n\treadonly encoder: FieldEncoder;\n}\n\n/**\n * An encoder with an associated shape.\n */\nexport interface Encoder {\n\t/**\n\t * The shape which describes how the encoded data is laid out.\n\t * Used by decoders to interpret the output of `encodeNode`.\n\t */\n\treadonly shape: Shape;\n}\n\n/**\n * An encoder for a specific shape of node.\n *\n * Can only be used with compatible nodes.\n */\nexport interface NodeEncoder extends Encoder {\n\t/**\n\t * @param cursor - in Nodes mode. Does not move cursor.\n\t */\n\tencodeNode(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tcontext: EncoderContext,\n\t\toutputBuffer: BufferFormat,\n\t): void;\n}\n\n/**\n * Like {@link NodeEncoder}, except encodes a run of nodes.\n */\nexport interface NodesEncoder extends Encoder {\n\t/**\n\t * @param cursor - in Nodes mode. Moves cursor however many nodes it encodes.\n\t */\n\tencodeNodes(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tcontext: EncoderContext,\n\t\toutputBuffer: BufferFormat,\n\t): void;\n}\n\n/**\n * Like {@link NodeEncoder}, except encodes a field.\n */\nexport interface FieldEncoder extends Encoder {\n\t/**\n\t * @param cursor - in Fields mode. Encodes entire field.\n\t */\n\tencodeField(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tcontext: EncoderContext,\n\t\toutputBuffer: BufferFormat,\n\t): void;\n}\n\n/**\n * Makes a {@link FieldEncoder} which runs `encoder` on every node in the field.\n * This does not encode the number nodes: the user of this may need to encode that elsewhere.\n */\nexport function asFieldEncoder(encoder: NodeEncoder): FieldEncoder {\n\treturn {\n\t\tencodeField(\n\t\t\tcursor: ITreeCursorSynchronous,\n\t\t\tcontext: EncoderContext,\n\t\t\toutputBuffer: BufferFormat,\n\t\t): void {\n\t\t\tforEachNode(cursor, () => encoder.encodeNode(cursor, context, outputBuffer));\n\t\t},\n\t\tshape: encoder.shape,\n\t};\n}\n\n/**\n * Adapt a {@link NodeEncoder} to a {@link NodesEncoder} which invokes `encoder` once.\n */\nexport function asNodesEncoder(encoder: NodeEncoder): NodesEncoder {\n\treturn {\n\t\tencodeNodes(\n\t\t\tcursor: ITreeCursorSynchronous,\n\t\t\tcontext: EncoderContext,\n\t\t\toutputBuffer: BufferFormat,\n\t\t): void {\n\t\t\tencoder.encodeNode(cursor, context, outputBuffer);\n\t\t\tcursor.nextNode();\n\t\t},\n\t\tshape: encoder.shape,\n\t};\n}\n\n/**\n * Encodes a chunk with {@link EncodedAnyShape} by prefixing the data with its shape.\n */\nexport class AnyShape extends ShapeGeneric<EncodedChunkShape> {\n\tprivate constructor() {\n\t\tsuper();\n\t}\n\tpublic static readonly instance = new AnyShape();\n\n\tpublic encodeShape(\n\t\tidentifiers: DeduplicationTable<string>,\n\t\tshapes: DeduplicationTable<Shape>,\n\t): EncodedChunkShape {\n\t\tconst encodedAnyShape: EncodedAnyShape = 0;\n\t\treturn { d: encodedAnyShape };\n\t}\n\n\tpublic countReferencedShapesAndIdentifiers(\n\t\tidentifiers: Counter<string>,\n\t\tshapeDiscovered: (shape: Shape) => void,\n\t): void {}\n\n\tpublic static encodeField(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tcontext: EncoderContext,\n\t\toutputBuffer: BufferFormat,\n\t\tencoder: FieldEncoder,\n\t): void {\n\t\toutputBuffer.push(encoder.shape);\n\t\tencoder.encodeField(cursor, context, outputBuffer);\n\t}\n\n\tpublic static encodeNode(\n\t\tcursor: 
ITreeCursorSynchronous,\n\t\tcontext: EncoderContext,\n\t\toutputBuffer: BufferFormat,\n\t\tencoder: NodeEncoder,\n\t): void {\n\t\toutputBuffer.push(encoder.shape);\n\t\tencoder.encodeNode(cursor, context, outputBuffer);\n\t}\n\n\tpublic static encodeNodes(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tcontext: EncoderContext,\n\t\toutputBuffer: BufferFormat,\n\t\tencoder: NodesEncoder,\n\t): void {\n\t\toutputBuffer.push(encoder.shape);\n\t\tencoder.encodeNodes(cursor, context, outputBuffer);\n\t}\n}\n\n/**\n * Encodes a single node polymorphically.\n */\nexport const anyNodeEncoder: NodeEncoder = {\n\tencodeNode(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tcontext: EncoderContext,\n\t\toutputBuffer: BufferFormat,\n\t): void {\n\t\t// TODO: Fast path uniform chunk content.\n\t\tconst nodeEncoder = context.nodeEncoderFromSchema(cursor.type);\n\t\tAnyShape.encodeNode(cursor, context, outputBuffer, nodeEncoder);\n\t},\n\n\tshape: AnyShape.instance,\n};\n\n/**\n * Encodes a field polymorphically.\n */\nexport const anyFieldEncoder: FieldEncoder = {\n\tencodeField(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tcontext: EncoderContext,\n\t\toutputBuffer: BufferFormat,\n\t): void {\n\t\t// TODO: Fast path uniform chunks.\n\n\t\tif (cursor.getFieldLength() === 0) {\n\t\t\tconst shape = InlineArrayEncoder.empty;\n\t\t\tAnyShape.encodeField(cursor, context, outputBuffer, shape);\n\t\t} else if (cursor.getFieldLength() === 1) {\n\t\t\t// Fast path chunk of size one size one at least: skip nested array.\n\t\t\tcursor.enterNode(0);\n\t\t\tanyNodeEncoder.encodeNode(cursor, context, outputBuffer);\n\t\t\tcursor.exitNode();\n\t\t} else {\n\t\t\t// TODO: more efficient encoding for common cases.\n\t\t\t// Could try to find more specific shape compatible with all children than `anyNodeEncoder`.\n\n\t\t\tconst shape = context.nestedArrayEncoder(anyNodeEncoder);\n\t\t\tAnyShape.encodeField(cursor, context, outputBuffer, shape);\n\t\t}\n\t},\n\n\tshape: AnyShape.instance,\n};\n\n/**\n * Encodes a chunk using {@link EncodedInlineArrayShape}.\n * @remarks\n * The fact this is also a Shape is an implementation detail of the encoder: that allows the shape it uses to be itself,\n * which is an easy way to keep all the related code together without extra objects.\n */\nexport class InlineArrayEncoder\n\textends ShapeGeneric<EncodedChunkShape>\n\timplements NodesEncoder, FieldEncoder\n{\n\tpublic static readonly empty: InlineArrayEncoder = new InlineArrayEncoder(0, {\n\t\tget shape() {\n\t\t\t// Not actually used, makes count work without adding an additional shape.\n\t\t\treturn InlineArrayEncoder.empty;\n\t\t},\n\t\tencodeNodes(\n\t\t\tcursor: ITreeCursorSynchronous,\n\t\t\tcontext: EncoderContext,\n\t\t\toutputBuffer: BufferFormat,\n\t\t): void {\n\t\t\tfail(0xb4d /* Empty array should not encode any nodes */);\n\t\t},\n\t});\n\n\t/**\n\t * @param length - number of invocations of `inner`.\n\t */\n\tpublic constructor(\n\t\tpublic readonly length: number,\n\t\tpublic readonly inner: NodesEncoder,\n\t) {\n\t\tsuper();\n\t}\n\n\tpublic encodeNodes(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tcontext: EncoderContext,\n\t\toutputBuffer: BufferFormat,\n\t): void {\n\t\t// Linter is wrong about this loop being for-of compatible.\n\t\t// eslint-disable-next-line @typescript-eslint/prefer-for-of\n\t\tfor (let index = 0; index < this.length; index++) {\n\t\t\tthis.inner.encodeNodes(cursor, context, outputBuffer);\n\t\t}\n\t}\n\n\tpublic encodeField(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tcontext: 
EncoderContext,\n\t\toutputBuffer: BufferFormat,\n\t): void {\n\t\t// Its possible individual items from this array encode multiple nodes, so don't assume === here.\n\t\tassert(\n\t\t\tcursor.getFieldLength() >= this.length,\n\t\t\t0x73c /* unexpected length for fixed length array */,\n\t\t);\n\t\tcursor.firstNode();\n\t\tthis.encodeNodes(cursor, context, outputBuffer);\n\t\tassert(\n\t\t\tcursor.mode === CursorLocationType.Fields,\n\t\t\t0x73d /* should return to fields mode when finished encoding */,\n\t\t);\n\t}\n\n\tpublic encodeShape(\n\t\tidentifiers: DeduplicationTable<string>,\n\t\tshapes: DeduplicationTable<Shape>,\n\t): EncodedChunkShape {\n\t\treturn {\n\t\t\tb: {\n\t\t\t\tlength: this.length,\n\t\t\t\tshape: shapes.valueToIndex.get(this.inner.shape) ?? fail(0xb4e /* missing shape */),\n\t\t\t},\n\t\t};\n\t}\n\n\tpublic countReferencedShapesAndIdentifiers(\n\t\tidentifiers: Counter<string>,\n\t\tshapeDiscovered: (shape: Shape) => void,\n\t): void {\n\t\tshapeDiscovered(this.inner.shape);\n\t}\n\n\tpublic get shape(): this {\n\t\treturn this;\n\t}\n}\n\n/**\n * Encodes the shape for a nested array as {@link EncodedNestedArrayShape} shape.\n */\nexport class NestedArrayShape extends ShapeGeneric<EncodedChunkShape> {\n\t/**\n\t * @param innerShape - The shape of each item in this nested array.\n\t */\n\tpublic constructor(public readonly innerShape: Shape) {\n\t\tsuper();\n\t}\n\n\tpublic encodeShape(\n\t\tidentifiers: DeduplicationTable<string>,\n\t\tshapes: DeduplicationTable<Shape>,\n\t): EncodedChunkShape {\n\t\tconst shape: EncodedNestedArrayShape =\n\t\t\tshapes.valueToIndex.get(this.innerShape) ??\n\t\t\tfail(0xb4f /* index for shape not found in table */);\n\t\treturn {\n\t\t\ta: shape,\n\t\t};\n\t}\n\n\tpublic countReferencedShapesAndIdentifiers(\n\t\tidentifiers: Counter<string>,\n\t\tshapeDiscovered: (shape: Shape) => void,\n\t): void {\n\t\tshapeDiscovered(this.innerShape);\n\t}\n}\n\n/**\n * Encodes a field as a nested array with the {@link EncodedNestedArrayShape} shape.\n * @remarks\n * The fact this is also exposes a Shape is an implementation detail: it allows the shape it uses to be itself\n * which is an easy way to keep all the related code together without extra objects.\n */\nexport class NestedArrayEncoder implements FieldEncoder {\n\tpublic constructor(\n\t\tpublic readonly innerEncoder: NodeEncoder,\n\t\tpublic readonly shape: NestedArrayShape = new NestedArrayShape(innerEncoder.shape),\n\t) {}\n\n\tpublic encodeField(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tcontext: EncoderContext,\n\t\toutputBuffer: BufferFormat,\n\t): void {\n\t\tconst buffer: BufferFormat = [];\n\t\tlet allNonZeroSize = true;\n\t\tconst length = cursor.getFieldLength();\n\t\tforEachNode(cursor, () => {\n\t\t\tconst before = buffer.length;\n\t\t\tthis.innerEncoder.encodeNode(cursor, context, buffer);\n\t\t\tallNonZeroSize &&= buffer.length - before !== 0;\n\t\t});\n\t\tif (buffer.length === 0) {\n\t\t\t// This relies on the number of inner chunks being the same as the number of nodes.\n\t\t\t// If making inner a `NodesEncoder`, this code will have to be adjusted accordingly.\n\t\t\toutputBuffer.push(length);\n\t\t} else {\n\t\t\tassert(\n\t\t\t\tallNonZeroSize,\n\t\t\t\t0x73e /* either all or none of the members of a nested array must be 0 sized, or there is no way the decoder could process the content correctly. 
*/,\n\t\t\t);\n\t\t\toutputBuffer.push(buffer);\n\t\t}\n\t}\n}\n\n/**\n * Encodes the shape for an incremental chunk as {@link EncodedIncrementalChunkShape} shape.\n */\nexport class IncrementalChunkShape extends ShapeGeneric<EncodedChunkShape> {\n\tpublic encodeShape(\n\t\tidentifiers: DeduplicationTable<string>,\n\t\tshapes: DeduplicationTable<Shape>,\n\t): EncodedChunkShape {\n\t\treturn {\n\t\t\te: 0 /* EncodedIncrementalChunkShape */,\n\t\t};\n\t}\n\n\tpublic countReferencedShapesAndIdentifiers(\n\t\tidentifiers: Counter<string>,\n\t\tshapeDiscovered: (shape: Shape) => void,\n\t): void {}\n\n\tpublic get shape(): this {\n\t\treturn this;\n\t}\n}\n\n/**\n * Encodes an incremental field whose tree chunks are encoded separately and referenced by their {@link ChunkReferenceId}.\n * The shape of the content of this field is {@link NestedArrayShape}.\n * The inner items of the array have shape {@link IncrementalChunkShape} and are {@link ChunkReferenceId}s\n * of the encoded chunks.\n */\nexport const incrementalFieldEncoder: FieldEncoder = {\n\tencodeField(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tcontext: EncoderContext,\n\t\toutputBuffer: BufferFormat,\n\t): void {\n\t\tassert(\n\t\t\tcontext.incrementalEncoder !== undefined,\n\t\t\t0xc88 /* incremental encoder must be defined to use incrementalFieldEncoder */,\n\t\t);\n\n\t\tconst chunkReferenceIds = context.incrementalEncoder.encodeIncrementalField(\n\t\t\tcursor,\n\t\t\t(chunk: TreeChunk) => compressedEncode([chunk.cursor()], context),\n\t\t);\n\t\toutputBuffer.push(chunkReferenceIds);\n\t},\n\n\tshape: new NestedArrayShape(new IncrementalChunkShape() /* innerShape */),\n};\n\n/**\n * Encode `value` with `shape` into `outputBuffer`.\n *\n * Requires that `value` is compatible with `shape`.\n */\nexport function encodeValue(\n\tvalue: Value,\n\tshape: EncodedValueShape,\n\toutputBuffer: BufferFormat,\n): void {\n\tif (shape === undefined) {\n\t\tif (value !== undefined) {\n\t\t\toutputBuffer.push(true, value);\n\t\t} else {\n\t\t\toutputBuffer.push(false);\n\t\t}\n\t} else {\n\t\tif (shape === true) {\n\t\t\tassert(value !== undefined, 0x78d /* required value must not be missing */);\n\t\t\toutputBuffer.push(value);\n\t\t} else if (shape === false) {\n\t\t\tassert(value === undefined, 0x73f /* incompatible value shape: expected no value */);\n\t\t} else if (Array.isArray(shape)) {\n\t\t\tassert(shape.length === 1, 0x740 /* expected a single constant for value */);\n\t\t} else if (shape === SpecialField.Identifier) {\n\t\t\t// This case is a special case handling the encoding of identifier fields.\n\t\t\tassert(value !== undefined, 0x998 /* required value must not be missing */);\n\t\t\toutputBuffer.push(value);\n\t\t} else {\n\t\t\t// EncodedCounter case:\n\t\t\tunreachableCase(shape, \"Encoding values as deltas is not yet supported\");\n\t\t}\n\t}\n}\n\n/**\n * Provides common contextual information during encoding, like schema and policy settings.\n * Also, provides a cache to avoid duplicating equivalent shapes during a batch of encode operations.\n * @remarks\n * To avoid Shape duplication, any Shapes used in the encoding should either be:\n * - Singletons defined in a static scope.\n * - Cached in this object for future reuse such that all equivalent Shapes are deduplicated.\n */\nexport class EncoderContext implements NodeEncodeBuilder, FieldEncodeBuilder {\n\tprivate readonly nodeEncodersFromSchema: Map<TreeNodeSchemaIdentifier, NodeEncoder> =\n\t\tnew Map();\n\tprivate readonly nestedArrayEncoders: Map<NodeEncoder, 
NestedArrayEncoder> = new Map();\n\tpublic constructor(\n\t\tprivate readonly nodeEncoderFromPolicy: NodeEncoderPolicy,\n\t\tprivate readonly fieldEncoderFromPolicy: FieldEncoderPolicy,\n\t\tpublic readonly fieldShapes: ReadonlyMap<FieldKindIdentifier, FlexFieldKind>,\n\t\tpublic readonly idCompressor: IIdCompressor,\n\t\t/**\n\t\t * To be used to encode incremental chunks, if any.\n\t\t * @remarks\n\t\t * See {@link IncrementalEncoder} for more information.\n\t\t */\n\t\tpublic readonly incrementalEncoder: IncrementalEncoder | undefined,\n\t) {}\n\n\tpublic nodeEncoderFromSchema(schemaName: TreeNodeSchemaIdentifier): NodeEncoder {\n\t\treturn getOrCreate(this.nodeEncodersFromSchema, schemaName, () =>\n\t\t\tthis.nodeEncoderFromPolicy(this, schemaName),\n\t\t);\n\t}\n\n\tpublic fieldEncoderFromSchema(fieldSchema: TreeFieldStoredSchema): FieldEncoder {\n\t\treturn new LazyFieldEncoder(this, fieldSchema, this.fieldEncoderFromPolicy);\n\t}\n\n\tpublic nestedArrayEncoder(inner: NodeEncoder): NestedArrayEncoder {\n\t\treturn getOrCreate(this.nestedArrayEncoders, inner, () => new NestedArrayEncoder(inner));\n\t}\n}\n\nexport interface NodeEncodeBuilder {\n\tnodeEncoderFromSchema(schemaName: TreeNodeSchemaIdentifier): NodeEncoder;\n}\n\nexport interface FieldEncodeBuilder {\n\tfieldEncoderFromSchema(schema: TreeFieldStoredSchema): FieldEncoder;\n}\n\n/**\n * The policy for building a {@link FieldEncoder} for a field.\n */\nexport type FieldEncoderPolicy = (\n\tnodeBuilder: NodeEncodeBuilder,\n\tschema: TreeFieldStoredSchema,\n) => FieldEncoder;\n\n/**\n * The policy for building a {@link NodeEncoder} for a node.\n */\nexport type NodeEncoderPolicy = (\n\tfieldBuilder: FieldEncodeBuilder,\n\tschemaName: TreeNodeSchemaIdentifier,\n) => NodeEncoder;\n\nclass LazyFieldEncoder implements FieldEncoder {\n\tprivate encoderLazy: FieldEncoder | undefined;\n\n\tpublic constructor(\n\t\tpublic readonly nodeBuilder: NodeEncodeBuilder,\n\t\tpublic readonly fieldSchema: TreeFieldStoredSchema,\n\t\tprivate readonly fieldEncoderFromPolicy: FieldEncoderPolicy,\n\t) {}\n\tpublic encodeField(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tcontext: EncoderContext,\n\t\toutputBuffer: BufferFormat,\n\t): void {\n\t\tthis.encoder.encodeField(cursor, context, outputBuffer);\n\t}\n\n\tprivate get encoder(): FieldEncoder {\n\t\tif (this.encoderLazy === undefined) {\n\t\t\tthis.encoderLazy = this.fieldEncoderFromPolicy(this.nodeBuilder, this.fieldSchema);\n\t\t}\n\t\treturn this.encoderLazy;\n\t}\n\n\tpublic get shape(): Shape {\n\t\treturn this.encoder.shape;\n\t}\n}\n"]}
|
|
@@ -313,7 +313,7 @@ export class ForestIncrementalSummaryBuilder {
|
|
|
313
313
|
*/
|
|
314
314
|
decodeIncrementalChunk(referenceId, chunkDecoder) {
|
|
315
315
|
const ChunkLoadProperties = this.loadedChunksMap.get(`${referenceId}`);
|
|
316
|
-
assert(ChunkLoadProperties !== undefined, "Encoded incremental chunk not found");
|
|
316
|
+
assert(ChunkLoadProperties !== undefined, 0xc86 /* Encoded incremental chunk not found */);
|
|
317
317
|
const chunk = chunkDecoder(ChunkLoadProperties.encodedContents);
|
|
318
318
|
// Account for the reference about to be added in `chunkTrackingPropertiesMap`
|
|
319
319
|
// to ensure that no other users of this chunk think they have unique ownership.
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"incrementalSummaryBuilder.js","sourceRoot":"","sources":["../../../src/feature-libraries/forest-summary/incrementalSummaryBuilder.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,EAAE,MAAM,EAAE,MAAM,qCAAqC,CAAC;AAK7D,OAAO,EAAE,kBAAkB,EAAE,MAAM,wCAAwC,CAAC;AAC5E,OAAO,EACN,KAAK,EACL,cAAc,EACd,mBAAmB,GAGnB,MAAM,qBAAqB,CAAC;AAS7B,OAAO,EAAE,WAAW,EAAE,MAAM,oCAAoC,CAAC;AAGjE,OAAO,EAAE,YAAY,EAAE,MAAM,0CAA0C,CAAC;AAIxE;;;;GAIG;AACH,MAAM,CAAC,MAAM,uBAAuB,GAAG,YAAY,CAAC;AAEpD;;;;;GAKG;AACH,MAAM,oBAAoB,GAAG,UAAU,CAAC;AAExC;;GAEG;AACH,MAAM,CAAC,MAAM,0BAA0B,GAAG;IACzC,4CAA4C;IAC5C,QAAQ,EAAE,UAAU;IACpB,wCAAwC;IACxC,YAAY,EAAE,cAAc;CACnB,CAAC;AAoFX;;GAEG;AACH,MAAM,CAAN,IAAY,gCAgBX;AAhBD,WAAY,gCAAgC;IAC3C;;;;OAIG;IACH,qGAAW,CAAA;IACX;;;;;;;OAOG;IACH,mGAAU,CAAA;AACX,CAAC,EAhBW,gCAAgC,KAAhC,gCAAgC,QAgB3C;AAED;;;;GAIG;AACH,SAAS,uBAAuB,CAC/B,kBAA8C,EAC9C,wBAA8D;IAE9D,MAAM,CACL,kBAAkB,KAAK,0BAA0B,CAAC,QAAQ,EAC1D,KAAK,CAAC,4BAA4B,CAClC,CAAC;IACF,MAAM,CACL,wBAAwB,KAAK,SAAS,EACtC,KAAK,CAAC,0EAA0E,CAChF,CAAC;AACH,CAAC;AAED;;;;GAIG;AACH,SAAS,2BAA2B,CACnC,kBAA8C,EAC9C,wBAA8D;IAE9D,MAAM,CACL,kBAAkB,KAAK,0BAA0B,CAAC,YAAY,EAC9D,KAAK,CAAC,gCAAgC,CACtC,CAAC;IACF,MAAM,CACL,wBAAwB,KAAK,SAAS,EACtC,KAAK,CAAC,0EAA0E,CAChF,CAAC;AACH,CAAC;AAED,4CAA4C;AAC5C;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA2CG;AACH,2CAA2C;AAC3C,MAAM,OAAO,+BAA+B;IA2C3C,YACkB,wBAAiC,EACjC,gBAAiE,EAClE,yBAAoD,EACnD,qBAA6B;QAH7B,6BAAwB,GAAxB,wBAAwB,CAAS;QACjC,qBAAgB,GAAhB,gBAAgB,CAAiD;QAClE,8BAAyB,GAAzB,yBAAyB,CAA2B;QACnD,0BAAqB,GAArB,qBAAqB,CAAQ;QA9C/C;;WAEG;QACK,oBAAe,GAAqB,KAAK,CAAC,CAAC,CAAC,CAAC;QAErD;;;WAGG;QACc,+BAA0B,GAIvC,IAAI,GAAG,EAAE,CAAC;QAEd;;WAEG;QACI,uBAAkB,GACxB,0BAA0B,CAAC,YAAY,CAAC;QAEzC;;WAEG;QACK,gCAA2B,GAAW,CAAC,CAAC,CAAC;QAQjD;;;WAGG;QACH;;;WAGG;QACc,oBAAe,GAAqC,IAAI,GAAG,EAAE,CAAC;IAO5E,CAAC;IAEJ;;;;;OAKG;IACI,KAAK,CAAC,IAAI,CAChB,QAAgC,EAChC,iBAAqF;QAErF,MAAM,UAAU,GAAG,QAAQ,CAAC,eAAe,EAAE,EAAE,CAAC;QAChD,wGAAwG;QACxG,qDAAqD;QACrD,IAAI,UAAU,KAAK,SAAS,EAAE,CAAC;YAC9B,OAAO;QACR,CAAC;QAED,uGAAuG;QACvG,uDAAuD;QACvD,MAAM,2BAA2B,GAAG,KAAK,EACxC,YAA2B,EAC3B,aAAqB,EACL,EAAE;YAClB,iGAAiG;YACjG,oDAAoD;YACpD,KAAK,MAAM,CAAC,gBAAgB,EAAE,iBAAiB,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,YAAY,CAAC,KAAK,CAAC,EAAE,CAAC;gBACxF,MAAM,gBAAgB,GAAG,GAAG,aAAa,GAAG,gBAAgB,EAAE,CAAC;gBAC/D,MAAM,iBAAiB,GAAG,GAAG,gBAAgB,IAAI,oBAAoB,EAAE,CAAC;gBACxE,IAAI,CAAC,CAAC,MAAM,QAAQ,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,EAAE,CAAC;oBACnD,MAAM,IAAI,YAAY,CACrB,0DAA0D,iBAAiB,EAAE,CAC7E,CAAC;gBACH,CAAC;gBACD,MAAM,aAAa,GAAG,MAAM,iBAAiB,CAAoB,iBAAiB,CAAC,CAAC;gBACpF,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,gBAAgB,EAAE;oBAC1C,eAAe,EAAE,aAAa;oBAC9B,WAAW,EAAE,gBAAgB;iBAC7B,CAAC,CAAC;gBAEH,MAAM,sBAAsB,GAAG,MAAM,CAAC,gBAAgB,CAAC,CAAC;gBACxD,IAAI,CAAC,eAAe,GAAG,KAAK,CAC3B,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,eAAe,EAAE,sBAAsB,GAAG,CAAC,CAAC,CAC1D,CAAC;gBAEF,wEAAwE;gBACxE,MAAM,2BAA2B,CAAC,iBAAiB,EAAE,GAAG,gBAAgB,GAAG,CAAC,CAAC;YAC9E,CAAC;QACF,CAAC,CAAC;QACF,MAAM,2BAA2B,CAAC,UAAU,EAAE,EAAE,CAAC,CAAC;IACnD,CAAC;IAED;;;;;;;;OAQG;IACI,YAAY,CAAC,IAInB;QACA,MAAM,EAAE,QAAQ,EAAE,yBAAyB,EAAE,SAAS,EAAE,GAAG,IAAI,CAAC;QAChE,6GAA6G;QAC7G,0EAA0E;QAC1E,0GAA0G;QAC1G,IAAI,CAAC,IAAI,CAAC,wBAAwB,IAAI,yBAAyB,KAAK,SAAS,EAAE,CAAC;YAC/E,OAAO,gCAAgC,CAAC,UAAU,CAAC;QACpD,CAAC;QAED,2BAA2B,CAAC,IAAI,CAAC,kBAAkB,EAAE,IAAI,CAAC,wBAAwB,CAAC,CAAC;QAEpF,IAAI,CAAC,kBAAkB,GAAG,0BAA0B,CAAC,QAAQ,CAAC;QAC9D,IAAI,CAAC,2BAA2B,GAAG,yBAAyB,CAAC,2BAA2B,CAAC;QACzF,IAAI,CAAC,wBAAwB,GAAG;YAC/B,qBAAqB,EAAE,yBAAyB,CAAC,qBAAqB;YACtE,qBAAqB,EAAE,yBAAyB,CAAC,WAAW;YAC5D,gBAAgB,EAAE,EAAE;YACpB,oBAAoB,EAAE,IAAI,kBAAkB,EAAE;YAC9C,QAAQ;YACR,SAAS;SACT,CAA
C;QACF,OAAO,gCAAgC,CAAC,WAAW,CAAC;IACrD,CAAC;IAED;;;OAGG;IACI,sBAAsB,CAC5B,MAA8B,EAC9B,YAAqD;QAErD,0GAA0G;QAC1G,uBAAuB,CAAC,IAAI,CAAC,kBAAkB,EAAE,IAAI,CAAC,wBAAwB,CAAC,CAAC;QAEhF,MAAM,iBAAiB,GAAuB,EAAE,CAAC;QACjD,MAAM,MAAM,GAAG,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC,CAAC;QAC7C,KAAK,MAAM,KAAK,IAAI,MAAM,EAAE,CAAC;YAC5B,IAAI,eAAuC,CAAC;YAE5C,8EAA8E;YAC9E,oGAAoG;YACpG,6EAA6E;YAC7E,MAAM,uBAAuB,GAAG,mBAAmB,CAClD,IAAI,CAAC,0BAA0B,EAC/B,IAAI,CAAC,2BAA2B,EAChC,KAAK,CACL,CAAC;YACF,IAAI,uBAAuB,KAAK,SAAS,IAAI,CAAC,IAAI,CAAC,wBAAwB,CAAC,QAAQ,EAAE,CAAC;gBACtF,eAAe,GAAG,uBAAuB,CAAC;gBAC1C,IAAI,CAAC,wBAAwB,CAAC,oBAAoB,CAAC,SAAS,CAC3D,GAAG,eAAe,CAAC,WAAW,EAAE,EAChC,WAAW,CAAC,IAAI,EAChB,GAAG,IAAI,CAAC,wBAAwB,CAAC,qBAAqB,IAAI,eAAe,CAAC,WAAW,EAAE,CACvF,CAAC;YACH,CAAC;iBAAM,CAAC;gBACP,6CAA6C;gBAC7C,MAAM,cAAc,GAAqB,KAAK,CAAC,IAAI,CAAC,eAAe,EAAE,CAAC,CAAC;gBAEvE,oGAAoG;gBACpG,2CAA2C;gBAC3C,qGAAqG;gBACrG,oDAAoD;gBACpD,IAAI,CAAC,wBAAwB,CAAC,gBAAgB,CAAC,IAAI,CAAC,cAAc,CAAC,CAAC;gBAEpE,eAAe,GAAG;oBACjB,WAAW,EAAE,cAAc;oBAC3B,WAAW,EAAE,IAAI,CAAC,wBAAwB,CAAC,gBAAgB,CAAC,IAAI,CAAC,GAAG,CAAC;iBACrE,CAAC;gBAEF,MAAM,oBAAoB,GAAG,IAAI,CAAC,wBAAwB,CAAC,oBAAoB,CAAC;gBAChF,oGAAoG;gBACpG,gDAAgD;gBAChD,oGAAoG;gBACpG,qGAAqG;gBACrG,MAAM,mBAAmB,GAAG,IAAI,kBAAkB,EAAE,CAAC;gBACrD,IAAI,CAAC,wBAAwB,CAAC,oBAAoB,GAAG,mBAAmB,CAAC;gBACzE,mBAAmB,CAAC,OAAO,CAC1B,oBAAoB,EACpB,IAAI,CAAC,wBAAwB,CAAC,SAAS,CAAC,YAAY,CAAC,KAAK,CAAC,CAAC,CAC5D,CAAC;gBAEF,oGAAoG;gBACpG,qEAAqE;gBACrE,oBAAoB,CAAC,YAAY,CAChC,GAAG,cAAc,EAAE,EACnB,mBAAmB,CAAC,cAAc,EAAE,CACpC,CAAC;gBAEF,6DAA6D;gBAC7D,IAAI,CAAC,wBAAwB,CAAC,oBAAoB,GAAG,oBAAoB,CAAC;gBAC1E,IAAI,CAAC,wBAAwB,CAAC,gBAAgB,CAAC,GAAG,EAAE,CAAC;YACtD,CAAC;YAED,cAAc,CACb,IAAI,CAAC,0BAA0B,EAC/B,IAAI,CAAC,wBAAwB,CAAC,qBAAqB,EACnD,KAAK,EACL,eAAe,CACf,CAAC;YACF,iBAAiB,CAAC,IAAI,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC;QACrD,CAAC;QACD,OAAO,iBAAiB,CAAC;IAC1B,CAAC;IAED;;;;;;;;OAQG;IACI,eAAe,CAAC,IAGtB;QACA,MAAM,EAAE,yBAAyB,EAAE,oBAAoB,EAAE,GAAG,IAAI,CAAC;QACjE,IAAI,CAAC,IAAI,CAAC,wBAAwB,IAAI,yBAAyB,KAAK,SAAS,EAAE,CAAC;YAC/E,MAAM,cAAc,GAAG,IAAI,kBAAkB,EAAE,CAAC;YAChD,cAAc,CAAC,OAAO,CAAC,uBAAuB,EAAE,oBAAoB,CAAC,CAAC;YACtE,OAAO,cAAc,CAAC,cAAc,EAAE,CAAC;QACxC,CAAC;QAED,uBAAuB,CAAC,IAAI,CAAC,kBAAkB,EAAE,IAAI,CAAC,wBAAwB,CAAC,CAAC;QAEhF,IAAI,CAAC,wBAAwB,CAAC,oBAAoB,CAAC,OAAO,CACzD,uBAAuB,EACvB,oBAAoB,CACpB,CAAC;QAEF,wEAAwE;QACxE,2GAA2G;QAC3G,2GAA2G;QAC3G,kEAAkE;QAClE,MAAM,wBAAwB,GAAG,IAAI,CAAC,0BAA0B,CAAC,GAAG,CACnE,IAAI,CAAC,2BAA2B,CAChC,CAAC;QACF,MAAM,yBAAyB,GAAG,IAAI,CAAC,0BAA0B,CAAC,GAAG,CACpE,IAAI,CAAC,wBAAwB,CAAC,qBAAqB,CACnD,CAAC;QACF,IAAI,wBAAwB,KAAK,SAAS,IAAI,yBAAyB,KAAK,SAAS,EAAE,CAAC;YACvF,KAAK,MAAM,CAAC,KAAK,EAAE,eAAe,CAAC,IAAI,wBAAwB,CAAC,OAAO,EAAE,EAAE,CAAC;gBAC3E,IAAI,CAAC,yBAAyB,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC;oBAC3C,yBAAyB,CAAC,GAAG,CAAC,KAAK,EAAE,eAAe,CAAC,CAAC;gBACvD,CAAC;YACF,CAAC;QACF,CAAC;QAED,oGAAoG;QACpG,4DAA4D;QAC5D,KAAK,MAAM,cAAc,IAAI,IAAI,CAAC,0BAA0B,CAAC,IAAI,EAAE,EAAE,CAAC;YACrE,IAAI,cAAc,GAAG,IAAI,CAAC,2BAA2B,EAAE,CAAC;gBACvD,IAAI,CAAC,0BAA0B,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;YACxD,CAAC;QACF,CAAC;QAED,IAAI,CAAC,kBAAkB,GAAG,0BAA0B,CAAC,YAAY,CAAC;QAClE,MAAM,WAAW,GAAG,IAAI,CAAC,wBAAwB,CAAC,oBAAoB,CAAC,cAAc,EAAE,CAAC;QACxF,IAAI,CAAC,wBAAwB,GAAG,SAAS,CAAC;QAC1C,OAAO,WAAW,CAAC;IACpB,CAAC;IAED;;OAEG;IACI,sBAAsB,CAC5B,WAA6B,EAC7B,YAAuD;QAEvD,MAAM,mBAAmB,GAAG,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,GAAG,WAAW,EAAE,CAAC,CAAC;QACvE,MAAM,CAAC,mBAAmB,KAAK,SAAS,EAAE,qCAAqC,CAAC,CAAC;QACjF,MAAM,KAAK,GAAG,YAAY,CAAC,mBAAmB,CAAC,eAAe,CAAC,CAAC;QAEhE,8EAA8E;QAC9E,gFAAgF;QAChF,sFAAsF;QACtF,KAAK,CAAC,cAAc,EAAE,CAAC;QACvB,mGAAmG;QACnG,mGAAmG;QACnG,yCAAy
C;QACzC,cAAc,CAAC,IAAI,CAAC,0BAA0B,EAAE,IAAI,CAAC,qBAAqB,EAAE,KAAK,EAAE;YAClF,WAAW;YACX,WAAW,EAAE,mBAAmB,CAAC,WAAW;SAC5C,CAAC,CAAC;QACH,OAAO,KAAK,CAAC;IACd,CAAC;CACD","sourcesContent":["/*!\n * Copyright (c) Microsoft Corporation and contributors. All rights reserved.\n * Licensed under the MIT License.\n */\n\nimport { assert } from \"@fluidframework/core-utils/internal\";\nimport type {\n\tIExperimentalIncrementalSummaryContext,\n\tISummaryTreeWithStats,\n} from \"@fluidframework/runtime-definitions/internal\";\nimport { SummaryTreeBuilder } from \"@fluidframework/runtime-utils/internal\";\nimport {\n\tbrand,\n\tsetInNestedMap,\n\ttryGetFromNestedMap,\n\ttype JsonCompatible,\n\ttype NestedMap,\n} from \"../../util/index.js\";\nimport type {\n\tChunkReferenceId,\n\tEncodedFieldBatch,\n\tIncrementalEncoderDecoder,\n\tIncrementalEncodingPolicy,\n\tTreeChunk,\n} from \"../chunked-forest/index.js\";\nimport type { ITreeCursorSynchronous } from \"../../core/index.js\";\nimport { SummaryType } from \"@fluidframework/driver-definitions\";\nimport type { IChannelStorageService } from \"@fluidframework/datastore-definitions/internal\";\nimport type { ISnapshotTree } from \"@fluidframework/driver-definitions/internal\";\nimport { LoggingError } from \"@fluidframework/telemetry-utils/internal\";\nimport type { IFluidHandle } from \"@fluidframework/core-interfaces\";\nimport type { SummaryElementStringifier } from \"../../shared-tree-core/index.js\";\n\n/**\n * The key for the blob under ForestSummarizer's root.\n * This blob contains the ForestCodec's output.\n * See {@link ForestIncrementalSummaryBuilder} for details on the summary structure.\n */\nexport const forestSummaryContentKey = \"ForestTree\";\n\n/**\n * The contents of an incremental chunk is under a summary tree node with its {@link ChunkReferenceId} as the key.\n * The inline portion of the chunk content is encoded with the forest codec is stored in a blob with this key.\n * The rest of the chunk contents is stored in the summary tree under the summary tree node.\n * See the summary format in {@link ForestIncrementalSummaryBuilder} for more details.\n */\nconst chunkContentsBlobKey = \"contents\";\n\n/**\n * State that tells whether a summary is currently being tracked.\n */\nexport const ForestSummaryTrackingState = {\n\t/** A summary is currently being tracked. */\n\tTracking: \"Tracking\",\n\t/** A summary is ready to be tracked. 
*/\n\tReadyToTrack: \"ReadyToTrack\",\n} as const;\nexport type ForestSummaryTrackingState =\n\t(typeof ForestSummaryTrackingState)[keyof typeof ForestSummaryTrackingState];\n\n/**\n * The properties of a chunk tracked during the loading process.\n * These are used to identify a chunk when it is decoded and recreate the tracking state\n * as it was when the summary that the client is loading from was generated.\n *\n * An encoded chunk, paired with a location it can be reused / reloaded from.\n * @remarks\n * This identifies a location in a specific summary where `encodedContents` was loaded from.\n *\n * When summarizing, Fluid always ensures the summary that the summary client is allowed to reuse content from\n * is the one it loaded from, so tracking this on load is sufficient for now:\n * there is no need to track the equivalent data when summarizing.\n */\ninterface ChunkLoadProperties {\n\t/**\n\t * The encoded contents of the chunk.\n\t */\n\treadonly encodedContents: EncodedFieldBatch;\n\t/**\n\t * The path for this chunk's contents in the summary tree relative to the forest's summary tree.\n\t * This path is used to generate a summary handle for the chunk if it doesn't change between summaries.\n\t */\n\treadonly summaryPath: string;\n}\n\n/**\n * The properties of a chunk that is tracked for every summary.\n * If a chunk doesn't change between summaries,\n * these properties will be used to generate a summary handle for the chunk.\n */\ninterface ChunkSummaryProperties {\n\t/**\n\t * The reference ID of the chunk which uniquely identifies it under its parent's summary tree.\n\t * The summary for this chunk will be stored against this reference ID as key in the summary tree.\n\t */\n\treadonly referenceId: ChunkReferenceId;\n\t/**\n\t * The path for this chunk's summary in the summary tree relative to the forest's summary tree.\n\t * This path is used to generate a summary handle for the chunk if it doesn't change between summaries.\n\t */\n\treadonly summaryPath: string;\n}\n\n/**\n * The properties of a summary being tracked.\n */\ninterface TrackedSummaryProperties {\n\t/**\n\t * The sequence number of the summary in progress.\n\t */\n\treadonly summarySequenceNumber: number;\n\t/**\n\t * The base path for the latest summary that was successful.\n\t * This is used to generate summary handles.\n\t */\n\treadonly latestSummaryBasePath: string;\n\t/**\n\t * Whether the summary being tracked is a full tree summary.\n\t * If true, the summary will not contain any summary handles. 
All chunks must be summarized in full.\n\t */\n\treadonly fullTree: boolean;\n\t/**\n\t * Represents the path of a chunk in the summary tree relative to the forest's summary tree.\n\t * Each item in the array is the {@link ChunkReferenceId} of a chunk in the summary tree starting\n\t * from the chunk under forest summary tree.\n\t * When a chunk is summarized, this array will be used to generate the path for the chunk's summary in the\n\t * summary tree.\n\t */\n\treadonly chunkSummaryPath: ChunkReferenceId[];\n\t/**\n\t * The parent summary builder to use to build the incremental summary tree.\n\t * When a chunk is being summarized, it will add its summary to this builder against its reference ID.\n\t */\n\tparentSummaryBuilder: SummaryTreeBuilder;\n\t/**\n\t * Serializes content (including {@link (IFluidHandle:interface)}s) for adding to a summary blob.\n\t */\n\tstringify: SummaryElementStringifier;\n}\n\n/**\n * The behavior of the forest's incremental summary - whether the summary should be a single blob or incremental.\n */\nexport enum ForestIncrementalSummaryBehavior {\n\t/**\n\t * The forest can encode chunks incrementally, i.e., chunks that support incremental encoding will be encoded\n\t * separately - they will be added to a separate tree.\n\t * The incremental summary format is described in {@link ForestIncrementalSummaryBuilder}.\n\t */\n\tIncremental,\n\t/**\n\t * The forest should encode all of it's data in a single summary blob.\n\t * @remarks\n\t * The format of the summary will be the same as the old format (pre-incremental summaries) and is fully\n\t * backwards compatible with the old format. The summary will basically look like an incremental summary\n\t * with no incremental fields - it will only contain the \"ForestTree\" blob in the summary format described\n\t * in {@link ForestIncrementalSummaryBuilder}.\n\t */\n\tSingleBlob,\n}\n\n/**\n * Validates that a summary is currently being tracked and that the tracked summary properties are defined.\n * @param forestSummaryState - The current state of the forest summary tracking.\n * @param trackedSummaryProperties - The properties of the tracked summary, which must be available.\n */\nfunction validateTrackingSummary(\n\tforestSummaryState: ForestSummaryTrackingState,\n\ttrackedSummaryProperties: TrackedSummaryProperties | undefined,\n): asserts trackedSummaryProperties is TrackedSummaryProperties {\n\tassert(\n\t\tforestSummaryState === ForestSummaryTrackingState.Tracking,\n\t\t0xc22 /* Not tracking a summary */,\n\t);\n\tassert(\n\t\ttrackedSummaryProperties !== undefined,\n\t\t0xc23 /* Tracked summary properties must be available when tracking a summary */,\n\t);\n}\n\n/**\n * Validates that a summary is ready to be tracked and that the tracked summary properties are undefined.\n * @param forestSummaryState - The current state of the forest summary tracking.\n * @param trackedSummaryProperties - The properties of the tracked summary, which must be undefined.\n */\nfunction validateReadyToTrackSummary(\n\tforestSummaryState: ForestSummaryTrackingState,\n\ttrackedSummaryProperties: TrackedSummaryProperties | undefined,\n): asserts trackedSummaryProperties is undefined {\n\tassert(\n\t\tforestSummaryState === ForestSummaryTrackingState.ReadyToTrack,\n\t\t0xc24 /* Already tracking a summary */,\n\t);\n\tassert(\n\t\ttrackedSummaryProperties === undefined,\n\t\t0xc25 /* Tracked summary properties must not be available when ready to track */,\n\t);\n}\n\n/* eslint-disable jsdoc/check-indentation */\n/**\n * Tracks and 
builds the incremental summary tree for a forest where chunks that support incremental encoding are\n * stored in a separate tree in the summary under its {@link ChunkReferenceId}.\n * The summary tree for a chunk is self-sufficient and can be independently loaded and used to reconstruct the\n * chunk's contents without any additional context from its parent.\n *\n * An example summary tree with incremental summary:\n * Forest\n * ├── ForestTree\n * ├── 0\n * | ├── contents\n * | ├── 1\n * | | ├── contents\n * | | ├── 2\n * | | | ├── contents\n * | ├── 3 - \".../Forest/ForestTree/0/1/3\"\n * ├── 4\n * | ├── contents\n * | ├── ...\n * ├── 5 - \"/.../Forest/ForestTree/5\"\n * - Forest is a summary tree node added by the shared tree and contains the following:\n * - The inline portion of the top-level forest content is stored in a summary blob called \"ForestTree\".\n * It also contains the {@link ChunkReferenceId}s of the incremental chunks under it.\n * - The summary for each incremental chunk under it is stored against its {@link ChunkReferenceId}.\n * - For each chunk, the structure of the summary tree is the same as the Forest. It contains the following:\n * - The inline portion of the chunk content is stored in a blob called \"contents\".\n * It also contains the {@link ChunkReferenceId}s of the incremental chunks under it.\n * - The summary for each incremental chunk under it is stored against its {@link ChunkReferenceId}.\n * - Chunks that do not change between summaries are summarized as handles in the summary tree.\n * @remarks\n * It may seem inconsistent that although the structure for the top-level forest tree is similar to that of\n * an incremental chunk, its content is stored in a summary blob called \"ForestTree\" while the content for\n * the incremental chunks are stored in a summary blob called \"contents\".\n * This is to keep this summary backwards compatible with old format (before incremental summaries were added)\n * where the entire forest content was in a summary blob called \"ForestTree\". So, if incremental summaries were\n * disabled, the forest content will be fully backwards compatible.\n * Note that this limits reusing the root node in a location other than root and a non-root node in the root.\n * We could phase this out by switching to write the top-level contents under \"contents\" if we want to support\n * the above. However, there is no plan to do that for now.\n *\n * TODO: AB#46752\n * Add strong types for the summary structure to document it better. 
It will help make it super clear what the actual\n * format is in a way that can easily be linked to, documented and inspected.\n */\n/* eslint-enable jsdoc/check-indentation */\nexport class ForestIncrementalSummaryBuilder implements IncrementalEncoderDecoder {\n\t/**\n\t * The next reference ID to use for a chunk.\n\t */\n\tprivate nextReferenceId: ChunkReferenceId = brand(0);\n\n\t/**\n\t * For a given summary sequence number, keeps track of a chunk's properties that will be used to generate\n\t * a summary handle for the chunk if it does not change between summaries.\n\t */\n\tprivate readonly chunkTrackingPropertiesMap: NestedMap<\n\t\tnumber,\n\t\tTreeChunk,\n\t\tChunkSummaryProperties\n\t> = new Map();\n\n\t/**\n\t * The state indicating whether a summary is currently being tracked or not.\n\t */\n\tpublic forestSummaryState: ForestSummaryTrackingState =\n\t\tForestSummaryTrackingState.ReadyToTrack;\n\n\t/**\n\t * The sequence number of the latest summary that was successful.\n\t */\n\tprivate latestSummarySequenceNumber: number = -1;\n\n\t/**\n\t * The current state of the summary being tracked.\n\t * This is undefined if no summary is currently being tracked.\n\t */\n\tprivate trackedSummaryProperties: TrackedSummaryProperties | undefined;\n\n\t/**\n\t * A map of chunk reference IDs to their encoded contents. This is typically used during the loading of the\n\t * forest to retrieve the contents of the chunks that were summarized incrementally.\n\t */\n\t/**\n\t * A map of chunk reference IDs to their {@link ChunkLoadProperties}.\n\t * This is used during the loading of the forest to track each chunk that is retrieved and decoded.\n\t */\n\tprivate readonly loadedChunksMap: Map<string, ChunkLoadProperties> = new Map();\n\n\tpublic constructor(\n\t\tprivate readonly enableIncrementalSummary: boolean,\n\t\tprivate readonly getChunkAtCursor: (cursor: ITreeCursorSynchronous) => TreeChunk[],\n\t\tpublic readonly shouldEncodeIncrementally: IncrementalEncodingPolicy,\n\t\tprivate readonly initialSequenceNumber: number,\n\t) {}\n\n\t/**\n\t * Must be called when the forest is loaded to download the encoded contents of incremental chunks.\n\t * @param services - The channel storage service to use to access the snapshot tree and download the\n\t * contents of the chunks.\n\t * @param readAndParse - A function that reads and parses a blob from the storage service.\n\t */\n\tpublic async load(\n\t\tservices: IChannelStorageService,\n\t\treadAndParseChunk: <T extends JsonCompatible<IFluidHandle>>(id: string) => Promise<T>,\n\t): Promise<void> {\n\t\tconst forestTree = services.getSnapshotTree?.();\n\t\t// Snapshot tree should be available when loading forest's contents. However, it is an optional function\n\t\t// and may not be implemented by the storage service.\n\t\tif (forestTree === undefined) {\n\t\t\treturn;\n\t\t}\n\n\t\t// Downloads the contents of incremental chunks in the given snapshot tree. Also, recursively downloads\n\t\t// the contents of incremental chunks in any sub-trees.\n\t\tconst downloadChunkContentsInTree = async (\n\t\t\tsnapshotTree: ISnapshotTree,\n\t\t\tparentTreeKey: string,\n\t\t): Promise<void> => {\n\t\t\t// All trees in the snapshot tree are for incremental chunks. 
The key is the chunk's reference ID\n\t\t\t// and the value is the snapshot tree for the chunk.\n\t\t\tfor (const [chunkReferenceId, chunkSnapshotTree] of Object.entries(snapshotTree.trees)) {\n\t\t\t\tconst chunkSubTreePath = `${parentTreeKey}${chunkReferenceId}`;\n\t\t\t\tconst chunkContentsPath = `${chunkSubTreePath}/${chunkContentsBlobKey}`;\n\t\t\t\tif (!(await services.contains(chunkContentsPath))) {\n\t\t\t\t\tthrow new LoggingError(\n\t\t\t\t\t\t`SharedTree: Cannot find contents for incremental chunk ${chunkContentsPath}`,\n\t\t\t\t\t);\n\t\t\t\t}\n\t\t\t\tconst chunkContents = await readAndParseChunk<EncodedFieldBatch>(chunkContentsPath);\n\t\t\t\tthis.loadedChunksMap.set(chunkReferenceId, {\n\t\t\t\t\tencodedContents: chunkContents,\n\t\t\t\t\tsummaryPath: chunkSubTreePath,\n\t\t\t\t});\n\n\t\t\t\tconst chunkReferenceIdNumber = Number(chunkReferenceId);\n\t\t\t\tthis.nextReferenceId = brand(\n\t\t\t\t\tMath.max(this.nextReferenceId, chunkReferenceIdNumber + 1),\n\t\t\t\t);\n\n\t\t\t\t// Recursively download the contents of chunks in this chunk's sub tree.\n\t\t\t\tawait downloadChunkContentsInTree(chunkSnapshotTree, `${chunkSubTreePath}/`);\n\t\t\t}\n\t\t};\n\t\tawait downloadChunkContentsInTree(forestTree, \"\");\n\t}\n\n\t/**\n\t * Must be called when starting a new forest summary to track it.\n\t * @param fullTree - Whether the summary is a full tree summary. If true, the summary will not contain\n\t * any summary handles. All chunks must be summarized in full.\n\t * @param incrementalSummaryContext - The context for the incremental summary that contains the sequence numbers\n\t * for the current and latest summaries.\n\t * @param stringify - Serializes content (including {@link (IFluidHandle:interface)}s) for adding to a summary blob.\n\t * @returns the behavior of the forest's incremental summary.\n\t */\n\tpublic startSummary(args: {\n\t\tfullTree: boolean;\n\t\tincrementalSummaryContext: IExperimentalIncrementalSummaryContext | undefined;\n\t\tstringify: SummaryElementStringifier;\n\t}): ForestIncrementalSummaryBehavior {\n\t\tconst { fullTree, incrementalSummaryContext, stringify } = args;\n\t\t// If there is no incremental summary context, do not summarize incrementally. This happens in two scenarios:\n\t\t// 1. When summarizing a detached container, i.e., the first ever summary.\n\t\t// 2. 
When running GC, the default behavior is to call summarize on DDS without incrementalSummaryContext.\n\t\tif (!this.enableIncrementalSummary || incrementalSummaryContext === undefined) {\n\t\t\treturn ForestIncrementalSummaryBehavior.SingleBlob;\n\t\t}\n\n\t\tvalidateReadyToTrackSummary(this.forestSummaryState, this.trackedSummaryProperties);\n\n\t\tthis.forestSummaryState = ForestSummaryTrackingState.Tracking;\n\t\tthis.latestSummarySequenceNumber = incrementalSummaryContext.latestSummarySequenceNumber;\n\t\tthis.trackedSummaryProperties = {\n\t\t\tsummarySequenceNumber: incrementalSummaryContext.summarySequenceNumber,\n\t\t\tlatestSummaryBasePath: incrementalSummaryContext.summaryPath,\n\t\t\tchunkSummaryPath: [],\n\t\t\tparentSummaryBuilder: new SummaryTreeBuilder(),\n\t\t\tfullTree,\n\t\t\tstringify,\n\t\t};\n\t\treturn ForestIncrementalSummaryBehavior.Incremental;\n\t}\n\n\t/**\n\t * {@link IncrementalEncoder.encodeIncrementalField}\n\t * @remarks Returns an empty array if the field has no content.\n\t */\n\tpublic encodeIncrementalField(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tchunkEncoder: (chunk: TreeChunk) => EncodedFieldBatch,\n\t): ChunkReferenceId[] {\n\t\t// Validate that a summary is currently being tracked and that the tracked summary properties are defined.\n\t\tvalidateTrackingSummary(this.forestSummaryState, this.trackedSummaryProperties);\n\n\t\tconst chunkReferenceIds: ChunkReferenceId[] = [];\n\t\tconst chunks = this.getChunkAtCursor(cursor);\n\t\tfor (const chunk of chunks) {\n\t\t\tlet chunkProperties: ChunkSummaryProperties;\n\n\t\t\t// Try and get the properties of the chunk from the latest successful summary.\n\t\t\t// If it exists and the summary is not a full tree, use the properties to generate a summary handle.\n\t\t\t// If it does not exist, encode the chunk and generate new properties for it.\n\t\t\tconst previousChunkProperties = tryGetFromNestedMap(\n\t\t\t\tthis.chunkTrackingPropertiesMap,\n\t\t\t\tthis.latestSummarySequenceNumber,\n\t\t\t\tchunk,\n\t\t\t);\n\t\t\tif (previousChunkProperties !== undefined && !this.trackedSummaryProperties.fullTree) {\n\t\t\t\tchunkProperties = previousChunkProperties;\n\t\t\t\tthis.trackedSummaryProperties.parentSummaryBuilder.addHandle(\n\t\t\t\t\t`${chunkProperties.referenceId}`,\n\t\t\t\t\tSummaryType.Tree,\n\t\t\t\t\t`${this.trackedSummaryProperties.latestSummaryBasePath}/${chunkProperties.summaryPath}`,\n\t\t\t\t);\n\t\t\t} else {\n\t\t\t\t// Generate a new reference ID for the chunk.\n\t\t\t\tconst newReferenceId: ChunkReferenceId = brand(this.nextReferenceId++);\n\n\t\t\t\t// Add the reference ID of this chunk to the chunk summary path and use the path as the summary path\n\t\t\t\t// for the chunk in its summary properties.\n\t\t\t\t// This is done before encoding the chunk so that the summary path is updated correctly when encoding\n\t\t\t\t// any incremental chunks that are under this chunk.\n\t\t\t\tthis.trackedSummaryProperties.chunkSummaryPath.push(newReferenceId);\n\n\t\t\t\tchunkProperties = {\n\t\t\t\t\treferenceId: newReferenceId,\n\t\t\t\t\tsummaryPath: this.trackedSummaryProperties.chunkSummaryPath.join(\"/\"),\n\t\t\t\t};\n\n\t\t\t\tconst parentSummaryBuilder = this.trackedSummaryProperties.parentSummaryBuilder;\n\t\t\t\t// Create a new summary builder for this chunk to build its summary tree which will be stored in the\n\t\t\t\t// parent's summary tree under its reference ID.\n\t\t\t\t// Before encoding the chunk, set the parent summary builder to this chunk's summary builder so that\n\t\t\t\t// any 
incremental chunks in the subtree of this chunk will use that as their parent summary builder.\n\t\t\t\tconst chunkSummaryBuilder = new SummaryTreeBuilder();\n\t\t\t\tthis.trackedSummaryProperties.parentSummaryBuilder = chunkSummaryBuilder;\n\t\t\t\tchunkSummaryBuilder.addBlob(\n\t\t\t\t\tchunkContentsBlobKey,\n\t\t\t\t\tthis.trackedSummaryProperties.stringify(chunkEncoder(chunk)),\n\t\t\t\t);\n\n\t\t\t\t// Add this chunk's summary tree to the parent's summary tree. The summary tree contains its encoded\n\t\t\t\t// contents and the summary trees of any incremental chunks under it.\n\t\t\t\tparentSummaryBuilder.addWithStats(\n\t\t\t\t\t`${newReferenceId}`,\n\t\t\t\t\tchunkSummaryBuilder.getSummaryTree(),\n\t\t\t\t);\n\n\t\t\t\t// Restore the parent summary builder and chunk summary path.\n\t\t\t\tthis.trackedSummaryProperties.parentSummaryBuilder = parentSummaryBuilder;\n\t\t\t\tthis.trackedSummaryProperties.chunkSummaryPath.pop();\n\t\t\t}\n\n\t\t\tsetInNestedMap(\n\t\t\t\tthis.chunkTrackingPropertiesMap,\n\t\t\t\tthis.trackedSummaryProperties.summarySequenceNumber,\n\t\t\t\tchunk,\n\t\t\t\tchunkProperties,\n\t\t\t);\n\t\t\tchunkReferenceIds.push(chunkProperties.referenceId);\n\t\t}\n\t\treturn chunkReferenceIds;\n\t}\n\n\t/**\n\t * Must be called after summary generation is complete to finish tracking the summary.\n\t * It clears any tracking state and deletes the tracking properties for summaries that are older than the\n\t * latest successful summary.\n\t * @param incrementalSummaryContext - The context for the incremental summary that contains the sequence numbers.\n\t * If this is undefined, the summary tree will only contain a summary blob for `forestSummaryContent`.\n\t * @param forestSummaryContent - The stringified ForestCodec output of top-level Forest content.\n\t * @returns the Forest's summary tree.\n\t */\n\tpublic completeSummary(args: {\n\t\tincrementalSummaryContext: IExperimentalIncrementalSummaryContext | undefined;\n\t\tforestSummaryContent: string;\n\t}): ISummaryTreeWithStats {\n\t\tconst { incrementalSummaryContext, forestSummaryContent } = args;\n\t\tif (!this.enableIncrementalSummary || incrementalSummaryContext === undefined) {\n\t\t\tconst summaryBuilder = new SummaryTreeBuilder();\n\t\t\tsummaryBuilder.addBlob(forestSummaryContentKey, forestSummaryContent);\n\t\t\treturn summaryBuilder.getSummaryTree();\n\t\t}\n\n\t\tvalidateTrackingSummary(this.forestSummaryState, this.trackedSummaryProperties);\n\n\t\tthis.trackedSummaryProperties.parentSummaryBuilder.addBlob(\n\t\t\tforestSummaryContentKey,\n\t\t\tforestSummaryContent,\n\t\t);\n\n\t\t// Copy over the entries from the latest summary to the current summary.\n\t\t// In the current summary, there can be fields that haven't changed since the latest summary and the chunks\n\t\t// in these fields and in any of its children weren't encoded. 
So, we need get the entries for these chunks\n\t\t// to be able to incrementally summarize them in the next summary.\n\t\tconst latestSummaryTrackingMap = this.chunkTrackingPropertiesMap.get(\n\t\t\tthis.latestSummarySequenceNumber,\n\t\t);\n\t\tconst currentSummaryTrackingMap = this.chunkTrackingPropertiesMap.get(\n\t\t\tthis.trackedSummaryProperties.summarySequenceNumber,\n\t\t);\n\t\tif (latestSummaryTrackingMap !== undefined && currentSummaryTrackingMap !== undefined) {\n\t\t\tfor (const [chunk, chunkProperties] of latestSummaryTrackingMap.entries()) {\n\t\t\t\tif (!currentSummaryTrackingMap.has(chunk)) {\n\t\t\t\t\tcurrentSummaryTrackingMap.set(chunk, chunkProperties);\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\t// Delete tracking for summaries that are older than the latest successful summary because they will\n\t\t// never be referenced again for generating summary handles.\n\t\tfor (const sequenceNumber of this.chunkTrackingPropertiesMap.keys()) {\n\t\t\tif (sequenceNumber < this.latestSummarySequenceNumber) {\n\t\t\t\tthis.chunkTrackingPropertiesMap.delete(sequenceNumber);\n\t\t\t}\n\t\t}\n\n\t\tthis.forestSummaryState = ForestSummaryTrackingState.ReadyToTrack;\n\t\tconst summaryTree = this.trackedSummaryProperties.parentSummaryBuilder.getSummaryTree();\n\t\tthis.trackedSummaryProperties = undefined;\n\t\treturn summaryTree;\n\t}\n\n\t/**\n\t * {@link IncrementalEncoder.decodeIncrementalChunk}\n\t */\n\tpublic decodeIncrementalChunk(\n\t\treferenceId: ChunkReferenceId,\n\t\tchunkDecoder: (encoded: EncodedFieldBatch) => TreeChunk,\n\t): TreeChunk {\n\t\tconst ChunkLoadProperties = this.loadedChunksMap.get(`${referenceId}`);\n\t\tassert(ChunkLoadProperties !== undefined, \"Encoded incremental chunk not found\");\n\t\tconst chunk = chunkDecoder(ChunkLoadProperties.encodedContents);\n\n\t\t// Account for the reference about to be added in `chunkTrackingPropertiesMap`\n\t\t// to ensure that no other users of this chunk think they have unique ownership.\n\t\t// This prevents prevent whoever this chunk is returned to from modifying it in-place.\n\t\tchunk.referenceAdded();\n\t\t// Track the decoded chunk. This will recreate the tracking state when the summary that this client\n\t\t// is loaded from was generated. This is needed to ensure that incremental summaries work correctly\n\t\t// when a new client starts to summarize.\n\t\tsetInNestedMap(this.chunkTrackingPropertiesMap, this.initialSequenceNumber, chunk, {\n\t\t\treferenceId,\n\t\t\tsummaryPath: ChunkLoadProperties.summaryPath,\n\t\t});\n\t\treturn chunk;\n\t}\n}\n"]}
|
|
1
|
+
{"version":3,"file":"incrementalSummaryBuilder.js","sourceRoot":"","sources":["../../../src/feature-libraries/forest-summary/incrementalSummaryBuilder.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,EAAE,MAAM,EAAE,MAAM,qCAAqC,CAAC;AAK7D,OAAO,EAAE,kBAAkB,EAAE,MAAM,wCAAwC,CAAC;AAC5E,OAAO,EACN,KAAK,EACL,cAAc,EACd,mBAAmB,GAGnB,MAAM,qBAAqB,CAAC;AAS7B,OAAO,EAAE,WAAW,EAAE,MAAM,oCAAoC,CAAC;AAGjE,OAAO,EAAE,YAAY,EAAE,MAAM,0CAA0C,CAAC;AAIxE;;;;GAIG;AACH,MAAM,CAAC,MAAM,uBAAuB,GAAG,YAAY,CAAC;AAEpD;;;;;GAKG;AACH,MAAM,oBAAoB,GAAG,UAAU,CAAC;AAExC;;GAEG;AACH,MAAM,CAAC,MAAM,0BAA0B,GAAG;IACzC,4CAA4C;IAC5C,QAAQ,EAAE,UAAU;IACpB,wCAAwC;IACxC,YAAY,EAAE,cAAc;CACnB,CAAC;AAoFX;;GAEG;AACH,MAAM,CAAN,IAAY,gCAgBX;AAhBD,WAAY,gCAAgC;IAC3C;;;;OAIG;IACH,qGAAW,CAAA;IACX;;;;;;;OAOG;IACH,mGAAU,CAAA;AACX,CAAC,EAhBW,gCAAgC,KAAhC,gCAAgC,QAgB3C;AAED;;;;GAIG;AACH,SAAS,uBAAuB,CAC/B,kBAA8C,EAC9C,wBAA8D;IAE9D,MAAM,CACL,kBAAkB,KAAK,0BAA0B,CAAC,QAAQ,EAC1D,KAAK,CAAC,4BAA4B,CAClC,CAAC;IACF,MAAM,CACL,wBAAwB,KAAK,SAAS,EACtC,KAAK,CAAC,0EAA0E,CAChF,CAAC;AACH,CAAC;AAED;;;;GAIG;AACH,SAAS,2BAA2B,CACnC,kBAA8C,EAC9C,wBAA8D;IAE9D,MAAM,CACL,kBAAkB,KAAK,0BAA0B,CAAC,YAAY,EAC9D,KAAK,CAAC,gCAAgC,CACtC,CAAC;IACF,MAAM,CACL,wBAAwB,KAAK,SAAS,EACtC,KAAK,CAAC,0EAA0E,CAChF,CAAC;AACH,CAAC;AAED,4CAA4C;AAC5C;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA2CG;AACH,2CAA2C;AAC3C,MAAM,OAAO,+BAA+B;IA2C3C,YACkB,wBAAiC,EACjC,gBAAiE,EAClE,yBAAoD,EACnD,qBAA6B;QAH7B,6BAAwB,GAAxB,wBAAwB,CAAS;QACjC,qBAAgB,GAAhB,gBAAgB,CAAiD;QAClE,8BAAyB,GAAzB,yBAAyB,CAA2B;QACnD,0BAAqB,GAArB,qBAAqB,CAAQ;QA9C/C;;WAEG;QACK,oBAAe,GAAqB,KAAK,CAAC,CAAC,CAAC,CAAC;QAErD;;;WAGG;QACc,+BAA0B,GAIvC,IAAI,GAAG,EAAE,CAAC;QAEd;;WAEG;QACI,uBAAkB,GACxB,0BAA0B,CAAC,YAAY,CAAC;QAEzC;;WAEG;QACK,gCAA2B,GAAW,CAAC,CAAC,CAAC;QAQjD;;;WAGG;QACH;;;WAGG;QACc,oBAAe,GAAqC,IAAI,GAAG,EAAE,CAAC;IAO5E,CAAC;IAEJ;;;;;OAKG;IACI,KAAK,CAAC,IAAI,CAChB,QAAgC,EAChC,iBAAqF;QAErF,MAAM,UAAU,GAAG,QAAQ,CAAC,eAAe,EAAE,EAAE,CAAC;QAChD,wGAAwG;QACxG,qDAAqD;QACrD,IAAI,UAAU,KAAK,SAAS,EAAE,CAAC;YAC9B,OAAO;QACR,CAAC;QAED,uGAAuG;QACvG,uDAAuD;QACvD,MAAM,2BAA2B,GAAG,KAAK,EACxC,YAA2B,EAC3B,aAAqB,EACL,EAAE;YAClB,iGAAiG;YACjG,oDAAoD;YACpD,KAAK,MAAM,CAAC,gBAAgB,EAAE,iBAAiB,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,YAAY,CAAC,KAAK,CAAC,EAAE,CAAC;gBACxF,MAAM,gBAAgB,GAAG,GAAG,aAAa,GAAG,gBAAgB,EAAE,CAAC;gBAC/D,MAAM,iBAAiB,GAAG,GAAG,gBAAgB,IAAI,oBAAoB,EAAE,CAAC;gBACxE,IAAI,CAAC,CAAC,MAAM,QAAQ,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,EAAE,CAAC;oBACnD,MAAM,IAAI,YAAY,CACrB,0DAA0D,iBAAiB,EAAE,CAC7E,CAAC;gBACH,CAAC;gBACD,MAAM,aAAa,GAAG,MAAM,iBAAiB,CAAoB,iBAAiB,CAAC,CAAC;gBACpF,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,gBAAgB,EAAE;oBAC1C,eAAe,EAAE,aAAa;oBAC9B,WAAW,EAAE,gBAAgB;iBAC7B,CAAC,CAAC;gBAEH,MAAM,sBAAsB,GAAG,MAAM,CAAC,gBAAgB,CAAC,CAAC;gBACxD,IAAI,CAAC,eAAe,GAAG,KAAK,CAC3B,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,eAAe,EAAE,sBAAsB,GAAG,CAAC,CAAC,CAC1D,CAAC;gBAEF,wEAAwE;gBACxE,MAAM,2BAA2B,CAAC,iBAAiB,EAAE,GAAG,gBAAgB,GAAG,CAAC,CAAC;YAC9E,CAAC;QACF,CAAC,CAAC;QACF,MAAM,2BAA2B,CAAC,UAAU,EAAE,EAAE,CAAC,CAAC;IACnD,CAAC;IAED;;;;;;;;OAQG;IACI,YAAY,CAAC,IAInB;QACA,MAAM,EAAE,QAAQ,EAAE,yBAAyB,EAAE,SAAS,EAAE,GAAG,IAAI,CAAC;QAChE,6GAA6G;QAC7G,0EAA0E;QAC1E,0GAA0G;QAC1G,IAAI,CAAC,IAAI,CAAC,wBAAwB,IAAI,yBAAyB,KAAK,SAAS,EAAE,CAAC;YAC/E,OAAO,gCAAgC,CAAC,UAAU,CAAC;QACpD,CAAC;QAED,2BAA2B,CAAC,IAAI,CAAC,kBAAkB,EAAE,IAAI,CAAC,wBAAwB,CAAC,CAAC;QAEpF,IAAI,CAAC,kBAAkB,GAAG,0BAA0B,CAAC,QAAQ,CAAC;QAC9D,IAAI,CAAC,2BAA2B,GAAG,yBAAyB,CAAC,2BAA2B,CAAC;QACzF,IAAI,CAAC,wBAAwB,GAAG;YAC/B,qBAAqB,EAAE,yBAAyB,CAAC,qBAAqB;YACtE,qBAAqB,EAAE,yBAAyB,CAAC,WAAW;YAC5D,gBAAgB,EAAE,EAAE;YACpB,oBAAoB,EAAE,IAAI,kBAAkB,EAAE;YAC9C,QAAQ;YACR,SAAS;SACT,CAA
C;QACF,OAAO,gCAAgC,CAAC,WAAW,CAAC;IACrD,CAAC;IAED;;;OAGG;IACI,sBAAsB,CAC5B,MAA8B,EAC9B,YAAqD;QAErD,0GAA0G;QAC1G,uBAAuB,CAAC,IAAI,CAAC,kBAAkB,EAAE,IAAI,CAAC,wBAAwB,CAAC,CAAC;QAEhF,MAAM,iBAAiB,GAAuB,EAAE,CAAC;QACjD,MAAM,MAAM,GAAG,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC,CAAC;QAC7C,KAAK,MAAM,KAAK,IAAI,MAAM,EAAE,CAAC;YAC5B,IAAI,eAAuC,CAAC;YAE5C,8EAA8E;YAC9E,oGAAoG;YACpG,6EAA6E;YAC7E,MAAM,uBAAuB,GAAG,mBAAmB,CAClD,IAAI,CAAC,0BAA0B,EAC/B,IAAI,CAAC,2BAA2B,EAChC,KAAK,CACL,CAAC;YACF,IAAI,uBAAuB,KAAK,SAAS,IAAI,CAAC,IAAI,CAAC,wBAAwB,CAAC,QAAQ,EAAE,CAAC;gBACtF,eAAe,GAAG,uBAAuB,CAAC;gBAC1C,IAAI,CAAC,wBAAwB,CAAC,oBAAoB,CAAC,SAAS,CAC3D,GAAG,eAAe,CAAC,WAAW,EAAE,EAChC,WAAW,CAAC,IAAI,EAChB,GAAG,IAAI,CAAC,wBAAwB,CAAC,qBAAqB,IAAI,eAAe,CAAC,WAAW,EAAE,CACvF,CAAC;YACH,CAAC;iBAAM,CAAC;gBACP,6CAA6C;gBAC7C,MAAM,cAAc,GAAqB,KAAK,CAAC,IAAI,CAAC,eAAe,EAAE,CAAC,CAAC;gBAEvE,oGAAoG;gBACpG,2CAA2C;gBAC3C,qGAAqG;gBACrG,oDAAoD;gBACpD,IAAI,CAAC,wBAAwB,CAAC,gBAAgB,CAAC,IAAI,CAAC,cAAc,CAAC,CAAC;gBAEpE,eAAe,GAAG;oBACjB,WAAW,EAAE,cAAc;oBAC3B,WAAW,EAAE,IAAI,CAAC,wBAAwB,CAAC,gBAAgB,CAAC,IAAI,CAAC,GAAG,CAAC;iBACrE,CAAC;gBAEF,MAAM,oBAAoB,GAAG,IAAI,CAAC,wBAAwB,CAAC,oBAAoB,CAAC;gBAChF,oGAAoG;gBACpG,gDAAgD;gBAChD,oGAAoG;gBACpG,qGAAqG;gBACrG,MAAM,mBAAmB,GAAG,IAAI,kBAAkB,EAAE,CAAC;gBACrD,IAAI,CAAC,wBAAwB,CAAC,oBAAoB,GAAG,mBAAmB,CAAC;gBACzE,mBAAmB,CAAC,OAAO,CAC1B,oBAAoB,EACpB,IAAI,CAAC,wBAAwB,CAAC,SAAS,CAAC,YAAY,CAAC,KAAK,CAAC,CAAC,CAC5D,CAAC;gBAEF,oGAAoG;gBACpG,qEAAqE;gBACrE,oBAAoB,CAAC,YAAY,CAChC,GAAG,cAAc,EAAE,EACnB,mBAAmB,CAAC,cAAc,EAAE,CACpC,CAAC;gBAEF,6DAA6D;gBAC7D,IAAI,CAAC,wBAAwB,CAAC,oBAAoB,GAAG,oBAAoB,CAAC;gBAC1E,IAAI,CAAC,wBAAwB,CAAC,gBAAgB,CAAC,GAAG,EAAE,CAAC;YACtD,CAAC;YAED,cAAc,CACb,IAAI,CAAC,0BAA0B,EAC/B,IAAI,CAAC,wBAAwB,CAAC,qBAAqB,EACnD,KAAK,EACL,eAAe,CACf,CAAC;YACF,iBAAiB,CAAC,IAAI,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC;QACrD,CAAC;QACD,OAAO,iBAAiB,CAAC;IAC1B,CAAC;IAED;;;;;;;;OAQG;IACI,eAAe,CAAC,IAGtB;QACA,MAAM,EAAE,yBAAyB,EAAE,oBAAoB,EAAE,GAAG,IAAI,CAAC;QACjE,IAAI,CAAC,IAAI,CAAC,wBAAwB,IAAI,yBAAyB,KAAK,SAAS,EAAE,CAAC;YAC/E,MAAM,cAAc,GAAG,IAAI,kBAAkB,EAAE,CAAC;YAChD,cAAc,CAAC,OAAO,CAAC,uBAAuB,EAAE,oBAAoB,CAAC,CAAC;YACtE,OAAO,cAAc,CAAC,cAAc,EAAE,CAAC;QACxC,CAAC;QAED,uBAAuB,CAAC,IAAI,CAAC,kBAAkB,EAAE,IAAI,CAAC,wBAAwB,CAAC,CAAC;QAEhF,IAAI,CAAC,wBAAwB,CAAC,oBAAoB,CAAC,OAAO,CACzD,uBAAuB,EACvB,oBAAoB,CACpB,CAAC;QAEF,wEAAwE;QACxE,2GAA2G;QAC3G,2GAA2G;QAC3G,kEAAkE;QAClE,MAAM,wBAAwB,GAAG,IAAI,CAAC,0BAA0B,CAAC,GAAG,CACnE,IAAI,CAAC,2BAA2B,CAChC,CAAC;QACF,MAAM,yBAAyB,GAAG,IAAI,CAAC,0BAA0B,CAAC,GAAG,CACpE,IAAI,CAAC,wBAAwB,CAAC,qBAAqB,CACnD,CAAC;QACF,IAAI,wBAAwB,KAAK,SAAS,IAAI,yBAAyB,KAAK,SAAS,EAAE,CAAC;YACvF,KAAK,MAAM,CAAC,KAAK,EAAE,eAAe,CAAC,IAAI,wBAAwB,CAAC,OAAO,EAAE,EAAE,CAAC;gBAC3E,IAAI,CAAC,yBAAyB,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC;oBAC3C,yBAAyB,CAAC,GAAG,CAAC,KAAK,EAAE,eAAe,CAAC,CAAC;gBACvD,CAAC;YACF,CAAC;QACF,CAAC;QAED,oGAAoG;QACpG,4DAA4D;QAC5D,KAAK,MAAM,cAAc,IAAI,IAAI,CAAC,0BAA0B,CAAC,IAAI,EAAE,EAAE,CAAC;YACrE,IAAI,cAAc,GAAG,IAAI,CAAC,2BAA2B,EAAE,CAAC;gBACvD,IAAI,CAAC,0BAA0B,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;YACxD,CAAC;QACF,CAAC;QAED,IAAI,CAAC,kBAAkB,GAAG,0BAA0B,CAAC,YAAY,CAAC;QAClE,MAAM,WAAW,GAAG,IAAI,CAAC,wBAAwB,CAAC,oBAAoB,CAAC,cAAc,EAAE,CAAC;QACxF,IAAI,CAAC,wBAAwB,GAAG,SAAS,CAAC;QAC1C,OAAO,WAAW,CAAC;IACpB,CAAC;IAED;;OAEG;IACI,sBAAsB,CAC5B,WAA6B,EAC7B,YAAuD;QAEvD,MAAM,mBAAmB,GAAG,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,GAAG,WAAW,EAAE,CAAC,CAAC;QACvE,MAAM,CAAC,mBAAmB,KAAK,SAAS,EAAE,KAAK,CAAC,yCAAyC,CAAC,CAAC;QAC3F,MAAM,KAAK,GAAG,YAAY,CAAC,mBAAmB,CAAC,eAAe,CAAC,CAAC;QAEhE,8EAA8E;QAC9E,gFAAgF;QAChF,sFAAsF;QACtF,KAAK,CAAC,cAAc,EAAE,CAAC;QACvB,mGAAmG;QACnG,mGAAmG;Q
ACnG,yCAAyC;QACzC,cAAc,CAAC,IAAI,CAAC,0BAA0B,EAAE,IAAI,CAAC,qBAAqB,EAAE,KAAK,EAAE;YAClF,WAAW;YACX,WAAW,EAAE,mBAAmB,CAAC,WAAW;SAC5C,CAAC,CAAC;QACH,OAAO,KAAK,CAAC;IACd,CAAC;CACD","sourcesContent":["/*!\n * Copyright (c) Microsoft Corporation and contributors. All rights reserved.\n * Licensed under the MIT License.\n */\n\nimport { assert } from \"@fluidframework/core-utils/internal\";\nimport type {\n\tIExperimentalIncrementalSummaryContext,\n\tISummaryTreeWithStats,\n} from \"@fluidframework/runtime-definitions/internal\";\nimport { SummaryTreeBuilder } from \"@fluidframework/runtime-utils/internal\";\nimport {\n\tbrand,\n\tsetInNestedMap,\n\ttryGetFromNestedMap,\n\ttype JsonCompatible,\n\ttype NestedMap,\n} from \"../../util/index.js\";\nimport type {\n\tChunkReferenceId,\n\tEncodedFieldBatch,\n\tIncrementalEncoderDecoder,\n\tIncrementalEncodingPolicy,\n\tTreeChunk,\n} from \"../chunked-forest/index.js\";\nimport type { ITreeCursorSynchronous } from \"../../core/index.js\";\nimport { SummaryType } from \"@fluidframework/driver-definitions\";\nimport type { IChannelStorageService } from \"@fluidframework/datastore-definitions/internal\";\nimport type { ISnapshotTree } from \"@fluidframework/driver-definitions/internal\";\nimport { LoggingError } from \"@fluidframework/telemetry-utils/internal\";\nimport type { IFluidHandle } from \"@fluidframework/core-interfaces\";\nimport type { SummaryElementStringifier } from \"../../shared-tree-core/index.js\";\n\n/**\n * The key for the blob under ForestSummarizer's root.\n * This blob contains the ForestCodec's output.\n * See {@link ForestIncrementalSummaryBuilder} for details on the summary structure.\n */\nexport const forestSummaryContentKey = \"ForestTree\";\n\n/**\n * The contents of an incremental chunk is under a summary tree node with its {@link ChunkReferenceId} as the key.\n * The inline portion of the chunk content is encoded with the forest codec is stored in a blob with this key.\n * The rest of the chunk contents is stored in the summary tree under the summary tree node.\n * See the summary format in {@link ForestIncrementalSummaryBuilder} for more details.\n */\nconst chunkContentsBlobKey = \"contents\";\n\n/**\n * State that tells whether a summary is currently being tracked.\n */\nexport const ForestSummaryTrackingState = {\n\t/** A summary is currently being tracked. */\n\tTracking: \"Tracking\",\n\t/** A summary is ready to be tracked. 
*/\n\tReadyToTrack: \"ReadyToTrack\",\n} as const;\nexport type ForestSummaryTrackingState =\n\t(typeof ForestSummaryTrackingState)[keyof typeof ForestSummaryTrackingState];\n\n/**\n * The properties of a chunk tracked during the loading process.\n * These are used to identify a chunk when it is decoded and recreate the tracking state\n * as it was when the summary that the client is loading from was generated.\n *\n * An encoded chunk, paired with a location it can be reused / reloaded from.\n * @remarks\n * This identifies a location in a specific summary where `encodedContents` was loaded from.\n *\n * When summarizing, Fluid always ensures the summary that the summary client is allowed to reuse content from\n * is the one it loaded from, so tracking this on load is sufficient for now:\n * there is no need to track the equivalent data when summarizing.\n */\ninterface ChunkLoadProperties {\n\t/**\n\t * The encoded contents of the chunk.\n\t */\n\treadonly encodedContents: EncodedFieldBatch;\n\t/**\n\t * The path for this chunk's contents in the summary tree relative to the forest's summary tree.\n\t * This path is used to generate a summary handle for the chunk if it doesn't change between summaries.\n\t */\n\treadonly summaryPath: string;\n}\n\n/**\n * The properties of a chunk that is tracked for every summary.\n * If a chunk doesn't change between summaries,\n * these properties will be used to generate a summary handle for the chunk.\n */\ninterface ChunkSummaryProperties {\n\t/**\n\t * The reference ID of the chunk which uniquely identifies it under its parent's summary tree.\n\t * The summary for this chunk will be stored against this reference ID as key in the summary tree.\n\t */\n\treadonly referenceId: ChunkReferenceId;\n\t/**\n\t * The path for this chunk's summary in the summary tree relative to the forest's summary tree.\n\t * This path is used to generate a summary handle for the chunk if it doesn't change between summaries.\n\t */\n\treadonly summaryPath: string;\n}\n\n/**\n * The properties of a summary being tracked.\n */\ninterface TrackedSummaryProperties {\n\t/**\n\t * The sequence number of the summary in progress.\n\t */\n\treadonly summarySequenceNumber: number;\n\t/**\n\t * The base path for the latest summary that was successful.\n\t * This is used to generate summary handles.\n\t */\n\treadonly latestSummaryBasePath: string;\n\t/**\n\t * Whether the summary being tracked is a full tree summary.\n\t * If true, the summary will not contain any summary handles. 
All chunks must be summarized in full.\n\t */\n\treadonly fullTree: boolean;\n\t/**\n\t * Represents the path of a chunk in the summary tree relative to the forest's summary tree.\n\t * Each item in the array is the {@link ChunkReferenceId} of a chunk in the summary tree starting\n\t * from the chunk under forest summary tree.\n\t * When a chunk is summarized, this array will be used to generate the path for the chunk's summary in the\n\t * summary tree.\n\t */\n\treadonly chunkSummaryPath: ChunkReferenceId[];\n\t/**\n\t * The parent summary builder to use to build the incremental summary tree.\n\t * When a chunk is being summarized, it will add its summary to this builder against its reference ID.\n\t */\n\tparentSummaryBuilder: SummaryTreeBuilder;\n\t/**\n\t * Serializes content (including {@link (IFluidHandle:interface)}s) for adding to a summary blob.\n\t */\n\tstringify: SummaryElementStringifier;\n}\n\n/**\n * The behavior of the forest's incremental summary - whether the summary should be a single blob or incremental.\n */\nexport enum ForestIncrementalSummaryBehavior {\n\t/**\n\t * The forest can encode chunks incrementally, i.e., chunks that support incremental encoding will be encoded\n\t * separately - they will be added to a separate tree.\n\t * The incremental summary format is described in {@link ForestIncrementalSummaryBuilder}.\n\t */\n\tIncremental,\n\t/**\n\t * The forest should encode all of it's data in a single summary blob.\n\t * @remarks\n\t * The format of the summary will be the same as the old format (pre-incremental summaries) and is fully\n\t * backwards compatible with the old format. The summary will basically look like an incremental summary\n\t * with no incremental fields - it will only contain the \"ForestTree\" blob in the summary format described\n\t * in {@link ForestIncrementalSummaryBuilder}.\n\t */\n\tSingleBlob,\n}\n\n/**\n * Validates that a summary is currently being tracked and that the tracked summary properties are defined.\n * @param forestSummaryState - The current state of the forest summary tracking.\n * @param trackedSummaryProperties - The properties of the tracked summary, which must be available.\n */\nfunction validateTrackingSummary(\n\tforestSummaryState: ForestSummaryTrackingState,\n\ttrackedSummaryProperties: TrackedSummaryProperties | undefined,\n): asserts trackedSummaryProperties is TrackedSummaryProperties {\n\tassert(\n\t\tforestSummaryState === ForestSummaryTrackingState.Tracking,\n\t\t0xc22 /* Not tracking a summary */,\n\t);\n\tassert(\n\t\ttrackedSummaryProperties !== undefined,\n\t\t0xc23 /* Tracked summary properties must be available when tracking a summary */,\n\t);\n}\n\n/**\n * Validates that a summary is ready to be tracked and that the tracked summary properties are undefined.\n * @param forestSummaryState - The current state of the forest summary tracking.\n * @param trackedSummaryProperties - The properties of the tracked summary, which must be undefined.\n */\nfunction validateReadyToTrackSummary(\n\tforestSummaryState: ForestSummaryTrackingState,\n\ttrackedSummaryProperties: TrackedSummaryProperties | undefined,\n): asserts trackedSummaryProperties is undefined {\n\tassert(\n\t\tforestSummaryState === ForestSummaryTrackingState.ReadyToTrack,\n\t\t0xc24 /* Already tracking a summary */,\n\t);\n\tassert(\n\t\ttrackedSummaryProperties === undefined,\n\t\t0xc25 /* Tracked summary properties must not be available when ready to track */,\n\t);\n}\n\n/* eslint-disable jsdoc/check-indentation */\n/**\n * Tracks and 
builds the incremental summary tree for a forest where chunks that support incremental encoding are\n * stored in a separate tree in the summary under its {@link ChunkReferenceId}.\n * The summary tree for a chunk is self-sufficient and can be independently loaded and used to reconstruct the\n * chunk's contents without any additional context from its parent.\n *\n * An example summary tree with incremental summary:\n * Forest\n * ├── ForestTree\n * ├── 0\n * | ├── contents\n * | ├── 1\n * | | ├── contents\n * | | ├── 2\n * | | | ├── contents\n * | ├── 3 - \".../Forest/ForestTree/0/1/3\"\n * ├── 4\n * | ├── contents\n * | ├── ...\n * ├── 5 - \"/.../Forest/ForestTree/5\"\n * - Forest is a summary tree node added by the shared tree and contains the following:\n * - The inline portion of the top-level forest content is stored in a summary blob called \"ForestTree\".\n * It also contains the {@link ChunkReferenceId}s of the incremental chunks under it.\n * - The summary for each incremental chunk under it is stored against its {@link ChunkReferenceId}.\n * - For each chunk, the structure of the summary tree is the same as the Forest. It contains the following:\n * - The inline portion of the chunk content is stored in a blob called \"contents\".\n * It also contains the {@link ChunkReferenceId}s of the incremental chunks under it.\n * - The summary for each incremental chunk under it is stored against its {@link ChunkReferenceId}.\n * - Chunks that do not change between summaries are summarized as handles in the summary tree.\n * @remarks\n * It may seem inconsistent that although the structure for the top-level forest tree is similar to that of\n * an incremental chunk, its content is stored in a summary blob called \"ForestTree\" while the content for\n * the incremental chunks are stored in a summary blob called \"contents\".\n * This is to keep this summary backwards compatible with old format (before incremental summaries were added)\n * where the entire forest content was in a summary blob called \"ForestTree\". So, if incremental summaries were\n * disabled, the forest content will be fully backwards compatible.\n * Note that this limits reusing the root node in a location other than root and a non-root node in the root.\n * We could phase this out by switching to write the top-level contents under \"contents\" if we want to support\n * the above. However, there is no plan to do that for now.\n *\n * TODO: AB#46752\n * Add strong types for the summary structure to document it better. 
It will help make it super clear what the actual\n * format is in a way that can easily be linked to, documented and inspected.\n */\n/* eslint-enable jsdoc/check-indentation */\nexport class ForestIncrementalSummaryBuilder implements IncrementalEncoderDecoder {\n\t/**\n\t * The next reference ID to use for a chunk.\n\t */\n\tprivate nextReferenceId: ChunkReferenceId = brand(0);\n\n\t/**\n\t * For a given summary sequence number, keeps track of a chunk's properties that will be used to generate\n\t * a summary handle for the chunk if it does not change between summaries.\n\t */\n\tprivate readonly chunkTrackingPropertiesMap: NestedMap<\n\t\tnumber,\n\t\tTreeChunk,\n\t\tChunkSummaryProperties\n\t> = new Map();\n\n\t/**\n\t * The state indicating whether a summary is currently being tracked or not.\n\t */\n\tpublic forestSummaryState: ForestSummaryTrackingState =\n\t\tForestSummaryTrackingState.ReadyToTrack;\n\n\t/**\n\t * The sequence number of the latest summary that was successful.\n\t */\n\tprivate latestSummarySequenceNumber: number = -1;\n\n\t/**\n\t * The current state of the summary being tracked.\n\t * This is undefined if no summary is currently being tracked.\n\t */\n\tprivate trackedSummaryProperties: TrackedSummaryProperties | undefined;\n\n\t/**\n\t * A map of chunk reference IDs to their encoded contents. This is typically used during the loading of the\n\t * forest to retrieve the contents of the chunks that were summarized incrementally.\n\t */\n\t/**\n\t * A map of chunk reference IDs to their {@link ChunkLoadProperties}.\n\t * This is used during the loading of the forest to track each chunk that is retrieved and decoded.\n\t */\n\tprivate readonly loadedChunksMap: Map<string, ChunkLoadProperties> = new Map();\n\n\tpublic constructor(\n\t\tprivate readonly enableIncrementalSummary: boolean,\n\t\tprivate readonly getChunkAtCursor: (cursor: ITreeCursorSynchronous) => TreeChunk[],\n\t\tpublic readonly shouldEncodeIncrementally: IncrementalEncodingPolicy,\n\t\tprivate readonly initialSequenceNumber: number,\n\t) {}\n\n\t/**\n\t * Must be called when the forest is loaded to download the encoded contents of incremental chunks.\n\t * @param services - The channel storage service to use to access the snapshot tree and download the\n\t * contents of the chunks.\n\t * @param readAndParse - A function that reads and parses a blob from the storage service.\n\t */\n\tpublic async load(\n\t\tservices: IChannelStorageService,\n\t\treadAndParseChunk: <T extends JsonCompatible<IFluidHandle>>(id: string) => Promise<T>,\n\t): Promise<void> {\n\t\tconst forestTree = services.getSnapshotTree?.();\n\t\t// Snapshot tree should be available when loading forest's contents. However, it is an optional function\n\t\t// and may not be implemented by the storage service.\n\t\tif (forestTree === undefined) {\n\t\t\treturn;\n\t\t}\n\n\t\t// Downloads the contents of incremental chunks in the given snapshot tree. Also, recursively downloads\n\t\t// the contents of incremental chunks in any sub-trees.\n\t\tconst downloadChunkContentsInTree = async (\n\t\t\tsnapshotTree: ISnapshotTree,\n\t\t\tparentTreeKey: string,\n\t\t): Promise<void> => {\n\t\t\t// All trees in the snapshot tree are for incremental chunks. 
The key is the chunk's reference ID\n\t\t\t// and the value is the snapshot tree for the chunk.\n\t\t\tfor (const [chunkReferenceId, chunkSnapshotTree] of Object.entries(snapshotTree.trees)) {\n\t\t\t\tconst chunkSubTreePath = `${parentTreeKey}${chunkReferenceId}`;\n\t\t\t\tconst chunkContentsPath = `${chunkSubTreePath}/${chunkContentsBlobKey}`;\n\t\t\t\tif (!(await services.contains(chunkContentsPath))) {\n\t\t\t\t\tthrow new LoggingError(\n\t\t\t\t\t\t`SharedTree: Cannot find contents for incremental chunk ${chunkContentsPath}`,\n\t\t\t\t\t);\n\t\t\t\t}\n\t\t\t\tconst chunkContents = await readAndParseChunk<EncodedFieldBatch>(chunkContentsPath);\n\t\t\t\tthis.loadedChunksMap.set(chunkReferenceId, {\n\t\t\t\t\tencodedContents: chunkContents,\n\t\t\t\t\tsummaryPath: chunkSubTreePath,\n\t\t\t\t});\n\n\t\t\t\tconst chunkReferenceIdNumber = Number(chunkReferenceId);\n\t\t\t\tthis.nextReferenceId = brand(\n\t\t\t\t\tMath.max(this.nextReferenceId, chunkReferenceIdNumber + 1),\n\t\t\t\t);\n\n\t\t\t\t// Recursively download the contents of chunks in this chunk's sub tree.\n\t\t\t\tawait downloadChunkContentsInTree(chunkSnapshotTree, `${chunkSubTreePath}/`);\n\t\t\t}\n\t\t};\n\t\tawait downloadChunkContentsInTree(forestTree, \"\");\n\t}\n\n\t/**\n\t * Must be called when starting a new forest summary to track it.\n\t * @param fullTree - Whether the summary is a full tree summary. If true, the summary will not contain\n\t * any summary handles. All chunks must be summarized in full.\n\t * @param incrementalSummaryContext - The context for the incremental summary that contains the sequence numbers\n\t * for the current and latest summaries.\n\t * @param stringify - Serializes content (including {@link (IFluidHandle:interface)}s) for adding to a summary blob.\n\t * @returns the behavior of the forest's incremental summary.\n\t */\n\tpublic startSummary(args: {\n\t\tfullTree: boolean;\n\t\tincrementalSummaryContext: IExperimentalIncrementalSummaryContext | undefined;\n\t\tstringify: SummaryElementStringifier;\n\t}): ForestIncrementalSummaryBehavior {\n\t\tconst { fullTree, incrementalSummaryContext, stringify } = args;\n\t\t// If there is no incremental summary context, do not summarize incrementally. This happens in two scenarios:\n\t\t// 1. When summarizing a detached container, i.e., the first ever summary.\n\t\t// 2. 
When running GC, the default behavior is to call summarize on DDS without incrementalSummaryContext.\n\t\tif (!this.enableIncrementalSummary || incrementalSummaryContext === undefined) {\n\t\t\treturn ForestIncrementalSummaryBehavior.SingleBlob;\n\t\t}\n\n\t\tvalidateReadyToTrackSummary(this.forestSummaryState, this.trackedSummaryProperties);\n\n\t\tthis.forestSummaryState = ForestSummaryTrackingState.Tracking;\n\t\tthis.latestSummarySequenceNumber = incrementalSummaryContext.latestSummarySequenceNumber;\n\t\tthis.trackedSummaryProperties = {\n\t\t\tsummarySequenceNumber: incrementalSummaryContext.summarySequenceNumber,\n\t\t\tlatestSummaryBasePath: incrementalSummaryContext.summaryPath,\n\t\t\tchunkSummaryPath: [],\n\t\t\tparentSummaryBuilder: new SummaryTreeBuilder(),\n\t\t\tfullTree,\n\t\t\tstringify,\n\t\t};\n\t\treturn ForestIncrementalSummaryBehavior.Incremental;\n\t}\n\n\t/**\n\t * {@link IncrementalEncoder.encodeIncrementalField}\n\t * @remarks Returns an empty array if the field has no content.\n\t */\n\tpublic encodeIncrementalField(\n\t\tcursor: ITreeCursorSynchronous,\n\t\tchunkEncoder: (chunk: TreeChunk) => EncodedFieldBatch,\n\t): ChunkReferenceId[] {\n\t\t// Validate that a summary is currently being tracked and that the tracked summary properties are defined.\n\t\tvalidateTrackingSummary(this.forestSummaryState, this.trackedSummaryProperties);\n\n\t\tconst chunkReferenceIds: ChunkReferenceId[] = [];\n\t\tconst chunks = this.getChunkAtCursor(cursor);\n\t\tfor (const chunk of chunks) {\n\t\t\tlet chunkProperties: ChunkSummaryProperties;\n\n\t\t\t// Try and get the properties of the chunk from the latest successful summary.\n\t\t\t// If it exists and the summary is not a full tree, use the properties to generate a summary handle.\n\t\t\t// If it does not exist, encode the chunk and generate new properties for it.\n\t\t\tconst previousChunkProperties = tryGetFromNestedMap(\n\t\t\t\tthis.chunkTrackingPropertiesMap,\n\t\t\t\tthis.latestSummarySequenceNumber,\n\t\t\t\tchunk,\n\t\t\t);\n\t\t\tif (previousChunkProperties !== undefined && !this.trackedSummaryProperties.fullTree) {\n\t\t\t\tchunkProperties = previousChunkProperties;\n\t\t\t\tthis.trackedSummaryProperties.parentSummaryBuilder.addHandle(\n\t\t\t\t\t`${chunkProperties.referenceId}`,\n\t\t\t\t\tSummaryType.Tree,\n\t\t\t\t\t`${this.trackedSummaryProperties.latestSummaryBasePath}/${chunkProperties.summaryPath}`,\n\t\t\t\t);\n\t\t\t} else {\n\t\t\t\t// Generate a new reference ID for the chunk.\n\t\t\t\tconst newReferenceId: ChunkReferenceId = brand(this.nextReferenceId++);\n\n\t\t\t\t// Add the reference ID of this chunk to the chunk summary path and use the path as the summary path\n\t\t\t\t// for the chunk in its summary properties.\n\t\t\t\t// This is done before encoding the chunk so that the summary path is updated correctly when encoding\n\t\t\t\t// any incremental chunks that are under this chunk.\n\t\t\t\tthis.trackedSummaryProperties.chunkSummaryPath.push(newReferenceId);\n\n\t\t\t\tchunkProperties = {\n\t\t\t\t\treferenceId: newReferenceId,\n\t\t\t\t\tsummaryPath: this.trackedSummaryProperties.chunkSummaryPath.join(\"/\"),\n\t\t\t\t};\n\n\t\t\t\tconst parentSummaryBuilder = this.trackedSummaryProperties.parentSummaryBuilder;\n\t\t\t\t// Create a new summary builder for this chunk to build its summary tree which will be stored in the\n\t\t\t\t// parent's summary tree under its reference ID.\n\t\t\t\t// Before encoding the chunk, set the parent summary builder to this chunk's summary builder so that\n\t\t\t\t// any 
incremental chunks in the subtree of this chunk will use that as their parent summary builder.\n\t\t\t\tconst chunkSummaryBuilder = new SummaryTreeBuilder();\n\t\t\t\tthis.trackedSummaryProperties.parentSummaryBuilder = chunkSummaryBuilder;\n\t\t\t\tchunkSummaryBuilder.addBlob(\n\t\t\t\t\tchunkContentsBlobKey,\n\t\t\t\t\tthis.trackedSummaryProperties.stringify(chunkEncoder(chunk)),\n\t\t\t\t);\n\n\t\t\t\t// Add this chunk's summary tree to the parent's summary tree. The summary tree contains its encoded\n\t\t\t\t// contents and the summary trees of any incremental chunks under it.\n\t\t\t\tparentSummaryBuilder.addWithStats(\n\t\t\t\t\t`${newReferenceId}`,\n\t\t\t\t\tchunkSummaryBuilder.getSummaryTree(),\n\t\t\t\t);\n\n\t\t\t\t// Restore the parent summary builder and chunk summary path.\n\t\t\t\tthis.trackedSummaryProperties.parentSummaryBuilder = parentSummaryBuilder;\n\t\t\t\tthis.trackedSummaryProperties.chunkSummaryPath.pop();\n\t\t\t}\n\n\t\t\tsetInNestedMap(\n\t\t\t\tthis.chunkTrackingPropertiesMap,\n\t\t\t\tthis.trackedSummaryProperties.summarySequenceNumber,\n\t\t\t\tchunk,\n\t\t\t\tchunkProperties,\n\t\t\t);\n\t\t\tchunkReferenceIds.push(chunkProperties.referenceId);\n\t\t}\n\t\treturn chunkReferenceIds;\n\t}\n\n\t/**\n\t * Must be called after summary generation is complete to finish tracking the summary.\n\t * It clears any tracking state and deletes the tracking properties for summaries that are older than the\n\t * latest successful summary.\n\t * @param incrementalSummaryContext - The context for the incremental summary that contains the sequence numbers.\n\t * If this is undefined, the summary tree will only contain a summary blob for `forestSummaryContent`.\n\t * @param forestSummaryContent - The stringified ForestCodec output of top-level Forest content.\n\t * @returns the Forest's summary tree.\n\t */\n\tpublic completeSummary(args: {\n\t\tincrementalSummaryContext: IExperimentalIncrementalSummaryContext | undefined;\n\t\tforestSummaryContent: string;\n\t}): ISummaryTreeWithStats {\n\t\tconst { incrementalSummaryContext, forestSummaryContent } = args;\n\t\tif (!this.enableIncrementalSummary || incrementalSummaryContext === undefined) {\n\t\t\tconst summaryBuilder = new SummaryTreeBuilder();\n\t\t\tsummaryBuilder.addBlob(forestSummaryContentKey, forestSummaryContent);\n\t\t\treturn summaryBuilder.getSummaryTree();\n\t\t}\n\n\t\tvalidateTrackingSummary(this.forestSummaryState, this.trackedSummaryProperties);\n\n\t\tthis.trackedSummaryProperties.parentSummaryBuilder.addBlob(\n\t\t\tforestSummaryContentKey,\n\t\t\tforestSummaryContent,\n\t\t);\n\n\t\t// Copy over the entries from the latest summary to the current summary.\n\t\t// In the current summary, there can be fields that haven't changed since the latest summary and the chunks\n\t\t// in these fields and in any of its children weren't encoded. 
So, we need get the entries for these chunks\n\t\t// to be able to incrementally summarize them in the next summary.\n\t\tconst latestSummaryTrackingMap = this.chunkTrackingPropertiesMap.get(\n\t\t\tthis.latestSummarySequenceNumber,\n\t\t);\n\t\tconst currentSummaryTrackingMap = this.chunkTrackingPropertiesMap.get(\n\t\t\tthis.trackedSummaryProperties.summarySequenceNumber,\n\t\t);\n\t\tif (latestSummaryTrackingMap !== undefined && currentSummaryTrackingMap !== undefined) {\n\t\t\tfor (const [chunk, chunkProperties] of latestSummaryTrackingMap.entries()) {\n\t\t\t\tif (!currentSummaryTrackingMap.has(chunk)) {\n\t\t\t\t\tcurrentSummaryTrackingMap.set(chunk, chunkProperties);\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\t// Delete tracking for summaries that are older than the latest successful summary because they will\n\t\t// never be referenced again for generating summary handles.\n\t\tfor (const sequenceNumber of this.chunkTrackingPropertiesMap.keys()) {\n\t\t\tif (sequenceNumber < this.latestSummarySequenceNumber) {\n\t\t\t\tthis.chunkTrackingPropertiesMap.delete(sequenceNumber);\n\t\t\t}\n\t\t}\n\n\t\tthis.forestSummaryState = ForestSummaryTrackingState.ReadyToTrack;\n\t\tconst summaryTree = this.trackedSummaryProperties.parentSummaryBuilder.getSummaryTree();\n\t\tthis.trackedSummaryProperties = undefined;\n\t\treturn summaryTree;\n\t}\n\n\t/**\n\t * {@link IncrementalEncoder.decodeIncrementalChunk}\n\t */\n\tpublic decodeIncrementalChunk(\n\t\treferenceId: ChunkReferenceId,\n\t\tchunkDecoder: (encoded: EncodedFieldBatch) => TreeChunk,\n\t): TreeChunk {\n\t\tconst ChunkLoadProperties = this.loadedChunksMap.get(`${referenceId}`);\n\t\tassert(ChunkLoadProperties !== undefined, 0xc86 /* Encoded incremental chunk not found */);\n\t\tconst chunk = chunkDecoder(ChunkLoadProperties.encodedContents);\n\n\t\t// Account for the reference about to be added in `chunkTrackingPropertiesMap`\n\t\t// to ensure that no other users of this chunk think they have unique ownership.\n\t\t// This prevents prevent whoever this chunk is returned to from modifying it in-place.\n\t\tchunk.referenceAdded();\n\t\t// Track the decoded chunk. This will recreate the tracking state when the summary that this client\n\t\t// is loaded from was generated. This is needed to ensure that incremental summaries work correctly\n\t\t// when a new client starts to summarize.\n\t\tsetInNestedMap(this.chunkTrackingPropertiesMap, this.initialSequenceNumber, chunk, {\n\t\t\treferenceId,\n\t\t\tsummaryPath: ChunkLoadProperties.summaryPath,\n\t\t});\n\t\treturn chunk;\n\t}\n}\n"]}
|
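The incremental summary format documented in the map above (a "ForestTree" blob at the root, one numbered subtree per incremental chunk, each with its own "contents" blob, and summary handles for chunks that did not change) can be pictured as a plain summary-tree literal. The sketch below is illustrative only: the keys `"0"`, `"1"`, and `"5"` are hypothetical chunk reference IDs, the blob contents and handle path are placeholders, and the real tree is produced by `SummaryTreeBuilder` inside `ForestIncrementalSummaryBuilder`.

```typescript
import { SummaryType } from "@fluidframework/driver-definitions";

// Rough shape of an incremental forest summary, mirroring the ASCII diagram in the
// ForestIncrementalSummaryBuilder docs. Keys "0", "1", "5" are hypothetical
// ChunkReferenceIds; blob contents and the handle path are placeholders.
const exampleForestSummary = {
	type: SummaryType.Tree,
	tree: {
		// Inline portion of the top-level forest content (ForestCodec output),
		// which also records the reference IDs of the incremental chunks below it.
		ForestTree: { type: SummaryType.Blob, content: "<encoded top-level forest content>" },
		// An incremental chunk summarized in full: its own "contents" blob plus
		// nested incremental chunks keyed by their reference IDs.
		"0": {
			type: SummaryType.Tree,
			tree: {
				contents: { type: SummaryType.Blob, content: "<encoded chunk 0 content>" },
				"1": {
					type: SummaryType.Tree,
					tree: {
						contents: { type: SummaryType.Blob, content: "<encoded chunk 1 content>" },
					},
				},
			},
		},
		// A chunk that is unchanged since the latest successful summary is written
		// as a handle into that summary instead of being re-encoded.
		"5": {
			type: SummaryType.Handle,
			handleType: SummaryType.Tree,
			handle: "/.../Forest/ForestTree/5",
		},
	},
};
```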
package/lib/packageVersion.d.ts
CHANGED
|
@@ -5,5 +5,5 @@
|
|
|
5
5
|
* THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY
|
|
6
6
|
*/
|
|
7
7
|
export declare const pkgName = "@fluidframework/tree";
|
|
8
|
-
export declare const pkgVersion = "2.70.0
|
|
8
|
+
export declare const pkgVersion = "2.70.0";
|
|
9
9
|
//# sourceMappingURL=packageVersion.d.ts.map
|
|
package/lib/packageVersion.d.ts.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"packageVersion.d.ts","sourceRoot":"","sources":["../src/packageVersion.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAEH,eAAO,MAAM,OAAO,yBAAyB,CAAC;AAC9C,eAAO,MAAM,UAAU,
|
|
1
|
+
{"version":3,"file":"packageVersion.d.ts","sourceRoot":"","sources":["../src/packageVersion.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAEH,eAAO,MAAM,OAAO,yBAAyB,CAAC;AAC9C,eAAO,MAAM,UAAU,WAAW,CAAC"}
|
package/lib/packageVersion.js
CHANGED
|
package/lib/packageVersion.js.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"packageVersion.js","sourceRoot":"","sources":["../src/packageVersion.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAEH,MAAM,CAAC,MAAM,OAAO,GAAG,sBAAsB,CAAC;AAC9C,MAAM,CAAC,MAAM,UAAU,GAAG,
|
|
1
|
+
{"version":3,"file":"packageVersion.js","sourceRoot":"","sources":["../src/packageVersion.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAEH,MAAM,CAAC,MAAM,OAAO,GAAG,sBAAsB,CAAC;AAC9C,MAAM,CAAC,MAAM,UAAU,GAAG,QAAQ,CAAC","sourcesContent":["/*!\n * Copyright (c) Microsoft Corporation and contributors. All rights reserved.\n * Licensed under the MIT License.\n *\n * THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY\n */\n\nexport const pkgName = \"@fluidframework/tree\";\nexport const pkgVersion = \"2.70.0\";\n"]}
|
|
package/lib/simple-tree/api/incrementalAllowedTypes.js
CHANGED
|
@@ -79,7 +79,7 @@ export function getShouldIncrementallySummarizeAllowedTypes(rootSchema) {
|
|
|
79
79
|
return false;
|
|
80
80
|
}
|
|
81
81
|
const allowedTypes = oneFromIterable(getTreeNodeSchemaPrivateData(targetNode).childAllowedTypes);
|
|
82
|
-
assert(allowedTypes !== undefined,
|
|
82
|
+
assert(allowedTypes !== undefined, 0xc87 /* Non object nodes with fields should only have one allowedTypes entry */);
|
|
83
83
|
return isIncrementalSummaryHintInAllowedTypes(allowedTypes);
|
|
84
84
|
};
|
|
85
85
|
}
|
|
package/lib/simple-tree/api/incrementalAllowedTypes.js.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"incrementalAllowedTypes.js","sourceRoot":"","sources":["../../../src/simple-tree/api/incrementalAllowedTypes.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAGH,OAAO,EAAE,4BAA4B,EAAyB,MAAM,kBAAkB,CAAC;AACvF,OAAO,EAAE,kBAAkB,EAAE,MAAM,wBAAwB,CAAC;AAG5D,OAAO,EAAE,eAAe,EAAE,MAAM,qBAAqB,CAAC;AACtD,OAAO,EAAE,MAAM,EAAE,MAAM,qCAAqC,CAAC;AAE7D;;;;;;;;;;;;;;;;;;GAkBG;AACH,MAAM,CAAC,MAAM,sBAAsB,GAAkB,MAAM,CAAC,wBAAwB,CAAC,CAAC;AAEtF;;GAEG;AACH,SAAS,sCAAsC,CAAC,YAA8B;IAC7E,MAAM,cAAc,GAAG,YAAY,CAAC,QAAQ,CAAC,MAAM,CAAC;IACpD,OAAO,CACN,cAAc,KAAK,SAAS;QAC3B,cAA0C,CAAC,sBAAsB,CAAC,KAAK,IAAI,CAC5E,CAAC;AACH,CAAC;AAED;;;;;;;;;;;;;;;;;;GAkBG;AACH,MAAM,UAAU,2CAA2C,CAC1D,UAAsB;IAEtB,OAAO,CACN,oBAA0D,EAC1D,cAAwB,EACvB,EAAE;QACH,IAAI,oBAAoB,KAAK,SAAS,EAAE,CAAC;YACxC,iFAAiF;YACjF,OAAO,KAAK,CAAC;QACd,CAAC;QAED,MAAM,UAAU,GAAG,UAAU,CAAC,WAAW,CAAC,GAAG,CAAC,oBAAoB,CAAC,CAAC;QACpE,IAAI,UAAU,KAAK,SAAS,EAAE,CAAC;YAC9B,gDAAgD;YAChD,mIAAmI;YACnI,sCAAsC;YACtC,kEAAkE;YAClE,oIAAoI;YACpI,OAAO,KAAK,CAAC;QACd,CAAC;QAED,IAAI,kBAAkB,CAAC,UAAU,CAAC,EAAE,CAAC;YACpC,MAAM,iBAAiB,GAAG,UAAU,CAAC,sBAAsB,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC;YAChF,IAAI,iBAAiB,KAAK,SAAS,EAAE,CAAC;gBACrC,MAAM,WAAW,GAAG,UAAU,CAAC,MAAM,CAAC,GAAG,CAAC,iBAAiB,CAAC,CAAC;gBAC7D,IAAI,WAAW,KAAK,SAAS,EAAE,CAAC;oBAC/B,OAAO,sCAAsC,CAAC,WAAW,CAAC,gBAAgB,CAAC,CAAC;gBAC7E,CAAC;YACF,CAAC;YACD,OAAO,KAAK,CAAC;QACd,CAAC;QAED,MAAM,YAAY,GAAG,eAAe,CACnC,4BAA4B,CAAC,UAAU,CAAC,CAAC,iBAAiB,CAC1D,CAAC;QACF,MAAM,CACL,YAAY,KAAK,SAAS,EAC1B,
|
|
1
|
+
{"version":3,"file":"incrementalAllowedTypes.js","sourceRoot":"","sources":["../../../src/simple-tree/api/incrementalAllowedTypes.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAGH,OAAO,EAAE,4BAA4B,EAAyB,MAAM,kBAAkB,CAAC;AACvF,OAAO,EAAE,kBAAkB,EAAE,MAAM,wBAAwB,CAAC;AAG5D,OAAO,EAAE,eAAe,EAAE,MAAM,qBAAqB,CAAC;AACtD,OAAO,EAAE,MAAM,EAAE,MAAM,qCAAqC,CAAC;AAE7D;;;;;;;;;;;;;;;;;;GAkBG;AACH,MAAM,CAAC,MAAM,sBAAsB,GAAkB,MAAM,CAAC,wBAAwB,CAAC,CAAC;AAEtF;;GAEG;AACH,SAAS,sCAAsC,CAAC,YAA8B;IAC7E,MAAM,cAAc,GAAG,YAAY,CAAC,QAAQ,CAAC,MAAM,CAAC;IACpD,OAAO,CACN,cAAc,KAAK,SAAS;QAC3B,cAA0C,CAAC,sBAAsB,CAAC,KAAK,IAAI,CAC5E,CAAC;AACH,CAAC;AAED;;;;;;;;;;;;;;;;;;GAkBG;AACH,MAAM,UAAU,2CAA2C,CAC1D,UAAsB;IAEtB,OAAO,CACN,oBAA0D,EAC1D,cAAwB,EACvB,EAAE;QACH,IAAI,oBAAoB,KAAK,SAAS,EAAE,CAAC;YACxC,iFAAiF;YACjF,OAAO,KAAK,CAAC;QACd,CAAC;QAED,MAAM,UAAU,GAAG,UAAU,CAAC,WAAW,CAAC,GAAG,CAAC,oBAAoB,CAAC,CAAC;QACpE,IAAI,UAAU,KAAK,SAAS,EAAE,CAAC;YAC9B,gDAAgD;YAChD,mIAAmI;YACnI,sCAAsC;YACtC,kEAAkE;YAClE,oIAAoI;YACpI,OAAO,KAAK,CAAC;QACd,CAAC;QAED,IAAI,kBAAkB,CAAC,UAAU,CAAC,EAAE,CAAC;YACpC,MAAM,iBAAiB,GAAG,UAAU,CAAC,sBAAsB,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC;YAChF,IAAI,iBAAiB,KAAK,SAAS,EAAE,CAAC;gBACrC,MAAM,WAAW,GAAG,UAAU,CAAC,MAAM,CAAC,GAAG,CAAC,iBAAiB,CAAC,CAAC;gBAC7D,IAAI,WAAW,KAAK,SAAS,EAAE,CAAC;oBAC/B,OAAO,sCAAsC,CAAC,WAAW,CAAC,gBAAgB,CAAC,CAAC;gBAC7E,CAAC;YACF,CAAC;YACD,OAAO,KAAK,CAAC;QACd,CAAC;QAED,MAAM,YAAY,GAAG,eAAe,CACnC,4BAA4B,CAAC,UAAU,CAAC,CAAC,iBAAiB,CAC1D,CAAC;QACF,MAAM,CACL,YAAY,KAAK,SAAS,EAC1B,KAAK,CAAC,0EAA0E,CAChF,CAAC;QACF,OAAO,sCAAsC,CAAC,YAAY,CAAC,CAAC;IAC7D,CAAC,CAAC;AACH,CAAC","sourcesContent":["/*!\n * Copyright (c) Microsoft Corporation and contributors. All rights reserved.\n * Licensed under the MIT License.\n */\n\nimport type { FieldKey, TreeNodeSchemaIdentifier } from \"../../core/index.js\";\nimport { getTreeNodeSchemaPrivateData, type AllowedTypesFull } from \"../core/index.js\";\nimport { isObjectNodeSchema } from \"../node-kinds/index.js\";\nimport type { TreeSchema } from \"./configuration.js\";\nimport type { IncrementalEncodingPolicy } from \"../../feature-libraries/index.js\";\nimport { oneFromIterable } from \"../../util/index.js\";\nimport { assert } from \"@fluidframework/core-utils/internal\";\n\n/**\n * A symbol when present in the {@link AnnotatedAllowedTypes.metadata.custom} property as true, opts in the allowed\n * types to incremental summary optimization.\n * These allowed types will be optimized during summary such that if they don't change across summaries,\n * they will not be encoded and their content will not be included in the summary that is uploaded to the service.\n * @remarks\n * See {@link getShouldIncrementallySummarizeAllowedTypes} for more details.\n *\n * Use {@link SchemaStaticsAlpha.types} to add this metadata to allowed types in a schema.\n * @example\n * ```typescript\n * const sf = new SchemaFactoryAlpha(\"IncrementalSummarization\");\n * class Foo extends sf.objectAlpha(\"foo\", {\n * bar: sf.types([{ type: sf.string, metadata: {} }], {\n * custom: { [incrementalSummaryHint]: true },\n * }),\n * }) {}\n * ```\n */\nexport const incrementalSummaryHint: unique symbol = Symbol(\"IncrementalSummaryHint\");\n\n/**\n * Returns true if the provided allowed types's custom metadata has {@link incrementalSummaryHint} as true.\n */\nfunction isIncrementalSummaryHintInAllowedTypes(allowedTypes: AllowedTypesFull): boolean {\n\tconst customMetadata = allowedTypes.metadata.custom;\n\treturn (\n\t\tcustomMetadata !== undefined &&\n\t\t(customMetadata as Record<symbol, 
unknown>)[incrementalSummaryHint] === true\n\t);\n}\n\n/**\n * This helper function {@link getShouldIncrementallySummarizeAllowedTypes} can be used to generate a callback function\n * of type {@link IncrementalEncodingPolicy}.\n * This callback can be passed as the value for {@link SharedTreeOptionsInternal.shouldEncodeFieldIncrementally} parameter\n * when creating the tree.\n * It will be called for each {@link AllowedTypes} in the schema to determine if it should be incrementally summarized.\n *\n * @param rootSchema - The schema for the root of the tree.\n * @returns A callback function of type {@link IncrementalEncodingPolicy} which can be used to determine if a field\n * should be incrementally summarized based on whether it is an allowed types with the\n * {@link incrementalAllowedTypesMetadata} metadata.\n *\n * @remarks\n * This only works for forest type {@link ForestTypeOptimized} and compression strategy\n * {@link TreeCompressionStrategyExtended.CompressedIncremental}.\n *\n * The {@link incrementalAllowedTypesMetadata} will be replaced with a specialized metadata property once the\n * incremental summary feature and APIs are stabilized.\n */\nexport function getShouldIncrementallySummarizeAllowedTypes(\n\trootSchema: TreeSchema,\n): IncrementalEncodingPolicy {\n\treturn (\n\t\ttargetNodeIdentifier: TreeNodeSchemaIdentifier | undefined,\n\t\ttargetFieldKey: FieldKey,\n\t) => {\n\t\tif (targetNodeIdentifier === undefined) {\n\t\t\t// Root fields cannot be allowed types, so we don't incrementally summarize them.\n\t\t\treturn false;\n\t\t}\n\n\t\tconst targetNode = rootSchema.definitions.get(targetNodeIdentifier);\n\t\tif (targetNode === undefined) {\n\t\t\t// The requested type is unknown to this schema.\n\t\t\t// In this case we have no hints available from the view schema, and fall back to the default behavior of non-incremental encoding.\n\t\t\t// There are two ways this can happen:\n\t\t\t// 1. The view schema being used does not match the stored schema.\n\t\t\t// 2. The view schema is compatible, but there are unknown optional fields which contain new types not described by the view schema.\n\t\t\treturn false;\n\t\t}\n\n\t\tif (isObjectNodeSchema(targetNode)) {\n\t\t\tconst targetPropertyKey = targetNode.storedKeyToPropertyKey.get(targetFieldKey);\n\t\t\tif (targetPropertyKey !== undefined) {\n\t\t\t\tconst fieldSchema = targetNode.fields.get(targetPropertyKey);\n\t\t\t\tif (fieldSchema !== undefined) {\n\t\t\t\t\treturn isIncrementalSummaryHintInAllowedTypes(fieldSchema.allowedTypesFull);\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn false;\n\t\t}\n\n\t\tconst allowedTypes = oneFromIterable(\n\t\t\tgetTreeNodeSchemaPrivateData(targetNode).childAllowedTypes,\n\t\t);\n\t\tassert(\n\t\t\tallowedTypes !== undefined,\n\t\t\t0xc87 /* Non object nodes with fields should only have one allowedTypes entry */,\n\t\t);\n\t\treturn isIncrementalSummaryHintInAllowedTypes(allowedTypes);\n\t};\n}\n"]}
|
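The map above embeds the documentation and in-source example for the new `incrementalSummaryHint` opt-in. The sketch below adapts that example into a standalone snippet. The import path is an assumption (these are alpha APIs, so `SchemaFactoryAlpha`, `incrementalSummaryHint`, and `getShouldIncrementallySummarizeAllowedTypes` presumably come from the package's alpha entry point), and `treeSchema` is a placeholder for however the application obtains the root `TreeSchema`.

```typescript
// Assumed import path: these are alpha APIs, so the exact entry point may differ.
import {
	SchemaFactoryAlpha,
	incrementalSummaryHint,
	getShouldIncrementallySummarizeAllowedTypes,
} from "@fluidframework/tree/alpha";

const sf = new SchemaFactoryAlpha("IncrementalSummarization");

// Opt the allowed types of "bar" into incremental summarization by setting the
// incrementalSummaryHint symbol to true in the allowed types' custom metadata
// (this mirrors the @example in incrementalAllowedTypes.ts).
class Foo extends sf.objectAlpha("foo", {
	bar: sf.types([{ type: sf.string, metadata: {} }], {
		custom: { [incrementalSummaryHint]: true },
	}),
}) {}

// The resulting policy callback is what gets passed to the (internal) option that
// controls incremental field encoding when the tree is created; `treeSchema` is a
// placeholder for the application's root schema.
declare const treeSchema: Parameters<typeof getShouldIncrementallySummarizeAllowedTypes>[0];
const shouldEncodeFieldIncrementally = getShouldIncrementallySummarizeAllowedTypes(treeSchema);
```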
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@fluidframework/tree",
|
|
3
|
-
"version": "2.70.0
|
|
3
|
+
"version": "2.70.0",
|
|
4
4
|
"description": "Distributed tree",
|
|
5
5
|
"homepage": "https://fluidframework.com",
|
|
6
6
|
"repository": {
|
|
@@ -101,17 +101,17 @@
|
|
|
101
101
|
"temp-directory": "nyc/.nyc_output"
|
|
102
102
|
},
|
|
103
103
|
"dependencies": {
|
|
104
|
-
"@fluid-internal/client-utils": "2.70.0
|
|
105
|
-
"@fluidframework/container-runtime": "2.70.0
|
|
106
|
-
"@fluidframework/core-interfaces": "2.70.0
|
|
107
|
-
"@fluidframework/core-utils": "2.70.0
|
|
108
|
-
"@fluidframework/datastore-definitions": "2.70.0
|
|
109
|
-
"@fluidframework/driver-definitions": "2.70.0
|
|
110
|
-
"@fluidframework/id-compressor": "2.70.0
|
|
111
|
-
"@fluidframework/runtime-definitions": "2.70.0
|
|
112
|
-
"@fluidframework/runtime-utils": "2.70.0
|
|
113
|
-
"@fluidframework/shared-object-base": "2.70.0
|
|
114
|
-
"@fluidframework/telemetry-utils": "2.70.0
|
|
104
|
+
"@fluid-internal/client-utils": "~2.70.0",
|
|
105
|
+
"@fluidframework/container-runtime": "~2.70.0",
|
|
106
|
+
"@fluidframework/core-interfaces": "~2.70.0",
|
|
107
|
+
"@fluidframework/core-utils": "~2.70.0",
|
|
108
|
+
"@fluidframework/datastore-definitions": "~2.70.0",
|
|
109
|
+
"@fluidframework/driver-definitions": "~2.70.0",
|
|
110
|
+
"@fluidframework/id-compressor": "~2.70.0",
|
|
111
|
+
"@fluidframework/runtime-definitions": "~2.70.0",
|
|
112
|
+
"@fluidframework/runtime-utils": "~2.70.0",
|
|
113
|
+
"@fluidframework/shared-object-base": "~2.70.0",
|
|
114
|
+
"@fluidframework/telemetry-utils": "~2.70.0",
|
|
115
115
|
"@sinclair/typebox": "^0.34.13",
|
|
116
116
|
"@tylerbu/sorted-btree-es6": "^1.8.0",
|
|
117
117
|
"@types/ungap__structured-clone": "^1.2.0",
|
|
@@ -122,19 +122,19 @@
|
|
|
122
122
|
"devDependencies": {
|
|
123
123
|
"@arethetypeswrong/cli": "^0.17.1",
|
|
124
124
|
"@biomejs/biome": "~1.9.3",
|
|
125
|
-
"@fluid-internal/mocha-test-setup": "2.70.0
|
|
126
|
-
"@fluid-private/stochastic-test-utils": "2.70.0
|
|
127
|
-
"@fluid-private/test-dds-utils": "2.70.0
|
|
128
|
-
"@fluid-private/test-drivers": "2.70.0
|
|
125
|
+
"@fluid-internal/mocha-test-setup": "~2.70.0",
|
|
126
|
+
"@fluid-private/stochastic-test-utils": "~2.70.0",
|
|
127
|
+
"@fluid-private/test-dds-utils": "~2.70.0",
|
|
128
|
+
"@fluid-private/test-drivers": "~2.70.0",
|
|
129
129
|
"@fluid-tools/benchmark": "^0.51.0",
|
|
130
130
|
"@fluid-tools/build-cli": "^0.58.3",
|
|
131
131
|
"@fluidframework/build-common": "^2.0.3",
|
|
132
132
|
"@fluidframework/build-tools": "^0.58.3",
|
|
133
|
-
"@fluidframework/container-definitions": "2.70.0
|
|
134
|
-
"@fluidframework/container-loader": "2.70.0
|
|
133
|
+
"@fluidframework/container-definitions": "~2.70.0",
|
|
134
|
+
"@fluidframework/container-loader": "~2.70.0",
|
|
135
135
|
"@fluidframework/eslint-config-fluid": "^6.1.0",
|
|
136
|
-
"@fluidframework/test-runtime-utils": "2.70.0
|
|
137
|
-
"@fluidframework/test-utils": "2.70.0
|
|
136
|
+
"@fluidframework/test-runtime-utils": "~2.70.0",
|
|
137
|
+
"@fluidframework/test-utils": "~2.70.0",
|
|
138
138
|
"@fluidframework/tree-previous": "npm:@fluidframework/tree@2.63.0",
|
|
139
139
|
"@microsoft/api-extractor": "7.52.11",
|
|
140
140
|
"@types/diff": "^3.5.1",
|
|
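The dependency changes above replace pinned internal versions with `~2.70.0` ranges. As a quick illustration of what a tilde range accepts (a sketch using the `semver` package, not part of this diff):

```typescript
import * as semver from "semver";

// "~2.70.0" allows patch-level updates within the 2.70.x line.
console.log(semver.satisfies("2.70.0", "~2.70.0")); // true
console.log(semver.satisfies("2.70.5", "~2.70.0")); // true
console.log(semver.satisfies("2.71.0", "~2.70.0")); // false (a minor bump is out of range)
```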
package/src/feature-libraries/chunked-forest/codec/compressedEncode.ts
CHANGED
|
@@ -456,7 +456,7 @@ export const incrementalFieldEncoder: FieldEncoder = {
|
|
|
456
456
|
): void {
|
|
457
457
|
assert(
|
|
458
458
|
context.incrementalEncoder !== undefined,
|
|
459
|
-
|
|
459
|
+
0xc88 /* incremental encoder must be defined to use incrementalFieldEncoder */,
|
|
460
460
|
);
|
|
461
461
|
|
|
462
462
|
const chunkReferenceIds = context.incrementalEncoder.encodeIncrementalField(
|
|
package/src/feature-libraries/forest-summary/incrementalSummaryBuilder.ts
CHANGED
|
@@ -529,7 +529,7 @@ export class ForestIncrementalSummaryBuilder implements IncrementalEncoderDecoder
|
|
|
529
529
|
chunkDecoder: (encoded: EncodedFieldBatch) => TreeChunk,
|
|
530
530
|
): TreeChunk {
|
|
531
531
|
const ChunkLoadProperties = this.loadedChunksMap.get(`${referenceId}`);
|
|
532
|
-
assert(ChunkLoadProperties !== undefined,
|
|
532
|
+
assert(ChunkLoadProperties !== undefined, 0xc86 /* Encoded incremental chunk not found */);
|
|
533
533
|
const chunk = chunkDecoder(ChunkLoadProperties.encodedContents);
|
|
534
534
|
|
|
535
535
|
// Account for the reference about to be added in `chunkTrackingPropertiesMap`
|
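The two hunks above add assert codes around the `IncrementalEncoderDecoder` contract that `ForestIncrementalSummaryBuilder` implements. The sketch below paraphrases that contract's shape from the signatures visible in this diff; the type aliases are stand-ins for the package's internal `ChunkReferenceId`, `EncodedFieldBatch`, `TreeChunk`, and `ITreeCursorSynchronous` types, which are not public.

```typescript
// Stand-in aliases for internal types referenced by the real interface.
type ChunkReferenceId = number;
type EncodedFieldBatch = unknown;
type TreeChunk = unknown;
type ITreeCursorSynchronous = unknown;

// Paraphrased shape of the encoder/decoder pair implemented by
// ForestIncrementalSummaryBuilder (see the signatures in the hunks above).
interface IncrementalEncoderDecoderSketch {
	// Encodes the incremental chunks in the field under `cursor`, writing each
	// chunk's summary (or a handle if it is unchanged) and returning the
	// reference IDs that the parent field's encoding records.
	encodeIncrementalField(
		cursor: ITreeCursorSynchronous,
		chunkEncoder: (chunk: TreeChunk) => EncodedFieldBatch,
	): ChunkReferenceId[];

	// Rebuilds a chunk from the contents downloaded for `referenceId` at load
	// time, and re-registers it so it can be summarized incrementally again.
	decodeIncrementalChunk(
		referenceId: ChunkReferenceId,
		chunkDecoder: (encoded: EncodedFieldBatch) => TreeChunk,
	): TreeChunk;
}
```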
package/src/packageVersion.ts
CHANGED
|
package/src/simple-tree/api/incrementalAllowedTypes.ts
CHANGED
|
@@ -100,7 +100,7 @@ export function getShouldIncrementallySummarizeAllowedTypes(
|
|
|
100
100
|
);
|
|
101
101
|
assert(
|
|
102
102
|
allowedTypes !== undefined,
|
|
103
|
-
|
|
103
|
+
0xc87 /* Non object nodes with fields should only have one allowedTypes entry */,
|
|
104
104
|
);
|
|
105
105
|
return isIncrementalSummaryHintInAllowedTypes(allowedTypes);
|
|
106
106
|
};
|
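The remaining source changes in this version are one-line edits that add short assert codes (`0xc86`, `0xc87`, `0xc88`) to calls of the shared `assert` helper; Fluid's release tooling appears to assign these hex codes while keeping the human-readable message as a trailing comment. A minimal sketch of the pattern, assuming the same `assert` helper the sources import:

```typescript
import { assert } from "@fluidframework/core-utils/internal";

// Hypothetical value standing in for the allowedTypes lookup in the real code.
const allowedTypes: object | undefined = {};

// Release builds use a short hex code as the assert message and keep the
// original text as a comment, as seen in the hunks above.
assert(
	allowedTypes !== undefined,
	0xc87 /* Non object nodes with fields should only have one allowedTypes entry */,
);
```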